一、播放視頻說明
1、兩種方式播放視頻
①shader播放YUV,後面再介紹。
②RGB直接顯示數據,簡單。性能差,用到FFmpeg的格式轉換,沒有shader效率高。本文介紹這個方式。
2.GLSurfaceView原理(雙緩沖機制):
SurfaceHolder: getHolder().getSurface();
得到Surface,取出其中緩沖地址,寫入RGB數據。
3.新建一個java的XPlay組件,繼承於GLSurfaceView,並在xml中進行布局編寫。
4.調用native函數:public native void Open(String url,Object surface);
5.在C++中進行取出緩沖地址,將數據放到緩沖並發送出去進行顯示。
二、函數說明
1、需要的頭文件
#include <android/native_window.h>
#include <android/native_window_jni.h>
2.函數說明
ANativeWindow *nwin = ANativeWindow_fromSurface(env,surface);//env為JNIEnv *env ,surface為java傳過來的jobject surface 生成一個原始的窗口類
ANativeWindow_setBuffersGeometry(nwin,outWidth,outHeight,WINDOW_FORMAT_RGBA_8888); // 對原始的窗口類設置,設置寬高格式,不是surface的寬高,設置nativewindow的buff,可自動拉伸
ANativeWindow_Buffer wbuf;
//雙緩沖
ANativeWindow_lock(nwin,&wbuf,0); //加鎖、獲取nwin的緩沖到wbuf中,
uint8_t *dst = (uint8_t*)wbuf.bits; //緩沖地址,跟顯卡交互的, 地址是:內存地址
memcpy(dst,rgb,outWidth*outHeight*4); // 復制rgb到地址中,寬*高*4 ,rgb為像素轉換完成的數據
ANativeWindow_unlockAndPost(nwin); //解鎖並post出去
代碼說明:
1.新建XPlay
2.在布局文件layout中加入Xplay
3.C++代碼
layout的XML:
<?xml version="1.0" encoding="utf-8"?>
<!-- Full-screen video layout. A custom view must be referenced by its
     fully-qualified class name (package derived from the JNI symbol
     Java_aplay_testffmpeg_XPlay_Open); a bare <XPlay> tag fails to
     inflate with ClassNotFoundException. -->
<android.support.constraint.ConstraintLayout
    xmlns:android="http://schemas.android.com/apk/res/android"
    xmlns:app="http://schemas.android.com/apk/res-auto"
    xmlns:tools="http://schemas.android.com/tools"
    android:layout_width="match_parent"
    android:layout_height="match_parent"
    tools:context="aplay.testffmpeg.MainActivity">

    <aplay.testffmpeg.XPlay
        android:layout_width="match_parent"
        android:layout_height="match_parent" />

</android.support.constraint.ConstraintLayout>
Xplay代碼:

import android.content.Context;
import android.opengl.GLSurfaceView;
import android.util.AttributeSet;
import android.view.SurfaceHolder;

/**
 * Video view: once the surface exists, starts a worker thread that
 * hands the Surface to native code, which decodes and renders into it.
 *
 * GLSurfaceView registers itself as the SurfaceHolder.Callback, so the
 * overrides below are invoked without an explicit addCallback() here.
 *
 * NOTE(review): the native library containing Open() must be loaded
 * (System.loadLibrary) before surfaceCreated fires — presumably done in
 * MainActivity, which is outside this chunk; verify, otherwise Open()
 * throws UnsatisfiedLinkError.
 */
public class XPlay extends GLSurfaceView implements Runnable, SurfaceHolder.Callback {

    public XPlay(Context context, AttributeSet attrs) {
        super( context,attrs );
    }

    @Override
    public void run() {
        // Decode/display on a background thread so the UI thread is not blocked.
        // Hard-coded test file path; requires storage permission.
        Open("/sdcard/1080.mp4",getHolder().getSurface());
    }

    @Override
    public void surfaceCreated(SurfaceHolder var1){
        // Surface is ready: start playback on a new thread.
        new Thread( this ).start();
    }

    @Override
    public void surfaceChanged(SurfaceHolder var1, int var2, int var3, int var4){
    }

    @Override
    public void surfaceDestroyed(SurfaceHolder var1){
    }

    /**
     * Native entry point implemented in C++ (Java_aplay_testffmpeg_XPlay_Open).
     * @param url     media file path or URL
     * @param surface android.view.Surface to render decoded RGBA frames into
     */
    public native void Open(String url,Object surface);
}
C++代碼

/******************************************
 * FFmpeg playback demo: demux + decode, pixel-format conversion
 * (video -> RGBA, blitted into an ANativeWindow) and audio
 * resampling (-> packed S16 stereo).
 ******************************************/
#include <jni.h>
#include <string>
#include <cstring>      // memcpy (was only transitively included)
#include <sys/time.h>   // gettimeofday(), used by GetNowMs()
#include <android/log.h>
#include <android/native_window.h>
#include <android/native_window_jni.h>

#define LOGW(...) __android_log_print(ANDROID_LOG_WARN,"testff",__VA_ARGS__)

extern "C" {
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libavcodec/jni.h>
#include <libswscale/swscale.h>
#include <libswresample/swresample.h>
}
#include <iostream>
using namespace std;

/// Convert an AVRational to double; yields 0 for a degenerate rational
/// (zero numerator or denominator) instead of dividing by zero.
static double r2d(AVRational r)
{
    return r.num == 0 || r.den == 0 ? 0 : (double)r.num / (double)r.den;
}

/// Current wall-clock time in milliseconds. The seconds part is taken
/// modulo 360000, so the value wraps every 100 hours — it is only used
/// for the relative FPS measurement inside Open() below.
long long GetNowMs()
{
    struct timeval tv;
    gettimeofday(&tv, NULL);
    int sec = tv.tv_sec % 360000;
    long long t = sec * 1000LL + tv.tv_usec / 1000;
    return t;
}

extern "C"
JNIEXPORT jint JNI_OnLoad(JavaVM *vm, void *res)
{
    // Hand the JavaVM to FFmpeg so the MediaCodec (hardware) decoder
    // can attach to the JVM.
    av_jni_set_java_vm(vm, 0);
    return JNI_VERSION_1_4;
}

extern "C"
JNIEXPORT jstring JNICALL
Java_aplay_testffmpeg_MainActivity_stringFromJNI(JNIEnv *env, jobject /* this */)
{
    // Smoke test: report the FFmpeg build configuration back to Java.
    std::string hello = "Hello from C++ ";
    hello += avcodec_configuration();
    return env->NewStringUTF(hello.c_str());
}

/// Open a decoder for stream `streamIndex` of `ic`. When `hwName` is
/// non-NULL, that decoder (e.g. "h264_mediacodec") is tried first with
/// the stream's software decoder as fallback. (The original code
/// overwrote the already-found software decoder and aborted whenever
/// the hardware decoder was missing.)
/// Returns an opened AVCodecContext, or NULL on failure.
static AVCodecContext *OpenDecoder(AVFormatContext *ic, int streamIndex, const char *hwName)
{
    AVCodec *codec = hwName ? avcodec_find_decoder_by_name(hwName) : NULL;
    if (!codec)
        codec = avcodec_find_decoder(ic->streams[streamIndex]->codecpar->codec_id);
    if (!codec) {
        LOGW("avcodec_find failed!");
        return NULL;
    }
    AVCodecContext *c = avcodec_alloc_context3(codec);
    avcodec_parameters_to_context(c, ic->streams[streamIndex]->codecpar);
    c->thread_count = 8;  // up to 8 decoding threads
    if (avcodec_open2(c, 0, 0) != 0) {
        LOGW("avcodec_open2 failed!");
        avcodec_free_context(&c);
        return NULL;
    }
    return c;
}

/// JNI entry point: opens `url_`, decodes video+audio, converts video
/// frames to RGBA and posts them to `surface`; audio is resampled to
/// packed S16 stereo (not yet played). Loops the file forever via
/// seek-on-EOF, so this call does not return during normal playback.
extern "C"
JNIEXPORT void JNICALL
Java_aplay_testffmpeg_XPlay_Open(JNIEnv *env, jobject instance, jstring url_, jobject surface)
{
    const char *path = env->GetStringUTFChars(url_, 0);

    // Register demuxers/decoders and init networking. (Deprecated
    // no-ops from FFmpeg 4.0 on, but required by older builds.)
    av_register_all();
    avformat_network_init();
    avcodec_register_all();

    // ---- demuxer ------------------------------------------------------
    AVFormatContext *ic = NULL;
    int re = avformat_open_input(&ic, path, 0, 0);
    if (re != 0) {
        LOGW("avformat_open_input failed!:%s", av_err2str(re));
        env->ReleaseStringUTFChars(url_, path);  // was leaked on this path
        return;
    }
    LOGW("avformat_open_input %s success!", path);

    re = avformat_find_stream_info(ic, 0);
    if (re != 0)
        LOGW("avformat_find_stream_info failed!");
    LOGW("duration = %lld nb_streams = %d", ic->duration, ic->nb_streams);

    // ---- stream discovery ---------------------------------------------
    int fps = 0;
    int videoStream = 0;
    int audioStream = 1;
    for (int i = 0; i < ic->nb_streams; i++) {
        AVStream *as = ic->streams[i];
        if (as->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
            LOGW("視頻數據");
            videoStream = i;
            fps = (int)r2d(as->avg_frame_rate);
            LOGW("fps = %d,width=%d height=%d codeid=%d pixformat=%d", fps,
                 as->codecpar->width,
                 as->codecpar->height,
                 as->codecpar->codec_id,
                 as->codecpar->format);
        } else if (as->codecpar->codec_type == AVMEDIA_TYPE_AUDIO) {
            LOGW("音頻數據");
            audioStream = i;
            LOGW("sample_rate=%d channels=%d sample_format=%d",
                 as->codecpar->sample_rate,
                 as->codecpar->channels,
                 as->codecpar->format);
        }
    }
    // Prefer FFmpeg's own choice of the "best" audio stream.
    audioStream = av_find_best_stream(ic, AVMEDIA_TYPE_AUDIO, -1, -1, NULL, 0);
    LOGW("av_find_best_stream audioStream = %d", audioStream);

    // ---- decoders -----------------------------------------------------
    // Video: hardware decoder first, software fallback.
    AVCodecContext *vc = OpenDecoder(ic, videoStream, "h264_mediacodec");
    if (!vc) {
        avformat_close_input(&ic);
        env->ReleaseStringUTFChars(url_, path);
        return;
    }
    LOGW("vc timebase = %d/ %d", vc->time_base.num, vc->time_base.den);

    // Audio: software decoder only.
    AVCodecContext *ac = OpenDecoder(ic, audioStream, NULL);
    if (!ac) {
        avcodec_free_context(&vc);
        avformat_close_input(&ic);
        env->ReleaseStringUTFChars(url_, path);
        return;
    }

    // ---- per-frame state ----------------------------------------------
    AVPacket *pkt = av_packet_alloc();
    AVFrame *frame = av_frame_alloc();
    long long start = GetNowMs();
    int frameCount = 0;

    SwsContext *vctx = NULL;  // pixel-format converter, lazily (re)created
    int outWidth = 1280;
    int outHeight = 720;
    // Sized for 1080p RGBA so larger-than-output intermediates still fit.
    char *rgb = new char[1920 * 1080 * 4];
    char *pcm = new char[48000 * 4 * 2];

    // Audio resampler: source layout/format -> packed S16 stereo,
    // sample rate unchanged.
    SwrContext *actx = swr_alloc();
    actx = swr_alloc_set_opts(actx,
                              av_get_default_channel_layout(2),
                              AV_SAMPLE_FMT_S16, ac->sample_rate,
                              av_get_default_channel_layout(ac->channels),
                              ac->sample_fmt, ac->sample_rate,
                              0, 0);
    re = swr_init(actx);
    if (re != 0)
        LOGW("swr_init failed!");
    else
        LOGW("swr_init success!");

    // ---- output window ------------------------------------------------
    ANativeWindow *nwin = ANativeWindow_fromSurface(env, surface);
    // The window scales our fixed-size RGBA buffer to the surface size.
    ANativeWindow_setBuffersGeometry(nwin, outWidth, outHeight, WINDOW_FORMAT_RGBA_8888);
    ANativeWindow_Buffer wbuf;

    // ---- demux/decode loop (loops the file forever via seek-on-EOF) ---
    for (;;) {
        // Report the decode FPS every 3 seconds.
        if (GetNowMs() - start >= 3000) {
            LOGW("now decode fps is %d", frameCount / 3);
            start = GetNowMs();
            frameCount = 0;
        }

        re = av_read_frame(ic, pkt);
        if (re != 0) {
            LOGW("讀取到結尾處!");
            // Rewind to ~20s (in stream time_base units) and keep playing.
            int pos = 20 * r2d(ic->streams[videoStream]->time_base);
            av_seek_frame(ic, videoStream, pos, AVSEEK_FLAG_BACKWARD | AVSEEK_FLAG_FRAME);
            continue;
        }

        AVCodecContext *cc = (pkt->stream_index == audioStream) ? ac : vc;

        re = avcodec_send_packet(cc, pkt);
        av_packet_unref(pkt);  // decoder keeps its own reference
        if (re != 0) {
            LOGW("avcodec_send_packet failed!");
            continue;
        }

        // One packet can yield several frames; drain them all.
        for (;;) {
            re = avcodec_receive_frame(cc, frame);
            if (re != 0)
                break;

            if (cc == vc) {
                // ---- video: convert to RGBA and blit to the window ----
                frameCount++;
                vctx = sws_getCachedContext(vctx,
                                            frame->width, frame->height,
                                            (AVPixelFormat)frame->format,
                                            outWidth, outHeight,
                                            AV_PIX_FMT_RGBA,
                                            SWS_FAST_BILINEAR,
                                            0, 0, 0);
                if (!vctx) {
                    LOGW("sws_getCachedContext failed!");
                } else {
                    uint8_t *data[AV_NUM_DATA_POINTERS] = {0};
                    data[0] = (uint8_t *)rgb;
                    int lines[AV_NUM_DATA_POINTERS] = {0};
                    lines[0] = outWidth * 4;
                    int h = sws_scale(vctx,
                                      (const uint8_t **)frame->data,
                                      frame->linesize, 0,
                                      frame->height,
                                      data, lines);
                    LOGW("sws_scale = %d", h);
                    if (h > 0) {
                        ANativeWindow_lock(nwin, &wbuf, 0);
                        uint8_t *dst = (uint8_t *)wbuf.bits;
                        // wbuf.stride is in PIXELS and may exceed outWidth;
                        // copy row by row — a single memcpy of
                        // outWidth*outHeight*4 bytes shears the image
                        // whenever stride != width.
                        for (int y = 0; y < outHeight; y++) {
                            memcpy(dst + y * wbuf.stride * 4,
                                   rgb + y * outWidth * 4,
                                   outWidth * 4);
                        }
                        ANativeWindow_unlockAndPost(nwin);
                    }
                }
            } else {
                // ---- audio: resample into the pcm buffer ----
                uint8_t *out[2] = {0};
                out[0] = (uint8_t *)pcm;
                int len = swr_convert(actx, out, frame->nb_samples,
                                      (const uint8_t **)frame->data,
                                      frame->nb_samples);
                LOGW("swr_convert = %d", len);
            }
        }
    }

    // NOTE: unreachable while the loop above runs forever; kept correct
    // so adding an exit condition later does not leak.
    delete[] rgb;  // was plain `delete` on new[] — undefined behavior
    delete[] pcm;
    sws_freeContext(vctx);
    swr_free(&actx);
    av_frame_free(&frame);
    av_packet_free(&pkt);
    avcodec_free_context(&vc);
    avcodec_free_context(&ac);
    ANativeWindow_release(nwin);
    avformat_close_input(&ic);
    env->ReleaseStringUTFChars(url_, path);
}