Software Video Decoding and Playback with FFmpeg


A while ago, a project at work needed an RTSP playback library. The original plan was to use VLC directly, but VLC is heavyweight and most of its features were unnecessary, so it seemed simpler to build a small library on top of FFmpeg and D3D; that is what this post covers. A playback library has three parts: networking, decoding, and display. Networking and decoding come with FFmpeg and can be used directly; for display, rendering YUV directly with D3D is the best option. The library uses a multi-threaded model: each stream being played gets its own playback thread. The library interface is as follows:

struct hvplayer;
typedef struct hvplayer hvplayer;

/**
 * Playback-end callback.
 * @param h  player handle
 */
typedef void (*playend_cb)(hvplayer *h);

enum player_state
{
    PLAYER_CONNECTING = 0,
    PLAYER_PLAYING,
    PLAYER_OFF
};

/**
 * Create a player.
 * @param hwnd  id of the display area; on Windows this is the window handle
 * @param url   rtsp address
 * @return NULL on failure, otherwise a player handle
 */
HVEXP hvplayer *hvplayer_new(int32_t hwnd, const char *url);

/**
 * Play an rtsp stream. Non-blocking: a playback thread is created internally
 * and reconnects indefinitely until hvplayer_close is called.
 * @param h  player handle
 * @return 0 on success, -1 on failure
 */
HVEXP int hvplayer_play(hvplayer *h);

/**
 * Get the player state.
 * @param h  player handle
 * @return a player_state value
 */
HVEXP int hvplayer_getstate(hvplayer *h);

/**
 * Stop playback and end the playback thread. Blocks until the playback
 * thread has exited, then releases the hvplayer handle.
 * @param h  player handle
 */
HVEXP void hvplayer_close(hvplayer *h);

/**
 * Register the playback-end callback; it is invoked after hvplayer_close.
 * @param h   player handle
 * @param cb  see the callback definition
 * @warning registering again overwrites the previous callback
 * @warning do not block the calling thread
 */
HVEXP void hvplayer_set_endcb(hvplayer *h, playend_cb cb);

/**
 * SDK initialization; must be called before using the SDK.
 * @return 0 on success
 */
HVEXP int hvdevicevideo_init(void);

Usage is straightforward: one hvplayer object corresponds to one video stream. Call hvplayer_new(), then hvplayer_play(), and finally hvplayer_close(). Before using the library, call hvdevicevideo_init() once.
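A minimal usage sketch is shown below. It only uses the calls declared above; the header name "hvplayer.h", the window handle, the URL, and the fixed Sleep are placeholder assumptions, not part of the library:

#include <windows.h>
#include "hvplayer.h"          /* hypothetical header exposing the API above */

static void on_end(hvplayer *h) /* invoked after hvplayer_close(); must not block */
{
    (void)h;
}

int play_one_stream(int32_t hwnd, const char *url)
{
    if (hvdevicevideo_init() != 0)          /* once per process, before anything else */
        return -1;

    hvplayer *p = hvplayer_new(hwnd, url);
    if (p == NULL)
        return -1;

    hvplayer_set_endcb(p, on_end);
    if (hvplayer_play(p) != 0)              /* non-blocking; reconnects until closed */
    {
        hvplayer_close(p);                  /* release the handle */
        return -1;
    }

    while (hvplayer_getstate(p) == PLAYER_CONNECTING)
        Sleep(100);                          /* poll until connected (demo only) */

    Sleep(60 * 1000);                        /* let it play for a minute */

    hvplayer_close(p);                       /* blocks until the thread exits, frees p */
    return 0;
}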
The hvplayer structure is defined as follows:

struct hvplayer
{
    playend_cb end;
    int32_t hwnd;
    int32_t flage;
    char *url;
    enum player_state ste;
    HANDLE thread;
    clock_t pretm;
    int play;
};

flage controls the reconnect loop, play controls the frame-reading loop, and pretm is used for the RTSP server connection timeout.

play_loop is the playback thread's entry function; the outer loop handles reconnection, and _do is the actual playback loop.

static int play_loop(void* p)
{
    hvplayer *h=(hvplayer*)p;
    while(h->flage)
    {
        h->ste=PLAYER_CONNECTING;
        h->pretm=clock();
        _do(h);
        int s=TIMEOUT_S-(clock()-h->pretm)/CLOCKS_PER_SEC;
        if(s>0) Sleep(s*1000);
    }
    if(h->end) h->end(h);
    free(h->url);
    free(h);
    return 1;
}

 

static void _do(hvplayer*h)
{
    AVCodec *codec=NULL;
    AVCodecContext *cc=NULL;
    AVPacket pk={0};
    AVFormatContext *afc=NULL;
    AVFrame *yuv_buf=NULL;   /* initialized up front so the cleanup labels are safe */
    render *render=NULL;
    int vindex=-1;
    afc=avformat_alloc_context();
    if(afc==0){
        goto err;
    }
    afc->interrupt_callback.callback=timeoutcheck;
    afc->interrupt_callback.opaque=h;
    AVDictionary *dir=NULL;
    char *k1="stimeout";
    char *v1="10";
    char *k2="rtsp_transport";
    char *v2="tcp";
    char *k4="max_delay";
    char *v4="50000";
    int r=av_dict_set(&dir,k1,v1,0);
    r=av_dict_set(&dir,k2,v2,0);
    av_dict_set(&dir,k4,v4,0);
    if(avformat_open_input(&afc,h->url,NULL,&dir)) {
        goto err;
    }
    if(avformat_find_stream_info(afc,NULL)<0) {
        goto err;
    }
    for(int i=0;i<(int)afc->nb_streams;i++)
    {
        if(afc->streams[i]->codec->codec_type==AVMEDIA_TYPE_VIDEO)
        {
            vindex=i;
            break;
        }
    }
    if(vindex==-1){
        goto err;
    }
    cc=afc->streams[vindex]->codec;
    codec=avcodec_find_decoder(cc->codec_id);

    if(codec==0) goto end;
    if(avcodec_open2(cc,codec,NULL)<0) {
        goto err;
    }
    int dr;
    hvframe frame;
    render=render_create(RENDER_TYPE_D3D,h->hwnd,cc->coded_width,cc->coded_height);
    if(render==NULL)
    {
        /* fall back to GDI when D3D is unavailable */
        render=render_create(RENDER_TYPE_GDI,h->hwnd,cc->coded_width,cc->coded_height);
        if(render==NULL){
            goto end;
        }
    }
    yuv_buf=av_frame_alloc();
    if(yuv_buf==0) {
        goto end;
    }
    h->ste=PLAYER_PLAYING;
    DWORD a,b;
    while (h->flage&&av_read_frame(afc,&pk)>=0)
    {
        if(pk.stream_index==vindex)
        {
            avcodec_decode_video2(cc,yuv_buf,&dr,&pk);
            if(dr>0)
            {
                frame.h=cc->coded_height;
                frame.y=yuv_buf->data[0];
                frame.u=yuv_buf->data[1];
                frame.v=yuv_buf->data[2];
                frame.ypitch=yuv_buf->linesize[0];
                frame.uvpitch=yuv_buf->linesize[1];
                render->draw(render,&frame);
            }
        }
        av_free_packet(&pk);
    }
end:
    if(yuv_buf) av_frame_free(&yuv_buf);
    if(render) render->destory(&render);
err:
    if(cc) avcodec_close(cc);
    if(afc){
        avformat_close_input(&afc);
        avformat_free_context(afc);
    }
}

 

  1. avformat_alloc_context() allocates the AVFormatContext object.
  2. avformat_open_input() opens a media source, which can be a file or an rtsp URL. It is a blocking call and does not return until parsing succeeds or fails. So how do you set a timeout? AVFormatContext provides an interrupt callback: during parsing FFmpeg invokes it at a certain frequency, and its return value decides whether avformat_open_input() returns immediately. The function pointer is AVFormatContext->interrupt_callback.callback, and ->interrupt_callback.opaque can carry a piece of user data. If the callback returns 1, avformat_open_input() fails and returns; if it returns 0, processing continues normally.

     This is how I handle it (to enforce a 6-second timeout):

#define TIMEOUT_S 6

int timeoutcheck(void *p)
{
    hvplayer *h=(hvplayer*)p;
    if(h->flage==0) return 1;
    if(h->ste==PLAYER_CONNECTING)
    {
        clock_t ctm=clock();
        int s=(ctm-h->pretm)/CLOCKS_PER_SEC;
        if(s>=TIMEOUT_S)
            return 1;
    }
    return 0;
}

  3. avformat_find_stream_info() parses the stream's format information; the AVCodecContext is then obtained with the following code:

for(int i=0;i<(int)afc->nb_streams;i++)
{
    if(afc->streams[i]->codec->codec_type==AVMEDIA_TYPE_VIDEO)
    {
        vindex=i;
        break;
    }
}
if(vindex==-1){
    goto err;
}
cc=afc->streams[vindex]->codec;

  4. avcodec_find_decoder() looks up the AVCodec from AVCodecContext->codec_id, and avcodec_open2() opens the decoder.

  5. Call av_read_frame() in a loop to read one encoded packet at a time, then call avcodec_decode_video2(AVCodecContext*, AVFrame*, int*, AVPacket*) to decode it to YUV; the third parameter indicates whether a frame was produced (>0 means success). Finally hand the frame to the display module.
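For reference, newer FFmpeg releases deprecate avcodec_decode_video2() in favor of a send/receive pair. The library as written keeps the old call; the sketch below only shows how the same decode step could look with the newer API. The helper name decode_packet and the on_frame callback are illustrative, not part of the library:

#include <libavcodec/avcodec.h>

/* Sketch only: decode one packet with the send/receive API introduced in
 * FFmpeg 3.1. Returns the number of frames handed to on_frame, or a
 * negative AVERROR code. */
static int decode_packet(AVCodecContext *cc, const AVPacket *pk, AVFrame *out,
                         void (*on_frame)(const AVFrame *f, void *opaque), void *opaque)
{
    int ret = avcodec_send_packet(cc, pk);
    if (ret < 0)
        return ret;

    int n = 0;
    /* one packet may yield zero or more frames */
    while ((ret = avcodec_receive_frame(cc, out)) == 0)
    {
        on_frame(out, opaque);   /* e.g. fill an hvframe from out->data/linesize and draw it */
        n++;
    }
    return (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) ? n : ret;
}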

 

For display I wrap everything in a render object, with both a D3D and a GDI implementation. The render structure looks like this:

struct render{
    int hwnd;
    void (*draw)(struct render *self,hvframe *frame);
    void (*destory)(struct render **self);
};
typedef struct render render;

The D3D implementation:

#include <stdlib.h>
#include <string.h>
#include "hvtype.h"
#include "irender.h"
#include "winapi.h"

struct render_d3d
{
    render base;
    IDirect3D9 *d3d;
    IDirect3DDevice9 *d3d_dev;
    IDirect3DSurface9 *surface;
    RECT rec;
};

void d3d_draw(render *h,hvframe *frame);
void render_free(render **h);

render *d3d_new(int hwnd,int pic_w,int pic_h)
{
    struct render_d3d *result=(struct render_d3d *)malloc(sizeof(*result));
    result->base.hwnd=hwnd;
    result->d3d=Direct3DCreate9(D3D_SDK_VERSION);
    if(result->d3d==NULL) return NULL;
    D3DPRESENT_PARAMETERS d3dpp;
    memset(&d3dpp,0,sizeof(d3dpp));
    d3dpp.Windowed=TRUE;
    d3dpp.SwapEffect=D3DSWAPEFFECT_DISCARD;
    d3dpp.BackBufferFormat=D3DFMT_UNKNOWN;
    GetClientRect((HWND)result->base.hwnd,&result->rec);
    HRESULT re=IDirect3D9_CreateDevice(result->d3d,D3DADAPTER_DEFAULT,D3DDEVTYPE_HAL,(HWND)result->base.hwnd,
        D3DCREATE_SOFTWARE_VERTEXPROCESSING,&d3dpp,&result->d3d_dev);
    if(FAILED(re)) return NULL;
    /* offscreen surface in YV12 so the decoder output can be copied in directly */
    re=IDirect3DDevice9_CreateOffscreenPlainSurface(result->d3d_dev,pic_w,pic_h,
        (D3DFORMAT)MAKEFOURCC('Y','V','1','2'),
        D3DPOOL_DEFAULT,&result->surface,NULL);
    if(FAILED(re)) return NULL;
    result->base.draw=d3d_draw;
    result->base.destory=render_free;
    return (render *)result;
}

void d3d_draw(render *h,hvframe *frame)
{
    struct render_d3d *self=(struct render_d3d*)h;
    D3DLOCKED_RECT texture;
    HRESULT re=IDirect3DSurface9_LockRect(self->surface,&texture,NULL,D3DLOCK_DONOTWAIT);
    if(FAILED(re)) return;
    /* YV12 layout in the surface: Y plane, then V, then U; chroma pitch is half the luma pitch */
    int uvstep=texture.Pitch/2;
    char *dest=(char*)texture.pBits;
    char *vdest=(char*)texture.pBits+frame->h*texture.Pitch;
    char *udest=(char*)vdest+frame->h/2*uvstep;
    for(int i=0;i<frame->h;i++)
    {
        memcpy(dest,frame->y,frame->ypitch);
        frame->y+=frame->ypitch;
        dest+=texture.Pitch;
    }
    for(int i=0;i<frame->h/2;i++)
    {
        memcpy(vdest,frame->v,frame->uvpitch);
        memcpy(udest,frame->u,frame->uvpitch);
        vdest+=uvstep;
        udest+=uvstep;
        frame->v+=frame->uvpitch;
        frame->u+=frame->uvpitch;
    }
    IDirect3DSurface9_UnlockRect(self->surface);
    IDirect3DDevice9_BeginScene(self->d3d_dev);
    IDirect3DSurface9 *back_surface;
    IDirect3DDevice9_GetBackBuffer(self->d3d_dev,0,0,D3DBACKBUFFER_TYPE_MONO,&back_surface);
    IDirect3DDevice9_StretchRect(self->d3d_dev,self->surface,NULL,back_surface,&self->rec,D3DTEXF_LINEAR);
    IDirect3DSurface9_Release(back_surface);   /* GetBackBuffer adds a reference */
    IDirect3DDevice9_EndScene(self->d3d_dev);
    IDirect3DDevice9_Present(self->d3d_dev,NULL,NULL,NULL,NULL);
}

void render_free(render **h)
{
    if((*h)==0)return;
    struct render_d3d *self=(struct render_d3d*)(*h);
    IDirect3DDevice9_Clear(self->d3d_dev,0, NULL, D3DCLEAR_TARGET, D3DCOLOR_XRGB(0,0,0), 1.0f, 0 );
    IDirect3DDevice9_Present(self->d3d_dev,NULL,NULL,NULL,NULL);
    IDirect3DSurface9_Release(self->surface);
    IDirect3DDevice9_Release(self->d3d_dev);
    IDirect3D9_Release(self->d3d);
    free(*h);
    *h=0;
}

These are all standard fixed-function pipeline steps, so I won't go over them again. The one thing to watch is row alignment when copying the YUV data into the surface: the pitch returned by LockRect may be larger than the frame width, so each plane has to be copied row by row.
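To make the alignment point explicit, here is a small self-contained sketch of a pitch-aware plane copy, the same pattern d3d_draw applies to the Y, U and V planes; the name copy_plane is illustrative only and not part of the library:

#include <stdint.h>
#include <string.h>

/* Copy one image plane row by row. src_pitch (decoder linesize) and
 * dst_pitch (surface Pitch) may both be larger than the visible width,
 * so a single memcpy of width*height bytes would shear the picture. */
static void copy_plane(uint8_t *dst, int dst_pitch,
                       const uint8_t *src, int src_pitch,
                       int width, int height)
{
    for (int i = 0; i < height; i++)
    {
        memcpy(dst, src, width);   /* only the visible bytes of the row */
        dst += dst_pitch;
        src += src_pitch;
    }
}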

The GDI implementation:

#include <stdlib.h>
#include <windows.h>
#include "hvpfconvert.h"
#include "irender.h"
#include "winapi.h"

struct render_gdi
{
    render base;
    BITMAPINFO bmp;
    HDC dc;
    hvpfconvert *convert;
    uint8_t *buf;
    RECT rec;
};

void gdi_free(render **h)
{
    if((*h)==0)return;
    struct render_gdi *self=(struct render_gdi*)(*h);
    FillRect(self->dc,&self->rec,(HBRUSH)(pGetStockObject(BLACK_BRUSH)));
    ReleaseDC((HWND)self->base.hwnd,self->dc);
    hvpfconvert_free(&self->convert);
    free(self->buf);
    free(*h);
    *h=0;
}

void gdi_draw(render *h,hvframe *frame)
{
    struct render_gdi *self=(struct render_gdi*)h;
    hvpfconvert_convert2(self->convert, frame, self->buf);   /* YUV420P -> BGR24 */
    pSetStretchBltMode(self->dc, STRETCH_HALFTONE);
    pStretchDIBits(self->dc, 0, 0, self->rec.right, self->rec.bottom, 0, 0,
        self->bmp.bmiHeader.biWidth, frame->h, self->buf, &self->bmp, DIB_RGB_COLORS, SRCCOPY);
}

render *gdi_new(int32_t hwnd,int pic_w,int pic_h)
{
    struct render_gdi *re = (struct render_gdi *)calloc(1,sizeof(*re));
    re->convert = hvpfconvert_new(pic_w, pic_h, pic_w, pic_h, PF_YUV420P, PF_BGR24);
    re->dc = GetDC((HWND)hwnd);
    re->base.hwnd = hwnd;
    GetClientRect((HWND)re->base.hwnd, &re->rec);
    re->buf = (uint8_t*)malloc(hvpfconvert_get_size(pic_w, pic_h, PF_BGR24));
    re->bmp.bmiHeader.biBitCount = 24;
    re->bmp.bmiHeader.biClrImportant = 0;
    re->bmp.bmiHeader.biClrUsed = 0;
    re->bmp.bmiHeader.biCompression = BI_RGB;
    re->bmp.bmiHeader.biHeight = -pic_h;      /* negative height: top-down DIB */
    re->bmp.bmiHeader.biWidth = pic_w;
    re->bmp.bmiHeader.biPlanes = 1;
    re->bmp.bmiHeader.biSize = sizeof(re->bmp.bmiHeader);
    re->bmp.bmiHeader.biSizeImage = pic_w*pic_h * 3;
    re->bmp.bmiHeader.biXPelsPerMeter = 0;
    re->bmp.bmiHeader.biYPelsPerMeter = 0;
    re->base.destory=gdi_free;
    re->base.draw=gdi_draw;
    return (render *)re;
}

GDI displays the picture with StretchDIBits, which takes a bitmap, so the YUV frame has to be converted to RGB first. For pixel-format conversion, FFmpeg also ships an efficient converter: sws_scale().
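The hvpfconvert wrapper itself is not shown in this post, but a minimal sketch of what a YUV420P to BGR24 conversion with libswscale could look like is below. The function name yuv420p_to_bgr24 and the assumption that the destination buffer is tightly packed (width*3 bytes per row) are mine, not the library's:

#include <stdint.h>
#include <libswscale/swscale.h>
#include <libavutil/pixfmt.h>

/* Sketch: convert one YUV420P frame to packed BGR24 with libswscale.
 * In real code the SwsContext should be created once and reused. */
static int yuv420p_to_bgr24(const uint8_t *y, const uint8_t *u, const uint8_t *v,
                            int ypitch, int uvpitch,
                            int width, int height, uint8_t *bgr_out)
{
    struct SwsContext *sws = sws_getContext(width, height, AV_PIX_FMT_YUV420P,
                                            width, height, AV_PIX_FMT_BGR24,
                                            SWS_BILINEAR, NULL, NULL, NULL);
    if (sws == NULL)
        return -1;

    const uint8_t *src[4] = { y, u, v, NULL };
    int src_stride[4]     = { ypitch, uvpitch, uvpitch, 0 };
    uint8_t *dst[4]       = { bgr_out, NULL, NULL, NULL };
    int dst_stride[4]     = { width * 3, 0, 0, 0 };   /* assumes no row padding */

    sws_scale(sws, src, src_stride, 0, height, dst, dst_stride);
    sws_freeContext(sws);
    return 0;
}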

Finally, SDK initialization: hvdevicevideo_init() calls the initialization functions that FFmpeg and D3D need:

int hvdevicevideo_init(void){
    if(hv_winapi_init()) return -1;
    avcodec_register_all();
    av_register_all();
    avformat_network_init();
    sdk_init=1;
    CoInitializeEx(NULL,COINIT_MULTITHREADED);
    return 0;
}

Overall the library performs well and is no worse than VLC for this use case. On an i5 machine with D3D support, decoding one 1080p stream takes roughly 3%-5% CPU; machines without D3D fall back to GDI at about 13% CPU per stream. The main candidate for improvement is RTSP: FFmpeg's RTSP handling is only average, with around 400 ms of latency, whereas live555 can get down to about 200 ms. For my requirements 400 ms was acceptable, so I didn't pursue it further. Later, for an embedded project at work, I did a Linux port that uses Intel's VAAPI driver for hardware decoding (the target machine was an Atom CPU at only 1.2 GHz, where software-decoding one stream took over 40% CPU). I'll cover FFmpeg VAAPI hardware-decoded playback in a later post.

 

