Reference: https://www.cnblogs.com/wenjingu/p/3990071.html
However, the code in that post was developed on Windows; porting it directly to a Linux environment produces compilation errors.
Below is my Linux port, which compiles cleanly, records successfully, and produces a file that plays back correctly.
(1) Compile FFmpeg
First download the FFmpeg source; I used ffmpeg-3.0.12.tar.xz.
Run the usual configure / make / make install sequence:
>./configure --enable-shared --prefix=/home/ffmpeg
>make
>make install
Copy the include and lib folders into an ffmpeg folder under the QT project directory.
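For example, assuming the install prefix above and a QT project located at ~/qt_project (substitute your own project path), the copy and the runtime library path can be set up like this:
>mkdir -p ~/qt_project/ffmpeg
>cp -r /home/ffmpeg/include /home/ffmpeg/lib ~/qt_project/ffmpeg/
>export LD_LIBRARY_PATH=/home/ffmpeg/lib:$LD_LIBRARY_PATH
The last line matters at runtime: the FFmpeg shared libraries must be visible to the dynamic loader, otherwise the program links but fails to start.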
(2) Bring FFmpeg into the QT project
Add the following to the .pro file:
INCLUDEPATH += ./ffmpeg/include
LIBS += ./ffmpeg/lib/libavcodec.so \
./ffmpeg/lib/libavdevice.so \
./ffmpeg/lib/libavfilter.so \
./ffmpeg/lib/libavformat.so \
./ffmpeg/lib/libavutil.so \
./ffmpeg/lib/libswresample.so \
./ffmpeg/lib/libswscale.so
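The same linkage can also be expressed with -L/-l flags instead of listing each .so file; a minimal alternative sketch, assuming the same ./ffmpeg layout inside the project:
INCLUDEPATH += ./ffmpeg/include
LIBS += -L./ffmpeg/lib -lavcodec -lavdevice -lavfilter -lavformat -lavutil -lswresample -lswscale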
In the .h file, include the headers required for development, then declare the functions (recordVideoExec() must be public so it can be called from outside the class):
#include <QObject>
#include <pthread.h>
#include <unistd.h>

extern "C" {
#include "libavcodec/avcodec.h"
#include "libavformat/avformat.h"
#include "libswscale/swscale.h"
#include "libavdevice/avdevice.h"
}

class recordVideo : public QObject
{
public:
    void recordVideoExec();

private:
    static void* ReadingThrd(void *pParam);
};
Write the implementation in the .cpp file:
void recordVideo::recordVideoExec()
{
    pthread_t thread;
    if (0 == pthread_create(&thread, NULL, ReadingThrd, NULL))
    {
        printf("thread create succ, main thread id is %u\n", (unsigned)pthread_self());
    }
}

static AVFormatContext *i_fmt_ctx;
static AVStream *i_video_stream;
static AVFormatContext *o_fmt_ctx;
static AVStream *o_video_stream;
static bool bStop = false;
static int frame_nums = 0;

void *recordVideo::ReadingThrd(void *pParam)
{
    avcodec_register_all();
    av_register_all();
    avformat_network_init();

    /* should be set to NULL so that avformat_open_input() allocates a new one */
    i_fmt_ctx = NULL;

    // RTSP stream URL of the camera, obtained via the ONVIF protocol
    char rtspUrl[] = "rtsp://10.19.17.23:554/Streaming/Channels/101?transportmode=unicast&profile=Profile_1";
    const char *filename = "2.mp4";

    if (avformat_open_input(&i_fmt_ctx, rtspUrl, NULL, NULL) != 0)
    {
        fprintf(stderr, " = could not open input file\n");
        return nullptr;
    }
    if (avformat_find_stream_info(i_fmt_ctx, NULL) < 0)
    {
        fprintf(stderr, " = could not find stream info\n");
        return nullptr;
    }
    //av_dump_format(i_fmt_ctx, 0, rtspUrl, 0);

    /* find the first video stream */
    for (unsigned i = 0; i < i_fmt_ctx->nb_streams; i++)
    {
        if (i_fmt_ctx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO)
        {
            i_video_stream = i_fmt_ctx->streams[i];
            break;
        }
    }
    if (i_video_stream == NULL)
    {
        fprintf(stderr, " = didn't find any video stream\n");
        return nullptr;
    }

    avformat_alloc_output_context2(&o_fmt_ctx, NULL, NULL, filename);

    /*
     * since all input files are supposed to be identical (framerate, dimension, color format, ...)
     * we can safely set output codec values from the first input file
     */
    o_video_stream = avformat_new_stream(o_fmt_ctx, NULL);
    {
        AVCodecContext *c;
        c = o_video_stream->codec;
        c->bit_rate = 400000;
        c->codec_id = i_video_stream->codec->codec_id;
        c->codec_type = i_video_stream->codec->codec_type;
        c->time_base.num = i_video_stream->time_base.num;
        c->time_base.den = i_video_stream->time_base.den;
        fprintf(stderr, " = time_base.num = %d time_base.den = %d\n", c->time_base.num, c->time_base.den);
        c->width = i_video_stream->codec->width;
        c->height = i_video_stream->codec->height;
        c->pix_fmt = i_video_stream->codec->pix_fmt;
        printf(" = width: %d height: %d pix_fmt: %d\n", c->width, c->height, c->pix_fmt);
        c->flags = i_video_stream->codec->flags;
        c->flags |= CODEC_FLAG_GLOBAL_HEADER;
        c->me_range = i_video_stream->codec->me_range;
        c->max_qdiff = i_video_stream->codec->max_qdiff;
        c->qmin = i_video_stream->codec->qmin;
        c->qmax = i_video_stream->codec->qmax;
        c->qcompress = i_video_stream->codec->qcompress;
    }

    avio_open(&o_fmt_ctx->pb, filename, AVIO_FLAG_WRITE);
    avformat_write_header(o_fmt_ctx, NULL);

    int last_pts = 0;
    int last_dts = 0;
    int64_t pts, dts;

    while (!bStop)
    {
        AVPacket i_pkt;
        av_init_packet(&i_pkt);
        i_pkt.size = 0;
        i_pkt.data = NULL;
        if (av_read_frame(i_fmt_ctx, &i_pkt) < 0)
            break;
        /*
         * pts and dts should increase monotonically
         * pts should be >= dts
         */
        i_pkt.flags |= AV_PKT_FLAG_KEY;
        pts = i_pkt.pts;
        i_pkt.pts += last_pts;
        dts = i_pkt.dts;
        i_pkt.dts += last_dts;
        i_pkt.stream_index = 0;
        //printf("%lld %lld\n", i_pkt.pts, i_pkt.dts);

        static int num = 1;
        printf(" = frame %d\n", num++);
        av_interleaved_write_frame(o_fmt_ctx, &i_pkt);
        //av_free_packet(&i_pkt);
        //av_init_packet(&i_pkt);
        //Sleep(10);
        //sleep(1);  // on Linux, usleep() takes microseconds and requires #include <unistd.h>

        if (frame_nums > 2000)  // stop after roughly 2000 packets
        {
            bStop = true;
        }
        frame_nums++;
        usleep(10);
    }
    last_dts += dts;
    last_pts += pts;

    avformat_close_input(&i_fmt_ctx);
    av_write_trailer(o_fmt_ctx);

    avcodec_close(o_fmt_ctx->streams[0]->codec);
    av_freep(&o_fmt_ctx->streams[0]->codec);
    av_freep(&o_fmt_ctx->streams[0]);
    avio_close(o_fmt_ctx->pb);
    av_free(o_fmt_ctx);

    return nullptr;
}
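After the loop stops (roughly 2000 packets in this code), the thread closes the input and finalizes 2.mp4; if ffplay was built along with FFmpeg, the recording can be verified with:
>ffplay 2.mp4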
(3) Finally, create an object of this class in the main function and call its member function recordVideoExec().
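A minimal sketch of such a main(), assuming the class above is declared in a header named recordvideo.h and that a QCoreApplication event loop keeps the process alive while the worker thread records (the header name and the use of QCoreApplication are assumptions, not part of the original project):

#include <QCoreApplication>
#include "recordvideo.h"   // hypothetical header holding the recordVideo class above

int main(int argc, char *argv[])
{
    QCoreApplication a(argc, argv);

    recordVideo recorder;
    recorder.recordVideoExec();   // spawns ReadingThrd, which writes the RTSP stream to 2.mp4

    return a.exec();              // keep the Qt event loop (and the process) running while the thread records
}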