
Live streaming on Android mainly means pushing the data captured by the camera to a server and then playing that stream back on a client. A SurfaceView + SurfaceTexture combination is used to display the camera data: after the SurfaceTexture receives the image stream it can be post-processed (for example, adding a text watermark) and then handed to another SurfaceView for display.
First, capturing camera data on the Android side: there are plenty of tutorials about this online. The raw camera data arrives as the byte[] data argument of the callback onPreviewFrame(byte[] data, Camera camera).
The key problem is how to get that data to the server; I use FFmpeg to solve it. Create a jni folder in the Android project and call into it through JNI. The native functions are declared as follows:
// Called from onPreviewFrame() for every camera frame
public native void push(byte[] data);
// Can be called from surfaceChanged(); initializes the output path in the JNI layer
public native int initPush(String outputurl, int width, int height);
The JNI-layer code is as follows:
static void ffmpeg_init() {
    avcodec_register_all();      // register all codecs
#if CONFIG_AVDEVICE
    avdevice_register_all();
#endif
    avfilter_register_all();
    av_register_all();           // register all muxers/demuxers
    avformat_network_init();     // required for RTMP/network output
}
static int avinit(int width, int height) {
    /* encoder setup begins */
    if (codec == NULL) {
        yuv_width = width;
        yuv_height = height;
        y_length = width * height;
        uv_length = width * height / 4;
        LOGI("init start\n");

        codec = avcodec_find_encoder(AV_CODEC_ID_H264);
        if (!codec) {
            LOGI("Codec not found\n");
            return -1;
        }
        c = avcodec_alloc_context3(codec);
        if (!c) {
            LOGI("Could not allocate video codec context\n");
            return -1;
        }
        /* put sample parameters */
        c->bit_rate = 400000;
        /* resolution must be a multiple of two */
        c->width = width;
        c->height = height;
        /* frames per second */
        //c->time_base = (AVRational){1,5};
        c->time_base.den = 25;
        c->time_base.num = 1;
        /* emit one intra frame every ten frames; check frame pict_type before
         * passing the frame to the encoder: if frame->pict_type is
         * AV_PICTURE_TYPE_I, gop_size is ignored and the encoder output
         * will always be an I-frame regardless of gop_size */
        if (ofmt_ctx->oformat->flags & AVFMT_GLOBALHEADER)
            c->flags |= CODEC_FLAG_GLOBAL_HEADER;
        c->gop_size = 10;
        c->max_b_frames = 10;
        c->pix_fmt = AV_PIX_FMT_YUV420P;
        //av_opt_set(c->priv_data, "preset", "slow", 0);
        av_opt_set(c->priv_data, "preset", "superfast", 0);
        av_opt_set(c->priv_data, "tune", "zerolatency", 0);

        /* open it */
        if (avcodec_open2(c, codec, NULL) < 0) {
            LOGI("Could not open codec\n");
            return -1;
        }
        LOGI("init end 1\n");

        video_st = avformat_new_stream(ofmt_ctx, codec);
        if (video_st == NULL) {
            LOGI("video_st == NULL\n");
            return -1;
        }
        video_st->time_base.num = 1;
        video_st->time_base.den = 25;
        video_st->codec = c;
    }
    return 0;
    /* encoder setup ends */
}
JNIEXPORT jint JNICALL Java_com_example_directvideo_MainActivity_initPush
        (JNIEnv *env, jobject obj, jstring out, jint width, jint height) {
    int ret;
    ffmpeg_init();

    const char *str = (*env)->GetStringUTFChars(env, out, 0);
    // e.g. "rtmp://192.168.1.102:1935/myapp/test1" or "/storage/emulated/0/zsy.mp4"
    LOGI("%s %d %d ", str, width, height);

    //AVOutputFormat *ofmt = NULL;       // file-scope globals
    //AVFormatContext *ofmt_ctx = NULL;

    // Output context: FLV muxer for RTMP
    avformat_alloc_output_context2(&ofmt_ctx, NULL, "flv", str);        // RTMP
    //avformat_alloc_output_context2(&ofmt_ctx, NULL, "mpegts", str);   // UDP
    if (!ofmt_ctx) {
        LOGI("Could not create output context\n");
        ret = AVERROR_UNKNOWN;
        goto end;
    }

    ret = avinit(width, height);
    if (ret < 0)
        goto end;
    ofmt = ofmt_ctx->oformat;

    // Open the output URL
    if (!(ofmt->flags & AVFMT_NOFILE)) {
        ret = avio_open(&ofmt_ctx->pb, str, AVIO_FLAG_WRITE);
        if (ret < 0) {
            LOGI("Could not open output URL '%s'", str);
            goto end;
        }
    }

    ret = avformat_write_header(ofmt_ctx, NULL);
    if (ret < 0) {
        LOGE("Error occurred when opening output URL\n");
        goto end;
    }

end:
    (*env)->ReleaseStringUTFChars(env, out, str);
    if (ret < 0 && ret != AVERROR_EOF) {
        LOGI("Error occurred.\n");
        return ret;
    }
    intited = 1;
    return 0;
}
Note that the video stream must be encoded with the H.264 encoder before it is transmitted. Once initialization succeeds, push() can be called to send every camera frame to the server.
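The body of push() is not listed above, so here is a minimal sketch of one way it could be implemented. It assumes the camera delivers frames in the default NV21 preview format and reuses the globals from the code above (c, video_st, ofmt_ctx, yuv_width, yuv_height, y_length, intited); the frame counter framecnt is an extra variable introduced here only to generate the pts.

static int framecnt = 0;  /* running pts counter, not part of the original code */

JNIEXPORT void JNICALL Java_com_example_directvideo_MainActivity_push
        (JNIEnv *env, jobject obj, jbyteArray data) {
    if (!intited)
        return;

    jbyte *in = (*env)->GetByteArrayElements(env, data, NULL);

    AVFrame *frame = av_frame_alloc();
    frame->format = AV_PIX_FMT_YUV420P;
    frame->width  = yuv_width;
    frame->height = yuv_height;
    if (av_frame_get_buffer(frame, 0) < 0) {
        av_frame_free(&frame);
        (*env)->ReleaseByteArrayElements(env, data, in, JNI_ABORT);
        return;
    }

    /* NV21 from the camera: a Y plane followed by one interleaved V/U plane.
     * Copy Y row by row (linesize may be padded) and de-interleave VU into
     * the planar U and V planes expected by AV_PIX_FMT_YUV420P. */
    const uint8_t *src = (const uint8_t *) in;
    const uint8_t *vu  = src + y_length;
    int i, j;
    for (i = 0; i < yuv_height; i++)
        memcpy(frame->data[0] + i * frame->linesize[0],
               src + i * yuv_width, yuv_width);
    for (i = 0; i < yuv_height / 2; i++) {
        for (j = 0; j < yuv_width / 2; j++) {
            frame->data[2][i * frame->linesize[2] + j] = vu[i * yuv_width + 2 * j];     /* V */
            frame->data[1][i * frame->linesize[1] + j] = vu[i * yuv_width + 2 * j + 1]; /* U */
        }
    }
    frame->pts = framecnt++;

    AVPacket pkt;
    av_init_packet(&pkt);
    pkt.data = NULL;
    pkt.size = 0;

    int got_output = 0;
    if (avcodec_encode_video2(c, &pkt, frame, &got_output) >= 0 && got_output) {
        pkt.stream_index = video_st->index;
        /* rescale pts/dts from the codec time base (1/25) to the stream time base */
        av_packet_rescale_ts(&pkt, c->time_base, video_st->time_base);
        av_interleaved_write_frame(ofmt_ctx, &pkt);
        av_free_packet(&pkt);
    }

    av_frame_free(&frame);
    (*env)->ReleaseByteArrayElements(env, data, in, JNI_ABORT);
}

When streaming stops, av_write_trailer(ofmt_ctx) should be called, followed by avcodec_close(c) and avio_close(ofmt_ctx->pb), so the stream opened by avformat_write_header() in initPush() is finalized cleanly.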
On Ubuntu I built a simple streaming media server with nginx plus its RTMP module.
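For reference, assuming the nginx-rtmp-module is installed, a minimal rtmp block matching the URL used below could look like this (the application name myapp must match the one in the push and play URLs):

rtmp {
    server {
        listen 1935;          # default RTMP port
        application myapp {
            live on;          # accept live streams pushed to rtmp://host:1935/myapp/<stream>
        }
    }
}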
On a PC, the stream can then be played with the command: ffplay rtmp://192.168.1.102:1935/myapp/test1
