【Android】Android Camera實時數據采集及通過MediaCodec硬編碼編碼數據的流程


吐槽:

  其實常用流程都差不多,但是有時候還是會忘記某一步的詳細用法,但是各位朋友請注意,官方已經不推薦Camera類的使用(現在是android.hardware.camera2),但無奈公司項目之前是使用Camera類實現的,並且Camera2貌似是基於API 21以上的,這Android 7的風聲都放出來了,可是6.0現在出了3個多月了市場占有率也才貌似3%不到,什么時候才能有個標准化和統一規范,作為一名Android開發者實屬不易啊,嘆氣~
Android實現攝像頭實時數據采集及通過硬編碼編碼數據的流程
 
/* 
 * 編碼器獲取數據,編碼,編碼后的數據的處理等大致流程如下:
 */
/* 1.獲取原始幀 */ 
@Override
onPreviewFrame( byte[] onPreviewData, Camera camera) { 
    /* 在此可以對onPreviewData進行Rotate或者Scale
     * 也可以轉換yuv的格式,例如yuv420P(YV12)或者yuv420SP(NV21/NV12)
     * 相關開源類庫可以使用libyuv/ffmpeg等
     */
    getRawFrame(onPreviewData)
    /* 然后將onPreviewData加入Camera回調*/
    addCallbackBuffer(onPreviewData);
}
private void getRawFrame( byte[] rawFrame ) { encodFrame(rawFrame); }
/* 2. Encode one raw frame; returns the encoded (e.g. H.264) bytes.
 * Placeholder body — `encodedData` stands in for the encoder's real output
 * and is not defined in this snippet. */
private byte[] encodFrame(byte[] inputData) { return encodedData; }
/* 3.取得編碼后的數據便可進行相應的操作,可以保存為本地文件,也可進行推流 */ 
Operation ? Send(byte[] sendData) : Save(byte[] saveData) 
 

上述代碼onPreviewFrame為Camera類的接口,使用Camera前需要進行SurfaceView及SurfaceHolder的初始化及相應interface的實現:

 
// Binds the preview surface and registers for surface lifecycle callbacks.
private void initview() {
    final SurfaceView preview = (SurfaceView) findViewById(R.id.record_surface);
    final SurfaceHolder holder = preview.getHolder();
    holder.addCallback(this);
    // Deprecated and a no-op from API 11 on, but still needed on older devices.
    holder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
}


@Override
public void surfaceCreated(SurfaceHolder holder) {
    // Surface is ready: open the camera and attach the preview to it.
    openCamera(holder);
}

@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
    // Intentionally empty: preview size/format changes are not handled here.
}

@Override
public void surfaceDestroyed(SurfaceHolder holder) {
    releaseCamera(); // Always release the camera when the surface goes away.
}

/**
 * (Re)opens the back camera and starts the preview on the given surface.
 * Fix: the original assigned the opened camera to the field `camera` but then
 * null-checked and configured `mCamera`, so the opened camera was never
 * configured — unified on the `camera` field declared in this class.
 */
private void openCamera(SurfaceHolder holder) {
    releaseCamera(); // Drop any previous instance before opening a new one.
    try {
        camera = getCamera(Camera.CameraInfo.CAMERA_FACING_BACK); // pick front/back as needed
    } catch (Exception e) {
        camera = null;
        if (AppContext.isDebugMode) {
            e.printStackTrace();
        }
    }
    if (camera != null) {
        try {
            camera.setPreviewCallback(this);
            // Rotates only the on-screen preview; the raw data delivered to
            // onPreviewFrame is unaffected (per the official API docs).
            camera.setDisplayOrientation(90);
            if (parameters == null) {
                parameters = camera.getParameters();
            }
            parameters.setPreviewFormat(ImageFormat.NV21); // common formats: NV21 / YV12
            // NOTE(review): check width/height against getSupportedPreviewSizes()
            // first — an unsupported size makes setParameters throw.
            parameters.setPreviewSize(width, height);
            camera.setParameters(parameters);
            camera.setPreviewDisplay(holder);
            camera.startPreview();
        } catch (Exception e) {
            // Broadened from IOException: setParameters throws RuntimeException
            // on bad configuration; a failed preview should not crash the app.
            e.printStackTrace();
        }
    }
}

@TargetApi(9)
private Camera getCamera(int cameraType) {
    // Camera.open(int) requires API 9+.
    try {
        return Camera.open(cameraType);
    } catch (Exception e) {
        e.printStackTrace();
        return null; // camera is unavailable (in use, absent, or no permission)
    }
}

/* Stops the preview and releases the camera. Each teardown step is guarded
 * separately so a failure in one step never skips the remaining cleanup. */
private synchronized void releaseCamera() {
    if (camera == null) {
        return;
    }
    try {
        camera.setPreviewCallback(null);
    } catch (Exception e) {
        e.printStackTrace();
    }
    try {
        camera.stopPreview();
    } catch (Exception e) {
        e.printStackTrace();
    }
    try {
        camera.release();
    } catch (Exception e) {
        e.printStackTrace();
    }
    camera = null;
}
 

MediaCodec硬編碼實現部分:

 此處推薦參考SRS開源項目中的實現方法: https://github.com/ossrs/srs-sea.git
 
// video device.
private Camera camera;              // active camera instance (null once released)
private MediaCodec vencoder;        // H.264 hardware encoder
private MediaCodecInfo vmci;        // codec info of the chosen encoder
private MediaCodec.BufferInfo vebi; // reusable output-buffer metadata
private byte[] vbuffer;             // scratch YUV buffer for one frame
// video camera settings.
private Camera.Size vsize;          // preview size fed to the encoder
private int vcolor;                 // encoder input color format (COLOR_Format* value)
private int vbitrate_kbps = 300;    // target video bitrate, in kbit/s
private final static int VFPS = 20; // frames per second
private final static int VGOP = 5;  // key-frame (I-frame) interval, in seconds
private final static int VWIDTH = 640;
private final static int VHEIGHT = 480;

/* 首先需要初始化MediaCodec的配置 */
/* Configures and starts the H.264 (AVC) hardware encoder. Relies on the
 * fields vmci/vcolor set by chooseVideoEncoder() and on vsize being set. */
private void initMediaCodec() {
    // choose the right vencoder, prefer qcom then google.
    vcolor = chooseVideoEncoder();
    // vencoder: raw YUV in, H.264 elementary stream out.
    // requires sdk level 16+, Android 4.1, 4.1.1, the JELLY_BEAN
    try {
        vencoder = MediaCodec.createByCodecName(vmci.getName());
    } catch (IOException e) {
        Log.e(TAG, "create vencoder failed.");
        e.printStackTrace();
        return;
    }
    vebi = new MediaCodec.BufferInfo();
    // setup the vencoder.
    // @see https://developer.android.com/reference/android/media/MediaCodec.html
    MediaFormat vformat = MediaFormat.createVideoFormat(MediaFormat.MIMETYPE_VIDEO_AVC, vsize.width, vsize.height);
    vformat.setInteger(MediaFormat.KEY_COLOR_FORMAT, vcolor);
    // NOTE(review): 0 appears to defer the input-buffer size to the codec — confirm.
    vformat.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, 0);
    vformat.setInteger(MediaFormat.KEY_BIT_RATE, 1000 * vbitrate_kbps);
    vformat.setInteger(MediaFormat.KEY_FRAME_RATE, VFPS);
    vformat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, VGOP);
    Log.i(TAG, String.format("vencoder %s, color=%d, bitrate=%d, fps=%d, gop=%d, size=%dx%d",
            vmci.getName(), vcolor, vbitrate_kbps, VFPS, VGOP, vsize.width, vsize.height));
    // the following error can be ignored:
    // 1. the storeMetaDataInBuffers error:
    //      [OMX.qcom.video.encoder.avc] storeMetaDataInBuffers (output) failed w/ err -2147483648
    //      @see http://bigflake.com/mediacodec/#q12
    vencoder.configure(vformat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
    vencoder.start();
}

// Byte size of one YV12 (android YUV) preview buffer, per:
// https://developer.android.com/reference/android/hardware/Camera.Parameters.html#setPreviewFormat(int)
// https://developer.android.com/reference/android/graphics/ImageFormat.html#YV12
private int getYuvBuffer(int width, int height) {
    // Pure-integer alignment: ALIGN(x, n) == ((x + n - 1) / n) * n.
    final int yStride = ((width + 15) / 16) * 16; // stride = ALIGN(width, 16)
    final int ySize = yStride * height;           // y_size = stride * height
    final int cStride = ((width + 31) / 32) * 16; // c_stride = ALIGN(stride/2, 16)
    final int cSize = cStride * height / 2;       // c_size = c_stride * height/2
    // size = y_size + c_size * 2 (one Y plane plus U and V chroma planes)
    return ySize + 2 * cSize;
}

// Finds an encoder that supports the VCODEC mime type. When `name` is given,
// only an encoder whose codec name contains it qualifies; otherwise the first
// match wins. Returns `def` when nothing matches.
private MediaCodecInfo chooseVideoEncoder(String name, MediaCodecInfo def) {
    final int total = MediaCodecList.getCodecCount();
    for (int idx = 0; idx < total; idx++) {
        final MediaCodecInfo info = MediaCodecList.getCodecInfoAt(idx);
        if (!info.isEncoder()) {
            continue;
        }
        for (final String type : info.getSupportedTypes()) {
            if (!type.equalsIgnoreCase(VCODEC)) {
                continue;
            }
            //Log.i(TAG, String.format("vencoder %s types: %s", info.getName(), type));
            if (name == null || info.getName().contains(name)) {
                return info;
            }
        }
    }
    return def;
}

// choose the right supported color format. @see below:
// https://developer.android.com/reference/android/media/MediaCodecInfo.html
// https://developer.android.com/reference/android/media/MediaCodecInfo.CodecCapabilities.html
/* Selects the encoder (stored in vmci) and returns the best-supported YUV420
 * input color format, or 0 when none of the YUV420 formats is supported. */
private int chooseVideoEncoder() {
    // choose the encoder "video/avc":
    //      1. select one when type matched.
    //      2. prefer google avc.
    //      3. prefer qcom avc.
    vmci = chooseVideoEncoder(null, null);
    //vmci = chooseVideoEncoder("google", vmci);
    //vmci = chooseVideoEncoder("qcom", vmci);

    int matchedColorFormat = 0;
    MediaCodecInfo.CodecCapabilities cc = vmci.getCapabilitiesForType(VCODEC);
    for (int i = 0; i < cc.colorFormats.length; i++) {
        int cf = cc.colorFormats[i];
        Log.i(TAG, String.format("vencoder %s supports color fomart 0x%x(%d)", vmci.getName(), cf, cf));

        // choose YUV for h.264, prefer the bigger one.
        // corresponding to the color space transform in onPreviewFrame
        // (fixed: static constants referenced via the class, not the instance)
        if (cf >= MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar
                && cf <= MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar) {
            matchedColorFormat = Math.max(matchedColorFormat, cf);
        }
    }
    for (int i = 0; i < cc.profileLevels.length; i++) {
        MediaCodecInfo.CodecProfileLevel pl = cc.profileLevels[i];
        Log.i(TAG, String.format("vencoder %s support profile %d, level %d", vmci.getName(), pl.profile, pl.level));
    }
    Log.i(TAG, String.format("vencoder %s choose color format 0x%x(%d)", vmci.getName(), matchedColorFormat, matchedColorFormat));
    return matchedColorFormat;
}
 

  上述代碼為SRS的部分實現,僅作參考。

  還推薦一個項目,該項目實現了編碼后的數據存為本地.h264文件,方便分析,本人Fork的git地址:https://github.com/eterrao/MediaCodecEncodeH264.git 

  原作者git地址:https://github.com/sszhangpengfei/MediaCodecEncodeH264.git 

  (在此感謝擁有開源共享精神的各位朋友,因為你們我才能在學習和成長的路上少了很多坑!)

  實際上MediaCodec的實現步驟基本都大同小異,但是請注意在API20以后編碼器數據處理的機制有所改變,官方給出的建議如下:

鏈接:developer.android.com/reference/android/media/MediaCodec.html

以下摘抄官方API:

Depending on the API version, you can process data in three ways:

| Processing Mode                      | API version <= 20 (Jelly Bean/KitKat) | API version >= 21 (Lollipop and later) |
|--------------------------------------|---------------------------------------|----------------------------------------|
| Synchronous API using buffer arrays  | Supported                             | Deprecated                             |
| Synchronous API using buffers        | Not Available                         | Supported                              |
| Asynchronous API using buffers       | Not Available                         | Supported                              |

Asynchronous Processing using Buffers

Since LOLLIPOP, the preferred method is to process data asynchronously by setting a callback before calling configure. Asynchronous mode changes the state transitions slightly, because you must call start() after flush() to transition the codec to the Running sub-state and start receiving input buffers. Similarly, upon an initial call to start the codec will move directly to the Running sub-state and start passing available input buffers via the callback.

MediaCodec is typically used like this in asynchronous mode:

 
MediaCodec codec = MediaCodec.createByCodecName(name);
 MediaFormat mOutputFormat; // member variable
 codec.setCallback(new MediaCodec.Callback() {
   @Override
   void onInputBufferAvailable(MediaCodec mc, int inputBufferId) {
     ByteBuffer inputBuffer = codec.getInputBuffer(inputBufferId);
     // fill inputBuffer with valid data
     …
     codec.queueInputBuffer(inputBufferId, …);
   }

   @Override
   void onOutputBufferAvailable(MediaCodec mc, int outputBufferId, …) {
     ByteBuffer outputBuffer = codec.getOutputBuffer(outputBufferId);
     MediaFormat bufferFormat = codec.getOutputFormat(outputBufferId); // option A
     // bufferFormat is equivalent to mOutputFormat
     // outputBuffer is ready to be processed or rendered.
     …
     codec.releaseOutputBuffer(outputBufferId, …);
   }

   @Override
   void onOutputFormatChanged(MediaCodec mc, MediaFormat format) {
     // Subsequent data will conform to new format.
     // Can ignore if using getOutputFormat(outputBufferId)
     mOutputFormat = format; // option B
   }

   @Override
   void onError(…) {
     …
   }
 });
 codec.configure(format, …);
 mOutputFormat = codec.getOutputFormat(); // option B
 codec.start();
 // wait for processing to complete
 codec.stop();
 codec.release();
 

Synchronous Processing using Buffers

Since LOLLIPOP, you should retrieve input and output buffers using getInput/OutputBuffer(int) and/or getInput/OutputImage(int) even when using the codec in synchronous mode. This allows certain optimizations by the framework, e.g. when processing dynamic content. This optimization is disabled if you call getInput/OutputBuffers().

Note: do not mix the methods of using buffers and buffer arrays at the same time. Specifically, only call getInput/OutputBuffers directly after start() or after having dequeued an output buffer ID with the value of INFO_OUTPUT_BUFFERS_CHANGED.

MediaCodec is typically used like this in synchronous mode:

 
 MediaCodec codec = MediaCodec.createByCodecName(name);
 codec.configure(format, …);
 MediaFormat outputFormat = codec.getOutputFormat(); // option B
 codec.start();
 for (;;) {
   int inputBufferId = codec.dequeueInputBuffer(timeoutUs);
   if (inputBufferId >= 0) {
     ByteBuffer inputBuffer = codec.getInputBuffer(…);
     // fill inputBuffer with valid data
     …
     codec.queueInputBuffer(inputBufferId, …);
   }
   int outputBufferId = codec.dequeueOutputBuffer(…);
   if (outputBufferId >= 0) {
     ByteBuffer outputBuffer = codec.getOutputBuffer(outputBufferId);
     MediaFormat bufferFormat = codec.getOutputFormat(outputBufferId); // option A
     // bufferFormat is identical to outputFormat
     // outputBuffer is ready to be processed or rendered.
     …
     codec.releaseOutputBuffer(outputBufferId, …);
   } else if (outputBufferId == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
     // Subsequent data will conform to new format.
     // Can ignore if using getOutputFormat(outputBufferId)
     outputFormat = codec.getOutputFormat(); // option B
   }
 }
 codec.stop();
 codec.release();
 

Synchronous Processing using Buffer Arrays (deprecated)

In versions KITKAT_WATCH and before, the set of input and output buffers are represented by the ByteBuffer[] arrays. After a successful call to start(), retrieve the buffer arrays using getInput/OutputBuffers(). Use the buffer ID-s as indices into these arrays (when non-negative), as demonstrated in the sample below. Note that there is no inherent correlation between the size of the arrays and the number of input and output buffers used by the system, although the array size provides an upper bound.

 
 MediaCodec codec = MediaCodec.createByCodecName(name);
 codec.configure(format, …);
 codec.start();
 ByteBuffer[] inputBuffers = codec.getInputBuffers();
 ByteBuffer[] outputBuffers = codec.getOutputBuffers();
 for (;;) {
   int inputBufferId = codec.dequeueInputBuffer(…);
   if (inputBufferId >= 0) {
     // fill inputBuffers[inputBufferId] with valid data
     …
     codec.queueInputBuffer(inputBufferId, …);
   }
   int outputBufferId = codec.dequeueOutputBuffer(…);
   if (outputBufferId >= 0) {
     // outputBuffers[outputBufferId] is ready to be processed or rendered.
     …
     codec.releaseOutputBuffer(outputBufferId, …);
   } else if (outputBufferId == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
     outputBuffers = codec.getOutputBuffers();
   } else if (outputBufferId == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
     // Subsequent data will conform to new format.
     MediaFormat format = codec.getOutputFormat();
   }
 }
 codec.stop();
 codec.release();
 
 
相關參考資料:
 


免責聲明!

本站轉載的文章為個人學習借鑒使用,本站對版權不負任何法律責任。如果侵犯了您的隱私權益,請聯系本站郵箱yoyou2525@163.com刪除。



 
粵ICP備18138465號   © 2018-2025 CODEPRJ.COM