Camera -- takePicture流程


一、framework

frameworks/base/core/java/android/hardware/Camera.java

    /**
     * Equivalent to takePicture(shutter, raw, null, jpeg).
     *
     * @see #takePicture(ShutterCallback, PictureCallback, PictureCallback, PictureCallback)
     */
    public final void takePicture(ShutterCallback shutter, PictureCallback raw,
            PictureCallback jpeg) {
        takePicture(shutter, raw, null, jpeg);
    }
    // Bridge into native code. msgType is a bitmask of CAMERA_MSG_* values
    // telling the native layer which callbacks the app registered; resolved
    // by the JNI registration table to android_hardware_Camera_takePicture.
    private native final void native_takePicture(int msgType);

    /**
     * Triggers an asynchronous image capture. The camera service will initiate
     * a series of callbacks to the application as the image capture progresses.
     * The shutter callback occurs after the image is captured. This can be used
     * to trigger a sound to let the user know that image has been captured. The
     * raw callback occurs when the raw image data is available (NOTE: the data
     * will be null if there is no raw image callback buffer available or the
     * raw image callback buffer is not large enough to hold the raw image).
     * The postview callback occurs when a scaled, fully processed postview
     * image is available (NOTE: not all hardware supports this). The jpeg
     * callback occurs when the compressed image is available. If the
     * application does not need a particular callback, a null can be passed
     * instead of a callback method.
     *
     * <p>This method is only valid when preview is active (after
     * {@link #startPreview()}).  Preview will be stopped after the image is
     * taken; callers must call {@link #startPreview()} again if they want to
     * re-start preview or take more pictures. This should not be called between
     * {@link android.media.MediaRecorder#start()} and
     * {@link android.media.MediaRecorder#stop()}.
     *
     * <p>After calling this method, you must not call {@link #startPreview()}
     * or take another picture until the JPEG callback has returned.
     *
     * @param shutter   the callback for image capture moment, or null
     * @param raw       the callback for raw (uncompressed) image data, or null
     * @param postview  callback with postview image data, may be null
     * @param jpeg      the callback for JPEG image data, or null
     */
    public final void takePicture(ShutterCallback shutter, PictureCallback raw,
            PictureCallback postview, PictureCallback jpeg) {
        // Remember the app-supplied callbacks; the event handler invokes them
        // later when the corresponding native messages arrive.
        mShutterCallback = shutter;
        mRawImageCallback = raw;
        mPostviewCallback = postview;
        mJpegCallback = jpeg;

        // Build the native message mask, requesting only the callbacks the
        // application actually registered.
        int msgType = 0;
        msgType |= (shutter != null) ? CAMERA_MSG_SHUTTER : 0;
        msgType |= (raw != null) ? CAMERA_MSG_RAW_IMAGE : 0;
        msgType |= (postview != null) ? CAMERA_MSG_POSTVIEW_FRAME : 0;
        msgType |= (jpeg != null) ? CAMERA_MSG_COMPRESSED_IMAGE : 0;

        native_takePicture(msgType);
        // Capture stops the preview, which implicitly ends face detection.
        mFaceDetectionRunning = false;
    }

takePicture 最终调用 native_takePicture。虚拟机通过 JNI 注册表(JNINativeMethod 数组)把该方法解析到对应的 C++ 实现 android_hardware_Camera_takePicture 并执行。

二、JNI

frameworks/base/core/jni/android_hardware_Camera.cpp

// JNI implementation behind Camera.native_takePicture(int).
// Resolves the native Camera object from the Java instance, fixes up the
// raw-image bits in msgType, then forwards the request to Camera::takePicture.
static void android_hardware_Camera_takePicture(JNIEnv *env, jobject thiz, int msgType)
{
    ALOGV("takePicture");
    JNICameraContext* context;
    const sp<Camera> camera = get_native_camera(env, thiz, &context);
    if (camera == 0) {
        return;
    }

    /*
     * When CAMERA_MSG_RAW_IMAGE is requested but no raw callback buffer is
     * available, downgrade the request to CAMERA_MSG_RAW_IMAGE_NOTIFY so the
     * app still gets the notification without the (undeliverable) data.
     * CAMERA_MSG_RAW_IMAGE_NOTIFY is never exposed to the Java application.
     */
    const bool wantsRawImage = (msgType & CAMERA_MSG_RAW_IMAGE) != 0;
    if (wantsRawImage) {
        ALOGV("Enable raw image callback buffer");
        if (!context->isRawImageCallbackBufferAvailable()) {
            ALOGV("Enable raw image notification, since no callback buffer exists");
            msgType = (msgType & ~CAMERA_MSG_RAW_IMAGE) | CAMERA_MSG_RAW_IMAGE_NOTIFY;
        }
    }

    if (camera->takePicture(msgType) != NO_ERROR) {
        jniThrowRuntimeException(env, "takePicture failed");
    }
}

// JNI registration table: maps the Java method name and signature to the C++
// implementation; installed via RegisterNatives when the Camera class loads.
static JNINativeMethod camMethods[] = {
  { "native_takePicture",
    "(I)V",   // takes the int msgType bitmask, returns void
    (void *)android_hardware_Camera_takePicture },
};

 

frameworks/av/camera/Camera.cpp

// take a picture: forwards the capture request to the remote ICamera
// (the camera service) over Binder. Returns NO_INIT when the connection
// to the service is gone.
status_t Camera::takePicture(int msgType)
{
    ALOGV("takePicture: 0x%x", msgType);
    const sp<ICamera> service = mCamera;
    if (service == 0) {
        return NO_INIT;
    }
    return service->takePicture(msgType);
}

frameworks/av/camera/ICamera.cpp

这边会涉及到 binder IPC。binder 是一种 C/S 架构的进程间通信机制:客户端的 takePicture 请求通过 binder 发送到作为 Server 的 CameraService 进程,CameraService 再逐层调用到 HAL,最终触发 V4LCameraAdapter 里面的 pictureThread。

    // take a picture - proxy side of the Binder call: marshals msgType,
    // issues the TAKE_PICTURE transaction and returns the remote status.
    // (The image data itself arrives later through callback messages.)
    status_t takePicture(int msgType)
    {
        ALOGV("takePicture: 0x%x", msgType);
        Parcel data;
        Parcel reply;
        data.writeInterfaceToken(ICamera::getInterfaceDescriptor());
        data.writeInt32(msgType);
        remote()->transact(TAKE_PICTURE, data, &reply);
        return reply.readInt32();
    }

 

HAL 层 CameraHal 中的 mCameraAdapter 成员(本文未列出其赋值处)是由下面声明的工厂函数创建的,CameraAdapter_Factory 的实现见下文 HAL 层源码。

extern "C" CameraAdapter* CameraAdapter_Factory(size_t);

三、HAL

hardware/amlogic/camera/V4LCameraAdapter/V4LCameraAdapter.cpp

/*
 * Factory entry point used by the camera HAL to instantiate the adapter
 * for a given sensor. When virtual-camera support is compiled in, the index
 * equal to the number of physical cameras selects the virtual adapter
 * (V4LCamAdpt); otherwise a regular V4LCameraAdapter is created.
 *
 * @param sensor_index zero-based index of the sensor / video node to drive
 * @return heap-allocated adapter owned by the caller, or NULL on failure
 */
extern "C" CameraAdapter* CameraAdapter_Factory(size_t sensor_index)
{
    CameraAdapter *adapter = NULL;
    Mutex::Autolock lock(gAdapterLock);

    LOG_FUNCTION_NAME;

#ifdef AMLOGIC_VIRTUAL_CAMERA_SUPPORT

    if( sensor_index == (size_t)(iCamerasNum)){
            //MAX_CAM_NUM_ADD_VCAM-1) ){
        adapter = new V4LCamAdpt(sensor_index);
    }else{
#endif
        adapter = new V4LCameraAdapter(sensor_index);
#ifdef AMLOGIC_VIRTUAL_CAMERA_SUPPORT
    }
#endif

    if ( adapter ) {
        // %zu matches the size_t argument; the previous "%d" was a format
        // mismatch (undefined behavior on LP64 builds where size_t is 64-bit).
        CAMHAL_LOGDB("New V4L Camera adapter instance created for sensor %zu", sensor_index);
    } else {
        CAMHAL_LOGEA("Camera adapter create failed!");
    }

    LOG_FUNCTION_NAME_EXIT;

    return adapter;
}

HAL层的takePicture

/*
 * Thread trampoline passed to createThread(): converts the opaque cookie
 * back to the adapter instance and runs the capture on the new thread.
 *
 * NOTE(review): the original excerpt contained this exact definition twice,
 * which is a redefinition error in a single translation unit; only one copy
 * is kept here.
 */
/*static*/ int V4LCameraAdapter::beginPictureThread(void *cookie)
{
    V4LCameraAdapter *c = (V4LCameraAdapter *)cookie;
    return c->pictureThread();
}

/*
 * Capture worker, run on the thread started by takePicture(). Sequence:
 * queue V4L2 buffer 0 back to the driver, start streaming if needed, grab
 * one frame (with retry loops for USB / non-blocking drivers), copy or
 * color-convert it into mCaptureBuf, fire the shutter notification, attach
 * EXIF data and send the filled CameraFrame to subscribers (which encode it
 * to JPEG). Streaming is stopped and the preview restarted before returning.
 */
int V4LCameraAdapter::pictureThread()
{
    status_t ret = NO_ERROR;
    // NOTE(review): this width/height pair is shadowed by the redeclaration
    // further down and is therefore never used.
    int width, height;
    CameraFrame frame;
    int dqTryNum = 3;   // retry budget for the USB-camera requeue loop below

#ifndef AMLOGIC_USB_CAMERA_SUPPORT
    setMirrorEffect();
#endif

    // Turn the flash on for the duration of the capture when requested.
    if( (mIoctlSupport & IOCTL_MASK_FLASH)
        &&(FLASHLIGHT_ON == mFlashMode)){
        set_flash_mode( mCameraHandle, "on");
    }
    if (true)
    {
        // Queue capture buffer 0 (MMAP memory) back to the driver.
        mVideoInfo->buf.index = 0;
        mVideoInfo->buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        mVideoInfo->buf.memory = V4L2_MEMORY_MMAP;

#ifdef AMLOGIC_USB_CAMERA_SUPPORT
        // Bail out early if a previous DQBUF hit EIO (device gone/unplugged).
        if(mIsDequeuedEIOError){
            CAMHAL_LOGEA("DQBUF EIO has occured!\n");
            return -EINVAL;
        }
#endif
        ret = ioctl(mCameraHandle, VIDIOC_QBUF, &mVideoInfo->buf);
        if (ret < 0)
        {
            CAMHAL_LOGEA("VIDIOC_QBUF Failed");
            return -EINVAL;
        }
        nQueued ++;

#ifndef AMLOGIC_USB_CAMERA_SUPPORT
        if(mIoctlSupport & IOCTL_MASK_ROTATE){
            set_rotate_value(mCameraHandle,mRotateValue);
        }
#endif

        // Start streaming if it is not already running.
        enum v4l2_buf_type bufType;
        if (!mVideoInfo->isStreaming)
        {
            bufType = V4L2_BUF_TYPE_VIDEO_CAPTURE;

            ret = ioctl (mCameraHandle, VIDIOC_STREAMON, &bufType);
            if (ret < 0) {
                CAMHAL_LOGEB("StartStreaming: Unable to start capture: %s", strerror(errno));
                return ret;
            }

            mVideoInfo->isStreaming = true;
        }

        int index = 0;
        char *fp = this->GetFrame(index);
#ifdef AMLOGIC_USB_CAMERA_SUPPORT
        // USB path: requeue and re-fetch until the driver reports a fully
        // filled buffer (length == bytesused) or the retry budget runs out.
        while((mVideoInfo->buf.length != mVideoInfo->buf.bytesused)&&(dqTryNum>0)){
        if(NULL != fp){
            mVideoInfo->buf.index = 0;
            mVideoInfo->buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
            mVideoInfo->buf.memory = V4L2_MEMORY_MMAP;


            if(mIsDequeuedEIOError){
                CAMHAL_LOGEA("DQBUF EIO has occured!\n");
                break;
            }

            ret = ioctl(mCameraHandle, VIDIOC_QBUF, &mVideoInfo->buf);
            if (ret < 0)
            {
                CAMHAL_LOGEB("VIDIOC_QBUF Failed errno=%d\n", errno);
                break;
            }
            nQueued ++;
            dqTryNum --;
        }

#ifdef AMLOGIC_CAMERA_NONBLOCK_SUPPORT
        usleep( 10000 );
#endif
        fp = this->GetFrame(index);
    }
#endif

#ifdef AMLOGIC_CAMERA_NONBLOCK_SUPPORT
        // Non-blocking driver: poll every 10ms until a frame arrives.
        while(!fp && (-1 == index) ){
            usleep( 10000 );
            fp = this->GetFrame(index);
        }
#else
        if(!fp)
        {
            CAMHAL_LOGDA("GetFrame fail, this may stop preview\n");
            return 0; //BAD_VALUE;
        }
#endif
        // No destination buffer allocated for the capture -> nothing to do.
        if (!mCaptureBuf || !mCaptureBuf->data)
        {
            return 0; //BAD_VALUE;
        }

        // NOTE(review): shadows the width/height declared at the top of the
        // function; everything below uses this inner pair.
        int width, height;
        uint8_t* dest = (uint8_t*)mCaptureBuf->data;
        uint8_t* src = (uint8_t*) fp;
        // Use the configured capture size, falling back to the picture size
        // from the parameters when no explicit capture size was set.
        if((mCaptureWidth <= 0)||(mCaptureHeight <= 0)){
            mParams.getPictureSize(&width, &height);
        }else{
            width = mCaptureWidth;
            height = mCaptureHeight;
        }

#ifndef AMLOGIC_USB_CAMERA_SUPPORT
        // A 90/270 degree sensor rotation swaps the output dimensions.
        if((mRotateValue==90)||(mRotateValue==270)){
            int temp = 0;
            temp = width;
            width = height;
            height = temp;
        }
#endif

        LOGD("pictureThread mCaptureBuf=%#x dest=%#x fp=%#x width=%d height=%d", (uint32_t)mCaptureBuf, (uint32_t)dest, (uint32_t)fp, width, height);
        LOGD("length=%d bytesused=%d index=%d", mVideoInfo->buf.length, mVideoInfo->buf.bytesused, index);

        // Fill the frame length/quirks for the configured capture format and
        // move the pixels into mCaptureBuf. On USB cameras the sensor always
        // delivers YUYV, so a software conversion is performed; otherwise the
        // driver output is copied through unchanged.
        if(DEFAULT_IMAGE_CAPTURE_PIXEL_FORMAT == V4L2_PIX_FMT_RGB24){ // rgb24
            frame.mLength = width*height*3;
            frame.mQuirks = CameraFrame::ENCODE_RAW_RGB24_TO_JPEG | CameraFrame::HAS_EXIF_DATA;
#ifdef AMLOGIC_USB_CAMERA_SUPPORT
            //convert yuyv to rgb24
            yuyv422_to_rgb24(src,dest,width,height);
#else
            memcpy(dest,src,mVideoInfo->buf.length);
#endif
        }else if(DEFAULT_IMAGE_CAPTURE_PIXEL_FORMAT == V4L2_PIX_FMT_YUYV){ //   422I
            frame.mLength = width*height*2;
            frame.mQuirks = CameraFrame::ENCODE_RAW_YUV422I_TO_JPEG | CameraFrame::HAS_EXIF_DATA;
            memcpy(dest, src, mVideoInfo->buf.length);
        }else if(DEFAULT_IMAGE_CAPTURE_PIXEL_FORMAT == V4L2_PIX_FMT_NV21){ //   420sp
            frame.mLength = width*height*3/2;
            frame.mQuirks = CameraFrame::ENCODE_RAW_YUV420SP_TO_JPEG | CameraFrame::HAS_EXIF_DATA;
#ifdef AMLOGIC_USB_CAMERA_SUPPORT
            //convert yuyv to nv21
            yuyv422_to_nv21(src,dest,width,height);
#else
            memcpy(dest,src,mVideoInfo->buf.length);
#endif
        }else{ //default case
            frame.mLength = width*height*3;
            frame.mQuirks = CameraFrame::ENCODE_RAW_RGB24_TO_JPEG | CameraFrame::HAS_EXIF_DATA;
            memcpy(dest, src, mVideoInfo->buf.length);
        }

        // Shutter callback fires once the frame is safely in our buffer.
        notifyShutterSubscribers();
        //TODO correct time to call this?
        if (NULL != mEndImageCaptureCallback)
            mEndImageCaptureCallback(mEndCaptureData);

        //gen  exif message
        ExifElementsTable* exiftable = new ExifElementsTable();
        GenExif(exiftable);

        // Describe the captured image for the downstream JPEG encoder.
        frame.mFrameMask = CameraFrame::IMAGE_FRAME;
        frame.mFrameType = CameraFrame::IMAGE_FRAME;
        frame.mBuffer = mCaptureBuf->data;
        frame.mCookie2 = (void*)exiftable;
        frame.mAlignment = width;
        frame.mOffset = 0;
        frame.mYuv[0] = 0;
        frame.mYuv[1] = 0;
        frame.mWidth = width;
        frame.mHeight = height;
        frame.mTimestamp = systemTime(SYSTEM_TIME_MONOTONIC);

        // Stop streaming: still capture is a one-shot operation.
        if (mVideoInfo->isStreaming)
        {
            bufType = V4L2_BUF_TYPE_VIDEO_CAPTURE;
            ret = ioctl (mCameraHandle, VIDIOC_STREAMOFF, &bufType);
            if (ret < 0)
            {
                CAMHAL_LOGEB("StopStreaming: Unable to stop capture: %s", strerror(errno));
                return ret;
            }

            mVideoInfo->isStreaming = false;
        }

        mVideoInfo->buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        mVideoInfo->buf.memory = V4L2_MEMORY_MMAP;

        nQueued = 0;
        nDequeued = 0;

        /* Unmap buffers */
        if (munmap(mVideoInfo->mem[0], mVideoInfo->buf.length) < 0)
            CAMHAL_LOGEA("Unmap failed");


#ifdef AMLOGIC_USB_CAMERA_SUPPORT
    // Release the driver-side buffers by requesting a count of zero.
    mVideoInfo->buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    mVideoInfo->buf.memory = V4L2_MEMORY_MMAP;
    mVideoInfo->rb.count = 0;

    ret = ioctl(mCameraHandle, VIDIOC_REQBUFS, &mVideoInfo->rb);
    if (ret < 0) {
        CAMHAL_LOGEB("VIDIOC_REQBUFS failed: %s", strerror(errno));
        return ret;
    }else{
        CAMHAL_LOGDA("VIDIOC_REQBUFS delete buffer success\n");
    }
#endif
    }

    // Restore flash and rotation to their idle state.
    if( (mIoctlSupport & IOCTL_MASK_FLASH)
        &&(FLASHLIGHT_ON == mFlashMode)){
        set_flash_mode( mCameraHandle, "off");
    }
#ifndef AMLOGIC_USB_CAMERA_SUPPORT
    if(mIoctlSupport & IOCTL_MASK_ROTATE){
        set_rotate_value(mCameraHandle,0);
        mRotateValue = 0;
    }
#endif

    // start preview thread again after stopping it in UseBuffersCapture
    {
        Mutex::Autolock lock(mPreviewBufferLock);
        UseBuffersPreview(mPreviewBuffers, mPreviewBufferCount);
    }
    startPreview();

    // Hand the frame to subscribers; the ref count governs when the capture
    // buffer may be reused.
    ret = setInitFrameRefCount(frame.mBuffer, frame.mFrameMask);
    if (ret)
        LOGE("setInitFrameRefCount err=%d", ret);
    else
        ret = sendFrameToSubscribers(&frame);
    //LOGD("pictureThread /sendFrameToSubscribers ret=%d", ret);

    return ret;
}

// HAL-side takePicture: spawns the capture worker thread; all the actual
// V4L2 work happens asynchronously in pictureThread().
status_t V4LCameraAdapter::takePicture()
{
    LOG_FUNCTION_NAME;
    const bool threadStarted = createThread(beginPictureThread, this);
    if (!threadStarted) {
        return -1;
    }
    LOG_FUNCTION_NAME_EXIT;
    return NO_ERROR;
}

 可以由下面看出这里会操作设备节点

// Candidate V4L2 device nodes, indexed by sensor number.
// NOTE(review): the #if that pairs with the #else below lies outside this
// excerpt; presumably it gates a multi-sensor build configuration — confirm
// against the full source file.
const char *SENSOR_PATH[]={
            "/dev/video0",
            "/dev/video1",
            "/dev/video2",
            "/dev/video3",
            "/dev/video4",
        };
// Map a sensor index to its device node path via the table above.
#define DEVICE_PATH(_sensor_index) (SENSOR_PATH[_sensor_index])
#else
// Fallback when the table is compiled out: only two device nodes supported.
#define DEVICE_PATH(_sensor_index) (_sensor_index == 0 ? "/dev/video0" : "/dev/video1")
#endif

// Adapter initialization (excerpt; elided parts marked "..."): opens the
// V4L2 device node for this sensor. With AMLOGIC_CAMERA_NONBLOCK_SUPPORT the
// fd is opened O_NONBLOCK so capture loops can poll instead of blocking in
// the driver.
status_t V4LCameraAdapter::initialize(CameraProperties::Properties* caps)
{
    ...
#ifdef AMLOGIC_CAMERA_NONBLOCK_SUPPORT
        // open() returns -1 on failure, so != -1 means success here.
        if ((mCameraHandle = open(DEVICE_PATH(mSensorIndex), O_RDWR | O_NONBLOCK )) != -1)
#else
        if ((mCameraHandle = open(DEVICE_PATH(mSensorIndex), O_RDWR)) != -1)
#endif
    ...
}

四、kernel - driver

待续

 


免责声明!

本站转载的文章为个人学习借鉴使用,本站对版权不负任何法律责任。如果侵犯了您的隐私权益,请联系本站邮箱yoyou2525@163.com删除。



 
粤ICP备18138465号  © 2018-2025 CODEPRJ.COM