mx53 camera HAL preview (interface layer)

First of all, Android has a camera service: gingerbread/frameworks/base/services/camera/libcameraservice

If you do not want its default FakeCamera, you have to implement your own libcamera.so, which is the so-called camera HAL: gingerbread/hardware/mx5x/libcamera

This HAL is essentially a subclass that implements the class declared in gingerbread/frameworks/base/include/camera/CameraHardwareInterface.h: class CameraHardwareInterface
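In Gingerbread the camera service reaches the HAL through a small set of C entry points that go with CameraHardwareInterface.h. A minimal sketch of what the mx53 library would export (the createInstance() singleton helper is hypothetical; a real HAL would also wire in the capture/post-process/JPEG devices before returning):

    extern "C" int HAL_getNumberOfCameras()
    {
        return 1;                                   // assumption: a single sensor, for illustration
    }

    extern "C" void HAL_getCameraInfo(int cameraId, struct CameraInfo* cameraInfo)
    {
        cameraInfo->facing = CAMERA_FACING_BACK;    // assumption: one back-facing camera
        cameraInfo->orientation = 0;
    }

    extern "C" sp<CameraHardwareInterface> HAL_openCameraHardware(int cameraId)
    {
        // hypothetical singleton helper; this is where setCaptureDevice()/setJpegEncoder()
        // etc. from the class below would be called in a real implementation
        return CameraHal::createInstance();
    }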

CameraHal.h:

class CameraHal : public CameraHardwareInterface {
    public:
        virtual sp<IMemoryHeap> getPreviewHeap() const;
        virtual sp<IMemoryHeap> getRawHeap() const;

        virtual void        setCallbacks(notify_callback notify_cb,
                data_callback data_cb,
                data_callback_timestamp data_cb_timestamp,
                void* user);

        virtual void        enableMsgType(int32_t msgType);
        virtual void        disableMsgType(int32_t msgType);
        virtual bool        msgTypeEnabled(int32_t msgType);

        virtual bool        useOverlay() { return true; }
        virtual status_t    setOverlay(const sp<Overlay> &overlay);

        virtual status_t    startPreview();
        virtual void        stopPreview();
        virtual bool        previewEnabled();

        virtual status_t    startRecording();
        virtual void        stopRecording();
        virtual bool        recordingEnabled();
        virtual void        releaseRecordingFrame(const sp<IMemory>& mem);

        virtual status_t    autoFocus();
        virtual status_t    cancelAutoFocus();
        virtual status_t    takePicture();
        virtual status_t    cancelPicture();
        virtual status_t    dump(int fd, const Vector<String16>& args) const;
        virtual status_t    setParameters(const CameraParameters& params);
        virtual CameraParameters  getParameters() const;
        virtual status_t    sendCommand(int32_t command, int32_t arg1,
                int32_t arg2);
        virtual void release();

        CAMERA_HAL_ERR_RET setCaptureDevice(sp<CaptureDeviceInterface> capturedevice);
        CAMERA_HAL_ERR_RET setPostProcessDevice(sp<PostProcessDeviceInterface> postprocessdevice);
        CAMERA_HAL_ERR_RET setJpegEncoder(sp<JpegEncoderInterface>jpegencoder);
        CAMERA_HAL_ERR_RET  Init();
        void  setPreviewRotate(CAMERA_PREVIEW_ROTATE previewRotate);

        CameraHal();
        virtual             ~CameraHal();

    private:

        // too many private members to list them all here
};
CameraHal.cpp:


getPreviewHeap: returns the memory heap where the preview data lives.

This involves the class MemoryHeapBase, Android's Binder-based memory abstraction used to create shared memory.
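As a rough illustration (not the FSL code) of how these classes fit together: the HAL allocates one large MemoryHeapBase and slices it into per-frame MemoryBase objects, and whoever receives an IMemory on the other side of Binder can get back to the same bytes:

    // illustration only: one heap shared over Binder, sliced into per-frame buffers
    sp<MemoryHeapBase> heap   = new MemoryHeapBase(frameSize * bufCount);
    sp<MemoryBase>     frame0 = new MemoryBase(heap, 0 /*offset*/, frameSize);

    // receiving side: resolve the IMemory back into heap + offset
    ssize_t offset; size_t size;
    sp<IMemoryHeap> h = frame0->getMemory(&offset, &size);
    uint8_t *data = (uint8_t *)h->getBase() + offset;   // same bytes the HAL wrote

frameSize and bufCount above are just placeholders for mPreviewFrameSize and mPreviewHeapBufNum used below.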

        
    sp<IMemoryHeap> CameraHal::getPreviewHeap() const
    {
        CAMERA_HAL_LOG_FUNC;
    
        return mPreviewHeap;
    }

mPreviewHeap is initialized in:

status_t CameraHal::PreparePreviwBuf()
    {
        CAMERA_HAL_LOG_FUNC;
        status_t ret = NO_ERROR;
        unsigned int i =0;

        //temporarily hard-coded here
        if (mTakePicFlag == 0){
            // preview frame size: NV12 takes 12 bits per pixel, packed YUV takes 16
            if (mCaptureDeviceCfg.fmt == V4L2_PIX_FMT_NV12)
                mPreviewFrameSize = mCaptureDeviceCfg.width*mCaptureDeviceCfg.height*3/2;
            else
                mPreviewFrameSize = mCaptureDeviceCfg.width*mCaptureDeviceCfg.height*2;

            //the preview format is assumed to be YUV420SP, so it is hard-coded here for now
            mPreviewHeap.clear();
            for (i = 0; i< mPreviewHeapBufNum; i++)
                mPreviewBuffers[i].clear();
            mPreviewHeap = new MemoryHeapBase(mPreviewFrameSize * mPreviewHeapBufNum);
            if (mPreviewHeap == NULL)
                return NO_MEMORY;
            for (i = 0; i < mPreviewHeapBufNum; i++)
                mPreviewBuffers[i] = new MemoryBase(mPreviewHeap, mPreviewFrameSize* i, mPreviewFrameSize);
        }
        /*allocate the buffer for IPU process*/
        if (mPPDeviceNeed || mPPDeviceNeedForPic){
            mPmemAllocator = new PmemAllocator(mPPbufNum, mCaptureFrameSize);

            if(mPmemAllocator == NULL || mPmemAllocator->err_ret < 0){
                return NO_MEMORY;
            }
            for (i = 0; i < mPPbufNum; i++){
                if(mPmemAllocator->allocate(&(mPPbuf[i]),mCaptureFrameSize) < 0){
                    return NO_MEMORY;
                }
            }
        }
        return ret;
    }


    sp<IMemoryHeap> CameraHal::getRawHeap() const
    {
        return NULL;
    }
    void CameraHal::setCallbacks(notify_callback notify_cb,
            data_callback data_cb,
            data_callback_timestamp data_cb_timestamp,
            void* user)
    {
        Mutex::Autolock lock(mLock); 
        mNotifyCb = notify_cb;
        mDataCb = data_cb;
        mDataCbTimestamp = data_cb_timestamp;
        mCallbackCookie = user;
    }
setCallbacks registers the callback functions. Callbacks are how the HAL returns data to the upper layers during certain operations. For example, in takePicture(), calling mDataCb(CAMERA_MSG_COMPRESSED_IMAGE, JpegMemBase, mCallbackCookie) tells the application that the compressed JPEG data (held in JpegMemBase) is now available. On the mx53, however, preview is rendered through the overlay, so data_cb is not really used during preview.
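For instance, the picture path would only fire its callbacks when the corresponding messages are enabled; a sketch of the idea (JpegMemBase stands for whatever MemoryBase holds the encoded JPEG):

    // sketch: notify the shutter, then deliver the encoded JPEG if the app asked for it
    if (mMsgEnabled & CAMERA_MSG_SHUTTER)
        mNotifyCb(CAMERA_MSG_SHUTTER, 0, 0, mCallbackCookie);

    if ((mMsgEnabled & CAMERA_MSG_COMPRESSED_IMAGE) && mDataCb != NULL)
        mDataCb(CAMERA_MSG_COMPRESSED_IMAGE, JpegMemBase, mCallbackCookie);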

 virtual void        enableMsgType(int32_t msgType);
 virtual void        disableMsgType(int32_t msgType);
 virtual bool        msgTypeEnabled(int32_t msgType);

These three functions control whether the related event (shutter pressed, focus done, picture taken, and so on) is reported back to the upper layer, i.e. whether the corresponding callback gets invoked.
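They are usually implemented as a plain bitmask over the CAMERA_MSG_* flags; a sketch of the common pattern (mMsgEnabled is the same member the preview thread below tests against CAMERA_MSG_PREVIEW_FRAME):

    void CameraHal::enableMsgType(int32_t msgType)
    {
        Mutex::Autolock lock(mLock);
        mMsgEnabled |= msgType;          // switch the requested message bits on
    }

    void CameraHal::disableMsgType(int32_t msgType)
    {
        Mutex::Autolock lock(mLock);
        mMsgEnabled &= ~msgType;         // switch them off
    }

    bool CameraHal::msgTypeEnabled(int32_t msgType)
    {
        Mutex::Autolock lock(mLock);
        return (mMsgEnabled & msgType);
    }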

        virtual bool        useOverlay() { return true; }
        virtual status_t    setOverlay(const sp<Overlay> &overlay);

These enable the overlay path. What is an overlay? There is too much to cover here; in short, the camera data is written straight to the framebuffer without going through the Android drawing layers, which is very useful for preview.

There is a blog post worth referring to: http://zhougaofeng.ixiezi.com/2009/12/02/android-camera-preview-and-take-picture-with-v4l2/
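setOverlay itself does little more than remember the Overlay handle that is passed down, so the preview thread can later queue physical buffer addresses into it. Roughly (error handling omitted; the mutex is the same one used by the preview thread further down):

    status_t CameraHal::setOverlay(const sp<Overlay> &overlay)
    {
        CAMERA_HAL_LOG_FUNC;
        pthread_mutex_lock(&mOverlayMutex);
        mOverlay = overlay;              // a NULL overlay tears the overlay path down
        pthread_mutex_unlock(&mOverlayMutex);
        return NO_ERROR;
    }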


virtual status_t    startPreview();

As the name suggests, this starts the preview.

The main implementation:

status_t CameraHal::CameraHALStartPreview()
    {
        CAMERA_HAL_LOG_FUNC;
        status_t ret = NO_ERROR;
        int  max_fps, min_fps;  

        // basic parameters: preview size, rotation, device name (there may be more than one camera)
        mParameters.getPreviewSize((int *)&(mCaptureDeviceCfg.width),(int *)&(mCaptureDeviceCfg.height));
        mCaptureDeviceCfg.fmt = mPreviewCapturedFormat;
        mCaptureDeviceCfg.rotate = (SENSOR_PREVIEW_ROTATE)mPreviewRotate;
        mCaptureDeviceCfg.tv.numerator = 1;
        mCaptureDevice->GetDevName(mCameraSensorName);

        //the camera is either a UVC (USB webcam) or a CSI (on-board sensor) device
        if (strstr(mCameraSensorName, "uvc") == NULL){
            //according to Google's documentation both getPreviewFrameRate and getPreviewFpsRange
            //should be supported, so as a workaround: if the app set a frame rate, follow it
            if (mParameters.getPreviewFrameRate() >= 15)
                mCaptureDeviceCfg.tv.denominator = mParameters.getPreviewFrameRate();
            else{
                mParameters.getPreviewFpsRange(&min_fps, &max_fps);
                CAMERA_HAL_LOG_INFO("###start the capture the fps is %d###", max_fps);
                mCaptureDeviceCfg.tv.denominator = max_fps/1000;
            }
        }else{
            mCaptureDeviceCfg.tv.denominator = 15;
        }
        mCaptureBufNum = PREVIEW_CAPTURE_BUFFER_NUM;
        mPPbufNum = POST_PROCESS_BUFFER_NUM;
        mTakePicFlag = false;      


        // the previous post (on the driver side) covered how the camera is used through V4L2
        if ((ret = PrepareCaptureDevices()) < 0){
            CAMERA_HAL_ERR("PrepareCaptureDevices error ");
            return ret;
        }
        if (mPPDeviceNeed){     
            // prepare the post-process (IPU) device; I am not entirely sure what it does here
            if ((ret = PreparePostProssDevice()) < 0){
                CAMERA_HAL_ERR("PreparePostProssDevice error");
                return ret;
            }
        }
        if ((ret = PreparePreviwBuf()) < 0){
            CAMERA_HAL_ERR("PreparePreviwBuf error");
            return ret;
        }                      
        //set up the semaphores/locks, mainly so that overlay and capture do not conflict
        if ((ret = PreparePreviwMisc()) < 0){
            CAMERA_HAL_ERR("PreparePreviwMisc error");
            return ret;
        }

        if ((ret = CameraHALPreviewStart()) < 0){
            CAMERA_HAL_ERR("CameraHALPreviewStart error");
            return ret;
        }
        return ret;
    }

    status_t CameraHal::CameraHALPreviewStart()
    {   
        CAMERA_HAL_LOG_FUNC;
        status_t ret = NO_ERROR;
        if (mCaptureDevice->DevStart()<0) // essentially ioctl(mCameraDevice, VIDIOC_STREAMON, &type); the FSL mx53 allows overlay and capture to be streamed on at the same time
            return INVALID_OPERATION; 
        
        mCaptureFrameThread = new CaptureFrameThread(this);
        mPreviewShowFrameThread = new PreviewShowFrameThread(this);
        mEncodeFrameThread = new EncodeFrameThread(this);
        if(mPPDeviceNeed){
            mPostProcessThread = new PostProcessThread(this);
            if (mPostProcessThread == NULL)
                return UNKNOWN_ERROR;
        }

        if (mCaptureFrameThread == NULL ||
                mPreviewShowFrameThread == NULL ||
                mEncodeFrameThread == NULL){
            return UNKNOWN_ERROR;
        }

        mPreviewRunning = true;
        return ret;
    }
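The four thread objects created above (CaptureFrameThread, PreviewShowFrameThread, EncodeFrameThread, PostProcessThread) are thin wrappers around Android's Thread class whose threadLoop() just calls back into CameraHal until preview stops or an error occurs. A sketch of the usual pattern (paraphrasing the stock CameraHardwareStub style, not the exact FSL declaration):

    // sketch: how CaptureFrameThread could be declared in CameraHal.h
    class CaptureFrameThread : public Thread {
        CameraHal *mHardware;
    public:
        CaptureFrameThread(CameraHal *hw)
            : Thread(false), mHardware(hw) {}
        virtual void onFirstRef() {
            run("CaptureFrameThread", PRIORITY_URGENT_DISPLAY);
        }
        virtual bool threadLoop() {
            // keep looping as long as the frame loop succeeds
            return (mHardware->captureframeThread() == NO_ERROR);
        }
    };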

The function above starts four threads. I have not figured out what PostProcessThread does (auto focus?). Below is a brief walk-through of the capture and show threads.
    int CameraHal::captureframeThread()
    {
        CAMERA_HAL_LOG_FUNC;

        unsigned int DeqBufIdx = 0;
        struct timespec ts;

        do {
            clock_gettime(CLOCK_REALTIME, &ts);
            ts.tv_nsec += 100000; // 100 us timeout per wait
        } while (mPreviewRunning && !error_status && (sem_timedwait(&avab_dequeue_frame, &ts) != 0));
        // several threads work together, so they must be synchronized;
        // avab_dequeue_frame only becomes available after a DevQueue
        if(!mPreviewRunning || error_status)
            return UNKNOWN_ERROR;
        mCaptureDevice->DevDequeue(&DeqBufIdx); // dequeue one captured frame into a buffer for later use

        nCameraBuffersQueued--;

        buffer_index_maps[dequeue_head]=DeqBufIdx; // PrepareCaptureDevices requested several buffers; remember the index of the one just dequeued
        dequeue_head ++;
        dequeue_head %= mCaptureBufNum;
        // the rest below is just synchronization
        if(!mPPDeviceNeed){
            sem_post(&avab_show_frame);
            sem_post(&avab_enc_frame);
        }else{
            sem_post(&avab_pp_in_frame);
        }
        return NO_ERROR;
    }
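DevDequeue()/DevQueue() wrap the standard V4L2 buffer cycle: VIDIOC_DQBUF pulls a filled frame out of the driver, and VIDIOC_QBUF hands the buffer back once preview and encode are done with it. A sketch of what happens underneath, assuming a plain mmap'ed V4L2 capture node (fd stands for the opened /dev/video device, not the FSL class):

    #include <string.h>
    #include <sys/ioctl.h>
    #include <linux/videodev2.h>

    // sketch: dequeue one filled frame; *index reports which mmap'ed buffer holds it
    static int v4l2_dequeue(int fd, unsigned int *index)
    {
        struct v4l2_buffer buf;
        memset(&buf, 0, sizeof(buf));
        buf.type   = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory = V4L2_MEMORY_MMAP;
        if (ioctl(fd, VIDIOC_DQBUF, &buf) < 0)
            return -1;
        *index = buf.index;
        return 0;
    }

    // sketch: give a buffer back to the driver so it can be filled again
    static int v4l2_queue(int fd, unsigned int index)
    {
        struct v4l2_buffer buf;
        memset(&buf, 0, sizeof(buf));
        buf.type   = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory = V4L2_MEMORY_MMAP;
        buf.index  = index;
        return ioctl(fd, VIDIOC_QBUF, &buf);
    }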

    int CameraHal::previewshowFrameThread()
    {
        CAMERA_HAL_LOG_FUNC;
        struct timespec ts;
        int display_index = 0;
        DMA_BUFFER InBuf;
        int queue_back_index = 0;

        do {
            clock_gettime(CLOCK_REALTIME, &ts);
            ts.tv_nsec += 100000; // 100 us timeout per wait
        } while (!error_status && mPreviewRunning &&(sem_timedwait(&avab_show_frame, &ts) != 0) );

        if ((mPreviewRunning == 0) || error_status)
            return UNKNOWN_ERROR;

        if (!mPPDeviceNeed){
            display_index = buffer_index_maps[display_head];
            InBuf = mCaptureBuffers[display_index];
            display_head ++;
            display_head %= mCaptureBufNum;
        }else{
            display_index = display_head;
            InBuf = mPPbuf[display_index];
            display_head ++;
            display_head %= mPPbufNum;
        }

        if (mMsgEnabled & CAMERA_MSG_PREVIEW_FRAME) {
            convertNV12toYUV420SP((uint8_t*)(InBuf.virt_start),
                    (uint8_t*)(mPreviewBuffers[preview_heap_buf_head]->pointer()),mCaptureDeviceCfg.width, mCaptureDeviceCfg.height);
            mDataCb(CAMERA_MSG_PREVIEW_FRAME, mPreviewBuffers[preview_heap_buf_head], mCallbackCookie);
            preview_heap_buf_head ++;
            preview_heap_buf_head %= mPreviewHeapBufNum;
        }

        pthread_mutex_lock(&mOverlayMutex);

        if (mOverlay != 0) {                    
            // InBuf is a buffer dequeued from the capture device. What happens after it is handed to the
            // overlay is not entirely clear; the FSL overlay code is fairly involved. At init time it
            // allocates shared memory (ctx->data_shared) and the buffer's physical address is pushed in via
            // data_shared->queued_bufs[data_shared->queued_tail] = phy_addr. The puzzling part is that only
            // queueBuffer is ever called, never dequeueBuffer: a dedicated thread inside the overlay HAL
            // consumes the queued buffers.
            if (mOverlay->queueBuffer((overlay_buffer_t)InBuf.phy_offset) < 0){
                CAMERA_HAL_ERR("queueBuffer failed. May be bcos stream was not turned on yet.");
            }

            if (is_first_buffer) {
                is_first_buffer = 0;
                last_display_index = display_index;
                pthread_mutex_unlock(&mOverlayMutex);
                goto show_out;
            }
        }

        if (!mPPDeviceNeed){
            if (mOverlay != 0){
                queue_back_index = last_display_index;
            }else{
                queue_back_index = display_index;
            }
        }
        pthread_mutex_unlock(&mOverlayMutex);

        do {
            clock_gettime(CLOCK_REALTIME, &ts);
            ts.tv_nsec += 200000; // 200 us timeout per wait
        } while ((sem_timedwait(&avab_enc_frame_finish, &ts) != 0)&&!error_status && mPreviewRunning );

        if (!mPPDeviceNeed){
            //queue the v4l2 buf back
            if(mCaptureDevice->DevQueue(queue_back_index) <0){
                CAMERA_HAL_ERR("The Capture device queue buf error !!!!");
                return INVALID_OPERATION;
            }
            last_display_index = display_index;
            nCameraBuffersQueued++;
            sem_post(&avab_dequeue_frame);
        }else{
            sem_post(&avab_pp_out_frame);
        }

show_out:

        return NO_ERROR;
    }
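convertNV12toYUV420SP is not shown in the HAL source here. Assuming that "YUV420SP" means the Android-default NV21 layout, the conversion amounts to copying the Y plane and swapping U/V in the interleaved chroma plane. A plausible sketch (not the FSL implementation):

    #include <stdint.h>
    #include <string.h>

    // sketch: NV12 (Y plane + interleaved UV) -> NV21 (Y plane + interleaved VU)
    static void convert_nv12_to_nv21(const uint8_t *src, uint8_t *dst,
                                     int width, int height)
    {
        int ysize = width * height;
        memcpy(dst, src, ysize);                 // Y plane is identical
        const uint8_t *suv = src + ysize;        // source chroma: U V U V ...
        uint8_t *dvu = dst + ysize;              // destination chroma: V U V U ...
        for (int i = 0; i < ysize / 2; i += 2) {
            dvu[i]     = suv[i + 1];             // V first
            dvu[i + 1] = suv[i];                 // then U
        }
    }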






