RK3399 Android 7.1: Capturing HDMI Input Image Data in the HAL-Layer Camera Module



Preface

HAL layer code: hardware/rockchip/camera/CameraHal/
In the RK3399 Android 7.1 HAL, constructing the CameraHal object (new CameraHal) creates a different adapter depending on the sensor type. The adapters differ as follows:
CameraUSBAdapter: USB cameras, captured through V4L2.
CameraIspSOCAdapter: DVP interface, received by the ISP controller.
CameraIspAdapter: MIPI interface, received by the ISP controller, not through V4L2.
CameraSOCAdapter: DVP interface, received by the VIP controller.

Here we use CameraIspAdapter (the MIPI path) to capture the HDMI input.
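
Conceptually, the selection made in the CameraHal constructor looks like the sketch below. The switch key and helper name are illustrative stand-ins, not the exact HAL code, which inspects the per-camera hardware info parsed at probe time:

// Illustrative sketch only; the real selection happens in the CameraHal constructor.
// SENSOR_IF_* and sensorInterfaceOf() are hypothetical stand-ins for the HAL's
// per-camera hardware description.
CameraAdapter* selectAdapter(int cameraId)
{
    switch (sensorInterfaceOf(cameraId)) {
    case SENSOR_IF_USB:     return new CameraUSBAdapter(cameraId);    // USB camera via V4L2
    case SENSOR_IF_DVP_ISP: return new CameraIspSOCAdapter(cameraId); // DVP sensor through the ISP
    case SENSOR_IF_MIPI:    return new CameraIspAdapter(cameraId);    // MIPI sensor through the ISP (HDMI-in path)
    case SENSOR_IF_DVP_VIP: return new CameraSOCAdapter(cameraId);    // DVP sensor through the VIP
    default:                return NULL;
    }
}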

I. Camera flow

In CameraHal_Module.cpp the flow is:

Enumerate the devices: camera_get_number_of_cameras
Open a device: camera_device_open
Set parameters on the opened camera_device_t: camera_dev->ops->set_parameters
Check that preview is enabled: camera_dev->ops->preview_enabled(camera_dev)
Start the preview: camera_dev->ops->start_preview(camera_dev)

Closing the device:
camera_dev->ops->stop_preview(camera_dev);
camera_dev->ops->release(camera_dev);
camera_dev->common.close(&camera_dev->common);
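
Pulled together, a minimal native client sketch of this open/preview/close sequence against the camera HAL v1 module interface might look like the following (error handling trimmed; a real client would also set a preview window and callbacks via set_preview_window()/set_callbacks() before starting preview):

#include <hardware/hardware.h>
#include <hardware/camera.h>

// Minimal sketch: open camera "0" through the HAL module, start preview, then tear down.
static int run_camera_once(void)
{
    camera_module_t *module = NULL;
    camera_device_t *camera_dev = NULL;

    if (hw_get_module(CAMERA_HARDWARE_MODULE_ID, (const hw_module_t **)&module))
        return -1;

    if (module->get_number_of_cameras() <= 0)                       // camera_get_number_of_cameras
        return -1;

    if (module->common.methods->open(&module->common, "0",
                                     (hw_device_t **)&camera_dev))  // camera_device_open
        return -1;

    char *params = camera_dev->ops->get_parameters(camera_dev);     // read, tweak and write back parameters
    camera_dev->ops->set_parameters(camera_dev, params);
    camera_dev->ops->put_parameters(camera_dev, params);

    camera_dev->ops->start_preview(camera_dev);
    camera_dev->ops->preview_enabled(camera_dev);                   // returns non-zero once preview is running

    // ... frames are now delivered to the adapter's bufferCb (see below) ...

    camera_dev->ops->stop_preview(camera_dev);
    camera_dev->ops->release(camera_dev);
    camera_dev->common.close(&camera_dev->common);
    return 0;
}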

II. Code analysis

1. CameraHal_Module.cpp

The relevant code (example): in CameraHal_Module.cpp, every device (struct camera_device *device) opened by camera_device_open is served by an android::CameraHal instance kept in the array below.

static android::CameraHal* gCameraHals[CAMERAS_SUPPORT_MAX];

The parameter setting, preview, and close calls from the flow above are all forwarded through this array to the matching CameraHal instance.
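
For illustration, the module entry points dispatch roughly as in the sketch below. The wrapper struct, field, and method names (rk_camera_device_t, cameraid, startPreview) are assumptions about the HAL's internal naming; the real functions also validate the device pointer and log errors:

// Simplified sketch of the dispatch pattern used by the CameraHal_Module.cpp entry points.
static int camera_start_preview(struct camera_device *device)
{
    // The module wraps camera_device_t in a private struct that records the camera id
    // (names assumed here).
    rk_camera_device_t *rk_dev = (rk_camera_device_t *)device;
    return gCameraHals[rk_dev->cameraid]->startPreview();
}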

2. Getting the frame data

android::CameraHal is defined in CameraHal.cpp. Its member CameraAdapter is the base class from which the sensor-specific adapters listed above derive. The HDMI input we want is handled by CameraIspAdapter.cpp, and the image data arrives in CameraIspAdapter::bufferCb.
The code is as follows (example):

void CameraIspAdapter::bufferCb( MediaBuffer_t* pMediaBuffer )
{
    static int writeoneframe = 0;
    ulong_t y_addr = 0,uv_addr = 0;
	uint32_t y_size;
    void* y_addr_vir = NULL,*uv_addr_vir = NULL ;
    int width = 0,height = 0;
    int fmt = 0;
	int tem_val;
	ulong_t phy_addr=0;
    void* graphic_buffer = NULL;

	Mutex::Autolock lock(mLock);
    // get & check buffer meta data
    PicBufMetaData_t *pPicBufMetaData = (PicBufMetaData_t *)(pMediaBuffer->pMetaData);
    HalHandle_t  tmpHandle = m_camDevice->getHalHandle();

    debugShowFPS();
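    // The block below maps the ISP buffer metadata (type + layout) to a V4L2 pixel
    // format, reads the frame dimensions, and resolves the buffer's virtual address
    // (HalMapMemory) and fd / physical address (HalGetMemoryMapFd).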

    if(pPicBufMetaData->Type == PIC_BUF_TYPE_YCbCr420 || pPicBufMetaData->Type == PIC_BUF_TYPE_YCbCr422){        
        if(pPicBufMetaData->Type == PIC_BUF_TYPE_YCbCr420){
            fmt = V4L2_PIX_FMT_NV12;	// NV12: this is the branch hit for the HDMI input
        }else{
            fmt = V4L2_PIX_FMT_YUYV;
        }

        if(pPicBufMetaData->Layout == PIC_BUF_LAYOUT_SEMIPLANAR ){
            y_addr = (ulong_t)(pPicBufMetaData->Data.YCbCr.semiplanar.Y.pBuffer);
            //now gap of y and uv buffer is 0. so uv addr could be calc from y addr.
            uv_addr = (ulong_t)(pPicBufMetaData->Data.YCbCr.semiplanar.CbCr.pBuffer);
            width = pPicBufMetaData->Data.YCbCr.semiplanar.Y.PicWidthPixel;
            height = pPicBufMetaData->Data.YCbCr.semiplanar.Y.PicHeightPixel;
            //get vir addr
            HalMapMemory( tmpHandle, y_addr, 100, HAL_MAPMEM_READWRITE, &y_addr_vir );
            HalMapMemory( tmpHandle, uv_addr, 100, HAL_MAPMEM_READWRITE, &uv_addr_vir );

            HalGetANDROIDMemory(tmpHandle, y_addr, &graphic_buffer);
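            // With DRM gralloc, HalGetMemoryMapFd() stores the buffer's dma-buf fd in
            // phy_addr; otherwise the fd is used only when the IOMMU is enabled, and the
            // raw physical address is passed on when it is not.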
			
#if defined(RK_DRM_GRALLOC) // cht should use fd 
			HalGetMemoryMapFd(tmpHandle, y_addr,(int*)&phy_addr);


#else
            if(gCamInfos[mCamId].pcam_total_info->mIsIommuEnabled){
                HalGetMemoryMapFd(tmpHandle, y_addr,(int*)&phy_addr);
            }
            else{
                phy_addr = y_addr;
			}
#endif
           
            /* ddl@rock-chips.com:  v1.3.0 */
            y_size = pPicBufMetaData->Data.YCbCr.semiplanar.Y.PicWidthPixel*pPicBufMetaData->Data.YCbCr.semiplanar.Y.PicHeightPixel;
            if (uv_addr > (y_addr+y_size)) {
                memcpy((void*)((ulong_t)y_addr_vir+y_size),uv_addr_vir, y_size/2);
            }
            
        }else if(pPicBufMetaData->Layout == PIC_BUF_LAYOUT_COMBINED){
            y_addr = (ulong_t)(pPicBufMetaData->Data.YCbCr.combined.pBuffer );
            width = pPicBufMetaData->Data.YCbCr.combined.PicWidthPixel>>1;
            height = pPicBufMetaData->Data.YCbCr.combined.PicHeightPixel;
            HalMapMemory( tmpHandle, y_addr, 100, HAL_MAPMEM_READWRITE, &y_addr_vir );
            HalGetANDROIDMemory(tmpHandle, y_addr, &graphic_buffer);
#if defined(RK_DRM_GRALLOC) // should use fd
			HalGetMemoryMapFd(tmpHandle, y_addr,(int*)&phy_addr);
#else
            if(gCamInfos[mCamId].pcam_total_info->mIsIommuEnabled)
                HalGetMemoryMapFd(tmpHandle, y_addr,(int*)&phy_addr);
            else
                phy_addr = y_addr;
#endif
        }
    } else if(pPicBufMetaData->Type == PIC_BUF_TYPE_RAW16) {

        y_addr = (ulong_t)(pPicBufMetaData->Data.raw.pBuffer );
        width = pPicBufMetaData->Data.raw.PicWidthPixel;
        height = pPicBufMetaData->Data.raw.PicHeightPixel;
        fmt = V4L2_PIX_FMT_SBGGR10;
        HalMapMemory( tmpHandle, y_addr, 100, HAL_MAPMEM_READWRITE, &y_addr_vir );
        HalGetANDROIDMemory(tmpHandle, y_addr, &graphic_buffer);
#if defined(RK_DRM_GRALLOC) // should use fd
		HalGetMemoryMapFd(tmpHandle, y_addr,(int*)&phy_addr);
#else
        if(gCamInfos[mCamId].pcam_total_info->mIsIommuEnabled)
            HalGetMemoryMapFd(tmpHandle, y_addr,(int*)&phy_addr);
        else
            phy_addr = y_addr;
#endif		
    } else {
        LOGE("not support this type(%dx%d)  ,just support  yuv20 now",width,height);
        return ;
    }
    

    if ( pMediaBuffer->pNext != NULL ) {
        MediaBufLockBuffer( (MediaBuffer_t*)pMediaBuffer->pNext );
    }
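	// Drop the first preview_frame_inval frames: for ISP sensors wait until auto
	// white balance is stable, for SOC sensors simply skip them (not applied in CTS).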
	
	if( !mIsCtsTest && (preview_frame_inval > 0) ){
	  	preview_frame_inval--;
		LOG1("frame_inval:%d\n",preview_frame_inval);

        if(m_camDevice->isSOCSensor() == false){
			bool awb_ret = m_camDevice->isAwbStable();
			LOG1("awb test fps(%d) awb stable(%d)\n", preview_frame_inval, awb_ret);
			
			if( awb_ret!=true){
				LOG1("awb test fps(%d) awb stable(%d)\n", preview_frame_inval, awb_ret);
				goto end;
			}
		}else{
			goto end;
		}
    }
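    // Dispatch the frame to each active consumer. Every branch allocates a FramInfo_s
    // describing this buffer (addresses, size, format) and tags it with used_flag:
    // display = 0, video encoder = 1, still picture = 2, preview data callback = 3,
    // face detection = 4, UVC = 6; the ISP tuning path stores the media buffer pointer.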


    if(mIsSendToTunningTh){
        MediaBufLockBuffer( pMediaBuffer );
        //new frames
        FramInfo_s *tmpFrame=(FramInfo_s *)malloc(sizeof(FramInfo_s));
        if(!tmpFrame){
            MediaBufUnlockBuffer( pMediaBuffer );
            return;
        }
        //add to vector
        memset(tmpFrame, 0x0, sizeof(*tmpFrame));
        tmpFrame->frame_index = (ulong_t)tmpFrame; 
        tmpFrame->phy_addr = (ulong_t)phy_addr;
        tmpFrame->frame_width = width;
        tmpFrame->frame_height= height;
        tmpFrame->vir_addr = (ulong_t)y_addr_vir;
        tmpFrame->frame_fmt = fmt;
        tmpFrame->used_flag = (ulong_t)pMediaBuffer; // tunning thread will use pMediaBuffer

        {
            Mutex::Autolock lock(mFrameArrayLock);
            mFrameInfoArray.add((void*)tmpFrame,(void*)pMediaBuffer);
        }
        Message_cam msg;
        msg.command = ISP_TUNNING_CMD_PROCESS_FRAME;
        msg.arg2 = (void*)(tmpFrame);
        msg.arg3 = (void*)(tmpFrame->used_flag);
        mISPTunningQ->put(&msg);

    }else{
        if (mIsSendToUvcTh){
        	MediaBufLockBuffer( pMediaBuffer );
        	FramInfo_s *tmpFrame=(FramInfo_s *)malloc(sizeof(FramInfo_s));
			if(!tmpFrame){
				MediaBufUnlockBuffer( pMediaBuffer );
				return;
			}
			memset(tmpFrame, 0x0, sizeof(*tmpFrame));
			tmpFrame->frame_index = (ulong_t)tmpFrame; 
			tmpFrame->phy_addr = (ulong_t)phy_addr;
			tmpFrame->frame_width = width;
			tmpFrame->frame_height= height;
			tmpFrame->vir_addr = (ulong_t)y_addr_vir;
			tmpFrame->frame_fmt = fmt;		
			tmpFrame->used_flag = 6;
			{
				Mutex::Autolock lock(mFrameArrayLock);
				mFrameInfoArray.add((void*)tmpFrame,(void*)pMediaBuffer);
			}
			Message_cam msg;
			msg.command = ISP_UVC_CMD_PROCESS_FRAME;
			msg.arg2 = (void*)(tmpFrame);
			mISPUvcQ.put(&msg);

        }
        //need to send face detection ?
    	if(mRefEventNotifier->isNeedSendToFaceDetect()){  
    	    MediaBufLockBuffer( pMediaBuffer );
    		//new frames
    		FramInfo_s *tmpFrame=(FramInfo_s *)malloc(sizeof(FramInfo_s));
    		if(!tmpFrame){
    			MediaBufUnlockBuffer( pMediaBuffer );
    			return;
          }
          //add to vector
          memset(tmpFrame, 0x0, sizeof(*tmpFrame));
          tmpFrame->frame_index = (ulong_t)tmpFrame; 
          tmpFrame->phy_addr = (ulong_t)phy_addr;
          tmpFrame->frame_width = width;
          tmpFrame->frame_height= height;
          tmpFrame->vir_addr = (ulong_t)y_addr_vir;
          tmpFrame->frame_fmt = fmt;
    	  
          tmpFrame->used_flag = 4;

          tmpFrame->zoom_value = mZoomVal;
        
          {
            Mutex::Autolock lock(mFrameArrayLock);
            mFrameInfoArray.add((void*)tmpFrame,(void*)pMediaBuffer);

          }
          mRefEventNotifier->notifyNewFaceDecFrame(tmpFrame);
        }
    	//need to display ?
    	if(mRefDisplayAdapter->isNeedSendToDisplay()){  
	    property_set("sys.hdmiin.display", "1");//just used by hdmi-in
    	    MediaBufLockBuffer( pMediaBuffer );
    		//new frames
    		FramInfo_s *tmpFrame=(FramInfo_s *)malloc(sizeof(FramInfo_s));
    		if(!tmpFrame){
    			MediaBufUnlockBuffer( pMediaBuffer );
    			return;
          }
          //add to vector
          memset(tmpFrame, 0x0, sizeof(*tmpFrame));
          tmpFrame->frame_index = (ulong_t)tmpFrame; 
          tmpFrame->phy_addr = (ulong_t)phy_addr;
          tmpFrame->frame_width = width;
          tmpFrame->frame_height= height;
          tmpFrame->vir_addr = (ulong_t)y_addr_vir;
          tmpFrame->frame_fmt = fmt;
    	  
          tmpFrame->used_flag = 0;

          #if (USE_RGA_TODO_ZOOM == 1)  
             tmpFrame->zoom_value = mZoomVal;
          #else
          if((tmpFrame->frame_width > 2592) && (tmpFrame->frame_height > 1944) && (mZoomVal != 100) ){
             tmpFrame->zoom_value = mZoomVal;
          }else
             tmpFrame->zoom_value = 100;
          #endif
        
          {
            Mutex::Autolock lock(mFrameArrayLock);
            mFrameInfoArray.add((void*)tmpFrame,(void*)pMediaBuffer);
            mDispFrameLeak++;

          }
          mRefDisplayAdapter->notifyNewFrame(tmpFrame);

        }

    	//video enc ?
    	if(mRefEventNotifier->isNeedSendToVideo()) {
            MediaBufLockBuffer( pMediaBuffer );
            //new frames
            FramInfo_s *tmpFrame=(FramInfo_s *)malloc(sizeof(FramInfo_s));
            if(!tmpFrame){
            	MediaBufUnlockBuffer( pMediaBuffer );
            	return;
            }          
            //add to vector
            memset(tmpFrame, 0x0, sizeof(*tmpFrame));
            tmpFrame->frame_index = (ulong_t)tmpFrame; 
            tmpFrame->phy_addr = (ulong_t)phy_addr;
            tmpFrame->frame_width = width;
            tmpFrame->frame_height= height;
            tmpFrame->vir_addr = (ulong_t)y_addr_vir;
            tmpFrame->frame_fmt = fmt;
            tmpFrame->used_flag = 1;
#if (USE_RGA_TODO_ZOOM == 1)  
            tmpFrame->zoom_value = mZoomVal;
#else
            if((tmpFrame->frame_width > 2592) && (tmpFrame->frame_height > 1944) && (mZoomVal != 100) ) {
                tmpFrame->zoom_value = mZoomVal;
            } else {
                tmpFrame->zoom_value = 100;
            }
#endif
          
            {
                Mutex::Autolock lock(mFrameArrayLock);
                mFrameInfoArray.add((void*)tmpFrame,(void*)pMediaBuffer);
                mVideoEncFrameLeak++;
            }
            mRefEventNotifier->notifyNewVideoFrame(tmpFrame);		
    	}
	flashProcess();
        
    	//picture ?
    	if((curFlashStatus.capture_ready )&& (mRefEventNotifier->isNeedSendToPicture()) ) {
            bool send_to_pic = true;
			MediaBufLockBuffer( pMediaBuffer );
            //new frames
            FramInfo_s *tmpFrame=(FramInfo_s *)malloc(sizeof(FramInfo_s));
            if(!tmpFrame){
				MediaBufUnlockBuffer( pMediaBuffer );
				return;
            }
			memset(tmpFrame, 0x0, sizeof(*tmpFrame));
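            // Multi-frame denoise (MFNR): when enabled, accumulate mfd.process_frames
            // frames through the GPU adapter; only once the set is full is the result
            // (optionally run through UVNR) packaged and sent to the picture path.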

            if (mfd.enable && mMFNRAvailable) {
                mMFNRAdapter->setDimension(width, height);
                mMFNRAdapter->sendBlockedMsg(CMD_GPU_PROCESS_INIT);
                mMFNRAdapter->wrapData(graphic_buffer, y_addr_vir, phy_addr, width*height*3/2, NULL);

                if((mfd.frame_cnt == 0) && (mMFNRAdapter->mCheckInitialized)) {
                	mMFNRAdapter->sendBlockedMsg(CMD_GPU_PROCESS_SETFRAMES);
                    mfd.process_frames = mMFNRAdapter->getFrameCount();
                }

            	if(mfd.frame_cnt < mfd.process_frames) {
            		mMFNRAdapter->sendBlockedMsg(CMD_GPU_PROCESS_UPDATE);
            		mfd.buffer_full = false;
            		mfd.frame_cnt++ ;
            	} else {
            		mfd.frame_cnt = 0;
            		mfd.buffer_full = true;
            	}
            } else {
            	mfd.frame_cnt = 0;
            	mfd.buffer_full = true;
            }

			if(mfd.buffer_full == true) {
				#if 0
	             if(mFlashStatus && ((ulong_t)(pPicBufMetaData->priv) != 1)){
	                pPicBufMetaData->priv = NULL;
	                send_to_pic = false;
	                LOG1("not the desired flash pic,skip it,mFlashStatus %d!",mFlashStatus);
	            }
	            #endif
				if (send_to_pic) {
					float flash_luminance = 0;
					tmpFrame->vir_addr = (ulong_t)y_addr_vir;
                    if ((mfd.enable) && (mMFNRAdapter->mCheckInitialized)) {
                    	mMFNRAdapter->sendBlockedMsg(CMD_GPU_PROCESS_RENDER);
                        mMFNRAdapter->sendBlockedMsg(CMD_GPU_PROCESS_GETRESULT);
                        //mMutliFrameDenoise->getResult(tmpFrame->vir_addr);
                    }

					if( tmpFrame->vir_addr == 0) {
						LOGE("mfd tmpFrame->vir_addr is NULL!");
					}

                    if (uvnr.enable && mUVNRAvailable) {
                        if (!mUVNRAdapter->mCheckInitialized) {
                            mUVNRAdapter->setDimension(width, height);
                            mUVNRAdapter->sendBlockedMsg(CMD_GPU_PROCESS_INIT);
                        }
                        mUVNRAdapter->wrapData(graphic_buffer, y_addr_vir, phy_addr, width*height*3/2, NULL);
                        mUVNRAdapter->sendBlockedMsg(CMD_GPU_PROCESS_UPDATE);
                        mUVNRAdapter->sendBlockedMsg(CMD_GPU_PROCESS_RENDER);
                        mUVNRAdapter->sendBlockedMsg(CMD_GPU_PROCESS_GETRESULT);
                    }
					if( tmpFrame->vir_addr == NULL) {
						LOGE("uvnr tmpFrame->vir_addr is NULL!");
					}
	                //add to vector
	                tmpFrame->frame_index = (ulong_t)tmpFrame; 
	                tmpFrame->phy_addr = (ulong_t)phy_addr;
	                tmpFrame->frame_width = width;
	                tmpFrame->frame_height= height;
	                //tmpFrame->vir_addr = (ulong_t)y_addr_vir;
	                tmpFrame->frame_fmt = fmt;
	                tmpFrame->used_flag = 2;
	                tmpFrame->res = &mImgAllFovReq;
#if (USE_RGA_TODO_ZOOM == 1)  
	                tmpFrame->zoom_value = mZoomVal;
#else
	                if((tmpFrame->frame_width > 2592) && (tmpFrame->frame_height > 1944) && (mZoomVal != 100) ){
	                    tmpFrame->zoom_value = mZoomVal;
	                } else {
	                    tmpFrame->zoom_value = 100;
	                }
#endif
	                {
	                    Mutex::Autolock lock(mFrameArrayLock);
	                    mFrameInfoArray.add((void*)tmpFrame,(void*)pMediaBuffer);
	                    mPicEncFrameLeak++;
	                }
	                picture_info_s &picinfo = mRefEventNotifier->getPictureInfoRef();
	                getCameraParamInfo(picinfo.cameraparam);
	                mRefEventNotifier->notifyNewPicFrame(tmpFrame);
			curFlashStatus.capture_ready = false;
	            }
            }
    	}

    	//preview data callback ?
    	if(mRefEventNotifier->isNeedSendToDataCB() && (mRefDisplayAdapter->getDisplayStatus() == 0)) {
            MediaBufLockBuffer( pMediaBuffer );
            //new frames
            FramInfo_s *tmpFrame=(FramInfo_s *)malloc(sizeof(FramInfo_s));
            if(!tmpFrame){
            	MediaBufUnlockBuffer( pMediaBuffer );
            	return;
            }
            //add to vector
            memset(tmpFrame, 0x0, sizeof(*tmpFrame));
            tmpFrame->frame_index = (ulong_t)tmpFrame; 
            tmpFrame->phy_addr = (ulong_t)phy_addr;
            tmpFrame->frame_width = width;
            tmpFrame->frame_height= height;
            tmpFrame->vir_addr = (ulong_t)y_addr_vir;
            tmpFrame->frame_fmt = fmt;
            tmpFrame->used_flag = 3;
            tmpFrame->isp_info = pMediaBuffer->ispinfo;
#if (USE_RGA_TODO_ZOOM == 1)  
            tmpFrame->zoom_value = mZoomVal;
#else
            if((tmpFrame->frame_width > 2592) && (tmpFrame->frame_height > 1944) && (mZoomVal != 100) ) {
                tmpFrame->zoom_value = mZoomVal;
            } else {
                tmpFrame->zoom_value = 100;
            }
#endif

            {
                Mutex::Autolock lock(mFrameArrayLock);
                mFrameInfoArray.add((void*)tmpFrame,(void*)pMediaBuffer);
                mPreviewCBFrameLeak++;
            }
                mRefEventNotifier->notifyNewPreviewCbFrame(tmpFrame);			
        }
    }
end:
	
	tem_val =0 ;
}


Here mCamId is the device index, so the two supported HDMI inputs can be told apart. y_addr_vir is the virtual address at which the captured frame data can be read directly. To hand the HDMI image data to your own code, a callback can be hooked in here, for example:

//	printf("mCamId = %d \n",mCamId);
	if(mCamId == 0){
		hdmi0(fmt, width, height, y_addr_vir);
	}
	if(mCamId == 1){
		hdmi1(fmt, width, height, y_addr_vir);
	}
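
hdmi0/hdmi1 above are placeholders for whatever consumer you register; they are not part of the stock HAL. As a quick way to verify the data path, a hypothetical hdmi0 could simply dump one NV12 frame to disk:

#include <stdio.h>

// Hypothetical verification callback: dump a single NV12 frame to /data for inspection.
// fmt/width/height/y_addr_vir are exactly the values passed from bufferCb above.
static void hdmi0(int fmt, int width, int height, void *y_addr_vir)
{
    static bool dumped = false;
    if (dumped || fmt != V4L2_PIX_FMT_NV12 || y_addr_vir == NULL)
        return;

    FILE *fp = fopen("/data/hdmi0_frame.nv12", "wb");
    if (fp) {
        // NV12: full-resolution Y plane followed by interleaved CbCr at half height.
        fwrite(y_addr_vir, 1, (size_t)width * height * 3 / 2, fp);
        fclose(fp);
        dumped = true;
    }
}

The dumped file can then be pulled with adb and checked with a raw YUV viewer, e.g. ffplay -f rawvideo -pixel_format nv12 -video_size <width>x<height> hdmi0_frame.nv12.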

Summary

The above describes one way for the HAL-layer Camera module on RK3399 Android 7.1 to obtain HDMI input image data, by hooking a callback into the frame path. There are no doubt better approaches, and discussion is welcome.

Link to the modified CameraHal: https://download.csdn.net/download/qq_41563600/86402466
