Preface
The earlier articles in this series described the logic of the Android Camera framework in reasonable detail; this one is a hands-on record of tracking down a virtual-camera bug. While building a virtual camera with v4l2loopback, I fed a video stream into the loopback driver and opened the virtual camera with the Camera app; one frame of the preview came up as a solid green screen. Observing logcat produced the log below (a minimal sketch of how frames were fed into the loopback device follows right after the log):
10-10 10:17:21.592 +0000 3560 4717 V CameraDeviceClient: onResultAvailable
10-10 10:17:21.597 +0000 3560 4716 V Camera3-OutputStream: getBufferLockedCommon:555
10-10 10:17:21.597 +0000 3560 4716 V Camera3-Device: wrapAsHidlRequest:3365, frame_number=3
10-10 10:17:21.597 +0000 3560 4716 V Camera3-Device: wrapAsHidlRequest:3397, captureRequest->inputBuffer.bufferId=-1
10-10 10:17:21.597 +0000 3560 4716 V Camera3-Device: stream 0 now have 4 buffer caches, buf 0xedaa0c80
10-10 10:17:21.597 +0000 3560 4716 V Camera3-Device: wrapAsHidlRequest:3419, frame_number=3
10-10 10:17:21.597 +0000 3560 4716 V Camera3-Device: processBatchCaptureRequests:3482, mHidlSession send to halSession layer
10-10 10:17:21.598 +0000 3530 3628 I FslCameraHAL: Camera::processCaptureRequest() camera[1] Request Frame:3 Settings:0x0
10-10 10:17:21.598 +0000 3530 3628 I FslCameraHAL: Camera::processCaptureRequest() camera[1] Capturing new frame.
10-10 10:17:21.598 +0000 3530 3628 I FslCameraHAL: vendor/nxp-opensource/imx/libcamera3/CameraUtils.cpp init() FrameNumber=3, OutBuffer=1
10-10 10:17:21.598 +0000 3530 3628 I FslCameraHAL: Camera::processCaptureRequest() camera[1] callback=0xeb7030e4
10-10 10:17:21.598 +0000 3530 3628 I FslCameraHAL: VideoStream::requestCapture() 321
10-10 10:17:22.608 +0000 3530 3619 I FslCameraHAL: MMAPStream::onFrameAcquireLocked() cfilledbuffer.index=2
10-10 10:17:22.608 +0000 3530 3619 I FslCameraHAL: VideoStream::acquireFrameLocked() mBuffers[2]=0xeb70b260
10-10 10:17:22.608 +0000 3530 3619 I FslCameraHAL: VideoStream::processCaptureRequest() mOutBuffersNumber=1
10-10 10:17:22.608 +0000 3530 3619 I FslCameraHAL: Stream::processCaptureBuffer(), mJpeg=0
10-10 10:17:22.608 +0000 3530 3619 I FslCameraHAL: Stream::processFrameBuffer() 347
10-10 10:17:22.609 +0000 3560 4716 V Camera3-Stream: getBuffer: Already dequeued max output buffers (2), wait for next returned one.
10-10 10:17:22.610 +0000 3560 4716 V Camera3-Stream: Camera3Stream getBuffer:513
10-10 10:17:22.610 +0000 3530 3619 I FslCameraHAL: MMAPStream::onFrameReturnLocked() enter
10-10 10:17:22.610 +0000 3560 4716 V Camera3-OutputStream: getBufferLockedCommon:555
10-10 10:17:22.610 +0000 3560 4716 V Camera3-Device: wrapAsHidlRequest:3365, frame_number=4
10-10 10:17:22.610 +0000 3560 4716 V Camera3-Device: wrapAsHidlRequest:3397, captureRequest->inputBuffer.bufferId=-1
10-10 10:17:22.610 +0000 3560 4716 V Camera3-Device: wrapAsHidlRequest:3419, frame_number=4
10-10 10:17:22.610 +0000 3560 4716 V Camera3-Device: processBatchCaptureRequests:3482, mHidlSession send to halSession layer
10-10 10:17:22.611 +0000 3530 3628 I FslCameraHAL: Camera::processCaptureRequest() camera[1] Request Frame:4 Settings:0x0
10-10 10:17:22.611 +0000 3530 3628 I FslCameraHAL: Camera::processCaptureRequest() camera[1] Capturing new frame.
10-10 10:17:22.611 +0000 3530 3628 I FslCameraHAL: vendor/nxp-opensource/imx/libcamera3/CameraUtils.cpp init() FrameNumber=4, OutBuffer=1
10-10 10:17:22.611 +0000 3530 3628 I FslCameraHAL: Camera::processCaptureRequest() camera[1] callback=0xeb7030e4
10-10 10:17:22.611 +0000 3530 3628 I FslCameraHAL: VideoStream::requestCapture() 321
10-10 10:17:22.615 +0000 3530 3619 I FslCameraHAL: MMAPStream::onFrameReturnLocked() cfilledbuffer.index=2
10-10 10:17:22.615 +0000 3530 3619 I FslCameraHAL: VideoStream::handleMessage() 471
10-10 10:17:22.615 +0000 3530 3619 I FslCameraHAL: VideoStream::handleCaptureFrame() 367
10-10 10:17:22.615 +0000 3530 3619 I FslCameraHAL: VideoStream::processCaptureSettings() 441
10-10 10:17:22.616 +0000 3560 4716 V Camera3-Stream: getBuffer: Already dequeued max output buffers (2), wait for next returned one.
10-10 10:17:22.616 +0000 3560 4717 V Camera2-FrameProcessorBase: processNewFrames: Camera 1: Process new frames
10-10 10:17:22.616 +0000 3530 3619 I FslCameraHAL: MMAPStream::onFrameAcquireLocked() enter
10-10 10:17:22.616 +0000 3560 4717 V Camera2-FrameProcessorBase: processSingleFrame: Camera 1: Process single frame (is empty? 0)
10-10 10:17:22.616 +0000 3560 4717 V Camera2-FrameProcessorBase: processListeners: Camera 1: Got 1 range listeners out of 1
10-10 10:17:22.616 +0000 3560 4717 V CameraDeviceClient: onResultAvailable
10-10 10:17:23.385 +0000 4229 4229 W ViewRootImpl[Launcher]: Cancelling event due to no window focus: MotionEvent { action=ACTION_CANCEL, actionButton=0, id[0]=0, x[0]=1440.9208, y[0]=188.67592, toolType[0]=TOOL_TYPE_MOUSE, buttonState=0, metaState=0, flags=0x0, edgeFlags=0x0, pointerCount=1, historySize=0, eventTime=306941, downTime=302949, deviceId=6, source=0x2002 }
10-10 10:17:23.636 +0000 3530 3619 I FslCameraHAL: MMAPStream::onFrameAcquireLocked() cfilledbuffer.index=0
10-10 10:17:23.636 +0000 3530 3619 I FslCameraHAL: VideoStream::acquireFrameLocked() mBuffers[0]=0xeb70b2a0
10-10 10:17:23.636 +0000 3530 3619 I FslCameraHAL: VideoStream::processCaptureRequest() mOutBuffersNumber=1
10-10 10:17:23.636 +0000 3530 3619 I FslCameraHAL: Stream::processCaptureBuffer(), mJpeg=0
10-10 10:17:23.636 +0000 3530 3619 I FslCameraHAL: Stream::processFrameBuffer() 347
10-10 10:17:23.636 +0000 3530 3619 E dpug2d : g2d_blitEx: Invalid src planes[0] pointer=0x0 ! ///> the display library reports that the planes[0] pointer is zero
10-10 10:17:23.642 +0000 3560 4716 V Camera3-Stream: Camera3Stream getBuffer:513
10-10 10:17:23.642 +0000 3530 3619 I FslCameraHAL: MMAPStream::onFrameReturnLocked() enter
10-10 10:17:23.642 +0000 3560 4716 V Camera3-OutputStream: getBufferLockedCommon:555
10-10 10:17:23.642 +0000 3560 4716 V Camera3-Device: wrapAsHidlRequest:3365, frame_number=5
10-10 10:17:23.642 +0000 3560 4716 V Camera3-Device: wrapAsHidlRequest:3397, captureRequest->inputBuffer.bufferId=-1
10-10 10:17:23.642 +0000 3560 4716 V Camera3-Device: wrapAsHidlRequest:3419, frame_number=5
10-10 10:17:23.642 +0000 3560 4716 V Camera3-Device: processBatchCaptureRequests:3482, mHidlSession send to halSession layer
10-10 10:17:23.642 +0000 3530 3628 I FslCameraHAL: Camera::processCaptureRequest() camera[1] Request Frame:5 Settings:0x0
10-10 10:17:23.642 +0000 3530 3628 I FslCameraHAL: Camera::processCaptureRequest() camera[1] Capturing new frame.
10-10 10:17:23.642 +0000 3530 3628 I FslCameraHAL: vendor/nxp-opensource/imx/libcamera3/CameraUtils.cpp init() FrameNumber=5, OutBuffer=1
10-10 10:17:23.642 +0000 3530 3628 I FslCameraHAL: Camera::processCaptureRequest() camera[1] callback=0xeb7030e4
10-10 10:17:23.642 +0000 3530 3628 I FslCameraHAL: VideoStream::requestCapture() 321
10-10 10:17:23.647 +0000 3530 3619 I FslCameraHAL: MMAPStream::onFrameReturnLocked() cfilledbuffer.index=0
10-10 10:17:23.647 +0000 3530 3619 I FslCameraHAL: VideoStream::handleMessage() 471
10-10 10:17:23.647 +0000 3530 3619 I FslCameraHAL: VideoStream::handleCaptureFrame() 367
10-10 10:17:23.647 +0000 3530 3619 I FslCameraHAL: VideoStream::processCaptureSettings() 441
10-10 10:17:23.647 +0000 3560 4716 V Camera3-Stream: getBuffer: Already dequeued max output buffers (2), wait for next returned one.
10-10 10:17:23.648 +0000 3560 4717 V Camera2-FrameProcessorBase: processNewFrames: Camera 1: Process new frames
10-10 10:17:23.648 +0000 3560 4717 V Camera2-FrameProcessorBase: processSingleFrame: Camera 1: Process single frame (is empty? 0)
10-10 10:17:23.648 +0000 3560 4717 V Camera2-FrameProcessorBase: processListeners: Camera 1: Got 1 range listeners out of 1
10-10 10:17:23.648 +0000 3530 3619 I FslCameraHAL: MMAPStream::onFrameAcquireLocked() enter
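For context, feeding frames into v4l2loopback from user space follows the pattern below. This is a minimal sketch rather than the exact feeder used in this experiment: the device node /dev/video2, the 640x480 resolution and the YUYV format are assumptions to adapt to your setup. v4l2loopback accepts plain write() on its output side once the format has been set with VIDIOC_S_FMT.

// Minimal v4l2loopback feeder sketch (assumed node /dev/video2, 640x480 YUYV).
#include <fcntl.h>
#include <unistd.h>
#include <sys/ioctl.h>
#include <linux/videodev2.h>
#include <vector>

int main() {
    int fd = open("/dev/video2", O_RDWR);               // v4l2loopback node (assumption)
    if (fd < 0) return 1;

    v4l2_format fmt{};
    fmt.type = V4L2_BUF_TYPE_VIDEO_OUTPUT;               // we act as the producer side
    fmt.fmt.pix.width       = 640;
    fmt.fmt.pix.height      = 480;
    fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV;
    fmt.fmt.pix.field       = V4L2_FIELD_NONE;
    if (ioctl(fd, VIDIOC_S_FMT, &fmt) < 0) return 1;

    // The driver fills in sizeimage for the negotiated format.
    size_t frameSize = fmt.fmt.pix.sizeimage ? fmt.fmt.pix.sizeimage : 640 * 480 * 2;
    std::vector<unsigned char> frame(frameSize, 0x80);   // mid-grey YUYV test frame

    for (int i = 0; i < 300; ++i) {                      // ~10 s at 30 fps
        write(fd, frame.data(), frame.size());
        usleep(33 * 1000);
    }
    close(fd);
    return 0;
}

As an aside, a YUV buffer that stays zero-filled renders as green once converted to RGB, which is consistent with the single green frame seen above: when the blit fails, the destination buffer apparently never receives valid pixel data.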
The closed-source display library
This library is shipped by NXP as a prebuilt binary, so the root cause cannot be located inside it; the only option is to go back to where the camera's captureRequest results are returned and search from there.
This article therefore records the analysis of how a captureRequest is handled on the CameraProvider side in order to narrow down the source of the bug. The CameraProvider-side source is supplied by the chip vendor and differs from vendor to vendor; the setup here is NXP's IMX8QM platform running Android 8.1, with the camera libcamera3 library provided by NXP.
The log contains the error "dpug2d : g2d_blitEx: Invalid src planes[0] pointer=0x0 !". The code immediately above that error is the Stream::processCaptureBuffer() function, which calls into the Stream::processFrameBuffer() flow. The CameraProvider-side code is driven by the CameraService over Binder: CameraDeviceClient connects to the CameraServer through a proxy and can be regarded as the initiator. When the camera streams are configured it creates a CaptureRequest whose repeating attribute is true, so the CameraServer keeps re-issuing that captureRequest down to the CameraProvider side, where it reaches the camera object; the camera then pulls frame data through its Stream object via the V4L2 ioctls (VIDIOC_QBUF/VIDIOC_DQBUF). That is the data flow in broad strokes.
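For readers less familiar with the V4L2 side, the sketch below shows the bare MMAP capture loop that MMAPStream wraps; the device node and buffer count are placeholders. In the HAL, onFrameAcquireLocked() corresponds to VIDIOC_DQBUF and onFrameReturnLocked() to VIDIOC_QBUF, which is exactly the pairing visible in the log above.

// Bare V4L2 MMAP capture loop sketch (device node and buffer count are placeholders).
#include <fcntl.h>
#include <unistd.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <linux/videodev2.h>
#include <cstdio>

int main() {
    int fd = open("/dev/video0", O_RDWR);
    if (fd < 0) return 1;

    // Ask the driver for MMAP buffers, as MMAPStream does at configure time.
    v4l2_requestbuffers req{};
    req.count  = 4;
    req.type   = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    req.memory = V4L2_MEMORY_MMAP;
    if (ioctl(fd, VIDIOC_REQBUFS, &req) < 0) return 1;
    if (req.count > 4) req.count = 4;

    void*  maps[4] = {};
    size_t lens[4] = {};
    for (unsigned i = 0; i < req.count; ++i) {
        v4l2_buffer buf{};
        buf.type   = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory = V4L2_MEMORY_MMAP;
        buf.index  = i;
        if (ioctl(fd, VIDIOC_QUERYBUF, &buf) < 0) return 1;
        lens[i] = buf.length;
        maps[i] = mmap(nullptr, buf.length, PROT_READ | PROT_WRITE,
                       MAP_SHARED, fd, buf.m.offset);
        ioctl(fd, VIDIOC_QBUF, &buf);                 // hand every buffer to the driver
    }

    int type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    ioctl(fd, VIDIOC_STREAMON, &type);

    for (int n = 0; n < 10; ++n) {
        v4l2_buffer buf{};
        buf.type   = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory = V4L2_MEMORY_MMAP;
        // Blocks until a filled frame is ready: the onFrameAcquireLocked() step.
        if (ioctl(fd, VIDIOC_DQBUF, &buf) < 0) break;
        printf("frame %d in buffer %u, %u bytes\n", n, buf.index, buf.bytesused);
        // Give the buffer back to the driver: the onFrameReturnLocked() step.
        ioctl(fd, VIDIOC_QBUF, &buf);
    }

    ioctl(fd, VIDIOC_STREAMOFF, &type);
    for (unsigned i = 0; i < req.count; ++i)
        if (maps[i]) munmap(maps[i], lens[i]);
    close(fd);
    return 0;
}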
The function call chain is as follows:
CameraServer
-------------
| Binder
V
Camera::processCaptureRequest(camera3_capture_request_t *request)
---->sp<CaptureRequest> capture = new CaptureRequest();
----> devStream->requestCapture(capture);
----> mMessageQueue.postMessage(new CMessage(MSG_FRAME, 0)); //> MSG_FRAME: a new frame request is queued
                 |
                 |  (the VideoStream message thread dequeues MSG_FRAME in handleMessage())
                 V
ret = handleCaptureFrame();
----> ret = processCaptureRequest(*buf, req);
----> stream->processCaptureBuffer(src, req->mSettings);
----> res = processFrameBuffer(src, meta);
----> imageProcess->handleFrame(*out, src);
The simplified chain above is how the CameraProvider side services the server's captureRequest. Let's start the source analysis with processCaptureBuffer():
int32_t Stream::processCaptureBuffer(StreamBuffer& src,
                                     sp<Metadata> meta)
{
    int32_t res = 0;
    StreamBuffer* out = mCurrent;
    if (out == NULL || out->mBufHandle == NULL) {
        ALOGE("%s invalid buffer handle", __func__);
        return 0;
    }

    // Wait on the output buffer's acquire fence before writing into it.
    if (out->mAcquireFence != -1) {
        res = sync_wait(out->mAcquireFence, CAMERA_SYNC_TIMEOUT);
        if (res == -ETIME) {
            ALOGE("%s: Timeout waiting on buffer acquire fence",
                  __func__);
            return res;
        } else if (res) {
            ALOGE("%s: Error waiting on buffer acquire fence: %s(%d)",
                  __func__, strerror(-res), res);
            ALOGV("fence id:%d", out->mAcquireFence);
        }
        close(out->mAcquireFence);
    }

    ALOGI("Stream::%s(), mJpeg=%d", __func__, (int)mJpeg);
    // JPEG streams go through the JPEG encoder; preview/video streams are a plain frame copy.
    if (mJpeg) {
        mJpegBuilder->reset();
        mJpegBuilder->setMetadata(meta);

        res = processJpegBuffer(src, meta);
        mJpegBuilder->setMetadata(NULL);
    } else {
        res = processFrameBuffer(src, meta);
    }

    return res;
}
At the end of the function it calls res = processFrameBuffer(src, meta), whose implementation is:
int32_t Stream::processFrameBuffer(StreamBuffer& src,
sp<Metadata> meta __unused)
{
sp<Stream>& device = src.mStream;
if (device == NULL) {
ALOGE("%s invalid device stream", __func__);
return 0;
}
StreamBuffer* out = mCurrent;
if (out == NULL || out->mBufHandle == NULL) {
ALOGE("%s invalid buffer handle", __func__);
return 0;
}
ALOGI("Stream::%s() %d", __func__, __LINE__);
fsl::ImageProcess *imageProcess = fsl::ImageProcess::getInstance();
//ImageProcess *imageProcess = ImageProcess::getInstance();
return imageProcess->handleFrame(*out, src);
}
The flow then calls the handleFrame() function, whose implementation is:
int ImageProcess::handleFrame(StreamBuffer& dstBuf, StreamBuffer& srcBuf)
{
int ret = 0;
if (srcBuf.mStream == NULL || dstBuf.mStream == NULL) {
return -EINVAL;
}
do {
// firstly try GPU.
ret = handleFrameByGPU(dstBuf, srcBuf);
if (ret == 0) {
break;
}
// try gpu 2d.
ret = handleFrameBy2D(dstBuf, srcBuf);
if (ret == 0) {
break;
}
// try ipu.
ret = handleFrameByIPU(dstBuf, srcBuf);
if (ret == 0) {
break;
}
// try pxp.
ret = handleFrameByPXP(dstBuf, srcBuf);
if (ret == 0) {
break;
}
// try opencl.
ret = handleFrameByOpencl(dstBuf, srcBuf);
if (ret == 0) {
break;
}
// no hardware exists.
ret = handleFrameByCPU(dstBuf, srcBuf);
} while(false);
return ret;
}
As handleFrame() shows, the contents of the source buffer, i.e. the raw frame data from the camera, are copied into the destination buffer, i.e. the graphics memory on the display side; this is the entry point where the bug shows up. It also makes clear how camera frames end up on the screen.
Tracing further into the IMX8QM 2D GPU path, the handleFrameByGPU() function that hands the frame off for display looks like this:
int ImageProcess::handleFrameByGPU(StreamBuffer& dstBuf, StreamBuffer& srcBuf)
{
// gpu 2d exists.
if (mCopyEngine == NULL) {
return -EINVAL;
}
sp<Stream> src, dst;
src = srcBuf.mStream;
dst = dstBuf.mStream;
// can't do resize for YUV.
if (dst->width() != src->width() ||
dst->height() != src->height()) {
return -EINVAL;
}
int dstFormat = convertPixelFormatToV4L2Format(dst->format());
int srcFormat = convertPixelFormatToV4L2Format(src->format());
ALOGI("ImageProcess::%s(), d_format=%#0x, s_format=%#0x ", __func__, dst->format(), src->format());
// can't do csc for YUV.
if ((dst->format() != src->format()) &&
(dstFormat != srcFormat)) {
return -EINVAL;
}
void* g2dHandle = getHandle();
int size = (srcBuf.mSize > dstBuf.mSize) ? dstBuf.mSize : srcBuf.mSize;
struct g2d_buf s_buf, d_buf;
s_buf.buf_paddr = srcBuf.mPhyAddr;
s_buf.buf_vaddr = srcBuf.mVirtAddr;
d_buf.buf_paddr = dstBuf.mPhyAddr;
d_buf.buf_vaddr = dstBuf.mVirtAddr;
int ret = mCopyEngine(g2dHandle, (void*)&d_buf, (void*)&s_buf,
(void*)(intptr_t)size);
if (ret == 0) {
mFinishEngine(g2dHandle);
}
ALOGI("ImageProcess::%s() s_PhyAddr=%#0x, s_VirtAddr=%#0x, d_PhyAddr=%#0x, d_VirtAddr=%#0x", __func__,
srcBuf.mPhyAddr, srcBuf.mVirtAddr, dstBuf.mPhyAddr, dstBuf.mVirtAddr);
return ret;
}
Notes on cross-platform porting
This code calls the libg2d library (NXP's 2D engine API, the source of g2d_blitEx in the log) directly, i.e. it is only a thin wrapper over the display path. In other words, on a platform with a different graphics driver a virtual camera port will need this function to be re-wrapped and adapted; a defensive variant is sketched below.
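For porting purposes, a practical precaution is to validate the buffers before they reach the 2D engine and fall back to a plain CPU copy otherwise, since the "Invalid src planes[0] pointer=0x0" error above is exactly the case of a source buffer arriving without a usable address. The sketch below is not NXP's code: handleFrameSafely() is a hypothetical wrapper, and it only relies on the StreamBuffer fields (mVirtAddr, mPhyAddr, mSize) already used in handleFrameByGPU() above.

// Hypothetical wrapper around ImageProcess::handleFrame(): refuse to hand a
// buffer with no usable address to the hardware engines and fall back to a
// bounded memcpy instead. Assumes the ImageProcess/StreamBuffer context above.
int ImageProcess::handleFrameSafely(StreamBuffer& dstBuf, StreamBuffer& srcBuf)
{
    // The GPU/2D engines need at least one valid source and destination address.
    bool srcValid = (srcBuf.mVirtAddr != NULL) || (srcBuf.mPhyAddr != 0);
    bool dstValid = (dstBuf.mVirtAddr != NULL) || (dstBuf.mPhyAddr != 0);
    if (srcValid && dstValid) {
        int ret = handleFrame(dstBuf, srcBuf);   // original hardware-first chain
        if (ret == 0) {
            return 0;
        }
        ALOGW("%s: hardware blit failed (%d), falling back to CPU copy", __func__, ret);
    }

    // CPU fallback is only possible when both buffers are mapped into user space.
    if (srcBuf.mVirtAddr == NULL || dstBuf.mVirtAddr == NULL) {
        ALOGE("%s: no usable source/destination mapping", __func__);
        return -EINVAL;
    }
    size_t size = (srcBuf.mSize > dstBuf.mSize) ? dstBuf.mSize : srcBuf.mSize;
    memcpy(dstBuf.mVirtAddr, srcBuf.mVirtAddr, size);
    return 0;
}

Whether such a fallback is acceptable depends on the formats involved; it only helps when source and destination share the same layout, which the size-clamped copy in handleFrameByGPU() above already assumes.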
At system boot the libg2d library is loaded into memory and runs normally, as the log shows:
10-11 08:27:10.911 5164 5189 I OpenGLRenderer: Initialized EGL, version 1.4
10-11 08:27:10.912 5164 5189 D OpenGLRenderer: Swap behavior 2
10-11 08:27:10.928 5164 5189 I display : open gpu gralloc module!
Once the camera has acquired sensor data, the frames are handed to the graphics hardware by calling imageProcess->handleFrame(). That function tries the i.MX GPU path first; since that driver is a closed-source library, once the data fails to reach it correctly there is no way to dig deeper into the root cause. Sending the frames for display through handleFrameByOpencl() instead works without any problem; one way to make that selection configurable is sketched below.
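Since the OpenCL path is known to work here, a pragmatic workaround is to make the path selection configurable instead of hard-coding GPU-first. The sketch below is only an illustration of such a patch: the property name camera.imageprocess.force_opencl is made up, and the rest assumes the handleFrame() chain shown earlier.

// Hypothetical patch to ImageProcess::handleFrame(): when the (made-up)
// property camera.imageprocess.force_opencl is set to 1, skip the closed-source
// GPU/2D attempts and go straight to the OpenCL path, with a CPU fallback.
#include <cutils/properties.h>

int ImageProcess::handleFrame(StreamBuffer& dstBuf, StreamBuffer& srcBuf)
{
    if (srcBuf.mStream == NULL || dstBuf.mStream == NULL) {
        return -EINVAL;
    }

    char value[PROPERTY_VALUE_MAX];
    property_get("camera.imageprocess.force_opencl", value, "0");
    if (value[0] == '1') {
        int ret = handleFrameByOpencl(dstBuf, srcBuf);
        return (ret == 0) ? 0 : handleFrameByCPU(dstBuf, srcBuf);
    }

    // Otherwise keep the original hardware-first order.
    int ret = handleFrameByGPU(dstBuf, srcBuf);
    if (ret != 0) ret = handleFrameBy2D(dstBuf, srcBuf);
    if (ret != 0) ret = handleFrameByIPU(dstBuf, srcBuf);
    if (ret != 0) ret = handleFrameByPXP(dstBuf, srcBuf);
    if (ret != 0) ret = handleFrameByOpencl(dstBuf, srcBuf);
    if (ret != 0) ret = handleFrameByCPU(dstBuf, srcBuf);
    return ret;
}

With this in place the working path can be forced from the shell during bring-up, e.g. setprop camera.imageprocess.force_opencl 1, without rebuilding the HAL for every experiment.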
Summary
With this, my virtual camera driver port has been verified end to end. A final wrap-up covering the overall summary and the process notes is still outstanding; I will write it up and share it when time permits.