/* HAL返回result流程 */
/* /frameworks/av/services/camera/libcameraservice/device3/Camera3Device.cpp */
void Camera3Device::sProcessCaptureResult(const camera3_callback_ops *cb,const camera3_capture_result *result)
Camera3Device *d = const_cast<Camera3Device*>(static_cast<const Camera3Device*>(cb));
d->processCaptureResult(result);
/* Camera HAL device callback method */
hardware::Return<void> Camera3Device::processCaptureResult(const hardware::hidl_vec<hardware::camera::device::V3_2::CaptureResult>& results)
for (const auto& result : results) {
processOneCaptureResultLocked(result, noPhysMetadata);
}
void Camera3Device::processOneCaptureResultLocked(const hardware::camera::device::V3_2::CaptureResult& result,const hardware::hidl_vec<hardware::camera::device::V3_4::PhysicalCameraMetadata> physicalCameraMetadatas)
for (size_t i = 0; i < result.outputBuffers.size(); i++)
res = mInterface->popInflightBuffer(result.frameNumber, bSrc.streamId, &buffer);/*取buff*/
processCaptureResult(&r);
void Camera3Device::processCaptureResult(const camera3_capture_result *result)
sendPartialCaptureResult(result->result, request.resultExtras,frameNumber);
returnOutputBuffers(result->output_buffers,result->num_output_buffers, shutterTimestamp);
void Camera3Device::returnOutputBuffers(const camera3_stream_buffer_t *outputBuffers, size_t numBuffers,nsecs_t timestamp)
for (size_t i = 0; i < numBuffers; i++)
Camera3Stream *stream = Camera3Stream::cast(outputBuffers[i].stream);
status_t res = stream->returnBuffer(outputBuffers[i], timestamp);
/* frameworks\av\services\camera\libcameraservice\device3\Camera3Stream.cpp */
status_t Camera3Stream::returnBuffer(const camera3_stream_buffer &buffer,nsecs_t timestamp)
/*/frameworks/av/services/camera/libcameraservice/device3/Camera3OutputStream.cpp*/
status_t Camera3OutputStream::returnBufferLocked(const camera3_stream_buffer &buffer,nsecs_t timestamp)
status_t res = returnAnyBufferLocked(buffer, timestamp, /*output*/true);
/*/frameworks/av/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp*/
status_t Camera3IOStreamBase::returnAnyBufferLocked(const camera3_stream_buffer &buffer,nsecs_t timestamp,bool output)
returnBufferCheckedLocked(buffer, timestamp, output,&releaseFence);
/*/frameworks/av/services/camera/libcameraservice/device3/Camera3OutputStream.cpp*/
status_t Camera3OutputStream::returnBufferCheckedLocked(const camera3_stream_buffer &buffer,nsecs_t timestamp,bool output,/*out*/sp<Fence> *releaseFenceOut)
res = queueBufferToConsumer(currentConsumer, anwBuffer, anwReleaseFence);/*简单的说就是buff入队*/
mBufferReturnedSignal.signal();/*唤醒处理图像的线程*/
sendCaptureResult(metadata, request.resultExtras,collectedPartialResult, frameNumber,hasInputBufferInRequest, request.physicalMetadatas);
insertResultLocked(&captureResult, frameNumber);
void Camera3Device::insertResultLocked(CaptureResult *result,uint32_t frameNumber)
// Valid result, insert into queue
List<CaptureResult>::iterator queuedResult = mResultQueue.insert(mResultQueue.end(), CaptureResult(*result));
mResultSignal.signal();/*通知result处理线程*/
/*frameworks层处理图像返回线程*/
/* /frameworks/av/services/camera/libcameraservice/api2/CameraDeviceClient.cpp */
mFrameProcessor = new FrameProcessorBase(mDevice);
threadName = String8::format("CDU-%s-FrameProc", mCameraIdStr.string());
mFrameProcessor->run(threadName.string());
mFrameProcessor->registerListener(FRAME_PROCESSOR_LISTENER_MIN_ID,FRAME_PROCESSOR_LISTENER_MAX_ID,/*listener*/this,/*sendPartials*/true);
status_t FrameProcessorBase::registerListener(int32_t minId,int32_t maxId, const wp<FilteredListener>& listener, bool sendPartials)
RangeListener rListener = { minId, maxId, listener, sendPartials };
mRangeListeners.push_back(rListener);
/* /frameworks/av/services/camera/libcameraservice/common/FrameProcessorBase.cpp */
bool FrameProcessorBase::threadLoop()
res = device->waitForNextFrame(kWaitDuration);
while (mResultQueue.empty())
res = mResultSignal.waitRelative(mOutputLock, timeout);/*等待结果返回*/
processNewFrames(device);
void FrameProcessorBase::processNewFrames(const sp<CameraDeviceBase> &device)
while ( (res = device->getNextResult(&result)) == OK)
if (!processSingleFrame(result, device)) {
break;
}
/* /frameworks/av/services/camera/libcameraservice/common/FrameProcessorBase.cpp */
bool FrameProcessorBase::processSingleFrame(CaptureResult &result,const sp<CameraDeviceBase> &device)
return processListeners(result, device) == OK;
status_t FrameProcessorBase::processListeners(const CaptureResult &result,const sp<CameraDeviceBase> &device)
(*item)->onResultAvailable(result);
/* /frameworks/av/services/camera/libcameraservice/api2/CameraDeviceClient.cpp */
/* Device-related methods */
void CameraDeviceClient::onResultAvailable(const CaptureResult& result)
sp<hardware::camera2::ICameraDeviceCallbacks> remoteCb = mRemoteCallback;
remoteCb->onResultReceived(result.mMetadata, result.mResultExtras,result.mPhysicalMetadatas);
/* /frameworks/base/core/java/android/hardware/camera2/impl/CameraDeviceImpl.java */
public void onResultReceived(CameraMetadataNative result,CaptureResultExtras resultExtras, PhysicalCaptureResultInfo physicalResults[])
/* 最终APP获取图像数据buff是通过ImageReader中OnImageAvailableListener 的 onImageAvailable 获取。 */
mImageReader = ImageReader.newInstance(mCaptureSize.getWidth(), mCaptureSize.getHeight(),ImageFormat.JPEG, 1);
mImageReader.setOnImageAvailableListener(new ImageReader.OnImageAvailableListener()
public void onImageAvailable(ImageReader reader)
final Image image = reader.acquireNextImage();