参考链接:Android 源码 Camera2 架构初识_android camera2源码分析-CSDN博客
Android 的相机硬件抽象层 (HAL) 可将 Camera2 中较高层级的相机框架 API 连接到底层的相机驱动程序和硬件。相机子系统包括相机管道组件的实现,而相机 HAL 可提供用于实现这些组件版本的接口。
APP整体框架图:
服务调用流程
示例代码
package com.itech.camera;
//import static com.itech.camera.CameraUtilsTest.getCameraInstance;
import static android.provider.MediaStore.Files.FileColumns.MEDIA_TYPE_IMAGE;
import androidx.annotation.NonNull;
import androidx.appcompat.app.AppCompatActivity;
import androidx.core.app.ActivityCompat;
import androidx.core.content.ContextCompat;
import android.Manifest;
import android.annotation.SuppressLint;
import android.app.Activity;
import android.app.AlertDialog;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.SharedPreferences;
import android.content.pm.PackageManager;
import android.graphics.ImageFormat;
import android.graphics.Rect;
import android.hardware.Camera;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.params.OutputConfiguration;
import android.hardware.camera2.params.SessionConfiguration;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.media.MediaRecorder;
import android.net.Uri;
import android.os.Bundle;
import android.os.Environment;
import android.os.Handler;
import android.os.HandlerThread;
import android.provider.Settings;
import android.util.DisplayMetrics;
import android.util.Log;
import android.util.Size;
import android.view.MotionEvent;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.view.Window;
import android.widget.Button;
import android.widget.FrameLayout;
import android.widget.Toast;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.lang.reflect.Method;
import java.text.SimpleDateFormat;
import java.util.Arrays;
import java.util.Date;
import java.util.List;
import java.util.concurrent.Executor;
public class MainActivity extends AppCompatActivity {
private CameraManager mCameraManager;
private CameraDevice mCamera;
private CameraCaptureSession mCaptureSession;
private static final int REQUEST_CAMERA_PERMISSION = 1;
private static final String TAG = "MainActivity_camera";
private HandlerThread mBackgroundThread;
private Handler mBackgroundHandler;
private Handler mForegroundHandler;
private SurfaceView mSurfaceView;
private Button mLineView;
private Button mDrag;
/**
 * Demo drag-touch listener attached to the drag button. It delegates all
 * behavior to the project class {@code DragTouchListener}; the overrides exist
 * only as hooks for logging/observing the drag callbacks.
 */
private static class ListerTest extends DragTouchListener {
public ListerTest(int dragDirection, int dragDistance) {
super(dragDirection, dragDistance);
}
@Override
public boolean onTouch(View view, MotionEvent motionEvent) {
// No extra handling; kept as an override hook for debugging.
return super.onTouch(view, motionEvent);
}
@Override
protected void onDragComplete(View view) {
super.onDragComplete(view);
Log.d(TAG, "onDragComplete: ");
}
@Override
protected void onDragRebound(View view) {
// Rebound handling is left entirely to the base class.
super.onDragRebound(view);
}
}
/**
 * Logs screen, application-window, status-bar and content-view dimensions.
 * Purely diagnostic; called from onResume().
 */
public void position(){
    // Full display dimensions.
    DisplayMetrics metrics = new DisplayMetrics();
    getWindowManager().getDefaultDisplay().getMetrics(metrics);
    int widthPixels = metrics.widthPixels;
    int heightPixels = metrics.heightPixels;
    // Visible application (App) window frame.
    Rect rect = new Rect();
    getWindow().getDecorView().getWindowVisibleDisplayFrame(rect);
    // Status-bar height = top of the visible frame.
    // BUG FIX: the original filled `rect` a second time and left `rectangle`
    // empty, so the status-bar log always printed zeros.
    Rect rectangle = new Rect();
    getWindow().getDecorView().getWindowVisibleDisplayFrame(rectangle);
    int statusBarHeight = rectangle.top;
    // Content-view drawing bounds.
    // BUG FIX: the original passed `rect` here, leaving `rectView` empty.
    Rect rectView = new Rect();
    getWindow().findViewById(Window.ID_ANDROID_CONTENT).getDrawingRect(rectView);
    String tag = String.format("CameraDevice-JV-%s", "mCameraId");
    Log.d(TAG, "position: 屏幕区域的宽高等尺寸:" + widthPixels +","+heightPixels);
    Log.d(TAG, "position: 应用程序App区域宽高等尺寸:" + rect.width() +","+rect.height());
    // Log the actual status-bar height instead of the (previously empty) rect.
    Log.d(TAG, "position: 获取状态栏高度:" + statusBarHeight);
    Log.d(TAG, "position: View布局区域:" + rectView.width() +","+rectView.height() +",tag " + tag);
}
@SuppressLint({"MissingInflatedId", "ClickableViewAccessibility"})
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
Log.d(TAG, "onCreate: 所在的任务的id为: " + getTaskId());
// Read the device serial via the hidden SystemProperties API using
// reflection (debug aid only); failures are logged and ignored.
Class<?> clazz = null;
try {
clazz = Class.forName("android.os.SystemProperties");
Method m = clazz.getDeclaredMethod("get", String.class);
String info = (String)m.invoke(null, "ro.serialno");
Log.d(TAG, "onCreate: serial --> " + info);
} catch (Exception e) {
e.printStackTrace();
}
// Bail out until runtime permissions are granted; the grant result arrives
// in onRequestPermissionsResult(), so the UI is only set up on a later
// launch once permissions exist.
if(!checkCameraPermission()) {
//Log.d(TAG, "onResume: need checkCameraPermission");
return;
}
// Start a background thread to manage camera requests
// NOTE(review): the background thread is commented out, so
// mBackgroundHandler stays null and openCamera() callbacks will run on the
// calling thread's looper — confirm this is intended.
//mBackgroundThread = new HandlerThread("background");
//mBackgroundThread.start();
//mBackgroundHandler = new Handler(mBackgroundThread.getLooper());
//mForegroundHandler = new Handler(getMainLooper());
mCameraManager = (CameraManager) getSystemService(CAMERA_SERVICE);
setContentView(R.layout.activity_main);
mSurfaceView = findViewById(R.id.mainSurfaceView); // locate the preview surface
mLineView = findViewById(R.id.button_capture);
mDrag = findViewById(R.id.button_drag);
//new DragTouchListener(2,2);
mDrag.setOnTouchListener(new ListerTest(2,5));
mLineView.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
startActivity(new Intent(MainActivity.this,LineViewActivity.class));
}
});
//mSurfaceView.getHolder().setType();
mSurfaceView.getHolder().addCallback(mSurfaceHolderCallback); // register the surface callback
}
/** XML onClick handler: opens the area-charts demo screen. */
public void onClickAreaChartsView(View v){
    Intent intent = new Intent(MainActivity.this, AreaChartsViewActivity.class);
    startActivity(intent);
}
@Override
protected void onResume() {
super.onResume();
// Log screen/app-window/status-bar metrics on every foreground entry.
position(); // dump window metrics
}
@Override
protected void onPause() {
    super.onPause();
    // Release the capture session before the device that owns it.
    if (mCaptureSession != null) {
        mCaptureSession.close();
        mCaptureSession = null;
    }
    if (mCamera != null) {
        mCamera.close();
        // BUG FIX: clear the reference — surfaceChanged() refuses to reopen
        // the camera while mCamera is still non-null, so without this the
        // preview could never restart after a pause/resume cycle.
        mCamera = null;
    }
}
/**
 * Drives camera setup from the SurfaceView lifecycle. surfaceChanged() is
 * expected twice: on the first pass we pick the back-facing camera and call
 * setFixedSize(), which triggers a second pass in which the camera is opened.
 */
final SurfaceHolder.Callback mSurfaceHolderCallback = new SurfaceHolder.Callback() {
    private String mCameraId;
    /** Whether we received a change callback after setting our fixed surface size. */
    private boolean mGotSecondCallback;

    @Override
    public void surfaceCreated(SurfaceHolder holder) {
        Log.i(TAG, "Surface created");
        mCameraId = null;
        mGotSecondCallback = false;
    }

    @Override
    public void surfaceDestroyed(SurfaceHolder holder) {
        Log.i(TAG, "Surface destroyed");
        holder.removeCallback(this);
    }

    @Override
    public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
        // On the first invocation, width and height were automatically set to the view's size
        if (mCameraId == null) {
            // Find the device's back-facing camera and set the destination buffer sizes
            try {
                for (String cameraId : mCameraManager.getCameraIdList()) {
                    CameraCharacteristics cameraCharacteristics =
                            mCameraManager.getCameraCharacteristics(cameraId);
                    // BUG FIX: LENS_FACING is a static constant (the original accessed it
                    // via the instance), and get() returns a boxed Integer that may be
                    // null — the original `==` comparison could NPE on unboxing.
                    Integer facing = cameraCharacteristics.get(CameraCharacteristics.LENS_FACING);
                    if (facing != null && facing == CameraCharacteristics.LENS_FACING_BACK) {
                        Log.e(TAG, "Found a back-facing camera");
                        StreamConfigurationMap info = cameraCharacteristics
                                .get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
                        // BUG FIX: the stream-configuration map may be null on some
                        // devices; skip such a camera instead of crashing.
                        if (info == null) {
                            continue;
                        }
                        Size[] s = info.getOutputSizes(ImageFormat.JPEG);
                        for (Size a : s) {
                            Log.e(TAG, "Size : " + a);
                        }
                        // Danger, W.R.! Attempting to use too large a preview size could
                        // exceed the camera bus' bandwidth limitation, resulting in
                        // gorgeous previews but the storage of garbage capture data.
                        Log.i(TAG, "SurfaceView size: " + mSurfaceView.getWidth() + 'x' + mSurfaceView.getHeight());
                        SurfaceHolder surfaceHolder = mSurfaceView.getHolder();
                        surfaceHolder.setFixedSize(1280,720);
                        mCameraId = cameraId;
                        // Control flow continues with this method one more time
                        // (since we just changed our own size)
                        return;
                    }
                }
            } catch (CameraAccessException ex) {
                Log.e(TAG, "Unable to list cameras", ex);
            }
        } else if (!mGotSecondCallback) {
            if (mCamera != null) {
                Log.e(TAG, "Aborting camera open because it hadn't been closed");
                return;
            }
            // Open the camera device. The CAMERA permission was already requested
            // in onCreate(); bail out quietly if it is still missing.
            try {
                if (ActivityCompat.checkSelfPermission(MainActivity.this,
                        Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) {
                    return;
                }
                // NOTE(review): mBackgroundHandler is null (the background thread is
                // commented out in onCreate()), so state callbacks run on the calling
                // thread's looper — confirm this is intended.
                mCameraManager.openCamera(mCameraId, mCameraStateCallback, mBackgroundHandler);
            } catch (CameraAccessException ex) {
                Log.e(TAG, "Failed to configure output surface", ex);
            }
            mGotSecondCallback = true;
            // Control flow continues in mCameraStateCallback.onOpened()
        }
    }
};
/**
 * Returns true when both CAMERA and WRITE_EXTERNAL_STORAGE are already
 * granted; otherwise requests them and returns false. The asynchronous grant
 * result is delivered to onRequestPermissionsResult().
 */
private boolean checkCameraPermission() {
    boolean cameraGranted = ContextCompat.checkSelfPermission(this, Manifest.permission.CAMERA)
            == PackageManager.PERMISSION_GRANTED;
    boolean storageGranted = ContextCompat.checkSelfPermission(this, Manifest.permission.WRITE_EXTERNAL_STORAGE)
            == PackageManager.PERMISSION_GRANTED;
    if (cameraGranted && storageGranted) {
        return true;
    }
    // Ask for both permissions in a single request.
    ActivityCompat.requestPermissions(
            this,
            new String[]{Manifest.permission.CAMERA, Manifest.permission.WRITE_EXTERNAL_STORAGE},
            REQUEST_CAMERA_PERMISSION);
    return false;
}
/** Logs whether the camera/storage permission request was granted. */
@Override
public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) {
    super.onRequestPermissionsResult(requestCode, permissions, grantResults);
    if (requestCode != REQUEST_CAMERA_PERMISSION) {
        return;
    }
    // Only the first entry (CAMERA) is inspected, matching the original logic.
    boolean granted = grantResults.length > 0
            && grantResults[0] == PackageManager.PERMISSION_GRANTED;
    if (granted) {
        Log.e(TAG,"权限授予成功,可以进行拍照操作 PERMISSION_GRANTED");
    } else {
        Log.e(TAG,"权限授予失败,无法进行拍照操作");
    }
}
/**
 * Callbacks invoked upon state changes in our {@code CameraDevice}. <p>These are run on
 * {@code mBackgroundThread}.</p>
 */
final CameraDevice.StateCallback mCameraStateCallback =
new CameraDevice.StateCallback() {
@Override
public void onOpened(CameraDevice camera) {
Log.e(TAG, "Successfully opened camera");
mCamera = camera;
try {
// Configure a session with the preview surface as its only output.
List<OutputConfiguration> outputs = Arrays.asList(new OutputConfiguration( mSurfaceView.getHolder().getSurface()));
SessionConfiguration config = new SessionConfiguration(SessionConfiguration.SESSION_REGULAR,outputs,new Executor() {
@Override
public void execute(Runnable command) {
// Session callbacks are executed inline on the thread that posts them.
Log.e(TAG, "Executor execute ");
command.run();
}
} ,mCaptureSessionListener);
camera.createCaptureSession(config);
} catch (CameraAccessException ex) {
Log.e(TAG, "Failed to create a capture session", ex);
}
// Control flow continues in mCaptureSessionListener.onConfigured()
}
@Override
public void onDisconnected(CameraDevice camera) {
Log.e(TAG, "Camera was disconnected");
}
@Override
public void onError(CameraDevice camera, int error) {
Log.e(TAG, "State error on device '" + camera.getId() + "': code " + error);
}};
/**
 * Callbacks invoked upon state changes in our {@code CameraCaptureSession}. <p>These are run on
 * {@code mBackgroundThread}.</p>
 */
final CameraCaptureSession.StateCallback mCaptureSessionListener = new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(CameraCaptureSession session) {
Log.e(TAG, "Finished configuring camera outputs");
mCaptureSession = session;
SurfaceHolder holder = mSurfaceView.getHolder();
if (holder != null) {
try {
// Build a request for preview footage
CaptureRequest.Builder requestBuilder = mCamera.createCaptureRequest(mCamera.TEMPLATE_PREVIEW);
// Continuous autofocus is the usual choice for a live preview.
requestBuilder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
//setAutoFlash(requestBuilder);
requestBuilder.addTarget(holder.getSurface());
CaptureRequest previewRequest = requestBuilder.build();
// Start displaying preview images
try {
session.setRepeatingRequest(previewRequest, /*listener*/null,
/*handler*/null);
} catch (CameraAccessException ex) {
Log.e(TAG, "Failed to make repeating preview request", ex);
}
} catch (CameraAccessException ex) {
Log.e(TAG, "Failed to build preview request", ex);
}
}
else {
Log.e(TAG, "Holder didn't exist when trying to formulate preview request");
}
}
@Override
public void onClosed(CameraCaptureSession session) {
Log.e(TAG, "CameraCaptureSession onClosed");
mCaptureSession = null;
}
@Override
public void onConfigureFailed(CameraCaptureSession session) {
Log.e(TAG, "Configuration error on device '" + mCamera.getId());
}
@Override
public void onActive(@NonNull CameraCaptureSession session) {
// No extra handling; delegate to the base class.
super.onActive(session);
}
};
}
Camera2 CameraService 服务启动
system/core/rootdir/init.rc
service media /system/bin/mediaserver
class main
user media
group audio camera inet net_bt net_bt_admin net_bw_acct drmrpc mediadrm
ioprio rt 4
注册相机服务
frameworks/av/media/mediaserver/main_mediaserver.cpp
// mediaserver entry point (excerpt). Registers CameraService (among other
// media services) with ServiceManager, then joins the binder thread pool.
using namespace android;
int main(int argc __unused, char** argv)
{
signal(SIGPIPE, SIG_IGN);
char value[PROPERTY_VALUE_MAX];
bool doLog = (property_get("ro.test_harness", value, "0") > 0) && (atoi(value) == 1);
pid_t childPid;
if (doLog && (childPid = fork()) != 0) {
......
} else {
// all other services
......
CameraService::instantiate();
/* CameraService inherits instantiate() from BinderService.
 * instantiate() internally calls publish(), which registers CameraService
 * with the ServiceManager "registry". The lookup name, obtained via
 * CameraService::getServiceName(), is "media.camera".
 */
......
ProcessState::self()->startThreadPool();
IPCThreadState::self()->joinThreadPool();
}
}
版本2:
// Main_mediaServer.cpp — alternative (version 2) of the mediaserver entry
// point, showing every service registered alongside CameraService.
int main(int argc __unused, char** argv){
…
sp<ProcessState> proc(ProcessState::self());
// Obtain the ServiceManager
sp<IServiceManager> sm = defaultServiceManager();
ALOGI("ServiceManager: %p", sm.get());
AudioFlinger::instantiate();
// Initialize the media player service
MediaPlayerService::instantiate();
// Initialize the resource manager service
ResourceManagerService::instantiate();
// Initialize the camera service
CameraService::instantiate();
// Initialize the audio policy service
AudioPolicyService::instantiate();
SoundTriggerHwService::instantiate();
// Initialize the radio service
RadioService::instantiate();
registerExtensions();
// Start the binder thread pool
ProcessState::self()->startThreadPool();
IPCThreadState::self()->joinThreadPool();
}
frameworks/native/include/binder/BinderService.h
// ---------------------------------------------------------------------------
// BinderService<T> — CRTP helper that registers a service instance with
// ServiceManager under the name returned by SERVICE::getServiceName().
namespace android {
template<typename SERVICE>
class BinderService
{
friend class BinderService<CameraService>;
public:
// Creates a new SERVICE instance and adds it to the ServiceManager.
static status_t publish(bool allowIsolated = false) {
sp<IServiceManager> sm(defaultServiceManager());
return sm->addService(
String16(SERVICE::getServiceName()),
new SERVICE(), allowIsolated);
}
......
static void instantiate() { publish(); }
// NOTE(review): in AOSP the concrete service defines getServiceName();
// the camera case ("media.camera") is shown inlined here by the article.
static char const* getServiceName() { return "media.camera"; }
......
};
}; // namespace android
// ---------------------------------------------------------------------------
CameraService::onFirstRef
1.加载相机 HAL 模块
定义camera_module_t
hardware/libhardware/include/hardware/camera_common.h
/**
 * camera_module_t — HAL module descriptor loaded by the camera service via
 * hw_get_module(). (Doc comments translated from the original Chinese.)
 */
typedef struct camera_module {
/**
 * Common methods of the camera module. *Must* be the first member of
 * camera_module.
 */
hw_module_t common;
/**
 * Returns the number of camera devices accessible through this module.
 * Cameras are numbered 0 through N-1, where N is the value this call
 * returns. The camera-device name passed to open() is simply that number
 * converted to a string.
 */
int (*get_number_of_cameras)(void);
/**
 * Returns static camera information for the given camera device.
 */
int (*get_camera_info)(int camera_id, struct camera_info *info);
/**
 * Provides callback function pointers to the HAL module so it can notify
 * the framework of asynchronous camera-module events.
 */
int (*set_callbacks)(const camera_module_callbacks_t *callbacks);
/**
 * Gets the methods for querying vendor-extension metadata tag info.
 * The HAL should fill in all vendor tag operations, or leave ops
 * unchanged if no vendor tags are defined.
 *
 * The vendor_tag_ops structure used here is defined in:
 * system/media/camera/include/system/vendor_tags.h
 */
void (*get_vendor_tag_ops)(vendor_tag_ops_t* ops);
/**
 * Opens a specific legacy camera HAL device if this camera HAL module
 * supports multiple device HAL API versions. For example, if the camera
 * module supports both CAMERA_DEVICE_API_VERSION_1_0 and
 * CAMERA_DEVICE_API_VERSION_3_2 device APIs, the framework can call this
 * function to open the camera as a CAMERA_DEVICE_API_VERSION_1_0 device.
 */
int (*open_legacy)(const struct hw_module_t* module, const char* id,
uint32_t halVersion, struct hw_device_t** device);
/**
 * Turns the torch mode of the flash unit associated with the given camera
 * id on or off. On success, the HAL must notify the framework of the new
 * state by calling camera_module_callbacks.torch_mode_status_change().
 */
int (*set_torch_mode)(const char* camera_id, bool enabled);
/**
 * Called by the camera service after the HAL library loads successfully
 * and before any other method is invoked. May be left NULL if the HAL
 * module needs no initialization.
 */
int (*init)();
/* Reserved for future use */
void* reserved[5];
} camera_module_t;
2.初始化相机 HAL 模块
CameraModule 是 HAL 相机模块的包装器类。
frameworks/av/services/camera/libcameraservice/common/CameraModule.cpp
// Wraps a raw camera_module_t pointer; a null module is a fatal programming
// error (logged, then assert).
CameraModule::CameraModule(camera_module_t *module) {
if (module == NULL) {
ALOGE("%s: camera hardware module must not be null", __FUNCTION__);
assert(0);
}
mModule = module;
}
// Calls the HAL module's optional init() hook (module API >= 2.4 only) and
// pre-sizes the per-camera info cache to the camera count.
int CameraModule::init() {
ATRACE_CALL();
int res = OK;
if (getModuleApiVersion() >= CAMERA_MODULE_API_VERSION_2_4 &&
mModule->init != NULL) {
ATRACE_BEGIN("camera_module->init");
res = mModule->init();
ATRACE_END();
}
mCameraInfoMap.setCapacity(getNumberOfCameras());
return res;
}
// Thin, traced wrapper over the HAL's get_number_of_cameras().
int CameraModule::getNumberOfCameras() {
int numCameras;
ATRACE_BEGIN("camera_module->get_number_of_cameras");
numCameras = mModule->get_number_of_cameras();
ATRACE_END();
return numCameras;
}
3.获取相机信息填充 camera_info
frameworks/av/services/camera/libcameraservice/common/CameraModule.cpp
// Returns camera_info for a camera id. For pre-2.0 module APIs it is a direct
// pass-through; for API2+ the static characteristics are post-processed via
// deriveCameraCharacteristicsKeys() and cached in mCameraInfoMap.
int CameraModule::getCameraInfo(int cameraId, struct camera_info *info) {
ATRACE_CALL();
Mutex::Autolock lock(mCameraInfoLock);
if (cameraId < 0) {
ALOGE("%s: Invalid camera ID %d", __FUNCTION__, cameraId);
return -EINVAL;
}
// Only override static_camera_characteristics for API2 devices
int apiVersion = mModule->common.module_api_version;
if (apiVersion < CAMERA_MODULE_API_VERSION_2_0) {
int ret;
ATRACE_BEGIN("camera_module->get_camera_info");
ret = mModule->get_camera_info(cameraId, info);
ATRACE_END();
return ret;
}
ssize_t index = mCameraInfoMap.indexOfKey(cameraId);
if (index == NAME_NOT_FOUND) {
// Get camera info from the raw module and cache it
camera_info rawInfo, cameraInfo;
ATRACE_BEGIN("camera_module->get_camera_info");
int ret = mModule->get_camera_info(cameraId, &rawInfo);
ATRACE_END();
if (ret != 0) {
return ret;
}
int deviceVersion = rawInfo.device_version;
if (deviceVersion < CAMERA_DEVICE_API_VERSION_2_0) {
// static_camera_characteristics is invalid
*info = rawInfo;
return ret;
}
CameraMetadata m;
m = rawInfo.static_camera_characteristics;
deriveCameraCharacteristicsKeys(rawInfo.device_version, m);
cameraInfo = rawInfo;
cameraInfo.static_camera_characteristics = m.release();
index = mCameraInfoMap.add(cameraId, cameraInfo);
}
assert(index != NAME_NOT_FOUND);
// return the cached camera info
*info = mCameraInfoMap[index];
return OK;
}
4.将 CameraService 注册到 CameraDeviceFactory
5.ping CameraServiceProxy
// Looks up the "media.camera.proxy" binder service from ServiceManager;
// returns nullptr when the proxy service is not registered.
sp<ICameraServiceProxy> CameraService::getCameraServiceProxy() {
sp<IServiceManager> sm = defaultServiceManager();
sp<IBinder> binder = sm->getService(String16("media.camera.proxy"));
if (binder == nullptr) {
return nullptr;
}
sp<ICameraServiceProxy> proxyBinder = interface_cast<ICameraServiceProxy>(binder);
return proxyBinder;
}
// Pings the proxy (if present) so it can refresh its per-user state; a
// missing proxy is silently ignored.
void CameraService::pingCameraServiceProxy() {
sp<ICameraServiceProxy> proxyBinder = getCameraServiceProxy();
if (proxyBinder == nullptr) return;
proxyBinder->pingForUserUpdate();
}
整体代码
frameworks/av/services/camera/libcameraservice/CameraService.cpp
// CameraService bring-up: loads and initializes the camera HAL module,
// enumerates cameras and their per-device state, registers callbacks, and
// pings the CameraServiceProxy. (Comments translated from Chinese.)
void CameraService::onFirstRef()
{
ALOGI("CameraService process starting");
BnCameraService::onFirstRef();
// Update battery-life tracking in case the service is restarting
BatteryNotifier& notifier(BatteryNotifier::getInstance());
notifier.noteResetCamera();
notifier.noteResetFlashlight();
// Load the camera HAL module
camera_module_t *rawModule;
int err = hw_get_module(CAMERA_HARDWARE_MODULE_ID,
(const hw_module_t **)&rawModule);
if (err < 0) {
ALOGE("Could not load camera HAL module: %d (%s)", err, strerror(-err));
logServiceError("Could not load camera HAL module", err);
mNumberOfCameras = 0;
return;
}
// Initialize the camera HAL module
mModule = new CameraModule(rawModule);
err = mModule->init();
if (err != OK) {
ALOGE("Could not initialize camera HAL module: %d (%s)", err,
strerror(-err));
logServiceError("Could not initialize camera HAL module", err);
mNumberOfCameras = 0;
delete mModule;
mModule = nullptr;
return;
}
ALOGI("Loaded \"%s\" camera module", mModule->getModuleName());
// Query the camera count
mNumberOfCameras = mModule->getNumberOfCameras();
mNumberOfNormalCameras = mNumberOfCameras;
// Set up vendor tags before our first call to get_camera_info, since the
// HAL may need to set up static vendor keys inside get_camera_info
VendorTagDescriptor::clearGlobalVendorTagDescriptor();
if (mModule->getModuleApiVersion() >= CAMERA_MODULE_API_VERSION_2_2) {
setUpVendorTags();
}
mFlashlight = new CameraFlashlight(*mModule, *this);
// Find the flash units
status_t res = mFlashlight->findFlashUnits();
if (res) {
// impossible because we haven't open any camera devices.
ALOGE("Failed to find flash units.");
}
int latestStrangeCameraId = INT_MAX;
for (int i = 0; i < mNumberOfCameras; i++) {
String8 cameraId = String8::format("%d", i);
// Fetch camera info, filling camera_info
struct camera_info info;
bool haveInfo = true;
status_t rc = mModule->getCameraInfo(i, &info);
if (rc != NO_ERROR) {
ALOGE("%s: Received error loading camera info for device %d, cost and"
" conflicting devices fields set to defaults for this device.",
__FUNCTION__, i);
haveInfo = false;
}
// Check backward-compatibility support
if (haveInfo) {
if (checkCameraCapabilities(i, info, &latestStrangeCameraId) != OK) {
delete mModule;
mModule = nullptr;
return;
}
}
// Defaults to use for cost and conflicting devices
int cost = 100;
char** conflicting_devices = nullptr;
size_t conflicting_devices_length = 0;
// If using post-2.4 module version, query the cost + conflicting devices from the HAL
if (mModule->getModuleApiVersion() >= CAMERA_MODULE_API_VERSION_2_4 && haveInfo) {
cost = info.resource_cost;
conflicting_devices = info.conflicting_devices;
conflicting_devices_length = info.conflicting_devices_length;
}
std::set<String8> conflicting;
// NOTE(review): this inner `i` shadows the outer loop counter (as in AOSP).
for (size_t i = 0; i < conflicting_devices_length; i++) {
conflicting.emplace(String8(conflicting_devices[i]));
}
// Initialize per-camera-device state
{
Mutex::Autolock lock(mCameraStatesLock);
mCameraStates.emplace(cameraId, std::make_shared<CameraState>(cameraId, cost,
conflicting));
}
if (mFlashlight->hasFlashUnit(cameraId)) {
mTorchStatusMap.add(cameraId,
ICameraServiceListener::TORCH_STATUS_AVAILABLE_OFF);
}
}
if (mModule->getModuleApiVersion() >= CAMERA_MODULE_API_VERSION_2_1) {
mModule->setCallbacks(this);
}
// Register the CameraService with CameraDeviceFactory
CameraDeviceFactory::registerService(this);
// ping the CameraServiceProxy
CameraService::pingCameraServiceProxy();
}
Camera2 CameraService连接相机服务
frameworks/base/core/java/android/hardware/camera2/CameraManager.java
step1:
public final class CameraManager {
......
private ArrayList<String> mDeviceIdList;
......
// Lazily builds (and caches in mDeviceIdList) the list of camera device ids
// by querying CameraService; tolerates a dead or absent service.
private ArrayList<String> getOrCreateDeviceIdListLocked() throws CameraAccessException {
if (mDeviceIdList == null) {
int numCameras = 0;
// 1. Obtain the CameraService binder proxy
ICameraService cameraService = CameraManagerGlobal.get().getCameraService();
ArrayList<String> deviceIdList = new ArrayList<>();
// If CameraService is null, return an empty list
if (cameraService == null) {
return deviceIdList;
}
try {
// 2. Query the number of cameras
numCameras = cameraService.getNumberOfCameras(CAMERA_TYPE_ALL);
} catch(CameraRuntimeException e) {
throw e.asChecked();
} catch (RemoteException e) {
// camera service just died - if no camera service, then no devices
return deviceIdList;
}
CameraMetadataNative info = new CameraMetadataNative();
for (int i = 0; i < numCameras; ++i) {
// Non-removable cameras use integers starting at 0 as identifiers
boolean isDeviceSupported = false;
try {
// 3. Fetch the camera's characteristics metadata
cameraService.getCameraCharacteristics(i, info);
if (!info.isEmpty()) {
isDeviceSupported = true;
} else {
throw new AssertionError("Expected to get non-empty characteristics");
}
} catch(IllegalArgumentException e) {
// Got BAD_VALUE from the service, meaning this device is unsupported.
} catch(CameraRuntimeException e) {
// DISCONNECTED means the HAL reported a low-level error while fetching
// device info; skip this device. Other errors keep propagating.
if (e.getReason() != CameraAccessException.CAMERA_DISCONNECTED) {
throw e.asChecked();
}
} catch(RemoteException e) {
// Camera service died — no devices to list
deviceIdList.clear();
return deviceIdList;
}
if (isDeviceSupported) {
// 4. Add the camera's 0-based integer identifier to the device id list
deviceIdList.add(String.valueOf(i));
} else {
Log.w(TAG, "Error querying camera device " + i + " for listing.");
}
}
mDeviceIdList = deviceIdList;
}
return mDeviceIdList;
}
......
@NonNull
public String[] getCameraIdList() throws CameraAccessException {
synchronized (mLock) {
// Building the id list already handles the known enumeration failure
// modes, so only unexpected exceptions from it should propagate.
return getOrCreateDeviceIdListLocked().toArray(new String[0]);
}
}
// Establishes the binder connection to CameraService, installs a death
// recipient, sets up vendor tags and registers this object as listener.
private void connectCameraServiceLocked() {
// Only reconnect if necessary
if (mCameraService != null) return;
Log.i(TAG, "Connecting to camera service");
// Query the CameraService binder object from ServiceManager
IBinder cameraServiceBinder = ServiceManager.getService(CAMERA_SERVICE_BINDER_NAME);
if (cameraServiceBinder == null) {
// CameraService is currently down; leave mCameraService as null
return;
}
try {
// Install a death recipient on the binder object
cameraServiceBinder.linkToDeath(this, /*flags*/ 0);
} catch (RemoteException e) {
// CameraService is currently down; leave mCameraService as null
return;
}
// Convert the binder object into the CameraService interface
ICameraService cameraServiceRaw = ICameraService.Stub.asInterface(cameraServiceBinder);
/**
 * Wrap CameraService in a decorator that automatically translates return
 * codes into exceptions.
 */
ICameraService cameraService =
CameraServiceBinderDecorator.newInstance(cameraServiceRaw);
try {
// Set up vendor tags
CameraServiceBinderDecorator.throwOnError(
CameraMetadataNative.nativeSetupGlobalVendorTagDescriptor());
} catch (CameraRuntimeException e) {
handleRecoverableSetupErrors(e, "Failed to set up vendor tags");
}
try {
// Register the availability listener
cameraService.addListener(this);
mCameraService = cameraService;
} catch(CameraRuntimeException e) {
// Unexpected failure
throw new IllegalStateException("Failed to register a camera service listener",
e.asChecked());
} catch (RemoteException e) {
// CameraService is currently down; leave mCameraService as null
}
}
......
}
......
private static final class CameraManagerGlobal extends ICameraServiceListener.Stub
implements IBinder.DeathRecipient {
......
// Connects (if needed) and returns the ICameraService proxy; may return null
// when the service is unavailable.
public ICameraService getCameraService() {
synchronized(mLock) {
connectCameraServiceLocked();
if (mCameraService == null) {
Log.e(TAG, "Camera service is unavailable");
}
return mCameraService;
}
}
......
}
......
}
getNumberOfCameras调用流程
frameworks/av/camera/ICameraService.cpp
// Client-side (proxy) binder class for ICameraService: marshals calls into a
// Parcel and ships them to BnCameraService::onTransact() in mediaserver.
class BpCameraService: public BpInterface<ICameraService>
{
public:
BpCameraService(const sp<IBinder>& impl)
: BpInterface<ICameraService>(impl)
{
}
// get number of cameras available that support standard camera operations
virtual int32_t getNumberOfCameras()
{
// CAMERA_TYPE_BACKWARD_COMPATIBLE denotes the backward-compatible camera type
return getNumberOfCameras(CAMERA_TYPE_BACKWARD_COMPATIBLE);
}
// get number of cameras available of a given type
virtual int32_t getNumberOfCameras(int type)
{
Parcel data, reply;
data.writeInterfaceToken(ICameraService::getInterfaceDescriptor());
data.writeInt32(type);
remote()->transact(BnCameraService::GET_NUMBER_OF_CAMERAS, data, &reply);
if (readExceptionCode(reply)) return 0;
return reply.readInt32();
}
......
}
// Server-side dispatch: unpacks the parcel and invokes the concrete
// CameraService implementation, writing the result back into the reply.
status_t BnCameraService::onTransact(
uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags)
{
switch(code) {
case GET_NUMBER_OF_CAMERAS: {
CHECK_INTERFACE(ICameraService, data, reply);
reply->writeNoException();
reply->writeInt32(getNumberOfCameras(data.readInt32()));
return NO_ERROR;
} break;
......
}
}
// Returns the camera count cached in onFirstRef(), filtered by camera type;
// unknown types yield 0 with a warning.
int32_t CameraService::getNumberOfCameras(int type) {
ATRACE_CALL();
switch (type) {
case CAMERA_TYPE_BACKWARD_COMPATIBLE:
return mNumberOfNormalCameras;
case CAMERA_TYPE_ALL:
return mNumberOfCameras;
default:
ALOGW("%s: Unknown camera type %d, returning 0",
__FUNCTION__, type);
return 0;
}
}
Camera2 openCamera 流程分析
1. 创建 CameraDeviceImpl
/**
 * Framework-side implementation of CameraDevice (excerpt). The constructor
 * validates its arguments, derives a per-device log tag, and caches the
 * partial-result count from the camera characteristics.
 */
public class CameraDeviceImpl extends CameraDevice {
......
public CameraDeviceImpl(String cameraId, StateCallback callback, Handler handler,
CameraCharacteristics characteristics) {
if (cameraId == null || callback == null || handler == null || characteristics == null) {
throw new IllegalArgumentException("Null argument given");
}
mCameraId = cameraId;
mDeviceCallback = callback;
mDeviceHandler = handler;
mCharacteristics = characteristics;
// Android log tags are limited to 23 characters; truncate if needed.
final int MAX_TAG_LEN = 23;
String tag = String.format("CameraDevice-JV-%s", mCameraId);
if (tag.length() > MAX_TAG_LEN) {
tag = tag.substring(0, MAX_TAG_LEN);
}
TAG = tag;
Integer partialCount =
mCharacteristics.get(CameraCharacteristics.REQUEST_PARTIAL_RESULT_COUNT);
if (partialCount == null) {
// 1 means partial result is not supported.
mTotalPartialCount = 1;
} else {
mTotalPartialCount = partialCount;
}
}
......
}
2.CameraService::connectDevice
connectHelper
1. makeClient
2. client->initialize
// frameworks/av/services/camera/libcameraservice/CameraService.cpp
// (reformatted from the original single-line paste; code is unchanged,
// comments translated to English)

status_t CameraService::connectDevice(
        const sp<ICameraDeviceCallbacks>& cameraCb,
        int cameraId,
        const String16& clientPackageName,
        int clientUid,
        /*out*/ sp<ICameraDeviceUser>& device) {
    ATRACE_CALL();
    status_t ret = NO_ERROR;
    String8 id = String8::format("%d", cameraId);
    sp<CameraDeviceClient> client = nullptr;
    ret = connectHelper<ICameraDeviceCallbacks,CameraDeviceClient>(cameraCb, id,
            CAMERA_HAL_API_VERSION_UNSPECIFIED, clientPackageName, clientUid, API_2,
            false, false, /*out*/client);
    if(ret != NO_ERROR) {
        logRejected(id, getCallingPid(), String8(clientPackageName),
                String8::format("%s (%d)", strerror(-ret), ret));
        return ret;
    }
    device = client;
    return NO_ERROR;
}

template<class CALLBACK, class CLIENT>
status_t CameraService::connectHelper(const sp<CALLBACK>& cameraCb, const String8& cameraId,
        int halVersion, const String16& clientPackageName, int clientUid,
        apiLevel effectiveApiLevel, bool legacyMode, bool shimUpdateOnly,
        /*out*/sp<CLIENT>& device) {
    status_t ret = NO_ERROR;
    String8 clientName8(clientPackageName);
    int clientPid = getCallingPid();
    ALOGI("CameraService::connect call (PID %d \"%s\", camera ID %s) for HAL version %s and "
            "Camera API version %d", clientPid, clientName8.string(), cameraId.string(),
            (halVersion == -1) ? "default" : std::to_string(halVersion).c_str(),
            static_cast<int>(effectiveApiLevel));
    sp<CLIENT> client = nullptr;
    {
        // Acquire the AutoConditionLock and block other clients from connecting.
        std::unique_ptr<AutoConditionLock> lock = AutoConditionLock::waitAndAcquire(
                mServiceLockWrapper, DEFAULT_CONNECT_TIMEOUT_NS);
        if (lock == nullptr) {
            ALOGE("CameraService::connect X (PID %d) rejected (too many other clients connecting)."
                    , clientPid);
            return -EBUSY;
        }
        // Enforce client permissions and do basic sanity checks.
        if((ret = validateConnectLocked(cameraId, /*inout*/clientUid)) != NO_ERROR) {
            return ret;
        }
        // Check shim parameters after acquiring the lock; if they were already
        // updated and this is a shim-only update, return immediately.
        if (shimUpdateOnly) {
            auto cameraState = getCameraState(cameraId);
            if (cameraState != nullptr) {
                if (!cameraState->getShimParams().isEmpty()) return NO_ERROR;
            }
        }
        sp<BasicClient> clientTmp = nullptr;
        std::shared_ptr<resource_policy::ClientDescriptor<String8, sp<BasicClient>>> partial;
        if ((ret = handleEvictionsLocked(cameraId, clientPid, effectiveApiLevel,
                IInterface::asBinder(cameraCb), clientName8, /*out*/&clientTmp,
                /*out*/&partial)) != NO_ERROR) {
            return ret;
        }
        if (clientTmp.get() != nullptr) {
            // Handle the API1 MediaRecorder special case where an existing client is returned.
            device = static_cast<CLIENT*>(clientTmp.get());
            return NO_ERROR;
        }
        // Give the flashlight a chance to close the device if necessary.
        mFlashlight->prepareDeviceOpen(cameraId);
        // TODO: Update getDeviceVersion + HAL interface to use strings for Camera IDs
        int id = cameraIdToInt(cameraId);
        if (id == -1) {
            ALOGE("%s: Invalid camera ID %s, cannot get device version from HAL.",
                    __FUNCTION__, cameraId.string());
            return BAD_VALUE;
        }
        int facing = -1;
        int deviceVersion = getDeviceVersion(id, /*out*/&facing);
        sp<BasicClient> tmp = nullptr;
        // Obtain the BasicClient.
        if((ret = makeClient(this, cameraCb, clientPackageName, cameraId, facing, clientPid,
                clientUid, getpid(), legacyMode, halVersion, deviceVersion, effectiveApiLevel,
                /*out*/&tmp)) != NO_ERROR) {
            return ret;
        }
        client = static_cast<CLIENT*>(tmp.get());
        LOG_ALWAYS_FATAL_IF(client.get() == nullptr, "%s: CameraService in invalid state",
                __FUNCTION__);
        // Initialize the client from the HAL module.
        if ((ret = client->initialize(mModule)) != OK) {
            ALOGE("%s: Could not initialize client from HAL module.", __FUNCTION__);
            return ret;
        }
        // Update shim parameters for legacy clients.
        if (effectiveApiLevel == API_1) {
            // Assume we always receive a Client subclass for API1.
            sp<Client> shimClient = reinterpret_cast<Client*>(client.get());
            String8 rawParams = shimClient->getParameters();
            CameraParameters params(rawParams);
            auto cameraState = getCameraState(cameraId);
            if (cameraState != nullptr) {
                cameraState->setShimParams(params);
            } else {
                ALOGE("%s: Cannot update shim parameters for camera %s, no such device exists.",
                        __FUNCTION__, cameraId.string());
            }
        }
        if (shimUpdateOnly) {
            // If only updating legacy shim parameters, disconnect the client immediately.
            mServiceLock.unlock();
            client->disconnect();
            mServiceLock.lock();
        } else {
            // Otherwise, add the client to the active-clients list.
            finishConnectLocked(client, partial);
        }
    }
    // Lock destroyed — further connect calls are allowed.
    // Important: release the mutex here so the client can call back into the service from its
    // destructor (can be at the end of the call)
    device = client;
    return NO_ERROR;
}
整体代码
frameworks/base/core/java/android/hardware/camera2/CameraManager.java
public final class CameraManager {
......
/**
 * Opens the camera identified by {@code cameraId} and returns the client-side
 * {@link CameraDevice} wrapper.
 *
 * Flow visible here: build a CameraDeviceImpl, then obtain the remote
 * ICameraDeviceUser either from the camera service (HAL3.2+ path, via
 * connectDevice) or from the legacy shim (HAL1 path), and finally attach it
 * with setRemoteDevice(), which posts the initial state callbacks.
 *
 * @param cameraId numeric camera id as a string (parsed with Integer.parseInt)
 * @param callback receives onOpened/onUnconfigured once the remote device is set
 * @param handler  handler the device callbacks are dispatched on
 * @return the connected CameraDevice, or null if a RemoteException was
 *         swallowed before setRemoteDevice (elided catch bodies)
 * @throws CameraAccessException on checked camera errors (elided catch bodies)
 */
private CameraDevice openCameraDeviceUserAsync(String cameraId,
CameraDevice.StateCallback callback, Handler handler)
throws CameraAccessException {
CameraCharacteristics characteristics = getCameraCharacteristics(cameraId);
CameraDevice device = null;
try {
synchronized (mLock) {
ICameraDeviceUser cameraUser = null;
android.hardware.camera2.impl.CameraDeviceImpl deviceImpl =
new android.hardware.camera2.impl.CameraDeviceImpl(
cameraId,
callback,
handler,
characteristics);
BinderHolder holder = new BinderHolder();
ICameraDeviceCallbacks callbacks = deviceImpl.getCallbacks();
// May throw NumberFormatException, caught below.
int id = Integer.parseInt(cameraId);
try {
if (supportsCamera2ApiLocked(cameraId)) {
// Use the camera service's CameraDeviceClient implementation for HAL3.2+ devices.
ICameraService cameraService = CameraManagerGlobal.get().getCameraService();
if (cameraService == null) {
throw new CameraRuntimeException(
CameraAccessException.CAMERA_DISCONNECTED,
"Camera service is currently unavailable");
}
// CameraService::connectDevice(...) mainly calls the connectHelper(...) method.
cameraService.connectDevice(callbacks, id,
mContext.getOpPackageName(), USE_CALLING_UID, holder);
cameraUser = ICameraDeviceUser.Stub.asInterface(holder.getBinder());
} else {
// Use the legacy camera implementation for HAL1 devices.
Log.i(TAG, "Using legacy camera HAL.");
cameraUser = CameraDeviceUserShim.connectBinderShim(callbacks, id);
}
} catch (CameraRuntimeException e) {
......
} catch (RemoteException e) {
......
}
// TODO: factor out callback to be non-nested, then move setter to constructor
// For now, calling setRemoteDevice will trigger the initial onOpened/onUnconfigured callbacks.
deviceImpl.setRemoteDevice(cameraUser);
device = deviceImpl;
}
} catch (NumberFormatException e) {
......
} catch (CameraRuntimeException e) {
......
}
return device;
}
......
......
/**
 * Public entry point for opening a camera device.
 *
 * Validates the arguments and then delegates to
 * {@link #openCameraDeviceUserAsync}. The validation order (cameraId,
 * callback, handler) is observable behavior — it decides which
 * IllegalArgumentException message a caller with multiple bad arguments sees
 * first — so it is preserved exactly.
 */
@RequiresPermission(android.Manifest.permission.CAMERA)
public void openCamera(@NonNull String cameraId,
@NonNull final CameraDevice.StateCallback callback, @Nullable Handler handler)
throws CameraAccessException {
// Guard clauses instead of an if/else-if chain; same checks, same messages.
if (cameraId == null) {
throw new IllegalArgumentException("cameraId was null");
}
if (callback == null) {
throw new IllegalArgumentException("callback was null");
}
if (handler == null) {
// No explicit handler: fall back to the calling thread's looper,
// which must exist for callbacks to be delivered.
if (Looper.myLooper() == null) {
throw new IllegalArgumentException(
"Handler argument is null, but no looper exists in the calling thread");
}
handler = new Handler();
}
openCameraDeviceUserAsync(cameraId, callback, handler);
}
......
}
CameraService::makeClient
// CameraService::makeClient — factory called from connectHelper once eviction
// and permission checks have passed. Maps (deviceVersion, effectiveApiLevel,
// requested halVersion) to a concrete BasicClient subclass:
//   HAL1.0 device + API1                  -> CameraClient
//   HAL2.0..3.3 device + API1             -> Camera2Client
//   HAL2.0..3.3 device + API2             -> CameraDeviceClient
//   explicit halVersion==1.0 on a newer device -> CameraClient (shim path)
// Any other combination is rejected (-EOPNOTSUPP / INVALID_OPERATION), and a
// cameraId that does not parse to an int yields BAD_VALUE.
// NOTE(review): this is a blog excerpt collapsed onto single physical lines;
// the embedded "//" comments swallow the rest of each line, so the quote below
// is not compilable as-is — see the AOSP source for the original formatting.
CameraService::makeClient status_t CameraService::makeClient(const sp<CameraService>& cameraService, const sp<IInterface>& cameraCb, const String16& packageName, const String8& cameraId, int facing, int clientPid, uid_t clientUid, int servicePid, bool legacyMode, int halVersion, int deviceVersion, apiLevel effectiveApiLevel, /*out*/sp<BasicClient>* client) { // TODO: Update CameraClients + HAL interface to use strings for Camera IDs int id = cameraIdToInt(cameraId); if (id == -1) { ALOGE("%s: Invalid camera ID %s, cannot convert to integer.", __FUNCTION__, cameraId.string()); return BAD_VALUE; } if (halVersion < 0 || halVersion == deviceVersion) { // 默认路径:调用者未指定 HAL 版本,根据 HAL 报告的设备版本创建 CameraClient。 switch(deviceVersion) { case CAMERA_DEVICE_API_VERSION_1_0: if (effectiveApiLevel == API_1) { // Camera1 API route sp<ICameraClient> tmp = static_cast<ICameraClient*>(cameraCb.get()); *client = new CameraClient(cameraService, tmp, packageName, id, facing, clientPid, clientUid, getpid(), legacyMode); } else { // Camera2 API route ALOGW("Camera using old HAL version: %d", deviceVersion); return -EOPNOTSUPP; } break; case CAMERA_DEVICE_API_VERSION_2_0: case CAMERA_DEVICE_API_VERSION_2_1: case CAMERA_DEVICE_API_VERSION_3_0: case CAMERA_DEVICE_API_VERSION_3_1: case CAMERA_DEVICE_API_VERSION_3_2: case CAMERA_DEVICE_API_VERSION_3_3: if (effectiveApiLevel == API_1) { // Camera1 API 路由 sp<ICameraClient> tmp = static_cast<ICameraClient*>(cameraCb.get()); *client = new Camera2Client(cameraService, tmp, packageName, id, facing, clientPid, clientUid, servicePid, legacyMode); } else { // Camera2 API 路由 sp<ICameraDeviceCallbacks> tmp = static_cast<ICameraDeviceCallbacks*>(cameraCb.get()); *client = new CameraDeviceClient(cameraService, tmp, packageName, id, facing, clientPid, clientUid, servicePid); } break; default: // Should not be reachable ALOGE("Unknown camera device HAL version: %d", deviceVersion); return INVALID_OPERATION; } } else { // 调用者要求特定的 HAL 版本。根据请求的 HAL 版本创建 
CameraClient。 if (deviceVersion > CAMERA_DEVICE_API_VERSION_1_0 && halVersion == CAMERA_DEVICE_API_VERSION_1_0) { // 只支持高 HAL 版本设备打开为 HAL1.0 设备。 sp<ICameraClient> tmp = static_cast<ICameraClient*>(cameraCb.get()); *client = new CameraClient(cameraService, tmp, packageName, id, facing, clientPid, clientUid, servicePid, legacyMode); } else { // 目前尚不支持其他组合(例如,以HAL2.x 打开的 HAL3.x)。 ALOGE("Invalid camera HAL version %x: HAL %x device can only be" " opened as HAL %x device", halVersion, deviceVersion, CAMERA_DEVICE_API_VERSION_1_0); return INVALID_OPERATION; } } return NO_ERROR; }
1.CameraDeviceClient 的构造函数会先构造其基类 Camera2ClientBase(基类子对象),再初始化自身成员。
frameworks/av/services/camera/libcameraservice/api2/CameraDeviceClient.cpp CameraDeviceClient::CameraDeviceClient(const sp<CameraService>& cameraService, const sp<ICameraDeviceCallbacks>& remoteCallback, const String16& clientPackageName, int cameraId, int cameraFacing, int clientPid, uid_t clientUid, int servicePid) : Camera2ClientBase(cameraService, remoteCallback, clientPackageName, cameraId, cameraFacing, clientPid, clientUid, servicePid), mInputStream(), mRequestIdCounter(0) { ATRACE_CALL(); ALOGI("CameraDeviceClient %d: Opened", cameraId); }
1.1CameraDeviceClientBase 对象
Camera2ClientBase 是一个类模板,在此流程中模板参数 TClientBase 被实例化为 CameraDeviceClientBase,因此其构造函数的初始化列表会先构造一个 CameraDeviceClientBase 基类子对象。
// Camera2ClientBase constructor (class template). In the Camera2 API flow
// TClientBase is CameraDeviceClientBase, so the TClientBase(...) entry in the
// initializer list constructs that base-class subobject first, and it in turn
// constructs a BasicClient. After recording the device version and callbacks,
// the body asks CameraDeviceFactory for the actual CameraDevice object.
template <typename TClientBase>
Camera2ClientBase<TClientBase>::Camera2ClientBase(
const sp<CameraService>& cameraService,
const sp<TCamCallbacks>& remoteCallback,
const String16& clientPackageName,
int cameraId,
int cameraFacing,
int clientPid,
uid_t clientUid,
int servicePid):
// The CameraDeviceClientBase constructor in turn constructs a BasicClient object.
TClientBase(cameraService, remoteCallback, clientPackageName,
cameraId, cameraFacing, clientPid, clientUid, servicePid),
mSharedCameraCallbacks(remoteCallback),
mDeviceVersion(cameraService->getDeviceVersion(cameraId)),
mDeviceActive(false)
{
ALOGI("Camera %d: Opened. Client: %s (PID %d, UID %d)", cameraId,
String8(clientPackageName).string(), clientPid, clientUid);
mInitialClientPid = clientPid;
// Create the CameraDevice; the factory picks Camera2Device or Camera3Device
// based on the HAL device version (see CameraDeviceFactory::createDevice).
mDevice = CameraDeviceFactory::createDevice(cameraId);
LOG_ALWAYS_FATAL_IF(mDevice == 0, "Device should never be NULL here.");
}
// CameraDeviceClientBase constructor: forwards the client identity to
// BasicClient (converting the ICameraDeviceCallbacks interface to its IBinder
// for the base class) and keeps a strong reference to the callback interface
// in mRemoteCallback. The body is intentionally empty.
CameraDeviceClientBase::CameraDeviceClientBase(
const sp<CameraService>& cameraService,
const sp<ICameraDeviceCallbacks>& remoteCallback,
const String16& clientPackageName,
int cameraId,
int cameraFacing,
int clientPid,
uid_t clientUid,
int servicePid) :
BasicClient(cameraService,
IInterface::asBinder(remoteCallback),
clientPackageName,
cameraId,
cameraFacing,
clientPid,
clientUid,
servicePid),
mRemoteCallback(remoteCallback) {
}
// CameraService::BasicClient constructor: records the identity of the
// connecting client (package name, binder callback, pid/uid, service pid) and
// the camera it opened, and clears the bookkeeping flags. All members are set
// via the initializer list; the values are independent of each other, so no
// other work happens in the body.
CameraService::BasicClient::BasicClient(const sp<CameraService>& cameraService,
        const sp<IBinder>& remoteCallback,
        const String16& clientPackageName,
        int cameraId, int cameraFacing,
        int clientPid, uid_t clientUid,
        int servicePid):
        mClientPackageName(clientPackageName),
        mDisconnected(false),
        mCameraService(cameraService),
        mRemoteBinder(remoteCallback),
        mCameraId(cameraId),
        mCameraFacing(cameraFacing),
        mClientPid(clientPid),
        mClientUid(clientUid),
        mServicePid(servicePid),
        mOpsActive(false),
        mDestructionStarted(false)
{
}
1.2 创建 Camera3Device 对象
frameworks/av/services/camera/libcameraservice/CameraDeviceFactory.cpp sp<CameraDeviceBase> CameraDeviceFactory::createDevice(int cameraId) { sp<CameraService> svc = sService.promote(); if (svc == 0) { ALOGE("%s: No service registered", __FUNCTION__); return NULL; } int deviceVersion = svc->getDeviceVersion(cameraId, /*facing*/NULL); sp<CameraDeviceBase> device; switch (deviceVersion) { case CAMERA_DEVICE_API_VERSION_2_0: case CAMERA_DEVICE_API_VERSION_2_1: device = new Camera2Device(cameraId); break; case CAMERA_DEVICE_API_VERSION_3_0: case CAMERA_DEVICE_API_VERSION_3_1: case CAMERA_DEVICE_API_VERSION_3_2: case CAMERA_DEVICE_API_VERSION_3_3: device = new Camera3Device(cameraId); break; default: ALOGE("%s: Camera %d: Unknown HAL device version %d", __FUNCTION__, cameraId, deviceVersion); device = NULL; break; } ALOGV_IF(device != 0, "Created a new camera device for version %d", deviceVersion); return device; }
frameworks/av/services/camera/libcameraservice/device3/Camera3Device.cpp Camera3Device::Camera3Device(int id): mId(id), mIsConstrainedHighSpeedConfiguration(false), mHal3Device(NULL), mStatus(STATUS_UNINITIALIZED), mStatusWaiters(0), mUsePartialResult(false), mNumPartialResults(1), mNextResultFrameNumber(0), mNextReprocessResultFrameNumber(0), mNextShutterFrameNumber(0), mNextReprocessShutterFrameNumber(0), mListener(NULL) { ATRACE_CALL(); //HAL 回调函数 camera3_callback_ops::notify = &sNotify; camera3_callback_ops::process_capture_result = &sProcessCaptureResult; ALOGV("%s: Created device for camera %d", __FUNCTION__, id); }
2.client->initialize 进入HAL open
// client->initialize(mModule) chain, top to bottom (three functions quoted):
//  - CameraDeviceClient::initialize(): runs Camera2ClientBase::initialize(),
//    then starts a FrameProcessorBase thread ("CDU-%d-FrameProc") and
//    registers itself as a frame listener (including partial results).
//  - Camera2ClientBase::initialize(): startCameraOps() permission check,
//    then mDevice->initialize(module) and mDevice->setNotifyCallback(this).
//  - Camera3Device::initialize(): opens the HAL device via module->open(),
//    cross-checks the HAL/device versions (must be >= HAL3.0 and consistent
//    with getCameraInfo), calls device->ops->initialize(device, this), starts
//    the StatusTracker and RequestThread threads, creates the PreparerThread,
//    caches the static characteristics, moves the device to
//    STATUS_UNCONFIGURED, and derives partial-result counts and supported
//    opaque input sizes from the static metadata.
// NOTE(review): blog excerpt collapsed onto long physical lines; the embedded
// "//" comments swallow the rest of each line, so this quote is not
// compilable as-is — see the AOSP source for the original formatting.
frameworks/av/services/camera/libcameraservice/api2/CameraDeviceClient.cpp status_t CameraDeviceClient::initialize(CameraModule *module) { ATRACE_CALL(); status_t res; res = Camera2ClientBase::initialize(module); if (res != OK) { return res; } String8 threadName; mFrameProcessor = new FrameProcessorBase(mDevice); threadName = String8::format("CDU-%d-FrameProc", mCameraId); mFrameProcessor->run(threadName.string()); mFrameProcessor->registerListener(FRAME_PROCESSOR_LISTENER_MIN_ID, FRAME_PROCESSOR_LISTENER_MAX_ID, /*listener*/this, /*sendPartials*/true); return OK; } template <typename TClientBase> status_t Camera2ClientBase<TClientBase>::initialize(CameraModule *module) { ATRACE_CALL(); ALOGV("%s: Initializing client for camera %d", __FUNCTION__, TClientBase::mCameraId); status_t res; // 验证操作权限 res = TClientBase::startCameraOps(); if (res != OK) { return res; } if (mDevice == NULL) { ALOGE("%s: Camera %d: No device connected", __FUNCTION__, TClientBase::mCameraId); return NO_INIT; } res = mDevice->initialize(module); if (res != OK) { ALOGE("%s: Camera %d: unable to initialize device: %s (%d)", __FUNCTION__, TClientBase::mCameraId, strerror(-res), res); return res; } res = mDevice->setNotifyCallback(this); return OK; } status_t Camera3Device::initialize(CameraModule *module) { ATRACE_CALL(); Mutex::Autolock il(mInterfaceLock); Mutex::Autolock l(mLock); ALOGV("%s: Initializing device for camera %d", __FUNCTION__, mId); if (mStatus != STATUS_UNINITIALIZED) { CLOGE("Already initialized!"); return INVALID_OPERATION; } /** 打开 HAL device */ status_t res; String8 deviceName = String8::format("%d", mId); camera3_device_t *device; ATRACE_BEGIN("camera3->open"); res = module->open(deviceName.string(), reinterpret_cast<hw_device_t**>(&device)); ATRACE_END(); if (res != OK) { SET_ERR_L("Could not open camera: %s (%d)", strerror(-res), res); return res; } /** 交叉检查设备版本 */ if (device->common.version < CAMERA_DEVICE_API_VERSION_3_0) { SET_ERR_L("Could not open camera: " "Camera 
device should be at least %x, reports %x instead", CAMERA_DEVICE_API_VERSION_3_0, device->common.version); device->common.close(&device->common); return BAD_VALUE; } camera_info info; res = CameraService::filterGetInfoErrorCode(module->getCameraInfo( mId, &info)); if (res != OK) return res; if (info.device_version != device->common.version) { SET_ERR_L("HAL reporting mismatched camera_info version (%x)" " and device version (%x).", info.device_version, device->common.version); device->common.close(&device->common); return BAD_VALUE; } /** 使用回调函数初始化设备 */ ATRACE_BEGIN("camera3->initialize"); res = device->ops->initialize(device, this); ATRACE_END(); if (res != OK) { SET_ERR_L("Unable to initialize HAL device: %s (%d)", strerror(-res), res); device->common.close(&device->common); return BAD_VALUE; } /** 启动状态跟踪器线程 */ mStatusTracker = new StatusTracker(this); res = mStatusTracker->run(String8::format("C3Dev-%d-Status", mId).string()); if (res != OK) { SET_ERR_L("Unable to start status tracking thread: %s (%d)", strerror(-res), res); device->common.close(&device->common); mStatusTracker.clear(); return res; } bool aeLockAvailable = false; camera_metadata_ro_entry aeLockAvailableEntry; res = find_camera_metadata_ro_entry(info.static_camera_characteristics, ANDROID_CONTROL_AE_LOCK_AVAILABLE, &aeLockAvailableEntry); if (res == OK && aeLockAvailableEntry.count > 0) { aeLockAvailable = (aeLockAvailableEntry.data.u8[0] == ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE); } /** 启动请求队列线程 */ mRequestThread = new RequestThread(this, mStatusTracker, device, aeLockAvailable); res = mRequestThread->run(String8::format("C3Dev-%d-ReqQueue", mId).string()); if (res != OK) { SET_ERR_L("Unable to start request queue thread: %s (%d)", strerror(-res), res); device->common.close(&device->common); mRequestThread.clear(); return res; } mPreparerThread = new PreparerThread(); /** Everything is good to go */ mDeviceVersion = device->common.version; mDeviceInfo = info.static_camera_characteristics; 
mHal3Device = device; internalUpdateStatusLocked(STATUS_UNCONFIGURED); mNextStreamId = 0; mDummyStreamId = NO_STREAM; mNeedConfig = true; mPauseStateNotify = false; // HAL是否会发送早期的部分结果元数据? if (mDeviceVersion >= CAMERA_DEVICE_API_VERSION_3_2) { camera_metadata_entry partialResultsCount = mDeviceInfo.find(ANDROID_REQUEST_PARTIAL_RESULT_COUNT); if (partialResultsCount.count > 0) { mNumPartialResults = partialResultsCount.data.i32[0]; mUsePartialResult = (mNumPartialResults > 1); } } else { camera_metadata_entry partialResultsQuirk = mDeviceInfo.find(ANDROID_QUIRKS_USE_PARTIAL_RESULT); if (partialResultsQuirk.count > 0 && partialResultsQuirk.data.u8[0] == 1) { mUsePartialResult = true; } } camera_metadata_entry configs = mDeviceInfo.find(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS); for (uint32_t i = 0; i < configs.count; i += 4) { if (configs.data.i32[i] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED && configs.data.i32[i + 3] == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT) { mSupportedOpaqueInputSizes.add(Size(configs.data.i32[i + 1], configs.data.i32[i + 2])); } } return OK; }
Camera2 预览流程
frameworks/base/core/java/android/hardware/camera2/impl/CameraDeviceImpl.java
public class CameraDeviceImpl extends CameraDevice {
......
/**
 * Attaches the remote (binder) camera device to this client-side wrapper.
 *
 * Wraps the binder interface in a CameraBinderDecorator and posts the
 * initial onOpened / onUnconfigured callbacks onto the device handler.
 * Does nothing if the device is already in an error state.
 */
public void setRemoteDevice(ICameraDeviceUser remoteDevice) {
synchronized(mInterfaceLock) {
// TODO: Move from decorator to direct binder-mediated exceptions
// If setRemoteFailure already called, do nothing
if (mInError) return;
mRemoteDevice = CameraBinderDecorator.newInstance(remoteDevice);
mDeviceHandler.post(mCallOnOpened);
mDeviceHandler.post(mCallOnUnconfigured);
}
}
......
......
/**
 * Creates a CaptureRequest.Builder pre-filled with the HAL's default settings
 * for the given template type.
 *
 * Asks the remote device (camera service side) to fill a
 * CameraMetadataNative with the template's default settings, then wraps it
 * in a non-reprocess Builder.
 *
 * @param templateType one of the CameraDevice TEMPLATE_* constants
 * @return a Builder seeded with the template settings, or null on a
 *         RemoteException (treated as impossible here)
 * @throws CameraAccessException if the remote call fails with a checked
 *         camera error
 */
@Override
public CaptureRequest.Builder createCaptureRequest(int templateType)
throws CameraAccessException {
synchronized(mInterfaceLock) {
checkIfCameraClosedOrInError();
// CameraMetadataNative is the marshaling implementation for camera metadata
// carried over Binder IPC to the camera service.
CameraMetadataNative templatedRequest = new CameraMetadataNative();
try {
// Where is mRemoteDevice assigned?
/*
If you recall the openCamera(...) flow, it is assigned in
CameraManager.openCameraDeviceUserAsync(...).
Analysis shows mRemoteDevice actually points to an
ICameraDeviceUser.Stub.Proxy object.
ICameraDeviceUser.Stub and ICameraDeviceUser.Stub.Proxy
are generated by compiling ICameraDeviceUser.aidl.
This eventually reaches BpCameraDeviceUser::createDefaultRequest(...),
which transacts to the same-named method on BnCameraDeviceUser.
*/
mRemoteDevice.createDefaultRequest(templateType, /*out*/templatedRequest);
} catch (CameraRuntimeException e) {
throw e.asChecked();
} catch (RemoteException e) {
// impossible
return null;
}
CaptureRequest.Builder builder = new CaptureRequest.Builder(
templatedRequest, /*reprocess*/false, CameraCaptureSession.SESSION_ID_NONE);
return builder;
}
}
......
}
1.createDefaultRequest
1.CameraDeviceClient::createDefaultRequest
frameworks/av/camera/camera2/ICameraDeviceUser.cpp status_t BnCameraDeviceUser::onTransact( uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags) { switch(code) { ...... case CREATE_DEFAULT_REQUEST: { CHECK_INTERFACE(ICameraDeviceUser, data, reply); int templateId = data.readInt32(); CameraMetadata request; status_t ret; ret = createDefaultRequest(templateId, &request); reply->writeNoException(); reply->writeInt32(ret); // out-variables are after exception and return value reply->writeInt32(1); // to mark presence of metadata object request.writeToParcel(const_cast<Parcel*>(reply)); return NO_ERROR; } break; ...... } } frameworks/av/services/camera/libcameraservice/api2/CameraDeviceClient.cpp // Create a request object from a template. status_t CameraDeviceClient::createDefaultRequest(int templateId, /*out*/ CameraMetadata* request) { ATRACE_CALL(); ALOGV("%s (templateId = 0x%x)", __FUNCTION__, templateId); status_t res; if ( (res = checkPid(__FUNCTION__) ) != OK) return res; Mutex::Autolock icl(mBinderSerializationLock); if (!mDevice.get()) return DEAD_OBJECT; CameraMetadata metadata; //mDevice 是在 Camera2ClientBase 构造器中初始化的。 //结合 openCamera 流程可知 mDevice 指向 Camera3Device 对象。 if ( (res = mDevice->createDefaultRequest(templateId, &metadata) ) == OK && request != NULL) { request->swap(metadata); } return res; }
Camera3Device::createDefaultRequest(…) 方法实际上调用了 camera3_device_t 结构体内 ops 指向的 camera3_device_ops_t 结构体内的 construct_default_request_settings 函数指针(厂家需要实现)。
2.Camera3Device::createDefaultRequest
frameworks/av/services/camera/libcameraservice/device3/Camera3Device.cpp status_t Camera3Device::createDefaultRequest(int templateId, CameraMetadata *request) { ...... const camera_metadata_t *rawRequest; // construct_default_request_settings 函数指针(厂家需要实现)。 ATRACE_BEGIN("camera3->construct_default_request_settings"); rawRequest = mHal3Device->ops->construct_default_request_settings( mHal3Device, templateId); ATRACE_END(); if (rawRequest == NULL) { ALOGI("%s: template %d is not supported on this camera device", __FUNCTION__, templateId); return BAD_VALUE; } *request = rawRequest; mRequestTemplateCache[templateId] = rawRequest; return OK; }
2.CaptureRequest.Builder 设置 Surface
frameworks/base/core/java/android/hardware/camera2/CaptureRequest.java public final class CaptureRequest extends CameraMetadata<CaptureRequest.Key<?>> implements Parcelable { ...... private final HashSet<Surface> mSurfaceSet; ...... private CaptureRequest(CameraMetadataNative settings, boolean isReprocess, int reprocessableSessionId) { mSettings = CameraMetadataNative.move(settings); mSurfaceSet = new HashSet<Surface>(); mIsReprocess = isReprocess; if (isReprocess) { if (reprocessableSessionId == CameraCaptureSession.SESSION_ID_NONE) { throw new IllegalArgumentException("Create a reprocess capture request with an " + "invalid session ID: " + reprocessableSessionId); } mReprocessableSessionId = reprocessableSessionId; } else { mReprocessableSessionId = CameraCaptureSession.SESSION_ID_NONE; } } ...... public final static class Builder { private final CaptureRequest mRequest; public Builder(CameraMetadataNative template, boolean reprocess, int reprocessableSessionId) { mRequest = new CaptureRequest(template, reprocess, reprocessableSessionId); } public void addTarget(@NonNull Surface outputTarget) { mRequest.mSurfaceSet.add(outputTarget); } ...... } ...... }
3.CameraCaptureSession 用于相机预览
// frameworks/base/core/java/android/hardware/camera2/impl/CameraDeviceImpl.java (excerpt)
//
// createCaptureSession(): wraps each output Surface in an OutputConfiguration
// and delegates to createCaptureSessionInternal().
// createCaptureSessionInternal(): under mInterfaceLock, configures the
// streams (blocking until idle) via configureStreamsChecked(); on success
// with an input configuration, fetches the input Surface from the remote
// device; then constructs either a
// CameraConstrainedHighSpeedCaptureSessionImpl or (the normal path here) a
// CameraCaptureSessionImpl, whose constructor fires
// onConfigured/onConfigureFailed based on configureSuccess.
// ("......" marks code elided by the blog excerpt.)
frameworks/base/core/java/android/hardware/camera2/impl/CameraDeviceImpl.java public class CameraDeviceImpl extends CameraDevice { ...... @Override public void createCaptureSession(List<Surface> outputs, CameraCaptureSession.StateCallback callback, Handler handler) throws CameraAccessException { List<OutputConfiguration> outConfigurations = new ArrayList<>(outputs.size()); for (Surface surface : outputs) { outConfigurations.add(new OutputConfiguration(surface)); } createCaptureSessionInternal(null, outConfigurations, callback, handler, /*isConstrainedHighSpeed*/false); } ...... ...... private void createCaptureSessionInternal(InputConfiguration inputConfig, List<OutputConfiguration> outputConfigurations, CameraCaptureSession.StateCallback callback, Handler handler, boolean isConstrainedHighSpeed) throws CameraAccessException { synchronized(mInterfaceLock) { ...... // TODO: dont block for this boolean configureSuccess = true; CameraAccessException pendingException = null; Surface input = null; try { // 配置流然后阻塞直到空闲 IDLE configureSuccess = configureStreamsChecked(inputConfig, outputConfigurations, isConstrainedHighSpeed); if (configureSuccess == true && inputConfig != null) { input = new Surface(); try { mRemoteDevice.getInputSurface(/*out*/input); } catch (CameraRuntimeException e) { e.asChecked(); } } } catch (CameraAccessException e) { configureSuccess = false; pendingException = e; input = null; if (DEBUG) { Log.v(TAG, "createCaptureSession - failed with exception ", e); } } catch (RemoteException e) { // impossible return; } List<Surface> outSurfaces = new ArrayList<>(outputConfigurations.size()); for (OutputConfiguration config : outputConfigurations) { outSurfaces.add(config.getSurface()); } // 如果 configureOutputs 成功,则触发 onConfigured,否则触发 onConfigureFailed。 CameraCaptureSessionCore newSession = null; if (isConstrainedHighSpeed) { newSession = new CameraConstrainedHighSpeedCaptureSessionImpl(mNextSessionId++, outSurfaces, callback, handler, this, mDeviceHandler, 
configureSuccess, mCharacteristics); } else { // 进入此分支 newSession = new CameraCaptureSessionImpl(mNextSessionId++, input, outSurfaces, callback, handler, this, mDeviceHandler, configureSuccess); } // TODO: wait until current session closes, then create the new session mCurrentSession = newSession; if (pendingException != null) { throw pendingException; } mSessionStateCallback = mCurrentSession.getDeviceStateCallback(); } } ...... }
1.configureStreamsChecked
// CameraDeviceImpl.configureStreamsChecked(): computes the delta between the
// currently configured streams and the requested outputs, then, under
// mInterfaceLock: posts onBusy, stops repeating, waits until idle, and wraps
// the changes in beginConfigure()/endConfigure(). It recreates the input
// stream if its configuration changed, deletes removed output streams first
// (to free HW resources), and creates new ones via mRemoteDevice.createStream
// (Camera3Device::createStream on the service side). Returns false when the
// HAL rejects the configuration in endConfigure(); finally posts onIdle on
// success with outputs, or onUnconfigured otherwise.
public class CameraDeviceImpl extends CameraDevice { public boolean configureStreamsChecked(InputConfiguration inputConfig, List<OutputConfiguration> outputs, boolean isConstrainedHighSpeed) throws CameraAccessException { // Treat a null input the same an empty list if (outputs == null) { outputs = new ArrayList<OutputConfiguration>(); } if (outputs.size() == 0 && inputConfig != null) { throw new IllegalArgumentException("cannot configure an input stream without " + "any output streams"); } checkInputConfiguration(inputConfig); boolean success = false; synchronized(mInterfaceLock) { checkIfCameraClosedOrInError(); // Streams to create HashSet<OutputConfiguration> addSet = new HashSet<OutputConfiguration>(outputs); // Streams to delete List<Integer> deleteList = new ArrayList<Integer>(); // 确定需要创建哪些流,删除哪些流 for (int i = 0; i < mConfiguredOutputs.size(); ++i) { int streamId = mConfiguredOutputs.keyAt(i); OutputConfiguration outConfig = mConfiguredOutputs.valueAt(i); if (!outputs.contains(outConfig)) { deleteList.add(streamId); } else { addSet.remove(outConfig); // 不要创建之前创建的流 } } mDeviceHandler.post(mCallOnBusy); stopRepeating(); try { waitUntilIdle(); mRemoteDevice.beginConfigure(); // 如果输入配置不同,请重新配置输入流。 InputConfiguration currentInputConfig = mConfiguredInput.getValue(); if (inputConfig != currentInputConfig && (inputConfig == null || !inputConfig.equals(currentInputConfig))) { if (currentInputConfig != null) { mRemoteDevice.deleteStream(mConfiguredInput.getKey()); mConfiguredInput = new SimpleEntry<Integer, InputConfiguration>( REQUEST_ID_NONE, null); } if (inputConfig != null) { int streamId = mRemoteDevice.createInputStream(inputConfig.getWidth(), inputConfig.getHeight(), inputConfig.getFormat()); mConfiguredInput = new SimpleEntry<Integer, InputConfiguration>( streamId, inputConfig); } } // 首先删除所有流(以释放HW资源) for (Integer streamId : deleteList) { mRemoteDevice.deleteStream(streamId); mConfiguredOutputs.delete(streamId); } // 添加所有新流 for (OutputConfiguration 
outConfig : outputs) { if (addSet.contains(outConfig)) { //Camera3Device 类 createStream int streamId = mRemoteDevice.createStream(outConfig); mConfiguredOutputs.put(streamId, outConfig); } } try { mRemoteDevice.endConfigure(isConstrainedHighSpeed); } catch (IllegalArgumentException e) { // OK. camera service can reject stream config if it's not supported by HAL // This is only the result of a programmer misusing the camera2 api. Log.w(TAG, "Stream configuration failed"); return false; } success = true; } catch (CameraRuntimeException e) { if (e.getReason() == CAMERA_IN_USE) { throw new IllegalStateException("The camera is currently busy." + " You must wait until the previous operation completes."); } throw e.asChecked(); } catch (RemoteException e) { // impossible return false; } finally { if (success && outputs.size() > 0) { mDeviceHandler.post(mCallOnIdle); } else { // Always return to the 'unconfigured' state if we didn't hit a fatal error mDeviceHandler.post(mCallOnUnconfigured); } } } return success; } }
mRemoteDevice.createInputStream
// CameraDeviceClient::createStream(): service-side output-stream creation.
// Steps visible in the quote: reject a null or duplicate
// IGraphicBufferProducer; query the consumer usage bits to decide async mode
// (HW_TEXTURE consumers) and "flexible consumer" eligibility; wrap the
// producer in a Surface and query width/height/format/dataSpace from it;
// override RGBA-range formats to IMPLEMENTATION_DEFINED (FIXME b/9487482);
// round dimensions to the nearest supported size for flexible consumers;
// then call mDevice->createStream() and, on success, record the binder->id
// mapping and set the rotation transform for preview. Returns the new
// streamId on success, an error status otherwise.
status_t CameraDeviceClient::createStream(const OutputConfiguration &outputConfiguration) { ATRACE_CALL(); status_t res; if ( (res = checkPid(__FUNCTION__) ) != OK) return res; Mutex::Autolock icl(mBinderSerializationLock); // 拿到 bufferProducer sp<IGraphicBufferProducer> bufferProducer = outputConfiguration.getGraphicBufferProducer(); if (bufferProducer == NULL) { ALOGE("%s: bufferProducer must not be null", __FUNCTION__); return BAD_VALUE; } if (!mDevice.get()) return DEAD_OBJECT; // 不要为同一目标 Surface 创建多个流 { ssize_t index = mStreamMap.indexOfKey(IInterface::asBinder(bufferProducer)); if (index != NAME_NOT_FOUND) { ALOGW("%s: Camera %d: Buffer producer already has a stream for it " "(ID %zd)", __FUNCTION__, mCameraId, index); return ALREADY_EXISTS; } } // HACK b/10949105 // 查询消费者使用情况位,以使用 controlledByApp 参数设置 GLConsumer 的异步操作模式 bool useAsync = false; int32_t consumerUsage; if ((res = bufferProducer->query(NATIVE_WINDOW_CONSUMER_USAGE_BITS, &consumerUsage)) != OK) { ALOGE("%s: Camera %d: Failed to query consumer usage", __FUNCTION__, mCameraId); return res; } if (consumerUsage & GraphicBuffer::USAGE_HW_TEXTURE) { ALOGW("%s: Camera %d: Forcing asynchronous mode for stream", __FUNCTION__, mCameraId); useAsync = true; } int32_t disallowedFlags = GraphicBuffer::USAGE_HW_VIDEO_ENCODER | GRALLOC_USAGE_RENDERSCRIPT; int32_t allowedFlags = GraphicBuffer::USAGE_SW_READ_MASK | GraphicBuffer::USAGE_HW_TEXTURE | GraphicBuffer::USAGE_HW_COMPOSER; bool flexibleConsumer = (consumerUsage & disallowedFlags) == 0 && (consumerUsage & allowedFlags) != 0; sp<IBinder> binder = IInterface::asBinder(bufferProducer); sp<Surface> surface = new Surface(bufferProducer, useAsync); ANativeWindow *anw = surface.get(); int width, height, format; android_dataspace dataSpace; if ((res = anw->query(anw, NATIVE_WINDOW_WIDTH, &width)) != OK) { ALOGE("%s: Camera %d: Failed to query Surface width", __FUNCTION__, mCameraId); return res; } if ((res = anw->query(anw, NATIVE_WINDOW_HEIGHT, &height)) != OK) { 
ALOGE("%s: Camera %d: Failed to query Surface height", __FUNCTION__, mCameraId); return res; } if ((res = anw->query(anw, NATIVE_WINDOW_FORMAT, &format)) != OK) { ALOGE("%s: Camera %d: Failed to query Surface format", __FUNCTION__, mCameraId); return res; } if ((res = anw->query(anw, NATIVE_WINDOW_DEFAULT_DATASPACE, reinterpret_cast<int*>(&dataSpace))) != OK) { ALOGE("%s: Camera %d: Failed to query Surface dataSpace", __FUNCTION__, mCameraId); return res; } // FIXME: remove this override since the default format should be // IMPLEMENTATION_DEFINED. b/9487482 if (format >= HAL_PIXEL_FORMAT_RGBA_8888 && format <= HAL_PIXEL_FORMAT_BGRA_8888) { ALOGW("%s: Camera %d: Overriding format %#x to IMPLEMENTATION_DEFINED", __FUNCTION__, mCameraId, format); format = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED; } // 将尺寸舍入到此格式可用的最接近尺寸 if (flexibleConsumer && !CameraDeviceClient::roundBufferDimensionNearest(width, height, format, dataSpace, mDevice->info(), /*out*/&width, /*out*/&height)) { ALOGE("%s: No stream configurations with the format %#x defined, failed to create stream.", __FUNCTION__, format); return BAD_VALUE; } int streamId = -1; res = mDevice->createStream(surface, width, height, format, dataSpace, static_cast<camera3_stream_rotation_t> (outputConfiguration.getRotation()), &streamId); if (res == OK) { mStreamMap.add(binder, streamId); ALOGV("%s: Camera %d: Successfully created a new stream ID %d", __FUNCTION__, mCameraId, streamId); /** * 设置流转换标志以自动旋转相机流以进行预览 */ int32_t transform = 0; res = getRotationTransformLocked(&transform); if (res != OK) { // Error logged by getRotationTransformLocked. return res; } res = mDevice->setStreamTransform(streamId, transform); if (res != OK) { ALOGE("%s: Failed to set stream transform (stream id %d)", __FUNCTION__, streamId); return res; } return streamId; } return res; }
Camera3Device 类 createStream
frameworks/av/services/camera/libcameraservice/device3/Camera3Device.cpp
// Camera3Device::createStream(): pauses an ACTIVE device if necessary
// (internalPauseAndWaitLocked), constructs a Camera3OutputStream — with a
// blob buffer size for HAL_PIXEL_FORMAT_BLOB, sized for JPEG or, for
// HAL_DATASPACE_DEPTH, a point cloud — registers it in mOutputStreams,
// assigns *id = mNextStreamId++, marks mNeedConfig, and, if the device had
// been active, reconfigures the streams (configureStreamsLocked) and resumes.
status_t Camera3Device::createStream(sp<Surface> consumer, uint32_t width, uint32_t height, int format, android_dataspace dataSpace, camera3_stream_rotation_t rotation, int *id) { ATRACE_CALL(); Mutex::Autolock il(mInterfaceLock); Mutex::Autolock l(mLock); ALOGV("Camera %d: Creating new stream %d: %d x %d, format %d, dataspace %d rotation %d", mId, mNextStreamId, width, height, format, dataSpace, rotation); status_t res; bool wasActive = false; switch (mStatus) { case STATUS_ERROR: CLOGE("Device has encountered a serious error"); return INVALID_OPERATION; case STATUS_UNINITIALIZED: CLOGE("Device not initialized"); return INVALID_OPERATION; case STATUS_UNCONFIGURED: case STATUS_CONFIGURED: // OK break; case STATUS_ACTIVE: ALOGV("%s: Stopping activity to reconfigure streams", __FUNCTION__); res = internalPauseAndWaitLocked(); if (res != OK) { SET_ERR_L("Can't pause captures to reconfigure streams!"); return res; } wasActive = true; break; default: SET_ERR_L("Unexpected status: %d", mStatus); return INVALID_OPERATION; } assert(mStatus != STATUS_ACTIVE); sp<Camera3OutputStream> newStream; if (format == HAL_PIXEL_FORMAT_BLOB) { ssize_t blobBufferSize; if (dataSpace != HAL_DATASPACE_DEPTH) { blobBufferSize = getJpegBufferSize(width, height); if (blobBufferSize <= 0) { SET_ERR_L("Invalid jpeg buffer size %zd", blobBufferSize); return BAD_VALUE; } } else { blobBufferSize = getPointCloudBufferSize(); if (blobBufferSize <= 0) { SET_ERR_L("Invalid point cloud buffer size %zd", blobBufferSize); return BAD_VALUE; } } newStream = new Camera3OutputStream(mNextStreamId, consumer, width, height, blobBufferSize, format, dataSpace, rotation); } else { newStream = new Camera3OutputStream(mNextStreamId, consumer, width, height, format, dataSpace, rotation); } newStream->setStatusTracker(mStatusTracker); res = mOutputStreams.add(mNextStreamId, newStream); if (res < 0) { SET_ERR_L("Can't add new stream to set: %s (%d)", strerror(-res), res); return res; } *id = mNextStreamId++; 
mNeedConfig = true; // Continue captures if active at start if (wasActive) { ALOGV("%s: Restarting activity to reconfigure streams", __FUNCTION__); res = configureStreamsLocked(); if (res != OK) { CLOGE("Can't reconfigure device for new stream %d: %s (%d)", mNextStreamId, strerror(-res), res); return res; } internalResumeLocked(); } ALOGV("Camera %d: Created new stream", mId); return OK; }
configureStreamsLocked()进入HAL
// Camera3Device::configureStreamsLocked(): builds a
// camera3_stream_configuration from the input stream plus all output streams
// (adding or removing a dummy stream to work around the HALv3.2 zero-stream
// spec bug, and skipping a bidirectional stream listed twice), then calls the
// HAL's configure_streams() op. On BAD_VALUE from the HAL it cancels every
// in-progress stream configuration and returns the device to UNCONFIGURED;
// otherwise it finishes configuration of every stream immediately, tells the
// request thread the configuration changed, clears mNeedConfig, updates the
// device status (CONFIGURED, or UNCONFIGURED if only the dummy stream
// exists), and tears down deleted streams.
status_t Camera3Device::configureStreamsLocked() { ATRACE_CALL(); status_t res; if (mStatus != STATUS_UNCONFIGURED && mStatus != STATUS_CONFIGURED) { CLOGE("Not idle"); return INVALID_OPERATION; } if (!mNeedConfig) { ALOGV("%s: Skipping config, no stream changes", __FUNCTION__); return OK; } // Workaround for device HALv3.2 or older spec bug - zero streams requires // adding a dummy stream instead. // TODO: Bug: 17321404 for fixing the HAL spec and removing this workaround. if (mOutputStreams.size() == 0) { addDummyStreamLocked(); } else { tryRemoveDummyStreamLocked(); } // Start configuring the streams ALOGV("%s: Camera %d: Starting stream configuration", __FUNCTION__, mId); camera3_stream_configuration config; config.operation_mode = mIsConstrainedHighSpeedConfiguration ? CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE : CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE; config.num_streams = (mInputStream != NULL) + mOutputStreams.size(); Vector<camera3_stream_t*> streams; streams.setCapacity(config.num_streams); if (mInputStream != NULL) { camera3_stream_t *inputStream; inputStream = mInputStream->startConfiguration(); if (inputStream == NULL) { SET_ERR_L("Can't start input stream configuration"); return INVALID_OPERATION; } streams.add(inputStream); } for (size_t i = 0; i < mOutputStreams.size(); i++) { // Don't configure bidi streams twice, nor add them twice to the list if (mOutputStreams[i].get() == static_cast<Camera3StreamInterface*>(mInputStream.get())) { config.num_streams--; continue; } camera3_stream_t *outputStream; outputStream = mOutputStreams.editValueAt(i)->startConfiguration(); if (outputStream == NULL) { SET_ERR_L("Can't start output stream configuration"); return INVALID_OPERATION; } streams.add(outputStream); } config.streams = streams.editArray(); // Do the HAL configuration; will potentially touch stream // max_buffers, usage, priv fields. 
ATRACE_BEGIN("camera3->configure_streams"); res = mHal3Device->ops->configure_streams(mHal3Device, &config); ATRACE_END(); if (res == BAD_VALUE) { // HAL rejected this set of streams as unsupported, clean up config // attempt and return to unconfigured state if (mInputStream != NULL && mInputStream->isConfiguring()) { res = mInputStream->cancelConfiguration(); if (res != OK) { SET_ERR_L("Can't cancel configuring input stream %d: %s (%d)", mInputStream->getId(), strerror(-res), res); return res; } } for (size_t i = 0; i < mOutputStreams.size(); i++) { sp<Camera3OutputStreamInterface> outputStream = mOutputStreams.editValueAt(i); if (outputStream->isConfiguring()) { res = outputStream->cancelConfiguration(); if (res != OK) { SET_ERR_L( "Can't cancel configuring output stream %d: %s (%d)", outputStream->getId(), strerror(-res), res); return res; } } } // Return state to that at start of call, so that future configures // properly clean things up internalUpdateStatusLocked(STATUS_UNCONFIGURED); mNeedConfig = true; ALOGV("%s: Camera %d: Stream configuration failed", __FUNCTION__, mId); return BAD_VALUE; } else if (res != OK) { // Some other kind of error from configure_streams - this is not // expected SET_ERR_L("Unable to configure streams with HAL: %s (%d)", strerror(-res), res); return res; } // Finish all stream configuration immediately. 
// TODO: Try to relax this later back to lazy completion, which should be // faster if (mInputStream != NULL && mInputStream->isConfiguring()) { res = mInputStream->finishConfiguration(mHal3Device); if (res != OK) { SET_ERR_L("Can't finish configuring input stream %d: %s (%d)", mInputStream->getId(), strerror(-res), res); return res; } } for (size_t i = 0; i < mOutputStreams.size(); i++) { sp<Camera3OutputStreamInterface> outputStream = mOutputStreams.editValueAt(i); if (outputStream->isConfiguring()) { res = outputStream->finishConfiguration(mHal3Device); if (res != OK) { SET_ERR_L("Can't finish configuring output stream %d: %s (%d)", outputStream->getId(), strerror(-res), res); return res; } } } // Request thread needs to know to avoid using repeat-last-settings protocol // across configure_streams() calls mRequestThread->configurationComplete(); // Update device state mNeedConfig = false; internalUpdateStatusLocked((mDummyStreamId == NO_STREAM) ? STATUS_CONFIGURED : STATUS_UNCONFIGURED); ALOGV("%s: Camera %d: Stream configuration complete", __FUNCTION__, mId); // tear down the deleted streams after configure streams. mDeletedStreams.clear(); return OK; }
2.CameraCaptureSessionImpl
public class CameraCaptureSessionImpl extends CameraCaptureSession implements CameraCaptureSessionCore { ...... CameraCaptureSessionImpl(int id, Surface input, List<Surface> outputs, CameraCaptureSession.StateCallback callback, Handler stateHandler, android.hardware.camera2.impl.CameraDeviceImpl deviceImpl, Handler deviceStateHandler, boolean configureSuccess) { if (outputs == null || outputs.isEmpty()) { throw new IllegalArgumentException("outputs must be a non-null, non-empty list"); } else if (callback == null) { throw new IllegalArgumentException("callback must not be null"); } mId = id; mIdString = String.format("Session %d: ", mId); // TODO: extra verification of outputs mOutputs = outputs; mInput = input; mStateHandler = checkHandler(stateHandler); mStateCallback = createUserStateCallbackProxy(mStateHandler, callback); mDeviceHandler = checkNotNull(deviceStateHandler, "deviceStateHandler must not be null"); mDeviceImpl = checkNotNull(deviceImpl, "deviceImpl must not be null"); /* * 对所有内部即将到来的事件使用与设备的 StateCallback 相同的 handler * * 这样可以确保 CameraDevice.StateCallback 和CameraDeviceImpl.CaptureCallback 事件之间的总体顺序。 */ mSequenceDrainer = new TaskDrainer<>(mDeviceHandler, new SequenceDrainListener(), /*name*/"seq"); mIdleDrainer = new TaskSingleDrainer(mDeviceHandler, new IdleDrainListener(), /*name*/"idle"); mAbortDrainer = new TaskSingleDrainer(mDeviceHandler, new AbortDrainListener(), /*name*/"abort"); // CameraDevice 应该在构造我们之前调用 configureOutputs 并完成它 if (configureSuccess) { mStateCallback.onConfigured(this); if (DEBUG) Log.v(TAG, mIdString + "Created session successfully"); mConfigureSuccess = true; } else { mStateCallback.onConfigureFailed(this); mClosed = true; // 不要触发其他任何回调,也不要进行其他任何工作 Log.e(TAG, mIdString + "Failed to create capture session; configuration failed"); mConfigureSuccess = false; } } ...... }
4. CameraCaptureSession 设置重复请求预览数据
public class CameraCaptureSessionImpl extends CameraCaptureSession
        implements CameraCaptureSessionCore {
    ......
    /**
     * Installs {@code request} as this session's repeating request (the
     * mechanism behind continuous preview). Rejects null and reprocess
     * requests, then delegates to the device and records the returned
     * sequence id so it can be drained on close.
     */
    @Override
    public synchronized int setRepeatingRequest(CaptureRequest request, CaptureCallback callback,
            Handler handler) throws CameraAccessException {
        if (request == null) {
            throw new IllegalArgumentException("request must not be null");
        } else if (request.isReprocess()) {
            throw new IllegalArgumentException("repeating reprocess requests are not supported");
        }

        checkNotClosed();

        // Resolve the callback handler (falls back to the current thread's
        // looper when a callback is supplied without a handler).
        handler = checkHandler(handler, callback);

        if (DEBUG) {
            Log.v(TAG, mIdString + "setRepeatingRequest - request " + request + ", callback " +
                    callback + " handler" + " " + handler);
        }

        return addPendingSequence(mDeviceImpl.setRepeatingRequest(request,
                createCaptureCallbackProxy(handler, callback), mDeviceHandler));
    }
    ......
}
public class CameraDeviceImpl extends CameraDevice {
    ......
    /**
     * Submits a single request as a streaming (repeating) capture by wrapping
     * it in a one-element list and delegating to submitCaptureRequest.
     */
    public int setRepeatingRequest(CaptureRequest request, CaptureCallback callback,
            Handler handler) throws CameraAccessException {
        List<CaptureRequest> requestList = new ArrayList<CaptureRequest>();
        requestList.add(request);
        return submitCaptureRequest(requestList, callback, handler, /*streaming*/true);
    }
    ......
    ......
    /**
     * Validates and submits a batch of capture requests to the remote camera
     * service, registering the callback for result dispatch. Returns the
     * request id assigned by the service, or -1 on binder death.
     */
    private int submitCaptureRequest(List<CaptureRequest> requestList, CaptureCallback callback,
            Handler handler, boolean repeating) throws CameraAccessException {

        // Need a valid handler, or the current thread needs a looper, if the
        // callback is valid
        handler = checkHandler(handler, callback);

        // Make sure that all requests have at least 1 surface and all surfaces
        // are non-null
        for (CaptureRequest request : requestList) {
            if (request.getTargets().isEmpty()) {
                throw new IllegalArgumentException(
                        "Each request must have at least one Surface target");
            }

            for (Surface surface : request.getTargets()) {
                if (surface == null) {
                    throw new IllegalArgumentException("Null Surface targets are not allowed");
                }
            }
        }

        synchronized(mInterfaceLock) {
            checkIfCameraClosedOrInError();
            int requestId;

            if (repeating) {
                // Replace any previously installed repeating request.
                stopRepeating();
            }

            LongParcelable lastFrameNumberRef = new LongParcelable();
            try { // Eventually calls CameraDeviceClient#submitRequestList(...).
                requestId = mRemoteDevice.submitRequestList(requestList, repeating,
                        /*out*/lastFrameNumberRef);
                if (DEBUG) {
                    Log.v(TAG, "last frame number " + lastFrameNumberRef.getNumber());
                }
            } catch (CameraRuntimeException e) {
                throw e.asChecked();
            } catch (RemoteException e) {
                // impossible
                return -1;
            }

            if (callback != null) {
                mCaptureCallbackMap.put(requestId, new CaptureCallbackHolder(callback,
                        requestList, handler, repeating, mNextSessionId - 1));
            } else {
                if (DEBUG) {
                    Log.d(TAG, "Listen for request " + requestId + " is null");
                }
            }

            long lastFrameNumber = lastFrameNumberRef.getNumber();

            if (repeating) {
                // Close out the sequence of the repeating request we replaced.
                if (mRepeatingRequestId != REQUEST_ID_NONE) {
                    checkEarlyTriggerSequenceComplete(mRepeatingRequestId, lastFrameNumber);
                }
                mRepeatingRequestId = requestId;
            } else {
                mRequestLastFrameNumbersList.add(new RequestLastFrameNumbersHolder(requestList,
                        requestId, lastFrameNumber));
            }

            // Transition idle -> active and notify listeners once.
            if (mIdle) {
                mDeviceHandler.post(mCallOnActive);
            }
            mIdle = false;

            return requestId;
        }
    }
    ......
}
CameraDeviceClient 类 submitRequestList
// Validates a batch of client capture requests, stamps each request's
// metadata with its resolved output stream IDs and a shared request ID, and
// forwards the batch to the device as either a streaming (repeating) or
// one-shot capture. Returns the assigned request ID on success, or a
// negative status code.
status_t CameraDeviceClient::submitRequestList(List<sp<CaptureRequest> > requests,
                                               bool streaming, int64_t* lastFrameNumber) {
    ATRACE_CALL();
    ALOGV("%s-start of function. Request list size %zu", __FUNCTION__, requests.size());

    status_t res;
    if ( (res = checkPid(__FUNCTION__) ) != OK) return res;

    Mutex::Autolock icl(mBinderSerializationLock);

    if (!mDevice.get()) return DEAD_OBJECT;

    if (requests.empty()) {
        ALOGE("%s: Camera %d: Sent null request. Rejecting request.",
              __FUNCTION__, mCameraId);
        return BAD_VALUE;
    }

    List<const CameraMetadata> metadataRequestList;
    int32_t requestId = mRequestIdCounter;
    uint32_t loopCounter = 0;

    for (List<sp<CaptureRequest> >::iterator it = requests.begin();
            it != requests.end(); ++it) {
        sp<CaptureRequest> request = *it;
        if (request == 0) {
            ALOGE("%s: Camera %d: Sent null request.",
                    __FUNCTION__, mCameraId);
            return BAD_VALUE;
        } else if (request->mIsReprocess) {
            // Reprocess requests need a configured input stream and cannot be
            // submitted as a repeating stream.
            if (!mInputStream.configured) {
                ALOGE("%s: Camera %d: no input stream is configured.",
                        __FUNCTION__, mCameraId);
                return BAD_VALUE;
            } else if (streaming) {
                ALOGE("%s: Camera %d: streaming reprocess requests not supported.",
                        __FUNCTION__, mCameraId);
                return BAD_VALUE;
            }
        }

        CameraMetadata metadata(request->mMetadata);
        if (metadata.isEmpty()) {
            ALOGE("%s: Camera %d: Sent empty metadata packet. Rejecting request.",
                   __FUNCTION__, mCameraId);
            return BAD_VALUE;
        } else if (request->mSurfaceList.isEmpty()) {
            ALOGE("%s: Camera %d: Requests must have at least one surface target. "
                  "Rejecting request.", __FUNCTION__, mCameraId);
            return BAD_VALUE;
        }

        if (!enforceRequestPermissions(metadata)) {
            // Callee logs
            return PERMISSION_DENIED;
        }

        /**
         * Write in the output stream IDs which we calculate from the capture
         * request's list of surface targets
         */
        Vector<int32_t> outputStreamIds;
        outputStreamIds.setCapacity(request->mSurfaceList.size());
        for (size_t i = 0; i < request->mSurfaceList.size(); ++i) {
            sp<Surface> surface = request->mSurfaceList[i];
            if (surface == 0) continue;

            sp<IGraphicBufferProducer> gbp = surface->getIGraphicBufferProducer();
            int idx = mStreamMap.indexOfKey(IInterface::asBinder(gbp));

            // Trying to submit a request with a surface that wasn't created
            if (idx == NAME_NOT_FOUND) {
                ALOGE("%s: Camera %d: Tried to submit a request with a surface that"
                      " we have not called createStream on",
                      __FUNCTION__, mCameraId);
                return BAD_VALUE;
            }

            int streamId = mStreamMap.valueAt(idx);
            outputStreamIds.push_back(streamId);
            ALOGV("%s: Camera %d: Appending output stream %d to request",
                  __FUNCTION__, mCameraId, streamId);
        }

        metadata.update(ANDROID_REQUEST_OUTPUT_STREAMS, &outputStreamIds[0],
                        outputStreamIds.size());
        if (request->mIsReprocess) {
            metadata.update(ANDROID_REQUEST_INPUT_STREAMS, &mInputStream.id, 1);
        }

        metadata.update(ANDROID_REQUEST_ID, &requestId, /*size*/1);
        loopCounter++; // loopCounter starts from 1
        ALOGV("%s: Camera %d: Creating request with ID %d (%d of %zu)",
              __FUNCTION__, mCameraId, requestId, loopCounter, requests.size());

        metadataRequestList.push_back(metadata);
    }
    mRequestIdCounter++;

    if (streaming) {
        res = mDevice->setStreamingRequestList(metadataRequestList, lastFrameNumber);
        if (res != OK) {
            ALOGE("%s: Camera %d: Got error %d after trying to set streaming "
                  "request", __FUNCTION__, mCameraId, res);
        } else {
            mStreamingRequestList.push_back(requestId);
        }
    } else {
        res = mDevice->captureList(metadataRequestList, lastFrameNumber);
        if (res != OK) {
            ALOGE("%s: Camera %d: Got error %d after trying to set capture",
                __FUNCTION__, mCameraId, res);
        }
        ALOGV("%s: requestId = %d ", __FUNCTION__, requestId);
    }

    ALOGV("%s: Camera %d: End of function", __FUNCTION__, mCameraId);
    if (res == OK) {
        return requestId;
    }

    return res;
}
CameraMetadata::update
status_t CameraMetadata::update(uint32_t tag, const int32_t *data, size_t data_count) { status_t res; if (mLocked) { ALOGE("%s: CameraMetadata is locked", __FUNCTION__); return INVALID_OPERATION; } if ( (res = checkType(tag, TYPE_INT32)) != OK) { return res; } return updateImpl(tag, (const void*)data, data_count); }
status_t CameraMetadata::updateImpl(uint32_t tag, const void *data, size_t data_count) { status_t res; if (mLocked) { ALOGE("%s: CameraMetadata is locked", __FUNCTION__); return INVALID_OPERATION; } int type = get_camera_metadata_tag_type(tag); if (type == -1) { ALOGE("%s: Tag %d not found", __FUNCTION__, tag); return BAD_VALUE; } // 安全检查-确保数据未指向该元数据,因为如果需要调整大小,该元数据将失效 size_t bufferSize = get_camera_metadata_size(mBuffer); uintptr_t bufAddr = reinterpret_cast<uintptr_t>(mBuffer); uintptr_t dataAddr = reinterpret_cast<uintptr_t>(data); if (dataAddr > bufAddr && dataAddr < (bufAddr + bufferSize)) { ALOGE("%s: Update attempted with data from the same metadata buffer!", __FUNCTION__); return INVALID_OPERATION; } size_t data_size = calculate_camera_metadata_entry_data_size(type, data_count); res = resizeIfNeeded(1, data_size); if (res == OK) { camera_metadata_entry_t entry; res = find_camera_metadata_entry(mBuffer, tag, &entry); if (res == NAME_NOT_FOUND) { res = add_camera_metadata_entry(mBuffer, tag, data, data_count); } else if (res == OK) { res = update_camera_metadata_entry(mBuffer, entry.index, data, data_count, NULL); } } if (res != OK) { ALOGE("%s: Unable to update metadata entry %s.%s (%x): %s (%d)", __FUNCTION__, get_camera_metadata_section_name(tag), get_camera_metadata_tag_name(tag), tag, strerror(-res), res); } IF_ALOGV() { ALOGE_IF(validate_camera_metadata_structure(mBuffer, /*size*/NULL) != OK, "%s: Failed to validate metadata structure after update %p", __FUNCTION__, mBuffer); } return res; }
submitRequestsHelper(…) 方法向请求线程设置重复请求
Camera3Device 类 setStreamingRequestList(…) 方法实际工作是由 submitRequestsHelper(…) 完成的。
status_t Camera3Device::submitRequestsHelper( const List<const CameraMetadata> &requests, bool repeating, /*out*/ int64_t *lastFrameNumber) { ATRACE_CALL(); Mutex::Autolock il(mInterfaceLock); Mutex::Autolock l(mLock); status_t res = checkStatusOkToCaptureLocked(); if (res != OK) { // error logged by previous call return res; } RequestList requestList; // convertMetadataListToRequestListLocked(…) 转换函数流程。 //遍历 metadataList,每个元素作为入参调用 setUpRequestLocked(…),返回 CaptureRequest 对象。 res = convertMetadataListToRequestListLocked(requests, /*out*/&requestList); if (res != OK) { // error logged by previous call return res; } if (repeating) { res = mRequestThread->setRepeatingRequests(requestList, lastFrameNumber); } else { res = mRequestThread->queueRequestList(requestList, lastFrameNumber); } if (res == OK) { waitUntilStateThenRelock(/*active*/true, kActiveTimeout); if (res != OK) { SET_ERR_L("Can't transition to active in %f seconds!", kActiveTimeout/1e9); } ALOGV("Camera %d: Capture request %" PRId32 " enqueued", mId, (*(requestList.begin()))->mResultExtras.requestId); } else { CLOGE("Cannot queue request. Impossible."); return BAD_VALUE; } return res; }
status_t Camera3Device::convertMetadataListToRequestListLocked( const List<const CameraMetadata> &metadataList, RequestList *requestList) { if (requestList == NULL) { CLOGE("requestList cannot be NULL."); return BAD_VALUE; } int32_t burstId = 0; for (List<const CameraMetadata>::const_iterator it = metadataList.begin(); it != metadataList.end(); ++it) { sp<CaptureRequest> newRequest = setUpRequestLocked(*it); if (newRequest == 0) { CLOGE("Can't create capture request"); return BAD_VALUE; } // 设置突发 ID 和请求 ID newRequest->mResultExtras.burstId = burstId++; if (it->exists(ANDROID_REQUEST_ID)) { if (it->find(ANDROID_REQUEST_ID).count == 0) { CLOGE("RequestID entry exists; but must not be empty in metadata"); return BAD_VALUE; } newRequest->mResultExtras.requestId = it->find(ANDROID_REQUEST_ID).data.i32[0]; } else { CLOGE("RequestID does not exist in metadata"); return BAD_VALUE; } requestList->push_back(newRequest); ALOGV("%s: requestId = %" PRId32, __FUNCTION__, newRequest->mResultExtras.requestId); } // 如果这是高速视频录制请求,设置批次大小。 if (mIsConstrainedHighSpeedConfiguration && requestList->size() > 0) { auto firstRequest = requestList->begin(); for (auto& outputStream : (*firstRequest)->mOutputStreams) { if (outputStream->isVideoStream()) { (*firstRequest)->mBatchSize = requestList->size(); break; } } } return OK; }
setUpRequestLocked
setUpRequestLocked(…) 函数中,首先调用 configureStreamsLocked() 配置流,然后调用 createCaptureRequest(…) 创建 CaptureRequest 对象。
sp<Camera3Device::CaptureRequest> Camera3Device::setUpRequestLocked( const CameraMetadata &request) { status_t res; if (mStatus == STATUS_UNCONFIGURED || mNeedConfig) { res = configureStreamsLocked(); // 流配置由于不支持的配置而失败。设备回到未配置状态。客户端可以尝试其他配置 if (res == BAD_VALUE && mStatus == STATUS_UNCONFIGURED) { CLOGE("No streams configured"); return NULL; } // 流配置由于其他原因而失败,致命。 if (res != OK) { SET_ERR_L("Can't set up streams: %s (%d)", strerror(-res), res); return NULL; } // 流配置成功配置为空流配置。 if (mStatus == STATUS_UNCONFIGURED) { CLOGE("No streams configured"); return NULL; } } sp<CaptureRequest> newRequest = createCaptureRequest(request); return newRequest; }
configureStreamsLocked()进入HAL
// Configures the current set of input/output streams with the camera HAL via
// configure_streams(). Same flow as above, with an additional step: for
// constrained high-speed configurations the request thread is promoted to a
// realtime (SCHED_FIFO) priority after a successful configuration.
status_t Camera3Device::configureStreamsLocked() {
    ATRACE_CALL();
    status_t res;

    if (mStatus != STATUS_UNCONFIGURED && mStatus != STATUS_CONFIGURED) {
        CLOGE("Not idle");
        return INVALID_OPERATION;
    }

    if (!mNeedConfig) {
        ALOGV("%s: Skipping config, no stream changes", __FUNCTION__);
        return OK;
    }

    // Workaround for a device HALv3.2-or-older spec bug - zero streams
    // requires adding a dummy stream instead.
    if (mOutputStreams.size() == 0) {
        addDummyStreamLocked();
    } else {
        tryRemoveDummyStreamLocked();
    }

    // Start configuring the streams
    ALOGV("%s: Camera %d: Starting stream configuration", __FUNCTION__, mId);

    camera3_stream_configuration config;
    config.operation_mode = mIsConstrainedHighSpeedConfiguration ?
            CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE :
            CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE;
    config.num_streams = (mInputStream != NULL) + mOutputStreams.size();

    Vector<camera3_stream_t*> streams;
    streams.setCapacity(config.num_streams);

    if (mInputStream != NULL) {
        camera3_stream_t *inputStream;
        // Start input stream configuration
        inputStream = mInputStream->startConfiguration();
        if (inputStream == NULL) {
            SET_ERR_L("Can't start input stream configuration");
            return INVALID_OPERATION;
        }
        streams.add(inputStream);
    }

    for (size_t i = 0; i < mOutputStreams.size(); i++) {
        // Don't configure bidi streams twice, nor add them twice to the list
        if (mOutputStreams[i].get() ==
                static_cast<Camera3StreamInterface*>(mInputStream.get())) {
            config.num_streams--;
            continue;
        }

        camera3_stream_t *outputStream;
        // Start output stream configuration
        outputStream = mOutputStreams.editValueAt(i)->startConfiguration();
        if (outputStream == NULL) {
            SET_ERR_L("Can't start output stream configuration");
            return INVALID_OPERATION;
        }
        streams.add(outputStream);
    }

    config.streams = streams.editArray();

    // Do the HAL configuration
    ATRACE_BEGIN("camera3->configure_streams");
    res = mHal3Device->ops->configure_streams(mHal3Device, &config);
    ATRACE_END();

    if (res == BAD_VALUE) {
        // HAL rejected this set of streams as unsupported; clean up the
        // configuration attempt and return to the unconfigured state
        if (mInputStream != NULL && mInputStream->isConfiguring()) {
            res = mInputStream->cancelConfiguration();
            if (res != OK) {
                SET_ERR_L("Can't cancel configuring input stream %d: %s (%d)",
                        mInputStream->getId(), strerror(-res), res);
                return res;
            }
        }

        for (size_t i = 0; i < mOutputStreams.size(); i++) {
            sp<Camera3OutputStreamInterface> outputStream =
                    mOutputStreams.editValueAt(i);
            if (outputStream->isConfiguring()) {
                res = outputStream->cancelConfiguration();
                if (res != OK) {
                    SET_ERR_L(
                        "Can't cancel configuring output stream %d: %s (%d)",
                        outputStream->getId(), strerror(-res), res);
                    return res;
                }
            }
        }

        // Return state to that at the start of the call, so that future
        // configures properly clean things up
        internalUpdateStatusLocked(STATUS_UNCONFIGURED);
        mNeedConfig = true;

        ALOGV("%s: Camera %d: Stream configuration failed", __FUNCTION__, mId);
        return BAD_VALUE;
    } else if (res != OK) {
        // Some other kind of error from configure_streams - this is not
        // expected
        SET_ERR_L("Unable to configure streams with HAL: %s (%d)",
                strerror(-res), res);
        return res;
    }

    // Finish all stream configuration immediately
    if (mInputStream != NULL && mInputStream->isConfiguring()) {
        res = mInputStream->finishConfiguration(mHal3Device);
        if (res != OK) {
            SET_ERR_L("Can't finish configuring input stream %d: %s (%d)",
                    mInputStream->getId(), strerror(-res), res);
            return res;
        }
    }

    for (size_t i = 0; i < mOutputStreams.size(); i++) {
        sp<Camera3OutputStreamInterface> outputStream =
                mOutputStreams.editValueAt(i);
        if (outputStream->isConfiguring()) {
            res = outputStream->finishConfiguration(mHal3Device);
            if (res != OK) {
                SET_ERR_L("Can't finish configuring output stream %d: %s (%d)",
                        outputStream->getId(), strerror(-res), res);
                return res;
            }
        }
    }

    // The request thread needs to know, to avoid using the
    // repeat-last-settings protocol across configure_streams() calls
    mRequestThread->configurationComplete();

    // Boost the priority of the request thread to SCHED_FIFO for high speed
    // recording
    if (mIsConstrainedHighSpeedConfiguration) {
        pid_t requestThreadTid = mRequestThread->getTid();
        res = requestPriority(getpid(), requestThreadTid,
                kConstrainedHighSpeedThreadPriority, true);
        if (res != OK) {
            ALOGW("Can't set realtime priority for request processing thread: %s (%d)",
                    strerror(-res), res);
        } else {
            ALOGD("Set real time priority for request queue thread (tid %d)",
                    requestThreadTid);
        }
    } else {
        // TODO: Set/restore normal priority for normal use cases
    }

    // Update device state
    mNeedConfig = false;
    // The device counts as configured only when no dummy stream remains.
    internalUpdateStatusLocked((mDummyStreamId == NO_STREAM) ?
            STATUS_CONFIGURED : STATUS_UNCONFIGURED);

    ALOGV("%s: Camera %d: Stream configuration complete", __FUNCTION__, mId);

    // Tear down the deleted streams after configuring streams
    mDeletedStreams.clear();

    return OK;
}
创建 CaptureRequest 对象
// Builds a CaptureRequest from request metadata: resolves the input stream
// (if android.request.inputStreams is present) and every referenced output
// stream, lazily finishing each stream's configuration on first use.
// Returns NULL on any validation failure.
sp<Camera3Device::CaptureRequest> Camera3Device::createCaptureRequest(
        const CameraMetadata &request) {
    ATRACE_CALL();
    status_t res;

    sp<CaptureRequest> newRequest = new CaptureRequest;
    newRequest->mSettings = request;

    camera_metadata_entry_t inputStreams =
            newRequest->mSettings.find(ANDROID_REQUEST_INPUT_STREAMS);
    if (inputStreams.count > 0) {
        if (mInputStream == NULL ||
                mInputStream->getId() != inputStreams.data.i32[0]) {
            // FIX: log the ID through data.i32, matching the i32 read in the
            // comparison above; data.u8[0] printed one garbage byte of the ID.
            CLOGE("Request references unknown input stream %d",
                    inputStreams.data.i32[0]);
            return NULL;
        }
        // Lazy completion of stream configuration (allocation/registration)
        // on first use
        if (mInputStream->isConfiguring()) {
            res = mInputStream->finishConfiguration(mHal3Device);
            if (res != OK) {
                SET_ERR_L("Unable to finish configuring input stream %d:"
                        " %s (%d)",
                        mInputStream->getId(), strerror(-res), res);
                return NULL;
            }
        }
        // Check whether the stream is being prepared
        if (mInputStream->isPreparing()) {
            CLOGE("Request references an input stream that's being prepared!");
            return NULL;
        }

        newRequest->mInputStream = mInputStream;
        newRequest->mSettings.erase(ANDROID_REQUEST_INPUT_STREAMS);
    }

    camera_metadata_entry_t streams =
            newRequest->mSettings.find(ANDROID_REQUEST_OUTPUT_STREAMS);
    if (streams.count == 0) {
        CLOGE("Zero output streams specified!");
        return NULL;
    }

    for (size_t i = 0; i < streams.count; i++) {
        int idx = mOutputStreams.indexOfKey(streams.data.i32[i]);
        if (idx == NAME_NOT_FOUND) {
            // FIX: same union-member mismatch as above - use data.i32.
            CLOGE("Request references unknown stream %d",
                    streams.data.i32[i]);
            return NULL;
        }
        sp<Camera3OutputStreamInterface> stream =
                mOutputStreams.editValueAt(idx);

        // Lazy completion of stream configuration (allocation/registration)
        // on first use
        if (stream->isConfiguring()) {
            res = stream->finishConfiguration(mHal3Device);
            if (res != OK) {
                SET_ERR_L("Unable to finish configuring stream %d: %s (%d)",
                        stream->getId(), strerror(-res), res);
                return NULL;
            }
        }
        // Check whether the stream is being prepared
        if (stream->isPreparing()) {
            CLOGE("Request references an output stream that's being prepared!");
            return NULL;
        }

        // Add the output stream to the CaptureRequest's mOutputStreams vector
        newRequest->mOutputStreams.push(stream);
    }
    newRequest->mSettings.erase(ANDROID_REQUEST_OUTPUT_STREAMS);
    newRequest->mBatchSize = 1;

    return newRequest;
}
mRequestThread->setRepeatingRequests
RequestThread 是在 Camera open 流程中启动的,RequestThread 类调用 run 方法,就会启动 threadLoop() 函数,此函数返回值为 true,就会进入循环模式一直调用 threadLoop(),返回 false 则只调用一次。
status_t Camera3Device::initialize(CameraModule *module) { ...... /** Start up request queue thread */ mRequestThread = new RequestThread(this, mStatusTracker, device, aeLockAvailable); res = mRequestThread->run(String8::format("C3Dev-%d-ReqQueue", mId).string()); ...... } status_t Camera3Device::RequestThread::setRepeatingRequests( const RequestList &requests, /*out*/ int64_t *lastFrameNumber) { Mutex::Autolock l(mRequestLock); if (lastFrameNumber != NULL) { *lastFrameNumber = mRepeatingLastFrameNumber; } mRepeatingRequests.clear(); mRepeatingRequests.insert(mRepeatingRequests.begin(), requests.begin(), requests.end()); unpauseForNewRequests(); mRepeatingLastFrameNumber = NO_IN_FLIGHT_REPEATING_FRAMES; return OK; }
5.RequestThread::threadLoop
等待下一批请求,然后将其放入 mNextRequests。如果 mNextRequests 超时,它将为空。这里主要调用了 waitForNextRequestLocked() 方法获取 CaptureRequest,然后给 NextRequest 成员赋值,最后将 nextRequest 添加到 mNextRequests 中。如果还存在额外的请求,继续调用 waitForNextRequestLocked() 逐个获取 CaptureRequest,并给 NextRequest 成员赋值,最后添加到 mNextRequests 中。
// Main loop of the request thread: waits for the next batch of capture
// requests, prepares the HAL request structures and output buffers, then
// submits each request to the HAL via process_capture_request(). Returning
// true keeps the loop running; returning false stops the thread.
bool Camera3Device::RequestThread::threadLoop() {
    ATRACE_CALL();
    status_t res;

    // Handle paused state.
    if (waitIfPaused()) {
        return true;
    }

    // Wait for the next batch of requests.
    waitForNextRequestBatch();
    if (mNextRequests.size() == 0) {
        return true;
    }

    // Get the latest request ID, if any
    int latestRequestId;
    camera_metadata_entry_t requestIdEntry = mNextRequests[mNextRequests.size() - 1].
            captureRequest->mSettings.find(ANDROID_REQUEST_ID);
    if (requestIdEntry.count > 0) {
        latestRequestId = requestIdEntry.data.i32[0];
    } else {
        ALOGW("%s: Did not have android.request.id set in the request.", __FUNCTION__);
        latestRequestId = NAME_NOT_FOUND;
    }

    // Prepare a batch of HAL requests and output buffers.
    res = prepareHalRequests();
    if (res == TIMED_OUT) {
        // Not a fatal error if getting output buffers timed out.
        cleanUpFailedRequests(/*sendRequestError*/ true);
        return true;
    } else if (res != OK) {
        cleanUpFailedRequests(/*sendRequestError*/ false);
        return false;
    }

    // Inform waitUntilRequestProcessed thread of a new request ID
    {
        Mutex::Autolock al(mLatestRequestMutex);
        mLatestRequestId = latestRequestId;
        mLatestRequestSignal.signal();
    }

    // Submit a batch of requests to HAL.
    // Use the flush lock only when submitting multiple requests in a batch.
    bool useFlushLock = mNextRequests.size() > 1;
    if (useFlushLock) {
        mFlushLock.lock();
    }

    // NOTE(review): mNextRequests.size() is size_t; %zu would be the correct
    // format specifier here instead of %d.
    ALOGVV("%s: %d: submitting %d requests in a batch.", __FUNCTION__, __LINE__,
            mNextRequests.size());
    for (auto& nextRequest : mNextRequests) {
        // Submit the request and block until it is ready for the next one
        ATRACE_ASYNC_BEGIN("frame capture", nextRequest.halRequest.frame_number);
        ATRACE_BEGIN("camera3->process_capture_request");
        // The framework makes only one call to process_capture_request() at a
        // time, and all calls come from the same thread.
        res = mHal3Device->ops->process_capture_request(mHal3Device, &nextRequest.halRequest);
        ATRACE_END();

        if (res != OK) {
            // Should only get a failure here for malformed requests or
            // device-level errors, so consider all errors fatal. Bad metadata
            // failures should come through notify.
            SET_ERR("RequestThread: Unable to submit capture request %d to HAL"
                    " device: %s (%d)", nextRequest.halRequest.frame_number, strerror(-res),
                    res);
            cleanUpFailedRequests(/*sendRequestError*/ false);
            if (useFlushLock) {
                mFlushLock.unlock();
            }
            return false;
        }

        // Mark that the request was submitted successfully.
        nextRequest.submitted = true;

        // Update the latest request sent to HAL
        if (nextRequest.halRequest.settings != NULL) { // Don't update if they were unchanged
            Mutex::Autolock al(mLatestRequestMutex);

            camera_metadata_t* cloned = clone_camera_metadata(nextRequest.halRequest.settings);
            mLatestRequest.acquire(cloned);
        }

        if (nextRequest.halRequest.settings != NULL) {
            nextRequest.captureRequest->mSettings.unlock(nextRequest.halRequest.settings);
        }

        // Remove any previously queued triggers (after unlock)
        res = removeTriggers(mPrevRequest);
        if (res != OK) {
            SET_ERR("RequestThread: Unable to remove triggers "
                  "(capture request %d, HAL device: %s (%d)",
                  nextRequest.halRequest.frame_number, strerror(-res), res);
            cleanUpFailedRequests(/*sendRequestError*/ false);
            if (useFlushLock) {
                mFlushLock.unlock();
            }
            return false;
        }
    }

    if (useFlushLock) {
        mFlushLock.unlock();
    }

    // Unset as current request
    {
        Mutex::Autolock l(mRequestLock);
        mNextRequests.clear();
    }

    return true;
}
1.RequestThread::waitForNextRequestBatch
// Fills mNextRequests with the next batch of capture requests: one request in
// the common case, or mBatchSize requests for constrained high-speed bursts.
// If fewer than batchSize requests could be fetched, the partial batch is
// failed and cleaned up.
void Camera3Device::RequestThread::waitForNextRequestBatch() {
    // Optimized a bit for the simple steady-state case (single repeating
    // request), to avoid putting that request in the queue temporarily.
    Mutex::Autolock l(mRequestLock);

    assert(mNextRequests.empty());

    NextRequest nextRequest;
    nextRequest.captureRequest = waitForNextRequestLocked();
    if (nextRequest.captureRequest == nullptr) {
        return;
    }

    nextRequest.halRequest = camera3_capture_request_t();
    nextRequest.submitted = false;
    mNextRequests.add(nextRequest);

    // Wait for additional requests to fill the batch
    const size_t batchSize = nextRequest.captureRequest->mBatchSize;

    for (size_t i = 1; i < batchSize; i++) {
        NextRequest additionalRequest;
        additionalRequest.captureRequest = waitForNextRequestLocked();
        if (additionalRequest.captureRequest == nullptr) {
            break;
        }

        additionalRequest.halRequest = camera3_capture_request_t();
        additionalRequest.submitted = false;
        mNextRequests.add(additionalRequest);
    }

    if (mNextRequests.size() < batchSize) {
        // FIX: both arguments are size_t, so %zu is required; %d is undefined
        // behavior on LP64 builds where size_t is 64-bit.
        ALOGE("RequestThread: only get %zu out of %zu requests. Skipping requests.",
                mNextRequests.size(), batchSize);
        cleanUpFailedRequests(/*sendRequestError*/true);
    }

    return;
}
RequestThread::waitForNextRequestLocked
waitForNextRequestLocked() 主要用来获取下一个 CaptureRequest,首先遍历 mRepeatingRequests,将其首元素取出赋给 nextRequest,接着将其剩余的元素插入到 mRequestQueue。以后再次调用 waitForNextRequestLocked() 则从 mRequestQueue 取出元素赋给 nextRequest。
// Returns the next CaptureRequest to submit, or NULL on timeout/exit.
// If the one-shot queue is empty, the repeating request list is atomically
// re-enqueued (head returned, tail pushed onto mRequestQueue) so a complete
// repeating burst always reaches the HAL in sequence. Also manages the
// thread's paused state and fetches the input buffer for reprocess requests.
sp<Camera3Device::CaptureRequest>
        Camera3Device::RequestThread::waitForNextRequestLocked() {
    status_t res;
    sp<CaptureRequest> nextRequest;
    while (mRequestQueue.empty()) {
        if (!mRepeatingRequests.empty()) {
            // Always atomically enqueue all requests in a repeating request
            // list. Guarantees a complete in-sequence set of captures to
            // the application.
            const RequestList &requests = mRepeatingRequests;
            RequestList::const_iterator firstRequest =
                    requests.begin();
            nextRequest = *firstRequest;
            mRequestQueue.insert(mRequestQueue.end(),
                    ++firstRequest,
                    requests.end());
            // No need to wait any longer

            mRepeatingLastFrameNumber = mFrameNumber + requests.size() - 1;

            break;
        }

        res = mRequestSignal.waitRelative(mRequestLock, kRequestTimeout);

        if ((mRequestQueue.empty() && mRepeatingRequests.empty()) ||
                exitPending()) {
            Mutex::Autolock pl(mPauseLock);
            if (mPaused == false) {
                ALOGV("%s: RequestThread: Going idle", __FUNCTION__);
                mPaused = true;
                // Let the tracker know
                sp<StatusTracker> statusTracker = mStatusTracker.promote();
                if (statusTracker != 0) {
                    statusTracker->markComponentIdle(mStatusId, Fence::NO_FENCE);
                }
            }
            // Stop waiting for now and let thread management happen
            return NULL;
        }
    }

    if (nextRequest == NULL) {
        // Don't have a repeating request already in hand, so queue
        // must have an entry now.
        RequestList::iterator firstRequest =
                mRequestQueue.begin();
        nextRequest = *firstRequest;
        mRequestQueue.erase(firstRequest);
    }

    // In case we've been unpaused by setPaused clearing mDoPause, need to
    // update internal pause state (capture/setRepeatingRequest unpause
    // directly).
    Mutex::Autolock pl(mPauseLock);
    if (mPaused) {
        ALOGV("%s: RequestThread: Unpaused", __FUNCTION__);
        sp<StatusTracker> statusTracker = mStatusTracker.promote();
        if (statusTracker != 0) {
            statusTracker->markComponentActive(mStatusId);
        }
    }
    mPaused = false;

    // Check if we've reconfigured since last time, and if so, reset the
    // preview request. Can't use 'NULL request == repeat' across
    // configure calls.
    if (mReconfigured) {
        mPrevRequest.clear();
        mReconfigured = false;
    }

    if (nextRequest != NULL) {
        nextRequest->mResultExtras.frameNumber = mFrameNumber++;
        nextRequest->mResultExtras.afTriggerId = mCurrentAfTriggerId;
        nextRequest->mResultExtras.precaptureTriggerId = mCurrentPreCaptureTriggerId;

        // Since RequestThread::clear() removes buffers from the input stream,
        // get the right buffer here before unlocking mRequestLock
        if (nextRequest->mInputStream != NULL) {
            res = nextRequest->mInputStream->getInputBuffer(&nextRequest->mInputBuffer);
            if (res != OK) {
                // Can't get an input buffer from the gralloc queue - this
                // could be due to a disconnected queue or other producer
                // misbehavior, so not a fatal error
                ALOGE("%s: Can't get input buffer, skipping request:"
                        " %s (%d)", __FUNCTION__, strerror(-res), res);
                if (mListener != NULL) {
                    mListener->notifyError(
                            ICameraDeviceCallbacks::ERROR_CAMERA_REQUEST,
                            nextRequest->mResultExtras);
                }
                return NULL;
            }
        }
    }

    handleAePrecaptureCancelRequest(nextRequest);

    return nextRequest;
}
2.prepareHalRequests
// For every request in mNextRequests: locks the (sorted) settings metadata
// for the HAL, inserts any queued AF/AE triggers, acquires the input buffer
// (reprocess) and one output buffer per target stream, and registers the
// request in the in-flight queue. Returns TIMED_OUT if an output buffer
// could not be obtained (non-fatal), INVALID_OPERATION on fatal errors.
status_t Camera3Device::RequestThread::prepareHalRequests() {
    ATRACE_CALL();

    for (auto& nextRequest : mNextRequests) {
        sp<CaptureRequest> captureRequest = nextRequest.captureRequest;
        camera3_capture_request_t* halRequest = &nextRequest.halRequest;
        Vector<camera3_stream_buffer_t>* outputBuffers = &nextRequest.outputBuffers;

        // Prepare a request to send to the HAL
        halRequest->frame_number = captureRequest->mResultExtras.frameNumber;

        // Insert any queued triggers (before the metadata is locked)
        status_t res = insertTriggers(captureRequest);
        if (res < 0) {
            SET_ERR("RequestThread: Unable to insert triggers "
                    "(capture request %d, HAL device: %s (%d)",
                    halRequest->frame_number, strerror(-res), res);
            return INVALID_OPERATION;
        }
        int triggerCount = res;
        bool triggersMixedIn = (triggerCount > 0 || mPrevTriggers > 0);
        mPrevTriggers = triggerCount;

        // If the request is different from the last one, or triggers were
        // mixed in last time
        if (mPrevRequest != captureRequest || triggersMixedIn) {
            /**
             * Insert dummy trigger ids if a trigger is set but no trigger id
             * exists
             */
            res = addDummyTriggerIds(captureRequest);
            if (res != OK) {
                SET_ERR("RequestThread: Unable to insert dummy trigger IDs "
                        "(capture request %d, HAL device: %s (%d)",
                        halRequest->frame_number, strerror(-res), res);
                return INVALID_OPERATION;
            }

            /**
             * The request should be presorted
             */
            captureRequest->mSettings.sort();
            halRequest->settings = captureRequest->mSettings.getAndLock();
            mPrevRequest = captureRequest;
            ALOGVV("%s: Request settings are NEW", __FUNCTION__);

            IF_ALOGV() {
                camera_metadata_ro_entry_t e = camera_metadata_ro_entry_t();
                find_camera_metadata_ro_entry(
                        halRequest->settings,
                        ANDROID_CONTROL_AF_TRIGGER,
                        &e
                );
                if (e.count > 0) {
                    ALOGV("%s: Request (frame num %d) had AF trigger 0x%x",
                          __FUNCTION__,
                          halRequest->frame_number,
                          e.data.u8[0]);
                }
            }
        } else {
            // leave request.settings NULL to indicate 'reuse latest given'
            ALOGVV("%s: Request settings are REUSED",
                   __FUNCTION__);
        }

        uint32_t totalNumBuffers = 0;

        // Fill in buffers
        if (captureRequest->mInputStream != NULL) {
            halRequest->input_buffer = &captureRequest->mInputBuffer;
            totalNumBuffers += 1;
        } else {
            halRequest->input_buffer = NULL;
        }

        // Camera3OutputStreamInterface (manages a single output data stream
        // from the camera device) extends Camera3StreamInterface, which
        // manages a single input and/or output stream and declares the pure
        // virtual getBuffer(...). Calling getBuffer(...) on the objects in
        // CaptureRequest::mOutputStreams dispatches to the concrete
        // Camera3OutputStream implementation (Camera3OutputStream derives
        // from Camera3IOStreamBase, which derives from Camera3Stream); the
        // actual buffer acquisition happens in Camera3OutputStream's
        // getBufferLocked(...).
        outputBuffers->insertAt(camera3_stream_buffer_t(), 0,
                captureRequest->mOutputStreams.size());
        halRequest->output_buffers = outputBuffers->array();
        for (size_t i = 0; i < captureRequest->mOutputStreams.size(); i++) {
            res = captureRequest->mOutputStreams.editItemAt(i)->
                    getBuffer(&outputBuffers->editItemAt(i));
            if (res != OK) {
                // Can't get an output buffer from the gralloc queue - this
                // could be due to an abandoned queue or other consumer
                // misbehavior, so not a fatal error
                ALOGE("RequestThread: Can't get output buffer, skipping request:"
                        " %s (%d)", strerror(-res), res);

                return TIMED_OUT;
            }
            halRequest->num_output_buffers++;
        }
        totalNumBuffers += halRequest->num_output_buffers;

        // Log the request in the in-flight queue
        sp<Camera3Device> parent = mParent.promote();
        if (parent == NULL) {
            // Should not happen, and nowhere to send errors to, so just log it
            CLOGE("RequestThread: Parent is gone");
            return INVALID_OPERATION;
        }
        res = parent->registerInFlight(halRequest->frame_number,
                totalNumBuffers, captureRequest->mResultExtras,
                /*hasInput*/halRequest->input_buffer != NULL,
                captureRequest->mAeTriggerCancelOverride);
        ALOGVV("%s: registered in flight requestId = %" PRId32 ", frameNumber = %" PRId64
               ", burstId = %" PRId32 ".",
                __FUNCTION__, captureRequest->mResultExtras.requestId,
                captureRequest->mResultExtras.frameNumber,
                captureRequest->mResultExtras.burstId);
        if (res != OK) {
            SET_ERR("RequestThread: Unable to register new in-flight request:"
                    " %s (%d)", strerror(-res), res);
            return INVALID_OPERATION;
        }
    }

    return OK;
}
CaptureRequest 类
class Camera3Device : public CameraDeviceBase, private camera3_callback_ops { ...... private: class CaptureRequest : public LightRefBase<CaptureRequest> { public: CameraMetadata mSettings; sp<camera3::Camera3Stream> mInputStream; camera3_stream_buffer_t mInputBuffer; Vector<sp<camera3::Camera3OutputStreamInterface> > mOutputStreams; CaptureResultExtras mResultExtras; // 用于取消不支持 CONTROL_AE_PRECAPTURE_TRIGGER_CANCEL 的设备的 AE 预捕获触发 AeTriggerCancelOverride_t mAeTriggerCancelOverride; // 一次应提交给 HAL 的请求数。 // 例如,如果批次大小为 8, // 则此请求和随后的 7 个请求将同时提交给 HAL。 // 随后的 7 个请求的批处理将被请求线程忽略。 int mBatchSize; }; ...... }
CaptureRequest 成员 mOutputStreams 是一个 Vector,这个向量中的每个对象是一个指向 camera3::Camera3OutputStreamInterface 的强引用。camera3::Camera3OutputStreamInterface 只是一个接口,它是在 Camera3Device 类 createStream(…) 方法中添加的,其中创建了 Camera3OutputStream (用于管理来自相机设备的单个输出数据流)对象,并将其添加到 mOutputStreams 指向的 KeyedVector 向量中。在其后的流程中,Camera3Device::createCaptureRequest(…) 方法中会在 Camera3Device 类 mOutputStreams 成员获取输出流,并将流 push 到 CaptureRequest 成员 mOutputStreams 向量中。
Camera3OutputStream getBufferLocked
mConsumer 指向了 Surface,Surface 继承了 ANativeWindow。Surface 类是 ANativeWindow 的实现,可将图形缓冲区输入到 BufferQueue 中。
frameworks/av/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
// Dequeues one buffer for this stream on behalf of a capture request. Blocks
// (bounded by kWaitForBufferDuration) when the HAL already holds max_buffers
// outstanding buffers, then delegates to the subclass's getBufferLocked().
status_t Camera3Stream::getBuffer(camera3_stream_buffer *buffer) {
    ATRACE_CALL();
    Mutex::Autolock l(mLock);
    status_t res = OK;
    // This function should only be called when the stream has been configured.
    if (mState != STATE_CONFIGURED) {
        ALOGE("%s: Stream %d: Can't get buffers if stream is not in CONFIGURED state %d",
                __FUNCTION__, mId, mState);
        return INVALID_OPERATION;
    }
    // Wait for a new buffer to be returned if we are about to hit the limit.
    if (getHandoutOutputBufferCountLocked() == camera3_stream::max_buffers) {
        ALOGV("%s: Already dequeued max output buffers (%d), wait for next returned one.",
                __FUNCTION__, camera3_stream::max_buffers);
        res = mOutputBufferReturnedSignal.waitRelative(mLock, kWaitForBufferDuration);
        if (res != OK) {
            if (res == TIMED_OUT) {
                ALOGE("%s: wait for output buffer return timed out after %lldms (max_buffers %d)",
                        __FUNCTION__, kWaitForBufferDuration / 1000000LL,
                        camera3_stream::max_buffers);
            }
            return res;
        }
    }
    // The function that actually obtains the buffer (virtual; the concrete
    // implementation lives in the stream subclass, e.g. Camera3OutputStream).
    res = getBufferLocked(buffer);
    if (res == OK) {
        // Fire the BufferListener callbacks for the newly acquired buffer.
        fireBufferListenersLocked(*buffer, /*acquired*/true, /*output*/true);
    }
    return res;
}
// Dequeues an ANativeWindowBuffer (plus acquire fence) from the consumer
// Surface and hands it out to the HAL. mLock is deliberately dropped around
// the dequeueBuffer() call — see the deadlock scenario below.
status_t Camera3OutputStream::getBufferLocked(camera3_stream_buffer *buffer) {
    ATRACE_CALL();
    status_t res;
    if ((res = getBufferPreconditionCheckLocked()) != OK) {
        return res;
    }
    ANativeWindowBuffer* anb;
    int fenceFd;
    /**
     * Release the lock briefly to avoid deadlock in the following scenario:
     * Thread 1: StreamingProcessor::startStream -> Camera3Stream::isConfiguring().
     * This thread acquired StreamingProcessor lock and try to lock Camera3Stream lock.
     * Thread 2: Camera3Stream::returnBuffer->StreamingProcessor::onFrameAvailable().
     * This thread acquired Camera3Stream lock and bufferQueue lock, and try to lock
     * StreamingProcessor lock.
     * Thread 3: Camera3Stream::getBuffer(). This thread acquired Camera3Stream lock
     * and try to lock bufferQueue lock.
     * Then there is circular locking dependency.
     */
    // mConsumer points to a Surface; Surface implements ANativeWindow.
    sp<ANativeWindow> currentConsumer = mConsumer;
    mLock.unlock();
    res = currentConsumer->dequeueBuffer(currentConsumer.get(), &anb, &fenceFd);
    mLock.lock();
    if (res != OK) {
        ALOGE("%s: Stream %d: Can't dequeue next output buffer: %s (%d)",
                __FUNCTION__, mId, strerror(-res), res);
        return res;
    }
    /**
     * The fence FD is now owned by the HAL, except in case of error, in which
     * case we reassign it to acquire_fence
     */
    handoutBufferLocked(*buffer, &(anb->handle), /*acquireFence*/fenceFd,
            /*releaseFence*/-1, CAMERA3_BUFFER_STATUS_OK, /*output*/true);
    return OK;
}
ANativeWindow 结构体的定义
system/core/include/system/window.h
// C-level window abstraction: a table of function pointers that producers
// (EGL, the camera HAL, media codecs) call to exchange graphic buffers with
// a consumer. Surface installs its hook_* functions into these slots.
struct ANativeWindow
{
#ifdef __cplusplus
    ANativeWindow()
        : flags(0), minSwapInterval(0), maxSwapInterval(0), xdpi(0), ydpi(0)
    {
        common.magic = ANDROID_NATIVE_WINDOW_MAGIC;
        common.version = sizeof(ANativeWindow);
        memset(common.reserved, 0, sizeof(common.reserved));
    }
    /* Implement the methods that sp<ANativeWindow> expects so that it
       can be used to automatically refcount ANativeWindow's. */
    void incStrong(const void* /*id*/) const {
        common.incRef(const_cast<android_native_base_t*>(&common));
    }
    void decStrong(const void* /*id*/) const {
        common.decRef(const_cast<android_native_base_t*>(&common));
    }
#endif
    ......
    /* Hook for dequeueing the next buffer; Surface points this at
       hook_dequeueBuffer. (Excerpt — other members elided.) */
    int (*dequeueBuffer)(struct ANativeWindow* window,
            struct ANativeWindowBuffer** buffer, int* fenceFd);
    ......
}
Surface 类是 ANativeWindow 的实现,
可将图形缓冲区输入到 BufferQueue 中。
frameworks/native/libs/gui/Surface.cpp
// Surface constructor: stores the binder proxy to the BufferQueue producer
// side and wires the generic ANativeWindow function-pointer table to this
// class's static hook_* trampolines, so plain C callers reach this Surface.
Surface::Surface(
        const sp<IGraphicBufferProducer>& bufferProducer,
        bool controlledByApp)
    : mGraphicBufferProducer(bufferProducer),
      mGenerationNumber(0)
{
    ANativeWindow::setSwapInterval  = hook_setSwapInterval;
    ANativeWindow::dequeueBuffer    = hook_dequeueBuffer;
    ANativeWindow::cancelBuffer     = hook_cancelBuffer;
    ANativeWindow::queueBuffer      = hook_queueBuffer;
    ANativeWindow::query            = hook_query;
    ANativeWindow::perform          = hook_perform;
    ANativeWindow::dequeueBuffer_DEPRECATED = hook_dequeueBuffer_DEPRECATED;
    ANativeWindow::cancelBuffer_DEPRECATED  = hook_cancelBuffer_DEPRECATED;
    ANativeWindow::lockBuffer_DEPRECATED    = hook_lockBuffer_DEPRECATED;
    ANativeWindow::queueBuffer_DEPRECATED   = hook_queueBuffer_DEPRECATED;
    ......
}
int Surface::hook_dequeueBuffer(ANativeWindow* window,
        ANativeWindowBuffer** buffer, int* fenceFd) {
    // Static thunk installed in ANativeWindow::dequeueBuffer: recover the
    // owning Surface from the window pointer and forward the call to it.
    Surface* self = getSelf(window);
    return self->dequeueBuffer(buffer, fenceFd);
}
// Dequeues the next free graphic buffer from the BufferQueue via the
// IGraphicBufferProducer binder proxy, requesting (re)allocation when the
// producer says the slot needs it, and returns the buffer plus a dup'ed
// acquire-fence FD (or -1 when there is no fence).
int Surface::dequeueBuffer(android_native_buffer_t** buffer, int* fenceFd) {
    ATRACE_CALL();
    ALOGV("Surface::dequeueBuffer");
    uint32_t reqWidth;
    uint32_t reqHeight;
    bool swapIntervalZero;
    PixelFormat reqFormat;
    uint32_t reqUsage;
    {
        Mutex::Autolock lock(mMutex);
        reqWidth = mReqWidth ? mReqWidth : mUserWidth;
        reqHeight = mReqHeight ? mReqHeight : mUserHeight;
        swapIntervalZero = mSwapIntervalZero;
        reqFormat = mReqFormat;
        reqUsage = mReqUsage;
    }
    // Drop the lock so that we can still touch the Surface while blocking in IGBP::dequeueBuffer
    int buf = -1;
    sp<Fence> fence;
    status_t result = mGraphicBufferProducer->dequeueBuffer(&buf, &fence, swapIntervalZero,
            reqWidth, reqHeight, reqFormat, reqUsage);
    if (result < 0) {
        ALOGV("dequeueBuffer: IGraphicBufferProducer::dequeueBuffer(%d, %d, %d, %d, %d)"
                "failed: %d", swapIntervalZero, reqWidth, reqHeight, reqFormat,
                reqUsage, result);
        return result;
    }
    Mutex::Autolock lock(mMutex);
    sp<GraphicBuffer>& gbuf(mSlots[buf].buffer);
    // this should never happen
    ALOGE_IF(fence == NULL, "Surface::dequeueBuffer: received null Fence! buf=%d", buf);
    if (result & IGraphicBufferProducer::RELEASE_ALL_BUFFERS) {
        freeAllBuffers();
    }
    if ((result & IGraphicBufferProducer::BUFFER_NEEDS_REALLOCATION) || gbuf == 0) {
        // mGraphicBufferProducer is initialized in the Surface constructor. It
        // actually points at a BpGraphicBufferProducer, so this call transacts
        // over binder and is answered by the remote BnGraphicBufferProducer
        // (i.e. BufferQueueProducer).
        result = mGraphicBufferProducer->requestBuffer(buf, &gbuf);
        if (result != NO_ERROR) {
            ALOGE("dequeueBuffer: IGraphicBufferProducer::requestBuffer failed: %d", result);
            mGraphicBufferProducer->cancelBuffer(buf, fence);
            return result;
        }
    }
    if (fence->isValid()) {
        *fenceFd = fence->dup();
        if (*fenceFd == -1) {
            ALOGE("dequeueBuffer: error duping fence: %d", errno);
            // dup() should never fail; something is badly wrong. Soldier on
            // and hope for the best; the worst that should happen is some
            // visible corruption that lasts until the next frame.
        }
    } else {
        *fenceFd = -1;
    }
    *buffer = gbuf.get();
    return OK;
}
frameworks/native/libs/gui/IGraphicBufferProducer.cpp
class BpGraphicBufferProducer : public BpInterface<IGraphicBufferProducer> { public: ...... virtual status_t dequeueBuffer(int *buf, sp<Fence>* fence, bool async, uint32_t width, uint32_t height, PixelFormat format, uint32_t usage) { Parcel data, reply; data.writeInterfaceToken(IGraphicBufferProducer::getInterfaceDescriptor()); data.writeInt32(static_cast<int32_t>(async)); data.writeUint32(width); data.writeUint32(height); data.writeInt32(static_cast<int32_t>(format)); data.writeUint32(usage); status_t result = remote()->transact(DEQUEUE_BUFFER, data, &reply); if (result != NO_ERROR) { return result; } *buf = reply.readInt32(); bool nonNull = reply.readInt32(); if (nonNull) { *fence = new Fence(); reply.read(**fence); } result = reply.readInt32(); return result; } ...... }
BufferQueueProducer 继承自 BnGraphicBufferProducer。因此远端 BnGraphicBufferProducer 类 dequeueBuffer(…) 具体实现位于 BufferQueueProducer 中。while 循环中调用 waitForFreeSlotThenRelock(…) 查找缓存区,然后就可以获取到 GraphicBuffer。
BufferQueueProducer
// Server-side implementation of IGraphicBufferProducer::dequeueBuffer (the
// remote end of the binder call above). Waits for a free slot, marks it
// DEQUEUED, allocates a new GraphicBuffer when the slot's buffer is missing
// or mismatched (BUFFER_NEEDS_REALLOCATION), waits out any EGL fence, and
// returns the slot index and its acquire fence.
status_t BufferQueueProducer::dequeueBuffer(int *outSlot,
        sp<android::Fence> *outFence, bool async,
        uint32_t width, uint32_t height, PixelFormat format, uint32_t usage) {
    ATRACE_CALL();
    { // Autolock scope
        Mutex::Autolock lock(mCore->mMutex);
        mConsumerName = mCore->mConsumerName;
    } // Autolock scope
    BQ_LOGV("dequeueBuffer: async=%s w=%u h=%u format=%#x, usage=%#x",
            async ? "true" : "false", width, height, format, usage);
    if ((width && !height) || (!width && height)) {
        BQ_LOGE("dequeueBuffer: invalid size: w=%u h=%u", width, height);
        return BAD_VALUE;
    }
    status_t returnFlags = NO_ERROR;
    EGLDisplay eglDisplay = EGL_NO_DISPLAY;
    EGLSyncKHR eglFence = EGL_NO_SYNC_KHR;
    bool attachedByConsumer = false;
    { // Autolock scope
        Mutex::Autolock lock(mCore->mMutex);
        mCore->waitWhileAllocatingLocked();
        if (format == 0) {
            format = mCore->mDefaultBufferFormat;
        }
        // Enable the usage bits the consumer requested
        usage |= mCore->mConsumerUsageBits;
        const bool useDefaultSize = !width && !height;
        if (useDefaultSize) {
            width = mCore->mDefaultWidth;
            height = mCore->mDefaultHeight;
        }
        int found = BufferItem::INVALID_BUFFER_SLOT;
        while (found == BufferItem::INVALID_BUFFER_SLOT) {
            status_t status = waitForFreeSlotThenRelock("dequeueBuffer", async,
                    &found, &returnFlags);
            if (status != NO_ERROR) {
                return status;
            }
            // This should not happen
            if (found == BufferQueueCore::INVALID_BUFFER_SLOT) {
                BQ_LOGE("dequeueBuffer: no available buffer slots");
                return -EBUSY;
            }
            const sp<GraphicBuffer>& buffer(mSlots[found].mGraphicBuffer);
            // If we are not allowed to allocate new buffers,
            // waitForFreeSlotThenRelock must have returned a slot containing a
            // buffer. If this buffer would require reallocation to meet the
            // requested attributes, we free it and attempt to get another one.
            if (!mCore->mAllowAllocation) {
                if (buffer->needsReallocation(width, height, format, usage)) {
                    mCore->freeBufferLocked(found);
                    found = BufferItem::INVALID_BUFFER_SLOT;
                    continue;
                }
            }
        }
        *outSlot = found;
        ATRACE_BUFFER_INDEX(found);
        attachedByConsumer = mSlots[found].mAttachedByConsumer;
        mSlots[found].mBufferState = BufferSlot::DEQUEUED;
        const sp<GraphicBuffer>& buffer(mSlots[found].mGraphicBuffer);
        if ((buffer == NULL) ||
                buffer->needsReallocation(width, height, format, usage)) {
            // Slot has no usable buffer: reset its state and tell the caller
            // to requestBuffer() a freshly allocated one.
            mSlots[found].mAcquireCalled = false;
            mSlots[found].mGraphicBuffer = NULL;
            mSlots[found].mRequestBufferCalled = false;
            mSlots[found].mEglDisplay = EGL_NO_DISPLAY;
            mSlots[found].mEglFence = EGL_NO_SYNC_KHR;
            mSlots[found].mFence = Fence::NO_FENCE;
            mCore->mBufferAge = 0;
            returnFlags |= BUFFER_NEEDS_REALLOCATION;
        } else {
            // We add 1 because that will be the frame number when this buffer
            // is queued
            mCore->mBufferAge = mCore->mFrameCounter + 1 - mSlots[found].mFrameNumber;
        }
        BQ_LOGV("dequeueBuffer: setting buffer age to %" PRIu64, mCore->mBufferAge);
        if (CC_UNLIKELY(mSlots[found].mFence == NULL)) {
            BQ_LOGE("dequeueBuffer: about to return a NULL fence - "
                    "slot=%d w=%d h=%d format=%u",
                    found, buffer->width, buffer->height, buffer->format);
        }
        eglDisplay = mSlots[found].mEglDisplay;
        eglFence = mSlots[found].mEglFence;
        *outFence = mSlots[found].mFence;
        mSlots[found].mEglFence = EGL_NO_SYNC_KHR;
        mSlots[found].mFence = Fence::NO_FENCE;
        mCore->validateConsistencyLocked();
    } // Autolock scope
    if (returnFlags & BUFFER_NEEDS_REALLOCATION) {
        status_t error;
        BQ_LOGV("dequeueBuffer: allocating a new buffer for slot %d", *outSlot);
        sp<GraphicBuffer> graphicBuffer(mCore->mAllocator->createGraphicBuffer(
                width, height, format, usage, &error));
        if (graphicBuffer == NULL) {
            BQ_LOGE("dequeueBuffer: createGraphicBuffer failed");
            return error;
        }
        { // Autolock scope
            Mutex::Autolock lock(mCore->mMutex);
            if (mCore->mIsAbandoned) {
                BQ_LOGE("dequeueBuffer: BufferQueue has been abandoned");
                return NO_INIT;
            }
            graphicBuffer->setGenerationNumber(mCore->mGenerationNumber);
            mSlots[*outSlot].mGraphicBuffer = graphicBuffer;
        } // Autolock scope
    }
    if (attachedByConsumer) {
        returnFlags |= BUFFER_NEEDS_REALLOCATION;
    }
    if (eglFence != EGL_NO_SYNC_KHR) {
        EGLint result = eglClientWaitSyncKHR(eglDisplay, eglFence, 0, 1000000000);
        // If something goes wrong, log the error, but return the buffer
        // without synchronizing access to it. It's too late at this point to
        // abort the dequeue operation.
        if (result == EGL_FALSE) {
            BQ_LOGE("dequeueBuffer: error %#x waiting for fence", eglGetError());
        } else if (result == EGL_TIMEOUT_EXPIRED_KHR) {
            BQ_LOGE("dequeueBuffer: timeout waiting for fence");
        }
        eglDestroySyncKHR(eglDisplay, eglFence);
    }
    BQ_LOGV("dequeueBuffer: returning slot=%d/%" PRIu64 " buf=%p flags=%#x",
            *outSlot, mSlots[*outSlot].mFrameNumber,
            mSlots[*outSlot].mGraphicBuffer->handle, returnFlags);
    return returnFlags;
}
3.HAL 发送新的捕获请求process_capture_request
框架对 process_capture_request() 的调用是串行的:同一时间只会有一个未返回的调用,且所有调用均来自同一线程。
int QCamera3HardwareInterface::process_capture_request(
const struct camera3_device *device,
camera3_capture_request_t *request)
{
CDBG("%s: E", __func__);
QCamera3HardwareInterface *hw =
reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
if (!hw) {
ALOGE("%s: NULL camera device", __func__);
return -EINVAL;
}
int rc = hw->processCaptureRequest(request);
CDBG("%s: X", __func__);
return rc;
}
/**
 * Processes one framework capture request under mMutex: validates it, on the
 * first request configures EIS/capture-intent parameters, registers buffers,
 * initializes and starts all channels; then waits on acquire fences, records
 * the request in the pending lists, issues per-stream requests, pushes
 * parameters to the backend, and finally throttles the caller on mRequestCond
 * while MIN_INFLIGHT_REQUESTS or more requests are outstanding.
 *
 * Fix: the NAME_NOT_FOUND early return (missing ANDROID_REQUEST_ID with no
 * previous id) previously returned WITHOUT releasing mMutex, deadlocking every
 * later call into this HAL; it now unlocks like every other exit path.
 */
int QCamera3HardwareInterface::processCaptureRequest(
                    camera3_capture_request_t *request)
{
    ATRACE_CALL();
    int rc = NO_ERROR;
    int32_t request_id;
    CameraMetadata meta;
    pthread_mutex_lock(&mMutex);
    // Validate the incoming request
    rc = validateCaptureRequest(request);
    if (rc != NO_ERROR) {
        ALOGE("%s: incoming request is not valid", __func__);
        pthread_mutex_unlock(&mMutex);
        return rc;
    }
    meta = request->settings;
    // For the first capture request, send the capture intent, then stream on
    // all streams
    if (mFirstRequest) {
        /* get eis information for stream configuration */
        cam_is_type_t is_type;
        char is_type_value[PROPERTY_VALUE_MAX];
        property_get("camera.is_type", is_type_value, "0");
        is_type = static_cast<cam_is_type_t>(atoi(is_type_value));
        if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
            int32_t hal_version = CAM_HAL_V3;
            uint8_t captureIntent =
                meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
            mCaptureIntent = captureIntent;
            memset(mParameters, 0, sizeof(parm_buffer_t));
            AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
                sizeof(hal_version), &hal_version);
            AddSetParmEntryToBatch(mParameters, CAM_INTF_META_CAPTURE_INTENT,
                sizeof(captureIntent), &captureIntent);
        }
        // If EIS is enabled, turn it on for video recording; there is no EIS
        // for the front camera or for 4K video.
        bool setEis = mEisEnable && (gCamCapability[mCameraId]->position == CAM_POSITION_BACK &&
            (mCaptureIntent == CAMERA3_TEMPLATE_VIDEO_RECORD ||
             mCaptureIntent == CAMERA3_TEMPLATE_VIDEO_SNAPSHOT));
        int32_t vsMode;
        vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
        rc = AddSetParmEntryToBatch(mParameters,
                CAM_INTF_PARM_DIS_ENABLE,
                sizeof(vsMode), &vsMode);
        // IS type will be 0 unless EIS is supported. If EIS is supported it
        // can be 1 or 4 depending on the stream and video size.
        if (setEis){
            if (m_bIs4KVideo) {
                is_type = IS_TYPE_DIS;
            } else {
                is_type = IS_TYPE_EIS_2_0;
            }
        }
        for (size_t i = 0; i < request->num_output_buffers; i++) {
            const camera3_stream_buffer_t& output = request->output_buffers[i];
            QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
            /*for livesnapshot stream is_type will be DIS*/
            if (setEis && output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
                rc = channel->registerBuffer(output.buffer, IS_TYPE_DIS);
            } else {
                rc = channel->registerBuffer(output.buffer, is_type);
            }
            if (rc < 0) {
                ALOGE("%s: registerBuffer failed",
                        __func__);
                pthread_mutex_unlock(&mMutex);
                return -ENODEV;
            }
        }
        /* Set the capture intent, hal version and dis enable parameters to
           the backend */
        mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
                    mParameters);
        // First initialize all streams
        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
            if (setEis && (*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
                rc = channel->initialize(IS_TYPE_DIS);
            } else {
                rc = channel->initialize(is_type);
            }
            if (NO_ERROR != rc) {
                ALOGE("%s : Channel initialization failed %d", __func__, rc);
                pthread_mutex_unlock(&mMutex);
                return rc;
            }
        }
        if (mRawDumpChannel) {
            rc = mRawDumpChannel->initialize(is_type);
            if (rc != NO_ERROR) {
                ALOGE("%s: Error: Raw Dump Channel init failed", __func__);
                pthread_mutex_unlock(&mMutex);
                return rc;
            }
        }
        if (mSupportChannel) {
            rc = mSupportChannel->initialize(is_type);
            if (rc < 0) {
                ALOGE("%s: Support channel initialization failed", __func__);
                pthread_mutex_unlock(&mMutex);
                return rc;
            }
        }
        // Then start them
        CDBG_HIGH("%s: Start META Channel", __func__);
        rc = mMetadataChannel->start();
        if (rc < 0) {
            ALOGE("%s: META channel start failed", __func__);
            pthread_mutex_unlock(&mMutex);
            return rc;
        }
        if (mSupportChannel) {
            rc = mSupportChannel->start();
            if (rc < 0) {
                ALOGE("%s: Support channel start failed", __func__);
                mMetadataChannel->stop();
                pthread_mutex_unlock(&mMutex);
                return rc;
            }
        }
        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
            CDBG_HIGH("%s: Start Regular Channel mask=%d", __func__, channel->getStreamTypeMask());
            rc = channel->start();
            if (rc < 0) {
                ALOGE("%s: channel start failed", __func__);
                pthread_mutex_unlock(&mMutex);
                return rc;
            }
        }
        if (mRawDumpChannel) {
            CDBG("%s: Starting raw dump stream",__func__);
            rc = mRawDumpChannel->start();
            if (rc != NO_ERROR) {
                // Raw dump failed to start: unwind by stopping everything we
                // started above before bailing out.
                ALOGE("%s: Error Starting Raw Dump Channel", __func__);
                for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
                    it != mStreamInfo.end(); it++) {
                    QCamera3Channel *channel =
                        (QCamera3Channel *)(*it)->stream->priv;
                    ALOGE("%s: Stopping Regular Channel mask=%d", __func__,
                        channel->getStreamTypeMask());
                    channel->stop();
                }
                if (mSupportChannel)
                    mSupportChannel->stop();
                mMetadataChannel->stop();
                pthread_mutex_unlock(&mMutex);
                return rc;
            }
        }
        mWokenUpByDaemon = false;
        mPendingRequest = 0;
    }
    uint32_t frameNumber = request->frame_number;
    cam_stream_ID_t streamID;
    if (meta.exists(ANDROID_REQUEST_ID)) {
        request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
        mCurrentRequestId = request_id;
        CDBG("%s: Received request with id: %d",__func__, request_id);
    } else if (mFirstRequest || mCurrentRequestId == -1){
        ALOGE("%s: Unable to find request id field, \
& no previous id available", __func__);
        // BUGFIX: this path used to return while still holding mMutex, which
        // would deadlock all subsequent calls into this HAL instance.
        pthread_mutex_unlock(&mMutex);
        return NAME_NOT_FOUND;
    } else {
        CDBG("%s: Re-using old request id", __func__);
        request_id = mCurrentRequestId;
    }
    CDBG("%s: %d, num_output_buffers = %d input_buffer = %p frame_number = %d",
                                    __func__, __LINE__,
                                    request->num_output_buffers,
                                    request->input_buffer,
                                    frameNumber);
    // Acquire all request buffers first
    streamID.num_streams = 0;
    int blob_request = 0;
    uint32_t snapshotStreamId = 0;
    for (size_t i = 0; i < request->num_output_buffers; i++) {
        const camera3_stream_buffer_t& output = request->output_buffers[i];
        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
            // Call function to store a local copy of the jpeg data for
            // encoding parameters
            blob_request = 1;
            snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
        }
        if (output.acquire_fence != -1) {
            rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
            close(output.acquire_fence);
            if (rc != OK) {
                ALOGE("%s: sync wait failed %d", __func__, rc);
                pthread_mutex_unlock(&mMutex);
                return rc;
            }
        }
        streamID.streamID[streamID.num_streams] =
            channel->getStreamID(channel->getStreamTypeMask());
        streamID.num_streams++;
    }
    if (blob_request && mRawDumpChannel) {
        CDBG("%s: Trigger Raw based on blob request if Raw dump is enabled", __func__);
        streamID.streamID[streamID.num_streams] =
            mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
        streamID.num_streams++;
    }
    if(request->input_buffer == NULL) {
        rc = setFrameParameters(request, streamID, snapshotStreamId);
        if (rc < 0) {
            ALOGE("%s: fail to set frame parameters", __func__);
            pthread_mutex_unlock(&mMutex);
            return rc;
        }
    } else {
        if (request->input_buffer->acquire_fence != -1) {
            rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
            close(request->input_buffer->acquire_fence);
            if (rc != OK) {
                ALOGE("%s: input buffer sync wait failed %d", __func__, rc);
                pthread_mutex_unlock(&mMutex);
                return rc;
            }
        }
    }
    /* Update the pending request list and pending buffers map */
    PendingRequestInfo pendingRequest;
    pendingRequest.frame_number = frameNumber;
    pendingRequest.num_buffers = request->num_output_buffers;
    pendingRequest.request_id = request_id;
    pendingRequest.blob_request = blob_request;
    pendingRequest.bUrgentReceived = 0;
    pendingRequest.input_buffer = request->input_buffer;
    pendingRequest.settings = request->settings;
    pendingRequest.pipeline_depth = 0;
    pendingRequest.partial_result_cnt = 0;
    extractJpegMetadata(pendingRequest.jpegMetadata, request);
    // Extract the capture intent
    if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
        mCaptureIntent =
                meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
    }
    pendingRequest.capture_intent = mCaptureIntent;
    for (size_t i = 0; i < request->num_output_buffers; i++) {
        RequestedBufferInfo requestedBuf;
        requestedBuf.stream = request->output_buffers[i].stream;
        requestedBuf.buffer = NULL;
        pendingRequest.buffers.push_back(requestedBuf);
        // Add to the buffer-handle-based pending buffers list
        PendingBufferInfo bufferInfo;
        bufferInfo.frame_number = frameNumber;
        bufferInfo.buffer = request->output_buffers[i].buffer;
        bufferInfo.stream = request->output_buffers[i].stream;
        mPendingBuffersMap.mPendingBufferList.push_back(bufferInfo);
        mPendingBuffersMap.num_buffers++;
        CDBG("%s: frame = %d, buffer = %p, stream = %p, stream format = %d",
          __func__, frameNumber, bufferInfo.buffer, bufferInfo.stream,
          bufferInfo.stream->format);
    }
    CDBG("%s: mPendingBuffersMap.num_buffers = %d",
          __func__, mPendingBuffersMap.num_buffers);
    mPendingRequestsList.push_back(pendingRequest);
    if(mFlush) {
        pthread_mutex_unlock(&mMutex);
        return NO_ERROR;
    }
    // Notify metadata channel we receive a request
    mMetadataChannel->request(NULL, frameNumber);
    metadata_buffer_t reproc_meta;
    memset(&reproc_meta, 0, sizeof(metadata_buffer_t));
    if(request->input_buffer != NULL){
        rc = setReprocParameters(request, &reproc_meta, snapshotStreamId);
        if (NO_ERROR != rc) {
            ALOGE("%s: fail to set reproc parameters", __func__);
            pthread_mutex_unlock(&mMutex);
            return rc;
        }
    }
    // Call request on other streams
    for (size_t i = 0; i < request->num_output_buffers; i++) {
        const camera3_stream_buffer_t& output = request->output_buffers[i];
        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
        if (channel == NULL) {
            ALOGE("%s: invalid channel pointer for stream", __func__);
            continue;
        }
        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
            rc = channel->request(output.buffer, frameNumber,
                    request->input_buffer, (request->input_buffer)? &reproc_meta : mParameters);
            if (rc < 0) {
                ALOGE("%s: Fail to request on picture channel", __func__);
                pthread_mutex_unlock(&mMutex);
                return rc;
            }
        } else {
            CDBG("%s: %d, request with buffer %p, frame_number %d", __func__,
                __LINE__, output.buffer, frameNumber);
            rc = channel->request(output.buffer, frameNumber);
        }
        if (rc < 0)
            ALOGE("%s: request failed", __func__);
    }
    if(request->input_buffer == NULL) {
        /* Set the parameters to the backend */
        mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
    }
    mFirstRequest = false;
    // Added a timed condition wait
    struct timespec ts;
    uint8_t isValidTimeout = 1;
    rc = clock_gettime(CLOCK_REALTIME, &ts);
    if (rc < 0) {
        isValidTimeout = 0;
        ALOGE("%s: Error reading the real time clock!!", __func__);
    }
    else {
        // Make timeout as 5 sec for request to be honored
        ts.tv_sec += 5;
    }
    // Block on the condition variable while too many requests are in flight
    mPendingRequest++;
    while (mPendingRequest >= MIN_INFLIGHT_REQUESTS) {
        if (!isValidTimeout) {
            CDBG("%s: Blocking on conditional wait", __func__);
            pthread_cond_wait(&mRequestCond, &mMutex);
        }
        else {
            CDBG("%s: Blocking on timed conditional wait", __func__);
            rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
            if (rc == ETIMEDOUT) {
                rc = -ENODEV;
                ALOGE("%s: Unblocked on timeout!!!!", __func__);
                break;
            }
        }
        CDBG("%s: Unblocked", __func__);
        if (mWokenUpByDaemon) {
            mWokenUpByDaemon = false;
            if (mPendingRequest < MAX_INFLIGHT_REQUESTS)
                break;
        }
    }
    pthread_mutex_unlock(&mMutex);
    return rc;
}
6.HAL 通过 process_capture_result() 调用返回
打开 camera 设备时(Camera3Device 构造函数中),会给 camera3_callback_ops::notify 和 camera3_callback_ops::process_capture_result 分别赋值为 sNotify 和 sProcessCaptureResult。HAL 调用 notify 上报快门事件时,最终会走到 Camera3Device::notifyShutter,进而调用 listener 的 notifyShutter 方法;此处的 listener 其实是 CameraDeviceClient 对象,所以会调用 CameraDeviceClient 类的 notifyShutter 方法:
camera3_callback_ops::notify
//Camera3Device.cpp void Camera3Device::notify(const camera3_notify_msg *msg) { NotificationListener *listener; { Mutex::Autolock l(mOutputLock); listener = mListener; } ... switch (msg->type) { case CAMERA3_MSG_ERROR: { notifyError(msg->message.error, listener); break; } case CAMERA3_MSG_SHUTTER: { notifyShutter(msg->message.shutter, listener); break; } default: SET_ERR("Unknown notify message from HAL: %d", msg->type); } } // Camera3Device.cpp void Camera3Device::notifyShutter(const camera3_shutter_msg_t &msg, NotificationListener *listener) { ... // Set timestamp for the request in the in-flight tracking // and get the request ID to send upstream { Mutex::Autolock l(mInFlightLock); idx = mInFlightMap.indexOfKey(msg.frame_number); if (idx >= 0) { InFlightRequest &r = mInFlightMap.editValueAt(idx); // Call listener, if any if (listener != NULL) { //调用监听的notifyShutter法国法 listener->notifyShutter(r.resultExtras, msg.timestamp); } ... //将待处理的result发送到Buffer sendCaptureResult(r.pendingMetadata, r.resultExtras, r.partialResult.collectedResult, msg.frame_number, r.hasInputBuffer, r.aeTriggerCancelOverride); returnOutputBuffers(r.pendingOutputBuffers.array(), r.pendingOutputBuffers.size(), r.shutterTimestamp); r.pendingOutputBuffers.clear(); removeInFlightRequestIfReadyLocked(idx); } } ... } //CameraDeviceClient.cpp void CameraDeviceClient::notifyShutter(const CaptureResultExtras& resultExtras,nsecs_t timestamp) { // Thread safe. Don't bother locking. sp<ICameraDeviceCallbacks> remoteCb = getRemoteCallback(); if (remoteCb != 0) { //调用应用层的回调(CaptureCallback的onCaptureStarted方法) remoteCb->onCaptureStarted(resultExtras, timestamp); } }
此处的ICameraDeviceCallbacks对应的是Java层的CameraDeviceImpl.java中的内部类CameraDeviceCallbacks,所以会调用它的onCaptureStarted方法
//CameraDeviceImpl.java @Override public void onCaptureStarted(final CaptureResultExtras resultExtras, final long timestamp) { int requestId = resultExtras.getRequestId(); final long frameNumber = resultExtras.getFrameNumber(); final CaptureCallbackHolder holder; synchronized(mInterfaceLock) { if (mRemoteDevice == null) return; // Camera already closed // Get the callback for this frame ID, if there is one holder = CameraDeviceImpl.this.mCaptureCallbackMap.get(requestId); ... // Dispatch capture start notice holder.getHandler().post(new Runnable() { @Override public void run() { if (!CameraDeviceImpl.this.isClosed()) { holder.getCallback().onCaptureStarted(CameraDeviceImpl.this,holder.getRequest( resultExtras.getSubsequenceId()),timestamp, frameNumber); } } }); } }
Camera3Device::Camera3Device(int id): mId(id), mIsConstrainedHighSpeedConfiguration(false), mHal3Device(NULL), mStatus(STATUS_UNINITIALIZED), mStatusWaiters(0), mUsePartialResult(false), mNumPartialResults(1), mNextResultFrameNumber(0), mNextReprocessResultFrameNumber(0), mNextShutterFrameNumber(0), mNextReprocessShutterFrameNumber(0), mListener(NULL) { ATRACE_CALL(); camera3_callback_ops::notify = &sNotify; camera3_callback_ops::process_capture_result = &sProcessCaptureResult; ALOGV("%s: Created device for camera %d", __FUNCTION__, id); }
mCallbackOps->notify(mCallbackOps, &notify_msg);
mCallbackOps->process_capture_result(mCallbackOps, &result);
camera3_callback_ops::process_capture_result
void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
                camera3_stream_buffer_t *buffer,
                uint32_t frame_number, void *userdata)
{
    // Static trampoline registered with the mm-camera interface: recover the
    // HAL instance from the opaque userdata cookie and dispatch to the
    // member-function overload.
    QCamera3HardwareInterface *hw =
            static_cast<QCamera3HardwareInterface *>(userdata);
    if (!hw) {
        ALOGE("%s: Invalid hw %p", __func__, hw);
        return;
    }
    hw->captureResultCb(metadata, buffer, frame_number);
}
/**
 * Member capture-result callback, serialized under mMutex. First flushes any
 * deferred loopback (reprocess) result — sending its shutter notify and
 * capture result to the framework and freeing it — then routes the incoming
 * callback to the metadata or buffer handler.
 *
 * Fix: the pasted code contained the mojibake token "¬ify_msg" (an HTML
 * "&notify_msg" whose "&not" entity was decoded to "¬"), which does not
 * compile; restored the intended address-of expression "&notify_msg".
 */
void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
                camera3_stream_buffer_t *buffer, uint32_t frame_number)
{
    pthread_mutex_lock(&mMutex);
    /* Assume flush() is called before any reprocessing. Send
       notify and result immediately upon receipt of any callback. */
    if (mLoopBackResult) {
        /* Send notify */
        camera3_notify_msg_t notify_msg;
        notify_msg.type = CAMERA3_MSG_SHUTTER;
        notify_msg.message.shutter.frame_number = mLoopBackResult->frame_number;
        notify_msg.message.shutter.timestamp = mLoopBackTimestamp;
        mCallbackOps->notify(mCallbackOps, &notify_msg);
        /* Send capture result */
        mCallbackOps->process_capture_result(mCallbackOps, mLoopBackResult);
        free_camera_metadata((camera_metadata_t *)mLoopBackResult->result);
        free(mLoopBackResult);
        mLoopBackResult = NULL;
    }
    if (metadata_buf)
        handleMetadataWithLock(metadata_buf);
    else
        handleBufferWithLock(buffer, frame_number);
    pthread_mutex_unlock(&mMutex);
}
void QCamera3HardwareInterface::handleBufferWithLock( camera3_stream_buffer_t *buffer, uint32_t frame_number) { ATRACE_CALL(); // 如果待处理的请求列表中不存在帧号,则直接将缓冲区发送到 framework, // 并更新待处理的缓冲区映射,否则,记录缓冲区。 List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin(); while (i != mPendingRequestsList.end() && i->frame_number != frame_number){ i++; } if (i == mPendingRequestsList.end()) { // 验证所有挂起的请求frame_number是否更大 for (List<PendingRequestInfo>::iterator j = mPendingRequestsList.begin(); j != mPendingRequestsList.end(); j++) { if (j->frame_number < frame_number) { ALOGE("%s: Error: pending frame number %d is smaller than %d", __func__, j->frame_number, frame_number); } } camera3_capture_result_t result; memset(&result, 0, sizeof(camera3_capture_result_t)); result.result = NULL; result.frame_number = frame_number; result.num_output_buffers = 1; result.partial_result = 0; for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin(); m != mPendingFrameDropList.end(); m++) { QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv; uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask()); if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) { buffer->status=CAMERA3_BUFFER_STATUS_ERROR; CDBG("%s: Stream STATUS_ERROR frame_number=%d, streamID=%d", __func__, frame_number, streamID); m = mPendingFrameDropList.erase(m); break; } } result.output_buffers = buffer; CDBG("%s: result frame_number = %d, buffer = %p", __func__, frame_number, buffer->buffer); for (List<PendingBufferInfo>::iterator k = mPendingBuffersMap.mPendingBufferList.begin(); k != mPendingBuffersMap.mPendingBufferList.end(); k++ ) { if (k->buffer == buffer->buffer) { CDBG("%s: Found Frame buffer, take it out from list", __func__); mPendingBuffersMap.num_buffers--; k = mPendingBuffersMap.mPendingBufferList.erase(k); break; } } CDBG("%s: mPendingBuffersMap.num_buffers = %d", __func__, mPendingBuffersMap.num_buffers); 
mCallbackOps->process_capture_result(mCallbackOps, &result); } else { if (i->input_buffer) { CameraMetadata settings; camera3_notify_msg_t notify_msg; memset(¬ify_msg, 0, sizeof(camera3_notify_msg_t)); nsecs_t capture_time = systemTime(CLOCK_MONOTONIC); if(i->settings) { settings = i->settings; if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) { capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0]; } else { ALOGE("%s: No timestamp in input settings! Using current one.", __func__); } } else { ALOGE("%s: Input settings missing!", __func__); } notify_msg.type = CAMERA3_MSG_SHUTTER; notify_msg.message.shutter.frame_number = frame_number; notify_msg.message.shutter.timestamp = capture_time; if (i->input_buffer->release_fence != -1) { int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER); close(i->input_buffer->release_fence); if (rc != OK) { ALOGE("%s: input buffer sync wait failed %d", __func__, rc); } } for (List<PendingBufferInfo>::iterator k = mPendingBuffersMap.mPendingBufferList.begin(); k != mPendingBuffersMap.mPendingBufferList.end(); k++ ) { if (k->buffer == buffer->buffer) { CDBG("%s: Found Frame buffer, take it out from list", __func__); mPendingBuffersMap.num_buffers--; k = mPendingBuffersMap.mPendingBufferList.erase(k); break; } } CDBG("%s: mPendingBuffersMap.num_buffers = %d", __func__, mPendingBuffersMap.num_buffers); bool notifyNow = true; for (List<PendingRequestInfo>::iterator j = mPendingRequestsList.begin(); j != mPendingRequestsList.end(); j++) { if (j->frame_number < frame_number) { notifyNow = false; break; } } if (notifyNow) { camera3_capture_result result; memset(&result, 0, sizeof(camera3_capture_result)); result.frame_number = frame_number; result.result = i->settings; result.input_buffer = i->input_buffer; result.num_output_buffers = 1; result.output_buffers = buffer; result.partial_result = PARTIAL_RESULT_COUNT; mCallbackOps->notify(mCallbackOps, ¬ify_msg); mCallbackOps->process_capture_result(mCallbackOps, 
&result); CDBG("%s: Notify reprocess now %d!", __func__, frame_number); i = mPendingRequestsList.erase(i); mPendingRequest--; } else { // 缓存重新处理结果以供以后使用 PendingReprocessResult pendingResult; memset(&pendingResult, 0, sizeof(PendingReprocessResult)); pendingResult.notify_msg = notify_msg; pendingResult.buffer = *buffer; pendingResult.frame_number = frame_number; mPendingReprocessResultList.push_back(pendingResult); CDBG("%s: Cache reprocess result %d!", __func__, frame_number); } } else { for (List<RequestedBufferInfo>::iterator j = i->buffers.begin(); j != i->buffers.end(); j++) { if (j->stream == buffer->stream) { if (j->buffer != NULL) { ALOGE("%s: Error: buffer is already set", __func__); } else { j->buffer = (camera3_stream_buffer_t *)malloc( sizeof(camera3_stream_buffer_t)); *(j->buffer) = *buffer; CDBG("%s: cache buffer %p at result frame_number %d", __func__, buffer, frame_number); } } } } } }
Camera3Device::sProcessCaptureResult
// Static trampoline installed into camera3_callback_ops by the constructor.
// The HAL hands back the callback-ops pointer it was given; since
// Camera3Device derives from camera3_callback_ops, casting recovers the
// device instance, and the call is forwarded to the member function.
void Camera3Device::sProcessCaptureResult(const camera3_callback_ops *cb,
        const camera3_capture_result *result) {
    Camera3Device *d =
            const_cast<Camera3Device*>(static_cast<const Camera3Device*>(cb));
    d->processCaptureResult(result);
}
// Entry point for every capture result delivered by the HAL (via the
// sProcessCaptureResult trampoline). Validates the result, accumulates
// partial metadata, matches the result to its in-flight request, and either
// holds buffers/metadata until the shutter notify arrives or sends them on.
void Camera3Device::processCaptureResult(const camera3_capture_result *result) {
    ATRACE_CALL();
    status_t res;
    uint32_t frameNumber = result->frame_number;
    // A result must carry at least one of: metadata, output buffers, or an
    // input buffer.
    if (result->result == NULL &&
            result->num_output_buffers == 0 &&
            result->input_buffer == NULL) {
        SET_ERR("No result data provided by HAL for frame %d", frameNumber);
        return;
    }
    // For HAL3.2 or above, if the HAL doesn't support partial results,
    // partial_result must always be set to 1 when metadata is included in
    // this result.
    if (!mUsePartialResult &&
            mDeviceVersion >= CAMERA_DEVICE_API_VERSION_3_2 &&
            result->result != NULL &&
            result->partial_result != 1) {
        SET_ERR("Result is malformed for frame %d: partial_result %u must be 1"
                " if partial result is not supported",
                frameNumber, result->partial_result);
        return;
    }
    bool isPartialResult = false;
    CameraMetadata collectedPartialResult;
    CaptureResultExtras resultExtras;
    bool hasInputBufferInRequest = false;
    // Get shutter timestamp and resultExtras from the in-flight request list
    // and attach them to this frame's shutter notify. If the shutter
    // timestamp hasn't been received yet, append output buffers to the
    // in-flight request; they are returned when the shutter timestamp
    // arrives. Once all result data and the shutter timestamp have been
    // received, update in-flight status and remove the in-flight entry.
    nsecs_t shutterTimestamp = 0;
    {
        Mutex::Autolock l(mInFlightLock);
        ssize_t idx = mInFlightMap.indexOfKey(frameNumber);
        if (idx == NAME_NOT_FOUND) {
            SET_ERR("Unknown frame number for capture result: %d", frameNumber);
            return;
        }
        InFlightRequest &request = mInFlightMap.editValueAt(idx);
        ALOGVV("%s: got InFlightRequest requestId = %" PRId32
                ", frameNumber = %" PRId64 ", burstId = %" PRId32
                ", partialResultCount = %d",
                __FUNCTION__, request.resultExtras.requestId,
                request.resultExtras.frameNumber, request.resultExtras.burstId,
                result->partial_result);
        // Always update the partial count to the latest one if it's not 0
        // (buffer-only results carry 0). When the framework aggregates
        // adjacent partial results into one, the latest count is used.
        if (result->partial_result != 0)
            request.resultExtras.partialResultCount = result->partial_result;
        // Check whether this result carries only partial metadata
        if (mUsePartialResult && result->result != NULL) {
            if (mDeviceVersion >= CAMERA_DEVICE_API_VERSION_3_2) {
                if (result->partial_result > mNumPartialResults || result->partial_result < 1) {
                    SET_ERR("Result is malformed for frame %d: partial_result %u must be in"
                            " the range of [1, %d] when metadata is included in the result",
                            frameNumber, result->partial_result, mNumPartialResults);
                    return;
                }
                isPartialResult = (result->partial_result < mNumPartialResults);
                if (isPartialResult) {
                    request.partialResult.collectedResult.append(result->result);
                }
            } else {
                // Pre-3.2 devices signal partial results via a quirk tag.
                camera_metadata_ro_entry_t partialResultEntry;
                res = find_camera_metadata_ro_entry(result->result,
                        ANDROID_QUIRKS_PARTIAL_RESULT, &partialResultEntry);
                if (res != NAME_NOT_FOUND &&
                        partialResultEntry.count > 0 &&
                        partialResultEntry.data.u8[0] ==
                        ANDROID_QUIRKS_PARTIAL_RESULT_PARTIAL) {
                    // A partial result. Flag this as such, and collect this
                    // set of metadata into the in-flight entry.
                    isPartialResult = true;
                    request.partialResult.collectedResult.append(
                        result->result);
                    request.partialResult.collectedResult.erase(
                        ANDROID_QUIRKS_PARTIAL_RESULT);
                }
            }
            if (isPartialResult) {
                // Fire off a 3A-only result if possible
                if (!request.partialResult.haveSent3A) {
                    request.partialResult.haveSent3A =
                            processPartial3AResult(frameNumber,
                                    request.partialResult.collectedResult,
                                    request.resultExtras);
                }
            }
        }
        shutterTimestamp = request.shutterTimestamp;
        hasInputBufferInRequest = request.hasInputBuffer;
        // Did we get the (final) result metadata for this capture?
        if (result->result != NULL && !isPartialResult) {
            if (request.haveResultMetadata) {
                SET_ERR("Called multiple times with metadata for frame %d",
                        frameNumber);
                return;
            }
            if (mUsePartialResult &&
                    !request.partialResult.collectedResult.isEmpty()) {
                collectedPartialResult.acquire(
                    request.partialResult.collectedResult);
            }
            request.haveResultMetadata = true;
        }
        // Count returned buffers (including the input buffer, if the request
        // legitimately had one) against the request's outstanding total.
        uint32_t numBuffersReturned = result->num_output_buffers;
        if (result->input_buffer != NULL) {
            if (hasInputBufferInRequest) {
                numBuffersReturned += 1;
            } else {
                ALOGW("%s: Input buffer should be NULL if there is no input"
                        " buffer sent in the request",
                        __FUNCTION__);
            }
        }
        request.numBuffersLeft -= numBuffersReturned;
        if (request.numBuffersLeft < 0) {
            SET_ERR("Too many buffers returned for frame %d",
                    frameNumber);
            return;
        }
        camera_metadata_ro_entry_t entry;
        res = find_camera_metadata_ro_entry(result->result,
                ANDROID_SENSOR_TIMESTAMP, &entry);
        if (res == OK && entry.count == 1) {
            request.sensorTimestamp = entry.data.i64[0];
        }
        // If the shutter event hasn't been received yet, attach the output
        // buffers to the in-flight request. Otherwise, return the output
        // buffers to their streams.
        if (shutterTimestamp == 0) {
            request.pendingOutputBuffers.appendArray(result->output_buffers,
                result->num_output_buffers);
        } else {
            returnOutputBuffers(result->output_buffers,
                result->num_output_buffers, shutterTimestamp);
        }
        if (result->result != NULL && !isPartialResult) {
            if (shutterTimestamp == 0) {
                // Hold the final metadata until the shutter notify arrives.
                request.pendingMetadata = result->result;
                request.partialResult.collectedResult = collectedPartialResult;
            } else {
                CameraMetadata metadata;
                metadata = result->result;
                sendCaptureResult(metadata, request.resultExtras,
                    collectedPartialResult, frameNumber, hasInputBufferInRequest,
                    request.aeTriggerCancelOverride);
            }
        }
        removeInFlightRequestIfReadyLocked(idx);
    } // scope for mInFlightLock
    if (result->input_buffer != NULL) {
        if (hasInputBufferInRequest) {
            Camera3Stream *stream =
                Camera3Stream::cast(result->input_buffer->stream);
            res = stream->returnInputBuffer(*(result->input_buffer));
            // Note: stream may be deallocated at this point, if this buffer
            // was the last reference to it.
            if (res != OK) {
                ALOGE("%s: RequestThread: Can't return input buffer for frame %d to"
                      "  its stream:%s (%d)",  __FUNCTION__,
                      frameNumber, strerror(-res), res);
            }
        } else {
            ALOGW("%s: Input buffer should be NULL if there is no input"
                    " buffer sent in the request, skipping input buffer return.",
                    __FUNCTION__);
        }
    }
}
returnOutputBuffers(…) 函数
// Hands each filled (or errored) output buffer back to its owning stream,
// stamping it with the capture (shutter) timestamp.
void Camera3Device::returnOutputBuffers(
        const camera3_stream_buffer_t *outputBuffers,
        size_t numBuffers, nsecs_t timestamp) {
    for (size_t i = 0; i < numBuffers; i++) {
        Camera3Stream *stream = Camera3Stream::cast(outputBuffers[i].stream);
        status_t res = stream->returnBuffer(outputBuffers[i], timestamp);
        // The stream may be deallocated at this point if this buffer was
        // the last reference to it.
        if (res != OK) {
            ALOGE("Can't return buffer to its stream: %s (%d)",
                    strerror(-res), res);
        }
    }
}
// Thread-safe wrapper: takes the stream lock, delegates to the subclass's
// returnBufferLocked(), fires buffer listeners on success, and always
// signals waiters regardless of the outcome.
status_t Camera3Stream::returnBuffer(const camera3_stream_buffer &buffer,
        nsecs_t timestamp) {
    ATRACE_CALL();
    Mutex::Autolock l(mLock);
    /**
     * TODO: Check that the state is valid first.
     *
     * <HAL3.2 IN_CONFIG and IN_RECONFIG in addition to CONFIGURED.
     * >= HAL3.2 CONFIGURED only
     *
     * Do this for getBuffer as well.
     */
    status_t res = returnBufferLocked(buffer, timestamp);
    if (res == OK) {
        fireBufferListenersLocked(buffer, /*acquired*/false, /*output*/true);
    }
    // Even if returning the buffer failed, we still want to signal whoever
    // is waiting for the buffer to be returned.
    mOutputBufferReturnedSignal.signal();
    return res;
}
Camera3OutputStream::returnBufferLocked
// Output-stream hook invoked with mLock held: delegates to the shared
// returnAnyBufferLocked() and records the timestamp of the last returned
// buffer on success.
status_t Camera3OutputStream::returnBufferLocked(
        const camera3_stream_buffer &buffer, nsecs_t timestamp) {
    ATRACE_CALL();
    status_t res = returnAnyBufferLocked(buffer, timestamp, /*output*/true);
    if (res != OK) {
        return res;
    }
    mLastTimestamp = timestamp;
    return OK;
}
// Shared input/output buffer-return path (mLock held): drops the per-buffer
// strong reference, runs precondition checks, merges the release fence into
// the stream's combined fence, updates handed-out counts, and marks the
// stream idle with the status tracker once all buffers are back.
status_t Camera3IOStreamBase::returnAnyBufferLocked(
        const camera3_stream_buffer &buffer,
        nsecs_t timestamp,
        bool output) {
    status_t res;
    // returnBuffer may be called from a raw pointer, not a sp<>, and we'll be
    // decrementing the internal refcount next. In case this is the last ref, we
    // might get destructed on the decStrong(), so keep an sp around until the
    // end of the call - otherwise have to sprinkle the decStrong on all exit
    // points.
    sp<Camera3IOStreamBase> keepAlive(this);
    decStrong(this);
    if ((res = returnBufferPreconditionCheckLocked()) != OK) {
        return res;
    }
    sp<Fence> releaseFence;
    res = returnBufferCheckedLocked(buffer, timestamp, output,
                                    &releaseFence);
    // Res may be an error, but we still want to decrement our owned count
    // to enable clean shutdown. So we'll just return the error but otherwise
    // carry on
    if (releaseFence != 0) {
        mCombinedFence = Fence::merge(mName, mCombinedFence, releaseFence);
    }
    if (output) {
        mHandoutOutputBufferCount--;
    }
    mHandoutTotalBufferCount--;
    if (mHandoutTotalBufferCount == 0 && mState != STATE_IN_CONFIG &&
            mState != STATE_IN_RECONFIG && mState != STATE_PREPARING) {
        /**
         * Avoid a spurious IDLE->ACTIVE->IDLE transition when using buffers
         * before/after register_stream_buffers during initial configuration
         * or re-configuration, or during prepare pre-allocation
         */
        ALOGV("%s: Stream %d: All buffers returned; now idle", __FUNCTION__,
                mId);
        sp<StatusTracker> statusTracker = mStatusTracker.promote();
        if (statusTracker != 0) {
            statusTracker->markComponentIdle(mStatusId, mCombinedFence);
        }
    }
    mBufferReturnedSignal.signal();
    if (output) {
        mLastTimestamp = timestamp;
    }
    return res;
}
Camera3OutputStream::returnBufferCheckedLocked
这里消费者 queueBuffer,真正开始消费 Camera 帧。它会调用 currentConsumer 的 queueBuffer 方法,而具体的 Consumer 则是在应用层初始化 Camera 时进行绑定的,典型的 Consumer 有 SurfaceTexture,ImageReader 等,而在 Native 层中,它会调用 BufferQueueProducer 的 queueBuffer 方法:
// Returns one buffer to the consumer ANativeWindow attached to this output
// stream: errored buffers are cancelled, good buffers get the capture
// timestamp set and are queued to the consumer. This is the handoff that
// makes the frame visible to the bound consumer (SurfaceTexture,
// ImageReader, ...).
status_t Camera3OutputStream::returnBufferCheckedLocked(
const camera3_stream_buffer &buffer,
nsecs_t timestamp,
bool output,
/*out*/
sp<Fence> *releaseFenceOut) {
(void)output;
ALOG_ASSERT(output, "Expected output to be true");
status_t res;
// Fence management - always honor release fence from HAL
sp<Fence> releaseFence = new Fence(buffer.release_fence);
int anwReleaseFence = releaseFence->dup();
/**
 * Briefly release the lock to avoid deadlock with
 * StreamingProcessor::startStream -> Camera3Stream::isConfiguring
 * during queueBuffer (this thread will enter
 * StreamingProcessor::onFrameAvailable)
 */
sp<ANativeWindow> currentConsumer = mConsumer;
mLock.unlock();
/**
 * Return the buffer to the ANativeWindow
 */
if (buffer.status == CAMERA3_BUFFER_STATUS_ERROR) {
// Cancel the buffer
res = currentConsumer->cancelBuffer(currentConsumer.get(),
container_of(buffer.buffer, ANativeWindowBuffer, handle),
anwReleaseFence);
if (res != OK) {
ALOGE("%s: Stream %d: Error cancelling buffer to native window:"
" %s (%d)", __FUNCTION__, mId, strerror(-res), res);
}
} else {
if (mTraceFirstBuffer && (stream_type == CAMERA3_STREAM_OUTPUT)) {
{
char traceLog[48];
snprintf(traceLog, sizeof(traceLog), "Stream %d: first full buffer\n", mId);
ATRACE_NAME(traceLog);
}
mTraceFirstBuffer = false;
}
// Set the capture timestamp on the window
res = native_window_set_buffers_timestamp(mConsumer.get(), timestamp);
if (res != OK) {
ALOGE("%s: Stream %d: Error setting timestamp: %s (%d)",
__FUNCTION__, mId, strerror(-res), res);
return res;
}
// Queue the buffer to the consumer
res = currentConsumer->queueBuffer(currentConsumer.get(),
container_of(buffer.buffer, ANativeWindowBuffer, handle),
anwReleaseFence);
if (res != OK) {
ALOGE("%s: Stream %d: Error queueing buffer to native window: "
"%s (%d)", __FUNCTION__, mId, strerror(-res), res);
}
}
mLock.lock();
// Once a valid buffer has been returned to the queue, can no longer
// dequeue all buffers for preallocation.
if (buffer.status != CAMERA3_BUFFER_STATUS_ERROR) {
mStreamUnpreparable = true;
}
if (res != OK) {
close(anwReleaseFence);
}
*releaseFenceOut = releaseFence;
return res;
}
BufferQueueProducer 的 queueBuffer 方法
/*
 * Queues a filled producer slot into the BufferQueue and notifies the
 * consumer (onFrameAvailable / onFrameReplaced) outside the core lock,
 * using callback tickets to keep notifications in order.
 *
 * FIX: "input.deflate(&timestamp, ...)" had been mangled to
 * "input.deflate(×tamp, ...)" by HTML-entity corruption ("&times" -> ×);
 * the address-of operator and variable name are restored.
 */
status_t BufferQueueProducer::queueBuffer(int slot,
        const QueueBufferInput &input, QueueBufferOutput *output) {
    ATRACE_CALL();
    ATRACE_BUFFER_INDEX(slot);
    int64_t timestamp;
    bool isAutoTimestamp;
    android_dataspace dataSpace;
    Rect crop;
    int scalingMode;
    uint32_t transform;
    uint32_t stickyTransform;
    bool async;
    sp<Fence> fence;
    input.deflate(&timestamp, &isAutoTimestamp, &dataSpace, &crop,
            &scalingMode, &transform, &async, &fence, &stickyTransform);
    Region surfaceDamage = input.getSurfaceDamage();
    if (fence == NULL) {
        BQ_LOGE("queueBuffer: fence is NULL");
        return BAD_VALUE;
    }
    switch (scalingMode) {
        case NATIVE_WINDOW_SCALING_MODE_FREEZE:
        case NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW:
        case NATIVE_WINDOW_SCALING_MODE_SCALE_CROP:
        case NATIVE_WINDOW_SCALING_MODE_NO_SCALE_CROP:
            break;
        default:
            BQ_LOGE("queueBuffer: unknown scaling mode %d", scalingMode);
            return BAD_VALUE;
    }
    sp<IConsumerListener> frameAvailableListener;
    sp<IConsumerListener> frameReplacedListener;
    int callbackTicket = 0;
    BufferItem item;
    { // Autolock scope
        Mutex::Autolock lock(mCore->mMutex);
        if (mCore->mIsAbandoned) {
            BQ_LOGE("queueBuffer: BufferQueue has been abandoned");
            return NO_INIT;
        }
        const int maxBufferCount = mCore->getMaxBufferCountLocked(async);
        if (async && mCore->mOverrideMaxBufferCount) {
            // FIXME: Some drivers are manually setting the buffer count
            // (which they shouldn't), so we do this extra test here to
            // handle that case. This is TEMPORARY until we get this fixed.
            if (mCore->mOverrideMaxBufferCount < maxBufferCount) {
                BQ_LOGE("queueBuffer: async mode is invalid with "
                        "buffer count override");
                return BAD_VALUE;
            }
        }
        // Validate that the slot is owned by the producer and was requested.
        if (slot < 0 || slot >= maxBufferCount) {
            BQ_LOGE("queueBuffer: slot index %d out of range [0, %d)",
                    slot, maxBufferCount);
            return BAD_VALUE;
        } else if (mSlots[slot].mBufferState != BufferSlot::DEQUEUED) {
            BQ_LOGE("queueBuffer: slot %d is not owned by the producer "
                    "(state = %d)", slot, mSlots[slot].mBufferState);
            return BAD_VALUE;
        } else if (!mSlots[slot].mRequestBufferCalled) {
            BQ_LOGE("queueBuffer: slot %d was queued without requesting "
                    "a buffer", slot);
            return BAD_VALUE;
        }
        BQ_LOGV("queueBuffer: slot=%d/%" PRIu64 " time=%" PRIu64 " dataSpace=%d"
                " crop=[%d,%d,%d,%d] transform=%#x scale=%s",
                slot, mCore->mFrameCounter + 1, timestamp, dataSpace,
                crop.left, crop.top, crop.right, crop.bottom, transform,
                BufferItem::scalingModeName(static_cast<uint32_t>(scalingMode)));
        // The crop rect must fit inside the buffer.
        const sp<GraphicBuffer>& graphicBuffer(mSlots[slot].mGraphicBuffer);
        Rect bufferRect(graphicBuffer->getWidth(), graphicBuffer->getHeight());
        Rect croppedRect;
        crop.intersect(bufferRect, &croppedRect);
        if (croppedRect != crop) {
            BQ_LOGE("queueBuffer: crop rect is not contained within the "
                    "buffer in slot %d", slot);
            return BAD_VALUE;
        }
        // Override UNKNOWN dataspace with consumer default
        if (dataSpace == HAL_DATASPACE_UNKNOWN) {
            dataSpace = mCore->mDefaultBufferDataSpace;
        }
        mSlots[slot].mFence = fence;
        mSlots[slot].mBufferState = BufferSlot::QUEUED;
        ++mCore->mFrameCounter;
        mSlots[slot].mFrameNumber = mCore->mFrameCounter;
        // Populate the BufferItem handed to the consumer.
        item.mAcquireCalled = mSlots[slot].mAcquireCalled;
        item.mGraphicBuffer = mSlots[slot].mGraphicBuffer;
        item.mCrop = crop;
        item.mTransform = transform &
                ~static_cast<uint32_t>(NATIVE_WINDOW_TRANSFORM_INVERSE_DISPLAY);
        item.mTransformToDisplayInverse =
                (transform & NATIVE_WINDOW_TRANSFORM_INVERSE_DISPLAY) != 0;
        item.mScalingMode = static_cast<uint32_t>(scalingMode);
        item.mTimestamp = timestamp;
        item.mIsAutoTimestamp = isAutoTimestamp;
        item.mDataSpace = dataSpace;
        item.mFrameNumber = mCore->mFrameCounter;
        item.mSlot = slot;
        item.mFence = fence;
        item.mIsDroppable = mCore->mDequeueBufferCannotBlock || async;
        item.mSurfaceDamage = surfaceDamage;
        mStickyTransform = stickyTransform;
        if (mCore->mQueue.empty()) {
            // When the queue is empty, we can ignore mDequeueBufferCannotBlock
            // and simply queue this buffer
            mCore->mQueue.push_back(item);
            frameAvailableListener = mCore->mConsumerListener;
        } else {
            // When the queue is not empty, we need to look at the front buffer
            // state to see if we need to replace it
            BufferQueueCore::Fifo::iterator front(mCore->mQueue.begin());
            if (front->mIsDroppable) {
                // If the front queued buffer is still being tracked, we first
                // mark it as freed
                if (mCore->stillTracking(front)) {
                    mSlots[front->mSlot].mBufferState = BufferSlot::FREE;
                    mCore->mFreeBuffers.push_front(front->mSlot);
                }
                // Overwrite the droppable buffer with the incoming one
                *front = item;
                frameReplacedListener = mCore->mConsumerListener;
            } else {
                mCore->mQueue.push_back(item);
                frameAvailableListener = mCore->mConsumerListener;
            }
        }
        mCore->mBufferHasBeenQueued = true;
        mCore->mDequeueCondition.broadcast();
        output->inflate(mCore->mDefaultWidth, mCore->mDefaultHeight,
                mCore->mTransformHint,
                static_cast<uint32_t>(mCore->mQueue.size()));
        ATRACE_INT(mCore->mConsumerName.string(), mCore->mQueue.size());
        // Take a ticket for the callback functions
        callbackTicket = mNextCallbackTicket++;
        mCore->validateConsistencyLocked();
    } // Autolock scope
    // Wait without lock held
    if (mCore->mConnectedApi == NATIVE_WINDOW_API_EGL) {
        // Waiting here allows for two full buffers to be queued but not a
        // third. In the event that frames take varying time, this makes a
        // small trade-off in favor of latency rather than throughput.
        //mLastQueueBufferFence->waitForever("Throttling EGL Production");
        mLastQueueBufferFence = fence;
    }
    // Don't send the GraphicBuffer through the callback, and don't send
    // the slot number, since the consumer shouldn't need it
    item.mGraphicBuffer.clear();
    item.mSlot = BufferItem::INVALID_BUFFER_SLOT;
    // Call back without the main BufferQueue lock held, but with the callback
    // lock held so we can ensure that callbacks occur in order
    {
        Mutex::Autolock lock(mCallbackMutex);
        while (callbackTicket != mCurrentCallbackTicket) {
            mCallbackCondition.wait(mCallbackMutex);
        }
        if (frameAvailableListener != NULL) {
            // Invokes the IConsumerListener::onFrameAvailable callback
            // registered by the consumer (e.g. SurfaceTexture) to process
            // the new frame.
            frameAvailableListener->onFrameAvailable(item);
        } else if (frameReplacedListener != NULL) {
            frameReplacedListener->onFrameReplaced(item);
        }
        ++mCurrentCallbackTicket;
        mCallbackCondition.broadcast();
    }
    return NO_ERROR;
}
它最后会调用 Consumer 的回调 FrameAvailableListener 的 onFrameAvailable 方法,到这里,就比较清晰为什么我们在写 Camera 应用,为其初始化 Surface 时,我们需要重写 FrameAvailableListener 了。