一. 高通 camera hal 简介(camera.msm8998.so)
以高通 msm8998 平台为例介绍高通 camera hal 。
库文件名称:camera.msm8998.so
在线查看源码路径:http://xrefandroid.com/android-11.0.0_r48/
源码目录:/hardware/qcom/camera/msm8998/QCamera2
主要文件:QCamera2Hal.cpp 定义了 HAL_MODULE_INFO_SYM
从https://blog.csdn.net/kk3087961/article/details/135992303可知,
CameraProvider初始化时,会调用hardware.c 中load(...)方法,
hardware.load(...) 方法 先打开camera hal so库(camera.msm8998.so),然后调用hmi = (struct hw_module_t *)dlsym(handle, HAL_MODULE_INFO_SYM_AS_STR),hmi为从camera.msm8998.so获取的HMI(HAL_MODULE_INFO_SYM)方法返回的hw_module_t对象.
即上层最终调用camera hal的入口为HAL_MODULE_INFO_SYM。
QCamera2Hal.cpp 中定义的 HAL_MODULE_INFO_SYM 初始化 camera_module_t 对象,
camera_module_t 对象中的方法从 QCamera2Factory 读取,从 gQCamera2Factory( QCamera2Factory类对象) 获取。
module_api_version 为 CAMERA_MODULE_API_VERSION_2_4,从宏定义可知为 HARDWARE_MODULE_API_VERSION(2, 4) => HARDWARE_MAKE_API_VERSION(2, 4)
hal_api_version为 HARDWARE_HAL_API_VERSION,从宏定义可知为 HARDWARE_MAKE_API_VERSION(1, 0)
CameraProvider 调用hal接口方法 get_number_of_cameras 时会创建QCamera2Factory类对象gQCamera2Factory,
QCamera2Factory类 构造方法 QCamera2Factory::QCamera2Factory
1.调用 get_num_of_cameras()获取摄像头数,该方法
调用ioctl(dev_fd, MEDIA_IOC_ENUM_ENTITIES, &entity) 将获取的entity.name 赋值给g_cam_ctrl.video_dev_name[num_cameras]
调用get_sensor_info , 调用ioctl(dev_fd, MEDIA_IOC_ENUM_ENTITIES, &entity) 从驱动获取
facing,mount_angle,is_yuv,type并赋值给 g_cam_ctrl.info[num_cameras]
sort_camera_info 对摄像头信息进行重新排序,0-背面主摄像头 1-前主摄像头
2.如果宏定义了QCAMERA_HAL1_SUPPORT 支持hal1 ,
调用QCameraMuxer::getCameraMuxer(&gQCameraMuxer, mNumOfCameras) 通过new QCameraMuxer(num_of_cameras) 获取gQCameraMuxer对象,
如果是hal1则通过 gQCameraMuxer 对象调用接口方法;否则hal3通过 gQCamera2Factory对象调用接口方法。
遍历 摄像头如果宏定义了QCAMERA_HAL1_SUPPORT 支持hal1
则设置mHalDescriptors[i].device_version 为CAMERA_DEVICE_API_VERSION_1_0,否则设置为 CAMERA_DEVICE_API_VERSION_3_0。
hardware/qcom/camera/msm8998/QCamera2/QCamera2Hal.cpp
/* Shared hw_module_t header for this camera HAL module.
 * module_api_version 2.4 selects the camera-module interface level; all
 * module operations are dispatched through QCamera2Factory::mModuleMethods
 * (whose .open is QCamera2Factory::camera_device_open). */
static hw_module_t camera_common = {
.tag = HARDWARE_MODULE_TAG,
.module_api_version = CAMERA_MODULE_API_VERSION_2_4,
.hal_api_version = HARDWARE_HAL_API_VERSION,
.id = CAMERA_HARDWARE_MODULE_ID,
.name = "QCamera Module",
.author = "Qualcomm Innovation Center Inc",
.methods = &qcamera::QCamera2Factory::mModuleMethods,
.dso = NULL,
.reserved = {0}
};
/* HAL_MODULE_INFO_SYM ("HMI") is the well-known symbol that hardware.c's
 * load() resolves with dlsym() after dlopen()ing camera.msm8998.so — i.e.
 * the framework's single entry point into this HAL. Every module-level
 * callback is forwarded to a static method of QCamera2Factory (vendor tags
 * come from QCamera3VendorTags). open_legacy and init are not provided. */
camera_module_t HAL_MODULE_INFO_SYM = {
.common = camera_common,
.get_number_of_cameras = qcamera::QCamera2Factory::get_number_of_cameras,
.get_camera_info = qcamera::QCamera2Factory::get_camera_info,
.set_callbacks = qcamera::QCamera2Factory::set_callbacks,
.get_vendor_tag_ops = qcamera::QCamera3VendorTags::get_vendor_tag_ops,
.open_legacy = NULL,
.set_torch_mode = qcamera::QCamera2Factory::set_torch_mode,
.init = NULL,
.reserved = {0}
};
hardware/qcom/camera/msm8998/QCamera2/QCamera2Factory.cpp
/**
 * QCamera2Factory constructor.
 *
 * Counts the cameras via get_num_of_cameras(), decides between HAL1
 * (QCameraMuxer, dual-camera capable) and HAL3 based on the
 * persist.camera.HAL3.enabled property and the QCAMERA_HAL1_SUPPORT build
 * flag, and fills mHalDescriptors with the device API version to report
 * for each camera id.
 *
 * Fix: the excerpt had an extra, unbalanced closing brace after the
 * constructor body; it has been removed so the block is syntactically valid.
 */
QCamera2Factory::QCamera2Factory()
{
    mHalDescriptors = NULL;
    mCallbacks = NULL;
    mNumOfCameras = get_num_of_cameras();
    mNumOfCameras_expose = get_num_of_cameras_to_expose();
    int bDualCamera = 0;
    char propDefault[PROPERTY_VALUE_MAX];
    char prop[PROPERTY_VALUE_MAX];
    property_get("persist.camera.HAL3.enabled", prop, "1");
    int isHAL3Enabled = atoi(prop);
#ifndef QCAMERA_HAL1_SUPPORT
    /* Without HAL1 support compiled in, HAL3 is the only option. */
    isHAL3Enabled = 1;
#endif
    // Signifies whether system has to enable dual camera mode
    snprintf(propDefault, PROPERTY_VALUE_MAX, "%d", isDualCamAvailable(isHAL3Enabled));
    property_get("persist.camera.dual.camera", prop, propDefault);
    bDualCamera = atoi(prop);
    LOGH("dualCamera:%d ", bDualCamera);
#ifndef QCAMERA_HAL1_SUPPORT
    /* Dual-camera muxing is a HAL1-only feature. */
    bDualCamera = 0;
#endif
    if (bDualCamera) {
        LOGI("Enabling QCamera Muxer");
#ifdef QCAMERA_HAL1_SUPPORT
        if (!gQCameraMuxer) {
            QCameraMuxer::getCameraMuxer(&gQCameraMuxer, mNumOfCameras);
            if (!gQCameraMuxer) {
                LOGE("Error !! Failed to get QCameraMuxer");
            }
        }
#endif
    }
#ifdef QCAMERA_HAL1_SUPPORT
    if (!gQCameraMuxer && (mNumOfCameras > 0) &&(mNumOfCameras <= MM_CAMERA_MAX_NUM_SENSORS)) {
#else
    if ((mNumOfCameras > 0) &&(mNumOfCameras <= MM_CAMERA_MAX_NUM_SENSORS)) {
#endif
        mHalDescriptors = new hal_desc[mNumOfCameras];
        if ( NULL != mHalDescriptors) {
            uint32_t cameraId = 0;
            for (int i = 0; i < mNumOfCameras ; i++, cameraId++) {
                mHalDescriptors[i].cameraId = cameraId;
                // Set Device version to 3.x when both HAL3 is enabled & its BAYER sensor
                if (isHAL3Enabled && !(is_yuv_sensor(cameraId))) {
                    mHalDescriptors[i].device_version =
                            CAMERA_DEVICE_API_VERSION_3_0;
                } else {
                    mHalDescriptors[i].device_version =
                            CAMERA_DEVICE_API_VERSION_1_0;
                }
            }
        } else {
            LOGE("Not enough resources to allocate HAL descriptor table!");
        }
    } else {
        LOGI("%d camera devices detected!", mNumOfCameras);
    }
}
hardware/qcom/camera/msm8998/QCamera2/stack/mm-camera-interface/src/mm_camera_interface.c
/* Excerpt: discovers camera sensors (see get_sensor_info) and orders them
 * before the count is returned to the factory. */
uint8_t get_num_of_cameras()
{
...
get_sensor_info();
/*
 * Re-sort the camera info so back cameras get lower ids than front ones.
 * The exposed camera order is:
 *   0  - back main camera
 *   1  - front main camera
 *   ++ - back aux camera
 *   ++ - front aux camera
 *   ++ - back main + back aux camera
 *   ++ - front main + front aux camera
 *   ++ - secure camera
 */
sort_camera_info();
}
/*
 * Probe every /dev/media* node, enumerate its media entities, and for each
 * camera-sensor sub-device decode facing / mount angle / type / yuv / secure
 * from entity.flags into the global g_cam_ctrl tables, indexed in discovery
 * order.
 */
void get_sensor_info()
{
int rc = 0;
int dev_fd = -1;
struct media_device_info mdev_info;
int num_media_devices = 0;
size_t num_cameras = 0;
LOGD("E");
/* Outer loop: walk /dev/media0, /dev/media1, ... until open() fails. */
while (1) {
char dev_name[32];
snprintf(dev_name, sizeof(dev_name), "/dev/media%d", num_media_devices);
dev_fd = open(dev_name, O_RDWR | O_NONBLOCK);
if (dev_fd < 0) {
LOGD("Done discovering media devices\n");
break;
}
num_media_devices++;
memset(&mdev_info, 0, sizeof(mdev_info));
rc = ioctl(dev_fd, MEDIA_IOC_DEVICE_INFO, &mdev_info);
if (rc < 0) {
LOGE("Error: ioctl media_dev failed: %s\n", strerror(errno));
close(dev_fd);
dev_fd = -1;
num_cameras = 0;
break;
}
/* Skip media devices that are not the MSM camera configuration node. */
if(strncmp(mdev_info.model, MSM_CONFIGURATION_NAME, sizeof(mdev_info.model)) != 0) {
close(dev_fd);
dev_fd = -1;
continue;
}
unsigned int num_entities = 1;
/* Inner loop: enumerate entities until MEDIA_IOC_ENUM_ENTITIES fails. */
while (1) {
struct media_entity_desc entity;
uint32_t temp;
uint32_t mount_angle;
uint32_t facing;
int32_t type = 0;
uint8_t is_yuv;
uint8_t is_secure;
memset(&entity, 0, sizeof(entity));
entity.id = num_entities++;
rc = ioctl(dev_fd, MEDIA_IOC_ENUM_ENTITIES, &entity);
if (rc < 0) {
LOGD("Done enumerating media entities\n");
rc = 0;
break;
}
/* Only sensor sub-devices are of interest here. */
if(entity.type == MEDIA_ENT_T_V4L2_SUBDEV &&
entity.group_id == MSM_CAMERA_SUBDEV_SENSOR) {
/* entity.flags packs the sensor properties: bits 8..15 hold the mount
 * angle in units of 90 degrees; the low bits carry facing / type /
 * format / secure, tested via the CAM_SENSOR_*_MASK values below. */
temp = entity.flags >> 8;
mount_angle = (temp & 0xFF) * 90;
facing = ((entity.flags & CAM_SENSOR_FACING_MASK) ?
CAMERA_FACING_FRONT:CAMERA_FACING_BACK);
if (entity.flags & CAM_SENSOR_TYPE_MASK) {
type = CAM_TYPE_AUX;
} else {
type = CAM_TYPE_MAIN;
}
is_yuv = ((entity.flags & CAM_SENSOR_FORMAT_MASK) ?
CAM_SENSOR_YUV:CAM_SENSOR_RAW);
is_secure = ((entity.flags & CAM_SENSOR_SECURE_MASK) ?
CAM_TYPE_SECURE:0);
LOGL("index = %u flag = %x mount_angle = %u "
"facing = %u type: %u is_yuv = %u\n",
(unsigned int)num_cameras, (unsigned int)temp,
(unsigned int)mount_angle, (unsigned int)facing,
(unsigned int)type, (uint8_t)is_yuv);
/* Publish the decoded properties into the global camera table. */
g_cam_ctrl.info[num_cameras].facing = (int)facing;
g_cam_ctrl.info[num_cameras].orientation = (int)mount_angle;
g_cam_ctrl.cam_type[num_cameras] = type | is_secure;
g_cam_ctrl.is_yuv[num_cameras] = is_yuv;
LOGD("dev_info[id=%zu,name='%s', facing = %d, angle = %d type = %d]\n",
num_cameras, g_cam_ctrl.video_dev_name[num_cameras],
g_cam_ctrl.info[num_cameras].facing,
g_cam_ctrl.info[num_cameras].orientation,
g_cam_ctrl.cam_type[num_cameras]);
num_cameras++;
continue;
}
}
close(dev_fd);
dev_fd = -1;
}
LOGD("num_cameras=%d\n", g_cam_ctrl.num_cam);
return;
}
hardware/libhardware/include/hardware/camera_common.h
typedef struct camera_module {
/**
 * Must be the first member, so that camera_module_t can be cast to
 * hw_module_t.
 */
hw_module_t common;
/**
 * Returns the number of camera devices.
 */
int (*get_number_of_cameras)(void);
/**
 * Returns static camera information for the given camera device.
 */
int (*get_camera_info)(int camera_id, struct camera_info *info);
/**
 * set_callbacks:
 *
 * Provides callback function pointers to the HAL module, used to notify
 * the framework of asynchronous camera module events. The framework calls
 * this once, after get_number_of_cameras(), following the initial load of
 * the camera HAL module.
 */
int (*set_callbacks)(const camera_module_callbacks_t *callbacks);
/**
 * Gets the methods used to query vendor-extension metadata tag info.
 */
void (*get_vendor_tag_ops)(vendor_tag_ops_t* ops);
/**
 * Opens a specific legacy camera HAL device version, if multiple device
 * HAL API versions are supported by this camera HAL module. For example,
 * if the module supports both CAMERA_DEVICE_API_VERSION_1_0 and
 * CAMERA_DEVICE_API_VERSION_3_x, the framework can call this function to
 * open the camera as a CAMERA_DEVICE_API_VERSION_1_0 device.
 */
int (*open_legacy)(const struct hw_module_t* module, const char* id,
uint32_t halVersion, struct hw_device_t** device);
/**
 * Turns the torch (flash) mode on or off.
 */
int (*set_torch_mode)(const char* camera_id, bool enabled);
/**
 * Called by the camera service before any other method, right after the
 * camera HAL library is successfully loaded.
 */
int (*init)();
/* Reserved for future use */
void* reserved[5];
} camera_module_t;
typedef struct camera_info {
/**
 * Direction the camera faces.
 * Version information (based on camera_module_t.common.module_api_version):
 *  CAMERA_MODULE_API_VERSION_2_3 or lower: CAMERA_FACING_BACK or
 *  CAMERA_FACING_FRONT.
 *  CAMERA_MODULE_API_VERSION_2_4 or higher: CAMERA_FACING_BACK,
 *  CAMERA_FACING_FRONT or CAMERA_FACING_EXTERNAL.
 */
int facing;
/**
 * The angle the camera image needs to be rotated clockwise so it shows
 * upright in its natural orientation. Must be 0, 90, 180 or 270.
 * For example, suppose a device has a naturally tall screen with the back
 * camera sensor mounted in landscape. Looking at the screen: if the top
 * of the sensor aligns with the right edge of the screen in natural
 * orientation, the value is 90; for a front camera sensor whose top
 * aligns with the right of the screen, the value is 270.
 */
int orientation;
/**
 * The value of camera_device_t.common.version.
 * Version information (based on camera_module_t.common.module_api_version).
 */
uint32_t device_version;
/**
 * Fixed characteristics of the camera, including all static camera metadata.
 */
const camera_metadata_t *static_camera_characteristics;
/**
 * Resource cost of this camera: the total cost of all open cameras must be
 * <= 100 for an open to be allowed. E.g. with Camera1 cost = 50,
 * Camera2 cost = 100, Camera3 cost = 50: Camera1 and Camera3 may be open
 * together, but Camera1 and Camera2 may not (total would exceed 100).
 */
int resource_cost;
/**
 * Array of camera device IDs that cannot be opened at the same time as
 * this camera.
 */
char** conflicting_devices;
/**
 * Length of the array given in the conflicting_devices field.
 */
size_t conflicting_devices_length;
} camera_info_t;
二、hal1 流程
1. hal1 初始化
调用QCameraMuxer::getCameraMuxer(&gQCameraMuxer, mNumOfCameras) 初始化QCameraMuxer并赋值给gQCameraMuxer。
调用QCameraMuxer::QCameraMuxer 初始化遍历 m_nPhyCameras 为mNumOfCameras
调用QCameraMuxer::setupLogicalCameras //该方法中,
调用m_pPhyCamera = new qcamera_physical_descriptor_t[m_nPhyCameras]创建qcamera_physical_descriptor_t数组。
遍历该数组,获取摄像头基本信息,主要流程如下:
调用 QCamera2HardwareInterface::getCapabilities(cameraId,info, &m_pPhyCamera[i].type)获取 camera_info
调用p_info = get_cam_info(cameraId, p_cam_type) 返回 g_cam_ctrl.info ,该信息是在一.1 中初始化的。
将 g_cam_ctrl.info 赋值给 m_pPhyCamera[i],m_pLogicalCamera[index],
设置 m_pPhyCamera[i].device_version和m_pLogicalCamera[index].device_version 为 CAMERA_DEVICE_API_VERSION_1_0
2.hal1 打开摄像头
由https://blog.csdn.net/kk3087961/article/details/135992303 中4.2.2可知,应用层调用camera.open方法
会调用CameraProvider中 CameraDevice的实现类android::hardware::camera::device::V1_0::implementation::CameraDevice对象CameraDevice::open(const sp<ICameraDeviceCallback>& callback)方法,
该方法基于hal版本调用 mModule->openLegacy(mCameraId.c_str(),CAMERA_DEVICE_API_VERSION_1_0,(hw_device_t **)&mDevice)或者 mModule->open(mCameraId.c_str(), (hw_device_t **)&mDevice)
获取hw_device_t赋值给全局变量 mDevice ,后面对摄像头的操作都是基于mDevice,
ps:这里的 mModule 是CameraProvider初始化时 mModule = new CameraModule(rawModule),CameraModule中包含的 rawModule 是加载camera.msm8998.so获取的camera_module_t
并在CameraService 初始化时候 创建new android::hardware::camera::device::V1_0::implementation::CameraDevice(mModule, cameraId, mCameraDeviceNames)对象 时传递CameraModule对象参数
例如打开预览 CameraDevice::startPreview()调用 mDevice->ops->start_preview(mDevice)对应QCamera2HardwareInterface::start_preview(struct camera_device *device)
mModule->open(..) 会调用CameraModule::open(..) CameraModule::open方法中调用 mModule->common.methods->open(&mModule->common, id, device) CameraModule中的mModule对应上面的 rawModule
因为QCamera2Factory::mModuleMethods 初始化 .open = QCamera2Factory::camera_device_open ,
即打开摄像头调用 QCamera2Factory::camera_device_open =》QCameraMuxer::cameraDeviceOpen
QCameraMuxer::cameraDeviceOpen(int camera_id,struct hw_device_t **hw_device) 执行如下2步,其中参数hw_device为CameraDevice::open 传递的引用变量camera_device_t *device(hal1使用camera_device_t,而非camera3_device_t).
2.1.调用 cam = &m_pLogicalCamera[camera_id]根据cameraID 获取逻辑摄像头,遍历逻辑摄像头包含的物理摄像头(可能有主辅摄)
调用new QCamera2HardwareInterface((uint32_t)phyId)为每个物理摄像头cameraid创建QCamera2HardwareInterface对象,
QCamera2HardwareInterface::QCamera2HardwareInterface 方法 调用 m_stateMachine(this) 调用 QCameraStateMachine 构造函数 将QCamera2HardwareInterface对象赋值给 m_parent
QCameraStateMachine::QCameraStateMachine 构造函数中调用 pthread_create(&cmd_pid,NULL,smEvtProcRoutine,this)
在线程中执行QCameraStateMachine::smEvtProcRoutine 方法,该方法中开启无限循环处理 pme->api_queue 消息。
DeferredWorkThread.launch(deferredWorkRoutine, this) 开启 DeferredWorkThread线程并执行deferredWorkRoutine方法,该方法线程阻塞,并无限循环处理消息,
例如 mCmdQueue.dequeue() 后dw->cmd为 CMD_DEF_CREATE_JPEG_SESSION 则处理拍照的jpeg数据,
QCamera2HardwareInterface::QCamera2HardwareInterface 方法 还会调用
mCameraDevice.ops = &mCameraOps ,mCameraDevice为 camera_device_t 对象,即相机操作方法 (如预览、拍照等)camera_device_t.ops 是在QCamera2HardwareInterface::mCameraOps 中定义的。
ps:这里的类 QCamera2HardwareInterface 命名与cameraservice中的类CameraHardwareInterface是对应的。
cameraDeviceOpen 方法还会调用hw->openCamera(&hw_dev[i])
QCamera2HardwareInterface::openCamera(struct hw_device_t **hw_device) 方法执行如下2步.
2.1.1 该方法调用 QCamera2HardwareInterface::openCamera()
QCamera2HardwareInterface::openCamera() 主要调用
2.1.1.1 调用camera_open((uint8_t)mCameraId, &mCameraHandle)。
mm_camera_interface.camera_open(uint8_t camera_idx, mm_camera_vtbl_t **camera_vtbl) 初始化 cam_obj (mm_camera_obj_t 结构体 cam_obj->vtbl.camera_handle 高8位存储调用camera_open次数,低8位存储camera_idx )
及 cam_obj->vtbl.ops(通过mm_camera_ops将实现方法映射到mm_camera_ops_t结构体方法) 等信息 调用 mm_camera_open(cam_obj) 该方法。
mm_camera.mm_camera_open(mm_camera_obj_t *my_obj) 调用 open(dev_name, O_RDWR | O_NONBLOCK) 打开摄像头驱动返回值赋值给 my_obj->ctrl_fd,这里的my_obj就是cam_obj。mCameraHandle(mm_camera_vtbl_t 结构体)对应 cam_obj->vtbl 。
2.1.1.2调用 initCapabilities(mCameraId,mCameraHandle)
QCamera2HardwareInterface::initCapabilities 方法调用 getCapabilities(cameraHandle->ops, handle) 将返回的cam_capability_t信息 赋值给变量 gCamCapability[cameraId]
QCamera2HardwareInterface::getCapabilities(mm_camera_ops_t *ops,uint32_t cam_handle)方法中执行如下3步:
#1.先调用capabilityHeap->allocate(1, sizeof(cam_capability_t)) 为capabilityHeap缓冲区分配ION共享内存,
调用QCameraBufferMaps::makeSingletonBufMapList(CAM_MAPPING_BUF_TYPE_CAPABILITY,0 /*stream id*/, 0 /*buffer index*/, -1 /*plane index*/,0 /*cookie*/, capabilityHeap->getFd(0), sizeof(cam_capability_t),bufMapList, capabilityHeap->getPtr(0))
创建buf类型为CAM_MAPPING_BUF_TYPE_CAPABILITY的cam_buf_map_type_list结构体对象bufMapList。
其中mBufMapList.buf_maps[0].buffer 对应capabilityHeap.mPtr[0],mBufMapList.buf_maps[0].fd 对应实现共享内存文件描述词capabilityHeap.mMemInfo[0].fd
#2.调用ops->map_bufs(cam_handle,&bufMapList) 这里的cam_handle是 camera_open时 初始化的 高8位存储调用camera_open次数,低8位存储camera_idx ,从mm_camera_ops定义可知map_bufs 对应 mm_camera_intf_map_bufs 方法,
mm_camera_interface.mm_camera_intf_map_bufs(uint32_t camera_handle,const cam_buf_map_type_list *buf_map_list) 方法 中my_obj 是根据camera_handle获取 camera_open时初始化的cam_obj(mm_camera_obj_t 结构体),调用 mm_camera_map_bufs(my_obj, buf_map_list)
mm_camera.mm_camera_map_bufs(mm_camera_obj_t *my_obj,const cam_buf_map_type_list* buf_map_list) 调用 mm_camera_util_bundled_sendmsg(my_obj,&packet, sizeof(cam_sock_packet_t),sendfds, numbufs)
mm_camera.mm_camera_util_bundled_sendmsg(mm_camera_obj_t *my_obj,...) 调用mm_camera_socket_bundle_sendmsg(my_obj->ds_fd, msg, buf_size, sendfds, numfds)
mm_camera_sock.mm_camera_socket_bundle_sendmsg(int fd,...) 调用 sendmsg(fd, &(msgh), 0) socket发送数据到vendor通知映射共享内存。
#3.调用memcpy(cap_ptr, DATA_PTR(capabilityHeap, 0), sizeof(cam_capability_t)) 将capabilityHeap共享内存数据拷贝到cam_capability_t结构体变量 cap_ptr,然后返回cap_ptr.
2.1.1.3调用mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,camEvtHandle, (void *) this);
register_event_notify 对应 mm_camera_intf_register_event_notify 方法,这里camEvtHandle对应QCamera2HardwareInterface::camEvtHandle,
mm_camera_interface.mm_camera_intf_register_event_notify(...)方法调用 mm_camera_register_event_notify_internal(my_obj,evt_cb,user_data);
evt_cb 就是上面的QCamera2HardwareInterface::camEvtHandle,user_data就是QCamera2HardwareInterface对象
mm_camera.mm_camera_register_event_notify_internal(...) 方法将 evt_cb 和user_data 赋值给 my_obj->evt
2.1.2调用*hw_device = &mCameraDevice.common 将物理摄像头hw_device_t信息赋值给hw_device
2.2.调用*hw_device = &cam->dev.common,将获取的摄像头基本信息赋值给了CameraDevice::open 传递的引用变量camera_device_t *device(hal1使用camera_device_t,而非camera3_device_t),其中 cam为hal1 初始化时获取的m_pLogicalCamera对象。camera_device_t的第一个成员common是hw_device_t。
cam->dev.ops = &mCameraMuxerOps,即对上面摄像头的mDevice操作都是调用 mCameraMuxerOps 对应的QCameraMuxer类中方法如: QCameraMuxer::start_preview
hardware/interfaces/camera/common/1.0/default/CameraModule.cpp
/**
 * CameraModule::open — forwards to the raw camera HAL module's
 * hw_module_methods_t::open (QCamera2Factory::camera_device_open for this
 * HAL) and maps the HAL's error code through filterOpenErrorCode().
 *
 * Fix: `res` was used without a declaration in the excerpt; declare it.
 */
int CameraModule::open(const char* id, struct hw_device_t** device) {
    int res = filterOpenErrorCode(mModule->common.methods->open(&mModule->common, id, device));
    return res;
}
hardware/qcom/camera/msm8998/QCamera2/QCamera2Factory.cpp
/* Module open entry point (QCamera2Factory::mModuleMethods.open). Excerpt. */
int QCamera2Factory::camera_device_open(
const struct hw_module_t *module, const char *id,
struct hw_device_t **hw_device)
{
...
// For HAL1 (QCAMERA_HAL1_SUPPORT defined) the open is delegated to the muxer
rc = gQCameraMuxer->camera_device_open(module, id, hw_device)
}
hardware/qcom/camera/msm8998/QCamera2/HAL/QCameraMuxer.cpp
namespace qcamera {
QCameraMuxer *gMuxer = NULL; // process-wide muxer singleton

/*
 * Create-once accessor for the QCameraMuxer singleton: allocates gMuxer on
 * first call (num_of_cameras becomes m_nPhyCameras) and returns it through
 * *pMuxer on every call. CHECK_MUXER() validates the singleton before it is
 * handed out.
 */
void QCameraMuxer::getCameraMuxer(
QCameraMuxer** pMuxer, uint32_t num_of_cameras)
{
*pMuxer = NULL;
if (!gMuxer) {
gMuxer = new QCameraMuxer(num_of_cameras);
}
CHECK_MUXER();
*pMuxer = gMuxer;
LOGH("gMuxer: %p ", gMuxer);
return;
}
/*
 * QCameraMuxer constructor: records the physical camera count, builds the
 * logical camera tables (setupLogicalCameras), starts the MPO composition
 * thread and reads the dual-camera dump property.
 */
QCameraMuxer::QCameraMuxer(uint32_t num_of_cameras)
: mJpegClientHandle(0),
m_pPhyCamera(NULL),
m_pLogicalCamera(NULL),
m_pCallbacks(NULL),
m_bAuxCameraExposed(FALSE),
m_nPhyCameras(num_of_cameras),
m_nLogicalCameras(0),
m_MainJpegQ(releaseJpegInfo, this),
m_AuxJpegQ(releaseJpegInfo, this),
m_pRelCamMpoJpeg(NULL),
m_pMpoCallbackCookie(NULL),
m_pJpegCallbackCookie(NULL),
m_bDumpImages(FALSE),
m_bMpoEnabled(TRUE),
m_bFrameSyncEnabled(FALSE),
m_bRecordingHintInternallySet(FALSE)
{
setupLogicalCameras();
memset(&mJpegOps, 0, sizeof(mJpegOps));
memset(&mJpegMpoOps, 0, sizeof(mJpegMpoOps));
memset(&mGetMemoryCb, 0, sizeof(mGetMemoryCb));
memset(&mDataCb, 0, sizeof(mDataCb));
// initialize mutex for MPO composition
pthread_mutex_init(&m_JpegLock, NULL);
// launch MPO composition thread
m_ComposeMpoTh.launch(composeMpoRoutine, this);
//Check whether dual camera images need to be dumped
char prop[PROPERTY_VALUE_MAX];
property_get("persist.camera.dual.camera.dump", prop, "0");
m_bDumpImages = atoi(prop);
LOGH("dualCamera dump images:%d ", m_bDumpImages);
}
/**
 * QCameraMuxer::setupLogicalCameras
 *
 * Builds the physical camera table (m_pPhyCamera) by querying
 * QCamera2HardwareInterface::getCapabilities() for every physical camera id,
 * then groups the entries into logical cameras (m_pLogicalCamera): every
 * PRIMARY (main) camera becomes one logical camera, and each SECONDARY (aux)
 * camera with the same facing is attached to that logical camera's bundle.
 * All descriptors report CAMERA_DEVICE_API_VERSION_1_0 (HAL1).
 *
 * Fix: the entry/exit LOGH calls used the format "[%d] ... rc = %d" — two
 * conversion specifiers with only one argument (rc), which is undefined
 * behavior. The stray "[%d]" has been removed.
 *
 * @return rc from the last getCapabilities() call (NO_ERROR on success).
 */
int QCameraMuxer::setupLogicalCameras()
{
    int rc = NO_ERROR;
    char prop[PROPERTY_VALUE_MAX];
    int i = 0;
    int primaryType = CAM_TYPE_MAIN;

    LOGH("E: rc = %d", rc);
    // Signifies whether AUX camera has to be exposed as physical camera
    property_get("persist.camera.aux.camera", prop, "0");
    m_bAuxCameraExposed = atoi(prop);

    // Signifies whether AUX camera needs to be swapped
    property_get("persist.camera.auxcamera.swap", prop, "0");
    int swapAux = atoi(prop);
    if (swapAux != 0) {
        primaryType = CAM_TYPE_AUX;
    }

    m_pPhyCamera = new qcamera_physical_descriptor_t[m_nPhyCameras];
    memset(m_pPhyCamera, 0x00,
            (m_nPhyCameras * sizeof(qcamera_physical_descriptor_t)));
    uint32_t cameraId = 0;
    m_nLogicalCameras = 0;

    // Enumerate physical cameras and count the logical (main) ones
    for (i = 0; i < m_nPhyCameras ; i++, cameraId++) {
        camera_info *info = &m_pPhyCamera[i].cam_info;
        rc = QCamera2HardwareInterface::getCapabilities(cameraId,
                info, &m_pPhyCamera[i].type);
        m_pPhyCamera[i].id = cameraId;
        m_pPhyCamera[i].device_version = CAMERA_DEVICE_API_VERSION_1_0;
        m_pPhyCamera[i].mode = CAM_MODE_PRIMARY;
        if (!m_bAuxCameraExposed && (m_pPhyCamera[i].type != primaryType)) {
            m_pPhyCamera[i].mode = CAM_MODE_SECONDARY;
            LOGH("Camera ID: %d, Aux Camera, type: %d, facing: %d",
                    cameraId, m_pPhyCamera[i].type,
                    m_pPhyCamera[i].cam_info.facing);
        }
        else {
            m_nLogicalCameras++;
            LOGH("Camera ID: %d, Main Camera, type: %d, facing: %d",
                    cameraId, m_pPhyCamera[i].type,
                    m_pPhyCamera[i].cam_info.facing);
        }
    }

    // Allocate Logical Camera descriptors
    m_pLogicalCamera = new qcamera_logical_descriptor_t[m_nLogicalCameras];
    memset(m_pLogicalCamera, 0x00,
            (m_nLogicalCameras * sizeof(qcamera_logical_descriptor_t)));

    // Assign MAIN cameras for each logical camera
    int index = 0;
    for (i = 0; i < m_nPhyCameras ; i++) {
        if (m_pPhyCamera[i].mode == CAM_MODE_PRIMARY) {
            m_pLogicalCamera[index].nPrimaryPhyCamIndex = 0;
            m_pLogicalCamera[index].id = index;
            m_pLogicalCamera[index].device_version = CAMERA_DEVICE_API_VERSION_1_0;
            m_pLogicalCamera[index].pId[0] = i;
            m_pLogicalCamera[index].type[0] = CAM_TYPE_MAIN;
            m_pLogicalCamera[index].mode[0] = CAM_MODE_PRIMARY;
            m_pLogicalCamera[index].sync_3a[0] = CAM_3A_SYNC_FOLLOW;
            m_pLogicalCamera[index].facing = m_pPhyCamera[i].cam_info.facing;
            m_pLogicalCamera[index].numCameras++;
            LOGH("Logical Main Camera ID: %d, facing: %d,"
                    "Phy Id: %d type: %d mode: %d",
                    m_pLogicalCamera[index].id,
                    m_pLogicalCamera[index].facing,
                    m_pLogicalCamera[index].pId[0],
                    m_pLogicalCamera[index].type[0],
                    m_pLogicalCamera[index].mode[0]);
            index++;
        }
    }

    // Now assign AUX cameras to the logical camera with the same facing
    for (i = 0; i < m_nPhyCameras ; i++) {
        if (m_pPhyCamera[i].mode == CAM_MODE_SECONDARY) {
            for (int j = 0; j < m_nLogicalCameras; j++) {
                int n = m_pLogicalCamera[j].numCameras;
                ///@note n can only be 1 at this point
                if ((n < MAX_NUM_CAMERA_PER_BUNDLE) &&
                        (m_pLogicalCamera[j].facing ==
                        m_pPhyCamera[i].cam_info.facing)) {
                    m_pLogicalCamera[j].pId[n] = i;
                    m_pLogicalCamera[j].type[n] = CAM_TYPE_AUX;
                    m_pLogicalCamera[j].mode[n] = CAM_MODE_SECONDARY;
                    m_pLogicalCamera[j].sync_3a[n] = CAM_3A_SYNC_FOLLOW;
                    m_pLogicalCamera[j].numCameras++;
                    LOGH("Aux %d for Logical Camera ID: %d,"
                            "aux phy id:%d, type: %d mode: %d",
                            n, j, m_pLogicalCamera[j].pId[n],
                            m_pLogicalCamera[j].type[n], m_pLogicalCamera[j].mode[n]);
                }
            }
        }
    }

    // Print logical and physical camera tables
    for (i = 0; i < m_nLogicalCameras ; i++) {
        for (uint8_t j = 0; j < m_pLogicalCamera[i].numCameras; j++) {
            LOGH("Logical Camera ID: %d, index: %d, "
                    "facing: %d, Phy Id: %d type: %d mode: %d",
                    i, j, m_pLogicalCamera[i].facing,
                    m_pLogicalCamera[i].pId[j], m_pLogicalCamera[i].type[j],
                    m_pLogicalCamera[i].mode[j]);
        }
    }
    LOGH("X: rc = %d", rc);
    return rc;
}
/*
 * Opens the logical camera camera_id: creates a QCamera2HardwareInterface
 * for every physical camera in the logical bundle, opens each of them, and
 * hands back a camera_device_t whose ops table (mCameraMuxerOps) routes all
 * HAL1 operations through QCameraMuxer.
 */
int QCameraMuxer::cameraDeviceOpen(int camera_id,
struct hw_device_t **hw_device)
{
int rc = NO_ERROR;
uint32_t phyId = 0;
qcamera_logical_descriptor_t *cam = NULL;
char prop[PROPERTY_VALUE_MAX];
property_get("persist.camera.dc.frame.sync", prop, "1");
m_bFrameSyncEnabled = atoi(prop);
// Get logical camera
cam = &m_pLogicalCamera[camera_id];
// Only HAL1 devices can be opened through the muxer
if (m_pLogicalCamera[camera_id].device_version ==
CAMERA_DEVICE_API_VERSION_1_0) {
// HW Dev Holders
hw_device_t *hw_dev[cam->numCameras];
// Open all physical cameras
for (uint32_t i = 0; i < cam->numCameras; i++) {
phyId = cam->pId[i];
QCamera2HardwareInterface *hw =
new QCamera2HardwareInterface((uint32_t)phyId);
if (!hw) {
LOGE("Allocation of hardware interface failed");
return NO_MEMORY;
}
hw_dev[i] = NULL;
// Make Camera HWI aware of its mode
cam_sync_related_sensors_event_info_t info;
info.sync_control = CAM_SYNC_RELATED_SENSORS_ON;
info.mode = m_pPhyCamera[phyId].mode;
info.type = m_pPhyCamera[phyId].type;
rc = hw->setRelatedCamSyncInfo(&info);
hw->setFrameSyncEnabled(m_bFrameSyncEnabled);
if (rc != NO_ERROR) {
LOGE("setRelatedCamSyncInfo failed %d", rc);
delete hw;
return rc;
}
// Open the physical camera and record its device / HWI / session id
rc = hw->openCamera(&hw_dev[i]);
hw->getCameraSessionId(&m_pPhyCamera[phyId].camera_server_id);
m_pPhyCamera[phyId].dev = reinterpret_cast<camera_device_t*>(hw_dev[i]);
m_pPhyCamera[phyId].hwi = hw;
cam->sId[i] = m_pPhyCamera[phyId].camera_server_id;
LOGH("camera id %d server id : %d hw device %x, hw %x",
phyId, cam->sId[i], hw_dev[i], hw);
}
} else {
LOGE("Device version for camera id %d invalid %d",
camera_id, m_pLogicalCamera[camera_id].device_version);
return BAD_VALUE;
}
// Fill in the logical camera's camera_device_t and hand it to the caller;
// its ops point at the muxer so every HAL call fans out to all HWIs.
cam->dev.common.tag = HARDWARE_DEVICE_TAG;
cam->dev.common.version = HARDWARE_DEVICE_API_VERSION(1, 0);
cam->dev.common.close = close_camera_device;
cam->dev.ops = &mCameraMuxerOps;
cam->dev.priv = (void*)cam;
*hw_device = &cam->dev.common;
return rc;
}
/* HAL1 camera_device_ops_t vtable installed on the logical camera device
 * (cam->dev.ops) by cameraDeviceOpen(); every device operation goes through
 * the corresponding static QCameraMuxer method. */
camera_device_ops_t QCameraMuxer::mCameraMuxerOps = {
.set_preview_window = QCameraMuxer::set_preview_window,
.set_callbacks = QCameraMuxer::set_callBacks,
.enable_msg_type = QCameraMuxer::enable_msg_type,
.disable_msg_type = QCameraMuxer::disable_msg_type,
.msg_type_enabled = QCameraMuxer::msg_type_enabled,
.start_preview = QCameraMuxer::start_preview,
.stop_preview = QCameraMuxer::stop_preview,
.preview_enabled = QCameraMuxer::preview_enabled,
.store_meta_data_in_buffers= QCameraMuxer::store_meta_data_in_buffers,
.start_recording = QCameraMuxer::start_recording,
.stop_recording = QCameraMuxer::stop_recording,
.recording_enabled = QCameraMuxer::recording_enabled,
.release_recording_frame = QCameraMuxer::release_recording_frame,
.auto_focus = QCameraMuxer::auto_focus,
.cancel_auto_focus = QCameraMuxer::cancel_auto_focus,
.take_picture = QCameraMuxer::take_picture,
.cancel_picture = QCameraMuxer::cancel_picture,
.set_parameters = QCameraMuxer::set_parameters,
.get_parameters = QCameraMuxer::get_parameters,
.put_parameters = QCameraMuxer::put_parameters,
.send_command = QCameraMuxer::send_command,
.release = QCameraMuxer::release,
.dump = QCameraMuxer::dump,
};
}
hardware/qcom/camera/msm8998/QCamera2/stack/common/mm_camera_interface.h
/* Function table of the mm-camera-interface layer. Every entry takes the
 * camera_handle produced by camera_open() (high 8 bits: open count, low 8
 * bits: camera index) and is bound to a mm_camera_intf_* implementation via
 * the static mm_camera_ops initializer. */
typedef struct {
int32_t (*query_capability) (uint32_t camera_handle);
int32_t (*register_event_notify) (uint32_t camera_handle, mm_camera_event_notify_t evt_cb,void *user_data);
int32_t (*close_camera) (uint32_t camera_handle);
int32_t (*map_buf) (uint32_t camera_handle,uint8_t buf_type,int fd,size_t size,void *buffer);
int32_t (*map_bufs) (uint32_t camera_handle,const cam_buf_map_type_list *buf_map_list);
int32_t (*unmap_buf) (uint32_t camera_handle,uint8_t buf_type);
int32_t (*set_parms) (uint32_t camera_handle,parm_buffer_t *parms);
int32_t (*get_parms) (uint32_t camera_handle,parm_buffer_t *parms);
int32_t (*do_auto_focus) (uint32_t camera_handle);
int32_t (*cancel_auto_focus) (uint32_t camera_handle);
int32_t (*prepare_snapshot) (uint32_t camera_handle,int32_t do_af_flag);
int32_t (*start_zsl_snapshot) (uint32_t camera_handle, uint32_t ch_id);
int32_t (*stop_zsl_snapshot) (uint32_t camera_handle, uint32_t ch_id);
uint32_t (*add_channel) (uint32_t camera_handle,mm_camera_channel_attr_t *attr,mm_camera_buf_notify_t channel_cb,void *userdata);
int32_t (*delete_channel) (uint32_t camera_handle,uint32_t ch_id);
int32_t (*get_bundle_info) (uint32_t camera_handle,uint32_t ch_id,cam_bundle_config_t *bundle_info);
uint32_t (*add_stream) (uint32_t camera_handle,uint32_t ch_id);
int32_t (*delete_stream) (uint32_t camera_handle,uint32_t ch_id,uint32_t stream_id);
int32_t (*link_stream) (uint32_t camera_handle,uint32_t ch_id,uint32_t stream_id,uint32_t linked_ch_id);
int32_t (*config_stream) (uint32_t camera_handle,uint32_t ch_id,uint32_t stream_id,mm_camera_stream_config_t *config);
int32_t (*map_stream_buf) (uint32_t camera_handle,uint32_t ch_id,uint32_t stream_id,uint8_t buf_type,uint32_t buf_idx,int32_t plane_idx,int fd,size_t size,void *buffer);
int32_t (*map_stream_bufs) (uint32_t camera_handle,uint32_t ch_id,const cam_buf_map_type_list *buf_map_list);
int32_t (*unmap_stream_buf) (uint32_t camera_handle,uint32_t ch_id,uint32_t stream_id,uint8_t buf_type,uint32_t buf_idx,int32_t plane_idx);
int32_t (*set_stream_parms) (uint32_t camera_handle,uint32_t ch_id,uint32_t s_id,cam_stream_parm_buffer_t *parms);
int32_t (*get_stream_parms) (uint32_t camera_handle,uint32_t ch_id,uint32_t s_id,cam_stream_parm_buffer_t *parms);
int32_t (*start_channel) (uint32_t camera_handle,uint32_t ch_id, bool start_sensor_streaming);
int32_t (*start_sensor_streaming) (uint32_t camera_handle,uint32_t ch_id);
int32_t (*stop_channel) (uint32_t camera_handle,uint32_t ch_id,bool stop_immediately);
int32_t (*qbuf) (uint32_t camera_handle,uint32_t ch_id,mm_camera_buf_def_t *buf);
int32_t (*cancel_buffer) (uint32_t camera_handle,uint32_t ch_id,uint32_t stream_id,uint32_t buf_idx);
int32_t (*get_queued_buf_count) (uint32_t camera_handle,uint32_t ch_id,uint32_t stream_id);
int32_t (*request_super_buf) (uint32_t camera_handle,uint32_t ch_id,mm_camera_req_buf_t *buf);
int32_t (*cancel_super_buf_request) (uint32_t camera_handle,uint32_t ch_id);
int32_t (*flush_super_buf_queue) (uint32_t camera_handle,uint32_t ch_id, uint32_t frame_idx);
int32_t (*configure_notify_mode) (uint32_t camera_handle,uint32_t ch_id,mm_camera_super_buf_notify_mode_t notify_mode);
int32_t (*process_advanced_capture) (uint32_t camera_handle,uint32_t ch_id, mm_camera_advanced_capture_t type,int8_t start_flag, void *in_value);
int32_t (*get_session_id) (uint32_t camera_handle,uint32_t* sessionid);
int32_t (*set_dual_cam_cmd)(uint32_t camera_handle);
int32_t (*flush) (uint32_t camera_handle);
int32_t (*register_stream_buf_cb) (uint32_t camera_handle,uint32_t ch_id, uint32_t stream_id, mm_camera_buf_notify_t buf_cb,mm_camera_stream_cb_type cb_type, void *userdata);
int32_t (*register_frame_sync) (uint32_t camera_handle,uint32_t ch_id, uint32_t stream_id,mm_camera_intf_frame_sync_t *sync_attr);
int32_t (*handle_frame_sync_cb) (uint32_t camera_handle,uint32_t ch_id, uint32_t stream_id,mm_camera_cb_req_type req_type);
} mm_camera_ops_t;
/* Virtual table handed back by camera_open(): pairs the generated camera
 * handle with the shared mm_camera_ops function table. */
typedef struct {
uint32_t camera_handle;
mm_camera_ops_t *ops;
} mm_camera_vtbl_t;
hardware/qcom/camera/msm8998/QCamera2/stack/mm-camera-interface/src/mm_camera_interface.c
/* Returns the camera_info recorded for camera_id during sensor discovery
 * (get_sensor_info) and writes its sync type through pCamType. */
struct camera_info *get_cam_info(uint32_t camera_id, cam_sync_type_t *pCamType)
{
*pCamType = g_cam_ctrl.cam_type[camera_id];
return &g_cam_ctrl.info[camera_id];
}
/* Excerpt: allocates and initializes the per-camera mm_camera_obj_t, opens
 * the device (mm_camera_open) and returns its vtbl through camera_vtbl. */
int32_t camera_open(uint8_t camera_idx, mm_camera_vtbl_t **camera_vtbl)
{
...
memset(cam_obj, 0, sizeof(mm_camera_obj_t));
cam_obj->ctrl_fd = -1;
cam_obj->ds_fd = -1;
cam_obj->ref_count++;
cam_obj->my_num = 0;
//mm_camera_util_generate_handler returns (handler<<8) | index, where handler
//counts camera_open calls: the high 8 bits store the open count and the low
//8 bits store camera_idx
cam_obj->my_hdl = mm_camera_util_generate_handler(cam_idx);
cam_obj->vtbl.camera_handle = cam_obj->my_hdl; /* set handler */
cam_obj->vtbl.ops = &mm_camera_ops;
pthread_mutex_init(&cam_obj->cam_lock, NULL);
pthread_mutex_init(&cam_obj->muxer_lock, NULL);
/* unlock global interface lock, if not, in dual camera use case,
 * current open will block operation of another opened camera obj*/
pthread_mutex_lock(&cam_obj->cam_lock);
pthread_mutex_unlock(&g_intf_lock);
rc = mm_camera_open(cam_obj);
g_cam_ctrl.cam_obj[cam_idx] = cam_obj;
*camera_vtbl = &cam_obj->vtbl;
}
/* Looks up the video device node name recorded for the camera that
 * cam_handle refers to; returns NULL when the decoded index is out of
 * range. The index is extracted from the handle's low bits by
 * mm_camera_util_get_index_by_handler(). */
const char *mm_camera_util_get_dev_name(uint32_t cam_handle)
{
    uint8_t cam_idx = mm_camera_util_get_index_by_handler(cam_handle);

    if (cam_idx >= MM_CAMERA_MAX_NUM_SENSORS) {
        return NULL;
    }
    return g_cam_ctrl.video_dev_name[cam_idx];
}
/* Binds the mm_camera_ops_t function table to the mm_camera_intf_*
 * implementations; installed on every cam_obj->vtbl.ops by camera_open(). */
static mm_camera_ops_t mm_camera_ops = {
.query_capability = mm_camera_intf_query_capability,
.register_event_notify = mm_camera_intf_register_event_notify,
.close_camera = mm_camera_intf_close,
.set_parms = mm_camera_intf_set_parms,
.get_parms = mm_camera_intf_get_parms,
.do_auto_focus = mm_camera_intf_do_auto_focus,
.cancel_auto_focus = mm_camera_intf_cancel_auto_focus,
.prepare_snapshot = mm_camera_intf_prepare_snapshot,
.start_zsl_snapshot = mm_camera_intf_start_zsl_snapshot,
.stop_zsl_snapshot = mm_camera_intf_stop_zsl_snapshot,
.map_buf = mm_camera_intf_map_buf,
.map_bufs = mm_camera_intf_map_bufs,
.unmap_buf = mm_camera_intf_unmap_buf,
.add_channel = mm_camera_intf_add_channel,
.delete_channel = mm_camera_intf_del_channel,
.get_bundle_info = mm_camera_intf_get_bundle_info,
.add_stream = mm_camera_intf_add_stream,
.link_stream = mm_camera_intf_link_stream,
.delete_stream = mm_camera_intf_del_stream,
.config_stream = mm_camera_intf_config_stream,
.qbuf = mm_camera_intf_qbuf,
.cancel_buffer = mm_camera_intf_cancel_buf,
.get_queued_buf_count = mm_camera_intf_get_queued_buf_count,
.map_stream_buf = mm_camera_intf_map_stream_buf,
.map_stream_bufs = mm_camera_intf_map_stream_bufs,
.unmap_stream_buf = mm_camera_intf_unmap_stream_buf,
.set_stream_parms = mm_camera_intf_set_stream_parms,
.get_stream_parms = mm_camera_intf_get_stream_parms,
.start_channel = mm_camera_intf_start_channel,
.start_sensor_streaming = mm_camera_intf_start_sensor_streaming,
.stop_channel = mm_camera_intf_stop_channel,
.request_super_buf = mm_camera_intf_request_super_buf,
.cancel_super_buf_request = mm_camera_intf_cancel_super_buf_request,
.flush_super_buf_queue = mm_camera_intf_flush_super_buf_queue,
.configure_notify_mode = mm_camera_intf_configure_notify_mode,
.process_advanced_capture = mm_camera_intf_process_advanced_capture,
.get_session_id = mm_camera_intf_get_session_id,
.set_dual_cam_cmd = mm_camera_intf_set_dual_cam_cmd,
.flush = mm_camera_intf_flush,
.register_stream_buf_cb = mm_camera_intf_register_stream_buf_cb,
.register_frame_sync = mm_camera_intf_reg_frame_sync,
.handle_frame_sync_cb = mm_camera_intf_handle_frame_sync_cb
};
// Maps a bundled list of buffers into the camera backend for the camera
// identified by camera_handle. The packed handle may encode both a main and
// an aux camera; this excerpt only shows the main-camera path (aux_handle is
// resolved but its use is elided).
static int32_t mm_camera_intf_map_bufs(uint32_t camera_handle,
const cam_buf_map_type_list *buf_map_list)
{
int32_t rc = -1;
mm_camera_obj_t * my_obj = NULL;
uint32_t handle = get_main_camera_handle(camera_handle);
uint32_t aux_handle = get_aux_camera_handle(camera_handle);
...
// Look up the cam_obj (mm_camera_obj_t) that camera_open initialized for this handle
my_obj = mm_camera_util_get_camera_by_handler(handle);
rc = mm_camera_map_bufs(my_obj, buf_map_list);
return rc;
}
hardware/qcom/camera/msm8998/QCamera2/HAL/QCameraStateMachine.cpp
// Constructor: binds the state machine to its owning HWI instance, starts in
// the PREVIEW_STOPPED state, and spawns the event-processing thread
// (smEvtProcRoutine) that drains api_queue/evt_queue (excerpt).
QCameraStateMachine::QCameraStateMachine(QCamera2HardwareInterface *ctrl) :
api_queue(),
evt_queue()
{
m_parent = ctrl;
m_state = QCAMERA_SM_STATE_PREVIEW_STOPPED;
cmd_pid = 0;
cam_sem_init(&cmd_sem, 0);
// Thread entry is smEvtProcRoutine; `this` is passed so the routine can
// dequeue and dispatch commands back into this state machine.
pthread_create(&cmd_pid,
NULL,
smEvtProcRoutine,
this);
...
}
hardware/qcom/camera/msm8998/QCamera2/HAL/QCamera2HWI.cpp
/*
 * QCamera2HardwareInterface constructor: the per-camera HAL1 device object.
 * Initializes every member to its idle default, fills in the HAL1
 * camera_device_t handed back to the framework (ops table + device API 1.0),
 * and launches the deferred-work thread (excerpt).
 *
 * Fixes vs. the quoted excerpt: a bare prose line inside the body has been
 * turned into a comment, and `DeferredWorkThread.launch` is corrected to the
 * member `mDeferredWorkThread` used on the very next line.
 */
QCamera2HardwareInterface::QCamera2HardwareInterface(uint32_t cameraId)
: mCameraId(cameraId),
mCameraHandle(NULL),
mMasterCamera(CAM_TYPE_MAIN),
mCameraOpened(false),
mDualCamera(false),
m_pFovControl(NULL),
m_bRelCamCalibValid(false),
mPreviewWindow(NULL),
mMsgEnabled(0),
mStoreMetaDataInFrame(0),
mJpegCb(NULL),
mCallbackCookie(NULL),
mJpegCallbackCookie(NULL),
m_bMpoEnabled(TRUE),
m_stateMachine(this),
m_smThreadActive(true),
m_postprocessor(this),
m_thermalAdapter(QCameraThermalAdapter::getInstance()),
m_cbNotifier(this),
m_perfLockMgr(),
m_bPreviewStarted(false),
m_bRecordStarted(false),
m_currentFocusState(CAM_AF_STATE_INACTIVE),
mDumpFrmCnt(0U),
mDumpSkipCnt(0U),
mThermalLevel(QCAMERA_THERMAL_NO_ADJUSTMENT),
mActiveAF(false),
m_HDRSceneEnabled(false),
mLongshotEnabled(false),
mLiveSnapshotThread(0),
mIntPicThread(0),
mFlashNeeded(false),
mFlashConfigured(false),
mDeviceRotation(0U),
mCaptureRotation(0U),
mJpegExifRotation(0U),
mUseJpegExifRotation(false),
mIs3ALocked(false),
mPrepSnapRun(false),
mZoomLevel(0),
mPreviewRestartNeeded(false),
mVFrameCount(0),
mVLastFrameCount(0),
mVLastFpsTime(0),
mVFps(0),
mPFrameCount(0),
mPLastFrameCount(0),
mPLastFpsTime(0),
mPFps(0),
mLowLightConfigured(false),
mInstantAecFrameCount(0),
m_bIntJpegEvtPending(false),
m_bIntRawEvtPending(false),
mReprocJob(0),
mJpegJob(0),
mMetadataAllocJob(0),
mInitPProcJob(0),
mParamAllocJob(0),
mParamInitJob(0),
mOutputCount(0),
mInputCount(0),
mAdvancedCaptureConfigured(false),
mHDRBracketingEnabled(false),
mNumPreviewFaces(-1),
mJpegClientHandle(0),
mJpegHandleOwner(false),
mMetadataMem(NULL),
mCACDoneReceived(false),
m_bNeedRestart(false),
mBootToMonoTimestampOffset(0),
bDepthAFCallbacks(true),
m_bNeedHalPP(FALSE)
{
#ifdef TARGET_TS_MAKEUP
memset(&mFaceRect, -1, sizeof(mFaceRect));
#endif
getLogLevel();
ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL1_QCAMERA2HWI);
// Fill in the HAL1 camera_device_t the framework will call through.
mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
mCameraDevice.common.version = HARDWARE_DEVICE_API_VERSION(1, 0);
mCameraDevice.common.close = close_camera_device;
mCameraDevice.ops = &mCameraOps;
mCameraDevice.priv = this;
// Launch the deferred-work thread running deferredWorkRoutine: it blocks in
// an endless loop servicing deferred commands, e.g.
// CMD_DEF_CREATE_JPEG_SESSION which prepares the JPEG session for capture.
mDeferredWorkThread.launch(deferredWorkRoutine, this);
mDeferredWorkThread.sendCmd(CAMERA_CMD_TYPE_START_DATA_PROC, FALSE, FALSE);
...
}
// HAL1 camera_device_ops_t function table exposed through
// camera_device_t.ops; every entry forwards to a static member of
// QCamera2HardwareInterface.
camera_device_ops_t QCamera2HardwareInterface::mCameraOps = {
.set_preview_window = QCamera2HardwareInterface::set_preview_window,
.set_callbacks = QCamera2HardwareInterface::set_CallBacks,
.enable_msg_type = QCamera2HardwareInterface::enable_msg_type,
.disable_msg_type = QCamera2HardwareInterface::disable_msg_type,
.msg_type_enabled = QCamera2HardwareInterface::msg_type_enabled,
.start_preview = QCamera2HardwareInterface::start_preview,
.stop_preview = QCamera2HardwareInterface::stop_preview,
.preview_enabled = QCamera2HardwareInterface::preview_enabled,
.store_meta_data_in_buffers= QCamera2HardwareInterface::store_meta_data_in_buffers,
.start_recording = QCamera2HardwareInterface::start_recording,
.stop_recording = QCamera2HardwareInterface::stop_recording,
.recording_enabled = QCamera2HardwareInterface::recording_enabled,
.release_recording_frame = QCamera2HardwareInterface::release_recording_frame,
.auto_focus = QCamera2HardwareInterface::auto_focus,
.cancel_auto_focus = QCamera2HardwareInterface::cancel_auto_focus,
.take_picture = QCamera2HardwareInterface::take_picture,
.cancel_picture = QCamera2HardwareInterface::cancel_picture,
.set_parameters = QCamera2HardwareInterface::set_parameters,
.get_parameters = QCamera2HardwareInterface::get_parameters,
.put_parameters = QCamera2HardwareInterface::put_parameters,
.send_command = QCamera2HardwareInterface::send_command,
.release = QCamera2HardwareInterface::release,
.dump = QCamera2HardwareInterface::dump,
};
/*
 * Fills |info| with the HAL1 camera_info record for |cameraId| and reports
 * the camera sync type through |p_cam_type|. Always advertises device API
 * 1.0 and NULL static characteristics (those are HAL3-only concepts).
 * Returns NO_ERROR on success, NO_INIT if no record exists for cameraId.
 */
int QCamera2HardwareInterface::getCapabilities(uint32_t cameraId,
        struct camera_info *info, cam_sync_type_t *p_cam_type)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL1_GET_CAP);
    int rc = NO_ERROR;
    struct camera_info *p_info = NULL;

    pthread_mutex_lock(&gCamLock);
    p_info = get_cam_info(cameraId, p_cam_type);
    if (p_info == NULL) {
        // Guard: the original dereferenced p_info unconditionally and would
        // crash on an invalid id while still holding gCamLock.
        pthread_mutex_unlock(&gCamLock);
        return NO_INIT;
    }
    p_info->device_version = CAMERA_DEVICE_API_VERSION_1_0;
    p_info->static_camera_characteristics = NULL;
    memcpy(info, p_info, sizeof (struct camera_info));
    pthread_mutex_unlock(&gCamLock);
    return rc;
}
// Opens the camera through the mm-camera interface, caches its capability
// table, and registers camEvtHandle to receive backend events (excerpt —
// error handling and the return statement are elided).
int QCamera2HardwareInterface::openCamera()
{
...
rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
initCapabilities(mCameraId,mCameraHandle);
// `this` is handed back as the opaque user pointer in camEvtHandle callbacks.
mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
camEvtHandle,
(void *) this);
}
// Opens the camera and stores its queried capability table into the global
// gCamCapability[cameraId] cache (excerpt — the declarations of rc/handle
// and the return statement are elided in this quote).
int QCamera2HardwareInterface::initCapabilities(uint32_t cameraId,
mm_camera_vtbl_t *cameraHandle)
{
rc = camera_open((uint8_t)cameraId, &cameraHandle);
handle = get_main_camera_handle(cameraHandle->camera_handle);
gCamCapability[cameraId] = getCapabilities(cameraHandle->ops, handle);
}
/*
 * Queries the full cam_capability_t from the camera backend for |cam_handle|.
 * Allocates an ION-backed heap buffer, maps it to the backend as
 * CAM_MAPPING_BUF_TYPE_CAPABILITY, asks the backend to fill it via
 * query_capability, then returns a malloc'd copy owned by the caller.
 * Returns NULL if the local copy cannot be allocated.
 *
 * NOTE(review): this excerpt never unmaps or frees capabilityHeap and skips
 * rc checks on allocate/map/query — the full source releases the heap and
 * checks each step; confirm against the original file.
 */
cam_capability_t *QCamera2HardwareInterface::getCapabilities(mm_camera_ops_t *ops,
        uint32_t cam_handle)
{
    int rc = NO_ERROR;
    QCameraHeapMemory *capabilityHeap = NULL;
    cam_capability_t *cap_ptr = NULL;

    capabilityHeap = new QCameraHeapMemory(1);
    /* Allocate ION shared memory for the capability buffer */
    rc = capabilityHeap->allocate(1, sizeof(cam_capability_t));
    /* Zero the mapped capability buffer before handing it to the backend */
    memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));

    cam_buf_map_type_list bufMapList;
    /* bufMapList carries the mapping type, ION fd, buffer address, etc. */
    rc = QCameraBufferMaps::makeSingletonBufMapList(
            CAM_MAPPING_BUF_TYPE_CAPABILITY,
            0 /*stream id*/, 0 /*buffer index*/, -1 /*plane index*/,
            0 /*cookie*/, capabilityHeap->getFd(0), sizeof(cam_capability_t),
            bufMapList, capabilityHeap->getPtr(0));
    ops->map_bufs(cam_handle,&bufMapList);

    /* Query Capability */
    rc = ops->query_capability(cam_handle);

    cap_ptr = (cam_capability_t *)malloc(sizeof(cam_capability_t));
    if (cap_ptr == NULL) {
        /* The original memset/memcpy'd through a NULL pointer on OOM. */
        return NULL;
    }
    memset(cap_ptr, 0, sizeof(cam_capability_t));
    memcpy(cap_ptr, DATA_PTR(capabilityHeap, 0), sizeof(cam_capability_t));

    /* Analysis padding offsets are forced to zero for every use case. */
    int index;
    for (index = 0; index < CAM_ANALYSIS_INFO_MAX; index++) {
        cam_analysis_info_t *p_analysis_info = &cap_ptr->analysis_info[index];
        p_analysis_info->analysis_padding_info.offset_info.offset_x = 0;
        p_analysis_info->analysis_padding_info.offset_info.offset_y = 0;
    }
    return cap_ptr;
}
qcom/camera/msm8998/QCamera2/stack/mm-camera-interface/src/mm_camera.c
ps:mm_camera.c文件中 #ifdef DAEMON_PRESENT 宏定义判断,Android O之后已经取消守护进程这个宏定义,如msm8937(sdm429)平台。
// Resolves and opens the /dev/videoN node associated with this camera
// object (excerpt).
int32_t mm_camera_open(mm_camera_obj_t *my_obj)
{
...
dev_name_value = mm_camera_util_get_dev_name_by_num(my_obj->my_num,my_obj->my_hdl);
snprintf(dev_name, sizeof(dev_name), "/dev/%s",dev_name_value);
sscanf(dev_name, "/dev/video%d", &cam_idx);
...
// Retries the open every 20 ms, up to 20 attempts, before giving up.
my_obj->ctrl_fd = open(dev_name, O_RDWR | O_NONBLOCK);
}
// Packs the buffer-map list into a cam_sock_packet_t and sends it (with the
// buffer fds as SCM_RIGHTS ancillary data) to the vendor process so it can
// map the shared memory. Unused fd slots are set to -1.
// NOTE(review): numbufs is taken from the copied payload without clamping to
// CAM_MAX_NUM_BUFS_PER_STREAM — confirm the full source validates length,
// otherwise the sendfds[] loop could write out of bounds.
int32_t mm_camera_map_bufs(mm_camera_obj_t *my_obj,
const cam_buf_map_type_list* buf_map_list)
{
int32_t rc = 0;
cam_sock_packet_t packet;
memset(&packet, 0, sizeof(cam_sock_packet_t));
packet.msg_type = CAM_MAPPING_TYPE_FD_BUNDLED_MAPPING;
memcpy(&packet.payload.buf_map_list, buf_map_list,
sizeof(packet.payload.buf_map_list));
int sendfds[CAM_MAX_NUM_BUFS_PER_STREAM];
uint32_t numbufs = packet.payload.buf_map_list.length;
uint32_t i;
for (i = 0; i < numbufs; i++) {
sendfds[i] = packet.payload.buf_map_list.buf_maps[i].fd;
packet.payload.buf_map_list.buf_maps[i].buffer =
buf_map_list->buf_maps[i].buffer;
}
// Mark the remaining slots invalid so the receiver ignores them.
for (i = numbufs; i < CAM_MAX_NUM_BUFS_PER_STREAM; i++) {
packet.payload.buf_map_list.buf_maps[i].fd = -1;
sendfds[i] = -1;
}
rc = mm_camera_util_bundled_sendmsg(my_obj,
&packet, sizeof(cam_sock_packet_t),
sendfds, numbufs);
...
pthread_mutex_unlock(&my_obj->cam_lock);
return rc;
}
hardware/qcom/camera/msm8998/QCamera2/stack/mm-camera-interface/src/mm_camera_sock.c
/*
 * Sends a bundled buffer-mapping message (plus fds) over the domain socket
 * and blocks until the backend acknowledges with MAP_UNMAP_DONE.
 * Returns 0 when the backend reports success, -1 otherwise.
 */
int32_t mm_camera_util_bundled_sendmsg(mm_camera_obj_t *my_obj,
        void *msg,
        size_t buf_size,
        int sendfds[CAM_MAX_NUM_BUFS_PER_STREAM],
        int numfds)
{
    uint32_t status;

    /* Bail out immediately if the socket send itself fails. */
    if (mm_camera_socket_bundle_sendmsg(my_obj->ds_fd, msg, buf_size,
            sendfds, numfds) <= 0) {
        return -1;
    }

    /* Block until the backend signals that mapping/unmapping is done. */
    mm_camera_util_wait_for_event(my_obj, CAM_EVENT_TYPE_MAP_UNMAP_DONE, &status);
    return (status == MSM_CAMERA_STATUS_SUCCESS) ? 0 : -1;
}
// Sends |msg| over the domain socket |fd|; when numfds > 0 the file
// descriptors in sendfds[] are attached as SCM_RIGHTS ancillary data so the
// receiving (vendor) process can map the same buffers.
// Returns the sendmsg() result, or -1 on argument/ancillary setup failure.
// NOTE(review): control[] is a VLA sized by numfds; numfds == 0 yields a
// zero-length array — msg_control is only installed when numfds > 0, but
// confirm callers never pass a negative numfds.
int mm_camera_socket_bundle_sendmsg(
int fd,
void *msg,
size_t buf_size,
int sendfds[CAM_MAX_NUM_BUFS_PER_STREAM],
int numfds)
{
struct msghdr msgh;
struct iovec iov[1];
struct cmsghdr * cmsghp = NULL;
char control[CMSG_SPACE(sizeof(int) * numfds)];
int *fds_ptr = NULL;
if (msg == NULL) {
LOGD("msg is NULL");
return -1;
}
memset(&msgh, 0, sizeof(msgh));
msgh.msg_name = NULL;
msgh.msg_namelen = 0;
iov[0].iov_base = msg;
iov[0].iov_len = buf_size;
msgh.msg_iov = iov;
msgh.msg_iovlen = 1;
LOGD("iov_len=%llu",
(unsigned long long int)iov[0].iov_len);
msgh.msg_control = NULL;
msgh.msg_controllen = 0;
/* if numfds is valid, we need to pass it through control msg */
if (numfds > 0) {
msgh.msg_control = control;
msgh.msg_controllen = sizeof(control);
cmsghp = CMSG_FIRSTHDR(&msgh);
if (cmsghp != NULL) {
cmsghp->cmsg_level = SOL_SOCKET;
cmsghp->cmsg_type = SCM_RIGHTS;
cmsghp->cmsg_len = CMSG_LEN(sizeof(int) * numfds);
fds_ptr = (int*) CMSG_DATA(cmsghp);
memcpy(fds_ptr, sendfds, sizeof(int) * numfds);
} else {
LOGE("ctrl msg NULL");
return -1;
}
}
// Send the packet to the vendor process to request the shared-memory mapping.
return sendmsg(fd, &(msgh), 0);
}
/hardware/qcom/camera/msm8998/QCamera2/stack/mm-camera-interface/src/mm_camera_muxer.c
/*
 * Resolves the /dev video node name for camera |cam_num| by shifting that
 * camera's sub-handle out of the packed |cam_handle| and looking it up.
 */
const char *mm_camera_util_get_dev_name_by_num(uint8_t cam_num, uint32_t cam_handle)
{
    uint32_t sub_handle = cam_handle >> (cam_num * MM_CAMERA_HANDLE_SHIFT_MASK);
    return mm_camera_util_get_dev_name(sub_handle);
}
/hardware/interfaces/camera/device/1.0/default/CameraDevice.cpp
// Constructor: caches the CameraModule wrapper, camera id string, and the
// provider's list of (camera name, device name) pairs (excerpt).
CameraDevice::CameraDevice(
sp<CameraModule> module, const std::string& cameraId,
const SortedVector<std::pair<std::string, std::string>>& cameraDeviceNames) :
// mModule is the CameraModule built at CameraProvider init time
// (mModule = new CameraModule(rawModule)); rawModule is the camera_module_t
// obtained by dlopen'ing camera.msm8998.so.
// That CameraModule is passed in when CameraService creates this
// android::hardware::camera::device::V1_0::implementation::CameraDevice
// (see section 3.3.2).
mModule(module),
mCameraId(cameraId),
mDisconnected(false),
mCameraDeviceNames(cameraDeviceNames) {
...
}
// Opens the underlying HAL1 camera device and wires up the HIDL callbacks.
// A device advertising a higher device API on a >= 2.3 module is opened
// through openLegacy() as HAL 1.0; otherwise the plain open() path is used.
Return<Status> CameraDevice::open(const sp<ICameraDeviceCallback>& callback) {
    ALOGI("Opening camera %s", mCameraId.c_str());
    Mutex::Autolock _l(mLock);

    camera_info info;
    status_t res = mModule->getCameraInfo(mCameraIdInt, &info);
    if (res != OK) {
        ALOGE("Could not get camera info: %s: %d", mCameraId.c_str(), res);
        return getHidlStatus(res);
    }

    // Pick the open path based on the module/device API versions.
    const bool openAsLegacy =
            (mModule->getModuleApiVersion() >= CAMERA_MODULE_API_VERSION_2_3) &&
            (info.device_version > CAMERA_DEVICE_API_VERSION_1_0);

    int rc = openAsLegacy
            ? mModule->openLegacy(mCameraId.c_str(),
                    CAMERA_DEVICE_API_VERSION_1_0,
                    (hw_device_t **)&mDevice)
            : mModule->open(mCameraId.c_str(), (hw_device_t **)&mDevice);
    if (rc != OK) {
        mDevice = nullptr;
        ALOGE("Could not open camera %s: %d", mCameraId.c_str(), rc);
        return getHidlStatus(rc);
    }

    initHalPreviewWindow();
    mDeviceCallback = callback;

    // Register the HIDL-side trampolines with the HAL1 device, if supported.
    if (mDevice->ops->set_callbacks) {
        mDevice->ops->set_callbacks(mDevice,
                sNotifyCb, sDataCb, sDataCbTimestamp, sGetMemory, this);
    }
    return getHidlStatus(rc);
}
// Populates mHalPreviewWindow's preview_stream_ops with the static s*
// trampolines that forward HAL1 preview-window calls to the HIDL
// ICameraDevicePreviewCallback held in mHalPreviewWindow.mPreviewCallback.
void CameraDevice::initHalPreviewWindow()
{
mHalPreviewWindow.cancel_buffer = sCancelBuffer;
mHalPreviewWindow.lock_buffer = sLockBuffer;
mHalPreviewWindow.dequeue_buffer = sDequeueBuffer;
mHalPreviewWindow.enqueue_buffer = sEnqueueBuffer;
mHalPreviewWindow.set_buffer_count = sSetBufferCount;
mHalPreviewWindow.set_buffers_geometry = sSetBuffersGeometry;
mHalPreviewWindow.set_crop = sSetCrop;
mHalPreviewWindow.set_timestamp = sSetTimestamp;
mHalPreviewWindow.set_usage = sSetUsage;
mHalPreviewWindow.set_swap_interval = sSetSwapInterval;
mHalPreviewWindow.get_min_undequeued_buffer_count =
sGetMinUndequeuedBufferCount;
}
3.hal1开启预览
从https://blog.csdn.net/kk3087961/article/details/135992303#t4 可知
如果是hal1上层开启预览会调用CameraService中的 CameraClient::startPreview() =》CameraClient::startPreviewMode()
ps:CameraClient初始化时会mHardware = new CameraHardwareInterface(camera_device_name)
3.1.mHardware->setPreviewWindow(mPreviewWindow) 调用
=》CameraHardwareInterface::setPreviewWindow(const sp<ANativeWindow>& buf) 从 《android camera client、CameraService及camera hal调用流程》可知
其中buf 为 CameraClient::setPreviewTarget 中初始化的 window ,而window为 new Surface(bufferProducer, /*controlledByApp*/ true) 创建封装 bufferProducer(从上层获取的IGraphicBufferProducer) 的Surface 对象 。
从 /frameworks/native/libs/gui/include/gui/Surface.h 可知 Surface 继承 ANativeWindow
调用 mPreviewWindow = buf 将 buf 赋值给 CameraHardwareInterface类中的全局变量 mPreviewWindow
调用 mHidlDevice->setPreviewWindow(buf.get() ? this : nullptr) 其中 this 为 CameraHardwareInterface(实现ICameraDevicePreviewCallback接口)
最终调用hal实现接口 CameraDevice::setPreviewWindow(const sp<ICameraDevicePreviewCallback>& window)。
CameraDevice::setPreviewWindow(const sp<ICameraDevicePreviewCallback>& window) mHalPreviewWindow.mPreviewCallback = window ,
mHalPreviewWindow 为CameraPreviewWindow 结构体对象(CameraPreviewWindow继承 preview_stream_ops ),CameraDevice::initHalPreviewWindow 方法中定义了 mHalPreviewWindow 所有方法的实现。
调用mDevice->ops->set_preview_window(mDevice, (window == nullptr) ? nullptr : &mHalPreviewWindow)
如 2.2描述hal1对上面摄像头的mDevice操作都是调用 mCameraMuxerOps 对应的QCameraMuxer类中方法,这里调用 QCameraMuxer::set_preview_window(struct camera_device * device,struct preview_stream_ops *window)
QCameraMuxer::set_preview_window(struct camera_device * device,struct preview_stream_ops *window)
遍历该逻辑摄像头的所有物理摄像头 调用 hwi->set_preview_window(pCam->dev, window)
QCamera2HardwareInterface::set_preview_window(struct camera_device *device,struct preview_stream_ops *window) 调用w->processAPI(QCAMERA_SM_EVT_SET_PREVIEW_WINDOW, (void *)window)
QCamera2HardwareInterface::processAPI 方法,调用 m_stateMachine.procAPI(QCAMERA_SM_EVT_SET_PREVIEW_WINDOW, NULL) 调用 api_queue.enqueue((void *)node)其中node设置CMD类型为: QCAMERA_SM_CMD_TYPE_API,设置 evt为 QCAMERA_SM_EVT_SET_PREVIEW_WINDOW
QCameraStateMachine::smEvtProcRoutine 会无限循环处理api_queue消息,调用 pme->stateMachine(QCAMERA_SM_EVT_SET_PREVIEW_WINDOW, NULL)
QCameraStateMachine::stateMachine 这里m_state 默认是QCAMERA_SM_STATE_PREVIEW_STOPPED,调用 procEvtPreviewStoppedState(QCAMERA_SM_EVT_SET_PREVIEW_WINDOW, NULL)
QCameraStateMachine::procEvtPreviewStoppedState 调用 m_parent->setPreviewWindow((struct preview_stream_ops *)payload)
QCamera2HardwareInterface::setPreviewWindow(struct preview_stream_ops *window) 方法 将 window (即上面的 mHalPreviewWindow)赋值给 变量 mPreviewWindow
3.1.1 QCameraGrallocMemory 中 mWindow 初始化流程
QCameraStream::QCameraStream 中设置 mMemVtbl.get_bufs = get_bufs;
并在3.2.1.2.2中 设置流到kernel驱动,当底层需要 分配流缓冲区时,会回调 QCameraStream::get_bufs
QCameraStream::get_bufs(cam_frame_len_offset_t *offset,uint8_t *num_bufs,uint8_t **initial_reg_flag,mm_camera_buf_def_t **bufs,mm_camera_map_unmap_ops_tbl_t *ops_tbl,void *user_data)
调用stream->getBufs(offset, num_bufs,initial_reg_flag, bufs, ops_tbl)
QCameraStream::getBufs(cam_frame_len_offset_t *offset,uint8_t *num_bufs,uint8_t **initial_reg_flag,mm_camera_buf_def_t **bufs,mm_camera_map_unmap_ops_tbl_t *ops_tbl)
调用mAllocator.allocateStreamBuf(mStreamInfo->stream_type,mFrameLenOffset.frame_len, mFrameLenOffset.mp[0].stride,mFrameLenOffset.mp[0].scanline, numBufAlloc)
QCamera2HardwareInterface::allocateStreamBuf case CAM_STREAM_TYPE_PREVIEW 调用new QCameraGrallocMemory(mGetMemory, mCallbackCookie) 初始化grallocMemory
调用 grallocMemory->setWindowInfo(mPreviewWindow,dim.width,dim.height, stride, scanline, mParameters.getPreviewHalPixelFormat(),maxFPS, usage),设置 mem = grallocMemory
QCameraGrallocMemory::setWindowInfo 将传递的 mPreviewWindow 赋值给 mWindow
#调用 mem->allocate(bufferCnt, size)
QCameraGrallocMemory::allocate(uint8_t count, size_t /*size*/) 遍历从native window分配缓冲区数量,
#1调用 mWindow->dequeue_buffer(mWindow, &mBufferHandle[cnt], &stride) 获取数据给变量 mBufferHandle(类型 buffer_handle_t ,存储内存地址 ) ,如上mWindow 初始化流程 可知 mWindow 为 mPreviewWindow,3.1 可知 mPreviewWindow为mHalPreviewWindow ,mHalPreviewWindow.mPreviewCallback 为上层传递的 CameraHardwareInterface(实现ICameraDevicePreviewCallback接口),mHalPreviewWindow 为CameraPreviewWindow 结构体对象(CameraPreviewWindow继承 preview_stream_ops ),CameraDevice::initHalPreviewWindow 方法中定义了 mHalPreviewWindow.dequeue_buffer 方法的实现为 sDequeueBuffer
CameraDevice::sDequeueBuffer(struct preview_stream_ops* w,buffer_handle_t** buffer, int *stride) 调用object->mPreviewCallback->dequeueBuffer(匿名回调函数) ,
通过匿名回调函数 buf 为 ANativeWindowBuffer->handle ,将 ANativeWindowBuffer->handle.getNativeHandle赋值给 buffer(类型 buffer_handle_t ,存储内存地址 ) ,
调用 mBufferIdMap[buffer] = bufferId 保存 bufferId和buffer到 mBufferIdMap 中,object->mPreviewCallback 即为 CameraHardwareInterface 类,
CameraHardwareInterface::dequeueBuffer(dequeueBuffer_cb _hidl_cb) 方法中 ANativeWindow *a = mPreviewWindow.get()
调用 native_window_dequeue_buffer_and_wait(a, &anb) 如下可知 从Surface中获取 ANativeWindowBuffer 对象anb, buf = anb->handle,调用CameraHardwareInterface::getBufferId(anb)获取bufferId为 mNextBufferId++值,调用 mReversedBufMap[bufId] = anb , mReversedBufMap 存储 ANativeWindowBuffer 和bufId信息 。
调用 _hidl_cb(s, bufferId, buf, stride)回调数据给hal,其中buf(类型为hidl_handle结构体)是从ANativeWindowBuffer->handle 获取的, /frameworks/native/libs/nativewindow/include/system/window.h
window.native_window_dequeue_buffer_and_wait(ANativeWindow *anw,struct ANativeWindowBuffer** anb) 调用anw->dequeueBuffer_DEPRECATED(anw, anb) 如下对应hook_dequeueBuffer_DEPRECATED
/frameworks/native/libs/gui/Surface.cpp
ANativeWindow::dequeueBuffer_DEPRECATED = hook_dequeueBuffer_DEPRECATED;
Surface::hook_dequeueBuffer_DEPRECATED(ANativeWindow* window,ANativeWindowBuffer** buffer) 方法中 Surface* c = getSelf(window);ANativeWindowBuffer* buf;
调用 c->dequeueBuffer(&buf, &fenceFd) 从Surface中获取 ANativeWindowBuffer 并赋值给 buffer ,
#2如下调用可知,调用ioctl 将预览数据通过ion共享内存映射到 mBufferHandle 内存地址,mBufferHandle 又从 Surface 中的 ANativeWindowBuffer->handle.getNativeHandle 中获取 ,
即上层应用可以从ANativeWindowBuffer获取到预览数据。
mPrivateHandle[cnt] =(struct private_handle_t *)(*mBufferHandle[cnt]);
mMemInfo[cnt].main_ion_fd = open("/dev/ion", O_RDONLY);
//最终 Surface::dequeueBuffer 方法 先从CameraHardwareInterface类中的全局变量 mPreviewWindow获取ANativeWindowBuffer, 然后将 ANativeWindowBuffer->handle.getNativeHandle->fd 赋值给ion_info_fd.fd
ion_info_fd.fd = mPrivateHandle[cnt]->fd
ioctl(mMemInfo[cnt].main_ion_fd,ION_IOC_IMPORT, &ion_info_fd)
3.1.2 预览刷新流程
如3.2.1.2.2 中 QCamera2HardwareInterface::preview_stream_cb_routine 回调会调用memory->enqueueBuffer(idx),这里的 idx 为回调数据的 buf_idx,
QCameraGrallocMemory::enqueueBuffer(idx) 调用 mWindow->enqueue_buffer(mWindow, (buffer_handle_t *)mBufferHandle[index])
如上mWindow 初始化流程 可知 mWindow 为 mPreviewWindow,3.1 可知 mPreviewWindow为mHalPreviewWindow ,mHalPreviewWindow.mPreviewCallback 为上层传递的 CameraHardwareInterface(实现ICameraDevicePreviewCallback接口),
mHalPreviewWindow 为CameraPreviewWindow 结构体对象(CameraPreviewWindow继承 preview_stream_ops ),CameraDevice::initHalPreviewWindow 方法中定义了 mHalPreviewWindow.enqueue_buffer 方法的实现为 sEnqueueBuffer
CameraDevice::sEnqueueBuffer(struct preview_stream_ops* w, buffer_handle_t* buffer) 方法中 CameraPreviewWindow* object = static_cast<CameraPreviewWindow*>(w),
调用 uint64_t bufferId = object->mBufferIdMap.at(buffer); 从mBufferIdMap中取出 buffer对应的 bufferId,
调用object->mPreviewCallback->enqueueBuffer(bufferId) 完成刷新 ,object->mPreviewCallback 即为 CameraHardwareInterface类
CameraHardwareInterface::enqueueBuffer(uint64_t bufferId) 调用 mPreviewWindow.get()->queueBuffer(a, mReversedBufMap.at(bufferId), -1) ,
从mReversedBufMap 取出 bufferId 对应 Surface中获取的 ANativeWindowBuffer 对象,ANativeWindowBuffer 对象中 包含 预览数据共享内存。
从3.1可知 封装 mPreviewWindow 为封装bufferProducer(从上层获取的IGraphicBufferProducer) 的 Surface 对象,Surface 继承 ANativeWindow,即通过 ANativeWindow 接口调用 完成屏幕预览刷新。
3.2.mHardware->startPreview()
CameraHardwareInterface::startPreview 调用CameraProviderManager::mapToStatusT(mHidlDevice->startPreview())
mHidlDevice 由3.3.2可知即hardware::camera::device::V1_0::ICameraDevice对象(android::hardware::camera::device::V1_0::implementation::CameraDevice),
mHidlDevice->startPreview()即 调用hal实现接口 CameraDevice::startPreview(), 该方法调用 mDevice->ops->start_preview(mDevice),
如 2.2描述hal1对上面摄像头的mDevice操作都是调用 mCameraMuxerOps 对应的QCameraMuxer类中方法,这里调用 QCameraMuxer::start_preview(struct camera_device * device)
QCameraMuxer::start_preview 调用
3.2.1 遍历该逻辑摄像头的所有物理摄像头 调用hwi->prepare_preview(pCam->dev),
QCamera2HardwareInterface::prepare_preview(pCam->dev) 调用hw->processAPI(QCAMERA_SM_EVT_PREPARE_PREVIEW, NULL) 发送api请求
QCamera2HardwareInterface::processAPI 方法,调用 m_stateMachine.procAPI(QCAMERA_SM_EVT_PREPARE_PREVIEW, NULL) 调用 api_queue.enqueue((void *)node),其中node设置CMD类型为:QCAMERA_SM_CMD_TYPE_API,设置 evt为QCAMERA_SM_EVT_PREPARE_PREVIEW
QCameraStateMachine::smEvtProcRoutine 会无限循环处理api_queue消息,调用 pme->stateMachine(QCAMERA_SM_EVT_PREPARE_PREVIEW, NULL)
QCameraStateMachine::stateMachine 这里m_state 默认是QCAMERA_SM_STATE_PREVIEW_STOPPED,调用procEvtPreviewStoppedState(QCAMERA_SM_EVT_PREPARE_PREVIEW, NULL)
QCameraStateMachine::procEvtPreviewStoppedState 调用 m_parent->preparePreview() 设置m_state = QCAMERA_SM_STATE_PREVIEW_READY
QCamera2HardwareInterface::preparePreview() 调用addChannel(QCAMERA_CH_TYPE_PREVIEW)
QCamera2HardwareInterface::addChannel(QCAMERA_CH_TYPE_PREVIEW) 调用 addPreviewChannel()
QCamera2HardwareInterface::addPreviewChannel()调用 handle = getCamHandleForChannel(QCAMERA_CH_TYPE_PREVIEW) 获取 camera_handle ,高8位存储调用camera_open次数,低8位存储camera_idx,调用 pChannel = new QCameraChannel(handle, mCameraHandle->ops) 创建QCameraChannel对象,
QCameraChannel::QCameraChannel(handle, mCameraHandle->ops) handle 赋值全局变量 m_camHandle , mCameraHandle->ops 赋值全局变量 m_camOps ,m_camOps 对应mm_camera_interface.c中 mm_camera_ops,其中 .add_channel = mm_camera_intf_add_channel,
3.2.1.1.调用 pChannel->init(NULL, NULL, NULL)
QCameraChannel::init(NULL, NULL, NULL) 调用 m_camOps->add_channel(m_camHandle,NULL, NULL, NULL)如上可知m_camHandle 对应camera_handle , m_camOps 对应mm_camera_interface.c中 mm_camera_ops,其中 .add_channel = mm_camera_intf_add_channel,
mm_camera_interface.mm_camera_intf_add_channel(uint32_t camera_handle,...)根据camera_handle获取 camera_open时初始化的 cam_obj (mm_camera_obj_t 结构体)
调用 mm_camera_add_channel(my_obj,NULL, NULL, NULL) ,这里的my_obj 对应上面的 mm_camera.mm_camera_add_channel(mm_camera_obj_t *my_obj,..)从 my_obj->ch[ch_idx] 获取ch_obj,然后初始化 ch_obj 结构。首先调用 mm_camera_util_generate_handler 为其生成一个句柄(也是该函数的返回值),然后将状态设置ch_obj->state = MM_CHANNEL_STATE_STOPPED ,注意这里还保存了 my_obj 的指针及其 session id,最后调用 mm_channel_init 完成了 Channel 的初始化
mm_camera_channel.mm_channel_init(mm_channel_t *my_obj,..) 调用 mm_camera_poll_thread_launch(&my_obj->poll_thread[0],MM_CAMERA_POLL_TYPE_DATA)在打开的通道中启动数据轮询线程,修改my_obj->state为 MM_CHANNEL_STATE_STOPPED
3.2.1.2.调用 addStreamToChannel(pChannel, CAM_STREAM_TYPE_METADATA,metadata_stream_cb_routine, this)
QCamera2HardwareInterface::addStreamToChannel 调用 pStreamInfo = allocateStreamInfoBuf(CAM_STREAM_TYPE_METADATA,getStreamRefCount(CAM_STREAM_TYPE_METADATA, MM_CAMERA_TYPE_MAIN), MM_CAMERA_TYPE_MAIN)申请内存空间buffer
然后调用 pChannel->addStream(QCamera2HardwareInterface,pStreamInfo, NULL, &padding_info,metadata_stream_cb_routine, QCamera2HardwareInterface, false,true, ROTATE_0, MM_CAMERA_TYPE_MAIN) this即QCamera2HardwareInterface对象
padding_info 从gCamCapability[mCameraId]->padding_info 获取
QCameraChannel::addStream(QCamera2HardwareInterface,pStreamInfo, NULL, &padding_info,metadata_stream_cb_routine, QCamera2HardwareInterface, false,true, ROTATE_0, MM_CAMERA_TYPE_MAIN)
new QCameraStream(QCamera2HardwareInterface,m_camHandle, ch_handle, m_camOps, paddingInfo, true,ROTATE_0) 实例对象,调用QCameraStream::QCameraStream构造方法
将 m_camHandle (即 camera_handle) 赋值给 mCamHandle,ch_handle 是基于camera_handle获取指定cameraid(一般是主摄是0) 的句柄赋值给 mChannelHandle,将 mDefferedAllocation 设置为true
然后调用初始化方法 pStream->init,然后调用 mStreams.add(pStream) 将pStream添加到 mStreams 列表中
QCameraStream::init(pStreamInfo, NULL,metadata_stream_cb_routine, QCamera2HardwareInterface, false) metadata_stream_cb_routine 赋值给 mDataCB ,QCamera2HardwareInterface赋值给 mUserData,mDefferedAllocation 为true,基于padding_info和streamInfo->dim 设置 streamInfo->buf_planes 信息,
3.2.1.2.1调用mCamOps->add_stream(mCamHandle, mChannelHandle) 返回值赋值给 mHandle
如上可知m_camHandle 对应camera_handle , m_camOps 对应mm_camera_interface.c中 mm_camera_ops,其中 .add_stream = mm_camera_intf_add_stream,
mm_camera_interface.mm_camera_intf_add_stream(uint32_t camera_handle,uint32_t ch_id) 根据mCamHandle 获取cam_obj 赋值给 my_obj
mm_camera.mm_camera_add_stream(mm_camera_obj_t *my_obj,uint32_t ch_id)调用 mm_channel_fsm_fn(ch_obj,MM_CHANNEL_EVT_ADD_STREAM,NULL,(void *)&s_hdl) 其中s_hdl = 0
mm_camera_channel.mm_channel_fsm_fn(mm_channel_t *my_obj,...) 这里的my_obj为 ch_obj ,判断 my_obj->state 为MM_CHANNEL_STATE_STOPPED 调用 mm_channel_fsm_fn_stopped(my_obj, evt, in_val, out_val);
mm_camera_channel.mm_channel_fsm_fn_stopped(mm_channel_t *my_obj,..) case MM_CHANNEL_EVT_ADD_STREAM: 调用mm_channel_add_stream(my_obj)
mm_camera_channel.mm_channel_add_stream(mm_channel_t *my_obj)从my_obj->streams[idx]获取 stream_obj,初始化 stream_obj,设置流的状态为 stream_obj->state = MM_STREAM_STATE_INITED
调用 mm_stream_fsm_fn(stream_obj, MM_STREAM_EVT_ACQUIRE, NULL, NULL) 申请流
mm_camera_stream.mm_stream_fsm_fn(mm_stream_t *my_obj,mm_stream_evt_type_t evt,..) 这里的my_obj为stream_obj, my_obj->state 为 MM_STREAM_STATE_INITED 调用
mm_camera_stream.mm_stream_fsm_inited(mm_stream_t *my_obj,...)
调用 dev_name_value = mm_camera_util_get_dev_name_by_num(my_obj->ch_obj->cam_obj->my_num, cam_handle) 获取相机设备节点,这里的my_num 主摄被初始化为0,辅摄像被初始化为1,cam_handle = my_obj->ch_obj->cam_obj->my_hdl 对 应 camera_handle ,
调用my_obj->fd = open(dev_name, O_RDWR | O_NONBLOCK) 打开驱动节点 dev/video0或1 句柄赋值给 my_obj->fd。
调用 my_obj->state = MM_STREAM_STATE_ACQUIRED 设置状态为MM_STREAM_STATE_ACQUIRED,表示 流已经 acquired, fd opened ,
3.2.1.2.2调用 configStream()
QCameraStream::configStream()设置stream_config.mem_vtbl = mMemVtbl , stream_config.stream_cb = dataNotifyCB
调用mCamOps->config_stream(mCamHandle,mChannelHandle, mHandle, &stream_config),mCamHandle对应camera_handle,mChannelHandle 对应 add_channel创建通道返回的句柄,mHandle对应add_stream 返回的句柄,stream_config对应流配置信息
mm_camera_ops 方法定义 .config_stream = mm_camera_intf_config_stream,
mm_camera_interface.mm_camera_intf_config_stream(uint32_t camera_handle,...)) 调用 mm_camera_config_stream(my_obj, chid, strid, config),my_obj 是根据camera_handle获取 camera_open时初始化的cam_obj(mm_camera_obj_t 结构体),chid 对应add_channel创建通道返回的句柄,strid对应add_stream 返回的句柄
mm_camera.mm_camera_config_stream(mm_camera_obj_t *my_obj,...) 调用 mm_channel_fsm_fn(ch_obj,MM_CHANNEL_EVT_CONFIG_STREAM,(void *)&payload,NULL)
mm_camera_channel.mm_channel_fsm_fn(mm_channel_t *my_obj,...) 这里的my_obj为 ch_obj ,判断 my_obj->state 为MM_CHANNEL_STATE_STOPPED 调用 mm_channel_fsm_fn_stopped(my_obj, evt, in_val, out_val);
mm_camera_channel.mm_channel_fsm_fn_stopped(mm_channel_t *my_obj,..)调用 mm_channel_config_stream(my_obj, payload->stream_id,payload->config);
mm_camera_channel.mm_channel_config_stream(mm_channel_t *my_obj,...) 调用 mm_stream_fsm_fn(stream_obj,MM_STREAM_EVT_SET_FMT,(void *)config,NULL)
stream_obj 是基于 my_obj和payload->stream_id 获取的当前流对象,
mm_camera_stream.mm_stream_fsm_fn(mm_stream_t *my_obj,mm_stream_evt_type_t evt,..)这里的my_obj为stream_obj, my_obj->state 为 MM_STREAM_STATE_ACQUIRED 调用
mm_camera_stream.mm_stream_fsm_acquired(mm_stream_t *my_obj,..) ,调用 mm_stream_config(my_obj, config) 然后设置 my_obj->state = MM_STREAM_STATE_CFG
mm_camera_stream.mm_stream_config(mm_stream_t *my_obj,mm_camera_stream_config_t *config) 将config信息赋值给 my_obj,例如 my_obj->mem_vtbl = config->mem_vtbl, mem_vtbl 对应 QCameraStream::QCameraStream 中初始化对象 mMemVtbl ,mMemVtbl.get_bufs = get_bufs ,get_bufs 对应QCameraStream::get_bufs 用于分配流缓冲区,my_obj->buf_cb[cb_index].cb = config->stream_cb,将数据回调接口赋值给my_obj ,
这样驱动数据就可以通过QCameraStream::dataNotifyCB 接口回调到上层 。
回调的具体步骤 :
QCameraStream::dataNotifyCB(mm_camera_super_buf_t *recvd_frame,void *userdata)会调用processDataNotify
QCameraStream::processDataNotify 方法中调用 mDataQ.enqueue((void *)frame) ,然后调用 mProcTh.sendCmd(CAMERA_CMD_TYPE_DO_NEXT_JOB, FALSE, FALSE)取消线程阻塞,
从3.2.3.1 可知 dataProcRoutine 是 mProcTh 线程中执行的方法
QCameraStream::dataProcRoutine 无限循环中取消阻塞,开始处理 CAMERA_CMD_TYPE_DO_NEXT_JOB,先调用 mDataQ.dequeue()从mDataQ 中取出mm_camera_super_buf_t给frame
调用pme->mDataCB(frame, pme, pme->mUserData) , 其中pme为QCameraStream对象,如果当前向驱动请求的CAM_STREAM_TYPE_METADATA流, 从3.2.1.2可知 mDataCB 为 QCamera2HardwareInterface::metadata_stream_cb_routine(frame, pme, pme->mUserData)
如果当前向驱动请求的CAM_STREAM_TYPE_PREVIEW流,3.2.1.3可知 mDataCB 为QCamera2HardwareInterface::preview_stream_cb_routine(frame, pme, pme->mUserData)
QCamera2HardwareInterface::preview_stream_cb_routine (mm_camera_super_buf_t *super_frame,QCameraStream * stream,void *userdata)方法中
#调用 mm_camera_buf_def_t *frame = super_frame->bufs[0] ,这个frame 就是yuv数据 mm_camera_buf_def_t结构体包含了yuv格式,手机平台可以在这里对相机预览数据做算法处理
获取yuv地址: 调用 unsigned char* yuv = (unsigned char *)frame->buffer
获取实际宽高: cam_dimension_t dim; stream->getFrameDimension(dim)
获取yuv格式:stream->getFormat(previewFmt) 例如 : CAM_FORMAT_YUV_420_NV21 一般预览格式为 CAM_FORMAT_YUV_420_YV12,拍照数据为CAM_FORMAT_YUV_420_NV21
获取 offset:cam_frame_len_offset_t offset; stream->getFrameOffset(offset) offset包含stride等信息
#调用 memory->enqueueBuffer(idx) 完成预览刷新,,详情见 3.1.2 预览刷新流程.
#如果上层注册了预览回调 调用 pme->sendPreviewCallback(stream, memory, idx) 其中 memory 为 (QCameraGrallocMemory *)super_frame->bufs[0]->mem_info
QCamera2HardwareInterface::sendPreviewCallback 调用 m_cbNotifier.notifyCallback(cbArg) cbArg.cb_type为 QCAMERA_DATA_CALLBACK,cbArg.msg_type 为 CAMERA_MSG_PREVIEW_FRAME
cbArg.data 为mGetMemory(memory->getFd(idx),previewBufSize, 1, mCallbackCookie)
QCamera2HardwareInterface::notifyCallback 调用 mDataQ.enqueue((void *)cbArg) 将cbArg加入 mDataQ 队列,调用 mProcTh.sendCmd(CAMERA_CMD_TYPE_DO_NEXT_JOB, FALSE, FALSE) 取消线程阻塞
QCameraCbNotifier::cbNotifyRoutine 该方法 开启无限循环,阻塞线程等待消息,如果线程被取消阻塞,且cmd是CAMERA_CMD_TYPE_DO_NEXT_JOB,msg是QCAMERA_NOTIFY_CALLBACK
调用pme->mNotifyCb(cb->msg_type,cb->ext1,cb->ext2,pme->mCallbackCookie) ,最终会调用上层注册的回调方法。(另外,接前面 mm_stream_config 的流程)mm_stream_config 随后还会调用 mm_stream_set_fmt(my_obj)
mm_camera_stream.mm_stream_set_fmt(mm_stream_t *my_obj)将my_obj信息封装到fmt,调用 ioctl(my_obj->fd, VIDIOC_S_FMT, &fmt) 通过这个ioctl的方式,设置流到kernel驱动
3.2.1.3.调用 addStreamToChannel(pChannel, CAM_STREAM_TYPE_PREVIEW,preview_stream_cb_routine, this)
QCamera2HardwareInterface::addStreamToChannel 调用 pStreamInfo = allocateStreamInfoBuf(CAM_STREAM_TYPE_PREVIEW,getStreamRefCount(CAM_STREAM_TYPE_PREVIEW, MM_CAMERA_TYPE_MAIN), MM_CAMERA_TYPE_MAIN)申请内存空间buffer
然后调用 pChannel->addStream(QCamera2HardwareInterface,pStreamInfo, NULL, &padding_info,preview_stream_cb_routine, QCamera2HardwareInterface, false,true, ROTATE_0, MM_CAMERA_TYPE_MAIN) this即QCamera2HardwareInterface对象padding_info 从gCamCapability[mCameraId]->padding_info 获取,后续流程同2
3.2.1.4.调用 pChannel->setStreamSyncCB(CAM_STREAM_TYPE_PREVIEW,synchronous_stream_cb_routine)
QCameraChannel::setStreamSyncCB(CAM_STREAM_TYPE_PREVIEW,synchronous_stream_cb_routine) 调用 mStreams[i]->setSyncDataCB(synchronous_stream_cb_routine)
为前面 创建的 CAM_STREAM_TYPE_METADATA和CAM_STREAM_TYPE_PREVIEW 类型QCameraStream 设置回调函数
QCameraStream::setSyncDataCB(synchronous_stream_cb_routine) 调用 mCamOps->register_stream_buf_cb(mCamHandle,mChannelHandle, mHandle, dataNotifySYNCCB, MM_CAMERA_STREAM_CB_TYPE_SYNC,this);这里mCamHandle 对应camera_handle,mChannelHandle对应基于camera_handle获取指定cameraid(一般是主摄是0) 的句柄,mHandle对应mCamOps->add_stream返回的句柄,
将synchronous_stream_cb_routine 赋值给全局变量 mSYNCDataCB,dataNotifySYNCCB方法调用mSYNCDataCB处理回调数据,mCamOps 对应mm_camera_interface.c中 mm_camera_ops.register_stream_buf_cb = mm_camera_intf_register_stream_buf_cb,
mm_camera_interface.mm_camera_intf_register_stream_buf_cb(...) 调用 mm_camera_reg_stream_buf_cb(my_obj, chid, strid,buf_cb, cb_type, userdata)
其中my_obj对应cam_obj,chid对应 mChannelHandle低16位,strid 对应add_stream 返回的句柄(保存在上面的 mHandle),buf_cb 对应dataNotifySYNCCB,cb_type对应 MM_CAMERA_STREAM_CB_TYPE_SYNC,userdata对应QCameraStream
mm_camera.mm_camera_reg_stream_buf_cb(mm_camera_obj_t *my_obj,..)
调用mm_channel_fsm_fn(ch_obj,MM_CHANNEL_EVT_REG_STREAM_BUF_CB,(void*)&payload, NULL);其中ch_obj是基于 cam_obj和mChannelHandle 获取的mm_channel_t,payload.buf_cb.cb 对应dataNotifySYNCCB, payload.buf_cb.cb_type对应MM_CAMERA_STREAM_CB_TYPE_SYNC,
payload.buf_cb.cb_type对应 MM_CAMERA_STREAM_CB_TYPE_SYNC, payload.buf_cb.user_data对应QCameraStream
mm_camera_channel.mm_channel_fsm_fn(mm_channel_t *my_obj,...) 这里的my_obj为 ch_obj ,判断 my_obj->state 为MM_CHANNEL_STATE_STOPPED 调用 mm_channel_fsm_fn_stopped(my_obj, evt, in_val, out_val);
mm_camera_channel.mm_channel_fsm_fn_stopped(mm_channel_t *my_obj,..)调用 mm_channel_reg_stream_buf_cb (my_obj,payload->stream_id, payload->buf_cb) 其中 my_obj为ch_obj,payload->stream_id 为add_stream 返回的句柄(赋值给全局变量mHandle) ,payload->buf_cb 为dataNotifySYNCCB
mm_camera_channel.mm_channel_reg_stream_buf_cb (mm_channel_t* my_obj,uint32_t stream_id, mm_stream_data_cb_t buf_cb) 调用 s_obj = mm_channel_util_get_stream_by_handler(ch_obj,stream_id) 从ch_obj 通道中基于stream_id获取流信息,调用mm_stream_reg_buf_cb(s_obj, buf_cb)
mm_camera_stream.mm_stream_reg_buf_cb(mm_stream_t *my_obj,mm_stream_data_cb_t val)调用 my_obj->buf_cb[0] = dataNotifySYNCCB 将buf_cb[0] 赋值为dataNotifySYNCCB
3.2.1.5 调用 m_channels[QCAMERA_CH_TYPE_PREVIEW] = pChannel 将pChannel 赋值给全局变量 m_channels[QCAMERA_CH_TYPE_PREVIEW]
3.2.2 遍历该逻辑摄像头的所有物理摄像头 调用 hwi->bundleRelatedCameras(true)
3.2.3 遍历该逻辑摄像头的所有物理摄像头 调用 hwi->start_preview(pCam->dev) QCAMERA_SM_EVT_START_PREVIEW
QCamera2HardwareInterface::start_preview(pCam->dev) 调用hw->processAPI(QCAMERA_SM_EVT_START_PREVIEW, NULL)
QCamera2HardwareInterface::processAPI 方法,调用 m_stateMachine.procAPI(QCAMERA_SM_EVT_START_PREVIEW, NULL) 调用 api_queue.enqueue((void *)node),其中node设置CMD类型为:QCAMERA_SM_CMD_TYPE_API,设置 evt为 QCAMERA_SM_EVT_START_PREVIEW
QCameraStateMachine::smEvtProcRoutine 会无限循环处理api_queue消息,调用 pme->stateMachine(QCAMERA_SM_EVT_START_PREVIEW, NULL)
QCameraStateMachine::stateMachine 这里m_state 是 QCAMERA_SM_STATE_PREVIEW_READY 调用 procEvtPreviewReadyState(QCAMERA_SM_EVT_START_PREVIEW, payload)
QCameraStateMachine::procEvtPreviewReadyState 调用 m_parent->startPreview() 设置 m_state = QCAMERA_SM_STATE_PREVIEWING,
这个m_parent是QCameraStateMachine::QCameraStateMachine构造函数传递的QCamera2HardwareInterface 对象
QCamera2HardwareInterface::startPreview() 从一. 初始化信息可知调用 startChannel(QCAMERA_CH_TYPE_PREVIEW);
QCamera2HardwareInterface::startChannel 调用 m_channels[QCAMERA_CH_TYPE_PREVIEW]->start() m_channels[QCAMERA_CH_TYPE_PREVIEW]是3.2.1中初始化的QCameraChannel对象
QCameraChannel::start() 调用
3.2.3.1.遍历当前通道中的所有流调用 mStreams[i]->start(),预览通道QCAMERA_CH_TYPE_PREVIEW 包含 CAM_STREAM_TYPE_METADATA 流和CAM_STREAM_TYPE_PREVIEW流
QCameraStream::start() 通过mProcTh.launch(dataProcRoutine, this) 开启新线程 dataProcRoutine 执行无限循环,
CAMERA_CMD_TYPE_DO_NEXT_JOB 分支,从 mDataQ.dequeue() 队列中取出数据并放入mDataCB中,等待数据返回到对应的stream回调中去
3.2.3.2.调用 m_camOps->start_channel(m_camHandle, m_handle, /*start_sensor_streaming*/true) m_camHandle为 camera_handle,m_handle 对应创建预览 QCameraChannel 返回的句柄,
m_camOps 对应mm_camera_interface.c中 mm_camera_ops,其中.start_channel = mm_camera_intf_start_channel,
mm_camera_interface.mm_camera_intf_start_channel(uint32_t camera_handle,uint32_t ch_id,bool start_sensor_streaming) 1.调用 mm_camera_start_channel(my_obj, chid);2.调用 mm_camera_start_sensor_stream_on(my_obj, ch_id).
my_obj是根据camera_handle获取camera_open时 初始化对象 cam_obj,ch_id 是创建的预览 QCameraChannel 返回的句柄
3.2.3.2.1 mm_camera_start_channel(my_obj, chid) 调用 mm_channel_fsm_fn(ch_obj,MM_CHANNEL_EVT_START,NULL,NULL)
mm_camera_channel.mm_channel_fsm_fn(mm_channel_t *my_obj,...) my_obj为 ch_obj ,判断 my_obj->state 为 MM_CHANNEL_STATE_STOPPED 调用 mm_channel_fsm_fn_stopped(my_obj, evt, in_val, out_val);
mm_camera_channel.mm_channel_fsm_fn_stopped(mm_channel_t *my_obj,...) 判断evt为MM_CHANNEL_EVT_START 调用 mm_channel_start(my_obj) 然后设置 my_obj->state = MM_CHANNEL_STATE_ACTIVE
mm_camera_channel.mm_channel_start(mm_channel_t *my_obj)创建cb thread,cmd thread线程以及;为每个stream分配buf,mm_stream_fsm_fn(s_objs[i],MM_STREAM_EVT_REG_BUF,NULL,NULL) 后设置my_obj->state = MM_STREAM_STATE_REG
调用 mm_stream_fsm_fn(s_objs[i],MM_STREAM_EVT_START,NULL,NULL)开启stream
mm_camera_stream.mm_stream_fsm_fn(mm_stream_t *my_obj,mm_stream_evt_type_t evt,..) 这里的my_obj为stream_obj, my_obj->state 为 MM_STREAM_STATE_REG调用 mm_stream_fsm_reg(my_obj, MM_STREAM_EVT_START, NULL, NULL)
mm_camera_stream.mm_stream_fsm_reg(mm_stream_t * my_obj,...) 设置my_obj->state = MM_STREAM_STATE_ACTIVE ,调用 mm_stream_streamon(my_obj)
mm_camera_stream.mm_stream_streamon(my_obj) 调用ioctl(my_obj->fd, VIDIOC_STREAMON, V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE) 通过ioctl的方式,指令为:VIDIOC_STREAMON,向内核发送v4l2请求,启动一个数据流!
3.2.3.2.2调用 mm_camera_start_sensor_stream_on(my_obj, ch_id)
mm_camera.mm_camera_start_sensor_stream_on(mm_camera_obj_t *my_obj, uint32_t ch_id) 调用 mm_channel_fsm_fn(ch_obj,MM_CHANNEL_EVT_START_SENSOR_STREAMING,NULL,NULL);
mm_camera_channel.mm_channel_fsm_fn(mm_channel_t *my_obj,...) 这里的my_obj为 ch_obj ,判断 my_obj->state 为 MM_CHANNEL_STATE_ACTIVE 调用 mm_channel_fsm_fn_active(my_obj, evt, in_val, out_val)
mm_camera_channel.mm_channel_fsm_fn_active(my_obj, MM_CHANNEL_EVT_START_SENSOR_STREAMING,NULL,0) 调用 mm_channel_start_sensor_streaming(my_obj)
mm_camera_channel.mm_channel_start_sensor_streaming(mm_channel_t *my_obj) 遍历通道所有流调用 mm_stream_fsm_fn(s_objs[i],MM_STREAM_EVT_START_SENSOR_STREAMING,NULL,NULL)
mm_camera_stream.mm_stream_fsm_fn(mm_stream_t *my_obj,mm_stream_evt_type_t evt,..)这里的my_obj为stream_obj, my_obj->state 为 MM_STREAM_STATE_ACTIVE调用 mm_stream_fsm_active(my_obj, MM_STREAM_EVT_START_SENSOR_STREAMING, NULL, NULL)
mm_camera_stream.mm_stream_fsm_active(mm_stream_t * my_obj,...) 调用 mm_stream_start_sensor_streaming(my_obj)
mm_camera_stream.mm_stream_start_sensor_streaming(mm_stream_t *my_obj)封装shim_cmd_data 其中command = MSM_CAMERA_PRIV_STREAM_ON,value = CAM_STREAM_ON_TYPE_START_SENSOR_STREAMING
调用mm_camera_module_send_cmd(shim_cmd) todo
4.hal1 拍照流程
从https://blog.csdn.net/kk3087961/article/details/135992303#t4 可知
如果是hal1上层拍照会调用CameraService中的 Camera2Client::takePicture() 调用mHardware->takePicture()
CameraHardwareInterface::takePicture() 调用 mHidlDevice->takePicture(),mHidlDevice 由3.3.2可知即hardware::camera::device::V1_0::ICameraDevice对象(android::hardware::camera::device::V1_0::implementation::CameraDevice),
mHidlDevice->takePicture()即 调用hal实现接口 CameraDevice::takePicture(), 该方法调用 mDevice->ops->take_picture
如 2.2描述hal1对上面摄像头的mDevice操作都是调用 mCameraMuxerOps 对应的 QCameraMuxer 类中方法,这里调用 QCameraMuxer::take_picture(struct camera_device * device)
QCameraMuxer::take_picture 调用
// 1. call pre_take_picture first
遍历所有摄像头 调用hwi->pre_take_picture(pCam->dev)
QCamera2HardwareInterface::pre_take_picture 调用 hw->processAPI(QCAMERA_SM_EVT_PRE_TAKE_PICTURE, NULL)
流程参考3.2.1
// 2. Check if preview restart is needed. Check all cameras. 检查预览是否需要重启
// 3. if preview restart needed. stop the preview first 如果是则先停止预览
遍历所有摄像头 调用 QCamera2HardwareInterface::take_picture(pCam->dev)
QCamera2HardwareInterface::take_picture 调用 调用hw->processAPI(QCAMERA_SM_EVT_TAKE_PICTURE, NULL)
流程参考3.2.1 最终会调用m_parent->takePicture()
QCamera2HardwareInterface::takePicture()
1.调用 addCaptureChannel
QCamera2HardwareInterface::addCaptureChannel() 调用 addStreamToChannel(pChannel, CAM_STREAM_TYPE_SNAPSHOT,NULL, this)
调用pChannel->init(&attr,capture_channel_cb_routine, this) 向通道中注册拍照数据回调 capture_channel_cb_routine,
参考3.2.1.2流程 最终会调用驱动接口注册数据流回调,手机平台可以在这里对相机拍照数据做算法处理
2.调用 queueDeferredWork(CMD_DEF_PPROC_START,args) 这里的args.pprocArgs 为m_channels[QCAMERA_CH_TYPE_CAPTURE]
QCamera2HardwareInterface::queueDeferredWork(CMD_DEF_PPROC_START,args) 调用 mCmdQueue.enqueue(dw) 其中 dw.cmd为 CMD_DEF_PPROC_START
从2.1 QCamera2HardwareInterface::QCamera2HardwareInterface 方法可知 deferredWorkRoutine 处理 mCmdQueue消息,
QCamera2HardwareInterface::deferredWorkRoutine case CMD_DEF_PPROC_START 调用pme->m_postprocessor.start(pChannel),其中 pChannel 为 dw->args.pprocArgs,m_postprocessor为QCameraPostProcessor
QCameraPostProcessor::start(QCameraChannel *pSrcChannel) pSrcChannel为拍照通道 QCAMERA_CH_TYPE_CAPTURE,调用 m_dataProcTh.sendCmd(CAMERA_CMD_TYPE_START_DATA_PROC, TRUE, FALSE)
QCameraPostProcessor::dataProcessRoutine(void *data) case CAMERA_CMD_TYPE_START_DATA_PROC 调用 m_inputJpegQ.init() 等设置m_active = true,
调用 m_saveProcTh.sendCmd(CAMERA_CMD_TYPE_START_DATA_PROC,FALSE,FALSE)
QCameraPostProcessor::dataSaveRoutine(void *data) case CAMERA_CMD_TYPE_START_DATA_PROC 调用 m_inputSaveQ.init() 设置m_active = true,
3.调用 queueDeferredWork(CMD_DEF_CREATE_JPEG_SESSION,args) 这里的args.pprocArgs为m_channels[QCAMERA_CH_TYPE_CAPTURE]
QCamera2HardwareInterface::queueDeferredWork(CMD_DEF_CREATE_JPEG_SESSION,args) 调用 mCmdQueue.enqueue(dw) 其中 dw.cmd为 CMD_DEF_CREATE_JPEG_SESSION
从2.1 QCamera2HardwareInterface::QCamera2HardwareInterface 方法可知 deferredWorkRoutine 处理 mCmdQueue消息,
QCamera2HardwareInterface::deferredWorkRoutine case CMD_DEF_CREATE_JPEG_SESSION调用pme->m_postprocessor.createJpegSession(pChannel),其中 pChannel 为 dw->args.pprocArgs;
QCameraPostProcessor::createJpegSession(QCameraChannel *pSrcChannel)
遍历pSrcChannel通道中的 拍照流 类型CAM_STREAM_TYPE_SNAPSHOT pSnapshotStream以及缩略图流 pThumbStream,调用getJpegEncodingConfig(encodeParam, pSnapshotStream, pThumbStream) 获取压缩jpg参数。
调用mJpegHandle.create_session(mJpegClientHandle,&encodeParam,&mJpegSessionId)
从jpegdec_open(mm_jpegdec_ops_t *ops) 方法中可知create_session 对应 mm_jpegdec_intf_create_session,
mm_jpegdec_interface.mm_jpegdec_intf_create_session(...)方法调用mm_jpegdec_create_session(g_jpegdec_obj, client_hdl, p_params, p_session_id)。mm_jpegdec.mm_jpegdec_create_session(mm_jpeg_obj *my_obj,...) 调用mm_jpegdec_session_create(p_session),其中p_session为调用mm_jpeg_get_new_session_idx(my_obj, clnt_idx, &p_session)获取。
mm_jpegdec.mm_jpegdec_session_create(mm_jpeg_job_session_t* p_session) 调用OMX_GetHandle(&p_session->omx_handle, "OMX.qcom.image.jpeg.decoder",(void *)p_session,&p_session->omx_callbacks),即调用OMX.qcom.image.jpeg.decoder库完成jpeg压缩编码
hardware/qcom/camera/msm8998/QCamera2/HAL/QCameraMuxer.cpp
int QCameraMuxer::start_preview(struct camera_device * device)
{
int rc = NO_ERROR;
qcamera_physical_descriptor_t *pCam = NULL;
qcamera_logical_descriptor_t *cam = gMuxer->getLogicalCamera(device);
// prepare preview first for all cameras首先为所有相机准备预览
for (uint32_t i = 0; i < cam->numCameras; i++) {
pCam = gMuxer->getPhysicalCamera(cam, i);
QCamera2HardwareInterface *hwi = pCam->hwi;
rc = hwi->prepare_preview(pCam->dev);
}
if (cam->numCameras > 1) {
uint sessionId = 0;
// Set up sync for camera sessions 设置相机会话的同步
for (uint32_t i = 0; i < cam->numCameras; i++) {
pCam = gMuxer->getPhysicalCamera(cam, i);
QCamera2HardwareInterface *hwi = pCam->hwi;
if(pCam->mode == CAM_MODE_PRIMARY) {
// bundle primary cam with all aux cameras 将主摄像头与所有辅助摄像头捆绑在一起
for (uint32_t j = 0; j < cam->numCameras; j++) {
if (j == cam->nPrimaryPhyCamIndex) {
continue;
}
sessionId = cam->sId[j];
rc = hwi->bundleRelatedCameras(true);
}
}
if (pCam->mode == CAM_MODE_SECONDARY) {
// bundle all aux cam with primary cams 将所有辅助摄像头与主摄像头捆绑在一起
sessionId = cam->sId[cam->nPrimaryPhyCamIndex];
rc = hwi->bundleRelatedCameras(true);
}
}
// Remember Sync is ON
cam->bSyncOn = true;
}
// Start Preview for all cameras 开始预览所有相机
for (uint32_t i = 0; i < cam->numCameras; i++) {
pCam = gMuxer->getPhysicalCamera(cam, i);
QCamera2HardwareInterface *hwi = pCam->hwi;
rc = hwi->start_preview(pCam->dev);
}
LOGH("X");
return rc;
}
hardware/interfaces/camera/device/1.0/default/CameraDevice.cpp
// HIDL entry point: forwards startPreview to the legacy HAL1 ops table.
// Fix: the original fell off the end of a non-void function when the
// optional start_preview op was not provided (undefined behavior); report
// an internal error instead.
Return<Status> CameraDevice::startPreview() {
    if (mDevice->ops->start_preview) {
        return getHidlStatus(mDevice->ops->start_preview(mDevice));
    }
    return Status::INTERNAL_ERROR;
}
hardware/qcom/camera/msm8998/QCamera2/HAL/QCameraStateMachine.cpp
/**
 * Packages an API event into a command node and posts it to the state
 * machine thread.
 *
 * @param evt         state-machine event to process
 * @param api_payload caller-owned payload (freed by HWI, not here)
 * @return NO_ERROR on success, NO_MEMORY/UNKNOWN_ERROR on failure
 */
int32_t QCameraStateMachine::procAPI(qcamera_sm_evt_enum_t evt,
                                     void *api_payload)
{
    // Zero-initialized command node for the API event.
    qcamera_sm_cmd_t *node =
        (qcamera_sm_cmd_t *)calloc(1, sizeof(qcamera_sm_cmd_t));
    if (NULL == node) {
        ALOGE("%s: No memory for qcamera_sm_cmd_t", __func__);
        return NO_MEMORY;
    }

    node->cmd = QCAMERA_SM_CMD_TYPE_API;
    node->evt = evt;
    node->evt_payload = api_payload;

    // Queue the node; smEvtProcRoutine's loop consumes it.
    if (!api_queue.enqueue((void *)node)) {
        free(node);
        return UNKNOWN_ERROR;
    }

    // Wake the state-machine thread blocked on cmd_sem.
    cam_sem_post(&cmd_sem);
    return NO_ERROR;
}
// State-machine worker thread. Blocks on cmd_sem until procAPI/an event
// posts a command, then drains the API queue (priority) followed by the
// event queue and dispatches each node to stateMachine(). Exits when a
// QCAMERA_SM_CMD_TYPE_EXIT node is dequeued.
void *QCameraStateMachine::smEvtProcRoutine(void *data)
{
int running = 1, ret;
QCameraStateMachine *pme = (QCameraStateMachine *)data;
CDBG_HIGH("%s: E", __func__);
do {// main loop: runs until an EXIT command is received
do {
// Block until a producer posts the semaphore; loop on spurious wakeups.
ret = cam_sem_wait(&pme->cmd_sem);
if (ret != 0 && errno != EINVAL) {
ALOGE("%s: cam_sem_wait error (%s)",
__func__, strerror(errno));
return NULL;
}
} while (ret != 0);
// we got notified about new cmd avail in cmd queue
// first check API cmd queue: API commands take priority over events
qcamera_sm_cmd_t *node = (qcamera_sm_cmd_t *)pme->api_queue.dequeue();
if (node == NULL) {
// no API cmd, then check evt cmd queue
node = (qcamera_sm_cmd_t *)pme->evt_queue.dequeue();
}
if (node != NULL) {
switch (node->cmd) {
// API command (enqueued by procAPI above)
case QCAMERA_SM_CMD_TYPE_API:
pme->stateMachine(node->evt, node->evt_payload);
// API is in a way sync call, so evt_payload is managed by HWI
// no need to free payload for API
break;
case QCAMERA_SM_CMD_TYPE_EVT:
pme->stateMachine(node->evt, node->evt_payload);
// EVT is async call, so payload need to be free after use
free(node->evt_payload);
node->evt_payload = NULL;
break;
case QCAMERA_SM_CMD_TYPE_EXIT:
running = 0;
break;
default:
break;
}
free(node);
node = NULL;
}
} while (running);
CDBG_HIGH("%s: X", __func__);
return NULL;
}
/**
 * Dispatches an event to the handler for the current state.
 *
 * Pure state-table dispatch: each state has a dedicated procEvt* handler;
 * an unrecognized state is a no-op reported as NO_ERROR (same as the
 * original's default-break path).
 *
 * @param evt     event to process
 * @param payload event payload, handler-specific
 * @return handler's status, or NO_ERROR for unknown states
 */
int32_t QCameraStateMachine::stateMachine(qcamera_sm_evt_enum_t evt, void *payload)
{
    LOGL("m_state %d, event (%d)", m_state, evt);
    switch (m_state) {
    case QCAMERA_SM_STATE_PREVIEW_STOPPED:
        return procEvtPreviewStoppedState(evt, payload);
    case QCAMERA_SM_STATE_PREVIEW_READY:
        return procEvtPreviewReadyState(evt, payload);
    case QCAMERA_SM_STATE_PREVIEWING:
        return procEvtPreviewingState(evt, payload);
    case QCAMERA_SM_STATE_PREPARE_SNAPSHOT:
        return procEvtPrepareSnapshotState(evt, payload);
    case QCAMERA_SM_STATE_PIC_TAKING:
        return procEvtPicTakingState(evt, payload);
    case QCAMERA_SM_STATE_RECORDING:
        return procEvtRecordingState(evt, payload);
    case QCAMERA_SM_STATE_VIDEO_PIC_TAKING:
        return procEvtVideoPicTakingState(evt, payload);
    case QCAMERA_SM_STATE_PREVIEW_PIC_TAKING:
        return procEvtPreviewPicTakingState(evt, payload);
    default:
        return NO_ERROR;
    }
}
// Handles events while in QCAMERA_SM_STATE_PREVIEW_STOPPED.
// NOTE: this is an excerpt -- only the START_PREVIEW case is shown; the
// remaining cases are elided ("...").
int32_t QCameraStateMachine::procEvtPreviewStoppedState(qcamera_sm_evt_enum_t evt,
void *payload)
{
int32_t rc = NO_ERROR;
qcamera_api_result_t result;
memset(&result, 0, sizeof(qcamera_api_result_t));
LOGL("event (%d)", evt);
switch (evt) {
...
case QCAMERA_SM_EVT_START_PREVIEW:
{
// Parameter-init deferred work must have completed before preview setup.
rc = m_parent->waitDeferredWork(m_parent->mParamInitJob);
if (NO_ERROR != rc) {
LOGE("Param init deferred work failed");
} else if (m_parent->mPreviewWindow == NULL) {
rc = m_parent->preparePreview();
if(rc == NO_ERROR) {
// preview window is not set yet, move to previewReady state
m_state = QCAMERA_SM_STATE_PREVIEW_READY;
} else {
LOGE("preparePreview failed");
}
} else {
// Window already available: prepare channels then start streaming.
rc = m_parent->preparePreview();
if (rc == NO_ERROR) {
applyDelayedMsgs();
rc = m_parent->startPreview();
if (rc != NO_ERROR) {
m_parent->unpreparePreview();
} else {
// start preview success, move to previewing state
m_state = QCAMERA_SM_STATE_PREVIEWING;
}
}
}
// Report the API result back to the (blocked) caller.
result.status = rc;
result.request_api = evt;
result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
m_parent->signalAPIResult(&result);
}
break;
default:
break;
}
return rc;
}
// Handles events while in QCAMERA_SM_STATE_PREVIEW_READY (channels
// prepared, waiting for a preview window). Excerpt: only the
// START_PREVIEW case is shown; the switch has no default here.
int32_t QCameraStateMachine::procEvtPreviewReadyState(qcamera_sm_evt_enum_t evt,
void *payload)
{
int32_t rc = NO_ERROR;
qcamera_api_result_t result;
memset(&result, 0, sizeof(qcamera_api_result_t));
LOGL("event (%d)", evt);
switch (evt) {
case QCAMERA_SM_EVT_START_PREVIEW:
{
// m_parent is the QCamera2HardwareInterface object passed to the
// QCameraStateMachine constructor.
if (m_parent->mPreviewWindow != NULL) {
rc = m_parent->startPreview();
if (rc != NO_ERROR) {
m_parent->unpreparePreview();
m_state = QCAMERA_SM_STATE_PREVIEW_STOPPED;
} else {
m_state = QCAMERA_SM_STATE_PREVIEWING;
}
}
// no ops here
// NOTE(review): rc is unconditionally reset to NO_ERROR, so a
// startPreview failure above is not reported in the API result.
// This mirrors the upstream source, but verify it is intentional.
rc = NO_ERROR;
result.status = rc;
result.request_api = evt;
result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
m_parent->signalAPIResult(&result);
}
break;
}
return rc;
}
hardware/qcom/camera/msm8998/QCamera2/HAL/QCamera2HWI.cpp
int QCamera2HardwareInterface::prepare_preview(struct camera_device *device)
{
int ret = NO_ERROR;
QCamera2HardwareInterface *hw =
reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
qcamera_api_result_t apiResult;
qcamera_sm_evt_enum_t evt = QCAMERA_SM_EVT_PREPARE_PREVIEW;
ret = hw->processAPI(evt, NULL);
return ret;
}
int QCamera2HardwareInterface::bundleRelatedCameras(bool syncOn)
{
int32_t rc = mParameters.bundleRelatedCameras(syncOn);
return rc;
}
// HAL1 ops entry point: posts QCAMERA_SM_EVT_START_PREVIEW (or the
// no-display variant) to the state machine and blocks for the result.
int QCamera2HardwareInterface::start_preview(struct camera_device *device)
{
CDBG_HIGH("%s: zcf E", __func__);
ATRACE_CALL();
int ret = NO_ERROR;
// Recover the HWI instance stashed in device->priv.
QCamera2HardwareInterface *hw =
reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
if (!hw) {
ALOGE("NULL camera device");
return BAD_VALUE;
}
CDBG_HIGH("[KPI Perf] %s: E PROFILE_START_PREVIEW", __func__);
// Serialize API calls into the state machine.
hw->lockAPI();
qcamera_api_result_t apiResult;
// Event type: QCAMERA_SM_EVT_START_PREVIEW
qcamera_sm_evt_enum_t evt = QCAMERA_SM_EVT_START_PREVIEW;
if (hw->isNoDisplayMode()) {
CDBG_HIGH("%s: zcf isNoDisplayMode ", __func__);
evt = QCAMERA_SM_EVT_START_NODISPLAY_PREVIEW;
}
// Post the event to the state-machine thread.
ret = hw->processAPI(evt, NULL);
if (ret == NO_ERROR) {
// Releases the API lock and waits for the state machine to signal
// the result for this event.
hw->waitAPIResult(evt, &apiResult);
ret = apiResult.status;
}
// Release API serialization.
hw->unlockAPI();
// NOTE(review): set even when ret != NO_ERROR -- confirm this flag is
// intended to mean "start was attempted" rather than "started".
hw->m_bPreviewStarted = true;
CDBG_HIGH("[KPI Perf] %s: X", __func__);
CDBG_HIGH("%s: zcf X", __func__);
return ret;
}
// Forwards an API event to the state machine, rejecting calls once the
// state-machine thread has shut down.
// @param api         state-machine event
// @param api_payload event payload, owned by the caller
// @return procAPI status, or DEAD_OBJECT when the SM thread is inactive
int QCamera2HardwareInterface::processAPI(qcamera_sm_evt_enum_t api, void *api_payload)
{
    if (!m_smThreadActive) {
        return DEAD_OBJECT;
    }
    return m_stateMachine.procAPI(api, api_payload);
}
// Prepares preview resources ahead of streaming. Excerpt: parameter and
// stream setup elided ("..."); the visible step creates the preview
// channel (metadata + preview streams).
int32_t QCamera2HardwareInterface::preparePreview()
{
...
rc = addChannel(QCAMERA_CH_TYPE_PREVIEW);
}
/**
 * Creates the channel of the requested type by dispatching to the
 * type-specific factory method.
 *
 * @param ch_type channel type to create
 * @return factory status, or UNKNOWN_ERROR for an unrecognized type
 *         (identical to the original's default fall-through)
 */
int32_t QCamera2HardwareInterface::addChannel(qcamera_ch_type_enum_t ch_type)
{
    switch (ch_type) {
    case QCAMERA_CH_TYPE_ZSL:
        return addZSLChannel();
    case QCAMERA_CH_TYPE_CAPTURE:
        return addCaptureChannel();
    case QCAMERA_CH_TYPE_PREVIEW:
        return addPreviewChannel();
    case QCAMERA_CH_TYPE_VIDEO:
        return addVideoChannel();
    case QCAMERA_CH_TYPE_SNAPSHOT:
        return addSnapshotChannel();
    case QCAMERA_CH_TYPE_RAW:
        return addRawChannel();
    case QCAMERA_CH_TYPE_METADATA:
        return addMetaDataChannel();
    case QCAMERA_CH_TYPE_CALLBACK:
        return addCallbackChannel();
    case QCAMERA_CH_TYPE_ANALYSIS:
        return addAnalysisChannel();
    default:
        return UNKNOWN_ERROR;
    }
}
// Starts the preview data path: picks the channel to start (ZSL, RAW for
// secure-RAW mode, or plain PREVIEW), optionally starts the callback
// channel, and queues post-processing init on the deferred thread.
int QCamera2HardwareInterface::startPreview()
{
int32_t rc = NO_ERROR;
m_perfLockMgr.acquirePerfLockIfExpired(PERF_LOCK_START_PREVIEW);
updateThermalLevel((void *)&mThermalLevel);
setDisplayFrameSkip();
// start preview stream
if (mParameters.isZSLMode() && mParameters.getRecordingHintValue() != true) {
rc = startChannel(QCAMERA_CH_TYPE_ZSL);
} else if (isSecureMode()) {
// getSecureStreamType => QCameraParametersIntf::getSecureStreamType
// => QCameraParameters.getSecureStreamType(), returning mSecureStraemType.
// mSecureStraemType is set in QCameraParameters::setSecureMode from
// mm_camera_interface.c get_cam_type(camera_id) -> g_cam_ctrl.cam_type[camera_id]:
// CAM_STREAM_TYPE_RAW for CAM_TYPE_SECURE, otherwise CAM_STREAM_TYPE_PREVIEW
// (the common case).
// g_cam_ctrl.cam_type[camera_id] itself is filled during HAL init:
// QCamera2Factory::QCamera2Factory => get_num_of_cameras()
// => g_cam_ctrl.cam_type[num_cameras] = type | is_secure;
if (mParameters.getSecureStreamType() == CAM_STREAM_TYPE_RAW) {
rc = startChannel(QCAMERA_CH_TYPE_RAW);
}else {
rc = startChannel(QCAMERA_CH_TYPE_PREVIEW);
}
} else {
rc = startChannel(QCAMERA_CH_TYPE_PREVIEW);
}
if ((msgTypeEnabled(CAMERA_MSG_PREVIEW_FRAME))
&& (m_channels[QCAMERA_CH_TYPE_CALLBACK] != NULL)) {
rc = startChannel(QCAMERA_CH_TYPE_CALLBACK);
}
updatePostPreviewParameters();
m_stateMachine.setPreviewCallbackNeeded(true);
// if job id is non-zero, that means the postproc init job is already
// pending or complete
// A non-zero job id means postproc init is already pending/complete;
// otherwise queue it on the deferred-work thread now.
if (mInitPProcJob == 0) {
mInitPProcJob = deferPPInit();
}
LOGI("X rc = %d", rc);
return rc;
}
/**
 * Starts the channel of the given type if it exists.
 *
 * @param ch_type channel slot to start
 * @return channel start() status, or UNKNOWN_ERROR when the channel was
 *         never added
 *
 * Fix vs. original: guards the m_channels[ch_type] entry, which was
 * dereferenced unconditionally and would crash if addChannel had failed
 * or been skipped.
 */
int32_t QCamera2HardwareInterface::startChannel(qcamera_ch_type_enum_t ch_type)
{
    int32_t rc = UNKNOWN_ERROR;
    if (m_channels[ch_type] != NULL) {
        rc = m_channels[ch_type]->start();
    }
    return rc;
}
/**
 * Creates the preview channel: metadata stream plus one of RDI / no-display
 * preview / display preview, with an optional debug RAW stream gated by the
 * persist.camera.raw_yuv property.
 *
 * @return NO_ERROR on success; first failing status otherwise
 *
 * Fixes vs. original:
 *  - pChannel->init() result is now checked (failure previously leaked
 *    pChannel and continued with a half-initialized channel);
 *  - the raw-stream LOGE had a format-string/argument mismatch
 *    ("ret = %d" with __FUNCTION__ passed as the %d argument).
 */
int32_t QCamera2HardwareInterface::addPreviewChannel()
{
    int32_t rc = NO_ERROR;
    QCameraChannel *pChannel = NULL;
    char value[PROPERTY_VALUE_MAX];
    bool raw_yuv = false;

    if (m_channels[QCAMERA_CH_TYPE_PREVIEW] != NULL) {
        // if we had preview channel before, delete it first
        delete m_channels[QCAMERA_CH_TYPE_PREVIEW];
        m_channels[QCAMERA_CH_TYPE_PREVIEW] = NULL;
    }

    uint32_t handle = getCamHandleForChannel(QCAMERA_CH_TYPE_PREVIEW);
    pChannel = new QCameraChannel(handle, mCameraHandle->ops);

    // preview only channel, don't need bundle attr and cb
    rc = pChannel->init(NULL, NULL, NULL);
    if (rc != NO_ERROR) {
        LOGE("init preview channel failed, ret = %d", rc);
        delete pChannel;
        return rc;
    }

    // meta data stream always coexists with preview if applicable
    rc = addStreamToChannel(pChannel, CAM_STREAM_TYPE_METADATA,
            metadata_stream_cb_routine, this);

    if (isRdiMode()) {
        rc = addStreamToChannel(pChannel, CAM_STREAM_TYPE_RAW,
                rdi_mode_stream_cb_routine, this);
    } else {
        if (isNoDisplayMode()) {
            rc = addStreamToChannel(pChannel, CAM_STREAM_TYPE_PREVIEW,
                    nodisplay_preview_stream_cb_routine, this);
        } else {
            // Add the display preview stream.
            rc = addStreamToChannel(pChannel, CAM_STREAM_TYPE_PREVIEW,
                    preview_stream_cb_routine, this);
            if (needSyncCB(CAM_STREAM_TYPE_PREVIEW) == TRUE) {
                // Synchronous per-frame YUV callback for the preview stream.
                pChannel->setStreamSyncCB(CAM_STREAM_TYPE_PREVIEW,
                        synchronous_stream_cb_routine);
            }
        }
    }

    property_get("persist.camera.raw_yuv", value, "0");
    raw_yuv = atoi(value) > 0 ? true : false;
    if ( raw_yuv ) {
        // Debug RAW stream (unprocessed sensor data); dumps can be pulled
        // from /data/vendor/camera/ via adb.
        rc = addStreamToChannel(pChannel, CAM_STREAM_TYPE_RAW,
                preview_raw_stream_cb_routine, this);
        if ( rc != NO_ERROR ) {
            LOGE("%s: add raw stream failed, ret = %d", __FUNCTION__, rc);
            delete pChannel;
            return rc;
        }
    }

    m_channels[QCAMERA_CH_TYPE_PREVIEW] = pChannel;
    return rc;
}
/**
 * Allocates stream-info memory and adds a stream of the given type to the
 * channel; for asymmetric dual-camera snapshot a second AUX stream is added.
 *
 * @param pChannel   channel to attach the stream to
 * @param streamType CAM_STREAM_TYPE_* of the new stream
 * @param streamCB   per-frame data callback (may be NULL)
 * @param userData   cookie passed back to streamCB
 * @return NO_ERROR on success; NO_MEMORY / addStream status on failure
 *
 * Fixes vs. original: the first allocateStreamInfoBuf result and the first
 * addStream return code are now checked -- the AUX path already checked
 * both, the MAIN path checked neither.
 */
int32_t QCamera2HardwareInterface::addStreamToChannel(QCameraChannel *pChannel,
        cam_stream_type_t streamType,
        stream_cb_routine streamCB,
        void *userData)
{
    int32_t rc = NO_ERROR;
    QCameraHeapMemory *pStreamInfo = NULL;
    uint32_t cam_type = MM_CAMERA_TYPE_MAIN;
    bool needAuxStream = FALSE;

    if (streamType == CAM_STREAM_TYPE_RAW) {
        prepareRawStream(pChannel);
    }

    if (isDualCamera()) {
        if (!((mParameters.isDCmAsymmetricSnapMode()) &&
                (streamType == CAM_STREAM_TYPE_SNAPSHOT))) {
            // Symmetric case: one stream serves both physical cameras.
            cam_type |= MM_CAMERA_TYPE_AUX;
        } else {
            // Asymmetric snapshot: AUX needs its own stream (added below).
            needAuxStream = TRUE;
        }
    }

    // Allocate the stream-info buffer for the MAIN (or combined) stream.
    pStreamInfo = allocateStreamInfoBuf(streamType,
            getStreamRefCount(streamType, cam_type), cam_type);
    if (pStreamInfo == NULL) {
        LOGE("no mem for stream info buf");
        return NO_MEMORY;
    }

    bool bDynAllocBuf = false;
    if (isZSLMode() && streamType == CAM_STREAM_TYPE_SNAPSHOT) {
        bDynAllocBuf = true;
    }

    cam_padding_info_t padding_info;
    getPaddingInfo(streamType, &padding_info);

    bool deferAllocation = needDeferred(streamType);
    // Add the data stream to the channel.
    rc = pChannel->addStream(*this,
            pStreamInfo, NULL, &padding_info,
            streamCB, userData, bDynAllocBuf,
            deferAllocation, ROTATE_0, cam_type);
    if (rc != NO_ERROR) {
        LOGE("add stream type (%d) cam = %d failed, ret = %d",
                streamType, cam_type, rc);
        return rc;
    }

    /*Add stream for Asymmetric dual camera use case*/
    if (needAuxStream) {
        cam_type = MM_CAMERA_TYPE_AUX;
        pStreamInfo = allocateStreamInfoBuf(streamType,
                getStreamRefCount(streamType, cam_type), cam_type);
        if (pStreamInfo == NULL) {
            LOGE("no mem for stream info buf");
            return NO_MEMORY;
        }
        rc = pChannel->addStream(*this,
                pStreamInfo, NULL, &padding_info,
                streamCB, userData, bDynAllocBuf,
                deferAllocation, ROTATE_0, cam_type);
        if (rc != NO_ERROR) {
            LOGE("add stream type (%d) cam = %d failed, ret = %d",
                    streamType, cam_type, rc);
            return rc;
        }
    }
    return rc;
}
hardware/qcom/camera/msm8998/QCamera2/HAL/QCameraChannel.cpp
// Creates a QCameraStream on this channel and initializes it.
// NOTE: this is an excerpt -- the full source also registers pStream in
// mStreams on success and releases it on init failure; as written here
// pStream is neither stored nor freed (would leak). Verify against the
// full file.
int32_t QCameraChannel::addStream(QCameraAllocator &allocator,
QCameraHeapMemory *streamInfoBuf, QCameraHeapMemory *miscBuf,
cam_padding_info_t *paddingInfo, stream_cb_routine stream_cb,
void *userdata, bool bDynAllocBuf, bool bDeffAlloc,
cam_rotation_t online_rotation,
uint32_t cam_type)
{
int32_t rc = NO_ERROR;
uint32_t ch_handle = m_handle;
if (cam_type == MM_CAMERA_TYPE_MAIN) {
// get_main_camera_handle: handler & 0x0000ffff, i.e. the low 16 bits
ch_handle = get_main_camera_handle(m_handle);
} else if (cam_type == MM_CAMERA_TYPE_AUX) {
// get_aux_camera_handle: handler & (0x0000ffff << 16), i.e. handler &
// 0xffff0000 -- the high 16 bits
ch_handle = get_aux_camera_handle(m_handle);
}
// New QCameraStream instance; m_camOps comes from the QCameraChannel
// constructor and maps to mm_camera_ops in mm_camera_interface.c.
QCameraStream *pStream = new QCameraStream(allocator,
m_camHandle, ch_handle, m_camOps, paddingInfo, bDeffAlloc,
online_rotation);
// Initialize the stream (maps stream info, registers callbacks).
rc = pStream->init(streamInfoBuf, miscBuf,
stream_cb, userdata, bDynAllocBuf);
return rc;
}
/**
 * Registers this channel with the camera stack and records which physical
 * cameras it drives.
 *
 * @param attr    optional channel attributes (bundling, watermarks)
 * @param dataCB  optional bundled-superbuffer callback
 * @param userData cookie handed back with dataCB
 * @return NO_ERROR (unconditionally, matching the original)
 */
int32_t QCameraChannel::init(mm_camera_channel_attr_t *attr,
                             mm_camera_buf_notify_t dataCB,
                             void *userData)
{
    // m_camOps comes from the QCameraChannel constructor and maps to
    // mm_camera_ops in mm_camera_interface.c; add_channel returns the
    // handle used for every later per-channel call.
    m_handle = m_camOps->add_channel(m_camHandle, attr, dataCB, userData);

    // A dual channel drives both physical cameras; otherwise MAIN only.
    mActiveCameras = isDualChannel()
            ? (MM_CAMERA_TYPE_MAIN | MM_CAMERA_TYPE_AUX)
            : MM_CAMERA_TYPE_MAIN;
    mMasterCamera = MM_CAMERA_TYPE_MAIN;
    mBundledSnapshot = false;
    return NO_ERROR;
}
hardware/qcom/camera/msm8998/QCamera2/HAL/QCameraStream.cpp
/**
 * Stream constructor: records handles/ops, wires the memory vtable
 * (buffer alloc/release callbacks invoked by the mm-camera layer), and
 * zero-initializes bookkeeping state.
 *
 * @param allocator       buffer allocator (HWI)
 * @param camHandle       camera handle
 * @param chId            channel handle (may encode main+aux in lo/hi 16 bits)
 * @param camOps          mm_camera_ops table
 * @param paddingInfo     plane padding requirements, copied locally
 * @param deffered        true => use deferred buffer allocation vtable entries
 * @param online_rotation rotation applied by the stream
 *
 * Fix vs. original: mCamType is not in the member-initializer list and was
 * only assigned when chId carried a main-camera handle, so the later |=
 * could read an indeterminate value. It is now explicitly zeroed first.
 * (If the class header gives mCamType an in-class default this is a
 * harmless re-initialization -- TODO confirm against the header.)
 */
QCameraStream::QCameraStream(QCameraAllocator &allocator,
        uint32_t camHandle, uint32_t chId,
        mm_camera_ops_t *camOps, cam_padding_info_t *paddingInfo,
        bool deffered, cam_rotation_t online_rotation):
        mDumpFrame(0),
        mDumpMetaFrame(0),
        mDumpSkipCnt(0),
        mStreamTimestamp(0),
        mCamHandle(camHandle),
        mChannelHandle(chId),
        mHandle(0),
        mCamOps(camOps),
        mStreamInfo(NULL),
        mNumBufs(0),
        mNumPlaneBufs(0),
        mNumBufsNeedAlloc(0),
        mRegFlags(NULL),
        mDataCB(NULL),
        mSYNCDataCB(NULL),
        mUserData(NULL),
        mDataQ(releaseFrameData, this),
        mStreamInfoBuf(NULL),
        mMiscBuf(NULL),
        mStreamBufs(NULL),
        mStreamBatchBufs(NULL),
        mAllocator(allocator),
        mBufDefs(NULL),
        mPlaneBufDefs(NULL),
        mOnlineRotation(online_rotation),
        mStreamBufsAcquired(false),
        m_bActive(false),
        mDynBufAlloc(false),
        mBufAllocPid(0),
        mDefferedAllocation(deffered),
        wait_for_cond(false),
        mAllocTaskId(0),
        mMapTaskId(0),
        mSyncCBEnabled(false)
{
    // See fix note above: ensure a defined starting value before the
    // conditional assignments below.
    mCamType = 0;
    mDualStream = is_dual_camera_by_handle(chId);
    if (get_main_camera_handle(chId)) {
        mCamType = MM_CAMERA_TYPE_MAIN;
    }
    if (get_aux_camera_handle(chId)) {
        mCamType |= MM_CAMERA_TYPE_AUX;
    }
    // Memory vtable handed to the mm-camera layer; deferred allocation uses
    // the *_deffered variants.
    mMemVtbl.user_data = this;
    if ( !deffered ) {
        mMemVtbl.get_bufs = get_bufs;
        mMemVtbl.put_bufs = put_bufs;
    } else {
        mMemVtbl.get_bufs = get_bufs_deffered;
        mMemVtbl.put_bufs = put_bufs_deffered;
    }
    mMemVtbl.invalidate_buf = invalidate_buf;
    mMemVtbl.clean_invalidate_buf = clean_invalidate_buf;
    mMemVtbl.clean_buf = clean_buf;
    mMemVtbl.set_config_ops = set_config_ops;
    memset(&mFrameLenOffset, 0, sizeof(mFrameLenOffset));
    memcpy(&mPaddingInfo, paddingInfo, sizeof(cam_padding_info_t));
    memset(&mCropInfo, 0, sizeof(cam_rect_t));
    memset(&m_MemOpsTbl, 0, sizeof(mm_camera_map_unmap_ops_tbl_t));
    memset(&m_OutputCrop, 0, sizeof(cam_stream_parm_buffer_t));
    memset(&m_ImgProp, 0, sizeof(cam_stream_parm_buffer_t));
    memset(&mAllocTask, 0, sizeof(mAllocTask));
    memset(&mMapTask, 0, sizeof(mMapTask));
    pthread_mutex_init(&mCropLock, NULL);
    pthread_mutex_init(&mParameterLock, NULL);
    mCurMetaMemory = NULL;
    mCurBufIndex = -1;
    mCurMetaIndex = -1;
    mFirstTimeStamp = 0;
    memset (&mStreamMetaMemory, 0,
            (sizeof(MetaMemory) * CAMERA_MIN_VIDEO_BATCH_BUFFERS));
    pthread_mutex_init(&m_lock, NULL);
    pthread_cond_init(&m_cond, NULL);
}
/**
 * Initializes the stream: maps stream-info memory, registers the stream
 * with the camera stack (add_stream), configures it, and records the data
 * callback and user cookie.
 *
 * @param streamInfoBuf mandatory stream-info heap memory
 * @param miscBuf       optional misc buffer to map
 * @param stream_cb     per-frame callback stored in mDataCB
 * @param userdata      cookie stored in mUserData
 * @param bDynallocBuf  enable dynamic buffer allocation
 * @return NO_ERROR/OK on success; error code otherwise
 *
 * Fixes vs. original: streamInfoBuf is NULL-checked before use, and the
 * function returns rc instead of a literal 0, which swallowed mapBufs /
 * configStream failures.
 */
int32_t QCameraStream::init(QCameraHeapMemory *streamInfoBuf,
                            QCameraHeapMemory *miscBuf,
                            stream_cb_routine stream_cb,
                            void *userdata,
                            bool bDynallocBuf)
{
    int32_t rc = OK;

    if (streamInfoBuf == NULL) {
        ALOGE("%s: NULL stream info buf", __func__);
        return NO_MEMORY;
    }

    // assign and map stream info memory
    mStreamInfoBuf = streamInfoBuf;
    mStreamInfo = reinterpret_cast<cam_stream_info_t *>(mStreamInfoBuf->getPtr(0));
    if (mStreamInfo == NULL) {
        ALOGE("%s: stream info ptr is NULL", __func__);
        return NO_MEMORY;
    }
    mNumBufs = mStreamInfo->num_bufs;
    mDynBufAlloc = bDynallocBuf;

    // Calculate buffer size for deffered allocation
    if (mDefferedAllocation) {
        mAllocTask.bgFunction = backgroundAllocate;
        mAllocTask.bgArgs = this;
        mAllocTaskId = mAllocator.scheduleBackgroundTask(&mAllocTask);
    }

    // Register the stream; the returned handle drives all later stream ops.
    mHandle = mCamOps->add_stream(mCamHandle, mChannelHandle);

    mMasterCamera = MM_CAMERA_TYPE_MAIN;
    if (mCamType & MM_CAMERA_TYPE_MAIN) {
        mActiveCameras = MM_CAMERA_TYPE_MAIN;
    }
    if (mCamType & MM_CAMERA_TYPE_AUX) {
        mActiveCameras |= MM_CAMERA_TYPE_AUX;
    }

    rc = mapBufs(mStreamInfoBuf, CAM_MAPPING_BUF_TYPE_STREAM_INFO, NULL);
    if (rc < 0) {
        ALOGE("%s: map stream info failed, rc = %d", __func__, rc);
        return rc;
    }

    mMiscBuf = miscBuf;
    if (miscBuf) {
        rc = mapBufs(mMiscBuf, CAM_MAPPING_BUF_TYPE_MISC_BUF, NULL);
        if (rc < 0) {
            ALOGE("%s: map misc buf failed, rc = %d", __func__, rc);
            return rc;
        }
    }

    rc = configStream();
    if (rc < 0) {
        ALOGE("%s: config stream failed, rc = %d", __func__, rc);
        return rc;
    }

    if (mDefferedAllocation) {
        mMapTask.bgFunction = backgroundMap;
        mMapTask.bgArgs = this;
        mMapTaskId = mAllocator.scheduleBackgroundTask(&mMapTask);
    }

    mDataCB = stream_cb;
    mUserData = userdata;
    // Was `return 0`, which discarded rc from the calls above.
    return rc;
}
// Starts the stream's processing thread. Excerpt: the tail of the function
// is elided ("..."). Initializes the frame queue and launches
// dataProcRoutine, whose loop services CAMERA_CMD_TYPE_DO_NEXT_JOB.
int32_t QCameraStream::start()
{
int32_t rc = 0;
mDataQ.init();
rc = mProcTh.launch(dataProcRoutine, this);
...
return rc;
}
/**
 * Driver-side frame callback, registered via configStream; the mm-camera
 * layer invokes it for every received superbuffer. Copies the descriptor so
 * the stream's processing thread owns its own instance, then hands it to
 * processDataNotify.
 *
 * @param recvd_frame superbuffer descriptor owned by the caller
 * @param userdata    the QCameraStream registered at config time
 *
 * Fix vs. original: the malloc result was dereferenced without a NULL
 * check; arguments are also validated.
 */
void QCameraStream::dataNotifyCB(mm_camera_super_buf_t *recvd_frame,
                                 void *userdata)
{
    QCameraStream* stream = (QCameraStream *)userdata;
    if (stream == NULL || recvd_frame == NULL) {
        ALOGE("%s: invalid args", __func__);
        return;
    }
    mm_camera_super_buf_t *frame =
        (mm_camera_super_buf_t *)malloc(sizeof(mm_camera_super_buf_t));
    if (frame == NULL) {
        ALOGE("%s: No memory for mm_camera_super_buf_t", __func__);
        return;
    }
    *frame = *recvd_frame;
    stream->processDataNotify(frame);
    return;
}
/**
 * Hand a driver-delivered frame to the stream's processing thread.
 * On successful enqueue, wakes dataProcRoutine with
 * CAMERA_CMD_TYPE_DO_NEXT_JOB. On enqueue failure the frame buffer is
 * recycled (bufDone) when the stream is active, and the wrapper is freed.
 *
 * @param frame heap-allocated super-buf copy; ownership transfers here
 * @return result of sendCmd, or NO_ERROR when the frame was dropped
 */
int32_t QCameraStream::processDataNotify(mm_camera_super_buf_t *frame)
{
    if (!mDataQ.enqueue((void *)frame)) {
        // Could not queue: recycle the buffer if the stream still runs.
        if (m_bActive) {
            bufDone(frame);
        } else {
            LOGW("Stream thread is not active, no ops here %d", getMyType());
        }
        free(frame);
        return NO_ERROR;
    }
    // Queued successfully — unblock the processing loop.
    return mProcTh.sendCmd(CAMERA_CMD_TYPE_DO_NEXT_JOB, FALSE, FALSE);
}
hardware/qcom/camera/msm8998/QCamera2/stack/mm-camera-interface/src/mm_camera_interface.c
/**
 * Interface-layer wrapper: create a channel on the camera identified by
 * camera_handle by delegating to mm_camera_add_channel().
 *
 * Locking contract: g_intf_lock protects the handle lookup; cam_lock is
 * taken here and released inside mm_camera_add_channel() on its exit path.
 *
 * @return the new channel handle, or 0 on failure
 */
static uint32_t mm_camera_intf_add_channel(uint32_t camera_handle,
        mm_camera_channel_attr_t *attr,
        mm_camera_buf_notify_t channel_cb,
        void *userdata)
{
    uint32_t ch_id = 0;
    mm_camera_obj_t *my_obj = NULL;
    uint32_t handle = get_main_camera_handle(camera_handle);
    /* NOTE(review): the aux-camera path of this function is elided in this
     * excerpt; only the main-camera handle is serviced here. */

    if (handle) {
        pthread_mutex_lock(&g_intf_lock);
        my_obj = mm_camera_util_get_camera_by_handler(handle);
        if (my_obj) {
            /* BUG FIX: take cam_lock before dropping g_intf_lock —
             * mm_camera_add_channel() unlocks cam_lock on return. */
            pthread_mutex_lock(&my_obj->cam_lock);
            pthread_mutex_unlock(&g_intf_lock);
            ch_id = mm_camera_add_channel(my_obj, attr, channel_cb, userdata);
        } else {
            /* BUG FIX: original leaked g_intf_lock on the NULL path. */
            pthread_mutex_unlock(&g_intf_lock);
        }
    }
    return ch_id;
}
/**
 * Interface-layer wrapper: add a stream to the channel identified by ch_id
 * on the main camera, delegating to mm_camera_add_stream().
 *
 * @return the new stream handle, or 0 on failure
 */
static uint32_t mm_camera_intf_add_stream(uint32_t camera_handle,
        uint32_t ch_id)
{
    uint32_t stream_id = 0;
    mm_camera_obj_t *my_obj = NULL;
    uint32_t m_ch_id = get_main_camera_handle(ch_id);
    /* Cleanup: dropped unused locals aux_stream_id (was uninitialized)
     * and aux_chid — the aux-camera path is elided in this excerpt. */

    if (m_ch_id) {
        uint32_t handle = get_main_camera_handle(camera_handle);
        /* resolve the mm_camera_obj_t created at camera_open time */
        my_obj = mm_camera_util_get_camera_by_handler(handle);
        if (my_obj) {
            stream_id = mm_camera_add_stream(my_obj, m_ch_id);
        }
    }
    return stream_id;
}
/**
 * Interface-layer wrapper: register a per-frame buffer callback on a
 * stream, delegating to mm_camera_reg_stream_buf_cb().
 *
 * @param buf_cb  callback to invoke for each buffer
 * @param cb_type synchronous or asynchronous delivery
 * @return 0 on success, negative on failure
 */
static int32_t mm_camera_intf_register_stream_buf_cb(uint32_t camera_handle,
        uint32_t ch_id, uint32_t stream_id, mm_camera_buf_notify_t buf_cb,
        mm_camera_stream_cb_type cb_type, void *userdata)
{
    int32_t rc = 0;
    mm_camera_obj_t *my_obj = NULL;
    uint32_t strid = get_main_camera_handle(stream_id);
    /* Cleanup: dropped unused local aux_strid — the aux-camera path is
     * elided in this excerpt. */

    if (strid) {
        uint32_t handle = get_main_camera_handle(camera_handle);
        uint32_t chid = get_main_camera_handle(ch_id);
        /* resolve the mm_camera_obj_t created at camera_open time */
        my_obj = mm_camera_util_get_camera_by_handler(handle);
        if (my_obj) {
            rc = mm_camera_reg_stream_buf_cb(my_obj, chid, strid,
                    buf_cb, cb_type, userdata);
        }
    }
    return (int32_t)rc;
}
/**
 * Interface-layer wrapper: start the given channel and optionally ask the
 * sensor to start streaming afterwards.
 * NOTE: tail of the function (aux path / cleanup) is elided ("...").
 *
 * @param start_sensor_streaming when true and channel start succeeded,
 *        also send MM_CHANNEL_EVT_START_SENSOR_STREAMING
 * @return 0 on success, -1 on failure
 */
static int32_t mm_camera_intf_start_channel(uint32_t camera_handle,
        uint32_t ch_id,
        bool start_sensor_streaming)
{
    int32_t rc = -1;
    mm_camera_obj_t * my_obj = NULL;
    uint32_t chid = get_main_camera_handle(ch_id);
    uint32_t aux_chid = get_aux_camera_handle(ch_id);

    if (chid) {
        uint32_t handle = get_main_camera_handle(camera_handle);
        my_obj = mm_camera_util_get_camera_by_handler(handle);
        if(my_obj) {
            rc = mm_camera_start_channel(my_obj, chid);
            // Start sensor streaming now if needed.
            if (rc == 0 && start_sensor_streaming) {
                // my_obj is the cam_obj initialized at camera_open; ch_id is
                // the handle returned when the preview QCameraChannel was added.
                rc = mm_camera_start_sensor_stream_on(my_obj, ch_id);
            }
        }
    }
    ...
    return rc;
}
hardware/qcom/camera/msm8998/QCamera2/stack/mm-camera-interface/src/mm_camera.c
/**
 * Allocate and initialize a channel slot on this camera object.
 * Finds the first unused entry in my_obj->ch[], generates a handle for it,
 * stores the owning camera object and session id, sets the state to
 * STOPPED, and finishes setup via mm_channel_init().
 *
 * NOTE(review): this function unlocks my_obj->cam_lock at the end but never
 * locks it — the lock must be held by the caller (interface layer); confirm
 * against mm_camera_intf_add_channel.
 *
 * @return the new channel handle, or 0 when every slot is in use
 */
uint32_t mm_camera_add_channel(mm_camera_obj_t *my_obj,
        mm_camera_channel_attr_t *attr,
        mm_camera_buf_notify_t channel_cb,
        void *userdata)
{
    mm_channel_t *ch_obj = NULL;
    uint8_t ch_idx = 0;
    uint32_t ch_hdl = 0;

    // Find the first free channel slot.
    for(ch_idx = 0; ch_idx < MM_CAMERA_CHANNEL_MAX; ch_idx++) {
        if (MM_CHANNEL_STATE_NOTUSED == my_obj->ch[ch_idx].state) {
            ch_obj = &my_obj->ch[ch_idx];
            break;
        }
    }
    /* Initialize the ch_obj structure: first generate a handle for it (also
     * the return value of this function), then set the state to STOPPED.
     * Note that the my_obj pointer and its session id are saved here, and
     * mm_channel_init() completes the channel initialization. */
    if (NULL != ch_obj) {
        /* initialize channel obj */
        memset(ch_obj, 0, sizeof(mm_channel_t));
        ch_hdl = mm_camera_util_generate_handler_by_num(my_obj->my_num, ch_idx);
        ch_obj->my_hdl = ch_hdl;
        ch_obj->state = MM_CHANNEL_STATE_STOPPED;
        ch_obj->cam_obj = my_obj;
        pthread_mutex_init(&ch_obj->ch_lock, NULL);
        ch_obj->sessionid = my_obj->sessionid;
        mm_channel_init(ch_obj, attr, channel_cb, userdata);
    }
    pthread_mutex_unlock(&my_obj->cam_lock);
    return ch_hdl;
}
/**
 * Add a stream to an existing channel.
 * Resolves the channel object for ch_id and drives its FSM with
 * MM_CHANNEL_EVT_ADD_STREAM; the FSM writes the new stream handle into
 * the out parameter.
 *
 * @return the new stream handle, or 0 when the channel is unknown
 */
uint32_t mm_camera_add_stream(mm_camera_obj_t *my_obj,
        uint32_t ch_id)
{
    uint32_t stream_hdl = 0;
    mm_channel_t *channel =
            mm_camera_util_get_channel_by_handler(my_obj, ch_id);

    if (channel == NULL) {
        return stream_hdl;
    }
    mm_channel_fsm_fn(channel,
            MM_CHANNEL_EVT_ADD_STREAM,
            NULL,
            (void *)&stream_hdl);
    return stream_hdl;
}
/**
 * Register a buffer callback on a stream via the channel FSM.
 * Builds an mm_stream_data_cb_t (cb_count = -1 keeps the callback
 * registered indefinitely) and sends MM_CHANNEL_EVT_REG_STREAM_BUF_CB.
 *
 * @return FSM result (0 on success), or -1 when the channel is unknown
 */
int32_t mm_camera_reg_stream_buf_cb(mm_camera_obj_t *my_obj,
        uint32_t ch_id, uint32_t stream_id, mm_camera_buf_notify_t stream_cb,
        mm_camera_stream_cb_type cb_type, void *userdata)
{
    /* BUG FIX: rc was initialized to 0 and never updated, so the function
     * reported success even when the channel lookup or FSM call failed. */
    int32_t rc = -1;
    mm_stream_data_cb_t buf_cb;
    mm_channel_t *ch_obj =
            mm_camera_util_get_channel_by_handler(my_obj, ch_id);

    if (NULL != ch_obj) {
        memset(&buf_cb, 0, sizeof(mm_stream_data_cb_t));
        buf_cb.cb = stream_cb;
        buf_cb.cb_count = -1;   /* -1 => persistent callback */
        buf_cb.cb_type = cb_type;
        buf_cb.user_data = userdata;

        mm_evt_paylod_reg_stream_buf_cb payload;
        memset(&payload, 0, sizeof(mm_evt_paylod_reg_stream_buf_cb));
        payload.buf_cb = buf_cb;
        payload.stream_id = stream_id;
        rc = mm_channel_fsm_fn(ch_obj,
                MM_CHANNEL_EVT_REG_STREAM_BUF_CB,
                (void *)&payload, NULL);
    }
    return rc;
}
/**
 * Ask the channel to start sensor streaming.
 * Resolves the channel for ch_id and forwards
 * MM_CHANNEL_EVT_START_SENSOR_STREAMING to its FSM.
 *
 * @return FSM result, or -1 when the channel is unknown
 */
int32_t mm_camera_start_sensor_stream_on(mm_camera_obj_t *my_obj, uint32_t ch_id)
{
    mm_channel_t *channel =
            mm_camera_util_get_channel_by_handler(my_obj, ch_id);

    if (channel == NULL) {
        return -1;
    }
    return mm_channel_fsm_fn(channel,
            MM_CHANNEL_EVT_START_SENSOR_STREAMING,
            NULL,
            NULL);
}
/**
 * Start a channel: resolve it by handle and forward MM_CHANNEL_EVT_START
 * to its FSM (handled in the STOPPED state by mm_channel_start()).
 *
 * @return FSM result, or -1 when the channel is unknown
 */
int32_t mm_camera_start_channel(mm_camera_obj_t *my_obj, uint32_t ch_id)
{
    mm_channel_t *channel =
            mm_camera_util_get_channel_by_handler(my_obj, ch_id);

    if (channel == NULL) {
        return -1;
    }
    return mm_channel_fsm_fn(channel,
            MM_CHANNEL_EVT_START,
            NULL,
            NULL);
}
hardware/qcom/camera/msm8998/QCamera2/stack/mm-camera-interface/src/mm_camera_channel.c
/**
 * Initialize a freshly allocated channel object.
 * Records the bundle callback/userdata and optional attributes, resets the
 * frame-sync state, launches the data poll thread, and leaves the channel
 * in the MM_CHANNEL_STATE_STOPPED state.
 *
 * @return 0 (rc is never set to an error in this excerpt)
 */
int32_t mm_channel_init(mm_channel_t *my_obj,
        mm_camera_channel_attr_t *attr,
        mm_camera_buf_notify_t channel_cb,
        void *userdata)
{
    int32_t rc = 0;

    // Super-buf bundle callback and its user data.
    my_obj->bundle.super_buf_notify_cb = channel_cb;
    my_obj->bundle.user_data = userdata;
    if (NULL != attr) {
        my_obj->bundle.superbuf_queue.attr = *attr;
    }
    my_obj->num_s_cnt = 0;

    // Reset frame-sync state for multi-camera synchronization.
    memset(&my_obj->frame_sync, 0, sizeof(my_obj->frame_sync));
    pthread_mutex_init(&my_obj->frame_sync.sync_lock, NULL);
    mm_muxer_frame_sync_queue_init(&my_obj->frame_sync.superbuf_queue);
    my_obj->bundle.is_cb_active = 1;

    LOGD("Launch data poll thread in channel open");
    snprintf(my_obj->poll_thread[0].threadName, THREAD_NAME_SIZE, "CAM_dataPoll");
    // Start the data polling thread for this opened channel.
    mm_camera_poll_thread_launch(&my_obj->poll_thread[0],
            MM_CAMERA_POLL_TYPE_DATA);

    /* change state to stopped state (MM_CHANNEL_STATE_STOPPED) */
    my_obj->state = MM_CHANNEL_STATE_STOPPED;
    return rc;
}
/**
 * Channel finite-state-machine entry point.
 * Dispatches the event to the handler for the channel's current state
 * (NOTUSED / STOPPED / ACTIVE / PAUSED).
 *
 * @param evt     event to process
 * @param in_val  event-specific input payload (may be NULL)
 * @param out_val event-specific output (may be NULL)
 * @return handler result, or -1 for an unknown state
 */
int32_t mm_channel_fsm_fn(mm_channel_t *my_obj,
        mm_channel_evt_type_t evt,
        void * in_val,
        void * out_val)
{
    int32_t rc = -1;

    switch (my_obj->state) {
    case MM_CHANNEL_STATE_NOTUSED:
        rc = mm_channel_fsm_fn_notused(my_obj, evt, in_val, out_val);
        break;
    case MM_CHANNEL_STATE_STOPPED:
        rc = mm_channel_fsm_fn_stopped(my_obj, evt, in_val, out_val);
        break;
    case MM_CHANNEL_STATE_ACTIVE:
        rc = mm_channel_fsm_fn_active(my_obj, evt, in_val, out_val);
        break;
    case MM_CHANNEL_STATE_PAUSED:
        rc = mm_channel_fsm_fn_paused(my_obj, evt, in_val, out_val);
        break;
    default:
        LOGD("Not a valid state (%d)", my_obj->state);
        break;
    }
    return rc;
}
/**
 * Event handler for a channel in the STOPPED state.
 * Handles ADD_STREAM (returns the new stream handle through out_val),
 * REG_STREAM_BUF_CB, START (transitions to ACTIVE on success) and
 * CONFIG_STREAM. Remaining cases are elided ("...") in this excerpt.
 *
 * @return event-specific result code
 */
int32_t mm_channel_fsm_fn_stopped(mm_channel_t *my_obj,
        mm_channel_evt_type_t evt,
        void * in_val,
        void * out_val)
{
    int32_t rc = 0;
    LOGD("E evt = %d", evt);
    switch (evt) {
    case MM_CHANNEL_EVT_ADD_STREAM:
        {
            uint32_t s_hdl = 0;
            s_hdl = mm_channel_add_stream(my_obj);
            // Hand the new stream handle back to the caller.
            *((uint32_t*)out_val) = s_hdl;
            rc = 0;
        }
        break;
    case MM_CHANNEL_EVT_REG_STREAM_BUF_CB:
        {
            mm_evt_paylod_reg_stream_buf_cb *payload =
                    (mm_evt_paylod_reg_stream_buf_cb *)in_val;
            rc = mm_channel_reg_stream_buf_cb (my_obj,
                    payload->stream_id, payload->buf_cb);
        }
        break;
    case MM_CHANNEL_EVT_START:
        {
            rc = mm_channel_start(my_obj);
            /* first stream started in stopped state
             * move to active state */
            if (0 == rc) {
                my_obj->state = MM_CHANNEL_STATE_ACTIVE;
            }
        }
        break;
    case MM_CHANNEL_EVT_CONFIG_STREAM:
        {
            mm_evt_paylod_config_stream_t *payload =
                    (mm_evt_paylod_config_stream_t *)in_val;
            rc = mm_channel_config_stream(my_obj,
                    payload->stream_id,
                    payload->config);
        }
        break;
    ...
    }
    return rc;
}
/**
 * Start all streams bundled in this channel.
 * Collects the channel's stream objects (moving any metadata stream to the
 * front so it starts first), initializes the super-buf queue and launches
 * the dispatch/callback threads when a bundle callback exists, links
 * foreign streams, then for each owned stream allocates buffers, registers
 * them, and streams on. NOTE: part of the tail is elided ("...").
 *
 * @return result of the last stream FSM call (0 on success)
 */
int32_t mm_channel_start(mm_channel_t *my_obj)
{
    int32_t rc = 0;
    int i = 0, j = 0;
    mm_stream_t *s_objs[MAX_STREAM_NUM_IN_BUNDLE] = {NULL};
    uint8_t num_streams_to_start = 0;
    uint8_t num_streams_in_bundle_queue = 0;
    mm_stream_t *s_obj = NULL;
    int meta_stream_idx = 0;
    cam_stream_type_t stream_type = CAM_STREAM_TYPE_DEFAULT;

    // Collect every valid stream object attached to this channel.
    for (i = 0; i < MAX_STREAM_NUM_IN_BUNDLE; i++) {
        if (my_obj->streams[i].my_hdl > 0) {
            s_obj = mm_channel_util_get_stream_by_handler(my_obj,
                    my_obj->streams[i].my_hdl);
            if (NULL != s_obj) {
                stream_type = s_obj->stream_info->stream_type;
                /* remember meta data stream index */
                if ((stream_type == CAM_STREAM_TYPE_METADATA) &&
                        (s_obj->ch_obj == my_obj)) {
                    meta_stream_idx = num_streams_to_start;
                }
                s_objs[num_streams_to_start++] = s_obj;
                if (!s_obj->stream_info->noFrameExpected) {
                    num_streams_in_bundle_queue++;
                }
            }
        }
    }
    if (meta_stream_idx > 0 ) {
        /* always start meta data stream first, so switch the stream object with the first one */
        s_obj = s_objs[0];
        s_objs[0] = s_objs[meta_stream_idx];
        s_objs[meta_stream_idx] = s_obj;
    }
    if (NULL != my_obj->bundle.super_buf_notify_cb) {
        /* need to send up cb, therefore launch thread */
        /* init superbuf queue */
        mm_channel_superbuf_queue_init(&my_obj->bundle.superbuf_queue);
        my_obj->bundle.superbuf_queue.num_streams = num_streams_in_bundle_queue;
        my_obj->bundle.superbuf_queue.expected_frame_id =
                my_obj->bundle.superbuf_queue.attr.user_expected_frame_id;
        my_obj->bundle.superbuf_queue.expected_frame_id_without_led = 0;
        my_obj->bundle.superbuf_queue.led_off_start_frame_id = 0;
        my_obj->bundle.superbuf_queue.led_on_start_frame_id = 0;
        my_obj->bundle.superbuf_queue.led_on_num_frames = 0;
        my_obj->bundle.superbuf_queue.good_frame_id = 0;
        for (i = 0; i < num_streams_to_start; i++) {
            /* Only bundle streams that belong to the channel */
            if(!(s_objs[i]->stream_info->noFrameExpected)) {
                if (s_objs[i]->ch_obj == my_obj) {
                    /* set bundled flag to streams */
                    s_objs[i]->is_bundled = 1;
                }
                my_obj->bundle.superbuf_queue.bundled_streams[j++] = s_objs[i]->my_hdl;
            }
        }
        /* launch cb thread for dispatching super buf through cb */
        snprintf(my_obj->cb_thread.threadName, THREAD_NAME_SIZE, "CAM_SuperBuf");
        mm_camera_cmd_thread_launch(&my_obj->cb_thread,
                mm_channel_dispatch_super_buf,
                (void*)my_obj);
        /* launch cmd thread as the callback that receives super-buf data */
        snprintf(my_obj->cmd_thread.threadName, THREAD_NAME_SIZE, "CAM_SuperBufCB");
        mm_camera_cmd_thread_launch(&my_obj->cmd_thread,
                mm_channel_process_stream_buf,
                (void*)my_obj);
        /* set flag to TRUE */
        my_obj->bundle.is_active = TRUE;
    }
    /* link any streams first before starting the rest of the streams */
    for (i = 0; i < num_streams_to_start; i++) {
        if (s_objs[i]->ch_obj != my_obj) {
            pthread_mutex_lock(&s_objs[i]->linked_stream->buf_lock);
            s_objs[i]->linked_stream->linked_obj = my_obj;
            s_objs[i]->linked_stream->is_linked = 1;
            pthread_mutex_unlock(&s_objs[i]->linked_stream->buf_lock);
            continue;
        }
    }
    for (i = 0; i < num_streams_to_start; i++) {
        if (s_objs[i]->ch_obj != my_obj) {
            continue;
        }
        /* allocate buffers for each stream (GET_BUF) */
        rc = mm_stream_fsm_fn(s_objs[i],
                MM_STREAM_EVT_GET_BUF,
                NULL,
                NULL);
        /* register the buffers for each stream (REG_BUF) */
        rc = mm_stream_fsm_fn(s_objs[i],
                MM_STREAM_EVT_REG_BUF,
                NULL,
                NULL);
        /* start the stream */
        rc = mm_stream_fsm_fn(s_objs[i],
                MM_STREAM_EVT_START,
                NULL,
                NULL);
    }
    ...
    my_obj->bWaitForPrepSnapshotDone = 0;
    if (my_obj->bundle.superbuf_queue.attr.enable_frame_sync) {
        // Register this channel for frame synchronization.
        mm_frame_sync_register_channel(my_obj);
    }
    return rc;
}
/**
 * Event handler for a channel in the ACTIVE state.
 * This excerpt only shows MM_CHANNEL_EVT_START_SENSOR_STREAMING; other
 * cases are elided ("...").
 *
 * @return event-specific result code (0 default)
 */
int32_t mm_channel_fsm_fn_active(mm_channel_t *my_obj,
        mm_channel_evt_type_t evt,
        void * in_val,
        void * out_val)
{
    int32_t rc = 0;
    LOGD("E evt = %d", evt);
    switch (evt) {
    ...
    case MM_CHANNEL_EVT_START_SENSOR_STREAMING:
        {
            rc = mm_channel_start_sensor_streaming(my_obj);
        }
        break;
    default:
        LOGE("invalid state (%d) for evt (%d), in(%p), out(%p)",
                my_obj->state, evt, in_val, out_val);
        break;
    }
    LOGD("X rc = %d", rc);
    return rc;
}
/**
 * Register a buffer callback on one stream of this channel.
 * Looks the stream up by its handle within the channel (ch_obj) and, if
 * found, forwards the callback to mm_stream_reg_buf_cb().
 *
 * @return result of the stream-level registration, or -1 when stream_id
 *         does not belong to this channel
 */
int32_t mm_channel_reg_stream_buf_cb (mm_channel_t* my_obj,
        uint32_t stream_id, mm_stream_data_cb_t buf_cb)
{
    mm_stream_t *stream = mm_channel_util_get_stream_by_handler(my_obj,
            stream_id);

    if (stream == NULL) {
        return -1;
    }
    return mm_stream_reg_buf_cb(stream, buf_cb);
}
/**
 * Allocate and initialize a stream slot on this channel.
 * Finds the first unused entry in my_obj->streams[], initializes it
 * (state = MM_STREAM_STATE_INITED, fd = -1), and acquires the stream via
 * the stream FSM (which opens the /dev/videoX node).
 *
 * @return the new stream handle, or 0 on failure
 */
uint32_t mm_channel_add_stream(mm_channel_t *my_obj)
{
    int32_t rc = 0;
    uint8_t idx = 0;
    uint32_t s_hdl = 0;
    mm_stream_t *stream_obj = NULL;

    /* check available stream slot */
    for (idx = 0; idx < MAX_STREAM_NUM_IN_BUNDLE; idx++) {
        if (MM_STREAM_STATE_NOTUSED == my_obj->streams[idx].state) {
            stream_obj = &my_obj->streams[idx];
            break;
        }
    }
    if (NULL == stream_obj) {
        /* BUG FIX: the original memset() a NULL pointer when all
         * MAX_STREAM_NUM_IN_BUNDLE slots were already in use. */
        LOGE("streams reach max, no more stream allowed to add");
        return s_hdl;
    }

    /* initialize the stream object */
    memset(stream_obj, 0, sizeof(mm_stream_t));
    stream_obj->fd = -1;
    stream_obj->my_hdl = mm_camera_util_generate_handler_by_num (
            my_obj->cam_obj->my_num, idx);
    stream_obj->ch_obj = my_obj;
    /* mark the stream as initialized */
    stream_obj->state = MM_STREAM_STATE_INITED;

    /* acquire the stream (opens the device node via the FSM) */
    rc = mm_stream_fsm_fn(stream_obj, MM_STREAM_EVT_ACQUIRE, NULL, NULL);
    if (0 == rc) {
        /* BUG FIX: the original never assigned s_hdl, so callers always
         * received 0 (failure) even after a successful acquire. */
        s_hdl = stream_obj->my_hdl;
    } else {
        /* acquire failed — release the slot back to NOTUSED */
        memset(stream_obj, 0, sizeof(mm_stream_t));
    }
    return s_hdl;
}
/**
 * Ask every collected stream of this channel to start sensor streaming.
 * NOTE: the code that fills s_objs[] / num_streams_to_start is elided
 * ("...") in this excerpt.
 *
 * @return result of the last stream FSM call (0 on success)
 */
int32_t mm_channel_start_sensor_streaming(mm_channel_t *my_obj)
{
    int32_t rc = 0;
    int i = 0;
    mm_stream_t *s_objs[MAX_STREAM_NUM_IN_BUNDLE] = {NULL};
    uint8_t num_streams_to_start = 0;
    mm_stream_t *s_obj = NULL;
    ...
    for (i = 0; i < num_streams_to_start; i++) {
        /* start sensor streaming per stream */
        rc = mm_stream_fsm_fn(s_objs[i],
                MM_STREAM_EVT_START_SENSOR_STREAMING,
                NULL,
                NULL);
    }
    return rc;
}
hardware/qcom/camera/msm8998/QCamera2/stack/mm-camera-interface/src/mm_camera_stream.c
/**
 * Stream finite-state-machine entry point.
 * Dispatches the event to the handler for the stream's current state
 * (NOTUSED / INITED / ACQUIRED / CFG / BUFFED / REG / ACTIVE).
 *
 * @param evt     event to process
 * @param in_val  event-specific input payload (may be NULL)
 * @param out_val event-specific output (may be NULL)
 * @return handler result, or -1 when the event is not handled
 */
int32_t mm_stream_fsm_fn(mm_stream_t *my_obj,
        mm_stream_evt_type_t evt,
        void * in_val,
        void * out_val)
{
    int32_t rc = -1;

    LOGD("E, my_handle = 0x%x, fd = %d, state = %d",
            my_obj->my_hdl, my_obj->fd, my_obj->state);
    switch (my_obj->state) {
    case MM_STREAM_STATE_NOTUSED:
        LOGD("Not handling evt in unused state");
        break;
    case MM_STREAM_STATE_INITED:
        rc = mm_stream_fsm_inited(my_obj, evt, in_val, out_val);
        break;
    case MM_STREAM_STATE_ACQUIRED:
        rc = mm_stream_fsm_acquired(my_obj, evt, in_val, out_val);
        break;
    case MM_STREAM_STATE_CFG:
        rc = mm_stream_fsm_cfg(my_obj, evt, in_val, out_val);
        break;
    case MM_STREAM_STATE_BUFFED:
        rc = mm_stream_fsm_buffed(my_obj, evt, in_val, out_val);
        break;
    case MM_STREAM_STATE_REG:
        rc = mm_stream_fsm_reg(my_obj, evt, in_val, out_val);
        break;
    case MM_STREAM_STATE_ACTIVE:
        rc = mm_stream_fsm_active(my_obj, evt, in_val, out_val);
        break;
    default:
        LOGD("Not a valid state (%d)", my_obj->state);
        break;
    }
    LOGD("X rc =%d",rc);
    return rc;
}
/**
 * Event handler for a stream in the INITED state.
 * On MM_STREAM_EVT_ACQUIRE: resolves the device name, opens /dev/videoN
 * (non-blocking), sets the V4L2 extended mode, and moves the stream to
 * MM_STREAM_STATE_ACQUIRED (fd opened, stream acquired).
 *
 * @return 0 on success, -1 on failure
 */
int32_t mm_stream_fsm_inited(mm_stream_t *my_obj,
        mm_stream_evt_type_t evt,
        void * in_val,
        void * out_val)
{
    int32_t rc = 0;
    char dev_name[MM_CAMERA_DEV_NAME_LEN];
    const char *dev_name_value = NULL;

    switch(evt) {
    case MM_STREAM_EVT_ACQUIRE:
        mm_stream_init(my_obj);
        uint32_t cam_handle = my_obj->ch_obj->cam_obj->my_hdl;
        dev_name_value = mm_camera_util_get_dev_name_by_num(
                my_obj->ch_obj->cam_obj->my_num, cam_handle);
        if (NULL == dev_name_value) {
            /* BUG FIX: original formatted/opened a NULL device name */
            LOGE("NULL device name");
            rc = -1;
            break;
        }
        snprintf(dev_name, sizeof(dev_name), "/dev/%s",
                dev_name_value);
        /* open /dev/videoN (e.g. video0 / video1) */
        my_obj->fd = open(dev_name, O_RDWR | O_NONBLOCK);
        if (my_obj->fd < 0) {
            /* BUG FIX: original ignored open() failure and proceeded */
            LOGE("open dev returns %d", my_obj->fd);
            rc = -1;
            break;
        }
        rc = mm_stream_set_ext_mode(my_obj);
        if (0 == rc) {
            /* stream acquired, fd opened */
            my_obj->state = MM_STREAM_STATE_ACQUIRED;
        }
        break;
    default:
        LOGE("invalid state (%d) for evt (%d), in(%p), out(%p)",
                my_obj->state, evt, in_val, out_val);
        break;
    }
    return rc;
}
/**
 * Register a buffer callback on this stream.
 * The callback is stored in the first free slot of buf_cb[] (a slot is
 * free when its cb pointer is NULL).
 *
 * @return 0 when a slot was found, -1 when all slots are occupied
 */
int32_t mm_stream_reg_buf_cb(mm_stream_t *my_obj,
        mm_stream_data_cb_t val)
{
    int32_t status = -1;

    for (uint8_t slot = 0; slot < MM_CAMERA_STREAM_BUF_CB_MAX; slot++) {
        if (my_obj->buf_cb[slot].cb == NULL) {
            my_obj->buf_cb[slot] = val;
            status = 0;
            break;
        }
    }
    return status;
}
/**
 * Event handler for a stream in the REG state (buffers registered).
 * UNREG_BUF returns to BUFFED; START launches the app-data dispatch thread
 * when a non-sync callback is registered, marks the stream ACTIVE and
 * issues VIDIOC_STREAMON via mm_stream_streamon(); SET/GET_PARM forward to
 * the parameter helpers.
 *
 * @return event-specific result code
 */
int32_t mm_stream_fsm_reg(mm_stream_t * my_obj,
        mm_stream_evt_type_t evt,
        void * in_val,
        void * out_val)
{
    int32_t rc = 0;

    switch(evt) {
    case MM_STREAM_EVT_UNREG_BUF:
        rc = mm_stream_unreg_buf(my_obj);
        /* change state to buffed */
        my_obj->state = MM_STREAM_STATE_BUFFED;
        break;
    case MM_STREAM_EVT_START:
        {
            uint8_t has_cb = 0;
            uint8_t i;
            /* launch cmd thread if a non-sync CB is registered */
            pthread_mutex_lock(&my_obj->cb_lock);
            for (i = 0; i < MM_CAMERA_STREAM_BUF_CB_MAX; i++) {
                if((NULL != my_obj->buf_cb[i].cb) &&
                        (my_obj->buf_cb[i].cb_type != MM_CAMERA_STREAM_CB_TYPE_SYNC)) {
                    has_cb = 1;
                    break;
                }
            }
            /* BUG FIX: cb_lock was never released here, deadlocking any
             * later path that takes it (e.g. callback dispatch). */
            pthread_mutex_unlock(&my_obj->cb_lock);
            if (has_cb) {
                snprintf(my_obj->cmd_thread.threadName, THREAD_NAME_SIZE, "CAM_StrmAppData");
                mm_camera_cmd_thread_launch(&my_obj->cmd_thread,
                        mm_stream_dispatch_app_data,
                        (void *)my_obj);
            }
            my_obj->state = MM_STREAM_STATE_ACTIVE;
            rc = mm_stream_streamon(my_obj);
        }
        break;
    case MM_STREAM_EVT_SET_PARM:
        {
            mm_evt_paylod_set_get_stream_parms_t *payload =
                    (mm_evt_paylod_set_get_stream_parms_t *)in_val;
            rc = mm_stream_set_parm(my_obj, payload->parms);
        }
        break;
    case MM_STREAM_EVT_GET_PARM:
        {
            mm_evt_paylod_set_get_stream_parms_t *payload =
                    (mm_evt_paylod_set_get_stream_parms_t *)in_val;
            rc = mm_stream_get_parm(my_obj, payload->parms);
        }
        break;
    default:
        LOGE("invalid state (%d) for evt (%d), in(%p), out(%p)",
                my_obj->state, evt, in_val, out_val);
    }
    LOGD("X rc = %d", rc);
    return rc;
}
/**
 * Issue VIDIOC_STREAMON on this stream's fd to start the V4L2 data flow.
 * NOTE: parts of the body (e.g. buffer/frame-sync setup) are elided
 * ("...") in this excerpt.
 *
 * @return ioctl result (0 on success, negative on failure)
 */
int32_t mm_stream_streamon(mm_stream_t *my_obj)
{
    int32_t rc = 0;
    int8_t i;
    enum v4l2_buf_type buf_type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    ...
    mm_camera_obj_t *cam_obj = my_obj->ch_obj->cam_obj;
    LOGD("E, my_handle = 0x%x, fd = %d, state = %d session_id:%d stream_id:%d",
            my_obj->my_hdl, my_obj->fd, my_obj->state, cam_obj->sessionid,
            my_obj->server_stream_id);
    // Send the VIDIOC_STREAMON v4l2 request to the kernel to start the stream.
    rc = ioctl(my_obj->fd, VIDIOC_STREAMON, &buf_type);
    ...
    LOGD("X rc = %d",rc);
    return rc;
}
/**
 * Event handler for a stream in the ACTIVE state.
 * This excerpt only shows MM_STREAM_EVT_START_SENSOR_STREAMING; other
 * cases are elided ("...").
 *
 * @return event-specific result code (0 default)
 */
int32_t mm_stream_fsm_active(mm_stream_t * my_obj,
        mm_stream_evt_type_t evt,
        void * in_val,
        void * out_val)
{
    int32_t rc = 0;
    LOGD("E, my_handle = 0x%x, fd = %d, state = %d",
            my_obj->my_hdl, my_obj->fd, my_obj->state);
    switch(evt) {
    ...
    case MM_STREAM_EVT_START_SENSOR_STREAMING:
        {
            rc = mm_stream_start_sensor_streaming(my_obj);
        }
        break;
    default:
        LOGE("invalid state (%d) for evt (%d), in(%p), out(%p)",
                my_obj->state, evt, in_val, out_val);
    }
    LOGD("X rc = %d", rc);
    return rc;
}
/**
 * Ask the sensor to start streaming for this stream.
 * Builds a shim SET_PARM packet (MSM_CAMERA_PRIV_STREAM_ON with value
 * CAM_STREAM_ON_TYPE_START_SENSOR_STREAMING) for this stream's server
 * stream id and sends it to the camera kernel module.
 *
 * @return result of mm_camera_module_send_cmd (0 on success), -1 when the
 *         shim packet could not be created
 */
int32_t mm_stream_start_sensor_streaming(mm_stream_t *my_obj)
{
    int32_t rc = 0;
    mm_camera_obj_t *cam_obj = my_obj->ch_obj->cam_obj;
    cam_shim_packet_t *shim_cmd;
    cam_shim_cmd_data shim_cmd_data;
    unsigned int value = CAM_STREAM_ON_TYPE_START_SENSOR_STREAMING;
    /* Cleanup: dropped unused locals buf_type and idx from the excerpt. */

    memset(&shim_cmd_data, 0, sizeof(shim_cmd_data));
    shim_cmd_data.command = MSM_CAMERA_PRIV_STREAM_ON;
    shim_cmd_data.stream_id = my_obj->server_stream_id;
    shim_cmd_data.value = &value;
    shim_cmd = mm_camera_create_shim_cmd_packet(CAM_SHIM_SET_PARM,
            cam_obj->sessionid, &shim_cmd_data);
    if (NULL == shim_cmd) {
        /* BUG FIX: original sent/destroyed the packet without checking
         * whether creation succeeded. */
        LOGE("failed to create shim cmd packet");
        return -1;
    }
    /* deliver the stream-on command to the kernel camera module */
    rc = mm_camera_module_send_cmd(shim_cmd);
    mm_camera_destroy_shim_cmd_packet(shim_cmd);
    LOGD("X rc = %d",rc);
    return rc;
}