具体再分析下 Camera的open流程,上面我们分析到上层调用到HAL层的camera_module_t结构体的open_legacy函数,也就是映射到QCamera2Factory的open_legacy
/*===========================================================================
* FUNCTION : open_legacy
*
* DESCRIPTION: camera_module_t entry point for opening a camera with an
* older (HAL1) device API version. Routes through the dual-camera
* muxer when one exists, otherwise through the factory.
*
* PARAMETERS :
* @module : ptr to this HAL's hw_module_t (must match our own module)
* @id : camera ID as a decimal string
* @halVersion: Based on camera_module_t.common.module_api_version
* @device : [out] ptr to struct storing camera hardware device info
*
* RETURN : 0 -- success
* non-zero failure code
*==========================================================================*/
int QCamera2Factory::open_legacy(const struct hw_module_t* module,
const char* id, uint32_t halVersion, struct hw_device_t** device)
{
int rc = NO_ERROR;
/* Reject calls arriving with a foreign module pointer. */
if (module != &HAL_MODULE_INFO_SYM.common) {
LOGE("Invalid module. Trying to open %p, expect %p",
module, &HAL_MODULE_INFO_SYM.common);
return INVALID_OPERATION;
}
if (!id) {
LOGE("Invalid camera id");
return BAD_VALUE;
}
#ifdef QCAMERA_HAL1_SUPPORT
/* NOTE: the else below pairs with this if across the #endif — when
* QCAMERA_HAL1_SUPPORT is undefined only the factory call remains. */
if(gQCameraMuxer)
rc = gQCameraMuxer->open_legacy(module, id, halVersion, device);
else
#endif
rc = gQCamera2Factory->openLegacy(atoi(id), halVersion, device);
return rc;
}
gQCamera2Factory->openLegacy <---------- QCamera2Factory.cpp//qcom和mtk厂商定制的接口
|
|
QCamera2HardwareInterface *hw =
new QCamera2HardwareInterface((uint32_t)cameraId);
rc = hw->openCamera(hw_device); <-----------------------QCamera2Factory.cpp
|
|
int QCamera2HardwareInterface::openCamera() <-----------------------QCamera2HWI.cpp
|
|
camera_open((uint8_t)mCameraId, &mCameraHandle); <---------------mm_camera_interface.c//不管API1还是API2最后都是要跑到这里的
|
|
rc = mm_camera_open(cam_obj); <---mm_camera.c//不管API1还是API2最后都是要跑到这里的
|
|
my_obj->ctrl_fd = open(dev_name, O_RDWR | O_NONBLOCK);
在V4L2框架中,Camera被看做一个视频设备,使用open函数打开这个设备,那么,接下来就会调用到Kernel层的代码
|
|
static struct v4l2_file_operations msm_fops = { <------------msm.c
.owner = THIS_MODULE,
.open = msm_open,
.poll = msm_poll,
.release = msm_close,
.unlocked_ioctl = video_ioctl2,
#ifdef CONFIG_COMPAT
.compat_ioctl32 = video_ioctl2,
#endif
};
|
|
static int msm_open(struct file *filep) <------------msm.c
|
|
rc = v4l2_fh_open(filep); <--------------v4l2-fh.c
|
|
rc = msm_camera_power_up(power_info, s_ctrl->sensor_device_type,
sensor_i2c_client); <---msm_camera_dt_util.c
|
|
rc = msm_sensor_match_id(s_ctrl); <-------msm_sensor.c
|
|
msm_camera_power_down(power_info, s_ctrl->sensor_device_type, sensor_i2c_client); <--msm_camera_dt_util.c
分析下代码:hardware/qcom/camera/QCamera2/stack/mm-camera-interface/src/mm_camera.c
/*===========================================================================
 * FUNCTION   : mm_camera_open
 *
 * DESCRIPTION: open a camera: open its /dev/videoX control node (with
 *              retries), establish a session (domain socket or direct
 *              module session depending on DAEMON_PRESENT), and launch the
 *              event dispatch/poll threads.
 *
 * PARAMETERS :
 *   @my_obj  : ptr to a camera object
 *
 * RETURN     : int32_t type of status
 *              0 -- success
 *              -1 -- failure (-EUSERS when the device node is busy)
 *==========================================================================*/
int32_t mm_camera_open(mm_camera_obj_t *my_obj)
{
    char dev_name[MM_CAMERA_DEV_NAME_LEN];
    int32_t rc = 0;
    int8_t n_try = MM_CAMERA_DEV_OPEN_TRIES;
    uint8_t sleep_msec = MM_CAMERA_DEV_OPEN_RETRY_SLEEP;
    int cam_idx = 0;
    const char *dev_name_value = NULL;
    int l_errno = 0;
    pthread_condattr_t cond_attr;

    LOGD("begin\n");

    if (NULL == my_obj) {
        goto on_error;
    }

    dev_name_value = mm_camera_util_get_dev_name_by_num(my_obj->my_num,
            my_obj->my_hdl);
    if (NULL == dev_name_value) {
        goto on_error;
    }
    snprintf(dev_name, sizeof(dev_name), "/dev/%s",
            dev_name_value);
    sscanf(dev_name, "/dev/video%d", &cam_idx);
    /* Observed on this target: rear camera -> /dev/video1 (cam_idx = 1),
     * rear-aux camera -> /dev/video2 (cam_idx = 2). */
    LOGI("[wanghl]dev name = %s, cam_idx = %d", dev_name, cam_idx);
    /*
     * When the rear dual camera is opened, both device nodes get opened:
     *   camera_open:    Dual Camera: Main ID = 0 Aux ID = 1
     *   mm_camera_open: dev name = /dev/video1, cam_idx = 1
     *   mm_camera_open: dev name = /dev/video2, cam_idx = 2
     * The v4l2 node names on this target:
     *   video0 -> msm-config, video1 -> msm-sensor, video2 -> msm-sensor
     */
    do {
        n_try--;
        errno = 0;
        /* open() the device node (dev/video0, dev/video1, ...); which node
         * a camera maps to follows the video-device registration order. */
        my_obj->ctrl_fd = open(dev_name, O_RDWR | O_NONBLOCK);
        l_errno = errno;
        LOGD("ctrl_fd = %d, errno == %d", my_obj->ctrl_fd, l_errno);
        /* Retry only on transient I/O errors while tries remain. */
        if ((my_obj->ctrl_fd >= 0) || (errno != EIO && errno != ETIMEDOUT) || (n_try <= 0)) {
            break;
        }
        LOGE("Failed with %s error, retrying after %d milli-seconds",
                strerror(errno), sleep_msec);
        usleep(sleep_msec * 1000U);
    } while (n_try > 0);

    if (my_obj->ctrl_fd < 0) {
        LOGE("cannot open control fd of '%s' (%s)\n",
                dev_name, strerror(l_errno));
        if (l_errno == EBUSY)
            rc = -EUSERS;
        else
            rc = -1;
        goto on_error;
    } else {
        mm_camera_get_session_id(my_obj, &my_obj->sessionid);
        LOGH("Camera Opened id = %d sessionid = %d", cam_idx, my_obj->sessionid);
    }

#ifdef DAEMON_PRESENT
    /* open domain socket*/
    n_try = MM_CAMERA_DEV_OPEN_TRIES;
    do {
        n_try--;
        my_obj->ds_fd = mm_camera_socket_create(cam_idx, MM_CAMERA_SOCK_TYPE_UDP);
        l_errno = errno;
        LOGD("ds_fd = %d, errno = %d", my_obj->ds_fd, l_errno);
        if ((my_obj->ds_fd >= 0) || (n_try <= 0)) {
            LOGD("opened, break out while loop");
            break;
        }
        LOGD("failed with I/O error retrying after %d milli-seconds",
                sleep_msec);
        usleep(sleep_msec * 1000U);
    } while (n_try > 0);

    if (my_obj->ds_fd < 0) {
        LOGE("cannot open domain socket fd of '%s'(%s)\n",
                dev_name, strerror(l_errno));
        rc = -1;
        goto on_error;
    }
#else /* DAEMON_PRESENT */
    cam_status_t cam_status;
    cam_status = mm_camera_module_open_session(my_obj->sessionid,
            mm_camera_module_event_handler);
    if (cam_status < 0) {
        LOGE("Failed to open session");
        if (cam_status == CAM_STATUS_BUSY) {
            rc = -EUSERS;
        } else {
            rc = -1;
        }
        goto on_error;
    }
#endif /* DAEMON_PRESENT */

    pthread_condattr_init(&cond_attr);
    pthread_condattr_setclock(&cond_attr, CLOCK_MONOTONIC);

    pthread_mutex_init(&my_obj->msg_lock, NULL);
    pthread_mutex_init(&my_obj->cb_lock, NULL);
    pthread_mutex_init(&my_obj->evt_lock, NULL);
    pthread_cond_init(&my_obj->evt_cond, &cond_attr);
    pthread_condattr_destroy(&cond_attr);

    LOGD("Launch evt Thread in Cam Open");
    snprintf(my_obj->evt_thread.threadName, THREAD_NAME_SIZE, "CAM_Dispatch");
    mm_camera_cmd_thread_launch(&my_obj->evt_thread,
            mm_camera_dispatch_app_event,
            (void *)my_obj);

    /* launch event poll thread
     * we will add evt fd into event poll thread upon user first register for evt */
    LOGD("Launch evt Poll Thread in Cam Open");
    snprintf(my_obj->evt_poll_thread.threadName, THREAD_NAME_SIZE, "CAM_evntPoll");
    mm_camera_poll_thread_launch(&my_obj->evt_poll_thread,
            MM_CAMERA_POLL_TYPE_EVT);
    mm_camera_evt_sub(my_obj, TRUE);

    /* unlock cam_lock, we need release global intf_lock in camera_open(),
     * in order not block operation of other Camera in dual camera use case.*/
    pthread_mutex_unlock(&my_obj->cam_lock);
    LOGD("end (rc = %d)\n", rc);
    return rc;

on_error:
    if (NULL == dev_name_value) {
        LOGE("Invalid device name\n");
        rc = -1;
    }
    if (NULL == my_obj) {
        LOGE("Invalid camera object\n");
        rc = -1;
        /* BUG FIX: the original fell through and executed
         * pthread_mutex_unlock(&my_obj->cam_lock) even when my_obj was
         * NULL, dereferencing a NULL pointer. Return directly instead. */
        return rc;
    }

    if (my_obj->ctrl_fd >= 0) {
        close(my_obj->ctrl_fd);
        my_obj->ctrl_fd = -1;
    }
#ifdef DAEMON_PRESENT
    if (my_obj->ds_fd >= 0) {
        mm_camera_socket_close(my_obj->ds_fd);
        my_obj->ds_fd = -1;
    }
#endif
    /* unlock cam_lock, we need release global intf_lock in camera_open(),
     * in order not block operation of other Camera in dual camera use case.*/
    pthread_mutex_unlock(&my_obj->cam_lock);
    return rc;
}
上面的系统调用open()函数是应用层的,它最终对应内核层(驱动)的open函数为msm_open(),如下:
kernel/msm-4.9/drivers/media/platform/msm/camera_v2/msm.c
/* v4l2 file operations for the msm video node: userspace open()/poll()/
 * ioctl()/close() on /dev/videoX dispatch to these handlers. */
static struct v4l2_file_operations msm_fops = {
.owner = THIS_MODULE,
.open = msm_open,// the real open entry point
.poll = msm_poll,
.release = msm_close,
.unlocked_ioctl = video_ioctl2,
#ifdef CONFIG_COMPAT
.compat_ioctl32 = video_ioctl2,
#endif
};
static int msm_open(struct file *filep)//msm.c
{
int rc = -1;
unsigned long flags;
struct msm_video_device *pvdev = video_drvdata(filep);
if (WARN_ON(!pvdev))
return rc;
/* !!! only ONE open is allowed !!! */
if (atomic_cmpxchg(&pvdev->opened, 0, 1))
return -EBUSY;
spin_lock_irqsave(&msm_pid_lock, flags);
msm_pid = get_pid(task_pid(current));
spin_unlock_irqrestore(&msm_pid_lock, flags);
/* create event queue */
rc = v4l2_fh_open(filep);
if (rc < 0)
return rc;
spin_lock_irqsave(&msm_eventq_lock, flags);
msm_eventq = filep->private_data;
spin_unlock_irqrestore(&msm_eventq_lock, flags);
/* register msm_v4l2_pm_qos_request */
msm_pm_qos_add_request();
return rc;
}
---------------------------------------------------------------------------------
分析一下APP上层调用framework接口open camera是怎么调用的!
<一>骁龙相机用反射的方法打开摄像机:
packages/apps/SnapdragonCamera/src/com/android/camera/AndroidCameraManagerImpl.java
@Override
public void handleMessage(final Message msg) {
try {
switch (msg.what) {
case OPEN_CAMERA:
try {
Log.e(TAG, "[wanghl]SnapCamera open camera msg.arg1:" + msg.arg1);
Method openMethod = Class.forName("android.hardware.Camera").getMethod(
"openLegacy", int.class, int.class);
mCamera = (android.hardware.Camera) openMethod.invoke(
null, msg.arg1, CAMERA_HAL_API_VERSION_1_0);//HAL1 version code: private static final int CAMERA_HAL_API_VERSION_1_0 = 0x100;
} catch (Exception e) {
/* Retry with open if openLegacy doesn't exist/fails */
Log.v(TAG, "openLegacy failed due to " + e.getMessage()
+ ", using open instead");
mCamera = android.hardware.Camera.open(msg.arg1);
}
...
用反射的方法找到类Class.forName("android.hardware.Camera"),也就是frameworks/base/core/java/android/hardware/Camera.java这个Camera.java类的getMethod("openLegacy", int.class, int.class)的openLegacy方法,
frameworks/base/core/java/android/hardware/Camera.java
// Opens a camera while forcing a specific HAL version (unlike open(), it
// carries the extra halVersion parameter down to the native layer).
public static Camera openLegacy(int cameraId, int halVersion) {
Log.e(TAG, "[wanghl] openLegacy" + "cameraId:" +cameraId + "halVersion:" + halVersion);
if (halVersion < CAMERA_HAL_API_VERSION_1_0) {
throw new IllegalArgumentException("Invalid HAL version " + halVersion);
}
// NOTE(review): debug instrumentation — returning null blocks camera ids
// 0 and 1 from the legacy path; this block is not present in stock AOSP.
// Confirm it is intentional before shipping.
if((cameraId == 0) || (cameraId == 1))
{
Log.e(TAG, "[wanghl] openLegacy, ignore" + cameraId);
return null;
}
return new Camera(cameraId, halVersion);// invokes the two-argument Camera constructor
}
// Two-argument constructor used by openLegacy(): runs native initialization
// for the requested HAL version and maps any failure code to a descriptive
// RuntimeException.
private Camera(int cameraId, int halVersion) {
    Log.e(TAG, "[wanghl] Camera" + cameraId + halVersion);
    int err = cameraInitVersion(cameraId, halVersion);
    // Success: nothing to report.
    if (!checkInitErrors(err)) {
        return;
    }
    // Failure: translate the errno-style code into an exception message.
    if (err == -EACCES) {
        throw new RuntimeException("Fail to connect to camera service");
    }
    if (err == -ENODEV) {
        throw new RuntimeException("Camera initialization failed");
    }
    if (err == -ENOSYS) {
        throw new RuntimeException(
                "Camera initialization failed because some methods are not implemented");
    }
    if (err == -EOPNOTSUPP) {
        throw new RuntimeException(
                "Camera initialization failed because the hal version is not supported by this device");
    }
    if (err == -EINVAL) {
        throw new RuntimeException(
                "Camera initialization failed because the input arugments are invalid");
    }
    if (err == -EBUSY) {
        throw new RuntimeException(
                "Camera initialization failed because the camera device was already opened");
    }
    if (err == -EUSERS) {
        throw new RuntimeException(
                "Camera initialization failed because the max number of camera devices were already opened");
    }
    // Should never hit this.
    throw new RuntimeException("Unknown camera error");
}
// Resets all callback state, binds the event handler to the caller's (or
// main) looper, optionally forces HAL1 for packages listed in the
// vendor.camera.hal1.packagelist property, then performs native setup.
// Returns the native_setup() status code (0 on success).
private int cameraInitVersion(int cameraId, int halVersion) {
mShutterCallback = null;
mRawImageCallback = null;
mJpegCallback = null;
mPreviewCallback = null;
mPostviewCallback = null;
mUsingPreviewAllocation = false;
mZoomListener = null;
/* ### QC ADD-ONS: START */
mCameraDataCallback = null;
mCameraMetaDataCallback = null;
/* ### QC ADD-ONS: END */
Looper looper;
Log.e(TAG, "[wanghl] cameraInitVersion" + cameraId + halVersion);
// Prefer the calling thread's looper; fall back to the main looper.
if ((looper = Looper.myLooper()) != null) {
mEventHandler = new EventHandler(this, looper);
} else if ((looper = Looper.getMainLooper()) != null) {
mEventHandler = new EventHandler(this, looper);
} else {
mEventHandler = null;
}
String packageName = ActivityThread.currentOpPackageName();
//Force HAL1 if the package name falls in this bucket
String packageList = SystemProperties.get("vendor.camera.hal1.packagelist", "");
if (packageList.length() > 0) {
TextUtils.StringSplitter splitter = new TextUtils.SimpleStringSplitter(',');
splitter.setString(packageList);
for (String str : splitter) {
if (packageName.equals(str)) {
halVersion = CAMERA_HAL_API_VERSION_1_0;
break;
}
}
}
return native_setup(new WeakReference<Camera>(this), cameraId, halVersion, packageName);// native camera initialization happens here
}
// JNI entry point; implemented by android_hardware_Camera_native_setup()
// in frameworks/base/core/jni/android_hardware_Camera.cpp.
private native final int native_setup(Object camera_this, int cameraId, int halVersion,
String packageName);
让我们来看一下camera的native_setup函数(一般android中native这个字段就表明了调用的是本地接口):
此处通过native字段可以确定native_setup调用的就是jni接口.即frameworks/base/core/jni/android_hardware_Camera.cpp中具体实现了这个函数,让我们来看看它是是如何转接这个函数的.
static const JNINativeMethod camMethods[] = {
...
{ "native_setup",
"(Ljava/lang/Object;IILjava/lang/String;)I",
(void*)android_hardware_Camera_native_setup },
...
}
对,就是这里,它表示java中native_setup()真正的实现是android_hardware_Camera_native_setup() .就这样,java语言就直接调用了C++来实现.
frameworks/base/core/jni/android_hardware_Camera.cpp
// connect to camera service
static jint android_hardware_Camera_native_setup(JNIEnv *env, jobject thiz,
jobject weak_this, jint cameraId, jint halVersion, jstring clientPackageName)
{
...
sp<Camera> camera;
if (halVersion == CAMERA_HAL_API_VERSION_NORMAL_CONNECT) {//CAMERA_HAL_API_VERSION_NORMAL_CONNECT = -2,像zoom这种第三方应用的halVersion就是-2,就会跑这里
// Default path: hal version is don't care, do normal camera connect.
camera = Camera::connect(cameraId, clientName,//调用的是frameworks/av/camera/Camera.cpp的connect方法,在JNI函数里面,我们找到Camera C/S架构的客户端了,它调用connect函数向服务器发送连接请求
Camera::USE_CALLING_UID, Camera::USE_CALLING_PID);
} else {//SnapCamera骁龙相机的halVersion是0x100(256),所以就会跑到这个方法里来
jint status = Camera::connectLegacy(cameraId, halVersion, clientName,//调用的是frameworks/av/camera/Camera.cpp的connectLegacy方法
Camera::USE_CALLING_UID, camera);
if (status != NO_ERROR) {
return status;
}
}
...
}
//客户端的connect()函数的实现在libcamera_client.so中,他的源码在以下路径中:
frameworks/av/camera/Camera.cpp
// Thin wrapper: forwards to the templated CameraBase<Camera>::connect(),
// which performs the Binder connection to CameraService.
sp<Camera> Camera::connect(int cameraId, const String16& clientPackageName,
int clientUid, int clientPid)
{
ALOGI("[wanghl]%s: connect camera device", __FUNCTION__);
return CameraBaseT::connect(cameraId, clientPackageName, clientUid, clientPid);
}
/*
 * Connects to CameraService requesting a specific (legacy) HAL version.
 * On success stores the opened camera in `camera` and returns NO_ERROR;
 * otherwise maps the service-specific Binder error to a status_t.
 */
status_t Camera::connectLegacy(int cameraId, int halVersion,
        const String16& clientPackageName,
        int clientUid,
        sp<Camera>& camera)
{
    ALOGI("[wanghl]%s: connect legacy camera device", __FUNCTION__);
    sp<Camera> c = new Camera(cameraId);
    sp<::android::hardware::ICameraClient> cl = c;
    status_t status = NO_ERROR;
    // CameraBase::getCameraService() obtains the remote CameraService proxy;
    // the framework reaches CameraService through Binder (that helper goes
    // through IServiceManager/IBinder internally).
    const sp<::android::hardware::ICameraService>& cs = CameraBaseT::getCameraService();
    binder::Status ret;
    if (cs != nullptr) {
        // Remote call into CameraService::connectLegacy(); the ICamera
        // handle is returned through c->mCamera.
        ret = cs.get()->connectLegacy(cl, cameraId, halVersion, clientPackageName,
                clientUid, /*out*/&(c->mCamera));
    }
    if (ret.isOk() && c->mCamera != nullptr) {
        IInterface::asBinder(c->mCamera)->linkToDeath(c);
        c->mStatus = NO_ERROR;
        camera = c;
    } else {
        switch(ret.serviceSpecificErrorCode()) {
            case hardware::ICameraService::ERROR_DISCONNECTED:
                status = -ENODEV;
                break;
            case hardware::ICameraService::ERROR_CAMERA_IN_USE:
                status = -EBUSY;
                break;
            case hardware::ICameraService::ERROR_INVALID_OPERATION:
                status = -EINVAL;
                break;
            case hardware::ICameraService::ERROR_MAX_CAMERAS_IN_USE:
                status = -EUSERS;
                break;
            case hardware::ICameraService::ERROR_ILLEGAL_ARGUMENT:
                status = BAD_VALUE;
                break;
            case hardware::ICameraService::ERROR_DEPRECATED_HAL:
                status = -EOPNOTSUPP;
                break;
            case hardware::ICameraService::ERROR_DISABLED:
                status = -EACCES;
                break;
            case hardware::ICameraService::ERROR_PERMISSION_DENIED:
                status = PERMISSION_DENIED;
                break;
            default:
                status = -EINVAL;
                // BUG FIX: the ternary was inverted — it logged "Service not
                // available" precisely when the service WAS available. Report
                // the service's error string when cs is non-null, and
                // "Service not available" only when it is null.
                ALOGW("An error occurred while connecting to camera %d: %s", cameraId,
                        (cs == nullptr) ? "Service not available" : ret.toString8().string());
                break;
        }
        c.clear();
    }
    return status;
}
所以我们来看下CameraService这个类的connectLegacy方法,
frameworks/av/services/camera/libcameraservice/CameraService.cpp
// Service-side handler for legacy (HAL1-forced) connect requests coming in
// over Binder. Converts the integer camera id to a string id and delegates
// to connectHelper(); on success *device receives the created client.
Status CameraService::connectLegacy(
const sp<ICameraClient>& cameraClient,
int api1CameraId, int halVersion,
const String16& clientPackageName,
int clientUid,
/*out*/
sp<ICamera>* device) {
ATRACE_CALL();
String8 id = cameraIdIntToStr(api1CameraId);
Status ret = Status::ok();
sp<Client> client = nullptr;
ALOGI("[wanghl] CameraService::connectLegacy");//10-31 15:57:24.728 696 4481 I CameraService: [wanghl] CameraService::connectLegacy
ret = connectHelper<ICameraClient,Client>(cameraClient, id, api1CameraId, halVersion,// connectHelper() implements the real connection logic (the HAL3 path also ends up here)
clientPackageName, clientUid, USE_CALLING_PID, API_1,
/*legacyMode*/ true, /*shimUpdateOnly*/ false,
/*out*/client);
if(!ret.isOk()) {
logRejected(id, getCallingPid(), String8(clientPackageName),
ret.toString8());
return ret;
}
*device = client;
return ret;
}
template<class CALLBACK, class CLIENT>
Status CameraService::connectHelper(const sp<CALLBACK>& cameraCb, const String8& cameraId,
int api1CameraId, int halVersion, const String16& clientPackageName, int clientUid,
int clientPid, apiLevel effectiveApiLevel, bool legacyMode, bool shimUpdateOnly,
/*out*/sp<CLIENT>& device) {
binder::Status ret = binder::Status::ok();
String8 clientName8(clientPackageName);
int originalClientPid = 0;
ALOGI("[wanghl]CameraService::connect call (PID %d \"%s\", camera ID %s) for HAL version %s and "
"Camera API version %d", clientPid, clientName8.string(), cameraId.string(),
(halVersion == -1) ? "default" : std::to_string(halVersion).c_str(),
static_cast<int>(effectiveApiLevel));//10-31 15:57:24.728 696 4481 I CameraService: [wanghl]CameraService::connect call (PID -1 "org.codeaurora.snapcam", camera ID 1) for HAL version 256 and Camera API version 1
...
sp<BasicClient> tmp = nullptr;
ALOGI("[wanghl] makeClient");//10-31 15:57:24.730 696 4481 I CameraService: [wanghl] makeClient
if(!(ret = makeClient(this, cameraCb, clientPackageName,//于是就调用到了makeClient这里,输出了一个CameraClient类,
cameraId, api1CameraId, facing,
clientPid, clientUid, getpid(), legacyMode,
halVersion, deviceVersion, effectiveApiLevel,
/*out*/&tmp)).isOk()) {
return ret;
}
client = static_cast<CLIENT*>(tmp.get());//获得CameraClient这个类对象
LOG_ALWAYS_FATAL_IF(client.get() == nullptr, "%s: CameraService in invalid state",
__FUNCTION__);
err = client->initialize(mCameraProviderManager, mMonitorTags);//调用得到的CameraClient类对象的initialize方法
...
}
// Factory for the service-side client object. Selects the concrete client
// class from (requested halVersion, device-reported deviceVersion,
// effectiveApiLevel): CameraClient for HAL1 paths, Camera2Client for API1
// over HAL3, CameraDeviceClient for API2 over HAL3.
Status CameraService::makeClient(const sp<CameraService>& cameraService,
const sp<IInterface>& cameraCb, const String16& packageName, const String8& cameraId,
int api1CameraId, int facing, int clientPid, uid_t clientUid, int servicePid,
bool legacyMode, int halVersion, int deviceVersion, apiLevel effectiveApiLevel,
/*out*/sp<BasicClient>* client) {// the created client instance is returned through *client
ALOGI("[wanghl]Camera using old HAL version: %d in makeClient", deviceVersion);//10-31 15:57:24.730 696 4481 I CameraService: [wanghl]Camera using old HAL version: 771(0x303) in makeClient
if (halVersion < 0 || halVersion == deviceVersion) {
// Default path: HAL version is unspecified by caller, create CameraClient
// based on device version reported by the HAL.
switch(deviceVersion) {
case CAMERA_DEVICE_API_VERSION_1_0:
if (effectiveApiLevel == API_1) { // Camera1 API route
sp<ICameraClient> tmp = static_cast<ICameraClient*>(cameraCb.get());
*client = new CameraClient(cameraService, tmp, packageName,
api1CameraId, facing, clientPid, clientUid,
getpid(), legacyMode);
} else { // Camera2 API route
ALOGW("Camera using old HAL version: %d", deviceVersion);
return STATUS_ERROR_FMT(ERROR_DEPRECATED_HAL,
"Camera device \"%s\" HAL version %d does not support camera2 API",
cameraId.string(), deviceVersion);
}
break;
case CAMERA_DEVICE_API_VERSION_3_0:
case CAMERA_DEVICE_API_VERSION_3_1:
case CAMERA_DEVICE_API_VERSION_3_2:
case CAMERA_DEVICE_API_VERSION_3_3:
case CAMERA_DEVICE_API_VERSION_3_4:
if (effectiveApiLevel == API_1) { // Camera1 API route
sp<ICameraClient> tmp = static_cast<ICameraClient*>(cameraCb.get());
*client = new Camera2Client(cameraService, tmp, packageName,
cameraId, api1CameraId,
facing, clientPid, clientUid,
servicePid, legacyMode);
} else { // Camera2 API route
sp<hardware::camera2::ICameraDeviceCallbacks> tmp =
static_cast<hardware::camera2::ICameraDeviceCallbacks*>(cameraCb.get());
*client = new CameraDeviceClient(cameraService, tmp, packageName, cameraId,
facing, clientPid, clientUid, servicePid);
}
break;
default:
// Should not be reachable
ALOGE("Unknown camera device HAL version: %d", deviceVersion);
return STATUS_ERROR_FMT(ERROR_INVALID_OPERATION,
"Camera device \"%s\" has unknown HAL version %d",
cameraId.string(), deviceVersion);
}
} else {
// A particular HAL version is requested by caller. Create CameraClient
// based on the requested HAL version.
if (deviceVersion > CAMERA_DEVICE_API_VERSION_1_0 &&// CAMERA_DEVICE_API_VERSION_1_0 is 0x100; here deviceVersion 0x303 > 0x100 and halVersion == 0x100, so this branch is taken
halVersion == CAMERA_DEVICE_API_VERSION_1_0) {
// Only support higher HAL version device opened as HAL1.0 device.
sp<ICameraClient> tmp = static_cast<ICameraClient*>(cameraCb.get());
*client = new CameraClient(cameraService, tmp, packageName,// constructs the CameraClient (CameraClient.cpp) returned via *client
api1CameraId, facing, clientPid, clientUid,
servicePid, legacyMode);
} else {
// Other combinations (e.g. HAL3.x open as HAL2.x) are not supported yet.
ALOGE("Invalid camera HAL version %x: HAL %x device can only be"
" opened as HAL %x device", halVersion, deviceVersion,
CAMERA_DEVICE_API_VERSION_1_0);
return STATUS_ERROR_FMT(ERROR_ILLEGAL_ARGUMENT,
"Camera device \"%s\" (HAL version %d) cannot be opened as HAL version %d",
cameraId.string(), deviceVersion, halVersion);
}
}
return Status::ok();
}
下面先看 CameraClient 类的构造函数:
frameworks/av/services/camera/libcameraservice/api1/CameraClient.cpp
// Constructor: chains to the base Client (which registers the caller's
// package/pid/uid with the service), then resets all per-connection state.
// No hardware is touched here — that happens later in initialize().
CameraClient::CameraClient(const sp<CameraService>& cameraService,
const sp<hardware::ICameraClient>& cameraClient,
const String16& clientPackageName,
int cameraId, int cameraFacing,
int clientPid, int clientUid,
int servicePid, bool legacyMode):
Client(cameraService, cameraClient, clientPackageName,
String8::format("%d", cameraId), cameraId, cameraFacing, clientPid,
clientUid, servicePid)
{
int callingPid = getCallingPid();
LOG1("CameraClient::CameraClient E (pid %d, id %d)", callingPid, cameraId);//10-31 16:50:10.653 696 3157 I CameraClient: [wanghl]CameraClient::CameraClient E (pid 4009, id 0)
mHardware = NULL;
mMsgEnabled = 0;
mSurface = 0;
mPreviewWindow = 0;
mDestructionStarted = false;
// Callback is disabled by default
mPreviewCallbackFlag = CAMERA_FRAME_CALLBACK_FLAG_NOOP;
mOrientation = getOrientation(0, mCameraFacing == CAMERA_FACING_FRONT);
mLegacyMode = legacyMode;
mPlayShutterSound = true;
mLongshotEnabled = false;
mBurstCnt = 0;
LOG1("CameraClient::CameraClient X (pid %d, id %d)", callingPid, cameraId);
}
initialize方法
// Brings the client online: verifies app-ops permissions, creates the
// CameraHardwareInterface for this camera id, opens the HAL device through
// it, installs the service's callbacks, and enables the default message set.
status_t CameraClient::initialize(sp<CameraProviderManager> manager,
const String8& /*monitorTags*/) {
int callingPid = getCallingPid();
status_t res;
ALOGI("CameraClient::initialize E (pid %d, id %d)", callingPid, mCameraId);//10-31 16:50:11.377 696 696 I CameraClient: [wanghl]CameraClient::initialize E (pid 4009, id 0)
// Verify ops permissions
res = startCameraOps();
if (res != OK) {
return res;
}
char camera_device_name[10];
snprintf(camera_device_name, sizeof(camera_device_name), "%d", mCameraId);
ALOGI("[wanghl] camera_device_name = %s", camera_device_name);//10-31 16:50:11.381 696 696 I CameraClient: [wanghl] camera_device_name = 0
mHardware = new CameraHardwareInterface(camera_device_name);// instantiate the camera HAL wrapper
res = mHardware->initialize(manager);// CameraHardwareInterface::initialize() descends into the HAL to open the camera driver
if (res != OK) {
ALOGE("%s: Camera %d: unable to initialize device: %s (%d)",
__FUNCTION__, mCameraId, strerror(-res), res);
mHardware.clear();
return res;
}
mHardware->setCallbacks(notifyCallback,
dataCallback,
dataCallbackTimestamp,
handleCallbackTimestampBatch,
(void *)(uintptr_t)mCameraId);// presumably routes data/messages coming back from lower-layer camera callbacks — TODO confirm
// Enable zoom, error, focus, and metadata messages by default
enableMsgType(CAMERA_MSG_ERROR | CAMERA_MSG_ZOOM | CAMERA_MSG_FOCUS |
CAMERA_MSG_PREVIEW_METADATA | CAMERA_MSG_FOCUS_MOVE);
LOG1("CameraClient::initialize X (pid %d, id %d)", callingPid, mCameraId);
return OK;
}
frameworks/av/services/camera/libcameraservice/device1/CameraHardwareInterface.cpp
// Opens a HAL session for this camera by delegating to
// CameraProviderManager::openSession(); the resulting HIDL device handle is
// stored in mHidlDevice.
status_t CameraHardwareInterface::initialize(sp<CameraProviderManager> manager) {
ALOGI("[wanghl]Opening camera %s", mName.string());//10-31 16:50:11.381 696 696 I CameraHardwareInterface: [wanghl]Opening camera 0
status_t ret = manager->openSession(mName.string(), this, &mHidlDevice);// three-argument CameraProviderManager::openSession() overload
if (ret != OK) {
ALOGE("%s: openSession failed! %s (%d)", __FUNCTION__, strerror(-ret), ret);
}
return ret;
}
frameworks/av/services/camera/libcameraservice/common/CameraProviderManager.cpp
// Opens a HAL3 (device@3.x) session for camera `id`: looks up device info
// for versions [3.0, 4.0) and invokes ICameraDevice::open() over HIDL; the
// session object is delivered via the HIDL result lambda.
status_t CameraProviderManager::openSession(const std::string &id,
const sp<hardware::camera::device::V3_2::ICameraDeviceCallback>& callback,
/*out*/
sp<hardware::camera::device::V3_2::ICameraDeviceSession> *session) {
std::lock_guard<std::mutex> lock(mInterfaceMutex);
auto deviceInfo = findDeviceInfoLocked(id,
/*minVersion*/ {3,0}, /*maxVersion*/ {4,0});
if (deviceInfo == nullptr) return NAME_NOT_FOUND;
auto *deviceInfo3 = static_cast<ProviderInfo::DeviceInfo3*>(deviceInfo);
Status status;
hardware::Return<void> ret;
// The opened session is handed back through the result callback.
ret = deviceInfo3->mInterface->open(callback, [&status, &session]
(Status s, const sp<device::V3_2::ICameraDeviceSession>& cameraSession) {
status = s;
if (status == Status::OK) {
*session = cameraSession;
}
});
if (!ret.isOk()) {
ALOGE("%s: Transaction error opening a session for camera device %s: %s",
__FUNCTION__, id.c_str(), ret.description().c_str());
return DEAD_OBJECT;
}
return mapToStatusT(status);
}
// In the HAL1-forced scenario described above, this V1_0 overload is the
// one taken.
status_t CameraProviderManager::openSession(const std::string &id,
const sp<hardware::camera::device::V1_0::ICameraDeviceCallback>& callback,
/*out*/
sp<hardware::camera::device::V1_0::ICameraDevice> *session) {
std::lock_guard<std::mutex> lock(mInterfaceMutex);
auto deviceInfo = findDeviceInfoLocked(id,
/*minVersion*/ {1,0}, /*maxVersion*/ {2,0});
if (deviceInfo == nullptr) return NAME_NOT_FOUND;
auto *deviceInfo1 = static_cast<ProviderInfo::DeviceInfo1*>(deviceInfo);
hardware::Return<Status> status = deviceInfo1->mInterface->open(callback);// reportedly dispatches to CameraDevice::open() in the device@1.0 default impl — TODO confirm
if (!status.isOk()) {
ALOGE("%s: Transaction error opening a session for camera device %s: %s",
__FUNCTION__, id.c_str(), status.description().c_str());
return DEAD_OBJECT;
}
if (status == Status::OK) {
*session = deviceInfo1->mInterface;
}
return mapToStatusT(status);
}
hardware/interfaces/camera/device/1.0/default/CameraDevice.cpp
#define LOG_TAG "CamDev@1.0-impl"
...
// HIDL device@1.0 open: queries the module for this camera's info, opens the
// legacy HAL (openLegacy for HAL>=2.3 modules exposing a >1.0 device, plain
// open otherwise), then wires up the preview window and callbacks.
Return<Status> CameraDevice::open(const sp<ICameraDeviceCallback>& callback) {
ALOGI("[wanghl] CameraDevice::open Opening camera %s", mCameraId.c_str());//10-31 21:14:22.391 561 1480 I CamDev@1.0-impl: [wanghl] CameraDevice::open Opening camera 0
Mutex::Autolock _l(mLock);
camera_info info;
status_t res = mModule->getCameraInfo(mCameraIdInt, &info);
if (res != OK) {
ALOGE("Could not get camera info: %s: %d", mCameraId.c_str(), res);
return getHidlStatus(res);
}
int rc = OK;
ALOGI("[wanghl] mModule->getModuleApiVersion() :0x%x, info.device_version :0x%x", mModule->getModuleApiVersion(), info.device_version);//11-01 10:06:54.888 561 1489 I CamDev@1.0-impl: [wanghl] mModule->getModuleApiVersion() :0x204, info.device_version :0x304
if (mModule->getModuleApiVersion() >= CAMERA_MODULE_API_VERSION_2_3 &&// on this target: 0x204 >= 0x203 && 0x304 > 0x100
info.device_version > CAMERA_DEVICE_API_VERSION_1_0) {
// Open higher version camera device as HAL1.0 device.
ALOGI("[wanghl] CameraDevice::open mModule->openLegacy");//10-31 21:14:22.391 561 1480 I CamDev@1.0-impl: [wanghl] CameraDevice::open mModule->openLegacy
rc = mModule->openLegacy(mCameraId.c_str(),// calls CameraModule::openLegacy
CAMERA_DEVICE_API_VERSION_1_0,
(hw_device_t **)&mDevice);
} else {
ALOGI("[wanghl] CameraDevice::open mModule->open");
rc = mModule->open(mCameraId.c_str(), (hw_device_t **)&mDevice);
}
if (rc != OK) {
mDevice = nullptr;
ALOGE("Could not open camera %s: %d", mCameraId.c_str(), rc);
return getHidlStatus(rc);
}
initHalPreviewWindow();
mDeviceCallback = callback;
mQDeviceCallback =
vendor::qti::hardware::camera::device::V1_0::IQCameraDeviceCallback::castFrom(callback);
if(mQDeviceCallback == nullptr) {
ALOGI("could not cast ICameraDeviceCallback to IQCameraDeviceCallback");
}
if (mDevice->ops->set_callbacks) {
mDevice->ops->set_callbacks(mDevice,
sNotifyCb, sDataCb, sDataCbTimestamp, sGetMemory, this);
}
return getHidlStatus(rc);
}
hardware/interfaces/camera/common/1.0/default/CameraModule.cpp
// Thin forwarding wrapper (with tracing) to the vendor camera module's
// open_legacy() hook in camera_module_t.
int CameraModule::openLegacy(
const char* id, uint32_t halVersion, struct hw_device_t** device) {
int res;
ATRACE_BEGIN("camera_module->open_legacy");
res = mModule->open_legacy(&mModule->common, id, halVersion, device);
ATRACE_END();
return res;
}
和之前分析的一样,mModule->open_legacy最终也是调用到camera_module_t结构的成员函数:
.open_legacy = (qcamera::QCameraCommon::needHAL1Support()) ?
qcamera::QCamera2Factory::open_legacy : NULL,
hardware/qcom/camera/QCamera2/QCamera2Factory.cpp
/*===========================================================================
* FUNCTION : openLegacy
*
* DESCRIPTION: Function to open older hal version implementation
*
* PARAMETERS :
* @camera_id : camera ID
* @halVersion: Based on camera_module_t.common.module_api_version
* @hw_device : ptr to struct storing camera hardware device info
*
* RETURN : 0 -- success
* non-zero failure code
*==========================================================================*/
int QCamera2Factory::openLegacy(
int32_t cameraId, uint32_t halVersion, struct hw_device_t** hw_device)
{
int rc = NO_ERROR;
LOGI("[wanghl]openLegacy halVersion: %d cameraId = %d", halVersion, cameraId);
//Assumption: all cameras can support legacy API version
if (cameraId < 0 || cameraId >= gQCamera2Factory->getNumberOfCameras())
return -ENODEV;
switch(halVersion)
{
#ifdef QCAMERA_HAL1_SUPPORT
case CAMERA_DEVICE_API_VERSION_1_0:
{
CAMSCOPE_INIT(CAMSCOPE_SECTION_HAL);
QCamera2HardwareInterface *hw =
new QCamera2HardwareInterface((uint32_t)cameraId);
if (!hw) {
LOGE("Allocation of hardware interface failed");
return NO_MEMORY;
}
rc = hw->openCamera(hw_device);// QCamera2HardwareInterface::openCamera()
if (rc != NO_ERROR) {
delete hw;
}
break;
}
#endif
default:
LOGE("Device API version: %d for camera id %d invalid",
halVersion, cameraId);
return BAD_VALUE;
}
return rc;
}
QCamera2Factory::open_legacy ->QCamera2HardwareInterface::openCamera 带一个参数
hardware/qcom/camera/QCamera2/HAL/QCamera2HWI.cpp
/*===========================================================================
* FUNCTION : openCamera
*
* DESCRIPTION: open camera (public entry: acquires perf lock, optionally
* enables fd/memory leak tracing, then delegates to the
* parameterless openCamera())
*
* PARAMETERS :
* @hw_device : double ptr for camera device struct
*
* RETURN : int32_t type of status
* NO_ERROR -- success
* non-zero failure code
*==========================================================================*/
int QCamera2HardwareInterface::openCamera(struct hw_device_t **hw_device)
{
KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL1_OPENCAMERA);
int rc = NO_ERROR;
int enable_fdleak=0;
int enable_memleak=0;
char prop[PROPERTY_VALUE_MAX];
if (mCameraOpened) {
*hw_device = NULL;
LOGE("Permission Denied");
return PERMISSION_DENIED;
}
LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
mCameraId);
m_perfLockMgr.acquirePerfLock(PERF_LOCK_OPEN_CAMERA);
#ifdef FDLEAK_FLAG
property_get("persist.vendor.camera.fdleak.enable", prop, "0");
enable_fdleak = atoi(prop);
if (enable_fdleak) {
LOGI("fdleak tool is enable for camera hal");
hal_debug_enable_fdleak_trace();
}
#endif
#ifdef MEMLEAK_FLAG
property_get("persist.vendor.camera.memleak.enable", prop, "0");
enable_memleak = atoi(prop);
if (enable_memleak) {
LOGI("memleak tool is enable for camera hal");
hal_debug_enable_memleak_trace();
}
#endif
rc = openCamera();// call this class's parameterless openCamera()
if (rc == NO_ERROR){
*hw_device = &mCameraDevice.common;
if (m_thermalAdapter.init(this) != 0) {
LOGW("Init thermal adapter failed");
}
}
else
*hw_device = NULL;
LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
mCameraId, rc);
return rc;
}
/*===========================================================================
* FUNCTION : openCamera
*
* DESCRIPTION: open camera
*
* PARAMETERS : none
*
* RETURN : int32_t type of status
* NO_ERROR -- success
* none-zero failure code
*==========================================================================*/
int QCamera2HardwareInterface::openCamera()
{
int32_t rc = NO_ERROR;
char value[PROPERTY_VALUE_MAX];
// Guard against double-open of this HAL instance.
if (mCameraHandle) {
LOGE("Failure: Camera already opened");
return ALREADY_EXISTS;
}
// Reserve the flash unit so the torch HAL cannot take it mid-session.
rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
if (rc < 0) {
LOGE("Failed to reserve flash for camera id: %d",
mCameraId);
return UNKNOWN_ERROR;
}
// alloc param buffer
// Queue the parameter-buffer allocation on the deferred-work thread so it
// runs in parallel with the rest of the open sequence.
DeferWorkArgs args;
memset(&args, 0, sizeof(args));
mParamAllocJob = queueDeferredWork(CMD_DEF_PARAM_ALLOC, args);
if (mParamAllocJob == 0) {
LOGE("Failed queueing PARAM_ALLOC job");
return -ENOMEM;
}
if (gCamCapability[mCameraId] != NULL) {
// Capabilities already cached: size and queue the metadata-buffer
// allocation in the background, then open the camera.
// allocate metadata buffers
DeferWorkArgs args;
DeferMetadataAllocArgs metadataAllocArgs;
memset(&args, 0, sizeof(args));
memset(&metadataAllocArgs, 0, sizeof(metadataAllocArgs));
uint32_t padding =
gCamCapability[mCameraId]->padding_info.plane_padding;
metadataAllocArgs.size = PAD_TO_SIZE(sizeof(metadata_buffer_t),
padding);
metadataAllocArgs.bufferCnt = CAMERA_MIN_METADATA_BUFFERS;
args.metadataAllocArgs = metadataAllocArgs;
mMetadataAllocJob = queueDeferredWork(CMD_DEF_METADATA_ALLOC, args);
if (mMetadataAllocJob == 0) {
LOGE("Failed to allocate metadata buffer");
rc = -ENOMEM;
goto error_exit1;
}
rc = camera_open((uint8_t)mCameraId, &mCameraHandle);// camera_open lives in hardware/qcom/camera/QCamera2/stack/mm-camera-interface/src/mm_camera_interface.c; both API1 and API2 funnel through it
if (rc) {
LOGE("camera_open failed. rc = %d, mCameraHandle = %p",
rc, mCameraHandle);
goto error_exit2;
}
mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
camEvtHandle,
(void *) this);
} else {
// No cached capabilities yet (e.g. first open after boot): open the
// camera first, then query and cache capabilities.
LOGH("Capabilities not inited, initializing now.");
rc = camera_open((uint8_t)mCameraId, &mCameraHandle);// same camera_open entry point in mm_camera_interface.c
if (rc) {
LOGE("camera_open failed. rc = %d, mCameraHandle = %p",
rc, mCameraHandle);
goto error_exit2;
}
if(NO_ERROR != initCapabilities(mCameraId,mCameraHandle)) {
LOGE("initCapabilities failed.");
rc = UNKNOWN_ERROR;
goto error_exit3;
}
mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
camEvtHandle,
(void *) this);
}
mBundledSnapshot = 0;
mActiveCameras = MM_CAMERA_TYPE_MAIN;
mFallbackMode = CAM_NO_FALLBACK;
if (isDualCamera()) {
LOGE("[wanghl]isDualCamera");// sample log: 11-01 10:39:39.315 562 1508 E QCamera : <HAL><ERROR> openCamera: 2026: [wanghl]isDualCamera
mActiveCameras |= MM_CAMERA_TYPE_AUX;
// Create and initialize FOV-control object
m_pFovControl = QCameraFOVControl::create(gCamCapability[mCameraId]->main_cam_cap,// QCameraFOVControl::create is analyzed below
gCamCapability[mCameraId]->aux_cam_cap);
if (m_pFovControl) {
*gCamCapability[mCameraId] = m_pFovControl->consolidateCapabilities(// FOV-control merges main/aux capabilities for dual-camera fusion
gCamCapability[mCameraId]->main_cam_cap,
gCamCapability[mCameraId]->aux_cam_cap);
} else {
LOGE("FOV-control: Failed to create an object");
rc = NO_MEMORY;
goto error_exit3;
}
}
// Init params in the background
// 1. It's safe to queue init job, even if alloc job is not yet complete.
// It will be queued to the same thread, so the alloc is guaranteed to
// finish first.
// 2. However, it is not safe to begin param init until after camera is
// open. That is why we wait until after camera open completes to schedule
// this task.
memset(&args, 0, sizeof(args));
mParamInitJob = queueDeferredWork(CMD_DEF_PARAM_INIT, args);
if (mParamInitJob == 0) {
LOGE("Failed queuing PARAM_INIT job");
rc = -ENOMEM;
goto error_exit3;
}
mCameraOpened = true;
//Notify display HAL that a camera session is active.
//But avoid calling the same during bootup because camera service might open/close
//cameras at boot time during its initialization and display service will also internally
//wait for camera service to initialize first while calling this display API, resulting in a
//deadlock situation. Since boot time camera open/close calls are made only to fetch
//capabilities, no need of this display bw optimization.
//Use "service.bootanim.exit" property to know boot status.
property_get("service.bootanim.exit", value, "0");
if (atoi(value) == 1) {
pthread_mutex_lock(&gCamLock);
if (gNumCameraSessions++ == 0) {
setCameraLaunchStatus(true);
}
pthread_mutex_unlock(&gCamLock);
}
// Setprop to decide the time source (whether boottime or monotonic).
// By default, use monotonic time.
property_get("persist.vendor.camera.time.monotonic", value, "1");
mBootToMonoTimestampOffset = 0;
if (atoi(value) == 1) {
// if monotonic is set, then need to use time in monotonic.
// So, Measure the clock offset between BOOTTIME and MONOTONIC
// The clock domain source for ISP is BOOTTIME and
// for Video/display is MONOTONIC
// The below offset is used to convert from clock domain of other subsystem
// (video/hardware composer) to that of camera. Assumption is that this
// offset won't change during the life cycle of the camera device. In other
// words, camera device shouldn't be open during CPU suspend.
mBootToMonoTimestampOffset = getBootToMonoTimeOffset();
}
LOGH("mBootToMonoTimestampOffset = %lld", mBootToMonoTimestampOffset);
memset(value, 0, sizeof(value));
property_get("persist.vendor.camera.depth.focus.cb", value, "1");
bDepthAFCallbacks = atoi(value);
memset(value, 0, sizeof(value));
property_get("persist.vendor.camera.cache.optimize", value, "1");
m_bOptimizeCacheOps = atoi(value);
return NO_ERROR;
// Error unwinding: later labels fall through to the earlier ones, so each
// failure point releases exactly what was acquired before it.
error_exit3:
if(mJpegClientHandle) {
deinitJpegHandle();
}
// mCameraHandle is non-NULL on every path reaching this label (camera_open succeeded).
mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
mCameraHandle = NULL;
error_exit2:
waitDeferredWork(mMetadataAllocJob);
error_exit1:
waitDeferredWork(mParamAllocJob);
return rc;
}
// NOTE: snippet elided ("...") — only the wide/tele camera-mapping logic is shown.
QCameraFOVControl* QCameraFOVControl::create(
cam_capability_t *capsMainCam,
cam_capability_t *capsAuxCam,
uint8_t isHAL3)
{
...
// Based on focal lengths, map main and aux camera to wide and tele
if (pFovControl->mDualCamParams.paramsMain.focalLengthMm <
pFovControl->mDualCamParams.paramsAux.focalLengthMm) {
pFovControl->mFovControlData.camWide = CAM_TYPE_MAIN;
pFovControl->mFovControlData.camTele = CAM_TYPE_AUX;
pFovControl->mFovControlData.camState = STATE_WIDE;
} else {
pFovControl->mFovControlData.camWide = /*CAM_TYPE_AUX*/CAM_TYPE_MAIN;// originally dual-cam preview used CAM_TYPE_AUX (infrared); changed to CAM_TYPE_MAIN so preview switches to the RGB main camera
pFovControl->mFovControlData.camTele = /*CAM_TYPE_MAIN*/CAM_TYPE_AUX;// swapped together with camWide above
pFovControl->mFovControlData.camState = STATE_TELE;
}
}
接下来的调用流程就和之前分析的是一样的了camera_open->mm_camera_open(hardware/qcom/camera/QCamera2/stack/mm-camera-interface/src/mm_camera.c)->
my_obj->ctrl_fd = open(dev_name, O_RDWR | O_NONBLOCK);
<二>以上我们分析了高通骁龙相机用反射的方法打开了camera,并且能够指定hal版本,
下面我们再看下普通APP是怎么打开camera,普通APP的API或者说JAVA的接口是怎么样的?
在apk(AndroidPackage:安卓安装包)中要想操作camera,必须要如下获取一个具体的camera对象:
Camera camera = Camera.open(int cameraId);
Camera类的open方法就在frameworks/base/core/java/android/hardware/Camera.java
/**
 * Opens the camera with the given id by constructing a new Camera object.
 * This is the public API entry point an application calls; the real work
 * happens in the Camera(int) constructor.
 *
 * @param cameraId id of the camera to open
 * @return a new Camera instance bound to cameraId
 */
public static Camera open(int cameraId) {
    Log.e(TAG, "[wanghl] open(cameraID)");
    Camera camera = new Camera(cameraId);
    return camera;
}
Camera(cameraId)的具体实现
/** used by Camera#open, Camera#open(int) */
Camera(int cameraId) {
Log.e(TAG, "[wanghl] Camera open" + cameraId);
// Reject ids beyond the number of cameras reported by the service.
if(cameraId >= getNumberOfCameras()){
throw new RuntimeException("Unknown camera ID");
}
/*
Debug-only experiment that forced camera ids 0 and 1 to the dual-camera
id 2; kept commented out for reference, can be ignored.
if(cameraId == 0)
{
cameraId = 2;
Log.e(TAG, "[wanghl] force 0 to cameraId: " + cameraId);
}
else if(cameraId == 1){
// Log.e(TAG, "[wanghl] ignore cameraId: " + cameraId);
// return;
cameraId = 2;
Log.e(TAG, "[wanghl] force 1 to cameraId: " + cameraId);
}
*/
// Native init via cameraInitNormal (HAL version CAMERA_HAL_API_VERSION_NORMAL_CONNECT).
int err = cameraInitNormal(cameraId);
if (checkInitErrors(err)) {
if (err == -EACCES) {
throw new RuntimeException("Fail to connect to camera service");
} else if (err == -ENODEV) {
throw new RuntimeException("Camera initialization failed");
}
// Should never hit this.
throw new RuntimeException("Unknown camera error");
}
initAppOps();
}
// Normal-app connect path: delegates to cameraInitVersion with the HAL
// version pinned to CAMERA_HAL_API_VERSION_NORMAL_CONNECT, a private
// constant defined as -2 (private static final int CAMERA_HAL_API_VERSION_NORMAL_CONNECT = -2).
private int cameraInitNormal(int cameraId) {
return cameraInitVersion(cameraId, CAMERA_HAL_API_VERSION_NORMAL_CONNECT);// same cameraInitVersion entry point the reflection-based openLegacy path reaches
}
这样就和反射的方法殊途同归地走到一样的函数接口:cameraInitVersion,比如Zoom的软件就是用的这种方法,虽然我们没有源码,看到通过打印的log可以判断,他就是调用的这种打开方法
<三>调用Camera的不带任何参数open()方法打开相机,我们来看一段示例代码,能不能运行不知道,只是看下大概流程,跟踪一下区别:
/**
 * Example of the classic android.hardware.Camera (API1) usage sequence:
 * open -> getParameters -> configure -> setParameters -> setPreviewDisplay
 * -> startPreview -> autoFocus -> takePicture. Illustrative only; error
 * handling is minimal.
 *
 * Fix over the original sample: {@code camera} is now initialized to null.
 * The original declared it without an initializer and then read it in the
 * second if-statement, which violates Java definite-assignment rules and
 * fails to compile ("variable camera might not have been initialized").
 */
private void initCamera()
{
    Camera camera = null; // the camera in use; must be initialized before the null check below
    if (!isPreview)
    {
        camera = Camera.open(); // 1. open the camera via Camera.open()
    }
    if (camera != null && !isPreview)
    {
        try
        {
            // 2. getParameters() returns a Camera.Parameters snapshot.
            Camera.Parameters parameters = camera.getParameters();
            // 3. configure capture parameters on the Parameters object
            // preview size
            parameters.setPreviewSize(screenWidth, screenHeight);
            // 4 preview frames per second
            parameters.setPreviewFrameRate(4);
            // picture format
            parameters.setPictureFormat(PixelFormat.JPEG);
            // JPEG quality
            parameters.set("jpeg-quality",85);
            // picture size
            parameters.setPictureSize(screenWidth, screenHeight);
            // 4. push the whole parameter set back with setParameters()
            camera.setParameters(parameters);
            /**
             * 5. before startPreview(), setPreviewDisplay(SurfaceHolder)
             * selects the SurfaceView that renders the preview frames.
             */
            camera.setPreviewDisplay(surfaceHolder);
            // 6. start preview
            camera.startPreview();
            // 7. auto focus
            camera.autoFocus(afcb);
            // 8. take the picture
            camera.takePicture(null, null , myjpegCallback);
        }
        catch (Exception e)
        {
            e.printStackTrace();
        }
        isPreview = true;
    }
}
展开具体代码来看:
/**
 * Creates a new Camera object for the first back-facing camera on the
 * device. Scans every camera reported by getNumberOfCameras() and opens
 * the first whose facing is CAMERA_FACING_BACK; otherwise acts like the
 * {@link #open(int)} call.
 *
 * @return a new Camera object for the first back-facing camera, or null
 *         if there is no back-facing camera
 * @see #open(int)
 */
public static Camera open() {
    Log.e(TAG, "[wanghl] open()");
    final int cameraCount = getNumberOfCameras();
    final CameraInfo info = new CameraInfo();
    for (int index = 0; index < cameraCount; index++) {
        getCameraInfo(index, info);
        if (info.facing != CameraInfo.CAMERA_FACING_BACK) {
            continue;
        }
        // A back-facing camera was found: open it exactly like open(int).
        return new Camera(index);
    }
    // No back-facing camera exists: the no-arg overload opens nothing.
    return null;
}
// NOTE: class body elided ("...") — only the facing-related members are shown.
public static class CameraInfo {
...
/**
 * The facing of the camera is opposite to that of the screen.
 */
public static final int CAMERA_FACING_BACK = 0;
/**
 * The facing of the camera is the same as that of the screen.
 */
public static final int CAMERA_FACING_FRONT = 1;
...
/**
 * The direction that the camera faces. It should be
 * CAMERA_FACING_BACK or CAMERA_FACING_FRONT.
 */
public int facing;// CameraInfo only distinguishes two facings: back or front
...
}
=============================================================================================
// Channel-level event types dispatched through the mm_camera channel state
// machine (mm_channel_fsm_fn in mm_camera_channel.c). The trailing numeric
// comments record each enumerator's implicit value for quick
// cross-referencing against logs.
typedef enum {
MM_CHANNEL_EVT_ADD_STREAM,//0
MM_CHANNEL_EVT_DEL_STREAM,//1
MM_CHANNEL_EVT_LINK_STREAM,//2
MM_CHANNEL_EVT_CONFIG_STREAM,//3
MM_CHANNEL_EVT_GET_BUNDLE_INFO,//4
MM_CHANNEL_EVT_START,//5
MM_CHANNEL_EVT_STOP,//6
MM_CHANNEL_EVT_PAUSE,//7
MM_CHANNEL_EVT_RESUME,//8
MM_CHANNEL_EVT_REQUEST_SUPER_BUF,//9
MM_CHANNEL_EVT_CANCEL_REQUEST_SUPER_BUF,//10
MM_CHANNEL_EVT_FLUSH_SUPER_BUF_QUEUE,//11
MM_CHANNEL_EVT_CONFIG_NOTIFY_MODE,//12
MM_CHANNEL_EVT_START_ZSL_SNAPSHOT,//13
MM_CHANNEL_EVT_STOP_ZSL_SNAPSHOT,//14
MM_CHANNEL_EVT_MAP_STREAM_BUF,//15
MM_CHANNEL_EVT_UNMAP_STREAM_BUF,//16
MM_CHANNEL_EVT_SET_STREAM_PARM,//17
MM_CHANNEL_EVT_GET_STREAM_PARM,//18
MM_CHANNEL_EVT_DO_ACTION,//19
MM_CHANNEL_EVT_DELETE,//20
MM_CHANNEL_EVT_AF_BRACKETING,//21
MM_CHANNEL_EVT_AE_BRACKETING,//22
MM_CHANNEL_EVT_FLASH_BRACKETING,//23
MM_CHANNEL_EVT_ZOOM_1X,//24
MM_CAMERA_EVT_CAPTURE_SETTING,//25
MM_CHANNEL_EVT_GET_STREAM_QUEUED_BUF_COUNT,//26
MM_CHANNEL_EVT_MAP_STREAM_BUFS,//27
MM_CHANNEL_EVT_REG_STREAM_BUF_CB,//28
MM_CHANNEL_EVT_REG_FRAME_SYNC,//29
MM_CHANNEL_EVT_TRIGGER_FRAME_SYNC,//30
} mm_channel_evt_type_t;
启动预览的时候:
mm_stream_streamon
->rc = ioctl(my_obj->fd, VIDIOC_STREAMON, &buf_type);
->
cam_shim_packet_t *shim_cmd;
cam_shim_cmd_data shim_cmd_data;
memset(&shim_cmd_data, 0, sizeof(shim_cmd_data));
shim_cmd_data.command = MSM_CAMERA_PRIV_STREAM_ON;//
shim_cmd_data.stream_id = my_obj->server_stream_id;
shim_cmd_data.value = NULL;
LOGI("[wanghl] mm_camera_create_shim_cmd_packet");
shim_cmd = mm_camera_create_shim_cmd_packet(CAM_SHIM_SET_PARM,
cam_obj->sessionid, &shim_cmd_data);
rc = mm_camera_module_send_cmd(shim_cmd);
->mct_shimlayer_process_event
->mct_shimlayer_handle_parm
->mct_controller_proc_serv_msg//将图像服务器消息发送到媒体控制器的消息队列
mm_camera_destroy_shim_cmd_packet(shim_cmd);
QCameraCmdThread::launch这个类的方法也会创建线程
代码从framework哪个目录开始区分API1还是API2?
frameworks/av/services/camera/libcameraservice/api1/
frameworks/av/services/camera/libcameraservice/api2/
代码从hardware那个目录开始区分HAL1还是HAL3?
hardware/qcom/camera/QCamera2/HAL
hardware/qcom/camera/QCamera2/HAL3
---------------------------------------------------------------------------------------------------------------
setParameters参数设置实例:
Camera.Parameters parameters = camera.getParameters();
parameters.setPictureSize(1920, 1080);//将键值对存储到LinkedHashMap结构
parameters.setPreviewSize(1920, 1080);//将键值对存储到LinkedHashMap结构
camera.setParameters(parameters);//设置完所有参数之后调用这个让参数生效
追一下setParameters流程,看把parameters传到哪里了?
->
public void setParameters(Parameters params) {...}
native_setParameters(params.flatten());//params.flatten()将LinkedHashMap转换成string
android_hardware_Camera.cpp:
{ "native_setParameters",
"(Ljava/lang/String;)V",
(void *)android_hardware_Camera_setParameters }
static void android_hardware_Camera_setParameters(JNIEnv *env, jobject thiz, jstring params)//C函数
参数和返回值,具体的每一个字符的对应关系如下
字符 Java类型 C类型
V void void
Z jboolean boolean
I jint int
J jlong long
D jdouble double
F jfloat float
B jbyte byte
C jchar char
S jshort short
上面的都是基本类型。如果Java函数的参数是class,则以"L"开头,以";"结尾,中间是用"/" 隔开的包及类名。而其对应的C函数名的参数则为jobject. 一个例外是String类,其对应的类为jstring
由Camera.java我们知道private native final void native_setParameters(String params);//java接口的参数是String类
"(Ljava/lang/String;)V",//L开头,;结尾,说明java函数的参数是类,java/lang是包名(类似import java.lang.String,java库的包名),String是类名,返回值是void,C函数对应的参数是jstring
java.lang包是java语言的核心,它提供了java中的基础类。包括基本Object类、Class类、String类、基本类型的包装类、基本的数学类等等最基本的类。
aidl的作用:
aidl提供的通信接口,通信方法进行进程间通信
进程<------------------------------------------>service进程
android_hardware_Camera_setParameters()->//android_hardware_Camera.cpp
(camera->setParameters(params8)->
status_t Camera::setParameters(const String8& params)//Camera.cpp
sp <::android::hardware::ICamera> c = mCamera;// mCamera 就是在 CameraService::connect(&mCamera) 时得到的。所以 mCamera = CameraClient(代理类对象)
return c->setParameters(params);->
status_t CameraClient::setParameters(const String8& params) {//CameraClient.cpp
ALOGI("[wanghl]setParameters (pid %d) (%s)", getCallingPid(), params.string());//这个打印可以看下
11-13 10:46:27.976 699 1864 I CameraClient: [wanghl]setParameters (pid 4307) (ae-bracket-hdr-values=Off,AE-Bracket;antibanding-values=off,60hz,50hz,auto;auto-exposure-lock-supported=true;auto-exposure-values=frame-average,center-weighted,spot-metering,center-weighted,spot-metering-adv,center-weighted-adv;auto-hdr-supported=true;auto-whitebalance-lock-supported=true;avtimer=disable;brightness-step=1;cache-video-buffers=disable;capture-burst-interval=1;capture-burst-queue-depth=2;capture-burst-retroactive=2;cds-mode-values=off,on,auto;contrast-step=1;denoise-values=denoise-off,denoise-on;dis=disable;dis-values=enable,disable;effect-values=none,mono,negative,solarize,sepia,posterize,whiteboard,blackboard,aqua,emboss,sketch,neon;exposure-compensation-step=0.166667;face-detection-values=off,on;finetune-values=enable,disable;flip-mode-values=off,flip-v,flip-h,flip-vh;focal-length=3.3;focus-distances=Infinity,Infinity,Infinity;focus-mode-values=fixed;hdr-mode-values=hdr-mode-multiframe;hdr-need-1x-values=false,true;hfr-size-values=;histogram=disable;histogra//这个log实际上打印不完全,preview-size没打印出来
return mHardware->setParameters(p);-> //就是调用 CameraHardwareInterface::setParameters()
status_t CameraHardwareInterface::setParameters(const CameraParameters ¶ms)//CameraHardwareInterface.cpp
mHidlDevice->setParameters(params.flatten().string()));->//mHidlDevice 是 CameraDevice
Return<Status> CameraDevice::setParameters(const hidl_string& params) {//CameraDevice.cpp
if (mDevice->ops->set_parameters) {->//mDevice 就是由 open HAL 层得到的 就是调用 QCamera2HardwareInterface::set_parameters()
int QCamera2HardwareInterface::set_parameters(struct camera_device *device, const char *parms)//QCamera2HWI.cpp
LOGI("[wanghl]E camera id %d, parms : %s", hw->getCameraId(), parms);//参数打印出来的还是字符串表示的所有参数
ret = hw->processAPI(QCAMERA_SM_EVT_SET_PARAMS, (void *)parms);->//更新状态机状态为:QCAMERA_SM_EVT_SET_PARAMS,命令和参数都会传递到状态机里
int32_t QCameraStateMachine::procAPI(qcamera_sm_evt_enum_t evt,
void *api_payload)//QCameraStateMachine.cpp 处理来自framework层来的API请求
node->evt_payload = api_payload;//参数传到了payload,这个打印出来还是所有参数的字符串
cam_sem_post(&cmd_sem);//计数信号量增加,允许线程访问
int32_t QCameraStateMachine::procEvtPreviewingState(qcamera_sm_evt_enum_t evt,
void *payload)//QCameraStateMachine.cpp 接收到的参数:payload
case QCAMERA_SM_EVT_SET_PARAMS://状态机接收的消息
rc = m_parent->updateParameters((char*)payload, needRestart);->
int QCamera2HardwareInterface::updateParameters(const char *parms, bool &needRestart)//QCamera2HWI.cpp
(1)rc = mParameters.updateParameters(str, needRestart);->//更新了QCameraParametersIntf对象的参数,等于参数已经存储到QCameraParametersIntf这个对象了
(2)m_channels[i]->UpdateStreamBasedParameters(mParameters);->//将这个类QCameraParametersIntf更新到StreamBasedParameters,更新stream相关的信息
走了两步,分步骤解体:
先来分析(1)的流程:
int32_t QCameraParametersIntf::updateParameters(const String8& params, bool &needRestart)//QCameraParametersIntf.cpp
mImpl->updateParameters(params, needRestart);//mImpl就是类QCameraParametersIntf的一个成员:QCameraParameters *mImpl;
int32_t QCameraParameters::updateParameters(const String8& p, bool &needRestart)//QCameraParameters.cpp
if ((rc = setPreviewSize(params))) final_rc = rc;
int32_t QCameraParameters::setPreviewSize(const QCameraParameters& params)//QCameraParameters.cpp
params.getPreviewSize(&width, &height);//通过QCameraParameters类的getPreviewSize方法可以拿到此次需要设置的分辨率大小参数
LOGI("[wanghl]Requested preview size %d x %d", width, height);//打印出这次需要设置的分辨率大小,[wanghl]Requested preview size 1280 x 720
CameraParameters::setPreviewSize(width, height);//在系统支持列表里找到和要求设置的分辨率一样的就设置
void CameraParameters::setPreviewSize(int width, int height)//hardware/qcom/camera/QCamera2/HAL/CameraParameters.cpp
set(KEY_PREVIEW_SIZE, str);//两个参数都是char *类型数据
void CameraParameters::set(const char *key, const char *value)//CameraParameters.cpp
mMap.replaceValueFor(String8(key), String8(value));
mMap是个什么玩意儿呢?
头文件CameraParameters.h有定义: DefaultKeyedVector<String8,String8> mMap;//DefaultKeyedVector 类型容器实际上是一个模板类,继承自 KeyedVector 模板类,实现在 KeyedVector.h 文件中.这里实际上应该就指定了KEY和VALUE的类型是String8
system/core/libutils/include/utils/KeyedVector.h:
template <typename KEY, typename VALUE>//模板类,键值对类型的容器
class DefaultKeyedVector : public KeyedVector<KEY, VALUE>//继承自 KeyedVector 模板类
{
public:
inline DefaultKeyedVector(const VALUE& defValue = VALUE());
const VALUE& valueFor(const KEY& key) const;//DefaultKeyedVector 类只是比基类添加了 valueFor() 方法和一个默认 value 值 mDefault。
private:
VALUE mDefault;
};
template<typename KEY, typename VALUE> inline
ssize_t KeyedVector<KEY,VALUE>::replaceValueFor(const KEY& key, const VALUE& value) {
key_value_pair_t<KEY,VALUE> pair(key, value);
mVector.remove(pair);//删除键值对
return mVector.add(pair);//增加键值对,实际上就是替换了键值对
}
//所以,以上,我们设置了一个参数,到了HAL层这边就是将参数存储到了DefaultKeyedVector这个模板类
再来分析(2)的流程:
int32_t QCameraChannel::UpdateStreamBasedParameters(QCameraParametersIntf ¶m)//QCameraChannel.cpp,更新流的基本设置,更新到preview流,video流,snapshot/postview流
rc = mStreams[i]->setParameter(param_buf);->
int32_t QCameraStream::setParameter(cam_stream_parm_buffer_t ¶m)//QCameraStream.cpp
rc = mCamOps->set_stream_parms(mCamHandle,//实际上从log上看,这个方法都没有跑,所以存储参数到(1)步骤就结束了,但是在启动预览的时候会调用这个方法:
mChannelHandle,
mHandle,
&mStreamInfo->parm_buf);->
int QCamera2HardwareInterface::startPreview()//QCamera2HWI.cpp
rc = startChannel(QCAMERA_CH_TYPE_ZSL);
int32_t QCamera2HardwareInterface::startChannel(qcamera_ch_type_enum_t ch_type)//QCamera2HWI.cpp
rc = m_channels[ch_type]->start();
int32_t QCameraChannel::start()//QCameraChannel.cpp
mStreams[i]->setBundleInfo();
int32_t QCameraStream::setBundleInfo()//QCameraStream.cpp
ret = mCamOps->set_stream_parms(mCamHandle, channelHdl, streamHdl, &mStreamInfo->parm_buf);//启动预览就调用到了这里,设置流参数,这里就已经是要让参数生效了
/* camera ops v-table */
static mm_camera_ops_t mm_camera_ops = {//mm_camera_interface.c
.set_stream_parms = mm_camera_intf_set_stream_parms,
static int32_t mm_camera_intf_set_stream_parms(uint32_t camera_handle,//mm_camera_interface.c
uint32_t ch_id,
uint32_t s_id,
cam_stream_parm_buffer_t *parms)
rc = mm_camera_set_stream_parms(my_obj, chid, strid, parms);->
int32_t mm_camera_set_stream_parms(mm_camera_obj_t *my_obj,
uint32_t ch_id,
uint32_t s_id,
cam_stream_parm_buffer_t *parms)//mm_camera.c
rc = mm_channel_fsm_fn(ch_obj,
MM_CHANNEL_EVT_SET_STREAM_PARM,
(void *)&payload,
NULL);->
int32_t mm_channel_fsm_fn(mm_channel_t *my_obj,
mm_channel_evt_type_t evt,
void * in_val,
void * out_val)//mm_camera_channel.c
case MM_CHANNEL_STATE_STOPPED:
rc = mm_channel_fsm_fn_stopped(my_obj, evt, in_val, out_val);->
int32_t mm_channel_fsm_fn_stopped(mm_channel_t *my_obj,
mm_channel_evt_type_t evt,
void * in_val,
void * out_val)//mm_camera_channel.c
case MM_CHANNEL_EVT_SET_STREAM_PARM:
rc = mm_channel_set_stream_parm(my_obj, payload);->
int32_t mm_channel_set_stream_parm(mm_channel_t *my_obj,
mm_evt_paylod_set_get_stream_parms_t *payload)//mm_camera_channel.c
rc = mm_stream_fsm_fn(s_obj,
MM_STREAM_EVT_SET_PARM,
(void *)payload,
NULL);->
int32_t mm_stream_fsm_fn(mm_stream_t *my_obj,
mm_stream_evt_type_t evt,
void * in_val,
void * out_val)//mm_camera_stream.c
case MM_STREAM_STATE_CFG:
rc = mm_stream_fsm_cfg(my_obj, evt, in_val, out_val);->
int32_t mm_stream_fsm_cfg(mm_stream_t * my_obj,
mm_stream_evt_type_t evt,
void * in_val,
void * out_val)//mm_camera_stream.c
case MM_STREAM_EVT_SET_PARM:
rc = mm_stream_set_parm(my_obj, payload->parms);->
int32_t mm_stream_set_parm(mm_stream_t *my_obj,
cam_stream_parm_buffer_t *in_value)//mm_camera_stream.c
rc = mm_camera_util_s_ctrl(cam_obj, stream_id, my_obj->fd,
CAM_PRIV_STREAM_PARM, &value);->
int32_t mm_camera_util_s_ctrl(__unused mm_camera_obj_t *my_obj, __unused int stream_id, int32_t fd, uint32_t id, int32_t *value)
shim_cmd_data.command = id;//id = CAM_PRIV_STREAM_PARM;
shim_cmd_data.stream_id = stream_id;
shim_cmd_data.value = NULL;
shim_cmd = mm_camera_create_shim_cmd_packet(CAM_SHIM_SET_PARM, my_obj->sessionid,&shim_cmd_data);->//发送CAM_SHIM_SET_PARM命令
rc = mm_camera_module_send_cmd(shim_cmd);
接下来看接收处理CAM_SHIM_SET_PARM命令的地方:
int mct_shimlayer_process_event(cam_shim_packet_t *packet)//mct_shim_layer.c 这就跳转到到vendor目录下了:vendor/qcom/proprietary/mm-camera/mm-camera2/media-controller/mct_shim_layer/mct_shim_layer.c
case CAM_SHIM_SET_PARM:
rc = mct_shimlayer_handle_parm(packet->cmd_type, session_id, parm_event);
static boolean mct_shimlayer_handle_parm(cam_shim_cmd_type cmd_type,
uint32_t session_id, cam_shim_cmd_data *parm_event)//mct_shim_layer.c
if (mct_controller_proc_serv_msg(&serv_msg) == FALSE) {//serv_msg.u.hal_msg.id = cmd_type = MSM_CAMERA_SET_PARM
boolean mct_controller_proc_serv_msg(mct_serv_msg_t *serv_msg)//mct_controller.c
*msg = *serv_msg;
/* Push message to Media Controller Message Queue
* and post signal to Media Controller */
mct_queue_push_tail(mct->serv_cmd_q, msg);->//推送消息到MCT消息队列并且发送信号到MCT
static void* mct_controller_thread_run(void *data)//mct_controller.c
{
...
mct_this = (mct_controller_t *)data;
...
do{
...
while(1)
{
...
msg = (mct_serv_msg_t *)mct_queue_pop_head(mct_this->serv_cmd_q);//取出MCT命令队列的消息,这个是由Imaging Server发送给MCT/MediaController的消息
...
}
...
proc_ret = mct_controller_proc_serv_msg_internal(mct_this, msg);//MCT处理imaging server发来的消息,mct_this就来源于data(将被强制转化为mct_controller_t *)数据
}while(1);
}
static mct_process_ret_t mct_controller_proc_serv_msg_internal(
mct_controller_t *mct, mct_serv_msg_t *msg)//mct_controller.c
case SERV_MSG_HAL:
ret.u.serv_msg_ret.error = pipeline->process_serv_msg(&msg->u.hal_msg, pipeline);
static boolean mct_pipeline_process_serv_msg(void *message, mct_pipeline_t *pipeline)//mct_pipeline.c
case MSM_CAMERA_SET_PARM:
ret = mct_pipeline_process_set(data, pipeline);
static boolean mct_pipeline_process_set(struct msm_v4l2_event_data *data, mct_pipeline_t *pipeline)
//stream = mct_pipeline_get_stream(pipeline, &info);//steam也是从pipeline拿到的
CLOGI(CAM_MCT_MODULE, "[wanghl]mct_pipeline_process_set,command=0x%x", data->command);
//11-06 11:27:30.814 566 4592 I mm-camera: <MCT >< INFO> 4146: mct_pipeline_process_set: [wanghl]mct_pipeline_process_set,command=0x8000019//CAM_PRIV_STREAM_PARM
case CAM_PRIV_STREAM_PARM: {
CLOGE(CAM_MCT_MODULE, "[wanghl]stream->streaminfo.dim:%d x %d,stream->streaminfo.stream_type : %d",
stream->streaminfo.dim.width, stream->streaminfo.dim.height, stream->streaminfo.stream_type);
//11-18 19:19:01.299 564 4521 E mm-camera: <MCT ><ERROR> 4428: mct_pipeline_process_set: [wanghl]stream->streaminfo.dim:720 x 480,stream->streaminfo.stream_type : 1
//11-18 19:19:01.300 564 4521 E mm-camera: <MCT ><ERROR> 4428: mct_pipeline_process_set: [wanghl]stream->streaminfo.dim:720 x 480,stream->streaminfo.stream_type : 3
//11-18 19:19:01.300 564 4521 E mm-camera: <MCT ><ERROR> 4428: mct_pipeline_process_set: [wanghl]stream->streaminfo.dim:640 x 426,stream->streaminfo.stream_type : 11
//就是说到这里,stream_type为1,3,11,也就是1:preview, 3:snapshot, 11:analysis 的stream已经被拿到了
event_data.type = MCT_EVENT_CONTROL_PARM_STREAM_BUF;//MCT事件类型是MCT_EVENT_CONTROL_PARM_STREAM_BUF
event_data.control_event_data = (void *)&stream->streaminfo.parm_buf;
event_data.size = sizeof(cam_stream_parm_buffer_t);
stream->streaminfo.parm_buf = stream_info_buf->parm_buf;
ret = mct_pipeline_pack_event(MCT_EVENT_CONTROL_CMD,///*MCT_EVENT_CONTROL_CMD代表的是来自MCT的控制命令 Control command from Media Controller */
(pack_identity(MCT_PIPELINE_SESSION(pipeline), stream->streamid)),
MCT_EVENT_DOWNSTREAM, &event_data, &cmd_event);//方向是MCT_EVENT_DOWNSTREAM
if (ret == FALSE) {
CLOGE(CAM_MCT_MODULE, "Error in packing event");
break;
}
if (pipeline->send_event)
ret = pipeline->send_event(pipeline, data->stream_id, &cmd_event);//发送事件
//处理事件:
static boolean module_sensor_module_process_event(mct_module_t *module,
mct_event_t *event)//module_sensor.c
case MCT_EVENT_CONTROL_PARM_STREAM_BUF: {
ret = module_sensor_event_control_parm_stream_buf(
module, event, bundle_info.s_bundle);->
static boolean module_sensor_event_control_parm_stream_buf(
mct_module_t *module,
mct_event_t* event,
module_sensor_bundle_info_t *s_bundle __attribute__((unused)))//module_sensor.c
case CAM_STREAM_PARAM_TYPE_SET_BUNDLE_INFO: {//97
sensor_util_assign_bundle_id(module, event->identity, &stream_parm->bundleInfo);->
int32_t sensor_util_assign_bundle_id(mct_module_t* module, uint32_t identity, cam_bundle_config_t* bundle)//sensor_util.c
/* assign the bundle id to streams and get max dimensions for a bundle */
mct_list_traverse(port_data->stream_list, sensor_util_fill_bundle_params, bundle_info);//绑定ID并获取最大分辨率
static boolean sensor_util_fill_bundle_params(void* data, void* user_data)//sensor_util.c
SERR("[wanghl] stream_info->width: %d, stream_info->width : %d", stream_info->width, stream_info->height)
//看log,这里已经能够取出cam_stream_type_t为1,3,11的三个分辨率信息,和之前分析类似(注意:上面SERR打印语句的第二个标签误写成了width,实际传入打印的是stream_info->height,所以下面log里第二个数值是高度)
11-19 10:30:14.251 563 4368 E mm-camera: <SENSOR><ERROR> 475: sensor_util_fill_bundle_params: [wanghl] stream_info->width: 1280, stream_info->width : 720
11-19 10:30:14.251 563 4368 E mm-camera: <SENSOR><ERROR> 475: sensor_util_fill_bundle_params: [wanghl] stream_info->width: 1920, stream_info->width : 1080
11-19 10:30:14.251 563 4368 E mm-camera: <SENSOR><ERROR> 475: sensor_util_fill_bundle_params: [wanghl] stream_info->width: 640, stream_info->width : 360
-------------------------------------------------------------------------------------------------
setResolutionIdx
SENSOR_SET_RESOLUTION
MSM_CAMERA_PRIV_QUERY_CAP//开机之后还没开camera就会发这个命令了
CAM_INTF_PARM_MAX_DIMENSION//45
[wanghl]module_sensor_event_control_set_parm event type =32//CAM_INTF_PARM_AEC_ROI,
[wanghl]module_sensor_event_control_set_parm event type =51// CAM_INTF_PARM_BURST_LED_ON_PERIOD,
[wanghl]module_sensor_event_control_set_parm event type =95// CAM_INTF_PARM_ADV_CAPTURE_MODE,
[wanghl]module_sensor_event_control_set_parm event type =161//CAM_INTF_PARM_STATS_DEBUG_MASK,
[wanghl]module_sensor_event_control_set_parm event type =162// CAM_INTF_PARM_STATS_AF_PAAF,
[wanghl]module_sensor_event_control_set_parm event type =170// CAM_INTF_META_JPEG_ORIENTATION,
[wanghl]module_sensor_event_control_set_parm event type =190// CAM_INTF_PARM_CUSTOM,
[wanghl]module_sensor_event_control_set_parm event type =113// CAM_INTF_META_STREAM_INFO, 疑似
[wanghl]module_sensor_event_control_set_parm event type =45// CAM_INTF_PARM_MAX_DIMENSION,疑似
[wanghl]module_sensor_event_control_set_parm event type =10// CAM_INTF_PARM_FPS_RANGE,
[wanghl]module_sensor_event_control_set_parm event type =18// CAM_INTF_META_LENS_OPT_STAB_MODE,
[wanghl]module_sensor_event_control_set_parm event type =51// CAM_INTF_PARM_BURST_LED_ON_PERIOD,
[wanghl]module_sensor_event_control_set_parm event type =95// CAM_INTF_PARM_ADV_CAPTURE_MODE,
[wanghl]module_sensor_event_control_set_parm event type =161// CAM_INTF_PARM_STATS_DEBUG_MASK,
[wanghl]module_sensor_event_control_set_parm event type =162// CAM_INTF_PARM_STATS_AF_PAAF,
[wanghl]module_sensor_event_control_set_parm event type =170// CAM_INTF_META_JPEG_ORIENTATION,
[wanghl]module_sensor_event_control_set_parm event type =190// CAM_INTF_PARM_CUSTOM,
[wanghl]module_sensor_event_control_set_parm event type =51// CAM_INTF_PARM_BURST_LED_ON_PERIOD,
[wanghl]module_sensor_event_control_set_parm event type =95// CAM_INTF_PARM_ADV_CAPTURE_MODE,
[wanghl]module_sensor_event_control_set_parm event type =161// CAM_INTF_PARM_STATS_DEBUG_MASK,
[wanghl]module_sensor_event_control_set_parm event type =162// CAM_INTF_PARM_STATS_AF_PAAF,
[wanghl]module_sensor_event_control_set_parm event type =170// CAM_INTF_META_JPEG_ORIENTATION,
[wanghl]module_sensor_event_control_set_parm event type =190// CAM_INTF_PARM_CUSTOM,
CAM_INTF_PARM_HAL_VERSION = 0x1,
CAM_INTF_META_MODE,
CAM_INTF_META_AEC_MODE,
CAM_INTF_PARM_WHITE_BALANCE,
CAM_INTF_PARM_FOCUS_MODE,
CAM_INTF_PARM_ANTIBANDING,
CAM_INTF_PARM_EXPOSURE_COMPENSATION,
CAM_INTF_PARM_EV_STEP,
CAM_INTF_PARM_AEC_LOCK,
CAM_INTF_PARM_FPS_RANGE, /* 10 */
CAM_INTF_PARM_AWB_LOCK,
CAM_INTF_PARM_EFFECT,
CAM_INTF_PARM_RAW_DIMENSION,
CAM_INTF_PARM_DIS_ENABLE,
CAM_INTF_PARM_LED_MODE,
CAM_INTF_META_HISTOGRAM,
CAM_INTF_META_FACE_DETECTION,
CAM_INTF_META_LENS_OPT_STAB_MODE,
CAM_INTF_META_AUTOFOCUS_DATA,
CAM_INTF_PARM_QUERY_FLASH4SNAP, /* 20 */
CAM_INTF_PARM_EXPOSURE,
CAM_INTF_PARM_SHARPNESS,
CAM_INTF_PARM_CONTRAST,
CAM_INTF_PARM_SATURATION,
CAM_INTF_PARM_BRIGHTNESS,
CAM_INTF_PARM_ISO,
CAM_INTF_PARM_USERZOOM,
CAM_INTF_PARM_ROLLOFF,
CAM_INTF_PARM_MODE, /* camera mode */
CAM_INTF_PARM_AEC_ALGO_TYPE, /* 30 */ /* auto exposure algorithm */
CAM_INTF_PARM_FOCUS_ALGO_TYPE, /* focus algorithm */
CAM_INTF_PARM_AEC_ROI,
CAM_INTF_PARM_AF_ROI,
CAM_INTF_PARM_SCE_FACTOR,
CAM_INTF_PARM_FD,
CAM_INTF_PARM_MCE,
CAM_INTF_PARM_HFR,
CAM_INTF_PARM_REDEYE_REDUCTION,
CAM_INTF_PARM_WAVELET_DENOISE,
CAM_INTF_PARM_TEMPORAL_DENOISE, /* 40 */
CAM_INTF_PARM_HISTOGRAM,
CAM_INTF_PARM_ASD_ENABLE,
CAM_INTF_PARM_RECORDING_HINT,
CAM_INTF_PARM_HDR,
CAM_INTF_PARM_MAX_DIMENSION,
CAM_INTF_PARM_BESTSHOT_MODE,
CAM_INTF_PARM_FRAMESKIP,
CAM_INTF_PARM_ZSL_MODE, /* indicating if it's running in ZSL mode */
CAM_INTF_PARM_BURST_NUM,
CAM_INTF_PARM_RETRO_BURST_NUM, /* 50 */
CAM_INTF_PARM_BURST_LED_ON_PERIOD,
CAM_INTF_PARM_HDR_NEED_1X, /* if HDR needs 1x output */
CAM_INTF_PARM_LOCK_CAF,
CAM_INTF_PARM_VIDEO_HDR,
CAM_INTF_PARM_SENSOR_HDR,
CAM_INTF_PARM_ROTATION,
CAM_INTF_PARM_SCALE,
CAM_INTF_PARM_VT, /* indicating if it's a Video Call Apllication */
CAM_INTF_META_CROP_DATA,
CAM_INTF_META_PREP_SNAPSHOT_DONE, /* 60 */
CAM_INTF_META_GOOD_FRAME_IDX_RANGE,
CAM_INTF_META_ASD_HDR_SCENE_DATA,
CAM_INTF_META_ASD_SCENE_INFO,
CAM_INTF_META_CURRENT_SCENE,
CAM_INTF_META_AEC_INFO,
CAM_INTF_META_SENSOR_INFO,
CAM_INTF_META_CHROMATIX_LITE_ISP,
CAM_INTF_META_CHROMATIX_LITE_PP,
CAM_INTF_META_CHROMATIX_LITE_AE,
CAM_INTF_META_CHROMATIX_LITE_AWB, /* 70 */
CAM_INTF_META_CHROMATIX_LITE_AF,
CAM_INTF_META_CHROMATIX_LITE_ASD,
CAM_INTF_META_EXIF_DEBUG_AE,
CAM_INTF_META_EXIF_DEBUG_AWB,
CAM_INTF_META_EXIF_DEBUG_AF,
CAM_INTF_META_EXIF_DEBUG_ASD,
CAM_INTF_META_EXIF_DEBUG_STATS,
CAM_INTF_META_EXIF_DEBUG_BESTATS,
CAM_INTF_META_EXIF_DEBUG_BHIST,
CAM_INTF_META_EXIF_DEBUG_3A_TUNING,
CAM_INTF_PARM_GET_CHROMATIX,
CAM_INTF_PARM_SET_RELOAD_CHROMATIX,
CAM_INTF_PARM_SET_AUTOFOCUSTUNING, /* 80 */
CAM_INTF_PARM_GET_AFTUNE,
CAM_INTF_PARM_SET_RELOAD_AFTUNE,
CAM_INTF_PARM_SET_VFE_COMMAND,
CAM_INTF_PARM_SET_PP_COMMAND,
CAM_INTF_PARM_TINTLESS,
CAM_INTF_PARM_LONGSHOT_ENABLE,
CAM_INTF_PARM_RDI_MODE,
CAM_INTF_PARM_CDS_MODE,
CAM_INTF_PARM_TONE_MAP_MODE,
CAM_INTF_PARM_CAPTURE_FRAME_CONFIG, /* 90 */
CAM_INTF_PARM_DUAL_LED_CALIBRATION,
CAM_INTF_PARM_ADV_CAPTURE_MODE,
CAM_INTF_PARM_DO_REPROCESS,
CAM_INTF_PARM_SET_BUNDLE,
CAM_INTF_PARM_STREAM_FLIP,
CAM_INTF_PARM_GET_OUTPUT_CROP,
CAM_INTF_PARM_EZTUNE_CMD,
CAM_INTF_PARM_VFE1_RESERVED_RDI,
CAM_INTF_PARM_INT_EVT,
CAM_INTF_META_FRAME_NUMBER_VALID,
CAM_INTF_META_URGENT_FRAME_NUMBER_VALID,
CAM_INTF_META_FRAME_DROPPED, /* 100 */
CAM_INTF_META_COLOR_CORRECT_MODE,
CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
CAM_INTF_META_COLOR_CORRECT_GAINS,
CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM,
CAM_INTF_META_PRED_COLOR_CORRECT_GAINS,
CAM_INTF_META_FRAME_NUMBER,
CAM_INTF_META_URGENT_FRAME_NUMBER,
CAM_INTF_META_STREAM_INFO,
CAM_INTF_META_AEC_ROI,
CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, /* 110 */
CAM_INTF_META_AEC_STATE,
CAM_INTF_META_AF_ROI,
CAM_INTF_META_AF_DEFAULT_ROI,
CAM_INTF_META_AF_TRIGGER,
CAM_INTF_META_AF_STATE,
CAM_INTF_META_AWB_REGIONS,
CAM_INTF_META_AWB_STATE,
CAM_INTF_META_BLACK_LEVEL_LOCK,
CAM_INTF_META_CAPTURE_INTENT,
CAM_INTF_META_DEMOSAIC,
CAM_INTF_META_EDGE_MODE, /* 120 */
CAM_INTF_META_SHARPNESS_STRENGTH,
CAM_INTF_META_FLASH_POWER,
CAM_INTF_META_FLASH_FIRING_TIME,
CAM_INTF_META_FLASH_STATE,
CAM_INTF_META_GEOMETRIC_MODE,
CAM_INTF_META_GEOMETRIC_STRENGTH,
CAM_INTF_META_HOTPIXEL_MODE,
CAM_INTF_META_LENS_APERTURE,
CAM_INTF_META_LENS_FILTERDENSITY,
CAM_INTF_META_LENS_FOCAL_LENGTH, /* 130 */
CAM_INTF_META_LENS_FOCUS_DISTANCE,
CAM_INTF_META_LENS_FOCUS_RANGE,
CAM_INTF_META_LENS_SHADING_MAP_MODE,
CAM_INTF_META_LENS_STATE,
CAM_INTF_META_NOISE_REDUCTION_MODE,
CAM_INTF_META_NOISE_REDUCTION_STRENGTH,
CAM_INTF_META_SCALER_CROP_REGION,
CAM_INTF_META_SCENE_FLICKER,
CAM_INTF_META_SENSOR_EXPOSURE_TIME,
CAM_INTF_META_SENSOR_FRAME_DURATION, /* 140 */
CAM_INTF_META_SENSOR_SENSITIVITY,
CAM_INTF_META_SENSOR_TIMESTAMP,
CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW,
CAM_INTF_META_SHADING_MODE,
CAM_INTF_META_SHADING_STRENGTH,
CAM_INTF_META_STATS_FACEDETECT_MODE,
CAM_INTF_META_STATS_HISTOGRAM_MODE,
CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
CAM_INTF_META_STATS_SHARPNESS_MAP,
CAM_INTF_META_TONEMAP_MODE, /* 150 */
CAM_INTF_META_TONEMAP_CURVES,
CAM_INTF_META_FLASH_MODE,
CAM_INTF_META_LENS_SHADING_MAP,
CAM_INTF_META_PRIVATE_DATA,
CAM_INTF_PARM_STATS_DEBUG_MASK,
CAM_INTF_PARM_STATS_AF_PAAF,
CAM_INTF_META_STREAM_ID,
CAM_INTF_PARM_FOCUS_BRACKETING,
CAM_INTF_PARM_FLASH_BRACKETING,
CAM_INTF_PARM_GET_IMG_PROP, /* 160 */
CAM_INTF_META_JPEG_GPS_COORDINATES,
CAM_INTF_META_JPEG_GPS_PROC_METHODS,
CAM_INTF_META_JPEG_GPS_TIMESTAMP,
CAM_INTF_META_JPEG_ORIENTATION,
CAM_INTF_META_JPEG_QUALITY,
CAM_INTF_META_JPEG_THUMB_QUALITY,
CAM_INTF_META_JPEG_THUMB_SIZE,
CAM_INTF_META_TEST_PATTERN_DATA,
CAM_INTF_META_PROFILE_TONE_CURVE,
CAM_INTF_META_NEUTRAL_COL_POINT, /* 170 */
CAM_INTF_META_CAC_INFO,
CAM_INTF_PARM_CAC,
CAM_INTF_META_IMG_HYST_INFO,
CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
CAM_INTF_META_OTP_WB_GRGB,
CAM_INTF_META_LED_MODE_OVERRIDE,
CAM_INTF_META_FOCUS_POSITION,
CAM_INTF_PARM_EXPOSURE_TIME,
CAM_INTF_META_AWB_INFO,
CAM_INTF_PARM_MANUAL_FOCUS_POS, /* 180 */
CAM_INTF_PARM_WB_MANUAL,
CAM_INTF_PARM_HW_DATA_OVERWRITE,
CAM_INTF_META_IMGLIB, /* cam_intf_meta_imglib_t */
CAM_INTF_PARM_CUSTOM,
CAM_INTF_PARM_RELATED_SENSORS_CALIBRATION,
CAM_INTF_META_AF_FOCAL_LENGTH_RATIO,
CAM_INTF_META_SNAP_CROP_INFO_SENSOR,
CAM_INTF_META_SNAP_CROP_INFO_CAMIF,
CAM_INTF_META_SNAP_CROP_INFO_ISP,
CAM_INTF_META_SNAP_CROP_INFO_CPP, /* 190 */
CAM_INTF_PARM_DCRF,
CAM_INTF_META_DCRF,
CAM_INTF_PARM_FLIP,
CAM_INTF_BUF_DIVERT_INFO,
CAM_INTF_META_USE_AV_TIMER,
CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
CAM_INTF_PARM_REQUEST_FRAMES,
CAM_INTF_PARM_REQUEST_OPS_MODE,
CAM_INTF_META_LDAF_EXIF,
CAM_INTF_META_BLACK_LEVEL_SOURCE_PATTERN,
CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, /* 200 */
CAM_INTF_META_CDS_DATA,
CAM_INTF_META_LOW_LIGHT,
CAM_INTF_META_IMG_DYN_FEAT, /* 200 */
CAM_INTF_PARM_MANUAL_CAPTURE_TYPE,
CAM_INTF_AF_STATE_TRANSITION,
CAM_INTF_META_FACE_RECOG,
CAM_INTF_META_FACE_BLINK,
CAM_INTF_META_FACE_GAZE,
CAM_INTF_META_FACE_SMILE,
CAM_INTF_META_FACE_LANDMARK, /* 210 */
CAM_INTF_META_FACE_CONTOUR,
CAM_INTF_META_VIDEO_STAB_MODE,
CAM_INTF_META_TOUCH_AE_RESULT,
CAM_INTF_PARM_INITIAL_EXPOSURE_INDEX,
CAM_INTF_META_ISP_SENSITIVITY,
CAM_INTF_PARM_INSTANT_AEC,
CAM_INTF_META_REPROCESS_FLAGS,
CAM_INTF_PARM_JPEG_ENCODE_CROP,
CAM_INTF_PARM_JPEG_SCALE_DIMENSION,
CAM_INTF_PARM_QUADRA_CFA,
CAM_INTF_META_RAW,
CAM_INTF_META_STREAM_INFO_FOR_PIC_RES,
CAM_INTF_META_FOCUS_DEPTH_INFO,
CAM_INTF_META_IR_MODE,
CAM_INTF_META_AEC_CONVERGENCE_SPEED,
CAM_INTF_META_AWB_CONVERGENCE_SPEED,
CAM_INTF_META_FOCUS_VALUE,
CAM_INTF_META_SPOT_LIGHT_DETECT,
CAM_INTF_PARM_HAL_BRACKETING_HDR,
CAM_INTF_META_DC_SAC_OUTPUT_INFO,
CAM_INTF_META_DC_LOW_POWER_ENABLE,
CAM_INTF_META_DC_IN_SNAPSHOT_PP_ZOOM_RANGE,
CAM_INTF_META_DC_BOKEH_MODE,
CAM_INTF_PARM_FOV_COMP_ENABLE,
CAM_INTF_META_LED_CALIB_RESULT,
CAM_INTF_PARM_SYNC_DC_PARAMETERS,
CAM_INTF_META_AF_FOCUS_POS,
CAM_INTF_META_AEC_LUX_INDEX,
CAM_INTF_META_AF_OBJ_DIST_CM,
CAM_INTF_META_BINNING_CORRECTION_MODE,
CAM_INTF_META_OIS_READ_DATA,
CAM_INTF_PARM_FLUSH_FRAMES,
CAM_INTF_PARAM_BOKEH_BLUR_LEVEL,
CAM_INTF_META_RTB_DATA,
CAM_INTF_META_DC_CAPTURE,
CAM_INTF_PARM_SKIP_FINE_SCAN,
CAM_INTF_PARM_BOKEH_MODE,
CAM_INTF_META_USERZOOM,
CAM_INTF_META_TUNING_PARAMS,
CAM_INTF_PARM_RESOLUTION_IDX,//wanghl,257
CAM_INTF_PARM_MAX
} cam_intf_parm_type_t;
zoom 预览大小:640*480
skype预设的大小:640*360
另外这两个APP都是走HAL3,必须改成强制走HAL1
当系统中只有一个后摄的时候,skype不认这个摄像头,预览打不开;
当系统中只有一个前摄像头的时候,skype和zoom都会认这个摄像头,预览都能打开,所以最好系统设置摄像头的时候设置成前摄;
当zoom从预览回调里面拿到预览数据之后,转成bitmap,再完成切图,放大人脸,再转回YUV数组,这样的话,本地的预览显示和远程的图像都能够进行切图显示,
但是当skype从修改预览回调的数据之后,只能远程的能有切图效果,本地送的显示由于是在更底层的时候就送显示了,所以修改预览回调数据的时候并不能在本地生效。
所以需要在底层,sensor出图的时候就完成切图动作,具体怎么切在预览回调里面进行处理,因为zoom和skype都有预览回调,不像我们自己的APP或者骁龙相机,是没有预览回调的:
在预览回调里每隔一段时间调用以下方法切换setting下标:
private void setIdx(int size){
Parameters p=getParameters();
//Size newPreviewSize = p.getPreviewSize();
//p.setPreviewSize(640, 480);
//Log.e(TAG, "[wanghl] setResolutionIdx(size):" + size);
stopPreview();
p.setResolutionIdx(size);
//p.setPreviewSize(640, 480);
setParameters(p);
setPreviewCallback(mPreviewCallback);//必须设置,如果不设置回调,第一次stopPreview就不出图了
//int bfsize = 640 * 480 * ImageFormat.getBitsPerPixel(p.getPreviewFormat())/8;
//addCallbackBuffer(new byte[bfsize]);
startPreview();
}
==========================================================================================================
IR摄像头开机后概率性打不开的问题:
正常log:
[ 15.805014] c2395_sub probe succeeded
异常log:
[ 16.579739] msm_cci_init:1442: hw_version = 0x10020004
[ 16.579751] msm_cci_irq:1813 MASTER_1 error 0x40000000
[ 16.586191] msm_cci_i2c_read:965 read_words = 0, exp words = 1
[ 16.592980] msm_cci_i2c_read_bytes:1048 failed rc -22
[ 16.598994] diag: In diag_send_feature_mask_update, control channel is not open, p: 0, 0000000000000000
[ 16.599008] diag: In diag_send_peripheral_buffering_mode, buffering flag not set for 0
[ 16.599586] diag: In diag_send_peripheral_buffering_mode, buffering flag not set for 0
[ 16.618904] msm_camera_cci_i2c_read: line 47 rc = -22
[ 16.626745] msm_sensor_match_id: c2395_sub: read id failed//读取第一次ID失败
[ 16.631814] msm_sensor_check_id:1432 match id failed rc -22
[ 16.740476] msm_cci_init:1442: hw_version = 0x10020004
[ 16.740559] msm_cci_i2c_read:965 read_words = 0, exp words = 1
[ 16.744810] msm_cci_i2c_read_bytes:1048 failed rc -22
[ 16.750754] msm_camera_cci_i2c_read: line 47 rc = -22
[ 16.755855] msm_sensor_match_id: c2395_sub: read id failed//读取第二次ID失败
[ 16.760622] msm_sensor_check_id:1432 match id failed rc -22
[ 16.871130] msm_cci_init:1442: hw_version = 0x10020004
[ 16.871251] msm_cci_i2c_read:965 read_words = 0, exp words = 1
[ 16.875168] msm_cci_i2c_read_bytes:1048 failed rc -22
[ 16.881170] msm_camera_cci_i2c_read: line 47 rc = -22
[ 16.886398] msm_sensor_match_id: c2395_sub: read id failed//读取第三次ID失败
[ 16.891232] msm_sensor_check_id:1432 match id failed rc -22
[ 16.950863] c2395_sub power up failed[ 16.950899] MSM-SENSOR-INIT msm_sensor_driver_cmd:82 msm_sensor_driver_cmd failed (non-fatal) rc -22//判定上电时序失败
int msm_sensor_power_up(struct msm_sensor_ctrl_t *s_ctrl)//kernel/msm-4.9/drivers/media/platform/msm/camera_v2/sensor/msm_sensor.c
{
...
for (retry = 0; retry < 3; retry++) {
rc = msm_camera_power_up(power_info, s_ctrl->sensor_device_type, sensor_i2c_client);//上电
rc = msm_sensor_check_id(s_ctrl);//读id
if (rc < 0) {
msm_camera_power_down(power_info, s_ctrl->sensor_device_type, sensor_i2c_client);//读不到就下电
msleep(20);
continue;//继续下一次
} else {
break;//读到ID就退出来
}
}
}
按照datasheet时序图来修改,重启机器137次后问题又复现了,说明可能和时序先后没有太大的关系;
后来发现IR摄像头的AVDD改成用L22供电之后,L22代码配置的电压是2.56到2.84,询问原厂这个电压是不够的,至少需要2.8-3.3V,所以又修改配置电压到2.8,经过测试250次重启都没有出现过一次挂掉的现象。
wanghl@wanghl-HP:~/code/new_disk/clean/sdm439_android$ diff kernel/msm-4.9/arch/arm64/boot/dts/pubtron/sdm439-regulator-A5.dtsi kernel/msm-4.9/arch/arm64/boot/dts/pubtron/sdm439-regulator.dtsi
355,357c355,357
< regulator-min-microvolt = <2800000>;//改后的配置
< regulator-max-microvolt = <2800000>;
< qcom,init-voltage = <2800000>;
---
> regulator-min-microvolt = <2560000>;//原始配置
> regulator-max-microvolt = <2840000>;
> qcom,init-voltage = <2560000>;
改之前的代码在93次的时候挂掉(放了一夜,第二天早上一重启就挂掉),另外有天测试上午挂了一次,下午挂了一次(次数大概也是几十次)
单单修改L22电压看下情况,看是不是和上电时序无关,只和这个电压有关:测试100次 reboot还没挂,手动拔插电源44次后挂了(但是看dmesg,c2395_sub probe成功了)
全彩主摄的AVDD是采用GPIO控制,实际上是GPIO35控制了电源IC:SGM2036-3.3 这个LDO的使能端,这个LDO的电源电压3.3比较稳定,所以没有问题。L22这个PMIC配置的电压低了所以才出的问题。
互换全彩和红外的摄像头的AVDD,改成全彩摄像头用L22 2.8V供电,红外摄像头用LDO 3.3V供电,手动拔插电150次后,变成全彩的摄像头挂了打不开(也是有probe成功),红外摄像头能打开的情况,
说明AVDD用2.8V 来供电是有问题的,原厂也建议正常工作是要3.3V,2.8-3.3V,2.8太极限了。
修改RPM限制,将L22修改到3.3V还是到8次和26次的时候分别会挂,这就蛋疼了!
qcom,cam-vreg-name = "cam_vdig", "cam_vio", "cam_vana",
"cam_vaf";
qcom,cam-vreg-min-voltage = <1200000 0 3300000 2850000>;
qcom,cam-vreg-max-voltage = <1200000 0 3300000 2850000>;
qcom,cam-vreg-op-mode = <200000 0 80000 100000>;
rpm-regulator-ldoa22 {
status = "okay";
pm8953_l22: regulator-l22 {
regulator-min-microvolt = <3300000>;
regulator-max-microvolt = <3300000>;
qcom,init-voltage = <3300000>;
status = "okay";
};
};
avdd设置范围:(qcom,cam-vreg-min-voltage,qcom,cam-vreg-max-voltage)必须要在l22的范围内:(regulator-min-microvolt,regulator-max-microvolt),否则log会报错:
pm8909_l5: requested voltage range [2800000, 2800000] does not fit within constraints: [3300000, 3300000],l22最小是3.3,你却想设置最小为2.8,那肯定要报错了,也就是说l22的下限必须更小,上限必须更大
将最大电流从80mA设置到PM439 datasheet描述的150mA再次测试:
qcom,cam-vreg-op-mode = <200000 0 150000 100000>;//80000-> 150000uA
测试了10次还是挂了
========================================================================================================================================================================
IMX317 camera调试记录
csiphy是MIPI协议的物理层,而CSID(Camera Serial Interface Decoder Module )是对应的协议层的一部分,用于解码数据流.
CSI-D 配置
static struct msm_camera_csid_vc_cfg imx135_cid_cfg[] = {
{ 0, CSI_RAW10, CSI_DECODE_10BIT },
{ 1, 0x35, CSI_DECODE_8BIT },
{ 2, CSI_EMBED_DATA, CSI_DECODE_8BIT}
};
每一行设置的第一列被称为 CID (channel ID). Virtual Channel (VC)和 Data Type (DT)独一无
二的组合映射到唯一的 CID值. 下表列出对于特定的 VC可能的 CID值.
//msm_camera.h
struct msm_camera_csid_vc_cfg {
uint8_t cid;
uint8_t dt;
uint8_t decode_format;
};
IMX317一开始IIC写初始化指令的时候,写了两三个寄存器之后就开始数据错误,比如少发了两三个寄存器地址,后面每个寄存器地址写的时候加了延时0xFF(256us),数据波形就正常了,
IMX317没有start或stop寄存器指令,直接下初始化指令就能stream on,但是如果没有参考imx274下start和stop指令,datalane连波形都出不来
IMX317打开camera的时候,MIPI线上有波形输出,但是打开camera之后一会儿闪退了,kernel log:
[ 8120.987335] msm_csid_init: CSID_VERSION = 0x30040002
[ 8120.987863] msm_csid_irq CSID0_IRQ_STATUS_ADDR = 0x800
[ 8120.993389] MSM-CPP cpp_init_hardware:1133 CPP HW Version: 0x40030002
[ 8120.998063] MSM-CPP cpp_init_hardware:1151 stream_cnt:0
[ 8121.052734] msm_cci_init:1442: hw_version = 0x10020004
[ 8121.053060] msm_sensor_match_id: read id: 0x8000 expected id 0x8000:
[ 8121.205537] [wanghl]v4l_s_fmt p->type:9[ 8121.223740] [wanghl]v4l_s_fmt p->type:9
[wanghl]v4l_s_fmt p->type:9[ 8121.247845] [wanghl]v4l_s_fmt p->type:9
[wanghl]v4l_streamon[ 8121.259010] [wanghl] camera_v4l2_streamon 1
[wanghl] camera_v4l2_streamon return 0[ 8121.269667] [wanghl]v4l_streamon
[wanghl] camera_v4l2_streamon 1[ 8121.272254] [wanghl] camera_v4l2_streamon return 0
[ 8121.281110] msm_csid_irq CSID0_IRQ_STATUS_ADDR = 0x800
[ 8121.561965] [wanghl]v4l_streamon[ 8121.561998] [wanghl] camera_v4l2_streamon 1
[wanghl] camera_v4l2_streamon return 0[ 8121.681863] [wanghl]v4l_streamon
[wanghl] camera_v4l2_streamon 1[ 8121.684196] [wanghl] camera_v4l2_streamon return 0
[ 8121.816110] msm_ispif_read_irq_status: 21 callbacks suppressed
[ 8121.816115] msm_ispif_read_irq_status: VFE1 pix0 overflow.
[ 8121.820951] msm_ispif_read_irq_status: VFE1 pix0 overflow.
[ 8121.826436] msm_ispif_read_irq_status: VFE1 pix0 overflow.
[ 8121.831864] msm_ispif_read_irq_status: VFE1 pix0 overflow.
[ 8121.837300] msm_ispif_read_irq_status: VFE1 pix0 overflow.
[ 8121.842801] msm_ispif_read_irq_status: VFE1 pix0 overflow.
[ 8121.848227] msm_ispif_read_irq_status: VFE1 pix0 overflow.
[ 8121.853697] msm_ispif_read_irq_status: VFE1 pix0 overflow.
[ 8121.859165] msm_ispif_read_irq_status: VFE1 pix0 overflow.
[ 8121.864671] msm_ispif_read_irq_status: VFE1 pix0 overflow.
[ 8121.870222] msm_vfe40_process_error_status: vfe 1 camif error status: 0x78c
[ 8121.875670] 0x0000: 00000001 00000040 00000000 04380780
[ 8121.882481] 0x0010: 0000077f 00000437 00000000 00000000
[ 8121.887995] 0x0020: ffffffff 00140405 000009f0 00000000
[ 8121.893303] msm_vfe40_process_violation_status: vfe 1 camif violation
[ 8124.400949] msm_private_ioctl:Notifying subdevs about potential sof freeze
[ 8124.401067] msm_csid_irq CSID0_IRQ_STATUS_ADDR = 0xff
[ 8124.401071] MSM-SENSOR-INIT msm_sensor_init_subdev_ioctl:122 default
[ 8124.401071]
[ 8124.401248] msm_csiphy_irq CSIPHY0_IRQ_STATUS_ADDR0 = 0x90
[ 8124.401256] msm_csiphy_irq CSIPHY0_IRQ_STATUS_ADDR1 = 0x90
[ 8124.401263] msm_csiphy_irq CSIPHY0_IRQ_STATUS_ADDR2 = 0x90
[ 8124.401270] msm_csiphy_irq CSIPHY0_IRQ_STATUS_ADDR3 = 0x90
[ 8124.401278] msm_csiphy_irq CSIPHY0_IRQ_CLK_STATUS_ADDR0 = 0xc0
[ 8124.401286] msm_csiphy_irq CSIPHY0_IRQ_CLK_STATUS_ADDR1 = 0x0
[ 8124.401325] msm_csiphy_irq CSIPHY0_IRQ_STATUS_ADDR0 = 0x90
[ 8124.401332] msm_csiphy_irq CSIPHY0_IRQ_STATUS_ADDR1 = 0x90
[ 8124.401339] msm_csiphy_irq CSIPHY0_IRQ_STATUS_ADDR2 = 0x90
[ 8124.401346] msm_csiphy_irq CSIPHY0_IRQ_STATUS_ADDR3 = 0x90
[ 8124.401353] msm_csiphy_irq CSIPHY0_IRQ_CLK_STATUS_ADDR0 = 0xc0
[ 8124.401360] msm_csiphy_irq CSIPHY0_IRQ_CLK_STATUS_ADDR1 = 0x0
[ 8124.405145] ispif_process_irq: PIX0 frame id: 538
[ 8124.409932] ispif_process_irq: PIX0 frame id: 539
[ 8124.414758] ispif_process_irq: PIX0 frame id: 540
[ 8124.419528] ispif_process_irq: PIX0 frame id: 541
[ 8124.424316] ispif_process_irq: PIX0 frame id: 542
[ 8126.822178] msm_ispif_read_irq_status: 1033 callbacks suppressed
[ 8126.822184] msm_ispif_read_irq_status: VFE1 pix0 overflow.
[ 8126.827361] msm_ispif_read_irq_status: VFE1 pix0 overflow.
[ 8126.832711] msm_ispif_read_irq_status: VFE1 pix0 overflow.
[ 8126.838051] msm_ispif_read_irq_status: VFE1 pix0 overflow.
[ 8126.843515] msm_ispif_read_irq_status: VFE1 pix0 overflow.
[ 8126.848983] msm_ispif_read_irq_status: VFE1 pix0 overflow.
[ 8126.854454] msm_ispif_read_irq_status: VFE1 pix0 overflow.
[ 8126.859917] msm_ispif_read_irq_status: VFE1 pix0 overflow.
[ 8126.865386] msm_ispif_read_irq_status: VFE1 pix0 overflow.
[ 8126.870942] msm_ispif_read_irq_status: VFE1 pix0 overflow.
高通文档介绍出现VFE overflow是因为VFE时钟设置小于sensor MIPI的输出速率
static inline void msm_ispif_read_irq_status(struct ispif_irq_status *out,void *data)-->msm_ispif.c
if (out[VFE1].ispifIrqStatus0 & PIX_INTF_0_OVERFLOW_IRQ) {
pr_err_ratelimited("%s: VFE1 pix0 overflow.\n",
__func__);
fatal_err = true;
}
提高VFE clock可以防止overflow,修改VFE时钟可以修改sensor驱动的op_pixel_clk参数
后面是从代理那里更新了setting并配置对了vc之后camera才点亮的
=============================================================================
SDM632 双摄项目 LT9611 HDMI调试记录
1)IIC不通,发完IIC地址连ACK响应都没有,说明从机LT9611压根都没有工作,最大可能是哪个电源没供上,或者RST没拉高,但是示波器查看RST是有正常拉低200ms再拉高,硬件说3.3和1.8(都是硬件直接上电,不由软件直接控制),
实际上用示波器检查发现LT9611其他路电源都正常,但是19pin的VCCTP18本来应该是1.8V的实际测量只有1V左右,后来找CTO要了原理图和位号图一查发现是这一路电源上面的一个贴片磁珠FB803(220R/100MHZ 200圈,滤波用)没有贴,
后来从另外一个板子拆了一颗同型号的贴上去,IIC立马通信正常,药到病除。另外SDM632的模组扣到以前调通的SDM439广告机的板子上IIC是能通信上的,更可以验证SDM632 双摄项目的底板是有问题的。
2)开启LT9611的自检模式,发现彩条信号都没有出来,串口log和SDM439广告机log一样,但是接口板子的HDMI座子量不到信号,后经硬件老师傅检查发现HDMI座子的ESD没通,直接用线短接直连彩条信号就出来了
3)正常模式下没有输出图像,查看log发现LT9611没有检测到有效的MIPI信号,后经查相关代码发现dtsi的配置里面dsi-phy版本是v2,
qcom,mdss-dsi-panel-timings-phy-v2 = [//40个字节用来指定PHY version 2
24 1f 08 09 05 03 04 a0
24 1f 08 09 05 03 04 a0
24 1f 08 09 05 03 04 a0
24 1f 08 09 05 03 04 a0
24 1c 08 09 05 03 04 a0];
而不再是像SDM439一样使用qcom,mdss-dsi-panel-timings-phy-12nm,或者更早之前的qcom,mdss-dsi-panel-timings
- qcom,mdss-dsi-panel-timings: An array of length 12 that specifies the PHY timing settings for the panel.
- qcom,mdss-dsi-panel-timings-phy-v2: An array of length 40 char that specifies the PHY version 2 lane timing settings for the panel.
- qcom,mdss-dsi-panel-timings-phy-12nm: An array of length 8 char that specifies the 12nm DSI PHY lane timing settings for the panel.
从开机log可以看到对应的dsi phy的版本:SDM632的开机log如下:
[ 4.327680] mdss_dsi_ctrl_probe: Dsi Ctrl->0 initialized, DSI rev:0x10040002, PHY rev:0x2//14nm
分别对应:
enum phy_rev {
DSI_PHY_REV_UNKNOWN = 0x00,
DSI_PHY_REV_10 = 0x01, /* REV 1.0 - 20nm, 28nm */
DSI_PHY_REV_20 = 0x02, /* REV 2.0 - 14nm */
DSI_PHY_REV_12NM = 0x03, /* 12nm PHY */
DSI_PHY_REV_MAX,
};
这是从寄存器读出来的,可以看下SDM439对应的版本就是DSI_PHY_REV_12NM
[ 4.040164] mdss_dsi_ctrl_probe: Dsi Ctrl->0 initialized, DSI rev:0x10040002, PHY rev:0x3/* 12nm PHY */
所以得把LK和kernel里的timing全部改成PHY version 2的配置,注意LK要先配才有可能出图:
static const uint32_t lt9611_14nm_video_timings[] = {//配置成14nm的
0x24, 0x1f, 0x08, 0x09, 0x05, 0x03, 0x04, 0xa0,
0x24, 0x1f, 0x08, 0x09, 0x05, 0x03, 0x04, 0xa0,
0x24, 0x1f, 0x08, 0x09, 0x05, 0x03, 0x04, 0xa0,
0x24, 0x1f, 0x08, 0x09, 0x05, 0x03, 0x04, 0xa0,
0x24, 0x1c, 0x08, 0x09, 0x05, 0x03, 0x04, 0xa0,
};
因为当你LT9611在做初始化配置,检测MIPI信号的时候,检测之前就得把MIPI信号准备好,也就是LK的MIPI信号必须配置正确,这样配置后检测才会正常打印log:
[ 5.933271] LT9611_Video_Check: h_act_a:1920, h_act_b:0, v_act:1080, v_tal:1125//否则这些参数可能会是0
修改DSI-PHY配置版本之后重新全编译就能正常显示开机logo(LT9611在做初始化的时候,mipi信号还停留在开机logo阶段,android还没跑起来,还没跑到动画,所以虽然初始化在kernel但是能看到logo,
那为了更早能看到logo显示还得移植LT9611驱动到LK)和动画以及进入到launcher
4)进入到launcher之后图像会关闭丢失,检查rst没有掉电,看到串口log会进入休眠,
打印出:Suspending console(s) (use no_console_suspend to debug)
休眠再唤醒屏幕点不亮,目前的做法是暂时把系统休眠时间设置为一个小时,只要系统不休眠,显示就不会中断,问题后续再查
====================================================================================================================================================================================
会议平板项目,休眠的时候TP的IIC会报错,无法通信上,AVDD没有掉电,IIC的SCL和SDA休眠之后一直处于低电平,怀疑是上拉电阻没有配,因为上拉是在SDM439核心板内部,后和硬件确认是接到PMIC的L6,重新配置即可,就不会报错且能够休眠唤醒。因为以前A58F的上拉是上拉到L10,是在底板上拉,所以需要修改vcc_i2c-supply = <&pm8953_l5>;//used for voltage level conversion
================
带按键话机项目 TP固件版本查看:./sys/devices/platform/soc/78b7000.i2c/i2c-3/3-0038/fts_fw_version
TP固件版本查看: sys/bus/i2c/devices/i2c-3/3-002e/tlsc_version
手势开关:sys/devices/platform/soc/78b7000.i2c/i2c-3/3-002e/tlsc_gs_ctl
======================================================================
SDM632双摄项目同事能够打开两个摄像头预览,但是拿不到回调帧的问题:
在sdm439模块上,用SDM632双摄项目的底板,接两个imx317的镜头,看log,probe两个都成功了,但是一直不停地报这个qos.c的错误,之前是驱动延时或上层加太多log会出现这个问题:
[ 371.008797] msm_csid_init: CSID_VERSION = 0x30040002
[ 371.013662] msm_csid_irq CSID0_IRQ_STATUS_ADDR = 0x800
[ 371.019154] msm_csid_init: CSID_VERSION = 0x30040002
[ 371.023376] msm_csid_irq CSID2_IRQ_STATUS_ADDR = 0x800
[ 371.119121] MSM-CPP cpp_init_hardware:1133 CPP HW Version: 0x40030002
[ 371.119163] MSM-CPP cpp_init_hardware:1151 stream_cnt:0
[ 371.130301] MSM-CPP cpp_release_hardware:1214 cpp hw release done
[ 371.191871] type=1400 audit(1579404587.479:105): avc: denied { read } for pid=7815 comm="android.hardwar" name="name" dev="sysfs" ino=33804 scontext=u:r:hal_camera_default:s0 tcontext=u:object_r:sysfs:0
[ 371.191971] type=1400 audit(1579404592.489:106): avc: denied { read } for pid=7864 comm="android.hardwar" name="name" dev="sysfs" ino=33804 scontext=u:r:hal_camera_default:s0 tcontext=u:object_r:sysfs:0
[ 371.205202] msm_pm_qos_update_request: update request 100
[ 371.205207] msm_pm_qos_add_request: add request
[ 371.205207] pm_qos_update_request() called for unknown object
[ 371.205217] ------------[ cut here ]------------
[ 371.205232] WARNING: CPU: 1 PID: 7864 at /home/wanghl/code/new_disk/clean/sdm439_android/kernel/msm-4.9/kernel/power/qos.c:671 pm_qos_update_request.part.5+0x1c/0x28
[ 371.205270] Modules linked in: machine_ext_dlkm(O) machine_dlkm(O) wcd9335_dlkm(O) cpe_lsm_dlkm(O) wcd_cpe_dlkm(O) analog_cdc_dlkm(O) digital_cdc_dlkm(O) stub_dlkm(O) mbhc_dlkm(O) wsa881x_analog_dlkm(O)
[ 371.205277] CPU: 1 PID: 7864 Comm: android.hardwar Tainted: G W O 4.9.112 #10
[ 371.205280] Hardware name: QRD, pubtron E18 (DT)
[ 371.205284] task: ffffffc07fbc6c80 task.stack: ffffffc05a708000
[ 371.205289] PC is at pm_qos_update_request.part.5+0x1c/0x28
[ 371.205294] LR is at pm_qos_update_request.part.5+0x1c/0x28
[ 371.205298] pc : [<ffffff9d459f1918>] lr : [<ffffff9d459f1918>] pstate: 60400145
[ 371.205300] sp : ffffffc05a70b9c0
[ 371.205309] x29: ffffffc05a70b9c0 x28: ffffffc05a700000
[ 371.205318] x27: ffffff9d481b2408 x26: ffffffc05cb7bcc0
[ 371.205326] x25: ffffffc09cd0d910 x24: ffffff9d4763f000
[ 371.205334] x23: ffffffc05a70ba88 x22: 0000000000002000
[ 371.205343] x21: 0000000000000000 x20: 0000000000000064
[ 371.205351] x19: ffffff9d481b22c8 x18: 0000000000000000
[ 371.205359] x17: 0000000000000000 x16: ffffff9d45ad1f4c
[ 371.205367] x15: 0000000000000000 x14: 00000000f0fd6dbf
[ 371.205376] x13: 00000000ffad0918 x12: ffffffc05a70b9c0
[ 371.205384] x11: ffffffc05a70b9c0 x10: ffffffc05a70b9c0
[ 371.205392] x9 : 0000000000000001 x8 : ffffff9d4719f090
[ 371.205401] x7 : ffffffc0b5609090 x6 : 0000000000000000
[ 371.205409] x5 : ffffffc05a70b810 x4 : ffffff9d459312b8
[ 371.205417] x3 : 0000000000000000 x2 : 0000000000040900
[ 371.205425] x1 : 0000000000040900 x0 : 0000000000000031
这次出现这个问题是因为移植驱动的时候没有移植效果文件chromatix_imx317导致的。配一个摄像头的时候OK了,但是配上另外一个摄像头就不行了,问题又出现了,原因不明。后来同事查明是系统编译的问题,可以支持两路回调帧的获取,
这个问题不再跟踪。后来单开第二个摄像头(CSI2上)也会,可能是第二个摄像头的问题!
=============================================================================================
adb disable-verity需要在lunch平台之后才会生效
================================================
settle_cnt不合适闪退log:
[ 134.886865] msm_vidc: info: Closed video instance: 0000000000000000
[ 134.938019] msm_vidc: info: Opening video instance: 0000000000000000, 0
[ 135.015119] binder: 555:679 transaction failed 29201/-1, size 32-0 line 3025
[ 135.109700] msm_pm_qos_update_request: update request 100[ 135.109731] msm_pm_qos_add_request: add request
msm_pm_qos_update_request: update request -1[ 135.118250] msm_pm_qos_add_request: add request
[ 135.129629] msm_csid_init: CSID_VERSION = 0x30050001
[ 135.130036] msm_csid_irq CSID0_IRQ_STATUS_ADDR = 0x800
[ 135.135695] MSM-CPP cpp_init_hardware:1133 CPP HW Version: 0x40030003
[ 135.138714] MSM-CPP cpp_init_hardware:1151 stream_cnt:0
[ 135.190057] msm_cci_init:1446: hw_version = 0x10020005
[ 135.376329] msm_csid_irq CSID0_IRQ_STATUS_ADDR = 0x800
[ 137.964312] msm_private_ioctl:Notifying subdevs about potential sof freeze//SOF
[ 137.964492] msm_csiphy_irq: 12 callbacks suppressed
[ 137.964498] msm_csiphy_irq CSIPHY0_IRQ_STATUS_ADDR0 = 0x10
[ 137.974903] msm_csiphy_irq CSIPHY0_IRQ_STATUS_ADDR1 = 0x1
[ 137.980457] msm_csiphy_irq CSIPHY0_IRQ_STATUS_ADDR2 = 0x0
[ 137.985923] msm_csiphy_irq CSIPHY0_IRQ_STATUS_ADDR3 = 0x22
[ 137.991308] msm_csiphy_irq CSIPHY0_IRQ_STATUS_ADDR4 = 0x0
[ 137.996680] msm_csiphy_irq CSIPHY0_IRQ_STATUS_ADDR5 = 0x40
[ 138.002148] msm_csiphy_irq CSIPHY0_IRQ_STATUS_ADDR6 = 0x4
[ 138.007529] msm_csiphy_irq CSIPHY0_IRQ_STATUS_ADDR7 = 0x0
[ 138.012999] msm_csiphy_irq CSIPHY0_IRQ_STATUS_ADDR8 = 0x88
[ 138.018381] msm_csiphy_irq CSIPHY0_IRQ_STATUS_ADDR9 = 0x40
[ 138.024248] MSM-SENSOR-INIT msm_sensor_init_subdev_ioctl:122 default
[ 138.024248]
[ 140.635487] MSM-CPP cpp_release_hardware:1214 cpp hw release done
[ 140.810680] msm_vidc: info: Closed video instance: 0000000000000000
[ 140.862980] msm_vidc: info: Opening video instance: 0000000000000000, 0
[ 140.981543] binder: 555:2408 transaction failed 29201/-1, size 32-0 line 3025
[ 140.982200] type=1400 audit(1579492038.199:53): avc: denied { read } for pid=3459 comm=43616D6572612048616E646C657220 name="u:object_r:vendor_default_prop:s0" dev="tmpfs" ino=15180 scontext=u:r:priv_ap0
[ 140.990552] type=1400 audit(1579492044.159:54): avc: denied { call } for pid=555 comm="Binder:555_5" scontext=u:r:surfaceflinger:s0 tcontext=u:r:mediacodec:s0 tclass=binder permissive=0 b/77924251
sof freeze(SOF:start of frame)表示ISP这边没有收到sensor这边输出的图像帧数据,这时必须检查 CSID/CSIPHY/CAMIF是否出错。有专门建立了thread来负责SOF的检测,start_sof_check_thread() -> mct_bus_sof_thread_run(),log当中会有下面的错误发出:
kernel/msm-4.9/drivers/media/platform/msm/camera_v2/msm.c
static long msm_private_ioctl(struct file *file, void *fh,
bool valid_prio, unsigned int cmd, void *arg)
...
case MSM_CAM_V4L2_IOCTL_NOTIFY_DEBUG: {
if (event_data->status) {
pr_err("%s:Notifying subdevs about potential sof freeze\n",
__func__);
} else {
pr_err("%s:Notifying subdevs about sof recover\n",
__func__);
}
首先看能否dump出现数据:vendor/qcom/proprietary/mm-camera/mm-camera2/media-controller/modules/iface2/iface_util.c
void iface_util_dump_frame(int ion_fd, hw_stream_info_t *stream_info,
cam_stream_type_t stream_type, iface_frame_buffer_t *image_buf,
uint32_t frame_idx, uint32_t session_id)
...
/* Usage: To enable dumps
Preview: adb shell setprop persist.vendor.camera.isp.dump 2
Analysis: adb shell setprop persist.vendor.camera.isp.dump 2048
Snapshot: adb shell setprop persist.vendor.camera.isp.dump 8
Video: adb shell setprop persist.vendor.camera.isp.dump 16
To dump 10 frames again, just reset prop value to 0 and then set again */
...
#ifdef _ANDROID_
property_get("persist.vendor.camera.isp.dump_cnt", value, "10");//默认dump 10 frames
frm_num = atoi(value);
#endif
adb root
adb shell setprop persist.vendor.camera.isp.dump 8
adb shell chmod 777 /data
yuv镜像文件*.yuv在/data/misc/camera
Dear Qualcomm:
Now we are porting the imx317 camera sensor driver on our SDM632 platform. The register setting of full-size (4K, 10bit, 4lane) works fine, but the setting of 1080P (1920*1080 mode3, 10bit, 4lane) aborts while opening the SnapDragonCamera APP. As a contrast, the same settings of full-size and 1080P both work fine on the SDM439 platform. I tried to modify .data_rate, .vt_pixel_clk, .op_pixel_clk and .settle_cnt, but it didn't work! I was wondering what the difference is between these two platforms and how to choose reasonable parameters to make it work on SDM632? Please check the driver code, kernel log and logcat log if you need!
full-size kernel log:
console:/ $ [ 164.209632] msm_vidc: info: Closed video instance: 0000000000000000
[ 164.251169] msm_vidc: info: Opening video instance: 0000000000000000, 0
[ 164.332466] binder: 557:666 transaction failed 29201/-1, size 32-0 line 3025
[ 164.511711] type=1400 audit(1579492067.659:55): avc: denied { call } for pid=557 comm="Binder:557_1" scontext=u:r:surfaceflinger:s0 tcontext=u:r:mediacodec:s0 tclass=binder permissive=0 b/77924251
[ 164.511841] type=1400 audit(1579492067.829:56): avc: denied { read } for pid=3778 comm=43616D6572612048616E646C657220 name="u:object_r:vendor_default_prop:s0" dev="tmpfs" ino=16487 scontext=u:r:priv_ap0
[ 164.608244] binder: 878:2508 transaction failed 29189/-22, size 100-0 line 3017
[ 164.617792] msm_pm_qos_update_request: update request 100[ 164.617838] msm_pm_qos_add_request: add request
msm_pm_qos_update_request: update request -1[ 164.626349] msm_pm_qos_add_request: add request
[ 164.634493] msm_csid_init: CSID_VERSION = 0x30050001
[ 164.635206] msm_csid_irq CSID0_IRQ_STATUS_ADDR = 0x800
[ 164.648249] MSM-CPP cpp_init_hardware:1133 CPP HW Version: 0x40030003
[ 164.648301] MSM-CPP cpp_init_hardware:1151 stream_cnt:0
[ 164.690944] msm_cci_init:1446: hw_version = 0x10020005
[ 164.892833] msm_csid_irq CSID0_IRQ_STATUS_ADDR = 0x800
[ 165.170691] msm_vfe40_process_error_status: vfe 1 camif error status: 0x10fc0000
[ 165.170850] 0x0000: 00000001 00000040 00000000 087e0f18
[ 165.177243] 0x0010: 00000f17 0000087d 00000000 00000000
[ 165.182579] 0x0020: ffffffff 0014084b 10fc0000 a0d4000
1080P kernel log:
[ 248.314479] binder: 1369:2371 transaction failed 29189/-22, size 472-0 line 3017
[ 248.516506] ------------[ cut here ]------------
[ 248.516556] WARNING: CPU: 4 PID: 3360 at /home/wanghl/code/new_disk/m610_sdm632_la101c29_android/kernel/msm-4.9/drivers/media/v4l2-core/videobuf2-v4l2.c:149 vb2_warn_zero_bytesused.isra.13.part.14+0x240
[ 248.520278] CPU: 4 PID: 3360 Comm: VideoEncMsgThre Tainted: G S O 4.9.112 #4
[ 248.527715] No change in context(0==0), skip
[ 248.546233] Hardware name: Qualcomm Technologies, Inc. SDM632 PMI632 QRD-SDM632双摄项目 (DT)
[ 248.550754] task: ffffffeec382dd00 task.stack: ffffffeebe408000
[ 248.558144] PC is at vb2_warn_zero_bytesused.isra.13.part.14+0x24/0x70
[ 248.563953] LR is at __fill_vb2_buffer+0x458/0x5a0
[ 248.570539] pc : [<ffffff9ba4c26e3c>] lr : [<ffffff9ba4c272e0>] pstate: 40400145
[ 248.575320] sp : ffffffeebe40b730
[ 248.583106]
[ 248.583106] PC: 0xffffff9ba4c26dfc:
[ 248.586285]
[ 248.586285] LR: 0xffffff9ba4c272a0:
[ 248.591324]
[ 248.591324] SP: 0xffffffeebe40b6f0:
[ 248.596267] ---[ end trace 3efd6abadd941126 ]---
[ 248.601008] Call trace:
[ 248.605603] Exception stack(0xffffffeebe40b530 to 0xffffffeebe40b660)
[ 248.607786] b520: ffffffeeba3a1c00 0000007fffffffff
[ 248.614386] b540: 0000000012b72000 ffffff9ba4c26e3c 0000000040400145 000000000000003d
[ 248.622197] b560: 0000000000000000 ffffff9ba53154fc ffffffeec382dd00 0000000000000b30
[ 248.630009] b580: ffffffeebe40b5e0 ffffff9ba42f455c 0000000000000001 00000000000001c0
[ 248.637825] b5a0: ffffffeebe40b5e0 ffffff9ba42f4550 0000000000000001 ffffff9ba53154fc
[ 248.645634] b5c0: ffffff9ba435672c 0000000000000b30 ffffff9ba5ccf010 0000000000040900
[ 248.653447] b5e0: ffffffeebe40b640 ffffff9ba53154fc 00000000000001c0 0000000000040900
[ 248.661259] b600: ffffff9ba6cfe000 0000000000000001 0000000000000000 ffffffeec3ac9480
[ 248.669072] b620: 00000000000a3000 ffffffeeba3a1c60 0000000000000000 ffffffeebc2ba1b8
[ 248.676886] b640: ffffffeebe40b858 0000000000000004 0000000000000000 0000000000000000
[ 248.684704] [<ffffff9ba4c26e3c>] vb2_warn_zero_bytesused.isra.13.part.14+0x24/0x70
[ 248.692508] [<ffffff9ba4c272e0>] __fill_vb2_buffer+0x458/0x5a0
[ 248.699971] [<ffffff9ba4c21674>] __qbuf_userptr+0x9c/0x504
[ 248.705785] [<ffffff9ba4c24068>] __buf_prepare+0xc4/0x178
[ 248.711254] [<ffffff9ba4c24360>] vb2_core_qbuf+0x160/0x298
[ 248.716721] [<ffffff9ba4c26cf0>] vb2_qbuf+0xa4/0xf0
[ 248.722103] [<ffffff9ba4c6d58c>] msm_venc_qbuf+0x50/0x148
[ 248.726879] [<ffffff9ba4c5692c>] msm_vidc_qbuf+0x204/0x460
[ 248.732434] [<ffffff9ba4c4171c>] msm_v4l2_qbuf+0x30/0x3c
[ 248.737817] [<ffffff9ba4c02260>] v4l_qbuf+0x60/0x70
[ 248.743284] [<ffffff9ba4c017d8>] __video_do_ioctl+0x1e8/0x2b0
[ 248.747886] [<ffffff9ba4c01290>] video_usercopy+0x310/0x62c
[ 248.753787] [<ffffff9ba4c015e4>] video_ioctl2+0x38/0x44
[ 248.759169] [<ffffff9ba4bfac00>] v4l2_ioctl+0xd0/0x130
[ 248.764379] [<ffffff9ba4c18e6c>] v4l2_compat_ioctl32+0x26c/0x3a88
[ 248.769604] [<ffffff9ba44d9464>] compat_SyS_ioctl+0xfc/0x1904
[ 248.775753] [<ffffff9ba42843dc>] __sys_trace_return+0x0/0x4
[ 248.782072] use of bytesused == 0 is deprecated and will be removed in the future,
[ 248.790965] use VIDIOC_DECODER_CMD(V4L2_DEC_CMD_STOP) instead.
[ 248.822679] msm_vidc: info: Closed video instance: 0000000000000000
[ 248.876205] msm_vidc: info: Opening video instance: 0000000000000000, 0
[ 248.936694] type=1400 audit(1579492150.499:48): avc: denied { read } for pid=3081 comm="Thread-10" name="version" dev="proc" ino=4026532053 scontext=u:r:untrusted_app_25:s0:c512,c768 tcontext=u:object_d
[ 248.937681] type=1400 audit(1579492152.629:49): avc: denied { read } for pid=3374 comm=43616D6572612048616E646C657220 name="u:object_r:vendor_default_prop:s0" dev="tmpfs" ino=17695 scontext=u:r:priv_ap0
[ 248.984667] binder: 556:1619 transaction failed 29201/-1, size 32-0 line 3025
[ 248.984826] type=1400 audit(1579492152.629:49): avc: denied { read } for pid=3374 comm=43616D6572612048616E646C657220 name="u:object_r:vendor_default_prop:s0" dev="tmpfs" ino=17695 scontext=u:r:priv_ap0
[ 248.994808] type=1400 audit(1579492152.679:50): avc: denied { call } for pid=556 comm="Binder:556_4" scontext=u:r:surfaceflinger:s0 tcontext=u:r:mediacodec:s0 tclass=binder permissive=0 b/77924251
[ 249.078994] msm_pm_qos_update_request: update request 100[ 249.079024] msm_pm_qos_add_request: add request
msm_pm_qos_update_request: update request -1[ 249.087632] msm_pm_qos_add_request: add request
[ 249.095742] msm_csid_init: CSID_VERSION = 0x30050001
[ 249.096451] msm_csid_irq CSID0_IRQ_STATUS_ADDR = 0x800
[ 249.103112] MSM-CPP cpp_init_hardware:1133 CPP HW Version: 0x40030003
[ 249.106057] MSM-CPP cpp_init_hardware:1151 stream_cnt:0
[ 249.155643] msm_cci_init:1446: hw_version = 0x10020005
[ 249.470318] msm_csid_irq CSID0_IRQ_STATUS_ADDR = 0x800
[ 249.588809] ispif_process_irq: PIX0 frame id: 0
[ 249.622176] ispif_process_irq: PIX0 frame id: 1
[ 249.688902] ispif_process_irq: PIX0 frame id: 2
[ 249.722282] ispif_process_irq: PIX0 frame id: 3
[ 249.755647] ispif_process_irq: PIX0 frame id: 4
[ 250.627032] msm_vfe40_process_error_status: vfe 1 camif error status: 0xfb0
[ 250.627099] 0x0000: 00000001 00000040 00000000 0446078c
[ 250.633055] 0x0010: 0000078b 00000445 00000000 00000000
[ 250.638388] 0x0020: ffffffff 00140413 00000fb0 00000000
[ 253.598381] CAM-SMMU cam_smmu_check_vaddr_in_range:298 Cannot find vaddr:0000000000000000 in SMMU.
[ 253.598381] vfe uses invalid virtual address
[ 253.598492] msm_vfe_iommu_fault_handler: fault address is 1003b000
[ 253.611272] msm_isp_process_iommu_page_fault:1858] VFE1 Handle Page fault! vfe_dev 0000000000000000
[ 253.616864] msm_isp_halt_send_error: vfe1 fatal error!
[ 253.626138] msm_isp_buf_mgr_debug: ==== SMMU page fault addr 1003b000 ====
[ 253.637714] msm_isp_buf_mgr_debug: nearby stream id 80010008, frame_id 0
[ 253.638332] msm_isp_buf_mgr_debug: nearby buf index 4, plane 0, state 3
[ 253.645645] msm_isp_buf_mgr_debug: buf address 0000000000000000 -- 0000000000000000
[ 253.651787] msm_isp_print_ping_pong_address: stream 0 ping bit 0 uses buffer 0000000000000000-0000000000000000, num_isp 1
[ 253.658991] msm_isp_print_ping_pong_address: stream 0 ping bit 1 uses buffer 0000000000000000-0000000000000000, num_isp 1
[ 253.678860] msm_isp_print_ping_pong_address: stream 1 ping bit 0 uses buffer 0000000000000000-0000000000000000, num_isp 1
[ 253.684743] msm_isp_print_ping_pong_address: stream 1 ping bit 1 uses buffer 0000000000000000-0000000000000000, num_isp 1
[ 253.700832] 0x0000: 00000000 00000000 00000000 00000001
[ 253.703011] 0x0010: 11a00000 11600000 00000004 0240029c
[ 253.708312] 0x0020: 00770437 00f021bb 00000001 ffffffff
[ 253.717640] 0x0030: 00000001 11bfe000 117fe000 00000004
[ 253.718890] 0x0040: 04dd017d 0077021b 00f010db 00000001
[ 253.724986] 0x0050: ffffffff 00000001 10a00000 10e00000
[ 253.735509] 0x0060: 00000000 065b029c 00770437 00f021bb
[ 253.736307] 0x0070: 00000001 ffffffff 00000001 10bfe000
[ 253.740753] 0x0080: 10ffe000 00000000 08f8017d 0077021b
[ 253.746161] 0x0090: 00f010db 00000001 ffffffff 00000000
[ 253.756503] 0x00A0: 00000000 00000000 00000000 00000000
[ 253.757242] 0x00B0: 00000000 00000000 00000000 00000000
[ 253.764878] 0x00C0: 00000000 00000000 00000000 00000000
[ 253.770132] 0x00D0: 00000000 00000000 00000000 00000000
[ 253.773270] 0x00E0: 00000000 00000000 00000000 00000000
[ 253.777901] 0x00F0: 00000000 00000000 00000000 00000000
[ 253.787891] 0x0100: 00000000 00000000 00000000 00000000
[ 253.788464] 0x0110: 00000000 8bc0003f 00000000 00000000
[ 253.795505] 0x0120: 10040000 10020000 00000000 8b40007f
[ 253.798813] 0x0130: 00000001 ffffffff 1001c000 10018000
[ 253.808809] 0x0140: 00000000 8ac0007f 00000001 ffffffff
[ 253.809401] 0x0150: 00000000 00000000 00000000 8ab0000f
[ 253.819655] 0x0160: 00000000 00000000 10052000 10032000
[ 253.820128] 0x0170: 00000000 8aa80007 00000001 ffffffff
[ 253.828748] 0x0180: 00000000 00000000 00000000 8a98000f
[ 253.830702] 0x0190: 00000000 00000000 00000000 00000000
[ 253.836860] 0x01A0: 00000000 8a88000f 00000000 00000000
[ 253.842139] 0x01B0: 1001f000 1001b000 00000000 8a78000f
[ 253.846616] 0x01C0: 00000001 ffffffff 00000000 00000000
[ 253.852820] 0x01D0: 00000000 00000000 00000000 00000000
[ 253.857124] 0x01E0: 00000000 00000000 00000000 00000000
[ 253.865236] 0x01F0: 00000000 00000000 00000000 00000000
[ 253.874719] CAM-SMMU cam_smmu_check_vaddr_in_range:298 Cannot find vaddr:0000000000000000 in SMMU.
[ 253.874719] vfe uses invalid virtual address
[ 253.876765] msm_vfe_iommu_fault_handler: fault address is 1003b5c0
[ 253.889382] msm_isp_process_iommu_page_fault: overflow detected during IOMMU
[ 255.364509] msm_vfe40_axi_halt:VFE1 halt timeout rc=0
[ 255.490160] MSM-CPP cpp_release_hardware:1214 cpp hw release done
[ 255.550607] msm_vidc: info: Closed video instance: 0000000000000000
[ 255.606182] msm_vidc: info: Opening video instance: 0000000000000000, 0
[ 255.715465] binder: 556:655 transaction failed 29201/-1, size 32-0 line 3025
[ 255.716131] type=1400 audit(1579492152.679:50): avc: denied { call } for pid=556 comm="Binder:556_4" scontext=u:r:surfaceflinger:s0 tcontext=u:r:mediacodec:s0 tclass=binder permissive=0 b/77924251
[ 255.721737] type=1400 audit(1579492159.409:51): avc: denied { call } for pid=556 comm="Binder:556_2" scontext=u:r:surfaceflinger:s0 tcontext=u:r:mediacodec:s0 tclass=binder permissive=0 b/77924251
[ 256.020045] kgsl kgsl-3d0: |counter_delta| Abnormal value:0x235a319 (0x236ca83) from perf counter : 0x3b0
[ 303.564311] healthd: battery l=50 v=4171 t=24.8 h=2 st=3 c=-61 fc=3045000 cc=0 chg=
网上搜索有碰到一个也出现SMMU内存错误的例子,应该是ISO设置超过800会出现类似的错误:
https://forums.oneplus.com/threads/manual-mode-only-up-to-800-iso-cameraapi-2.585929/
没找到解决方法,等待高通2.10之后的case回复。
===================================================================
用SDM439驱动SDM632 双摄项目板子做EPTZ的实验,从8M切到裁切的1080P的setting正常,但是从1080P的setting切到8M就出现了abort的现象,闪退了,log如下:
console:/ $ [ 5472.295303] wcd_event_notify: event WCD_EVENT_PRE_HPHR_PA_ON (11)
[ 5472.295377] wcd_enable_curr_micbias: enter, cs_mb_en: 2
[ 5472.300650] wcd_enable_curr_micbias: exit
[ 5472.305604] wcd_event_notify: event WCD_EVENT_PRE_HPHL_PA_ON (9)
[ 5472.310101] wcd_enable_curr_micbias: enter, cs_mb_en: 2
[ 5472.316160] wcd_enable_curr_micbias: exit
[ 5472.337338] afe_get_cal_topology_id: cal_type 8 not initialized for this port 4096
[ 5472.337390] afe_get_cal_topology_id: cal_type 9 not initialized for this port 4096
[ 5472.344765] send_afe_cal_type cal_block not found!!
[ 5472.391531] IRQ6 no longer affine to CPU3
[ 5476.625318] msm_isp_buf_enqueue: Invalid bufq, handle 0x1000204, stream id 10002 num_plane 2
[ 5476.744332] msm_csid_irq CSID0_IRQ_STATUS_ADDR = 0xaff
[ 5477.094751] msm_vfe40_process_error_status: vfe 0 camif error status: 0x780
[ 5477.094793] 0x0000: 00000001 00000040 00000000 087e07b8
[ 5477.100668] 0x0010: 000c07b7 0018086b 00000000 00000000
[ 5477.106285] 0x0020: ffffffff 00140839 00000780 00000000
[ 5477.111583] msm_vfe40_process_error_status: vfe 1 camif error status: 0x20
[ 5477.116780] 0x0000: 00000001 00000040 00000000 087e07b8
[ 5477.123564] 0x0010: 000007ab 0018086b 00000000 00000000
[ 5477.128900] 0x0020: ffffffff 00140839 00000020 affc0000
[ 5477.267046] msm_isp_composite_irq: irq 2 out of sync for dual vfe on vfe 0
[ 5477.267114] msm_isp_halt_send_error: ping pong mismatch on vfe0 recovery count 0
[ 5477.273109] msm_isp_process_overflow_irq: vfe 0 overflowmask 0,bus_error 0
[ 5477.323767] msm_isp_axi_halt: VFE0 Bus overflow detected: start recovery!
[ 5477.824104] msm_vfe40_axi_halt:VFE0 halt timeout rc=0
[ 5477.842689] msm_isp_axi_halt: VFE1 Bus overflow detected: start recovery!
[ 5478.342300] msm_vfe40_axi_halt:VFE1 halt timeout rc=0
[ 5478.359248] msm_ispif_restart_frame_boundary: ISPIF reset hw done, Restarting[ 5478.361475] msm_vfe40_process_error_status: vfe 1 camif error status: 0x20
[ 5478.366290] 0x0000: 00000001 00000040 00000000 087e07b8
[ 5478.372077] 0x0010: 000007ab 0018086b 00000000 00000000
[ 5478.377455] 0x0020: ffffffff 00140000 00000020 00000000
[ 5478.382826] msm_vfe40_process_error_status: vfe 0 camif error status: 0x780
[ 5478.388049] 0x0000: 00000001 00000040 00000000 087e07b8
[ 5478.394866] 0x0010: 000c07b7 0018086b 00000000 00000000
[ 5478.400312] 0x0020: ffffffff 00140000 00000780 a5100000
[ 5478.405702] msm_vfe40_process_violation_status: crop enc y violation//裁切非法?
[ 5478.463416] msm_isp_composite_irq: irq 2 out of sync for dual vfe on vfe 0
[ 5478.463467] msm_isp_halt_send_error: ping pong mismatch on vfe0 recovery count 1
[ 5478.469374] msm_isp_process_overflow_irq: vfe 0 overflowmask 0,bus_error 0
[ 5478.509455] msm_isp_axi_halt: VFE0 Bus overflow detected: start recovery!//axi总线挂起,VFE0 溢出
[ 5479.001558] msm_vfe40_axi_halt:VFE0 halt timeout rc=0
[ 5479.004062] msm_isp_axi_halt: VFE1 Bus overflow detected: start recovery!
[ 5479.501921] msm_vfe40_axi_halt:VFE1 halt timeout rc=0
[ 5479.508115] msm_ispif_restart_frame_boundary: ISPIF reset hw done, Restarting[ 5479.522947] msm_vfe40_process_error_status: vfe 1 camif error status: 0x20
[ 5479.523106] 0x0000: 00000001 00000040 00000000 087e07b8
[ 5479.528814] 0x0010: 000007ab 0018086b 00000000 00000000
[ 5479.534197] 0x0020: ffffffff 00140000 00000020 00000000
[ 5479.539603] msm_vfe40_process_error_status: vfe 0 camif error status: 0x780
[ 5479.544792] 0x0000: 00000001 00000040 00000000 087e07b8
[ 5479.551508] 0x0010: 000c07b7 0018086b 00000000 00000000
[ 5479.557061] 0x0020: ffffffff 00140000 00000780 a5780000
[ 5479.624790] msm_isp_composite_irq: irq 2 out of sync for dual vfe on vfe 0
[ 5479.624835] msm_isp_halt_send_error: ping pong mismatch on vfe0 recovery count 2
[ 5479.630608] msm_isp_process_overflow_irq: vfe 0 overflowmask 0,bus_error 0
[ 5479.673622] msm_isp_axi_halt: VFE0 Bus overflow detected: start recovery!
[ 5479.782814] wcd_event_notify: event WCD_EVENT_PRE_HPHR_PA_OFF (14)
[ 5479.815406] wcd_event_notify: event WCD_EVENT_PRE_HPHL_PA_OFF (13)
[ 5479.816381] wcd_event_notify: event WCD_EVENT_POST_HPHR_PA_OFF (12)
[ 5479.822605] wcd_enable_curr_micbias: enter, cs_mb_en: 0
[ 5479.827559] wcd_enable_curr_micbias: exit
[ 5479.852936] wcd_event_notify: event WCD_EVENT_POST_HPHL_PA_OFF (10)
[ 5479.853452] wcd_enable_curr_micbias: enter, cs_mb_en: 0
[ 5479.858462] wcd_enable_curr_micbias: exit
[ 5480.172115] msm_vfe40_axi_halt:VFE0 halt timeout rc=0
[ 5480.173124] msm_isp_axi_halt: VFE1 Bus overflow detected: start recovery!
[ 5480.671939] msm_vfe40_axi_halt:VFE1 halt timeout rc=0
[ 5480.676126] msm_ispif_restart_frame_boundary: ISPIF reset hw done, Restarting[ 5480.683816] msm_vfe40_process_error_status: vfe 1 camif error status: 0x20
[ 5480.684031] 0x0000: 00000001 00000040 00000000 087e07b8
[ 5480.689787] 0x0010: 000007ab 0018086b 00000000 00000000
[ 5480.695316] 0x0020: ffffffff 00140000 00000020 00000000
[ 5480.700531] msm_vfe40_process_error_status: vfe 0 camif error status: 0x780
[ 5480.705911] 0x0000: 00000001 00000040 00000000 087e07b8
[ 5480.712536] 0x0010: 000c07b7 0018086b 00000000 00000000
[ 5480.718093] 0x0020: ffffffff 00140000 00000780 a5740000
[ 5480.723525] msm_vfe40_process_violation_status: crop enc y violation
[ 5480.787505] msm_isp_composite_irq: irq 2 out of sync for dual vfe on vfe 0
[ 5480.787654] msm_isp_halt_send_error: ping pong mismatch on vfe0 recovery count 3
[ 5480.793368] msm_isp_process_overflow_irq: vfe 0 overflowmask 0,bus_error 0
[ 5480.826190] msm_isp_axi_halt: VFE0 Bus overflow detected: start recovery!
[ 5481.323065] msm_vfe40_axi_halt:VFE0 halt timeout rc=0
[ 5481.325149] msm_isp_axi_halt: VFE1 Bus overflow detected: start recovery!
[ 5481.824565] msm_vfe40_axi_halt:VFE1 halt timeout rc=0
[ 5481.830980] msm_ispif_restart_frame_boundary: ISPIF reset hw done, Restarting[ 5481.846749] msm_vfe40_process_error_status: vfe 1 camif error status: 0x20
[ 5481.846867] 0x0000: 00000001 00000040 00000000 087e07b8
[ 5481.852531] 0x0010: 000007ab 0018086b 00000000 00000000
[ 5481.858001] 0x0020: ffffffff 00140000 00000020 00000000
[ 5481.863385] msm_vfe40_process_error_status: vfe 0 camif error status: 0x780
[ 5481.868594] 0x0000: 00000001 00000040 00000000 087e07b8
[ 5481.875276] 0x0010: 000c07b7 0018086b 00000000 00000000
[ 5481.880860] 0x0020: ffffffff 00140000 00000780 a52c0000
[ 5481.948657] msm_isp_composite_irq: irq 2 out of sync for dual vfe on vfe 0
[ 5481.948784] msm_isp_halt_send_error: ping pong mismatch on vfe0 recovery count 4
[ 5481.954497] msm_isp_process_overflow_irq: vfe 0 overflowmask 0,bus_error 0
[ 5481.988530] msm_isp_axi_halt: VFE0 Bus overflow detected: start recovery!
[ 5482.481391] msm_vfe40_axi_halt:VFE0 halt timeout rc=0
[ 5482.482105] msm_isp_axi_halt: VFE1 Bus overflow detected: start recovery!
[ 5482.981704] msm_vfe40_axi_halt:VFE1 halt timeout rc=0
[ 5482.990346] msm_ispif_restart_frame_boundary: ISPIF reset hw done, Restarting[ 5483.007882] msm_vfe40_process_error_status: vfe 1 camif error status: 0x20
[ 5483.008016] 0x0000: 00000001 00000040 00000000 087e07b8
[ 5483.013664] 0x0010: 000007ab 0018086b 00000000 00000000
[ 5483.019132] 0x0020: ffffffff 00140000 00000020 00000000
[ 5483.024518] msm_vfe40_process_error_status: vfe 0 camif error status: 0x780
[ 5483.029728] 0x0000: 00000001 00000040 00000000 087e07b8
[ 5483.036408] 0x0010: 000c07b7 0018086b 00000000 00000000
[ 5483.041994] 0x0020: ffffffff 00140000 00000780 a5940000
[ 5483.109821] msm_isp_composite_irq: irq 2 out of sync for dual vfe on vfe 0
[ 5483.109949] msm_isp_halt_send_error: vfe0 fatal error!
[ 5483.274305] MSM-CPP cpp_release_hardware:1214 cpp hw release done
[ 5483.559795] msm_vidc: info: Closed video instance: 0000000000000000
[ 5483.618035] msm_vidc: info: Opening video instance: 0000000000000000, 0
[ 5483.721518] binder: 585:647 transaction failed 29201/-1, size 32-0 line 3025
[ 5483.721811] type=1400 audit(1580632126.839:47): avc: denied { read } for pid=3339 comm=43616D6572612048616E646C657220 name="u:object_r:vendor_default_prop:s0" dev="tmpfs" ino=12626 scontext=u:r:priv_ap0
[ 5483.730191] type=1400 audit(1580633773.659:48): avc: denied { call } for pid=585 comm="Binder:585_2" scontext=u:r:surfaceflinger:s0 tcontext=u:r:mediacodec:s0 tclass=binder permissive=0 b/77924251
所谓ping-pong buffer,也就是定义两个buffer,当有数据进来的时候,负责写入buffer的进程就寻找第一个没有被占用而且可写的buffer,进行写入,写好之后,将占用flag释放,同时设置一个flag提示此buffer已经可读,
然后再接下去找另外一个可写的buffer,写入新的数据。而读入的进程也是一直对buffer状态进行检测,一旦发现没有被占用,而且已经可以被读,就把这个buffer的数据取出来,然后标志为可写。pingpong是一种数据缓存的手段,
通过pingpong操作可以提高数据传输的效率。在两个模块间交换数据时,上一级处理的结果不能马上被下一级所处理完成,这样上一级必须等待下一级处理完成才可以送新的数据,这样就会对性能产生很大的损失。
引入pingpong后我们可以不去等待下一级处理结束,而是将结果保存在pong路的缓存中,pong路的数据准备好的时刻,ping路的数据也处理完毕(下一级),然后无需等待直接处理pong路数据,上一级也无需等待,转而将结果存储在ping路。
这样便提高了处理效率。
AXI(Advanced eXtensible Interface高级可扩展接口)是一种总线协议,该协议是ARM公司提出的AMBA(Advanced Microcontroller Bus Architecture)3.0协议中最重要的部分,是一种面向高性能、高带宽、低延迟的片内总线。
它的地址/控制和数据相位是分离的,支持不对齐的数据传输,同时在突发传输中,只需要首地址,同时分离的读写数据通道、并支持Outstanding传输访问和乱序访问,并更加容易进行时序收敛。AXI 是AMBA 中一个新的高性能协议。
AXI 技术丰富了现有的AMBA 标准内容,满足超高性能和复杂的片上系统(SoC)设计的需求。
===================================================
HAL层,EPTZ调查
ISP也称为Camera2架构中的VFE,负责处理传感器模块的原数据流并生成ISP输出。
02-10 04:24:35.527 519 3571 E QCamera : <HAL><ERROR> preview_stream_cb_routine: 872: [wanghl]preview_stream_cb_routine: frame_len =3112960, dim.w = 1920, dim.h = 1080, after allign w=1920, h=1080
02-10 04:24:35.537 519 3573 E QCamera : <HAL><ERROR> preview_raw_stream_cb_routine: 2105: [wanghl]preview_raw_stream_cb_routine: frame_len =11210752, dim.w = 3864, dim.h = 2174, after allign w=3864, h=2176
02-10 04:24:35.702 519 3571 E QCamera : <HAL><ERROR> preview_stream_cb_routine: 872: [wanghl]preview_stream_cb_routine: frame_len =3112960, dim.w = 1920, dim.h = 1080, after allign w=1920, h=1080
02-10 04:24:35.715 519 3573 E QCamera : <HAL><ERROR> preview_raw_stream_cb_routine: 2105: [wanghl]preview_raw_stream_cb_routine: frame_len =11210752, dim.w = 3864, dim.h = 2174, after allign w=3864, h=2176
preview_raw_stream_cb_routine默认不会跑,除非设置了persist.vendor.camera.raw_yuv为1且重启camera,但是会造成camera闪退,但是从打印的结果来看,预览raw图的宽高是3864*2174,也就是驱动里的full-size
RDI:Raw Dump Interface 原始数据转储接口
HAL层和User Space之间通过mm_camera_interface接口连接,User Space和Kernel Space之间通过V4L2 标准视频API接口连接。
HAL层和mm_camera_interface层使用到的关键结构/概念:
Channel(通道) - 一个用于把多个图像流绑定在一起的宽松概念(A loose concept to bundle multiple image streams together)
通道的类型:hardware/qcom/camera/QCamera2/HAL/QCamera2HWI.h
typedef enum {
QCAMERA_CH_TYPE_ZSL,
QCAMERA_CH_TYPE_CAPTURE,
QCAMERA_CH_TYPE_PREVIEW,
QCAMERA_CH_TYPE_VIDEO,
QCAMERA_CH_TYPE_SNAPSHOT,
QCAMERA_CH_TYPE_RAW,
QCAMERA_CH_TYPE_METADATA,
QCAMERA_CH_TYPE_ANALYSIS,
QCAMERA_CH_TYPE_CALLBACK,
QCAMERA_CH_TYPE_REPROCESSING,
QCAMERA_CH_TYPE_MAX
} qcamera_ch_type_enum_t;
Stream(流) - 数据流的最小元素;每个流只能有一种格式;它是用于在摄像头硬件和应用之间交换捕获图像缓冲区的接口(The minimum streaming element;each stream can have only one format;it is the interface to exchange capture image buffers between camera hardware and the application)
流的类型:hardware/qcom/camera/QCamera2/stack/common/cam_types.h
typedef enum {
CAM_STREAM_TYPE_DEFAULT, /* default stream type */
CAM_STREAM_TYPE_PREVIEW, /* preview */
CAM_STREAM_TYPE_POSTVIEW, /* postview */
CAM_STREAM_TYPE_SNAPSHOT, /* snapshot */
CAM_STREAM_TYPE_VIDEO, /* video */
CAM_STREAM_TYPE_CALLBACK, /* app requested callback */
CAM_STREAM_TYPE_IMPL_DEFINED, /* opaque format: could be display, video enc, ZSL YUV */
CAM_STREAM_TYPE_METADATA, /* meta data */
CAM_STREAM_TYPE_RAW, /* raw dump from camif */
CAM_STREAM_TYPE_OFFLINE_PROC, /* offline process */
CAM_STREAM_TYPE_PARM, /* mct internal stream */
CAM_STREAM_TYPE_ANALYSIS, /* analysis stream */
CAM_STREAM_TYPE_DEPTH, /* Depth stream for depth sensor*/
CAM_STREAM_TYPE_MAX,
} cam_stream_type_t;
Stream bundling(流捆绑) - 在通道里面,多个流可以捆绑在一起,这样有两个用处,(1)硬件不会开始流输出直到所有绑定的流都被打开 (2)当第一个被绑定的流关闭了,硬件也就停止了(Within the channel,multiple streams can be bundled so that:(1)Hardware does not start the streaming until all bundled streams are turned on (2)When the first bundled stream is turned off, hardware is stopped)
上层操作流程:
1.Open camera 打开摄像头
2.Query capability 查询camera支持的能力
3.Add camera chanel 添加camera通道
4.Add streams into camera channel 往通道里添加流
5.Set stream format 设置流格式
6.Map IOMMU memory to back-end daemon and wait for the mapping acknowledgement 映射IOMMU(input/output memory management unit输入输出内存管理单元,将系统传给设备的内核空间地址转换为物理地址)内存到后台守护程序并等待映射回复
7.Bundle streams for ZSL-like streaming purpose 绑定流用于零时延拍照目的
8.Start streams 开始流输出
9.Poll and notify HAL image buffers 等待轮询并通知HAL层图像缓冲区
10.Stop streams 停止流输出
11.Unmap IOMMU memory from back end and wait for the unmap acknowledgement 从后台解除IOMMU内存映射并等待解除回复
12.Delete stream 删除流
13.Delete channel 删除通道
14.Close camera 关闭摄像头
callback函数
定义:
回调函数就是一个通过函数指针调用的函数。如果你把函数的指针(地址)作为参数传递给另一个函数,当 这个指针被用为调用它所指向的函数时,我们就说这是回调函数。回调函数不是由该函数的实现方直接调用,而是在特定的事件或条件发生时由另外的一方调用的, 用于对该事件或条件进行响应。
实现的机制:
[1]定义一个回调函数;
[2]提供函数实现的一方在初始化的时候,将回调函数的函数指针注册给调用者;
[3]当特定的事件或条件发生的时候,调用者使用函数指针调用回调函数对事件进行处理。
RAW图像就是CMOS或者CCD图像感应器将捕捉到的光源信号转化为数字信号的原始数据。RAW文件是一种记录了数码相机传感器的原始信息,同时记录了由相机拍摄所产生的一些元数据(Metadata,如ISO的设置、快门速度、光圈值、白平衡等)的文件。RAW是未经处理、也未经压缩的格式,可以把RAW概念化为“原始图像编码数据”或更形象的称为“数字底片”。sensor的每一像素对应一个彩色滤光片,滤光片按Bayer pattern分布。外部光线经过滤光片后,在每个像素点上存储的是单色的,将每一个像素的数据直接输出,即RAW RGB data.所以说RAW是从影像传感器中得到的最原始的信息数据包,而非图像。所以每个像素仅仅包含了光谱的一部分,必须通过插值(在速度与质量的权衡下,线性插值补偿算法最好)来实现还原每个像素的RGB值,填补缺失的两个色彩。所以直接对RAW图数据进行裁切不太现实。
因为每个相机品牌会用不同的的技术编码来记录RAW格式文件,所以每个品牌的RAW格式都是有自己的独有的后缀名,RAW只是这些名称的统称而已。比如华为手机的RAW文件是*.dng,佳能的后缀名是*.CR3,尼康是*.NEF,索尼是*.ARW,富士是*.RAF,松下是*.RW2,宾得是*.PEF,徕卡是*.DNG等等
高通平台可以dump两种RAW图,一种RAW图遵循标准MIPI CSI2协议,一种RAW遵循高通自有的压缩格式.
adb shell setprop persist.vendor.camera.rdi.mode enable
dump出来的RAW图符合MIPI CSI2协议,即用5个字节代表4个pixel, 对于RAW10,每个pixel占用10bit,4个pixel = 40bit = 5个字节
默认情况下,高通平台按照64bit对齐
adb shell setprop persist.vendor.camera.rdi.mode disable
dump 出来的RAW遵循QCOM RAW格式,即64bit的word类型存放 6个pixel,每个pixel占用10bit,多出来的4bit为符号位。补0
默认格式是17
/*Default value is CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG*/
property_get("persist.vendor.camera.raw.format", raw_format, "17");//可以通过属性来修改raw图格式
关于mipi raw数据格式类型,定义在:hardware/qcom/camera/QCamera2/stack/common/cam_types.h
hardware/qcom/camera/QCamera2/stack/common/cam_types.h
typedef enum {
CAM_FORMAT_JPEG = 0,
CAM_FORMAT_YUV_420_NV12 = 1,
CAM_FORMAT_YUV_420_NV21,
CAM_FORMAT_YUV_420_NV21_ADRENO,
CAM_FORMAT_YUV_420_YV12,
CAM_FORMAT_YUV_422_NV16,
CAM_FORMAT_YUV_422_NV61,
CAM_FORMAT_YUV_420_NV12_VENUS,
/* Note: For all raw formats, each scanline needs to be 16 bytes aligned */
/* Packed YUV/YVU raw format, 16 bpp: 8 bits Y and 8 bits UV.
* U and V are interleaved with Y: YUYV or YVYV */
CAM_FORMAT_YUV_RAW_8BIT_YUYV,
CAM_FORMAT_YUV_RAW_8BIT_YVYU,
CAM_FORMAT_YUV_RAW_8BIT_UYVY, //10
CAM_FORMAT_YUV_RAW_8BIT_VYUY,
/* QCOM RAW formats where data is packed into 64bit word.
* 8BPP: 1 64-bit word contains 8 pixels p0 - p7, where p0 is
* stored at LSB.
* 10BPP: 1 64-bit word contains 6 pixels p0 - p5, where most
* significant 4 bits are set to 0. P0 is stored at LSB.
* 12BPP: 1 64-bit word contains 5 pixels p0 - p4, where most
* significant 4 bits are set to 0. P0 is stored at LSB. */
CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG,
CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GRBG,
CAM_FORMAT_BAYER_QCOM_RAW_8BPP_RGGB,
CAM_FORMAT_BAYER_QCOM_RAW_8BPP_BGGR,
CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG,
CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GRBG,//默认
CAM_FORMAT_BAYER_QCOM_RAW_10BPP_RGGB,//18
CAM_FORMAT_BAYER_QCOM_RAW_10BPP_BGGR,
CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG, //20
CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GRBG,
CAM_FORMAT_BAYER_QCOM_RAW_12BPP_RGGB,
CAM_FORMAT_BAYER_QCOM_RAW_12BPP_BGGR,
/* MIPI RAW formats based on MIPI CSI-2 specifiction.
* 8BPP: Each pixel occupies one bytes, starting at LSB.
* Output with of image has no restrictons.
* 10BPP: Four pixels are held in every 5 bytes. The output
* with of image must be a multiple of 4 pixels.
* 12BPP: Two pixels are held in every 3 bytes. The output
* width of image must be a multiple of 2 pixels. */
CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG,
CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GRBG,
CAM_FORMAT_BAYER_MIPI_RAW_8BPP_RGGB,
CAM_FORMAT_BAYER_MIPI_RAW_8BPP_BGGR,
CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG,
CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GRBG,
CAM_FORMAT_BAYER_MIPI_RAW_10BPP_RGGB, //30
CAM_FORMAT_BAYER_MIPI_RAW_10BPP_BGGR,
CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG,
CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GRBG,
CAM_FORMAT_BAYER_MIPI_RAW_12BPP_RGGB,
CAM_FORMAT_BAYER_MIPI_RAW_12BPP_BGGR,
/* Ideal raw formats where image data has gone through black
* correction, lens rolloff, demux/channel gain, bad pixel
* correction, and ABF.
* Ideal raw formats could output any of QCOM_RAW and MIPI_RAW
* formats, plus plain8 8bbp, plain16 800, plain16 10bpp, and
* plain 16 12bpp */
CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_8BPP_GBRG,
CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_8BPP_GRBG,
CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_8BPP_RGGB,
CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_8BPP_BGGR,
CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_10BPP_GBRG, //40
CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_10BPP_GRBG,
CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_10BPP_RGGB,
CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_10BPP_BGGR,
CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_12BPP_GBRG,
CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_12BPP_GRBG,
CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_12BPP_RGGB,
CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_12BPP_BGGR,
CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_8BPP_GBRG,
CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_8BPP_GRBG,
CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_8BPP_RGGB, //50
CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_8BPP_BGGR,
CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_10BPP_GBRG,
CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_10BPP_GRBG,
CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_10BPP_RGGB,
CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_10BPP_BGGR,
CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_12BPP_GBRG,
CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_12BPP_GRBG,
CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_12BPP_RGGB,
CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_12BPP_BGGR,
CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN8_8BPP_GBRG, //60
CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN8_8BPP_GRBG,
CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN8_8BPP_RGGB,
CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN8_8BPP_BGGR,
CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_8BPP_GBRG,
CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_8BPP_GRBG,
CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_8BPP_RGGB,
CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_8BPP_BGGR,
CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_10BPP_GBRG,
CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_10BPP_GRBG,
CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_10BPP_RGGB, //70
CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_10BPP_BGGR,
CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_12BPP_GBRG,
CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_12BPP_GRBG,
CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_12BPP_RGGB,
CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_12BPP_BGGR,
CAM_FORMAT_BAYER_RAW_PLAIN16_10BPP_GBRG,
CAM_FORMAT_BAYER_RAW_PLAIN16_10BPP_GRBG,
CAM_FORMAT_BAYER_RAW_PLAIN16_10BPP_RGGB,
CAM_FORMAT_BAYER_RAW_PLAIN16_10BPP_BGGR,
/* generic 8-bit raw */
CAM_FORMAT_JPEG_RAW_8BIT,
CAM_FORMAT_META_RAW_8BIT,
/* generic 10-bit raw */
CAM_FORMAT_META_RAW_10BIT,
/* QCOM RAW formats where data is packed into 64bit word.
* 14BPP: 1 64-bit word contains 4 pixels p0 - p3, where most
* significant 4 bits are set to 0. P0 is stored at LSB.
*/
CAM_FORMAT_BAYER_QCOM_RAW_14BPP_GBRG,
CAM_FORMAT_BAYER_QCOM_RAW_14BPP_GRBG,
CAM_FORMAT_BAYER_QCOM_RAW_14BPP_RGGB, //80
CAM_FORMAT_BAYER_QCOM_RAW_14BPP_BGGR,
/* MIPI RAW formats based on MIPI CSI-2 specifiction.
* 14 BPPP: 1st byte: P0 [13:6]
* 2nd byte: P1 [13:6]
* 3rd byte: P2 [13:6]
* 4th byte: P3 [13:6]
* 5th byte: P0 [5:0]
* 7th byte: P1 [5:0]
* 8th byte: P2 [5:0]
* 9th byte: P3 [5:0]
*/
CAM_FORMAT_BAYER_MIPI_RAW_14BPP_GBRG,
CAM_FORMAT_BAYER_MIPI_RAW_14BPP_GRBG,
CAM_FORMAT_BAYER_MIPI_RAW_14BPP_RGGB,
CAM_FORMAT_BAYER_MIPI_RAW_14BPP_BGGR,
CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_14BPP_GBRG,
CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_14BPP_GRBG,
CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_14BPP_RGGB,
CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_14BPP_BGGR,
CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_14BPP_GBRG, //90
CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_14BPP_GRBG,
CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_14BPP_RGGB,
CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_14BPP_BGGR,
/* 14BPP: 1st byte: P0 [8:0]
* 2nd byte: P0 [13:9]
* 3rd byte: P1 [8:0]
* 4th byte: P1 [13:9]
*/
CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_14BPP_GBRG,
CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_14BPP_GRBG,
CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_14BPP_RGGB,
CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_14BPP_BGGR,
CAM_FORMAT_YUV_444_NV24,
CAM_FORMAT_YUV_444_NV42,
/* Y plane only, used for FD, 8BPP */
CAM_FORMAT_Y_ONLY, //100
/* UBWC format */
CAM_FORMAT_YUV_420_NV12_UBWC,
CAM_FORMAT_YUV_420_NV21_VENUS,
/* RGB formats */
CAM_FORMAT_8888_ARGB,
/* Y plane only */
CAM_FORMAT_Y_ONLY_10_BPP,
CAM_FORMAT_Y_ONLY_12_BPP,
CAM_FORMAT_Y_ONLY_14_BPP,
CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GREY,
CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GREY,
CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GREY,
CAM_FORMAT_BAYER_QCOM_RAW_14BPP_GREY,
CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GREY,
CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GREY,
CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GREY,
CAM_FORMAT_BAYER_MIPI_RAW_14BPP_GREY,
CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_8BPP_GREY,
CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_10BPP_GREY,
CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_12BPP_GREY,
CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_14BPP_GREY,
CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_8BPP_GREY,
CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_10BPP_GREY,
CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_12BPP_GREY,
CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_14BPP_GREY,
CAM_FORMAT_DEPTH16,
CAM_FORMAT_DEPTH8,
CAM_FORMAT_DEPTH_POINT_CLOUD,
CAM_FORMAT_MAX
} cam_format_t;
使用高通chromatix工具查看时,对于mipi csi2协议的raw,选择mipi格式来查看,对于QCOM RAW, 选择packed格式来查看
MIPI扫盲:
对IMX317来说:MIPI发送器为CSI-2标准,也就是说,
The pixel signals are output by the CSI-2 High-speed serial interface. 像素信号根据CSI-2高速串行接口输出
See the MIPI Standard 查看MIPI标准
・MIPI Alliance Standard for Camera Serial Interface 2 (CSI-2) Version 1.1 使用MIPI联盟标准中的camera的CSI-2 版本V1.1
・MIPI Alliance Specification for D-PHY Version 1.1 使用MIPI联盟规格里的D-PHY版本V1.1
D-PHY中的PHY是物理层(Physical)的意思,那么D是什么意思呢?在MIPI D-PHY的文档中有提到过,D-PHY的最初版本的设计目标是500Mbits/s,而D是罗马数字(拉丁文数字)中500 。同理C和M分别是罗马数字中的100和1000,也就是C-PHY和M-PHY中C和M的意思了,CSI分为CSI2和CSI3,CSI3速度就更快了,它的PHY层就是用的M-PHY。D-PHY协议最多支持5个Lane(通道)(一个时钟Lane,4个数据Lane),最少需要两个Lane(一个时钟Lane,一个数据Lane)。CSI/DSI的物理层(Phy Layer)由专门的WorkGroup负责制定,其目前的标准是D-PHY。D-PHY采用1对源同步的差分时钟和1~4对差分数据线来进行数据传输。数据传输采用DDR(Double Data Rate)方式,即在时钟的上下边沿都有数据传输。高速模式(Burst Mode)是最主要的模式,用来传输图像,也就是我们camera的这个应用场景。在高速模式下,通道状态是差分的0或者1,也就是线对内P比N高时,定义为1,P比N低时,定义为0,此时典型的线上电压为差分200mV,与TMDS的400mV和LVDS的350mV以及USB2.0的400mV相比具有低电压摆幅,低功耗的优势。
另外我们看到SDM632文档介绍的MIPI标准:80-nu323-2文档有提到标准是D-PHY(Version V1.1)
SDM632双摄项目:/ # ls /dev/video* -al
crw-rw---- 1 system camera 81, 2 1970-01-05 01:48 /dev/video0
crw-rw---- 1 system camera 81, 21 1970-01-05 01:48 /dev/video1
crw-rw---- 1 system camera 81, 0 1970-01-05 01:48 /dev/video32
crw-rw---- 1 system camera 81, 1 1970-01-05 01:48 /dev/video33
/dev/videoX V4L2设备号(81, X),主设备号都是81
内核自带参考说明文档:kernel/msm-4.9/Documentation/zh_CN/video4linux/v4l2-framework.txt
Linux系统中视频输入设备主要包括四个部分:
1)字符设备驱动:V4L2本身就是一个字符设备cdev,具有字符设备所有的特性,暴露接口给用户空间;
kernel/msm-4.9/include/linux/cdev.h
struct cdev {
struct kobject kobj;
struct module *owner;
const struct file_operations *ops;
struct list_head list;
dev_t dev;
unsigned int count;
};
2)V4L2驱动核心:主要是构建一个内核中标准视频设备驱动的框架,为视频操作提供统一的接口函数;v4l2核心源码位于kernel/msm-4.9/drivers/media/v4l2-core/,根据功能又可以分为四类:
Kconfig v4l2-clk.c v4l2-dev.c v4l2-fh.c v4l2-mem2mem.c vb2-trace.c videobuf2-dvb.c videobuf-core.c videobuf-vmalloc.c
Makefile v4l2-common.c v4l2-device.c v4l2-flash-led-class.c v4l2-of.c videobuf2-core.c videobuf2-memops.c videobuf-dma-contig.c
tuner-core.c v4l2-compat-ioctl32.c v4l2-dv-timings.c v4l2-ioctl.c v4l2-subdev.c videobuf2-dma-contig.c videobuf2-v4l2.c videobuf-dma-sg.c
v4l2-async.c v4l2-ctrls.c v4l2-event.c v4l2-mc.c v4l2-trace.c videobuf2-dma-sg.c videobuf2-vmalloc.c videobuf-dvb.c
<1>字符设备模块:由v4l2-dev.c实现,主要作用申请字符主设备号、注册class和提供video_device注册注销等相关函数;
<2>V4L2基础框架:由v4l2-device.c、v4l2-subdev.c、v4l2-fh.c、v4l2-ctrls.c等文件构建V4L2基础框架;
<3>videobuf管理:由videobuf2-core.c、videobuf2-dma-contig.c、videobuf2-dma-sg.c、videobuf2-memops.c、videobuf2-vmalloc.c、v4l2-mem2mem.c等文件实现,完成videobuffer的分配、管理和注销;
<4>Ioctl框架:由v4l2-ioctl.c文件实现,构建V4L2 ioctl的框架。
3)平台V4L2设备驱动:在V4L2框架下,根据平台自身的特性实现与平台相关的V4L2驱动部分,包括注册video_device和v4l2_device;
video_device结构体用于在/dev目录下生成设备节点文件,把操作设备的接口暴露给用户空间
kernel/msm-4.9/include/media/v4l2-dev.h
struct video_device
{
#if defined(CONFIG_MEDIA_CONTROLLER)
struct media_entity entity;
struct media_intf_devnode *intf_devnode;
struct media_pipeline pipe;
#endif
const struct v4l2_file_operations *fops;//设置为已有的 v4l2_file_operations 结构体,即V4L2设备操作集合。v4l2_file_operations 结构体是 file_operations 的一个子集。其主要区别在于:因 inode 参数从未被使用,它将被忽略
u32 device_caps;
/* sysfs */
struct device dev;
struct cdev *cdev;//字符设备
struct v4l2_device *v4l2_dev;//设置为 v4l2_device 父设备。
struct device *dev_parent;//仅在使用 NULL 作为父设备结构体参数注册 v4l2_device 时设置此参数。只有在一个硬件设备包含多一个 PCI 设备,共享同一个v4l2_device 核心时才会发生。
struct v4l2_ctrl_handler *ctrl_handler;
struct vb2_queue *queue;//指向video buffer队列
struct v4l2_prio_state *prio;//保持对优先级的跟踪。用于实现 VIDIOC_G/S_PRIORITY。如果设置为 NULL,则会使用 v4l2_device 中的 v4l2_prio_state 结构体。 如果要对每个设备节点(组)实现独立的优先级,可以将其指向自己实现的 v4l2_prio_state 结构体。
/* device info */
char name[32];//设置为唯一的描述性设备名。
int vfl_type;//device type
int vfl_dir;
int minor;//次设备号
u16 num;
unsigned long flags;
int index;
/* V4L2 file handles */
spinlock_t fh_lock;
struct list_head fh_list;
int dev_debug;
v4l2_std_id tvnorms;
/* callbacks */
void (*release)(struct video_device *vdev);
const struct v4l2_ioctl_ops *ioctl_ops;//如果你使用v4l2_ioctl_ops 来简化 ioctl 的维护, (强烈建议使用,且将来可能变为强制性的!),然后设置你自己的 v4l2_ioctl_ops 结构体.
DECLARE_BITMAP(valid_ioctls, BASE_VIDIOC_PRIVATE);
DECLARE_BITMAP(disable_locking, BASE_VIDIOC_PRIVATE);
struct mutex *lock;//如果你要在驱动中实现所有的锁操作,则设为 NULL 。否则就要设置一个指向 struct mutex_lock 结构体的指针,这个锁将在 unlocked_ioctl 文件操作被调用前由内核获得,并在调用返回后释放。
};
分配video_device结构体用:video_device_alloc()
释放结构体用:video_device_release()
注册video_device:video_register_device(struct video_device *vdev, int type, int nr)//注册视频设备:这会为你创建一个字符设备。
注意:如果 v4l2_device 父设备的 mdev 域为非 NULL 值,视频设备实体将自动注册为媒体设备。
vdev:需要注册的video_device
type:设备类型,注册哪种设备是根据类型(type)参数。存在以下类型:
VFL_TYPE_GRABBER: 用于视频输入/输出设备的 videoX
VFL_TYPE_VBI: 用于垂直消隐数据的 vbiX (例如,隐藏式字幕,图文电视)
VFL_TYPE_RADIO: 用于广播调谐器的 radioX
nr:设备节点名编号,如/dev/video[nr]
最后一个参数让你确定一个所控制设备的设备节点号数量(例如 videoX 中的 X)。
通常你可以传入-1,让 v4l2 框架自己选择第一个空闲的编号。但是有时用户
需要选择一个特定的节点号。驱动允许用户通过驱动模块参数选择一个特定的
设备节点号是很普遍的。这个编号将会传递给这个函数,且 video_register_device
将会试图选择这个设备节点号。如果这个编号被占用,下一个空闲的设备节点
编号将被选中,并向内核日志中发送一个警告信息。
只要设备节点被创建,一些属性也会同时创建。在 /sys/class/video4linux
目录中你会找到这些设备。例如进入其中的 video0 目录,你会看到‘name’和
‘index’属性。‘name’属性值就是 video_device 结构体中的‘name’域。
‘index’属性值就是设备节点的索引值:每次调用 video_register_device(),
索引值都递增 1 。第一个视频设备节点总是从索引值 0 开始。
例如:
SDM632双摄项目:/sys/class/video4linux # ls
radio0 v4l-subdev1 v4l-subdev11 v4l-subdev13 v4l-subdev15 v4l-subdev17 v4l-subdev3 v4l-subdev5 v4l-subdev7 v4l-subdev9 video1 video33
v4l-subdev0 v4l-subdev10 v4l-subdev12 v4l-subdev14 v4l-subdev16 v4l-subdev2 v4l-subdev4 v4l-subdev6 v4l-subdev8 video0 video32
SDM632双摄项目:/sys/class/video4linux # cat video0/name
msm-config
SDM632双摄项目:/sys/class/video4linux # cat video0/index
0
SDM632双摄项目:/sys/class/video4linux # cat video1/name
msm-sensor
SDM632双摄项目:/sys/class/video4linux # cat video1/index
0
//查看下subdev都有那些设备:
SDM632双摄项目:/sys/class/video4linux # cat v4l-subdev0/name
msm_cci
SDM632双摄项目:/sys/class/video4linux # cat v4l-subdev1/name
msm_csiphy
SDM632双摄项目:/sys/class/video4linux # cat v4l-subdev2/name
msm_csiphy
SDM632双摄项目:/sys/class/video4linux # cat v4l-subdev3/name
msm_csiphy
SDM632双摄项目:/sys/class/video4linux # cat v4l-subdev4/name
msm_csid
SDM632双摄项目:/sys/class/video4linux # cat v4l-subdev5/name
msm_csid
SDM632双摄项目:/sys/class/video4linux # cat v4l-subdev6/name
msm_csid
SDM632双摄项目:/sys/class/video4linux # cat v4l-subdev7/name
msm_actuator
SDM632双摄项目:/sys/class/video4linux # cat v4l-subdev8/name
msm_eeprom
SDM632双摄项目:/sys/class/video4linux # cat v4l-subdev9/name
msm_eeprom
SDM632双摄项目:/sys/class/video4linux # cat v4l-subdev10/name
msm_camera_flash
SDM632双摄项目:/sys/class/video4linux # cat v4l-subdev11/name
msm_sensor_init
SDM632双摄项目:/sys/class/video4linux # cat v4l-subdev12/name
cpp
SDM632双摄项目:/sys/class/video4linux # cat v4l-subdev13/name
vfe
SDM632双摄项目:/sys/class/video4linux # cat v4l-subdev14/name
vfe
SDM632双摄项目:/sys/class/video4linux # cat v4l-subdev15/name
msm_ispif
SDM632双摄项目:/sys/class/video4linux # cat v4l-subdev16/name
msm_buf_mngr
SDM632双摄项目:/sys/class/video4linux # cat v4l-subdev17/name
imx317_sub1//真实存在的sony sensor
SDM632双摄项目:/sys/class/video4linux # cat v4l-subdev18/name
cat: v4l-subdev18/name: No such file or directory
用户可以设置 udev 规则,利用索引属性生成花哨的设备名(例如:用‘mpegX’
代表 MPEG 视频捕获设备节点)。
在设备成功注册后,就可以使用这些域:
- vfl_type: 传递给 video_register_device 的设备类型。
- minor: 已指派的次设备号。
- num: 设备节点编号 (例如 videoX 中的 X)。
- index: 设备索引号。
释放video_device:video_unregister_device()//这个操作将从 sysfs 中移除设备节点(导致 udev 将其从 /dev 中移除)。
kernel/msm-4.9/include/media/v4l2-device.h
struct v4l2_device {
struct device *dev;
#if defined(CONFIG_MEDIA_CONTROLLER)
struct media_device *mdev;
#endif
struct list_head subdevs;//用链表管理注册的subdev
spinlock_t lock;
char name[V4L2_DEVICE_NAME_SIZE];//device 名字
void (*notify)(struct v4l2_subdev *sd,
unsigned int notification, void *arg);
struct v4l2_ctrl_handler *ctrl_handler;
struct v4l2_prio_state prio;
struct kref ref;//引用计数
void (*release)(struct v4l2_device *v4l2_dev);
};
v4l2_device在v4l2框架中充当所有v4l2_subdev的父设备,管理着注册在其下的子设备,
注册v4l2_device:v4l2_device_register(struct device *dev, struct v4l2_device *v4l2_dev)
注销v4l2_device: v4l2_device_unregister(struct v4l2_device *v4l2_dev)//注意:v4l2_device_release(struct kref *ref)是引用计数归零时的释放回调,并不是注销接口
4)具体的sensor驱动:主要上电、提供工作时钟、视频图象裁剪、流IO开启等,实现各种设备控制方法供上层调用并注册v4l2_subdev.
kernel/msm-4.9/include/media/v4l2-subdev.h
struct v4l2_subdev {
#if defined(CONFIG_MEDIA_CONTROLLER)
struct media_entity entity;
#endif
struct list_head list;
struct module *owner;
bool owner_v4l2_dev;
u32 flags;
struct v4l2_device *v4l2_dev;//指向父设备
const struct v4l2_subdev_ops *ops;//提供一些控制v4l2设备的接口
const struct v4l2_subdev_internal_ops *internal_ops;//向v4l2框架提供的接口函数
struct v4l2_ctrl_handler *ctrl_handler;//subdev控制接口
char name[V4L2_SUBDEV_NAME_SIZE];
u32 grp_id;
void *dev_priv;
void *host_priv;
struct video_device *devnode;
struct device *dev;
struct device_node *of_node;
struct list_head async_list;
struct v4l2_async_subdev *asd;
struct v4l2_async_notifier *notifier;
struct v4l2_subdev_platform_data *pdata;
};
每个子设备都需要实现一个v4l2_subdev结构体,结构体里包括了对子设备操作的成员v4l2_subdev_ops和v4l2_subdev_internal_ops
struct v4l2_subdev_ops {
const struct v4l2_subdev_core_ops *core;//视频设备通用的操作:初始化、加载FW、上电和reset等等
const struct v4l2_subdev_tuner_ops *tuner;//tuner特有的操作
const struct v4l2_subdev_audio_ops *audio;//audio特有的操作
const struct v4l2_subdev_video_ops *video;//视频设备的特有操作:裁剪图像、开关视频流
const struct v4l2_subdev_vbi_ops *vbi;
const struct v4l2_subdev_ir_ops *ir;
const struct v4l2_subdev_sensor_ops *sensor;
const struct v4l2_subdev_pad_ops *pad;
};
以及
struct v4l2_subdev_internal_ops {
int (*registered)(struct v4l2_subdev *sd);//当subdev注册时被调用,读取IC的ID来进行识别
void (*unregistered)(struct v4l2_subdev *sd);
int (*open)(struct v4l2_subdev *sd, struct v4l2_subdev_fh *fh);//当设备节点被打开时调用,通常会给设备上电和设置视频捕捉FMT
int (*close)(struct v4l2_subdev *sd, struct v4l2_subdev_fh *fh);
};
视频设备通常需要实现core和video成员,这两个OPS中的操作都是可选的,但是对于视频流设备video->s_stream(开启或关闭流IO)必须要实现。v4l2_subdev_internal_ops是向V4L2框架提供的接口,只能被V4L2框架层调用。在注册或打开子设备的时候,进行一些辅助性操作。
subdev的注册和注销:
v4l2_device_register_subdev()
v4l2_device_unregister_subdev()
kernel/msm-4.9/include/media/v4l2-ctrls.h
struct v4l2_ctrl_handler {
struct mutex _lock;
struct mutex *lock;
struct list_head ctrls;//其中成员ctrls作为链表存储包括设置亮度、饱和度、对比度和清晰度等方法,可以通过v4l2_ctrl_new_xxx()函数创建具体方法并添加到链表ctrls
struct list_head ctrl_refs;
struct v4l2_ctrl_ref *cached;
struct v4l2_ctrl_ref **buckets;
v4l2_ctrl_notify_fnc notify;
void *notify_priv;
u16 nr_of_buckets;
int error;
};
v4l2 核心 API 提供了一个处理视频缓冲的标准方法(称为“videobuf”)。
这些方法使驱动可以通过统一的方式实现 read()、mmap() 和 overlay()。
目前在设备上支持视频缓冲的方法有分散/聚集 DMA(videobuf-dma-sg)、
线性 DMA(videobuf-dma-contig)以及大多用于 USB 设备的用 vmalloc
分配的缓冲(videobuf-vmalloc)。
V4L2支持三种不同的IO访问方式
1)read和write:是基本帧IO访问方式,通过read读取每一帧数据,数据需要在内核和用户之间拷贝,这种方式访问速度可能会非常慢
2)内存映射缓冲区(V4L2_MEMORY_MMAP):是在内存空间开辟缓冲区,应用通过mmap()系统调用映射到用户地址空间。这些缓冲区可以是大而连续的DMA缓冲区、通过vmalloc()创建的虚拟缓冲区,或者直接在设备的IO内存中开辟的缓冲区(如果硬件支持的话)
V4L2_MEMORY_MMAP数据流通过程:
parallel AHB
Camera sensor------------>CAM IF------>DMA controller-------->RAM
or MIPI
CAMIF可以对图像数据进行调整(翻转、裁剪和格式转换等),然后DMA控制器设置DMA通道请求AHB将图像数据传到分配好的DMA缓冲区。待图像数据传输到DMA缓冲区之后,mmap操作把缓冲区映射到用户空间,应用就可以直接访问缓冲区的数据。而为了使设备支持流IO这种方式,v4l2需要实现对video buffer的管理,即实现vb2_queue
补充下:
AHB,是Advanced High performance Bus的缩写,译作高级高性能总线,这是一种“系统总线”。
AHB主要用于高性能模块(如CPU、DMA和DSP等)之间的连接。AHB 系统由主模块、从模块和基础结构(Infrastructure)3部分组成,整个AHB总线上的传输都由主模块发出,由从模块负责回应。
APB,是Advanced Peripheral Bus的缩写,这是一种外围总线。
APB主要用于低带宽的周边外设之间的连接,例如UART、1284等,它的总线架构不像 AHB支持多个主模块,在APB里面唯一的主模块就是APB 桥。
这两者都是总线,符合AMBA规范。
3)用户空间缓冲区(V4L2_MEMORY_USERPTR):是用户空间的应用中开辟缓冲区,用户与内核空间之间交换缓冲区指针。很明显,在这种情况下是不需要mmap()调用的,但驱动有效的支持用户空间缓冲区,其工作将也会更困难。
read和write方式属于帧IO访问方式,每一帧都要通过IO操作,需要用户和内核之间数据拷贝,而后两种是流IO访问方式,不需要内存拷贝,访问速度比较快。内存映射缓冲区访问方式是比较常用的方式。
kernel/msm-4.9/include/media/videobuf2-core.h
/*
vb2_queue代表一个videobuff队列,vb2_buffer是这个队列中的成员,vb2_mem_ops是缓冲内存的操作函数集,vb2_ops用来管理队列
*/
struct vb2_queue {
unsigned int type;
unsigned int io_modes;//访问IO的方式:mmap、userptr etc
struct device *dev;
unsigned long dma_attrs;
unsigned fileio_read_once:1;
unsigned fileio_write_immediately:1;
unsigned allow_zero_bytesused:1;
unsigned quirk_poll_must_check_waiting_for_buffers:1;
struct mutex *lock;
void *owner;
const struct vb2_ops *ops;//buffer队列操作函数集合
const struct vb2_mem_ops *mem_ops;//buffer memory操作集合
const struct vb2_buf_ops *buf_ops;
void *drv_priv;
unsigned int buf_struct_size;
u32 timestamp_flags;
gfp_t gfp_flags;
u32 min_buffers_needed;
/* private: internal use only */
struct mutex mmap_lock;
unsigned int memory;
struct vb2_buffer *bufs[VB2_MAX_FRAME];//代表每个frame buffer
unsigned int num_buffers;//分配的buffer个数
struct list_head queued_list;
unsigned int queued_count;
atomic_t owned_by_drv_count;
struct list_head done_list;
spinlock_t done_lock;
wait_queue_head_t done_wq;
struct device *alloc_devs[VB2_MAX_PLANES];
unsigned int streaming:1;
unsigned int start_streaming_called:1;
unsigned int error:1;
unsigned int waiting_for_buffers:1;
unsigned int is_multiplanar:1;
unsigned int is_output:1;
unsigned int copy_timestamp:1;
unsigned int last_buffer_dequeued:1;
struct vb2_fileio_data *fileio;
struct vb2_threadio_data *threadio;
#ifdef CONFIG_VIDEO_ADV_DEBUG
/*
* Counters for how often these queue-related ops are
* called. Used to check for unbalanced ops.
*/
u32 cnt_queue_setup;
u32 cnt_wait_prepare;
u32 cnt_wait_finish;
u32 cnt_start_streaming;
u32 cnt_stop_streaming;
#endif
};
/* vb2_mem_ops 包含了内存映射缓冲区、用户空间缓冲区的内存操作方法 */
struct vb2_mem_ops {
void *(*alloc)(struct device *dev, unsigned long attrs,
unsigned long size,
enum dma_data_direction dma_dir,
gfp_t gfp_flags);//分配视频缓存
void (*put)(void *buf_priv);//释放视频缓存
struct dma_buf *(*get_dmabuf)(void *buf_priv, unsigned long flags);
void *(*get_userptr)(struct device *dev, unsigned long vaddr,
unsigned long size,
enum dma_data_direction dma_dir);//获取用户空间视频缓冲区指针
void (*put_userptr)(void *buf_priv);//释放用户空间视频缓冲区指针
//用于缓存同步
void (*prepare)(void *buf_priv);
void (*finish)(void *buf_priv);
void *(*attach_dmabuf)(struct device *dev,
struct dma_buf *dbuf,
unsigned long size,
enum dma_data_direction dma_dir);
void (*detach_dmabuf)(void *buf_priv);
int (*map_dmabuf)(void *buf_priv);
void (*unmap_dmabuf)(void *buf_priv);
void *(*vaddr)(void *buf_priv);//缓存虚拟地址
void *(*cookie)(void *buf_priv);//缓存物理地址
unsigned int (*num_users)(void *buf_priv);//返回当期在用户空间的buffer数
int (*mmap)(void *buf_priv, struct vm_area_struct *vma);//把缓存区映射到用户空间
};
mem_ops由kernel自身实现并提供了三种类型的视频缓冲区操作方法:连续的DMA缓冲区、集散的DMA缓冲区以及vmalloc创建的缓冲区,分别由videobuf2-dma-contig.c、videobuf2-dma-sg.c和videobuf-vmalloc.c文件实现,可以根据实际情况来使用。
vb2_ops是用来管理buffer队列的函数集合,包括队列和缓冲区初始化等
struct vb2_ops {
int (*queue_setup)(struct vb2_queue *q,
unsigned int *num_buffers, unsigned int *num_planes,
unsigned int sizes[], struct device *alloc_devs[]);//队列初始化
//释放和获取设备操作锁
void (*wait_prepare)(struct vb2_queue *q);
void (*wait_finish)(struct vb2_queue *q);
//对buffer的操作
int (*buf_init)(struct vb2_buffer *vb);
int (*buf_prepare)(struct vb2_buffer *vb);
void (*buf_finish)(struct vb2_buffer *vb);
void (*buf_cleanup)(struct vb2_buffer *vb);
//开始/停止视频流
int (*start_streaming)(struct vb2_queue *q, unsigned int count);
void (*stop_streaming)(struct vb2_queue *q);
//把VB传递给驱动,以填充frame数据
void (*buf_queue)(struct vb2_buffer *vb);
};
v4l2_buffer状态机
VIDIOC_QBUF 应用程序 VIDIOC_DQBUF
放入输入队列------------------------处理数据<----------------取出输出队列中
一个缓冲区 一个缓冲区
| |
| 视频采集输入队列 视频采集输出队列 |
_____\|/______________________________驱动程序_________________________|___________
/ / / / / / 采集数据 / / / /
/IN3/ /IN2/ /IN1/ /OUT2/ /OUT1/
/___/ /___/ /___/ /____/ /____/
一个frame buffer(vb2_buffer/v4l2_buffer)可以有三种状态:
1.在驱动的输入队列中,驱动程序将会对此队列中的缓冲区进行处理,用户空间通过IOCTL:VIDIOC_QBUF把缓冲区放入到队列。对于一个视频捕获设备,传入队列中的缓冲区是空的,驱动会往其中填充数据;
2.在驱动的输出队列中,这些缓冲区已由驱动处理过,对于一个视频捕获设备,缓存区已经填充了视频数据,正等用户空间来认领;
3.用户空间状态的队列,已经通过IOCTL:VIDIOC_DQBUF传出到用户空间的缓冲区,此时,缓冲区由用户空间拥有,驱动无法访问。
最后落脚点的struct v4l2_buffer结构如下:
struct v4l2_buffer {
__u32 index;//buffer 序号,缓存编号
__u32 type;//buffer类型,视频捕捉模式
__u32 bytesused;//缓冲区已经使用的byte数
__u32 flags;//缓存当前状态
__u32 field;
struct timeval timestamp;//时间戳,代表帧捕获的时间
struct v4l2_timecode timecode;
__u32 sequence;
/* memory location */
__u32 memory;//表示缓冲区是内存映射缓冲区还是用户空间缓冲区
union {
__u32 offset;//内核缓冲区的位置
unsigned long userptr;//缓冲区的用户空间地址
struct v4l2_plane *planes;
__s32 fd;
} m;
__u32 length;//缓冲区大小,单位byte
__u32 reserved2;
__u32 reserved;
};
bytesused是图像数据所占的字节数,如果是V4L2_MEMORY_MMAP方式,m.offset是内核空间图像数据存放的开始地址,会传递给mmap函数作为一个偏移,通过mmap映射返回一个缓冲区指针p,p+byteused是图像数据在进程的虚拟地址空间所占区域;
如果是用户指针缓冲区的方式,可以获取的图像数据开始地址的指针m.userptr,userptr是一个用户空间的指针,userptr+bytesused便是所占的虚拟地址空间,应用可以直接访问。
---------------------------------
在HAL层实现切图,但是出现了两个问题,一个是骁龙相机正常(API1+HAL1),但是用自己的测试APP(API1+HAL3)就打不开;第二个是用自己的APP测试能够切图,但是颜色不对,变成了阿凡达的颜色?
首先看第一个问题:
出现问题不要瞎猜测,添加log看是具体卡在哪一步,比如切图的时候,骁龙相机正常,但是自己写的测试APP闪退,添加log看是卡在哪一步?
最终发现是卡在cutYuv这个函数,只要切图必挂!
/*===========================================================================
 * FUNCTION   : cutYuv
 *
 * DESCRIPTION: Crop a cutW x cutH window starting at (startW, startH) out of
 *              a semi-planar YUV420 (NV12/NV21) frame of srcW x srcH pixels.
 *              The cropped Y rows are written first, followed by the cropped
 *              interleaved UV rows, producing a contiguous cutW x cutH
 *              semi-planar frame in tarYuv.
 *
 * PARAMETERS :
 *   @tarYuv : output buffer, must hold at least cutW*cutH*3/2 bytes
 *   @srcYuv : source semi-planar frame of srcW*srcH*3/2 bytes
 *   @startW : x offset of the crop window (should be even for correct chroma)
 *   @startH : y offset of the crop window (should be even for correct chroma)
 *   @cutW   : crop width  (should be even)
 *   @cutH   : crop height (should be even)
 *   @srcW   : source frame width (row stride of the Y plane)
 *   @srcH   : source frame height
 *
 * RETURN     : none; logs and returns early on invalid arguments
 *==========================================================================*/
void cutYuv(unsigned char *tarYuv, unsigned char *srcYuv, int32_t startW,
    int32_t startH, int32_t cutW, int32_t cutH, int32_t srcW, int32_t srcH)
{
    int32_t row;
    unsigned char *dst;
    const unsigned char *srcUV;

    if (NULL == srcYuv)
    {
        LOGE("[wanghl] srcYUV failed 0");
        return;
    }
    if (NULL == tarYuv)
    {
        LOGE("[wanghl] tarYuv is NULL");
        return;
    }
    /* Reject crop windows falling outside the source frame: the old code
     * would read past the end of srcYuv in that case. */
    if (startW < 0 || startH < 0 || cutW <= 0 || cutH <= 0 ||
            startW + cutW > srcW || startH + cutH > srcH)
    {
        LOGE("[wanghl] invalid crop %dx%d@(%d,%d) from %dx%d",
                cutW, cutH, startW, startH, srcW, srcH);
        return;
    }

    /* Copy the luma rows straight into the destination. The previous
     * implementation staged the data through two malloc'ed buffers and
     * leaked the first one when the second allocation failed; copying
     * row-by-row into tarYuv needs no heap allocation at all. */
    dst = tarYuv;
    for (row = startH; row < startH + cutH; row++) {
        memcpy(dst, srcYuv + (size_t)row * srcW + startW, cutW);
        dst += cutW;
    }

    /* The interleaved UV plane starts right after the Y plane and has half
     * as many rows (YUV420 semi-planar layout); same row stride srcW. */
    srcUV = srcYuv + (size_t)srcW * srcH;
    for (row = startH / 2; row < (startH + cutH) / 2; row++) {
        memcpy(dst, srcUV + (size_t)row * srcW + startW, cutW);
        dst += cutW;
    }
}
进一步添加log:这个函数一开始调试的时候卡住了,卡在memcpy(tmpY+j*cutW, srcYuv+startW+i*srcW, cutW);
根据美籍领导提点,memcpy卡住一般就三种情况:1.空指针,2.野指针,3.长度越界
1.空指针比较好处理,输入参数srcYuv判空,输出参数tmpY判空,发现不是这两种情况,为了养成良好的习惯,函数输入参数也应该判空,然后malloc结果也应该判断
2.野指针,指针是非空的,指针本来指向某一个内存地址,指向另外一个指针,但是这个指针因为某些原因发生了改变,所指向的内容也被释放了,特别是多线程,异步操作的时候,那你还继续指向这块内存,指向一个没人要的荒郊野岭区域,那你就成了一个野指针,通过实验,把srcYuv+startW+i*srcW替换成一个固定的字符串就能跑过去,或者传入一个正常的指针比如tmpUV进去也正常了,说明这个srcYuv指针确实出了问题,后面针对srcYuv指针进行了检查:
static unsigned char *yuv_snap_4k = NULL;
yuv_snap_4k = buf_info->buf->buffer;//这个指针是直接指向另外一个地址,怀疑这个地址发生了变化,我们把这个地址打印出来看下就知道是不是地址发生了改变
LOGE("[wanghl]buf_info:%p, buf_info->buf: %p, buf_info->buf->buffer:%p", buf_info, buf_info->buf, buf_info->buf->buffer);
02-25 04:47:04.682 521 3288 E QCamera : <MCI><ERROR> mm_stream_read_msm_frame: 1942: [wanghl]buf_info:0xdd00d880, buf_info->buf: 0xd935fb68, buf_info->buf->buffer:0xd1318000
02-25 04:47:04.739 521 3288 E QCamera : <MCI><ERROR> mm_stream_read_msm_frame: 1942: [wanghl]buf_info:0xdd00d880, buf_info->buf: 0xd93603d8, buf_info->buf->buffer:0xd0724000
02-25 04:47:04.800 521 3288 E QCamera : <MCI><ERROR> mm_stream_read_msm_frame: 1942: [wanghl]buf_info:0xdd00d880, buf_info->buf: 0xd935fd84, buf_info->buf->buffer:0xd5710000
02-25 04:47:04.864 521 3288 E QCamera : <MCI><ERROR> mm_stream_read_msm_frame: 1942: [wanghl]buf_info:0xdd00d880, buf_info->buf: 0xd93605f4, buf_info->buf->buffer:0xcf82e000
02-25 04:47:04.928 521 3288 E QCamera : <MCI><ERROR> mm_stream_read_msm_frame: 1942: [wanghl]buf_info:0xdd00d880, buf_info->buf: 0xd935ffa0, buf_info->buf->buffer:0xd451d000
02-25 04:47:04.992 521 3288 E QCamera : <MCI><ERROR> mm_stream_read_msm_frame: 1942: [wanghl]buf_info:0xdd00d880, buf_info->buf: 0xd935fb68, buf_info->buf->buffer:0xd1f0c000
02-25 04:47:05.060 521 3288 E QCamera : <MCI><ERROR> mm_stream_read_msm_frame: 1942: [wanghl]buf_info:0xdd00d880, buf_info->buf: 0xd93601bc, buf_info->buf->buffer:0xd1318000
说明我们保存的这个指针:buf_info->buf->buffer,一直在重新创建和释放,一边在变,所以你指向它,你就成了一个野指针!问题的根本找到了,美籍领导建议我们重新分配一个内存来存这个buffer,这样就不会出现直接指向一个指针的风险,自己开辟一块内存来保存buffer数据,不需要的时候自己free释放掉就好,所以修改如下:
if(yuv_snap_4k == NULL)
{
yuv_snap_4k = (unsigned char *)malloc(buf_info->buf->frame_len);//自己创建分配一块内存
if(yuv_snap_4k != NULL)
{
LOGE("[wanghl]yuv_snap_4k malloc success");
} else {
LOGE("[wanghl]yuv_snap_4k malloc failed, free");
free(yuv_snap_4k);
yuv_snap_4k = NULL;
}
} else {
LOGE("[wanghl]copy snapshot full size frame buffer to yuv_snap_4k");
memset(yuv_snap_4k, 0, buf_info->buf->frame_len);
memcpy(yuv_snap_4k, buf_info->buf->buffer, buf_info->buf->frame_len);//拷贝4k snapshot stream的数据
}
cutYuv(yuvDst, yuv_snap_4k, 640*0, 848, cut_width, cut_height, srcWidth, srcHeight);//这样就不会挂
3.长度越界的问题,cutW硬编码为长度是2还是会挂,说明不是长度越界的问题
--------------------------------
帧数据的关键结构:mm_camera_interface.h
typedef struct mm_camera_buf_def {
uint32_t stream_id;
cam_stream_type_t stream_type;
cam_stream_buf_type buf_type;
uint32_t buf_idx;
uint8_t is_uv_subsampled;
struct timespec ts;
uint32_t frame_idx;
union {
mm_camera_plane_buf_def_t planes_buf;
mm_camera_user_buf_def_t user_buf;
};
int fd;
void *buffer;
size_t frame_len;
void *mem_info;
uint32_t flags;
uint32_t cache_flags;
} mm_camera_buf_def_t;
HAL层mm_stream_read_msm_frame调用VIDIOC_DQBUF命令从视频采集输出队列中取出已含有采集数据的帧缓冲区,也就是返回v4l2_buffer,开启预览的时候,能读回来三种stream type的buffer数据,stream type枚举定义如下:
typedef enum {
CAM_STREAM_TYPE_DEFAULT, /* default stream type */ 0
CAM_STREAM_TYPE_PREVIEW, /* preview */ 1
CAM_STREAM_TYPE_POSTVIEW, /* postview */ 2
CAM_STREAM_TYPE_SNAPSHOT, /* snapshot */ 3
CAM_STREAM_TYPE_VIDEO, /* video */ 4
CAM_STREAM_TYPE_CALLBACK, /* app requested callback */ 5
CAM_STREAM_TYPE_IMPL_DEFINED, /* opaque format: could be display, video enc, ZSL YUV */ 6
CAM_STREAM_TYPE_METADATA, /* meta data */ 7
CAM_STREAM_TYPE_RAW, /* raw dump from camif */ 8
CAM_STREAM_TYPE_OFFLINE_PROC, /* offline process */ 9
CAM_STREAM_TYPE_PARM, /* mct internal stream */ 10
CAM_STREAM_TYPE_ANALYSIS, /* analysis stream */ 11
CAM_STREAM_TYPE_DEPTH, /* Depth stream for depth sensor*/ 12
CAM_STREAM_TYPE_MAX,
} cam_stream_type_t;
02-19 03:08:59.467 520 3192 E QCamera : <MCI><ERROR> mm_stream_read_msm_frame: 1717: [wanghl] vb.length = num_planes = 2
02-19 03:08:59.467 520 3192 E QCamera : <MCI><ERROR> mm_stream_read_msm_frame: 1761: [wanghl]VIDIOC_DQBUF buf_index 2, frame_idx 377, stream type 3, rc 0,queued: 2, buf_type = 0 flags = 8192 FD = 94 my_num 0 buf fd 103,buf stream_id:2050,dim.width:3840, dim.height:2160,frame_len:12533760//stream type 3, snapshot拍照数据,full-size大小
02-19 03:08:59.487 520 3192 E QCamera : <MCI><ERROR> mm_stream_read_msm_frame: 1717: [wanghl] vb.length = num_planes = 1
02-19 03:08:59.487 520 3192 E QCamera : <MCI><ERROR> mm_stream_read_msm_frame: 1761: [wanghl]VIDIOC_DQBUF buf_index 9, frame_idx 378, stream type 7, rc 0,queued: 5, buf_type = 0 flags = 8192 FD = 88 my_num 0 buf fd 41,buf stream_id:1536,dim.width:764376, dim.height:1,frame_len:765952//stream type 7 meta data元数据,数据量是比较小的
02-19 03:08:59.489 520 3192 E QCamera : <MCI><ERROR> mm_stream_read_msm_frame: 1717: [wanghl] vb.length = num_planes = 2
02-19 03:08:59.489 520 3192 E QCamera : <MCI><ERROR> mm_stream_read_msm_frame: 1761: [wanghl]VIDIOC_DQBUF buf_index 3, frame_idx 377, stream type 1, rc 0,queued: 1, buf_type = 0 flags = 8192 FD = 91 my_num 0 buf fd 130,buf stream_id:1793,dim.width:1920, dim.height:1080,frame_len:3112960//stream type 1 预览数据
02-19 03:08:59.491 520 3196 E QCamera : <HAL><ERROR> preview_stream_cb_routine: 874: [wanghl]preview_stream_cb_routine:frame->stream_id = 1793, frame->stream_type=1, frame_len =3112960, dim.w = 1920, dim.h = 1080, after allign w=1920, h=1080//frame->stream_type=1 预览回调
调用流程:
int32_t mm_stream_qbuf(mm_stream_t *my_obj, mm_camera_buf_def_t *buf)->
rc = mm_camera_poll_thread_add_poll_fd(&my_obj->ch_obj->poll_thread[0],
idx, my_obj->my_hdl, my_obj->fd, mm_stream_data_notify,
(void*)my_obj, mm_camera_async_call);//添加fd到poll线程
rc = ioctl(my_obj->fd, VIDIOC_QBUF, &buffer);//将buffer插入队列
static void mm_stream_data_notify(void* user_data)->
int32_t mm_stream_read_msm_frame(mm_stream_t * my_obj,
mm_camera_buf_info_t* buf_info,
uint8_t num_planes)->
rc = ioctl(my_obj->fd, VIDIOC_DQBUF, &vb);->
zoom(API1,HAL3)打开的时候是1,3,5,7四种stream type都有帧数据,测试的API2是1,5,7 stream有数据,API1(HAL3)也是1,3,5,7四个stream有数据,骁龙相机API1(HAL1)打开是1,3,7(没有回调)
zoom刚开起来的时候使用的stream是type 1;
进入会议之后显示的stream是type 5,所以我只替换了type 1的buffer,进入zoom会议之后不起作用了,所以stream type 3也必须修改掉,替换掉
CONFIG_COMPAT有定义
//使用思科的USB UVC摄像头在ubuntu上做实验,思科网真PrecisionHD USB摄像头:1280 x 720像素progressive@30fps(720 p)
//capture.c log:
set 640*480 YUYV format,pixelformat:0x47504a4d,pix.field:1
0x47:G
0x50:P
0x4a:J
0x4d:M
倒序:MJPG,也就是默认JPEG模式:
#define V4L2_PIX_FMT_MJPEG v4l2_fourcc('M', 'J', 'P', 'G') /* Motion-JPEG */
说明直接拿到的就是JPEG文件,而不是YUV文件。
videodev2.h
struct v4l2_format {
__u32 type;
union {
struct v4l2_pix_format pix; /* V4L2_BUF_TYPE_VIDEO_CAPTURE */
struct v4l2_pix_format_mplane pix_mp; /* V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE */高通是用这种格式
struct v4l2_window win; /* V4L2_BUF_TYPE_VIDEO_OVERLAY */
struct v4l2_vbi_format vbi; /* V4L2_BUF_TYPE_VBI_CAPTURE */
struct v4l2_sliced_vbi_format sliced; /* V4L2_BUF_TYPE_SLICED_VBI_CAPTURE */
struct v4l2_sdr_format sdr; /* V4L2_BUF_TYPE_SDR_CAPTURE */
__u8 raw_data[200]; /* user-defined */
} fmt;
};
/* map to v4l2_format.fmt.raw_data */
struct msm_v4l2_format_data {
enum v4l2_buf_type type;
unsigned int width;
unsigned int height;
unsigned int pixelformat; /* FOURCC */
unsigned char num_planes;
unsigned int plane_sizes[VIDEO_MAX_PLANES];
};
高通的设置,打开摄像头的时候设置的:
02-19 03:09:59.690 520 3386 E QCamera : <MCI><ERROR> mm_stream_get_v4l2_fmt: 2569: [wanghl]fmt:2// CAM_FORMAT_YUV_420_NV21,
02-19 03:09:59.690 520 3386 E QCamera : <MCI><ERROR> mm_stream_get_v4l2_fmt: 2578: [wanghl]V4L2_PIX_FMT_NV21
hardware/qcom/camera/QCamera2/stack/mm-camera-interface/src/mm_camera_stream.c
mm_stream_fsm_fn->
case MM_STREAM_STATE_CFG:
rc = mm_stream_fsm_cfg(my_obj, evt, in_val, out_val);
mm_stream_fsm_cfg->
case MM_STREAM_EVT_SET_FMT:
mm_stream_config->
int32_t mm_stream_set_fmt(mm_stream_t *my_obj)
msm_fmt.width = (unsigned int)my_obj->stream_info->dim.width;
msm_fmt.height = (unsigned int)my_obj->stream_info->dim.height;
msm_fmt.pixelformat = mm_stream_get_v4l2_fmt(my_obj->stream_info->fmt);
case CAM_FORMAT_YUV_420_NV21://说明设置的是这个类型
case CAM_FORMAT_YUV_420_NV21_VENUS:
LOGE("[wanghl]V4L2_PIX_FMT_NV21");//打印出来了
val = V4L2_PIX_FMT_NV21;
break;
memcpy(fmt.fmt.raw_data, &msm_fmt, sizeof(msm_fmt));
rc = ioctl(my_obj->fd, VIDIOC_S_FMT, &fmt);
align((2*(Y_Stride * Y_Scanlines) + 2*(UV_Stride * UV_Scanlines) + Extradata), 4096)
对比发现骁龙相机预览的时候的frame_len和内存对齐后的寬高是不一样的:
骁龙相机:1080P预览的时候,frame_len = 3112960,内存对齐后:width:1920,height:1080
测试APP:取4K的snapshot stream的时候,frame_len = 3153920,内存对齐后:width:1920,height:1088
计算公式参考:kernel/msm-4.9/include/uapi/media/msm_media_info.h
static inline unsigned int VENUS_BUFFER_SIZE(
int color_fmt, int width, int height)
case COLOR_FMT_NV21:
case COLOR_FMT_NV12:
uv_alignment = 4096;
y_plane = y_stride * y_sclines;
uv_plane = uv_stride * uv_sclines + uv_alignment;
size = y_plane + uv_plane +
MSM_MEDIA_MAX(extra_size, 8 * y_stride);
size = MSM_MEDIA_ALIGN(size, 4096);
/* Additional size to cover last row of non-aligned frame */
if (width >= 2400 && height >= 2400) {
size += MSM_MEDIA_ALIGN(width, w_alignment) *
w_alignment;
size = MSM_MEDIA_ALIGN(size, 4096);
}
break;
所以:size = y_plane + uv_plane + MSM_MEDIA_MAX(extra_size, 8 * y_stride);
= 1920*1088 + 1920*1088/2 + 4096 + MSM_MEDIA_MAX(16*1024=16384, 8 * 1920=15360)
= 3133440 + 4096 + 16384
= 3153920
3153920 / 4096 = 770,所以可以按照4096对齐(图像宽度要对齐,图像高度要对齐,图像尺寸还要4K对齐),但是骁龙相机这个长度不知道怎么算出来的
骁龙相机和测试APP:取4K的snapshot stream的时候,frame_len 都是 12533760 = 3840 * 2176 * 1.5,对齐后的寬高都是3840*2176*1.5
怀疑是高通走HAL1,自己测试APP和zoom走HAL3有区别,所以强制修改HAL3走HAL1,
1)修改Camera.java
private int cameraInitNormal(int cameraId) {
return cameraInitVersion(cameraId, CAMERA_HAL_API_VERSION_NORMAL_CONNECT);->改成CAMERA_HAL_API_VERSION_1_0
}
2)或者把skype/zoom的包名添加到vendor.camera.hal1.packagelist属性应该也可以:实际测试不起作用
adb shell setprop vendor.camera.hal1.packagelist zoom包名
//Force HAL1 if the package name falls in this bucket
String packageList = SystemProperties.get("vendor.camera.hal1.packagelist", "");
if (packageList.length() > 0) {
TextUtils.StringSplitter splitter = new TextUtils.SimpleStringSplitter(',');
splitter.setString(packageList);
for (String str : splitter) {
if (packageName.equals(str)) {
halVersion = CAMERA_HAL_API_VERSION_1_0;
break;
}
}
}
让我们自己的APP强制走HAL1之后,切图颜色正常,但是没有stream type为3的stream,也就是没有snapshot的stream,所以从snapshot stream拿到的图片数据是静止的,没有刷新,但是颜色是正常的,1080p预览的时候,frame_len 是3112960,内存对齐后的宽高是1920*1080,都和骁龙相机一样。说明HAL1和HAL3预览的preview size不一样,frame_len不一样,颜色格式应该也不一样,否则为什么切图偏色的问题改成HAL1就好了,只是不刷新而已。
看下YUV 格式的区别:
强制走HAL1之后:
骁龙:
02-24 12:29:22.379 519 3464 E QCamera : <MCI><ERROR> mm_stream_get_v4l2_fmt: 2798: [wanghl]fmt:131//CAM_FORMAT_MAX
02-24 12:29:22.386 519 3464 E QCamera : <MCI><ERROR> mm_stream_get_v4l2_fmt: 2798: [wanghl]fmt:2//V4L2_PIX_FMT_NV21
02-24 12:29:22.386 519 3464 E QCamera : <MCI><ERROR> mm_stream_get_v4l2_fmt: 2807: [wanghl]V4L2_PIX_FMT_NV21
02-24 12:29:22.393 519 3464 E QCamera : <MCI><ERROR> mm_stream_get_v4l2_fmt: 2798: [wanghl]fmt:2//V4L2_PIX_FMT_NV21
02-24 12:29:22.393 519 3464 E QCamera : <MCI><ERROR> mm_stream_get_v4l2_fmt: 2807: [wanghl]V4L2_PIX_FMT_NV21
02-24 12:29:22.402 519 3464 E QCamera : <MCI><ERROR> mm_stream_get_v4l2_fmt: 2798: [wanghl]fmt:105//CAM_FORMAT_Y_ONLY,用于人脸检测
测试APP:
02-24 12:32:19.051 520 4044 E QCamera : <MCI><ERROR> mm_stream_get_v4l2_fmt: 2798: [wanghl]fmt:131//CAM_FORMAT_MAX
02-24 12:32:19.057 520 4044 E QCamera : <MCI><ERROR> mm_stream_get_v4l2_fmt: 2798: [wanghl]fmt:2//V4L2_PIX_FMT_NV21
02-24 12:32:19.057 520 4044 E QCamera : <MCI><ERROR> mm_stream_get_v4l2_fmt: 2807: [wanghl]V4L2_PIX_FMT_NV21
02-24 12:32:19.061 520 4044 E QCamera : <MCI><ERROR> mm_stream_get_v4l2_fmt: 2798: [wanghl]fmt:105//CAM_FORMAT_Y_ONLY,用于人脸检测
zoom:
02-24 12:37:00.539 520 4965 E QCamera : <MCI><ERROR> mm_stream_get_v4l2_fmt: 2798: [wanghl]fmt:131//CAM_FORMAT_MAX
02-24 12:37:00.553 520 4965 E QCamera : <MCI><ERROR> mm_stream_get_v4l2_fmt: 2798: [wanghl]fmt:2//V4L2_PIX_FMT_NV21
02-24 12:37:00.553 520 4965 E QCamera : <MCI><ERROR> mm_stream_get_v4l2_fmt: 2807: [wanghl]V4L2_PIX_FMT_NV21
02-24 12:37:00.563 520 4965 E QCamera : <MCI><ERROR> mm_stream_get_v4l2_fmt: 2798: [wanghl]fmt:105//CAM_FORMAT_Y_ONLY,用于人脸检测
hardware/qcom/camera/QCamera2/stack/common/cam_types.h
typedef enum {
CAM_FORMAT_JPEG = 0,//0
CAM_FORMAT_YUV_420_NV12 = 1,//1
CAM_FORMAT_YUV_420_NV21,//2
CAM_FORMAT_YUV_420_NV21_ADRENO,//3
CAM_FORMAT_YUV_420_YV12,//4
CAM_FORMAT_YUV_422_NV16,//5
CAM_FORMAT_YUV_422_NV61,//6
CAM_FORMAT_YUV_420_NV12_VENUS,//7
/* Note: For all raw formats, each scanline needs to be 16 bytes aligned */
/* Packed YUV/YVU raw format, 16 bpp: 8 bits Y and 8 bits UV.
* U and V are interleaved with Y: YUYV or YVYV */
CAM_FORMAT_YUV_RAW_8BIT_YUYV,//8
CAM_FORMAT_YUV_RAW_8BIT_YVYU,//9
CAM_FORMAT_YUV_RAW_8BIT_UYVY, //10
CAM_FORMAT_YUV_RAW_8BIT_VYUY,//11
/* QCOM RAW formats where data is packed into 64bit word.
* 8BPP: 1 64-bit word contains 8 pixels p0 - p7, where p0 is
* stored at LSB.
* 10BPP: 1 64-bit word contains 6 pixels p0 - p5, where most
* significant 4 bits are set to 0. P0 is stored at LSB.
* 12BPP: 1 64-bit word contains 5 pixels p0 - p4, where most
* significant 4 bits are set to 0. P0 is stored at LSB. */
CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG,//12
CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GRBG,//13
CAM_FORMAT_BAYER_QCOM_RAW_8BPP_RGGB,//14
CAM_FORMAT_BAYER_QCOM_RAW_8BPP_BGGR,//15
CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG,//16
CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GRBG,//17
CAM_FORMAT_BAYER_QCOM_RAW_10BPP_RGGB,//18
CAM_FORMAT_BAYER_QCOM_RAW_10BPP_BGGR,//19
CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG, //20
CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GRBG,//21
CAM_FORMAT_BAYER_QCOM_RAW_12BPP_RGGB,//22
CAM_FORMAT_BAYER_QCOM_RAW_12BPP_BGGR,//23
/* MIPI RAW formats based on MIPI CSI-2 specifiction.
* 8BPP: Each pixel occupies one bytes, starting at LSB.
* Output with of image has no restrictons.
* 10BPP: Four pixels are held in every 5 bytes. The output
* with of image must be a multiple of 4 pixels.
* 12BPP: Two pixels are held in every 3 bytes. The output
* width of image must be a multiple of 2 pixels. */
CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG,//24
CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GRBG,//25
CAM_FORMAT_BAYER_MIPI_RAW_8BPP_RGGB,//26
CAM_FORMAT_BAYER_MIPI_RAW_8BPP_BGGR,//27
CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG,//28
CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GRBG,//29
CAM_FORMAT_BAYER_MIPI_RAW_10BPP_RGGB, //30
CAM_FORMAT_BAYER_MIPI_RAW_10BPP_BGGR,//31
CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG,//32
CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GRBG,//33
CAM_FORMAT_BAYER_MIPI_RAW_12BPP_RGGB,//34
CAM_FORMAT_BAYER_MIPI_RAW_12BPP_BGGR,//35
/* Ideal raw formats where image data has gone through black
* correction, lens rolloff, demux/channel gain, bad pixel
* correction, and ABF.
* Ideal raw formats could output any of QCOM_RAW and MIPI_RAW
* formats, plus plain8 8bbp, plain16 800, plain16 10bpp, and
* plain 16 12bpp */
CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_8BPP_GBRG,//36
CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_8BPP_GRBG,//37
CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_8BPP_RGGB,//38
CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_8BPP_BGGR,//39
CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_10BPP_GBRG, //40
CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_10BPP_GRBG,
CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_10BPP_RGGB,
CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_10BPP_BGGR,
CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_12BPP_GBRG,
CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_12BPP_GRBG,
CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_12BPP_RGGB,
CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_12BPP_BGGR,
CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_8BPP_GBRG,
CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_8BPP_GRBG,
CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_8BPP_RGGB, //50
CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_8BPP_BGGR,
CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_10BPP_GBRG,
CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_10BPP_GRBG,
CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_10BPP_RGGB,
CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_10BPP_BGGR,
CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_12BPP_GBRG,
CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_12BPP_GRBG,
CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_12BPP_RGGB,
CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_12BPP_BGGR,
CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN8_8BPP_GBRG, //60
CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN8_8BPP_GRBG,
CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN8_8BPP_RGGB,
CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN8_8BPP_BGGR,
CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_8BPP_GBRG,
CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_8BPP_GRBG,
CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_8BPP_RGGB,
CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_8BPP_BGGR,
CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_10BPP_GBRG,
CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_10BPP_GRBG,
CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_10BPP_RGGB, //70
CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_10BPP_BGGR,
CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_12BPP_GBRG,
CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_12BPP_GRBG,
CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_12BPP_RGGB,
CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_12BPP_BGGR,
CAM_FORMAT_BAYER_RAW_PLAIN16_10BPP_GBRG,
CAM_FORMAT_BAYER_RAW_PLAIN16_10BPP_GRBG,
CAM_FORMAT_BAYER_RAW_PLAIN16_10BPP_RGGB,
CAM_FORMAT_BAYER_RAW_PLAIN16_10BPP_BGGR,
/* generic 8-bit raw */
CAM_FORMAT_JPEG_RAW_8BIT,//80
CAM_FORMAT_META_RAW_8BIT,//81
/* generic 10-bit raw */
CAM_FORMAT_META_RAW_10BIT,//82
/* QCOM RAW formats where data is packed into 64bit word.
* 14BPP: 1 64-bit word contains 4 pixels p0 - p3, where most
* significant 4 bits are set to 0. P0 is stored at LSB.
*/
CAM_FORMAT_BAYER_QCOM_RAW_14BPP_GBRG,//83
CAM_FORMAT_BAYER_QCOM_RAW_14BPP_GRBG,//84
CAM_FORMAT_BAYER_QCOM_RAW_14BPP_RGGB, //80//85
CAM_FORMAT_BAYER_QCOM_RAW_14BPP_BGGR,//86
/* MIPI RAW formats based on MIPI CSI-2 specifiction.
* 14 BPPP: 1st byte: P0 [13:6]
* 2nd byte: P1 [13:6]
* 3rd byte: P2 [13:6]
* 4th byte: P3 [13:6]
* 5th byte: P0 [5:0]
* 7th byte: P1 [5:0]
* 8th byte: P2 [5:0]
* 9th byte: P3 [5:0]
*/
CAM_FORMAT_BAYER_MIPI_RAW_14BPP_GBRG,//87
CAM_FORMAT_BAYER_MIPI_RAW_14BPP_GRBG,//88
CAM_FORMAT_BAYER_MIPI_RAW_14BPP_RGGB,//89
CAM_FORMAT_BAYER_MIPI_RAW_14BPP_BGGR,//90
CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_14BPP_GBRG,//91
CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_14BPP_GRBG,//92
CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_14BPP_RGGB,//93
CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_14BPP_BGGR,//94
CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_14BPP_GBRG, //90//95
CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_14BPP_GRBG,//96
CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_14BPP_RGGB,//97
CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_14BPP_BGGR,//98
/* 14BPP: 1st byte: P0 [8:0]
* 2nd byte: P0 [13:9]
* 3rd byte: P1 [8:0]
* 4th byte: P1 [13:9]
*/
CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_14BPP_GBRG,//99
CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_14BPP_GRBG,//100
CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_14BPP_RGGB,//101
CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_14BPP_BGGR,//102
CAM_FORMAT_YUV_444_NV24,//103
CAM_FORMAT_YUV_444_NV42,//104
/* Y plane only, used for FD, 8BPP */
CAM_FORMAT_Y_ONLY, //100//105
/* UBWC format */
CAM_FORMAT_YUV_420_NV12_UBWC,//106
CAM_FORMAT_YUV_420_NV21_VENUS,//107
/* RGB formats */
CAM_FORMAT_8888_ARGB,//108
/* Y plane only */
CAM_FORMAT_Y_ONLY_10_BPP,//109
CAM_FORMAT_Y_ONLY_12_BPP,//110
CAM_FORMAT_Y_ONLY_14_BPP,//111
CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GREY,//112
CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GREY,//113
CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GREY,//114
CAM_FORMAT_BAYER_QCOM_RAW_14BPP_GREY,//115
CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GREY,//116
CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GREY,//117
CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GREY,//118
CAM_FORMAT_BAYER_MIPI_RAW_14BPP_GREY,//119
CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_8BPP_GREY,//120
CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_10BPP_GREY,//121
CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_12BPP_GREY,//122
CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_14BPP_GREY,//123
CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_8BPP_GREY,//124
CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_10BPP_GREY,//125
CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_12BPP_GREY,//126
CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_14BPP_GREY,//127
CAM_FORMAT_DEPTH16,//128
CAM_FORMAT_DEPTH8,//129
CAM_FORMAT_DEPTH_POINT_CLOUD,//130
CAM_FORMAT_MAX//131
} cam_format_t;
没改之前:
骁龙:
02-25 04:55:11.504 549 5048 E QCamera : <MCI><ERROR> mm_stream_get_v4l2_fmt: 2798: [wanghl]fmt:131
02-25 04:55:11.512 549 5048 E QCamera : <MCI><ERROR> mm_stream_get_v4l2_fmt: 2798: [wanghl]fmt:2
02-25 04:55:11.512 549 5048 E QCamera : <MCI><ERROR> mm_stream_get_v4l2_fmt: 2807: [wanghl]V4L2_PIX_FMT_NV21
02-25 04:55:11.517 549 5048 E QCamera : <MCI><ERROR> mm_stream_get_v4l2_fmt: 2798: [wanghl]fmt:2
02-25 04:55:11.517 549 5048 E QCamera : <MCI><ERROR> mm_stream_get_v4l2_fmt: 2807: [wanghl]V4L2_PIX_FMT_NV21
02-25 04:55:11.522 549 5048 E QCamera : <MCI><ERROR> mm_stream_get_v4l2_fmt: 2798: [wanghl]fmt:105
测试APP和zoom:
02-25 05:07:40.468 549 549 E QCamera : <MCI><ERROR> mm_stream_get_v4l2_fmt: 2798: [wanghl]fmt:131
02-25 05:07:40.542 549 549 E QCamera : <MCI><ERROR> mm_stream_get_v4l2_fmt: 2798: [wanghl]fmt:7//CAM_FORMAT_YUV_420_NV12_VENUS ==> V4L2_PIX_FMT_NV12 ==>COLOR_FMT_NV12,多设置了一个7的格式
02-25 05:07:40.544 549 549 E QCamera : <MCI><ERROR> mm_stream_get_v4l2_fmt: 2798: [wanghl]fmt:2
02-25 05:07:40.544 549 549 E QCamera : <MCI><ERROR> mm_stream_get_v4l2_fmt: 2807: [wanghl]V4L2_PIX_FMT_NV21
02-25 05:07:40.548 549 549 E QCamera : <MCI><ERROR> mm_stream_get_v4l2_fmt: 2798: [wanghl]fmt:2
02-25 05:07:40.548 549 549 E QCamera : <MCI><ERROR> mm_stream_get_v4l2_fmt: 2807: [wanghl]V4L2_PIX_FMT_NV21
02-25 05:07:40.553 549 549 E QCamera : <MCI><ERROR> mm_stream_get_v4l2_fmt: 2798: [wanghl]fmt:2
02-25 05:07:40.553 549 549 E QCamera : <MCI><ERROR> mm_stream_get_v4l2_fmt: 2807: [wanghl]V4L2_PIX_FMT_NV21
02-25 05:07:40.557 549 549 E QCamera : <MCI><ERROR> mm_stream_get_v4l2_fmt: 2798: [wanghl]fmt:105
所以怀疑HAL1和HAL3的预览的格式是不是不一样,增加log打印验证下:
骁龙:
02-25 04:49:06.587 520 3533 E QCamera : <MCI><ERROR> mm_stream_read_msm_frame: 1899: [wanghl]VIDIOC_DQBUF buf_index 4, frame_idx 426, stream type 1, rc 0,queued: 1, buf_type = 0 flags = 0x2000 FD = 91 my_num 0 buf fd 135,buf stream_id:1793,dim.width:1920, dim.height:1080,frame_len:3112960,after align:width=1920, height=1080, vb.bytesused:0, fmt:2//preview
02-25 04:49:06.635 520 3533 E QCamera : <MCI><ERROR> mm_stream_read_msm_frame: 1899: [wanghl]VIDIOC_DQBUF buf_index 1, frame_idx 427, stream type 3, rc 0,queued: 2, buf_type = 0 flags = 0x2000 FD = 94 my_num 0 buf fd 99,buf stream_id:2050,dim.width:3840, dim.height:2160,frame_len:12533760,after align:width=3840, height=2176, vb.bytesused:0, fmt:2//snapshot
说明骁龙相机的snapshot和preview流都是2/*CAM_FORMAT_YUV_420_NV21*/
测试APP:
02-25 04:51:04.324 520 3685 E QCamera : <MCI><ERROR> mm_stream_read_msm_frame: 1899: [wanghl]VIDIOC_DQBUF buf_index 9, frame_idx 163, stream type 1, rc 0,queued: 3, buf_type = 0 flags = 0x2000 FD = 83 my_num 0 buf fd 229,buf stream_id:3329,dim.width:1920, dim.height:1080,frame_len:3153920,after align:width=1920, height=1088, vb.bytesused:0, fmt:7//preview
02-25 04:51:04.335 520 3685 E QCamera : <MCI><ERROR> mm_stream_read_msm_frame: 1899: [wanghl]VIDIOC_DQBUF buf_index 6, frame_idx 162, stream type 5, rc 0,queued: 5, buf_type = 0 flags = 0x2000 FD = 103 my_num 0 buf fd 180,buf stream_id:3843,dim.width:1920, dim.height:1080,frame_len:3133440,after align:width=1920, height=1088, vb.bytesused:0, fmt:2//app callback,app确实有调用callback
02-25 04:51:04.349 520 3685 E QCamera : <MCI><ERROR> mm_stream_read_msm_frame: 1899: [wanghl]VIDIOC_DQBUF buf_index 6, frame_idx 163, stream type 3, rc 0,queued: 4, buf_type = 0 flags = 0x2000 FD = 122 my_num 0 buf fd 193,buf stream_id:4100,dim.width:3840, dim.height:2160,frame_len:12533760,after align:width=3840, height=2176, vb.bytesused:0, fmt:2//snapshot
问题发现了,测试APP的snapshot的type是2/*CAM_FORMAT_YUV_420_NV21*/,但是preview却是使用的type为7:/*CAM_FORMAT_YUV_420_NV12_VENUS*/,裁剪的时候,4K snapshot流是NV21格式,preview是NV12格式,所以UV格式颠倒了,才会出现说裁切之后颜色不对,肤色变成阿凡达的问题,所以裁切之后,把NV21格式转换成NV12格式再送preview buffer即可
zoom:
02-25 05:07:22.948 520 4417 E QCamera : <MCI><ERROR> mm_stream_read_msm_frame: 1899: [wanghl]VIDIOC_DQBUF buf_index 6, frame_idx 1258, stream type 1, rc 0,queued: 3, buf_type = 0 flags = 0x2000 FD = 83 my_num 0 buf fd 186,buf stream_id:7169,dim.width:640, dim.height:480,frame_len:483328,after align:width=640, height=480, vb.bytesused:0, fmt:7//preview 640*480
02-25 05:07:22.950 520 4417 E QCamera : <MCI><ERROR> mm_stream_read_msm_frame: 1899: [wanghl]VIDIOC_DQBUF buf_index 9, frame_idx 1258, stream type 5, rc 0,queued: 3, buf_type = 0 flags = 0x2000 FD = 103 my_num 0 buf fd 200,buf stream_id:7683,dim.width:640, dim.height:480,frame_len:491520,after align:width=640, height=512, vb.bytesused:0, fmt:2//callback 640*512
02-25 05:07:22.958 520 4417 E QCamera : <MCI><ERROR> mm_stream_read_msm_frame: 1899: [wanghl]VIDIOC_DQBUF buf_index 1, frame_idx 1258, stream type 7, rc 0,queued: 10, buf_type = 0 flags = 0x2000 FD = 67 my_num 0 buf fd 131,buf stream_id:6912,dim.width:764376, dim.height:1,frame_len:765952,after align:width=764376, height=1, vb.bytesused:0, fmt:131
02-25 05:07:22.966 520 4417 E QCamera : <MCI><ERROR> mm_stream_read_msm_frame: 1899: [wanghl]VIDIOC_DQBUF buf_index 7, frame_idx 1259, stream type 3, rc 0,queued: 3, buf_type = 0 flags = 0x2000 FD = 122 my_num 0 buf fd 184,buf stream_id:7940,dim.width:3840, dim.height:2160,frame_len:12533760,after align:width=3840, height=2176, vb.bytesused:0, fmt:2//snapshot 3840*2176
zoom结果和测试APP一致
API2 zoom rooms 只有两路流,type 5/*callback*/ 和 type 7/*metadata*/
02-25 05:57:49.143 521 4370 E QCamera : <MCI><ERROR> mm_stream_read_msm_frame: 1918: [wanghl]VIDIOC_DQBUF buf_index 7, frame_idx 14741, stream type 5, rc 0,queued: 4, buf_type = 0 flags = 0x2000 FD = 75 my_num 0 buf fd 161,buf stream_id:4353,dim.width:1280, dim.height:720,frame_len:1474560,after align:width=1280, height=768, vb.bytesused:0, fmt:2//callback,1280*768,NV21 fmt
02-25 05:57:49.157 521 4370 E QCamera : <MCI><ERROR> mm_stream_read_msm_frame: 1918: [wanghl]VIDIOC_DQBUF buf_index 0, frame_idx 14741, stream type 7, rc 0,queued: 10, buf_type = 0 flags = 0x2000 FD = 67 my_num 0 buf fd 97,buf stream_id:4096,dim.width:764376, dim.height:1,frame_len:765952,after align:width=764376, height=1, vb.bytesused:0, fmt:131//metadata
===================================================================
在内核空间和用户空间交换数据时,get_user和put_user是两个常用的函数。相对于copy_to_user和copy_from_user,这两个函数主要用于完成一些简单类型变量(char、int、long等)的拷贝任务,对于一些复合类型的变量,比如数据结构或者数组类型,
get_user和put_user函数还是无法胜任,这两个函数内部将对指针指向的对象长度进行检查,在arm平台上只支持长度为1,2,4,8的变量。
ISP完成:色彩空间转换(RGB转换为YUV),在YUV色彩空间之上进行彩噪去除与边缘加强,色彩与对比度加强,然后输出YUV420格式的数据。所以想拿到全尺寸的YUV图像数据只能在ISP驱动里面获取。但实际好像不是这样的:
VFE-->YUV Buffers-->CPP-->YUV Buffers
而CPP这里面的工作包括:Denoise-->Scale-->Sharpen-->Rotate
所以裁切感觉是放在CPP里的,根据高通的官方文档:
Camera postprocessor (CPP):Supports flip/rotate, denoise, smooth/sharpen, crop, and upscale features for full-size VFE output frames
所以CPP可以实现在全尺寸的VFE输出帧上进行crop操作
ISP主要工作是图像处理:
WNR 小波降噪
SCE 肤色增强
MCE 记忆色增强
ASF 自适应空间滤镜
VFE 视频前端
CPP 摄像头后处理器
ABF 自适应拜耳滤镜
CC 色彩校正
ACE 高级色度增强
ISP 图像信号处理器
高通平台确定ISP version
1.vendor下的ISP version:
//vendor/qcom/proprietary/mm-camera/mm-camera2/media-controller/modules/isp2/module/Android.mk
else ifeq ($(call is-board-platform-in-list,msm8937 msm8953),true)//SDM632应该属于msm8953
LOCAL_C_INCLUDES += $(LOCAL_MMCAMERA_PATH)/media-controller/modules/isp2/module/isp42
endif
所以使用的是ISP42
2.kernel下的ISP version:
//kernel/msm-4.9/arch/arm64/boot/dts/pubtron/msm8953-camera.dtsi
compatible = "qcom,vfe40";
所以kernel下是isp40,vendor下是isp42
SDM632 camera overview文档(2018.April)说camera架构还使用的Memory Buffer Sharing Mechanism – Domain Socket 作为内存共享的机制,实际上SDM439上面已经去掉了这个通信方法,改成了用MCT Shim layer来进行HAL层的MM camera Interface和Media Controller的通信(2018.Mar)
Domain sockets are used to share buffer information via file descriptors between the media server
(camera HAL) and mm-qcamera-daemon (mm-camera) processes.
Types of buffer information exchanged
CAM_MAPPING_BUF_TYPE_CAPABILITY – Camera capability buffer
CAM_MAPPING_BUF_TYPE_PARM_BUF – Camera parameters buffer
CAM_MAPPING_BUF_TYPE_STREAM_BUF – Stream buffers
CAM_MAPPING_BUF_TYPE_STREAM_INFO – Stream information buffer
CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF – Offline reprocess input buffer
Relevant source code pointers
Socket implementation – QCamera2\stack\mm-camera-interface\src\
mm_camera_sock.c
Message transmitter – QCamera2\stack\mm-camera-interface\src\
mm_camera_stream.c
Message receiver – mm-camera\mm-camera2\server-imaging\server.c//这个文件都不会被编译
实际代码里面也确认DAEMON_PRESENT这个宏开关没有打开,用的已经是MCT Shim layer机制了,可能是文档没有及时更新。
调查
int32_t mm_stream_read_msm_frame(mm_stream_t * my_obj,
mm_camera_buf_info_t* buf_info,
uint8_t num_planes)//mm_camera_stream.c
函数属于哪个进程,打印出进程PID!
一.打开骁龙相机:
1)添加log:
02-25 04:47:45.878 519 3367 E QCamera : <MCI><ERROR> mm_stream_read_msm_frame: 1870: [wanghl] vb.length = num_planes = 2, my_obj->frame_offset.mp[0].stride:3840, pid:519
02-25 04:47:45.887 519 3367 E QCamera : <MCI><ERROR> mm_stream_read_msm_frame: 1870: [wanghl] vb.length = num_planes = 1, my_obj->frame_offset.mp[0].stride:764376, pid:519
02-25 04:47:45.890 519 3367 E QCamera : <MCI><ERROR> mm_stream_read_msm_frame: 1870: [wanghl] vb.length = num_planes = 2, my_obj->frame_offset.mp[0].stride:1920, pid:519
02-25 04:47:45.918 519 3367 E QCamera : <MCI><ERROR> mm_stream_read_msm_frame: 1870: [wanghl] vb.length = num_planes = 1, my_obj->frame_offset.mp[0].stride:764376, pid:519
02-25 04:47:45.921 519 3367 E QCamera : <MCI><ERROR> mm_stream_read_msm_frame: 1870: [wanghl] vb.length = num_planes = 2, my_obj->frame_offset.mp[0].stride:1920, pid:519
进程PID为519
2)执行adb shell ps -e查询:
USER PID Name
cameraserver 519 1 309528 23944 0 0 S android.hardware.camera.provider@2.4-service//发现PID:519是camera.provider
cameraserver 861 1 117644 20956 0 0 S cameraserver
二.打开自己的APP:
1)添加log:
02-25 04:54:14.732 519 3572 E QCamera : <MCI><ERROR> mm_stream_read_msm_frame: 1870: [wanghl] vb.length = num_planes = 1, my_obj->frame_offset.mp[0].stride:764376, pid:519
02-25 04:54:14.734 519 3572 E QCamera : <MCI><ERROR> mm_stream_read_msm_frame: 1870: [wanghl] vb.length = num_planes = 2, my_obj->frame_offset.mp[0].stride:1920, pid:519
2)执行adb shell ps -e查询:
cameraserver 519 1 348312 58232 binder_thread_read 0 S android.hardware.camera.provider@2.4-service
cameraserver 861 1 102888 20168 binder_thread_read 0 S cameraserver
发现两个进程(两个APP)打开的camera之后,读到数据的地方都属于同一个进程camera.provider
camera打开的时候,kernel log:
[ 1301.674241] [wanghl] v4l_dqbuf
[ 1301.674674] [wanghl] v4l_dqbuf
[ 1301.678787] [wanghl] camera_v4l2_dqbuf//kernel/msm-4.9/drivers/media/v4l2-core/v4l2-ioctl.c
[ 1301.678793] [wanghl] v4l_dqbuf
[ 1301.688327] [wanghl] camera_v4l2_dqbuf
[ 1301.691417] msm_vidc: err: [wanghl] msm_vidc_dqbuf//kernel/msm-4.9/drivers/media/platform/msm/vidc_3x/msm_vidc.c
[ 1301.695574] [wanghl] v4l_dqbuf
[ 1301.699878] msm_vidc: err: [wanghl] msm_vidc_dqbuf
[ 1301.703487] [wanghl] v4l_dqbuf
[ 1301.707777] msm_vidc: err: [wanghl] msm_vidc_dqbuf
[ 1301.711419] [wanghl] v4l_dqbuf
[ 1301.715041] [wanghl] v4l_dqbuf
[ 1301.715978] [wanghl] camera_v4l2_dqbuf
[ 1301.715987] [wanghl] v4l_dqbuf
[ 1301.725566] [wanghl] camera_v4l2_dqbuf
[ 1301.728607] msm_vidc: err: [wanghl] msm_vidc_dqbuf
[ 1301.732853] [wanghl] v4l_dqbuf
[ 1301.737117] msm_vidc: err: [wanghl] msm_vidc_dqbuf
[ 1301.740279] [wanghl] v4l_dqbuf
[ 1301.745066] msm_vidc: err: [wanghl] msm_vidc_dqbuf
[ 1301.748182] [wanghl] v4l_dqbuf
[ 1301.752963] msm_vidc: err: [wanghl] msm_vidc_dqbuf
[ 1301.756088] [wanghl] v4l_dqbuf
[ 1301.760853] msm_vidc: err: [wanghl] msm_vidc_dqbuf
[ 1301.762188] [wanghl] v4l_dqbuf
[ 1301.768708] [wanghl] camera_v4l2_dqbuf
[ 1301.771923] [wanghl] v4l_dqbuf
[ 1301.775571] msm_vidc: err: [wanghl] msm_vidc_dqbuf
[ 1301.778655] [wanghl] v4l_dqbuf
[ 1301.783450] msm_vidc: err: [wanghl] msm_vidc_dqbuf
[ 1301.786538] [wanghl] v4l_dqbuf
[ 1301.786615] [wanghl] v4l_dqbuf
[ 1301.789572] [wanghl] camera_v4l2_dqbuf
[ 1301.789583] [wanghl] v4l_dqbuf
[ 1301.801202] [wanghl] camera_v4l2_dqbuf
经过测试API1和API2都可以同时打开两个camera,并且1920*1080显示,从14:17挂测到17:17三个小时没有出现卡顿,非常流畅,一丁点卡顿都没有!但是芯片很烫.志强这个测试APP只有两种流:1,7
==========================================================================
(1)
hardware/qcom/camera/QCamera2/stack/mm-camera-interface/Android.mk: error: libmmcamera_interface (native:vendor) should not link to lib_helloalgo (native:platform)
网上有碰到类似的问题:
Root Cause:
Android.mk 中写明会编译生成两个LOCAL_MODULE,第一个LOCAL_MODULE会依赖引用第二个LOCAL_MODULE。问题的关键来了,参照平台默认的 sensor HAL 代码的Android.mk,我对第一个LOCAL_MODULE新增了LOCAL_PROPRIETARY_MODULE := true 属性,
而第二个LOCAL_MODULE没有这么做。LOCAL_PROPRIETARY_MODULE是Android vendor module 必须具有的属性,此属性声明将 vendor module放入vendor分区(vendor native),未声明该属性的module将放在system分区(platform native),
且这两个分区是不能进行链接的,所以编译会报两个LOCAL_MODULE无法链接的错误。
参考:LOCAL_PROPRIETARY_MODULE
Resolution:
将移植的vendor module中的Andorid.mk 中所有LOCAL_MODULE都加上LOCAL_PROPRIETARY_MODULE := true 属性。
我们的解决方法是:LOCAL_VENDOR_MODULE := true,会生成.so到vendor目录下
(2)
ninja: error: 'out/target/product/SDM632双摄项目/obj_arm/lib/lib_helloalgo.so.toc', needed by 'out/target/product/SDM632双摄项目/obj_arm/SHARED_LIBRARIES/libmmcamera_interface_intermediates/LINKED/libmmcamera_interface.so', missing and no known rule to make it
10:27:29 ninja failed with: exit status 1
修改过后:
wanghl@wanghl-HP:~/code/new_disk/m610_sdm632_la101c29_android$ find out/target/product/SDM632双摄项目 -name "lib_helloalgo.so" 2>/dev/null
out/target/product/SDM632双摄项目/symbols/vendor/lib64/lib_helloalgo.so
out/target/product/SDM632双摄项目/symbols/vendor/lib/lib_helloalgo.so
out/target/product/SDM632双摄项目/obj_arm/lib/lib_helloalgo.so
out/target/product/SDM632双摄项目/obj_arm/SHARED_LIBRARIES/lib_helloalgo_intermediates/LINKED/lib_helloalgo.so
out/target/product/SDM632双摄项目/obj_arm/SHARED_LIBRARIES/lib_helloalgo_intermediates/PACKED/lib_helloalgo.so
out/target/product/SDM632双摄项目/obj_arm/SHARED_LIBRARIES/lib_helloalgo_intermediates/lib_helloalgo.so
out/target/product/SDM632双摄项目/obj/SHARED_LIBRARIES/lib_helloalgo_intermediates/LINKED/lib_helloalgo.so
out/target/product/SDM632双摄项目/obj/SHARED_LIBRARIES/lib_helloalgo_intermediates/PACKED/lib_helloalgo.so
out/target/product/SDM632双摄项目/obj/SHARED_LIBRARIES/lib_helloalgo_intermediates/lib_helloalgo.so
out/target/product/SDM632双摄项目/vendor/lib64/lib_helloalgo.so//64位库文件也有
out/target/product/SDM632双摄项目/vendor/lib/lib_helloalgo.so//32位库文件有
//
wanghl@wanghl-HP:~/code/new_disk/m610_sdm632_la101c29_android$ find out/target/product/SDM632双摄项目 -name "lib_helloalgo.so.toc" 2>/dev/null
out/target/product/SDM632双摄项目/obj_arm/lib/lib_helloalgo.so.toc//加了LOCAL_VENDOR_MODULE := true之后就有了,生成的so在lib下
out/target/product/SDM632双摄项目/obj_arm/SHARED_LIBRARIES/lib_helloalgo_intermediates/lib_helloalgo.so.toc
out/target/product/SDM632双摄项目/obj/SHARED_LIBRARIES/lib_helloalgo_intermediates/lib_helloalgo.so.toc
(3)
脚本编译模块:
1)
./oem_build.sh SDM632 -m libmmcamera_interface
2)
./oem_build.sh SDM632 -p hardware/qcom/camera/QCamera2/stack/mm-camera-interface/
(4)
03-05 06:19:09.257 2218 2218 E vndksupport: Could not load /vendor/lib/hw/camera.msm8953.so from default namespace: dlopen failed: library "lib_helloalgo.so" not found.
网上的类似问题:
供应商原生开发套件,Vendor Native Development Kit,简称VNDK.
问题引发:
vndksupport:Could not load demo.so from default namespace:dlopen failed: library “libstdc++.so” not found.
demo.so位于vendor下面,找不到libstdc++.so,而libstdc++.so的库位于system/lib和system/lib64中。
这说明vendor下面的库引用libstdc++.so时,无法找到system/lib中的libstdc++.so。
后来生成到vendor下,全编,镜像全烧之后,这个lib_helloalgo.so就能被加载成功了,如果单单push这个libstdc++.so到系统目录下也是不行的
---------------------------------------------------------------
经过3.10电话沟通的结果,按照高通工程师的说法是:
1.HAL层只能拿到APP请求的stream type类型的数据,如果APP没有请求全尺寸的snapshot流,那我们也拿不到全尺寸的流;也别妄图从ISP,CPP(两个模块都会涉及到裁切和缩放)截获到数据流,因为ISP和CPP是硬件处理的,是一行一行处理数据的,不是帧数据,他的数据流不会暴露在内存里,HAL层能拿到的数据都是已经裁切缩放的YUV数据了。所以妄图在ISP或CPP里去修改帧数据是不现实的。ZSL preview是HAL1的概念,API2没有这个概念,而且HAL1也是需要APP去请求打开ZSL preview才会有。
2.一般sensor驱动里会影响到VFE/CAMIF接收数据的,影响到sensor output size与平台匹配的参数就集中在:
/* Res 1 */
{
.data_rate = 720000000ULL * 4, //mipi速率
.x_output = 1932,//有效像素
.y_output = 1094,//有效像素
.line_length_pclk = 260,
.frame_length_lines = 9240,//每帧的行数,可能会影响sensor输出大小的匹配,与line_count有计算关系,也会影响到曝光
.vt_pixel_clk = 72072000,//实际不起作用
.op_pixel_clk = 320000000,//像素时钟,也就是每秒处理多少个像素,太大太小也不行
...
},
还有就是settle_cnt(必须处于一定的范围)
3.SDM632 1080P setting点不亮,内存报SMMU的内存错误只是现象,不是原因,原因是CAMIF status出错了,就是sensor输出和平台接受参数不匹配
4.SDM632 支持三路摄像头同时打开,但是是两路RAW和一路YUV的情况,因为YUV不需要经过ISP处理,一路ISP只能处理一路流,所以632有两路ISP,只能同时处理两路流。只能同时打开两路RAW摄像头,不能说一路ISP可以处理13M,两路可以处理26M,三个IMX317,一个8M,三个也才24M,没有达到26M的上限,不能这么算,因为根本原因是两路ISP只能处理两路流
5.CSI1点不亮的现象,CSI1和CSI0和CSI2一样,没有什么特殊,可以配成任何方向的sensor,具体原因还要另外查
6.怎么用示波器查看mipi data数据有没有达到30fps,可以看一秒内有没有33个脉冲数据,也就是一个脉冲大概是33ms,有的话说明帧率是够的,没有丢帧之类的
这些值修改可能会工作但是可能会影响帧率,增加CTS测试失败的风险。
------------------------------------------------------