Android 8.1 startPreview 流程分析(一)

一、应用执行流程:

 (1)、Camera 的 startPreview() 调用流程,起点是应用层调用 Camera.startPreview();

(2)、日志分析流程图:

二、framework执行流程

 frameworks/base/core/java/android/hardware/Camera.java

 819     /**
 820      * Starts capturing and drawing preview frames to the screen.
 821      * Preview will not actually start until a surface is supplied
 822      * with {@link #setPreviewDisplay(SurfaceHolder)} or
 823      * {@link #setPreviewTexture(SurfaceTexture)}.
 824      *
 825      * <p>If {@link #setPreviewCallback(Camera.PreviewCallback)},
 826      * {@link #setOneShotPreviewCallback(Camera.PreviewCallback)}, or
 827      * {@link #setPreviewCallbackWithBuffer(Camera.PreviewCallback)} were
 828      * called, {@link Camera.PreviewCallback#onPreviewFrame(byte[], Camera)}
 829      * will be called when preview data becomes available.
 830      *
 831      * @throws RuntimeException if starting preview fails; usually this would be
 832      *    because of a hardware or other low-level error, or because release()
 833      *    has been called on this Camera instance.
 834      */
 835     public native final void startPreview();

frameworks/base/core/jni/android_hardware_Camera.cpp

   831 static void android_hardware_Camera_startPreview(JNIEnv *env, jobject thiz)
 832 {
 833     ALOGV("startPreview");
 834     sp<Camera> camera = get_native_camera(env, thiz, NULL);
 835     if (camera == 0) return;
 836
 837     if (camera->startPreview() != NO_ERROR) {
 838         jniThrowRuntimeException(env, "startPreview failed");
 839         return;
 840     }
 841 }

frameworks/av/camera/Camera.cpp

 

178 // start preview mode
179 status_t Camera::startPreview()
180 {
181     ALOGE("Camera.cpp startPreview");
182     sp <::android::hardware::ICamera> c = mCamera;
183     if (c == 0) return NO_INIT;
184     return c->startPreview();
185 }

frameworks/av/camera/ICamera.cpp

 

115     // start preview mode, must call setPreviewTarget first
116     status_t startPreview()
117     {
118         ALOGE("ICamera startPreview");
119         Parcel data, reply;
120         data.writeInterfaceToken(ICamera::getInterfaceDescriptor());
121         remote()->transact(START_PREVIEW, data, &reply);
122         return reply.readInt32();
123     }

三、Binder调用

frameworks/av/services/camera/libcameraservice/CameraClient.cpp

 369 // start preview mode
 370 status_t CameraClient::startPreview() {
 371     ALOGE("cameraclient startPreview (pid %d)", getCallingPid());
 372     android::CallStack stack;
 373     stack.update();
 374     stack.log("zjy_cameraclient_startPreview");
 375     return startCameraMode(CAMERA_PREVIEW_MODE);
 376 }

frameworks/av/services/camera/libcameraservice/CameraHardwareInterface.h

520 status_t CameraHardwareInterface::startPreview()
521 {
522     ALOGV("%s(%s)", __FUNCTION__, mName.string());
523     ALOGE("CameraHardWareInterface startPreview");
524     if (CC_LIKELY(mHidlDevice != nullptr)) {
525         return CameraProviderManager::mapToStatusT(
526                 mHidlDevice->startPreview());
527     }
528     return INVALID_OPERATION;
529 }

四、HAL
hardware/interfaces/camera/device/1.0/default/CameraDevice.cpp

 

 789 Return<Status> CameraDevice::startPreview() {
 790     ALOGV("%s(%s)", __FUNCTION__, mCameraId.c_str());
 791     ALOGE("zjy CameraDevice startPreview");
 792     Mutex::Autolock _l(mLock);
 793     if (!mDevice) {
 794         ALOGE("%s called while camera is not opened", __FUNCTION__);
 795         return Status::OPERATION_NOT_SUPPORTED;
 796     }
 797     if (mDevice->ops->start_preview) {
 798         return getHidlStatus(mDevice->ops->start_preview(mDevice));
 799     }
 800     return Status::INTERNAL_ERROR; // HAL should provide start_preview
 801 }

五、Camera.h

hardware/libhardware/include/hardware/camera.h

struct camera_device: 

  • 这里就声明了我们想要追踪的 camera_device_t 。
  • ops 对应的类型是 camera_device_ops_t ,这个结构中声明了函数指针。

285 typedef struct camera_device {
286     /**
287      * camera_device.common.version must be in the range
288      * HARDWARE_DEVICE_API_VERSION(0,0)-(1,FF). CAMERA_DEVICE_API_VERSION_1_0 is
289      * recommended.
290      */
291     hw_device_t common;
292     camera_device_ops_t *ops;
293     void *priv;
294 } camera_device_t;

 98 struct camera_device;
 99 typedef struct camera_device_ops {
100     /** Set the ANativeWindow to which preview frames are sent */
101     int (*set_preview_window)(struct camera_device *,
102             struct preview_stream_ops *window);
103
104     /** Set the notification and data callbacks */
105     void (*set_callbacks)(struct camera_device *,
106             camera_notify_callback notify_cb,
107             camera_data_callback data_cb,
108             camera_data_timestamp_callback data_cb_timestamp,
109             camera_request_memory get_memory,
110             void *user);
111
112     /**
113      * The following three functions all take a msg_type, which is a bitmask of
114      * the messages defined in include/ui/Camera.h
115      */
116
117     /**
118      * Enable a message, or set of messages.
119      */
120     void (*enable_msg_type)(struct camera_device *, int32_t msg_type);
121
122     /**
123      * Disable a message, or a set of messages.
124      *
125      * Once received a call to disableMsgType(CAMERA_MSG_VIDEO_FRAME), camera
126      * HAL should not rely on its client to call releaseRecordingFrame() to
127      * release video recording frames sent out by the cameral HAL before and
128      * after the disableMsgType(CAMERA_MSG_VIDEO_FRAME) call. Camera HAL
129      * clients must not modify/access any video recording frame after calling
130      * disableMsgType(CAMERA_MSG_VIDEO_FRAME).
131      */

132     void (*disable_msg_type)(struct camera_device *, int32_t msg_type);
133
134     /**
135      * Query whether a message, or a set of messages, is enabled.  Note that
136      * this is operates as an AND, if any of the messages queried are off, this
137      * will return false.
138      */
139     int (*msg_type_enabled)(struct camera_device *, int32_t msg_type);
140
141     /**
142      * Start preview mode.
143      */
144     int (*start_preview)(struct camera_device *);
145
146     /**
147      * Stop a previously started preview.
148      */
149     void (*stop_preview)(struct camera_device *);
150
151     /**
152      * Returns true if preview is enabled.
153      */
154     int (*preview_enabled)(struct camera_device *);
155
156     /**
157      * Request the camera HAL to store meta data or real YUV data in the video
158      * buffers sent out via CAMERA_MSG_VIDEO_FRAME for a recording session. If
159      * it is not called, the default camera HAL behavior is to store real YUV
160      * data in the video buffers.
161      *
162      * This method should be called before startRecording() in order to be
163      * effective.
164      *
165      * If meta data is stored in the video buffers, it is up to the receiver of
166      * the video buffers to interpret the contents and to find the actual frame
167      * data with the help of the meta data in the buffer. How this is done is
168      * outside of the scope of this method.
169      *
170      * Some camera HALs may not support storing meta data in the video buffers,
171      * but all camera HALs should support storing real YUV data in the video
172      * buffers. If the camera HAL does not support storing the meta data in the
173      * video buffers when it is requested to do do, INVALID_OPERATION must be
174      * returned. It is very useful for the camera HAL to pass meta data rather
175      * than the actual frame data directly to the video encoder, since the
176      * amount of the uncompressed frame data can be very large if video size is

177      * large.
178      *
179      * @param enable if true to instruct the camera HAL to store
180      *        meta data in the video buffers; false to instruct
181      *        the camera HAL to store real YUV data in the video
182      *        buffers.
183      *
184      * @return OK on success.
185      */
186     int (*store_meta_data_in_buffers)(struct camera_device *, int enable);
187
188     /**
189      * Start record mode. When a record image is available, a
190      * CAMERA_MSG_VIDEO_FRAME message is sent with the corresponding
191      * frame. Every record frame must be released by a camera HAL client via
192      * releaseRecordingFrame() before the client calls
193      * disableMsgType(CAMERA_MSG_VIDEO_FRAME). After the client calls
194      * disableMsgType(CAMERA_MSG_VIDEO_FRAME), it is the camera HAL's
195      * responsibility to manage the life-cycle of the video recording frames,
196      * and the client must not modify/access any video recording frames.
197      */
198     int (*start_recording)(struct camera_device *);
199
200     /**
201      * Stop a previously started recording.
202      */
203     void (*stop_recording)(struct camera_device *);
204
205     /**
206      * Returns true if recording is enabled.
207      */
208     int (*recording_enabled)(struct camera_device *);
209
210     /**
211      * Release a record frame previously returned by CAMERA_MSG_VIDEO_FRAME.
212      *
213      * It is camera HAL client's responsibility to release video recording
214      * frames sent out by the camera HAL before the camera HAL receives a call
215      * to disableMsgType(CAMERA_MSG_VIDEO_FRAME). After it receives the call to
216      * disableMsgType(CAMERA_MSG_VIDEO_FRAME), it is the camera HAL's

217      * responsibility to manage the life-cycle of the video recording frames.
218      */
219     void (*release_recording_frame)(struct camera_device *,
220                     const void *opaque);
221
222     /**
223      * Start auto focus, the notification callback routine is called with
224      * CAMERA_MSG_FOCUS once when focusing is complete. autoFocus() will be
225      * called again if another auto focus is needed.
226      */
227     int (*auto_focus)(struct camera_device *);
228
229     /**
230      * Cancels auto-focus function. If the auto-focus is still in progress,
231      * this function will cancel it. Whether the auto-focus is in progress or
232      * not, this function will return the focus position to the default.  If
233      * the camera does not support auto-focus, this is a no-op.
234      */
235     int (*cancel_auto_focus)(struct camera_device *);
236
237     /**
238      * Take a picture.
239      */
240     int (*take_picture)(struct camera_device *);
241
242     /**
243      * Cancel a picture that was started with takePicture. Calling this method
244      * when no picture is being taken is a no-op.
245      */
246     int (*cancel_picture)(struct camera_device *);
247
248     /**
249      * Set the camera parameters. This returns BAD_VALUE if any parameter is
250      * invalid or not supported.
251      */
252     int (*set_parameters)(struct camera_device *, const char *parms);
253
254     /** Retrieve the camera parameters.  The buffer returned by the camera HAL
255         must be returned back to it with put_parameters, if put_parameters
256         is not NULL.
257      */
258     char *(*get_parameters)(struct camera_device *);
259
260     /** The camera HAL uses its own memory to pass us the parameters when we
261         call get_parameters.  Use this function to return the memory back to

262         the camera HAL, if put_parameters is not NULL.  If put_parameters
263         is NULL, then you have to use free() to release the memory.
264     */
265     void (*put_parameters)(struct camera_device *, char *);
266
267     /**
268      * Send command to camera driver.
269      */
270     int (*send_command)(struct camera_device *,
271                 int32_t cmd, int32_t arg1, int32_t arg2);
272
273     /**
274      * Release the hardware resources owned by this object.  Note that this is
275      * *not* done in the destructor.
276      */
277     void (*release)(struct camera_device *);
278
279     /**
280      * Dump state of the camera hardware
281      */
282     int (*dump)(struct camera_device *, int fd);
283 } camera_device_ops_t;

在 Linux 下用 find . -name "*.cpp" | xargs grep "start_preview =" 可以找到一些对应的文件,这些文件所处的位置与具体的设备商有关。

/hardware/qcom/camera/QCamera2/HAL/QCamera2HWI.cpp

   93 camera_device_ops_t QCamera2HardwareInterface::mCameraOps = {
   94     .set_preview_window =        QCamera2HardwareInterface::set_preview_window,
   95     .set_callbacks =             QCamera2HardwareInterface::set_CallBacks,
   96     .enable_msg_type =           QCamera2HardwareInterface::enable_msg_type,
   97     .disable_msg_type =          QCamera2HardwareInterface::disable_msg_type,
   98     .msg_type_enabled =          QCamera2HardwareInterface::msg_type_enabled,
   99
  100     .start_preview =             QCamera2HardwareInterface::start_preview,
  101     .stop_preview =              QCamera2HardwareInterface::stop_preview,
  102     .preview_enabled =           QCamera2HardwareInterface::preview_enabled,
  103     .store_meta_data_in_buffers= QCamera2HardwareInterface::store_meta_data_in_buffers,
  104
  105     .start_recording =           QCamera2HardwareInterface::start_recording,
  106     .stop_recording =            QCamera2HardwareInterface::stop_recording,
  107     .recording_enabled =         QCamera2HardwareInterface::recording_enabled,
  108     .release_recording_frame =   QCamera2HardwareInterface::release_recording_frame,
  109
  110     .auto_focus =                QCamera2HardwareInterface::auto_focus,
  111     .cancel_auto_focus =         QCamera2HardwareInterface::cancel_auto_focus,
  112
  113     .take_picture =              QCamera2HardwareInterface::take_picture,
  114     .cancel_picture =            QCamera2HardwareInterface::cancel_picture,
  115
  116     .set_parameters =            QCamera2HardwareInterface::set_parameters,
  117     .get_parameters =            QCamera2HardwareInterface::get_parameters,
  118     .put_parameters =            QCamera2HardwareInterface::put_parameters,
  119     .send_command =              QCamera2HardwareInterface::send_command,
  120
  121     .release =                   QCamera2HardwareInterface::release,
  122     .dump =                      QCamera2HardwareInterface::dump,
  123 };

-----------------------------------------------------------------------------------------------------------------------------------

366 int QCamera2HardwareInterface::start_preview(struct camera_device *device)
  367 {
  368     KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL1_START_PREVIEW);
  369     int ret = NO_ERROR;
  370     QCamera2HardwareInterface *hw =
  371         reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
  372     if (!hw) {
  373         LOGE("NULL camera device");
  374         return BAD_VALUE;
  375     }
  376     LOGI("[KPI Perf]: E PROFILE_START_PREVIEW camera id %d",
  377              hw->getCameraId());
  378
  379     hw->m_perfLockMgr.acquirePerfLockIfExpired(PERF_LOCK_START_PREVIEW);
  380     hw->lockAPI();
  381     qcamera_api_result_t apiResult;
  382     qcamera_sm_evt_enum_t evt = QCAMERA_SM_EVT_START_PREVIEW;
  383     uint32_t cam_type = CAM_TYPE_MAIN;
  384     if (hw->isDualCamera()) {
  385         cam_type = MM_CAMERA_DUAL_CAM;
  386     }
  387     if (hw->isNoDisplayMode(cam_type)) {
  388         evt = QCAMERA_SM_EVT_START_NODISPLAY_PREVIEW;
  389     }
  390     ret = hw->processAPI(evt, NULL);
  391     if (ret == NO_ERROR) {
  392         hw->waitAPIResult(evt, &apiResult);
  393         ret = apiResult.status;
  394     }
  395     hw->unlockAPI();
  396     LOGI("[KPI Perf]: X ret = %d", ret);
  397     return ret;
  398 }

------------------------------------------------------------------------------------------------------------------------------------------

6696 /*===========================================================================
 6697  * FUNCTION   : processAPI
 6698  *
 6699  * DESCRIPTION: process API calls from upper layer
 6700  *
 6701  * PARAMETERS :
 6702  *   @api         : API to be processed
 6703  *   @api_payload : ptr to API payload if any
 6704  *
 6705  * RETURN     : int32_t type of status
 6706  *              NO_ERROR  -- success
 6707  *              none-zero failure code
 6708  *==========================================================================*/
 6709 int QCamera2HardwareInterface::processAPI(qcamera_sm_evt_enum_t api, void *api_payload)
 6710 {
 6711     int ret = DEAD_OBJECT;
 6712
 6713     if (m_smThreadActive) {
 6714         ret = m_stateMachine.procAPI(api, api_payload);
 6715     }
 6716
 6717     return ret;
 6718 }

这时候就调用到了状态机的处理代码:

/hardware/qcom/camera/QCamera2/HAL/QCameraStateMachine.cpp

 215 /*===========================================================================
 216  * FUNCTION   : procAPI
 217  *
 218  * DESCRIPTION: process incoming API request from framework layer.
 219  *
 220  * PARAMETERS :
 221  *   @evt          : event to be processed
 222  *   @api_payload  : API payload. Can be NULL if not needed.
 223  *
 224  * RETURN     : int32_t type of status
 225  *              NO_ERROR  -- success
 226  *              none-zero failure code
 227  *==========================================================================*/
 228 int32_t QCameraStateMachine::procAPI(qcamera_sm_evt_enum_t evt,
 229                                      void *api_payload)
 230 {
 231     qcamera_sm_cmd_t *node =
 232         (qcamera_sm_cmd_t *)malloc(sizeof(qcamera_sm_cmd_t));
 233     if (NULL == node) {
 234         LOGE("No memory for qcamera_sm_cmd_t");
 235         return NO_MEMORY;
 236     }
 237
 238     memset(node, 0, sizeof(qcamera_sm_cmd_t));
 239     node->cmd = QCAMERA_SM_CMD_TYPE_API;
 240     node->evt = evt;
 241     node->evt_payload = api_payload;
 242     if (api_queue.enqueue((void *)node)) {
 243         cam_sem_post(&cmd_sem);
 244         return NO_ERROR;
 245     } else {
 246         LOGE("API enqueue failed API = %d", evt);
 247         free(node);
 248         return UNKNOWN_ERROR;
 249     }
 250 }

 57 void *QCameraStateMachine::smEvtProcRoutine(void *data)
  58 {
  59     android::CallStack stack;
  60     stack.update();
  61     stack.log("zjy_smEvtProcRoutine");
  62     int running = 1, ret;
  63     QCameraStateMachine *pme = (QCameraStateMachine *)data;
  64
  65     LOGH("E");
  66     do {
  67         do {
  68             ret = cam_sem_wait(&pme->cmd_sem);
  69             if (ret != 0 && errno != EINVAL) {
  70                 LOGE("cam_sem_wait error (%s)",
  71                             strerror(errno));
  72                 return NULL;
  73             }
  74         } while (ret != 0);
  75
  76         // we got notified about new cmd avail in cmd queue
  77         // first check API cmd queue
  78         qcamera_sm_cmd_t *node = (qcamera_sm_cmd_t *)pme->api_queue.dequeue();
  79         if (node == NULL) {
  80             // no API cmd, then check evt cmd queue
  81             node = (qcamera_sm_cmd_t *)pme->evt_queue.dequeue();
  82         }
  83         LOGE("zjy smEvtProcRountine cmd:%d",node->cmd);
  84         if (node != NULL) {
  85             switch (node->cmd) {
  86             case QCAMERA_SM_CMD_TYPE_API:
  87                 pme->stateMachine(node->evt, node->evt_payload);
  88                 // API is in a way sync call, so evt_payload is managed by HWI

  89                 // no need to free payload for API
  90                 break;
  91             case QCAMERA_SM_CMD_TYPE_EVT:
  92                 pme->stateMachine(node->evt, node->evt_payload);
  93
  94                 // EVT is async call, so payload need to be free after use
  95                 free(node->evt_payload);
  96                 node->evt_payload = NULL;
  97                 break;
  98             case QCAMERA_SM_CMD_TYPE_EXIT:
  99                 running = 0;
 100                 break;
 101             default:
 102                 break;
 103             }
 104             free(node);
 105             node = NULL;
 106         }
 107     } while (running);
 108     LOGH("X");
 109     return NULL;
 110 }

-------------------------------------------------------------------------------------------------------------------------

 304 int32_t QCameraStateMachine::stateMachine(qcamera_sm_evt_enum_t evt, void *payload)
 305 {
 306     int32_t rc = NO_ERROR;
 307     LOGL("m_state %d, event (%d)", m_state, evt);
 308     switch (m_state) {
 309     case QCAMERA_SM_STATE_PREVIEW_STOPPED:
 310         rc = procEvtPreviewStoppedState(evt, payload);
 311         break;
 312     case QCAMERA_SM_STATE_PREVIEW_READY:
 313         rc = procEvtPreviewReadyState(evt, payload);
 314         break;
 315     case QCAMERA_SM_STATE_PREVIEWING:
 316         rc = procEvtPreviewingState(evt, payload);
 317         break;
 318     case QCAMERA_SM_STATE_PREPARE_SNAPSHOT:
 319         rc = procEvtPrepareSnapshotState(evt, payload);
 320         break;
 321     case QCAMERA_SM_STATE_PIC_TAKING:
 322         rc = procEvtPicTakingState(evt, payload);
 323         break;
 324     case QCAMERA_SM_STATE_RECORDING:
 325         rc = procEvtRecordingState(evt, payload);
 326         break;
 327     case QCAMERA_SM_STATE_VIDEO_PIC_TAKING:
 328         rc = procEvtVideoPicTakingState(evt, payload);
 329         break;
 330     case QCAMERA_SM_STATE_PREVIEW_PIC_TAKING:
 331         rc = procEvtPreviewPicTakingState(evt, payload);
 332         break;
 333     default:
 334         break;
 335     }

 336
 337     if (m_parent->isDualCamera()) {
 338         /* Update the FOVControl dual camera result state based on the current state.
 339          Update the result only in previewing and recording states */
 340         bool updateResultState = false;
 341         if (((m_state == QCAMERA_SM_STATE_PREVIEWING) ||
 342                 (m_state == QCAMERA_SM_STATE_RECORDING)) && !m_parent->mActiveAF
 343                 && !m_parent->m_bPreparingHardware) {
 344             updateResultState = true;
 345         }
 346         m_parent->m_pFovControl->UpdateFlag(FOVCONTROL_FLAG_UPDATE_RESULT_STATE,
 347                 &updateResultState);
 348     }           
 349
 350     return rc;
 351 } 

-------------------------------------------------------------------------------------------------------------------------------------------------

367 int32_t QCameraStateMachine::procEvtPreviewStoppedState(qcamera_sm_evt_enum_t evt,
 368                                                         void *payload)
 369 {
 370     int32_t rc = NO_ERROR;
 371     qcamera_api_result_t result;
 372     memset(&result, 0, sizeof(qcamera_api_result_t));
 373
 374     LOGL("event (%d)", evt);
 375     switch (evt) {
 376     case QCAMERA_SM_EVT_SET_PREVIEW_WINDOW:
 377         {
 378             rc = m_parent->setPreviewWindow((struct preview_stream_ops *)payload);
 379             result.status = rc;
 380             result.request_api = evt;
 381             result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
 382             m_parent->signalAPIResult(&result);
 383         }
 384         break;
 385     case QCAMERA_SM_EVT_SET_CALLBACKS:
 386         {
 387             qcamera_sm_evt_setcb_payload_t *setcbs =
 388                 (qcamera_sm_evt_setcb_payload_t *)payload;
 389             rc = m_parent->setCallBacks(setcbs->notify_cb,
 390                                         setcbs->data_cb,
 391                                         setcbs->data_cb_timestamp,
 392                                         setcbs->get_memory,
 393                                         setcbs->user);
 394             result.status = rc;
 395             result.request_api = evt;
 396             result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
 397             m_parent->signalAPIResult(&result);
 398         }
 399         break;
 400     case QCAMERA_SM_EVT_ENABLE_MSG_TYPE:
 401         {
 402             rc = m_parent->waitDeferredWork(m_parent->mParamInitJob);
 403             if (NO_ERROR != rc) {
 404                 LOGE("Param init deferred work failed");
 405             } else {
 406                 rc = m_parent->enableMsgType(*((int32_t *)payload));
 407             }
 408             result.status = rc;
 409             result.request_api = evt;
 410             result.result_type = QCAMERA_API_RESULT_TYPE_DEF;

411             m_parent->signalAPIResult(&result);
 412         }
 413         break;
 414     case QCAMERA_SM_EVT_DISABLE_MSG_TYPE:
 415         {
 416             rc = m_parent->waitDeferredWork(m_parent->mParamInitJob);
 417             if (NO_ERROR != rc) {
 418                 LOGE("Param init deferred work failed");
 419             } else {
 420                 rc = m_parent->disableMsgType(*((int32_t *)payload));
 421             }
 422             result.status = rc;
 423             result.request_api = evt;
 424             result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
 425             m_parent->signalAPIResult(&result);
 426         }
 427         break;
 428     case QCAMERA_SM_EVT_MSG_TYPE_ENABLED:
 429         {
 430             int enabled = m_parent->msgTypeEnabled(*((int32_t *)payload));
 431             result.status = rc;
 432             result.request_api = evt;
 433             result.result_type = QCAMERA_API_RESULT_TYPE_ENABLE_FLAG;
 434             result.enabled = enabled;
 435             m_parent->signalAPIResult(&result);
 436         }
 437         break;
 438     case QCAMERA_SM_EVT_SET_PARAMS:
 439         {
 440             bool needRestart = false;
 441
 442             rc = m_parent->waitDeferredWork(m_parent->mParamInitJob);
 443             if (NO_ERROR != rc) {
 444                 LOGE("Param init deferred work failed");
 445             } else {
 446                 rc = m_parent->updateParameters((char*)payload, needRestart);
 447             }
 448             result.status = rc;
 449             result.request_api = evt;
 450             result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
 451             m_parent->signalAPIResult(&result);

452         }
 453         break;
 454     case QCAMERA_SM_EVT_SET_PARAMS_STOP:
 455         {
 456             m_parent->m_memoryPool.clear();
 457             result.status = rc;
 458             result.request_api = evt;
 459             result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
 460             m_parent->signalAPIResult(&result);
 461         }
 462         break;
 463     case QCAMERA_SM_EVT_SET_PARAMS_COMMIT:
 464         {
 465             rc = m_parent->commitParameterChanges();
 466             result.status = rc;
 467             result.request_api = evt;
 468             result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
 469             m_parent->signalAPIResult(&result);
 470         }
 471         break;
 472     case QCAMERA_SM_EVT_SET_PARAMS_RESTART:
 473         {
 474             m_parent->setNeedRestart(false);
 475             result.status            = rc;
 476             result.request_api       = evt;
 477             result.result_type       = QCAMERA_API_RESULT_TYPE_DEF;
 478             m_parent->signalAPIResult(&result);
 479         }
 480         break;
 481     case QCAMERA_SM_EVT_GET_PARAMS:
 482         {
 483             rc = m_parent->waitDeferredWork(m_parent->mParamInitJob);
 484             if (NO_ERROR != rc) {
 485                 LOGE("Param init deferred work failed");
 486                 char* nullParams = (char *)malloc(1);
 487                 if (nullParams) {
 488                     memset(nullParams, 0, 1);
 489                 }
 490                 result.params = nullParams;
 491             } else {
 492                 result.params = m_parent->getParameters();
 493             }
 494             rc = result.params ? NO_ERROR : UNKNOWN_ERROR;
 495             result.status = rc;
 496             result.request_api = evt;

497             result.result_type = QCAMERA_API_RESULT_TYPE_PARAMS;
 498             m_parent->signalAPIResult(&result);
 499         }
 500         break;
 501     case QCAMERA_SM_EVT_PUT_PARAMS:
 502         {
 503             rc = m_parent->putParameters((char*)payload);
 504             result.status = rc;
 505             result.request_api = evt;
 506             result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
 507             m_parent->signalAPIResult(&result);
 508         }
 509         break;
 510     case QCAMERA_SM_EVT_PREPARE_PREVIEW:
 511         {
 512             rc = m_parent->waitDeferredWork(m_parent->mParamInitJob);
 513             if (NO_ERROR != rc) {
 514                 LOGE("Param init deferred work failed");
 515             } else {
 516                 rc = m_parent->preparePreview();
 517             }
 518             if (rc == NO_ERROR) {
 519                 //prepare preview success, move to ready state
 520                 m_state = QCAMERA_SM_STATE_PREVIEW_READY;
 521             }
 522             result.status = rc;
 523             result.request_api = evt;
 524             result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
 525             m_parent->signalAPIResult(&result);
 526         }
 527         break;
 528     case QCAMERA_SM_EVT_START_PREVIEW:
 529         {
 530             rc = m_parent->waitDeferredWork(m_parent->mParamInitJob);
 531             if (NO_ERROR != rc) {
 532                 LOGE("Param init deferred work failed");
 533             } else if (m_parent->mPreviewWindow == NULL) {
 534                 rc = m_parent->preparePreview();
 535                 if(rc == NO_ERROR) {
 536                     // preview window is not set yet, move to previewReady state
 537                     m_state = QCAMERA_SM_STATE_PREVIEW_READY;
 538                 } else {
 539                     LOGE("preparePreview failed");
 540                 }

541             } else {
 542                 rc = m_parent->preparePreview();
 543                 if (rc == NO_ERROR) {
 544                     applyDelayedMsgs();
 545                     rc = m_parent->startPreview();
 546                     if (rc != NO_ERROR) {
 547                         m_parent->unpreparePreview();
 548                     } else {
 549                         // start preview success, move to previewing state
 550                         m_state = QCAMERA_SM_STATE_PREVIEWING;
 551                     }
 552                 }
 553             }
 554             result.status = rc;
 555             result.request_api = evt;
 556             result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
 557             m_parent->signalAPIResult(&result);
 558         }
 559         break;
 560     case QCAMERA_SM_EVT_START_NODISPLAY_PREVIEW:
 561         {
 562             rc = m_parent->waitDeferredWork(m_parent->mParamInitJob);
 563             if (NO_ERROR != rc) {
 564                 LOGE("Param init deferred work failed");
 565             } else {
 566                 rc = m_parent->preparePreview();
 567             }
 568             if (rc == NO_ERROR) {
 569                 applyDelayedMsgs();
 570                 rc = m_parent->startPreview();
 571                 if (rc != NO_ERROR) {
 572                     m_parent->unpreparePreview();
 573                 } else {
 574                     m_state = QCAMERA_SM_STATE_PREVIEWING;
 575                 }
 576             }
 577             result.status = rc;
 578             result.request_api = evt;
 579             result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
 580             m_parent->signalAPIResult(&result);
 581         }
 582     break;
 583     case QCAMERA_SM_EVT_STOP_PREVIEW:
 584         {
 585             // no op needed here
 586             LOGW("already in preview stopped state, do nothing");
 587             result.status = NO_ERROR;
 588             result.request_api = evt;
 589             result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
 590             m_parent->signalAPIResult(&result);
 591         }
 592         break;
 593     case QCAMERA_SM_EVT_PREVIEW_ENABLED:
 594     case QCAMERA_SM_EVT_RECORDING_ENABLED:
 595         {
 596             result.status = NO_ERROR;
 597             result.request_api = evt;
 598             result.result_type = QCAMERA_API_RESULT_TYPE_ENABLE_FLAG;
 599             result.enabled = 0;
 600             m_parent->signalAPIResult(&result);
 601         }
 602         break;
 603     case QCAMERA_SM_EVT_RELEASE:
 604         {
 605             rc = m_parent->release();
 606             result.status = rc;
 607             result.request_api = evt;
 608             result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
 609             m_parent->signalAPIResult(&result);
 610         }
 611         break;
 612     case QCAMERA_SM_EVT_STORE_METADATA_IN_BUFS:
 613         {
 614             rc = m_parent->storeMetaDataInBuffers(*((int *)payload));
 615             result.status = rc;
 616             result.request_api = evt;
 617             result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
 618             m_parent->signalAPIResult(&result);
 619         }
 620         break;
 621     case QCAMERA_SM_EVT_DUMP:
 622         {
 623             rc = m_parent->waitDeferredWork(m_parent->mParamInitJob);
 624             if (NO_ERROR != rc) {
 625                 LOGE("Param init deferred work failed");
 626             } else {
 627                 rc = m_parent->dump(*((int *)payload));
 628             }
 629             result.status = rc;
 630             result.request_api = evt;

 631             result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
 632             m_parent->signalAPIResult(&result);
 633         }
 634         break;
 635     case QCAMERA_SM_EVT_SEND_COMMAND:
 636         {
 637             rc = m_parent->waitDeferredWork(m_parent->mParamInitJob);
 638             if (NO_ERROR != rc) {
 639                 LOGE("Param init deferred work failed");
 640             } else {
 641                 qcamera_sm_evt_command_payload_t *cmd_payload =
 642                         (qcamera_sm_evt_command_payload_t *)payload;
 643                 rc = m_parent->sendCommand(cmd_payload->cmd,
 644                         cmd_payload->arg1,
 645                         cmd_payload->arg2);
 646             }
 647             result.status = rc;
 648             result.request_api = evt;
 649             result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
 650             m_parent->signalAPIResult(&result);
 651         }
 652         break;
 653     case QCAMERA_SM_EVT_RELEASE_RECORIDNG_FRAME:
 654         {
 655             LOGW("Free video handle %d %d", evt, m_state);
 656             QCameraVideoMemory::closeNativeHandle((const void *)payload);
 657         }
 658     case QCAMERA_SM_EVT_PRE_START_RECORDING:
 659     case QCAMERA_SM_EVT_RESTART_STOP_PREVIEW:
 660     case QCAMERA_SM_EVT_RESTART_START_PREVIEW:
 661     case QCAMERA_SM_EVT_START_RECORDING:
 662     case QCAMERA_SM_EVT_STOP_RECORDING:
 663     case QCAMERA_SM_EVT_PREPARE_SNAPSHOT:
 664     case QCAMERA_SM_EVT_PRE_TAKE_PICTURE:
 665     case QCAMERA_SM_EVT_TAKE_PICTURE:
 666         {
 667             LOGE("Error!! cannot handle evt(%d) in state(%d)", evt, m_state);
 668             rc = INVALID_OPERATION;
 669             result.status = rc;
 670             result.request_api = evt;
 671             result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
 672             m_parent->signalAPIResult(&result);
 673         }
 674         break;
 675     case QCAMERA_SM_EVT_START_AUTO_FOCUS:

 676     case QCAMERA_SM_EVT_CANCEL_PICTURE:
 677         {
 678             // no op needed here
 679             LOGW("No ops for evt(%d) in state(%d)", evt, m_state);
 680             result.status = NO_ERROR;
 681             result.request_api = evt;
 682             result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
 683             m_parent->signalAPIResult(&result);
 684         }
 685         break;
 686     case QCAMERA_SM_EVT_STOP_AUTO_FOCUS:
 687         {
 688             rc = m_parent->waitDeferredWork(m_parent->mParamInitJob);
 689             if (NO_ERROR != rc) {
 690                 LOGE("Param init deferred work failed");
 691             } else {
 692                 rc = m_parent->cancelAutoFocus();
 693             }
 694             result.status = rc;
 695             result.request_api = evt;
 696             result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
 697             m_parent->signalAPIResult(&result);
 698         }
 699         break;
 700     case QCAMERA_SM_EVT_REG_FACE_IMAGE:
 701         {
 702             int32_t faceID = 0;
 703             qcamera_sm_evt_reg_face_payload_t *reg_payload =
 704                 (qcamera_sm_evt_reg_face_payload_t *)payload;
 705             rc = m_parent->registerFaceImage(reg_payload->img_ptr,
 706                                              reg_payload->config,
 707                                              faceID);
 708             result.status = rc;
 709             result.request_api = evt;
 710             result.result_type = QCAMERA_API_RESULT_TYPE_HANDLE;
 711             result.handle = faceID;
 712             m_parent->signalAPIResult(&result);
 713         }
 714         break;
 715     case QCAMERA_SM_EVT_THERMAL_NOTIFY:
 716         {
 717             rc = m_parent->waitDeferredWork(m_parent->mParamInitJob);
 718             if (NO_ERROR != rc) {
 719                 LOGE("Param init deferred work failed");
 720             } else {

 721                 rc = m_parent->updateThermalLevel(payload);
 722             }
 723         }
 724         break;
 725     case QCAMERA_SM_EVT_EVT_NOTIFY:
 726         {
 727             mm_camera_event_t *cam_evt = (mm_camera_event_t *)payload;
 728             switch (cam_evt->server_event_type) {
 729             case CAM_EVENT_TYPE_DAEMON_DIED:
 730                 {
 731                     m_parent->sendEvtNotify(CAMERA_MSG_ERROR,
 732                                             CAMERA_ERROR_SERVER_DIED,
 733                                             0);
 734                 }
 735                 break;
 736             default:
 737                 LOGE("Invalid internal event %d in state(%d)",
 738                              cam_evt->server_event_type, m_state);
 739                 break;
 740             }
 741         }
 742         break;
 743     case QCAMERA_SM_EVT_SNAPSHOT_DONE:
 744         {
 745             // No ops, but need to notify
 746             LOGW("Cannot handle evt(%d) in state(%d)", evt, m_state);
 747             result.status = rc;
 748             result.request_api = evt;
 749             result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
 750             m_parent->signalEvtResult(&result);
 751         }
 752        break;
 753     case QCAMERA_SM_EVT_EVT_INTERNAL:
 754        {
 755            qcamera_sm_internal_evt_payload_t *internal_evt =
 756                (qcamera_sm_internal_evt_payload_t *)payload;
 757            switch (internal_evt->evt_type) {
 758            case QCAMERA_INTERNAL_EVT_LED_MODE_OVERRIDE:
 759                rc = m_parent->waitDeferredWork(m_parent->mParamInitJob);
 760                if (NO_ERROR != rc) {
 761                    LOGE("Param init deferred work failed");
 762                } else {
 763                    rc = m_parent->mParameters.updateFlashMode(internal_evt->led_data);
 764                }
 765                break;

 766            default:
 767                LOGW("Cannot handle evt(%d) in state(%d)", evt, m_state);
 768                break;
 769            }
 770        }
 771        break;
 772     case QCAMERA_SM_EVT_JPEG_EVT_NOTIFY:
 773     default:
 774         LOGW("Cannot handle evt(%d) in state(%d)", evt, m_state);
 775         break;
 776     }
 777
 778     return rc;
 779 }

------------------------------------------------------------------------------------------------------------------------------------------

然后调到/hardware/qcom/camera/QCamera2/HAL/QCamera2HWI.cpp中

4038 int QCamera2HardwareInterface::startPreview()
 4039 {
 4040     KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL1_STARTPREVIEW);
 4041     int32_t rc = NO_ERROR;
 4042     android::CallStack stack;
 4043     stack.update();
 4044     stack.log("zjy_startPreview");
 4045     LOGI("zjy startPreview E ZSL = %d Recording Hint = %d, HDR enable:%d", mParameters.isZSLMode(),
 4046             mParameters.getRecordingHintValue(),mParameters.isHDREnabled());
 4047
 4048     m_perfLockMgr.acquirePerfLockIfExpired(PERF_LOCK_START_PREVIEW);
 4049
 4050     updateThermalLevel((void *)&mThermalLevel);
 4051
 4052     setDisplayFrameSkip();
 4053
 4054     // start preview stream
 4055     if (mParameters.isZSLMode() && mParameters.getRecordingHintValue() != true) {
 4056         rc = startChannel(QCAMERA_CH_TYPE_ZSL);
 4057     } else if (isSecureMode()) {
 4058         if (mParameters.getSecureStreamType() == CAM_STREAM_TYPE_RAW && !isRdiMode()) {
 4059             rc = startChannel(QCAMERA_CH_TYPE_RAW);
 4060         }else {
 4061             rc = startChannel(QCAMERA_CH_TYPE_PREVIEW);
 4062         }
 4063     } else {
 4064         rc = startChannel(QCAMERA_CH_TYPE_PREVIEW);
 4065     }
 4066
 4067     if (isDualCamera()) {
 4068         if (rc == NO_ERROR) {
 4069             mParameters.setDCDeferCamera(CAM_DEFER_PROCESS);
 4070             mParameters.setDCLowPowerMode(mActiveCameras);
 4071         } else {
 4072             mParameters.setDCDeferCamera(CAM_DEFER_FLUSH);
 4073         }
 4074     }
 4075
 4076     if (rc != NO_ERROR) {
 4077         LOGE("failed to start channels");
 4078         m_perfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
 4079         return rc;
 4080     }
 4081
 4082     if ((msgTypeEnabled(CAMERA_MSG_PREVIEW_FRAME))
 4083             && (m_channels[QCAMERA_CH_TYPE_CALLBACK] != NULL)) {
 4084         rc = startChannel(QCAMERA_CH_TYPE_CALLBACK);

4085         if (rc != NO_ERROR) {
 4086             LOGE("failed to start callback stream");
 4087             stopChannel(QCAMERA_CH_TYPE_ZSL);
 4088             stopChannel(QCAMERA_CH_TYPE_PREVIEW);
 4089             m_perfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
 4090             return rc;
 4091         }
 4092     }
 4093
 4094     updatePostPreviewParameters();
 4095     m_stateMachine.setPreviewCallbackNeeded(true);
 4096
 4097 #ifdef TARGET_ARCSOFT_BEAUTY
 4098     //init arcsoft preview beauty
 4099     if(!mbInitQCameraArcBeautyV && mParameters.isArcBeautyEnable()){
 4100         mQCameraArcBeautyV.init();
 4101         mbInitQCameraArcBeautyV = true;
 4102     }
 4103 #endif
 4104
 4105     // if job id is non-zero, that means the postproc init job is already
 4106     // pending or complete
 4107     if (mInitPProcJob == 0
 4108             && !(isSecureMode() && isRdiMode())) {
 4109         mInitPProcJob = deferPPInit();
 4110         if (mInitPProcJob == 0) {
 4111             LOGE("Unable to initialize postprocessor, mCameraHandle = %p",
 4112                      mCameraHandle);
 4113             rc = -ENOMEM;
 4114             m_perfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
 4115             return rc;
 4116         }
 4117     }
 4118     m_bPreviewStarted = true;
 4119
 4120     //configure snapshot skip for dual camera usecases
 4121     configureSnapshotSkip(true);
 4122
 4123     LOGI("X rc = %d", rc);
 4124     return rc;
 4125 }

在QCamera2HWI.cpp中的QCamera2HardwareInterface::startPreview()函数中调用startChannel(QCAMERA_CH_TYPE_PREVIEW);

-----------------------------------------------------------------------------------------------------------------

/hardware/qcom/camera/QCamera2/HAL/QCamera2HWI.cpp

9393 /*===========================================================================
 9394  * FUNCTION   : startChannel
 9395  *
 9396  * DESCRIPTION: start a channel by its type
 9397  *
 9398  * PARAMETERS :
 9399  *   @ch_type : channel type
 9400  *
 9401  * RETURN     : int32_t type of status
 9402  *              NO_ERROR  -- success
 9403  *              none-zero failure code
 9404  *==========================================================================*/
 9405 int32_t QCamera2HardwareInterface::startChannel(qcamera_ch_type_enum_t ch_type)
 9406 {
 9407     int32_t rc = UNKNOWN_ERROR;
 9408     if (m_channels[ch_type] != NULL) {
 9409         rc = m_channels[ch_type]->start();
 9410     }
 9411     return rc;
 9412 }
 9413

进入QCamera2HardwareInterface::startChannel(qcamera_ch_type_enum_t ch_type)调用m_channels[ch_type]->start();

------------------------------------------------------------------------------------------------------------------------------------------------------------------

进入QCameraChannel::start()函数开始执行两个过程,分别是mStreams[i]->start()和m_camOps->start_channel(m_camHandle, m_handle);

/hardware/qcom/camera/QCamera2/HAL/QCameraChannel.cpp

351 /*===========================================================================
 352  * FUNCTION   : start
 353  *
 354  * DESCRIPTION: start channel, which will start all streams belong to this channel
 355  *
 356  * PARAMETERS : None
 357  *
 358  * RETURN     : int32_t type of status
 359  *              NO_ERROR  -- success
 360  *              none-zero failure code
 361  *==========================================================================*/
 362 int32_t QCameraChannel::start()
 363 {   
 364     int32_t rc = NO_ERROR;
 365     
 366     if(m_bIsActive) {
 367         LOGW("Attempt to start active channel");
 368         return rc;
 369     }
 370     
 371     // there is more than one stream in the channel
 372     // we need to notify mctl that all streams in this channel need to be bundled
 373     for (size_t i = 0; i < mStreams.size(); i++) {
 374         if ((mStreams[i] != NULL) &&
 375                 (validate_handle(m_handle, mStreams[i]->getChannelHandle()))) {
 376             mStreams[i]->setBundleInfo();
 377         }
 378     }
 379
 380     
 381     for (size_t i = 0; i < mStreams.size(); i++) {
 382         if ((mStreams[i] != NULL) &&
 383                 (validate_handle(m_handle, mStreams[i]->getChannelHandle()))) {
 384             mStreams[i]->start();
 385         }
 386     }
 387     rc = m_camOps->start_channel(m_camHandle, m_handle);
 388     
 389     if (rc != NO_ERROR) {
 390         for (size_t i = 0; i < mStreams.size(); i++) {
 391             if ((mStreams[i] != NULL) &&
 392                 (validate_handle(m_handle, mStreams[i]->getChannelHandle()))) {
 393                 mStreams[i]->stop();

 394             }
 395         }
 396     } else {
 397         m_bIsActive = true;
 398         for (size_t i = 0; i < mStreams.size(); i++) {
 399             if (mStreams[i] != NULL) {
 400                 mStreams[i]->cond_signal();
 401             }
 402         }
 403     }
 404
 405     return rc;
 406 }

----------------------------------------------------------------------------------------------------------------------------------------------------------

首先是mStreams[i]->start()过程,调用QCameraStream::start()函数通过mProcTh.launch(dataProcRoutine, this)开启新线程执行CAMERA_CMD_TYPE_DO_NEXT_JOB分支,从mDataQ队列中取出数据并通过mDataCB回调传给对应的stream处理(若未注册mDataCB则直接归还buffer),后续是向kernel请求数据的过程.

/hardware/qcom/camera/QCamera2/HAL/QCameraStream.cpp

916 /*===========================================================================
 917  * FUNCTION   : start
 918  *
 919  * DESCRIPTION: start stream. Will start main stream thread to handle stream
 920  *              related ops.
 921  *
 922  * PARAMETERS : none
 923  *
 924  * RETURN     : int32_t type of status
 925  *              NO_ERROR  -- success
 926  *              none-zero failure code
 927  *==========================================================================*/
 928 int32_t QCameraStream::start()
 929 {
 930     int32_t rc = 0;
 931     mDataQ.init();
 932     rc = mProcTh.launch(dataProcRoutine, this);
 933     if (rc == NO_ERROR) {
 934         m_bActive = true;
 935     }
 936
 937     mCurMetaMemory = NULL;
 938     mCurBufIndex = -1;
 939     mCurMetaIndex = -1;
 940     mFirstTimeStamp = 0;
 941     memset (&mStreamMetaMemory, 0,
 942             (sizeof(MetaMemory) * CAMERA_MIN_VIDEO_BATCH_BUFFERS));
 943     return rc;
 944 }

dataProcRoutine方法(数据处理线程的主循环):

1157 void *QCameraStream::dataProcRoutine(void *data)
1158 {
1159     int running = 1;
1160     int ret;
1161     QCameraStream *pme = (QCameraStream *)data;
1162     QCameraCmdThread *cmdThread = &pme->mProcTh;
1163     cmdThread->setName("CAM_strmDatProc");
1164
1165     LOGD("E");
1166     do {
1167         do {
1168             ret = cam_sem_wait(&cmdThread->cmd_sem);
1169             if (ret != 0 && errno != EINVAL) {
1170                 LOGE("cam_sem_wait error (%s)",
1171                        strerror(errno));
1172                 return NULL;
1173             }
1174         } while (ret != 0);
1175
1176         // we got notified about new cmd avail in cmd queue
1177         camera_cmd_type_t cmd = cmdThread->getCmd();
1178         switch (cmd) {
1179         case CAMERA_CMD_TYPE_DO_NEXT_JOB: 
1180             {
1181                 LOGD("Do next job");
1182                 mm_camera_super_buf_t *frame =
1183                     (mm_camera_super_buf_t *)pme->mDataQ.dequeue();
1184                 if (NULL != frame) {
1185                     if (pme->mDataCB != NULL) {
1186                         pme->mDataCB(frame, pme, pme->mUserData);
1187                     } else {
1188                         // no data cb routine, return buf here
1189                         pme->bufDone(frame);
1190                         free(frame);
1191                     }
1192                 }
1193             }
1194             break;
1195         case CAMERA_CMD_TYPE_EXIT:
1196             LOGH("Exit");

1197             /* flush data buf queue */
1198             pme->mDataQ.flush();
1199             running = 0;
1200             break;
1201         default:
1202             break;
1203         }
1204     } while (running);
1205     LOGH("X");
1206     return NULL;
1207 }

创建新线程执行CAMERA_CMD_TYPE_DO_NEXT_JOB分支

/hardware/qcom/camera/QCamera2/util/QCameraCmdThread.cpp

 80 /*===========================================================================
 81  * FUNCTION   : launch
 82  *
 83  * DESCRIPTION: launch Cmd Thread
 84  *
 85  * PARAMETERS :
 86  *   @start_routine : thread routine function ptr
 87  *   @user_data     : user data ptr
 88  *
 89  * RETURN     : int32_t type of status
 90  *              NO_ERROR  -- success
 91  *              none-zero failure code
 92  *==========================================================================*/
 93 int32_t QCameraCmdThread::launch(void *(*start_routine)(void *),
 94                                  void* user_data)
 95 {
 96     /* launch the thread */
 97     pthread_create(&cmd_pid,
 98                    NULL,
 99                    start_routine,
100                    user_data);
101     return NO_ERROR;
102 }

-------------------------------------------------------------------------------------------------------------------------------------------------

然后是m_camOps->start_channel(m_camHandle, m_handle)过程,进入mm_camera.c中的mm_camera_start_channel(mm_camera_obj_t *my_obj, uint32_t ch_id)函数,执行mm_channel_fsm_fn函数。

通过搜索"mm_camera_ops ="可知,m_camOps的定义在/hardware/qcom/camera/QCamera2/stack/mm-camera-interface/src/mm_camera_interface.c中

/* camera ops v-table */
3122 static mm_camera_ops_t mm_camera_ops = {
3123     .query_capability = mm_camera_intf_query_capability,
3124     .register_event_notify = mm_camera_intf_register_event_notify,
3125     .close_camera = mm_camera_intf_close,
3126     .set_parms = mm_camera_intf_set_parms,
3127     .get_parms = mm_camera_intf_get_parms,
3128     .do_auto_focus = mm_camera_intf_do_auto_focus,
3129     .cancel_auto_focus = mm_camera_intf_cancel_auto_focus,
3130     .prepare_snapshot = mm_camera_intf_prepare_snapshot,
3131     .start_zsl_snapshot = mm_camera_intf_start_zsl_snapshot,
3132     .stop_zsl_snapshot = mm_camera_intf_stop_zsl_snapshot,
3133     .map_buf = mm_camera_intf_map_buf,
3134     .map_bufs = mm_camera_intf_map_bufs,
3135     .unmap_buf = mm_camera_intf_unmap_buf,
3136     .add_channel = mm_camera_intf_add_channel,
3137     .delete_channel = mm_camera_intf_del_channel,
3138     .get_bundle_info = mm_camera_intf_get_bundle_info,
3139     .add_stream = mm_camera_intf_add_stream,
3140     .link_stream = mm_camera_intf_link_stream,
3141     .delete_stream = mm_camera_intf_del_stream,
3142     .config_stream = mm_camera_intf_config_stream,
3143     .qbuf = mm_camera_intf_qbuf,
3144     .cancel_buffer = mm_camera_intf_cancel_buf,
3145     .get_queued_buf_count = mm_camera_intf_get_queued_buf_count,
3146     .map_stream_buf = mm_camera_intf_map_stream_buf,
3147     .map_stream_bufs = mm_camera_intf_map_stream_bufs,
3148     .unmap_stream_buf = mm_camera_intf_unmap_stream_buf,
3149     .set_stream_parms = mm_camera_intf_set_stream_parms,
3150     .get_stream_parms = mm_camera_intf_get_stream_parms,
3151     .start_channel = mm_camera_intf_start_channel,
3152     .stop_channel = mm_camera_intf_stop_channel,
3153     .request_super_buf = mm_camera_intf_request_super_buf,
3154     .cancel_super_buf_request = mm_camera_intf_cancel_super_buf_request,
3155     .flush_super_buf_queue = mm_camera_intf_flush_super_buf_queue,
3156     .configure_notify_mode = mm_camera_intf_configure_notify_mode,
3157     .process_advanced_capture = mm_camera_intf_process_advanced_capture,
3158     .get_session_id = mm_camera_intf_get_session_id,
3159     .set_dual_cam_cmd = mm_camera_intf_set_dual_cam_cmd,
3160     .flush = mm_camera_intf_flush,
3161     .register_stream_buf_cb = mm_camera_intf_register_stream_buf_cb,
3162     .register_frame_sync = mm_camera_intf_reg_frame_sync,
3163     .handle_frame_sync_cb = mm_camera_intf_handle_frame_sync_cb
3164 };

由 .start_channel = mm_camera_intf_start_channel 可知,start_channel 对应该文件中的 mm_camera_intf_start_channel 方法:

1301 static int32_t mm_camera_intf_start_channel(uint32_t camera_handle,
1302                                             uint32_t ch_id)
1303 {
1304     int32_t rc = -1;
1305     mm_camera_obj_t * my_obj = NULL;
1306     uint32_t chid = get_main_camera_handle(ch_id);
1307     uint32_t aux_chid = get_aux_camera_handle(ch_id);
1308
1309     if (chid) {
1310         uint32_t handle = get_main_camera_handle(camera_handle);
1311         pthread_mutex_lock(&g_intf_lock);
1312
1313         my_obj = mm_camera_util_get_camera_by_handler(handle);
1314         if(my_obj) {
1315             pthread_mutex_lock(&my_obj->cam_lock);
1316             pthread_mutex_unlock(&g_intf_lock);
1317             rc = mm_camera_start_channel(my_obj, chid);
1318         } else {
1319             pthread_mutex_unlock(&g_intf_lock);
1320         }
1321     }
1322
1323     if (aux_chid && rc == 0) {
1324         uint32_t aux_handle = get_aux_camera_handle(camera_handle);
1325         pthread_mutex_lock(&g_intf_lock);
1326
1327         my_obj = mm_camera_util_get_camera_head(aux_handle);
1328         if(my_obj) {
1329             pthread_mutex_lock(&my_obj->muxer_lock);
1330             pthread_mutex_unlock(&g_intf_lock);
1331             rc = mm_camera_muxer_start_channel(aux_handle, aux_chid, my_obj);
1332         } else {
1333             pthread_mutex_unlock(&g_intf_lock);
1334         }
1335     }
1336     LOGH("X ch_id = %u rc = %d", ch_id, rc);
1337     return rc;
1338 }

------------------------------------------------------------------------------------------------------------------------------------------------------------

mm_camera_start_channel方法定义在/hardware/qcom/camera/QCamera2/stack/mm-camera-interface/src/mm_camera.c中

1250 int32_t mm_camera_start_channel(mm_camera_obj_t *my_obj, uint32_t ch_id)
1251 {
1252     int32_t rc = -1;
1253     mm_channel_t * ch_obj =
1254         mm_camera_util_get_channel_by_handler(my_obj, ch_id);
1255
1256     if (NULL != ch_obj) {
1257         pthread_mutex_lock(&ch_obj->ch_lock);
1258         pthread_mutex_unlock(&my_obj->cam_lock);
1259
1260         rc = mm_channel_fsm_fn(ch_obj,
1261                                MM_CHANNEL_EVT_START,
1262                                NULL,
1263                                NULL);

1264     } else {
1265         pthread_mutex_unlock(&my_obj->cam_lock);
1266     }
1267
1268     return rc;
1269 }

-----------------------------------------------------------------------------------------------------------------------------------------------------------------

Channel开启之前处于MM_CHANNEL_STATE_STOPPED状态,所以进入mm_camera_channel.c中调用mm_channel_fsm_fn_stopped函数,根据MM_CHANNEL_EVT_START事件开始执行mm_channel_start功能;

/hardware/qcom/camera/QCamera2/stack/mm-camera-interface/src/mm_camera_channel.c

 839 int32_t mm_channel_fsm_fn(mm_channel_t *my_obj,
 840                           mm_channel_evt_type_t evt,
 841                           void * in_val,
 842                           void * out_val)
 843 {
 844     int32_t rc = -1;
 845
 846     LOGD("E state = %d", my_obj->state);
 847     switch (my_obj->state) {
 848     case MM_CHANNEL_STATE_NOTUSED:
 849         rc = mm_channel_fsm_fn_notused(my_obj, evt, in_val, out_val);
 850         break;
 851     case MM_CHANNEL_STATE_STOPPED:
 852         rc = mm_channel_fsm_fn_stopped(my_obj, evt, in_val, out_val);
 853         break;
 854     case MM_CHANNEL_STATE_ACTIVE:
 855         rc = mm_channel_fsm_fn_active(my_obj, evt, in_val, out_val);
 856         break;
 857     case MM_CHANNEL_STATE_PAUSED:
 858         rc = mm_channel_fsm_fn_paused(my_obj, evt, in_val, out_val);
 859         break;
 860     default:
 861         LOGD("Not a valid state (%d)", my_obj->state);
 862         break;
 863     }
 864
 865     /* unlock ch_lock */
 866     pthread_mutex_unlock(&my_obj->ch_lock);
 867     LOGD("X rc = %d", rc);
 868     return rc;
 869 }

mm_channel_fsm_fn传的参数是MM_CHANNEL_EVT_START,在mm_camera_channel.c中调用mm_channel_start(mm_channel_t *my_obj)函数,过程包括开启cb thread,cmd thread以及为每个stream分配buf,注册buf和开启stream。

920 int32_t mm_channel_fsm_fn_stopped(mm_channel_t *my_obj,
 921                                   mm_channel_evt_type_t evt,
 922                                   void * in_val,
 923                                   void * out_val)
 924 {
 925     int32_t rc = 0;
 926     LOGD("E evt = %d", evt);
 927     switch (evt) {
 928     case MM_CHANNEL_EVT_ADD_STREAM:
 929         {
 930             uint32_t s_hdl = 0;
 931             s_hdl = mm_channel_add_stream(my_obj);
 932             *((uint32_t*)out_val) = s_hdl;
 933             rc = 0;
 934         }
 935         break;
 936     case MM_CHANNEL_EVT_LINK_STREAM:
 937         {
 938             mm_camera_stream_link_t *stream_link = NULL;
 939             uint32_t s_hdl = 0;
 940             stream_link = (mm_camera_stream_link_t *) in_val;
 941             s_hdl = mm_channel_link_stream(my_obj, stream_link);
 942             *((uint32_t*)out_val) = s_hdl;
 943             rc = 0;
 944         }
 945         break;
 946     case MM_CHANNEL_EVT_REG_FRAME_SYNC:
 947         {
 948             mm_evt_paylod_reg_frame_sync *frame_sync = NULL;
 949             uint32_t s_hdl = 0;
 950             frame_sync = (mm_evt_paylod_reg_frame_sync *) in_val;
 951             s_hdl = mm_channel_reg_frame_sync(my_obj, frame_sync);
 952             *((uint32_t*)out_val) = s_hdl;
 953             rc = 0;
 954         }
 955         break;
 956     case MM_CHANNEL_EVT_TRIGGER_FRAME_SYNC:
 957         {
 958             rc = mm_channel_trigger_frame_sync(my_obj,
 959                     (mm_evt_paylod_trigger_frame_sync *)in_val);
 960         }
 961         break;
 962     case MM_CHANNEL_EVT_DEL_STREAM:
 963         {
 964             uint32_t s_id = *((uint32_t *)in_val);
 965             rc = mm_channel_del_stream(my_obj, s_id);

966         }
 967         break;
 968     case MM_CHANNEL_EVT_START:
 969         {
 970             rc = mm_channel_start(my_obj);
 971             /* first stream started in stopped state
 972              * move to active state */
 973             if (0 == rc) {
 974                 my_obj->state = MM_CHANNEL_STATE_ACTIVE;
 975             }
 976         }
 977         break;
 978     case MM_CHANNEL_EVT_CONFIG_STREAM:
 979         {
 980             mm_evt_paylod_config_stream_t *payload =
 981                 (mm_evt_paylod_config_stream_t *)in_val;
 982             rc = mm_channel_config_stream(my_obj,
 983                                           payload->stream_id,
 984                                           payload->config);
 985         }
 986         break;
 987     case MM_CHANNEL_EVT_GET_BUNDLE_INFO:
 988         {
 989             cam_bundle_config_t *payload =
 990                 (cam_bundle_config_t *)in_val;
 991             rc = mm_channel_get_bundle_info(my_obj, payload);
 992         }
 993         break;
 994     case MM_CHANNEL_EVT_DELETE:
 995         {
 996             mm_channel_release(my_obj);
 997             rc = 0;
 998         }
 999         break;
1000     case MM_CHANNEL_EVT_SET_STREAM_PARM:
1001         {
1002             mm_evt_paylod_set_get_stream_parms_t *payload =
1003                 (mm_evt_paylod_set_get_stream_parms_t *)in_val;
1004             rc = mm_channel_set_stream_parm(my_obj, payload);
1005         }
1006         break;
1007     case MM_CHANNEL_EVT_GET_STREAM_QUEUED_BUF_COUNT:
1008         {
1009             uint32_t stream_id = *((uint32_t *)in_val);
1010             rc = mm_channel_get_queued_buf_count(my_obj, stream_id);

1011         }
1012         break;
1013     case MM_CHANNEL_EVT_GET_STREAM_PARM:
1014         {
1015             mm_evt_paylod_set_get_stream_parms_t *payload =
1016                 (mm_evt_paylod_set_get_stream_parms_t *)in_val;
1017             rc = mm_channel_get_stream_parm(my_obj, payload);
1018         }
1019         break;
1020     case MM_CHANNEL_EVT_DO_ACTION:
1021         {
1022             mm_evt_paylod_do_stream_action_t *payload =
1023                 (mm_evt_paylod_do_stream_action_t *)in_val;
1024             rc = mm_channel_do_stream_action(my_obj, payload);
1025         }
1026         break;
1027     case MM_CHANNEL_EVT_MAP_STREAM_BUF:
1028         {
1029             cam_buf_map_type *payload =
1030                 (cam_buf_map_type *)in_val;
1031             rc = mm_channel_map_stream_buf(my_obj, payload);
1032         }
1033         break;
1034     case MM_CHANNEL_EVT_MAP_STREAM_BUFS:
1035         {
1036             cam_buf_map_type_list *payload =
1037                 (cam_buf_map_type_list *)in_val;
1038             rc = mm_channel_map_stream_bufs(my_obj, payload);
1039         }
1040         break;
1041     case MM_CHANNEL_EVT_UNMAP_STREAM_BUF:
1042         {
1043             cam_buf_unmap_type *payload =
1044                 (cam_buf_unmap_type *)in_val;
1045             rc = mm_channel_unmap_stream_buf(my_obj, payload);
1046         }
1047         break;
1048     case MM_CHANNEL_EVT_REG_STREAM_BUF_CB:
1049         {
1050             mm_evt_paylod_reg_stream_buf_cb *payload =
1051                 (mm_evt_paylod_reg_stream_buf_cb *)in_val;
1052             rc = mm_channel_reg_stream_buf_cb (my_obj,
1053                     payload->stream_id, payload->buf_cb);
1054         }
1055         break;

1056     default:
1057         LOGW("invalid state (%d) for evt (%d)",
1058                     my_obj->state, evt);
1059         break;
1060     }
1061     LOGD("E rc = %d", rc);
1062     return rc;
1063 }

调用mm_channel_start

1777 int32_t mm_channel_start(mm_channel_t *my_obj)
1778 {
1779     int32_t rc = 0;
1780     int i = 0, j = 0;
1781     mm_stream_t *s_objs[MAX_STREAM_NUM_IN_BUNDLE] = {NULL};
1782     uint8_t num_streams_to_start = 0;
1783     uint8_t num_streams_in_bundle_queue = 0;
1784     mm_stream_t *s_obj = NULL;
1785     int meta_stream_idx = 0;
1786     cam_stream_type_t stream_type = CAM_STREAM_TYPE_DEFAULT;
1787
1788     for (i = 0; i < MAX_STREAM_NUM_IN_BUNDLE; i++) {
1789         if (my_obj->streams[i].my_hdl > 0) {
1790             s_obj = mm_channel_util_get_stream_by_handler(my_obj,
1791                                                           my_obj->streams[i].my_hdl);
1792             if (NULL != s_obj) {
1793                 stream_type = s_obj->stream_info->stream_type;
1794                 /* remember meta data stream index */
1795                 if ((stream_type == CAM_STREAM_TYPE_METADATA) &&
1796                         (s_obj->ch_obj == my_obj)) {
1797                     meta_stream_idx = num_streams_to_start;
1798                 }
1799                 s_objs[num_streams_to_start++] = s_obj;
1800
1801                 if (!s_obj->stream_info->noFrameExpected ||
1802                         (s_obj->is_frame_shared && (s_obj->ch_obj == my_obj))) {
1803                     num_streams_in_bundle_queue++;
1804                 }
1805             }
1806         }
1807     }
1808
1809     if (meta_stream_idx > 0 ) {
1810         /* always start meta data stream first, so switch the stream object with the first one */
1811         s_obj = s_objs[0];
1812         s_objs[0] = s_objs[meta_stream_idx];
1813         s_objs[meta_stream_idx] = s_obj;
1814     }
1815
1816     if (NULL != my_obj->bundle.super_buf_notify_cb) {
1817         /* need to send up cb, therefore launch thread */
1818         /* init superbuf queue */

1819         mm_channel_superbuf_queue_init(&my_obj->bundle.superbuf_queue);
1820         my_obj->bundle.superbuf_queue.num_streams = num_streams_in_bundle_queue;
1821         my_obj->bundle.superbuf_queue.expected_frame_id =
1822                 my_obj->bundle.superbuf_queue.attr.user_expected_frame_id;
1823         my_obj->bundle.superbuf_queue.expected_frame_id_without_led = 0;
1824         my_obj->bundle.superbuf_queue.led_off_start_frame_id = 0;
1825         my_obj->bundle.superbuf_queue.led_on_start_frame_id = 0;
1826         my_obj->bundle.superbuf_queue.led_on_num_frames = 0;
1827         my_obj->bundle.superbuf_queue.good_frame_id = 0;
1828
1829         for (i = 0; i < num_streams_to_start; i++) {
1830             /* Only bundle streams that belong to the channel */
1831             if(!(s_objs[i]->stream_info->noFrameExpected) ||
1832                     (s_objs[i]->is_frame_shared && (s_objs[i]->ch_obj == my_obj))) {
1833                 if (s_objs[i]->ch_obj == my_obj) {
1834                     /* set bundled flag to streams */
1835                     s_objs[i]->is_bundled = 1;
1836                 }
1837                 my_obj->bundle.superbuf_queue.bundled_streams[j] = s_objs[i]->my_hdl;
1838
1839                 if (s_objs[i]->is_frame_shared && (s_objs[i]->ch_obj == my_obj)) {
1840                     mm_stream_t *dst_obj = NULL;
1841                     if (s_objs[i]->master_str_obj != NULL) {
1842                         dst_obj = s_objs[i]->master_str_obj;
1843                     } else if (s_objs[i]->aux_str_obj[0] != NULL) {
1844                         dst_obj = s_objs[i]->aux_str_obj[0];
1845                     }
1846                     if (dst_obj) {
1847                         my_obj->bundle.superbuf_queue.bundled_streams[j]
1848                             |= dst_obj->my_hdl;
1849                     }
1850                 }
1851                 j++;
1852             }
1853         }
1854
1855         /* launch cb thread for dispatching super buf through cb */
1856         snprintf(my_obj->cb_thread.threadName, THREAD_NAME_SIZE, "CAM_SuperBuf");
1857         mm_camera_cmd_thread_launch(&my_obj->cb_thread,
1858                                     mm_channel_dispatch_super_buf,
1859                                     (void*)my_obj);

1860
1861         /* launch cmd thread for super buf dataCB */
1862         snprintf(my_obj->cmd_thread.threadName, THREAD_NAME_SIZE, "CAM_SuperBufCB");

1863         mm_camera_cmd_thread_launch(&my_obj->cmd_thread,
1864                                     mm_channel_process_stream_buf,
1865                                     (void*)my_obj);

1866
1867         /* set flag to TRUE */
1868         my_obj->bundle.is_active = TRUE;
1869     }
1870
1871     /* link any streams first before starting the rest of the streams */
1872     for (i = 0; i < num_streams_to_start; i++) {
1873         if ((s_objs[i]->ch_obj != my_obj) && my_obj->bundle.is_active) {
1874             pthread_mutex_lock(&s_objs[i]->linked_stream->buf_lock);
1875             s_objs[i]->linked_stream->linked_obj = my_obj;
1876             s_objs[i]->linked_stream->is_linked = 1;
1877             pthread_mutex_unlock(&s_objs[i]->linked_stream->buf_lock);
1878             continue;
1879         }
1880     }
1881
1882     for (i = 0; i < num_streams_to_start; i++) {
1883         if (s_objs[i]->ch_obj != my_obj) {
1884             continue;
1885         }
1886         /* all streams within a channel should be started at the same time */
1887         if (s_objs[i]->state == MM_STREAM_STATE_ACTIVE) {
1888             LOGE("stream already started idx(%d)", i);
1889             rc = -1;
1890             break;
1891         }
1892
1893         /* allocate buf */
1894         rc = mm_stream_fsm_fn(s_objs[i],
1895                               MM_STREAM_EVT_GET_BUF,
1896                               NULL,
1897                               NULL);
1898         if (0 != rc) {
1899             LOGE("get buf failed at idx(%d)", i);
1900             break;
1901         }
1902
1903         /* reg buf */
1904         rc = mm_stream_fsm_fn(s_objs[i],
1905                               MM_STREAM_EVT_REG_BUF,
1906                               NULL,
1907                               NULL);

1908         if (0 != rc) {
1909             LOGE("reg buf failed at idx(%d)", i);
1910             break;
1911         }
1912
1913         /* start stream */
1914         rc = mm_stream_fsm_fn(s_objs[i],
1915                               MM_STREAM_EVT_START,
1916                               NULL,
1917                               NULL);
1918         if (0 != rc) {
1919             LOGE("start stream failed at idx(%d)", i);
1920             break;
1921         }
1922     }
1923
1924     /* error handling */
1925     if (0 != rc) {
1926         /* unlink the streams first */
1927         for (j = 0; j < num_streams_to_start; j++) {
1928             if (s_objs[j]->ch_obj != my_obj) {
1929                 pthread_mutex_lock(&s_objs[j]->linked_stream->buf_lock);
1930                 s_objs[j]->linked_stream->is_linked = 0;
1931                 s_objs[j]->linked_stream->linked_obj = NULL;
1932                 pthread_mutex_unlock(&s_objs[j]->linked_stream->buf_lock);
1933
1934                 if (TRUE == my_obj->bundle.is_active) {
1935                     mm_channel_flush_super_buf_queue(my_obj, 0,
1936                             s_objs[i]->stream_info->stream_type);
1937                 }
1938                 memset(s_objs[j], 0, sizeof(mm_stream_t));
1939                 continue;
1940             }
1941         }
1942
1943         for (j = 0; j <= i; j++) {
1944             if ((NULL == s_objs[j]) || (s_objs[j]->ch_obj != my_obj)) {
1945                 continue;
1946             }
1947             /* stop streams*/
1948             mm_stream_fsm_fn(s_objs[j],
1949                              MM_STREAM_EVT_STOP,
1950                              NULL,
1951                              NULL);

1952
1953             /* unreg buf */

1954             mm_stream_fsm_fn(s_objs[j],
1955                              MM_STREAM_EVT_UNREG_BUF,
1956                              NULL,
1957                              NULL);

1958
1959             /* put buf back */
1960             mm_stream_fsm_fn(s_objs[j],
1961                              MM_STREAM_EVT_PUT_BUF,
1962                              NULL,
1963                              NULL);

1964         }
1965
1966         /* destroy super buf cmd thread */
1967         if (TRUE == my_obj->bundle.is_active) {
1968             /* first stop bundle thread */
1969             mm_camera_cmd_thread_release(&my_obj->cmd_thread);
1970             mm_camera_cmd_thread_release(&my_obj->cb_thread);
1971
1972             /* deinit superbuf queue */
1973             mm_channel_superbuf_queue_deinit(&my_obj->bundle.superbuf_queue);
1974
1975             /* memset super buffer queue info */
1976             my_obj->bundle.is_active = 0;
1977             memset(&my_obj->bundle.superbuf_queue, 0, sizeof(mm_channel_queue_t));
1978         }
1979     }
1980     my_obj->bWaitForPrepSnapshotDone = 0;
1981     if (my_obj->bundle.superbuf_queue.attr.enable_frame_sync) {
1982         LOGH("registering Channel obj %p", my_obj);
1983         mm_frame_sync_register_channel(my_obj);
1984     }
1985     return rc;
1986 }

--------------------------------------------------------------------------------------------------------------------------------------------------------------------

首先是执行launch cb thread和launch cmd thread过程,开启两个线程;其次是allocate buf过程,调用mm_camera_stream.c中的mm_stream_fsm_fn函数,由于此时流处于MM_STREAM_STATE_CFG状态,分发进入mm_stream_fsm_cfg,根据MM_STREAM_EVT_GET_BUF事件调用mm_stream_init_bufs,在mm_stream_init_bufs(mm_stream_t * my_obj)函数中会执行get_bufs过程。

/hardware/qcom/camera/QCamera2/stack/mm-camera-interface/src/mm_camera_stream.c

496 int32_t mm_stream_fsm_fn(mm_stream_t *my_obj,
 497                          mm_stream_evt_type_t evt,
 498                          void * in_val,
 499                          void * out_val)
 500 {
 501     int32_t rc = -1;
 502
 503     LOGD("E, my_handle = 0x%x, fd = %d, state = %d",
 504           my_obj->my_hdl, my_obj->fd, my_obj->state);
 505     switch (my_obj->state) {
 506     case MM_STREAM_STATE_NOTUSED:
 507         LOGD("Not handling evt in unused state");
 508         break;
 509     case MM_STREAM_STATE_INITED:
 510         rc = mm_stream_fsm_inited(my_obj, evt, in_val, out_val);
 511         break;
 512     case MM_STREAM_STATE_ACQUIRED:
 513         rc = mm_stream_fsm_acquired(my_obj, evt, in_val, out_val);
 514         break;
 515     case MM_STREAM_STATE_CFG:
 516         rc = mm_stream_fsm_cfg(my_obj, evt, in_val, out_val);
 517         break;
 518     case MM_STREAM_STATE_BUFFED:
 519         rc = mm_stream_fsm_buffed(my_obj, evt, in_val, out_val);
 520         break;
 521     case MM_STREAM_STATE_REG:
 522         rc = mm_stream_fsm_reg(my_obj, evt, in_val, out_val);
 523         break;
 524     case MM_STREAM_STATE_ACTIVE:
 525         rc = mm_stream_fsm_active(my_obj, evt, in_val, out_val);
 526         break;
 527     default:
 528         LOGD("Not a valid state (%d)", my_obj->state);
 529         break;
 530     }
 531     LOGD("X rc =%d",rc);
 532     return rc;
 533 }

711 int32_t mm_stream_fsm_cfg(mm_stream_t * my_obj,
 712                           mm_stream_evt_type_t evt,
 713                           void * in_val,
 714                           void * out_val)
 715 {
 716     int32_t rc = 0;
 717     LOGD("E, my_handle = 0x%x, fd = %d, state = %d",
 718           my_obj->my_hdl, my_obj->fd, my_obj->state);
 719     switch(evt) {
 720     case MM_STREAM_EVT_SET_FMT:
 721         {
 722             mm_camera_stream_config_t *config =
 723                 (mm_camera_stream_config_t *)in_val;
 724
 725             rc = mm_stream_config(my_obj, config);
 726
 727             /* change state to configed */
 728             my_obj->state = MM_STREAM_STATE_CFG;
 729
 730             break;
 731         }
 732     case MM_STREAM_EVT_RELEASE:
 733         rc = mm_stream_release(my_obj);
 734         my_obj->state = MM_STREAM_STATE_NOTUSED;
 735         break;
 736     case MM_STREAM_EVT_SET_PARM:
 737         {
 738             mm_evt_paylod_set_get_stream_parms_t *payload =
 739                 (mm_evt_paylod_set_get_stream_parms_t *)in_val;
 740             rc = mm_stream_set_parm(my_obj, payload->parms);
 741         }
 742         break;
 743     case MM_STREAM_EVT_GET_PARM:
 744         {
 745             mm_evt_paylod_set_get_stream_parms_t *payload =
 746                 (mm_evt_paylod_set_get_stream_parms_t *)in_val;
 747             rc = mm_stream_get_parm(my_obj, payload->parms);
 748         }
 749         break;
 750     case MM_STREAM_EVT_GET_BUF:
 751         rc = mm_stream_init_bufs(my_obj);
 752         /* change state to buff allocated */
 753         if(0 == rc) {
 754             my_obj->state = MM_STREAM_STATE_BUFFED;
 755         }
 756         break;

 757     case MM_STREAM_EVT_TRIGGER_FRAME_SYNC:
 758         {
 759             mm_camera_cb_req_type type =
 760                     *((mm_camera_cb_req_type *)in_val);
 761             rc = mm_stream_trigger_frame_sync(my_obj, type);
 762         }
 763         break;
 764     default:
 765         LOGE("invalid state (%d) for evt (%d), in(%p), out(%p)",
 766                     my_obj->state, evt, in_val, out_val);
 767     }
 768     LOGD("X rc = %d", rc);
 769     return rc;
 770 }

2323 int32_t mm_stream_init_bufs(mm_stream_t * my_obj)
2324 {
2325     int32_t i, rc = 0;
2326     uint8_t *reg_flags = NULL;
2327     LOGD("E, my_handle = 0x%x, fd = %d, state = %d",
2328           my_obj->my_hdl, my_obj->fd, my_obj->state);
2329
2330     /* deinit buf if it's not NULL*/
2331     if ((NULL != my_obj->buf) && (!my_obj->is_res_shared)) {
2332         mm_stream_deinit_bufs(my_obj);
2333     }
2334
2335     if (!my_obj->is_res_shared) {
2336         rc = my_obj->mem_vtbl.get_bufs(&my_obj->frame_offset,
2337                 &my_obj->total_buf_cnt, &reg_flags, &my_obj->buf,
2338                 &my_obj->map_ops, my_obj->mem_vtbl.user_data);
2339         if (rc == 0) {
2340             for (i = 0; i < my_obj->total_buf_cnt; i++) {
2341                 my_obj->buf_status[i].initial_reg_flag = reg_flags[i];
2342             }
2343             if ((my_obj->num_s_cnt != 0) && (my_obj->total_buf_cnt != 0)) {
2344                 rc = mm_camera_muxer_get_stream_bufs(my_obj);
2345             }
2346         }
2347     }
2348     if (0 != rc || ((my_obj->buf_num > 0) && (NULL == my_obj->buf))) {
2349         LOGE("Error get buf, rc = %d\n", rc);
2350         rc =-1;
2351         return rc;
2352     }
2353
2354     LOGH("Buffer count = %d buf id = %d",my_obj->buf_num, my_obj->buf_idx);
2355     for (i = my_obj->buf_idx; i < (my_obj->buf_idx + my_obj->buf_num); i++) {
2356         my_obj->buf[i].stream_id = my_obj->my_hdl;
2357         my_obj->buf[i].stream_type = my_obj->stream_info->stream_type;
2358
2359         if (my_obj->buf[i].buf_type == CAM_STREAM_BUF_TYPE_USERPTR) {
2360             my_obj->buf[i].user_buf.bufs_used =
2361                     (int8_t)my_obj->stream_info->user_buf_info.frame_buf_cnt;
2362             if (reg_flags) {
2363                 my_obj->buf[i].user_buf.buf_in_use = reg_flags[i];
2364             }
2365         }
2366     }
2367
2368     if (my_obj->stream_info->streaming_mode == CAM_STREAMING_MODE_BATCH) {

2369         my_obj->plane_buf = my_obj->buf[0].user_buf.plane_buf;
2370         if (my_obj->plane_buf != NULL) {
2371             my_obj->plane_buf_num =
2372                     my_obj->buf_num *
2373                     my_obj->stream_info->user_buf_info.frame_buf_cnt;
2374             for (i = 0; i < my_obj->plane_buf_num; i++) {
2375                 my_obj->plane_buf[i].stream_id = my_obj->my_hdl;
2376                 my_obj->plane_buf[i].stream_type = my_obj->stream_info->stream_type;
2377             }
2378         }
2379         my_obj->cur_bufs_staged = 0;
2380         my_obj->cur_buf_idx = -1;
2381     }
2382
2383     free(reg_flags);
2384     reg_flags = NULL;
2385
2386     /* update in stream info about number of stream buffers */
2387     my_obj->stream_info->num_bufs = my_obj->total_buf_cnt;
2388
2389     return rc;
2390 }

调用get_bufs方法:

/hardware/qcom/camera/QCamera2/HAL/QCameraStream.cpp

 66 int32_t QCameraStream::get_bufs(
  67                      cam_frame_len_offset_t *offset,
  68                      uint8_t *num_bufs,
  69                      uint8_t **initial_reg_flag,
  70                      mm_camera_buf_def_t **bufs,
  71                      mm_camera_map_unmap_ops_tbl_t *ops_tbl,
  72                      void *user_data)
  73 {
  74     QCameraStream *stream = reinterpret_cast<QCameraStream *>(user_data);
  75     if (!stream) {
  76         LOGE("getBufs invalid stream pointer");
  77         return NO_MEMORY;
  78     }
  79
  80     if (stream->mStreamInfo != NULL
  81             && stream->mStreamInfo->streaming_mode == CAM_STREAMING_MODE_BATCH) {
  82         //Batch Mode. Allocate Butch buffers
  83         return stream->allocateBatchBufs(offset, num_bufs,
  84                 initial_reg_flag, bufs, ops_tbl);
  85     } else {
  86         // Plane Buffer. Allocate plane buffer
  87         return stream->getBufs(offset, num_bufs,
  88                 initial_reg_flag, bufs, ops_tbl);
  89     }
  90 }

-------------------------------------------------------------------------------------------------------------------------------------------------------------------------

再次是reg buf过程,此时流已处于MM_STREAM_STATE_BUFFED状态,mm_stream_fsm_fn分发到mm_camera_stream.c中的mm_stream_fsm_buffed函数,根据MM_STREAM_EVT_REG_BUF事件调用mm_stream_reg_buf,在mm_stream_reg_buf(mm_stream_t * my_obj)函数中会执行mm_stream_qbuf过程;

/hardware/qcom/camera/QCamera2/stack/mm-camera-interface/src/mm_camera_stream.c

788 int32_t mm_stream_fsm_buffed(mm_stream_t * my_obj,
 789                              mm_stream_evt_type_t evt,
 790                              void * in_val,
 791                              void * out_val)
 792 {
 793     int32_t rc = 0;
 794     LOGD("E, my_handle = 0x%x, fd = %d, state = %d",
 795           my_obj->my_hdl, my_obj->fd, my_obj->state);
 796     switch(evt) {
 797     case MM_STREAM_EVT_PUT_BUF:
 798         rc = mm_stream_deinit_bufs(my_obj);
 799         /* change state to configed */
 800         my_obj->state = MM_STREAM_STATE_CFG;
 801         break;
 802     case MM_STREAM_EVT_REG_BUF:
 803         rc = mm_stream_reg_buf(my_obj);
 804         /* change state to regged */
 805         if(0 == rc) {
 806             my_obj->state = MM_STREAM_STATE_REG;
 807         }
 808         break;
 809     case MM_STREAM_EVT_SET_PARM:
 810         {
 811             mm_evt_paylod_set_get_stream_parms_t *payload =
 812                 (mm_evt_paylod_set_get_stream_parms_t *)in_val;
 813             rc = mm_stream_set_parm(my_obj, payload->parms);
 814         }
 815         break;
 816     case MM_STREAM_EVT_GET_PARM:
 817         {
 818             mm_evt_paylod_set_get_stream_parms_t *payload =
 819                 (mm_evt_paylod_set_get_stream_parms_t *)in_val;
 820             rc = mm_stream_get_parm(my_obj, payload->parms);
 821         }
 822         break;
 823     default:
 824         LOGW("invalid state (%d) for evt (%d), in(%p), out(%p)",
 825                     my_obj->state, evt, in_val, out_val);
 826     }
 827     LOGD("X rc = %d", rc);
 828     return rc;
 829 }

/hardware/qcom/camera/QCamera2/stack/mm-camera-interface/src/mm_camera_stream.c

2447 int32_t mm_stream_reg_buf(mm_stream_t * my_obj)
2448 {
2449     int32_t rc = 0;
2450     uint8_t i;
2451     LOGD("E, my_handle = 0x%x, fd = %d, state = %d",
2452           my_obj->my_hdl, my_obj->fd, my_obj->state);
2453
2454     rc = mm_stream_request_buf(my_obj);
2455     if (rc != 0) {
2456         return rc;
2457     }
2458
2459     my_obj->queued_buffer_count = 0;
2460     for(i = my_obj->buf_idx; i < (my_obj->buf_idx + my_obj->buf_num); i++){
2461         /* check if need to qbuf initially */
2462         if (my_obj->buf_status[i].initial_reg_flag) {
2463             rc = mm_stream_qbuf(my_obj, &my_obj->buf[i]);
2464             if (rc != 0) {
2465                 LOGE("VIDIOC_QBUF idx = %d rc = %d\n", i, rc);
2466                 break;
2467             }
2468             my_obj->buf_status[i].buf_refcnt = 0;
2469             my_obj->buf_status[i].in_kernel = 1;
2470         } else {
2471             /* the buf is held by upper layer, will not queue into kernel.
2472              * add buf reference count */
2473             my_obj->buf_status[i].buf_refcnt = 1;
2474             my_obj->buf_status[i].in_kernel = 0;
2475         }
2476     }
2477
2478     return rc;
2479 }

在mm_stream_qbuf过程中,当1 == my_obj->queued_buffer_count(即第一个缓冲区入队)时,调用mm_camera_poll_thread_add_poll_fd把该流的fd加入data poll线程(Add fd to data poll thread),随后通过ioctl VIDIOC_QBUF将缓冲区入队到kernel;

/hardware/qcom/camera/QCamera2/stack/mm-camera-interface/src/mm_camera_stream.c

1930 int32_t mm_stream_qbuf(mm_stream_t *my_obj, mm_camera_buf_def_t *buf)
1931 {
1932     int32_t rc = 0;
1933     uint32_t length = 0;
1934     struct v4l2_buffer buffer;
1935     struct v4l2_plane planes[VIDEO_MAX_PLANES];
1936     LOGD("E, my_handle = 0x%x, fd = %d, state = %d, stream type = %d",
1937           my_obj->my_hdl, my_obj->fd, my_obj->state,
1938          my_obj->stream_info->stream_type);
1939
1940     if (buf->buf_type == CAM_STREAM_BUF_TYPE_USERPTR) {
1941         LOGD("USERPTR num_buf = %d, idx = %d",
1942                 buf->user_buf.bufs_used, buf->buf_idx);
1943         memset(&planes, 0, sizeof(planes));
1944         planes[0].length = my_obj->stream_info->user_buf_info.size;
1945         planes[0].m.userptr = buf->fd;
1946         length = 1;
1947     } else {
1948         memcpy(planes, buf->planes_buf.planes, sizeof(planes));
1949         length = buf->planes_buf.num_planes;
1950     }
1951
1952     memset(&buffer, 0, sizeof(buffer));
1953     buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1954     buffer.memory = V4L2_MEMORY_USERPTR;
1955     buffer.index = (__u32)buf->buf_idx;
1956     buffer.m.planes = &planes[0];
1957     buffer.length = (__u32)length;
1958
1959     rc = mm_stream_handle_cache_ops(my_obj, buf, FALSE);
1960     if (rc != 0) {
1961         LOGE("Error cleaning/invalidating the buffer");
1962     }
1963     pthread_mutex_lock(&my_obj->buf_lock);
1964     my_obj->queued_buffer_count++;
1965     if (1 == my_obj->queued_buffer_count) {
1966         uint8_t idx = mm_camera_util_get_index_by_num(
1967                 my_obj->ch_obj->cam_obj->my_num, my_obj->my_hdl);
1968         /* Add fd to data poll thread */
1969         LOGH("Add Poll FD %p type: %d idx = %d num = %d fd = %d",
1970                 my_obj,my_obj->stream_info->stream_type, idx,
1971                 my_obj->ch_obj->cam_obj->my_num, my_obj->fd);
1972         rc = mm_camera_poll_thread_add_poll_fd(&my_obj->ch_obj->poll_thread[0],
1973                 idx, my_obj->my_hdl, my_obj->fd, mm_stream_data_notify,
1974                 (void*)my_obj, mm_camera_async_call);
1975         if (0 > rc) {

1976             LOGE("Add poll on stream %p type: %d fd error (rc=%d)",
1977                  my_obj, my_obj->stream_info->stream_type, rc);
1978         } else {
1979             LOGH("Started poll on stream %p type: %d",
1980                 my_obj, my_obj->stream_info->stream_type);
1981         }
1982     }
1983     pthread_mutex_unlock(&my_obj->buf_lock);
1984
1985     rc = ioctl(my_obj->fd, VIDIOC_QBUF, &buffer);
1986     pthread_mutex_lock(&my_obj->buf_lock);
1987     if (0 > rc) {
1988         LOGE("VIDIOC_QBUF ioctl call failed on stream type %d (rc=%d): %s",
1989              my_obj->stream_info->stream_type, rc, strerror(errno));
1990         my_obj->queued_buffer_count--;
1991         if (0 == my_obj->queued_buffer_count) {
1992             uint8_t idx = mm_camera_util_get_index_by_num(
1993                     my_obj->ch_obj->cam_obj->my_num, my_obj->my_hdl);
1994             /* Remove fd from data poll in case of failing
1995              * first buffer queuing attempt */
1996             LOGH("Stoping poll on stream %p type: %d",
1997                 my_obj, my_obj->stream_info->stream_type);
1998             mm_camera_poll_thread_del_poll_fd(&my_obj->ch_obj->poll_thread[0],
1999                     idx, my_obj->my_hdl, mm_camera_async_call);
2000             LOGH("Stopped poll on stream %p type: %d",
2001                 my_obj, my_obj->stream_info->stream_type);
2002         }
2003     } else {
2004         LOGH("VIDIOC_QBUF buf_index %d, frame_idx %d stream type %d, rc %d,"
2005                 " queued: %d, buf_type = %d stream-FD = %d my_num %d buf fd: %d",
2006                 buffer.index, buf->frame_idx, my_obj->stream_info->stream_type, rc,
2007                 my_obj->queued_buffer_count, buf->buf_type, my_obj->fd,
2008                 my_obj->ch_obj->cam_obj->my_num, buf->fd);
2009     }
2010     pthread_mutex_unlock(&my_obj->buf_lock);
2011
2012     return rc;
2013 }
2014

最后是start stream过程,此时流处于MM_STREAM_STATE_REG状态,mm_stream_fsm_fn分发到mm_stream_fsm_reg函数,根据MM_STREAM_EVT_START事件,先在存在非同步回调时启动CAM_StrmAppData命令线程,再执行mm_stream_streamon函数;

/hardware/qcom/camera/QCamera2/stack/mm-camera-interface/src/mm_camera_stream.c

847 int32_t mm_stream_fsm_reg(mm_stream_t * my_obj,
 848                           mm_stream_evt_type_t evt,
 849                           void * in_val,
 850                           void * out_val)
 851 {
 852     int32_t rc = 0;
 853     LOGD("E, my_handle = 0x%x, fd = %d, state = %d",
 854           my_obj->my_hdl, my_obj->fd, my_obj->state);
 855
 856     switch(evt) {
 857     case MM_STREAM_EVT_UNREG_BUF:
 858         rc = mm_stream_unreg_buf(my_obj);
 859
 860         /* change state to buffed */
 861         my_obj->state = MM_STREAM_STATE_BUFFED;
 862         break;
 863     case MM_STREAM_EVT_START:
 864         {
 865             uint8_t has_cb = 0;
 866             uint8_t i;
 867             /* launch cmd thread if CB is not null */
 868             pthread_mutex_lock(&my_obj->cb_lock);
 869             for (i = 0; i < MM_CAMERA_STREAM_BUF_CB_MAX; i++) {
 870                 if((NULL != my_obj->buf_cb[i].cb) &&
 871                         (my_obj->buf_cb[i].cb_type != MM_CAMERA_STREAM_CB_TYPE_SYNC)) {
 872                     has_cb = 1;
 873                     break;
 874                 }
 875             }
 876             pthread_mutex_unlock(&my_obj->cb_lock);
 877
 878             pthread_mutex_lock(&my_obj->cmd_lock);
 879             if (has_cb) {
 880                 snprintf(my_obj->cmd_thread.threadName, THREAD_NAME_SIZE, "CAM_StrmAppData");
 881                 mm_camera_cmd_thread_launch(&my_obj->cmd_thread,
 882                                             mm_stream_dispatch_app_data,
 883                                             (void *)my_obj);
 884             }
 885             pthread_mutex_unlock(&my_obj->cmd_lock);

 886
 887             my_obj->state = MM_STREAM_STATE_ACTIVE;
 888             rc = mm_stream_streamon(my_obj);
 889             if (0 != rc) {
 890                 /* failed stream on, need to release cmd thread if it's launched */
 891                 pthread_mutex_lock(&my_obj->cmd_lock);
 892                 if (has_cb) {
 893                     mm_camera_cmd_thread_release(&my_obj->cmd_thread);
 894                 }
 895                 pthread_mutex_unlock(&my_obj->cmd_lock);
 896                 my_obj->state = MM_STREAM_STATE_REG;
 897                 break;
 898             }
 899         }
 900         break;
 901     case MM_STREAM_EVT_SET_PARM:
 902         {
 903             mm_evt_paylod_set_get_stream_parms_t *payload =
 904                 (mm_evt_paylod_set_get_stream_parms_t *)in_val;
 905             rc = mm_stream_set_parm(my_obj, payload->parms);
 906         }
 907         break;
 908     case MM_STREAM_EVT_GET_PARM:
 909         {
 910             mm_evt_paylod_set_get_stream_parms_t *payload =
 911                 (mm_evt_paylod_set_get_stream_parms_t *)in_val;
 912             rc = mm_stream_get_parm(my_obj, payload->parms);
 913         }
 914         break;
 915     default:
 916         LOGE("invalid state (%d) for evt (%d), in(%p), out(%p)",
 917                     my_obj->state, evt, in_val, out_val);
 918     }
 919     LOGD("X rc = %d", rc);
 920     return rc;
 921 }

在mm_camera_stream.c中调用mm_stream_streamon(mm_stream_t *my_obj)函数,通过ioctl向kernel发送VIDIOC_STREAMON的v4l2请求以开启数据流,之后等待数据回调。

/hardware/qcom/camera/QCamera2/stack/mm-camera-interface/src/mm_camera_stream.c

1353 int32_t mm_stream_streamon(mm_stream_t *my_obj)
1354 {
1355     int32_t rc = 0;
1356     int8_t i;
1357     enum v4l2_buf_type buf_type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1358     uint8_t idx = mm_camera_util_get_index_by_num(
1359             my_obj->ch_obj->cam_obj->my_num, my_obj->my_hdl);
1360
1361     LOGD("E, my_handle = 0x%x, fd = %d, state = %d",
1362           my_obj->my_hdl, my_obj->fd, my_obj->state);
1363
1364     pthread_mutex_lock(&my_obj->buf_lock);
1365     for (i = my_obj->buf_idx; i < (my_obj->buf_idx + my_obj->buf_num); i++) {
1366         if ((my_obj->buf_status[i].map_status == 0) &&
1367                 (my_obj->buf_status[i].in_kernel)) {
1368             LOGD("waiting for mapping to done: strm fd = %d",
1369                      my_obj->fd);
1370             struct timespec ts;
1371             clock_gettime(CLOCK_MONOTONIC, &ts);
1372             ts.tv_sec += WAIT_TIMEOUT;
1373             rc = pthread_cond_timedwait(&my_obj->buf_cond, &my_obj->buf_lock, &ts);
1374             if (rc == ETIMEDOUT) {
1375                 LOGE("Timed out. Abort stream-on \n");
1376                 rc = -1;
1377             }
1378             break;
1379         } else if (my_obj->buf_status[i].map_status < 0) {
1380             LOGD("Buffer mapping failed. Abort Stream On");
1381             rc = -1;
1382             break;
1383         }
1384     }
1385     pthread_mutex_unlock(&my_obj->buf_lock);
1386
1387     if (rc < 0) {
1388         /* remove fd from data poll thread in case of failure */
1389         mm_camera_poll_thread_del_poll_fd(&my_obj->ch_obj->poll_thread[0],
1390                 idx, my_obj->my_hdl, mm_camera_sync_call);
1391         return rc;
1392     }
1393     mm_camera_obj_t *cam_obj = my_obj->ch_obj->cam_obj;
1394     LOGD("E, my_handle = 0x%x, fd = %d, state = %d session_id:%d stream_id:%d",
1395             my_obj->my_hdl, my_obj->fd, my_obj->state, cam_obj->sessionid,
1396             my_obj->server_stream_id);
1397
1398     rc = ioctl(my_obj->fd, VIDIOC_STREAMON, &buf_type);

1399     if (rc < 0 && my_obj->stream_info->num_bufs != 0) {
1400         LOGE("ioctl VIDIOC_STREAMON failed: rc=%d, errno %d",
1401                 rc, errno);
1402         goto error_case;
1403     }
1404
1405 #ifndef DAEMON_PRESENT
1406     cam_shim_packet_t *shim_cmd;
1407     cam_shim_cmd_data shim_cmd_data;
1408
1409     memset(&shim_cmd_data, 0, sizeof(shim_cmd_data));
1410     shim_cmd_data.command = MSM_CAMERA_PRIV_STREAM_ON;
1411     shim_cmd_data.stream_id = my_obj->server_stream_id;
1412     shim_cmd_data.value = NULL;
1413     shim_cmd = mm_camera_create_shim_cmd_packet(CAM_SHIM_SET_PARM,
1414             cam_obj->sessionid, &shim_cmd_data);
1415     rc = mm_camera_module_send_cmd(shim_cmd);
1416     mm_camera_destroy_shim_cmd_packet(shim_cmd);
1417     if (rc < 0) {
1418         LOGE("Module StreamON failed: rc=%d", rc);
1419         ioctl(my_obj->fd, VIDIOC_STREAMOFF, &buf_type);
1420         goto error_case;
1421     }
1422 #endif
1423     LOGD("X rc = %d",rc);
1424     return rc;
1425 error_case:
1426      /* remove fd from data poll thread in case of failure */
1427      mm_camera_poll_thread_del_poll_fd(&my_obj->ch_obj->poll_thread[0],
1428              idx, my_obj->my_hdl, mm_camera_sync_call);
1429
1430     LOGD("X rc = %d",rc);
1431     return rc;
1432 }
 

后续分析在startPreview流程分析二中进行。。。。

  • 0
    点赞
  • 4
    收藏
    觉得还不错? 一键收藏
  • 0
    评论
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值