[Repost] Android Camera Source Code: Qualcomm mm_camera Architecture Analysis

https://blog.csdn.net/hbw1992322/article/details/75259311

This walkthrough covers three main areas:
  1. Camera open
  2. Camera preview
  3. Camera capture

1. Camera Open

mm_camera & mm_camera_open()

First, the mm_camera layer uses a structure to represent the camera object obtained from the lower layers; this structure is called mm_camera_obj.
As the definition below shows, the mm_camera object contains two threads:
1. mm_camera_poll_thread_t
2. mm_camera_cmd_thread_t
It also contains an array that stores the channels it owns:
mm_channel_t ch[MM_CAMERA_CHANNEL_MAX]

typedef struct mm_camera_obj {
    uint32_t my_hdl;
    int ref_count;
    int32_t ctrl_fd;
    int32_t ds_fd; /* domain socket fd */
    pthread_mutex_t cam_lock;
    pthread_mutex_t cb_lock; /* lock for evt cb */
    mm_channel_t ch[MM_CAMERA_CHANNEL_MAX];
    mm_camera_evt_obj_t evt;
    mm_camera_poll_thread_t evt_poll_thread; /* evt poll thread */
    mm_camera_cmd_thread_t evt_thread;       /* thread for evt CB */
    mm_camera_vtbl_t vtbl;

    pthread_mutex_t evt_lock;
    pthread_cond_t evt_cond;
    mm_camera_event_t evt_rcvd;

    pthread_mutex_t msg_lock; /* lock for sending msg through socket */
    uint32_t sessionid; /* Camera server session id */
} mm_camera_obj_t;
 
 

During camera open, the layers above the kernel eventually call mm_camera_open(mm_camera_obj_t *my_obj). This method is invoked from mm_camera_interface:

int32_t camera_open(uint8_t camera_idx, mm_camera_vtbl_t **camera_vtbl)
{
    ......
    mm_camera_obj_t *cam_obj = NULL;

    cam_obj = (mm_camera_obj_t *)malloc(sizeof(mm_camera_obj_t));
    if(NULL == cam_obj) {
        pthread_mutex_unlock(&g_intf_lock);
        LOGE("no mem");
        return -EINVAL;
    }

    /* initialize camera obj */
    memset(cam_obj, 0, sizeof(mm_camera_obj_t));
    cam_obj->ctrl_fd = -1;
    cam_obj->ds_fd = -1;
    cam_obj->ref_count++;
    cam_obj->my_hdl = mm_camera_util_generate_handler(camera_idx);
    cam_obj->vtbl.camera_handle = cam_obj->my_hdl; /* set handler */
    cam_obj->vtbl.ops = &mm_camera_ops;
    pthread_mutex_init(&cam_obj->cam_lock, NULL);
    /* unlock global interface lock, if not, in dual camera use case,
      * current open will block operation of another opened camera obj*/
    pthread_mutex_lock(&cam_obj->cam_lock);
    pthread_mutex_unlock(&g_intf_lock);

    rc = mm_camera_open(cam_obj);
    ......
}
 
 

In other words, mm_camera_open fills in this mm_camera_obj structure, which the upper layers then use.

Next, let's look inside the body of mm_camera_open:

    LOGD("Launch evt Thread in Cam Open");
    snprintf(my_obj->evt_thread.threadName, THREAD_NAME_SIZE, "CAM_Dispatch");
    mm_camera_cmd_thread_launch(&my_obj->evt_thread,
                                mm_camera_dispatch_app_event,
                                (void *)my_obj);

    /* launch event poll thread
     * we will add evt fd into event poll thread upon user first register for evt */
    LOGD("Launch evt Poll Thread in Cam Open");
    snprintf(my_obj->evt_poll_thread.threadName, THREAD_NAME_SIZE, "CAM_evntPoll");
    mm_camera_poll_thread_launch(&my_obj->evt_poll_thread,
                                 MM_CAMERA_POLL_TYPE_EVT);
    mm_camera_evt_sub(my_obj, TRUE);
 
 

Two threads are launched here.

1.1 Thread 1: mm_camera_cmd_thread

The first of these calls, mm_camera_cmd_thread_launch, is defined in mm_camera_thread.c:

int32_t mm_camera_cmd_thread_launch(mm_camera_cmd_thread_t * cmd_thread,
                                    mm_camera_cmd_cb_t cb,
                                    void* user_data){

    cmd_thread->cb = cb;
    cmd_thread->user_data = user_data;
    cmd_thread->is_active = TRUE;

    /* launch the thread */
    pthread_create(&cmd_thread->cmd_pid,
                   NULL,
                   mm_camera_cmd_thread,
                   (void *)cmd_thread);
}
 
 
typedef struct {
    uint8_t is_active;     /*indicates whether thread is active or not */
    cam_queue_t cmd_queue; /* cmd queue (queuing dataCB, asyncCB, or exitCMD) */
    pthread_t cmd_pid;           /* cmd thread ID */
    cam_semaphore_t cmd_sem;     /* semaphore for cmd thread */
    cam_semaphore_t sync_sem;     /* semaphore for synchronization with cmd thread */
    mm_camera_cmd_cb_t cb;       /* cb for cmd */
    void* user_data;             /* user_data for cb */
    char threadName[THREAD_NAME_SIZE];
} mm_camera_cmd_thread_t;
 
 

That is, the cb of the mm_camera_cmd_thread_t inside mm_camera_obj is assigned first, and then the thread is started with mm_camera_cmd_thread as its entry function:

static void *mm_camera_cmd_thread(void *data)
{
    int running = 1;
    int ret;
    mm_camera_cmd_thread_t *cmd_thread =
                (mm_camera_cmd_thread_t *)data;
    mm_camera_cmdcb_t* node = NULL;

    mm_camera_cmd_thread_name(cmd_thread->threadName);
    do {
        do {
            ret = cam_sem_wait(&cmd_thread->cmd_sem);
            if (ret != 0 && errno != EINVAL) {
                LOGE("cam_sem_wait error (%s)",
                            strerror(errno));
                return NULL;
            }
        } while (ret != 0);

        /* we got notified about new cmd avail in cmd queue */
        node = (mm_camera_cmdcb_t*)cam_queue_deq(&cmd_thread->cmd_queue);
        while (node != NULL) {
            switch (node->cmd_type) {
            case MM_CAMERA_CMD_TYPE_EVT_CB:
            case MM_CAMERA_CMD_TYPE_DATA_CB:
            case MM_CAMERA_CMD_TYPE_REQ_DATA_CB:
            case MM_CAMERA_CMD_TYPE_SUPER_BUF_DATA_CB:
            case MM_CAMERA_CMD_TYPE_CONFIG_NOTIFY:
            case MM_CAMERA_CMD_TYPE_START_ZSL:
            case MM_CAMERA_CMD_TYPE_STOP_ZSL:
            case MM_CAMERA_CMD_TYPE_GENERAL:
            case MM_CAMERA_CMD_TYPE_FLUSH_QUEUE:
                if (NULL != cmd_thread->cb) {
                    cmd_thread->cb(node, cmd_thread->user_data);
                }
                break;
            case MM_CAMERA_CMD_TYPE_EXIT:
            default:
                running = 0;
                break;
            }
            free(node);
            node = (mm_camera_cmdcb_t*)cam_queue_deq(&cmd_thread->cmd_queue);
        } /* (node != NULL) */
    } while (running);
    return NULL;
}
 
 

This thread body loops continuously, dequeuing commands from the thread's command queue and invoking cb. The cb here is the one passed in earlier through mm_camera_cmd_thread_launch, which in the source is:

static void mm_camera_dispatch_app_event(mm_camera_cmdcb_t *cmd_cb,
                                         void* user_data)
{
    int i;
    mm_camera_event_t *event = &cmd_cb->u.evt;
    mm_camera_obj_t * my_obj = (mm_camera_obj_t *)user_data;
    if (NULL != my_obj) {
        mm_camera_cmd_thread_name(my_obj->evt_thread.threadName);
        pthread_mutex_lock(&my_obj->cb_lock);
        for(i = 0; i < MM_CAMERA_EVT_ENTRY_MAX; i++) {
            if(my_obj->evt.evt[i].evt_cb) {
                my_obj->evt.evt[i].evt_cb(
                    my_obj->my_hdl,
                    event,
                    my_obj->evt.evt[i].user_data);
            }
        }
        pthread_mutex_unlock(&my_obj->cb_lock);
    }
}
 
 

The my_obj->evt callbacks used here are registered during camera open via register_event_notify:

int QCamera3HardwareInterface::openCamera()
{
    int rc = 0;
    char value[PROPERTY_VALUE_MAX];

    KPI_ATRACE_CALL();
    if (mCameraHandle) {
        LOGE("Failure: Camera already opened");
        return ALREADY_EXISTS;
    }

    rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
    if (rc < 0) {
        LOGE("Failed to reserve flash for camera id: %d",
                mCameraId);
        return UNKNOWN_ERROR;
    }

    rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
    if (rc) {
        LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
        return rc;
    }

    if (!mCameraHandle) {
        LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
        return -ENODEV;
    }

    rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
            camEvtHandle, (void *)this);
 
 

The camEvtHandle registered here is used to handle events coming from mm_camera.

1.2 Thread 2: mm_camera_poll_thread

mm_camera_poll_thread_launch is called to create the poll thread held in mm_camera_obj:

int32_t mm_camera_poll_thread_launch(mm_camera_poll_thread_t * poll_cb,
                                     mm_camera_poll_thread_type_t poll_type)
{
    int32_t rc = 0;
    size_t i = 0, cnt = 0;
    poll_cb->poll_type = poll_type;

    //Initialize poll_fds
    cnt = sizeof(poll_cb->poll_fds) / sizeof(poll_cb->poll_fds[0]);
    for (i = 0; i < cnt; i++) {
        poll_cb->poll_fds[i].fd = -1;
    }
    //Initialize poll_entries
    cnt = sizeof(poll_cb->poll_entries) / sizeof(poll_cb->poll_entries[0]);
    for (i = 0; i < cnt; i++) {
        poll_cb->poll_entries[i].fd = -1;
    }
    //Initialize pipe fds
    poll_cb->pfds[0] = -1;
    poll_cb->pfds[1] = -1;
    rc = pipe(poll_cb->pfds);
    if(rc < 0) {
        LOGE("pipe open rc=%d\n", rc);
        return -1;
    }

    poll_cb->timeoutms = -1;  /* Infinite seconds */

    LOGD("poll_type = %d, read fd = %d, write fd = %d timeout = %d",
         poll_cb->poll_type,
        poll_cb->pfds[0], poll_cb->pfds[1],poll_cb->timeoutms);

    pthread_mutex_init(&poll_cb->mutex, NULL);
    pthread_cond_init(&poll_cb->cond_v, NULL);

    /* launch the thread */
    pthread_mutex_lock(&poll_cb->mutex);
    poll_cb->status = 0;
    pthread_create(&poll_cb->pid, NULL, mm_camera_poll_thread, (void *)poll_cb);
    if(!poll_cb->status) {
        pthread_cond_wait(&poll_cb->cond_v, &poll_cb->mutex);
    }

    pthread_mutex_unlock(&poll_cb->mutex);
    LOGD("End");
    return rc;
}
 
 

As an aside: in Linux everything can be viewed as a file, and files fall into several categories: regular files, directories, links, and device files. A file descriptor is an index the kernel creates to manage opened files efficiently; it is a non-negative integer (usually a small one) that refers to an opened file, and all I/O system calls operate on file descriptors.

A pipe is created here with int pipe(int filedes[2]);, which is given a two-element array: filedes[0] is the read end and filedes[1] is the write end. (In the classic fork-based usage each side closes the end it does not use, i.e. the reader closes filedes[1] and the writer closes filedes[0]; in mm_camera both ends stay open in the same process, and other threads write small commands into filedes[1] to wake the poll thread, which reads them from filedes[0].)
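
To make the read-end/write-end roles concrete, here is a minimal standalone sketch (plain POSIX code, not mm_camera source) showing that whatever is written to filedes[1] comes back out of filedes[0]; the poll thread consumes such pipe commands in mm_camera_poll_proc_pipe.

#include <stdio.h>
#include <unistd.h>

int main(void)
{
    int fds[2];
    char buf[16];

    if (pipe(fds) < 0) {            /* fds[0]: read end, fds[1]: write end */
        perror("pipe");
        return 1;
    }

    write(fds[1], "wake", 5);       /* a signalling thread would do this */
    ssize_t n = read(fds[0], buf, sizeof(buf)); /* the poll thread side */
    if (n > 0)
        printf("read %zd bytes: %s\n", n, buf);

    close(fds[0]);
    close(fds[1]);
    return 0;
}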

The thread is then launched; its polling loop looks like this:

static void *mm_camera_poll_fn(mm_camera_poll_thread_t *poll_cb)
{
    do {
         for(i = 0; i < poll_cb->num_fds; i++) {
            poll_cb->poll_fds[i].events = POLLIN|POLLRDNORM|POLLPRI;
         }

         rc = poll(poll_cb->poll_fds, poll_cb->num_fds, poll_cb->timeoutms);
         if(rc > 0) {
            if ((poll_cb->poll_fds[0].revents & POLLIN) &&
                (poll_cb->poll_fds[0].revents & POLLRDNORM)) {
                /* if we have data on pipe, we only process pipe in this iteration */
                LOGD("cmd received on pipe\n");
                mm_camera_poll_proc_pipe(poll_cb);
            } else {
                for(i=1; i<poll_cb->num_fds; i++) {
                    /* Checking for ctrl events */
                    if ((poll_cb->poll_type == MM_CAMERA_POLL_TYPE_EVT) &&
                        (poll_cb->poll_fds[i].revents & POLLPRI)) {
                        LOGD("mm_camera_evt_notify\n");
                        if (NULL != poll_cb->poll_entries[i-1].notify_cb) {
                            poll_cb->poll_entries[i-1].notify_cb(poll_cb->poll_entries[i-1].user_data);
                        }
                    }

                    if ((MM_CAMERA_POLL_TYPE_DATA == poll_cb->poll_type) &&
                        (poll_cb->poll_fds[i].revents & POLLIN) &&
                        (poll_cb->poll_fds[i].revents & POLLRDNORM)) {
                        LOGD("mm_stream_data_notify\n");
                        if (NULL != poll_cb->poll_entries[i-1].notify_cb) {
                            poll_cb->poll_entries[i-1].notify_cb(poll_cb->poll_entries[i-1].user_data);
                        }
                    }
                }
            }
        } else {
            /* in error case sleep 10 us and then continue. hard coded here */
            usleep(10);
            continue;
        }
    } while ((poll_cb != NULL) && (poll_cb->state == MM_CAMERA_POLL_TASK_STATE_POLL));
}
 
 

For the event poll thread, the notify callback leads back to mm_camera_enqueue_evt in mm_camera.c, which does the following:

    mm_camera_cmdcb_t *node = NULL;
    node = (mm_camera_cmdcb_t *)malloc(sizeof(mm_camera_cmdcb_t));
        /* enqueue to evt cmd thread */
        cam_queue_enq(&(my_obj->evt_thread.cmd_queue), node);
        /* wake up evt cmd thread */
        cam_sem_post(&(my_obj->evt_thread.cmd_sem));
 
 

This places the node onto the cmd_queue of mm_camera_obj's evt_thread, i.e. the cmd_queue of the mm_camera_cmd_thread_t created in section 1.1, where that thread dequeues and processes it.
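
The enqueue-then-post handoff above is a standard producer/consumer pattern. The sketch below is a simplification using plain POSIX primitives rather than the real cam_queue_t/cam_semaphore_t helpers: the producer enqueues a node and posts a semaphore, and the consumer thread blocks on the semaphore, dequeues, and runs its work, just as the poll thread wakes the cmd thread.

#include <pthread.h>
#include <semaphore.h>
#include <stdio.h>
#include <stdlib.h>

typedef struct node {
    int cmd;
    struct node *next;
} node_t;

static node_t *head = NULL;
static pthread_mutex_t q_lock = PTHREAD_MUTEX_INITIALIZER;
static sem_t q_sem;

/* producer side: conceptually what mm_camera_enqueue_evt does */
static void enqueue_cmd(int cmd)
{
    node_t *n = malloc(sizeof(node_t));
    n->cmd = cmd;
    pthread_mutex_lock(&q_lock);
    n->next = head;            /* LIFO for brevity; cam_queue_t is FIFO */
    head = n;
    pthread_mutex_unlock(&q_lock);
    sem_post(&q_sem);          /* wake the consumer, like cam_sem_post() */
}

/* consumer side: conceptually what mm_camera_cmd_thread does */
static void *cmd_thread(void *arg)
{
    (void)arg;
    for (;;) {
        sem_wait(&q_sem);      /* block until a command is available */
        pthread_mutex_lock(&q_lock);
        node_t *n = head;
        if (n) head = n->next;
        pthread_mutex_unlock(&q_lock);
        if (!n) continue;
        if (n->cmd < 0) {      /* exit command, like MM_CAMERA_CMD_TYPE_EXIT */
            free(n);
            return NULL;
        }
        printf("processing cmd %d\n", n->cmd);  /* cb(node, user_data) */
        free(n);
    }
}

int main(void)
{
    pthread_t tid;
    sem_init(&q_sem, 0, 0);
    pthread_create(&tid, NULL, cmd_thread, NULL);
    enqueue_cmd(1);
    enqueue_cmd(2);
    enqueue_cmd(-1);           /* ask the thread to exit */
    pthread_join(tid, NULL);
    sem_destroy(&q_sem);
    return 0;
}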

2. Camera Preview

Channel & Stream

Four components are involved:

  • QCamera3Channel
  • QCamera3Stream
  • mm_channel
  • mm_stream

First of all, an mm_channel is created when QCamera3HWI is initialized:

int QCamera3HardwareInterface::initialize(
        const struct camera3_callback_ops *callback_ops)
{
    ......

    mCallbackOps = callback_ops;

    mChannelHandle = mCameraHandle->ops->add_channel(
            mCameraHandle->camera_handle, NULL, NULL, this);
     ......       
}
 
 

This calls mm_camera_add_channel in mm_camera.c, which in turn calls mm_channel_init in mm_camera_channel.c:

int32_t mm_channel_init(mm_channel_t *my_obj,
                        mm_camera_channel_attr_t *attr,
                        mm_camera_buf_notify_t channel_cb,
                        void *userdata)
{
    int32_t rc = 0;

    my_obj->bundle.super_buf_notify_cb = channel_cb;
    my_obj->bundle.user_data = userdata;
    if (NULL != attr) {
        my_obj->bundle.superbuf_queue.attr = *attr;
    }

    LOGD("Launch data poll thread in channel open");
    snprintf(my_obj->poll_thread[0].threadName, THREAD_NAME_SIZE, "CAM_dataPoll");
    mm_camera_poll_thread_launch(&my_obj->poll_thread[0],
                                 MM_CAMERA_POLL_TYPE_DATA);

    /* change state to stopped state */
    my_obj->state = MM_CHANNEL_STATE_STOPPED;
    return rc;
}
 
 

Here the channel cb is NULL. Every mm_channel_t holds an array of poll threads, and the init method above launches this channel's poll_thread[0]. The notify_cb of that poll thread is assigned in mm_camera_stream.c: mm_camera_poll_thread_add_poll_fd is executed there with mm_stream_data_notify as the notify_cb. mm_stream_data_notify then calls mm_stream_dispatch_sync_data, which invokes a callback that is in fact QCamera3Stream's dataNotifyCB. This is how data from the mm layer is called back up to the HAL layer (see the simplified sketch below).
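
A highly simplified sketch of this "register an fd together with a notify_cb, then invoke the callback from the poll loop" pattern follows. The names poll_entry_t and stream_data_notify are illustrative only, not the real mm_camera_poll_thread_add_poll_fd API, and a pipe stands in for the stream's /dev/video fd.

#include <poll.h>
#include <stdio.h>
#include <unistd.h>

typedef void (*notify_cb_t)(void *user_data);

typedef struct {
    int fd;                 /* fd to watch, e.g. a stream's device fd */
    notify_cb_t notify_cb;  /* called when data is ready on fd */
    void *user_data;
} poll_entry_t;

/* stand-in for the callback chain that ends in QCamera3Stream::dataNotifyCB */
static void stream_data_notify(void *user_data)
{
    printf("data ready for stream: %s\n", (const char *)user_data);
}

int main(void)
{
    int fds[2];
    if (pipe(fds) < 0)             /* pipe read end plays the role of a video fd */
        return 1;

    poll_entry_t entry = { fds[0], stream_data_notify, (void *)"preview" };

    write(fds[1], "x", 1);         /* pretend the driver produced a frame */

    struct pollfd pfd = { .fd = entry.fd, .events = POLLIN };
    if (poll(&pfd, 1, -1) > 0 && (pfd.revents & POLLIN)) {
        /* the data poll thread calls the registered notify_cb here,
         * as mm_camera_poll_fn does for MM_CAMERA_POLL_TYPE_DATA */
        entry.notify_cb(entry.user_data);
    }

    close(fds[0]);
    close(fds[1]);
    return 0;
}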

Before preview can start, some configuration has to happen. In QCamera3HWI's configureStreamsPerfLocked, a series of QCamera3Channel objects are created: the metadata channel, YUV channel, support channel, and so on. When a channel is created, the QCamera3ProcessingChannel member QCamera3PostProcessor m_postprocessor is initialized via m_postprocessor.init, and this init launches a thread inside the postprocessor whose body is dataProcessRoutine, used by the channel to process data. In other words, each QCamera3Channel owns a data-processing thread.

Later, when a preview request is executed, i.e. in QCamera3HardwareInterface::processCaptureRequest, the following three steps are performed:

  • init channel
  • start channel
  • request channel

2.1 Init Channel

First, the channels are initialized; "channel" here refers to QCamera3Channel:

int QCamera3HardwareInterface::processCaptureRequest()
{
    ......
//First initialize all streams
        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
            if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
               ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
               setEis)
                rc = channel->initialize(is_type);
            else {
                rc = channel->initialize(IS_TYPE_NONE);
            }
            if (NO_ERROR != rc) {
                LOGE("Channel initialization failed %d", rc);
                pthread_mutex_unlock(&mMutex);
                goto error_exit;
            }
        }
    ......
}
 
 

During channel initialization, a QCamera3Stream is first added to the channel:

int32_t QCamera3YUVChannel::initialize(cam_is_type_t isType)
{
    ATRACE_CALL();
    int32_t rc = NO_ERROR;
    cam_dimension_t streamDim;

    if (NULL == mCamera3Stream) {
        LOGE("Camera stream uninitialized");
        return NO_INIT;
    }

    if (1 <= m_numStreams) {
        // Only one stream per channel supported in v3 Hal
        return NO_ERROR;
    }

    mIsType  = isType;
    mStreamFormat = getStreamDefaultFormat(CAM_STREAM_TYPE_CALLBACK,
            mCamera3Stream->width, mCamera3Stream->height);
    streamDim.width = mCamera3Stream->width;
    streamDim.height = mCamera3Stream->height;

    rc = QCamera3Channel::addStream(mStreamType,
            mStreamFormat,
            streamDim,
            ROTATE_0,
            mNumBufs,
            mPostProcMask,
            mIsType);
    if (rc < 0) {
        LOGE("addStream failed");
        return rc;
    }

    cam_stream_buf_plane_info_t buf_planes;
    cam_padding_info_t paddingInfo = mPaddingInfo;

    memset(&buf_planes, 0, sizeof(buf_planes));
    //to ensure a big enough buffer size set the height and width
    //padding to max(height padding, width padding)
    paddingInfo.width_padding = MAX(paddingInfo.width_padding, paddingInfo.height_padding);
    paddingInfo.height_padding = paddingInfo.width_padding;

    rc = mm_stream_calc_offset_snapshot(mStreamFormat, &streamDim, &paddingInfo,
            &buf_planes);
    if (rc < 0) {
        LOGE("mm_stream_calc_offset_preview failed");
        return rc;
    }

    mFrameLen = buf_planes.plane_info.frame_len;

    if (NO_ERROR != rc) {
        LOGE("Initialize failed, rc = %d", rc);
        return rc;
    }

    /* initialize offline meta memory for input reprocess */
    rc = QCamera3ProcessingChannel::initialize(isType);
    if (NO_ERROR != rc) {
        LOGE("Processing Channel initialize failed, rc = %d",
                 rc);
    }

    return rc;
}
 
 

Inside addStream, a new QCamera3Stream is created and initialized:

QCamera3Stream *pStream = new QCamera3Stream(m_camHandle,
                                               m_handle,
                                               m_camOps,
                                               &mPaddingInfo,
                                               this);
    if (pStream == NULL) {
        LOGE("No mem for Stream");
        return NO_MEMORY;
    }
    LOGD("batch size is %d", batchSize);

    rc = pStream->init(streamType, streamFormat, streamDim, streamRotation,
            NULL, minStreamBufNum, postprocessMask, isType, batchSize,
            streamCbRoutine, this);
 
 

QCamera3Stream::init calls mm_camera_add_stream in mm_camera.c:

uint32_t mm_camera_add_stream(mm_camera_obj_t *my_obj,
                              uint32_t ch_id)
{
    uint32_t s_hdl = 0;
    mm_channel_t * ch_obj =
        mm_camera_util_get_channel_by_handler(my_obj, ch_id);

    if (NULL != ch_obj) {
        pthread_mutex_lock(&ch_obj->ch_lock);
        pthread_mutex_unlock(&my_obj->cam_lock);

        mm_channel_fsm_fn(ch_obj,
                          MM_CHANNEL_EVT_ADD_STREAM,
                          NULL,
                          (void *)&s_hdl);
    } else {
        pthread_mutex_unlock(&my_obj->cam_lock);
    }

    return s_hdl;
}
 
 

It first looks up the mm_channel created earlier and passes it into mm_channel_fsm_fn. Recall that after that channel was created its state was changed to MM_CHANNEL_STATE_STOPPED, so inside mm_channel_fsm_fn the call is routed to mm_channel_fsm_fn_stopped, where the MM_CHANNEL_EVT_ADD_STREAM case is executed:

int32_t mm_channel_fsm_fn_stopped(mm_channel_t *my_obj,
                                  mm_channel_evt_type_t evt,
                                  void * in_val,
                                  void * out_val)
{
    int32_t rc = 0;
    LOGD("E evt = %d", evt);
    switch (evt) {
    case MM_CHANNEL_EVT_ADD_STREAM:
        {
            uint32_t s_hdl = 0;
            s_hdl = mm_channel_add_stream(my_obj);
            *((uint32_t*)out_val) = s_hdl;
            rc = 0;
        }
        break;
    case...

    ...
}
 
 

That is, mm_channel_add_stream(my_obj) is executed; following the mm_camera_stream state machine, this eventually reaches:

mm_camera_stream.c

int32_t mm_stream_fsm_inited(mm_stream_t *my_obj,
                             mm_stream_evt_type_t evt,
                             void * in_val,
                             void * out_val)
{
    int32_t rc = 0;
    char dev_name[MM_CAMERA_DEV_NAME_LEN];
    const char *dev_name_value = NULL;
    if (NULL == my_obj) {
      LOGE("NULL camera object\n");
      return -1;
    }

    LOGD("E, my_handle = 0x%x, fd = %d, state = %d",
          my_obj->my_hdl, my_obj->fd, my_obj->state);
    switch(evt) {
    case MM_STREAM_EVT_ACQUIRE:
        if ((NULL == my_obj->ch_obj) || (NULL == my_obj->ch_obj->cam_obj)) {
            LOGE("NULL channel or camera obj\n");
            rc = -1;
            break;
        }

        dev_name_value = mm_camera_util_get_dev_name(my_obj->ch_obj->cam_obj->my_hdl);
        if (NULL == dev_name_value) {
            LOGE("NULL device name\n");
            rc = -1;
            break;
        }

        snprintf(dev_name, sizeof(dev_name), "/dev/%s",
                 dev_name_value);

        my_obj->fd = open(dev_name, O_RDWR | O_NONBLOCK);
        if (my_obj->fd < 0) {
            LOGE("open dev returned %d\n", my_obj->fd);
            rc = -1;
            break;
        }
        LOGD("open dev fd = %d\n", my_obj->fd);
        rc = mm_stream_set_ext_mode(my_obj);
        if (0 == rc) {
            my_obj->state = MM_STREAM_STATE_ACQUIRED;
        } else {
            /* failed setting ext_mode
             * close fd */
            close(my_obj->fd);
            my_obj->fd = -1;
            break;
        }
        break;
    default:
        LOGE("invalid state (%d) for evt (%d), in(%p), out(%p)",
                    my_obj->state, evt, in_val, out_val);
        break;
    }
    return rc;
}
 
 

In the code above we can see that the stream performs an open operation: my_obj->fd = open(dev_name, O_RDWR | O_NONBLOCK);, which associates the stream with a video device node from which it can read data.

At this point we have built, from top to bottom, the association chain QCamera3Channel -> QCamera3Stream -> mm_channel -> mm_stream.

2.2 Start Channel

After channel initialization finishes, QCamera3HWI calls channel->start():

int32_t QCamera3Channel::start()
{
    ATRACE_CALL();
    int32_t rc = NO_ERROR;

    if (m_numStreams > 1) {
        LOGW("bundle not supported");
    } else if (m_numStreams == 0) {
        return NO_INIT;
    }

    if(m_bIsActive) {
        LOGW("Attempt to start active channel");
        return rc;
    }

    for (uint32_t i = 0; i < m_numStreams; i++) {
        if (mStreams[i] != NULL) {
            mStreams[i]->start();
        }
    }

    m_bIsActive = true;

    return rc;
}
 
 

That is, a QCamera3Channel owns only one QCamera3Stream, whose start method is then called:

int32_t QCamera3Stream::start()
{
    int32_t rc = 0;

    mDataQ.init();
    mTimeoutFrameQ.init();
    if (mBatchSize)
        mFreeBatchBufQ.init();
    rc = mProcTh.launch(dataProcRoutine, this);
    return rc;
}
 
 

Here the thread owned by QCamera3Stream as the member QCameraCmdThread mProcTh is launched; its thread body is:

void *QCamera3Stream::dataProcRoutine(void *data)
{
    int running = 1;
    int ret;
    QCamera3Stream *pme = (QCamera3Stream *)data;
    QCameraCmdThread *cmdThread = &pme->mProcTh;

    cmdThread->setName(mStreamNames[pme->mStreamInfo->stream_type]);

    LOGD("E");
    do {
        do {
            ret = cam_sem_wait(&cmdThread->cmd_sem);
            if (ret != 0 && errno != EINVAL) {
                LOGE("cam_sem_wait error (%s)",
                       strerror(errno));
                return NULL;
            }
        } while (ret != 0);

        // we got notified about new cmd avail in cmd queue
        camera_cmd_type_t cmd = cmdThread->getCmd();
        switch (cmd) {
        case CAMERA_CMD_TYPE_TIMEOUT:
            {
                int32_t bufIdx = (int32_t)(pme->mTimeoutFrameQ.dequeue());
                pme->cancelBuffer(bufIdx);
                break;
            }
        case CAMERA_CMD_TYPE_DO_NEXT_JOB:
            {
                LOGD("Do next job");
                mm_camera_super_buf_t *frame =
                    (mm_camera_super_buf_t *)pme->mDataQ.dequeue();
                if (NULL != frame) {
                    if (UNLIKELY(frame->bufs[0]->buf_type ==
                            CAM_STREAM_BUF_TYPE_USERPTR)) {
                        pme->handleBatchBuffer(frame);
                    } else if (pme->mDataCB != NULL) {
                        pme->mDataCB(frame, pme, pme->mUserData);
                    } else {
                        // no data cb routine, return buf here
                        pme->bufDone(frame->bufs[0]->buf_idx);
                    }
                }
            }
            break;
        case CAMERA_CMD_TYPE_EXIT:
            LOGH("Exit");
            /* flush data buf queue */
            pme->mDataQ.flush();
            pme->mTimeoutFrameQ.flush();
            pme->flushFreeBatchBufQ();
            running = 0;
            break;
        default:
            break;
        }
    } while (running);
    LOGD("X");
    return NULL;
}
 
 

In the CAMERA_CMD_TYPE_DO_NEXT_JOB case, an mm_camera_super_buf_t is dequeued from the stream's data queue and passed upward through mDataCB. That mDataCB is the QCamera3Channel::streamCbRoutine passed in when the stream was initialized, so the data is thrown up to the channel layer through this callback. Inside streamCbRoutine, the channel hands the data to its own processing thread by calling m_postprocessor.processData(frame, ppInfo->output, resultFrameNumber); processData enqueues the buffer so that the channel's thread, whose body is dataProcessRoutine, can process it.

At this point, the QCamera3Channel start operation is complete.

Next, the following is executed:

rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle, mChannelHandle);
 
 

which executes the following in mm_camera.c:

int32_t mm_camera_start_channel(mm_camera_obj_t *my_obj, uint32_t ch_id)
{
    int32_t rc = -1;
    mm_channel_t * ch_obj =
        mm_camera_util_get_channel_by_handler(my_obj, ch_id);

    if (NULL != ch_obj) {
        pthread_mutex_lock(&ch_obj->ch_lock);
        pthread_mutex_unlock(&my_obj->cam_lock);

        rc = mm_channel_fsm_fn(ch_obj,
                               MM_CHANNEL_EVT_START,
                               NULL,
                               NULL);
    } else {
        pthread_mutex_unlock(&my_obj->cam_lock);
    }

    return rc;
}
 
 

This enters the mm_channel state machine. Since the earlier add_stream did not change the channel's state, the case this time is still MM_CHANNEL_STATE_STOPPED:

int32_t mm_channel_fsm_fn(mm_channel_t *my_obj,
                          mm_channel_evt_type_t evt,
                          void * in_val,
                          void * out_val)
{
    int32_t rc = -1;

    LOGD("E state = %d", my_obj->state);
    switch (my_obj->state) {
    case MM_CHANNEL_STATE_NOTUSED:
        rc = mm_channel_fsm_fn_notused(my_obj, evt, in_val, out_val);
        break;
    case MM_CHANNEL_STATE_STOPPED:
        rc = mm_channel_fsm_fn_stopped(my_obj, evt, in_val, out_val);
        break;
    case MM_CHANNEL_STATE_ACTIVE:
        rc = mm_channel_fsm_fn_active(my_obj, evt, in_val, out_val);
        break;
    case MM_CHANNEL_STATE_PAUSED:
        rc = mm_channel_fsm_fn_paused(my_obj, evt, in_val, out_val);
        break;
    default:
        LOGD("Not a valid state (%d)", my_obj->state);
        break;
    }

    /* unlock ch_lock */
    pthread_mutex_unlock(&my_obj->ch_lock);
    LOGD("X rc = %d", rc);
    return rc;
}
 
 

Inside mm_channel_fsm_fn_stopped, the relevant case is:

case MM_CHANNEL_EVT_START:
        {
            rc = mm_channel_start(my_obj);
            /* first stream started in stopped state
             * move to active state */
            if (0 == rc) {
                my_obj->state = MM_CHANNEL_STATE_ACTIVE;
            }
        }
        break;
 
 

which takes us into mm_channel_start:

/*===========================================================================
 * FUNCTION   : mm_channel_start
 *
 * DESCRIPTION: start a channel, which will start all streams in the channel
 *
 * PARAMETERS :
 *   @my_obj       : channel object
 *
 * RETURN     : int32_t type of status
 *              0  -- success
 *              -1 -- failure
 *==========================================================================*/

int32_t mm_channel_start(mm_channel_t *my_obj)
{
    int32_t rc = 0;
    int i = 0, j = 0;
    mm_stream_t *s_objs[MAX_STREAM_NUM_IN_BUNDLE] = {NULL};
    uint8_t num_streams_to_start = 0;
    uint8_t num_streams_in_bundle_queue = 0;
    mm_stream_t *s_obj = NULL;
    int meta_stream_idx = 0;
    cam_stream_type_t stream_type = CAM_STREAM_TYPE_DEFAULT;

    for (i = 0; i < MAX_STREAM_NUM_IN_BUNDLE; i++) {
        if (my_obj->streams[i].my_hdl > 0) {
            s_obj = mm_channel_util_get_stream_by_handler(my_obj,
                                                          my_obj->streams[i].my_hdl);
            if (NULL != s_obj) {
                stream_type = s_obj->stream_info->stream_type;
                /* remember meta data stream index */
                if ((stream_type == CAM_STREAM_TYPE_METADATA) &&
                        (s_obj->ch_obj == my_obj)) {
                    meta_stream_idx = num_streams_to_start;
                }
                s_objs[num_streams_to_start++] = s_obj;

                if (!s_obj->stream_info->noFrameExpected) {
                    num_streams_in_bundle_queue++;
                }
            }
        }
    }

    if (meta_stream_idx > 0 ) {
        /* always start meta data stream first, so switch the stream object with the first one */
        s_obj = s_objs[0];
        s_objs[0] = s_objs[meta_stream_idx];
        s_objs[meta_stream_idx] = s_obj;
    }

    if (NULL != my_obj->bundle.super_buf_notify_cb) {
        /* need to send up cb, therefore launch thread */
        /* init superbuf queue */
        mm_channel_superbuf_queue_init(&my_obj->bundle.superbuf_queue);
        my_obj->bundle.superbuf_queue.num_streams = num_streams_in_bundle_queue;
        my_obj->bundle.superbuf_queue.expected_frame_id =
                my_obj->bundle.superbuf_queue.attr.user_expected_frame_id;
        my_obj->bundle.superbuf_queue.expected_frame_id_without_led = 0;
        my_obj->bundle.superbuf_queue.led_off_start_frame_id = 0;
        my_obj->bundle.superbuf_queue.led_on_start_frame_id = 0;
        my_obj->bundle.superbuf_queue.led_on_num_frames = 0;
        my_obj->bundle.superbuf_queue.good_frame_id = 0;

        for (i = 0; i < num_streams_to_start; i++) {
            /* Only bundle streams that belong to the channel */
            if(!(s_objs[i]->stream_info->noFrameExpected)) {
                if (s_objs[i]->ch_obj == my_obj) {
                    /* set bundled flag to streams */
                    s_objs[i]->is_bundled = 1;
                }
                my_obj->bundle.superbuf_queue.bundled_streams[j++] = s_objs[i]->my_hdl;
            }
        }

        /* launch cb thread for dispatching super buf through cb */
        snprintf(my_obj->cb_thread.threadName, THREAD_NAME_SIZE, "CAM_SuperBuf");
        mm_camera_cmd_thread_launch(&my_obj->cb_thread,
                                    mm_channel_dispatch_super_buf,
                                    (void*)my_obj);

        /* launch cmd thread for super buf dataCB */
        snprintf(my_obj->cmd_thread.threadName, THREAD_NAME_SIZE, "CAM_SuperBufCB");
        mm_camera_cmd_thread_launch(&my_obj->cmd_thread,
                                    mm_channel_process_stream_buf,
                                    (void*)my_obj);

        /* set flag to TRUE */
        my_obj->bundle.is_active = TRUE;
    }

    /* link any streams first before starting the rest of the streams */
    for (i = 0; i < num_streams_to_start; i++) {
        if (s_objs[i]->ch_obj != my_obj) {
            pthread_mutex_lock(&s_objs[i]->linked_stream->buf_lock);
            s_objs[i]->linked_stream->linked_obj = my_obj;
            s_objs[i]->linked_stream->is_linked = 1;
            pthread_mutex_unlock(&s_objs[i]->linked_stream->buf_lock);
            continue;
        }
    }

    for (i = 0; i < num_streams_to_start; i++) {
        if (s_objs[i]->ch_obj != my_obj) {
            continue;
        }
        /* all streams within a channel should be started at the same time */
        if (s_objs[i]->state == MM_STREAM_STATE_ACTIVE) {
            LOGE("stream already started idx(%d)", i);
            rc = -1;
            break;
        }

        /* allocate buf */
        rc = mm_stream_fsm_fn(s_objs[i],
                              MM_STREAM_EVT_GET_BUF,
                              NULL,
                              NULL);
        if (0 != rc) {
            LOGE("get buf failed at idx(%d)", i);
            break;
        }

        /* reg buf */
        rc = mm_stream_fsm_fn(s_objs[i],
                              MM_STREAM_EVT_REG_BUF,
                              NULL,
                              NULL);
        if (0 != rc) {
            LOGE("reg buf failed at idx(%d)", i);
            break;
        }

        /* start stream */
        rc = mm_stream_fsm_fn(s_objs[i],
                              MM_STREAM_EVT_START,
                              NULL,
                              NULL);
        if (0 != rc) {
            LOGE("start stream failed at idx(%d)", i);
            break;
        }
    }

    /* error handling */
    if (0 != rc) {
        /* unlink the streams first */
        for (j = 0; j < num_streams_to_start; j++) {
            if (s_objs[j]->ch_obj != my_obj) {
                pthread_mutex_lock(&s_objs[j]->linked_stream->buf_lock);
                s_objs[j]->linked_stream->is_linked = 0;
                s_objs[j]->linked_stream->linked_obj = NULL;
                pthread_mutex_unlock(&s_objs[j]->linked_stream->buf_lock);

                if (TRUE == my_obj->bundle.is_active) {
                    mm_channel_flush_super_buf_queue(my_obj, 0,
                            s_objs[i]->stream_info->stream_type);
                }
                memset(s_objs[j], 0, sizeof(mm_stream_t));
                continue;
            }
        }

        for (j = 0; j <= i; j++) {
            if ((NULL == s_objs[j]) || (s_objs[j]->ch_obj != my_obj)) {
                continue;
            }
            /* stop streams*/
            mm_stream_fsm_fn(s_objs[j],
                             MM_STREAM_EVT_STOP,
                             NULL,
                             NULL);

            /* unreg buf */
            mm_stream_fsm_fn(s_objs[j],
                             MM_STREAM_EVT_UNREG_BUF,
                             NULL,
                             NULL);

            /* put buf back */
            mm_stream_fsm_fn(s_objs[j],
                             MM_STREAM_EVT_PUT_BUF,
                             NULL,
                             NULL);
        }

        /* destroy super buf cmd thread */
        if (TRUE == my_obj->bundle.is_active) {
            /* first stop bundle thread */
            mm_camera_cmd_thread_release(&my_obj->cmd_thread);
            mm_camera_cmd_thread_release(&my_obj->cb_thread);

            /* deinit superbuf queue */
            mm_channel_superbuf_queue_deinit(&my_obj->bundle.superbuf_queue);

            /* memset super buffer queue info */
            my_obj->bundle.is_active = 0;
            memset(&my_obj->bundle.superbuf_queue, 0, sizeof(mm_channel_queue_t));
        }
    }
    my_obj->bWaitForPrepSnapshotDone = 0;
    if (my_obj->bundle.superbuf_queue.attr.enable_frame_sync) {
        LOGH("registering Channel obj %p", my_obj);
        mm_frame_sync_register_channel(my_obj);
    }
    return rc;
}

 
 

In the check if (NULL != my_obj->bundle.super_buf_notify_cb), super_buf_notify_cb is actually NULL here, because the cb passed into mm_channel_init was NULL; so the bundled super-buffer threads in that branch are not launched.
