MediaCodec and ACodec Notification Analysis

This article traces the notification flow between ACodec and MediaCodec. After the OMX component finishes decoding, ACodec::onOMXFillBufferDone is called back to pick up the decoded data.
In onOMXFillBufferDone, ACodec then notifies MediaCodec with notify->setInt32("what", CodecBase::kWhatDrainThisBuffer), the message sent to MediaCodec.
When MediaCodec receives this message it calls updateBuffers(kPortIndexOutput, msg) and, in async mode, notifies the Decoder from onOutputBufferAvailable().
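
The whole article boils down to one notify/reply round trip per output buffer. Below is a minimal, self-contained C++ sketch of that handshake; it is a toy model for illustration only (ToyCodec, ToyBuffer and the callback names are hypothetical, not framework APIs), showing how ownership of a buffer moves from the component to the codec, then downstream with the notification, and back to the component once the reply fires.

    // Toy model of the ACodec <-> MediaCodec buffer round trip (illustration only).
    #include <cstdio>
    #include <functional>
    #include <vector>

    enum class Owner { COMPONENT, CODEC, DOWNSTREAM };

    struct ToyBuffer {
        int id;
        Owner owner;
    };

    struct ToyCodec {
        std::vector<ToyBuffer> buffers{{0, Owner::COMPONENT}, {1, Owner::COMPONENT}};

        // plays the role of onOMXFillBufferDone + the kWhatDrainThisBuffer notify
        void onFillBufferDone(int id,
                const std::function<void(ToyBuffer&, std::function<void()>)>& notifyClient) {
            ToyBuffer& buf = buffers[id];
            buf.owner = Owner::CODEC;                        // taken back from the component
            auto reply = [this, id] { onOutputBufferDrained(id); }; // the "reply" message
            buf.owner = Owner::DOWNSTREAM;                   // handed to the client with the notify
            notifyClient(buf, reply);
        }

        // plays the role of onOutputBufferDrained: the reply came back, refill the buffer
        void onOutputBufferDrained(int id) {
            buffers[id].owner = Owner::COMPONENT;            // resubmitted (fillBuffer)
            std::printf("buffer %d resubmitted to the component\n", id);
        }
    };

    int main() {
        ToyCodec codec;
        // the client (MediaCodec/Decoder in the real flow) consumes the buffer, then fires the reply
        codec.onFillBufferDone(0, [](ToyBuffer& buf, std::function<void()> reply) {
            std::printf("client got buffer %d, rendering...\n", buf.id);
            reply();                                         // like info->mNotify->post() in MediaCodec
        });
        return 0;
    }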


1. ACodec's onOMXFillBufferDone

    bool ACodec::BaseState::onOMXFillBufferDone(
    IOMX::buffer_id bufferID,
    size_t rangeOffset, size_t rangeLength,
    OMX_U32 flags,
    int64_t timeUs,
    int fenceFd) {
    ALOGV("[%s] onOMXFillBufferDone %u time %" PRId64 " us, flags = 0x%08x",
    mCodec->mComponentName.c_str(), bufferID, timeUs, flags);
    ssize_t index;
    status_t err= OK;
    #if TRACK_BUFFER_TIMING
    index = mCodec->mBufferStats.indexOfKey(timeUs);
    if (index >= 0) {
    ACodec::BufferStats *stats = &mCodec->mBufferStats.editValueAt(index);
    stats->mFillBufferDoneTimeUs = ALooper::GetNowUs();
    ALOGI("frame PTS %lld: %lld",
    timeUs,
    stats->mFillBufferDoneTimeUs - stats->mEmptyBufferTimeUs);
    mCodec->mBufferStats.removeItemsAt(index);
    stats = NULL;
    }
    #endif
    BufferInfo *info =
    mCodec->findBufferByID(kPortIndexOutput, bufferID, &index); // find ACodec's BufferInfo by bufferID
    BufferInfo::Status status = BufferInfo::getSafeStatus(info);
    if (status != BufferInfo::OWNED_BY_COMPONENT) {
    ALOGE("Wrong ownership in FBD: %s(%d) buffer #%u", _asString(status), status, bufferID);
    mCodec->dumpBuffers(kPortIndexOutput);
    mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
    if (fenceFd >= 0) {
    ::close(fenceFd);
    }
    return true;
    }
    info->mDequeuedAt = ++mCodec->mDequeueCounter;
    info->mStatus = BufferInfo::OWNED_BY_US;
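    // ACodec owns the buffer again; what happens next depends on the output port mode below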
    if (info->mRenderInfo != NULL) {
    // The fence for an emptied buffer must have signaled, but there still could be queued
    // or out-of-order dequeued buffers in the render queue prior to this buffer. Drop these,
    // as we will soon requeue this buffer to the surface. While in theory we could still keep
    // track of buffers that are requeued to the surface, it is better to add support to the
    // buffer-queue to notify us of released buffers and their fences (in the future).
    mCodec->notifyOfRenderedFrames(true /* dropIncomplete */);
    }
    // byte buffers cannot take fences, so wait for any fence now
    if (mCodec->mNativeWindow == NULL) {
    (void)mCodec->waitForFence(fenceFd, "onOMXFillBufferDone");
    fenceFd = -1;
    }
    info->setReadFence(fenceFd, "onOMXFillBufferDone");
    PortMode mode = getPortMode(kPortIndexOutput);
    switch (mode) {
    case KEEP_BUFFERS:
    break;
    case RESUBMIT_BUFFERS:
    {
    if (rangeLength == 0 && (!(flags & OMX_BUFFERFLAG_EOS)
    || mCodec->mPortEOS[kPortIndexOutput])) {
    ALOGV("[%s] calling fillBuffer %u",
    mCodec->mComponentName.c_str(), info->mBufferID);
    err = mCodec->mOMX->fillBuffer(mCodec->mNode, info->mBufferID, info->mFenceFd); // resubmit the output buffer to the component
    info->mFenceFd = -1;
    if (err != OK) {
    mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
    return true;
    }
    info->mStatus = BufferInfo::OWNED_BY_COMPONENT;
    break;
    }
    sp<AMessage> reply =
    new AMessage(kWhatOutputBufferDrained, mCodec); // ACodec creates the reply message that will eventually be handed to MediaCodec
    if (!mCodec->mSentFormat && rangeLength > 0) {
    mCodec->sendFormatChange(reply);
    }
    if (mCodec->usingMetadataOnEncoderOutput()) {
    native_handle_t *handle = NULL;
    VideoGrallocMetadata &grallocMeta = *(VideoGrallocMetadata *)info->mData->data();
    VideoNativeMetadata &nativeMeta = *(VideoNativeMetadata *)info->mData->data();
    if (info->mData->size() >= sizeof(grallocMeta)
    && grallocMeta.eType == kMetadataBufferTypeGrallocSource) {
    handle = (native_handle_t *)(uintptr_t)grallocMeta.pHandle;
    } else if (info->mData->size() >= sizeof(nativeMeta)
    && nativeMeta.eType == kMetadataBufferTypeANWBuffer) {
    #ifdef OMX_ANDROID_COMPILE_AS_32BIT_ON_64BIT_PLATFORMS
    // ANativeWindowBuffer is only valid on 32-bit/mediaserver process
    handle = NULL;
    #else
    handle = (native_handle_t *)nativeMeta.pBuffer->handle;
    #endif
    }
    info->mData->meta()->setPointer("handle", handle);
    info->mData->meta()->setInt32("rangeOffset", rangeOffset);
    info->mData->meta()->setInt32("rangeLength", rangeLength);
    } else {
    info->mData->setRange(rangeOffset, rangeLength);
    }
    #if 0
    if (mCodec->mNativeWindow == NULL) {
    if (IsIDR(info->mData)) {
    ALOGI("IDR frame");
    }
    }
    #endif
    if (mCodec->mSkipCutBuffer != NULL) {
    mCodec->mSkipCutBuffer->submit(info->mData);
    }
    info->mData->meta()->setInt64("timeUs", timeUs);
    info->mData->meta()->setObject("graphic-buffer", info->mGraphicBuffer); // store the graphic buffer in the meta (sp<AMessage>) of info->mData (sp<ABuffer>)
    sp<AMessage> notify = mCodec->mNotify->dup();
    notify->setInt32("what", CodecBase::kWhatDrainThisBuffer);//发给MediaCodec的消息
    notify->setInt32("buffer-id", info->mBufferID);
    notify->setBuffer("buffer", info->mData);
    notify->setInt32("flags", flags);
    reply->setInt32("buffer-id", info->mBufferID);//reply在发给(传给)MediaCodec的BufferInfo的mNotify的时候,已经设置的buffer-id
    (void)mCodec->setDSModeHint(reply, flags, timeUs);
    notify->setMessage("reply", reply);//把reply设进去,用于MediaCodec向ACodec发消息
    notify->post();//notify发出去之后MediaCodec会处理
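    // the buffer now belongs to MediaCodec (downstream) until the reply message hands it back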
    info->mStatus = BufferInfo::OWNED_BY_DOWNSTREAM;
    if (flags & OMX_BUFFERFLAG_EOS) {
    ALOGV("[%s] saw output EOS", mCodec->mComponentName.c_str());
    sp<AMessage> notify = mCodec->mNotify->dup();
    notify->setInt32("what", CodecBase::kWhatEOS);
    notify->setInt32("err", mCodec->mInputEOSResult);
    notify->post();
    mCodec->mPortEOS[kPortIndexOutput] = true;
    }
    break;
    }
    case FREE_BUFFERS:
    err = mCodec->freeBuffer(kPortIndexOutput, index);
    if (err != OK) {
    mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
    return true;
    }
    break;
    default:
    ALOGE("Invalid port mode: %d", mode);
    return false;
    }
    return true;
    }

 

 

2. After MediaCodec receives the message from ACodec it calls updateBuffers(kPortIndexOutput, msg) and, in async mode, notifies the Decoder via onOutputBufferAvailable().

    case CodecBase::kWhatDrainThisBuffer:
    {
    /* size_t index = */ updateBuffers(kPortIndexOutput, msg); // store the reply in the matching BufferInfo (see 2.1)
    if (mState == FLUSHING
    || mState == STOPPING
    || mState == RELEASING) {
    returnBuffersToCodecOnPort(kPortIndexOutput);
    break;
    }
    sp<ABuffer> buffer;
    CHECK(msg->findBuffer("buffer", &buffer));
    int32_t omxFlags;
    CHECK(msg->findInt32("flags", &omxFlags));
    buffer->meta()->setInt32("omxFlags", omxFlags);
    if (mFlags & kFlagGatherCodecSpecificData) {
    // This is the very first output buffer after a
    // format change was signalled, it'll either contain
    // the one piece of codec specific data we can expect
    // or there won't be codec specific data.
    if (omxFlags & OMX_BUFFERFLAG_CODECCONFIG) {
    status_t err =
    amendOutputFormatWithCodecSpecificData(buffer);
    if (err != OK) {
    ALOGE("Codec spit out malformed codec "
    "specific data!");
    }
    }
    mFlags &= ~kFlagGatherCodecSpecificData;
    if (mFlags & kFlagIsAsync) {
    onOutputFormatChanged();
    } else {
    mFlags |= kFlagOutputFormatChanged;
    }
    }
    if (mFlags & kFlagIsAsync) {
    onOutputBufferAvailable(); // notify the Decoder
    } else if (mFlags & kFlagDequeueOutputPending) {
    CHECK(handleDequeueOutputBuffer(mDequeueOutputReplyID));
    ++mDequeueOutputTimeoutGeneration;
    mFlags &= ~kFlagDequeueOutputPending;
    mDequeueOutputReplyID = 0;
    } else {
    postActivityNotificationIfPossible();
    }
    break;
    }

 

 

2.1 MediaCodec::updateBuffers finds the reply that arrived with the message and stores it in MediaCodec's BufferInfo::mNotify.

    size_t MediaCodec::updateBuffers(
    int32_t portIndex, const sp<AMessage> &msg) {
    CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);
    uint32_t bufferID;
    CHECK(msg->findInt32("buffer-id", (int32_t*)&bufferID));
    Mutex::Autolock al(mBufferLock);
    Vector<BufferInfo> *buffers = &mPortBuffers[portIndex];
    for (size_t i = 0; i < buffers->size(); ++i) {
    BufferInfo *info = &buffers->editItemAt(i);
    if (info->mBufferID == bufferID) {
    CHECK(info->mNotify == NULL);
    CHECK(msg->findMessage("reply", &info->mNotify)); // extract the reply carried by the message and store it in this BufferInfo's mNotify
    info->mFormat =
    (portIndex == kPortIndexInput) ? mInputFormat : mOutputFormat;
    mAvailPortBuffers[portIndex].push_back(i);
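    // make this index available to dequeuePortBuffer(), used by onOutputBufferAvailable() / handleDequeueOutputBuffer()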
    return i;
    }
    }
    TRESPASS();
    return 0;
    }

 

 

2.2 MediaCodec::onOutputBufferAvailable notifies the Decoder that an output buffer is available.


    void MediaCodec::onOutputBufferAvailable() {
    int32_t index;
    while ((index = dequeuePortBuffer(kPortIndexOutput)) >= 0) {
    const sp<ABuffer> &buffer =
    mPortBuffers[kPortIndexOutput].itemAt(index).mData;
    sp<AMessage> msg = mCallback->dup(); // message sent to the Decoder (kWhatCodecNotify)
    msg->setInt32("callbackID", CB_OUTPUT_AVAILABLE); // CB_OUTPUT_AVAILABLE tells the Decoder an output buffer is available
    msg->setInt32("index", index);
    msg->setSize("offset", buffer->offset());
    msg->setSize("size", buffer->size());
    int64_t timeUs;
    CHECK(buffer->meta()->findInt64("timeUs", &timeUs));
    msg->setInt64("timeUs", timeUs);
    int32_t omxFlags;
    CHECK(buffer->meta()->findInt32("omxFlags", &omxFlags));
    uint32_t flags = 0;
    if (omxFlags & OMX_BUFFERFLAG_SYNCFRAME) {
    flags |= BUFFER_FLAG_SYNCFRAME;
    }
    if (omxFlags & OMX_BUFFERFLAG_CODECCONFIG) {
    flags |= BUFFER_FLAG_CODECCONFIG;
    }
    if (omxFlags & OMX_BUFFERFLAG_EOS) {
    flags |= BUFFER_FLAG_EOS;
    }
    msg->setInt32("flags", flags);
    msg->post();
    }
    }
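
For context, the mCallback used above is the AMessage that the Decoder registered as its notification target when configuring the codec; registering it is also what switches MediaCodec into async mode (kFlagIsAsync). A rough sketch of that wiring, paraphrased from NuPlayer::Decoder::onConfigure of this Android generation (details may differ across versions):

    // sketch: the Decoder makes itself the target of MediaCodec's CB_* callbacks
    sp<AMessage> reply = new AMessage(kWhatCodecNotify, this);
    mCodec->setCallback(reply);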

 


3. When the Decoder receives the kWhatCodecNotify callback message posted by MediaCodec::onOutputBufferAvailable, it knows an output buffer is available and calls handleAnOutputBuffer.

    void NuPlayer::Decoder::onMessageReceived(const sp<AMessage> &msg) {
    ALOGV("[%s] onMessage: %s", mComponentName.c_str(), msg->debugString().c_str());
    switch (msg->what()) {
    case kWhatCodecNotify: // callback message posted back by MediaCodec
    {
    int32_t cbID;
    CHECK(msg->findInt32("callbackID", &cbID));
    ALOGV("[%s] kWhatCodecNotify: cbID = %d, paused = %d",
    mIsAudio ? "audio" : "video", cbID, mPaused);
    if (mPaused) {
    break;
    }
    switch (cbID) {
    case MediaCodec::CB_INPUT_AVAILABLE:
    {
    int32_t index;
    CHECK(msg->findInt32("index", &index));
    handleAnInputBuffer(index);
    break;
    }
    case MediaCodec::CB_OUTPUT_AVAILABLE: // an output buffer is available
    {
    int32_t index;
    size_t offset;
    size_t size;
    int64_t timeUs;
    int32_t flags;
    CHECK(msg->findInt32("index", &index));
    CHECK(msg->findSize("offset", &offset));
    CHECK(msg->findSize("size", &size));
    CHECK(msg->findInt64("timeUs", &timeUs));
    CHECK(msg->findInt32("flags", &flags));
    handleAnOutputBuffer(index, offset, size, timeUs, flags); // process the output buffer
    break;
    }
    case MediaCodec::CB_OUTPUT_FORMAT_CHANGED:
    {
    sp<AMessage> format;
    CHECK(msg->findMessage("format", &format));
    handleOutputFormatChange(format);
    break;
    }
    case MediaCodec::CB_ERROR:
    {
    status_t err;
    CHECK(msg->findInt32("err", &err));
    ALOGE("Decoder (%s) reported error : 0x%x",
    mIsAudio ? "audio" : "video", err);
    handleError(err);
    break;
    }
    default:
    {
    TRESPASS();
    break;
    }
    }
    break;
    }
    case kWhatRenderBuffer:
    {
    if (!isStaleReply(msg)) {
    onRenderBuffer(msg);
    }
    break;
    }
    case kWhatSetVideoSurface:
    {
    sp<AReplyToken> replyID;
    CHECK(msg->senderAwaitsResponse(&replyID));
    sp<RefBase> obj;
    CHECK(msg->findObject("surface", &obj));
    sp<Surface> surface = static_cast<Surface *>(obj.get()); // non-null
    int32_t err = INVALID_OPERATION;
    // NOTE: in practice mSurface is always non-null, but checking here for completeness
    if (mCodec != NULL && mSurface != NULL) {
    // TODO: once AwesomePlayer is removed, remove this automatic connecting
    // to the surface by MediaPlayerService.
    //
    // at this point MediaPlayerService::client has already connected to the
    // surface, which MediaCodec does not expect
    err = native_window_api_disconnect(surface.get(), NATIVE_WINDOW_API_MEDIA);
    if (err == OK) {
    err = mCodec->setSurface(surface);
    ALOGI_IF(err, "codec setSurface returned: %d", err);
    if (err == OK) {
    // reconnect to the old surface as MPS::Client will expect to
    // be able to disconnect from it.
    (void)native_window_api_connect(mSurface.get(), NATIVE_WINDOW_API_MEDIA);
    mSurface = surface;
    }
    }
    if (err != OK) {
    // reconnect to the new surface on error as MPS::Client will expect to
    // be able to disconnect from it.
    (void)native_window_api_connect(surface.get(), NATIVE_WINDOW_API_MEDIA);
    }
    }
    sp<AMessage> response = new AMessage;
    response->setInt32("err", err);
    response->postReply(replyID);
    break;
    }
    default:
    DecoderBase::onMessageReceived(msg);
    break;
    }
    }

 


4. Decoder::handleAnOutputBuffer; afterwards the Decoder interacts with the Renderer to decide whether the frame should be rendered. See the companion article "NuPlayerDecoder与NuPlayerRender分析" (an analysis of NuPlayerDecoder and NuPlayerRenderer).

    bool NuPlayer::Decoder::handleAnOutputBuffer(
    size_t index,
    size_t offset,
    size_t size,
    int64_t timeUs,
    int32_t flags) {
    // CHECK_LT(bufferIx, mOutputBuffers.size());
    sp<ABuffer> buffer;
    mCodec->getOutputBuffer(index, &buffer); // look up the ABuffer for this index
    if (index >= mOutputBuffers.size()) {
    for (size_t i = mOutputBuffers.size(); i <= index; ++i) {
    mOutputBuffers.add();
    }
    }
    mOutputBuffers.editItemAt(index) = buffer;
    buffer->setRange(offset, size);
    buffer->meta()->clear();
    buffer->meta()->setInt64("timeUs", timeUs);
    setPcmFormat(buffer->meta());
    bool eos = flags & MediaCodec::BUFFER_FLAG_EOS;
    // we do not expect CODECCONFIG or SYNCFRAME for decoder
    sp<AMessage> reply = new AMessage(kWhatRenderBuffer, this); // callback message the Decoder gives the Renderer
    reply->setSize("buffer-ix", index);
    reply->setInt32("generation", mBufferGeneration);
    if (eos) {
    ALOGI("[%s] saw output EOS", mIsAudio ? "audio" : "video");
    buffer->meta()->setInt32("eos", true);
    reply->setInt32("eos", true);
    } else if (mSkipRenderingUntilMediaTimeUs >= 0) {
    if (timeUs < mSkipRenderingUntilMediaTimeUs) {
    ALOGV("[%s] dropping buffer at time %lld as requested.",
    mComponentName.c_str(), (long long)timeUs);
    reply->post();
    return true;
    }
    mSkipRenderingUntilMediaTimeUs = -1;
    }
    mNumFramesTotal += !mIsAudio;
    // wait until 1st frame comes out to signal resume complete
    notifyResumeCompleteIfNecessary();
    if (mRenderer != NULL) {
    // send the buffer to renderer.
    mRenderer->queueBuffer(mIsAudio, buffer, reply); // hand the buffer to the Renderer; note the reply message passed along
    if (eos && !isDiscontinuityPending()) {
    mRenderer->queueEOS(mIsAudio, ERROR_END_OF_STREAM);
    }
    }
    return true;
    }
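
Before moving on, it helps to see how the Renderer eventually fires the reply created above. Roughly (paraphrased from NuPlayer::Renderer::onDrainVideoQueue of this era; exact code may differ), once the Renderer decides whether the frame is on time it posts the reply back with the render decision and the render timestamp:

    // sketch: the Renderer consumes the QueueEntry and posts the reply back to the Decoder
    entry->mNotifyConsumed->setInt64("timestampNs", realTimeUs * 1000);
    entry->mNotifyConsumed->setInt32("render", !tooLate);
    entry->mNotifyConsumed->post(); // arrives at Decoder::onMessageReceived as kWhatRenderBuffer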

 

 

5. The Decoder receives the consumption message the Renderer posts back for the buffer (a QueueEntry in mVideoQueue) and, in Decoder::onRenderBuffer, calls MediaCodec's mCodec->renderOutputBufferAndRelease(bufferIx, timestampNs) or mCodec->releaseOutputBuffer(bufferIx).

5.1 The Decoder receives the message posted back by the Renderer for the buffer (a QueueEntry in mVideoQueue) and consumes it.

    case kWhatRenderBuffer:
    {
    if (!isStaleReply(msg)) {
    onRenderBuffer(msg);
    }
    break;
    }


5.2 Decoder::onRenderBuffer

    void NuPlayer::Decoder::onRenderBuffer(const sp<AMessage> &msg) {
    status_t err;
    int32_t render;
    size_t bufferIx;
    int32_t eos;
    CHECK(msg->findSize("buffer-ix", &bufferIx));//找到buffer-ix
    if (!mIsAudio) {
    int64_t timeUs;
    sp<ABuffer> buffer = mOutputBuffers[bufferIx];
    buffer->meta()->findInt64("timeUs", &timeUs);
    if (mCCDecoder != NULL && mCCDecoder->isSelected()) {
    mCCDecoder->display(timeUs);
    }
    }
    if (msg->findInt32("render", &render) && render) {
    int64_t timestampNs;
    CHECK(msg->findInt64("timestampNs", &timestampNs));
    err = mCodec->renderOutputBufferAndRelease(bufferIx, timestampNs); // ask MediaCodec to render and then release
    } else {
    mNumOutputFramesDropped += !mIsAudio;
    err = mCodec->releaseOutputBuffer(bufferIx); // release without rendering
    }
    if (err != OK) {
    ALOGE("failed to release output buffer for %s (err=%d)",
    mComponentName.c_str(), err);
    handleError(err);
    }
    if (msg->findInt32("eos", &eos) && eos
    && isDiscontinuityPending()) {
    finishHandleDiscontinuity(true /* flushOnTimeChange */);
    }
    }


6. From the Decoder to MediaCodec
MediaCodec::renderOutputBufferAndRelease(bufferIx, timestampNs) actually renders the buffer, while MediaCodec::releaseOutputBuffer(bufferIx) releases it without rendering.
Both eventually end up in MediaCodec::onReleaseOutputBuffer(msg).
   
    status_t MediaCodec::renderOutputBufferAndRelease(size_t index, int64_t timestampNs) {
        sp<AMessage> msg = new AMessage(kWhatReleaseOutputBuffer, this); // post the message
        msg->setSize("index", index);
        msg->setInt32("render", true); // request rendering
        msg->setInt64("timestampNs", timestampNs); // render timestamp

        sp<AMessage> response;
        return PostAndAwaitResponse(msg, &response);
    }


    status_t MediaCodec::releaseOutputBuffer(size_t index) {
        sp<AMessage> msg = new AMessage(kWhatReleaseOutputBuffer, this); // post the message, no "render" flag
        msg->setSize("index", index);

        sp<AMessage> response;
        return PostAndAwaitResponse(msg, &response);
    }


The kWhatReleaseOutputBuffer message then reaches MediaCodec, which calls onReleaseOutputBuffer(msg):
        case kWhatReleaseOutputBuffer:
        {
            sp<AReplyToken> replyID;
            CHECK(msg->senderAwaitsResponse(&replyID));


            if (!isExecuting()) {
                PostReplyWithError(replyID, INVALID_OPERATION);
                break;
            } else if (mFlags & kFlagStickyError) {
                PostReplyWithError(replyID, getStickyError());
                break;
            }


            status_t err = onReleaseOutputBuffer(msg); // handled by onReleaseOutputBuffer(msg)


            PostReplyWithError(replyID, err);
            break;
        }


MediaCodec::onReleaseOutputBuffer:

    status_t MediaCodec::onReleaseOutputBuffer(const sp<AMessage> &msg) {
    size_t index;
    CHECK(msg->findSize("index", &index));
    int32_t render;
    if (!msg->findInt32("render", &render)) { //设置了render为true则渲染,否则不渲染直接release buffer
    render = 0;
    }
    if (!isExecuting()) {
    return -EINVAL;
    }
    if (index >= mPortBuffers[kPortIndexOutput].size()) {
    return -ERANGE;
    }
    BufferInfo *info = &mPortBuffers[kPortIndexOutput].editItemAt(index);
    if (info->mNotify == NULL || !info->mOwnedByClient) {
    return -EACCES;
    }
    // synchronization boundary for getBufferAndFormat
    {
    Mutex::Autolock al(mBufferLock);
    info->mOwnedByClient = false;
    }
    if (render && info->mData != NULL && info->mData->size() != 0) { // was rendering requested?
    info->mNotify->setInt32("render", true); // set render=true on the reply going back to ACodec
    int64_t mediaTimeUs = -1;
    info->mData->meta()->findInt64("timeUs", &mediaTimeUs);
    int64_t renderTimeNs = 0;
    if (!msg->findInt64("timestampNs", &renderTimeNs)) {//Renderer给的timestampNs
    // use media timestamp if client did not request a specific render timestamp
    ALOGV("using buffer PTS of %lld", (long long)mediaTimeUs);
    renderTimeNs = mediaTimeUs * 1000;
    }
    info->mNotify->setInt64("timestampNs", renderTimeNs);
    if (mSoftRenderer != NULL) { // if a SoftwareRenderer was set up, render in software here; otherwise let ACodec do hardware rendering
    std::list<FrameRenderTracker::Info> doneFrames = mSoftRenderer->render(
    info->mData->data(), info->mData->size(),
    mediaTimeUs, renderTimeNs, NULL, info->mFormat);
    // if we are running, notify rendered frames
    if (!doneFrames.empty() && mState == STARTED && mOnFrameRenderedNotification != NULL) {
    sp<AMessage> notify = mOnFrameRenderedNotification->dup();
    sp<AMessage> data = new AMessage;
    if (CreateFramesRenderedMessage(doneFrames, data)) {
    notify->setMessage("data", data);
    notify->post();
    }
    }
    }
    }
    info->mNotify->post(); // info->mNotify holds the reply message from ACodec; ACodec does the hardware rendering, and whether rendering is hardware or software the buffer ends up refilled via fillBuffer
    info->mNotify = NULL;
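    // clear mNotify so updateBuffers() can store the next reply for this buffer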
    return OK;
    }


7. From MediaCodec to ACodec: MediaCodec::onReleaseOutputBuffer decides between hardware and software rendering and posts the reply message stored in info->mNotify (the one ACodec handed over earlier) back to ACodec, which receives and handles it.
On receiving it, ACodec calls onOutputBufferDrained(msg), where the actual hardware rendering happens.

    bool ACodec::BaseState::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
    case kWhatInputBufferFilled:
    {
    onInputBufferFilled(msg);
    break;
    }
    case kWhatOutputBufferDrained: // the reply message posted by MediaCodec after consuming the output buffer
    {
    onOutputBufferDrained(msg);
    break;
    }
    case ACodec::kWhatOMXMessageList:
    {
    return checkOMXMessage(msg) ? onOMXMessageList(msg) : true;
    }
    case ACodec::kWhatOMXMessageItem:
    {
    // no need to check as we already did it for kWhatOMXMessageList
    return onOMXMessage(msg);
    }
    case ACodec::kWhatOMXMessage:
    {
    return checkOMXMessage(msg) ? onOMXMessage(msg) : true;
    }
    case ACodec::kWhatSetSurface:
    {
    sp<AReplyToken> replyID;
    CHECK(msg->senderAwaitsResponse(&replyID));
    sp<RefBase> obj;
    CHECK(msg->findObject("surface", &obj));
    status_t err = mCodec->handleSetSurface(static_cast<Surface *>(obj.get()));
    sp<AMessage> response = new AMessage;
    response->setInt32("err", err);
    response->postReply(replyID);
    break;
    }
    case ACodec::kWhatCreateInputSurface:
    case ACodec::kWhatSetInputSurface:
    case ACodec::kWhatSignalEndOfInputStream:
    {
    // This may result in an app illegal state exception.
    ALOGE("Message 0x%x was not handled", msg->what());
    mCodec->signalError(OMX_ErrorUndefined, INVALID_OPERATION);
    return true;
    }
    case ACodec::kWhatOMXDied:
    {
    // This will result in kFlagSawMediaServerDie handling in MediaCodec.
    ALOGE("OMX/mediaserver died, signalling error!");
    mCodec->signalError(OMX_ErrorResourcesLost, DEAD_OBJECT);
    break;
    }
    case ACodec::kWhatReleaseCodecInstance:
    {
    ALOGI("[%s] forcing the release of codec",
    mCodec->mComponentName.c_str());
    status_t err = mCodec->mOMX->freeNode(mCodec->mNode);
    ALOGE_IF("[%s] failed to release codec instance: err=%d",
    mCodec->mComponentName.c_str(), err);
    sp<AMessage> notify = mCodec->mNotify->dup();
    notify->setInt32("what", CodecBase::kWhatShutdownCompleted);
    notify->post();
    break;
    }
    default:
    return false;
    }
    return true;
    }

 

8. ACodec::onOutputBufferDrained(msg), where the actual hardware rendering happens.

    void ACodec::BaseState::onOutputBufferDrained(const sp<AMessage> &msg) {
    IOMX::buffer_id bufferID;
    CHECK(msg->findInt32("buffer-id", (int32_t*)&bufferID));//找到bufferID
    ssize_t index;
    BufferInfo *info = mCodec->findBufferByID(kPortIndexOutput, bufferID, &index); // find ACodec's BufferInfo by bufferID
    BufferInfo::Status status = BufferInfo::getSafeStatus(info);
    if (status != BufferInfo::OWNED_BY_DOWNSTREAM) {
    ALOGE("Wrong ownership in OBD: %s(%d) buffer #%u", _asString(status), status, bufferID);
    mCodec->dumpBuffers(kPortIndexOutput);
    mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
    return;
    }
    android_native_rect_t crop;
    if (msg->findRect("crop", &crop.left, &crop.top, &crop.right, &crop.bottom)) {
    status_t err = native_window_set_crop(mCodec->mNativeWindow.get(), &crop);
    ALOGW_IF(err != NO_ERROR, "failed to set crop: %d", err);
    }
    bool skip = mCodec->getDSModeHint(msg);
    int32_t render;
    if (!skip && mCodec->mNativeWindow != NULL // hardware rendering only when mNativeWindow != NULL; software rendering was already handled by the SoftwareRenderer in MediaCodec::onReleaseOutputBuffer
    && msg->findInt32("render", &render) && render != 0
    && info->mData != NULL && info->mData->size() != 0) {
    ATRACE_NAME("render");
    // The client wants this buffer to be rendered.
    // save buffers sent to the surface so we can get render time when they return
    int64_t mediaTimeUs = -1;
    info->mData->meta()->findInt64("timeUs", &mediaTimeUs);
    if (mediaTimeUs >= 0) {
    mCodec->mRenderTracker.onFrameQueued(
    mediaTimeUs, info->mGraphicBuffer, new Fence(::dup(info->mFenceFd)));
    }
    int64_t timestampNs = 0;
    if (!msg->findInt64("timestampNs", ×tampNs)) {
    // use media timestamp if client did not request a specific render timestamp
    if (info->mData->meta()->findInt64("timeUs", &timestampNs)) {
    ALOGV("using buffer PTS of %lld", (long long)timestampNs);
    timestampNs *= 1000;
    }
    }
    status_t err;
    err = native_window_set_buffers_timestamp(mCodec->mNativeWindow.get(), timestampNs); // pass timestampNs to the native window
    ALOGW_IF(err != NO_ERROR, "failed to set buffer timestamp: %d", err);
    info->checkReadFence("onOutputBufferDrained before queueBuffer");
    err = mCodec->mNativeWindow->queueBuffer(
    mCodec->mNativeWindow.get(), info->mGraphicBuffer.get(), info->mFenceFd); // queue the buffer to mNativeWindow (the Surface) for rendering
    info->mFenceFd = -1;
    if (err == OK) {
    info->mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW;
    } else {
    ALOGE("queueBuffer failed in onOutputBufferDrained: %d", err);
    mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
    info->mStatus = BufferInfo::OWNED_BY_US;
    // keeping read fence as write fence to avoid clobbering
    info->mIsReadFence = false;
    }
    } else {
    if (mCodec->mNativeWindow != NULL &&
    (info->mData == NULL || info->mData->size() != 0)) {
    // move read fence into write fence to avoid clobbering
    info->mIsReadFence = false;
    ATRACE_NAME("frame-drop");
    }
    info->mStatus = BufferInfo::OWNED_BY_US;
    }
    PortMode mode = getPortMode(kPortIndexOutput);
    switch (mode) {
    case KEEP_BUFFERS:
    {
    // XXX fishy, revisit!!! What about the FREE_BUFFERS case below?
    if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {
    // We cannot resubmit the buffer we just rendered, dequeue
    // the spare instead.
    info = mCodec->dequeueBufferFromNativeWindow();
    }
    break;
    }
    case RESUBMIT_BUFFERS:
    {
    if (!mCodec->mPortEOS[kPortIndexOutput]) {
    if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {
    // We cannot resubmit the buffer we just rendered, dequeue
    // the spare instead.
    info = mCodec->dequeueBufferFromNativeWindow();
    }
    if (info != NULL) {
    ALOGV("[%s] calling fillBuffer %u",
    mCodec->mComponentName.c_str(), info->mBufferID);
    info->checkWriteFence("onOutputBufferDrained::RESUBMIT_BUFFERS");
    status_t err = mCodec->mOMX->fillBuffer(
    mCodec->mNode, info->mBufferID, info->mFenceFd); // after rendering, resubmit the buffer to be filled again; onOMXFillBufferDone will fire next time
    info->mFenceFd = -1;
    if (err == OK) {
    info->mStatus = BufferInfo::OWNED_BY_COMPONENT;
    } else {
    mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
    }
    }
    }
    break;
    }
    case FREE_BUFFERS:
    {
    status_t err = mCodec->freeBuffer(kPortIndexOutput, index);
    if (err != OK) {
    mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
    }
    break;
    }
    default:
    ALOGE("Invalid port mode: %d", mode);
    return;
    }
    }
---------------------
Author: 晓鱼弋弋
Source: CSDN
Original article: https://blog.csdn.net/coolcary/article/details/51939080
Copyright notice: this is an original article by the blogger; please include a link to the original when reposting.
