MediaCodec_Analyze-2-config

MediaCodec Analysis – config

Reference: https://source.android.google.cn/devices/media

Android media architecture

1. Core APIs called by the APK

The simplified flow for an Android APK to play audio/video with the MediaCodec API:

MediaCodec codec = MediaCodec.createDecoderByType("video/avc");
MediaFormat format = MediaFormat.createVideoFormat("video/avc", 320, 480);
codec.configure(format, surface, null, 0);
codec.start();
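
For reference, a slightly fuller decoder-setup sketch using only public MediaCodec / MediaFormat APIs might look like the following; the helper class name, the 320x480 dimensions, the max-input-size hint and the Surface source are all just for illustration:

import java.io.IOException;
import android.media.MediaCodec;
import android.media.MediaFormat;
import android.view.Surface;

final class AvcDecoderSetup {
    // Minimal AVC decoder setup; the Surface typically comes from a SurfaceView or TextureView.
    static MediaCodec configure(Surface surface) throws IOException {
        MediaCodec codec = MediaCodec.createDecoderByType(MediaFormat.MIMETYPE_VIDEO_AVC);
        MediaFormat format = MediaFormat.createVideoFormat(MediaFormat.MIMETYPE_VIDEO_AVC, 320, 480);
        format.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, 1 << 20); // optional input-buffer size hint
        codec.configure(format, surface, null /* crypto */, 0 /* flags: 0 for a decoder */);
        codec.start();
        return codec;
    }
}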

In the previous chapter, MediaCodec codec = MediaCodec.createDecoderByType("video/avc"); already created the APK-level MediaCodec object instance. In the framework, the native-layer ACodec ultimately communicates with OMX through statements such as omx = client.interface(); err = omx->allocateNode(componentName.c_str(), observer, &omxNode);.

The next step is to configure this MediaCodec instance.

codec.configure(format, surface, null, 0);

In the Java framework layer, the MediaFormat-related variables are lightly processed, and then the JNI-layer native_configure(...) function is called to carry out the configuration.

frameworks\base\media\java\android\media\MediaCodec.java

    public void configure(
            @Nullable MediaFormat format,
            @Nullable Surface surface, @Nullable MediaCrypto crypto,
            @ConfigureFlag int flags) {
        configure(format, surface, crypto, null, flags);
    }

    private void configure(
            @Nullable MediaFormat format, @Nullable Surface surface,
            @Nullable MediaCrypto crypto, @Nullable IHwBinder descramblerBinder,
            @ConfigureFlag int flags) {
        if (crypto != null && descramblerBinder != null) {
            throw new IllegalArgumentException("Can't use crypto and descrambler together!");
        }

        ...... // process the MediaFormat-related variables

        native_configure(keys, values, surface, crypto, descramblerBinder, flags);
    }

    private native final void native_configure(
            @Nullable String[] keys, @Nullable Object[] values,
            @Nullable Surface surface, @Nullable MediaCrypto crypto,
            @Nullable IHwBinder descramblerBinder, @ConfigureFlag int flags);
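
The elided "process the MediaFormat-related variables" step essentially flattens the format's key/value map into the parallel keys[] / values[] arrays handed to native_configure. The following is a hypothetical, simplified illustration of that flattening, not the actual framework code:

import java.nio.ByteBuffer;
import java.util.Map;

final class FormatFlattening {
    // Hypothetical sketch: split a key/value map into the parallel arrays the JNI layer receives.
    static void flatten(Map<String, Object> formatMap, String[] keysOut, Object[] valuesOut) {
        int i = 0;
        for (Map.Entry<String, Object> entry : formatMap.entrySet()) {
            keysOut[i] = entry.getKey();
            Object value = entry.getValue();
            // Integer/Long/Float/String values and ByteBuffers (e.g. "csd-0") pass through as-is.
            valuesOut[i] = (value instanceof ByteBuffer) ? ((ByteBuffer) value).duplicate() : value;
            ++i;
        }
    }
}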

android_media_MediaCodec_native_configure(...) lightly processes the MediaFormat-related variables and then calls JMediaCodec::configure(...), which itself does little more than forward to the configure function of the native-layer MediaCodec.

frameworks\base\media\jni\android_media_MediaCodec.cpp

static void android_media_MediaCodec_native_configure(
        JNIEnv *env,
        jobject thiz,
        jobjectArray keys, jobjectArray values,
        jobject jsurface,
        jobject jcrypto,
        jobject descramblerBinderObj,
        jint flags) {
    sp<JMediaCodec> codec = getMediaCodec(env, thiz);

    ...... // process the MediaFormat-related variables

    err = codec->configure(format, bufferProducer, crypto, descrambler, flags);

    throwExceptionAsNecessary(env, err);
}

status_t JMediaCodec::configure(
        const sp<AMessage> &format,
        const sp<IGraphicBufferProducer> &bufferProducer,
        const sp<ICrypto> &crypto,
        const sp<IDescrambler> &descrambler,
        int flags) {
    sp<Surface> client;
    if (bufferProducer != NULL) {
        mSurfaceTextureClient =
            new Surface(bufferProducer, true /* controlledByApp */);
    } else {
        mSurfaceTextureClient.clear();
    }

    constexpr int32_t CONFIGURE_FLAG_ENCODE = 1;
    AString mime;
    CHECK(format->findString("mime", &mime));
    mGraphicOutput = (mime.startsWithIgnoreCase("video/") || mime.startsWithIgnoreCase("image/"))
            && !(flags & CONFIGURE_FLAG_ENCODE);
    mHasCryptoOrDescrambler = (crypto != nullptr) || (descrambler != nullptr);

    // mCodec here is the native-layer MediaCodec object instance.
    return mCodec->configure(format, mSurfaceTextureClient, crypto, descrambler, flags);
}

MediaCodec::configure(...) first processes the MediaFormat and other parameters, then posts a kWhatConfigure message and blocks in PostAndAwaitResponse(...) until the configuration completes; on a resource error it asks the resource manager to reclaim codec resources and retries.

frameworks\av\media\libstagefright\MediaCodec.cpp

status_t MediaCodec::configure(
        const sp<AMessage> &format,
        const sp<Surface> &surface,
        const sp<ICrypto> &crypto,
        const sp<IDescrambler> &descrambler,
        uint32_t flags) {
    sp<AMessage> msg = new AMessage(kWhatConfigure, this);

    ...... // mediametrics-related handling

    ...... // for video, populate the video-related mediametrics

    msg->setMessage("format", format);
    msg->setInt32("flags", flags);
    msg->setObject("surface", surface);

    ...... // handle crypto and descrambler

    // save msg for reset
    mConfigureMsg = msg;

    status_t err;
    std::vector<MediaResourceParcel> resources;
    resources.push_back(MediaResource::CodecResource(mFlags & kFlagIsSecure, mIsVideo));
    resources.push_back(MediaResource::GraphicMemoryResource(1));
    for (int i = 0; i <= kMaxRetry; ++i) {
        if (i > 0) {
            // Don't try to reclaim resource for the first time.
            if (!mResourceManagerProxy->reclaimResource(resources)) {
                break;
            }
        }

        sp<AMessage> response;
        err = PostAndAwaitResponse(msg, &response);
        if (err != OK && err != INVALID_OPERATION) {
            ALOGE("configure failed with err 0x%08x, resetting...", err);
            reset();
        }
        if (!isResourceError(err)) {
            break;
        }
    }

    return err;
}
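
From the application's point of view, a configure failure surfaces as an exception rather than a status_t; a defensive caller might handle it roughly as in the sketch below (the helper class is illustrative and the retry policy is not mandated by the API):

import android.media.MediaCodec;
import android.media.MediaFormat;
import android.view.Surface;

final class ConfigureHelper {
    /** Returns true if configure succeeded, false if the caller may retry later. */
    static boolean configureOrRetryLater(MediaCodec codec, MediaFormat format, Surface surface) {
        try {
            codec.configure(format, surface, null /* crypto */, 0 /* flags */);
            return true;
        } catch (MediaCodec.CodecException e) {
            // isTransient()/isRecoverable() hint that retrying or reconfiguring may help,
            // for example when codec resources are temporarily exhausted.
            if (e.isTransient() || e.isRecoverable()) {
                return false;
            }
            throw e;
        }
    }
}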

In the kWhatConfigure case of MediaCodec::onMessageReceived(...), the MediaFormat-related parameters are processed first, and then mCodec->initiateConfigureComponent(format); is called, i.e. ACodec's initiateConfigureComponent function.

frameworks\av\media\libstagefright\MediaCodec.cpp

void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        ......
        case kWhatConfigure:
        {
            sp<AReplyToken> replyID;
            CHECK(msg->senderAwaitsResponse(&replyID));

            ...... // process the MediaFormat-related parameters

            mReplyID = replyID;
            setState(CONFIGURING);

            ...... // handle crypto and descrambler

            extractCSD(format); // handle csd-0 / csd-1

            int32_t tunneled;
            if (format->findInt32("feature-tunneled-playback", &tunneled) && tunneled != 0) {
                ALOGI("Configuring TUNNELED video playback.");
                mTunneled = true;
            } else {
                mTunneled = false;
            }

            mCodec->initiateConfigureComponent(format);
            break;
        }
        ......
    }
}
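
extractCSD(format) pulls the codec-specific data out of the format. On the application side these arrive as the "csd-0" / "csd-1" ByteBuffers, and tunneled playback is requested through the corresponding feature key; a hedged sketch follows (the helper class, the SPS/PPS arrays and the audio session id are placeholders):

import java.nio.ByteBuffer;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;

final class TunneledAvcFormat {
    static MediaFormat build(byte[] sps, byte[] pps, int audioSessionId) {
        MediaFormat format = MediaFormat.createVideoFormat(MediaFormat.MIMETYPE_VIDEO_AVC, 1920, 1080);
        // Codec-specific data: for H.264, csd-0 carries the SPS and csd-1 the PPS.
        format.setByteBuffer("csd-0", ByteBuffer.wrap(sps));
        format.setByteBuffer("csd-1", ByteBuffer.wrap(pps));
        // Becomes the "feature-tunneled-playback" entry checked in the kWhatConfigure handler above.
        format.setFeatureEnabled(MediaCodecInfo.CodecCapabilities.FEATURE_TunneledPlayback, true);
        format.setInteger(MediaFormat.KEY_AUDIO_SESSION_ID, audioSessionId);
        return format;
    }
}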

ACodec::initiateConfigureComponent(...) simply posts a kWhatConfigureComponent message. In the kWhatConfigureComponent case of ACodec::LoadedState::onMessageReceived(...), the handler checks that a mime type is present and then calls mCodec->configureCodec(mime.c_str(), msg);, i.e. ACodec's configureCodec function.

frameworks\av\media\libstagefright\ACodec.cpp

void ACodec::initiateConfigureComponent(const sp<AMessage> &msg) {
    msg->setWhat(kWhatConfigureComponent);
    msg->setTarget(this);
    msg->post();
}

bool ACodec::LoadedState::onMessageReceived(const sp<AMessage> &msg) {
    bool handled = false;

    switch (msg->what()) {
        case ACodec::kWhatConfigureComponent:
        {
            onConfigureComponent(msg);
            handled = true;
            break;
        }
        ......
    }
    return handled;
}

bool ACodec::LoadedState::onConfigureComponent(const sp<AMessage> &msg) {
    ALOGV("onConfigureComponent");

    CHECK(mCodec->mOMXNode != NULL);

    status_t err = OK;
    AString mime;
    if (!msg->findString("mime", &mime)) {
        err = BAD_VALUE;
    } else {
        err = mCodec->configureCodec(mime.c_str(), msg);
    }
    if (err != OK) {
        ALOGE("[%s] configureCodec returning error %d", mCodec->mComponentName.c_str(), err);

        mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
        return false;
    }

    mCodec->mCallback->onComponentConfigured(mCodec->mInputFormat, mCodec->mOutputFormat);

    return true;
}

In ACodec::configureCodec(...), the main steps are as follows (see the sketch after this list for the application-side keys that drive these branches):

  • handle the encoder-related parameters
  • handle the surface (i.e. the NativeWindow)
  • perform the setup specific to the media type
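
Many of these branches are driven by optional MediaFormat keys that the application may set before calling configure(); a hedged sketch of the decoder-side keys that appear in the code below (the helper class and all values are illustrative):

import android.media.MediaFormat;

final class OptionalDecoderKeys {
    static void add(MediaFormat format) {
        // Adaptive-playback hint: prepare for resolution switches up to this size.
        format.setInteger(MediaFormat.KEY_MAX_WIDTH, 1920);
        format.setInteger(MediaFormat.KEY_MAX_HEIGHT, 1080);
        // Pre-rotated output; maps to "rotation-degrees" in configureCodec().
        format.setInteger(MediaFormat.KEY_ROTATION, 90);
        // Latency and scheduling hints: "low-latency", "priority", "operating-rate".
        format.setInteger(MediaFormat.KEY_LOW_LATENCY, 1);
        format.setInteger(MediaFormat.KEY_PRIORITY, 0 /* realtime */);
        format.setInteger(MediaFormat.KEY_OPERATING_RATE, 60);
    }
}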

frameworks\av\media\libstagefright\ACodec.cpp

status_t ACodec::configureCodec(const char *mime, const sp<AMessage> &msg) {
    int32_t encoder;
    if (!msg->findInt32("encoder", &encoder)) {
        encoder = false;
    }

    sp<AMessage> inputFormat = new AMessage;
    sp<AMessage> outputFormat = new AMessage;
    mConfigFormat = msg;

    mIsEncoder = encoder;
    mIsVideo = !strncasecmp(mime, "video/", 6);
    mIsImage = !strncasecmp(mime, "image/", 6);

    mPortMode[kPortIndexInput] = IOMX::kPortModePresetByteBuffer;
    mPortMode[kPortIndexOutput] = IOMX::kPortModePresetByteBuffer;

    status_t err = setComponentRole(encoder /* isEncoder */, mime);

    if (err != OK) {
        return err;
    }

    OMX_VIDEO_CONTROLRATETYPE bitrateMode;
    int32_t bitrate = 0, quality;
    // FLAC encoder or video encoder in constant quality mode doesn't need a
    // bitrate, other encoders do.
    if (encoder) {
        if (mIsVideo || mIsImage) {
            if (!findVideoBitrateControlInfo(msg, &bitrateMode, &bitrate, &quality)) {
                return INVALID_OPERATION;
            }
        } else if (strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_FLAC) && !msg->findInt32("bitrate", &bitrate)) {
            return INVALID_OPERATION;
        }
    }

    // propagate bitrate to the output so that the muxer has it
    if (encoder && msg->findInt32("bitrate", &bitrate)) {
        // Technically ISO spec says that 'bitrate' should be 0 for VBR even though it is the
        // average bitrate. We've been setting both bitrate and max-bitrate to this same value.
        outputFormat->setInt32("bitrate", bitrate);
        outputFormat->setInt32("max-bitrate", bitrate);
    }

    int32_t storeMeta;
    if (encoder) {
        IOMX::PortMode mode = IOMX::kPortModePresetByteBuffer;
        if (msg->findInt32("android._input-metadata-buffer-type", &storeMeta) && storeMeta != kMetadataBufferTypeInvalid) {
            if (storeMeta == kMetadataBufferTypeNativeHandleSource) {
                mode = IOMX::kPortModeDynamicNativeHandle;
            } else if (storeMeta == kMetadataBufferTypeANWBuffer || storeMeta == kMetadataBufferTypeGrallocSource) {
                mode = IOMX::kPortModeDynamicANWBuffer;
            } else {
                return BAD_VALUE;
            }
        }
        err = setPortMode(kPortIndexInput, mode);
        if (err != OK) {
            return err;
        }

        if (mode != IOMX::kPortModePresetByteBuffer) {
            uint32_t usageBits;
            if (mOMXNode->getParameter((OMX_INDEXTYPE)OMX_IndexParamConsumerUsageBits, &usageBits, sizeof(usageBits)) == OK) {
                inputFormat->setInt32("using-sw-read-often", !!(usageBits & GRALLOC_USAGE_SW_READ_OFTEN));
            }
        }
    }

    int32_t lowLatency = 0;
    if (msg->findInt32("low-latency", &lowLatency)) {
        err = setLowLatency(lowLatency);
        if (err != OK) {
            return err;
        }
    }

    int32_t prependSPSPPS = 0;
    if (encoder && mIsVideo && msg->findInt32("prepend-sps-pps-to-idr-frames", &prependSPSPPS) && prependSPSPPS != 0) {
        OMX_INDEXTYPE index;
        err = mOMXNode->getExtensionIndex("OMX.google.android.index.prependSPSPPSToIDRFrames", &index);

        if (err == OK) {
            PrependSPSPPSToIDRFramesParams params;
            InitOMXParams(&params);
            params.bEnable = OMX_TRUE;

            err = mOMXNode->setParameter(index, &params, sizeof(params));
        }

        if (err != OK) {
            ALOGE("Encoder could not be configured to emit SPS/PPS before IDR frames. (err %d)", err);

            return err;
        }
    }

    // Only enable metadata mode on encoder output if encoder can prepend
    // sps/pps to idr frames, since in metadata mode the bitstream is in an
    // opaque handle, to which we don't have access.
    if (encoder && mIsVideo) {
        OMX_BOOL enable = (OMX_BOOL) (prependSPSPPS && msg->findInt32("android._store-metadata-in-buffers-output", &storeMeta) && storeMeta != 0);
        if (mFlags & kFlagIsSecure) {
            enable = OMX_TRUE;
        }

        err = setPortMode(kPortIndexOutput, enable ? IOMX::kPortModePresetSecureBuffer : IOMX::kPortModePresetByteBuffer);
        if (err != OK) {
            return err;
        }

        if (!msg->findInt64(KEY_REPEAT_PREVIOUS_FRAME_AFTER, &mRepeatFrameDelayUs)) {
            mRepeatFrameDelayUs = -1LL;
        }

        if (!msg->findDouble("time-lapse-fps", &mCaptureFps)) {
            float captureRate;
            if (msg->findAsFloat(KEY_CAPTURE_RATE, &captureRate)) {
                mCaptureFps = captureRate;
            } else {
                mCaptureFps = -1.0;
            }
        }

        if (!msg->findInt32(KEY_CREATE_INPUT_SURFACE_SUSPENDED, (int32_t*)&mCreateInputBuffersSuspended)) {
            mCreateInputBuffersSuspended = false;
        }
    }

    if (encoder && (mIsVideo || mIsImage)) {
        // only allow 32-bit value, since we pass it as U32 to OMX.
        if (!msg->findInt64(KEY_MAX_PTS_GAP_TO_ENCODER, &mMaxPtsGapUs)) {
            mMaxPtsGapUs = 0LL;
        } else if (mMaxPtsGapUs > INT32_MAX || mMaxPtsGapUs < INT32_MIN) {
            ALOGW("Unsupported value for max pts gap %lld", (long long) mMaxPtsGapUs);
            mMaxPtsGapUs = 0LL;
        }

        if (!msg->findFloat(KEY_MAX_FPS_TO_ENCODER, &mMaxFps)) {
            mMaxFps = -1;
        }

        // notify GraphicBufferSource to allow backward frames
        if (mMaxPtsGapUs < 0LL) {
            mMaxFps = -1;
        }
    }

    // NOTE: we only use native window for video decoders
    sp<RefBase> obj;
    bool haveNativeWindow = msg->findObject("native-window", &obj) && obj != NULL && mIsVideo && !encoder;
    mUsingNativeWindow = haveNativeWindow;
    if (mIsVideo && !encoder) {
        inputFormat->setInt32("adaptive-playback", false);

        int32_t usageProtected;
        if (msg->findInt32("protected", &usageProtected) && usageProtected) {
            if (!haveNativeWindow) {
                ALOGE("protected output buffers must be sent to an ANativeWindow");
                return PERMISSION_DENIED;
            }
            mFlags |= kFlagIsGrallocUsageProtected;
            mFlags |= kFlagPushBlankBuffersToNativeWindowOnShutdown;
        }
    }
    if (mFlags & kFlagIsSecure) {
        // use native_handles for secure input buffers
        err = setPortMode(kPortIndexInput, IOMX::kPortModePresetSecureBuffer);

        if (err != OK) {
            ALOGI("falling back to non-native_handles");
            setPortMode(kPortIndexInput, IOMX::kPortModePresetByteBuffer);
            err = OK; // ignore error for now
        }

        OMX_INDEXTYPE index;
        if (mOMXNode->getExtensionIndex("OMX.google.android.index.preregisterMetadataBuffers", &index) == OK) {
            OMX_CONFIG_BOOLEANTYPE param;
            InitOMXParams(&param);
            param.bEnabled = OMX_FALSE;
            if (mOMXNode->getParameter(index, &param, sizeof(param)) == OK) {
                if (param.bEnabled == OMX_TRUE) {
                    mFlags |= kFlagPreregisterMetadataBuffers;
                }
            }
        }
    }
    if (haveNativeWindow) {
        sp<ANativeWindow> nativeWindow = static_cast<ANativeWindow *>(static_cast<Surface *>(obj.get()));

        // START of temporary support for automatic FRC - THIS WILL BE REMOVED
        int32_t autoFrc;
        if (msg->findInt32("auto-frc", &autoFrc)) {
            bool enabled = autoFrc;
            OMX_CONFIG_BOOLEANTYPE config;
            InitOMXParams(&config);
            config.bEnabled = (OMX_BOOL)enabled;
            status_t temp = mOMXNode->setConfig((OMX_INDEXTYPE)OMX_IndexConfigAutoFramerateConversion, &config, sizeof(config));
            if (temp == OK) {
                outputFormat->setInt32("auto-frc", enabled);
            } else if (enabled) {
                ALOGI("codec does not support requested auto-frc (err %d)", temp);
            }
        }
        // END of temporary support for automatic FRC

        int32_t tunneled;
        if (msg->findInt32("feature-tunneled-playback", &tunneled) && tunneled != 0) {
            ALOGI("Configuring TUNNELED video playback.");
            mTunneled = true;

            int32_t audioHwSync = 0;
            if (!msg->findInt32("audio-hw-sync", &audioHwSync)) {
                ALOGW("No Audio HW Sync provided for video tunnel");
            }
            err = configureTunneledVideoPlayback(audioHwSync, nativeWindow);
            if (err != OK) {
                ALOGE("configureTunneledVideoPlayback(%d,%p) failed!", audioHwSync, nativeWindow.get());
                return err;
            }

            int32_t maxWidth = 0, maxHeight = 0;
            if (msg->findInt32("max-width", &maxWidth) &&
                    msg->findInt32("max-height", &maxHeight)) {

                err = mOMXNode->prepareForAdaptivePlayback(kPortIndexOutput, OMX_TRUE, maxWidth, maxHeight);
                if (err != OK) {
                    ALOGW("[%s] prepareForAdaptivePlayback failed w/ err %d", mComponentName.c_str(), err);
                    // allow failure
                    err = OK;
                } else {
                    inputFormat->setInt32("max-width", maxWidth);
                    inputFormat->setInt32("max-height", maxHeight);
                    inputFormat->setInt32("adaptive-playback", true);
                }
            }
        } else {
            ALOGV("Configuring CPU controlled video playback.");
            mTunneled = false;

            // Explicity reset the sideband handle of the window for
            // non-tunneled video in case the window was previously used
            // for a tunneled video playback.
            err = native_window_set_sideband_stream(nativeWindow.get(), NULL);
            if (err != OK) {
                ALOGE("set_sideband_stream(NULL) failed! (err %d).", err);
                return err;
            }

            err = setPortMode(kPortIndexOutput, IOMX::kPortModeDynamicANWBuffer);
            if (err != OK) {
                // if adaptive playback has been requested, try JB fallback
                // NOTE: THIS FALLBACK MECHANISM WILL BE REMOVED DUE TO ITS
                // LARGE MEMORY REQUIREMENT

                // we will not do adaptive playback on software accessed
                // surfaces as they never had to respond to changes in the
                // crop window, and we don't trust that they will be able to.
                int usageBits = 0;
                bool canDoAdaptivePlayback;

                if (nativeWindow->query(nativeWindow.get(), NATIVE_WINDOW_CONSUMER_USAGE_BITS, &usageBits) != OK) {
                    canDoAdaptivePlayback = false;
                } else {
                    canDoAdaptivePlayback = (usageBits & (GRALLOC_USAGE_SW_READ_MASK | GRALLOC_USAGE_SW_WRITE_MASK)) == 0;
                }

                int32_t maxWidth = 0, maxHeight = 0;
                if (canDoAdaptivePlayback && msg->findInt32("max-width", &maxWidth) && msg->findInt32("max-height", &maxHeight)) {
                    ALOGV("[%s] prepareForAdaptivePlayback(%dx%d)", mComponentName.c_str(), maxWidth, maxHeight);

                    err = mOMXNode->prepareForAdaptivePlayback(kPortIndexOutput, OMX_TRUE, maxWidth, maxHeight);
                    ALOGW_IF(err != OK, "[%s] prepareForAdaptivePlayback failed w/ err %d", mComponentName.c_str(), err);

                    if (err == OK) {
                        inputFormat->setInt32("max-width", maxWidth);
                        inputFormat->setInt32("max-height", maxHeight);
                        inputFormat->setInt32("adaptive-playback", true);
                    }
                }
                // allow failure
                err = OK;
            } else {
                ALOGV("[%s] setPortMode on output to %s succeeded", mComponentName.c_str(), asString(IOMX::kPortModeDynamicANWBuffer));
                CHECK(storingMetadataInDecodedBuffers());
                inputFormat->setInt32("adaptive-playback", true);
            }

            int32_t push;
            if (msg->findInt32("push-blank-buffers-on-shutdown", &push) && push != 0) {
                mFlags |= kFlagPushBlankBuffersToNativeWindowOnShutdown;
            }
        }

        int32_t rotationDegrees;
        if (msg->findInt32("rotation-degrees", &rotationDegrees)) {
            mRotationDegrees = rotationDegrees;
        } else {
            mRotationDegrees = 0;
        }
    }

    AudioEncoding pcmEncoding = kAudioEncodingPcm16bit;
    (void)msg->findInt32("pcm-encoding", (int32_t*)&pcmEncoding);
    // invalid encodings will default to PCM-16bit in setupRawAudioFormat.

    if (mIsVideo || mIsImage) {
        // determine need for software renderer
        bool usingSwRenderer = false;
        if (haveNativeWindow) {
            bool requiresSwRenderer = false;
            OMX_PARAM_U32TYPE param;
            InitOMXParams(&param);
            param.nPortIndex = kPortIndexOutput;

            status_t err = mOMXNode->getParameter((OMX_INDEXTYPE)OMX_IndexParamVideoAndroidRequiresSwRenderer, &param, sizeof(param));

            if (err == OK && param.nU32 == 1) {
                requiresSwRenderer = true;
            }

            if (mComponentName.startsWith("OMX.google.") || requiresSwRenderer) {
                usingSwRenderer = true;
                haveNativeWindow = false;
                (void)setPortMode(kPortIndexOutput, IOMX::kPortModePresetByteBuffer);
            } else if (!storingMetadataInDecodedBuffers()) {
                err = setPortMode(kPortIndexOutput, IOMX::kPortModePresetANWBuffer);
                if (err != OK) {
                    return err;
                }
            }

        }


        if (encoder) {
            err = setupVideoEncoder(mime, msg, outputFormat, inputFormat);
        } else {
            err = setupVideoDecoder(mime, msg, haveNativeWindow, usingSwRenderer, outputFormat);
        }

        if (err != OK) {
            return err;
        }

        if (haveNativeWindow) {
            mNativeWindow = static_cast<Surface *>(obj.get());

            // fallback for devices that do not handle flex-YUV for native buffers
            int32_t requestedColorFormat = OMX_COLOR_FormatUnused;
            if (msg->findInt32("color-format", &requestedColorFormat) && requestedColorFormat == OMX_COLOR_FormatYUV420Flexible) {
                status_t err = getPortFormat(kPortIndexOutput, outputFormat);
                if (err != OK) {
                    return err;
                }
                int32_t colorFormat = OMX_COLOR_FormatUnused;
                OMX_U32 flexibleEquivalent = OMX_COLOR_FormatUnused;
                if (!outputFormat->findInt32("color-format", &colorFormat)) {
                    ALOGE("ouptut port did not have a color format (wrong domain?)");
                    return BAD_VALUE;
                }
                ALOGD("[%s] Requested output format %#x and got %#x.", mComponentName.c_str(), requestedColorFormat, colorFormat);
                if (!IsFlexibleColorFormat(mOMXNode, colorFormat, haveNativeWindow, &flexibleEquivalent) || flexibleEquivalent != (OMX_U32)requestedColorFormat) {
                    // device did not handle flex-YUV request for native window, fall back
                    // to SW renderer
                    ALOGI("[%s] Falling back to software renderer", mComponentName.c_str());
                    mNativeWindow.clear();
                    mNativeWindowUsageBits = 0;
                    haveNativeWindow = false;
                    usingSwRenderer = true;
                    // TODO: implement adaptive-playback support for bytebuffer mode.
                    // This is done by SW codecs, but most HW codecs don't support it.
                    err = setPortMode(kPortIndexOutput, IOMX::kPortModePresetByteBuffer);
                    inputFormat->setInt32("adaptive-playback", false);
                    if (mFlags & kFlagIsGrallocUsageProtected) {
                        // fallback is not supported for protected playback
                        err = PERMISSION_DENIED;
                    } else if (err == OK) {
                        err = setupVideoDecoder(mime, msg, haveNativeWindow, usingSwRenderer, outputFormat);
                    }
                }
            }
        }

        if (usingSwRenderer) {
            outputFormat->setInt32("using-sw-renderer", 1);
        }
    } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_MPEG) || !strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_II)) {
        int32_t numChannels, sampleRate;
        if (!msg->findInt32("channel-count", &numChannels) || !msg->findInt32("sample-rate", &sampleRate)) {
            // Since we did not always check for these, leave them optional
            // and have the decoder figure it all out.
            err = OK;
        } else {
            err = setupRawAudioFormat(
                    encoder ? kPortIndexInput : kPortIndexOutput,
                    sampleRate,
                    numChannels);
        }
    } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AAC)) {
        int32_t numChannels, sampleRate;
        if (!msg->findInt32("channel-count", &numChannels) || !msg->findInt32("sample-rate", &sampleRate)) {
            err = INVALID_OPERATION;
        } else {
            int32_t isADTS, aacProfile;
            int32_t sbrMode;
            int32_t maxOutputChannelCount;
            int32_t pcmLimiterEnable;
            drcParams_t drc;
            if (!msg->findInt32("is-adts", &isADTS)) {
                isADTS = 0;
            }
            if (!msg->findInt32("aac-profile", &aacProfile)) {
                aacProfile = OMX_AUDIO_AACObjectNull;
            }
            if (!msg->findInt32("aac-sbr-mode", &sbrMode)) {
                sbrMode = -1;
            }

            if (!msg->findInt32("aac-max-output-channel_count", &maxOutputChannelCount)) {
                maxOutputChannelCount = -1;
            }
            if (!msg->findInt32("aac-pcm-limiter-enable", &pcmLimiterEnable)) {
                // value is unknown
                pcmLimiterEnable = -1;
            }
            if (!msg->findInt32("aac-encoded-target-level", &drc.encodedTargetLevel)) {
                // value is unknown
                drc.encodedTargetLevel = -1;
            }
            if (!msg->findInt32("aac-drc-cut-level", &drc.drcCut)) {
                // value is unknown
                drc.drcCut = -1;
            }
            if (!msg->findInt32("aac-drc-boost-level", &drc.drcBoost)) {
                // value is unknown
                drc.drcBoost = -1;
            }
            if (!msg->findInt32("aac-drc-heavy-compression", &drc.heavyCompression)) {
                // value is unknown
                drc.heavyCompression = -1;
            }
            if (!msg->findInt32("aac-target-ref-level", &drc.targetRefLevel)) {
                // value is unknown
                drc.targetRefLevel = -2;
            }
            if (!msg->findInt32("aac-drc-effect-type", &drc.effectType)) {
                // value is unknown
                drc.effectType = -2; // valid values are -1 and over
            }
            if (!msg->findInt32("aac-drc-album-mode", &drc.albumMode)) {
                // value is unknown
                drc.albumMode = -1; // valid values are 0 and 1
            }
            if (!msg->findInt32("aac-drc-output-loudness", &drc.outputLoudness)) {
                // value is unknown
                drc.outputLoudness = -1;
            }

            err = setupAACCodec(
                    encoder, numChannels, sampleRate, bitrate, aacProfile,
                    isADTS != 0, sbrMode, maxOutputChannelCount, drc,
                    pcmLimiterEnable);
        }
    } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AMR_NB)) {
        err = setupAMRCodec(encoder, false /* isWAMR */, bitrate);
    } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AMR_WB)) {
        err = setupAMRCodec(encoder, true /* isWAMR */, bitrate);
    } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_G711_ALAW)
            || !strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_G711_MLAW)) {
        // These are PCM-like formats with a fixed sample rate but
        // a variable number of channels.

        int32_t numChannels;
        if (!msg->findInt32("channel-count", &numChannels)) {
            err = INVALID_OPERATION;
        } else {
            int32_t sampleRate;
            if (!msg->findInt32("sample-rate", &sampleRate)) {
                sampleRate = 8000;
            }
            err = setupG711Codec(encoder, sampleRate, numChannels);
        }
    } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_FLAC)) {
        // numChannels needs to be set to properly communicate PCM values.
        int32_t numChannels = 2, sampleRate = 44100, compressionLevel = -1;
        if (encoder && (!msg->findInt32("channel-count", &numChannels) || !msg->findInt32("sample-rate", &sampleRate))) {
            ALOGE("missing channel count or sample rate for FLAC encoder");
            err = INVALID_OPERATION;
        } else {
            if (encoder) {
                if (!msg->findInt32(
                            "complexity", &compressionLevel) &&
                    !msg->findInt32(
                            "flac-compression-level", &compressionLevel)) {
                    compressionLevel = 5; // default FLAC compression level
                } else if (compressionLevel < 0) {
                    ALOGW("compression level %d outside [0..8] range, "
                          "using 0",
                          compressionLevel);
                    compressionLevel = 0;
                } else if (compressionLevel > 8) {
                    ALOGW("compression level %d outside [0..8] range, "
                          "using 8",
                          compressionLevel);
                    compressionLevel = 8;
                }
            }
            err = setupFlacCodec(
                    encoder, numChannels, sampleRate, compressionLevel, pcmEncoding);
        }
    } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_RAW)) {
        int32_t numChannels, sampleRate;
        if (encoder
                || !msg->findInt32("channel-count", &numChannels)
                || !msg->findInt32("sample-rate", &sampleRate)) {
            err = INVALID_OPERATION;
        } else {
            err = setupRawAudioFormat(kPortIndexInput, sampleRate, numChannels, pcmEncoding);
        }
    } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AC3)) {
        int32_t numChannels;
        int32_t sampleRate;
        if (!msg->findInt32("channel-count", &numChannels)
                || !msg->findInt32("sample-rate", &sampleRate)) {
            err = INVALID_OPERATION;
        } else {
            err = setupAC3Codec(encoder, numChannels, sampleRate);
        }
    } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_EAC3)) {
        int32_t numChannels;
        int32_t sampleRate;
        if (!msg->findInt32("channel-count", &numChannels)
                || !msg->findInt32("sample-rate", &sampleRate)) {
            err = INVALID_OPERATION;
        } else {
            err = setupEAC3Codec(encoder, numChannels, sampleRate);
        }
    } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AC4)) {
        int32_t numChannels;
        int32_t sampleRate;
        if (!msg->findInt32("channel-count", &numChannels)
                || !msg->findInt32("sample-rate", &sampleRate)) {
            err = INVALID_OPERATION;
        } else {
            err = setupAC4Codec(encoder, numChannels, sampleRate);
        }
    }

    if (err != OK) {
        return err;
    }

    if (!msg->findInt32("encoder-delay", &mEncoderDelay)) {
        mEncoderDelay = 0;
    }

    if (!msg->findInt32("encoder-padding", &mEncoderPadding)) {
        mEncoderPadding = 0;
    }

    if (msg->findInt32("channel-mask", &mChannelMask)) {
        mChannelMaskPresent = true;
    } else {
        mChannelMaskPresent = false;
    }

    int32_t maxInputSize;
    if (msg->findInt32("max-input-size", &maxInputSize)) {
        err = setMinBufferSize(kPortIndexInput, (size_t)maxInputSize);
        err = OK; // ignore error
    } else if (!strcmp("OMX.Nvidia.aac.decoder", mComponentName.c_str())) {
        err = setMinBufferSize(kPortIndexInput, 8192);  // XXX
        err = OK; // ignore error
    }

    int32_t priority;
    if (msg->findInt32("priority", &priority)) {
        err = setPriority(priority);
        err = OK; // ignore error
    }

    int32_t rateInt = -1;
    float rateFloat = -1;
    if (!msg->findFloat("operating-rate", &rateFloat)) {
        msg->findInt32("operating-rate", &rateInt);
        rateFloat = (float)rateInt;  // 16MHz (FLINTMAX) is OK for upper bound.
    }
    if (rateFloat > 0) {
        err = setOperatingRate(rateFloat, mIsVideo);
        err = OK; // ignore errors
    }

    if (err == OK) {
        err = setVendorParameters(msg);
        if (err != OK) {
            return err;
        }
    }

    // NOTE: both mBaseOutputFormat and mOutputFormat are outputFormat to signal first frame.
    mBaseOutputFormat = outputFormat;
    mLastOutputFormat.clear();

    err = getPortFormat(kPortIndexInput, inputFormat);
    if (err == OK) {
        err = getPortFormat(kPortIndexOutput, outputFormat);
        if (err == OK) {
            mInputFormat = inputFormat;
            mOutputFormat = outputFormat;
        }
    }

    // create data converters if needed
    if (!mIsVideo && !mIsImage && err == OK) {
        AudioEncoding codecPcmEncoding = kAudioEncodingPcm16bit;
        if (encoder) {
            (void)mInputFormat->findInt32("pcm-encoding", (int32_t*)&codecPcmEncoding);
            mConverter[kPortIndexInput] = AudioConverter::Create(pcmEncoding, codecPcmEncoding);
            if (mConverter[kPortIndexInput] != NULL) {
                ALOGD("%s: encoder %s input format pcm encoding converter from %d to %d",
                        __func__, mComponentName.c_str(), pcmEncoding, codecPcmEncoding);
                mInputFormat->setInt32("pcm-encoding", pcmEncoding);
            }
        } else {
            (void)mOutputFormat->findInt32("pcm-encoding", (int32_t*)&codecPcmEncoding);
            mConverter[kPortIndexOutput] = AudioConverter::Create(codecPcmEncoding, pcmEncoding);
            if (mConverter[kPortIndexOutput] != NULL) {
                ALOGD("%s: decoder %s output format pcm encoding converter from %d to %d",
                        __func__, mComponentName.c_str(), codecPcmEncoding, pcmEncoding);
                mOutputFormat->setInt32("pcm-encoding", pcmEncoding);
            }
        }
    }

    return err;
}
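
On the encoder path above, the bitrate, SPS/PPS-prepend and repeat-previous-frame handling is likewise driven by MediaFormat keys, and every audio branch starts from "channel-count" and "sample-rate". Two small hedged sketches follow; the helper class and all numbers are illustrative:

import android.media.MediaCodecInfo;
import android.media.MediaFormat;

final class EncoderAndAudioFormats {
    static MediaFormat avcEncoder() {
        MediaFormat format = MediaFormat.createVideoFormat(MediaFormat.MIMETYPE_VIDEO_AVC, 1280, 720);
        format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
                MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface); // input via createInputSurface()
        format.setInteger(MediaFormat.KEY_BIT_RATE, 4_000_000);        // required by most encoders
        format.setInteger(MediaFormat.KEY_BITRATE_MODE,
                MediaCodecInfo.EncoderCapabilities.BITRATE_MODE_VBR);
        format.setInteger(MediaFormat.KEY_FRAME_RATE, 30);
        format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
        // "prepend-sps-pps-to-idr-frames" and "repeat-previous-frame-after" from the code above.
        format.setInteger(MediaFormat.KEY_PREPEND_HEADER_TO_SYNC_FRAMES, 1);
        format.setLong(MediaFormat.KEY_REPEAT_PREVIOUS_FRAME_AFTER, 100_000 /* us */);
        return format;
    }

    static MediaFormat aacDecoder(int sampleRate, int channelCount) {
        // channel-count and sample-rate are mandatory for the AAC branch in configureCodec().
        MediaFormat format = MediaFormat.createAudioFormat(
                MediaFormat.MIMETYPE_AUDIO_AAC, sampleRate, channelCount);
        format.setInteger(MediaFormat.KEY_IS_ADTS, 1); // only if the stream carries ADTS headers
        format.setInteger(MediaFormat.KEY_AAC_PROFILE,
                MediaCodecInfo.CodecProfileLevel.AACObjectLC);
        return format;
    }
}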

After the configuration is done, mCodec->mCallback->onComponentConfigured(mCodec->mInputFormat, mCodec->mOutputFormat); invokes MediaCodec's CodecCallback::onComponentConfigured(...).

frameworks\av\media\libstagefright\MediaCodec.cpp

void CodecCallback::onComponentConfigured(const sp<AMessage> &inputFormat, const sp<AMessage> &outputFormat) {
    sp<AMessage> notify(mNotify->dup());
    notify->setInt32("what", kWhatComponentConfigured);
    notify->setMessage("input-format", inputFormat);
    notify->setMessage("output-format", outputFormat);
    notify->post();
}

void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatCodecNotify:
        {
            int32_t what;
            CHECK(msg->findInt32("what", &what));

            switch (what) {
                ......
                case kWhatComponentConfigured:
                {
                    if (mState == RELEASING || mState == UNINITIALIZED || mState == INITIALIZED) {
                        // In case a kWhatError or kWhatRelease message came in and replied,
                        // we log a warning and ignore.
                        ALOGW("configure interrupted by error or release, current state %d",
                              mState);
                        break;
                    }
                    CHECK_EQ(mState, CONFIGURING);

                    // reset input surface flag
                    mHaveInputSurface = false;

                    CHECK(msg->findMessage("input-format", &mInputFormat));
                    CHECK(msg->findMessage("output-format", &mOutputFormat));

                    // limit to confirming the opt-in behavior to minimize any behavioral change
                    if (mSurface != nullptr && !mAllowFrameDroppingBySurface) {
                        // signal frame dropping mode in the input format as this may also be
                        // meaningful and confusing for an encoder in a transcoder scenario
                        mInputFormat->setInt32("allow-frame-drop", mAllowFrameDroppingBySurface);
                    }
                    sp<AMessage> interestingFormat = (mFlags & kFlagIsEncoder) ? mOutputFormat : mInputFormat;
                    ALOGV("[%s] configured as input format: %s, output format: %s",
                            mComponentName.c_str(),
                            mInputFormat->debugString(4).c_str(),
                            mOutputFormat->debugString(4).c_str());
                    int32_t usingSwRenderer;
                    if (mOutputFormat->findInt32("using-sw-renderer", &usingSwRenderer) && usingSwRenderer) {
                        mFlags |= kFlagUsesSoftwareRenderer;
                    }
                    setState(CONFIGURED);
                    (new AMessage)->postReply(mReplyID);

                    // augment our media metrics info, now that we know more things
                    // such as what the codec extracted from any CSD passed in.
                    if (mMetricsHandle != 0) {
                        sp<AMessage> format;
                        if (mConfigureMsg != NULL &&
                            mConfigureMsg->findMessage("format", &format)) {
                                // format includes: mime
                                AString mime;
                                if (format->findString("mime", &mime)) {
                                    mediametrics_setCString(mMetricsHandle, kCodecMime, mime.c_str());
                                }
                            }
                        // perhaps video only?
                        int32_t profile = 0;
                        if (interestingFormat->findInt32("profile", &profile)) {
                            mediametrics_setInt32(mMetricsHandle, kCodecProfile, profile);
                        }
                        int32_t level = 0;
                        if (interestingFormat->findInt32("level", &level)) {
                            mediametrics_setInt32(mMetricsHandle, kCodecLevel, level);
                        }
                        // bitrate and bitrate mode, encoder only
                        if (mFlags & kFlagIsEncoder) {
                            // encoder specific values
                            int32_t bitrate_mode = -1;
                            if (mOutputFormat->findInt32(KEY_BITRATE_MODE, &bitrate_mode)) {
                                    mediametrics_setCString(mMetricsHandle, kCodecBitrateMode,
                                          asString_BitrateMode(bitrate_mode));
                            }
                            int32_t bitrate = -1;
                            if (mOutputFormat->findInt32(KEY_BIT_RATE, &bitrate)) {
                                    mediametrics_setInt32(mMetricsHandle, kCodecBitrate, bitrate);
                            }
                        } else {
                            // decoder specific values
                        }
                    }
                    break;
                }
            }
            ...
        }
        ...
    }
}
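
Once kWhatComponentConfigured is handled and the state moves to CONFIGURED, the blocked configure() call returns, and the application can read back the formats the codec actually settled on (the helper class is illustrative):

import android.media.MediaCodec;
import android.media.MediaFormat;
import android.util.Log;

final class NegotiatedFormats {
    // mInputFormat / mOutputFormat set above are what these getters return after configure().
    static void log(MediaCodec codec) {
        MediaFormat inputFormat = codec.getInputFormat();   // e.g. adaptive-playback, max-input-size
        MediaFormat outputFormat = codec.getOutputFormat(); // e.g. color-format, crop rectangle
        Log.d("MediaCodecConfig", "input=" + inputFormat + " output=" + outputFormat);
    }
}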