MediaCodec Analysis – start
Reference: https://source.android.google.cn/devices/media
一 APK调用的核心API
Android APK使用 MediaCodec API 播放音视频的简易流程:
MediaCodec codec = MediaCodec.createDecoderByType("video/avc");
MediaFormat format = MediaFormat.createVideoFormat("video/avc", 320, 480);
codec.configure(format, surface, null, 0);
codec.start();
上一章节,codec.configure(format, surface, null, 0);
配置完了。调用codec.start();
就可以开始播放了。
补充
在开始分析
codec.start();
之前,要先分析一下ACodec中的mCodec->mOMXNode
变量。因为之后有很多类似status_t err = mCodec->mOMXNode->sendCommand(OMX_CommandStateSet, OMX_StateIdle);
的语句。
在执行APK中MediaCodec codec = MediaCodec.createDecoderByType("video/avc");
语句时,会调用到ACodec::UninitializedState::onAllocateComponent(...)
函数,在其中,通过HIDL架构得到sp<IOMX> omx;
实例,然后会执行err = omx->allocateNode(componentName.c_str(), observer, &omxNode);
。
frameworks\av\media\libstagefright\ACodec.cpp
// Allocates the OMX component for this codec. Runs when the app calls
// MediaCodec.createDecoderByType(...): connects to the media codec service,
// allocates an IOMXNode for the named component, caches it in mOMXNode
// (used by all later sendCommand(...) calls), and moves to LoadedState.
// Returns true when the message was handled.
bool ACodec::UninitializedState::onAllocateComponent(const sp<AMessage> &msg) {
ALOGV("onAllocateComponent");
...... // (elided)
sp<CodecObserver> observer = new CodecObserver(notify);
sp<IOMX> omx;
sp<IOMXNode> omxNode;
status_t err = NAME_NOT_FOUND;
OMXClient client;
// Connect to the (possibly remote) OMX service; failure is fatal here.
if (client.connect(owner.c_str()) != OK) {
mCodec->signalError(OMX_ErrorUndefined, NO_INIT);
return false;
}
omx = client.interface();
// Temporarily raise this thread's priority so node allocation
// is not starved, then restore the previous priority.
pid_t tid = gettid();
int prevPriority = androidGetThreadPriority(tid);
androidSetThreadPriority(tid, ANDROID_PRIORITY_FOREGROUND);
err = omx->allocateNode(componentName.c_str(), observer, &omxNode);
androidSetThreadPriority(tid, prevPriority);
...... // (elided)
// Cache the OMX interface and node for the rest of the codec lifetime.
mCodec->mOMX = omx;
mCodec->mOMXNode = omxNode;
mCodec->mCallback->onComponentAllocated(mCodec->mComponentName.c_str());
mCodec->changeState(mCodec->mLoadedState);
return true;
}
在Omx::allocateNode(...)
函数中,主要有2个操作:
- 通过 mMaster->makeComponentInstance(...) 创建component实例,并通过 instance->setHandle(handle); 保存
- 创建 TWOmxNode 实例,并回调返回给ACodec
frameworks\av\media\libstagefright\omx\1.0\Omx.cpp
// HIDL service entry point: allocates an OMXNodeInstance for the named
// component, creates the underlying OMX component through mMaster, and
// hands a TWOmxNode wrapper back to the caller via _hidl_cb.
Return<void> Omx::allocateNode(const hidl_string& name, const sp<IOmxObserver>& observer, allocateNode_cb _hidl_cb) {
using ::android::IOMXNode;
using ::android::IOMXObserver;
sp<OMXNodeInstance> instance;
{
Mutex::Autolock autoLock(mLock);
// Refuse new nodes once the per-process instance limit is reached.
if (mLiveNodes.size() == kMaxNodeInstances) {
_hidl_cb(toStatus(NO_MEMORY), nullptr);
return Void();
}
instance = new OMXNodeInstance(this, new LWOmxObserver(observer), name.c_str());
// Receives the raw component handle created below.
OMX_COMPONENTTYPE *handle;
// Create the component instance via mMaster (OMXMaster selects the plugin).
OMX_ERRORTYPE err = mMaster->makeComponentInstance(name.c_str(), &OMXNodeInstance::kCallbacks, instance.get(), &handle);
if (err != OMX_ErrorNone) {
LOG(ERROR) << "Failed to allocate omx component '" << name.c_str() << "' err=" << asString(err) << "(0x" << std::hex << unsigned(err) << ")";
_hidl_cb(toStatus(StatusFromOMXError(err)), nullptr);
return Void();
}
// Save the created component handle inside the OMXNodeInstance (mHandle).
instance->setHandle(handle);
......
}
observer->linkToDeath(this, 0);
// Wrap the instance in a TWOmxNode and return it through the HIDL callback.
_hidl_cb(toStatus(OK), new TWOmxNode(instance));
return Void();
}
mHandle
的处理
由上文可知,通过OMX_ERRORTYPE err = mMaster->makeComponentInstance(name.c_str(), &OMXNodeInstance::kCallbacks, instance.get(), &handle);
语句即可创建ComponentInstance实例,然后将其保存到handle
变量中,最后调用instance->setHandle(handle);
,使其保存到OMXNodeInstance实例的mHandle
变量中。
makeComponentInstance
函数传入的name
,可能是OMX.realtek.video.decoder
。OMX_ERRORTYPE err = plugin->makeComponentInstance(name, callbacks, appData, component);
语句会调用具体的实现,可能是softplugin,也可能是vendorplugin(即SOC厂商实现的OMXPlugin)。
frameworks\av\media\libstagefright\omx\OMXMaster.cpp
// Creates an OMX component instance by name. Looks up which plugin
// (software plugin or the SOC vendor's OMXPlugin) registered the
// component name and delegates creation to it; on success the new
// handle is recorded in mPluginByInstance for later teardown.
OMX_ERRORTYPE OMXMaster::makeComponentInstance(
const char *name,
const OMX_CALLBACKTYPE *callbacks,
OMX_PTR appData,
OMX_COMPONENTTYPE **component) {
ALOGI("makeComponentInstance(%s) in %s process", name, mProcessName);
Mutex::Autolock autoLock(mLock);
*component = NULL;
// Find the plugin that registered this component name.
ssize_t index = mPluginByComponentName.indexOfKey(String8(name));
if (index < 0) {
return OMX_ErrorInvalidComponentName;
}
OMXPluginBase *plugin = mPluginByComponentName.valueAt(index);
// Delegate to the concrete plugin implementation.
OMX_ERRORTYPE err = plugin->makeComponentInstance(name, callbacks, appData, component);
if (err != OMX_ErrorNone) {
return err;
}
// Remember which plugin owns this instance.
mPluginByInstance.add(*component, plugin);
return err;
}
framework\av\media\libstagefright\omx\OMXNodeInstance.cpp
// Stores the component handle created by OMXMaster::makeComponentInstance
// into mHandle. May only be called once (mHandle must still be NULL); also
// creates the CallbackDispatcher used to deliver component callbacks.
void OMXNodeInstance::setHandle(OMX_HANDLETYPE handle) {
CLOG_LIFE(allocateNode, "handle=%p", handle);
CHECK(mHandle == NULL);
mHandle = handle;
if (handle != NULL) {
mDispatcher = new CallbackDispatcher(this);
}
}
TWOmxNode
实例的处理
结合上下文,TWOmxNode
中的mBase
变量保存的是OMXNodeInstance
实例。
frameworks\av\media\libstagefright\omx\1.0\WOmxNode.cpp
// TWOmxNode wraps an IOMXNode (here the OMXNodeInstance created in
// Omx::allocateNode) for transport across the HIDL boundary; the wrapped
// instance is kept in mBase.
TWOmxNode::TWOmxNode(sp<IOMXNode> const& base) : mBase(base) { }
因此,ACodec中的err = omx->allocateNode(componentName.c_str(), observer, &omxNode);
中的omxNode
实际是封装了TWOmxNode
的客户端代理(LWOmxNode)实例,即mCodec->mOMXNode = omxNode;
保存后,对mOMXNode的调用最终会转发到TWOmxNode
实例。
二 codec.start();
frameworks\base\media\java\android\media\MediaCodec.java
// Starts the codec after configure(): posts the start request to the
// native layer, then pre-caches the input and output buffer arrays
// under mBufferLock.
public final void start() {
native_start();
synchronized(mBufferLock) {
cacheBuffers(true /* input */);
cacheBuffers(false /* input */);
}
}
// JNI entry point, implemented in android_media_MediaCodec.cpp.
private native final void native_start();
frameworks\base\media\jni\android_media_MediaCodec.cpp
// JNI bridge for MediaCodec.start(): validates the native JMediaCodec
// wrapper, forwards to JMediaCodec::start(), and converts any error
// status into a Java exception.
static void android_media_MediaCodec_start(JNIEnv *env, jobject thiz) {
ALOGV("android_media_MediaCodec_start");
sp<JMediaCodec> codec = getMediaCodec(env, thiz);
if (codec == NULL || codec->initCheck() != OK) {
throwExceptionAsNecessary(env, INVALID_OPERATION);
return;
}
status_t err = codec->start();
throwExceptionAsNecessary(env, err, ACTION_CODE_FATAL, "start failed");
}
// Thin forwarder: mCodec is the libstagefright MediaCodec instance.
status_t JMediaCodec::start() {
return mCodec->start();
}
frameworks\av\media\libstagefright\MediaCodec.cpp
// Posts kWhatStart to the MediaCodec looper and waits for the reply.
// Declares the media resources this codec needs; on a resource error
// the request is retried up to kMaxRetry times (resource reclaiming
// between attempts is elided below).
status_t MediaCodec::start() {
sp<AMessage> msg = new AMessage(kWhatStart, this);
status_t err;
std::vector<MediaResourceParcel> resources;
resources.push_back(MediaResource::CodecResource(mFlags & kFlagIsSecure, mIsVideo));
// Don't know the buffer size at this point, but it's fine to use 1 because
// the reclaimResource call doesn't consider the requester's buffer size for now.
resources.push_back(MediaResource::GraphicMemoryResource(1));
for (int i = 0; i <= kMaxRetry; ++i) {
if (i > 0) {
...... //MediaResource handling (elided)
}
sp<AMessage> response;
err = PostAndAwaitResponse(msg, &response);
// Only retry when the failure was caused by resource contention.
if (!isResourceError(err)) {
break;
}
}
return err;
}
// Looper-side handler (excerpt: kWhatStart only). From FLUSHED the codec
// resumes directly; from CONFIGURED it enters STARTING and asks ACodec to
// begin the OMX state transition; any other state is an error.
void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
switch (msg->what()) {
......
case kWhatStart:
{
sp<AReplyToken> replyID;
CHECK(msg->senderAwaitsResponse(&replyID));
if (mState == FLUSHED) {
// Already started once; just resume and replay any pending input.
setState(STARTED);
if (mHavePendingInputBuffers) {
onInputBufferAvailable();
mHavePendingInputBuffers = false;
}
mCodec->signalResume();
PostReplyWithError(replyID, OK);
break;
} else if (mState != CONFIGURED) {
// start() is only legal from CONFIGURED (or FLUSHED, handled above).
PostReplyWithError(replyID, INVALID_OPERATION);
break;
}
// Reply is deferred until ACodec reports that start completed.
mReplyID = replyID;
setState(STARTING);
mCodec->initiateStart();
break;
}
......
}
}
frameworks\av\media\libstagefright\ACodec.cpp
// Posts kWhatStart to ACodec's own looper; it is handled by the current
// state object (here LoadedState::onMessageReceived).
void ACodec::initiateStart() {
(new AMessage(kWhatStart, this))->post();
}
// LoadedState message handler (excerpt: kWhatStart only) — dispatches the
// start request to onStart(). Returns true when the message was handled.
bool ACodec::LoadedState::onMessageReceived(const sp<AMessage> &msg) {
bool handled = false;
switch (msg->what()) {
......
case ACodec::kWhatStart:
{
onStart();
handled = true;
break;
}
......
}
return handled;
}
// Begins the OMX state transition Loaded -> Idle by sending
// OMX_CommandStateSet(OMX_StateIdle) to the component via mOMXNode; on
// success ACodec moves to LoadedToIdleState to await completion.
void ACodec::LoadedState::onStart() {
ALOGV("onStart");
status_t err = mCodec->mOMXNode->sendCommand(OMX_CommandStateSet, OMX_StateIdle);
if (err != OK) {
mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
} else {
mCodec->changeState(mCodec->mLoadedToIdleState);
}
}
frameworks\av\media\libstagefright\omx\1.0\WOmxNode.cpp
// HIDL-side shim: converts the raw command value into the OMX enum type
// and forwards to the wrapped OMXNodeInstance (mBase).
Return<Status> TWOmxNode::sendCommand(uint32_t cmd, int32_t param) {
return toStatus(mBase->sendCommand(toEnumCommandType(cmd), param));
}
frameworks\av\media\libstagefright\omx\OMXNodeInstance.cpp
// Forwards an OMX command to the component held in mHandle (via the
// OMX_SendCommand macro). For state-set commands it first notifies the
// input buffer source, if any, so it stops submitting or releases buffers
// before the state transition begins.
status_t OMXNodeInstance::sendCommand(OMX_COMMANDTYPE cmd, OMX_S32 param) {
const sp<IOMXBufferSource> bufferSource(getBufferSource());
if (bufferSource != NULL && cmd == OMX_CommandStateSet) {
if (param == OMX_StateIdle) {
// Initiating transition from Executing -> Idle
// ACodec is waiting for all buffers to be returned, do NOT
// submit any more buffers to the codec.
bufferSource->onOmxIdle();
} else if (param == OMX_StateLoaded) {
// Initiating transition from Idle/Executing -> Loaded
// Buffers are about to be freed.
bufferSource->onOmxLoaded();
setBufferSource(NULL);
}
// fall through
}
Mutex::Autolock autoLock(mLock);
if (mHandle == NULL) {
// Component handle was never set or has already been freed.
return DEAD_OBJECT;
}
if (cmd == OMX_CommandStateSet) {
// There are no configurations past first StateSet command.
mSailed = true;
}
// bump internal-state debug level for 2 input and output frames past a command
{
Mutex::Autolock _l(mDebugLock);
bumpDebugLevel_l(2 /* numInputBuffers */, 2 /* numOutputBuffers */);
}
const char *paramString = cmd == OMX_CommandStateSet ? asString((OMX_STATETYPE)param) : portString(param);
CLOG_STATE(sendCommand, "%s(%d), %s(%d)", asString(cmd), cmd, paramString, param);
// Dispatch into the component's SendCommand entry point (vendor code).
OMX_ERRORTYPE err = OMX_SendCommand(mHandle, cmd, param, NULL);
CLOG_IF_ERROR(sendCommand, err, "%s(%d), %s(%d)", asString(cmd), cmd, paramString, param);
return StatusFromOMXError(err);
}
这是SOC厂商实现的内容,一般在libOMX_Core.so
中。
frameworks/native/headers/media_plugin/media/openmax/OMX_Core.h
// Invokes the SendCommand function pointer of the OMX_COMPONENTTYPE struct
// that hComponent points to — i.e. calls directly into the component
// implementation created by makeComponentInstance(...).
#define OMX_SendCommand( \
hComponent, \
Cmd, \
nParam, \
pCmdData) \
((OMX_COMPONENTTYPE*)(hComponent))->SendCommand( \
hComponent, \
Cmd, \
nParam, \
pCmdData) /* Macro End */
由文章开头的“补充”内容可知,OMX_ERRORTYPE err = OMX_SendCommand(mHandle, cmd, param, NULL);
语句中的mHandle
变量保存的是通过mMaster->makeComponentInstance(...)
创建的component实例。
所以((OMX_COMPONENTTYPE*)(hComponent))->SendCommand(hComponent, Cmd, nParam, pCmdData)
函数,就是调用通过mMaster->makeComponentInstance(...)
创建的component实例的SendCommand
函数。