Stagefright AwesomePlayer 播放流程的函数调用
android多媒体框架中, StagefrightPlayer其实是AwesomePlayer的代理, 对AwesomePlayer进行了一层浅封装, 实际功能由AwesomePlayer完成。MediaPlayer的大致流程以及StagefrightPlayer、AwesomePlayer的相关调用如下:
mediaPlayer.setDataSource(path);
mediaPlayer.prepare();
mediaPlayer.start();
StagefrightPlayer.cpp
AwesomePlayer *mPlayer;
...
// Thin proxy: hand the URL (plus optional HTTP request headers) straight
// through to the wrapped AwesomePlayer instance.
status_t StagefrightPlayer::setDataSource(
        const char *url, const KeyedVector<String8, String8> *headers) {
    const status_t result = mPlayer->setDataSource(url, headers);
    return result;
}
// Synchronous prepare — forwarded verbatim to AwesomePlayer.
status_t StagefrightPlayer::prepare() {
    const status_t result = mPlayer->prepare();
    return result;
}
// MediaPlayer's start() maps onto AwesomePlayer::play().
status_t StagefrightPlayer::start() {
    ALOGV("start");
    const status_t result = mPlayer->play();
    return result;
}
// stop() is implemented as pause(): AwesomePlayer keeps its decoder state,
// so "stop" at this layer merely halts playback. The question below is the
// original AOSP author's own comment.
status_t StagefrightPlayer::stop() {
ALOGV("stop");
return pause(); // what's the difference?
}
// Pause is delegated directly to the wrapped AwesomePlayer.
status_t StagefrightPlayer::pause() {
    ALOGV("pause");
    const status_t result = mPlayer->pause();
    return result;
}
AwesomePlayer 和 TimedEventQueue:
TimedEventQueue是AwesomePlayer的时间事件队列,也是事件调度器。通过TimedEventQueue::postTimedEvent(),事件按照触发时间的先后顺序被插入TimedEventQueue的队列之中;随后其内部线程从队列中依次取出到期的事件,并通过fire()回调执行该事件绑定的功能函数。
TimedEventQueue.h
// Timed event queue: AwesomePlayer's scheduler. Events are kept ordered by
// their absolute fire time and are executed one by one on a dedicated
// worker thread.
struct TimedEventQueue {
typedef int32_t event_id;
// Base class for queued events; subclasses implement fire() with the work
// to perform when the event's time arrives.
struct Event : public RefBase {
Event()
: mEventID(0) {
}
virtual ~Event() {}
event_id eventID() {
return mEventID;
}
protected:
// Invoked on the queue's worker thread when the event comes due.
virtual void fire(TimedEventQueue *queue, int64_t now_us) = 0;
private:
friend class TimedEventQueue;
event_id mEventID;
// Assigned by TimedEventQueue when the event is posted.
void setEventID(event_id id) {
mEventID = id;
}
// Events are not copyable.
Event(const Event &);
Event &operator=(const Event &);
};
TimedEventQueue();
~TimedEventQueue();
// Starts the worker thread.
void start();
// Stops the worker thread. With flush == true all pending events fire
// first; otherwise remaining events are discarded.
void stop(bool flush = false);
// Posts an event to fire as soon as possible.
event_id postEvent(const sp<Event> &event);
event_id postEventToBack(const sp<Event> &event);
// Posts an event delayed by delay_us relative to now.
event_id postEventWithDelay(const sp<Event> &event, int64_t delay_us);
// Posts an event at an absolute real time; the other post variants are
// expressed in terms of this one.
event_id postTimedEvent(const sp<Event> &event, int64_t realtime_us);
// Returns true iff the event was removed before it fired.
bool cancelEvent(event_id id);
void cancelEvents(bool (*predicate)(void *cookie, const sp<Event> &event),void *cookie,bool stopAfterFirstMatch = false);
static int64_t getRealTimeUs();
private:
struct QueueItem {
sp<Event> event; // the event to run
int64_t realtime_us; // absolute time at which it should fire
};
// Sentinel posted by stop(): firing it makes the worker loop exit.
struct StopEvent : public TimedEventQueue::Event {
virtual void fire(TimedEventQueue *queue, int64_t now_us) {
queue->mStopped = true;
}
};
pthread_t mThread; // worker thread, created in TimedEventQueue::start()
List<QueueItem> mQueue; // the time-ordered event queue
Mutex mLock;
Condition mQueueNotEmptyCondition;
Condition mQueueHeadChangedCondition;
event_id mNextEventID;
bool mRunning;
bool mStopped;
static void *ThreadWrapper(void *me);
void threadEntry();
sp<Event> removeEventFromQueue_l(event_id id);
// Not copyable.
TimedEventQueue(const TimedEventQueue &);
TimedEventQueue &operator=(const TimedEventQueue &);
};
TimedEventQueue.cpp
// Spins up the queue's worker thread. Idempotent: calling start() while
// already running is a no-op.
//
// BUGFIX: the original ignored pthread_create()'s return value and set
// mRunning unconditionally, so a failed thread creation left the queue
// claiming to run and a later stop() would pthread_join() an invalid
// handle. mRunning is now only set once the thread actually exists.
void TimedEventQueue::start() {
    if (mRunning) {
        return;
    }

    mStopped = false;

    pthread_attr_t attr;
    pthread_attr_init(&attr);
    pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_JOINABLE);

    // pthread_create returns 0 on success, an error number otherwise.
    int err = pthread_create(&mThread, &attr, ThreadWrapper, this);
    pthread_attr_destroy(&attr);

    if (err != 0) {
        // Creation failed; leave the queue in the stopped state.
        return;
    }

    mRunning = true;
}
// Shuts down the worker thread started by start().
// flush == true : let every already-queued event fire first (the StopEvent
//                 is appended at the back of the queue).
// flush == false: stop as soon as possible (the StopEvent is posted with
//                 INT64_MIN so it sorts to the head of the queue).
void TimedEventQueue::stop(bool flush) {
if (!mRunning) {
return;
}
if (flush) {
postEventToBack(new StopEvent);
} else {
postTimedEvent(new StopEvent, INT64_MIN);
}
// Wait for the worker thread to process the StopEvent and exit.
void *dummy;
pthread_join(mThread, &dummy);
// Any events still queued (the non-flush case) are dropped here.
mQueue.clear();
mRunning = false;
}
AwesomePlayer
AwesomePlayer是通过event事件调度来实现这些功能之间的驱动和调用的
AwesomeEvent将想要调用的功能函数与事件捆绑, 继承于TimedEventQueue::Event,
AwesomePlayer只是通过其拥有的TimedEventQueue实例,调用TimedEventQueue::postTimedEvent(), 按照延时的优先顺序把事件放到TimedEventQueue的队列之中。
TimedEventQueue start之后,自己内部的线程会从队列中依次取出这些事件,然后通过event->fire回调事件的功能函数。实现具体的功能。
AwesomePlayer.h:
...
// OMXClient is the entry point into OpenMAX (the OMX IL service) on Android.
OMXClient mClient;
// Event scheduler: mQueue is AwesomePlayer's time-based event queue.
TimedEventQueue mQueue;
bool mQueueStarted;
wp<MediaPlayerBase> mListener;
bool mUIDValid;
uid_t mUID;
sp<ANativeWindow> mNativeWindow;
sp<MediaPlayerBase::AudioSink> mAudioSink;
SystemTimeSource mSystemTimeSource;
TimeSource *mTimeSource;
String8 mUri;
KeyedVector<String8, String8> mUriHeaders;
sp<DataSource> mFileSource;
sp<MediaSource> mVideoTrack;
sp<MediaSource> mVideoSource;
sp<AwesomeRenderer> mVideoRenderer;
bool mVideoRendererIsPreview;
sp<MediaSource> mAudioTrack;
sp<MediaSource> mAudioSource;
...
// Each event below is paired with a "pending" flag that guards against
// posting the same event twice.
sp<TimedEventQueue::Event> mVideoEvent;
bool mVideoEventPending;
sp<TimedEventQueue::Event> mStreamDoneEvent;
bool mStreamDoneEventPending;
sp<TimedEventQueue::Event> mBufferingEvent;
bool mBufferingEventPending;
sp<TimedEventQueue::Event> mCheckAudioStatusEvent;
bool mAudioStatusEventPending;
sp<TimedEventQueue::Event> mVideoLagEvent;
bool mVideoLagEventPending;
sp<TimedEventQueue::Event> mAsyncPrepareEvent;
...
AwesomePlayer.cpp
// Glue between TimedEventQueue and AwesomePlayer: binds a pointer-to-member
// function to an event, so firing the event invokes that player method.
struct AwesomeEvent : public TimedEventQueue::Event {
AwesomeEvent(
AwesomePlayer *player,
void (AwesomePlayer::*method)()) : mPlayer(player),
mMethod(method) {
}
protected:
virtual ~AwesomeEvent() {}
// Runs on the queue's worker thread; simply forwards to the bound method.
virtual void fire(TimedEventQueue *queue, int64_t /* now_us */) {
(mPlayer->*mMethod)();
}
private:
AwesomePlayer *mPlayer; // not owned
void (AwesomePlayer::*mMethod)();
// Not copyable.
AwesomeEvent(const AwesomeEvent &);
AwesomeEvent &operator=(const AwesomeEvent &);
};
// Constructor: connects to the OMX IL service, registers the default
// extractor sniffers and binds each player event to its handler method.
AwesomePlayer::AwesomePlayer()
    : mQueueStarted(false),
      mUIDValid(false),
      mTimeSource(NULL),
      mVideoRendererIsPreview(false),
      mAudioPlayer(NULL),
      mDisplayWidth(0),
      mDisplayHeight(0),
      mFlags(0),
      mExtractorFlags(0),
      mVideoBuffer(NULL),
      mDecryptHandle(NULL),
      mLastVideoTimeUs(-1),
      mTextPlayer(NULL) {
    CHECK_EQ(mClient.connect(), (status_t)OK);
    DataSource::RegisterDefaultSniffers();

    // Create the event objects, binding each to the member function that
    // implements it, and clear the matching "pending" flag.
    mVideoEvent = new AwesomeEvent(this, &AwesomePlayer::onVideoEvent);
    mVideoEventPending = false;
    mStreamDoneEvent = new AwesomeEvent(this, &AwesomePlayer::onStreamDone);
    mStreamDoneEventPending = false;
    mBufferingEvent = new AwesomeEvent(this, &AwesomePlayer::onBufferingUpdate);
    mBufferingEventPending = false;
    mVideoLagEvent = new AwesomeEvent(this, &AwesomePlayer::onVideoLagUpdate);
    // BUGFIX: this line used to clear mVideoEventPending a second time
    // (copy/paste error), leaving mVideoLagEventPending uninitialized here.
    mVideoLagEventPending = false;
    mCheckAudioStatusEvent = new AwesomeEvent(
            this, &AwesomePlayer::onCheckAudioStatus);
    mAudioStatusEventPending = false;

#ifdef SLSI_ULP_AUDIO
    mLibAudioHandle = NULL;
    mIsULPAudio = false;
    mUseULPAudio = false;
#endif

    reset();
}
1、prepare()函数实现的调用流程:
prepare()=>prepare_l()=>prepareAsync_l()=>mQueue.postEvent(mAsyncPrepareEvent)
// Public entry point: acquire the player lock, then run the locked variant.
status_t AwesomePlayer::prepare() {
    Mutex::Autolock autoLock(mLock);
    const status_t result = prepare_l();
    return result;
}
// Synchronous prepare; caller must hold mLock. Kicks off the asynchronous
// prepare path and then blocks on mPreparedCondition until the PREPARING
// flag is cleared by the async prepare handler.
status_t AwesomePlayer::prepare_l() {
if (mFlags & PREPARED) {
// Already prepared: nothing to do.
return OK;
}
if (mFlags & PREPARING) {
// An async prepare is already in flight.
return UNKNOWN_ERROR;
}
mIsAsyncPrepare = false;
status_t err = prepareAsync_l();
if (err != OK) {
return err;
}
// Wait until onPrepareAsyncEvent() finishes and clears PREPARING.
while (mFlags & PREPARING) {
mPreparedCondition.wait(mLock);
}
return mPrepareResult;
}
// Queues the actual prepare work (onPrepareAsyncEvent) on the event queue;
// caller must hold mLock. The queue's worker thread is started lazily on
// first use.
status_t AwesomePlayer::prepareAsync_l() {
if (mFlags & PREPARING) {
return UNKNOWN_ERROR; // async prepare already pending
}
if (!mQueueStarted) {
// Lazily start the TimedEventQueue worker thread.
mQueue.start();
mQueueStarted = true;
}
modifyFlags(PREPARING, SET);
mAsyncPrepareEvent = new AwesomeEvent(
this, &AwesomePlayer::onPrepareAsyncEvent);
// The queue's thread will pick this up and call onPrepareAsyncEvent().
mQueue.postEvent(mAsyncPrepareEvent);
return OK;
}
2、play()函数实现的调用流程:
play() =>play_l()=>postVideoEvent_l():
postVideoEvent_l()内部调用mQueue.postEventWithDelay(mVideoEvent, delayUs < 0 ? 10000 : delayUs)把mVideoEvent事件放入mQueue时间事件队列,由mQueue中的线程自行调度。
而mVideoEvent = new AwesomeEvent(this, &AwesomePlayer::onVideoEvent);
mVideoEvent绑定了onVideoEvent()函数:onVideoEvent()内部先调用mVideoSource->read(&mVideoBuffer, &options)读取并解码一帧数据,再调用mVideoRenderer->render(mVideoBuffer)渲染解码后的数据,最后再次调用postVideoEvent_l()把mVideoEvent重新放入队列,
从而实现“读取—解码—渲染”的循环播放。
// Public play entry: clear the cache-underrun flag under the lock, then
// delegate to the locked implementation.
status_t AwesomePlayer::play() {
    Mutex::Autolock autoLock(mLock);
    modifyFlags(CACHE_UNDERRUN, CLEAR);
    const status_t result = play_l();
    return result;
}
// Core play logic; caller must hold mLock. Prepares on demand, then kicks
// off the video event loop (plus the video-lag watchdog when both audio
// and video sources are present).
status_t AwesomePlayer::play_l() {
modifyFlags(SEEK_PREVIEW, CLEAR);
if (mFlags & PLAYING) {
return OK;
}
// Lazily prepare if the client skipped prepare()/prepareAsync().
if (!(mFlags & PREPARED)) {
status_t err = prepare_l();
if (err != OK) {
return err;
}
}
modifyFlags(PLAYING, SET);
modifyFlags(FIRST_FRAME, SET);
...
if (mVideoSource != NULL) {
// Kick off video playback
postVideoEvent_l();
if (mAudioSource != NULL && mVideoSource != NULL) {
postVideoLagEvent_l();
}
}
return OK;
}
// Schedules the next onVideoEvent() unless one is already queued.
// A negative delay selects the default 10ms interval.
void AwesomePlayer::postVideoEvent_l(int64_t delayUs) {
    if (!mVideoEventPending) {
        mVideoEventPending = true;
        const int64_t effectiveDelayUs = (delayUs < 0) ? 10000 : delayUs;
        // Hand mVideoEvent to the timed queue; its worker thread will
        // fire the event once the delay elapses.
        mQueue.postEventWithDelay(mVideoEvent, effectiveDelayUs);
    }
}
// Heart of video playback, run on the event queue's thread. One invocation
// handles one frame: (optionally) services a pending seek, reads/decodes a
// frame from mVideoSource, performs A/V-sync bookkeeping (dropping late
// frames, delaying early ones), renders the frame, and finally re-posts
// itself via postVideoEvent_l() to keep the read-decode-render loop going.
void AwesomePlayer::onVideoEvent() {
Mutex::Autolock autoLock(mLock);
if (!mVideoEventPending) {
// Event was cancelled after being posted.
return;
}
mVideoEventPending = false;
if (mSeeking != NO_SEEK) {
// Discard the stale pre-seek frame, if any.
if (mVideoBuffer) {
mVideoBuffer->release();
mVideoBuffer = NULL;
}
if (mSeeking == SEEK && isStreamingHTTP() && mAudioSource != NULL
&& !(mFlags & SEEK_PREVIEW)) {
// For HTTP streaming, pause audio while the seek is serviced.
if (mAudioPlayer != NULL && (mFlags & AUDIO_RUNNING)) {
mAudioPlayer->pause();
modifyFlags(AUDIO_RUNNING, CLEAR);
}
mAudioSource->pause();
}
}
if (!mVideoBuffer) {
MediaSource::ReadOptions options;
if (mSeeking != NO_SEEK) {
LOGV("seeking to %lld us (%.2f secs)", mSeekTimeUs, mSeekTimeUs / 1E6);
options.setSeekTo(mSeekTimeUs,
mSeeking == SEEK_VIDEO_ONLY
? MediaSource::ReadOptions::SEEK_NEXT_SYNC
: MediaSource::ReadOptions::SEEK_CLOSEST_SYNC);
}
// Loop until we obtain a non-empty decoded frame or hit EOS/error.
for (;;) {
status_t err = mVideoSource->read(&mVideoBuffer, &options);
options.clearSeekTo();
if (err != OK) {
CHECK(mVideoBuffer == NULL);
if (err == INFO_FORMAT_CHANGED) {
// Output format changed (e.g. resolution): re-init the renderer.
LOGV("VideoSource signalled format change.");
notifyVideoSize_l();
if (mVideoRenderer != NULL) {
mVideoRendererIsPreview = false;
initRenderer_l();
}
continue;
}
if (mSeeking != NO_SEEK) {
LOGV("video stream ended while seeking!");
}
finishSeekIfNecessary(-1);
if (mAudioPlayer != NULL
&& !(mFlags & (AUDIO_RUNNING | SEEK_PREVIEW))) {
startAudioPlayer_l();
}
// End of stream (or error): report it and stop the loop.
modifyFlags(VIDEO_AT_EOS, SET);
postStreamDoneEvent_l(err);
return;
}
if (mVideoBuffer->range_length() == 0) {
// Empty buffers (e.g. codec-config only) are skipped.
mVideoBuffer->release();
mVideoBuffer = NULL;
continue;
}
break;
}
{
Mutex::Autolock autoLock(mStatsLock);
++mStats.mNumVideoFramesDecoded;
}
}
// Every decoded frame must carry a presentation timestamp.
int64_t timeUs;
CHECK(mVideoBuffer->meta_data()->findInt64(kKeyTime, &timeUs));
mLastVideoTimeUs = timeUs;
if (mSeeking == SEEK_VIDEO_ONLY) {
if (mSeekTimeUs > timeUs) {
LOGI("XXX mSeekTimeUs = %lld us, timeUs = %lld us",
mSeekTimeUs, timeUs);
}
}
{
Mutex::Autolock autoLock(mMiscStateLock);
mVideoTimeUs = timeUs;
}
SeekType wasSeeking = mSeeking;
finishSeekIfNecessary(timeUs);
if (mAudioPlayer != NULL && !(mFlags & (AUDIO_RUNNING | SEEK_PREVIEW))) {
status_t err = startAudioPlayer_l();
if (err != OK) {
LOGE("Starting the audio player failed w/ err %d", err);
return;
}
}
if ((mFlags & TEXTPLAYER_STARTED) && !(mFlags & (TEXT_RUNNING | SEEK_PREVIEW))) {
mTextPlayer->resume();
modifyFlags(TEXT_RUNNING, SET);
}
// Pick the playback clock: audio clock while audio runs, otherwise the
// system clock.
TimeSource *ts =
((mFlags & AUDIO_AT_EOS) || !(mFlags & AUDIOPLAYER_STARTED))
? &mSystemTimeSource : mTimeSource;
if (mFlags & FIRST_FRAME) {
modifyFlags(FIRST_FRAME, CLEAR);
mSinceLastDropped = 0;
mTimeSourceDeltaUs = ts->getRealTimeUs() - timeUs;
}
// Re-anchor the media-time -> real-time mapping from the audio player.
int64_t realTimeUs, mediaTimeUs;
if (!(mFlags & AUDIO_AT_EOS) && mAudioPlayer != NULL
&& mAudioPlayer->getMediaTimeMapping(&realTimeUs, &mediaTimeUs)) {
mTimeSourceDeltaUs = realTimeUs - mediaTimeUs;
}
if (wasSeeking == SEEK_VIDEO_ONLY) {
int64_t nowUs = ts->getRealTimeUs() - mTimeSourceDeltaUs;
int64_t latenessUs = nowUs - timeUs;
if (latenessUs > 0) {
LOGI("after SEEK_VIDEO_ONLY we're late by %.2f secs", latenessUs / 1E6);
}
}
if (wasSeeking == NO_SEEK) {
// Let's display the first frame after seeking right away.
int64_t nowUs = ts->getRealTimeUs() - mTimeSourceDeltaUs;
int64_t latenessUs = nowUs - timeUs;
if (latenessUs > 500000ll
&& mAudioPlayer != NULL
&& mAudioPlayer->getMediaTimeMapping(
&realTimeUs, &mediaTimeUs)) {
// More than 0.5s behind the audio clock: skip ahead by seeking
// (video only) to the current audio position.
LOGI("we're much too late (%.2f secs), video skipping ahead",
latenessUs / 1E6);
mVideoBuffer->release();
mVideoBuffer = NULL;
mSeeking = SEEK_VIDEO_ONLY;
mSeekTimeUs = mediaTimeUs;
postVideoEvent_l();
return;
}
if (latenessUs > 40000) {
// We're more than 40ms late.
LOGV("we're late by %lld us (%.2f secs)",
latenessUs, latenessUs / 1E6);
if (!(mFlags & SLOW_DECODER_HACK)
|| mSinceLastDropped > FRAME_DROP_FREQ)
{
// Drop this frame (rate-limited for slow decoders) and try
// the next one immediately.
LOGV("we're late by %lld us (%.2f secs) dropping "
"one after %d frames",
latenessUs, latenessUs / 1E6, mSinceLastDropped);
mSinceLastDropped = 0;
mVideoBuffer->release();
mVideoBuffer = NULL;
{
Mutex::Autolock autoLock(mStatsLock);
++mStats.mNumVideoFramesDropped;
}
postVideoEvent_l();
return;
}
}
if (latenessUs < -10000) {
// We're more than 10ms early.
postVideoEvent_l(10000);
return;
}
}
// (Re-)create the renderer if the native window changed or we only had
// a preview renderer so far.
if ((mNativeWindow != NULL)
&& (mVideoRendererIsPreview || mVideoRenderer == NULL)) {
mVideoRendererIsPreview = false;
initRenderer_l();
}
if (mVideoRenderer != NULL) {
mSinceLastDropped++;
// The decoded frame is displayed by mVideoRenderer.
mVideoRenderer->render(mVideoBuffer);
}
mVideoBuffer->release();
mVideoBuffer = NULL;
if (wasSeeking != NO_SEEK && (mFlags & SEEK_PREVIEW)) {
// Seek preview shows a single frame; do not continue the loop.
modifyFlags(SEEK_PREVIEW, CLEAR);
return;
}
// Re-post ourselves: this is what keeps the playback loop running.
postVideoEvent_l();
}
3、mVideoSource->read(&mVideoBuffer, &options)读取数据
mVideoSource->read(&mVideoBuffer, &options)具体是调用OMXCodec.read来读取数据。而OMXCodec.read主要分两步来实现数据的读取:
(1) 、通过调用drainInputBuffers()对mPortBuffers[kPortIndexInput]进行填充,这一步完成 parse。由OpenMAX从数据源把demux后的数据读取到输入缓冲区,作为OpenMAX的输入。
drainInputBuffers()=>OMXCodec::drainInputBuffer(BufferInfo *info)内部调用mOMX->emptyBuffer(
mNode, info->mBuffer, 0, offset,
flags, timestampUs);
IOMX.cpp:
// Binder proxy (BpOMX) for IOMX::emptyBuffer: marshals the node and buffer
// ids plus the valid data range into a Parcel and sends EMPTY_BUFFER to the
// remote OMX service, handing the input buffer to the component to consume.
// NOTE: the write order below IS the wire protocol; it must match the
// server-side unmarshalling exactly.
virtual status_t emptyBuffer(
node_id node,
buffer_id buffer,
OMX_U32 range_offset, OMX_U32 range_length,
OMX_U32 flags, OMX_TICKS timestamp) {
Parcel data, reply;
data.writeInterfaceToken(IOMX::getInterfaceDescriptor());
data.writeIntPtr((intptr_t)node);
data.writeIntPtr((intptr_t)buffer);
data.writeInt32(range_offset);
data.writeInt32(range_length);
data.writeInt32(flags);
data.writeInt64(timestamp);
// Synchronous binder call; the component consumes the data asynchronously.
remote()->transact(EMPTY_BUFFER, data, &reply);
return reply.readInt32();
}
(2)、通过fillOutputBuffers()对mPortBuffers[kPortIndexOutput]进行填充,这一步完成 decode。由OpenMAX对输入缓冲区中的数据进行解码,然后把解码后可以显示的视频数据输出到输出缓冲区
fillOutputBuffers()=>OMXCodec::fillOutputBuffer(BufferInfo *info)内部调用
mOMX->fillBuffer(mNode, info->mBuffer);
IOMX.cpp:
// Binder proxy (BpOMX) for IOMX::fillBuffer: asks the remote OMX component
// to fill the given output buffer with decoded data. The Parcel write order
// is the wire protocol and must match the server side.
virtual status_t fillBuffer(node_id node, buffer_id buffer) {
Parcel data, reply;
data.writeInterfaceToken(IOMX::getInterfaceDescriptor());
data.writeIntPtr((intptr_t)node);
data.writeIntPtr((intptr_t)buffer);
remote()->transact(FILL_BUFFER, data, &reply);
return reply.readInt32();
}
MediaBuffer.h
// Reference-counted buffer used to pass media data (compressed or decoded)
// between MediaSource/MediaSink stages; may wrap caller-owned memory, its
// own allocation, a GraphicBuffer or an ABuffer.
class MediaBuffer {
public:
// The underlying data remains the responsibility of the caller!
MediaBuffer(void *data, size_t size);
MediaBuffer(size_t size);
MediaBuffer(const sp<GraphicBuffer>& graphicBuffer);
MediaBuffer(const sp<ABuffer> &buffer);
// Decrements the reference count and returns the buffer to its
// associated MediaBufferGroup if the reference count drops to 0.
void release();
// Increments the reference count.
void add_ref();
void *data() const;
size_t size() const;
// The valid data range is [range_offset, range_offset + range_length).
size_t range_offset() const;
size_t range_length() const;
void set_range(size_t offset, size_t length);
sp<GraphicBuffer> graphicBuffer() const;
// Per-buffer metadata (e.g. kKeyTime presentation timestamp).
sp<MetaData> meta_data();
// Clears meta data and resets the range to the full extent.
void reset();
void setObserver(MediaBufferObserver *group);
// Returns a clone of this MediaBuffer increasing its reference count.
// The clone references the same data but has its own range and
// MetaData.
MediaBuffer *clone();
int refcount() const;
protected:
// Destroyed via release(), never deleted directly.
virtual ~MediaBuffer();
private:
friend class MediaBufferGroup;
friend class OMXDecoder;
// For use by OMXDecoder, reference count must be 1, drop reference
// count to 0 without signalling the observer.
void claim();
MediaBufferObserver *mObserver;
MediaBuffer *mNextBuffer;
int mRefCount;
void *mData;
size_t mSize, mRangeOffset, mRangeLength;
sp<GraphicBuffer> mGraphicBuffer;
sp<ABuffer> mBuffer;
bool mOwnsData;
sp<MetaData> mMetaData;
MediaBuffer *mOriginal;
void setNextBuffer(MediaBuffer *buffer);
MediaBuffer *nextBuffer();
// Not copyable.
MediaBuffer(const MediaBuffer &);
MediaBuffer &operator=(const MediaBuffer &);
};
OMXCodec.h
// MediaSource wrapper around an OpenMAX component: read() returns decoded
// buffers. Also a MediaBufferObserver so released buffers can be returned
// to the component. (Excerpted declaration.)
struct OMXCodec : public MediaSource,
public MediaBufferObserver {
...
// Factory: picks and instantiates a matching OMX component for meta.
static sp<MediaSource> Create(
const sp<IOMX> &omx,
const sp<MetaData> &meta, bool createEncoder,
const sp<MediaSource> &source,
const char *matchComponentName = NULL,
uint32_t flags = 0,
const sp<ANativeWindow> &nativeWindow = NULL);
...
// Blocks until a decoded buffer is available (or EOS/error).
virtual status_t read(
MediaBuffer **buffer, const ReadOptions *options = NULL);
...
// A list of indices into mPortStatus[kPortIndexOutput] filled with data.
List<size_t> mFilledBuffers;
Condition mBufferFilled;
...
// Bookkeeping for one OMX buffer on an input or output port.
struct BufferInfo {
IOMX::buffer_id mBuffer;
BufferStatus mStatus;
sp<IMemory> mMem;
size_t mSize;
void *mData;
MediaBuffer *mMediaBuffer;
};
...
};
OMXCodec.cpp
// Returns one decoded buffer to the caller. On the first call it primes the
// component (drainInputBuffers submits demuxed input, fillOutputBuffers
// requests decoded output); afterwards it services seeks by flushing both
// ports, then blocks until mFilledBuffers has a decoded buffer to hand out.
// Ownership of the returned MediaBuffer passes to the caller (OWNED_BY_CLIENT).
status_t OMXCodec::read(
MediaBuffer **buffer, const ReadOptions *options) {
status_t err = OK;
*buffer = NULL;
Mutex::Autolock autoLock(mLock);
if (mState != EXECUTING && mState != RECONFIGURING) {
return UNKNOWN_ERROR;
}
bool seeking = false;
int64_t seekTimeUs;
ReadOptions::SeekMode seekMode;
if (options && options->getSeekTo(&seekTimeUs, &seekMode)) {
seeking = true;
}
if (mInitialBufferSubmit) {
mInitialBufferSubmit = false;
if (seeking) {
CHECK(seekTimeUs >= 0);
mSeekTimeUs = seekTimeUs;
mSeekMode = seekMode;
// There's no reason to trigger the code below, there's
// nothing to flush yet.
seeking = false;
mPaused = false;
}
// Step 1: submit input (parse/demux) ...
drainInputBuffers();
if (mState == EXECUTING) {
// Otherwise mState == RECONFIGURING and this code will trigger
// after the output port is reenabled.
// Step 2: request decoded output.
fillOutputBuffers();
}
}
if (seeking) {
// Wait out any in-progress port reconfiguration before flushing.
while (mState == RECONFIGURING) {
if ((err = waitForBufferFilled_l()) != OK) {
return err;
}
}
if (mState != EXECUTING) {
return UNKNOWN_ERROR;
}
CODEC_LOGV("seeking to %lld us (%.2f secs)", seekTimeUs, seekTimeUs / 1E6);
mSignalledEOS = false;
CHECK(seekTimeUs >= 0);
mSeekTimeUs = seekTimeUs;
mSeekMode = seekMode;
// Discard already-decoded output; it predates the seek.
mFilledBuffers.clear();
CHECK_EQ((int)mState, (int)EXECUTING);
// Flush both ports; if a port had nothing to flush, emulate the
// completion callback ourselves.
bool emulateInputFlushCompletion = !flushPortAsync(kPortIndexInput);
bool emulateOutputFlushCompletion = !flushPortAsync(kPortIndexOutput);
if (emulateInputFlushCompletion) {
onCmdComplete(OMX_CommandFlush, kPortIndexInput);
}
if (emulateOutputFlushCompletion) {
onCmdComplete(OMX_CommandFlush, kPortIndexOutput);
}
// mSeekTimeUs is reset (<0) once the seek has been serviced.
while (mSeekTimeUs >= 0) {
if ((err = waitForBufferFilled_l()) != OK) {
return err;
}
}
}
// Block until decoded output is available, EOS is reached, or an error.
while (mState != ERROR && !mNoMoreOutputData && mFilledBuffers.empty()) {
if ((err = waitForBufferFilled_l()) != OK) {
return err;
}
}
if (mState == ERROR) {
return UNKNOWN_ERROR;
}
if (mFilledBuffers.empty()) {
return mSignalledEOS ? mFinalStatus : ERROR_END_OF_STREAM;
}
if (mOutputPortSettingsHaveChanged) {
// Tell the caller the output format changed (caller re-queries format).
mOutputPortSettingsHaveChanged = false;
return INFO_FORMAT_CHANGED;
}
// Pop the oldest filled output buffer and hand it to the caller.
size_t index = *mFilledBuffers.begin();
mFilledBuffers.erase(mFilledBuffers.begin());
BufferInfo *info = &mPortBuffers[kPortIndexOutput].editItemAt(index);
CHECK_EQ((int)info->mStatus, (int)OWNED_BY_US);
info->mStatus = OWNED_BY_CLIENT;
info->mMediaBuffer->add_ref();
*buffer = info->mMediaBuffer;
return OK;
}
// Submits every input buffer we currently own to the OMX component
// (via drainInputBuffer -> IOMX::emptyBuffer), i.e. feeds demuxed source
// data into the decoder. Secure input buffers use a separate path.
void OMXCodec::drainInputBuffers() {
CHECK(mState == EXECUTING || mState == RECONFIGURING);
if (mFlags & kUseSecureInputBuffers) {
Vector<BufferInfo> *buffers = &mPortBuffers[kPortIndexInput];
for (size_t i = 0; i < buffers->size(); ++i) {
if (!drainAnyInputBuffer()
|| (mFlags & kOnlySubmitOneInputBufferAtOneTime)) {
break;
}
}
} else {
Vector<BufferInfo> *buffers = &mPortBuffers[kPortIndexInput];
for (size_t i = 0; i < buffers->size(); ++i) {
BufferInfo *info = &buffers->editItemAt(i);
// Only buffers currently owned by us may be handed to the component.
if (info->mStatus != OWNED_BY_US) {
continue;
}
// drainInputBuffer(info) reads source data into the buffer and
// calls mOMX->emptyBuffer(); returns false when no more input.
if (!drainInputBuffer(info)) {
break;
}
if (mFlags & kOnlySubmitOneInputBufferAtOneTime) {
break;
}
}
}
}
// Hands every output buffer we own back to the component so it can be
// filled with decoded data (via fillOutputBuffer -> IOMX::fillBuffer).
void OMXCodec::fillOutputBuffers() {
CHECK_EQ((int)mState, (int)EXECUTING);
// This is a workaround for some decoders not properly reporting
// end-of-output-stream. If we own all input buffers and also own
// all output buffers and we already signalled end-of-input-stream,
// the end-of-output-stream is implied.
if (mSignalledEOS
&& countBuffersWeOwn(mPortBuffers[kPortIndexInput])
== mPortBuffers[kPortIndexInput].size()
&& countBuffersWeOwn(mPortBuffers[kPortIndexOutput])
== mPortBuffers[kPortIndexOutput].size()) {
#ifdef USE_ALP_AUDIO
/* SEC mp3 decoder should be finished by EOS flag in output buffer. */
/* Do not apply this workaround */
if (strcmp(mComponentName, "OMX.SEC.MP3.Decoder") != 0) {
#endif
mNoMoreOutputData = true;
// Wake any reader blocked in waitForBufferFilled_l().
mBufferFilled.signal();
return;
#ifdef USE_ALP_AUDIO
}
#endif
}
Vector<BufferInfo> *buffers = &mPortBuffers[kPortIndexOutput];
for (size_t i = 0; i < buffers->size(); ++i) {
BufferInfo *info = &buffers->editItemAt(i);
if (info->mStatus == OWNED_BY_US) {
fillOutputBuffer(&buffers->editItemAt(i));
}
}
}
// Gives one output buffer we own back to the OMX component for filling with
// decoded data. For native (graphic) buffers the buffer must be locked
// before handing it over. On success ownership transfers to the component.
void OMXCodec::fillOutputBuffer(BufferInfo *info) {
CHECK_EQ((int)info->mStatus, (int)OWNED_BY_US);
if (mNoMoreOutputData) {
CODEC_LOGV("There is no more output data available, not "
"calling fillOutputBuffer");
return;
}
if (info->mMediaBuffer != NULL) {
sp<GraphicBuffer> graphicBuffer = info->mMediaBuffer->graphicBuffer();
if (graphicBuffer != 0) {
// When using a native buffer we need to lock the buffer before
// giving it to OMX.
CODEC_LOGV("Calling lockBuffer on %p", info->mBuffer);
int err = mNativeWindow->lockBuffer(mNativeWindow.get(),
graphicBuffer.get());
if (err != 0) {
CODEC_LOGE("lockBuffer failed w/ error 0x%08x", err);
setState(ERROR);
return;
}
}
}
CODEC_LOGV("Calling fillBuffer on buffer %p", info->mBuffer);
status_t err = mOMX->fillBuffer(mNode, info->mBuffer);
if (err != OK) {
CODEC_LOGE("fillBuffer failed w/ error 0x%08x", err);
setState(ERROR);
return;
}
// The component now owns this buffer until it calls back FILL_BUFFER_DONE.
info->mStatus = OWNED_BY_COMPONENT;
}