以 Android 5.0/5.1 (Lollipop) 为例
1. App layer
packages/apps/Music/src/com/android/music
重要文件AudioPreview.java
这里主要调用MediaPlayer的一些函数和AudioManager的函数
如MediaPlayer的
mPlayer.start();
mPlayer.pause();
mPlayer.isPlaying()
mPlayer.getDuration()
mPlayer.getCurrentPosition()
mPlayer.seekTo(progress);
mPlayer.setDataSource()
AudioManager的
mAudioManager.requestAudioFocus
mAudioManager.abandonAudioFocus
如下代码
class PreviewPlayer extends MediaPlayer
private void start() {
mAudioManager.requestAudioFocus(mAudioFocusListener, AudioManager.STREAM_MUSIC,
AudioManager.AUDIOFOCUS_GAIN_TRANSIENT);
mPlayer.start();
mProgressRefresher.postDelayed(new ProgressRefresher(), 200);
}
常规调用
private MediaPlayer mp = new MediaPlayer();
mp.setDataSource("/sdcard/test.mp3");
mp.prepare();
mp.start();
prepare()和prepareAsync()
提供了同步和异步两种方式设置播放器进入prepare状态
如果MediaPlayer实例是由create方法创建的,那么第一次启动播放前不需要再调用prepare()了,因为create()实现里已经调用过了。
MediaPlayer要播放的文件主要包括3个来源:
a. 用户在应用中事先自带的resource资源
例如:MediaPlayer.create(this, R.raw.test);
b. 存储在SD卡或其他文件路径下的媒体文件
例如:mp.setDataSource("/sdcard/test.mp3");
c. 网络上的媒体文件
例如:mp.setDataSource("http://xxx.yyy.com/test.mp3");
需要监听的listener
setOnCompletionListener(MediaPlayer.OnCompletionListener listener)、
setOnErrorListener(MediaPlayer.OnErrorListener listener)
2. frameworks/base
frameworks/base/media/java/android/media/MediaPlayer.java
JNI
frameworks/base/media/jni/android_media_MediaPlayer.cpp
3. frameworks/av/media/libmedia
target: system/lib/libmedia.so
mediaplayer.cpp, IMediaPlayer.cpp
frameworks/av/include/media/mediaplayer.h
IMediaPlayer.h实现MediaPlayer功能的接口。
在IMediaPlayer类中,主要定义MediaPlayer的功能接口,这个类必须被继承才能够使用。
IMediaPlayer.cpp
status_t start()
{
Parcel data, reply;
data.writeInterfaceToken(IMediaPlayer::getInterfaceDescriptor());
remote()->transact(START, data, &reply);
return reply.readInt32();
}
case START: {
CHECK_INTERFACE(IMediaPlayer, data, reply);
reply->writeInt32(start());
return NO_ERROR;
} break;
4. frameworks/av/media/libmediaplayerservice
target: system/lib/libmediaplayerservice.so
MediaPlayerFactory.cpp MediaPlayerService.cpp
StagefrightPlayer.cpp nuplayer/NuPlayer.cpp MidiFile.cpp DLNAStagefrightPlayer.cpp HDCP.cpp TestPlayerStub.cpp
enum player_type {
PV_PLAYER = 1,
SONIVOX_PLAYER = 2,
STAGEFRIGHT_PLAYER = 3,
NU_PLAYER = 4,
// Test players are available only in the 'test' and 'eng' builds.
// The shared library with the test player is passed passed as an
// argument to the 'test:' url in the setDataSource call.
TEST_PLAYER = 5,
DLNA_PLAYER = 1001,
DTCPIP_PLAYER = 1002,
};
在MediaPlayerService::Client中的setDataSource,会根据播放文件的类型选择合适的播放器。
setDataSource_pre会调用createPlayer(),然后调用MediaPlayerFactory::createPlayer(playerType, this, notify),返回播放器的实例。
player_type playerType = MediaPlayerFactory::getPlayerType(this,
fd,
offset,
length);
sp<MediaPlayerBase> p = setDataSource_pre(playerType);
在setDataSource_pre中接着new AudioOutput,最后一句把mAudioOutput传到MediaPlayerInterface中,
又因为播放器都是从类MediaPlayerInterface中继承而来,
比如class StagefrightPlayer : public MediaPlayerInterface,
所以播放器可以通过这个来操作AudioOutput里的函数。
AudioOutput里的函数大都通过AudioTrack的函数来操作,
AudioTrack是在status_t MediaPlayerService::AudioOutput::open(...)这个函数里new出来的。
if (!p->hardwareOutput()) {
mAudioOutput = new AudioOutput(mAudioSessionId);
static_cast<MediaPlayerInterface*>(p.get())->setAudioSink(mAudioOutput);
}
setDataSource(Context context, Uri uri) MediaPlayer.java
setDataSource(Context context, Uri uri, Map<String, String> headers)
setDataSource(FileDescriptor fd)
setDataSource(FileDescriptor fd, long offset, long length)
MediaPlayer::setDataSource(int fd, int64_t offset, int64_t length) MediaPlayer.cpp
BpMediaPlayer::setDataSource(int fd, int64_t offset, int64_t length) IMediaPlayer.cpp
case SET_DATA_SOURCE_FD BnMediaPlayer::onTransact
MediaPlayerService::Client::setDataSource(int fd, int64_t offset, int64_t length) MediaPlayerService.cpp
playerType = MediaPlayerFactory::getPlayerType();
sp<MediaPlayerBase> p = setDataSource_pre(playerType);
sp<MediaPlayerBase> p = createPlayer(playerType);
MediaPlayerFactory::createPlayer(playerType, this, notify);
mAudioOutput = new AudioOutput()
static_cast<MediaPlayerInterface*>(p.get())->setAudioSink(mAudioOutput);
setDataSource_post(p, p->setDataSource(fd, offset, length));
attachNewPlayer(player);
prepareAsync MediaPlayer.java
prepareAsync MediaPlayer.cpp
MediaPlayer::prepareAsync_l()
mPlayer->prepareAsync()
BpMediaPlayer::prepareAsync IMediaPlayer.cpp
case PREPARE_ASYNC BnMediaPlayer::onTransact
p->prepareAsync(); MediaPlayerService.cpp
NuPlayerDriver::prepareAsync() NuPlayer.cpp
NuPlayer::GenericSource::prepareAsync() GenericSource.cpp
StagefrightPlayer::prepareAsync() StagefrightPlayer.cpp
AwesomePlayer::prepareAsync() AwesomePlayer.cpp
MediaPlayer::notify(MEDIA_PREPARED) MediaPlayer.cpp
start MediaPlayer.java
start MediaPlayer.cpp
BpMediaPlayer::start IMediaPlayer.cpp
case START BnMediaPlayer::onTransact IMediaPlayer.cpp
start MediaPlayerService.cpp
start NuPlayerDriver.cpp
start GenericSource.cpp
MediaPlayerService.cpp
sp<MediaPlayerBase> MediaPlayerService::Client::setDataSource_pre(
player_type playerType)
{
ALOGV("player type = %d", playerType);
// create the right type of player
sp<MediaPlayerBase> p = createPlayer(playerType);
if (p == NULL) {
return p;
}
if (!p->hardwareOutput()) {
mAudioOutput = new AudioOutput(mAudioSessionId, IPCThreadState::self()->getCallingUid(),
mPid, mAudioAttributes);
static_cast<MediaPlayerInterface*>(p.get())->setAudioSink(mAudioOutput);
}
return p;
}
status_t MediaPlayerService::AudioOutput::open(...)
{ ...
t = new AudioTrack(...)
...
}
status_t MediaPlayerService::AudioOutput::start()
{
ALOGV("start");
if (mCallbackData != NULL) {
mCallbackData->endTrackSwitch();
}
if (mTrack != 0) {
mTrack->setVolume(mLeftVolume, mRightVolume);
mTrack->setAuxEffectSendLevel(mSendLevel);
return mTrack->start();
}
return NO_INIT;
}
ssize_t MediaPlayerService::AudioOutput::write(const void* buffer, size_t size)
{
LOG_ALWAYS_FATAL_IF(mCallback != NULL, "Don't call write if supplying a callback.");
//ALOGV("write(%p, %u)", buffer, size);
if (mTrack != 0) {
ssize_t ret = mTrack->write(buffer, size);
if (ret >= 0) {
mBytesWritten += ret;
}
return ret;
}
return NO_INIT;
}
AudioSink: AudioOutput:write
AudioTrack::write()
AudioFlinger: MixerThread::threadLoop_write
PlaybackThread::threadLoop_write mNormalSink->write
5. frameworks/av/media/mediaserver
target: system/bin/mediaserver
main_mediaserver.cpp
main_mediaserver.cpp是Mediaplayer Server启动的主程序,涉及AudioFlinger()、AudioPolicyService()、MediaPlayerService()的加载。
AudioFlinger::instantiate();
MediaPlayerService::instantiate();
CameraService::instantiate();
AudioPolicyService::instantiate();
SoundTriggerHwService::instantiate();
播放流程如下:
Java端发起调用,MediaPlayer会转至MediaPlayerService,
然后会调用相应的解码工具解码后创建AudioTrack,
所有待输出的AudioTrack在AudioFlinger::AudioMixer里合成,
然后通过AudioHAL(AudioHardwareInterface的实际实现者)传至实际的硬件来实现播放
6. Reference links:
http://yangguangfu.iteye.com/blog/699000
http://blog.csdn.net/gouboft/article/details/14126859
http://www.bkjia.com/Androidjc/866420.html
http://blog.csdn.net/myarrow/article/details/7036955