本文分析的 ijkplayer 版本是 k0.8.8。
一、Ijkplayer 初始化流程
1、Java层 – 在IjkVideoView中创建IMediaPlayer播放器接口
// Call site in IjkVideoView: create the Java-layer IMediaPlayer.
mMediaPlayer = createPlayer(mSettings.getPlayer());
// Creates the IMediaPlayer implementation matching the configured player type.
public IMediaPlayer createPlayer(int playerType) {
IMediaPlayer mediaPlayer = null;
switch (playerType) {
// ExoPlayer-backed implementation
case Settings.PV_PLAYER__IjkExoMediaPlayer: {
IjkExoMediaPlayer IjkExoMediaPlayer = new IjkExoMediaPlayer(mAppContext);
mediaPlayer = IjkExoMediaPlayer;
}
break;
// Wrapper around the Android system MediaPlayer
case Settings.PV_PLAYER__AndroidMediaPlayer: {
AndroidMediaPlayer androidMediaPlayer = new AndroidMediaPlayer();
mediaPlayer = androidMediaPlayer;
}
break;
// ijkplayer's own FFmpeg-based player; also the default choice
case Settings.PV_PLAYER__IjkMediaPlayer:
default: {
IjkMediaPlayer ijkMediaPlayer = null;
if (mUri != null) {
ijkMediaPlayer = new IjkMediaPlayer();
...
}
mediaPlayer = ijkMediaPlayer;
}
break;
}
// Optionally wrap the player when the detached SurfaceTextureView setting is on.
if (mSettings.getEnableDetachedSurfaceTextureView()) {
mediaPlayer = new TextureMediaPlayer(mediaPlayer);
}
return mediaPlayer;
}
默认我们会创建ijkplayer为我们提供的IjkMediaPlayer播放器。
2、Java层 – IjkMediaPlayer构造方法
// Default constructor: use the built-in native-library loader.
public IjkMediaPlayer() {
this(sLocalLibLoader);
}
// Allows callers to supply a custom loader for the native libraries.
public IjkMediaPlayer(IjkLibLoader libLoader) {
initPlayer(libLoader);
}
// Common initialization: load native libraries, run the one-time native
// init, pick a Looper for event delivery, then call native_setup.
private void initPlayer(IjkLibLoader libLoader) {
loadLibrariesOnce(libLoader);
initNativeOnce();
Looper looper;
if ((looper = Looper.myLooper()) != null) {
// Prefer the creating thread's Looper for player events.
mEventHandler = new EventHandler(this, looper);
} else if ((looper = Looper.getMainLooper()) != null) {
// Otherwise fall back to the main Looper.
mEventHandler = new EventHandler(this, looper);
} else {
// No Looper available: events cannot be dispatched via a handler.
mEventHandler = null;
}
/*
* Native setup requires a weak reference to our object. It's easier to
* create it here than in C++.
*/
native_setup(new WeakReference<IjkMediaPlayer>(this));
}
// Runs native_init() exactly once per process, guarded by the class lock.
private static void initNativeOnce() {
synchronized (IjkMediaPlayer.class) {
if (!mIsNativeInitialized) {
native_init();
mIsNativeInitialized = true;
}
}
}
// JNI entry points; their C implementations are bound via RegisterNatives
// in ijkplayer_jni.c.
private static native void native_init();
private native void native_setup(Object IjkMediaPlayer_this);
可以看到Java层创建IjkMediaPlayer播放器时,会调用 native_init
和 native_setup
方法做初始化操作。
下面我们看看native层这两个方法做了啥
3、JNI层 – ijkplayer_jni.c – 初始化分析
我们每在Java层声明一个Native方法时,都需要在JNI层声明一个与之对应的方法,并通过注册的方式将两个方法关联在一起。
注册的方式一般分为:动态注册 和 静态注册,一般的开源框架和系统源码都是采用动态注册的。
3.1、注册JNI方法
//加载JNI时会执行JNI_OnLoad
// Executed when the JNI library is loaded (System.loadLibrary): registers
// the native methods and performs global player/FFmpeg initialization.
JNIEXPORT jint JNI_OnLoad(JavaVM *vm, void *reserved)
{
JNIEnv* env = NULL;
// Cache the JavaVM so native threads can attach a JNIEnv later.
g_jvm = vm;
if ((*vm)->GetEnv(vm, (void**) &env, JNI_VERSION_1_4) != JNI_OK) {
return -1;
}
assert(env != NULL);
pthread_mutex_init(&g_clazz.mutex, NULL );
// FindClass returns LocalReference
IJK_FIND_JAVA_CLASS(env, g_clazz.clazz, JNI_CLASS_IJKPLAYER);
// Dynamic registration: bind the Java native methods to the g_methods table.
(*env)->RegisterNatives(env, g_clazz.clazz, g_methods, NELEM(g_methods) );
// Global one-time initialization of the player core.
ijkmp_global_init();
ijkmp_global_set_inject_callback(inject_callback);
FFmpegApi_global_init(env);
return JNI_VERSION_1_4;
}
// Registration table for RegisterNatives: each entry is the Java native
// method name, its JNI type signature, and the C function implementing it.
// (Excerpt from the full table; elisions marked with "...".)
static JNINativeMethod g_methods[] = {
{
"_setDataSource",
"(Ljava/lang/String;[Ljava/lang/String;[Ljava/lang/String;)V",
(void *) IjkMediaPlayer_setDataSourceAndHeaders
},
{ "_setDataSourceFd", "(I)V", (void *) IjkMediaPlayer_setDataSourceFd },
{ "_setDataSource", "(Ltv/danmaku/ijk/media/player/misc/IMediaDataSource;)V", (void *)IjkMediaPlayer_setDataSourceCallback },
{ "_setAndroidIOCallback", "(Ltv/danmaku/ijk/media/player/misc/IAndroidIO;)V", (void *)IjkMediaPlayer_setAndroidIOCallback },
{ "_setVideoSurface", "(Landroid/view/Surface;)V", (void *) IjkMediaPlayer_setVideoSurface },
{ "_prepareAsync", "()V", (void *) IjkMediaPlayer_prepareAsync },
{ "_start", "()V", (void *) IjkMediaPlayer_start },
{ "_stop", "()V", (void *) IjkMediaPlayer_stop },
{ "seekTo", "(J)V", (void *) IjkMediaPlayer_seekTo },
{ "_pause", "()V", (void *) IjkMediaPlayer_pause },
{ "isPlaying", "()Z", (void *) IjkMediaPlayer_isPlaying },
...
{ "native_init", "()V", (void *) IjkMediaPlayer_native_init },
{ "native_setup", "(Ljava/lang/Object;)V", (void *) IjkMediaPlayer_native_setup },
{ "native_finalize", "()V", (void *) IjkMediaPlayer_native_finalize },
{ "_setOption", "(ILjava/lang/String;Ljava/lang/String;)V", (void *) IjkMediaPlayer_setOption },
{ "_setOption", "(ILjava/lang/String;J)V", (void *) IjkMediaPlayer_setOptionLong },
...
};
注册需要三个参数分别为:
- 参数一:Java层声明的native方法
- 参数二:Java层声明的native方法的方法签名。()表示方法,()里面的内容表示参数的签名,例如 I 表示int类型入参,V表示返回值void。
- 参数三:jni方法,也就是native方法的实现方法
可以看到native_init
方法JNI层实现是IjkMediaPlayer_native_init
方法,native_setup
方法JNI层实现是IjkMediaPlayer_native_setup
。
还有一些其它常用的方法都会在这里注册。
3.2、IjkMediaPlayer_native_init
// JNI implementation of Java's native_init(): currently only emits a trace
// log and performs no other work.
static void
IjkMediaPlayer_native_init(JNIEnv *env)
{
MPTRACE("%s\n", __func__);
}
啥事没干。下面再看看 native_setup 方法做了啥
3.3、IjkMediaPlayer_native_setup
// JNI implementation of native_setup(): creates the native player object,
// stores it on the Java instance, and wires the weak Java reference into
// the native callbacks.
static void
IjkMediaPlayer_native_setup(JNIEnv *env, jobject thiz, jobject weak_this)
{
MPTRACE("%s\n", __func__);
// Create the native IjkMediaPlayer struct, binding the JNI message loop.
IjkMediaPlayer *mp = ijkmp_android_create(message_loop);
JNI_CHECK_GOTO(mp, env, "java/lang/OutOfMemoryError", "mpjni: native_setup: ijkmp_create() failed", LABEL_RETURN);
// Save the native player pointer on the Java object.
jni_set_media_player(env, thiz, mp);
// Keep a global ref to the Java-side weak reference for later callbacks.
ijkmp_set_weak_thiz(mp, (*env)->NewGlobalRef(env, weak_this));
ijkmp_set_inject_opaque(mp, ijkmp_get_weak_thiz(mp));
ijkmp_set_ijkio_inject_opaque(mp, ijkmp_get_weak_thiz(mp));
ijkmp_android_set_mediacodec_select_callback(mp, mediacodec_select_callback, ijkmp_get_weak_thiz(mp));
LABEL_RETURN:
// Release this function's reference on mp (refcount decrement).
ijkmp_dec_ref_p(&mp);
}
// ijkplayer.c -- creates the native (C-layer) IjkMediaPlayer struct.
IjkMediaPlayer *ijkmp_android_create(int(*msg_loop)(void*))
{
// Create the player and attach the message-loop function.
IjkMediaPlayer *mp = ijkmp_create(msg_loop);
if (!mp)
goto fail;
// Create the video render target (SDL_Vout) for an Android Surface.
mp->ffplayer->vout = SDL_VoutAndroid_CreateForAndroidSurface();
if (!mp->ffplayer->vout)
goto fail;
// Create the pipeline that later opens the video decoder and audio output.
mp->ffplayer->pipeline = ffpipeline_create_from_android(mp->ffplayer);
if (!mp->ffplayer->pipeline)
goto fail;
ffpipeline_set_vout(mp->ffplayer->pipeline, mp->ffplayer->vout);
return mp;
fail:
// On any failure, drop the reference; partial allocations are released.
ijkmp_dec_ref_p(&mp);
return NULL;
}
可以看到 native_setup 方法的作用有:
- 创建了native层的 IjkMediaPlayer
- IjkMediaPlayer 绑定了JNI层的消息队列 message_loop。
- 创建图像渲染对象 SDL_Vout
- 创建视频解码器、音频输出设备
message_loop 是ijkplayer在JNI层维护的一个消息循环机制,负责处理播放器发送的一些状态,最后通过反射机制把这些状态发送到Java层IjkMediaPlayer类的postEventFromNative方法中。和Handler的消息队列作用类似。
// Invoked from native code to deliver player events (what/arg1/arg2/obj)
// back to the Java layer. (Body elided in this excerpt.)
@CalledByNative
private static void postEventFromNative(Object weakThiz, int what,
int arg1, int arg2, Object obj) {
...
}
其它作用目前没有关注。
3.4、创建视频解码器和音频输出设备
// Builds the Android pipeline object: allocates it, creates its mutex and
// default volume, and installs the callbacks that later open the video
// decoder and the audio output device.
IJKFF_Pipeline *ffpipeline_create_from_android(FFPlayer *ffp)
{
ALOGD("ffpipeline_create_from_android()\n");
IJKFF_Pipeline *pipeline = ffpipeline_alloc(&g_pipeline_class, sizeof(IJKFF_Pipeline_Opaque));
if (!pipeline)
return pipeline;
IJKFF_Pipeline_Opaque *opaque = pipeline->opaque;
opaque->ffp = ffp;
opaque->surface_mutex = SDL_CreateMutex();
opaque->left_volume = 1.0f;
opaque->right_volume = 1.0f;
if (!opaque->surface_mutex) {
ALOGE("ffpipeline-android:create SDL_CreateMutex failed\n");
goto fail;
}
pipeline->func_destroy = func_destroy;
// Callback that opens the video decoder
pipeline->func_open_video_decoder = func_open_video_decoder;
// Callback that opens the audio output device
pipeline->func_open_audio_output = func_open_audio_output;
// Decoder init/config callbacks
pipeline->func_init_video_decoder = func_init_video_decoder;
pipeline->func_config_video_decoder = func_config_video_decoder;
return pipeline;
fail:
ffpipeline_free_p(&pipeline);
return NULL;
}
1、创建视频解码器
// Chooses the video decoder: MediaCodec hardware decoding when any of the
// mediacodec_* options is enabled; otherwise (or if that fails) falls back
// to FFmpeg software decoding.
static IJKFF_Pipenode *func_open_video_decoder(IJKFF_Pipeline *pipeline, FFPlayer *ffp)
{
IJKFF_Pipeline_Opaque *opaque = pipeline->opaque;
IJKFF_Pipenode *node = NULL;
if (ffp->mediacodec_all_videos || ffp->mediacodec_avc || ffp->mediacodec_hevc || ffp->mediacodec_mpeg2)
// Hardware decoding via Android MediaCodec
node = ffpipenode_create_video_decoder_from_android_mediacodec(ffp, pipeline, opaque->weak_vout);
if (!node) {
// Software decoding via ffplay/FFmpeg
node = ffpipenode_create_video_decoder_from_ffplay(ffp);
}
return node;
}
2、创建音频输出设备
// Chooses the audio output backend: OpenSL ES when the opensles option is
// set, otherwise AudioTrack; then applies the stored stereo volume.
static SDL_Aout *func_open_audio_output(IJKFF_Pipeline *pipeline, FFPlayer *ffp)
{
SDL_Aout *aout = NULL;
if (ffp->opensles) {
// Output audio via OpenSL ES
aout = SDL_AoutAndroid_CreateForOpenSLES();
} else {
// Output audio via AudioTrack
aout = SDL_AoutAndroid_CreateForAudioTrack();
}
if (aout)
SDL_AoutSetStereoVolume(aout, pipeline->opaque->left_volume, pipeline->opaque->right_volume);
return aout;
}
初始化流程主要记录了这些,下面来看一下prepare流程
二、prepare流程
4、prepare流程
4.1、Java层prepareAsync()
创建好播放器IMediaPlayer后,我们就可以调用它的 prepareAsync() 方法,然后在准备完成的回调方法中开始播放。
// Java-layer prepareAsync(): delegates directly to the native method.
@Override
public void prepareAsync() throws IllegalStateException {
_prepareAsync();
}
// Implemented in JNI by IjkMediaPlayer_prepareAsync (see g_methods).
public native void _prepareAsync() throws IllegalStateException;
执行native方法 _prepareAsync()
,在上面 3.1中 注册JNI方法 我们可以知道_prepareAsync()在JNI层实现方法是 IjkMediaPlayer_prepareAsync
4.2、JNI层 IjkMediaPlayer_prepareAsync
// JNI implementation of _prepareAsync(): looks up the native player bound
// to the Java object and starts the asynchronous prepare.
static void
IjkMediaPlayer_prepareAsync(JNIEnv *env, jobject thiz)
{
MPTRACE("%s\n", __func__);
int retval = 0;
// Fetch the C-layer player stored on the Java object
IjkMediaPlayer *mp = jni_get_media_player(env, thiz);
JNI_CHECK_GOTO(mp, env, "java/lang/IllegalStateException", "mpjni: prepareAsync: null mp", LABEL_RETURN);
// Kick off the prepare work
retval = ijkmp_prepare_async(mp);
IJK_CHECK_MPRET_GOTO(retval, env, LABEL_RETURN);
LABEL_RETURN:
ijkmp_dec_ref_p(&mp);
}
继续执行 ijkmp_prepare_async
4.3、ijkplayer.c – ijkmp_prepare_async()
// Thread-safe wrapper: takes the player mutex, then runs the locked
// variant ijkmp_prepare_async_l(). Returns its result.
int ijkmp_prepare_async(IjkMediaPlayer *mp)
{
assert(mp);
MPTRACE("ijkmp_prepare_async()\n");
pthread_mutex_lock(&mp->mutex);
int retval = ijkmp_prepare_async_l(mp);
pthread_mutex_unlock(&mp->mutex);
MPTRACE("ijkmp_prepare_async()=%d\n", retval);
return retval;
}
// Locked variant (caller holds mp->mutex): switches state to
// ASYNC_PREPARING, starts the message queue and its consumer thread, then
// begins the actual prepare work. (Excerpt; elisions marked "...".)
static int ijkmp_prepare_async_l(IjkMediaPlayer *mp)
{
assert(mp);
...
ijkmp_change_state_l(mp, MP_STATE_ASYNC_PREPARING);
// Start the message queue
msg_queue_start(&mp->ffplayer->msg_queue);
// released in msg_loop
ijkmp_inc_ref(mp);
// Create the message-loop thread that drains msg_queue
mp->msg_thread = SDL_CreateThreadEx(&mp->_msg_thread, ijkmp_msg_loop, mp, "ff_msg_loop");
// msg_thread is detached inside msg_loop
// TODO: 9 release weak_thiz if pthread_create() failed;
// Perform the prepare work (opens the data source)
int retval = ffp_prepare_async_l(mp->ffplayer, mp->data_source);
if (retval < 0) {
ijkmp_change_state_l(mp, MP_STATE_ERROR);
return retval;
}
return 0;
}
4.4、ff_ffplay.c – ffp_prepare_async_l()
// Async prepare: sanitizes protocol-specific options, logs versions and
// options, opens the audio output, then calls stream_open() to start the
// demux and render threads. Returns 0 on success, negative on failure.
int ffp_prepare_async_l(FFPlayer *ffp, const char *file_name)
{
assert(ffp);
assert(!ffp->is);
assert(file_name);
if (av_stristart(file_name, "rtmp", NULL) ||
av_stristart(file_name, "rtsp", NULL)) {
// There is total different meaning for 'timeout' option in rtmp
av_log(ffp, AV_LOG_WARNING, "remove 'timeout' option for rtmp.\n");
av_dict_set(&ffp->format_opts, "timeout", NULL, 0);
}
/* there is a length limit in avformat */
if (strlen(file_name) + 1 > 1024) {
av_log(ffp, AV_LOG_ERROR, "%s too long url\n", __func__);
if (avio_find_protocol_name("ijklongurl:")) {
av_dict_set(&ffp->format_opts, "ijklongurl-url", file_name, 0);
file_name = "ijklongurl:";
}
}
av_log(NULL, AV_LOG_INFO, "===== versions =====\n");
ffp_show_version_str(ffp, "ijkplayer", ijk_version_info());
ffp_show_version_str(ffp, "FFmpeg", av_version_info());
ffp_show_version_int(ffp, "libavutil", avutil_version());
ffp_show_version_int(ffp, "libavcodec", avcodec_version());
ffp_show_version_int(ffp, "libavformat", avformat_version());
ffp_show_version_int(ffp, "libswscale", swscale_version());
ffp_show_version_int(ffp, "libswresample", swresample_version());
av_log(NULL, AV_LOG_INFO, "===== options =====\n");
ffp_show_dict(ffp, "player-opts", ffp->player_opts);
ffp_show_dict(ffp, "format-opts", ffp->format_opts);
ffp_show_dict(ffp, "codec-opts ", ffp->codec_opts);
ffp_show_dict(ffp, "sws-opts ", ffp->sws_dict);
ffp_show_dict(ffp, "swr-opts ", ffp->swr_opts);
av_log(NULL, AV_LOG_INFO, "===================\n");
av_opt_set_dict(ffp, &ffp->player_opts);
if (!ffp->aout) {
// The SDL_Aout machinery was set up during initialization; open the
// audio output device here and store it on ffp->aout.
ffp->aout = ffpipeline_open_audio_output(ffp->pipeline, ffp);
if (!ffp->aout)
return -1;
}
#if CONFIG_AVFILTER
if (ffp->vfilter0) {
GROW_ARRAY(ffp->vfilters_list, ffp->nb_vfilters);
ffp->vfilters_list[ffp->nb_vfilters - 1] = ffp->vfilter0;
}
#endif
// Open the audio/video streams (spawns read and refresh threads)
VideoState *is = stream_open(ffp, file_name, NULL);
if (!is) {
av_log(NULL, AV_LOG_WARNING, "ffp_prepare_async_l: stream_open failed OOM");
return EIJK_OUT_OF_MEMORY;
}
ffp->is = is;
ffp->input_filename = av_strdup(file_name);
return 0;
}
此方法最后会执行stream_open()方法读取网络或者文件中的音视频流,然后发送准备完成的消息给JNI层,JNI层再把准备完成的消息通过反射发送到Java层
4.5、stream_open()
// Allocates and initializes the VideoState (queues, clocks, volume, sync
// type), then spawns the video-refresh and read threads. Returns the new
// VideoState, or NULL on failure.
static VideoState *stream_open(FFPlayer *ffp, const char *filename, AVInputFormat *iformat)
{
assert(!ffp->is);
// Playback-state initialization
VideoState *is;
// Allocate and zero the VideoState struct
is = av_mallocz(sizeof(VideoState));
if (!is)
return NULL;
is->filename = av_strdup(filename);
if (!is->filename)
goto fail;
is->iformat = iformat;
is->ytop = 0;
is->xleft = 0;
#if defined(__ANDROID__)
if (ffp->soundtouch_enable) {
is->handle = ijk_soundtouch_create();
}
#endif
/* start video display */
// Initialize the decoded-frame queues (video, subtitle, audio)
if (frame_queue_init(&is->pictq, &is->videoq, ffp->pictq_size, 1) < 0)
goto fail;
if (frame_queue_init(&is->subpq, &is->subtitleq, SUBPICTURE_QUEUE_SIZE, 0) < 0)
goto fail;
if (frame_queue_init(&is->sampq, &is->audioq, SAMPLE_QUEUE_SIZE, 1) < 0)
goto fail;
// Initialize the compressed-packet queues
if (packet_queue_init(&is->videoq) < 0 ||
packet_queue_init(&is->audioq) < 0 ||
packet_queue_init(&is->subtitleq) < 0)
goto fail;
if (!(is->continue_read_thread = SDL_CreateCond())) {
av_log(NULL, AV_LOG_FATAL, "SDL_CreateCond(): %s\n", SDL_GetError());
goto fail;
}
if (!(is->video_accurate_seek_cond = SDL_CreateCond())) {
av_log(NULL, AV_LOG_FATAL, "SDL_CreateCond(): %s\n", SDL_GetError());
ffp->enable_accurate_seek = 0;
}
if (!(is->audio_accurate_seek_cond = SDL_CreateCond())) {
av_log(NULL, AV_LOG_FATAL, "SDL_CreateCond(): %s\n", SDL_GetError());
ffp->enable_accurate_seek = 0;
}
// Initialize the clocks
// Each clock's serial pointer (queue_serial) references its packet
// queue's serial, e.g. the video clock tracks is->videoq.serial
init_clock(&is->vidclk, &is->videoq.serial);
init_clock(&is->audclk, &is->audioq.serial);
init_clock(&is->extclk, &is->extclk.serial);
is->audio_clock_serial = -1;
// Clamp and apply the startup volume
if (ffp->startup_volume < 0)
av_log(NULL, AV_LOG_WARNING, "-volume=%d < 0, setting to 0\n", ffp->startup_volume);
if (ffp->startup_volume > 100)
av_log(NULL, AV_LOG_WARNING, "-volume=%d > 100, setting to 100\n", ffp->startup_volume);
ffp->startup_volume = av_clip(ffp->startup_volume, 0, 100);
ffp->startup_volume = av_clip(SDL_MIX_MAXVOLUME * ffp->startup_volume / 100, 0, SDL_MIX_MAXVOLUME);
is->audio_volume = ffp->startup_volume;
is->muted = 0;
// A/V sync type: audio master by default
is->av_sync_type = ffp->av_sync_type;
is->play_mutex = SDL_CreateMutex();
is->accurate_seek_mutex = SDL_CreateMutex();
ffp->is = is;
is->pause_req = !ffp->start_on_prepared;
// Create the video render (refresh) thread
is->video_refresh_tid = SDL_CreateThreadEx(&is->_video_refresh_tid, video_refresh_thread, ffp, "ff_vout");
if (!is->video_refresh_tid) {
av_freep(&ffp->is);
return NULL;
}
is->initialized_decoder = 0;
// Create the demux thread that reads the audio/video streams
is->read_tid = SDL_CreateThreadEx(&is->_read_tid, read_thread, ffp, "ff_read");
if (!is->read_tid) {
av_log(NULL, AV_LOG_FATAL, "SDL_CreateThread(): %s\n", SDL_GetError());
goto fail;
}
// Optionally pre-initialize the MediaCodec video decoder
if (ffp->async_init_decoder && !ffp->video_disable && ffp->video_mime_type && strlen(ffp->video_mime_type) > 0
&& ffp->mediacodec_default_name && strlen(ffp->mediacodec_default_name) > 0) {
if (ffp->mediacodec_all_videos || ffp->mediacodec_avc || ffp->mediacodec_hevc || ffp->mediacodec_mpeg2) {
decoder_init(&is->viddec, NULL, &is->videoq, is->continue_read_thread);
ffp->node_vdec = ffpipeline_init_video_decoder(ffp->pipeline, ffp);
}
}
is->initialized_decoder = 1;
return is;
fail:
// Abort and join the refresh thread before tearing everything down
is->initialized_decoder = 1;
is->abort_request = true;
if (is->video_refresh_tid)
SDL_WaitThread(is->video_refresh_tid, NULL);
stream_close(ffp);
return NULL;
}
stream_open方法中,目前关注的作用:
- 初始化VideoState对象,存储音视频播放状态
- 初始化Packet和Frame队列
- 初始化时钟
- 创建并执行 read_thread 读取音视频流线程
- 创建并执行 video_refresh_thread 用来渲染视频
4.6、read_thread()
/* this thread gets the stream from the disk or the network */
// Demux thread: opens the input, probes stream info, opens the audio and
// video stream components, then signals FFP_MSG_PREPARED.
// (Excerpt; elisions marked "...".)
static int read_thread(void *arg)
{
FFPlayer *ffp = arg;
VideoState *is = ffp->is;
// AVFormatContext: container-level context holding format information,
// e.g. video width/height and audio sample rate
AVFormatContext *ic = NULL;
...
// Allocate the AVFormatContext
ic = avformat_alloc_context();
...
if (ffp->iformat_name)
is->iformat = av_find_input_format(ffp->iformat_name);
// Open the media source (local file path or network URL)
err = avformat_open_input(&ic, is->filename, is->iformat, &ffp->format_opts);
...
// Probe stream information: after the input is opened successfully, the
// discovered streams populate the AVFormatContext (nb_streams/streams).
// A return value >= 0 means success, otherwise failure.
err = avformat_find_stream_info(ic, opts);
...
/* open the streams */
if (st_index[AVMEDIA_TYPE_AUDIO] >= 0) {
// Open the audio stream
stream_component_open(ffp, st_index[AVMEDIA_TYPE_AUDIO]);
} else {
// No audio stream: sync on the video clock instead
ffp->av_sync_type = AV_SYNC_VIDEO_MASTER;
is->av_sync_type = ffp->av_sync_type;
}
ret = -1;
if (st_index[AVMEDIA_TYPE_VIDEO] >= 0) {
// Open the video stream
ret = stream_component_open(ffp, st_index[AVMEDIA_TYPE_VIDEO]);
}
...
ffp->prepared = true;
// Notify that prepare has completed
ffp_notify_msg1(ffp, FFP_MSG_PREPARED);
...
}
调用 ffp_notify_msg1 发送 FFP_MSG_PREPARED,将消息添加到消息队列中。
// Posts a message with no arguments onto the player's message queue.
inline static void ffp_notify_msg1(FFPlayer *ffp, int what) {
msg_queue_put_simple3(&ffp->msg_queue, what, 0, 0);
}
// Builds an AVMessage from (what, arg1, arg2) and enqueues it.
inline static void msg_queue_put_simple3(MessageQueue *q, int what, int arg1, int arg2)
{
AVMessage msg;
msg_init_msg(&msg);
msg.what = what;
msg.arg1 = arg1;
msg.arg2 = arg2;
// Enqueue the message
msg_queue_put(q, &msg);
}
5、JNI层 – ijkplayer_jni.c – 消息循环
5.1、message_loop
// Entry point of the ff_msg_loop thread: attaches a JNIEnv to this thread,
// then runs the blocking message loop until the queue is aborted.
static int message_loop(void *arg)
{
MPTRACE("%s\n", __func__);
JNIEnv *env = NULL;
if (JNI_OK != SDL_JNI_SetupThreadEnv(&env)) {
ALOGE("%s: SetupThreadEnv failed\n", __func__);
return -1;
}
IjkMediaPlayer *mp = (IjkMediaPlayer*) arg;
JNI_CHECK_GOTO(mp, env, NULL, "mpjni: native_message_loop: null mp", LABEL_RETURN);
// The actual loop runs here
message_loop_n(env, mp);
LABEL_RETURN:
// Drop this thread's reference on the player (taken before thread start)
ijkmp_dec_ref_p(&mp);
MPTRACE("message_loop exit");
return 0;
}
继续查看message_loop_n
5.2、message_loop_n
// Drains the player's message queue forever and forwards each message to
// the Java layer via post_event -> IjkMediaPlayer.postEventFromNative.
// (Excerpt; elisions marked "...".)
static void message_loop_n(JNIEnv *env, IjkMediaPlayer *mp)
{
jobject weak_thiz = (jobject) ijkmp_get_weak_thiz(mp);
JNI_CHECK_GOTO(weak_thiz, env, NULL, "mpjni: message_loop_n: null weak_thiz", LABEL_RETURN);
// Infinite loop, continuously fetching messages
while (1) {
AVMessage msg;
// Fetch a message (blocking call)
// The third parameter (block) is 1 here, so the call waits when the
// queue is empty
int retval = ijkmp_get_msg(mp, &msg, 1);
if (retval < 0)
break;
// block-get should never return 0
assert(retval > 0);
switch (msg.what) {
case FFP_MSG_FLUSH:
MPTRACE("FFP_MSG_FLUSH:\n");
post_event(env, weak_thiz, MEDIA_NOP, 0, 0);
break;
case FFP_MSG_ERROR: // playback-error message
MPTRACE("FFP_MSG_ERROR: %d\n", msg.arg1);
post_event(env, weak_thiz, MEDIA_ERROR, MEDIA_ERROR_IJK_PLAYER, msg.arg1);
break;
case FFP_MSG_PREPARED: // prepare-finished message
MPTRACE("FFP_MSG_PREPARED:\n");
post_event(env, weak_thiz, MEDIA_PREPARED, 0, 0);
break;
case FFP_MSG_COMPLETED: // playback-completed message
MPTRACE("FFP_MSG_COMPLETED:\n");
post_event(env, weak_thiz, MEDIA_PLAYBACK_COMPLETE, 0, 0);
...
default:
ALOGE("unknown FFP_MSG_xxx(%d)\n", msg.what);
break;
}
// Release any resource attached to the message
msg_free_res(&msg);
}
LABEL_RETURN:
;
}
获取消息之后,会调用 post_event 通过反射机制,调用Java层IjkMediaPlayer类的静态方法:postEventFromNative()。
我们再来看一下获取消息的方法ijkmp_get_msg
5.3、ijkplayer.c – ijkmp_get_msg()
/* need to call msg_free_res for freeing the resouce obtained in msg */
// Pulls the next message from the player's queue (blocking when block != 0)
// and applies player state transitions for lifecycle messages such as
// FFP_MSG_PREPARED. (Excerpt; elisions marked "...".)
int ijkmp_get_msg(IjkMediaPlayer *mp, AVMessage *msg, int block)
{
assert(mp);
while (1) {
int continue_wait_next_msg = 0;
// Fetch from the message queue; block is 1 here, so the call waits
int retval = msg_queue_get(&mp->ffplayer->msg_queue, msg, block);
if (retval <= 0)
return retval;
switch (msg->what) {
case FFP_MSG_PREPARED:
MPTRACE("ijkmp_get_msg: FFP_MSG_PREPARED\n");
pthread_mutex_lock(&mp->mutex);
if (mp->mp_state == MP_STATE_ASYNC_PREPARING) {
ijkmp_change_state_l(mp, MP_STATE_PREPARED);
} else {
// FIXME: 1: onError() ?
av_log(mp->ffplayer, AV_LOG_DEBUG, "FFP_MSG_PREPARED: expecting mp_state==MP_STATE_ASYNC_PREPARING\n");
}
if (!mp->ffplayer->start_on_prepared) {
ijkmp_change_state_l(mp, MP_STATE_PAUSED);
}
pthread_mutex_unlock(&mp->mutex);
break;
...
if (continue_wait_next_msg) {
msg_free_res(msg);
continue;
}
return retval;
}
return -1;
}
核心方法还是msg_queue_get 从消息队列里面获取消息。下面就是队列获取元素的实现了
5.4、ff_ffmsg_queue.c – msg_queue_get()
/* return < 0 if aborted, 0 if no msg and > 0 if msg. */
// Dequeues the head message under the queue mutex; when the queue is empty
// and block != 0, waits on the condition variable until a message arrives.
inline static int msg_queue_get(MessageQueue *q, AVMessage *msg, int block)
{
AVMessage *msg1;
int ret;
// Lock the queue
SDL_LockMutex(q->mutex);
// Loop until a message is taken, the caller opts out, or the queue aborts
for (;;) {
if (q->abort_request) {
ret = -1;
break;
}
// Take the head of the queue
msg1 = q->first_msg;
// If there is a pending message
if (msg1) {
// Unlink it and copy its contents out to the caller
q->first_msg = msg1->next;
if (!q->first_msg)
q->last_msg = NULL;
q->nb_messages--;
*msg = *msg1;
msg1->obj = NULL;
#ifdef FFP_MERGE
av_free(msg1);
#else
// Recycle the node instead of freeing it
msg1->next = q->recycle_msg;
q->recycle_msg = msg1;
#endif
ret = 1;
break;
} else if (!block) { // queue empty and non-blocking: return at once
ret = 0;
break;
} else {
// block is 1: wait here until a message is enqueued
SDL_CondWait(q->cond, q->mutex);
}
}
// Unlock the queue
SDL_UnlockMutex(q->mutex);
return ret;
}
这里就是典型的队列取值的操作,如果没有值就阻塞。
以上就是ijkplayer初始化流程和prepare流程。