1 前言
本文简要介绍IJKPLAYER的几个常用API,以API使用的角度,来审视其内部运作原理。这里以iOS端直播API调用切入。
2 调用流程
2.1 init
创建播放器实例后,会先调用init方法进行初始化:
// Designated initializer: defaults the render type to the GL view
// renderer, then performs the native-side setup of the player.
- (IJKFFMediaPlayer *)init
{
    if (self = [super init]) {
        // Default to the OpenGL ES view renderer.
        _renderType = IJKSDLFFPlayrRenderTypeGlView;
        [self nativeSetup];
    }
    return self;
}
而后,在init方法里调用nativeSetup对IJKPLAYER进行初始化:
// Native-side setup for this player instance: initializes ijkplayer
// globals, creates the underlying IjkMediaPlayer bound to the Obj-C
// message loop, and installs default options and instance state.
- (void) nativeSetup
{
// Process-wide, one-time initialization of the native player library.
ijkmp_global_init();
// Install the global inject callback used for I/O event notifications.
ijkmp_global_set_inject_callback(ijkff_inject_callback);
_msgPool = [[IJKFFMoviePlayerMessagePool alloc] init];
_eventHandlers = [[NSMutableSet alloc] init];
// Create the native player; ff_media_player_msg_loop drains its message queue.
_nativeMediaPlayer = ijkmp_ios_create(ff_media_player_msg_loop);
// Default overlay pixel format ("fcc-_es2") for the GLES2 renderer.
ijkmp_set_option(_nativeMediaPlayer, IJKMP_OPT_CATEGORY_PLAYER, "overlay-format", "fcc-_es2");
// Hand opaque back-references (self / weak holder) to the native layer.
[self setInjectOpaque];
_notificationManager = [[IJKNotificationManager alloc] init];
// Configure the shared audio session (variant without interruption handler).
[[IJKAudioKit sharedInstance] setupAudioSessionWithoutInterruptHandler];
// Instance defaults.
_optionsDictionary = nil;
_isThirdGLView = true;
_scaleFactor = 1.0f;
_fps = 1.0f;
[self registerApplicationObservers];
}
注册ijkio相关callback:
// Registers opaque context pointers with the native player so that native
// callbacks can route events back to this Obj-C instance.
- (void) setInjectOpaque
{
IJKFFWeakHolder *weakHolder = [[IJKFFWeakHolder alloc] init];
weakHolder.object = self;
// NOTE(review): __bridge_retained hands a +1 reference on self to the
// native layer; it must be balanced with __bridge_transfer/CFRelease at
// teardown or the player instance leaks -- confirm the release site.
ijkmp_set_weak_thiz(_nativeMediaPlayer, (__bridge_retained void *) self);
// NOTE(review): weakHolder is bridge-retained twice (two +1 refs below);
// presumably each opaque is released independently by native code -- confirm.
ijkmp_set_inject_opaque(_nativeMediaPlayer, (__bridge_retained void *) weakHolder);
ijkmp_set_ijkio_inject_opaque(_nativeMediaPlayer, (__bridge_retained void *) weakHolder);
}
然后调用ijkmp_ios_create方法,由ijkmp_create创建IjkMediaPlayer、SDL_Vout、IJKFF_Pipeline等对象:
/* Creates the iOS flavor of IjkMediaPlayer: the core player, a GLES2
 * video output and the iOS decode pipeline. Returns NULL on any failure
 * (partially built state is released via ijkmp_dec_ref_p). */
IjkMediaPlayer *ijkmp_ios_create(int (*msg_loop)(void*))
{
    IjkMediaPlayer *mp = ijkmp_create(msg_loop);
    if (mp) {
        /* Video output: OpenGL ES2 based vout. */
        mp->ffplayer->vout = SDL_VoutIos_CreateForGLES2();
        if (mp->ffplayer->vout) {
            /* Decode pipeline with iOS-specific open callbacks. */
            mp->ffplayer->pipeline = ffpipeline_create_from_ios(mp->ffplayer);
            if (mp->ffplayer->pipeline)
                return mp;
        }
    }
    /* Unified failure path: drop whatever was created (mp may be NULL). */
    ijkmp_dec_ref_p(&mp);
    return NULL;
}
特别值得一提的是ffpipeline_create_from_ios方法:它创建了IJKFF_Pipeline对象,并注册了打开video解码器与audio输出的callback:
/* Allocates the iOS pipeline and wires its platform callbacks.
 * Returns NULL if allocation fails. */
IJKFF_Pipeline *ffpipeline_create_from_ios(FFPlayer *ffp)
{
    IJKFF_Pipeline *pipeline = ffpipeline_alloc(&g_pipeline_class, sizeof(IJKFF_Pipeline_Opaque));
    if (!pipeline)
        return NULL;

    /* Remember the owning player in the pipeline's private state. */
    IJKFF_Pipeline_Opaque *opaque = pipeline->opaque;
    opaque->ffp = ffp;

    /* iOS implementations of the pipeline hooks. */
    pipeline->func_destroy            = func_destroy;
    pipeline->func_open_video_decoder = func_open_video_decoder;
    pipeline->func_open_audio_output  = func_open_audio_output;
    return pipeline;
}
/* Private per-pipeline state for the iOS pipeline implementation. */
struct IJKFF_Pipeline_Opaque {
FFPlayer *ffp; /* back-reference to the owning player, set at create time */
bool is_videotoolbox_open; /* true when the VideoToolbox decoder was opened */
};
/* Pipeline destroy hook: intentionally a no-op -- the opaque struct only
 * holds a borrowed FFPlayer pointer and a flag, nothing to release. */
static void func_destroy(IJKFF_Pipeline *pipeline)
{
}
/* Video-decoder factory hook: tries the VideoToolbox hardware decoder when
 * enabled, otherwise falls back to the ffmpeg software decoder. Records the
 * chosen decoder in ffp->stat and notifies FFP_MSG_VIDEO_DECODER_OPEN. */
static IJKFF_Pipenode *func_open_video_decoder(IJKFF_Pipeline *pipeline, FFPlayer *ffp)
{
    IJKFF_Pipeline_Opaque *opaque = pipeline->opaque;
    IJKFF_Pipenode *pipenode = NULL;

    /* Prefer hardware decode via VideoToolbox when the option is set. */
    if (ffp->videotoolbox) {
        pipenode = ffpipenode_create_video_decoder_from_ios_videotoolbox(ffp);
        if (!pipenode)
            ALOGE("vtb fail!!! switch to ffmpeg decode!!!! \n");
    }

    if (pipenode) {
        ffp->stat.vdec_type = FFP_PROPV_DECODER_VIDEOTOOLBOX;
        opaque->is_videotoolbox_open = true;
    } else {
        /* Software fallback (also the path when videotoolbox is disabled). */
        pipenode = ffpipenode_create_video_decoder_from_ffplay(ffp);
        ffp->stat.vdec_type = FFP_PROPV_DECODER_AVCODEC;
        opaque->is_videotoolbox_open = false;
    }

    /* Tell listeners which decoder was opened (hardware vs software). */
    ffp_notify_msg2(ffp, FFP_MSG_VIDEO_DECODER_OPEN, opaque->is_videotoolbox_open);
    return pipenode;
}
/* Audio-output factory hook: always creates an AudioUnit-backed SDL_Aout;
 * both parameters are unused on iOS. */
static SDL_Aout *func_open_audio_output(IJKFF_Pipeline *pipeline, FFPlayer *ffp)
{
return SDL_AoutIos_CreateForAudioUnit();
}
/* Class descriptor for the iOS pipeline; only the name is set here --
 * the per-instance callbacks are wired in ffpipeline_create_from_ios(). */
static SDL_Class g_pipeline_class = {
.name = "ffpipeline_ios",
};
以video为例,func_open_video_decoder方法是在read_thread线程中按如下调用栈回调的:
read_thread => stream_component_open(FFPlayer *ffp, int stream_index) => ffpipeline_open_video_decoder =>
/* Dispatches to the video-decoder factory registered by the platform
 * pipeline (func_open_video_decoder on iOS). */
IJKFF_Pipenode* ffpipeline_open_video_decoder(IJKFF_Pipeline *pipeline, FFPlayer *ffp)
{
return pipeline->func_open_video_decoder(pipeline, ffp);
}
因此,init方法主要完成ijkplayer的demux、decoder、网络IO以及ijkcache等模块的初始化,并设置使能的option。
2.2 setDataSource
随后,业务层调用setDataSource方法,传入播放URL:
// Forwards the playback URL (as a UTF-8 C string) to the native player;
// returns the native result code.
- (int) setDataSource:(NSString *)url
{
    const char *cUrl = [url UTF8String];
    return ijkmp_set_data_source(_nativeMediaPlayer, cUrl);
}
最后,url参数是保存在IjkMediaPlayer结构的data_source字段中:
/* Stores a private copy of url in mp->data_source and moves the player to
 * MP_STATE_INITIALIZED. The _l suffix presumably means "caller holds the
 * player lock" -- confirm against the locking convention. */
static int ijkmp_set_data_source_l(IjkMediaPlayer *mp, const char *url)
{
assert(mp);
assert(url);
/* State guard: each macro returns an error when the player is in the
 * named state; only MP_STATE_IDLE (commented out) is a legal entry state. */
// MPST_RET_IF_EQ(mp->mp_state, MP_STATE_IDLE);
MPST_RET_IF_EQ(mp->mp_state, MP_STATE_INITIALIZED);
MPST_RET_IF_EQ(mp->mp_state, MP_STATE_ASYNC_PREPARING);
MPST_RET_IF_EQ(mp->mp_state, MP_STATE_PREPARED);
MPST_RET_IF_EQ(mp->mp_state, MP_STATE_STARTED);
MPST_RET_IF_EQ(mp->mp_state, MP_STATE_PAUSED);
MPST_RET_IF_EQ(mp->mp_state, MP_STATE_COMPLETED);
MPST_RET_IF_EQ(mp->mp_state, MP_STATE_STOPPED);
MPST_RET_IF_EQ(mp->mp_state, MP_STATE_ERROR);
MPST_RET_IF_EQ(mp->mp_state, MP_STATE_END);
/* Replace any previous source with a heap copy of the new url. */
freep((void**)&mp->data_source);
mp->data_source = strdup(url);
if (!mp->data_source)
return EIJK_OUT_OF_MEMORY;
ijkmp_change_state_l(mp, MP_STATE_INITIALIZED);
return 0;
}
2.3 prepareAsync
紧接着,调用prepareAsync方法,这里将做一系列播放前的准备工作,比如创建相关线程、队列(PacketQueue和FrameQueue)初始化、锁和信号量创建、参考时钟初始化等:
// Kicks off asynchronous preparation in the native player; returns the
// native result code immediately (work continues on background threads).
- (int) prepareAsync
{
return ijkmp_prepare_async(_nativeMediaPlayer);
}
- msg loop thread:创建消息循环线程;
- PacketQueue / FrameQueue / 参考时钟初始化,锁和信号量创建;
- video_refresh_thread:创建视频显示线程;
- read_thread:网络IO线程,并在该线程里创建解码线程,完成audio、video和subtitle显示初始化等系列工作;
展开ijkmp_prepare_async源码:
/* Async prepare under lock: validates the current state, transitions to
 * MP_STATE_ASYNC_PREPARING, starts the message-loop thread and delegates
 * the heavy lifting to ffp_prepare_async_l(). Returns 0 on success. */
static int ijkmp_prepare_async_l(IjkMediaPlayer *mp)
{
assert(mp);
/* State guard: prepare is legal only from INITIALIZED or STOPPED (the
 * two commented-out checks); every other state returns an error. */
MPST_RET_IF_EQ(mp->mp_state, MP_STATE_IDLE);
// MPST_RET_IF_EQ(mp->mp_state, MP_STATE_INITIALIZED);
MPST_RET_IF_EQ(mp->mp_state, MP_STATE_ASYNC_PREPARING);
MPST_RET_IF_EQ(mp->mp_state, MP_STATE_PREPARED);
MPST_RET_IF_EQ(mp->mp_state, MP_STATE_STARTED);
MPST_RET_IF_EQ(mp->mp_state, MP_STATE_PAUSED);
MPST_RET_IF_EQ(mp->mp_state, MP_STATE_COMPLETED);
// MPST_RET_IF_EQ(mp->mp_state, MP_STATE_STOPPED);
MPST_RET_IF_EQ(mp->mp_state, MP_STATE_ERROR);
MPST_RET_IF_EQ(mp->mp_state, MP_STATE_END);
/* A data source must have been set (setDataSource) before preparing. */
assert(mp->data_source);
ijkmp_change_state_l(mp, MP_STATE_ASYNC_PREPARING);
// released in msg_loop
ijkmp_inc_ref(mp);
// mp->msg_thread = SDL_CreateThreadEx(&mp->_msg_thread, ijkmp_msg_loop, mp, "ff_msg_loop");
mp->msg_thread = SDL_CreateThread(ijkmp_msg_loop, "ff_msg_loop", mp);
// msg_thread is detached inside msg_loop
// TODO: 9 release weak_thiz if pthread_create() failed;
int retval = ffp_prepare_async_l(mp->ffplayer, mp->data_source);
if (retval < 0) {
/* Preparation failed: fall into the ERROR state and report upward. */
ijkmp_change_state_l(mp, MP_STATE_ERROR);
return retval;
}
return 0;
}
在此方法里,首先创建ijkmp_msg_loop消息循环线程,紧接着调用ffp_prepare_async_l方法:
/* Core async prepare: normalizes the url/options, logs version and option
 * info, opens the audio output via the pipeline, then builds the playback
 * machinery with stream_open(). Returns 0 on success, negative on error. */
int ffp_prepare_async_l(FFPlayer *ffp, const char *file_name)
{
assert(ffp);
assert(!ffp->is);
assert(file_name);
/* 'timeout' has a different meaning for rtmp/rtsp -- drop the option. */
if (av_stristart(file_name, "rtmp", NULL) ||
av_stristart(file_name, "rtsp", NULL)) {
// There is total different meaning for 'timeout' option in rtmp
av_log(ffp, AV_LOG_WARNING, "remove 'timeout' option for rtmp.\n");
av_dict_set(&ffp->format_opts, "timeout", NULL, 0);
}
/* there is a length limit in avformat */
if (strlen(file_name) + 1 > 1024) {
av_log(ffp, AV_LOG_ERROR, "%s too long url\n", __func__);
/* Tunnel over-long urls through the ijklongurl: protocol via an option. */
if (avio_find_protocol_name("ijklongurl:")) {
av_dict_set(&ffp->format_opts, "ijklongurl-url", file_name, 0);
file_name = "ijklongurl:";
}
}
/* Log component versions and the effective option dictionaries. */
av_log(NULL, AV_LOG_INFO, "===== versions =====\n");
ffp_show_version_str(ffp, "ijkplayer", ijk_version_info());
ffp_show_version_str(ffp, "FFmpeg", av_version_info());
ffp_show_version_int(ffp, "libavutil", avutil_version());
ffp_show_version_int(ffp, "libavcodec", avcodec_version());
ffp_show_version_int(ffp, "libavformat", avformat_version());
ffp_show_version_int(ffp, "libswscale", swscale_version());
ffp_show_version_int(ffp, "libswresample", swresample_version());
av_log(NULL, AV_LOG_INFO, "===== options =====\n");
ffp_show_dict(ffp, "player-opts", ffp->player_opts);
ffp_show_dict(ffp, "format-opts", ffp->format_opts);
ffp_show_dict(ffp, "codec-opts ", ffp->codec_opts);
ffp_show_dict(ffp, "sws-opts ", ffp->sws_dict);
ffp_show_dict(ffp, "swr-opts ", ffp->swr_opts);
av_log(NULL, AV_LOG_INFO, "===================\n");
/* Apply player-level options onto the FFPlayer object itself. */
av_opt_set_dict(ffp, &ffp->player_opts);
/* Open the audio output once via the platform pipeline callback. */
if (!ffp->aout) {
ffp->aout = ffpipeline_open_audio_output(ffp->pipeline, ffp);
if (!ffp->aout)
return -1;
}
#if CONFIG_AVFILTER
/* Append the user-supplied video filter string to the filter list. */
if (ffp->vfilter0) {
GROW_ARRAY(ffp->vfilters_list, ffp->nb_vfilters);
ffp->vfilters_list[ffp->nb_vfilters - 1] = ffp->vfilter0;
}
#endif
/* Allocate VideoState, queues and threads; NULL indicates OOM. */
VideoState *is = stream_open(ffp, file_name, NULL);
if (!is) {
av_log(NULL, AV_LOG_WARNING, "ffp_prepare_async_l: stream_open failed OOM");
return EIJK_OUT_OF_MEMORY;
}
ffp->is = is;
ffp->input_filename = av_strdup(file_name);
return 0;
}
在此调用stream_open(ffp, file_name, NULL)方法:
static VideoState *stream_open(FFPlayer *ffp, const char *filename, AVInputFormat *iformat)
{
assert(!ffp->is);
VideoState *is;
is = av_mallocz(sizeof(VideoState));
if (!is)
return NULL;
is->filename = av_strdup(filename);
if (!is->filename)
goto fail;
is->iformat = iformat;
is->ytop = 0;
is->xleft = 0;
#if defined(__ANDROID__)
if (ffp->soundtouch_enable) {
is->handle = ijk_soundtouch_create();
}
#endif
/* start video display */
if (frame_queue_init(&is->pictq, &is->videoq, ffp->pictq_size, 1) < 0)
goto fail;
if (frame_queue_init(&is->subpq, &is->subtitleq, SUBPICTURE_QUEUE_SIZE, 0) < 0)
goto fail;
if (frame_queue_init(&is->sampq, &is->audioq, SAMPLE_QUEUE_SIZE, 1) < 0)
goto fail;
if (packet_queue_init(&is->videoq) < 0 ||
packet_queue_init(&is->audioq) < 0 ||
packet_queue_init(&is->subtitleq) < 0)
goto fail;
if (!(is->continue_read_thread = SDL_CreateCond())) {
av_log(NULL, AV_LOG_FATAL, "SDL_CreateCond(): %s\n", SDL_GetError());
goto fail;
}
if (!(is->video_accurate_seek_cond = SDL_CreateCond())) {
av_log(NULL, AV_LOG_FATAL, "SDL_CreateCond(): %s\n", SDL_GetError());
ffp->enable_accurate_seek = 0;
}
if (!(is->audio_accurate_seek_cond = SDL_CreateCond())) {
av_log(NULL, AV_LOG_FATAL, "SDL_CreateCond(): %s\n", SDL_GetError());
ffp->enable_accurate_seek = 0;
}
init_