昨天研究了ijk初始化当中
_mediaPlayer = ijkmp_ios_create(media_player_msg_loop);
这一句代码背后的操作,遗留了一个问题:对于结构体IJKFF_Pipeline内部函数指针func_destroy的定义和赋值的用法,第一次见。
在定义的时候定义函数func_destroy;
/*
 * IJKFF_Pipeline is a hand-rolled vtable: platform-specific code fills in
 * the function pointers at creation time, and generic code dispatches
 * through them — C-style polymorphism.
 */
struct IJKFF_Pipeline {
SDL_Class *opaque_class;  /* class metadata shared by pipeline instances */
IJKFF_Pipeline_Opaque *opaque;  /* platform-private state owned by this pipeline */
void (*func_destroy) (IJKFF_Pipeline *pipeline);  /* teardown hook, invoked by ffpipeline_free() */
IJKFF_Pipenode *(*func_open_video_decoder) (IJKFF_Pipeline *pipeline, FFPlayer *ffp);
SDL_Aout *(*func_open_audio_output) (IJKFF_Pipeline *pipeline, FFPlayer *ffp);
IJKFF_Pipenode *(*func_init_video_decoder) (IJKFF_Pipeline *pipeline, FFPlayer *ffp);
int (*func_config_video_decoder) (IJKFF_Pipeline *pipeline, FFPlayer *ffp);
};
在初始化IJKFF_Pipeline的时候,同样给func_destroy赋值。
/*
 * Build the iOS flavor of the pipeline: allocate the shell plus its
 * opaque payload, remember the owning player, and install the iOS
 * implementations of the pipeline hooks.
 * Returns NULL on allocation failure.
 */
IJKFF_Pipeline *ffpipeline_create_from_ios(FFPlayer *ffp)
{
    IJKFF_Pipeline *pipeline = ffpipeline_alloc(&g_pipeline_class, sizeof(IJKFF_Pipeline_Opaque));
    if (pipeline == NULL)
        return NULL;

    /* Stash the owning player so the hooks can reach it later. */
    pipeline->opaque->ffp = ffp;

    /* Wire up the platform-specific vtable entries. */
    pipeline->func_destroy            = func_destroy;
    pipeline->func_open_video_decoder = func_open_video_decoder;
    pipeline->func_open_audio_output  = func_open_audio_output;

    return pipeline;
}
然后在调用的时候
/*
 * Destroy a pipeline: run the platform teardown hook (if installed),
 * release the opaque payload, scrub the struct to catch dangling uses,
 * then free the shell.  Safe to call with NULL.
 */
void ffpipeline_free(IJKFF_Pipeline *pipeline)
{
    if (pipeline == NULL)
        return;

    if (pipeline->func_destroy != NULL)
        pipeline->func_destroy(pipeline);

    free(pipeline->opaque);
    memset(pipeline, 0, sizeof(*pipeline));
    free(pipeline);
}
这个时候代码就会去执行,赋值的那个函数
/*
 * iOS teardown hook: intentionally a no-op — the iOS pipeline has no
 * extra platform state to release.  It must still exist because
 * ffpipeline_free() calls func_destroy whenever the pointer is set.
 */
static void func_destroy(IJKFF_Pipeline *pipeline)
{
}
好了,这个调用方法看了好久才看懂,所以记录一下。
接下来,继续看_glView的初始化的过程
// Designated initializer for the GL view: prepares locking state,
// registers application-lifecycle observers, and sets up the GL
// resources right away when the app is already active (otherwise
// setup is deferred until the app becomes active again).
- (id) initWithFrame:(CGRect)frame
{
    self = [super initWithFrame:frame];
    if (self) {
        _tryLockErrorCount = 0;
        _shouldLockWhileBeingMovedToWindow = YES;
        self.glActiveLock = [[NSRecursiveLock alloc] init];
        _registeredNotifications = [[NSMutableArray alloc] init];
        [self registerApplicationObservers];

        _didSetupGL = NO;
        // Fix: never compare a BOOL against YES — BOOL is a signed char,
        // so any nonzero value is truthy but may not equal YES.
        if ([self isApplicationActive])
            [self setupGLOnce];
    }
    return self;
}
开始我以为关键代码可能是[self setupGLOnce];因为其他代码是一些初始化的操作。但继续观察
// Idempotent GL setup: returns YES immediately when already done,
// returns NO without blocking if the GL-active lock is contended,
// otherwise performs setup under the lock and reports the result.
- (BOOL)setupGLOnce
{
    // Fast path: nothing to do once GL has been set up.
    if (_didSetupGL)
        return YES;

    // Bail out rather than block when the lock is unavailable.
    if (![self tryLockGLActive])
        return NO;

    BOOL ok = [self setupGL];
    [self unlockGLActive];
    return ok;
}
再到[self unlockGLActive];中
// Release one level of the recursive GL-active lock; pairs with
// tryLockGLActive (NSRecursiveLock permits nested lock/unlock from the
// same thread).
- (void) unlockGLActive
{
[self.glActiveLock unlock];
}
发现其实并没有什么东西。那我们再跳回上一层继续看
- (id)initWithContentURLString:(NSString *)aUrlString
withOptions:(IJKFFOptions *)options这个方法,发现了一句把_mediaPlayer和_glView关联起来的代码
ijkmp_ios_set_glview(_mediaPlayer, _glView);
查看里面实现
/*
 * Thread-safe public entry point: binds `glView` to the player's video
 * output.  Takes mp->mutex and delegates to the locked "_l" variant.
 */
void ijkmp_ios_set_glview(IjkMediaPlayer *mp, IJKSDLGLView *glView)
{
    assert(mp);
    /* Fix: the trace tag previously read "ijkmp_ios_set_view", which did
     * not match this function's name and made log grepping misleading. */
    MPTRACE("ijkmp_ios_set_glview(glView=%p)\n", (void*)glView);
    pthread_mutex_lock(&mp->mutex);
    ijkmp_ios_set_glview_l(mp, glView);
    pthread_mutex_unlock(&mp->mutex);
    MPTRACE("ijkmp_ios_set_glview(glView=%p)=void\n", (void*)glView);
}
这里又遇到了之前遇到的代码
pthread_mutex_lock(&mp->mutex);
给mediaPlayer的互斥锁上锁,还有对应的解锁,那么中间的代码应该是关键性的代码
/*
 * Locked variant ("_l" = caller already holds mp->mutex): forwards the
 * view down to the SDL video-output layer.  The asserts document the
 * invariants — a prepared player always has an ffplayer and a vout.
 */
void ijkmp_ios_set_glview_l(IjkMediaPlayer *mp, IJKSDLGLView *glView)
{
assert(mp);
assert(mp->ffplayer);
assert(mp->ffplayer->vout);
SDL_VoutIos_SetGLView(mp->ffplayer->vout, glView);
}
继续查看其中的关键代码(最后一行)
/*
 * Thread-safe wrapper: serializes view replacement against other users of
 * this vout via vout->mutex, then delegates to the locked implementation.
 */
void SDL_VoutIos_SetGLView(SDL_Vout *vout, IJKSDLGLView *view)
{
SDL_LockMutex(vout->mutex);
SDL_VoutIos_SetGLView_l(vout, view);
SDL_UnlockMutex(vout->mutex);
}
上面代码也有对应的互斥锁的lock和unlock。那么中间一行代码应该是关键操作,点进去-》
/*
 * Locked setter: swaps the stored GL view, releasing the old reference
 * and retaining the new one.  Note the explicit retain/release — this
 * code uses manual reference counting (non-ARC), so ownership must be
 * balanced by hand.
 */
static void SDL_VoutIos_SetGLView_l(SDL_Vout *vout, IJKSDLGLView *view)
{
SDL_Vout_Opaque *opaque = vout->opaque;
/* Setting the same view again is a no-op. */
if (opaque->gl_view == view)
return;
/* Drop ownership of the previously attached view, if any. */
if (opaque->gl_view) {
[opaque->gl_view release];
opaque->gl_view = nil;
}
/* Take ownership of the new view (view may be nil to detach). */
if (view)
opaque->gl_view = [view retain];
}
绕了这么一大圈,其实是把glView赋值给ffplayer->vout->opaque->gl_view
操作如图
好了,走到这里,glview和media Player的初始化工作已经差不多了。接下来我们继续看一看如何通过URL获取数据的。
在IJKFFMoviePlayerController.m中有prepareToPlay方法
// Kick off asynchronous preparation: hand the URL to the native player,
// then request an async prepare; progress/results come back through the
// player's message loop.  No-op if the native player was never created.
- (void)prepareToPlay
{
if (!_mediaPlayer)
return;
// Optionally keep the device screen awake while playing.
[self setScreenOn:_keepScreenOnWhilePlaying];
ijkmp_set_data_source(_mediaPlayer, [_urlString UTF8String]);
ijkmp_set_option(_mediaPlayer, IJKMP_OPT_CATEGORY_FORMAT, "safe", "0"); // for concat demuxer
// Timestamp the start of prepare for performance monitoring.
_monitor.prepareStartTick = (int64_t)SDL_GetTickHR();
ijkmp_prepare_async(_mediaPlayer);
}
继续看看用_urlString做了什么操作
/*
 * Locked setter for the playback URL ("_l" = caller holds mp->mutex).
 * The MPST_RET_IF_EQ macros bail out with an error for every listed
 * state; since the IDLE check is commented out, setting a data source is
 * only permitted while the player is idle.  On success the player keeps a
 * private strdup'd copy of the URL and moves to MP_STATE_INITIALIZED.
 */
static int ijkmp_set_data_source_l(IjkMediaPlayer *mp, const char *url)
{
assert(mp);
assert(url);
// MPST_RET_IF_EQ(mp->mp_state, MP_STATE_IDLE);
MPST_RET_IF_EQ(mp->mp_state, MP_STATE_INITIALIZED);
MPST_RET_IF_EQ(mp->mp_state, MP_STATE_ASYNC_PREPARING);
MPST_RET_IF_EQ(mp->mp_state, MP_STATE_PREPARED);
MPST_RET_IF_EQ(mp->mp_state, MP_STATE_STARTED);
MPST_RET_IF_EQ(mp->mp_state, MP_STATE_PAUSED);
MPST_RET_IF_EQ(mp->mp_state, MP_STATE_COMPLETED);
MPST_RET_IF_EQ(mp->mp_state, MP_STATE_STOPPED);
MPST_RET_IF_EQ(mp->mp_state, MP_STATE_ERROR);
MPST_RET_IF_EQ(mp->mp_state, MP_STATE_END);
/* Replace any previous source with an owned copy of the URL. */
freep((void**)&mp->data_source);
mp->data_source = strdup(url);
if (!mp->data_source)
return EIJK_OUT_OF_MEMORY;
ijkmp_change_state_l(mp, MP_STATE_INITIALIZED);
return 0;
}
这里面有知识点:strdup和free的用法
这时候把mp的data_source赋值为要播放的url
ijkmp_change_state_l(mp, MP_STATE_INITIALIZED);
这句代码调用了ffplayer内部的状态转换的函数。我估计会发相应的通知
/*
 * Locked state transition: record the new state, then post
 * FFP_MSG_PLAYBACK_STATE_CHANGED so observers are notified via the
 * player's message queue.  Order matters: the state is updated before
 * the notification goes out.
 */
void ijkmp_change_state_l(IjkMediaPlayer *mp, int new_state)
{
mp->mp_state = new_state;
ffp_notify_msg1(mp->ffplayer, FFP_MSG_PLAYBACK_STATE_CHANGED);
}
经过这一步mp->mp_state已经为INITIALIZED(已初始化)
继续看prepareToPlay函数中的
ijkmp_prepare_async(_mediaPlayer);
/*
 * Public entry point for async prepare: serializes on mp->mutex and
 * delegates to the locked implementation.  The MPTRACE calls log
 * entry and the return value for debugging.
 */
int ijkmp_prepare_async(IjkMediaPlayer *mp)
{
assert(mp);
MPTRACE("ijkmp_prepare_async()\n");
pthread_mutex_lock(&mp->mutex);
int retval = ijkmp_prepare_async_l(mp);
pthread_mutex_unlock(&mp->mutex);
MPTRACE("ijkmp_prepare_async()=%d\n", retval);
return retval;
}
其中int retval = ijkmp_prepare_async_l(mp);
/*
 * Locked async-prepare.  The commented-out MPST_RET_IF_EQ lines mean
 * prepare is allowed from INITIALIZED and STOPPED only; all other states
 * return an error.  Steps: transition to ASYNC_PREPARING, start the
 * message queue, spawn the message-loop thread, then hand the data
 * source to the ffplayer core.
 */
static int ijkmp_prepare_async_l(IjkMediaPlayer *mp)
{
assert(mp);
MPST_RET_IF_EQ(mp->mp_state, MP_STATE_IDLE);
// MPST_RET_IF_EQ(mp->mp_state, MP_STATE_INITIALIZED);
MPST_RET_IF_EQ(mp->mp_state, MP_STATE_ASYNC_PREPARING);
MPST_RET_IF_EQ(mp->mp_state, MP_STATE_PREPARED);
MPST_RET_IF_EQ(mp->mp_state, MP_STATE_STARTED);
MPST_RET_IF_EQ(mp->mp_state, MP_STATE_PAUSED);
MPST_RET_IF_EQ(mp->mp_state, MP_STATE_COMPLETED);
// MPST_RET_IF_EQ(mp->mp_state, MP_STATE_STOPPED);
MPST_RET_IF_EQ(mp->mp_state, MP_STATE_ERROR);
MPST_RET_IF_EQ(mp->mp_state, MP_STATE_END);
/* A data source must have been set before preparing. */
assert(mp->data_source);
ijkmp_change_state_l(mp, MP_STATE_ASYNC_PREPARING);
/* Re-arm the queue that carries player events to the platform layer. */
msg_queue_start(&mp->ffplayer->msg_queue);
// released in msg_loop
ijkmp_inc_ref(mp);
mp->msg_thread = SDL_CreateThreadEx(&mp->_msg_thread, ijkmp_msg_loop, mp, "ff_msg_loop");
// msg_thread is detached inside msg_loop
// TODO: 9 release weak_thiz if pthread_create() failed;
/* Kick off the actual open/demux work in the ffplayer core. */
int retval = ffp_prepare_async_l(mp->ffplayer, mp->data_source);
if (retval < 0) {
ijkmp_change_state_l(mp, MP_STATE_ERROR);
return retval;
}
return 0;
}
首先把mp_state的状态改为MP_STATE_ASYNC_PREPARING(异步准备)
msg_queue_start(&mp->ffplayer->msg_queue);出现了messageQueue,信息队列?这是什么东西?
/*
 * (Re)start the message queue: under the queue mutex, clear the abort
 * flag so consumers keep reading, then enqueue an initial FFP_MSG_FLUSH
 * so the consumer begins from a clean slate.  This queue carries player
 * state/events to the platform layer — not just logging.
 */
inline static void msg_queue_start(MessageQueue *q)
{
SDL_LockMutex(q->mutex);
q->abort_request = 0;
AVMessage msg;
msg_init_msg(&msg);
msg.what = FFP_MSG_FLUSH;
msg_queue_put_private(q, &msg);
SDL_UnlockMutex(q->mutex);
}
给我的感觉应该是消息的打印吧。好像没那么重要。
返回上一层继续看代码。int retval = ffp_prepare_async_l(mp->ffplayer, mp->data_source);这个应该是个关键代码
/*
 * Core async-prepare in the ffplayer layer: massages the URL and options,
 * logs version/option diagnostics, lazily opens the audio output, then
 * calls stream_open() — which creates the VideoState and spawns the read
 * thread that actually opens and demuxes the media.
 * Returns 0 on success, negative on failure (EIJK_OUT_OF_MEMORY if
 * stream_open fails).
 */
int ffp_prepare_async_l(FFPlayer *ffp, const char *file_name)
{
assert(ffp);
assert(!ffp->is);
assert(file_name);
/* 'timeout' means something different for rtmp/rtsp, so drop it. */
if (av_stristart(file_name, "rtmp", NULL) ||
av_stristart(file_name, "rtsp", NULL)) {
// There is total different meaning for 'timeout' option in rtmp
av_log(ffp, AV_LOG_WARNING, "remove 'timeout' option for rtmp.\n");
av_dict_set(&ffp->format_opts, "timeout", NULL, 0);
}
/* there is a length limit in avformat */
if (strlen(file_name) + 1 > 1024) {
av_log(ffp, AV_LOG_ERROR, "%s too long url\n", __func__);
/* Work around the limit by tunneling the real URL through an option. */
if (avio_find_protocol_name("ijklongurl:")) {
av_dict_set(&ffp->format_opts, "ijklongurl-url", file_name, 0);
file_name = "ijklongurl:";
}
}
/* Diagnostic dump of component versions and effective options. */
av_log(NULL, AV_LOG_INFO, "===== versions =====\n");
ffp_show_version_str(ffp, "ijkplayer", ijk_version_info());
ffp_show_version_str(ffp, "FFmpeg", av_version_info());
ffp_show_version_int(ffp, "libavutil", avutil_version());
ffp_show_version_int(ffp, "libavcodec", avcodec_version());
ffp_show_version_int(ffp, "libavformat", avformat_version());
ffp_show_version_int(ffp, "libswscale", swscale_version());
ffp_show_version_int(ffp, "libswresample", swresample_version());
av_log(NULL, AV_LOG_INFO, "===== options =====\n");
ffp_show_dict(ffp, "player-opts", ffp->player_opts);
ffp_show_dict(ffp, "format-opts", ffp->format_opts);
ffp_show_dict(ffp, "codec-opts ", ffp->codec_opts);
ffp_show_dict(ffp, "sws-opts ", ffp->sws_dict);
ffp_show_dict(ffp, "swr-opts ", ffp->swr_opts);
av_log(NULL, AV_LOG_INFO, "===================\n");
/* Apply user-supplied player options onto the FFPlayer struct. */
av_opt_set_dict(ffp, &ffp->player_opts);
/* Lazily open the audio output through the pipeline hook. */
if (!ffp->aout) {
ffp->aout = ffpipeline_open_audio_output(ffp->pipeline, ffp);
if (!ffp->aout)
return -1;
}
#if CONFIG_AVFILTER
/* Register the initial video filter, if one was configured. */
if (ffp->vfilter0) {
GROW_ARRAY(ffp->vfilters_list, ffp->nb_vfilters);
ffp->vfilters_list[ffp->nb_vfilters - 1] = ffp->vfilter0;
}
#endif
/* The heavy lifting: creates VideoState and starts the read thread. */
VideoState *is = stream_open(ffp, file_name, NULL);
if (!is) {
av_log(NULL, AV_LOG_WARNING, "ffp_prepare_async_l: stream_open failed OOM");
return EIJK_OUT_OF_MEMORY;
}
ffp->is = is;
ffp->input_filename = av_strdup(file_name);
return 0;
}
果然,如果没有猜错的话,这个应该是去读文件的具体操作。
好了,今天先解读到这,明天继续从读文件操作来看。⛽️