1.IjkMediaPlayer_native_setup(JNIEnv *env, jobject thiz, jobject weak_this)
/*
 * JNI entry point for IjkMediaPlayer.native_setup().
 * Creates the native player, publishes it on the Java object, and wires the
 * weak Java reference into the inject / mediacodec-select callbacks.
 * Statement order matters: the weak-thiz global ref must be stored before it
 * is fanned out as the opaque pointer of the three callbacks below.
 */
static void
IjkMediaPlayer_native_setup(JNIEnv *env, jobject thiz, jobject weak_this)
{
    MPTRACE("%s\n", __func__);
    /* message_loop drains the native message queue and posts events to Java. */
    IjkMediaPlayer *mp = ijkmp_android_create(message_loop);
    /* On NULL: throws java/lang/OutOfMemoryError and jumps to LABEL_RETURN. */
    JNI_CHECK_GOTO(mp, env, "java/lang/OutOfMemoryError", "mpjni: native_setup: ijkmp_create() failed", LABEL_RETURN);

    /* Store the native handle on the Java object (takes its own reference). */
    jni_set_media_player(env, thiz, mp);
    /* Hold a JNI global ref to the Java-side weak reference; the same pointer
     * is reused as the opaque argument of every callback below. */
    ijkmp_set_weak_thiz(mp, (*env)->NewGlobalRef(env, weak_this));
    ijkmp_set_inject_opaque(mp, ijkmp_get_weak_thiz(mp));
    ijkmp_set_ijkio_inject_opaque(mp, ijkmp_get_weak_thiz(mp));
    ijkmp_android_set_mediacodec_select_callback(mp, mediacodec_select_callback, ijkmp_get_weak_thiz(mp));

LABEL_RETURN:
    /* Drop this function's local reference; jni_set_media_player() keeps the
     * player alive (ijkmp_dec_ref_p is a no-op when mp is NULL). */
    ijkmp_dec_ref_p(&mp);
}
`IjkMediaPlayer *mp = ijkmp_android_create(message_loop);` —— 创建 Android 平台的 IjkMediaPlayer:
/*
 * Create an Android-flavoured IjkMediaPlayer: the generic player plus the
 * Android surface vout and the Android pipeline bound together.
 * Returns NULL on any allocation failure (partially built state is released).
 */
IjkMediaPlayer *ijkmp_android_create(int(*msg_loop)(void*))
{
    IjkMediaPlayer *mp = ijkmp_create(msg_loop);
    if (mp) {
        /* Video output that renders into an Android Surface. */
        mp->ffplayer->vout = SDL_VoutAndroid_CreateForAndroidSurface();
        if (mp->ffplayer->vout) {
            /* Platform pipeline: decides mediacodec vs. ffplay decoders. */
            mp->ffplayer->pipeline = ffpipeline_create_from_android(mp->ffplayer);
            if (mp->ffplayer->pipeline) {
                ffpipeline_set_vout(mp->ffplayer->pipeline, mp->ffplayer->vout);
                return mp;
            }
        }
    }

    /* Any failure lands here; safe even when mp is already NULL. */
    ijkmp_dec_ref_p(&mp);
    return NULL;
}
ffpipeline_create_from_android
/*
 * Allocate the Android pipeline and install its vtable: every func_* slot
 * points at the static implementations in this file, which is how the
 * platform-specific decoder/audio-output selection is dispatched later.
 * Returns NULL on allocation failure.
 */
IJKFF_Pipeline *ffpipeline_create_from_android(FFPlayer *ffp)
{
    ALOGD("ffpipeline_create_from_android()\n");

    IJKFF_Pipeline *pipeline = ffpipeline_alloc(&g_pipeline_class, sizeof(IJKFF_Pipeline_Opaque));
    if (!pipeline)
        return NULL;

    IJKFF_Pipeline_Opaque *opaque = pipeline->opaque;
    opaque->ffp           = ffp;
    opaque->left_volume   = 1.0f;
    opaque->right_volume  = 1.0f;
    /* Guards surface swaps performed while the decoder is running. */
    opaque->surface_mutex = SDL_CreateMutex();
    if (!opaque->surface_mutex) {
        ALOGE("ffpipeline-android:create SDL_CreateMutex failed\n");
        ffpipeline_free_p(&pipeline);
        return NULL;
    }

    pipeline->func_destroy              = func_destroy;
    pipeline->func_open_video_decoder   = func_open_video_decoder;
    pipeline->func_open_audio_output    = func_open_audio_output;
    pipeline->func_init_video_decoder   = func_init_video_decoder;
    pipeline->func_config_video_decoder = func_config_video_decoder;
    return pipeline;
}
创建pipeline,并为pipeline的函数指针赋值,函数指针指向pipeline_android.c的函数 IJKFF_Pipeline *pipeline = ffpipeline_alloc(&g_pipeline_class, sizeof(IJKFF_Pipeline_Opaque)); pipeline->func_destroy = func_destroy; pipeline->func_open_video_decoder = func_open_video_decoder; pipeline->func_open_audio_output = func_open_audio_output; pipeline->func_init_video_decoder = func_init_video_decoder; pipeline->func_config_video_decoder = func_config_video_decoder;
2.IjkMediaPlayer_prepareAsync(JNIEnv *env, jobject thiz)
IjkMediaPlayer_prepareAsync
|--ijkmp_prepare_async(mp);
|--ijkmp_prepare_async_l
|--SDL_CreateThreadEx(&mp->_msg_thread, ijkmp_msg_loop, mp, "ff_msg_loop");
|--ffp_prepare_async_l
|--stream_open(ffp, file_name, NULL);
|--SDL_CreateThreadEx(&is->_read_tid, read_thread, ffp, "ff_read");
read_thread为解复用的线程。创建read_thread线程之后,异步prepared返回。
3.解复用线程 static int read_thread(void *arg)
/*
 * Demux thread ("ff_read"): opens the input, probes stream info, picks the
 * best audio/video/subtitle streams, opens their decoders, then (in the part
 * of the function omitted from this excerpt) loops on av_read_frame() pushing
 * packets into the per-stream queues. Errors bail out via `goto fail`, whose
 * label lives in the omitted tail.
 */
static int read_thread(void *arg)
{
    FFPlayer *ffp = arg;
    VideoState *is = ffp->is;
    AVFormatContext *ic = NULL;
    int err, i, ret __unused;
    int st_index[AVMEDIA_TYPE_NB];
    /* pkt / stream_start_time / completed / pkt_in_play_range / pkt_ts and the
     * io tick counters are only used by the packet loop omitted below. */
    AVPacket pkt1, *pkt = &pkt1;
    int64_t stream_start_time;
    int completed = 0;
    int pkt_in_play_range = 0;
    AVDictionaryEntry *t;
    SDL_mutex *wait_mutex = SDL_CreateMutex();
    int scan_all_pmts_set = 0;
    int64_t pkt_ts;
    int last_error = 0;
    int64_t prev_io_tick_counter = 0;
    int64_t io_tick_counter = 0;
    int init_ijkmeta = 0;

    if (!wait_mutex) {
        av_log(NULL, AV_LOG_FATAL, "SDL_CreateMutex(): %s\n", SDL_GetError());
        ret = AVERROR(ENOMEM);
        goto fail; /* fail: label is in the omitted tail of this function */
    }

    memset(st_index, -1, sizeof(st_index));
    is->last_video_stream = is->video_stream = -1;
    is->last_audio_stream = is->audio_stream = -1;
    is->last_subtitle_stream = is->subtitle_stream = -1;
    is->eof = 0;

    ic = avformat_alloc_context();
    if (!ic) {
        av_log(NULL, AV_LOG_FATAL, "Could not allocate context.\n");
        ret = AVERROR(ENOMEM);
        goto fail;
    }
    /* Interrupt callback lets ffmpeg abort blocked I/O on stop/seek. */
    ic->interrupt_callback.callback = decode_interrupt_cb;
    ic->interrupt_callback.opaque = is;
    if (!av_dict_get(ffp->format_opts, "scan_all_pmts", NULL, AV_DICT_MATCH_CASE)) {
        av_dict_set(&ffp->format_opts, "scan_all_pmts", "1", AV_DICT_DONT_OVERWRITE);
        scan_all_pmts_set = 1;
    }
    if (av_stristart(is->filename, "rtmp", NULL) ||
        av_stristart(is->filename, "rtsp", NULL)) {
        // There is total different meaning for 'timeout' option in rtmp
        av_log(ffp, AV_LOG_WARNING, "remove 'timeout' option for rtmp.\n");
        av_dict_set(&ffp->format_opts, "timeout", NULL, 0);
    }
    if (ffp->skip_calc_frame_rate) {
        av_dict_set_int(&ic->metadata, "skip-calc-frame-rate", ffp->skip_calc_frame_rate, 0);
        av_dict_set_int(&ffp->format_opts, "skip-calc-frame-rate", ffp->skip_calc_frame_rate, 0);
    }
    if (ffp->iformat_name)
        is->iformat = av_find_input_format(ffp->iformat_name);
    err = avformat_open_input(&ic, is->filename, is->iformat, &ffp->format_opts);
    if (err < 0) {
        print_error(is->filename, err);
        ret = -1;
        goto fail;
    }
    ffp_notify_msg1(ffp, FFP_MSG_OPEN_INPUT);
    if (scan_all_pmts_set)
        av_dict_set(&ffp->format_opts, "scan_all_pmts", NULL, AV_DICT_MATCH_CASE);
    /* Any option left in format_opts was not consumed by the demuxer. */
    if ((t = av_dict_get(ffp->format_opts, "", NULL, AV_DICT_IGNORE_SUFFIX))) {
        av_log(NULL, AV_LOG_ERROR, "Option %s not found.\n", t->key);
#ifdef FFP_MERGE
        ret = AVERROR_OPTION_NOT_FOUND;
        goto fail;
#endif
    }
    is->ic = ic;
    if (ffp->genpts)
        ic->flags |= AVFMT_FLAG_GENPTS;
    av_format_inject_global_side_data(ic);
    //
    //AVDictionary **opts;
    //int orig_nb_streams;
    //opts = setup_find_stream_info_opts(ic, ffp->codec_opts);
    //orig_nb_streams = ic->nb_streams;
    /* NOTE(review): debug logging — unconditionally dereferences streams[0]
     * and streams[1]; out-of-bounds for inputs with fewer than two streams.
     * Should be removed or guarded by nb_streams checks. */
    av_log(NULL, AV_LOG_ERROR, "aa00 avctx profile%d\n",ic->streams[0]->codecpar->profile);
    av_log(NULL, AV_LOG_ERROR, "aa00 avctx profile%d\n",ic->streams[1]->codecpar->profile);
    if (ffp->find_stream_info) {
        AVDictionary **opts = setup_find_stream_info_opts(ic, ffp->codec_opts);
        int orig_nb_streams = ic->nb_streams;
        /* do/while(0) so the "data:" fast path can break out early when every
         * stream already has a known profile and probing can be skipped. */
        do {
            if (av_stristart(is->filename, "data:", NULL) && orig_nb_streams > 0) {
                for (i = 0; i < orig_nb_streams; i++) {
                    if (!ic->streams[i] || !ic->streams[i]->codecpar || ic->streams[i]->codecpar->profile == FF_PROFILE_UNKNOWN) {
                        break;
                    }
                }
                if (i == orig_nb_streams) {
                    break;
                }
            }
            err = avformat_find_stream_info(ic, opts);
        } while(0);
        ffp_notify_msg1(ffp, FFP_MSG_FIND_STREAM_INFO);
        for (i = 0; i < orig_nb_streams; i++)
            av_dict_free(&opts[i]);
        av_freep(&opts);
        if (err < 0) {
            av_log(NULL, AV_LOG_WARNING,
                   "%s: could not find codec parameters\n", is->filename);
            ret = -1;
            goto fail;
        }
    }
    /* NOTE(review): same unguarded streams[0]/streams[1] debug logging as above. */
    av_log(NULL, AV_LOG_ERROR, "00 avctx profile%d\n",ic->streams[0]->codecpar->profile);
    av_log(NULL, AV_LOG_ERROR, "00 avctx profile%d\n",ic->streams[1]->codecpar->profile);
    if (ic->pb)
        ic->pb->eof_reached = 0; // FIXME hack, ffplay maybe should not use avio_feof() to test for the end
    if (ffp->seek_by_bytes < 0)
        ffp->seek_by_bytes = !!(ic->iformat->flags & AVFMT_TS_DISCONT) && strcmp("ogg", ic->iformat->name);
    is->max_frame_duration = (ic->iformat->flags & AVFMT_TS_DISCONT) ? 10.0 : 3600.0;
    /* NOTE(review): the next line unconditionally overwrites the DISCONT-aware
     * value just computed — looks like leftover debug/experimental code. */
    is->max_frame_duration = 10.0;
    av_log(ffp, AV_LOG_INFO, "max_frame_duration: %.3f\n", is->max_frame_duration);
#ifdef FFP_MERGE
    if (!window_title && (t = av_dict_get(ic->metadata, "title", NULL, 0)))
        window_title = av_asprintf("%s - %s", t->value, input_filename);
#endif
    /* if seeking requested, we execute it */
    if (ffp->start_time != AV_NOPTS_VALUE) {
        int64_t timestamp;
        timestamp = ffp->start_time;
        /* add the stream start time */
        if (ic->start_time != AV_NOPTS_VALUE)
            timestamp += ic->start_time;
        ret = avformat_seek_file(ic, -1, INT64_MIN, timestamp, INT64_MAX, 0);
        if (ret < 0) {
            av_log(NULL, AV_LOG_WARNING, "%s: could not seek to position %0.3f\n",
                   is->filename, (double)timestamp / AV_TIME_BASE);
        }
    }
    is->realtime = is_realtime(ic);
    av_dump_format(ic, 0, is->filename, 0);
    int video_stream_count = 0;
    int h264_stream_count = 0;
    int first_h264_stream = -1;
    /* First pass: honor any user stream specifier, and remember the first
     * H.264 stream as the fallback when several video streams exist. */
    for (i = 0; i < ic->nb_streams; i++) {
        AVStream *st = ic->streams[i];
        enum AVMediaType type = st->codecpar->codec_type;
        st->discard = AVDISCARD_ALL;
        if (type >= 0 && ffp->wanted_stream_spec[type] && st_index[type] == -1)
            if (avformat_match_stream_specifier(ic, st, ffp->wanted_stream_spec[type]) > 0)
                st_index[type] = i;
        // choose first h264
        if (type == AVMEDIA_TYPE_VIDEO) {
            enum AVCodecID codec_id = st->codecpar->codec_id;
            video_stream_count++;
            if (codec_id == AV_CODEC_ID_H264) {
                h264_stream_count++;
                if (first_h264_stream < 0)
                    first_h264_stream = i;
            }
        }
    }
    if (video_stream_count > 1 && st_index[AVMEDIA_TYPE_VIDEO] < 0) {
        st_index[AVMEDIA_TYPE_VIDEO] = first_h264_stream;
        av_log(NULL, AV_LOG_WARNING, "multiple video stream found, prefer first h264 stream: %d\n", first_h264_stream);
    }
    /* av_find_best_stream() refines/validates the candidates chosen above. */
    if (!ffp->video_disable)
        st_index[AVMEDIA_TYPE_VIDEO] =
            av_find_best_stream(ic, AVMEDIA_TYPE_VIDEO,
                                st_index[AVMEDIA_TYPE_VIDEO], -1, NULL, 0);
    if (!ffp->audio_disable)
        st_index[AVMEDIA_TYPE_AUDIO] =
            av_find_best_stream(ic, AVMEDIA_TYPE_AUDIO,
                                st_index[AVMEDIA_TYPE_AUDIO],
                                st_index[AVMEDIA_TYPE_VIDEO],
                                NULL, 0);
    if (!ffp->video_disable && !ffp->subtitle_disable)
        st_index[AVMEDIA_TYPE_SUBTITLE] =
            av_find_best_stream(ic, AVMEDIA_TYPE_SUBTITLE,
                                st_index[AVMEDIA_TYPE_SUBTITLE],
                                (st_index[AVMEDIA_TYPE_AUDIO] >= 0 ?
                                 st_index[AVMEDIA_TYPE_AUDIO] :
                                 st_index[AVMEDIA_TYPE_VIDEO]),
                                NULL, 0);
    is->show_mode = ffp->show_mode;
#ifdef FFP_MERGE // bbc: dunno if we need this
    if (st_index[AVMEDIA_TYPE_VIDEO] >= 0) {
        AVStream *st = ic->streams[st_index[AVMEDIA_TYPE_VIDEO]];
        AVCodecParameters *codecpar = st->codecpar;
        AVRational sar = av_guess_sample_aspect_ratio(ic, st, NULL);
        if (codecpar->width)
            set_default_window_size(codecpar->width, codecpar->height, sar);
    }
#endif
    /* open the streams */
    if (st_index[AVMEDIA_TYPE_AUDIO] >= 0) {
        stream_component_open(ffp, st_index[AVMEDIA_TYPE_AUDIO]);
    } else {
        /* No audio: fall back to syncing everything to the video clock. */
        ffp->av_sync_type = AV_SYNC_VIDEO_MASTER;
        is->av_sync_type = ffp->av_sync_type;
    }
    ret = -1;
    if (st_index[AVMEDIA_TYPE_VIDEO] >= 0) {
        ret = stream_component_open(ffp, st_index[AVMEDIA_TYPE_VIDEO]);
    }
    if (is->show_mode == SHOW_MODE_NONE)
        is->show_mode = ret >= 0 ? SHOW_MODE_VIDEO : SHOW_MODE_RDFT;
    if (st_index[AVMEDIA_TYPE_SUBTITLE] >= 0) {
        stream_component_open(ffp, st_index[AVMEDIA_TYPE_SUBTITLE]);
    }
    ffp_notify_msg1(ffp, FFP_MSG_COMPONENT_OPEN);
    if (!ffp->ijkmeta_delay_init) {
        ijkmeta_set_avformat_context_l(ffp->meta, ic);
    }
    ffp->stat.bit_rate = ic->bit_rate;
    if (st_index[AVMEDIA_TYPE_VIDEO] >= 0)
        ijkmeta_set_int64_l(ffp->meta, IJKM_KEY_VIDEO_STREAM, st_index[AVMEDIA_TYPE_VIDEO]);
    if (st_index[AVMEDIA_TYPE_AUDIO] >= 0)
        ijkmeta_set_int64_l(ffp->meta, IJKM_KEY_AUDIO_STREAM, st_index[AVMEDIA_TYPE_AUDIO]);
    if (st_index[AVMEDIA_TYPE_SUBTITLE] >= 0)
        ijkmeta_set_int64_l(ffp->meta, IJKM_KEY_TIMEDTEXT_STREAM, st_index[AVMEDIA_TYPE_SUBTITLE]);
    if (is->video_stream < 0 && is->audio_stream < 0) {
        av_log(NULL, AV_LOG_FATAL, "Failed to open file '%s' or configure filtergraph\n",
               is->filename);
        ret = -1;
        goto fail;
    }
    /* The buffer-indicator queue drives buffering progress reporting;
     * prefer audio when available. */
    if (is->audio_stream >= 0) {
        is->audioq.is_buffer_indicator = 1;
        is->buffer_indicator_queue = &is->audioq;
    } else if (is->video_stream >= 0) {
        is->videoq.is_buffer_indicator = 1;
        is->buffer_indicator_queue = &is->videoq;
    } else {
        /* NOTE(review): assert on a non-NULL string literal is always true and
         * never fires; likely meant assert(!"invalid streams"). This branch is
         * unreachable anyway given the video/audio check above. */
        assert("invalid streams");
    }
    if (ffp->infinite_buffer < 0 && is->realtime)
        ffp->infinite_buffer = 1;
    if (!ffp->render_wait_start && !ffp->start_on_prepared)
        toggle_pause(ffp, 1);
    if (is->video_st && is->video_st->codecpar) {
        AVCodecParameters *codecpar = is->video_st->codecpar;
        ffp_notify_msg3(ffp, FFP_MSG_VIDEO_SIZE_CHANGED, codecpar->width, codecpar->height);
        ffp_notify_msg3(ffp, FFP_MSG_SAR_CHANGED, codecpar->sample_aspect_ratio.num, codecpar->sample_aspect_ratio.den);
    }
    ffp->prepared = true;
    ffp_notify_msg1(ffp, FFP_MSG_PREPARED);
    if (!ffp->render_wait_start && !ffp->start_on_prepared) {
        while (is->pause_req && !is->abort_request) {
            SDL_Delay(20);
        }
    }
    if (ffp->auto_resume) {
        ffp_notify_msg1(ffp, FFP_REQ_START);
        ffp->auto_resume = 0;
    }
    /* offset should be seeked*/
    if (ffp->seek_at_start > 0) {
        ffp_seek_to_l(ffp, (long)(ffp->seek_at_start));
    }
    // ... code omitted in this excerpt: the av_read_frame() packet loop,
    // eof/error handling, and the fail: cleanup targeted by the gotos above ...
}
read_thread线程中循环调用av_read_frame,把音视频帧写到队列中
read_thread
|--stream_component_open(ffp, st_index[AVMEDIA_TYPE_AUDIO]);
|--stream_component_open(ffp, st_index[AVMEDIA_TYPE_VIDEO]);
|--av_read_frame(ic, pkt);
|--packet_queue_put(&is->videoq, pkt);
|--packet_queue_put(&is->audioq, pkt);
4.stream_component_open中创建audio decoder和video decoder,重点讲video decoder。
stream_component_open
|--ffpipeline_open_video_decoder(ffp->pipeline, ffp);
|--decoder_start(&is->viddec, video_thread, ffp, "ff_video_dec"))
/*
 * Open the component (audio/video/subtitle) at stream_index of is->ic:
 * allocates and configures an AVCodecContext from the stream's codecpar,
 * opens the decoder, then starts the matching decoder thread
 * (audio_thread / video_thread / subtitle_thread).
 *
 * Returns 0 on success, a negative AVERROR on failure. On failure before the
 * decoder thread takes ownership, the codec context is freed via fail:.
 *
 * Fix vs. original: the high-fps throttle used skip_loop_filter when
 * computing skip_idct (copy-paste bug); it now raises skip_idct from its own
 * current value.
 */
static int stream_component_open(FFPlayer *ffp, int stream_index)
{
    VideoState *is = ffp->is;
    AVFormatContext *ic = is->ic;
    AVCodecContext *avctx;
    AVCodec *codec = NULL;
    const char *forced_codec_name = NULL;
    AVDictionary *opts = NULL;
    AVDictionaryEntry *t = NULL;
    int sample_rate, nb_channels;
    int64_t channel_layout;
    int ret = 0;
    int stream_lowres = ffp->lowres;

    if (stream_index < 0 || stream_index >= ic->nb_streams)
        return -1;

    avctx = avcodec_alloc_context3(NULL);
    if (!avctx)
        return AVERROR(ENOMEM);

    //if (avctx->codec_type == AVMEDIA_TYPE_VIDEO)
    /* NOTE(review): debug logging left in at ERROR level. */
    av_log(NULL, AV_LOG_ERROR, "11 avctx profile%d\n",ic->streams[stream_index]->codecpar->profile);
    ret = avcodec_parameters_to_context(avctx, ic->streams[stream_index]->codecpar);
    if (ret < 0)
        goto fail;
    av_codec_set_pkt_timebase(avctx, ic->streams[stream_index]->time_base);

    codec = avcodec_find_decoder(avctx->codec_id);

    /* Remember the last-opened stream index per type and pick up any
     * user-forced decoder name for this media type. */
    switch (avctx->codec_type) {
        case AVMEDIA_TYPE_AUDIO   : is->last_audio_stream    = stream_index; forced_codec_name = ffp->audio_codec_name; break;
        case AVMEDIA_TYPE_SUBTITLE: is->last_subtitle_stream = stream_index; forced_codec_name = ffp->subtitle_codec_name; break;
        case AVMEDIA_TYPE_VIDEO   : is->last_video_stream    = stream_index; forced_codec_name = ffp->video_codec_name; break;
        default: break;
    }
    if (forced_codec_name)
        codec = avcodec_find_decoder_by_name(forced_codec_name);
    if (!codec) {
        if (forced_codec_name) av_log(NULL, AV_LOG_WARNING,
                                      "No codec could be found with name '%s'\n", forced_codec_name);
        else                   av_log(NULL, AV_LOG_WARNING,
                                      "No codec could be found with id %d\n", avctx->codec_id);
        ret = AVERROR(EINVAL);
        goto fail;
    }

    avctx->codec_id = codec->id;
    /* Clamp the requested lowres factor to what the decoder supports. */
    if (stream_lowres > av_codec_get_max_lowres(codec)) {
        av_log(avctx, AV_LOG_WARNING, "The maximum value for lowres supported by the decoder is %d\n",
               av_codec_get_max_lowres(codec));
        stream_lowres = av_codec_get_max_lowres(codec);
    }
    av_codec_set_lowres(avctx, stream_lowres);

#if FF_API_EMU_EDGE
    if (stream_lowres) avctx->flags |= CODEC_FLAG_EMU_EDGE;
#endif
    if (ffp->fast)
        avctx->flags2 |= AV_CODEC_FLAG2_FAST;
#if FF_API_EMU_EDGE
    if (codec->capabilities & AV_CODEC_CAP_DR1)
        avctx->flags |= CODEC_FLAG_EMU_EDGE;
#endif

    opts = filter_codec_opts(ffp->codec_opts, avctx->codec_id, ic, ic->streams[stream_index], codec);
    if (!av_dict_get(opts, "threads", NULL, 0))
        av_dict_set(&opts, "threads", "auto", 0);
    if (stream_lowres)
        av_dict_set_int(&opts, "lowres", stream_lowres, 0);
    if (avctx->codec_type == AVMEDIA_TYPE_VIDEO || avctx->codec_type == AVMEDIA_TYPE_AUDIO)
        av_dict_set(&opts, "refcounted_frames", "1", 0);
    if ((ret = avcodec_open2(avctx, codec, &opts)) < 0) {
        goto fail;
    }
    /* Anything left in opts was not consumed by avcodec_open2(). */
    if ((t = av_dict_get(opts, "", NULL, AV_DICT_IGNORE_SUFFIX))) {
        av_log(NULL, AV_LOG_ERROR, "Option %s not found.\n", t->key);
#ifdef FFP_MERGE
        ret = AVERROR_OPTION_NOT_FOUND;
        goto fail;
#endif
    }

    is->eof = 0;
    ic->streams[stream_index]->discard = AVDISCARD_DEFAULT;
    switch (avctx->codec_type) {
    case AVMEDIA_TYPE_AUDIO:
#if CONFIG_AVFILTER
        {
            AVFilterContext *sink;
            is->audio_filter_src.freq           = avctx->sample_rate;
            is->audio_filter_src.channels       = avctx->channels;
            is->audio_filter_src.channel_layout = get_valid_channel_layout(avctx->channel_layout, avctx->channels);
            is->audio_filter_src.fmt            = avctx->sample_fmt;
            SDL_LockMutex(ffp->af_mutex);
            if ((ret = configure_audio_filters(ffp, ffp->afilters, 0)) < 0) {
                SDL_UnlockMutex(ffp->af_mutex);
                goto fail;
            }
            ffp->af_changed = 0;
            SDL_UnlockMutex(ffp->af_mutex);
            sink = is->out_audio_filter;
            sample_rate    = av_buffersink_get_sample_rate(sink);
            nb_channels    = av_buffersink_get_channels(sink);
            channel_layout = av_buffersink_get_channel_layout(sink);
        }
#else
        sample_rate    = avctx->sample_rate;
        nb_channels    = avctx->channels;
        channel_layout = avctx->channel_layout;
#endif

        /* prepare audio output */
        if ((ret = audio_open(ffp, channel_layout, nb_channels, sample_rate, &is->audio_tgt)) < 0)
            goto fail;
        ffp_set_audio_codec_info(ffp, AVCODEC_MODULE_NAME, avcodec_get_name(avctx->codec_id));
        /* audio_open() returns the hardware buffer size on success. */
        is->audio_hw_buf_size = ret;
        is->audio_src = is->audio_tgt;
        is->audio_buf_size  = 0;
        is->audio_buf_index = 0;

        /* init averaging filter */
        is->audio_diff_avg_coef  = exp(log(0.01) / AUDIO_DIFF_AVG_NB);
        is->audio_diff_avg_count = 0;
        /* since we do not have a precise enough audio FIFO fullness,
           we correct audio sync only if larger than this threshold */
        is->audio_diff_threshold = 2.0 * is->audio_hw_buf_size / is->audio_tgt.bytes_per_sec;

        is->audio_stream = stream_index;
        is->audio_st = ic->streams[stream_index];

        decoder_init(&is->auddec, avctx, &is->audioq, is->continue_read_thread);
        if ((is->ic->iformat->flags & (AVFMT_NOBINSEARCH | AVFMT_NOGENSEARCH | AVFMT_NO_BYTE_SEEK)) && !is->ic->iformat->read_seek) {
            is->auddec.start_pts = is->audio_st->start_time;
            is->auddec.start_pts_tb = is->audio_st->time_base;
        }
        if ((ret = decoder_start(&is->auddec, audio_thread, ffp, "ff_audio_dec")) < 0)
            goto out;
        SDL_AoutPauseAudio(ffp->aout, 0);
        break;

    case AVMEDIA_TYPE_VIDEO:
        is->video_stream = stream_index;
        is->video_st = ic->streams[stream_index];

        if (ffp->async_init_decoder) {
            /* Wait for the asynchronously pre-initialized decoder, then try to
             * reconfigure it; fall back to a fresh open on any failure. */
            while (!is->initialized_decoder) {
                SDL_Delay(5);
            }
            if (ffp->node_vdec) {
                is->viddec.avctx = avctx;
                ret = ffpipeline_config_video_decoder(ffp->pipeline, ffp);
            }
            if (ret || !ffp->node_vdec) {
                decoder_init(&is->viddec, avctx, &is->videoq, is->continue_read_thread);
                ffp->node_vdec = ffpipeline_open_video_decoder(ffp->pipeline, ffp);
                if (!ffp->node_vdec)
                    goto fail;
            }
        } else {
            decoder_init(&is->viddec, avctx, &is->videoq, is->continue_read_thread);
            ffp->node_vdec = ffpipeline_open_video_decoder(ffp->pipeline, ffp);
            if (!ffp->node_vdec)
                goto fail;
        }
        if ((ret = decoder_start(&is->viddec, video_thread, ffp, "ff_video_dec")) < 0)
            goto out;

        is->queue_attachments_req = 1;
        if (ffp->max_fps >= 0) {
            if (is->video_st->avg_frame_rate.den && is->video_st->avg_frame_rate.num) {
                double fps = av_q2d(is->video_st->avg_frame_rate);
                SDL_ProfilerReset(&is->viddec.decode_profiler, fps + 0.5);
                if (fps > ffp->max_fps && fps < 130.0) {
                    is->is_video_high_fps = 1;
                    av_log(ffp, AV_LOG_WARNING, "fps: %lf (too high)\n", fps);
                } else {
                    av_log(ffp, AV_LOG_WARNING, "fps: %lf (normal)\n", fps);
                }
            }
            if (is->video_st->r_frame_rate.den && is->video_st->r_frame_rate.num) {
                double tbr = av_q2d(is->video_st->r_frame_rate);
                if (tbr > ffp->max_fps && tbr < 130.0) {
                    is->is_video_high_fps = 1;
                    av_log(ffp, AV_LOG_WARNING, "fps: %lf (too high)\n", tbr);
                } else {
                    av_log(ffp, AV_LOG_WARNING, "fps: %lf (normal)\n", tbr);
                }
            }
        }

        if (is->is_video_high_fps) {
            avctx->skip_frame       = FFMAX(avctx->skip_frame, AVDISCARD_NONREF);
            avctx->skip_loop_filter = FFMAX(avctx->skip_loop_filter, AVDISCARD_NONREF);
            /* BUGFIX: was FFMAX(avctx->skip_loop_filter, ...) — copy-paste
             * error; skip_idct must be raised from its own current value. */
            avctx->skip_idct        = FFMAX(avctx->skip_idct, AVDISCARD_NONREF);
        }
        break;

    case AVMEDIA_TYPE_SUBTITLE:
        if (!ffp->subtitle) break;

        is->subtitle_stream = stream_index;
        is->subtitle_st = ic->streams[stream_index];

        ffp_set_subtitle_codec_info(ffp, AVCODEC_MODULE_NAME, avcodec_get_name(avctx->codec_id));

        decoder_init(&is->subdec, avctx, &is->subtitleq, is->continue_read_thread);
        if ((ret = decoder_start(&is->subdec, subtitle_thread, ffp, "ff_subtitle_dec")) < 0)
            goto out;
        break;

    default:
        break;
    }
    goto out;

fail:
    avcodec_free_context(&avctx);
out:
    av_dict_free(&opts);
    return ret;
}
ffpipeline_open_video_decoder,调用pipeline的func_open_video_decoder.
/*
 * Open a video decoder node by dispatching through the pipeline vtable slot
 * installed at creation time (Android or iOS implementation).
 */
IJKFF_Pipenode* ffpipeline_open_video_decoder(IJKFF_Pipeline *pipeline, FFPlayer *ffp)
{
    IJKFF_Pipenode *node = pipeline->func_open_video_decoder(pipeline, ffp);
    return node;
}
因为在ffpipeline_create_from_android中创建pipeline的时候指向了pipeline_android.c中的func_open_video_decoder,所以后面执行pipeline_android.c中的func_open_video_decoder,如果是ios平台,会指向pipeline_ios.c中的func_open_video_decoder
/*
 * Android video-decoder factory: try MediaCodec when any hardware option is
 * enabled; otherwise — or when MediaCodec creation fails — fall back to the
 * ffplay software decoder node.
 */
static IJKFF_Pipenode *func_open_video_decoder(IJKFF_Pipeline *pipeline, FFPlayer *ffp)
{
    IJKFF_Pipeline_Opaque *opaque = pipeline->opaque;
    IJKFF_Pipenode *node = NULL;
    int want_mediacodec = ffp->mediacodec_all_videos || ffp->mediacodec_avc ||
                          ffp->mediacodec_hevc || ffp->mediacodec_mpeg2;

    if (want_mediacodec)
        node = ffpipenode_create_video_decoder_from_android_mediacodec(ffp, pipeline, opaque->weak_vout);

    /* Software fallback covers both "not requested" and "creation failed". */
    if (!node)
        node = ffpipenode_create_video_decoder_from_ffplay(ffp);

    return node;
}
func_open_video_decoder函数中创建硬解还是软解,
走硬解的条件:
设置mediacodec硬解
走软解的条件:
1)没有设置mediacodec硬解
2)创建硬解码器失败,node返回空。
本文只分析硬解流程,软解流程一样
ffpipenode_create_video_decoder_from_android_mediacodec
/*
 * Create a MediaCodec-backed video decoder node.
 * Validates the codec/profile against what MediaCodec can handle, builds the
 * input MediaFormat, asks Java to select a codec name, binds the surface, and
 * configures/starts the codec. Returns NULL (after freeing the node) when the
 * codec/profile is unsupported or any setup step fails, which makes the
 * caller fall back to the software decoder.
 *
 * Fixes vs. original:
 *  - the sync-object check tested acodec_cond twice and never validated
 *    acodec_mutex / any_input_mutex / any_input_cond;
 *  - several H.264 profile log messages named the wrong profile
 *    (HIGH_422 / HIGH_422_INTRA / HIGH_444 were reported as HIGH_10_*).
 */
IJKFF_Pipenode *ffpipenode_create_video_decoder_from_android_mediacodec(FFPlayer *ffp, IJKFF_Pipeline *pipeline, SDL_Vout *vout)
{
    ALOGD("ffpipenode_create_video_decoder_from_android_mediacodec()\n");
    /* MediaCodec requires API 16+ (Jelly Bean). */
    if (SDL_Android_GetApiLevel() < IJK_API_16_JELLY_BEAN)
        return NULL;
    if (!ffp || !ffp->is)
        return NULL;

    IJKFF_Pipenode *node = ffpipenode_alloc(sizeof(IJKFF_Pipenode_Opaque));
    if (!node)
        return node;

    VideoState            *is     = ffp->is;
    IJKFF_Pipenode_Opaque *opaque = node->opaque;
    JNIEnv                *env    = NULL;
    int                    ret    = 0;
    jobject                jsurface = NULL;

    /* Install the hardware-decode vtable (distinguishes this node from the
     * software ffplay node). */
    node->func_destroy = func_destroy;
    if (ffp->mediacodec_sync) {
        node->func_run_sync = func_run_sync_loop;
    } else {
        node->func_run_sync = func_run_sync;
    }
    node->func_flush = func_flush;
    opaque->pipeline  = pipeline;
    opaque->ffp       = ffp;
    opaque->decoder   = &is->viddec;
    opaque->weak_vout = vout;

    opaque->codecpar = avcodec_parameters_alloc();
    if (!opaque->codecpar)
        goto fail;

    ALOGE("%s: MediaCodec: opaque->decoder->avctx profile%d 11\n", __func__, opaque->decoder->avctx->profile);
    ret = avcodec_parameters_from_context(opaque->codecpar, opaque->decoder->avctx);
    if (ret)
        goto fail;

    /* Gate on codec id + profile: MediaCodec only handles a subset; anything
     * rejected here falls back to software decoding in the caller. */
    switch (opaque->codecpar->codec_id) {
    case AV_CODEC_ID_H264:
        if (!ffp->mediacodec_avc && !ffp->mediacodec_all_videos) {
            ALOGE("%s: MediaCodec: AVC/H264 is disabled. codec_id:%d \n", __func__, opaque->codecpar->codec_id);
            goto fail;
        }
        switch (opaque->codecpar->profile) {
        case FF_PROFILE_H264_BASELINE:
            ALOGI("%s: MediaCodec: H264_BASELINE: enabled\n", __func__);
            break;
        case FF_PROFILE_H264_CONSTRAINED_BASELINE:
            ALOGI("%s: MediaCodec: H264_CONSTRAINED_BASELINE: enabled\n", __func__);
            break;
        case FF_PROFILE_H264_MAIN:
            ALOGI("%s: MediaCodec: H264_MAIN: enabled\n", __func__);
            break;
        case FF_PROFILE_H264_EXTENDED:
            ALOGI("%s: MediaCodec: H264_EXTENDED: enabled\n", __func__);
            break;
        case FF_PROFILE_H264_HIGH:
            ALOGI("%s: MediaCodec: H264_HIGH: enabled\n", __func__);
            break;
        case FF_PROFILE_H264_HIGH_10:
            ALOGW("%s: MediaCodec: H264_HIGH_10: disabled\n", __func__);
            goto fail;
        case FF_PROFILE_H264_HIGH_10_INTRA:
            ALOGW("%s: MediaCodec: H264_HIGH_10_INTRA: disabled\n", __func__);
            goto fail;
        case FF_PROFILE_H264_HIGH_422:
            /* BUGFIX: log said "H264_HIGH_10_422" */
            ALOGW("%s: MediaCodec: H264_HIGH_422: disabled\n", __func__);
            goto fail;
        case FF_PROFILE_H264_HIGH_422_INTRA:
            /* BUGFIX: log said "H264_HIGH_10_INTRA" */
            ALOGW("%s: MediaCodec: H264_HIGH_422_INTRA: disabled\n", __func__);
            goto fail;
        case FF_PROFILE_H264_HIGH_444:
            /* BUGFIX: log said "H264_HIGH_10_444" */
            ALOGW("%s: MediaCodec: H264_HIGH_444: disabled\n", __func__);
            goto fail;
        case FF_PROFILE_H264_HIGH_444_PREDICTIVE:
            ALOGW("%s: MediaCodec: H264_HIGH_444_PREDICTIVE: disabled\n", __func__);
            goto fail;
        case FF_PROFILE_H264_HIGH_444_INTRA:
            ALOGW("%s: MediaCodec: H264_HIGH_444_INTRA: disabled\n", __func__);
            goto fail;
        case FF_PROFILE_H264_CAVLC_444:
            ALOGW("%s: MediaCodec: H264_CAVLC_444: disabled\n", __func__);
            goto fail;
        default:
            ALOGW("%s: MediaCodec: (%d) unknown profile: disabled\n", __func__, opaque->codecpar->profile);
            goto fail;
        }
        strcpy(opaque->mcc.mime_type, SDL_AMIME_VIDEO_AVC);
        opaque->mcc.profile = opaque->codecpar->profile;
        opaque->mcc.level   = opaque->codecpar->level;
        break;
    case AV_CODEC_ID_HEVC:
        if (!ffp->mediacodec_hevc && !ffp->mediacodec_all_videos) {
            ALOGE("%s: MediaCodec/HEVC is disabled. codec_id:%d \n", __func__, opaque->codecpar->codec_id);
            goto fail;
        }
        strcpy(opaque->mcc.mime_type, SDL_AMIME_VIDEO_HEVC);
        opaque->mcc.profile = opaque->codecpar->profile;
        opaque->mcc.level   = opaque->codecpar->level;
        break;
    case AV_CODEC_ID_MPEG2VIDEO:
        if (!ffp->mediacodec_mpeg2 && !ffp->mediacodec_all_videos) {
            ALOGE("%s: MediaCodec/MPEG2VIDEO is disabled. codec_id:%d \n", __func__, opaque->codecpar->codec_id);
            goto fail;
        }
        strcpy(opaque->mcc.mime_type, SDL_AMIME_VIDEO_MPEG2VIDEO);
        opaque->mcc.profile = opaque->codecpar->profile;
        opaque->mcc.level   = opaque->codecpar->level;
        break;
    case AV_CODEC_ID_MPEG4:
        if (!ffp->mediacodec_mpeg4 && !ffp->mediacodec_all_videos) {
            ALOGE("%s: MediaCodec/MPEG4 is disabled. codec_id:%d \n", __func__, opaque->codecpar->codec_id);
            goto fail;
        }
        /* codec_tag low word 0x5844 ("DX") marks DivX, which MediaCodec
         * cannot decode reliably. */
        if ((opaque->codecpar->codec_tag & 0x0000FFFF) == 0x00005844) {
            ALOGE("%s: divx is not supported \n", __func__);
            goto fail;
        }
        strcpy(opaque->mcc.mime_type, SDL_AMIME_VIDEO_MPEG4);
        opaque->mcc.profile = opaque->codecpar->profile >= 0 ? opaque->codecpar->profile : 0;
        opaque->mcc.level   = opaque->codecpar->level   >= 0 ? opaque->codecpar->level   : 1;
        break;
    default:
        ALOGE("%s:create: not H264 or H265/HEVC, codec_id:%d \n", __func__, opaque->codecpar->codec_id);
        goto fail;
    }

    if (JNI_OK != SDL_JNI_SetupThreadEnv(&env)) {
        ALOGE("%s:create: SetupThreadEnv failed\n", __func__);
        goto fail;
    }

    opaque->acodec_mutex                      = SDL_CreateMutex();
    opaque->acodec_cond                       = SDL_CreateCond();
    opaque->acodec_first_dequeue_output_mutex = SDL_CreateMutex();
    opaque->acodec_first_dequeue_output_cond  = SDL_CreateCond();
    opaque->any_input_mutex                   = SDL_CreateMutex();
    opaque->any_input_cond                    = SDL_CreateCond();
    /* BUGFIX: original tested acodec_cond twice and never checked
     * acodec_mutex or the any_input pair. Validate all six objects. */
    if (!opaque->acodec_mutex || !opaque->acodec_cond ||
        !opaque->acodec_first_dequeue_output_mutex || !opaque->acodec_first_dequeue_output_cond ||
        !opaque->any_input_mutex || !opaque->any_input_cond) {
        ALOGE("%s:open_video_decoder: SDL_CreateCond() failed\n", __func__);
        goto fail;
    }

    /* Build the input MediaFormat (mime, size, csd-0/csd-1, rotation). */
    ret = recreate_format_l(env, node);
    if (ret) {
        ALOGE("amc: recreate_format_l failed\n");
        goto fail;
    }
    /* Ask the Java layer to pick a concrete codec name for mcc. */
    if (!ffpipeline_select_mediacodec_l(pipeline, &opaque->mcc) || !opaque->mcc.codec_name[0]) {
        ALOGE("amc: no suitable codec\n");
        goto fail;
    }

    /* Bind the output surface and configure/start the codec. */
    jsurface = ffpipeline_get_surface_as_global_ref(env, pipeline);
    ret = reconfigure_codec_l(env, node, jsurface);
    J4A_DeleteGlobalRef__p(env, &jsurface);
    if (ret != 0)
        goto fail;

    ffp_set_video_codec_info(ffp, MEDIACODEC_MODULE_NAME, opaque->mcc.codec_name);

    opaque->off_buf_out = 0;
    if (opaque->n_buf_out) {
        int i;
        opaque->amc_buf_out = calloc(opaque->n_buf_out, sizeof(*opaque->amc_buf_out));
        assert(opaque->amc_buf_out != NULL);
        for (i = 0; i < opaque->n_buf_out; i++)
            opaque->amc_buf_out[i].pts = AV_NOPTS_VALUE;
    }

    SDL_SpeedSamplerReset(&opaque->sampler);
    ffp->stat.vdec_type = FFP_PROPV_DECODER_MEDIACODEC;
    return node;

fail:
    /* Frees the node and everything func_destroy releases; returns NULL so
     * the caller falls back to the software decoder. */
    ffpipenode_free_p(&node);
    return NULL;
}
创建pipelinenode IJKFF_Pipenode *node = ffpipenode_alloc(sizeof(IJKFF_Pipenode_Opaque)); 设置node函数指针,与软解函数指针区分 node->func_destroy = func_destroy; if (ffp->mediacodec_sync) { node->func_run_sync = func_run_sync_loop; } else { node->func_run_sync = func_run_sync; } node->func_flush = func_flush;
创建format strcpy(opaque->mcc.mime_type, SDL_AMIME_VIDEO_AVC); //"video/avc"或"video/hevc" opaque->mcc.profile = opaque->codecpar->profile; opaque->mcc.level = opaque->codecpar->level; recreate_format_l(env, node); |----createVideoFormat(env, opaque->mcc.mime_type, width, height); //创建MediaFormat |----MediaFormat_setBuffer(opaque->input_aformat, "csd-0", extradata, extradata_size); //设置extradata |----MediaFormat_setInt32(opaque->input_aformat, "rotation-degrees", rotate_degrees); 选择codec ffpipeline_select_mediacodec_l(pipeline, &opaque->mcc) |----onSelectCodec: mime=video/avc, profile=100, level=31 |----candidate codec: OMX.qcom.video.decoder.avc rank=800 获取surface jsurface = ffpipeline_get_surface_as_global_ref(env, pipeline); 创建解码器即设置参数 reconfigure_codec_l(env, node, jsurface); |----SDL_AMediaCodecJava_createByCodecName(env, mcc->codec_name); |----SDL_AMediaCodec_configure_surface(env, opaque->acodec, opaque->input_aformat, opaque->jsurface, NULL, 0); |----SDL_AMediaCodec_start(opaque->acodec);
decoder_start(&is->viddec, video_thread, ffp, "ff_video_dec"))
/*
 * Video decoder thread entry ("ff_video_dec"): simply runs the pipeline
 * node's synchronous decode loop; returns 0 when no node was created.
 */
static int video_thread(void *arg)
{
    FFPlayer *ffp = (FFPlayer *)arg;
    return ffp->node_vdec ? ffpipenode_run_sync(ffp->node_vdec) : 0;
}
/*
 * Run the node's synchronous decode loop via its vtable slot; blocks until
 * decoding finishes or is aborted.
 */
int ffpipenode_run_sync(IJKFF_Pipenode *node)
{
    int (*run)(IJKFF_Pipenode *) = node->func_run_sync;
    return run(node);
}
node->func_run_sync在ffpipenode_create_video_decoder_from_android_mediacodec中指向了ffpipenode_android_mediacodec_vdec.c中的func_run_sync
static int func_run_sync(IJKFF_Pipenode *node)
{
JNIEnv *env = NULL;
IJKFF_Pipenode_Opaque *opaque = node->opaque;
FFPlayer *ffp = opaque->ffp;
VideoState *is = ffp->is;
Decoder *d = &is->viddec;
PacketQueue *q = d->queue;
int ret = 0;
int dequeue_count = 0;
AVFrame *frame = NULL;
int got_frame = 0;
AVRational tb = is->video_st->time_base;
AVRational frame_rate = av_guess_frame_rate(is->ic, is->video_st, NULL);
double duration;
double pts;
if (!opaque->acodec) {
return ffp_video_thread(ffp);
}
if (JNI_OK != SDL_JNI_SetupThreadEnv(&env)) {
ALOGE("%s: SetupThreadEnv failed\n", __func__);
return -1;
}
frame = av_frame_alloc();
if (!frame)
goto fail;
opaque->enqueue_thread = SDL_CreateThreadEx(&opaque->_enqueue_thread, enqueue_thread_func, node, "amediacodec_input_thread");
if (!opaque->enqueue_thread) {
ALOGE("%s: SDL_CreateThreadEx failed\n", __func__);
ret = -1;
goto fail;
}
while (!q->abort_request) {
int64_t timeUs = opaque->acodec_first_dequeue_output_request ? 0 : AMC_OUTPUT_TIMEOUT_US;
got_frame = 0;
ret = drain_output_buffer(env, node, timeUs, &dequeue_count, frame, &got_frame);
if (opaque->acodec_first_dequeue_output_request) {
SDL_LockMutex(opaque->acodec_first_dequeue_output_mutex);
opaque->acodec_first_dequeue_output_request = false;
SDL_CondSignal(opaque->acodec_first_dequeue_output_cond);
SDL_UnlockMutex(opaque->acodec_first_dequeue_output_mutex);
}
if (ret != 0) {
ret = -1;
if (got_frame && frame->opaque)
SDL_VoutAndroid_releaseBufferProxyP(opaque->weak_vout, (SDL_AMediaCodecBufferProxy **)&frame->opaque, false);
goto fail;
}
if (got_frame) {
duration = (frame_rate.num && frame_rate.den ? av_q2d((AVRational){frame_rate.den, frame_rate.num}) : 0);
pts = (frame->pts == AV_NOPTS_VALUE) ? NAN : frame->pts * av_q2d(tb);
if (ffp->framedrop > 0 || (ffp->framedrop && ffp_get_master_sync_type(is) != AV_SYNC_VIDEO_MASTER)) {
ffp->stat.decode_frame_count++;
if (frame->pts != AV_NOPTS_VALUE) {
double dpts = pts;
double diff = dpts - ffp_get_master_clock(is);
if (!isnan(diff) && fabs(diff) < AV_NOSYNC_THRESHOLD &&
diff - is->frame_last_filter_delay < 0 &&
is->viddec.pkt_serial == is->vidclk.serial &&
is->videoq.nb_packets) {
is->frame_drops_early++;
is->continuous_frame_drops_early++;
if (is->continuous_frame_drops_early > ffp->framedrop) {
is->continuous_frame_drops_early = 0;
} else {
ffp->stat.drop_frame_count++;
ffp->stat.drop_frame_rate = (float)(ffp->stat.drop_frame_count) / (float)(ffp->stat.decode_frame_count);
if (frame->opaque) {
SDL_VoutAndroid_releaseBufferProxyP(opaque->weak_vout, (SDL_AMediaCodecBufferProxy **)&frame->opaque, false);
}
av_frame_unref(frame);
continue;
}
}
}
}
ret = ffp_queue_picture(ffp, frame, pts, duration, av_frame_get_pkt_pos(frame), is->viddec.pkt_serial);
if (ret) {
if (frame->opaque)
SDL_VoutAndroid_releaseBufferProxyP(opaque->weak_vout, (SDL_AMediaCodecBufferProxy **)&frame->opaque, false);
}
av_frame_unref(frame);
}
}
}
func_run_sync
|--SDL_CreateThreadEx(&opaque->_enqueue_thread, enqueue_thread_func, node, "amediacodec_input_thread");读取视频h264的帧
|--while循环
|--drain_output_buffer 解码
|--ffp_queue_picture 解码后的图像输出到队列
enqueue_thread_func
/*
 * MediaCodec input thread ("amediacodec_input_thread"): pulls demuxed packets
 * from the video packet queue and feeds them into the codec input buffers
 * until the queue aborts, the node aborts, or feeding fails.
 * Always aborts the codec's fake dequeue on exit so the drain loop wakes up.
 */
static int enqueue_thread_func(void *arg)
{
    JNIEnv *env = NULL;
    IJKFF_Pipenode *node = arg;
    IJKFF_Pipenode_Opaque *opaque = node->opaque;
    FFPlayer *ffp = opaque->ffp;
    VideoState *is = ffp->is;
    Decoder *d = &is->viddec;
    PacketQueue *q = d->queue;
    int ret = -1;
    int dequeue_count = 0;

    if (SDL_JNI_SetupThreadEnv(&env) == JNI_OK) {
        ret = 0;
        while (!q->abort_request && !opaque->abort) {
            ret = feed_input_buffer(env, node, AMC_INPUT_TIMEOUT_US, &dequeue_count);
            if (ret != 0)
                break;
        }
    } else {
        ALOGE("%s: SetupThreadEnv failed\n", __func__);
    }

    SDL_AMediaCodecFake_abort(opaque->acodec);
    ALOGI("MediaCodec: %s: exit: %d", __func__, ret);
    return ret;
}
while循环调用feed_input_buffer feed_input_buffer |--ffp_packet_queue_get_or_buffering(ffp, d->queue, &pkt, &d->pkt_serial, &d->finished) //获取视频帧(如H264帧)
|--if (opaque->ffp->mediacodec_handle_resolution_change && opaque->codecpar->codec_id == AV_CODEC_ID_H264) //是否处理变清晰度
|--opaque->aformat_need_recreate = true;
|--ffpipeline_set_surface_need_reconfigure_l(pipeline, true);
|--if (ffpipeline_is_surface_need_reconfigure_l(pipeline))
|--ret = recreate_format_l(env, node); //重新设置format,csd0等
|--ret = reconfigure_codec_l(env, node, new_surface); //codec重新设置surface
|--SDL_AMediaCodec_dequeueInputBuffer(opaque->acodec, timeUs);
|--SDL_AMediaCodec_writeInputData(opaque->acodec, input_buffer_index, d->pkt_temp.data, d->pkt_temp.size); //write packet到input_buffer_index
|--SDL_AMediaCodec_queueInputBuffer(opaque->acodec, input_buffer_index, 0, copy_size, time_stamp, queue_flags); 填充数据到inputbuffer,如果播放完成,填充AMEDIACODEC__BUFFER_FLAG_END_OF_STREAM
drain_output_buffer
调用drain_output_buffer_l
drain_output_buffer_l |--SDL_AMediaCodecFake_dequeueOutputBuffer
ffp_queue_picture
|--queue_picture
5. onSelectCodec过程
IjkMediaPlayer_native_setup
|--ijkmp_android_set_mediacodec_select_callback(mp, mediacodec_select_callback, ijkmp_get_weak_thiz(mp));
|--ffpipeline_set_mediacodec_select_callback(mp->ffplayer->pipeline, callback, opaque);
|--
/*
 * Install the codec-selection hook that is invoked when the Android
 * MediaCodec video decoder is created; `opaque` is handed back to the
 * callback unchanged.
 */
void ffpipeline_set_mediacodec_select_callback(IJKFF_Pipeline* pipeline, bool (*callback)(void *opaque, ijkmp_mediacodecinfo_context *mcc), void *opaque)
{
    ALOGD("%s\n", __func__);
    if (check_ffpipeline(pipeline, __func__)) {
        IJKFF_Pipeline_Opaque *priv = pipeline->opaque;
        priv->mediacodec_select_callback        = callback;
        priv->mediacodec_select_callback_opaque = opaque;
    }
}
pipeline->opaque->mediacodec_select_callback = callback;
callback即ijkmp_android_set_mediacodec_select_callback传入的ijkplayer_jni.c中的mediacodec_select_callback。
ffpipenode_create_video_decoder_from_android_mediacodec
|--avcodec_parameters_from_context(opaque->codecpar, opaque->decoder->avctx);//decoder->avctx中保存着profile,level,信息
|--strcpy(opaque->mcc.mime_type, SDL_AMIME_VIDEO_AVC);
|--opaque->mcc.profile = opaque->codecpar->profile;
|--opaque->mcc.level = opaque->codecpar->level;
|--ffpipeline_select_mediacodec_l(pipeline, &opaque->mcc)
|--pipeline->opaque->mediacodec_select_callback(pipeline->opaque->mediacodec_select_callback_opaque, mcc);
|--J4AC_IjkMediaPlayer__onSelectCodec__withCString__asCBuffer(env, weak_this, mcc->mime_type, mcc->profile, mcc->level, mcc->codec_name, sizeof(mcc->codec_name));
#define J4AC_IjkMediaPlayer__onSelectCodec__withCString__asCBuffer J4AC_tv_danmaku_ijk_media_player_IjkMediaPlayer__onSelectCodec__withCString__asCBuffer
/*
 * JNI bridge: asks the Java layer (IjkMediaPlayer.onSelectCodec) to pick a
 * decoder name for the given mime type / profile / level, and copies the
 * result into the caller-supplied buffer.
 * Returns out_buf on success, or NULL if Java returned null or a JNI call
 * raised an exception.
 */
const char *J4AC_tv_danmaku_ijk_media_player_IjkMediaPlayer__onSelectCodec__withCString__asCBuffer(JNIEnv *env, jobject weakThiz, const char *mimeType_cstr__, jint profile, jint level, char *out_buf, int out_len)
{
const char *ret_value = NULL;
const char *c_str = NULL;
// Call into Java to obtain the selected codec name (may be null).
jstring local_string = J4AC_tv_danmaku_ijk_media_player_IjkMediaPlayer__onSelectCodec__withCString(env, weakThiz, mimeType_cstr__, profile, level);
if (J4A_ExceptionCheck__throwAny(env) || !local_string) {
goto fail;
}
c_str = (*env)->GetStringUTFChars(env, local_string, NULL );
if (J4A_ExceptionCheck__throwAny(env) || !c_str) {
goto fail;
}
// Copy the returned name into out_buf (bounded copy).
strlcpy(out_buf, c_str, out_len);
ret_value = out_buf;
fail:
// Release the UTF chars and the local ref on every path.
J4A_ReleaseStringUTFChars__p(env, local_string, &c_str);
J4A_DeleteLocalRef__p(env, &local_string);
return ret_value;
}
J4AC_tv_danmaku_ijk_media_player_IjkMediaPlayer__onSelectCodec__withCString
|--J4AC_tv_danmaku_ijk_media_player_IjkMediaPlayer__onSelectCodec
|--CallStaticObjectMethod(env, class_J4AC_tv_danmaku_ijk_media_player_IjkMediaPlayer.id, class_J4AC_tv_danmaku_ijk_media_player_IjkMediaPlayer.method_onSelectCodec, weakThiz, mimeType, profile, level);
// Entry point called from native code: resolves a decoder name for the
// given mime type / profile / level via the player's selector listener
// (or the built-in default selector when none is installed).
@CalledByNative
private static String onSelectCodec(Object weakThiz, String mimeType, int profile, int level) {
    // `null instanceof X` is false, so this also rejects a null weakThiz.
    if (!(weakThiz instanceof WeakReference<?>))
        return null;
    @SuppressWarnings("unchecked")
    WeakReference<IjkMediaPlayer> ref = (WeakReference<IjkMediaPlayer>) weakThiz;
    IjkMediaPlayer self = ref.get();
    if (self == null)
        return null;
    OnMediaCodecSelectListener selector = self.mOnMediaCodecSelectListener;
    if (selector == null)
        selector = DefaultMediaCodecSelector.sInstance;
    return selector.onMediaCodecSelect(self, mimeType, profile, level);
}
onMediaCodecSelect遍历返回rank最高的codec name
遍历过程,详细见源码
int numCodecs = MediaCodecList.getCodecCount();
for (int i = 0; i < numCodecs; i++) { MediaCodecInfo codecInfo = MediaCodecList.getCodecInfoAt(i); Log.d(TAG, String.format(Locale.US, " found codec: %s", codecInfo.getName())); if (codecInfo.isEncoder()) continue;
//省略。。。
}
/**
 * Default codec selector: enumerates every installed decoder for the given
 * mime type, ranks the candidates via IjkMediaCodecInfo.setupCandidate(),
 * and returns the name of the highest-ranked acceptable decoder, or null
 * to fall back to software decoding.
 */
public static class DefaultMediaCodecSelector implements OnMediaCodecSelectListener {
    public static final DefaultMediaCodecSelector sInstance = new DefaultMediaCodecSelector();

    @SuppressWarnings("deprecation")
    @TargetApi(Build.VERSION_CODES.JELLY_BEAN)
    public String onMediaCodecSelect(IMediaPlayer mp, String mimeType, int profile, int level) {
        // MediaCodecList-based selection requires API 16+.
        if (Build.VERSION.SDK_INT < Build.VERSION_CODES.JELLY_BEAN)
            return null;
        if (TextUtils.isEmpty(mimeType))
            return null;
        Log.i(TAG, String.format(Locale.US, "onSelectCodec: mime=%s, profile=%d, level=%d", mimeType, profile, level));
        ArrayList<IjkMediaCodecInfo> candidateCodecList = new ArrayList<IjkMediaCodecInfo>();
        int numCodecs = MediaCodecList.getCodecCount();
        for (int i = 0; i < numCodecs; i++) {
            MediaCodecInfo codecInfo = MediaCodecList.getCodecInfoAt(i);
            Log.d(TAG, String.format(Locale.US, " found codec: %s", codecInfo.getName()));
            // Only decoders are of interest here.
            if (codecInfo.isEncoder())
                continue;
            String[] types = codecInfo.getSupportedTypes();
            if (types == null)
                continue;
            for (String type : types) {
                if (TextUtils.isEmpty(type))
                    continue;
                Log.d(TAG, String.format(Locale.US, " mime: %s", type));
                if (!type.equalsIgnoreCase(mimeType))
                    continue;
                IjkMediaCodecInfo candidate = IjkMediaCodecInfo.setupCandidate(codecInfo, mimeType);
                if (candidate == null)
                    continue;
                candidateCodecList.add(candidate);
                Log.i(TAG, String.format(Locale.US, "candidate codec: %s rank=%d", codecInfo.getName(), candidate.mRank));
                candidate.dumpProfileLevels(mimeType);
            }
        }
        if (candidateCodecList.isEmpty()) {
            return null;
        }
        // Pick the highest-ranked candidate.
        IjkMediaCodecInfo bestCodec = candidateCodecList.get(0);
        for (IjkMediaCodecInfo codec : candidateCodecList) {
            if (codec.mRank > bestCodec.mRank) {
                bestCodec = codec;
            }
        }
        // Reject decoders ranked below the acceptance threshold.
        if (bestCodec.mRank < IjkMediaCodecInfo.RANK_LAST_CHANCE) {
            // FIX: corrected misspelled log message ("unaccetable" -> "unacceptable").
            Log.w(TAG, String.format(Locale.US, "unacceptable codec: %s", bestCodec.mCodecInfo.getName()));
            return null;
        }
        Log.i(TAG, String.format(Locale.US, "selected codec: %s rank=%d", bestCodec.mCodecInfo.getName(), bestCodec.mRank));
        return bestCodec.mCodecInfo.getName();
    }
}
onSelectCodec: mime=video/avc, profile=100, level=31
found codec: OMX.qcom.video.decoder.avc
mime: video/avc
candidate codec: OMX.qcom.video.decoder.avc rank=800
D/tv.danmaku.ijk.media.player.IjkMediaPlayer: found codec: OMX.qcom.video.decoder.hevc
found codec: OMX.google.h264.decoder
mime: video/avc
candidate codec: OMX.google.h264.decoder rank=200
found codec: c2.qti.avc.decoder
mime: video/avc
candidate codec: c2.qti.avc.decoder rank=100
selected codec: OMX.qcom.video.decoder.avc rank=800
6.硬解码过程
func_run_sync(IJKFF_Pipenode *node)
|--while (!q->abort_request)
|--drain_output_buffer(env, node, timeUs, &dequeue_count, frame, &got_frame);
|--ffp_queue_picture(ffp, frame, pts, duration, av_frame_get_pkt_pos(frame), is->viddec.pkt_serial);
drain_output_buffer展开
drain_output_buffer(env, node, timeUs, &dequeue_count, frame, &got_frame);
|--drain_output_buffer_l
|--output_buffer_index = SDL_AMediaCodecFake_dequeueOutputBuffer(opaque->acodec, &bufferInfo, timeUs);
|--amc_fill_frame(node, frame, got_frame, output_buffer_index, SDL_AMediaCodec_getSerial(opaque->acodec), &bufferInfo);
|--frame->opaque = SDL_VoutAndroid_obtainBufferProxy(opaque->weak_vout, acodec_serial, output_buffer_index, buffer_info);//保存到proxy->buffer_index = buffer_index; 返回proxy
frame->opaque = proxy //output_buffer_index存入到proxy,即frame->opaque
/*
 * Borrow a buffer proxy (from the pool when possible, freshly allocated
 * otherwise) and bind it to one MediaCodec output buffer.  Newly allocated
 * proxies are also registered with the overlay manager so they can be
 * reclaimed later.  Returns NULL on allocation failure.
 */
static SDL_AMediaCodecBufferProxy *SDL_VoutAndroid_obtainBufferProxy_l(SDL_Vout *vout, int acodec_serial, int buffer_index, SDL_AMediaCodecBufferInfo *buffer_info)
{
    SDL_Vout_Opaque *opaque = vout->opaque;
    SDL_AMediaCodecBufferProxy *proxy;

    if (ISDL_Array__size(&opaque->overlay_pool) <= 0) {
        /* Pool empty: allocate a fresh proxy and register it with the manager. */
        proxy = (SDL_AMediaCodecBufferProxy *)mallocz(sizeof(SDL_AMediaCodecBufferProxy));
        if (!proxy)
            return NULL;
        SDL_AMediaCodecBufferProxy_init(proxy);
        ISDL_Array__push_back(&opaque->overlay_manager, proxy);
    } else {
        /* Reuse a pooled proxy after clearing its previous state. */
        proxy = ISDL_Array__pop_back(&opaque->overlay_pool);
        SDL_AMediaCodecBufferProxy_reset(proxy);
    }

    proxy->buffer_id     = opaque->next_buffer_id++;
    proxy->acodec_serial = acodec_serial;
    proxy->buffer_index  = buffer_index;
    proxy->buffer_info   = *buffer_info;

    AMCTRACE("%s: [%d] ++++++++ proxy %d: vout: %d idx: %d fake: %s",
             __func__,
             proxy->buffer_id,
             proxy->acodec_serial,
             SDL_AMediaCodec_getSerial(opaque->acodec),
             proxy->buffer_index,
             (proxy->buffer_info.flags & AMEDIACODEC__BUFFER_FLAG_FAKE_FRAME) ? "YES" : "NO");
    return proxy;
}
ffp_queue_picture展开 ffp_queue_picture(ffp, frame, pts, duration, av_frame_get_pkt_pos(frame), is->viddec.pkt_serial); |--queue_picture(ffp, src_frame, pts, duration, pos, serial) |--alloc_picture(ffp, src_frame->format);
|--vp->bmp = SDL_Vout_CreateOverlay(vp->width, vp->height, frame_format, ffp->vout);
|--func_create_overlay(int width, int height, int frame_format, SDL_Vout *vout)
|--func_create_overlay_l(width, height, frame_format, vout);
|--SDL_VoutFillFrameYUVOverlay(vp->bmp, src_frame)
|--func_fill_frame(SDL_VoutOverlay *overlay, const AVFrame *frame)
|--SDL_VoutAndroid_releaseBufferProxyP(opaque->vout, (SDL_AMediaCodecBufferProxy **)&opaque->buffer_proxy, false);
|--SDL_VoutAndroid_releaseBufferProxy(vout, *proxy, render);
|--SDL_VoutAndroid_releaseBufferProxy_l
|--SDL_AMediaCodec_releaseOutputBuffer(opaque->acodec, proxy->buffer_index, render);
|--opaque->buffer_proxy = (SDL_AMediaCodecBufferProxy *)frame->opaque;
|--overlay->format = SDL_FCC__AMC;
func_create_overlay_l 展开,根据frame_format创建overlay
/*
 * Create a display overlay matching the decoded frame's pixel format:
 * MediaCodec frames get a zero-copy AMediaCodec overlay, everything else
 * falls back to the generic FFmpeg overlay.
 */
static SDL_VoutOverlay *func_create_overlay_l(int width, int height, int frame_format, SDL_Vout *vout)
{
    if (frame_format == IJK_AV_PIX_FMT__ANDROID_MEDIACODEC)
        return SDL_VoutAMediaCodec_CreateOverlay(width, height, vout);

    return SDL_VoutFFmpeg_CreateOverlay(width, height, frame_format, vout);
}
/*
 * Build an SDL overlay whose pixel data lives inside a MediaCodec output
 * buffer (format SDL_FCC__AMC); the actual buffer proxy is attached later
 * by func_fill_frame().  Returns NULL if the overlay cannot be allocated.
 */
SDL_VoutOverlay *SDL_VoutAMediaCodec_CreateOverlay(int width, int height, SDL_Vout *vout)
{
    SDL_VoutOverlay *overlay = SDL_VoutOverlay_CreateInternal(sizeof(SDL_VoutOverlay_Opaque));
    if (!overlay) {
        ALOGE("overlay allocation failed");
        return NULL;
    }

    SDL_VoutOverlay_Opaque *priv = overlay->opaque;
    /* NOTE(review): SDL_CreateMutex() result is not checked here — confirm
     * the SDL mutex helpers tolerate a NULL mutex before relying on it. */
    priv->mutex        = SDL_CreateMutex();
    priv->vout         = vout;
    priv->acodec       = NULL;
    priv->buffer_proxy = NULL;

    overlay->opaque_class = &g_vout_overlay_amediacodec_class;
    overlay->format       = SDL_FCC__AMC;
    overlay->pitches      = priv->pitches;
    overlay->pixels       = priv->pixels;
    overlay->w            = width;
    overlay->h            = height;
    overlay->is_private   = 1;

    overlay->free_l          = overlay_free_l;
    overlay->lock            = overlay_lock;
    overlay->unlock          = overlay_unlock;
    overlay->unref           = overlay_unref;
    overlay->func_fill_frame = func_fill_frame;
    return overlay;
}
vp->bmp 就是SDL_VoutOverlay *overlay
7.video渲染过程
video_refresh(ffp, &remaining_time);
|--video_display2(ffp);
|--video_image_display2(ffp);
|--SDL_VoutDisplayYUVOverlay(ffp->vout, vp->bmp);
|--return vout->display_overlay(vout, overlay);
|--func_display_overlay
|--func_display_overlay_l(vout, overlay);
/*
 * Push one decoded overlay to the display.
 * MediaCodec overlays (SDL_FCC__AMC) are rendered by releasing the output
 * buffer with render=true (zero-copy path, ANativeWindow only); GLES-only
 * formats go through EGL; everything else falls back to plain
 * ANativeWindow blitting.  Returns 0 on success, -1 on error.
 */
static int func_display_overlay_l(SDL_Vout *vout, SDL_VoutOverlay *overlay)
{
    SDL_Vout_Opaque *opaque = vout->opaque;
    ANativeWindow *native_window = opaque->native_window;

    if (!native_window) {
        /* Warn only once while the window stays NULL, to avoid log spam. */
        if (!opaque->null_native_window_warned) {
            opaque->null_native_window_warned = 1;
            ALOGW("func_display_overlay_l: NULL native_window");
        }
        return -1;
    } else {
        /* FIX: re-arm the warning once a window is available again.
         * The original set this to 1, which permanently suppressed any
         * later "NULL native_window" warning after the first frame. */
        opaque->null_native_window_warned = 0;
    }

    if (!overlay) {
        ALOGE("func_display_overlay_l: NULL overlay");
        return -1;
    }

    if (overlay->w <= 0 || overlay->h <= 0) {
        ALOGE("func_display_overlay_l: invalid overlay dimensions(%d, %d)", overlay->w, overlay->h);
        return -1;
    }

    switch(overlay->format) {
    case SDL_FCC__AMC: {
        // only ANativeWindow support
        IJK_EGL_terminate(opaque->egl);
        return SDL_VoutOverlayAMediaCodec_releaseFrame_l(overlay, NULL, true);
    }
    case SDL_FCC_RV24:
    case SDL_FCC_I420:
    case SDL_FCC_I444P10LE: {
        // only GLES support
        if (opaque->egl)
            return IJK_EGL_display(opaque->egl, native_window, overlay);
        break;
    }
    case SDL_FCC_YV12:
    case SDL_FCC_RV16:
    case SDL_FCC_RV32: {
        // both GLES & ANativeWindow support
        if (vout->overlay_format == SDL_FCC__GLES2 && opaque->egl)
            return IJK_EGL_display(opaque->egl, native_window, overlay);
        break;
    }
    default:
        break;
    }

    // fallback to ANativeWindow
    IJK_EGL_terminate(opaque->egl);
    return SDL_Android_NativeWindow_display_l(native_window, overlay);
}
MediaCodec只支持NativeWindow case SDL_FCC__AMC: { // only ANativeWindow support IJK_EGL_terminate(opaque->egl); return SDL_VoutOverlayAMediaCodec_releaseFrame_l(overlay, NULL, true); }
SDL_VoutOverlayAMediaCodec_releaseFrame_l |--SDL_VoutOverlay_Opaque *opaque = overlay->opaque; |--SDL_VoutAndroid_releaseBufferProxyP_l(opaque->vout, &opaque->buffer_proxy, render); |--SDL_VoutAndroid_releaseBufferProxy_l(vout, *proxy, render); |--SDL_AMediaCodec_releaseOutputBuffer(opaque->acodec, proxy->buffer_index, render);
opaque->buffer_proxy就是SDL_VoutFillFrameYUVOverlay中