Building ijkplayer into an aar with https, rtsp, recording and snapshot support

This walkthrough compiles ijkplayer with https and rtsp support plus live-stream recording and snapshot features.

A ready-made aar can be taken directly from the android-aar directory of the demo repository;
if you need something different, pull the source and rebuild it yourself.

Demo: https://github.com/zzhengzhe/ijkplayer.git

  • ijkplayer build process

1. Install git and yasm

sudo apt-get install git
sudo apt-get install yasm

2. Configure the SDK and NDK environment variables

Note: the latest NDK (r23b) fails later in the build; download r10e directly instead: https://dl.google.com/android/repository/android-ndk-r10e-linux-x86_64.zip
Open ~/.bashrc and add the environment variables: point ANDROID_SDK and ANDROID_NDK at the SDK and NDK install directories, and append the NDK directory to PATH so ndk-build can be found.
Verify the NDK setup with: ndk-build -v

 

3. Get the ijkplayer source

git clone https://github.com/Bilibili/ijkplayer.git ijkplayer-android
cd ijkplayer-android
git checkout -B latest k0.8.8

4. Initialize

./init-android.sh

5. Modify module-lite.sh to enable rtsp: append the following flags to config/module-lite.sh, then relink config/module.sh to it (a playback-side usage sketch follows these commands)

export COMMON_FF_CFG_FLAGS="$COMMON_FF_CFG_FLAGS --enable-protocol=rtp"
export COMMON_FF_CFG_FLAGS="$COMMON_FF_CFG_FLAGS --enable-demuxer=rtsp"
export COMMON_FF_CFG_FLAGS="$COMMON_FF_CFG_FLAGS --enable-decoder=mjpeg"
export COMMON_FF_CFG_FLAGS="$COMMON_FF_CFG_FLAGS --enable-demuxer=mjpeg"

cd config
rm module.sh      
ln -s module-lite.sh module.sh
source module.sh
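
With the rtsp protocol and demuxer compiled in, the rebuilt player can open rtsp:// sources directly. As an illustrative aside (not part of the build steps), a minimal Java-side sketch; the URL is a placeholder and rtsp_transport is a standard FFmpeg demuxer option that forces RTP over TCP:

import tv.danmaku.ijk.media.player.IjkMediaPlayer;

// Illustrative sketch: open an RTSP source with the rebuilt library.
IjkMediaPlayer openRtsp(String url) throws java.io.IOException {
    IjkMediaPlayer player = new IjkMediaPlayer();
    // Force RTP over TCP; the default UDP transport tends to lose packets on weak networks.
    player.setOption(IjkMediaPlayer.OPT_CATEGORY_FORMAT, "rtsp_transport", "tcp");
    player.setDataSource(url);   // e.g. "rtsp://192.168.1.64:554/stream1" (placeholder)
    player.prepareAsync();       // attach a Surface and call start() from onPrepared
    return player;
}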

6. Add recording and snapshot support

File: ijkmedia/ijkplayer/Android.mk

# modified: add -ljnigraphics for the AndroidBitmap_* calls used by the snapshot code
# LOCAL_LDLIBS += -llog -landroid
LOCAL_LDLIBS += -llog -landroid -ljnigraphics

File: ijkmedia/ijkplayer/android/ijkplayer_jni.c

#include "ijksdl/android/ijksdl_android_jni.h"
//新增
#include <android/bitmap.h>
//新增

static jboolean
IjkMediaPlayer_getCurrentFrame(JNIEnv *env, jobject thiz, jobject bitmap)
{
    jboolean retval = JNI_TRUE;
    IjkMediaPlayer *mp = jni_get_media_player(env, thiz);
    JNI_CHECK_GOTO(mp, env, NULL, "mpjni: getCurrentFrame: null mp", LABEL_RETURN);

    uint8_t *frame_buffer = NULL;

    // The Bitmap is expected to be ARGB_8888 and exactly the video frame size,
    // because the native side copies width * 4 bytes per row.
    if (0 > AndroidBitmap_lockPixels(env, bitmap, (void **)&frame_buffer)) {
        (*env)->ThrowNew(env, (*env)->FindClass(env, "java/io/IOException"), "Unable to lock pixels.");
        retval = JNI_FALSE;
        goto LABEL_RETURN;
    }

    ijkmp_get_current_frame(mp, frame_buffer);

    if (0 > AndroidBitmap_unlockPixels(env, bitmap)) {
        (*env)->ThrowNew(env, (*env)->FindClass(env, "java/io/IOException"), "Unable to unlock pixels.");
        retval = JNI_FALSE;
        goto LABEL_RETURN;
    }

LABEL_RETURN:
    ijkmp_dec_ref_p(&mp);
    return retval;
}


static jint
IjkMediaPlayer_startRecord(JNIEnv *env, jobject thiz, jstring file)
{
    jint retval = 0;
    IjkMediaPlayer *mp = jni_get_media_player(env, thiz);
    JNI_CHECK_GOTO(mp, env, NULL, "mpjni: startRecord: null mp", LABEL_RETURN);

    const char *nativeString = (*env)->GetStringUTFChars(env, file, 0);
    retval = ijkmp_start_record(mp, nativeString);
    (*env)->ReleaseStringUTFChars(env, file, nativeString); // avoid leaking the UTF chars

LABEL_RETURN:
    ijkmp_dec_ref_p(&mp);
    return retval;
}

static jint
IjkMediaPlayer_stopRecord(JNIEnv *env, jobject thiz)
{
    jint retval = 0;
    IjkMediaPlayer *mp = jni_get_media_player(env, thiz);
    JNI_CHECK_GOTO(mp, env, NULL, "mpjni: stopRecord: null mp", LABEL_RETURN);

    retval = ijkmp_stop_record(mp);

LABEL_RETURN:
    ijkmp_dec_ref_p(&mp);
    return retval;
}
    // added to the existing JNINativeMethod registration table, next to getDuration:
    { "getDuration",            "()J",      (void *) IjkMediaPlayer_getDuration },
    // added
    { "getCurrentFrame",        "(Landroid/graphics/Bitmap;)Z",      (void *) IjkMediaPlayer_getCurrentFrame },
    { "startRecord",            "(Ljava/lang/String;)I",      (void *) IjkMediaPlayer_startRecord },
    { "stopRecord",             "()I",      (void *) IjkMediaPlayer_stopRecord },
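
For reference, the JNI signatures above map to these Java-side declarations on tv.danmaku.ijk.media.player.IjkMediaPlayer (the same declarations are added again in the aar packaging step below):

    public native boolean getCurrentFrame(Bitmap bitmap); // "(Landroid/graphics/Bitmap;)Z"
    public native int startRecord(String file);           // "(Ljava/lang/String;)I"
    public native int stopRecord();                        // "()I"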

File: ijkmedia/ijkplayer/ff_ffplay.c

// modified: hook the recording logic into read_thread
static int read_thread(void *arg)
{
    /* ... unchanged code ... */

    /* offset should be seeked*/
    if (ffp->seek_at_start > 0) {
        ffp_seek_to_l(ffp, (long)(ffp->seek_at_start));
    }

    for (;;) {
// start of inserted code
        // Remember the last packet whose dts equals pts before recording starts;
        // these values become the timestamp base once recording begins.
        // Note: comparing stream_index against AVMEDIA_TYPE_* assumes the video
        // stream is index 0 and the audio stream is index 1.
        if (!ffp->is_first && pkt->pts == pkt->dts) {
            if (pkt->stream_index == AVMEDIA_TYPE_AUDIO) {
                ffp->start_a_pts = pkt->pts;
                ffp->start_a_dts = pkt->dts;
            }
        }
        if (pkt->stream_index == AVMEDIA_TYPE_VIDEO) {
            if (!ffp->is_first) {
                ffp->start_v_pts = pkt->pts;
                ffp->start_v_dts = pkt->dts;
            }
        }
        // While recording, also write this packet into the output file.
        if (ffp->is_record) {
            if (0 != ffp_record_file(ffp, pkt)) {
                ffp->record_error = 1;
                ffp_stop_recording_l(ffp);
            }
        }
// end of inserted code

        if (is->abort_request)
            break;
#ifdef FFP_MERGE
    /* ... rest of read_thread unchanged ... */
}
// added: new functions for snapshot and recording

void ffp_get_current_frame_l(FFPlayer *ffp, uint8_t *frame_buf)
{
  ALOGD("=============>start snapshot\n");

  VideoState *is = ffp->is;
  Frame *vp;
  int i = 0, linesize = 0, pixels = 0;
  uint8_t *src;

  vp = &is->pictq.queue[is->pictq.rindex];
  int height = vp->bmp->h;
  int width = vp->bmp->w;

  ALOGD("=============>%d X %d === %d\n", width, height, vp->bmp->pitches[0]);

  // copy data to bitmap in java code
  linesize = vp->bmp->pitches[0];
  src = vp->bmp->pixels[0];
  pixels = width * 4;
  for (i = 0; i < height; i++) {
      memcpy(frame_buf + i * pixels, src + i * linesize, pixels);
  }

  ALOGD("=============>end snapshot\n");
}

int ffp_start_recording_l(FFPlayer *ffp, const char *file_name)
{
    assert(ffp);

    VideoState *is = ffp->is;

    ffp->m_ofmt_ctx = NULL;
    ffp->m_ofmt = NULL;
    ffp->is_record = 0;
    ffp->record_error = 0;

    if (!file_name || !strlen(file_name)) { // no output path
        av_log(ffp, AV_LOG_ERROR, "filename is invalid");
        goto end;
    }

    if (!is || !is->ic|| is->paused || is->abort_request) { // 没有上下文,或者上下文已经停止
        av_log(ffp, AV_LOG_ERROR, "is,is->ic,is->paused is invalid");
        goto end;
    }

    if (ffp->is_record) { // 已经在录制
        av_log(ffp, AV_LOG_ERROR, "recording has started");
        goto end;
    }

    // allocate an output AVFormatContext (mp4 muxer) for the target file
    avformat_alloc_output_context2(&ffp->m_ofmt_ctx, NULL, "mp4", file_name);
    if (!ffp->m_ofmt_ctx) {
        av_log(ffp, AV_LOG_ERROR, "Could not create output context filename is %s\n", file_name);
        goto end;
    }
    ffp->m_ofmt = ffp->m_ofmt_ctx->oformat;

    for (int i = 0; i < is->ic->nb_streams; i++) {
        // create an output stream mirroring each input stream
        AVStream *in_stream = is->ic->streams[i];
        AVStream *out_stream = avformat_new_stream(ffp->m_ofmt_ctx, in_stream->codec->codec);
        if (!out_stream) {
            av_log(ffp, AV_LOG_ERROR, "Failed allocating output stream\n");
            goto end;
        }

        // copy the input stream's codec parameters to the output stream's AVCodecContext
        av_log(ffp, AV_LOG_DEBUG, "in_stream->codec: %p\n", (void *)in_stream->codec);
        if (avcodec_copy_context(out_stream->codec, in_stream->codec) < 0) {
            av_log(ffp, AV_LOG_ERROR, "Failed to copy context from input to output stream codec context\n");
            goto end;
        }

        out_stream->codec->codec_tag = 0;
        if (ffp->m_ofmt_ctx->oformat->flags & AVFMT_GLOBALHEADER) {
            out_stream->codec->flags |= CODEC_FLAG_GLOBAL_HEADER;
        }
    }

    av_dump_format(ffp->m_ofmt_ctx, 0, file_name, 1);

    // open the output file
    if (!(ffp->m_ofmt->flags & AVFMT_NOFILE)) {
        if (avio_open(&ffp->m_ofmt_ctx->pb, file_name, AVIO_FLAG_WRITE) < 0) {
            av_log(ffp, AV_LOG_ERROR, "Could not open output file '%s'", file_name);
            goto end;
        }
    }

    // write the container header
    if (avformat_write_header(ffp->m_ofmt_ctx, NULL) < 0) {
        av_log(ffp, AV_LOG_ERROR, "Error occurred when opening output file\n");
        goto end;
    }

    ffp->is_record = 1;
    ffp->record_error = 0;
    pthread_mutex_init(&ffp->record_mutex, NULL);

    return 0;
end:
    // clean up the half-initialized output context on any failure path
    if (ffp->m_ofmt_ctx) {
        if (ffp->m_ofmt_ctx->pb && !(ffp->m_ofmt_ctx->oformat->flags & AVFMT_NOFILE))
            avio_closep(&ffp->m_ofmt_ctx->pb);
        avformat_free_context(ffp->m_ofmt_ctx);
        ffp->m_ofmt_ctx = NULL;
    }
    ffp->record_error = 1;
    return -1;
}
}

int ffp_record_file(FFPlayer *ffp, AVPacket *packet)
{
    assert(ffp);
    VideoState *is = ffp->is;
    int ret = 0;
    AVStream *in_stream;
    AVStream *out_stream;

    if (ffp->is_record) {
        if (packet == NULL) {
            ffp->record_error = 1;
            av_log(ffp, AV_LOG_ERROR, "packet == NULL");
            return -1;
        }

        // Use a packet separate from the one used for playback, otherwise the picture stalls.
        AVPacket *pkt = av_packet_alloc();
        if (0 == av_packet_ref(pkt, packet)) {
            pthread_mutex_lock(&ffp->record_mutex);

            if (!ffp->is_first) { // first recorded packet: timestamps restart from 0
                ffp->is_first = 1;
                pkt->pts = 0;
                pkt->dts = 0;
            } else {
                // Later packets subtract the values captured when recording started,
                // so the recorded timestamps stay continuous from zero.
                if (pkt->stream_index == AVMEDIA_TYPE_AUDIO) {
                    pkt->pts = llabs(pkt->pts - ffp->start_a_pts);
                    pkt->dts = llabs(pkt->dts - ffp->start_a_dts);
                }
                else if (pkt->stream_index == AVMEDIA_TYPE_VIDEO) {
                    pkt->pts = pkt->dts = llabs(pkt->dts - ffp->start_v_dts);
                }
            }

            in_stream  = is->ic->streams[pkt->stream_index];
            out_stream = ffp->m_ofmt_ctx->streams[pkt->stream_index];

            // rescale PTS/DTS/duration into the output stream's time base
            pkt->pts = av_rescale_q_rnd(pkt->pts, in_stream->time_base, out_stream->time_base, (AV_ROUND_NEAR_INF|AV_ROUND_PASS_MINMAX));
            pkt->dts = av_rescale_q_rnd(pkt->dts, in_stream->time_base, out_stream->time_base, (AV_ROUND_NEAR_INF|AV_ROUND_PASS_MINMAX));
            pkt->duration = av_rescale_q(pkt->duration, in_stream->time_base, out_stream->time_base);
            pkt->pos = -1;

            // write the packet into the output file
            if ((ret = av_interleaved_write_frame(ffp->m_ofmt_ctx, pkt)) < 0) {
                av_log(ffp, AV_LOG_ERROR, "Error muxing packet\n");
            }

            av_packet_unref(pkt);
            pthread_mutex_unlock(&ffp->record_mutex);
        } else {
            av_log(ffp, AV_LOG_ERROR, "av_packet_ref failed\n");
        }
        av_packet_free(&pkt); // fix: release the packet allocated above
    }
    return ret;
}

int ffp_record_isfinished_l(FFPlayer *ffp)
{
    return 0;
}

int ffp_stop_recording_l(FFPlayer *ffp)
{
    assert(ffp);
    if (ffp->is_record) {
        ffp->is_record = 0;
        pthread_mutex_lock(&ffp->record_mutex);
        if (ffp->m_ofmt_ctx != NULL) {
            av_write_trailer(ffp->m_ofmt_ctx);
            if (ffp->m_ofmt_ctx && !(ffp->m_ofmt->flags & AVFMT_NOFILE)) {
                avio_close(ffp->m_ofmt_ctx->pb);
            }
            avformat_free_context(ffp->m_ofmt_ctx);
            ffp->m_ofmt_ctx = NULL;
            ffp->is_first = 0;
        }
        pthread_mutex_unlock(&ffp->record_mutex);
        pthread_mutex_destroy(&ffp->record_mutex);
        av_log(ffp, AV_LOG_DEBUG, "stopRecord ok\n");
    } else {
        av_log(ffp, AV_LOG_ERROR, "don't need stopRecord\n");
    }
    return 0;
} 

File: ijkmedia/ijkplayer/ff_ffplay.h

void      ffp_set_property_int64(FFPlayer *ffp, int id, int64_t value);
// added
int       ffp_start_recording_l(FFPlayer *ffp, const char *file_name);
int       ffp_stop_recording_l(FFPlayer *ffp);
int       ffp_record_file(FFPlayer *ffp, AVPacket *packet);
void      ffp_get_current_frame_l(FFPlayer *ffp, uint8_t *frame_buf);
int       ffp_record_isfinished_l(FFPlayer *ffp);

File: ijkmedia/ijkplayer/ff_ffplay_def.h (add the new fields to the FFPlayer struct)

    int render_wait_start;
    // added
    AVFormatContext *m_ofmt_ctx;        // output context used for recording
    AVOutputFormat *m_ofmt;
    pthread_mutex_t record_mutex;       // protects the recording muxer
    int is_record;                      // currently recording
    int record_error;
    int is_first;                       // set once the first packet has been recorded
    int64_t start_v_pts;                // video pts when recording started
    int64_t start_v_dts;                // video dts when recording started
    int64_t start_a_pts;                // audio pts when recording started
    int64_t start_a_dts;                // audio dts when recording started

File: ijkmedia/ijkplayer/ijkplayer.c (the new ijkmp_* functions below should also be declared in ijkmedia/ijkplayer/ijkplayer.h so that ijkplayer_jni.c can call them)

// added
static void ijkmp_get_current_frame_l(IjkMediaPlayer *mp, uint8_t *frame_buf)
{
  ffp_get_current_frame_l(mp->ffplayer, frame_buf);
}

void ijkmp_get_current_frame(IjkMediaPlayer *mp, uint8_t *frame_buf)
{
  assert(mp);
  pthread_mutex_lock(&mp->mutex);
  ijkmp_get_current_frame_l(mp, frame_buf);
  pthread_mutex_unlock(&mp->mutex);
}

static int ijkmp_start_recording_l(IjkMediaPlayer *mp, const char *filePath)
{
  return ffp_start_recording_l(mp->ffplayer, filePath);
}

int ijkmp_start_record(IjkMediaPlayer *mp, const char *filePath)
{
  assert(mp);
  pthread_mutex_lock(&mp->mutex);
  av_log(mp->ffplayer, AV_LOG_WARNING, "ijkmp_start_record\n");
  int retval = ijkmp_start_recording_l(mp, filePath);
  pthread_mutex_unlock(&mp->mutex);
  return retval;
}

static int ijkmp_stop_recording_l(IjkMediaPlayer *mp)
{
  return ffp_stop_recording_l(mp->ffplayer);
} 

int ijkmp_stop_record(IjkMediaPlayer *mp)
{
  assert(mp);
  pthread_mutex_lock(&mp->mutex);
  av_log(mp->ffplayer,AV_LOG_WARNING,"cjz ijkmp_stop_recording");
  int retval = ijkmp_stop_recording_l(mp);
  pthread_mutex_unlock(&mp->mutex);
  return retval;
}

static int ijkmp_isRecordFinished_l(IjkMediaPlayer *mp)
{
  return ffp_record_isfinished_l(mp->ffplayer);
}

int ijkmp_isRecordFinished(IjkMediaPlayer *mp)
{
  assert(mp);
  pthread_mutex_lock(&mp->mutex);
  av_log(mp->ffplayer, AV_LOG_WARNING, "ijkmp_isRecordFinished\n");
  int retval = ijkmp_isRecordFinished_l(mp);
  pthread_mutex_unlock(&mp->mutex);
  return retval;
}

int ijkmp_isRecording(IjkMediaPlayer *mp) {
    return mp->ffplayer->is_record;
}

7. Enable https

cd ..
./init-android-openssl.sh

8. Clean

cd android/contrib
./compile-openssl.sh clean
./compile-ffmpeg.sh clean

9. Build openssl

./compile-openssl.sh all

10. Build ffmpeg

./compile-ffmpeg.sh all

11. Build ijkplayer

cd ..
./compile-ijk.sh all

  • aar packaging process

After the build succeeds, an ijkplayer project is generated under the android directory; import it into Android Studio.

  1. Right-click the project, choose Open Module Settings, and use the minus button to remove every module except ijkplayer-example and ijkplayer-java.

  2. Add the .so files for the ABIs you need: create a libs folder under ijkplayer-java/src/main/, then open ijkplayer-xxx/main/libs and copy the armeabi-v7a, arm64-v8a and armeabi folders it contains into ijkplayer-java's libs folder.

  3. Modify IMediaPlayer, adding the new methods to the interface:

    // added
        int startRecord(String file);
        int stopRecord();
        boolean getCurrentFrame(Bitmap bitmap);
  4. Modify IjkMediaPlayer/AndroidMediaPlayer/MediaPlayerProxy and add the corresponding methods; in IjkMediaPlayer they are declared native (see the usage sketch after this list):

        @Override
        public native int startRecord(String file);
    
        @Override
        public native int stopRecord();
    
        @Override
        public native boolean getCurrentFrame(Bitmap bitmap);
    

  5. Adjust ijkplayer-java's build.gradle as needed, then build the aar:

    In the Gradle panel: ijkplayer-java -> Tasks -> build -> assemble
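
For completeness, a minimal usage sketch of the new API from application code; the method name and file path are illustrative, and the player is assumed to be prepared and playing:

import android.graphics.Bitmap;
import tv.danmaku.ijk.media.player.IjkMediaPlayer;

// Illustrative only: take a snapshot and record a clip with the new methods.
void snapshotAndRecord(IjkMediaPlayer player) {
    // The native side copies width * 4 bytes per row, so the Bitmap should be
    // ARGB_8888 and sized exactly to the video frame.
    Bitmap frame = Bitmap.createBitmap(player.getVideoWidth(),
            player.getVideoHeight(), Bitmap.Config.ARGB_8888);
    if (player.getCurrentFrame(frame)) {
        // save or display the snapshot
    }

    player.startRecord("/sdcard/ijk_record_demo.mp4"); // placeholder path
    // ... stop when the clip is long enough ...
    player.stopRecord();
}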
