FFmpeg Audio Decoding and Playback with AudioTrack

I. How It Works

    Use FFmpeg to decode an audio file to raw PCM, then feed the decoded data to an AudioTrack for playback.

II. Implementation

    1. Import the FFmpeg header files and .so libraries into the project.
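
    The exact locations are up to you, but the CMakeLists.txt and the #include paths used later in this article assume a layout roughly like the following (prebuilt armeabi .so files from an FFmpeg 2.x-era build):

        app/CMakeLists.txt
        app/src/main/cpp/native-lib.c
        app/src/main/cpp/include/ffmpeg/          (FFmpeg headers: libavformat, libavcodec, ...)
        app/src/main/jniLibs/armeabi/             (libavcodec-56.so, libavformat-56.so and the other .so files)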

   

   2. Declare the native method and the AudioTrack factory method

        PlayerControl.java

package com.xiaofan.testffmpeg2;
import android.media.AudioFormat;
import android.media.AudioManager;
import android.media.AudioTrack;

public class PlayerControl {
    public native void sound(String input, String output);

    /**
     * Create an AudioTrack matching the decoded audio.
     *
     * @param sampleRateInHz sample rate of the decoded audio, in Hz
     * @param channelsNB     number of channels
     * @return an AudioTrack configured for 16-bit PCM streaming
     */
    public AudioTrack createAudioTrack(int sampleRateInHz,int channelsNB) {

        int audioFormat = AudioFormat.ENCODING_PCM_16BIT;
        //Channel configuration: mono for one channel, otherwise stereo
        int channelConfig;
        if(channelsNB==1){
            channelConfig = AudioFormat.CHANNEL_OUT_MONO;
        }else if(channelsNB==2){
            channelConfig = AudioFormat.CHANNEL_OUT_STEREO;
        }else {
            channelConfig = AudioFormat.CHANNEL_OUT_STEREO;
        }
        int bufferSizeInBytes = AudioTrack.getMinBufferSize(sampleRateInHz, channelConfig, audioFormat);

        AudioTrack audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC,
                sampleRateInHz, channelConfig,
                audioFormat, bufferSizeInBytes,
                AudioTrack.MODE_STREAM);

        return audioTrack;
    }

    static {
        System.loadLibrary("avutil-54");
        System.loadLibrary("swresample-1");
        System.loadLibrary("avcodec-56");
        System.loadLibrary("avformat-56");
        System.loadLibrary("swscale-3");
        System.loadLibrary("postproc-53");
        System.loadLibrary("avfilter-5");
        System.loadLibrary("avdevice-56");
        System.loadLibrary("native-lib");
    }
}
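
The AudioTrack constructor used above still works but is marked deprecated from API 23 onward. As a sketch only (not part of the original project; it additionally needs import android.media.AudioAttributes, and note that native-lib.c below looks up the method by the name createAudioTrack, so the GetMethodID call would have to be adjusted), the same track could be built with AudioTrack.Builder:

    public AudioTrack createAudioTrackApi23(int sampleRateInHz, int channelsNB) {
        int channelConfig = (channelsNB == 1)
                ? AudioFormat.CHANNEL_OUT_MONO
                : AudioFormat.CHANNEL_OUT_STEREO;
        int bufferSizeInBytes = AudioTrack.getMinBufferSize(
                sampleRateInHz, channelConfig, AudioFormat.ENCODING_PCM_16BIT);
        return new AudioTrack.Builder()
                .setAudioAttributes(new AudioAttributes.Builder()
                        .setUsage(AudioAttributes.USAGE_MEDIA)
                        .setContentType(AudioAttributes.CONTENT_TYPE_MUSIC)
                        .build())
                .setAudioFormat(new AudioFormat.Builder()
                        .setEncoding(AudioFormat.ENCODING_PCM_16BIT)
                        .setSampleRate(sampleRateInHz)
                        .setChannelMask(channelConfig)
                        .build())
                .setBufferSizeInBytes(bufferSizeInBytes)
                .setTransferMode(AudioTrack.MODE_STREAM)
                .build();
    }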
3. Write the CMakeLists.txt


cmake_minimum_required(VERSION 3.4.1)


add_library(native-lib
             SHARED
             src/main/cpp/native-lib.c )
find_library(log-lib
              log )


add_library(libavcodec-56
            SHARED
            IMPORTED)

set_target_properties(libavcodec-56
                      PROPERTIES IMPORTED_LOCATION
                      ${CMAKE_CURRENT_SOURCE_DIR}/src/main/jniLibs/armeabi/libavcodec-56.so)

add_library(libavdevice-56
            SHARED
            IMPORTED)
set_target_properties(libavdevice-56
                      PROPERTIES IMPORTED_LOCATION
                      ${CMAKE_CURRENT_SOURCE_DIR}/src/main/jniLibs/armeabi/libavdevice-56.so)

add_library(libavfilter-5
            SHARED
            IMPORTED)
set_target_properties(libavfilter-5
                      PROPERTIES IMPORTED_LOCATION
                      ${CMAKE_CURRENT_SOURCE_DIR}/src/main/jniLibs/armeabi/libavfilter-5.so)

add_library(libavformat-56
            SHARED
            IMPORTED)
set_target_properties(libavformat-56
                      PROPERTIES IMPORTED_LOCATION
                      ${CMAKE_CURRENT_SOURCE_DIR}/src/main/jniLibs/armeabi/libavformat-56.so)
add_library(libavutil-54
            SHARED
            IMPORTED)
set_target_properties(libavutil-54
                      PROPERTIES IMPORTED_LOCATION
                      ${CMAKE_CURRENT_SOURCE_DIR}/src/main/jniLibs/armeabi/libavutil-54.so)
add_library(libpostproc-53
            SHARED
            IMPORTED)
set_target_properties(libpostproc-53
                      PROPERTIES IMPORTED_LOCATION
                      ${CMAKE_CURRENT_SOURCE_DIR}/src/main/jniLibs/armeabi/libpostproc-53.so)

add_library(libswresample-1
            SHARED
            IMPORTED)
set_target_properties(libswresample-1
                      PROPERTIES IMPORTED_LOCATION
                      ${CMAKE_CURRENT_SOURCE_DIR}/src/main/jniLibs/armeabi/libswresample-1.so)
add_library(libswscale-3
           SHARED
           IMPORTED)
set_target_properties(libswscale-3
                      PROPERTIES IMPORTED_LOCATION
                      ${CMAKE_CURRENT_SOURCE_DIR}/src/main/jniLibs/armeabi/libswscale-3.so)

target_link_libraries(  native-lib
                        libavcodec-56
                        libavdevice-56
                        libavfilter-5
                        libavformat-56
                        libavutil-54
                        libpostproc-53
                        libswresample-1
                        libswscale-3
                       ${log-lib} )
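
For this script to take effect, the module's build.gradle must reference it through externalNativeBuild (pointing at this CMakeLists.txt), and because the imported libraries are armeabi builds, the ndk abiFilters should be limited to "armeabi"; the exact Gradle syntax depends on the Android Gradle plugin version you use.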
4. Generate the JNI header file with the javah command
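
   The generated header is com_xiaofan_testffmpeg2_PlayerControl.h, which native-lib.c includes below. A typical invocation (the exact classpath entries depend on your SDK and build setup) is: javah -d src/main/cpp -classpath <compiled classes dir>:<path to android.jar> com.xiaofan.testffmpeg2.PlayerControl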

5. Implement the decoding and playback flow

   native-lib.c

#include "com_xiaofan_testffmpeg2_PlayerControl.h"
#include <android/log.h>
#include <stdio.h>
#include <stdlib.h>
#include <android/native_window.h>
#include <android/native_window_jni.h>

#define LOGI(FORMAT, ...) __android_log_print(ANDROID_LOG_INFO,"wanxiaofan",FORMAT,##__VA_ARGS__);
#define LOGE(FORMAT, ...) __android_log_print(ANDROID_LOG_ERROR,"wanxiaofan",FORMAT,##__VA_ARGS__);
//Container format (demuxing)
#include "include/ffmpeg/libavformat/avformat.h"
//Decoding
#include "include/ffmpeg/libavcodec/avcodec.h"
//Scaling (video; not used here, but part of the FFmpeg header set)
#include "include/ffmpeg/libswscale/swscale.h"
#include "include/ffmpeg/libavutil/avutil.h"
#include "include/ffmpeg/libavutil/frame.h"
//Resampling
#include "include/ffmpeg/libswresample/swresample.h"
#include <unistd.h>

JNIEXPORT void JNICALL Java_com_xiaofan_testffmpeg2_PlayerControl_sound
        (JNIEnv *env, jobject jobj, jstring jstr_input, jstring jstr_output) {
    const char *input_cstr = (*env)->GetStringUTFChars(env, jstr_input, NULL);
    const char *output_cstr = (*env)->GetStringUTFChars(env, jstr_output, NULL);
    //Register all muxers, demuxers and codecs
    av_register_all();
    AVFormatContext *pFormatCtx = avformat_alloc_context();
    //Open the input audio file
    if (avformat_open_input(&pFormatCtx, input_cstr, NULL, NULL) != 0) {
        LOGE("%s", "无法打开音频文件!");
        return;
    }
    //Retrieve stream information
    if (avformat_find_stream_info(pFormatCtx, NULL) < 0) {
        LOGE("%s", "无法获取输入文件信息!");
        return;
    }
    //Find the index of the audio stream
    int i = 0, audio_stream_idx = -1;
    for (; i < pFormatCtx->nb_streams; ++i) {
        if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_AUDIO) {
            audio_stream_idx = i;
            break;
        }
    }
    //Get the decoder for the audio stream
    AVCodecContext *codeCtx = pFormatCtx->streams[audio_stream_idx]->codec;
    AVCodec *codec = avcodec_find_decoder(codeCtx->codec_id);
    if (codec == NULL) {
        LOGE("%s", "无法获取解码器");
        return;
    }
    //Open the decoder
    if (avcodec_open2(codeCtx, codec, NULL) < 0) {
        LOGE("%s", "无法打开解码器");
        return;
    }

    //Packet holding the compressed (encoded) data
    AVPacket *packet = av_malloc(sizeof(AVPacket));
    //Frame holding the decoded (raw) data
    AVFrame *frame = av_frame_alloc();
    //Unify sample format and rate: convert each frame to 16-bit PCM, keeping the input sample rate
    SwrContext *swrCtx = swr_alloc();//allocate the resample context
    //Resampling options - start
    //Input sample format
    enum AVSampleFormat in_sample_fmt = codeCtx->sample_fmt;
    //Output sample format: 16-bit PCM
    enum AVSampleFormat out_sample_fmt = AV_SAMPLE_FMT_S16;
    //Input sample rate
    int in_sample_rate = codeCtx->sample_rate;
    //Output sample rate (kept equal to the input rate)
    int out_sample_rate = in_sample_rate;
    //Input channel layout
    /**
     * Alternatively, derive a default layout from the channel count (2 channels -> stereo):
     */
    //int64_t  in_ch_layout=av_get_default_channel_layout(codeCtx->channels);
    int64_t in_ch_layout = codeCtx->channel_layout;
    //Output channel layout (stereo)
    int64_t out_ch_layout = AV_CH_LAYOUT_STEREO;
    swr_alloc_set_opts(swrCtx,
                       out_ch_layout, out_sample_fmt, out_sample_rate,
                       in_ch_layout, in_sample_fmt, in_sample_rate,
                       0, NULL);
    //Number of output channels
    int out_channel_nb = av_get_channel_layout_nb_channels(out_ch_layout);

    //Resampling options - end

    //JNI calls - start
    //Call back into Java to create the AudioTrack object
    jclass jcls = (*env)->GetObjectClass(env, jobj);
    jmethodID create_audio_track_mid = (*env)->GetMethodID(env, jcls, "createAudioTrack",
                                                           "(II)Landroid/media/AudioTrack;");

    jobject audio_track = (*env)->CallObjectMethod(env, jobj, create_audio_track_mid,
                                                   out_sample_rate, codeCtx->channels);

    //Call AudioTrack.play()
    jclass audio_track_class = (*env)->GetObjectClass(env, audio_track);
    jmethodID audio_track_play_mid = (*env)->GetMethodID(env, audio_track_class, "play", "()V");

    (*env)->CallVoidMethod(env, audio_track, audio_track_play_mid);
    //Look up AudioTrack.write(byte[], int, int)
    jmethodID audio_track_write_mid = (*env)->GetMethodID(env, audio_track_class, "write",
                                                          "([BII)I");
    //JNI calls - end

    swr_init(swrCtx);

    //Keep reading compressed packets from the input
    //Output buffer for the converted PCM: 1 second of 44.1 kHz 16-bit stereo (44100 * 2 channels * 2 bytes)
    uint8_t *out_buffer = (uint8_t *) av_malloc(44100 * 2 * 2);
    FILE *fp_pcm = fopen(output_cstr, "wb");
    int got_frame = 0, index = 0, ret;
    while (av_read_frame(pFormatCtx, packet) >= 0) {
        //Only decode packets that belong to the audio stream
        if (packet->stream_index == audio_stream_idx) {
            //Decode one packet into a frame
            ret = avcodec_decode_audio4(codeCtx, frame, &got_frame, packet);
            if (ret < 0) {
                LOGE("%s", "解码完成");
            }
            //A complete frame was decoded
            if (got_frame > 0) {
                LOGI("解码:%d", index++);
                //out_count is the output capacity in samples per channel (44100 for the buffer allocated above)
                swr_convert(swrCtx, &out_buffer, 44100, frame->data, frame->nb_samples);
                //Byte size of the converted samples
                int out_buffer_size = av_samples_get_buffer_size(NULL,
                                                                 out_channel_nb,
                                                                 frame->nb_samples,
                                                                 out_sample_fmt, 1);
                //Copy the PCM data from out_buffer into a Java byte[]
                jbyteArray data_array = (*env)->NewByteArray(env, out_buffer_size);
                jbyte *sample_byte = (*env)->GetByteArrayElements(env, data_array, NULL);
                //copy the samples
                memcpy(sample_byte, out_buffer, out_buffer_size);
                //commit the changes back to the Java array and release the pointer
                (*env)->ReleaseByteArrayElements(env, data_array, sample_byte, 0);
                //Write the PCM data to the AudioTrack
//            fwrite(out_buffer, 1, out_buffer_size, fp_pcm);
                //byte[] audioData, int offsetInBytes, int sizeInBytes
                (*env)->CallIntMethod(env, audio_track, audio_track_write_mid, data_array, 0,
                                      out_buffer_size);
                //Release the local reference
                (*env)->DeleteLocalRef(env, data_array);
                usleep(1000 * 16);//brief pause; AudioTrack.write in MODE_STREAM already blocks when its buffer is full
            }
        }
        //Free the packet regardless of which stream it came from
        av_free_packet(packet);
    }
    //Release resources
    fclose(fp_pcm);
    av_frame_free(&frame);
    av_free(out_buffer);
    swr_free(&swrCtx);
    avcodec_close(codeCtx);
    avformat_close_input(&pFormatCtx);
    (*env)->ReleaseStringUTFChars(env, jstr_input, input_cstr);
    (*env)->ReleaseStringUTFChars(env, jstr_output, output_cstr);
}
6. Call the native method from an Activity and playback starts; a minimal sketch is shown below.
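
A minimal sketch, not from the original project: the Activity name and file paths are placeholders, and the app is assumed to already hold the storage permission. Because sound() decodes and plays the whole file synchronously, it must run off the UI thread.

package com.xiaofan.testffmpeg2;

import android.app.Activity;
import android.os.Bundle;
import android.os.Environment;

import java.io.File;

public class MainActivity extends Activity {

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);

        //Placeholder paths: an MP3 on external storage in, raw PCM out
        final String input = new File(Environment.getExternalStorageDirectory(), "input.mp3").getAbsolutePath();
        final String output = new File(Environment.getExternalStorageDirectory(), "output.pcm").getAbsolutePath();

        //sound() blocks until the whole file is decoded, so keep it off the UI thread
        new Thread(new Runnable() {
            @Override
            public void run() {
                new PlayerControl().sound(input, output);
            }
        }).start();
    }
}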

Note:

 When using javap -s in Android Studio to obtain a method's JNI signature, run it against the compiled class files in the debug build output directory; after changing a method, rebuild so that the debug class files (and thus the signature shown) are updated.
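
 For example, running javap -s -p com.xiaofan.testffmpeg2.PlayerControl from the directory containing the compiled .class files (its exact location under app/build depends on the Gradle plugin version) prints output similar to:

     public android.media.AudioTrack createAudioTrack(int, int);
         descriptor: (II)Landroid/media/AudioTrack;

 which is the signature passed to GetMethodID in native-lib.c.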


