Android Audio/Video Study, Chapter 6: Complete Code for a Live Video Streaming Implementation



PushNative.java

package com.dongnaoedu.live.jni;

import com.dongnaoedu.live.listener.LiveStateChangeListener;

/**
 * Calls into C code to encode and push the stream.
 */
public class PushNative {

    public static final int CONNECT_FAILED = 101;
    public static final int INIT_FAILED = 102;

    LiveStateChangeListener liveStateChangeListener;

    /**
     * Receives error codes thrown from the native layer.
     * @param code
     */
    public void throwNativeError(int code){
        if(liveStateChangeListener != null){
            liveStateChangeListener.onError(code);
        }
    }


    public native void startPush(String url);

    public native void stopPush();

    public native void release();

    /**
     * Set the video parameters.
     * @param width
     * @param height
     * @param bitrate
     * @param fps
     */
    public native void setVideoOptions(int width, int height, int bitrate, int fps);

    /**
     * Set the audio parameters.
     * @param sampleRateInHz
     * @param channel
     */
    public native void setAudioOptions(int sampleRateInHz, int channel);


    /**
     * Send video data.
     * @param data
     */
    public native void fireVideo(byte[] data);

    /**
     * Send audio data.
     * @param data
     * @param len
     */
    public native void fireAudio(byte[] data, int len);


    public void setLiveStateChangeListener(LiveStateChangeListener liveStateChangeListener) {
        this.liveStateChangeListener = liveStateChangeListener;
    }

    public void removeLiveStateChangeListener(){
        this.liveStateChangeListener = null;
    }

    static{
        System.loadLibrary("dn_live");
    }

}
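For orientation, here is a minimal sketch of the call order this wrapper expects, mirroring what LivePusher below does; the RTMP URL is a placeholder and the log tag is arbitrary:

PushNative push = new PushNative();
push.setLiveStateChangeListener(new LiveStateChangeListener() {
    @Override
    public void onError(int code) {
        Log.e("Live", "native error: " + code); // CONNECT_FAILED or INIT_FAILED
    }
});
// configure both encoders before starting the push
push.setVideoOptions(480, 320, 480000, 25);
push.setAudioOptions(44100, 1);
push.startPush("rtmp://..."); // placeholder URL
// ...feed fireVideo(byte[]) / fireAudio(byte[], int) from the capture callbacks...
push.stopPush();
push.release();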

LiveStateChangeListener.java

package com.dongnaoedu.live.listener;

public interface LiveStateChangeListener {

    /**
     * Called when an error occurs.
     * @param code
     */
    void onError(int code);

}

AudioParam.java

package com.dongnaoedu.live.params;

public class AudioParam {

    // sample rate
    private int sampleRateInHz = 44100;
    // number of channels
    private int channel = 1;

    public AudioParam() {
    }

    public AudioParam(int sampleRateInHz, int channel) {
        super();
        this.sampleRateInHz = sampleRateInHz;
        this.channel = channel;
    }

    public int getSampleRateInHz() {
        return sampleRateInHz;
    }

    public void setSampleRateInHz(int sampleRateInHz) {
        this.sampleRateInHz = sampleRateInHz;
    }

    public int getChannel() {
        return channel;
    }

    public void setChannel(int channel) {
        this.channel = channel;
    }

}

VideoParam.java

package com.dongnaoedu.live.params;

/**
 * Video parameters.
 */
public class VideoParam {

    private int width;
    private int height;
    // bitrate, default 480 kbps
    private int bitrate = 480000;
    // frame rate, default 25 fps
    private int fps = 25;
    private int cameraId;

    public VideoParam(int width, int height, int cameraId) {
        super();
        this.width = width;
        this.height = height;
        this.cameraId = cameraId;
    }

    public int getWidth() {
        return width;
    }

    public void setWidth(int width) {
        this.width = width;
    }

    public int getHeight() {
        return height;
    }

    public void setHeight(int height) {
        this.height = height;
    }

    public int getCameraId() {
        return cameraId;
    }

    public void setCameraId(int cameraId) {
        this.cameraId = cameraId;
    }

    public int getBitrate() {
        return bitrate;
    }

    public void setBitrate(int bitrate) {
        this.bitrate = bitrate;
    }

    public int getFps() {
        return fps;
    }

    public void setFps(int fps) {
        this.fps = fps;
    }

}

AudioPusher.java

package com.dongnaoedu.live.pusher;

import com.dongnaoedu.live.jni.PushNative;
import com.dongnaoedu.live.params.AudioParam;

import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder.AudioSource;

public class AudioPusher extends Pusher{

    private AudioParam audioParam;
    private AudioRecord audioRecord;
    private boolean isPushing = false;
    private int minBufferSize;
    private PushNative pushNative;

    public AudioPusher(AudioParam audioParam, PushNative pushNative) {
        this.audioParam = audioParam;
        this.pushNative = pushNative;

        int channelConfig = audioParam.getChannel() == 1 ? 
                AudioFormat.CHANNEL_IN_MONO : AudioFormat.CHANNEL_IN_STEREO; 
        //minimum recording buffer size
        minBufferSize = AudioRecord.getMinBufferSize(audioParam.getSampleRateInHz(), channelConfig, AudioFormat.ENCODING_PCM_16BIT);
        audioRecord = new AudioRecord(AudioSource.MIC, 
                audioParam.getSampleRateInHz(), 
                channelConfig, 
                AudioFormat.ENCODING_PCM_16BIT, minBufferSize);
    }


    @Override
    public void startPush() {
        isPushing = true;
        pushNative.setAudioOptions(audioParam.getSampleRateInHz(), audioParam.getChannel());
        //start a recording worker thread
        new Thread(new AudioRecordTask()).start();
    }

    @Override
    public void stopPush() {
        isPushing = false;
        audioRecord.stop();
    }

    @Override
    public void release() {
        if(audioRecord != null){
            audioRecord.release();
            audioRecord = null;
        }
    }

    class AudioRecordTask implements Runnable{

        @Override
        public void run() {
            //start recording
            audioRecord.startRecording();

            while(isPushing){
                //keep reading audio data from the AudioRecord
                byte[] buffer = new byte[minBufferSize];
                int len = audioRecord.read(buffer, 0, buffer.length);
                if(len > 0){
                    //hand the data to native code for audio encoding
                    pushNative.fireAudio(buffer, len);
                }
            }
        }

    }

}
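One caveat worth noting: AudioRecord.getMinBufferSize() returns a negative error code (AudioRecord.ERROR or AudioRecord.ERROR_BAD_VALUE) when the requested sample rate/channel combination is unsupported, and the AudioRecord constructor above would then throw. A hedged sketch of a guard that could be added to the constructor:

int minBufferSize = AudioRecord.getMinBufferSize(
        audioParam.getSampleRateInHz(), channelConfig, AudioFormat.ENCODING_PCM_16BIT);
if (minBufferSize == AudioRecord.ERROR || minBufferSize == AudioRecord.ERROR_BAD_VALUE) {
    // unsupported sample rate / channel combination on this device
    throw new IllegalArgumentException("Unsupported AudioRecord configuration");
}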

LivePusher.java

package com.dongnaoedu.live.pusher;

import com.dongnaoedu.live.jni.PushNative;
import com.dongnaoedu.live.listener.LiveStateChangeListener;
import com.dongnaoedu.live.params.AudioParam;
import com.dongnaoedu.live.params.VideoParam;

import android.hardware.Camera.CameraInfo;
import android.view.SurfaceHolder;
import android.view.SurfaceHolder.Callback;

public class LivePusher implements Callback {

    private SurfaceHolder surfaceHolder;
    private VideoPusher videoPusher;
    private AudioPusher audioPusher;
    private PushNative pushNative;

    public LivePusher(SurfaceHolder surfaceHolder) {
        this.surfaceHolder = surfaceHolder;
        surfaceHolder.addCallback(this);
        prepare();
    }

    /**
     * Prepare the preview and the pushers.
     */
    private void prepare() {
        pushNative = new PushNative();

        //instantiate the video pusher
        VideoParam videoParam = new VideoParam(480, 320, CameraInfo.CAMERA_FACING_BACK);
        videoPusher = new VideoPusher(surfaceHolder,videoParam,pushNative);

        //instantiate the audio pusher
        AudioParam audioParam = new AudioParam();
        audioPusher = new AudioPusher(audioParam,pushNative);
    }

    /**
     * Switch between front and back cameras.
     */
    public void switchCamera() {
        videoPusher.switchCamera();
    }

    /**
     * Start pushing the stream.
     * @param url
     * @param liveStateChangeListener
     */
    public void startPush(String url,LiveStateChangeListener liveStateChangeListener) {
        //register the listener first, so errors thrown during startup are not missed
        pushNative.setLiveStateChangeListener(liveStateChangeListener);
        videoPusher.startPush();
        audioPusher.startPush();
        pushNative.startPush(url);
    }


    /**
     * Stop pushing the stream.
     */
    public void stopPush() {
        videoPusher.stopPush();
        audioPusher.stopPush();
        pushNative.stopPush();
        pushNative.removeLiveStateChangeListener();
    }

    /**
     * Release resources.
     */
    private void release() {
        videoPusher.release();
        audioPusher.release();
        pushNative.release();
    }

    @Override
    public void surfaceCreated(SurfaceHolder holder) {

    }

    @Override
    public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {

    }

    @Override
    public void surfaceDestroyed(SurfaceHolder holder) {
        stopPush();
        release();
    }

}

Pusher.java

package com.dongnaoedu.live.pusher;

public abstract class Pusher {

    public abstract void startPush();

    public abstract void stopPush();

    public abstract void release();

}

VideoPusher.java

package com.dongnaoedu.live.pusher;

import java.io.IOException;

import com.dongnaoedu.live.jni.PushNative;
import com.dongnaoedu.live.params.VideoParam;

import android.graphics.ImageFormat;
import android.hardware.Camera;
import android.hardware.Camera.CameraInfo;
import android.hardware.Camera.PreviewCallback;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceHolder.Callback;

public class VideoPusher extends Pusher implements Callback, PreviewCallback{

    private SurfaceHolder surfaceHolder;
    private Camera mCamera;
    private VideoParam videoParams;
    private byte[] buffers;
    private boolean isPushing = false;
    private PushNative pushNative;

    public VideoPusher(SurfaceHolder surfaceHolder, VideoParam videoParams, PushNative pushNative) {
        this.surfaceHolder = surfaceHolder;
        this.videoParams = videoParams;
        this.pushNative = pushNative;
        surfaceHolder.addCallback(this);
    }

    @Override
    public void startPush() {
        //set the video encoding parameters
        pushNative.setVideoOptions(videoParams.getWidth(), 
                videoParams.getHeight(), videoParams.getBitrate(), videoParams.getFps());
        isPushing = true;
    }

    @Override
    public void stopPush() {
        isPushing = false;
    }

    @Override
    public void surfaceCreated(SurfaceHolder holder) {
        startPreview();
    }

    @Override
    public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {

    }

    @Override
    public void surfaceDestroyed(SurfaceHolder holder) {

    }

    @Override
    public void release() {
        stopPreview();
    }


    /**
     * Switch cameras.
     */
    public void switchCamera() {
        if(videoParams.getCameraId() == CameraInfo.CAMERA_FACING_BACK){
            videoParams.setCameraId(CameraInfo.CAMERA_FACING_FRONT);
        }else{
            videoParams.setCameraId(CameraInfo.CAMERA_FACING_BACK);
        }
        //restart the preview
        stopPreview();
        startPreview();
    }

    /**
     * Start the camera preview.
     */
    private void startPreview() {
        try {
            //the SurfaceView is ready; open the camera and start the preview
            mCamera = Camera.open(videoParams.getCameraId());
            Camera.Parameters parameters = mCamera.getParameters();
            //set the camera parameters
            parameters.setPreviewFormat(ImageFormat.NV21); //YUV pixel format of the preview frames
            parameters.setPreviewSize(videoParams.getWidth(), videoParams.getHeight()); //preview width and height
            mCamera.setParameters(parameters);
            //parameters.setPreviewFpsRange(videoParams.getFps()-1, videoParams.getFps());
            mCamera.setPreviewDisplay(surfaceHolder);
            //callback buffer for preview frames (NV21 needs width*height*3/2 bytes; *4 is oversized but safe)
            buffers = new byte[videoParams.getWidth() * videoParams.getHeight() * 4];
            mCamera.addCallbackBuffer(buffers);
            mCamera.setPreviewCallbackWithBuffer(this);
            mCamera.startPreview();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Stop the preview.
     */
    private void stopPreview() {
        if(mCamera != null){            
            mCamera.stopPreview();
            mCamera.release();
            mCamera = null;
        }
    }

    @Override
    public void onPreviewFrame(byte[] data, Camera camera) {
        if(mCamera != null){
            mCamera.addCallbackBuffer(buffers);
        }

        if(isPushing){
            //a preview frame arrived in this callback; hand it to native code for encoding
            pushNative.fireVideo(data);
        }
    }


}
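Note that Camera.setParameters() throws a RuntimeException when the requested preview size is not supported by the device, so the hard-coded 480x320 can fail on some hardware. A sketch of a fallback (choosePreviewSize is a hypothetical helper, called between getParameters() and setParameters() in startPreview()):

private void choosePreviewSize(Camera.Parameters parameters) {
    // pick the supported preview size closest to the requested one
    Camera.Size best = null;
    int target = videoParams.getWidth() * videoParams.getHeight();
    for (Camera.Size size : parameters.getSupportedPreviewSizes()) {
        if (best == null
                || Math.abs(size.width * size.height - target)
                   < Math.abs(best.width * best.height - target)) {
            best = size;
        }
    }
    if (best != null) {
        // keep VideoParam in sync so the x264 encoder sees the real frame size
        videoParams.setWidth(best.width);
        videoParams.setHeight(best.height);
        parameters.setPreviewSize(best.width, best.height);
    }
}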

MainActivity.java

package com.dongnaoedu.live;

import com.dongnaoedu.live.jni.PushNative;
import com.dongnaoedu.live.listener.LiveStateChangeListener;
import com.dongnaoedu.live.pusher.LivePusher;

import android.app.Activity;
import android.os.Bundle;
import android.os.Handler;
import android.util.Log;
import android.view.SurfaceView;
import android.view.View;
import android.widget.Button;
import android.widget.Toast;

public class MainActivity extends Activity implements LiveStateChangeListener {

    static final String URL = ""; // fill in your RTMP push URL
    private LivePusher live;

    private Handler handler = new Handler(){
        public void handleMessage(android.os.Message msg) {
            switch (msg.what) {
            case PushNative.CONNECT_FAILED:
                Toast.makeText(MainActivity.this, "Connection failed", Toast.LENGTH_SHORT).show();
                break;
            case PushNative.INIT_FAILED:
                Toast.makeText(MainActivity.this, "Initialization failed", Toast.LENGTH_SHORT).show();
                break;
            default:
                break;
            }
        }
    };

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        SurfaceView surfaceView = (SurfaceView) findViewById(R.id.surface);
        //camera preview
        live = new LivePusher(surfaceView.getHolder());
    }

    /**
     * Start or stop the live stream.
     * @param view
     */
    public void mStartLive(View view) {
        Button btn = (Button)view;
        if(btn.getText().equals("Start Live")){
            live.startPush(URL,this);
            btn.setText("Stop Live");
        }else{
            live.stopPush();
            btn.setText("Start Live");
        }
    }

    /**
     * Switch cameras.
     * @param btn
     */
    public void mSwitchCamera(View btn) {
        live.switchCamera();
    }

    //this callback still runs on a worker thread; post a message to the UI thread
    @Override
    public void onError(int code) {
        handler.sendEmptyMessage(code);
    }

}
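For this demo to run, AndroidManifest.xml must declare android.permission.CAMERA, android.permission.RECORD_AUDIO, and android.permission.INTERNET. The project targets android-9, but on Android 6.0+ the first two are dangerous permissions that also require a runtime request. A hedged sketch, assuming the app compiles against API 23+ (requestLivePermissionsIfNeeded and the request code are hypothetical, called from onCreate()):

private static final int REQ_LIVE_PERMISSIONS = 1; // hypothetical request code

private void requestLivePermissionsIfNeeded() {
    if (android.os.Build.VERSION.SDK_INT >= 23) {
        String[] perms = {
                android.Manifest.permission.CAMERA,
                android.Manifest.permission.RECORD_AUDIO
        };
        for (String perm : perms) {
            if (checkSelfPermission(perm) != android.content.pm.PackageManager.PERMISSION_GRANTED) {
                requestPermissions(perms, REQ_LIVE_PERMISSIONS);
                return;
            }
        }
    }
}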

activity_main.xml

<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
    xmlns:tools="http://schemas.android.com/tools"
    android:layout_width="match_parent"
    android:layout_height="match_parent" >

    <SurfaceView
        android:id="@+id/surface"
        android:layout_width="wrap_content"
        android:layout_height="wrap_content"
        android:layout_centerInParent="true" />

    <LinearLayout
        android:id="@+id/adcontainer"
        android:layout_width="match_parent"
        android:layout_height="wrap_content"
        android:layout_alignParentBottom="true"
        android:layout_alignParentLeft="true"
        android:orientation="horizontal" >

        <Button
            android:id="@+id/btn_push"
            android:layout_width="wrap_content"
            android:layout_height="match_parent"
            android:onClick="mStartLive"
            android:text="开始直播"/>

        <Button
            android:id="@+id/btn_camera_switch"
            android:layout_width="wrap_content"
            android:layout_height="match_parent"
            android:text="切换摄像头"
            android:onClick="mSwitchCamera"/>
    </LinearLayout>

</RelativeLayout>

Before reading this source, first read "Android Audio/Video Study, Chapter 4: Live Streaming, Pushing Video", then Chapter 5, and finally this complete version.

dn_live.c

#include "com_dongnaoedu_live_jni_PushNative.h"

#include <android/log.h>
#include <android/native_window_jni.h>
#include <android/native_window.h>
#define LOGI(FORMAT,...) __android_log_print(ANDROID_LOG_INFO,"jason",FORMAT,##__VA_ARGS__)
#define LOGE(FORMAT,...) __android_log_print(ANDROID_LOG_ERROR,"jason",FORMAT,##__VA_ARGS__)

#include <stdlib.h>
#include <string.h>
#include <pthread.h>
#include "queue.h"
#include "x264.h"
#include "rtmp.h"
#include "faac.h"

#ifndef TRUE
#define TRUE    1
#define FALSE   0
#endif

#define CONNECT_FAILED 101
#define INIT_FAILED 102

//x264 input/output pictures (YUV420P)
x264_picture_t pic_in;
x264_picture_t pic_out;
//sizes of the Y/U/V planes in bytes
int y_len, u_len, v_len;
//x264 encoder handle
x264_t *video_encode_handle;

unsigned int start_time;
//threading primitives for the packet queue
pthread_mutex_t mutex;
pthread_cond_t cond;
//RTMP stream URL
char *rtmp_path;
//whether we are currently pushing
int is_pushing = FALSE;
//FAAC audio encoder handle
faacEncHandle audio_encode_handle;

unsigned long nInputSamples; //number of input samples per encode call
unsigned long nMaxOutputBytes; //maximum number of bytes in the encoded output

jobject jobj_push_native; //Global ref to the PushNative instance
jclass jcls_push_native;
jmethodID jmid_throw_native_error;
JavaVM *javaVM;

//forward declaration: defined below, used by the add_* helpers
void add_rtmp_packet(RTMPPacket *packet);

/**
 * Send the AAC sequence header (audio specific config).
 */
void add_aac_sequence_header(){
    //fetch the AAC decoder specific info and its length
    unsigned char *buf;
    unsigned long len;
    faacEncGetDecoderSpecificInfo(audio_encode_handle,&buf,&len);
    int body_size = 2 + len;
    RTMPPacket *packet = malloc(sizeof(RTMPPacket));
    //initialize the RTMPPacket
    RTMPPacket_Alloc(packet,body_size);
    RTMPPacket_Reset(packet);
    unsigned char * body = packet->m_body;
    //FLV audio tag header
    /*AF 00 + AAC sequence header*/
    body[0] = 0xAF;//SoundFormat(4 bits)=10 (AAC), SoundRate(2 bits)=3 (44 kHz), SoundSize(1 bit)=1 (16-bit samples), SoundType(1 bit)=1 (stereo)
    body[1] = 0x00;//AACPacketType: 0 = AAC sequence header
    memcpy(&body[2], buf, len); /*buf holds the AAC sequence header data*/
    packet->m_packetType = RTMP_PACKET_TYPE_AUDIO;
    packet->m_nBodySize = body_size;
    packet->m_nChannel = 0x04;
    packet->m_hasAbsTimestamp = 0;
    packet->m_nTimeStamp = 0;
    packet->m_headerType = RTMP_PACKET_SIZE_MEDIUM;
    add_rtmp_packet(packet);
    free(buf);
}

/**
 * Queue an RTMP packet carrying raw AAC frame data.
 */
void add_aac_body(unsigned char *buf, int len){
    int body_size = 2 + len;
    RTMPPacket *packet = malloc(sizeof(RTMPPacket));
    //initialize the RTMPPacket
    RTMPPacket_Alloc(packet,body_size);
    RTMPPacket_Reset(packet);
    unsigned char * body = packet->m_body;
    //FLV audio tag header
    /*AF 01 + AAC RAW data*/
    body[0] = 0xAF;//SoundFormat(4 bits)=10 (AAC), SoundRate(2 bits)=3 (44 kHz), SoundSize(1 bit)=1 (16-bit samples), SoundType(1 bit)=1 (stereo)
    body[1] = 0x01;//AACPacketType: 1 = AAC raw
    memcpy(&body[2], buf, len); /*buf holds the raw AAC data*/
    packet->m_packetType = RTMP_PACKET_TYPE_AUDIO;
    packet->m_nBodySize = body_size;
    packet->m_nChannel = 0x04;
    packet->m_hasAbsTimestamp = 0;
    packet->m_headerType = RTMP_PACKET_SIZE_LARGE;
    packet->m_nTimeStamp = RTMP_GetTime() - start_time;
    add_rtmp_packet(packet);
}

//cache the JavaVM so worker threads can attach later
jint JNI_OnLoad(JavaVM* vm, void* reserved){
    javaVM = vm;
    return JNI_VERSION_1_4;
}

/**
 * Report an error code back to the Java layer.
 */
void throwNativeError(JNIEnv *env,int code){
    (*env)->CallVoidMethod(env,jobj_push_native,jmid_throw_native_error,code);
}

/**
 * Consumer thread: keeps pulling RTMPPackets from the queue and sends them to the media server.
 */
void *push_thread(void * arg){
    JNIEnv* env;//JNIEnv for the current thread
    (*javaVM)->AttachCurrentThread(javaVM,&env,NULL);

    //establish the RTMP connection
    RTMP *rtmp = RTMP_Alloc();
    if(!rtmp){
        LOGE("RTMP_Alloc failed");
        goto end;
    }
    RTMP_Init(rtmp);
    rtmp->Link.timeout = 5; //connection timeout in seconds
    //set the stream URL
    RTMP_SetupURL(rtmp,rtmp_path);
    //we are publishing (writing) the stream
    RTMP_EnableWrite(rtmp);
    //connect to the server
    if(!RTMP_Connect(rtmp,NULL)){
        LOGE("%s","RTMP connect failed");
        throwNativeError(env,CONNECT_FAILED);
        goto end;
    }
    //timestamp baseline
    start_time = RTMP_GetTime();
    if(!RTMP_ConnectStream(rtmp,0)){ //connect the stream
        LOGE("%s","RTMP ConnectStream failed");
        throwNativeError(env,CONNECT_FAILED);
        goto end;
    }
    is_pushing = TRUE;
    //send the AAC sequence header first
    add_aac_sequence_header();

    while(is_pushing){
        //wait for a packet to send
        pthread_mutex_lock(&mutex);
        pthread_cond_wait(&cond,&mutex);
        //take the next RTMPPacket off the queue
        RTMPPacket *packet = queue_get_first();
        if(packet){
            queue_delete_first(); //remove it from the queue
            packet->m_nInfoField2 = rtmp->m_stream_id; //RTMP stream id
            int i = RTMP_SendPacket(rtmp,packet,TRUE); //TRUE: buffer inside librtmp rather than sending immediately
            if(!i){
                LOGE("RTMP disconnected");
                RTMPPacket_Free(packet);
                pthread_mutex_unlock(&mutex);
                goto end;
            }else{
                LOGI("%s","rtmp send packet");
            }
            RTMPPacket_Free(packet);
        }

        pthread_mutex_unlock(&mutex);
    }
end:
    LOGI("%s","releasing resources");
    free(rtmp_path);
    if(rtmp){
        RTMP_Close(rtmp);
        RTMP_Free(rtmp);
    }
    (*javaVM)->DetachCurrentThread(javaVM);
    return 0;
}

JNIEXPORT void JNICALL Java_com_dongnaoedu_live_jni_PushNative_startPush
  (JNIEnv *env, jobject jobj, jstring url_jstr){
    //keep a global ref to the PushNative instance
    jobj_push_native = (*env)->NewGlobalRef(env,jobj);

    jclass jcls_push_native_tmp = (*env)->GetObjectClass(env,jobj);
    jcls_push_native = (*env)->NewGlobalRef(env,jcls_push_native_tmp);
    if(jcls_push_native_tmp == NULL){
        LOGI("%s","NULL");
    }else{
        LOGI("%s","not NULL");
    }
    //PushNative.throwNativeError
    jmid_throw_native_error = (*env)->GetMethodID(env,jcls_push_native_tmp,"throwNativeError","(I)V");

    //copy the URL string into rtmp_path
    const char* url_cstr = (*env)->GetStringUTFChars(env,url_jstr,NULL);
    rtmp_path = malloc(strlen(url_cstr) + 1);
    memset(rtmp_path,0,strlen(url_cstr) + 1);
    memcpy(rtmp_path,url_cstr,strlen(url_cstr));

    //initialize the mutex and condition variable
    pthread_mutex_init(&mutex,NULL);
    pthread_cond_init(&cond,NULL);

    //create the packet queue
    create_queue();
    //start the consumer thread (pulls RTMPPackets off the queue and sends them to the media server)
    pthread_t push_thread_id;
    pthread_create(&push_thread_id, NULL,push_thread, NULL);

    (*env)->ReleaseStringUTFChars(env,url_jstr,url_cstr);
}


JNIEXPORT void JNICALL Java_com_dongnaoedu_live_jni_PushNative_stopPush
  (JNIEnv *env, jobject jobj){
    pthread_mutex_lock(&mutex);
    is_pushing = FALSE;
    //wake the sender thread so it can observe the flag and exit
    pthread_cond_signal(&cond);
    pthread_mutex_unlock(&mutex);
}


JNIEXPORT void JNICALL Java_com_dongnaoedu_live_jni_PushNative_release
  (JNIEnv *env, jobject jobj){
    (*env)->DeleteGlobalRef(env,jcls_push_native);
    (*env)->DeleteGlobalRef(env,jobj_push_native);
    //note: a jmethodID is not a JNI reference and must not be passed to DeleteGlobalRef
}

/**
 * Configure the video encoder.
 */
JNIEXPORT void JNICALL Java_com_dongnaoedu_live_jni_PushNative_setVideoOptions
  (JNIEnv *env, jobject jobj, jint width, jint height, jint bitrate, jint fps){
    x264_param_t param;
    //start from the ultrafast/zerolatency preset
    x264_param_default_preset(&param,"ultrafast","zerolatency");
    //input pixel format: YUV420P
    param.i_csp = X264_CSP_I420;
    param.i_width  = width;
    param.i_height = height;

    y_len = width * height;
    u_len = y_len / 4;
    v_len = u_len;

    //i_rc_method selects rate control: CQP (constant QP), CRF (constant quality), ABR (average bitrate)
    param.rc.i_rc_method = X264_RC_CRF;
    param.rc.i_bitrate = bitrate / 1000; //bitrate in kbps
    param.rc.i_vbv_max_bitrate = bitrate / 1000 * 1.2; //instantaneous maximum bitrate

    //drive timing from fps rather than timebase/timestamps
    param.b_vfr_input = 0;
    param.i_fps_num = fps; //frame rate numerator
    param.i_fps_den = 1; //frame rate denominator
    param.i_timebase_den = param.i_fps_num;
    param.i_timebase_num = param.i_fps_den;
    param.i_threads = 1;//number of encoding threads (0 = automatic/multithreaded)

    //repeat SPS and PPS in front of every keyframe
    //SPS: Sequence Parameter Set; PPS: Picture Parameter Set
    //improves the stream's error resilience
    param.b_repeat_headers = 1;
    //set the level (5.1)
    param.i_level_idc = 51;
    //set the profile
    //baseline profile: no B-frames
    x264_param_apply_profile(&param,"baseline");

    //allocate the x264_picture_t input picture
    x264_picture_alloc(&pic_in, param.i_csp, param.i_width, param.i_height);
    pic_in.i_pts = 0;
    //open the encoder
    video_encode_handle = x264_encoder_open(&param);
    if(video_encode_handle){
        LOGI("video encoder opened");
    }else{
        throwNativeError(env,INIT_FAILED);
    }
}

/**
 * Configure the audio encoder.
 */
JNIEXPORT void JNICALL Java_com_dongnaoedu_live_jni_PushNative_setAudioOptions
  (JNIEnv *env, jobject jobj, jint sampleRateInHz, jint numChannels){
    audio_encode_handle = faacEncOpen(sampleRateInHz,numChannels,&nInputSamples,&nMaxOutputBytes);
    if(!audio_encode_handle){
        LOGE("failed to open the audio encoder");
        return;
    }
    //set the audio encoding parameters
    faacEncConfigurationPtr p_config = faacEncGetCurrentConfiguration(audio_encode_handle);
    p_config->mpegVersion = MPEG4;
    p_config->allowMidside = 1;
    p_config->aacObjectType = LOW;
    p_config->outputFormat = 0; //whether the output includes ADTS headers (0 = raw)
    p_config->useTns = 1; //temporal noise shaping, roughly: reduces crackling
    p_config->useLfe = 0;
//  p_config->inputFormat = FAAC_INPUT_16BIT;
    p_config->quantqual = 100;
    p_config->bandWidth = 0; //bandwidth
    p_config->shortctl = SHORTCTL_NORMAL;

    if(!faacEncSetConfiguration(audio_encode_handle,p_config)){
        LOGE("%s","failed to configure the audio encoder");
        throwNativeError(env,INIT_FAILED);
        return;
    }

    LOGI("%s","audio encoder configured");
}

/**
 * Append an RTMPPacket to the queue and signal the sender thread.
 */
void add_rtmp_packet(RTMPPacket *packet){
    pthread_mutex_lock(&mutex);
    if(is_pushing){
        queue_append_last(packet);
    }
    pthread_cond_signal(&cond);
    pthread_mutex_unlock(&mutex);
}


/**
 * Send the H.264 SPS and PPS parameter sets.
 */
void add_264_sequence_header(unsigned char* pps,unsigned char* sps,int pps_len,int sps_len){
    int body_size = 16 + sps_len + pps_len; //the AVCDecoderConfigurationRecord overhead is 16 bytes
    RTMPPacket *packet = malloc(sizeof(RTMPPacket));
    //initialize the RTMPPacket
    RTMPPacket_Alloc(packet,body_size);
    RTMPPacket_Reset(packet);

    unsigned char * body = packet->m_body;
    int i = 0;
    //binary: 00010111
    body[i++] = 0x17;//VideoHeaderTag: FrameType(1 = keyframe) + CodecID(7 = AVC)
    body[i++] = 0x00;//AVCPacketType: 0 = AVCDecoderConfigurationRecord
    //composition time: 0x000000, 24 bits
    body[i++] = 0x00;
    body[i++] = 0x00;
    body[i++] = 0x00;

    /*AVCDecoderConfigurationRecord*/
    body[i++] = 0x01;//configurationVersion = 1
    body[i++] = sps[1];//AVCProfileIndication
    body[i++] = sps[2];//profile_compatibility
    body[i++] = sps[3];//AVCLevelIndication
    body[i++] = 0xFF;//lengthSizeMinusOne: NALU length field size is 1 + (lengthSizeMinusOne & 3); 0xFF gives 4 bytes

    /*sps*/
    body[i++] = 0xE1;//numOfSequenceParameterSets: count is numOfSequenceParameterSets & 0x1F; 0xE1 gives 1
    body[i++] = (sps_len >> 8) & 0xff;//sequenceParameterSetLength, high byte
    body[i++] = sps_len & 0xff;//sequenceParameterSetLength, low byte
    memcpy(&body[i], sps, sps_len);
    i += sps_len;

    /*pps*/
    body[i++] = 0x01;//numOfPictureParameterSets: count is numOfPictureParameterSets & 0x1F; 0x01 gives 1
    body[i++] = (pps_len >> 8) & 0xff;//pictureParameterSetLength, high byte
    body[i++] = (pps_len) & 0xff;//pictureParameterSetLength, low byte
    memcpy(&body[i], pps, pps_len);
    i += pps_len;

    //message type, RTMP_PACKET_TYPE_VIDEO: 0x09
    packet->m_packetType = RTMP_PACKET_TYPE_VIDEO;
    //payload length
    packet->m_nBodySize = body_size;
    //timestamp: milliseconds relative to the first tag;
    //the sequence header's timestamp is always 0
    packet->m_nTimeStamp = 0;
    packet->m_hasAbsTimestamp = 0;
    packet->m_nChannel = 0x04; //channel id used for audio and video
    packet->m_headerType = RTMP_PACKET_SIZE_MEDIUM;
    //put the RTMPPacket on the queue
    add_rtmp_packet(packet);
}

/**
 * Send one H.264 NAL unit.
 */
void add_264_body(unsigned char *buf ,int len){
    //strip the Annex-B start code (delimiter)
    if(buf[2] == 0x00){  //00 00 00 01
        buf += 4;
        len -= 4;
    }else if(buf[2] == 0x01){ //00 00 01
        buf += 3;
        len -= 3;
    }
    int body_size = len + 9;
    RTMPPacket *packet = malloc(sizeof(RTMPPacket));
    RTMPPacket_Alloc(packet,body_size);

    unsigned char * body = packet->m_body;
    //the low 5 bits of the NAL header give the NAL type; type 5 marks an IDR (keyframe) NAL unit
    //AND the NAL header buf[0] with a mask to extract the type, e.g.
    //00000101 & 00011111(0x1f) = 00000101
    int type = buf[0] & 0x1f;
    //default: inter frame
    body[0] = 0x27;//VideoHeaderTag: FrameType(2 = inter frame) + CodecID(7 = AVC)
    //IDR keyframe
    if (type == NAL_SLICE_IDR) {
        body[0] = 0x17;//VideoHeaderTag: FrameType(1 = keyframe) + CodecID(7 = AVC)
    }
    //AVCPacketType = 1
    body[1] = 0x01; /*NAL units (AVCPacketType == 1)*/
    body[2] = 0x00; //composition time: 0x000000, 24 bits
    body[3] = 0x00;
    body[4] = 0x00;

    //4-byte big-endian NALU length, written one byte at a time
    body[5] = (len >> 24) & 0xff;
    body[6] = (len >> 16) & 0xff;
    body[7] = (len >> 8) & 0xff;
    body[8] = (len) & 0xff;

    /*copy data*/
    memcpy(&body[9], buf, len);

    packet->m_hasAbsTimestamp = 0;
    packet->m_nBodySize = body_size;
    packet->m_packetType = RTMP_PACKET_TYPE_VIDEO;//this packet carries video
    packet->m_nChannel = 0x04;
    packet->m_headerType = RTMP_PACKET_SIZE_LARGE;
    packet->m_nTimeStamp = RTMP_GetTime() - start_time;//milliseconds relative to the first tag
    add_rtmp_packet(packet);
}


/**
 * Encode a captured video frame and queue it for sending.
 */
JNIEXPORT void JNICALL Java_com_dongnaoedu_live_jni_PushNative_fireVideo
  (JNIEnv *env, jobject jobj, jbyteArray buffer){
    //convert the camera frame from NV21 to YUV420P
    jbyte* nv21_buffer = (*env)->GetByteArrayElements(env,buffer,NULL);
    jbyte* u = pic_in.img.plane[1];
    jbyte* v = pic_in.img.plane[2];
    //NV21 is a 4:2:0 format, 12 bits per pixel
    //NV21 and YUV420P share the same Y plane; NV21 interleaves the chroma as VU pairs,
    //while YUV420P stores U and V in separate planes
    //y = w*h bytes, u = v = w*h/4 bytes
    memcpy(pic_in.img.plane[0], nv21_buffer, y_len);
    int i;
    for (i = 0; i < u_len; i++) {
        *(u + i) = *(nv21_buffer + y_len + i * 2 + 1);
        *(v + i) = *(nv21_buffer + y_len + i * 2);
    }

    //H.264 encode; the encoder returns an array of NAL units
    x264_nal_t *nal = NULL; //NAL units
    int n_nal = -1; //number of NAL units
    if(x264_encoder_encode(video_encode_handle,&nal, &n_nal,&pic_in,&pic_out) < 0){
        LOGE("%s","encode failed");
        (*env)->ReleaseByteArrayElements(env,buffer,nv21_buffer,0);
        return;
    }
    //send the encoded video to the media server over RTMP
    //keyframes should carry SPS and PPS data to improve error resilience
    int sps_len = 0, pps_len = 0;
    unsigned char sps[100];
    unsigned char pps[100];
    memset(sps,0,100);
    memset(pps,0,100);
    pic_in.i_pts += 1; //monotonically increasing pts
    //walk the NAL units and dispatch by type
    for(i=0; i < n_nal; i++){
        if(nal[i].i_type == NAL_SPS){
            //copy the SPS data, skipping the 4-byte start code
            sps_len = nal[i].i_payload - 4;
            memcpy(sps,nal[i].p_payload + 4,sps_len);
        }else if(nal[i].i_type == NAL_PPS){
            //copy the PPS data, skipping the 4-byte start code
            pps_len = nal[i].i_payload - 4;
            memcpy(pps,nal[i].p_payload + 4,pps_len);

            //send the sequence header
            //x264 emits SPS and PPS together ahead of a keyframe
            add_264_sequence_header(pps,sps,pps_len,sps_len);

        }else{
            //send the frame data
            add_264_body(nal[i].p_payload,nal[i].i_payload);
        }

    }

    (*env)->ReleaseByteArrayElements(env,buffer,nv21_buffer,0);
}

/**
 * AAC-encode captured PCM audio samples and queue them for sending.
 */
JNIEXPORT void JNICALL Java_com_dongnaoedu_live_jni_PushNative_fireAudio
  (JNIEnv *env, jobject jobj, jbyteArray buffer, jint len){
    int *pcmbuf;
    unsigned char *bitbuf;
    jbyte* b_buffer = (*env)->GetByteArrayElements(env, buffer, 0);
    pcmbuf = (int*) malloc(nInputSamples * sizeof(int));
    bitbuf = (unsigned char*) malloc(nMaxOutputBytes * sizeof(unsigned char));
    int nByteCount = 0;
    unsigned int nBufferSize = (unsigned int) len / 2; //number of 16-bit samples
    unsigned short* buf = (unsigned short*) b_buffer;
    while (nByteCount < nBufferSize) {
        int audioLength = nInputSamples;
        if ((nByteCount + nInputSamples) >= nBufferSize) {
            audioLength = nBufferSize - nByteCount;
        }
        int i;
        for (i = 0; i < audioLength; i++) {//read 16-bit PCM samples from the capture buffer
            int s = ((int16_t *) buf + nByteCount)[i];
            pcmbuf[i] = s << 8;//widen each sample for the encoder's (default) wider input format
        }
        nByteCount += nInputSamples;
        //encode with FAAC: pcmbuf holds the converted PCM samples, audioLength is the input sample
        //count (at most nInputSamples, reported by faacEncOpen), bitbuf receives the encoded data,
        //nMaxOutputBytes is the maximum output size reported by faacEncOpen
        int byteslen = faacEncEncode(audio_encode_handle, pcmbuf, audioLength,
                bitbuf, nMaxOutputBytes);
        if (byteslen < 1) {
            continue;
        }
        add_aac_body(bitbuf, byteslen);//queue the encoded AAC frame
    }
    (*env)->ReleaseByteArrayElements(env, buffer, b_buffer, 0);
    if (bitbuf)
        free(bitbuf);
    if (pcmbuf)
        free(pcmbuf);
}

queue.c (doubly linked queue)

#include <stdio.h>
#include <malloc.h>

typedef struct queue_node {
    struct queue_node* prev;
    struct queue_node* next;
    void *p;//the node's value
} node;

// The list head. Note: the head node stores no element itself!
static node *phead = NULL;
static int count = 0;

static node* create_node(void *pval) {
    node *pnode = NULL;
    pnode = (node *) malloc(sizeof(node));
    if (pnode) {
        // by default, the new node's prev and next both point to itself
        pnode->prev = pnode->next = pnode;
        // store the value
        pnode->p = pval;
    }
    return pnode;
}
// Create the doubly linked list. Returns 0 on success, -1 otherwise.
int create_queue() {
    phead = create_node(NULL);
    if (!phead) {
        return -1;
    }
    // node count starts at 0
    count = 0;
    return 0;
}

// Whether the list is empty
int queue_is_empty() {
    return count == 0;
}

// Return the size of the list
int queue_size() {
    return count;
}

// Get the node at position index
static node* get_node(int index) {
    if (index < 0 || index >= count) {
        return NULL;
    }
    if (index <= (count / 2)) {
        int i = 0;
        node *pnode = phead->next;
        while ((i++) < index)
            pnode = pnode->next;
        return pnode;
    }
    int j = 0;
    int rindex = count - index - 1;
    node *rnode = phead->prev;
    while ((j++) < rindex)
        rnode = rnode->prev;
    return rnode;
}

// Get the first node
static node* get_first_node() {
    return get_node(0);
}
// Get the last node
static node* get_last_node() {
    return get_node(count - 1);
}
// Get the element at position index. Returns the node's value, or NULL on failure.
void* queue_get(int index) {
    node *pindex = get_node(index);
    if (!pindex) {
        return NULL;
    }
    return pindex->p;
}

// Get the value of the first element
void* queue_get_first() {
    return queue_get(0);
}

void* queue_get_last() {
    return queue_get(count - 1);
}

int queue_insert_first(void *pval); // forward declaration

// Insert pval at position index. Returns 0 on success, -1 otherwise.
int queue_insert(int index, void* pval) {
    // inserting at the head
    if (index == 0)
        return queue_insert_first(pval);
    // find the node currently at the insertion position
    node *pindex = get_node(index);
    if (!pindex)
        return -1;
    // create the new node
    node *pnode = create_node(pval);
    if (!pnode)
        return -1;
    pnode->prev = pindex->prev;
    pnode->next = pindex;
    pindex->prev->next = pnode;
    pindex->prev = pnode;
    // node count + 1
    count++;
    return 0;
}

// Insert pval at the head of the list
int queue_insert_first(void *pval) {
    node *pnode = create_node(pval);
    if (!pnode)
        return -1;
    pnode->prev = phead;
    pnode->next = phead->next;
    phead->next->prev = pnode;
    phead->next = pnode;
    count++;
    return 0;
}
// Append pval at the tail of the list
int queue_append_last(void *pval) {
    node *pnode = create_node(pval);
    if (!pnode)
        return -1;
    pnode->next = phead;
    pnode->prev = phead->prev;
    phead->prev->next = pnode;
    phead->prev = pnode;
    count++;
    return 0;
}
// Delete the node at position index. Returns 0 on success, -1 otherwise.
int queue_delete(int index) {
    node *pindex = get_node(index);
    if (!pindex) {
        return -1;
    }
    pindex->next->prev = pindex->prev;
    pindex->prev->next = pindex->next;
    free(pindex);
    count--;
    return 0;
}
// Delete the first node
int queue_delete_first() {
    return queue_delete(0);
}
// Delete the last node
int queue_delete_last() {
    return queue_delete(count - 1);
}
// Destroy the list. Returns 0 on success, -1 otherwise.
int destroy_queue() {
    if (!phead) {
        return -1;
    }
    node *pnode = phead->next;
    node *ptmp = NULL;
    while (pnode != phead) {
        ptmp = pnode;
        pnode = pnode->next;
        free(ptmp);
    }
    free(phead);
    phead = NULL;
    count = 0;
    return 0;
}

Android.mk

LOCAL_PATH := $(call my-dir)


#faac
include $(CLEAR_VARS)
LOCAL_MODULE    := faac
LOCAL_EXPORT_C_INCLUDES := $(LOCAL_PATH)/faac/include
LOCAL_SRC_FILES := faac/libfaac.a
#print the source files being compiled
$(info $(LOCAL_SRC_FILES))
include $(PREBUILT_STATIC_LIBRARY)

#x264
include $(CLEAR_VARS)
LOCAL_MODULE    := x264
LOCAL_EXPORT_C_INCLUDES := $(LOCAL_PATH)/x264/include
LOCAL_SRC_FILES := x264/libx264.a
$(info $(LOCAL_SRC_FILES))
include $(PREBUILT_STATIC_LIBRARY)

#rtmpdump
include $(CLEAR_VARS)
LOCAL_MODULE    := rtmpdump
LOCAL_EXPORT_C_INCLUDES := $(LOCAL_PATH)/rtmpdump/include
LOCAL_SRC_FILES := rtmpdump/librtmp.a
$(info $(LOCAL_SRC_FILES))
include $(PREBUILT_STATIC_LIBRARY)

include $(CLEAR_VARS)
LOCAL_MODULE    := dn_live
LOCAL_SRC_FILES := dn_live.c queue.c
LOCAL_STATIC_LIBRARIES := x264 faac rtmpdump
LOCAL_LDLIBS := -llog
include $(BUILD_SHARED_LIBRARY)

Application.mk

APP_ABI :=  armeabi
APP_PLATFORM := android-9
APP_STL := gnustl_static