RTMP Live Push: Audio

LivePushActivity

package com.example.glivepush;

import android.os.Bundle;
import android.util.Log;
import android.view.View;

import androidx.annotation.Nullable;
import androidx.appcompat.app.AppCompatActivity;

import com.example.glivepush.camera.GCameraView;
import com.example.glivepush.push.BasePushEncoder;
import com.example.glivepush.push.GConnectListener;
import com.example.glivepush.push.PushEncodec;
import com.example.glivepush.push.PushVideo;
import com.example.glivepush.util.DisplayUtil;

public class LivePushActivity extends AppCompatActivity {

    private PushVideo pushVideo;

    private GCameraView gCameraView;
    private boolean start = false;
    private PushEncodec pushEncodec;

    @Override
    protected void onCreate(@Nullable Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_live_push);
        pushVideo = new PushVideo();

        gCameraView = findViewById(R.id.cameraView);

        pushVideo.setgConnectListener(new GConnectListener() {
            @Override
            public void onConnecting() {
                Log.d("godv", "链接服务器中");
            }

            @Override
            public void onConnectSuccess() {
                Log.d("godv", "链接服务器成功");

                pushEncodec = new PushEncodec(LivePushActivity.this, gCameraView.getTextureId());
                pushEncodec.initEncodec(
                        gCameraView.getEglContext(),
                        DisplayUtil.getScreenWidth(LivePushActivity.this),
                        DisplayUtil.getScreenHeight(LivePushActivity.this),
                        44100,
                        2
                );
                pushEncodec.startRecord();

                /*************************************直播推流-audio-start***********************************/
                pushEncodec.setOnMediaInfoListener(new BasePushEncoder.OnMediaInfoListener() {
                    @Override
                    public void onMediaTime(int times) {

                    }

                    @Override
                    public void onSPSPPSInfo(byte[] sps, byte[] pps) {
                        pushVideo.pushSPSPPS(sps, pps);
                    }

                    @Override
                    public void onVideoInfo(byte[] data, boolean keyFrame) {
                        pushVideo.pushVideoData(data, keyFrame);
                    }

                    @Override
                    public void onAudioInfo(byte[] data) {
                        pushVideo.pushAudioData(data);
                    }
                });
                /*************************************直播推流-audio-end***********************************/

            }

            @Override
            public void onConnectFail(String msg) {
                Log.d("godv", msg);
            }
        });
    }

    public void startPush(View view) {
        start = !start;

        if (start) {
            pushVideo.initLivePush("rtmp://192.168.0.14/myapp/mystream");
        } else {
            if (pushEncodec != null) {
                pushVideo.stopPush();
                pushEncodec.stopRecord();
                pushEncodec = null;
            }
        }
    }
}

AudioRecordUtil    Java audio recording utility class

package com.example.glivepush.push;

import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder;

public class AudioRecordUtil {
    private AudioRecord audioRecord;
    private int bufferSizeInBytes;
    private boolean start = false;
    private int readSize = 0;

    private OnRecordListener onRecordListener;

    public void setOnRecordListener(OnRecordListener onRecordListener) {
        this.onRecordListener = onRecordListener;
    }

    public AudioRecordUtil() {
        bufferSizeInBytes = AudioRecord.getMinBufferSize(
                44100,
                AudioFormat.CHANNEL_IN_STEREO,
                AudioFormat.ENCODING_PCM_16BIT);
        //sample rate (Hz), channel config, 16-bit PCM encoding
        audioRecord = new AudioRecord(
                MediaRecorder.AudioSource.MIC,
                44100,
                AudioFormat.CHANNEL_IN_STEREO,
                AudioFormat.ENCODING_PCM_16BIT,
                bufferSizeInBytes);
        //MediaRecorder.AudioSource.MIC: record from the microphone
    }

    public void startRecord() {
        new Thread(new Runnable() {
            @Override
            public void run() {
                start = true;
                //start recording
                audioRecord.startRecording();
                byte[] audioData = new byte[bufferSizeInBytes];
                while (start) {
                    readSize = audioRecord.read(audioData, 0, bufferSizeInBytes);
                    if (onRecordListener != null) {
                        onRecordListener.recordByte(audioData, readSize);
                    }
                }
                if (audioRecord != null) {
                    audioRecord.stop();
                    audioRecord.release();
                    audioRecord = null;
                }
            }
        }).start();
    }

    public void stopRecord() {
        start = false;
    }

    public interface OnRecordListener {
        void recordByte(byte[] audioData, int readSize);
    }

    public boolean isStart() {
        return start;
    }
}

BasePushEncoder

package com.example.glivepush.push;

import android.content.Context;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.media.MediaMuxer;
import android.util.Log;
import android.view.Surface;

import com.example.glivepush.egl.EglHelper;
import com.example.glivepush.egl.GEGLSurfaceView;

import java.io.IOException;
import java.lang.ref.WeakReference;
import java.nio.ByteBuffer;

import javax.microedition.khronos.egl.EGLContext;

public abstract class BasePushEncoder {

    private Surface surface;
    private EGLContext eglContext;

    private int width;
    private int height;

    //video encoder
    private MediaCodec videoEncodec;
    private MediaFormat videoFormat;
    private MediaCodec.BufferInfo videoBufferinfo;

    //audio encoder
    private MediaCodec audioEncodec;
    private MediaFormat audioFormat;
    private MediaCodec.BufferInfo audioBufferinfo;
    private long audioPts = 0;
    private int sampleRate = 0;

    //thread that renders camera frames into the encoder surface
    private GEGLMediaThread geglMediaThread;
    //video encoding thread
    private VideoEncodecThread videoEncodecThread;
    //audio encoding thread
    private AudioEncodecThread audioEncodecThread;

    /*************************************直播推流-audio-start*************************************/
    //PCM recording helper
    private AudioRecordUtil audioRecordUtil;
    /*************************************直播推流-audio-end*************************************/

    private GEGLSurfaceView.GGLRender gGLRender;

    public final static int RENDERMODE_WHEN_DIRTY = 0;
    public final static int RENDERMODE_CONTINUOUSLY = 1;

    private int mRenderMode = RENDERMODE_CONTINUOUSLY;

    private OnMediaInfoListener onMediaInfoListener;

    public void setOnMediaInfoListener(OnMediaInfoListener onMediaInfoListener) {
        this.onMediaInfoListener = onMediaInfoListener;
    }

    public BasePushEncoder(Context context) {
    }

    public void setRender(GEGLSurfaceView.GGLRender gGLRender) {
        this.gGLRender = gGLRender;
    }

    public void setRenderMode(int mRenderMode) {
        if (gGLRender == null) {
            throw new RuntimeException("must set render before");
        }
        this.mRenderMode = mRenderMode;
    }

    //initialization entry point
    public void initEncodec(EGLContext eglContext, int width, int height, int sampleRate, int channelCount) {
        this.width = width;
        this.height = height;
        this.eglContext = eglContext;
        initMediaEncodc(width, height, sampleRate, channelCount);
    }

    //start encoding
    public void startRecord() {
        if (surface != null && eglContext != null) {
            audioPts = 0;

            geglMediaThread = new GEGLMediaThread(new WeakReference<BasePushEncoder>(this));
            videoEncodecThread = new VideoEncodecThread(new WeakReference<BasePushEncoder>(this));
            audioEncodecThread = new AudioEncodecThread(new WeakReference<BasePushEncoder>(this));
            geglMediaThread.isCreate = true;
            geglMediaThread.isChange = true;
            geglMediaThread.start();
            videoEncodecThread.start();
            audioEncodecThread.start();
            /*************************************直播推流-audio-start*************************************/
            //start capturing PCM from the microphone
            audioRecordUtil.startRecord();
            /*************************************直播推流-audio-end*************************************/
        }
    }

    //stop encoding
    public void stopRecord() {
        if (geglMediaThread != null && videoEncodecThread != null && audioEncodecThread != null) {
            /*************************************直播推流-audio-start*************************************/
            //stop capturing PCM
            audioRecordUtil.stopRecord();
            /*************************************直播推流-audio-end*************************************/
            videoEncodecThread.exit();
            audioEncodecThread.exit();
            geglMediaThread.onDestory();
            videoEncodecThread = null;
            geglMediaThread = null;
            audioEncodecThread = null;
        }
    }

    /*************************************直播推流-audio-start*************************************/
    private void initMediaEncodc(int width, int height, int sampleRate, int channelCount) {
        //the first argument selects the track's MIME type (H.264 video, AAC audio)
        initVideoEncodec(MediaFormat.MIMETYPE_VIDEO_AVC, width, height);
        initAudioEncodec(MediaFormat.MIMETYPE_AUDIO_AAC, sampleRate, channelCount);
        initPCMRecord();
    }


    //set up the PCM recorder
    private void initPCMRecord() {
        audioRecordUtil = new AudioRecordUtil();
        audioRecordUtil.setOnRecordListener(new AudioRecordUtil.OnRecordListener() {
            @Override
            public void recordByte(byte[] audioData, int readSize) {
                if (audioRecordUtil.isStart()) {
                    putPCMDate(audioData, readSize);
                }
            }
        });
    }

    /*************************************直播推流-audio-end*************************************/

    //initialize the video encoder
    private void initVideoEncodec(String mimeType, int width, int height) {
        try {
            videoBufferinfo = new MediaCodec.BufferInfo();

            videoFormat = MediaFormat.createVideoFormat(mimeType, width, height);
            videoFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT,
                    MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
            //bit rate
            videoFormat.setInteger(MediaFormat.KEY_BIT_RATE, width * height * 4);
            //frame rate
            videoFormat.setInteger(MediaFormat.KEY_FRAME_RATE, 30);
            //key frame (I-frame) interval, in seconds
            videoFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);

            videoEncodec = MediaCodec.createEncoderByType(mimeType);
            //no input Surface is passed to configure()
            //the last flag marks this codec as an encoder
            videoEncodec.configure(videoFormat, null, null,
                    MediaCodec.CONFIGURE_FLAG_ENCODE);
            //obtain the encoder's input Surface
            surface = videoEncodec.createInputSurface();
        } catch (IOException e) {
            e.printStackTrace();
            videoEncodec = null;
            videoFormat = null;
            videoBufferinfo = null;
        }
    }

    //initialize the audio encoder
    private void initAudioEncodec(String mimeType, int sampleRate, int channelCount) {
        try {
            this.sampleRate = sampleRate;
            audioBufferinfo = new MediaCodec.BufferInfo();
            audioFormat = MediaFormat.createAudioFormat(mimeType, sampleRate, channelCount);
            //audio bit rate
            audioFormat.setInteger(MediaFormat.KEY_BIT_RATE, 96000);
            //AAC profile (low complexity)
            audioFormat.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);

            /*************************************直播推流-audio-start*************************************/
            //maximum size of a single input buffer
            audioFormat.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, 4096 * 10);
            /*************************************直播推流-audio-end*************************************/

            //create the encoder
            audioEncodec = MediaCodec.createEncoderByType(mimeType);

            audioEncodec.configure(audioFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        } catch (IOException e) {
            e.printStackTrace();
            audioEncodec = null;
            audioFormat = null;
            audioBufferinfo = null;
        }
    }

    //feed PCM data into the audio encoder
    public void putPCMDate(byte[] buffer, int size) {
        if (audioEncodecThread != null && !audioEncodecThread.isExit && buffer != null && size > 0) {
            int inputBufferindex = audioEncodec.dequeueInputBuffer(0);
            if (inputBufferindex >= 0) {
                ByteBuffer byteBuffer = audioEncodec.getInputBuffers()[inputBufferindex];
                byteBuffer.clear();
                byteBuffer.put(buffer);
                long pts = getAudioPts(size, sampleRate);
                audioEncodec.queueInputBuffer(inputBufferindex, 0, size, pts, 0);
            }
        }
    }

    private long getAudioPts(int size, int sampleRate) {
        audioPts += (long) (1.0 * size / (sampleRate * 2 * 2) * 1000000.0);
        return audioPts;
    }
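
    //Note on getAudioPts: it converts the PCM bytes fed to the encoder into microseconds,
    //i.e. size / (sampleRate * 2 channels * 2 bytes per 16-bit sample) seconds, scaled by 1,000,000.
    //Worked example (4096 bytes is only an illustrative read size, not a value from getMinBufferSize):
    //4096 / (44100 * 2 * 2) = 4096 / 176400 s, so each such buffer advances the pts by about 23,219 microseconds.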

    //renders frames into the encoder's input Surface on a dedicated EGL thread
    static class GEGLMediaThread extends Thread {
        private WeakReference<BasePushEncoder> encoder;
        private EglHelper eglHelper;
        private Object object;

        private boolean isExit = false;
        private boolean isCreate = false;
        private boolean isChange = false;
        private boolean isStart = false;

        public GEGLMediaThread(WeakReference<BasePushEncoder> encoder) {
            this.encoder = encoder;
        }

        @Override
        public void run() {
            super.run();
            isExit = false;
            isStart = false;
            object = new Object();
            eglHelper = new EglHelper();
            eglHelper.initEgl(encoder.get().surface, encoder.get().eglContext);
            while (true) {
                if (isExit) {
                    release();
                    break;
                }
                //render mode: block until requestRender() when dirty, or throttle to ~60 fps when continuous
                if (isStart) {
                    if (encoder.get().mRenderMode == RENDERMODE_WHEN_DIRTY) {
                        synchronized (object) {
                            try {
                                object.wait();
                            } catch (InterruptedException e) {
                                e.printStackTrace();
                            }
                        }
                    } else if (encoder.get().mRenderMode == RENDERMODE_CONTINUOUSLY) {
                        try {
                            Thread.sleep(1000 / 60);
                        } catch (InterruptedException e) {
                            e.printStackTrace();
                        }
                    } else {
                        throw new RuntimeException("mRenderMode is wrong value");
                    }
                }

                onCreate();
                onChange(encoder.get().width, encoder.get().height);
                onDraw();

                isStart = true;
            }
        }

        public void release() {
            if (eglHelper != null) {
                eglHelper.destoryEgl();
                eglHelper = null;
                object = null;
                encoder = null;
            }
        }

        private void onCreate() {
            if (isCreate && encoder.get().gGLRender != null) {
                isCreate = false;
                encoder.get().gGLRender.onSurfaceCreated();
            }
        }

        private void onChange(int width, int height) {
            if (isChange && encoder.get().gGLRender != null) {
                isChange = false;
                encoder.get().gGLRender.onSurfaceChanged(width, height);
            }
        }

        private void onDraw() {
            if (encoder.get().gGLRender != null && eglHelper != null) {
                encoder.get().gGLRender.onDrawFrame();
                if (!isStart) {
                    encoder.get().gGLRender.onDrawFrame();
                }
                eglHelper.swapBuffers();
            }
        }

        private void requestRender() {
            if (object != null) {
                synchronized (object) {
                    object.notifyAll();
                }
            }
        }

        public void onDestory() {
            isExit = true;
            requestRender();
        }
    }

    //video encoding thread
    static class VideoEncodecThread extends Thread {

        private WeakReference<BasePushEncoder> encoder;

        private boolean isExit;

        private MediaCodec videoEncodec;
        private MediaFormat videoFormat;
        private MediaCodec.BufferInfo videoBufferinfo;

        //pts
        private long pts;

        //sps
        private byte[] sps;
        private byte[] pps;

        private boolean keyFrame = false;

        public VideoEncodecThread(WeakReference<BasePushEncoder> encoder) {
            this.encoder = encoder;
            videoEncodec = encoder.get().videoEncodec;
            videoFormat = encoder.get().videoFormat;
            videoBufferinfo = encoder.get().videoBufferinfo;
        }

        @Override
        public void run() {
            super.run();
            pts = 0;
            isExit = false;
            videoEncodec.start();

            while (true) {
                if (isExit) {
                    videoEncodec.stop();
                    videoEncodec.release();
                    videoEncodec = null;
                    Log.d("godv", "录制完成");
                    break;
                }
                //video encoding loop
                //index of the next available output buffer
                int outputBufferIndex = videoEncodec.dequeueOutputBuffer(videoBufferinfo, 0);

                keyFrame = false;

                if (outputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
//                    Log.d("godv", "INFO_OUTPUT_FORMAT_CHANGED");

                    ByteBuffer spsb = videoEncodec.getOutputFormat().getByteBuffer("csd-0");
                    sps = new byte[spsb.remaining()];
                    spsb.get(sps, 0, sps.length);

                    ByteBuffer ppsb = videoEncodec.getOutputFormat().getByteBuffer("csd-1");
                    pps = new byte[ppsb.remaining()];
                    ppsb.get(pps, 0, pps.length);

//                    Log.d("godv", "sps : " + byteToHex(sps));
//                    Log.d("godv", "pps : " + byteToHex(pps));

                } else {
                    while (outputBufferIndex >= 0) {
                        ByteBuffer outputBuffer = videoEncodec.getOutputBuffers()[outputBufferIndex];
                        outputBuffer.position(videoBufferinfo.offset);
                        outputBuffer.limit(videoBufferinfo.offset + videoBufferinfo.size);

                        if (pts == 0) {
                            pts = videoBufferinfo.presentationTimeUs;
                        }
                        videoBufferinfo.presentationTimeUs = videoBufferinfo.presentationTimeUs - pts;

                        byte[] data = new byte[outputBuffer.remaining()];
                        outputBuffer.get(data, 0, data.length);
//                        Log.d("godv", "data : " + byteToHex(data));


                        //check whether this is a key frame
                        if ((videoBufferinfo.flags & MediaCodec.BUFFER_FLAG_KEY_FRAME) != 0) {
                            keyFrame = true;
                            if (encoder.get().onMediaInfoListener != null) {
                                encoder.get().onMediaInfoListener.onSPSPPSInfo(sps, pps);
                            }
                        }

                        //hand the encoded frame back through the listener
                        if (encoder.get().onMediaInfoListener != null) {
                            encoder.get().onMediaInfoListener.onVideoInfo(data, keyFrame);

                            encoder.get().onMediaInfoListener.onMediaTime(
                                    (int) videoBufferinfo.presentationTimeUs / 1000000);
                        }

                        videoEncodec.releaseOutputBuffer(outputBufferIndex, false);
                        outputBufferIndex = videoEncodec.dequeueOutputBuffer(videoBufferinfo, 0);
                    }
                }
            }
        }

        public void exit() {
            isExit = true;
        }
    }

    //audio encoding thread
    static class AudioEncodecThread extends Thread {
        //weak reference to the enclosing encoder
        private WeakReference<BasePushEncoder> encoder;
        //exit flag
        private boolean isExit;
        //AAC encoder and its buffer info
        private MediaCodec audioEncodec;
        private MediaCodec.BufferInfo bufferInfo;
        //pts
        private long pts;

        public AudioEncodecThread(WeakReference<BasePushEncoder> encoder) {
            this.encoder = encoder;
            audioEncodec = encoder.get().audioEncodec;
            bufferInfo = encoder.get().audioBufferinfo;
        }

        @Override
        public void run() {
            super.run();
            //reset state
            pts = 0;
            isExit = false;

            //start the encoder
            audioEncodec.start();

            while (true) {
                if (isExit) {
                    //release resources
                    audioEncodec.stop();
                    audioEncodec.release();
                    audioEncodec = null;

                    break;
                }

                int outputBufferIndex = audioEncodec.dequeueOutputBuffer(bufferInfo, 0);
                //output format changed
                if (outputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {

                } else {
                    while (outputBufferIndex >= 0) {

                        ByteBuffer outputBuffer = audioEncodec.getOutputBuffers()[outputBufferIndex];
                        outputBuffer.position(bufferInfo.offset);
                        outputBuffer.limit(bufferInfo.offset + bufferInfo.size);

                        if (pts == 0) {
                            pts = bufferInfo.presentationTimeUs;
                        }
                        bufferInfo.presentationTimeUs = bufferInfo.presentationTimeUs - pts;

                        /*************************************直播推流-audio-start*************************************/
                        byte[] data = new byte[outputBuffer.remaining()];
                        outputBuffer.get(data, 0, data.length);
                        if (encoder.get().onMediaInfoListener != null) {
                            encoder.get().onMediaInfoListener.onAudioInfo(data);
                        }

                        /*************************************直播推流-audio-end*************************************/


                        audioEncodec.releaseOutputBuffer(outputBufferIndex, false);
                        outputBufferIndex = audioEncodec.dequeueOutputBuffer(bufferInfo, 0);
                    }
                }
            }
        }

        public void exit() {
            isExit = true;
        }
    }

    public interface OnMediaInfoListener {
        void onMediaTime(int times);


        void onSPSPPSInfo(byte[] sps, byte[] pps);

        //second argument: whether this is a key frame (FLV tag 0x17) or a non-key frame (0x27)
        void onVideoInfo(byte[] data, boolean keyFrame);

        /*************************************直播推流-audio-start*************************************/
        void onAudioInfo(byte[] data);
        /*************************************直播推流-audio-end*************************************/

    }

    //convert bytes to a hex string (debugging helper, first ~20 bytes only)
    public static String byteToHex(byte[] bytes) {
        StringBuffer stringBuffer = new StringBuffer();
        for (int i = 0; i < bytes.length; i++) {
            String hex = Integer.toHexString(bytes[i] & 0xff);
            if (hex.length() == 1) {
                stringBuffer.append("0" + hex);
            } else {
                stringBuffer.append(hex);
            }
            if (i > 20) {
                break;
            }
        }
        return stringBuffer.toString();
    }

}

gpush.cpp

#include <jni.h>
#include <string>
#include "RtmpPush.h"
#include "GCallJava.h"

GCallJava *gCallJava = NULL;
JavaVM *javaVm = NULL;

RtmpPush *rtmpPush = NULL;

/*************************************直播推流-audio-start***********************************/
bool exits = true;

/*************************************直播推流-audio-end***********************************/

extern "C"
JNIEXPORT void JNICALL
Java_com_example_glivepush_push_PushVideo_initPush(JNIEnv *env, jobject thiz, jstring pushUrl_) {
    // TODO: implement initPush()
    const char *pushUrl = env->GetStringUTFChars(pushUrl_, 0);

/*************************************直播推流-audio-start***********************************/
    if (gCallJava == NULL) {

        exits = false;
        gCallJava = new GCallJava(javaVm, env, &thiz);
        rtmpPush = new RtmpPush(pushUrl, gCallJava);
        rtmpPush->init();
    }
/*************************************直播推流-audio-end***********************************/

    env->ReleaseStringUTFChars(pushUrl_, pushUrl);
}

extern "C"
JNIEXPORT jint JNICALL
JNI_OnLoad(JavaVM *vm, void *reserved) {
    JNIEnv *env;
    javaVm = vm;
    if (vm->GetEnv((void **) &env, JNI_VERSION_1_6) != JNI_OK) {
        return -1;
    }
    return JNI_VERSION_1_6;
}

extern "C"
JNIEXPORT void JNICALL
JNI_OnUnload(JavaVM *vm, void *reserved) {
    javaVm = NULL;
}

extern "C"
JNIEXPORT void JNICALL
Java_com_example_glivepush_push_PushVideo_pushSPSPPS(JNIEnv *env, jobject thiz, jbyteArray sps_,
                                                     jint sps_len, jbyteArray pps_, jint pps_len) {
    // TODO: implement pushSPSPPS()
    jbyte *sps = env->GetByteArrayElements(sps_, NULL);
    jbyte *pps = env->GetByteArrayElements(pps_, NULL);

    if (rtmpPush != NULL && !exits) {
        rtmpPush->pushSPSPPS(reinterpret_cast<char *>(sps), sps_len, reinterpret_cast<char *>(pps),
                             pps_len);
    }

    env->ReleaseByteArrayElements(sps_, sps, 0);
    env->ReleaseByteArrayElements(pps_, pps, 0);
}
extern "C"
JNIEXPORT void JNICALL
Java_com_example_glivepush_push_PushVideo_pushVideoData(JNIEnv *env, jobject thiz, jbyteArray data_,
                                                        jint data_len, jboolean keyFrame) {
    // TODO: implement pushVideoData()
    jbyte *data = env->GetByteArrayElements(data_, NULL);
    if (rtmpPush != NULL && !exits) {
        rtmpPush->pushVideoData(reinterpret_cast<char *>(data), data_len, keyFrame);
    }

    env->ReleaseByteArrayElements(data_, data, 0);
}
/*************************************直播推流-audio-start***********************************/
extern "C"
JNIEXPORT void JNICALL
Java_com_example_glivepush_push_PushVideo_pushAudioData(JNIEnv *env, jobject thiz, jbyteArray data_,
                                                        jint data_len) {
    // TODO: implement pushAudioData()
    jbyte *data = env->GetByteArrayElements(data_, NULL);
    if (rtmpPush != NULL && !exits) {
        rtmpPush->pushAudioData(reinterpret_cast<char *>(data), data_len);
    }
    env->ReleaseByteArrayElements(data_, data, 0);
}
extern "C"
JNIEXPORT void JNICALL
Java_com_example_glivepush_push_PushVideo_pushStop(JNIEnv *env, jobject thiz) {
    // TODO: implement pushStop()
    if (rtmpPush != NULL) {
        exits = true;
        rtmpPush->pushStop();
        delete (rtmpPush);
        rtmpPush = NULL;
        delete (gCallJava);
        gCallJava = NULL;
    }
}
/*************************************直播推流-audio-end***********************************/
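
PushVideo.java itself does not appear in this post. For orientation, here is a sketch of what the Java side of the bridge presumably looks like, inferred from the native signatures in gpush.cpp and from the calls made in LivePushActivity; the library name "gpush" and the callback method names are assumptions, not the original source:

package com.example.glivepush.push;

//Sketch only: inferred from the native entry points above, not the project's actual file.
public class PushVideo {

    static {
        System.loadLibrary("gpush"); //assumed native library name
    }

    private GConnectListener gConnectListener;

    public void setgConnectListener(GConnectListener gConnectListener) {
        this.gConnectListener = gConnectListener;
    }

    public void initLivePush(String url) {
        initPush(url);
    }

    public void pushSPSPPS(byte[] sps, byte[] pps) {
        pushSPSPPS(sps, sps.length, pps, pps.length);
    }

    public void pushVideoData(byte[] data, boolean keyFrame) {
        pushVideoData(data, data.length, keyFrame);
    }

    public void pushAudioData(byte[] data) {
        pushAudioData(data, data.length);
    }

    public void stopPush() {
        pushStop();
    }

    //Presumably invoked from native code through GCallJava; these method names are assumptions.
    public void onConnecting() {
        if (gConnectListener != null) gConnectListener.onConnecting();
    }

    public void onConnectSuccess() {
        if (gConnectListener != null) gConnectListener.onConnectSuccess();
    }

    public void onConnectFail(String msg) {
        if (gConnectListener != null) gConnectListener.onConnectFail(msg);
    }

    private native void initPush(String pushUrl);

    private native void pushSPSPPS(byte[] sps, int sps_len, byte[] pps, int pps_len);

    private native void pushVideoData(byte[] data, int data_len, boolean keyFrame);

    private native void pushAudioData(byte[] data, int data_len);

    private native void pushStop();
}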

RtmpPush.h

#ifndef RTMPSUC_RTMPPUSH_H
#define RTMPSUC_RTMPPUSH_H

#include <malloc.h>
#include <cstring>
#include "GQueue.h"
#include "pthread.h"
#include "GCallJava.h"

extern "C" {
#include "librtmp/rtmp.h"
};

class RtmpPush {
public:
    RTMP *rtmp = NULL;
    char *url = NULL;
    GQueue *queue = NULL;

    pthread_t push_thread;

    GCallJava *gCallJava = NULL;

    bool startPushing = false;

    long startTime = 0;

public:

    RtmpPush(const char *url, GCallJava *gCallJava);

    ~RtmpPush();

    void init();


    //send SPS/PPS (the AVC sequence header)
    void pushSPSPPS(char *sps, int sps_len, char *pps, int pps_len);

    //send an encoded video frame
    void pushVideoData(char *data, int data_len, bool keyFrame);

    /*************************************直播推流-audio-start***********************************/
    void pushAudioData(char *data, int data_len);

    void pushStop();
    /*************************************直播推流-audio-end***********************************/
};

#endif //RTMPSUC_RTMPPUSH_H

RtmpPush.cpp

#include "RtmpPush.h"

RtmpPush::RtmpPush(const char *url, GCallJava *gCallJava) {
    this->url = static_cast<char *>(malloc(512));
    strcpy(this->url, url);
    this->queue = new GQueue();

    this->gCallJava = gCallJava;
}

RtmpPush::~RtmpPush() {
    queue->notifyQueue();
    queue->clearQueue();
    delete queue;
    free(url);
}

void *callBackPush(void *data) {
    RtmpPush *rtmpPush = static_cast<RtmpPush *>(data);

    /*************************************直播推流-video-start***********************************/
    rtmpPush->startPushing = false;
    /*************************************直播推流-video-end***********************************/

    rtmpPush->gCallJava->onConnecting(G_THREAD_CHILD);

    rtmpPush->rtmp = RTMP_Alloc();    //allocate the RTMP session
    RTMP_Init(rtmpPush->rtmp);        //initialize it
    rtmpPush->rtmp->Link.timeout = 10;   //connection timeout in seconds
    rtmpPush->rtmp->Link.lFlags |= RTMP_LF_LIVE;  //mark the stream as live
    RTMP_SetupURL(rtmpPush->rtmp, rtmpPush->url);    //set the push URL
    RTMP_EnableWrite(rtmpPush->rtmp);    //enable write (publish) mode
    if (!RTMP_Connect(rtmpPush->rtmp, NULL)) {    //connect to the server; returns 0 on failure
//        LOGE("can not connect the url %s", rtmpPush->url);
        rtmpPush->gCallJava->onConnectFail("can not connect the url");
        goto end;
    }
    if (!RTMP_ConnectStream(rtmpPush->rtmp, 0)) {   //connect to the stream; returns 0 on failure
        rtmpPush->gCallJava->onConnectFail("can not connect the stream of the service");
        goto end;
    }

    rtmpPush->gCallJava->onConnectSuccess();
    /*************************************直播推流-video-start***********************************/
    //start pushing packets from the queue
    rtmpPush->startPushing = true;

    rtmpPush->startTime = RTMP_GetTime();

    while (true) {
        if (!rtmpPush->startPushing) {
            break;
        }
        RTMPPacket *packet = NULL;
        packet = rtmpPush->queue->getRtmpPacket();
        if (packet != NULL) {
            int result = RTMP_SendPacket(rtmpPush->rtmp, packet, 1);
//            LOGD("RTMP_SendPacket result is %d", result);
            RTMPPacket_Free(packet);
            packet = NULL;
        }
    }

    /*************************************直播推流-video-end***********************************/


    end:
    RTMP_Close(rtmpPush->rtmp);
    RTMP_Free(rtmpPush->rtmp);
    rtmpPush->rtmp = NULL;
    pthread_exit(&rtmpPush->push_thread);
}

void RtmpPush::init() {

    //gCallJava->onConnecting(G_THREAD_MAIN);
    pthread_create(&push_thread, NULL, callBackPush, this);
}

//send SPS/PPS (the AVC sequence header)
void RtmpPush::pushSPSPPS(char *sps, int sps_len, char *pps, int pps_len) {
    //16 extra bytes for the AVCDecoderConfigurationRecord header; SPS/PPS is sent as a key frame
    int bodySize = sps_len + pps_len + 16;
    //allocate and reset the RTMPPacket
    RTMPPacket *packet = static_cast<RTMPPacket *>(malloc(sizeof(RTMPPacket)));
    RTMPPacket_Alloc(packet, bodySize);
    RTMPPacket_Reset(packet);

    char *body = packet->m_body;

    int i = 0;
    //0x17: frame type = 1 (key frame, high 4 bits), CodecID = 7 (AVC, low 4 bits)
    body[i++] = 0x17;

    //AVCPacketType = 0 (sequence header), followed by a 3-byte composition time of 0
    body[i++] = 0x00;
    body[i++] = 0x00;
    body[i++] = 0x00;
    body[i++] = 0x00;

    //configurationVersion, AVCProfileIndication, profile_compatibility, AVCLevelIndication
    body[i++] = 0x01;
    body[i++] = sps[1];
    body[i++] = sps[2];
    body[i++] = sps[3];

    //lengthSizeMinusOne: NALU length field uses 4 bytes
    body[i++] = 0xff;
    //number of SPS (low 5 bits = 1)
    body[i++] = 0xe1;
    //SPS length (2 bytes)
    body[i++] = (sps_len >> 8) & 0xff;
    body[i++] = sps_len & 0xff;
    //SPS payload
    memcpy(&body[i], sps, sps_len);
    i += sps_len;

    //number of PPS
    body[i++] = 0x01;
    //PPS length (2 bytes)
    body[i++] = (pps_len >> 8) & 0xff;
    body[i++] = pps_len & 0xff;
    //PPS payload
    memcpy(&body[i], pps, pps_len);

    packet->m_packetType = RTMP_PACKET_TYPE_VIDEO;
    packet->m_nBodySize = bodySize;
    //timestamp
    packet->m_nTimeStamp = 0;
    packet->m_hasAbsTimestamp = 0;

    packet->m_nChannel = 0x04; //channel for audio/video data
    packet->m_headerType = RTMP_PACKET_SIZE_MEDIUM;
    packet->m_nInfoField2 = rtmp->m_stream_id;

    //enqueue the packet
    queue->putRtmpPacket(packet);
}

void RtmpPush::pushVideoData(char *data, int data_len, bool keyFrame) {
    //9 extra bytes for the FLV video tag header plus the NALU length field
    int bodySize = data_len + 9;
    //allocate and reset the RTMPPacket
    RTMPPacket *packet = static_cast<RTMPPacket *>(malloc(sizeof(RTMPPacket)));
    RTMPPacket_Alloc(packet, bodySize);
    RTMPPacket_Reset(packet);

    char *body = packet->m_body;
    int i = 0;

    //0x17: key frame, 0x27: inter frame; high 4 bits = frame type, low 4 bits = CodecID 7 (AVC)
    if (keyFrame) {
        body[i++] = 0x17;
    } else {
        body[i++] = 0x27;
    }

    //AVCPacketType = 1 (NALU), followed by a 3-byte composition time of 0
    body[i++] = 0x01;
    body[i++] = 0x00;
    body[i++] = 0x00;
    body[i++] = 0x00;

    //NALU length (4 bytes, big-endian)
    body[i++] = (data_len >> 24) & 0xff;
    body[i++] = (data_len >> 16) & 0xff;
    body[i++] = (data_len >> 8) & 0xff;
    body[i++] = data_len & 0xff;
    //raw H.264 NALU data
    memcpy(&body[i], data, data_len);

    packet->m_packetType = RTMP_PACKET_TYPE_VIDEO;
    packet->m_nBodySize = bodySize;
    //timestamp relative to the start of the push
    packet->m_nTimeStamp = RTMP_GetTime() - startTime;
    packet->m_hasAbsTimestamp = 0;

    packet->m_nChannel = 0x04; //channel for audio/video data
    packet->m_headerType = RTMP_PACKET_SIZE_LARGE;
    packet->m_nInfoField2 = rtmp->m_stream_id;

    //enqueue the packet
    queue->putRtmpPacket(packet);
}

/*************************************直播推流-audio-start***********************************/
void RtmpPush::pushAudioData(char *data, int data_len) {
    //2 extra bytes for the FLV audio tag header
    int bodySize = data_len + 2;
    //allocate and reset the RTMPPacket
    RTMPPacket *packet = static_cast<RTMPPacket *>(malloc(sizeof(RTMPPacket)));
    RTMPPacket_Alloc(packet, bodySize);
    RTMPPacket_Reset(packet);
    char *body = packet->m_body;

    //0xAF: SoundFormat = 10 (AAC), SoundRate = 3 (44 kHz), SoundSize = 1 (16-bit), SoundType = 1 (stereo)
    body[0] = 0xAF;
    //AACPacketType = 1: raw AAC frame (0 would mark the AudioSpecificConfig sequence header)
    body[1] = 0x01;
    memcpy(&body[2], data, data_len);

    packet->m_packetType = RTMP_PACKET_TYPE_AUDIO;
    packet->m_nBodySize = bodySize;
    //timestamp relative to the start of the push
    packet->m_nTimeStamp = RTMP_GetTime() - startTime;
    packet->m_hasAbsTimestamp = 0;

    packet->m_nChannel = 0x04; //channel for audio/video data
    packet->m_headerType = RTMP_PACKET_SIZE_LARGE;
    packet->m_nInfoField2 = rtmp->m_stream_id;

    queue->putRtmpPacket(packet);
}

void RtmpPush::pushStop() {
    startPushing = false;
    queue->notifyQueue();
    //wait for the push thread to finish
    pthread_join(push_thread, NULL);
}
/*************************************直播推流-audio-end***********************************/
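
The 0xAF byte written by pushAudioData packs the four FLV AudioTagHeader fields into a single byte: SoundFormat (4 bits), SoundRate (2 bits), SoundSize (1 bit) and SoundType (1 bit). A quick Java check of how those fields combine for 44.1 kHz, 16-bit, stereo AAC (illustration only, not part of the project source):

//Illustration: composing the FLV AudioTagHeader byte used in pushAudioData.
int soundFormat = 10; //10 = AAC
int soundRate = 3;    //3 = 44 kHz
int soundSize = 1;    //1 = 16-bit samples
int soundType = 1;    //1 = stereo
int header = (soundFormat << 4) | (soundRate << 2) | (soundSize << 1) | soundType; //= 0xAF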

 
