Audio/Video Series: Learning RTMP Live Push Streaming (Hardware Encoding)

The previous post covered how to set up a streaming-media server; this one uses that server to implement RTMP audio/video push streaming. The result is shown below: on the right is the emulator acting as the push client, and on the left VLC is used to check the stream. The video source is MediaProjection and the audio source is AudioRecord; an earlier post covered using MediaProjection for screen mirroring.
[Figure: emulator push client on the right, VLC playback on the left]

1. Introduction to RTMP


Real Time Messaging Protocol (RTMP) is an application-layer protocol on top of TCP. Over a reliable stream transport it provides a bidirectional, multi-channel message service, intended to carry time-stamped video, audio, and data message streams between communicating endpoints.

rtmpdump is a toolkit for working with RTMP streams; it supports rtmp://, rtmpt://, rtmpe://, rtmpte://, and rtmps:// URLs (download link).

[Figure: rtmpdump connection and packet-sending flow]
[Figure: rtmpdump packet format]
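
Since the original flow diagrams are not reproduced here, the sketch below shows the typical librtmp call sequence for publishing a stream (error handling omitted; the URL is a placeholder). Section 4.3 implements exactly this sequence.

RTMP *rtmp = RTMP_Alloc();
RTMP_Init(rtmp);
rtmp->Link.timeout = 10;                          //connect timeout in seconds
RTMP_SetupURL(rtmp, "rtmp://server/live/stream"); //placeholder push URL
RTMP_EnableWrite(rtmp);                           //we publish instead of play
RTMP_Connect(rtmp, NULL);                         //TCP connect + RTMP handshake
RTMP_ConnectStream(rtmp, 0);                      //createStream / publish

//for every encoded audio/video frame: fill an RTMPPacket and send it
//RTMP_SendPacket(rtmp, packet, 1);               //1 = queue inside librtmp

RTMP_Close(rtmp);
RTMP_Free(rtmp);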

2. Importing rtmpdump


Import the librtmp source code directly:

#child CMakeLists.txt (for the librtmp sources)
#disable SSL: rtmps / encrypted RTMP is not supported; pass -DNO_CRYPTO to turn off crypto support
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -DNO_CRYPTO")
file(GLOB rtmp_source  *.c)
add_library(rtmp
        STATIC  #static library
        ${rtmp_source})

#main CMakeLists.txt pulls in the librtmp sub-project
cmake_minimum_required(VERSION 3.10.2)

project("rtmplearn")

add_subdirectory(librtmp)

add_library(
             native-lib
             SHARED
             native-lib.cpp )

find_library(
              log-lib
              log )

target_link_libraries(
                       native-lib
                       ${log-lib}
                        rtmp)
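
For reference, one possible layout of the cpp directory that matches the two CMake files above (the exact paths are an assumption, not spelled out in the original post):

app/src/main/cpp/
    CMakeLists.txt          # main CMakeLists shown above
    native-lib.cpp          # JNI glue: connect() / sendData()
    librtmp/
        CMakeLists.txt      # child CMakeLists shown above
        rtmp.c, rtmp.h, amf.c, log.c, ...   # rtmpdump/librtmp sources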

3. The Encoding Layer


3.1 Video Encoding

public class VideoCodec extends Thread {
    //screen-capture (MediaProjection) instance
    private MediaProjection mediaProjection;
    //virtual display the encoder reads its frames from
    private VirtualDisplay virtualDisplay;

    private MediaCodec mediaCodec;
    //reference to the transport layer
    private ScreenLive screenLive;

    public VideoCodec(ScreenLive screenLive) {
        this.screenLive = screenLive;
    }

    //time of the last forced I-frame request
    private long timeStamp;
    //timestamp (ms) of the first encoded frame, used as the stream start time
    private long startTime;
    //whether we are currently live
    private boolean isLiving;

    @Override
    public void run() {
        isLiving = true;
        mediaCodec.start();
        MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
        while (isLiving) {
            if (System.currentTimeMillis() - timeStamp >= 2000) {
                Bundle params = new Bundle();
                //request a sync (I) frame
                params.putInt(MediaCodec.PARAMETER_KEY_REQUEST_SYNC_FRAME, 0);
                //ask the encoder (DSP) to produce an I-frame immediately
                mediaCodec.setParameters(params);
                timeStamp = System.currentTimeMillis();
            }
            int index = mediaCodec.dequeueOutputBuffer(bufferInfo, 100000);
            if (index >= 0) {
                if (startTime == 0) {
                    //presentationTimeUs is in microseconds; RTMP timestamps are in milliseconds
                    startTime = bufferInfo.presentationTimeUs / 1000;
                }
                ByteBuffer buffer = mediaCodec.getOutputBuffer(index);
                byte[] outData = new byte[bufferInfo.size];
                buffer.get(outData);
                //wrap the encoded frame in an RTMPPackage
                RTMPPackage rtmpPackage = new RTMPPackage(outData, (bufferInfo.presentationTimeUs / 1000) - startTime);
                rtmpPackage.setType(RTMPPackage.RTMP_PACKET_TYPE_VIDEO);
                //hand the finished packet to the transport queue
                screenLive.addPackage(rtmpPackage);
                mediaCodec.releaseOutputBuffer(index, false);
            }
        }
        isLiving = false;
        mediaCodec.stop();
        mediaCodec.release();
        mediaCodec = null;
        virtualDisplay.release();
        virtualDisplay = null;
        mediaProjection.stop();
        mediaProjection = null;
        startTime = 0;
    }

    public void startLive(MediaProjection mediaProjection) {
        this.mediaProjection = mediaProjection;
        //https://cloud.tencent.com/document/product/267/7969  resolution guidelines for live streaming; "super clear" (720x1280) is used here
        MediaFormat format = MediaFormat.createVideoFormat(MediaFormat.MIMETYPE_VIDEO_AVC,
                720,
                1280);

        format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
                MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
        //bitrate
        format.setInteger(MediaFormat.KEY_BIT_RATE, 400_000);
        //Frame rate is kept fairly low. Short-form video favors a high frame rate (e.g. 60) and a long GOP
        //(sparse I-frames) for sharp pictures at a small file size; live streaming needs near-instant start-up,
        //so the I-frame interval stays within 2 seconds (many I-frames) and the frame rate is lowered (e.g. 15).
        format.setInteger(MediaFormat.KEY_FRAME_RATE, 15);
        //I-frame interval in seconds
        format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 2);

        try {
            mediaCodec = MediaCodec.createEncoderByType("video/avc");
            mediaCodec.configure(format, null, null,
                    MediaCodec.CONFIGURE_FLAG_ENCODE);
            Surface surface = mediaCodec.createInputSurface();
            virtualDisplay = mediaProjection.createVirtualDisplay(
                    "screen-codec",
                    720, 1280, 1,
                    DisplayManager.VIRTUAL_DISPLAY_FLAG_PUBLIC,
                    surface, null, null);

        } catch (IOException e) {
            e.printStackTrace();
        }
        LiveTaskManager.getInstance().execute(this);
    }

    public void release(){
        isLiving = false;
    }
}
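
VideoCodec, AudioCodec and ScreenLive all submit themselves to LiveTaskManager, which the post does not list. It is presumably just a shared thread pool; a minimal sketch might look like this:

public class LiveTaskManager {
    private static volatile LiveTaskManager instance;
    //one cached pool shared by the video, audio and transport tasks
    private final ExecutorService executor = Executors.newCachedThreadPool();

    public static LiveTaskManager getInstance() {
        if (instance == null) {
            synchronized (LiveTaskManager.class) {
                if (instance == null) {
                    instance = new LiveTaskManager();
                }
            }
        }
        return instance;
    }

    public void execute(Runnable runnable) {
        executor.execute(runnable);
    }
}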

3.2 Audio Encoding

public class AudioCodec extends Thread {
    private static final String TAG = "ruby";

    private MediaCodec mediaCodec;

    private int minBufferSize;
    private boolean isRecoding;
    private AudioRecord audioRecord;
    private long startTime;
    //transport layer
    private ScreenLive screenLive;
    public AudioCodec(ScreenLive screenLive) {
        this.screenLive = screenLive;
    }
    public void startLive() {
		//sample rate 44100 Hz, 1 channel
        MediaFormat format = MediaFormat.createAudioFormat(MediaFormat.MIMETYPE_AUDIO_AAC, 44100, 1);
        //AAC profile (AAC-LC)
        format.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel
                .AACObjectLC);
        //bitrate
        format.setInteger(MediaFormat.KEY_BIT_RATE, 64_000);
        try {
            mediaCodec = MediaCodec.createEncoderByType(MediaFormat.MIMETYPE_AUDIO_AAC);
            mediaCodec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
            mediaCodec.start();
            minBufferSize = AudioRecord.getMinBufferSize(44100,
                    AudioFormat.CHANNEL_IN_MONO,
                    AudioFormat.ENCODING_PCM_16BIT);
            audioRecord = new AudioRecord(
             		//phones usually have two microphones; the top one is for noise cancellation, MIC is the regular bottom one
                    MediaRecorder.AudioSource.MIC, 44100,
                    AudioFormat.CHANNEL_IN_MONO,  //mono
                    AudioFormat.ENCODING_PCM_16BIT, minBufferSize);

        } catch (Exception e) {
        }
        LiveTaskManager.getInstance().execute(this);
    }
    @Override
    public void run() {
        isRecoding = true;

        MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
        RTMPPackage rtmpPackage = new RTMPPackage();
		//send a fixed two-byte packet (the AAC AudioSpecificConfig) to announce that audio data is about to follow
        byte[] audioDecoderSpecificInfo = {0x12, 0x08}; //0x12 0x08 = AAC LC, 44100 Hz, mono
        rtmpPackage.setBuffer(audioDecoderSpecificInfo);
        rtmpPackage.setType(RTMPPackage.RTMP_PACKET_TYPE_AUDIO_HEAD);
        screenLive.addPackage(rtmpPackage);

        Log.i(TAG, "开始录音  minBufferSize: "+minBufferSize);
        audioRecord.startRecording();
		//容器 固定
        byte[] buffer = new byte[minBufferSize];
        while (isRecoding) {
        	//麦克风数据读取出来,pcm 数据编码 	
            int len =audioRecord.read(buffer, 0, buffer.length);
            if (len <= 0) {
                continue;
            }
            //get a free input buffer without blocking
            int index = mediaCodec.dequeueInputBuffer(0);
            if (index >= 0) {
                ByteBuffer inputBuffer = mediaCodec.getInputBuffer(index);
                inputBuffer.clear();
                inputBuffer.put(buffer, 0, len);
                //queue the buffer for encoding once it is filled
                mediaCodec.queueInputBuffer(index, 0, len,
                        System.nanoTime() / 1000, 0);
            }
            index = mediaCodec.dequeueOutputBuffer(bufferInfo, 0);

            while (index >= 0 && isRecoding) {
                ByteBuffer outputBuffer = mediaCodec.getOutputBuffer(index);
                byte[] outData = new byte[bufferInfo.size];
                outputBuffer.get(outData); //the encoded AAC data
                if (startTime == 0) {
                    startTime = bufferInfo.presentationTimeUs / 1000;
                }
                rtmpPackage = new RTMPPackage();
                rtmpPackage.setBuffer(outData);
                rtmpPackage.setType(RTMPPackage.RTMP_PACKET_TYPE_AUDIO_DATA);
                //timestamp (ms) relative to stream start
                long tms = (bufferInfo.presentationTimeUs / 1000) - startTime;
                rtmpPackage.setTms(tms);
                screenLive.addPackage(rtmpPackage);
                mediaCodec.releaseOutputBuffer(index, false);
                index = mediaCodec.dequeueOutputBuffer(bufferInfo, 0);
            }
        }
        audioRecord.stop();
        audioRecord.release();
        audioRecord = null;
        mediaCodec.stop();
        mediaCodec.release();
        mediaCodec = null;
        startTime = 0;
        isRecoding = false;
    }

    public void release(){
        isRecoding = false;
    }
}
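
The two bytes {0x12, 0x08} sent as the audio header above are the AAC AudioSpecificConfig for the format configured here. As an illustration (this helper is not part of the original code), they can be derived from the profile, sample-rate index and channel count:

//AudioSpecificConfig, 2 bytes: 5 bits object type | 4 bits sample-rate index | 4 bits channel config | 3 bits zero
static byte[] buildAudioSpecificConfig(int objectType, int freqIndex, int channels) {
    return new byte[]{
            (byte) ((objectType << 3) | (freqIndex >> 1)),
            (byte) (((freqIndex & 0x01) << 7) | (channels << 3))
    };
}
//buildAudioSpecificConfig(2, 4, 1) -> {0x12, 0x08}   (2 = AAC LC, 4 = 44100 Hz, 1 = mono)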

4. The Transport Layer


4.1 Wrapping the Encoded Data

public class RTMPPackage {
    private byte[] buffer;
    private long tms;
    //packet type: video, audio header, or audio data
    private int type;
    public static final int RTMP_PACKET_TYPE_AUDIO_DATA = 2;
    public static final int RTMP_PACKET_TYPE_AUDIO_HEAD = 1;
    public static final int RTMP_PACKET_TYPE_VIDEO = 0;
    public RTMPPackage(byte[] buffer, long tms) {
        this.buffer = buffer;
        this.tms = tms;
    }

    public RTMPPackage( ) {
    }

    public int getType() {
        return type;
    }

    public void setType(int type) {
        this.type = type;
    }

    public byte[] getBuffer() {
        return buffer;
    }

    public void setBuffer(byte[] buffer) {
        this.buffer = buffer;
    }

    public long getTms() {
        return tms;
    }

    public void setTms(long tms) {
        this.tms = tms;
    }
}

4.2 The Java Transport Layer

public class ScreenLive extends Thread {
    private String url;
    private MediaProjection mediaProjection;
    private VideoCodec videoCodec;
    private AudioCodec audioCodec;

    static {
        System.loadLibrary("native-lib");
    }

    // blocking queue, producer/consumer pattern
    private LinkedBlockingQueue<RTMPPackage> queue = new LinkedBlockingQueue<>();

    //whether we are currently pushing
    public boolean isLiving;

    //producer entry point: the codecs drop their packets here
    public void addPackage(RTMPPackage rtmpPackage) {

        if (!isLiving) {
            return;
        }
        queue.add(rtmpPackage);
    }

    //start pushing
    public void startLive(String url, MediaProjection mediaProjection) {
        this.url = url;
        this.mediaProjection = mediaProjection;
        LiveTaskManager.getInstance().execute(this);
    }

    @Override
    public void run() {
        //connect to the push URL
        if (!connect(url)) {
            //connection failed
            return;
        }
        //start the encoder threads
        videoCodec = new VideoCodec(this);
        videoCodec.startLive(mediaProjection);
        audioCodec = new AudioCodec(this);
        audioCodec.startLive();
        isLiving = true;
        while (isLiving) {
            RTMPPackage rtmpPackage = null;
            try {
                rtmpPackage = queue.take();
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
            if (rtmpPackage != null && rtmpPackage.getBuffer() != null && rtmpPackage.getBuffer().length != 0) {
                sendData(rtmpPackage.getBuffer(), rtmpPackage.getBuffer().length, rtmpPackage.getTms(), rtmpPackage.getType());
            }
        }
    }

    public void release(){
        isLiving = false;
        videoCodec.release();
        audioCodec.release();
    }

    private native boolean sendData(byte[] data, int len, long tms, int type);

    private native boolean connect(String url);
}
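
For completeness, here is a rough sketch of how an Activity could obtain the MediaProjection and start ScreenLive. This code is not in the original post, the request code and RTMP URL are placeholders, and the RECORD_AUDIO permission must also be granted for AudioRecord to work:

public class MainActivity extends AppCompatActivity {
    private static final int REQ_SCREEN_CAPTURE = 100; //arbitrary request code
    private MediaProjectionManager projectionManager;
    private final ScreenLive screenLive = new ScreenLive();

    //e.g. wired to a "start live" button
    public void startLive(View view) {
        projectionManager = (MediaProjectionManager) getSystemService(MEDIA_PROJECTION_SERVICE);
        //ask the user for permission to capture the screen
        startActivityForResult(projectionManager.createScreenCaptureIntent(), REQ_SCREEN_CAPTURE);
    }

    @Override
    protected void onActivityResult(int requestCode, int resultCode, Intent data) {
        super.onActivityResult(requestCode, resultCode, data);
        if (requestCode == REQ_SCREEN_CAPTURE && resultCode == RESULT_OK) {
            MediaProjection mediaProjection = projectionManager.getMediaProjection(resultCode, data);
            //placeholder push URL; point it at your own RTMP server
            screenLive.startLive("rtmp://192.168.1.100/live/test", mediaProjection);
        }
    }
}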

4.3 The Native Transport Layer


4.3.1 Connecting to the Server
extern "C"
JNIEXPORT jboolean JNICALL
Java_com_example_rtmplearn_ScreenLive_connect(JNIEnv *env, jobject thiz, jstring url_) {

    const char *url = env->GetStringUTFChars(url_, 0);
    int ret;
    do {
		//allocate the struct (malloc does not zero the memory)
        live = (Live *) malloc(sizeof(Live));
        //so zero it out explicitly
        memset(live, 0, sizeof(Live));

        live->rtmp = RTMP_Alloc();
        RTMP_Init(live->rtmp);
        live->rtmp->Link.timeout = 10;
        if (!(ret = RTMP_SetupURL(live->rtmp, (char *) url))) break;
        RTMP_EnableWrite(live->rtmp);
        if (!(ret = RTMP_Connect(live->rtmp, 0))) break;
        if (!(ret = RTMP_ConnectStream(live->rtmp, 0))) break;
        //if we get here, the connection succeeded
    } while (0);
    if (!ret && live) {
        free(live);
        live = nullptr;
    }
    env->ReleaseStringUTFChars(url_, url);
    return ret;
}
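
The global `live` pointer used in connect() and in the functions below belongs to a small helper struct that the post does not list. Judging from how its fields are used, it looks roughly like this (a sketch; the exact definition may differ):

//holds the RTMP session plus the SPS/PPS cached from the encoder's config frame
typedef struct {
    RTMP *rtmp;
    int8_t *sps;
    int8_t *pps;
    int sps_len;
    int pps_len;
} Live;

Live *live = nullptr; //global session, created in connect()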
4.3.2 Parsing SPS and PPS
//cache the SPS and PPS from the encoder's codec-config frame
void prepareVideo(int8_t *data, int len, Live *live) {

    for (int i = 0; i < len; i++) {
		//stay in bounds
        if (i + 4 < len) {
        	//look for the next 4-byte start code; the NAL unit after it should be the PPS (0x68)
            if (data[i] == 0x00 && data[i + 1] == 0x00
                && data[i + 2] == 0x00
                && data[i + 3] == 0x01) {
                if (data[i + 4] == 0x68) {
                    live->sps_len = i - 4;
					//allocate the SPS buffer
                    live->sps = static_cast<int8_t *>(malloc(live->sps_len));
					//the SPS starts right after the first 4-byte start code
                    memcpy(live->sps, data + 4, live->sps_len);

					//the PPS is whatever remains after the SPS and its own 4-byte start code
                    live->pps_len = len - (4 + live->sps_len) - 4;
					//allocate the PPS buffer
                    live->pps = static_cast<int8_t *>(malloc(live->pps_len));

                    memcpy(live->pps, data + 4 + live->sps_len + 4, live->pps_len);
                    LOGI("sps:%d pps:%d", live->sps_len, live->pps_len);
                    break;
                }
            }
        }
    }
}
4.3.3 Building the SPS/PPS RTMPPacket
//send the SPS and PPS over RTMP (the AVC sequence header)
RTMPPacket *createSpsPpsPackage(Live *live) {
	//packet carrying the SPS and PPS
    int body_size = 16 + live->sps_len + live->pps_len;
    RTMPPacket *packet = (RTMPPacket *) malloc(sizeof(RTMPPacket));
	//allocate and zero the packet body
    RTMPPacket_Alloc(packet, body_size);
    int i = 0;
    packet->m_body[i++] = 0x17; //FrameType (1 = keyframe) + CodecID (7 = AVC)
    //AVCPacketType: 0x00 = AVC sequence header
    packet->m_body[i++] = 0x00;
    //CompositionTime, 3 bytes, 0 for a sequence header
    packet->m_body[i++] = 0x00;
    packet->m_body[i++] = 0x00;
    packet->m_body[i++] = 0x00;
    //configurationVersion, always 0x01
    packet->m_body[i++] = 0x01;
	//the next three bytes are copied straight from the SPS
    packet->m_body[i++] = live->sps[1]; //profile (baseline / main / high)
    packet->m_body[i++] = live->sps[2]; //profile_compatibility
    packet->m_body[i++] = live->sps[3]; //level
    packet->m_body[i++] = 0xFF; //6 bits reserved (111111) + 2 bits NAL length size minus one (3 -> 4-byte lengths)
    packet->m_body[i++] = 0xE1; //3 bits reserved (111) + 5 bits number of SPS, one SPS here
	//SPS length, high byte
    packet->m_body[i++] = (live->sps_len >> 8) & 0xFF;
	//SPS length, low byte
    packet->m_body[i++] = live->sps_len & 0xff;
	//copy the SPS itself
    memcpy(&packet->m_body[i], live->sps, live->sps_len);
    i += live->sps_len;
	//PPS
    packet->m_body[i++] = 0x01; //number of PPS
    //PPS length, high byte then low byte
    packet->m_body[i++] = (live->pps_len >> 8) & 0xff;
    packet->m_body[i++] = live->pps_len & 0xff;
	//copy the PPS itself
    memcpy(&packet->m_body[i], live->pps, live->pps_len);
	//remaining packet attributes
	//this is a video packet
    packet->m_packetType = RTMP_PACKET_TYPE_VIDEO;
    packet->m_nBodySize = body_size;
	//channel 0x04 is used for video here
    packet->m_nChannel = 0x04;
    packet->m_nTimeStamp = 0;
    packet->m_hasAbsTimestamp = 0;
    packet->m_headerType = RTMP_PACKET_SIZE_LARGE;
    packet->m_nInfoField2 = live->rtmp->m_stream_id;
    return packet;
}
4.3.4 Building the Video Frame RTMPPacket
//wrap one encoded H.264 frame for RTMP
RTMPPacket *createVideoPackage(int8_t *buf, int len, const long tms, Live *live) {
    //skip the 4-byte start code; only the raw NAL unit goes into the packet
    buf += 4;
    len -= 4;
    RTMPPacket *packet = (RTMPPacket *) malloc(sizeof(RTMPPacket));
    //body: 5-byte FLV video tag header + 4-byte NAL length + NAL data
    int body_size = len + 9;
	//allocate the packet body
    RTMPPacket_Alloc(packet, body_size);

    if (buf[0] == 0x65) {
        //keyframe: FrameType 1 + CodecID 7 (AVC)
        packet->m_body[0] = 0x17;
    } else {
        //inter frame: FrameType 2 + CodecID 7 (AVC)
        packet->m_body[0] = 0x27;
    }
	//AVCPacketType: 0x01 = NAL unit
    packet->m_body[1] = 0x01;
    //CompositionTime, 3 bytes, 0
    packet->m_body[2] = 0x00;
    packet->m_body[3] = 0x00;
    packet->m_body[4] = 0x00;

    //NAL unit length, 4 bytes, big-endian
    packet->m_body[5] = (len >> 24) & 0xff;
    packet->m_body[6] = (len >> 16) & 0xff;
    packet->m_body[7] = (len >> 8) & 0xff;
    packet->m_body[8] = (len) & 0xff;

    //NAL unit payload
    memcpy(&packet->m_body[9], buf, len);

	//remaining packet attributes
    packet->m_packetType = RTMP_PACKET_TYPE_VIDEO;
    packet->m_nBodySize = body_size;
    packet->m_nChannel = 0x04;
    packet->m_nTimeStamp = tms;
    packet->m_hasAbsTimestamp = 0;
    packet->m_headerType = RTMP_PACKET_SIZE_LARGE;
    packet->m_nInfoField2 = live->rtmp->m_stream_id;
    return packet;
}
4.3.5 Sending Video Data
int sendVideo(int8_t *buf, int len, long tms) {
    int ret = 0;
    if (buf[4] == 0x67) {
		//codec-config frame (SPS NAL, 0x67): cache the SPS and PPS globally, nothing is pushed yet
        if (live && (!live->pps || !live->sps)) {
            prepareVideo(buf, len, live);
        }
        return ret;
    }

	//the SPS/PPS must be sent again before every I-frame
    if (buf[4] == 0x65) {
    	//send the SPS and PPS first
        RTMPPacket *packet = createSpsPpsPackage(live);
        sendPacket(packet);
    }
    //then send the frame itself
    RTMPPacket *packet2 = createVideoPackage(buf, len, tms, live);
    ret = sendPacket(packet2);
    return ret;
}
4.3.6 Building the Audio RTMPPacket
RTMPPacket *createAudioPacket(int8_t *buf, const int len, const int type, const long tms,
                              Live *live) {

	//body: 2 fixed header bytes + AAC data; the second byte is 0x00 for the AAC sequence header and 0x01 for raw frames
    int body_size = len + 2;
    RTMPPacket *packet = (RTMPPacket *) malloc(sizeof(RTMPPacket));
    RTMPPacket_Alloc(packet, body_size);
	//FLV audio tag header: 0xAF = AAC, 44 kHz, 16-bit samples, stereo flag
    packet->m_body[0] = 0xAF;
    if (type == 1) {
        //AAC sequence header (the {0x12, 0x08} AudioSpecificConfig)
        packet->m_body[1] = 0x00;
    } else {
        //raw AAC frame
        packet->m_body[1] = 0x01;
    }
    memcpy(&packet->m_body[2], buf, len);
    packet->m_packetType = RTMP_PACKET_TYPE_AUDIO;
    //channel 0x05 is used for audio here
    packet->m_nChannel = 0x05;
    packet->m_nBodySize = body_size;
    packet->m_nTimeStamp = tms;
    packet->m_hasAbsTimestamp = 0;
    packet->m_headerType = RTMP_PACKET_SIZE_LARGE;
    packet->m_nInfoField2 = live->rtmp->m_stream_id;
    return packet;
}
4.3.7 Sending Audio Data
int sendAudio(int8_t *buf, int len, int type, int tms) {
    RTMPPacket *packet = createAudioPacket(buf, len, type, tms, live);
    int ret=sendPacket(packet);
    return ret;
}
4.3.8 Sending the Packet
int sendPacket(RTMPPacket *packet) {
	//the last argument 1 means the packet goes through librtmp's internal send queue
    int r = RTMP_SendPacket(live->rtmp, packet, 1);
    //the packet must be freed after sending
    RTMPPacket_Free(packet);
    free(packet);
    return r;
}
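
The Java side declares a single native sendData() method, but its JNI implementation is not listed above. It presumably just dispatches to sendVideo() or sendAudio() based on the packet type; a minimal sketch (the type constants mirror RTMPPackage: 0 = video, 1 = audio header, 2 = audio data):

extern "C"
JNIEXPORT jboolean JNICALL
Java_com_example_rtmplearn_ScreenLive_sendData(JNIEnv *env, jobject thiz,
                                               jbyteArray data_, jint len, jlong tms, jint type) {
    int ret;
    //pin the Java byte[] and get a native pointer to it
    jbyte *data = env->GetByteArrayElements(data_, nullptr);
    if (type == 0) {
        //video frame
        ret = sendVideo(reinterpret_cast<int8_t *>(data), len, tms);
    } else {
        //audio: 1 = AudioSpecificConfig header, 2 = raw AAC data
        ret = sendAudio(reinterpret_cast<int8_t *>(data), len, type, tms);
    }
    env->ReleaseByteArrayElements(data_, data, 0);
    return ret;
}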

This post used hardware encoding, which encodes fast but has weaker device compatibility. A later post will cover doing the same with software encoding.

Source code link

5. References

1. 直播推流全过程:直播推流编码之RTMP(5)
2. RTMPdump 源代码分析 1: main()函数
