参考博客:
记录RTSP通过FFmpeg+nginx发布成rtmp和http-flv
一个线程将网络实时流写入缓冲区,一个线程负责读取转封装成flv格式进行推流
有用到qt库,ffmpeg库版本4.2.1
pushstream.h
#ifndef PUSHSTREAM_H #define PUSHSTREAM_H #include <QObject> #include <QMutex> #include <QThread> extern "C" { #include "libavformat/avformat.h" #include "libavutil/time.h" } #define MEMORY_POLL_PUSHSTREAM 1024 * 1024 * 5 #define READ_DATA_LEN 1024 class PushStream { public: PushStream(); ~PushStream(); //写进缓冲区 void WriteCacheBuff(const char* buff, uint32_t buffLen); //缓存数据流Buffer char* m_cacheBuff; int m_readPos; int m_writePos; int m_cacheLen; QMutex m_cacheMutex; //停止推流 bool m_bStop; private: //推流 void RunPushStream(); private: std::thread *m_pthread; QString m_outUrl; //推流路径 }; #endif // PUSHSTREAM_H
pushstream.cpp
#include "pushstream.h" #include <unistd.h> //usleep PushStream::PushStream() { //初始化缓冲区 m_cacheBuff = new char[MEMORY_POLL_PUSHSTREAM]; m_readPos = 0; m_writePos = 0; m_cacheLen = 0; m_bStop = false; m_outUrl = QString("rtmp://127.0.0.1:1935/live/mystream"); //开启推流线程 m_pthread = new std::thread(&PushStream::RunPushStream, this); } PushStream::~PushStream() { m_bStop = true; m_pthread->join(); delete m_pthread; delete [] m_cacheBuff; } void PushStream::WriteCacheBuff(const char* buff, uint32_t buffLen) { m_cacheMutex.lock(); //写入位置到缓冲区末尾的大小是否够写入当次接收数据的长度 if (m_writePos + buffLen > MEMORY_POLL_PUSHSTREAM) { memcpy(m_cacheBuff + m_writePos, buff, MEMORY_POLL_PUSHSTREAM - m_writePos); memcpy(m_cacheBuff, buff + (MEMORY_POLL_PUSHSTREAM - m_writePos), buffLen - (MEMORY_POLL_PUSHSTREAM - m_writePos)); m_writePos = buffLen - (MEMORY_POLL_PUSHSTREAM - m_writePos); } else { memcpy(m_cacheBuff + m_writePos, buff, buffLen); m_writePos = (m_writePos + buffLen) % (MEMORY_POLL_PUSHSTREAM); } m_cacheLen += buffLen; m_cacheMutex.unlock(); } //ffmpeg 读取数据的回调函数 int ReadBuffer(void * opaque,uint8_t *buf, int bufsize) { PushStream *pPushStream = reinterpret_cast<PushStream*>(opaque); int readsize = bufsize; while (!pPushStream->m_bStop) { //数据不够等待数据 if (readsize > pPushStream->m_cacheLen) { //延时,避免cpu跑满 usleep(1000 * 2); continue; } else { pPushStream->m_cacheMutex.lock(); //缓存池末尾不够 if (pPushStream->m_readPos + readsize > MEMORY_POLL_PUSHSTREAM) { char tmpCache[READ_DATA_LEN] = {0}; memcpy(tmpCache, pPushStream->m_cacheBuff + pPushStream->m_readPos, MEMORY_POLL_PUSHSTREAM - pPushStream->m_readPos); memcpy(tmpCache + (MEMORY_POLL_PUSHSTREAM - pPushStream->m_readPos), pPushStream->m_cacheBuff, readsize - (MEMORY_POLL_PUSHSTREAM - pPushStream->m_readPos)); memcpy(buf, tmpCache, readsize); pPushStream->m_readPos = readsize - (MEMORY_POLL_PUSHSTREAM - pPushStream->m_readPos); } else { memcpy(buf, pPushStream->m_cacheBuff + pPushStream->m_readPos, readsize); pPushStream->m_readPos = (pPushStream->m_readPos + readsize) % (MEMORY_POLL_PUSHSTREAM); } pPushStream->m_cacheLen -= readsize; pPushStream->m_cacheMutex.unlock(); return readsize; } } return 0; } void PushStream::RunPushStream() { int ret = 0; int videoindex = -1; AVPacket pkt; int frame_index = 0; AVStream *in_stream, *out_stream; AVFormatContext *ictx = nullptr; AVInputFormat* ifmt = nullptr; AVFormatContext *octx = nullptr; AVOutputFormat *ofmt = nullptr; AVIOContext *avio = nullptr; unsigned char * iobuffer = (unsigned char *)av_malloc(READ_DATA_LEN); avio = avio_alloc_context(iobuffer, READ_DATA_LEN, 0, (void*)this, ReadBuffer, nullptr, nullptr); if (!avio) { printf( "avio_alloc_context for input failed\n"); goto end; } //探测流封装格式 ret = av_probe_input_buffer(avio, &ifmt, "", nullptr, 0, 0); if (ret < 0) { printf("av_probe_input_buffer failed\n"); goto end; } printf("av_probe_input_buffer format:%s[%s]\n",ifmt->name, ifmt->long_name); ictx = avformat_alloc_context(); ictx->pb = avio; ictx->flags=AVFMT_FLAG_CUSTOM_IO; ret = avformat_open_input(&ictx, "", nullptr, nullptr); if (ret < 0) { printf("avformat_open_input failed\n"); goto end; } //获取音频视频的信息 ictx->probesize = 1024*1024; ictx->max_analyze_duration = 3 * AV_TIME_BASE; //最大分析3秒 ictx->flags |= AVFMT_FLAG_NOBUFFER; //不缓存, 减小直播延时 ret = avformat_find_stream_info(ictx, nullptr); if (ret < 0) { printf("avformat_find_stream_info failed\n"); goto end; } for(unsigned int i = 0; i < ictx->nb_streams; i++) { if (ictx->streams[i]->codecpar->codec_type==AVMEDIA_TYPE_AUDIO) { //HTTP—FLV只支持 AAC和MP3 音频格式 if ((ictx->streams[i]->codecpar->codec_id != AV_CODEC_ID_AAC && ictx->streams[i]->codecpar->codec_id != AV_CODEC_ID_MP3) || ictx->streams[i]->codecpar->sample_rate == 0) { hflv_audio = false; } else { hflv_audio = true; } } else if (ictx->streams[i]->codecpar->codec_type==AVMEDIA_TYPE_VIDEO) { //HTTP—FLV只支持 h264 视频格式 if (ictx->streams[i]->codecpar->codec_id != AV_CODEC_ID_H264) { goto end; } videoindex = i; } } if (videoindex == -1) { goto end; } av_dump_format(ictx, 0, "", 0); avformat_alloc_output_context2(&octx, nullptr, "flv", m_outUrl.toUtf8().data()); if (!octx) { printf( "Could not create output context\n"); goto end; } printf("avformat_alloc_output_context2 \n"); //将输入流音视频编码信息复制到输出流中 for (unsigned int i = 0; i < ictx->nb_streams; i++) { if (!hflv_audio) { if(ictx->streams[i]->codecpar->codec_type==AVMEDIA_TYPE_AUDIO) { continue; } } AVStream *in_stream = ictx->streams[i]; AVStream *out_stream = avformat_new_stream(octx, nullptr); if (!out_stream) { goto end; } if (avcodec_parameters_copy(out_stream->codecpar, in_stream->codecpar) < 0) { goto end; } out_stream->codecpar->codec_tag = 0; } printf("copy codec context \n"); av_dump_format(octx, 0, m_outUrl.toUtf8().data(), 1); //打开输出URL,准备推流 if (!(octx->oformat->flags & AVFMT_NOFILE)) { ret = avio_open(&octx->pb, m_outUrl.toUtf8().data(), AVIO_FLAG_WRITE); if (ret < 0) { printf( "Could not open output URL '%s'", m_outUrl.toUtf8().data()); goto end; } printf("avio_open \n"); } ret = avformat_write_header(octx, nullptr); if (ret < 0) { printf( "Error occurred when opening output URL\n"); goto end; } printf("start push stream \n"); while (!m_bStop) { //获取每一帧数据 ret = av_read_frame(ictx, &pkt); if (ret < 0) { break; } if (!hflv_audio && pkt.stream_index != videoindex) { av_packet_unref(&pkt); continue; } in_stream = ictx->streams[pkt.stream_index]; if (!hflv_audio && pkt.stream_index == videoindex) { out_stream = octx->streams[0]; pkt.stream_index = 0; } else { out_stream = octx->streams[pkt.stream_index]; } //最好自定义pkt的pts,dts,duration if (pkt.pts == AV_NOPTS_VALUE) { //Write PTS AVRational time_base1 = in_stream->time_base; //Duration between 2 frames (us) int64_t calc_duration = (int64_t)((double)AV_TIME_BASE / av_q2d(in_stream->r_frame_rate)); //Parameters pkt.pts = (int64_t)((double)(frame_index * calc_duration) / (double)(av_q2d(time_base1)*AV_TIME_BASE)); pkt.dts = pkt.pts; pkt.duration = (int64_t)((double)calc_duration/(double)(av_q2d(time_base1)*AV_TIME_BASE)); } //指定时间戳 pkt.pts = av_rescale_q_rnd(pkt.pts, in_stream->time_base, out_stream->time_base, (AVRounding)(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX)); pkt.dts = av_rescale_q_rnd(pkt.dts, in_stream->time_base, out_stream->time_base, (AVRounding)(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX)); pkt.duration = (int)av_rescale_q(pkt.duration, in_stream->time_base, out_stream->time_base); pkt.pos = -1; if (pkt.stream_index == videoindex) { //printf("Send %8d video frames to output URL, [%d]\n",frame_index, pkt.flags); frame_index++; } ret = av_interleaved_write_frame(octx, &pkt); if (ret < 0) { printf("Error muxing packet.error code %d\n", ret); break; } //释放 packet,否则会内存泄露 av_packet_unref(&pkt); } av_write_trailer(octx); end: // 该函数会释放用户自定义的IO buffer // 上面不再释放,否则会corrupted double-linked list avformat_close_input(&ictx); avformat_free_context(ictx); if (octx && !(ofmt->flags & AVFMT_NOFILE)) avio_close(octx->pb); if (octx) { avformat_free_context(octx); } }