How to use OpenCV + FFmpeg to open a camera, display the video, and push an RTMP stream at the same time.

Note the settings used.
Code walkthrough:
1.    char* outUrl = "rtmp://localhost/live/livestream";
This URL is the default address of AMS (Adobe Media Server).
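If AMS is not available, any RTMP server that exposes a matching application name will do; test005 below pushes to nginx with the nginx-rtmp module instead. A minimal nginx-rtmp server block might look like the sketch below (the port and application name are assumptions; match them against your own outUrl):

    rtmp {
        server {
            listen 1935;            # default RTMP port
            application live {      # the "live" part of rtmp://host/live/...
                live on;            # accept live streams pushed to this application
            }
        }
    }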
2.
    //register all codecs
    avcodec_register_all();
    //register all muxers/demuxers
    av_register_all();
    //initialize network protocol support
    avformat_network_init();
    //open the camera
    VideoCapture cam;
    namedWindow("video");
    Mat frame;

    //pixel format conversion context
    SwsContext*  vsc = NULL;
    //output data structure (the YUV frame)
    AVFrame* yuv = NULL;
    //encoder context
    AVCodecContext* vc = NULL;
    //RTMP FLV muxer
    AVFormatContext* ic = NULL;
These variables are declared because OpenCV & FFmpeg need them to run.
3.
    try {
        /// 1. Open Cam
        //  open camera 0 by default and read its parameters
        cam.open(0);
        if (!cam.isOpened()) {
            throw exception("cam open failed");
        }
        cout << "cam open sucess"<< endl;
 
        int inWidth = cam.get(CAP_PROP_FRAME_WIDTH);
        int inHeight = cam.get(CAP_PROP_FRAME_HEIGHT);
        int fps = cam.get(CAP_PROP_FPS);
        if (fps == 0) {
            fps = 25;
        }
        cout << fps<< endl;
 
        /// 2. Initialize the SwsContext (pixel format conversion context)
        vsc = sws_getCachedContext(vsc,
            inWidth, inHeight, AV_PIX_FMT_BGR24, 
            inWidth, inHeight, AV_PIX_FMT_YUV420P, 
            SWS_BICUBIC, 
            0, 0, 0
            );
        if (!vsc) {
            throw exception("sws_getCachedContext failed");
        }
 
        /// 3. Initialize the output data structure (the YUV frame)
        yuv = av_frame_alloc();
        yuv->format = AV_PIX_FMT_YUV420P;
        yuv->width = inWidth;
        yuv->height = inHeight;
        yuv->pts = 0;
        //allocate the YUV buffer
        int ret = av_frame_get_buffer(yuv, 32);
        if (ret != 0) {
            char buf[1024] = { 0 };
            av_strerror(ret, buf, sizeof(buf) - 1);
            throw exception(buf);
        }
 
        /// 4. Initialize the encoder context
        //a. find the encoder; everything here is FFmpeg-based
        AVCodec* codec = avcodec_find_encoder(AV_CODEC_ID_H264);
        if (!codec) {
            throw exception("Can't find H.264 encoder");
        }
        //b. 创建编码器上下文
        vc = avcodec_alloc_context3(codec);
        if (!vc) {
            throw exception("avcodec_alloc_context3 failed");
        }
        //c. 配置编码器参数
        vc->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;
        vc->codec_id = codec->id;
        vc->thread_count = 8;
        vc->bit_rate = 50 * 1024 * 8; //video bitrate in bits per second (about 50 KB/s)
        vc->width = inWidth;
        vc->height = inHeight;
        
        vc->time_base = { 1,fps };//used to calculate pts: pts * time_base = seconds
        vc->framerate = { fps,1 };

        vc->gop_size = 50;// GOP size: how many frames between I-frames (keyframes)
        vc->max_b_frames = 0;//with no B-frames, decoding order and presentation order are the same
        vc->pix_fmt = AV_PIX_FMT_YUV420P;
        
        //d. open the encoder context
        ret = avcodec_open2(vc, 0, 0);
        if (ret != 0) {
            char buf[1024] = { 0 };
            av_strerror(ret, buf, sizeof(buf) - 1);
            throw exception(buf);
        }
        cout << "avcodec_open2 successed!"<< endl;
        
        /// 5. Configure the output muxer and video stream
        //a. Create context for MUX
        ret = avformat_alloc_output_context2(&ic, 0, "flv", outUrl);
        if (ret != 0) {
            char buf[1024] = { 0 };
            av_strerror(ret, buf, sizeof(buf) - 1);
            throw exception(buf);
        }
        //b. Add video stream
        AVStream* vs = avformat_new_stream(ic, NULL);
        if (!vs) {
            throw exception("avformat_new_stream failed");
        }
        vs->codecpar->codec_tag = 0;
        // copy parameters from the encoder to the muxer
        avcodec_parameters_from_context(vs->codecpar, vc);
        av_dump_format(ic, 0, outUrl, 1);
 
        /// 6. Open the RTMP output IO
        ret = avio_open(&ic->pb, outUrl, AVIO_FLAG_WRITE);
        if (ret != 0) {
            char buf[1024] = { 0 };
            av_strerror(ret, buf, sizeof(buf) - 1);
            throw exception(buf);
        }
        //write mux header
        ret = avformat_write_header(ic, NULL); // after this call the muxer rewrites the stream's time_base; it is no longer vc->time_base
        if (ret != 0) {
            char buf[1024] = { 0 };
            av_strerror(ret, buf, sizeof(buf) - 1);
            throw exception(buf);
        }
 
        AVPacket pack;
        memset(&pack, 0, sizeof(pack));
        int vpts = 0;
 
        //main loop: capture -> display -> encode -> push
        for (;;) {
            ///read a frame from the camera
            if (!cam.grab()) {
                continue;
            }
            if (!cam.retrieve(frame)) {
                continue;
            }
            imshow("video", frame);
            waitKey(1);
 
            /// convert BGR to YUV
            // input data structure -- packed BGR
            uint8_t* indata[AV_NUM_DATA_POINTERS] = { 0 };//srcSlice: pointers to the source planes
            indata[0] = frame.data;
            int inlinesize[AV_NUM_DATA_POINTERS] = { 0 };//srcStride: bytes per row of each plane
            //bytes per row (width) of the source image
            inlinesize[0] = frame.cols * frame.elemSize();
 
            int h = sws_scale(vsc, indata, inlinesize, 0, frame.rows,
                yuv->data, yuv->linesize);
            if (h <= 0) {
                continue;
            }
 
            ///encode the YUV frame as H.264
            yuv->pts = vpts;
            vpts++;
            ret = avcodec_send_frame(vc, yuv);
            if (ret != 0) {
                continue;
            }
            ret = avcodec_receive_packet(vc, &pack);
            //only print/push when a complete packet was produced; EAGAIN means the encoder needs more input
            if (ret == 0 && pack.size > 0) {
                cout << '*' << pack.size << flush;
            }
            else {
                continue;
            }
            
            ///push the packet to the RTMP server
            pack.pts = av_rescale_q(pack.pts, vc->time_base, vs->time_base);
            pack.dts = av_rescale_q(pack.dts, vc->time_base, vs->time_base);
            ret = av_interleaved_write_frame(ic, &pack);
            if (ret == 0) {
                cout << '#'<< flush;
            }
 
        }
    }
    catch (exception &ex) {
        
        if (cam.isOpened())
            cam.release();
        if (vsc) {
            sws_freeContext(vsc);
            vsc = NULL;
        }
        if (vc) {
            if (ic)
                avio_closep(&ic->pb);
            avcodec_free_context(&vc);
        }
        cerr << ex.what() << endl;
    }
    getchar();
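One detail worth noting in the push step above: the encoder produces timestamps in units of vc->time_base (1/fps of a second per tick), while after avformat_write_header the FLV muxer rewrites the stream's time_base (typically to 1/1000 of a second), which is why av_rescale_q is applied to pts and dts before av_interleaved_write_frame. As a quick sanity check, assuming fps = 25 and a 1/1000 stream time_base, frame number 50 has an encoder pts of 50, which rescales to 50 * (1000 / 25) = 2000, i.e. it is presented 2 seconds into the stream.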
Solving everything in one long block of code is probably not a good habit, so I made some changes.
Here the code is wrapped into classes, which simplifies it to the version below (a sketch of the two wrapper class interfaces, inferred from how they are used, follows the listing):
#include <opencv2/core.hpp>
#include <opencv2/imgcodecs.hpp>
#include <opencv2/highgui.hpp>
#include <iostream>
#include "XMediaEncode.h"
#include "XRtmp.h"
extern "C"
{
#include <libswscale/swscale.h>
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
}
using namespace cv;
using namespace std;
 
 
 
//initialize the pixel format conversion context
void test004()
{
    //RTSP URL of the camera
    char *inUrl = "rtsp://admin:@192.168.10.30:554/ch0_0.264";
    VideoCapture cam;
    namedWindow("video");
 
    //pixel format conversion context
    SwsContext *vsc = NULL;
 
    try
    {
        ////
        /// 1. Open the RTSP camera with OpenCV
        cam.open(inUrl);
        if (!cam.isOpened())
        {
            throw exception("cam open failed!");
        }
        cout << inUrl << " cam open success" << endl;
        int inWidth = (int)cam.get(CAP_PROP_FRAME_WIDTH);
        int inHeight = (int)cam.get(CAP_PROP_FRAME_HEIGHT);
        int fps = (int)cam.get(CAP_PROP_FPS);
 
        /// 2. Initialize the format conversion context
        vsc = sws_getCachedContext(vsc,
            inWidth, inHeight, AV_PIX_FMT_BGR24,     //source width, height, pixel format
            inWidth, inHeight, AV_PIX_FMT_YUV420P,//destination width, height, pixel format
            SWS_BICUBIC,  // scaling algorithm
            0, 0, 0
        );
        if (!vsc)
        {
            throw exception("sws_getCachedContext failed!");
        }
        Mat frame;
        for (;;)
        {
            ///grab and decode an RTSP video frame
            if (!cam.grab())
            {
                continue;
            }
            ///retrieve() converts the decoded YUV frame to a BGR Mat
            if (!cam.retrieve(frame))
            {
                continue;
            }
            imshow("video", frame);
            waitKey(1);
        }
    }
    catch (exception &ex)
    {
        if (cam.isOpened())
            cam.release();
        if (vsc)
        {
            sws_freeContext(vsc);
            vsc = NULL;
        }
        cerr << ex.what() << endl;
    }
    getchar();
}
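test004 lets OpenCV pick whichever backend it wants for the RTSP URL. If your OpenCV build includes the FFmpeg backend (and is recent enough that open() accepts an apiPreference argument), you can request it explicitly; a small hedged variation of the open call:

    //explicitly ask OpenCV to use its FFmpeg backend for the RTSP stream
    cam.open(inUrl, cv::CAP_FFMPEG);
    if (!cam.isOpened())
    {
        throw exception("cam open failed!");
    }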
 
//RTSP source pushed to RTMP (key material, review carefully)
void test005()
{
    cout << "void test005()!" << endl;
    //RTSP URL of the camera
    char *inUrl = "rtsp://admin:@192.168.10.30:554/ch0_0.264";
    //RTMP push URL of the nginx-rtmp live server
    char *outUrl = "rtmp://192.168.10.181/live";

    //register all codecs
    avcodec_register_all();

    //register all muxers/demuxers
    av_register_all();

    //initialize network protocol support
    avformat_network_init();
 
 
    VideoCapture cam;
    Mat frame;
    namedWindow("video");
 
    //pixel format conversion context
    SwsContext *vsc = NULL;

    //output data structure (the YUV frame)
    AVFrame *yuv = NULL;

    //encoder context
    AVCodecContext *vc = NULL;

    //RTMP FLV muxer
    AVFormatContext *ic = NULL;
 
 
    try
    {    ////
        /// 1. Open the RTSP camera with OpenCV
        cam.open(inUrl);
        if (!cam.isOpened())
        {
            throw exception("cam open failed!");
        }
        cout << inUrl << " cam open success" << endl;
        int inWidth = (int)cam.get(CAP_PROP_FRAME_WIDTH);
        int inHeight = (int)cam.get(CAP_PROP_FRAME_HEIGHT);
        int fps = (int)cam.get(CAP_PROP_FPS);
 
        /// 2. Initialize the format conversion context
        vsc = sws_getCachedContext(vsc,
            inWidth, inHeight, AV_PIX_FMT_BGR24,     //source width, height, pixel format
            inWidth, inHeight, AV_PIX_FMT_YUV420P,//destination width, height, pixel format
            SWS_BICUBIC,  // scaling algorithm
            0, 0, 0
        );
        if (!vsc)
        {
            throw exception("sws_getCachedContext failed!");
        }
 
        /// 3. Initialize the output data structure (the YUV frame)
        yuv = av_frame_alloc();
        yuv->format = AV_PIX_FMT_YUV420P;
        yuv->width = inWidth;
        yuv->height = inHeight;
        yuv->pts = 0;
        //allocate the YUV buffer
        int ret = av_frame_get_buffer(yuv, 32);
        if (ret != 0)
        {
            char buf[1024] = { 0 };
            av_strerror(ret, buf, sizeof(buf) - 1);
            throw exception(buf);
        }
 
        /// 4. Initialize the encoder context, in the following steps
        //a. find the encoder
        AVCodec *codec = avcodec_find_encoder(AV_CODEC_ID_H264);
        if (!codec)
        {
            throw exception("Can`t find h264 encoder!");
        }
        //b. create the encoder context
        vc = avcodec_alloc_context3(codec);
        if (!vc)
        {
            throw exception("avcodec_alloc_context3 failed!");
        }
        //c. configure the encoder parameters
        vc->flags |= AV_CODEC_FLAG_GLOBAL_HEADER; //emit global headers (required by formats like FLV)
        vc->codec_id = codec->id;
        vc->thread_count = 8;
 
        vc->bit_rate = 50 * 1024 * 8;//compressed video bitrate in bits per second (about 50 KB/s)
        vc->width = inWidth;
        vc->height = inHeight;
        vc->time_base = { 1,fps };
        vc->framerate = { fps,1 };
 
        //GOP size: how many frames between keyframes
        vc->gop_size = 50;
        vc->max_b_frames = 0;
        vc->pix_fmt = AV_PIX_FMT_YUV420P;
        //d. open the encoder context
        ret = avcodec_open2(vc, 0, 0);
 
        if (ret != 0)
        {
            char buf[1024] = { 0 };
            av_strerror(ret, buf, sizeof(buf) - 1);
            throw exception(buf);
        }
        cout << "avcodec_open2 success!" << endl;
 
        /// 5. Configure the output muxer and video stream
        //a. create the output muxer context
        ret = avformat_alloc_output_context2(&ic, 0, "flv", outUrl);
        if (ret != 0)
        {
            char buf[1024] = { 0 };
            av_strerror(ret, buf, sizeof(buf) - 1);
            throw exception(buf);
        }
 
        //b. add the video stream
        AVStream *vs = avformat_new_stream(ic, NULL);
        if (!vs)
        {
            throw exception("avformat_new_stream failed");
        }
        vs->codecpar->codec_tag = 0;
        //copy parameters from the encoder
        avcodec_parameters_from_context(vs->codecpar, vc);
        av_dump_format(ic, 0, outUrl, 1);
 
 
        ///open the RTMP network output IO
        ret = avio_open(&ic->pb, outUrl, AVIO_FLAG_WRITE);
        if (ret != 0)
        {
            char buf[1024] = { 0 };
            av_strerror(ret, buf, sizeof(buf) - 1);
            throw exception(buf);
        }
 
        //write the container header
        ret = avformat_write_header(ic, NULL);
        if (ret != 0)
        {
            char buf[1024] = { 0 };
            av_strerror(ret, buf, sizeof(buf) - 1);
            throw exception(buf);
        }
 
        AVPacket pack;
        memset(&pack, 0, sizeof(pack));
        int vpts = 0;
        
        //main loop (runs forever)
        for (;;)
        {
            ///grab and decode an RTSP video frame
            if (!cam.grab())
            {
                continue;
            }
            ///retrieve() converts the decoded YUV frame to a BGR Mat
            if (!cam.retrieve(frame))
            {
                continue;
            }
            imshow("video", frame);
            waitKey(1);
 
 
            ///BGR to YUV
            //input data structure
            uint8_t *indata[AV_NUM_DATA_POINTERS] = { 0 };
            //packed: indata[0] = bgrbgrbgr...
            //planar would be: indata[0] bbbbb, indata[1] ggggg, indata[2] rrrrr
            indata[0] = frame.data;
            int insize[AV_NUM_DATA_POINTERS] = { 0 };
            //bytes per row (width) of the source image
            insize[0] = frame.cols * frame.elemSize();
            int h = sws_scale(vsc, indata, insize, 0, frame.rows, //source data
                yuv->data, yuv->linesize);
            if (h <= 0)
            {
                continue;
            }
            cout << h << " " << flush;
 
            ///H.264 encoding
            yuv->pts = vpts;
            vpts++;
            ret = avcodec_send_frame(vc, yuv);
            if (ret != 0)
                continue;
 
            ret = avcodec_receive_packet(vc, &pack);
            //only push a packet when the encoder actually produced one; EAGAIN means it needs more input
            if (ret == 0 && pack.size > 0)
            {
                cout << "*" << pack.size << flush;
            }
            else
            {
                continue;
            }
            //push the packet to the RTMP server
            pack.pts = av_rescale_q(pack.pts, vc->time_base, vs->time_base);
            pack.dts = av_rescale_q(pack.dts, vc->time_base, vs->time_base);
            pack.duration = av_rescale_q(pack.duration, vc->time_base, vs->time_base);
            ret = av_interleaved_write_frame(ic, &pack);
            if (ret == 0)
            {
                cout << "#" << flush;
            }
        }
 
    }
    catch (exception &ex)
    {
        if (cam.isOpened())
            cam.release();
        if (vsc)
        {
            sws_freeContext(vsc);
            vsc = NULL;
        }
 
        if (vc)
        {
            if (ic)
                avio_closep(&ic->pb);
            avcodec_free_context(&vc);
        }
 
        cerr << ex.what() << endl;
    }
    getchar();
}
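As written, test005 loops forever and never finalizes the output. If you add an exit condition to the loop (for example checking the return value of waitKey), a cleanup sketch along these lines, placed after the loop inside the try block, would flush the encoder and close the FLV stream properly; the placement and exit condition are assumptions, not part of the original code:

    //drain any frames still buffered inside the encoder
    avcodec_send_frame(vc, NULL);   //a NULL frame signals end of stream
    while (avcodec_receive_packet(vc, &pack) == 0)
    {
        pack.pts = av_rescale_q(pack.pts, vc->time_base, vs->time_base);
        pack.dts = av_rescale_q(pack.dts, vc->time_base, vs->time_base);
        av_interleaved_write_frame(ic, &pack);
    }
    //write the FLV trailer and release everything
    av_write_trailer(ic);
    avio_closep(&ic->pb);
    avformat_free_context(ic);
    avcodec_free_context(&vc);
    sws_freeContext(vsc);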
 
//opencv_rtsp_to_rtmp_class: refactored, class-based version (key material, review carefully)
void test006()
{
    cout << "void test006()!" << endl;
    //RTSP URL of the camera
    char *inUrl = "rtsp://admin:@192.168.10.30:554/ch0_0.264";
    //RTMP push URL of the nginx-rtmp live server
    char *outUrl = "rtmp://192.168.10.181/live";
 
 
    //encoder and pixel format conversion
    XMediaEncode *me = XMediaEncode::Get(0);
 
    //muxing and push-stream object
    XRtmp *xr = XRtmp::Get(0);
 
    VideoCapture cam;
    Mat frame;
    namedWindow("video");
 
    int ret = 0;
    try
    {    ////
        /// 1. Open the RTSP camera with OpenCV
        cam.open(inUrl);
        if (!cam.isOpened())
        {
            throw exception("cam open failed!");
        }
        cout << inUrl << " cam open success" << endl;
        int inWidth = (int)cam.get(CAP_PROP_FRAME_WIDTH);
        int inHeight = (int)cam.get(CAP_PROP_FRAME_HEIGHT);
        int fps = (int)cam.get(CAP_PROP_FPS);
 
        /// 2. Initialize the format conversion context
        /// 3. Initialize the output data structure
        me->inWidth = inWidth;
        me->inHeight = inHeight;
        me->outWidth = inWidth;
        me->outHeight = inHeight;
        me->InitScale();
 
        /// 4. Initialize the encoder context
        //a. find the encoder
        if (!me->InitVideoCodec())
        {
            throw exception("InitVideoCodec failed!");
        }
 
        /// 5. Configure the output muxer and video stream
        xr->Init(outUrl);
 
        //add the video stream
        xr->AddStream(me->vc);
        xr->SendHead();
 
        for (;;)
        {
            ///grab and decode an RTSP video frame
            if (!cam.grab())
            {
                continue;
            }
            ///retrieve() converts the decoded YUV frame to a BGR Mat
            if (!cam.retrieve(frame))
            {
                continue;
            }
            //imshow("video", frame);
            //waitKey(1);
 
 
            ///BGR to YUV
            me->inPixSize = frame.elemSize();
            AVFrame *yuv = me->RGBToYUV((char*)frame.data);
            if (!yuv) continue;
 
            ///H.264 encoding
            AVPacket *pack = me->EncodeVideo(yuv);
            if (!pack) continue;
 
            xr->SendFrame(pack);
 
 
        }
 
    }
    catch (exception &ex)
    {
        if (cam.isOpened())
            cam.release();
        cerr << ex.what() << endl;
    }
    getchar();
}
 
int main(int argc, char *argv[])
{
    //test000();
    //test001();
    //test002();
    //test003();
    //test004();
    //test005();
    test006();
    return 0;
}
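The XMediaEncode and XRtmp wrapper classes used in test006 are not listed in this post (they are presumably part of the opencv_rtsp2rtmp download linked at the end). The sketch below is only what can be inferred from how test006 calls them; names, signatures, and members are assumptions and may differ from the real XMediaEncode.h / XRtmp.h:

    //inferred interface of the encoding / pixel format conversion wrapper (assumption)
    class XMediaEncode
    {
    public:
        static XMediaEncode *Get(unsigned char index = 0); //factory, one instance per index
        int inWidth = 0;           //source width (from OpenCV)
        int inHeight = 0;          //source height
        int outWidth = 0;          //encoded width
        int outHeight = 0;         //encoded height
        int inPixSize = 3;         //bytes per source pixel (BGR24 = 3)
        AVCodecContext *vc = NULL; //encoder context, handed to XRtmp::AddStream
        virtual bool InitScale() = 0;                      //wraps sws_getCachedContext + av_frame_alloc
        virtual bool InitVideoCodec() = 0;                 //wraps avcodec_find_encoder/alloc_context3/open2
        virtual AVFrame *RGBToYUV(char *rgb) = 0;          //wraps sws_scale
        virtual AVPacket *EncodeVideo(AVFrame *frame) = 0; //wraps avcodec_send_frame/receive_packet
    };

    //inferred interface of the muxing / RTMP push wrapper (assumption)
    class XRtmp
    {
    public:
        static XRtmp *Get(unsigned char index = 0);
        virtual bool Init(const char *url) = 0;             //avformat_alloc_output_context2 + avio_open
        virtual int AddStream(const AVCodecContext *c) = 0; //avformat_new_stream + copy codec parameters
        virtual bool SendHead() = 0;                        //avformat_write_header
        virtual bool SendFrame(AVPacket *pack) = 0;         //rescale pts/dts and av_interleaved_write_frame
    };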
How to run it:
——————————————————————————————————————
Select the camera and open it.

In VLC, open a network stream.
Enter:
rtmp://localhost/live/livestream
You should now see the camera content streamed to VLC.
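Alternatively, the stream can be checked from the command line with ffplay, which ships with the FFmpeg tools: ffplay rtmp://localhost/live/livestream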
Reference: "利用ffmpeg和opencv进行视频的解码播放" (decoding and playing video with ffmpeg and OpenCV)
Code from this post:

https://files.cnblogs.com/files/blogs/758212/opencv_rtsp2rtmp-master.rar
https://files.cnblogs.com/files/blogs/758212/main.js
