Javacv+ffmpeg+Nginx 监控/直播 学习教程

添加依赖

	<properties>
        <lombok.version>1.18.16</lombok.version>
        <javacv.version>1.5.8</javacv.version>
        <ffmpeg.version>5.1.2-${javacv.version}</ffmpeg.version>
    </properties>

    <dependencies>
        <dependency>
            <groupId>org.projectlombok</groupId>
            <artifactId>lombok</artifactId>
            <version>${lombok.version}</version>
        </dependency>

        <dependency>
            <groupId>org.slf4j</groupId>
            <artifactId>slf4j-api</artifactId>
        </dependency>

        <dependency>
            <groupId>cn.hutool</groupId>
            <artifactId>hutool-all</artifactId>
            <version>4.3.2</version>
        </dependency>
        
        <dependency>
            <groupId>org.bytedeco</groupId>
            <artifactId>javacv</artifactId>
            <version>${javacv.version}</version>
        </dependency>

        <dependency>
            <groupId>org.bytedeco</groupId>
            <artifactId>javacpp-platform</artifactId>
            <version>${javacv.version}</version>
        </dependency>

        <dependency>
            <groupId>org.bytedeco</groupId>
            <artifactId>ffmpeg</artifactId>
            <version>${ffmpeg.version}</version>
        </dependency>

        <dependency>
            <groupId>org.bytedeco</groupId>
            <artifactId>ffmpeg-platform</artifactId>
            <version>${ffmpeg.version}</version>
        </dependency>
    </dependencies>

创建实体

import lombok.Data;

import java.text.SimpleDateFormat;

@Data
@Data
public class Camera {
    /**
     * RTSP pull url of the source camera.
     */
    private String rtsp;
    /**
     * RTMP push url (built from the configured rtmp domain plus the token).
     */
    private String rtmp;
    /**
     * HLS playback url handed back to clients.
     */
    private String hls;
    /**
     * Time the stream was last opened, formatted "yyyy-MM-dd HH:mm:ss".
     */
    private String openTime;
    /**
     * Number of viewers currently sharing this stream.
     */
    private int count;
    /**
     * Token identifying this camera/stream.
     */
    private String token;
    /**
     * Fuse (auto-stop) time limit, in minutes.
     */
    private String fusingTime;
    /**
     * Grabber (pull side) options.
     */
    private GrabberOption grabberOption;
    /**
     * Recorder (push side) options.
     */
    private RecorderOption recorderOption;

    @Data
    public static class GrabberOption {
        /**
         * Transport protocol used for the rtsp-to-rtmp pull
         * (passed to ffmpeg as the "rtsp_transport" option).
         */
        private String rtsp_transport;
        /**
         * Blocking-wait timeout passed to ffmpeg as the "stimeout" option.
         * NOTE(review): ffmpeg interprets this in microseconds for rtsp — confirm the configured value.
         */
        private String stimeout;

    }

    @Data
    public static class RecorderOption {
        /**
         * Encoder "tune" option (latency/quality trade-off).
         */
        private String tune;
        /**
         * Encoder "preset": trades quality against encoding speed. Values:
         * ultrafast, superfast, veryfast, faster, fast,
         * medium, slow, slower, veryslow.
         * ultrafast gives the least compression (low encoder CPU) and the largest stream;
         * veryslow gives the best compression (high encoder CPU) and the smallest stream.
         */
        private String preset;
        /**
         * CRF picture quality, 0-51; 18-28 is a reasonable range.
         */
        private String crf;

    }

    /**
     * Adds one viewer to the shared stream and refreshes the open time
     * (which also resets the fuse countdown).
     */
    public void increase() {
        this.count = this.count + 1;
        this.openTime = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(System.currentTimeMillis());
    }

    /**
     * Removes one viewer from the shared stream. May drive count below zero;
     * callers treat a negative count as an abnormal state.
     */
    public void reduce() {
        this.count = this.count - 1;
    }

}

配置类

import lombok.Data;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.context.annotation.Primary;
import org.springframework.stereotype.Component;

/**
 * @author charels.teng
 * @date 2022/12/6
 * @since 1.0
 **/
@Data
@Primary
@Component
@ConfigurationProperties(prefix = "camera")
public class CameraProperties {

    /**
     * Fuse (auto-stop) time limit, in minutes.
     */
    private String fusingTime;
    /**
     * Base url of the RTMP push endpoint; the stream token is appended to it.
     */
    private String rtmpDomain;
    /**
     * Base url of the HLS playback endpoint; "/&lt;token&gt;.m3u8" is appended to it.
     */
    private String hlsDomain;
    /**
     * Grabber (pull side) options copied onto each opened Camera.
     */
    private Camera.GrabberOption grabber;
    /**
     * Recorder (push side) options copied onto each opened Camera.
     */
    private Camera.RecorderOption recorder;

}

控制层

注:rpc框架以及返回类根据自身业务框架更改
import org.springframework.beans.factory.annotation.Autowired;

import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;

@Path("/camera")
public class CameraResource {

    @Autowired
    private CameraService cameraService;

    /**
     * 开启视频流
     */
    @GET
    @Path("/open/{param}")
    @Produces(MediaType.APPLICATION_JSON)
    public Result openVideo(@PathParam("param") String param) {
        return cameraService.open(param);
    }

    /**
     * 关闭视频流
     */
    @GET
    @Path("/close/{token}")
    @Produces(MediaType.APPLICATION_JSON)
    public Result closeVideo(@PathParam("token") String token) {
        return cameraService.close(token);
    }

业务层

import cn.hutool.core.collection.CollUtil;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

import java.io.IOException;
import java.net.InetSocketAddress;
import java.net.Socket;
import java.net.URI;
import java.text.SimpleDateFormat;
import java.util.Collections;
import java.util.List;

import static cn.facilityone.xia.stream.vedio.camera.thread.CameraThread.THREAD_MAP;


/**
 * @author charels.teng
 * @date 2022/12/6
 * @since 1.0
 **/
@Service
public class CameraServiceImpl implements CameraService {

    private static final Logger log = LoggerFactory.getLogger(CameraServiceImpl.class);

    /** Pattern used for Camera.openTime; a new SimpleDateFormat is created per use (it is not thread-safe). */
    private static final String TIME_PATTERN = "yyyy-MM-dd HH:mm:ss";

    @Autowired
    private CameraProperties cameraProperties;

    /**
     * Opens a video stream for the given token.
     *
     * @param token stream/camera identifier
     * @return result carrying the HLS playback url and token
     */
    @Override
    public Result open(String token) {
        Camera camera = new Camera();
        // NOTE(review): iotMointros / iotSn are not defined in this snippet — plug in your
        // own lookup of the RTSP source url and device serial for the given token here.
        String url = iotMointros.get(0).getUrl();
        if (StringUtils.isEmpty(url)) {
            throw new BusinessException("无监控源!!!");
        }

        camera.setRtsp(url);
        camera.setToken(iotSn);

        return openCamera(camera);
    }

    /**
     * Starts (or reuses) the pusher thread for the given camera and returns playback info.
     */
    private Result openCamera(Camera camera) {
        String openTime = new SimpleDateFormat(TIME_PATTERN).format(System.currentTimeMillis());
        String token = camera.getToken();

        // A pusher for this token is already running: just add a viewer and reuse its HLS url.
        if (THREAD_MAP.containsKey(token)) {
            if (StringUtils.isNotBlank(THREAD_MAP.get(token).getCamera().getHls())) {
                THREAD_MAP.get(token).getCamera().increase();

                return new Result(new CameraDto(THREAD_MAP.get(token).getCamera()));
            } else {
                // Stale entry without an HLS url: discard it and rebuild below.
                THREAD_MAP.remove(token);
            }
        }

        String rtsp = camera.getRtsp();
        String rtmp = cameraProperties.getRtmpDomain() + "/" + token;
        String hls = cameraProperties.getHlsDomain() + "/" + token + ".m3u8";

        // Probe TCP reachability of the pull (rtsp) and push (rtmp) endpoints before starting
        // the pusher. The sockets are probes only and are closed immediately — the original
        // code leaked both sockets.
        if (!isReachable(rtsp)) {
            log.error("与拉流IP{}:建立TCP连接失败!", rtsp);

            return new Result(Result.CODE_500, "与拉流IP建立TCP连接失败!");
        }
        if (!isReachable(rtmp)) {
            log.error("与推流IP{}:建立TCP连接失败!", rtmp);

            return new Result(Result.CODE_500, "与推流IP建立TCP连接失败!");
        }

        camera.setRtsp(rtsp);
        camera.setRtmp(rtmp);
        camera.setOpenTime(openTime);
        camera.setToken(token);
        camera.setCount(1);
        camera.setHls(hls);
        camera.setFusingTime(cameraProperties.getFusingTime());
        option(camera);

        CameraDto cameraDto = new CameraDto();
        cameraDto.setUrl(hls);
        cameraDto.setToken(token);

        CameraThread cameraThread = new CameraThread(camera);
        CameraThread.THREAD_POOL.execute(cameraThread);
        THREAD_MAP.put(token, cameraThread);

        return new Result(cameraDto);
    }

    /**
     * Opens and immediately closes a short-lived TCP connection to the host/port of the url.
     * NOTE(review): assumes the configured url carries an explicit port — URI.getPort()
     * returns -1 otherwise, which makes InetSocketAddress throw. Confirm the configuration.
     *
     * @param url rtsp/rtmp url to probe
     * @return true when the endpoint accepted the connection within 3 seconds
     */
    private boolean isReachable(String url) {
        URI uri = URI.create(url);
        try (Socket socket = new Socket()) {
            socket.connect(new InetSocketAddress(uri.getHost(), uri.getPort()), 1000 * 3);
            return true;
        } catch (IOException e) {
            log.error("TCP probe failed [{}]", url, e);
            return false;
        }
    }

    /**
     * Copies the configured grabber/recorder options onto the camera.
     */
    private void option(Camera camera) {
        Camera.GrabberOption grabberOption = new Camera.GrabberOption();
        grabberOption.setRtsp_transport(cameraProperties.getGrabber().getRtsp_transport());
        grabberOption.setStimeout(cameraProperties.getGrabber().getStimeout());
        camera.setGrabberOption(grabberOption);
        Camera.RecorderOption recorderOption = new Camera.RecorderOption();
        recorderOption.setTune(cameraProperties.getRecorder().getTune());
        recorderOption.setPreset(cameraProperties.getRecorder().getPreset());
        recorderOption.setCrf(cameraProperties.getRecorder().getCrf());
        camera.setRecorderOption(recorderOption);
    }

    /**
     * Closes a video stream: decrements the viewer count; signals the pusher thread to stop
     * when the count reaches zero, and additionally drops the registry entry when the count
     * goes negative (inconsistent state).
     */
    @Override
    public Result close(String token) {
        if (StringUtils.isBlank(token) || !THREAD_MAP.containsKey(token)) {
            return new Result("请检查该token下的视频是否开启成功").toFail();
        }

        CameraThread cameraThread = THREAD_MAP.get(token);
        Camera camera = cameraThread.getCamera();
        int count = camera.getCount() - 1;
        camera.setCount(count);

        if (count > 0) {
            // Other viewers remain; keep the pusher alive.
            log.info("关闭成功 当前相机使用人数为{}  [rtmp:{}]",
                    camera.getCount(), camera.getRtmp());

            return new Result(camera.getCount());
        }
        if (count == 0) {
            // Last viewer left: cooperatively stop the pusher loop.
            THREAD_MAP.get(token).interrupt();
            log.info("关闭推流成功 当前相机使用人数为{}  [rtmp:{}]",
                    camera.getCount(), camera.getRtmp());

            return new Result(camera.getCount());
        }
        // count < 0: inconsistent state — stop the pusher and drop the registry entry.
        THREAD_MAP.get(token).interrupt();
        THREAD_MAP.remove(token);
        log.info("异常关闭推流 当前相机使用人数为{}  [rtmp:{}]",
                camera.getCount(), camera.getRtmp());

        return new Result(camera.getCount());
    }

}

推流

import cn.hutool.core.bean.BeanUtil;
import cn.hutool.core.date.DateTime;
import cn.hutool.core.date.DateUnit;
import cn.hutool.core.date.DateUtil;
import lombok.Data;
import org.bytedeco.ffmpeg.avcodec.AVPacket;
import org.bytedeco.ffmpeg.avformat.AVFormatContext;
import org.bytedeco.ffmpeg.global.avcodec;
import org.bytedeco.ffmpeg.global.avutil;
import org.bytedeco.javacv.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.text.SimpleDateFormat;
import java.util.Map;
import java.util.stream.Collectors;

import static org.bytedeco.ffmpeg.global.avcodec.av_packet_unref;

@Data
public class Pusher {

    private static final Logger log = LoggerFactory.getLogger(Pusher.class);

    private Camera camera;

    /** Pull side: reads packets from the RTSP source. */
    private FFmpegFrameGrabber grabber;

    /** Push side: re-muxes packets to the RTMP endpoint. */
    private FFmpegFrameRecorder recorder;

    /** Number of recordPacket failures; the loop aborts when it reaches 10. */
    private int errorIndex = 0;

    private double frameRate;

    public Pusher(Camera camera) {
        this.camera = camera;
    }

    /**
     * Pulls packets from the camera's RTSP url and pushes them to its RTMP url until the
     * stream is interrupted, the fuse time limit expires, or too many empty/error packets
     * occur.
     */
    public void push() {
        try {
//            avutil.av_log_set_level(avutil.AV_LOG_INFO);
//            FFmpegLogCallback.set();
            grabber = new FFmpegFrameGrabber(camera.getRtsp());
            grabber.setOptions(BeanUtil.beanToMap(camera.getGrabberOption())
                    .entrySet()
                    .stream()
                    .collect(Collectors.toMap(Map.Entry::getKey, entry -> String.valueOf(entry.getValue()))));
            grabber.start();

            // Some devices report a bogus frame rate (e.g. 9000) in the stream info, which
            // breaks dts/pts computation and makes players fail; fall back to 25 fps when
            // the reported rate is outside a sane range.
            if (grabber.getFrameRate() > 0 && grabber.getFrameRate() < 100) {
                frameRate = grabber.getFrameRate();
            } else {
                frameRate = 25.0;
            }
            int width = grabber.getImageWidth();
            int height = grabber.getImageHeight();
            // Zero dimensions mean the pull failed: clean up and stop. (The original fell
            // through here and tried to start a 0x0 recorder.)
            if (width == 0 || height == 0) {
                log.error(camera.getRtsp() + " 拉流异常!");
                grabber.stop();
                grabber.close();
                return;
            }
            recorder = new FFmpegFrameRecorder(camera.getRtmp(), width, height);
            recorder.setInterleaved(true);
            // Keyframe interval: typically the frame rate or twice the frame rate.
            recorder.setGopSize((int) (frameRate * 2));
            // Below 25 fps playback tends to flicker.
            recorder.setFrameRate(frameRate);
            // flv container for rtmp.
            recorder.setFormat("flv");
            // Copy codec/bitrate/channel parameters from the source stream. (The original
            // also set AV_CODEC_ID_H264 first, which the next call immediately overwrote,
            // and set the video bitrate twice — both dead statements removed.)
            recorder.setVideoCodec(grabber.getVideoCodec());
            recorder.setAudioCodec(grabber.getAudioCodec());
            recorder.setVideoBitrate(grabber.getVideoBitrate());
            recorder.setAudioBitrate(grabber.getAudioBitrate());
//            recorder.setPixelFormat(grabber.getPixelFormat());
            recorder.setAudioChannels(grabber.getAudioChannels());

            recorder.setOptions(BeanUtil.beanToMap(camera.getRecorderOption())
                    .entrySet()
                    .stream()
                    .collect(Collectors.toMap(Map.Entry::getKey, entry -> String.valueOf(entry.getValue()))));
            AVFormatContext fc = grabber.getFormatContext();
            recorder.start(fc);
            log.debug("开始推流 [rtsp:{} rtmp:{}]", camera.getRtsp(), camera.getRtmp());
            // Drop whatever the probe phase buffered.
            grabber.flush();

            AVPacket pkt;
            /* pts/dts rewriting is optional; with some streams it causes endless empty packets. */
            long dts = 0;
            long pts = 0;
            int timebase;
            int no_frame_index;
            for (no_frame_index = 0; no_frame_index < 10 && errorIndex < 10; ) {
                long startTime = System.currentTimeMillis();
                // Cooperative stop requested via CameraThread.interrupt().
                if (THREAD_MAP.containsKey(camera.getToken()) && THREAD_MAP.get(camera.getToken()).getExitCode() == 1) {
                    break;
                }

                // Fuse: stop once the stream has been open longer than the configured limit.
                DateTime earlyDate = DateUtil.parse(this.camera.getOpenTime(), "yyyy-MM-dd HH:mm:ss");
                DateTime latelyDate = DateUtil.parse(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(startTime), "yyyy-MM-dd HH:mm:ss");
                if (DateUtil.between(earlyDate, latelyDate, DateUnit.MINUTE) > Integer.parseInt(camera.getFusingTime())) {
                    THREAD_MAP.get(this.camera.getToken()).getCamera().setCount(0);
                    break;
                }

                pkt = grabber.grabPacket();

                // Count and skip empty packets.
                if (pkt == null || pkt.size() == 0 || pkt.data() == null) {
                    log.warn("JavaCV 出现空包 [rtsp:{} rtmp:{}]", camera.getRtsp(), camera.getRtmp());
                    no_frame_index++;
                    continue;
                }

                // Filter out audio packets. The original unref'd the packet and then kept
                // using it (use-after-free); the loop must skip to the next packet here.
                // NOTE(review): assumes stream index 1 is the audio stream — confirm per source.
                if (pkt.stream_index() == 1) {
                    av_packet_unref(pkt);
                    continue;
                }

                // Rewrite dts/pts so they accumulate from 0; some SDKs restart them
                // mid-stream, which breaks player resume.
                pkt.pts(pts);
                pkt.dts(dts);
                errorIndex += recorder.recordPacket(pkt) ? 0 : 1;
                // Advance pts/dts by one frame interval in the stream's time base.
                timebase = grabber.getFormatContext().streams(pkt.stream_index()).time_base().den();
                pts += timebase / frameRate;
                dts += timebase / frameRate;
                // Drop our reference and reset the packet; frees the buffer at refcount 0.
                av_packet_unref(pkt);
                // Pace the loop to roughly one packet per frame interval.
                long endTime = System.currentTimeMillis();
                if ((long) (1000 / frameRate) - (endTime - startTime) > 0) {
                    Thread.sleep((long) (1000 / frameRate) - (endTime - startTime));
                }

            }

            // Too many empty packets in a row: drop this viewer.
            if (no_frame_index == 10) {
                this.camera.reduce();
            }
        } catch (FrameGrabber.Exception | FFmpegFrameRecorder.Exception e) {
            log.error("推流失败 [rtsp:{} rtmp:{}]", camera.getRtsp(), camera.getRtmp(), e);
        } catch (InterruptedException e) {
            // Preserve the interrupt status for the owning thread.
            Thread.currentThread().interrupt();
        } finally {
            release();
            log.info("推流结束 [rtsp:{} rtmp:{}]", camera.getRtsp(), camera.getRtmp());
        }
    }

    /**
     * Stops and closes the grabber and recorder (null-safe — either may be unassigned if
     * push() failed early) and zeroes the viewer count.
     */
    public void release() {
        try {
            if (grabber != null) {
                grabber.stop();
                grabber.close();
            }
            if (recorder != null) {
                recorder.stop();
                recorder.close();
            }
            if (this.camera.getCount() != 0) {
                this.camera.setCount(0);
            }
        } catch (FrameGrabber.Exception | FrameRecorder.Exception e) {
            log.error("释放推流资源失败 [rtmp:{}]", camera.getRtmp(), e);
        }
    }
}

线程池

import cn.facilityone.xia.stream.vedio.camera.base.Camera;
import cn.facilityone.xia.stream.vedio.camera.pusher.Pusher;

import java.util.Map;
import java.util.concurrent.*;

public class CameraThread implements Runnable {
    /**
     * Registry of running pusher threads keyed by camera token. Could be backed by
     * memory/db/redis depending on requirements.
     */
    public static final Map<String, CameraThread> THREAD_MAP = new ConcurrentHashMap<>();

    private static final int CORE = Runtime.getRuntime().availableProcessors();

    /** Pool for pusher tasks; when saturated the oldest queued task is discarded. */
    public static final ExecutorService THREAD_POOL = new ThreadPoolExecutor(
            CORE,
            CORE * 4,
            3,
            TimeUnit.SECONDS,
            new LinkedBlockingQueue<>(CORE),
            Executors.defaultThreadFactory(),
            new ThreadPoolExecutor.DiscardOldestPolicy()
    );

    private final Camera camera;
    /**
     * Exit flag: 0 = running, 1 = stop requested. Written by the HTTP thread (via
     * interrupt()) and polled by the pusher loop, hence volatile for cross-thread
     * visibility (the original plain int could be cached indefinitely by the reader).
     */
    private volatile int exitCode = 0;

    public int getExitCode() {
        return exitCode;
    }

    public CameraThread(Camera camera) {
        this.camera = camera;
    }

    public Camera getCamera() {
        return this.camera;
    }

    /** Requests a cooperative stop of the pusher loop (does not interrupt the OS thread). */
    public void interrupt() {
        this.exitCode = 1;
    }

    @Override
    public void run() {
        String token = camera.getToken();
        try {
            Pusher rtmpPusher = new Pusher(camera);

            rtmpPusher.push();

            // Drop the registry entry once the last viewer is gone. Guard against the
            // entry having been removed concurrently while push() was running (the
            // original dereferenced the map value without a null check).
            CameraThread current = THREAD_MAP.get(token);
            if (current != null && current.getCamera().getCount() == 0) {
                THREAD_MAP.remove(token);
            }
        } catch (Exception e) {
            e.printStackTrace();
            THREAD_MAP.remove(token);
        }
    }
}

启动类

@Slf4j
@SpringBootApplication
@EnableScheduling
public class Application {
    public static void main(String[] args) {
        // 服务启动执行FFmpegFrameGrabber和FFmpegFrameRecorder的tryLoad(),以免导致第一次推流时耗时。
        try {
            FFmpegFrameGrabber.tryLoad();
            FFmpegFrameRecorder.tryLoad();
        } catch (Exception e) {
            e.printStackTrace();
        }
        SpringApplication.run(FodCameraApplication.class, args);
    }

    @PreDestroy
    public void destory() {
        log.info("服务结束,销毁...");
        THREAD_MAP.clear();
        THREAD_POOL.shutdown();
    }

}

Nginx

1、下载nginx-rtmp-module

进入下载地址:https://github.com/arut/nginx-rtmp-module,并下载压缩包,如下图所示:
在这里插入图片描述

2、安装nginx

  1. 下载地址:http://nginx.org/en/download.html,选择需要的版本下载即可,如下图:
    在这里插入图片描述

  2. 安装nginx前首先要确认系统中安装了gcc、pcre-devel、zlib-devel、openssl-devel,运行命令:yum -y install gcc pcre pcre-devel zlib zlib-devel openssl openssl-devel,结果如下图所示:

yum -y install gcc pcre pcre-devel zlib zlib-devel openssl openssl-devel

在这里插入图片描述

  1. 解压nginx
tar -xvf nginx-1.23.2.tar.gz
  1. 编译安装nginx,并指定上面下载的rtmp模块路径,命令为:

(1) 安装依赖包,依次执行以下两条命令:

yum -y install libxml2 libxml2-dev
yum -y install libxslt-devel

如果出现:Cannot prepare internal mirrorlist: No URLs in mirrorlist

则依次执行以下两条命令:

sudo sed -i -e "s|mirrorlist=|#mirrorlist=|g" /etc/yum.repos.d/CentOS-*

sudo sed -i -e "s|#baseurl=http://mirror.centos.org|baseurl=http://vault.centos.org|g" /etc/yum.repos.d/CentOS-*

(2) 编译安装nginx,并指定上面下载的rtmp模块路径,执行命令:

# nginx-rtmp-module-master路径根据自身改变
./configure --add-module=../nginx-rtmp-module-master --with-http_ssl_module --with-http_xslt_module --with-http_flv_module --with-debug --with-http_gzip_static_module

执行成功如下图所示:
在这里插入图片描述

  1. 进入nginx配置文件目录,修改nginx.conf文件,共需要添加两处配置:

(1) 第一步在文件末尾加上下面配置信息:

# http平级
rtmp {
	server {
		listen 1935;
		chunk_size 4096; 

		application live {
			live on;
			record off;
			publish_notify on;
			#on_publish http://localhost:8080/newsweb/api/v1/rtmp/on_publish;
			#on_publish_done http://localhost:8080/newsweb/api/v1/rtmp/on_publish_done;
			#on_play http://localhost:8080/newsweb/api/v1/rtmp/on_play;
			#on_play_done http://localhost:8080/newsweb/api/v1/rtmp/on_play_done;
		}
		
		application hls {
			live on;
			hls on;  				 #是否开启hls
			# 可填绝对路径,也可填相对路径(/usr/local/nginx/)
			hls_path /usr/local/nginx/temp/hls; 		 #本地切片路径
			hls_fragment 8s;  		 #本地切片长度
			publish_notify on;
			#on_publish http://localhost:8080/newsweb/api/v1/rtmp/on_publish;
			#on_publish_done http://localhost:8080/newsweb/api/v1/rtmp/on_publish_done;
			#on_play http://localhost:8080/newsweb/api/v1/rtmp/on_play;
			#on_play_done http://localhost:8080/newsweb/api/v1/rtmp/on_play_done;
		}
	}
}

(2) 在http->server节点下增加推流目录的访问权限配置:

	# 推流详情页面 
	location /stat {
		rtmp_stat all;
		# 
		rtmp_stat_stylesheet stat.xsl;
	}

	location /stat.xsl {
		root html;
	}
	#HLS配置开始,这个配置为了`客户端`能够以http协议获取HLS的拉流
	location /hls {  
		#server hls fragments  
		types{  
			application/vnd.apple.mpegurl m3u8;  
			video/mp2t ts;  
			}  
		# 可填绝对路径,也可填相对路径(/usr/local/nginx/html/)
		alias temp/hls;  #该目录对应rtmp 配置中的hls_path即可
		# autoindex on;  # 开启目录文件列表
        # autoindex_exact_size on;  # 显示出文件的确切大小,单位是bytes
        # autoindex_localtime on;  # 显示的文件时间为文件的服务器时间
        # charset utf-8,gbk;  # 避免中文乱码
		expires -1;  
	}
  1. 在nginx源码目录下执行命令:make && make install,执行成功如下图所示:
    在这里插入图片描述
  2. 测试配置文件是否有问题:
/usr/local/nginx/sbin/nginx -t

在这里插入图片描述

  1. 启动:
/usr/local/nginx/sbin/nginx
  1. 在浏览器中输入linux的ip地址检查nginx启动页面是否可访问,正常访问成功如下图所示:
    在这里插入图片描述

若配置过程中遇到问题可评论留言,我会逐一帮助解决❗❗❗

  • 1
    点赞
  • 13
    收藏
    觉得还不错? 一键收藏
  • 8
    评论
要实现这个功能,你可以使用以下步骤: 1. 使用OpenCV库来解码和编码视频流。你可以使用cv::VideoCapture来解码视频文件,并使用cv::VideoWriter来编码视频文件。 2. 使用FFmpeg库来推送RTMP流。你需要使用FFmpeg的API来打开一个RTMP流,并使用avcodec_encode_video2()函数将OpenCV编码后的视频帧推送到流中。 3. 将RTMP流推送到Nginx服务器。你可以使用RTMP协议将视频流推送到Nginx服务器。在Nginx服务器上,你需要配置一个RTMP模块,并使用推流URL将视频流推送到服务器上。 以下是一个简单的示例代码,可以实现将OpenCV视频流编码并推送到Nginx服务器。 ```cpp #include <opencv2/opencv.hpp> #include <libavcodec/avcodec.h> #include <libavformat/avformat.h> #include <libavutil/opt.h> #include <libavutil/imgutils.h> #include <libavutil/time.h> #include <librtmp/rtmp.h> int main(int argc, char *argv[]) { // OpenCV video capture and video writer cv::VideoCapture cap(argv[1]); cv::Mat frame; cv::VideoWriter writer("output.mp4", cv::VideoWriter::fourcc('M','J','P','G'), 25, cv::Size(640, 480)); // FFmpeg RTMP stream av_register_all(); avcodec_register_all(); AVFormatContext *fmt_ctx = nullptr; AVOutputFormat *out_fmt = nullptr; AVStream *out_stream = nullptr; AVCodec *codec = nullptr; AVCodecContext *codec_ctx = nullptr; AVPacket pkt; int ret = 0; // Open RTMP stream RTMP *rtmp = RTMP_Alloc(); RTMP_Init(rtmp); RTMP_SetupURL(rtmp, "rtmp://localhost/live/mystream"); RTMP_EnableWrite(rtmp); // Connect to RTMP stream if (!RTMP_Connect(rtmp, nullptr)) { if (!RTMP_ConnectStream(rtmp, 0)) { // Create AVFormatContext avformat_alloc_output_context2(&fmt_ctx, nullptr, "flv", "rtmp://localhost/live/mystream"); if (!fmt_ctx) { fprintf(stderr, "Could not create output context\n"); return -1; } // Create video stream out_fmt = fmt_ctx->oformat; codec = avcodec_find_encoder(out_fmt->video_codec); out_stream = avformat_new_stream(fmt_ctx, codec); if (!out_stream) { fprintf(stderr, "Could not create video stream\n"); return -1; } codec_ctx = avcodec_alloc_context3(codec); avcodec_parameters_to_context(codec_ctx, out_stream->codecpar); codec_ctx->width = 640; codec_ctx->height = 480; codec_ctx->time_base = {1, 25}; codec_ctx->framerate = {25, 1}; codec_ctx->pix_fmt = AV_PIX_FMT_YUV420P; avcodec_open2(codec_ctx, 
codec, nullptr); // Write header avformat_write_header(fmt_ctx, nullptr); // Encode and push frames while (cap.read(frame)) { // Encode frame ret = avcodec_send_frame(codec_ctx, av_frame); if (ret < 0) { fprintf(stderr, "Error sending frame to encoder\n"); break; } while (ret >= 0) { ret = avcodec_receive_packet(codec_ctx, &pkt); if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) break; else if (ret < 0) { fprintf(stderr, "Error receiving packet from encoder\n"); break; } // Write packet pkt.stream_index = out_stream->index; av_interleaved_write_frame(fmt_ctx, &pkt); av_packet_unref(&pkt); // Push packet to RTMP stream RTMP_Write(rtmp, (char *)pkt.data, pkt.size); } // Write frame to OpenCV video writer writer.write(frame); } // Write trailer av_write_trailer(fmt_ctx); // Close RTMP stream RTMP_Close(rtmp); RTMP_Free(rtmp); // Cleanup avcodec_free_context(&codec_ctx); avformat_free_context(fmt_ctx); } } return 0; } ``` 注意,这只是一个简单的示例代码,还需要进行更多的错误检查和异常处理。

“相关推荐”对你有帮助么?

  • 非常没帮助
  • 没帮助
  • 一般
  • 有帮助
  • 非常有帮助
提交
评论 8
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值