JavaCV desktop streaming, implemented by pushing and pulling a UDP multicast video stream

FFmpeg UDP push/pull commands

Unicast

Push:

E:/工具/ffmpeg/ffmpeg -f gdigrab -r 23 -i desktop -pkt_size 1316 -vcodec libx264 -preset:v ultrafast -tune:v zerolatency -f h264 udp://192.168.1.20:5001

Pull:

ffplay -f h264 udp://192.168.1.20:5001 -fflags nobuffer -nofind_stream_info

Multicast

Push (Windows):

E:/工具/ffmpeg/ffmpeg -f gdigrab -r 23 -i desktop -pkt_size 1316 -vcodec libx264 -preset:v ultrafast -tune:v zerolatency -f h264 "udp://224.1.1.1:5001?buffer_size=0&localaddr=192.168.1.20"

Push (Linux):

ffmpeg -f x11grab -r 23 -video_size 1920x1080 -i :0.0+0,0 -pkt_size 1316 -vcodec libx264 -preset:v ultrafast -tune:v zerolatency -f h264 "udp://224.1.1.1:5001?buffer_size=0&localaddr=192.168.1.20"

Pull:

ffplay -f h264 "udp://224.1.1.1:5001" -fflags nobuffer -nofind_stream_info

Pushing and pulling with JavaCV

If the pusher is started first and the puller afterwards, the puller sometimes fails to receive and play the stream (pushing with the ffmpeg command and playing with ffplay does not show this problem). There are three workarounds:

1. Restart the puller and try again a few times (succeeds more than 50% of the time); a retry sketch follows this list.

2. Start the puller first, then the pusher (succeeds almost every time).

3. With the pusher already running and the puller started afterwards, switch windows on the pusher side a few times. This effectively restarts the pushed data and usually works, although in testing it occasionally did not.
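Workaround 1 can also be automated on the pulling side. The sketch below is a rough illustration only (the class name PullRetry, the retry count, and the 5-second UDP timeout are assumptions, not part of the project code): it keeps reopening an FFmpegFrameGrabber until the first decoded frame arrives.

import org.bytedeco.javacv.FFmpegFrameGrabber;
import org.bytedeco.javacv.Frame;

// Hypothetical helper: retry opening the UDP/H.264 stream until a frame is decoded.
public class PullRetry {

    public static FFmpegFrameGrabber openWithRetry(String url, int attempts) throws Exception {
        for (int i = 1; i <= attempts; i++) {
            FFmpegFrameGrabber grabber = new FFmpegFrameGrabber(url);
            grabber.setFormat("h264");                 // raw H.264 over UDP, as in the ffplay command above
            grabber.setOption("fflags", "nobuffer");   // same low-latency flag as the ffplay command
            grabber.setOption("timeout", "5000000");   // UDP read timeout in microseconds (assumed value)
            try {
                grabber.start();
                Frame first = grabber.grabImage();     // blocks until a frame is decoded or the read fails
                if (first != null) {
                    return grabber;                    // stream is alive; hand the grabber to the player loop
                }
                grabber.close();
            } catch (Exception e) {
                try { grabber.close(); } catch (Exception ignore) { }  // failed attempt; retry after a pause
            }
            Thread.sleep(1000);
        }
        throw new Exception("no stream received after " + attempts + " attempts");
    }
}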

Pusher.java

import java.util.function.Consumer;

import org.bytedeco.ffmpeg.global.avcodec;
import org.bytedeco.ffmpeg.global.avutil;
import org.bytedeco.javacv.FFmpegFrameGrabber;
import org.bytedeco.javacv.FFmpegFrameRecorder;
import org.bytedeco.javacv.Frame;
import org.bytedeco.javacv.FrameRecorder;

// Desktop video pusher (UDP/RTMP)
public class Pusher extends BStreamer {

    private Thread thread;
    private static final int frameRate = ConfigUtil.getInt("frameRate");   // capture frame rate
    private static final String format = ConfigUtil.getString("format");   // output format (h264 here; flv would be used for RTMP)
    private static final String linux = ConfigUtil.getString("linux");     // "1" when running on Linux
    private FFmpegFrameGrabber grabberCV;   // screen grabber
    FrameRecorder recorder;                 // pusher (recorder)
    private volatile boolean running = true;

    public Pusher(String url) {
        super(url);
    }

    public Pusher(String url, int w, int h) {
        super(url, w, h);
    }

    public void close() throws Exception {
        running = false;
        if (grabberCV != null) {
            grabberCV.close();
        }
        // isInterleaved() is reused here as a "recorder still open" flag
        if (recorder.isInterleaved()) {
            recorder.close();
            recorder.setInterleaved(false);
        }
        this.thread.interrupt();
    }

    public void start() throws Exception {
        running = true;
        if (grabberCV != null) {
            grabberCV.start();
        }
        if (recorder.isInterleaved()) {
            recorder.start();
            recorder.setInterleaved(true);
        }
        this.thread.start();
    }

    public void push(Consumer<Frame> consumer) throws Exception {
        if ("1".equals(linux)) {
            grabberCV = new FFmpegFrameGrabber(":0.0");    // Linux: X11 display to capture
            grabberCV.setFormat("x11grab");
        } else {
            grabberCV = new FFmpegFrameGrabber("desktop"); // Windows
            grabberCV.setFormat("gdigrab");
        }
        grabberCV.setOption("offset_x", "0");
        grabberCV.setOption("offset_y", "0");
        grabberCV.setFrameRate(frameRate);
        grabberCV.setOption("draw_mouse", "0");
        // grabberCV.setOption("video_size", "1600x900"); // this form misbehaves with dual monitors
        grabberCV.setImageWidth(getWidth());
        grabberCV.setImageHeight(getHeight());

        // pusher
        recorder = new FFmpegFrameRecorder(getUrl(), getWidth(), getHeight());
        recorder.setVideoCodec(avcodec.AV_CODEC_ID_H264); // 28
        recorder.setFormat(format);
        recorder.setFrameRate(frameRate);
        recorder.setVideoOption("tune", "zerolatency");   // reduce encoder latency
        recorder.setVideoOption("preset", "ultrafast");   // speed up encoding
        // recorder.setPixelFormat(avutil.AV_PIX_FMT_YUV420P); // leave unset for UDP when the receiver starts first
        // recorder.setVideoBitrate(2 * 1024 * 1024);          // leave unset for UDP
        recorder.setAudioCodec(avcodec.AV_CODEC_ID_AAC);
        recorder.setVideoOption("-pkt_size", "1316"); // set for UDP when the receiver starts first; omit otherwise

        this.thread = new Thread(() -> {
            try {
                while (running) {
                    Frame f = grabberCV.grab();
                    if (f != null && recorder.isInterleaved()) {
                        // System.out.println("push stream...");
                        recorder.record(f);
                        // consumer.accept(f);
                    }
                }
            } catch (Exception e) {
                e.printStackTrace();
                recorder.setInterleaved(false);
            }
        });
    }
}

BStreamer.java

// Base video stream: holds the stream URL and frame size
public class BStreamer {

    private int width = 1600;
    private int height = 900;
    private String url;

    public BStreamer(String url) {
        this.url = url;
    }

    public BStreamer(String url, int w, int h) {
        this.url = url;
        if (w > 0 && h > 0) {
            this.width = w;
            this.height = h;
        }
    }

    public int getWidth() {
        return width;
    }

    public void setWidth(int width) {
        this.width = width;
    }

    public int getHeight() {
        return height;
    }

    public void setHeight(int height) {
        this.height = height;
    }

    public String getUrl() {
        return url;
    }

    public void setUrl(String url) {
        this.url = url;
    }
}

ConfigUtil.java

import java.util.Locale;
import java.util.ResourceBundle;

// Reads settings from the rtmp resource bundle (see rtmp_zh.properties below)
public class ConfigUtil {

    private static Locale locale_CN = new Locale("zh", "CN");
    private static ResourceBundle rb = ResourceBundle.getBundle("rtmp", locale_CN);

    public static String getString(String key) {
        return rb.getString(key);
    }

    public static int getInt(String key) {
        return Integer.parseInt(rb.getString(key));
    }
}

TestRTMPPusher.java

import java.awt.Dimension;
import java.awt.Toolkit;
//import org.bytedeco.javacv.CanvasFrame;

public class TestRTMPPusher {

    public static void main(String[] args) throws Exception {
        String pushStreamUrl = ConfigUtil.getString("pushStreamUrl");

        Toolkit tk = Toolkit.getDefaultToolkit();  // AWT toolkit
        Dimension screen = tk.getScreenSize();     // actual screen size
        int width = (int) screen.getWidth();
        int height = (int) screen.getHeight();
        // override with the configured capture size
        width = ConfigUtil.getInt("screenWidth");
        height = ConfigUtil.getInt("screenHeight");

        Pusher pusher = new Pusher(pushStreamUrl, width, height);
        pusher.push(f -> {
            // cf.showImage(f);
        });
        pusher.start();
    }
}
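TestRTMPPusher never calls Pusher.close(), so the grabber and recorder are only released when the process dies. If a cleaner shutdown is wanted, one option is a JVM shutdown hook; the following is a sketch under that assumption (the class name TestPusherWithShutdown is hypothetical, not part of the project):

// Hypothetical variant of the pusher test: a shutdown hook calls Pusher.close()
// so the grabber and recorder are released when the JVM exits.
public class TestPusherWithShutdown {

    public static void main(String[] args) throws Exception {
        String pushStreamUrl = ConfigUtil.getString("pushStreamUrl");
        int width = ConfigUtil.getInt("screenWidth");
        int height = ConfigUtil.getInt("screenHeight");

        Pusher pusher = new Pusher(pushStreamUrl, width, height);
        pusher.push(f -> { });  // no local preview
        pusher.start();

        Runtime.getRuntime().addShutdownHook(new Thread(() -> {
            try {
                pusher.close();  // stops the grab loop and closes the grabber/recorder
            } catch (Exception e) {
                e.printStackTrace();
            }
        }));
    }
}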

RtmpPlayer.java

import java.awt.Cursor;
import java.awt.Dimension;
import java.awt.Graphics;
import java.awt.Image;
import java.awt.Toolkit;
import java.io.IOException;

import javax.swing.JFrame;
import javax.swing.UIManager;

import org.bytedeco.javacv.FFmpegFrameGrabber;
import org.bytedeco.javacv.Frame;
import org.bytedeco.javacv.Java2DFrameConverter;

public class RtmpPlayer extends JFrame {

    private static final String format = ConfigUtil.getString("format"); // stream format (e.g. h264)
    private Image fetchedImage;
    private int width;
    private int height;
    private int heightSub = ConfigUtil.getInt("heightSub");

    public RtmpPlayer() throws IOException {
        // show a hand cursor inside the window
        this.setCursor(new Cursor(Cursor.HAND_CURSOR));
        // window title
        this.setTitle("rtmp player");

        Toolkit tk = Toolkit.getDefaultToolkit();
        Dimension screen = tk.getScreenSize(); // screen size
        width = (int) screen.getWidth();
        height = (int) screen.getHeight();

        // window size
        this.setSize(width, height);
        // default close operation
        this.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
        // fixed window size
        this.setResizable(false);
        // center the window
        this.setLocationRelativeTo(null);
        this.setUndecorated(true);
        // show the window
        this.setVisible(true);
    }

    // pull and play the stream
    public void pullStream(String inputPath) throws Exception, org.bytedeco.javacv.FrameRecorder.Exception {
        // create and configure the grabber
        FFmpegFrameGrabber grabber = FFmpegFrameGrabber.createDefault(inputPath);
        //grabber.setOption("rtsp_transport", "tcp");
        String playStreamUrl = ConfigUtil.getString("playStreamUrl");
        if (!playStreamUrl.contains("udp://")) {
            String streamProtocol = ConfigUtil.getString("streamProtocol");
            String netProtocol = ConfigUtil.getString("netProtocol");
            grabber.setOption(streamProtocol, netProtocol);
        }
        //grabber.setVideoCodec(avcodec.AV_CODEC_ID_H264); // 28, no effect here
        //grabber.setFormat(format); // no effect here
        grabber.setImageWidth(width);
        grabber.setImageHeight(height);
        // start the grabber
        grabber.start();

        Java2DFrameConverter converter2 = new Java2DFrameConverter();
        // playback loop
        while (true) {
            Frame frame = grabber.grabImage(); // pull one frame
            if (frame == null) {
                continue;                      // no decoded frame yet; keep trying
            }
            fetchedImage = converter2.getBufferedImage(frame);
            repaint();
        }
    }

    @Override
    public void paint(Graphics g) {
        g.drawImage(fetchedImage, 0, 0, width, height - heightSub, null);
    }

    // test the player
    public static void main(String[] args) throws Exception, org.bytedeco.javacv.FrameRecorder.Exception {
        String playStreamUrl = ConfigUtil.getString("playStreamUrl"); // stream address (UDP multicast or RTMP)
        try {
            UIManager.setLookAndFeel(UIManager.getSystemLookAndFeelClassName());
            new RtmpPlayer().pullStream(playStreamUrl);
        } catch (Exception e) {
            e.printStackTrace();
            System.exit(0);
        }
    }
}

Configuration file rtmp_zh.properties

Contents:

streamProtocol=rtmp_transport
netProtocol=tcp
heightSub=0
screenWidth=1920
screenHeight=1080
#streamUrl=rtmp://192.168.1.20:1935/live/desktop
playStreamUrl=udp://224.1.1.1:5001
pushStreamUrl=udp://224.1.1.1:5001?buffer_size=0&localaddr=192.168.1.20
frameRate=23
format=h264
linux=0
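A note on the file name: ConfigUtil requests the bundle rtmp with locale zh_CN, and since there is no rtmp_zh_CN.properties on the classpath, ResourceBundle falls back to rtmp_zh.properties, which is why this file name is picked up. A quick way to confirm this (hypothetical snippet, not part of the project):

import java.util.Locale;
import java.util.ResourceBundle;

// Hypothetical check: with no rtmp_zh_CN.properties present, the zh_CN lookup
// falls back to rtmp_zh.properties, so these keys resolve from that file.
public class BundleCheck {

    public static void main(String[] args) {
        ResourceBundle rb = ResourceBundle.getBundle("rtmp", new Locale("zh", "CN"));
        System.out.println(rb.getString("pushStreamUrl"));
        System.out.println(rb.getString("frameRate"));
    }
}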

// Command to launch the pusher

java -cp bin;lib/javacpp-1.5.5.jar;lib/javacv-1.5.5.jar;lib/javacv-platform-1.5.5.jar;lib/opencv-4.5.1-1.5.5-windows-x86_64.jar;lib/ffmpeg-4.3.2-1.5.5.jar;lib/ffmpeg-platform-4.3.2-1.5.5.jar;lib/ffmpeg-4.3.2-1.5.5-windows-x86_64.jar TestRTMPPusher

// Command to launch the player

java -cp bin;lib/javacpp-1.5.5.jar;lib/javacv-1.5.5.jar;lib/javacv-platform-1.5.5.jar;lib/ffmpeg-4.3.2-1.5.5.jar;lib/ffmpeg-platform-4.3.2-1.5.5.jar;lib/ffmpeg-4.3.2-1.5.5-windows-x86_64.jar RtmpPlayer
