Desktop screen recording with javacv

Without further ado, here is the code. It is adapted from the demo provided by the javacv project.

```java
package czh.iewbs.recordingscreenVideo;

import static org.bytedeco.javacpp.opencv_imgcodecs.cvLoadImage;

import java.awt.AWTException;
import java.awt.Rectangle;
import java.awt.Robot;
import java.awt.image.BufferedImage;
import java.io.File;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.ShortBuffer;
import java.util.Scanner;
import java.util.concurrent.ScheduledThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

import javax.imageio.ImageIO;
import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioSystem;
import javax.sound.sampled.DataLine;
import javax.sound.sampled.LineUnavailableException;
import javax.sound.sampled.Mixer;
import javax.sound.sampled.TargetDataLine;

import org.bytedeco.javacpp.avcodec;
import org.bytedeco.javacpp.avutil;
import org.bytedeco.javacpp.opencv_core;
import org.bytedeco.javacpp.opencv_core.IplImage;
import org.bytedeco.javacv.FFmpegFrameRecorder;
import org.bytedeco.javacv.FrameRecorder.Exception;
import org.bytedeco.javacv.OpenCVFrameConverter;

/**
 * Screen recording with javacv.
 * 
 * @author gj
 *
 * In my tests, capturing 5 screenshots per second gives the best result;
 * capturing more than that starts dropping frames.
 */
public class VideoRecord {
	private ScheduledThreadPoolExecutor screenTimer;
	private final Rectangle rectangle = new Rectangle(Constant.WIDTH, Constant.HEIGHT);
	private FFmpegFrameRecorder recorder;
	private Robot robot;
	private OpenCVFrameConverter.ToIplImage conveter;
	private BufferedImage screenCapture;
	private final int AUDIO_DEVICE_INDEX = 4;
	private ScheduledThreadPoolExecutor exec;
	private TargetDataLine line;
	private AudioFormat audioFormat;
	private DataLine.Info dataLineInfo;
	private boolean isHaveDevice = true;
	private String fileName;
	private long startTime = 0;
	private long videoTS = 0;
	private long pauseTime = 0;
	private double frameRate=5;

	public VideoRecord(String fileName, boolean isHaveDevice) {
		recorder = new FFmpegFrameRecorder(fileName + ".mp4", Constant.WIDTH, Constant.HEIGHT);
		// recorder.setVideoCodec(avcodec.AV_CODEC_ID_H264); // 28
		// recorder.setVideoCodec(avcodec.AV_CODEC_ID_FLV1); // 28
		recorder.setVideoCodec(avcodec.AV_CODEC_ID_MPEG4); // 13
		recorder.setFormat("mp4");
		// recorder.setFormat("mov,mp4,m4a,3gp,3g2,mj2,h264,ogg,MPEG4");
		recorder.setSampleRate(44100);
		recorder.setFrameRate(frameRate);

		recorder.setVideoQuality(0);
		recorder.setVideoOption("crf", "23");
		// Around 2000 kb/s is a reasonable bitrate for 720p video (1000 kb/s is used here)
		recorder.setVideoBitrate(1000000);
		/**
		 * Trade-off between quality and encode speed. Values: ultrafast, superfast,
		 * veryfast, faster, fast, medium, slow, slower, veryslow.
		 * ultrafast gives the least compression (lowest encoder CPU load) and the largest
		 * stream size, while veryslow gives the best compression (highest encoder CPU load)
		 * and the smallest stream size.
		 * Reference: https://trac.ffmpeg.org/wiki/Encode/H.264 -- ultrafast, as the name
		 * implies, provides the fastest possible encoding; if you need to trade quality for
		 * encode speed (for example when transcoding multiple streams on one machine), go
		 * for the speed.
		 */
		recorder.setVideoOption("preset", "slow");
		recorder.setPixelFormat(avutil.AV_PIX_FMT_YUV420P); // yuv420p
		recorder.setAudioChannels(2);
		recorder.setAudioOption("crf", "0");
		// Highest quality
		recorder.setAudioQuality(0);
		recorder.setAudioCodec(avcodec.AV_CODEC_ID_AAC);
		try {
			robot = new Robot();
		} catch (AWTException e) {
			e.printStackTrace();
		}
		try {
			recorder.start();
		} catch (Exception e) {
			e.printStackTrace();
		}
		conveter = new OpenCVFrameConverter.ToIplImage();
		this.isHaveDevice = isHaveDevice;
		this.fileName = fileName;
	}

	public void start() {

		if (startTime == 0) {
			startTime = System.currentTimeMillis();
		}
		if (pauseTime == 0) {
			pauseTime = System.currentTimeMillis();
		}
		
		if (isHaveDevice) {
			new Thread(new Runnable() {

				@Override
				public void run() {
					capture();
				}
			}).start();

		}
		screenTimer = new ScheduledThreadPoolExecutor(1);
		screenTimer.scheduleAtFixedRate(new Runnable() {
			@Override
			public void run() {
				try {
					screenCapture = robot.createScreenCapture(rectangle);
					String name = fileName + ".JPEG";
					File f = new File(name);
					// write the screenshot to an image file on disk
					try {
						ImageIO.write(screenCapture, "JPEG", f);
					} catch (IOException e) {
						e.printStackTrace();
					}
					// videoGraphics.drawImage(screenCapture, 0, 0, null);
					IplImage image = cvLoadImage(name); // very memory-hungry!
					// build a timestamp (in microseconds) for this frame
					videoTS = 1000 * (System.currentTimeMillis() - startTime - (System.currentTimeMillis() - pauseTime));
					// only ever move the recorder timestamp forward
					if (videoTS > recorder.getTimestamp()) {
						recorder.setTimestamp(videoTS);
					}
					recorder.record(conveter.convert(image));
					opencv_core.cvReleaseImage(image);
					f.delete();
					System.gc();
				} catch (Exception ex) {
					ex.printStackTrace();
				}
			}
		}, (int) (1000 / frameRate), (int) (1000 / frameRate), TimeUnit.MILLISECONDS);

	}

	public void capture() {
		audioFormat = new AudioFormat(44100.0F, 16, 2, true, false);
		dataLineInfo = new DataLine.Info(TargetDataLine.class, audioFormat);
		try {
			line = (TargetDataLine) AudioSystem.getLine(dataLineInfo);
		} catch (LineUnavailableException e1) {
			e1.printStackTrace();
		}
		try {
			line.open(audioFormat);
		} catch (LineUnavailableException e1) {
			e1.printStackTrace();
		}
		line.start();

		int sampleRate = (int) audioFormat.getSampleRate();
		int numChannels = audioFormat.getChannels();

		int audioBufferSize = sampleRate * numChannels;
		byte[] audioBytes = new byte[audioBufferSize];

		exec = new ScheduledThreadPoolExecutor(1);
		exec.scheduleAtFixedRate(new Runnable() {
			@Override
			public void run() {
				try {
					int nBytesRead = line.read(audioBytes, 0, Math.min(line.available(), audioBytes.length));
					int nSamplesRead = nBytesRead / 2;
					short[] samples = new short[nSamplesRead];

					// Let's wrap our short[] into a ShortBuffer and
					// pass it to recordSamples
					ByteBuffer.wrap(audioBytes).order(ByteOrder.LITTLE_ENDIAN).asShortBuffer().get(samples);
					ShortBuffer sBuff = ShortBuffer.wrap(samples, 0, nSamplesRead);

					// recorder is instance of
					// org.bytedeco.javacv.FFmpegFrameRecorder
					recorder.recordSamples(sampleRate, numChannels, sBuff);
					System.gc();
				} catch (org.bytedeco.javacv.FrameRecorder.Exception e) {
					e.printStackTrace();
				}
			}
		}, (int) (1000 / frameRate), (int) (1000 / frameRate), TimeUnit.MILLISECONDS);
	}

	public void stop() {
		if (null != screenTimer) {
			screenTimer.shutdownNow();
		}		
		try {
			recorder.stop();
			recorder.release();
			recorder.close();
			screenTimer = null;
			screenCapture = null;
			if (isHaveDevice) {
				if (null != exec) {
					exec.shutdownNow();
				}
				if (null != line) {
					line.stop();
					line.close();
				}				
				dataLineInfo = null;
				audioFormat = null;
			}
		} catch (Exception e) {
			e.printStackTrace();
		}

	}

	public void pause() throws Exception {
		screenTimer.shutdownNow();
		screenTimer = null;
		if (isHaveDevice) {
			exec.shutdownNow();
			exec = null;
			line.stop();
			line.close();
			dataLineInfo = null;
			audioFormat = null;
			line=null;
		}
		pauseTime = System.currentTimeMillis();
	}

	public static void main(String[] args) throws Exception, AWTException {
		// The constructor appends ".mp4" itself, so pass the path without an extension.
		VideoRecord videoRecord = new VideoRecord("D:\\1", true);
		videoRecord.start();
		Scanner sc = new Scanner(System.in);
		while (true) {
			System.out.println("Enter stop, pause or start:");
			String cmd = sc.next();
			if (cmd.equalsIgnoreCase("stop")) {
				videoRecord.stop();
			} else if (cmd.equalsIgnoreCase("pause")) {
				videoRecord.pause();
			} else if (cmd.equalsIgnoreCase("start")) {
				videoRecord.start();
			}
		}
	}

}
```
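
The listing above refers to a `Constant` class that is not shown in the original post. A minimal sketch of what it might look like, assuming it only carries the capture dimensions (the class name and fields come from the usage above; deriving the values from the screen size is my assumption):

```java
package czh.iewbs.recordingscreenVideo;

import java.awt.Dimension;
import java.awt.Toolkit;

/** Hypothetical companion class; VideoRecord only needs WIDTH and HEIGHT. */
public class Constant {
	private static final Dimension SCREEN = Toolkit.getDefaultToolkit().getScreenSize();
	public static final int WIDTH = SCREEN.width;   // e.g. 1920
	public static final int HEIGHT = SCREEN.height; // e.g. 1080
}
```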

 

To implement screen recording with JavaCV, FFmpeg does the video encoding. The steps below show a simpler, video-only version of the same idea.

First, add the JavaCV and FFmpeg dependencies, for example (depending on your setup you may need the corresponding `-platform` artifacts so that the native binaries are pulled in):

```xml
<dependency>
    <groupId>org.bytedeco</groupId>
    <artifactId>javacv</artifactId>
    <version>1.5.3</version>
</dependency>
<dependency>
    <groupId>org.bytedeco</groupId>
    <artifactId>ffmpeg</artifactId>
    <version>4.4</version>
</dependency>
```

Then create an `FFmpegFrameRecorder` and set the video codec, container format, frame rate, and other parameters:

```java
FFmpegFrameRecorder recorder = new FFmpegFrameRecorder("output.mp4", width, height);
recorder.setVideoCodec(avcodec.AV_CODEC_ID_H264);
recorder.setFormat("mp4");
recorder.setFrameRate(fps);
recorder.setPixelFormat(avutil.AV_PIX_FMT_YUV420P);
recorder.start();
```

Next, capture the screen image with `java.awt.Robot`:

```java
Robot robot = new Robot();
Rectangle screenRect = new Rectangle(Toolkit.getDefaultToolkit().getScreenSize());
BufferedImage image = robot.createScreenCapture(screenRect);
```

Convert the image to a JavaCV `Frame` and write it to the video file:

```java
Frame frame = converter.convert(image);
recorder.record(frame);
```

Finally, when recording is finished, stop the recorder and release its resources:

```java
recorder.stop();
recorder.release();
```

The complete example:

```java
import org.bytedeco.ffmpeg.global.avcodec;
import org.bytedeco.ffmpeg.global.avutil;
import org.bytedeco.javacv.FFmpegFrameRecorder;
import org.bytedeco.javacv.Frame;
import org.bytedeco.javacv.Java2DFrameConverter;

import java.awt.*;
import java.awt.image.BufferedImage;

public class ScreenRecorder {
    public static void main(String[] args) throws Exception {
        int width = 1920;
        int height = 1080;
        int fps = 30;

        FFmpegFrameRecorder recorder = new FFmpegFrameRecorder("output.mp4", width, height);
        recorder.setVideoCodec(avcodec.AV_CODEC_ID_H264);
        recorder.setFormat("mp4");
        recorder.setFrameRate(fps);
        recorder.setPixelFormat(avutil.AV_PIX_FMT_YUV420P);
        recorder.start();

        Java2DFrameConverter converter = new Java2DFrameConverter();
        Robot robot = new Robot();
        Rectangle screenRect = new Rectangle(Toolkit.getDefaultToolkit().getScreenSize());

        // Record the screen for 60 seconds.
        long startTime = System.currentTimeMillis();
        while (System.currentTimeMillis() - startTime < 60 * 1000) {
            BufferedImage image = robot.createScreenCapture(screenRect);
            Frame frame = converter.convert(image);
            recorder.record(frame);
        }

        recorder.stop();
        recorder.release();
    }
}
```

Note that this loop hands frames to the recorder as fast as `Robot` can capture them; to keep playback speed correct you would pace it against the wall clock, as the `VideoRecord` class above does with `recorder.setTimestamp`. Also, since screen recording needs operating-system permissions, the program may have to be run with administrator privileges.
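One more note on audio: `VideoRecord.capture()` opens the system-default `TargetDataLine`, and the `AUDIO_DEVICE_INDEX` field (and the `Mixer` import) is never actually used. If you need to record from a specific input device rather than the default, here is a rough sketch using the standard `javax.sound.sampled` API (the device index is an assumption; pick one from the printed list):

```java
import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioSystem;
import javax.sound.sampled.DataLine;
import javax.sound.sampled.Mixer;
import javax.sound.sampled.TargetDataLine;

public class AudioDeviceProbe {
	public static void main(String[] args) throws Exception {
		// Same format as VideoRecord: 44.1 kHz, 16-bit, stereo, signed, little-endian.
		AudioFormat format = new AudioFormat(44100.0F, 16, 2, true, false);
		DataLine.Info info = new DataLine.Info(TargetDataLine.class, format);

		// List every mixer and whether it can supply a TargetDataLine in this format.
		Mixer.Info[] mixers = AudioSystem.getMixerInfo();
		for (int i = 0; i < mixers.length; i++) {
			boolean supported = AudioSystem.getMixer(mixers[i]).isLineSupported(info);
			System.out.println(i + ": " + mixers[i].getName() + " (capture supported: " + supported + ")");
		}

		// Open a line from a specific mixer instead of the system default.
		int deviceIndex = 4; // placeholder, mirrors the unused AUDIO_DEVICE_INDEX field above
		TargetDataLine line = (TargetDataLine) AudioSystem.getMixer(mixers[deviceIndex]).getLine(info);
		line.open(format);
		line.start();
		System.out.println("Opened capture line on: " + mixers[deviceIndex].getName());
		line.stop();
		line.close();
	}
}
```

In `capture()` you could then hand such a line to the existing read loop in place of `AudioSystem.getLine(dataLineInfo)`.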