为什么写这篇文章?
起因是项目中会用到rtsp流,用于视频分析和语音口令识别,还需要控制摄像头,并在做监控时实现云端存储功能。在网上找了一大圈发现这方面资料很少,资料中大部分人还是用FFmpeg安装程序加代码的方式实现的。
其实JAVACV和OPENCV是非常强大的。当然我了解的也不多(因为源码没有什么注释!!理解起来很困难)。这篇文章最主要是针对目前GPT和通义千问中提供的代码进行纠错和改正(特别是从rtsp中只取出音频)。
部分代码是他人提供,所以看起来会有差异,不过我会进行说明。
pom
<dependency>
<groupId>org.bytedeco</groupId>
<artifactId>javacv-platform</artifactId>
<version>1.5.9</version>
</dependency>
取出视频保存为mp4代码
/**
 * Pulls video from an RTSP stream for the given duration and saves it as MP4.
 *
 * @param rtspUrl    RTSP stream URL
 * @param duration   recording duration in seconds
 * @param outputFile output file path (e.g. "D:\\va\\a.mp4")
 */
public static void videoPuller(String rtspUrl, int duration, String outputFile) {
    // try-with-resources guarantees the native FFmpeg resources are released
    // even when start()/record() throws; the previous version only closed them
    // on the happy path and leaked on any exception.
    try (FFmpegFrameGrabber grabber = new FFmpegFrameGrabber(rtspUrl)) {
        grabber.start();
        // Recorder writes to outputFile using the grabbed stream's dimensions
        // (only valid after grabber.start()).
        try (FFmpegFrameRecorder recorder = new FFmpegFrameRecorder(outputFile, grabber.getImageWidth(), grabber.getImageHeight())) {
            recorder.setVideoCodec(avcodec.AV_CODEC_ID_H264); // video codec
            recorder.setFormat("mp4");                        // container format
            // Audio parameters. Fixed values work for typical IP cameras; they
            // can also be read from the grabber after start() via
            // getAudioChannels() / getSampleRate() / getAudioBitrate().
            recorder.setAudioCodec(avcodec.AV_CODEC_ID_AAC);
            recorder.setAudioChannels(2);
            recorder.setSampleRate(44100);
            recorder.setAudioBitrate(192000);
            recorder.start();
            long startTime = System.currentTimeMillis();
            // Multiply as long to avoid int overflow for very large durations.
            long endTime = startTime + duration * 1000L;
            Frame frame;
            // Check the clock before grabbing so we do not pull one extra frame
            // past the deadline.
            while (System.currentTimeMillis() <= endTime && (frame = grabber.grabFrame()) != null) {
                recorder.record(frame);
            }
            recorder.stop();
        }
        grabber.stop();
    } catch (FrameGrabber.Exception | FrameRecorder.Exception e) {
        e.printStackTrace();
    }
}
public static void main(String[] args) throws IOException {
    // Record 60 seconds from the stream into a local MP4 file.
    final String streamUrl = "rtsp流地址";
    final int seconds = 60;
    final String output = "D:\\va\\a.mp4";
    videoPuller(streamUrl, seconds, output);
}
只保存音频
import org.bytedeco.ffmpeg.global.avcodec;
import org.bytedeco.javacv.*;
/**
 * Extracts only the audio track from an RTSP stream and saves it to a WAV file.
 *
 * @author by Guoshun
 * @version 1.0.0
 * @date 2024/2/26 16:57
 */
public class AudioExtractor {

    /**
     * Grabs audio samples from the RTSP stream for at most the given number of
     * seconds and writes them to {@code outputPath} as 16-bit little-endian PCM.
     *
     * @param rtspUrl            RTSP stream URL
     * @param outputPath         output file path (e.g. "D:\\va\\audio.wav")
     * @param maxDurationSeconds maximum capture duration in seconds
     * @throws Exception if grabbing or recording fails
     */
    public static void extractAudioFromRtsp(String rtspUrl, String outputPath, int maxDurationSeconds) throws Exception {
        // try-with-resources releases the native FFmpeg resources even on
        // failure; the previous version leaked both objects when an exception
        // was thrown before the explicit stop()/release() calls.
        try (FFmpegFrameGrabber grabber = new FFmpegFrameGrabber(rtspUrl)) {
            grabber.start();
            // 1. Read the audio parameters reported by the stream (valid only
            //    after start()).
            int audioChannels = grabber.getAudioChannels();
            int sampleRate = grabber.getSampleRate();
            int audioBitrate = grabber.getAudioBitrate();
            // NOTE: use the (outputPath, audioChannels) constructor for
            // audio-only output. The (outputPath, width, height) constructor
            // that AI assistants (GPT / Tongyi Qianwen) often suggest would
            // misuse audioChannels as the image width and sampleRate as the
            // image height.
            try (FFmpegFrameRecorder recorder = new FFmpegFrameRecorder(outputPath, audioChannels)) {
                recorder.setSampleRate(sampleRate);
                recorder.setAudioBitrate(audioBitrate);
                recorder.setAudioCodec(avcodec.AV_CODEC_ID_PCM_S16LE); // 16-bit little-endian PCM
                recorder.start();
                long startTime = System.currentTimeMillis();
                Frame frame;
                // Check the clock before grabbing so we stop promptly at the
                // requested duration.
                while ((System.currentTimeMillis() - startTime) / 1000 < maxDurationSeconds
                        && (frame = grabber.grabSamples()) != null) {
                    recorder.record(frame, 0);
                }
                recorder.stop();
            }
            grabber.stop();
        }
    }

    public static void main(String[] args) {
        try {
            extractAudioFromRtsp("rtsp://admin:12345qaz@192.168.20.164:554", "D:\\va\\audio.wav", 60);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
完整代码,并行执行
import org.bytedeco.ffmpeg.global.avcodec;
import org.bytedeco.javacv.*;
import org.bytedeco.opencv.opencv_core.Mat;
import javax.validation.constraints.NotNull;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import static org.bytedeco.opencv.global.opencv_imgcodecs.imwrite;
/**
 * RTSP stream utilities: liveness check, MP4 recording, audio extraction,
 * and periodic frame snapshots, runnable concurrently against one stream.
 * Stream URL format: rtsp://user:password@ip
 *
 * @author by Guoshun
 * @version 1.0.0
 * @date 2024/2/27 10:21
 */
public class RtspUtils {

    /** Default stream used by {@link #main(String[])}; replace with a real URL. */
    private static String DEFAULT_RTSP_URL = "rtsp://账户:密码@ip地址";

    /**
     * Checks whether the RTSP stream is reachable.
     *
     * @param rtspUrl stream URL
     * @return true if the grabber connected and started within the timeout
     */
    public static Boolean isOnline(String rtspUrl) {
        FFmpegFrameGrabber grabber = new FFmpegFrameGrabber(rtspUrl);
        try {
            grabber.setTimeout(3000); // connection timeout, milliseconds
            grabber.start();
            return true;
        } catch (FrameGrabber.Exception e) {
            e.printStackTrace();
        } finally {
            // grabber is never null here; the previous null check was redundant.
            close(grabber);
        }
        return false;
    }

    /**
     * Closes the grabber, logging (but not propagating) any shutdown failure.
     *
     * @param grabber grabber to close; must not be null
     * @return true if closed cleanly
     */
    private static boolean close(@NotNull FFmpegFrameGrabber grabber) {
        try {
            grabber.close();
            return true;
        } catch (FrameGrabber.Exception e) {
            e.printStackTrace();
        }
        return false;
    }

    /**
     * Pulls video from an RTSP stream for the given duration and saves it as MP4.
     *
     * @param rtspUrl    stream URL
     * @param duration   recording duration in seconds
     * @param outputFile output file path
     */
    public static void videoPuller(String rtspUrl, int duration, String outputFile) {
        // try-with-resources releases the native resources even on exception;
        // the previous version only closed them on the happy path.
        try (FFmpegFrameGrabber grabber = new FFmpegFrameGrabber(rtspUrl)) {
            grabber.start();
            // Recorder uses the grabbed stream's dimensions (valid after start()).
            try (FFmpegFrameRecorder recorder = new FFmpegFrameRecorder(outputFile, grabber.getImageWidth(), grabber.getImageHeight())) {
                recorder.setVideoCodec(avcodec.AV_CODEC_ID_H264); // video codec
                recorder.setFormat("mp4");                        // container format
                // Audio parameters; fixed values suit typical IP cameras.
                recorder.setAudioChannels(2);
                recorder.setAudioCodec(avcodec.AV_CODEC_ID_AAC);
                recorder.setSampleRate(44100);
                recorder.setAudioBitrate(192000);
                recorder.start();
                long startTime = System.currentTimeMillis();
                // Multiply as long to avoid int overflow for large durations.
                long endTime = startTime + duration * 1000L;
                Frame frame;
                while (System.currentTimeMillis() <= endTime && (frame = grabber.grabFrame()) != null) {
                    recorder.record(frame);
                }
                recorder.stop();
            }
            grabber.stop();
        } catch (FrameGrabber.Exception | FrameRecorder.Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Extracts only the audio track from the stream and saves it as
     * 16-bit little-endian PCM (WAV).
     *
     * @param rtspUrl            stream URL
     * @param outputPath         output file path
     * @param maxDurationSeconds maximum capture duration in seconds
     */
    public static void extractAudioFromRtsp(String rtspUrl, String outputPath, int maxDurationSeconds) {
        try (
                FFmpegFrameGrabber grabber = new FFmpegFrameGrabber(rtspUrl);
                // Width/height 0: audio-only output.
                FFmpegFrameRecorder recorder = new FFmpegFrameRecorder(outputPath, 0, 0)
        ) {
            // Select the first audio stream only.
            grabber.setOption("map", "0:a:0");
            grabber.start();
            // 1. Read the audio parameters reported by the stream.
            int audioChannels = grabber.getAudioChannels();
            int sampleRate = grabber.getSampleRate();
            int audioBitrate = grabber.getAudioBitrate();
            // Configure the recorder to mirror the source parameters.
            recorder.setAudioChannels(audioChannels);
            recorder.setSampleRate(sampleRate);
            recorder.setAudioBitrate(audioBitrate);
            recorder.setAudioCodec(avcodec.AV_CODEC_ID_PCM_S16LE); // 16-bit LE PCM
            recorder.start();
            long startTime = System.currentTimeMillis();
            Frame frame;
            while ((System.currentTimeMillis() - startTime) / 1000 < maxDurationSeconds
                    && (frame = grabber.grabSamples()) != null) {
                recorder.record(frame, 0);
            }
            recorder.stop();
            grabber.stop();
            // close() is handled by try-with-resources; the explicit release()
            // calls were redundant.
        } catch (FrameGrabber.Exception | FrameRecorder.Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Saves every 100th frame of the stream as a JPEG in the given directory.
     *
     * @param rtspUrl            stream URL
     * @param outputDir          output directory (must end with a separator)
     * @param maxDurationSeconds capture window in seconds
     */
    public static void savePicture(String rtspUrl, String outputDir, int maxDurationSeconds) {
        try (
                FFmpegFrameGrabber grabber = new FFmpegFrameGrabber(rtspUrl);
                OpenCVFrameConverter.ToMat converter = new OpenCVFrameConverter.ToMat()
        ) {
            grabber.start();
            long startTime = System.currentTimeMillis();
            Frame frame;
            int i = 0;
            while ((frame = grabber.grab()) != null && (System.currentTimeMillis() - startTime) / 1000 < maxDurationSeconds) {
                // Convert the Frame to an OpenCV Mat for imwrite.
                Mat mat = converter.convert(frame);
                // Audio frames convert to null; sample one image per 100 frames.
                if (mat != null && i % 100 == 0) {
                    imwrite(outputDir + "frame_" + i + ".jpg", mat);
                }
                i++;
            }
        } catch (FrameGrabber.Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Runs recording, snapshotting and audio extraction concurrently against
     * the same stream, waits for all three to finish, then shuts the pool down.
     */
    public static void main(String[] args) throws Exception {
        ExecutorService executor = Executors.newFixedThreadPool(4);
        try {
            CompletableFuture<Void> future1 = CompletableFuture.runAsync(() -> videoPuller(DEFAULT_RTSP_URL, 60, "D:\\va\\a.mp4"), executor);
            CompletableFuture<Void> future2 = CompletableFuture.runAsync(() -> savePicture(DEFAULT_RTSP_URL, "D:\\va\\", 60), executor);
            CompletableFuture<Void> future3 = CompletableFuture.runAsync(() ->
                    extractAudioFromRtsp(DEFAULT_RTSP_URL, "D:\\va\\audio.wav", 60), executor);
            // join() blocks until every task completes. The previous version
            // never waited: it triggered shutdown from a racing 4th task and
            // only attached a non-blocking thenRun callback, so main could
            // return before the work (or the completion message) happened.
            CompletableFuture.allOf(future1, future2, future3).join();
            System.out.println("所有任务都已完成");
        } finally {
            executor.shutdown();
        }
    }
}