Dependencies (Maven — the 4 jars required for video processing):
<!-- 视频处理需要的4个jar -->
<dependency>
<groupId>org.bytedeco</groupId>
<artifactId>javacv</artifactId>
<version>1.4.1</version>
</dependency>
<dependency>
<groupId>org.bytedeco</groupId>
<artifactId>javacpp</artifactId>
<version>1.4.1</version>
</dependency>
<dependency>
<groupId>org.bytedeco.javacpp-presets</groupId>
<artifactId>opencv-platform</artifactId>
<version>3.4.1-1.4.1</version>
</dependency>
<dependency>
<groupId>org.bytedeco.javacpp-presets</groupId>
<artifactId>ffmpeg-platform</artifactId>
<version>3.4.2-1.4.1</version>
</dependency>
Code:
/**
* base64字符串转换成图片
* @param imgStr base64字符串
* @param imgFilePath 图片存放路径
*/
/**
 * Decodes a Base64 string and writes the resulting bytes to an image file.
 *
 * @param imgStr      Base64-encoded image data (line breaks tolerated)
 * @param imgFilePath destination file path for the decoded image
 */
private static void base64ToImage(String imgStr, String imgFilePath) {
    try (java.io.FileOutputStream out = new java.io.FileOutputStream(imgFilePath)) {
        // MIME decoder tolerates line separators, matching the behavior of the
        // legacy sun.misc.BASE64Decoder this replaces (removed in Java 9+).
        byte[] buffer = java.util.Base64.getMimeDecoder().decode(imgStr);
        out.write(buffer);
    } catch (Exception e) {
        // Best-effort, matching the original contract of not propagating failures.
        // NOTE(review): consider rethrowing or logging via a real logger instead.
        e.printStackTrace();
    }
}
/**
* 将截取的帧生成文件或者生成base64
* @param frame
* @return
*/
/**
 * Encodes a grabbed video frame as a JPEG image and returns it as a Base64 string.
 *
 * @param frame a decoded video frame (must contain image data)
 * @return Base64 string of the JPEG-encoded frame, or {@code null} if encoding fails
 */
private static String getBase64FromFrame(Frame frame) {
    String imgFormat = "jpg";
    Java2DFrameConverter converter = new Java2DFrameConverter();
    BufferedImage srcBi = converter.getBufferedImage(frame);
    try {
        ByteArrayOutputStream output = new ByteArrayOutputStream();
        // Write the JPEG into an in-memory stream so we can Base64-encode the bytes.
        ImageIO.write(srcBi, imgFormat, output);
        // java.util.Base64 replaces the legacy sun.misc.BASE64Encoder (removed in
        // Java 9+). Note: the basic encoder emits no line breaks, unlike the old one.
        return java.util.Base64.getEncoder().encodeToString(output.toByteArray());
    } catch (Exception e) {
        // NOTE(review): callers receive null on failure — consider propagating instead.
        e.printStackTrace();
    }
    return null;
}
/**
* 将截取的帧生成文件
* @param frame
* @return
*/
/**
 * Converts a grabbed video frame into raw JPEG bytes.
 *
 * @param frame a decoded video frame (must contain image data)
 * @return the frame encoded as a JPEG byte array
 * @throws IOException if the image cannot be encoded
 */
private static byte[] getPicFromFrame(Frame frame) throws IOException {
    // Convert the JavaCV frame into a standard AWT image first.
    BufferedImage image = new Java2DFrameConverter().getBufferedImage(frame);
    // Encode as JPEG into an in-memory buffer and hand back the bytes.
    ByteArrayOutputStream buffer = new ByteArrayOutputStream();
    ImageIO.write(image, "jpg", buffer);
    return buffer.toByteArray();
}
/**
* 获取指定视频的帧并保存为图片至指定目录
*
* @param fileUrl 源视频文件Url路径
* @param stepSecond 每隔几秒取一帧,默认1200s
* @param count 最多获取几帧
* @return 返回截取的帧的Base64串列表
* @throws Exception
*/
/**
 * Grabs frames from a video at a fixed interval and returns them as Base64 JPEG strings.
 *
 * @param fileUrl    source video file URL/path
 * @param stepSecond interval in seconds between captured frames; defaults to 600 when null
 * @param count      maximum number of frames to capture; null means unlimited
 * @return list of captured frames as Base64 strings (empty if no image frames decode)
 * @throws Exception if the video cannot be opened or decoded
 */
public static synchronized List<String> fetchPicFromVideo(String fileUrl, Integer stepSecond, Integer count) throws Exception {
    List<String> picBase64List = new ArrayList<>();
    int step = stepSecond == null ? 600 : stepSecond;
    FFmpegFrameGrabber ff = new FFmpegFrameGrabber(fileUrl);
    ff.start();
    try {
        long timeLength = ff.getLengthInTime();
        Frame first = ff.grabImage();
        if (first == null) {
            // No decodable image frames at all — original code would have NPE'd here.
            return picBase64List;
        }
        // Timestamps are offset from the first frame's timestamp (microseconds).
        long startTime = first.timestamp;
        long timestamp = 0;
        int second = 0;
        int picNum = 0;
        while (timestamp <= timeLength) {
            timestamp = startTime + second * 1000000L;
            ff.setTimestamp(timestamp);
            Frame frame = ff.grabImage();
            // Seeking may land on a non-image (audio) position; skip those.
            if (frame != null && frame.image != null) {
                picBase64List.add(getBase64FromFrame(frame));
                picNum++;
                if (count != null && picNum == count) {
                    break;
                }
            }
            second += step;
        }
    } finally {
        // Always release native resources, even if grabbing/seeking throws.
        ff.stop();
    }
    return picBase64List;
}
/**
* 获取视频的首帧并保存为图片
*
* @param fileUrl 源视频文件Url路径
* @return 返回截取的帧
* @throws Exception
*/
/**
 * Grabs the first image frame of a video and returns it as JPEG bytes along with
 * the video duration.
 *
 * @param fileUrl source video file URL/path
 * @return a {@link VideoParseDo} holding the first frame's JPEG bytes and the
 *         video length (as reported by {@code getLengthInTime()}, microseconds)
 * @throws Exception if the video cannot be opened or contains no image frames
 */
public static VideoParseDo fetchFirstPicFromVideo(String fileUrl) throws Exception {
    FFmpegFrameGrabber ff = new FFmpegFrameGrabber(fileUrl);
    ff.start();
    try {
        Frame frame = ff.grabImage();
        if (frame == null) {
            // Original code would have thrown an opaque NPE inside getPicFromFrame.
            throw new IOException("No image frame could be grabbed from: " + fileUrl);
        }
        byte[] picFromFrame = getPicFromFrame(frame);
        return new VideoParseDo(picFromFrame, ff.getLengthInTime());
    } finally {
        // Always release native resources, even if grabbing throws.
        ff.stop();
    }
}
/**
 * Returns the duration of a video as reported by FFmpeg's
 * {@code getLengthInTime()} (microseconds — TODO confirm unit against callers).
 *
 * @param fileUrl source video file URL/path
 * @return the video length in FFmpeg time units
 * @throws IOException if the video cannot be opened
 */
public static synchronized long getVideoTime(String fileUrl) throws IOException {
    FFmpegFrameGrabber ff = new FFmpegFrameGrabber(fileUrl);
    ff.start();
    try {
        return ff.getLengthInTime();
    } finally {
        // Always release native resources, even if start()/query throws after start.
        ff.stop();
    }
}