对视频和图像的处理

1. pom 和 application 配置


<!-- 视频图像处理需要用到的包-->
		<dependency>
			<groupId>org.bytedeco</groupId>
			<artifactId>javacv</artifactId>
			<version>1.5.7</version>
		</dependency>
		<dependency>
			<groupId>org.bytedeco</groupId>
			<artifactId>javacpp</artifactId>
			<version>1.5.7</version>
		</dependency>
		<dependency>
			<groupId>org.bytedeco</groupId>
			<artifactId>opencv</artifactId>
			<version>4.5.5-1.5.7</version>
			<classifier>linux-x86_64</classifier>
		</dependency>
		<dependency>
			<groupId>org.bytedeco</groupId>
			<artifactId>openblas</artifactId>
			<version>0.3.19-1.5.7</version>
			<classifier>linux-x86_64</classifier>
		</dependency>
		<dependency>
			<groupId>org.bytedeco</groupId>
			<artifactId>ffmpeg</artifactId>
			<version>5.0-1.5.7</version>
			<classifier>linux-x86_64</classifier>
		</dependency>
		<dependency>
			<groupId>org.bytedeco</groupId>
			<artifactId>opencv</artifactId>
			<version>4.5.5-1.5.7</version>
			<classifier>windows-x86_64</classifier>
		</dependency>
		<dependency>
			<groupId>org.bytedeco</groupId>
			<artifactId>openblas</artifactId>
			<version>0.3.19-1.5.7</version>
			<classifier>windows-x86_64</classifier>
		</dependency>
		<dependency>
			<groupId>org.bytedeco</groupId>
			<artifactId>ffmpeg</artifactId>
			<version>5.0-1.5.7</version>
			<classifier>windows-x86_64</classifier>
		</dependency>
		
 <!--java调用ffmpeg-->
        <dependency>
            <groupId>ws.schild</groupId>
            <artifactId>jave-all-deps</artifactId>
            <version>3.0.1</version>
            <exclusions>
                <!--  排除windows 32位系统      -->
                <exclusion>
                    <groupId>ws.schild</groupId>
                    <artifactId>jave-nativebin-win32</artifactId>
                </exclusion>
                <!--  排除linux 32位系统      -->
                <exclusion>
                    <groupId>ws.schild</groupId>
                    <artifactId>jave-nativebin-linux32</artifactId>
                </exclusion>
                <!-- 排除Mac系统-->
                <exclusion>
                    <groupId>ws.schild</groupId>
                    <artifactId>jave-nativebin-osx64</artifactId>
                </exclusion>
            </exclusions>
        </dependency>


   <!-- io常用工具类 Apache Commons IO 库包含实用程序类、流实现、文件过滤器、文件比较器、字节序转换类等等-->
        <dependency>
            <groupId>commons-io</groupId>
            <artifactId>commons-io</artifactId>
            <version>2.11.0</version>
        </dependency>

        <!--Java 音频视频编码器  https://mvnrepository.com/artifact/ws.schild/jave-core -->
        <dependency>
            <groupId>ws.schild</groupId>
            <artifactId>jave-core</artifactId>
            <version>3.3.1</version>
        </dependency>

spring:
servlet:
multipart:
enabled: true # 开启文件上传
max-file-size: 300MB #限制文件上传大小
max-request-size: 300MB # 限制文件流大小




2. 像素实体

/**
 * Resolution (pixel dimensions) of an image or a video frame.
 * Lombok generates the no-arg / all-args constructors and accessors.
 */
@Data
@NoArgsConstructor
@AllArgsConstructor
public class SizeMeta {
    /**
     * Region width, in px (pixels).
     */
    private int width;

    /**
     * Region height, in px (pixels).
     */
    private int height;
}

3.素材属性工具类

import com.example.demo001.config.SizeMeta;
import org.apache.commons.io.FileUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.web.multipart.MultipartFile;
import ws.schild.jave.EncoderException;
import ws.schild.jave.MultimediaObject;
import ws.schild.jave.info.MultimediaInfo;

import javax.imageio.ImageIO;
import javax.imageio.ImageReader;
import javax.imageio.stream.ImageInputStream;
import java.awt.color.ColorSpace;
import java.awt.image.BufferedImage;
import java.awt.image.ColorConvertOp;
import java.awt.image.Raster;
import java.io.File;
import java.io.IOException;
import java.util.Iterator;

/**
 * @ClassName MaterialAttributesUtil
 * @Description 素材属性工具类
 * @Author Pathfinder
 * @Date 2022年06月16日 09时57分
 */
public class MaterialAttributesUtil {

    private static final Logger log = LoggerFactory.getLogger(MaterialAttributesUtil.class);

    /** Utility class: no instances. */
    private MaterialAttributesUtil() {
    }

    /**
     * Reads the video resolution (width/height, in px) from a file.
     *
     * @param file video file
     * @return resolution, or {@code null} when the file cannot be parsed or has no video stream
     */
    public static SizeMeta getVideoSizeMeta(File file) {
        try {
            MultimediaObject instance = new MultimediaObject(file);
            MultimediaInfo result = instance.getInfo();
            if (result == null || result.getVideo() == null || result.getVideo().getSize() == null) {
                // No video stream (e.g. audio-only container).
                return null;
            }
            int width = result.getVideo().getSize().getWidth();
            int height = result.getVideo().getSize().getHeight();
            return new SizeMeta(width, height);
        } catch (Exception e) {
            // Log through SLF4J instead of printStackTrace so failures reach the application log.
            log.error("getVideoSizeMeta failed for file {}", file, e);
        }
        return null;
    }

    /**
     * Copies the uploaded video to a temporary file and reads its duration.
     * <p>
     * NOTE(review): despite the method name, this returns what
     * {@link #getVideoTime(File)} returns, i.e. whole seconds, not
     * milliseconds — confirm callers' expectation before renaming.
     *
     * @param multipartFile uploaded video; may be {@code null}
     * @return duration in whole seconds, or 0 on any failure
     */
    public static long getDurationBackMillis(MultipartFile multipartFile) {
        if (multipartFile == null) {
            return 0L;
        }
        // Temporary copy named after the upload; deleted again in finally so the
        // working directory does not accumulate uploaded files.
        File file = new File(multipartFile.getOriginalFilename());
        try {
            FileUtils.copyInputStreamToFile(multipartFile.getInputStream(), file);
            return getVideoTime(file);
        } catch (Exception e) {
            log.error("getDurationBackMillis failed for upload {}", multipartFile.getOriginalFilename(), e);
            return 0L;
        } finally {
            FileUtils.deleteQuietly(file);
        }
    }

    /**
     * Reads the duration of a video file.
     *
     * @param file video file
     * @return duration in whole seconds (rounded down)
     * @throws EncoderException when the media cannot be parsed
     */
    public static long getVideoTime(File file) throws EncoderException {
        // Wrap the plain file as a media object and query its info.
        MultimediaObject multimediaObject = new MultimediaObject(file);
        MultimediaInfo info = multimediaObject.getInfo();
        // getDuration() is in milliseconds; floor to whole seconds.
        // (Replaces the deprecated new Double(...).intValue() idiom; Math.floor
        // keeps the original behaviour for negative/unknown durations.)
        return (long) Math.floor(info.getDuration() / 1000.0);
    }

    /**
     * Reads the image resolution (width/height, in px).
     * Falls back to a raster-level JPEG read (e.g. for CMYK JPEGs that
     * ImageIO cannot decode directly) when the normal decode path fails.
     *
     * @param file image file
     * @return resolution, or {@code null} when the image cannot be read at all
     */
    public static SizeMeta getImageSizeMeta(File file) {
        BufferedImage bi = null;
        try {
            bi = ImageIO.read(file);
        } catch (IOException e) {
            log.error("获取图片分辨率失败", e);
        }
        // BUGFIX: ImageIO.read returns null (without throwing) when no registered
        // reader accepts the file; the old code then NPE'd on bi.getWidth().
        // Treat that case like a decode failure and try the raster fallback.
        if (bi == null) {
            bi = readJpegViaRaster(file);
        }
        return bi == null ? null : new SizeMeta(bi.getWidth(), bi.getHeight());
    }

    /**
     * Fallback decode path: reads the raw JPEG raster and converts it into an
     * RGB image, which works for CMYK JPEGs rejected by ImageIO.read.
     *
     * @return decoded image, or {@code null} on failure
     */
    private static BufferedImage readJpegViaRaster(File file) {
        try {
            // Find a reader able to hand us the raw raster.
            ImageReader reader = null;
            Iterator readers = ImageIO.getImageReadersByFormatName("JPEG");
            while (readers.hasNext()) {
                ImageReader candidate = (ImageReader) readers.next();
                if (candidate.canReadRaster()) {
                    reader = candidate;
                    break;
                }
            }
            if (reader == null) {
                // Previously this could NPE (no reader at all) or use a reader
                // that cannot read rasters; now we fail explicitly.
                log.error("no raster-capable JPEG ImageReader available");
                return null;
            }
            // Stream the image file (the original CMYK image)
            ImageInputStream input = ImageIO.createImageInputStream(file);
            if (input == null) {
                log.error("ImageIO.createImageInputStream return null");
                return null;
            }
            reader.setInput(input);
            // Read the raw raster and copy it into a fresh RGB image.
            Raster raster = reader.readRaster(0, null);
            BufferedImage bi = new BufferedImage(raster.getWidth(), raster.getHeight(), BufferedImage.TYPE_4BYTE_ABGR);
            bi.getRaster().setRect(raster);
            return makeGrey(bi);
        } catch (Exception e) {
            log.error("readImageWithNoError: error", e);
            return null;
        }
    }

    /**
     * Converts the image into a 3-byte BGR image in the sRGB colour space.
     * NOTE(review): despite its name this performs an sRGB colour conversion
     * (used to normalise CMYK-decoded rasters), not a greyscale conversion.
     */
    public static BufferedImage makeGrey(BufferedImage img) {
        BufferedImage bufferedImage = new BufferedImage(img.getWidth(), img.getHeight(), BufferedImage.TYPE_3BYTE_BGR);
        bufferedImage.getGraphics().drawImage(img, 0, 0, img.getWidth(), img.getHeight(), null);
        bufferedImage = new ColorConvertOp(ColorSpace.getInstance(ColorSpace.CS_sRGB), null).filter(img, bufferedImage);
        return bufferedImage;
    }

    /**
     * File size in MB, rounded to two decimal places.
     *
     * @param fileName path of the file to measure
     * @return size in MB with two-decimal precision
     */
    public static double getMemorySpace(String fileName) {
        return getMemorySpace(new File(fileName));
    }

    /**
     * File size in MB, rounded to two decimal places.
     *
     * @param file file to measure
     * @return size in MB with two-decimal precision
     */
    public static double getMemorySpace(File file) {
        long length = file.length();
        double xMB = length / 1024.0 / 1024;
        // Round to 2 decimals: scale up, round, scale back down.
        double xMBWith2Point = Math.round(xMB * 100) * 0.01;
        return xMBWith2Point;
    }

    /**
     * Whether the file's video stream is H.264 encoded.
     *
     * @param file video file
     * @return {@code true} when the decoder name starts with "h264";
     *         {@code false} otherwise, including when the file has no video
     *         stream or cannot be parsed
     */
    public static boolean isH264Decoder(File file) {
        try {
            MultimediaObject instance = new MultimediaObject(file);
            MultimediaInfo info = instance.getInfo();
            // Guard against audio-only files / missing decoder metadata (old code NPE'd here).
            if (info == null || info.getVideo() == null || info.getVideo().getDecoder() == null) {
                return false;
            }
            return info.getVideo().getDecoder().toLowerCase().startsWith("h264");
        } catch (EncoderException e) {
            log.error("isH264Decoder failed for file {}", file, e);
        }
        return false;
    }

    /** Negation of {@link #isH264Decoder(File)}. */
    public static boolean isNotH264Decoder(File file) {
        return !isH264Decoder(file);
    }
}



4. 素材通用工具类,支持图片或视频

import org.bytedeco.ffmpeg.avcodec.AVCodecParameters;
import org.bytedeco.ffmpeg.avformat.AVFormatContext;
import org.bytedeco.ffmpeg.avformat.AVStream;
import org.bytedeco.ffmpeg.global.avcodec;
import org.bytedeco.javacpp.Loader;
import org.bytedeco.javacpp.annotation.Properties;
import org.bytedeco.javacv.*;
import org.bytedeco.opencv.opencv_core.*;
import org.springframework.stereotype.Component;

import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.nio.file.Files;

import static org.bytedeco.opencv.global.opencv_core.*;
import static org.bytedeco.opencv.global.opencv_imgcodecs.imread;
import static org.bytedeco.opencv.global.opencv_imgcodecs.imwrite;
import static org.bytedeco.opencv.global.opencv_imgproc.resize;

/**
 * 素材通用工具类,支持图片或视频
 *
 * @author alderaan
 * @version 创建时间:2022年5月10日 下午1:25:51
 *
 */
@Properties(inherit = org.bytedeco.opencv.presets.opencv_core.class)
// @Component("prototype")
@Component
public class MaterialCommonUtil
{
    static
    {
        // Preload the native JavaCPP/OpenCV libraries so the statically imported
        // OpenCV functions (imread/imwrite/resize/...) are linked before first use.
        Loader.load();
    }

    // Converts between JavaCV Frame and OpenCV Mat.
    // NOTE(review): OpenCVFrameConverter instances are not thread-safe and this
    // bean is a singleton — concurrent video conversions would share this
    // converter; confirm whether conversions can run in parallel.
    private OpenCVFrameConverter.ToMat converter = new OpenCVFrameConverter.ToMat();

    /**
     * 图片裁剪函数,将srcimageurl路径的图片按照Rect(left,top,width,height)位置进行裁剪,并将图片裁剪后结果存储到dstimageurl路径下
     *
     * @param srcimageurl 原始图片路径
     * @param dstimageurl 裁剪后图片保存路径
     * @param left        裁剪框左上角点x坐标
     * @param top         裁剪框左上角点y坐标
     * @param width       裁剪框宽度
     * @param height      裁剪框高度
     * @throws Exception 异常
     */
    public void imageCutWithRect(String srcimageurl, String dstimageurl, int left, int top, int width,
                                        int height) throws Exception
    {
        if (width <= 0 || height <= 0)
        {
            throw new Exception("Error! Dst width or height can not be zero!");
        }
        Mat srcimagemat = imread(srcimageurl);
        if (0 == srcimagemat.cols() || 0 == srcimagemat.rows())
        {
            throw new FileNotFoundException("Error! File srcimageurl not found or illegal!");
        }

        // Clamp the crop window to the image bounds.
        if (left < 0)
        {
            left = 0;
        }
        if (top < 0)
        {
            top = 0;
        }
        // BUGFIX: clamp width/height relative to left/top. The previous code only
        // clamped them against the full image size, so a crop with left/top > 0
        // could still produce a Rect extending past the image and crash OpenCV.
        if (left + width > srcimagemat.cols())
        {
            width = srcimagemat.cols() - left;
        }
        if (top + height > srcimagemat.rows())
        {
            height = srcimagemat.rows() - top;
        }
        if (width <= 0 || height <= 0)
        {
            // left/top lies entirely outside the image.
            throw new Exception("Error! Crop rectangle lies outside the image!");
        }

        // Copy into a UMat so OpenCV can use its transparent OpenCL path (T-API).
        UMat srcimage = new UMat(new Size(srcimagemat.cols(), srcimagemat.rows()), CV_8UC3);
        srcimagemat.copyTo(srcimage);

        // OpenCV里通常定义一个矩形有两种方式,一种是通过矩形左上角和矩形宽高来定义
        Rect rect = new Rect(left, top, width, height);
        UMat dstimage = new UMat(srcimage, rect);

        boolean status = imwrite(dstimageurl, dstimage);
        if (false == status)
        {
            throw new Exception("Error! Saving file " + dstimageurl + " error! Please check path!");
        }
    }

    /**
     * 图片转码:reads the image at srcimageurl and re-encodes it into the format
     * implied by dstimageurl's file extension.
     *
     * @param srcimageurl 原始图片路径
     * @param dstimageurl 转码后图片保存路径
     * @throws FileNotFoundException when the source image cannot be read
     * @throws Exception when the destination cannot be written
     */
    public void imageConvert(String srcimageurl, String dstimageurl) throws Exception {
        Mat srcimagemat = imread(srcimageurl);
        if (0 == srcimagemat.cols() || 0 == srcimagemat.rows())
        {
            throw new FileNotFoundException("Error! File srcimageurl not found or illegal!");
        }

        // imread already decodes to an 8-bit BGR Mat and imwrite re-encodes based
        // on the destination extension. The former full-image UMat "crop" was a
        // no-op round-trip and has been removed.
        boolean status = imwrite(dstimageurl, srcimagemat);
        if (false == status)
        {
            throw new Exception("Error! Saving file " + dstimageurl + " error! Please check path!");
        }
    }


    /**
     * Scales the image at {@code srcimageurl} to exactly dstwidth x dstheight
     * (no aspect-ratio preservation) and writes the result to {@code dstimageurl}.
     *
     * @param srcimageurl source image path
     * @param dstimageurl destination image path
     * @param dstwidth    target width (> 0)
     * @param dstheight   target height (> 0)
     * @throws Exception on invalid arguments, unreadable source, or write failure
     */
    public void imageResize(String srcimageurl, String dstimageurl, int dstwidth, int dstheight) throws Exception
    {
        if (dstwidth <= 0 || dstheight <= 0)
        {
            throw new Exception("Error! Dst width or height can not be zero!");
        }

        Mat source = imread(srcimageurl);
        if (source.cols() == 0 || source.rows() == 0)
        {
            throw new FileNotFoundException("Error! File srcimageurl not found or illegal!");
        }

        // Move the pixels into a UMat so OpenCV may use its OpenCL path.
        UMat accelerated = new UMat(new Size(source.cols(), source.rows()), CV_8UC3);
        source.copyTo(accelerated);

        UMat scaled = new UMat(new Size(dstwidth, dstheight), CV_8UC3);
        resize(accelerated, scaled, new Size(dstwidth, dstheight));

        if (!imwrite(dstimageurl, scaled))
        {
            throw new Exception("Error! Saving file " + dstimageurl + " error! Please check path!");
        }
    }

    /**
     * Aspect-preserving scale of srcimage into dstwidth x dstheight: the image is
     * scaled to fit, centred, and the remaining area padded with constant (black)
     * borders into dstimage_result.
     */
    private void uniformScaleResize(UMat srcimage, UMat dstimage_result, int dstwidth, int dstheight)
    {
        int srcW = srcimage.cols();
        int srcH = srcimage.rows();

        // Candidate sizes: height when the width is fixed, width when the height is fixed.
        int fitH = (int) (dstwidth * ((float) srcH / srcW));
        int fitW = (int) (dstheight * ((float) srcW / srcH));

        // Pick the orientation whose scaled image still fits inside the target box.
        boolean fitToWidth = fitH <= dstheight;
        int scaledW = fitToWidth ? dstwidth : fitW;
        int scaledH = fitToWidth ? fitH : dstheight;

        UMat scaled = new UMat(new Size(scaledW, scaledH), CV_8UC3);
        resize(srcimage, scaled, new Size(scaledW, scaledH));

        // Centre the scaled image; odd leftovers go to the bottom/right border.
        int padTop = (dstheight - scaled.rows()) / 2;
        int padBottom = (dstheight - scaled.rows() + 1) / 2;
        int padLeft = (dstwidth - scaled.cols()) / 2;
        int padRight = (dstwidth - scaled.cols() + 1) / 2;

        copyMakeBorder(scaled, dstimage_result, padTop, padBottom, padLeft, padRight, BORDER_CONSTANT);
    }

    /**
     * Aspect-preserving resize: scales the image at srcimageurl to fit inside
     * dstwidth x dstheight, pads the remainder with black borders, and writes
     * the result to dstimageurl.
     *
     * @param srcimageurl source image path
     * @param dstimageurl destination image path
     * @param dstwidth    target width (> 0)
     * @param dstheight   target height (> 0)
     * @throws Exception on invalid arguments, unreadable source, or write failure
     */
    public void imageUniformScaleResize(String srcimageurl, String dstimageurl, int dstwidth, int dstheight)
            throws Exception
    {
        if (dstwidth <= 0 || dstheight <= 0)
        {
            throw new Exception("Error! Dst width or height can not be zero!");
        }

        Mat source = imread(srcimageurl);
        if (source.cols() == 0 || source.rows() == 0)
        {
            throw new FileNotFoundException("Error! File srcimageurl not found or illegal!");
        }

        // Move the pixels into a UMat so OpenCV may use its OpenCL path.
        UMat accelerated = new UMat(new Size(source.cols(), source.rows()), CV_8UC3);
        source.copyTo(accelerated);

        UMat padded = new UMat(new Size(dstwidth, dstheight), CV_8UC3);
        uniformScaleResize(accelerated, padded, dstwidth, dstheight);

        if (!imwrite(dstimageurl, padded))
        {
            throw new Exception("Error! Saving file " + dstimageurl + " error! Please check path!");
        }
    }

    // @Async
    // @SneakyThrows
    /**
     * Moves the file at prePath to destination via java.nio Files.move.
     * NOTE(review): without REPLACE_EXISTING the move fails when the destination
     * already exists, and the IOException is only printed below — callers cannot
     * observe the failure. Confirm this best-effort behaviour is intended.
     */
    public void moveFile(String prePath, String destination){
        try {
            Files.move(new File(prePath).toPath(), new File(destination).toPath());
        } catch (IOException e) {
            // Swallowed: error is printed to stderr only.
            e.printStackTrace();
        }
    }

    /**
     * 视频缩放并转码函数
     * <p>
     * Re-encodes the input video to the format implied by outputFile's extension,
     * scaling every video frame to dstwidth x dstheight. First tries the Nvidia
     * "h264_nvenc" hardware encoder, then falls back to the software H.264 encoder.
     *
     * @param inputFile  原始视频文件完整路径
     * @param outputFile 目标视频文件完整保存路径(必须完整文件名,即包含格式后缀,推荐格式后缀为.mp4)
     * @param dstwidth   缩放后视频宽度(不为0)
     * @param dstheight  缩放后视频高度(不为0)
     * @throws Exception 异常
     */
    public void videoConvertWithResize(String inputFile, String outputFile, int dstwidth, int dstheight)
            throws Exception
    {
        if (dstwidth <= 0 || dstheight <= 0)
        {
            throw new Exception("Error! Dst width or height can not be zero!");
        }
        if (outputFile.lastIndexOf('.') < 0)
        {
            throw new Exception("Error! Output file format undetected!");
        }
        // Container format is taken from the output file's extension.
        String format = outputFile.substring(outputFile.lastIndexOf('.'));

        FFmpegLogCallback.set();
        Frame frame;
        FFmpegFrameGrabber grabber = new FFmpegFrameGrabber(inputFile);
        FFmpegFrameRecorder recorder = null;

        try
        {
            System.out.println("开始初始化帧抓取器");

            // 初始化帧抓取器,例如数据结构(时间戳、编码器上下文、帧对象等),
            // 如果入参等于true,还会调用avformat_find_stream_info方法获取流的信息,放入AVFormatContext类型的成员变量oc中
            grabber.start(true);

            System.out.println("帧抓取器初始化完成");

            // grabber.start方法中,初始化的解码器信息存在放在grabber的成员变量oc中
            AVFormatContext avFormatContext = grabber.getFormatContext();

            // 文件内有几个媒体流(一般是视频流+音频流)
            int streamNum = avFormatContext.nb_streams();

            // 没有媒体流就不用继续了
            if (streamNum < 1)
            {
                System.out.println("文件内不存在媒体流");
                throw new Exception("Error! There is no media stream in the file!");
            }

            // 取得视频的帧率
            int frameRate = (int) grabber.getVideoFrameRate();

            System.out.printf("视频帧率[%d],视频时长[%d]秒,媒体流数量[%d]\r\n", frameRate, avFormatContext.duration() / 1000000,
                    avFormatContext.nb_streams());

            // 遍历每一个流,检查其类型
            for (int i = 0; i < streamNum; i++)
            {
                AVStream avStream = avFormatContext.streams(i);
                AVCodecParameters avCodecParameters = avStream.codecpar();
                System.out.printf("流的索引[%d],编码器类型[%d],编码器ID[%d]\r\n", i, avCodecParameters.codec_type(),
                        avCodecParameters.codec_id());
            }

            // 视频宽度
            int frameWidth = grabber.getImageWidth();
            // 视频高度
            int frameHeight = grabber.getImageHeight();
            // 音频通道数量
            int audioChannels = grabber.getAudioChannels();

            System.out.printf("视频宽度[%d],视频高度[%d],音频通道数[%d]\r\n", frameWidth, frameHeight, audioChannels);

            try
            {
                // First attempt: hardware-accelerated H.264 encoder.
                recorder = new FFmpegFrameRecorder(outputFile, dstwidth, dstheight, audioChannels);
                configureRecorder(recorder, format, frameRate, audioChannels);
                recorder.setVideoCodecName("h264_nvenc");// 编码器,Nvidia GPU加速关键
                recorder.start();
            } catch (Exception ex)
            {
                System.out.println("尝试选择Nvidia GPU加速失败!");

                // BUGFIX: recorder is null when the FFmpegFrameRecorder constructor
                // itself threw; the old code NPE'd here and masked the real error.
                if (recorder != null)
                {
                    recorder.stop();
                    recorder.close();
                    recorder.release();
                }

                // Fallback: software H.264 encoder.
                recorder = new FFmpegFrameRecorder(outputFile, dstwidth, dstheight, audioChannels);
                configureRecorder(recorder, format, frameRate, audioChannels);
                recorder.setVideoCodec(avcodec.AV_CODEC_ID_H264);
                recorder.start();
            }

            int videoFrameNum = 0;
            int audioFrameNum = 0;
            int dataFrameNum = 0;

            // Reusable buffers for the per-frame scale (UMat enables the OpenCL path).
            UMat umat = new UMat(frameHeight, frameWidth, CV_8UC3);
            UMat umat_resize = new UMat(dstheight, dstwidth, CV_8UC3);
            Mat mat_resize = new Mat(dstheight, dstwidth, CV_8UC3);
            Size size_temp = new Size(dstwidth, dstheight);

            // 持续从视频源取帧
            while (null != (frame = grabber.grab()))
            {
                // 有图像,就把视频帧加一
                if (null != frame.image)
                {
                    // Scale the decoded frame to the target size, then re-record it.
                    Mat mat = converter.convertToMat(frame);
                    mat.copyTo(umat);
                    resize(umat, umat_resize, size_temp);
                    umat_resize.copyTo(mat_resize);
                    Frame frame_temp = converter.convert(mat_resize);

                    videoFrameNum++;
                    // 取出的每一帧,都保存到视频
                    recorder.record(frame_temp);
                }

                // 有声音,就把音频帧加一
                if (null != frame.samples)
                {
                    audioFrameNum++;
                    // 取出的每一帧,都保存到视频
                    recorder.record(frame);
                }

                // 有数据,就把数据帧加一
                if (null != frame.data)
                {
                    dataFrameNum++;
                }
            }

            System.out.printf("转码完成,视频帧[%d],音频帧[%d],数据帧[%d]\r\n", videoFrameNum, audioFrameNum, dataFrameNum);

        } catch (Exception e)
        {
            throw e;
        } finally
        {
            if (recorder != null)
            {
                try
                {
                    recorder.close();
                } catch (Exception e)
                {
                    throw e;
                }
            }

            try
            {
                grabber.close();
            } catch (FrameGrabber.Exception e)
            {
                throw e;
            }
        }
    }

    /**
     * Applies the encoder settings shared by the GPU and CPU recorder attempts
     * (extracted from the duplicated configuration blocks).
     */
    private void configureRecorder(FFmpegFrameRecorder recorder, String format, int frameRate, int audioChannels)
    {
        recorder.setFormat(format);
        // 码率设置
        recorder.setVideoBitrate(4096000);
        // 一秒内的帧数,帧率
        recorder.setFrameRate(frameRate);
        // 两个关键帧之间的帧数
        recorder.setGopSize(frameRate);
        // 设置音频通道数,与视频源的通道数相等
        recorder.setAudioChannels(audioChannels);
    }

    /**
     * 视频等比缩放黑边填充并转码函数
     *
     * @param inputFile  原始视频文件完整路径
     * @param outputFile 目标视频文件完整保存路径(必须完整文件名,即包含格式后缀,推荐格式后缀为.mp4)
     * @param dstwidth   缩放后视频宽度(不为0)
     * @param dstheight  缩放后视频高度(不为0)
     * @throws Exception 异常
     */
    // @SneakyThrows
    // @Async
    public void videoConvertWithUniformScaleResize(String inputFile, String outputFile, int dstwidth,
                                                          int dstheight) throws Exception
    {
        if (dstwidth <= 0 || dstheight <= 0)
        {
            throw new Exception("Error! Dst width or height can not be zero!");
        }
        if (outputFile.lastIndexOf('.') < 0)
        {
            throw new Exception("Error! Output file format undetected!");
        }
        String format = outputFile.substring(outputFile.lastIndexOf('.'));

        FFmpegLogCallback.set();
        Frame frame;
        FFmpegFrameGrabber grabber = new FFmpegFrameGrabber(inputFile);
        FFmpegFrameRecorder recorder = null;

        try
        {
            System.out.println("开始初始化帧抓取器");

            // 初始化帧抓取器,例如数据结构(时间戳、编码器上下文、帧对象等),
            // 如果入参等于true,还会调用avformat_find_stream_info方法获取流的信息,放入AVFormatContext类型的成员变量oc中
            grabber.start(true);

            System.out.println("帧抓取器初始化完成");

            // grabber.start方法中,初始化的解码器信息存在放在grabber的成员变量oc中
            AVFormatContext avFormatContext = grabber.getFormatContext();

            // 文件内有几个媒体流(一般是视频流+音频流)
            int streamNum = avFormatContext.nb_streams();

            // 没有媒体流就不用继续了
            if (streamNum < 1)
            {
                System.out.println("文件内不存在媒体流");
                throw new Exception("Error! There is no media stream in the file!");
            }

            // 取得视频的帧率
            int frameRate = (int) grabber.getVideoFrameRate();

            System.out.printf("视频帧率[%d],视频时长[%d]秒,媒体流数量[%d]\r\n", frameRate, avFormatContext.duration() / 1000000,
                    avFormatContext.nb_streams());

            // 遍历每一个流,检查其类型
            for (int i = 0; i < streamNum; i++)
            {
                AVStream avStream = avFormatContext.streams(i);
                AVCodecParameters avCodecParameters = avStream.codecpar();
                System.out.printf("流的索引[%d],编码器类型[%d],编码器ID[%d]\r\n", i, avCodecParameters.codec_type(),
                        avCodecParameters.codec_id());
            }

            // 视频宽度
            int frameWidth = grabber.getImageWidth();
            // 视频高度
            int frameHeight = grabber.getImageHeight();
            // 音频通道数量
            int audioChannels = grabber.getAudioChannels();

            System.out.printf("视频宽度[%d],视频高度[%d],音频通道数[%d]\r\n", frameWidth, frameHeight, audioChannels);

            try
            {
                recorder = new FFmpegFrameRecorder(outputFile, dstwidth, dstheight, audioChannels);

//				// h264_qsv/hevc_qsv 需要
//				recorder.setPixelFormat(avutil.AV_PIX_FMT_NV12);
//				recorder.setAudioCodec(avcodec.AV_CODEC_ID_AAC);
//				recorder.setVideoCodecName("h264_qsv");// 编码器, INTEL QSV GPU加速关键

                recorder.setFormat(format);
                // 码率设置
                recorder.setVideoBitrate(4096000);

                // 一秒内的帧数,帧率
                recorder.setFrameRate(frameRate);

                // 两个关键帧之间的帧数
                recorder.setGopSize(frameRate);

                // 设置音频通道数,与视频源的通道数相等
                recorder.setAudioChannels(grabber.getAudioChannels());

                recorder.setVideoCodecName("h264_nvenc");// 编码器,Nvidia GPU加速关键

                recorder.start();
            } catch (Exception ex)
            {
                System.out.println("尝试选择Nvidia GPU加速失败!");

                recorder.stop();
                recorder.close();
                recorder.release();

                recorder = new FFmpegFrameRecorder(outputFile, dstwidth, dstheight, audioChannels);

                recorder.setFormat(format);
                // 码率设置
                recorder.setVideoBitrate(4096000);

                // 一秒内的帧数,帧率
                recorder.setFrameRate(frameRate);

                // 两个关键帧之间的帧数
                recorder.setGopSize(frameRate);

                // 设置音频通道数,与视频源的通道数相等
                recorder.setAudioChannels(grabber.getAudioChannels());

                recorder.setVideoCodec(avcodec.AV_CODEC_ID_H264);

                recorder.start();
            }

            int videoFrameNum = 0;
            int audioFrameNum = 0;
            int dataFrameNum = 0;

            UMat umat = new UMat(frameHeight, frameWidth, CV_8UC3);
            UMat umat_resize = new UMat(dstheight, dstwidth, CV_8UC3);
            Mat mat_resize = new Mat(dstheight, dstwidth, CV_8UC3);

            int height = (int) (dstwidth * ((float) (frameHeight) / frameWidth));
            int width = (int) (dstheight * ((float) (frameWidth) / frameHeight));

            UMat dstimage = null;

            Size size_temp;

            if (height <= dstheight)
            {
                size_temp = new Size(dstwidth, height);
                dstimage = new UMat(size_temp, CV_8UC3);

            } else
            {
                size_temp = new Size(width, dstheight);
                dstimage = new UMat(size_temp, CV_8UC3);
            }

            int top = (dstheight - dstimage.rows()) / 2;
            int down = (dstheight - dstimage.rows() + 1) / 2;
            int left = (dstwidth - dstimage.cols()) / 2;
            int right = (dstwidth - dstimage.cols() + 1) / 2;

            // 使用黑色进行像素填充
            Scalar scalartemp = new Scalar(0, 0, 0, 0);

            boolean resizeflag = (umat.cols() != dstimage.cols() && umat.rows() != dstimage.rows());

            // 持续从视频源取帧
            while (null != (frame = grabber.grab()))
            {
                // 有图像,就把视频帧加一
                if (null != frame.image)
                {
                    Mat mat = converter.convertToMat(frame);
                    mat.copyTo(umat);
                    if (resizeflag)
                    {
                        resize(umat, dstimage, size_temp);
                        copyMakeBorder(dstimage, umat_resize, top, down, left, right, BORDER_CONSTANT, scalartemp);
                    } else
                    {
                        // UMat imgROI = new UMat(umat_resize, new Rect(left, top, dstimage.cols(),
                        // dstimage.rows()));
                        // umat.copyTo(imgROI);// 将小图复制进底图中
                        copyMakeBorder(umat, umat_resize, top, down, left, right, BORDER_CONSTANT, scalartemp);
                    }

                    umat_resize.copyTo(mat_resize);
                    Frame frame_temp = converter.convert(mat_resize);

                    videoFrameNum++;
                    // 取出的每一帧,都保存到视频
                    recorder.record(frame_temp);
                }

                // 有声音,就把音频帧加一
                if (null != frame.samples)
                {
                    audioFrameNum++;
                    // 取出的每一帧,都保存到视频
                    recorder.record(frame);
                }

                // 有数据,就把数据帧加一
                if (null != frame.data)
                {
                    dataFrameNum++;
                }
            }

            System.out.printf("转码完成,视频帧[%d],音频帧[%d],数据帧[%d]\r\n", videoFrameNum, audioFrameNum, dataFrameNum);

        } catch (Exception e)
        {
            // e.printStackTrace();
            throw e;
        } finally
        {
            if (recorder != null)
            {
                try
                {
                    recorder.close();
                } catch (Exception e)
                {
                    // System.out.println("recorder.close异常" + e);
                    throw e;
                }
            }

            try
            {
                grabber.close();
            } catch (FrameGrabber.Exception e)
            {
                // System.out.println("frameGrabber.close异常" + e);
                throw e;
            }
        }
    }

    /**
     * 视频裁剪并转码函数
     * <p>
     * Crops every video frame to the given rectangle and re-encodes the result.
     * Tries the Nvidia "h264_nvenc" hardware encoder first and falls back to the
     * software H.264 encoder when hardware acceleration is unavailable.
     * The crop rectangle is clamped so it always lies inside the source frame.
     *
     * @param inputFile  原始视频文件完整路径
     * @param outputFile 目标视频文件完整保存路径(必须完整文件名,即包含格式后缀,推荐格式后缀为.mp4)
     * @param left       裁剪框左上角点x坐标
     * @param top        裁剪框左上角点y坐标
     * @param width      裁剪框宽度
     * @param height     裁剪框高度
     * @throws Exception if the crop size is non-positive, the output file has no
     *                   extension, the input has no media streams, the crop origin
     *                   lies outside the frame, or encoding/decoding fails
     */
    public void videoConvertWithCut(String inputFile, String outputFile, int left, int top, int width,
                                           int height) throws Exception
    {
        if (width <= 0 || height <= 0)
        {
            throw new Exception("Error! Dst width or height can not be zero!");
        }
        if (outputFile.lastIndexOf('.') < 0)
        {
            throw new Exception("Error! Output file format undetected!");
        }
        // Container format is derived from the output file extension (e.g. ".mp4")
        String format = outputFile.substring(outputFile.lastIndexOf('.'));

        FFmpegLogCallback.set();
        Frame frame;
        FFmpegFrameGrabber grabber = new FFmpegFrameGrabber(inputFile);
        FFmpegFrameRecorder recorder = null;

        try
        {
            System.out.println("开始初始化帧抓取器");

            // Initialize the frame grabber; passing true additionally calls
            // avformat_find_stream_info so stream metadata becomes available
            // through getFormatContext()
            grabber.start(true);

            System.out.println("帧抓取器初始化完成");

            // The demuxer context populated by grabber.start()
            AVFormatContext avFormatContext = grabber.getFormatContext();

            // Number of media streams in the file (usually video + audio)
            int streamNum = avFormatContext.nb_streams();

            if (streamNum < 1)
            {
                System.out.println("文件内不存在媒体流");
                throw new Exception("Error! There is no media stream in the file!");
            }

            // Source video frame rate, reused for the encoder and its GOP size
            int frameRate = (int) grabber.getVideoFrameRate();

            System.out.printf("视频帧率[%d],视频时长[%d]秒,媒体流数量[%d]\r\n", frameRate, avFormatContext.duration() / 1000000,
                    avFormatContext.nb_streams());

            // Log the type and codec of every stream for diagnostics
            for (int i = 0; i < streamNum; i++)
            {
                AVStream avStream = avFormatContext.streams(i);
                AVCodecParameters avCodecParameters = avStream.codecpar();
                System.out.printf("流的索引[%d],编码器类型[%d],编码器ID[%d]\r\n", i, avCodecParameters.codec_type(),
                        avCodecParameters.codec_id());
            }

            int frameWidth = grabber.getImageWidth();
            int frameHeight = grabber.getImageHeight();
            int audioChannels = grabber.getAudioChannels();

            System.out.printf("视频宽度[%d],视频高度[%d],音频通道数[%d]\r\n", frameWidth, frameHeight, audioChannels);

            // Clamp the crop rectangle so it lies completely inside the frame.
            // An out-of-bounds ROI (left + width > frameWidth, etc.) would make
            // the UMat sub-matrix constructor throw at the first frame.
            if (left < 0)
            {
                left = 0;
            }
            if (top < 0)
            {
                top = 0;
            }
            if (left >= frameWidth || top >= frameHeight)
            {
                throw new Exception("Error! Crop origin is outside the video frame!");
            }
            if (width > frameWidth - left)
            {
                width = frameWidth - left;
            }
            if (height > frameHeight - top)
            {
                height = frameHeight - top;
            }

            try
            {
                recorder = new FFmpegFrameRecorder(outputFile, width, height, audioChannels);

                recorder.setFormat(format);
                // Target video bitrate in bits per second
                recorder.setVideoBitrate(4096000);

                // Frames per second of the output
                recorder.setFrameRate(frameRate);

                // Distance between two key frames (one keyframe per second)
                recorder.setGopSize(frameRate);

                // Keep the same number of audio channels as the source
                recorder.setAudioChannels(grabber.getAudioChannels());

                recorder.setVideoCodecName("h264_nvenc");// 编码器,Nvidia GPU加速关键

                recorder.start();
            } catch (Exception ex)
            {
                System.out.println("尝试选择Nvidia GPU加速失败!");

                // Best-effort cleanup of the failed recorder: a cleanup exception
                // here must not prevent the software-encoder fallback below, and
                // recorder may still be null if the constructor itself threw
                if (recorder != null)
                {
                    try
                    {
                        recorder.stop();
                        recorder.close();
                        recorder.release();
                    } catch (Exception ignored)
                    {
                        // intentionally ignored: falling back to software encoding
                    }
                }

                recorder = new FFmpegFrameRecorder(outputFile, width, height, audioChannels);

                recorder.setFormat(format);
                recorder.setVideoBitrate(4096000);
                recorder.setFrameRate(frameRate);
                recorder.setGopSize(frameRate);
                recorder.setAudioChannels(grabber.getAudioChannels());

                // Software H.264 encoder fallback
                recorder.setVideoCodec(avcodec.AV_CODEC_ID_H264);

                recorder.start();
            }

            int videoFrameNum = 0;
            int audioFrameNum = 0;
            int dataFrameNum = 0;

            // Reusable buffers: full decoded frame, and the cropped output frame
            UMat umat = new UMat(frameHeight, frameWidth, CV_8UC3);
            Mat mat_cut = new Mat(height, width, CV_8UC3);
            Rect rect = new Rect(left, top, width, height);

            // Keep pulling frames from the source until EOF
            while (null != (frame = grabber.grab()))
            {
                if (null != frame.image)
                {
                    Mat mat = converter.convertToMat(frame);
                    mat.copyTo(umat);

                    // ROI view into umat sharing its pixel data; released each
                    // iteration to avoid leaking native memory
                    UMat umat_cut = new UMat(umat, rect);
                    umat_cut.copyTo(mat_cut);
                    umat_cut.release();
                    Frame frame_temp = converter.convert(mat_cut);

                    videoFrameNum++;
                    recorder.record(frame_temp);
                }

                if (null != frame.samples)
                {
                    audioFrameNum++;
                    // Audio frames are passed through unchanged
                    recorder.record(frame);
                }

                if (null != frame.data)
                {
                    dataFrameNum++;
                }
            }

            System.out.printf("转码完成,视频帧[%d],音频帧[%d],数据帧[%d]\r\n", videoFrameNum, audioFrameNum, dataFrameNum);

        } catch (Exception e)
        {
            throw e;
        } finally
        {
            // Nested try/finally guarantees the grabber is closed even when
            // recorder.close() throws; the original code leaked the grabber here
            try
            {
                if (recorder != null)
                {
                    recorder.close();
                }
            } finally
            {
                grabber.close();
            }
        }
    }

    /**
     * 视频转码函数(仅转码)
     * <p>
     * Re-encodes the input video at its original resolution. Tries the Nvidia
     * "h264_nvenc" hardware encoder first and falls back to the software H.264
     * encoder when hardware acceleration is unavailable.
     *
     * @param inputFile  原始视频文件完整路径
     * @param outputFile 目标视频文件完整保存路径(必须完整文件名,即包含格式后缀,推荐格式后缀为.mp4)
     * @throws Exception if the output file has no extension, the input has no
     *                   media streams, or encoding/decoding fails
     */
    public void videoConvert(String inputFile, String outputFile) throws Exception
    {
        if (outputFile.lastIndexOf('.') < 0)
        {
            throw new Exception("Error! Output file format undetected!");
        }
        // Container format is derived from the output file extension (e.g. ".mp4")
        String format = outputFile.substring(outputFile.lastIndexOf('.'));

        FFmpegLogCallback.set();
        Frame frame;
        FFmpegFrameGrabber grabber = new FFmpegFrameGrabber(inputFile);
        FFmpegFrameRecorder recorder = null;

        try
        {
            System.out.println("开始初始化帧抓取器");

            // Initialize the frame grabber; passing true additionally calls
            // avformat_find_stream_info so stream metadata becomes available
            grabber.start(true);

            System.out.println("帧抓取器初始化完成");

            AVFormatContext avFormatContext = grabber.getFormatContext();

            // Number of media streams in the file (usually video + audio)
            int streamNum = avFormatContext.nb_streams();

            if (streamNum < 1)
            {
                System.out.println("文件内不存在媒体流");
                throw new Exception("Error! There is no media stream in the file!");
            }

            int frameRate = (int) grabber.getVideoFrameRate();

            System.out.printf("视频帧率[%d],视频时长[%d]秒,媒体流数量[%d]\r\n", frameRate, avFormatContext.duration() / 1000000,
                    avFormatContext.nb_streams());

            // Log the type and codec of every stream for diagnostics
            for (int i = 0; i < streamNum; i++)
            {
                AVStream avStream = avFormatContext.streams(i);
                AVCodecParameters avCodecParameters = avStream.codecpar();
                System.out.printf("流的索引[%d],编码器类型[%d],编码器ID[%d]\r\n", i, avCodecParameters.codec_type(),
                        avCodecParameters.codec_id());
            }

            int frameWidth = grabber.getImageWidth();
            int frameHeight = grabber.getImageHeight();
            int audioChannels = grabber.getAudioChannels();

            System.out.printf("视频宽度[%d],视频高度[%d],音频通道数[%d]\r\n", frameWidth, frameHeight, audioChannels);

            try
            {
                recorder = new FFmpegFrameRecorder(outputFile, frameWidth, frameHeight, audioChannels);

                recorder.setFormat(format);
                // Target video bitrate in bits per second
                recorder.setVideoBitrate(4096000);

                // Frames per second of the output
                recorder.setFrameRate(frameRate);

                // Distance between two key frames (one keyframe per second)
                recorder.setGopSize(frameRate);

                // Keep the same number of audio channels as the source
                recorder.setAudioChannels(grabber.getAudioChannels());

                recorder.setVideoCodecName("h264_nvenc");// 编码器,Nvidia GPU加速关键

                recorder.start();
            } catch (Exception ex)
            {
                System.out.println("尝试选择Nvidia GPU加速失败!");

                // Best-effort cleanup of the failed recorder: a cleanup exception
                // here must not prevent the software-encoder fallback below, and
                // recorder may still be null if the constructor itself threw
                if (recorder != null)
                {
                    try
                    {
                        recorder.stop();
                        recorder.close();
                        recorder.release();
                    } catch (Exception ignored)
                    {
                        // intentionally ignored: falling back to software encoding
                    }
                }

                recorder = new FFmpegFrameRecorder(outputFile, frameWidth, frameHeight, audioChannels);

                recorder.setFormat(format);
                recorder.setVideoBitrate(4096000);
                recorder.setFrameRate(frameRate);
                recorder.setGopSize(frameRate);
                recorder.setAudioChannels(grabber.getAudioChannels());

                // Software H.264 encoder fallback
                recorder.setVideoCodec(avcodec.AV_CODEC_ID_H264);

                recorder.start();
            }

            int videoFrameNum = 0;
            int audioFrameNum = 0;
            int dataFrameNum = 0;

            // Keep pulling frames from the source until EOF; every frame is
            // written to the output unchanged (the recorder re-encodes it)
            while (null != (frame = grabber.grab()))
            {
                if (null != frame.image)
                {
                    videoFrameNum++;
                    recorder.record(frame);
                }

                if (null != frame.samples)
                {
                    audioFrameNum++;
                    recorder.record(frame);
                }

                if (null != frame.data)
                {
                    dataFrameNum++;
                }
            }

            System.out.printf("转码完成,视频帧[%d],音频帧[%d],数据帧[%d]\r\n", videoFrameNum, audioFrameNum, dataFrameNum);

        } catch (Exception e)
        {
            throw e;
        } finally
        {
            // Nested try/finally guarantees the grabber is closed even when
            // recorder.close() throws; the original code leaked the grabber here
            try
            {
                if (recorder != null)
                {
                    recorder.close();
                }
            } finally
            {
                grabber.close();
            }
        }
    }

    /**
     * 视频转码函数(同步,仅软件编码)
     * <p>
     * Re-encodes the input video at its original resolution using the software
     * H.264 encoder only (no hardware-acceleration attempt), running on the
     * calling thread.
     *
     * @param inputFile  原始视频文件完整路径
     * @param outputFile 目标视频文件完整保存路径(必须完整文件名,即包含格式后缀,推荐格式后缀为.mp4)
     * @throws Exception if the output file has no extension, the input has no
     *                   media streams, or encoding/decoding fails
     */
    public void videoConvertSync(String inputFile, String outputFile) throws Exception
    {
        if (outputFile.lastIndexOf('.') < 0)
        {
            throw new Exception("Error! Output file format undetected!");
        }
        // Container format is derived from the output file extension (e.g. ".mp4")
        String format = outputFile.substring(outputFile.lastIndexOf('.'));

        FFmpegLogCallback.set();
        Frame frame;
        FFmpegFrameGrabber grabber = new FFmpegFrameGrabber(inputFile);
        FFmpegFrameRecorder recorder = null;

        try
        {
            System.out.println("开始初始化帧抓取器");

            // Initialize the frame grabber; passing true additionally calls
            // avformat_find_stream_info so stream metadata becomes available
            grabber.start(true);

            System.out.println("帧抓取器初始化完成");

            AVFormatContext avFormatContext = grabber.getFormatContext();

            // Number of media streams in the file (usually video + audio)
            int streamNum = avFormatContext.nb_streams();

            if (streamNum < 1)
            {
                System.out.println("文件内不存在媒体流");
                throw new Exception("Error! There is no media stream in the file!");
            }

            int frameRate = (int) grabber.getVideoFrameRate();

            System.out.printf("视频帧率[%d],视频时长[%d]秒,媒体流数量[%d]\r\n", frameRate, avFormatContext.duration() / 1000000,
                    avFormatContext.nb_streams());

            // Log the type and codec of every stream for diagnostics
            for (int i = 0; i < streamNum; i++)
            {
                AVStream avStream = avFormatContext.streams(i);
                AVCodecParameters avCodecParameters = avStream.codecpar();
                System.out.printf("流的索引[%d],编码器类型[%d],编码器ID[%d]\r\n", i, avCodecParameters.codec_type(),
                        avCodecParameters.codec_id());
            }

            int frameWidth = grabber.getImageWidth();
            int frameHeight = grabber.getImageHeight();
            int audioChannels = grabber.getAudioChannels();

            System.out.printf("视频宽度[%d],视频高度[%d],音频通道数[%d]\r\n", frameWidth, frameHeight, audioChannels);

            recorder = new FFmpegFrameRecorder(outputFile, frameWidth, frameHeight, audioChannels);
            // Software H.264 encoder (no GPU-acceleration attempt in this variant)
            recorder.setVideoCodec(avcodec.AV_CODEC_ID_H264);

            recorder.setFormat(format);
            // Target video bitrate in bits per second
            recorder.setVideoBitrate(4096000);

            // Frames per second of the output
            recorder.setFrameRate(frameRate);

            // Distance between two key frames (one keyframe per second)
            recorder.setGopSize(frameRate);

            // Keep the same number of audio channels as the source
            recorder.setAudioChannels(grabber.getAudioChannels());

            recorder.start();

            int videoFrameNum = 0;
            int audioFrameNum = 0;
            int dataFrameNum = 0;

            // Keep pulling frames from the source until EOF; every frame is
            // written to the output unchanged (the recorder re-encodes it)
            while (null != (frame = grabber.grab()))
            {
                if (null != frame.image)
                {
                    videoFrameNum++;
                    recorder.record(frame);
                }

                if (null != frame.samples)
                {
                    audioFrameNum++;
                    recorder.record(frame);
                }

                if (null != frame.data)
                {
                    dataFrameNum++;
                }
            }

            System.out.printf("转码完成,视频帧[%d],音频帧[%d],数据帧[%d]\r\n", videoFrameNum, audioFrameNum, dataFrameNum);

        } catch (Exception e)
        {
            throw e;
        } finally
        {
            // Nested try/finally guarantees the grabber is closed even when
            // recorder.close() throws; the original code leaked the grabber here
            try
            {
                if (recorder != null)
                {
                    recorder.close();
                }
            } finally
            {
                grabber.close();
            }
        }
    }


    /**
     * 定位视频某一帧画面并保存到本地
     * <p>
     * Seeks the video to the given playback position and writes the decoded
     * frame at that position to an image file.
     *
     * @param inputFile     原始视频文件完整路径
     * @param milliseconds  视频要定位的已播放毫秒数
     * @param framesavepath 指定时刻画面图片保存路径(必须完整文件名,即包含格式后缀,推荐格式后缀为.jpg)
     * @throws Exception if the input has no media streams, the position exceeds
     *                   the video duration, or seeking/decoding fails
     */
    public void videoSeekFrame(String inputFile, long milliseconds, String framesavepath) throws Exception
    {
        FFmpegLogCallback.set();
        Frame frame;
        FFmpegFrameGrabber grabber = new FFmpegFrameGrabber(inputFile);

        try
        {
            System.out.println("开始初始化帧抓取器");

            // Initialize the frame grabber; passing true additionally calls
            // avformat_find_stream_info so stream metadata becomes available
            grabber.start(true);

            System.out.println("帧抓取器初始化完成");

            AVFormatContext avFormatContext = grabber.getFormatContext();

            // Number of media streams in the file (usually video + audio)
            int streamNum = avFormatContext.nb_streams();

            if (streamNum < 1)
            {
                System.out.println("文件内不存在媒体流");
                throw new Exception("Error! There is no media stream in the file!");
            }

            int frameRate = (int) grabber.getVideoFrameRate();

            System.out.printf("视频帧率[%d],视频时长[%d]秒,媒体流数量[%d]\r\n", frameRate, avFormatContext.duration() / 1000000,
                    avFormatContext.nb_streams());

            // Log the type and codec of every stream for diagnostics
            for (int i = 0; i < streamNum; i++)
            {
                AVStream avStream = avFormatContext.streams(i);
                AVCodecParameters avCodecParameters = avStream.codecpar();
                System.out.printf("流的索引[%d],编码器类型[%d],编码器ID[%d]\r\n", i, avCodecParameters.codec_type(),
                        avCodecParameters.codec_id());
            }

            int frameWidth = grabber.getImageWidth();
            int frameHeight = grabber.getImageHeight();
            int audioChannels = grabber.getAudioChannels();

            System.out.printf("视频宽度[%d],视频高度[%d],音频通道数[%d]\r\n", frameWidth, frameHeight, audioChannels);

            // Total duration in seconds = total frame count / frame rate
            // (the original variable name wrongly suggested milliseconds)
            int totalFrames = grabber.getLengthInFrames();
            double videoTotalSeconds = totalFrames / grabber.getFrameRate();

            if (milliseconds > videoTotalSeconds * 1000)
            {
                System.out.println("指定播放时刻超过视频时长,无法定位");
                throw new Exception("Error! Milliseconds exceeds video duration!");
            }

            // setTimestamp expects microseconds, hence the * 1000 conversion
            grabber.setTimestamp(milliseconds * 1000);

            // Grab the next image frame at the seeked position and save it
            if (null != (frame = grabber.grabImage()))
            {
                Mat mat = converter.convertToMat(frame);
                imwrite(framesavepath, mat);
            }

        } catch (Exception e)
        {
            throw e;
        } finally
        {
            try
            {
                grabber.close();
            } catch (FrameGrabber.Exception e)
            {
                throw e;
            }
        }
    }
}

5. 使用

@RestController
public class UploadController {
    /**
     * 测试
     */
    @PostMapping("/test")
    public void test(MultipartFile file) throws IOException, EncoderException {
        long durationBackMillis = MaterialAttributesUtil.getDurationBackMillis(file);

        System.out.printf("当前视频的长度是 %s", durationBackMillis);
    }
}

调用接口后,我们会发现项目的根目录下生成了一个你上传的视频文件
在这里插入图片描述

在这里插入图片描述

评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包

打赏作者

超人在良家-阿启

你的鼓励将是我创作的最大动力

¥1 ¥2 ¥4 ¥6 ¥10 ¥20
扫码支付:¥1
获取中
扫码支付

您的余额不足,请更换扫码支付或充值

打赏作者

实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值