javacv FFmpegFrameGrabber blocking issue

FFmpegFrameGrabber.start() blocks indefinitely when the stream address is offline or invalid.
For the original solution, see: javacv FFmpegFrameGrabber 阻塞问题解决方法 - CSDN博客
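
Before patching the class, note that FFmpeg itself exposes per-protocol timeout options that can be set on the grabber before start(). A minimal sketch (the option names and units vary by FFmpeg version and protocol, so verify them against your build):

FFmpegFrameGrabber grabber = new FFmpegFrameGrabber("rtsp://example/stream");
// "stimeout" is the RTSP socket timeout in microseconds on older FFmpeg builds;
// newer builds rename it to "timeout".
grabber.setOption("stimeout", "5000000");
// "rw_timeout" is the generic I/O read/write timeout, also in microseconds.
grabber.setOption("rw_timeout", "5000000");
grabber.start();

If these options do not take effect for your input (they are protocol-specific), the patched class below enforces the timeout at the I/O layer regardless of protocol.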

Dependencies:

        <dependency>
            <groupId>org.bytedeco</groupId>
            <artifactId>javacv-platform</artifactId>
            <version>1.5.10</version>
        </dependency>
        <dependency>
            <groupId>org.bytedeco</groupId>
            <artifactId>opencv-platform-gpu</artifactId>
            <version>4.9.0-1.5.10</version>
        </dependency>
        <dependency>
            <groupId>org.bytedeco</groupId>
            <artifactId>ffmpeg-platform-gpl</artifactId>
            <version>6.1.1-1.5.10</version>
        </dependency>
        <dependency>
            <groupId>org.projectlombok</groupId>
            <artifactId>lombok</artifactId>
            <version>1.18.34</version>
        </dependency>

The modified FFmpegFrameGrabber (the changes are marked with "add whb": a read-timeout watchdog, an AVIOInterruptCB installed before avformat_open_input(), and an interrupt_grab flag that callers can poll):

package com.cloud.video;

import java.io.BufferedInputStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.nio.Buffer;
import java.nio.ByteBuffer;
import java.util.Collections;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.Map;
import java.util.Map.Entry;
import java.util.concurrent.atomic.AtomicLong;

import lombok.extern.slf4j.Slf4j;
import org.bytedeco.javacpp.BytePointer;
import org.bytedeco.javacpp.DoublePointer;
import org.bytedeco.javacpp.IntPointer;
import org.bytedeco.javacpp.Loader;
import org.bytedeco.javacpp.Pointer;
import org.bytedeco.javacpp.PointerScope;
import org.bytedeco.javacpp.PointerPointer;

import org.bytedeco.ffmpeg.avcodec.*;
import org.bytedeco.ffmpeg.avformat.*;
import org.bytedeco.ffmpeg.avutil.*;
import org.bytedeco.ffmpeg.swresample.*;
import org.bytedeco.ffmpeg.swscale.*;
import org.bytedeco.javacv.Frame;
import org.bytedeco.javacv.FrameGrabber;

import static org.bytedeco.ffmpeg.global.avcodec.*;
import static org.bytedeco.ffmpeg.global.avdevice.*;
import static org.bytedeco.ffmpeg.global.avformat.*;
import static org.bytedeco.ffmpeg.global.avutil.*;
import static org.bytedeco.ffmpeg.global.swresample.*;
import static org.bytedeco.ffmpeg.global.swscale.*;

/**
 *
 * @author Samuel Audet
 */
@Slf4j
public class FFmpegFrameGrabber extends FrameGrabber {

    public static class Exception extends FrameGrabber.Exception {
        public Exception(String message) { super(message + " (For more details, make sure FFmpegLogCallback.set() has been called.)"); }
        public Exception(String message, Throwable cause) { super(message, cause); }
    }

    public static String[] getDeviceDescriptions() throws org.bytedeco.javacv.FFmpegFrameGrabber.Exception {
        tryLoad();
        throw new UnsupportedOperationException("Device enumeration not supported by FFmpeg.");
    }

    public static com.cloud.video.FFmpegFrameGrabber createDefault(File deviceFile)   throws org.bytedeco.javacv.FFmpegFrameGrabber.Exception { return new com.cloud.video.FFmpegFrameGrabber(deviceFile); }
    public static com.cloud.video.FFmpegFrameGrabber createDefault(String devicePath) throws org.bytedeco.javacv.FFmpegFrameGrabber.Exception { return new com.cloud.video.FFmpegFrameGrabber(devicePath); }
    public static com.cloud.video.FFmpegFrameGrabber createDefault(int deviceNumber)  throws org.bytedeco.javacv.FFmpegFrameGrabber.Exception { throw new org.bytedeco.javacv.FFmpegFrameGrabber.Exception(org.bytedeco.javacv.FFmpegFrameGrabber.class + " does not support device numbers."); }

    //add whb
    /**
     * Stream read timeout: 15 seconds
     */
    private static final int FRAME_READ_TIMEOUT = 15 * 1000;
    // Watchdog: timestamp of the last successful read
    private final AtomicLong lastFrameTime = new AtomicLong(System.currentTimeMillis());
    private boolean interrupt_grab = false;

    public boolean isInterrupt_grab() {
        return interrupt_grab;
    }


    /**
     * AVIO interrupt callback. FFmpeg invokes it periodically during blocking I/O
     * (e.g. inside avformat_open_input() and av_read_frame()); returning 1 aborts
     * the pending operation, returning 0 lets it continue.
     */
    private final org.bytedeco.ffmpeg.avformat.AVIOInterruptCB.Callback_Pointer cp = new org.bytedeco.ffmpeg.avformat.AVIOInterruptCB.Callback_Pointer() {
        @Override
        public int call(Pointer pointer) {
            // 0: continue, 1: abort
            if (lastFrameTime.get() + FRAME_READ_TIMEOUT < System.currentTimeMillis()) {
                interrupt_grab = true;
                try {
                    // Also interrupt the Java thread, so that callers blocked on it
                    // (e.g. a Process.waitFor() wrapping an external ffmpeg command)
                    // can exit as well.
                    Thread.currentThread().interrupt();
                } catch (java.lang.Exception e) {
                    log.error("Thread interrupt on read timeout: {}", e.getMessage());
                }
                return 1;
            }
            return 0;
        }
    };
    // add whb end
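    // NOTE (assumption; the full grab path is not shown in this excerpt): for the
    // watchdog to work during playback and not only during start(), lastFrameTime
    // must be refreshed after every successful read, e.g. right after
    // av_read_frame(oc, pkt) returns >= 0:
    //
    //     lastFrameTime.set(System.currentTimeMillis());
    //
    // Otherwise the callback fires unconditionally FRAME_READ_TIMEOUT milliseconds
    // after the last reset, even while frames are flowing.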


    private static org.bytedeco.javacv.FFmpegFrameGrabber.Exception loadingException = null;
    public static void tryLoad() throws org.bytedeco.javacv.FFmpegFrameGrabber.Exception {
        if (loadingException != null) {
            throw loadingException;
        } else {
            try {
                Loader.load(org.bytedeco.ffmpeg.global.avutil.class);
                Loader.load(org.bytedeco.ffmpeg.global.swresample.class);
                Loader.load(org.bytedeco.ffmpeg.global.avcodec.class);
                Loader.load(org.bytedeco.ffmpeg.global.avformat.class);
                Loader.load(org.bytedeco.ffmpeg.global.swscale.class);

                // Register all formats and codecs
                av_jni_set_java_vm(Loader.getJavaVM(), null);
//                avcodec_register_all();
//                av_register_all();
                avformat_network_init();

                Loader.load(org.bytedeco.ffmpeg.global.avdevice.class);
                avdevice_register_all();
            } catch (Throwable t) {
                if (t instanceof org.bytedeco.javacv.FFmpegFrameGrabber.Exception) {
                    throw loadingException = (org.bytedeco.javacv.FFmpegFrameGrabber.Exception)t;
                } else {
                    throw loadingException = new org.bytedeco.javacv.FFmpegFrameGrabber.Exception("Failed to load " + org.bytedeco.javacv.FFmpegFrameGrabber.class, t);
                }
            }
        }
    }

    static {
        try {
            tryLoad();
//            FFmpegLockCallback.init();
        } catch (org.bytedeco.javacv.FFmpegFrameGrabber.Exception ex) { }
    }

    public FFmpegFrameGrabber(URL url) {
        this(url.toString());
    }
    public FFmpegFrameGrabber(File file) {
        this(file.getAbsolutePath());
    }
    public FFmpegFrameGrabber(String filename) {
        this.filename = filename;
        this.pixelFormat = AV_PIX_FMT_NONE;
        this.sampleFormat = AV_SAMPLE_FMT_NONE;
    }
    /** Calls {@code FFmpegFrameGrabber(inputStream, Integer.MAX_VALUE - 8)}
     *  so that the whole input stream is seekable. */
    public FFmpegFrameGrabber(InputStream inputStream) {
        this(inputStream, Integer.MAX_VALUE - 8);
    }
    /** Set maximumSize to 0 to disable seek and minimize startup time. */
    public FFmpegFrameGrabber(InputStream inputStream, int maximumSize) {
        this.inputStream = inputStream;
        this.closeInputStream = true;
        this.pixelFormat = AV_PIX_FMT_NONE;
        this.sampleFormat = AV_SAMPLE_FMT_NONE;
        this.maximumSize = maximumSize;
    }
    public void release() throws org.bytedeco.javacv.FFmpegFrameGrabber.Exception {
        synchronized (org.bytedeco.ffmpeg.global.avcodec.class) {
            releaseUnsafe();
        }
    }
    public synchronized void releaseUnsafe() throws org.bytedeco.javacv.FFmpegFrameGrabber.Exception {
        started = false;

        if (plane_ptr != null && plane_ptr2 != null) {
            plane_ptr.releaseReference();
            plane_ptr2.releaseReference();
            plane_ptr = plane_ptr2 = null;
        }

        if (pkt != null) {
            if (pkt.stream_index() != -1) {
                av_packet_unref(pkt);
            }
            pkt.releaseReference();
            pkt = null;
        }

        // Free the RGB image
        if (image_ptr != null) {
            for (int i = 0; i < image_ptr.length; i++) {
                if (imageMode != ImageMode.RAW) {
                    av_free(image_ptr[i]);
                }
            }
            image_ptr = null;
        }
        if (picture_rgb != null) {
            av_frame_free(picture_rgb);
            picture_rgb = null;
        }

        // Free the native format picture frame
        if (picture != null) {
            av_frame_free(picture);
            picture = null;
        }

        // Close the video codec
        if (video_c != null) {
            avcodec_free_context(video_c);
            video_c = null;
        }

        // Free the audio samples frame
        if (samples_frame != null) {
            av_frame_free(samples_frame);
            samples_frame = null;
        }

        // Close the audio codec
        if (audio_c != null) {
            avcodec_free_context(audio_c);
            audio_c = null;
        }

        // Close the video file
        if (inputStream == null && oc != null && !oc.isNull()) {
            avformat_close_input(oc);
            oc = null;
        }

        if (img_convert_ctx != null) {
            sws_freeContext(img_convert_ctx);
            img_convert_ctx = null;
        }

        if (samples_ptr_out != null) {
            for (int i = 0; i < samples_ptr_out.length; i++) {
                av_free(samples_ptr_out[i].position(0));
            }
            samples_ptr_out = null;
            samples_buf_out = null;
        }

        if (samples_convert_ctx != null) {
            swr_free(samples_convert_ctx);
            samples_convert_ctx = null;
        }

        frameGrabbed  = false;
        frame         = null;
        timestamp     = 0;
        frameNumber   = 0;

        if (inputStream != null) {
            try {
                if (oc == null) {
                    // when called a second time
                    if (closeInputStream) {
                        inputStream.close();
                    }
                } else if (maximumSize > 0) {
                    try {
                        inputStream.reset();
                    } catch (IOException ex) {
                        // "Resetting to invalid mark", give up?
                        System.err.println("Error on InputStream.reset(): " + ex);
                    }
                }
            } catch (IOException ex) {
                throw new org.bytedeco.javacv.FFmpegFrameGrabber.Exception("Error on InputStream.close(): ", ex);
            } finally {
                inputStreams.remove(oc);
                if (avio != null) {
                    if (avio.buffer() != null) {
                        av_free(avio.buffer());
                        avio.buffer(null);
                    }
                    av_free(avio);
                    avio = null;
                }
                if (oc != null) {
                    avformat_free_context(oc);
                    oc = null;
                }
            }
        }
    }
    @Override protected void finalize() throws Throwable {
        super.finalize();
        release();
    }

    static Map<Pointer,InputStream> inputStreams = Collections.synchronizedMap(new HashMap<Pointer,InputStream>());

    static class ReadCallback extends Read_packet_Pointer_BytePointer_int {
        @Override public int call(Pointer opaque, BytePointer buf, int buf_size) {
            try {
                byte[] b = new byte[buf_size];
                InputStream is = inputStreams.get(opaque);
                int size = is.read(b, 0, buf_size);
                if (size < 0) {
                    return AVERROR_EOF();
                } else {
                    buf.put(b, 0, size);
                    return size;
                }
            }
            catch (Throwable t) {
                System.err.println("Error on InputStream.read(): " + t);
                return -1;
            }
        }
    }

    static class SeekCallback extends Seek_Pointer_long_int {
        @Override public long call(Pointer opaque, long offset, int whence) {
            try {
                InputStream is = inputStreams.get(opaque);
                long size = 0;
                switch (whence) {
                    case 0: is.reset(); break; // SEEK_SET
                    case 1: break;             // SEEK_CUR
                    case 2:                    // SEEK_END
                        is.reset();
                        while (true) {
                            long n = is.skip(Long.MAX_VALUE);
                            if (n == 0) break;
                            size += n;
                        }
                        offset += size;
                        is.reset();
                        break;
                    case AVSEEK_SIZE:
                        long remaining = 0;
                        while (true) {
                            long n = is.skip(Long.MAX_VALUE);
                            if (n == 0) break;
                            remaining += n;
                        }
                        is.reset();
                        while (true) {
                            long n = is.skip(Long.MAX_VALUE);
                            if (n == 0) break;
                            size += n;
                        }
                        offset = size - remaining;
                        is.reset();
                        break;
                    default: return -1;
                }
                long remaining = offset;
                while (remaining > 0) {
                    long skipped = is.skip(remaining);
                    if (skipped == 0) break; // end of the stream
                    remaining -= skipped;
                }
                return whence == AVSEEK_SIZE ? size : 0;
            } catch (Throwable t) {
                System.err.println("Error on InputStream.reset() or skip(): " + t);
                return -1;
            }
        }
    }

    static com.cloud.video.FFmpegFrameGrabber.ReadCallback readCallback = new com.cloud.video.FFmpegFrameGrabber.ReadCallback().retainReference();
    static com.cloud.video.FFmpegFrameGrabber.SeekCallback seekCallback = new com.cloud.video.FFmpegFrameGrabber.SeekCallback().retainReference();

    private InputStream     inputStream;
    private boolean         closeInputStream;
    private int             maximumSize;
    private AVIOContext     avio;
    private String          filename;
    private AVFormatContext oc;
    private AVStream        video_st, audio_st;
    private AVCodecContext  video_c, audio_c;
    private AVFrame         picture, picture_rgb;
    private BytePointer[]   image_ptr;
    private Buffer[]        image_buf;
    private AVFrame         samples_frame;
    private BytePointer[]   samples_ptr;
    private Buffer[]        samples_buf;
    private BytePointer[]   samples_ptr_out;
    private Buffer[]        samples_buf_out;
    private PointerPointer  plane_ptr, plane_ptr2;
    private AVPacket        pkt;
    private SwsContext      img_convert_ctx;
    private SwrContext      samples_convert_ctx;
    private int             samples_channels, samples_format, samples_rate;
    private boolean         frameGrabbed;
    private Frame frame;
    private int[]           streams;

    private volatile boolean started = false;

    public boolean isCloseInputStream() {
        return closeInputStream;
    }
    public void setCloseInputStream(boolean closeInputStream) {
        this.closeInputStream = closeInputStream;
    }

    /**
     * Is there a video stream?
     * @return  {@code video_st!=null;}
     */
    public boolean hasVideo() {
        return video_st!=null;
    }

    /**
     * Is there an audio stream?
     * @return  {@code audio_st!=null;}
     */
    public boolean hasAudio() {
        return audio_st!=null;
    }

    @Override public double getGamma() {
        // default to a gamma of 2.2 for cheap Webcams, DV cameras, etc.
        if (gamma == 0.0) {
            return 2.2;
        } else {
            return gamma;
        }
    }

    @Override public String getFormat() {
        if (oc == null) {
            return super.getFormat();
        } else {
            return oc.iformat().name().getString();
        }
    }

    @Override public int getImageWidth() {
        return imageWidth > 0 || video_c == null ? super.getImageWidth() : video_c.width();
    }

    @Override public int getImageHeight() {
        return imageHeight > 0 || video_c == null ? super.getImageHeight() : video_c.height();
    }

    @Override public int getAudioChannels() {
        return audioChannels > 0 || audio_c == null ? super.getAudioChannels() : audio_c.channels();
    }

    @Override public int getPixelFormat() {
        if (imageMode == ImageMode.COLOR || imageMode == ImageMode.GRAY) {
            if (pixelFormat == AV_PIX_FMT_NONE) {
                return imageMode == ImageMode.COLOR ? AV_PIX_FMT_BGR24 : AV_PIX_FMT_GRAY8;
            } else {
                return pixelFormat;
            }
        } else if (video_c != null) { // RAW
            return video_c.pix_fmt();
        } else {
            return super.getPixelFormat();
        }
    }

    @Override public int getVideoCodec() {
        return video_c == null ? super.getVideoCodec() : video_c.codec_id();
    }

    @Override
    public String getVideoCodecName(){
        return  video_c == null ? super.getVideoCodecName() : video_c.codec().name().getString();
    }

    @Override public int getVideoBitrate() {
        return video_c == null ? super.getVideoBitrate() : (int)video_c.bit_rate();
    }

    @Override public double getAspectRatio() {
        if (video_st == null) {
            return super.getAspectRatio();
        } else {
            AVRational r = av_guess_sample_aspect_ratio(oc, video_st, picture);
            double a = (double)r.num() / r.den();
            return a == 0.0 ? 1.0 : a;
        }
    }

    /** Returns {@link #getVideoFrameRate()} */
    @Override public double getFrameRate() {
        return getVideoFrameRate();
    }

    /** Estimation of audio frames per second.
     *
     * Care must be taken, as this method may require an otherwise unnecessary call of
     * grabFrame(true, false, false, false, false) with frameGrabbed set to true.
     *
     * @return ((double) getSampleRate()) / samples_frame.nb_samples()
     * if samples_frame.nb_samples() is not zero, otherwise 0
     */
    public double getAudioFrameRate() {
        if (audio_st == null) {
            return 0.0;
        } else {
            if (samples_frame == null || samples_frame.nb_samples() == 0) {
                try {
                    grabFrame(true, false, false, false, false);
                    frameGrabbed = true;
                } catch (org.bytedeco.javacv.FFmpegFrameGrabber.Exception e) {
                    return 0.0;
                }
            }
            if (samples_frame != null && samples_frame.nb_samples() != 0)
                return ((double) getSampleRate()) / samples_frame.nb_samples();
            else return 0.0;

        }
    }

    public double getVideoFrameRate() {
        if (video_st == null) {
            return super.getFrameRate();
        } else {
            AVRational r = video_st.avg_frame_rate();
            if (r.num() == 0 && r.den() == 0) {
                r = video_st.r_frame_rate();
            }
            return (double)r.num() / r.den();
        }
    }

    @Override public int getAudioCodec() {
        return audio_c == null ? super.getAudioCodec() : audio_c.codec_id();
    }

    @Override public String getAudioCodecName() {
        return audio_c == null ? super.getAudioCodecName() : audio_c.codec().name().getString();
    }

    @Override public int getAudioBitrate() {
        return audio_c == null ? super.getAudioBitrate() : (int)audio_c.bit_rate();
    }

    @Override public int getSampleFormat() {
        if (sampleMode == SampleMode.SHORT || sampleMode == SampleMode.FLOAT) {
            if (sampleFormat == AV_SAMPLE_FMT_NONE) {
                return sampleMode == SampleMode.SHORT ? AV_SAMPLE_FMT_S16 : AV_SAMPLE_FMT_FLT;
            } else {
                return sampleFormat;
            }
        } else if (audio_c != null) { // RAW
            return audio_c.sample_fmt();
        } else {
            return super.getSampleFormat();
        }
    }

    @Override public int getSampleRate() {
        return sampleRate > 0 || audio_c == null ? super.getSampleRate() : audio_c.sample_rate();
    }

    @Override public Map<String, String> getMetadata() {
        if (oc == null) {
            return super.getMetadata();
        }
        AVDictionaryEntry entry = null;
        Map<String, String> metadata = new HashMap<String, String>();
        while ((entry = av_dict_get(oc.metadata(), "", entry, AV_DICT_IGNORE_SUFFIX)) != null) {
            metadata.put(entry.key().getString(charset), entry.value().getString(charset));
        }
        return metadata;
    }

    @Override public Map<String, String> getVideoMetadata() {
        if (video_st == null) {
            return super.getVideoMetadata();
        }
        AVDictionaryEntry entry = null;
        Map<String, String> metadata = new HashMap<String, String>();
        while ((entry = av_dict_get(video_st.metadata(), "", entry, AV_DICT_IGNORE_SUFFIX)) != null) {
            metadata.put(entry.key().getString(charset), entry.value().getString(charset));
        }
        return metadata;
    }

    @Override public Map<String, String> getAudioMetadata() {
        if (audio_st == null) {
            return super.getAudioMetadata();
        }
        AVDictionaryEntry entry = null;
        Map<String, String> metadata = new HashMap<String, String>();
        while ((entry = av_dict_get(audio_st.metadata(), "", entry, AV_DICT_IGNORE_SUFFIX)) != null) {
            metadata.put(entry.key().getString(charset), entry.value().getString(charset));
        }
        return metadata;
    }

    @Override public String getMetadata(String key) {
        if (oc == null) {
            return super.getMetadata(key);
        }
        AVDictionaryEntry entry = av_dict_get(oc.metadata(), key, null, 0);
        return entry == null || entry.value() == null ? null : entry.value().getString(charset);
    }

    @Override public String getVideoMetadata(String key) {
        if (video_st == null) {
            return super.getVideoMetadata(key);
        }
        AVDictionaryEntry entry = av_dict_get(video_st.metadata(), key, null, 0);
        return entry == null || entry.value() == null ? null : entry.value().getString(charset);
    }

    @Override public String getAudioMetadata(String key) {
        if (audio_st == null) {
            return super.getAudioMetadata(key);
        }
        AVDictionaryEntry entry = av_dict_get(audio_st.metadata(), key, null, 0);
        return entry == null || entry.value() == null ? null : entry.value().getString(charset);
    }

    @Override public Map<String, Buffer> getVideoSideData() {
        if (video_st == null) {
            return super.getVideoSideData();
        }
        videoSideData = new HashMap<String, Buffer>();
        for (int i = 0; i < video_st.nb_side_data(); i++) {
            AVPacketSideData sd = video_st.side_data().position(i);
            String key = av_packet_side_data_name(sd.type()).getString();
            Buffer value = sd.data().capacity(sd.size()).asBuffer();
            videoSideData.put(key, value);
        }
        return videoSideData;
    }

    @Override public Buffer getVideoSideData(String key) {
        return getVideoSideData().get(key);
    }

    /** Returns the rotation in degrees from the side data of the video stream, or 0 if unknown. */
    public double getDisplayRotation() {
        ByteBuffer b = (ByteBuffer)getVideoSideData("Display Matrix");
        return b != null ? av_display_rotation_get(new IntPointer(new BytePointer(b))) : 0;
    }

    @Override public Map<String, Buffer> getAudioSideData() {
        if (audio_st == null) {
            return super.getAudioSideData();
        }
        audioSideData = new HashMap<String, Buffer>();
        for (int i = 0; i < audio_st.nb_side_data(); i++) {
            AVPacketSideData sd = audio_st.side_data().position(i);
            String key = av_packet_side_data_name(sd.type()).getString();
            Buffer value = sd.data().capacity(sd.size()).asBuffer();
            audioSideData.put(key, value);
        }
        return audioSideData;
    }

    @Override public Buffer getAudioSideData(String key) {
        return getAudioSideData().get(key);
    }

    /** default override of super.setFrameNumber implies setting
     *  of a frame close to a video frame having that number */
    @Override public void setFrameNumber(int frameNumber) throws org.bytedeco.javacv.FFmpegFrameGrabber.Exception {
        if (hasVideo()) setTimestamp(Math.round((1000000L * frameNumber + 500000L)/ getFrameRate()));
        else super.frameNumber = frameNumber;
    }

    /** if there is video stream tries to seek to video frame with corresponding timestamp
     *  otherwise sets super.frameNumber only because frameRate==0 if there is no video stream */
    public void setVideoFrameNumber(int frameNumber) throws org.bytedeco.javacv.FFmpegFrameGrabber.Exception {
        // best guess, AVSEEK_FLAG_FRAME has not been implemented in FFmpeg...
        if (hasVideo()) setVideoTimestamp(Math.round((1000000L * frameNumber + 500000L)/ getFrameRate()));
        else super.frameNumber = frameNumber;
    }

    /** if there is audio stream tries to seek to audio frame with corresponding timestamp
     *  ignoring otherwise */
    public void setAudioFrameNumber(int frameNumber) throws org.bytedeco.javacv.FFmpegFrameGrabber.Exception {
        // best guess, AVSEEK_FLAG_FRAME has not been implemented in FFmpeg...
        if (hasAudio()) setAudioTimestamp(Math.round((1000000L * frameNumber + 500000L)/ getAudioFrameRate()));
    }

    /** setTimestamp without checking frame content (using old code used in JavaCV versions prior to 1.4.1) */
    @Override public void setTimestamp(long timestamp) throws org.bytedeco.javacv.FFmpegFrameGrabber.Exception {
        setTimestamp(timestamp, false);
    }

    /** setTimestamp with possibility to select between old quick seek code or new code
     * doing check of frame content. The frame check can be useful with corrupted files, when seeking may
     * end up with an empty frame not containing video nor audio */
    public void setTimestamp(long timestamp, boolean checkFrame) throws org.bytedeco.javacv.FFmpegFrameGrabber.Exception {
        setTimestamp(timestamp, checkFrame ? EnumSet.of(Frame.Type.VIDEO, Frame.Type.AUDIO) : null);
    }

    /** setTimestamp with resulting video frame type if there is a video stream.
     * This should provide precise seek to a video frame containing the requested timestamp
     * in most cases.
     * */
    public void setVideoTimestamp(long timestamp) throws org.bytedeco.javacv.FFmpegFrameGrabber.Exception {
        setTimestamp(timestamp, EnumSet.of(Frame.Type.VIDEO));
    }

    /** setTimestamp with resulting audio frame type if there is an audio stream.
     * This should provide precise seek to an audio frame containing the requested timestamp
     * in most cases.
     * */
    public void setAudioTimestamp(long timestamp) throws org.bytedeco.javacv.FFmpegFrameGrabber.Exception {
        setTimestamp(timestamp, EnumSet.of(Frame.Type.AUDIO));
    }

    /** setTimestamp with a priority the resulting frame should be:
     *  video (frameTypesToSeek contains only Frame.Type.VIDEO),
     *  audio (frameTypesToSeek contains only Frame.Type.AUDIO),
     *  or any (frameTypesToSeek contains both)
     */
    private synchronized void setTimestamp(long timestamp, EnumSet<Frame.Type> frameTypesToSeek) throws org.bytedeco.javacv.FFmpegFrameGrabber.Exception {
        int ret;
        if (oc == null) {
            super.timestamp = timestamp;
        } else {
            timestamp = timestamp * AV_TIME_BASE / 1000000L;

            /* the stream start time */
            long ts0 = oc.start_time() != AV_NOPTS_VALUE ? oc.start_time() : 0;

            if (frameTypesToSeek != null //new code providing check of frame content while seeking to the timestamp
                    && (frameTypesToSeek.contains(Frame.Type.VIDEO) || frameTypesToSeek.contains(Frame.Type.AUDIO))
                    && (hasVideo() || hasAudio())) {

                /*     After the call of ffmpeg's avformat_seek_file(...) with the flag set to AVSEEK_FLAG_BACKWARD
                 * the decoding position should be located before the requested timestamp in a closest position
                 * from which all the active streams can be decoded successfully.
                 * The following seeking consists of two stages:
                 * 1. Grab frames till the frame corresponding to that "closest" position
                 * (the first frame containing decoded data).
                 *
                 * 2. Grab frames till the desired timestamp is reached. The number of steps is restricted
                 * by doubled estimation of frames between that "closest" position and the desired position.
                 *
                 * frameTypesToSeek parameter sets the preferred type of frames to seek.
                 * It can be chosen from three possible types: VIDEO, AUDIO or any of them.
                 * The setting means only a preference in the type. That is, if VIDEO or AUDIO is
                 * specified but the file does not have video or audio stream - any type will be used instead.
                 */

                /* Check if file contains requested streams */
                if ((frameTypesToSeek.contains(Frame.Type.VIDEO) && !hasVideo() ) ||
                        (frameTypesToSeek.contains(Frame.Type.AUDIO) && !hasAudio() ))
                    frameTypesToSeek = EnumSet.of(Frame.Type.VIDEO, Frame.Type.AUDIO);

                /*  If frameTypesToSeek is set explicitly to VIDEO or AUDIO
                 *  we need to use start time of the corresponding stream
                 *  instead of the common start time
                 */
                if (frameTypesToSeek.size()==1) {
                    if (frameTypesToSeek.contains(Frame.Type.VIDEO)) {
                        if (video_st!=null && video_st.start_time() != AV_NOPTS_VALUE) {
                            AVRational time_base = video_st.time_base();
                            ts0 = 1000000L * video_st.start_time() * time_base.num() / time_base.den();
                        }
                    }
                    else if (frameTypesToSeek.contains(Frame.Type.AUDIO)) {
                        if (audio_st!=null && audio_st.start_time() != AV_NOPTS_VALUE) {
                            AVRational time_base = audio_st.time_base();
                            ts0 = 1000000L * audio_st.start_time() * time_base.num() / time_base.den();
                        }
                    }
                }

                /*  Sometimes the ffmpeg's avformat_seek_file(...) function brings us not to a position before
                 *  the desired but few frames after. In case we need a frame-precision seek we may
                 *  try to request an earlier timestamp.
                 */
                long early_ts = timestamp;

                /* add the stream start time */
                timestamp += ts0;
                early_ts += ts0;

                long initialSeekPosition = Long.MIN_VALUE;
                long maxSeekSteps = 0;
                long count = 0;
                Frame seekFrame = null;
                do {
                    if ((ret = avformat_seek_file(oc, -1, 0L, early_ts, early_ts, AVSEEK_FLAG_BACKWARD)) < 0)
                        throw new org.bytedeco.javacv.FFmpegFrameGrabber.Exception("avformat_seek_file() error " + ret + ": Could not seek file to timestamp " + timestamp + ".");
                    if (video_c != null) {
                        avcodec_flush_buffers(video_c);
                    }
                    if (audio_c != null) {
                        avcodec_flush_buffers(audio_c);
                    }
                    if (pkt.stream_index() != -1) {
                        av_packet_unref(pkt);
                        pkt.stream_index(-1);
                    }
                    seekFrame = grabFrame(frameTypesToSeek.contains(Frame.Type.AUDIO), frameTypesToSeek.contains(Frame.Type.VIDEO), false, false, false);
                    if (seekFrame == null) return;
                    initialSeekPosition = seekFrame.timestamp;
                    if(early_ts==0L) break;
                    early_ts-=500000L;
                    if(early_ts<0) early_ts=0L;
                } while (initialSeekPosition>timestamp);
                double frameDuration = 0.0;
                if (seekFrame.image != null && this.getFrameRate() > 0)
                    frameDuration =  AV_TIME_BASE / (double)getFrameRate();
                else if (seekFrame.samples != null && samples_frame != null && getSampleRate() > 0) {
                    frameDuration =  AV_TIME_BASE * samples_frame.nb_samples() / (double)getSampleRate();
                }

                if(frameDuration>0.0) {
                    maxSeekSteps = 0; //no more grab if the distance to the requested timestamp is smaller than frameDuration
                    if (timestamp - initialSeekPosition + 1 > frameDuration)  //allow for a rounding error
                        maxSeekSteps = (long)(10*(timestamp - initialSeekPosition)/frameDuration);
                }
                else if (initialSeekPosition < timestamp) maxSeekSteps = 1000;

                double delta = 0.0; //for the timestamp correction
                count = 0;
                while(count < maxSeekSteps) {
                    seekFrame = grabFrame(frameTypesToSeek.contains(Frame.Type.AUDIO), frameTypesToSeek.contains(Frame.Type.VIDEO), false, false, false);
                    if (seekFrame == null) return; //is it better to throw NullPointerException?

                    count++;
                    double ts=seekFrame.timestamp;
                    frameDuration = 0.0;
                    if (seekFrame.image != null && this.getFrameRate() > 0)
                        frameDuration =  AV_TIME_BASE / (double)getFrameRate();
                    else if (seekFrame.samples != null && samples_frame != null && getSampleRate() > 0)
                        frameDuration =  AV_TIME_BASE * samples_frame.nb_samples() / (double)getSampleRate();

                    delta = 0.0;
                    if (frameDuration>0.0) {
                        delta = (ts-ts0)/frameDuration - Math.round((ts-ts0)/frameDuration);
                        if (Math.abs(delta)>0.2) delta=0.0;
                    }
                    ts-=delta*frameDuration; // corrected timestamp
                    if (ts + frameDuration > timestamp) break;
                }
            } else { //old quick seeking code used in JavaCV versions prior to 1.4.1
                /* add the stream start time */
                timestamp += ts0;
                if ((ret = avformat_seek_file(oc, -1, Long.MIN_VALUE, timestamp, Long.MAX_VALUE, AVSEEK_FLAG_BACKWARD)) < 0) {
                    throw new org.bytedeco.javacv.FFmpegFrameGrabber.Exception("avformat_seek_file() error " + ret + ": Could not seek file to timestamp " + timestamp + ".");
                }
                if (video_c != null) {
                    avcodec_flush_buffers(video_c);
                }
                if (audio_c != null) {
                    avcodec_flush_buffers(audio_c);
                }
                if (pkt.stream_index() != -1) {
                    av_packet_unref(pkt);
                    pkt.stream_index(-1);
                }
                /* comparing to timestamp +/- 1 avoids rounding issues for frame rates
                that are not proper divisors of 1000000, e.g. where
                av_frame_get_best_effort_timestamp in grabFrame sets this.timestamp
                to ...666 and the given timestamp has been rounded to ...667
                (or vice versa)
                 */
                int count = 0; // prevent infinite loops with corrupted files
                while (this.timestamp > timestamp + 1 && grabFrame(true, true, false, false) != null && count++ < 1000) {
                    // flush frames if seeking backwards
                }
                count = 0;
                while (this.timestamp < timestamp - 1 && grabFrame(true, true, false, false) != null && count++ < 1000) {
                    // decode up to the desired frame
                }
            }
            frameGrabbed = true;
        }
    }

    /** Returns {@link #getLengthInVideoFrames()} */
    @Override public int getLengthInFrames() {
        // best guess...
        return getLengthInVideoFrames();
    }

    @Override public long getLengthInTime() {
        return oc.duration() * 1000000L / AV_TIME_BASE;
    }

    /** Returns {@code (int) Math.round(getLengthInTime() * getFrameRate() / 1000000L)}, which is an approximation in general. */
    public int getLengthInVideoFrames() {
        // best guess...
        return (int) Math.round(getLengthInTime() * getFrameRate() / 1000000L);
    }

    public int getLengthInAudioFrames() {
        // best guess...
        double afr = getAudioFrameRate();
        if (afr > 0) return (int) (getLengthInTime() * afr / 1000000L);
        else return 0;
    }

    public AVFormatContext getFormatContext() {
        return oc;
    }

    /** Calls {@code start(true)}. */
    @Override public void start() throws org.bytedeco.javacv.FFmpegFrameGrabber.Exception {
        start(true);
    }
    /** Set findStreamInfo to false to minimize startup time, at the expense of robustness. */
    public void start(boolean findStreamInfo) throws org.bytedeco.javacv.FFmpegFrameGrabber.Exception {
        synchronized (org.bytedeco.ffmpeg.global.avcodec.class) {
            startUnsafe(findStreamInfo);
        }
    }
    public void startUnsafe() throws org.bytedeco.javacv.FFmpegFrameGrabber.Exception {
        startUnsafe(true);
    }

    /**
     * This is the method that blocks when the stream is offline or invalid;
     * the interrupt callback installed below enforces a timeout.
     * @param findStreamInfo
     * @throws org.bytedeco.javacv.FFmpegFrameGrabber.Exception
     */
    public synchronized void startUnsafe(boolean findStreamInfo) throws org.bytedeco.javacv.FFmpegFrameGrabber.Exception {
        try (PointerScope scope = new PointerScope()) {

            if (oc != null && !oc.isNull()) {
                throw new org.bytedeco.javacv.FFmpegFrameGrabber.Exception("start() has already been called: Call stop() before calling start() again.");
            }

            int ret;
            img_convert_ctx = null;
            oc              = new AVFormatContext(null);
            video_c         = null;
            audio_c         = null;
            plane_ptr       = new PointerPointer(AVFrame.AV_NUM_DATA_POINTERS).retainReference();
            plane_ptr2      = new PointerPointer(AVFrame.AV_NUM_DATA_POINTERS).retainReference();
            pkt             = new AVPacket().retainReference();
            frameGrabbed    = false;
            frame           = new Frame();
            timestamp       = 0;
            frameNumber     = 0;

            pkt.stream_index(-1);

            // Open video file
            AVInputFormat f = null;
            if (format != null && format.length() > 0) {
                if ((f = av_find_input_format(format)) == null) {
                    throw new org.bytedeco.javacv.FFmpegFrameGrabber.Exception("av_find_input_format() error: Could not find input format \"" + format + "\".");
                }
            }
            AVDictionary options = new AVDictionary(null);
            if (frameRate > 0) {
                AVRational r = av_d2q(frameRate, 1001000);
                av_dict_set(options, "framerate", r.num() + "/" + r.den(), 0);
            }
            if (pixelFormat >= 0) {
                av_dict_set(options, "pixel_format", av_get_pix_fmt_name(pixelFormat).getString(), 0);
            } else if (imageMode != ImageMode.RAW) {
                av_dict_set(options, "pixel_format", imageMode == ImageMode.COLOR ? "bgr24" : "gray8", 0);
            }
            if (imageWidth > 0 && imageHeight > 0) {
                av_dict_set(options, "video_size", imageWidth + "x" + imageHeight, 0);
            }
            if (sampleRate > 0) {
                av_dict_set(options, "sample_rate", "" + sampleRate, 0);
            }
            if (audioChannels > 0) {
                av_dict_set(options, "channels", "" + audioChannels, 0);
            }
            for (Entry<String, String> e : this.options.entrySet()) {
                av_dict_set(options, e.getKey(), e.getValue(), 0);
            }
            if (inputStream != null) {
                if (!inputStream.markSupported()) {
                    inputStream = new BufferedInputStream(inputStream);
                }
                inputStream.mark(maximumSize);
                oc = avformat_alloc_context();
                avio = avio_alloc_context(new BytePointer(av_malloc(4096)), 4096, 0, oc, readCallback, null, maximumSize > 0 ? seekCallback : null);
                oc.pb(avio);

                filename = inputStream.toString();
                inputStreams.put(oc, inputStream);
            }

            // avformat_open_input() below is where start() blocks; install the
            // timeout interrupt callback before opening. (add whb)
            lastFrameTime.set(System.currentTimeMillis());
            if (inputStream == null) {
                // Allocate the context up front so the callback can be attached to it.
                // For InputStream inputs, oc was already allocated above with a custom
                // AVIO context; reallocating it here would discard that setup.
                oc = avformat_alloc_context();
            }
            org.bytedeco.ffmpeg.avformat.AVIOInterruptCB cb = new org.bytedeco.ffmpeg.avformat.AVIOInterruptCB();
            cb.callback(cp);
            oc.interrupt_callback(cb);
            // add whb end

            if ((ret = avformat_open_input(oc, filename, f, options)) < 0) {
                av_dict_set(options, "pixel_format", null, 0);
                // avformat_open_input() frees a caller-allocated context on failure, so
                // reallocate it and reinstall the timeout callback before retrying.
                // (Note: for InputStream inputs the custom AVIO context is not
                // re-attached here, so the retry only helps URL/file inputs.) (add whb)
                lastFrameTime.set(System.currentTimeMillis());
                oc = avformat_alloc_context();
                org.bytedeco.ffmpeg.avformat.AVIOInterruptCB cb2 = new org.bytedeco.ffmpeg.avformat.AVIOInterruptCB();
                cb2.callback(cp);
                oc.interrupt_callback(cb2);
                // add whb end
                if ((ret = avformat_open_input(oc, filename, f, options)) < 0) {
                    throw new org.bytedeco.javacv.FFmpegFrameGrabber.Exception("avformat_open_input() error " + ret + ": Could not open input \"" + filename + "\". (Has setFormat() been called?)");
                }
            }
            av_dict_free(options);

            oc.max_delay(maxDelay);

            // Retrieve stream information, if desired
            if (findStreamInfo && (ret = avformat_find_stream_info(oc, (PointerPointer)null)) < 0) {
                throw new org.bytedeco.javacv.FFmpegFrameGrabber.Exception("avformat_find_stream_info() error " + ret + ": Could not find stream information.");
            }

            if (av_log_get_level() >= AV_LOG_INFO) {
                // Dump information about file onto standard error
                av_dump_format(oc, 0, filename, 0);
            }

            // Find the first stream with the user-specified disposition property
            int nb_streams = oc.nb_streams();
            for (int i = 0; i < nb_streams; i++) {
                AVStream st = oc.streams(i);
                AVCodecParameters par = st.codecpar();
                if (videoStream < 0 && par.codec_type() == AVMEDIA_TYPE_VIDEO && st.disposition() == videoDisposition) {
                    videoStream = i;
                } else if (audioStream < 0 && par.codec_type() == AVMEDIA_TYPE_AUDIO && st.disposition() == audioDisposition) {
                    audioStream = i;
                }
            }

            // Find the first video and audio stream, unless the user specified otherwise
            video_st = audio_st = null;
            AVCodecParameters video_par = null, audio_par = null;
            streams = new int[nb_streams];
            for (int i = 0; i < nb_streams; i++) {
                AVStream st = oc.streams(i);
                // Get a pointer to the codec context for the video or audio stream
                AVCodecParameters par = st.codecpar();
                streams[i] = par.codec_type();
                if (video_st == null && par.codec_type() == AVMEDIA_TYPE_VIDEO && (videoStream < 0 || videoStream == i)) {
                    video_st = st;
                    video_par = par;
                    videoStream = i;
                } else if (audio_st == null && par.codec_type() == AVMEDIA_TYPE_AUDIO && (audioStream < 0 || audioStream == i)) {
                    audio_st = st;
                    audio_par = par;
                    audioStream = i;
                }
            }
            if (video_st == null && audio_st == null) {
                throw new org.bytedeco.javacv.FFmpegFrameGrabber.Exception("Did not find a video or audio stream inside \"" + filename
                        + "\" for videoStream == " + videoStream + " and audioStream == " + audioStream + ".");
            }

            if (video_st != null) {
                // Find the decoder for the video stream
                AVCodec codec = avcodec_find_decoder_by_name(videoCodecName);
                if (codec == null) {
                    codec = avcodec_find_decoder(video_par.codec_id());
                }
                if (codec == null) {
                    throw new org.bytedeco.javacv.FFmpegFrameGrabber.Exception("avcodec_find_decoder() error: Unsupported video format or codec not found: " + video_par.codec_id() + ".");
                }

                /* Allocate a codec context for the decoder */
                if ((video_c = avcodec_alloc_context3(codec)) == null) {
                    throw new org.bytedeco.javacv.FFmpegFrameGrabber.Exception("avcodec_alloc_context3() error: Could not allocate video decoding context.");
                }

                /* copy the stream parameters from the muxer */
                if ((ret = avcodec_parameters_to_context(video_c, video_st.codecpar())) < 0) {
                    releaseUnsafe();
                    throw new org.bytedeco.javacv.FFmpegFrameGrabber.Exception("avcodec_parameters_to_context() error " + ret + ": Could not copy the video stream parameters.");
                }

                options = new AVDictionary(null);
                for (Entry<String, String> e : videoOptions.entrySet()) {
                    av_dict_set(options, e.getKey(), e.getValue(), 0);
                }

                // Enable multithreading when available
                video_c.thread_count(0);

                // Open video codec
                if ((ret = avcodec_open2(video_c, codec, options)) < 0) {
                    throw new org.bytedeco.javacv.FFmpegFrameGrabber.Exception("avcodec_open2() error " + ret + ": Could not open video codec.");
                }
                av_dict_free(options);

                // Hack to correct wrong frame rates that seem to be generated by some codecs
                if (video_c.time_base().num() > 1000 && video_c.time_base().den() == 1) {
                    video_c.time_base().den(1000);
                }

                // Allocate video frame and an AVFrame structure for the RGB image
                if ((picture = av_frame_alloc()) == null) {
                    throw new org.bytedeco.javacv.FFmpegFrameGrabber.Exception("av_frame_alloc() error: Could not allocate raw picture frame.");
                }
                if ((picture_rgb = av_frame_alloc()) == null) {
                    throw new org.bytedeco.javacv.FFmpegFrameGrabber.Exception("av_frame_alloc() error: Could not allocate RGB picture frame.");
                }

                initPictureRGB();
            }

            if (audio_st != null) {
                // Find the decoder for the audio stream
                AVCodec codec = avcodec_find_decoder_by_name(audioCodecName);
                if (codec == null) {
                    codec = avcodec_find_decoder(audio_par.codec_id());
                }
                if (codec == null) {
                    throw new org.bytedeco.javacv.FFmpegFrameGrabber.Exception("avcodec_find_decoder() error: Unsupported audio format or codec not found: " + audio_par.codec_id() + ".");
                }

                /* Allocate a codec context for the decoder */
                if ((audio_c = avcodec_alloc_context3(codec)) == null) {
                    throw new org.bytedeco.javacv.FFmpegFrameGrabber.Exception("avcodec_alloc_context3() error: Could not allocate audio decoding context.");
                }

                /* copy the stream parameters from the muxer */
                if ((ret = avcodec_parameters_to_context(audio_c, audio_st.codecpar())) < 0) {
                    releaseUnsafe();
                    throw new org.bytedeco.javacv.FFmpegFrameGrabber.Exception("avcodec_parameters_to_context() error " + ret + ": Could not copy the audio stream parameters.");
                }

                options = new AVDictionary(null);
                for (Entry<String, String> e : audioOptions.entrySet()) {
                    av_dict_set(options, e.getKey(), e.getValue(), 0);
                }

                // Enable multithreading when available
                audio_c.thread_count(0);

                // Open audio codec
                if ((ret = avcodec_open2(audio_c, codec, options)) < 0) {
                    throw new org.bytedeco.javacv.FFmpegFrameGrabber.Exception("avcodec_open2() error " + ret + ": Could not open audio codec.");
                }
                av_dict_free(options);

                // Allocate audio samples frame
                if ((samples_frame = av_frame_alloc()) == null) {
                    throw new org.bytedeco.javacv.FFmpegFrameGrabber.Exception("av_frame_alloc() error: Could not allocate audio frame.");
                }

                samples_ptr = new BytePointer[] { null };
                samples_buf = new Buffer[] { null };
            }
            started = true;

        }
    }
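
    /* Example use of the patched grabber (a sketch; thread management and retry
     * policy are application-specific):
     *
     *     FFmpegFrameGrabber grabber = new FFmpegFrameGrabber(rtspUrl);
     *     try {
     *         grabber.start(); // now fails within ~15 s instead of hanging forever
     *     } catch (FrameGrabber.Exception e) {
     *         if (grabber.isInterrupt_grab()) {
     *             // the timeout watchdog aborted a stalled open
     *         }
     *     }
     */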

    private void initPictureRGB() {
        int width  = imageWidth  > 0 ? imageWidth  : video_c.width();
        int height = imageHeight > 0 ? imageHeight : video_c.height();

        switch (imageMode) {
            case COLOR:
            case GRAY:
                // If the size changed, a new allocation is needed -> free the old one.
                if (image_ptr != null) {
                    // First kill all references, then free it.
                    image_buf = null;
                    BytePointer[] temp = image_ptr;
                    image_ptr = null;
                    av_free(temp[0]);
                }
                int fmt = getPixelFormat();

                // work around bug in swscale: https://trac.ffmpeg.org/ticket/1031
                int align = 64;
                int stride = width;
                for (int i = 1; i <= align; i += i) {
                    stride = (width + (i - 1)) & ~(i - 1);
                    av_image_fill_linesizes(picture_rgb.linesize(), fmt, stride);
                    if ((picture_rgb.linesize(0) & (align - 1)) == 0) {
                        break;
                    }
                }
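                // Example: for width=100 in BGR24 (3 bytes per pixel) this loop settles
                // on stride=128, since 3*128=384 is the first 64-byte-aligned linesize.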

                // Determine required buffer size and allocate buffer
                int size = av_image_get_buffer_size(fmt, stride, height, 1);
                image_ptr = new BytePointer[] { new BytePointer(av_malloc(size)).capacity(size) };
                image_buf = new Buffer[] { image_ptr[0].asBuffer() };

                // Assign appropriate parts of buffer to image planes in picture_rgb
                // Note that picture_rgb is an AVFrame, but AVFrame is a superset of AVPicture
                av_image_fill_arrays(new PointerPointer(picture_rgb), picture_rgb.linesize(), image_ptr[0], fmt, stride, height, 1);
                picture_rgb.format(fmt);
                picture_rgb.width(width);
                picture_rgb.height(height);
                break;

            case RAW:
                image_ptr = new BytePointer[] { null };
                image_buf = new Buffer[] { null };
                break;

            default:
                assert false;
        }
    }

    @Override public void stop() throws org.bytedeco.javacv.FFmpegFrameGrabber.Exception {
        release();
    }

    @Override public synchronized void trigger() throws org.bytedeco.javacv.FFmpegFrameGrabber.Exception {
        if (oc == null || oc.isNull()) {
            throw new org.bytedeco.javacv.FFmpegFrameGrabber.Exception("Could not trigger: No AVFormatContext. (Has start() been called?)");
        }
        if (pkt.stream_index() != -1) {
            av_packet_unref(pkt);
            pkt.stream_index(-1);
        }
        for (int i = 0; i < numBuffers+1; i++) {
            if (av_read_frame(oc, pkt) < 0) {
                return;
            }
            av_packet_unref(pkt);
        }
    }

    private void processImage() throws org.bytedeco.javacv.FFmpegFrameGrabber.Exception {
        frame.imageWidth  = imageWidth  > 0 ? imageWidth  : video_c.width();
        frame.imageHeight = imageHeight > 0 ? imageHeight : video_c.height();
        frame.imageDepth = Frame.DEPTH_UBYTE;
        switch (imageMode) {
            case COLOR:
            case GRAY:
                // Deinterlace Picture
                if (deinterlace) {
                    throw new org.bytedeco.javacv.FFmpegFrameGrabber.Exception("Cannot deinterlace: Functionality moved to FFmpegFrameFilter.");
                }

                // Has the size changed?
                if (frame.imageWidth != picture_rgb.width() || frame.imageHeight != picture_rgb.height()) {
                    initPictureRGB();
                }

                // Copy "metadata" fields
                av_frame_copy_props(picture_rgb, picture);

                // Convert the image into BGR or GRAY format that OpenCV uses
                img_convert_ctx = sws_getCachedContext(img_convert_ctx,
                        video_c.width(), video_c.height(), video_c.pix_fmt(),
                        frame.imageWidth, frame.imageHeight, getPixelFormat(),
                        imageScalingFlags != 0 ? imageScalingFlags : SWS_BILINEAR,
                        null, null, (DoublePointer)null);
                if (img_convert_ctx == null) {
                    throw new org.bytedeco.javacv.FFmpegFrameGrabber.Exception("sws_getCachedContext() error: Cannot initialize the conversion context.");
                }

                // Convert the image from its native format to RGB or GRAY
                sws_scale(img_convert_ctx, new PointerPointer(picture), picture.linesize(), 0,
                        video_c.height(), new PointerPointer(picture_rgb), picture_rgb.linesize());
                frame.imageStride = picture_rgb.linesize(0);
                frame.image = image_buf;
                frame.opaque = picture_rgb;
                break;

            case RAW:
                frame.imageStride = picture.linesize(0);
                BytePointer ptr = picture.data(0);
                if (ptr != null && !ptr.equals(image_ptr[0])) {
                    image_ptr[0] = ptr.capacity(frame.imageHeight * frame.imageStride);
                    image_buf[0] = ptr.asBuffer();
                }
                frame.image = image_buf;
                frame.opaque = picture;
                break;

            default:
                assert false;
        }
        frame.image[0].limit(frame.imageHeight * frame.imageStride);
        frame.imageChannels = frame.imageStride / frame.imageWidth;
    }

    private void processSamples() throws org.bytedeco.javacv.FFmpegFrameGrabber.Exception {
        int ret;

        int sample_format = samples_frame.format();
        int planes = av_sample_fmt_is_planar(sample_format) != 0 ? (int)samples_frame.channels() : 1;
        int data_size = av_samples_get_buffer_size((IntPointer)null, audio_c.channels(),
                samples_frame.nb_samples(), audio_c.sample_fmt(), 1) / planes;
        if (samples_buf == null || samples_buf.length != planes) {
            samples_ptr = new BytePointer[planes];
            samples_buf = new Buffer[planes];
        }
        frame.sampleRate = audio_c.sample_rate();
        frame.audioChannels = audio_c.channels();
        frame.samples = samples_buf;
        frame.opaque = samples_frame;
        int sample_size = data_size / av_get_bytes_per_sample(sample_format);
        for (int i = 0; i < planes; i++) {
            BytePointer p = samples_frame.data(i);
            if (!p.equals(samples_ptr[i]) || samples_ptr[i].capacity() < data_size) {
                samples_ptr[i] = p.capacity(data_size);
                ByteBuffer b   = p.asBuffer();
                switch (sample_format) {
                    case AV_SAMPLE_FMT_U8:
                    case AV_SAMPLE_FMT_U8P:  samples_buf[i] = b; break;
                    case AV_SAMPLE_FMT_S16:
                    case AV_SAMPLE_FMT_S16P: samples_buf[i] = b.asShortBuffer();  break;
                    case AV_SAMPLE_FMT_S32:
                    case AV_SAMPLE_FMT_S32P: samples_buf[i] = b.asIntBuffer();    break;
                    case AV_SAMPLE_FMT_FLT:
                    case AV_SAMPLE_FMT_FLTP: samples_buf[i] = b.asFloatBuffer();  break;
                    case AV_SAMPLE_FMT_DBL:
                    case AV_SAMPLE_FMT_DBLP: samples_buf[i] = b.asDoubleBuffer(); break;
                    default: assert false;
                }
            }
            samples_buf[i].position(0).limit(sample_size);
        }

        if (audio_c.channels() != getAudioChannels() || audio_c.sample_fmt() != getSampleFormat() || audio_c.sample_rate() != getSampleRate()) {
            if (samples_convert_ctx == null || samples_channels != getAudioChannels() || samples_format != getSampleFormat() || samples_rate != getSampleRate()) {
                samples_convert_ctx = swr_alloc_set_opts(samples_convert_ctx, av_get_default_channel_layout(getAudioChannels()), getSampleFormat(), getSampleRate(),
                        av_get_default_channel_layout(audio_c.channels()), audio_c.sample_fmt(), audio_c.sample_rate(), 0, null);
                if (samples_convert_ctx == null) {
                    throw new org.bytedeco.javacv.FFmpegFrameGrabber.Exception("swr_alloc_set_opts() error: Cannot allocate the conversion context.");
                } else if ((ret = swr_init(samples_convert_ctx)) < 0) {
                    throw new org.bytedeco.javacv.FFmpegFrameGrabber.Exception("swr_init() error " + ret + ": Cannot initialize the conversion context.");
                }
                samples_channels = getAudioChannels();
                samples_format = getSampleFormat();
                samples_rate = getSampleRate();
            }

            int sample_size_in = samples_frame.nb_samples();
            int planes_out = av_sample_fmt_is_planar(samples_format) != 0 ? (int)samples_frame.channels() : 1;
            int sample_size_out = swr_get_out_samples(samples_convert_ctx, sample_size_in);
            int sample_bytes_out = av_get_bytes_per_sample(samples_format);
            int buffer_size_out = sample_size_out * sample_bytes_out * (planes_out > 1 ? 1 : samples_channels);
            if (samples_buf_out == null || samples_buf.length != planes_out || samples_ptr_out[0].capacity() < buffer_size_out) {
                for (int i = 0; samples_ptr_out != null && i < samples_ptr_out.length; i++) {
                    av_free(samples_ptr_out[i].position(0));
                }
                samples_ptr_out = new BytePointer[planes_out];
                samples_buf_out = new Buffer[planes_out];

                for (int i = 0; i < planes_out; i++) {
                    samples_ptr_out[i] = new BytePointer(av_malloc(buffer_size_out)).capacity(buffer_size_out);
                    ByteBuffer b = samples_ptr_out[i].asBuffer();
                    switch (samples_format) {
                        case AV_SAMPLE_FMT_U8:
                        case AV_SAMPLE_FMT_U8P:  samples_buf_out[i] = b; break;
                        case AV_SAMPLE_FMT_S16:
                        case AV_SAMPLE_FMT_S16P: samples_buf_out[i] = b.asShortBuffer();  break;
                        case AV_SAMPLE_FMT_S32:
                        case AV_SAMPLE_FMT_S32P: samples_buf_out[i] = b.asIntBuffer();    break;
                        case AV_SAMPLE_FMT_FLT:
                        case AV_SAMPLE_FMT_FLTP: samples_buf_out[i] = b.asFloatBuffer();  break;
                        case AV_SAMPLE_FMT_DBL:
                        case AV_SAMPLE_FMT_DBLP: samples_buf_out[i] = b.asDoubleBuffer(); break;
                        default: assert false;
                    }
                }
            }
            frame.sampleRate = samples_rate;
            frame.audioChannels = samples_channels;
            frame.samples = samples_buf_out;

            if ((ret = swr_convert(samples_convert_ctx, plane_ptr.put(samples_ptr_out), sample_size_out, plane_ptr2.put(samples_ptr), sample_size_in)) < 0) {
                throw new org.bytedeco.javacv.FFmpegFrameGrabber.Exception("swr_convert() error " + ret + ": Cannot convert audio samples.");
            }
            for (int i = 0; i < planes_out; i++) {
                samples_ptr_out[i].position(0).limit(ret * (planes_out > 1 ? 1 : samples_channels));
                samples_buf_out[i].position(0).limit(ret * (planes_out > 1 ? 1 : samples_channels));
            }
        }
    }

    public Frame grab() throws org.bytedeco.javacv.FFmpegFrameGrabber.Exception {
        return grabFrame(true, true, true, false, true);
    }
    public Frame grabImage() throws org.bytedeco.javacv.FFmpegFrameGrabber.Exception {
        return grabFrame(false, true, true, false, false);
    }
    public Frame grabSamples() throws org.bytedeco.javacv.FFmpegFrameGrabber.Exception {
        return grabFrame(true, false, true, false, false);
    }
    public Frame grabKeyFrame() throws org.bytedeco.javacv.FFmpegFrameGrabber.Exception {
        return grabFrame(false, true, true, true, false);
    }
    public Frame grabFrame(boolean doAudio, boolean doVideo, boolean doProcessing, boolean keyFrames) throws org.bytedeco.javacv.FFmpegFrameGrabber.Exception {
        return grabFrame(doAudio, doVideo, doProcessing, keyFrames, true);
    }
    public synchronized Frame grabFrame(boolean doAudio, boolean doVideo, boolean doProcessing, boolean keyFrames, boolean doData) throws org.bytedeco.javacv.FFmpegFrameGrabber.Exception {
        try (PointerScope scope = new PointerScope()) {

            if (oc == null || oc.isNull()) {
                // Callback: refresh the last-frame time for the read-timeout check
                lastFrameTime.set(System.currentTimeMillis()); // add whb
                throw new org.bytedeco.javacv.FFmpegFrameGrabber.Exception("Could not grab: No AVFormatContext. (Has start() been called?)");
            } else if ((!doVideo || video_st == null) && (!doAudio || audio_st == null) && !doData) {
                return null;
            }
            if (!started) {
                throw new org.bytedeco.javacv.FFmpegFrameGrabber.Exception("start() was not called successfully!");
            }

            boolean videoFrameGrabbed = frameGrabbed && frame.image != null;
            boolean audioFrameGrabbed = frameGrabbed && frame.samples != null;
            boolean dataFrameGrabbed = frameGrabbed && frame.data != null;
            frameGrabbed = false;
            if (doVideo && videoFrameGrabbed) {
                if (doProcessing) {
                    processImage();
                }
                frame.keyFrame = picture.key_frame() != 0;
                return frame;
            } else if (doAudio && audioFrameGrabbed) {
                if (doProcessing) {
                    processSamples();
                }
                frame.keyFrame = samples_frame.key_frame() != 0;
                return frame;
            } else if (doData && dataFrameGrabbed) {
                return frame;
            }

            frame.keyFrame = false;
            frame.imageWidth = 0;
            frame.imageHeight = 0;
            frame.imageDepth = 0;
            frame.imageChannels = 0;
            frame.imageStride = 0;
            frame.image = null;
            frame.sampleRate = 0;
            frame.audioChannels = 0;
            frame.samples = null;
            frame.data = null;
            frame.opaque = null;
            frame.type = null;

            boolean done = false;
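            // A stream_index of -1 marks pkt as empty or already consumed, so a fresh packet must be read.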
            boolean readPacket = pkt.stream_index() == -1;
            while (!done) {
                int ret = 0;
                if (readPacket) {
                    if (pkt.stream_index() != -1) {
                        // Free the packet that was allocated by av_read_frame
                        av_packet_unref(pkt);
                        pkt.stream_index(-1);
                    }
                    if ((ret = av_read_frame(oc, pkt)) < 0) {
                        if (ret == AVERROR_EAGAIN()) {
                            try {
                                Thread.sleep(10);
                                continue;
                            } catch (InterruptedException ex) {
                                // reset interrupt to be nice
                                Thread.currentThread().interrupt();
                                return null;
                            }
                        }
                        if ((doVideo && video_st != null) || (doAudio && audio_st != null)) {
                            // The video or audio codec may have buffered some frames
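                            // Queue a "flush" packet (null data, zero size) so the decoder drains them below.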
                            pkt.stream_index(doVideo && video_st != null ? video_st.index() : audio_st.index());
                            pkt.flags(AV_PKT_FLAG_KEY);
                            pkt.data(null);
                            pkt.size(0);
                        } else {
                            pkt.stream_index(-1);
                            return null;
                        }
                    }
                }

                frame.streamIndex = pkt.stream_index();

                // Is this a packet from the video stream?
                if (doVideo && video_st != null && frame.streamIndex == video_st.index()
                        && (!keyFrames || pkt.flags() == AV_PKT_FLAG_KEY)) {
                    // Decode video frame
                    if (readPacket) {
                        ret = avcodec_send_packet(video_c, pkt);
                        if (pkt.data() == null && pkt.size() == 0) {
                            pkt.stream_index(-1);
                        }
                        if (ret == AVERROR_EAGAIN() || ret == AVERROR_EOF()) {
                            // The video codec may have buffered some frames
                        } else if (ret < 0) {
                            // Ignore errors to emulate the behavior of the old API
                            // throw new Exception("avcodec_send_packet() error " + ret + ": Error sending a video packet for decoding.");
                        }
                    }

                    // Did we get a video frame?
                    while (!done) {
                        ret = avcodec_receive_frame(video_c, picture);
                        if (ret == AVERROR_EAGAIN() || ret == AVERROR_EOF()) {
                            if (pkt.data() == null && pkt.size() == 0) {
                                pkt.stream_index(-1);
                                doVideo = false;
                                if (doAudio) {
                                    readPacket = false;
                                    break;
                                }
                                return null;
                            } else {
                                readPacket = true;
                                break;
                            }
                        } else if (ret < 0) {
                            // Ignore errors to emulate the behavior of the old API
                            // throw new Exception("avcodec_receive_frame() error " + ret + ": Error during video decoding.");
                            readPacket = true;
                            break;
                        }

                        if (!keyFrames || picture.pict_type() == AV_PICTURE_TYPE_I) {
                            long pts = picture.best_effort_timestamp();
                            AVRational time_base = video_st.time_base();
                            timestamp = 1000000L * pts * time_base.num() / time_base.den();
                            long ts0 = oc.start_time() != AV_NOPTS_VALUE ? oc.start_time() : 0;
                            // best guess, AVCodecContext.frame_number = number of decoded frames...
                            frameNumber = (int)Math.round((timestamp - ts0) * getFrameRate() / 1000000L);
                            frame.image = image_buf;
                            if (doProcessing) {
                                processImage();
                            }
                        /* the picture is allocated by the decoder. no need to
                           free it */
                            done = true;
                            frame.timestamp = timestamp;
                            frame.keyFrame = picture.key_frame() != 0;
                            frame.pictType = (char)av_get_picture_type_char(picture.pict_type());
                            frame.type = Frame.Type.VIDEO;
                        }
                    }
                } else if (doAudio && audio_st != null && frame.streamIndex == audio_st.index()) {
                    // Decode audio frame
                    if (readPacket) {
                        ret = avcodec_send_packet(audio_c, pkt);
                        if (ret < 0) {
                            // Ignore errors to emulate the behavior of the old API
                            // throw new Exception("avcodec_send_packet() error " + ret + ": Error sending an audio packet for decoding.");
                        }
                    }

                    // Did we get an audio frame?
                    while (!done) {
                        ret = avcodec_receive_frame(audio_c, samples_frame);
                        if (ret == AVERROR_EAGAIN() || ret == AVERROR_EOF()) {
                            if (pkt.data() == null && pkt.size() == 0) {
                                pkt.stream_index(-1);
                                doAudio = false;
                                return null;
                            } else {
                                readPacket = true;
                                break;
                            }
                        } else if (ret < 0) {
                            // Ignore errors to emulate the behavior of the old API
                            // throw new Exception("avcodec_receive_frame() error " + ret + ": Error during audio decoding.");
                            readPacket = true;
                            break;
                        }

                        long pts = samples_frame.best_effort_timestamp();
                        AVRational time_base = audio_st.time_base();
                        timestamp = 1000000L * pts * time_base.num() / time_base.den();
                        frame.samples = samples_buf;
                        /* if a frame has been decoded, output it */
                        if (doProcessing) {
                            processSamples();
                        }
                        done = true;
                        frame.timestamp = timestamp;
                        frame.keyFrame = samples_frame.key_frame() != 0;
                        frame.type = Frame.Type.AUDIO;
                    }
                } else if (readPacket && doData
                        && frame.streamIndex > -1 && frame.streamIndex < streams.length
                        && streams[frame.streamIndex] != AVMEDIA_TYPE_VIDEO && streams[frame.streamIndex] != AVMEDIA_TYPE_AUDIO) {
                    // Export the stream byte data for non audio / video frames
                    frame.data = pkt.data().position(0).capacity(pkt.size()).asByteBuffer();
                    frame.opaque = pkt;
                    done = true;
                    switch (streams[frame.streamIndex]) {
                        case AVMEDIA_TYPE_DATA: frame.type = Frame.Type.DATA; break;
                        case AVMEDIA_TYPE_SUBTITLE: frame.type = Frame.Type.SUBTITLE; break;
                        case AVMEDIA_TYPE_ATTACHMENT: frame.type = Frame.Type.ATTACHMENT; break;
                        default: frame.type = null;
                    }
                } else {
                    // Current packet is not needed (different stream index required)
                    readPacket = true;
                }
            }
            return frame;

        }
    }

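    /** Reads the next raw, undecoded packet; pairs with FFmpegFrameRecorder.recordPacket() for remuxing without re-encoding. */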
    public synchronized AVPacket grabPacket() throws org.bytedeco.javacv.FFmpegFrameGrabber.Exception {
        if (oc == null || oc.isNull()) {
            throw new org.bytedeco.javacv.FFmpegFrameGrabber.Exception("Could not grab: No AVFormatContext. (Has start() been called?)");
        }
        if (!started) {
            throw new org.bytedeco.javacv.FFmpegFrameGrabber.Exception("start() was not called successfully!");
        }

        // Return the next frame of a stream.
        if (av_read_frame(oc, pkt) < 0) {
            return null;
        }

        return pkt;
    }
}
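
For reference, the core of the anti-blocking change is FFmpeg's interrupt-callback mechanism: avformat_open_input() and av_read_frame() poll AVFormatContext.interrupt_callback while blocked in network I/O, and a non-zero return makes them abort with an error instead of hanging forever. Below is a minimal sketch of the idea; lastFrameTime mirrors the field added above, while timeoutMs and the exact wiring are illustrative, not the definitive implementation:

// Sketch: FFmpeg polls this hook during blocking I/O; returning 1 aborts the call,
// so start()/grab() fail fast on a dead or stalled stream instead of blocking.
AVIOInterruptCB.Callback_Pointer interruptHook = new AVIOInterruptCB.Callback_Pointer() {
    @Override public int call(Pointer opaque) {
        // Abort when no data has arrived within the read timeout (20 s per the notes above)
        return System.currentTimeMillis() - lastFrameTime.get() > timeoutMs ? 1 : 0;
    }
};

AVIOInterruptCB cb = new AVIOInterruptCB();
cb.callback(interruptHook);
oc.interrupt_callback(cb); // must be installed before avformat_open_input()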

Test class:

package com.cloud.video;


import lombok.extern.slf4j.Slf4j;
import org.bytedeco.ffmpeg.global.avcodec;
import org.bytedeco.javacv.*;

import javax.imageio.ImageIO;
import java.awt.image.BufferedImage;
import java.io.File;
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.UUID;
import java.util.concurrent.*;
import java.util.function.Consumer;

import static org.bytedeco.ffmpeg.global.avutil.AV_LOG_ERROR;


/**
 * @ClassName VideoUtil
 * @Description: Utility methods for grabbing snapshots and recording clips from a live stream
 * @Author Admin
 * @Date 2024/9/13 10:11
 * @Version 1.0
 **/
@Slf4j
public class VideoUtil {


    /**
     * Grab a snapshot image from the stream.
     *
     * @param streamURL URL of the camera / monitor stream
     * @return the saved JPEG file, or null if no frame could be grabbed
     */
    public static File grabImage(String streamURL) {
        // Create a grabber for the video source
        com.cloud.video.FFmpegFrameGrabber grabber = new com.cloud.video.FFmpegFrameGrabber(streamURL);
        System.out.println("----------run----");
        File file = null;
        try {
            grabber.setOption("rtsp_transport", "tcp");
            //grabber.start(); // start() can block indefinitely when the stream is offline
            grabber.startUnsafe();
            Frame frame = grabber.grabImage();
            Java2DFrameConverter converter = new Java2DFrameConverter();
            BufferedImage bi = converter.convert(frame); // null when no frame was grabbed
            if (bi == null) {
                System.out.println("No image frame grabbed from: " + streamURL);
                return null;
            }

            String path = "../cache";
            File pathFile = new File(path);
            if (!pathFile.exists()) {
                pathFile.mkdirs();
            }
            String filename = path + "/" + UUID.randomUUID() + ".jpg";
            file = new File(filename);

            // Write the grabbed frame to disk as a JPEG
            ImageIO.write(bi, "jpg", file);
            System.out.println(file.getAbsolutePath());
            System.out.println("Snapshot saved: " + filename);
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            try {
                grabber.stop();
                grabber.close();
                System.out.println("----over---");
            } catch (FrameGrabber.Exception e) {
                throw new RuntimeException(e);
            }
        }
        return file;
    }

    /**
     * Record the stream to a video file.
     *
     * @param filePath  directory where the recording is saved
     * @param timesSec  recording duration, in seconds
     * @param streamURL URL of the camera / monitor stream
     */
    public static void recordVideo(String filePath, Long timesSec, String streamURL) {
        CompletableFuture.runAsync(() -> {
            // Create a grabber for the video source
            com.cloud.video.FFmpegFrameGrabber grabber = new com.cloud.video.FFmpegFrameGrabber(streamURL);
            FFmpegLogCallback.setLevel(AV_LOG_ERROR);

            FFmpegFrameRecorder recorder = null;
            String fileName = new SimpleDateFormat("yyyyMMddHHmmssSSS").format(new Date()) + ".mp4"; // lowercase "yyyy": "YYYY" is week-year and gives wrong dates around New Year
            String fileFullPath = filePath + fileName;
            try {
                grabber.start();
                System.out.println("------1-----");
                Frame frame = grabber.grabFrame();
                createFile(fileFullPath);
                // Output path, resolution (width, height), and audio channels (0: no audio / 1: record audio)
                recorder = new FFmpegFrameRecorder(fileFullPath, grabber.getImageWidth(), grabber.getImageHeight(), 1);
                System.out.println("------2-----");
                recorder.setVideoCodec(avcodec.AV_CODEC_ID_H264); // H.264 video codec
                recorder.setFormat("mp4"); // container format
                recorder.setFrameRate(25); // frame rate
                // Video bitrate: the default 400000 is visibly low; 800000 is a better fit
                recorder.setVideoBitrate(800000);
                recorder.start();
                System.out.println("Recording started");
                // Compute the end time
                long endTime = System.currentTimeMillis() + timesSec * 1000;
                // Keep recording while the duration has not elapsed and frames keep arriving
                while ((System.currentTimeMillis() < endTime) && (frame != null)) {
                    recorder.record(frame);      // record the current frame
                    frame = grabber.grabFrame(); // grab the next frame
                }
                if (frame != null) { // flush the last grabbed frame; skip it when the stream ended with null
                    recorder.record(frame);
                }

            } catch (Exception e) {
                e.printStackTrace();
            } finally {
                System.out.println("------3-----");
                // Stop recording and release resources
                try {
                    grabber.stop();
                } catch (FrameGrabber.Exception e) {
                    e.printStackTrace();
                }
                if (recorder != null) {
                    try {
                        recorder.stop();
                    } catch (FrameRecorder.Exception e) {
                        e.printStackTrace();
                    }
                }

                // Handle the file upload here after the recording completes

            }
        });
    }

    private static void createFile(String filePath) {
        File outFile = new File(filePath);
        if (!outFile.exists()) {
            try {
                // Make sure the parent directory exists before creating the file
                File parent = outFile.getParentFile();
                if (parent != null && !parent.exists()) {
                    parent.mkdirs();
                }
                outFile.createNewFile();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }

    public static void main(String[] args) {
        // http://192.168.1.99:8081/hdl/live/1_out.flv
        // rtmp://192.168.1.99:1935/live/1_out
        // rtsp://192.168.1.99:554/live/1_out
        String streamURL = "rtsp://192.168.1.99:554/live/1_out";

        //grabImage(streamURL);
        recordVideo("d:\\test\\", 20L, streamURL);
        try {
            // Keep the JVM alive while the asynchronous recording runs
            Thread.sleep(60000);
        } catch (Exception e) {
            // ignore
        }
    }

}
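
Even with the patched grabber, a caller can put a hard upper bound on a snapshot by running it on another thread and applying a future timeout. A small illustrative sketch using only the JDK classes already imported above (streamURL as in main(); note that on timeout the background task keeps running until the grabber itself gives up):

try {
    File img = CompletableFuture
            .supplyAsync(() -> VideoUtil.grabImage(streamURL))
            .get(30, TimeUnit.SECONDS); // fail fast if the whole grab overruns 30 seconds
    System.out.println("snapshot: " + img);
} catch (TimeoutException e) {
    System.out.println("grab timed out");
} catch (InterruptedException | ExecutionException e) {
    e.printStackTrace();
}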