Unity (PC): Calling FFmpeg to Generate Video / Composing a Video from a Frame Sequence in Unity

There are two scripts; one needs to be attached to a GameObject.

Download the FFmpeg executable from the link below; it needs to be placed in the StreamingAssets\ffmpeg folder.

A follow-up post on calling FFmpeg to generate video on Android will come later.
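Before rendering anything, it is worth verifying that the executable is actually in place. A minimal sanity check (a sketch, not from the original post; the path literal mirrors what the script below builds):

```csharp
using System.IO;
using UnityEngine;

// Hypothetical helper: confirm ffmpeg.exe is where the script below expects
// it, i.e. StreamingAssets/ffmpeg/ffmpeg.exe.
public class FFmpegPathCheck : MonoBehaviour
{
    void Awake()
    {
        string exe = Path.Combine(Application.streamingAssetsPath, "ffmpeg/ffmpeg.exe");
        if (!File.Exists(exe))
            Debug.LogError("ffmpeg.exe not found under StreamingAssets/ffmpeg: " + exe);
    }
}
```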

```csharp
using System.Collections;
using System.Diagnostics;
using System.IO;
using UnityEngine;
public class CreatVideos : MonoBehaviour
{

    private string imagePath;
    private string ffmpegPath;
    private string videoPath;
    // Use this for initialization
    void Start()
    {
        Loom.Initialize();
        imagePath = GameManager.imgffpath + "/";
        ffmpegPath = Application.streamingAssetsPath + "/ffmpeg/ffmpeg.exe";
        videoPath = Application.streamingAssetsPath + "/video.mp4"; // keep the output name ASCII-only to avoid encoding issues on the command line
    }

    public void CreatVideo()
    {

        print("开始渲染");
        print(imagePath);
        print(videoPath);
        if (File.Exists(videoPath))
        {
            File.Delete(videoPath);
        }
        Loom.RunAsync(() =>
        {
            // Create the external process
            Process p = new Process();
            p.StartInfo.FileName = ffmpegPath;

            // Quote both paths so the command still works if they contain spaces.
            // The resolved command line is:
            //   ffmpeg -f image2 -framerate 12 -i "<imagePath>%05d.jpg" -vcodec libx264 -r 25 "<videoPath>"
            // (input frames are read at 12 fps; the output is resampled to 25 fps)
            string args = "-f image2 -framerate 12 -i \"" + imagePath + "%05d.jpg\" -vcodec libx264 -r 25 \"" + videoPath + "\"";
            // %05d matches zero-padded frame names: 00000.jpg, 00001.jpg, 00002.jpg, ...
            // if the frames are named 1.jpg, 2.jpg, 3.jpg, use %d instead

            p.StartInfo.Arguments = args;
            p.StartInfo.UseShellExecute = false;      // do not start via the OS shell (must be false; see MSDN for details)
            p.StartInfo.RedirectStandardError = true; // redirect the program's error output into the StandardError stream (note: FFmpeg writes ALL of its log output to stderr; StandardOutput captures nothing)
            p.StartInfo.CreateNoWindow = true;        // do not create a console window
            p.ErrorDataReceived += new DataReceivedEventHandler(Output); // fired whenever FFmpeg writes to stderr; handled in the method below (see MSDN)
            p.Start();                // start the process
            p.BeginErrorReadLine();   // begin asynchronous reads of stderr
            p.WaitForExit();          // block this worker thread until FFmpeg exits
            p.Close();                // close the process handle
            p.Dispose();              // release resources


        });
    }

    private void Output(object sendProcess, DataReceivedEventArgs output)
    {
        if (!string.IsNullOrEmpty(output.Data))
        {
            // handle FFmpeg's log output here...
            UnityEngine.Debug.Log(output.Data);
        }
    }

}
```
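The script above assumes the numbered frames already exist in GameManager.imgffpath. For completeness, here is a minimal sketch of how such frames might be captured; the FrameCapturer class and its outputDir field are hypothetical, not part of the original post:

```csharp
using System.Collections;
using System.IO;
using UnityEngine;

// Hypothetical companion script: saves one screenshot per rendered frame with
// zero-padded names (00000.jpg, 00001.jpg, ...) so FFmpeg's %05d pattern matches.
public class FrameCapturer : MonoBehaviour
{
    public string outputDir; // e.g. the folder behind GameManager.imgffpath

    public IEnumerator CaptureFrames(int count)
    {
        Directory.CreateDirectory(outputDir);
        for (int i = 0; i < count; i++)
        {
            yield return new WaitForEndOfFrame(); // capture after rendering completes
            Texture2D tex = ScreenCapture.CaptureScreenshotAsTexture();
            File.WriteAllBytes(Path.Combine(outputDir, i.ToString("D5") + ".jpg"),
                               tex.EncodeToJPG());
            Destroy(tex);
        }
    }
}
```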

The script below does not need to be attached to anything.

```csharp
using UnityEngine;
using System.Collections.Generic;
using System;
using System.Threading;
using System.Linq;
/// <summary>
/// Multithreading helper: runs work on background threads and marshals actions back to the main thread
/// </summary>
public class Loom : MonoBehaviour
{
    public static int maxThreads = 8;
    static int numThreads;

    private static Loom _current;
    public static Loom Current
    {
        get
        {
            Initialize();
            return _current;
        }
    }
    //#### Awake removed; initialization is done in Initialize() instead
    //  void Awake()  
    //  {  
    //      _current = this;  
    //      initialized = true;  
    //  }  

    static bool initialized;

    /// <summary>
    /// #### Call this initialization method yourself; calling it once in your startup scene is enough
    /// </summary>
    public static void Initialize()
    {
        if (!initialized)
        {

            if (!Application.isPlaying)
                return;
            initialized = true;
            GameObject g = new GameObject("Loom");
            //#### keep this object alive across scene loads
            DontDestroyOnLoad(g);
            _current = g.AddComponent<Loom>();
        }

    }

    private List<Action> _actions = new List<Action>();
    public struct DelayedQueueItem
    {
        public float time;
        public Action action;
    }
    private List<DelayedQueueItem> _delayed = new List<DelayedQueueItem>();

    List<DelayedQueueItem> _currentDelayed = new List<DelayedQueueItem>();
    /// <summary>
    /// Queue an action to run on the main thread
    /// </summary>
    /// <param name="action"></param>
    public static void QueueOnMainThread(Action action)
    {
        QueueOnMainThread(action, 0f);
    }
    public static void QueueOnMainThread(Action action, float time)
    {
        if (time != 0)
        {
            if (Current != null)
            {
                lock (Current._delayed)
                {
                    Current._delayed.Add(new DelayedQueueItem { time = Time.time + time, action = action });
                }
            }
        }
        else
        {
            if (Current != null)
            {
                lock (Current._actions)
                {
                    Current._actions.Add(action);
                }
            }
        }
    }

    public static Thread RunAsync(Action a)
    {
        Initialize();
        while (numThreads >= maxThreads)
        {
            Thread.Sleep(1);
        }
        Interlocked.Increment(ref numThreads);
        // The work runs on the thread pool, so there is no dedicated Thread
        // object to hand back; callers should ignore the return value.
        ThreadPool.QueueUserWorkItem(RunAction, a);
        return null;
    }

    private static void RunAction(object action)
    {
        try
        {
            ((Action)action)();
        }
        catch
        {
            // exceptions on worker threads are swallowed here; add logging if needed
        }
        finally
        {
            Interlocked.Decrement(ref numThreads);
        }

    }


    void OnDisable()
    {
        if (_current == this)
        {

            _current = null;
        }
    }



    // Use this for initialization  
    void Start()
    {

    }

    List<Action> _currentActions = new List<Action>();

    // Update is called once per frame  
    void Update()
    {
        lock (_actions)
        {
            _currentActions.Clear();
            _currentActions.AddRange(_actions);
            _actions.Clear();
        }
        foreach (var a in _currentActions)
        {
            a();
        }
        lock (_delayed)
        {
            _currentDelayed.Clear();
            _currentDelayed.AddRange(_delayed.Where(d => d.time <= Time.time));
            foreach (var item in _currentDelayed)
                _delayed.Remove(item);
        }
        foreach (var delayed in _currentDelayed)
        {
            delayed.action();
        } 
    }
}
```
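For reference, the typical pattern (a minimal usage sketch, not shown in the original post) is to pair RunAsync with QueueOnMainThread, since Unity's API may only be touched from the main thread:

```csharp
// Minimal usage sketch: heavy work runs on a worker thread, and the result is
// reported back on the main thread where Unity APIs are safe to call.
Loom.RunAsync(() =>
{
    // ... long-running work, e.g. waiting for the FFmpeg process above ...
    Loom.QueueOnMainThread(() =>
    {
        UnityEngine.Debug.Log("Video rendering finished");
    });
});
```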

 

To build a screen-recording tool in Unity that encodes video through the FFmpeg libraries directly (rather than launching ffmpeg.exe as above), you can use the FFmpeg.AutoGen bindings. Below is a simple example:

```csharp
using System;
using System.Runtime.InteropServices;
using FFmpeg.AutoGen;
using UnityEngine;

// Works with raw FFmpeg pointers, so "Allow 'unsafe' Code" must be enabled
// in Player Settings.
public unsafe class ScreenRecorder : MonoBehaviour
{
    private const int FPS = 30;
    private const int BIT_RATE = 4000000;
    private const string OUTPUT_FILE = "output.mp4";

    private int frameCount = 0;
    private AVCodecContext* codecContext;
    private AVFormatContext* formatContext;
    private AVStream* stream;

    private void Start()
    {
        // Encoder options
        AVDictionary* options = null;
        ffmpeg.av_dict_set(&options, "framerate", FPS.ToString(), 0);
        ffmpeg.av_dict_set(&options, "video_size", $"{Screen.width}x{Screen.height}", 0);
        ffmpeg.av_dict_set(&options, "preset", "ultrafast", 0);
        ffmpeg.av_dict_set(&options, "tune", "zerolatency", 0);
        ffmpeg.av_dict_set(&options, "crf", "25", 0);
        ffmpeg.av_dict_set(&options, "bitrate", BIT_RATE.ToString(), 0);

        AVCodec* codec = ffmpeg.avcodec_find_encoder(AVCodecID.AV_CODEC_ID_H264);
        if (codec == null)
        {
            Debug.LogError("Failed to find H.264 codec!");
            return;
        }

        codecContext = ffmpeg.avcodec_alloc_context3(codec);
        codecContext->width = Screen.width;
        codecContext->height = Screen.height;
        codecContext->time_base = new AVRational { num = 1, den = FPS };
        codecContext->framerate = new AVRational { num = FPS, den = 1 };
        codecContext->pix_fmt = AVPixelFormat.AV_PIX_FMT_YUV420P;
        codecContext->flags |= ffmpeg.AV_CODEC_FLAG_GLOBAL_HEADER;
        if ((codec->capabilities & ffmpeg.AV_CODEC_CAP_TRUNCATED) != 0)
        {
            codecContext->flags |= ffmpeg.AV_CODEC_FLAG_TRUNCATED;
        }

        int ret = ffmpeg.avcodec_open2(codecContext, codec, &options);
        if (ret < 0)
        {
            Debug.LogError($"Failed to open codec! Error code: {ret}");
            return;
        }

        formatContext = ffmpeg.avformat_alloc_context();
        formatContext->oformat = ffmpeg.av_guess_format(null, OUTPUT_FILE, null);
        if (formatContext->oformat == null)
        {
            Debug.LogError("Failed to guess output format!");
            return;
        }

        ret = ffmpeg.avio_open(&formatContext->pb, OUTPUT_FILE, ffmpeg.AVIO_FLAG_WRITE);
        if (ret < 0)
        {
            Debug.LogError($"Failed to open file '{OUTPUT_FILE}'! Error code: {ret}");
            return;
        }

        stream = ffmpeg.avformat_new_stream(formatContext, codec);
        ret = ffmpeg.avcodec_parameters_from_context(stream->codecpar, codecContext);
        if (ret < 0)
        {
            Debug.LogError($"Failed to copy codec parameters! Error code: {ret}");
            return;
        }

        ret = ffmpeg.avformat_write_header(formatContext, &options);
        if (ret < 0)
        {
            Debug.LogError($"Failed to write format header! Error code: {ret}");
            return;
        }
    }

    private void OnDestroy()
    {
        ffmpeg.av_write_trailer(formatContext);

        if (codecContext != null)
        {
            ffmpeg.avcodec_close(codecContext);
            ffmpeg.avcodec_free_context(&codecContext);
        }

        if (formatContext != null)
        {
            if ((formatContext->oformat->flags & ffmpeg.AVFMT_NOFILE) == 0 && formatContext->pb != null)
            {
                ffmpeg.avio_close(formatContext->pb);
            }
            ffmpeg.avformat_free_context(formatContext);
        }
    }

    private void LateUpdate()
    {
        AVFrame* frame = ffmpeg.av_frame_alloc();
        if (frame == null)
        {
            Debug.LogError("Failed to allocate frame!");
            return;
        }
        // Allocate the frame buffer (the original snippet used av_image_alloc
        // on the frame arrays; av_frame_get_buffer is the simpler supported route).
        frame->format = (int)codecContext->pix_fmt;
        frame->width = codecContext->width;
        frame->height = codecContext->height;
        ffmpeg.av_frame_get_buffer(frame, 32);

        int size = Screen.width * Screen.height * 3;
        byte[] buffer = new byte[size];
        GCHandle handle = GCHandle.Alloc(buffer, GCHandleType.Pinned);
        IntPtr ptr = handle.AddrOfPinnedObject();
        // NOTE: the original called GL.ReadPixels(...) here, but UnityEngine.GL
        // has no such API. In practice, fill 'buffer' by reading the screen into
        // a Texture2D (Texture2D.ReadPixels after WaitForEndOfFrame) or via
        // AsyncGPUReadback, then continue as below.
        handle.Free();

        // NOTE: this copies packed BGR bytes into a frame declared as YUV420P;
        // a correct implementation would convert the pixels with sws_scale.
        for (int i = 0; i < codecContext->height; i++)
        {
            byte* row = (byte*)frame->data[0] + i * frame->linesize[0];
            for (int j = 0; j < codecContext->width; j++)
            {
                row[3 * j]     = buffer[3 * (i * codecContext->width + j) + 2];
                row[3 * j + 1] = buffer[3 * (i * codecContext->width + j) + 1];
                row[3 * j + 2] = buffer[3 * (i * codecContext->width + j)];
            }
        }

        frame->pts = frameCount++;
        ffmpeg.avcodec_send_frame(codecContext, frame);

        AVPacket* packet = ffmpeg.av_packet_alloc();
        ffmpeg.av_init_packet(packet);
        while (ffmpeg.avcodec_receive_packet(codecContext, packet) >= 0)
        {
            packet->stream_index = stream->index;
            // Simplified timestamp handling: reusing one counter for frame pts
            // and packet pts/dts is not generally correct.
            packet->pts = packet->dts = frameCount++;
            packet->duration = ffmpeg.av_rescale_q(1, codecContext->time_base, stream->time_base);
            packet->pos = -1;
            ffmpeg.av_interleaved_write_frame(formatContext, packet);
            ffmpeg.av_packet_unref(packet);
        }
        ffmpeg.av_packet_free(&packet);
        ffmpeg.av_frame_free(&frame);
    }
}
```

In the code above we first define a few constants such as the frame rate and output file name. In Start() we use an AVDictionary to set FFmpeg's encoding parameters and open the H.264 encoder; we then create an AVFormatContext, configure the output format and stream, and write the file header. LateUpdate() captures and encodes one frame per rendered frame, and finally OnDestroy() closes the encoder and releases the related resources.

Note that this code only targets the Windows platform, and the FFmpeg library files must be copied into the Unity project with their reference paths configured. Also, so that an abnormal exit does not leave encoder resources unreleased, it is advisable to add appropriate exception handling around the encoding calls.
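One practical detail the snippet glosses over (an assumption based on typical FFmpeg.AutoGen usage, not stated in the original): before the first ffmpeg.* call, the bindings must be told where the native FFmpeg DLLs live. A minimal sketch, assuming the DLLs sit under StreamingAssets/ffmpeg:

```csharp
using System.IO;
using FFmpeg.AutoGen;
using UnityEngine;

// Hypothetical setup helper (not part of the original post): point
// FFmpeg.AutoGen at the folder containing avcodec-*.dll, avformat-*.dll, etc.
public static class FFmpegSetup
{
    public static void Init()
    {
        // Assumption: the native DLLs were copied to StreamingAssets/ffmpeg.
        ffmpeg.RootPath = Path.Combine(Application.streamingAssetsPath, "ffmpeg");
        Debug.Log("FFmpeg version: " + ffmpeg.av_version_info());
    }
}
```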
