Recording Video in Unity with FFmpeg

Capture the Game view in Unity and pipe the frames to FFmpeg to save them as a video file. The full script is shown below.

```csharp
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Threading;
using Unity.Collections;
using UnityEngine;
using UnityEngine.Rendering;

public class InstanceVideoController : MonoBehaviour
{
    private bool _isVideo = false;
    private int _frame = 60;
    private string _pathVideo;
    private string _pathEXE;

    private void Awake()
    {
        // Application.streamingAssetsPath cannot be read from a field initializer,
        // so resolve the default paths here instead.
        _pathVideo = Application.streamingAssetsPath + "/Video";
        _pathEXE = Application.streamingAssetsPath + "/ffmpeg.exe";
    }

    private void OnGUI()
    {
        if (GUI.Button(new Rect(10, 10, 200, 50), _isVideo ? "Stop" : "Start"))
        {
            _isVideo = !_isVideo;
            if (_isVideo) StartVideo();
            else StopVideo();
        }

        if (!_isVideo)
        {
            GUI.Label(new Rect(10, 70, 100, 25), "Frame rate:", new GUIStyle() { fontSize = 20, alignment = TextAnchor.MiddleLeft });
            // TryParse keeps the field from throwing while the user is still typing.
            if (int.TryParse(GUI.TextField(new Rect(110, 70, 50, 25), _frame.ToString()), out int frame)) _frame = frame;

            GUI.Label(new Rect(10, 100, 160, 25), "Output folder:", new GUIStyle() { fontSize = 20, alignment = TextAnchor.MiddleLeft });
            _pathVideo = GUI.TextField(new Rect(170, 100, 500, 25), _pathVideo);

            GUI.Label(new Rect(10, 130, 170, 25), "FFmpeg path:", new GUIStyle() { fontSize = 20, alignment = TextAnchor.MiddleLeft });
            _pathEXE = GUI.TextField(new Rect(180, 130, 500, 25), _pathEXE);
        }
    }



    private Camera _cameraVideo;
    private int _width;
    private int _height;
    private RenderTexture _texCamera;
    private Process _processVideo;
    private Thread _threadVideo;
    private List<AsyncGPUReadbackRequest> _listGPURequest = null;
    private Queue<byte[]> _queueVideoData = null;
    private int _timeFixedInterval;
    private int _timeCurrInterval;
    private DateTime _timer;
    private Material _materialTex;


    private void StartVideo()
    {
        _timeFixedInterval = (int)((1.0f / _frame) * 1000);
        _timeCurrInterval = 0;
        _timer = DateTime.Now;
        _listGPURequest = new List<AsyncGPUReadbackRequest>();
        _queueVideoData = new Queue<byte[]>();

        //Camera
        {
            _cameraVideo = new GameObject("CameraVideo").AddComponent<Camera>();
            _cameraVideo.CopyFrom(Camera.main);
            _width = _cameraVideo.pixelWidth % 2 == 0 ? _cameraVideo.pixelWidth : _cameraVideo.pixelWidth - 1;
            _height = _cameraVideo.pixelHeight % 2 == 0 ? _cameraVideo.pixelHeight : _cameraVideo.pixelHeight - 1;
            _cameraVideo.transform.SetParent(Camera.main.transform);
            _cameraVideo.transform.localPosition = Vector3.zero;
            _cameraVideo.transform.localEulerAngles = Vector3.zero;
            _cameraVideo.transform.localScale = Vector3.one;
            _cameraVideo.gameObject.hideFlags = HideFlags.HideInHierarchy;
        }

        //Material (the flip shader below; it must be included in the build,
        //e.g. via "Always Included Shaders", otherwise Shader.Find returns null)
        {
            _materialTex = new Material(Shader.Find("Shader/Video"));
        }

        //Texture
        {
            _texCamera = new RenderTexture(_width, _height, 24, RenderTextureFormat.Default);
            _texCamera.antiAliasing = 1;
            _cameraVideo.targetTexture = _texCamera;
        }

        //Process
        Directory.CreateDirectory(_pathVideo);   // make sure the output folder exists
        _processVideo = Process.Start(new ProcessStartInfo
        {
            UseShellExecute = false,
            CreateNoWindow = true,
            RedirectStandardInput = true,
            RedirectStandardOutput = true,
            RedirectStandardError = true,
            FileName = _pathEXE,
            // Key arguments: read raw RGBA frames from stdin ("-i -") and encode them into an mp4
            Arguments = $"-y -f rawvideo -vcodec rawvideo -pixel_format rgba -colorspace bt709 " +
                        $"-video_size {_width}x{_height} " +
                        $"-framerate {_frame} " +
                        $"-loglevel warning -i - -pix_fmt yuv420p " +
                        $" \"{_pathVideo}\\{DateTime.Now.ToString("yyyy-MMdd-HHmmss")}.mp4\""
        });

        //Thread Write: drains the frame queue and pipes the raw bytes into ffmpeg's stdin
        _threadVideo = new Thread(() =>
        {
            Stream streamVideo = _processVideo.StandardInput.BaseStream;
            // Keep writing until recording has stopped AND the queue is empty,
            // so the last queued frames are not dropped.
            while (_isVideo || _queueVideoData.Count > 0)
            {
                while (_queueVideoData.Count > 0)
                {
                    byte[] buff = null;
                    lock (_queueVideoData) buff = _queueVideoData.Dequeue();
                    streamVideo.Write(buff, 0, buff.Length);
                    streamVideo.Flush();
                }
                Thread.Sleep(10);
            }
        });
        _threadVideo.IsBackground = true;
        _threadVideo.Start();
    }

    private void StopVideo()
    {
        //Thread Write
        if (_threadVideo != null)
        {
            _threadVideo.Join();
            _threadVideo = null;
        }

        //Process
        if (_processVideo != null)
        {
            _processVideo.StandardInput.Close();
            _processVideo.WaitForExit();

            // Surface any warnings/errors ffmpeg reported on stderr.
            StreamReader strR = _processVideo.StandardError;
            string sbError = strR.ReadToEnd();
            if (!string.IsNullOrEmpty(sbError)) UnityEngine.Debug.LogWarning(sbError);
            _processVideo.Close();
            _processVideo.Dispose();
            strR.Close();
            strR.Dispose();
            _processVideo = null;
        }

        //Camera RenderTexture
        if (_texCamera != null)
        {
            if (_cameraVideo != null) _cameraVideo.targetTexture = null;
            Destroy(_texCamera);
            _texCamera = null;
        }

        //Material
        if (_materialTex != null)
        {
            Destroy(_materialTex);
            _materialTex = null;
        }

        //Camera
        if (_cameraVideo != null)
        {
            Destroy(_cameraVideo.gameObject);
            _cameraVideo = null;
        }
        _listGPURequest.Clear();
        _listGPURequest = null;
        _queueVideoData.Clear();
        _queueVideoData = null;
    }

    private void Update()
    {
        if (!_isVideo) return;


        // Key code: pop finished readback requests in order and queue their bytes for the writer thread
        while (_listGPURequest.Count > 0)
        {
            if (!_listGPURequest[0].done) break;
            AsyncGPUReadbackRequest request = _listGPURequest[0];
            _listGPURequest.RemoveAt(0);
            if (request.hasError)
            {
                UnityEngine.Debug.Log("AsyncGPUReadback request failed");
                continue;
            }

            NativeArray<byte> byt = request.GetData<byte>();
            byte[] buffer = byt.ToArray();
            lock (_queueVideoData) _queueVideoData.Enqueue(buffer);
        }

        _timeCurrInterval += (int)((DateTime.Now - _timer).TotalMilliseconds);
        _timer = DateTime.Now;

        // Key code: whenever enough time has accumulated for one (or more) video frame,
        // blit through the flip material and start an asynchronous GPU readback
        while (_timeCurrInterval > _timeFixedInterval)
        {
            _timeCurrInterval -= _timeFixedInterval;
            RenderTexture tex = RenderTexture.GetTemporary(_width, _height, 0, RenderTextureFormat.ARGB32);
            Graphics.Blit(_texCamera, tex, _materialTex);
            _listGPURequest.Add(AsyncGPUReadback.Request(tex));
            RenderTexture.ReleaseTemporary(tex);
        }
    }

    private void OnApplicationQuit()
    {
        if (_isVideo)
        {
            _isVideo = false;
            StopVideo();
        }
    }
}
```
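The heart of the script is the ffmpeg invocation: `-f rawvideo -pixel_format rgba -video_size WxH -framerate N -i -` tells ffmpeg to read unencoded RGBA frames from standard input, and `-pix_fmt yuv420p` converts them to a pixel format that common players handle. To see that pipe in isolation, here is a minimal stand-alone sketch (plain .NET, not part of the Unity script) that writes a short grey-ramp clip to demo.mp4; the ffmpeg location, resolution, frame rate, and frame count are illustrative assumptions.

```csharp
// Minimal sketch of the same pipe outside Unity: feed raw RGBA frames to ffmpeg's stdin.
// Assumes an "ffmpeg" executable is on the PATH; sizes and file names are illustrative.
using System.Diagnostics;
using System.IO;

class RawPipeDemo
{
    static void Main()
    {
        const int width = 640, height = 360, fps = 30, frameCount = 90;

        Process ffmpeg = Process.Start(new ProcessStartInfo
        {
            FileName = "ffmpeg",
            Arguments = $"-y -f rawvideo -pixel_format rgba -video_size {width}x{height} " +
                        $"-framerate {fps} -i - -pix_fmt yuv420p demo.mp4",
            UseShellExecute = false,
            RedirectStandardInput = true,
            CreateNoWindow = true
        });

        Stream stdin = ffmpeg.StandardInput.BaseStream;
        byte[] frame = new byte[width * height * 4];            // one RGBA frame

        for (int f = 0; f < frameCount; f++)
        {
            byte shade = (byte)(f * 255 / frameCount);          // grey ramp so the clip visibly changes
            for (int i = 0; i < frame.Length; i += 4)
            {
                frame[i] = frame[i + 1] = frame[i + 2] = shade; // R, G, B
                frame[i + 3] = 255;                             // A (opaque)
            }
            stdin.Write(frame, 0, frame.Length);
        }

        stdin.Close();         // closing stdin signals end of stream so ffmpeg can finalize the mp4
        ffmpeg.WaitForExit();
    }
}
```

Closing stdin is what lets ffmpeg finish writing the file, which is why the Unity script above closes StandardInput in StopVideo() before calling WaitForExit().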

The shader used for the blit is:

Shader "Shader/Video"
{
    Properties
    {
        _MainTex("", 2D) = "white" {}
    }

    CGINCLUDE

    #include "UnityCG.cginc"

    sampler2D _MainTex;

    fixed4 frag_flip(v2f_img i) : SV_Target
    {
        float2 uv = i.uv;
        uv.y = 1 - uv.y;
        return tex2D(_MainTex, uv);
    }

    ENDCG

    SubShader
    {
        Cull Off ZWrite Off ZTest Always
        Pass
        {
            CGPROGRAM
            #pragma vertex vert_img
            #pragma fragment frag_flip
            ENDCG
        }
    }
}
```
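This shader does nothing except sample _MainTex with the V coordinate inverted, because the frames read back from the GPU would otherwise typically come out upside down in the encoded file (the exact orientation depends on the graphics API). If you would rather not ship an extra shader, ffmpeg can flip the frames itself with its vflip filter. As a sketch, only the Arguments string in StartVideo() would change; everything else is assumed to stay as in the script above:

```csharp
// Alternative sketch: let ffmpeg flip the frames with "-vf vflip" instead of the flip shader.
// Only the Arguments string in StartVideo() changes; the blit can then use
// Graphics.Blit(_texCamera, tex) without _materialTex.
Arguments = $"-y -f rawvideo -vcodec rawvideo -pixel_format rgba -colorspace bt709 " +
            $"-video_size {_width}x{_height} " +
            $"-framerate {_frame} " +
            $"-loglevel warning -i - -vf vflip -pix_fmt yuv420p " +
            $" \"{_pathVideo}\\{DateTime.Now.ToString("yyyy-MMdd-HHmmss")}.mp4\""
```

Both variants produce an upright video.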

A recording tool can also be built in C# by driving ffmpeg purely through its command-line arguments. When used from Unity, place the ffmpeg executable inside the project's Assets folder and launch it from code. A simple example:

```csharp
using System.Diagnostics;
using System.IO;
using UnityEngine;

public class VideoRecorder : MonoBehaviour
{
    private Process process;
    private string ffmpegPath;
    private string videoName = "output.mp4";

    private void Awake()
    {
        // Application.dataPath cannot be read from a field initializer, so resolve it here.
        ffmpegPath = Application.dataPath + "/ffmpeg.exe";
    }

    public void StartRecording()
    {
        if (!File.Exists(ffmpegPath))
        {
            UnityEngine.Debug.LogError("ffmpeg.exe not found!");
            return;
        }

        string arguments = "-f dshow -i video=\"screen-capture-recorder\" -r 60 \"" + videoName + "\"";
        ProcessStartInfo processInfo = new ProcessStartInfo(ffmpegPath, arguments);
        processInfo.CreateNoWindow = true;
        processInfo.UseShellExecute = false;
        process = Process.Start(processInfo);
    }

    public void StopRecording()
    {
        if (process != null && !process.HasExited)
        {
            // Killing ffmpeg stops the capture immediately; the file may not be cleanly finalized.
            process.Kill();
            process = null;
        }
    }
}
```

In this code, StartRecording() launches a new process that runs ffmpeg with the given command-line arguments to record video, and StopRecording() terminates that process to end the recording. The arguments "-f dshow -i video=\"screen-capture-recorder\" -r 60" capture the screen through the DirectShow API at 60 frames per second (the "screen-capture-recorder" device is a separately installed DirectShow filter).

Note that the code above targets the Windows build of ffmpeg; on other systems or with other ffmpeg builds the command-line arguments have to be adjusted accordingly.
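Since "screen-capture-recorder" is a third-party DirectShow filter, it is worth verifying that it (or another capture device) actually exists on the target machine. ffmpeg can enumerate DirectShow devices with "-list_devices true -f dshow -i dummy"; below is a small sketch that runs that query from Unity and logs the result. The ffmpeg path is an assumption, matching the recorder example above.

```csharp
// Query ffmpeg for the DirectShow devices available on this machine.
// "-list_devices true -f dshow -i dummy" enumerates dshow devices; the list is printed to stderr.
using System.Diagnostics;
using UnityEngine;

public class DshowDeviceLister : MonoBehaviour
{
    public void ListDevices()
    {
        var info = new ProcessStartInfo
        {
            FileName = Application.dataPath + "/ffmpeg.exe",   // assumed location, same as the recorder above
            Arguments = "-hide_banner -list_devices true -f dshow -i dummy",
            UseShellExecute = false,
            CreateNoWindow = true,
            RedirectStandardError = true
        };

        using (var p = Process.Start(info))
        {
            string output = p.StandardError.ReadToEnd();   // ffmpeg writes the device list to stderr
            p.WaitForExit();
            UnityEngine.Debug.Log(output);                 // inspect this for "screen-capture-recorder"
        }
    }
}
```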