【Unity】Grabbing an Image of a Specific Frame from a Video

using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.UI;
using UnityEngine.Video;

public class Test : MonoBehaviour
{
    #region Grab a frame image from the video
    VideoPlayer video;        // the video player
    Texture2D videoTexture;   // 2D texture that will hold the captured frame
    RenderTexture renderTexture;
    Sprite sprite;
    public Image image;

   
    void Start()
    {
        InitVideoToImage();
    }
    void InitVideoToImage()
    {
      
        videoTexture = new Texture2D(2, 2);
        video = GetComponent<VideoPlayer>(); // the VideoPlayer that plays the clip
        video.playOnAwake = false;
        video.waitForFirstFrame = true;
        video.sendFrameReadyEvents = true;   // enable the frameReady event: when true, any delegates registered with VideoPlayer.frameReady are invoked as each frame becomes ready to be drawn; when false, registered delegates are not invoked
        video.frameReady += GetNumTexture;   // invoked when a new frame is ready
        video.Play();
    }
    int framesValue = 0; // which frame of the video to capture
    /// <summary>
    /// Grabs the texture once the framesValue-th frame is ready.
    /// </summary>
    void GetNumTexture(VideoPlayer source, long frameIdx)
    {
        framesValue++;
        if (framesValue == 1)
        {
            renderTexture = source.texture as RenderTexture; // the internal texture the video content is rendered into
            if (videoTexture.width != renderTexture.width || videoTexture.height != renderTexture.height)
            {
                videoTexture.Resize(renderTexture.width, renderTexture.height);
            }
            // make the video's RenderTexture the currently active render texture
            RenderTexture.active = renderTexture;
            videoTexture.ReadPixels(new Rect(0, 0, renderTexture.width, renderTexture.height), 0, 0); // read the active render texture's pixels into the saved texture data
            videoTexture.Apply();            // upload the pixel changes
            RenderTexture.active = null;
            video.frameReady -= GetNumTexture;
            video.sendFrameReadyEvents = false;

            StartCoroutine(ScaleTexture(videoTexture, 1920, 1080));
        }
    }
    /// <summary>
    /// Generates the thumbnail and assigns it to the UI Image.
    /// </summary>
    IEnumerator ScaleTexture(Texture2D source, int targetWidth, int targetHeight)
    {
        yield return new WaitForSeconds(2);

        Texture2D result = new Texture2D(targetWidth, targetHeight, TextureFormat.ARGB32, false);

        for (int i = 0; i < result.height; ++i)
        {
            for (int j = 0; j < result.width; ++j)
            {
                Color newColor = source.GetPixelBilinear((float)j / (float)result.width, (float)i / (float)result.height);
                result.SetPixel(j, i, newColor);
            }
        }
        result.Apply();

        sprite = Sprite.Create(result, new Rect(0, 0, targetWidth, targetHeight), new Vector2(0.5f, 0.5f));
        image.sprite = sprite;
       
    }
    #endregion

}
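
A note on the scaling step: the nested GetPixelBilinear / SetPixel loop in ScaleTexture runs entirely on the CPU and can be slow for large frames. A minimal alternative sketch (not from the original post; ScaleWithBlit is a hypothetical helper name) that lets the GPU do the resize with Graphics.Blit and then reads the result back:

    Texture2D ScaleWithBlit(Texture source, int targetWidth, int targetHeight)
    {
        // Blit the frame into a temporary RenderTexture of the target size (GPU-side bilinear resize).
        RenderTexture temp = RenderTexture.GetTemporary(targetWidth, targetHeight, 0, RenderTextureFormat.ARGB32);
        Graphics.Blit(source, temp);

        // Read the scaled pixels back into a Texture2D, restoring the previously active render texture afterwards.
        RenderTexture previous = RenderTexture.active;
        RenderTexture.active = temp;
        Texture2D result = new Texture2D(targetWidth, targetHeight, TextureFormat.ARGB32, false);
        result.ReadPixels(new Rect(0, 0, targetWidth, targetHeight), 0, 0);
        result.Apply();
        RenderTexture.active = previous;
        RenderTexture.ReleaseTemporary(temp);
        return result;
    }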

Adding a callback delegate that fires once the image has been captured

using System.IO;
using UnityEngine;
using UnityEngine.Video;

/// <summary>Generates a preview (thumbnail) image for a video.</summary>
public class VideoPreview : MonoBehaviour
{
    /// <summary>Counts frame-ready events so a specific frame can be captured.</summary>
    private int framesValue = 0;
    private VideoPlayer videoPlayer;
    private Texture2D videoFrameTexture;
    private RenderTexture renderTexture;
    /// <summary>Path or URL of the source video.</summary>
    private string videoUrl;
    /// <summary>Path where the video preview image is stored.</summary>
    private string videoPreviewPath;

    public delegate void VideoThumbCompleteEvent(Texture2D texture);
    private VideoThumbCompleteEvent onCompleted;

    public static VideoPreview Create()
    {
        GameObject go = new GameObject("VideoPreview");
        VideoPreview videoPreview = go.AddComponent<VideoPreview>();
        return videoPreview;
    }

    private void OnDestroy()
    {
        videoFrameTexture = null;
        renderTexture = null;
    }

    /// <summary>
    /// Generates a thumbnail for the given video.
    /// </summary>
    /// <param name="videoUrl">Video path or URL.</param>
    /// <param name="completed">Callback invoked once the thumbnail has been generated.</param>
    public void VideoThumb(string videoUrl, VideoThumbCompleteEvent completed)
    {
        this.onCompleted = completed;
        this.videoUrl = videoUrl;
        string[] nameArr = videoUrl.Split('/');
        string imageName = nameArr[nameArr.Length - 1]; // the thumbnail is named after the last segment of the video URL
        videoPreviewPath = Application.persistentDataPath + "/" + imageName + ".png";

        videoFrameTexture = new Texture2D(2, 2);
        videoPlayer = gameObject.AddComponent<VideoPlayer>();
        videoPlayer.source = VideoSource.Url;
        videoPlayer.url = videoUrl;
        videoPlayer.playOnAwake = false;
        videoPlayer.waitForFirstFrame = true;

        videoPlayer.sendFrameReadyEvents = true;
        videoPlayer.frameReady += onFrameReady;
        videoPlayer.Play();
    }

    private void onFrameReady(VideoPlayer source, long frameIdx)
    {
        framesValue++;
        if (framesValue == 5)
        {
            renderTexture = source.texture as RenderTexture;
            if (videoFrameTexture.width != renderTexture.width || videoFrameTexture.height != renderTexture.height)
            {
                videoFrameTexture.Resize(renderTexture.width, renderTexture.height);
            }
            RenderTexture.active = renderTexture;
            videoFrameTexture.ReadPixels(new Rect(0, 0, renderTexture.width, renderTexture.height), 0, 0);
            videoFrameTexture.Apply();
            RenderTexture.active = null;
            videoPlayer.frameReady -= onFrameReady;
            videoPlayer.sendFrameReadyEvents = false;

            scaleTexture(videoFrameTexture, 600, 300, videoPreviewPath);
        }
    }

    // build the thumbnail and save it to disk
    private void scaleTexture(Texture2D source, int targetWidth, int targetHeight, string savePath)
    {
        Texture2D result = new Texture2D(targetWidth, targetHeight, TextureFormat.ARGB32, false);
        for (int i = 0; i < result.height; ++i)
        {
            for (int j = 0; j < result.width; ++j)
            {
                Color newColor = source.GetPixelBilinear((float)j / (float)result.width, (float)i / (float)result.height);
                result.SetPixel(j, i, newColor);
            }
        }
        result.Apply();
        File.WriteAllBytes(savePath, result.EncodeToPNG()); // the save path uses a .png extension, so encode as PNG
        if (onCompleted != null) onCompleted(result);
        Destroy(gameObject);
    }

}
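
If the preview only needs to be generated once, the PNG that scaleTexture writes to disk can simply be loaded back later instead of decoding the video again. A hedged sketch of a static helper that could be added to VideoPreview (LoadPreview is a hypothetical name, not part of the original class):

    public static Texture2D LoadPreview(string path)
    {
        // Return null when no cached preview has been written yet.
        if (!File.Exists(path)) return null;

        byte[] bytes = File.ReadAllBytes(path);
        Texture2D texture = new Texture2D(2, 2); // size is replaced by LoadImage
        texture.LoadImage(bytes);                // decode the PNG/JPG bytes into the texture
        return texture;
    }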

Test:


using UnityEngine;
using UnityEngine.UI;
 
public class Test : MonoBehaviour
{
    /// <summary>UI element used to display the thumbnail of video 1.</summary>
    public RawImage rawImage01;

    /// <summary>Path of video 1 (local file or network URL).</summary>
    private string videoUrl1 = "/Users/chenyongliang/Desktop/相册/1657100515231.mp4";
   
 
    private void Start()
    {
        VideoPreview videoPreview1 = VideoPreview.Create();
 
        videoPreview1.VideoThumb(videoUrl1, (texture) =>
        {
            rawImage01.texture = texture;
        });

    }
}

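Building on the hypothetical LoadPreview helper sketched above, the test could also reuse a previously saved thumbnail and only fall back to VideoThumb when no cached file exists. The path construction below mirrors what VideoThumb builds internally; treat this as an assumption, not part of the original post:

    private void ShowPreviewCached(string url, RawImage target)
    {
        // Rebuild the same save path that VideoPreview.VideoThumb uses for its preview image.
        string[] parts = url.Split('/');
        string previewPath = Application.persistentDataPath + "/" + parts[parts.Length - 1] + ".png";

        Texture2D cached = VideoPreview.LoadPreview(previewPath); // null if nothing has been cached yet
        if (cached != null)
        {
            target.texture = cached; // reuse the saved thumbnail
            return;
        }

        VideoPreview.Create().VideoThumb(url, (texture) => { target.texture = texture; });
    }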