在Unity中播放YUV视频时,采用每帧播放时再将YUV转化成RGB然后传入着色器的方式不仅会造成大量的内存开销,而且会造成播放卡顿的现象。
为了解决这个问题,笔者尝试将视频源在Start()中先全部转化成RGB存放在Texture2D[]数组中,然后在每帧渲染时直接从数组中索引对应的纹理即可,播放速度提升了50倍以上。
初始化数组长度
public Texture2D[] ts;
ts = new Texture2D[Length];
详细可参考:数组的初始化 array initializer is not allowed here
初始化元素
for (i = 0; i < frameCount; i++)
ts[i] = new Texture2D(width, height, TextureFormat.RGB24, false);
参考Texture2D初始化即可
参考代码
该例程实现了YUV420视频播放器的功能:
using System.Collections;
using System.Collections.Generic;
using System.IO;
using UnityEngine;
using UnityEngine.UI;
using System;
/// <summary>
/// Plays a planar YUV420 (I420) video by pre-decoding every frame to an RGB
/// Texture2D in Start(), then simply binding one texture per rendered frame.
/// Layout of each frame in the file: [Y plane (w*h)] [U plane (w*h/4)] [V plane (w*h/4)].
/// </summary>
public class RGBPlayer : MonoBehaviour
{
    // Number of frames actually decoded (clamped to MaxFrames).
    public int frameCount;
    // Index of the next frame to display.
    private int frameNow = 0;

    // Raw file contents; each sample is one byte (8-bit).
    byte[] file;

    // Frame dimensions in pixels. NOTE(review): hard-coded to match the sample
    // file below — confirm against the actual source video.
    private int w = 2048;
    private int h = 2048;

    // Kept for inspector/serialization compatibility; not used by the decoder.
    public Texture2D t = null;

    // One fully decoded RGB texture per frame.
    public Texture2D[] ts;
    public Renderer target;

    // Path of the planar YUV420 input file.
    private const string SourcePath = "D:/FFMPEG/bin/2048_2048_420_8b.yuv";
    // Upper bound on decoded frames (memory cap: each frame is a w*h RGB24 texture).
    private const int MaxFrames = 150;

    void Start()
    {
        // Read the whole file at once.
        file = File.ReadAllBytes(SourcePath);

        // FIX: the original overwrote the computed count with a hard-coded 150,
        // which reads past the end of the file when it holds fewer frames.
        // Clamp instead so we never decode more frames than the data contains.
        frameCount = Mathf.Min(GetFrameCount(file, w, h), MaxFrames);
        print("Frame Count : " + frameCount);

        ConvertRGB(file, w, h);
    }

    void Update()
    {
        if (frameNow > frameCount - 1) return;

        // Textures were uploaded (Apply) once during conversion, so displaying a
        // frame is just a texture bind — no per-frame GPU upload.
        target.sharedMaterial.SetTexture("_MainTex", ts[frameNow]);
        frameNow++;
        print("Frame : " + (frameNow) + " (" + (int)(frameNow / (float)(frameCount) * 100) + "%)");
    }

    /// <summary>Frames in the buffer: each I420 frame is width*height*3/2 bytes.</summary>
    int GetFrameCount(byte[] file, int width, int height)
    {
        return file.Length / ((width * height) * 3 / 2);
    }

    /// <summary>
    /// Decodes every frame of the I420 buffer into ts[], one RGB24 texture per frame.
    /// </summary>
    void ConvertRGB(byte[] file, int width, int height)
    {
        ts = new Texture2D[frameCount];

        int frameSize = width * height * 3 / 2;
        int ySize = width * height;      // size of the Y plane
        int uvSize = ySize / 4;          // size of each chroma plane (4:2:0)

        for (int f = 0; f < frameCount; f++)
        {
            ts[f] = new Texture2D(width, height, TextureFormat.RGB24, false);
            int offset = f * frameSize;

            // k indexes the chroma planes: one U and one V sample per 2x2 block,
            // stored row-major at half resolution — which is exactly the order
            // this double loop visits the blocks in.
            int k = 0;

            for (int y = 0; y < height; y += 2)
            {
                for (int x = 0; x < width; x += 2)
                {
                    // FIX: the original read U at (ySize - 1 + k), i.e. the first
                    // "U" sample was actually the last Y byte, while V (computed
                    // after k++) landed on the correct index. Both now use the
                    // standard I420 offsets.
                    float u = file[offset + ySize + k] - 128f;
                    float v = file[offset + ySize + uvSize + k] - 128f;
                    k++;

                    int i = y * width + x;
                    // Flip vertically: YUV rows are stored top-down, Unity
                    // texture rows run bottom-up.
                    int mY = height - 1 - y;

                    // FIX: the original passed "h * frameOffset + mY" as the pixel
                    // row — out of bounds for every frame after the first, since
                    // each frame has its own width*height texture. A plain mY is
                    // correct here.
                    SetRgbPixel(ts[f], x,     mY,     file[offset + i],             u, v);
                    SetRgbPixel(ts[f], x,     mY - 1, file[offset + i + width],     u, v);
                    SetRgbPixel(ts[f], x + 1, mY,     file[offset + i + 1],         u, v);
                    SetRgbPixel(ts[f], x + 1, mY - 1, file[offset + i + width + 1], u, v);
                }
            }

            // Upload once per texture here instead of every display frame in Update().
            ts[f].Apply();
        }
    }

    /// <summary>
    /// Converts one Y sample plus the block's shared chroma (u, v already centered
    /// at 0) to RGB using the BT.601-style coefficients and writes the pixel.
    /// </summary>
    private static void SetRgbPixel(Texture2D tex, int x, int y, byte yVal, float u, float v)
    {
        float r = (yVal + 1.4075f * v) / 255f;
        float g = (yVal - 0.3455f * u - 0.7169f * v) / 255f;
        float b = (yVal + 1.779f * u) / 255f;
        tex.SetPixel(x, y, new Color(r, g, b, 1f));
    }
}
参考链接
Unity3D-Documentation-Texture2DArray
Unity中Texture2DArray使用小结
Texture2DArray 功能测试