Unity接收FFmpeg的UDP推流
给大佬排面,后续为进一步记录笔记,以备自己回忆
实践名称:将桌面的ffmpeg推流
实践工具:Unity 2019.4.25f1c1 + Visual Studio 2019 + FFmpeg 2021(官网下载即可)
整体思路:
1.Unity+VS
- 创建Unity项目
- 在场景中Create Empty(空物体-Receiver)、Create 3D Object- plane/cube(Quad)
- 在Assets下创建materials材质(unlit/Texture)赋给Quad
- 创建C# Script - 最好自行创建,不要全部复制参考代码!会有一些报错!!
- 将脚本赋给Receiver,unlit/Texture材质赋到其Target Mat
2.FFMPeg
- 从电脑终端CMD,进入到ffmpeg所在目录下(个人-f:进F盘),对ffmpeg版本有一定要求
- 输入命令行
ffmpeg -f gdigrab -framerate 5 -i desktop -vf scale=256:256 -pix_fmt rgba -r 5 -f rawvideo udp://127.0.0.1:8888
(注:像素格式必须是 rgba——脚本中纹理为 RGBA32,每像素4字节,imageSize = size*size*4;若用 rgb8 等单字节格式,帧大小对不上,画面无法正确显示)
3.运行Unity效果展示
附代码
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using System;
using System.Collections.Generic;
using System.Net;
using System.Net.Sockets;
using UnityEngine;
using Debug = UnityEngine.Debug;
// NOTE: the original post put a "#..." remark on the class line; "#" is not a
// comment in C# and breaks compilation — C# comments use "//". Do not copy-paste blindly.
//
// Receives a raw RGBA video stream (e.g. from ffmpeg "-f rawvideo -pix_fmt rgba")
// over UDP and uploads completed frames into a Texture2D shown on targetMat.
public class UDP : MonoBehaviour
{
    // Material that displays the incoming video (assign an Unlit/Texture material in the Inspector).
    public Material targetMat;

    UdpClient client;
    int port = 8888;                           // UDP port ffmpeg streams to
    int receiveBufferSize = 1472 * 100000;     // OS socket receive buffer; large so packet bursts aren't dropped
    IPEndPoint ipEndPoint;
    private object obj = null;                 // user-state passed through BeginReceive (unused)
    private AsyncCallback AC;
    byte[] receivedBytes;
    Texture2D tex;

    // Edge length (pixels) of the square frame; must match ffmpeg's scale filter.
    public int size = 256;
    int imageSize = 0;          // bytes per frame = size * size * 4 (RGBA32)
    byte[] dump;                // large accumulation buffer for incoming packets
    int bufferSize = 0;
    int bufferIndex = 0;        // next write offset into dump
    int bufferFrameStart = 0;   // offset where the frame currently being filled starts
    byte[] temp;                // one-frame staging buffer uploaded to the texture

    // Guards frameIndex/frameBufferCount: ReceivedUDPPacket runs on a
    // thread-pool thread while FixedUpdate runs on the Unity main thread.
    readonly object frameLock = new object();
    Queue<int> frameIndex = new Queue<int>();  // start offsets of completed frames in dump
    int frameBufferCount = 0;

    void Start()
    {
        // RGBA32 = 4 bytes per pixel; matches the expected raw stream format.
        tex = new Texture2D(size, size, TextureFormat.RGBA32, false, false);
        tex.filterMode = FilterMode.Point;
        tex.wrapMode = TextureWrapMode.Clamp;

        imageSize = size * size * 4;
        temp = new byte[imageSize];

        // Initialize pixels with bright magenta so it is obvious when no stream
        // data has arrived yet (alpha left 0; Unlit/Texture ignores alpha).
        for (int i = 0; i < imageSize; i += 4)
        {
            temp[i] = 255;      // R
            temp[i + 1] = 0;    // G
            temp[i + 2] = 255;  // B
        }
        tex.LoadRawTextureData(temp);
        tex.Apply(false);

        bufferSize = imageSize * 100;  // room for ~100 frames before wrapping
        dump = new byte[bufferSize];

        targetMat.mainTexture = tex;
        InitializeUDPClient();
    }

    void FixedUpdate()
    {
        // Dequeue at most one completed frame per tick under the lock,
        // then do the (relatively expensive) copy/upload outside it.
        int frameStart = -1;
        lock (frameLock)
        {
            if (frameBufferCount > 0)
            {
                frameStart = frameIndex.Dequeue();
                frameBufferCount--;
            }
        }
        if (frameStart >= 0)
        {
            Buffer.BlockCopy(dump, frameStart, temp, 0, imageSize);
            tex.LoadRawTextureData(temp);
            tex.Apply(false);
        }
    }

    // Async receive callback — runs on a thread-pool thread.
    void ReceivedUDPPacket(IAsyncResult result)
    {
        try
        {
            receivedBytes = client.EndReceive(result, ref ipEndPoint);
            int len = receivedBytes.Length;

            // Buffer exhausted: wrap to the start (dropping the partial frame
            // in flight). The original code returned here WITHOUT re-arming
            // BeginReceive, which permanently stalled reception once the
            // buffer filled up.
            if (bufferIndex + len > bufferSize)
            {
                bufferIndex = 0;
                bufferFrameStart = 0;
            }

            Buffer.BlockCopy(receivedBytes, 0, dump, bufferIndex, len);
            bufferIndex += len;

            // A whole frame has accumulated — publish its start offset to the main thread.
            if (bufferIndex - bufferFrameStart >= imageSize)
            {
                lock (frameLock)
                {
                    frameIndex.Enqueue(bufferFrameStart);
                    frameBufferCount++;
                }
                bufferFrameStart += imageSize;
            }
        }
        catch (Exception e)
        {
            Debug.LogException(e);
        }

        // Re-arm the async receive for the next packet. The client may have
        // been closed by OnDestroy between callbacks; swallow that case so the
        // thread-pool thread does not die with an unhandled exception.
        try
        {
            client.BeginReceive(AC, obj);
        }
        catch (ObjectDisposedException)
        {
            // Socket closed during shutdown — stop listening.
        }
    }

    public void InitializeUDPClient()
    {
        ipEndPoint = new IPEndPoint(IPAddress.Any, port);
        client = new UdpClient();
        // Allow other sockets to bind the same port (useful while iterating in the editor).
        client.Client.SetSocketOption(SocketOptionLevel.Socket, SocketOptionName.ReuseAddress, optionValue: true);
        client.ExclusiveAddressUse = false;
        client.EnableBroadcast = true;
        client.Client.Bind(ipEndPoint);
        client.DontFragment = true;
        // Single assignment of the OS receive buffer (the original set it twice;
        // the later, larger value was the effective one and is kept here).
        client.Client.ReceiveBufferSize = receiveBufferSize;
        AC = new AsyncCallback(ReceivedUDPPacket);
        client.BeginReceive(AC, obj);
        Debug.Log("Started UDP listener..");
    }

    private void OnDestroy()
    {
        // Closing the client aborts any pending BeginReceive.
        if (client != null)
        {
            client.Close();
        }
    }
}