Unity中基于RPC机制实现摄像头视频传输

Unity中的RPC机制不能直接将视频进行传输,所以要先进行视频帧图片到字节的转换,再将字节流进行传输。

首先,客户端的代码如下

using UnityEngine;
using System.Collections;

/// <summary>
/// Receiving end of the RPC camera stream. Connects to the server,
/// receives JPG/PNG-encoded frames via the <c>ReciveCameraTex</c> RPC,
/// decodes them into a Texture2D and shows them on an NGUI UITexture.
/// Uses Unity's legacy networking (Network / NetworkView / [RPC]).
/// </summary>
public class Client : MonoBehaviour {
	// Server address to connect to.
	//string IP = "127.0.0.1"; // localhost
    string IP = "192.168.1.100";
    // Texture holding the most recently decoded camera frame.
    public  Texture2D m_recevieTex;
   // string IP = "192.168.1.102";
	// Port to connect to (must match the server's listen port).
	int Port = 10100;
	// Chat message buffer shown in the scroll view.
	string Message = "";
	// Scroll position of the immediate-mode GUI scroll view.
	Vector2 Sc;
    // NGUI widget that displays the received camera frame.
    public UITexture m_uiCameraPlane;

	// All legacy immediate-mode GUI drawing, driven by the peer state.
	void OnGUI(){
		switch(Network.peerType){
			// Not connected, no server running: offer the connect button.
			case NetworkPeerType.Disconnected:
				StartConnect();
			    break;
			// Running as a server: nothing for this component to draw.
			case NetworkPeerType.Server:
			    break;
			// Running as a client: draw the chat/send UI.
			case NetworkPeerType.Client:
			    OnClient();
			    break;
			// Connection attempt in progress: wait.
			case NetworkPeerType.Connecting:
				break;
		}
	}

	// Draws the connect button and initiates the connection to the server.
	void StartConnect(){
		if (GUILayout.Button("连接服务器")){
			NetworkConnectionError error = Network.Connect(IP,Port);
			// Log anything other than a clean connection start.
			if (error != NetworkConnectionError.NoError){
				Debug.Log("客户端错误"+error);
			}
		}
	}

	// Client-side GUI: message box plus a button that pushes the local
	// texture to all peers (mainly useful for testing the RPC path).
	void OnClient(){
		Sc = GUILayout.BeginScrollView(Sc,GUILayout.Width(280),GUILayout.Height(400));
		GUILayout.Box(Message);
		Message = GUILayout.TextArea(Message);
		if (GUILayout.Button("发送")){
			//networkView.RPC("ReciveMessage", RPCMode.All, Message);
            networkView.RPC("ReciveCameraTex", RPCMode.All, m_recevieTex.EncodeToJPG());
		}
		// Must be paired with BeginScrollView above.
		GUILayout.EndScrollView();
	}

    // RPC target: plain-text chat message.
	[RPC]
	void ReciveMessage(string msg, NetworkMessageInfo info){
		// NetworkMessageInfo carries metadata about the incoming packet,
		// including the sender's NetworkPlayer.
		Message = "发送端"+info.sender  +"消息"+msg;
	}

    // RPC target: one encoded camera frame as raw image bytes.
    [RPC]
    void ReciveCameraTex(byte[] camTex, NetworkMessageInfo info)
    {
        // BUG FIX: the original allocated a new Texture2D(Screen.width,
        // Screen.height) for every received frame. Unity textures are
        // native resources that the GC does not reclaim, so that leaked
        // memory continuously. LoadImage resizes the texture to the
        // decoded image's dimensions, so one reusable texture suffices.
        if (m_recevieTex == null)
        {
            m_recevieTex = new Texture2D(2, 2);
        }
        m_recevieTex.LoadImage(camTex);
        m_uiCameraPlane.mainTexture = m_recevieTex;
    }
    // Empty Start()/Update() removed: empty MonoBehaviour magic methods
    // still incur a per-frame call from the engine.
}
客户端主要是用来接收的,所以比较简单:在接收函数ReciveCameraTex中直接进行转换就可以了。

相对的服务端就比较复杂一点,代码如下

using UnityEngine;
using System.Collections;
/// <summary>
/// Sending end of the RPC camera stream. Captures the local webcam into a
/// WebCamTexture, renders the main camera into a small RenderTexture,
/// reads the pixels back into a Texture2D, encodes them to PNG and pushes
/// the bytes to all peers via the <c>ReciveCameraTex</c> RPC.
/// Uses Unity's legacy networking (Network / NetworkView / [RPC]).
/// </summary>
public class Severs : MonoBehaviour {
    // NGUI widget that previews the local webcam feed.
    public UITexture m_cameraShowTex;
    // CPU-side texture the render target is read back into before encoding.
    public Texture2D m_sendTex;
    //public Texture2D m_sendTexText;
    // Last PNG-encoded payload sent over the network.
    public byte[] m_tempBytes;
    // Saved active render target, restored after readback.
    private RenderTexture m_currentRT;
    // Small fixed-size render target; keeps the encoded payload small.
    private RenderTexture m_renderTex;
	// Listen port (must match the client's connect port).
	int Port = 10100;
	string Message = "";
    string m_cameraName;
	// Scroll position for GUI use.
	Vector2 Sc;
    WebCamTexture m_cameraTex;
    float m_zoomRate; // zoom factor derived from screen/camera aspect ratios
    bool m_isPlay=false;   // webcam running and safe to (re)start a send
    bool m_isSend = false; // user pressed "send": stream frames to peers

    void Start()
    {
        m_zoomRate = 1;
        m_renderTex = new RenderTexture(300, 300, 24, RenderTextureFormat.ARGB32);
       // m_renderTex = new RenderTexture(Screen.width,Screen.height, 24, RenderTextureFormat.ARGB32);
        m_sendTex = new Texture2D(m_renderTex.width,m_renderTex.height);
        Camera.main.targetTexture = m_renderTex;
        StartCoroutine(openCamera(0));
    }

    // Requests webcam permission and starts the device at index whichOne
    // (on phones this selects the front or back camera).
    IEnumerator openCamera(int whichOne)
    {
        yield return Application.RequestUserAuthorization(UserAuthorization.WebCam);
        if (Application.HasUserAuthorization(UserAuthorization.WebCam))
        {
            WebCamDevice[] devices = WebCamTexture.devices;
            // Guard: no camera at all — the original indexed devices[0]
            // unconditionally and would throw IndexOutOfRangeException.
            if (devices.Length == 0)
            {
                Debug.Log("No webcam device found");
                yield break;
            }
            // Fall back to the first device when the requested index is
            // out of range.
            if (devices.Length <= whichOne)
            {
                m_cameraName = devices[0].name;
            }
            else
            {
                m_cameraName = devices[whichOne].name;
                if (whichOne > 0)
                {
                   /* Quaternion temp = m_uiCameraPlane.transform.localRotation;
                    temp.eulerAngles = new Vector3(0, 0, 90);
                    m_uiCameraPlane.transform.localRotation = temp;*/
                }
            }
            m_cameraTex = new WebCamTexture(m_cameraName, Screen.width, Screen.height, 30);
            m_cameraTex.anisoLevel = 9;
            // BUG FIX: the original divided two ints, truncating both
            // ratios to whole numbers before assigning to float.
            // NOTE(review): WebCamTexture may report a placeholder size
            // until the first frame arrives — confirm on target devices.
            float heightRate = (float)Screen.height / m_cameraTex.height;
            float widthRate = (float)Screen.width / m_cameraTex.width;
            m_zoomRate = heightRate > widthRate ? heightRate : widthRate;
            // Clamp the zoom so the preview is never scaled beyond 2x.
            if (m_zoomRate > 2)
            {
                m_zoomRate = 2;
            }
            m_cameraTex.Play();
            m_isPlay = true;
        }
    }

	// GUI drawing plus the per-frame send trigger, driven by peer state.
	void OnGUI(){
		switch(Network.peerType){
			// No server yet: offer the create-server button.
			case NetworkPeerType.Disconnected:
				StartServer();
			    break;
			// Running as server: show connection info and controls.
			case NetworkPeerType.Server:
				OnServer();
			    break;
			case NetworkPeerType.Client:
			    break;
			case NetworkPeerType.Connecting:
				break;
		}

        if (m_isPlay)
        {
            // Show the live webcam feed on the preview widget.
            m_cameraShowTex.mainTexture = m_cameraTex;
            m_cameraShowTex.width= m_cameraTex.width;
            m_cameraShowTex.height = m_cameraTex.height;

            if (m_isSend)
            {
                // m_isPlay is cleared synchronously inside the coroutine,
                // so at most one send is in flight at a time even though
                // OnGUI runs multiple times per frame.
                StartCoroutine(changeSendTexToBytes());
            }
        }
	}

    // Captures one frame, encodes it to PNG and broadcasts it via RPC.
    // m_isPlay gates re-entry while a send is in flight.
    IEnumerator changeSendTexToBytes()
    {
        m_isPlay = false;
        RTImage();
        m_tempBytes = m_sendTex.EncodeToPNG();
        // Throttle: roughly 3 frames per second over the RPC channel.
        yield return new WaitForSeconds(0.35f);
        networkView.RPC("ReciveCameraTex", RPCMode.All, m_tempBytes);
        m_isPlay = true;
    }

    // Renders the main camera into m_renderTex and reads the pixels back
    // into m_sendTex so they can be encoded.
    void RTImage()
    {
        m_currentRT = RenderTexture.active;
        RenderTexture.active = m_renderTex;
        Camera.main.Render();
        // BUG FIX: read back the render target's own size. The original
        // used m_cameraShowTex.width/height (the webcam resolution), which
        // can exceed the 300x300 render texture and makes ReadPixels read
        // out of bounds.
        m_sendTex.ReadPixels(new Rect(0, 0, m_renderTex.width, m_renderTex.height), 0, 0);
        m_sendTex.Apply();
        RenderTexture.active = m_currentRT;
    }

    // Official Unity recipe: render a camera into its target RenderTexture
    // and read the pixels back into a new CPU-side Texture2D.
    // Kept for reference; the streaming path uses RTImage() above.
    Texture2D RTImage(Camera cam)
    {
        RenderTexture currentRT = RenderTexture.active;
        RenderTexture.active = cam.targetTexture;
        cam.Render();
        Texture2D image = new Texture2D(cam.targetTexture.width, cam.targetTexture.height);
        image.ReadPixels(new Rect(0, 0, cam.targetTexture.width, cam.targetTexture.height), 0, 0);
        image.Apply();
        RenderTexture.active = currentRT;
        return image;
    }

	// Draws the create-server button and initializes the listen socket.
	void StartServer(){
		if ( GUI.Button(new Rect(10, 10, 100, 20),"创建服务器")) {
			// First argument is the maximum number of incoming connections.
			NetworkConnectionError error = Network.InitializeServer(12,Port,false);
			if (error != NetworkConnectionError.NoError){
				Debug.Log("服务端错误"+error);
			}
		}
	}

	// Server-side GUI: lists connected clients and exposes the
	// disconnect / start-sending buttons.
	void OnServer(){
		 GUI.Label(new Rect(10, 30, 100, 20),"服务端已经运行,等待客户端连接");
		// Network.connections holds every connected peer.
		int length = Network.connections.Length;
		for (int i=0; i<length; i++)
		{
            GUI.Label(new Rect(10, 50, 300, 20),"客户端" + i);
			GUI.Label(new Rect(10, 70, 300, 20),"客户端ip"+Network.connections[i].ipAddress);
			GUI.Label(new Rect(10, 90, 300, 20),"客户端端口"+Network.connections[i].port);
			 GUI.Label(new Rect(10, 100, 300, 20),"-------------------------------");
		}
		if (GUI.Button(new Rect(10, 120,300, 20),"断开服务器")){
			Network.Disconnect();
		}
        if(GUI.Button(new Rect(500, 120,300, 20),"发送"))
        {
            m_isSend = true;
        }
	}

	// RPC target kept so the server can receive frames too; the body is
	// intentionally disabled (server only sends in this demo).
	[RPC]
    void ReciveCameraTex(byte[] camTex, NetworkMessageInfo info)
    {
        //m_sendTexText.LoadImage(m_tempBytes);
    }
}
因为Unity中获取得到的摄像头视频,不能直接转换成字节,必须要先转换成Texture2D之后再使用EncodeToPNG()函数转换成字节;而摄像头的WebCamTexture也不能直接转换成Texture2D,官网上提供了这样一个转换方法
  // Official Unity recipe quoted in the article: render a camera into its
  // target RenderTexture, then read the pixels back into a CPU-side
  // Texture2D that can be encoded (EncodeToPNG / EncodeToJPG).
  Texture2D RTImage(Camera cam)
    {
        // Save whatever render target is active so it can be restored.
        RenderTexture currentRT = RenderTexture.active;
        RenderTexture.active = cam.targetTexture;
        cam.Render();
        Texture2D image = new Texture2D(cam.targetTexture.width, cam.targetTexture.height);
        // ReadPixels copies from the currently active RenderTexture.
        image.ReadPixels(new Rect(0, 0, cam.targetTexture.width, cam.targetTexture.height), 0, 0);
        image.Apply();
        RenderTexture.active = currentRT;
        return image;
    }

好了大功告成,我写的比较粗糙, 没有做缓存处理和关键帧的计算,主要集中在怎么转换摄像头的WebCamTexture到字节上,要有一定写UnityRPC基础的童鞋才可以看得懂并进行调试我的代码,在两个电脑上比较流畅,在手机上还是比较卡。

要在Unity使用OpenCVSharp来获取摄像头视频,你需要按照以下步骤进行操作: 1. 首先,下载并安装OpenCVSharp库。你可以从OpenCVSharp的GitHub页面(https://github.com/shimat/opencvsharp)下载最新版本的库。 2. 在Unity创建一个新的C#脚本,例如"CameraCapture.cs",并将其附加到一个GameObject上。 3. 在脚本导入OpenCVSharp库,你可以使用以下语句: ```csharp using OpenCvSharp; ``` 4. 在脚本编写代码来获取摄像头视频。下面是一个简单的示例: ```csharp using UnityEngine; using OpenCvSharp; public class CameraCapture : MonoBehaviour { private VideoCapture videoCapture; private Texture2D texture; void Start() { videoCapture = new VideoCapture(0); // 摄像头索引,0表示默认摄像头 if (!videoCapture.IsOpened()) { Debug.LogError("Failed to open camera!"); return; } texture = new Texture2D((int)videoCapture.FrameWidth, (int)videoCapture.FrameHeight, TextureFormat.RGBA32, false); } void Update() { Mat frame = new Mat(); videoCapture.Read(frame); if (!frame.Empty()) { // 将OpenCV的Mat转换为Unity的Texture2D texture.LoadRawTextureData(frame.Data, (int)(videoCapture.FrameWidth * videoCapture.FrameHeight * 4)); texture.Apply(); // 在Unity显示摄像头视频 GetComponent<Renderer>().material.mainTexture = texture; } } void OnDestroy() { videoCapture.Release(); texture.Dispose(); } } ``` 在上述示例,我们首先创建了一个VideoCapture对象来打开摄像头。然后,在Update方法,我们使用videoCapture.Read方法读取摄像头的每一帧,并将其转换为Unity的Texture2D对象。最后,我们将Texture2D对象应用到一个渲染器的材质上,从而在Unity显示摄像头视频。 请注意,这只是一个简单的示例,你可以根据自己的需求进行修改和扩展。 希望这能帮助到你!如果有任何问题,请随时提问。
评论 3
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值