Unity中的RPC机制不能直接将视频进行传输,所以要先进行视频帧图片到字节的转换,再将字节流进行传输。
首先,客户端的代码如下
using UnityEngine;
using System.Collections;
public class Client : MonoBehaviour {
    // Address of the server to connect to.
    //string IP = "127.0.0.1"; // loopback, for testing on a single machine
    string IP = "192.168.1.100";
    // Texture holding the most recently received camera frame.
    public Texture2D m_recevieTex;
    // Port the server listens on; must match the server's Port field.
    int Port = 10100;
    // Chat/status message buffer displayed in the scroll view.
    string Message = "";
    // Current scroll position of the GUI scroll view.
    Vector2 Sc;
    // NGUI widget that displays the received camera frame.
    public UITexture m_uiCameraPlane;

    // Legacy immediate-mode GUI entry point; dispatches on the current
    // network peer state on every GUI event.
    void OnGUI() {
        switch (Network.peerType) {
        // No connection and no server running: offer to connect.
        case NetworkPeerType.Disconnected:
            StartConnect();
            break;
        // Running as the server: nothing to draw on this peer.
        case NetworkPeerType.Server:
            break;
        // Connected as a client: draw the client UI.
        case NetworkPeerType.Client:
            OnClient();
            break;
        // Connection attempt still in progress.
        case NetworkPeerType.Connecting:
            break;
        }
    }

    // Draws the connect button and starts the connection when clicked.
    void StartConnect() {
        if (GUILayout.Button("连接服务器")) {
            NetworkConnectionError error = Network.Connect(IP, Port);
            // Log anything other than a clean connect attempt.
            switch (error) {
            case NetworkConnectionError.NoError:
                break;
            default:
                Debug.Log("客户端错误" + error);
                break;
            }
        }
    }

    // Client-side UI: message box, text input and a send button.
    void OnClient() {
        Sc = GUILayout.BeginScrollView(Sc, GUILayout.Width(280), GUILayout.Height(400));
        // Show the last received chat/status message.
        GUILayout.Box(Message);
        // Editable text field for an outgoing message.
        Message = GUILayout.TextArea(Message);
        if (GUILayout.Button("发送")) {
            // Broadcast the current frame as a JPG byte stream to all peers;
            // the RPC target method is resolved by its string name.
            //networkView.RPC("ReciveMessage", RPCMode.All, Message);
            networkView.RPC("ReciveCameraTex", RPCMode.All, m_recevieTex.EncodeToJPG());
        }
        // Must be paired with BeginScrollView above.
        GUILayout.EndScrollView();
    }

    // RPC handler for plain chat messages. The [RPC] attribute is required
    // so the legacy networking layer can find this method by name.
    [RPC]
    void ReciveMessage(string msg, NetworkMessageInfo info) {
        // NetworkMessageInfo carries metadata (e.g. the sender) for the
        // message that was just received from the network.
        Message = "发送端" + info.sender + "消息" + msg;
    }

    // RPC handler for an incoming camera frame encoded as JPG/PNG bytes.
    [RPC]
    void ReciveCameraTex(byte[] camTex, NetworkMessageInfo info)
    {
        // Fix: reuse the existing texture instead of allocating a fresh
        // Texture2D on every received frame. Unity textures are native
        // resources that the garbage collector does not reclaim, so a
        // per-RPC allocation leaks memory. LoadImage resizes the texture
        // to the decoded image's dimensions, so the initial size does not
        // matter.
        if (m_recevieTex == null) {
            m_recevieTex = new Texture2D(Screen.width, Screen.height);
        }
        m_recevieTex.LoadImage(camTex);
        m_uiCameraPlane.mainTexture = m_recevieTex;
    }

    // The empty Start()/Update() stubs were removed: when present, Unity
    // invokes these magic methods every frame via a native-to-managed call
    // even though they do no work.
}
客户端主要是用来接收的,所以比较简单。在接收函数ReciveCameraTex中直接进行转换就可以了。
相对的服务端就比较复杂一点,代码如下
using UnityEngine;
using System.Collections;
public class Severs : MonoBehaviour {
    // NGUI widget showing the live webcam preview on the server.
    public UITexture m_cameraShowTex;
    // Readable texture the current frame is copied into before encoding.
    public Texture2D m_sendTex;
    //public Texture2D m_sendTexText;
    // Encoded (PNG) bytes of the last captured frame.
    public byte[] m_tempBytes;
    // Saved RenderTexture.active while a frame capture is in progress.
    private RenderTexture m_currentRT;
    // Offscreen target the main camera renders into for capture.
    private RenderTexture m_renderTex;
    int Port = 10100;
    string Message = "";
    string m_cameraName;
    // Scroll position (not used by the current GUI; kept for parity).
    Vector2 Sc;
    WebCamTexture m_cameraTex;
    float m_zoomRate; // zoom factor derived from the screen/webcam size ratio
    bool m_isPlay = false;
    bool m_isSend = false;

    void Start()
    {
        m_zoomRate = 1;
        // Small fixed-size capture target keeps the encoded payload small.
        m_renderTex = new RenderTexture(300, 300, 24, RenderTextureFormat.ARGB32);
        // m_renderTex = new RenderTexture(Screen.width, Screen.height, 24, RenderTextureFormat.ARGB32);
        m_sendTex = new Texture2D(m_renderTex.width, m_renderTex.height);
        Camera.main.targetTexture = m_renderTex;
        StartCoroutine(openCamera(0));
    }

    // Requests webcam permission and starts the selected camera.
    // whichOne indexes WebCamTexture.devices (e.g. front/back on mobile).
    IEnumerator openCamera(int whichOne)
    {
        yield return Application.RequestUserAuthorization(UserAuthorization.WebCam);
        if (Application.HasUserAuthorization(UserAuthorization.WebCam))
        {
            WebCamDevice[] devices = WebCamTexture.devices;
            // Fix: bail out when no camera exists at all. The original
            // indexed devices[0] unconditionally in the fallback branch and
            // threw IndexOutOfRangeException on camera-less machines.
            if (devices.Length == 0)
            {
                yield break;
            }
            if (devices.Length <= whichOne)
            {
                // Requested index unavailable: fall back to the first camera.
                m_cameraName = devices[0].name;
            }
            else
            {
                m_cameraName = devices[whichOne].name;
                if (whichOne > 0)
                {
                    /* Quaternion temp = m_uiCameraPlane.transform.localRotation;
                    temp.eulerAngles = new Vector3(0, 0, 90);
                    m_uiCameraPlane.transform.localRotation = temp;*/
                }
            }
            m_cameraTex = new WebCamTexture(m_cameraName, Screen.width, Screen.height, 30);
            m_cameraTex.anisoLevel = 9;
            // Fix: cast to float before dividing. Both operands are int, so
            // the original performed integer division and truncated the
            // ratios (typically to 0 or 1), defeating the zoom calculation.
            float heightRate = (float)Screen.height / m_cameraTex.height;
            float widthRate = (float)Screen.width / m_cameraTex.width;
            // Use the larger ratio so the preview covers the screen,
            // clamped to at most 2x.
            m_zoomRate = heightRate > widthRate ? heightRate : widthRate;
            if (m_zoomRate > 2)
            {
                m_zoomRate = 2;
            }
            m_cameraTex.Play();
            m_isPlay = true;
        }
    }

    // Legacy immediate-mode GUI entry point; dispatches on the network
    // peer state (disconnected / server / client / connecting) and, while
    // the camera plays, mirrors the webcam into the preview widget and
    // triggers frame sends.
    void OnGUI() {
        switch (Network.peerType) {
        // No server running yet: offer to create one.
        case NetworkPeerType.Disconnected:
            StartServer();
            break;
        // Running as the server: draw the server UI.
        case NetworkPeerType.Server:
            OnServer();
            break;
        // This peer is a client: nothing to draw here.
        case NetworkPeerType.Client:
            break;
        // Server initialization in progress.
        case NetworkPeerType.Connecting:
            break;
        }
        if (m_isPlay)
        {
            // Mirror the live webcam feed into the preview widget and match
            // the widget size to the camera resolution.
            m_cameraShowTex.mainTexture = m_cameraTex;
            m_cameraShowTex.width = m_cameraTex.width;
            m_cameraShowTex.height = m_cameraTex.height;
            // m_sendTex = RTImage(Camera.main);
            // m_tempBytes = m_sendTex.EncodeToPNG();
            if (m_isSend)
            {
                // changeSendTexToBytes clears m_isPlay synchronously before
                // its first yield, so repeated OnGUI events in one frame do
                // not pile up extra coroutines.
                StartCoroutine(changeSendTexToBytes());
            }
        }
    }

    // Captures the current camera frame, PNG-encodes it and broadcasts the
    // bytes via RPC. m_isPlay gates OnGUI from starting another capture
    // until this one has been sent.
    IEnumerator changeSendTexToBytes()
    {
        m_isPlay = false;
        RTImage();
        m_tempBytes = m_sendTex.EncodeToPNG();
        // Throttle: wait before sending so frames are spaced out.
        yield return new WaitForSeconds(0.35f);
        networkView.RPC("ReciveCameraTex", RPCMode.All, m_tempBytes);
        m_isPlay = true;
    }

    // Copies the main camera's offscreen render target into m_sendTex so
    // the pixels become CPU-readable for encoding.
    void RTImage()
    {
        m_currentRT = RenderTexture.active;
        RenderTexture.active = m_renderTex;
        Camera.main.Render();
        // NOTE(review): this rect uses the NGUI widget's dimensions, which
        // are set to the webcam resolution and may exceed the 300x300
        // render texture being read — confirm the intended capture size.
        m_sendTex.ReadPixels(new Rect(0, 0, m_cameraShowTex.width, m_cameraShowTex.height), 0, 0);
        m_sendTex.Apply();
        RenderTexture.active = m_currentRT;
    }

    // Variant of the capture helper (currently unreferenced) that renders
    // an arbitrary camera's target texture into a newly allocated
    // Texture2D and returns it. Callers own the returned texture.
    Texture2D RTImage(Camera cam)
    {
        RenderTexture currentRT = RenderTexture.active;
        RenderTexture.active = cam.targetTexture;
        cam.Render();
        Texture2D image = new Texture2D(cam.targetTexture.width, cam.targetTexture.height);
        image.ReadPixels(new Rect(0, 0, cam.targetTexture.width, cam.targetTexture.height), 0, 0);
        image.Apply();
        RenderTexture.active = currentRT;
        return image;
    }

    // Draws the "create server" button and initializes the listen socket
    // when clicked.
    void StartServer() {
        if (GUI.Button(new Rect(10, 10, 100, 20), "创建服务器")) {
            // First argument is the maximum number of incoming connections.
            NetworkConnectionError error = Network.InitializeServer(12, Port, false);
            // Log anything other than a clean initialization.
            switch (error) {
            case NetworkConnectionError.NoError:
                break;
            default:
                Debug.Log("服务端错误" + error);
                break;
            }
        }
    }

    // Server-side UI: lists connected clients and exposes the disconnect
    // and send buttons.
    void OnServer() {
        GUI.Label(new Rect(10, 30, 100, 20), "服务端已经运行,等待客户端连接");
        // Network.connections holds one entry per connected client.
        int length = Network.connections.Length;
        for (int i = 0; i < length; i++)
        {
            GUI.Label(new Rect(10, 50, 300, 20), "客户端" + i);
            GUI.Label(new Rect(10, 70, 300, 20), "客户端ip" + Network.connections[i].ipAddress);
            GUI.Label(new Rect(10, 90, 300, 20), "客户端端口" + Network.connections[i].port);
            GUI.Label(new Rect(10, 100, 300, 20), "-------------------------------");
        }
        if (GUI.Button(new Rect(10, 120, 300, 20), "断开服务器")) {
            Network.Disconnect();
        }
        if (GUI.Button(new Rect(500, 120, 300, 20), "发送"))
        {
            // Latches sending on; OnGUI starts the capture coroutine from
            // here on, one frame at a time.
            m_isSend = true;
        }
    }

    // RPC handler counterpart: RPCMode.All also invokes the RPC on the
    // sending peer, so the server needs a matching (empty) handler to
    // avoid "RPC not found" errors.
    [RPC]
    void ReciveCameraTex(byte[] camTex, NetworkMessageInfo info)
    {
        //m_sendTexText.LoadImage(m_tempBytes);
    }
}
因为Unity中获取得到的摄像头视频,不能直接转换成字节,必须要先转换成Texture2D之后再使用EncodeToPNG()函数转换成字节,而摄像头的WebCamTexture也不能直接转换成Texture2D,官网上提供了这样一个转换方法
Texture2D RTImage(Camera cam)
{
RenderTexture currentRT = RenderTexture.active;
RenderTexture.active = cam.targetTexture;
cam.Render();
Texture2D image = new Texture2D(cam.targetTexture.width, cam.targetTexture.height);
image.ReadPixels(new Rect(0, 0, cam.targetTexture.width, cam.targetTexture.height), 0, 0);
image.Apply();
RenderTexture.active = currentRT;
return image;
}
好了,大功告成。我写得比较粗糙,没有做缓存处理和关键帧的计算,主要集中在怎么把摄像头的WebCamTexture转换成字节上。需要有一定Unity RPC基础的童鞋才能看懂并调试我的代码。在两台电脑之间运行比较流畅,在手机上还是比较卡。