最近项目中需要视频会话功能,现在将该功能的代码贴出来,供大家参考,效果不是很好。
布局控件采用ImageView(显示远端视频)、SurfaceView(显示本端视频)。整体思路如下:
将相机的视频帧获取到,通过SurfaceHolder将视频帧显示在SurfaceView上,同时通过UDP将视频数据实时发送至远端客户端;远端客户端接收到数据帧后转换为 Bitmap 显示在 ImageView 上。由于该过程刷新速度较快,ImageView 上就会呈现连续的画面,实际上显示的是一幅幅静态图片。
代码如下:
public class ShiPin extends Activity implements Callback, OnClickListener {
private ImageView reciverIv;//显示远端视频
private SurfaceView senderSv;//显示本端视频
private Button guaduanBt;//挂断Button
private SurfaceHolder sendHolder;//SurfaceView填充器,用来将视频数据加载进SurfaceView
private int screenWidth = 1024;
private int screenHeight = 768;
private String ipaddress = "";
private Camera camera;// 定义系统所用的照相机
private boolean isPreview = false;//判断相机是否在工作
private DatagramSocket dS;//UDP
private boolean flag = true;
private int width;
private int heigth;
private String sender;//发送者
private String reciver;//接收者
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
WindowManager.LayoutParams.FLAG_FULLSCREEN);
setContentView(R.layout.activity_shipin);
Intent intent = getIntent();
reciver = intent.getStringExtra("Reciver");
sender = intent.getStringExtra("Sender");
DisplayMetrics metric = new DisplayMetrics();
getWindowManager().getDefaultDisplay().getMetrics(metric);
width = metric.widthPixels;
heigth = metric.heightPixels;
ipaddress = Util.getStrPreference(this, Util.SHAREDFILENAME,
Util.IPADDRESS_Key, "");
try {
dS = new DatagramSocket();
} catch (SocketException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
senderSv = (SurfaceView) this.findViewById(R.id.sendsv);
reciverIv = (ImageView) this.findViewById(R.id.recive_iv);
//挂断按键
guaduanBt = (Button) this.findViewById(R.id.guaduan_bt);
guaduanBt.setOnClickListener(this);
//启动接收线程,接收视频数据代码不展示,根据服务器端代码不同而不同
new Thread(new ReciveThread(dS)).start();
sendHolder = senderSv.getHolder();
sendHolder.addCallback(this);
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
//初始化相机
initCamera();
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width,
int height) {
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
if (camera != null) {
if (isPreview) {
camera.stopPreview();
camera.release();
camera = null;
}
}
}
//相机初始化方法
private void initCamera() {
try {
if (!isPreview) {
camera = Camera.open();
}
if (camera != null && !isPreview) {
camera.setPreviewDisplay(sendHolder);
Camera.Parameters parameters = camera.getParameters();
// camera.setDisplayOrientation(180);
parameters.setPreviewSize(200, 400);
parameters.setPreviewFpsRange(20, 30);
parameters.setPictureFormat(ImageFormat.NV21);//设置图像格式
parameters.setPictureSize(width, heigth);
camera.setPreviewCallback(new Stream());// 设置回调的类
camera.startPreview();
camera.autoFocus(null);
camera.startPreview();
isPreview = true;
}
} catch (IOException e) {
camera.stopPreview();
camera.release();
camera = null;
}
}
//照相机回调类,通过该类获取到相机数据
class Stream implements PreviewCallback {
@Override
public void onPreviewFrame(byte[] data, Camera camera) {
Size size = camera.getParameters().getPreviewSize();
//将相机数据转化为NV21的YuvImage对象
YuvImage image = new YuvImage(data, ImageFormat.NV21, size.width,
size.height, null);
if (image != null) {
ByteArrayOutputStream baos = new ByteArrayOutputStream();
//压缩格式
image.compressToJpeg(new Rect(0, 0, size.width, size.height),
80, baos);
try {
baos.flush();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
byte[] StreamData = baos.toByteArray();//得到视频数据的自己数组
sendVodio(sender, reciver, StreamData);//发送视频帧数据
//如何发送自己定义,本列采用UDP来完成数据发送
}
}
}
// 挂断视频
@Override
public void onClick(View v) {
camera.stopPreview();
camera.release();
camera = null;
flag = false;
dS.close();
finish();
}
//发送视频,该代码中包含对UDP数据的封装,和服务器端对UDP数据的解析需要一致
private void sendVodio(String Sender, String reciver, byte[] data) {
byte header[] = new byte[100];
byte strb[] = (Sender + "@" + reciver).getBytes();
System.arraycopy(strb, 0, header, 0, strb.length);
byte body[] = data;// 视频数据
byte[] sendData = new byte[header.length + body.length];
System.arraycopy(header, 0, sendData, 0, header.length);
System.arraycopy(body, 0, sendData, header.length, body.length);
new Thread(new SendThread(sendData, dS)).start();
}
//发送视频数据线程
class SendThread implements Runnable {
byte[] data;
DatagramSocket ds;
public SendThread(byte[] data, DatagramSocket ds) {
this.data = data;
this.ds = ds;
}
@Override
public void run() {
try {
DatagramPacket dp = new DatagramPacket(data, data.length,
InetAddress.getByName(ipaddress), 8888);
ds.send(dp);
} catch (Exception e) {
e.printStackTrace();
}
}
}
//接收数据线程
class ReciveThread implements Runnable {
DatagramSocket dSocket;
byte[] buff = new byte[1024 * 1024];
public ReciveThread(DatagramSocket dSocket) {
this.dSocket = dSocket;
}
@Override
public void run() {
while (flag) {
DatagramPacket ds = new DatagramPacket(buff, 0, buff.length);
try {
dSocket.receive(ds);
byte[] data = ds.getData();
Util.Message(Util.SHIPINTAG, data, handler);
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
}
}
//接受到的视频数据,显示在ImageView上
Handler handler = new Handler() {
public void handleMessage(android.os.Message msg) {
switch (msg.what) {
case Util.SHIPINTAG:
byte[] data = (byte[]) msg.obj;
Bitmap bmp = BitmapFactory
.decodeByteArray(data, 0, data.length);
reciverIv.setImageBitmap(bmp);
break;
}
};
};
}