这里搜集了两种实现 Android 手机采集摄像头视频并通过 socket 实时传输的方法,两种都可以使用。
第一种如下:
1.通过客户端socket请求,服务端接受到请求后,获取socket的输出流对象outs。
2.服务端通过Camera的setPreviewCallback方法回调采集摄像头视频数据,将一张张图片数据压缩后发送到客户端;因为是图片,需要在图片开头加个标记,然后指定数据长度发送。
3.客户端根据数据开头的标记以及数据的长度,按一张张图片的形式接收数据,并采用 Handler 更新 ImageView。
主要代码:
服务端代码:
/**
 * Server-side activity: shows the camera preview on a SurfaceView and starts
 * the socket service ({@link CamThreadService}) that streams preview frames
 * to connected clients.
 */
public class MainActivity extends Activity {
    TextView tv;
    SurfaceView surfv;
    SurfaceHolder surfaceHolder;
    int screenWidth = 300, screenHeight = 300;

    /** Receives camera lifecycle commands and status text from the streaming service. */
    public Handler mHandler = new Handler() {
        @Override
        public void handleMessage(android.os.Message msg) {
            switch (msg.what) {
                case CamConstant.INIT_CAMERA:
                    // Camera is initialised from surfaceCreated() below; nothing to do here.
                    break;
                case CamConstant.RECYCLE_CAMERA:
                    CameraUtil.recycleCamera();
                    CamConstant.hadInit = false;
                    break;
                default:
                    // Any other message carries log text to append on screen.
                    tv.append((CharSequence) msg.obj + "\n");
                    break;
            }
        }
    };

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        // FIX: super.onCreate() must run before any view work (the original
        // called setContentView() first).
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        MainApplication.mHandler = this.mHandler;
        tv = (TextView) findViewById(R.id.textView);
        surfv = (SurfaceView) findViewById(R.id.surview);
        DisplayMetrics dm = new DisplayMetrics();
        getWindowManager().getDefaultDisplay().getMetrics(dm);
        screenWidth = dm.widthPixels;   // screen width in pixels
        screenHeight = dm.heightPixels; // screen height in pixels
        surfaceHolder = surfv.getHolder();
        // Preview surface: full width, half the screen height.
        surfaceHolder.setFixedSize(screenWidth, screenHeight / 4 * 2);
        surfaceHolder.addCallback(new Callback() {
            @Override
            public void surfaceDestroyed(SurfaceHolder holder) {
            }

            @Override
            public void surfaceCreated(SurfaceHolder holder) {
                // The camera needs a live surface before preview can start.
                CameraUtil.initCamera(surfaceHolder);
            }

            @Override
            public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
            }
        });
        startService();
    }

    /** Starts the socket service that streams preview frames to clients. */
    public void startService() {
        Intent intent = new Intent(MainActivity.this, CamThreadService.class);
        startService(intent);
    }

    @Override
    protected void onDestroy() {
        CameraUtil.recycleCamera();
        CamConstant.hadInit = false;
        // FIX: onCreate starts CamThreadService, but the original checked and
        // stopped "CamService" here, leaving the real service running.
        if (ServiceUtil.isServiceRunning(this, "CamThreadService")) {
            stopService(new Intent(this, CamThreadService.class));
        }
        super.onDestroy();
    }
}
public class CamThreadService extends Service {
Socket mClintSocket;
ServerSocket serverSocket;
@Override
public IBinder onBind(Intent intent) {
// Started service only (clients use startService); binding is not supported.
return null;
}
@Override
public void onCreate() {
    L.e("启动11111");
    // Accept loop runs off the main thread (Android forbids network I/O on it).
    new Thread(new Runnable() {
        @Override
        public void run() {
            try {
                L.e("accept000");
                sendMSG("accept1111");
                serverSocket = new ServerSocket(CamConstant.Camera_Port);
                while (true) {
                    L.e("accept0001111");
                    L.e("accept11111");
                    sendMSG("accept222");
                    // Blocks until a viewer connects; one handler thread per connection.
                    mClintSocket = serverSocket.accept();
                    L.e("accept22222");
                    L.e("accept3333");
                    new Thread() {
                        @Override
                        public void run() {
                            if (mClintSocket != null) {
                                sendMSG("accept3333");
                                L.e("accept4444");
                                // Poll until the Activity has initialised the camera, then
                                // attach the preview callback that streams frames out.
                                boolean waiting = true;
                                while (waiting) {
                                    if (CameraUtil.mCamera != null) {
                                        try {
                                            // StreamIt's constructor opens the socket streams
                                            // and can throw IOException.
                                            CameraUtil.mCamera.setPreviewCallback(new StreamIt(mClintSocket));
                                        } catch (IOException e) {
                                            e.printStackTrace();
                                        }
                                        waiting = false;
                                        mClintSocket = null;
                                    } else {
                                        // FIX: the original loop busy-waited at 100% CPU;
                                        // sleep briefly between polls of CameraUtil.mCamera.
                                        try {
                                            Thread.sleep(50);
                                        } catch (InterruptedException ie) {
                                            Thread.currentThread().interrupt();
                                            return;
                                        }
                                    }
                                }
                            }
                        }
                    }.start();
                }
            } catch (IOException e) {
                L.e("之行报错 停止");
                e.printStackTrace();
            }
        }
    }).start();
    super.onCreate();
}
/**
* 视频流数据相关回调
*/
private class StreamIt implements PreviewCallback {
private OutputStream out;
private InputStream in;
private int times = 0;
/**
 * Caches the client socket's streams once; onPreviewFrame reuses {@code out}
 * for every frame it sends.
 *
 * @param s connected client socket
 * @throws IOException if either socket stream cannot be opened
 */
public StreamIt(Socket s) throws IOException {
    this.out = s.getOutputStream();
    this.in = s.getInputStream();
    sendMSG("压缩后的数据11");
    // FIX: removed unused local `int[] textures = new int[1];` (dead code).
    L.e("压缩后的数据1");
}
@Override
public void onPreviewFrame(byte[] data, Camera camera) {
// Called once per camera preview frame; data holds the raw NV21 image bytes.
Size size = camera.getParameters().getPreviewSize();
try {
L.e("压缩后的数据1.5");
// NOTE(review): `times` starts at 0 and is never modified anywhere in this
// class, so this guard is always true and every frame is sent. If
// frame-skipping was intended, `times` must be updated — confirm intent.
if(times == 0){
// Wrap the NV21 bytes so they can be JPEG-compressed.
YuvImage image = new YuvImage(data, ImageFormat.NV21, size.width, size.height, null);
if(image != null){
// Source data: JPEG-compress the full preview frame at quality 100.
ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
image.compressToJpeg(new Rect(0, 0, size.width, size.height), 100, outputStream);
byte[] srcData = outputStream.toByteArray();
int len = srcData.length;
Bitmap src = BitmapFactory.decodeByteArray(srcData, 0, len);
// Compressed data: rescale to 640x480 and re-encode to shrink the payload.
byte[] outdata = transImage(src, 640, 480);
L.e("压缩后的数据2");
sendMSG("压缩后的数据22");
int datalen = outdata.length;
// Frame protocol: marker byte 0xA0, 4-byte length (see intTOBytes), then the JPEG.
out.write((byte) 0xA0);
out.write(intTOBytes(datalen));
out.write(outdata, 0, datalen);
if(!src.isRecycled()){
src.recycle();
}
}
}
} catch (Exception e) {
// Failures (e.g. client disconnected mid-write) are only logged;
// streaming simply stops producing output for this client.
e.printStackTrace();
}
}
/**
 * Scales {@code bitmap} to {@code width} x {@code height}, JPEG-encodes the
 * result at quality 85, and returns the encoded bytes. Both the input bitmap
 * and the intermediate scaled bitmap are recycled before returning. On any
 * failure, logs the exception, asks the main handler to recycle the camera,
 * and returns {@code null}.
 */
private byte[] transImage(Bitmap bitmap, int width, int height){
    try{
        final int srcW = bitmap.getWidth();
        final int srcH = bitmap.getHeight();
        // Build the scaling transform from source size to target size.
        Matrix scale = new Matrix();
        scale.postScale((float) width / srcW, (float) height / srcH);
        // Render the scaled copy of the frame.
        Bitmap scaled = Bitmap.createBitmap(bitmap, 0, 0, srcW, srcH, scale, false);
        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        scaled.compress(CompressFormat.JPEG, 85, buffer);
        byte[] jpeg = buffer.toByteArray();
        buffer.close();
        // Free both bitmaps eagerly; frames arrive quickly and heap is tight.
        if(!bitmap.isRecycled()){
            bitmap.recycle();
        }
        if(!scaled.isRecycled()){
            scaled.recycle();
        }
        return jpeg;
    }catch(Exception ex){
        ex.printStackTrace();
        MainApplication.mHandler.sendEmptyMessage(CamConstant.RECYCLE_CAMERA);
    }
    return null;
}
/**
* 将int 数值转换为4个字节
*/
private byte[] intTOBytes(int value){
Log.e("cam",value+"d");
byte[] src = new byte[4];
src[3] = (byte) ((value >>