[UVCcamera/Beginner] A Simple Network Camera (webcam)

  • Preface

Android Studio version: 3.2

Operating system (for AS): Windows 10

Android version: 5.0.2

Camera: USB camera (640*480, fairly low-end)

Server side: Node.js

Transport: Socket.IO

Open-source project: saki4510t/UVCCamera

This continues from the previous post: [UVCcamera/Beginner] Simple face recognition with OpenCV in AS

  • Notes

Overall flow

    1.  The Android side grabs UVCCamera frames (as Bitmaps)

    2.  Each frame is compressed into a JPEG byte[]

    3.  The byte[] is sent to the server over Socket.IO

    4.  The server forwards the data to the web page

    5.  The page turns each payload back into a JPEG image and displays it

Played back-to-back, the stream of JPEGs becomes the "video" (admittedly, this is not a real video stream, and nothing is optimized; there is plenty of room to improve).

For the UVCCamera basics, see the first post (https://blog.csdn.net/qq_33446100/article/details/88775629). Some backend code is involved; the project will be uploaded to GitHub later.

The server runs locally (start it with node app.js), listening on the machine's LAN IP and a port.

  • Screenshots

  • Code walkthrough

1.  Android: Socket.IO setup

Add the following to the dependencies block of the app module's build.gradle (note that the app also needs the android.permission.INTERNET permission in AndroidManifest.xml):

implementation ('io.socket:socket.io-client:0.8.3') {
        // excluding org.json which is provided by Android
        exclude group: 'org.json', module: 'json'
    }

Import the packages in MainActivity.java:

import io.socket.client.IO;
import io.socket.client.Socket;

Define the socket variable:

Socket mSocket;

Initialize it. The IP in the URL is the LAN IP of the host running Node.js (on Windows, check it with ipconfig in a cmd window; on Linux, use ifconfig), and 8081 is the port the server listens on:

public static final String CHAT_SERVER_URL = "http://192.168.3.12:8081";

//this part goes in the onCreate method
        try {
            mSocket = IO.socket(CHAT_SERVER_URL);

        } catch (URISyntaxException e) {
            Log.v(TAG, "Socket Failed");
            throw new RuntimeException(e);

        }

Connect to the server. I do this in onStart, but other places should work too:

mSocket.connect();
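Optionally, you can log the connection state. A minimal sketch, assuming the socket.io-client 0.8.x Java API (Socket.EVENT_CONNECT, Socket.EVENT_CONNECT_ERROR, and io.socket.emitter.Emitter ship with that library); register the listeners before calling connect():

import io.socket.emitter.Emitter;

// the listeners are invoked on a background thread, not the UI thread
mSocket.on(Socket.EVENT_CONNECT, new Emitter.Listener() {
    @Override
    public void call(Object... args) {
        Log.v(TAG, "socket connected");
    }
}).on(Socket.EVENT_CONNECT_ERROR, new Emitter.Listener() {
    @Override
    public void call(Object... args) {
        Log.v(TAG, "socket connect error");
    }
});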

2.  Android: process the frames captured by UVCCamera and forward them to the server over Socket.IO

This is close to the OpenCV processing in the previous post, and that is also the main part that changes.

Grab each frame's Bitmap, compress it to JPEG, and convert it to a byte[]:

    private void sendBitmap(Bitmap it){

        //Bitmap bmp = getTransformBitmap(it);//this helper converted RGB565 to
        //ARGB8888, but RGB565 turned out to work as-is, so it was dropped
        ByteArrayOutputStream buf = new ByteArrayOutputStream();
        it.compress(Bitmap.CompressFormat.JPEG, 20, buf);//compress to JPEG (quality 20)
        mSocket.emit("image", buf.toByteArray());//emit("event", value)

        //Log.v(TAG, "In this module");//for testing
    }
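If you want to know when the server actually received a frame, Socket.IO acknowledgements can help. A hedged sketch, not part of the original code: it assumes the socket.io-client Java Ack API and assumes the server's image handler is changed to accept and invoke a callback (socket.on("image", function (message, fn) { ... if (fn) fn(); })):

import io.socket.client.Ack;

// inside sendBitmap(): emit the frame and log when the server acknowledges it
mSocket.emit("image", new Object[]{ buf.toByteArray() }, new Ack() {
    @Override
    public void call(Object... args) {
        Log.v(TAG, "frame acknowledged by server");
    }
});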

Call sendBitmap from the IFrameCallback (the same spot as the OpenCV processing in the previous post):

sendBitmap(srcBitmap);
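Sending every preview frame can quickly saturate the socket (the server below also drops every other frame for the same reason), so it is worth rate-limiting on the phone as well. A minimal client-side throttling sketch; mLastSendMs, MIN_SEND_INTERVAL_MS, and maybeSendBitmap are hypothetical additions to MainActivity, not part of the original code:

    private long mLastSendMs = 0;
    private static final long MIN_SEND_INTERVAL_MS = 100; // cap at roughly 10 fps

    // call this from onFrame() instead of sendBitmap()
    private void maybeSendBitmap(Bitmap it) {
        final long now = System.currentTimeMillis();
        if (now - mLastSendMs >= MIN_SEND_INTERVAL_MS) {
            mLastSendMs = now;
            sendBitmap(it);
        }
    }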

3.  Server setup

Node.js basics are out of scope here; a reference I learned from is 《Nodejs学习笔记(七)--- Node.js + Express 构建网站简单示例》 (a Node.js + Express walkthrough).

app.js (this is more complex than it needs to be, because it reuses code from an earlier project):

var createError = require('http-errors');
var express = require('express');
var path = require('path');
var cookieParser = require('cookie-parser');
var bodyParser = require('body-parser');
var logger = require('morgan');
var moment = require('moment');

var indexRouter = require('./routes/index');

var app = express();

// view engine setup
app.set('views', path.join(__dirname, 'views'));
app.set('view engine', 'ejs');

app.use(logger('dev'));
app.use(express.json());
app.use(express.urlencoded({ extended: false }));
app.use(bodyParser.json());
app.use(bodyParser.urlencoded({ extended: false }));
app.use(cookieParser());
app.use(express.static(path.join(__dirname, 'public')));

app.use('/', indexRouter);

// catch 404 and forward to error handler
app.use(function (req, res, next) {
  next(createError(404));
});

// error handler
app.use(function (err, req, res, next) {
  // set locals, only providing error in development
  res.locals.message = err.message;
  res.locals.error = req.app.get('env') === 'development' ? err : {};

  // render the error page
  res.status(err.status || 500);
  //res.render('error');
});

var server = app.listen(8081, function () {
  console.log("Server Start!");
});

var count = 0;        // number of connected clients
var interval = 50000; // frame counter used for the crude throttling below
var totalCount = 0;   // total connections seen
var allMem = new Array(); // unused leftover

var io = require('socket.io')(server);

io.on('connection', function (socket) {
  console.log("New Socket!");

  console.log("a client connected");
  count++;
  totalCount++;
  console.log("count:" + count);

  let name = ''; // leftover from the chat project this was adapted from

  // handle the "image" event
  socket.on("image", function (message) {
    //console.log(message);
    interval++;
    // forward to every client; message is the raw byte[]
    //socket.broadcast.emit("image", message);
    if (interval > 1) { // effectively forwards every other frame
      io.emit("image", message);
      interval = 0;
    }
  });

  // fired when a client disconnects
  socket.on("disconnect", function () {
    count--;
    console.log(name + " left");
    io.emit("disconnection", {
      count: count,
      name: name
    });
  });

});

module.exports = app;

index.js (the route for the user-facing page):

var express = require('express');
var router = express.Router();

/* GET home page. */
router.get('/', function (req, res) {
    res.render('sys', { title: 'Express' });
  
});

module.exports = router;

sys.ejs (the EJS template); the important parts are commented:

<!DOCTYPE html>
<html lang="zh-CN">

<head>
    <meta charset="utf-8">
    <meta http-equiv="X-UA-Compatible" content="IE=edge">
    <meta name="viewport" content="width=device-width, initial-scale=1">
    <!-- The 3 meta tags above *must* come first; everything else *must* follow them! -->
    <title>1552218</title>

    <!-- Bootstrap -->
    <link href="https://cdn.jsdelivr.net/npm/bootstrap@3.3.7/dist/css/bootstrap.min.css" rel="stylesheet">
  <style>
    * {
      margin: 0;
      padding: 0;
      box-sizing: border-box;
    }
    body {
      font: 13px Helvetica, Arial;
    }
    form {
      background: #000;
      padding: 3px;
      position: fixed;
      bottom: 0;
      width: 100%;
    }
    form input {
      border: 0;
      padding: 10px;
      width: 90%;
      margin-right: .5%;
    }
    form button {
      width: 9%;
      background: rgb(130, 224, 255);
      border: none;
      padding: 10px;
    }
    #messages {
      list-style-type: none;
      margin: 0;
      padding: 0;
    }
    #messages li {
      padding: 5px 10px;
    }
    #messages li:nth-child(odd) {
      background: #eee;
    }
  </style>
</head>
<script src="https://code.jquery.com/jquery-1.11.1.js"></script>
<script src="js/socket.io.js"></script>
<script>
  // you can ignore this function (an unused base64 fallback)
  function encode (input) {
    var keyStr = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=";
    var output = "";
    var chr1, chr2, chr3, enc1, enc2, enc3, enc4;
    var i = 0;

    while (i < input.length) {
        chr1 = input[i++];
        chr2 = i < input.length ? input[i++] : Number.NaN; // Not sure if the index
        chr3 = i < input.length ? input[i++] : Number.NaN; // checks are needed here

        enc1 = chr1 >> 2;
        enc2 = ((chr1 & 3) << 4) | (chr2 >> 4);
        enc3 = ((chr2 & 15) << 2) | (chr3 >> 6);
        enc4 = chr3 & 63;

        if (isNaN(chr2)) {
            enc3 = enc4 = 64;
        } else if (isNaN(chr3)) {
            enc4 = 64;
        }
        output += keyStr.charAt(enc1) + keyStr.charAt(enc2) +
                  keyStr.charAt(enc3) + keyStr.charAt(enc4);
    }
    return output;
}


  $(function () {
    var socket = io(); // initialize socket.io

    // handle the "image" event
    // the handling is deliberately simple: display each frame as it arrives,
    // with no optimization, so playback will stutter a bit
    // a more advanced approach would use a real video stream, which is not covered here
    socket.on('image', function (msg) {
      var arrayBufferView = new Uint8Array(msg); // wrap the raw bytes byte by byte
      //console.log(arrayBufferView);
      var blob = new Blob([arrayBufferView], { type: "image/jpeg" }); // store as a Blob
      //console.log(blob);
      //var urlCreator = window.URL || window.webkitURL;
      var imageUrl = URL.createObjectURL(blob); // turn it into an object URL
      var img = document.getElementById("screen"); // the URL can then be displayed
      // var img = document.querySelector("#photo");
      // note: for long sessions, call URL.revokeObjectURL on the previous URL
      // so the blobs do not accumulate in memory
      img.src = imageUrl;
      //"data:image/jpg;base64,"+encode(arrayBufferView);
    });
  });
</script>

<body>
  <img id="screen" src="">
</body>

</html>

4.  Full code for the Android side

package com.example.o0orick.camera;

import android.graphics.Bitmap;
import android.graphics.SurfaceTexture;
import android.hardware.usb.UsbDevice;
import android.os.Bundle;
import android.util.Log;
import android.view.Surface;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.Window;
import android.view.WindowManager;
import android.widget.ImageButton;
import android.widget.ImageView;
import android.widget.Toast;

import com.serenegiant.common.BaseActivity;
import com.serenegiant.usb.CameraDialog;
import com.serenegiant.usb.IFrameCallback;
import com.serenegiant.usb.USBMonitor;
import com.serenegiant.usb.USBMonitor.OnDeviceConnectListener;
import com.serenegiant.usb.USBMonitor.UsbControlBlock;
import com.serenegiant.usb.UVCCamera;
import com.serenegiant.usbcameracommon.UVCCameraHandler;
import com.serenegiant.widget.CameraViewInterface;

import java.io.ByteArrayOutputStream;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
import java.nio.ByteBuffer;
import io.socket.client.IO;
import io.socket.client.Socket;
import okio.ByteString;

public final class MainActivity extends BaseActivity implements CameraDialog.CameraDialogParent {
    private static final boolean DEBUG = true;	// TODO set false on release
    private static final String TAG = "MainActivity";

    /**
     * state lock
     */
	private final Object mSync = new Object();

    /**
     * set true if you want to record movie using MediaSurfaceEncoder
     * (writing frame data into Surface camera from MediaCodec
     *  by almost same way as USBCameratest2)
     * set false if you want to record movie using MediaVideoEncoder
     */
    private static final boolean USE_SURFACE_ENCODER = false;

    /**
     * preview resolution(width)
     * if your camera does not support specific resolution and mode,
     * {@link UVCCamera#setPreviewSize(int, int, int)} throw exception
     */
    private static final int PREVIEW_WIDTH = 640; // 640
    /**
     * preview resolution(height)
     * if your camera does not support specific resolution and mode,
     * {@link UVCCamera#setPreviewSize(int, int, int)} throw exception
     */
    private static final int PREVIEW_HEIGHT = 480; //480
    /**
     * preview mode
     * if your camera does not support specific resolution and mode,
     * {@link UVCCamera#setPreviewSize(int, int, int)} throw exception
     * 0:YUYV, other:MJPEG
     */
    private static final int PREVIEW_MODE = 0; // YUV

    protected static final int SETTINGS_HIDE_DELAY_MS = 2500;

    /**
     * for accessing USB
     */
    private USBMonitor mUSBMonitor;
    /**
     * Handler to execute camera related methods sequentially on private thread
     */
    private UVCCameraHandler mCameraHandler;
    /**
     * for camera preview display
     */
    private CameraViewInterface mUVCCameraView;
    /**
     * for open&start / stop&close camera preview
     */
    private ImageButton mCameraButton;
    private ImageView mImageView;
    private boolean isScaling = false;
    private boolean isInCapturing = false;

    private int[][] capture_solution = {{640,480}, {800,600},{1024,768}, {1280,1024}};
    private int mCaptureWidth = capture_solution[0][0];
    private int mCaptureHeight = capture_solution[0][1];

    public static final String CHAT_SERVER_URL = "http://192.168.3.12:8081";

    Socket mSocket;

    @Override
    protected void onResume() {
        super.onResume();

    }


    @Override
    protected void onCreate(final Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        Log.v(TAG, "onCreate:");


        getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
        requestWindowFeature(Window.FEATURE_NO_TITLE);
        getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
                WindowManager.LayoutParams.FLAG_FULLSCREEN);

        setContentView(R.layout.activity_main);
        mCameraButton = findViewById(R.id.imageButton);
        mCameraButton.setOnClickListener(mOnClickListener);

        mCaptureWidth = capture_solution[0][0];
        mCaptureHeight = capture_solution[0][1];
        bitmap = Bitmap.createBitmap(mCaptureWidth, mCaptureHeight, Bitmap.Config.RGB_565);

        final View view = findViewById(R.id.camera_view);
        mUVCCameraView = (CameraViewInterface)view;
        mUVCCameraView.setAspectRatio(PREVIEW_WIDTH / (float)PREVIEW_HEIGHT);

        try {
            mSocket = IO.socket(CHAT_SERVER_URL);

        } catch (URISyntaxException e) {
            Log.v(TAG, "Socket Failed");
            throw new RuntimeException(e);

        }

		synchronized (mSync) {
	        mUSBMonitor = new USBMonitor(this, mOnDeviceConnectListener);
	        mCameraHandler = UVCCameraHandler.createHandler(this, mUVCCameraView,
	                USE_SURFACE_ENCODER ? 0 : 1, PREVIEW_WIDTH, PREVIEW_HEIGHT, PREVIEW_MODE);
		}
    }

    @Override
    protected void onStart() {
        super.onStart();

        mSocket.connect();
        Log.v(TAG, "onStart:");

		synchronized (mSync) {
        	mUSBMonitor.register();
		}
		if (mUVCCameraView != null) {
  			mUVCCameraView.onResume();
		}
    }

    @Override
    protected void onStop() {
        Log.v(TAG, "onStop:");
        synchronized (mSync) {
    		mCameraHandler.close();	// #close include #stopRecording and #stopPreview
			mUSBMonitor.unregister();
        }
		 if (mUVCCameraView != null)
			mUVCCameraView.onPause();
        super.onStop();
    }

    @Override
    public void onDestroy() {
        Log.v(TAG, "onDestroy:");
        synchronized (mSync) {
            if (mCameraHandler != null) {
                mCameraHandler.setPreviewCallback(null); //zhf
                mCameraHandler.release();
                mCameraHandler = null;
            }
            if (mUSBMonitor != null) {
                mUSBMonitor.destroy();
                mUSBMonitor = null;
            }
        }
        super.onDestroy();
    }

    /**
     * event handler when click camera / capture button
     */
    private final OnClickListener mOnClickListener = new OnClickListener() {
        @Override
        public void onClick(final View view) {
            synchronized (mSync) {
                if ((mCameraHandler != null) && !mCameraHandler.isOpened()) {
                    CameraDialog.showDialog(MainActivity.this);
                } else {
                    mCameraHandler.close();
                }
            }
        }
    };

    private void startPreview() {
		synchronized (mSync) {
			if (mCameraHandler != null) {
                final SurfaceTexture st = mUVCCameraView.getSurfaceTexture();
                /**
                 * The SurfaceView is handled on another thread, so the frame
                 * callback is delivered through the handler's message mechanism.
                 */
				mCameraHandler.setPreviewCallback(mIFrameCallback);
                mCameraHandler.startPreview(new Surface(st));
			}
		}
        updateItems();
    }

    private final OnDeviceConnectListener mOnDeviceConnectListener = new OnDeviceConnectListener() {
        @Override
        public void onAttach(final UsbDevice device) {
            Toast.makeText(MainActivity.this, "USB_DEVICE_ATTACHED", Toast.LENGTH_SHORT).show();
        }

        @Override
        public void onConnect(final UsbDevice device, final UsbControlBlock ctrlBlock, final boolean createNew) {
            if (DEBUG) Log.v(TAG, "onConnect:");
            synchronized (mSync) {
                if (mCameraHandler != null) {
	                mCameraHandler.open(ctrlBlock);
	                startPreview();
	                updateItems();
				}
            }
        }

        @Override
        public void onDisconnect(final UsbDevice device, final UsbControlBlock ctrlBlock) {
            if (DEBUG) Log.v(TAG, "onDisconnect:");
            synchronized (mSync) {
                if (mCameraHandler != null) {
                    queueEvent(new Runnable() {
                        @Override
                        public void run() {
                            try{
                                // maybe throw java.lang.IllegalStateException: already released
                                mCameraHandler.setPreviewCallback(null); //zhf
                            }
                            catch(Exception e){
                                e.printStackTrace();
                            }
                            mCameraHandler.close();
                        }
                    }, 0);
				}
            }
        }
        @Override
        public void onDettach(final UsbDevice device) {
            Toast.makeText(MainActivity.this, "USB_DEVICE_DETACHED", Toast.LENGTH_SHORT).show();
        }

        @Override
        public void onCancel(final UsbDevice device) {
        }
    };

    /**
     * to access from CameraDialog
     * @return
     */
    @Override
    public USBMonitor getUSBMonitor() {
		synchronized (mSync) {
			return mUSBMonitor;
		}
	}

    @Override
    public void onDialogResult(boolean canceled) {
        if (DEBUG) Log.v(TAG, "onDialogResult:canceled=" + canceled);
    }

    //================================================================================
    private boolean isActive() {
        return mCameraHandler != null && mCameraHandler.isOpened();
    }

    private boolean checkSupportFlag(final int flag) {
        return mCameraHandler != null && mCameraHandler.checkSupportFlag(flag);
    }

    private int getValue(final int flag) {
        return mCameraHandler != null ? mCameraHandler.getValue(flag) : 0;
    }

    private int setValue(final int flag, final int value) {
        return mCameraHandler != null ? mCameraHandler.setValue(flag, value) : 0;
    }

    private int resetValue(final int flag) {
        return mCameraHandler != null ? mCameraHandler.resetValue(flag) : 0;
    }

    /**
     * Wrap UI updates in a Runnable and hand it to
     * Activity.runOnUiThread(Runnable) whenever the UI needs refreshing.
     */
    private void updateItems() {
        runOnUiThread(mUpdateItemsOnUITask, 30);
    }

    private final Runnable mUpdateItemsOnUITask = new Runnable() {
        @Override
        public void run() {
            if (isFinishing()) return;
            final int visible_active = isActive() ? View.VISIBLE : View.INVISIBLE;
        }
    };

    // if you need frame data as byte array on Java side, you can use this callback method with UVCCamera#setFrameCallback
    // if you need to create Bitmap in IFrameCallback, please refer following snippet.
    private Bitmap bitmap = null;//Bitmap.createBitmap(640, 480, Bitmap.Config.RGB_565);
    private final Bitmap srcBitmap = Bitmap.createBitmap(PREVIEW_WIDTH, PREVIEW_HEIGHT, Bitmap.Config.RGB_565);
    private String WarnText;

    public static Bitmap getTransformBitmap(Bitmap src) {
        int width = src.getWidth();
        int height = src.getHeight();
        int count = src.getByteCount();     // total size of the RGB565 pixel data
        ByteBuffer buffer = ByteBuffer.allocate(count);
        src.copyPixelsToBuffer(buffer);
        byte[] data = buffer.array();       // raw bytes
        int sum = width * height;
        int[] pixels = new int[sum];

        for (int i = 0; i < sum; i++) {
            // mask with 0xff because Java bytes are signed (little-endian RGB565)
            int tmpint = (data[i * 2] & 0xff) | ((data[i * 2 + 1] & 0xff) << 8);
            int a = 0xff;                           // alpha
            int r = (tmpint & 0xf800) >> 11;        // red
            int g = (tmpint & 0x07e0) >> 5;         // green
            int b = (tmpint & 0x001f);              // blue

            r = r << 3;
            g = g << 2;
            b = b << 3;
            pixels[i] = (a << 24) | (r << 16) | (g << 8) | (b);
        }
        Bitmap bmp = Bitmap.createBitmap(width, height,
                Bitmap.Config.ARGB_8888);
        bmp.setPixels(pixels, 0, width, 0, 0, width, height);
        return bmp;
    }

    private void sendBitmap(Bitmap it){

        //Bitmap bmp = getTransformBitmap(it);
        ByteArrayOutputStream buf;
        buf = new ByteArrayOutputStream();
        it.compress(Bitmap.CompressFormat.JPEG, 20, buf);
        //byte[] b = buf.toByteArray();
        mSocket.emit("image", buf.toByteArray());

        Log.v(TAG, "In this module");
    }

    private final IFrameCallback mIFrameCallback = new IFrameCallback() {
        @Override
        public void onFrame(final ByteBuffer frame) {
            frame.clear();
            if(!isActive() || isInCapturing){
                return;
            }
            if(bitmap == null){
                Toast.makeText(MainActivity.this, "Error: bitmap is null", Toast.LENGTH_SHORT).show();
                return;
            }
            /**
             * OpenCV processing happens here:
             * srcBitmap: source frame
             * bitmap: processed result
             * the bitmap data is then forwarded
             */
            synchronized (bitmap) {
                srcBitmap.copyPixelsFromBuffer(frame);
                WarnText = "";

                if(bitmap.getWidth() != mCaptureWidth || bitmap.getHeight() != mCaptureHeight){
                    bitmap = Bitmap.createBitmap(mCaptureWidth, mCaptureHeight, Bitmap.Config.RGB_565);
                }

                sendBitmap(srcBitmap);
            }
            if (mImageView != null) { // mImageView must be bound in onCreate (findViewById); guard against NPE
                mImageView.post(mUpdateImageTask);
            }
        }
    };

    private final Runnable mUpdateImageTask = new Runnable() {
        @Override
        public void run() {
            synchronized (bitmap) {
                mImageView.setImageBitmap(bitmap);

            }
        }
    };

}

END
