canvas 处理视频 录制-新

参考:https://developer.mozilla.org/en-US/docs/Web/API/Canvas_API/Manipulating_video_using_canvas

   var captureImage = function () {
       // Grab the current video frame onto an offscreen canvas (scaled by
       // `scale`) and prepend it to $output as an <img>.
       var canvas = document.createElement("canvas");
       canvas.width = video.videoWidth * scale;
       canvas.height = video.videoHeight * scale;
       canvas.getContext('2d')
           .drawImage(video, 0, 0, canvas.width, canvas.height);
       var img = document.createElement("img");
       // img.src = canvas.toDataURL('image/png');  // base64 alternative
       // toBlob is asynchronous: it hands the frame over as a binary Blob,
       // smaller than a base64 data URL and directly uploadable via ajax.
       canvas.toBlob(function (blob) {
           img.src = window.URL.createObjectURL(blob);
           // Revoke the object URL once the image has loaded; otherwise every
           // capture leaks a blob reference for the lifetime of the page.
           img.onload = function () {
               window.URL.revokeObjectURL(img.src);
           };
           // NOTE(review): upload the raw `blob` here via ajax if needed.
           console.log(blob)
       }, 'image/png');
       $output.prepend(img);
   };

index.js

var processor = {
  // Cache the <video> element and the two canvas contexts, then kick off
  // the per-frame processing loop once playback starts.
  doLoad: function doLoad() {
    this.video = document.getElementById("video");
    this.c1 = document.getElementById("c1");
    this.ctx1 = this.c1.getContext("2d");
    this.c2 = document.getElementById("c2");
    this.ctx2 = this.c2.getContext("2d");
    var that = this;
    this.video.addEventListener(
      "play",
      function () {
        // Work at half the native video resolution.
        that.width = that.video.videoWidth / 2;
        that.height = that.video.videoHeight / 2;
        that.timerCallback();
      },
      false
    );
  },

  // Re-run computeFrame as fast as the event loop allows while the video
  // is actively playing; stop once it pauses or ends.
  timerCallback: function timerCallback() {
    if (this.video.paused || this.video.ended) {
      return;
    }
    this.computeFrame();
    setTimeout(this.timerCallback.bind(this), 0);
  },

  // Copy the current video frame onto c1, punch out yellow-ish pixels
  // (high red & green, low blue — rgba 255,255,0 is yellow) by zeroing
  // their alpha, and write the keyed frame onto c2.
  computeFrame: function computeFrame() {
    this.ctx1.drawImage(this.video, 0, 0, this.width, this.height);
    var frame = this.ctx1.getImageData(0, 0, this.width, this.height);
    var data = frame.data;
    for (var i = 0; i < data.length; i += 4) {
      var red = data[i];
      var green = data[i + 1];
      var blue = data[i + 2];
      if (green > 100 && red > 100 && blue < 43) {
        data[i + 3] = 0;
      }
    }
    this.ctx2.putImageData(frame, 0, 0);
  },
};

<!DOCTYPE html
    PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml">

<head>
    <!-- <title> is required inside <head> for a valid document. -->
    <title>Canvas video chroma-key demo</title>

    <style>
        body {
            background: black;
            color: #CCCCCC;
        }

        #c2,
        #c1 {
            background-image: url(foo.png);
            background-repeat: no-repeat;
        }

        div {
            float: left;
            border: 1px solid #444444;
            padding: 10px;
            margin: 10px;
            background: #3B3B3B;
        }
    </style>

</head>

<body><script id="__bs_script__">//<![CDATA[
    document.write("<script async src='/browser-sync/browser-sync-client.js?v=2.26.7'><\/script>".replace("HOST", location.hostname));
//]]></script>

    <div>
        <!-- <video> is not a void element: it needs an explicit closing tag,
             otherwise browsers reparent the following markup inside it. -->
        <video id="video" src="video.ogv" controls="true"></video>
    </div>
    <div>
        <canvas id="c1" width="160" height="96"></canvas>
        <canvas id="c2" width="160" height="96"></canvas>
    </div>
    <script src="./index.js"></script>
    <script>
        processor.doLoad()
    </script>
</body>

</html>

视频素材下载 https://www.videvo.net/stock-video-footage/green-screen/
转载:https://blog.csdn.net/luofeng457/article/details/90186162

<!DOCTYPE html>
<html>
  <head>
    <!-- <title> is required inside <head> for a valid document. -->
    <title>Canvas green-screen demo</title>
    <style>
      body {
        background: black;
        color:#CCCCCC; 
      }
      #c2 {
        background-image: url(a.png);
        background-repeat: no-repeat;
      }
      div {
        float: left;
        border :1px solid #444444;
        padding:10px;
        margin: 10px;
        background:#3B3B3B;
      }
    </style>
  </head>

  <body>
    <div>
      <!-- <video> is not a void element: it needs an explicit closing tag,
           otherwise browsers reparent the following markup inside it. -->
      <video id="video" src="chicken.mp4" controls="true"></video>
    </div>
    <div>
      <canvas id="c1" width="480" height="270"></canvas>
      <canvas id="c2" width="480" height="270"></canvas>
    </div>
  <script type="text/javascript">
let processor = {
    // Re-run computeFrame as fast as possible while the video is playing.
    timerCallback: function() {
      if (this.video.paused || this.video.ended) {
        return;
      }
      this.computeFrame();
      let self = this;
      setTimeout(function () {
          self.timerCallback();
        }, 0);
    },

    // Cache DOM nodes and start the frame loop when playback begins.
    doLoad: function() {
      this.video = document.getElementById("video");
      this.c1 = document.getElementById("c1");
      this.ctx1 = this.c1.getContext("2d");
      this.c2 = document.getElementById("c2");
      this.ctx2 = this.c2.getContext("2d");
      let self = this;
      this.video.addEventListener("play", function() {
          self.width = self.video.videoWidth / 4;
          self.height = self.video.videoHeight / 4;
          self.timerCallback();
        }, false);
    },

    // Chroma-key: make strongly-green pixels transparent by zeroing their
    // alpha so c2's CSS background shows through.
    computeFrame: function() {
      this.ctx1.drawImage(this.video, 0, 0, this.width, this.height);
      let frame = this.ctx1.getImageData(0, 0, this.width, this.height);
          let l = frame.data.length / 4;

      for (let i = 0; i < l; i++) {
        let r = frame.data[i * 4 + 0];
        let g = frame.data[i * 4 + 1];
        let b = frame.data[i * 4 + 2];
        if (g > 100 && r < 50)
          frame.data[i * 4 + 3] = 0;
      }
      this.ctx2.putImageData(frame, 0, 0);
      return;
    }
  };

document.addEventListener("DOMContentLoaded", () => {
  processor.doLoad();
});

  </script>
  </body>
</html>

视频录制处理一


<!DOCTYPE html>
<html lang="en">

<head>
    <meta charset="UTF-8">
    <meta http-equiv="X-UA-Compatible" content="IE=edge">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <title>Document</title>
</head>

<body><script id="__bs_script__">//<![CDATA[
    document.write("<script async src='/browser-sync/browser-sync-client.js?v=2.26.7'><\/script>".replace("HOST", location.hostname));
//]]></script>


    MediaRecorder.mimeType[只读]:返回实例化过程设置的媒体文件类型。笔者设备测试,如果未设置,默认返回:Chrome -> video/webm;codecs=vp8,Firefox -> video/webm;
    MediaRecorder.state[只读]:返回当前MediaRecorder实例的工作状态,可选值有:inactive、recording和paused;
    MediaRecorder.stream[只读]:返回当前媒体流,亦即实例化过程传入的媒体流对象
    MediaRecorder.ignoreMutedMedia:是否静音模式录制;
    MediaRecorder.start(timeslice):开始录制。timeslice参数可选,表示以该持续时间切片媒体数据;
    MediaRecorder.pause():暂停录制;
    MediaRecorder.resume():继续录制;
    MediaRecorder.stop():停止录制。
    <!-- First (unnamed) canvas: source animation, recorded via captureStream();
         #video canvas: colour-inverted copy of each frame. -->
    <canvas width="600" height="600"></canvas>
    <canvas id="video" width="600" height="600"></canvas>
    <div id="videoContainer" style="display:none">
        <video controls="true" autoplay="true"></video>
    </div>


    <script>
        // querySelector('canvas') matches the FIRST canvas (the unnamed one).
        const canvas = document.querySelector('canvas');
        const ctx = canvas.getContext('2d');
        const { width, height } = canvas;

        ctx.fillStyle = 'red';

        // Draws a red square rotated by `rotation` (radians) about the canvas
        // centre, then writes a colour-inverted copy onto the #video canvas.
        function draw(rotation = 0) {
            ctx.clearRect(0, 0, 1000, 1000);
            ctx.save();
            ctx.translate(width / 2, height / 2);
            ctx.rotate(rotation);
            ctx.translate(-width / 2, -height / 2);
            ctx.beginPath();
            ctx.rect(200, 200, 200, 200);
            ctx.fill();
            ctx.restore();
            let videoCanvas = document.querySelector('#video')
            const content = videoCanvas.getContext('2d');
            content.clearRect(0, 0, videoCanvas.width, videoCanvas.height);

            // content.drawImage(canvas, 0, 0)  // entire canvas


            // Invert the RGB of every pixel; alpha is left untouched.
            let frame = ctx.getImageData(0, 0, canvas.width, canvas.height);
            const data = frame.data;
            for (var i = 0; i < data.length; i += 4) {
                data[i] = 255 - data[i];     // red
                data[i + 1] = 255 - data[i + 1]; // green
                data[i + 2] = 255 - data[i + 2]; // blue
            }

            content.putImageData(frame, 0, 0);


        }

        // Animation loop: the rotation angle advances with the rAF timestamp.
        function update(t) {
            draw(t / 500);
            requestAnimationFrame(update);
        }

        // Record the first canvas as a webm stream via MediaRecorder.
        const stream = canvas.captureStream();
        const recorder = new MediaRecorder(stream, { mimeType: 'video/webm' });

        const data = [];
        recorder.ondataavailable = function (event) {
            if (event.data && event.data.size) {
                data.push(event.data);
            }
        };
        // On stop, stitch the chunks into a Blob and play it in the <video>.
        recorder.onstop = () => {
            const url = URL.createObjectURL(new Blob(data, { type: 'video/webm' }));
            document.querySelector("#videoContainer").style.display = "block";
            document.querySelector("video").src = url;
        };

        recorder.start();
        update(0);

        // Record for 6 seconds, then stop (fires onstop above).
        setTimeout(() => {
            recorder.stop();
        }, 6000);


    </script>
</body>

</html>

视频录制处理二


<!DOCTYPE html>
<html lang="en">

<head>
    <meta charset="UTF-8">
    <meta http-equiv="X-UA-Compatible" content="IE=edge">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <title>Document</title>
    <style>
        #c2,
        #c1 {
            background-image: url('./2.png');
            background-repeat: no-repeat;
            background-size: cover;
        }
    </style>
</head>

<body><script id="__bs_script__">//<![CDATA[
    document.write("<script async src='/browser-sync/browser-sync-client.js?v=2.26.7'><\/script>".replace("HOST", location.hostname));
//]]></script>


    MediaRecorder.mimeType[只读]:返回实例化过程设置的媒体文件类型。笔者设备测试,如果未设置,默认返回:Chrome -> video/webm;codecs=vp8,Firefox -> video/webm;
    MediaRecorder.state[只读]:返回当前MediaRecorder实例的工作状态,可选值有:inactive、recording和paused;
    MediaRecorder.stream[只读]:返回当前媒体流,亦即实例化过程传入的媒体流对象
    MediaRecorder.ignoreMutedMedia:是否静音模式录制;
    MediaRecorder.start(timeslice):开始录制。timeslice参数可选,表示以该持续时间切片媒体数据;
    MediaRecorder.pause():暂停录制;
    MediaRecorder.resume():继续录制;
    MediaRecorder.stop():停止录制。
    <!-- First canvas: animated source (recorded); the #video element replays
         the recording; c1/c2 post-process the replayed video per frame. -->
    <canvas width="600" height="600"></canvas>
    <canvas id="" width="600" height="600"></canvas>

    <div id="videoContainer" style="display:none">
        <video id="video" controls="true" autoplay="true"></video>
    </div>
    <canvas id="c1" width="160" height="96"></canvas>
    <canvas id="c2" width="160" height="96"></canvas>
    <div style="height:100px "></div>
    <script>
        var processor = {};

        // Cache the playback <video> plus the c1/c2 canvases, and start the
        // per-frame loop once the video starts playing.
        processor.doLoad = function doLoad() {
            this.video = document.getElementById("video");
            this.c1 = document.getElementById("c1");
            this.ctx1 = this.c1.getContext("2d");
            this.c2 = document.getElementById("c2");
            this.ctx2 = this.c2.getContext("2d");
            let self = this;
            this.video.addEventListener(
                "play",
                function () {
                    self.width = self.video.videoWidth / 4;
                    self.height = self.video.videoHeight / 4;
                    self.timerCallback();
                },
                false
            );
        };
        // Re-run computeFrame as fast as possible while the video is playing.
        processor.timerCallback = function timerCallback() {
            if (this.video.paused || this.video.ended) {
                return;
            }
            this.computeFrame();
            let self = this;
            setTimeout(function () {
                self.timerCallback();
            }, 0);
        };

        // Copy the current video frame to c1, invert its RGB channels, and
        // write the result to c2 (the chroma-key variant is kept commented).
        processor.computeFrame = function computeFrame() {
            this.ctx1.drawImage(this.video, 0, 0, this.width, this.height);
            let frame = this.ctx1.getImageData(0, 0, this.width, this.height);
            // let l = frame.data.length / 4;
            // for (let i = 0; i < l; i++) {
            //     let r = frame.data[i * 4 + 0];
            //     let g = frame.data[i * 4 + 1];
            //     let b = frame.data[i * 4 + 2];
            //     // rgba   255 255 0 (yellow)
            //     if (g > 100 && r > 100 && b < 43) {
            //         frame.data[i * 4 + 3] = 0;
            //     }
            // }

            const data = frame.data;
            for (var i = 0; i < data.length; i += 4) {
                data[i] = 255 - data[i];     // red
                data[i + 1] = 255 - data[i + 1]; // green
                data[i + 2] = 255 - data[i + 2]; // blue
            }



            this.ctx2.putImageData(frame, 0, 0);
            return;
        };
    </script>

    <script>
        // querySelector('canvas') matches the FIRST canvas (the unnamed one).
        const canvas = document.querySelector('canvas');
        const ctx = canvas.getContext('2d');
        const { width, height } = canvas;

        ctx.fillStyle = 'red';

        // Draw a red square rotated by `rotation` radians about the centre.
        function draw(rotation = 0) {
            ctx.clearRect(0, 0, 1000, 1000);
            ctx.save();
            ctx.translate(width / 2, height / 2);
            ctx.rotate(rotation);
            ctx.translate(-width / 2, -height / 2);
            ctx.beginPath();
            ctx.rect(200, 200, 200, 200);
            ctx.fill();
            ctx.restore();
        }

        function update(t) {
            draw(t / 500);
            requestAnimationFrame(update);
        }

        // Record the animated canvas; when recording stops, replay the webm
        // in the <video> and let `processor` post-process it frame by frame.
        const stream = canvas.captureStream();
        const recorder = new MediaRecorder(stream, { mimeType: 'video/webm' });

        const data = [];
        recorder.ondataavailable = function (event) {
            if (event.data && event.data.size) {
                data.push(event.data);
            }
        };
        recorder.onstop = () => {
            const url = URL.createObjectURL(new Blob(data, { type: 'video/webm' }));
            document.querySelector("#videoContainer").style.display = "block";
            document.querySelector("video").src = url;

            processor.doLoad()



        };

        recorder.start();
        update(0);

        // Record for 6 seconds, then stop (fires onstop above).
        setTimeout(() => {
            recorder.stop();
        }, 6000);


    </script>
</body>

</html>
  • 0
    点赞
  • 0
    收藏
    觉得还不错? 一键收藏
  • 0
    评论
如果要在 Android 视频录制过程中动态添加时间水印,可以使用 Camera2 API 实现。Camera2 API 可以获取到每一帧视频的数据,因此可以在每一帧数据上添加时间水印,并将处理后的数据传递给 MediaRecorder 进行录制。 以下是添加动态时间水印的示例代码: ``` private CameraDevice mCameraDevice; private CaptureRequest.Builder mPreviewBuilder; private ImageReader mImageReader; private Surface mRecorderSurface; private void initCamera() { CameraManager manager = (CameraManager) getSystemService(Context.CAMERA_SERVICE); String cameraId = CameraCharacteristics.LENS_FACING_BACK + ""; try { manager.openCamera(cameraId, new CameraDevice.StateCallback() { @Override public void onOpened(@NonNull CameraDevice cameraDevice) { mCameraDevice = cameraDevice; startPreview(); } @Override public void onDisconnected(@NonNull CameraDevice cameraDevice) { mCameraDevice.close(); mCameraDevice = null; } @Override public void onError(@NonNull CameraDevice cameraDevice, int i) { mCameraDevice.close(); mCameraDevice = null; } }, null); } catch (Exception e) { e.printStackTrace(); } } private void startPreview() { try { SurfaceTexture surfaceTexture = new SurfaceTexture(0); surfaceTexture.setDefaultBufferSize(640, 480); Surface previewSurface = new Surface(surfaceTexture); mImageReader = ImageReader.newInstance(640, 480, ImageFormat.YUV_420_888, 2); mImageReader.setOnImageAvailableListener(new ImageReader.OnImageAvailableListener() { @Override public void onImageAvailable(ImageReader reader) { Image image = reader.acquireLatestImage(); ByteBuffer buffer = image.getPlanes()[0].getBuffer(); byte[] data = new byte[buffer.remaining()]; buffer.get(data); addTimeWatermark(data, image.getWidth(), image.getHeight()); image.close(); } }, null); mRecorderSurface = mMediaRecorder.getSurface(); mPreviewBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); mPreviewBuilder.addTarget(previewSurface); mPreviewBuilder.addTarget(mImageReader.getSurface()); mCameraDevice.createCaptureSession(Arrays.asList(previewSurface, mImageReader.getSurface(), mRecorderSurface), new 
CameraCaptureSession.StateCallback() { @Override public void onConfigured(@NonNull CameraCaptureSession cameraCaptureSession) { try { cameraCaptureSession.setRepeatingRequest(mPreviewBuilder.build(), null, null); } catch (Exception e) { e.printStackTrace(); } } @Override public void onConfigureFailed(@NonNull CameraCaptureSession cameraCaptureSession) { } }, null); } catch (Exception e) { e.printStackTrace(); } } private void addTimeWatermark(byte[] data, int width, int height) { YuvImage yuvImage = new YuvImage(data, ImageFormat.NV21, width, height, null); ByteArrayOutputStream out = new ByteArrayOutputStream(); yuvImage.compressToJpeg(new Rect(0, 0, width, height), 100, out); byte[] jpegData = out.toByteArray(); Bitmap bitmap = BitmapFactory.decodeByteArray(jpegData, 0, jpegData.length); Canvas canvas = new Canvas(bitmap); canvas.drawText(getCurrentTime(), 10, 10, new Paint()); canvas.save(); ByteArrayOutputStream outputStream = new ByteArrayOutputStream(); bitmap.compress(Bitmap.CompressFormat.JPEG, 100, outputStream); byte[] outputData = outputStream.toByteArray(); try { mRecorderSurface.lockCanvas(null).drawBitmap(bitmap, 0, 0, null); mMediaRecorder.writeSampleData(mRecorderSurface, ByteBuffer.wrap(outputData), new MediaCodec.BufferInfo()); } catch (Exception e) { e.printStackTrace(); } finally { mRecorderSurface.unlockCanvasAndPost(canvas); } } ``` 需要注意的是,在添加时间水印时,要将视频帧转换为 Bitmap 对象,并在 Bitmap 上绘制时间水印。然后,将处理后的数据传递给 MediaRecorder 进行录制。同时,要在 CameraDevice.close() 方法被调用时,释放资源。

“相关推荐”对你有帮助么?

  • 非常没帮助
  • 没帮助
  • 一般
  • 有帮助
  • 非常有帮助
提交
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值