背景
C++的同事先写好关于开始录制视频的native接口——startJniRecord
Java思路
1、随便写个测试接口,将数据经过一定的清洗,通过webSocket发送给前端
@Operation(summary = "测试")
@GetMapping("/startFaceRecordingTest")
public void startFaceRecordingTest() {
    // Restart cleanly: stop any native recording session that may still be running.
    videoCapture.stopJniRecord();
    videoCapture.startJniRecord((byte[] frame, int width, int height) -> {
        // Push each captured frame to the front end over the WebSocket session.
        if (WebSocketService.session == null) {
            return;
        }
        byte[] jpegFrame = VideoCapture.convertToJpeg(frame, width, height);
        String encodedFrame = Base64Utils.encode(jpegFrame);
        WebSocketMessageUtil.sendMessage(WebSocketService.session, encodedFrame);
    });
}
/**
 * Sends a text message over the given WebSocket session, serializing
 * concurrent writers on the session object.
 *
 * @param session the WebSocket session to write to; ignored when null
 * @param message the text payload to send
 */
public static void sendMessage(Session session, String message) {
    if (ObjectUtil.isNotNull(session)) {
        try {
            // Synchronize on the session: the basic remote endpoint must not
            // be written to by multiple threads at the same time.
            synchronized (session) {
                session.getBasicRemote().sendText(message);
            }
        } catch (IOException e) {
            // FIX: pass the exception itself so the stack trace is preserved;
            // the original logged only e.getMessage(), which may even be null.
            log.error("Failed to send WebSocket message", e);
        }
    }
}
2、告知前端同事你的webSocket地址
ws://127.0.0.1:8085/api/monitor/websocket
前端思路
使用webSocket接收后端传过来的base64数据
预先准备:
1、建立webSocket连接,并接收消息
websocket = new WebSocket("ws://127.0.0.1:8085/api/monitor/websocket");
websocket.onmessage = function (msg) {
  // Each message carries one base64-encoded JPEG frame; render it on the <img>.
  const imgElement = document.getElementById('player');
  imgElement.src = 'data:image/jpeg;base64,' + msg.data;
};
2、写一个方法每次点击开启视频命令按钮时,触发事件
/**
 * Sends the "start face recording" command to the back end.
 * Fire-and-forget GET; the resulting frames arrive over the WebSocket.
 */
function sendCmdStartFaceRecording() {
  const xmlHttp = new XMLHttpRequest();
  xmlHttp.open('GET', 'http://127.0.0.1:8085/api/monitor/command/startFaceRecordingTest');
  // FIX: dropped the 'Content-Type: application/json' header — a GET has no
  // body, and that header needlessly forces a CORS preflight on cross-origin calls.
  xmlHttp.onerror = function () {
    // FIX: surface transport failures instead of failing silently.
    console.error('startFaceRecordingTest request failed');
  };
  xmlHttp.send();
}
3、写前端展示信息
<div>
<button onclick="sendCmdStartFaceRecording()"> 开启视频命令</button>
<img id="player" style="width:704px;height:576px"/>
</div>
处理base64图片连续播放形成视频步骤:
步骤1 先写一个放置图片位置的 img 标签
<img id="player" style="width:704px;height:576px"/>
步骤2 获取dom元素
const player=document.getElementById('player')
步骤3 赋值资源使其播放
player.src='data:image/jpeg;base64,'+后端传过来的base64数据;
完整代码
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<title>test</title>
</head>
<body>
<div>
等待收到消息
</div>
<div>
<div id="message"></div>
<button onclick="sendCmdStartFaceRecording()"> 开启视频命令</button>
<img id="player" style="width:704px;height:576px"/>
</div>
<script type="text/javascript">
// Single page-wide WebSocket handle, populated by connect().
var websocket = null;
// FIX: the original assigned an IIFE's (undefined) return value to an unused
// `init` variable; call connect() directly — function declarations are hoisted,
// so this is safe even though connect is defined below.
connect();
// Opens the WebSocket to the back end and wires up lifecycle handlers.
function connect() {
  // Guard clause: bail out on browsers without WebSocket support.
  if (!("WebSocket" in window)) {
    alert("not find socket");
    return;
  }
  websocket = new WebSocket("ws://127.0.0.1:8085/api/monitor/websocket");
  websocket.onopen = function (event) {
    setMessageHtml("onopen");
  };
  websocket.onclose = function (event) {
    setMessageHtml("onclose");
  };
  websocket.onmessage = function (event) {
    // Each message is one base64-encoded JPEG frame; show it on the <img>.
    const player = document.getElementById('player');
    player.src = 'data:image/jpeg;base64,' + event.data;
  };
}
/**
 * Sends the "start face recording" command to the back end.
 * Fire-and-forget GET; the resulting frames arrive over the WebSocket.
 */
function sendCmdStartFaceRecording() {
  const xmlHttp = new XMLHttpRequest();
  xmlHttp.open('GET', 'http://127.0.0.1:8085/api/monitor/command/startFaceRecordingTest');
  // FIX: dropped the 'Content-Type: application/json' header — a GET has no
  // body, and that header needlessly forces a CORS preflight on cross-origin calls.
  xmlHttp.onerror = function () {
    // FIX: surface transport failures instead of failing silently.
    console.error('startFaceRecordingTest request failed');
  };
  xmlHttp.send();
}
// Appends one line of status text to the #message area.
// NOTE(review): appends raw markup via innerHTML — fine for the fixed
// "onopen"/"onclose" strings used here, but not safe for untrusted input.
function setMessageHtml(innerHTML) {
  const messageDiv = document.getElementById("message");
  messageDiv.innerHTML += innerHTML + "<br/>";
}
</script>
</body>
</html>