一、核心技术点:
1、通过js调用浏览器的麦克风来实时录音
2、实时录音的同时,每一帧实时转换成16K 16bit 单声道 PCM 音频
3、ws的鉴权和连接
4、听写返回结果的动态展示与替换
<template>
<!-- Voice-alarm view: start/stop controls, auto-detected coordinates,
     one textarea for the live dictation result and one for the model reply. -->
<div class="Smart-container">
<!-- Opens the WebSocket session and starts recording (wsInit). -->
<el-button type="primary" style="margin-top: 1%;width: 120px;margin-left: 30%" @click="wsInit()">开始报警
</el-button>
<!-- Stops recording; onFrameRecorded then streams the final frame. -->
<el-button type="danger" style="margin-top: 1%;width: 120px;" @click="stopRecorder()">结束报警</el-button>
<!-- NOTE(review): `ing`/`iat` look like typos of lng/lat — confirm before renaming. -->
<span style="margin-left: 10px">自动获取定位为:经度:{{ ing }}纬度:{{ iat }}</span>
<!-- Live dictation result (iatRes), updated as partial results arrive. -->
<el-input type="textarea" v-model="iatRes" autocomplete="off"
style="margin-top: 2%;margin-left: 10%;width: 80%;flex-wrap:wrap;font-size: 16px;font-weight: bold;"
:rows="10">
</el-input>
<!-- Large-model response (modelRes). -->
<el-input type="textarea" v-model="modelRes" autocomplete="off"
style="margin-top: 2%;margin-left: 10%;width: 80%;flex-wrap:wrap;font-size: 16px;font-weight: bold;"
:rows="10">
</el-input>
</div>
</template>
<script>
import * as base64 from 'js-base64'
import CryptoJS from '../js_util/crypto-js/crypto-js.js'
import parser from '../js_util/fast-xml-parser/src/parser'
import Recorder from '../../public/recorder/index.umd.js'
/**
 * Encode the contents of an ArrayBuffer as a Base64 string.
 * @param {ArrayBuffer} buffer - raw PCM frame bytes
 * @returns {string} Base64-encoded buffer contents
 */
function toBase64(buffer) {
  const bytes = new Uint8Array(buffer);
  let binary = "";
  for (const b of bytes) {
    binary += String.fromCharCode(b);
  }
  // `btoa` is a bare global in browsers, Web Workers, and Node >= 16;
  // avoiding `window.` keeps this helper usable off the main thread.
  return btoa(binary);
}
// Initialise the recorder; the argument is the directory holding the
// recorder's worker assets (see public/recorder).
const recorder = new Recorder("recorder")
recorder.onStart = () => {
  console.log("开始录音了")
}
recorder.onStop = () => {
  console.log("结束录音了")
}
// Shared WebSocket session state:
// `wsFlag` is true while a dictation session is open;
// `wsTask` holds the active WebSocket (reassigned on each wsInit()).
let wsFlag = false;
let wsTask = {};
// Stream every recorded frame to the dictation service while a session
// is open. Intermediate frames are sent with status 1 and a Base64 PCM
// payload; the final frame is status 2 with an empty payload so the
// server can finalise the session.
recorder.onFrameRecorded = ({ isLastFrame, frameBuffer }) => {
  if (!wsFlag) return; // no open session — drop the frame
  const params = {
    data: {
      status: isLastFrame ? 2 : 1,
      format: "audio/L16;rate=16000",
      encoding: "raw",
      audio: isLastFrame ? "" : toBase64(frameBuffer),
    },
  };
  if (isLastFrame) {
    console.log("发送最后一帧", params, wsFlag)
  }
  wsTask.send(JSON.stringify(params)) // 执行发送
}
export default {
  name: "Smart",
  data() {
    return {
      // Service endpoint and credentials — obtain the three keys from
      // the xfyun console before use.
      URL: 'wss://iat-api.xfyun.cn/v2/iat',
      APPID: '',
      APISecret: '',
      APIKey: '',
      // Dictation result shown in the first textarea.
      iatRes: '',
      resultTextTemp: "", // in-flight partial result (may be replaced on "rpl")
      resultText: "",     // confirmed text so far
      ing: "",            // longitude from Baidu geolocation
      iat: "",            // latitude from Baidu geolocation
      modelRes: ""        // model output shown in the second textarea
    }
  },
  mounted() {
    this.getCurrentLocation();
  },
  methods: {
    /**
     * Ask Baidu Maps for the current position and store it on the
     * component. The callback stays a plain `function` on purpose:
     * BMap binds `this` to the geolocation object inside it.
     */
    getCurrentLocation() {
      const _this = this
      _this.$message.success("正在自动获取位置")
      const geolocation = new BMap.Geolocation();
      geolocation.getCurrentPosition(function (r) {
        if (this.getStatus() === BMAP_STATUS_SUCCESS) {
          _this.$message.success('您的位置:' + r.point.lng + ',' + r.point.lat)
          _this.ing = r.point.lng;
          _this.iat = r.point.lat;
        } else {
          // On failure the status code is surfaced in the longitude field.
          _this.ing = this.getStatus();
        }
      });
    },
    // Stop the live recording; onFrameRecorded then emits the final frame.
    stopRecorder() {
      recorder.stop();
      this.$message.success("实时录音停止,报警内容将通过大模型自动识别上报!")
    },
    /**
     * Open the dictation WebSocket, send the configuration frame, then
     * start recording. Audio frames are streamed by onFrameRecorded;
     * partial results arrive in onmessage and are merged per the wpgs
     * (dynamic-correction) protocol.
     */
    async wsInit() {
      this.$message.success("请您说出报警内容~")
      const _this = this;
      if (typeof WebSocket === 'undefined') {
        console.log('您的浏览器不支持ws...')
        return;
      }
      console.log('您的浏览器支持ws!!!')
      const requestUrl = await _this.getWebSocketUrl()
      wsTask = new WebSocket(requestUrl);
      wsTask.onopen = function () {
        console.log('ws已经打开...')
        wsFlag = true
        // First frame: session configuration (status 0), no audio yet.
        const params = {
          common: {
            app_id: _this.APPID
          },
          business: {
            language: "zh_cn",
            domain: "iat",
            accent: "mandarin",
            vad_eos: 6000, // end the session after 6 s of silence
            dwa: "wpgs",   // enable dynamic correction (pgs results)
          },
          data: {
            status: 0,
            format: "audio/L16;rate=16000",
            encoding: "raw",
          },
        };
        console.log("发送第一帧数据...")
        wsTask.send(JSON.stringify(params)) // 执行发送
        // Start recording; subsequent frames flow through onFrameRecorded.
        console.log("开始录音")
        recorder.start({
          sampleRate: 16000,
          frameSize: 1280,
        });
      }
      wsTask.onmessage = function (message) {
        const jsonData = JSON.parse(message.data);
        if (jsonData.data && jsonData.data.result) {
          const data = jsonData.data.result;
          // Concatenate the recognised words of this result slice.
          let str = "";
          const ws = data.ws;
          for (let i = 0; i < ws.length; i++) {
            str = str + ws[i].cw[0].w;
          }
          // With wpgs enabled the server sends `pgs`: "apd" appends the
          // pending text to the final result, "rpl" replaces the pending
          // part with this slice.
          if (data.pgs) {
            if (data.pgs === "apd") {
              _this.resultText = _this.resultTextTemp;
            }
            _this.resultTextTemp = _this.resultText + str;
          } else {
            _this.resultText = _this.resultText + str;
          }
          _this.iatRes = _this.resultTextTemp || _this.resultText || "";
        }
        // Server signalled the normal end of the session (status 2).
        if (jsonData.code === 0 && jsonData.data?.status === 2) {
          recorder.stop();
          _this.$message.success("检测到您6秒没说话,报警自动结束上报!")
          wsTask.close();
          wsFlag = false
        }
        // Non-zero code: protocol error — close and log the payload.
        if (jsonData.code !== 0) {
          wsTask.close();
          wsFlag = false
          console.error(jsonData);
        }
      }
      wsTask.onclose = function () {
        console.log('ws已关闭...')
      }
      wsTask.onerror = function () {
        console.log('发生错误...')
      }
    },
    /**
     * Build the authenticated WebSocket URL per the xfyun signing
     * scheme: HMAC-SHA256 over "host\ndate\nrequest-line" with the
     * APISecret, Base64-wrapped into an `authorization` parameter.
     * @returns {Promise<string>} resolved request URL
     */
    getWebSocketUrl() {
      const host = "iat-api.xfyun.cn";
      const apiKeyName = "api_key";
      // toUTCString() replaces the deprecated toGMTString() alias;
      // the output format is identical.
      const date = new Date().toUTCString();
      const algorithm = "hmac-sha256";
      const headers = "host date request-line";
      const signatureOrigin = `host: ${host}\ndate: ${date}\nGET /v2/iat HTTP/1.1`;
      const signatureSha = CryptoJS.HmacSHA256(signatureOrigin, this.APISecret);
      const signature = CryptoJS.enc.Base64.stringify(signatureSha);
      const authorizationOrigin =
        `${apiKeyName}="${this.APIKey}", algorithm="${algorithm}", headers="${headers}", signature="${signature}"`;
      const authorization = base64.encode(authorizationOrigin);
      const url = `${this.URL}?authorization=${authorization}&date=${encodeURI(date)}&host=${host}`;
      // Keep the Promise return type for backwards compatibility with
      // `await`/`.then()` callers, without the new-Promise wrapper.
      return Promise.resolve(url);
    },
  }
};
</script>
<style scoped>
/* Scoped CSS appends the component's data attribute to the last selector,
   so this rule never matched Element UI's internal .el-input__inner node.
   ::v-deep pierces the scope boundary so the colour actually applies. */
::v-deep .el-input.is-disabled .el-input__inner {
  color: #333;
}
</style>