Unity对接科大讯飞实时语音转写WebAPI(Windows平台)(二)

上一篇中,用到的是Unity自带的Microphone类,该类对中文设备名支持不友好,存在很多玄学问题(找不到麦克风)。

上一篇链接:Unity对接科大讯飞实时语音转写WebAPI(Windows平台)_unity webgl对接讯飞实时语音听写-CSDN博客

本篇将使用NAudio.Wave.WaveIn类完成麦克风语音数据的获取,属性BufferMilliseconds可以控制数据时间间隔;发送依然使用WebSocketSharp.WebSocket。直接贴代码

using System;
using System.Collections;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Security.Cryptography;
using System.Text;

using LitJson;
using NAudio.Wave;
using Newtonsoft.Json;
using UnityEngine;
using WebSocketSharp;

/// <summary>
/// Streams microphone audio (captured with NAudio's WaveIn) to the iFlytek (科大讯飞)
/// real-time speech transcription WebSocket API and reports recognized text.
/// NAudio and WebSocketSharp raise their events on worker threads, so all data is
/// funneled through thread-safe queues and processed on the Unity main thread in Update().
/// </summary>
public class SpeechHelper2 : MonoBehaviour
{
    private WaveIn audioClip;                       // microphone capture; BufferMilliseconds controls chunk interval
    WebSocket speechWebSocket;
    private System.Action<string> resultCallback;   // receives each FINAL recognition segment
    private System.Action errorCallback;            // invoked when the service returns a non-zero code
    // ConcurrentQueue: producers are the NAudio / WebSocketSharp worker threads,
    // the single consumer is Update() on the Unity main thread. A plain Queue<T>
    // here is a data race.
    private static ConcurrentQueue<string> messageQueue = new ConcurrentQueue<string>();
    private static ConcurrentQueue<byte[]> requestQueue = new ConcurrentQueue<byte[]>();

    /// <summary>Registers the result/error callbacks. Call once before StartSpeech().</summary>
    public void InitSpeechHelper(System.Action<string> textCallback, Action _errorCallback)
    {
        resultCallback = textCallback;
        errorCallback = _errorCallback;
    }

    // NAudio capture callback (worker thread). NAudio REUSES e.Buffer between
    // callbacks and only the first e.BytesRecorded bytes are valid, so the data
    // must be copied before it is queued for asynchronous sending.
    private void AudioClip_DataAvailable(object sender, WaveInEventArgs e)
    {
        byte[] chunk = new byte[e.BytesRecorded];
        Buffer.BlockCopy(e.Buffer, 0, chunk, 0, e.BytesRecorded);
        requestQueue.Enqueue(chunk);
    }

    /// <summary>
    /// Starts microphone capture and opens the recognition WebSocket.
    /// Returns false when a previous session is still connected or no microphone exists.
    /// </summary>
    public bool StartSpeech()
    {
        if (speechWebSocket != null && speechWebSocket.ReadyState == WebSocketState.Open)
        {
            MessageMng.Ins.ShowFloatTip("请等待上次识别结束");
            return false;
        }
        if (WaveIn.DeviceCount <= 0)
        {
            MessageMng.Ins.ShowTipMsg("找不到麦克风!");
            return false;
        }
        // Drop any stale data left over from the previous session.
        while (messageQueue.TryDequeue(out _)) { }
        while (requestQueue.TryDequeue(out _)) { }
        audioClip = new WaveIn();
        // 16 kHz / 16-bit / mono PCM is the format required by the rtasr endpoint.
        WaveFormat waveFormat = new WaveFormat(16000, 16, 1);
        // 80 ms buffers -> chunks larger than the nominal 1280 bytes; the service accepts this.
        audioClip.BufferMilliseconds = 80;
        audioClip.WaveFormat = waveFormat;
        audioClip.DataAvailable += AudioClip_DataAvailable;
        audioClip.StartRecording();
        ConnectSpeechWebSocket();
        return true;
    }

    /// <summary>Stops recording and notifies the service that the audio stream ended.</summary>
    public void StopSpeech()
    {
        try
        {
            Debug.Log("识别结束,停止录音");
            if (audioClip != null)
            {
                // Unsubscribe BEFORE StopRecording(): stopping while the handler is
                // still attached can deadlock/freeze (see the 2024-08 note below).
                audioClip.DataAvailable -= AudioClip_DataAvailable;
                audioClip.StopRecording();
                audioClip.Dispose();
                audioClip = null;
            }
            SendEndMsg(null);
        }
        catch (Exception ex)
        {
            Debug.LogError(ex.Message);
        }
    }

    // Builds the signed URL, wires the socket events and connects asynchronously.
    void ConnectSpeechWebSocket()
    {
        try
        {
            speechWebSocket = new WebSocket(GetWebSocketUrl());
        }
        catch (Exception ex)
        {
            UnityEngine.Debug.LogError(ex.Message);
            return;
        }
        speechWebSocket.OnOpen += (sender, e) =>
        {
            Debug.Log("OnOpen");
        };
        speechWebSocket.OnClose += OnWebSocketClose;
        speechWebSocket.OnMessage += OnInitMessage;
        speechWebSocket.OnError += OnError;
        speechWebSocket.ConnectAsync();
    }

    void OnWebSocketClose(object sender, CloseEventArgs e)
    {
        Debug.Log("OnWebSocketClose");
    }

    // WebSocket worker thread: only queue the raw payload; JSON parsing and the
    // user callbacks happen on the main thread (MainThreadOnMessage via Update).
    void OnInitMessage(object sender, MessageEventArgs e)
    {
        UnityEngine.Debug.Log("WebSocket数据返回:" + e.Data);
        messageQueue.Enqueue(e.Data);
    }

    // Main-thread handler for one service message. Non-zero code => error;
    // action == "result" carries a recognition payload in response.data.
    private void MainThreadOnMessage(string message)
    {
        try
        {
            XFResponse response = JsonConvert.DeserializeObject<XFResponse>(message);
            if (0 != response.code)
            {
                errorCallback?.Invoke();
                MessageMng.Ins.ShowFloatTip("连接出错,请重试!");
                return;
            }
            if (response.action.Equals("result"))
            {
                var result = ParseXunfeiRecognitionResult(response.data);
                // Intermediate hypotheses are ignored; only final segments are reported.
                if (result.IsFinal)
                {
                    resultCallback?.Invoke(result.Text);
                }
            }
        }
        catch (Exception ex)
        {
            Debug.LogError(ex.Message);
        }
    }

    void OnError(object sender, WebSocketSharp.ErrorEventArgs e)
    {
        // Worker thread: only log here; user-facing error handling runs on the
        // main thread when the service reports a non-zero code.
        UnityEngine.Debug.Log("WebSocket:发生错误:" + e.Message);
    }

    /// <summary>
    /// Parses one rtasr "data" payload. Words live at data.cn.st.rt[].ws[].cw[].w
    /// and are concatenated in order; st.ed == "0" marks an intermediate hypothesis,
    /// anything else marks a final segment.
    /// </summary>
    public SpeechRecognitionResult ParseXunfeiRecognitionResult(string dataJson)
    {
        StringBuilder builder = new StringBuilder();
        SpeechRecognitionResult res = new SpeechRecognitionResult();
        try
        {
            JsonData data = JsonMapper.ToObject(dataJson);
            JsonData cn = data["cn"];
            JsonData st = cn["st"];
            res.IsFinal = !st["ed"].ToString().Equals("0");
            JsonData rtArry = st["rt"];
            foreach (JsonData rtObject in rtArry)
            {
                JsonData wsArr = rtObject["ws"];
                foreach (JsonData wsObject in wsArr)
                {
                    JsonData cwArr = wsObject["cw"];
                    foreach (JsonData cwObject in cwArr)
                    {
                        builder.Append(cwObject["w"].ToString());
                    }
                }
            }
        }
        catch (Exception ex)
        {
            Debug.LogError(ex.Message);
        }
        res.Text = builder.ToString();
        res.type = ASRResultType.TEXT;
        return res;
    }

    // Sends one audio chunk (main thread). Chunks queued before the handshake
    // completes are intentionally dropped.
    void SendData(byte[] voiceData)
    {
        // Null check must come first: SendData can run before ConnectSpeechWebSocket
        // has assigned the socket.
        if (speechWebSocket == null || speechWebSocket.ReadyState != WebSocketState.Open)
        {
            return;
        }
        try
        {
            speechWebSocket.SendAsync(voiceData, success =>
            {
                if (!success)
                {
                    UnityEngine.Debug.LogWarning("语音数据发送失败");
                }
            });
        }
        catch (Exception ex)
        {
            Debug.LogError(ex.Message);
        }
    }

    // Tells the service the audio stream is finished ({"end": true} per the rtasr
    // protocol); the server then flushes the last result and closes the session.
    void SendEndMsg(System.Action callback)
    {
        string endMsg = "{\"end\": true}";
        byte[] data = Encoding.UTF8.GetBytes(endMsg);
        try
        {
            if (speechWebSocket != null && speechWebSocket.IsAlive)
            {
                speechWebSocket.SendAsync(data, success =>
                {
                    if (success)
                    {
                        UnityEngine.Debug.Log("WebSocket:发送END成功:" + data.Length);
                    }
                    else
                    {
                        UnityEngine.Debug.Log("WebSocket:发送END失败:");
                    }
                    callback?.Invoke();
                });
            }
        }
        catch (Exception ex)
        {
            Debug.LogError(ex.Message);
        }
    }

    // Handshake URL per the rtasr docs: signa = Base64(HmacSHA1(MD5(appid + ts), apiKey)).
    private string GetWebSocketUrl()
    {
        string appid = "appid";          // TODO: replace with your console appid
        string ts = GetCurrentUnixTimestampMillis().ToString();
        string baseString = appid + ts;
        string md5 = GetMD5Hash(baseString);
        string sha1 = CalculateHmacSha1(md5, "appsecret");   // TODO: replace with your apiKey
        // Base64 may contain '+', '/' and '=' — it MUST be URL-encoded or the
        // handshake fails intermittently depending on the generated signature.
        string signa = Uri.EscapeDataString(sha1);
        string url = string.Format("ws://rtasr.xfyun.cn/v1/ws?appid={0}&ts={1}&signa={2}", appid, ts, signa);
        UnityEngine.Debug.Log(url);
        return url;
    }

    // Unix timestamp in SECONDS — the rtasr API expects seconds despite this
    // method's historical name. Computed against the UTC epoch so the local
    // timezone offset cannot skew the result.
    private long GetCurrentUnixTimestampMillis()
    {
        DateTime unixEpoch = new DateTime(1970, 1, 1, 0, 0, 0, DateTimeKind.Utc);
        return (long)(DateTime.UtcNow - unixEpoch).TotalSeconds;
    }

    /// <summary>Lower-case hex MD5 of the input. UTF-8 is used explicitly:
    /// Encoding.Default is machine-dependent and would break the signature
    /// on non-default codepages.</summary>
    public string GetMD5Hash(string input)
    {
        using (MD5 md5Hasher = MD5.Create())
        {
            byte[] data = md5Hasher.ComputeHash(Encoding.UTF8.GetBytes(input));
            StringBuilder sBuilder = new StringBuilder();
            for (int i = 0; i < data.Length; i++)
            {
                sBuilder.Append(data[i].ToString("x2"));
            }
            return sBuilder.ToString();
        }
    }

    /// <summary>Base64-encoded HMAC-SHA1 of <paramref name="data"/> keyed with <paramref name="key"/>.</summary>
    public string CalculateHmacSha1(string data, string key)
    {
        using (HMACSHA1 hmac = new HMACSHA1(Encoding.UTF8.GetBytes(key)))
        {
            byte[] hashBytes = hmac.ComputeHash(Encoding.UTF8.GetBytes(data));
            return Convert.ToBase64String(hashBytes);
        }
    }

    private void Update()
    {
        // Pump both queues on the main thread: Unity APIs and the user callbacks
        // must not be touched from the NAudio / WebSocket worker threads.
        while (messageQueue.TryDequeue(out string message))
        {
            MainThreadOnMessage(message);
        }
        while (requestQueue.TryDequeue(out byte[] voiceData))
        {
            SendData(voiceData);
        }
    }

    private void OnApplicationQuit()
    {
        // Guard: recording may never have been started (audioClip still null).
        if (audioClip != null)
        {
            audioClip.DataAvailable -= AudioClip_DataAvailable;
            audioClip.StopRecording();
            audioClip.Dispose();
            audioClip = null;
        }
    }
}

BufferMilliseconds我这里设置的80,数据长度会大于1280,讯飞也是可以完成识别的。

2024.08 更新以下问题:

以上方法使用过程中,出现开始和结束的时候很卡,甚至进程直接无响应,网上搜到的原因大致意思是注销委托和停止录制同时进行会出现冲突。

优化StartSpeech和StopSpeech如下:

    // Coroutine variant of StartSpeech (2024-08 update): device creation,
    // StartRecording and the WebSocket connect are separated by frame boundaries
    // (yield return null). Per the note above the original combined version could
    // freeze the process at start/stop; the yield ordering here is the fix, so
    // do not merge these steps back into a single frame.
    IEnumerator AsyncStartSpeech()
    {
        audioClip = new WaveIn();
        // 16 kHz / 16-bit / mono — format required by the Xunfei rtasr endpoint.
        WaveFormat waveFormat = new WaveFormat(16000, 16, 1);
        audioClip.BufferMilliseconds = 80;
        audioClip.WaveFormat = waveFormat;
        audioClip.DataAvailable += AudioClip_DataAvailable;
        yield return null;
        audioClip.StartRecording();
        yield return null;
        ConnectSpeechWebSocket();
    }
    // Coroutine variant of StopSpeech (2024-08 update). The delegate is
    // unsubscribed one frame BEFORE StopRecording(), and Dispose() happens a
    // frame later: unsubscribing and stopping in the same frame caused the
    // conflict/hang described above, so preserve this exact ordering.
    IEnumerator AsyncStopSpeech()
    {
        if (audioClip == null)
            yield break;
        audioClip.DataAvailable -= AudioClip_DataAvailable;
        yield return null;
        audioClip.StopRecording();
        yield return null;
        audioClip.Dispose();
        Debug.Log("AsyncStopSpeech");
        // Tell the service the audio stream has ended so it flushes the last result.
        SendEndMsg(null);
    }

  • 5
    点赞
  • 3
    收藏
    觉得还不错? 一键收藏
  • 0
    评论
回答: 在Unity中使用科大讯飞语音的过程中,有一些常见问题需要注意。首先,关于路径的问题,需要确保ivw_res_path路径中不包含中文字符,否则会报错10102。此外,路径中的斜杠需要替换为反斜杠,否则也会报错10102。\[1\] 其次,关于QIVWAudioWrite接口需要调用两次的问题,这是因为在写入音频数据时,需要先调用一次MSP_AUDIO_SAMPLE_CONTINUE表示音频数据的中间部分,然后再调用一次MSP_AUDIO_SAMPLE_LAST表示音频数据的最后一部分。这样做是为了确保音频数据的完整性。\[2\] 最后,关于QIVWRegisterNotify接口后面需要使用Sleep的问题,这是因为在注册通知后,需要给科大讯飞语音一些时间来处理注册的操作。使用Sleep函数可以暂停程序的执行,让科大讯飞语音有足够的时间来完成注册。具体的等待时间可以根据实际情况进行调整。\[2\] 总结起来,在Unity中使用科大讯飞语音时,需要注意路径中不包含中文字符,斜杠需要替换为反斜杠;在写入音频数据时,需要调用两次接口来表示音频数据的中间部分和最后一部分;在注册通知后,需要使用Sleep函数给科大讯飞语音足够的时间来完成注册操作。 参考资料: \[1\]\[2\] [Unity 科大讯飞语音唤醒](https://blog.csdn.net/weixin_42208093/article/details/107364588)
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值