// Copy the .h/.m files directly into Unity's Plugins/iOS folder (no subfolders).
// The recording and Baidu SDK library files must be added again after Unity
// exports the iOS Xcode project.
#import <Foundation/Foundation.h>
#import "XHVoiceRecordHelper.h"
#import "BDVRFileRecognizer.h"
// Maximum recording duration in seconds (applied to XHVoiceRecordHelper.maxRecordTime).
static const NSTimeInterval kVoiceRecorderTotalTime = 60.0;
// Baidu voice-recognition credentials.
// SECURITY NOTE(review): credentials are hard-coded in source and ship in the
// binary; consider loading them from configuration instead.
#define API_KEY @"1pMskN3kIS6G8ccU4TKXi30Z"
#define SECRET_KEY @"fglBl62npKo6Q6VDX55BD4efZGwZkW48"
/// Unity bridge for recording audio and sending it to Baidu's speech
/// recognition service. Exposes plain C entry points for Unity's native
/// call layer plus a minimal Objective-C interface; a shared instance acts
/// as the MVoiceRecognitionClientDelegate.
@interface VoiceRecordPlug : NSObject<MVoiceRecognitionClientDelegate>
//@property (nonatomic, strong) XHVoiceRecordHelper *voiceRecordHelper;
/// Starts recording to Documents/MySound.wav (creates the shared plug and
/// recorder on first use).
void startVoiceRecord();
/// Stops recording and uploads the file to Baidu for recognition.
/// NOTE(review): name is missing an 'e' ("Voic") — kept as-is because
/// external callers (Unity) bind to this exact symbol.
void stopVoicRecord();
/// Instance wrapper around startVoiceRecord().
-(void)start;
/// Creates the shared VoiceRecordPlug instance used as the recognition delegate.
+(void)autoInit;
@end
//
// VoiceRecordPlug.m
// VoiceRecord
//
// Created by bin.li on 15-3-3.
// Copyright (c) 2015年 bin.li. All rights reserved.
//
#import "VoiceRecordPlug.h"
// Shared plug instance; receives Baidu recognition callbacks as the delegate.
static VoiceRecordPlug *plug = nil;
// Lazily-created recorder shared by the C entry points below.
static XHVoiceRecordHelper *voiceRecordHelper;
@implementation VoiceRecordPlug
//- (id)init {
// NSLog(@"cute init..");
// self = [super init];
// plug = self;
// return self;
//}
/// Creates the shared VoiceRecordPlug used as the Baidu recognition delegate.
/// Idempotent: an existing instance is kept rather than replaced (replacing it
/// would orphan any in-flight recognition whose delegate still points at the
/// old object).
+(void)autoInit
{
    if (!plug) {
        plug = [[VoiceRecordPlug alloc] init];
    }
}
/// Instance-level convenience wrapper; forwards to the C entry point.
-(void)start
{
startVoiceRecord();
}
/// C entry point called from Unity: starts recording to Documents/MySound.wav,
/// lazily creating the shared plug and recorder on first use. Recording stops
/// automatically after kVoiceRecorderTotalTime seconds.
void startVoiceRecord()
{
    if (!plug) {
        [VoiceRecordPlug autoInit];
    }
    NSLog(@"start record");

    // Build Documents/MySound.wav with path APIs instead of hand-rolled string
    // concatenation. The original used stringByAppendingFormat: with a plain
    // string, which would misparse any '%' as a format specifier.
    NSString *documentsDir = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) firstObject];
    NSString *recorderPath = [documentsDir stringByAppendingPathComponent:@"MySound.wav"];

    if (!voiceRecordHelper)
    {
        NSLog(@"voiceRecordHelper init..");
        voiceRecordHelper = [[XHVoiceRecordHelper alloc] init];
        // Use the shared limit constant instead of duplicating the literal 60.
        voiceRecordHelper.maxRecordTime = kVoiceRecorderTotalTime;
        voiceRecordHelper.maxTimeStopRecorderCompletion = ^{
            // Maximum duration reached: stop and hand off to recognition.
            [voiceRecordHelper stopRecordingWithStopRecorderCompletion:^{
                NSLog(@"111111");
                finishRecord();
            }];
        };
        voiceRecordHelper.peakPowerForChannel = ^(float peakPowerForChannel) {
            // Level metering unused for now; hook kept for a future volume HUD.
        };
    }
    [voiceRecordHelper startRecordingWithPath:recorderPath StartRecorderCompletion:^{ // recording persisted to recorderPath
        NSLog(@"22222");
    }];
}
/// C entry point called from Unity: stops recording and submits the file to
/// Baidu for recognition. (Name typo "Voic" preserved — external callers bind
/// to this exact symbol.)
void stopVoicRecord()
{
NSLog(@"stop record");
finishRecord();
}
/// Stops the recorder and submits Documents/MySound.wav to Baidu's file
/// recognizer; results are delivered to `plug` via the
/// MVoiceRecognitionClientDelegate callbacks below.
void finishRecord()
{
    NSLog(@"cute finishRecord()..");
    // Same path as startVoiceRecord(), built with path APIs rather than
    // stringByAppendingFormat: (which treats '%' in the name as a specifier).
    NSString *documentsDir = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) firstObject];
    NSString *recorderPath = [documentsDir stringByAppendingPathComponent:@"MySound.wav"];
    [voiceRecordHelper stopRecordingWithStopRecorderCompletion:^{
        NSLog(@"begin connect baidu.");
        [[BDVoiceRecognitionClient sharedInstance] setApiKey:API_KEY withSecretKey:SECRET_KEY];
        // Upload the recorded file for server-side recognition; `plug` receives
        // results through the delegate callbacks.
        BDVRFileRecognizer *fileRecognizer = [[BDVRFileRecognizer alloc] initFileRecognizerWithFilePath:recorderPath sampleRate:16000 property:EVoiceRecognitionPropertyInput delegate:plug];
        int status = [fileRecognizer startFileRecognition];
        NSLog(@"begin connect baidu. status = %d", status);
        // Surface startup failures instead of silently ignoring them.
        if (status != EVoiceRecognitionStartWorking) {
            NSLog(@"file recognition failed to start, status = %d", status);
        }
    }];
}
/// Playback entry point — not yet implemented.
// TODO(review): play back the last recording (e.g. via AVAudioPlayer).
void playRecord()
{
// voiceRecordHelper
}
#pragma mark - MVoiceRecognitionClientDelegate (recognition status callbacks)
/// Baidu recognition status callback.
/// @param aStatus One of the EVoiceRecognitionClientWorkStatus* values.
/// @param aObj    Status-dependent payload; for Finish/FlushData it is an
///                array of candidate results (shape per-case below).
- (void)VoiceRecognitionClientWorkStatus:(int) aStatus obj:(id)aObj
{
    NSLog(@"enter.. VoiceRecognitionClientWorkStatus");
    switch (aStatus) {
        case EVoiceRecognitionClientWorkStatusFinish:
        {
            // Final result from the server, delivered as an array in aObj.
            // Any previously displayed partial text should be cleared first.
            NSLog(@"EVoiceRecognitionClientWorkStatusFinish");
            if ([[BDVoiceRecognitionClient sharedInstance] getRecognitionProperty] != EVoiceRecognitionPropertyInput)
            {
                // Non-input modes: each element is one candidate; list them.
                NSMutableString *tmpString = [NSMutableString string];
                for (id candidate in (NSArray *)aObj) {
                    [tmpString appendFormat:@"%@\r\n", candidate];
                }
                NSLog(@"result: %@", tmpString);
            } else {
                // Input mode: aObj is an array of arrays; each inner array holds
                // candidate dictionaries (word -> confidence). Join the top
                // candidate of every segment into one sentence.
                NSMutableString *sentenceString = [NSMutableString string];
                for (NSArray *result in aObj)
                {
                    // firstObject is nil-safe on empty arrays (the original
                    // objectAtIndex:0 would have thrown).
                    NSDictionary *dic = [result firstObject];
                    NSString *candidateWord = [[dic allKeys] firstObject];
                    if (candidateWord) {
                        [sentenceString appendString:candidateWord];
                    }
                }
                NSLog(@"result: %@", sentenceString);
                // UnitySendMessage("MainManager",uFun,[sentenceString UTF8String]); // notify Unity: invoke uFun on MainManager with the recognized sentence
            }
            break;
        }
        case EVoiceRecognitionClientWorkStatusFlushData:
        {
            // Partial (streaming) result for continuous on-screen updates;
            // the newest text is the first element. Clear prior partial text
            // on each delivery to avoid duplication.
            NSMutableString *tmpString = [NSMutableString string];
            [tmpString appendFormat:@"%@", [aObj objectAtIndex:0]];
            NSLog(@"%@", tmpString);
            break;
        }
        case EVoiceRecognitionClientWorkStatusError:
        {
            NSLog(@"EVoiceRecognitionClientWorkStatusError");
            break; // was missing: silently fell through into default
        }
        default:
            break;
    }
}
/// Baidu recognition error callback: maps the error status to a
/// human-readable message and logs it.
/// @param aStatus    Primary EVoiceRecognitionClientError* status code.
/// @param aSubStatus Secondary detail code (currently unused).
- (void)VoiceRecognitionClientErrorStatus:(int) aStatus subStatus:(int)aSubStatus
{
    NSLog(@"VoiceRecognitionClientErrorStatus");
    // Status -> message lookup table (replaces the switch; same mapping).
    NSDictionary<NSNumber *, NSString *> *messageByStatus = @{
        @(EVoiceRecognitionClientErrorStatusNoSpeech):           @"你怎么不说话呀!",
        @(EVoiceRecognitionClientErrorStatusShort):              @"你说话声音太短啦!",
        @(EVoiceRecognitionClientErrorStatusChangeNotAvailable): @"录音设备不可用啊!",
        @(EVoiceRecognitionClientErrorStatusIntrerruption):      @"录音中断咯!",
        @(EVoiceRecognitionClientErrorNetWorkStatusUnusable):    @"网络不可用啦!",
        @(EVoiceRecognitionClientErrorNetWorkStatusError):       @"网络发生错误啦!",
        @(EVoiceRecognitionClientErrorNetWorkStatusTimeOut):     @"请求超时咯!",
    };
    NSString *str = messageByStatus[@(aStatus)] ?: @" 请认真说话呀! ";
    NSLog(@"error = %@",str);
    // UnitySendMessage("MainManager",ufnc,[str UTF8String]);
}
@end