科大讯飞是国内做得比较好的语音开发 SDK。首先在其官网上注册账号并完成一系列申请流程之后,即可获得 appid 的值。
// App-delegate entry point: the iFlytek SDK requires createUtility to run
// before any other speech service is used.
- (BOOL)application:(UIApplication *)application didFinishLaunchingWithOptions:(NSDictionary *)launchOptions
{
    // "xxxxxxx" is your own appid obtained from the iFlytek developer site;
    // timeout is the SDK network timeout in milliseconds.
    NSString *initString = [[NSString alloc] initWithFormat:@"appid=%@,timeout=%@", @"xxxxxxx", @"20000"];
    // Must be called before any speech service is started.
    [IFlySpeechUtility createUtility:initString];
    // Fix: the method is declared to return BOOL but the original had no
    // return statement, which does not compile. (Also fixed the parameter
    // name typo "launchOptionsdang".)
    return YES;
}
完成上述启动设置之后,便要在视图控制器中定义相关属性并对其进行初始化。
#pragma mark - Speech-recognition properties and controls

// NSString has a mutable subclass, so these string properties use `copy`
// (fix: the originals used `strong`, which lets a caller pass an
// NSMutableString and mutate our state behind our back).
@property (nonatomic, copy) NSString *pcmFilePath;                    // path of the recorded audio file
@property (nonatomic, strong) IFlyRecognizerView *iflyRecognizerView; // recognizer object with built-in UI
@property (nonatomic, strong) IFlyDataUploader *uploader;             // data-upload object
@property (nonatomic, strong) PopupView *popUpView;                   // popup view
@property (nonatomic, copy) NSString *result;                         // accumulated dictation result
#pragma mark - Speech section

// View lifecycle: start a dictation session as soon as the view loads.
- (void)viewDidLoad
{
    // Fix: the original called [super viewDidload] (lowercase "l"), a
    // selector that does not exist on UIViewController and would fail.
    [super viewDidLoad];
    [self Call_JS_Voice];
}
// Re-applies recognizer parameters and resets the dictation result each
// time the view is about to appear.
- (void)viewWillAppear:(BOOL)animated
{
    // Fix: lifecycle overrides must call super; the original omitted it.
    [super viewWillAppear:animated];
    [self initRecognizer];      // configure the speech recognizer
    _result = [NSString new];   // reset the accumulated dictation result
}
// Tears down the in-flight recognition when the view goes away so the
// delegate is not messaged after the controller is gone.
- (void)viewDidDisappear:(BOOL)animated
{
    // Fix: lifecycle overrides must call super; the original omitted it.
    [super viewDidDisappear:animated];
    [_iflyRecognizerView cancel];           // cancel any in-progress recognition
    [_iflyRecognizerView setDelegate:nil];  // stop delegate callbacks to this (disappearing) controller
    [_iflyRecognizerView setParameter:@"" forKey:[IFlySpeechConstant PARAMS]];
}
/**
 Lazily creates the UI recognizer view and applies the dictation parameters.
 Safe to call repeatedly: the view is created only on the first call, and the
 parameters below are simply re-applied on subsequent calls.
 */
- (void)initRecognizer
{
    if (!_iflyRecognizerView) {
        // Recognizer with built-in UI, shown centered over this controller's view.
        _iflyRecognizerView = [[IFlyRecognizerView alloc] initWithCenter:self.view.center];
        [_iflyRecognizerView setParameter:@"" forKey:[IFlySpeechConstant PARAMS]];
        // "iat" selects dictation (speech-to-text) mode.
        [_iflyRecognizerView setParameter:@"iat" forKey:[IFlySpeechConstant IFLY_DOMAIN]];
    }

    _iflyRecognizerView.delegate = self;

    IFlyRecognizerView *recognizer = _iflyRecognizerView;
    if (recognizer == nil) {
        return;
    }
    // Maximum recording duration, in milliseconds.
    [recognizer setParameter:@"30000" forKey:[IFlySpeechConstant SPEECH_TIMEOUT]];
    // Trailing-silence (end-of-speech) detection window, ms.
    [recognizer setParameter:@"3000" forKey:[IFlySpeechConstant VAD_EOS]];
    // Leading-silence (begin-of-speech) detection window, ms.
    [recognizer setParameter:@"3000" forKey:[IFlySpeechConstant VAD_BOS]];
    // Network wait timeout, ms.
    [recognizer setParameter:@"20000" forKey:[IFlySpeechConstant NET_TIMEOUT]];
    // Sample rate; 16 kHz is the vendor-recommended value.
    [recognizer setParameter:@"16000" forKey:[IFlySpeechConstant SAMPLE_RATE]];
    [recognizer setParameter:@"zh_cn" forKey:[IFlySpeechConstant LANGUAGE]];
    // "0" = do not add punctuation to the transcription.
    [recognizer setParameter:@"0" forKey:[IFlySpeechConstant ASR_PTT]];
}
/**
 Delegate callback from the UI recognizer with one chunk of dictation text.

 The text is carried in the KEYS of the first dictionary in resultArray
 (iFlytek "plain" result format, as configured in Call_JS_Voice); the chunk
 is appended to the accumulated _result.

 @param resultArray Recognition results; element 0 is the text dictionary.
 @param isLast YES on the final callback of the session (unused here, matching
        the original behavior).
 */
- (void)onResult:(NSArray *)resultArray isLast:(BOOL)isLast
{
    // Fix: the original indexed element 0 unconditionally, crashing on an
    // empty callback payload; firstObject is nil-safe and we bail out early.
    NSDictionary *chunk = [resultArray firstObject];
    if (![chunk isKindOfClass:[NSDictionary class]]) {
        return;
    }
    NSMutableString *text = [[NSMutableString alloc] init];
    for (NSString *key in chunk) {
        [text appendString:key];
    }
    // Accumulate across callbacks (simpler than the original
    // stringWithFormat:@"%@%@" round-trip).
    _result = [_result stringByAppendingString:text];
    [_iflyRecognizerView cancel];
}
/**
 Starts a dictation session, lazily initializing the recognizer first.
 (Name kept as-is for the existing JS-bridge caller.)
 */
-(void)Call_JS_Voice
{
if(_iflyRecognizerView == nil)
{
[self initRecognizer ];
}
// Use the microphone as the audio source.
[_iflyRecognizerView setParameter:IFLY_AUDIO_SOURCE_MIC forKey:@"audio_source"];
// Result format is plain text. NOTE(review): the original comment claimed
// JSON, but the value actually set here is "plain" — onResult:isLast: relies
// on the plain format (text carried in dictionary keys).
[_iflyRecognizerView setParameter:@"plain" forKey:[IFlySpeechConstant RESULT_TYPE]];
// Save the recording under the SDK work path; if no work path was set it
// defaults to Library/Caches.
[_iflyRecognizerView setParameter:@"asr.pcm" forKey:[IFlySpeechConstant ASR_AUDIO_PATH]];
_result = @"";
[_iflyRecognizerView start];
}
/**
 Delegate callback: the user has started speaking. Log-only.
 */
- (void) onBeginOfSpeech
{
NSLog(@"onBeginOfSpeech");
}
/**
 Delegate callback: dictation was cancelled. Log-only.
 */
- (void) onCancel
{
NSLog(@"识别取消");
}
// Delegate callback: recognition failed. Intentionally (?) empty —
// TODO(review): errors are silently swallowed here; consider at least
// logging error.errorDesc so failures are visible during development.
-(void)onError:(IFlySpeechError *)error
{
}
// Delegate callback for the no-UI recognizer variant; unused in this class
// (results are handled in onResult:isLast: for the UI recognizer), so the
// body is deliberately empty.
-(void)onResults:(NSArray *)results isLast:(BOOL)isLast
{
}