iOS Speech Recognition: Using the iFlytek (科大讯飞) SDK

This post focuses mainly on the code-level implementation.

First, configure the SDK in AppDelegate.m:


#import "iflyMSC/IFlySpeechUtility.h"   // iFlytek SDK header (path may vary by SDK version)

- (BOOL)application:(UIApplication *)application didFinishLaunchingWithOptions:(NSDictionary *)launchOptions {
    // APPID issued by the iFlytek open platform, plus the network timeout in milliseconds
    NSString *initString = [[NSString alloc] initWithFormat:@"appid=%@,timeout=%@", @"55ee5b7c", @"20000"];
    // createUtility must be executed before any other iFlytek service is started
    [IFlySpeechUtility createUtility:initString];
    return YES;
}
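
Because the recognizer records from the microphone, the app also needs microphone access (and, from iOS 10 on, an NSMicrophoneUsageDescription entry in Info.plist). The following is a minimal sketch of requesting the permission up front with AVAudioSession; it is an illustrative addition, not part of the original code:

#import <AVFoundation/AVFoundation.h>

// Ask for microphone permission before starting dictation (illustrative addition).
[[AVAudioSession sharedInstance] requestRecordPermission:^(BOOL granted) {
    if (!granted) {
        NSLog(@"Microphone access denied; dictation will not work.");
    }
}];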

Once that is configured, add the following properties to the class where you want to use speech recognition:


@property (nonatomic, strong) NSString *pcmFilePath;                  // path of the recorded audio file
@property (nonatomic, strong) IFlyRecognizerView *iflyRecognizerView; // recognizer with built-in UI
@property (nonatomic, strong) IFlyDataUploader *uploader;             // data uploader
@property (nonatomic, strong) PopupView *popUpView;                   // popup view
@property (nonatomic, strong) NSString *result;                       // dictation result
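
For the onResult:isLast: and other callbacks below to be delivered, the class that owns these properties must also adopt the recognizer view's delegate protocol. A minimal sketch, assuming the SDK's IFlyRecognizerViewDelegate protocol and an illustrative class name:

#import <UIKit/UIKit.h>
#import "iflyMSC/IFlyRecognizerView.h"   // header path may vary by SDK version

// "SearchViewController" is an illustrative name; the properties above live in this class.
@interface SearchViewController : UIViewController <IFlyRecognizerViewDelegate>
@end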

Then, in the .m file:

- (void)viewDidLoad
{
    [super viewDidLoad];

    [self initRecognizer];      // initialize the recognizer

    _result = [NSString new];   // holds the dictation result

    [self Call_JS_Voice];       // start dictation
}


/**
 Initialize the recognizer parameters
 */
-(void)initRecognizer
{
    // Recognizer with built-in UI

    // Create the UI instance only once
    if (_iflyRecognizerView == nil) {
        // Center the recognizer UI on the screen
        _iflyRecognizerView = [[IFlyRecognizerView alloc] initWithCenter:self.view.center];

        // Clear any previously set parameters
        [_iflyRecognizerView setParameter:@"" forKey:[IFlySpeechConstant PARAMS]];

        // Set dictation (iat) mode
        [_iflyRecognizerView setParameter:@"iat" forKey:[IFlySpeechConstant IFLY_DOMAIN]];
    }
    _iflyRecognizerView.delegate = self;

    if (_iflyRecognizerView != nil) {

        // Maximum recording duration (ms)
        [_iflyRecognizerView setParameter:@"30000" forKey:[IFlySpeechConstant SPEECH_TIMEOUT]];
        // End-of-speech silence threshold (ms)
        [_iflyRecognizerView setParameter:@"3000" forKey:[IFlySpeechConstant VAD_EOS]];
        // Beginning-of-speech silence threshold (ms)
        [_iflyRecognizerView setParameter:@"3000" forKey:[IFlySpeechConstant VAD_BOS]];
        // Network timeout (ms)
        [_iflyRecognizerView setParameter:@"20000" forKey:[IFlySpeechConstant NET_TIMEOUT]];

        // Sample rate; 16 kHz is recommended
        [_iflyRecognizerView setParameter:@"16000" forKey:[IFlySpeechConstant SAMPLE_RATE]];

        // Language: Simplified Chinese
        [_iflyRecognizerView setParameter:@"zh_cn" forKey:[IFlySpeechConstant LANGUAGE]];
        // Punctuation in results: "0" disables it
        [_iflyRecognizerView setParameter:@"0" forKey:[IFlySpeechConstant ASR_PTT]];
    }
}

/**
 Dictation result callback (UI version)
 resultArray: dictation results
 isLast: whether this is the final result
 ****/
- (void)onResult:(NSArray *)resultArray isLast:(BOOL)isLast
{
    // Build a search URL from the dictation result and load it
    NSMutableString *result = [[NSMutableString alloc] init];
    NSDictionary *dic = [resultArray objectAtIndex:0];

    // With the "plain" result type, the recognized text fragments are the dictionary keys
    for (NSString *key in dic) {
        [result appendFormat:@"%@", key];
    }
    _result = [NSString stringWithFormat:@"%@%@", _result, result];
    NSString *path = [[NSString alloc] initWithFormat:@"%@search.php?keywords=%@", appweb, _result];

    // Percent-encode the URL so the Chinese keywords become UTF-8 escapes
    NSString *encodedString = (NSString *)CFBridgingRelease(CFURLCreateStringByAddingPercentEscapes(kCFAllocatorDefault, (CFStringRef)path, NULL, NULL, kCFStringEncodingUTF8));

    NSURL *url = [NSURL URLWithString:encodedString];
    NSLog(@"%@", url.path);
    NSURLRequest *request = [NSURLRequest requestWithURL:url];
    [_Show loadRequest:request];
    [_iflyRecognizerView cancel];
}
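
Note that CFURLCreateStringByAddingPercentEscapes has been deprecated since iOS 9. A sketch of a roughly equivalent replacement using NSString's percent-encoding API (a substitution, not from the original post):

// Non-deprecated alternative to the CFURLCreateStringByAddingPercentEscapes call above.
NSString *encodedString =
    [path stringByAddingPercentEncodingWithAllowedCharacters:
              [NSCharacterSet URLQueryAllowedCharacterSet]];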

/**
 Start dictation
 */
-(void)Call_JS_Voice
{
    if (_iflyRecognizerView == nil)
    {
        [self initRecognizer];
    }

    // Use the microphone as the audio source
    [_iflyRecognizerView setParameter:IFLY_AUDIO_SOURCE_MIC forKey:@"audio_source"];

    // Return results as plain text (matches the key-iteration parsing in onResult:isLast: above)
    [_iflyRecognizerView setParameter:@"plain" forKey:[IFlySpeechConstant RESULT_TYPE]];

    // Save the recording; it is written to the SDK work path, or to Library/Caches if no work path is set
    [_iflyRecognizerView setParameter:@"asr.pcm" forKey:[IFlySpeechConstant ASR_AUDIO_PATH]];

    _result = @"";

    [_iflyRecognizerView start];
}
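
In this example Call_JS_Voice is started automatically from viewDidLoad; in practice it would typically be wired to a button. A trivial usage sketch (the action name is illustrative):

// Hypothetical button action that starts a new round of dictation.
- (IBAction)onVoiceButtonTapped:(id)sender
{
    [self Call_JS_Voice];
}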

/**
 Called when the user starts speaking
 */
- (void)onBeginOfSpeech
{
    NSLog(@"onBeginOfSpeech");
}

/**
 Called when dictation is cancelled
 ****/
- (void)onCancel
{
    NSLog(@"recognition cancelled");
}

/**
 Called when recognition fails
 */
-(void)onError:(IFlySpeechError *)error
{
    // Handle or log the recognition error here
}

/**
 Result callback for the no-UI recognizer; not used in this example
 */
-(void)onResults:(NSArray *)results isLast:(BOOL)isLast
{

}
