Many modules in my project need speech recognition, so I wrote a simple wrapper and am recording it here.
1. Create a SpeechRecognizerManager utility class that inherits from NSObject. SpeechRecognizerManager.h code:
#import <Speech/Speech.h>
#import <AVFoundation/AVFoundation.h>
typedef void (^speechRecognizerBlock)(NSString *testStr);

@interface SpeechRecognizerManager : NSObject

@property (strong, nonatomic) SFSpeechRecognitionTask *recognitionTask; // speech recognition task
@property (strong, nonatomic) SFSpeechRecognizer *speechRecognizer; // speech recognizer
@property (strong, nonatomic) SFSpeechAudioBufferRecognitionRequest *recognitionRequest; // recognition request
@property (strong, nonatomic) AVAudioEngine *audioEngine; // audio engine that captures microphone input

// Singleton accessor
+ (instancetype)sharedSpeechRecognizerManager;
// Starts recording and returns recognized text through the block
- (void)getSpeechRecognizer:(speechRecognizerBlock)block;

@end
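One thing the header does not show: on iOS 10 and later, both speech recognition and microphone access require usage-description entries in Info.plist, otherwise the system denies (and may terminate) the app when it asks for permission. The two keys are NSSpeechRecognitionUsageDescription and NSMicrophoneUsageDescription; the description strings below are just placeholders:

<key>NSSpeechRecognitionUsageDescription</key>
<string>Used to convert your speech into text.</string>
<key>NSMicrophoneUsageDescription</key>
<string>Used to record your voice for speech recognition.</string>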
2. SpeechRecognizerManager.m code:
#import "SpeechRecognizerManager.h"
#import
staticSpeechRecognizerManager* speechRecognizerManager =nil;
@interface SpeechRecognizerManager ()

@property (nonatomic, copy) speechRecognizerBlock block;

@end

@implementation SpeechRecognizerManager
+ (instancetype)sharedSpeechRecognizerManager {
    static dispatch_once_t onceToken;
    dispatch_once(&onceToken, ^{
        // dispatch_once already guarantees this runs exactly once, so no nil check is needed
        speechRecognizerManager = [[SpeechRecognizerManager alloc] init];
    });
    return speechRecognizerManager;
}
- (instancetype)init {
    self = [super init];
    if (self) {
        // Set the recognition locale to Chinese
        NSLocale *locale = [[NSLocale alloc] initWithLocaleIdentifier:@"zh-CN"];
        self.speechRecognizer = [[SFSpeechRecognizer alloc] initWithLocale:locale];
        // Request speech recognition authorization (first check whether the device supports speech recognition)
        [SFSpeechRecognizer requestAuthorization:^(SFSpeechRecognizerAuthorizationStatus status) {
            BOOL isButtonEnabled = NO;
            switch (status) {
                case SFSpeechRecognizerAuthorizationStatusAuthorized:
                    isButtonEnabled = YES;
                    NSLog(@"Speech recognition authorized");
                    break;
                case SFSpeechRecognizerAuthorizationStatusDenied:
                    isButtonEnabled = NO;
                    NSLog(@"User denied access to speech recognition");
                    break;
                case SFSpeechRecognizerAuthorizationStatusRestricted:
                    isButtonEnabled = NO;
                    NSLog(@"Speech recognition is restricted on this device");
                    break;
                case SFSpeechRecognizerAuthorizationStatusNotDetermined:
                    isButtonEnabled = NO;
                    NSLog(@"Speech recognition not yet authorized");
                    break;
                default:
                    break;
            }
        }];
        // Create the audio engine
        self.audioEngine = [[AVAudioEngine alloc] init];
    }
    return self;
}
- (void)startRecording {
    // Cancel any recognition task that is still running
    if (self.recognitionTask) {
        [self.recognitionTask cancel];
        self.recognitionTask = nil;
    }
    // Configure the audio session for recording
    AVAudioSession *audioSession = [AVAudioSession sharedInstance];
    BOOL categoryOK = [audioSession setCategory:AVAudioSessionCategoryRecord error:nil];
    BOOL modeOK = [audioSession setMode:AVAudioSessionModeMeasurement error:nil];
    BOOL activeOK = [audioSession setActive:YES withOptions:AVAudioSessionSetActiveOptionNotifyOthersOnDeactivation error:nil];
    if (categoryOK && modeOK && activeOK) {
        NSLog(@"Audio session is ready");
    } else {
        NSLog(@"Some audio session setting is not supported");
    }
    // Create the recognition request and ask for partial results as they arrive
    self.recognitionRequest = [[SFSpeechAudioBufferRecognitionRequest alloc] init];
    self.recognitionRequest.shouldReportPartialResults = YES;
    AVAudioInputNode *inputNode = self.audioEngine.inputNode;
    // Start the recognition task
    self.recognitionTask = [self.speechRecognizer recognitionTaskWithRequest:self.recognitionRequest resultHandler:^(SFSpeechRecognitionResult * _Nullable result, NSError * _Nullable error) {
        BOOL isFinal = NO;
        if (result) {
            SFTranscription *transcription = [result bestTranscription];
            if (self.block) {
                self.block(transcription.formattedString);
            }
            isFinal = [result isFinal];
        }
        if (error || isFinal) {
            [self.audioEngine stop];
            [inputNode removeTapOnBus:0];
            self.recognitionRequest = nil;
            self.recognitionTask = nil;
        }
    }];
    // Feed microphone buffers into the recognition request
    AVAudioFormat *recordingFormat = [inputNode outputFormatForBus:0];
    [inputNode installTapOnBus:0 bufferSize:1024 format:recordingFormat block:^(AVAudioPCMBuffer * _Nonnull buffer, AVAudioTime * _Nonnull when) {
        [self.recognitionRequest appendAudioPCMBuffer:buffer];
    }];
    [self.audioEngine prepare];
    BOOL engineStarted = [self.audioEngine startAndReturnError:nil];
    NSLog(@"Audio engine started: %d", engineStarted);
}
- (void)getSpeechRecognizer:(speechRecognizerBlock)block {
    self.block = block;
    [self startRecording];
}

@end
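The view controller code in step 3 stops a recording session by reaching into the manager's audioEngine and recognitionRequest directly; a stop method on the manager itself would keep that logic in one place. A minimal sketch (stopRecording is my own addition, not part of the original class; it would go inside the @implementation above and need a matching declaration in the header):

- (void)stopRecording {
    if (self.audioEngine.isRunning) {
        [self.audioEngine stop];
        // Signals that no more audio is coming, so the recognition task can finish
        [self.recognitionRequest endAudio];
    }
}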
3. In the view controller, import the utility class header with #import "SpeechRecognizerManager.h" and adopt the <SFSpeechRecognizerDelegate> protocol.
// Button tap handler
- (void)microphoneTap:(UIButton *)sender {
    SpeechRecognizerManager *manager = [SpeechRecognizerManager sharedSpeechRecognizerManager];
    manager.speechRecognizer.delegate = self;
    if ([manager.audioEngine isRunning]) {
        [manager.audioEngine stop];
        [manager.recognitionRequest endAudio];
        self.siriBtu.enabled = YES;
        [self.siriBtu setTitle:@"Start Recording" forState:UIControlStateNormal];
    } else {
        // Call the manager method; the block returns the recognized text
        [manager getSpeechRecognizer:^(NSString *testStr) {
            // Each partial result delivers the full best transcription so far,
            // so assign it rather than appending (appending would duplicate text)
            _siriTextView.text = testStr;
            NSLog(@"%@", testStr);
        }];
        [self.siriBtu setTitle:@"Stop Recording" forState:UIControlStateNormal];
    }
}
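Since the view controller already adopts SFSpeechRecognizerDelegate and sets itself as the recognizer's delegate, it can also implement the optional availability callback to disable the record button while recognition is unavailable (for example, when the network drops). A minimal sketch, assuming the same siriBtu button:

// SFSpeechRecognizerDelegate callback: fires when recognition availability changes
- (void)speechRecognizer:(SFSpeechRecognizer *)speechRecognizer availabilityDidChange:(BOOL)available {
    self.siriBtu.enabled = available;
}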