这里简单使用一下百度官网的语音SDK
首先官网创建一个应用拿到APP_KEY 和APP_ID还有SECRET_KEY这里就不说了
官网下载一下SDK : http://ai.baidu.com/docs#/ASR-Android-SDK/top
下载完SDK将下载完的iOS_Release_ASR_v3.0.5.7文件里面的BDSClientHeaders,BDSClientLib,BDSClientResource三个文件导入到工程
导入依赖库
libc++.tbd
libz.1.2.5.tbd
AudioToolbox
AVFoundation
CFNetwork
CoreLocation
CoreTelephony
SystemConfiguration
GLKit
libsqlite3
Info.plist文件里面设置一下麦克风权限
导入头文件
#import "BDSEventManager.h"
#import "BDSASRDefines.h"
#import "BDSASRParameters.h"
实现一下协议方法
这里创建一个Button和TextView 按钮用来开始识别,TextView用来显示识别到的语音(可自行选择)
创建三个属性
@property(nonatomic,strong)BDSEventManager *asrEventManager;
@property(nonatomic, assign) BOOL longSpeechFlag;
@property(nonatomic, assign) BOOL continueToVR;
设置三个字符串类型的常量分别是API_KEY , SECRET_KEY ,APP_ID 把官网创建应用的三个信息替换掉就好
// Credentials created in the Baidu AI console — replace with your own app's
// values. `NSString * const` (const pointer) is the correct idiom; the
// original `const NSString *` made the object const, not the pointer, and
// L30 was missing the space after `const` and would not compile.
NSString * const API_KEY = @"jc7GGdoTREqhFwCDFYnwrdTY";
NSString * const SECRET_KEY = @"4u4R6bUsNEwpPAaSj7h4VbIjmbRXSpM7";
NSString * const APP_ID = @"11584172";
(Button和TextView的创建和添加我就不加了)
下面开始敲代码
viewDidLoad里面
代码里面的voiceBtn和text替换成自己创建的button和textView就可以了
// Button action: clears the log view and starts a one-shot recognition
// session (long-speech mode and audio caching disabled, no offline
// wakeup word). The actual session start happens in -voiceRecogButtonHelper.
-(void)recognitionButtonClick{
// Reset the output text view before a new session.
[self cleanLogUI];
[self.asrEventManager setParameter:@(NO) forKey:BDS_ASR_ENABLE_LONG_SPEECH];
[self.asrEventManager setParameter:@(NO) forKey:BDS_ASR_NEED_CACHE_AUDIO];
// Empty string disables offline wakeup-word triggering (key takes a string).
[self.asrEventManager setParameter:@"" forKey:BDS_ASR_OFFLINE_ENGINE_TRIGGERED_WAKEUP_WORD];
[self voiceRecogButtonHelper];
}
--------------------
// SDK callback: recognition engine has started capturing audio.
// Updates the button title so the user knows the mic is live.
// NOTE(review): `voiceBtn` is assumed to be an ivar/outlet declared
// elsewhere in this class — confirm against the full file.
- (void)onStartWorking
{
[voiceBtn setTitle:@"Speaking..." forState:UIControlStateNormal];
}
--------------------
// Configures model-based VAD (voice activity detection) using the
// bds_easr_basic_model.dat resource bundled with the Baidu SDK.
- (void)configModelVAD {
    NSString *vadModelPath = [[NSBundle mainBundle] pathForResource:@"bds_easr_basic_model"
                                                             ofType:@"dat"];
    // Point the engine at the model file, then switch model-VAD on.
    [self.asrEventManager setParameter:vadModelPath forKey:BDS_ASR_MODEL_VAD_DAT_FILE];
    [self.asrEventManager setParameter:@(YES) forKey:BDS_ASR_ENABLE_MODEL_VAD];
}
-----------------------
// Enables server-side semantic understanding (NLU) for recognition results.
// NOTE(review): product ID 1536 conflicts with -enablePunctuation's 1737;
// presumably only one of the two configurators should be applied per session.
- (void) enableNLU {
// ---- enable semantic understanding ----
[self.asrEventManager setParameter:@(YES) forKey:BDS_ASR_ENABLE_NLU];
[self.asrEventManager setParameter:@"1536" forKey:BDS_ASR_PRODUCT_ID];
}
-----------------------
// One-time configuration of the recognizer: debug logging, online API
// credentials, offline app code, model-VAD and NLU. Call once before
// starting any recognition session.
- (void)configVoiceRecognitionClient {
// Set the DEBUG_LOG verbosity (trace = most verbose).
[self.asrEventManager setParameter:@(EVRDebugLogLevelTrace) forKey:BDS_ASR_DEBUG_LOG_LEVEL];
// Online credentials: the SDK expects [apiKey, secretKey] as an array.
[self.asrEventManager setParameter:@[API_KEY, SECRET_KEY] forKey:BDS_ASR_API_SECRET_KEYS];
// App ID doubles as the offline-engine app code.
[self.asrEventManager setParameter:APP_ID forKey:BDS_ASR_OFFLINE_APP_CODE];
[self configModelVAD];
[self enableNLU];
}
---------------------
// Enables punctuation in recognition results (double negative: disabling
// the "disable punctuation" flag). Uses product ID 1737, which is the
// punctuation-enabled model — note this conflicts with -enableNLU's 1536.
- (void) enablePunctuation {
[self.asrEventManager setParameter:@(NO) forKey:BDS_ASR_DISABLE_PUNCTUATION];
[self.asrEventManager setParameter:@"1737" forKey:BDS_ASR_PRODUCT_ID];
}
---------------------
// Parses a recognizer log string of the form "k1=v1&k2=v2&..." into a
// dictionary. Segments that do not contain exactly one '=' are skipped.
// (The original paste had its whitespace stripped — e.g.
// `for(NSString*iteminitems)` — and did not compile; reconstructed here.)
// @param logString Raw log line from the ASR callback.
// @return Dictionary of key/value pairs; empty if nothing parsed.
- (NSDictionary *)parseLogToDic:(NSString *)logString
{
    NSMutableDictionary *logDic = [[NSMutableDictionary alloc] initWithCapacity:3];
    // Fields are '&'-separated; each field is "key=value".
    for (NSString *item in [logString componentsSeparatedByString:@"&"]) {
        NSArray *pair = [item componentsSeparatedByString:@"="];
        if (pair.count == 2) {
            logDic[pair.firstObject] = pair.lastObject;
        }
    }
    return logDic;
}
-----------------------
// Clears the recognition output text view before starting a new session.
// NOTE(review): `text` is assumed to be the UITextView outlet mentioned in
// the surrounding tutorial — confirm against the full class.
- (void)cleanLogUI
{
text.text=@"";
}
------------------------
// SDK callback: a recognition session has finished. Resets the long-speech
// flag and restores the button so the user can start a new session.
- (void)onEnd
{
self.longSpeechFlag = NO;
// Re-enable the trigger button and restore its idle title.
voiceBtn.enabled = YES;
[voiceBtn setTitle:@"语音识别" forState:UIControlStateNormal];
}
------------------------
// Starts a one-shot (non-long-speech) recognition session, mirroring
// -recognitionButtonClick.
// FIX: the wakeup-word parameter takes an NSString (see L39, which passes
// @""); the original passed @(NO), a boolean NSNumber, to a string key.
-(void)voice
{
[self.asrEventManager setParameter:@(NO) forKey:BDS_ASR_ENABLE_LONG_SPEECH];
[self.asrEventManager setParameter:@(NO) forKey:BDS_ASR_NEED_CACHE_AUDIO];
// Empty string disables offline wakeup-word triggering.
[self.asrEventManager setParameter:@"" forKey:BDS_ASR_OFFLINE_ENGINE_TRIGGERED_WAKEUP_WORD];
[self voiceRecogButtonHelper];
}
------------------------
// Common session-start path used by both button actions: wires up the
// delegate, selects microphone input (nil file path and nil input stream),
// sends the START command, then switches the UI into its "initializing"
// state. The delegate must be set before BDS_ASR_CMD_START is sent.
- (void)voiceRecogButtonHelper
{
[self.asrEventManager setDelegate:self];
// nil for both sources means the SDK records from the microphone.
[self.asrEventManager setParameter:nil forKey:BDS_ASR_AUDIO_FILE_PATH];
[self.asrEventManager setParameter:nil forKey:BDS_ASR_AUDIO_INPUT_STREAM];
[self.asrEventManager sendCommand:BDS_ASR_CMD_START];
[self onInitializing];
}
--------------------------
// UI state while the engine spins up: disable the trigger button so the
// user cannot start a second session, and show a progress title. The
// button is re-enabled in -onEnd.
- (void)onInitializing
{
voiceBtn.enabled = NO;
[voiceBtn setTitle:@"Initializing..." forState:UIControlStateNormal];
}
-------------------------
// Builds a display string from a recognizer callback payload.
// If "results_recognition" holds a non-empty array, its candidate strings
// are concatenated; otherwise the whole dictionary is pretty-printed as
// JSON. (The original paste was whitespace-stripped AND truncated — the
// loop header `for(inti=0; i` lost its condition — so the obvious intent,
// concatenating all array elements, is reconstructed here.)
// @param dic Callback payload; may be nil.
// @return Concatenated recognition text, a JSON dump, or nil when dic is nil.
- (NSString *)getDescriptionForDic:(NSDictionary *)dic {
    NSArray *results = [dic objectForKey:@"results_recognition"];
    if (results) {
        if (results.count > 0) {
            // Concatenate every recognized fragment in order.
            return [results componentsJoinedByString:@""];
        }
    } else {
        NSLog(@"解析失败");
    }
    // Fallback: dump the whole payload as pretty-printed JSON.
    if (dic) {
        NSData *json = [NSJSONSerialization dataWithJSONObject:dic
                                                       options:NSJSONWritingPrettyPrinted
                                                         error:nil];
        return [[NSString alloc] initWithData:json encoding:NSUTF8StringEncoding];
    }
    return nil;
}
结束
按照方法往下敲就可以 点击方法和控件名还有变量不要忘记替换(如果有问题请在评论区评论,会及时修正)
简单实现一下
pod 'FSScrollContentView'
// Fragment of a viewDidLoad: builds a segmented category title bar using
// the third-party FSSegmentTitleView (from pod 'FSScrollContentView').
self.view.backgroundColor = [UIColor whiteColor];
// y = 64 places the bar directly under a standard nav bar + status bar.
// NOTE(review): indicatorType:0 is presumably the default underline style —
// confirm against the FSSegmentTitleView header.
self.titleView = [[FSSegmentTitleView alloc]initWithFrame:CGRectMake(0, 64, CGRectGetWidth(self.view.bounds), 50) titles:@[@"全部",@"服饰穿搭",@"生活百货",@"美食吃货",@"美容护理",@"母婴儿童",@"数码家电",@"其他"] delegate:self indicatorType:0];
self.titleView.indicatorColor = [UIColor blueColor];
[self.view addSubview:_titleView];
// 分割线 ---------------
import UIKit
// Screen dimensions used for layout throughout the demo.
let kScreenWidth = UIScreen.main.bounds.width
let kScreenHeight = UIScreen.main.bounds.height

/// Demo view controller: a horizontal scroll strip containing four
/// screen-wide image pages ("0.png" … "3.png" from the bundle).
/// (The original paste had its whitespace stripped — `classViewController`,
/// `forindexin0..<4` — and did not compile; reconstructed here.)
class ViewController: UIViewController {

    var scroView: UIScrollView?
    var mainImage: UIImageView?

    override func viewDidLoad() {
        super.viewDidLoad()
        // Strip of 4 pages, each as wide as the screen and 300pt tall.
        scroView = UIScrollView(frame: CGRect(x: 0, y: 95, width: kScreenWidth, height: 300))
        scroView?.contentSize = CGSize(width: kScreenWidth * 4, height: 300)
        scroView?.backgroundColor = UIColor.orange
        scroView?.isScrollEnabled = true
        view.addSubview(scroView!)

        for index in 0..<4 {
            // FIX: the page x-offset was hard-coded to 414 (one specific
            // device width); use the actual screen width so the pages line
            // up with contentSize on every device.
            mainImage = UIImageView(frame: CGRect(x: Int(kScreenWidth) * index,
                                                  y: 0,
                                                  width: Int(kScreenWidth),
                                                  height: 300))
            // "\(0 + 1 * index)" simplifies to "\(index)" — same file names.
            mainImage?.image = UIImage(named: "\(index).png")
            scroView?.addSubview(mainImage!)
        }
    }
}