Many platforms are not very iOS-friendly these days; this one doesn't even ship an iOS demo...
So I had to work it out myself. The fiddly part is the API authentication: if the handshake keeps failing, first check whether the Base64-encoded signature is exactly 44 characters long.
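An HMAC-SHA256 digest is always 32 raw bytes, so its Base64 encoding is always 44 characters. A quick assertion makes that check explicit; this is my own addition, meant to sit right after the signature is computed inside the -host method below:

//Sanity check (my own addition): a 32-byte HMAC-SHA256 digest always Base64-encodes to 44 characters;
//anything else means the apiSecret or signature_origin is wrong.
NSAssert(signature.length == 44, @"Bad signature length %lu, check apiSecret and signature_origin", (unsigned long)signature.length);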
#import <CommonCrypto/CommonHMAC.h>
#import <CommonCrypto/CommonDigest.h>
#import <xlocale.h>
static NSString *const url = @"wss://ws-api.xfyun.cn/v2/igr";//WebSocket endpoint
- (NSString *)host {
    NSURL *r = [NSURL URLWithString:url];
    NSString *date = [self lg_rfc1123String];
    NSString *signature_origin = [NSString stringWithFormat:@"host: %@\ndate: %@\nGET %@ HTTP/1.1",r.host,date,r.path];
    //sign signature_origin with HMAC-SHA256 using the apiSecret, then Base64-encode the digest
    NSString *signature = [self hmacSHA256WithSecret:kXfyunApiSecret content:signature_origin];
    NSString *authorization_origin = [NSString stringWithFormat:@"api_key=\"%@\", algorithm=\"hmac-sha256\", headers=\"host date request-line\", signature=\"%@\"",kXfyunApiKey,signature];
    NSData *dataAuthorization = [authorization_origin dataUsingEncoding:NSUTF8StringEncoding];
    //Base64-encode the whole authorization string
    NSString *authorization = [dataAuthorization base64EncodedStringWithOptions:0];
    //assemble the handshake URL and percent-encode it
    return [self lg_safeUrlWithFormat:[NSString stringWithFormat:@"%@?authorization=%@&date=%@&host=%@",url,authorization,date,r.host]];
}
- (NSString*)lg_rfc1123String {//current timestamp in RFC 1123 format (e.g. Mon, 02 Jan 2006 15:04:05 GMT)
    time_t date = (time_t)[[NSDate date] timeIntervalSince1970];
    struct tm timeinfo;
    gmtime_r(&date, &timeinfo);
    char buffer[32];
    size_t ret = strftime_l(buffer, sizeof(buffer), "%a, %d %b %Y %H:%M:%S GMT", &timeinfo, NULL);
    if (ret) {
        return @(buffer);
    } else {
        return nil;
    }
}
/**
 * Signing method, MAC algorithm: HmacSHA256
 *
 * @param secret  the signing key (apiSecret)
 * @param content the text to sign
 *
 * @return the Base64-encoded digest
 */
- (NSString *)hmacSHA256WithSecret:(NSString *)secret content:(NSString *)content {
    const char *cKey = [secret cStringUsingEncoding:NSASCIIStringEncoding];
    const char *cData = [content cStringUsingEncoding:NSASCIIStringEncoding];
    //SHA-256 digest is 32 bytes
    unsigned char cHMAC[CC_SHA256_DIGEST_LENGTH];
    CCHmac(kCCHmacAlgSHA256, cKey, strlen(cKey), cData, strlen(cData), cHMAC);
    NSData *HMAC = [[NSData alloc] initWithBytes:cHMAC length:sizeof(cHMAC)];
    //Base64-encode the raw digest; this is the 44-character signature
    NSString *hash = [HMAC base64EncodedStringWithOptions:0];
    return hash;
}
- (NSString *)lg_safeUrlWithFormat:(id)object {
    //percent-encode the assembled handshake URL
    NSString *cover = [NSString stringWithFormat:@"%@",object];
    cover = [cover stringByAddingPercentEncodingWithAllowedCharacters:[NSCharacterSet URLQueryAllowedCharacterSet]];
    return cover;
}
The socket uses a third-party library; add pod 'SocketRocket' #websocket to the Podfile.
SocketRocket integration
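For reference, here is a minimal sketch of opening the socket directly with SRWebSocket, using the authorized URL built by the -host method above. This is my own sketch, not the project's SocketRocketUtility wrapper; the connectSocket name and the log messages are placeholders.

#import <SocketRocket/SRWebSocket.h>
- (void)connectSocket {
    NSURL *wsURL = [NSURL URLWithString:[self host]];//URL with authorization, date and host query items
    SRWebSocket *socket = [[SRWebSocket alloc] initWithURL:wsURL];
    socket.delegate = self;//this class must adopt SRWebSocketDelegate; keep a strong reference in real code
    [socket open];
}
#pragma mark - SRWebSocketDelegate
- (void)webSocketDidOpen:(SRWebSocket *)webSocket {
    NSLog(@"Handshake succeeded, start streaming audio frames");
}
- (void)webSocket:(SRWebSocket *)webSocket didReceiveMessage:(id)message {
    NSLog(@"Server message: %@", message);//recognition result arrives here as JSON text
}
- (void)webSocket:(SRWebSocket *)webSocket didFailWithError:(NSError *)error {
    NSLog(@"Connection failed: %@", error);//a bad signature usually shows up here as a 401 handshake error
}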
Capturing the audio
#import "LGAudioRecordManager.h"
#import <AVFoundation/AVFoundation.h>
#import "LGRecordFileManager.h"
static Float64 const LGSampleRate = 8000;//sample rate
static UInt32 const LGChannelsPerFrame = 1;//number of channels
static UInt32 const LGBitsPerChannel = 16;//bits per sample
static NSInteger const LGBufferCount = 3;//number of buffers
static CGFloat const LGBufferDurationSeconds = 0.2;//duration of each buffer in seconds
@interface LGAudioRecordManager () {
    AudioQueueRef audioRef;//audio queue object
    AudioStreamBasicDescription recordConfiguration;//audio stream format description
    AudioQueueBufferRef audioBuffers[LGBufferCount];//audio queue buffers
}
@property (nonatomic, copy) NSString *recordFilePath;//path of the recorded file in the sandbox
@property (nonatomic, assign) AudioFileID recordFileID;//audio file ID used to write the recording
@property (nonatomic, assign) SInt64 recordPacket;//current packet index in the recording file
@end
@implementation LGAudioRecordManager
+ (instancetype)sharedManager {
    static LGAudioRecordManager *manager = nil;
    static dispatch_once_t onceToken;
    dispatch_once(&onceToken, ^{
        manager = [[LGAudioRecordManager alloc]init];
    });
    return manager;
}
- (instancetype)init {
    if (self = [super init]) {
        //recording file path in the sandbox
        self.recordFilePath = [LGRecordFileManager cacheFileWithFolder:@"LGAudioRecord" FileName:@"audioRecord.wav"];
        NSLog(@"recordFile:%@",_recordFilePath);
        recordConfiguration.mSampleRate = LGSampleRate;
        recordConfiguration.mChannelsPerFrame = LGChannelsPerFrame;
        //encoding format: linear PCM
        recordConfiguration.mFormatID = kAudioFormatLinearPCM;
        recordConfiguration.mFormatFlags = kLinearPCMFormatFlagIsSignedInteger | kLinearPCMFormatFlagIsPacked;
        //bits per sample
        recordConfiguration.mBitsPerChannel = LGBitsPerChannel;
        //bytes per frame
        recordConfiguration.mBytesPerFrame = (recordConfiguration.mBitsPerChannel / 8) * recordConfiguration.mChannelsPerFrame;
        //bytes per packet
        recordConfiguration.mBytesPerPacket = recordConfiguration.mBytesPerFrame;
        //frames per packet
        recordConfiguration.mFramesPerPacket = 1;
    }
    return self;
}
- (void)initAudio {
    //create the input audio queue and register the capture callback
    OSStatus status = AudioQueueNewInput(&recordConfiguration, inputBufferHandler, (__bridge void *)(self), NULL, NULL, 0, &audioRef);
    if (status != kAudioSessionNoError) {
        NSLog(@"Failed to create the audio queue");
        return;
    }
    //estimate the buffer size
    int frames = [self computeRecordBufferSize:&recordConfiguration seconds:LGBufferDurationSeconds];
    int bufferByteSize = frames * recordConfiguration.mBytesPerFrame;
    NSLog(@"Buffer size %d",bufferByteSize);
    //allocate the buffers and enqueue them
    for (int i = 0; i < LGBufferCount; i++) {
        AudioQueueAllocateBuffer(audioRef, bufferByteSize, &audioBuffers[i]);
        AudioQueueEnqueueBuffer(audioRef, audioBuffers[i], 0, NULL);
    }
}
//capture callback, invoked each time a buffer has been filled
void inputBufferHandler(void *inUserData, AudioQueueRef inAQ, AudioQueueBufferRef inBuffer, const AudioTimeStamp *inStartTime,UInt32 inNumPackets, const AudioStreamPacketDescription *inPacketDesc) {
    LGAudioRecordManager *audioManager = [LGAudioRecordManager sharedManager];
    if (inNumPackets > 0) {
        //append the packets to the audio file
        AudioFileWritePackets(audioManager.recordFileID, FALSE, inBuffer->mAudioDataByteSize,inPacketDesc, audioManager.recordPacket, &inNumPackets, inBuffer->mAudioData);
        audioManager.recordPacket += inNumPackets;
    }
    if (audioManager.isRecording) {
        //re-enqueue the buffer so it can be reused
        AudioQueueEnqueueBuffer(inAQ, inBuffer, 0, NULL);
    }
}
- (int)computeRecordBufferSize:(const AudioStreamBasicDescription*)format seconds:(float)seconds {
    //estimate how many bytes are needed to hold the given number of seconds of audio
    int packets, frames, bytes = 0;
    frames = (int)ceil(seconds * format->mSampleRate);
    if (format->mBytesPerFrame > 0) {
        bytes = frames * format->mBytesPerFrame;
    } else {
        UInt32 maxPacketSize = 0;
        if (format->mBytesPerPacket > 0) {
            maxPacketSize = format->mBytesPerPacket;
        }
        if (format->mFramesPerPacket > 0) {
            packets = frames / format->mFramesPerPacket;
        } else {
            packets = frames;
        }
        if (packets == 0) {
            packets = 1;
        }
        bytes = packets * maxPacketSize;
    }
    return bytes;
}
- (void)startRecord {
    [LGRecordFileManager removeFileAtPath:self.recordFilePath];
    [self initAudio];
    CFURLRef url = CFURLCreateWithFileSystemPath(kCFAllocatorDefault, (__bridge CFStringRef)self.recordFilePath, kCFURLPOSIXPathStyle, false);
    //create the audio file that the callback writes into
    AudioFileCreateWithURL(url, kAudioFileCAFType, &recordConfiguration, kAudioFileFlags_EraseFile,&_recordFileID);
    CFRelease(url);
    self.recordPacket = 0;
    //reconfigure the session in case another audio source (e.g. music playback) changed it
    [[AVAudioSession sharedInstance] setCategory:AVAudioSessionCategoryPlayAndRecord error:nil];
    [[AVAudioSession sharedInstance] setActive:YES error:nil];
    OSStatus status = AudioQueueStart(audioRef, NULL);
    if (status != kAudioSessionNoError) {
        NSLog(@"Failed to start the audio queue");
        return;
    }
    self.isRecording = true;
    NSLog(@"Recording started");
    //enable level metering so the channel power can be read
    [self performSelectorOnMainThread:@selector(enableUpdateLevelMetering) withObject:nil waitUntilDone:NO];
}
- (CGFloat)getCurrentVolume {
    //read the current level meter state of every channel
    UInt32 dataSize = sizeof(AudioQueueLevelMeterState) * recordConfiguration.mChannelsPerFrame;
    AudioQueueLevelMeterState *levels = (AudioQueueLevelMeterState*)malloc(dataSize);
    OSStatus rc = AudioQueueGetProperty(audioRef, kAudioQueueProperty_CurrentLevelMeter, levels, &dataSize);
    if (rc) {
        NSLog(@"OSStatus %d", (int)rc);
    }
    float channelAvg = 0;
    for (int i = 0; i < recordConfiguration.mChannelsPerFrame; i++) {
        channelAvg += levels[i].mPeakPower;//sum the peak power of each channel (only one channel here)
    }
    free(levels);
    return channelAvg;
}
- (BOOL)enableUpdateLevelMetering {
    //turn on level metering so getCurrentVolume has data to read
    UInt32 val = 1;
    OSStatus status = AudioQueueSetProperty(audioRef, kAudioQueueProperty_EnableLevelMetering, &val, sizeof(UInt32));
    if (status == kAudioSessionNoError) {
        return YES;
    }
    return NO;
}
- (void)stopRecord {
    if (self.isRecording) {
        self.isRecording = NO;
        //stop the queue, close the file and dispose of the queue; the return codes are only logged
        OSStatus status = AudioQueueStop(audioRef, true);
        OSStatus status1 = AudioFileClose(_recordFileID);
        OSStatus status2 = AudioQueueDispose(audioRef, TRUE);
        NSLog(@"Recording stopped %d %d %d",(int)status,(int)status1,(int)status2);
    }
}
- (void)dealloc {
    AudioQueueDispose(audioRef, TRUE);
    AudioFileClose(_recordFileID);
}
@end
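A rough usage sketch of the manager (the action names are my own placeholders): start recording on press, stop on release, then stream the file over the socket as shown in the next section.

//Usage sketch; recordButtonPressed/Released are placeholder actions
- (IBAction)recordButtonPressed:(id)sender {
    [[LGAudioRecordManager sharedManager] startRecord];
}
- (IBAction)recordButtonReleased:(id)sender {
    [[LGAudioRecordManager sharedManager] stopRecord];
    //then open the WebSocket and upload the recorded file, as shown below
}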
Uploading the audio
static char *const record_upload_queue = "com.soma.recordUploadQueue";//serial queue for uploading the recording
static NSInteger const maxLength = 1280;//bytes of audio sent per frame
static NSInteger const minInterval = 40;//interval between frames, in ms
dispatch_queue_t recordQ = dispatch_queue_create(record_upload_queue, DISPATCH_QUEUE_SERIAL);
dispatch_async(recordQ, ^{
    NSString *path = [LGRecordFileManager cacheFileWithFolder:@"LGAudioRecord" FileName:@"audioRecord.wav"];
    NSData *data = [[NSData alloc] initWithContentsOfFile:path];
    NSLog(@"File size %@ %f",data,(CGFloat)data.length/(CGFloat)maxLength);
    NSInputStream *inputStream = [[NSInputStream alloc] initWithFileAtPath:path];
    [inputStream open];
    NSInteger index = 0;
    int64_t addBytes = 0;//bytes read so far
    int64_t allBytes = data.length;//total bytes
    uint8_t readBuffer[maxLength];
    BOOL endOfStreamReached = NO;//whether the end of the file has been reached
    BOOL isError = NO;//whether a read error occurred
    while (!endOfStreamReached) {
//        if ([LGAudioUploadManager sharedManager].uploadStatus == AudioUploadStatusFailure) {
//            break;
//        }
        addBytes += maxLength;
        NSInteger bytesRead = [inputStream read:readBuffer maxLength:maxLength];
        if (bytesRead == 0) {
            //reached the end of the file
            endOfStreamReached = YES;
        } else if (bytesRead == -1) {
            //read error
            endOfStreamReached = YES;
            isError = YES;
        } else {
            //Base64-encode this chunk and send it as one frame
            NSData *data = [NSData dataWithBytesNoCopy:readBuffer length:bytesRead freeWhenDone:NO];
            NSString *audio = [data base64EncodedStringWithOptions:0];
            NSInteger status = 0;//first frame
            if (addBytes < allBytes) {
                status = 1;//intermediate frame
            } else {
                status = 2;//last frame
            }
            if (index == 0) {
                status = 0;//first frame
            }
            NSMutableDictionary *dict = [NSMutableDictionary dictionary];
            if (status == 0) {//the first frame also carries the business and common parameters
                NSDictionary *business = @{@"ent":@"igr",@"aue":@"raw",@"rate":@(8000)};
                NSDictionary *common = @{@"app_id":kXfyunAppId};
                dict[@"business"] = business;
                dict[@"common"] = common;
            }
            NSDictionary *dataDict = @{@"status":@(status),@"audio":audio};
            dict[@"data"] = dataDict;
            [[SocketRocketUtility instance] sendData:dict withRequestURI:nil];
            index++;
            NSLog(@"NSInputStream %ld %ld",status,index);
            [NSThread sleepForTimeInterval:minInterval/1000.f];
        }
    }
    if (isError) {//read failed, so the upload ends here
        NSLog(@"Failed to read the recording file, please record again");
    }
    [inputStream close];
});