_seam = dispatch_semaphore_create(0); // Create a semaphore with an initial count of 0
Converting a UIImage to a CVPixelBufferRef
- (CVPixelBufferRef)pixelBufferFromCGImage:(CGImageRef)image size:(CGSize)size {
    NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey, nil];
    CVPixelBufferRef pxbuffer = NULL;
    CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, size.width, size.height,
                                          kCVPixelFormatType_32ARGB,
                                          (__bridge CFDictionaryRef)options, &pxbuffer);
    NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL);
    CVPixelBufferLockBaseAddress(pxbuffer, 0);
    void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
    NSParameterAssert(pxdata != NULL);
    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    // CGBitmapContextCreate sets up a bitmap drawing environment (a bitmap context).
    // Anything drawn into the context is rendered as bitmap data into the supplied block
    // of memory. The pixel format of a new bitmap context is determined by three
    // parameters: bits per component, the color space, and the alpha option.
    // Use the buffer's own bytes-per-row in case Core Video padded the rows.
    CGContextRef context = CGBitmapContextCreate(pxdata, size.width, size.height, 8,
                                                 CVPixelBufferGetBytesPerRow(pxbuffer),
                                                 rgbColorSpace,
                                                 kCGImageAlphaPremultipliedFirst);
    NSParameterAssert(context);
    CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image), CGImageGetHeight(image)), image);
    // Release the color space
    CGColorSpaceRelease(rgbColorSpace);
    // Release the context
    CGContextRelease(context);
    // Unlock the pixel buffer
    CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
    return pxbuffer;
}
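The buffer returned here follows Core Foundation's Create rule, so every call must eventually be balanced with CVPixelBufferRelease; the writer block below does that after appending each frame. A minimal usage sketch (not part of the original code; writeOneImage: is a hypothetical helper):

// Minimal usage sketch; frameImage is any UIImage.
- (void)writeOneImage:(UIImage *)frameImage
{
    CVPixelBufferRef buffer = [self pixelBufferFromCGImage:frameImage.CGImage
                                                      size:CGSizeMake(320, 480)];
    if (buffer != NULL) {
        // ... hand the buffer to the writer here ...
        CVPixelBufferRelease(buffer); // balance the Create call so long recordings do not leak one buffer per frame
    }
}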
Creating the video file
/// Video encoding setup
- (void)recordVideoCodel
{
    self.theVideoPath = [self videoSavingPath:self.videoName];
    // Video size: 320 x 480 (or a multiple of it)
    CGSize size = CGSizeMake(320, 480); // 192 x 256 also works
    NSError *error = nil;
    __block AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:self.theVideoPath]
                                                                   fileType:AVFileTypeQuickTimeMovie
                                                                      error:&error];
    NSParameterAssert(videoWriter);
    if (error) {
        NSLog(@"error = %@", [error localizedDescription]);
        return;
    }
    // Output settings: codec, width, height
    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   AVVideoCodecTypeH264, AVVideoCodecKey,
                                   [NSNumber numberWithInt:size.width], AVVideoWidthKey,
                                   [NSNumber numberWithInt:size.height], AVVideoHeightKey, nil];
    AVAssetWriterInput *writerInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                                                         outputSettings:videoSettings];
    NSDictionary *sourcePixelBufferAttributesDictionary = [NSDictionary dictionaryWithObjectsAndKeys:
                                                           [NSNumber numberWithInt:kCVPixelFormatType_32ARGB],
                                                           kCVPixelBufferPixelFormatTypeKey, nil];
    // AVAssetWriterInputPixelBufferAdaptor provides a CVPixelBufferPool that can be used
    // to allocate the pixel buffers written to the output file. Allocating from this pool
    // is usually more efficient than appending pixel buffers allocated separately.
    AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
                                                     assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput
                                                     sourcePixelBufferAttributes:sourcePixelBufferAttributesDictionary];
    NSParameterAssert(writerInput);
    NSParameterAssert([videoWriter canAddInput:writerInput]);
    if ([videoWriter canAddInput:writerInput]) {
        [videoWriter addInput:writerInput];
    } else {
        NSLog(@"cannot add writer input");
        return;
    }
    [videoWriter startWriting];
    [videoWriter startSessionAtSourceTime:kCMTimeZero];
    dispatch_queue_t dispatchQueue = dispatch_queue_create("mediaInputQueue", NULL);
    int __block frame = 0;
    __weak typeof(self) weakSelf = self;
    // Start pulling and writing video frames
    [writerInput requestMediaDataWhenReadyOnQueue:dispatchQueue usingBlock:^{
        while ([writerInput isReadyForMoreMediaData]) {
            if (_end) { // end-of-recording flag
                [writerInput markAsFinished];
                if (videoWriter.status == AVAssetWriterStatusWriting) {
                    // Block this queue until finishWriting completes. Take the lock
                    // before starting so the completion handler cannot signal
                    // before we are waiting.
                    NSCondition *cond = [[NSCondition alloc] init];
                    [cond lock];
                    [videoWriter finishWritingWithCompletionHandler:^{
                        [cond lock];
                        [cond signal];
                        [cond unlock];
                    }];
                    [cond wait];
                    [cond unlock];
                    if (weakSelf.videoUrl) {
                        weakSelf.videoUrl(weakSelf.theVideoPath); // hand the finished file path back to the caller
                    }
                }
                break;
            }
            // Wait until framedataWriting: signals that a new frame is ready
            dispatch_semaphore_wait(_seam, DISPATCH_TIME_FOREVER);
            if (_imageBuffer) {
                // Append one video frame (25 fps timescale)
                if (![adaptor appendPixelBuffer:_imageBuffer withPresentationTime:CMTimeMake(frame, 25)]) {
                    NSLog(@"failed to append video frame");
                } else {
                    NSLog(@"video frame appended");
                    frame++;
                }
                NSLog(@"---------> frame written");
                // Release the buffers (balances CVPixelBufferCreate and CVPixelBufferRetain)
                CVPixelBufferRelease(_imageBuffer);
                CVPixelBufferRelease(_imgBuffer);
                _imgBuffer = NULL;
                _imageBuffer = NULL;
            }
        }
    }];
}
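As the comment on the adaptor above notes, AVAssetWriterInputPixelBufferAdaptor exposes a CVPixelBufferPool once writing has started, and drawing into buffers taken from that pool is usually cheaper than calling CVPixelBufferCreate for every frame, which may help with the CPU load mentioned at the end of this post. A minimal sketch of that variant, not the original implementation; the drawing steps mirror pixelBufferFromCGImage:size:, and pixelBufferFromPool:image:size: is a hypothetical method name:

// Hedged sketch: allocate the frame buffer from the adaptor's pool instead of
// CVPixelBufferCreate. adaptor.pixelBufferPool is nil until after startWriting.
- (CVPixelBufferRef)pixelBufferFromPool:(AVAssetWriterInputPixelBufferAdaptor *)adaptor
                                  image:(CGImageRef)image
                                   size:(CGSize)size
{
    CVPixelBufferRef pxbuffer = NULL;
    CVReturn status = CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault,
                                                         adaptor.pixelBufferPool,
                                                         &pxbuffer);
    if (status != kCVReturnSuccess || pxbuffer == NULL) {
        return NULL;
    }
    CVPixelBufferLockBaseAddress(pxbuffer, 0);
    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(CVPixelBufferGetBaseAddress(pxbuffer),
                                                 size.width, size.height, 8,
                                                 CVPixelBufferGetBytesPerRow(pxbuffer),
                                                 rgbColorSpace,
                                                 kCGImageAlphaPremultipliedFirst);
    CGContextDrawImage(context, CGRectMake(0, 0, size.width, size.height), image);
    CGContextRelease(context);
    CGColorSpaceRelease(rgbColorSpace);
    CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
    return pxbuffer; // still follows the Create rule: release after appendPixelBuffer:
}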
Writing images in real time
/// Write one frame of image data
/// @param image the frame image
- (void)framedataWriting:(UIImage *)image
{
    CGSize size = CGSizeMake(320, 480); // 192 x 256 also works
    // Convert the image into a pixel buffer for the video stream
    _imgBuffer = [self pixelBufferFromCGImage:[image CGImage] size:size];
    CVPixelBufferLockBaseAddress(_imgBuffer, 0);
    _imageBuffer = CVPixelBufferRetain(_imgBuffer);
    CVPixelBufferUnlockBaseAddress(_imgBuffer, 0);
    // Signal the writer queue that a frame is ready
    dispatch_semaphore_signal(_seam);
    NSLog(@"---------> frame submitted");
}
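framedataWriting: only hands off whatever image it is given; the presentation timestamps in the writer block assume roughly 25 frames per second (CMTimeMake(frame, 25)). A hedged sketch of a driver that calls it at that rate; self.currentFrameImage and self.frameTimer are hypothetical properties, not part of the original class:

// Hedged usage sketch: feed frames at ~25 fps so the presentation timestamps
// roughly match wall-clock time.
- (void)startFramePump
{
    dispatch_queue_t queue = dispatch_get_main_queue();
    dispatch_source_t timer = dispatch_source_create(DISPATCH_SOURCE_TYPE_TIMER, 0, 0, queue);
    dispatch_source_set_timer(timer,
                              dispatch_time(DISPATCH_TIME_NOW, 0),
                              (uint64_t)(NSEC_PER_SEC / 25), // one tick every 1/25 s
                              NSEC_PER_MSEC);                // 1 ms leeway
    __weak typeof(self) weakSelf = self;
    dispatch_source_set_event_handler(timer, ^{
        UIImage *image = weakSelf.currentFrameImage; // hypothetical source of the latest frame
        if (image) {
            [weakSelf framedataWriting:image];
        }
    });
    dispatch_resume(timer);
    self.frameTimer = timer; // keep a strong reference so the timer stays alive
}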
Ending the recording
/// Stop recording
- (void)endrecordVideo
{
    self.end = YES;
    // Wake the writer block in case it is waiting on the semaphore
    dispatch_semaphore_signal(_seam);
}
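To tie the pieces together, a hedged sketch of the overall call order. Here recorder stands for an instance of the class above; videoName, videoUrl, recordVideoCodel, framedataWriting: and endrecordVideo come from the code in this post, and the block signature for videoUrl (taking the saved file path as an NSString) is an assumption.

// End-to-end usage sketch; `recorder` is an instance of the recording class above.
recorder.videoName = @"long_record.mov";   // hypothetical file name
recorder.videoUrl = ^(NSString *path) {    // called once finishWriting completes
    NSLog(@"recording finished, saved to %@", path);
};
[recorder recordVideoCodel];               // set up AVAssetWriter and start the session
// ... call framedataWriting: repeatedly, once per frame ...
[recorder endrecordVideo];                 // set the end flag and wake the writer block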
At the moment, long recordings peak at about 140% CPU usage, and memory stays around 40 MB. After recording for an hour the phone gets slightly warm. If anyone knows a better way to implement long-duration recording, please share.