When I profile UIImagePickerController with the Leaks instrument it reports a memory leak, and a round of Googling suggests that wrapping it in a singleton will make the leak go away.
Sorry, but from my own testing I can tell you that making it a singleton does not help either. If you know a fix that actually works, please tell me.
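For reference, "making it a singleton" here means reusing one UIImagePickerController instance instead of allocating a new one every time. A minimal sketch of that approach is below (the class and method names are purely illustrative, not from any library); in my tests Leaks still reported the same leak with this in place.

// Hypothetical singleton wrapper -- this did NOT remove the leak in my tests.
@interface ImagePickerProvider : NSObject
+ (UIImagePickerController *)sharedPicker;
@end

@implementation ImagePickerProvider
+ (UIImagePickerController *)sharedPicker {
    static UIImagePickerController *picker = nil;
    static dispatch_once_t onceToken;
    dispatch_once(&onceToken, ^{
        picker = [[UIImagePickerController alloc] init];
    });
    return picker;
}
@end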
Next up is the code I use; in my case it is for video recording.
// Requires <MobileCoreServices/MobileCoreServices.h> (for kUTTypeMovie)
// and <AVFoundation/AVFoundation.h> (for AVCaptureDevice / AVAuthorizationStatus).
- (void)useTheSystemRecordViewController {
    UIImagePickerController *imagePicker = [[UIImagePickerController alloc] init];
    imagePicker.delegate = self;
    // Check whether this device has a camera
    if ([UIImagePickerController isSourceTypeAvailable:UIImagePickerControllerSourceTypeCamera]) {
        // Check whether we have permission to capture video
        AVAuthorizationStatus authStatus = [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo];
        if (authStatus == AVAuthorizationStatusRestricted || authStatus == AVAuthorizationStatusDenied) {
            return;
        }
        // Use the camera as the source
        imagePicker.sourceType = UIImagePickerControllerSourceTypeCamera;
        // Capture movies rather than still images
        imagePicker.mediaTypes = @[(NSString *)kUTTypeMovie];
        imagePicker.videoQuality = UIImagePickerControllerQualityTypeIFrame960x540;
        // Maximum recording duration in seconds
        imagePicker.videoMaximumDuration = 20.0;
        // Presentation style of the modal picker
        imagePicker.modalPresentationStyle = UIModalPresentationOverFullScreen;
        [self presentViewController:imagePicker animated:YES completion:^{
            _segement.selectedSegmentIndex = 0;
            _isVideo = NO;
        }];
    }
}
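Two points the method above glosses over: on iOS 10 and later the app's Info.plist needs NSCameraUsageDescription and NSMicrophoneUsageDescription entries or the app will be terminated when the camera opens, and when the status is AVAuthorizationStatusNotDetermined you probably want to request access before presenting the picker. A sketch of that request, meant to slot in right after the existing authorization check (re-invoking this post's own useTheSystemRecordViewController in the callback is just one option):

// Ask for camera access when the user has not been prompted yet.
if (authStatus == AVAuthorizationStatusNotDetermined) {
    [AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo
                             completionHandler:^(BOOL granted) {
        if (granted) {
            dispatch_async(dispatch_get_main_queue(), ^{
                [self useTheSystemRecordViewController];
            });
        }
    }];
    return;
}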
#pragma mark - Recording finished
- (void)imagePickerController:(UIImagePickerController *)picker didFinishPickingMediaWithInfo:(NSDictionary *)info {
    NSString *mediaType = [info objectForKey:UIImagePickerControllerMediaType];
    // The returned media type is either a photo or a movie
    if ([mediaType isEqualToString:(NSString *)kUTTypeImage]) {
        // Photo handling
//        [picker dismissViewControllerAnimated:YES completion:^{
//            UIImage *img = [info objectForKey:UIImagePickerControllerOriginalImage];
//        }];
    } else if ([mediaType isEqualToString:(NSString *)kUTTypeMovie]) {
        // Movie handling
        [picker dismissViewControllerAnimated:YES completion:^{
            MBProgressHUD *progressHUD = [MBProgressHUD showHUDAddedTo:self.view animated:YES];
            progressHUD.removeFromSuperViewOnHide = YES;
            _progressHUD = progressHUD;
            // File manager
            NSFileManager *manager = [NSFileManager defaultManager];
            NSString *pathToMovie = [NSHomeDirectory() stringByAppendingPathComponent:@"Documents/myMovie.mp4"];
            NSURL *mergeFileURL = [NSURL fileURLWithPath:pathToMovie];
            if ([manager fileExistsAtPath:pathToMovie]) {
                [manager removeItemAtPath:pathToMovie error:nil];
            }
            NSString *mp4Path = [NSHomeDirectory() stringByAppendingPathComponent:@"Documents/Movie.mp4"];
            if ([manager fileExistsAtPath:mp4Path]) {
                [manager removeItemAtPath:mp4Path error:nil];
            }
            // Copy the recorded movie from the picker's temporary URL into our own path
            [manager copyItemAtURL:[info objectForKey:UIImagePickerControllerMediaURL] toURL:mergeFileURL error:nil];
            [self encodeVideoOrientation:mergeFileURL];
            /*
            AVURLAsset *asset = [AVURLAsset assetWithURL:[NSURL fileURLWithPath:pathToMovie]];
            // Use the AVURLAsset to get the duration of the movie
            CMTime time = [asset duration];
            int seconds = ceil(time.value / time.timescale);
            NSString *videoTime = [NSString stringWithFormat:@"%d", seconds];
            */
            // Optionally save the recording to the system photo album
//            ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
//            NSURL *recordedVideoURL = [info objectForKey:UIImagePickerControllerMediaURL];
//
//            if ([library videoAtPathIsCompatibleWithSavedPhotosAlbum:recordedVideoURL]) {
//                [library writeVideoAtPathToSavedPhotosAlbum:recordedVideoURL
//                                            completionBlock:^(NSURL *assetURL, NSError *error) {
//
//                                            }];
//            }
        }];
    }
}
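The commented-out ALAssetsLibrary code above relies on an API that has been deprecated since iOS 9. If you do want to save the recording to the photo album, the Photos framework is the current route (it needs the photo-library usage keys in Info.plist). A minimal sketch, assuming #import <Photos/Photos.h> and with error handling omitted:

// Save the recorded movie to the photo library with the Photos framework.
NSURL *recordedVideoURL = [info objectForKey:UIImagePickerControllerMediaURL];
[[PHPhotoLibrary sharedPhotoLibrary] performChanges:^{
    [PHAssetChangeRequest creationRequestForAssetFromVideoAtFileURL:recordedVideoURL];
} completionHandler:^(BOOL success, NSError *error) {
    NSLog(@"Saved to album: %d, error: %@", success, error);
}];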
- (void)encodeVideoOrientation:(NSURL *)anOutputFileURL {
    AVURLAsset *videoAsset = [[AVURLAsset alloc] initWithURL:anOutputFileURL options:nil];
    AVAssetExportSession *assetExport = [[AVAssetExportSession alloc] initWithAsset:videoAsset
                                                                         presetName:AVAssetExportPresetMediumQuality];
    NSString *mp4Path = [NSHomeDirectory() stringByAppendingPathComponent:@"Documents/Movie.mp4"];
    assetExport.outputURL = [NSURL fileURLWithPath:mp4Path];
    assetExport.shouldOptimizeForNetworkUse = YES;
    assetExport.outputFileType = AVFileTypeMPEG4;
    assetExport.videoComposition = [self getVideoComposition:videoAsset];
    [assetExport exportAsynchronouslyWithCompletionHandler:^{
        switch ([assetExport status]) {
            case AVAssetExportSessionStatusFailed:
                NSLog(@"AVAssetExportSessionStatusFailed!");
                break;
            case AVAssetExportSessionStatusCancelled:
                NSLog(@"Export canceled");
                break;
            case AVAssetExportSessionStatusCompleted:
                NSLog(@"Successful!");
                dispatch_async(dispatch_get_main_queue(), ^{
                    [self recordFinished];
                });
                break;
            default:
                break;
        }
    }];
}
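recordFinished itself is not shown in this post; judging from the MBProgressHUD set up in the picker delegate, it should at least hide the HUD on the main thread. A purely illustrative sketch (the body is my assumption, not the original code):

// Hypothetical implementation: hide the progress HUD once the export is done.
- (void)recordFinished {
    [_progressHUD hideAnimated:YES];   // MBProgressHUD 1.x API
    _progressHUD = nil;
    // ...continue with upload or playback of Documents/Movie.mp4 as needed
}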
The key part is the code that follows: when the video is recorded in landscape, the orientation has to be corrected or the exported file will play back rotated.
#pragma mark - Fix the orientation of the saved recording
- (AVMutableVideoComposition *)getVideoComposition:(AVAsset *)asset
{
    AVAssetTrack *videoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
    AVMutableComposition *composition = [AVMutableComposition composition];
    AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
    CGSize videoSize = videoTrack.naturalSize;
    BOOL isPortrait_ = [self isVideoPortrait:asset];
    if (isPortrait_) {
        NSLog(@"video is portrait ");
        // Portrait footage still reports a landscape naturalSize, so swap width and height
        videoSize = CGSizeMake(videoSize.height, videoSize.width);
    }
    composition.naturalSize = videoSize;
    videoComposition.renderSize = videoSize;
    // videoComposition.renderSize = videoTrack.naturalSize;
    videoComposition.frameDuration = CMTimeMakeWithSeconds(1 / videoTrack.nominalFrameRate, 600);

    AVMutableCompositionTrack *compositionVideoTrack;
    compositionVideoTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    [compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration) ofTrack:videoTrack atTime:kCMTimeZero error:nil];

    AVMutableVideoCompositionLayerInstruction *layerInst;
    layerInst = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
    // Apply the track's preferredTransform so the pixels are rotated into the render rect
    [layerInst setTransform:videoTrack.preferredTransform atTime:kCMTimeZero];

    AVMutableVideoCompositionInstruction *inst = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    inst.timeRange = CMTimeRangeMake(kCMTimeZero, asset.duration);
    inst.layerInstructions = [NSArray arrayWithObject:layerInst];
    videoComposition.instructions = [NSArray arrayWithObject:inst];
    return videoComposition;
}
- (BOOL)isVideoPortrait:(AVAsset *)asset
{
    BOOL isPortrait = NO;
    NSArray *tracks = [asset tracksWithMediaType:AVMediaTypeVideo];
    if ([tracks count] > 0) {
        AVAssetTrack *videoTrack = [tracks objectAtIndex:0];
        CGAffineTransform t = videoTrack.preferredTransform;
        // Portrait
        if (t.a == 0 && t.b == 1.0 && t.c == -1.0 && t.d == 0) {
            isPortrait = YES;
        }
        // PortraitUpsideDown
        if (t.a == 0 && t.b == -1.0 && t.c == 1.0 && t.d == 0) {
            isPortrait = YES;
        }
        // LandscapeRight
        if (t.a == 1.0 && t.b == 0 && t.c == 0 && t.d == 1.0) {
            isPortrait = NO;
        }
        // LandscapeLeft
        if (t.a == -1.0 && t.b == 0 && t.c == 0 && t.d == -1.0) {
            isPortrait = NO;
        }
    }
    return isPortrait;
}
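The four exact-match checks above only cover the transforms the camera normally produces. If you ever run this against assets whose preferredTransform also carries a translation, a more general approach is to look at the rotation angle encoded in the transform. A small alternative helper, my own sketch rather than part of the original post:

// Alternative orientation check: derive the rotation angle from the
// preferredTransform instead of matching exact matrix values.
- (BOOL)isVideoPortraitByAngle:(AVAsset *)asset {
    NSArray *tracks = [asset tracksWithMediaType:AVMediaTypeVideo];
    if (tracks.count == 0) {
        return NO;
    }
    CGAffineTransform t = [(AVAssetTrack *)tracks.firstObject preferredTransform];
    // atan2(b, a) gives the rotation applied by the transform, in radians.
    CGFloat degrees = atan2(t.b, t.a) * 180.0 / M_PI;
    // A rotation of +/-90 degrees means the recorded pixels play back as portrait.
    return fabs(fabs(degrees) - 90.0) < 1.0;
}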