import UIKit
import AVFoundation
import MediaPlayer
import Foundation

class TVViewController: BaseViewController, UIScrollViewDelegate {

    var payerTV: AVPlayer?
    var playToEndTimeBool: Bool?
    var playAtem: AVPlayerItem?
    var loadProgressView: UIView?
    var playerLayer: AVPlayerLayer?
    var url: NSURL?
    var tempPath: NSString?
    var editScrollView: UIScrollView?
    var timeMutArr: NSMutableArray?
    var shearPlayBool: Bool?
    var shearAfterPlayBool: Bool?

    override func viewDidLoad() {
        super.viewDidLoad()
        self.initInterface()
        self.initTV()
        self.aVAssetImageGenerator()
    }
    // Extract frame images from the video
    func aVAssetImageGenerator() {
        /*
         AVAssetImageGenerator provides thumbnail or preview images of an asset, independent of playback.
         */
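        // For a single thumbnail, a synchronous call is often enough. A minimal sketch
        // (assuming `asset` is the AVURLAsset created just below; error handling omitted):
        //
        //     let generator = AVAssetImageGenerator(asset: asset)
        //     generator.appliesPreferredTrackTransform = true
        //     var actualTime = CMTime.zero
        //     if let cgImage = try? generator.copyCGImage(at: CMTime(seconds: 1, preferredTimescale: 600),
        //                                                 actualTime: &actualTime) {
        //         let thumbnail = UIImage(cgImage: cgImage)
        //         _ = thumbnail // use the thumbnail, e.g. in an image view
        //     }
        //
        // This view controller instead uses generateCGImagesAsynchronously(forTimes:) to request one frame
        // per second, which suits the filmstrip-style scroll view built here.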
        self.editScrollView = UIScrollView.init(frame: CGRect.init(origin: CGPoint.init(x: 0, y: 300), size: CGSize.init(width: UIScreen.main.bounds.size.width, height: 80)))
        self.view.addSubview(self.editScrollView!)
        self.editScrollView?.delegate = self
        // Read and decode video frames
        // Initialize the AVURLAsset object
        let asset: AVURLAsset = AVURLAsset.init(url: self.url! as URL)
        // Total length of the video in seconds = duration value / timescale
        let sumTime: Float = Float(asset.duration.value) / Float(asset.duration.timescale)
        // Create the AVAssetImageGenerator object
        let generator: AVAssetImageGenerator = AVAssetImageGenerator.init(asset: asset)
        generator.maximumSize = CGSize.init(width: UIScreen.main.bounds.width, height: 80)
        generator.appliesPreferredTrackTransform = true
        generator.requestedTimeToleranceBefore = CMTime.zero
        generator.requestedTimeToleranceAfter = CMTime.zero
        self.timeMutArr = NSMutableArray.init()
        // Request one frame per second of the video
        for i in 0 ..< Int(sumTime) {
            let time: CMTime = CMTimeMake(value: Int64(i) * Int64(asset.duration.timescale), timescale: asset.duration.timescale)
            let value: NSValue = NSValue.init(time: time)
            self.timeMutArr?.add(value)
        }
        var count: NSInteger = 0
        // The completion handler runs on a background queue, so hop to the main queue for UI work.
        generator.generateCGImagesAsynchronously(forTimes: self.timeMutArr! as! [NSValue]) { (requestedTime, img, actualTime, result, error) in
            if result == AVAssetImageGenerator.Result.succeeded {
                print(count)
                DispatchQueue.main.sync {
                    let thumImgView: UIImageView = UIImageView.init()
                    thumImgView.image = UIImage.init(cgImage: img!)
                    self.editScrollView?.addSubview(thumImgView)
                    thumImgView.frame = CGRect.init(origin: CGPoint.init(x: 50 + count * 50, y: 0), size: CGSize.init(width: 50, height: 70))
                    self.editScrollView?.contentSize = CGSize.init(width: 100 + count * 50, height: 0)
                    count = count + 1
                }
            }
            if result == AVAssetImageGenerator.Result.failed {
                print(error!.localizedDescription)
            }
            if result == AVAssetImageGenerator.Result.cancelled {
                print("cancelled")
            }
        }
        self.editScrollView?.contentOffset = CGPoint.init(x: 50, y: 0)
    }
    func scrollViewDidEndDecelerating(_ scrollView: UIScrollView) {
        print(scrollView.contentOffset.x)
    }
    // Interface setup
    func initInterface() {
        self.view.backgroundColor = UIColor.white
        self.title = "Video Playback and Trimming"
    }
    // Video setup
    func initTV() {
        shearAfterPlayBool = false
        self.url = NSURL.init(fileURLWithPath: Bundle.main.path(forResource: "tv", ofType: "mp4")!)
        shearPlayBool = false
        self.addPlayer(url: self.url!)
        self.addBut()
    }
    // Add the player
    func addPlayer(url: NSURL) {
        playToEndTimeBool = false
        NotificationCenter.default.addObserver(self, selector: #selector(PlayToEndTime), name: NSNotification.Name.AVPlayerItemDidPlayToEndTime, object: nil)
        if !(shearPlayBool!) {
            self.url = NSURL.init(fileURLWithPath: Bundle.main.path(forResource: "tv", ofType: "mp4")!)
        }
        self.playAtem = AVPlayerItem.init(url: self.url! as URL)
        self.payerTV = AVPlayer.init(playerItem: self.playAtem)
        // Create the player layer
        self.playerLayer = AVPlayerLayer.init(player: self.payerTV)
        self.playerLayer?.frame = CGRect.init(origin: CGPoint.init(x: 0, y: 160), size: CGSize.init(width: UIScreen.main.bounds.size.width, height: 140))
        self.payerTV?.seek(to: CMTimeMake(value: 0, timescale: 1000)) // Set the playback position; 1000 is the timescale
        self.view.layer.addSublayer(self.playerLayer!)
        /*
         volume -- the audio volume of the player: 0.0 means "mute all audio", 1.0 means "play at the current item's full volume".
         Note for iOS: do not use this property to implement a volume slider for media playback. Use MPVolumeView for that,
         which is customizable in appearance and provides the standard media-playback behavior users expect.
         On iOS this property is most useful for controlling the volume of the AVPlayer relative to other audio output,
         not as an end-user volume control.
         */
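        // If an end-user volume slider is wanted, MPVolumeView (from the MediaPlayer framework imported above)
        // is the recommended control. A minimal sketch; the frame values here are placeholders:
        //
        //     let volumeView = MPVolumeView(frame: CGRect(x: 20, y: 400, width: UIScreen.main.bounds.width - 40, height: 40))
        //     self.view.addSubview(volumeView)
        //
        // The `volume` property set below is only used to balance this player against other audio output.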
        self.payerTV?.volume = 0.5
        let progressView: UIProgressView = UIProgressView.init(progressViewStyle: UIProgressView.Style.default)
        progressView.frame = CGRect.init(origin: CGPoint.init(x: 0, y: 150), size: CGSize.init(width: UIScreen.main.bounds.size.width, height: 20))
        self.view.addSubview(progressView)
        if self.loadProgressView == nil {
            self.loadProgressView = UIView.init(frame: CGRect.init(origin: CGPoint.init(x: 0, y: 145), size: CGSize.init(width: 10, height: 10)))
            self.view.addSubview(self.loadProgressView!)
            self.loadProgressView?.backgroundColor = UIColor.yellow
        } else {
            self.loadProgressView?.frame = CGRect.init(origin: CGPoint.init(x: 0, y: 145), size: CGSize.init(width: 10, height: 10))
        }
        // Observe playback progress
        self.payerTV?.addPeriodicTimeObserver(forInterval: CMTimeMakeWithSeconds(1, preferredTimescale: Int32(NSEC_PER_SEC)), queue: nil, using: { (time) in
            // Progress = current time / total duration
            let progress: Float = Float(CMTimeGetSeconds(self.payerTV!.currentItem!.currentTime()) / CMTimeGetSeconds(self.payerTV!.currentItem!.duration))
            progressView.progress = progress
            if progress == 1 {
                self.loadProgressView?.frame = CGRect.init(origin: CGPoint.init(x: Int(progress * Float(UIScreen.main.bounds.size.width) - 10.0), y: 145), size: CGSize.init(width: 10, height: 10))
            } else {
                self.loadProgressView?.frame = CGRect.init(origin: CGPoint.init(x: Int(progress * Float(UIScreen.main.bounds.size.width)), y: 145), size: CGSize.init(width: 10, height: 10))
            }
        })
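        // addPeriodicTimeObserver(forInterval:queue:using:) returns an opaque observer token. A sketch of the
        // usual pattern, assuming a hypothetical property `var timeObserverToken: Any?` on this class:
        //
        //     self.timeObserverToken = self.payerTV?.addPeriodicTimeObserver(forInterval: interval, queue: nil) { time in /* update UI */ }
        //     // and later, e.g. in deinit or before replacing the player:
        //     if let token = self.timeObserverToken {
        //         self.payerTV?.removeTimeObserver(token)
        //         self.timeObserverToken = nil
        //     }
        //
        // Since addPlayer(url:) can run more than once (see shearPlay below), keeping and removing the token
        // avoids leaking observers when the player is replaced.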
        // Add a pan gesture on the progress indicator so the playback position can be dragged
        let pan: UIPanGestureRecognizer = UIPanGestureRecognizer.init(target: self, action: #selector(playpanGestureRecognizer(gestureRecognizer:)))
        self.loadProgressView?.addGestureRecognizer(pan)
    }
    // Control buttons
    func addBut() {
        let playBut: UIButton = UIButton.init(type: UIButton.ButtonType.custom)
        self.view.addSubview(playBut)
        playBut.frame = CGRect.init(origin: CGPoint.init(x: 20, y: 104), size: CGSize.init(width: 80, height: 30))
        playBut.setTitle("Play", for: UIControl.State.normal)
        playBut.setTitleColor(UIColor.black, for: UIControl.State.normal)
        playBut.addTarget(self, action: #selector(play), for: UIControl.Event.touchUpInside)
        let shearPlayBut: UIButton = UIButton.init(type: UIButton.ButtonType.custom)
        self.view.addSubview(shearPlayBut)
        shearPlayBut.frame = CGRect.init(origin: CGPoint.init(x: 20, y: 64), size: CGSize.init(width: 80, height: 30))
        shearPlayBut.setTitle("Play Trimmed", for: UIControl.State.normal)
        shearPlayBut.setTitleColor(UIColor.black, for: UIControl.State.normal)
        shearPlayBut.addTarget(self, action: #selector(shearPlay), for: UIControl.Event.touchUpInside)
        let suspendedBut: UIButton = UIButton.init(type: UIButton.ButtonType.custom)
        self.view.addSubview(suspendedBut)
        suspendedBut.frame = CGRect.init(origin: CGPoint.init(x: 120, y: 104), size: CGSize.init(width: 80, height: 30))
        suspendedBut.setTitle("Pause", for: UIControl.State.normal)
        suspendedBut.setTitleColor(UIColor.black, for: UIControl.State.normal)
        suspendedBut.addTarget(self, action: #selector(suspended), for: UIControl.Event.touchUpInside)
        let retBut: UIButton = UIButton.init(type: UIButton.ButtonType.custom)
        self.view.addSubview(retBut)
        retBut.frame = CGRect.init(origin: CGPoint.init(x: 220, y: 104), size: CGSize.init(width: 80, height: 30))
        retBut.setTitle("Replay", for: UIControl.State.normal)
        retBut.setTitleColor(UIColor.black, for: UIControl.State.normal)
        retBut.addTarget(self, action: #selector(ret), for: UIControl.Event.touchUpInside)
        let shearBut: UIButton = UIButton.init(type: UIButton.ButtonType.custom)
        self.view.addSubview(shearBut)
        shearBut.frame = CGRect.init(origin: CGPoint.init(x: 220, y: 64), size: CGSize.init(width: 80, height: 30))
        shearBut.setTitle("Trim", for: UIControl.State.normal)
        shearBut.setTitleColor(UIColor.black, for: UIControl.State.normal)
        shearBut.addTarget(self, action: #selector(shear), for: UIControl.Event.touchUpInside)
    }
    // Play
    @objc func play() {
        if playToEndTimeBool! {
            playToEndTimeBool = false
            self.payerTV?.seek(to: CMTimeMake(value: 0, timescale: 1000))
        }
        shearPlayBool = false
        self.payerTV?.play()
    }
    // Pause
    @objc func suspended() {
        self.payerTV?.pause()
    }
    // Replay
    @objc func ret() {
        self.payerTV?.seek(to: CMTimeMake(value: 0, timescale: 1000))
        self.payerTV?.play()
    }
    // Trim
    @objc func shear() {
        self.suspended()
        shearAfterPlayBool = true
        self.tempPath = (NSTemporaryDirectory() + "tmpMov.mp4") as NSString
        self.deleteTempFile() // Clear out the previously trimmed file before trimming again
        let asset: AVAsset = AVAsset.init(url: self.url! as URL)
        let exportSession: AVAssetExportSession = AVAssetExportSession.init(asset: asset, presetName: AVAssetExportPresetPassthrough)!
        let furl: NSURL = NSURL.init(fileURLWithPath: self.tempPath! as String)
        exportSession.outputURL = furl as URL
        exportSession.outputFileType = AVFileType.mp4
        let endTime: NSInteger = NSInteger(CMTimeGetSeconds(self.payerTV!.currentItem!.duration)) // Desired end point of the trim
        let startTime: NSInteger = NSInteger(CMTimeGetSeconds(self.payerTV!.currentItem!.currentTime())) // Desired start point of the trim
        /*
         CMTime CMTimeMakeWithSeconds(
             Float64 seconds,            // the time in seconds, e.g. the current playback position
             int32_t preferredTimeScale  // the preferred timescale, i.e. time units per second
         );
         */
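        // For example, CMTimeMakeWithSeconds(3, preferredTimescale: Int32(NSEC_PER_SEC)) represents the
        // 3-second mark, stored as 3_000_000_000 units at a timescale of 1_000_000_000 units per second.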
        let start: CMTime = CMTimeMakeWithSeconds(Float64(startTime), preferredTimescale: Int32(1 * NSEC_PER_SEC))
        let duration: CMTime = CMTimeMakeWithSeconds(Float64(endTime - startTime), preferredTimescale: Int32(1 * NSEC_PER_SEC))
        /*
         start is the starting time of the range; duration is how long the range lasts.
         */
        let range: CMTimeRange = CMTimeRangeMake(start: start, duration: duration)
        /*
         timeRange -- the time range to export from the source. The export session's default time range is
         kCMTimeZero..kCMTimePositiveInfinity, which means the asset's full duration is exported.
         */
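        // Worked example: if playback is currently at 2 s and the asset is 10 s long, then startTime = 2,
        // endTime = 10, and the exported range is CMTimeRangeMake(start: 2 s, duration: 8 s),
        // i.e. everything from the current position to the end of the video.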
        exportSession.timeRange = range
        exportSession.exportAsynchronously {
            switch exportSession.status {
            case .unknown:
                print("unknown")
            case .waiting:
                print("waiting")
            case .exporting:
                print("exporting")
            case .completed:
                print("completed")
                let movieUrl: NSURL = NSURL.init(fileURLWithPath: self.tempPath! as String)
                UISaveVideoAtPathToSavedPhotosAlbum(movieUrl.relativePath!, self, #selector(self.video(_:didFinishSavingWithError:contextInfo:)), nil)
            case .failed:
                print(exportSession.error!.localizedDescription)
            case .cancelled:
                print("cancelled")
            @unknown default:
                break
            }
        }
    }
    // Completion callback for UISaveVideoAtPathToSavedPhotosAlbum
    @objc func video(_ videoPath: String, didFinishSavingWithError error: NSError?, contextInfo info: UnsafeMutableRawPointer?) {
    }
    // Play the trimmed video
    @objc func shearPlay() {
        if shearAfterPlayBool! {
            shearPlayBool = true
            // Play the video produced by the trim
            self.url = NSURL.init(fileURLWithPath: self.tempPath! as String)
            self.addPlayer(url: self.url!)
            self.payerTV?.play()
        }
    }
    // Playback finished
    @objc func PlayToEndTime() {
        playToEndTimeBool = true // Playback has reached the end
    }
    // Pan gesture handler
    @objc func playpanGestureRecognizer(gestureRecognizer: UIPanGestureRecognizer) {
        // Get the gesture's translation
        let position: CGPoint = gestureRecognizer.translation(in: self.loadProgressView!)
        // Move the indicator view by applying a translation transform
        self.loadProgressView!.transform = CGAffineTransform.init(translationX: position.x, y: position.y)
        // Reset the accumulated translation to zero
        gestureRecognizer.setTranslation(CGPoint.zero, in: self.loadProgressView)
        let progress: CGFloat = self.loadProgressView!.frame.origin.x / UIScreen.main.bounds.size.width
        self.payerTV?.seek(to: CMTimeMakeWithSeconds(CMTimeGetSeconds(self.payerTV!.currentItem!.duration) * Double(progress), preferredTimescale: 1000))
        self.payerTV?.play()
    }
    func deleteTempFile() {
        let url: NSURL = NSURL.init(fileURLWithPath: self.tempPath! as String)
        let fm: FileManager = FileManager.default
        let exist: Bool = fm.fileExists(atPath: url.path!)
        if exist {
            do {
                try fm.removeItem(at: url as URL)
            } catch {
            }
        }
    }
}