iOS Audio/Video Capture and Writing to a File
Add permissions:
Add the following entries to the project's Info.plist:
Privacy - Camera Usage Description: Needs access to your camera
Privacy - Contacts Usage Description: Needs access to your contacts
Privacy - Microphone Usage Description: Needs access to your microphone
Privacy - Photo Library Usage Description: Needs access to your photo library
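Besides the Info.plist keys, it can be worth checking the runtime authorization status before configuring the session. Here is a minimal sketch using the same Swift 3-era AVFoundation API as the rest of the article; the helper name checkCameraAndMicAccess is just for illustration, not something the article defines.

import AVFoundation

func checkCameraAndMicAccess(completion: @escaping (Bool) -> Void) {
    // Ask for camera access first, then microphone; both prompts show the
    // usage descriptions configured above in Info.plist.
    AVCaptureDevice.requestAccess(forMediaType: AVMediaTypeVideo) { cameraGranted in
        guard cameraGranted else { return completion(false) }
        AVCaptureDevice.requestAccess(forMediaType: AVMediaTypeAudio) { micGranted in
            completion(micGranted)
        }
    }
}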
import UIKit
import AVFoundation

class ViewController: UIViewController {
    fileprivate lazy var session: AVCaptureSession = AVCaptureSession()
    // Video output
    fileprivate var videoOutPut: AVCaptureVideoDataOutput?
    // Preview layer
    fileprivate var previewLayer: AVCaptureVideoPreviewLayer?
    // Video input
    fileprivate var videoInput: AVCaptureDeviceInput?
    // File output
    fileprivate var fileOutPut: AVCaptureMovieFileOutput?

    override func viewDidLoad() {
        super.viewDidLoad()
        // MARK: Video capture
        initVideoInputOutput()
        // MARK: Audio capture
        initAudioInputOutput()
        // MARK: Create the preview layer
        initPreviewLayer()
    }
}
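Note that viewDidLoad only configures the session; nothing above actually starts it. The article does not show where capture begins, but somewhere (for example in a "start" button action) you will need something along these lines before the preview layer shows anything. The method names here are hypothetical:

extension ViewController {
    // Hypothetical start action: begins pushing frames to the outputs.
    fileprivate func startCapture() {
        if !session.isRunning {
            session.startRunning()
        }
    }

    // Hypothetical stop action.
    fileprivate func stopCapture() {
        if session.isRunning {
            session.stopRunning()
        }
    }
}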
Capturing audio and video
extension ViewController {
    // Video capture
    fileprivate func initVideoInputOutput() {
        // Video input: pick the front camera
        guard let devices = AVCaptureDevice.devices() as? [AVCaptureDevice] else { return }
        guard let device = devices.filter({ $0.position == .front }).first else { return }
        guard let input = try? AVCaptureDeviceInput(device: device) else { return }
        self.videoInput = input

        // Video output
        let output = AVCaptureVideoDataOutput()
        output.setSampleBufferDelegate(self, queue: DispatchQueue.global())
        self.videoOutPut = output

        // Add the input and output to the session
        addInputOutPut(input, output)
    }

    // Audio capture
    fileprivate func initAudioInputOutput() {
        // Audio input: the default microphone
        guard let device = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeAudio) else { return }
        guard let input = try? AVCaptureDeviceInput(device: device) else { return }

        // Audio output
        let output = AVCaptureAudioDataOutput()
        output.setSampleBufferDelegate(self, queue: DispatchQueue.global())

        // Add the input and output to the session
        addInputOutPut(input, output)
    }

    // Add an input/output pair to the session
    private func addInputOutPut(_ input: AVCaptureInput, _ output: AVCaptureOutput) {
        session.beginConfiguration() // start configuration
        if session.canAddInput(input) {
            session.addInput(input)
        }
        if session.canAddOutput(output) {
            session.addOutput(output)
        }
        session.commitConfiguration() // commit configuration
    }

    // Create the preview layer
    fileprivate func initPreviewLayer() {
        guard let preLayer = AVCaptureVideoPreviewLayer(session: session) else { return }
        self.previewLayer = preLayer
        preLayer.frame = view.bounds
        view.layer.insertSublayer(preLayer, at: 0)
    }
}
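The same beginConfiguration / commitConfiguration pattern used in addInputOutPut also covers reconfiguring the session later, for example to swap cameras. The following is only a sketch: the switchCamera name and the assumption that videoInput always holds the current camera input are mine, not the article's.

extension ViewController {
    // Hypothetical helper: swap the current camera input for the opposite position.
    fileprivate func switchCamera() {
        guard let currentInput = videoInput else { return }
        let newPosition: AVCaptureDevicePosition = currentInput.device.position == .front ? .back : .front

        guard let devices = AVCaptureDevice.devices(withMediaType: AVMediaTypeVideo) as? [AVCaptureDevice],
            let newDevice = devices.filter({ $0.position == newPosition }).first,
            let newInput = try? AVCaptureDeviceInput(device: newDevice) else { return }

        session.beginConfiguration()
        session.removeInput(currentInput)
        if session.canAddInput(newInput) {
            session.addInput(newInput)
            self.videoInput = newInput
        } else {
            session.addInput(currentInput) // fall back to the old input
        }
        session.commitConfiguration()
    }
}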
Capture delegate methods:
extension ViewController: AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate {
    func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection!) {
        if videoOutPut?.connection(withMediaType: AVMediaTypeVideo) == connection {
            print("Got video data")
        } else {
            print("Got audio data")
        }
    }
}
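Inside the video branch you would typically pull the pixel data out of the sample buffer before encoding or otherwise processing it, which is beyond the scope of this article. A minimal sketch of just the extraction step:

// Inside the video branch of captureOutput, something like:
if let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) {
    let width = CVPixelBufferGetWidth(pixelBuffer)
    let height = CVPixelBufferGetHeight(pixelBuffer)
    print("Video frame: \(width) x \(height)")
}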
Writing to a file
fileprivate func movieOutPutFile() {
    let fileOutPut = AVCaptureMovieFileOutput()

    session.beginConfiguration()
    // Remove a previously added movie file output, if any, before adding the new one
    if let oldOutput = self.fileOutPut {
        session.removeOutput(oldOutput)
    }
    if session.canAddOutput(fileOutPut) {
        session.addOutput(fileOutPut)
    }
    session.commitConfiguration()
    self.fileOutPut = fileOutPut

    // The video connection only exists after the output has been added to the session
    let connection = fileOutPut.connection(withMediaType: AVMediaTypeVideo)
    connection?.automaticallyAdjustsVideoMirroring = true

    // Define a file path to record to, used here as a test file
    let path = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true).first! + "/glt.mp4"
    fileOutPut.startRecording(toOutputFileURL: URL(fileURLWithPath: path), recordingDelegate: self)
}
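Recording also has to be stopped explicitly at some point, and AVCaptureMovieFileOutput will fail to start if a file already exists at the target path. A hedged sketch of both; the method names here are mine, not the article's:

extension ViewController {
    // Hypothetical counterpart to movieOutPutFile(): stop writing the movie file.
    fileprivate func stopMovieOutPutFile() {
        fileOutPut?.stopRecording() // triggers the didFinishRecording delegate callback
    }

    // startRecording fails if a file already exists at the target path,
    // so it can be worth clearing it out before recording again.
    fileprivate func removeOldRecording(atPath path: String) {
        if FileManager.default.fileExists(atPath: path) {
            try? FileManager.default.removeItem(atPath: path)
        }
    }
}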
//MARK: File output recording delegate
extension ViewController: AVCaptureFileOutputRecordingDelegate {
    func capture(_ captureOutput: AVCaptureFileOutput!, didStartRecordingToOutputFileAt fileURL: URL!, fromConnections connections: [Any]!) {
        print("Started recording")
    }

    func capture(_ captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAt outputFileURL: URL!, fromConnections connections: [Any]!, error: Error!) {
        print("Finished recording")
    }
}
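The Info.plist section also asks for photo-library access, which none of the code above uses; presumably the finished recording is meant to end up in the photo library. That is an assumption on my part, but a minimal sketch of doing it from the finish callback could look like this:

func capture(_ captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAt outputFileURL: URL!, fromConnections connections: [Any]!, error: Error!) {
    print("Finished recording")
    // Assumption: copy the finished movie into the photo library (not shown in the article).
    if error == nil, UIVideoAtPathIsCompatibleWithSavedPhotosAlbum(outputFileURL.path) {
        UISaveVideoAtPathToSavedPhotosAlbum(outputFileURL.path, nil, nil, nil)
    }
}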