AVCaptureMovieFileOutput系列
Swift
版本哦!
本篇不涉及 :視頻輸出質量
幀率設置
具體的設備格式
像素格式
光學防抖
...等等
這些都會在下一篇
中帶你去認識。若是還不會用,就想了解這麼多。就如同還不會走就要跑同樣,是要跌大跟頭滴!
UIImagePickerController
AVFoundation
本文主要內容是: AVFoundation
git
AVFoundation
與 UIImagePickerController
的區別在於 在於對視頻流的處理,顯然前者會更高級一點。AVFoundation
中對視頻的輸出處理 又分爲 AVCaptureMovieFileOutput
與 AVAssetWriter
。這裏若是想要對視頻的輸出給出更多的操做那選擇 AVAssetWriter
是個不錯的選擇。 AVFoundation
更多的區別,仍是在代碼中體驗比較好,說太多都沒用。就是 幹
首先咱們新建一個工程
並在工程中的 plist
文件中添加訪問 權限
Privacy - Camera Usage Description
Privacy - Microphone Usage Description
Privacy - Photo Library Usage Description
Privacy - Media Library Usage Description
session
AVCaptureMovieFileOutput
關於 AVCaptureMovieFileOutput
看上圖對號入座閉包
首先新建一個 fileOutputViewController
控制器
控制器上放倆按鈕: Close
Record
你能夠用 storyboard
拖拽也能夠用代碼實現其點擊事件
由上圖咱們能夠看到輸出方式有兩種 AVCaptureMovieFileOutput
與 AVAssetWriter
,在輸出以前他們對視頻的操做是同樣的,因此咱們能夠把 它倆公共的部分抽象出來一個類,對使用不一樣的輸出方式進行繼承這個類就 OK
了
相同的部分抽象成 一個繼承 NSObject
的 CaptureSessionCoordinator
公共類
該公共類不對採集後的視頻不作輸出處理,由於輸出有兩種不一樣的處理結果。
每一種處理正是其繼承 CaptureSessionCoordinator
類的 子類
完成其處理模塊化
對 AVCaptureSession
類進行處理,相關屬性以下:
AVCaptureSession
AVCaptureDevice
代理
視圖
由於使用到線程,故對資源的加鎖問題,在 Swift
中無法直接向 Oc
那樣直接使用: synchronized
故在此利用閉包的特性達到相同的效果:
如何使用看文中代碼
/// Runs `dispose` while holding the Objective-C runtime lock associated
/// with `lock` — a closure-based stand-in for Obj-C's @synchronized.
func synchronized(_ lock: AnyObject, dispose: () -> ()) {
    objc_sync_enter(lock)
    // defer guarantees the lock is released on every exit path.
    defer { objc_sync_exit(lock) }
    dispose()
}
因爲對視頻的處理都不是在主控制器fileOutputViewController
裏面執行的。故,對視頻的輸出都是須要代理來回調到控制器裏面執行後續的相關操做。
因此這裏須要一個代理:
/// Callbacks from the capture coordinator back to the hosting controller.
protocol captureSessionCoordinatorDelegate: class {
    /// Recording has actually started; safe to re-enable UI.
    func coordinatorDidBeginRecording(coordinator: CaptureSessionCoordinator)
    /// Recording finished and the movie file is available at `url`.
    /// FIX: the `url` parameter was missing from the original declaration,
    /// although both the caller and the implementation already pass it.
    func didFinishRecording(coordinator: CaptureSessionCoordinator, url: URL)
}
上面的鋪墊後,下面開始對 AVCaptureSession
進行相應的操做:
以咱們的常識,該類中必須有這些方法:
startRunning
結束運行 stopRunning
開始記錄 startRecording
stopRecording
AVCaptureVideoPreviewLayer
其餘的方法能夠在初始中進行,也能夠進行模塊化拆分
該類一個完整的代碼以下:
/// Base class owning the AVCaptureSession and its default camera/mic inputs.
/// Output handling is deliberately left to subclasses (file output vs. asset
/// writer), which override startRecording/stopRecording.
class CaptureSessionCoordinator: NSObject {
    var captureSession: AVCaptureSession?
    var cameraDevice: AVCaptureDevice?
    // Queue the delegate callbacks should be dispatched on (set via setDelegate).
    var delegateCallQueue: DispatchQueue?
    weak var delegate: captureSessionCoordinatorDelegate?
    // Serial queue so session start/stop never blocks the main thread.
    private var sessionQueue = DispatchQueue(label: "coordinator.Session")
    private var previewLayer: AVCaptureVideoPreviewLayer?
    override init() {
        super.init()
        captureSession = setupCaptureSession()
    }
    /// Registers the delegate and the queue its callbacks should run on.
    /// Guarded by `synchronized` because it may race with capture callbacks.
    public func setDelegate(capDelegate: captureSessionCoordinatorDelegate,queue: DispatchQueue) {
        synchronized(self) {
            delegate = capDelegate
            if delegateCallQueue != queue {
                delegateCallQueue = queue
            }
        }
    }
    //MARK: ________________Session Setup________________
    /// Builds the session with the default camera and microphone inputs.
    private func setupCaptureSession() -> AVCaptureSession {
        let session = AVCaptureSession()
        if !addDefaultCameraInputToCaptureSession(capSession: session) {
            printLogDebug("failed to add camera input to capture session")
        }
        // FIX: the original tested the success case here (missing `!`), so it
        // logged a bogus failure when the mic was added and stayed silent when
        // it actually failed.
        if !addDefaultMicInputToCaptureSession(capSession: session) {
            printLogDebug("failed to add mic input to capture session")
        }
        return session
    }
    /// Attaches the default video device; returns false on failure.
    private func addDefaultCameraInputToCaptureSession(capSession: AVCaptureSession) -> Bool {
        do {
            let cameraInput = try AVCaptureDeviceInput(device: AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo))
            let success = addInput(input: cameraInput, capSession: capSession)
            cameraDevice = cameraInput.device
            return success
        } catch let error as NSError {
            printLogDebug("error configuring camera input: \(error.localizedDescription)")
            return false
        }
    }
    /// Attaches the default audio device; returns false on failure.
    private func addDefaultMicInputToCaptureSession(capSession: AVCaptureSession) -> Bool {
        do {
            let micInput = try AVCaptureDeviceInput(device: AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeAudio))
            let success = addInput(input: micInput, capSession: capSession)
            return success
        } catch let error as NSError {
            printLogDebug("error configuring mic input: \(error.localizedDescription)")
            return false
        }
    }
    //MARK: ________________Public Api________________
    func addInput(input: AVCaptureDeviceInput,capSession: AVCaptureSession) -> Bool {
        if capSession.canAddInput(input) {
            capSession.addInput(input)
            return true
        }
        printLogDebug("input error")
        return false
    }
    func addOutput(output: AVCaptureOutput,capSession: AVCaptureSession) -> Bool {
        if capSession.canAddOutput(output) {
            capSession.addOutput(output)
            return true
        }
        printLogDebug("output error")
        return false
    }
    func startRunning() {
        sessionQueue.async {
            self.captureSession?.startRunning()
        }
    }
    func stopRunning() {
        sessionQueue.async {
            // FIX: the original re-entered stopRunning() from inside this block,
            // enqueueing itself forever (unbounded recursion on sessionQueue).
            self.captureSession?.stopRunning()
        }
    }
    /// Lazily creates and returns the preview layer for this session.
    /// Added for consistency: the controller calls previewLayerSetting()
    /// but the original class only declared the `previewLayer` property.
    func previewLayerSetting() -> AVCaptureVideoPreviewLayer {
        if previewLayer == nil {
            previewLayer = AVCaptureVideoPreviewLayer(session: captureSession!)
        }
        return previewLayer!
    }
    func startRecording() {
        // Overridden by subclasses.
    }
    func stopRecording() {
        // Overridden by subclasses.
    }
}
咱們建立以 AVCaptureMovieFileOutput 方式輸出並繼承 CaptureSessionCoordinator 的類:fileOutputCoordinator
由最上面的大圖可知,AVFoundation
輸出有兩種:AVCaptureMovieFileOutput
與AVAssetWriter
。
而 AVCaptureMovieFileOutput
是對輸出流沒有作太多的處理,以AVCaptureMovieFileOutput
方式進行視頻輸出處理的類,不須要太多的處理。
故繼承 CaptureSessionCoordinator
它的fileOutputCoordinator
子類只需以下:
重點即是對輸出的處理
/// Coordinator that records via AVCaptureMovieFileOutput — the "simple"
/// output path, with no per-frame processing.
class fileOutputCoordinator: CaptureSessionCoordinator, AVCaptureFileOutputRecordingDelegate {
    var movieFileOutput: AVCaptureMovieFileOutput?
    override init() {
        super.init()
        let output = AVCaptureMovieFileOutput()
        movieFileOutput = output
        _ = addOutput(output: output, capSession: captureSession!)
    }
    /// Begins writing to a fresh, collision-free temporary file.
    override func startRecording() {
        let tempUrl = YfileManager().tempFileUrl()
        movieFileOutput?.startRecording(toOutputFileURL: tempUrl, recordingDelegate: self)
    }
    override func stopRecording() {
        movieFileOutput?.stopRecording()
    }
    //MARK: AVCaptureFileOutputRecordingDelegate
    func capture(_ captureOutput: AVCaptureFileOutput!, didStartRecordingToOutputFileAt fileURL: URL!, fromConnections connections: [Any]!) {
        delegate?.coordinatorDidBeginRecording(coordinator: self)
    }
    func capture(_ captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAt outputFileURL: URL!, fromConnections connections: [Any]!, error: Error!) {
        delegate?.didFinishRecording(coordinator: self, url: outputFileURL)
    }
}
上面代碼中有一個對文件處理的路徑操做類:YfileManager
它主要就是對文件路徑的操做,與臨時文件存儲到系統相冊中的操做:以上代碼中牽扯到的只有以下:
/// Small helper for temp-file paths and saving finished movies to the album.
class YfileManager: NSObject {
    /// Returns a URL in the temp directory that does not collide with an
    /// existing file, trying output0.mov, output1.mov, ... in order.
    func tempFileUrl() -> URL {
        let fileManager = FileManager.default
        var candidate = ""
        var suffix = 0
        repeat {
            candidate = NSTemporaryDirectory() + "output\(suffix).mov"
            suffix += 1
        } while fileManager.fileExists(atPath: candidate)
        return URL(fileURLWithPath: candidate)
    }
    /// Copies a recorded temp file into the system photo album.
    /// NOTE: ALAssetsLibrary is deprecated since iOS 9 (use Photos instead).
    func copFileToCameraRoll(fileUrl: URL) {
        let library = ALAssetsLibrary()
        if !library.videoAtPathIs(compatibleWithSavedPhotosAlbum: fileUrl) {
            printLogDebug("video error")
        }
        library.writeVideoAtPath(toSavedPhotosAlbum: fileUrl) { (url, error) in
            if (error != nil) {
                printLogDebug("error: \(error?.localizedDescription)")
            } else if url == nil {
                printLogDebug("url is empty")
            }
        }
    }
}
實現fileOutputViewController
控制器的方法
首當其衝的是相機視圖與執行代理的方法:captureSessionCoordinatorDelegate
相關變量:
@IBOutlet weak var recordButton: UIBarButtonItem!
var captureSessionCoordinator: fileOutputCoordinator?
var recording: Bool = false
var dismissing: Bool = false複製代碼
控制器具體代碼:
/// Controller hosting the camera preview, with Close and Record/Stop buttons.
class fileOutputViewController: UIViewController,captureSessionCoordinatorDelegate {
    @IBOutlet weak var recordButton: UIBarButtonItem!
    var captureSessionCoordinator: fileOutputCoordinator?
    var recording: Bool = false
    // Set when the user asks to close while a recording is still finishing;
    // the actual dismissal happens in didFinishRecording.
    var dismissing: Bool = false
    override func viewDidLoad() {
        super.viewDidLoad()
        captureSessionCoordinator = fileOutputCoordinator()
        captureSessionCoordinator?.setDelegate(capDelegate: self, queue: DispatchQueue(label: "fileOutputCoordinator"))
        confiureCamper()
    }
    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Dispose of any resources that can be recreated.
    }
    /// Close the current view; defer dismissal if a recording is in flight.
    @IBAction func closeCameral(_ sender: Any) {
        if recording {
            dismissing = true
        } else {
            stopPipelineAndDismiss()
        }
    }
    /// Toggle recording.
    /// FIX: the original fell through after stopRecording() and immediately
    /// retitled the button to "Stop" and called startRecording() again, so
    /// tapping Stop instantly restarted a new recording. Start and stop are
    /// now mutually exclusive branches.
    @IBAction func recording(_ sender: Any) {
        if recording {
            // Disable until the delegate confirms the file has been written.
            recordButton.isEnabled = false
            captureSessionCoordinator?.stopRecording()
        } else {
            // Keep the screen awake while capturing.
            UIApplication.shared.isIdleTimerDisabled = true
            recordButton.isEnabled = false
            recordButton.title = "Stop"
            captureSessionCoordinator?.startRecording()
            recording = true
        }
    }
    /// Installs the preview layer behind all other views and starts the session.
    func confiureCamper() {
        let cameraViewlayer = captureSessionCoordinator?.previewLayerSetting()
        cameraViewlayer?.frame = view.bounds
        view.layer.insertSublayer(cameraViewlayer!, at: 0)
        captureSessionCoordinator?.startRunning()
    }
    func stopPipelineAndDismiss() {
        captureSessionCoordinator?.stopRunning()
        dismiss(animated: true, completion: nil)
        dismissing = false
    }
    //MARK: captureSessionCoordinatorDelegate
    func coordinatorDidBeginRecording(coordinator: CaptureSessionCoordinator) {
        recordButton.isEnabled = true
    }
    func didFinishRecording(coordinator: CaptureSessionCoordinator, url: URL) {
        UIApplication.shared.isIdleTimerDisabled = false
        recordButton.title = "Record"
        // FIX: re-enable the button after the stop completes; the original
        // left it disabled forever once a recording was stopped.
        recordButton.isEnabled = true
        recording = false
        let fm = YfileManager()
        fm.copFileToCameraRoll(fileUrl: url)
        if dismissing {
            stopPipelineAndDismiss()
        }
    }
}
AVCaptureMovieFileOutput
類型的輸出完成
Xcode上面的導航欄
->Window
->Devices
->點擊你的設備
->找到右下的installed Apps
->點擊你的要看的項目
->點擊+ -右邊圖標
->Download
下載到桌面便可 而後選擇顯示包內容
就能夠看到當前沙盒文件的狀態啦!
如圖: