AVFoundation 是蘋果推出的一個音視頻框架。
AVFoundation 最強大的功能是對照片和視頻的捕捉。例如一些 APP 中的小視頻、直播等, 都能通過 AVFoundation 來實現捕捉。
捕捉會話主要用到 AVCaptureSession 類, 它類似於一個排插, 各種設備都需要與捕捉會話關聯起來。
通過 AVCaptureDevice 可以獲取到手機的各種硬件設備, 例如: 麥克風、前後攝像頭、閃光燈等。
通過 AVCaptureDeviceInput 可以捕捉到設備的輸入。
在 AVFoundation 中, 捕捉設備是無法直接添加到 Session 中的, 所以需要先將捕捉設備封裝為捕捉設備輸入(AVCaptureDeviceInput), 再添加進會話中。
有輸入就有輸出。在 iOS 10.0 之後, 可以通過 AVCapturePhotoOutput 來獲取圖片輸出, 通過 AVCaptureMovieFileOutput 來輸出視頻文件。還有 AVCaptureAudioDataOutput、AVCaptureVideoDataOutput 等。
AVCaptureConnection 可以根據捕捉的媒體類型來建立一個連接。
AVCaptureVideoPreviewLayer 主要是一個圖層, 用來顯示攝像頭實時捕捉的內容。
這裡涉及到攝像頭、麥克風、相冊, 需要配置用戶隱私權限。
#pragma mark - session設置
/// Configures the capture session: video + audio inputs, photo + movie outputs,
/// and the serial queue used to start/stop the session off the main thread.
/// @param error On failure, populated by AVCaptureDeviceInput creation.
/// @return YES when the session was configured, NO when an input could not be created.
- (BOOL)setupSession:(NSError **)error {
    // Create the session with a high-quality preset.
    self.captureSession = [[AVCaptureSession alloc] init];
    self.captureSession.sessionPreset = AVCaptureSessionPresetHigh;

    // Default video device (the back camera on iOS).
    AVCaptureDevice *videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    // A capture device must be wrapped in an AVCaptureDeviceInput before it can join the session.
    AVCaptureDeviceInput *videoInput = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:error];
    if (!videoInput) {
        return NO;
    }
    // The camera is a shared system resource; always ask the session before adding.
    if ([self.captureSession canAddInput:videoInput]) {
        [self.captureSession addInput:videoInput];
        // Keep a reference so we can switch between front/back cameras later.
        self.activeVideoInput = videoInput;
    }

    // Audio input: the microphone. No reference kept — there is only one.
    AVCaptureDevice *audioDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
    AVCaptureDeviceInput *audioInput = [AVCaptureDeviceInput deviceInputWithDevice:audioDevice error:error];
    if (!audioInput) {
        return NO;
    }
    if ([self.captureSession canAddInput:audioInput]) {
        [self.captureSession addInput:audioInput];
    }

    // Still-image output.
    self.imageOutput = [[AVCapturePhotoOutput alloc] init];
    if ([self.captureSession canAddOutput:self.imageOutput]) {
        [self.captureSession addOutput:self.imageOutput];
    }

    // Movie-file output (QuickTime container).
    self.movieOutput = [[AVCaptureMovieFileOutput alloc] init];
    if ([self.captureSession canAddOutput:self.movieOutput]) {
        [self.captureSession addOutput:self.movieOutput];
    }

    // Serial queue for session start/stop so the main thread is never blocked.
    // (Was NULL; DISPATCH_QUEUE_SERIAL is equivalent but explicit.)
    self.videoQueue = dispatch_queue_create("glen.videoQueue", DISPATCH_QUEUE_SERIAL);
    return YES;
}
複製代碼
配置完捕捉會話以後,就須要經過外界的按鈕點擊等操做來告訴AVFoundation來開啓或中止捕捉會話。
/// Starts the capture session asynchronously on the video queue.
/// The isRunning check is performed on the queue itself to avoid a
/// check-then-act race with stopSession (previously it was checked on
/// the caller's thread before dispatching).
- (void)startSession {
    dispatch_async(self.videoQueue, ^{
        if (![self.captureSession isRunning]) {
            // startRunning is blocking; never call it on the main thread.
            [self.captureSession startRunning];
        }
    });
}
/// Stops the capture session asynchronously on the video queue.
/// The isRunning check is performed on the queue itself to avoid a
/// check-then-act race with startSession.
- (void)stopSession {
    dispatch_async(self.videoQueue, ^{
        if ([self.captureSession isRunning]) {
            // stopRunning is blocking; never call it on the main thread.
            [self.captureSession stopRunning];
        }
    });
}
複製代碼
獲取當前設備上可用的攝像頭設備,並根據需求來得到指定的攝像頭設備
/// Returns the built-in wide-angle camera at the requested position, or nil if none exists.
/// @param positon The desired device position (front or back).
- (AVCaptureDevice *)cameraWithPositon:(AVCaptureDevicePosition)positon {
    // Discover every built-in wide-angle camera regardless of position.
    AVCaptureDeviceDiscoverySession *discovery =
        [AVCaptureDeviceDiscoverySession discoverySessionWithDeviceTypes:@[AVCaptureDeviceTypeBuiltInWideAngleCamera]
                                                               mediaType:AVMediaTypeVideo
                                                                position:AVCaptureDevicePositionUnspecified];
    // Pick the first device matching the requested position.
    for (AVCaptureDevice *candidate in discovery.devices) {
        if (candidate.position == positon) {
            return candidate;
        }
    }
    return nil;
}
複製代碼
由於攝像頭有多個,因此必需要知道當前使用的是哪一個攝像頭
/// The camera device backing the currently active video input.
- (AVCaptureDevice *)activeCamera {
    return [self.activeVideoInput device];
}
/// Returns the camera opposite the active one (back <-> front),
/// or nil when the device has only one camera.
- (AVCaptureDevice *)inactiveCamera {
    if (self.cameraCount <= 1) {
        return nil;
    }
    AVCaptureDevicePosition activePosition = [self activeCamera].position;
    if (activePosition == AVCaptureDevicePositionBack) {
        // Back is active -> the front camera is the inactive one.
        return [self cameraWithPositon:AVCaptureDevicePositionFront];
    }
    if (activePosition == AVCaptureDevicePositionFront) {
        // Front is active -> the back camera is the inactive one.
        return [self cameraWithPositon:AVCaptureDevicePositionBack];
    }
    return nil;
}
複製代碼
在進行切換以前,必需要知道其餘的攝像頭是不是一個可進行使用的狀態
/// YES when more than one camera is available, i.e. switching is possible.
- (BOOL)canSwitchCameras {
    return (self.cameraCount > 1);
}
複製代碼
接下來就是對攝像頭進行切換
/// Switches the session between the front and back cameras.
/// @return YES when the switch succeeded, NO when it was not possible
///         or the replacement input could not be added.
- (BOOL)switchCameras {
    if (![self canSwitchCameras]) {
        return NO;
    }
    // The camera we want to switch to.
    AVCaptureDevice *device = [self inactiveCamera];
    NSError *error;
    AVCaptureDeviceInput *videoInput = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
    if (!videoInput) {
        // Could not create an input for the new device.
        return NO;
    }
    // Batch the input swap into a single atomic configuration change.
    [self.captureSession beginConfiguration];
    [self.captureSession removeInput:self.activeVideoInput];
    if ([self.captureSession canAddInput:videoInput]) {
        [self.captureSession addInput:videoInput];
        self.activeVideoInput = videoInput;
        [self.captureSession commitConfiguration];
        return YES;
    }
    // The new input was rejected: restore the previous input and report
    // failure (previously this path still returned YES, masking the error).
    [self.captureSession addInput:self.activeVideoInput];
    [self.captureSession commitConfiguration];
    return NO;
}
複製代碼
/// Whether the active camera supports point-of-interest (tap-to-focus) focusing.
- (BOOL)cameraSupportsTapToFocus {
    AVCaptureDevice *camera = [self activeCamera];
    return camera.isFocusPointOfInterestSupported;
}
/// Focuses the active camera at the given point of interest.
/// @param point Point in device coordinate space: (0,0) top-left, (1,1) bottom-right.
- (void)focusAtPoint:(CGPoint)point {
    AVCaptureDevice *device = [self activeCamera];
    // Requires both point-of-interest focus and the auto-focus mode.
    BOOL supported = device.isFocusPointOfInterestSupported &&
                     [device isFocusModeSupported:AVCaptureFocusModeAutoFocus];
    if (!supported) {
        return;
    }
    NSError *error;
    // Lock the device so no other object mutates its configuration concurrently.
    if ([device lockForConfiguration:&error]) {
        device.focusPointOfInterest = point;
        device.focusMode = AVCaptureFocusModeAutoFocus;
        [device unlockForConfiguration];
    } else {
        // Device could not be locked; `error` is available for a callback here.
    }
}
複製代碼
// KVO context token: its address uniquely identifies this class's
// "adjustingExposure" observation in observeValueForKeyPath:...
// NOTE(review): the conventional pattern is `static void *ctx = &ctx;` —
// confirm before changing, since &CameraAdjustingExposureContext is used
// as the context at both the add- and remove-observer call sites.
static const NSString *CameraAdjustingExposureContext;
/// Whether the active camera supports point-of-interest (tap-to-expose) exposure.
- (BOOL)cameraSupportsTapToExpose {
    AVCaptureDevice *camera = [self activeCamera];
    return camera.isExposurePointOfInterestSupported;
}
/// Sets the exposure point of interest on the active camera and, when the
/// device can lock exposure, starts observing "adjustingExposure" via KVO
/// so exposure can be frozen once it settles.
/// @param point Point in device coordinate space: (0,0) top-left, (1,1) bottom-right.
- (void)exposeAtPoint:(CGPoint)point {
    AVCaptureDevice *device = [self activeCamera];
    AVCaptureExposureMode continuousMode = AVCaptureExposureModeContinuousAutoExposure;
    // Requires both point-of-interest exposure and the continuous-auto mode.
    BOOL supported = device.isExposurePointOfInterestSupported &&
                     [device isExposureModeSupported:continuousMode];
    if (!supported) {
        return;
    }
    NSError *error;
    // Lock the device while reconfiguring it.
    if ([device lockForConfiguration:&error]) {
        device.exposurePointOfInterest = point;
        device.exposureMode = continuousMode;
        // Watch adjustingExposure so the KVO callback can lock exposure
        // after the device finishes adjusting.
        if ([device isExposureModeSupported:AVCaptureExposureModeLocked]) {
            [device addObserver:self
                     forKeyPath:@"adjustingExposure"
                        options:NSKeyValueObservingOptionNew
                        context:&CameraAdjustingExposureContext];
        }
        [device unlockForConfiguration];
    }
}
/// KVO callback: once the device has finished adjusting exposure,
/// remove the observer and lock the exposure mode.
- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary<NSKeyValueChangeKey,id> *)change context:(void *)context {
    if (context == &CameraAdjustingExposureContext) {
        AVCaptureDevice *device = (AVCaptureDevice *)object;
        // BUG FIX: test that the device has *finished* adjusting exposure
        // (isAdjustingExposure). The old code checked
        // isExposurePointOfInterestSupported, so exposure was never locked.
        if (!device.isAdjustingExposure && [device isExposureModeSupported:AVCaptureExposureModeLocked]) {
            // Remove self as observer so no further change notifications arrive.
            [object removeObserver:self forKeyPath:@"adjustingExposure" context:&CameraAdjustingExposureContext];
            // Lock exposure asynchronously on the main queue.
            dispatch_async(dispatch_get_main_queue(), ^{
                NSError *error;
                if ([device lockForConfiguration:&error]) {
                    device.exposureMode = AVCaptureExposureModeLocked;
                    [device unlockForConfiguration];
                } else {
                    // Device error callback would go here.
                }
            });
        }
    } else {
        // BUG FIX: forward unrecognized contexts to super. Previously this
        // branch was nested inside the matching-context check, so foreign
        // KVO notifications were silently dropped.
        [super observeValueForKeyPath:keyPath ofObject:object change:change context:context];
    }
}
/// Resets focus and exposure to continuous-auto modes centered in the frame.
- (void)resetFocusAndExposureModes {
    AVCaptureDevice *device = [self activeCamera];

    // Focus can be reset when point-of-interest focus and continuous auto-focus are supported.
    AVCaptureFocusMode focusMode = AVCaptureFocusModeContinuousAutoFocus;
    BOOL canResetFocus = [device isFocusPointOfInterestSupported] && [device isFocusModeSupported:focusMode];

    // BUG FIX: query *exposure* point-of-interest support here — the old code
    // mistakenly re-checked isFocusPointOfInterestSupported.
    AVCaptureExposureMode exposureMode = AVCaptureExposureModeContinuousAutoExposure;
    BOOL canResetExposure = [device isExposurePointOfInterestSupported] && [device isExposureModeSupported:exposureMode];

    // Device coordinate space: (0,0) top-left, (1,1) bottom-right, center is (0.5,0.5).
    CGPoint centerPoint = CGPointMake(0.5f, 0.5f);

    NSError *error;
    // Lock the device, apply whatever resets are supported, then unlock.
    if ([device lockForConfiguration:&error]) {
        if (canResetFocus) {
            device.focusMode = focusMode;
            device.focusPointOfInterest = centerPoint;
        }
        if (canResetExposure) {
            device.exposureMode = exposureMode;
            device.exposurePointOfInterest = centerPoint;
        }
        [device unlockForConfiguration];
    } else {
        // Device error callback would go here.
    }
}
複製代碼
#pragma mark - 拍照
/// Captures a still photo encoded as JPEG; results arrive via the
/// AVCapturePhotoCaptureDelegate callback.
- (void)captureStillImage {
    AVCapturePhotoSettings *settings =
        [AVCapturePhotoSettings photoSettingsWithFormat:@{AVVideoCodecKey : AVVideoCodecTypeJPEG}];
    [self.imageOutput capturePhotoWithSettings:settings delegate:self];
}
/// AVCapturePhotoCaptureDelegate: receives the processed photo and saves it.
- (void)captureOutput:(AVCapturePhotoOutput *)output didFinishProcessingPhoto:(AVCapturePhoto *)photo error:(nullable NSError *)error {
    // BUG FIX: bail out on capture failure instead of decoding nil data.
    if (error) {
        NSLog(@"Photo capture failed: %@", error);
        return;
    }
    NSData *imageData = photo.fileDataRepresentation;
    if (!imageData) {
        return;
    }
    UIImage *image = [[UIImage alloc] initWithData:imageData];
    if (image) {
        // Persist the captured photo to the photo library.
        [self writeImageToAssetsLibrary:image];
    }
}
/// Saves the image to the user's photo library and, on success, posts a
/// thumbnail notification on the main queue.
/// @param image The image to persist.
- (void)writeImageToAssetsLibrary:(UIImage *)image {
    __block PHObjectPlaceholder *assetPlaceholder = nil;
    [[PHPhotoLibrary sharedPhotoLibrary] performChanges:^{
        PHAssetChangeRequest *changeRequest = [PHAssetChangeRequest creationRequestForAssetFromImage:image];
        assetPlaceholder = changeRequest.placeholderForCreatedAsset;
    } completionHandler:^(BOOL success, NSError * _Nullable error) {
        // BUG FIX: the old handler logged "OK" and posted the notification
        // unconditionally, even when the write failed.
        if (!success) {
            NSLog(@"Failed to write image to photo library: %@", error);
            return;
        }
        // Notify observers on the main queue so UI can show the thumbnail.
        dispatch_async(dispatch_get_main_queue(), ^{
            [[NSNotificationCenter defaultCenter] postNotificationName:ThumbnailCreatedNotification
                                                                object:image];
        });
    }];
}
複製代碼