1. AVAsset: provides the information of a media resource; in effect it represents a video or audio file. It is an abstract class and cannot be used directly.
2. AVURLAsset: a subclass of AVAsset that creates an object carrying the media information from a URL.
    NSURL *url = <#A URL that identifies an audiovisual asset such as a movie file#>;
    AVURLAsset *anAsset = [[AVURLAsset alloc] initWithURL:url options:nil];
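Once the asset exists, its properties should be loaded before they are read. A minimal sketch (not from the original post) using the standard asynchronous loading API:

    // Load the duration asynchronously; reading AVAsset properties before they
    // are loaded can block the calling thread.
    [anAsset loadValuesAsynchronouslyForKeys:@[@"duration"] completionHandler:^{
        NSError *error = nil;
        if ([anAsset statusOfValueForKey:@"duration" error:&error] == AVKeyValueStatusLoaded) {
            NSLog(@"duration: %.2f seconds", CMTimeGetSeconds(anAsset.duration));
        } else {
            // Handle the error appropriately.
        }
    }];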
3. AVCaptureSession: captures video and audio, and coordinates the flow of data between the input and output streams.
    AVCaptureSession *captureSession = [[AVCaptureSession alloc] init];
    if ([captureSession canSetSessionPreset:AVCaptureSessionPreset1280x720])
        captureSession.sessionPreset = AVCaptureSessionPreset1280x720;
4. AVCaptureDevice: represents an input device, such as a camera or microphone.
    AVCaptureDevice *device = [self getCameraDeviceWithPosition:AVCaptureDevicePositionBack];

    /// Find a camera at the given position that supports the 1280x720 preset
    - (AVCaptureDevice *)getCameraDeviceWithPosition:(AVCaptureDevicePosition)position {
        NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
        for (AVCaptureDevice *device in devices) {
            // Keep looking instead of returning nil on the first position match,
            // so a device that lacks the preset doesn't abort the search
            if ([device position] == position &&
                [device supportsAVCaptureSessionPreset:AVCaptureSessionPreset1280x720]) {
                return device;
            }
        }
        return nil;
    }
5. AVCaptureDeviceInput: a video or audio input stream; it is added to, and managed by, an AVCaptureSession.
    NSError *error;
    AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
    if (!input) {
        // Handle the error appropriately.
    }
    if ([captureSession canAddInput:input]) {
        [captureSession addInput:input];
    }
6. AVCaptureOutput: a video or audio output stream. You normally use one of its subclasses (AVCaptureAudioDataOutput, AVCaptureVideoDataOutput, AVCaptureStillImageOutput, AVCaptureFileOutput, and so on), added to, and managed by, an AVCaptureSession.
    AVCaptureMovieFileOutput *movieOutput = [[AVCaptureMovieFileOutput alloc] init];
    if ([captureSession canAddOutput:movieOutput]) {
        [captureSession addOutput:movieOutput];
    }
7. AVCaptureVideoPreviewLayer: a preview layer for viewing what the camera captures in real time.
    AVCaptureVideoPreviewLayer *captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:captureSession];
    captureVideoPreviewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    captureVideoPreviewLayer.frame = <#Set layer frame#>;
8. AVCaptureConnection: the connection between the AVCaptureSession and its input and output streams; it can be used to adjust settings such as video stabilization.
    AVCaptureConnection *captureConnection = [movieOutput connectionWithMediaType:AVMediaTypeVideo];
    // Enable cinematic video stabilization
    captureConnection.preferredVideoStabilizationMode = AVCaptureVideoStabilizationModeCinematic;
9. AVCaptureDeviceFormat: a capture format of the input device; it can be used to adjust settings such as ISO, slow motion (high frame rate), and stabilization.
    AVCaptureDevice *captureDevice = [self.captureDeviceInput device];
    NSError *error;
    if ([captureDevice lockForConfiguration:&error]) {
        CGFloat minISO = captureDevice.activeFormat.minISO;
        CGFloat maxISO = captureDevice.activeFormat.maxISO;
        // Set the ISO to 70% of the available range
        CGFloat currentISO = (maxISO - minISO) * 0.7 + minISO;
        [captureDevice setExposureModeCustomWithDuration:AVCaptureExposureDurationCurrent ISO:currentISO completionHandler:nil];
        [captureDevice unlockForConfiguration];
    } else {
        // Handle the error appropriately.
    }
    /// Coordinates the data flow between the input and output devices
    @property (nonatomic, strong) AVCaptureSession *captureSession;
    /// Video input stream obtained from an AVCaptureDevice
    @property (nonatomic, strong) AVCaptureDeviceInput *captureDeviceInput;
    /// Audio input stream obtained from an AVCaptureDevice
    @property (nonatomic, strong) AVCaptureDeviceInput *audioCaptureDeviceInput;
    /// Movie file output stream
    @property (nonatomic, strong) AVCaptureMovieFileOutput *captureMovieFileOutput;
    /// Camera preview layer
    @property (nonatomic, strong) AVCaptureVideoPreviewLayer *captureVideoPreviewLayer;

... shared methods

    /// Find a camera at the given position
    - (AVCaptureDevice *)getCameraDeviceWithPosition:(AVCaptureDevicePosition)position {
        NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
        for (AVCaptureDevice *device in devices) {
            if ([device position] == position &&
                [device supportsAVCaptureSessionPreset:AVCaptureSessionPreset1280x720]) {
                return device;
            }
        }
        return nil;
    }

... creating the custom camera

    // Create the AVCaptureSession
    _captureSession = [[AVCaptureSession alloc] init];
    if ([_captureSession canSetSessionPreset:AVCaptureSessionPreset1280x720])
        _captureSession.sessionPreset = AVCaptureSessionPreset1280x720;

    // Get the camera device
    AVCaptureDevice *videoCaptureDevice = [self getCameraDeviceWithPosition:AVCaptureDevicePositionBack];
    if (!videoCaptureDevice) {
        // Handle the error appropriately.
    }

    // Create the video input stream
    NSError *error = nil;
    _captureDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:videoCaptureDevice error:&error];
    if (error) {
        // Handle the error appropriately.
    }

    // Get the microphone device
    AVCaptureDevice *audioCaptureDevice = [[AVCaptureDevice devicesWithMediaType:AVMediaTypeAudio] firstObject];
    // Create the audio input stream
    _audioCaptureDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:audioCaptureDevice error:&error];
    if (error) {
        // Handle the error appropriately.
    }

    // Add the video and audio inputs to the AVCaptureSession
    if ([_captureSession canAddInput:_captureDeviceInput] && [_captureSession canAddInput:_audioCaptureDeviceInput]) {
        [_captureSession addInput:_captureDeviceInput];
        [_captureSession addInput:_audioCaptureDeviceInput];
    }

    // Create the output stream
    _captureMovieFileOutput = [[AVCaptureMovieFileOutput alloc] init];
    // Add the output to the AVCaptureSession
    if ([_captureSession canAddOutput:_captureMovieFileOutput]) {
        [_captureSession addOutput:_captureMovieFileOutput];
        // Get the connection from the output
        AVCaptureConnection *captureConnection = [_captureMovieFileOutput connectionWithMediaType:AVMediaTypeVideo];
        // Enable cinematic video stabilization if the format supports it
        if ([videoCaptureDevice.activeFormat isVideoStabilizationModeSupported:AVCaptureVideoStabilizationModeCinematic]) {
            captureConnection.preferredVideoStabilizationMode = AVCaptureVideoStabilizationModeCinematic;
        }
    }

    // Save the default AVCaptureDeviceFormat.
    // After changing the capture frame rate for slow motion, stabilization can no
    // longer be re-enabled on the new format; in my tests only this default format
    // works, so it is saved here and restored when slow motion is turned off,
    // at which point stabilization can be enabled again.
    _defaultFormat = videoCaptureDevice.activeFormat;
    _defaultMinFrameDuration = videoCaptureDevice.activeVideoMinFrameDuration;
    _defaultMaxFrameDuration = videoCaptureDevice.activeVideoMaxFrameDuration;

    // Create the preview layer
    _captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:_captureSession];
    _captureVideoPreviewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill; // fill mode
    _captureVideoPreviewLayer.frame = self.bounds;
    // The preview layer is a CALayer, so you can create a UIView and addSublayer on its layer.
    // This code lives in the view's init method, so self.layer is used directly.
    [self.layer addSublayer:_captureVideoPreviewLayer];

    // Start capturing
    [self.captureSession startRunning];
You can addSubview whatever controls you need directly onto the view that hosts the camera preview layer. My approach is to create a UIView the same size as the preview layer to act as a control panel, give it a transparent background, and lay it over the preview layer. All gestures and button taps are handled on that control panel and forwarded through a delegate, so the camera view performs the corresponding actions. The plumbing is routine, so it is not reproduced here; a sketch of such a delegate protocol follows.
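The post does not show the delegate protocol itself. A minimal sketch, with a hypothetical protocol name but using the callback names implemented in the rest of the post:

    // Hypothetical control-panel delegate: the protocol name is an assumption,
    // the method names match the handlers implemented below.
    @protocol CameraBackgroundDelegate <NSObject>
    - (void)cameraBackgroundDidClickChangeBack;
    - (void)cameraBackgroundDidClickChangeFront;
    - (void)cameraBackgroundDidClickOpenFlash;
    - (void)cameraBackgroundDidClickCloseFlash;
    - (void)cameraBackgroundDidChangeFocus:(CGFloat)focus;
    - (void)cameraBackgroundDidChangeZoom:(CGFloat)zoom;
    - (void)cameraBackgroundDidChangeISO:(CGFloat)iso;
    - (void)cameraBackgroundDidTap:(CGPoint)point;
    - (void)cameraBackgroundDidClickPlay;
    @end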
1. Switching to the back camera
    #pragma mark - Switch to the back camera
    - (void)cameraBackgroundDidClickChangeBack {
        AVCaptureDevice *toChangeDevice;
        AVCaptureDevicePosition toChangePosition = AVCaptureDevicePositionBack;
        toChangeDevice = [self getCameraDeviceWithPosition:toChangePosition];
        AVCaptureDeviceInput *toChangeDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:toChangeDevice error:nil];
        [self.captureSession beginConfiguration];
        [self.captureSession removeInput:self.captureDeviceInput];
        if ([self.captureSession canAddInput:toChangeDeviceInput]) {
            [self.captureSession addInput:toChangeDeviceInput];
            self.captureDeviceInput = toChangeDeviceInput;
        }
        [self.captureSession commitConfiguration];
    }
2. Switching to the front camera
    - (void)cameraBackgroundDidClickChangeFront {
        AVCaptureDevice *toChangeDevice;
        AVCaptureDevicePosition toChangePosition = AVCaptureDevicePositionFront;
        toChangeDevice = [self getCameraDeviceWithPosition:toChangePosition];
        AVCaptureDeviceInput *toChangeDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:toChangeDevice error:nil];
        [self.captureSession beginConfiguration];
        [self.captureSession removeInput:self.captureDeviceInput];
        if ([self.captureSession canAddInput:toChangeDeviceInput]) {
            [self.captureSession addInput:toChangeDeviceInput];
            self.captureDeviceInput = toChangeDeviceInput;
        }
        [self.captureSession commitConfiguration];
    }
3. Turning on the torch (flashlight)
    - (void)cameraBackgroundDidClickOpenFlash {
        AVCaptureDevice *captureDevice = [self.captureDeviceInput device];
        NSError *error;
        if ([captureDevice lockForConfiguration:&error]) {
            if ([captureDevice isTorchModeSupported:AVCaptureTorchModeOn])
                [captureDevice setTorchMode:AVCaptureTorchModeOn];
            [captureDevice unlockForConfiguration];
        } else {
            // Handle the error appropriately.
        }
    }
4. Turning off the torch (flashlight)
    - (void)cameraBackgroundDidClickCloseFlash {
        AVCaptureDevice *captureDevice = [self.captureDeviceInput device];
        NSError *error;
        if ([captureDevice lockForConfiguration:&error]) {
            if ([captureDevice isTorchModeSupported:AVCaptureTorchModeOff])
                [captureDevice setTorchMode:AVCaptureTorchModeOff];
            [captureDevice unlockForConfiguration];
        } else {
            // Handle the error appropriately.
        }
    }
5. Adjusting the focus (lens position)
    // Lens position, range 0.0-1.0
    - (void)cameraBackgroundDidChangeFocus:(CGFloat)focus {
        AVCaptureDevice *captureDevice = [self.captureDeviceInput device];
        NSError *error;
        if ([captureDevice lockForConfiguration:&error]) {
            // setFocusModeLockedWithLensPosition: puts the device into
            // AVCaptureFocusModeLocked, so that is the mode to check for
            if ([captureDevice isFocusModeSupported:AVCaptureFocusModeLocked])
                [captureDevice setFocusModeLockedWithLensPosition:focus completionHandler:nil];
            [captureDevice unlockForConfiguration];
        } else {
            // Handle the error appropriately.
        }
    }
6. Digital zoom
    // Digital zoom, 1x-3x
    - (void)cameraBackgroundDidChangeZoom:(CGFloat)zoom {
        AVCaptureDevice *captureDevice = [self.captureDeviceInput device];
        NSError *error;
        if ([captureDevice lockForConfiguration:&error]) {
            // Ramping past videoMaxZoomFactor raises an exception, so clamp first
            CGFloat maxZoom = captureDevice.activeFormat.videoMaxZoomFactor;
            [captureDevice rampToVideoZoomFactor:MIN(zoom, maxZoom) withRate:50];
            [captureDevice unlockForConfiguration];
        } else {
            // Handle the error appropriately.
        }
    }
7. Adjusting the ISO (light sensitivity)
    // Adjust the ISO as a fraction 0.0-1.0 of the supported range
    - (void)cameraBackgroundDidChangeISO:(CGFloat)iso {
        AVCaptureDevice *captureDevice = [self.captureDeviceInput device];
        NSError *error;
        if ([captureDevice lockForConfiguration:&error]) {
            CGFloat minISO = captureDevice.activeFormat.minISO;
            CGFloat maxISO = captureDevice.activeFormat.maxISO;
            CGFloat currentISO = (maxISO - minISO) * iso + minISO;
            [captureDevice setExposureModeCustomWithDuration:AVCaptureExposureDurationCurrent ISO:currentISO completionHandler:nil];
            [captureDevice unlockForConfiguration];
        } else {
            // Handle the error appropriately.
        }
    }
8. Tap-to-focus
    // point: the tapped location on screen
    - (void)cameraBackgroundDidTap:(CGPoint)point {
        AVCaptureDevice *captureDevice = [self.captureDeviceInput device];
        CGPoint location = point;
        CGSize frameSize = self.captureVideoPreviewLayer.frame.size;
        if ([captureDevice position] == AVCaptureDevicePositionFront)
            location.x = frameSize.width - location.x;
        // Convert the layer coordinate into the device's point-of-interest space
        CGPoint pointOfInterest = CGPointMake(location.y / frameSize.height, 1.f - (location.x / frameSize.width));
        // focusWithMode:... takes the configuration lock itself, so no lock is needed here
        [self focusWithMode:AVCaptureFocusModeAutoFocus exposureMode:AVCaptureExposureModeAutoExpose atPoint:pointOfInterest];
        [[self.captureDeviceInput device] addObserver:self forKeyPath:@"ISO" options:NSKeyValueObservingOptionNew context:NULL];
    }

    - (void)focusWithMode:(AVCaptureFocusMode)focusMode exposureMode:(AVCaptureExposureMode)exposureMode atPoint:(CGPoint)point {
        AVCaptureDevice *captureDevice = [self.captureDeviceInput device];
        NSError *error;
        if ([captureDevice lockForConfiguration:&error]) {
            if ([captureDevice isFocusModeSupported:focusMode])
                [captureDevice setFocusMode:focusMode];
            if ([captureDevice isFocusPointOfInterestSupported])
                [captureDevice setFocusPointOfInterest:point];
            if ([captureDevice isExposureModeSupported:exposureMode])
                [captureDevice setExposureMode:exposureMode];
            if ([captureDevice isExposurePointOfInterestSupported])
                [captureDevice setExposurePointOfInterest:point];
            [captureDevice unlockForConfiguration];
        } else {
            // Handle the error appropriately.
        }
    }
9. Getting the video orientation while recording
Because of how a camera is used, you cannot rely on the usual view controller orientation: the user may have locked screen rotation. Instead, the gravity vector from Core Motion is used to work out how the phone is currently being held.
    @property (nonatomic, strong) CMMotionManager *motionManager;
    @property (nonatomic, assign) UIDeviceOrientation deviceOrientation;

    ...

    _motionManager = [[CMMotionManager alloc] init];
    _motionManager.deviceMotionUpdateInterval = 1 / 15.0;
    if (_motionManager.deviceMotionAvailable) {
        [_motionManager startDeviceMotionUpdatesToQueue:[NSOperationQueue currentQueue]
                                            withHandler:^(CMDeviceMotion * _Nullable motion, NSError * _Nullable error) {
            [self performSelectorOnMainThread:@selector(handleDeviceMotion:) withObject:motion waitUntilDone:YES];
        }];
    } else {
        NSLog(@"No device motion on device");
    }

    ...

    /// Device motion callback
    - (void)handleDeviceMotion:(CMDeviceMotion *)deviceMotion {
        double x = deviceMotion.gravity.x;
        double y = deviceMotion.gravity.y;
        CGAffineTransform videoTransform;
        if (fabs(y) >= fabs(x)) {
            if (y >= 0) {
                videoTransform = CGAffineTransformMakeRotation(M_PI);
                _deviceOrientation = UIDeviceOrientationPortraitUpsideDown;
            } else {
                videoTransform = CGAffineTransformMakeRotation(0);
                _deviceOrientation = UIDeviceOrientationPortrait;
            }
        } else {
            if (x >= 0) {
                videoTransform = CGAffineTransformMakeRotation(-M_PI_2);
                _deviceOrientation = UIDeviceOrientationLandscapeRight; // landscape, home button on the left
            } else {
                videoTransform = CGAffineTransformMakeRotation(M_PI_2);
                _deviceOrientation = UIDeviceOrientationLandscapeLeft; // landscape, home button on the right
            }
        }
        // Tell the control panel the current orientation so its buttons can rotate to match
        [self.backgroundView setOrientation:_deviceOrientation];
    }
11. Slow-motion capture on
    - (void)cameraBackgroundDidClickOpenSlow {
        [self.captureSession stopRunning];
        CGFloat desiredFPS = 240.0;
        AVCaptureDevice *videoDevice = self.captureDeviceInput.device;
        AVCaptureDeviceFormat *selectedFormat = nil;
        int32_t maxWidth = 0;
        AVFrameRateRange *frameRateRange = nil;
        // Pick the widest format whose frame rate range covers the desired FPS
        for (AVCaptureDeviceFormat *format in [videoDevice formats]) {
            for (AVFrameRateRange *range in format.videoSupportedFrameRateRanges) {
                CMFormatDescriptionRef desc = format.formatDescription;
                CMVideoDimensions dimensions = CMVideoFormatDescriptionGetDimensions(desc);
                int32_t width = dimensions.width;
                if (range.minFrameRate <= desiredFPS && desiredFPS <= range.maxFrameRate && width >= maxWidth) {
                    selectedFormat = format;
                    frameRateRange = range;
                    maxWidth = width;
                }
            }
        }
        if (selectedFormat) {
            if ([videoDevice lockForConfiguration:nil]) {
                NSLog(@"selected format: %@", selectedFormat);
                videoDevice.activeFormat = selectedFormat;
                videoDevice.activeVideoMinFrameDuration = CMTimeMake(1, (int32_t)desiredFPS);
                videoDevice.activeVideoMaxFrameDuration = CMTimeMake(1, (int32_t)desiredFPS);
                [videoDevice unlockForConfiguration];
            }
        }
        [self.captureSession startRunning];
    }
12. Slow-motion capture off
    - (void)cameraBackgroundDidClickCloseSlow {
        [self.captureSession stopRunning];
        AVCaptureDevice *videoDevice = self.captureDeviceInput.device;
        // Restore the default format and frame durations saved when the session
        // was created, so stabilization can be enabled again
        if ([videoDevice lockForConfiguration:nil]) {
            videoDevice.activeFormat = _defaultFormat;
            videoDevice.activeVideoMinFrameDuration = _defaultMinFrameDuration;
            videoDevice.activeVideoMaxFrameDuration = _defaultMaxFrameDuration;
            [videoDevice unlockForConfiguration];
        }
        [self.captureSession startRunning];
    }
13. Stabilization on
    - (void)cameraBackgroundDidClickOpenAntiShake {
        AVCaptureConnection *captureConnection = [_captureMovieFileOutput connectionWithMediaType:AVMediaTypeVideo];
        NSLog(@"change captureConnection: %@", captureConnection);
        AVCaptureDevice *videoDevice = self.captureDeviceInput.device;
        NSLog(@"set format: %@", videoDevice.activeFormat);
        if ([videoDevice.activeFormat isVideoStabilizationModeSupported:AVCaptureVideoStabilizationModeCinematic]) {
            captureConnection.preferredVideoStabilizationMode = AVCaptureVideoStabilizationModeCinematic;
        }
    }
14. Stabilization off
    #pragma mark - Stabilization off
    - (void)cameraBackgroundDidClickCloseAntiShake {
        AVCaptureConnection *captureConnection = [_captureMovieFileOutput connectionWithMediaType:AVMediaTypeVideo];
        NSLog(@"change captureConnection: %@", captureConnection);
        AVCaptureDevice *videoDevice = self.captureDeviceInput.device;
        if ([videoDevice.activeFormat isVideoStabilizationModeSupported:AVCaptureVideoStabilizationModeOff]) {
            captureConnection.preferredVideoStabilizationMode = AVCaptureVideoStabilizationModeOff;
        }
    }
15. Recording video
    #pragma mark - Recording
    - (void)cameraBackgroundDidClickPlay {
        // Get the connection from the output
        AVCaptureConnection *captureConnection = [self.captureMovieFileOutput connectionWithMediaType:AVMediaTypeVideo];
        if (![self.captureMovieFileOutput isRecording]) {
            // Match the video orientation to the device orientation. The cast works because
            // the portrait values of the two enums line up and the landscape values are
            // mirrored, which is exactly the mapping the camera needs.
            captureConnection.videoOrientation = (AVCaptureVideoOrientation)_deviceOrientation;
            NSString *outputFilePath = [kCachePath stringByAppendingPathComponent:[self movieName]];
            NSURL *fileURL = [NSURL fileURLWithPath:outputFilePath];
            [self.captureMovieFileOutput startRecordingToOutputFileURL:fileURL recordingDelegate:self];
            _currentMoviePath = outputFilePath;
        }
    }

    - (void)captureOutput:(AVCaptureFileOutput *)captureOutput didStartRecordingToOutputFileAtURL:(NSURL *)fileURL fromConnections:(NSArray *)connections {
        NSLog(@"recording started");
    }

    - (void)captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL fromConnections:(NSArray *)connections error:(NSError *)error {
        NSLog(@"recording finished");
    }
16. Pausing (stopping) the recording
[self.captureMovieFileOutput stopRecording];
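kCachePath and movieName are used by the recording code but never shown in the post. A plausible sketch, purely an assumption about what they might look like:

    // Hypothetical definitions of the helpers used above (not from the post).
    #define kCachePath [NSSearchPathForDirectoriesInDomains(NSCachesDirectory, NSUserDomainMask, YES) firstObject]

    /// A timestamped file name, so every recorded segment is unique
    - (NSString *)movieName {
        return [NSString stringWithFormat:@"%.0f.mov", [[NSDate date] timeIntervalSince1970] * 1000];
    }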
17. Adjusting video speed
For slow motion you have to change the camera's capture frame rate; for fast motion it is enough to speed up the recorded video afterwards.
A video shot in slow motion still plays back in the time it actually took to shoot, so here its duration is stretched according to the chosen slow-motion rate.
    /// Produce the speed-adjusted video
    - (void)setSpeedWithVideo:(NSDictionary *)video completed:(void (^)(void))completed {
        dispatch_async(dispatch_get_global_queue(0, 0), ^{
            NSLog(@"video set thread: %@", [NSThread currentThread]);
            // Load the video
            AVURLAsset *videoAsset = [[AVURLAsset alloc] initWithURL:[NSURL fileURLWithPath:video[kMoviePath]] options:nil];
            // Composition to mix into
            AVMutableComposition *mixComposition = [AVMutableComposition composition];
            // Video track
            AVMutableCompositionTrack *compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
            // Audio track
            AVMutableCompositionTrack *compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
            // Work out how the video was shot from its preferred transform
            CGAffineTransform videoTransform = [videoAsset tracksWithMediaType:AVMediaTypeVideo].lastObject.preferredTransform;
            if (videoTransform.a == 0 && videoTransform.b == 1.0 && videoTransform.c == -1.0 && videoTransform.d == 0) {
                NSLog(@"shot in portrait");
                videoTransform = CGAffineTransformMakeRotation(M_PI_2);
            } else if (videoTransform.a == 0 && videoTransform.b == -1.0 && videoTransform.c == 1.0 && videoTransform.d == 0) {
                NSLog(@"shot upside down");
                videoTransform = CGAffineTransformMakeRotation(-M_PI_2);
            } else if (videoTransform.a == 1.0 && videoTransform.b == 0 && videoTransform.c == 0 && videoTransform.d == 1.0) {
                NSLog(@"shot in landscape, home button on the right");
                videoTransform = CGAffineTransformMakeRotation(0);
            } else if (videoTransform.a == -1.0 && videoTransform.b == 0 && videoTransform.c == 0 && videoTransform.d == -1.0) {
                NSLog(@"shot in landscape, home button on the left");
                videoTransform = CGAffineTransformMakeRotation(M_PI);
            }
            // Propagate the original orientation to the composition's video track
            compositionVideoTrack.preferredTransform = videoTransform;
            compositionVideoTrack.naturalTimeScale = 600;
            // Insert the video track
            [compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, CMTimeMake(videoAsset.duration.value, videoAsset.duration.timescale))
                                           ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] firstObject]
                                            atTime:kCMTimeZero
                                             error:nil];
            // Insert the audio track
            [compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, CMTimeMake(videoAsset.duration.value, videoAsset.duration.timescale))
                                           ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeAudio] firstObject]
                                            atTime:kCMTimeZero
                                             error:nil];
            // Work out the speed ratio
            CGFloat scale = 1.0;
            if ([video[kMovieSpeed] isEqualToString:kMovieSpeed_Fast]) {
                scale = 0.2f; // fast, x5
            } else if ([video[kMovieSpeed] isEqualToString:kMovieSpeed_Slow]) {
                scale = 4.0f; // slow, x4
            }
            // Stretch (or compress) both tracks by the speed ratio
            [compositionVideoTrack scaleTimeRange:CMTimeRangeMake(kCMTimeZero, CMTimeMake(videoAsset.duration.value, videoAsset.duration.timescale))
                                       toDuration:CMTimeMake(videoAsset.duration.value * scale, videoAsset.duration.timescale)];
            [compositionAudioTrack scaleTimeRange:CMTimeRangeMake(kCMTimeZero, CMTimeMake(videoAsset.duration.value, videoAsset.duration.timescale))
                                       toDuration:CMTimeMake(videoAsset.duration.value * scale, videoAsset.duration.timescale)];
            // Configure the export session
            AVAssetExportSession *assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPreset1280x720];
            // Temporary path for the exported video
            NSString *exportPath = [kCachePath stringByAppendingPathComponent:[self movieName]];
            NSURL *exportUrl = [NSURL fileURLWithPath:exportPath];
            // Export as .MOV
            assetExport.outputFileType = AVFileTypeQuickTimeMovie;
            assetExport.outputURL = exportUrl;
            assetExport.shouldOptimizeForNetworkUse = YES;
            // Export the video
            [assetExport exportAsynchronouslyWithCompletionHandler:^(void) {
                dispatch_async(dispatch_get_main_queue(), ^{
                    [_processedVideoPaths addObject:exportPath];
                    // Save the exported video to the photo album
                    ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
                    if (![library videoAtPathIsCompatibleWithSavedPhotosAlbum:[NSURL fileURLWithPath:exportPath]]) {
                        NSLog(@"cache can't write");
                        completed();
                        return;
                    }
                    [library writeVideoAtPathToSavedPhotosAlbum:[NSURL fileURLWithPath:exportPath] completionBlock:^(NSURL *assetURL, NSError *error) {
                        if (error) {
                            completed();
                            NSLog(@"cache write error");
                        } else {
                            completed();
                            NSLog(@"cache write success");
                        }
                    }];
                });
            }];
        });
    }
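ALAssetsLibrary has been deprecated since iOS 9. On newer systems the same save can be done with the Photos framework; a minimal sketch, not part of the original code:

    // Photos-framework equivalent of the ALAssetsLibrary save above
    // (requires <Photos/Photos.h> and photo-library permission).
    [[PHPhotoLibrary sharedPhotoLibrary] performChanges:^{
        [PHAssetChangeRequest creationRequestForAssetFromVideoAtFileURL:[NSURL fileURLWithPath:exportPath]];
    } completionHandler:^(BOOL success, NSError * _Nullable error) {
        NSLog(@"%@", success ? @"cache write success" : @"cache write error");
    }];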
18. Merging multiple videos into one
    - (void)mergeVideosWithPaths:(NSArray *)paths completed:(void (^)(NSString *videoPath))completed {
        if (!paths.count) return;
        dispatch_async(dispatch_get_main_queue(), ^{
            AVMutableComposition *mixComposition = [[AVMutableComposition alloc] init];
            AVMutableCompositionTrack *audioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
            AVMutableCompositionTrack *videoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
            videoTrack.preferredTransform = CGAffineTransformRotate(CGAffineTransformIdentity, M_PI_2);

            CMTime totalDuration = kCMTimeZero;
            // NSMutableArray<AVMutableVideoCompositionLayerInstruction *> *instructions = [NSMutableArray array];
            for (int i = 0; i < paths.count; i++) {
                AVURLAsset *asset = [AVURLAsset assetWithURL:[NSURL fileURLWithPath:paths[i]]];
                AVAssetTrack *assetAudioTrack = [[asset tracksWithMediaType:AVMediaTypeAudio] firstObject];
                AVAssetTrack *assetVideoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] firstObject];
                NSLog(@"%lld", asset.duration.value / asset.duration.timescale);

                // Append each segment after the previous one
                NSError *errorAudio = nil;
                BOOL ba = [audioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration) ofTrack:assetAudioTrack atTime:totalDuration error:&errorAudio];
                NSLog(@"errorAudio: %@--%d", errorAudio, ba);
                NSError *errorVideo = nil;
                BOOL bv = [videoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration) ofTrack:assetVideoTrack atTime:totalDuration error:&errorVideo];
                NSLog(@"errorVideo: %@--%d", errorVideo, bv);

                // The commented-out code below builds per-segment layer instructions
                // that scale and rotate each clip according to how it was shot:
                // AVMutableVideoCompositionLayerInstruction *instruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
                // UIImageOrientation assetOrientation = UIImageOrientationUp;
                // BOOL isAssetPortrait = NO;
                // // Derive the clip's orientation from its preferred transform
                // CGAffineTransform videoTransform = assetVideoTrack.preferredTransform;
                // if (videoTransform.a == 0 && videoTransform.b == 1.0 && videoTransform.c == -1.0 && videoTransform.d == 0) {
                //     NSLog(@"shot in portrait");
                //     assetOrientation = UIImageOrientationRight;
                //     isAssetPortrait = YES;
                // } else if (videoTransform.a == 0 && videoTransform.b == -1.0 && videoTransform.c == 1.0 && videoTransform.d == 0) {
                //     NSLog(@"shot upside down");
                //     assetOrientation = UIImageOrientationLeft;
                //     isAssetPortrait = YES;
                // } else if (videoTransform.a == 1.0 && videoTransform.b == 0 && videoTransform.c == 0 && videoTransform.d == 1.0) {
                //     NSLog(@"shot in landscape, home button on the right");
                //     assetOrientation = UIImageOrientationUp;
                // } else if (videoTransform.a == -1.0 && videoTransform.b == 0 && videoTransform.c == 0 && videoTransform.d == -1.0) {
                //     NSLog(@"shot in landscape, home button on the left");
                //     assetOrientation = UIImageOrientationDown;
                // }
                // CGFloat assetScaleToFitRatio = 720.0 / assetVideoTrack.naturalSize.width;
                // if (isAssetPortrait) {
                //     assetScaleToFitRatio = 720.0 / assetVideoTrack.naturalSize.height;
                //     CGAffineTransform assetScaleFactor = CGAffineTransformMakeScale(assetScaleToFitRatio, assetScaleToFitRatio);
                //     [instruction setTransform:CGAffineTransformConcat(assetVideoTrack.preferredTransform, assetScaleFactor) atTime:totalDuration];
                // } else {
                //     /**
                //      Portrait video size: 720x1280
                //      Landscape video size: 720x405
                //      Vertical offset to center a landscape clip: (1280 - 405) / 2 = 437.5
                //      **/
                //     CGAffineTransform assetScaleFactor = CGAffineTransformMakeScale(assetScaleToFitRatio, assetScaleToFitRatio);
                //     [instruction setTransform:CGAffineTransformConcat(CGAffineTransformConcat(assetVideoTrack.preferredTransform, assetScaleFactor), CGAffineTransformMakeTranslation(0, 437.5)) atTime:totalDuration];
                // }
                // // Insert each new instruction at the front; playback still follows track order.
                // [instructions insertObject:instruction atIndex:0];

                // This accumulation was commented out in the original; without it
                // every segment is inserted at time zero instead of being appended.
                totalDuration = CMTimeAdd(totalDuration, asset.duration);

                // // Zero the crop rectangle once this clip ends, otherwise a smaller
                // // following clip would show the previous one underneath it.
                // [instruction setCropRectangle:CGRectZero atTime:totalDuration];
            }

            // AVMutableVideoCompositionInstruction *mixInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
            // mixInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, totalDuration);
            // mixInstruction.layerInstructions = instructions;
            // AVMutableVideoComposition *mixVideoComposition = [AVMutableVideoComposition videoComposition];
            // mixVideoComposition.instructions = [NSArray arrayWithObject:mixInstruction];
            // mixVideoComposition.frameDuration = CMTimeMake(1, 25);
            // mixVideoComposition.renderSize = CGSizeMake(720.0, 1280.0);

            NSString *outPath = [kVideoPath stringByAppendingPathComponent:[self movieName]];
            NSURL *mergeFileURL = [NSURL fileURLWithPath:outPath];
            AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
            exporter.outputURL = mergeFileURL;
            exporter.outputFileType = AVFileTypeQuickTimeMovie;
            // exporter.videoComposition = mixVideoComposition;
            exporter.shouldOptimizeForNetworkUse = YES;
            [exporter exportAsynchronouslyWithCompletionHandler:^{
                dispatch_async(dispatch_get_main_queue(), ^{
                    completed(outPath);
                });
            }];
        });
    }
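A quick usage sketch, assuming _processedVideoPaths holds the segment paths collected in step 17:

    // Merge the processed segments into a single file and use the result.
    [self mergeVideosWithPaths:_processedVideoPaths completed:^(NSString *videoPath) {
        NSLog(@"merged video at: %@", videoPath);
    }];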