//
//  AudioVideoCaptureViewController.m
//  live
//
//  Created by lujunjie on 2016/10/31.
//  Copyright © 2016 lujunjie. All rights reserved.
//

#import "AudioVideoCaptureViewController.h"
#import <AVFoundation/AVFoundation.h>

@interface AudioVideoCaptureViewController () <AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate>

@property (nonatomic, strong) AVCaptureSession *mCaptureSession;
@property (nonatomic, strong) AVCaptureDeviceInput *mCaptureDeviceInput;
@property (nonatomic, strong) AVCaptureDeviceInput *mCaptureAudioDeviceInput; // feeds input data from the AVCaptureDevice into the session
@property (nonatomic, strong) AVCaptureVideoDataOutput *mCaptureVideoOutput;
@property (nonatomic, strong) AVCaptureAudioDataOutput *mCaptureAudioOutput;
@property (nonatomic, strong) dispatch_queue_t mProcessQueue;
@property (nonatomic, strong) dispatch_queue_t mCaptureQueue;
@property (nonatomic, strong) dispatch_queue_t mEncodeQueue;
@property (nonatomic, strong) AVCaptureVideoPreviewLayer *mPreviewLayer;

@end

@implementation AudioVideoCaptureViewController

- (void)viewDidLoad {
    [super viewDidLoad];
    // Do any additional setup after loading the view.
    //
    // a. AVCaptureDevice  — represents the physical capture hardware (camera, microphone).
    // b. AVCaptureInput   — represents an input (or one of its subclasses); it configures the ports of the hardware device.
    // c. AVCaptureOutput  — represents the output data, managing delivery to a movie file or to individual frames.
    // d. AVCaptureSession — the bridge between inputs and outputs; it coordinates the flow of data from input to output.
    [self startCapture];
}

- (void)didReceiveMemoryWarning {
    [super didReceiveMemoryWarning];
    // Dispose of any resources that can be recreated.
}

- (void)startCapture
{
    // Create the session and set the capture resolution.
    self.mCaptureSession = [[AVCaptureSession alloc] init];
    self.mCaptureSession.sessionPreset = AVCaptureSessionPreset640x480;

    // The sample buffer delegate queue must be serial; a serial encode queue also keeps frames in order.
    self.mCaptureQueue = dispatch_queue_create("com.live.captureQueue", DISPATCH_QUEUE_SERIAL);
    self.mEncodeQueue  = dispatch_queue_create("com.live.encodeQueue", DISPATCH_QUEUE_SERIAL);

    // Get the front-facing camera.
    AVCaptureDevice *inputCamera = nil;
    NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    for (AVCaptureDevice *device in devices) {
        if ([device position] == AVCaptureDevicePositionFront) {
            inputCamera = device;
        }
    }

    // Wrap the camera in a device input and add it to the session.
    self.mCaptureDeviceInput = [[AVCaptureDeviceInput alloc] initWithDevice:inputCamera error:nil];
    if ([self.mCaptureSession canAddInput:self.mCaptureDeviceInput]) {
        [self.mCaptureSession addInput:self.mCaptureDeviceInput];
    }

    // Configure the video data output and add it to the session.
    self.mCaptureVideoOutput = [[AVCaptureVideoDataOutput alloc] init];
    [self.mCaptureVideoOutput setAlwaysDiscardsLateVideoFrames:NO];
    [self.mCaptureVideoOutput setVideoSettings:@{(id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)}];
    [self.mCaptureVideoOutput setSampleBufferDelegate:self queue:self.mCaptureQueue];
    if ([self.mCaptureSession canAddOutput:self.mCaptureVideoOutput]) {
        [self.mCaptureSession addOutput:self.mCaptureVideoOutput];
    }

    // Get the video connection from the output (where scaling/cropping would be configured) and fix its orientation.
    AVCaptureConnection *connection = [self.mCaptureVideoOutput connectionWithMediaType:AVMediaTypeVideo];
    [connection setVideoOrientation:AVCaptureVideoOrientationPortrait];

    // Video preview.
    self.mPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:self.mCaptureSession];
    [self.mPreviewLayer setVideoGravity:AVLayerVideoGravityResizeAspect]; // how the preview scales the video
    [self.mPreviewLayer setFrame:self.view.bounds];
    [self.view.layer addSublayer:self.mPreviewLayer];

    // Get the microphone device.
    AVCaptureDevice *audioDevice = [[AVCaptureDevice devicesWithMediaType:AVMediaTypeAudio] lastObject];

    // Wrap the microphone in a device input and add it to the session.
    self.mCaptureAudioDeviceInput = [[AVCaptureDeviceInput alloc] initWithDevice:audioDevice error:nil];
    if ([self.mCaptureSession canAddInput:self.mCaptureAudioDeviceInput]) {
        [self.mCaptureSession addInput:self.mCaptureAudioDeviceInput];
    }

    // Configure the audio data output and add it to the session.
    self.mCaptureAudioOutput = [[AVCaptureAudioDataOutput alloc] init];
    if ([self.mCaptureSession canAddOutput:self.mCaptureAudioOutput]) {
        [self.mCaptureSession addOutput:self.mCaptureAudioOutput];
    }
    [self.mCaptureAudioOutput setSampleBufferDelegate:self queue:self.mCaptureQueue];

    [self.mCaptureSession startRunning];
}

// Called on mCaptureQueue for both the video and the audio data output.
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
    if (captureOutput == self.mCaptureVideoOutput) {
        dispatch_sync(self.mEncodeQueue, ^{
            NSLog(@"video:::sampleBuffer");
        });
    } else {
        dispatch_sync(self.mEncodeQueue, ^{
            NSLog(@"audio:::sampleBuffer");
        });
    }
}

@end
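
// A minimal sketch of how the video sample buffers delivered to the delegate
// above could be inspected on the encode queue before being handed to an
// encoder. It assumes the bi-planar 420f pixel format that mCaptureVideoOutput
// is configured with; the helper name DumpVideoSampleBufferInfo is hypothetical
// and not part of any framework. The CoreMedia/CoreVideo calls used here are
// already available through the AVFoundation import at the top of the file.
static void DumpVideoSampleBufferInfo(CMSampleBufferRef sampleBuffer)
{
    // The pixel data of a video sample buffer lives in its image buffer.
    CVImageBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    if (pixelBuffer == NULL) {
        return;
    }

    // Lock the buffer read-only before touching its planes.
    CVPixelBufferLockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);

    size_t width  = CVPixelBufferGetWidth(pixelBuffer);
    size_t height = CVPixelBufferGetHeight(pixelBuffer);
    // For kCVPixelFormatType_420YpCbCr8BiPlanarFullRange, plane 0 is Y (luma)
    // and plane 1 is the interleaved CbCr (chroma) plane.
    size_t yBytesPerRow    = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 0);
    size_t cbcrBytesPerRow = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 1);

    CMTime pts = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);

    NSLog(@"video frame %zux%zu, Y stride %zu, CbCr stride %zu, pts %.3fs",
          width, height, yBytesPerRow, cbcrBytesPerRow, CMTimeGetSeconds(pts));

    CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
}

// Possible use inside the video branch of the delegate callback above:
//     dispatch_sync(self.mEncodeQueue, ^{
//         DumpVideoSampleBufferInfo(sampleBuffer);
//     });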