iOS RTMP Video Live Streaming

Here is a quick walk-through of camera capture on iOS.

First, initialize the AVCaptureSession:

// Initialize the AVCaptureSession
_session = [[AVCaptureSession alloc] init];
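If a specific capture resolution is needed, a session preset can be set on the session right after creating it; the preset below is only an illustrative choice:

// Optional: pick a capture resolution via a session preset
if ([_session canSetSessionPreset:AVCaptureSessionPreset1280x720]) {
    _session.sessionPreset = AVCaptureSessionPreset1280x720;
}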

The Video and Audio capture formats are configured separately, which means you can also capture video only (a sketch of the matching audio setup follows the video configuration below).

// Configure the capture input (camera)
NSError *error = nil;

// Get a capture device, e.g. the front or back camera
AVCaptureDevice *videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];

// Create a capture input from the device
AVCaptureDeviceInput *videoInput = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:&error];
if (error) {
    NSLog(@"Error getting video input device: %@", error.description);
}
if ([_session canAddInput:videoInput]) {
    [_session addInput:videoInput];  // Add to the session
}

// Configure the capture output, i.e. the interface through which we receive the video frames
_videoQueue = dispatch_queue_create("Video Capture Queue", DISPATCH_QUEUE_SERIAL);
_videoOutput = [[AVCaptureVideoDataOutput alloc] init];
[_videoOutput setSampleBufferDelegate:self queue:_videoQueue];

// Configure the output pixel format
NSDictionary *captureSettings = @{(NSString *)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA)};
_videoOutput.videoSettings = captureSettings;
_videoOutput.alwaysDiscardsLateVideoFrames = YES;
if ([_session canAddOutput:_videoOutput]) {
    [_session addOutput:_videoOutput];  // Add to the session
}

// Save the connection, so the sample buffer delegate can tell whether a buffer is video or audio
_videoConnection = [_videoOutput connectionWithMediaType:AVMediaTypeVideo];
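The delegate below also checks an _audioConnection, which the video-only snippet above never creates. A minimal sketch of the matching audio setup, assuming _audioQueue, _audioOutput, and _audioConnection ivars and that the class also adopts AVCaptureAudioDataOutputSampleBufferDelegate:

// Audio input (microphone)
AVCaptureDevice *audioDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
AVCaptureDeviceInput *audioInput = [AVCaptureDeviceInput deviceInputWithDevice:audioDevice error:&error];
if ([_session canAddInput:audioInput]) {
    [_session addInput:audioInput];
}

// Audio output, delivered to the same sample buffer delegate on its own queue
_audioQueue = dispatch_queue_create("Audio Capture Queue", DISPATCH_QUEUE_SERIAL);
_audioOutput = [[AVCaptureAudioDataOutput alloc] init];
[_audioOutput setSampleBufferDelegate:self queue:_audioQueue];
if ([_session canAddOutput:_audioOutput]) {
    [_session addOutput:_audioOutput];
}

// Save the audio connection so the delegate can tell audio from video
_audioConnection = [_audioOutput connectionWithMediaType:AVMediaTypeAudio];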

Implement the sample buffer delegate (AVCaptureVideoDataOutputSampleBufferDelegate):

- (void) captureOutput:(AVCaptureOutput *)captureOutput 
 didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer 
        fromConnection:(AVCaptureConnection *)connection
{
    // sampleBuffer here is the captured data; whether it is video or audio is determined from the connection
    if (connection == _videoConnection) {  // Video
        /*
        // Read the current video dimensions
        CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
        size_t width = CVPixelBufferGetWidth(pixelBuffer);
        size_t height = CVPixelBufferGetHeight(pixelBuffer);
        NSLog(@"video width: %zu  height: %zu", width, height);
        */
        NSLog(@"Got a video sampleBuffer here; process it further (encode to H.264)");
    } else if (connection == _audioConnection) {  // Audio
        NSLog(@"Got an audio sampleBuffer here; process it further (encode to AAC)");
    }
}
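The "encode to H.264" step itself is not covered in this post. A minimal VideoToolbox sketch of what it could look like, assuming a VTCompressionSessionRef _compressionSession ivar and a session created with the captured frame size (method and ivar names here are illustrative, not part of the original code):

#import <VideoToolbox/VideoToolbox.h>

// Receives encoded H.264 sample buffers; this is where RTMP packetization would start
static void compressionOutputCallback(void *outputCallbackRefCon,
                                      void *sourceFrameRefCon,
                                      OSStatus status,
                                      VTEncodeInfoFlags infoFlags,
                                      CMSampleBufferRef sampleBuffer) {
    if (status != noErr || sampleBuffer == NULL) return;
    // Extract SPS/PPS and NAL units from sampleBuffer and hand them to the RTMP muxer
}

- (void)setupEncoderWithWidth:(int32_t)width height:(int32_t)height {
    VTCompressionSessionCreate(kCFAllocatorDefault, width, height,
                               kCMVideoCodecType_H264,
                               NULL, NULL, NULL,
                               compressionOutputCallback,
                               (__bridge void *)self,
                               &_compressionSession);
    VTSessionSetProperty(_compressionSession, kVTCompressionPropertyKey_RealTime, kCFBooleanTrue);
    VTCompressionSessionPrepareToEncodeFrames(_compressionSession);
}

// Call this from the delegate above for each video sample buffer
- (void)encodeVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer {
    CVImageBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    CMTime pts = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
    VTCompressionSessionEncodeFrame(_compressionSession, pixelBuffer, pts,
                                    kCMTimeInvalid, NULL, NULL, NULL);
}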

Configuration is done; now start the session:

// Start the session
[_session startRunning];
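Note that the session only delivers frames once the user has granted camera access (and microphone access, if audio is captured); on current iOS versions the NSCameraUsageDescription / NSMicrophoneUsageDescription keys must also be present in Info.plist. A minimal check before starting the session could look like this:

[AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^(BOOL granted) {
    if (granted) {
        [_session startRunning];
    } else {
        NSLog(@"Camera access was denied");
    }
}];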

1.1 Extra task: display the captured camera video on screen

This is straightforward: on the sending side, simply use the built-in AVCaptureVideoPreviewLayer.

_previewLayer = [AVCaptureVideoPreviewLayer layerWithSession:_session];
_previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;  // How the video is scaled inside the preview
[[_previewLayer connection] setVideoOrientation:AVCaptureVideoOrientationPortrait];  // Video orientation
_previewLayer.frame = self.view.layer.bounds;
[self.view.layer addSublayer:_previewLayer];

 

Then add this layer to the view hierarchy and the preview shows up.

Full implementation:

#import "MyAVController.h"
#import <AVFoundation/AVFoundation.h>
#import <CoreGraphics/CoreGraphics.h>
#import <CoreVideo/CoreVideo.h>
#import <CoreMedia/CoreMedia.h>

@interface MyAVController()<AVCaptureVideoDataOutputSampleBufferDelegate>
@property (nonatomic, retain) AVCaptureSession *captureSession;
@property (nonatomic, retain) UIImageView *imageView;
@property (nonatomic, retain) CALayer *customLayer;
@property (nonatomic, retain) AVCaptureVideoPreviewLayer *prevLayer;
- (void)initCapture;

@end

#import "MyAVController.h"

@implementation MyAVController
{
    AVCaptureSession *_captureSession;
    UIImageView *_imageView;
    CALayer *_customLayer;
    AVCaptureVideoPreviewLayer *_prevLayer;
    AVCaptureConnection *_videoConnection;
    AVCaptureConnection *_audioConnection;
}


#pragma mark -
#pragma mark Initialization
- (id)init {
    self = [super init];
    if (self) {
        self.imageView = nil;
        self.prevLayer = nil;
        self.customLayer = nil;
    }
    return self;
}

- (void)viewDidLoad {
    [super viewDidLoad];
    [self initCapture];
}

- (void)initCapture {
    // Configure the capture input (camera)
    AVCaptureDevice *videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    // Create a capture input from the device
    AVCaptureDeviceInput *captureInput = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:nil];
    AVCaptureVideoDataOutput *captureOutput = [[AVCaptureVideoDataOutput alloc]init];
    captureOutput.alwaysDiscardsLateVideoFrames = YES;
    //captureOutput.minFrameDuration = CMTimeMake(1, 10);
    
    // Configure the capture output, i.e. the interface through which we receive the video frames
    dispatch_queue_t queue;
    queue = dispatch_queue_create("cameraQueue", NULL);
    
    [captureOutput setSampleBufferDelegate:self queue:queue];
    dispatch_release(queue);
    NSString* key = (NSString*)kCVPixelBufferPixelFormatTypeKey;
    // Configure the output pixel format
    NSNumber* value = [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA];
    NSDictionary* videoSettings = [NSDictionary
                                   dictionaryWithObject:value forKey:key];
    [captureOutput setVideoSettings:videoSettings];
    self.captureSession = [[AVCaptureSession alloc] init];
    [self.captureSession addInput:captureInput];
    [self.captureSession addOutput:captureOutput];
    [self.captureSession startRunning];
    
    // Save the connection, so the sample buffer delegate can tell whether a buffer is video or audio
    _videoConnection=[captureOutput
                      connectionWithMediaType:AVMediaTypeVideo];
    
    // Views and layers used to display the captured frames
    self.customLayer = [CALayer layer];
    self.customLayer.frame = self.view.bounds;
    self.customLayer.transform = CATransform3DRotate(
                                                     CATransform3DIdentity, M_PI/2.0f, 0, 0, 1);
    self.customLayer.contentsGravity = kCAGravityResizeAspectFill;
    [self.view.layer addSublayer:self.customLayer];
    self.imageView = [[UIImageView alloc] init];
    self.imageView.frame = CGRectMake(0, 0, 100, 100);
    [self.view addSubview:self.imageView];
    self.prevLayer = [AVCaptureVideoPreviewLayer
                      layerWithSession: self.captureSession];
    self.prevLayer.frame = CGRectMake(100, 0, 100, 100);
    self.prevLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    [self.view.layer addSublayer: self.prevLayer];
}

#pragma mark -
#pragma mark AVCaptureSession delegate
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection
{
    
    // sampleBuffer here is the captured data; whether it is video or audio is determined from the connection
    if (connection == _videoConnection) {  // Video
        /*
         // Read the current video dimensions
         CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
         size_t width = CVPixelBufferGetWidth(pixelBuffer);
         size_t height = CVPixelBufferGetHeight(pixelBuffer);
         NSLog(@"video width: %zu  height: %zu", width, height);
         */
        NSLog(@"Got a video sampleBuffer here; process it further (encode to H.264)");
    } else if (connection == _audioConnection) {  // Audio
        NSLog(@"Got an audio sampleBuffer here; process it further (encode to AAC)");
    }
    
    
    
    NSAutoreleasePool * pool = [[NSAutoreleasePool alloc] init];
    // Convert the captured pixel buffer into a CGImage / UIImage for on-screen display
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    CVPixelBufferLockBaseAddress(imageBuffer,0);
    uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddress(imageBuffer);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);
    
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef newContext = CGBitmapContextCreate(baseAddress,
                                                    width, height, 8, bytesPerRow, colorSpace,
                                                    kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
    CGImageRef newImage = CGBitmapContextCreateImage(newContext);
    
    CGContextRelease(newContext);
    CGColorSpaceRelease(colorSpace);
    
    [self.customLayer performSelectorOnMainThread:@selector(setContents:)
                                       withObject: (__bridge id) newImage waitUntilDone:YES];
    
    UIImage *image = [UIImage imageWithCGImage:newImage scale:1.0
                                  orientation:UIImageOrientationRight];
    
    CGImageRelease(newImage);
    
    [self.imageView performSelectorOnMainThread:@selector(setImage:)
                                     withObject:image waitUntilDone:YES];
    
    CVPixelBufferUnlockBaseAddress(imageBuffer,0);
    
    [pool drain];
}

#pragma mark -
#pragma mark Memory management

- (void)viewDidUnload {
    self.imageView = nil;
    self.customLayer = nil;
    self.prevLayer = nil;
}

- (void)dealloc {
    [_captureSession release];
    [super dealloc];
}

@end