首先先從github上pod下來LFLiveKit
新建OnlyLiveSession類:主要用於管理直播的類 .h
#import <UIKit/UIKit.h>
// Delegate protocol for receiving live-session status messages.
@protocol OnlyLiveSessionDelegate <NSObject>
// Called with a human-readable status message whenever the live state changes
- (void)OnlyLiveCallBackMessage:(NSString *)callBackMessage;
@end
// Wrapper class that manages the live-streaming session (camera capture + RTMP push).
@interface OnlyLiveSession : NSObject
// Whether a live stream is currently running (set on connect, cleared on stop/error)
@property (assign, nonatomic,readonly) BOOL isPlaying;
// Initializer: pass the delegate and the parent view used to host the camera preview
- (instancetype)initWithDefaultSessionWithdelegate:(id<OnlyLiveSessionDelegate>)delegate preView:(UIView *)preView;
// Start pushing the stream to the given RTMP URL
- (void)startLiveWithStreamString:(NSString *)streamString;
// Stop the live stream
- (void)stopLive;
// Toggle between the front and back camera
- (void)resetCamera;
@end
複製代碼
.m
#import "OnlyLiveSession.h"
#import <LFLiveKit.h>
// Formats a transfer speed as a human-readable string (B/s, KB/s or MB/s).
// bytes: number of bytes transferred; elapsed_milli: elapsed time in milliseconds.
// Returns @"N/A" when the elapsed time is not positive, @"0 KB/s" for zero bytes.
inline static NSString *formatedSpeed(float bytes, float elapsed_milli) {
    if (elapsed_milli <= 0) {
        return @"N/A";
    }
    if (bytes <= 0) {
        return @"0 KB/s";
    }
    // Convert to bytes per second, then pick the largest unit that fits.
    float rate = bytes * 1000.f / elapsed_milli;
    if (rate >= 1000.f * 1000.f) {
        return [NSString stringWithFormat:@"%.2f MB/s", rate / (1000.f * 1000.f)];
    }
    if (rate >= 1000.f) {
        return [NSString stringWithFormat:@"%.1f KB/s", rate / 1000.f];
    }
    return [NSString stringWithFormat:@"%ld B/s", (long)rate];
}
// Private state for OnlyLiveSession.
@interface OnlyLiveSession()<LFLiveSessionDelegate>
// Redeclared readwrite internally (readonly in the public header).
@property (assign, nonatomic) BOOL isPlaying;
// The underlying LFLiveKit capture/push session.
@property (strong, nonatomic) LFLiveSession *session;
// FIX: was untyped `id`; typing it to the protocol restores compile-time checks.
// Weak to avoid a retain cycle with the owning controller.
@property (weak, nonatomic) id<OnlyLiveSessionDelegate> delegate;
@end
@implementation OnlyLiveSession
#pragma mark - Lifecycle
// Builds the default LFLiveSession, wires the delegate, and attaches the preview view.
- (instancetype)initWithDefaultSessionWithdelegate:(id<OnlyLiveSessionDelegate>)delegate preView:(UIView*)preView{
    self = [super init];
    if (self) {
        [self setDefaultSessionWithdelegate:delegate preView:preView];
    }
    return self;
}
#pragma mark - Start streaming. streamString: the RTMP push URL
- (void)startLiveWithStreamString:(NSString *)streamString{
    // FIX: the original guard (`!self.session && streamString.length > 0`) only
    // rejected the call when the session was missing AND a URL was present, so an
    // empty/nil URL slipped straight through to startLive:. Require both a session
    // and a non-empty URL.
    if (!self.session || streamString.length == 0) {
        // FIX: `@throw` of a bare NSString is not catchable as an NSException;
        // raise a proper exception (programmer error / API misuse).
        [NSException raise:NSInternalInconsistencyException
                    format:@"須要建立一個默認的session or 須要一個播放streamString"];
        return;
    }
    NSLog(@"當前的streamString:%@",streamString);
    LFLiveStreamInfo *info = [LFLiveStreamInfo new];
    info.url = streamString;
    [self.session startLive:info];
}
#pragma mark - Stop streaming
- (void)stopLive
{
    [_session stopLive];
}
#pragma mark - Toggle front/back camera
- (void)resetCamera{
    AVCaptureDevicePosition devicePositon = self.session.captureDevicePosition;
    self.session.captureDevicePosition = (devicePositon == AVCaptureDevicePositionBack) ? AVCaptureDevicePositionFront : AVCaptureDevicePositionBack;
}
#pragma mark - Build the default session (created lazily, at most once)
- (void)setDefaultSessionWithdelegate:(id<OnlyLiveSessionDelegate>)delegate preView:(UIView*)preView{
    if (!_session) {
        self.delegate = delegate;
        /*** Defaults: video sized to the preview, 44.1kHz audio (48kHz on iPhone 6+), stereo, portrait ***/
        LFLiveVideoConfiguration *videoConfiguration = [LFLiveVideoConfiguration new];
        videoConfiguration.videoSize = CGSizeMake(preView.bounds.size.width, preView.bounds.size.height);
        videoConfiguration.videoBitRate = 800*1024;
        videoConfiguration.videoMaxBitRate = 1000*1024;
        videoConfiguration.videoMinBitRate = 500*1024;
        videoConfiguration.videoFrameRate = 24;
        // Keyframe every 48 frames = one every 2 seconds at 24 fps
        videoConfiguration.videoMaxKeyframeInterval = 48;
        videoConfiguration.outputImageOrientation = UIInterfaceOrientationPortrait;
        videoConfiguration.autorotate = NO;
        videoConfiguration.sessionPreset = LFCaptureSessionPreset720x1280;
        _session = [[LFLiveSession alloc] initWithAudioConfiguration:[LFLiveAudioConfiguration defaultConfiguration] videoConfiguration:videoConfiguration captureType:LFLiveCaptureDefaultMask];
        _session.delegate = self;
        _session.preView = preView;
    }
    [self requestAccessForAudio];
    [self requestAccessForVideo];
}
#pragma mark -- LFStreamingSessionDelegate
/** live status changed will callback */
- (void)liveSession:(nullable LFLiveSession *)session liveStateDidChange:(LFLiveState)state {
    NSLog(@"liveStateDidChange: %ld", (long)state); // FIX: cast for %ld — enum width varies by platform
    NSString *needCallBackMessage;
    switch (state) {
        case LFLiveReady:
            needCallBackMessage = @"未鏈接";
            self.isPlaying = NO;
            break;
        case LFLivePending:
            needCallBackMessage = @"鏈接中";
            break;
        case LFLiveStart:
            self.isPlaying = YES;
            needCallBackMessage = @"已鏈接";
            break;
        case LFLiveError:
            needCallBackMessage = @"鏈接錯誤";
            self.isPlaying = NO;
            break;
        case LFLiveStop:
            needCallBackMessage = @"未鏈接";
            self.isPlaying = NO;
            break;
        default:
            needCallBackMessage = @"未知";
            break;
    }
    // Forward the localized status text to the owner, if it implements the callback.
    if ([self.delegate respondsToSelector:@selector(OnlyLiveCallBackMessage:)]) {
        [self.delegate OnlyLiveCallBackMessage:needCallBackMessage];
    }
}
/** live debug info callback */
- (void)liveSession:(nullable LFLiveSession *)session debugInfo:(nullable LFLiveDebug *)debugInfo {
    NSLog(@"debugInfo uploadSpeed: %@", formatedSpeed(debugInfo.currentBandwidth, debugInfo.elapsedMilli));
}
/** callback socket errorcode */
- (void)liveSession:(nullable LFLiveSession *)session errorCode:(LFLiveSocketErrorCode)errorCode {
    self.isPlaying = NO;
    NSLog(@"errorCode: %ld", (long)errorCode); // FIX: cast for %ld
}
#pragma mark -- Public Method
// Request camera permission; starts capture once (or if already) authorized.
- (void)requestAccessForVideo {
    __weak typeof(self) weakSelf = self;
    AVAuthorizationStatus status = [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo];
    switch (status) {
        case AVAuthorizationStatusNotDetermined: {
            // Permission prompt not shown yet — request authorization now
            [AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^(BOOL granted) {
                if (granted) {
                    // The completion handler may run off the main thread; UI-facing
                    // capture start is dispatched to main.
                    dispatch_async(dispatch_get_main_queue(), ^{
                        [weakSelf.session setRunning:YES];
                    });
                }
            }];
            break;
        }
        case AVAuthorizationStatusAuthorized: {
            // Already authorized — start the capture session
            dispatch_async(dispatch_get_main_queue(), ^{
                [weakSelf.session setRunning:YES];
            });
            break;
        }
        case AVAuthorizationStatusDenied:
        case AVAuthorizationStatusRestricted:
            // User explicitly denied access, or the camera is unavailable on this device
            break;
        default:
            break;
    }
}
// Request microphone permission (result is handled internally by LFLiveKit).
- (void)requestAccessForAudio {
    AVAuthorizationStatus status = [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeAudio];
    switch (status) {
        case AVAuthorizationStatusNotDetermined: {
            [AVCaptureDevice requestAccessForMediaType:AVMediaTypeAudio completionHandler:^(BOOL granted) {
            }];
            break;
        }
        case AVAuthorizationStatusAuthorized: {
            break;
        }
        case AVAuthorizationStatusDenied:
        case AVAuthorizationStatusRestricted:
            break;
        default:
            break;
    }
}
#pragma mark - dealloc
- (void)dealloc
{
    // FIX: was `_session ? _session = nil : nil;` — a ternary abused for its side
    // effect. ARC releases the ivar anyway; a plain assignment says the same thing.
    _session = nil;
}
@end
複製代碼
須要承載直播視圖的preview(主要封裝UI視圖)
.h
#import <UIKit/UIKit.h>
// Tags identifying which control inside OnlyLiveView was tapped.
typedef NS_ENUM(NSInteger, OnlyLiveViewClick) {
OnlyLiveViewClickStart = 101, // start streaming
OnlyLiveViewClickStop = 102, // stop streaming
OnlyLiveViewClickResetCamera = 103, // toggle front/back camera
OnlyLiveViewClickBeauty = 104, // toggle the beauty filter
OnlyLiveViewClickClose = 105, // close the live view
};
// Callback invoked for each tap on one of the view's controls
typedef void (^OnlyLiveViewClickCompletion)(OnlyLiveViewClick Click);
// Overlay view hosting the live-streaming controls (state label + buttons).
@interface OnlyLiveView : UIView
-(instancetype)initWithFrame:(CGRect)frame completion:(OnlyLiveViewClickCompletion)completion;
// Update the state label with the latest connection message
- (void)updateOnlyLiveVieWithMessage:(NSString *)message;
@end
複製代碼
.m
#import "OnlyLiveView.h"
#import "UIControl+YYAdd.h"
#import "UIView+YYAdd.h"
// Private UI elements for OnlyLiveView.
@interface OnlyLiveView()
@property (nonatomic, strong) UIButton *beautyButton;    // beauty-filter toggle
@property (nonatomic, strong) UIButton *cameraButton;    // front/back camera switch
@property (nonatomic, strong) UIButton *closeButton;     // closes the live view
@property (nonatomic, strong) UIButton *startLiveButton; // start/stop streaming
@property (nonatomic, strong) UIView *containerView;     // hosts all the controls
@property (nonatomic, strong) UILabel *stateLabel;       // connection state text
// FIX: block properties should be `copy`, not `strong` — `copy` is the ARC
// convention and documents that a stack block is moved to the heap.
@property (nonatomic, copy) OnlyLiveViewClickCompletion liveCompletion;
@end
@implementation OnlyLiveView
#pragma mark - Public
// Update the state label with the latest connection message.
- (void)updateOnlyLiveVieWithMessage:(NSString *)message
{
    self.stateLabel.text = message;
}
#pragma mark - Actions
// Generic tap handler — forwards the button's tag as an OnlyLiveViewClick value.
- (void)OnlyLiveViewClick:(UIButton *)sender{
    // FIX: guard against a nil block — the plain -initWithFrame: path never
    // sets liveCompletion, and invoking a nil block crashes.
    if (self.liveCompletion) {
        self.liveCompletion(sender.tag);
    }
}
// Toggles the start/stop button title and reports Start/Stop to the owner.
- (void)startLiveButtonClick:(UIButton*)sender{
    self.startLiveButton.selected = !self.startLiveButton.selected;
    if (!self.startLiveButton.selected) {
        [self.startLiveButton setTitle:@"開始直播" forState:UIControlStateNormal];
        if (self.liveCompletion) self.liveCompletion(OnlyLiveViewClickStop);
    } else {
        [self.startLiveButton setTitle:@"結束直播" forState:UIControlStateNormal];
        if (self.liveCompletion) self.liveCompletion(OnlyLiveViewClickStart);
    }
}
#pragma mark - Init
-(instancetype)initWithFrame:(CGRect)frame completion:(OnlyLiveViewClickCompletion)completion
{
    if (self = [super initWithFrame:frame]) {
        self.liveCompletion = completion;
        [self only_setupSubviews];
    }
    return self;
}
- (instancetype)initWithFrame:(CGRect)frame {
    if (self = [super initWithFrame:frame]) {
        [self only_setupSubviews];
    }
    return self;
}
// Shared subview construction for both initializers (was duplicated verbatim).
- (void)only_setupSubviews {
    self.backgroundColor = [UIColor clearColor];
    [self addSubview:self.containerView];
    [self.containerView addSubview:self.stateLabel];
    [self.containerView addSubview:self.closeButton];
    [self.containerView addSubview:self.cameraButton];
    [self.containerView addSubview:self.beautyButton];
    [self.containerView addSubview:self.startLiveButton];
}
#pragma mark - Lazy getters
- (UIView *)containerView {
    if (!_containerView) {
        _containerView = [UIView new];
        _containerView.frame = self.bounds;
        _containerView.backgroundColor = [UIColor clearColor];
        _containerView.autoresizingMask = UIViewAutoresizingFlexibleWidth | UIViewAutoresizingFlexibleHeight;
    }
    return _containerView;
}
- (UILabel *)stateLabel {
    if (!_stateLabel) {
        _stateLabel = [[UILabel alloc] initWithFrame:CGRectMake(20, 20, 80, 40)];
        _stateLabel.text = @"未鏈接";
        _stateLabel.textColor = [UIColor whiteColor];
        _stateLabel.font = [UIFont boldSystemFontOfSize:14.f];
    }
    return _stateLabel;
}
- (UIButton *)closeButton {
    if (!_closeButton) {
        _closeButton = [UIButton new];
        _closeButton.tag = OnlyLiveViewClickClose;
        // size/left/top come from the YYAdd UIView category
        _closeButton.size = CGSizeMake(44, 44);
        _closeButton.left = self.width - 10 - _closeButton.width;
        _closeButton.top = 20;
        [_closeButton setImage:[UIImage imageNamed:@"close_preview"] forState:UIControlStateNormal];
        _closeButton.exclusiveTouch = YES;
        [_closeButton addTarget:self action:@selector(OnlyLiveViewClick:) forControlEvents:UIControlEventTouchUpInside];
    }
    return _closeButton;
}
- (UIButton *)cameraButton {
    if (!_cameraButton) {
        _cameraButton = [UIButton new];
        _cameraButton.tag = OnlyLiveViewClickResetCamera;
        _cameraButton.size = CGSizeMake(44, 44);
        // FIX: go through self.closeButton (not the raw _closeButton ivar) so the
        // dependent button is created even if this getter runs first.
        _cameraButton.origin = CGPointMake(self.closeButton.left - 10 - _cameraButton.width, 20);
        [_cameraButton setImage:[UIImage imageNamed:@"camra_preview"] forState:UIControlStateNormal];
        _cameraButton.exclusiveTouch = YES;
        [_cameraButton addTarget:self action:@selector(OnlyLiveViewClick:) forControlEvents:UIControlEventTouchUpInside];
    }
    return _cameraButton;
}
- (UIButton *)beautyButton {
    if (!_beautyButton) {
        _beautyButton = [UIButton new];
        _beautyButton.tag = OnlyLiveViewClickBeauty;
        _beautyButton.size = CGSizeMake(44, 44);
        // FIX: same ivar-ordering hazard as cameraButton — use the getter.
        _beautyButton.origin = CGPointMake(self.cameraButton.left - 10 - _beautyButton.width, 20);
        [_beautyButton setImage:[UIImage imageNamed:@"camra_beauty"] forState:UIControlStateNormal];
        [_beautyButton setImage:[UIImage imageNamed:@"camra_beauty_close"] forState:UIControlStateSelected];
        _beautyButton.exclusiveTouch = YES;
        [_beautyButton addTarget:self action:@selector(OnlyLiveViewClick:) forControlEvents:UIControlEventTouchUpInside];
    }
    return _beautyButton;
}
- (UIButton *)startLiveButton {
    if (!_startLiveButton) {
        _startLiveButton = [UIButton new];
        _startLiveButton.size = CGSizeMake(self.width - 60, 44);
        _startLiveButton.left = 30;
        _startLiveButton.bottom = self.height - 50;
        _startLiveButton.layer.cornerRadius = _startLiveButton.height/2;
        [_startLiveButton setTitleColor:[UIColor blackColor] forState:UIControlStateNormal];
        [_startLiveButton.titleLabel setFont:[UIFont systemFontOfSize:16]];
        [_startLiveButton setTitle:@"開始直播" forState:UIControlStateNormal];
        // FIX: colorWithRed:green:blue:alpha: takes components in 0–1; the raw
        // 0–255 values were clamped to (1,1,1) = white. Normalize by 255.
        [_startLiveButton setBackgroundColor:[UIColor colorWithRed:50/255.0 green:32/255.0 blue:245/255.0 alpha:1]];
        _startLiveButton.exclusiveTouch = YES;
        [_startLiveButton addTarget:self action:@selector(startLiveButtonClick:) forControlEvents:UIControlEventTouchUpInside];
    }
    return _startLiveButton;
}
@end
複製代碼
上面已經建立好了直播的控制類,直播的展現view,而後構建你的直播控制器 直接上.m
#import "ViewController.h"
#import <LFLiveKit.h>
#import "OnlyLiveView.h"
#import "OnlyCommonDefine.h"
#import "OnlyLiveSession.h"
@interface ViewController ()<OnlyLiveSessionDelegate>
// NOTE(review): this property is never read or written in this controller —
// OnlyLiveSession owns its own LFLiveSession internally; consider removing.
@property (strong, nonatomic) LFLiveSession *session;
// Wrapper that manages the capture/push live session
@property (strong, nonatomic) OnlyLiveSession *onlyLiveSession;
// Overlay view with the live-streaming controls
@property (strong, nonatomic) OnlyLiveView *liveView;
@end
@implementation ViewController
- (void)viewDidLoad {
    [super viewDidLoad];
    // Do any additional setup after loading the view, typically from a nib.
    // FIX: the completion block is stored (copied) by liveView, which self
    // retains — capturing self strongly here created a retain cycle
    // (self -> liveView -> block -> self). Capture self weakly instead.
    __weak typeof(self) weakSelf = self;
    OnlyLiveView *liveView = [[OnlyLiveView alloc]initWithFrame:CGRectMake(0, 0, kScreenWidth, kScreenHeight) completion:^(OnlyLiveViewClick Click) {
        [weakSelf doSomeThingWithOnlyLiveViewClick:Click];
    }];
    self.liveView = liveView;
    [self.view addSubview:liveView];
    // The live view doubles as the camera preview host.
    OnlyLiveSession *onlyLiveSession = [[OnlyLiveSession alloc]initWithDefaultSessionWithdelegate:self preView:liveView];
    self.onlyLiveSession = onlyLiveSession;
}
#pragma mark - OnlyLiveSessionDelegate
// Forwards the session's status message to the overlay UI.
- (void)OnlyLiveCallBackMessage:(NSString *)callBackMessage
{
    [self.liveView updateOnlyLiveVieWithMessage:callBackMessage];
}
// Dispatches a tapped control to the matching session action.
- (void)doSomeThingWithOnlyLiveViewClick:(OnlyLiveViewClick)Click{
    switch (Click) {
        case OnlyLiveViewClickStart:{
            [self onlyLiveSessionStartLive];
        }
            break;
        case OnlyLiveViewClickStop:{
            [self.onlyLiveSession stopLive];
        }
            break;
        case OnlyLiveViewClickResetCamera:
            [self.onlyLiveSession resetCamera];
            break;
        case OnlyLiveViewClickBeauty:
            // TODO: beauty filter not implemented yet
            break;
        case OnlyLiveViewClickClose:
            // TODO: close action not implemented yet
            break;
        default:
            break;
    }
}
// Builds a per-device stream URL: base RTMP address + first 3 chars of the
// vendor identifier (lowercased), then starts streaming.
- (void)onlyLiveSessionStartLive{
    NSString *uuidStr = [[[UIDevice currentDevice] identifierForVendor] UUIDString];
    NSString *devCode = [[uuidStr substringToIndex:3] lowercaseString];
    NSString *streamSrv = @"你的rtmp直播地址";
    NSString *streamString = [NSString stringWithFormat:@"%@/%@", streamSrv, devCode];
    [self.onlyLiveSession startLiveWithStreamString:streamString];
}
@end
複製代碼
下面運行demo你就能夠看到下邊的效果~
點擊開始直播就能夠開始你開心的直播之旅了~ 固然你須要一個能夠播放rtmp的播放器推薦使用VLC 看到的畫面
demo地址~ 推流demo
下一步會作貼圖的功能~