```objc
#import "ViewController.h"
#import <IJKMediaFramework/IJKMediaFramework.h>

@interface ViewController ()
@property (nonatomic, strong) id<IJKMediaPlayback> player;
@end

@implementation ViewController

- (void)viewDidLoad {
    [super viewDidLoad];

    self.player = [[IJKFFMoviePlayerController alloc] initWithContentURL:[NSURL URLWithString:@"rtmp://live.hkstv.hk.lxdns.com/live/hks"]
                                                             withOptions:nil];

    // Set the frame of the player's view and add it to the controller's view
    self.player.view.frame = self.view.bounds;
    [self.view addSubview:self.player.view];

    // Let the view stretch automatically when rotating to landscape
    self.player.view.autoresizingMask = UIViewAutoresizingFlexibleWidth | UIViewAutoresizingFlexibleHeight;

    [self.player prepareToPlay];
    [self.player play];
}

- (void)viewDidDisappear:(BOOL)animated {
    [super viewDidDisappear:animated];
    [self.player stop];
    self.player = nil;
}

@end
```
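If you also need to know when the stream buffers through or ends, IJKMediaFramework posts MPMoviePlayer-style notifications. Below is a minimal sketch, assuming the notification names and load-state flags declared in IJKMediaPlayback.h (verify them against the ijkplayer version you build); `registerPlayerObservers` is just an illustrative helper name.

```objc
// A hedged sketch: observe playback notifications from IJKMediaFramework.
// Notification names and load-state flags are those declared in IJKMediaPlayback.h;
// check them against the ijkplayer version you link.
- (void)registerPlayerObservers {
    [[NSNotificationCenter defaultCenter] addObserver:self
                                             selector:@selector(loadStateDidChange:)
                                                 name:IJKMPMoviePlayerLoadStateDidChangeNotification
                                               object:self.player];
    [[NSNotificationCenter defaultCenter] addObserver:self
                                             selector:@selector(playbackDidFinish:)
                                                 name:IJKMPMoviePlayerPlaybackDidFinishNotification
                                               object:self.player];
}

- (void)loadStateDidChange:(NSNotification *)notification {
    // Enough data is buffered for uninterrupted playback.
    if (self.player.loadState & IJKMPMovieLoadStatePlaythroughOK) {
        NSLog(@"stream is ready to play through");
    }
}

- (void)playbackDidFinish:(NSNotification *)notification {
    NSLog(@"playback finished (or failed)");
}

- (void)dealloc {
    [[NSNotificationCenter defaultCenter] removeObserver:self];
}
```

Call `registerPlayerObservers` right after creating the player in `viewDidLoad`.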
nginx.conf: open the file /usr/local/etc/nginx/nginx.conf and add an rtmp block below the http node (i.e. at the end of the file):

```nginx
http {
    ......
}

# Add the rtmp configuration below the http node (at the end of the file):
rtmp {
    server {
        listen 1935;

        application xxx {
            live on;
            record off;
        }
    }
}
```

Notes:

+ `rtmp` is the protocol name
+ `server` wraps the server-related configuration
+ `listen` is the port to listen on; the default port for the RTMP protocol is 1935
+ `application` is the application path used to access the stream, here `xxx`
+ `live on;` enables live streaming
+ `record off;` disables recording of the stream data
Only a small change to the nginx.conf configuration file is needed.

1. Open /usr/local/etc/nginx/nginx.conf
2. Find the `server` block under `http` and add the following inside its braces:

```nginx
server {
    listen 8080;
    server_name localhost;

    location / {
        root html;
        index index.html index.htm;
    }

    # HLS configuration start: lets the `client` fetch the HLS stream over HTTP
    location /hls {
        # Serve HLS fragments
        types {
            application/vnd.apple.mpegurl m3u8;
            video/mp2t ts;
        }
        root html;
        add_header Cache-Control no-cache;
    }
    # HLS configuration end

    error_page 500 502 503 504 /50x.html;
    location = /50x.html {
        root html;
    }
}
```

3. Find the `server` block under `rtmp` and add the following inside its braces:

```nginx
rtmp {
    server {
        listen 1935;

        application xxx {
            live on;
            record off;
        }

        # HLS support start
        # The pushed stream must be H.264/AAC
        application hls {
            live on;
            hls on;
            hls_path /usr/local/var/www/hls;
        }
        # HLS support end
    }
}
```
```sh
nginx -s reload
```
```sh
ffmpeg -f avfoundation -framerate 30 -i "1:0" -f avfoundation -framerate 30 -video_size 640x480 -i "0" -c:v libx264 -preset ultrafast -filter_complex 'overlay=main_w-overlay_w-10:main_h-overlay_h-10' -acodec libmp3lame -ar 44100 -ac 1 -f flv rtmp://192.168.33.245:1935/xxx/room
```
rtmp://192.168.33.245:1935/xxx/room
```sh
ffmpeg -loglevel verbose -re -i /Users/HOWIE-CH/Desktop/1.mp4 -vcodec libx264 -vprofile baseline -acodec libmp3lame -ar 44100 -ac 1 -f flv rtmp://localhost:1935/hls/1
```
+ Check: you can then see the generated .ts segment files in the directory /usr/local/var/www/hls, along with a playlist file named <your m3u8 file name>.m3u8.
+ Test URL: http://localhost:8080/hls/<your m3u8 file name>.m3u8
+ Test methods:
  1. Open the URL in Safari.
  2. Test the address (pulling the stream) with the ijkplayer framework in the simulator in an Xcode project; an alternative check with the system player is sketched below.
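As an extra quick check (not part of the ijkplayer route above), the HLS address can also be played with the system AVPlayer, which supports HLS natively. A minimal sketch, where `playHLSWithSystemPlayer` is just an illustrative method name and `your-playlist.m3u8` stands in for the generated playlist; plain-HTTP URLs may additionally need an App Transport Security exception in Info.plist.

```objc
#import <AVFoundation/AVFoundation.h>

// A hedged sketch: quick HLS playback check with the system AVPlayer.
// "your-playlist.m3u8" is a placeholder for the m3u8 file generated above.
- (void)playHLSWithSystemPlayer {
    NSURL *url = [NSURL URLWithString:@"http://localhost:8080/hls/your-playlist.m3u8"];
    AVPlayer *player = [AVPlayer playerWithURL:url];

    AVPlayerLayer *playerLayer = [AVPlayerLayer playerLayerWithPlayer:player];
    playerLayer.frame = self.view.bounds;
    [self.view.layer addSublayer:playerLayer];

    [player play]; // keep a strong reference to the player (e.g. in a property) for longer sessions
}
```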
#import "ViewController.h" #import <GDLiveStreaming/GDLRawDataOutput.h> #import <GPUImage/GPUImageVideoCamera.h> #import <GPUImage/GPUImageView.h> @interface ViewController () @property (nonatomic, strong) GPUImageVideoCamera *camera; @end @implementation ViewController - (void)viewDidLoad { [super viewDidLoad]; // 1. 建立視頻攝像頭 self.camera = [[GPUImageVideoCamera alloc] initWithSessionPreset:AVCaptureSessionPreset1280x720 cameraPosition:AVCaptureDevicePositionBack]; // 2. 設置攝像頭幀率 self.camera.frameRate = 25; // 3. 設置攝像頭輸出視頻的方向 self.camera.outputImageOrientation = UIInterfaceOrientationPortraitUpsideDown; // 4. 建立用於展現視頻的GPUImageView GPUImageView *imageView = [[GPUImageView alloc] init]; imageView.frame = self.view.bounds; [self.view addSubview:imageView]; // 4.1 添加GPUImageView爲攝像頭的的輸出目標 [self.camera addTarget:imageView]; // 5. 建立原始數據輸出對象 GDLRawDataOutput *output = [[GDLRawDataOutput alloc] initWithVideoCamera:self.camera withImageSize:CGSizeMake(720, 1280)]; // 5.1 添加數據輸出對象爲攝像頭輸出目標 [self.camera addTarget:output]; // 6.開啓前置攝像頭, 不寫這句代碼默認開啓的是後置攝像頭 [self.camera rotateCamera]; // 7.開始捕獲視頻 [self.camera startCameraCapture]; // 8.開始上傳視頻 [output startUploadStreamWithURL:@"rtmp://192.168.33.245:1935/zhanghao" andStreamKey:@"room"]; } @end
(Screenshots: debug log output on a real device; pulling the stream in the simulator.)