Video data is normally encoded with H264. Starting with iOS 11, devices from the iPhone 7 up also support the new H265 encoder, so video of the same quality takes up less storage. This example therefore implements video encoding in both ways.
To decide whether the current device supports H265 encoding, two conditions must both be met: the device must be an iPhone 7 or newer, and the system version must be iOS 11 or later.
if (@available(iOS 11.0, *)) {
    BOOL hardwareDecodeSupported = VTIsHardwareDecodeSupported(kCMVideoCodecType_HEVC);
    if (hardwareDecodeSupported) {
        _deviceSupportH265 = YES;
        NSLog(@"XDXHardwareEncoder : Support H265 Encode/Decode!");
    }
} else {
    _deviceSupportH265 = NO;
    NSLog(@"XDXHardwareEncoder : Not support H265 Encode/Decode!");
}
The system provides VTIsHardwareDecodeSupported to determine whether the current device supports H265.
Initializing the encoder
- (void)prepareForEncode {
    if (self.width == 0 || self.height == 0) {
        NSLog(@"XDXHardwareEncoder : VTSession need width and height for init, width = %d, height = %d", self.width, self.height);
        return;
    }

    if (g_isSupportRealTimeEncoder) NSLog(@"XDXHardwareEncoder : Device processor is 64 bit");
    else                            NSLog(@"XDXHardwareEncoder : Device processor is not 64 bit");

    NSLog(@"XDXHardwareEncoder : Current h264 open state : %d, h265 open state : %d", self.enableH264, self.enableH265);

    OSStatus h264Status, h265Status;
    BOOL isRestart = NO;

    if (self.enableH264) {
        if (h264CompressionSession != NULL) {
            NSLog(@"XDXHardwareEncoder : H264 session not NULL");
            return;
        }
        [m_h264_lock lock];
        NSLog(@"XDXHardwareEncoder : Prepare H264 hardware encoder");

        //[self.delegate willEncoderStart];

        self.h264ErrCount = 0;

        h264Status = VTCompressionSessionCreate(NULL, self.width, self.height, kCMVideoCodecType_H264, NULL, NULL, NULL, vtCallBack, (__bridge void *)self, &h264CompressionSession);
        if (h264Status != noErr) {
            self.h264ErrCount++;
            NSLog(@"XDXHardwareEncoder : H264 VTCompressionSessionCreate Failed, status = %d", h264Status);
        }

        [self getSupportedPropertyFlags];
        [self applyAllSessionProperty:h264CompressionSession propertyArr:self.h264propertyFlags];

        h264Status = VTCompressionSessionPrepareToEncodeFrames(h264CompressionSession);
        if (h264Status != noErr) {
            NSLog(@"XDXHardwareEncoder : H264 VTCompressionSessionPrepareToEncodeFrames Failed, status = %d", h264Status);
        } else {
            initializedH264 = true;
            NSLog(@"XDXHardwareEncoder : H264 VTSession create success, width = %d, height = %d, framerate = %d", self.width, self.height, self.fps);
        }
        if (h264Status != noErr && self.h264ErrCount != 0) isRestart = YES;
        [m_h264_lock unlock];
    }

    if (self.enableH265) {
        if (h265CompressionSession != NULL) {
            NSLog(@"XDXHardwareEncoder : H265 session not NULL");
            return;
        }
        [m_h265_lock lock];
        NSLog(@"XDXHardwareEncoder : Prepare H265 hardware encoder");

        // [self.delegate willEncoderStart];

        self.h265ErrCount = 0;

        h265Status = VTCompressionSessionCreate(NULL, self.width, self.height, kCMVideoCodecType_HEVC, NULL, NULL, NULL, vtH265CallBack, (__bridge void *)self, &h265CompressionSession);
        if (h265Status != noErr) {
            self.h265ErrCount++;
            NSLog(@"XDXHardwareEncoder : H265 VTCompressionSessionCreate Failed, status = %d", h265Status);
        }

        [self getSupportedPropertyFlags];
        [self applyAllSessionProperty:h265CompressionSession propertyArr:self.h265PropertyFlags];

        h265Status = VTCompressionSessionPrepareToEncodeFrames(h265CompressionSession);
        if (h265Status != noErr) {
            NSLog(@"XDXHardwareEncoder : H265 VTCompressionSessionPrepareToEncodeFrames Failed, status = %d", h265Status);
        } else {
            initializedH265 = true;
            NSLog(@"XDXHardwareEncoder : H265 VTSession create success, width = %d, height = %d, framerate = %d", self.width, self.height, self.fps);
        }
        if (h265Status != noErr && self.h265ErrCount != 0) isRestart = YES;
        [m_h265_lock unlock];
    }

    if (isRestart) {
        NSLog(@"XDXHardwareEncoder : VTSession create failed!");

        static int count = 0;
        count++;
        if (count == 3) {
            NSLog(@"TVUEncoder : restart 3 times failed! exit!");
            return;
        }
        sleep(1);
        NSLog(@"TVUEncoder : try to restart after 1 second!");
        NSLog(@"TVUEncoder : vtsession error occurred! restart encoder width: %d, height %d, times %d", self.width, self.height, count);
        [self tearDownSession];
        [self prepareForEncode];
    }
}
1> g_isSupportRealTimeEncoder = (is64Bit == 8) ? true : false;
is used to tell whether the current device has a 32-bit or a 64-bit processor.
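The definition of is64Bit itself is not shown in this article; a minimal sketch of how it could be obtained, assuming we simply take the pointer width of the running process (the helper name XDXPointerSize is illustrative, not from the original code):

static size_t XDXPointerSize(void) {
    // 8 bytes on arm64 devices, 4 bytes on older 32-bit (armv7) devices.
    return sizeof(void *);
}

// With that helper, the line above reduces to:
// g_isSupportRealTimeEncoder = (XDXPointerSize() == 8);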
2> Creating the H264 session and the H265 session differs only in the codec parameter: H264 uses kCMVideoCodecType_H264, H265 uses kCMVideoCodecType_HEVC. Once a session has been created with its callback specified, the corresponding callback is invoked each time a frame has been encoded.
3> [self getSupportedPropertyFlags]; retrieves the properties the current encoder supports setting. Testing shows that H265 does not support a bitrate limit; there is currently no workaround, so we have to wait for Apple to address it.
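The implementation of getSupportedPropertyFlags is not listed above. A minimal sketch of how such a query could be built on VTSessionCopySupportedPropertyDictionary — the method name and the idea of returning the supported keys as an NSArray are assumptions, not the original code:

- (NSArray *)supportedPropertyKeysForSession:(VTCompressionSessionRef)session {
    // Ask VideoToolbox which properties this particular session accepts.
    CFDictionaryRef supportedDict = NULL;
    OSStatus status = VTSessionCopySupportedPropertyDictionary(session, &supportedDict);
    if (status != noErr || supportedDict == NULL) {
        NSLog(@"XDXHardwareEncoder : copy supported property dictionary failed, status = %d", (int)status);
        return @[];
    }
    // The dictionary keys are the property keys this session supports,
    // e.g. kVTCompressionPropertyKey_AverageBitRate. Per the note above,
    // bitrate-limit keys may be missing for the H265 session.
    NSArray *keys = [(__bridge NSDictionary *)supportedDict allKeys];
    CFRelease(supportedDict);
    return keys;
}

The isSupportPropertyWithKey:inArray: helper used later can then simply check membership in this array.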
4> After that, the encoder properties are configured (described in detail below); once they are all set, VTCompressionSessionPrepareToEncodeFrames is called to get ready for encoding.
- (OSStatus)setSessionProperty:(VTCompressionSessionRef)session key:(CFStringRef)key value:(CFTypeRef)value {
    OSStatus status = VTSessionSetProperty(session, key, value);
    if (status != noErr) {
        NSString *sessionStr;
        if (session == h264CompressionSession) {
            sessionStr = @"h264 Session";
            self.h264ErrCount++;
        } else if (session == h265CompressionSession) {
            sessionStr = @"h265 Session";
            self.h265ErrCount++;
        }
        NSLog(@"XDXHardwareEncoder : Set %s of %s Failed, status = %d", CFStringGetCStringPtr(key, kCFStringEncodingUTF8), sessionStr.UTF8String, status);
    }
    return status;
}
- (void)applyAllSessionProperty:(VTCompressionSessionRef)session propertyArr:(NSArray *)propertyArr {
    OSStatus status;

    if (!g_isSupportRealTimeEncoder) {
        /* limit max frame delay to reduce encoder pressure */
        int value = 3;
        CFNumberRef ref = CFNumberCreate(NULL, kCFNumberSInt32Type, &value);
        [self setSessionProperty:session key:kVTCompressionPropertyKey_MaxFrameDelayCount value:ref];
        CFRelease(ref);
    }
    if (self.fps) {
        if ([self isSupportPropertyWithKey:Key_ExpectedFrameRate inArray:propertyArr]) {
            int value = self.fps;
            CFNumberRef ref = CFNumberCreate(NULL, kCFNumberSInt32Type, &value);
            [self setSessionProperty:session key:kVTCompressionPropertyKey_ExpectedFrameRate value:ref];
            CFRelease(ref);
        }
    } else {
        NSLog(@"XDXHardwareEncoder : Current fps is 0");
    }
    if (self.bitrate) {
        if ([self isSupportPropertyWithKey:Key_AverageBitRate inArray:propertyArr]) {
            int value = self.bitrate;
            if (session == h265CompressionSession) value = 2 * 1000 * 1000; // if current session is H265, set bitrate to 2 Mbps.
            CFNumberRef ref = CFNumberCreate(NULL, kCFNumberSInt32Type, &value);
            [self setSessionProperty:session key:kVTCompressionPropertyKey_AverageBitRate value:ref];
            CFRelease(ref);
        }
    } else {
        NSLog(@"XDXHardwareEncoder : Current bitrate is 0");
    }
    /* 2016-11-15, @gang: iPhone 7/7 Plus do not support realtime encoding, so disable it there,
       otherwise we cannot control the encoding bit rate. */
    if (![[self deviceVersion] isEqualToString:@"iPhone9,1"] && ![[self deviceVersion] isEqualToString:@"iPhone9,2"]) {
        if (g_isSupportRealTimeEncoder) {
            if ([self isSupportPropertyWithKey:Key_RealTime inArray:propertyArr]) {
                NSLog(@"XDXHardwareEncoder : use realTimeEncoder");
                [self setSessionProperty:session key:kVTCompressionPropertyKey_RealTime value:kCFBooleanTrue];
            }
        }
    }

    if ([self isSupportPropertyWithKey:Key_AllowFrameReordering inArray:propertyArr]) {
        [self setSessionProperty:session key:kVTCompressionPropertyKey_AllowFrameReordering value:kCFBooleanFalse];
    }
    if (g_isSupportRealTimeEncoder) {
        if ([self isSupportPropertyWithKey:Key_ProfileLevel inArray:propertyArr]) {
            [self setSessionProperty:session key:kVTCompressionPropertyKey_ProfileLevel value:self.enableH264 ? kVTProfileLevel_H264_Main_AutoLevel : kVTProfileLevel_HEVC_Main_AutoLevel];
        }
    } else {
        if ([self isSupportPropertyWithKey:Key_ProfileLevel inArray:propertyArr]) {
            [self setSessionProperty:session key:kVTCompressionPropertyKey_ProfileLevel value:self.enableH264 ? kVTProfileLevel_H264_Baseline_AutoLevel : kVTProfileLevel_HEVC_Main_AutoLevel];
        }

        if (self.enableH264) {
            if ([self isSupportPropertyWithKey:Key_H264EntropyMode inArray:propertyArr]) {
                [self setSessionProperty:session key:kVTCompressionPropertyKey_H264EntropyMode value:kVTH264EntropyMode_CAVLC];
            }
        }
    }

    if ([self isSupportPropertyWithKey:Key_MaxKeyFrameIntervalDuration inArray:propertyArr]) {
        int value = 1;
        CFNumberRef ref = CFNumberCreate(NULL, kCFNumberSInt32Type, &value);
        [self setSessionProperty:session key:kVTCompressionPropertyKey_MaxKeyFrameIntervalDuration value:ref];
        CFRelease(ref);
    }
}
The method above configures the various parameters required to start the encoder.
1> kVTCompressionPropertyKey_MaxFrameDelayCount : the maximum number of frames the compressor may hold before it must output a compressed frame. For example, if the max frame delay count is M, frame N-M must have been emitted before the call that encodes frame N returns.
2> kVTCompressionPropertyKey_ExpectedFrameRate : sets the fps.
3> kVTCompressionPropertyKey_AverageBitRate : the average bitrate. It is not a hard limit; the bit rate may peak above it.
4> kVTCompressionPropertyKey_RealTime : whether the encoder encodes in real time. If set to false, encoding is not real-time and the picture quality is somewhat better.
5> kVTCompressionPropertyKey_AllowFrameReordering : whether frames may be reordered. To encode B-frames the encoder has to reorder frames, which means the decode order differs from the display order. Set this to false to prevent frame reordering.
6> kVTCompressionPropertyKey_ProfileLevel : specifies the profile and level of the encoded bitstream.
7> kVTCompressionPropertyKey_H264EntropyMode : for H264, selects whether the encoder should use CAVLC or CABAC entropy coding.
8> kVTCompressionPropertyKey_MaxKeyFrameIntervalDuration : the maximum duration between two I-frames. This property is particularly useful when the frame rate is variable.
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
    if (!CMSampleBufferDataIsReady(sampleBuffer)) {
        NSLog(@"sample buffer is not ready. Skipping sample");
        return;
    }

    if ([XDXHardwareEncoder getInstance] != NULL) {
        [[XDXHardwareEncoder getInstance] encode:sampleBuffer];
    }
}
The method above is called once for every captured video frame; each frame we receive is passed to the encoder.
-(void)encode:(CMSampleBufferRef)sampleBuffer {
    if (self.enableH264) {
        [m_h264_lock lock];
        if (h264CompressionSession == NULL) {
            [m_h264_lock unlock];
            return;
        }
        if (initializedH264 == false) {
            NSLog(@"TVUEncoder : h264 encoder is not ready\n");
            [m_h264_lock unlock];
            return;
        }
    }

    if (self.enableH265) {
        [m_h265_lock lock];
        if (h265CompressionSession == NULL) {
            [m_h265_lock unlock];
            return;
        }
        if (initializedH265 == false) {
            NSLog(@"TVUEncoder : h265 encoder is not ready\n");
            [m_h265_lock unlock];
            return;
        }
    }

    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    CMTime duration = CMSampleBufferGetOutputDuration(sampleBuffer);
    frameID++;
    CMTime presentationTimeStamp = CMTimeMake(frameID, 1000);

    [self doSetBitrate];

    OSStatus status;
    VTEncodeInfoFlags flags;

    if (self.enableH264) {
        status = VTCompressionSessionEncodeFrame(h264CompressionSession, imageBuffer, presentationTimeStamp, duration, NULL, imageBuffer, &flags);
        if (status != noErr) NSLog(@"TVUEncoder : H264 VTCompressionSessionEncodeFrame failed");
        [m_h264_lock unlock];

        if (status != noErr) {
            NSLog(@"TVUEncoder : VTCompressionSessionEncodeFrame failed");
            VTCompressionSessionCompleteFrames(h264CompressionSession, kCMTimeInvalid);
            VTCompressionSessionInvalidate(h264CompressionSession);
            CFRelease(h264CompressionSession);
            h264CompressionSession = NULL;
        } else {
            // NSLog(@"TVUEncoder : Success VTCompressionSessionCompleteFrames");
        }
    }

    if (self.enableH265) {
        status = VTCompressionSessionEncodeFrame(h265CompressionSession, imageBuffer, presentationTimeStamp, duration, NULL, imageBuffer, &flags);
        if (status != noErr) NSLog(@"TVUEncoder : H265 VTCompressionSessionEncodeFrame failed");
        [m_h265_lock unlock];

        if (status != noErr) {
            NSLog(@"TVUEncoder : VTCompressionSessionEncodeFrame failed");
            VTCompressionSessionCompleteFrames(h265CompressionSession, kCMTimeInvalid);
            VTCompressionSessionInvalidate(h265CompressionSession);
            CFRelease(h265CompressionSession);
            h265CompressionSession = NULL;
        } else {
            NSLog(@"TVUEncoder : Success VTCompressionSessionCompleteFrames");
        }
    }
}
1> A presentation timestamp is built from the incrementing frameID so that the encoded frames remain consecutive.
2> The maximum bitrate limit is applied. Note: H265 currently does not support bitrate limiting (we are waiting for official word from Apple); H264 can be limited.
3> kVTCompressionPropertyKey_DataRateLimits : the byte count and the duration are wrapped in a CFMutableArrayRef and passed to the API (see the sketch after this list).
4> VTCompressionSessionEncodeFrame : once this call succeeds, the callback is invoked with the encoded frame.
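The doSetBitrate method called from encode: is not listed in this article. A minimal sketch of what a DataRateLimits-based limit could look like, assuming bitrate is in bits per second and only the H264 session is limited (per the note above that H265 rejects this property); the numbers and the method body are illustrative, not the original implementation:

- (void)doSetBitrate {
    if (!self.enableH264 || h264CompressionSession == NULL) return;

    // Cap the encoder at roughly 1.5x the average bitrate over each 1-second window.
    int   bytesPerSecond    = self.bitrate * 1.5 / 8;   // bits per second -> bytes per second
    float durationInSeconds = 1.0f;

    CFNumberRef bytesRef    = CFNumberCreate(NULL, kCFNumberSInt32Type,  &bytesPerSecond);
    CFNumberRef durationRef = CFNumberCreate(NULL, kCFNumberFloat32Type, &durationInSeconds);

    // kVTCompressionPropertyKey_DataRateLimits takes a CFArray of alternating
    // byte-count / duration pairs.
    CFMutableArrayRef limits = CFArrayCreateMutable(NULL, 2, &kCFTypeArrayCallBacks);
    CFArrayAppendValue(limits, bytesRef);
    CFArrayAppendValue(limits, durationRef);

    OSStatus status = VTSessionSetProperty(h264CompressionSession, kVTCompressionPropertyKey_DataRateLimits, limits);
    if (status != noErr) {
        NSLog(@"XDXHardwareEncoder : set DataRateLimits failed, status = %d", (int)status);
    }

    CFRelease(bytesRef);
    CFRelease(durationRef);
    CFRelease(limits);
}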
#pragma mark H264 Callback
static void vtCallBack(void *outputCallbackRefCon, void *souceFrameRefCon, OSStatus status, VTEncodeInfoFlags infoFlags, CMSampleBufferRef sampleBuffer) {
    XDXHardwareEncoder *encoder = (__bridge XDXHardwareEncoder *)outputCallbackRefCon;
    if (status != noErr) {
        NSError *error = [NSError errorWithDomain:NSOSStatusErrorDomain code:status userInfo:nil];
        NSLog(@"H264: vtCallBack failed with %@", error);
        NSLog(@"XDXHardwareEncoder : encode frame failed! %s", error.debugDescription.UTF8String);
        return;
    }
    if (!CMSampleBufferDataIsReady(sampleBuffer)) {
        NSLog(@"didCompressH264 data is not ready");
        return;
    }
    if (infoFlags & kVTEncodeInfo_FrameDropped) {
        NSLog(@"%s with frame dropped.", __FUNCTION__);
        return;
    }

    CMBlockBufferRef block = CMSampleBufferGetDataBuffer(sampleBuffer);
    BOOL isKeyframe = false;
    CFArrayRef attachments = CMSampleBufferGetSampleAttachmentsArray(sampleBuffer, false);
    if (attachments != NULL) {
        CFDictionaryRef attachment = (CFDictionaryRef)CFArrayGetValueAtIndex(attachments, 0);
        CFBooleanRef dependsOnOthers = (CFBooleanRef)CFDictionaryGetValue(attachment, kCMSampleAttachmentKey_DependsOnOthers);
        isKeyframe = (dependsOnOthers == kCFBooleanFalse);
    }

    if (isKeyframe) {
        CMFormatDescriptionRef format = CMSampleBufferGetFormatDescription(sampleBuffer);
        static uint8_t *spsppsNALBuff = NULL;
        static size_t spsSize, ppsSize;
        size_t parmCount;
        const uint8_t *sps, *pps;
        int NALUnitHeaderLengthOut;

        CMVideoFormatDescriptionGetH264ParameterSetAtIndex(format, 0, &sps, &spsSize, &parmCount, &NALUnitHeaderLengthOut);
        CMVideoFormatDescriptionGetH264ParameterSetAtIndex(format, 1, &pps, &ppsSize, &parmCount, &NALUnitHeaderLengthOut);

        if (spsppsNALBuff != NULL) free(spsppsNALBuff); // avoid leaking the buffer from the previous keyframe
        spsppsNALBuff = (uint8_t *)malloc(spsSize + 4 + ppsSize + 4);
        memcpy(spsppsNALBuff, "\x00\x00\x00\x01", 4);
        memcpy(&spsppsNALBuff[4], sps, spsSize);
        memcpy(&spsppsNALBuff[4 + spsSize], "\x00\x00\x00\x01", 4);
        memcpy(&spsppsNALBuff[4 + spsSize + 4], pps, ppsSize);
        NSLog(@"XDXHardwareEncoder : H264 spsSize : %zu, ppsSize : %zu", spsSize, ppsSize);
        writeFile(spsppsNALBuff, spsSize + 4 + ppsSize + 4, encoder->_videoFile, 200);
    }

    size_t blockBufferLength;
    uint8_t *bufferDataPointer = NULL;
    CMBlockBufferGetDataPointer(block, 0, NULL, &blockBufferLength, (char **)&bufferDataPointer);

    size_t bufferOffset = 0;
    while (bufferOffset < blockBufferLength - startCodeLength) {
        uint32_t NALUnitLength = 0;
        memcpy(&NALUnitLength, bufferDataPointer + bufferOffset, startCodeLength);
        NALUnitLength = CFSwapInt32BigToHost(NALUnitLength);
        memcpy(bufferDataPointer + bufferOffset, startCode, startCodeLength);
        bufferOffset += startCodeLength + NALUnitLength;
    }

    writeFile(bufferDataPointer, blockBufferLength, encoder->_videoFile, 200);
}
#pragma mark H265 Callback
static void vtH265CallBack(void *outputCallbackRefCon, void *souceFrameRefCon, OSStatus status, VTEncodeInfoFlags infoFlags, CMSampleBufferRef sampleBuffer) {
    XDXHardwareEncoder *encoder = (__bridge XDXHardwareEncoder *)outputCallbackRefCon;
    if (status != noErr) {
        NSError *error = [NSError errorWithDomain:NSOSStatusErrorDomain code:status userInfo:nil];
        NSLog(@"H265: vtH265CallBack failed with %@", error);
        NSLog(@"XDXHardwareEncoder : H265 encode frame failed! %s", error.debugDescription.UTF8String);
        return;
    }
    if (!CMSampleBufferDataIsReady(sampleBuffer)) {
        NSLog(@"didCompressH265 data is not ready");
        return;
    }
    if (infoFlags & kVTEncodeInfo_FrameDropped) {
        NSLog(@"%s with frame dropped.", __FUNCTION__);
        return;
    }

    CMBlockBufferRef block = CMSampleBufferGetDataBuffer(sampleBuffer);
    BOOL isKeyframe = false;
    CFArrayRef attachments = CMSampleBufferGetSampleAttachmentsArray(sampleBuffer, false);
    if (attachments != NULL) {
        CFDictionaryRef attachment = (CFDictionaryRef)CFArrayGetValueAtIndex(attachments, 0);
        CFBooleanRef dependsOnOthers = (CFBooleanRef)CFDictionaryGetValue(attachment, kCMSampleAttachmentKey_DependsOnOthers);
        isKeyframe = (dependsOnOthers == kCFBooleanFalse);
    }

    if (isKeyframe) {
        CMFormatDescriptionRef format = CMSampleBufferGetFormatDescription(sampleBuffer);
        static uint8_t *vpsspsppsNALBuff = NULL;
        static size_t vpsSize, spsSize, ppsSize;
        size_t parmCount;
        const uint8_t *vps, *sps, *pps;

        if (encoder.deviceSupportH265) { // >= iPhone 7 && iOS 11
            CMVideoFormatDescriptionGetHEVCParameterSetAtIndex(format, 0, &vps, &vpsSize, &parmCount, NULL);
            CMVideoFormatDescriptionGetHEVCParameterSetAtIndex(format, 1, &sps, &spsSize, &parmCount, NULL);
            CMVideoFormatDescriptionGetHEVCParameterSetAtIndex(format, 2, &pps, &ppsSize, &parmCount, NULL);

            if (vpsspsppsNALBuff != NULL) free(vpsspsppsNALBuff); // avoid leaking the buffer from the previous keyframe
            vpsspsppsNALBuff = (uint8_t *)malloc(vpsSize + 4 + spsSize + 4 + ppsSize + 4);
            memcpy(vpsspsppsNALBuff, "\x00\x00\x00\x01", 4);
            memcpy(&vpsspsppsNALBuff[4], vps, vpsSize);
            memcpy(&vpsspsppsNALBuff[4 + vpsSize], "\x00\x00\x00\x01", 4);
            memcpy(&vpsspsppsNALBuff[4 + vpsSize + 4], sps, spsSize);
            memcpy(&vpsspsppsNALBuff[4 + vpsSize + 4 + spsSize], "\x00\x00\x00\x01", 4);
            memcpy(&vpsspsppsNALBuff[4 + vpsSize + 4 + spsSize + 4], pps, ppsSize);
            NSLog(@"XDXHardwareEncoder : H265 vpsSize : %zu, spsSize : %zu, ppsSize : %zu", vpsSize, spsSize, ppsSize);
            writeFile(vpsspsppsNALBuff, vpsSize + 4 + spsSize + 4 + ppsSize + 4, encoder->_videoFile, 200);
        }
    }

    size_t blockBufferLength;
    uint8_t *bufferDataPointer = NULL;
    CMBlockBufferGetDataPointer(block, 0, NULL, &blockBufferLength, (char **)&bufferDataPointer);

    size_t bufferOffset = 0;
    while (bufferOffset < blockBufferLength - startCodeLength) {
        uint32_t NALUnitLength = 0;
        memcpy(&NALUnitLength, bufferDataPointer + bufferOffset, startCodeLength);
        NALUnitLength = CFSwapInt32BigToHost(NALUnitLength);
        memcpy(bufferDataPointer + bufferOffset, startCode, startCodeLength);
        bufferOffset += startCodeLength + NALUnitLength;
    }

    writeFile(bufferDataPointer, blockBufferLength, encoder->_videoFile, 200);
}
1> In the callback we first pick out the I-frames, extract the SPS and PPS (plus the VPS newly added in H265) from them and write that header to the file.
2> We then walk every frame's NAL units, overwrite each length field with the start code 00 00 00 01, and write the data to the file.
Here is a brief overview of the H264 and H265 bitstream structure.
An H264 stream consists of a series of NAL units (NALUs).
A NALU may contain a video frame, i.e. a slice of video data, specifically an I-, P- or B-frame.
Note that an H265 stream additionally carries a VPS at the very front.
In the raw stream the parameter sets travel as NAL units in the stream data; after the processing above they are carried in the sample buffer's Format Description instead.
To sum up: an H264 stream is made of NALUs, and a NALU carries either video image data or H264 parameter information. The video image data is what becomes the CMBlockBuffer, while the parameter information is combined into a FormatDesc. Concretely, the parameter information consists of the SPS (Sequence Parameter Set) and the PPS (Picture Parameter Set).
Extract the SPS and PPS to create the FormatDesc.
Extract the video image data to create the CMBlockBuffer.
Generate CMTime information as needed. (In actual testing, attaching time information produced unstable images while leaving it out did not; this needs further investigation, so the suggestion here is not to attach time information.)
From the CMVideoFormatDescriptionRef, the CMBlockBufferRef and the optional timing information above, call CMSampleBufferCreate to obtain the CMSampleBuffer — the raw data ready to be decoded.
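A minimal sketch of that assembly for a single H.264 frame, assuming sps/spsSize, pps/ppsSize and an AVCC-framed frame (frameData/frameSize, i.e. 4-byte length prefix plus NAL unit) have already been extracted as described above — all names here are illustrative, not the original code:

#import <CoreMedia/CoreMedia.h>

static CMSampleBufferRef XDXCreateSampleBuffer(const uint8_t *sps, size_t spsSize,
                                               const uint8_t *pps, size_t ppsSize,
                                               uint8_t *frameData, size_t frameSize) {
    // 1. Build the format description from SPS/PPS.
    const uint8_t *parameterSets[2]     = { sps, pps };
    const size_t   parameterSetSizes[2] = { spsSize, ppsSize };
    CMVideoFormatDescriptionRef formatDesc = NULL;
    CMVideoFormatDescriptionCreateFromH264ParameterSets(kCFAllocatorDefault,
                                                        2, parameterSets, parameterSetSizes,
                                                        4,   // 4-byte NAL unit length field
                                                        &formatDesc);

    // 2. Wrap the frame data in a CMBlockBuffer (without copying or freeing it).
    CMBlockBufferRef blockBuffer = NULL;
    CMBlockBufferCreateWithMemoryBlock(kCFAllocatorDefault, frameData, frameSize,
                                       kCFAllocatorNull, NULL, 0, frameSize, 0, &blockBuffer);

    // 3. Combine them into a CMSampleBuffer; timing info is omitted here, matching
    //    the suggestion above not to attach a CMTime.
    CMSampleBufferRef sampleBuffer = NULL;
    const size_t sampleSizes[] = { frameSize };
    CMSampleBufferCreate(kCFAllocatorDefault, blockBuffer, true, NULL, NULL, formatDesc,
                         1, 0, NULL, 1, sampleSizes, &sampleBuffer);

    CFRelease(formatDesc);
    CFRelease(blockBuffer);
    return sampleBuffer;   // caller releases it after handing it to the decoder
}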