視頻是由一幀一幀的數據鏈接而成,而一幀視頻數據其實就是一張圖片。
yuv是一種圖片儲存格式,跟RGB格式相似。
RGB格式的圖片很好理解,計算機中的大多數圖片,都是以RGB格式存儲的。
yuv中,y表示亮度,單獨只有y數據就能夠造成一張圖片,只不過這張圖片是灰色的。u和v表示色差(u和v也被稱爲:Cb-藍色差,Cr-紅色差)。
YUV 字節數
YUV 格式視頻
yuv420也包含不一樣的數據排列格式:I420,NV12,NV21。其格式分別以下:
I420格式:y,u,v 3個部分分別存儲:Y0,Y1...Yn,U0,U1...Un/2,V0,V1...Vn/2 NV12格式:y和uv 2個部分分別存儲:Y0,Y1...Yn,U0,V0,U1,V1...Un/2,Vn/2 NV21格式:同NV12,只是U和V的順序相反。
綜合來講,除了存儲順序不一樣以外,上述格式對於顯示來講沒有任何區別。
使用哪一種視頻的格式,取決於初始化相機時設置的視頻輸出格式。
設置爲kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange時,表示輸出的視頻格式爲NV12;
設置爲kCVPixelFormatType_420YpCbCr8Planar時,表示使用I420。
GPUImage設置相機輸出數據時,使用的就是NV12.
NSData 轉 CVPixelBufferRef
// NV12 (bi-planar: full Y plane followed by an interleaved CbCr plane).
static OSType KVideoPixelFormatType = kCVPixelFormatType_420YpCbCr8BiPlanarFullRange;

/// Wraps a raw NV12 frame held in an NSData into a newly created CVPixelBuffer.
/// @param dataFrame Raw frame bytes; must hold at least w*h*3/2 bytes laid out
///        as a tightly packed Y plane followed by an interleaved CbCr plane.
/// @param presentationTime Unused by this method; kept for interface compatibility.
/// @param w Frame width in pixels.
/// @param h Frame height in pixels (parameter keyword "heigth" kept so existing callers compile).
/// @return A +1 retained pixel buffer the caller must CVPixelBufferRelease,
///         or NULL if the data is too small or creation fails.
+ (CVPixelBufferRef)yuvPixelBufferWithData:(NSData *)dataFrame
                          presentationTime:(CMTime)presentationTime
                                     width:(size_t)w
                                    heigth:(size_t)h {
    // Guard: an NV12 frame needs w*h luma bytes plus w*h/2 chroma bytes.
    if (dataFrame.length < w * h + (w * h) / 2) {
        return NULL;
    }
    const unsigned char *buffer = (const unsigned char *)dataFrame.bytes;
    return [self copyDataFromBuffer:buffer toYUVPixelBufferWithWidth:w Height:h];
}

/// Copies a tightly packed NV12 buffer into a freshly created CVPixelBuffer,
/// honoring the destination's per-plane bytes-per-row padding.
/// @return A +1 retained pixel buffer, or NULL on allocation failure.
+ (CVPixelBufferRef)copyDataFromBuffer:(const unsigned char *)buffer
             toYUVPixelBufferWithWidth:(size_t)w
                                Height:(size_t)h {
    // An empty IOSurface-properties entry asks CoreVideo for an
    // IOSurface-backed buffer, which GPU consumers (GL/Metal) require.
    // (The original built this dictionary with its only pair commented out.)
    NSDictionary *pixelBufferAttributes =
        @{(id)kCVPixelBufferIOSurfacePropertiesKey : @{}};

    CVPixelBufferRef pixelBuffer = NULL;
    CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, w, h,
                                          KVideoPixelFormatType,
                                          (__bridge CFDictionaryRef)pixelBufferAttributes,
                                          &pixelBuffer);
    if (status != kCVReturnSuccess || pixelBuffer == NULL) {
        // Creation failed; the original dereferenced the buffer regardless.
        return NULL;
    }

    CVPixelBufferLockBaseAddress(pixelBuffer, 0);

    // Plane 0 (luma): source rows are exactly w bytes; destination rows may
    // carry padding, so advance by the plane's bytes-per-row.
    size_t dstStride = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 0);
    const unsigned char *src = buffer;
    unsigned char *dst = (unsigned char *)CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 0);
    for (size_t row = 0; row < h; ++row, dst += dstStride, src += w) {
        memcpy(dst, src, w);
    }

    // Plane 1 (interleaved CbCr): same row width in bytes, half the rows.
    dstStride = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 1);
    dst = (unsigned char *)CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 1);
    size_t chromaRows = h >> 1;
    for (size_t row = 0; row < chromaRows; ++row, dst += dstStride, src += w) {
        memcpy(dst, src, w);
    }

    CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
    return pixelBuffer;
}
CVPixelBufferRef 轉 NSData
/// Serializes an NV12 CVPixelBuffer into a tightly packed NSData:
/// the Y plane followed by the interleaved CbCr plane, w*h*3/2 bytes total.
/// @return The packed frame, or nil on allocation failure.
+ (NSData *)dataWithYUVPixelBuffer:(CVPixelBufferRef)pixelBuffer {
    size_t width = CVPixelBufferGetWidth(pixelBuffer);
    size_t height = CVPixelBufferGetHeight(pixelBuffer);
    // Integer arithmetic for the byte count (the original used the
    // floating-point expression width*height*1.5 for both malloc and length).
    size_t length = width * height + (width * height) / 2;
    unsigned char *buffer = (unsigned char *)malloc(length);
    if (buffer == NULL) {
        return nil;
    }
    // Strip row padding while copying both planes out of the pixel buffer.
    [self copyDataFromYUVPixelBuffer:pixelBuffer toBuffer:buffer];
    // Transfer ownership of the malloc'd buffer to NSData — avoids the
    // original's second full-frame copy and the manual free().
    return [NSData dataWithBytesNoCopy:buffer length:length freeWhenDone:YES];
}

/// Copies both planes of a bi-planar (NV12) pixel buffer into `buffer`,
/// removing per-row padding. `buffer` must hold width*height*3/2 bytes.
/// Does nothing if the pixel buffer is not planar.
+ (void)copyDataFromYUVPixelBuffer:(CVPixelBufferRef)pixelBuffer
                          toBuffer:(unsigned char *)buffer {
    // Read-only lock is sufficient: we never write into the pixel buffer.
    CVPixelBufferLockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
    if (CVPixelBufferIsPlanar(pixelBuffer)) {
        size_t w = CVPixelBufferGetWidth(pixelBuffer);
        size_t h = CVPixelBufferGetHeight(pixelBuffer);

        // Luma plane: copy w bytes per row, advancing the source by its
        // (possibly padded) bytes-per-row.
        size_t srcStride = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 0);
        const unsigned char *src =
            (const unsigned char *)CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 0);
        unsigned char *dst = buffer;
        for (size_t row = 0; row < h; ++row, dst += w, src += srcStride) {
            memcpy(dst, src, w);
        }

        // Chroma plane (interleaved CbCr): w bytes per row, h/2 rows.
        srcStride = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 1);
        src = (const unsigned char *)CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 1);
        size_t chromaRows = h >> 1;
        for (size_t row = 0; row < chromaRows; ++row, dst += w, src += srcStride) {
            memcpy(dst, src, w);
        }
    }
    CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
}
CVPixelBufferRef轉UIImage
/// Converts an NV12 CVPixelBuffer to a UIImage with a CPU-side YCbCr→RGB pass.
/// @return The rendered image, or nil on allocation failure.
+ (UIImage *)makeUIImageWithYUVPixelBuffer:(CVPixelBufferRef)pixelBuffer {
    // Read-only lock; plane pointers are only valid while the lock is held.
    CVPixelBufferLockBaseAddress(pixelBuffer, 0);

    size_t width = CVPixelBufferGetWidth(pixelBuffer);
    size_t height = CVPixelBufferGetHeight(pixelBuffer);
    uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddress(pixelBuffer);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer);

    // For bi-planar buffers the base address starts with a
    // CVPlanarPixelBufferInfo_YCbCrBiPlanar header describing both planes.
    CVPlanarPixelBufferInfo_YCbCrBiPlanar *bufferInfo =
        (CVPlanarPixelBufferInfo_YCbCrBiPlanar *)baseAddress;
    // NOTE(review): the header's plane offsets are taken relative to this
    // plane-0 address downstream — confirm against the producing camera path.
    baseAddress = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 0);

    // Convert BEFORE unlocking: the original unlocked first and then read the
    // plane memory, a use-after-unlock.
    UIImage *image = [NSData makeUIImage:baseAddress
                              bufferInfo:bufferInfo
                                   width:width
                                  height:height
                             bytesPerRow:bytesPerRow];

    CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
    return image;
}

/// Converts one NV12 frame (planes described by `inBufferInfo`) into an RGBA
/// bitmap and wraps it in a UIImage. Uses an approximate full-range BT.601
/// YCbCr→RGB conversion. `inBytesPerRow` is accepted for interface
/// compatibility but the per-plane pitches from the header are what is used.
+ (UIImage *)makeUIImage:(uint8_t *)inBaseAddress
              bufferInfo:(CVPlanarPixelBufferInfo_YCbCrBiPlanar *)inBufferInfo
                   width:(size_t)inWidth
                  height:(size_t)inHeight
             bytesPerRow:(size_t)inBytesPerRow {
    // The plane offsets/pitches in the header are stored big-endian.
    NSUInteger yOffset = EndianU32_BtoN(inBufferInfo->componentInfoY.offset);
    NSUInteger yPitch = EndianU32_BtoN(inBufferInfo->componentInfoY.rowBytes);
    NSUInteger cbCrOffset = EndianU32_BtoN(inBufferInfo->componentInfoCbCr.offset);
    NSUInteger cbCrPitch = EndianU32_BtoN(inBufferInfo->componentInfoCbCr.rowBytes);

    const int bytesPerPixel = 4;
    uint8_t *yBuffer = inBaseAddress + yOffset;
    uint8_t *cbCrBuffer = inBaseAddress + cbCrOffset;
    uint8_t *rgbBuffer = (uint8_t *)malloc(inWidth * inHeight * bytesPerPixel);
    if (rgbBuffer == NULL) {
        return nil;
    }

    for (int row = 0; row < (int)inHeight; row++) {
        uint8_t *rgbBufferLine = &rgbBuffer[row * inWidth * bytesPerPixel];
        uint8_t *yBufferLine = &yBuffer[row * yPitch];
        // 4:2:0 vertical subsampling: one CbCr row serves two luma rows.
        uint8_t *cbCrBufferLine = &cbCrBuffer[(row >> 1) * cbCrPitch];
        for (int x = 0; x < (int)inWidth; x++) {
            // `luma` was named `y` in the original, shadowing the row index.
            int16_t luma = yBufferLine[x];
            // Interleaved CbCr: Cb at the even byte, Cr at the odd byte of
            // each horizontal pair.
            int16_t cb = cbCrBufferLine[x & ~1] - 128;
            int16_t cr = cbCrBufferLine[x | 1] - 128;
            uint8_t *rgbOutput = &rgbBufferLine[x * bytesPerPixel];
            int16_t r = (int16_t)roundf(luma + cr * 1.4f);
            int16_t g = (int16_t)roundf(luma + cb * -0.343f + cr * -0.711f);
            int16_t b = (int16_t)roundf(luma + cb * 1.765f);
            // Bytes stored A,B,G,R; with kCGBitmapByteOrder32Little below this
            // is read back as R,G,B,A per 32-bit pixel.
            rgbOutput[0] = 0xff; // opaque alpha
            rgbOutput[1] = (uint8_t)(b > 0 ? (b < 255 ? b : 255) : 0);
            rgbOutput[2] = (uint8_t)(g > 0 ? (g < 255 ? g : 255) : 0);
            rgbOutput[3] = (uint8_t)(r > 0 ? (r < 255 ? r : 255) : 0);
        }
    }

    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    // Use inWidth (not yPitch) so the context geometry matches the rgbBuffer
    // allocation exactly; the original's yPitch/yPitch*4 read past the end of
    // rgbBuffer whenever the Y plane carried row padding.
    CGContextRef context = CGBitmapContextCreate(
        rgbBuffer, inWidth, inHeight, 8, inWidth * bytesPerPixel, colorSpace,
        kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedLast);
    CGImageRef quartzImage = CGBitmapContextCreateImage(context);
    CGContextRelease(context);
    CGColorSpaceRelease(colorSpace);

    UIImage *image = [UIImage imageWithCGImage:quartzImage];
    CGImageRelease(quartzImage);
    free(rgbBuffer);
    rgbBuffer = NULL;
    return image;
}