#include <stdio.h> #include <libavcodec/avcodec.h> #include <libavformat/avformat.h> #include <libswscale/swscale.h> #include <libavutil/imgutils.h> void SaveFrame(AVFrame *pFrame, int width, int height, int iFrame); int main(int argc, char *argv[]) { // 獲取AVFormatContext句柄 AVFormatContext *pFormatCtx = avformat_alloc_context(); // 打開一個流媒體文件 open video file if (avformat_open_input(&pFormatCtx, argv[1], NULL, NULL)) { fprintf(stderr, "open input failed\n"); return -1; } // 獲取多媒體信息,存至句柄 retrieve stream information if (avformat_find_stream_info(pFormatCtx, NULL) < 0) { fprintf(stderr, "find stream info failed\n"); return -1; } // 打印多媒體信息 dump information about file onto standard error av_dump_format(pFormatCtx, 0, argv[1], 0); // 經過句柄,找出視頻流 find the video stream int videoStream = -1; videoStream = av_find_best_stream(pFormatCtx, AVMEDIA_TYPE_VIDEO, -1, -1, NULL, 0); if (-1 == videoStream) { fprintf(stderr, "Can not find video stream!\n"); return -1; } AVCodecParameters *pCodecPar = NULL; pCodecPar = pFormatCtx->streams[videoStream]->codecpar; // 搜索合適的視頻解碼器 find the decoder for the video stream AVCodec *pCodec = NULL; pCodec = avcodec_find_decoder(pCodecPar->codec_id); if (NULL == pCodec) { fprintf(stderr, "Unsupported codec!\n"); return -1; } AVCodecContext *pCodecCtx = NULL; pCodecCtx = avcodec_alloc_context3(pCodec); // 由於 AVStream::codec 被棄用,AVCodecContext 須要經過 AVCodecParameters 轉換獲得 avcodec_parameters_to_context(pCodecCtx, pCodecPar); // 打開視頻解碼器 open Codec if (avcodec_open2(pCodecCtx, pCodec, NULL)) { fprintf(stderr, "Codec open failed!\n"); return -1; } AVFrame *pFrame = NULL; AVFrame *pFrameRGB = NULL; // 分配兩個視頻幀,pFrame保存原始幀,pFrameRGB存放轉換後的RGB幀 Allocate video frame pFrame = av_frame_alloc(); pFrameRGB = av_frame_alloc(); if (NULL == pFrameRGB || NULL == pFrame) { fprintf(stderr, "Alloc frame failed!\n"); return -1; } uint8_t *buffer = NULL; int numBytes = 0; // 計算解碼後原始數據所需緩衝區大小,並分配內存空間 Determine required buffer size and allocate buffer numBytes = 
av_image_get_buffer_size(AV_PIX_FMT_RGB24, pCodecCtx->width, pCodecCtx->height, 1); buffer = (uint8_t *)av_malloc(numBytes * sizeof(uint8_t)); av_image_fill_arrays(pFrameRGB->data, pFrameRGB->linesize, buffer, AV_PIX_FMT_RGB24, pCodecCtx->width, pCodecCtx->height, 1); int frameFinished = 0; AVPacket packet = {0}; int i = 0; struct SwsContext *img_convert_ctx = NULL; // 獲取swscale句柄 img_convert_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt, pCodecCtx->width, pCodecCtx->height, AV_PIX_FMT_RGB24, SWS_BICUBIC, NULL, NULL, NULL); while (av_read_frame(pFormatCtx, &packet) >= 0) { // Is this a packet from the video stream? if (packet.stream_index == videoStream) { // Decode video frame int ret = avcodec_send_packet(pCodecCtx, &packet); if (0 != ret) continue; while (avcodec_receive_frame(pCodecCtx, pFrame) == 0) { // Convert the image from its native format to RGB //img_convert((AVPicture *)pFrameRGB, AV_PIX_FMT_RGB24, (AVPicture *)pFrame, // pCodecCtx->pix_fmt, pCodecCtx->width, pCodecCtx->height); sws_scale(img_convert_ctx, (const unsigned char* const*)pFrame->data, pFrame->linesize, 0, pCodecCtx->height, pFrameRGB->data, pFrameRGB->linesize); // Save the frame to disk if (++i <= 5) SaveFrame(pFrameRGB, pCodecCtx->width, pCodecCtx->height, i); else break; } #if 0 avcodec_decode_video(pCodecCtx, pFrame, &frameFinished, packet.data, packet.size); // Did we get a video frame? 
if (frameFinished) { // Convert the image from its native format to RGB img_convert((AVPicture *)pFrameRGB, AV_PIX_FMT_RGB24, (AVPicture *)pFrame, pCodecCtx->pix_fmt, pCodecCtx->width, pCodecCtx->height); // Save the frame to disk if (++i <= 5) SaveFrame(pFrameRGB, pCodecCtx->width, pCodecCtx->height, i); } #endif } } // Free the packet that was allocate by av_read_frame av_packet_unref(&packet); // Free the RGB image av_free(buffer); av_free(pFrameRGB); // Free the YUV frame av_free(pFrame); // Close the codec avcodec_close(pCodecCtx); // Close the video file avformat_close_input(&pFormatCtx); return 0; } void SaveFrame(AVFrame *pFrame, int width, int height, int iFrame) { FILE *pFile = NULL; char szFilename[32] = {0}; int y = 0; // Open file sprintf(szFilename, "./raw/frame%d.ppm", iFrame); pFile = fopen(szFilename, "wb"); if (NULL == pFile) return; // Write header fprintf(pFile, "P6\n%d %d\n255\n", width, height); // Write pixel data for (y = 0; y < height; y++) fwrite(pFrame->data[0] + y * pFrame->linesize[0], 1, width * 3, pFile); // Close file fclose(pFile); }
Makefile:
# Build yuv2rgb against an FFmpeg installation under $(FFMPEG).
CC         = gcc
FFMPEG     = /usr/local
CFLAGS     = -I$(FFMPEG)/include/
LDFLAGS    = -L$(FFMPEG)/lib/
# Link libraries (kept under the original variable name). They belong on
# the link line only, AFTER the object files, so the linker can resolve
# the objects' undefined symbols against them.
CLIBSFLAGS = -lavformat -lavcodec -lavutil -lswresample -lswscale -lz -lpthread -lm

all: yuv2rgb

yuv2rgb: yuv2rgb.o
	$(CC) -o yuv2rgb yuv2rgb.o $(LDFLAGS) $(CLIBSFLAGS)

# Compile-only step: libraries removed (they are meaningless with -c).
yuv2rgb.o: yuv2rgb.c
	$(CC) $(CFLAGS) -c -o yuv2rgb.o yuv2rgb.c

# .PHONY keeps `all`/`clean` working even if files with those names exist;
# rm -f keeps `clean` from failing when the artifacts are already gone.
.PHONY: all clean
clean:
	rm -f yuv2rgb yuv2rgb.o