最近又複習了一下live555的源代碼,並在VS2013下編譯了視頻直播程序。
從視頻讀取一幀後x264編碼,用live555的RTSP傳輸流媒體。
效果清晰流暢(640*480+40幀每秒),很滿意。
附上主要的cpp代碼
/* H264FramedLiveSource.cpp By Chain_Gank */
#include <highgui.h>
#include <cv.h>
#include <stdio.h>
#include <string.h>
#include <stdlib.h>
#include <stdint.h>
//#include <unistd.h>
#include <fcntl.h>
#include "H264FramedLiveSource.hh"

#pragma comment(lib,"libx264.lib")

#define ENCODER_TUNE       "zerolatency"
#define ENCODER_PROFILE    "baseline"
#define ENCODER_PRESET     "veryfast"
#define ENCODER_COLORSPACE X264_CSP_I420
#define CLEAR(x) (memset((&x),0,sizeof(x)))

// Video parameters, used to initialize the encoder and the RGB->YUV conversion.
int FPS, WIDTH, HEIGHT, widthStep;

// Converts an RGB24 image to a planar YUV420 (I420) image.
void Convert(unsigned char *RGB, unsigned char *YUV, unsigned int width, unsigned int height);

// Global holder for the capture handle, the x264 encoder state and the image
// buffers.  Declared once and kept alive for the whole program so that every
// RTSP session reuses the same encoder.
extern class F f;

// Initialize every member of the global object `f`: open the video source,
// allocate the image/YUV buffers and configure + open the x264 encoder.
// Exits the process on any unrecoverable failure.
void F::init()
{
    int ret, frames_total;   // frames_total: total frame count, passed to the encoder

    cap = cvCaptureFromFile("test.mp4");
    //cap = cvCaptureFromAVI("4.avi");
    //cap = cvCaptureFromCAM(0);   //camera
    // FIX: validate the capture handle BEFORE using it.  The original code
    // called cvQueryFrame()/cvGetCaptureProperty() first and only tested
    // `cap` afterwards, so a missing file crashed before the check.
    if (!cap) {
        fprintf(stderr, "Can not open file.\n");
        exit(-1);
    }

    img = cvQueryFrame(cap);
    if (!img) {   // FIX: empty or unreadable stream was previously a NULL deref below
        fprintf(stderr, "Can not read first frame.\n");
        exit(-1);
    }

    // Read the stream properties.
    FPS = (int)cvGetCaptureProperty(cap, CV_CAP_PROP_FPS);
    frames_total = (int)cvGetCaptureProperty(cap, CV_CAP_PROP_FRAME_COUNT) - 1;
    //cvSetCaptureProperty(cap, CV_CAP_PROP_FRAME_WIDTH, 320);    //camera
    //cvSetCaptureProperty(cap, CV_CAP_PROP_FRAME_HEIGHT, 240);   //camera
    HEIGHT = (int)cvGetCaptureProperty(cap, CV_CAP_PROP_FRAME_HEIGHT);
    WIDTH  = (int)cvGetCaptureProperty(cap, CV_CAP_PROP_FRAME_WIDTH);

    // Halve the resolution until it fits within 480x270.
    while (WIDTH > 480 || HEIGHT > 270) {
        HEIGHT /= 2;
        WIDTH  /= 2;
    }
    printf("fps: %d\twidth: %d\theight: %d\n", FPS, WIDTH, HEIGHT);

    // Buffer holding the rescaled frame.
    img2 = cvCreateImage(cvSize(WIDTH, HEIGHT), img->depth, img->nChannels);
    // FIX: use the IplImage's real row stride instead of WIDTH*3.  OpenCV
    // pads every row up to a 4-byte boundary, so for widths that are not a
    // multiple of 4 the stride is larger than WIDTH*3 and indexing with
    // WIDTH*3 would read shifted pixel data.  (Identical when no padding.)
    widthStep = img2->widthStep;

    // `encoder` bundles everything the x264 encoder needs.
    encoder = (my_x264_encoder *)malloc(sizeof(my_x264_encoder));
    if (!encoder){
        printf("cannot malloc my_x264_encoder !\n");
        exit(EXIT_FAILURE);
    }
    CLEAR(*encoder);

    strcpy(encoder->parameter_preset, ENCODER_PRESET);
    strcpy(encoder->parameter_tune, ENCODER_TUNE);

    encoder->x264_parameter = (x264_param_t *)malloc(sizeof(x264_param_t));
    if (!encoder->x264_parameter){
        printf("malloc x264_parameter error!\n");
        exit(EXIT_FAILURE);
    }
    CLEAR(*(encoder->x264_parameter));
    x264_param_default(encoder->x264_parameter);
    if ((ret = x264_param_default_preset(encoder->x264_parameter,
                                         encoder->parameter_preset,
                                         encoder->parameter_tune)) < 0){
        printf("x264_param_default_preset error!\n");
        exit(EXIT_FAILURE);
    }

    /* threading: let x264 choose thread and lookahead counts */
    encoder->x264_parameter->i_threads = X264_SYNC_LOOKAHEAD_AUTO;
    /* video options */
    encoder->x264_parameter->i_width  = WIDTH;          // width of the frames to encode
    encoder->x264_parameter->i_height = HEIGHT;         // height of the frames to encode
    encoder->x264_parameter->i_frame_total = frames_total; // total frames; 0 = unknown
    encoder->x264_parameter->i_keyint_max = 10 * FPS;   // max keyframe interval; lower (2-4s) for fast-moving video
    /* stream parameters */
    // FIX: the original set i_bframe = 5 while its own comment said
    // "disable B-frames".  B-frames contradict the "zerolatency" tune
    // (x264_param_default_preset already set i_bframe to 0, which the 5
    // silently overrode), add encoder delay, and break the naive pts++
    // timestamping in getframe().  Actually disable them.
    encoder->x264_parameter->i_bframe = 0;
    encoder->x264_parameter->b_open_gop = 0;
    encoder->x264_parameter->i_bframe_pyramid = 0;
    encoder->x264_parameter->i_bframe_adaptive = X264_B_ADAPT_TRELLIS;
    /* log options: uncomment to print encoder debug info */
    //encoder->x264_parameter->i_log_level = X264_LOG_DEBUG;
    encoder->x264_parameter->i_fps_num = FPS;   // frame-rate numerator (the original comment said "bitrate" — it is fps)
    encoder->x264_parameter->i_fps_den = 1;     // frame-rate denominator
    // Periodic intra refresh instead of real IDR frames, and Annex-B start
    // codes — live555's H.264 framer expects Annex-B byte streams.
    encoder->x264_parameter->b_intra_refresh = 1;
    encoder->x264_parameter->b_annexb = 1;

    strcpy(encoder->parameter_profile, ENCODER_PROFILE);
    if ((ret = x264_param_apply_profile(encoder->x264_parameter,
                                        encoder->parameter_profile)) < 0){
        printf("x264_param_apply_profile error!\n");
        exit(EXIT_FAILURE);
    }

    /* open the encoder */
    encoder->x264_encoder = x264_encoder_open(encoder->x264_parameter);
    if (!encoder->x264_encoder){   // FIX: was previously unchecked
        printf("x264_encoder_open error!\n");
        exit(EXIT_FAILURE);
    }
    encoder->colorspace = ENCODER_COLORSPACE;

    /* input picture for the encoder */
    encoder->yuv420p_picture = (x264_picture_t *)malloc(sizeof(x264_picture_t));
    if (!encoder->yuv420p_picture){
        printf("malloc encoder->yuv420p_picture error!\n");
        exit(EXIT_FAILURE);
    }
    if ((ret = x264_picture_alloc(encoder->yuv420p_picture, encoder->colorspace,
                                  WIDTH, HEIGHT)) < 0){
        printf("ret=%d\n", ret);
        printf("x264_picture_alloc error!\n");
        exit(EXIT_FAILURE);
    }
    encoder->yuv420p_picture->img.i_csp = encoder->colorspace;
    encoder->yuv420p_picture->img.i_plane = 3;   // planar I420: Y, U, V
    encoder->yuv420p_picture->i_type = X264_TYPE_AUTO;

    /* YUV buffer: this is the raw picture data handed to x264 */
    encoder->yuv = (uint8_t *)malloc(WIDTH * HEIGHT * 3 / 2);
    if (!encoder->yuv){
        printf("malloc yuv error!\n");
        exit(EXIT_FAILURE);
    }
    // FIX: the original CLEAR(*(encoder->yuv)) zeroed exactly ONE byte
    // (sizeof(uint8_t)); clear the whole I420 buffer.
    memset(encoder->yuv, 0, WIDTH * HEIGHT * 3 / 2);
    // NOTE(review): pointing the plane pointers into our own buffer leaks
    // the (one-time) buffer x264_picture_alloc allocated; the strides set
    // by x264_picture_alloc (WIDTH, WIDTH/2, WIDTH/2) remain valid.
    encoder->yuv420p_picture->img.plane[0] = encoder->yuv;
    encoder->yuv420p_picture->img.plane[1] = encoder->yuv + WIDTH * HEIGHT;
    encoder->yuv420p_picture->img.plane[2] = encoder->yuv + WIDTH * HEIGHT + WIDTH * HEIGHT / 4;

    n_nal = 0;   // number of NAL units produced for the current frame
    // FIX: x264_encoder_encode() makes `encoder->nal` point at x264's own
    // internal NAL array, so the original malloc(3 * sizeof(x264_nal_t))
    // was leaked by the very first encode call.  No allocation is needed.
    encoder->nal = NULL;

    // Packed RGB staging buffer used to extract pixel data from the IplImage.
    RGB1 = (unsigned char *)malloc(HEIGHT * WIDTH * 3);
    if (!RGB1){   // FIX: was previously unchecked
        printf("malloc RGB1 error!\n");
        exit(EXIT_FAILURE);
    }
    /**************************************************************************/
    /*                         initialization complete                        */
    /**************************************************************************/
}

H264FramedLiveSource::H264FramedLiveSource(UsageEnvironment& env,
                                           unsigned preferredFrameSize,
                                           unsigned playTimePerFrame)
    : FramedSource(env)
{
}

H264FramedLiveSource* H264FramedLiveSource::createNew(UsageEnvironment& env,
                                                      unsigned preferredFrameSize,
                                                      unsigned playTimePerFrame)
{
    H264FramedLiveSource* newSource =
        new H264FramedLiveSource(env, preferredFrameSize, playTimePerFrame);
    return newSource;
}

H264FramedLiveSource::~H264FramedLiveSource()
{
    printf("~~~~~~~~~~~~~~~\n");
    // Cleanup stays commented out on purpose: the capture/encoder state
    // lives in the global `f` and must survive this per-session source
    // object, otherwise the next RTSP session would use freed state.
    /*free(RGB1);
    cvReleaseCapture(&cap);
    free(encoder->yuv);
    free(encoder->yuv420p_picture);
    free(encoder->x264_parameter);
    x264_encoder_close(encoder->x264_encoder);
    free(encoder);*/
}

// Grab the next frame from the capture, convert it to I420 and encode it.
// The resulting NAL units are left in encoder->nal / n_nal.
void F::getframe()
{
    printf("#");   // progress marker: one '#' per captured frame
    img = cvQueryFrame(cap);
    if (!img) {
        // FIX: cvQueryFrame() returns NULL at end-of-stream (or on a read
        // error); the original passed the NULL pointer straight into
        // cvResize() and crashed.  Exit cleanly instead.
        printf("end of stream\n");
        exit(EXIT_SUCCESS);
    }
    cvResize(img, img2);   // rescale to the WIDTH x HEIGHT chosen in init()

    // Extract the pixel data from the IplImage.  OpenCV stores BGR, so the
    // first and third channels are swapped to produce RGB for Convert().
    for (int i = 0; i < HEIGHT; i++) {
        for (int j = 0; j < WIDTH; j++) {
            RGB1[(i * WIDTH + j) * 3]     = img2->imageData[i * widthStep + j * 3 + 2];
            RGB1[(i * WIDTH + j) * 3 + 1] = img2->imageData[i * widthStep + j * 3 + 1];
            RGB1[(i * WIDTH + j) * 3 + 2] = img2->imageData[i * widthStep + j * 3];
        }
    }

    // RGB -> planar YUV420, written into the buffer the encoder picture
    // planes point at.
    Convert(RGB1, encoder->yuv, WIDTH, HEIGHT);

    encoder->yuv420p_picture->i_pts++;   // pts must increase monotonically
    // Encode one frame; x264 points encoder->nal at its NAL array and sets
    // n_nal to the number of NAL units produced.
    if (x264_encoder_encode(encoder->x264_encoder, &encoder->nal, &n_nal,
                            encoder->yuv420p_picture, &pic_out) < 0) {
        printf("x264_encoder_encode error!\n");
        exit(EXIT_FAILURE);
    }
}

// live555 callback: deliver one encoded frame (all of its NAL units) into
// the sink's buffer `fTo`, then schedule afterGetting().
void H264FramedLiveSource::doGetNextFrame()
{
    fFrameSize = 0;
    f.getframe();

    // Copy every NAL unit of the encoded frame into fTo.  p_payload is the
    // NAL data (Annex-B, b_annexb=1), i_payload its size in bytes.
    for (f.my_nal = f.encoder->nal; f.my_nal < f.encoder->nal + f.n_nal; ++(f.my_nal)) {
        // Stop if the next NAL unit would overflow live555's buffer; the
        // truncated remainder of this frame is dropped.
        if (fFrameSize + f.my_nal->i_payload > fMaxSize) {
            printf("fMaxSize!!!!!!!!!!!!\n");
            break;
        }
        memcpy((unsigned char*)fTo + fFrameSize, f.my_nal->p_payload, f.my_nal->i_payload);
        fFrameSize += f.my_nal->i_payload;
    }

    // Hand control back to live555; afterGetting() consumes fTo/fFrameSize
    // and eventually calls doGetNextFrame() again.
    nextTask() = envir().taskScheduler().scheduleDelayedTask(
        0, (TaskFunc*)FramedSource::afterGetting, this);
    return;
}