The Camera is responsible for capturing frames; the captured data is handed to x264 for encoding, packaged, and pushed to the server over RTMP.
The data the Camera delivers is NV21, while the input format x264 expects is I420, so a conversion is needed.
NV21 and I420 both belong to the YUV420 family. NV21 is a two-plane layout: Y and UV are split into two planes rather than three, with the chroma bytes interleaved as VU (Cr, Cb). This arrangement is called YUV420SP, while I420 is called YUV420P. (Y: luminance/brightness; UV: chrominance/saturation.)
For a 4x4 frame, NV21 is laid out as below; Y1, Y2, Y5 and Y6 share V1 and U1, and so on:

Y1  Y2  Y3  Y4
Y5  Y6  Y7  Y8
Y9  Y10 Y11 Y12
Y13 Y14 Y15 Y16
V1  U1  V2  U2
V3  U3  V4  U4

I420 stores the same samples with the chroma planes separated:

Y1  Y2  Y3  Y4
Y5  Y6  Y7  Y8
Y9  Y10 Y11 Y12
Y13 Y14 Y15 Y16
U1  U2  U3  U4
V1  V2  V3  V4
Whichever arrangement is used, the data size of a YUV420 frame is w*h + (w/2)*(h/2) + (w/2)*(h/2), i.e. w*h*3/2 bytes. For a 1280x720 frame, for example, that is 1280*720*3/2 = 1,382,400 bytes.
Converting NV21 to I420 therefore works as follows:
the Y data is copied through in order in one piece; the U data is read starting one byte past the end of the Y plane, taking every second byte; the V data is read starting right at the end of the Y plane, likewise taking every second byte.
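This conversion can be sketched as a standalone function (a minimal sketch; the name nv21ToI420 is illustrative, and the project's actual conversion happens inside VideoChannel::encodeData further below):

#include <cstdint>
#include <cstring>

// NV21 layout: YYYY... VUVU...   I420 layout: YYYY... UU... VV...
void nv21ToI420(const uint8_t *nv21, uint8_t *i420, int width, int height) {
    int ySize = width * height;
    int uvSize = ySize / 4;
    //the Y plane is copied through unchanged
    memcpy(i420, nv21, ySize);
    uint8_t *u = i420 + ySize;
    uint8_t *v = i420 + ySize + uvSize;
    for (int i = 0; i < uvSize; ++i) {
        //NV21 interleaves V first, then U
        v[i] = nv21[ySize + i * 2];
        u[i] = nv21[ySize + i * 2 + 1];
    }
}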
The sensor's natural orientation differs from the screen's: the image sensor's coordinate system has to be rotated 90 degrees counter-clockwise to map onto the screen's, so the raw picture appears rotated 90 degrees counter-clockwise, and we must rotate the image 90 degrees clockwise to see it upright. The Camera object provides a setDisplayOrientation interface for setting the preview display angle.
According to the documentation, once the Camera is configured this way the preview does display correctly, but the data delivered to the onPreviewFrame callback is still rotated 90 degrees counter-clockwise. So if we want to use the preview callback data, we also have to rotate the byte[] received in onPreviewFrame, i.e. rotate the NV21 data 90 degrees clockwise.
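The rotation code itself is not shown in this article; a minimal sketch of rotating an NV21 buffer 90 degrees clockwise might look like this (rotateNV21Cw90 is an illustrative name; note that width and height swap in the output, so the encoder must be configured with the rotated dimensions):

#include <cstdint>

void rotateNV21Cw90(const uint8_t *src, uint8_t *dst, int width, int height) {
    int ySize = width * height;
    int i = 0;
    //Y plane: walk source columns left to right, each column bottom to top
    for (int x = 0; x < width; ++x) {
        for (int y = height - 1; y >= 0; --y) {
            dst[i++] = src[y * width + x];
        }
    }
    //VU plane: same walk, but move in 2-byte (V, U) pairs
    for (int x = 0; x < width; x += 2) {
        for (int y = height / 2 - 1; y >= 0; --y) {
            dst[i++] = src[ySize + y * width + x];     //V
            dst[i++] = src[ySize + y * width + x + 1]; //U
        }
    }
}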
The Camera passes the byte[] data to the native layer through a PreviewCallback. At init time the native side prepares an encoder and a queue for buffering the packed data; the encoder handle x264_t *videoCodec = 0; lives in VideoChannel.cpp.
//native-lib.cpp
//the packet queue
SafeQueue<RTMPPacket *> packets;
VideoChannel *videoChannel = 0;
extern "C"
JNIEXPORT void JNICALL Java_com_tina_pushstream_live_LivePusher_native_1init(JNIEnv *env, jobject instance) {
    //prepare a video encoder helper class that does the encoding
    videoChannel = new VideoChannel;
    videoChannel->setVideoCallback(callback);
    //prepare a queue: packed data goes into it, and a thread pops it and sends it to the server
    packets.setReleaseCallback(releasePackets);
}
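The SafeQueue and releasePackets used here are not shown in the article. A minimal sketch of what they might look like, assuming a pthread mutex/condition-variable guarded queue with the same push/pop/setWork/setReleaseCallback interface:

#include <queue>
#include <pthread.h>

template<typename T>
class SafeQueue {
public:
    typedef void (*ReleaseCallback)(T &);
    SafeQueue() {
        pthread_mutex_init(&mutex, 0);
        pthread_cond_init(&cond, 0);
    }
    void push(T value) {
        pthread_mutex_lock(&mutex);
        if (work) {
            q.push(value);
            pthread_cond_signal(&cond);
        } else if (releaseCallback) {
            //queue is not working: drop and free the element
            releaseCallback(value);
        }
        pthread_mutex_unlock(&mutex);
    }
    void pop(T &value) {
        pthread_mutex_lock(&mutex);
        //block until data arrives or setWork(0) wakes us up
        while (work && q.empty()) {
            pthread_cond_wait(&cond, &mutex);
        }
        if (!q.empty()) {
            value = q.front();
            q.pop();
        }
        pthread_mutex_unlock(&mutex);
    }
    void setWork(int w) {
        pthread_mutex_lock(&mutex);
        work = w;
        pthread_cond_signal(&cond);
        pthread_mutex_unlock(&mutex);
    }
    void setReleaseCallback(ReleaseCallback cb) { releaseCallback = cb; }
private:
    std::queue<T> q;
    pthread_mutex_t mutex;
    pthread_cond_t cond;
    int work = 0;
    ReleaseCallback releaseCallback = 0;
};

releasePackets would then free a popped or dropped packet, e.g.:

void releasePackets(RTMPPacket *&packet) {
    if (packet) {
        RTMPPacket_Free(packet);
        delete packet;
        packet = 0;
    }
}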
Create the encoder in VideoChannel and set its parameters:
// VideoChannel.h / VideoChannel.cpp
x264_t *videoCodec = 0;
//set the encoder parameters
void VideoChannel::setVideoEncInfo(int width, int height, int fps, int bitrate) {
    pthread_mutex_lock(&mutex);
    mWidth = width;
    mHeight = height;
    mFps = fps;
    mBitrate = bitrate;
    ySize = width * height;
    uvSize = ySize / 4;
    if (videoCodec) {
        x264_encoder_close(videoCodec);
        videoCodec = 0;
    }
    if (pic_in) {
        x264_picture_clean(pic_in);
        delete pic_in;
        pic_in = 0;
    }
    //open the x264 encoder
    //x264 encoder parameters
    x264_param_t param;
    //ultrafast: the fastest preset
    //zerolatency: tune for zero-latency (live) encoding
    x264_param_default_preset(&param, "ultrafast", "zerolatency");
    //baseline level 3.2
    param.i_level_idc = 32;
    //input data format
    param.i_csp = X264_CSP_I420;
    param.i_width = width;
    param.i_height = height;
    //no B-frames
    param.i_bframe = 0;
    //i_rc_method selects rate control: CQP (constant QP), CRF (constant rate factor, i.e. constant quality), ABR (average bitrate)
    param.rc.i_rc_method = X264_RC_ABR;
    //bitrate (in kbps)
    param.rc.i_bitrate = bitrate / 1000;
    //instantaneous maximum bitrate
    param.rc.i_vbv_max_bitrate = bitrate / 1000 * 1.2;
    //must be set when i_vbv_max_bitrate is set: the rate-control buffer size, in kbps
    param.rc.i_vbv_buffer_size = bitrate / 1000;
    //frame rate
    param.i_fps_num = fps;
    param.i_fps_den = 1;
    param.i_timebase_den = param.i_fps_num;
    param.i_timebase_num = param.i_fps_den;
    // param.pf_log = x264_log_default2;
    //use fps rather than timestamps to compute frame distance
    param.b_vfr_input = 0;
    //keyframe interval: one keyframe every 2 seconds
    param.i_keyint_max = fps * 2;
    //whether to copy sps/pps in front of every keyframe: setting this makes each keyframe (I-frame) carry its own sps/pps
    param.b_repeat_headers = 1;
    //number of threads
    param.i_threads = 1;
    x264_param_apply_profile(&param, "baseline");
    //open the encoder: videoCodec
    videoCodec = x264_encoder_open(&param);
    pic_in = new x264_picture_t;
    x264_picture_alloc(pic_in, X264_CSP_I420, width, height);
    pthread_mutex_unlock(&mutex);
}
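The VideoChannel header is only quoted in fragments; the members the snippets above and below rely on would be roughly as follows (a hedged sketch, not the original header):

// VideoChannel.h (sketch)
#include <cstdint>
#include <pthread.h>
#include <x264.h>
#include "rtmp.h"

class VideoChannel {
public:
    typedef void (*VideoCallback)(RTMPPacket *packet);
    void setVideoEncInfo(int width, int height, int fps, int bitrate);
    void setVideoCallback(VideoCallback callback);
    void encodeData(int8_t *data);
private:
    void sendSpsPps(uint8_t *sps, uint8_t *pps, int sps_len, int pps_len);
    void sendFrame(int type, int payload, uint8_t *p_payload);
    pthread_mutex_t mutex;      //initialized in the constructor (omitted)
    int mWidth, mHeight, mFps, mBitrate;
    int ySize, uvSize;
    int index = 0;              //running pts counter used in encodeData
    x264_t *videoCodec = 0;
    x264_picture_t *pic_in = 0;
    VideoCallback callback = 0;
};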
native_start starts a thread that connects to the server. RTMP, like HTTP, is an application-layer protocol on top of TCP, which is why the connection is established inside the start method.
//LivePusher calls native_start()
public void startLive(String path) {
    native_start(path);
    videoChannel.startLive();
    audioChannel.startLive();
}
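Note that native_start only spawns the thread that connects; isLiving becomes true right away, so frames can arrive before the connection is up. That is why native_pushVideo (below) checks readyPushing, which is only set after RTMP_ConnectStream succeeds, and drops frames until then.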
On the native side, RTMP connects to the server: first start a thread, then open the connection in the thread entry:
//native-lib.cpp
extern "C"
JNIEXPORT void JNICALL Java_com_tina_pushstream_live_LivePusher_native_1start(JNIEnv *env, jobject instance, jstring path_) {
    if (isStart) {
        return;
    }
    const char *path = env->GetStringUTFChars(path_, 0);
    //copy the url: the thread may outlive the JNI string, which is released below
    char *url = new char[strlen(path) + 1];
    strcpy(url, path);
    isStart = 1;
    //start the thread
    pthread_create(&pid, 0, start, url);
    env->ReleaseStringUTFChars(path_, path);
}
//thread entry: RTMP connects to the server
void *start(void *args) {
    char *url = static_cast<char *>(args);
    RTMP *rtmp = 0;
    do {
        rtmp = RTMP_Alloc();
        if (!rtmp) {
            LOGE("failed to allocate rtmp");
            break;
        }
        RTMP_Init(rtmp);
        //set the timeout to 5s
        rtmp->Link.timeout = 5;
        int ret = RTMP_SetupURL(rtmp, url);
        if (!ret) {
            LOGE("rtmp failed to set url: %s", url);
            break;
        }
        //enable write (output) mode
        RTMP_EnableWrite(rtmp);
        ret = RTMP_Connect(rtmp, 0);
        if (!ret) {
            LOGE("rtmp failed to connect: %s", url);
            break;
        }
        ret = RTMP_ConnectStream(rtmp, 0);
        if (!ret) {
            LOGE("rtmp failed to connect stream: %s", url);
            break;
        }
        //ready: pushing can begin
        readyPushing = 1;
        //record the time pushing started
        start_time = RTMP_GetTime();
        packets.setWork(1);
        RTMPPacket *packet = 0;
        //loop: pop packets off the queue and send them
        while (isStart) {
            packets.pop(packet);
            if (!isStart) {
                break;
            }
            if (!packet) {
                continue;
            }
            //set the rtmp stream id on the packet
            packet->m_nInfoField2 = rtmp->m_stream_id;
            //send the packet; 1: let librtmp queue the send
            ret = RTMP_SendPacket(rtmp, packet, 1);
            releasePackets(packet);
            if (!ret) {
                LOGE("failed to send data");
                break;
            }
        }
        releasePackets(packet);
    } while (0);
    if (rtmp) {
        RTMP_Close(rtmp);
        RTMP_Free(rtmp);
    }
    //url was allocated with new[], so it must be freed with delete[]
    delete[] url;
    return 0;
}
The whole flow inside start above: allocate and initialize the RTMP object, set the URL, enable write mode, connect to the server, connect to the stream, mark the pusher as ready, then loop popping packets off the queue and sending them until isStart goes false.
Once start has connected, pushing video data begins:
//VideoChannel: videoChannel.startLive() is called from LivePusher's startLive
public void startLive() {
    isLiving = true;
}
//in the PreviewCallback, isLiving is now true, so every frame is handed to native_pushVideo
@Override
public void onPreviewFrame(byte[] data, Camera camera) {
    if (isLiving) {
        mLivePusher.native_pushVideo(data);
    }
}
The NV21 data captured by the Camera has to be converted to the I420 that x264 expects:
extern "C"
JNIEXPORT void JNICALL Java_com_tina_pushstream_live_LivePusher_native_1pushVideo(JNIEnv *env, jobject instance,jbyteArray data_) {
if (!videoChannel || !readyPushing) {
return;
}
jbyte *data = env->GetByteArrayElements(data_, NULL);
videoChannel->encodeData(data);
env->ReleaseByteArrayElements(data_, data, 0);
}
Based on the different YUV layouts of NV21 and I420, the converted data is stored into x264_picture_t *pic_in = 0;
//the picture being encoded
x264_picture_t *pic_in = 0;
//encode: convert NV21 into I420
void VideoChannel::encodeData(int8_t *data) {
    pthread_mutex_lock(&mutex);
    //copy data into pic_in
    //Y data
    memcpy(pic_in->img.plane[0], data, ySize);
    for (int i = 0; i < uvSize; ++i) {
        //take one byte every other byte
        //U data
        *(pic_in->img.plane[1] + i) = *(data + ySize + i * 2 + 1);
        //V data
        *(pic_in->img.plane[2] + i) = *(data + ySize + i * 2);
    }
    pic_in->i_pts = index++;
    //the encoded output
    x264_nal_t *pp_nal;
    //how many NALUs were produced (loosely, units of a frame)
    int pi_nal;
    x264_picture_t pic_out;
    //encode
    int ret = x264_encoder_encode(videoCodec, &pp_nal, &pi_nal, pic_in, &pic_out);
    if (ret < 0) {
        pthread_mutex_unlock(&mutex);
        return;
    }
    int sps_len, pps_len;
    uint8_t sps[100];
    uint8_t pps[100];
    for (int i = 0; i < pi_nal; ++i) {
        //NALU type
        if (pp_nal[i].i_type == NAL_SPS) {
            //strip the 00 00 00 01 start code
            sps_len = pp_nal[i].i_payload - 4;
            memcpy(sps, pp_nal[i].p_payload + 4, sps_len);
        } else if (pp_nal[i].i_type == NAL_PPS) {
            pps_len = pp_nal[i].i_payload - 4;
            memcpy(pps, pp_nal[i].p_payload + 4, pps_len);
            //having the PPS means the SPS has already arrived (x264 emits SPS before PPS)
            sendSpsPps(sps, pps, sps_len, pps_len);
        } else {
            //keyframe or non-keyframe
            sendFrame(pp_nal[i].i_type, pp_nal[i].i_payload, pp_nal[i].p_payload);
        }
    }
    pthread_mutex_unlock(&mutex);
}
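Note that a single x264_encoder_encode call can emit several NALUs: with b_repeat_headers = 1, a keyframe typically arrives together with its SPS and PPS (and possibly SEI) NALUs, which is why the loop walks all pi_nal entries and dispatches each one by type.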
Assembling the SPS/PPS packet and the frame packets:
//packing the data; the byte-assembly details are omitted
void VideoChannel::sendSpsPps(uint8_t *sps, uint8_t *pps, int sps_len, int pps_len) {
    RTMPPacket *packet = new RTMPPacket;
    int bodysize = 13 + sps_len + 3 + pps_len;
    RTMPPacket_Alloc(packet, bodysize);
    int i = 0;
    //fixed header
    packet->m_body[i++] = 0x17;
    ......
    ......
    //sps/pps carry no timestamp
    packet->m_nTimeStamp = 0;
    //don't use absolute time
    packet->m_hasAbsTimestamp = 0;
    packet->m_headerType = RTMP_PACKET_SIZE_MEDIUM;
    callback(packet);
}
void VideoChannel::sendFrame(int type, int payload, uint8_t *p_payload) {
    //strip the 00 00 00 01 / 00 00 01 start code
    if (p_payload[2] == 0x00) {
        payload -= 4;
        p_payload += 4;
    } else if (p_payload[2] == 0x01) {
        payload -= 3;
        p_payload += 3;
    }
    RTMPPacket *packet = new RTMPPacket;
    int bodysize = 9 + payload;
    .........
    .......
    packet->m_packetType = RTMP_PACKET_TYPE_VIDEO;
    packet->m_nChannel = 0x10;
    packet->m_headerType = RTMP_PACKET_SIZE_LARGE;
    //hand the packet off through the function pointer
    callback(packet);
}
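For reference, the packing elided above follows the standard FLV AVC sequence-header layout (AVCDecoderConfigurationRecord). A hedged sketch of what the omitted lines of sendSpsPps assemble, matching bodysize = 13 + sps_len + 3 + pps_len; the field comments come from the FLV spec, not necessarily the author's exact code:

packet->m_body[i++] = 0x17; //1: keyframe, 7: AVC
packet->m_body[i++] = 0x00; //AVC packet type: 0 = sequence header
packet->m_body[i++] = 0x00; //composition time: 3 bytes of 0
packet->m_body[i++] = 0x00;
packet->m_body[i++] = 0x00;
//AVCDecoderConfigurationRecord
packet->m_body[i++] = 0x01;   //configurationVersion
packet->m_body[i++] = sps[1]; //AVCProfileIndication
packet->m_body[i++] = sps[2]; //profile_compatibility
packet->m_body[i++] = sps[3]; //AVCLevelIndication
packet->m_body[i++] = 0xFF;   //NALU lengths are 4 bytes
packet->m_body[i++] = 0xE1;   //number of SPS: 1
packet->m_body[i++] = (sps_len >> 8) & 0xFF;
packet->m_body[i++] = sps_len & 0xFF;
memcpy(&packet->m_body[i], sps, sps_len);
i += sps_len;
packet->m_body[i++] = 0x01;   //number of PPS: 1
packet->m_body[i++] = (pps_len >> 8) & 0xFF;
packet->m_body[i++] = pps_len & 0xFF;
memcpy(&packet->m_body[i], pps, pps_len);

sendFrame is analogous: 9 header bytes (0x17 for a keyframe or 0x27 otherwise, 0x01 for an AVC NALU, three zero bytes of composition time, then the NALU length as 4 bytes) followed by the payload, which is exactly bodysize = 9 + payload.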
Finally, the packet is put onto the queue through the function pointer:
//native-lib.cpp
void callback(RTMPPacket *packet) {
    if (packet) {
        //set the timestamp, relative to the start of the push
        packet->m_nTimeStamp = RTMP_GetTime() - start_time;
        //push into the queue here; start pops the data back out and sends it
        packets.push(packet);
    }
}
The queue is drained in start once the connection has succeeded, which completes the whole video-push pipeline:
//loop: pop packets off the queue and send them
while (isStart) {
    packets.pop(packet);
    if (!isStart) {
        break;
    }
    if (!packet) {
        continue;
    }
    //set the rtmp stream id on the packet
    packet->m_nInfoField2 = rtmp->m_stream_id;
    //send the packet; 1: let librtmp queue the send
    ret = RTMP_SendPacket(rtmp, packet, 1);
    releasePackets(packet);
    if (!ret) {
        LOGE("failed to send data");
        break;
    }
}
releasePackets(packet);
Finally, use VLC to pull the stream from the server and play it: open the rtmp:// push URL via Media > Open Network Stream.