MediaCodec is the hardware encode/decode API introduced in Android API 16; see the official English documentation (a Chinese translation is also available) for the full reference. This article records how to use MediaCodec to encode and decode video, and finishes with an example that encodes Camera preview data to H.264, then decodes that H.264 and displays it in a SurfaceView. Audio encoding/decoding is not covered here.
The steps for encoding video with MediaCodec are as follows:
1. Initialize the MediaCodec. A codec can be created either by name or by MIME type, using the corresponding factory methods:
MediaCodec createByCodecName (String name);
MediaCodec createEncoderByType (String type);
MediaCodec createDecoderByType (String type);
The available names and types are listed in the documentation. Here we create a video encoder by type:
mMC = MediaCodec.createEncoderByType(MIME_TYPE);
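Which names and types are available on a given device, and which color formats each codec accepts, can be queried at runtime through MediaCodecList. The following is a minimal sketch (the class and method names are mine, not part of the original project) that lists hardware AVC encoders and the color formats they support; a value from caps.colorFormats is the kind of value stored later in mPrimeColorFormat.

// Sketch: enumerate codecs and list the color formats of each AVC encoder found.
import android.media.MediaCodecInfo;
import android.media.MediaCodecList;
import android.util.Log;

public class CodecProbe {
    public static void dumpAvcEncoders() {
        for (int i = 0; i < MediaCodecList.getCodecCount(); i++) {
            MediaCodecInfo info = MediaCodecList.getCodecInfoAt(i);
            if (!info.isEncoder()) continue;
            for (String type : info.getSupportedTypes()) {
                if (!type.equalsIgnoreCase("video/avc")) continue;
                MediaCodecInfo.CodecCapabilities caps = info.getCapabilitiesForType(type);
                StringBuilder sb = new StringBuilder();
                for (int color : caps.colorFormats) {
                    // e.g. 19 = COLOR_FormatYUV420Planar, 21 = COLOR_FormatYUV420SemiPlanar
                    sb.append(color).append(' ');
                }
                Log.i("CodecProbe", info.getName() + " supports color formats: " + sb);
            }
        }
    }
}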
2. Configure the MediaCodec. This step sets up the MediaFormat, which carries the bit rate, frame rate, key-frame interval and so on. If the bit rate is set too low, the picture will show mosaic-like blocking artifacts.
mMF = MediaFormat.createVideoFormat(MIME_TYPE, width, height);
mMF.setInteger(MediaFormat.KEY_BIT_RATE, bitrate);
mMF.setInteger(MediaFormat.KEY_FRAME_RATE, framerate);
if (mPrimeColorFormat != 0){
    mMF.setInteger(MediaFormat.KEY_COLOR_FORMAT, mPrimeColorFormat);
}
mMF.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1); // key frame interval, in seconds
mMC.configure(mMF, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
Here mPrimeColorFormat is a color format supported by the device's encoder, usually YUV420 planar or YUV420 semi-planar. The Camera preview format, however, is usually YV12 or NV21, so the frames need to be converted before encoding; see the code at the end of this article for an example. Code is the best teacher, after all.
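For reference, converting a YV12 preview buffer to the I420 (YUV420 planar) layout expected by COLOR_FormatYUV420Planar only requires swapping the chroma planes: YV12 stores Y, then all of V, then all of U, while I420 stores Y, then U, then V. A minimal sketch (the helper name is mine; it assumes the planes are tightly packed with no stride padding):

// Sketch: YV12 (Y + V + U) -> I420 (Y + U + V) by swapping the chroma planes.
public static void yv12ToI420(byte[] yv12, byte[] i420, int width, int height) {
    int ySize = width * height;
    int chromaSize = ySize / 4;
    // Y plane is identical in both layouts
    System.arraycopy(yv12, 0, i420, 0, ySize);
    // V plane (first chroma plane in YV12) goes after the U plane in I420
    System.arraycopy(yv12, ySize, i420, ySize + chromaSize, chromaSize);
    // U plane (second chroma plane in YV12) goes right after Y in I420
    System.arraycopy(yv12, ySize + chromaSize, i420, ySize, chromaSize);
}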
3. Start the encoder and fetch the input and output buffer arrays:
mMC.start();
mInputBuffers = mMC.getInputBuffers();
mOutputBuffers = mMC.getOutputBuffers();
4. Feed in data. This breaks down into the following small steps:
1) Get the index of an available input buffer
int inputbufferindex = mMC.dequeueInputBuffer(BUFFER_TIMEOUT);
If an input buffer is available, this method returns its index; otherwise it returns -1. The parameter is a timeout in microseconds: 0 returns immediately, a negative value waits indefinitely until a buffer becomes available, and a positive value waits for at most that many microseconds.
2) Copy in the raw data
ByteBuffer inputBuffer = mInputBuffers[inputbufferindex];
inputBuffer.clear(); // clear the previous contents before writing new data
inputBuffer.put(bytes, 0, len); // len is the length of the valid data passed in
mMC.queueInputBuffer(inputbufferindex, 0, len, timestamp, 0);
Once a buffer has been queued, it can only be used again after dequeueInputBuffer returns its index.
5. Fetch the output data. Feeding raw frames and fetching encoded output are best done asynchronously, because queuing one input frame does not mean the encoder will immediately produce the corresponding output; it may take several input frames before one frame of output appears. Fetching output follows the same pattern as feeding input (a complete drain loop is sketched after these sub-steps):
1) Get an available output buffer
int outputbufferindex = mMC.dequeueOutputBuffer(mBI, BUFFER_TIMEOUT);
The first parameter is a BufferInfo instance and the second is a timeout in microseconds; a negative value waits indefinitely (so, clearly, do not call this on the main thread).
2) Copy out the data
mOutputBuffers[outputbufferindex].get(bytes, 0, mBI.size);
3) Release the output buffer
mMC.releaseOutputBuffer(outputbufferindex, false);
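Put together, draining the encoder output usually runs in a loop on a worker thread. The fragment below is only a sketch of such a loop, reusing the mMC / mOutputBuffers / mBI fields from the steps above; the 'encoding' flag and outData buffer are placeholders, and the informational return codes of dequeueOutputBuffer are handled the same way the Encoder class at the end of the article handles them.

// Sketch: encoder drain loop, run on a worker thread (not the UI thread).
byte[] outData = new byte[1024 * 1024];           // placeholder output buffer
while (encoding) {                                // 'encoding' is a flag you control
    int outputbufferindex = mMC.dequeueOutputBuffer(mBI, 10000); // 10 ms, in microseconds
    if (outputbufferindex >= 0) {
        ByteBuffer buf = mOutputBuffers[outputbufferindex];
        buf.position(mBI.offset);
        buf.limit(mBI.offset + mBI.size);
        buf.get(outData, 0, mBI.size);            // copy out the encoded H.264 data
        mMC.releaseOutputBuffer(outputbufferindex, false);
        // ... hand (outData, mBI.size, mBI.presentationTimeUs) to your sink here
    } else if (outputbufferindex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
        mOutputBuffers = mMC.getOutputBuffers();  // the buffer array was replaced, refetch it
    } else if (outputbufferindex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
        MediaFormat newFormat = mMC.getOutputFormat(); // typically carries csd-0/csd-1 once available
    } // INFO_TRY_AGAIN_LATER: nothing ready yet, loop again
}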
Decoding follows essentially the same steps as encoding; only the configuration differs:
1. Create the decoder
mMC = MediaCodec.createDecoderByType(MIME_TYPE);
2. Configure the decoder. This time a Surface is needed to display the decoded frames, and the MediaFormat must carry the video's SPS and PPS (which are contained in the first buffer the encoder outputs).
int[] width = new int[1];
int[] height = new int[1];
AvcUtils.parseSPS(sps, width, height); // parse the video width/height out of the SPS
mMF = MediaFormat.createVideoFormat(MIME_TYPE, width[0], height[0]);
mMF.setByteBuffer("csd-0", ByteBuffer.wrap(sps));
mMF.setByteBuffer("csd-1", ByteBuffer.wrap(pps));
mMF.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, width[0] * height[0]);
mMC.configure(mMF, surface, null, 0);
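The sps and pps arrays above are taken from the first buffer the encoder emits (the output format returned after INFO_OUTPUT_FORMAT_CHANGED typically also exposes them as csd-0/csd-1). If you extract them from the raw stream yourself, you scan for Annex-B start codes (00 00 00 01) and check the NAL unit type: (header & 0x1F) == 7 is an SPS, == 8 is a PPS. A minimal sketch of such a splitter (the helper name is mine; the original project uses its own AvcUtils for this):

// Sketch: find Annex-B start codes in the first encoded buffer; slice between
// consecutive offsets to obtain the SPS and PPS byte arrays.
public static int[] findStartCodes(byte[] data, int len) {
    java.util.List<Integer> offsets = new java.util.ArrayList<>();
    for (int i = 0; i + 4 <= len; i++) {
        if (data[i] == 0 && data[i + 1] == 0 && data[i + 2] == 0 && data[i + 3] == 1) {
            offsets.add(i); // NAL unit starts at i + 4; its type is data[i + 4] & 0x1F
        }
    }
    int[] result = new int[offsets.size()];
    for (int i = 0; i < result.length; i++) result[i] = offsets.get(i);
    return result;
}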
3. Start the decoder and fetch the input and output buffer arrays
mMC.start();
mInputBuffers = mMC.getInputBuffers();
mOutputBuffers = mMC.getOutputBuffers();
4. Feed in data
1) Get an available input buffer
int inputbufferindex = mMC.dequeueInputBuffer(BUFFER_TIMEOUT);
The return value is the index of the available buffer.
ByteBuffer inputBuffer = mInputBuffers[inputbufferindex];
inputBuffer.clear();
2) Then write the data into it
inputBuffer.put(bytes, 0, len);
mMC.queueInputBuffer(inputbufferindex, 0, len, timestamp, 0);
5. Fetch the output. As with encoding, this should be done asynchronously, and the steps are essentially the same as on the encoding side above. The one difference is that when releasing the output buffer the second parameter is set to true, which renders the decoded frame to the Surface:
mMC.releaseOutputBuffer(outputbufferindex, true);
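Since both the encoder and the decoder block (or poll) on the dequeue calls, all of this input/output work belongs off the main thread. A minimal sketch of one way to do that with a HandlerThread follows; the message constants mirror the MSG_ENCODE / MSG_DECODE idea used in the example below, but the class itself is mine, not the author's CodecThread.

// Sketch: run codec input/output on a background HandlerThread instead of the UI thread.
import android.os.Handler;
import android.os.HandlerThread;
import android.os.Message;

public class CodecWorker {
    public static final int MSG_ENCODE = 0;
    public static final int MSG_DECODE = 1;

    private final HandlerThread thread = new HandlerThread("codec");
    private Handler handler;

    public void start(final Runnable encodeStep, final Runnable decodeStep) {
        thread.start();
        handler = new Handler(thread.getLooper()) {
            @Override
            public void handleMessage(Message msg) {
                if (msg.what == MSG_ENCODE) encodeStep.run();      // feed a camera frame, drain the encoder
                else if (msg.what == MSG_DECODE) decodeStep.run(); // feed H.264, render via the Surface
            }
        };
    }

    public void post(int what) { handler.sendEmptyMessage(what); }

    public void stop() { thread.quit(); }
}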
Below is a complete MediaCodec encode/decode example: it encodes Camera preview data (YV12) to H.264, then decodes that H.264 and displays it in a SurfaceView.
The layout is very simple: two SurfaceViews showing the images on the encode and decode sides, two buttons to start and stop, and a TextView showing the capture frame rate. The layout XML is not shown here.
package com.example.mediacodecpro;

import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.util.Log;

import java.io.IOException;
import java.nio.ByteBuffer;

/**
 * Created by chuibai on 2017/3/10.<br />
 */
public class Encoder {

    public static final int TRY_AGAIN_LATER = -1;
    public static final int BUFFER_OK = 0;
    public static final int BUFFER_TOO_SMALL = 1;
    public static final int OUTPUT_UPDATE = 2;

    private int format = 0;
    private final String MIME_TYPE = "video/avc";
    private MediaCodec mMC = null;
    private MediaFormat mMF;
    private ByteBuffer[] inputBuffers;
    private ByteBuffer[] outputBuffers;
    private long BUFFER_TIMEOUT = 0;
    private MediaCodec.BufferInfo mBI;

    /**
     * Initialize the encoder.
     * @throws IOException thrown if the encoder cannot be created
     */
    public void init() throws IOException {
        mMC = MediaCodec.createEncoderByType(MIME_TYPE);
        format = MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar;
        mBI = new MediaCodec.BufferInfo();
    }

    /**
     * Configure the encoder: color format, frame rate, bit rate and video size.
     * @param width     video width
     * @param height    video height
     * @param bitrate   video bit rate
     * @param framerate video frame rate
     */
    public void configure(int width, int height, int bitrate, int framerate) {
        if (mMF == null) {
            mMF = MediaFormat.createVideoFormat(MIME_TYPE, width, height);
            mMF.setInteger(MediaFormat.KEY_BIT_RATE, bitrate);
            mMF.setInteger(MediaFormat.KEY_FRAME_RATE, framerate);
            if (format != 0) {
                mMF.setInteger(MediaFormat.KEY_COLOR_FORMAT, format);
            }
            mMF.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, -1); // key frame interval, in seconds
        }
        mMC.configure(mMF, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
    }

    /**
     * Start the encoder and fetch the input/output buffer arrays.
     */
    public void start() {
        mMC.start();
        inputBuffers = mMC.getInputBuffers();
        outputBuffers = mMC.getOutputBuffers();
    }

    /**
     * Feed raw data to the encoder; YUV420 planar data is expected here.
     * @param data      YUV data
     * @param len       valid data length
     * @param timestamp presentation timestamp
     * @return {@link #BUFFER_OK} on success, {@link #BUFFER_TOO_SMALL} if the input
     *         buffer is too small, otherwise the (negative) dequeue result
     */
    public int input(byte[] data, int len, long timestamp) {
        int index = mMC.dequeueInputBuffer(BUFFER_TIMEOUT);
        Log.e("...", "" + index);
        if (index >= 0) {
            ByteBuffer inputBuffer = inputBuffers[index];
            inputBuffer.clear();
            if (inputBuffer.capacity() < len) {
                mMC.queueInputBuffer(index, 0, 0, timestamp, 0);
                return BUFFER_TOO_SMALL;
            }
            inputBuffer.put(data, 0, len);
            mMC.queueInputBuffer(index, 0, len, timestamp, 0);
        } else {
            return index;
        }
        return BUFFER_OK;
    }

    /**
     * Fetch encoded data from the encoder.
     * @param data encoded data (out)
     * @param len  valid data length (out)
     * @param ts   presentation timestamp (out)
     * @return one of {@link #BUFFER_OK}, {@link #BUFFER_TOO_SMALL},
     *         {@link #OUTPUT_UPDATE} or {@link #TRY_AGAIN_LATER}
     */
    public int output(/*out*/ byte[] data, /*out*/ int[] len, /*out*/ long[] ts) {
        int i = mMC.dequeueOutputBuffer(mBI, BUFFER_TIMEOUT);
        if (i >= 0) {
            if (mBI.size > data.length) return BUFFER_TOO_SMALL;
            outputBuffers[i].position(mBI.offset);
            outputBuffers[i].limit(mBI.offset + mBI.size);
            outputBuffers[i].get(data, 0, mBI.size);
            len[0] = mBI.size;
            ts[0] = mBI.presentationTimeUs;
            mMC.releaseOutputBuffer(i, false);
        } else if (i == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
            outputBuffers = mMC.getOutputBuffers();
            return OUTPUT_UPDATE;
        } else if (i == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
            mMF = mMC.getOutputFormat();
            return OUTPUT_UPDATE;
        } else if (i == MediaCodec.INFO_TRY_AGAIN_LATER) {
            return TRY_AGAIN_LATER;
        }
        return BUFFER_OK;
    }

    public void release() {
        mMC.stop();
        mMC.release();
        mMC = null;
        outputBuffers = null;
        inputBuffers = null;
    }

    public void flush() {
        mMC.flush();
    }
}
package com.example.mediacodecpro;

import android.media.MediaCodec;
import android.media.MediaFormat;
import android.view.Surface;

import java.io.IOException;
import java.nio.ByteBuffer;

/**
 * Created by chuibai on 2017/3/10.<br />
 */
public class Decoder {

    public static final int TRY_AGAIN_LATER = -1;
    public static final int BUFFER_OK = 0;
    public static final int BUFFER_TOO_SMALL = 1;
    public static final int OUTPUT_UPDATE = 2;

    private final String MIME_TYPE = "video/avc";
    private MediaCodec mMC = null;
    private MediaFormat mMF;
    private long BUFFER_TIMEOUT = 0;
    private MediaCodec.BufferInfo mBI;
    private ByteBuffer[] mInputBuffers;
    private ByteBuffer[] mOutputBuffers;

    /**
     * Initialize the decoder.
     * @throws IOException thrown if the decoder cannot be created
     */
    public void init() throws IOException {
        mMC = MediaCodec.createDecoderByType(MIME_TYPE);
        mBI = new MediaCodec.BufferInfo();
    }

    /**
     * Configure the decoder.
     * @param sps     SPS used for configuration
     * @param pps     PPS used for configuration
     * @param surface Surface on which the decoded frames are rendered
     */
    public void configure(byte[] sps, byte[] pps, Surface surface) {
        int[] width = new int[1];
        int[] height = new int[1];
        AvcUtils.parseSPS(sps, width, height); // parse the video width/height out of the SPS
        mMF = MediaFormat.createVideoFormat(MIME_TYPE, width[0], height[0]);
        mMF.setByteBuffer("csd-0", ByteBuffer.wrap(sps));
        mMF.setByteBuffer("csd-1", ByteBuffer.wrap(pps));
        mMF.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, width[0] * height[0]);
        mMC.configure(mMF, surface, null, 0);
    }

    /**
     * Start the decoder and fetch the input/output buffer arrays.
     */
    public void start() {
        mMC.start();
        mInputBuffers = mMC.getInputBuffers();
        mOutputBuffers = mMC.getOutputBuffers();
    }

    /**
     * Feed data to the decoder.
     * @param data      input data
     * @param len       valid data length
     * @param timestamp presentation timestamp
     * @return {@link #BUFFER_OK} on success, otherwise {@link #TRY_AGAIN_LATER}
     */
    public int input(byte[] data, int len, long timestamp) {
        int i = mMC.dequeueInputBuffer(BUFFER_TIMEOUT);
        if (i >= 0) {
            ByteBuffer inputBuffer = mInputBuffers[i];
            inputBuffer.clear();
            inputBuffer.put(data, 0, len);
            mMC.queueInputBuffer(i, 0, len, timestamp, 0);
        } else {
            return TRY_AGAIN_LATER;
        }
        return BUFFER_OK;
    }

    public int output(byte[] data, int[] len, long[] ts) {
        int i = mMC.dequeueOutputBuffer(mBI, BUFFER_TIMEOUT);
        if (i >= 0) {
            if (mOutputBuffers[i] != null) {
                mOutputBuffers[i].position(mBI.offset);
                mOutputBuffers[i].limit(mBI.offset + mBI.size);
                if (data != null) mOutputBuffers[i].get(data, 0, mBI.size);
                len[0] = mBI.size;
                ts[0] = mBI.presentationTimeUs;
            }
            mMC.releaseOutputBuffer(i, true); // true: render this buffer to the Surface
        } else {
            return TRY_AGAIN_LATER;
        }
        return BUFFER_OK;
    }

    public void flush() {
        mMC.flush();
    }

    public void release() {
        flush();
        mMC.stop();
        mMC.release();
        mMC = null;
        mInputBuffers = null;
        mOutputBuffers = null;
    }
}
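For orientation before the full Activity, this is roughly how the two classes above are driven from the codec thread: a preview frame (already converted to I420) goes into the Encoder, the encoded H.264 buffer is fed to the Decoder, and the Decoder renders straight to the SurfaceView's Surface. The fragment below is only a sketch of that flow with hypothetical local variable names, assuming both codecs are already configured and started; the real handling (buffer pools, SPS/PPS capture, error codes) lives in the MainActivity that follows.

// Sketch: one encode -> decode round trip.
byte[] yuv = new byte[352 * 288 * 3 / 2]; // I420 frame converted from the camera preview
byte[] h264 = new byte[1024 * 1024];
int[] outLen = new int[1];
long[] outTs = new long[1];

if (encoder.input(yuv, yuv.length, System.currentTimeMillis()) == Encoder.BUFFER_OK) {
    int res = encoder.output(h264, outLen, outTs);
    if (res == Encoder.BUFFER_OK && outLen[0] > 0) {
        // (In the real example the first encoded buffer is used to extract SPS/PPS
        // and configure the decoder before any frame is fed to it.)
        decoder.input(h264, outLen[0], outTs[0]);
        decoder.output(null, outLen, outTs); // data == null: just render to the Surface
    }
}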
package com.example.mediacodecpro;

import android.content.pm.ActivityInfo;
import android.graphics.ImageFormat;
import android.hardware.Camera;
import android.os.Bundle;
import android.os.Handler;
import android.os.Looper;
import android.os.Message;
import android.support.v7.app.AppCompatActivity;
import android.util.Log;
import android.view.SurfaceView;
import android.view.View;
import android.widget.Button;
import android.widget.TextView;

import java.io.IOException;
import java.io.OutputStream;
import java.net.DatagramPacket;
import java.net.DatagramSocket;
import java.net.InetAddress;
import java.net.Socket;
import java.nio.ByteBuffer;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.Queue;

import butterknife.BindView;
import butterknife.ButterKnife;

public class MainActivity extends AppCompatActivity implements View.OnClickListener, Camera.PreviewCallback {

    @BindView(R.id.surfaceView_encode) SurfaceView surfaceViewEncode;
    @BindView(R.id.surfaceView_decode) SurfaceView surfaceViewDecode;
    @BindView(R.id.btnStart) Button btnStart;
    @BindView(R.id.btnStop) Button btnStop;
    @BindView(R.id.capture) TextView capture;

    private int width;
    private int height;
    private int bitrate;
    private int framerate;
    private int captureFrame;
    private Camera mCamera;
    private Queue<PreviewBufferInfo> mPreviewBuffers_clean;
    private Queue<PreviewBufferInfo> mPreviewBuffers_dirty;
    private Queue<PreviewBufferInfo> mDecodeBuffers_clean;
    private Queue<PreviewBufferInfo> mDecodeBuffers_dirty;
    private int PREVIEW_POOL_CAPACITY = 5;
    private int format;
    private int DECODE_UNI_SIZE = 1024 * 1024;
    private byte[] mAvcBuf = new byte[1024 * 1024];
    private final int MSG_ENCODE = 0;
    private final int MSG_DECODE = 1;
    private String TAG = "MainActivity";
    private long mLastTestTick = 0;
    private Object mAvcEncLock;
    private Object mDecEncLock;
    private Decoder mDecoder;
    private Handler codecHandler;
    private byte[] mRawData;
    private Encoder mEncoder;
    private CodecThread codecThread;
    private DatagramSocket socket;
    private DatagramPacket packet;
    private byte[] sps_pps;
    private byte[] mPacketBuf = new byte[1024 * 1024];

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        ButterKnife.bind(this);
        // initialize parameters
        initParams();
        // set click listeners
        btnStart.setOnClickListener(this);
        btnStop.setOnClickListener(this);
    }

    /**
     * Initialize parameters: frame rate, color format, bit rate, video size, etc.
     */
    private void initParams() {
        width = 352;
        height = 288;
        bitrate = 1500000;
        framerate = 30;
        captureFrame = 0;
        format = ImageFormat.YV12;
        mAvcEncLock = new Object();
        mDecEncLock = new Object();
    }

    @Override
    protected void onResume() {
        if (getRequestedOrientation() != ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE) {
            setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);
        }
        super.onResume();
    }

    @Override
    public void onClick(View v) {
        switch (v.getId()) {
            case R.id.btnStart:
                mCamera = Camera.open(0);
                initQueues();
                initEncoder();
                initCodecThread();
                startPreview();
                break;
            case R.id.btnStop:
                releaseCodecThread();
                releseEncoderAndDecoder();
                releaseCamera();
                releaseQueue();
                break;
        }
    }

    /**
     * Release the buffer queues.
     */
    private void releaseQueue() {
        if (mPreviewBuffers_clean != null) {
            mPreviewBuffers_clean.clear();
            mPreviewBuffers_clean = null;
        }
        if (mPreviewBuffers_dirty != null) {
            mPreviewBuffers_dirty.clear();
            mPreviewBuffers_dirty = null;
        }
        if (mDecodeBuffers_clean != null) {
            mDecodeBuffers_clean.clear();
            mDecodeBuffers_clean = null;
        }
        if (mDecodeBuffers_dirty != null) {
            mDecodeBuffers_dirty.clear();
            mDecodeBuffers_dirty = null;
        }
    }
    /**
     * Release the camera.
     */
    private void releaseCamera() {
        if (mCamera != null) {
            mCamera.setPreviewCallbackWithBuffer(null);
            mCamera.stopPreview();
            mCamera.release();
            mCamera = null;
        }
    }

    private void releseEncoderAndDecoder() {
        if (mEncoder != null) {
            mEncoder.flush();
            mEncoder.release();
            mEncoder = null;
        }
        if (mDecoder != null) {
            mDecoder.release();
            mDecoder = null;
        }
    }

    private void releaseCodecThread() {
        codecHandler.getLooper().quit();
        codecHandler = null;
        codecThread = null;
    }

    private void initCodecThread() {
        codecThread = new CodecThread();
        codecThread.start();
    }

    /**
     * Start the camera preview.
     */
    private void startPreview() {
        Camera.Parameters parameters = mCamera.getParameters();
        parameters.setPreviewFormat(format);
        parameters.setPreviewFrameRate(framerate);
        parameters.setPreviewSize(width, height);
        mCamera.setParameters(parameters);
        try {
            mCamera.setPreviewDisplay(surfaceViewEncode.getHolder());
        } catch (IOException e) {
            e.printStackTrace();
        }
        mCamera.setPreviewCallbackWithBuffer(this);
        mCamera.startPreview();
    }

    @Override
    public void onPreviewFrame(byte[] data, Camera camera) {
        // the preview data is null
        if (data == null) {
            Log.e(TAG, "preview data is null");
            return;
        }
        long curTick = System.currentTimeMillis();
        if (mLastTestTick == 0) {
            mLastTestTick = curTick;
        }
        if (curTick > mLastTestTick + 1000) {
            setCaptureFPSTextView(captureFrame);
            captureFrame = 0;
            mLastTestTick = curTick;
        } else {
            captureFrame++;
        }
        synchronized (mAvcEncLock) {
            PreviewBufferInfo info = mPreviewBuffers_clean.poll(); // remove the head of queue
            info.buffer = data;
            info.size = getPreviewBufferSize(width, height, format);
            info.timestamp = System.currentTimeMillis();
            mPreviewBuffers_dirty.add(info);
            if (mDecoder == null) {
                codecHandler.sendEmptyMessage(MSG_ENCODE);
            }
        }
    }

    private void setCaptureFPSTextView(int captureFrame) {
        capture.setText("Current frame rate: " + captureFrame);
    }

    private void initEncoder() {
        mEncoder = new Encoder();
        try {
            mEncoder.init();
            mEncoder.configure(width, height, bitrate, framerate);
            mEncoder.start();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Initialize the buffer queues.
     */
    private void initQueues() {
        if (mPreviewBuffers_clean == null) mPreviewBuffers_clean = new LinkedList<>();
        if (mPreviewBuffers_dirty == null) mPreviewBuffers_dirty = new LinkedList<>();
        int size = getPreviewBufferSize(width, height, format);
        for (int i = 0; i < PREVIEW_POOL_CAPACITY; i++) {
            byte[] mem = new byte[size];
            mCamera.addCallbackBuffer(mem); // ByteBuffer.array is a reference, not a copy
            PreviewBufferInfo info = new PreviewBufferInfo();
            info.buffer = null;
            info.size = 0;
            info.timestamp = 0;
            mPreviewBuffers_clean.add(info);
        }
        if (mDecodeBuffers_clean == null) mDecodeBuffers_clean = new LinkedList<>();
        if (mDecodeBuffers_dirty == null) mDecodeBuffers_dirty = new LinkedList<>();