【Android】Capturing real-time Camera data on Android and hardware-encoding it with MediaCodec

A gripe:
  The general flow is much the same every time, yet I still occasionally forget the details of a given step. Also note that the Camera class is officially deprecated (the replacement is android.hardware.camera2), but our company project was unfortunately built on Camera, and camera2 appears to require API 21+. Rumors of Android 7 are already circulating, yet 6.0, more than three months after release, seems to hold under 3% market share. When will there be a standardized, unified baseline? Life as an Android developer is not easy. Sigh~
The flow of capturing real-time camera data on Android and hardware-encoding it:
/*
 * The rough flow is: grab raw frames, encode them, then handle the encoded data.
 */
/* 1. Grab a raw frame */
@Override
public void onPreviewFrame(byte[] onPreviewData, Camera camera) {
    /* Here you can rotate or scale onPreviewData,
     * or convert between YUV layouts, e.g. YUV420P (YV12) and YUV420SP (NV21/NV12);
     * open-source libraries such as libyuv or ffmpeg can help with this.
     */
    getRawFrame(onPreviewData);
    /* Then return onPreviewData to the Camera callback-buffer queue */
    camera.addCallbackBuffer(onPreviewData);
}
private void getRawFrame(byte[] rawFrame) { encodeFrame(rawFrame); }
/* 2. Encode the frame */
private byte[] encodeFrame(byte[] inputData) { return encodedData; }
/* 3. With the encoded data in hand, do whatever you need: save it to a local file, or push it to a stream */
Operation ? Send(byte[] sendData) : Save(byte[] saveData)  // pseudocode
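
Fleshing out step 1: addCallbackBuffer only has an effect when the preview is driven through setPreviewCallbackWithBuffer (rather than setPreviewCallback), with a buffer queued up front. Below is a minimal sketch of that pattern, assuming NV21 preview format (startBufferedPreview is a hypothetical helper, not from the original code):

// A minimal sketch, assuming NV21: drive the preview through one pre-allocated
// buffer so every onPreviewFrame call reuses the same byte[] instead of allocating.
private void startBufferedPreview(Camera camera, int width, int height) {
    int bufferSize = width * height * ImageFormat.getBitsPerPixel(ImageFormat.NV21) / 8;
    camera.addCallbackBuffer(new byte[bufferSize]); // queue one buffer up front
    camera.setPreviewCallbackWithBuffer(this);      // frames now arrive in that buffer
    camera.startPreview();
}

For YV12 the buffer must instead use the 16-byte-aligned stride math shown in getYuvBuffer further down.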

onPreviewFrame above is a callback of the Camera class. Before using Camera, you need to initialize a SurfaceView and SurfaceHolder and implement the corresponding interfaces:

// init the preview surface
private void initView() {
    SurfaceView surfaceView = (SurfaceView) findViewById(R.id.record_surface);
    SurfaceHolder surfaceHolder = surfaceView.getHolder();
    surfaceHolder.addCallback(this);
    surfaceHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS); // deprecated no-op since API 11, but still needed on older devices
}


@Override
public void surfaceCreated(SurfaceHolder holder) {
    openCamera(holder); // open the camera
}

@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
}

@Override
public void surfaceDestroyed(SurfaceHolder holder) {
    releaseCamera(); // remember to call releaseCamera() when the surface is destroyed
}

private void openCamera(SurfaceHolder holder) {
    releaseCamera();
    try {
        camera = getCamera(Camera.CameraInfo.CAMERA_FACING_BACK); // pick the front or back camera as needed
    } catch (Exception e) {
        camera = null;
        if (AppContext.isDebugMode) {
            e.printStackTrace();
        }
    }
    if (camera != null) {
        try {
            camera.setPreviewCallback(this);
            camera.setDisplayOrientation(90); // official API that rotates only the on-screen preview; it does not affect the raw data delivered to onPreviewFrame
            if (parameters == null) {
                parameters = camera.getParameters();
            }
            parameters.setPreviewFormat(ImageFormat.NV21); // common formats: NV21 / YV12
            parameters.setPreviewSize(width, height); // many more parameters can be set, but first check that the camera supports the value, or setParameters may throw
            camera.setParameters(parameters);
            camera.setPreviewDisplay(holder);
            camera.startPreview();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}
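
As the setPreviewSize comment warns, only request sizes the device actually reports. A small sketch of that check (choosePreviewSize is a hypothetical helper, not part of the original code):

// A minimal sketch: request a preview size only if the camera advertises it,
// otherwise fall back to the driver's current default.
private Camera.Size choosePreviewSize(Camera.Parameters params, int reqWidth, int reqHeight) {
    for (Camera.Size size : params.getSupportedPreviewSizes()) {
        if (size.width == reqWidth && size.height == reqHeight) {
            return size; // exact match is supported
        }
    }
    return params.getPreviewSize(); // fall back to the current/default size
}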

@TargetApi(9)
private Camera getCamera(int cameraType) {
    Camera camera = null;
    try {
        camera = Camera.open(cameraType); 
    } catch (Exception e) {
        e.printStackTrace();
    }
    return camera; // returns null if camera is unavailable
}

private synchronized void releaseCamera() {
    if (camera != null) {
        try {
            camera.setPreviewCallback(null);
        } catch (Exception e) {
            e.printStackTrace();
        }
        try {
            camera.stopPreview();
        } catch (Exception e) {
            e.printStackTrace();
        }
        try {
            camera.release();
        } catch (Exception e) {
            e.printStackTrace();
        }
        camera = null;
    }
}

The MediaCodec hardware-encoding part:

 Here I recommend the implementation in the SRS open-source project: https://github.com/ossrs/srs-sea.git
// video device.
private Camera camera;
private MediaCodec vencoder;
private MediaCodecInfo vmci;
private MediaCodec.BufferInfo vebi;
private byte[] vbuffer;
// video camera settings.
private Camera.Size vsize;
private int vcolor;
private int vbitrate_kbps = 300;
private final static int VFPS = 20;
private final static int VGOP = 5;
private final static int VWIDTH = 640;
private final static int VHEIGHT = 480;
private final static String VCODEC = "video/avc"; // H.264 MIME type, referenced by chooseVideoEncoder below

/* First, the MediaCodec configuration needs to be initialized */
private void initMediaCodec() {
    // choose the right vencoder, prefer qcom then google.
    vcolor = chooseVideoEncoder();
    // vencoder yuv to 264 es stream.
    // requires sdk level 16+, Android 4.1, 4.1.1, the JELLY_BEAN
    try {
        vencoder = MediaCodec.createByCodecName(vmci.getName());
    } catch (IOException e) {
        Log.e(TAG, "create vencoder failed.");
        e.printStackTrace();
        return;
    }
    vebi = new MediaCodec.BufferInfo();
    // setup the vencoder.
    // @see https://developer.android.com/reference/android/media/MediaCodec.html
    MediaFormat vformat = MediaFormat.createVideoFormat(MediaFormat.MIMETYPE_VIDEO_AVC, vsize.width, vsize.height);
    vformat.setInteger(MediaFormat.KEY_COLOR_FORMAT, vcolor);
    vformat.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, 0);
    vformat.setInteger(MediaFormat.KEY_BIT_RATE, 1000 * vbitrate_kbps);
    vformat.setInteger(MediaFormat.KEY_FRAME_RATE, VFPS);
    vformat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, VGOP);
    Log.i(TAG, String.format("vencoder %s, color=%d, bitrate=%d, fps=%d, gop=%d, size=%dx%d",
            vmci.getName(), vcolor, vbitrate_kbps, VFPS, VGOP, vsize.width, vsize.height));
    // the following error can be ignored:
    // 1. the storeMetaDataInBuffers error:
    //      [OMX.qcom.video.encoder.avc] storeMetaDataInBuffers (output) failed w/ err -2147483648
    //      @see http://bigflake.com/mediacodec/#q12
    vencoder.configure(vformat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
    vencoder.start();
}
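
The SRS excerpt above stops at configuration; step 2 of the outline, actually pushing YUV frames in and draining H.264 out, looks roughly like the sketch below. It uses the pre-Lollipop buffer-array API (encodeYuvFrame and onEncodedFrame are assumed names, not SRS code):

// A minimal sketch (assumed helper): feed one raw YUV frame into vencoder and
// drain any finished H.264 output, using the buffer-array API of API 16+.
private void encodeYuvFrame(byte[] yuvFrame, long ptsUs) {
    ByteBuffer[] inputBuffers = vencoder.getInputBuffers();
    ByteBuffer[] outputBuffers = vencoder.getOutputBuffers();

    int inIndex = vencoder.dequeueInputBuffer(-1); // block until an input buffer is free
    if (inIndex >= 0) {
        ByteBuffer in = inputBuffers[inIndex];
        in.clear();
        in.put(yuvFrame, 0, yuvFrame.length);
        vencoder.queueInputBuffer(inIndex, 0, yuvFrame.length, ptsUs, 0);
    }

    for (;;) {
        int outIndex = vencoder.dequeueOutputBuffer(vebi, 0);
        if (outIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
            outputBuffers = vencoder.getOutputBuffers(); // buffer set was replaced
        } else if (outIndex >= 0) {
            ByteBuffer es = outputBuffers[outIndex];
            // es holds an encoded chunk described by vebi.offset / vebi.size:
            // hand it to step 3 (save to a file or push to a stream).
            onEncodedFrame(es, vebi); // assumed callback
            vencoder.releaseOutputBuffer(outIndex, false);
        } else {
            break; // TRY_AGAIN_LATER or FORMAT_CHANGED: nothing more to drain now
        }
    }
}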

// for the vbuffer for YV12(android YUV), @see below:
// https://developer.android.com/reference/android/hardware/Camera.Parameters.html#setPreviewFormat(int)
// https://developer.android.com/reference/android/graphics/ImageFormat.html#YV12
private int getYuvBuffer(int width, int height) {
    // stride = ALIGN(width, 16)
    int stride = (int) Math.ceil(width / 16.0) * 16;
    // y_size = stride * height
    int y_size = stride * height;
    // c_stride = ALIGN(stride/2, 16)
    int c_stride = (int) Math.ceil(width / 32.0) * 16;
    // c_size = c_stride * height/2
    int c_size = c_stride * height / 2;
    // size = y_size + c_size * 2
    return y_size + c_size * 2;
}
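
A usage sketch for this helper: once the preview size is fixed, size vbuffer with it and queue it as the camera's callback buffer (this pairs with the buffered-preview pattern from step 1):

// Allocate the shared preview buffer once, then hand it to the camera.
vbuffer = new byte[getYuvBuffer(VWIDTH, VHEIGHT)];
camera.addCallbackBuffer(vbuffer);
camera.setPreviewCallbackWithBuffer(this);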

// choose the video encoder by name.
private MediaCodecInfo chooseVideoEncoder(String name, MediaCodecInfo def) {
    int nbCodecs = MediaCodecList.getCodecCount();
    for (int i = 0; i < nbCodecs; i++) {
        MediaCodecInfo mci = MediaCodecList.getCodecInfoAt(i);
        if (!mci.isEncoder()) {
            continue;
        }
        String[] types = mci.getSupportedTypes();
        for (int j = 0; j < types.length; j++) {
            if (types[j].equalsIgnoreCase(VCODEC)) {
                //Log.i(TAG, String.format("vencoder %s types: %s", mci.getName(), types[j]));
                if (name == null) {
                    return mci;
                }

                if (mci.getName().contains(name)) {
                    return mci;
                }
            }
        }
    }
    return def;
}

// choose the right supported color format. @see below:
// https://developer.android.com/reference/android/media/MediaCodecInfo.html
// https://developer.android.com/reference/android/media/MediaCodecInfo.CodecCapabilities.html
private int chooseVideoEncoder() {
    // choose the encoder "video/avc":
    //      1. select one when type matched.
    //      2. prefer google avc.
    //      3. prefer qcom avc.
    vmci = chooseVideoEncoder(null, null);
    //vmci = chooseVideoEncoder("google", vmci);
    //vmci = chooseVideoEncoder("qcom", vmci);

    int matchedColorFormat = 0;
    MediaCodecInfo.CodecCapabilities cc = vmci.getCapabilitiesForType(VCODEC);
    for (int i = 0; i < cc.colorFormats.length; i++) {
        int cf = cc.colorFormats[i];
        Log.i(TAG, String.format("vencoder %s supports color format 0x%x(%d)", vmci.getName(), cf, cf));

        // choose YUV for h.264, prefer the bigger one.
        // corresponding to the color space transform in onPreviewFrame
        if (cf >= MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar
                && cf <= MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar) {
            if (cf > matchedColorFormat) {
                matchedColorFormat = cf;
            }
        }
    }
    for (int i = 0; i < cc.profileLevels.length; i++) {
        MediaCodecInfo.CodecProfileLevel pl = cc.profileLevels[i];
        Log.i(TAG, String.format("vencoder %s supports profile %d, level %d", vmci.getName(), pl.profile, pl.level));
    }
    Log.i(TAG, String.format("vencoder %s choose color format 0x%x(%d)", vmci.getName(), matchedColorFormat, matchedColorFormat));
    return matchedColorFormat;
}

  Most of the code above is excerpted from the SRS implementation (apart from the hedged sketches) and is provided for reference only.
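
One detail worth spelling out: the color-format loop prefers COLOR_FormatYUV420SemiPlanar (NV12), while the camera delivers NV21, so the "color space transform in onPreviewFrame" mentioned in the comments is typically an NV21-to-NV12 chroma swap. A minimal pure-Java sketch (SRS performs an equivalent conversion; this standalone version is only for illustration):

// Convert NV21 (VU interleaved, from the camera) to NV12 (UV interleaved,
// expected by COLOR_FormatYUV420SemiPlanar encoders): swap each chroma pair.
private static void nv21ToNv12(byte[] nv21, byte[] nv12, int width, int height) {
    int frameSize = width * height;
    System.arraycopy(nv21, 0, nv12, 0, frameSize); // the Y plane is identical
    for (int i = frameSize; i < frameSize + frameSize / 2; i += 2) {
        nv12[i] = nv21[i + 1];     // U
        nv12[i + 1] = nv21[i];     // V
    }
}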

  I also recommend another project, which saves the encoded data to a local .h264 file, handy for analysis. My fork: https://github.com/eterrao/MediaCodecEncodeH264.git (a sketch of the file-writing step follows below).

  Original author's repo: https://github.com/sszhangpengfei/MediaCodecEncodeH264.git
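
For reference, writing each encoded chunk straight to a raw .h264 elementary stream takes only a few lines (saveEs and h264Out are assumed names, not taken from either project):

// A minimal sketch (assumed names): append each encoded chunk to a raw .h264
// file; tools such as ffplay can open the result for analysis.
private FileOutputStream h264Out; // e.g. opened on a file under getExternalFilesDir()

private void saveEs(ByteBuffer es, MediaCodec.BufferInfo bi) throws IOException {
    byte[] chunk = new byte[bi.size];
    es.position(bi.offset);
    es.limit(bi.offset + bi.size);
    es.get(chunk);
    h264Out.write(chunk); // SPS/PPS arrive first (BUFFER_FLAG_CODEC_CONFIG), so plain appending keeps the stream playable
}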

  (My thanks here to everyone with an open-source, sharing spirit; because of you, the road of learning and growth has far fewer pitfalls!)

  In practice, MediaCodec implementations are all much the same, but note that the buffer-handling mechanism changed after API 20. The official guidance is as follows:

Link: https://developer.android.com/reference/android/media/MediaCodec.html

The following is excerpted from the official API documentation:

Depending on the API version, you can process data in three ways:

    Processing Mode                       API version <= 20         API version >= 21
                                          (Jelly Bean/KitKat)       (Lollipop and later)
    Synchronous API using buffer arrays   Supported                 Deprecated
    Synchronous API using buffers         Not available             Supported
    Asynchronous API using buffers        Not available             Supported
Asynchronous Processing using Buffers

Since LOLLIPOP, the preferred method is to process data asynchronously by setting a callback before calling configure. Asynchronous mode changes the state transitions slightly, because you must call start() after flush() to transition the codec to the Running sub-state and start receiving input buffers. Similarly, upon an initial call to start the codec will move directly to the Running sub-state and start passing available input buffers via the callback.

MediaCodec is typically used like this in asynchronous mode:

MediaCodec codec = MediaCodec.createByCodecName(name);
 MediaFormat mOutputFormat; // member variable
 codec.setCallback(new MediaCodec.Callback() {
   @Override
   void onInputBufferAvailable(MediaCodec mc, int inputBufferId) {
     ByteBuffer inputBuffer = codec.getInputBuffer(inputBufferId);
     // fill inputBuffer with valid data
     …
     codec.queueInputBuffer(inputBufferId, …);
   }

   @Override
   void onOutputBufferAvailable(MediaCodec mc, int outputBufferId, …) {
     ByteBuffer outputBuffer = codec.getOutputBuffer(outputBufferId);
     MediaFormat bufferFormat = codec.getOutputFormat(outputBufferId); // option A
     // bufferFormat is equivalent to mOutputFormat
     // outputBuffer is ready to be processed or rendered.
     …
     codec.releaseOutputBuffer(outputBufferId, …);
   }

   @Override
   void onOutputFormatChanged(MediaCodec mc, MediaFormat format) {
     // Subsequent data will conform to new format.
     // Can ignore if using getOutputFormat(outputBufferId)
     mOutputFormat = format; // option B
   }

   @Override
   void onError(…) {
     …
   }
 });
 codec.configure(format, …);
 mOutputFormat = codec.getOutputFormat(); // option B
 codec.start();
 // wait for processing to complete
 codec.stop();
 codec.release();

Synchronous Processing using Buffers

Since LOLLIPOP, you should retrieve input and output buffers using getInput/OutputBuffer(int) and/or getInput/OutputImage(int) even when using the codec in synchronous mode. This allows certain optimizations by the framework, e.g. when processing dynamic content. This optimization is disabled if you call getInput/OutputBuffers().

Note: do not mix the methods of using buffers and buffer arrays at the same time. Specifically, only call getInput/OutputBuffers directly after start() or after having dequeued an output buffer ID with the value of INFO_OUTPUT_FORMAT_CHANGED.

MediaCodec is typically used like this in synchronous mode:

 MediaCodec codec = MediaCodec.createByCodecName(name);
 codec.configure(format, …);
 MediaFormat outputFormat = codec.getOutputFormat(); // option B
 codec.start();
 for (;;) {
   int inputBufferId = codec.dequeueInputBuffer(timeoutUs);
   if (inputBufferId >= 0) {
     ByteBuffer inputBuffer = codec.getInputBuffer(…);
     // fill inputBuffer with valid data
     …
     codec.queueInputBuffer(inputBufferId, …);
   }
   int outputBufferId = codec.dequeueOutputBuffer(…);
   if (outputBufferId >= 0) {
     ByteBuffer outputBuffer = codec.getOutputBuffer(outputBufferId);
     MediaFormat bufferFormat = codec.getOutputFormat(outputBufferId); // option A
     // bufferFormat is identical to outputFormat
     // outputBuffer is ready to be processed or rendered.
     …
     codec.releaseOutputBuffer(outputBufferId, …);
   } else if (outputBufferId == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
     // Subsequent data will conform to new format.
     // Can ignore if using getOutputFormat(outputBufferId)
     outputFormat = codec.getOutputFormat(); // option B
   }
 }
 codec.stop();
 codec.release();

Synchronous Processing using Buffer Arrays (deprecated)

In versions KITKAT_WATCH and before, the set of input and output buffers are represented by the ByteBuffer[] arrays. After a successful call to start(), retrieve the buffer arrays using getInput/OutputBuffers(). Use the buffer ID-s as indices into these arrays (when non-negative), as demonstrated in the sample below. Note that there is no inherent correlation between the size of the arrays and the number of input and output buffers used by the system, although the array size provides an upper bound.

 MediaCodec codec = MediaCodec.createByCodecName(name);
 codec.configure(format, …);
 codec.start();
 ByteBuffer[] inputBuffers = codec.getInputBuffers();
 ByteBuffer[] outputBuffers = codec.getOutputBuffers();
 for (;;) {
   int inputBufferId = codec.dequeueInputBuffer(…);
   if (inputBufferId >= 0) {
     // fill inputBuffers[inputBufferId] with valid data
     …
     codec.queueInputBuffer(inputBufferId, …);
   }
   int outputBufferId = codec.dequeueOutputBuffer(…);
   if (outputBufferId >= 0) {
     // outputBuffers[outputBufferId] is ready to be processed or rendered.
     …
     codec.releaseOutputBuffer(outputBufferId, …);
   } else if (outputBufferId == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
     outputBuffers = codec.getOutputBuffers();
   } else if (outputBufferId == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
     // Subsequent data will conform to new format.
     MediaFormat format = codec.getOutputFormat();
   }
 }
 codec.stop();
 codec.release();
 
  That's all for today; I'll keep recording the problems and pitfalls I run into.
  Edited 2016-01-17 23:06...