OpenGL ES Camera Preview
Camera development is an important application of OpenGL ES. With OpenGL it is easy to implement camera beautification, filters, face reshaping, and dynamic effects, and the performance is significantly better than equivalent CPU implementations.
There are generally two ways to implement camera preview. One is a pure GPU approach based on Android's native SurfaceTexture.
The other is to obtain YUV frame data through the camera's preview callback, process it with CPU algorithms, upload the result to GPU memory, and then convert YUV to RGBA on the GPU for rendering; in other words, a combined CPU + GPU approach.
In the pure GPU approach based on Android's native SurfaceTexture, the camera uses a SurfaceTexture as the preview target. The SurfaceTexture can come from GLSurfaceView, TextureView, or SurfaceView, wrapper classes that each own a Surface, or it can be a custom implementation.
The texture bound to the SurfaceTexture serving as the preview target must be an OES texture. With an OES texture we do not need to do the YUV to RGBA conversion ourselves in the fragment shader, because the OES texture can receive YUV data directly and the conversion to RGBA happens implicitly when the texture is sampled.
Similar to creating an ordinary 2D texture, an OES texture is created as follows:
private int createOESTexture() {
    int[] texture = new int[1];
    GLES20.glGenTextures(1, texture, 0);
    GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, texture[0]);
    // Linear filtering and clamp-to-edge wrapping, just like a normal 2D texture
    GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GL10.GL_TEXTURE_MIN_FILTER, GL10.GL_LINEAR);
    GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GL10.GL_TEXTURE_MAG_FILTER, GL10.GL_LINEAR);
    GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GL10.GL_TEXTURE_WRAP_S, GL10.GL_CLAMP_TO_EDGE);
    GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GL10.GL_TEXTURE_WRAP_T, GL10.GL_CLAMP_TO_EDGE);
    return texture[0];
}
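The OES texture is then wrapped in a SurfaceTexture, which is handed to the camera as the preview target. Below is a minimal sketch of this wiring, assuming the legacy Camera API and a GLSurfaceView field named mGLSurfaceView (the names are illustrative, not from a specific project):

import android.graphics.SurfaceTexture;
import android.hardware.Camera;

private SurfaceTexture mSurfaceTexture;

// Minimal sketch: connect the OES texture to the camera as the preview target.
// With Camera2, you would instead build a Surface from this SurfaceTexture
// and add it as a target of the capture request.
private void startPreview(Camera camera) throws java.io.IOException {
    mSurfaceTexture = new SurfaceTexture(createOESTexture());
    // Ask the GL thread to redraw whenever the camera produces a new frame.
    mSurfaceTexture.setOnFrameAvailableListener(st -> mGLSurfaceView.requestRender());
    camera.setPreviewTexture(mSurfaceTexture);
    camera.startPreview();
}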
Using an OES texture requires modifying the fragment shader: declare the external-texture extension at the top of the shader script:
#extension GL_OES_EGL_image_external : require
Also, the texture sampler can no longer be sampler2D; it must be replaced with samplerExternalOES.
#version 300 es
// GLSL ES 3.00 needs the _essl3 variant of the extension and an explicit
// out variable instead of gl_FragColor
#extension GL_OES_EGL_image_external_essl3 : require
precision mediump float;
in vec2 v_texCoord;
out vec4 outColor;
uniform samplerExternalOES s_TexSampler;
void main()
{
    outColor = texture(s_TexSampler, v_texCoord);
}
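On the Java side, every new camera frame has to be latched into the OES texture before drawing, and the texture-coordinate transform supplied by the camera should be applied. A minimal sketch of the per-frame work on the GL thread, with illustrative names (mSurfaceTexture comes from the sketch above; mProgram and mSTMatrixHandle are assumed fields, the latter being the uniform location of a mat4 applied to the texture coordinates in the vertex shader):

import android.opengl.GLES20;
import javax.microedition.khronos.opengles.GL10;

private final float[] mSTMatrix = new float[16];
private int mProgram;        // linked program using the samplerExternalOES shader above
private int mSTMatrixHandle; // assumed: glGetUniformLocation(mProgram, "uSTMatrix")

@Override
public void onDrawFrame(GL10 gl) {
    // Latch the most recent camera frame into the OES texture.
    mSurfaceTexture.updateTexImage();
    // The camera supplies a texture-coordinate transform (rotation, crop); fetch it every frame.
    mSurfaceTexture.getTransformMatrix(mSTMatrix);
    GLES20.glUseProgram(mProgram);
    GLES20.glUniformMatrix4fv(mSTMatrixHandle, 1, false, mSTMatrix, 0);
    // ... bind the OES texture, set up vertex attributes, then draw the quad ...
    GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
}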
In fact, when using TextureView, you do not even need to create the OES texture yourself; simply bind the camera and configure the transform matrix to get a working preview. For concrete examples, see Android's official samples: https://github.com/android/camera-samples .
The pure GPU preview approach based on Android's native APIs is simple to use and needs very little code, since the native APIs already do a lot of the wrapping, and camera effects such as beautification filters are easy to implement in the fragment shader. Its drawback is poor extensibility: for example, applying traditional CPU algorithms for filters or beautification becomes inconvenient, and the image data has to be copied between main memory and GPU memory multiple times, which hurts performance and power consumption.
This article focuses on the other preview approach: extracting the preview image data, passing it to the native layer, optionally processing it there, and finally rendering it. This approach is comparatively more complex.
Camera preview data commonly arrives as YUV420P or YUV420SP (NV21). The three YUV planes of each frame are uploaded to GPU memory as three separate textures, and the fragment shader converts the YUV data to RGBA. For the underlying principles, see the earlier article "NDK OpenGL ES 3.0 開發(三):YUV 渲染".
Taking Camera2 as an example, preview data is obtained mainly through ImageReader, a class that wraps a Surface:
private ImageReader.OnImageAvailableListener mOnPreviewImageAvailableListener =
        new ImageReader.OnImageAvailableListener() {
    @Override
    public void onImageAvailable(ImageReader reader) {
        Image image = reader.acquireLatestImage();
        if (image != null) {
            if (mCamera2FrameCallback != null) {
                mCamera2FrameCallback.onPreviewFrame(
                        CameraUtil.YUV_420_888_data(image), image.getWidth(), image.getHeight());
            }
            image.close();
        }
    }
};

mPreviewImageReader = ImageReader.newInstance(mPreviewSize.getWidth(),
        mPreviewSize.getHeight(), ImageFormat.YUV_420_888, 2);
mPreviewImageReader.setOnImageAvailableListener(mOnPreviewImageAvailableListener, mBackgroundHandler);

CaptureRequest.Builder builder =
        mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
builder.addTarget(mPreviewImageReader.getSurface());
mCaptureSession.setRepeatingRequest(builder.build(), null, mBackgroundHandler);

// Receive the preview data in a custom callback and pass it to the C++ layer via JNI
public void onPreviewFrame(byte[] data, int width, int height) {
    Log.d(TAG, "onPreviewFrame() called with: data = [" + data + "], width = ["
            + width + "], height = [" + height + "]");
    mByteFlowRender.setRenderFrame(IMAGE_FORMAT_I420, data, width, height);
    // Request a redraw each time a new frame arrives
    mByteFlowRender.requestRender();
}
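The CameraUtil.YUV_420_888_data() helper used above is not listed in this post. As a reference, here is a minimal sketch of what such a helper has to do: copy a YUV_420_888 Image into a packed I420 buffer while honoring each plane's row stride and pixel stride (the class and method names here are hypothetical):

import android.media.Image;
import java.nio.ByteBuffer;

public final class CameraUtilSketch {
    // Copies a YUV_420_888 Image into a packed I420 (Y plane, then U, then V) byte array.
    // Hypothetical stand-in for the CameraUtil.YUV_420_888_data() helper above.
    public static byte[] toI420(Image image) {
        int width = image.getWidth();
        int height = image.getHeight();
        byte[] out = new byte[width * height * 3 / 2];
        int offset = 0;
        Image.Plane[] planes = image.getPlanes(); // order is guaranteed to be Y, U, V
        for (int i = 0; i < 3; i++) {
            ByteBuffer buffer = planes[i].getBuffer();
            int rowStride = planes[i].getRowStride();
            int pixelStride = planes[i].getPixelStride();
            int planeWidth = (i == 0) ? width : width / 2;
            int planeHeight = (i == 0) ? height : height / 2;
            byte[] row = new byte[rowStride];
            for (int r = 0; r < planeHeight; r++) {
                buffer.position(r * rowStride);
                if (pixelStride == 1) {
                    // Tightly packed row: copy planeWidth bytes directly.
                    buffer.get(out, offset, planeWidth);
                    offset += planeWidth;
                } else {
                    // Interleaved chroma (pixelStride == 2): pick every other byte.
                    int len = Math.min(rowStride, buffer.remaining());
                    buffer.get(row, 0, len);
                    for (int c = 0; c < planeWidth; c++) {
                        out[offset++] = row[c * pixelStride];
                    }
                }
            }
        }
        return out;
    }
}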
The main JNI bridge:
public abstract class ByteFlowRender {
    public static final int GL_RENDER_TYPE = 0;
    public static final int CL_RENDER_TYPE = 1;
    public static final int IMAGE_FORMAT_RGBA = 0x01;
    public static final int IMAGE_FORMAT_NV21 = 0x02;
    public static final int IMAGE_FORMAT_NV12 = 0x03;
    public static final int IMAGE_FORMAT_I420 = 0x04;
    public static final int PARAM_TYPE_SET_SHADER_INDEX = 201;

    static {
        System.loadLibrary("byteflow_render");
    }

    private long mNativeContextHandle;

    protected native void native_CreateContext(int renderType);
    protected native void native_DestroyContext();
    protected native int native_Init(int initType);
    protected native int native_UnInit();
    protected native void native_UpdateFrame(int format, byte[] data, int width, int height);
    protected native void native_LoadFilterData(int index, int format, int width, int height, byte[] bytes);
    protected native void native_LoadShaderScript(int shaderIndex, String scriptStr);
    protected native void native_SetTransformMatrix(float translateX, float translateY, float scaleX, float scaleY, int degree, int mirror);
    protected native void native_SetParamsInt(int paramType, int value);
    protected native int native_GetParamsInt(int paramType);
    protected native void native_OnSurfaceCreated();
    protected native void native_OnSurfaceChanged(int width, int height);
    protected native void native_OnDrawFrame();
}
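The post does not show how this abstract class gets hooked up. One plausible wiring, driving the native calls from a GLSurfaceView.Renderer, might look like the following sketch (ByteFlowRenderImpl and its init() method are illustrative, not necessarily the project's actual code):

import android.opengl.GLSurfaceView;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;

public class ByteFlowRenderImpl extends ByteFlowRender implements GLSurfaceView.Renderer {
    private GLSurfaceView mGLSurfaceView;

    public void init(GLSurfaceView view) {
        mGLSurfaceView = view;
        native_CreateContext(GL_RENDER_TYPE);
        native_Init(0);
        view.setEGLContextClientVersion(2);
        view.setRenderer(this);
        // Only draw when requestRender() is called, i.e. when a new frame arrives.
        view.setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
    }

    public void setRenderFrame(int format, byte[] data, int width, int height) {
        native_UpdateFrame(format, data, width, height);
    }

    public void requestRender() {
        mGLSurfaceView.requestRender();
    }

    @Override
    public void onSurfaceCreated(GL10 gl, EGLConfig config) {
        native_OnSurfaceCreated();
    }

    @Override
    public void onSurfaceChanged(GL10 gl, int width, int height) {
        native_OnSurfaceChanged(width, height);
    }

    @Override
    public void onDrawFrame(GL10 gl) {
        native_OnDrawFrame();
    }
}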
The shader scripts used to render the YUV data sample the three textures holding the Y, U, and V planes and convert the result to RGBA (the constants below are the usual BT.601 full-range conversion coefficients):
// Vertex shader
#version 100
varying vec2 v_texcoord;
attribute vec4 position;
attribute vec2 texcoord;
uniform mat4 MVP;
void main()
{
    v_texcoord = texcoord;
    gl_Position = MVP * position;
}

// Fragment shader
#version 100
precision highp float;
varying vec2 v_texcoord;
uniform lowp sampler2D s_textureY;
uniform lowp sampler2D s_textureU;
uniform lowp sampler2D s_textureV;
void main()
{
    float y, u, v, r, g, b;
    y = texture2D(s_textureY, v_texcoord).r;
    u = texture2D(s_textureU, v_texcoord).r;
    v = texture2D(s_textureV, v_texcoord).r;
    u = u - 0.5;
    v = v - 0.5;
    r = y + 1.403 * v;
    g = y - 0.344 * u - 0.714 * v;
    b = y + 1.770 * u;
    gl_FragColor = vec4(r, g, b, 1.0);
}
The main implementation in the C++ layer:
// Compile and link the shaders
int GLByteFlowRender::CreateProgram(const char *pVertexShaderSource, const char *pFragShaderSource)
{
    m_Program = GLUtils::CreateProgram(pVertexShaderSource, pFragShaderSource, m_VertexShader, m_FragShader);
    if (!m_Program)
    {
        GLUtils::CheckGLError("Create Program");
        LOGCATE("GLByteFlowRender::CreateProgram Could not create program.");
        return 0;
    }

    m_YTextureHandle = glGetUniformLocation(m_Program, "s_textureY");
    m_UTextureHandle = glGetUniformLocation(m_Program, "s_textureU");
    m_VTextureHandle = glGetUniformLocation(m_Program, "s_textureV");

    m_VertexCoorHandle = (GLuint) glGetAttribLocation(m_Program, "position");
    m_TextureCoorHandle = (GLuint) glGetAttribLocation(m_Program, "texcoord");
    m_MVPHandle = glGetUniformLocation(m_Program, "MVP");

    return m_Program;
}

// Create the 3 textures, one per YUV plane
bool GLByteFlowRender::CreateTextures()
{
    LOGCATE("GLByteFlowRender::CreateTextures");
    GLsizei yWidth = static_cast<GLsizei>(m_RenderFrame.width);
    GLsizei yHeight = static_cast<GLsizei>(m_RenderFrame.height);

    glActiveTexture(GL_TEXTURE0);
    glGenTextures(1, &m_YTextureId);
    glBindTexture(GL_TEXTURE_2D, m_YTextureId);
    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, yWidth, yHeight, 0, GL_LUMINANCE, GL_UNSIGNED_BYTE, NULL);
    if (!m_YTextureId)
    {
        GLUtils::CheckGLError("Create Y texture");
        return false;
    }

    GLsizei uWidth = static_cast<GLsizei>(m_RenderFrame.width / 2);
    GLsizei uHeight = yHeight / 2;
    glActiveTexture(GL_TEXTURE1);
    glGenTextures(1, &m_UTextureId);
    glBindTexture(GL_TEXTURE_2D, m_UTextureId);
    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, uWidth, uHeight, 0, GL_LUMINANCE, GL_UNSIGNED_BYTE, NULL);
    if (!m_UTextureId)
    {
        GLUtils::CheckGLError("Create U texture");
        return false;
    }

    GLsizei vWidth = static_cast<GLsizei>(m_RenderFrame.width / 2);
    GLsizei vHeight = (GLsizei) yHeight / 2;
    glActiveTexture(GL_TEXTURE2);
    glGenTextures(1, &m_VTextureId);
    glBindTexture(GL_TEXTURE_2D, m_VTextureId);
    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, vWidth, vHeight, 0, GL_LUMINANCE, GL_UNSIGNED_BYTE, NULL);
    if (!m_VTextureId)
    {
        GLUtils::CheckGLError("Create V texture");
        return false;
    }

    return true;
}

// Update the textures each time a new frame arrives
bool GLByteFlowRender::UpdateTextures()
{
    LOGCATE("GLByteFlowRender::UpdateTextures");
    if (m_RenderFrame.ppPlane[0] == NULL)
    {
        return false;
    }

    if (!m_YTextureId && !m_UTextureId && !m_VTextureId && !CreateTextures())
    {
        return false;
    }

    glActiveTexture(GL_TEXTURE0);
    glBindTexture(GL_TEXTURE_2D, m_YTextureId);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, (GLsizei) m_RenderFrame.width,
                 (GLsizei) m_RenderFrame.height, 0, GL_LUMINANCE, GL_UNSIGNED_BYTE,
                 m_RenderFrame.ppPlane[0]);

    glActiveTexture(GL_TEXTURE1);
    glBindTexture(GL_TEXTURE_2D, m_UTextureId);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, (GLsizei) m_RenderFrame.width >> 1,
                 (GLsizei) m_RenderFrame.height >> 1, 0, GL_LUMINANCE, GL_UNSIGNED_BYTE,
                 m_RenderFrame.ppPlane[1]);

    glActiveTexture(GL_TEXTURE2);
    glBindTexture(GL_TEXTURE_2D, m_VTextureId);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, (GLsizei) m_RenderFrame.width >> 1,
                 (GLsizei) m_RenderFrame.height >> 1, 0, GL_LUMINANCE, GL_UNSIGNED_BYTE,
                 m_RenderFrame.ppPlane[2]);
    return true;
}

// Bind the textures to the program and supply vertex and texture coordinate data
GLuint GLByteFlowRender::UseProgram()
{
    LOGCATE("GLByteFlowRender::UseProgram");
    ByteFlowLock lock(&m_ShaderBufLock);
    if (m_IsShaderChanged)
    {
        GLUtils::DeleteProgram(m_Program);
        CreateProgram(kVertexShader, m_pFragShaderBuf);
        m_IsShaderChanged = false;
        m_IsProgramChanged = true;
    }

    if (!m_Program)
    {
        LOGCATE("GLByteFlowRender::UseProgram Could not use program.");
        return 0;
    }

    if (m_IsProgramChanged)
    {
        glUseProgram(m_Program);
        GLUtils::CheckGLError("GLByteFlowRender::UseProgram");

        glVertexAttribPointer(m_VertexCoorHandle, 2, GL_FLOAT, GL_FALSE, 2 * 4, VERTICES_COORS);
        glEnableVertexAttribArray(m_VertexCoorHandle);

        // Bind texture units 0/1/2 to the Y/U/V samplers
        glUniform1i(m_YTextureHandle, 0);
        glUniform1i(m_UTextureHandle, 1);
        glUniform1i(m_VTextureHandle, 2);

        glVertexAttribPointer(m_TextureCoorHandle, 2, GL_FLOAT, GL_FALSE, 2 * 4, TEXTURE_COORS);
        glEnableVertexAttribArray(m_TextureCoorHandle);
        m_IsProgramChanged = false;
    }
    return m_Program;
}

// Render the preview image
void GLByteFlowRender::OnDrawFrame()
{
    LOGCATE("GLByteFlowRender::OnDrawFrame");
    glViewport(0, 0, m_ViewportWidth, m_ViewportHeight);
    glClearColor(0.0f, 0.0f, 0.0f, 1.0f); // set the clear color before clearing
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
    glDisable(GL_CULL_FACE);
    if (!UpdateTextures() || !UseProgram())
    {
        LOGCATE("GLByteFlowRender::OnDrawFrame skip frame");
        return;
    }
    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
}
A follow-up article will add filter effects on top of this preview implementation.