Having just worked through the texture-related parts of OpenGL, I can finally continue the Android audio/video series.
This post shows how to preview the camera with OpenGL ES 3.0, and provides implementations for both the Camera and Camera2 APIs.
Vertex shader
#version 300 es
layout (location = 0) in vec4 vPosition;
layout (location = 1) in vec4 aTextureCoord;
//texture transform matrix
uniform mat4 uTextureMatrix;
out vec2 yuvTexCoords;
void main() {
gl_Position = vPosition;
gl_PointSize = 10.0;
//keep only the x and y components
yuvTexCoords = (uTextureMatrix * aTextureCoord).xy;
}
Fragment shader
#version 300 es
//OpenGL ES 3.0 external texture extension
#extension GL_OES_EGL_image_external_essl3 : require
precision mediump float;
uniform samplerExternalOES yuvTexSampler;
in vec2 yuvTexCoords;
out vec4 vFragColor;
void main() {
vFragColor = texture(yuvTexSampler, yuvTexCoords);
}
The sampler type must be samplerExternalOES, not the sampler2D we used earlier when rendering images.
As we know, the raw frames an Android camera produces are generally YUV data, while most texture IDs used in OpenGL are in RGBA format, so the raw data cannot be rendered directly with OpenGL ES. That is why we add the extension #extension GL_OES_EGL_image_external_essl3 : require, which defines the texture target GL_TEXTURE_EXTERNAL_OES. When we bind the texture later, it must be bound to GL_TEXTURE_EXTERNAL_OES rather than GL_TEXTURE_2D.
/**
 * Load an external (OES) texture.
 * @return the generated texture ID
 */
public int loadTexture() {
int[] tex = new int[1];
//Generate a texture
GLES30.glGenTextures(1, tex, 0);
//Bind it to the external (OES) texture target
GLES30.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, tex[0]);
//Set texture filtering and wrapping parameters
GLES30.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES30.GL_TEXTURE_MIN_FILTER, GLES30.GL_NEAREST);
GLES30.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES30.GL_TEXTURE_MAG_FILTER, GLES30.GL_LINEAR);
GLES30.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES30.GL_TEXTURE_WRAP_S, GLES30.GL_CLAMP_TO_EDGE);
GLES30.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES30.GL_TEXTURE_WRAP_T, GLES30.GL_CLAMP_TO_EDGE);
//Unbind the texture
GLES30.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0);
return tex[0];
}
public CameraSurfaceRenderer(GLSurfaceView glSurfaceView) {
//Use the front-facing camera
this.mCameraId = Camera.CameraInfo.CAMERA_FACING_FRONT;
//The GLSurfaceView passed in
this.mGLSurfaceView = glSurfaceView;
// Open the camera
mCamera = Camera.open(mCameraId);
// Set the preview orientation (a sketch of this helper follows below)
setCameraDisplayOrientation(mCameraId, mCamera);
......
}
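setCameraDisplayOrientation is not listed in this post. A minimal sketch, following the pattern from the Android Camera documentation, might look like this (the mActivity field is my assumption; the project's real version may differ):
//Sketch of setCameraDisplayOrientation (based on the Android Camera docs);
//mActivity is assumed to be available in the renderer.
private void setCameraDisplayOrientation(int cameraId, Camera camera) {
    Camera.CameraInfo info = new Camera.CameraInfo();
    Camera.getCameraInfo(cameraId, info);
    int rotation = mActivity.getWindowManager().getDefaultDisplay().getRotation();
    int degrees = 0;
    switch (rotation) {
        case Surface.ROTATION_0:   degrees = 0;   break;
        case Surface.ROTATION_90:  degrees = 90;  break;
        case Surface.ROTATION_180: degrees = 180; break;
        case Surface.ROTATION_270: degrees = 270; break;
    }
    int result;
    if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
        result = (info.orientation + degrees) % 360;
        result = (360 - result) % 360;  //compensate for the front-camera mirror
    } else {
        result = (info.orientation - degrees + 360) % 360;
    }
    camera.setDisplayOrientation(result);
}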
//Set the clear color
GLES30.glClearColor(0.5f, 0.5f, 0.5f, 0.5f);
//Compile the shaders
final int vertexShaderId = RenderUtil.compileShader(GLES30.GL_VERTEX_SHADER,ResReadUtils.readResource(R.raw.vertex_camera_shader));
final int fragmentShaderId = RenderUtil.compileShader(GLES30.GL_FRAGMENT_SHADER,ResReadUtils.readResource(R.raw.fragment_camera_shader));
//Link the program
mProgram = RenderUtil.linkProgram(vertexShaderId, fragmentShaderId);
uTextureMatrixLocation = GLES30.glGetUniformLocation(mProgram, "uTextureMatrix");
//Get the locations of the variables defined in the shaders
uTextureSamplerLocation = GLES30.glGetUniformLocation(mProgram, "yuvTexSampler");
//Load the external texture
textureId = loadTexture();
//Create the SurfaceTexture and start the preview
loadSurfaceTexture(textureId);
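The compileShader and linkProgram helpers from RenderUtil are carried over from earlier posts and are not listed here. A minimal sketch, assuming standard GLES30 compile and link calls (the project's real versions may differ):
//Sketch of the shader helpers used above; error handling reduced to logging.
public static int compileShader(int type, String shaderSource) {
    int shaderId = GLES30.glCreateShader(type);
    GLES30.glShaderSource(shaderId, shaderSource);
    GLES30.glCompileShader(shaderId);
    int[] status = new int[1];
    GLES30.glGetShaderiv(shaderId, GLES30.GL_COMPILE_STATUS, status, 0);
    if (status[0] == 0) {
        Log.e("RenderUtil", GLES30.glGetShaderInfoLog(shaderId));
        GLES30.glDeleteShader(shaderId);
        return 0;
    }
    return shaderId;
}

public static int linkProgram(int vertexShaderId, int fragmentShaderId) {
    int programId = GLES30.glCreateProgram();
    GLES30.glAttachShader(programId, vertexShaderId);
    GLES30.glAttachShader(programId, fragmentShaderId);
    GLES30.glLinkProgram(programId);
    int[] status = new int[1];
    GLES30.glGetProgramiv(programId, GLES30.GL_LINK_STATUS, status, 0);
    if (status[0] == 0) {
        Log.e("RenderUtil", GLES30.glGetProgramInfoLog(programId));
        GLES30.glDeleteProgram(programId);
        return 0;
    }
    return programId;
}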
Back to the setup code above: apart from the last two lines, it is no different from loading an image. Those two lines correspond to steps 1 and 2 of the flow described earlier: create a texture and bind it as an external texture, then create a SurfaceTexture from that texture ID to serve as the camera preview output.
public boolean loadSurfaceTexture(int textureId) {
//Create a SurfaceTexture from the texture ID
mSurfaceTexture = new SurfaceTexture(textureId);
mSurfaceTexture.setOnFrameAvailableListener(new SurfaceTexture.OnFrameAvailableListener() {
@Override
public void onFrameAvailable(SurfaceTexture surfaceTexture) {
// Render the new frame
mGLSurfaceView.requestRender();
}
});
//Use the SurfaceTexture as the camera preview output
try {
mCamera.setPreviewTexture(mSurfaceTexture);
} catch (IOException e) {
e.printStackTrace();
return false;
}
//Start the camera preview
mCamera.startPreview();
return true;
}
@Override
public void onDrawFrame(GL10 gl) {
GLES30.glClear(GLES30.GL_COLOR_BUFFER_BIT);
//Use the program
GLES30.glUseProgram(mProgram);
//Update the texture image with the latest camera frame
mSurfaceTexture.updateTexImage();
mSurfaceTexture.getTransformMatrix(transformMatrix);
//Activate texture unit 0
GLES30.glActiveTexture(GLES30.GL_TEXTURE0);
//Bind the external texture to texture unit 0
GLES30.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textureId);
//Point the fragment shader's yuvTexSampler external sampler at this texture unit
GLES30.glUniform1i(uTextureSamplerLocation, 0);
//Pass the texture transform matrix to the vertex shader
GLES30.glUniformMatrix4fv(uTextureMatrixLocation, 1, false, transformMatrix, 0);
GLES30.glEnableVertexAttribArray(0);
GLES30.glVertexAttribPointer(0, 3, GLES30.GL_FLOAT, false, 0, vertexBuffer);
GLES30.glEnableVertexAttribArray(1);
GLES30.glVertexAttribPointer(1, 2, GLES30.GL_FLOAT, false, 0, mTexVertexBuffer);
// Draw
GLES30.glDrawElements(GLES30.GL_TRIANGLES, VERTEX_INDEX.length, GLES30.GL_UNSIGNED_SHORT, mVertexIndexBuffer);
}
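The vertex, texture-coordinate, and index data referenced in onDrawFrame (vertexBuffer, mTexVertexBuffer, VERTEX_INDEX, mVertexIndexBuffer) are not listed above. A minimal sketch for a full-screen quad follows; the exact values and the toFloatBuffer/toShortBuffer helpers are my assumption, and java.nio buffer classes are assumed to be imported:
//Sketch of the vertex data for a full-screen quad; the project's actual values may differ.
private static final float[] VERTEX = {
        -1f,  1f, 0f,   //top left
        -1f, -1f, 0f,   //bottom left
         1f, -1f, 0f,   //bottom right
         1f,  1f, 0f,   //top right
};
private static final float[] TEX_VERTEX = {
        0f, 1f,
        0f, 0f,
        1f, 0f,
        1f, 1f,
};
//Two triangles covering the quad.
private static final short[] VERTEX_INDEX = {0, 1, 2, 0, 2, 3};

private final FloatBuffer vertexBuffer = toFloatBuffer(VERTEX);
private final FloatBuffer mTexVertexBuffer = toFloatBuffer(TEX_VERTEX);
private final ShortBuffer mVertexIndexBuffer = toShortBuffer(VERTEX_INDEX);

private static FloatBuffer toFloatBuffer(float[] data) {
    FloatBuffer buffer = ByteBuffer.allocateDirect(data.length * 4)
            .order(ByteOrder.nativeOrder())
            .asFloatBuffer()
            .put(data);
    buffer.position(0);
    return buffer;
}

private static ShortBuffer toShortBuffer(short[] data) {
    ShortBuffer buffer = ByteBuffer.allocateDirect(data.length * 2)
            .order(ByteOrder.nativeOrder())
            .asShortBuffer()
            .put(data);
    buffer.position(0);
    return buffer;
}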
private void setupViews() {
//Create a GLSurfaceView
mGLSurfaceView = new GLSurfaceView(this);
mGLSurfaceView.setEGLContextClientVersion(3);
mGLSurfaceView.setRenderer(new CameraSurfaceRenderer(mGLSurfaceView));
setContentView(mGLSurfaceView);
}
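Since onFrameAvailable drives drawing through requestRender(), the GLSurfaceView presumably renders on demand. If so (this line is not shown above and is my assumption), setupViews would also include, after setRenderer:
//Render only when a new camera frame arrives; with the default continuous
//mode the preview still works, it just redraws every vsync.
mGLSurfaceView.setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);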
On the OpenGL side, nothing changes when switching to Camera2, so we only need to look at the Camera2 calls.
@Override
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
Log.e("Renderer", "onSurfaceCreated");
//Load the external texture
textureId = loadTexture();
//Create the SurfaceTexture up front so it is not null when the camera is opened
mSurfaceTexture = new SurfaceTexture(textureId);
//Set the clear color
GLES30.glClearColor(0.5f, 0.5f, 0.5f, 0.5f);
//Compile the shaders
final int vertexShaderId = RenderUtil.compileShader(GLES30.GL_VERTEX_SHADER, ResReadUtils.readResource(R.raw.vertex_camera_shader));
final int fragmentShaderId = RenderUtil.compileShader(GLES30.GL_FRAGMENT_SHADER, ResReadUtils.readResource(R.raw.fragment_camera_shader));
//Link the program
mProgram = RenderUtil.linkProgram(vertexShaderId, fragmentShaderId);
uTextureMatrixLocation = GLES30.glGetUniformLocation(mProgram, "uTextureMatrix");
//Get the locations of the variables defined in the shaders
uTextureSamplerLocation = GLES30.glGetUniformLocation(mProgram, "yuvTexSampler");
}
// Expose the SurfaceTexture to the camera code
public SurfaceTexture getSurfaceTexture() {
return mSurfaceTexture;
}
private void initCamera() {
cameraManager = (CameraManager) MyApplication.getApplication().getSystemService(Context.CAMERA_SERVICE);
//Get the list of output sizes supported by the given camera
outputSizes = getCameraOutputSizes(cameraId, SurfaceTexture.class);
photoSize = outputSizes.get(1);
}
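getCameraOutputSizes wraps a CameraCharacteristics query. A minimal sketch of what it might look like (the project's real helper may differ):
//Sketch of getCameraOutputSizes: query the sizes the camera can output
//for a given target class (here SurfaceTexture).
private List<Size> getCameraOutputSizes(int cameraId, Class<?> clazz) {
    try {
        CameraCharacteristics characteristics =
                cameraManager.getCameraCharacteristics(String.valueOf(cameraId));
        StreamConfigurationMap configs = characteristics.get(
                CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
        return Arrays.asList(configs.getOutputSizes(clazz));
    } catch (CameraAccessException e) {
        e.printStackTrace();
        return Collections.emptyList();
    }
}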
@SuppressLint("MissingPermission")
private void openCamera() {
try {
cameraManager.openCamera(String.valueOf(cameraId), cameraStateCallback, null);
} catch (CameraAccessException e) {
e.printStackTrace();
Log.e(TAG, "openCamera fail");
}
}
CameraDevice.StateCallback cameraStateCallback = new CameraDevice.StateCallback() {
@Override
public void onOpened(CameraDevice camera) {
//Get the SurfaceTexture from the renderer
surfaceTexture = camera2SurfaceRenderer.getSurfaceTexture();
if (surfaceTexture == null) {
return;
}
surfaceTexture.setDefaultBufferSize(photoSize.getWidth(), photoSize.getHeight());
surfaceTexture.setOnFrameAvailableListener(new SurfaceTexture.OnFrameAvailableListener() {
@Override
public void onFrameAvailable(final SurfaceTexture surfaceTexture) {
mGLSurfaceView.requestRender();
}
});
// Wrap the SurfaceTexture in a Surface to use as the preview target
surface = new Surface(surfaceTexture);
try {
cameraDevice = camera;
previewRequestBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
previewRequestBuilder.addTarget(surface);
previewRequest = previewRequestBuilder.build();
cameraDevice.createCaptureSession(Arrays.asList(surface), sessionsStateCallback, null);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
@Override
public void onDisconnected(CameraDevice camera) {
}
@Override
public void onError(CameraDevice camera, int error) {
Log.e(TAG, "Open onError");
}
};
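The sessionsStateCallback passed to createCaptureSession is not listed above. A minimal sketch that simply starts the repeating preview request built in onOpened (the original code may do more, e.g. update UI state):
//Sketch of the capture session callback: once the session is configured,
//start the repeating preview request.
CameraCaptureSession.StateCallback sessionsStateCallback = new CameraCaptureSession.StateCallback() {
    @Override
    public void onConfigured(CameraCaptureSession session) {
        try {
            session.setRepeatingRequest(previewRequest, null, null);
        } catch (CameraAccessException e) {
            e.printStackTrace();
        }
    }
    @Override
    public void onConfigureFailed(CameraCaptureSession session) {
        Log.e(TAG, "onConfigureFailed");
    }
};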
Source code