1. OpenGL generates a texture
2. The texture is bound to a SurfaceTexture
3. A Surface is created with the SurfaceTexture as its constructor argument
4. MediaCodec decodes video into that Surface, and the picture shows up on screen (see the decoder-side sketch below)
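The post only lists the rendering side, so for context here is a minimal, hedged sketch of the decoder side of step 4. DecoderSketch, decodeInto, the timeout values, and the file-path parameter are illustrative assumptions, not from the original; the Surface parameter is the one VideoRender hands out in onSurfaceCreate() further down.

import android.media.MediaCodec;
import android.media.MediaExtractor;
import android.media.MediaFormat;
import android.view.Surface;

import java.nio.ByteBuffer;

public class DecoderSketch {
    // Sketch only: decode a video file into the Surface that VideoRender
    // hands out in onSurfaceCreate(). Names here are illustrative assumptions.
    public static void decodeInto(Surface surface, String path) throws Exception {
        MediaExtractor extractor = new MediaExtractor();
        extractor.setDataSource(path);

        // Select the first video track
        MediaFormat format = null;
        for (int i = 0; i < extractor.getTrackCount(); i++) {
            MediaFormat f = extractor.getTrackFormat(i);
            String mime = f.getString(MediaFormat.KEY_MIME);
            if (mime != null && mime.startsWith("video/")) {
                extractor.selectTrack(i);
                format = f;
                break;
            }
        }
        if (format == null) return;

        // Step 4 of the pipeline: configuring the codec with the Surface makes
        // every rendered output buffer appear as a frame on the SurfaceTexture
        MediaCodec decoder = MediaCodec.createDecoderByType(format.getString(MediaFormat.KEY_MIME));
        decoder.configure(format, surface, null, 0);
        decoder.start();

        MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
        boolean inputDone = false;
        while (true) {
            if (!inputDone) {
                int inIndex = decoder.dequeueInputBuffer(10_000);
                if (inIndex >= 0) {
                    ByteBuffer inBuf = decoder.getInputBuffer(inIndex);
                    int size = extractor.readSampleData(inBuf, 0);
                    if (size < 0) {
                        decoder.queueInputBuffer(inIndex, 0, 0, 0,
                                MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                        inputDone = true;
                    } else {
                        decoder.queueInputBuffer(inIndex, 0, size,
                                extractor.getSampleTime(), 0);
                        extractor.advance();
                    }
                }
            }
            int outIndex = decoder.dequeueOutputBuffer(info, 10_000);
            if (outIndex >= 0) {
                // render=true pushes the frame to the Surface, which fires
                // SurfaceTexture.onFrameAvailable() in VideoRender
                decoder.releaseOutputBuffer(outIndex, true);
                if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) break;
            }
        }
        decoder.stop();
        decoder.release();
        extractor.release();
    }
}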
Straight to the code:
vertex_shader.glsl
attribute vec4 av_Position;
attribute vec2 af_Position;
varying vec2 v_texPosition;

void main() {
    v_texPosition = af_Position;
    gl_Position = av_Position;
}
fragment_mediacodec.glsl
#extension GL_OES_EGL_image_external : require
precision mediump float;
varying vec2 v_texPosition;
// samplerExternalOES is required to sample video frames from a SurfaceTexture
uniform samplerExternalOES sTexture;

void main() {
    gl_FragColor = texture2D(sTexture, v_texPosition);
}
VideoRender.java
import android.content.Context;
import android.graphics.SurfaceTexture;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.opengl.GLSurfaceView;
import android.view.Surface;

import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;

import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;

public class VideoRender implements GLSurfaceView.Renderer, SurfaceTexture.OnFrameAvailableListener {

    private Context context;

    private final float[] vertexData = {
            -1f, -1f,
             1f, -1f,
            -1f,  1f,
             1f,  1f
    };

    private final float[] textureData = {
            0f, 1f,
            1f, 1f,
            0f, 0f,
            1f, 0f
    };

    private FloatBuffer vertexBuffer;
    private FloatBuffer textureBuffer;

    // MediaCodec path
    private int program_mediacodec;
    private int avPosition_mediacodec;
    private int afPosition_mediacodec;
    private int samplerOES_mediacodec;
    private int textureId_mediacodec;
    private SurfaceTexture surfaceTexture;
    private Surface surface;

    private OnSurfaceCreateListener onSurfaceCreateListener;
    private OnRenderListener onRenderListener;

    public VideoRender(Context context) {
        this.context = context;
        vertexBuffer = ByteBuffer.allocateDirect(vertexData.length * 4)
                .order(ByteOrder.nativeOrder())
                .asFloatBuffer()
                .put(vertexData);
        vertexBuffer.position(0);

        textureBuffer = ByteBuffer.allocateDirect(textureData.length * 4)
                .order(ByteOrder.nativeOrder())
                .asFloatBuffer()
                .put(textureData);
        textureBuffer.position(0);
    }

    public void setOnSurfaceCreateListener(OnSurfaceCreateListener onSurfaceCreateListener) {
        this.onSurfaceCreateListener = onSurfaceCreateListener;
    }

    public void setOnRenderListener(OnRenderListener onRenderListener) {
        this.onRenderListener = onRenderListener;
    }

    @Override
    public void onSurfaceCreated(GL10 gl, EGLConfig config) {
        initRenderMediacodec();
    }

    @Override
    public void onSurfaceChanged(GL10 gl, int width, int height) {
        GLES20.glViewport(0, 0, width, height);
    }

    @Override
    public void onDrawFrame(GL10 gl) {
        // Set the clear color before clearing, not after
        GLES20.glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
        renderMediacodec();
        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
    }

    @Override
    public void onFrameAvailable(SurfaceTexture surfaceTexture) {
        // Forward onFrameAvailable to the GLSurfaceView, which calls
        // requestRender() to trigger onDrawFrame()
        if (onRenderListener != null) {
            onRenderListener.onRender();
        }
    }

    private void initRenderMediacodec() {
        String vertexSource = ShaderUtil.readRawTxt(context, R.raw.vertex_shader);
        String fragmentSource = ShaderUtil.readRawTxt(context, R.raw.fragment_mediacodec);
        program_mediacodec = ShaderUtil.createProgram(vertexSource, fragmentSource);

        avPosition_mediacodec = GLES20.glGetAttribLocation(program_mediacodec, "av_Position");
        afPosition_mediacodec = GLES20.glGetAttribLocation(program_mediacodec, "af_Position");
        samplerOES_mediacodec = GLES20.glGetUniformLocation(program_mediacodec, "sTexture");

        int[] textureids = new int[1];
        GLES20.glGenTextures(1, textureids, 0);
        textureId_mediacodec = textureids[0];

        // Bind the texture before setting its parameters; also note that
        // external OES textures only support CLAMP_TO_EDGE wrapping
        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textureId_mediacodec);
        GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
        GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
        GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
        GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);

        surfaceTexture = new SurfaceTexture(textureId_mediacodec);
        surface = new Surface(surfaceTexture);
        surfaceTexture.setOnFrameAvailableListener(this);

        if (onSurfaceCreateListener != null) {
            // Hand the Surface back out so MediaCodec can be configured to render into it
            onSurfaceCreateListener.onSurfaceCreate(surface);
        }
    }

    private void renderMediacodec() {
        // Latch the latest decoded frame onto the OES texture
        surfaceTexture.updateTexImage();
        GLES20.glUseProgram(program_mediacodec);

        GLES20.glEnableVertexAttribArray(avPosition_mediacodec);
        GLES20.glVertexAttribPointer(avPosition_mediacodec, 2, GLES20.GL_FLOAT, false, 8, vertexBuffer);
        GLES20.glEnableVertexAttribArray(afPosition_mediacodec);
        GLES20.glVertexAttribPointer(afPosition_mediacodec, 2, GLES20.GL_FLOAT, false, 8, textureBuffer);

        GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textureId_mediacodec);
        GLES20.glUniform1i(samplerOES_mediacodec, 0);
    }

    public interface OnSurfaceCreateListener {
        void onSurfaceCreate(Surface surface);
    }

    public interface OnRenderListener {
        void onRender();
    }
}
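VideoRender calls ShaderUtil.readRawTxt and ShaderUtil.createProgram, but the post does not include that helper. Here is a minimal sketch of what it plausibly looks like, assuming the behavior implied by the call sites:

import android.content.Context;
import android.opengl.GLES20;

import java.io.BufferedReader;
import java.io.InputStream;
import java.io.InputStreamReader;

public class ShaderUtil {
    // Read a raw resource (the .glsl files above) into a String
    public static String readRawTxt(Context context, int rawId) {
        StringBuilder sb = new StringBuilder();
        try {
            InputStream in = context.getResources().openRawResource(rawId);
            BufferedReader reader = new BufferedReader(new InputStreamReader(in));
            String line;
            while ((line = reader.readLine()) != null) {
                sb.append(line).append("\n");
            }
            reader.close();
        } catch (Exception e) {
            e.printStackTrace();
        }
        return sb.toString();
    }

    private static int loadShader(int type, String source) {
        int shader = GLES20.glCreateShader(type);
        GLES20.glShaderSource(shader, source);
        GLES20.glCompileShader(shader);
        int[] compiled = new int[1];
        GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);
        if (compiled[0] == 0) {
            GLES20.glDeleteShader(shader);
            return 0;
        }
        return shader;
    }

    // Compile both shaders and link them into a program
    public static int createProgram(String vertexSource, String fragmentSource) {
        int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource);
        int fragmentShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
        int program = GLES20.glCreateProgram();
        GLES20.glAttachShader(program, vertexShader);
        GLES20.glAttachShader(program, fragmentShader);
        GLES20.glLinkProgram(program);
        return program;
    }
}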
VideoGLSurfaceView.java
import android.content.Context;
import android.opengl.GLSurfaceView;
import android.util.AttributeSet;

public class VideoGLSurfaceView extends GLSurfaceView {

    private VideoRender render;

    public VideoGLSurfaceView(Context context) {
        this(context, null);
    }

    public VideoGLSurfaceView(Context context, AttributeSet attrs) {
        super(context, attrs);
        setEGLContextClientVersion(2);
        render = new VideoRender(context);
        setRenderer(render);
        // Only render when requestRender() is called, i.e. when a new frame arrives
        setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
        render.setOnRenderListener(new VideoRender.OnRenderListener() {
            @Override
            public void onRender() {
                requestRender();
            }
        });
    }

    public VideoRender getWlRender() {
        return render;
    }
}
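Finally, a hedged sketch of how an Activity might wire the pieces together. PlayerActivity, the background thread, and the /sdcard/test.mp4 path are assumptions for illustration, with DecoderSketch carried over from the earlier sketch:

import android.app.Activity;
import android.os.Bundle;
import android.view.Surface;

public class PlayerActivity extends Activity {
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        final VideoGLSurfaceView glView = new VideoGLSurfaceView(this);
        setContentView(glView);

        // Steps 1-3 happen inside VideoRender; once the Surface exists,
        // hand it to the decoder (step 4) on a background thread
        glView.getWlRender().setOnSurfaceCreateListener(new VideoRender.OnSurfaceCreateListener() {
            @Override
            public void onSurfaceCreate(final Surface surface) {
                new Thread(new Runnable() {
                    @Override
                    public void run() {
                        try {
                            DecoderSketch.decodeInto(surface, "/sdcard/test.mp4"); // illustrative path
                        } catch (Exception e) {
                            e.printStackTrace();
                        }
                    }
                }).start();
            }
        });
    }
}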