OpenGL ES 2.0 code for rendering YUV video data on Android. The example covers initializing the OpenGL environment, loading the shader program, and drawing the textures.

```java
import android.content.Context;
import android.opengl.GLES20;
import android.util.Log;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
public class YUVShaderProgram {
private int program;
private int avPosition_yuv;
private int afPosition_yuv;
private final FloatBuffer vertexBuffer;
private final FloatBuffer textureBuffer;
private int yTextureLocation;
private int uTextureLocation;
private int vTextureLocation;
private static final String VERTEX_SHADER =
"attribute vec4 av_Position;\n" +
"attribute vec2 af_Position;\n" +
"varying vec2 v_texCord;\n" +
"void main() {\n" +
"gl_Position = av_Position;\n" +
"v_texCord = af_Position;\n" +
"}\n";
private static final String FRAGMENT_SHADER =
"precision mediump float;\n" +
"uniform sampler2D sampler_y;\n" +
"uniform sampler2D sampler_u;\n" +
"uniform sampler2D sampler_v;\n" +
"varying vec2 v_texCord;\n" +
"void main() {\n" +
"vec4 c = vec4((texture2D(sampler_y, v_texCord).r - 16./255.) * 1.164);\n" +
"vec4 U = vec4(texture2D(sampler_u, v_texCord).r - 128./255.);\n" +
"vec4 V = vec4(texture2D(sampler_v, v_texCord).r - 128./255.);\n" +
"c += V * vec4(1.596, -0.813, 0, 0);\n" +
"c += U * vec4(0, -0.392, 2.017, 0);\n" +
"c.a = 1.0;\n" +
"gl_FragColor = c;\n" +
"}\n";
public YUVShaderProgram(Context context) {
// Vertex coordinates: a full-screen quad drawn as a triangle strip
float[] vertexData = {
-1f, -1f,
1f, -1f,
-1f, 1f,
1f, 1f
};
vertexBuffer = ByteBuffer.allocateDirect(vertexData.length * 4)
.order(ByteOrder.nativeOrder())
.asFloatBuffer()
.put(vertexData);
vertexBuffer.position(0);
// Texture coordinates (note the vertical flip to match the image's top-down row order)
float[] textureData = {
0f, 1f,
1f, 1f,
0f, 0f,
1f, 0f
};
textureBuffer = ByteBuffer.allocateDirect(textureData.length * 4)
.order(ByteOrder.nativeOrder())
.asFloatBuffer()
.put(textureData);
textureBuffer.position(0);
program = createProgram(VERTEX_SHADER, FRAGMENT_SHADER);
}
public void useProgram() {
GLES20.glUseProgram(program);
}
public void enableVertex(){
GLES20.glEnableVertexAttribArray(avPosition_yuv); // enable the vertex position attribute array
GLES20.glVertexAttribPointer(avPosition_yuv, 2, GLES20.GL_FLOAT, false, 0, vertexBuffer); // point the attribute at the vertex buffer
GLES20.glEnableVertexAttribArray(afPosition_yuv);
GLES20.glVertexAttribPointer(afPosition_yuv, 2, GLES20.GL_FLOAT, false, 0, textureBuffer);
}
// Look up vertex attribute locations (call after the program is linked)
public void loadLocation(){
avPosition_yuv = GLES20.glGetAttribLocation(program, "av_Position");
afPosition_yuv = GLES20.glGetAttribLocation(program, "af_Position");
}
// Look up the sampler uniform locations
public void setUniforms() {
yTextureLocation = GLES20.glGetUniformLocation(program, "sampler_y");
uTextureLocation = GLES20.glGetUniformLocation(program, "sampler_u");
vTextureLocation = GLES20.glGetUniformLocation(program, "sampler_v");
}
// Bind each sampler uniform to its texture unit (Y -> 0, U -> 1, V -> 2)
public void glUniform1i(){
GLES20.glUniform1i(yTextureLocation, 0);
GLES20.glUniform1i(uTextureLocation, 1);
GLES20.glUniform1i(vTextureLocation, 2);
}
private int loadShader(int type, String shaderCode) {
int shader = GLES20.glCreateShader(type);
GLES20.glShaderSource(shader, shaderCode);
GLES20.glCompileShader(shader);
// Log compile errors so a bad shader shows up in logcat instead of a black screen
int[] compiled = new int[1];
GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);
if (compiled[0] == 0) {
Log.e("YUVShaderProgram", "Shader compile failed: " + GLES20.glGetShaderInfoLog(shader));
}
return shader;
}
private int createProgram(String vertexShaderCode, String fragmentShaderCode) {
int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexShaderCode);
int fragmentShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentShaderCode);
int program = GLES20.glCreateProgram();
GLES20.glAttachShader(program, vertexShader);
GLES20.glAttachShader(program, fragmentShader);
GLES20.glLinkProgram(program);
return program;
}
}
```
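The fragment shader above implements the BT.601 limited-range ("video range") YUV-to-RGB conversion. Written for 8-bit values, the coefficients in the shader correspond to:

$$
\begin{aligned}
R &= 1.164\,(Y - 16) + 1.596\,(V - 128) \\
G &= 1.164\,(Y - 16) - 0.813\,(V - 128) - 0.392\,(U - 128) \\
B &= 1.164\,(Y - 16) + 2.017\,(U - 128)
\end{aligned}
$$

In the shader the sampled values are already normalized to [0, 1], which is why the offsets appear as `16./255.` and `128./255.`.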
```kotlin
import android.content.Context
import android.opengl.GLES20
import android.opengl.GLSurfaceView
import android.util.Log
import java.nio.ByteBuffer
import javax.microedition.khronos.egl.EGLConfig
import javax.microedition.khronos.opengles.GL10

class EffectRender(var mContext: Context, var glView: GLSurfaceView, var mWidth: Int, var mHeight: Int) : GLSurfaceView.Renderer {
private val TAG = "EffectRender"
private val textures = IntArray(3) // texture handles for the Y, U and V planes
private var cameraData: ByteArray? = null
private var yData: ByteArray? = null
private var uData: ByteArray? = null
private var vData: ByteArray? = null
private var shaderProgram: YUVShaderProgram? = null
override fun onSurfaceCreated(p0: GL10?, p1: EGLConfig?) {
GLES20.glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
shaderProgram = YUVShaderProgram(mContext)
shaderProgram?.useProgram()
shaderProgram?.loadLocation()
shaderProgram?.setUniforms()
// Create three textures, one per plane (Y, U and V)
GLES20.glGenTextures(3, textures, 0)
// Set wrap and filter modes for each plane texture
for (i in 0..2) {
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textures[i])
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE)
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE)
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST)
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST)
}
// Rows are uploaded tightly packed; width/2 for the U/V planes may not be a multiple of 4
GLES20.glPixelStorei(GLES20.GL_UNPACK_ALIGNMENT, 1)
}
override fun onSurfaceChanged(p0: GL10?, p1: Int, p2: Int) {
Log.i(TAG,"onSurfaceChanged = $p1 $p2")
GLES20.glViewport(0, 0, p1, p2);
}
override fun onDrawFrame(p0: GL10?) {
effectData()
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
shaderProgram?.useProgram()
shaderProgram?.enableVertex()
// Upload each plane into its own single-channel (GL_LUMINANCE) texture;
// the chroma planes are mWidth/2 x mHeight/2 for 4:2:0 data
yData?.let {
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textures[0]);
GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE, mWidth, mHeight, 0, GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, ByteBuffer.wrap(it));
}
uData?.let {
GLES20.glActiveTexture(GLES20.GL_TEXTURE1);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textures[1]);
GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE, mWidth/2, mHeight/2, 0, GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, ByteBuffer.wrap(it))
}
vData?.let {
GLES20.glActiveTexture(GLES20.GL_TEXTURE2);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textures[2]);
GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE, mWidth/2, mHeight/2, 0, GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, ByteBuffer.wrap(it));
}
shaderProgram?.glUniform1i()
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
}
// Split the incoming buffer into Y, U and V planes, assuming planar I420 layout
// (all Y bytes, then all U bytes, then all V bytes)
private fun effectData() {
cameraData?.let {
val yPlaneSize: Int = mWidth * mHeight
val uvPlaneSize: Int = yPlaneSize/4
yData = ByteArray(yPlaneSize)
System.arraycopy(it, 0, yData, 0, yPlaneSize)
uData = ByteArray(uvPlaneSize)
System.arraycopy(it, yPlaneSize, uData, 0, uvPlaneSize)
vData = ByteArray(uvPlaneSize)
System.arraycopy(it, yPlaneSize+uvPlaneSize, vData, 0, uvPlaneSize)
}
}
// Called from the camera callback thread; requests a redraw (RENDERMODE_WHEN_DIRTY)
fun refreshView(data: ByteArray) {
cameraData = data
glView.requestRender()
}
}
```
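Note that `effectData()` assumes the incoming buffer is planar I420 (all Y bytes, then all U bytes, then all V bytes). If the camera instead delivers NV21 (the Camera1 preview default, where V and U bytes are interleaved after the Y plane), the buffer has to be de-interleaved before it is handed to the renderer. A minimal sketch, assuming NV21 input; `nv21ToI420` is a hypothetical helper and not part of the code above:

```kotlin
// Convert an NV21 buffer (Y plane + interleaved V/U) into planar I420 (Y + U + V).
fun nv21ToI420(nv21: ByteArray, width: Int, height: Int): ByteArray {
    val ySize = width * height
    val i420 = ByteArray(ySize * 3 / 2)
    // The Y plane is identical in both layouts.
    System.arraycopy(nv21, 0, i420, 0, ySize)
    val uOffset = ySize             // U plane follows Y in I420
    val vOffset = ySize + ySize / 4 // V plane follows U
    var i = 0
    var src = ySize
    while (src + 1 < nv21.size) {
        i420[vOffset + i] = nv21[src]     // NV21 stores V first...
        i420[uOffset + i] = nv21[src + 1] // ...then U
        i++
        src += 2
    }
    return i420
}
```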
Usage example:
```xml
<android.opengl.GLSurfaceView
    android:id="@+id/glsurface_view"
    android:layout_width="match_parent"
    android:layout_height="match_parent" />
```
```kotlin
binding.glsurfaceView.setEGLContextClientVersion(2)
render = EffectRender(this, binding.glsurfaceView, 1920, 1080)
binding.glsurfaceView.setRenderer(render)
binding.glsurfaceView.renderMode = GLSurfaceView.RENDERMODE_WHEN_DIRTY
// Start the camera and feed each preview frame into the renderer
cameraHelper = CameraHelper()
cameraHelper?.init(this, "0", 1920, 1080, object : CameraHelper.CallBack {
    override fun onCameraPreviewCallBack(data: ByteArray?) {
        data?.let { render?.refreshView(it) }
    }
})
```
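`onCameraPreviewCallBack` runs on the camera's callback thread, while `onDrawFrame()` reads `cameraData` on the GL thread. A minimal sketch of one way to make that hand-off explicit, assuming the members live inside `EffectRender` as above (the `@Volatile` annotation is an addition, not part of the original class):

```kotlin
// Hypothetical tweak inside EffectRender: mark the shared frame buffer @Volatile
// so the write from the camera thread is visible to the GL thread.
@Volatile
private var cameraData: ByteArray? = null

fun refreshView(data: ByteArray) {
    cameraData = data      // written on the camera callback thread
    glView.requestRender() // onDrawFrame() will read it on the GL thread
}
```

With `RENDERMODE_WHEN_DIRTY`, frames are drawn only when `requestRender()` is called.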