目录
原理
OpenGL生成纹理并绑定到SurfaceTexture上,将Camera数据设置到SurfaceTexture上,然后就可以从OpenGL中获取到Camera数据了
使用扩展纹理
1、着色器纹理类型:
#extension GL_OES_EGL_image_external : require
precision mediump float;
varying vec2 ft_Position;
uniform samplerExternalOES sTexture;
void main() {
gl_FragColor=texture2D(sTexture, ft_Position);
}
2、GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, cameraTextureId);
相机MyCamera
package com.zhangyu.myopengl.testCamera;
import android.graphics.ImageFormat;
import android.graphics.SurfaceTexture;
import android.hardware.Camera;
import android.util.Log;
import java.io.IOException;
import java.util.List;
public class MyCamera {
    private static final String TAG = "MyCamera";

    private Camera camera;
    private SurfaceTexture surfaceTexture;
    // Target surface dimensions; getFitSize() may swap them into landscape order
    // because camera sizes are reported with width >= height.
    private int width;
    private int height;

    /**
     * @param surfaceTexture the GL-backed texture that receives preview frames
     * @param width          width of the rendering surface in pixels
     * @param height         height of the rendering surface in pixels
     */
    public MyCamera(SurfaceTexture surfaceTexture, int width, int height) {
        this.surfaceTexture = surfaceTexture;
        this.width = width;
        this.height = height;
    }

    /**
     * Opens the given camera, picks picture/preview sizes whose aspect ratio
     * matches the surface, and starts streaming into the SurfaceTexture.
     *
     * @param cameraId one of Camera.CameraInfo.CAMERA_FACING_*
     */
    public void startPreview(int cameraId) {
        try {
            camera = Camera.open(cameraId);
            camera.setPreviewTexture(surfaceTexture);
            Camera.Parameters parameters = camera.getParameters();
            // Use the SDK constant instead of the magic string "off".
            parameters.setFlashMode(Camera.Parameters.FLASH_MODE_OFF);
            parameters.setPreviewFormat(ImageFormat.NV21);
            Camera.Size size = getFitSize(parameters.getSupportedPictureSizes());
            parameters.setPictureSize(size.width, size.height);
            Log.e(TAG, "setPictureSize: size.width=" + size.width + "-----size.height=" + size.height);
            size = getFitSize(parameters.getSupportedPreviewSizes());
            parameters.setPreviewSize(size.width, size.height);
            Log.e(TAG, "setPreviewSize: size.width=" + size.width + "-----size.height=" + size.height);
            camera.setParameters(parameters);
            camera.startPreview();
            Log.e(TAG, "camera start preview");
        } catch (IOException e) {
            e.printStackTrace();
            // BUGFIX: release the half-configured camera on failure so the
            // device is not leaked (other apps would be unable to open it).
            stopPreview();
        }
    }

    /**
     * Stops the preview and releases the camera. Safe to call repeatedly.
     */
    public void stopPreview() {
        if (camera != null) {
            camera.stopPreview();
            camera.release();
            camera = null;
        }
    }

    /**
     * Switches to another camera by tearing down the current one (if any)
     * and restarting the preview.
     *
     * @param cameraId one of Camera.CameraInfo.CAMERA_FACING_*
     */
    public void changeCamera(int cameraId) {
        if (camera != null) {
            stopPreview();
        }
        startPreview(cameraId);
    }

    /**
     * Picks a camera size whose aspect ratio matches the surface's,
     * falling back to the first supported size when none matches.
     *
     * @param sizes supported sizes reported by the camera (landscape order)
     * @return the best-matching size
     */
    private Camera.Size getFitSize(List<Camera.Size> sizes) {
        // Normalize our dimensions to landscape order to match camera sizes.
        if (width < height) {
            int t = height;
            height = width;
            width = t;
        }
        // BUGFIX: compare aspect ratios with a small tolerance instead of
        // exact float equality, which misses ratios that differ only by
        // floating-point rounding (e.g. 1920/1080 vs 1280/720).
        float targetRatio = 1.0f * width / height;
        for (Camera.Size size : sizes) {
            if (Math.abs(1.0f * size.width / size.height - targetRatio) < 0.001f) {
                return size;
            }
        }
        return sizes.get(0);
    }
}
用于绘制的MyCameraView
package com.zhangyu.myopengl.testCamera;
import android.content.Context;
import android.graphics.SurfaceTexture;
import android.hardware.Camera;
import android.util.AttributeSet;
import android.util.Log;
import android.view.Surface;
import android.view.WindowManager;
import com.zhangyu.myopengl.egl.EGLSurfaceView;
public class MyCameraView extends EGLSurfaceView {
    private static final String TAG = "TestCameraView";

    private MyCamera myCamera;
    private MyCameraRender cameraRender;

    public MyCameraView(Context context) {
        this(context, null);
    }

    public MyCameraView(Context context, AttributeSet attrs) {
        this(context, attrs, 0);
    }

    public MyCameraView(Context context, AttributeSet attrs, int defStyleAttr) {
        super(context, attrs, defStyleAttr);
        Log.e(TAG, "onCreate: getWidth()=" + getWidth() + "-----getHeight()=" + getHeight());
        cameraRender = new MyCameraRender(context);
        setRender(cameraRender);
        previewAngle(context);
        // The camera can only be opened once the render thread has created the
        // SurfaceTexture that will receive the preview frames.
        cameraRender.setOnSurfaceCreateListener(new MyCameraRender.OnSurfaceCreateListener() {
            @Override
            public void onCreate(SurfaceTexture surfaceTexture) {
                Log.e(TAG, "onCreate: getWidth()=" + getWidth() + "-----getHeight()=" + getHeight());
                myCamera = new MyCamera(surfaceTexture, getWidth(), getHeight());
                myCamera.startPreview(Camera.CameraInfo.CAMERA_FACING_BACK);
            }
        });
    }

    @Override
    protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
        super.onMeasure(widthMeasureSpec, heightMeasureSpec);
        // TODO(review): hard-coded 1080x1920 ignores the measure specs and only
        // fits one screen size; derive the dimensions from the specs (or drop
        // this override) before reusing this view elsewhere.
        setMeasuredDimension(1080, 1920);
    }

    /**
     * Releases the camera. Call from the host Activity/Fragment teardown.
     */
    public void onDestroy() {
        if (myCamera != null) {
            myCamera.stopPreview();
            // Drop the reference so a stale camera is never reused.
            myCamera = null;
        }
    }

    /**
     * @deprecated Misspelled name kept for existing callers; use {@link #onDestroy()}.
     */
    @Deprecated
    public void onDestory() {
        onDestroy();
    }

    /**
     * Rotates the render matrix so the camera image appears upright for the
     * current display rotation. ROTATION_90/180 intentionally apply no extra
     * rotation in this sample.
     */
    public void previewAngle(Context context) {
        cameraRender.resetMatrix();
        WindowManager windowManager = (WindowManager) context.getSystemService(Context.WINDOW_SERVICE);
        int angle = windowManager.getDefaultDisplay().getRotation();
        switch (angle) {
            case Surface.ROTATION_0:
                cameraRender.setAngle(-90, 0, 0, 1);
                break;
            case Surface.ROTATION_90:
                break;
            case Surface.ROTATION_180:
                break;
            case Surface.ROTATION_270:
                cameraRender.setAngle(180, 0, 0, 1);
                break;
        }
    }
}
MyCameraRender
package com.zhangyu.myopengl.testCamera;
import android.content.Context;
import android.graphics.SurfaceTexture;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.opengl.Matrix;
import android.util.Log;
import com.zhangyu.myopengl.R;
import com.zhangyu.myopengl.egl.EGLSurfaceView;
import com.zhangyu.myopengl.egl.EGLUtils;
import com.zhangyu.myopengl.testFBO.FboRender;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
public class MyCameraRender implements EGLSurfaceView.EGLRender, SurfaceTexture.OnFrameAvailableListener {
    private static final String TAG = "CameraRender";

    // Full-screen quad in normalized device coordinates (triangle-strip order).
    float[] vertexData = {
            -1f, -1f,
            1f, -1.0f,
            -1f, 1.0f,
            1f, 1f
    };
    // Texture coordinates matching the quad above.
    float[] fragmentData = {
            0f, 0f,
            1f, 0,
            0f, 1f,
            1f, 1f
    };

    // Native (direct) buffers backing the two coordinate arrays.
    FloatBuffer vertexBuffer;
    FloatBuffer fragmentBuffer;
    private Context context;

    public MyCameraRender(Context context) {
        this.context = context;
        fboRender = new FboRender(context);
        vertexBuffer = ByteBuffer.allocateDirect(vertexData.length * 4)
                .order(ByteOrder.nativeOrder())
                .asFloatBuffer()
                .put(vertexData);
        vertexBuffer.position(0);
        fragmentBuffer = ByteBuffer.allocateDirect(fragmentData.length * 4)
                .order(ByteOrder.nativeOrder())
                .asFloatBuffer()
                .put(fragmentData);
        fragmentBuffer.position(0);
        resetMatrix();
    }

    private int program;
    private int vPosition;
    private int fPosition;
    private int sampler;
    private int vboId;
    private int fboId;
    private int uMatrix;
    private float[] matrix = new float[16];
    // Color attachment of the FBO; handed to fboRender for the on-screen pass.
    private int textureId;
    // External OES texture the camera pushes frames into via surfaceTexture.
    private int cameraTextureId;
    private SurfaceTexture surfaceTexture;
    private FboRender fboRender;

    @Override
    public void onSurfaceCreated() {
        fboRender.onCreate();
        String vertexSource = EGLUtils.readRawTxt(context, R.raw.vertex_shader_matrix);
        String fragmentSource = EGLUtils.readRawTxt(context, R.raw.fragment_shader_camera);
        program = EGLUtils.createProgram(vertexSource, fragmentSource);
        // Look up shader handles.
        vPosition = GLES20.glGetAttribLocation(program, "av_Position");
        fPosition = GLES20.glGetAttribLocation(program, "af_Position");
        uMatrix = GLES20.glGetUniformLocation(program, "u_Matrix");
        // BUGFIX: fetch the sampler location; the original left it at 0 and
        // set it with glUniform1i while no program was bound (GL_INVALID_OPERATION).
        sampler = GLES20.glGetUniformLocation(program, "sTexture");

        // Create the VBO and upload vertex + texture coordinates into one buffer.
        int[] vbos = new int[1];
        GLES20.glGenBuffers(1, vbos, 0);
        vboId = vbos[0];
        GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, vboId);
        GLES20.glBufferData(GLES20.GL_ARRAY_BUFFER, vertexData.length * 4 + fragmentData.length * 4, null, GLES20.GL_STATIC_DRAW);
        // Vertex coordinates at offset 0, texture coordinates right after them.
        GLES20.glBufferSubData(GLES20.GL_ARRAY_BUFFER, 0, vertexData.length * 4, vertexBuffer);
        GLES20.glBufferSubData(GLES20.GL_ARRAY_BUFFER, vertexData.length * 4, fragmentData.length * 4, fragmentBuffer);
        GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, 0);

        // Create the FBO.
        // BUGFIX: framebuffer names must come from glGenFramebuffers, not
        // glGenBuffers (buffer-object and framebuffer name spaces are distinct).
        int[] fbos = new int[1];
        GLES20.glGenFramebuffers(1, fbos, 0);
        fboId = fbos[0];
        GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, fboId);

        // Create the FBO color texture.
        int[] textureIds = new int[1];
        GLES20.glGenTextures(1, textureIds, 0);
        textureId = textureIds[0];
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId);
        // Wrap and filter modes for the offscreen color buffer.
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_REPEAT);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_REPEAT);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
        // Allocate storage for the offscreen target.
        // TODO(review): 1080x1920 is hard-coded to match MyCameraView.onMeasure;
        // should track the real surface size.
        GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, 1080, 1920, 0, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, null);
        // Attach the texture as the FBO's color buffer.
        GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0, GLES20.GL_TEXTURE_2D, textureId, 0);
        if (GLES20.glCheckFramebufferStatus(GLES20.GL_FRAMEBUFFER) != GLES20.GL_FRAMEBUFFER_COMPLETE) {
            Log.e(TAG, "创建fbo失败");
        } else {
            Log.e(TAG, "创建fbo成功");
        }
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
        GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);

        // Create the external OES texture the camera renders into.
        int[] cameraTextureIds = new int[1];
        GLES20.glGenTextures(1, cameraTextureIds, 0);
        cameraTextureId = cameraTextureIds[0];
        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, cameraTextureId);
        GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_REPEAT);
        GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_REPEAT);
        GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
        GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
        // Wrap the OES texture in a SurfaceTexture and hand it to the camera.
        surfaceTexture = new SurfaceTexture(cameraTextureId);
        surfaceTexture.setOnFrameAvailableListener(this);
        if (onSurfaceCreateListener != null) {
            onSurfaceCreateListener.onCreate(surfaceTexture);
        }
        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0);
    }

    /** Resets the transform matrix to identity. */
    public void resetMatrix() {
        Matrix.setIdentityM(matrix, 0);
    }

    /** Post-multiplies a rotation of {@code angle} degrees about (x, y, z). */
    public void setAngle(float angle, float x, float y, float z) {
        Matrix.rotateM(matrix, 0, angle, x, y, z);
    }

    @Override
    public void onSurfaceChanged(int width, int height) {
        Log.e(TAG, "onSurfaceChanged: width=" + width + "-----height=" + height);
        GLES20.glViewport(0, 0, width, height);
        fboRender.onChange(width, height);
    }

    @Override
    public void onDrawFrame() {
        // Latch the newest camera frame into the OES texture.
        surfaceTexture.updateTexImage();

        // First pass: render the camera frame into the FBO.
        // BUGFIX: bind the FBO *before* clearing (the original cleared the
        // default framebuffer instead), and set the clear color *before*
        // glClear (the original order delayed the color by one frame).
        GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, fboId);
        GLES20.glClearColor(0, 0, 1, 1);
        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);

        GLES20.glUseProgram(program);
        GLES20.glUniformMatrix4fv(uMatrix, 1, false, matrix, 0);
        // Feed the camera texture through texture unit 0.
        GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, cameraTextureId);
        GLES20.glUniform1i(sampler, 0);

        // Source the quad from the VBO.
        GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, vboId);
        GLES20.glEnableVertexAttribArray(vPosition);
        GLES20.glVertexAttribPointer(vPosition, 2, GLES20.GL_FLOAT, false, 8, 0);
        GLES20.glEnableVertexAttribArray(fPosition);
        GLES20.glVertexAttribPointer(fPosition, 2, GLES20.GL_FLOAT, false, 8, vertexData.length * 4);
        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);

        // Unbind everything used by the offscreen pass.
        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0);
        GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, 0);
        GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);

        // Second pass: draw the FBO color texture to the screen.
        fboRender.onDraw(textureId);
    }

    /**
     * Called on the camera thread when a new frame is queued; rendering is
     * driven by the EGL render loop, so nothing to do here.
     *
     * @param surfaceTexture the texture that received the frame
     */
    @Override
    public void onFrameAvailable(SurfaceTexture surfaceTexture) {
    }

    private OnSurfaceCreateListener onSurfaceCreateListener;

    public void setOnSurfaceCreateListener(OnSurfaceCreateListener onSurfaceCreateListener) {
        this.onSurfaceCreateListener = onSurfaceCreateListener;
    }

    /** Notifies when the camera SurfaceTexture is ready to receive frames. */
    public interface OnSurfaceCreateListener {
        void onCreate(SurfaceTexture surfaceTexture);
    }
}
FboRender
package com.zhangyu.myopengl.testFBO;
import android.content.Context;
import android.opengl.GLES20;
import com.zhangyu.myopengl.R;
import com.zhangyu.myopengl.egl.EGLUtils;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
public class FboRender {
    private Context context;

    // Full-screen quad in normalized device coordinates (triangle-strip order).
    private float[] vertexData = {
            -1f, -1f,
            1f, -1f,
            -1f, 1f,
            1f, 1f
    };
    private FloatBuffer vertexBuffer;

    // Texture coordinates (flipped vertically relative to the quad, which
    // compensates for the FBO texture's bottom-up orientation).
    private float[] fragmentData = {
            0f, 1f,
            1f, 1f,
            0f, 0f,
            1f, 0f
    };
    private FloatBuffer fragmentBuffer;

    private int program;
    private int vPosition;
    private int fPosition;
    private int sampler;
    private int vboId;

    public FboRender(Context context) {
        this.context = context;
        vertexBuffer = ByteBuffer.allocateDirect(vertexData.length * 4)
                .order(ByteOrder.nativeOrder())
                .asFloatBuffer()
                .put(vertexData);
        vertexBuffer.position(0);
        fragmentBuffer = ByteBuffer.allocateDirect(fragmentData.length * 4)
                .order(ByteOrder.nativeOrder())
                .asFloatBuffer()
                .put(fragmentData);
        fragmentBuffer.position(0);
    }

    /**
     * Compiles the on-screen shader program and uploads the quad into a VBO.
     * Must be called with the EGL context current.
     */
    public void onCreate() {
        String vertexSource = EGLUtils.readRawTxt(context, R.raw.vertex_shader);
        String fragmentSource = EGLUtils.readRawTxt(context, R.raw.fragment_shader);
        program = EGLUtils.createProgram(vertexSource, fragmentSource);
        vPosition = GLES20.glGetAttribLocation(program, "av_Position");
        fPosition = GLES20.glGetAttribLocation(program, "af_Position");
        sampler = GLES20.glGetUniformLocation(program, "sTexture");
        int[] vbos = new int[1];
        GLES20.glGenBuffers(1, vbos, 0);
        vboId = vbos[0];
        GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, vboId);
        // Single VBO: vertex coordinates first, texture coordinates appended.
        GLES20.glBufferData(GLES20.GL_ARRAY_BUFFER, vertexData.length * 4 + fragmentData.length * 4, null, GLES20.GL_STATIC_DRAW);
        GLES20.glBufferSubData(GLES20.GL_ARRAY_BUFFER, 0, vertexData.length * 4, vertexBuffer);
        GLES20.glBufferSubData(GLES20.GL_ARRAY_BUFFER, vertexData.length * 4, fragmentData.length * 4, fragmentBuffer);
        GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, 0);
    }

    /** Matches the viewport to the new surface size. */
    public void onChange(int width, int height) {
        GLES20.glViewport(0, 0, width, height);
    }

    /**
     * Draws the given 2D texture as a full-screen quad onto the currently
     * bound (default) framebuffer.
     *
     * @param textureId the FBO color texture produced by the offscreen pass
     */
    public void onDraw(int textureId) {
        // BUGFIX: set the clear color before glClear; the original order made
        // the red clear color take effect only on the following frame.
        GLES20.glClearColor(1f, 0f, 0f, 1f);
        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
        GLES20.glUseProgram(program);
        // BUGFIX: explicitly select texture unit 0 and point the sampler
        // uniform at it instead of relying on uninitialized defaults.
        GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId);
        GLES20.glUniform1i(sampler, 0);
        GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, vboId);
        GLES20.glEnableVertexAttribArray(vPosition);
        GLES20.glVertexAttribPointer(vPosition, 2, GLES20.GL_FLOAT, false, 8,
                0);
        GLES20.glEnableVertexAttribArray(fPosition);
        GLES20.glVertexAttribPointer(fPosition, 2, GLES20.GL_FLOAT, false, 8,
                vertexData.length * 4);
        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
        GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, 0);
    }
}