android摄像头采集和预览-第二种方法
android摄像头采集和预览-几种方法介绍
http://blog.csdn.net/f562230829/article/details/78905530
第一种方法
http://blog.csdn.net/f562230829/article/details/78905652
demo地址
git clone git@gitee.com:audiocn/androidCamera.git
https://gitee.com/audiocn/androidCamera.git
GLSurfaceView + setPreviewTexture + opengl-es
这种方式现在比较常用了
在GLSurfaceView的onSurfaceCreated函数中创建SurfaceTexture,然后摄像头通过setPreviewTexture将图像渲染到SurfaceTexture,有图像更新时会触发onFrameAvailable回调,将图像填充到与SurfaceTexture关联的纹理id上,
使用opengl-es将纹理渲染到屏幕上完成预览。
主要代码
开启权限
开启摄像头需要在 AndroidManifest.xml 中添加摄像头权限
<uses-permission android:name="android.permission.CAMERA"/>
注:Android 6.0(API 23)及以上版本,还需要在运行时动态申请 CAMERA 权限。
GLSurfaceView
<!-- Preview surface. fill_parent has been deprecated since API 8; match_parent is the modern equivalent. -->
<android.opengl.GLSurfaceView
    android:id="@+id/glSurfaceView"
    android:layout_width="match_parent"
    android:layout_height="match_parent" />
UI线程中处理 GLSurfaceView和camera
// Activity entry point: inflates the demo layout, sets the title, then wires
// the GLSurfaceView to the camera controller via initView().
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_demo2);
setTitle("Demo2");
// Look up the GLSurfaceView and hand it to the camera controller.
initView();
}
// Finds the GLSurfaceView in the layout and asks the singleton camera
// controller to configure it for camera preview rendering.
private void initView(){
try {
mGlSurfaceView = (GLSurfaceView) findViewById(R.id.glSurfaceView);
CameraController.getInstance().openCamera(mGlSurfaceView);
}catch (Exception e){
// NOTE(review): errors (e.g. missing CAMERA permission) are only printed to
// stderr, and openCamera's boolean result is ignored — consider surfacing
// the failure to the user.
e.printStackTrace();
}
}
设置GLSurfaceView
/**
 * Binds this controller to the given GLSurfaceView and configures it for
 * OpenGL ES 2.0 rendering. The camera itself is opened later, on the GL
 * thread, from onSurfaceChanged.
 *
 * @param glSurfaceView the view the camera preview will be rendered onto
 * @return true if the view was configured; false if any step threw
 */
public boolean openCamera(GLSurfaceView glSurfaceView){
    boolean configured = true;
    try{
        mGlsurfaceView = glSurfaceView;
        // Request an OpenGL ES 2.0 context (must precede setRenderer).
        mGlsurfaceView.setEGLContextClientVersion(2);
        // Register this object as the Renderer; the GL thread will invoke
        // onSurfaceCreated / onSurfaceChanged / onDrawFrame on it.
        mGlsurfaceView.setRenderer(this);
        // RENDERMODE_WHEN_DIRTY: onDrawFrame fires only when requestRender()
        // is called; RENDERMODE_CONTINUOUSLY would instead redraw on a timer.
        mGlsurfaceView.setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
        // Track the surface lifecycle so the camera is released together with
        // the surface.
        mGlsurfaceView.getHolder().addCallback(new SurfaceHolder.Callback() {
            @Override
            public void surfaceCreated(SurfaceHolder holder) {
            }

            @Override
            public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
            }

            @Override
            public void surfaceDestroyed(SurfaceHolder holder) {
                closeCamera();
            }
        });
    }catch (Exception e){
        e.printStackTrace();
        configured = false;
    }
    return configured;
}
// GL-thread callback: the EGL context now exists, so it is safe to create the
// camera texture and its SurfaceTexture.
@Override
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
Log.i(TAG,"onSurfaceCreated");
initSurfaceTexture();
}
// Called whenever the camera has produced a new frame on mSurfaceTexture
// (possibly on an arbitrary thread). Only a flag is set here; the actual
// texture upload must happen on the GL thread, in onDrawFrame.
@Override
public void onFrameAvailable(SurfaceTexture surfaceTexture) {
Log.i(TAG,"onFrameAvailable");
synchronized(this) {
updateSurface = true;
}
// Wake the GL thread: in RENDERMODE_WHEN_DIRTY this triggers onDrawFrame,
// which renders the texture image to the screen.
mGlsurfaceView.requestRender();
}
// NOTE(review): removed a verbatim duplicate of onSurfaceCreated(GL10, EGLConfig)
// that appeared here — Java forbids two methods with the same signature in one
// class, and the identical implementation is already defined above.
// GL-thread callback: the surface now has a size, so open the camera, route
// its frames into mSurfaceTexture, and build the on-screen renderer.
@Override
public void onSurfaceChanged(GL10 gl, int width, int height) {
Log.i(TAG,"onSurfaceChanged");
try {
// Open the camera (openCommonCamera is defined elsewhere; its returned id
// is not used further here).
int cameraId = openCommonCamera();
// Route the camera preview into the SurfaceTexture created in onSurfaceCreated.
mCamera.setPreviewTexture(mSurfaceTexture);
setPameras();
mCamera.startPreview();
// Hand the texture id bound to the SurfaceTexture to the OpenGL renderer.
mRenderScreen = new RenderScreen(mSurfaceTextureId);
mRenderScreen.setSreenSize(width,height);
}catch (Exception e){
// NOTE(review): broad catch; a failure here leaves mRenderScreen null.
e.printStackTrace();
}
}
/**
 * GL-thread callback: latches the most recent camera frame (if any) into the
 * external OES texture and draws it to the screen.
 */
@Override
public void onDrawFrame(GL10 gl) {
    try {
        Log.i(TAG,"onDrawFrame");
        synchronized(this) {
            if (updateSurface) {
                // Upload the pending camera frame into mSurfaceTextureId and
                // fetch the matching texture-coordinate transform.
                mSurfaceTexture.updateTexImage();
                mSurfaceTexture.getTransformMatrix(mTexMtx);
                updateSurface = false;
            }
        }
        // mRenderScreen is only created in onSurfaceChanged; a frame can be
        // requested before that, so guard explicitly instead of relying on
        // the catch below to swallow a NullPointerException.
        if (mRenderScreen != null) {
            // Render the texture to the screen.
            mRenderScreen.draw(mTexMtx);
        }
    }catch (Exception e){
        e.printStackTrace();
    }
}
// Creates the external OES texture that receives camera frames, wraps it in a
// SurfaceTexture, and configures filtering/wrapping. Must run on the GL thread.
private void initSurfaceTexture(){
int[] textures = new int[1];
GLES20.glGenTextures(1, textures, 0);
// Texture id the camera frames will be written to.
mSurfaceTextureId = textures[0];
mSurfaceTexture = new SurfaceTexture(mSurfaceTextureId);
// Fire onFrameAvailable whenever the camera pushes a new frame.
mSurfaceTexture.setOnFrameAvailableListener(this);
GLES20.glDisable(GLES20.GL_DEPTH_TEST);
GLES20.glDisable(GLES20.GL_CULL_FACE);
GLES20.glDisable(GLES20.GL_BLEND);
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mSurfaceTextureId);
// Linear filtering and clamp-to-edge wrapping for the camera texture.
GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
}
opengl-es渲染
/**
 * Renders a camera frame stored in an external OES texture to the screen using
 * OpenGL ES 2.0. Texture coordinates are derived from the camera preview size
 * and the screen size so the image keeps its aspect ratio.
 *
 * All methods must be called on the GL thread.
 */
public class RenderScreen {
    // Full-screen quad vertex positions; two variants, selected by the
    // mirror flag in draw().
    private final FloatBuffer mNormalVtxBuf = GlUtil.createVertexBuffer();
    private final FloatBuffer mNormalVtxBufImage = GlUtil.createVertexBufferImage();
    // Texture coordinates, rebuilt from the window and camera preview sizes.
    private FloatBuffer mCameraTexCoordBuffer;
    private final float[] mPosMtx = GlUtil.createIdentityMtx();
    private int mFboTexId;          // external OES texture holding the camera frame
    private int mProgram = -1;
    private int maPositionHandle = -1;
    private int maTexCoordHandle = -1;
    private int muPosMtxHandle = -1;
    private int muTexMtxHandle = -1;
    private int muSamplerHandle = -1;
    private int mScreenW = -1;
    private int mScreenH = -1;
    private boolean mirrorImage;    // whether to draw with the mirrored vertex set

    /**
     * @param id external OES texture id bound to the camera SurfaceTexture
     */
    public RenderScreen(int id) {
        initGL();
        mFboTexId = id;
        mirrorImage = false;
    }

    /**
     * Sets the output (screen) size and rebuilds the texture coordinates.
     * Kept under its original (misspelled) name for source compatibility;
     * prefer {@link #setScreenSize(int, int)}.
     */
    public void setSreenSize(int width, int height) {
        mScreenW = width;
        mScreenH = height;
        initCameraTexCoordBuffer();
    }

    /** Correctly spelled alias for {@link #setSreenSize(int, int)}. */
    public void setScreenSize(int width, int height) {
        setSreenSize(width, height);
    }

    /** Replaces the camera texture id used for drawing. */
    public void setTextureId(int textureId) {
        mFboTexId = textureId;
    }

    // Builds texture coordinates that crop the camera image vertically so it
    // is not stretched when the camera and screen aspect ratios differ.
    private void initCameraTexCoordBuffer() {
        int cameraWidth, cameraHeight;
        Camera.Size size = CameraController.getInstance().getmPreviewSize();
        int width = size.width;
        int height = size.height;
        // Swap width/height so they match the current device orientation.
        if (CameraController.getInstance().isLandscape()) {
            cameraWidth = Math.max(width, height);
            cameraHeight = Math.min(width, height);
        } else {
            cameraWidth = Math.min(width, height);
            cameraHeight = Math.max(width, height);
        }
        float hRatio = mScreenW / ((float) cameraWidth);
        float vRatio = mScreenH / ((float) cameraHeight);
        float ratio;
        if (hRatio > vRatio) {
            ratio = mScreenH / (cameraHeight * hRatio);
            // UV coordinates, centered vertically on the cropped region.
            final float vtx[] = {
                0f, 0.5f + ratio / 2,
                0f, 0.5f - ratio / 2,
                1f, 0.5f + ratio / 2,
                1f, 0.5f - ratio / 2,
            };
            mCameraTexCoordBuffer = toFloatBuffer(vtx);
        } else {
            ratio = mScreenW / (cameraWidth * vRatio);
            // Portrait display, no up-scaling.
            final float vtx[] = {
                0f, 0.5f + ratio / 2,
                1f, 0.5f + ratio / 2,
                0f, 0.5f - ratio / 2,
                1f, 0.5f - ratio / 2,
            };
            mCameraTexCoordBuffer = toFloatBuffer(vtx);
        }
    }

    // Wraps a float[] in a native-order direct FloatBuffer positioned at 0.
    private static FloatBuffer toFloatBuffer(float[] vtx) {
        ByteBuffer bb = ByteBuffer.allocateDirect(4 * vtx.length);
        bb.order(ByteOrder.nativeOrder());
        FloatBuffer fb = bb.asFloatBuffer();
        fb.put(vtx);
        fb.position(0);
        return fb;
    }

    /**
     * Renders the camera texture to the screen. No-op until the screen size
     * has been set.
     *
     * @param tex_mtx texture transform from SurfaceTexture.getTransformMatrix
     */
    public void draw(final float[] tex_mtx) {
        if (mScreenW <= 0 || mScreenH <= 0) {
            return; // screen size not set yet
        }
        // Viewport and clear.
        GLES20.glViewport(0, 0, mScreenW, mScreenH);
        GLES20.glClearColor(0f, 0f, 0f, 1f);
        GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT);
        GLES20.glUseProgram(mProgram);
        // Vertex positions: pick the buffer matching the mirror flag.
        if (mirrorImage) {
            mNormalVtxBuf.position(0);
            GLES20.glVertexAttribPointer(maPositionHandle,
                    3, GLES20.GL_FLOAT, false, 4 * 3, mNormalVtxBuf);
        } else {
            mNormalVtxBufImage.position(0);
            GLES20.glVertexAttribPointer(maPositionHandle,
                    3, GLES20.GL_FLOAT, false, 4 * 3, mNormalVtxBufImage);
        }
        GLES20.glEnableVertexAttribArray(maPositionHandle);
        // Texture coordinates.
        mCameraTexCoordBuffer.position(0);
        GLES20.glVertexAttribPointer(maTexCoordHandle,
                2, GLES20.GL_FLOAT, false, 4 * 2, mCameraTexCoordBuffer);
        GLES20.glEnableVertexAttribArray(maTexCoordHandle);
        // Transform matrices.
        if (muPosMtxHandle >= 0)
            GLES20.glUniformMatrix4fv(muPosMtxHandle, 1, false, mPosMtx, 0);
        if (muTexMtxHandle >= 0)
            GLES20.glUniformMatrix4fv(muTexMtxHandle, 1, false, tex_mtx, 0);
        // Bind the camera texture on unit 0 and point the sampler at it
        // (previously the sampler uniform was never set and the code relied
        // on its default value of 0).
        GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mFboTexId);
        if (muSamplerHandle >= 0)
            GLES20.glUniform1i(muSamplerHandle, 0);
        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
    }

    // Compiles and links the external-OES shader program and caches the
    // attribute/uniform handles. (Unused plain-sampler2D shader strings that
    // were never referenced have been removed.)
    private void initGL() {
        GlUtil.checkGlError("initGL_S");
        // Vertex shader: transform position by uPosMtx and map texture
        // coordinates through the SurfaceTexture transform matrix uTexMtx.
        String SHARDE_NULL_VERTEX = "attribute vec4 position;\n" +
                "attribute vec4 inputTextureCoordinate;\n" +
                "\n" +
                "uniform mat4 uPosMtx;\n" +
                "uniform mat4 uTexMtx;\n" +
                "varying vec2 textureCoordinate;\n" +
                "void main() {\n" +
                " gl_Position = uPosMtx * position;\n" +
                " textureCoordinate = (uTexMtx * inputTextureCoordinate).xy;\n" +
                "}";
        // Fragment shader: sample the external OES (camera) texture.
        String SHARDE_NULL_FRAGMENT = "#extension GL_OES_EGL_image_external : require\n" +
                "precision mediump float;\n" +
                "varying vec2 textureCoordinate;\n" +
                "uniform samplerExternalOES uSampler;\n" +
                "void main() {\n" +
                " vec4 tc = texture2D(uSampler, textureCoordinate);\n" +
                " gl_FragColor = vec4(tc.r, tc.g, tc.b, 1.0);\n" +
                "}";
        mProgram = GlUtil.createProgram(SHARDE_NULL_VERTEX, SHARDE_NULL_FRAGMENT);
        maPositionHandle = GLES20.glGetAttribLocation(mProgram, "position");
        maTexCoordHandle = GLES20.glGetAttribLocation(mProgram, "inputTextureCoordinate");
        muPosMtxHandle = GLES20.glGetUniformLocation(mProgram, "uPosMtx");
        muTexMtxHandle = GLES20.glGetUniformLocation(mProgram, "uTexMtx");
        muSamplerHandle = GLES20.glGetUniformLocation(mProgram, "uSampler");
        GlUtil.checkGlError("initGL_E");
    }
}