既然要录像,第一步肯定是打开相机了,打开方法网上很多,我是用opengl预览的,就讲opengl部分
camera2要显示图像,就要添加surface来进行显示
mCameraDevice.createCaptureSession(Arrays.asList(mSurface), new CameraCaptureSession.StateCallback()
平常预览都是取surfaceview的surface来显示,我前面有用过egl,现在我就是要用egl来显示图像
显示的时候再加个黑白滤镜
egl
public class EGLUtils { private EGLSurface eglSurface = EGL14.EGL_NO_SURFACE; private EGLContext eglCtx = EGL14.EGL_NO_CONTEXT; private EGLDisplay eglDis = EGL14.EGL_NO_DISPLAY; private EGLContext eglContext = EGL14.EGL_NO_CONTEXT; public void initEGL(EGLContext eglContext, Surface surface) { this.eglContext = eglContext; eglDis = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY); int[] version = new int[2]; EGL14.eglInitialize(eglDis, version, 0, version, 1); int confAttr[] = { EGL14.EGL_RED_SIZE, 8, EGL14.EGL_GREEN_SIZE, 8, EGL14.EGL_BLUE_SIZE, 8, EGL14.EGL_SURFACE_TYPE, EGL14.EGL_WINDOW_BIT, EGL14.EGL_NONE }; EGLConfig[] configs = new EGLConfig[1]; int[] numConfigs = new int[1]; EGL14.eglChooseConfig(eglDis, confAttr, 0, configs, 0, 1, numConfigs, 0); int ctxAttr[] = { EGL14.EGL_CONTEXT_CLIENT_VERSION, 2,// 0x3098 EGL14.EGL_NONE }; eglCtx = EGL14.eglCreateContext(eglDis, configs[0], eglContext, ctxAttr, 0); int[] surfaceAttr = { EGL14.EGL_NONE }; eglSurface = EGL14.eglCreateWindowSurface(eglDis, configs[0], surface, surfaceAttr, 0); EGL14.eglMakeCurrent(eglDis, eglSurface, eglSurface, eglCtx); } public EGLContext getContext() { return eglCtx; } public void swap() { EGL14.eglSwapBuffers(eglDis, eglSurface); } public void release() { if (eglSurface != EGL14.EGL_NO_SURFACE) { EGL14.eglMakeCurrent(eglDis, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_CONTEXT); EGL14.eglDestroySurface(eglDis, eglSurface); eglSurface = EGL14.EGL_NO_SURFACE; } if (eglCtx != EGL14.EGL_NO_CONTEXT ) { if(eglContext == EGL14.EGL_NO_CONTEXT){ EGL14.eglDestroyContext(eglDis, eglCtx); } eglCtx = EGL14.EGL_NO_CONTEXT; } if (eglDis != EGL14.EGL_NO_DISPLAY) { EGL14.eglTerminate(eglDis); eglDis = EGL14.EGL_NO_DISPLAY; } } }
fbo
public class GLFramebuffer { private Context context; private final float[] vertexData = { 1f, -1f, 0f, -1f, -1f, 0f, 1f, 1f, 0f, -1f, 1f, 0f }; private FloatBuffer vertexBuffer; private FloatBuffer textureVertexBuffer; private int programId; private int aPositionHandle; private int uTextureSamplerHandle; private int aTextureCoordHandle; private int uSTMMatrixHandle; private float[] mSTMatrix = new float[16]; private int[] textures; private int[] frameBuffers; private int[] vertexBuffers; private SurfaceTexture surfaceTexture; public GLFramebuffer(Context context){ this.context = context; final float[] textureVertexData = { 1f, 0f, 0f, 0f, 1f, 1f, 0f, 1f }; vertexBuffer = ByteBuffer.allocateDirect(vertexData.length * 4) .order(ByteOrder.nativeOrder()) .asFloatBuffer() .put(vertexData); vertexBuffer.position(0); textureVertexBuffer = ByteBuffer.allocateDirect(textureVertexData.length * 4) .order(ByteOrder.nativeOrder()) .asFloatBuffer() .put(textureVertexData); textureVertexBuffer.position(0); } public void initFramebuffer(int width,int height){ String vertexShader = ShaderUtils.readRawTextFile(context, R.raw.vertext_shader); String fragmentShader = ShaderUtils.readRawTextFile(context, R.raw.fragment_sharder); programId = ShaderUtils.createProgram(vertexShader, fragmentShader); aPositionHandle = GLES20.glGetAttribLocation(programId, "aPosition"); uSTMMatrixHandle = GLES20.glGetUniformLocation(programId, "uSTMatrix"); uTextureSamplerHandle = GLES20.glGetUniformLocation(programId, "sTexture"); aTextureCoordHandle = GLES20.glGetAttribLocation(programId, "aTexCoord"); vertexBuffers = new int[1]; GLES20.glGenBuffers(1,vertexBuffers,0); GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, vertexBuffers[0]); GLES20.glBufferData(GLES20.GL_ARRAY_BUFFER, vertexData.length*4, vertexBuffer,GLES20.GL_STATIC_DRAW); GLES20.glBufferSubData(GLES20.GL_ARRAY_BUFFER,0,vertexData.length*4,vertexBuffer); frameBuffers = new int[1]; GLES20.glGenFramebuffers(1, frameBuffers, 0); 
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, frameBuffers[0]); textures = new int[2]; GLES20.glGenTextures(2, textures, 0); GLES20.glActiveTexture(GLES20.GL_TEXTURE0); GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textures[0]); GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST); GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR); GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, width, height, 0, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, null); GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0, GLES20.GL_TEXTURE_2D, textures[0], 0); GLES20.glActiveTexture(GLES20.GL_TEXTURE1); GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textures[1]); GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST); GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR); GLES20.glUseProgram(programId); GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, vertexBuffers[0]); GLES20.glEnableVertexAttribArray(aPositionHandle); GLES20.glVertexAttribPointer(aPositionHandle, 3, GLES20.GL_FLOAT, false, 12, 0); GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, 0); GLES20.glEnableVertexAttribArray(aTextureCoordHandle); GLES20.glVertexAttribPointer(aTextureCoordHandle, 2, GLES20.GL_FLOAT, false, 8, textureVertexBuffer); GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0); GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0); } public SurfaceTexture getSurfaceTexture(){ surfaceTexture = new SurfaceTexture(textures[1]); return surfaceTexture; } public void drawFrameBuffer(int width,int height){ surfaceTexture.updateTexImage(); surfaceTexture.getTransformMatrix(mSTMatrix); GLES20.glUseProgram(programId); GLES20.glActiveTexture(GLES20.GL_TEXTURE1); GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textures[1]); GLES20.glUniform1i(uTextureSamplerHandle,1); GLES20.glUniformMatrix4fv(uSTMMatrixHandle, 1, 
false, mSTMatrix, 0); GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, frameBuffers[0]); GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT); GLES20.glViewport(0, 0, width, height); GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4); GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0); } public void drawFrame(){ GLES20.glActiveTexture(GLES20.GL_TEXTURE0); GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textures[0]); } }
渲染
public class GLRenderer { private Context context; private FloatBuffer vertexBuffer; private FloatBuffer textureVertexBuffer; private int programId; private int aPositionHandle; private int uTextureSamplerHandle; private int aTextureCoordHandle; public GLRenderer(Context context){ this.context = context; final float[] vertexData = { 1f, -1f, 0f, -1f, -1f, 0f, 1f, 1f, 0f, -1f, 1f, 0f }; final float[] textureVertexData = { 1f, 0f, 0f, 0f, 1f, 1f, 0f, 1f }; vertexBuffer = ByteBuffer.allocateDirect(vertexData.length * 4) .order(ByteOrder.nativeOrder()) .asFloatBuffer() .put(vertexData); vertexBuffer.position(0); textureVertexBuffer = ByteBuffer.allocateDirect(textureVertexData.length * 4) .order(ByteOrder.nativeOrder()) .asFloatBuffer() .put(textureVertexData); textureVertexBuffer.position(0); } public void initShader(){ String vertexShader = ShaderUtils.readRawTextFile(context, R.raw.bitmap_vertext_shader); String fragmentShader = ShaderUtils.readRawTextFile(context, R.raw.bitmap_fragment_sharder); programId = ShaderUtils.createProgram(vertexShader, fragmentShader); aPositionHandle = GLES20.glGetAttribLocation(programId, "aPosition"); uTextureSamplerHandle = GLES20.glGetUniformLocation(programId, "sTexture"); aTextureCoordHandle = GLES20.glGetAttribLocation(programId, "aTexCoord"); GLES20.glUseProgram(programId); vertexBuffer.position(0); GLES20.glEnableVertexAttribArray(aPositionHandle); GLES20.glVertexAttribPointer(aPositionHandle, 3, GLES20.GL_FLOAT, false, 12, vertexBuffer); textureVertexBuffer.position(0); GLES20.glEnableVertexAttribArray(aTextureCoordHandle); GLES20.glVertexAttribPointer(aTextureCoordHandle, 2, GLES20.GL_FLOAT, false, 8, textureVertexBuffer); } public void drawFrame(){ GLES20.glUseProgram(programId); GLES20.glUniform1i(uTextureSamplerHandle, 0); GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4); } }
主要用到这三个类
创建一个surface用来接收camera2的图像
mSurfaceTexture = mFramebuffer.getSurfaceTexture(); mSurfaceTexture.setOnFrameAvailableListener(new SurfaceTexture.OnFrameAvailableListener() { @Override public void onFrameAvailable(SurfaceTexture surfaceTexture) { } });
mSurface = new Surface(mSurfaceTexture); final CaptureRequest.Builder builder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); builder.addTarget(mSurface); mCameraDevice.createCaptureSession(Arrays.asList(mSurface), new CameraCaptureSession.StateCallback()
然后把图像画到我们的纹理上
surfaceTexture.updateTexImage(); surfaceTexture.getTransformMatrix(mSTMatrix);
这样就可以添加滤镜了
完整代码
/**
 * SurfaceView that previews Camera2 frames through an EGL/GLES pipeline:
 * camera -> SurfaceTexture (OES) -> GLFramebuffer (filter pass into FBO)
 * -> GLRenderer (FBO texture to screen). A dedicated render thread owns the
 * EGL context and blocks on mObject until the camera signals a new frame.
 */
public class Camera2SurfaceView extends SurfaceView {
    private SurfaceHolder mHolder;
    private EGLUtils mEglUtils;
    private GLFramebuffer mFramebuffer;
    private GLRenderer mRenderer;
    private SurfaceTexture mSurfaceTexture;  // camera target; null-ing it stops the render loop
    private Surface mSurface;                // Surface wrapper handed to the camera session
    private final Object mObject = new Object();  // frame-available signal between camera and render thread
    private String mCameraId;
    private CameraManager mCameraManager;
    private CameraCaptureSession mCameraCaptureSession;
    private CameraDevice mCameraDevice;
    private Handler mHandler;                // camera callbacks run on this background handler
    private int screenWidth, screenHeight;

    public Camera2SurfaceView(Context context) {
        super(context);
        init(context);
    }

    public Camera2SurfaceView(Context context, AttributeSet attrs) {
        super(context, attrs);
        init(context);
    }

    private void init(Context context){
        mHolder = getHolder();
        mFramebuffer = new GLFramebuffer(context);
        mRenderer = new GLRenderer(context);
        initCamera2();
        mHolder.addCallback(new SurfaceHolder.Callback() {
            @Override
            public void surfaceCreated(SurfaceHolder surfaceHolder) {
            }

            @Override
            public void surfaceChanged(SurfaceHolder surfaceHolder, int i, int w, int h) {
                screenWidth = w;
                screenHeight = h;
                // Render thread: owns EGL, opens the camera, and loops drawing frames.
                // NOTE(review): started on every surfaceChanged — assumes it fires once
                // per surface lifetime; a second call would start a second loop. TODO confirm.
                Thread thread = new Thread(){
                    @Override
                    public void run() {
                        super.run();
                        mEglUtils = new EGLUtils();
                        mEglUtils.initEGL(EGL14.EGL_NO_CONTEXT,mHolder.getSurface());
                        mRenderer.initShader();
                        Size mPreviewSize = getPreferredPreviewSize(mSizes, screenWidth, screenHeight);
                        // Camera sizes are landscape; swap to match the portrait view.
                        int previewWidth = mPreviewSize.getHeight();
                        int previewHeight = mPreviewSize.getWidth();
                        // Letterbox/pillarbox the preview into the view, preserving aspect ratio.
                        int left,top,viewWidth,viewHeight;
                        if(previewHeight > previewWidth){
                            left = 0;
                            viewWidth = screenWidth;
                            viewHeight = (int)(previewHeight*1.0f/previewWidth*viewWidth);
                            top = (screenHeight - viewHeight)/2;
                        }else{
                            top = 0;
                            viewHeight = screenHeight;
                            viewWidth = (int)(previewWidth*1.0f/previewHeight*viewHeight);
                            left = (screenWidth - viewWidth)/2;
                        }
                        Rect rect = new Rect();
                        rect.left = left;
                        rect.top = top;
                        rect.right = left + viewWidth;
                        rect.bottom = top + viewHeight;
                        mFramebuffer.initFramebuffer(previewWidth,previewHeight);
                        mSurfaceTexture = mFramebuffer.getSurfaceTexture();
                        mSurfaceTexture.setDefaultBufferSize(previewWidth, previewHeight);
                        // Camera frame arrival wakes the render loop below.
                        mSurfaceTexture.setOnFrameAvailableListener(new SurfaceTexture.OnFrameAvailableListener() {
                            @Override
                            public void onFrameAvailable(SurfaceTexture surfaceTexture) {
                                synchronized (mObject) {
                                    mObject.notifyAll();
                                }
                            }
                        });
                        openCamera2();
                        // Frame loop: wait for a frame, filter it into the FBO, then draw to screen.
                        while (true){
                            synchronized (mObject) {
                                try {
                                    mObject.wait();
                                } catch (InterruptedException e) {
                                    e.printStackTrace();
                                }
                            }
                            // surfaceDestroyed nulls mSurfaceTexture and notifies to end the loop.
                            if(mSurfaceTexture == null){
                                break;
                            }
                            mFramebuffer.drawFrameBuffer(previewWidth,previewHeight);
                            GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT);
                            GLES20.glViewport(rect.left,rect.top,rect.width(),rect.height());
                            mFramebuffer.drawFrame();
                            mRenderer.drawFrame();
                            mEglUtils.swap();
                        }
                        mEglUtils.release();
                    }
                };
                thread.start();
            }

            @Override
            public void surfaceDestroyed(SurfaceHolder surfaceHolder) {
                // Tear down camera first, then the GL-side objects, then wake the
                // render thread so it observes mSurfaceTexture == null and exits.
                if(mCameraCaptureSession != null){
                    mCameraCaptureSession.getDevice().close();
                    mCameraCaptureSession.close();
                    mCameraCaptureSession = null;
                }
                if(mSurface != null){
                    mSurface.release();
                    mSurface = null;
                }
                if(mSurfaceTexture != null){
                    mSurfaceTexture.release();
                    mSurfaceTexture = null;
                    synchronized (mObject) {
                        mObject.notifyAll();
                    }
                }
            }
        });
    }

    // Output sizes the camera supports for SurfaceTexture targets.
    private Size[] mSizes;

    /** Starts the camera handler thread and caches camera id + supported sizes. */
    private void initCamera2() {
        HandlerThread handlerThread = new HandlerThread("Camera2");
        handlerThread.start();
        mHandler = new Handler(handlerThread.getLooper());
        mCameraManager = (CameraManager) getContext().getSystemService(Context.CAMERA_SERVICE);
        try {
            String[] CameraIdList = mCameraManager.getCameraIdList();
            // Uses the first reported camera (typically the back camera).
            mCameraId = CameraIdList[0];
            CameraCharacteristics characteristics = mCameraManager.getCameraCharacteristics(mCameraId);
            characteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL);
            StreamConfigurationMap map = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
            if(map != null){
                mSizes = map.getOutputSizes(SurfaceTexture.class);
            }
        } catch (CameraAccessException e) {
            e.printStackTrace();
        }
    }

    /** Opens the camera if CAMERA permission is granted; callbacks land on mHandler. */
    private void openCamera2(){
        if (PermissionChecker.checkSelfPermission(getContext(), Manifest.permission.CAMERA) == PackageManager.PERMISSION_GRANTED) {
            try {
                mCameraManager.openCamera(mCameraId, stateCallback, mHandler);
            } catch (CameraAccessException e) {
                e.printStackTrace();
            }
        }
    }

    private CameraDevice.StateCallback stateCallback = new CameraDevice.StateCallback() {
        @Override
        public void onOpened(@NonNull CameraDevice cameraDevice) {
            mCameraDevice = cameraDevice;
            takePreview();
        }

        @Override
        public void onDisconnected(@NonNull CameraDevice cameraDevice) {
            if (mCameraDevice != null) {
                mCameraDevice.close();
                mCameraDevice = null;
            }
        }

        @Override
        public void onError(@NonNull CameraDevice cameraDevice, int i) {
        }
    };

    /** Creates the preview capture session targeting the GL SurfaceTexture. */
    private void takePreview() {
        try {
            mSurface = new Surface(mSurfaceTexture);
            final CaptureRequest.Builder builder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
            builder.addTarget(mSurface);
            mCameraDevice.createCaptureSession(Arrays.asList(mSurface), new CameraCaptureSession.StateCallback() {
                @Override
                public void onConfigured(@NonNull CameraCaptureSession cameraCaptureSession) {
                    if (null == mCameraDevice) return;
                    mCameraCaptureSession = cameraCaptureSession;
                    builder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
                    builder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);
                    CaptureRequest previewRequest = builder.build();
                    try {
                        mCameraCaptureSession.setRepeatingRequest(previewRequest, null, mHandler);
                    } catch (CameraAccessException e) {
                        e.printStackTrace();
                    }
                }

                @Override
                public void onConfigureFailed(@NonNull CameraCaptureSession cameraCaptureSession) {
                }
            }, mHandler);
        } catch (CameraAccessException e) {
            e.printStackTrace();
        }
    }

    /**
     * Picks the smallest supported size strictly larger than the view in both
     * dimensions (comparing against swapped axes when the view is portrait);
     * falls back to the first reported size if none qualifies.
     */
    private Size getPreferredPreviewSize(Size[] sizes, int width, int height) {
        List<Size> collectorSizes = new ArrayList<>();
        for (Size option : sizes) {
            if (width > height) {
                if (option.getWidth() > width && option.getHeight() > height) {
                    collectorSizes.add(option);
                }
            } else {
                if (option.getHeight() > width && option.getWidth() > height) {
                    collectorSizes.add(option);
                }
            }
        }
        if (collectorSizes.size() > 0) {
            return Collections.min(collectorSizes, new Comparator<Size>() {
                @Override
                public int compare(Size s1, Size s2) {
                    // NOTE(review): int multiplication could overflow for very large
                    // sizes before widening to long — harmless for real camera sizes.
                    return Long.signum(s1.getWidth() * s1.getHeight() - s2.getWidth() * s2.getHeight());
                }
            });
        }
        return sizes[0];
    }
}
看类名就知道有二就有一了,camera1已经过时了,而且网上有很多例子,就不贴了
贴shader代码
vertext_shader
// Vertex shader for the filter (FBO) pass: passes the quad position through
// and transforms the texture coordinate by the SurfaceTexture matrix.
attribute vec4 aPosition;
attribute vec4 aTexCoord;
varying vec2 vTexCoord;
// Transform matrix from SurfaceTexture.getTransformMatrix(); maps quad tex
// coords into the external texture's coordinate space (crop/flip).
uniform mat4 uSTMatrix;
void main() {
    vTexCoord = (uSTMatrix * aTexCoord).xy;
    gl_Position = aPosition;
}
fragment_shader
// Fragment shader for the filter (FBO) pass: samples the camera's external OES
// texture and converts to grayscale using the BT.601 luma weights.
#extension GL_OES_EGL_image_external : require
precision mediump float;
varying vec2 vTexCoord;
uniform samplerExternalOES sTexture;
void main() {
    vec3 centralColor = texture2D(sTexture, vTexCoord).rgb;
    float gray = dot(centralColor, vec3(0.299, 0.587, 0.114));
    // BUG FIX: the original wrote vec4(gray), which also put the luma into the
    // alpha channel; alpha should stay fully opaque.
    gl_FragColor = vec4(vec3(gray), 1.0);
}
bitmap_vertext_shader
// Vertex shader for the on-screen pass: plain pass-through of the quad
// position and its texture coordinate (no transform needed — the FBO texture
// is already in the right orientation).
attribute vec4 aPosition;
attribute vec2 aTexCoord;
varying vec2 vTexCoord;
void main() {
    vTexCoord = aTexCoord;
    gl_Position = aPosition;
}
bitmap_fragment_shader
// Fragment shader for the on-screen pass: samples the filtered FBO color
// texture (a regular sampler2D, bound on unit 0) unchanged.
precision mediump float;
varying vec2 vTexCoord;
uniform sampler2D sTexture;
void main() {
    gl_FragColor = texture2D(sTexture, vTexCoord);
}
文件名就不要吐槽了,临时拿来用的
当然不要忘记权限
<uses-permission android:name="android.permission.CAMERA" />