android 通过egl实现Service运行Camera

所谓的后台可以是在Activity里打开Camera而不显示画面,也可以在Service内开启Camera,我主要写后者
Camera的开启必须调用setPreviewTexture或setPreviewDisplay(二选一)。因此想要在后台打开Camera,就需要一个不显示画面的SurfaceTexture或SurfaceHolder。SurfaceHolder我不太熟,所以我从SurfaceTexture下手。
SurfaceTexture可以在opengl es的线程内通过纹理id创建,而opengl es的egl又可以配置成不需要画面的离屏渲染(这个我以前的文章有提过就不说了),那么就简单了,只要在离屏渲染模式下生成一个SurfaceTexture,通过这个SurfaceTexture开启Camera就ok了
贴代码,首先是离屏渲染egl
DummyEGLUtils.java

public class DummyEGLUtils {
    // Value of the EGL_RECORDABLE_ANDROID attribute; EGL14 does not expose
    // this constant on older API levels, so it is declared locally.
    private static final int EGL_RECORDABLE_ANDROID = 0x3142;

    private EGLSurface eglSurface = EGL14.EGL_NO_SURFACE;
    private EGLContext eglCtx = EGL14.EGL_NO_CONTEXT;
    private EGLDisplay eglDis = EGL14.EGL_NO_DISPLAY;

    /**
     * Creates an off-screen (pbuffer) OpenGL ES 2.0 context and makes it
     * current on the calling thread. Must be invoked on the thread that will
     * issue GL calls afterwards (here: the Service's HandlerThread).
     */
    public void initEGL() {
        eglDis = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
        int[] version = new int[2];
        EGL14.eglInitialize(eglDis, version, 0, version, 1);
        int[] confAttr = {
                // BUGFIX: we render into a pbuffer below, so the config must
                // advertise pbuffer support. The original requested
                // EGL_WINDOW_BIT, which may select a config for which
                // eglCreatePbufferSurface fails with EGL_BAD_MATCH.
                EGL14.EGL_SURFACE_TYPE, EGL14.EGL_PBUFFER_BIT,
                EGL14.EGL_RED_SIZE, 8,
                EGL14.EGL_GREEN_SIZE, 8,
                EGL14.EGL_BLUE_SIZE, 8,
                EGL14.EGL_ALPHA_SIZE, 8,
                EGL14.EGL_RENDERABLE_TYPE, EGL14.EGL_OPENGL_ES2_BIT,
                EGL_RECORDABLE_ANDROID, 1,
                EGL14.EGL_NONE
        };
        EGLConfig[] configs = new EGLConfig[1];
        int[] numConfigs = new int[1];
        EGL14.eglChooseConfig(eglDis, confAttr, 0, configs, 0, 1, numConfigs, 0);
        if (numConfigs[0] <= 0) {
            // Without this check the null config would crash later with a
            // much less informative error inside eglCreateContext.
            throw new RuntimeException("eglChooseConfig found no matching config: 0x"
                    + Integer.toHexString(EGL14.eglGetError()));
        }
        int[] ctxAttr = {
                EGL14.EGL_CONTEXT_CLIENT_VERSION, 2, // request an ES 2.0 context (0x3098)
                EGL14.EGL_NONE
        };
        eglCtx = EGL14.eglCreateContext(eglDis, configs[0], EGL14.EGL_NO_CONTEXT, ctxAttr, 0);
        // Dummy 480x640 pbuffer: nothing is ever displayed, the surface only
        // exists so the context can be made current off-screen.
        int[] surfaceAttr = {
                EGL14.EGL_WIDTH, 480,
                EGL14.EGL_HEIGHT, 640,
                EGL14.EGL_NONE
        };
        eglSurface = EGL14.eglCreatePbufferSurface(eglDis, configs[0], surfaceAttr, 0);

        EGL14.eglMakeCurrent(eglDis, eglSurface, eglSurface, eglCtx);
    }

    /** @return the context created by {@link #initEGL()}, e.g. for context sharing. */
    public EGLContext getContext() {
        return eglCtx;
    }

    /** Swaps the pbuffer; visually a no-op since nothing is displayed. */
    public void swap() {
        EGL14.eglSwapBuffers(eglDis, eglSurface);
    }

    /** Destroys surface, context and display. Safe to call more than once. */
    public void release() {
        if (eglSurface != EGL14.EGL_NO_SURFACE) {
            // Unbind first so the surface is no longer current when destroyed.
            EGL14.eglMakeCurrent(eglDis, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_CONTEXT);
            EGL14.eglDestroySurface(eglDis, eglSurface);
            eglSurface = EGL14.EGL_NO_SURFACE;
        }
        if (eglCtx != EGL14.EGL_NO_CONTEXT) {
            EGL14.eglDestroyContext(eglDis, eglCtx);
            eglCtx = EGL14.EGL_NO_CONTEXT;
        }
        if (eglDis != EGL14.EGL_NO_DISPLAY) {
            EGL14.eglTerminate(eglDis);
            eglDis = EGL14.EGL_NO_DISPLAY;
        }
    }
}

然后Camera的操作代码
CameraOverlap.java

public class CameraOverlap {
    protected Camera mCamera = null;
    protected Camera.CameraInfo mCameraInfo = null;
    // Preview size consumed by the face-detection pipeline (sensor-landscape frame).
    public static final int PREVIEW_WIDTH = 640;
    public static final int PREVIEW_HEIGHT = 480;
    private int CameraFacing = Camera.CameraInfo.CAMERA_FACING_FRONT;
    private Camera.PreviewCallback mPreviewCallback;

    private Context context;

    public CameraOverlap(Context context) {
        this.context = context;
    }

    /**
     * Opens the first camera matching {@link #CameraFacing} and starts
     * previewing into the given (off-screen) SurfaceTexture. Any previously
     * opened camera is released first.
     */
    public void openCamera(SurfaceTexture surfaceTexture) {
        release(); // drop any previous camera before opening a new one
        Camera.CameraInfo info = new Camera.CameraInfo();
        for (int i = 0; i < Camera.getNumberOfCameras(); i++) {
            Camera.getCameraInfo(i, info);
            if (info.facing == CameraFacing) {
                try {
                    mCamera = Camera.open(i);
                    mCameraInfo = info;
                } catch (RuntimeException e) {
                    // Camera may be held by another process; try the next id.
                    e.printStackTrace();
                    mCamera = null;
                    continue;
                }
                break;
            }
        }
        if (mCamera == null) {
            // BUGFIX: the original fell through and dereferenced a null
            // mCamera when no camera with the requested facing could be opened.
            return;
        }
        try {
            mCamera.setPreviewTexture(surfaceTexture);
            initCamera();
        } catch (Exception ex) {
            ex.printStackTrace();
            release();
        }
    }

    /** Configures flash, sizes, rotation and focus, then starts the preview. */
    private void initCamera() {
        if (null != mCamera) {
            try {
                Camera.Parameters parameters = mCamera.getParameters();

                List<String> flashModes = parameters.getSupportedFlashModes();
                if (flashModes != null && flashModes.contains(Camera.Parameters.FLASH_MODE_OFF)) {
                    parameters.setFlashMode(Camera.Parameters.FLASH_MODE_OFF);
                }

                parameters.setPreviewSize(PREVIEW_WIDTH, PREVIEW_HEIGHT);

                // Pick the first picture size at least 1280 wide (as before),
                // but BUGFIX: fall back to the largest supported size instead
                // of NPE-ing on fs.width when no size qualifies — that NPE
                // aborted the whole setup inside the catch below, so the
                // preview never started.
                List<Camera.Size> pictureSizes = parameters.getSupportedPictureSizes();
                Camera.Size fs = null;
                Camera.Size largest = null;
                for (int i = 0; i < pictureSizes.size(); i++) {
                    Camera.Size psize = pictureSizes.get(i);
                    if (fs == null && psize.width >= 1280) {
                        fs = psize;
                    }
                    if (largest == null || psize.width * psize.height > largest.width * largest.height) {
                        largest = psize;
                    }
                }
                if (fs == null) {
                    fs = largest;
                }
                if (fs != null) {
                    parameters.setPictureSize(fs.width, fs.height);
                }

                if (context.getResources().getConfiguration().orientation != Configuration.ORIENTATION_LANDSCAPE) {
                    parameters.set("orientation", "portrait");
                    parameters.set("rotation", 90);

                    // BUGFIX: clamp into [0, 360). The original could pass 360
                    // to setDisplayOrientation for a front camera reporting
                    // orientation 0; only 0/90/180/270 are valid.
                    int orientation = CameraFacing == Camera.CameraInfo.CAMERA_FACING_FRONT
                            ? (360 - mCameraInfo.orientation) % 360
                            : mCameraInfo.orientation;
                    mCamera.setDisplayOrientation(orientation);
                } else {
                    parameters.set("orientation", "landscape");
                    mCamera.setDisplayOrientation(0);
                }

                if (CameraFacing == Camera.CameraInfo.CAMERA_FACING_BACK) {
                    if (parameters.getSupportedFocusModes().contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)) {
                        parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
                    } else {
                        // Idiom fix: reference the constant statically rather
                        // than through the parameters instance.
                        parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO);
                    }
                }

                mCamera.setParameters(parameters);
                mCamera.setPreviewCallback(this.mPreviewCallback);
                mCamera.startPreview();
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
    }

    /**
     * Registers the frame callback; applied immediately if the camera is
     * already open, otherwise when {@link #initCamera()} runs.
     */
    public void setPreviewCallback(Camera.PreviewCallback previewCallback) {
        this.mPreviewCallback = previewCallback;
        if (mCamera != null) {
            mCamera.setPreviewCallback(previewCallback);
        }
    }

    /** @return the sensor orientation of the open camera, or 0 if none. */
    public int getOrientation() {
        if (mCameraInfo != null) {
            return mCameraInfo.orientation;
        }
        return 0;
    }

    /** Stops preview and releases the camera. Safe to call when already released. */
    public void release() {
        if (null != mCamera) {
            mCamera.setPreviewCallback(null);
            mCamera.stopPreview();
            mCamera.release();
            mCamera = null;
        }
    }

}

PREVIEW_WIDTH和PREVIEW_HEIGHT取自网上一个人脸识别示例所用的预览宽高;为了验证Service中是否真的开启了Camera,我顺便接了个人脸识别来消费预览数据
最后是Service里的代码
CameraService.java

public class CameraService extends Service {
    @Nullable
    @Override
    public IBinder onBind(Intent intent) {
        return null; // started service only; binding is not supported
    }

    // NV21 needs width*height*3/2 bytes; *2 leaves deliberate headroom for
    // other preview formats. Guarded copy below prevents overflow regardless.
    private final byte[] mNv21Data = new byte[CameraOverlap.PREVIEW_WIDTH * CameraOverlap.PREVIEW_HEIGHT * 2];
    private HandlerThread mHandlerThread;
    private Handler mHandler;
    private CameraOverlap cameraOverlap;

    private DummyEGLUtils mEglUtils;
    private GLFramebuffer mFramebuffer;

    @Override
    public void onCreate() {
        super.onCreate();
        mHandlerThread = new HandlerThread("DrawFaceThread");
        mHandlerThread.start();
        mHandler = new Handler(mHandlerThread.getLooper());
        cameraOverlap = new CameraOverlap(this);
        mEglUtils = new DummyEGLUtils();
        mFramebuffer = new GLFramebuffer();
        cameraOverlap.setPreviewCallback(new Camera.PreviewCallback() {
            @Override
            public void onPreviewFrame(byte[] data, Camera camera) {
                synchronized (mNv21Data) {
                    // BUGFIX: bound the copy so an unexpected frame size can
                    // never overflow the destination buffer.
                    System.arraycopy(data, 0, mNv21Data, 0, Math.min(data.length, mNv21Data.length));
                }
                mHandler.post(new Runnable() {
                    @Override
                    public void run() {
                        Log.d("================", "mNv21Data就是camera的图像数据");
                    }
                });
            }
        });
        // EGL must be initialized on the thread that will use the GL context,
        // so all GL/camera setup runs on the HandlerThread. (The original
        // posted a Runnable whose only job was to post another Runnable —
        // the redundant inner post is removed.)
        mHandler.post(new Runnable() {
            @Override
            public void run() {
                mEglUtils.initEGL();
                mFramebuffer.initFramebuffer();
                cameraOverlap.openCamera(mFramebuffer.getSurfaceTexture());
            }
        });
    }

    @Override
    public void onDestroy() {
        super.onDestroy();
        // Tear down on the GL thread so release calls run against the same
        // EGL context, then let the thread exit after this runnable finishes.
        mHandler.post(new Runnable() {
            @Override
            public void run() {
                cameraOverlap.release();
                mFramebuffer.release();
                mEglUtils.release();
                mHandlerThread.quit();
            }
        });
    }
}

至于怎么开启Service我就不写了
本文的Camera指的是android.hardware.Camera而不是android.hardware.camera2,其实camera2也简单只要通过SurfaceTexture再创建个Surface传给CameraDevice就好了

补上GLFramebuffer.java

public class GLFramebuffer {

    // Texture transform reported by SurfaceTexture.getTransformMatrix().
    private float[] mSTMatrix = new float[16];

    private int[] textures;

    private SurfaceTexture surfaceTexture;

    /**
     * Creates the external OES texture the camera streams into. Must run on
     * the thread whose EGL context is current.
     */
    public void initFramebuffer() {

        textures = new int[1];
        GLES20.glGenTextures(1, textures, 0);
        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textures[0]);
        GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER,
                GLES20.GL_NEAREST);
        GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER,
                GLES20.GL_LINEAR);
        // External OES textures only support CLAMP_TO_EDGE wrapping; set it
        // explicitly so the texture is complete on strict drivers.
        GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S,
                GLES20.GL_CLAMP_TO_EDGE);
        GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T,
                GLES20.GL_CLAMP_TO_EDGE);
        // BUGFIX: unbind the target that was actually bound. The original
        // unbound GL_TEXTURE_2D, leaving the OES target still bound.
        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0);
    }

    /**
     * Wraps the OES texture in a SurfaceTexture for Camera.setPreviewTexture.
     * Call after {@link #initFramebuffer()}.
     */
    public SurfaceTexture getSurfaceTexture() {
        surfaceTexture = new SurfaceTexture(textures[0]);
        return surfaceTexture;
    }

    /** Releases the GL texture and the SurfaceTexture. */
    public void release() {
        GLES20.glDeleteTextures(1, textures, 0);
        if (surfaceTexture != null) {
            surfaceTexture.release();
            surfaceTexture = null;
        }
    }

    /**
     * Latches the most recent camera frame into the OES texture and refreshes
     * the transform matrix.
     *
     * @return the OES texture id holding the latest frame
     */
    public int drawFrameBuffer() {
        if (surfaceTexture != null) {
            surfaceTexture.updateTexImage();
            surfaceTexture.getTransformMatrix(mSTMatrix);
        }
        return textures[0];
    }

    /** @return the 4x4 texture transform from the last {@link #drawFrameBuffer()}. */
    public float[] getMatrix() {
        return mSTMatrix;
    }

}

 

  • 2
    点赞
  • 4
    收藏
    觉得还不错? 一键收藏
  • 5
    评论

“相关推荐”对你有帮助么?

  • 非常没帮助
  • 没帮助
  • 一般
  • 有帮助
  • 非常有帮助
提交
评论 5
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值