Android OpenCV: a JavaCamera2View implemented with Camera2

The original plan was simple: use OpenGL ES 3.0 PBOs to read the Camera2 frames back into memory. That ran into a nasty pitfall: once the resolution is set too high, calling glReadPixels causes the surface to stop accepting data.
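
For the record, the PBO readback I tried looked roughly like this. A minimal sketch, assuming a GL ES 3.0 context is current on the calling thread and the frame has already been drawn; the PboReader class and its names are illustrative, not from the original code (note the glReadPixels offset overload requires API 24+):

import java.nio.Buffer;
import java.nio.ByteBuffer;

import android.opengl.GLES30;

public class PboReader {
    private final int[] pbo = new int[1];
    private final int width;
    private final int height;

    public PboReader(int width, int height) {
        this.width = width;
        this.height = height;
        GLES30.glGenBuffers(1, pbo, 0);
        GLES30.glBindBuffer(GLES30.GL_PIXEL_PACK_BUFFER, pbo[0]);
        // Reserve room for one RGBA frame; GL_STREAM_READ hints at frequent readback
        GLES30.glBufferData(GLES30.GL_PIXEL_PACK_BUFFER, width * height * 4, null, GLES30.GL_STREAM_READ);
        GLES30.glBindBuffer(GLES30.GL_PIXEL_PACK_BUFFER, 0);
    }

    // Call on the GL thread after the frame has been rendered.
    public ByteBuffer read() {
        ByteBuffer out = ByteBuffer.allocateDirect(width * height * 4);
        GLES30.glBindBuffer(GLES30.GL_PIXEL_PACK_BUFFER, pbo[0]);
        // With a pack PBO bound, the last argument is an offset into the buffer,
        // not a client-side pointer. This is the call that broke at high resolutions.
        GLES30.glReadPixels(0, 0, width, height, GLES30.GL_RGBA, GLES30.GL_UNSIGNED_BYTE, 0);
        Buffer mapped = GLES30.glMapBufferRange(GLES30.GL_PIXEL_PACK_BUFFER, 0,
                width * height * 4, GLES30.GL_MAP_READ_BIT);
        if (mapped != null) {
            out.put((ByteBuffer) mapped);
            out.rewind();
        }
        GLES30.glUnmapBuffer(GLES30.GL_PIXEL_PACK_BUFFER);
        GLES30.glBindBuffer(GLES30.GL_PIXEL_PACK_BUFFER, 0);
        return out;
    }
}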

So I switched to ImageReader. The high-resolution problem persists: it no longer makes the surface reject data, but it does make the render thread stutter.

The conclusion: the phone is simply too weak to handle high-resolution rendering.

In the end I went with ImageReader to implement JavaCamera2View. Since it is a direct modification of the JavaCameraView code, the OpenGL PBO path was dropped.

JavaCamera2View

public class JavaCamera2View extends CameraBridgeViewBase {

    private static final String TAG = "JavaCamera2View";


    private Mat[] mFrameChain;
    private int mChainIdx = 0;
    private Thread mThread;
    private boolean mStopThread;


    protected JavaCameraFrame[] mCameraFrame;


    private String mCameraId;
    private CameraManager mCameraManager;
    private CameraCaptureSession mCameraCaptureSession;
    private CameraDevice mCameraDevice;
    private CaptureRequest.Builder builder;
    private Handler mCameraHandler;
    private Handler mImageHandler;
    private android.util.Size[] mSizes;

    private ImageReader mImageReader;

    public JavaCamera2View(Context context) {
        super(context, -1);
    }

    public JavaCamera2View(Context context, AttributeSet attrs) {
        super(context, attrs);
    }

    protected void initializeCamera(int width, int height) {
        synchronized (this) {
            initCamera2();
            // Pick a preview size from the resolutions the camera supports
            android.util.Size mPreviewSize = getPreferredPreviewSize(mSizes, width, height);
            mFrameWidth = mPreviewSize.getWidth();
            mFrameHeight = mPreviewSize.getHeight();

            if (width < mFrameWidth || height < mFrameHeight)
                mScale = Math.min(((float)height)/mFrameHeight, ((float)width)/mFrameWidth);
            else
                mScale = 0;

            if (mFpsMeter != null) {
                mFpsMeter.setResolution(mFrameWidth, mFrameHeight);
            }

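            // Each Mat in the chain holds one NV21 frame as a single-channel
            // buffer: height rows of Y followed by height/2 rows of interleaved
            // VU, i.e. 1.5 * height rows in total.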
            mFrameChain = new Mat[2];
            mFrameChain[0] = new Mat(mFrameHeight + (mFrameHeight/2), mFrameWidth, CvType.CV_8UC1);
            mFrameChain[1] = new Mat(mFrameHeight + (mFrameHeight/2), mFrameWidth, CvType.CV_8UC1);

            AllocateCache();

            mCameraFrame = new JavaCameraFrame[2];
            mCameraFrame[0] = new JavaCameraFrame(mFrameChain[0], mFrameWidth, mFrameHeight);
            mCameraFrame[1] = new JavaCameraFrame(mFrameChain[1], mFrameWidth, mFrameHeight);
        }
    }

    protected void releaseCamera() {
        synchronized (this) {
            if (mCameraCaptureSession != null) {
                mCameraCaptureSession.getDevice().close();
                mCameraCaptureSession.close();
                mCameraCaptureSession = null;
            }
            // Also release the ImageReader and stop the handler threads
            if (mImageReader != null) {
                mImageReader.close();
                mImageReader = null;
            }
            if (mCameraHandler != null) {
                mCameraHandler.getLooper().quitSafely();
                mCameraHandler = null;
            }
            if (mImageHandler != null) {
                mImageHandler.getLooper().quitSafely();
                mImageHandler = null;
            }
            if (mFrameChain != null) {
                mFrameChain[0].release();
                mFrameChain[1].release();
            }
            if (mCameraFrame != null) {
                mCameraFrame[0].release();
                mCameraFrame[1].release();
            }
        }
    }

    private boolean mCameraFrameReady = false;

    @Override
    protected boolean connectCamera(int width, int height) {

        /* 1. We need to instantiate the camera
         * 2. We need to start the thread which will be getting frames
         */
        /* First step - initialize camera connection */
        initializeCamera(width, height);

        mCameraFrameReady = false;

        /* now we can start update thread */
        Log.d(TAG, "Starting processing thread");
        mStopThread = false;
        mThread = new Thread(new CameraWorker());
        mThread.start();

        return true;
    }

    @Override
    protected void disconnectCamera() {
        /* 1. We need to stop the thread which is updating the frames
         * 2. Stop the camera and release it
         */
        Log.d(TAG, "Disconnecting from camera");
        try {
            mStopThread = true;
            Log.d(TAG, "Notify thread");
            synchronized (this) {
                this.notify();
            }
            Log.d(TAG, "Wating for thread");
            if (mThread != null)
                mThread.join();
        } catch (InterruptedException e) {
            e.printStackTrace();
        } finally {
            mThread = null;
        }

        /* Now release camera */
        releaseCamera();

        mCameraFrameReady = false;
    }

    private class JavaCameraFrame implements CvCameraViewFrame {
        @Override
        public Mat gray() {
            return mYuvFrameData.submat(0, mHeight, 0, mWidth);
        }

        @Override
        public Mat rgba() {
            Imgproc.cvtColor(mYuvFrameData, mRgba, Imgproc.COLOR_YUV2RGBA_NV21, 4);
            return mRgba;
        }

        public JavaCameraFrame(Mat Yuv420sp, int width, int height) {
            super();
            mWidth = width;
            mHeight = height;
            mYuvFrameData = Yuv420sp;
            mRgba = new Mat();
        }

        public void release() {
            mRgba.release();
        }

        private Mat mYuvFrameData;
        private Mat mRgba;
        private int mWidth;
        private int mHeight;
    }
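
    // The ImageReader callback and the worker thread share mFrameChain as a
    // double buffer: the callback fills mFrameChain[mChainIdx], then the worker
    // flips mChainIdx and delivers the buffer that was just written.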
    private class CameraWorker implements Runnable {

        @Override
        public void run() {
            openCamera2();
            do {
                boolean hasFrame = false;
                synchronized (JavaCamera2View.this) {
                    try {
                        while (!mCameraFrameReady && !mStopThread) {
                            JavaCamera2View.this.wait();
                        }
                    } catch (InterruptedException e) {
                        e.printStackTrace();
                    }
                    if (mCameraFrameReady)
                    {
                        mChainIdx = 1 - mChainIdx;
                        mCameraFrameReady = false;
                        hasFrame = true;
                    }
                }

                if (!mStopThread && hasFrame) {
                    if (!mFrameChain[1 - mChainIdx].empty())
                        deliverAndDrawFrame(mCameraFrame[1 - mChainIdx]);
                }
            } while (!mStopThread);
            Log.d(TAG, "Finish processing thread");
        }
    }






    private void initCamera2() {
        HandlerThread cameraHandlerThread = new HandlerThread("Camera2");
        cameraHandlerThread.start();
        mCameraHandler = new Handler(cameraHandlerThread.getLooper());

        HandlerThread imageHandlerThread = new HandlerThread("image");
        imageHandlerThread.start();
        mImageHandler = new Handler(imageHandlerThread.getLooper());
        mCameraManager = (CameraManager) getContext().getSystemService(Context.CAMERA_SERVICE);
        try {
            String[] cameraIdList = mCameraManager.getCameraIdList();
            mCameraId = cameraIdList[0];
            CameraCharacteristics characteristics = mCameraManager.getCameraCharacteristics(mCameraId);
            StreamConfigurationMap map = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
            if (map != null) {
                // Query sizes for the format the ImageReader actually uses
                mSizes = map.getOutputSizes(ImageFormat.YUV_420_888);
            }

        } catch (CameraAccessException e) {
            e.printStackTrace();
        }
    }

    private void openCamera2() {
        if (PermissionChecker.checkSelfPermission(getContext(), Manifest.permission.CAMERA) == PackageManager.PERMISSION_GRANTED) {
            try {
                mCameraManager.openCamera(mCameraId, stateCallback, mCameraHandler);
            } catch (CameraAccessException e) {
                e.printStackTrace();
            }
        }
    }

    private CameraDevice.StateCallback stateCallback = new CameraDevice.StateCallback() {
        @Override
        public void onOpened(@NonNull CameraDevice cameraDevice) {
            mCameraDevice = cameraDevice;
            takePreview();
        }

        @Override
        public void onDisconnected(@NonNull CameraDevice cameraDevice) {
            if (mCameraDevice != null) {
                mCameraDevice.close();
                mCameraDevice = null;
            }
        }

        @Override
        public void onError(@NonNull CameraDevice cameraDevice, int i) {

        }
    };
    private void takePreview() {

        try {
            // When rendering with OpenGL, the ImageReader is only there to feed OpenCV.
            // Don't set the width/height too high, or the OpenGL render thread will stutter.
            // The Image's actual size is tied to what the camera supports:
            // for example, request 600x600 and the Image may come back as 640x480.
            // 640x480 is a size the camera supports; it will generally be no larger than the requested value.
            mImageReader = ImageReader.newInstance(mFrameWidth, mFrameHeight, ImageFormat.YUV_420_888, 1);
            mImageReader.setOnImageAvailableListener(new ImageReader.OnImageAvailableListener() {
                @Override
                public void onImageAvailable(ImageReader imageReader) {
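                    // maxImages is 1, so this Image must be closed before the next frame can be acquired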
                    Image image = imageReader.acquireNextImage();
                    if (image == null) {
                        return;
                    }
                    byte[] data = ImageUtils.YUV_420_888toNV21(image);
                    synchronized (JavaCamera2View.this) {
                        mFrameChain[mChainIdx].put(0, 0, data);
                        mCameraFrameReady = true;
                        JavaCamera2View.this.notify();
                    }
                    image.close();
                }
            }, mImageHandler);

            builder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
            builder.addTarget(mImageReader.getSurface());
            mCameraDevice.createCaptureSession(Arrays.asList(mImageReader.getSurface()), new CameraCaptureSession.StateCallback() {
                @Override
                public void onConfigured(@NonNull CameraCaptureSession cameraCaptureSession) {
                    if (null == mCameraDevice) return;
                    mCameraCaptureSession = cameraCaptureSession;
                    builder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
                    builder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);
                    CaptureRequest previewRequest = builder.build();
                    try {
                        mCameraCaptureSession.setRepeatingRequest(previewRequest, null, mCameraHandler);
                    } catch (CameraAccessException e) {
                        e.printStackTrace();
                    }
                }

                @Override
                public void onConfigureFailed(@NonNull CameraCaptureSession cameraCaptureSession) {

                }
            }, mCameraHandler);

        } catch (CameraAccessException e) {
            e.printStackTrace();
        }
    }
    // Pick the lowest-resolution 16:9 size (the width/height arguments end up unused)
    private android.util.Size getPreferredPreviewSize(android.util.Size[] sizes, int width, int height) {
        List<android.util.Size> collectorSizes = new ArrayList<>();
        for (android.util.Size option : sizes) {
            // No point in fancy scoring: only the lowest resolution runs smoothly,
            // so just collect the 16:9 sizes here.
            int w = option.getWidth();
            int h = option.getHeight();
            // 16:9 gives w * 100 / h == 177 (integer truncation of 177.78)
            int s = w * 100 / h;
            if (s == 177) {
                collectorSizes.add(option);
            }
        }
        if (collectorSizes.size() > 0) {
            return Collections.min(collectorSizes, new Comparator<android.util.Size>() {
                @Override
                public int compare(android.util.Size s1, android.util.Size s2) {
                    return Long.signum((long) s1.getWidth() * s1.getHeight() - (long) s2.getWidth() * s2.getHeight());
                }
            });
        }
        return sizes[0];
    }
}
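
Using the view is the same as with the stock JavaCameraView. A minimal sketch, assuming the camera permission has already been granted and the usual OpenCV-for-Android setup; the activity, layout, and view ids are illustrative:

import org.opencv.android.CameraBridgeViewBase;
import org.opencv.android.OpenCVLoader;
import org.opencv.core.Mat;

import android.app.Activity;
import android.os.Bundle;
import android.view.SurfaceView;

public class PreviewActivity extends Activity implements CameraBridgeViewBase.CvCameraViewListener2 {

    private JavaCamera2View mCameraView;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_preview);           // layout id is illustrative
        mCameraView = findViewById(R.id.java_camera2_view);  // view id is illustrative
        mCameraView.setVisibility(SurfaceView.VISIBLE);
        mCameraView.setCvCameraViewListener(this);
    }

    @Override
    protected void onResume() {
        super.onResume();
        // Static initialization; a production app might fall back to OpenCVLoader.initAsync()
        if (OpenCVLoader.initDebug()) {
            mCameraView.enableView();
        }
    }

    @Override
    protected void onPause() {
        super.onPause();
        if (mCameraView != null) mCameraView.disableView();
    }

    @Override
    public void onCameraViewStarted(int width, int height) { }

    @Override
    public void onCameraViewStopped() { }

    @Override
    public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
        // rgba() runs the NV21 -> RGBA conversion in JavaCameraFrame
        return inputFrame.rgba();
    }
}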

The YUV_420_888-to-NV21 conversion is adapted from this reference:

Android: YUV_420_888编码Image转换为I420和NV21格式byte数组

ImageUtils

public class ImageUtils {

    public static byte[] imageToByteArray(Image image) {
        byte[] data = null;
        if (image.getFormat() == ImageFormat.JPEG) {
            Image.Plane[] planes = image.getPlanes();
            ByteBuffer buffer = planes[0].getBuffer();
            data = new byte[buffer.capacity()];
            buffer.get(data);
            return data;
        } else if (image.getFormat() == ImageFormat.YUV_420_888) {
            data = NV21toJPEG(
                    YUV_420_888toNV21(image),
                    image.getWidth(), image.getHeight());
        }
        return data;
    }

    public static byte[] YUV_420_888toNV21(Image image) {
        Rect crop = image.getCropRect();
        int format = image.getFormat();
        int width = crop.width();
        int height = crop.height();
        Image.Plane[] planes = image.getPlanes();
        byte[] data = new byte[width * height * ImageFormat.getBitsPerPixel(format) / 8];
        byte[] rowData = new byte[planes[0].getRowStride()];
        int channelOffset = 0;
        int outputStride = 1;
        for (int i = 0; i < planes.length; i++) {
            switch (i) {
                case 0:
                    // Y plane: copied as-is to the front of the output
                    channelOffset = 0;
                    outputStride = 1;
                    break;
                case 1:
                    // U plane: NV21 is Y then interleaved VUVU..., so U starts one byte after V
                    channelOffset = width * height + 1;
                    outputStride = 2;
                    break;
                case 2:
                    // V plane: starts immediately after the Y plane
                    channelOffset = width * height;
                    outputStride = 2;
                    break;
            }
            ByteBuffer buffer = planes[i].getBuffer();
            int rowStride = planes[i].getRowStride();
            int pixelStride = planes[i].getPixelStride();

            int shift = (i == 0) ? 0 : 1;
            int w = width >> shift;
            int h = height >> shift;
            buffer.position(rowStride * (crop.top >> shift) + pixelStride * (crop.left >> shift));
            for (int row = 0; row < h; row++) {
                int length;
                if (pixelStride == 1 && outputStride == 1) {
                    length = w;
                    buffer.get(data, channelOffset, length);
                    channelOffset += length;
                } else {
                    length = (w - 1) * pixelStride + 1;
                    buffer.get(rowData, 0, length);
                    for (int col = 0; col < w; col++) {
                        data[channelOffset] = rowData[col * pixelStride];
                        channelOffset += outputStride;
                    }
                }
                if (row < h - 1) {
                    buffer.position(buffer.position() + rowStride - length);
                }
            }
        }
        return data;
    }

    public static byte[] NV21toJPEG(byte[] nv21, int width, int height) {
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        YuvImage yuv = new YuvImage(nv21, ImageFormat.NV21, width, height, null);
        yuv.compressToJpeg(new Rect(0, 0, width, height), 100, out);
        return out.toByteArray();
    }
}

All this converting back and forth is a hassle, and I never did figure out how to turn the YUV data into a Bitmap directly.
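
One workable (if lossy) route is to reuse the NV21toJPEG step above and let BitmapFactory do the decoding. A minimal sketch built on the ImageUtils methods already shown:

import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.media.Image;

public class YuvToBitmap {
    // YUV_420_888 -> NV21 -> JPEG -> Bitmap. Simple, but the JPEG round trip
    // costs CPU time and is lossy, so it is not a per-frame solution.
    public static Bitmap toBitmap(Image image) {
        byte[] nv21 = ImageUtils.YUV_420_888toNV21(image);
        byte[] jpeg = ImageUtils.NV21toJPEG(nv21, image.getWidth(), image.getHeight());
        return BitmapFactory.decodeByteArray(jpeg, 0, jpeg.length);
    }
}

Alternatives that skip the JPEG detour include RenderScript's ScriptIntrinsicYuvToRGB, or converting with OpenCV (Imgproc.cvtColor to RGBA, then org.opencv.android.Utils.matToBitmap).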










