Android Camera2预览和实时帧数据获取

1、预览

2、角度旋转

3、实时帧数据获取ImageReader的参数设置

4、实时帧数据格式转换

package com.neatech.stface.tools;

import android.Manifest;
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.graphics.ImageFormat;
import android.graphics.Matrix;
import android.graphics.RectF;
import android.graphics.SurfaceTexture;
import android.hardware.Camera;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.media.ExifInterface;
import android.media.Image;
import android.media.ImageReader;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.Message;
import android.support.annotation.NonNull;
import android.support.v4.app.ActivityCompat;
import android.util.Log;
import android.util.Size;
import android.view.Surface;
import android.view.TextureView;
import android.widget.RelativeLayout;

import com.neatech.stface.R;
import com.neatech.stface.broadcast.CameraErrorReceiver;
import com.neatech.stface.view.FaceDrawerView;

import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;

/**
 * 功能:视频预览
 * 作者:陈晗
 * 时间:2019-3-7 16:47
 */
public class CameraPreviewHelper {

    private String Tag = "CameraPreviewHelper";
    private Context mContext;
    private TextureView mTextureView;

    private HandlerThread mHandlerThread;
    private Handler childHandler;   // bound to mHandlerThread's looper; used for capture-session callbacks
    private Handler mHandler;       // main-looper handler; receives CAPTURE_IMG_PREPARED messages
    private Activity activity;

    private LivenessTackThread mLiveTackThread;

    private TrackAndSearchThread trackAndSearchThread;

    private FaceDrawerView mFaceDrawerView;

    private String mCameraId;
    private Size mPreviewSize;
    private CameraManager mCameraManager;   // system camera service
    private CameraDevice mCameraDevice;
    private SurfaceTexture mSurfaceTexture = null;
    private CaptureRequest.Builder mCaptureRequestBuilder;
    private CaptureRequest mCaptureRequest;
    private CameraCaptureSession mCameraCaptureSession;

    private boolean isConnect = false;

    private ImageReader mImageReader;
    private int capacity = -1;
    private static byte[] yv12bytes;
    private static byte[] imgData;  // latest preview frame, converted to NV21
    private int imgFormat;
    private int imgHeight;
    private int ingWidth;

    public final static int picMode = 640;   // 720p, 1080p or 640p

    public static final int CAPTURE_IMG_PREPARED = 1;
    public static final int TRY_TO_CAPTURE_IMG = 2;

    public static int preview_width;      // preview buffer dimensions
    public static int preview_higth;
    public static int surface_width;    // TextureView surface dimensions
    public static int surface_higth;

    public static int delaytest = 1;

    /**
     * Creates the helper, attaches a {@link FaceDrawerView} overlay to the
     * activity's root layout and starts the background HandlerThread used
     * for camera callbacks.
     *
     * @param activity    hosting activity; must contain R.id.rootview_relativelayout_camera_verity
     * @param textureView the view that renders the camera preview
     */
    public CameraPreviewHelper(Activity activity, TextureView textureView) {
        mContext = activity.getBaseContext();
        mTextureView = textureView;
        this.activity = activity;
        mFaceDrawerView = new FaceDrawerView(mContext);
        ((RelativeLayout) activity.findViewById(R.id.rootview_relativelayout_camera_verity)).addView(mFaceDrawerView);
        mHandlerThread = new HandlerThread(Tag);
        mHandlerThread.start();
        childHandler = new Handler(mHandlerThread.getLooper());
        mHandler = new Handler(activity.getMainLooper(), mHandlerCallback);
    }

    /**
     * Registers the surface listener (the camera is opened lazily once the
     * TextureView surface becomes available) and starts the face
     * track-and-search worker thread.
     *
     * @param isliveness currently unused; kept for caller compatibility
     * @return false if the helper has a null context or view, true otherwise
     */
    public boolean Connect(boolean isliveness) {
        if (mContext == null || mTextureView == null) {
            Log.e("asda", "Connect");
            return false;
        }

        mTextureView.setSurfaceTextureListener(mSurfaceTextureListener);

        if (mCameraDevice != null)
            mCameraDevice.close();

        isConnect = true;

        trackAndSearchThread = new TrackAndSearchThread(activity, mFaceDrawerView);
        trackAndSearchThread.start();
        return true;
    }

    /**
     * Tears everything down: worker thread, handlers, handler thread,
     * capture session, camera device and ImageReader. The helper is not
     * reusable after this call.
     */
    public void DisConnect() {
        mContext = null;
        activity = null;

        if (trackAndSearchThread != null)
            trackAndSearchThread.stopTrack();

        if (mTextureView != null) {
            mTextureView = null;
        }

        if (childHandler != null) {
            childHandler.removeCallbacksAndMessages(null);
            childHandler = null;
        }
        // Fixed: the original omitted braces here, so mHandler was nulled
        // unconditionally while removeCallbacksAndMessages stayed conditional.
        if (mHandler != null) {
            mHandler.removeCallbacksAndMessages(null);
            mHandler = null;
        }

        if (mHandlerThread != null) {
            mHandlerThread.quit();
            mHandlerThread = null;
        }

        if (null != mCameraCaptureSession) {
            mCameraCaptureSession.close();
            mCameraCaptureSession = null;
        }
        if (null != mCameraDevice) {
            mCameraDevice.close();
            mCameraDevice = null;
        }
        if (null != mImageReader) {
            mImageReader.close();
            mImageReader = null;
        }
    }

    TextureView.SurfaceTextureListener mSurfaceTextureListener = new TextureView.SurfaceTextureListener() {
        @Override
        public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
            try {
                if (mCameraManager == null) {
                    // Obtain the system camera manager and use the first available camera.
                    mCameraManager = (CameraManager) mContext.getSystemService(Context.CAMERA_SERVICE);
                    String[] cameraIdList = mCameraManager.getCameraIdList();
                    Log.e(Tag, "可用相机的个数是:" + cameraIdList.length);
                    mCameraId = cameraIdList[0];
                    CameraCharacteristics cameraCharacteristics = mCameraManager.getCameraCharacteristics(mCameraId);
                    cameraCharacteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL);

                    try {
                        if (ActivityCompat.checkSelfPermission(mContext, Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) {
                            return;
                        }
                        if (mCameraManager == null)
                            return;
                        surface_width = width;
                        surface_higth = height;
                        // StreamConfigurationMap lists every output size/format this camera supports.
                        StreamConfigurationMap map = cameraCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
                        // Pick the closest supported size that covers the surface.
                        mPreviewSize = getOptimalSize(map.getOutputSizes(SurfaceTexture.class), width, height);
                        Log.d(Tag, "getOptimalPreviewSize: w" + mPreviewSize.getWidth() + "  h" + mPreviewSize.getHeight());
                        if (picMode == 720) {
                            mPreviewSize = new Size(1280, 720);
                        }
                        // NOTE(review): width and height are intentionally assigned
                        // crosswise, presumably because the sensor is rotated 90°
                        // relative to the portrait view — confirm on target device.
                        preview_higth = mPreviewSize.getWidth();
                        preview_width = mPreviewSize.getHeight();
                        // Straighten the preview if the display rotation requires it.
                        configureTransform(surface_width, surface_higth);
                        /*
                         * Real-time frame grabbing.
                         * Use YV12 (or YUV_420_888) here — never JPEG: encoding every
                         * preview frame as JPEG is too slow and causes dropped frames.
                         * On the target device 720x960 was the only size that previews
                         * smoothly for these formats; other sizes stutter badly.
                         */
                        mImageReader = ImageReader.newInstance(720, 960, ImageFormat.YV12, 10);
                        mImageReader.setOnImageAvailableListener(mOnImageAvailableListener, mHandler);

                        // Open the camera; stateCallback starts the preview session.
                        mCameraManager.openCamera(mCameraId, stateCallback, mHandler);

                    } catch (CameraAccessException e) {
                        e.printStackTrace();
                    }
                }
            } catch (Exception e) {
                e.printStackTrace();
                // Notify the app that the camera could not be opened.
                if (activity != null)
                    activity.sendBroadcast(new Intent(CameraErrorReceiver.ACTION_CAMERA_OPEN_ERROR));
            }
        }

        @Override
        public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {
            configureTransform(width, height);
        }

        @Override
        public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
            if (mCameraDevice != null) {
                mCameraDevice.close();
                mCameraDevice = null;
                Log.d(Tag, "onSurfaceTextureDestroyed: stopPreview");
            }
            Log.e(Tag, "onSurfaceTextureDestroyed");
            return true;
        }

        @Override
        public void onSurfaceTextureUpdated(SurfaceTexture surface) {

        }
    };

    /**
     * Camera open/close/error callback. On open it builds a repeating preview
     * request that targets both the TextureView surface and the ImageReader.
     */
    private CameraDevice.StateCallback stateCallback = new CameraDevice.StateCallback() {
        @Override
        public void onOpened(CameraDevice camera) {
            mCameraDevice = camera;

            if (mCameraDevice == null)
                return;
            SurfaceTexture mSurfaceTexture = mTextureView.getSurfaceTexture();
            mSurfaceTexture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());

            Surface previewSurface = new Surface(mSurfaceTexture);
            try {
                mCaptureRequestBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
                mCaptureRequestBuilder.addTarget(previewSurface);
                // Also stream frames into the ImageReader for real-time processing.
                mCaptureRequestBuilder.addTarget(mImageReader.getSurface());
                mCameraDevice.createCaptureSession(Arrays.asList(previewSurface, mImageReader.getSurface()), new CameraCaptureSession.StateCallback() {
                    @Override
                    public void onConfigured(@NonNull CameraCaptureSession session) {
                        try {
                            mCameraCaptureSession = session;
                            mCaptureRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
                            // Start the repeating preview request.
                            mCameraCaptureSession.setRepeatingRequest(mCaptureRequestBuilder.build(), null, childHandler);
                        } catch (CameraAccessException e) {
                            e.printStackTrace();
                        }
                    }

                    @Override
                    public void onConfigureFailed(@NonNull CameraCaptureSession session) {
                        // No recovery path; the preview simply does not start.
                        Log.e(Tag, "createCaptureSession: onConfigureFailed");
                    }
                }, childHandler);
            } catch (CameraAccessException e) {
                e.printStackTrace();
            }
        }

        @Override
        public void onDisconnected(CameraDevice camera) {
            if (null != mCameraDevice) {
                mCameraDevice.close();
                mCameraDevice = null;
            }
        }

        @Override
        public void onError(CameraDevice camera, int error) {
            if (null != mCameraDevice) {
                mCameraDevice.close();
                mCameraDevice = null;
            }
            Log.e(Tag, "打开摄像头失败");
        }
    };

    /**
     * Receives real-time frames.
     * Always use acquireLatestImage() + close(), otherwise the reader's buffer
     * queue fills up and the preview stalls. ImageReader cannot emit NV21
     * directly, so the YV12/YUV_420_888 frame is converted to NV21 here.
     */
    private ImageReader.OnImageAvailableListener mOnImageAvailableListener
            = new ImageReader.OnImageAvailableListener() {

        @Override
        public void onImageAvailable(ImageReader reader) {
            // acquireLatestImage() is documented to return null when no new
            // frame is available — the original code crashed on that.
            Image image = reader.acquireLatestImage();
            if (image == null)
                return;
            try {
                // Skip frames while the worker is still busy with the last one.
                if (trackAndSearchThread != null && !trackAndSearchThread.isHandleImg) {
                    imgFormat = ImageFormat.NV21;
                    imgHeight = image.getHeight();
                    ingWidth = image.getWidth();
                    imgData = ImageUtil.getBytesFromImageAsType(image, ImageUtil.NV21);
                    if (mHandler != null)
                        mHandler.sendEmptyMessage(CAPTURE_IMG_PREPARED);
                }
            } finally {
                // Ensure the buffer is always returned, even if conversion throws.
                image.close();
            }
        }
    };

    Handler.Callback mHandlerCallback = new Handler.Callback() {
        @Override
        public boolean handleMessage(Message msg) {
            if (msg.what == TRY_TO_CAPTURE_IMG) {
                // Reserved message; no work is currently attached to it.
            }

            if (msg.what == CAPTURE_IMG_PREPARED) {
                // Guard against frames arriving after DisConnect() or a failed
                // conversion leaving imgData null (original code could NPE here).
                if (trackAndSearchThread != null && imgData != null) {
                    trackAndSearchThread.handleImgData(imgData, ingWidth, imgHeight, imgFormat);
                }
            }

            return false;
        }
    };

    /**
     * Avoids preview distortion: picks the smallest supported size that is
     * strictly larger than the requested dimensions on both axes (compared
     * crosswise for portrait requests, since camera sizes are landscape).
     * Falls back to the first supported size when nothing qualifies.
     *
     * @param sizeMap sizes supported by the camera for SurfaceTexture output
     * @param width   requested width
     * @param height  requested height
     * @return the chosen preview size
     */
    private Size getOptimalSize(Size[] sizeMap, int width, int height) {
        List<Size> sizeList = new ArrayList<>();
        for (Size option : sizeMap) {
            if (width > height) {
                if (option.getWidth() > width && option.getHeight() > height) {
                    sizeList.add(option);
                }
            } else {
                if (option.getWidth() > height && option.getHeight() > width) {
                    sizeList.add(option);
                }
            }
        }
        if (sizeList.size() > 0) {
            return Collections.min(sizeList, new Comparator<Size>() {
                @Override
                public int compare(Size lhs, Size rhs) {
                    // Compare by pixel area; multiply as long to avoid int overflow.
                    return Long.signum((long) lhs.getWidth() * lhs.getHeight()
                            - (long) rhs.getWidth() * rhs.getHeight());
                }
            });
        }
        return sizeMap[0];
    }

    /**
     * Classic Camera1-style size chooser: prefers a size matching the target
     * aspect ratio (within 0.1 tolerance) with the closest height; if no size
     * matches the ratio, picks the closest height regardless of ratio.
     * Currently unused but kept for reference.
     */
    private Size getOptimalPreviewSize(Size[] sizes, int w, int h) {
        final double ASPECT_TOLERANCE = 0.1;
        double targetRatio = (double) w / h;
        if (sizes == null) return null;

        Size optimalSize = null;
        double minDiff = Double.MAX_VALUE;

        int targetHeight = h;

        // Try to find a size matching both aspect ratio and height.
        Size size = null;
        for (int i = 0; i < sizes.length; i++) {
            size = sizes[i];
            double ratio = (double) size.getWidth() / size.getHeight();
            if (Math.abs(ratio - targetRatio) > ASPECT_TOLERANCE) continue;
            if (Math.abs(size.getHeight() - targetHeight) < minDiff) {
                optimalSize = size;
                minDiff = Math.abs(size.getHeight() - targetHeight);
            }
        }

        // No aspect-ratio match: ignore the ratio requirement.
        if (optimalSize == null) {
            minDiff = Double.MAX_VALUE;
            for (int i = 0; i < sizes.length; i++) {
                size = sizes[i];
                if (Math.abs(size.getHeight() - targetHeight) < minDiff) {
                    optimalSize = size;
                    minDiff = Math.abs(size.getHeight() - targetHeight);
                }
            }
        }
        return optimalSize;
    }

    /** Asks the worker thread to refresh its face-feature list. */
    public void updateList() {
        if (trackAndSearchThread != null)
            trackAndSearchThread.isUpdateFuture = true;
    }

    /**
     * Applies a matrix transform to the TextureView so a rotated sensor still
     * previews upright and unstretched (landscape rotations only).
     */
    private void configureTransform(int viewWidth, int viewHeight) {
        if (null == mTextureView || null == mPreviewSize || null == activity) {
            return;
        }
        int rotation = activity.getWindowManager().getDefaultDisplay().getRotation();
        Matrix matrix = new Matrix();
        RectF viewRect = new RectF(0, 0, viewWidth, viewHeight);
        RectF bufferRect = new RectF(0, 0, mPreviewSize.getHeight(), mPreviewSize.getWidth());
        float centerX = viewRect.centerX();
        float centerY = viewRect.centerY();
        if (Surface.ROTATION_90 == rotation || Surface.ROTATION_270 == rotation) {
            bufferRect.offset(centerX - bufferRect.centerX(), centerY - bufferRect.centerY());
            matrix.setRectToRect(viewRect, bufferRect, Matrix.ScaleToFit.FILL);
            // NOTE(review): scale1 and scale2 mix a ratio with its inverse; the
            // stock Camera2Basic sample uses the same max(...) scale on both
            // axes. Kept as-is — presumably tuned for the target device; verify.
            float scale1 = Math.max(
                    (float) viewHeight / mPreviewSize.getHeight(),
                    (float) viewWidth / mPreviewSize.getWidth());
            float scale2 = Math.min(
                    (float) mPreviewSize.getHeight() / viewHeight,
                    (float) mPreviewSize.getWidth() / viewWidth);
            matrix.postScale(scale1, scale2, centerX, centerY);
            matrix.postRotate(90 * rotation, centerX, centerY);
        }
        mTextureView.setTransform(matrix);
    }
}
package com.neatech.stface.tools;

import android.graphics.ImageFormat;
import android.media.Image;

import java.nio.ByteBuffer;

/**
 * 功能:YV12转其他格式
 * 作者:chenhan
 * 时间:2019-3-7 16:47
 */
/**
 * yuv420p:  yyyyyyyyuuvv
 * yuv420sp: yyyyyyyyuvuv
 * nv21:     yyyyyyyyvuvu
 */

public class ImageUtil {
    // Target layouts for the converted buffer:
    //   YUV420P  (yuv420p):  yyyyyyyy uu vv   (planar, U plane then V plane)
    //   YUV420SP (yuv420sp): yyyyyyyy uvuv    (semi-planar, U/V interleaved)
    //   NV21:                yyyyyyyy vuvu    (semi-planar, V/U interleaved)
    public static final int YUV420P = 0;
    public static final int YUV420SP = 1;
    public static final int NV21 = 2;
    private static final String TAG = "ImageUtil";

    /***
     * Converts a camera2 {@link Image} (YUV, 3 planes) into a packed byte
     * array in the requested layout (YUV420P / YUV420SP / NV21).
     *
     * Inline examples assume a 640x480 frame. CropRect is NOT taken into
     * account. Returns null if anything throws during conversion.
     *
     * NOTE(review): this method closes the caller's Image on success AND the
     * only caller closes it again afterwards — confirm double-close is safe
     * on the target API level, or move ownership to one side.
     */
    public static byte[] getBytesFromImageAsType(Image image, int type) {
        try {
            // For YUV data planes.length == 3 (Y, U, V).
            // Each plane's buffer may satisfy byte[].length <= capacity,
            // i.e. rows can be padded past the visible width.
            final Image.Plane[] planes = image.getPlanes();

            // Effective width of valid pixels. Generally width <= rowStride,
            // which is why buffers can be larger than width*height; we only
            // copy the first `width` bytes of each row.
            int width = image.getWidth();
            int height = image.getHeight();

            // Output buffer for the packed YUV data: 1.5x the pixel count,
            // since Y:U:V sample counts are 4:1:1 (12 bits per pixel).
            byte[] yuvBytes = new byte[width * height * ImageFormat.getBitsPerPixel(ImageFormat.YV12) / 8];
            // Next write position in the output array.
            int dstIndex = 0;

            // Temporary storage for the deinterleaved U and V samples.
            byte uBytes[] = new byte[width * height / 4];
            byte vBytes[] = new byte[width * height / 4];
            int uIndex = 0;
            int vIndex = 0;

            int pixelsStride, rowStride;
            for (int i = 0; i < planes.length; i++) {
                pixelsStride = planes[i].getPixelStride();
                rowStride = planes[i].getRowStride();

                ByteBuffer buffer = planes[i].getBuffer();

                // With pixelsStride == 2 the Y buffer is typically 640*480 long
                // and each U/V buffer is 640*480/2 - 1: U and V views overlap
                // shifted by one byte, with valid samples at even offsets.
                byte[] bytes = new byte[buffer.capacity()];
                buffer.get(bytes);

                int srcIndex = 0;
                if (i == 0) {
                    // Y plane: copy the valid `width` bytes of every row,
                    // skipping any per-row padding via rowStride.
                    for (int j = 0; j < height; j++) {
                        System.arraycopy(bytes, srcIndex, yuvBytes, dstIndex, width);
                        srcIndex += rowStride;
                        dstIndex += width;
                    }
                } else if (i == 1) {
                    // U plane: pick one sample every pixelsStride bytes,
                    // then skip the row remainder (padding differs between
                    // interleaved stride 2 and planar stride 1 layouts).
                    for (int j = 0; j < height / 2; j++) {
                        for (int k = 0; k < width / 2; k++) {
                            uBytes[uIndex++] = bytes[srcIndex];
                            srcIndex += pixelsStride;
                        }
                        if (pixelsStride == 2) {
                            srcIndex += rowStride - width;
                        } else if (pixelsStride == 1) {
                            srcIndex += rowStride - width / 2;
                        }
                    }
                } else if (i == 2) {
                    // V plane: same extraction as the U plane.
                    for (int j = 0; j < height / 2; j++) {
                        for (int k = 0; k < width / 2; k++) {
                            vBytes[vIndex++] = bytes[srcIndex];
                            srcIndex += pixelsStride;
                        }
                        if (pixelsStride == 2) {
                            srcIndex += rowStride - width;
                        } else if (pixelsStride == 1) {
                            srcIndex += rowStride - width / 2;
                        }
                    }
                }
            }

            image.close();

            // Append the chroma samples in the layout the caller asked for.
            switch (type) {
                case YUV420P:
                    System.arraycopy(uBytes, 0, yuvBytes, dstIndex, uBytes.length);
                    System.arraycopy(vBytes, 0, yuvBytes, dstIndex + uBytes.length, vBytes.length);
                    break;
                case YUV420SP:
                    for (int i = 0; i < vBytes.length; i++) {
                        yuvBytes[dstIndex++] = uBytes[i];
                        yuvBytes[dstIndex++] = vBytes[i];
                    }
                    break;
                case NV21:
                    for (int i = 0; i < vBytes.length; i++) {
                        yuvBytes[dstIndex++] = vBytes[i];
                        yuvBytes[dstIndex++] = uBytes[i];
                    }
                    break;
            }
            return yuvBytes;
        } catch (final Exception e) {
            // Best-effort: release the buffer and signal failure with null.
            if (image != null) {
                image.close();
            }
        }
        return null;
    }
}

 

### 回答1: Android Camera2 API是Android系统提供的一种新的相机框架,用于实现相机预览和采集功能。Camera2 API可以提供更强大、更灵活的相机控制和图像处理能力。 相比于传统的Camera API,Camera2 API具有以下优势: 1. 能够同时管理多个相机设备:Camera2 API支持同时管理多个相机设备,包括前置摄像头、后置摄像头、深度摄像头等,可以方便地进行相机切换和管理。 2. 提供更灵活的相机控制:Camera2 API提供了更丰富的相机控制选项,可以精确地设置曝光时间、ISO值、焦距等参数,以便于用户根据不同的拍摄场景进行调整。 3. 支持原生的相机预览和采集:Camera2 API提供了对原生的预览和采集数据的支持,可以更高效地获取图像数据,用户可以利用这些数据进行实时的图像处理或者保存。 4. 支持原生的相机回调:Camera2 API提供了原生的相机回调接口,用户可以通过设置回调函数来实时获取相机的状态和图像数据,从而实现一些特殊的相机功能,如实时人脸检测、连拍等。 总之,Android Camera2 API提供了更强大、更灵活的相机功能,可以满足开发者对相机预览和采集的各种需求。无论是在智能手机、平板电脑还是其他移动设备上,都可以通过Camera2 API来实现高质量的相机应用。 ### 回答2: Android Camera 2是Android系统中用于处理相机功能的全新API。相较于之前的Camera API,Camera 2更加强大和灵活,提供了更多的功能和优化。 在Android Camera 2中,预览采集是通过创建一个CameraDevice实例来实现的。首先,需要获取相机的相关信息,例如相机的ID、参数等。然后,通过调用CameraManager的openCamera方法来打开指定ID的相机。 在相机打开后,可以通过创建一个CaptureRequest.Builder对象来配置相机预览设置。可以设置预览的目标Surface、预览图像的尺寸、率等。然后,通过调用createCaptureSession方法,传入预览的目标Surface和一个CameraCaptureSession.StateCallback来创建一个预览会话。 在预览会话创建后,可以调用setRepeatingRequest方法来开始预览Camera2会自动将预览数据传输到指定的Surface上,并在Surface上更新预览图像。同时,可以通过设置回调函数来处理预览数据,例如实时显示预览图像、进行进一步的图像处理等。 在预览过程中,还可以根据需要调整相机的参数,例如调整曝光度、焦距、白平衡等。通过创建新的CaptureRequest.Builder对象并设置相应参数,然后调用Session的setRepeatingRequest方法,可以实现动态调整相机参数。 总而言之,Android Camera 2提供了更强大和灵活的方式来实现相机预览采集。通过合理地使用Camera2的API,我们可以轻松地实现各种功能,例如实时预览、图像处理、动态调整参数等。 ### 回答3: Android Camera2预览采集是通过使用Android操作系统的Camera2 API来实现的一种方式。Camera2 API是Android 5.0(Lollipop)版本引入的相机框架,它提供了更多的功能和灵活性,用于控制和管理Android设备上的相机。 使用Camera2 API进行预览采集是一种相对较新和高级的方法,相比传统的Camera API,Camera2 API提供了更多的控制选项和功能,以及更好的性能和稳定性。 通过Camera2 API,我们可以实时地从Android设备的摄像头获取图像,并将图像传输到屏幕上进行实时预览预览采集可以在应用程序中使用,例如用于视频通话、拍照、视频录制等场景。 Camera2 API的预览采集过程主要涉及以下步骤: 1. 获取相机设备:通过CameraManager类获取设备的摄像头列表,并选择要使用的摄像头设备。 2. 创建相机会话:使用CameraDevice类进行连接和建立与相机设备的会话。 3. 创建预览请求:使用CaptureRequest.Builder类创建一个预览请求,并设置相应的参数,例如预览尺寸、率等。 4. 创建预览会话:使用CameraDevice类创建预览会话,并将预览请求设置为预览会话的目标。 5. 
开启预览:将预览会话设置为活动状态,相机将开始实时地捕获图像,并通过指定的Surface进行预览。 通过这些步骤,我们可以在Android设备上实现相机预览采集功能,并根据需要进行自定义设置和扩展。预览采集可以进一步应用于更多的相机应用场景,例如人脸识别、图像处理等。
评论 16
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值