使用 FaceDetectionListener 进行人脸检测
package com.emptech.biocollection.fragment;

import android.Manifest;
import android.content.pm.PackageManager;
import android.graphics.ImageFormat;
import android.graphics.Point;
import android.hardware.Camera;
import android.os.Bundle;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.support.v4.content.ContextCompat;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ImageView;
import android.widget.Toast;
import com.emptech.biocollection.R;
import com.emptech.biocollection.socket.MessageType;
import com.emptech.biocollection.socket.message.IPreview;
import com.emptech.biocollection.socket.message.SessionPreviewSocket;
import com.emptech.biocollection.socket.message.SocketSession;
import com.emptech.biocollection.utils.BitmapUtil;
import com.emptech.biocollection.views.FaceView;
import java.util.List;

/**
 * Camera-preview fragment that uses the hardware face detector
 * ({@link Camera.FaceDetectionListener}) and mirrors detected landmarks onto
 * a {@link FaceView} overlay. Detected coordinates arrive in the camera's
 * [-1000, 1000] driver space and are mapped to view pixels, accounting for
 * the 90-degree display rotation applied in {@link #startPreview()}.
 */
public class PhotoFragment extends BaseSocketFragment
        implements IPreview, Camera.PreviewCallback, SurfaceHolder.Callback {

    SessionPreviewSocket mPreviewSocket;
    SurfaceView mSurfaceView;
    Camera mCamera;
    private final int CAMERA_PERMISSION_CODE = 0x01;
    private final String TAG = getClass().getSimpleName();
    // Most recent preview frame; non-null placeholder so getPreviewData() never returns null.
    byte[] mCache = new byte[]{0};
    FaceView mFaceView;
    ImageView imageView;
    View mTakePhotoBtn;
    int mPreviewWidth;   // surface width, set in surfaceChanged()
    int mPreviewHeight;  // surface height, set in surfaceChanged()
    FaceListener mListener;

    @Override
    protected int getLayout() {
        return R.layout.fragment_photo;
    }

    @Override
    protected void initView(View view) {
        setIPreviewListener(this);
        mSurfaceView = (SurfaceView) view.findViewById(R.id.surface_view);
        imageView = (ImageView) view.findViewById(R.id.image_view);
        mFaceView = (FaceView) view.findViewById(R.id.face_view);
        mTakePhotoBtn = view.findViewById(R.id.take_photo);
        mTakePhotoBtn.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                takePhoto();
            }
        });
    }

    /** Captures a still image and displays it in {@link #imageView}. */
    private void takePhoto() {
        if (mCamera != null) {
            mCamera.takePicture(null, null, new Camera.PictureCallback() {
                @Override
                public void onPictureTaken(byte[] data, Camera camera) {
                    imageView.setImageBitmap(BitmapUtil.Bytes2Bimap(data));
                    // FIX: takePicture() stops the preview; restart preview and
                    // face detection so the screen does not stay frozen.
                    camera.startPreview();
                    camera.startFaceDetection();
                }
            });
        }
    }

    @Override
    public View onCreateView(LayoutInflater inflater, @Nullable ViewGroup container,
                             @Nullable Bundle savedInstanceState) {
        return super.onCreateView(inflater, container, savedInstanceState);
    }

    @Override
    public void onStart() {
        super.onStart();
        if (ContextCompat.checkSelfPermission(getContext(), Manifest.permission.CAMERA)
                != PackageManager.PERMISSION_GRANTED) {
            // Ask for the camera permission; result handled in onRequestPermissionsResult().
            requestPermissions(new String[]{Manifest.permission.CAMERA}, CAMERA_PERMISSION_CODE);
        } else {
            openCamera();
        }
    }

    /** Opens camera 0 and registers for surface lifecycle callbacks. */
    private void openCamera() {
        mCamera = Camera.open(0);
        mSurfaceView.getHolder().addCallback(this);
    }

    /**
     * Binds the camera to the preview surface, installs the hardware
     * face-detection listener, then starts preview and face detection.
     */
    private void startPreview() {
        if (mCamera == null) {
            return;
        }
        try {
            mCamera.setPreviewDisplay(mSurfaceView.getHolder());
        } catch (Exception e) {
            e.printStackTrace();
        }
        mCamera.setFaceDetectionListener(new Camera.FaceDetectionListener() {
            @Override
            public void onFaceDetection(Camera.Face[] faces, Camera camera) {
                if (faces.length > 0) {
                    // Only the first reported face is tracked.
                    Camera.Face face = faces[0];
                    Log.e(TAG, "score:" + face.score);
                    Log.e(TAG, "leftEyeX:" + face.leftEye.x + " leftEyeY:" + face.leftEye.y);
                    Log.e(TAG, "rightEyeX:" + face.rightEye.x + " rightEyeY:" + face.rightEye.y);
                    Log.e(TAG, "mouthx:" + face.mouth.x + " mouthy:" + face.mouth.y);
                    Log.e(TAG, "left:" + face.rect.left + " top:" + face.rect.top
                            + " right:" + face.rect.right + " bottom:" + face.rect.bottom);
                    // Driver coordinates are rotated 90 degrees relative to the display,
                    // so axes are swapped/negated before mapping into view pixels.
                    int cx = -face.rect.centerY();
                    int cy = -face.rect.centerX();
                    cx = calculateRealValue(cx, mPreviewWidth);
                    cy = mPreviewHeight - calculateRealValue(cy, mPreviewHeight);
                    swapXy(face.leftEye);
                    swapXy(face.rightEye);
                    swapXy(face.mouth);
                    int leftEyeX = mPreviewWidth - calculateRealValue(face.leftEye.x, mPreviewWidth);
                    int leftEyeY = calculateRealValue(face.leftEye.y, mPreviewHeight);
                    int rightEyeX = mPreviewWidth - calculateRealValue(face.rightEye.x, mPreviewWidth);
                    int rightEyeY = calculateRealValue(face.rightEye.y, mPreviewHeight);
                    int mouseX = mPreviewWidth - calculateRealValue(face.mouth.x, mPreviewWidth);
                    int mouseY = calculateRealValue(face.mouth.y, mPreviewHeight);
                    int rectLeft = calculateRealValue(face.rect.left, mPreviewWidth);
                    int rectRight = calculateRealValue(face.rect.right, mPreviewWidth);
                    if (mListener != null) {
                        mListener.faceIn(leftEyeX, leftEyeY, rightEyeX, rightEyeY, mouseX, mouseY);
                    }
                    Log.e(TAG, "RleftEyeX:" + leftEyeX + "RleftEyeY:" + leftEyeY
                            + "RrightX:" + rightEyeX + "RrightY:" + rightEyeY);
                    int distance = rectRight - rectLeft;
                    Log.e(TAG, "DisTance:" + distance);
                    mFaceView.setFace(leftEyeX, leftEyeY, rightEyeX, rightEyeY, mouseX, mouseY);
                    mFaceView.setFaceArea(cx - distance, cy - distance, cx + distance, cy + distance);
                    if (mListener != null) {
                        mListener.faceArea(cx - distance, cy - distance, cx + distance, cy + distance);
                    }
                    return;
                }
                // No face this frame: clear the overlay.
                mFaceView.setFaceArea(0, 0, 0, 0);
            }
        });
        mCamera.setDisplayOrientation(90);
        mCamera.setPreviewCallback(this);
        mCamera.startPreview();
        // Face detection may only be started after startPreview().
        mCamera.startFaceDetection();
    }

    /** Swaps x and y in place (sensor is rotated 90 degrees vs the display). */
    private static void swapXy(Point point) {
        int x = point.x;
        point.x = point.y;
        point.y = x;
    }

    /** Maps a driver-space value in [-1000, 1000] to [0, displayValue] pixels. */
    private static int calculateRealValue(int value, int displayValue) {
        return (int) ((value + 1000f) / 2000f * (float) (displayValue));
    }

    /** Registers a face landmark/area listener. (Misspelled name kept for API compat.) */
    public void setFaceLinstener(FaceListener listener) {
        mListener = listener;
    }

    @Override
    public void onDestroy() {
        super.onDestroy();
        release();
    }

    /** Stops the preview and releases the camera. Safe to call repeatedly. */
    private void release() {
        if (mCamera != null) {
            // FIX: clear the preview callback before releasing so the camera HAL
            // drops its reference to this fragment and stops delivering frames.
            mCamera.setPreviewCallback(null);
            mCamera.stopPreview();
            mCamera.release();
            // FIX: null out so late surface/permission callbacks can't touch a
            // released camera.
            mCamera = null;
        }
    }

    @Override
    public void onPreviewFrame(byte[] data, Camera camera) {
        mCache = data;
    }

    @Override
    public void surfaceCreated(SurfaceHolder holder) {
        Log.e(TAG, "surfaceCreated");
    }

    @Override
    public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
        mPreviewWidth = width;
        mPreviewHeight = height;
        // FIX: this callback can arrive after release(); guard against NPE.
        if (mCamera == null) {
            return;
        }
        List<Camera.Size> supportSizes = mCamera.getParameters().getSupportedPreviewSizes();
        Camera.Size bestPreviewSize = null;
        for (int i = 0; i < supportSizes.size(); i++) {
            // Pick the first supported size at least as wide as the surface.
            if (supportSizes.get(i).width >= width) {
                bestPreviewSize = supportSizes.get(i);
                break;
            }
        }
        Camera.Parameters parameters = mCamera.getParameters();
        parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE);
        if (bestPreviewSize != null) {
            parameters.setPreviewSize(bestPreviewSize.width, bestPreviewSize.height);
        }
        parameters.setPictureFormat(ImageFormat.JPEG);
        mCamera.setParameters(parameters);
        startPreview();
    }

    @Override
    public void surfaceDestroyed(SurfaceHolder holder) {
        Log.e(TAG, "surfaceDestroyed");
    }

    @Override
    public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions,
                                           @NonNull int[] grantResults) {
        super.onRequestPermissionsResult(requestCode, permissions, grantResults);
        // FIX: grantResults can be empty when the request is interrupted; check
        // length before indexing to avoid ArrayIndexOutOfBoundsException.
        if (requestCode == CAMERA_PERMISSION_CODE && grantResults.length > 0
                && grantResults[0] == PackageManager.PERMISSION_GRANTED) {
            openCamera();
            startPreview();
        } else {
            Toast.makeText(getContext(), "reject by user", Toast.LENGTH_SHORT).show();
        }
    }

    @Override
    public boolean isInitPreviewMode(SocketSession mSocketSession) {
        return mSocketSession.getRunningMode() == MessageType.TYPE_MODE.Photo;
    }

    /**
     * Initializes the socket used to send preview frames.
     *
     * @param mSocketSession session whose request id / mode / IO session are copied
     */
    public void initPreviewSocket(SocketSession mSocketSession) {
        if (mPreviewSocket == null) {
            mPreviewSocket = new SessionPreviewSocket();
        }
        if (mSocketSession != null) {
            mPreviewSocket.setRequestID(mSocketSession.getRequestID());
            mPreviewSocket.setRunningMode(mSocketSession.getRunningMode());
            mPreviewSocket.transmitIoSession(mSocketSession.getmIoSession());
        }
    }

    /**
     * Reports whether the camera hardware is ready.
     *
     * @return true when the camera has been opened
     */
    @Override
    public boolean initHardWare() {
        // TODO camera initialization not finished
        return mCamera != null;
    }

    /**
     * Returns the most recently cached preview frame.
     */
    @Override
    public byte[] getPreviewData() {
        // TODO preview data pipeline not finished
        return mCache;
    }

    @Override
    public byte[] collection(byte runningMode) {
        return new byte[2];
    }

    /**
     * Returns the ID of the subject being collected.
     */
    @Override
    protected byte[] getID() {
        return new byte[8];
    }

    /** Callback for face landmark positions and the computed face bounding area. */
    public interface FaceListener {
        void faceIn(int leftEyeX, int leftEyeY, int rightEyeX, int rightEyeY,
                    int mouseX, int mouseY);

        void faceArea(int left, int top, int right, int bottom);
    }
}
1:相机预览
/**
 * Opens the front-facing camera (id 1), routes every preview frame to the
 * software face detector, and starts a continuously re-arming auto-focus
 * preview rotated upright for portrait display.
 */
private void openCamera() {
    mHolder = mSurfaceView.getHolder();
    mCamera = Camera.open(1); // 1 = front-facing camera
    try {
        mCamera.setPreviewDisplay(mHolder);
        // Each preview frame is handed to findFace() for software detection.
        mCamera.setPreviewCallback(new Camera.PreviewCallback() {
            @Override
            public void onPreviewFrame(byte[] bytes, Camera camera) {
                findFace(bytes);
            }
        });
    } catch (IOException e) {
        e.printStackTrace();
    }
    // FIX: configure the preview size BEFORE starting the preview. The original
    // called startPreview() immediately after open(), before any display or
    // parameters were set, starting the preview twice.
    List<Camera.Size> previewSizes = mCamera.getParameters().getSupportedPreviewSizes();
    mBitmapWidth = previewSizes.get(0).width;
    mBitmapHeight = previewSizes.get(0).height;
    Camera.Parameters parameters = mCamera.getParameters();
    parameters.setPreviewSize(mBitmapWidth, mBitmapHeight);
    mPreviewSize = previewSizes.get(0);
    mCamera.setParameters(parameters);
    // FIX: setDisplayOrientation() must be called before startPreview() to take
    // effect; the original called it last.
    mCamera.setDisplayOrientation(90);
    mCamera.startPreview();
    mCamera.autoFocus(new Camera.AutoFocusCallback() {
        @Override
        public void onAutoFocus(boolean b, Camera camera) {
            // Re-arm focus as soon as one pass completes (continuous focus loop).
            mCamera.autoFocus(this);
        }
    });
}
2:数据转换
/**
 * Converts an NV21 preview frame into an RGB_565 bitmap (as required by
 * android.media.FaceDetector), scales it down to a smaller supported preview
 * size, and rotates it upright.
 *
 * @param data raw NV21 bytes from onPreviewFrame
 * @return the rotated bitmap, or null if JPEG compression or decoding failed
 */
private Bitmap decodeBitmap(byte[] data) {
    YuvImage yuvImage = new YuvImage(data, ImageFormat.NV21, mBitmapWidth, mBitmapHeight, null);
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    if (!yuvImage.compressToJpeg(new Rect(0, 0, mBitmapWidth, mBitmapHeight), 100, baos)) {
        return null;
    }
    BitmapFactory.Options bfo = new BitmapFactory.Options();
    // FaceDetector only accepts RGB_565 bitmaps.
    bfo.inPreferredConfig = Bitmap.Config.RGB_565;
    Bitmap tempBitmap =
            BitmapFactory.decodeStream(new ByteArrayInputStream(baos.toByteArray()), null, bfo);
    // FIX: decodeStream can return null; the original would then NPE in
    // createScaledBitmap.
    if (tempBitmap == null) {
        return null;
    }
    List<Camera.Size> previewSizes = mCamera.getParameters().getSupportedPreviewSizes();
    // FIX: size() - 3 underflows when fewer than four sizes are supported,
    // throwing IndexOutOfBoundsException; clamp the index to 0.
    int index = Math.max(0, previewSizes.size() - 3);
    Camera.Size size = previewSizes.get(index);
    Bitmap bitmap = Bitmap.createScaledBitmap(tempBitmap, size.width, size.height, false);
    return rotate(bitmap, -90);
}
3:FaceDetector ,Face 解析说明
从处理后的 RGB_565 图片中查找人脸
参数是:位图宽、位图高(由自己定义)、最大检测数 10
// Create a detector sized to the bitmap that will be scanned; 10 = maximum
// number of faces to report per findFaces() call.
mFaceDetector = new FaceDetector(targetBitmap.getWidth(),targetBitmap.getHeight(), 10);
参数是:处理后的位图,以及用于保存结果的 FaceDetector.Face 数组
// Result buffer: findFaces() fills at most 10 entries (unused slots stay null)
// and returns how many faces were actually found.
FaceDetector.Face[] faces = new FaceDetector.Face[10];
int faceFoundNum = mFaceDetector.findFaces(targetBitmap,faces);
4: FaceDetector.Face 解析
获取两眼间中点坐标:
// getMidPoint() writes the midpoint between the two eyes into the supplied PointF.
PointF eyeMidPoint = new PointF(); face.getMidPoint(eyeMidPoint);
获取两眼间距离:
// Distance between the eyes, in pixels of the bitmap that was analyzed.
eyesDistance = face.eyesDistance();
获取检测结果像人脸的置信度:
face.confidence()

5: 画出检测框:
获取两眼间中点坐标与间距后,根据相机预览尺寸与实际提交检测的 bitmap 尺寸换算出预览坐标,再使用 addContentView 把 FaceView 覆盖到预览之上:
addContentView(mFaceView, new ViewGroup.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.MATCH_PARENT));
这里因为相机预览中的 SurfaceHolder.lockCanvas() 在系统层已经被相机占用,所以不能通过 SurfaceView 直接对相机预览进行绘制。
源码地址:https://github.com/Perece/Android-Face-Detector.git