New camera feature: face-detection architecture.
1. Camera app, in packages/apps/Camera/src/com/android/camera/Camera.java:
startFaceDetection()
stopFaceDetection()
//packages/apps/Camera/src/com/android/camera/ui/FaceView.java
public class FaceView extends View implements FocusIndicator{
....
}
FaceView depends on the following framework classes:
frameworks/base/media/java/android/media/FaceDetector.java
frameworks/base/graphics/java/android/graphics/Matrix.java
frameworks/base/graphics/java/android/graphics/RectF.java
frameworks/base/graphics/java/android/graphics/drawable/Drawable.java
2. new android api in frameworks/base/core/java/android/hardware/Camera.java
public class Camera {
private FaceDetectionListener mFaceListener;
private boolean mFaceDetectionRunning = false;
private static final int CAMERA_FACE_DETECTION_HW = 0;
private static final int CAMERA_FACE_DETECTION_SW = 1;
public void handleMessage(Message msg) {
switch(msg.what) {
......
case CAMERA_MSG_PREVIEW_METADATA:
if (mFaceListener != null) {
mFaceListener.onFaceDetection((Face[])msg.obj, mCamera);
}
return;
}
public interface FaceDetectionListener{
void onFaceDetection(Face[] faces, Camera camera);
}
public final void setFaceDetectionListener(FaceDetectionListener listener){
mFaceListener = listener;
}
public final void startFaceDetection() {
if (mFaceDetectionRunning) {
throw new RuntimeException("Face detection is already running");
}
_startFaceDetection(CAMERA_FACE_DETECTION_HW);
mFaceDetectionRunning = true;
}
public final void stopFaceDetection() {
_stopFaceDetection();
mFaceDetectionRunning = false;
}
private native final void _startFaceDetection(int type);
private native final void _stopFaceDetection();
public static class Face{
public Face() {
}
public Rect rect;
public int score;
public int id = -1;
public Point leftEye = null;
public Point rightEye = null;
public Point mouth = null;
}
public class Parameters { //new camera parameters.
private static final String KEY_MAX_NUM_DETECTED_FACES_HW = "max-num-detected-faces-hw";
private static final String KEY_MAX_NUM_DETECTED_FACES_SW = "max-num-detected-faces-sw";
public int getMaxNumDetectedFaces() {
return getInt(KEY_MAX_NUM_DETECTED_FACES_HW, 0);
}
}
}
3. In the camera HAL:
hardware/ti/omap4xxx/camera/OMXCameraAdapter/OMXFD.cpp //This file contains functionality for handling face detection.
OMXCameraAdapter::setParametersFD()
OMXCameraAdapter::startFaceDetection()
OMXCameraAdapter::stopFaceDetection()
OMXCameraAdapter::pauseFaceDetection()
OMXCameraAdapter::setFaceDetection()
OMXCameraAdapter::detectFaces()
OMXCameraAdapter::encodeFaceCoordinates()
hardware/ti/omap4xxx/camera/OMXCameraAdapter/OMXAlgo.cpp //This file contains functionality for handling algorithm configurations.
OMXCameraAdapter::setAlgoPriority()
hardware/ti/omap4xxx/domx/omx_core/inc/OMX_Core.h //depend on hardware/ti/omap4xxx/domx modules.
OMX_SetConfig()
hardware/ti/omap4xxx/domx/omx_core/inc/OMX_TI_IVCommon.h
/*
 * OMX extra-data configuration: enables/disables a given extra-data payload
 * (e.g. face-detection metadata) on buffers of one port.
 * From hardware/ti/omap4xxx/domx/omx_core/inc/OMX_TI_IVCommon.h.
 */
typedef struct OMX_CONFIG_EXTRADATATYPE {
OMX_U32 nSize;                        /* size of this structure, in bytes (standard OMX header field) */
OMX_VERSIONTYPE nVersion;             /* OMX specification version (standard OMX header field) */
OMX_U32 nPortIndex;                   /* index of the port this config applies to */
OMX_EXT_EXTRADATATYPE eExtraDataType; /* which extra-data payload to configure */
OMX_TI_CAMERAVIEWTYPE eCameraView;    /* TI-specific: presumably selects the camera view — TODO confirm in header docs */
OMX_BOOL bEnable;                     /* enable/disable this extra data */
} OMX_CONFIG_EXTRADATATYPE;
/*
 * OMX object-detection (face detection) configuration for one port.
 * From hardware/ti/omap4xxx/domx/omx_core/inc/OMX_TI_IVCommon.h.
 */
typedef struct OMX_CONFIG_OBJDETECTIONTYPE {
OMX_U32 nSize;                      /* size of this structure, in bytes (standard OMX header field) */
OMX_VERSIONTYPE nVersion;           /* OMX specification version (standard OMX header field) */
OMX_U32 nPortIndex;                 /* index of the port this config applies to */
OMX_BOOL bEnable;                   /* enable/disable object detection */
OMX_BOOL bFrameLimited;             /* presumably: limit detection to nFrameLimit frames — TODO confirm */
OMX_U32 nFrameLimit;                /* frame limit used when bFrameLimited is set */
OMX_U32 nMaxNbrObjects;             /* maximum number of objects to detect */
OMX_S32 nLeft;                      /* detection region: left coordinate (signed) */
OMX_S32 nTop;                       /* detection region: top coordinate (signed) */
OMX_U32 nWidth;                     /* detection region: width */
OMX_U32 nHeight;                    /* detection region: height */
OMX_OBJDETECTQUALITY eObjDetectQuality; /* quality/speed trade-off for the detector */
OMX_U32 nPriority;                  /* algorithm priority (see setAlgoPriority in OMXAlgo.cpp) */
OMX_U32 nDeviceOrientation;         /* device orientation hint for the detector */
} OMX_CONFIG_OBJDETECTIONTYPE;
4. Conclusion
The camera app and the Android API are ready; the remaining work will focus on the camera HAL, implementing all of the face-detection-related methods listed above. Because this depends on the domx modules, I am not sure it will work on Medfield; I estimate it will take at least 2 weeks.