一、框架视图
Main.png
二、关键代码
WebCamTextureARSample
using UnityEngine;
using System.Collections;
using System.Collections.Generic;
using UnityEngine.UI;
#if UNITY_5_3 || UNITY_5_3_OR_NEWER
using UnityEngine.SceneManagement;
#endif
using OpenCVForUnity;
using DlibFaceLandmarkDetector;
namespace DlibFaceLandmarkDetectorSample
{
/// <summary>
/// Face tracker AR from WebCamTexture Sample.
/// This sample was referring to http://www.morethantechnical.com/2012/10/17/head-pose-estimation-with-opencv-opengl-revisited-w-code/
/// and use effect asset from http://ktk-kumamoto.hatenablog.com/entry/2014/09/14/092400
/// </summary>
[RequireComponent(typeof(WebCamTextureToMatHelper))]
public class WebCamTextureARSample : MonoBehaviour
{
/// <summary>
/// Whether detected face landmark points are drawn onto the camera image.
/// </summary>
public bool isShowingFacePoints;
/// <summary>
/// UI toggle bound to isShowingFacePoints (synced in Start).
/// </summary>
public Toggle isShowingFacePointsToggle;
/// <summary>
/// Whether the axes object is shown when a pose is found.
/// </summary>
public bool isShowingAxes;
/// <summary>
/// UI toggle bound to isShowingAxes (synced in Start).
/// </summary>
public Toggle isShowingAxesToggle;
/// <summary>
/// Whether the head object is shown when a pose is found.
/// </summary>
public bool isShowingHead;
/// <summary>
/// UI toggle bound to isShowingHead (synced in Start).
/// </summary>
public Toggle isShowingHeadToggle;
/// <summary>
/// Whether the eye/mouth effect objects are activated when a pose is found.
/// </summary>
public bool isShowingEffects;
/// <summary>
/// UI toggle bound to isShowingEffects (synced in Start).
/// </summary>
public Toggle isShowingEffectsToggle;
/// <summary>
/// The axes game object, activated in Update when isShowingAxes is set.
/// </summary>
public GameObject axes;
/// <summary>
/// The head game object, activated in Update when isShowingHead is set.
/// </summary>
public GameObject head;
/// <summary>
/// The right-eye effect object; activated when the right eye is judged open wide enough.
/// </summary>
public GameObject rightEye;
/// <summary>
/// The left-eye effect object; activated when the left eye is judged open wide enough.
/// </summary>
public GameObject leftEye;
/// <summary>
/// The mouth effect object; activated when the mouth is judged open.
/// </summary>
public GameObject mouth;
/// <summary>
/// Particle systems driven by the mouth-open distance in Update.
/// NOTE(review): never populated in the visible code — presumably filled from
/// the mouth object's children elsewhere; confirm against the full sample.
/// </summary>
ParticleSystem[] mouthParticleSystem;
/// <summary>
/// Texture that receives the webcam frames; assigned as the renderer's main texture.
/// </summary>
Texture2D texture;
/// <summary>
/// Dlib face landmark detector, constructed from the 68-landmark model file.
/// </summary>
FaceLandmarkDetector faceLandmarkDetector;
/// <summary>
/// The AR camera. NOTE(review): not referenced in the visible code —
/// presumably posed/used in the truncated part of Update; confirm.
/// </summary>
public Camera ARCamera;
/// <summary>
/// Camera intrinsic matrix passed to Calib3d.solvePnP.
/// NOTE(review): never initialized in the visible code — its setup appears to be
/// in the truncated part of OnWebCamTextureToMatHelperInited; confirm.
/// </summary>
Mat camMatrix;
/// <summary>
/// Lens distortion coefficients passed to Calib3d.solvePnP.
/// NOTE(review): never initialized in the visible code; confirm (likely empty).
/// </summary>
MatOfDouble distCoeffs;
/// <summary>
/// Y-axis inversion matrix. NOTE(review): not used in the visible code —
/// presumably applied in the truncated part of Update; confirm.
/// </summary>
Matrix4x4 invertYM;
/// <summary>
/// Pose matrix built row-by-row in Update from the Rodrigues rotation and tvec.
/// </summary>
Matrix4x4 transformationM = new Matrix4x4 ();
/// <summary>
/// Z-axis inversion matrix. NOTE(review): not used in the visible code; confirm.
/// </summary>
Matrix4x4 invertZM;
/// <summary>
/// The AR pose matrix. NOTE(review): not used in the visible code; confirm.
/// </summary>
Matrix4x4 ARM;
/// <summary>
/// The AR game object; referenced where Update is cut off in this file.
/// </summary>
public GameObject ARGameObject;
/// <summary>
/// If true, the AR camera is moved instead of the AR game object (branch in Update).
/// </summary>
public bool shouldMoveARCamera;
/// <summary>
/// 3D reference points of the face model, set in Run; paired with imagePoints for solvePnP.
/// </summary>
MatOfPoint3f objectPoints;
/// <summary>
/// 2D landmark points extracted each frame and fed to solvePnP.
/// </summary>
MatOfPoint2f imagePoints;
/// <summary>
/// Rotation vector output of solvePnP.
/// </summary>
Mat rvec;
/// <summary>
/// Translation vector output of solvePnP.
/// </summary>
Mat tvec;
/// <summary>
/// 3x3 rotation matrix produced from rvec via Calib3d.Rodrigues.
/// </summary>
Mat rotM;
/// <summary>
/// Helper component that converts the WebCamTexture into an OpenCV Mat each frame.
/// </summary>
WebCamTextureToMatHelper webCamTextureToMatHelper;
/// <summary>
/// Resolved file path of shape_predictor_68_face_landmarks.dat.
/// </summary>
private string shape_predictor_68_face_landmarks_dat_filepath;
/// <summary>
/// Unity entry point: syncs the UI toggles with the inspector flags,
/// resolves the landmark model file path, then starts the sample via Run.
/// </summary>
void Start ()
{
    // Reflect the inspector-configured flags into their UI toggles.
    isShowingFacePointsToggle.isOn = isShowingFacePoints;
    isShowingAxesToggle.isOn = isShowingAxes;
    isShowingHeadToggle.isOn = isShowingHead;
    isShowingEffectsToggle.isOn = isShowingEffects;
#if UNITY_WEBGL && !UNITY_EDITOR
    // WebGL cannot access files synchronously: resolve the model path in a
    // coroutine and continue in the callback.
    StartCoroutine (DlibFaceLandmarkDetector.Utils.getFilePathAsync ("shape_predictor_68_face_landmarks.dat", (filePath) => {
        shape_predictor_68_face_landmarks_dat_filepath = filePath;
        Run ();
    }));
#else
    // All other platforms resolve the model path synchronously.
    shape_predictor_68_face_landmarks_dat_filepath = DlibFaceLandmarkDetector.Utils.getFilePath ("shape_predictor_68_face_landmarks.dat");
    Run ();
#endif
}
/// <summary>
/// Initializes the 3D face-model reference points, the solvePnP buffers,
/// the Dlib landmark detector, and the webcam-to-Mat helper.
/// </summary>
private void Run ()
{
    // 3D reference points of a generic face model (model space).
    // Must stay in the same order as the 2D points written to imagePoints in Update.
    objectPoints = new MatOfPoint3f (
        new Point3 (-31, 72, 86),  // left eye
        new Point3 (31, 72, 86),   // right eye
        new Point3 (0, 40, 114),   // nose tip
        new Point3 (-20, 15, 90),  // left mouth corner
        new Point3 (20, 15, 90),   // right mouth corner
        new Point3 (-69, 76, -2),  // left ear
        new Point3 (69, 76, -2)    // right ear
    );
    imagePoints = new MatOfPoint2f ();
    // Output buffers for solvePnP / Rodrigues.
    rvec = new Mat ();
    tvec = new Mat ();
    rotM = new Mat (3, 3, CvType.CV_64FC1);
    faceLandmarkDetector = new FaceLandmarkDetector (shape_predictor_68_face_landmarks_dat_filepath);
    // BUGFIX: the generic type argument was missing ("GetComponent ()"), which
    // does not compile; the component type is the one required by the
    // [RequireComponent(typeof(WebCamTextureToMatHelper))] attribute on this class.
    webCamTextureToMatHelper = gameObject.GetComponent<WebCamTextureToMatHelper> ();
    webCamTextureToMatHelper.Init ();
}
/// <summary>
/// Raised when the WebCamTextureToMatHelper finishes initializing: creates the
/// display texture sized to the camera frame and fits the orthographic camera.
/// NOTE(review): this method is truncated in this file — the initialization of
/// camMatrix and distCoeffs (both consumed by solvePnP in Update) and the
/// method's closing brace are missing before Update begins. Restore the missing
/// code from the original sample.
/// </summary>
public void OnWebCamTextureToMatHelperInited ()
{
Debug.Log ("OnWebCamTextureToMatHelperInited");
Mat webCamTextureMat = webCamTextureToMatHelper.GetMat ();
// Texture that will receive each camera frame; sized to match the Mat.
texture = new Texture2D (webCamTextureMat.cols (), webCamTextureMat.rows (), TextureFormat.RGBA32, false);
// NOTE(review): GetComponent () is missing its generic type argument
// (presumably GetComponent<Renderer> ()) — apparently stripped during
// extraction; this line does not compile as-is.
gameObject.GetComponent ().material.mainTexture = texture;
// Scale the quad so one world unit maps to one camera pixel.
gameObject.transform.localScale = new Vector3 (webCamTextureMat.cols (), webCamTextureMat.rows (), 1);
Debug.Log ("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);
float width = webCamTextureMat.width ();
float height = webCamTextureMat.height ();
float imageSizeScale = 1.0f;
float widthScale = (float)Screen.width / width;
float heightScale = (float)Screen.height / height;
// Fit the orthographic camera so the camera image fills the screen,
// letterboxing on whichever axis is tighter.
if (widthScale < heightScale) {
Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
imageSizeScale = (float)Screen.height / (float)Screen.width;
} else {
Camera.main.orthographicSize = height / 2;
}
// Update is called once per frame
void Update ()
{
if (webCamTextureToMatHelper.IsPlaying () && webCamTextureToMatHelper.DidUpdateThisFrame ()) {
Mat rgbaMat = webCamTextureToMatHelper.GetMat ();
OpenCVForUnityUtils.SetImage (faceLandmarkDetector, rgbaMat);
//detect face rects
List detectResult = faceLandmarkDetector.Detect ();
if (detectResult.Count > 0) {
//detect landmark points
List points = faceLandmarkDetector.DetectLandmark (detectResult [0]);
if (points.Count > 0) {
if (isShowingFacePoints)
OpenCVForUnityUtils.DrawFaceLandmark (rgbaMat, points, new Scalar (0, 255, 0, 255), 2);
imagePoints.fromArray (
new Point ((points [38].x + points [41].x) / 2, (points [38].y + points [41].y) / 2),//l eye
new Point ((points [43].x + points [46].x) / 2, (points [43].y + points [46].y) / 2),//r eye
new Point (points [33].x, points [33].y),//nose
new Point (points [48].x, points [48].y),//l mouth
new Point (points [54].x, points [54].y) //r mouth
,
new Point (points [0].x, points [0].y),//l ear
new Point (points [16].x, points [16].y)//r ear
);
Calib3d.solvePnP (objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);
//眼睛的特效
if (tvec.get (2, 0) [0] > 0) {
if (Mathf.Abs ((float)(points [43].y - points [46].y)) > Mathf.Abs ((float)(points [42].x - points [45].x)) / 6.0) {
if (isShowingEffects)
rightEye.SetActive (true);
}
if (Mathf.Abs ((float)(points [38].y - points [41].y)) > Mathf.Abs ((float)(points [39].x - points [36].x)) / 6.0) {
if (isShowingEffects)
leftEye.SetActive (true);
}
if (isShowingHead)
head.SetActive (true);
if (isShowingAxes)
axes.SetActive (true);
//嘴部特效
float noseDistance = Mathf.Abs ((float)(points [27].y - points [33].y));
float mouseDistance = Mathf.Abs ((float)(points [62].y - points [66].y));
if (mouseDistance > noseDistance / 5.0) {
if (isShowingEffects) {
mouth.SetActive (true);
foreach (ParticleSystem ps in mouthParticleSystem) {
ps.enableEmission = true;
ps.startSize = 500 * (mouseDistance / noseDistance);
}
}
} else {
if (isShowingEffects) {
foreach (ParticleSystem ps in mouthParticleSystem) {
ps.enableEmission = false;
}
}
}
Calib3d.Rodrigues (rvec, rotM);
transformationM .SetRow (0, new Vector4 ((float)rotM.get (0, 0) [0], (float)rotM.get (0, 1) [0], (float)rotM.get (0, 2) [0], (float)tvec.get (0, 0) [0]));
transformationM.SetRow (1, new Vector4 ((float)rotM.get (1, 0) [0], (float)rotM.get (1, 1) [0], (float)rotM.get (1, 2) [0], (float)tvec.get (1, 0) [0]));
transformationM.SetRow (2, new Vector4 ((float)rotM.get (2, 0) [0], (float)rotM.get (2, 1) [0], (float)rotM.get (2, 2) [0], (float)tvec.get (2, 0) [0]));
transformationM.SetRow (3, new Vector4 (0, 0, 0, 1));
if (shouldMoveARCamera) {
if (ARGameObject !&