Unity AR 自动识别人脸 —— AR 开发实战项目之人脸识别(实现换脸、人脸涂妆、动作特效)

一、框架视图

Main.png

二、关键代码

WebCamTextureARSample

using UnityEngine;

using System.Collections;

using System.Collections.Generic;

using UnityEngine.UI;

#if UNITY_5_3 || UNITY_5_3_OR_NEWER

using UnityEngine.SceneManagement;

#endif

using OpenCVForUnity;

using DlibFaceLandmarkDetector;

namespace DlibFaceLandmarkDetectorSample

{

/// <summary>
/// Face tracker AR from WebCamTexture sample.
/// This sample refers to http://www.morethantechnical.com/2012/10/17/head-pose-estimation-with-opencv-opengl-revisited-w-code/
/// and uses effect assets from http://ktk-kumamoto.hatenablog.com/entry/2014/09/14/092400
/// </summary>

[RequireComponent(typeof(WebCamTextureToMatHelper))]

public class WebCamTextureARSample : MonoBehaviour

{

///

/// The is showing face points.

///

public bool isShowingFacePoints;

///

/// The is showing face points toggle.

///

public Toggle isShowingFacePointsToggle;

///

/// The is showing axes.

///

public bool isShowingAxes;

///

/// The is showing axes toggle.

///

public Toggle isShowingAxesToggle;

///

/// The is showing head.

///

public bool isShowingHead;

///

/// The is showing head toggle.

///

public Toggle isShowingHeadToggle;

///

/// The is showing effects.

///

public bool isShowingEffects;

///

/// The is showing effects toggle.

///

public Toggle isShowingEffectsToggle;

///

/// The axes. 轴

///

public GameObject axes;

///

/// The head. 头部

///

public GameObject head;

///

/// The right eye.右眼

///

public GameObject rightEye;

///

/// The left eye.左眼

///

public GameObject leftEye;

///

/// The mouth. 嘴巴

///

public GameObject mouth;

///

/// The mouth particle system. 嘴部特效

///

ParticleSystem[] mouthParticleSystem;

///

/// The texture. 贴图

///

Texture2D texture;

///

/// The face landmark detector. 脸部识别

///

FaceLandmarkDetector faceLandmarkDetector;

///

/// The AR camera.

///

public Camera ARCamera;

///

/// The cam matrix.

///

Mat camMatrix;

///

/// The dist coeffs.

///

MatOfDouble distCoeffs;

///

/// The invert Y.

///

Matrix4x4 invertYM;

///

/// The transformation m.

///

Matrix4x4 transformationM = new Matrix4x4 ();

///

/// The invert Z.

///

Matrix4x4 invertZM;

///

/// The ar m.

///

Matrix4x4 ARM;

///

/// The ar game object.

///

public GameObject ARGameObject;

///

/// The should move AR camera.

///

public bool shouldMoveARCamera;

///

/// The 3d face object points.

///

MatOfPoint3f objectPoints;

///

/// The image points.

///

MatOfPoint2f imagePoints;

///

/// The rvec.

///

Mat rvec;

///

/// The tvec.

///

Mat tvec;

///

/// The rot m.

///

Mat rotM;

///

/// The web cam texture to mat helper.

///

WebCamTextureToMatHelper webCamTextureToMatHelper;

///

/// The shape_predictor_68_face_landmarks_dat_filepath.

///

private string shape_predictor_68_face_landmarks_dat_filepath;

// Use this for initialization

void Start ()

{

isShowingFacePointsToggle.isOn = isShowingFacePoints;

isShowingAxesToggle.isOn = isShowingAxes;

isShowingHeadToggle.isOn = isShowingHead;

isShowingEffectsToggle.isOn = isShowingEffects;

#if UNITY_WEBGL && !UNITY_EDITOR

StartCoroutine(DlibFaceLandmarkDetector.Utils.getFilePathAsync("shape_predictor_68_face_landmarks.dat", (result) => {

shape_predictor_68_face_landmarks_dat_filepath = result;

Run ();

}));

#else

shape_predictor_68_face_landmarks_dat_filepath = DlibFaceLandmarkDetector.Utils.getFilePath ("shape_predictor_68_face_landmarks.dat");

Run ();

#endif

}

private void Run ()

{

//set 3d face object points.

objectPoints = new MatOfPoint3f (

new Point3 (-31, 72, 86),//l eye

new Point3 (31, 72, 86),//r eye

new Point3 (0, 40, 114),//nose

new Point3 (-20, 15, 90),//l mouse

new Point3 (20, 15, 90),//r mouse

new Point3 (-69, 76, -2),//l ear

new Point3 (69, 76, -2)//r ear

);

imagePoints = new MatOfPoint2f ();

rvec = new Mat ();

tvec = new Mat ();

rotM = new Mat (3, 3, CvType.CV_64FC1);

faceLandmarkDetector = new FaceLandmarkDetector (shape_predictor_68_face_landmarks_dat_filepath);

webCamTextureToMatHelper = gameObject.GetComponent ();

webCamTextureToMatHelper.Init ();

}

///

/// Raises the web cam texture to mat helper inited event.

///

public void OnWebCamTextureToMatHelperInited ()

{

Debug.Log ("OnWebCamTextureToMatHelperInited");

Mat webCamTextureMat = webCamTextureToMatHelper.GetMat ();

texture = new Texture2D (webCamTextureMat.cols (), webCamTextureMat.rows (), TextureFormat.RGBA32, false);

gameObject.GetComponent ().material.mainTexture = texture;

gameObject.transform.localScale = new Vector3 (webCamTextureMat.cols (), webCamTextureMat.rows (), 1);

Debug.Log ("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

float width = webCamTextureMat.width ();

float height = webCamTextureMat.height ();

float imageSizeScale = 1.0f;

float widthScale = (float)Screen.width / width;

float heightScale = (float)Screen.height / height;

if (widthScale < heightScale) {

Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;

imageSizeScale = (float)Screen.height / (float)Screen.width;

} else {

Camera.main.orthographicSize = height / 2;

}

// Update is called once per frame

void Update ()

{

if (webCamTextureToMatHelper.IsPlaying () && webCamTextureToMatHelper.DidUpdateThisFrame ()) {

Mat rgbaMat = webCamTextureToMatHelper.GetMat ();

OpenCVForUnityUtils.SetImage (faceLandmarkDetector, rgbaMat);

//detect face rects

List detectResult = faceLandmarkDetector.Detect ();

if (detectResult.Count > 0) {

//detect landmark points

List points = faceLandmarkDetector.DetectLandmark (detectResult [0]);

if (points.Count > 0) {

if (isShowingFacePoints)

OpenCVForUnityUtils.DrawFaceLandmark (rgbaMat, points, new Scalar (0, 255, 0, 255), 2);

imagePoints.fromArray (

new Point ((points [38].x + points [41].x) / 2, (points [38].y + points [41].y) / 2),//l eye

new Point ((points [43].x + points [46].x) / 2, (points [43].y + points [46].y) / 2),//r eye

new Point (points [33].x, points [33].y),//nose

new Point (points [48].x, points [48].y),//l mouth

new Point (points [54].x, points [54].y) //r mouth

,

new Point (points [0].x, points [0].y),//l ear

new Point (points [16].x, points [16].y)//r ear

);

Calib3d.solvePnP (objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);

//眼睛的特效

if (tvec.get (2, 0) [0] > 0) {

if (Mathf.Abs ((float)(points [43].y - points [46].y)) > Mathf.Abs ((float)(points [42].x - points [45].x)) / 6.0) {

if (isShowingEffects)

rightEye.SetActive (true);

}

if (Mathf.Abs ((float)(points [38].y - points [41].y)) > Mathf.Abs ((float)(points [39].x - points [36].x)) / 6.0) {

if (isShowingEffects)

leftEye.SetActive (true);

}

if (isShowingHead)

head.SetActive (true);

if (isShowingAxes)

axes.SetActive (true);

//嘴部特效

float noseDistance = Mathf.Abs ((float)(points [27].y - points [33].y));

float mouseDistance = Mathf.Abs ((float)(points [62].y - points [66].y));

if (mouseDistance > noseDistance / 5.0) {

if (isShowingEffects) {

mouth.SetActive (true);

foreach (ParticleSystem ps in mouthParticleSystem) {

ps.enableEmission = true;

ps.startSize = 500 * (mouseDistance / noseDistance);

}

}

} else {

if (isShowingEffects) {

foreach (ParticleSystem ps in mouthParticleSystem) {

ps.enableEmission = false;

}

}

}

Calib3d.Rodrigues (rvec, rotM);

transformationM .SetRow (0, new Vector4 ((float)rotM.get (0, 0) [0], (float)rotM.get (0, 1) [0], (float)rotM.get (0, 2) [0], (float)tvec.get (0, 0) [0]));

transformationM.SetRow (1, new Vector4 ((float)rotM.get (1, 0) [0], (float)rotM.get (1, 1) [0], (float)rotM.get (1, 2) [0], (float)tvec.get (1, 0) [0]));

transformationM.SetRow (2, new Vector4 ((float)rotM.get (2, 0) [0], (float)rotM.get (2, 1) [0], (float)rotM.get (2, 2) [0], (float)tvec.get (2, 0) [0]));

transformationM.SetRow (3, new Vector4 (0, 0, 0, 1));

if (shouldMoveARCamera) {

if (ARGameObject !&

  • 0
    点赞
  • 8
    收藏
    觉得还不错? 一键收藏
  • 0
    评论
这是一个非常广泛和复杂的主题,需要用到多种技术和工具来实现。以下是一个简单的示例代码,演示了如何在 Unity AR自动识别人脸并应用换脸效果: ```c# using UnityEngine; using UnityEngine.XR.ARFoundation; using UnityEngine.XR.ARSubsystems; public class FaceDetector : MonoBehaviour { public ARFaceManager faceManager; public GameObject[] faceModels; private void OnEnable() { faceManager.facesChanged += OnFacesChanged; } private void OnDisable() { faceManager.facesChanged -= OnFacesChanged; } private void OnFacesChanged(ARFacesChangedEventArgs args) { foreach (var face in args.added) { // 获取脸部特征点的位置 Vector3[] facePoints = face.vertices; // 获取脸部的旋转和缩放信息 Quaternion faceRotation = face.transform.rotation; Vector3 faceScale = face.transform.localScale; // 根据脸部特征点的位置,计算出脸部的中心点 Vector3 faceCenter = Vector3.zero; foreach (var point in facePoints) { faceCenter += point; } faceCenter /= facePoints.Length; // 根据脸部的中心点,计算出应该使用的换脸模型 int modelIndex = Mathf.FloorToInt(faceCenter.x / Screen.width * faceModels.Length); // 将换脸模型应用到脸部上 var faceModel = Instantiate(faceModels[modelIndex], face.transform.position, faceRotation, face.transform); faceModel.transform.localScale = faceScale; } foreach (var face in args.updated) { // 更新脸部的旋转和缩放信息 face.transform.rotation = face.transform.rotation; face.transform.localScale = face.transform.localScale; } foreach (var face in args.removed) { // 移除脸部上的换脸模型 Destroy(face.transform.GetChild(0).gameObject); } } } ``` 该代码使用了 Unity ARFoundation 和 ARSubsystems 库,以及 ARFaceManager 组件,监测相机捕捉到的人脸,并将指定的换脸模型应用到人脸上。具体实现过程中,需要根据实际情况进行适当的修改和调整。
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值