1. Framework Overview
2. Key Code
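The example is built from three scripts: WebCamTextureARSample estimates the head pose from Dlib's 68 facial landmarks and drives the AR objects, TrackedMeshOverlay pools and positions one TrackedMesh per tracked face, and WebCamTextureFaceMaskExample detects, tracks and re-textures faces with the selected mask image. WebCamTextureToMatHelper (excerpted below) wraps the camera feed as an OpenCV Mat.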
WebCamTextureARSample
using UnityEngine;
using System.Collections;
using System.Collections.Generic;
using UnityEngine.UI;
#if UNITY_5_3 || UNITY_5_3_OR_NEWER
using UnityEngine.SceneManagement;
#endif
using OpenCVForUnity;
using DlibFaceLandmarkDetector;
namespace DlibFaceLandmarkDetectorSample
{
/// <summary>
/// AR face tracker example using WebCamTexture.
/// Based on http://www.morethantechnical.com/2012/10/17/head-pose-estimation-with-opencv-opengl-revisited-w-code/,
/// with effect assets from http://ktk-kumamoto.hatenablog.com/entry/2014/09/14/092400.
/// </summary>
[RequireComponent(typeof(WebCamTextureToMatHelper))]
public class WebCamTextureARSample : MonoBehaviour
{
/// <summary>
/// Determines whether the face landmark points are shown.
/// </summary>
public bool isShowingFacePoints;
/// <summary>
/// The toggle for showing the face landmark points.
/// </summary>
public Toggle isShowingFacePointsToggle;
/// <summary>
/// Determines whether the axes are shown.
/// </summary>
public bool isShowingAxes;
/// <summary>
/// The toggle for showing the axes.
/// </summary>
public Toggle isShowingAxesToggle;
/// <summary>
/// Determines whether the head is shown.
/// </summary>
public bool isShowingHead;
/// <summary>
/// The toggle for showing the head.
/// </summary>
public Toggle isShowingHeadToggle;
/// <summary>
/// Determines whether the effects are shown.
/// </summary>
public bool isShowingEffects;
/// <summary>
/// The toggle for showing the effects.
/// </summary>
public Toggle isShowingEffectsToggle;
/// <summary>
/// The axes object.
/// </summary>
public GameObject axes;
/// <summary>
/// The head object.
/// </summary>
public GameObject head;
/// <summary>
/// The right eye object.
/// </summary>
public GameObject rightEye;
/// <summary>
/// The left eye object.
/// </summary>
public GameObject leftEye;
/// <summary>
/// The mouth object.
/// </summary>
public GameObject mouth;
/// <summary>
/// The particle systems for the mouth effect.
/// </summary>
ParticleSystem[] mouthParticleSystem;
/// <summary>
/// The texture that displays the camera frame.
/// </summary>
Texture2D texture;
/// <summary>
/// The face landmark detector.
/// </summary>
FaceLandmarkDetector faceLandmarkDetector;
/// <summary>
/// The AR camera.
/// </summary>
public Camera ARCamera;
/// <summary>
/// The camera intrinsic matrix.
/// </summary>
Mat camMatrix;
/// <summary>
/// The distortion coefficients.
/// </summary>
MatOfDouble distCoeffs;
/// <summary>
/// The matrix that inverts the Y-axis.
/// </summary>
Matrix4x4 invertYM;
/// <summary>
/// The transformation matrix.
/// </summary>
Matrix4x4 transformationM = new Matrix4x4 ();
/// <summary>
/// The matrix that inverts the Z-axis.
/// </summary>
Matrix4x4 invertZM;
/// <summary>
/// The AR transformation matrix.
/// </summary>
Matrix4x4 ARM;
/// <summary>
/// The ar game object.
/// </summary>
public GameObject ARGameObject;
/// <summary>
/// Determines whether the AR camera is moved instead of the AR game object.
/// </summary>
public bool shouldMoveARCamera;
/// <summary>
/// The 3d face object points.
/// </summary>
MatOfPoint3f objectPoints;
/// <summary>
/// The image points.
/// </summary>
MatOfPoint2f imagePoints;
/// <summary>
/// The rvec.
/// </summary>
Mat rvec;
/// <summary>
/// The tvec.
/// </summary>
Mat tvec;
/// <summary>
/// The rot m.
/// </summary>
Mat rotM;
/// <summary>
/// The web cam texture to mat helper.
/// </summary>
WebCamTextureToMatHelper webCamTextureToMatHelper;
/// <summary>
/// The shape_predictor_68_face_landmarks_dat_filepath.
/// </summary>
private string shape_predictor_68_face_landmarks_dat_filepath;
// Use this for initialization
void Start ()
{
isShowingFacePointsToggle.isOn = isShowingFacePoints;
isShowingAxesToggle.isOn = isShowingAxes;
isShowingHeadToggle.isOn = isShowingHead;
isShowingEffectsToggle.isOn = isShowingEffects;
#if UNITY_WEBGL && !UNITY_EDITOR
StartCoroutine(DlibFaceLandmarkDetector.Utils.getFilePathAsync("shape_predictor_68_face_landmarks.dat", (result) => {
shape_predictor_68_face_landmarks_dat_filepath = result;
Run ();
}));
#else
shape_predictor_68_face_landmarks_dat_filepath = DlibFaceLandmarkDetector.Utils.getFilePath ("shape_predictor_68_face_landmarks.dat");
Run ();
#endif
}
private void Run ()
{
//set the 3D face model points that correspond to the 2D landmarks picked in Update ().
objectPoints = new MatOfPoint3f (
new Point3 (-31, 72, 86),//l eye
new Point3 (31, 72, 86),//r eye
new Point3 (0, 40, 114),//nose
new Point3 (-20, 15, 90),//l mouth
new Point3 (20, 15, 90),//r mouth
new Point3 (-69, 76, -2),//l ear
new Point3 (69, 76, -2)//r ear
);
imagePoints = new MatOfPoint2f ();
rvec = new Mat ();
tvec = new Mat ();
rotM = new Mat (3, 3, CvType.CV_64FC1);
faceLandmarkDetector = new FaceLandmarkDetector (shape_predictor_68_face_landmarks_dat_filepath);
webCamTextureToMatHelper = gameObject.GetComponent<WebCamTextureToMatHelper> ();
webCamTextureToMatHelper.Init ();
}
/// <summary>
/// Raises the web cam texture to mat helper inited event.
/// </summary>
public void OnWebCamTextureToMatHelperInited ()
{
Debug.Log ("OnWebCamTextureToMatHelperInited");
Mat webCamTextureMat = webCamTextureToMatHelper.GetMat ();
texture = new Texture2D (webCamTextureMat.cols (), webCamTextureMat.rows (), TextureFormat.RGBA32, false);
gameObject.GetComponent<Renderer> ().material.mainTexture = texture;
gameObject.transform.localScale = new Vector3 (webCamTextureMat.cols (), webCamTextureMat.rows (), 1);
Debug.Log ("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);
float width = webCamTextureMat.width ();
float height = webCamTextureMat.height ();
float imageSizeScale = 1.0f;
float widthScale = (float)Screen.width / width;
float heightScale = (float)Screen.height / height;
if (widthScale < heightScale) {
Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
imageSizeScale = (float)Screen.height / (float)Screen.width;
} else {
Camera.main.orthographicSize = height / 2;
}
// set the camera matrix (pinhole model: fx = fy = max(width, height), principal point at the image center) and zero distortion, as used by solvePnP in Update ().
int max_d = (int)Mathf.Max (width, height);
camMatrix = new Mat (3, 3, CvType.CV_64FC1);
camMatrix.put (0, 0, max_d, 0, width / 2.0f, 0, max_d, height / 2.0f, 0, 0, 1.0f);
distCoeffs = new MatOfDouble (0, 0, 0, 0);
// conversion matrices between OpenCV's right-handed and Unity's left-handed coordinate systems.
invertYM = Matrix4x4.TRS (Vector3.zero, Quaternion.identity, new Vector3 (1, -1, 1));
invertZM = Matrix4x4.TRS (Vector3.zero, Quaternion.identity, new Vector3 (1, 1, -1));
mouthParticleSystem = mouth.GetComponentsInChildren<ParticleSystem> (true);
}
// Update is called once per frame
void Update ()
{
if (webCamTextureToMatHelper.IsPlaying () && webCamTextureToMatHelper.DidUpdateThisFrame ()) {
Mat rgbaMat = webCamTextureToMatHelper.GetMat ();
OpenCVForUnityUtils.SetImage (faceLandmarkDetector, rgbaMat);
//detect face rects
List<UnityEngine.Rect> detectResult = faceLandmarkDetector.Detect ();
if (detectResult.Count > 0) {
//detect landmark points
List<Vector2> points = faceLandmarkDetector.DetectLandmark (detectResult [0]);
if (points.Count > 0) {
if (isShowingFacePoints)
OpenCVForUnityUtils.DrawFaceLandmark (rgbaMat, points, new Scalar (0, 255, 0, 255), 2);
imagePoints.fromArray (
new Point ((points [38].x + points [41].x) / 2, (points [38].y + points [41].y) / 2),//l eye
new Point ((points [43].x + points [46].x) / 2, (points [43].y + points [46].y) / 2),//r eye
new Point (points [33].x, points [33].y),//nose
new Point (points [48].x, points [48].y),//l mouth
new Point (points [54].x, points [54].y),//r mouth
new Point (points [0].x, points [0].y),//l ear
new Point (points [16].x, points [16].y)//r ear
);
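// estimate the head pose: solvePnP computes the rotation (rvec) and translation (tvec) that project the 3D model points onto the detected 2D landmarks.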
Calib3d.solvePnP (objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);
// eye effects: shown only while the face is in front of the camera (tvec z > 0) and the eyelid gap exceeds a sixth of the eye width.
if (tvec.get (2, 0) [0] > 0) {
if (Mathf.Abs ((float)(points [43].y - points [46].y)) > Mathf.Abs ((float)(points [42].x - points [45].x)) / 6.0) {
if (isShowingEffects)
rightEye.SetActive (true);
}
if (Mathf.Abs ((float)(points [38].y - points [41].y)) > Mathf.Abs ((float)(points [39].x - points [36].x)) / 6.0) {
if (isShowingEffects)
leftEye.SetActive (true);
}
if (isShowingHead)
head.SetActive (true);
if (isShowingAxes)
axes.SetActive (true);
// mouth effect: emit particles while the mouth opening exceeds a fifth of the nose length.
float noseDistance = Mathf.Abs ((float)(points [27].y - points [33].y));
float mouthDistance = Mathf.Abs ((float)(points [62].y - points [66].y));
if (mouthDistance > noseDistance / 5.0) {
if (isShowingEffects) {
mouth.SetActive (true);
foreach (ParticleSystem ps in mouthParticleSystem) {
ps.enableEmission = true;
ps.startSize = 500 * (mouthDistance / noseDistance);
}
}
} else {
if (isShowingEffects) {
foreach (ParticleSystem ps in mouthParticleSystem) {
ps.enableEmission = false;
}
}
}
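// convert the rotation vector into a 3x3 rotation matrix and pack it, together with tvec, into a 4x4 transformation matrix.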
Calib3d.Rodrigues (rvec, rotM);
transformationM.SetRow (0, new Vector4 ((float)rotM.get (0, 0) [0], (float)rotM.get (0, 1) [0], (float)rotM.get (0, 2) [0], (float)tvec.get (0, 0) [0]));
transformationM.SetRow (1, new Vector4 ((float)rotM.get (1, 0) [0], (float)rotM.get (1, 1) [0], (float)rotM.get (1, 2) [0], (float)tvec.get (1, 0) [0]));
transformationM.SetRow (2, new Vector4 ((float)rotM.get (2, 0) [0], (float)rotM.get (2, 1) [0], (float)rotM.get (2, 2) [0], (float)tvec.get (2, 0) [0]));
transformationM.SetRow (3, new Vector4 (0, 0, 0, 1));
if (shouldMoveARCamera) {
if (ARGameObject != null) {
ARM = ARGameObject.transform.localToWorldMatrix * invertZM * transformationM.inverse * invertYM;
ARUtils.SetTransformFromMatrix (ARCamera.transform, ref ARM);
ARGameObject.SetActive (true);
}
} else {
ARM = ARCamera.transform.localToWorldMatrix * invertYM * transformationM * invertZM;
if (ARGameObject != null) {
ARUtils.SetTransformFromMatrix (ARGameObject.transform, ref ARM);
ARGameObject.SetActive (true);
}
}
}
}
}
// display the resolution and orientation at the bottom of the frame.
// Imgproc.putText (rgbaMat, "W:" + rgbaMat.width () + " H:" + rgbaMat.height () + " SO:" + Screen.orientation, new Point (5, rgbaMat.rows () - 10), Core.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar (255, 255, 255, 255), 1, Imgproc.LINE_AA, false);
OpenCVForUnity.Utils.matToTexture2D (rgbaMat, texture, webCamTextureToMatHelper.GetBufferColors ());
}
}
/// <summary>
/// Raises the disable event.
/// </summary>
void OnDisable ()
{
if (webCamTextureToMatHelper != null)
webCamTextureToMatHelper.Dispose ();
if (faceLandmarkDetector != null)
faceLandmarkDetector.Dispose ();
}
/// <summary>
/// Raises the back button event.
/// </summary>
public void OnBackButton ()
{
#if UNITY_5_3 || UNITY_5_3_OR_NEWER
// SceneManager.LoadScene ("DlibFaceLandmarkDetectorSample");
SceneManager.LoadScene("FaceMask");
#else
Application.LoadLevel ("FaceMask");
//Application.LoadLevel ("DlibFaceLandmarkDetectorSample");
#endif
}
/// <summary>
/// Raises the play button event.
/// </summary>
public void OnPlayButton ()
{
webCamTextureToMatHelper.Play ();
}
/// <summary>
/// Raises the pause button event.
/// </summary>
public void OnPauseButton ()
{
webCamTextureToMatHelper.Pause ();
}
/// <summary>
/// Raises the stop button event.
/// </summary>
public void OnStopButton ()
{
webCamTextureToMatHelper.Stop ();
}
/// <summary>
/// Raises the change camera button event.
/// </summary>
public void OnChangeCameraButton ()
{
webCamTextureToMatHelper.Init (null, webCamTextureToMatHelper.requestWidth, webCamTextureToMatHelper.requestHeight, !webCamTextureToMatHelper.requestIsFrontFacing);
}
}
}
WebCamTextureToMatHelper
namespace OpenCVForUnity
{
public class WebCamTextureToMatHelper : MonoBehaviour
{
// (fields and the body of the initialization coroutine are omitted; the coroutine ends with this timeout check.)
if (isTimeout) {
//Debug.Log("Init time out.");
webCamTexture.Stop ();
webCamTexture = null;
initWaiting = false;
if (OnErrorOccurredEvent != null)
OnErrorOccurredEvent.Invoke (ErrorCode.TIMEOUT);
}
}
/// <summary>
/// Returns whether the helper has been initialized.
/// </summary>
/// <returns><c>true</c> if initialized; otherwise, <c>false</c>.</returns>
public bool IsInited ()
{
return initDone;
}
/// <summary>
/// Play this instance.
/// </summary>
public void Play ()
{
if (initDone)
webCamTexture.Play ();
}
/// <summary>
/// Pause this instance.
/// </summary>
public void Pause ()
{
if (initDone)
webCamTexture.Pause ();
}
/// <summary>
/// Stop this instance.
/// </summary>
public void Stop ()
{
if (initDone)
webCamTexture.Stop ();
}
/// <summary>
/// Returns whether the WebCamTexture is playing.
/// </summary>
/// <returns><c>true</c> if playing; otherwise, <c>false</c>.</returns>
public bool IsPlaying ()
{
if (!initDone)
return false;
return webCamTexture.isPlaying;
}
/// <summary>
/// Gets the web cam texture.
/// </summary>
/// <returns>The web cam texture.</returns>
public WebCamTexture GetWebCamTexture ()
{
return (initDone) ? webCamTexture : null;
}
/// <summary>
/// Gets the web cam device.
/// </summary>
/// <returns>The web cam device.</returns>
public WebCamDevice GetWebCamDevice ()
{
return webCamDevice;
}
/// <summary>
/// Returns whether the WebCamTexture was updated this frame.
/// </summary>
/// <returns><c>true</c> if a new frame arrived; otherwise, <c>false</c>.</returns>
public bool DidUpdateThisFrame ()
{
if (!initDone)
return false;
#if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
if (webCamTexture.width > 16 && webCamTexture.height > 16) {
return true;
} else {
return false;
}
#else
return webCamTexture.didUpdateThisFrame;
#endif
}
/// <summary>
/// Gets the mat.
/// </summary>
/// <returns>The mat.</returns>
public Mat GetMat ()
{
if (!initDone || !webCamTexture.isPlaying) {
if (rotatedRgbaMat != null) {
return rotatedRgbaMat;
} else {
return rgbaMat;
}
}
Utils.webCamTextureToMat (webCamTexture, rgbaMat, colors);
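// when the device delivers a rotated frame, transpose and flip it upright before applying the orientation flips below.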
if (rotatedRgbaMat != null) {
using (Mat transposeRgbaMat = rgbaMat.t ()) {
Core.flip (transposeRgbaMat, rotatedRgbaMat, 1);
}
flipMat (rotatedRgbaMat);
return rotatedRgbaMat;
} else {
flipMat (rgbaMat);
return rgbaMat;
}
}
/// <summary>
/// Flips the mat.
/// </summary>
/// <param name="mat">Mat.</param>
private void flipMat (Mat mat)
{
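// flip codes for Core.flip: 0 = around the X-axis, 1 = around the Y-axis, -1 = around both; int.MinValue is used as a "no flip" sentinel.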
int flipCode = int.MinValue;
if (webCamDevice.isFrontFacing) {
if (webCamTexture.videoRotationAngle == 0) {
flipCode = 1;
} else if (webCamTexture.videoRotationAngle == 90) {
flipCode = 1;
}
if (webCamTexture.videoRotationAngle == 180) {
flipCode = 0;
} else if (webCamTexture.videoRotationAngle == 270) {
flipCode = 0;
}
} else {
if (webCamTexture.videoRotationAngle == 180) {
flipCode = -1;
} else if (webCamTexture.videoRotationAngle == 270) {
flipCode = -1;
}
}
if (flipVertical) {
if (flipCode == int.MinValue) {
flipCode = 0;
} else if (flipCode == 0) {
flipCode = int.MinValue;
} else if (flipCode == 1) {
flipCode = -1;
} else if (flipCode == -1) {
flipCode = 1;
}
}
if (flipHorizontal) {
if (flipCode == int.MinValue) {
flipCode = 1;
} else if (flipCode == 0) {
flipCode = -1;
} else if (flipCode == 1) {
flipCode = int.MinValue;
} else if (flipCode == -1) {
flipCode = 0;
}
}
if (flipCode > int.MinValue) {
Core.flip (mat, mat, flipCode);
}
}
/// <summary>
/// Gets the buffer colors.
/// </summary>
/// <returns>The buffer colors.</returns>
public Color32[] GetBufferColors ()
{
return colors;
}
/// <summary>
/// To release the resources for the init method.
/// </summary>
private void dispose ()
{
initWaiting = false;
initDone = false;
if (webCamTexture != null) {
webCamTexture.Stop ();
webCamTexture = null;
}
if (rgbaMat != null) {
rgbaMat.Dispose ();
rgbaMat = null;
}
if (rotatedRgbaMat != null) {
rotatedRgbaMat.Dispose ();
rotatedRgbaMat = null;
}
if (OnDisposedEvent != null)
OnDisposedEvent.Invoke ();
}
/// <summary>
/// Releases all resource used by the <see cref="WebCamTextureToMatHelper"/> object.
/// </summary>
/// <remarks>Call <see cref="Dispose"/> when you are finished using the <see cref="WebCamTextureToMatHelper"/>. The
/// <see cref="Dispose"/> method leaves the <see cref="WebCamTextureToMatHelper"/> in an unusable state. After
/// calling <see cref="Dispose"/>, you must release all references to the <see cref="WebCamTextureToMatHelper"/> so
/// the garbage collector can reclaim the memory that the <see cref="WebCamTextureToMatHelper"/> was occupying.</remarks>
public void Dispose ()
{
if (initDone)
dispose ();
colors = null;
}
}
}
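Back in WebCamTextureARSample, the invertYM/invertZM matrix chain is the trickiest part. For reference, here is a minimal sketch of the same handedness conversion (PoseConversionSketch and ApplyPose are illustrative names, not part of the plugin); it assumes transformationM was packed from Rodrigues (rvec) and tvec exactly as in Update (), and it reproduces what ARUtils.SetTransformFromMatrix does:
using UnityEngine;
public static class PoseConversionSketch
{
public static void ApplyPose (Transform target, Matrix4x4 transformationM, Matrix4x4 cameraToWorld)
{
// OpenCV is right-handed (Y down, Z towards the scene); Unity is left-handed (Y up).
Matrix4x4 invertYM = Matrix4x4.TRS (Vector3.zero, Quaternion.identity, new Vector3 (1, -1, 1));
Matrix4x4 invertZM = Matrix4x4.TRS (Vector3.zero, Quaternion.identity, new Vector3 (1, 1, -1));
// flip Y on the camera side and Z on the model side so the pose lands in Unity space.
Matrix4x4 arM = cameraToWorld * invertYM * transformationM * invertZM;
// decompose the 4x4 matrix into position, rotation and scale.
target.position = (Vector3)arM.GetColumn (3);
target.rotation = Quaternion.LookRotation (arM.GetColumn (2), arM.GetColumn (1));
target.localScale = new Vector3 (arM.GetColumn (0).magnitude, arM.GetColumn (1).magnitude, arM.GetColumn (2).magnitude);
}
}
Note the reversed matrix order in the shouldMoveARCamera branch of Update (): moving the camera instead of the object requires the inverse of the same chain.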
TrackedMeshOverlay
using System;
using System.Collections.Generic;
using UnityEngine;
using OpenCVForUnity.RectangleTrack;
namespace FaceMaskExample
{
public class TrackedMeshOverlay : MonoBehaviour
{
public int Interval = 1;
public int PoolSize = 10;
[SerializeField]
private GameObject baseObject;
public GameObject BaseObject
{
get {
return baseObject;
}
set {
baseObject = value;
setBaseObject(baseObject);
}
}
public float Width
{
get {
return targetWidth;
}
}
public float Height
{
get {
return targetHeight;
}
}
protected Transform targetTransform;
protected float targetWidth = 0;
protected float targetHeight = 0;
protected Transform overlayTransform;
protected ObjectPool objectPool;
protected Dictionary<int, TrackedMesh> showingObjects = new Dictionary<int, TrackedMesh>();
void Awake()
{
init("MeshOverlay");
}
void OnDestroy()
{
overlayTransform = null;
targetTransform = null;
targetWidth = 0;
targetHeight = 0;
showingObjects.Clear();
if(objectPool != null)
{
Destroy(objectPool.gameObject);
objectPool = null;
}
}
protected GameObject getPoolObject(Transform parent)
{
if(objectPool == null) return null;
GameObject newObj = objectPool.GetInstance(parent);
if(newObj != null){
newObj.transform.SetParent(parent, false);
return newObj;
}else{
return null;
}
}
protected virtual void init(String name)
{
GameObject obj = new GameObject(name);
overlayTransform = obj.transform;
overlayTransform.parent = gameObject.transform.parent;
if(baseObject != null)
setBaseObject (baseObject);
}
protected virtual void setBaseObject (GameObject obj)
{
if (obj.GetComponent<TrackedMesh>() == null)
{
Debug.LogWarning("The base object does not have a TrackedMesh component.");
return;
}
if(objectPool != null){
Destroy(objectPool);
}
objectPool = overlayTransform.gameObject.AddComponent<ObjectPool>();
objectPool.prefab = obj;
objectPool.maxCount = PoolSize;
objectPool.prepareCount = PoolSize / 2;
objectPool.Interval = Interval;
}
public virtual void UpdateOverlayTransform(Transform targetTransform)
{
if (targetTransform == null)
{
this.targetTransform = null;
return;
}
targetWidth = targetTransform.localScale.x;
targetHeight = targetTransform.localScale.y;
this.targetTransform = targetTransform;
overlayTransform.localPosition = targetTransform.localPosition;
overlayTransform.localRotation = targetTransform.localRotation;
overlayTransform.localScale = targetTransform.localScale;
}
public virtual TrackedMesh GetObjectById(int id)
{
if (showingObjects.ContainsKey(id))
{
return showingObjects[id];
}
return null;
}
public virtual TrackedMesh CreateObject(int id, Texture2D tex = null)
{
if (!showingObjects.ContainsKey(id)){
GameObject obj = getPoolObject(overlayTransform);
if (obj == null) return null;
TrackedMesh tm = obj.GetComponent<TrackedMesh>();
if (tm != null)
{
tm.Id = id;
tm.transform.localPosition = Vector3.zero;
tm.transform.localRotation = Quaternion.identity;
tm.transform.localScale = Vector3.one;
if (tex != null)
{
Renderer tmRenderer = tm.transform.GetComponent<Renderer>();
tmRenderer.sharedMaterial.SetTexture ("_MainTex", tex);
}
showingObjects.Add(id, tm);
}
return tm;
}
else{
return null;
}
}
public virtual void UpdateObject(int id, Vector3[] vertices, int[] triangles = null, Vector2[] uv = null)
{
if (showingObjects.ContainsKey(id)){
TrackedMesh tm = showingObjects[id];
if(vertices.Length != tm.MeshFilter.mesh.vertices.Length) Debug.LogError("The number of vertices does not match.");
tm.MeshFilter.mesh.vertices = vertices;
if (triangles != null)
{
tm.MeshFilter.mesh.triangles = triangles;
}
if (uv != null)
{
tm.MeshFilter.mesh.uv = uv;
}
tm.MeshFilter.mesh.RecalculateBounds();
tm.MeshFilter.mesh.RecalculateNormals();
}
}
public virtual void DeleteObject(int id)
{
if (showingObjects.ContainsKey(id))
{
if(showingObjects[id] != null)
showingObjects[id].gameObject.SetActive(false);
showingObjects.Remove(id);
}
}
public virtual void Reset()
{
foreach (int key in showingObjects.Keys)
{
if(showingObjects[key] != null)
showingObjects[key].gameObject.SetActive(false);
}
showingObjects.Clear();
}
}
}
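To make the intended call sequence clearer, here is a hypothetical driver for TrackedMeshOverlay (UpdateFace and RemoveFace are illustrative stand-ins for the tracking loop shown in WebCamTextureFaceMaskExample below):
using UnityEngine;
using FaceMaskExample;
public class OverlayDriverSketch : MonoBehaviour
{
TrackedMeshOverlay overlay;
void Start ()
{
overlay = GetComponent<TrackedMeshOverlay> ();
}
// called once per tracked face and frame with its 68 mesh vertices.
public void UpdateFace (int id, Vector3[] vertices, Texture2D maskTexture)
{
if (overlay.GetObjectById (id) == null)
overlay.CreateObject (id, maskTexture); // takes a TrackedMesh from the pool.
overlay.UpdateObject (id, vertices); // rewrites the mesh vertices in place.
}
// called when the tracker reports the face as DELETED.
public void RemoveFace (int id)
{
overlay.DeleteObject (id); // deactivates the mesh, returning it to the pool.
}
}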
WebCamTextureFaceMaskExample
using System.Collections;
using System.Collections.Generic;
using System.IO;
using UnityEngine;
using UnityEngine.UI;
using DlibFaceLandmarkDetector;
using OpenCVForUnity;
using OpenCVForUnity.RectangleTrack;
using WebGLFileUploader;
#if UNITY_5_3 || UNITY_5_3_OR_NEWER
using UnityEngine.SceneManagement;
#endif
namespace FaceMaskExample
{
public class WebCamTextureFaceMaskExample : MonoBehaviour
{
// (serialized fields, toggles and Start () are omitted; only the key methods are shown.)
#if UNITY_WEBGL && !UNITY_EDITOR
private IEnumerator getFilePathCoroutine()
{
var getFilePathAsync_0_Coroutine = StartCoroutine (OpenCVForUnity.Utils.getFilePathAsync ("haarcascade_frontalface_alt.xml", (result) => {
haarcascade_frontalface_alt_xml_filepath = result;
}));
var getFilePathAsync_1_Coroutine = StartCoroutine (DlibFaceLandmarkDetector.Utils.getFilePathAsync ("shape_predictor_68_face_landmarks.dat", (result) => {
shape_predictor_68_face_landmarks_dat_filepath = result;
}));
yield return getFilePathAsync_0_Coroutine;
yield return getFilePathAsync_1_Coroutine;
Run ();
uploadFaceMaskButton.interactable = true;
}
#endif
private void Run ()
{
meshOverlay = this.GetComponent<TrackedMeshOverlay> ();
shader_FadeID = Shader.PropertyToID("_Fade");
rectangleTracker = new RectangleTracker ();
faceLandmarkDetector = new FaceLandmarkDetector (shape_predictor_68_face_landmarks_dat_filepath);
frontalFaceParam = new FrontalFaceParam ();
webCamTextureToMatHelper.Init ();
isShowingFaceRectsToggle.isOn = isShowingFaceRects;
useDlibFaceDetecterToggle.isOn = useDlibFaceDetecter;
isFilteringNonFrontalFacesToggle.isOn = isFilteringNonFrontalFaces;
isShowingDebugFacePointsToggle.isOn = isShowingDebugFacePoints;
}
/// <summary>
/// Raises the web cam texture to mat helper inited event.
/// </summary>
public void OnWebCamTextureToMatHelperInited ()
{
Debug.Log ("OnWebCamTextureToMatHelperInited");
Mat webCamTextureMat = webCamTextureToMatHelper.GetMat ();
colors = new Color32[webCamTextureMat.cols () * webCamTextureMat.rows ()];
texture = new Texture2D (webCamTextureMat.cols (), webCamTextureMat.rows (), TextureFormat.RGBA32, false);
gameObject.transform.localScale = new Vector3 (webCamTextureMat.cols (), webCamTextureMat.rows (), 1);
Debug.Log ("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);
float width = gameObject.transform.localScale.x;
float height = gameObject.transform.localScale.y;
float widthScale = (float)Screen.width / width;
float heightScale = (float)Screen.height / height;
if (widthScale < heightScale) {
Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
} else {
Camera.main.orthographicSize = height / 2;
}
gameObject.GetComponent<Renderer> ().material.mainTexture = texture;
grayMat = new Mat (webCamTextureMat.rows (), webCamTextureMat.cols (), CvType.CV_8UC1);
cascade = new CascadeClassifier (haarcascade_frontalface_alt_xml_filepath);
if (cascade.empty ()) {
Debug.LogError ("cascade file is not loaded.Please copy from “FaceTrackerExample/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
}
meshOverlay.UpdateOverlayTransform (gameObject.transform);
OnChangeFaceMaskButton ();
}
/// <summary>
/// Raises the web cam texture to mat helper disposed event.
/// </summary>
public void OnWebCamTextureToMatHelperDisposed ()
{
Debug.Log ("OnWebCamTextureToMatHelperDisposed");
grayMat.Dispose ();
rectangleTracker.Reset ();
meshOverlay.Reset ();
}
/// <summary>
/// Raises the web cam texture to mat helper error occurred event.
/// </summary>
/// <param name="errorCode">Error code.</param>
public void OnWebCamTextureToMatHelperErrorOccurred(WebCamTextureToMatHelper.ErrorCode errorCode){
Debug.Log ("OnWebCamTextureToMatHelperErrorOccurred " + errorCode);
}
// Update is called once per frame
void Update ()
{
if (webCamTextureToMatHelper.IsPlaying () && webCamTextureToMatHelper.DidUpdateThisFrame ()) {
Mat rgbaMat = webCamTextureToMatHelper.GetMat ();
// detect faces.
List<OpenCVForUnity.Rect> detectResult = new List<OpenCVForUnity.Rect> ();
if (useDlibFaceDetecter) {
OpenCVForUnityUtils.SetImage (faceLandmarkDetector, rgbaMat);
List<UnityEngine.Rect> result = faceLandmarkDetector.Detect ();
foreach (var unityRect in result) {
detectResult.Add (new OpenCVForUnity.Rect ((int)unityRect.x, (int)unityRect.y, (int)unityRect.width, (int)unityRect.height));
}
} else {
// convert image to greyscale.
Imgproc.cvtColor (rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);
using (Mat equalizeHistMat = new Mat ())
using (MatOfRect faces = new MatOfRect ()) {
Imgproc.equalizeHist (grayMat, equalizeHistMat);
cascade.detectMultiScale (equalizeHistMat, faces, 1.1f, 2, 0 | Objdetect.CASCADE_SCALE_IMAGE, new OpenCVForUnity.Size (equalizeHistMat.cols () * 0.15, equalizeHistMat.cols () * 0.15), new Size ());
detectResult = faces.toList ();
}
// Adjust to Dlib's result.
foreach (OpenCVForUnity.Rect r in detectResult) {
r.y += (int)(r.height * 0.1f);
}
}
// face tracking.
rectangleTracker.UpdateTrackedObjects (detectResult);
List<TrackedRect> trackedRects = new List<TrackedRect> ();
rectangleTracker.GetObjects (trackedRects, true);
// detect face landmark.
OpenCVForUnityUtils.SetImage (faceLandmarkDetector, rgbaMat);
List<List<Vector2>> landmarkPoints = new List<List<Vector2>> ();
for (int i = 0; i < trackedRects.Count; i++) {
TrackedRect tr = trackedRects [i];
UnityEngine.Rect rect = new UnityEngine.Rect (tr.x, tr.y, tr.width, tr.height);
List<Vector2> points = faceLandmarkDetector.DetectLandmark (rect);
landmarkPoints.Add (points);
}
// face masking.
if (faceMaskTexture != null && landmarkPoints.Count >= 1) {
OpenCVForUnity.Utils.texture2DToMat (faceMaskTexture, faceMaskMat);
float imageWidth = meshOverlay.Width;
float imageHeight = meshOverlay.Height;
float maskImageWidth = faceMaskTexture.width;
float maskImageHeight = faceMaskTexture.height;
TrackedRect tr;
TrackedMesh tm;
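// tracked-rect lifecycle: a mesh is created when a rect is NEW, faded out the longer the face goes undetected, and removed once the tracker marks it DELETED.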
for (int i = 0; i < trackedRects.Count; i++) {
tr = trackedRects [i];
if (tr.state == TrackedState.NEW) {
meshOverlay.CreateObject (tr.id, faceMaskTexture);
}
if (tr.state < TrackedState.DELETED) {
tm = meshOverlay.GetObjectById (tr.id);
Vector3[] vertices = tm.MeshFilter.mesh.vertices;
if (vertices.Length == landmarkPoints [i].Count) {
for (int j = 0; j < vertices.Length; j++) {
vertices [j].x = landmarkPoints [i] [j].x / imageWidth - 0.5f;
vertices [j].y = 0.5f - landmarkPoints [i] [j].y / imageHeight;
}
}
Vector2[] uv = tm.MeshFilter.mesh.uv;
if (uv.Length == faceLandmarkPointsInMask.Count) {
for (int jj = 0; jj < uv.Length; jj++) {
uv [jj].x = faceLandmarkPointsInMask [jj].x / maskImageWidth;
uv [jj].y = (maskImageHeight - faceLandmarkPointsInMask [jj].y) / maskImageHeight;
}
}
meshOverlay.UpdateObject (tr.id, vertices, null, uv);
if (tr.numFramesNotDetected > 3) {
tm.Material.SetFloat (shader_FadeID, 1f);
} else if (tr.numFramesNotDetected > 0 && tr.numFramesNotDetected <= 3) {
tm.Material.SetFloat (shader_FadeID, 0.3f + (0.7f/4f) * tr.numFramesNotDetected);
} else {
tm.Material.SetFloat (shader_FadeID, 0.3f);
}
// filter nonfrontalface.
if (isFilteringNonFrontalFaces && frontalFaceParam.getFrontalFaceRate (landmarkPoints [i]) < frontalFaceRateLowerLimit) {
tm.Material.SetFloat (shader_FadeID, 1f);
}
} else if (tr.state == TrackedState.DELETED) {
meshOverlay.DeleteObject (tr.id);
}
}
} else if (landmarkPoints.Count >= 1) {
float imageWidth = meshOverlay.Width;
float imageHeight = meshOverlay.Height;
float maskImageWidth = texture.width;
float maskImageHeight = texture.height;
TrackedRect tr;
TrackedMesh tm;
for (int i = 0; i < trackedRects.Count; i++) {
tr = trackedRects [i];
if (tr.state == TrackedState.NEW) {
meshOverlay.CreateObject (tr.id, texture);
}
if (tr.state < TrackedState.DELETED) {
tm = meshOverlay.GetObjectById (tr.id);
Vector3[] vertices = tm.MeshFilter.mesh.vertices;
if (vertices.Length == landmarkPoints [i].Count) {
for (int j = 0; j < vertices.Length; j++) {
vertices [j].x = landmarkPoints[i][j].x / imageWidth - 0.5f;
vertices [j].y = 0.5f - landmarkPoints[i][j].y / imageHeight;
}
}
Vector2[] uv = tm.MeshFilter.mesh.uv;
if (uv.Length == landmarkPoints [0].Count) {
for (int jj = 0; jj < uv.Length; jj++) {
uv [jj].x = landmarkPoints[0][jj].x / maskImageWidth;
uv [jj].y = (maskImageHeight - landmarkPoints[0][jj].y) / maskImageHeight;
}
}
meshOverlay.UpdateObject (tr.id, vertices, null, uv);
if (tr.numFramesNotDetected > 3) {
tm.Material.SetFloat (shader_FadeID, 1f);
} else if (tr.numFramesNotDetected > 0 && tr.numFramesNotDetected <= 3) {
tm.Material.SetFloat (shader_FadeID, 0.3f + (0.7f/4f) * tr.numFramesNotDetected);
} else {
tm.Material.SetFloat (shader_FadeID, 0.3f);
}
// filter nonfrontalface.
if (isFilteringNonFrontalFaces && frontalFaceParam.getFrontalFaceRate (landmarkPoints [i]) < frontalFaceRateLowerLimit) {
tm.Material.SetFloat (shader_FadeID, 1f);
}
} else if (tr.state == TrackedState.DELETED) {
meshOverlay.DeleteObject (tr.id);
}
}
}
// draw face rects.
if (isShowingFaceRects) {
for (int i = 0; i < detectResult.Count; i++) {
UnityEngine.Rect rect = new UnityEngine.Rect (detectResult [i].x, detectResult [i].y, detectResult [i].width, detectResult [i].height);
OpenCVForUnityUtils.DrawFaceRect (rgbaMat, rect, new Scalar (255, 0, 0, 255), 2);
}
for (int i = 0; i < trackedRects.Count; i++) {
UnityEngine.Rect rect = new UnityEngine.Rect (trackedRects [i].x, trackedRects [i].y, trackedRects [i].width, trackedRects [i].height);
OpenCVForUnityUtils.DrawFaceRect (rgbaMat, rect, new Scalar (255, 255, 0, 255), 2);
//Imgproc.putText (rgbaMat, " " + frontalFaceParam.getAngleOfFrontalFace (landmarkPoints [i]), new Point (rect.xMin, rect.yMin - 10), Core.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
//Imgproc.putText (rgbaMat, " " + frontalFaceParam.getFrontalFaceRate (landmarkPoints [i]), new Point (rect.xMin, rect.yMin - 10), Core.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
}
}
// draw face points.
if (isShowingDebugFacePoints) {
for (int i = 0; i < landmarkPoints.Count; i++) {
OpenCVForUnityUtils.DrawFaceLandmark (rgbaMat, landmarkPoints [i], new Scalar (0, 255, 0, 255), 2);
}
}
// display face mask image.
if (faceMaskTexture != null && faceMaskMat != null) {
if (isShowingFaceRects) {
OpenCVForUnityUtils.DrawFaceRect (faceMaskMat, faceRectInMask, new Scalar (255, 0, 0, 255), 2);
}
if (isShowingDebugFacePoints) {
OpenCVForUnityUtils.DrawFaceLandmark (faceMaskMat, faceLandmarkPointsInMask, new Scalar (0, 255, 0, 255), 2);
}
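// draw the mask image picture-in-picture at quarter width in the top-right corner of the frame.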
float scale = (rgbaMat.width () / 4f) / faceMaskMat.width ();
float tx = rgbaMat.width () - faceMaskMat.width () * scale;
float ty = 0.0f;
Mat trans = new Mat (2, 3, CvType.CV_32F); // 2x3 affine: [scale, 0, tx; 0, scale, ty]
trans.put (0, 0, scale);
trans.put (0, 1, 0.0f);
trans.put (0, 2, tx);
trans.put (1, 0, 0.0f);
trans.put (1, 1, scale);
trans.put (1, 2, ty);
Imgproc.warpAffine (faceMaskMat, rgbaMat, trans, rgbaMat.size (), Imgproc.INTER_LINEAR, Core.BORDER_TRANSPARENT, new Scalar (0));
}
Imgproc.putText (rgbaMat, "W:" + rgbaMat.width () + " H:" + rgbaMat.height () + " SO:" + Screen.orientation, new Point (5, rgbaMat.rows () - 10), Core.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar (255, 255, 255, 255), 1, Imgproc.LINE_AA, false);
OpenCVForUnity.Utils.matToTexture2D (rgbaMat, texture, colors);
}
}
/// <summary>
/// Raises the disable event.
/// </summary>
void OnDisable ()
{
WebGLFileUploadManager.FileUploadEventHandler -= fileUploadHandler;
WebGLFileUploadManager.Dispose ();
webCamTextureToMatHelper.Dispose ();
if (cascade != null)
cascade.Dispose ();
if (rectangleTracker != null)
rectangleTracker.Dispose ();
if (faceLandmarkDetector != null)
faceLandmarkDetector.Dispose ();
if (frontalFaceParam != null)
frontalFaceParam.Dispose ();
}
/// <summary>
/// Raises the back button event.
/// </summary>
public void OnBackButton ()
{
#if UNITY_5_3 || UNITY_5_3_OR_NEWER
SceneManager.LoadScene ("FaceMask");
#else
Application.LoadLevel ("FaceMask");
#endif
}
/// <summary>
/// Raises the play button event.
/// </summary>
public void OnPlayButton ()
{
webCamTextureToMatHelper.Play ();
}
/// <summary>
/// Raises the pause button event.
/// </summary>
public void OnPauseButton ()
{
webCamTextureToMatHelper.Pause ();
}
/// <summary>
/// Raises the change camera button event.
/// </summary>
public void OnChangeCameraButton ()
{
webCamTextureToMatHelper.Init (null, webCamTextureToMatHelper.requestWidth, webCamTextureToMatHelper.requestHeight, !webCamTextureToMatHelper.requestIsFrontFacing);
}
/// <summary>
/// Raises the is showing face rects toggle event.
/// </summary>
public void OnIsShowingFaceRectsToggle ()
{
isShowingFaceRects = isShowingFaceRectsToggle.isOn;
}
/// <summary>
/// Raises the use Dlib face detector toggle event.
/// </summary>
public void OnUseDlibFaceDetecterToggle ()
{
useDlibFaceDetecter = useDlibFaceDetecterToggle.isOn;
}
/// <summary>
/// Raises the is filtering non frontal faces toggle event.
/// </summary>
public void OnIsFilteringNonFrontalFacesToggle ()
{
isFilteringNonFrontalFaces = isFilteringNonFrontalFacesToggle.isOn;
}
/// <summary>
/// Raises the is showing debug face points toggle event.
/// </summary>
public void OnIsShowingDebugFacePointsToggle ()
{
isShowingDebugFacePoints = isShowingDebugFacePointsToggle.isOn;
}
/// <summary>
/// Raises the set face mask button event.
/// </summary>
public void OnChangeFaceMaskButton ()
{
removeFaceMask ();
ExampleMaskData maskData = ExampleDataSet.GetData();
faceMaskTexture = Resources.Load (maskData.FileName) as Texture2D;
faceMaskMat = new Mat (faceMaskTexture.height, faceMaskTexture.width, CvType.CV_8UC4);
OpenCVForUnity.Utils.texture2DToMat (faceMaskTexture, faceMaskMat);
Debug.Log ("faceMaskMat ToString " + faceMaskMat.ToString ());
if(maskData.LandmarkPoints != null){
faceRectInMask = maskData.FaceRect;
faceLandmarkPointsInMask = maskData.LandmarkPoints;
}else{
faceRectInMask = detectFace (faceMaskMat);
faceLandmarkPointsInMask = detectFaceLandmarkPoints (faceMaskMat, faceRectInMask);
}
ExampleDataSet.Next();
if (faceRectInMask.width == 0 && faceRectInMask.height == 0){
removeFaceMask ();
Debug.Log ("A face could not be detected from the input image.");
}
//dumpRect(faceRectInMask);
//dumpVector2(faceLandmarkPointsInMask);
//dumpVector3(faceLandmarkPointsInMask);
//MeshFilter mf = createFaceMesh(faceMaskTexture.width, faceMaskTexture.height);
//ObjExporter.MeshToFile(mf, "Assets/FaceMaskExample/Resources/FaceMesh.obj");
}
/// <summary>
/// Raises the scan face mask button event.
/// </summary>
public void OnScanFaceMaskButton ()
{
removeFaceMask ();
// Capture webcam frame.
if (webCamTextureToMatHelper.IsPlaying ()) {
Mat rgbaMat = webCamTextureToMatHelper.GetMat ();
faceRectInMask = detectFace (rgbaMat);
if (faceRectInMask.width == 0 && faceRectInMask.height == 0){
Debug.Log ("A face could not be detected from the input image.");
return;
}
OpenCVForUnity.Rect rect = new OpenCVForUnity.Rect((int)faceRectInMask.x, (int)faceRectInMask.y, (int)faceRectInMask.width, (int)faceRectInMask.height);
rect.inflate(rect.width/5, rect.height/5);
rect = rect.intersect(new OpenCVForUnity.Rect(0,0,rgbaMat.width(),rgbaMat.height()));
faceMaskTexture = new Texture2D (rect.width, rect.height, TextureFormat.RGBA32, false);
faceMaskMat = new Mat(rgbaMat, rect).clone ();
OpenCVForUnity.Utils.matToTexture2D(faceMaskMat, faceMaskTexture);
Debug.Log ("faceMaskMat ToString " + faceMaskMat.ToString ());
faceRectInMask = detectFace (faceMaskMat);
faceLandmarkPointsInMask = detectFaceLandmarkPoints (faceMaskMat, faceRectInMask);
if (faceRectInMask.width == 0 && faceRectInMask.height == 0){
removeFaceMask ();
Debug.Log ("A face could not be detected from the input image.");
}
}
}
/// <summary>
/// Raises the upload face mask button event.
/// </summary>
public void OnUploadFaceMaskButton ()
{
WebGLFileUploadManager.PopupDialog (null, "Select frontal face image file (.png|.jpg|.gif)");
}
/// <summary>
/// Raises the remove face mask button event.
/// </summary>
public void OnRemoveFaceMaskButton ()
{
removeFaceMask ();
}
private void removeFaceMask ()
{
faceMaskTexture = null;
if (faceMaskMat != null) {
faceMaskMat.Dispose ();
faceMaskMat = null;
}
rectangleTracker.Reset ();
meshOverlay.Reset ();
}
/// <summary>
/// Files the upload handler.
/// </summary>
/// <param name="result">Result.</param>
private void fileUploadHandler (UploadedFileInfo[] result)
{
if (result.Length == 0) {
Debug.Log ("File upload Error!");
return;
}
removeFaceMask ();
foreach (UploadedFileInfo file in result) {
if (file.isSuccess) {
Debug.Log ("file.filePath: " + file.filePath + " exists:" + File.Exists (file.filePath));
faceMaskTexture = new Texture2D (2, 2);
byte[] byteArray = File.ReadAllBytes (file.filePath);
faceMaskTexture.LoadImage (byteArray);
break;
}
}
if (faceMaskTexture != null) {
faceMaskMat = new Mat (faceMaskTexture.height, faceMaskTexture.width, CvType.CV_8UC4);
OpenCVForUnity.Utils.texture2DToMat (faceMaskTexture, faceMaskMat);
Debug.Log ("faceMaskMat ToString " + faceMaskMat.ToString ());
faceRectInMask = detectFace (faceMaskMat);
faceLandmarkPointsInMask = detectFaceLandmarkPoints (faceMaskMat, faceRectInMask);
if (faceRectInMask.width == 0 && faceRectInMask.height == 0){
removeFaceMask ();
Debug.Log ("A face could not be detected from the input image.");
}
}
}
private UnityEngine.Rect detectFace (Mat mat)
{
if (useDlibFaceDetecter) {
OpenCVForUnityUtils.SetImage (faceLandmarkDetector, mat);
List<UnityEngine.Rect> result = faceLandmarkDetector.Detect ();
if (result.Count >= 1)
return result [0];
} else {
using (Mat grayMat = new Mat ())
using (Mat equalizeHistMat = new Mat ())
using (MatOfRect faces = new MatOfRect ()) {
// convert image to greyscale.
Imgproc.cvtColor (mat, grayMat, Imgproc.COLOR_RGBA2GRAY);
Imgproc.equalizeHist (grayMat, equalizeHistMat);
cascade.detectMultiScale (equalizeHistMat, faces, 1.1f, 2, 0 | Objdetect.CASCADE_SCALE_IMAGE, new OpenCVForUnity.Size (equalizeHistMat.cols () * 0.15, equalizeHistMat.cols () * 0.15), new Size ());
List<OpenCVForUnity.Rect> faceList = faces.toList ();
if (faceList.Count >= 1) {
UnityEngine.Rect r = new UnityEngine.Rect (faceList [0].x, faceList [0].y, faceList [0].width, faceList [0].height);
// Adjust to Dlib's result.
r.y += (int)(r.height * 0.1f);
return r;
}
}
}
return new UnityEngine.Rect ();
}
private List<Vector2> detectFaceLandmarkPoints (Mat mat, UnityEngine.Rect rect)
{
OpenCVForUnityUtils.SetImage (faceLandmarkDetector, mat);
List<Vector2> points = faceLandmarkDetector.DetectLandmark (rect);
return points;
}
/*
private void dumpRect(UnityEngine.Rect rect){
string r = "new Rect(" + rect.x + ", " + rect.y + ", " + rect.width + ", " + rect.height + ")";
Debug.Log ("dumpRect:" + "\n" + r);
}
private void dumpVector2(List<Vector2> points){
string p = "";
int i = 0;
foreach (var item in points) {
p += "new Vector2(" + "" + item.x + ", " + item.y + "),\n";
i++;
}
Debug.Log ("dumpMeshVector2:" + "\n" + p);
}
private void dumpVector3(List<Vector2> points){
string p = "";
int i = 0;
foreach (var item in points) {
//p += ", " + i + ":" + item;
p += "new Vector3(" + "" + item.x + ", " + item.y + "),\n";
i++;
}
Debug.Log ("dumpMeshVector3:" + "\n" + p);
}
private MeshFilter createFaceMesh (float textureWidth, float textureHeight)
{
GameObject newObj = new GameObject("FaceMesh");
MeshFilter meshFilter = newObj.AddComponent<MeshFilter>();
newObj.AddComponent<MeshCollider>();
MeshRenderer meshRenderer = newObj.AddComponent<MeshRenderer>();
meshRenderer.material = new Material(Shader.Find("Hide/FadeShader"));
Vector3[] vertices2 = (Vector3[])vertices.Clone();
for (int j = 0; j < vertices2.Length; j++) {
vertices2 [j].x = vertices2 [j].x - textureWidth/2;
vertices2 [j].y = textureHeight/2 - vertices2 [j].y;
}
//Flip X axis
for (int j = 0; j < vertices2.Length; j++) {
vertices2 [j].x = -vertices2 [j].x;
}
meshFilter.mesh.vertices = vertices2;
// (the vertices and triangles data arrays referenced here were truncated from the original listing)
//Flip X axis
for (int j = 0; j < triangles.Length; j=j+3) {
int a = triangles [j+1];
int b = triangles [j+2];
triangles [j+1] = b;
triangles [j+2] = a;
}
meshFilter.mesh.triangles = triangles;
//uv
Vector2[] uv = new Vector2[68];
for (int j = 0; j < uv.Length; j++) {
uv [j].x = vertices[j].x / textureWidth;
uv [j].y = (textureHeight - vertices[j].y) / textureHeight;
}
meshFilter.mesh.uv = uv;
meshFilter.mesh.RecalculateBounds ();
meshFilter.mesh.RecalculateNormals ();
return meshFilter;
}
*/
}
}
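Finally, the vertex and UV arithmetic at the heart of Update () can be factored out as follows (FaceMeshMappingSketch is an illustrative name; the formulas are exactly those used above):
using UnityEngine;
public static class FaceMeshMappingSketch
{
// landmark pixel -> mesh-local vertex: the display quad is scaled to the
// image size, so local X/Y span [-0.5, 0.5]; image Y grows downwards.
public static Vector3 LandmarkToVertex (Vector2 landmark, float imageWidth, float imageHeight)
{
return new Vector3 (landmark.x / imageWidth - 0.5f, 0.5f - landmark.y / imageHeight, 0f);
}
// mask pixel -> UV: texture V runs bottom-up, hence the height flip.
public static Vector2 MaskPointToUV (Vector2 maskPoint, float maskWidth, float maskHeight)
{
return new Vector2 (maskPoint.x / maskWidth, (maskHeight - maskPoint.y) / maskHeight);
}
}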