diff --git a/Assets/HoloLensWithDlibFaceLandmarkDetectorExample/HoloLensARHeadExample/ARUtils.cs b/Assets/HoloLensWithDlibFaceLandmarkDetectorExample/HoloLensARHeadExample/ARUtils.cs
deleted file mode 100644
index 99632a7..0000000
--- a/Assets/HoloLensWithDlibFaceLandmarkDetectorExample/HoloLensARHeadExample/ARUtils.cs
+++ /dev/null
@@ -1,322 +0,0 @@
-using UnityEngine;
-using System.Collections;
-using System.Collections.Generic;
-using System;
-
-namespace HoloLensWithDlibFaceLandmarkDetectorExample
-{
- public struct PoseData {
- public Vector3 pos;
- public Quaternion rot;
- }
-
-    /// <summary>
-    /// AR utils.
-    /// </summary>
- public class ARUtils
- {
-        /// <summary>
-        /// Converts rvec value to rotation transform.
-        /// </summary>
-        /// <param name="rvec">Rvec.</param>
-        /// <returns>Rotation.</returns>
- public static Quaternion ConvertRvecToRot (double[] rvec)
- {
- Vector3 _rvec = new Vector3((float)rvec[0], (float)rvec[1], (float)rvec[2]);
- float theta = _rvec.magnitude;
- _rvec.Normalize();
-
- // http://stackoverflow.com/questions/12933284/rodrigues-into-eulerangles-and-vice-versa
- return Quaternion.AngleAxis(theta * Mathf.Rad2Deg, _rvec);
- }
-
-        /// <summary>
-        /// Converts tvec value to position transform.
-        /// </summary>
-        /// <param name="tvec">Tvec.</param>
-        /// <returns>Position.</returns>
- public static Vector3 ConvertTvecToPos (double[] tvec)
- {
- return new Vector3((float)tvec[0], (float)tvec[1], (float)tvec[2]);
- }
-
-        /// <summary>
-        /// Converts rvec and tvec value to PoseData.
-        /// </summary>
-        /// <param name="rvec">Rvec.</param>
-        /// <param name="tvec">Tvec.</param>
-        /// <returns>PoseData.</returns>
- public static PoseData ConvertRvecTvecToPoseData (double[] rvec, double[] tvec)
- {
- PoseData data = new PoseData();
- data.pos = ConvertTvecToPos (tvec);
- data.rot = ConvertRvecToRot (rvec);
-
- return data;
- }
-
-        /// <summary>
-        /// Creates pose data dictionary.
-        /// </summary>
-        /// <param name="markerCount">Marker count.</param>
-        /// <param name="ids">Ids.</param>
-        /// <param name="rvecs">Rvecs.</param>
-        /// <param name="tvecs">Tvecs.</param>
-        /// <returns>PoseData dictionary.</returns>
-        public static Dictionary<int, PoseData> CreatePoseDataDict (int markerCount, int[] ids, double[] rvecs, double[] tvecs)
-        {
-            Dictionary<int, PoseData> dict = new Dictionary<int, PoseData> ();
- if (markerCount == 0) return dict;
-
- Vector3 rvec = new Vector3();
- for(int i = 0; i < markerCount; i++) {
- PoseData data = new PoseData();
- data.pos.Set((float)tvecs[i * 3], (float)tvecs[i * 3 + 1], (float)tvecs[i * 3 + 2]);
-
- rvec.Set((float)rvecs[i * 3], (float)rvecs[i * 3 + 1], (float)rvecs[i * 3 + 2]);
- float theta = rvec.magnitude;
- rvec.Normalize();
- data.rot = Quaternion.AngleAxis(theta * Mathf.Rad2Deg, rvec);
-
- dict[ids[i]] = data;
- }
- return dict;
- }
-
-        /// <summary>
-        /// Performs a lowpass check on the position and rotation in newPose, comparing them to oldPose.
-        /// </summary>
-        /// <param name="oldPose">Old PoseData.</param>
-        /// <param name="newPose">New PoseData.</param>
-        /// <param name="posThreshold">Position threshold.</param>
-        /// <param name="rotThreshold">Rotation threshold.</param>
- public static void LowpassPoseData (ref PoseData oldPose, ref PoseData newPose, float posThreshold, float rotThreshold)
- {
- posThreshold *= posThreshold;
-
- float posDiff = (newPose.pos - oldPose.pos).sqrMagnitude;
- float rotDiff = Quaternion.Angle(newPose.rot, oldPose.rot);
-
- if (posDiff < posThreshold) {
- newPose.pos = oldPose.pos;
- }
-
- if (rotDiff < rotThreshold) {
- newPose.rot = oldPose.rot;
- }
- }
-
-        /// <summary>
-        /// Performs a lowpass check on the position and rotation of each marker in newDict, comparing them to those in oldDict.
-        /// </summary>
-        /// <param name="oldDict">Old dictionary.</param>
-        /// <param name="newDict">New dictionary.</param>
-        /// <param name="posThreshold">Position threshold.</param>
-        /// <param name="rotThreshold">Rotation threshold.</param>
-        public static void LowpassPoseDataDict (Dictionary<int, PoseData> oldDict, Dictionary<int, PoseData> newDict, float posThreshold, float rotThreshold)
- {
- posThreshold *= posThreshold;
-
-            List<int> keys = new List<int> (newDict.Keys);
- foreach (int key in keys) {
- if (!oldDict.ContainsKey(key)) continue;
-
- PoseData oldPose = oldDict[key];
- PoseData newPose = newDict[key];
-
- float posDiff = (newPose.pos - oldPose.pos).sqrMagnitude;
- float rotDiff = Quaternion.Angle(newPose.rot, oldPose.rot);
-
- if (posDiff < posThreshold) {
- newPose.pos = oldPose.pos;
- }
-
- if (rotDiff < rotThreshold) {
- newPose.rot = oldPose.rot;
- }
-
- newDict[key] = newPose;
- }
- }
-
-
-        /// <summary>
-        /// Extract translation from transform matrix.
-        /// </summary>
-        /// <param name="matrix">Transform matrix. This parameter is passed by reference
-        /// to improve performance; no changes will be made to it.</param>
-        /// <returns>
-        /// Translation offset.
-        /// </returns>
- public static Vector3 ExtractTranslationFromMatrix (ref Matrix4x4 matrix)
- {
- Vector3 translate;
- translate.x = matrix.m03;
- translate.y = matrix.m13;
- translate.z = matrix.m23;
- return translate;
- }
-
-        /// <summary>
-        /// Extract rotation quaternion from transform matrix.
-        /// </summary>
-        /// <param name="matrix">Transform matrix. This parameter is passed by reference
-        /// to improve performance; no changes will be made to it.</param>
-        /// <returns>
-        /// Quaternion representation of rotation transform.
-        /// </returns>
- public static Quaternion ExtractRotationFromMatrix (ref Matrix4x4 matrix)
- {
- Vector3 forward;
- forward.x = matrix.m02;
- forward.y = matrix.m12;
- forward.z = matrix.m22;
-
- Vector3 upwards;
- upwards.x = matrix.m01;
- upwards.y = matrix.m11;
- upwards.z = matrix.m21;
-
- return Quaternion.LookRotation (forward, upwards);
- }
-
-        /// <summary>
-        /// Extract scale from transform matrix.
-        /// </summary>
-        /// <param name="matrix">Transform matrix. This parameter is passed by reference
-        /// to improve performance; no changes will be made to it.</param>
-        /// <returns>
-        /// Scale vector.
-        /// </returns>
- public static Vector3 ExtractScaleFromMatrix (ref Matrix4x4 matrix)
- {
- Vector3 scale;
- scale.x = new Vector4 (matrix.m00, matrix.m10, matrix.m20, matrix.m30).magnitude;
- scale.y = new Vector4 (matrix.m01, matrix.m11, matrix.m21, matrix.m31).magnitude;
- scale.z = new Vector4 (matrix.m02, matrix.m12, matrix.m22, matrix.m32).magnitude;
- return scale;
- }
-
-        /// <summary>
-        /// Extract position, rotation and scale from TRS matrix.
-        /// </summary>
-        /// <param name="matrix">Transform matrix. This parameter is passed by reference
-        /// to improve performance; no changes will be made to it.</param>
-        /// <param name="localPosition">Output position.</param>
-        /// <param name="localRotation">Output rotation.</param>
-        /// <param name="localScale">Output scale.</param>
- public static void DecomposeMatrix (ref Matrix4x4 matrix, out Vector3 localPosition, out Quaternion localRotation, out Vector3 localScale)
- {
- localPosition = ExtractTranslationFromMatrix (ref matrix);
- localRotation = ExtractRotationFromMatrix (ref matrix);
- localScale = ExtractScaleFromMatrix (ref matrix);
- }
-
-        /// <summary>
-        /// Set transform component from TRS matrix.
-        /// </summary>
-        /// <param name="transform">Transform component.</param>
-        /// <param name="matrix">Transform matrix. This parameter is passed by reference
-        /// to improve performance; no changes will be made to it.</param>
- public static void SetTransformFromMatrix (Transform transform, ref Matrix4x4 matrix)
- {
- transform.localPosition = ExtractTranslationFromMatrix (ref matrix);
- transform.localRotation = ExtractRotationFromMatrix (ref matrix);
- transform.localScale = ExtractScaleFromMatrix (ref matrix);
- }
-
-        /// <summary>
-        /// Calculate projection matrix from camera matrix values.
-        /// </summary>
-        /// <param name="fx">Focal length x.</param>
-        /// <param name="fy">Focal length y.</param>
-        /// <param name="cx">Image center point x. (principal point x)</param>
-        /// <param name="cy">Image center point y. (principal point y)</param>
-        /// <param name="width">Image width.</param>
-        /// <param name="height">Image height.</param>
-        /// <param name="near">The near clipping plane distance.</param>
-        /// <param name="far">The far clipping plane distance.</param>
-        /// <returns>
-        /// Projection matrix.
-        /// </returns>
- public static Matrix4x4 CalculateProjectionMatrixFromCameraMatrixValues (float fx, float fy, float cx, float cy, float width, float height, float near, float far)
- {
- Matrix4x4 projectionMatrix = new Matrix4x4 ();
- projectionMatrix.m00 = 2.0f * fx / width;
- projectionMatrix.m02 = 1.0f - 2.0f * cx / width;
- projectionMatrix.m11 = 2.0f * fy / height;
- projectionMatrix.m12 = - 1.0f + 2.0f * cy / height;
- projectionMatrix.m22 = -(far + near) / (far - near);
- projectionMatrix.m23 = -2.0f * far * near / (far - near);
- projectionMatrix.m32 = -1.0f;
-
- return projectionMatrix;
- }
-
-        /// <summary>
-        /// Calculate camera matrix values from projection matrix.
-        /// </summary>
-        /// <param name="projectionMatrix">Projection matrix.</param>
-        /// <param name="width">Image width.</param>
-        /// <param name="height">Image height.</param>
-        /// <param name="fovV">Vertical field of view.</param>
-        /// <returns>
-        /// Camera matrix values. (fx = matrix.m00, fy = matrix.m11, cx = matrix.m02, cy = matrix.m12)
-        /// </returns>
- public static Matrix4x4 CameraMatrixValuesFromCalculateProjectionMatrix (Matrix4x4 projectionMatrix, float width, float height, float fovV)
- {
- float fovH = 2.0f * Mathf.Atan (width/height * Mathf.Tan (fovV*Mathf.Deg2Rad / 2.0f)) * Mathf.Rad2Deg;
-
- Matrix4x4 cameraMatrix = new Matrix4x4 ();
- cameraMatrix.m00 = CalculateDistance (width, fovH);
- cameraMatrix.m02 = -((projectionMatrix.m02*width - width) / 2);
- cameraMatrix.m11 = CalculateDistance (height, fovV);
- cameraMatrix.m12 = (projectionMatrix.m12*height + height) / 2;
- cameraMatrix.m22 = 1.0f;
-
- return cameraMatrix;
- }
-
-        /// <summary>
-        /// Calculate frustum size.
-        /// https://docs.unity3d.com/Manual/FrustumSizeAtDistance.html
-        /// </summary>
-        /// <param name="distance">Distance.</param>
-        /// <param name="fov">Field of view. (horizontal or vertical direction)</param>
-        /// <returns>
-        /// Frustum height.
-        /// </returns>
- public static float CalculateFrustumSize (float distance, float fov)
- {
- return 2.0f * distance * Mathf.Tan(fov * 0.5f * Mathf.Deg2Rad);
- }
-
-        /// <summary>
-        /// Calculate distance.
-        /// https://docs.unity3d.com/Manual/FrustumSizeAtDistance.html
-        /// </summary>
-        /// <param name="frustumSize">One side size of a frustum.</param>
-        /// <param name="fov">Field of view. (horizontal or vertical direction)</param>
-        /// <returns>
-        /// Distance.
-        /// </returns>
- public static float CalculateDistance (float frustumSize, float fov)
- {
- return frustumSize * 0.5f / Mathf.Tan(fov * 0.5f * Mathf.Deg2Rad);
- }
-
-        /// <summary>
-        /// Calculate FOV angle.
-        /// https://docs.unity3d.com/Manual/FrustumSizeAtDistance.html
-        /// </summary>
-        /// <param name="frustumSize">One side size of a frustum.</param>
-        /// <param name="distance">Distance.</param>
-        /// <returns>
-        /// FOV angle.
-        /// </returns>
- public static float CalculateFOVAngle (float frustumSize, float distance)
- {
- return 2.0f * Mathf.Atan (frustumSize * 0.5f / distance) * Mathf.Rad2Deg;
- }
- }
-}
\ No newline at end of file
diff --git a/Assets/HoloLensWithDlibFaceLandmarkDetectorExample/HoloLensARHeadExample/ARUtils.cs.meta b/Assets/HoloLensWithDlibFaceLandmarkDetectorExample/HoloLensARHeadExample/ARUtils.cs.meta
deleted file mode 100644
index 33f5f36..0000000
--- a/Assets/HoloLensWithDlibFaceLandmarkDetectorExample/HoloLensARHeadExample/ARUtils.cs.meta
+++ /dev/null
@@ -1,12 +0,0 @@
-fileFormatVersion: 2
-guid: b52b0ecda5ea1aa479a495ba4658d80f
-timeCreated: 1493991979
-licenseType: Free
-MonoImporter:
- serializedVersion: 2
- defaultReferences: []
- executionOrder: 0
- icon: {instanceID: 0}
- userData:
- assetBundleName:
- assetBundleVariant:
diff --git a/Assets/HoloLensWithDlibFaceLandmarkDetectorExample/HoloLensARHeadExample/HoloLensARHeadExample.cs b/Assets/HoloLensWithDlibFaceLandmarkDetectorExample/HoloLensARHeadExample/HoloLensARHeadExample.cs
index f338c94..693a2f9 100644
--- a/Assets/HoloLensWithDlibFaceLandmarkDetectorExample/HoloLensARHeadExample/HoloLensARHeadExample.cs
+++ b/Assets/HoloLensWithDlibFaceLandmarkDetectorExample/HoloLensARHeadExample/HoloLensARHeadExample.cs
@@ -1,18 +1,19 @@
using UnityEngine;
+using UnityEngine.UI;
using System;
-using System.Collections;
+using System.Threading;
using System.Collections.Generic;
-using UnityEngine.UI;
+using HoloToolkit.Unity.InputModule;
using OpenCVForUnity.RectangleTrack;
-using System.Threading;
-using UnityEngine.EventSystems;
-
-#if UNITY_5_3 || UNITY_5_3_OR_NEWER
-using UnityEngine.SceneManagement;
-#endif
-using OpenCVForUnity;
-using Rect = OpenCVForUnity.Rect;
+using OpenCVForUnity.UnityUtils;
+using OpenCVForUnity.CoreModule;
+using OpenCVForUnity.UnityUtils.Helper;
+using OpenCVForUnity.ObjdetectModule;
+using OpenCVForUnity.Calib3dModule;
+using OpenCVForUnity.ImgprocModule;
using DlibFaceLandmarkDetector;
+using Rect = OpenCVForUnity.CoreModule.Rect;
+using HoloLensWithOpenCVForUnity.UnityUtils.Helper;
namespace HoloLensWithDlibFaceLandmarkDetectorExample
{
@@ -58,6 +59,16 @@ public class HoloLensARHeadExample : ExampleSceneBase
        /// </summary>
public Toggle useSeparateDetectionToggle;
+        /// <summary>
+        /// Determines if the OpenCV FaceDetector is used for face detection.
+        /// </summary>
+        public bool useOpenCVDetector;
+
+        /// <summary>
+        /// The use OpenCV FaceDetector toggle.
+        /// </summary>
+        public Toggle useOpenCVDetectorToggle;
+
        /// <summary>
        /// The min detection size ratio.
        /// </summary>
@@ -213,7 +224,22 @@ public class HoloLensARHeadExample : ExampleSceneBase
        /// <summary>
        /// The 3d face object points.
        /// </summary>
- MatOfPoint3f objectPoints;
+ MatOfPoint3f objectPoints68;
+
+        /// <summary>
+        /// The 3d face object points (17 landmark model).
+        /// </summary>
+        MatOfPoint3f objectPoints17;
+
+        /// <summary>
+        /// The 3d face object points (6 landmark model).
+        /// </summary>
+        MatOfPoint3f objectPoints6;
+
+        /// <summary>
+        /// The 3d face object points (5 landmark model).
+        /// </summary>
+        MatOfPoint3f objectPoints5;
        /// <summary>
        /// The image points.
@@ -230,11 +256,6 @@ public class HoloLensARHeadExample : ExampleSceneBase
        /// </summary>
Mat tvec;
-        /// <summary>
-        /// The rot mat.
-        /// </summary>
-        Mat rotMat;
-
        /// <summary>
        /// The webcam texture to mat helper.
        /// </summary>
@@ -263,13 +284,35 @@ public class HoloLensARHeadExample : ExampleSceneBase
        /// <summary>
        /// The detection result.
        /// </summary>
- MatOfRect detectionResult;
+        List<Rect> detectionResult = new List<Rect>();
        /// <summary>
        /// The face landmark detector.
        /// </summary>
FaceLandmarkDetector faceLandmarkDetector;
+        /// <summary>
+        /// The dlib shape predictor file name.
+        /// </summary>
+        string dlibShapePredictorFileName = "sp_human_face_68.dat";
+
+        /// <summary>
+        /// The dlib shape predictor file path.
+        /// </summary>
+        string dlibShapePredictorFilePath;
+
+ #if WINDOWS_UWP && !DISABLE_HOLOLENSCAMSTREAM_API
+ int CVTCOLOR_CODE = Imgproc.COLOR_BGRA2GRAY;
+ Scalar COLOR_RED = new Scalar(0, 0, 255, 255);
+ Scalar COLOR_GREEN = new Scalar(0, 255, 0, 255);
+ Scalar COLOR_BLUE = new Scalar(255, 0, 0, 255);
+ #else
+ int CVTCOLOR_CODE = Imgproc.COLOR_RGBA2GRAY;
+ Scalar COLOR_RED = new Scalar(255, 0, 0, 255);
+ Scalar COLOR_GREEN = new Scalar(0, 255, 0, 255);
+ Scalar COLOR_BLUE = new Scalar(0, 0, 255, 255);
+ #endif
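+        // Note: on UWP, HololensCameraStream delivers frames in BGRA order, while the
+        // WebCamTexture path elsewhere uses RGBA, so the cvtColor code and the drawing
+        // colors above are selected per channel order to keep rendered colors consistent.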
+
        // The camera matrix values of the HoloLens camera at 896x504 resolution.
// For details on the camera matrix, please refer to this page. (http://docs.opencv.org/2.4/modules/calib3d/doc/camera_calibration_and_3d_reconstruction.html)
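        // The 3x3 intrinsic matrix built below has the standard pinhole layout:
        //     | fx  0  cx |
        // K = |  0  fy cy |
        //     |  0  0   1 |
        // (fx/fy are focal lengths and cx/cy the principal point, all in pixels.)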
@@ -288,6 +331,7 @@ public class HoloLensARHeadExample : ExampleSceneBase
Mat grayMat4Thread;
CascadeClassifier cascade4Thread;
+ FaceLandmarkDetector faceLandmarkDetector4Thread;
        readonly static Queue<Action> ExecuteOnMainThread = new Queue<Action>();
System.Object sync = new System.Object ();
@@ -302,7 +346,6 @@ bool isThreadRunning {
RectangleTracker rectangleTracker;
float coeffTrackingWindowSize = 2.0f;
float coeffObjectSizeToTrack = 0.85f;
- Rect[] rectsWhereRegions;
        List<Rect> detectedObjectsInRegions = new List<Rect> ();
        List<Rect> resultObjects = new List<Rect> ();
@@ -327,41 +370,85 @@ protected override void Start ()
{
base.Start ();
+            imageOptimizationHelper = gameObject.GetComponent<ImageOptimizationHelper> ();
+            webCamTextureToMatHelper = gameObject.GetComponent<HololensCameraStreamToMatHelper> ();
+ #if WINDOWS_UWP && !DISABLE_HOLOLENSCAMSTREAM_API
+ webCamTextureToMatHelper.frameMatAcquired += OnFrameMatAcquired;
+ #endif
+ webCamTextureToMatHelper.Initialize ();
+
+ rectangleTracker = new RectangleTracker();
+
+ dlibShapePredictorFileName = HoloLensWithDlibFaceLandmarkDetectorExample.dlibShapePredictorFileName;
+ dlibShapePredictorFilePath = DlibFaceLandmarkDetector.UnityUtils.Utils.getFilePath(dlibShapePredictorFileName);
+ if (string.IsNullOrEmpty(dlibShapePredictorFilePath))
+ {
+ Debug.LogError("shape predictor file does not exist. Please copy from “DlibFaceLandmarkDetector/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
+ }
+ faceLandmarkDetector = new FaceLandmarkDetector (dlibShapePredictorFilePath);
+
+ dlibShapePredictorFilePath = DlibFaceLandmarkDetector.UnityUtils.Utils.getFilePath("sp_human_face_6.dat");
+ if (string.IsNullOrEmpty(dlibShapePredictorFilePath))
+ {
+ Debug.LogError("shape predictor file does not exist. Please copy from “DlibFaceLandmarkDetector/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
+ }
+ faceLandmarkDetector4Thread = new FaceLandmarkDetector(dlibShapePredictorFilePath);
+
+
displayCameraPreviewToggle.isOn = displayCameraPreview;
useSeparateDetectionToggle.isOn = useSeparateDetection;
+ useOpenCVDetectorToggle.isOn = useOpenCVDetector;
displayAxesToggle.isOn = displayAxes;
displayHeadToggle.isOn = displayHead;
displayEffectsToggle.isOn = displayEffects;
enableOpticalFlowFilterToggle.isOn = enableOpticalFlowFilter;
enableLowPassFilterToggle.isOn = enableLowPassFilter;
-            imageOptimizationHelper = gameObject.GetComponent<ImageOptimizationHelper> ();
-            webCamTextureToMatHelper = gameObject.GetComponent<HololensCameraStreamToMatHelper> ();
- #if NETFX_CORE && !DISABLE_HOLOLENSCAMSTREAM_API
- webCamTextureToMatHelper.frameMatAcquired += OnFrameMatAcquired;
- #endif
- webCamTextureToMatHelper.Initialize ();
- rectangleTracker = new RectangleTracker ();
- faceLandmarkDetector = new FaceLandmarkDetector (DlibFaceLandmarkDetector.Utils.getFilePath ("sp_human_face_68.dat"));
-// faceLandmarkDetector = new FaceLandmarkDetector (DlibFaceLandmarkDetector.Utils.getFilePath ("sp_human_face_68_for_mobile.dat"));
-
- // The coordinates of the detection object on the real world space connected with the pixel coordinates.(mm)
- objectPoints = new MatOfPoint3f (
- new Point3 (-34, 90, 83),//l eye (Interpupillary breadth)
- new Point3 (34, 90, 83),//r eye (Interpupillary breadth)
- new Point3 (0.0, 50, 120),//nose (Nose top)
- new Point3 (-26, 15, 83),//l mouse (Mouth breadth)
- new Point3 (26, 15, 83),//r mouse (Mouth breadth)
- new Point3 (-79, 90, 0.0),//l ear (Bitragion breadth)
- new Point3 (79, 90, 0.0)//r ear (Bitragion breadth)
+ // set 3d face object points.
+ objectPoints68 = new MatOfPoint3f(
+ new Point3(-34, 90, 83),//l eye (Interpupillary breadth)
+ new Point3(34, 90, 83),//r eye (Interpupillary breadth)
+ new Point3(0.0, 50, 117),//nose (Tip)
+ new Point3(0.0, 32, 97),//nose (Subnasale)
+ new Point3(-79, 90, 10),//l ear (Bitragion breadth)
+ new Point3(79, 90, 10)//r ear (Bitragion breadth)
+ );
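+            // These 3d coordinates are rough anthropometric measurements of an average
+            // adult head, in millimeters; solvePnP matches them to the corresponding
+            // detected 2d landmarks to estimate the head pose (rvec/tvec) in camera space.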
+
+ objectPoints17 = new MatOfPoint3f(
+ new Point3(-34, 90, 83),//l eye (Interpupillary breadth)
+ new Point3(34, 90, 83),//r eye (Interpupillary breadth)
+ new Point3(0.0, 50, 117),//nose (Tip)
+ new Point3(0.0, 32, 97),//nose (Subnasale)
+ new Point3(-79, 90, 10),//l ear (Bitragion breadth)
+ new Point3(79, 90, 10)//r ear (Bitragion breadth)
+ );
+
+ objectPoints6 = new MatOfPoint3f(
+ new Point3(-34, 90, 83),//l eye (Interpupillary breadth)
+ new Point3(34, 90, 83),//r eye (Interpupillary breadth)
+ new Point3(0.0, 50, 117),//nose (Tip)
+ new Point3(0.0, 32, 97)//nose (Subnasale)
+ );
+
+ objectPoints5 = new MatOfPoint3f(
+ new Point3(-23, 90, 83),//l eye (Inner corner of the eye)
+ new Point3(23, 90, 83),//r eye (Inner corner of the eye)
+ new Point3(-50, 90, 80),//l eye (Tail of the eye)
+ new Point3(50, 90, 80),//r eye (Tail of the eye)
+ new Point3(0.0, 32, 97)//nose (Subnasale)
);
+            // adjust object points to the scale of real world space (millimeters to meters).
+            AdjustPointScale(objectPoints68, 0.001);
+            AdjustPointScale(objectPoints17, 0.001);
+            AdjustPointScale(objectPoints6, 0.001);
+            AdjustPointScale(objectPoints5, 0.001);
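+            // Since the model points are now expressed in meters, solvePnP returns tvec
+            // directly in meters, i.e. in Unity world-space units.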
+
imagePoints = new MatOfPoint2f ();
- rotMat = new Mat (3, 3, CvType.CV_64FC1);
opticalFlowFilter = new OFPointsFilter ((int)faceLandmarkDetector.GetShapePredictorNumParts());
- opticalFlowFilter.diffDlib /= imageOptimizationHelper.downscaleRatio;
+ opticalFlowFilter.diffCheckSensitivity /= imageOptimizationHelper.downscaleRatio;
}
        /// <summary>
@@ -371,20 +458,20 @@ public void OnWebCamTextureToMatHelperInitialized ()
{
Debug.Log ("OnWebCamTextureToMatHelperInitialized");
- Mat webCamTextureMat = imageOptimizationHelper.GetDownScaleMat(webCamTextureToMatHelper.GetMat ());
-
- Debug.Log ("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);
+ Mat webCamTextureMat = webCamTextureToMatHelper.GetMat();
float width = webCamTextureMat.width();
float height = webCamTextureMat.height();
- #if NETFX_CORE && !DISABLE_HOLOLENSCAMSTREAM_API
+ #if WINDOWS_UWP && !DISABLE_HOLOLENSCAMSTREAM_API
// HololensCameraStream always returns image data in BGRA format.
texture = new Texture2D ((int)width, (int)height, TextureFormat.BGRA32, false);
#else
texture = new Texture2D ((int)width, (int)height, TextureFormat.RGBA32, false);
#endif
+ Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);
+
            previewQuad.GetComponent<Renderer> ().material.mainTexture = texture;
previewQuad.transform.localScale = new Vector3 (1, height/width, 1);
previewQuad.SetActive (displayCameraPreview);
@@ -392,8 +479,8 @@ public void OnWebCamTextureToMatHelperInitialized ()
double fx = this.fx;
double fy = this.fy;
- double cx = this.cx / imageOptimizationHelper.downscaleRatio;
- double cy = this.cy / imageOptimizationHelper.downscaleRatio;
+ double cx = this.cx;
+ double cy = this.cy;
camMatrix = new Mat (3, 3, CvType.CV_64FC1);
camMatrix.put (0, 0, fx);
@@ -455,25 +542,28 @@ public void OnWebCamTextureToMatHelperInitialized ()
webCamTextureToMatHelper.flipHorizontal = true;
}
+
grayMat = new Mat ();
cascade = new CascadeClassifier ();
- cascade.load (OpenCVForUnity.Utils.getFilePath ("lbpcascade_frontalface.xml"));
-
+ cascade.load (Utils.getFilePath ("lbpcascade_frontalface.xml"));
+ #if !UNITY_WSA_10_0 || UNITY_EDITOR
// "empty" method is not working on the UWP platform.
- // if (cascade.empty ()) {
- // Debug.LogError ("cascade file is not loaded.Please copy from “OpenCVForUnity/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
- // }
+ if (cascade.empty())
+ {
+ Debug.LogError("cascade file is not loaded. Please copy from “OpenCVForUnity/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
+ }
+ #endif
grayMat4Thread = new Mat ();
cascade4Thread = new CascadeClassifier ();
- cascade4Thread.load (OpenCVForUnity.Utils.getFilePath ("haarcascade_frontalface_alt.xml"));
-
+ cascade4Thread.load (Utils.getFilePath ("haarcascade_frontalface_alt.xml"));
+ #if !UNITY_WSA_10_0 || UNITY_EDITOR
// "empty" method is not working on the UWP platform.
- // if (cascade4Thread.empty ()) {
- // Debug.LogError ("cascade file is not loaded.Please copy from “OpenCVForUnity/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
- // }
-
- detectionResult = new MatOfRect ();
+ if (cascade4Thread.empty())
+ {
+ Debug.LogError("cascade file is not loaded. Please copy from “OpenCVForUnity/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
+ }
+ #endif
}
        /// <summary>
@@ -484,7 +574,7 @@ public void OnWebCamTextureToMatHelperDisposed ()
Debug.Log ("OnWebCamTextureToMatHelperDisposed");
StopThread ();
- lock (sync) {
+ lock (ExecuteOnMainThread) {
ExecuteOnMainThread.Clear ();
}
@@ -527,75 +617,88 @@ public void OnWebCamTextureToMatHelperErrorOccurred(WebCamTextureToMatHelper.Err
Debug.Log ("OnWebCamTextureToMatHelperErrorOccurred " + errorCode);
}
- #if NETFX_CORE && !DISABLE_HOLOLENSCAMSTREAM_API
+ #if WINDOWS_UWP && !DISABLE_HOLOLENSCAMSTREAM_API
public void OnFrameMatAcquired (Mat bgraMat, Matrix4x4 projectionMatrix, Matrix4x4 cameraToWorldMatrix)
{
- Mat downScaleFrameMat = imageOptimizationHelper.GetDownScaleMat(bgraMat);
-
- Imgproc.cvtColor (downScaleFrameMat, grayMat, Imgproc.COLOR_BGRA2GRAY);
- Imgproc.equalizeHist (grayMat, grayMat);
+ Imgproc.cvtColor(bgraMat, grayMat, CVTCOLOR_CODE);
+
+ Mat downScaleGrayMat = imageOptimizationHelper.GetDownScaleMat(grayMat);
+
+ if (useOpenCVDetector)
+ Imgproc.equalizeHist(downScaleGrayMat, downScaleGrayMat);
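+            // Histogram equalization benefits the Haar/LBP cascade detector; the dlib
+            // detector path runs on the unequalized grayscale image.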
if (enableDetection && !isDetecting ) {
isDetecting = true;
- grayMat.copyTo (grayMat4Thread);
+ downScaleGrayMat.copyTo(grayMat4Thread);
System.Threading.Tasks.Task.Run(() => {
isThreadRunning = true;
- DetectObject ();
+
+ if (useOpenCVDetector)
+ {
+ DetectObject(grayMat4Thread, out detectionResult, cascade4Thread, true);
+ }
+ else
+ {
+ DetectObject(grayMat4Thread, out detectionResult, faceLandmarkDetector4Thread);
+ }
+
isThreadRunning = false;
OnDetectionDone ();
});
}
- OpenCVForUnityUtils.SetImage (faceLandmarkDetector, grayMat);
-
- Mat bgraMat4preview = null;
- if (displayCameraPreview) {
- bgraMat4preview = new Mat ();
- downScaleFrameMat.copyTo (bgraMat4preview);
- }
-
            List<Vector2> points = null;
- Rect[] rects;
if (!useSeparateDetection) {
if (hasUpdatedDetectionResult) {
hasUpdatedDetectionResult = false;
lock (rectangleTracker) {
- rectangleTracker.UpdateTrackedObjects (detectionResult.toList ());
+ rectangleTracker.UpdateTrackedObjects (detectionResult);
}
}
lock (rectangleTracker) {
rectangleTracker.GetObjects (resultObjects, true);
}
- rects = resultObjects.ToArray ();
- if(rects.Length > 0){
+ if(resultObjects.Count > 0) {
- OpenCVForUnity.Rect rect = rects [0];
+ // set original size image
+ OpenCVForUnityUtils.SetImage(faceLandmarkDetector, grayMat);
- // correct the deviation of the detection result of the face rectangle of OpenCV and Dlib.
- rect.y += (int)(rect.height * 0.1f);
+ Rect rect = resultObjects[0];
- //detect landmark points
+ // restore to original size rect
+ float downscaleRatio = imageOptimizationHelper.downscaleRatio;
+ rect.x = (int)(rect.x * downscaleRatio);
+ rect.y = (int)(rect.y * downscaleRatio);
+ rect.width = (int)(rect.width * downscaleRatio);
+ rect.height = (int)(rect.height * downscaleRatio);
+
+ // detect face landmark points
points = faceLandmarkDetector.DetectLandmark (new UnityEngine.Rect (rect.x, rect.y, rect.width, rect.height));
if (enableOpticalFlowFilter) {
- opticalFlowFilter.Process (bgraMat, points, points, false);
+ opticalFlowFilter.Process (grayMat, points, points, false);
}
- if (displayCameraPreview && bgraMat4preview != null) {
- //draw landmark points
- OpenCVForUnityUtils.DrawFaceLandmark (bgraMat4preview, points, new Scalar (0, 255, 0, 255), 2);
+ if (displayCameraPreview) {
+ // draw landmark points
+ OpenCVForUnityUtils.DrawFaceLandmark (bgraMat, points, COLOR_GREEN, 2);
+
+ // draw face rect
+ OpenCVForUnityUtils.DrawFaceRect(bgraMat, new UnityEngine.Rect(rect.x, rect.y, rect.width, rect.height), COLOR_RED, 2);
}
}
}else {
+ Rect[] rectsWhereRegions;
+
if (hasUpdatedDetectionResult) {
hasUpdatedDetectionResult = false;
@@ -604,72 +707,120 @@ public void OnFrameMatAcquired (Mat bgraMat, Matrix4x4 projectionMatrix, Matrix4
//}, true);
lock (rectangleTracker) {
- rectsWhereRegions = detectionResult.toArray ();
+ rectsWhereRegions = detectionResult.ToArray ();
}
- } else {
+ if (displayCameraPreview)
+ DrawDownScaleFaceRects(bgraMat, rectsWhereRegions, imageOptimizationHelper.downscaleRatio, COLOR_BLUE, 1);
+ }
+ else {
//UnityEngine.WSA.Application.InvokeOnAppThread (() => {
// Debug.Log("process: get rectsWhereRegions from previous positions");
//}, true);
- lock (rectangleTracker) {
- rectsWhereRegions = rectangleTracker.CreateCorrectionBySpeedOfRects ();
- }
+ if (useOpenCVDetector)
+ {
+ lock (rectangleTracker)
+ {
+ rectsWhereRegions = rectangleTracker.CreateCorrectionBySpeedOfRects();
+ }
+ }
+ else
+ {
+ lock (rectangleTracker)
+ {
+ rectsWhereRegions = rectangleTracker.CreateRawRects();
+ }
+ }
+
+ if (displayCameraPreview)
+ DrawDownScaleFaceRects(bgraMat, rectsWhereRegions, imageOptimizationHelper.downscaleRatio, COLOR_GREEN, 1);
}
detectedObjectsInRegions.Clear ();
- if (rectsWhereRegions.Length > 0) {
- int len = rectsWhereRegions.Length;
- for (int i = 0; i < len; i++) {
- DetectInRegion (grayMat, rectsWhereRegions [i], detectedObjectsInRegions);
+ int len = rectsWhereRegions.Length;
+ for (int i = 0; i < len; i++)
+ {
+ if (useOpenCVDetector)
+ {
+ DetectInRegion(downScaleGrayMat, rectsWhereRegions[i], detectedObjectsInRegions, cascade, true);
+ }
+ else
+ {
+ DetectInRegion(downScaleGrayMat, rectsWhereRegions[i], detectedObjectsInRegions, faceLandmarkDetector);
}
- }
+ }
lock (rectangleTracker) {
rectangleTracker.UpdateTrackedObjects (detectedObjectsInRegions);
- rectangleTracker.GetObjects (resultObjects, true);
+ rectangleTracker.GetObjects (resultObjects, false);
}
if(resultObjects.Count > 0) {
- OpenCVForUnity.Rect rect = resultObjects [0];
+ // set original size image
+ OpenCVForUnityUtils.SetImage(faceLandmarkDetector, grayMat);
- // correct the deviation of the detection result of the face rectangle of OpenCV and Dlib.
- rect.y += (int)(rect.height * 0.1f);
+ Rect rect = resultObjects [0];
+
+ // restore to original size rect
+ float downscaleRatio = imageOptimizationHelper.downscaleRatio;
+ rect.x = (int)(rect.x * downscaleRatio);
+ rect.y = (int)(rect.y * downscaleRatio);
+ rect.width = (int)(rect.width * downscaleRatio);
+ rect.height = (int)(rect.height * downscaleRatio);
- //detect landmark points
+ // detect face landmark points
points = faceLandmarkDetector.DetectLandmark (new UnityEngine.Rect (rect.x, rect.y, rect.width, rect.height));
if (enableOpticalFlowFilter) {
- opticalFlowFilter.Process (bgraMat, points, points, false);
+ opticalFlowFilter.Process (grayMat, points, points, false);
}
- if (displayCameraPreview && bgraMat4preview != null) {
- //draw landmark points
- OpenCVForUnityUtils.DrawFaceLandmark (bgraMat4preview, points, new Scalar (0, 255, 0, 255), 2);
+ if (displayCameraPreview) {
+ // draw landmark points
+ OpenCVForUnityUtils.DrawFaceLandmark (bgraMat, points, COLOR_GREEN, 2);
+
+ // draw face rect
+ OpenCVForUnityUtils.DrawFaceRect(bgraMat, new UnityEngine.Rect(rect.x, rect.y, rect.width, rect.height), COLOR_RED, 2);
}
}
- }
-
+ }
- UnityEngine.WSA.Application.InvokeOnAppThread(() => {
+ Enqueue(() => {
if (!webCamTextureToMatHelper.IsPlaying ()) return;
- if (displayCameraPreview && bgraMat4preview != null) {
- OpenCVForUnity.Utils.fastMatToTexture2D(bgraMat4preview, texture);
+ if (displayCameraPreview) {
+ Utils.fastMatToTexture2D(bgraMat, texture);
}
- if (points != null){
- UpdateARHeadTransform (points, cameraToWorldMatrix);
+ if (points != null) {
+ UpdateARHeadTransform (points, cameraToWorldMatrix, cameraToWorldMatrix.inverse, projectionMatrix);
}
bgraMat.Dispose ();
- if (bgraMat4preview != null){
- bgraMat4preview.Dispose();
+
+ });
+ }
+
+ private void Update()
+ {
+ lock (ExecuteOnMainThread)
+ {
+ while (ExecuteOnMainThread.Count > 0)
+ {
+ ExecuteOnMainThread.Dequeue().Invoke();
}
+ }
+ }
- }, false);
+ private void Enqueue(Action action)
+ {
+ lock (ExecuteOnMainThread)
+ {
+ ExecuteOnMainThread.Enqueue(action);
+ }
}
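+
+        // Unity objects (textures, transforms) may only be accessed on the main thread,
+        // and OnFrameMatAcquired fires on a background capture thread, so work is
+        // marshalled through the ExecuteOnMainThread queue and drained in Update() above.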
#else
@@ -677,7 +828,7 @@ public void OnFrameMatAcquired (Mat bgraMat, Matrix4x4 projectionMatrix, Matrix4
// Update is called once per frame
void Update ()
{
- lock (sync) {
+ lock (ExecuteOnMainThread) {
while (ExecuteOnMainThread.Count > 0) {
ExecuteOnMainThread.Dequeue ().Invoke ();
}
@@ -685,204 +836,362 @@ void Update ()
if (webCamTextureToMatHelper.IsPlaying () && webCamTextureToMatHelper.DidUpdateThisFrame ()) {
- Mat rgbaMat = imageOptimizationHelper.GetDownScaleMat(webCamTextureToMatHelper.GetMat ());
+ Mat rgbaMat = webCamTextureToMatHelper.GetMat();
+ Imgproc.cvtColor(rgbaMat, grayMat, CVTCOLOR_CODE);
- Imgproc.cvtColor (rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);
- Imgproc.equalizeHist (grayMat, grayMat);
+ Mat downScaleGrayMat = imageOptimizationHelper.GetDownScaleMat(grayMat);
+
+ if (useOpenCVDetector)
+ Imgproc.equalizeHist(downScaleGrayMat, downScaleGrayMat);
if (enableDetection && !isDetecting ) {
isDetecting = true;
- grayMat.copyTo (grayMat4Thread);
+ downScaleGrayMat.copyTo(grayMat4Thread);
StartThread (ThreadWorker);
}
- OpenCVForUnityUtils.SetImage (faceLandmarkDetector, grayMat);
-
- Rect[] rects;
if (!useSeparateDetection) {
if (hasUpdatedDetectionResult)
{
hasUpdatedDetectionResult = false;
- rectangleTracker.UpdateTrackedObjects (detectionResult.toList());
+ rectangleTracker.UpdateTrackedObjects (detectionResult);
}
rectangleTracker.GetObjects (resultObjects, true);
- rects = rectangleTracker.CreateCorrectionBySpeedOfRects ();
+ if (resultObjects.Count > 0) {
- if(rects.Length > 0){
+ // set original size image
+ OpenCVForUnityUtils.SetImage(faceLandmarkDetector, grayMat);
- OpenCVForUnity.Rect rect = rects [0];
+ Rect rect = resultObjects[0];
- // correct the deviation of the detection result of the face rectangle of OpenCV and Dlib.
- rect.y += (int)(rect.height * 0.1f);
+ // restore to original size rect
+ float downscaleRatio = imageOptimizationHelper.downscaleRatio;
+ rect.x = (int)(rect.x * downscaleRatio);
+ rect.y = (int)(rect.y * downscaleRatio);
+ rect.width = (int)(rect.width * downscaleRatio);
+ rect.height = (int)(rect.height * downscaleRatio);
- //detect landmark points
+ // detect face landmark points
                        List<Vector2> points = faceLandmarkDetector.DetectLandmark (new UnityEngine.Rect (rect.x, rect.y, rect.width, rect.height));
if (enableOpticalFlowFilter) {
- opticalFlowFilter.Process (rgbaMat, points, points, false);
+ opticalFlowFilter.Process (grayMat, points, points, false);
}
- UpdateARHeadTransform (points, arCamera.cameraToWorldMatrix);
+ UpdateARHeadTransform (points, arCamera.cameraToWorldMatrix, arCamera.worldToCameraMatrix, arCamera.projectionMatrix);
if (displayCameraPreview) {
- //draw landmark points
- OpenCVForUnityUtils.DrawFaceLandmark (rgbaMat, points, new Scalar (0, 255, 0, 255), 2);
+ // draw landmark points
+ OpenCVForUnityUtils.DrawFaceLandmark (rgbaMat, points, COLOR_GREEN, 2);
+
+ // draw face rect
+ OpenCVForUnityUtils.DrawFaceRect(rgbaMat, new UnityEngine.Rect(rect.x, rect.y, rect.width, rect.height), COLOR_RED, 2);
}
}
} else {
+ Rect[] rectsWhereRegions;
+
if (hasUpdatedDetectionResult) {
hasUpdatedDetectionResult = false;
//Debug.Log("process: get rectsWhereRegions were got from detectionResult");
- rectsWhereRegions = detectionResult.toArray ();
- } else {
+ rectsWhereRegions = detectionResult.ToArray ();
+
+ if (displayCameraPreview)
+ DrawDownScaleFaceRects(rgbaMat, rectsWhereRegions, imageOptimizationHelper.downscaleRatio, COLOR_BLUE, 1);
+ }
+ else {
//Debug.Log("process: get rectsWhereRegions from previous positions");
- rectsWhereRegions = rectangleTracker.CreateCorrectionBySpeedOfRects ();
+ if (useOpenCVDetector)
+ {
+ rectsWhereRegions = rectangleTracker.CreateCorrectionBySpeedOfRects();
+ }
+ else
+ {
+ rectsWhereRegions = rectangleTracker.CreateRawRects();
+ }
+
+ if (displayCameraPreview)
+ DrawDownScaleFaceRects(rgbaMat, rectsWhereRegions, imageOptimizationHelper.downscaleRatio, COLOR_GREEN, 1);
}
- detectedObjectsInRegions.Clear ();
- if (rectsWhereRegions.Length > 0) {
- int len = rectsWhereRegions.Length;
- for (int i = 0; i < len; i++) {
- DetectInRegion (grayMat, rectsWhereRegions [i], detectedObjectsInRegions);
+ detectedObjectsInRegions.Clear();
+ int len = rectsWhereRegions.Length;
+ for (int i = 0; i < len; i++)
+ {
+ if (useOpenCVDetector)
+ {
+ DetectInRegion(downScaleGrayMat, rectsWhereRegions[i], detectedObjectsInRegions, cascade, true);
+ }
+ else
+ {
+ DetectInRegion(downScaleGrayMat, rectsWhereRegions[i], detectedObjectsInRegions, faceLandmarkDetector);
}
}
rectangleTracker.UpdateTrackedObjects (detectedObjectsInRegions);
- rectangleTracker.GetObjects (resultObjects, true);
+ rectangleTracker.GetObjects (resultObjects, false);
if(resultObjects.Count > 0) {
- OpenCVForUnity.Rect rect = resultObjects [0];
+ // set original size image
+ OpenCVForUnityUtils.SetImage(faceLandmarkDetector, grayMat);
- // correct the deviation of the detection result of the face rectangle of OpenCV and Dlib.
- rect.y += (int)(rect.height * 0.1f);
+ Rect rect = resultObjects [0];
- //detect landmark points
+ // restore to original size rect
+ float downscaleRatio = imageOptimizationHelper.downscaleRatio;
+ rect.x = (int)(rect.x * downscaleRatio);
+ rect.y = (int)(rect.y * downscaleRatio);
+ rect.width = (int)(rect.width * downscaleRatio);
+ rect.height = (int)(rect.height * downscaleRatio);
+
+ // detect face landmark points
                        List<Vector2> points = faceLandmarkDetector.DetectLandmark (new UnityEngine.Rect (rect.x, rect.y, rect.width, rect.height));
if (enableOpticalFlowFilter) {
- opticalFlowFilter.Process (rgbaMat, points, points, false);
+ opticalFlowFilter.Process (grayMat, points, points, false);
}
- UpdateARHeadTransform (points, arCamera.cameraToWorldMatrix);
+ UpdateARHeadTransform (points, arCamera.cameraToWorldMatrix, arCamera.worldToCameraMatrix, arCamera.projectionMatrix);
if (displayCameraPreview) {
- //draw landmark points
- OpenCVForUnityUtils.DrawFaceLandmark (rgbaMat, points, new Scalar (0, 255, 0, 255), 2);
+ // draw landmark points
+ OpenCVForUnityUtils.DrawFaceLandmark (rgbaMat, points, COLOR_GREEN, 2);
+
+ // draw face rect
+ OpenCVForUnityUtils.DrawFaceRect(rgbaMat, new UnityEngine.Rect(rect.x, rect.y, rect.width, rect.height), COLOR_RED, 2);
}
}
}
if (displayCameraPreview) {
- OpenCVForUnity.Utils.fastMatToTexture2D (rgbaMat, texture);
+ Utils.fastMatToTexture2D (rgbaMat, texture);
}
}
}
#endif
-        private void UpdateARHeadTransform(List<Vector2> points, Matrix4x4 cameraToWorldMatrix)
+        private void UpdateARHeadTransform (List<Vector2> points, Matrix4x4 cameraToWorldMatrix, Matrix4x4 worldToCameraMatrix, Matrix4x4 projectionMatrix)
{
- // The coordinates in pixels of the object detected on the image projected onto the plane.
- imagePoints.fromArray (
- new Point ((points [38].x + points [41].x) / 2, (points [38].y + points [41].y) / 2),//l eye (Interpupillary breadth)
- new Point ((points [43].x + points [46].x) / 2, (points [43].y + points [46].y) / 2),//r eye (Interpupillary breadth)
- new Point (points [30].x, points [30].y),//nose (Nose top)
- new Point (points [48].x, points [48].y),//l mouth (Mouth breadth)
- new Point (points [54].x, points [54].y),//r mouth (Mouth breadth)
- new Point (points [0].x, points [0].y),//l ear (Bitragion breadth)
- new Point (points [16].x, points [16].y)//r ear (Bitragion breadth)
- );
-
- // Estimate head pose.
- if (rvec == null || tvec == null) {
- rvec = new Mat (3, 1, CvType.CV_64FC1);
- tvec = new Mat (3, 1, CvType.CV_64FC1);
- Calib3d.solvePnP (objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);
- }
+ MatOfPoint3f objectPoints = null;
+ bool isRightEyeOpen = false;
+ bool isLeftEyeOpen = false;
+ bool isMouthOpen = false;
+ if (points.Count == 68)
+ {
+
+ objectPoints = objectPoints68;
+
+ imagePoints.fromArray(
+ new Point((points[38].x + points[41].x) / 2, (points[38].y + points[41].y) / 2),//l eye (Interpupillary breadth)
+ new Point((points[43].x + points[46].x) / 2, (points[43].y + points[46].y) / 2),//r eye (Interpupillary breadth)
+ new Point(points[30].x, points[30].y),//nose (Tip)
+ new Point(points[33].x, points[33].y),//nose (Subnasale)
+ new Point(points[0].x, points[0].y),//l ear (Bitragion breadth)
+ new Point(points[16].x, points[16].y)//r ear (Bitragion breadth)
+ );
+
+ if (Mathf.Abs((float)(points[43].y - points[46].y)) > Mathf.Abs((float)(points[42].x - points[45].x)) / 5.0)
+ {
+ isRightEyeOpen = true;
+ }
- double tvec_z = tvec.get (2, 0) [0];
+ if (Mathf.Abs((float)(points[38].y - points[41].y)) > Mathf.Abs((float)(points[39].x - points[36].x)) / 5.0)
+ {
+ isLeftEyeOpen = true;
+ }
+
+ float noseDistance = Mathf.Abs((float)(points[27].y - points[33].y));
+                float mouthDistance = Mathf.Abs((float)(points[62].y - points[66].y));
+                if (mouthDistance > noseDistance / 5.0)
+ {
+ isMouthOpen = true;
+ }
+ else
+ {
+ isMouthOpen = false;
+ }
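+                // Heuristic: the mouth counts as open when the inner-lip gap exceeds one
+                // fifth of the nose bridge length, normalizing for face size and distance.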
- if (double.IsNaN(tvec_z) || tvec_z < 0) { // if tvec is wrong data, do not use extrinsic guesses.
- Calib3d.solvePnP (objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);
- }else{
- Calib3d.solvePnP (objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec, true, Calib3d.SOLVEPNP_ITERATIVE);
}
-
- if (applyEstimationPose && !double.IsNaN(tvec_z)) {
+ else if (points.Count == 17)
+ {
+
+ objectPoints = objectPoints17;
+
+ imagePoints.fromArray(
+ new Point((points[2].x + points[3].x) / 2, (points[2].y + points[3].y) / 2),//l eye (Interpupillary breadth)
+ new Point((points[4].x + points[5].x) / 2, (points[4].y + points[5].y) / 2),//r eye (Interpupillary breadth)
+ new Point(points[0].x, points[0].y),//nose (Tip)
+ new Point(points[1].x, points[1].y),//nose (Subnasale)
+ new Point(points[6].x, points[6].y),//l ear (Bitragion breadth)
+ new Point(points[8].x, points[8].y)//r ear (Bitragion breadth)
+ );
+
+ if (Mathf.Abs((float)(points[11].y - points[12].y)) > Mathf.Abs((float)(points[4].x - points[5].x)) / 5.0)
+ {
+ isRightEyeOpen = true;
+ }
- if (Mathf.Abs ((float)(points [43].y - points [46].y)) > Mathf.Abs ((float)(points [42].x - points [45].x)) / 5.0) {
- if (displayEffects)
- rightEye.SetActive (true);
- } else {
- if (displayEffects)
- rightEye.SetActive (false);
+ if (Mathf.Abs((float)(points[9].y - points[10].y)) > Mathf.Abs((float)(points[2].x - points[3].x)) / 5.0)
+ {
+ isLeftEyeOpen = true;
}
- if (Mathf.Abs ((float)(points [38].y - points [41].y)) > Mathf.Abs ((float)(points [39].x - points [36].x)) / 5.0) {
- if (displayEffects)
- leftEye.SetActive (true);
- } else {
- if (displayEffects)
- leftEye.SetActive (false);
+ float noseDistance = Mathf.Abs((float)(points[3].y - points[1].y));
+                float mouthDistance = Mathf.Abs((float)(points[14].y - points[16].y));
+                if (mouthDistance > noseDistance / 2.0)
+ {
+ isMouthOpen = true;
}
+ else
+ {
+ isMouthOpen = false;
+ }
+
+ }
+ else if (points.Count == 6)
+ {
+
+ objectPoints = objectPoints6;
+
+ imagePoints.fromArray(
+ new Point((points[2].x + points[3].x) / 2, (points[2].y + points[3].y) / 2),//l eye (Interpupillary breadth)
+ new Point((points[4].x + points[5].x) / 2, (points[4].y + points[5].y) / 2),//r eye (Interpupillary breadth)
+ new Point(points[0].x, points[0].y),//nose (Tip)
+ new Point(points[1].x, points[1].y)//nose (Subnasale)
+ );
+
+ }
+ else if (points.Count == 5)
+ {
+
+ objectPoints = objectPoints5;
+
+ imagePoints.fromArray(
+ new Point(points[3].x, points[3].y),//l eye (Inner corner of the eye)
+ new Point(points[1].x, points[1].y),//r eye (Inner corner of the eye)
+ new Point(points[2].x, points[2].y),//l eye (Tail of the eye)
+ new Point(points[0].x, points[0].y),//r eye (Tail of the eye)
+                    new Point(points[4].x, points[4].y)//nose (Subnasale)
+ );
+ }
+
+ // estimate head pose
+ if (rvec == null || tvec == null)
+ {
+ rvec = new Mat(3, 1, CvType.CV_64FC1);
+ tvec = new Mat(3, 1, CvType.CV_64FC1);
+ Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);
+ }
+
+ /*
+ double tvec_x = tvec.get(0, 0)[0], tvec_y = tvec.get(1, 0)[0], tvec_z = tvec.get(2, 0)[0];
+
+ bool isNotInViewport = false;
+ Matrix4x4 VP = projectionMatrix * worldToCameraMatrix;
+ Vector4 pos = VP * new Vector4((float)tvec_x, (float)tvec_y, (float)tvec_z, 1.0f);
+ if (pos.w != 0)
+ {
+ float x = pos.x / pos.w, y = pos.y / pos.w, z = pos.z / pos.w;
+ if (x < -1.0f || x > 1.0f || y < -1.0f || y > 1.0f || z < -1.0f || z > 1.0f)
+ isNotInViewport = true;
+ }
+ if (double.IsNaN(tvec_z) || isNotInViewport)
+ { // if tvec is wrong data, do not use extrinsic guesses. (the estimated object is not in the camera field of view)
+ Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);
+ }
+ else
+ {
+ Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec, true, Calib3d.SOLVEPNP_ITERATIVE);
+ }
+ //Debug.Log (tvec.dump() + " " + isNotInViewport);
+ */
+
+ double tvec_z = tvec.get(2, 0)[0];
+
+ if (double.IsNaN(tvec_z) || tvec_z < 0)
+ { // if tvec is wrong data, do not use extrinsic guesses.
+                Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);
+ }
+ else
+ {
+                Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec, true, Calib3d.SOLVEPNP_ITERATIVE);
+ }
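+            // Reusing the previous rvec/tvec as an extrinsic guess (SOLVEPNP_ITERATIVE)
+            // keeps the estimated pose temporally stable; when the previous tvec is
+            // invalid (NaN or behind the camera), the pose is re-estimated from scratch.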
+
+ if (applyEstimationPose && !double.IsNaN(tvec_z))
+ {
+
+ // Display effects.
if (displayHead)
- head.SetActive (true);
+ head.SetActive(true);
if (displayAxes)
- axes.SetActive (true);
+ axes.SetActive(true);
+ if (displayEffects)
+ {
+ rightEye.SetActive(isRightEyeOpen);
+ leftEye.SetActive(isLeftEyeOpen);
- float noseDistance = Mathf.Abs ((float)(points [27].y - points [33].y));
- float mouseDistance = Mathf.Abs ((float)(points [62].y - points [66].y));
- if (mouseDistance > noseDistance / 5.0) {
- if (displayEffects) {
- mouth.SetActive (true);
- foreach (ParticleSystem ps in mouthParticleSystem) {
+ if (isMouthOpen)
+ {
+ mouth.SetActive(true);
+ foreach (ParticleSystem ps in mouthParticleSystem)
+ {
var em = ps.emission;
em.enabled = true;
#if UNITY_5_5_OR_NEWER
var main = ps.main;
- main.startSizeMultiplier = 40 * (mouseDistance / noseDistance);
+ main.startSizeMultiplier = 20;
#else
- ps.startSize = 40 * (mouseDistance / noseDistance);
+ ps.startSize = 20;
#endif
}
}
- } else {
- if (displayEffects) {
- foreach (ParticleSystem ps in mouthParticleSystem) {
+ else
+ {
+ foreach (ParticleSystem ps in mouthParticleSystem)
+ {
var em = ps.emission;
em.enabled = false;
}
}
}
-
+
// Convert to unity pose data.
double[] rvecArr = new double[3];
- rvec.get (0, 0, rvecArr);
+ rvec.get(0, 0, rvecArr);
double[] tvecArr = new double[3];
- tvec.get (0, 0, tvecArr);
- tvecArr [0] = tvecArr [0] / 1000.0;
- tvecArr[1] = tvecArr[1] / 1000.0;
- tvecArr[2] = tvecArr[2] / 1000.0 / imageOptimizationHelper.downscaleRatio;
- PoseData poseData = ARUtils.ConvertRvecTvecToPoseData (rvecArr, tvecArr);
+ tvec.get(0, 0, tvecArr);
+                // tvec is already in meters because the 3d object points were scaled to
+                // real world units above, so the old /1000 and downscale-ratio corrections
+                // are no longer needed here.
+ PoseData poseData = ARUtils.ConvertRvecTvecToPoseData(rvecArr, tvecArr);
+
// Changes in pos/rot below these thresholds are ignored.
- if (enableLowPassFilter) {
- ARUtils.LowpassPoseData (ref oldPoseData, ref poseData, positionLowPass, rotationLowPass);
+ if (enableLowPassFilter)
+ {
+ ARUtils.LowpassPoseData(ref oldPoseData, ref poseData, positionLowPass, rotationLowPass);
}
oldPoseData = poseData;
// Create transform matrix.
- transformationM = Matrix4x4.TRS (poseData.pos, poseData.rot, Vector3.one);
+ transformationM = Matrix4x4.TRS(poseData.pos, poseData.rot, Vector3.one);
// right-handed coordinates system (OpenCV) to left-handed one (Unity)
ARM = invertYM * transformationM;
@@ -893,18 +1202,16 @@ private void UpdateARHeadTransform(List<Vector2> points, Matrix4x4 cameraToWorld
// Apply the cameraToWorld matrix with the Z-axis inverted.
ARM = cameraToWorldMatrix * invertZM * ARM;
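                // (transformationM is an OpenCV right-handed camera-space pose; flipping Y
                // and Z converts it to Unity's left-handed convention before
                // cameraToWorldMatrix moves it into world space.)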
- ARUtils.SetTransformFromMatrix (arGameObject.transform, ref ARM);
+ ARUtils.SetTransformFromMatrix(arGameObject.transform, ref ARM);
}
}
- private void StartThread(Action action)
+ private void StartThread (Action action)
{
- #if UNITY_METRO && NETFX_CORE
+ #if WINDOWS_UWP || (!UNITY_WSA_10_0 && (NET_4_6 || NET_STANDARD_2_0))
System.Threading.Tasks.Task.Run(() => action());
- #elif UNITY_METRO
- action.BeginInvoke(ar => action.EndInvoke(ar), null);
#else
- ThreadPool.QueueUserWorkItem (_ => action());
+ ThreadPool.QueueUserWorkItem(_ => action());
#endif
}
@@ -918,13 +1225,20 @@ private void StopThread ()
}
}
- private void ThreadWorker()
+ private void ThreadWorker ()
{
isThreadRunning = true;
- DetectObject ();
+ if (useOpenCVDetector)
+ {
+ DetectObject(grayMat4Thread, out detectionResult, cascade4Thread, true);
+ }
+ else
+ {
+ DetectObject(grayMat4Thread, out detectionResult, faceLandmarkDetector4Thread);
+ }
- lock (sync) {
+ lock (ExecuteOnMainThread) {
if (ExecuteOnMainThread.Count == 0) {
ExecuteOnMainThread.Enqueue (() => {
OnDetectionDone ();
@@ -935,14 +1249,46 @@ private void ThreadWorker()
isThreadRunning = false;
}
- private void DetectObject()
+        private void DetectObject (Mat img, out List<Rect> detectedObjects, FaceLandmarkDetector landmarkDetector)
{
- MatOfRect objects = new MatOfRect ();
- if (cascade4Thread != null)
- cascade4Thread.detectMultiScale (grayMat, objects, 1.1, 2, Objdetect.CASCADE_SCALE_IMAGE, // TODO: objdetect.CV_HAAR_SCALE_IMAGE
- new Size (grayMat.cols () * minDetectionSizeRatio, grayMat.rows () * minDetectionSizeRatio), new Size ());
+ OpenCVForUnityUtils.SetImage(landmarkDetector, img);
+
+            List<UnityEngine.Rect> detectResult = landmarkDetector.Detect();
- detectionResult = objects;
+            detectedObjects = new List<Rect>();
+
+ int len = detectResult.Count;
+ for (int i = 0; i < len; i++)
+ {
+ UnityEngine.Rect r = detectResult[i];
+ detectedObjects.Add(new Rect((int)r.x, (int)r.y, (int)r.width, (int)r.height));
+ }
+ }
+
+        private void DetectObject (Mat img, out List<Rect> detectedObjects, CascadeClassifier cascade, bool correctToDlibResult = false)
+ {
+ int d = Mathf.Min(img.width(), img.height());
+ d = (int)Mathf.Round(d * minDetectionSizeRatio);
+
+ MatOfRect objects = new MatOfRect();
+ if (cascade != null)
+ cascade.detectMultiScale(img, objects, 1.1, 2, Objdetect.CASCADE_SCALE_IMAGE, new Size(d, d), new Size());
+
+ detectedObjects = objects.toList();
+
+ if (correctToDlibResult)
+ {
+ int len = detectedObjects.Count;
+ for (int i = 0; i < len; i++)
+ {
+ Rect r = detectedObjects[i];
+                    // correct the deviation between the face rectangles detected by OpenCV and dlib.
+ r.x += (int)(r.width * 0.05f);
+ r.y += (int)(r.height * 0.1f);
+ r.width = (int)(r.width * 0.9f);
+ r.height = (int)(r.height * 0.9f);
+ }
+ }
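+            // (The 0.05/0.1/0.9 factors are empirical: the cascade's rectangles sit
+            // slightly higher and wider than the dlib detector boxes that the shape
+            // predictor was trained on.)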
}
private void OnDetectionDone()
@@ -952,45 +1298,117 @@ private void OnDetectionDone()
isDetecting = false;
}
-        private void DetectInRegion (Mat img, Rect r, List<Rect> detectedObjectsInRegions)
+        private void DetectInRegion (Mat img, Rect region, List<Rect> detectedObjectsInRegions, FaceLandmarkDetector landmarkDetector)
{
- Rect r0 = new Rect (new Point (), img.size ());
- Rect r1 = new Rect (r.x, r.y, r.width, r.height);
- Rect.inflate (r1, (int)((r1.width * coeffTrackingWindowSize) - r1.width) / 2,
+ Rect r0 = new Rect(new Point(), img.size());
+ Rect r1 = new Rect(region.x, region.y, region.width, region.height);
+ Rect.inflate(r1, (int)((r1.width * coeffTrackingWindowSize) - r1.width) / 2,
(int)((r1.height * coeffTrackingWindowSize) - r1.height) / 2);
- r1 = Rect.intersect (r0, r1);
+ r1 = Rect.intersect(r0, r1);
- if ((r1.width <= 0) || (r1.height <= 0)) {
- Debug.Log ("DetectionBasedTracker::detectInRegion: Empty intersection");
+ if ((r1.width <= 0) || (r1.height <= 0))
+ {
+ Debug.Log("detectInRegion: Empty intersection");
return;
}
- int d = Math.Min (r.width, r.height);
- d = (int)Math.Round (d * coeffObjectSizeToTrack);
+ using (Mat img1_roi = new Mat(img, r1))
+ using (Mat img1 = new Mat(r1.size(), img.type()))
+ {
+ img1_roi.copyTo(img1);
- MatOfRect tmpobjects = new MatOfRect ();
+ OpenCVForUnityUtils.SetImage(landmarkDetector, img1);
- Mat img1 = new Mat (img, r1);//subimage for rectangle -- without data copying
+                List<UnityEngine.Rect> detectResult = landmarkDetector.Detect();
- cascade.detectMultiScale (img1, tmpobjects, 1.1, 2, 0 | Objdetect.CASCADE_DO_CANNY_PRUNING | Objdetect.CASCADE_SCALE_IMAGE | Objdetect.CASCADE_FIND_BIGGEST_OBJECT, new Size (d, d), new Size ());
+ int len = detectResult.Count;
+ for (int i = 0; i < len; i++)
+ {
+ UnityEngine.Rect tmp = detectResult[i];
+ Rect r = new Rect((int)(tmp.x + r1.x), (int)(tmp.y + r1.y), (int)tmp.width, (int)tmp.height);
+ detectedObjectsInRegions.Add(r);
+ }
+ }
+ }
+        private void DetectInRegion (Mat img, Rect region, List<Rect> detectedObjectsInRegions, CascadeClassifier cascade, bool correctToDlibResult = false)
+ {
+ Rect r0 = new Rect(new Point(), img.size());
+ Rect r1 = new Rect(region.x, region.y, region.width, region.height);
+ Rect.inflate(r1, (int)((r1.width * coeffTrackingWindowSize) - r1.width) / 2,
+ (int)((r1.height * coeffTrackingWindowSize) - r1.height) / 2);
+ r1 = Rect.intersect(r0, r1);
- Rect[] tmpobjectsArray = tmpobjects.toArray ();
- int len = tmpobjectsArray.Length;
- for (int i = 0; i < len; i++) {
- Rect tmp = tmpobjectsArray [i];
- Rect curres = new Rect (new Point (tmp.x + r1.x, tmp.y + r1.y), tmp.size ());
- detectedObjectsInRegions.Add (curres);
+ if ((r1.width <= 0) || (r1.height <= 0))
+ {
+ Debug.Log("detectInRegion: Empty intersection");
+ return;
+ }
+
+ int d = Math.Min(region.width, region.height);
+ d = (int)Math.Round(d * coeffObjectSizeToTrack);
+
+ using (MatOfRect tmpobjects = new MatOfRect())
+ using (Mat img1 = new Mat(img, r1)) //subimage for rectangle -- without data copying
+ {
+ cascade.detectMultiScale(img1, tmpobjects, 1.1, 2, 0 | Objdetect.CASCADE_DO_CANNY_PRUNING | Objdetect.CASCADE_SCALE_IMAGE | Objdetect.CASCADE_FIND_BIGGEST_OBJECT, new Size(d, d), new Size());
+
+ Rect[] tmpobjectsArray = tmpobjects.toArray();
+ int len = tmpobjectsArray.Length;
+ for (int i = 0; i < len; i++)
+ {
+ Rect tmp = tmpobjectsArray[i];
+ Rect r = new Rect(new Point(tmp.x + r1.x, tmp.y + r1.y), tmp.size());
+
+ if (correctToDlibResult)
+ {
+                        // correct the deviation between the face rectangles detected by OpenCV and dlib.
+ r.x += (int)(r.width * 0.05f);
+ r.y += (int)(r.height * 0.1f);
+ r.width = (int)(r.width * 0.9f);
+ r.height = (int)(r.height * 0.9f);
+ }
+
+ detectedObjectsInRegions.Add(r);
+ }
+ }
+ }
+
+ private void DrawDownScaleFaceRects (Mat img, Rect[] rects, float downscaleRatio, Scalar color, int thickness)
+ {
+ int len = rects.Length;
+ for (int i = 0; i < len; i++)
+ {
+ Rect rect = new Rect(
+ (int)(rects[i].x * downscaleRatio),
+ (int)(rects[i].y * downscaleRatio),
+ (int)(rects[i].width * downscaleRatio),
+ (int)(rects[i].height * downscaleRatio)
+ );
+ Imgproc.rectangle(img, rect, color, thickness);
}
}
+        private void AdjustPointScale (MatOfPoint3f p, double scale)
+ {
+ Point3[] arr = p.toArray();
+ for (int i = 0; i < arr.Length; i++)
+ {
+ //arr[i] = new Point3(arr[i].x * scale, arr[i].y * scale, arr[i].z * scale);
+ arr[i].x *= scale;
+ arr[i].y *= scale;
+ arr[i].z *= scale;
+ }
+ p.fromArray(arr);
+ }
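+        // Mat data lives in native memory, so the points are copied out with toArray(),
+        // scaled in managed code, and written back with fromArray().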
+
        /// <summary>
        /// Raises the destroy event.
        /// </summary>
void OnDestroy ()
{
imageOptimizationHelper.Dispose ();
- #if NETFX_CORE && !DISABLE_HOLOLENSCAMSTREAM_API
+ #if WINDOWS_UWP && !DISABLE_HOLOLENSCAMSTREAM_API
webCamTextureToMatHelper.frameMatAcquired -= OnFrameMatAcquired;
#endif
webCamTextureToMatHelper.Dispose ();
@@ -998,11 +1416,11 @@ void OnDestroy ()
if (faceLandmarkDetector != null)
faceLandmarkDetector.Dispose ();
+ if (faceLandmarkDetector4Thread != null)
+ faceLandmarkDetector4Thread.Dispose();
+
if (rectangleTracker != null)
rectangleTracker.Dispose ();
-
- if (rotMat != null)
- rotMat.Dispose ();
}
        /// <summary>
@@ -1050,11 +1468,8 @@ public void OnChangeCameraButtonClick ()
        /// </summary>
public void OnDisplayCamreaPreviewToggleValueChanged ()
{
- if (displayCameraPreviewToggle.isOn) {
- displayCameraPreview = true;
- } else {
- displayCameraPreview = false;
- }
+ displayCameraPreview = displayCameraPreviewToggle.isOn;
+
previewQuad.SetActive (displayCameraPreview);
}
@@ -1063,18 +1478,23 @@ public void OnDisplayCamreaPreviewToggleValueChanged ()
        /// </summary>
public void OnUseSeparateDetectionToggleValueChanged ()
{
- if (useSeparateDetectionToggle.isOn) {
- useSeparateDetection = true;
- } else {
- useSeparateDetection = false;
- }
+ useSeparateDetection = useSeparateDetectionToggle.isOn;
- lock (rectangleTracker) {
+ lock (rectangleTracker)
+ {
if (rectangleTracker != null)
- rectangleTracker.Reset ();
+ rectangleTracker.Reset();
}
}
+        /// <summary>
+        /// Raises the use OpenCV Detector toggle value changed event.
+        /// </summary>
+ public void OnUseOpenCVDetectorToggleValueChanged()
+ {
+ useOpenCVDetector = useOpenCVDetectorToggle.isOn;
+ }
+
        /// <summary>
        /// Raises the display axes toggle value changed event.
        /// </summary>
@@ -1121,11 +1541,7 @@ public void OnDisplayEffectsToggleValueChanged ()
        /// </summary>
public void OnEnableOpticalFlowFilterToggleValueChanged ()
{
- if (enableOpticalFlowFilterToggle.isOn) {
- enableOpticalFlowFilter = true;
- } else {
- enableOpticalFlowFilter = false;
- }
+ enableOpticalFlowFilter = enableOpticalFlowFilterToggle.isOn;
}
        /// <summary>
@@ -1147,8 +1563,12 @@ public void OnEnableLowPassFilterToggleValueChanged ()
        /// </summary>
public void OnTapped ()
{
- if (EventSystem.current.IsPointerOverGameObject ())
+ // Determine if a Gaze pointer is over a GUI.
+ if (GazeManager.Instance.HitObject != null && (GazeManager.Instance.HitObject.GetComponent