diff --git a/HLWithDlibExampleMRTK2/Assets/HoloLensWithDlibFaceLandmarkDetectorExample/HLARHeadExample/HLARHeadExample.cs b/HLWithDlibExampleMRTK2/Assets/HoloLensWithDlibFaceLandmarkDetectorExample/HLARHeadExample/HLARHeadExample.cs
index ea017ce..7a747b0 100644
--- a/HLWithDlibExampleMRTK2/Assets/HoloLensWithDlibFaceLandmarkDetectorExample/HLARHeadExample/HLARHeadExample.cs
+++ b/HLWithDlibExampleMRTK2/Assets/HoloLensWithDlibFaceLandmarkDetectorExample/HLARHeadExample/HLARHeadExample.cs
@@ -22,7 +22,7 @@ namespace HoloLensWithDlibFaceLandmarkDetectorExample
/// HoloLens AR Head Example
/// An example of AR head projection using OpenCVForUnity and DlibLandmarkDetector on Hololens.
///
- [RequireComponent(typeof(HLCameraStreamToMatHelper), typeof(ImageOptimizationHelper))]
+ [RequireComponent(typeof(HLCameraStream2MatHelper), typeof(ImageOptimizationHelper))]
public class HLARHeadExample : MonoBehaviour
{
[SerializeField, HeaderAttribute("Preview")]
@@ -265,7 +265,7 @@ public class HLARHeadExample : MonoBehaviour
///
/// The webcam texture to mat helper.
///
- HLCameraStreamToMatHelper webCamTextureToMatHelper;
+ HLCameraStream2MatHelper webCamTextureToMatHelper;
///
/// The image optimization helper.
@@ -297,11 +297,6 @@ public class HLARHeadExample : MonoBehaviour
///
string dlibShapePredictorFileName = "DlibFaceLandmarkDetector/sp_human_face_68.dat";
- ///
- /// The dlib shape predictor file path.
- ///
- string dlibShapePredictorFilePath;
-
Scalar COLOR_WHITE = new Scalar(255, 255, 255, 255);
Scalar COLOR_GRAY = new Scalar(128, 128, 128, 255);
@@ -386,8 +381,18 @@ bool isDetectingInFrameArrivedThread
public Text debugStr;
+ string cascade_filepath;
+ string cascade4Thread_filepath;
+ string dlibShapePredictor_filepath;
+ string dlibShapePredictor4Thread_filepath;
+
+ ///
+ /// The CancellationTokenSource.
+ ///
+ CancellationTokenSource cts = new CancellationTokenSource();
+
// Use this for initialization
- protected void Start()
+ async void Start()
{
displayCameraPreviewToggle.isOn = displayCameraPreview;
enableDownScaleToggle.isOn = enableDownScale;
@@ -400,29 +405,69 @@ protected void Start()
enableLerpFilterToggle.isOn = enableLerpFilter;
imageOptimizationHelper = gameObject.GetComponent<ImageOptimizationHelper>();
- webCamTextureToMatHelper = gameObject.GetComponent<HLCameraStreamToMatHelper>();
+ webCamTextureToMatHelper = gameObject.GetComponent<HLCameraStream2MatHelper>();
#if WINDOWS_UWP && !DISABLE_HOLOLENSCAMSTREAM_API
webCamTextureToMatHelper.frameMatAcquired += OnFrameMatAcquired;
#endif
- webCamTextureToMatHelper.outputColorFormat = WebCamTextureToMatHelper.ColorFormat.GRAY;
- webCamTextureToMatHelper.Initialize();
rectangleTracker = new RectangleTracker();
+
+ // Asynchronously retrieves the readable file path from the StreamingAssets directory.
+ if (debugStr != null)
+ {
+ debugStr.text = "Preparing file access...";
+ }
+
+ cascade_filepath = await DlibFaceLandmarkDetector.UnityUtils.Utils.getFilePathAsyncTask("OpenCVForUnity/objdetect/lbpcascade_frontalface.xml", cancellationToken: cts.Token);
+ //cascade4Thread_filepath = await DlibFaceLandmarkDetector.UnityUtils.Utils.getFilePathAsyncTask("OpenCVForUnity/objdetect/haarcascade_frontalface_alt.xml", cancellationToken: cts.Token);
+ cascade4Thread_filepath = await DlibFaceLandmarkDetector.UnityUtils.Utils.getFilePathAsyncTask("OpenCVForUnity/objdetect/lbpcascade_frontalface.xml", cancellationToken: cts.Token);
dlibShapePredictorFileName = HoloLensWithDlibFaceLandmarkDetectorExample.dlibShapePredictorFileName;
- dlibShapePredictorFilePath = DlibFaceLandmarkDetector.UnityUtils.Utils.getFilePath(dlibShapePredictorFileName);
- if (string.IsNullOrEmpty(dlibShapePredictorFilePath))
+ dlibShapePredictor_filepath = await DlibFaceLandmarkDetector.UnityUtils.Utils.getFilePathAsyncTask(dlibShapePredictorFileName, cancellationToken: cts.Token);
+ dlibShapePredictor4Thread_filepath = await DlibFaceLandmarkDetector.UnityUtils.Utils.getFilePathAsyncTask("DlibFaceLandmarkDetector/sp_human_face_6.dat", cancellationToken: cts.Token);
+
+ if (debugStr != null)
+ {
+ debugStr.text = "";
+ }
+
+ Run();
+ }
+
+ // Use this for initialization
+ void Run()
+ {
+ cascade = new CascadeClassifier();
+ cascade.load(cascade_filepath);
+#if !WINDOWS_UWP || UNITY_EDITOR
+ // "empty" method is not working on the UWP platform.
+ if (cascade.empty())
+ {
+ Debug.LogError("cascade file is not loaded. Please copy from “OpenCVForUnity/StreamingAssets/OpenCVForUnity/objdetect/” to “Assets/StreamingAssets/OpenCVForUnity/objdetect/” folder. ");
+ }
+#endif
+
+ cascade4Thread = new CascadeClassifier();
+ cascade4Thread.load(cascade4Thread_filepath);
+#if !WINDOWS_UWP || UNITY_EDITOR
+ // "empty" method is not working on the UWP platform.
+ if (cascade4Thread.empty())
+ {
+ Debug.LogError("cascade file is not loaded. Please copy from “OpenCVForUnity/StreamingAssets/OpenCVForUnity/objdetect/” to “Assets/StreamingAssets/OpenCVForUnity/objdetect/” folder. ");
+ }
+#endif
+
+ if (string.IsNullOrEmpty(dlibShapePredictor_filepath))
{
Debug.LogError("shape predictor file does not exist. Please copy from “DlibFaceLandmarkDetector/StreamingAssets/DlibFaceLandmarkDetector/” to “Assets/StreamingAssets/DlibFaceLandmarkDetector/” folder. ");
}
- faceLandmarkDetector = new FaceLandmarkDetector(dlibShapePredictorFilePath);
+ faceLandmarkDetector = new FaceLandmarkDetector(dlibShapePredictor_filepath);
- dlibShapePredictorFilePath = DlibFaceLandmarkDetector.UnityUtils.Utils.getFilePath("DlibFaceLandmarkDetector/sp_human_face_6.dat");
- if (string.IsNullOrEmpty(dlibShapePredictorFilePath))
+ if (string.IsNullOrEmpty(dlibShapePredictor4Thread_filepath))
{
Debug.LogError("shape predictor file does not exist. Please copy from “DlibFaceLandmarkDetector/StreamingAssets/DlibFaceLandmarkDetector/” to “Assets/StreamingAssets/DlibFaceLandmarkDetector/” folder. ");
}
- faceLandmarkDetector4Thread = new FaceLandmarkDetector(dlibShapePredictorFilePath);
+ faceLandmarkDetector4Thread = new FaceLandmarkDetector(dlibShapePredictor4Thread_filepath);
// set 3d face object points. (right-handed coordinates system)
@@ -469,6 +514,9 @@ protected void Start()
opticalFlowFilter = new OFPointsFilter((int)faceLandmarkDetector.GetShapePredictorNumParts());
opticalFlowFilter.diffCheckSensitivity /= imageOptimizationHelper.downscaleRatio;
+
+ webCamTextureToMatHelper.outputColorFormat = Source2MatHelperColorFormat.GRAY;
+ webCamTextureToMatHelper.Initialize();
}
///
@@ -584,29 +632,7 @@ public void OnWebCamTextureToMatHelperInitialized()
mouthParticleSystem = mouth.GetComponentsInChildren<ParticleSystem>(true);
-
- //grayMat = new Mat();
- cascade = new CascadeClassifier();
- cascade.load(Utils.getFilePath("OpenCVForUnity/objdetect/lbpcascade_frontalface.xml"));
-#if !WINDOWS_UWP || UNITY_EDITOR
- // "empty" method is not working on the UWP platform.
- if (cascade.empty())
- {
- Debug.LogError("cascade file is not loaded. Please copy from “OpenCVForUnity/StreamingAssets/OpenCVForUnity/objdetect/” to “Assets/StreamingAssets/OpenCVForUnity/objdetect/” folder. ");
- }
-#endif
-
grayMat4Thread = new Mat();
- cascade4Thread = new CascadeClassifier();
- //cascade4Thread.load(Utils.getFilePath("OpenCVForUnity/objdetect/haarcascade_frontalface_alt.xml"));
- cascade4Thread.load(Utils.getFilePath("OpenCVForUnity/objdetect/lbpcascade_frontalface.xml"));
-#if !WINDOWS_UWP || UNITY_EDITOR
- // "empty" method is not working on the UWP platform.
- if (cascade4Thread.empty())
- {
- Debug.LogError("cascade file is not loaded. Please copy from “OpenCVForUnity/StreamingAssets/OpenCVForUnity/objdetect/” to “Assets/StreamingAssets/OpenCVForUnity/objdetect/” folder. ");
- }
-#endif
}
///
@@ -630,15 +656,9 @@ public void OnWebCamTextureToMatHelperDisposed()
ExecuteOnMainThread.Clear();
}
- if (cascade != null)
- cascade.Dispose();
-
if (grayMat4Thread != null)
grayMat4Thread.Dispose();
- if (cascade4Thread != null)
- cascade4Thread.Dispose();
-
rectangleTracker.Reset();
camMatrix.Dispose();
@@ -667,12 +687,13 @@ public void OnWebCamTextureToMatHelperDisposed()
}
///
- /// Raises the web cam texture to mat helper error occurred event.
+ /// Raises the webcam texture to mat helper error occurred event.
///
/// Error code.
- public void OnWebCamTextureToMatHelperErrorOccurred(WebCamTextureToMatHelper.ErrorCode errorCode)
+ /// Message.
+ public void OnWebCamTextureToMatHelperErrorOccurred(Source2MatHelperErrorCode errorCode, string message)
{
- Debug.Log("OnWebCamTextureToMatHelperErrorOccurred " + errorCode);
+ Debug.Log("OnWebCamTextureToMatHelperErrorOccurred " + errorCode + ":" + message);
}
#if WINDOWS_UWP && !DISABLE_HOLOLENSCAMSTREAM_API
@@ -1536,6 +1557,12 @@ void OnDestroy()
webCamTextureToMatHelper.Dispose();
imageOptimizationHelper.Dispose();
+ if (cascade != null)
+ cascade.Dispose();
+
+ if (cascade4Thread != null)
+ cascade4Thread.Dispose();
+
if (faceLandmarkDetector != null)
faceLandmarkDetector.Dispose();
@@ -1544,6 +1571,9 @@ void OnDestroy()
if (rectangleTracker != null)
rectangleTracker.Dispose();
+
+ if (cts != null)
+ cts.Dispose();
}
///
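
A minimal sketch of the asynchronous StreamingAssets loading pattern introduced by the Start()/Run() split above. Only getFilePathAsyncTask, the CancellationTokenSource, and the Run() hand-off are taken from the change itself; the class name, field names, and file name below are illustrative.

    using System.Threading;
    using UnityEngine;

    public class AsyncFilePathSketch : MonoBehaviour
    {
        // Lets pending getFilePathAsyncTask calls be cancelled and cleaned up on destroy.
        CancellationTokenSource cts = new CancellationTokenSource();
        string dlibShapePredictor_filepath;

        async void Start()
        {
            // Resolve a readable copy of the StreamingAssets file without blocking the main thread.
            dlibShapePredictor_filepath = await DlibFaceLandmarkDetector.UnityUtils.Utils.getFilePathAsyncTask(
                "DlibFaceLandmarkDetector/sp_human_face_68.dat", cancellationToken: cts.Token);

            if (string.IsNullOrEmpty(dlibShapePredictor_filepath))
                Debug.LogError("shape predictor file does not exist.");

            // Heavy setup (detectors, cascades, helper.Initialize()) runs only after all paths are resolved.
            Run();
        }

        void Run()
        {
            // Construct FaceLandmarkDetector / CascadeClassifier instances here, as in the examples above.
        }

        void OnDestroy()
        {
            if (cts != null)
                cts.Dispose();
        }
    }
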
diff --git a/HLWithDlibExampleMRTK2/Assets/HoloLensWithDlibFaceLandmarkDetectorExample/HLARHeadExample/HLARHeadExample.unity b/HLWithDlibExampleMRTK2/Assets/HoloLensWithDlibFaceLandmarkDetectorExample/HLARHeadExample/HLARHeadExample.unity
index 7a50e80..6c18254 100644
--- a/HLWithDlibExampleMRTK2/Assets/HoloLensWithDlibFaceLandmarkDetectorExample/HLARHeadExample/HLARHeadExample.unity
+++ b/HLWithDlibExampleMRTK2/Assets/HoloLensWithDlibFaceLandmarkDetectorExample/HLARHeadExample/HLARHeadExample.unity
@@ -3393,8 +3393,8 @@ GameObject:
serializedVersion: 6
m_Component:
- component: {fileID: 1076083699}
- - component: {fileID: 1076083697}
- component: {fileID: 1076083695}
+ - component: {fileID: 1076083701}
- component: {fileID: 1076083700}
m_Layer: 0
m_Name: HLARHeadExample
@@ -3417,74 +3417,6 @@ MonoBehaviour:
m_EditorClassIdentifier:
_downscaleRatio: 2
_frameSkippingRatio: 1
---- !u!114 &1076083697
-MonoBehaviour:
- m_ObjectHideFlags: 0
- m_CorrespondingSourceObject: {fileID: 0}
- m_PrefabInstance: {fileID: 0}
- m_PrefabAsset: {fileID: 0}
- m_GameObject: {fileID: 1076083694}
- m_Enabled: 1
- m_EditorHideFlags: 0
- m_Script: {fileID: 11500000, guid: 0195895dd83eb204da131e4a7c4bcfe3, type: 3}
- m_Name:
- m_EditorClassIdentifier:
- _requestedDeviceName:
- _requestedWidth: 896
- _requestedHeight: 504
- _requestedIsFrontFacing: 0
- _requestedFPS: 30
- _rotate90Degree: 0
- _flipVertical: 0
- _flipHorizontal: 0
- _outputColorFormat: 0
- _timeoutFrameCount: 300
- onInitialized:
- m_PersistentCalls:
- m_Calls:
- - m_Target: {fileID: 1076083700}
- m_TargetAssemblyTypeName:
- m_MethodName: OnWebCamTextureToMatHelperInitialized
- m_Mode: 1
- m_Arguments:
- m_ObjectArgument: {fileID: 0}
- m_ObjectArgumentAssemblyTypeName: UnityEngine.Object, UnityEngine
- m_IntArgument: 0
- m_FloatArgument: 0
- m_StringArgument:
- m_BoolArgument: 0
- m_CallState: 2
- onDisposed:
- m_PersistentCalls:
- m_Calls:
- - m_Target: {fileID: 1076083700}
- m_TargetAssemblyTypeName:
- m_MethodName: OnWebCamTextureToMatHelperDisposed
- m_Mode: 1
- m_Arguments:
- m_ObjectArgument: {fileID: 0}
- m_ObjectArgumentAssemblyTypeName: UnityEngine.Object, UnityEngine
- m_IntArgument: 0
- m_FloatArgument: 0
- m_StringArgument:
- m_BoolArgument: 0
- m_CallState: 2
- onErrorOccurred:
- m_PersistentCalls:
- m_Calls:
- - m_Target: {fileID: 1076083700}
- m_TargetAssemblyTypeName:
- m_MethodName: OnWebCamTextureToMatHelperErrorOccurred
- m_Mode: 0
- m_Arguments:
- m_ObjectArgument: {fileID: 0}
- m_ObjectArgumentAssemblyTypeName: UnityEngine.Object, UnityEngine
- m_IntArgument: 0
- m_FloatArgument: 0
- m_StringArgument:
- m_BoolArgument: 0
- m_CallState: 2
- avoidAndroidFrontCameraLowLightIssue: 0
--- !u!4 &1076083699
Transform:
m_ObjectHideFlags: 0
@@ -3545,6 +3477,76 @@ MonoBehaviour:
videoFPS: {fileID: 443524926926304178}
trackFPS: {fileID: 443524928155071722}
debugStr: {fileID: 443524927024673313}
+--- !u!114 &1076083701
+MonoBehaviour:
+ m_ObjectHideFlags: 0
+ m_CorrespondingSourceObject: {fileID: 0}
+ m_PrefabInstance: {fileID: 0}
+ m_PrefabAsset: {fileID: 0}
+ m_GameObject: {fileID: 1076083694}
+ m_Enabled: 1
+ m_EditorHideFlags: 0
+ m_Script: {fileID: 11500000, guid: a80f11196f5e6394aaec3da665ebfc5a, type: 3}
+ m_Name:
+ m_EditorClassIdentifier:
+ _requestedDeviceName:
+ _requestedWidth: 896
+ _requestedHeight: 504
+ _requestedIsFrontFacing: 0
+ _requestedFPS: 30
+ _rotate90Degree: 0
+ _flipVertical: 0
+ _flipHorizontal: 0
+ _outputColorFormat: 3
+ _timeoutFrameCount: 1500
+ _onInitialized:
+ m_PersistentCalls:
+ m_Calls:
+ - m_Target: {fileID: 1076083700}
+ m_TargetAssemblyTypeName: HoloLensWithDlibFaceLandmarkDetectorExample.HLARHeadExample,
+ Assembly-CSharp
+ m_MethodName: OnWebCamTextureToMatHelperInitialized
+ m_Mode: 1
+ m_Arguments:
+ m_ObjectArgument: {fileID: 0}
+ m_ObjectArgumentAssemblyTypeName: UnityEngine.Object, UnityEngine
+ m_IntArgument: 0
+ m_FloatArgument: 0
+ m_StringArgument:
+ m_BoolArgument: 0
+ m_CallState: 2
+ _onDisposed:
+ m_PersistentCalls:
+ m_Calls:
+ - m_Target: {fileID: 1076083700}
+ m_TargetAssemblyTypeName: HoloLensWithDlibFaceLandmarkDetectorExample.HLARHeadExample,
+ Assembly-CSharp
+ m_MethodName: OnWebCamTextureToMatHelperDisposed
+ m_Mode: 1
+ m_Arguments:
+ m_ObjectArgument: {fileID: 0}
+ m_ObjectArgumentAssemblyTypeName: UnityEngine.Object, UnityEngine
+ m_IntArgument: 0
+ m_FloatArgument: 0
+ m_StringArgument:
+ m_BoolArgument: 0
+ m_CallState: 2
+ _onErrorOccurred:
+ m_PersistentCalls:
+ m_Calls:
+ - m_Target: {fileID: 1076083700}
+ m_TargetAssemblyTypeName: HoloLensWithDlibFaceLandmarkDetectorExample.HLARHeadExample,
+ Assembly-CSharp
+ m_MethodName: OnWebCamTextureToMatHelperErrorOccurred
+ m_Mode: 0
+ m_Arguments:
+ m_ObjectArgument: {fileID: 0}
+ m_ObjectArgumentAssemblyTypeName: UnityEngine.Object, UnityEngine
+ m_IntArgument: 0
+ m_FloatArgument: 0
+ m_StringArgument:
+ m_BoolArgument: 0
+ m_CallState: 2
--- !u!1 &1108638276
GameObject:
m_ObjectHideFlags: 0
diff --git a/HLWithDlibExampleMRTK2/Assets/HoloLensWithDlibFaceLandmarkDetectorExample/HLFaceLandmarkDetectionExample/HLFaceLandmarkDetectionExample.cs b/HLWithDlibExampleMRTK2/Assets/HoloLensWithDlibFaceLandmarkDetectorExample/HLFaceLandmarkDetectionExample/HLFaceLandmarkDetectionExample.cs
index 3c517c4..4c50853 100644
--- a/HLWithDlibExampleMRTK2/Assets/HoloLensWithDlibFaceLandmarkDetectorExample/HLFaceLandmarkDetectionExample/HLFaceLandmarkDetectionExample.cs
+++ b/HLWithDlibExampleMRTK2/Assets/HoloLensWithDlibFaceLandmarkDetectorExample/HLFaceLandmarkDetectionExample/HLFaceLandmarkDetectionExample.cs
@@ -22,7 +22,7 @@ namespace HoloLensWithDlibFaceLandmarkDetectorExample
/// An example of face landmark detection using OpenCVForUnity and DlibLandmarkDetector on Hololens.
/// Referring to https://github.com/Itseez/opencv/blob/master/modules/objdetect/src/detection_based_tracker.cpp.
///
- [RequireComponent(typeof(HLCameraStreamToMatHelper), typeof(ImageOptimizationHelper))]
+ [RequireComponent(typeof(HLCameraStream2MatHelper), typeof(ImageOptimizationHelper))]
public class HLFaceLandmarkDetectionExample : MonoBehaviour
{
@@ -89,7 +89,7 @@ public class HLFaceLandmarkDetectionExample : MonoBehaviour
///
/// The webcam texture to mat helper.
///
- HLCameraStreamToMatHelper webCamTextureToMatHelper;
+ HLCameraStream2MatHelper webCamTextureToMatHelper;
///
/// The image optimization helper.
@@ -126,11 +126,6 @@ public class HLFaceLandmarkDetectionExample : MonoBehaviour
///
string dlibShapePredictorFileName = "DlibFaceLandmarkDetector/sp_human_face_68.dat";
- ///
- /// The dlib shape predictor file path.
- ///
- string dlibShapePredictorFilePath;
-
Scalar COLOR_WHITE = new Scalar(255, 255, 255, 255);
Scalar COLOR_GRAY = new Scalar(128, 128, 128, 255);
@@ -216,8 +211,18 @@ bool isDetectingInFrameArrivedThread
public Text debugStr;
+ string cascade_filepath;
+ string cascade4Thread_filepath;
+ string dlibShapePredictor_filepath;
+ string dlibShapePredictor4Thread_filepath;
+
+ ///
+ /// The CancellationTokenSource.
+ ///
+ CancellationTokenSource cts = new CancellationTokenSource();
+
// Use this for initialization
- protected void Start()
+ async void Start()
{
enableDownScaleToggle.isOn = enableDownScale;
useSeparateDetectionToggle.isOn = useSeparateDetection;
@@ -226,30 +231,72 @@ protected void Start()
displayDetectedFaceRectToggle.isOn = displayDetectedFaceRect;
imageOptimizationHelper = gameObject.GetComponent<ImageOptimizationHelper>();
- webCamTextureToMatHelper = gameObject.GetComponent<HLCameraStreamToMatHelper>();
+ webCamTextureToMatHelper = gameObject.GetComponent<HLCameraStream2MatHelper>();
#if WINDOWS_UWP && !DISABLE_HOLOLENSCAMSTREAM_API
webCamTextureToMatHelper.frameMatAcquired += OnFrameMatAcquired;
#endif
- webCamTextureToMatHelper.outputColorFormat = WebCamTextureToMatHelper.ColorFormat.GRAY;
- webCamTextureToMatHelper.Initialize();
rectangleTracker = new RectangleTracker();
+
+ // Asynchronously retrieves the readable file path from the StreamingAssets directory.
+ if (debugStr != null)
+ {
+ debugStr.text = "Preparing file access...";
+ }
+
+ cascade_filepath = await DlibFaceLandmarkDetector.UnityUtils.Utils.getFilePathAsyncTask("OpenCVForUnity/objdetect/lbpcascade_frontalface.xml", cancellationToken: cts.Token);
+ //cascade4Thread_filepath = await DlibFaceLandmarkDetector.UnityUtils.Utils.getFilePathAsyncTask("OpenCVForUnity/objdetect/haarcascade_frontalface_alt.xml", cancellationToken: cts.Token);
+ cascade4Thread_filepath = await DlibFaceLandmarkDetector.UnityUtils.Utils.getFilePathAsyncTask("OpenCVForUnity/objdetect/lbpcascade_frontalface.xml", cancellationToken: cts.Token);
dlibShapePredictorFileName = HoloLensWithDlibFaceLandmarkDetectorExample.dlibShapePredictorFileName;
- dlibShapePredictorFilePath = DlibFaceLandmarkDetector.UnityUtils.Utils.getFilePath(dlibShapePredictorFileName);
- if (string.IsNullOrEmpty(dlibShapePredictorFilePath))
+ dlibShapePredictor_filepath = await DlibFaceLandmarkDetector.UnityUtils.Utils.getFilePathAsyncTask(dlibShapePredictorFileName, cancellationToken: cts.Token);
+ dlibShapePredictor4Thread_filepath = await DlibFaceLandmarkDetector.UnityUtils.Utils.getFilePathAsyncTask("DlibFaceLandmarkDetector/sp_human_face_6.dat", cancellationToken: cts.Token);
+
+ if (debugStr != null)
{
- Debug.LogError("shape predictor file does not exist. Please copy from “DlibFaceLandmarkDetector/StreamingAssets/DlibFaceLandmarkDetector/” to “Assets/StreamingAssets/DlibFaceLandmarkDetector/” folder. ");
+ debugStr.text = "";
+ }
+
+ Run();
+ }
+
+ // Use this for initialization
+ void Run()
+ {
+ cascade = new CascadeClassifier();
+ cascade.load(cascade_filepath);
+#if !WINDOWS_UWP || UNITY_EDITOR
+ // "empty" method is not working on the UWP platform.
+ if (cascade.empty())
+ {
+ Debug.LogError("cascade file is not loaded. Please copy from “OpenCVForUnity/StreamingAssets/OpenCVForUnity/objdetect/” to “Assets/StreamingAssets/OpenCVForUnity/objdetect/” folder. ");
+ }
+#endif
+
+ cascade4Thread = new CascadeClassifier();
+ cascade4Thread.load(cascade4Thread_filepath);
+#if !WINDOWS_UWP || UNITY_EDITOR
+ // "empty" method is not working on the UWP platform.
+ if (cascade4Thread.empty())
+ {
+ Debug.LogError("cascade file is not loaded. Please copy from “OpenCVForUnity/StreamingAssets/OpenCVForUnity/objdetect/” to “Assets/StreamingAssets/OpenCVForUnity/objdetect/” folder. ");
}
- faceLandmarkDetector = new FaceLandmarkDetector(dlibShapePredictorFilePath);
+#endif
+ if (string.IsNullOrEmpty(dlibShapePredictor_filepath))
+ {
+ Debug.LogError("shape predictor file does not exist. Please copy from “DlibFaceLandmarkDetector/StreamingAssets/DlibFaceLandmarkDetector/” to “Assets/StreamingAssets/DlibFaceLandmarkDetector/” folder. ");
+ }
+ faceLandmarkDetector = new FaceLandmarkDetector(dlibShapePredictor_filepath);
- dlibShapePredictorFilePath = DlibFaceLandmarkDetector.UnityUtils.Utils.getFilePath("DlibFaceLandmarkDetector/sp_human_face_6.dat");
- if (string.IsNullOrEmpty(dlibShapePredictorFilePath))
+ if (string.IsNullOrEmpty(dlibShapePredictor4Thread_filepath))
{
Debug.LogError("shape predictor file does not exist. Please copy from “DlibFaceLandmarkDetector/StreamingAssets/DlibFaceLandmarkDetector/” to “Assets/StreamingAssets/DlibFaceLandmarkDetector/” folder. ");
}
- faceLandmarkDetector4Thread = new FaceLandmarkDetector(dlibShapePredictorFilePath);
+ faceLandmarkDetector4Thread = new FaceLandmarkDetector(dlibShapePredictor4Thread_filepath);
+
+ webCamTextureToMatHelper.outputColorFormat = Source2MatHelperColorFormat.GRAY;
+ webCamTextureToMatHelper.Initialize();
}
///
@@ -305,28 +352,7 @@ public void OnWebCamTextureToMatHelperInitialized()
quad_renderer.sharedMaterial.SetFloat("_VignetteScale", 0.0f);
-
- cascade = new CascadeClassifier();
- cascade.load(Utils.getFilePath("OpenCVForUnity/objdetect/lbpcascade_frontalface.xml"));
-#if !WINDOWS_UWP || UNITY_EDITOR
- // "empty" method is not working on the UWP platform.
- if (cascade.empty())
- {
- Debug.LogError("cascade file is not loaded. Please copy from “OpenCVForUnity/StreamingAssets/OpenCVForUnity/objdetect/” to “Assets/StreamingAssets/OpenCVForUnity/objdetect/” folder. ");
- }
-#endif
-
grayMat4Thread = new Mat();
- cascade4Thread = new CascadeClassifier();
- //cascade4Thread.load(Utils.getFilePath("OpenCVForUnity/objdetect/haarcascade_frontalface_alt.xml"));
- cascade4Thread.load(Utils.getFilePath("OpenCVForUnity/objdetect/lbpcascade_frontalface.xml"));
-#if !WINDOWS_UWP || UNITY_EDITOR
- // "empty" method is not working on the UWP platform.
- if (cascade4Thread.empty())
- {
- Debug.LogError("cascade file is not loaded. Please copy from “OpenCVForUnity/StreamingAssets/OpenCVForUnity/objdetect/” to “Assets/StreamingAssets/OpenCVForUnity/objdetect/” folder. ");
- }
-#endif
}
///
@@ -350,15 +376,9 @@ public void OnWebCamTextureToMatHelperDisposed()
ExecuteOnMainThread.Clear();
}
- if (cascade != null)
- cascade.Dispose();
-
if (grayMat4Thread != null)
grayMat4Thread.Dispose();
- if (cascade4Thread != null)
- cascade4Thread.Dispose();
-
rectangleTracker.Reset();
if (debugStr != null)
@@ -369,12 +389,13 @@ public void OnWebCamTextureToMatHelperDisposed()
}
///
- /// Raises the web cam texture to mat helper error occurred event.
+ /// Raises the webcam texture to mat helper error occurred event.
///
/// Error code.
- public void OnWebCamTextureToMatHelperErrorOccurred(WebCamTextureToMatHelper.ErrorCode errorCode)
+ /// Message.
+ public void OnWebCamTextureToMatHelperErrorOccurred(Source2MatHelperErrorCode errorCode, string message)
{
- Debug.Log("OnWebCamTextureToMatHelperErrorOccurred " + errorCode);
+ Debug.Log("OnWebCamTextureToMatHelperErrorOccurred " + errorCode + ":" + message);
}
#if WINDOWS_UWP && !DISABLE_HOLOLENSCAMSTREAM_API
@@ -1084,6 +1105,12 @@ void OnDestroy()
webCamTextureToMatHelper.Dispose();
imageOptimizationHelper.Dispose();
+ if (cascade != null)
+ cascade.Dispose();
+
+ if (cascade4Thread != null)
+ cascade4Thread.Dispose();
+
if (faceLandmarkDetector != null)
faceLandmarkDetector.Dispose();
@@ -1092,6 +1119,9 @@ void OnDestroy()
if (rectangleTracker != null)
rectangleTracker.Dispose();
+
+ if (cts != null)
+ cts.Dispose();
}
///
diff --git a/HLWithDlibExampleMRTK2/Assets/HoloLensWithDlibFaceLandmarkDetectorExample/HLFaceLandmarkDetectionExample/HLFaceLandmarkDetectionExample.unity b/HLWithDlibExampleMRTK2/Assets/HoloLensWithDlibFaceLandmarkDetectorExample/HLFaceLandmarkDetectionExample/HLFaceLandmarkDetectionExample.unity
index 3c19ece..cd7481f 100644
--- a/HLWithDlibExampleMRTK2/Assets/HoloLensWithDlibFaceLandmarkDetectorExample/HLFaceLandmarkDetectionExample/HLFaceLandmarkDetectionExample.unity
+++ b/HLWithDlibExampleMRTK2/Assets/HoloLensWithDlibFaceLandmarkDetectorExample/HLFaceLandmarkDetectionExample/HLFaceLandmarkDetectionExample.unity
@@ -2442,8 +2442,8 @@ GameObject:
- component: {fileID: 1076083698}
- component: {fileID: 1076083697}
- component: {fileID: 1076083696}
- - component: {fileID: 1076083695}
- component: {fileID: 1076083701}
+ - component: {fileID: 1076083702}
- component: {fileID: 1076083700}
m_Layer: 0
m_Name: HLFaceLandmarkDetectionExample
@@ -2452,74 +2452,6 @@ GameObject:
m_NavMeshLayer: 0
m_StaticEditorFlags: 0
m_IsActive: 1
---- !u!114 &1076083695
-MonoBehaviour:
- m_ObjectHideFlags: 0
- m_CorrespondingSourceObject: {fileID: 0}
- m_PrefabInstance: {fileID: 0}
- m_PrefabAsset: {fileID: 0}
- m_GameObject: {fileID: 1076083694}
- m_Enabled: 1
- m_EditorHideFlags: 0
- m_Script: {fileID: 11500000, guid: 0195895dd83eb204da131e4a7c4bcfe3, type: 3}
- m_Name:
- m_EditorClassIdentifier:
- _requestedDeviceName:
- _requestedWidth: 896
- _requestedHeight: 504
- _requestedIsFrontFacing: 0
- _requestedFPS: 30
- _rotate90Degree: 0
- _flipVertical: 0
- _flipHorizontal: 0
- _outputColorFormat: 0
- _timeoutFrameCount: 300
- onInitialized:
- m_PersistentCalls:
- m_Calls:
- - m_Target: {fileID: 1076083700}
- m_TargetAssemblyTypeName:
- m_MethodName: OnWebCamTextureToMatHelperInitialized
- m_Mode: 1
- m_Arguments:
- m_ObjectArgument: {fileID: 0}
- m_ObjectArgumentAssemblyTypeName: UnityEngine.Object, UnityEngine
- m_IntArgument: 0
- m_FloatArgument: 0
- m_StringArgument:
- m_BoolArgument: 0
- m_CallState: 2
- onDisposed:
- m_PersistentCalls:
- m_Calls:
- - m_Target: {fileID: 1076083700}
- m_TargetAssemblyTypeName:
- m_MethodName: OnWebCamTextureToMatHelperDisposed
- m_Mode: 1
- m_Arguments:
- m_ObjectArgument: {fileID: 0}
- m_ObjectArgumentAssemblyTypeName: UnityEngine.Object, UnityEngine
- m_IntArgument: 0
- m_FloatArgument: 0
- m_StringArgument:
- m_BoolArgument: 0
- m_CallState: 2
- onErrorOccurred:
- m_PersistentCalls:
- m_Calls:
- - m_Target: {fileID: 1076083700}
- m_TargetAssemblyTypeName:
- m_MethodName: OnWebCamTextureToMatHelperErrorOccurred
- m_Mode: 0
- m_Arguments:
- m_ObjectArgument: {fileID: 0}
- m_ObjectArgumentAssemblyTypeName: UnityEngine.Object, UnityEngine
- m_IntArgument: 0
- m_FloatArgument: 0
- m_StringArgument:
- m_BoolArgument: 0
- m_CallState: 2
- avoidAndroidFrontCameraLowLightIssue: 0
--- !u!23 &1076083696
MeshRenderer:
m_ObjectHideFlags: 0
@@ -2641,6 +2573,76 @@ MonoBehaviour:
m_EditorClassIdentifier:
_downscaleRatio: 2
_frameSkippingRatio: 1
+--- !u!114 &1076083702
+MonoBehaviour:
+ m_ObjectHideFlags: 0
+ m_CorrespondingSourceObject: {fileID: 0}
+ m_PrefabInstance: {fileID: 0}
+ m_PrefabAsset: {fileID: 0}
+ m_GameObject: {fileID: 1076083694}
+ m_Enabled: 1
+ m_EditorHideFlags: 0
+ m_Script: {fileID: 11500000, guid: a80f11196f5e6394aaec3da665ebfc5a, type: 3}
+ m_Name:
+ m_EditorClassIdentifier:
+ _requestedDeviceName:
+ _requestedWidth: 896
+ _requestedHeight: 504
+ _requestedIsFrontFacing: 0
+ _requestedFPS: 30
+ _rotate90Degree: 0
+ _flipVertical: 0
+ _flipHorizontal: 0
+ _outputColorFormat: 3
+ _timeoutFrameCount: 1500
+ _onInitialized:
+ m_PersistentCalls:
+ m_Calls:
+ - m_Target: {fileID: 1076083700}
+ m_TargetAssemblyTypeName: HoloLensWithDlibFaceLandmarkDetectorExample.HLFaceLandmarkDetectionExample,
+ Assembly-CSharp
+ m_MethodName: OnWebCamTextureToMatHelperInitialized
+ m_Mode: 1
+ m_Arguments:
+ m_ObjectArgument: {fileID: 0}
+ m_ObjectArgumentAssemblyTypeName: UnityEngine.Object, UnityEngine
+ m_IntArgument: 0
+ m_FloatArgument: 0
+ m_StringArgument:
+ m_BoolArgument: 0
+ m_CallState: 2
+ _onDisposed:
+ m_PersistentCalls:
+ m_Calls:
+ - m_Target: {fileID: 1076083700}
+ m_TargetAssemblyTypeName: HoloLensWithDlibFaceLandmarkDetectorExample.HLFaceLandmarkDetectionExample,
+ Assembly-CSharp
+ m_MethodName: OnWebCamTextureToMatHelperDisposed
+ m_Mode: 1
+ m_Arguments:
+ m_ObjectArgument: {fileID: 0}
+ m_ObjectArgumentAssemblyTypeName: UnityEngine.Object, UnityEngine
+ m_IntArgument: 0
+ m_FloatArgument: 0
+ m_StringArgument:
+ m_BoolArgument: 0
+ m_CallState: 2
+ _onErrorOccurred:
+ m_PersistentCalls:
+ m_Calls:
+ - m_Target: {fileID: 1076083700}
+ m_TargetAssemblyTypeName: HoloLensWithDlibFaceLandmarkDetectorExample.HLFaceLandmarkDetectionExample,
+ Assembly-CSharp
+ m_MethodName: OnWebCamTextureToMatHelperErrorOccurred
+ m_Mode: 0
+ m_Arguments:
+ m_ObjectArgument: {fileID: 0}
+ m_ObjectArgumentAssemblyTypeName: UnityEngine.Object, UnityEngine
+ m_IntArgument: 0
+ m_FloatArgument: 0
+ m_StringArgument:
+ m_BoolArgument: 0
+ m_CallState: 2
--- !u!1 &1197189831
GameObject:
m_ObjectHideFlags: 0
diff --git a/HLWithDlibExampleMRTK2/Assets/HoloLensWithDlibFaceLandmarkDetectorExample/HLPhotoCaptureExample/HLPhotoCaptureExample.cs b/HLWithDlibExampleMRTK2/Assets/HoloLensWithDlibFaceLandmarkDetectorExample/HLPhotoCaptureExample/HLPhotoCaptureExample.cs
index a07f5d9..528c9f5 100644
--- a/HLWithDlibExampleMRTK2/Assets/HoloLensWithDlibFaceLandmarkDetectorExample/HLPhotoCaptureExample/HLPhotoCaptureExample.cs
+++ b/HLWithDlibExampleMRTK2/Assets/HoloLensWithDlibFaceLandmarkDetectorExample/HLPhotoCaptureExample/HLPhotoCaptureExample.cs
@@ -7,6 +7,7 @@
using OpenCVForUnity.ImgprocModule;
using System.Collections;
using System.Linq;
+using System.Threading;
#if UNITY_2018_2_OR_NEWER
using UnityEngine.Windows.WebCam;
@@ -40,16 +41,33 @@ public class HLPhotoCaptureExample : MonoBehaviour
private string dlibShapePredictorFileName = "DlibFaceLandmarkDetector/sp_human_face_68.dat";
private string dlibShapePredictorFilePath;
- private IEnumerator Start()
+ ///
+ /// The CancellationTokenSource.
+ ///
+ CancellationTokenSource cts = new CancellationTokenSource();
+
+ // Use this for initialization
+ async void Start()
{
+ // Asynchronously retrieves the readable file path from the StreamingAssets directory.
+ if (text != null)
+ {
+ text.text = "Preparing file access...";
+ }
+
dlibShapePredictorFileName = HoloLensWithDlibFaceLandmarkDetectorExample.dlibShapePredictorFileName;
+ string dlibShapePredictor_filepath = await DlibFaceLandmarkDetector.UnityUtils.Utils.getFilePathAsyncTask(dlibShapePredictorFileName, cancellationToken: cts.Token);
- dlibShapePredictorFilePath = DlibFaceLandmarkDetector.UnityUtils.Utils.getFilePath(dlibShapePredictorFileName);
- if (string.IsNullOrEmpty(dlibShapePredictorFilePath))
+ if (text != null)
+ {
+ text.text = "";
+ }
+
+ if (string.IsNullOrEmpty(dlibShapePredictor_filepath))
{
Debug.LogError("shape predictor file does not exist. Please copy from “DlibFaceLandmarkDetector/StreamingAssets/DlibFaceLandmarkDetector/” to “Assets/StreamingAssets/DlibFaceLandmarkDetector/” folder. ");
}
- faceLandmarkDetector = new FaceLandmarkDetector(dlibShapePredictorFilePath);
+ faceLandmarkDetector = new FaceLandmarkDetector(dlibShapePredictor_filepath);
var resolutions = PhotoCapture.SupportedResolutions;
@@ -59,7 +77,7 @@ private IEnumerator Start()
{
text.text = "Resolutions not available. Did you provide web cam access?";
}
- yield return null;
+ return;
}
cameraResolution = resolutions.OrderByDescending((res) => res.width * res.height).First();
@@ -87,6 +105,9 @@ private void OnDestroy()
if (faceLandmarkDetector != null)
faceLandmarkDetector.Dispose();
+
+ if (cts != null)
+ cts.Dispose();
}
private void OnPhotoCaptureCreated(PhotoCapture captureObject)
@@ -177,8 +198,8 @@ private void OnPhotoCaptured(PhotoCapture.PhotoCaptureResult result, PhotoCaptur
Mat bgraMat = new Mat(targetTexture.height, targetTexture.width, CvType.CV_8UC4);
- // For BGRA or BGR format, use the fastTexture2DToMat method.
- OpenCVForUnity.UnityUtils.Utils.fastTexture2DToMat(targetTexture, bgraMat);
+ // For BGRA or BGR format, use the texture2DToMatRaw method.
+ OpenCVForUnity.UnityUtils.Utils.texture2DToMatRaw(targetTexture, bgraMat);
OpenCVForUnityUtils.SetImage(faceLandmarkDetector, bgraMat);
@@ -206,8 +227,8 @@ private void OnPhotoCaptured(PhotoCapture.PhotoCaptureResult result, PhotoCaptur
Imgproc.putText(bgraMat, targetTexture.format + " W:" + bgraMat.width() + " H:" + bgraMat.height(), new Point(5, bgraMat.rows() - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 1.5, new Scalar(255, 0, 0, 255), 2, Imgproc.LINE_AA, false);
- // For BGRA or BGR format, use the fastMatToTexture2D method.
- OpenCVForUnity.UnityUtils.Utils.fastMatToTexture2D(bgraMat, targetTexture);
+ // For BGRA or BGR format, use the matToTexture2DRaw method.
+ OpenCVForUnity.UnityUtils.Utils.matToTexture2DRaw(bgraMat, targetTexture);
bgraMat.Dispose();
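
The photo-capture path above now uses the renamed raw-copy helpers. The sketch below shows the intended round trip, assuming the Texture2D format already corresponds to the Mat type and size (here BGRA32 and CV_8UC4, matching the "For BGRA or BGR format" comments in the change); the class and method names are illustrative.

    using OpenCVForUnity.CoreModule;
    using UnityEngine;

    public static class RawTextureConversionSketch
    {
        public static void RoundTrip(Texture2D targetTexture)
        {
            // Mat dimensions must match the texture (rows = height, cols = width).
            Mat bgraMat = new Mat(targetTexture.height, targetTexture.width, CvType.CV_8UC4);

            OpenCVForUnity.UnityUtils.Utils.texture2DToMatRaw(targetTexture, bgraMat);

            // ... run OpenCV / Dlib processing on bgraMat here ...

            OpenCVForUnity.UnityUtils.Utils.matToTexture2DRaw(bgraMat, targetTexture);
            bgraMat.Dispose();
        }
    }
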
diff --git a/HLWithDlibExampleMRTK2/Assets/HoloLensWithDlibFaceLandmarkDetectorExample/Scripts/Utils/HLCameraStreamToMatHelper.cs b/HLWithDlibExampleMRTK2/Assets/HoloLensWithDlibFaceLandmarkDetectorExample/Scripts/Utils/HLCameraStream2MatHelper.cs
similarity index 81%
rename from HLWithDlibExampleMRTK2/Assets/HoloLensWithDlibFaceLandmarkDetectorExample/Scripts/Utils/HLCameraStreamToMatHelper.cs
rename to HLWithDlibExampleMRTK2/Assets/HoloLensWithDlibFaceLandmarkDetectorExample/Scripts/Utils/HLCameraStream2MatHelper.cs
index 4436838..ce58b32 100644
--- a/HLWithDlibExampleMRTK2/Assets/HoloLensWithDlibFaceLandmarkDetectorExample/Scripts/Utils/HLCameraStreamToMatHelper.cs
+++ b/HLWithDlibExampleMRTK2/Assets/HoloLensWithDlibFaceLandmarkDetectorExample/Scripts/Utils/HLCameraStream2MatHelper.cs
@@ -25,30 +25,70 @@ namespace HoloLensWithOpenCVForUnity.UnityUtils.Helper
/// This is called every time there is a new frame image mat available.
/// The Mat object's type is 'CV_8UC4' or 'CV_8UC3' or 'CV_8UC1' (ColorFormat is determined by the outputColorFormat setting).
///
- /// The recently captured frame image mat.
+ /// The recently captured frame image mat.
/// The projection matrices.
/// The camera to world matrices.
/// The camera intrinsics.
public delegate void FrameMatAcquiredCallback(Mat mat, Matrix4x4 projectionMatrix, Matrix4x4 cameraToWorldMatrix, CameraIntrinsics cameraIntrinsics);
///
- /// Hololens camera stream to mat helper.
- /// v 1.0.7
- /// Depends on EnoxSoftware/HoloLensCameraStream (https://github.com/EnoxSoftware/HoloLensCameraStream).
- /// Depends on OpenCVForUnity version 2.4.1 (WebCamTextureToMatHelper v 1.1.2) or later.
+ /// A helper component class for obtaining camera frames from HoloLensCameraStream and converting them to OpenCV Mat format in real-time.
+ ///
+ ///
+ /// The HLCameraStream2MatHelper class captures video frames from a device's camera using HoloLensCameraStream
+ /// and converts each frame to an OpenCV Mat object every frame.
+ ///
+ /// This component is particularly useful for image processing tasks in Unity, such as computer vision applications,
+ /// where real-time camera input in Mat format is required. It enables seamless integration of OpenCV-based
+ /// image processing algorithms with HoloLens camera input.
+ ///
+ ///
+ /// FrameMatAcquiredCallback:
+ /// The FrameMatAcquiredCallback delegate is invoked each time a new frame is captured and converted to a Mat object.
+ /// This delegate wraps the HoloLensCameraStream.VideoCapture.FrameSampleAcquired callback to provide the frame data
+ /// in Mat format, along with the projection matrix, camera-to-world matrix, and camera intrinsics for the captured frame.
+ ///
+ ///
+ /// Usage Notes:
+ ///
+ /// -
+ /// The callback is executed outside of the Unity application's main thread. To safely use Unity API functions or
+ /// update Unity objects within the callback, use UnityEngine.WSA.Application.InvokeOnAppThread(() => { ... })
+ /// to dispatch operations to the application thread.
+ ///
+ /// -
+ /// The Mat object passed to the callback is managed by the helper class and should not be stored for long-term use.
+ /// Copy the data if further processing is required beyond the callback's scope.
+ ///
+ ///
+ ///
///
- /// By setting outputColorFormat to BGRA or GRAY, processing that does not include extra color conversion is performed.
+ /// Note: By setting outputColorFormat to BGRA or GRAY, processing that does not include extra color conversion is performed.
+ /// Note: Depends on EnoxSoftware/HoloLensCameraStream.
+ /// Note: Depends on OpenCVForUnity version 2.6.4 or later.
+ ///
+ ///
+ /// Attach this component to a GameObject and call GetMat() to retrieve the latest camera frame in Mat format.
+ /// The helper class manages camera start/stop operations and frame updates internally.
+ ///
///
+ ///
/// Usage:
/// Add Define Symbols: "Open File > Build Settings > Player Settings > Other Settings" and add the following to Scripting Define Symbols depending on the XR system used in your project.
- /// This is the setup needed to get the correct values from the TryGetCameraToWorldMatrix method.
- /// Legacy built-in XR ; "BUILTIN_XR"
+ ///
+ /// {
+ /// Legacy built-in XR : "BUILTIN_XR"
/// XR Plugin Management(Windows Mixed Reality) : "XR_PLUGIN_WINDOWSMR"
/// XR Plugin Management(OpenXR) : "XR_PLUGIN_OPENXR"
+ /// }
+ ///
+ ///
///
- ///
+ ///
/// Combination of camera frame size and frame rate that can be acquired on Hololens. (width x height : framerate)
- /// (See https://learn.microsoft.com/en-us/windows/mixed-reality/develop/advanced-concepts/locatable-camera-overview)
+ /// https://learn.microsoft.com/en-us/windows/mixed-reality/develop/advanced-concepts/locatable-camera-overview
+ ///
+ /// {
///
/// Hololens1
///
@@ -126,8 +166,10 @@ namespace HoloLensWithOpenCVForUnity.UnityUtils.Helper
/// 424x240 : 30
/// 424x240 : 15
///
- ///
- public class HLCameraStreamToMatHelper : WebCamTextureToMatHelper
+ /// }
+ ///
+ ///
+ public class HLCameraStream2MatHelper : WebCamTexture2MatHelper
{
///
/// This will be called whenever a new camera frame image available is converted to Mat.
@@ -140,7 +182,7 @@ public class HLCameraStreamToMatHelper : WebCamTextureToMatHelper
protected CameraIntrinsics cameraIntrinsics;
///
- /// Returns the camera intrinsics.
+ /// Return the camera intrinsics.
///
/// The camera intrinsics.
public virtual CameraIntrinsics GetCameraIntrinsics()
@@ -160,6 +202,8 @@ public override string requestedDeviceName
_requestedDeviceName = value;
if (hasInitDone)
Initialize();
+ else if (isInitWaiting)
+ Initialize(autoPlayAfterInitialize);
}
}
}
@@ -175,6 +219,8 @@ public override int requestedWidth
_requestedWidth = _value;
if (hasInitDone)
Initialize();
+ else if (isInitWaiting)
+ Initialize(autoPlayAfterInitialize);
}
}
}
@@ -190,6 +236,8 @@ public override int requestedHeight
_requestedHeight = _value;
if (hasInitDone)
Initialize();
+ else if (isInitWaiting)
+ Initialize(autoPlayAfterInitialize);
}
}
}
@@ -203,7 +251,9 @@ public override bool requestedIsFrontFacing
{
_requestedIsFrontFacing = value;
if (hasInitDone)
- Initialize(_requestedIsFrontFacing, requestedFPS, rotate90Degree);
+ Initialize(_requestedIsFrontFacing, requestedFPS, rotate90Degree, IsPlaying());
+ else if (isInitWaiting)
+ Initialize(_requestedIsFrontFacing, requestedFPS, rotate90Degree, autoPlayAfterInitialize);
}
}
}
@@ -218,11 +268,13 @@ public override bool rotate90Degree
_rotate90Degree = value;
if (hasInitDone)
Initialize();
+ else if (isInitWaiting)
+ Initialize(autoPlayAfterInitialize);
}
}
}
- public override ColorFormat outputColorFormat
+ public override Source2MatHelperColorFormat outputColorFormat
{
get { return _outputColorFormat; }
set
@@ -232,6 +284,8 @@ public override ColorFormat outputColorFormat
_outputColorFormat = value;
if (hasInitDone)
Initialize();
+ else if (isInitWaiting)
+ Initialize(autoPlayAfterInitialize);
}
}
}
@@ -243,13 +297,13 @@ public override float requestedFPS
{
_requestedFPS = Mathf.Clamp(value, -1f, float.MaxValue);
if (hasInitDone)
- {
Initialize();
- }
+ else if (isInitWaiting)
+ Initialize(autoPlayAfterInitialize);
}
}
- new protected ColorFormat baseColorFormat = ColorFormat.BGRA;
+ new protected Source2MatHelperColorFormat baseColorFormat = Source2MatHelperColorFormat.BGRA;
protected System.Object lockObject = new System.Object();
protected System.Object matrixLockObject = new System.Object();
@@ -426,7 +480,7 @@ protected virtual void OnFrameSampleAcquired(VideoCaptureSample sample)
if (hasInitEventCompleted && frameMatAcquired != null)
{
- Mat mat = new Mat(frameSampleHeight, frameSampleWidth, CvType.CV_8UC(Channels(outputColorFormat)));
+ Mat mat = new Mat(frameSampleHeight, frameSampleWidth, CvType.CV_8UC(Source2MatHelperUtils.Channels(outputColorFormat)));
if (baseColorFormat == outputColorFormat)
{
@@ -434,14 +488,14 @@ protected virtual void OnFrameSampleAcquired(VideoCaptureSample sample)
}
else
{
- Mat baseMat = new Mat(frameSampleHeight, frameSampleWidth, CvType.CV_8UC(Channels(baseColorFormat)));
+ Mat baseMat = new Mat(frameSampleHeight, frameSampleWidth, CvType.CV_8UC(Source2MatHelperUtils.Channels(baseColorFormat)));
MatUtils.copyToMat(latestImageBytes, baseMat);
- Imgproc.cvtColor(baseMat, mat, ColorConversionCodes(baseColorFormat, outputColorFormat));
+ Imgproc.cvtColor(baseMat, mat, Source2MatHelperUtils.ColorConversionCodes(baseColorFormat, outputColorFormat));
}
if (_rotate90Degree)
{
- Mat rotatedFrameMat = new Mat(frameSampleWidth, frameSampleHeight, CvType.CV_8UC(Channels(outputColorFormat)));
+ Mat rotatedFrameMat = new Mat(frameSampleWidth, frameSampleHeight, CvType.CV_8UC(Source2MatHelperUtils.Channels(outputColorFormat)));
Core.rotate(mat, rotatedFrameMat, Core.ROTATE_90_CLOCKWISE);
mat.Dispose();
@@ -470,7 +524,7 @@ protected virtual HoloLensCameraStream.CameraParameters CreateCameraParams(HoloL
cameraParams.cameraResolutionHeight = resolution.height;
cameraParams.cameraResolutionWidth = resolution.width;
cameraParams.frameRate = Mathf.RoundToInt(frameRate);
- cameraParams.pixelFormat = (outputColorFormat == ColorFormat.GRAY) ? CapturePixelFormat.NV12 : CapturePixelFormat.BGRA32;
+ cameraParams.pixelFormat = (outputColorFormat == Source2MatHelperColorFormat.GRAY) ? CapturePixelFormat.NV12 : CapturePixelFormat.BGRA32;
cameraParams.rotateImage180Degrees = false;
cameraParams.enableHolograms = false;
cameraParams.enableVideoStabilization = false;
@@ -481,7 +535,7 @@ protected virtual HoloLensCameraStream.CameraParameters CreateCameraParams(HoloL
#endif
///
- /// Returns the video capture.
+ /// Return the video capture.
///
/// The video capture.
public virtual HoloLensCameraStream.VideoCapture GetVideoCapture()
@@ -574,7 +628,7 @@ protected override void OnDestroy()
}
///
- /// Initializes this instance by coroutine.
+ /// Initialize this instance by coroutine.
///
protected override IEnumerator _Initialize()
{
@@ -595,6 +649,10 @@ protected override IEnumerator _Initialize()
isInitWaiting = true;
+ // Wait one frame before starting initialization process
+ yield return null;
+
+
while (isChangeVideoModeWaiting)
{
yield return null;
@@ -615,7 +673,7 @@ protected override IEnumerator _Initialize()
CancelInitCoroutine();
if (onErrorOccurred != null)
- onErrorOccurred.Invoke(ErrorCode.UNKNOWN);
+ onErrorOccurred.Invoke(Source2MatHelperErrorCode.UNKNOWN, "!result2.success");
}
else
{
@@ -660,7 +718,7 @@ protected override IEnumerator _Initialize()
CancelInitCoroutine();
if (onErrorOccurred != null)
- onErrorOccurred.Invoke(ErrorCode.CAMERA_DEVICE_NOT_EXIST);
+ onErrorOccurred.Invoke(Source2MatHelperErrorCode.CAMERA_DEVICE_NOT_EXIST, "Did not find a video capture object. You may not be using the HoloLens.");
return;
}
@@ -687,7 +745,7 @@ protected override IEnumerator _Initialize()
CancelInitCoroutine();
if (onErrorOccurred != null)
- onErrorOccurred.Invoke(ErrorCode.UNKNOWN);
+ onErrorOccurred.Invoke(Source2MatHelperErrorCode.UNKNOWN, "!result.success");
}
else
{
@@ -709,11 +767,11 @@ protected override IEnumerator _Initialize()
}
else if (didUpdateThisFrame)
{
- Debug.Log("HololensCameraStreamToMatHelper:: " + "name:" + "" + " width:" + frameSampleWidth + " height:" + frameSampleHeight + " fps:" + cameraParams.frameRate);
+ Debug.Log("HLCameraStream2MatHelper:: " + "name:" + "" + " width:" + frameSampleWidth + " height:" + frameSampleHeight + " fps:" + cameraParams.frameRate);
- baseColorFormat = (outputColorFormat == ColorFormat.GRAY) ? ColorFormat.GRAY : ColorFormat.BGRA;
+ baseColorFormat = (outputColorFormat == Source2MatHelperColorFormat.GRAY) ? Source2MatHelperColorFormat.GRAY : Source2MatHelperColorFormat.BGRA;
- baseMat = new Mat(frameSampleHeight, frameSampleWidth, CvType.CV_8UC(Channels(baseColorFormat)));
+ baseMat = new Mat(frameSampleHeight, frameSampleWidth, CvType.CV_8UC(Source2MatHelperUtils.Channels(baseColorFormat)));
if (baseColorFormat == outputColorFormat)
{
@@ -721,11 +779,11 @@ protected override IEnumerator _Initialize()
}
else
{
- frameMat = new Mat(baseMat.rows(), baseMat.cols(), CvType.CV_8UC(Channels(outputColorFormat)));
+ frameMat = new Mat(baseMat.rows(), baseMat.cols(), CvType.CV_8UC(Source2MatHelperUtils.Channels(outputColorFormat)));
}
if (_rotate90Degree)
- rotatedFrameMat = new Mat(frameMat.cols(), frameMat.rows(), CvType.CV_8UC(Channels(outputColorFormat)));
+ rotatedFrameMat = new Mat(frameMat.cols(), frameMat.rows(), CvType.CV_8UC(Source2MatHelperUtils.Channels(outputColorFormat)));
isInitWaiting = false;
hasInitDone = true;
@@ -763,7 +821,7 @@ protected override IEnumerator _Initialize()
initCoroutine = null;
if (onErrorOccurred != null)
- onErrorOccurred.Invoke(ErrorCode.TIMEOUT);
+ onErrorOccurred.Invoke(Source2MatHelperErrorCode.TIMEOUT, string.Empty);
}
else
{
@@ -771,13 +829,13 @@ protected override IEnumerator _Initialize()
initCoroutine = null;
if (onErrorOccurred != null)
- onErrorOccurred.Invoke(ErrorCode.TIMEOUT);
+ onErrorOccurred.Invoke(Source2MatHelperErrorCode.TIMEOUT, string.Empty);
}
}
}
///
- /// Indicates whether this instance has been initialized.
+ /// Indicate whether this instance has been initialized.
///
/// true, if this instance has been initialized, false otherwise.
public override bool IsInitialized()
@@ -786,7 +844,7 @@ public override bool IsInitialized()
}
///
- /// Starts the camera.
+ /// Start the active camera.
///
public override void Play()
{
@@ -811,7 +869,7 @@ protected virtual IEnumerator _Play()
}
///
- /// Pauses the active camera.
+ /// Pause the active camera.
///
public override void Pause()
{
@@ -820,7 +878,7 @@ public override void Pause()
}
///
- /// Stops the active camera.
+ /// Stop the active camera.
///
public override void Stop()
{
@@ -845,7 +903,7 @@ protected virtual IEnumerator _Stop()
}
///
- /// Indicates whether the active camera is currently playing.
+ /// Indicate whether the active camera is currently playing.
///
/// true, if the active camera is playing, false otherwise.
public override bool IsPlaying()
@@ -857,7 +915,7 @@ public override bool IsPlaying()
}
///
- /// Indicates whether the active camera device is currently front facng.
+ /// Indicate whether the active camera device is currently front facing.
///
/// true, if the active camera device is front facing, false otherwise.
public override bool IsFrontFacing()
@@ -866,7 +924,7 @@ public override bool IsFrontFacing()
}
///
- /// Returns the active camera device name.
+ /// Return the active camera device name.
///
/// The active camera device name.
public override string GetDeviceName()
@@ -875,7 +933,7 @@ public override string GetDeviceName()
}
///
- /// Returns the active camera width.
+ /// Return the active camera width.
///
/// The active camera width.
public override int GetWidth()
@@ -886,7 +944,7 @@ public override int GetWidth()
}
///
- /// Returns the active camera height.
+ /// Return the active camera height.
///
/// The active camera height.
public override int GetHeight()
@@ -897,7 +955,7 @@ public override int GetHeight()
}
///
- /// Returns the active camera framerate.
+ /// Return the active camera framerate.
///
/// The active camera framerate.
public override float GetFPS()
@@ -906,16 +964,16 @@ public override float GetFPS()
}
///
- /// Returns the webcam texture.
+ /// Return the active WebcamTexture.
///
- /// The webcam texture.
+ /// The active WebcamTexture.
public override WebCamTexture GetWebCamTexture()
{
return null;
}
///
- /// Returns the camera to world matrix.
+ /// Return the camera to world matrix.
///
/// The camera to world matrix.
public override Matrix4x4 GetCameraToWorldMatrix()
@@ -924,7 +982,7 @@ public override Matrix4x4 GetCameraToWorldMatrix()
}
///
- /// Returns the projection matrix matrix.
+ /// Return the projection matrix.
///
/// The projection matrix.
public override Matrix4x4 GetProjectionMatrix()
@@ -933,7 +991,7 @@ public override Matrix4x4 GetProjectionMatrix()
}
///
- /// Indicates whether the video buffer of the frame has been updated.
+ /// Indicate whether the video buffer of the frame has been updated.
///
/// true, if the video buffer has been updated false otherwise.
public override bool DidUpdateThisFrame()
@@ -945,10 +1003,12 @@ public override bool DidUpdateThisFrame()
}
///
- /// Gets the mat of the current frame.
+ /// Get the mat of the current frame.
+ ///
+ ///
/// The Mat object's type is 'CV_8UC4' or 'CV_8UC3' or 'CV_8UC1' (ColorFormat is determined by the outputColorFormat setting).
/// Please do not dispose of the returned mat as it will be reused.
- ///
+ ///
/// The mat of the current frame.
public override Mat GetMat()
{
@@ -964,7 +1024,7 @@ public override Mat GetMat()
else
{
MatUtils.copyToMat(latestImageBytes, baseMat);
- Imgproc.cvtColor(baseMat, frameMat, ColorConversionCodes(baseColorFormat, outputColorFormat));
+ Imgproc.cvtColor(baseMat, frameMat, Source2MatHelperUtils.ColorConversionCodes(baseColorFormat, outputColorFormat));
}
if (rotatedFrameMat != null)
@@ -983,7 +1043,7 @@ public override Mat GetMat()
}
///
- /// Flips the mat.
+ /// Flip the mat.
///
/// Mat.
protected override void FlipMat(Mat mat, bool flipVertical, bool flipHorizontal)
@@ -1067,12 +1127,12 @@ protected override void ReleaseResources()
}
///
- /// Releases all resource used by the object.
+ /// Releases all resources used by the HLCameraStream2MatHelper object.
///
- /// Call when you are finished using the . The
- /// method leaves the in an unusable state. After
- /// calling , you must release all references to the so
- /// the garbage collector can reclaim the memory that the was occupying.
+ /// Call Dispose when you are finished using the HLCameraStream2MatHelper. The
+ /// Dispose method leaves the HLCameraStream2MatHelper in an unusable state. After
+ /// calling Dispose, you must release all references to the HLCameraStream2MatHelper so
+ /// the garbage collector can reclaim the memory that the HLCameraStream2MatHelper was occupying.
public override void Dispose()
{
if (colors != null)
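
The remarks added to HLCameraStream2MatHelper above note that FrameMatAcquiredCallback fires outside Unity's main thread. The sketch below shows one way to subscribe, assuming the helper sits on the same GameObject; the class name and the body of the marshaled lambda are illustrative, while the event name, delegate signature, and #if guard come from this change.

    using HoloLensWithOpenCVForUnity.UnityUtils.Helper;
    using OpenCVForUnity.CoreModule;
    using UnityEngine;
    #if WINDOWS_UWP && !DISABLE_HOLOLENSCAMSTREAM_API
    using HoloLensCameraStream;
    #endif

    public class FrameMatAcquiredSketch : MonoBehaviour
    {
        HLCameraStream2MatHelper helper;

        void Start()
        {
            helper = gameObject.GetComponent<HLCameraStream2MatHelper>();
    #if WINDOWS_UWP && !DISABLE_HOLOLENSCAMSTREAM_API
            helper.frameMatAcquired += OnFrameMatAcquired;
    #endif
            helper.Initialize();
        }

    #if WINDOWS_UWP && !DISABLE_HOLOLENSCAMSTREAM_API
        void OnFrameMatAcquired(Mat mat, Matrix4x4 projectionMatrix, Matrix4x4 cameraToWorldMatrix, CameraIntrinsics cameraIntrinsics)
        {
            // Runs off the main thread; copy anything from mat that must outlive this callback
            // (see the Mat lifetime note in the remarks above), then marshal Unity API work back.
            UnityEngine.WSA.Application.InvokeOnAppThread(() =>
            {
                // Update textures, transforms, or UI here.
            }, false);
        }
    #endif
    }
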
diff --git a/HLWithDlibExampleMRTK2/Assets/HoloLensWithDlibFaceLandmarkDetectorExample/Scripts/Utils/HLCameraStreamToMatHelper.cs.meta b/HLWithDlibExampleMRTK2/Assets/HoloLensWithDlibFaceLandmarkDetectorExample/Scripts/Utils/HLCameraStream2MatHelper.cs.meta
similarity index 76%
rename from HLWithDlibExampleMRTK2/Assets/HoloLensWithDlibFaceLandmarkDetectorExample/Scripts/Utils/HLCameraStreamToMatHelper.cs.meta
rename to HLWithDlibExampleMRTK2/Assets/HoloLensWithDlibFaceLandmarkDetectorExample/Scripts/Utils/HLCameraStream2MatHelper.cs.meta
index a8fd2e9..5c411c8 100644
--- a/HLWithDlibExampleMRTK2/Assets/HoloLensWithDlibFaceLandmarkDetectorExample/Scripts/Utils/HLCameraStreamToMatHelper.cs.meta
+++ b/HLWithDlibExampleMRTK2/Assets/HoloLensWithDlibFaceLandmarkDetectorExample/Scripts/Utils/HLCameraStream2MatHelper.cs.meta
@@ -1,6 +1,6 @@
fileFormatVersion: 2
-guid: 0195895dd83eb204da131e4a7c4bcfe3
-timeCreated: 1518188247
+guid: a80f11196f5e6394aaec3da665ebfc5a
+timeCreated: 1509282299
licenseType: Free
MonoImporter:
serializedVersion: 2
diff --git a/HLWithDlibExampleMRTK2/Assets/HoloLensWithDlibFaceLandmarkDetectorExample/Scripts/Utils/OpenCVForUnityUtils.cs b/HLWithDlibExampleMRTK2/Assets/HoloLensWithDlibFaceLandmarkDetectorExample/Scripts/Utils/OpenCVForUnityUtils.cs
index 7a81a2d..34ea796 100644
--- a/HLWithDlibExampleMRTK2/Assets/HoloLensWithDlibFaceLandmarkDetectorExample/Scripts/Utils/OpenCVForUnityUtils.cs
+++ b/HLWithDlibExampleMRTK2/Assets/HoloLensWithDlibFaceLandmarkDetectorExample/Scripts/Utils/OpenCVForUnityUtils.cs
@@ -1,8 +1,10 @@
using DlibFaceLandmarkDetector;
using OpenCVForUnity.CoreModule;
using OpenCVForUnity.ImgprocModule;
+using OpenCVForUnity.UnityUtils;
using System;
using System.Collections.Generic;
+using System.Runtime.InteropServices;
using UnityEngine;
namespace HoloLensWithDlibFaceLandmarkDetectorExample
@@ -13,19 +15,33 @@ namespace HoloLensWithDlibFaceLandmarkDetectorExample
public static class OpenCVForUnityUtils
{
///
- /// Sets a image.
+ /// Sets the image for the specified using a given object.
///
- /// Face landmark detector.
- /// Image mat.
+ ///
+ /// An instance of that processes the specified image.
+ ///
+ ///
+ /// A object representing the image to set. The matrix must be continuous and valid for processing.
+ ///
+ ///
+ /// Thrown if or is null.
+ ///
+ ///
+ /// Thrown if is not continuous. Ensure imgMat.isContinuous() == true.
+ ///
+ ///
+ /// This method directly assigns the data pointer, width, height, and element size to the
+ /// specified . It avoids additional memory allocations by reusing the existing data.
+ ///
public static void SetImage(FaceLandmarkDetector faceLandmarkDetector, Mat imgMat)
{
if (faceLandmarkDetector == null)
- throw new ArgumentNullException("faceLandmarkDetector");
+ throw new ArgumentNullException(nameof(faceLandmarkDetector));
if (faceLandmarkDetector != null)
faceLandmarkDetector.ThrowIfDisposed();
if (imgMat == null)
- throw new ArgumentNullException("imgMat");
+ throw new ArgumentNullException(nameof(imgMat));
if (imgMat != null)
imgMat.ThrowIfDisposed();
if (!imgMat.isContinuous())
@@ -35,40 +51,113 @@ public static void SetImage(FaceLandmarkDetector faceLandmarkDetector, Mat imgMa
}
///
- /// Draws a face rect.
+ /// Draws a rectangle on the specified image to indicate a detected face region.
///
- /// Image mat.
- /// Rect.
- /// Color.
- /// Thickness.
+ /// The image on which to draw the rectangle.
+ /// The UnityEngine.Rect defining the area to highlight.
+ /// The color of the rectangle border.
+ /// The thickness of the rectangle border.
public static void DrawFaceRect(Mat imgMat, UnityEngine.Rect rect, Scalar color, int thickness)
{
Imgproc.rectangle(imgMat, new Point(rect.xMin, rect.yMin), new Point(rect.xMax, rect.yMax), color, thickness);
}
///
- /// Draws a face rect.
+ /// Draws a rectangle on the specified image to indicate a detected face region.
///
- /// Image mat.
- /// Rect.
- /// Color.
- /// Thickness.
+ /// The image on which to draw the rectangle.
+ /// The UnityEngine.Rect defining the area to highlight.
+ /// The color of the rectangle border.
+ /// The thickness of the rectangle border.
+ public static void DrawFaceRect(Mat imgMat, UnityEngine.Rect rect, in Vec4d color, int thickness)
+ {
+ Imgproc.rectangle(imgMat, new Vec2d(rect.xMin, rect.yMin), new Vec2d(rect.xMax, rect.yMax), color, thickness);
+ }
+
+ ///
+ /// Draws a rectangle on the specified image to indicate a detected face region.
+ ///
+ /// The image on which to draw the rectangle.
+ /// The UnityEngine.Rect defining the area to highlight.
+ /// The color of the rectangle border.
+ /// The thickness of the rectangle border.
+ public static void DrawFaceRect(Mat imgMat, UnityEngine.Rect rect, in (double v0, double v1, double v2, double v3) color, int thickness)
+ {
+ Imgproc.rectangle(imgMat, (rect.xMin, rect.yMin), (rect.xMax, rect.yMax), color, thickness);
+ }
+
+ ///
+ /// Draws a rectangle on the specified image to indicate a detected face region.
+ ///
+ /// The image on which to draw the rectangle.
+ /// The OpenCVForUnity.CoreModule.Rect defining the area to highlight.
+ /// The color of the rectangle border.
+ /// The thickness of the rectangle border.
public static void DrawFaceRect(Mat imgMat, OpenCVForUnity.CoreModule.Rect rect, Scalar color, int thickness)
{
Imgproc.rectangle(imgMat, rect, color, thickness);
}
///
- /// Draws a face rect.
+ /// Draws a rectangle on the specified image to indicate a detected face region.
+ ///
+ /// The image on which to draw the rectangle.
+ /// The Vec4i defining the area to highlight.
+ /// The color of the rectangle border.
+ /// The thickness of the rectangle border.
+ public static void DrawFaceRect(Mat imgMat, in Vec4i rect, in Vec4d color, int thickness)
+ {
+ Imgproc.rectangle(imgMat, rect, color, thickness);
+ }
+
+ ///
+ /// Draws a rectangle on the specified image to indicate a detected face region.
+ ///
+ /// The image on which to draw the rectangle.
+ /// The Vec4d defining the area to highlight.
+ /// The color of the rectangle border.
+ /// The thickness of the rectangle border.
+ public static void DrawFaceRect(Mat imgMat, in Vec4d rect, in Vec4d color, int thickness)
+ {
+ Imgproc.rectangle(imgMat, rect.ToVec4i(), color, thickness);
+ }
+
+ ///
+ /// Draws a rectangle on the specified image to indicate a detected face region.
+ ///
+ /// The image on which to draw the rectangle.
+ /// The (x, y, width, height) tuple defining the area to highlight.
+ /// The color of the rectangle border.
+ /// The thickness of the rectangle border.
+ public static void DrawFaceRect(Mat imgMat, in (int x, int y, int width, int height) rect, in (double v0, double v1, double v2, double v3) color, int thickness)
+ {
+ Imgproc.rectangle(imgMat, rect, color, thickness);
+ }
+
+ ///
+ /// Draws a rectangle on the specified image to indicate a detected face region.
+ ///
+ /// The image on which to draw the rectangle.
+ /// The (x, y, width, height) tuple defining the area to highlight.
+ /// The color of the rectangle border.
+ /// The thickness of the rectangle border.
+ public static void DrawFaceRect(Mat imgMat, in (double x, double y, double width, double height) rect, in (double v0, double v1, double v2, double v3) color, int thickness)
+ {
+ Imgproc.rectangle(imgMat, ((int)rect.x, (int)rect.y, (int)rect.width, (int)rect.height), color, thickness);
+ }
+
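As a rough usage sketch (the enclosing static utility class is referred to here as OpenCVForUnityUtils, imgMat is assumed to hold a camera frame Mat, and the Vec4i/Vec4d constructors are assumed to take their four components), the new value-type overloads can be called without allocating UnityEngine.Rect or Scalar instances:

    // Hypothetical sketch, not part of this patch: draw a detected region with the tuple-based overload.
    (double x, double y, double width, double height) faceRegion = (120.0, 80.0, 200.0, 200.0);
    OpenCVForUnityUtils.DrawFaceRect(imgMat, faceRegion, (0.0, 255.0, 0.0, 255.0), 2);

    // The Vec4i/Vec4d overload accepts the same data as OpenCVForUnity vector structs.
    OpenCVForUnityUtils.DrawFaceRect(imgMat, new Vec4i(120, 80, 200, 200), new Vec4d(0.0, 255.0, 0.0, 255.0), 2);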
+ ///
+ /// Draws a rectangle and detection information on the specified image based on the given data.
///
- /// Image mat.
- /// RectDetection.
- /// Color.
- /// Thickness.
+ /// The image on which to draw the rectangle and text.
+ /// The RectDetection containing the rectangle and detection details.
+ /// The color of the rectangle border.
+ /// The thickness of the rectangle border.
+ /// Thrown if rect is null.
public static void DrawFaceRect(Mat imgMat, DlibFaceLandmarkDetector.FaceLandmarkDetector.RectDetection rect, Scalar color, int thickness)
{
if (rect == null)
- throw new ArgumentNullException("rect");
+ throw new ArgumentNullException(nameof(rect));
UnityEngine.Rect _rect = rect.rect;
Imgproc.putText(imgMat, "detection_confidence : " + rect.detection_confidence, new Point(_rect.xMin, _rect.yMin - 20), Imgproc.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar(255, 255, 255, 255), 1, Imgproc.LINE_AA, false);
@@ -77,16 +166,58 @@ public static void DrawFaceRect(Mat imgMat, DlibFaceLandmarkDetector.FaceLandmar
}
///
- /// Draws a face rect.
+ /// Draws a rectangle and detection information on the specified image based on the given data.
///
- /// Image mat.
- /// Detected object's data. [left, top, width, height, detection_confidence, weight_index]
- /// Color.
- /// Thickness.
+ /// The image on which to draw the rectangle and text.
+ /// The RectDetection containing the rectangle and detection details.
+ /// The color of the rectangle border.
+ /// The thickness of the rectangle border.
+ /// Thrown if rect is null.
+ public static void DrawFaceRect(Mat imgMat, DlibFaceLandmarkDetector.FaceLandmarkDetector.RectDetection rect, in Vec4d color, int thickness)
+ {
+ if (rect == null)
+ throw new ArgumentNullException(nameof(rect));
+
+ UnityEngine.Rect _rect = rect.rect;
+ Imgproc.putText(imgMat, "detection_confidence : " + rect.detection_confidence, new Vec2d(_rect.xMin, _rect.yMin - 20), Imgproc.FONT_HERSHEY_SIMPLEX, 0.5, new Vec4d(255, 255, 255, 255), 1, Imgproc.LINE_AA, false);
+ Imgproc.putText(imgMat, "weight_index : " + rect.weight_index, new Vec2d(_rect.xMin, _rect.yMin - 5), Imgproc.FONT_HERSHEY_SIMPLEX, 0.5, new Vec4d(255, 255, 255, 255), 1, Imgproc.LINE_AA, false);
+ Imgproc.rectangle(imgMat, new Vec2d(_rect.xMin, _rect.yMin), new Vec2d(_rect.xMax, _rect.yMax), color, thickness);
+ }
+
+ ///
+ /// Draws a rectangle and detection information on the specified image based on the given data.
+ ///
+ /// The image on which to draw the rectangle and text.
+ /// The RectDetection containing the rectangle and detection details.
+ /// The color of the rectangle border.
+ /// The thickness of the rectangle border.
+ /// Thrown if rect is null.
+ public static void DrawFaceRect(Mat imgMat, DlibFaceLandmarkDetector.FaceLandmarkDetector.RectDetection rect, in (double v0, double v1, double v2, double v3) color, int thickness)
+ {
+ if (rect == null)
+ throw new ArgumentNullException(nameof(rect));
+
+ UnityEngine.Rect _rect = rect.rect;
+ Imgproc.putText(imgMat, "detection_confidence : " + rect.detection_confidence, (_rect.xMin, _rect.yMin - 20), Imgproc.FONT_HERSHEY_SIMPLEX, 0.5, (255, 255, 255, 255), 1, Imgproc.LINE_AA, false);
+ Imgproc.putText(imgMat, "weight_index : " + rect.weight_index, (_rect.xMin, _rect.yMin - 5), Imgproc.FONT_HERSHEY_SIMPLEX, 0.5, (255, 255, 255, 255), 1, Imgproc.LINE_AA, false);
+ Imgproc.rectangle(imgMat, (_rect.xMin, _rect.yMin), (_rect.xMax, _rect.yMax), color, thickness);
+ }
+
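A short sketch of how the RectDetection-based overloads might be driven (DetectRectDetection on FaceLandmarkDetector is assumed to return one RectDetection per face, as in the Dlib examples; faceLandmarkDetector and imgMat are assumed to exist in the calling code):

    // Hypothetical sketch: overlay every detection together with its confidence and weight index.
    // The detector is assumed to already hold the current frame via SetImage.
    List<FaceLandmarkDetector.RectDetection> detections = faceLandmarkDetector.DetectRectDetection();
    foreach (FaceLandmarkDetector.RectDetection detection in detections)
    {
        OpenCVForUnityUtils.DrawFaceRect(imgMat, detection, (0.0, 0.0, 255.0, 255.0), 2);
    }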
+ ///
+ /// Draws a rectangle and detection information on the specified image based on the provided rectangle data.
+ ///
+ /// The image on which to draw the rectangle and text.
+ ///
+ /// An array containing the rectangle data in the format [x, y, width, height, detection_confidence, weight_index].
+ /// The last two values are optional and used for displaying additional detection information.
+ ///
+ /// The color of the rectangle border.
+ /// The thickness of the rectangle border.
+ /// Thrown if rect is null.
public static void DrawFaceRect(Mat imgMat, double[] rect, Scalar color, int thickness)
{
if (rect == null)
- throw new ArgumentNullException("rect");
+ throw new ArgumentNullException(nameof(rect));
if (rect.Length > 4)
Imgproc.putText(imgMat, "detection_confidence : " + rect[4], new Point(rect[0], rect[1] - 20), Imgproc.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar(255, 255, 255, 255), 1, Imgproc.LINE_AA, false);
@@ -96,18 +227,75 @@ public static void DrawFaceRect(Mat imgMat, double[] rect, Scalar color, int thi
}
///
- /// Draws a face landmark.
- /// This method supports 68,17,6,5 landmark points.
+ /// Draws a rectangle and detection information on the specified image based on the provided rectangle data.
+ ///
+ /// The image on which to draw the rectangle and text.
+ ///
+ /// An array containing the rectangle data in the format [x, y, width, height, detection_confidence, weight_index].
+ /// The last two values are optional and used for displaying additional detection information.
+ ///
+ /// The color of the rectangle border.
+ /// The thickness of the rectangle border.
+ /// Thrown if rect is null.
+ public static void DrawFaceRect(Mat imgMat, double[] rect, in Vec4d color, int thickness)
+ {
+ if (rect == null)
+ throw new ArgumentNullException(nameof(rect));
+
+ if (rect.Length > 4)
+ Imgproc.putText(imgMat, "detection_confidence : " + rect[4], new Vec2d(rect[0], rect[1] - 20), Imgproc.FONT_HERSHEY_SIMPLEX, 0.5, new Vec4d(255, 255, 255, 255), 1, Imgproc.LINE_AA, false);
+ if (rect.Length > 5)
+ Imgproc.putText(imgMat, "weight_index : " + rect[5], new Vec2d(rect[0], rect[1] - 5), Imgproc.FONT_HERSHEY_SIMPLEX, 0.5, new Vec4d(255, 255, 255, 255), 1, Imgproc.LINE_AA, false);
+ Imgproc.rectangle(imgMat, new Vec2d(rect[0], rect[1]), new Vec2d(rect[0] + rect[2], rect[1] + rect[3]), color, thickness);
+ }
+
+ ///
+ /// Draws a rectangle and detection information on the specified image based on the provided rectangle data.
+ ///
+ /// The image on which to draw the rectangle and text.
+ ///
+ /// An array containing the rectangle data in the format [x, y, width, height, detection_confidence, weight_index].
+ /// The last two values are optional and used for displaying additional detection information.
+ ///
+ /// The color of the rectangle border.
+ /// The thickness of the rectangle border.
+ /// Thrown if rect is null.
+ public static void DrawFaceRect(Mat imgMat, double[] rect, in (double v0, double v1, double v2, double v3) color, int thickness)
+ {
+ if (rect == null)
+ throw new ArgumentNullException(nameof(rect));
+
+ if (rect.Length > 4)
+ Imgproc.putText(imgMat, "detection_confidence : " + rect[4], (rect[0], rect[1] - 20), Imgproc.FONT_HERSHEY_SIMPLEX, 0.5, (255, 255, 255, 255), 1, Imgproc.LINE_AA, false);
+ if (rect.Length > 5)
+ Imgproc.putText(imgMat, "weight_index : " + rect[5], (rect[0], rect[1] - 5), Imgproc.FONT_HERSHEY_SIMPLEX, 0.5, (255, 255, 255, 255), 1, Imgproc.LINE_AA, false);
+ Imgproc.rectangle(imgMat, (rect[0], rect[1]), (rect[0] + rect[2], rect[1] + rect[3]), color, thickness);
+ }
+
+ ///
+ /// Draws a face landmark on the specified image.
+ /// This method supports drawing landmarks for 68, 17, 6, or 5 landmark points.
+ /// The landmarks are drawn by connecting the points with lines, and optionally, index numbers can be drawn on each point.
///
- /// Image mat.
- /// Points.
- /// Color.
- /// Thickness.
- /// Determines if draw index numbers.
+ /// The image on which to draw the face landmarks.
+ ///
+ /// A list of points representing the landmark positions. The number of points must match one of the following:
+ /// 5 points for a basic face shape (e.g., eyes, nose, mouth),
+ /// 6 points for a face with more detailed landmarks,
+ /// 17 points for a detailed face shape, or
+ /// 68 points for a full set of face landmarks.
+ ///
+ /// The color used to draw the landmarks and lines.
+ /// The thickness of the lines used to connect the landmarks.
+ ///
+ /// If set to true, index numbers will be drawn next to each landmark point.
+ /// If set to false, no index numbers will be drawn. Default is false.
+ ///
+ /// Thrown if points is null.
public static void DrawFaceLandmark(Mat imgMat, IList<Vector2> points, Scalar color, int thickness, bool drawIndexNumbers = false)
{
if (points == null)
- throw new ArgumentNullException("points");
+ throw new ArgumentNullException(nameof(points));
if (points.Count == 5)
{
@@ -203,20 +391,31 @@ public static void DrawFaceLandmark(Mat imgMat, IList points, Scalar co
}
///
- /// Draws a face landmark.
- /// This method supports 68,17,6,5 landmark points.
+ /// Draws a face landmark on the specified image.
+ /// This method supports drawing landmarks for 68, 17, 6, or 5 landmark points.
+ /// The landmarks are drawn by connecting the points with lines, and optionally, index numbers can be drawn on each point.
///
- /// Image mat.
- /// Points.
- /// Color.
- /// Thickness.
- /// Determines if draw index numbers.
- public static void DrawFaceLandmark(Mat imgMat, IList<Point> points, Scalar color, int thickness, bool drawIndexNumbers = false)
+ /// The image on which to draw the face landmarks.
+ ///
+ /// A list of points representing the landmark positions. The number of points must match one of the following:
+ /// 5 points for a basic face shape (e.g., eyes, nose, mouth),
+ /// 6 points for a face with more detailed landmarks,
+ /// 17 points for a detailed face shape, or
+ /// 68 points for a full set of face landmarks.
+ ///
+ /// The color used to draw the landmarks and lines.
+ /// The thickness of the lines used to connect the landmarks.
+ ///
+ /// If set to true, index numbers will be drawn next to each landmark point.
+ /// If set to false, no index numbers will be drawn. Default is false.
+ ///
+ /// Thrown if points is null.
+ public static void DrawFaceLandmark(Mat imgMat, in Vec2d[] points, in Vec4d color, int thickness, bool drawIndexNumbers = false)
{
if (points == null)
- throw new ArgumentNullException("points");
+ throw new ArgumentNullException(nameof(points));
- if (points.Count == 5)
+ if (points.Length == 5)
{
Imgproc.line(imgMat, points[0], points[1], color, thickness);
@@ -225,7 +424,7 @@ public static void DrawFaceLandmark(Mat imgMat, IList points, Scalar colo
Imgproc.line(imgMat, points[3], points[2], color, thickness);
}
- else if (points.Count == 6)
+ else if (points.Length == 6)
{
Imgproc.line(imgMat, points[2], points[3], color, thickness);
@@ -235,7 +434,7 @@ public static void DrawFaceLandmark(Mat imgMat, IList points, Scalar colo
Imgproc.line(imgMat, points[0], points[1], color, thickness);
}
- else if (points.Count == 17)
+ else if (points.Length == 17)
{
Imgproc.line(imgMat, points[2], points[9], color, thickness);
@@ -260,7 +459,7 @@ public static void DrawFaceLandmark(Mat imgMat, IList points, Scalar colo
Imgproc.circle(imgMat, points[i], 2, color, -1);
}
- else if (points.Count == 68)
+ else if (points.Length == 68)
{
for (int i = 1; i <= 16; ++i)
@@ -295,7 +494,7 @@ public static void DrawFaceLandmark(Mat imgMat, IList points, Scalar colo
}
else
{
- for (int i = 0; i < points.Count; i++)
+ for (int i = 0; i < points.Length; i++)
{
Imgproc.circle(imgMat, points[i], 2, color, -1);
}
@@ -304,157 +503,531 @@ public static void DrawFaceLandmark(Mat imgMat, IList points, Scalar colo
// Draw the index number of facelandmark points.
if (drawIndexNumbers)
{
- for (int i = 0; i < points.Count; ++i)
- Imgproc.putText(imgMat, i.ToString(), points[i], Imgproc.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar(255, 255, 255, 255), 1, Imgproc.LINE_AA, false);
+ for (int i = 0; i < points.Length; ++i)
+ Imgproc.putText(imgMat, i.ToString(), points[i], Imgproc.FONT_HERSHEY_SIMPLEX, 0.5, new Vec4d(255, 255, 255, 255), 1, Imgproc.LINE_AA, false);
}
}
///
- /// Draws a face landmark.
- /// This method supports 68,17,6,5 landmark points.
+ /// Draws a face landmark on the specified image.
+ /// This method supports drawing landmarks for 68, 17, 6, or 5 landmark points.
+ /// The landmarks are drawn by connecting the points with lines, and optionally, index numbers can be drawn on each point.
///
- /// Image mat.
- /// Detected object landmark data.[x_0, y_0, x_1, y_1, ...]
- /// Color.
- /// Thickness.
- /// Determines if draw index numbers.
- public static void DrawFaceLandmark(Mat imgMat, double[] points, Scalar color, int thickness, bool drawIndexNumbers = false)
+ /// The image on which to draw the face landmarks.
+ ///
+ /// A list of points representing the landmark positions. The number of points must match one of the following:
+ /// 5 points for a basic face shape (e.g., eyes, nose, mouth),
+ /// 6 points for a face with more detailed landmarks,
+ /// 17 points for a detailed face shape, or
+ /// 68 points for a full set of face landmarks.
+ ///
+ /// The color used to draw the landmarks and lines.
+ /// The thickness of the lines used to connect the landmarks.
+ ///
+ /// If set to true, index numbers will be drawn next to each landmark point.
+ /// If set to false, no index numbers will be drawn. Default is false.
+ ///
+ /// Thrown if points is null.
+ public static void DrawFaceLandmark(Mat imgMat, in (double x, double y)[] points, in (double v0, double v1, double v2, double v3) color, int thickness, bool drawIndexNumbers = false)
{
if (points == null)
- throw new ArgumentNullException("points");
+ throw new ArgumentNullException(nameof(points));
- List<Vector2> _points = new List<Vector2>();
- for (int i = 0; i < points.Length; i = i + 2)
+ if (points.Length == 5)
{
- _points.Add(new Vector2((float)points[i], (float)points[i + 1]));
+
+ Imgproc.line(imgMat, points[0], points[1], color, thickness);
+ Imgproc.line(imgMat, points[1], points[4], color, thickness);
+ Imgproc.line(imgMat, points[4], points[3], color, thickness);
+ Imgproc.line(imgMat, points[3], points[2], color, thickness);
+
}
- DrawFaceLandmark(imgMat, _points, color, thickness, drawIndexNumbers);
- }
+ else if (points.Length == 6)
+ {
+ Imgproc.line(imgMat, points[2], points[3], color, thickness);
+ Imgproc.line(imgMat, points[4], points[5], color, thickness);
+ Imgproc.line(imgMat, points[3], points[0], color, thickness);
+ Imgproc.line(imgMat, points[4], points[0], color, thickness);
+ Imgproc.line(imgMat, points[0], points[1], color, thickness);
+ }
+ else if (points.Length == 17)
+ {
+ Imgproc.line(imgMat, points[2], points[9], color, thickness);
+ Imgproc.line(imgMat, points[9], points[3], color, thickness);
+ Imgproc.line(imgMat, points[3], points[10], color, thickness);
+ Imgproc.line(imgMat, points[10], points[2], color, thickness);
- ///
- /// Convert Vector2 list to Vector2 array.
- ///
- /// List of Vector2.
- /// Array of Vector2.
- /// Array of Vector2.
- public static Vector2[] ConvertVector2ListToVector2Array(IList<Vector2> src, Vector2[] dst = null)
- {
- if (src == null)
- throw new ArgumentNullException("src");
+ Imgproc.line(imgMat, points[4], points[11], color, thickness);
+ Imgproc.line(imgMat, points[11], points[5], color, thickness);
+ Imgproc.line(imgMat, points[5], points[12], color, thickness);
+ Imgproc.line(imgMat, points[12], points[4], color, thickness);
- if (dst != null && src.Count != dst.Length)
- throw new ArgumentException("src.Count != dst.Length");
+ Imgproc.line(imgMat, points[3], points[0], color, thickness);
+ Imgproc.line(imgMat, points[4], points[0], color, thickness);
+ Imgproc.line(imgMat, points[0], points[1], color, thickness);
- if (dst == null)
- {
- dst = new Vector2[src.Count];
- }
+ for (int i = 14; i <= 16; ++i)
+ Imgproc.line(imgMat, points[i], points[i - 1], color, thickness);
+ Imgproc.line(imgMat, points[16], points[13], color, thickness);
+
+ for (int i = 6; i <= 8; i++)
+ Imgproc.circle(imgMat, points[i], 2, color, -1);
- for (int i = 0; i < src.Count; ++i)
- {
- dst[i].x = src[i].x;
- dst[i].y = src[i].y;
}
+ else if (points.Length == 68)
+ {
- return dst;
- }
+ for (int i = 1; i <= 16; ++i)
+ Imgproc.line(imgMat, points[i], points[i - 1], color, thickness);
- ///
- /// Convert Vector2 list to Point list.
- ///
- /// List of Vector2.
- /// List of Point.
- /// List of Point.
- public static List<Point> ConvertVector2ListToPointList(IList<Vector2> src, List<Point> dst = null)
- {
- if (src == null)
- throw new ArgumentNullException("src");
+ for (int i = 28; i <= 30; ++i)
+ Imgproc.line(imgMat, points[i], points[i - 1], color, thickness);
- if (dst == null)
- {
- dst = new List<Point>();
- }
+ for (int i = 18; i <= 21; ++i)
+ Imgproc.line(imgMat, points[i], points[i - 1], color, thickness);
+ for (int i = 23; i <= 26; ++i)
+ Imgproc.line(imgMat, points[i], points[i - 1], color, thickness);
+ for (int i = 31; i <= 35; ++i)
+ Imgproc.line(imgMat, points[i], points[i - 1], color, thickness);
+ Imgproc.line(imgMat, points[30], points[35], color, thickness);
- if (dst.Count != src.Count)
+ for (int i = 37; i <= 41; ++i)
+ Imgproc.line(imgMat, points[i], points[i - 1], color, thickness);
+ Imgproc.line(imgMat, points[36], points[41], color, thickness);
+
+ for (int i = 43; i <= 47; ++i)
+ Imgproc.line(imgMat, points[i], points[i - 1], color, thickness);
+ Imgproc.line(imgMat, points[42], points[47], color, thickness);
+
+ for (int i = 49; i <= 59; ++i)
+ Imgproc.line(imgMat, points[i], points[i - 1], color, thickness);
+ Imgproc.line(imgMat, points[48], points[59], color, thickness);
+
+ for (int i = 61; i <= 67; ++i)
+ Imgproc.line(imgMat, points[i], points[i - 1], color, thickness);
+ Imgproc.line(imgMat, points[60], points[67], color, thickness);
+ }
+ else
{
- dst.Clear();
- for (int i = 0; i < src.Count; i++)
+ for (int i = 0; i < points.Length; i++)
{
- dst.Add(new Point());
+ Imgproc.circle(imgMat, points[i], 2, color, -1);
}
}
- for (int i = 0; i < src.Count; ++i)
+ // Draw the index number of facelandmark points.
+ if (drawIndexNumbers)
{
- dst[i].x = src[i].x;
- dst[i].y = src[i].y;
+ for (int i = 0; i < points.Length; ++i)
+ Imgproc.putText(imgMat, i.ToString(), points[i], Imgproc.FONT_HERSHEY_SIMPLEX, 0.5, (255, 255, 255, 255), 1, Imgproc.LINE_AA, false);
}
-
- return dst;
}
///
- /// Convert Vector2 list to Point array.
+ /// Draws a face landmark on the specified image.
+ /// This method supports drawing landmarks for 68, 17, 6, or 5 landmark points.
+ /// The landmarks are drawn by connecting the points with lines, and optionally, index numbers can be drawn on each point.
///
- /// List of Vector2.
- /// Array of Point.
- /// Array of Point.
- public static Point[] ConvertVector2ListToPointArray(IList<Vector2> src, Point[] dst = null)
+ /// The image on which to draw the face landmarks.
+ ///
+ /// A list of points representing the landmark positions. The number of points must match one of the following:
+ /// 5 points for a basic face shape (e.g., eyes, nose, mouth),
+ /// 6 points for a face with more detailed landmarks,
+ /// 17 points for a detailed face shape, or
+ /// 68 points for a full set of face landmarks.
+ ///
+ /// The color used to draw the landmarks and lines.
+ /// The thickness of the lines used to connect the landmarks.
+ ///
+ /// If set to true, index numbers will be drawn next to each landmark point.
+ /// If set to false, no index numbers will be drawn. Default is false.
+ ///
+ /// Thrown if points is null.
+ public static void DrawFaceLandmark(Mat imgMat, IList<Point> points, Scalar color, int thickness, bool drawIndexNumbers = false)
{
- if (src == null)
- throw new ArgumentNullException("src");
-
- if (dst != null && src.Count != dst.Length)
- throw new ArgumentException("src.Count != dst.Length");
-
- if (dst == null)
- {
- dst = new Point[src.Count];
- }
+ if (points == null)
+ throw new ArgumentNullException(nameof(points));
- for (int i = 0; i < src.Count; ++i)
+ if (points.Count == 5)
{
- dst[i] = new Point(src[i].x, src[i].y);
- }
- return dst;
- }
+ Imgproc.line(imgMat, points[0], points[1], color, thickness);
+ Imgproc.line(imgMat, points[1], points[4], color, thickness);
+ Imgproc.line(imgMat, points[4], points[3], color, thickness);
+ Imgproc.line(imgMat, points[3], points[2], color, thickness);
- ///
- /// Convert Vector2 list to array.
- ///
- /// List of Vector2.
- /// Array of double.
- /// Array of double.
- public static double[] ConvertVector2ListToArray(IList<Vector2> src, double[] dst = null)
- {
- if (src == null)
- throw new ArgumentNullException("src");
+ }
+ else if (points.Count == 6)
+ {
- if (dst != null && src.Count * 2 != dst.Length)
- throw new ArgumentException("src.Count * 2 != dst.Length");
+ Imgproc.line(imgMat, points[2], points[3], color, thickness);
+ Imgproc.line(imgMat, points[4], points[5], color, thickness);
+ Imgproc.line(imgMat, points[3], points[0], color, thickness);
+ Imgproc.line(imgMat, points[4], points[0], color, thickness);
+ Imgproc.line(imgMat, points[0], points[1], color, thickness);
- if (dst == null)
- {
- dst = new double[src.Count * 2];
}
-
- for (int i = 0; i < src.Count; ++i)
+ else if (points.Count == 17)
{
- dst[i * 2] = src[i].x;
- dst[i * 2 + 1] = src[i].y;
- }
- return dst;
- }
+ Imgproc.line(imgMat, points[2], points[9], color, thickness);
+ Imgproc.line(imgMat, points[9], points[3], color, thickness);
+ Imgproc.line(imgMat, points[3], points[10], color, thickness);
+ Imgproc.line(imgMat, points[10], points[2], color, thickness);
+ Imgproc.line(imgMat, points[4], points[11], color, thickness);
+ Imgproc.line(imgMat, points[11], points[5], color, thickness);
+ Imgproc.line(imgMat, points[5], points[12], color, thickness);
+ Imgproc.line(imgMat, points[12], points[4], color, thickness);
+ Imgproc.line(imgMat, points[3], points[0], color, thickness);
+ Imgproc.line(imgMat, points[4], points[0], color, thickness);
+ Imgproc.line(imgMat, points[0], points[1], color, thickness);
+ for (int i = 14; i <= 16; ++i)
+ Imgproc.line(imgMat, points[i], points[i - 1], color, thickness);
+ Imgproc.line(imgMat, points[16], points[13], color, thickness);
- ///
+ for (int i = 6; i <= 8; i++)
+ Imgproc.circle(imgMat, points[i], 2, color, -1);
+
+ }
+ else if (points.Count == 68)
+ {
+
+ for (int i = 1; i <= 16; ++i)
+ Imgproc.line(imgMat, points[i], points[i - 1], color, thickness);
+
+ for (int i = 28; i <= 30; ++i)
+ Imgproc.line(imgMat, points[i], points[i - 1], color, thickness);
+
+ for (int i = 18; i <= 21; ++i)
+ Imgproc.line(imgMat, points[i], points[i - 1], color, thickness);
+ for (int i = 23; i <= 26; ++i)
+ Imgproc.line(imgMat, points[i], points[i - 1], color, thickness);
+ for (int i = 31; i <= 35; ++i)
+ Imgproc.line(imgMat, points[i], points[i - 1], color, thickness);
+ Imgproc.line(imgMat, points[30], points[35], color, thickness);
+
+ for (int i = 37; i <= 41; ++i)
+ Imgproc.line(imgMat, points[i], points[i - 1], color, thickness);
+ Imgproc.line(imgMat, points[36], points[41], color, thickness);
+
+ for (int i = 43; i <= 47; ++i)
+ Imgproc.line(imgMat, points[i], points[i - 1], color, thickness);
+ Imgproc.line(imgMat, points[42], points[47], color, thickness);
+
+ for (int i = 49; i <= 59; ++i)
+ Imgproc.line(imgMat, points[i], points[i - 1], color, thickness);
+ Imgproc.line(imgMat, points[48], points[59], color, thickness);
+
+ for (int i = 61; i <= 67; ++i)
+ Imgproc.line(imgMat, points[i], points[i - 1], color, thickness);
+ Imgproc.line(imgMat, points[60], points[67], color, thickness);
+ }
+ else
+ {
+ for (int i = 0; i < points.Count; i++)
+ {
+ Imgproc.circle(imgMat, points[i], 2, color, -1);
+ }
+ }
+
+ // Draw the index number of facelandmark points.
+ if (drawIndexNumbers)
+ {
+ for (int i = 0; i < points.Count; ++i)
+ Imgproc.putText(imgMat, i.ToString(), points[i], Imgproc.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar(255, 255, 255, 255), 1, Imgproc.LINE_AA, false);
+ }
+ }
+
+ ///
+ /// Draws a face landmark on the specified image.
+ /// This method supports drawing landmarks for 68, 17, 6, or 5 landmark points.
+ /// The landmarks are drawn by connecting the points with lines, and optionally, index numbers can be drawn on each point.
+ ///
+ /// The image on which to draw the face landmarks.
+ ///
+ /// A list of points representing the landmark positions. The number of points must match one of the following:
+ /// 5 points for a basic face shape (e.g., eyes, nose, mouth),
+ /// 6 points for a face with more detailed landmarks,
+ /// 17 points for a detailed face shape, or
+ /// 68 points for a full set of face landmarks.
+ ///
+ /// The color used to draw the landmarks and lines.
+ /// The thickness of the lines used to connect the landmarks.
+ ///
+ /// If set to true, index numbers will be drawn next to each landmark point.
+ /// If set to false, no index numbers will be drawn. Default is false.
+ ///
+ /// Thrown if points is null.
+ public static void DrawFaceLandmark(Mat imgMat, double[] points, Scalar color, int thickness, bool drawIndexNumbers = false)
+ {
+ if (points == null)
+ throw new ArgumentNullException(nameof(points));
+
+ List<Vector2> _points = new List<Vector2>();
+ for (int i = 0; i < points.Length; i = i + 2)
+ {
+ _points.Add(new Vector2((float)points[i], (float)points[i + 1]));
+ }
+ DrawFaceLandmark(imgMat, _points, color, thickness, drawIndexNumbers);
+ }
+
+ ///
+ /// Draws a face landmark on the specified image.
+ /// This method supports drawing landmarks for 68, 17, 6, or 5 landmark points.
+ /// The landmarks are drawn by connecting the points with lines, and optionally, index numbers can be drawn on each point.
+ ///
+ /// The image on which to draw the face landmarks.
+ ///
+ /// A double array representing the landmark positions, where each pair of consecutive values represents the X and Y coordinates of a point.
+ /// The number of points must match one of the following:
+ /// 5 points for a basic face shape (e.g., eyes, nose, mouth),
+ /// 6 points for a face with more detailed landmarks,
+ /// 17 points for a detailed face shape, or
+ /// 68 points for a full set of face landmarks.
+ ///
+ /// The color used to draw the landmarks and lines.
+ /// The thickness of the lines used to connect the landmarks.
+ ///
+ /// If set to true, index numbers will be drawn next to each landmark point.
+ /// If set to false, no index numbers will be drawn. Default is false.
+ ///
+ /// Thrown if points is null.
+ public static void DrawFaceLandmark(Mat imgMat, double[] points, in Vec4d color, int thickness, bool drawIndexNumbers = false)
+ {
+ if (points == null)
+ throw new ArgumentNullException(nameof(points));
+
+ Vec2d[] pointsVec2d = new Vec2d[points.Length / 2];
+#if NET_STANDARD_2_1
+ Span<Vec2d> pointsSpan = MemoryMarshal.Cast<double, Vec2d>(points);
+ pointsSpan.CopyTo(pointsVec2d);
+#else
+ for (int i = 0; i < pointsVec2d.Length; i++)
+ {
+ pointsVec2d[i].Item1 = points[i * 2 + 0];
+ pointsVec2d[i].Item2 = points[i * 2 + 1];
+ }
+#endif
+ DrawFaceLandmark(imgMat, pointsVec2d, color, thickness, drawIndexNumbers);
+ }
+
+ ///
+ /// Draws a face landmark on the specified image.
+ /// This method supports drawing landmarks for 68, 17, 6, or 5 landmark points.
+ /// The landmarks are drawn by connecting the points with lines, and optionally, index numbers can be drawn on each point.
+ ///
+ /// The image on which to draw the face landmarks.
+ ///
+ /// A double array representing the landmark positions, where each pair of consecutive values represents the X and Y coordinates of a point.
+ /// The number of points must match one of the following:
+ /// 5 points for a basic face shape (e.g., eyes, nose, mouth),
+ /// 6 points for a face with more detailed landmarks,
+ /// 17 points for a detailed face shape, or
+ /// 68 points for a full set of face landmarks.
+ ///
+ /// The color used to draw the landmarks and lines.
+ /// The thickness of the lines used to connect the landmarks.
+ ///
+ /// If set to true, index numbers will be drawn next to each landmark point.
+ /// If set to false, no index numbers will be drawn. Default is false.
+ ///
+ /// Thrown if points is null.
+ public static void DrawFaceLandmark(Mat imgMat, double[] points, in (double v0, double v1, double v2, double v3) color, int thickness, bool drawIndexNumbers = false)
+ {
+ if (points == null)
+ throw new ArgumentNullException(nameof(points));
+
+ (double x, double y)[] pointsValueTuple = new (double x, double y)[points.Length / 2];
+ for (int i = 0; i < pointsValueTuple.Length; i++)
+ {
+ pointsValueTuple[i].Item1 = points[i * 2 + 0];
+ pointsValueTuple[i].Item2 = points[i * 2 + 1];
+ }
+ DrawFaceLandmark(imgMat, pointsValueTuple, color, thickness, drawIndexNumbers);
+ }
+
+
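As a sketch pairing the double[]-based overload with the detector (DetectLandmarkArray is assumed to return landmarks as [x0, y0, x1, y1, ...], matching the layout documented above; faceRect is an assumed UnityEngine.Rect from a prior face detection):

    // Hypothetical sketch: detect landmarks for one face rect and draw them with the allocation-free overload.
    double[] landmarks = faceLandmarkDetector.DetectLandmarkArray(faceRect.x, faceRect.y, faceRect.width, faceRect.height);
    OpenCVForUnityUtils.DrawFaceLandmark(imgMat, landmarks, (255.0, 0.0, 0.0, 255.0), 2);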
+ ///
+ /// Convert Vector2 list to Vector2 array.
+ ///
+ /// List of Vector2.
+ /// Array of Vector2.
+ /// Array of Vector2.
+ public static Vector2[] ConvertVector2ListToVector2Array(IList<Vector2> src, Vector2[] dst = null)
+ {
+ if (src == null)
+ throw new ArgumentNullException(nameof(src));
+
+ if (dst != null && src.Count != dst.Length)
+ throw new ArgumentException("src.Count != dst.Length");
+
+ if (dst == null)
+ {
+ dst = new Vector2[src.Count];
+ }
+
+ for (int i = 0; i < src.Count; ++i)
+ {
+ dst[i].x = src[i].x;
+ dst[i].y = src[i].y;
+ }
+
+ return dst;
+ }
+
+ ///
+ /// Convert Vector2 list to Point list.
+ ///
+ /// List of Vector2.
+ /// List of Point.
+ /// List of Point.
+ public static List<Point> ConvertVector2ListToPointList(IList<Vector2> src, List<Point> dst = null)
+ {
+ if (src == null)
+ throw new ArgumentNullException(nameof(src));
+
+ if (dst == null)
+ {
+ dst = new List<Point>();
+ }
+
+ if (dst.Count != src.Count)
+ {
+ dst.Clear();
+ for (int i = 0; i < src.Count; i++)
+ {
+ dst.Add(new Point());
+ }
+ }
+
+ for (int i = 0; i < src.Count; ++i)
+ {
+ dst[i].x = src[i].x;
+ dst[i].y = src[i].y;
+ }
+
+ return dst;
+ }
+
+ ///
+ /// Convert Vector2 list to Point array.
+ ///
+ /// List of Vector2.
+ /// Array of Point.
+ /// Array of Point.
+ public static Point[] ConvertVector2ListToPointArray(IList<Vector2> src, Point[] dst = null)
+ {
+ if (src == null)
+ throw new ArgumentNullException(nameof(src));
+
+ if (dst != null && src.Count != dst.Length)
+ throw new ArgumentException("src.Count != dst.Length");
+
+ if (dst == null)
+ {
+ dst = new Point[src.Count];
+ }
+
+ for (int i = 0; i < src.Count; ++i)
+ {
+ dst[i] = new Point(src[i].x, src[i].y);
+ }
+
+ return dst;
+ }
+
+ ///
+ /// Convert Vector2 list to double array.
+ ///
+ /// List of Vector2.
+ /// Array of double.
+ /// Array of double.
+ public static double[] ConvertVector2ListToArray(IList<Vector2> src, double[] dst = null)
+ {
+ if (src == null)
+ throw new ArgumentNullException(nameof(src));
+
+ if (dst != null && src.Count * 2 != dst.Length)
+ throw new ArgumentException("src.Count * 2 != dst.Length");
+
+ if (dst == null)
+ {
+ dst = new double[src.Count * 2];
+ }
+
+ for (int i = 0; i < src.Count; ++i)
+ {
+ dst[i * 2] = src[i].x;
+ dst[i * 2 + 1] = src[i].y;
+ }
+
+ return dst;
+ }
+
+ ///
+ /// Convert Vector2 list to Vec2d array.
+ ///
+ /// List of Vector2.
+ /// Array of Vec2d.
+ /// Array of Vec2d.
+ public static Vec2d[] ConvertVector2ListToVec2dArray(IList<Vector2> src, Vec2d[] dst = null)
+ {
+ if (src == null)
+ throw new ArgumentNullException(nameof(src));
+
+ if (dst != null && src.Count != dst.Length)
+ throw new ArgumentException("src.Count != dst.Length");
+
+ if (dst == null)
+ {
+ dst = new Vec2d[src.Count];
+ }
+
+ for (int i = 0; i < src.Count; ++i)
+ {
+ dst[i].Item1 = src[i].x;
+ dst[i].Item2 = src[i].y;
+ }
+
+ return dst;
+ }
+
+ ///
+ /// Convert Vector2 list to ValueTuple array.
+ ///
+ /// List of Vector2.
+ /// Array of ValueTuple.
+ /// Array of ValueTuple.
+ public static (double x, double y)[] ConvertVector2ListToValueTupleArray(IList<Vector2> src, (double x, double y)[] dst = null)
+ {
+ if (src == null)
+ throw new ArgumentNullException(nameof(src));
+
+ if (dst != null && src.Count != dst.Length)
+ throw new ArgumentException("src.Count != dst.Length");
+
+ if (dst == null)
+ {
+ dst = new (double x, double y)[src.Count];
+ }
+
+ for (int i = 0; i < src.Count; ++i)
+ {
+ dst[i].Item1 = src[i].x;
+ dst[i].Item2 = src[i].y;
+ }
+
+ return dst;
+ }
+
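The whole converter family takes an optional dst buffer and returns it, so per-frame conversions can reuse storage instead of allocating. A minimal sketch of that pattern (the 68-element size assumes the 68-point shape predictor used elsewhere in these examples, and rawLandmarks is an assumed detector result):

    // Hypothetical sketch: reuse one Vec2d buffer across frames instead of allocating per call.
    Vec2d[] landmarkBuffer = new Vec2d[68];
    // rawLandmarks is assumed to be detector output laid out as [x0, y0, x1, y1, ...].
    OpenCVForUnityUtils.ConvertArrayToVec2dArray(rawLandmarks, landmarkBuffer);
    OpenCVForUnityUtils.DrawFaceLandmark(imgMat, landmarkBuffer, new Vec4d(0.0, 255.0, 0.0, 255.0), 2);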
+
+ ///
/// Convert Point list to Point array.
///
/// List of Point.
@@ -463,7 +1036,7 @@ public static double[] ConvertVector2ListToArray(IList src, double[] ds
public static Point[] ConvertPointListToPointArray(IList<Point> src, Point[] dst = null)
{
if (src == null)
- throw new ArgumentNullException("src");
+ throw new ArgumentNullException(nameof(src));
if (dst != null && src.Count != dst.Length)
throw new ArgumentException("src.Count != dst.Length");
@@ -490,7 +1063,7 @@ public static Point[] ConvertPointListToPointArray(IList src, Point[] dst
public static List<Vector2> ConvertPointListToVector2List(IList<Point> src, List<Vector2> dst = null)
{
if (src == null)
- throw new ArgumentNullException("src");
+ throw new ArgumentNullException(nameof(src));
if (dst == null)
{
@@ -516,7 +1089,7 @@ public static List ConvertPointListToVector2List(IList src, List
public static Vector2[] ConvertPointListToVector2Array(IList<Point> src, Vector2[] dst = null)
{
if (src == null)
- throw new ArgumentNullException("src");
+ throw new ArgumentNullException(nameof(src));
if (dst != null && src.Count != dst.Length)
throw new ArgumentException("src.Count != dst.Length");
@@ -536,7 +1109,7 @@ public static Vector2[] ConvertPointListToVector2Array(IList src, Vector2
}
///
- /// Convert Point list to array.
+ /// Convert Point list to double array.
///
/// List of Point.
/// Array of double.
@@ -544,7 +1117,7 @@ public static Vector2[] ConvertPointListToVector2Array(IList src, Vector2
public static double[] ConvertPointListToArray(IList<Point> src, double[] dst = null)
{
if (src == null)
- throw new ArgumentNullException("src");
+ throw new ArgumentNullException(nameof(src));
if (dst != null && src.Count * 2 != dst.Length)
throw new ArgumentException("src.Count * 2 != dst.Length");
@@ -563,11 +1136,64 @@ public static double[] ConvertPointListToArray(IList src, double[] dst =
return dst;
}
+ ///
+ /// Convert Point list to Vec2d array.
+ ///
+ /// List of Point.
+ /// Array of Vec2d.
+ /// Array of Vec2d.
+ public static Vec2d[] ConvertPointListToVec2dArray(IList<Point> src, Vec2d[] dst = null)
+ {
+ if (src == null)
+ throw new ArgumentNullException(nameof(src));
+
+ if (dst != null && src.Count != dst.Length)
+ throw new ArgumentException("src.Count != dst.Length");
+
+ if (dst == null)
+ {
+ dst = new Vec2d[src.Count];
+ }
+
+ for (int i = 0; i < src.Count; ++i)
+ {
+ dst[i].Item1 = src[i].x;
+ dst[i].Item2 = src[i].y;
+ }
+
+ return dst;
+ }
+
+ ///
+ /// Convert Point list to ValueTuple array.
+ ///
+ /// List of Point.
+ /// Array of ValueTuple.
+ /// Array of ValueTuple.
+ public static (double x, double y)[] ConvertPointListToValueTupleArray(IList<Point> src, (double x, double y)[] dst = null)
+ {
+ if (src == null)
+ throw new ArgumentNullException(nameof(src));
+
+ if (dst != null && src.Count != dst.Length)
+ throw new ArgumentException("src.Count != dst.Length");
+
+ if (dst == null)
+ {
+ dst = new (double x, double y)[src.Count];
+ }
+ for (int i = 0; i < src.Count; ++i)
+ {
+ dst[i].Item1 = src[i].x;
+ dst[i].Item2 = src[i].y;
+ }
+ return dst;
+ }
///
- /// Convert array to Vector2 list.
+ /// Convert double array to Vector2 list.
///
/// Array of double.
/// List of Vector2.
@@ -575,7 +1201,7 @@ public static double[] ConvertPointListToArray(IList src, double[] dst =
public static List<Vector2> ConvertArrayToVector2List(double[] src, List<Vector2> dst = null)
{
if (src == null)
- throw new ArgumentNullException("src");
+ throw new ArgumentNullException(nameof(src));
if (dst == null)
{
@@ -594,7 +1220,7 @@ public static List ConvertArrayToVector2List(double[] src, List
- /// Convert array to Vector2 array.
+ /// Convert double array to Vector2 array.
///
/// Array of double.
/// Array of Vector2.
@@ -602,7 +1228,7 @@ public static List ConvertArrayToVector2List(double[] src, List
- /// Convert array to Point list.
+ /// Convert double array to Point list.
///
/// Array of double.
/// List of Point.
@@ -630,7 +1256,7 @@ public static Vector2[] ConvertArrayToVector2Array(double[] src, Vector2[] dst =
public static List<Point> ConvertArrayToPointList(double[] src, List<Point> dst = null)
{
if (src == null)
- throw new ArgumentNullException("src");
+ throw new ArgumentNullException(nameof(src));
if (dst == null)
{
@@ -656,7 +1282,7 @@ public static List ConvertArrayToPointList(double[] src, List dst
}
///
- /// Convert array to Point array.
+ /// Convert double array to Point array.
///
/// Array of double.
/// Array of Point.
@@ -664,7 +1290,7 @@ public static List ConvertArrayToPointList(double[] src, List dst
public static Point[] ConvertArrayToPointArray(double[] src, Point[] dst = null)
{
if (src == null)
- throw new ArgumentNullException("src");
+ throw new ArgumentNullException(nameof(src));
if (dst != null && src.Length / 2 != dst.Length)
throw new ArgumentException("src.Length / 2 != dst.Length");
@@ -681,5 +1307,295 @@ public static Point[] ConvertArrayToPointArray(double[] src, Point[] dst = null)
return dst;
}
+
+ ///
+ /// Convert double array to Vec2d array.
+ ///
+ /// Array of double.
+ /// Array of Vec2d.
+ /// Array of Vec2d.
+ public static Vec2d[] ConvertArrayToVec2dArray(double[] src, Vec2d[] dst = null)
+ {
+ if (src == null)
+ throw new ArgumentNullException(nameof(src));
+
+ if (dst != null && src.Length / 2 != dst.Length)
+ throw new ArgumentException("src.Length / 2 != dst.Length");
+
+ if (dst == null)
+ {
+ dst = new Vec2d[src.Length / 2];
+ }
+
+ for (int i = 0; i < dst.Length; ++i)
+ {
+ dst[i].Item1 = src[i * 2];
+ dst[i].Item2 = src[i * 2 + 1];
+ }
+
+ return dst;
+ }
+
+ ///
+ /// Convert double array to ValueTuple array.
+ ///
+ /// Array of double.
+ /// Array of ValueTuple.
+ /// Array of ValueTuple.
+ public static (double x, double y)[] ConvertArrayToValueTupleArray(double[] src, (double x, double y)[] dst = null)
+ {
+ if (src == null)
+ throw new ArgumentNullException(nameof(src));
+
+ if (dst != null && src.Length / 2 != dst.Length)
+ throw new ArgumentException("src.Length / 2 != dst.Length");
+
+ if (dst == null)
+ {
+ dst = new (double x, double y)[src.Length / 2];
+ }
+
+ for (int i = 0; i < dst.Length; ++i)
+ {
+ dst[i].Item1 = src[i * 2];
+ dst[i].Item2 = src[i * 2 + 1];
+ }
+
+ return dst;
+ }
+
+ ///
+ /// Convert Vec2d array to Point list.
+ ///
+ /// Array of Vec2d.
+ /// List of Point.
+ /// List of Point.
+ public static List<Point> ConvertVec2dArrayToPointList(in Vec2d[] src, List<Point> dst = null)
+ {
+ if (src == null)
+ throw new ArgumentNullException(nameof(src));
+
+ if (dst == null)
+ {
+ dst = new List<Point>();
+ }
+
+ if (dst.Count != src.Length)
+ {
+ dst.Clear();
+ for (int i = 0; i < src.Length; i++)
+ {
+ dst.Add(new Point());
+ }
+ }
+
+ for (int i = 0; i < src.Length; ++i)
+ {
+ dst[i].x = src[i].Item1;
+ dst[i].y = src[i].Item2;
+ }
+
+ return dst;
+ }
+
+ ///
+ /// Convert Vec2d array to Point array.
+ ///
+ /// Array of Vec2d.
+ /// Array of Point.
+ /// Array of Point.
+ public static Point[] ConvertVec2dArrayToPointArray(in Vec2d[] src, Point[] dst = null)
+ {
+ if (src == null)
+ throw new ArgumentNullException(nameof(src));
+
+ if (dst != null && src.Length != dst.Length)
+ throw new ArgumentException("src.Count != dst.Length");
+
+ if (dst == null)
+ {
+ dst = new Point[src.Length];
+ }
+
+ for (int i = 0; i < src.Length; ++i)
+ {
+ dst[i] = new Point(src[i].Item1, src[i].Item2);
+ }
+
+ return dst;
+ }
+
+ ///
+ /// Convert Vec2d array to double array.
+ ///
+ /// Array of Vec2d.
+ /// Array of double.
+ /// Array of double.
+ public static double[] ConvertVec2dArrayToArray(in Vec2d[] src, double[] dst = null)
+ {
+ if (src == null)
+ throw new ArgumentNullException(nameof(src));
+
+ if (dst != null && src.Length * 2 != dst.Length)
+ throw new ArgumentException("src.Count * 2 != dst.Length");
+
+ if (dst == null)
+ {
+ dst = new double[src.Length * 2];
+ }
+
+ for (int i = 0; i < src.Length; ++i)
+ {
+ dst[i * 2] = src[i].Item1;
+ dst[i * 2 + 1] = src[i].Item2;
+ }
+
+ return dst;
+ }
+
+ ///
+ /// Convert Vec2d array to ValueTuple array.
+ ///
+ /// Array of Vec2d.
+ /// Array of ValueTuple.
+ /// Array of ValueTuple.
+ public static (double x, double y)[] ConvertVec2dArrayToValueTupleArray(in Vec2d[] src, (double x, double y)[] dst = null)
+ {
+ if (src == null)
+ throw new ArgumentNullException(nameof(src));
+
+ if (dst != null && src.Length != dst.Length)
+ throw new ArgumentException("src.Count != dst.Length");
+
+ if (dst == null)
+ {
+ dst = new (double x, double y)[src.Length];
+ }
+
+ for (int i = 0; i < src.Length; ++i)
+ {
+ dst[i].Item1 = src[i].Item1;
+ dst[i].Item2 = src[i].Item2;
+ }
+
+ return dst;
+ }
+
+ ///
+ /// Convert ValueTuple array to Point list.
+ ///
+ /// Array of ValueTuple.
+ /// List of Point.
+ /// List of Point.
+ public static List<Point> ConvertValueTupleArrayToPointList(in (double x, double y)[] src, List<Point> dst = null)
+ {
+ if (src == null)
+ throw new ArgumentNullException(nameof(src));
+
+ if (dst == null)
+ {
+ dst = new List<Point>();
+ }
+
+ if (dst.Count != src.Length)
+ {
+ dst.Clear();
+ for (int i = 0; i < src.Length; i++)
+ {
+ dst.Add(new Point());
+ }
+ }
+
+ for (int i = 0; i < src.Length; ++i)
+ {
+ dst[i].x = src[i].Item1;
+ dst[i].y = src[i].Item2;
+ }
+
+ return dst;
+ }
+
+ ///
+ /// Convert ValueTuple array to Point array.
+ ///
+ /// Array of ValueTuple.
+ /// Array of Point.
+ /// Array of Point.
+ public static Point[] ConvertValueTupleArrayToPointArray(in (double x, double y)[] src, Point[] dst = null)
+ {
+ if (src == null)
+ throw new ArgumentNullException(nameof(src));
+
+ if (dst != null && src.Length != dst.Length)
+ throw new ArgumentException("src.Count != dst.Length");
+
+ if (dst == null)
+ {
+ dst = new Point[src.Length];
+ }
+
+ for (int i = 0; i < src.Length; ++i)
+ {
+ dst[i] = new Point(src[i].Item1, src[i].Item2);
+ }
+
+ return dst;
+ }
+
+ ///
+ /// Convert ValueTuple array to double array.
+ ///
+ /// Array of ValueTuple.
+ /// Array of double.
+ /// Array of double.
+ public static double[] ConvertValueTupleArrayToArray(in (double x, double y)[] src, double[] dst = null)
+ {
+ if (src == null)
+ throw new ArgumentNullException(nameof(src));
+
+ if (dst != null && src.Length * 2 != dst.Length)
+ throw new ArgumentException("src.Count * 2 != dst.Length");
+
+ if (dst == null)
+ {
+ dst = new double[src.Length * 2];
+ }
+
+ for (int i = 0; i < src.Length; ++i)
+ {
+ dst[i * 2] = src[i].Item1;
+ dst[i * 2 + 1] = src[i].Item2;
+ }
+
+ return dst;
+ }
+
+ ///
+ /// Convert ValueTuple array to Vec2d array.
+ ///
+ /// Array of ValueTuple.
+ /// Array of Vec2d.
+ /// Array of Vec2d.
+ public static Vec2d[] ConvertValueTupleArrayToVec2dArray(in (double x, double y)[] src, Vec2d[] dst = null)
+ {
+ if (src == null)
+ throw new ArgumentNullException(nameof(src));
+
+ if (dst != null && src.Length != dst.Length)
+ throw new ArgumentException("src.Count != dst.Length");
+
+ if (dst == null)
+ {
+ dst = new Vec2d[src.Length];
+ }
+
+ for (int i = 0; i < src.Length; ++i)
+ {
+ dst[i].Item1 = src[i].x;
+ dst[i].Item2 = src[i].y;
+ }
+
+ return dst;
+ }
}
}
diff --git a/HLWithDlibExampleMRTK3/Assets/HoloLensWithDlibFaceLandmarkDetectorExample/HLARHeadExample/HLARHeadExample.cs b/HLWithDlibExampleMRTK3/Assets/HoloLensWithDlibFaceLandmarkDetectorExample/HLARHeadExample/HLARHeadExample.cs
index ea017ce..7a747b0 100644
--- a/HLWithDlibExampleMRTK3/Assets/HoloLensWithDlibFaceLandmarkDetectorExample/HLARHeadExample/HLARHeadExample.cs
+++ b/HLWithDlibExampleMRTK3/Assets/HoloLensWithDlibFaceLandmarkDetectorExample/HLARHeadExample/HLARHeadExample.cs
@@ -22,7 +22,7 @@ namespace HoloLensWithDlibFaceLandmarkDetectorExample
/// HoloLens AR Head Example
/// An example of AR head projection using OpenCVForUnity and DlibLandmarkDetector on Hololens.
///
- [RequireComponent(typeof(HLCameraStreamToMatHelper), typeof(ImageOptimizationHelper))]
+ [RequireComponent(typeof(HLCameraStream2MatHelper), typeof(ImageOptimizationHelper))]
public class HLARHeadExample : MonoBehaviour
{
[SerializeField, HeaderAttribute("Preview")]
@@ -265,7 +265,7 @@ public class HLARHeadExample : MonoBehaviour
///
/// The webcam texture to mat helper.
///
- HLCameraStreamToMatHelper webCamTextureToMatHelper;
+ HLCameraStream2MatHelper webCamTextureToMatHelper;
///
/// The image optimization helper.
@@ -297,11 +297,6 @@ public class HLARHeadExample : MonoBehaviour
///
string dlibShapePredictorFileName = "DlibFaceLandmarkDetector/sp_human_face_68.dat";
- ///
- /// The dlib shape predictor file path.
- ///
- string dlibShapePredictorFilePath;
-
Scalar COLOR_WHITE = new Scalar(255, 255, 255, 255);
Scalar COLOR_GRAY = new Scalar(128, 128, 128, 255);
@@ -386,8 +381,18 @@ bool isDetectingInFrameArrivedThread
public Text debugStr;
+ string cascade_filepath;
+ string cascade4Thread_filepath;
+ string dlibShapePredictor_filepath;
+ string dlibShapePredictor4Thread_filepath;
+
+ ///
+ /// The CancellationTokenSource.
+ ///
+ CancellationTokenSource cts = new CancellationTokenSource();
+
// Use this for initialization
- protected void Start()
+ async void Start()
{
displayCameraPreviewToggle.isOn = displayCameraPreview;
enableDownScaleToggle.isOn = enableDownScale;
@@ -400,29 +405,69 @@ protected void Start()
enableLerpFilterToggle.isOn = enableLerpFilter;
imageOptimizationHelper = gameObject.GetComponent<ImageOptimizationHelper>();
- webCamTextureToMatHelper = gameObject.GetComponent<HLCameraStreamToMatHelper>();
+ webCamTextureToMatHelper = gameObject.GetComponent<HLCameraStream2MatHelper>();
#if WINDOWS_UWP && !DISABLE_HOLOLENSCAMSTREAM_API
webCamTextureToMatHelper.frameMatAcquired += OnFrameMatAcquired;
#endif
- webCamTextureToMatHelper.outputColorFormat = WebCamTextureToMatHelper.ColorFormat.GRAY;
- webCamTextureToMatHelper.Initialize();
rectangleTracker = new RectangleTracker();
+
+ // Asynchronously retrieves the readable file path from the StreamingAssets directory.
+ if (debugStr != null)
+ {
+ debugStr.text = "Preparing file access...";
+ }
+
+ cascade_filepath = await DlibFaceLandmarkDetector.UnityUtils.Utils.getFilePathAsyncTask("OpenCVForUnity/objdetect/lbpcascade_frontalface.xml", cancellationToken: cts.Token);
+ //cascade4Thread_filepath = await DlibFaceLandmarkDetector.UnityUtils.Utils.getFilePathAsyncTask("OpenCVForUnity/objdetect/haarcascade_frontalface_alt.xml", cancellationToken: cts.Token);
+ cascade4Thread_filepath = await DlibFaceLandmarkDetector.UnityUtils.Utils.getFilePathAsyncTask("OpenCVForUnity/objdetect/lbpcascade_frontalface.xml", cancellationToken: cts.Token);
dlibShapePredictorFileName = HoloLensWithDlibFaceLandmarkDetectorExample.dlibShapePredictorFileName;
- dlibShapePredictorFilePath = DlibFaceLandmarkDetector.UnityUtils.Utils.getFilePath(dlibShapePredictorFileName);
- if (string.IsNullOrEmpty(dlibShapePredictorFilePath))
+ dlibShapePredictor_filepath = await DlibFaceLandmarkDetector.UnityUtils.Utils.getFilePathAsyncTask(dlibShapePredictorFileName, cancellationToken: cts.Token);
+ dlibShapePredictor4Thread_filepath = await DlibFaceLandmarkDetector.UnityUtils.Utils.getFilePathAsyncTask("DlibFaceLandmarkDetector/sp_human_face_6.dat", cancellationToken: cts.Token);
+
+ if (debugStr != null)
+ {
+ debugStr.text = "";
+ }
+
+ Run();
+ }
+
+ // Use this for initialization
+ void Run()
+ {
+ cascade = new CascadeClassifier();
+ cascade.load(cascade_filepath);
+#if !WINDOWS_UWP || UNITY_EDITOR
+ // "empty" method is not working on the UWP platform.
+ if (cascade.empty())
+ {
+ Debug.LogError("cascade file is not loaded. Please copy from “OpenCVForUnity/StreamingAssets/OpenCVForUnity/objdetect/” to “Assets/StreamingAssets/OpenCVForUnity/objdetect/” folder. ");
+ }
+#endif
+
+ cascade4Thread = new CascadeClassifier();
+ cascade4Thread.load(cascade4Thread_filepath);
+#if !WINDOWS_UWP || UNITY_EDITOR
+ // "empty" method is not working on the UWP platform.
+ if (cascade4Thread.empty())
+ {
+ Debug.LogError("cascade file is not loaded. Please copy from “OpenCVForUnity/StreamingAssets/OpenCVForUnity/objdetect/” to “Assets/StreamingAssets/OpenCVForUnity/objdetect/” folder. ");
+ }
+#endif
+
+ if (string.IsNullOrEmpty(dlibShapePredictor_filepath))
{
Debug.LogError("shape predictor file does not exist. Please copy from “DlibFaceLandmarkDetector/StreamingAssets/DlibFaceLandmarkDetector/” to “Assets/StreamingAssets/DlibFaceLandmarkDetector/” folder. ");
}
- faceLandmarkDetector = new FaceLandmarkDetector(dlibShapePredictorFilePath);
+ faceLandmarkDetector = new FaceLandmarkDetector(dlibShapePredictor_filepath);
- dlibShapePredictorFilePath = DlibFaceLandmarkDetector.UnityUtils.Utils.getFilePath("DlibFaceLandmarkDetector/sp_human_face_6.dat");
- if (string.IsNullOrEmpty(dlibShapePredictorFilePath))
+ if (string.IsNullOrEmpty(dlibShapePredictor4Thread_filepath))
{
Debug.LogError("shape predictor file does not exist. Please copy from “DlibFaceLandmarkDetector/StreamingAssets/DlibFaceLandmarkDetector/” to “Assets/StreamingAssets/DlibFaceLandmarkDetector/” folder. ");
}
- faceLandmarkDetector4Thread = new FaceLandmarkDetector(dlibShapePredictorFilePath);
+ faceLandmarkDetector4Thread = new FaceLandmarkDetector(dlibShapePredictor4Thread_filepath);
// set 3d face object points. (right-handed coordinates system)
@@ -469,6 +514,9 @@ protected void Start()
opticalFlowFilter = new OFPointsFilter((int)faceLandmarkDetector.GetShapePredictorNumParts());
opticalFlowFilter.diffCheckSensitivity /= imageOptimizationHelper.downscaleRatio;
+
+ webCamTextureToMatHelper.outputColorFormat = Source2MatHelperColorFormat.GRAY;
+ webCamTextureToMatHelper.Initialize();
}
///
@@ -584,29 +632,7 @@ public void OnWebCamTextureToMatHelperInitialized()
mouthParticleSystem = mouth.GetComponentsInChildren<ParticleSystem>(true);
-
- //grayMat = new Mat();
- cascade = new CascadeClassifier();
- cascade.load(Utils.getFilePath("OpenCVForUnity/objdetect/lbpcascade_frontalface.xml"));
-#if !WINDOWS_UWP || UNITY_EDITOR
- // "empty" method is not working on the UWP platform.
- if (cascade.empty())
- {
- Debug.LogError("cascade file is not loaded. Please copy from “OpenCVForUnity/StreamingAssets/OpenCVForUnity/objdetect/” to “Assets/StreamingAssets/OpenCVForUnity/objdetect/” folder. ");
- }
-#endif
-
grayMat4Thread = new Mat();
- cascade4Thread = new CascadeClassifier();
- //cascade4Thread.load(Utils.getFilePath("OpenCVForUnity/objdetect/haarcascade_frontalface_alt.xml"));
- cascade4Thread.load(Utils.getFilePath("OpenCVForUnity/objdetect/lbpcascade_frontalface.xml"));
-#if !WINDOWS_UWP || UNITY_EDITOR
- // "empty" method is not working on the UWP platform.
- if (cascade4Thread.empty())
- {
- Debug.LogError("cascade file is not loaded. Please copy from “OpenCVForUnity/StreamingAssets/OpenCVForUnity/objdetect/” to “Assets/StreamingAssets/OpenCVForUnity/objdetect/” folder. ");
- }
-#endif
}
///
@@ -630,15 +656,9 @@ public void OnWebCamTextureToMatHelperDisposed()
ExecuteOnMainThread.Clear();
}
- if (cascade != null)
- cascade.Dispose();
-
if (grayMat4Thread != null)
grayMat4Thread.Dispose();
- if (cascade4Thread != null)
- cascade4Thread.Dispose();
-
rectangleTracker.Reset();
camMatrix.Dispose();
@@ -667,12 +687,13 @@ public void OnWebCamTextureToMatHelperDisposed()
}
///
- /// Raises the web cam texture to mat helper error occurred event.
+ /// Raises the webcam texture to mat helper error occurred event.
///
/// Error code.
- public void OnWebCamTextureToMatHelperErrorOccurred(WebCamTextureToMatHelper.ErrorCode errorCode)
+ /// Message.
+ public void OnWebCamTextureToMatHelperErrorOccurred(Source2MatHelperErrorCode errorCode, string message)
{
- Debug.Log("OnWebCamTextureToMatHelperErrorOccurred " + errorCode);
+ Debug.Log("OnWebCamTextureToMatHelperErrorOccurred " + errorCode + ":" + message);
}
#if WINDOWS_UWP && !DISABLE_HOLOLENSCAMSTREAM_API
@@ -1536,6 +1557,12 @@ void OnDestroy()
webCamTextureToMatHelper.Dispose();
imageOptimizationHelper.Dispose();
+ if (cascade != null)
+ cascade.Dispose();
+
+ if (cascade4Thread != null)
+ cascade4Thread.Dispose();
+
if (faceLandmarkDetector != null)
faceLandmarkDetector.Dispose();
@@ -1544,6 +1571,9 @@ void OnDestroy()
if (rectangleTracker != null)
rectangleTracker.Dispose();
+
+ if (cts != null)
+ cts.Dispose();
}
///
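Because Start now awaits getFilePathAsyncTask with cts.Token, a component destroyed mid-load can still have path requests in flight; cancelling before disposal keeps those awaits from resuming against a dead object. A minimal sketch of that variant (the Cancel call is an assumption added here, not something this patch makes):

    // Hypothetical sketch: cancel pending StreamingAssets path requests before disposing the token source.
    if (cts != null)
    {
        cts.Cancel();
        cts.Dispose();
    }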
diff --git a/HLWithDlibExampleMRTK3/Assets/HoloLensWithDlibFaceLandmarkDetectorExample/HLARHeadExample/HLARHeadExample.unity b/HLWithDlibExampleMRTK3/Assets/HoloLensWithDlibFaceLandmarkDetectorExample/HLARHeadExample/HLARHeadExample.unity
index db74cc2..1f5279b 100644
--- a/HLWithDlibExampleMRTK3/Assets/HoloLensWithDlibFaceLandmarkDetectorExample/HLARHeadExample/HLARHeadExample.unity
+++ b/HLWithDlibExampleMRTK3/Assets/HoloLensWithDlibFaceLandmarkDetectorExample/HLARHeadExample/HLARHeadExample.unity
@@ -3413,8 +3413,8 @@ GameObject:
serializedVersion: 6
m_Component:
- component: {fileID: 1076083699}
- - component: {fileID: 1076083697}
- component: {fileID: 1076083695}
+ - component: {fileID: 1076083701}
- component: {fileID: 1076083700}
m_Layer: 0
m_Name: HLARHeadExample
@@ -3437,74 +3437,6 @@ MonoBehaviour:
m_EditorClassIdentifier:
_downscaleRatio: 2
_frameSkippingRatio: 1
---- !u!114 &1076083697
-MonoBehaviour:
- m_ObjectHideFlags: 0
- m_CorrespondingSourceObject: {fileID: 0}
- m_PrefabInstance: {fileID: 0}
- m_PrefabAsset: {fileID: 0}
- m_GameObject: {fileID: 1076083694}
- m_Enabled: 1
- m_EditorHideFlags: 0
- m_Script: {fileID: 11500000, guid: 0195895dd83eb204da131e4a7c4bcfe3, type: 3}
- m_Name:
- m_EditorClassIdentifier:
- _requestedDeviceName:
- _requestedWidth: 896
- _requestedHeight: 504
- _requestedIsFrontFacing: 0
- _requestedFPS: 30
- _rotate90Degree: 0
- _flipVertical: 0
- _flipHorizontal: 0
- _outputColorFormat: 0
- _timeoutFrameCount: 300
- onInitialized:
- m_PersistentCalls:
- m_Calls:
- - m_Target: {fileID: 1076083700}
- m_TargetAssemblyTypeName:
- m_MethodName: OnWebCamTextureToMatHelperInitialized
- m_Mode: 1
- m_Arguments:
- m_ObjectArgument: {fileID: 0}
- m_ObjectArgumentAssemblyTypeName: UnityEngine.Object, UnityEngine
- m_IntArgument: 0
- m_FloatArgument: 0
- m_StringArgument:
- m_BoolArgument: 0
- m_CallState: 2
- onDisposed:
- m_PersistentCalls:
- m_Calls:
- - m_Target: {fileID: 1076083700}
- m_TargetAssemblyTypeName:
- m_MethodName: OnWebCamTextureToMatHelperDisposed
- m_Mode: 1
- m_Arguments:
- m_ObjectArgument: {fileID: 0}
- m_ObjectArgumentAssemblyTypeName: UnityEngine.Object, UnityEngine
- m_IntArgument: 0
- m_FloatArgument: 0
- m_StringArgument:
- m_BoolArgument: 0
- m_CallState: 2
- onErrorOccurred:
- m_PersistentCalls:
- m_Calls:
- - m_Target: {fileID: 1076083700}
- m_TargetAssemblyTypeName:
- m_MethodName: OnWebCamTextureToMatHelperErrorOccurred
- m_Mode: 0
- m_Arguments:
- m_ObjectArgument: {fileID: 0}
- m_ObjectArgumentAssemblyTypeName: UnityEngine.Object, UnityEngine
- m_IntArgument: 0
- m_FloatArgument: 0
- m_StringArgument:
- m_BoolArgument: 0
- m_CallState: 2
- avoidAndroidFrontCameraLowLightIssue: 0
--- !u!4 &1076083699
Transform:
m_ObjectHideFlags: 0
@@ -3565,6 +3497,76 @@ MonoBehaviour:
videoFPS: {fileID: 443524926926304178}
trackFPS: {fileID: 443524928155071722}
debugStr: {fileID: 443524927024673313}
+--- !u!114 &1076083701
+MonoBehaviour:
+ m_ObjectHideFlags: 0
+ m_CorrespondingSourceObject: {fileID: 0}
+ m_PrefabInstance: {fileID: 0}
+ m_PrefabAsset: {fileID: 0}
+ m_GameObject: {fileID: 1076083694}
+ m_Enabled: 1
+ m_EditorHideFlags: 0
+ m_Script: {fileID: 11500000, guid: a80f11196f5e6394aaec3da665ebfc5a, type: 3}
+ m_Name:
+ m_EditorClassIdentifier:
+ _requestedDeviceName:
+ _requestedWidth: 896
+ _requestedHeight: 504
+ _requestedIsFrontFacing: 0
+ _requestedFPS: 30
+ _rotate90Degree: 0
+ _flipVertical: 0
+ _flipHorizontal: 0
+ _outputColorFormat: 0
+ _timeoutFrameCount: 1500
+ _onInitialized:
+ m_PersistentCalls:
+ m_Calls:
+ - m_Target: {fileID: 1076083700}
+ m_TargetAssemblyTypeName: HoloLensWithDlibFaceLandmarkDetectorExample.HLARHeadExample,
+ Assembly-CSharp
+ m_MethodName: OnWebCamTextureToMatHelperInitialized
+ m_Mode: 1
+ m_Arguments:
+ m_ObjectArgument: {fileID: 0}
+ m_ObjectArgumentAssemblyTypeName: UnityEngine.Object, UnityEngine
+ m_IntArgument: 0
+ m_FloatArgument: 0
+ m_StringArgument:
+ m_BoolArgument: 0
+ m_CallState: 2
+ _onDisposed:
+ m_PersistentCalls:
+ m_Calls:
+ - m_Target: {fileID: 1076083700}
+ m_TargetAssemblyTypeName: HoloLensWithDlibFaceLandmarkDetectorExample.HLARHeadExample,
+ Assembly-CSharp
+ m_MethodName: OnWebCamTextureToMatHelperDisposed
+ m_Mode: 1
+ m_Arguments:
+ m_ObjectArgument: {fileID: 0}
+ m_ObjectArgumentAssemblyTypeName: UnityEngine.Object, UnityEngine
+ m_IntArgument: 0
+ m_FloatArgument: 0
+ m_StringArgument:
+ m_BoolArgument: 0
+ m_CallState: 2
+ _onErrorOccurred:
+ m_PersistentCalls:
+ m_Calls:
+ - m_Target: {fileID: 1076083700}
+ m_TargetAssemblyTypeName: HoloLensWithDlibFaceLandmarkDetectorExample.HLARHeadExample,
+ Assembly-CSharp
+ m_MethodName: OnWebCamTextureToMatHelperErrorOccurred
+ m_Mode: 0
+ m_Arguments:
+ m_ObjectArgument: {fileID: 0}
+ m_ObjectArgumentAssemblyTypeName: UnityEngine.Object, UnityEngine
+ m_IntArgument: 0
+ m_FloatArgument: 0
+ m_StringArgument:
+ m_BoolArgument: 0
+ m_CallState: 2
--- !u!1 &1108638276
GameObject:
m_ObjectHideFlags: 0
diff --git a/HLWithDlibExampleMRTK3/Assets/HoloLensWithDlibFaceLandmarkDetectorExample/HLFaceLandmarkDetectionExample/HLFaceLandmarkDetectionExample.cs b/HLWithDlibExampleMRTK3/Assets/HoloLensWithDlibFaceLandmarkDetectorExample/HLFaceLandmarkDetectionExample/HLFaceLandmarkDetectionExample.cs
index 3c517c4..4c50853 100644
--- a/HLWithDlibExampleMRTK3/Assets/HoloLensWithDlibFaceLandmarkDetectorExample/HLFaceLandmarkDetectionExample/HLFaceLandmarkDetectionExample.cs
+++ b/HLWithDlibExampleMRTK3/Assets/HoloLensWithDlibFaceLandmarkDetectorExample/HLFaceLandmarkDetectionExample/HLFaceLandmarkDetectionExample.cs
@@ -22,7 +22,7 @@ namespace HoloLensWithDlibFaceLandmarkDetectorExample
/// An example of face landmark detection using OpenCVForUnity and DlibLandmarkDetector on Hololens.
/// Referring to https://github.com/Itseez/opencv/blob/master/modules/objdetect/src/detection_based_tracker.cpp.
///
- [RequireComponent(typeof(HLCameraStreamToMatHelper), typeof(ImageOptimizationHelper))]
+ [RequireComponent(typeof(HLCameraStream2MatHelper), typeof(ImageOptimizationHelper))]
public class HLFaceLandmarkDetectionExample : MonoBehaviour
{
@@ -89,7 +89,7 @@ public class HLFaceLandmarkDetectionExample : MonoBehaviour
///
/// The webcam texture to mat helper.
///
- HLCameraStreamToMatHelper webCamTextureToMatHelper;
+ HLCameraStream2MatHelper webCamTextureToMatHelper;
///
/// The image optimization helper.
@@ -126,11 +126,6 @@ public class HLFaceLandmarkDetectionExample : MonoBehaviour
///
string dlibShapePredictorFileName = "DlibFaceLandmarkDetector/sp_human_face_68.dat";
- ///
- /// The dlib shape predictor file path.
- ///
- string dlibShapePredictorFilePath;
-
Scalar COLOR_WHITE = new Scalar(255, 255, 255, 255);
Scalar COLOR_GRAY = new Scalar(128, 128, 128, 255);
@@ -216,8 +211,18 @@ bool isDetectingInFrameArrivedThread
public Text debugStr;
+ string cascade_filepath;
+ string cascade4Thread_filepath;
+ string dlibShapePredictor_filepath;
+ string dlibShapePredictor4Thread_filepath;
+
+ ///
+ /// The CancellationTokenSource.
+ ///
+ CancellationTokenSource cts = new CancellationTokenSource();
+
// Use this for initialization
- protected void Start()
+ async void Start()
{
enableDownScaleToggle.isOn = enableDownScale;
useSeparateDetectionToggle.isOn = useSeparateDetection;
@@ -226,30 +231,72 @@ protected void Start()
displayDetectedFaceRectToggle.isOn = displayDetectedFaceRect;
imageOptimizationHelper = gameObject.GetComponent();
- webCamTextureToMatHelper = gameObject.GetComponent();
+ webCamTextureToMatHelper = gameObject.GetComponent();
#if WINDOWS_UWP && !DISABLE_HOLOLENSCAMSTREAM_API
webCamTextureToMatHelper.frameMatAcquired += OnFrameMatAcquired;
#endif
- webCamTextureToMatHelper.outputColorFormat = WebCamTextureToMatHelper.ColorFormat.GRAY;
- webCamTextureToMatHelper.Initialize();
rectangleTracker = new RectangleTracker();
+
+ // Asynchronously retrieves the readable file path from the StreamingAssets directory.
+ if (debugStr != null)
+ {
+ debugStr.text = "Preparing file access...";
+ }
+
+ cascade_filepath = await DlibFaceLandmarkDetector.UnityUtils.Utils.getFilePathAsyncTask("OpenCVForUnity/objdetect/lbpcascade_frontalface.xml", cancellationToken: cts.Token);
+ //cascade4Thread_filepath = await DlibFaceLandmarkDetector.UnityUtils.Utils.getFilePathAsyncTask("OpenCVForUnity/objdetect/haarcascade_frontalface_alt.xml", cancellationToken: cts.Token);
+ cascade4Thread_filepath = await DlibFaceLandmarkDetector.UnityUtils.Utils.getFilePathAsyncTask("OpenCVForUnity/objdetect/lbpcascade_frontalface.xml", cancellationToken: cts.Token);
dlibShapePredictorFileName = HoloLensWithDlibFaceLandmarkDetectorExample.dlibShapePredictorFileName;
- dlibShapePredictorFilePath = DlibFaceLandmarkDetector.UnityUtils.Utils.getFilePath(dlibShapePredictorFileName);
- if (string.IsNullOrEmpty(dlibShapePredictorFilePath))
+ dlibShapePredictor_filepath = await DlibFaceLandmarkDetector.UnityUtils.Utils.getFilePathAsyncTask(dlibShapePredictorFileName, cancellationToken: cts.Token);
+ dlibShapePredictor4Thread_filepath = await DlibFaceLandmarkDetector.UnityUtils.Utils.getFilePathAsyncTask("DlibFaceLandmarkDetector/sp_human_face_6.dat", cancellationToken: cts.Token);
+
+ if (debugStr != null)
{
- Debug.LogError("shape predictor file does not exist. Please copy from “DlibFaceLandmarkDetector/StreamingAssets/DlibFaceLandmarkDetector/” to “Assets/StreamingAssets/DlibFaceLandmarkDetector/” folder. ");
+ debugStr.text = "";
+ }
+
+ Run();
+ }
+
+ // Use this for initialization
+ void Run()
+ {
+ cascade = new CascadeClassifier();
+ cascade.load(cascade_filepath);
+#if !WINDOWS_UWP || UNITY_EDITOR
+ // "empty" method is not working on the UWP platform.
+ if (cascade.empty())
+ {
+ Debug.LogError("cascade file is not loaded. Please copy from “OpenCVForUnity/StreamingAssets/OpenCVForUnity/objdetect/” to “Assets/StreamingAssets/OpenCVForUnity/objdetect/” folder. ");
+ }
+#endif
+
+ cascade4Thread = new CascadeClassifier();
+ cascade4Thread.load(cascade4Thread_filepath);
+#if !WINDOWS_UWP || UNITY_EDITOR
+ // "empty" method is not working on the UWP platform.
+ if (cascade4Thread.empty())
+ {
+ Debug.LogError("cascade file is not loaded. Please copy from “OpenCVForUnity/StreamingAssets/OpenCVForUnity/objdetect/” to “Assets/StreamingAssets/OpenCVForUnity/objdetect/” folder. ");
}
- faceLandmarkDetector = new FaceLandmarkDetector(dlibShapePredictorFilePath);
+#endif
+ if (string.IsNullOrEmpty(dlibShapePredictor_filepath))
+ {
+ Debug.LogError("shape predictor file does not exist. Please copy from “DlibFaceLandmarkDetector/StreamingAssets/DlibFaceLandmarkDetector/” to “Assets/StreamingAssets/DlibFaceLandmarkDetector/” folder. ");
+ }
+ faceLandmarkDetector = new FaceLandmarkDetector(dlibShapePredictor_filepath);
- dlibShapePredictorFilePath = DlibFaceLandmarkDetector.UnityUtils.Utils.getFilePath("DlibFaceLandmarkDetector/sp_human_face_6.dat");
- if (string.IsNullOrEmpty(dlibShapePredictorFilePath))
+ if (string.IsNullOrEmpty(dlibShapePredictor4Thread_filepath))
{
Debug.LogError("shape predictor file does not exist. Please copy from “DlibFaceLandmarkDetector/StreamingAssets/DlibFaceLandmarkDetector/” to “Assets/StreamingAssets/DlibFaceLandmarkDetector/” folder. ");
}
- faceLandmarkDetector4Thread = new FaceLandmarkDetector(dlibShapePredictorFilePath);
+ faceLandmarkDetector4Thread = new FaceLandmarkDetector(dlibShapePredictor4Thread_filepath);
+
+ webCamTextureToMatHelper.outputColorFormat = Source2MatHelperColorFormat.GRAY;
+ webCamTextureToMatHelper.Initialize();
}
///
@@ -305,28 +352,7 @@ public void OnWebCamTextureToMatHelperInitialized()
quad_renderer.sharedMaterial.SetFloat("_VignetteScale", 0.0f);
-
- cascade = new CascadeClassifier();
- cascade.load(Utils.getFilePath("OpenCVForUnity/objdetect/lbpcascade_frontalface.xml"));
-#if !WINDOWS_UWP || UNITY_EDITOR
- // "empty" method is not working on the UWP platform.
- if (cascade.empty())
- {
- Debug.LogError("cascade file is not loaded. Please copy from “OpenCVForUnity/StreamingAssets/OpenCVForUnity/objdetect/” to “Assets/StreamingAssets/OpenCVForUnity/objdetect/” folder. ");
- }
-#endif
-
grayMat4Thread = new Mat();
- cascade4Thread = new CascadeClassifier();
- //cascade4Thread.load(Utils.getFilePath("OpenCVForUnity/objdetect/haarcascade_frontalface_alt.xml"));
- cascade4Thread.load(Utils.getFilePath("OpenCVForUnity/objdetect/lbpcascade_frontalface.xml"));
-#if !WINDOWS_UWP || UNITY_EDITOR
- // "empty" method is not working on the UWP platform.
- if (cascade4Thread.empty())
- {
- Debug.LogError("cascade file is not loaded. Please copy from “OpenCVForUnity/StreamingAssets/OpenCVForUnity/objdetect/” to “Assets/StreamingAssets/OpenCVForUnity/objdetect/” folder. ");
- }
-#endif
}
///
@@ -350,15 +376,9 @@ public void OnWebCamTextureToMatHelperDisposed()
ExecuteOnMainThread.Clear();
}
- if (cascade != null)
- cascade.Dispose();
-
if (grayMat4Thread != null)
grayMat4Thread.Dispose();
- if (cascade4Thread != null)
- cascade4Thread.Dispose();
-
rectangleTracker.Reset();
if (debugStr != null)
@@ -369,12 +389,13 @@ public void OnWebCamTextureToMatHelperDisposed()
}
///
- /// Raises the web cam texture to mat helper error occurred event.
+ /// Raises the webcam texture to mat helper error occurred event.
///
/// Error code.
- public void OnWebCamTextureToMatHelperErrorOccurred(WebCamTextureToMatHelper.ErrorCode errorCode)
+ /// Message.
+ public void OnWebCamTextureToMatHelperErrorOccurred(Source2MatHelperErrorCode errorCode, string message)
{
- Debug.Log("OnWebCamTextureToMatHelperErrorOccurred " + errorCode);
+ Debug.Log("OnWebCamTextureToMatHelperErrorOccurred " + errorCode + ":" + message);
}
#if WINDOWS_UWP && !DISABLE_HOLOLENSCAMSTREAM_API
@@ -1084,6 +1105,12 @@ void OnDestroy()
webCamTextureToMatHelper.Dispose();
imageOptimizationHelper.Dispose();
+ if (cascade != null)
+ cascade.Dispose();
+
+ if (cascade4Thread != null)
+ cascade4Thread.Dispose();
+
if (faceLandmarkDetector != null)
faceLandmarkDetector.Dispose();
@@ -1092,6 +1119,9 @@ void OnDestroy()
if (rectangleTracker != null)
rectangleTracker.Dispose();
+
+ if (cts != null)
+ cts.Dispose();
}
///
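
Note on the initialization flow above: Start() now awaits readable StreamingAssets paths via getFilePathAsyncTask (cancellable through the CancellationTokenSource disposed in OnDestroy) and only then calls Run(), which loads the cascades and shape predictors before initializing the camera helper. A minimal sketch of that pattern follows; only the getFilePathAsyncTask call, its cancellationToken parameter, and the FaceLandmarkDetector constructor are taken from the change itself, while the class and field names are illustrative.

using System.Threading;
using DlibFaceLandmarkDetector;
using UnityEngine;

// Illustrative sketch of the async file-path retrieval pattern used in the examples above.
public class AsyncFilePathSketch : MonoBehaviour
{
    FaceLandmarkDetector faceLandmarkDetector;
    CancellationTokenSource cts = new CancellationTokenSource();

    async void Start()
    {
        // Await a readable path for the StreamingAssets asset; the token allows cancellation.
        string predictorPath = await DlibFaceLandmarkDetector.UnityUtils.Utils.getFilePathAsyncTask(
            "DlibFaceLandmarkDetector/sp_human_face_68.dat", cancellationToken: cts.Token);

        if (string.IsNullOrEmpty(predictorPath))
        {
            Debug.LogError("shape predictor file does not exist.");
            return;
        }

        faceLandmarkDetector = new FaceLandmarkDetector(predictorPath);
    }

    void OnDestroy()
    {
        if (faceLandmarkDetector != null)
            faceLandmarkDetector.Dispose();

        if (cts != null)
            cts.Dispose();
    }
}
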
diff --git a/HLWithDlibExampleMRTK3/Assets/HoloLensWithDlibFaceLandmarkDetectorExample/HLFaceLandmarkDetectionExample/HLFaceLandmarkDetectionExample.unity b/HLWithDlibExampleMRTK3/Assets/HoloLensWithDlibFaceLandmarkDetectorExample/HLFaceLandmarkDetectionExample/HLFaceLandmarkDetectionExample.unity
index f9f1d49..17d4f39 100644
--- a/HLWithDlibExampleMRTK3/Assets/HoloLensWithDlibFaceLandmarkDetectorExample/HLFaceLandmarkDetectionExample/HLFaceLandmarkDetectionExample.unity
+++ b/HLWithDlibExampleMRTK3/Assets/HoloLensWithDlibFaceLandmarkDetectorExample/HLFaceLandmarkDetectionExample/HLFaceLandmarkDetectionExample.unity
@@ -2311,8 +2311,8 @@ GameObject:
- component: {fileID: 1076083698}
- component: {fileID: 1076083697}
- component: {fileID: 1076083696}
- - component: {fileID: 1076083695}
- component: {fileID: 1076083701}
+ - component: {fileID: 1076083702}
- component: {fileID: 1076083700}
m_Layer: 0
m_Name: HLFaceLandmarkDetectionExample
@@ -2321,74 +2321,6 @@ GameObject:
m_NavMeshLayer: 0
m_StaticEditorFlags: 0
m_IsActive: 1
---- !u!114 &1076083695
-MonoBehaviour:
- m_ObjectHideFlags: 0
- m_CorrespondingSourceObject: {fileID: 0}
- m_PrefabInstance: {fileID: 0}
- m_PrefabAsset: {fileID: 0}
- m_GameObject: {fileID: 1076083694}
- m_Enabled: 1
- m_EditorHideFlags: 0
- m_Script: {fileID: 11500000, guid: 0195895dd83eb204da131e4a7c4bcfe3, type: 3}
- m_Name:
- m_EditorClassIdentifier:
- _requestedDeviceName:
- _requestedWidth: 896
- _requestedHeight: 504
- _requestedIsFrontFacing: 0
- _requestedFPS: 30
- _rotate90Degree: 0
- _flipVertical: 0
- _flipHorizontal: 0
- _outputColorFormat: 0
- _timeoutFrameCount: 300
- onInitialized:
- m_PersistentCalls:
- m_Calls:
- - m_Target: {fileID: 1076083700}
- m_TargetAssemblyTypeName:
- m_MethodName: OnWebCamTextureToMatHelperInitialized
- m_Mode: 1
- m_Arguments:
- m_ObjectArgument: {fileID: 0}
- m_ObjectArgumentAssemblyTypeName: UnityEngine.Object, UnityEngine
- m_IntArgument: 0
- m_FloatArgument: 0
- m_StringArgument:
- m_BoolArgument: 0
- m_CallState: 2
- onDisposed:
- m_PersistentCalls:
- m_Calls:
- - m_Target: {fileID: 1076083700}
- m_TargetAssemblyTypeName:
- m_MethodName: OnWebCamTextureToMatHelperDisposed
- m_Mode: 1
- m_Arguments:
- m_ObjectArgument: {fileID: 0}
- m_ObjectArgumentAssemblyTypeName: UnityEngine.Object, UnityEngine
- m_IntArgument: 0
- m_FloatArgument: 0
- m_StringArgument:
- m_BoolArgument: 0
- m_CallState: 2
- onErrorOccurred:
- m_PersistentCalls:
- m_Calls:
- - m_Target: {fileID: 1076083700}
- m_TargetAssemblyTypeName:
- m_MethodName: OnWebCamTextureToMatHelperErrorOccurred
- m_Mode: 0
- m_Arguments:
- m_ObjectArgument: {fileID: 0}
- m_ObjectArgumentAssemblyTypeName: UnityEngine.Object, UnityEngine
- m_IntArgument: 0
- m_FloatArgument: 0
- m_StringArgument:
- m_BoolArgument: 0
- m_CallState: 2
- avoidAndroidFrontCameraLowLightIssue: 0
--- !u!23 &1076083696
MeshRenderer:
m_ObjectHideFlags: 0
@@ -2510,6 +2442,76 @@ MonoBehaviour:
m_EditorClassIdentifier:
_downscaleRatio: 2
_frameSkippingRatio: 1
+--- !u!114 &1076083702
+MonoBehaviour:
+ m_ObjectHideFlags: 0
+ m_CorrespondingSourceObject: {fileID: 0}
+ m_PrefabInstance: {fileID: 0}
+ m_PrefabAsset: {fileID: 0}
+ m_GameObject: {fileID: 1076083694}
+ m_Enabled: 1
+ m_EditorHideFlags: 0
+ m_Script: {fileID: 11500000, guid: a80f11196f5e6394aaec3da665ebfc5a, type: 3}
+ m_Name:
+ m_EditorClassIdentifier:
+ _requestedDeviceName:
+ _requestedWidth: 896
+ _requestedHeight: 504
+ _requestedIsFrontFacing: 0
+ _requestedFPS: 30
+ _rotate90Degree: 0
+ _flipVertical: 0
+ _flipHorizontal: 0
+ _outputColorFormat: 0
+ _timeoutFrameCount: 1500
+ _onInitialized:
+ m_PersistentCalls:
+ m_Calls:
+ - m_Target: {fileID: 1076083700}
+ m_TargetAssemblyTypeName: HoloLensWithDlibFaceLandmarkDetectorExample.HLFaceLandmarkDetectionExample,
+ Assembly-CSharp
+ m_MethodName: OnWebCamTextureToMatHelperInitialized
+ m_Mode: 1
+ m_Arguments:
+ m_ObjectArgument: {fileID: 0}
+ m_ObjectArgumentAssemblyTypeName: UnityEngine.Object, UnityEngine
+ m_IntArgument: 0
+ m_FloatArgument: 0
+ m_StringArgument:
+ m_BoolArgument: 0
+ m_CallState: 2
+ _onDisposed:
+ m_PersistentCalls:
+ m_Calls:
+ - m_Target: {fileID: 1076083700}
+ m_TargetAssemblyTypeName: HoloLensWithDlibFaceLandmarkDetectorExample.HLFaceLandmarkDetectionExample,
+ Assembly-CSharp
+ m_MethodName: OnWebCamTextureToMatHelperDisposed
+ m_Mode: 1
+ m_Arguments:
+ m_ObjectArgument: {fileID: 0}
+ m_ObjectArgumentAssemblyTypeName: UnityEngine.Object, UnityEngine
+ m_IntArgument: 0
+ m_FloatArgument: 0
+ m_StringArgument:
+ m_BoolArgument: 0
+ m_CallState: 2
+ _onErrorOccurred:
+ m_PersistentCalls:
+ m_Calls:
+ - m_Target: {fileID: 1076083700}
+ m_TargetAssemblyTypeName: HoloLensWithDlibFaceLandmarkDetectorExample.HLFaceLandmarkDetectionExample,
+ Assembly-CSharp
+ m_MethodName: OnWebCamTextureToMatHelperErrorOccurred
+ m_Mode: 0
+ m_Arguments:
+ m_ObjectArgument: {fileID: 0}
+ m_ObjectArgumentAssemblyTypeName: UnityEngine.Object, UnityEngine
+ m_IntArgument: 0
+ m_FloatArgument: 0
+ m_StringArgument:
+ m_BoolArgument: 0
+ m_CallState: 2
--- !u!1001 &1118258304
PrefabInstance:
m_ObjectHideFlags: 0
diff --git a/HLWithDlibExampleMRTK3/Assets/HoloLensWithDlibFaceLandmarkDetectorExample/HLPhotoCaptureExample/HLPhotoCaptureExample.cs b/HLWithDlibExampleMRTK3/Assets/HoloLensWithDlibFaceLandmarkDetectorExample/HLPhotoCaptureExample/HLPhotoCaptureExample.cs
index a07f5d9..464d8e8 100644
--- a/HLWithDlibExampleMRTK3/Assets/HoloLensWithDlibFaceLandmarkDetectorExample/HLPhotoCaptureExample/HLPhotoCaptureExample.cs
+++ b/HLWithDlibExampleMRTK3/Assets/HoloLensWithDlibFaceLandmarkDetectorExample/HLPhotoCaptureExample/HLPhotoCaptureExample.cs
@@ -7,6 +7,7 @@
using OpenCVForUnity.ImgprocModule;
using System.Collections;
using System.Linq;
+using System.Threading;
#if UNITY_2018_2_OR_NEWER
using UnityEngine.Windows.WebCam;
@@ -38,18 +39,34 @@ public class HLPhotoCaptureExample : MonoBehaviour
private FaceLandmarkDetector faceLandmarkDetector;
private string dlibShapePredictorFileName = "DlibFaceLandmarkDetector/sp_human_face_68.dat";
- private string dlibShapePredictorFilePath;
- private IEnumerator Start()
+ ///
+ /// The CancellationTokenSource.
+ ///
+ CancellationTokenSource cts = new CancellationTokenSource();
+
+ // Use this for initialization
+ async void Start()
{
+ // Asynchronously retrieves the readable file path from the StreamingAssets directory.
+ if (text != null)
+ {
+ text.text = "Preparing file access...";
+ }
+
dlibShapePredictorFileName = HoloLensWithDlibFaceLandmarkDetectorExample.dlibShapePredictorFileName;
+ string dlibShapePredictor_filepath = await DlibFaceLandmarkDetector.UnityUtils.Utils.getFilePathAsyncTask(dlibShapePredictorFileName, cancellationToken: cts.Token);
- dlibShapePredictorFilePath = DlibFaceLandmarkDetector.UnityUtils.Utils.getFilePath(dlibShapePredictorFileName);
- if (string.IsNullOrEmpty(dlibShapePredictorFilePath))
+ if (text != null)
+ {
+ text.text = "";
+ }
+
+ if (string.IsNullOrEmpty(dlibShapePredictor_filepath))
{
Debug.LogError("shape predictor file does not exist. Please copy from “DlibFaceLandmarkDetector/StreamingAssets/DlibFaceLandmarkDetector/” to “Assets/StreamingAssets/DlibFaceLandmarkDetector/” folder. ");
}
- faceLandmarkDetector = new FaceLandmarkDetector(dlibShapePredictorFilePath);
+ faceLandmarkDetector = new FaceLandmarkDetector(dlibShapePredictor_filepath);
var resolutions = PhotoCapture.SupportedResolutions;
@@ -59,7 +76,7 @@ private IEnumerator Start()
{
text.text = "Resolutions not available. Did you provide web cam access?";
}
- yield return null;
+ return;
}
cameraResolution = resolutions.OrderByDescending((res) => res.width * res.height).First();
@@ -87,6 +104,9 @@ private void OnDestroy()
if (faceLandmarkDetector != null)
faceLandmarkDetector.Dispose();
+
+ if (cts != null)
+ cts.Dispose();
}
private void OnPhotoCaptureCreated(PhotoCapture captureObject)
@@ -177,8 +197,8 @@ private void OnPhotoCaptured(PhotoCapture.PhotoCaptureResult result, PhotoCaptur
Mat bgraMat = new Mat(targetTexture.height, targetTexture.width, CvType.CV_8UC4);
- // For BGRA or BGR format, use the fastTexture2DToMat method.
- OpenCVForUnity.UnityUtils.Utils.fastTexture2DToMat(targetTexture, bgraMat);
+ // For BGRA or BGR format, use the texture2DToMatRaw method.
+ OpenCVForUnity.UnityUtils.Utils.texture2DToMatRaw(targetTexture, bgraMat);
OpenCVForUnityUtils.SetImage(faceLandmarkDetector, bgraMat);
@@ -206,8 +226,8 @@ private void OnPhotoCaptured(PhotoCapture.PhotoCaptureResult result, PhotoCaptur
Imgproc.putText(bgraMat, targetTexture.format + " W:" + bgraMat.width() + " H:" + bgraMat.height(), new Point(5, bgraMat.rows() - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 1.5, new Scalar(255, 0, 0, 255), 2, Imgproc.LINE_AA, false);
- // For BGRA or BGR format, use the fastMatToTexture2D method.
- OpenCVForUnity.UnityUtils.Utils.fastMatToTexture2D(bgraMat, targetTexture);
+ // For BGRA or BGR format, use the matToTexture2DRaw method.
+ OpenCVForUnity.UnityUtils.Utils.matToTexture2DRaw(bgraMat, targetTexture);
bgraMat.Dispose();
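
For context on the renamed conversion calls above: texture2DToMatRaw and matToTexture2DRaw copy pixel data without any channel reordering, so they are only suitable when the Texture2D and Mat already share the same BGRA/BGR layout (here a BGRA32 capture texture and a CV_8UC4 Mat). A minimal round-trip sketch under that assumption; the wrapper class and method names are illustrative.

using OpenCVForUnity.CoreModule;
using OpenCVForUnity.ImgprocModule;
using UnityEngine;

public static class RawConversionSketch
{
    // Illustrative round-trip: Texture2D -> Mat -> annotate -> Texture2D, with no channel swap.
    public static void Annotate(Texture2D targetTexture)
    {
        Mat bgraMat = new Mat(targetTexture.height, targetTexture.width, CvType.CV_8UC4);

        // Raw copy: the Mat receives the texture bytes as-is (BGRA stays BGRA).
        OpenCVForUnity.UnityUtils.Utils.texture2DToMatRaw(targetTexture, bgraMat);

        // ... image processing / drawing on bgraMat goes here ...
        Imgproc.rectangle(bgraMat, new Point(10, 10), new Point(100, 100), new Scalar(0, 255, 0, 255), 2);

        // Raw copy back into the texture and upload to the GPU.
        OpenCVForUnity.UnityUtils.Utils.matToTexture2DRaw(bgraMat, targetTexture);

        bgraMat.Dispose();
    }
}
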
diff --git a/HLWithDlibExampleMRTK3/Assets/HoloLensWithDlibFaceLandmarkDetectorExample/Scripts/Utils/HLCameraStreamToMatHelper.cs b/HLWithDlibExampleMRTK3/Assets/HoloLensWithDlibFaceLandmarkDetectorExample/Scripts/Utils/HLCameraStream2MatHelper.cs
similarity index 81%
rename from HLWithDlibExampleMRTK3/Assets/HoloLensWithDlibFaceLandmarkDetectorExample/Scripts/Utils/HLCameraStreamToMatHelper.cs
rename to HLWithDlibExampleMRTK3/Assets/HoloLensWithDlibFaceLandmarkDetectorExample/Scripts/Utils/HLCameraStream2MatHelper.cs
index 4436838..ce58b32 100644
--- a/HLWithDlibExampleMRTK3/Assets/HoloLensWithDlibFaceLandmarkDetectorExample/Scripts/Utils/HLCameraStreamToMatHelper.cs
+++ b/HLWithDlibExampleMRTK3/Assets/HoloLensWithDlibFaceLandmarkDetectorExample/Scripts/Utils/HLCameraStream2MatHelper.cs
@@ -25,30 +25,70 @@ namespace HoloLensWithOpenCVForUnity.UnityUtils.Helper
/// This is called every time there is a new frame image mat available.
/// The Mat object's type is 'CV_8UC4' or 'CV_8UC3' or 'CV_8UC1' (ColorFormat is determined by the outputColorFormat setting).
///
- /// The recently captured frame image mat.
+ /// The recently captured frame image mat.
/// The projection matrices.
/// The camera to world matrices.
/// The camera intrinsics.
public delegate void FrameMatAcquiredCallback(Mat mat, Matrix4x4 projectionMatrix, Matrix4x4 cameraToWorldMatrix, CameraIntrinsics cameraIntrinsics);
///
- /// Hololens camera stream to mat helper.
- /// v 1.0.7
- /// Depends on EnoxSoftware/HoloLensCameraStream (https://github.com/EnoxSoftware/HoloLensCameraStream).
- /// Depends on OpenCVForUnity version 2.4.1 (WebCamTextureToMatHelper v 1.1.2) or later.
+ /// A helper component class for obtaining camera frames from HoloLensCameraStream and converting them to OpenCV Mat format in real-time.
+ ///
+ ///
+ /// The HLCameraStream2MatHelper class captures video frames from a device's camera using HoloLensCameraStream
+ /// and converts each frame to an OpenCV Mat object every frame.
+ ///
+ /// This component is particularly useful for image processing tasks in Unity, such as computer vision applications,
+ /// where real-time camera input in Mat format is required. It enables seamless integration of OpenCV-based
+ /// image processing algorithms with HoloLens camera input.
+ ///
+ ///
+ /// FrameMatAcquiredCallback:
+ /// The FrameMatAcquiredCallback delegate is invoked each time a new frame is captured and converted to a Mat object.
+ /// This delegate wraps the HoloLensCameraStream.VideoCapture.FrameSampleAcquired callback to provide the frame data
+ /// in Mat format, along with the projection matrix, camera-to-world matrix, and camera intrinsics for the captured frame.
+ ///
+ ///
+ /// Usage Notes:
+ ///
+ /// -
+ /// The callback is executed outside of the Unity application's main thread. To safely use Unity API functions or
+ /// update Unity objects within the callback, use UnityEngine.WSA.Application.InvokeOnAppThread(() => { ... })
+ /// to dispatch operations to the application thread.
+ ///
+ /// -
+ /// The Mat object passed to the callback is managed by the helper class and should not be stored for long-term use.
+ /// Copy the data if further processing is required beyond the callback's scope.
+ ///
+ ///
+ ///
///
- /// By setting outputColorFormat to BGRA or GRAY, processing that does not include extra color conversion is performed.
+ /// Note: By setting outputColorFormat to BGRA or GRAY, processing that does not include extra color conversion is performed.
+ /// Note: Depends on EnoxSoftware/HoloLensCameraStream.
+ /// Note: Depends on OpenCVForUnity version 2.6.4 or later.
+ ///
+ ///
+ /// Attach this component to a GameObject and call GetMat() to retrieve the latest camera frame in Mat format.
+ /// The helper class manages camera start/stop operations and frame updates internally.
+ ///
///
+ ///
/// Usage:
/// Add Define Symbols: "Open File > Build Settings > Player Settings > Other Settings" and add the following to Scripting Define Symbols depending on the XR system used in your project.
- /// This is the setup needed to get the correct values from the TryGetCameraToWorldMatrix method.
- /// Legacy built-in XR ; "BUILTIN_XR"
+ ///
+ /// {
+ /// Legacy built-in XR : "BUILTIN_XR"
/// XR Plugin Management(Windows Mixed Reality) : "XR_PLUGIN_WINDOWSMR"
/// XR Plugin Management(OpenXR) : "XR_PLUGIN_OPENXR"
+ /// }
+ ///
+ ///
///
- ///
+ ///
/// Combination of camera frame size and frame rate that can be acquired on Hololens. (width x height : framerate)
- /// (See https://learn.microsoft.com/en-us/windows/mixed-reality/develop/advanced-concepts/locatable-camera-overview)
+ /// https://learn.microsoft.com/en-us/windows/mixed-reality/develop/advanced-concepts/locatable-camera-overview
+ ///
+ /// {
///
/// Hololens1
///
@@ -126,8 +166,10 @@ namespace HoloLensWithOpenCVForUnity.UnityUtils.Helper
/// 424x240 : 30
/// 424x240 : 15
///
- ///
- public class HLCameraStreamToMatHelper : WebCamTextureToMatHelper
+ /// }
+ ///
+ ///
+ public class HLCameraStream2MatHelper : WebCamTexture2MatHelper
{
///
/// This will be called whenever a new camera frame image available is converted to Mat.
@@ -140,7 +182,7 @@ public class HLCameraStreamToMatHelper : WebCamTextureToMatHelper
protected CameraIntrinsics cameraIntrinsics;
///
- /// Returns the camera intrinsics.
+ /// Return the camera intrinsics.
///
/// The camera intrinsics.
public virtual CameraIntrinsics GetCameraIntrinsics()
@@ -160,6 +202,8 @@ public override string requestedDeviceName
_requestedDeviceName = value;
if (hasInitDone)
Initialize();
+ else if (isInitWaiting)
+ Initialize(autoPlayAfterInitialize);
}
}
}
@@ -175,6 +219,8 @@ public override int requestedWidth
_requestedWidth = _value;
if (hasInitDone)
Initialize();
+ else if (isInitWaiting)
+ Initialize(autoPlayAfterInitialize);
}
}
}
@@ -190,6 +236,8 @@ public override int requestedHeight
_requestedHeight = _value;
if (hasInitDone)
Initialize();
+ else if (isInitWaiting)
+ Initialize(autoPlayAfterInitialize);
}
}
}
@@ -203,7 +251,9 @@ public override bool requestedIsFrontFacing
{
_requestedIsFrontFacing = value;
if (hasInitDone)
- Initialize(_requestedIsFrontFacing, requestedFPS, rotate90Degree);
+ Initialize(_requestedIsFrontFacing, requestedFPS, rotate90Degree, IsPlaying());
+ else if (isInitWaiting)
+ Initialize(_requestedIsFrontFacing, requestedFPS, rotate90Degree, autoPlayAfterInitialize);
}
}
}
@@ -218,11 +268,13 @@ public override bool rotate90Degree
_rotate90Degree = value;
if (hasInitDone)
Initialize();
+ else if (isInitWaiting)
+ Initialize(autoPlayAfterInitialize);
}
}
}
- public override ColorFormat outputColorFormat
+ public override Source2MatHelperColorFormat outputColorFormat
{
get { return _outputColorFormat; }
set
@@ -232,6 +284,8 @@ public override ColorFormat outputColorFormat
_outputColorFormat = value;
if (hasInitDone)
Initialize();
+ else if (isInitWaiting)
+ Initialize(autoPlayAfterInitialize);
}
}
}
@@ -243,13 +297,13 @@ public override float requestedFPS
{
_requestedFPS = Mathf.Clamp(value, -1f, float.MaxValue);
if (hasInitDone)
- {
Initialize();
- }
+ else if (isInitWaiting)
+ Initialize(autoPlayAfterInitialize);
}
}
- new protected ColorFormat baseColorFormat = ColorFormat.BGRA;
+ new protected Source2MatHelperColorFormat baseColorFormat = Source2MatHelperColorFormat.BGRA;
protected System.Object lockObject = new System.Object();
protected System.Object matrixLockObject = new System.Object();
@@ -426,7 +480,7 @@ protected virtual void OnFrameSampleAcquired(VideoCaptureSample sample)
if (hasInitEventCompleted && frameMatAcquired != null)
{
- Mat mat = new Mat(frameSampleHeight, frameSampleWidth, CvType.CV_8UC(Channels(outputColorFormat)));
+ Mat mat = new Mat(frameSampleHeight, frameSampleWidth, CvType.CV_8UC(Source2MatHelperUtils.Channels(outputColorFormat)));
if (baseColorFormat == outputColorFormat)
{
@@ -434,14 +488,14 @@ protected virtual void OnFrameSampleAcquired(VideoCaptureSample sample)
}
else
{
- Mat baseMat = new Mat(frameSampleHeight, frameSampleWidth, CvType.CV_8UC(Channels(baseColorFormat)));
+ Mat baseMat = new Mat(frameSampleHeight, frameSampleWidth, CvType.CV_8UC(Source2MatHelperUtils.Channels(baseColorFormat)));
MatUtils.copyToMat(latestImageBytes, baseMat);
- Imgproc.cvtColor(baseMat, mat, ColorConversionCodes(baseColorFormat, outputColorFormat));
+ Imgproc.cvtColor(baseMat, mat, Source2MatHelperUtils.ColorConversionCodes(baseColorFormat, outputColorFormat));
}
if (_rotate90Degree)
{
- Mat rotatedFrameMat = new Mat(frameSampleWidth, frameSampleHeight, CvType.CV_8UC(Channels(outputColorFormat)));
+ Mat rotatedFrameMat = new Mat(frameSampleWidth, frameSampleHeight, CvType.CV_8UC(Source2MatHelperUtils.Channels(outputColorFormat)));
Core.rotate(mat, rotatedFrameMat, Core.ROTATE_90_CLOCKWISE);
mat.Dispose();
@@ -470,7 +524,7 @@ protected virtual HoloLensCameraStream.CameraParameters CreateCameraParams(HoloL
cameraParams.cameraResolutionHeight = resolution.height;
cameraParams.cameraResolutionWidth = resolution.width;
cameraParams.frameRate = Mathf.RoundToInt(frameRate);
- cameraParams.pixelFormat = (outputColorFormat == ColorFormat.GRAY) ? CapturePixelFormat.NV12 : CapturePixelFormat.BGRA32;
+ cameraParams.pixelFormat = (outputColorFormat == Source2MatHelperColorFormat.GRAY) ? CapturePixelFormat.NV12 : CapturePixelFormat.BGRA32;
cameraParams.rotateImage180Degrees = false;
cameraParams.enableHolograms = false;
cameraParams.enableVideoStabilization = false;
@@ -481,7 +535,7 @@ protected virtual HoloLensCameraStream.CameraParameters CreateCameraParams(HoloL
#endif
///
- /// Returns the video capture.
+ /// Return the video capture.
///
/// The video capture.
public virtual HoloLensCameraStream.VideoCapture GetVideoCapture()
@@ -574,7 +628,7 @@ protected override void OnDestroy()
}
///
- /// Initializes this instance by coroutine.
+ /// Initialize this instance by coroutine.
///
protected override IEnumerator _Initialize()
{
@@ -595,6 +649,10 @@ protected override IEnumerator _Initialize()
isInitWaiting = true;
+ // Wait one frame before starting initialization process
+ yield return null;
+
+
while (isChangeVideoModeWaiting)
{
yield return null;
@@ -615,7 +673,7 @@ protected override IEnumerator _Initialize()
CancelInitCoroutine();
if (onErrorOccurred != null)
- onErrorOccurred.Invoke(ErrorCode.UNKNOWN);
+ onErrorOccurred.Invoke(Source2MatHelperErrorCode.UNKNOWN, "!result2.success");
}
else
{
@@ -660,7 +718,7 @@ protected override IEnumerator _Initialize()
CancelInitCoroutine();
if (onErrorOccurred != null)
- onErrorOccurred.Invoke(ErrorCode.CAMERA_DEVICE_NOT_EXIST);
+ onErrorOccurred.Invoke(Source2MatHelperErrorCode.CAMERA_DEVICE_NOT_EXIST, "Did not find a video capture object. You may not be using the HoloLens.");
return;
}
@@ -687,7 +745,7 @@ protected override IEnumerator _Initialize()
CancelInitCoroutine();
if (onErrorOccurred != null)
- onErrorOccurred.Invoke(ErrorCode.UNKNOWN);
+ onErrorOccurred.Invoke(Source2MatHelperErrorCode.UNKNOWN, "!result.success");
}
else
{
@@ -709,11 +767,11 @@ protected override IEnumerator _Initialize()
}
else if (didUpdateThisFrame)
{
- Debug.Log("HololensCameraStreamToMatHelper:: " + "name:" + "" + " width:" + frameSampleWidth + " height:" + frameSampleHeight + " fps:" + cameraParams.frameRate);
+ Debug.Log("HLCameraStream2MatHelper:: " + "name:" + "" + " width:" + frameSampleWidth + " height:" + frameSampleHeight + " fps:" + cameraParams.frameRate);
- baseColorFormat = (outputColorFormat == ColorFormat.GRAY) ? ColorFormat.GRAY : ColorFormat.BGRA;
+ baseColorFormat = (outputColorFormat == Source2MatHelperColorFormat.GRAY) ? Source2MatHelperColorFormat.GRAY : Source2MatHelperColorFormat.BGRA;
- baseMat = new Mat(frameSampleHeight, frameSampleWidth, CvType.CV_8UC(Channels(baseColorFormat)));
+ baseMat = new Mat(frameSampleHeight, frameSampleWidth, CvType.CV_8UC(Source2MatHelperUtils.Channels(baseColorFormat)));
if (baseColorFormat == outputColorFormat)
{
@@ -721,11 +779,11 @@ protected override IEnumerator _Initialize()
}
else
{
- frameMat = new Mat(baseMat.rows(), baseMat.cols(), CvType.CV_8UC(Channels(outputColorFormat)));
+ frameMat = new Mat(baseMat.rows(), baseMat.cols(), CvType.CV_8UC(Source2MatHelperUtils.Channels(outputColorFormat)));
}
if (_rotate90Degree)
- rotatedFrameMat = new Mat(frameMat.cols(), frameMat.rows(), CvType.CV_8UC(Channels(outputColorFormat)));
+ rotatedFrameMat = new Mat(frameMat.cols(), frameMat.rows(), CvType.CV_8UC(Source2MatHelperUtils.Channels(outputColorFormat)));
isInitWaiting = false;
hasInitDone = true;
@@ -763,7 +821,7 @@ protected override IEnumerator _Initialize()
initCoroutine = null;
if (onErrorOccurred != null)
- onErrorOccurred.Invoke(ErrorCode.TIMEOUT);
+ onErrorOccurred.Invoke(Source2MatHelperErrorCode.TIMEOUT, string.Empty);
}
else
{
@@ -771,13 +829,13 @@ protected override IEnumerator _Initialize()
initCoroutine = null;
if (onErrorOccurred != null)
- onErrorOccurred.Invoke(ErrorCode.TIMEOUT);
+ onErrorOccurred.Invoke(Source2MatHelperErrorCode.TIMEOUT, string.Empty);
}
}
}
///
- /// Indicates whether this instance has been initialized.
+ /// Indicate whether this instance has been initialized.
///
/// true, if this instance has been initialized, false otherwise.
public override bool IsInitialized()
@@ -786,7 +844,7 @@ public override bool IsInitialized()
}
///
- /// Starts the camera.
+ /// Start the active camera.
///
public override void Play()
{
@@ -811,7 +869,7 @@ protected virtual IEnumerator _Play()
}
///
- /// Pauses the active camera.
+ /// Pause the active camera.
///
public override void Pause()
{
@@ -820,7 +878,7 @@ public override void Pause()
}
///
- /// Stops the active camera.
+ /// Stop the active camera.
///
public override void Stop()
{
@@ -845,7 +903,7 @@ protected virtual IEnumerator _Stop()
}
///
- /// Indicates whether the active camera is currently playing.
+ /// Indicate whether the active camera is currently playing.
///
/// true, if the active camera is playing, false otherwise.
public override bool IsPlaying()
@@ -857,7 +915,7 @@ public override bool IsPlaying()
}
///
- /// Indicates whether the active camera device is currently front facng.
+ /// Indicate whether the active camera device is currently front facing.
///
/// true, if the active camera device is front facng, false otherwise.
public override bool IsFrontFacing()
@@ -866,7 +924,7 @@ public override bool IsFrontFacing()
}
///
- /// Returns the active camera device name.
+ /// Return the active camera device name.
///
/// The active camera device name.
public override string GetDeviceName()
@@ -875,7 +933,7 @@ public override string GetDeviceName()
}
///
- /// Returns the active camera width.
+ /// Return the active camera width.
///
/// The active camera width.
public override int GetWidth()
@@ -886,7 +944,7 @@ public override int GetWidth()
}
///
- /// Returns the active camera height.
+ /// Return the active camera height.
///
/// The active camera height.
public override int GetHeight()
@@ -897,7 +955,7 @@ public override int GetHeight()
}
///
- /// Returns the active camera framerate.
+ /// Return the active camera framerate.
///
/// The active camera framerate.
public override float GetFPS()
@@ -906,16 +964,16 @@ public override float GetFPS()
}
///
- /// Returns the webcam texture.
+ /// Return the active WebCamTexture.
///
- /// The webcam texture.
+ /// The active WebCamTexture.
public override WebCamTexture GetWebCamTexture()
{
return null;
}
///
- /// Returns the camera to world matrix.
+ /// Return the camera to world matrix.
///
/// The camera to world matrix.
public override Matrix4x4 GetCameraToWorldMatrix()
@@ -924,7 +982,7 @@ public override Matrix4x4 GetCameraToWorldMatrix()
}
///
- /// Returns the projection matrix matrix.
+ /// Return the projection matrix.
///
/// The projection matrix.
public override Matrix4x4 GetProjectionMatrix()
@@ -933,7 +991,7 @@ public override Matrix4x4 GetProjectionMatrix()
}
///
- /// Indicates whether the video buffer of the frame has been updated.
+ /// Indicate whether the video buffer of the frame has been updated.
///
/// true, if the video buffer has been updated false otherwise.
public override bool DidUpdateThisFrame()
@@ -945,10 +1003,12 @@ public override bool DidUpdateThisFrame()
}
///
- /// Gets the mat of the current frame.
+ /// Get the mat of the current frame.
+ ///
+ ///
/// The Mat object's type is 'CV_8UC4' or 'CV_8UC3' or 'CV_8UC1' (ColorFormat is determined by the outputColorFormat setting).
/// Please do not dispose of the returned mat as it will be reused.
- ///
+ ///
/// The mat of the current frame.
public override Mat GetMat()
{
@@ -964,7 +1024,7 @@ public override Mat GetMat()
else
{
MatUtils.copyToMat(latestImageBytes, baseMat);
- Imgproc.cvtColor(baseMat, frameMat, ColorConversionCodes(baseColorFormat, outputColorFormat));
+ Imgproc.cvtColor(baseMat, frameMat, Source2MatHelperUtils.ColorConversionCodes(baseColorFormat, outputColorFormat));
}
if (rotatedFrameMat != null)
@@ -983,7 +1043,7 @@ public override Mat GetMat()
}
///
- /// Flips the mat.
+ /// Flip the mat.
///
/// Mat.
protected override void FlipMat(Mat mat, bool flipVertical, bool flipHorizontal)
@@ -1067,12 +1127,12 @@ protected override void ReleaseResources()
}
///
- /// Releases all resource used by the object.
+ /// Releases all resource used by the object.
///
- /// Call when you are finished using the . The
- /// method leaves the in an unusable state. After
- /// calling , you must release all references to the so
- /// the garbage collector can reclaim the memory that the was occupying.
+ /// Call Dispose when you are finished using the HLCameraStream2MatHelper. The Dispose
+ /// method leaves the HLCameraStream2MatHelper in an unusable state. After
+ /// calling Dispose, you must release all references to the HLCameraStream2MatHelper so
+ /// the garbage collector can reclaim the memory that the HLCameraStream2MatHelper was occupying.
public override void Dispose()
{
if (colors != null)
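
As the remarks above note, frameMatAcquired is raised outside the Unity main thread on device, so Unity-object updates must be marshalled back with UnityEngine.WSA.Application.InvokeOnAppThread, while in the editor the helper is polled via DidUpdateThisFrame()/GetMat(). A minimal consumer sketch under those assumptions; the namespaces assumed for Source2MatHelperColorFormat and CameraIntrinsics follow the types referenced in this file but are not confirmed here, and the class name is illustrative.

using HoloLensWithOpenCVForUnity.UnityUtils.Helper;
using OpenCVForUnity.CoreModule;
using OpenCVForUnity.UnityUtils.Helper;
using UnityEngine;

[RequireComponent(typeof(HLCameraStream2MatHelper))]
public class CameraStreamConsumerSketch : MonoBehaviour
{
    HLCameraStream2MatHelper helper;

    void Start()
    {
        helper = GetComponent<HLCameraStream2MatHelper>();

#if WINDOWS_UWP && !DISABLE_HOLOLENSCAMSTREAM_API
        // On device, this callback runs outside the Unity main thread.
        helper.frameMatAcquired += OnFrameMatAcquired;
#endif
        helper.outputColorFormat = Source2MatHelperColorFormat.GRAY;
        helper.Initialize();
    }

#if WINDOWS_UWP && !DISABLE_HOLOLENSCAMSTREAM_API
    void OnFrameMatAcquired(Mat mat, Matrix4x4 projectionMatrix, Matrix4x4 cameraToWorldMatrix,
        HoloLensCameraStream.CameraIntrinsics cameraIntrinsics)
    {
        // Process mat here; copy the data if it is needed beyond this callback's scope.
        UnityEngine.WSA.Application.InvokeOnAppThread(() =>
        {
            // Update Unity objects (preview texture, hologram pose, UI) on the app thread.
        }, false);
    }
#else
    void Update()
    {
        // Without the camera-stream API (e.g. in the editor), poll the helper each frame.
        if (helper.IsPlaying() && helper.DidUpdateThisFrame())
        {
            Mat frameMat = helper.GetMat(); // reused internally; do not dispose.
        }
    }
#endif
}
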
diff --git a/HLWithDlibExampleMRTK3/Assets/HoloLensWithDlibFaceLandmarkDetectorExample/Scripts/Utils/HLCameraStreamToMatHelper.cs.meta b/HLWithDlibExampleMRTK3/Assets/HoloLensWithDlibFaceLandmarkDetectorExample/Scripts/Utils/HLCameraStream2MatHelper.cs.meta
similarity index 76%
rename from HLWithDlibExampleMRTK3/Assets/HoloLensWithDlibFaceLandmarkDetectorExample/Scripts/Utils/HLCameraStreamToMatHelper.cs.meta
rename to HLWithDlibExampleMRTK3/Assets/HoloLensWithDlibFaceLandmarkDetectorExample/Scripts/Utils/HLCameraStream2MatHelper.cs.meta
index a8fd2e9..5c411c8 100644
--- a/HLWithDlibExampleMRTK3/Assets/HoloLensWithDlibFaceLandmarkDetectorExample/Scripts/Utils/HLCameraStreamToMatHelper.cs.meta
+++ b/HLWithDlibExampleMRTK3/Assets/HoloLensWithDlibFaceLandmarkDetectorExample/Scripts/Utils/HLCameraStream2MatHelper.cs.meta
@@ -1,6 +1,6 @@
fileFormatVersion: 2
-guid: 0195895dd83eb204da131e4a7c4bcfe3
-timeCreated: 1518188247
+guid: a80f11196f5e6394aaec3da665ebfc5a
+timeCreated: 1509282299
licenseType: Free
MonoImporter:
serializedVersion: 2
diff --git a/HLWithDlibExampleMRTK3/Assets/HoloLensWithDlibFaceLandmarkDetectorExample/Scripts/Utils/OpenCVForUnityUtils.cs b/HLWithDlibExampleMRTK3/Assets/HoloLensWithDlibFaceLandmarkDetectorExample/Scripts/Utils/OpenCVForUnityUtils.cs
index 7a81a2d..34ea796 100644
--- a/HLWithDlibExampleMRTK3/Assets/HoloLensWithDlibFaceLandmarkDetectorExample/Scripts/Utils/OpenCVForUnityUtils.cs
+++ b/HLWithDlibExampleMRTK3/Assets/HoloLensWithDlibFaceLandmarkDetectorExample/Scripts/Utils/OpenCVForUnityUtils.cs
@@ -1,8 +1,10 @@
using DlibFaceLandmarkDetector;
using OpenCVForUnity.CoreModule;
using OpenCVForUnity.ImgprocModule;
+using OpenCVForUnity.UnityUtils;
using System;
using System.Collections.Generic;
+using System.Runtime.InteropServices;
using UnityEngine;
namespace HoloLensWithDlibFaceLandmarkDetectorExample
@@ -13,19 +15,33 @@ namespace HoloLensWithDlibFaceLandmarkDetectorExample
public static class OpenCVForUnityUtils
{
///
- /// Sets a image.
+ /// Sets the image for the specified FaceLandmarkDetector using a given Mat object.
///
- /// Face landmark detector.
- /// Image mat.
+ ///
+ /// An instance of FaceLandmarkDetector that processes the specified image.
+ ///
+ ///
+ /// A Mat object representing the image to set. The matrix must be continuous and valid for processing.
+ ///
+ ///
+ /// Thrown if faceLandmarkDetector or imgMat is null.
+ ///
+ ///
+ /// Thrown if imgMat is not continuous. Ensure imgMat.isContinuous() == true.
+ ///
+ ///
+ /// This method directly assigns the data pointer, width, height, and element size to the
+ /// specified FaceLandmarkDetector. It avoids additional memory allocations by reusing the existing data.
+ ///
public static void SetImage(FaceLandmarkDetector faceLandmarkDetector, Mat imgMat)
{
if (faceLandmarkDetector == null)
- throw new ArgumentNullException("faceLandmarkDetector");
+ throw new ArgumentNullException(nameof(faceLandmarkDetector));
if (faceLandmarkDetector != null)
faceLandmarkDetector.ThrowIfDisposed();
if (imgMat == null)
- throw new ArgumentNullException("imgMat");
+ throw new ArgumentNullException(nameof(imgMat));
if (imgMat != null)
imgMat.ThrowIfDisposed();
if (!imgMat.isContinuous())
@@ -35,40 +51,113 @@ public static void SetImage(FaceLandmarkDetector faceLandmarkDetector, Mat imgMa
}
///
- /// Draws a face rect.
+ /// Draws a rectangle on the specified image to indicate a detected face region.
///
- /// Image mat.
- /// Rect.
- /// Color.
- /// Thickness.
+ /// The image on which to draw the rectangle.
+ /// The UnityEngine.Rect defining the area to highlight.
+ /// The color of the rectangle border.
+ /// The thickness of the rectangle border.
public static void DrawFaceRect(Mat imgMat, UnityEngine.Rect rect, Scalar color, int thickness)
{
Imgproc.rectangle(imgMat, new Point(rect.xMin, rect.yMin), new Point(rect.xMax, rect.yMax), color, thickness);
}
///
- /// Draws a face rect.
+ /// Draws a rectangle on the specified image to indicate a detected face region.
///
- /// Image mat.
- /// Rect.
- /// Color.
- /// Thickness.
+ /// The image on which to draw the rectangle.
+ /// The UnityEngine.Rect defining the area to highlight.
+ /// The color of the rectangle border.
+ /// The thickness of the rectangle border.
+ public static void DrawFaceRect(Mat imgMat, UnityEngine.Rect rect, in Vec4d color, int thickness)
+ {
+ Imgproc.rectangle(imgMat, new Vec2d(rect.xMin, rect.yMin), new Vec2d(rect.xMax, rect.yMax), color, thickness);
+ }
+
+ ///
+ /// Draws a rectangle on the specified image to indicate a detected face region.
+ ///
+ /// The image on which to draw the rectangle.
+ /// The UnityEngine.Rect defining the area to highlight.
+ /// The color of the rectangle border.
+ /// The thickness of the rectangle border.
+ public static void DrawFaceRect(Mat imgMat, UnityEngine.Rect rect, in (double v0, double v1, double v2, double v3) color, int thickness)
+ {
+ Imgproc.rectangle(imgMat, (rect.xMin, rect.yMin), (rect.xMax, rect.yMax), color, thickness);
+ }
+
+ ///
+ /// Draws a rectangle on the specified image to indicate a detected face region.
+ ///
+ /// The image on which to draw the rectangle.
+ /// The OpenCVForUnity.CoreModule.Rect defining the area to highlight.
+ /// The color of the rectangle border.
+ /// The thickness of the rectangle border.
public static void DrawFaceRect(Mat imgMat, OpenCVForUnity.CoreModule.Rect rect, Scalar color, int thickness)
{
Imgproc.rectangle(imgMat, rect, color, thickness);
}
///
- /// Draws a face rect.
+ /// Draws a rectangle on the specified image to indicate a detected face region.
+ ///
+ /// The image on which to draw the rectangle.
+ /// The Vec4i defining the area to highlight.
+ /// The color of the rectangle border.
+ /// The thickness of the rectangle border.
+ public static void DrawFaceRect(Mat imgMat, in Vec4i rect, in Vec4d color, int thickness)
+ {
+ Imgproc.rectangle(imgMat, rect, color, thickness);
+ }
+
+ ///
+ /// Draws a rectangle on the specified image to indicate a detected face region.
+ ///
+ /// The image on which to draw the rectangle.
+ /// The Vec4d defining the area to highlight.
+ /// The color of the rectangle border.
+ /// The thickness of the rectangle border.
+ public static void DrawFaceRect(Mat imgMat, in Vec4d rect, in Vec4d color, int thickness)
+ {
+ Imgproc.rectangle(imgMat, rect.ToVec4i(), color, thickness);
+ }
+
+ ///
+ /// Draws a rectangle on the specified image to indicate a detected face region.
+ ///
+ /// The image on which to draw the rectangle.
+ /// The rectangle tuple (x, y, width, height) defining the area to highlight.
+ /// The color of the rectangle border.
+ /// The thickness of the rectangle border.
+ public static void DrawFaceRect(Mat imgMat, in (int x, int y, int width, int height) rect, in (double v0, double v1, double v2, double v3) color, int thickness)
+ {
+ Imgproc.rectangle(imgMat, rect, color, thickness);
+ }
+
+ ///
+ /// Draws a rectangle on the specified image to indicate a detected face region.
+ ///
+ /// The image on which to draw the rectangle.
+ /// The rectangle tuple (x, y, width, height) defining the area to highlight.
+ /// The color of the rectangle border.
+ /// The thickness of the rectangle border.
+ public static void DrawFaceRect(Mat imgMat, in (double x, double y, double width, double height) rect, in (double v0, double v1, double v2, double v3) color, int thickness)
+ {
+ Imgproc.rectangle(imgMat, ((int)rect.x, (int)rect.y, (int)rect.width, (int)rect.height), color, thickness);
+ }
+
+ ///
+ /// Draws a rectangle and detection information on the specified image based on the given data.
///
- /// Image mat.
- /// RectDetection.
- /// Color.
- /// Thickness.
+ /// The image on which to draw the rectangle and text.
+ /// The RectDetection containing the rectangle and detection details.
+ /// The color of the rectangle border.
+ /// The thickness of the rectangle border.
+ /// Thrown if rect is null.
public static void DrawFaceRect(Mat imgMat, DlibFaceLandmarkDetector.FaceLandmarkDetector.RectDetection rect, Scalar color, int thickness)
{
if (rect == null)
- throw new ArgumentNullException("rect");
+ throw new ArgumentNullException(nameof(rect));
UnityEngine.Rect _rect = rect.rect;
Imgproc.putText(imgMat, "detection_confidence : " + rect.detection_confidence, new Point(_rect.xMin, _rect.yMin - 20), Imgproc.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar(255, 255, 255, 255), 1, Imgproc.LINE_AA, false);
@@ -77,16 +166,58 @@ public static void DrawFaceRect(Mat imgMat, DlibFaceLandmarkDetector.FaceLandmar
}
///
- /// Draws a face rect.
+ /// Draws a rectangle and detection information on the specified image based on the given data.
///
- /// Image mat.
- /// Detected object's data. [left, top, width, height, detection_confidence, weight_index]
- /// Color.
- /// Thickness.
+ /// The image on which to draw the rectangle and text.
+ /// The RectDetection containing the rectangle and detection details.
+ /// The color of the rectangle border.
+ /// The thickness of the rectangle border.
+ /// Thrown if rect is null.
+ public static void DrawFaceRect(Mat imgMat, DlibFaceLandmarkDetector.FaceLandmarkDetector.RectDetection rect, in Vec4d color, int thickness)
+ {
+ if (rect == null)
+ throw new ArgumentNullException(nameof(rect));
+
+ UnityEngine.Rect _rect = rect.rect;
+ Imgproc.putText(imgMat, "detection_confidence : " + rect.detection_confidence, new Vec2d(_rect.xMin, _rect.yMin - 20), Imgproc.FONT_HERSHEY_SIMPLEX, 0.5, new Vec4d(255, 255, 255, 255), 1, Imgproc.LINE_AA, false);
+ Imgproc.putText(imgMat, "weight_index : " + rect.weight_index, new Vec2d(_rect.xMin, _rect.yMin - 5), Imgproc.FONT_HERSHEY_SIMPLEX, 0.5, new Vec4d(255, 255, 255, 255), 1, Imgproc.LINE_AA, false);
+ Imgproc.rectangle(imgMat, new Vec2d(_rect.xMin, _rect.yMin), new Vec2d(_rect.xMax, _rect.yMax), color, thickness);
+ }
+
+ ///
+ /// Draws a rectangle and detection information on the specified image based on the given data.
+ ///
+ /// The image on which to draw the rectangle and text.
+ /// The RectDetection containing the rectangle and detection details.
+ /// The color of the rectangle border.
+ /// The thickness of the rectangle border.
+ /// Thrown if rect is null.
+ public static void DrawFaceRect(Mat imgMat, DlibFaceLandmarkDetector.FaceLandmarkDetector.RectDetection rect, in (double v0, double v1, double v2, double v3) color, int thickness)
+ {
+ if (rect == null)
+ throw new ArgumentNullException(nameof(rect));
+
+ UnityEngine.Rect _rect = rect.rect;
+ Imgproc.putText(imgMat, "detection_confidence : " + rect.detection_confidence, (_rect.xMin, _rect.yMin - 20), Imgproc.FONT_HERSHEY_SIMPLEX, 0.5, (255, 255, 255, 255), 1, Imgproc.LINE_AA, false);
+ Imgproc.putText(imgMat, "weight_index : " + rect.weight_index, (_rect.xMin, _rect.yMin - 5), Imgproc.FONT_HERSHEY_SIMPLEX, 0.5, (255, 255, 255, 255), 1, Imgproc.LINE_AA, false);
+ Imgproc.rectangle(imgMat, (_rect.xMin, _rect.yMin), (_rect.xMax, _rect.yMax), color, thickness);
+ }
+
+ ///
+ /// Draws a rectangle and detection information on the specified image based on the provided rectangle data.
+ ///
+ /// The image on which to draw the rectangle and text.
+ ///
+ /// An array containing the rectangle data in the format [x, y, width, height, detection_confidence, weight_index].
+ /// The last two values are optional and used for displaying additional detection information.
+ ///
+ /// The color of the rectangle border.
+ /// The thickness of the rectangle border.
+ /// Thrown if rect is null.
public static void DrawFaceRect(Mat imgMat, double[] rect, Scalar color, int thickness)
{
if (rect == null)
- throw new ArgumentNullException("rect");
+ throw new ArgumentNullException(nameof(rect));
if (rect.Length > 4)
Imgproc.putText(imgMat, "detection_confidence : " + rect[4], new Point(rect[0], rect[1] - 20), Imgproc.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar(255, 255, 255, 255), 1, Imgproc.LINE_AA, false);
@@ -96,18 +227,75 @@ public static void DrawFaceRect(Mat imgMat, double[] rect, Scalar color, int thi
}
///
- /// Draws a face landmark.
- /// This method supports 68,17,6,5 landmark points.
+ /// Draws a rectangle and detection information on the specified image based on the provided rectangle data.
+ ///
+ /// The image on which to draw the rectangle and text.
+ ///
+ /// An array containing the rectangle data in the format [x, y, width, height, detection_confidence, weight_index].
+ /// The last two values are optional and used for displaying additional detection information.
+ ///
+ /// The color of the rectangle border.
+ /// The thickness of the rectangle border.
+ /// Thrown if rect is null.
+ public static void DrawFaceRect(Mat imgMat, double[] rect, in Vec4d color, int thickness)
+ {
+ if (rect == null)
+ throw new ArgumentNullException(nameof(rect));
+
+ if (rect.Length > 4)
+ Imgproc.putText(imgMat, "detection_confidence : " + rect[4], new Vec2d(rect[0], rect[1] - 20), Imgproc.FONT_HERSHEY_SIMPLEX, 0.5, new Vec4d(255, 255, 255, 255), 1, Imgproc.LINE_AA, false);
+ if (rect.Length > 5)
+ Imgproc.putText(imgMat, "weight_index : " + rect[5], new Vec2d(rect[0], rect[1] - 5), Imgproc.FONT_HERSHEY_SIMPLEX, 0.5, new Vec4d(255, 255, 255, 255), 1, Imgproc.LINE_AA, false);
+ Imgproc.rectangle(imgMat, new Vec2d(rect[0], rect[1]), new Vec2d(rect[0] + rect[2], rect[1] + rect[3]), color, thickness);
+ }
+
+ ///
+ /// Draws a rectangle and detection information on the specified image based on the provided rectangle data.
+ ///
+ /// The image on which to draw the rectangle and text.
+ ///
+ /// An array containing the rectangle data in the format [x, y, width, height, detection_confidence, weight_index].
+ /// The last two values are optional and used for displaying additional detection information.
+ ///
+ /// The color of the rectangle border.
+ /// The thickness of the rectangle border.
+ /// Thrown if rect is null.
+ public static void DrawFaceRect(Mat imgMat, double[] rect, in (double v0, double v1, double v2, double v3) color, int thickness)
+ {
+ if (rect == null)
+ throw new ArgumentNullException(nameof(rect));
+
+ if (rect.Length > 4)
+ Imgproc.putText(imgMat, "detection_confidence : " + rect[4], (rect[0], rect[1] - 20), Imgproc.FONT_HERSHEY_SIMPLEX, 0.5, (255, 255, 255, 255), 1, Imgproc.LINE_AA, false);
+ if (rect.Length > 5)
+ Imgproc.putText(imgMat, "weight_index : " + rect[5], (rect[0], rect[1] - 5), Imgproc.FONT_HERSHEY_SIMPLEX, 0.5, (255, 255, 255, 255), 1, Imgproc.LINE_AA, false);
+ Imgproc.rectangle(imgMat, (rect[0], rect[1]), (rect[0] + rect[2], rect[1] + rect[3]), color, thickness);
+ }
+
+ ///
+ /// Draws a face landmark on the specified image.
+ /// This method supports drawing landmarks for 68, 17, 6, or 5 landmark points.
+ /// The landmarks are drawn by connecting the points with lines, and optionally, index numbers can be drawn on each point.
///
- /// Image mat.
- /// Points.
- /// Color.
- /// Thickness.
- /// Determines if draw index numbers.
+ /// The image on which to draw the face landmarks.
+ ///
+ /// A list of points representing the landmark positions. The number of points must match one of the following:
+ /// 5 points for a basic face shape (e.g., eyes, nose, mouth),
+ /// 6 points for a face with more detailed landmarks,
+ /// 17 points for a detailed face shape, or
+ /// 68 points for a full set of face landmarks.
+ ///
+ /// The color used to draw the landmarks and lines.
+ /// The thickness of the lines used to connect the landmarks.
+ ///
+ /// If set to true, index numbers will be drawn next to each landmark point.
+ /// If set to false, no index numbers will be drawn. Default is false.
+ ///
+ /// Thrown if points is null.
public static void DrawFaceLandmark(Mat imgMat, IList points, Scalar color, int thickness, bool drawIndexNumbers = false)
{
if (points == null)
- throw new ArgumentNullException("points");
+ throw new ArgumentNullException(nameof(points));
if (points.Count == 5)
{
@@ -203,20 +391,31 @@ public static void DrawFaceLandmark(Mat imgMat, IList points, Scalar co
}
///
- /// Draws a face landmark.
- /// This method supports 68,17,6,5 landmark points.
+ /// Draws a face landmark on the specified image.
+ /// This method supports drawing landmarks for 68, 17, 6, or 5 landmark points.
+ /// The landmarks are drawn by connecting the points with lines, and optionally, index numbers can be drawn on each point.
///
- /// Image mat.
- /// Points.
- /// Color.
- /// Thickness.
- /// Determines if draw index numbers.
- public static void DrawFaceLandmark(Mat imgMat, IList points, Scalar color, int thickness, bool drawIndexNumbers = false)
+ /// The image on which to draw the face landmarks.
+ ///
+ /// A list of points representing the landmark positions. The number of points must match one of the following:
+ /// 5 points for a basic face shape (e.g., eyes, nose, mouth),
+ /// 6 points for a face with more detailed landmarks,
+ /// 17 points for a detailed face shape, or
+ /// 68 points for a full set of face landmarks.
+ ///
+ /// The color used to draw the landmarks and lines.
+ /// The thickness of the lines used to connect the landmarks.
+ ///
+ /// If set to true, index numbers will be drawn next to each landmark point.
+ /// If set to false, no index numbers will be drawn. Default is false.
+ ///
+ /// Thrown if points is null.
+ public static void DrawFaceLandmark(Mat imgMat, in Vec2d[] points, in Vec4d color, int thickness, bool drawIndexNumbers = false)
{
if (points == null)
- throw new ArgumentNullException("points");
+ throw new ArgumentNullException(nameof(points));
- if (points.Count == 5)
+ if (points.Length == 5)
{
Imgproc.line(imgMat, points[0], points[1], color, thickness);
@@ -225,7 +424,7 @@ public static void DrawFaceLandmark(Mat imgMat, IList points, Scalar colo
Imgproc.line(imgMat, points[3], points[2], color, thickness);
}
- else if (points.Count == 6)
+ else if (points.Length == 6)
{
Imgproc.line(imgMat, points[2], points[3], color, thickness);
@@ -235,7 +434,7 @@ public static void DrawFaceLandmark(Mat imgMat, IList points, Scalar colo
Imgproc.line(imgMat, points[0], points[1], color, thickness);
}
- else if (points.Count == 17)
+ else if (points.Length == 17)
{
Imgproc.line(imgMat, points[2], points[9], color, thickness);
@@ -260,7 +459,7 @@ public static void DrawFaceLandmark(Mat imgMat, IList points, Scalar colo
Imgproc.circle(imgMat, points[i], 2, color, -1);
}
- else if (points.Count == 68)
+ else if (points.Length == 68)
{
for (int i = 1; i <= 16; ++i)
@@ -295,7 +494,7 @@ public static void DrawFaceLandmark(Mat imgMat, IList points, Scalar colo
}
else
{
- for (int i = 0; i < points.Count; i++)
+ for (int i = 0; i < points.Length; i++)
{
Imgproc.circle(imgMat, points[i], 2, color, -1);
}
@@ -304,157 +503,531 @@ public static void DrawFaceLandmark(Mat imgMat, IList points, Scalar colo
// Draw the index number of facelandmark points.
if (drawIndexNumbers)
{
- for (int i = 0; i < points.Count; ++i)
- Imgproc.putText(imgMat, i.ToString(), points[i], Imgproc.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar(255, 255, 255, 255), 1, Imgproc.LINE_AA, false);
+ for (int i = 0; i < points.Length; ++i)
+ Imgproc.putText(imgMat, i.ToString(), points[i], Imgproc.FONT_HERSHEY_SIMPLEX, 0.5, new Vec4d(255, 255, 255, 255), 1, Imgproc.LINE_AA, false);
}
}
///
- /// Draws a face landmark.
- /// This method supports 68,17,6,5 landmark points.
+ /// Draws a face landmark on the specified image.
+ /// This method supports drawing landmarks for 68, 17, 6, or 5 landmark points.
+ /// The landmarks are drawn by connecting the points with lines, and optionally, index numbers can be drawn on each point.
///
- /// Image mat.
- /// Detected object landmark data.[x_0, y_0, x_1, y_1, ...]
- /// Color.
- /// Thickness.
- /// Determines if draw index numbers.
- public static void DrawFaceLandmark(Mat imgMat, double[] points, Scalar color, int thickness, bool drawIndexNumbers = false)
+ /// The image on which to draw the face landmarks.
+ ///
+ /// A list of points representing the landmark positions. The number of points must match one of the following:
+ /// 5 points for a basic face shape (e.g., eyes, nose, mouth),
+ /// 6 points for a face with more detailed landmarks,
+ /// 17 points for a detailed face shape, or
+ /// 68 points for a full set of face landmarks.
+ ///
+ /// The color used to draw the landmarks and lines.
+ /// The thickness of the lines used to connect the landmarks.
+ ///
+ /// If set to true, index numbers will be drawn next to each landmark point.
+ /// If set to false, no index numbers will be drawn. Default is false.
+ ///
+ /// Thrown if points is null.
+ public static void DrawFaceLandmark(Mat imgMat, in (double x, double y)[] points, in (double v0, double v1, double v2, double v3) color, int thickness, bool drawIndexNumbers = false)
{
if (points == null)
- throw new ArgumentNullException("points");
+ throw new ArgumentNullException(nameof(points));
- List _points = new List();
- for (int i = 0; i < points.Length; i = i + 2)
+ if (points.Length == 5)
{
- _points.Add(new Vector2((float)points[i], (float)points[i + 1]));
+
+ Imgproc.line(imgMat, points[0], points[1], color, thickness);
+ Imgproc.line(imgMat, points[1], points[4], color, thickness);
+ Imgproc.line(imgMat, points[4], points[3], color, thickness);
+ Imgproc.line(imgMat, points[3], points[2], color, thickness);
+
}
- DrawFaceLandmark(imgMat, _points, color, thickness, drawIndexNumbers);
- }
+ else if (points.Length == 6)
+ {
+ Imgproc.line(imgMat, points[2], points[3], color, thickness);
+ Imgproc.line(imgMat, points[4], points[5], color, thickness);
+ Imgproc.line(imgMat, points[3], points[0], color, thickness);
+ Imgproc.line(imgMat, points[4], points[0], color, thickness);
+ Imgproc.line(imgMat, points[0], points[1], color, thickness);
+ }
+ else if (points.Length == 17)
+ {
+ Imgproc.line(imgMat, points[2], points[9], color, thickness);
+ Imgproc.line(imgMat, points[9], points[3], color, thickness);
+ Imgproc.line(imgMat, points[3], points[10], color, thickness);
+ Imgproc.line(imgMat, points[10], points[2], color, thickness);
- ///
- /// Convert Vector2 list to Vector2 array.
- ///
- /// List of Vector2.
- /// Array of Vector2.
- /// Array of Vector2.
- public static Vector2[] ConvertVector2ListToVector2Array(IList<Vector2> src, Vector2[] dst = null)
- {
- if (src == null)
- throw new ArgumentNullException("src");
+ Imgproc.line(imgMat, points[4], points[11], color, thickness);
+ Imgproc.line(imgMat, points[11], points[5], color, thickness);
+ Imgproc.line(imgMat, points[5], points[12], color, thickness);
+ Imgproc.line(imgMat, points[12], points[4], color, thickness);
- if (dst != null && src.Count != dst.Length)
- throw new ArgumentException("src.Count != dst.Length");
+ Imgproc.line(imgMat, points[3], points[0], color, thickness);
+ Imgproc.line(imgMat, points[4], points[0], color, thickness);
+ Imgproc.line(imgMat, points[0], points[1], color, thickness);
- if (dst == null)
- {
- dst = new Vector2[src.Count];
- }
+ for (int i = 14; i <= 16; ++i)
+ Imgproc.line(imgMat, points[i], points[i - 1], color, thickness);
+ Imgproc.line(imgMat, points[16], points[13], color, thickness);
+
+ for (int i = 6; i <= 8; i++)
+ Imgproc.circle(imgMat, points[i], 2, color, -1);
- for (int i = 0; i < src.Count; ++i)
- {
- dst[i].x = src[i].x;
- dst[i].y = src[i].y;
}
+ else if (points.Length == 68)
+ {
- return dst;
- }
+ for (int i = 1; i <= 16; ++i)
+ Imgproc.line(imgMat, points[i], points[i - 1], color, thickness);
- ///
- /// Convert Vector2 list to Point list.
- ///
- /// List of Vector2.
- /// List of Point.
- /// List of Point.
- public static List<Point> ConvertVector2ListToPointList(IList<Vector2> src, List<Point> dst = null)
- {
- if (src == null)
- throw new ArgumentNullException("src");
+ for (int i = 28; i <= 30; ++i)
+ Imgproc.line(imgMat, points[i], points[i - 1], color, thickness);
- if (dst == null)
- {
- dst = new List<Point>();
- }
+ for (int i = 18; i <= 21; ++i)
+ Imgproc.line(imgMat, points[i], points[i - 1], color, thickness);
+ for (int i = 23; i <= 26; ++i)
+ Imgproc.line(imgMat, points[i], points[i - 1], color, thickness);
+ for (int i = 31; i <= 35; ++i)
+ Imgproc.line(imgMat, points[i], points[i - 1], color, thickness);
+ Imgproc.line(imgMat, points[30], points[35], color, thickness);
- if (dst.Count != src.Count)
+ for (int i = 37; i <= 41; ++i)
+ Imgproc.line(imgMat, points[i], points[i - 1], color, thickness);
+ Imgproc.line(imgMat, points[36], points[41], color, thickness);
+
+ for (int i = 43; i <= 47; ++i)
+ Imgproc.line(imgMat, points[i], points[i - 1], color, thickness);
+ Imgproc.line(imgMat, points[42], points[47], color, thickness);
+
+ for (int i = 49; i <= 59; ++i)
+ Imgproc.line(imgMat, points[i], points[i - 1], color, thickness);
+ Imgproc.line(imgMat, points[48], points[59], color, thickness);
+
+ for (int i = 61; i <= 67; ++i)
+ Imgproc.line(imgMat, points[i], points[i - 1], color, thickness);
+ Imgproc.line(imgMat, points[60], points[67], color, thickness);
+ }
+ else
{
- dst.Clear();
- for (int i = 0; i < src.Count; i++)
+ for (int i = 0; i < points.Length; i++)
{
- dst.Add(new Point());
+ Imgproc.circle(imgMat, points[i], 2, color, -1);
}
}
- for (int i = 0; i < src.Count; ++i)
+ // Draw the index number of face landmark points.
+ if (drawIndexNumbers)
{
- dst[i].x = src[i].x;
- dst[i].y = src[i].y;
+ for (int i = 0; i < points.Length; ++i)
+ Imgproc.putText(imgMat, i.ToString(), points[i], Imgproc.FONT_HERSHEY_SIMPLEX, 0.5, (255, 255, 255, 255), 1, Imgproc.LINE_AA, false);
}
-
- return dst;
}
///
- /// Convert Vector2 list to Point array.
+ /// Draws a face landmark on the specified image.
+ /// This method supports drawing landmarks for 68, 17, 6, or 5 landmark points.
+ /// The landmarks are drawn by connecting the points with lines, and optionally, index numbers can be drawn on each point.
///
- /// List of Vector2.
- /// Array of Point.
- /// Array of Point.
- public static Point[] ConvertVector2ListToPointArray(IList<Vector2> src, Point[] dst = null)
+ /// The image on which to draw the face landmarks.
+ ///
+ /// A list of points representing the landmark positions. The number of points must match one of the following:
+ /// 5 points for a basic face shape (e.g., eyes, nose, mouth),
+ /// 6 points for a face with more detailed landmarks,
+ /// 17 points for a detailed face shape, or
+ /// 68 points for a full set of face landmarks.
+ ///
+ /// The color used to draw the landmarks and lines.
+ /// The thickness of the lines used to connect the landmarks.
+ ///
+ /// If set to true, index numbers will be drawn next to each landmark point.
+ /// If set to false, no index numbers will be drawn. Default is false.
+ ///
+ /// Thrown if points is null.
+ public static void DrawFaceLandmark(Mat imgMat, IList<Vector2> points, Scalar color, int thickness, bool drawIndexNumbers = false)
{
- if (src == null)
- throw new ArgumentNullException("src");
-
- if (dst != null && src.Count != dst.Length)
- throw new ArgumentException("src.Count != dst.Length");
-
- if (dst == null)
- {
- dst = new Point[src.Count];
- }
+ if (points == null)
+ throw new ArgumentNullException(nameof(points));
- for (int i = 0; i < src.Count; ++i)
+ if (points.Count == 5)
{
- dst[i] = new Point(src[i].x, src[i].y);
- }
- return dst;
- }
+ Imgproc.line(imgMat, points[0], points[1], color, thickness);
+ Imgproc.line(imgMat, points[1], points[4], color, thickness);
+ Imgproc.line(imgMat, points[4], points[3], color, thickness);
+ Imgproc.line(imgMat, points[3], points[2], color, thickness);
- ///
- /// Convert Vector2 list to array.
- ///
- /// List of Vector2.
- /// Array of double.
- /// Array of double.
- public static double[] ConvertVector2ListToArray(IList<Vector2> src, double[] dst = null)
- {
- if (src == null)
- throw new ArgumentNullException("src");
+ }
+ else if (points.Count == 6)
+ {
- if (dst != null && src.Count * 2 != dst.Length)
- throw new ArgumentException("src.Count * 2 != dst.Length");
+ Imgproc.line(imgMat, points[2], points[3], color, thickness);
+ Imgproc.line(imgMat, points[4], points[5], color, thickness);
+ Imgproc.line(imgMat, points[3], points[0], color, thickness);
+ Imgproc.line(imgMat, points[4], points[0], color, thickness);
+ Imgproc.line(imgMat, points[0], points[1], color, thickness);
- if (dst == null)
- {
- dst = new double[src.Count * 2];
}
-
- for (int i = 0; i < src.Count; ++i)
+ else if (points.Count == 17)
{
- dst[i * 2] = src[i].x;
- dst[i * 2 + 1] = src[i].y;
- }
- return dst;
- }
+ Imgproc.line(imgMat, points[2], points[9], color, thickness);
+ Imgproc.line(imgMat, points[9], points[3], color, thickness);
+ Imgproc.line(imgMat, points[3], points[10], color, thickness);
+ Imgproc.line(imgMat, points[10], points[2], color, thickness);
+ Imgproc.line(imgMat, points[4], points[11], color, thickness);
+ Imgproc.line(imgMat, points[11], points[5], color, thickness);
+ Imgproc.line(imgMat, points[5], points[12], color, thickness);
+ Imgproc.line(imgMat, points[12], points[4], color, thickness);
+ Imgproc.line(imgMat, points[3], points[0], color, thickness);
+ Imgproc.line(imgMat, points[4], points[0], color, thickness);
+ Imgproc.line(imgMat, points[0], points[1], color, thickness);
+ for (int i = 14; i <= 16; ++i)
+ Imgproc.line(imgMat, points[i], points[i - 1], color, thickness);
+ Imgproc.line(imgMat, points[16], points[13], color, thickness);
- ///
+ for (int i = 6; i <= 8; i++)
+ Imgproc.circle(imgMat, points[i], 2, color, -1);
+
+ }
+ else if (points.Count == 68)
+ {
+
+ for (int i = 1; i <= 16; ++i)
+ Imgproc.line(imgMat, points[i], points[i - 1], color, thickness);
+
+ for (int i = 28; i <= 30; ++i)
+ Imgproc.line(imgMat, points[i], points[i - 1], color, thickness);
+
+ for (int i = 18; i <= 21; ++i)
+ Imgproc.line(imgMat, points[i], points[i - 1], color, thickness);
+ for (int i = 23; i <= 26; ++i)
+ Imgproc.line(imgMat, points[i], points[i - 1], color, thickness);
+ for (int i = 31; i <= 35; ++i)
+ Imgproc.line(imgMat, points[i], points[i - 1], color, thickness);
+ Imgproc.line(imgMat, points[30], points[35], color, thickness);
+
+ for (int i = 37; i <= 41; ++i)
+ Imgproc.line(imgMat, points[i], points[i - 1], color, thickness);
+ Imgproc.line(imgMat, points[36], points[41], color, thickness);
+
+ for (int i = 43; i <= 47; ++i)
+ Imgproc.line(imgMat, points[i], points[i - 1], color, thickness);
+ Imgproc.line(imgMat, points[42], points[47], color, thickness);
+
+ for (int i = 49; i <= 59; ++i)
+ Imgproc.line(imgMat, points[i], points[i - 1], color, thickness);
+ Imgproc.line(imgMat, points[48], points[59], color, thickness);
+
+ for (int i = 61; i <= 67; ++i)
+ Imgproc.line(imgMat, points[i], points[i - 1], color, thickness);
+ Imgproc.line(imgMat, points[60], points[67], color, thickness);
+ }
+ else
+ {
+ for (int i = 0; i < points.Count; i++)
+ {
+ Imgproc.circle(imgMat, points[i], 2, color, -1);
+ }
+ }
+
+ // Draw the index number of face landmark points.
+ if (drawIndexNumbers)
+ {
+ for (int i = 0; i < points.Count; ++i)
+ Imgproc.putText(imgMat, i.ToString(), points[i], Imgproc.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar(255, 255, 255, 255), 1, Imgproc.LINE_AA, false);
+ }
+ }
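+
+ // Usage sketch (illustrative only; this helper is not part of the original utility class):
+ // drawing a landmark set returned as a List<Vector2>, the representation a Dlib-style
+ // landmark detector in this project is assumed to produce, using the Scalar-color overload
+ // above. "imgMat" is assumed to be a valid RGBA Mat owned by the caller.
+ private static void DrawLandmarkListSketch(Mat imgMat, List<Vector2> landmarkPoints)
+ {
+ // Green lines, 2 px thick; index numbers help verify the point ordering while debugging.
+ DrawFaceLandmark(imgMat, landmarkPoints, new Scalar(0, 255, 0, 255), 2, drawIndexNumbers: true);
+ }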
+
+ ///
+ /// Draws a face landmark on the specified image.
+ /// This method supports drawing landmarks for 68, 17, 6, or 5 landmark points.
+ /// The landmarks are drawn by connecting the points with lines, and optionally, index numbers can be drawn on each point.
+ ///
+ /// The image on which to draw the face landmarks.
+ ///
+ /// A double array representing the landmark positions in [x_0, y_0, x_1, y_1, ...] order. The number of points must match one of the following:
+ /// 5 points for a basic face shape (e.g., eyes, nose, mouth),
+ /// 6 points for a face with more detailed landmarks,
+ /// 17 points for a detailed face shape, or
+ /// 68 points for a full set of face landmarks.
+ ///
+ /// The color used to draw the landmarks and lines.
+ /// The thickness of the lines used to connect the landmarks.
+ ///
+ /// If set to true, index numbers will be drawn next to each landmark point.
+ /// If set to false, no index numbers will be drawn. Default is false.
+ ///
+ /// Thrown if points is null.
+ public static void DrawFaceLandmark(Mat imgMat, double[] points, Scalar color, int thickness, bool drawIndexNumbers = false)
+ {
+ if (points == null)
+ throw new ArgumentNullException(nameof(points));
+
+ List<Vector2> _points = new List<Vector2>();
+ for (int i = 0; i < points.Length; i = i + 2)
+ {
+ _points.Add(new Vector2((float)points[i], (float)points[i + 1]));
+ }
+ DrawFaceLandmark(imgMat, _points, color, thickness, drawIndexNumbers);
+ }
+
+ ///
+ /// Draws a face landmark on the specified image.
+ /// This method supports drawing landmarks for 68, 17, 6, or 5 landmark points.
+ /// The landmarks are drawn by connecting the points with lines, and optionally, index numbers can be drawn on each point.
+ ///
+ /// The image on which to draw the face landmarks.
+ ///
+ /// A double array representing the landmark positions, where each pair of consecutive values represents the X and Y coordinates of a point.
+ /// The number of points must match one of the following:
+ /// 5 points for a basic face shape (e.g., eyes, nose, mouth),
+ /// 6 points for a face with more detailed landmarks,
+ /// 17 points for a detailed face shape, or
+ /// 68 points for a full set of face landmarks.
+ ///
+ /// The color used to draw the landmarks and lines.
+ /// The thickness of the lines used to connect the landmarks.
+ ///
+ /// If set to true, index numbers will be drawn next to each landmark point.
+ /// If set to false, no index numbers will be drawn. Default is false.
+ ///
+ /// Thrown if points is null.
+ public static void DrawFaceLandmark(Mat imgMat, double[] points, in Vec4d color, int thickness, bool drawIndexNumbers = false)
+ {
+ if (points == null)
+ throw new ArgumentNullException(nameof(points));
+
+ Vec2d[] pointsVec2d = new Vec2d[points.Length / 2];
+#if NET_STANDARD_2_1
+ Span<Vec2d> pointsSpan = MemoryMarshal.Cast<double, Vec2d>(points);
+ pointsSpan.CopyTo(pointsVec2d);
+#else
+ for (int i = 0; i < pointsVec2d.Length; i++)
+ {
+ pointsVec2d[i].Item1 = points[i * 2 + 0];
+ pointsVec2d[i].Item2 = points[i * 2 + 1];
+ }
+#endif
+ DrawFaceLandmark(imgMat, pointsVec2d, color, thickness, drawIndexNumbers);
+ }
+
+ ///
+ /// Draws a face landmark on the specified image.
+ /// This method supports drawing landmarks for 68, 17, 6, or 5 landmark points.
+ /// The landmarks are drawn by connecting the points with lines, and optionally, index numbers can be drawn on each point.
+ ///
+ /// The image on which to draw the face landmarks.
+ ///
+ /// A double array representing the landmark positions, where each pair of consecutive values represents the X and Y coordinates of a point.
+ /// The number of points must match one of the following:
+ /// 5 points for a basic face shape (e.g., eyes, nose, mouth),
+ /// 6 points for a face with more detailed landmarks,
+ /// 17 points for a detailed face shape, or
+ /// 68 points for a full set of face landmarks.
+ ///
+ /// The color used to draw the landmarks and lines.
+ /// The thickness of the lines used to connect the landmarks.
+ ///
+ /// If set to true, index numbers will be drawn next to each landmark point.
+ /// If set to false, no index numbers will be drawn. Default is false.
+ ///
+ /// Thrown if points is null.
+ public static void DrawFaceLandmark(Mat imgMat, double[] points, in (double v0, double v1, double v2, double v3) color, int thickness, bool drawIndexNumbers = false)
+ {
+ if (points == null)
+ throw new ArgumentNullException(nameof(points));
+
+ (double x, double y)[] pointsValueTuple = new (double x, double y)[points.Length / 2];
+ for (int i = 0; i < pointsValueTuple.Length; i++)
+ {
+ pointsValueTuple[i].Item1 = points[i * 2 + 0];
+ pointsValueTuple[i].Item2 = points[i * 2 + 1];
+ }
+ DrawFaceLandmark(imgMat, pointsValueTuple, color, thickness, drawIndexNumbers);
+ }
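+
+ // Usage sketch (illustrative only; not part of the original utility class): the flat double[]
+ // overloads expect [x_0, y_0, x_1, y_1, ...] and repack the data before delegating to the
+ // point-based overloads above, so the Scalar, Vec4d and ValueTuple color variants all draw
+ // the same result. "imgMat" and "landmarkArray" are assumed to be supplied by the caller.
+ private static void DrawLandmarkArraySketch(Mat imgMat, double[] landmarkArray)
+ {
+ DrawFaceLandmark(imgMat, landmarkArray, new Scalar(255, 0, 0, 255), 2);
+ DrawFaceLandmark(imgMat, landmarkArray, new Vec4d(255, 0, 0, 255), 2);
+ (double v0, double v1, double v2, double v3) red = (255, 0, 0, 255);
+ DrawFaceLandmark(imgMat, landmarkArray, red, 2);
+ }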
+
+
+ ///
+ /// Convert Vector2 list to Vector2 array.
+ ///
+ /// List of Vector2.
+ /// Array of Vector2.
+ /// Array of Vector2.
+ public static Vector2[] ConvertVector2ListToVector2Array(IList<Vector2> src, Vector2[] dst = null)
+ {
+ if (src == null)
+ throw new ArgumentNullException(nameof(src));
+
+ if (dst != null && src.Count != dst.Length)
+ throw new ArgumentException("src.Count != dst.Length");
+
+ if (dst == null)
+ {
+ dst = new Vector2[src.Count];
+ }
+
+ for (int i = 0; i < src.Count; ++i)
+ {
+ dst[i].x = src[i].x;
+ dst[i].y = src[i].y;
+ }
+
+ return dst;
+ }
+
+ ///
+ /// Convert Vector2 list to Point list.
+ ///
+ /// List of Vector2.
+ /// List of Point.
+ /// List of Point.
+ public static List<Point> ConvertVector2ListToPointList(IList<Vector2> src, List<Point> dst = null)
+ {
+ if (src == null)
+ throw new ArgumentNullException(nameof(src));
+
+ if (dst == null)
+ {
+ dst = new List<Point>();
+ }
+
+ if (dst.Count != src.Count)
+ {
+ dst.Clear();
+ for (int i = 0; i < src.Count; i++)
+ {
+ dst.Add(new Point());
+ }
+ }
+
+ for (int i = 0; i < src.Count; ++i)
+ {
+ dst[i].x = src[i].x;
+ dst[i].y = src[i].y;
+ }
+
+ return dst;
+ }
+
+ ///
+ /// Convert Vector2 list to Point array.
+ ///
+ /// List of Vector2.
+ /// Array of Point.
+ /// Array of Point.
+ public static Point[] ConvertVector2ListToPointArray(IList<Vector2> src, Point[] dst = null)
+ {
+ if (src == null)
+ throw new ArgumentNullException(nameof(src));
+
+ if (dst != null && src.Count != dst.Length)
+ throw new ArgumentException("src.Count != dst.Length");
+
+ if (dst == null)
+ {
+ dst = new Point[src.Count];
+ }
+
+ for (int i = 0; i < src.Count; ++i)
+ {
+ dst[i] = new Point(src[i].x, src[i].y);
+ }
+
+ return dst;
+ }
+
+ ///
+ /// Convert Vector2 list to double array.
+ ///
+ /// List of Vector2.
+ /// Array of double.
+ /// Array of double.
+ public static double[] ConvertVector2ListToArray(IList<Vector2> src, double[] dst = null)
+ {
+ if (src == null)
+ throw new ArgumentNullException(nameof(src));
+
+ if (dst != null && src.Count * 2 != dst.Length)
+ throw new ArgumentException("src.Count * 2 != dst.Length");
+
+ if (dst == null)
+ {
+ dst = new double[src.Count * 2];
+ }
+
+ for (int i = 0; i < src.Count; ++i)
+ {
+ dst[i * 2] = src[i].x;
+ dst[i * 2 + 1] = src[i].y;
+ }
+
+ return dst;
+ }
+
+ ///
+ /// Convert Vector2 list to Vec2d array.
+ ///
+ /// List of Vector2.
+ /// Array of Vec2d.
+ /// Array of Vec2d.
+ public static Vec2d[] ConvertVector2ListToVec2dArray(IList<Vector2> src, Vec2d[] dst = null)
+ {
+ if (src == null)
+ throw new ArgumentNullException(nameof(src));
+
+ if (dst != null && src.Count != dst.Length)
+ throw new ArgumentException("src.Count != dst.Length");
+
+ if (dst == null)
+ {
+ dst = new Vec2d[src.Count];
+ }
+
+ for (int i = 0; i < src.Count; ++i)
+ {
+ dst[i].Item1 = src[i].x;
+ dst[i].Item2 = src[i].y;
+ }
+
+ return dst;
+ }
+
+ ///
+ /// Convert Vector2 list to ValueTuple array.
+ ///
+ /// List of Vector2.
+ /// Array of ValueTuple.
+ /// Array of ValueTuple.
+ public static (double x, double y)[] ConvertVector2ListToValueTupleArray(IList<Vector2> src, (double x, double y)[] dst = null)
+ {
+ if (src == null)
+ throw new ArgumentNullException(nameof(src));
+
+ if (dst != null && src.Count != dst.Length)
+ throw new ArgumentException("src.Count != dst.Length");
+
+ if (dst == null)
+ {
+ dst = new (double x, double y)[src.Count];
+ }
+
+ for (int i = 0; i < src.Count; ++i)
+ {
+ dst[i].Item1 = src[i].x;
+ dst[i].Item2 = src[i].y;
+ }
+
+ return dst;
+ }
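+
+ // Usage sketch (illustrative only; not part of the original utility class): the Vector2-list
+ // converters above produce the representations accepted by the DrawFaceLandmark overloads.
+ // The optional "dst" argument lets a caller reuse a preallocated buffer of matching size and
+ // avoid per-frame allocations.
+ private static void ConvertLandmarkListSketch(List<Vector2> landmarkPoints)
+ {
+ double[] flat = ConvertVector2ListToArray(landmarkPoints); // [x_0, y_0, x_1, y_1, ...]
+ Vec2d[] pairs = ConvertVector2ListToVec2dArray(landmarkPoints); // (x, y) structs
+ (double x, double y)[] tuples = ConvertVector2ListToValueTupleArray(landmarkPoints);
+
+ // On subsequent frames the same buffers can be filled in place:
+ ConvertVector2ListToArray(landmarkPoints, flat);
+ ConvertVector2ListToVec2dArray(landmarkPoints, pairs);
+ ConvertVector2ListToValueTupleArray(landmarkPoints, tuples);
+ }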
+
+
+ ///
/// Convert Point list to Point array.
///
/// List of Point.
@@ -463,7 +1036,7 @@ public static double[] ConvertVector2ListToArray(IList src, double[] ds
public static Point[] ConvertPointListToPointArray(IList<Point> src, Point[] dst = null)
{
if (src == null)
- throw new ArgumentNullException("src");
+ throw new ArgumentNullException(nameof(src));
if (dst != null && src.Count != dst.Length)
throw new ArgumentException("src.Count != dst.Length");
@@ -490,7 +1063,7 @@ public static Point[] ConvertPointListToPointArray(IList src, Point[] dst
public static List<Vector2> ConvertPointListToVector2List(IList<Point> src, List<Vector2> dst = null)
{
if (src == null)
- throw new ArgumentNullException("src");
+ throw new ArgumentNullException(nameof(src));
if (dst == null)
{
@@ -516,7 +1089,7 @@ public static List ConvertPointListToVector2List(IList src, List
public static Vector2[] ConvertPointListToVector2Array(IList<Point> src, Vector2[] dst = null)
{
if (src == null)
- throw new ArgumentNullException("src");
+ throw new ArgumentNullException(nameof(src));
if (dst != null && src.Count != dst.Length)
throw new ArgumentException("src.Count != dst.Length");
@@ -536,7 +1109,7 @@ public static Vector2[] ConvertPointListToVector2Array(IList src, Vector2
}
///
- /// Convert Point list to array.
+ /// Convert Point list to double array.
///
/// List of Point.
/// Array of double.
@@ -544,7 +1117,7 @@ public static Vector2[] ConvertPointListToVector2Array(IList src, Vector2
public static double[] ConvertPointListToArray(IList<Point> src, double[] dst = null)
{
if (src == null)
- throw new ArgumentNullException("src");
+ throw new ArgumentNullException(nameof(src));
if (dst != null && src.Count * 2 != dst.Length)
throw new ArgumentException("src.Count * 2 != dst.Length");
@@ -563,11 +1136,64 @@ public static double[] ConvertPointListToArray(IList src, double[] dst =
return dst;
}
+ ///
+ /// Convert Point list to Vec2d array.
+ ///
+ /// List of Point.
+ /// Array of Vec2d.
+ /// Array of Vec2d.
+ public static Vec2d[] ConvertPointListToVec2dArray(IList<Point> src, Vec2d[] dst = null)
+ {
+ if (src == null)
+ throw new ArgumentNullException(nameof(src));
+
+ if (dst != null && src.Count != dst.Length)
+ throw new ArgumentException("src.Count != dst.Length");
+
+ if (dst == null)
+ {
+ dst = new Vec2d[src.Count];
+ }
+
+ for (int i = 0; i < src.Count; ++i)
+ {
+ dst[i].Item1 = src[i].x;
+ dst[i].Item2 = src[i].y;
+ }
+
+ return dst;
+ }
+
+ ///
+ /// Convert Point list to ValueTuple array.
+ ///
+ /// List of Point.
+ /// Array of ValueTuple.
+ /// Array of ValueTuple.
+ public static (double x, double y)[] ConvertPointListToValueTupleArray(IList<Point> src, (double x, double y)[] dst = null)
+ {
+ if (src == null)
+ throw new ArgumentNullException(nameof(src));
+
+ if (dst != null && src.Count != dst.Length)
+ throw new ArgumentException("src.Count != dst.Length");
+
+ if (dst == null)
+ {
+ dst = new (double x, double y)[src.Count];
+ }
+ for (int i = 0; i < src.Count; ++i)
+ {
+ dst[i].Item1 = src[i].x;
+ dst[i].Item2 = src[i].y;
+ }
+ return dst;
+ }
///
- /// Convert array to Vector2 list.
+ /// Convert double array to Vector2 list.
///
/// Array of double.
/// List of Vector2.
@@ -575,7 +1201,7 @@ public static double[] ConvertPointListToArray(IList src, double[] dst =
public static List<Vector2> ConvertArrayToVector2List(double[] src, List<Vector2> dst = null)
{
if (src == null)
- throw new ArgumentNullException("src");
+ throw new ArgumentNullException(nameof(src));
if (dst == null)
{
@@ -594,7 +1220,7 @@ public static List ConvertArrayToVector2List(double[] src, List
- /// Convert array to Vector2 array.
+ /// Convert double array to Vector2 array.
///
/// Array of double.
/// Array of Vector2.
@@ -602,7 +1228,7 @@ public static List ConvertArrayToVector2List(double[] src, List
- /// Convert array to Point list.
+ /// Convert double array to Point list.
///
/// Array of double.
/// List of Point.
@@ -630,7 +1256,7 @@ public static Vector2[] ConvertArrayToVector2Array(double[] src, Vector2[] dst =
public static List<Point> ConvertArrayToPointList(double[] src, List<Point> dst = null)
{
if (src == null)
- throw new ArgumentNullException("src");
+ throw new ArgumentNullException(nameof(src));
if (dst == null)
{
@@ -656,7 +1282,7 @@ public static List ConvertArrayToPointList(double[] src, List dst
}
///
- /// Convert array to Point array.
+ /// Convert double array to Point array.
///
/// Array of double.
/// Array of Point.
@@ -664,7 +1290,7 @@ public static List ConvertArrayToPointList(double[] src, List dst
public static Point[] ConvertArrayToPointArray(double[] src, Point[] dst = null)
{
if (src == null)
- throw new ArgumentNullException("src");
+ throw new ArgumentNullException(nameof(src));
if (dst != null && src.Length / 2 != dst.Length)
throw new ArgumentException("src.Length / 2 != dst.Length");
@@ -681,5 +1307,295 @@ public static Point[] ConvertArrayToPointArray(double[] src, Point[] dst = null)
return dst;
}
+
+ ///
+ /// Convert double array to Vec2d array.
+ ///
+ /// Array of double.
+ /// Array of Vec2d.
+ /// Array of Vec2d.
+ public static Vec2d[] ConvertArrayToVec2dArray(double[] src, Vec2d[] dst = null)
+ {
+ if (src == null)
+ throw new ArgumentNullException(nameof(src));
+
+ if (dst != null && src.Length / 2 != dst.Length)
+ throw new ArgumentException("src.Length / 2 != dst.Length");
+
+ if (dst == null)
+ {
+ dst = new Vec2d[src.Length / 2];
+ }
+
+ for (int i = 0; i < dst.Length; ++i)
+ {
+ dst[i].Item1 = src[i * 2];
+ dst[i].Item2 = src[i * 2 + 1];
+ }
+
+ return dst;
+ }
+
+ ///
+ /// Convert double array to ValueTuple array.
+ ///
+ /// Array of double.
+ /// Array of ValueTuple.
+ /// Array of ValueTuple.
+ public static (double x, double y)[] ConvertArrayToValueTupleArray(double[] src, (double x, double y)[] dst = null)
+ {
+ if (src == null)
+ throw new ArgumentNullException(nameof(src));
+
+ if (dst != null && src.Length / 2 != dst.Length)
+ throw new ArgumentException("src.Length / 2 != dst.Length");
+
+ if (dst == null)
+ {
+ dst = new (double x, double y)[src.Length / 2];
+ }
+
+ for (int i = 0; i < dst.Length; ++i)
+ {
+ dst[i].Item1 = src[i * 2];
+ dst[i].Item2 = src[i * 2 + 1];
+ }
+
+ return dst;
+ }
+
+ ///
+ /// Convert Vec2d array to Point list.
+ ///
+ /// Array of Vec2d.
+ /// List of Point.
+ /// List of Point.
+ public static List<Point> ConvertVec2dArrayToPointList(in Vec2d[] src, List<Point> dst = null)
+ {
+ if (src == null)
+ throw new ArgumentNullException(nameof(src));
+
+ if (dst == null)
+ {
+ dst = new List<Point>();
+ }
+
+ if (dst.Count != src.Length)
+ {
+ dst.Clear();
+ for (int i = 0; i < src.Length; i++)
+ {
+ dst.Add(new Point());
+ }
+ }
+
+ for (int i = 0; i < src.Length; ++i)
+ {
+ dst[i].x = src[i].Item1;
+ dst[i].y = src[i].Item2;
+ }
+
+ return dst;
+ }
+
+ ///
+ /// Convert Vec2d array to Point array.
+ ///
+ /// Array of Vec2d.
+ /// Array of Point.
+ /// Array of Point.
+ public static Point[] ConvertVec2dArrayToPointArray(in Vec2d[] src, Point[] dst = null)
+ {
+ if (src == null)
+ throw new ArgumentNullException(nameof(src));
+
+ if (dst != null && src.Length != dst.Length)
+ throw new ArgumentException("src.Count != dst.Length");
+
+ if (dst == null)
+ {
+ dst = new Point[src.Length];
+ }
+
+ for (int i = 0; i < src.Length; ++i)
+ {
+ dst[i] = new Point(src[i].Item1, src[i].Item2);
+ }
+
+ return dst;
+ }
+
+ ///
+ /// Convert Vec2d array to double array.
+ ///
+ /// Array of Vec2d.
+ /// Array of double.
+ /// Array of double.
+ public static double[] ConvertVec2dArrayToArray(in Vec2d[] src, double[] dst = null)
+ {
+ if (src == null)
+ throw new ArgumentNullException(nameof(src));
+
+ if (dst != null && src.Length * 2 != dst.Length)
+ throw new ArgumentException("src.Count * 2 != dst.Length");
+
+ if (dst == null)
+ {
+ dst = new double[src.Length * 2];
+ }
+
+ for (int i = 0; i < src.Length; ++i)
+ {
+ dst[i * 2] = src[i].Item1;
+ dst[i * 2 + 1] = src[i].Item2;
+ }
+
+ return dst;
+ }
+
+ ///
+ /// Convert Vec2d array to ValueTuple array.
+ ///
+ /// Array of Vec2d.
+ /// Array of ValueTuple.
+ /// Array of ValueTuple.
+ public static (double x, double y)[] ConvertVec2dArrayToValueTupleArray(in Vec2d[] src, (double x, double y)[] dst = null)
+ {
+ if (src == null)
+ throw new ArgumentNullException(nameof(src));
+
+ if (dst != null && src.Length != dst.Length)
+ throw new ArgumentException("src.Count != dst.Length");
+
+ if (dst == null)
+ {
+ dst = new (double x, double y)[src.Length];
+ }
+
+ for (int i = 0; i < src.Length; ++i)
+ {
+ dst[i].Item1 = src[i].Item1;
+ dst[i].Item2 = src[i].Item2;
+ }
+
+ return dst;
+ }
+
+ ///
+ /// Convert ValueTuple array to Point list.
+ ///
+ /// Array of ValueTuple.
+ /// List of Point.
+ /// List of Point.
+ public static List<Point> ConvertValueTupleArrayToPointList(in (double x, double y)[] src, List<Point> dst = null)
+ {
+ if (src == null)
+ throw new ArgumentNullException(nameof(src));
+
+ if (dst == null)
+ {
+ dst = new List<Point>();
+ }
+
+ if (dst.Count != src.Length)
+ {
+ dst.Clear();
+ for (int i = 0; i < src.Length; i++)
+ {
+ dst.Add(new Point());
+ }
+ }
+
+ for (int i = 0; i < src.Length; ++i)
+ {
+ dst[i].x = src[i].Item1;
+ dst[i].y = src[i].Item2;
+ }
+
+ return dst;
+ }
+
+ ///
+ /// Convert ValueTuple array to Point array.
+ ///
+ /// Array of ValueTuple.
+ /// Array of Point.
+ /// Array of Point.
+ public static Point[] ConvertValueTupleArrayToPointArray(in (double x, double y)[] src, Point[] dst = null)
+ {
+ if (src == null)
+ throw new ArgumentNullException(nameof(src));
+
+ if (dst != null && src.Length != dst.Length)
+ throw new ArgumentException("src.Count != dst.Length");
+
+ if (dst == null)
+ {
+ dst = new Point[src.Length];
+ }
+
+ for (int i = 0; i < src.Length; ++i)
+ {
+ dst[i] = new Point(src[i].Item1, src[i].Item2);
+ }
+
+ return dst;
+ }
+
+ ///
+ /// Convert ValueTuple array to double array.
+ ///
+ /// Array of ValueTuple.
+ /// Array of double.
+ /// Array of double.
+ public static double[] ConvertValueTupleArrayToArray(in (double x, double y)[] src, double[] dst = null)
+ {
+ if (src == null)
+ throw new ArgumentNullException(nameof(src));
+
+ if (dst != null && src.Length * 2 != dst.Length)
+ throw new ArgumentException("src.Count * 2 != dst.Length");
+
+ if (dst == null)
+ {
+ dst = new double[src.Length * 2];
+ }
+
+ for (int i = 0; i < src.Length; ++i)
+ {
+ dst[i * 2] = src[i].Item1;
+ dst[i * 2 + 1] = src[i].Item2;
+ }
+
+ return dst;
+ }
+
+ ///
+ /// Convert ValueTuple array to Vec2d array.
+ ///
+ /// Array of ValueTuple.
+ /// Array of Vec2d.
+ /// Array of Vec2d.
+ public static Vec2d[] ConvertValueTupleArrayToVec2dArray(in (double x, double y)[] src, Vec2d[] dst = null)
+ {
+ if (src == null)
+ throw new ArgumentNullException(nameof(src));
+
+ if (dst != null && src.Length != dst.Length)
+ throw new ArgumentException("src.Count != dst.Length");
+
+ if (dst == null)
+ {
+ dst = new Vec2d[src.Length];
+ }
+
+ for (int i = 0; i < src.Length; ++i)
+ {
+ dst[i].Item1 = src[i].x;
+ dst[i].Item2 = src[i].y;
+ }
+
+ return dst;
+ }
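+
+ // Roundtrip sketch (illustrative only; not part of the original utility class): Vec2d[] and
+ // (double x, double y)[] carry the same data, so converting through the helpers above is
+ // lossless for an even-length [x, y, ...] array. Passing a matching-length "dst" avoids
+ // allocating a new array.
+ private static double[] RoundtripSketch(double[] flatPoints)
+ {
+ Vec2d[] pairs = ConvertArrayToVec2dArray(flatPoints);
+ (double x, double y)[] tuples = ConvertVec2dArrayToValueTupleArray(pairs);
+ Vec2d[] pairsAgain = ConvertValueTupleArrayToVec2dArray(tuples, new Vec2d[tuples.Length]);
+ return ConvertVec2dArrayToArray(pairsAgain); // element-wise equal to flatPoints
+ }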
}
}
diff --git a/README.md b/README.md
index 47bd7c2..187d425 100644
--- a/README.md
+++ b/README.md
@@ -8,12 +8,11 @@
## Environment
* HoloLens1 10.0.17763.316 / HoloLens2 22621.1399
* Windows 10 SDK 10.0.19041.0 / 10.0.22621.0
-* Unity 2019.4.40f1 (Built-in Render Pipeline / LegacyXR / MRTK 2.8.3 / DirectX 11 / Visual Studio 2019 MSVC v142)
* Unity 2021.3.35f1 (Built-in Render Pipeline / OpenXR 1.11.1 / MRTK 2.8.3 / DirectX 11 / Visual Studio 2022 MSVC v143)
* Unity 2021.3.35f1 (Built-in Render Pipeline / OpenXR 1.11.1 / MRTK 3.2.2 / DirectX 11 / Visual Studio 2022 MSVC v143)
* [Mixed Reality Feature Tool](https://learn.microsoft.com/en-us/windows/mixed-reality/develop/unity/welcome-to-mr-feature-tool)
-* [OpenCV for Unity](https://assetstore.unity.com/packages/tools/integration/opencv-for-unity-21088?aid=1011l4ehR) 2.6.3+
-* [Dlib FaceLandmarkDetector](https://assetstore.unity.com/packages/tools/integration/dlib-facelandmark-detector-64314?aid=1011l4ehR) 1.4.0+
+* [OpenCV for Unity](https://assetstore.unity.com/packages/tools/integration/opencv-for-unity-21088?aid=1011l4ehR) 2.6.4+
+* [Dlib FaceLandmarkDetector](https://assetstore.unity.com/packages/tools/integration/dlib-facelandmark-detector-64314?aid=1011l4ehR) 1.4.1+
* [EnoxSoftware/HoloLensCameraStream](https://github.com/EnoxSoftware/HoloLensCameraStream)
@@ -49,23 +48,6 @@
* [Known issues in Unity versions and packages](https://learn.microsoft.com/en-us/windows/mixed-reality/develop/unity/known-issues)
-## Setup (HoloLens1 / Unity 2019 (Built-in Render Pipeline / LegacyXR / MRTK 2 / DirectX 11 / Visual Studio 2019))
-1. Download the latest release unitypackage.
-1. Create a new project. (`HoloLensWithDlibFaceLandmarkDetectorExample`)
-1. Import the Microsoft Mixed Reality Toolkit.
- * Add MRTK2 (Mixed Reality Toolkit - Mixed Reality Toolkit Foundation) to the project using "Mixed Reality Feature Tool".
- * Follow the MRTK2 configuration dialog to set up the project. (XR System: Legacy Built-in XR)
-1. Import the OpenCVForUnity.
-1. Import the DlibFaceLandmarkDetector.
-1. Import the HoloLensCameraStream.
-1. Import the HoloLensWithDlibFaceLandmarkDetectorExampleMRTK2.unitypackage.
-1. Add the "Assets/HoloLensWithDlibFaceLandmarkDetectorExample/*.unity" files to the "Scenes In Build" list in the "Build Settings" window.
-1. Configure settings in the "Project Settings" window.
- * Add `BUILTIN_XR` to the list of Define Symbols.
- * Enable `WebCam` Capabilties in Publishing settings tab.
-1. Build the project; open it in VS2019, set the deployment target to `x86` and deploy it to the Hololens1 actual device.
-
-
## Setup (Unity 2021 (Built-in Render Pipeline / OpenXR / MRTK 2 / DirectX 11 / Visual Studio 2022))
1. Download the latest release unitypackage.
1. Create a new project. (`HoloLensWithDlibFaceLandmarkDetectorExample`)