// FaceRecognizerSFExample.cs
  1. #if !UNITY_WSA_10_0
  2. using OpenCVForUnity.CoreModule;
  3. using OpenCVForUnity.ImgcodecsModule;
  4. using OpenCVForUnity.ImgprocModule;
  5. using OpenCVForUnity.ObjdetectModule;
  6. using OpenCVForUnity.UnityUtils;
  7. using System;
  8. using System.Collections;
  9. using UnityEngine;
  10. using UnityEngine.SceneManagement;
  11. using Rect = OpenCVForUnity.CoreModule.Rect;
  12. namespace OpenCVForUnityExample
  13. {
  14. /// <summary>
  15. /// FaceRecognizerSF Example
  16. /// An example of human face recognition using the FaceRecognizerSF class.
  17. /// https://github.com/opencv/opencv/blob/master/samples/dnn/face_detect.cpp
  18. /// https://docs.opencv.org/4.5.4/d0/dd4/tutorial_dnn_face.html
  19. /// </summary>
  20. public class FaceRecognizerSFExample : MonoBehaviour
  21. {
  22. /// <summary>
  23. /// Filter out faces of score < score_threshold.
  24. /// </summary>
  25. float scoreThreshold = 0.9f;
  26. /// <summary>
  27. /// Suppress bounding boxes of iou >= nms_threshold
  28. /// </summary>
  29. float nmsThreshold = 0.3f;
  30. /// <summary>
  31. /// Keep top_k bounding boxes before NMS.
  32. /// </summary>
  33. int topK = 5000;
  34. /// <summary>
  35. /// FD_MODEL_FILENAME
  36. /// </summary>
  37. protected static readonly string FD_MODEL_FILENAME = "OpenCVForUnity/objdetect/face_detection_yunet_2023mar.onnx";
  38. /// <summary>
  39. /// The fd model filepath.
  40. /// </summary>
  41. string fd_model_filepath;
  42. /// <summary>
  43. /// The cosine similar thresh.
  44. /// </summary>
  45. double cosine_similar_thresh = 0.363;
  46. /// <summary>
  47. /// The l2norm similar thresh.
  48. /// </summary>
  49. double l2norm_similar_thresh = 1.128;
  50. /// <summary>
  51. /// SF_MODEL_FILENAME
  52. /// </summary>
  53. protected static readonly string SF_MODEL_FILENAME = "OpenCVForUnity/objdetect/face_recognition_sface_2021dec.onnx";
  54. /// <summary>
  55. /// The sf model filepath.
  56. /// </summary>
  57. string sf_model_filepath;
  58. /// <summary>
  59. /// IMAGE_0_FILENAME
  60. /// </summary>
  61. protected static readonly string IMAGE_0_FILENAME = "OpenCVForUnity/face/facerec_0.bmp";
  62. /// <summary>
  63. /// The image 0 filepath.
  64. /// </summary>
  65. string image_0_filepath;
  66. /// <summary>
  67. /// IMAGE_1_FILENAME
  68. /// </summary>
  69. protected static readonly string IMAGE_1_FILENAME = "OpenCVForUnity/face/facerec_1.bmp";
  70. /// <summary>
  71. /// The image 1 filepath.
  72. /// </summary>
  73. string image_1_filepath;
  74. /// <summary>
  75. /// SAMPLE_IMAGE_FILENAME
  76. /// </summary>
  77. protected static readonly string SAMPLE_IMAGE_FILENAME = "OpenCVForUnity/face/facerec_sample.bmp";
  78. /// <summary>
  79. /// The sample image filepath.
  80. /// </summary>
  81. string sample_image_filepath;
  82. #if UNITY_WEBGL
  83. IEnumerator getFilePath_Coroutine;
  84. #endif
  85. // Use this for initialization
  86. void Start()
  87. {
  88. #if UNITY_WEBGL
  89. getFilePath_Coroutine = GetFilePath ();
  90. StartCoroutine (getFilePath_Coroutine);
  91. #else
  92. fd_model_filepath = Utils.getFilePath(FD_MODEL_FILENAME);
  93. sf_model_filepath = Utils.getFilePath(SF_MODEL_FILENAME);
  94. image_0_filepath = Utils.getFilePath(IMAGE_0_FILENAME);
  95. image_1_filepath = Utils.getFilePath(IMAGE_1_FILENAME);
  96. sample_image_filepath = Utils.getFilePath(SAMPLE_IMAGE_FILENAME);
  97. Run();
  98. #endif
  99. }
  100. #if UNITY_WEBGL
  101. private IEnumerator GetFilePath()
  102. {
  103. var getFilePathAsync_fd_Coroutine = Utils.getFilePathAsync(FD_MODEL_FILENAME, (result) => {
  104. fd_model_filepath = result;
  105. });
  106. yield return getFilePathAsync_fd_Coroutine;
  107. var getFilePathAsync_sf_Coroutine = Utils.getFilePathAsync(SF_MODEL_FILENAME, (result) => {
  108. sf_model_filepath = result;
  109. });
  110. yield return getFilePathAsync_sf_Coroutine;
  111. var getFilePathAsync_0_Coroutine = Utils.getFilePathAsync (IMAGE_0_FILENAME, (result) => {
  112. image_0_filepath = result;
  113. });
  114. yield return getFilePathAsync_0_Coroutine;
  115. var getFilePathAsync_1_Coroutine = Utils.getFilePathAsync (IMAGE_1_FILENAME, (result) => {
  116. image_1_filepath = result;
  117. });
  118. yield return getFilePathAsync_1_Coroutine;
  119. var getFilePathAsync_sample_Coroutine = Utils.getFilePathAsync (SAMPLE_IMAGE_FILENAME, (result) => {
  120. sample_image_filepath = result;
  121. });
  122. yield return getFilePathAsync_sample_Coroutine;
  123. getFilePath_Coroutine = null;
  124. Run ();
  125. }
  126. #endif
  127. private void Run()
  128. {
  129. //if true, The error log of the Native side OpenCV will be displayed on the Unity Editor Console.
  130. Utils.setDebugMode(true);
  131. if (string.IsNullOrEmpty(image_0_filepath) || string.IsNullOrEmpty(image_1_filepath) || string.IsNullOrEmpty(sample_image_filepath))
  132. {
  133. Debug.LogError(IMAGE_0_FILENAME + " or " + IMAGE_1_FILENAME + " or " + SAMPLE_IMAGE_FILENAME + " is not loaded. Please move from “OpenCVForUnity/StreamingAssets/OpenCVForUnity/” to “Assets/StreamingAssets/OpenCVForUnity/” folder.");
  134. }
  135. Mat testSampleMat = Imgcodecs.imread(sample_image_filepath, Imgcodecs.IMREAD_COLOR);
  136. Mat image0Mat = Imgcodecs.imread(image_0_filepath, Imgcodecs.IMREAD_COLOR);
  137. Mat image1Mat = Imgcodecs.imread(image_1_filepath, Imgcodecs.IMREAD_COLOR);
  138. int imageSizeW = 112;
  139. int imageSizeH = 112;
  140. Mat resultMat = new Mat(imageSizeH * 2, imageSizeW * 2, CvType.CV_8UC3, new Scalar(127, 127, 127, 255));
  141. if (string.IsNullOrEmpty(fd_model_filepath) || string.IsNullOrEmpty(sf_model_filepath))
  142. {
  143. Debug.LogError(FD_MODEL_FILENAME + " or " + SF_MODEL_FILENAME + " is not loaded. Please read “StreamingAssets/OpenCVForUnity/objdetect/setup_objdetect_module.pdf” to make the necessary setup.");
  144. Imgproc.putText(resultMat, "model file is not loaded.", new Point(5, resultMat.rows() - 30), Imgproc.FONT_HERSHEY_SIMPLEX, 0.7, new Scalar(255, 255, 255), 2, Imgproc.LINE_AA, false);
  145. Imgproc.putText(resultMat, "Please read console message.", new Point(5, resultMat.rows() - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 0.7, new Scalar(255, 255, 255), 2, Imgproc.LINE_AA, false);
  146. }
  147. else
  148. {
  149. FaceDetectorYN faceDetector = FaceDetectorYN.create(fd_model_filepath, "", new Size(imageSizeW, imageSizeH), scoreThreshold, nmsThreshold, topK);
  150. FaceRecognizerSF faceRecognizer = FaceRecognizerSF.create(sf_model_filepath, "");
  151. // Detect faces.
  152. faceDetector.setInputSize(testSampleMat.size());
  153. Mat faces_sample = new Mat();
  154. faceDetector.detect(testSampleMat, faces_sample);
  155. if (faces_sample.rows() < 1)
  156. {
  157. Debug.Log("Cannot find a face in " + SAMPLE_IMAGE_FILENAME + ".");
  158. }
  159. faceDetector.setInputSize(image0Mat.size());
  160. Mat faces_0 = new Mat();
  161. faceDetector.detect(image0Mat, faces_0);
  162. if (faces_0.rows() < 1)
  163. {
  164. Debug.Log("Cannot find a face in " + IMAGE_0_FILENAME + ".");
  165. }
  166. faceDetector.setInputSize(image1Mat.size());
  167. Mat faces_1 = new Mat();
  168. faceDetector.detect(image1Mat, faces_1);
  169. if (faces_1.rows() < 1)
  170. {
  171. Debug.Log("Cannot find a face in " + IMAGE_1_FILENAME + ".");
  172. }
  173. /*
  174. // Draw results on the input image.
  175. DrawDetection(faces_sample.row(0),testSampleMat);
  176. DrawDetection(faces_0.row(0), image0Mat);
  177. DrawDetection(faces_1.row(0), image1Mat);
  178. */
  179. // Aligning and cropping facial image through the first face of faces detected.
  180. Mat aligned_face_sample = new Mat();
  181. faceRecognizer.alignCrop(testSampleMat, faces_sample.row(0), aligned_face_sample);
  182. Mat aligned_face_0 = new Mat();
  183. faceRecognizer.alignCrop(image0Mat, faces_0.row(0), aligned_face_0);
  184. Mat aligned_face_1 = new Mat();
  185. faceRecognizer.alignCrop(image1Mat, faces_1.row(0), aligned_face_1);
  186. // Run feature extraction with given aligned_face.
  187. Mat feature_sample = new Mat();
  188. faceRecognizer.feature(aligned_face_sample, feature_sample);
  189. feature_sample = feature_sample.clone();
  190. Mat feature_0 = new Mat();
  191. faceRecognizer.feature(aligned_face_0, feature_0);
  192. feature_0 = feature_0.clone();
  193. Mat feature_1 = new Mat();
  194. faceRecognizer.feature(aligned_face_1, feature_1);
  195. feature_1 = feature_1.clone();
  196. // Match the face features.
  197. bool sample_0_match = Match(faceRecognizer, feature_sample, feature_0);
  198. bool sample_1_match = Match(faceRecognizer, feature_sample, feature_1);
  199. Debug.Log("Match(" + SAMPLE_IMAGE_FILENAME + ", " + IMAGE_0_FILENAME + "): " + sample_0_match);
  200. Debug.Log("Match(" + SAMPLE_IMAGE_FILENAME + ", " + IMAGE_1_FILENAME + "): " + sample_1_match);
  201. // Draw the recognition result.
  202. aligned_face_sample.copyTo(resultMat.submat(new Rect(new Point(imageSizeW / 2, 0), aligned_face_sample.size())));
  203. aligned_face_0.copyTo(resultMat.submat(new Rect(new Point(0, imageSizeH), aligned_face_0.size())));
  204. aligned_face_1.copyTo(resultMat.submat(new Rect(new Point(imageSizeW, imageSizeH), aligned_face_1.size())));
  205. Imgproc.putText(resultMat, "TestSample", new Point(imageSizeW / 2 + 5, 15), Imgproc.FONT_HERSHEY_SIMPLEX, 0.4, new Scalar(255, 255, 255, 255), 1, Imgproc.LINE_AA, false);
  206. Imgproc.putText(resultMat, "Image0", new Point(5, imageSizeH + 15), Imgproc.FONT_HERSHEY_SIMPLEX, 0.4, new Scalar(255, 255, 255, 255), 1, Imgproc.LINE_AA, false);
  207. Imgproc.putText(resultMat, "Image1", new Point(imageSizeW + 5, imageSizeH + 15), Imgproc.FONT_HERSHEY_SIMPLEX, 0.4, new Scalar(255, 255, 255, 255), 1, Imgproc.LINE_AA, false);
  208. if (sample_0_match)
  209. Imgproc.rectangle(resultMat, new Rect(0, imageSizeH, imageSizeW, imageSizeH), new Scalar(255, 0, 0, 255), 2);
  210. if (sample_1_match)
  211. Imgproc.rectangle(resultMat, new Rect(imageSizeW, imageSizeH, imageSizeW, imageSizeH), new Scalar(255, 0, 0, 255), 2);
  212. }
  213. Texture2D texture = new Texture2D(resultMat.cols(), resultMat.rows(), TextureFormat.RGB24, false);
  214. Utils.matToTexture2D(resultMat, texture);
  215. gameObject.GetComponent<Renderer>().material.mainTexture = texture;
  216. Utils.setDebugMode(false);
  217. }
  218. // Update is called once per frame
  219. void Update()
  220. {
  221. }
  222. /// <summary>
  223. /// Raises the destroy event.
  224. /// </summary>
  225. void OnDestroy()
  226. {
  227. #if UNITY_WEBGL
  228. if (getFilePath_Coroutine != null) {
  229. StopCoroutine (getFilePath_Coroutine);
  230. ((IDisposable)getFilePath_Coroutine).Dispose ();
  231. }
  232. #endif
  233. }
  234. /// <summary>
  235. /// Raises the back button click event.
  236. /// </summary>
  237. public void OnBackButtonClick()
  238. {
  239. SceneManager.LoadScene("OpenCVForUnityExample");
  240. }
  241. protected void DrawDetection(Mat d, Mat frame)
  242. {
  243. float[] buf = new float[15];
  244. d.get(0, 0, buf);
  245. Scalar color = new Scalar(255, 255, 0, 255);
  246. Imgproc.rectangle(frame, new Point(buf[0], buf[1]), new Point(buf[0] + buf[2], buf[1] + buf[3]), color, 2);
  247. Imgproc.circle(frame, new Point(buf[4], buf[5]), 2, color, 2);
  248. Imgproc.circle(frame, new Point(buf[6], buf[7]), 2, color, 2);
  249. Imgproc.circle(frame, new Point(buf[8], buf[9]), 2, color, 2);
  250. Imgproc.circle(frame, new Point(buf[10], buf[11]), 2, color, 2);
  251. Imgproc.circle(frame, new Point(buf[12], buf[13]), 2, color, 2);
  252. }
  253. protected bool Match(FaceRecognizerSF faceRecognizer, Mat feature1, Mat feature2)
  254. {
  255. double cos_score = faceRecognizer.match(feature1, feature2, FaceRecognizerSF.FR_COSINE);
  256. bool cos_match = (cos_score >= cosine_similar_thresh);
  257. Debug.Log((cos_match ? "They have the same identity;" : "They have different identities;") + "\n"
  258. + " Cosine Similarity: " + cos_score + ", threshold: " + cosine_similar_thresh + ". (higher value means higher similarity, max 1.0)");
  259. double L2_score = faceRecognizer.match(feature1, feature2, FaceRecognizerSF.FR_NORM_L2);
  260. bool L2_match = (L2_score <= l2norm_similar_thresh);
  261. Debug.Log((L2_match ? "They have the same identity;" : "They have different identities;") + "\n"
  262. + " NormL2 Distance: " + L2_score + ", threshold: " + l2norm_similar_thresh + ". (lower value means higher similarity, min 0.0)");
  263. return cos_match && L2_match;
  264. }
  265. }
  266. }
  267. #endif