AsynchronousFaceDetectionWebCamTextureExample.cs

using UnityEngine;
using UnityEngine.SceneManagement;
using System;
using System.Collections;
using System.Collections.Generic;
using System.Threading;
using OpenCVForUnity.CoreModule;
using OpenCVForUnity.ObjdetectModule;
using OpenCVForUnity.ImgprocModule;
using OpenCVForUnity.UnityUtils;
using OpenCVForUnity.UnityUtils.Helper;
using Rect = OpenCVForUnity.CoreModule.Rect;
using PositionsVector = System.Collections.Generic.List<OpenCVForUnity.CoreModule.Rect>;

namespace OpenCVForUnityExample
{
    /// <summary>
    /// Asynchronous Face Detection WebCamTexture Example
    /// Referring to https://github.com/Itseez/opencv/blob/master/modules/objdetect/src/detection_based_tracker.cpp.
    /// </summary>
    [RequireComponent (typeof(WebCamTextureToMatHelper))]
    public class AsynchronousFaceDetectionWebCamTextureExample : MonoBehaviour
    {
        /// <summary>
        /// The gray mat.
        /// </summary>
        Mat grayMat;

        /// <summary>
        /// The texture.
        /// </summary>
        Texture2D texture;

        /// <summary>
        /// The webcam texture to mat helper.
        /// </summary>
        WebCamTextureToMatHelper webCamTextureToMatHelper;

        /// <summary>
        /// The cascade classifier used on the main thread.
        /// </summary>
        CascadeClassifier cascade;

        /// <summary>
        /// The lbpcascade_frontalface.xml file path.
        /// </summary>
        string lbpcascade_frontalface_xml_filepath;

        /// <summary>
        /// The haarcascade_frontalface_alt.xml file path.
        /// </summary>
        string haarcascade_frontalface_alt_xml_filepath;

        /// <summary>
        /// The rectangles of regions where objects are expected.
        /// </summary>
        Rect[] rectsWhereRegions;

        /// <summary>
        /// The objects detected in those regions.
        /// </summary>
        List<Rect> detectedObjectsInRegions = new List<Rect> ();

        /// <summary>
        /// The result objects.
        /// </summary>
        List<Rect> resultObjects = new List<Rect> ();

        // for Thread
        CascadeClassifier cascade4Thread;
        Mat grayMat4Thread;
        MatOfRect detectionResult;
        System.Object sync = new System.Object ();

        bool _isThreadRunning = false;

        bool isThreadRunning {
            get {
                lock (sync)
                    return _isThreadRunning;
            }
            set {
                lock (sync)
                    _isThreadRunning = value;
            }
        }

        bool _shouldStopThread = false;

        bool shouldStopThread {
            get {
                lock (sync)
                    return _shouldStopThread;
            }
            set {
                lock (sync)
                    _shouldStopThread = value;
            }
        }

        bool _shouldDetectInMultiThread = false;

        bool shouldDetectInMultiThread {
            get {
                lock (sync)
                    return _shouldDetectInMultiThread;
            }
            set {
                lock (sync)
                    _shouldDetectInMultiThread = value;
            }
        }

        bool _didUpdateTheDetectionResult = false;

        bool didUpdateTheDetectionResult {
            get {
                lock (sync)
                    return _didUpdateTheDetectionResult;
            }
            set {
                lock (sync)
                    _didUpdateTheDetectionResult = value;
            }
        }
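
        // NOTE: Each flag above wraps a plain bool in a lock on the shared 'sync'
        // object so that writes made on the worker thread become visible to the
        // main thread and vice versa. The lock-per-access pattern follows the
        // original example; an equivalent lighter-weight sketch (an alternative,
        // not what this file uses) would be to declare the backing fields
        // 'volatile' or flip them with Interlocked.Exchange.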
        /// <summary>
        /// The FPS monitor.
        /// </summary>
        FpsMonitor fpsMonitor;

        // for tracker
        List<TrackedObject> trackedObjects = new List<TrackedObject> ();
        List<float> weightsPositionsSmoothing = new List<float> ();
        List<float> weightsSizesSmoothing = new List<float> ();
        Parameters parameters;
        InnerParameters innerParameters;

#if UNITY_WEBGL && !UNITY_EDITOR
        IEnumerator getFilePath_Coroutine;
#endif

        // Use this for initialization
        void Start ()
        {
            fpsMonitor = GetComponent<FpsMonitor> ();

            webCamTextureToMatHelper = gameObject.GetComponent<WebCamTextureToMatHelper> ();

#if UNITY_WEBGL && !UNITY_EDITOR
            getFilePath_Coroutine = GetFilePath ();
            StartCoroutine (getFilePath_Coroutine);
#else
            lbpcascade_frontalface_xml_filepath = Utils.getFilePath ("lbpcascade_frontalface.xml");
            haarcascade_frontalface_alt_xml_filepath = Utils.getFilePath ("haarcascade_frontalface_alt.xml");
            Run ();
#endif
        }

#if UNITY_WEBGL && !UNITY_EDITOR
        private IEnumerator GetFilePath ()
        {
            var getFilePathAsync_lbpcascade_frontalface_xml_filepath_Coroutine = Utils.getFilePathAsync ("lbpcascade_frontalface.xml", (result) => {
                lbpcascade_frontalface_xml_filepath = result;
            });
            yield return getFilePathAsync_lbpcascade_frontalface_xml_filepath_Coroutine;

            var getFilePathAsync_haarcascade_frontalface_alt_xml_filepath_Coroutine = Utils.getFilePathAsync ("haarcascade_frontalface_alt.xml", (result) => {
                haarcascade_frontalface_alt_xml_filepath = result;
            });
            yield return getFilePathAsync_haarcascade_frontalface_alt_xml_filepath_Coroutine;

            getFilePath_Coroutine = null;

            Run ();
        }
#endif

        private void Run ()
        {
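            // Smoothing kernels for the displayed rectangles, ordered most recent
            // first: the position uses only the latest center (a single weight of 1),
            // while the size is averaged over the last three detections
            // (0.5 + 0.3 + 0.2 = 1). See CalcTrackedObjectPositionToShow.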
            weightsPositionsSmoothing.Add (1);
            weightsSizesSmoothing.Add (0.5f);
            weightsSizesSmoothing.Add (0.3f);
            weightsSizesSmoothing.Add (0.2f);

            //parameters.minObjectSize = 96;
            //parameters.maxObjectSize = int.MaxValue;
            //parameters.scaleFactor = 1.1f;
            //parameters.minNeighbors = 2;
            parameters.maxTrackLifetime = 5;

            innerParameters.numLastPositionsToTrack = 4;
            innerParameters.numStepsToWaitBeforeFirstShow = 6;
            innerParameters.numStepsToTrackWithoutDetectingIfObjectHasNotBeenShown = 3;
            innerParameters.numStepsToShowWithoutDetecting = 3;
            innerParameters.coeffTrackingWindowSize = 2.0f;
            innerParameters.coeffObjectSizeToTrack = 0.85f;
            innerParameters.coeffObjectSpeedUsingInPrediction = 0.8f;

#if UNITY_ANDROID && !UNITY_EDITOR
            // Avoids the front camera low light issue that occurs in only some Android devices (e.g. Google Pixel, Pixel2).
            webCamTextureToMatHelper.avoidAndroidFrontCameraLowLightIssue = true;
#endif
            webCamTextureToMatHelper.Initialize ();
        }

        /// <summary>
        /// Raises the webcam texture to mat helper initialized event.
        /// </summary>
        public void OnWebCamTextureToMatHelperInitialized ()
        {
            Debug.Log ("OnWebCamTextureToMatHelperInitialized");

            Mat webCamTextureMat = webCamTextureToMatHelper.GetMat ();

            texture = new Texture2D (webCamTextureMat.cols (), webCamTextureMat.rows (), TextureFormat.RGBA32, false);

            gameObject.GetComponent<Renderer> ().material.mainTexture = texture;

            gameObject.transform.localScale = new Vector3 (webCamTextureMat.cols (), webCamTextureMat.rows (), 1);
            Debug.Log ("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

            if (fpsMonitor != null) {
                fpsMonitor.Add ("width", webCamTextureMat.width ().ToString ());
                fpsMonitor.Add ("height", webCamTextureMat.height ().ToString ());
                fpsMonitor.Add ("orientation", Screen.orientation.ToString ());
            }
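
            // Fit the camera quad to the screen: compare the screen/image scale on
            // each axis and set the orthographic half-height so the whole image
            // stays visible (letterboxed on the axis with spare room).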
            float width = webCamTextureMat.width ();
            float height = webCamTextureMat.height ();

            float widthScale = (float)Screen.width / width;
            float heightScale = (float)Screen.height / height;
            if (widthScale < heightScale) {
                Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
            } else {
                Camera.main.orthographicSize = height / 2;
            }

            grayMat = new Mat (webCamTextureMat.rows (), webCamTextureMat.cols (), CvType.CV_8UC1);
            cascade = new CascadeClassifier ();
            cascade.load (lbpcascade_frontalface_xml_filepath);
#if !UNITY_WSA_10_0
            if (cascade.empty ()) {
                Debug.LogError ("cascade file is not loaded. Please copy it from “OpenCVForUnity/StreamingAssets/” to the “Assets/StreamingAssets/” folder.");
            }
#endif

            InitThread ();
        }

        /// <summary>
        /// Raises the webcam texture to mat helper disposed event.
        /// </summary>
        public void OnWebCamTextureToMatHelperDisposed ()
        {
            Debug.Log ("OnWebCamTextureToMatHelperDisposed");

#if !UNITY_WEBGL
            StopThread ();
#else
            StopCoroutine ("ThreadWorker");
#endif

            if (grayMat4Thread != null)
                grayMat4Thread.Dispose ();

            if (cascade4Thread != null)
                cascade4Thread.Dispose ();

            if (grayMat != null)
                grayMat.Dispose ();

            if (texture != null) {
                Texture2D.Destroy (texture);
                texture = null;
            }

            if (cascade != null)
                cascade.Dispose ();

            trackedObjects.Clear ();
        }

        /// <summary>
        /// Raises the webcam texture to mat helper error occurred event.
        /// </summary>
        /// <param name="errorCode">Error code.</param>
        public void OnWebCamTextureToMatHelperErrorOccurred (WebCamTextureToMatHelper.ErrorCode errorCode)
        {
            Debug.Log ("OnWebCamTextureToMatHelperErrorOccurred " + errorCode);
        }

        // Update is called once per frame
        void Update ()
        {
            if (webCamTextureToMatHelper.IsPlaying () && webCamTextureToMatHelper.DidUpdateThisFrame ()) {

                Mat rgbaMat = webCamTextureToMatHelper.GetMat ();

                Imgproc.cvtColor (rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);
                Imgproc.equalizeHist (grayMat, grayMat);
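
                // Hand the current gray frame to the detection thread, but only when
                // the thread is idle; the flag acts as a one-slot mailbox between
                // the Update loop (producer) and ThreadWorker (consumer).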
                if (!shouldDetectInMultiThread) {
                    grayMat.copyTo (grayMat4Thread);
                    shouldDetectInMultiThread = true;
                }
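
                // Candidate regions come from one of two sources: fresh full-frame
                // detector output from the worker thread (drawn blue), or positions
                // extrapolated from each tracked object's recent motion (drawn green).
                // The final smoothed results are drawn red further below.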
                OpenCVForUnity.CoreModule.Rect[] rects;
                if (didUpdateTheDetectionResult) {
                    didUpdateTheDetectionResult = false;

                    //Debug.Log("DetectionBasedTracker::process: get _rectsWhereRegions were got from resultDetect");
                    rectsWhereRegions = detectionResult.toArray ();

                    rects = rectsWhereRegions;
                    for (int i = 0; i < rects.Length; i++) {
                        Imgproc.rectangle (rgbaMat, new Point (rects [i].x, rects [i].y), new Point (rects [i].x + rects [i].width, rects [i].y + rects [i].height), new Scalar (0, 0, 255, 255), 2);
                    }
                } else {
                    //Debug.Log("DetectionBasedTracker::process: get _rectsWhereRegions from previous positions");
                    rectsWhereRegions = new Rect[trackedObjects.Count];
                    for (int i = 0; i < trackedObjects.Count; i++) {
                        int n = trackedObjects [i].lastPositions.Count;
                        //if (n <= 0) UnityEngine.Debug.LogError ("n > 0 is false");
                        Rect r = trackedObjects [i].lastPositions [n - 1].clone ();
                        if (r.area () == 0) {
                            Debug.Log ("DetectionBasedTracker::process: ERROR: ATTENTION: strange algorithm's behavior: trackedObjects[i].rect() is empty");
                            continue;
                        }

                        // correction by speed of rectangle
                        if (n > 1) {
                            Point center = CenterRect (r);
                            Point center_prev = CenterRect (trackedObjects [i].lastPositions [n - 2]);
                            Point shift = new Point ((center.x - center_prev.x) * innerParameters.coeffObjectSpeedUsingInPrediction,
                                             (center.y - center_prev.y) * innerParameters.coeffObjectSpeedUsingInPrediction);

                            r.x += (int)Math.Round (shift.x);
                            r.y += (int)Math.Round (shift.y);
                        }

                        rectsWhereRegions [i] = r;
                    }

                    rects = rectsWhereRegions;
                    for (int i = 0; i < rects.Length; i++) {
                        Imgproc.rectangle (rgbaMat, new Point (rects [i].x, rects [i].y), new Point (rects [i].x + rects [i].width, rects [i].y + rects [i].height), new Scalar (0, 255, 0, 255), 2);
                    }
                }

                detectedObjectsInRegions.Clear ();
                if (rectsWhereRegions.Length > 0) {
                    int len = rectsWhereRegions.Length;
                    for (int i = 0; i < len; i++) {
                        DetectInRegion (grayMat, rectsWhereRegions [i], detectedObjectsInRegions);
                    }
                }

                UpdateTrackedObjects (detectedObjectsInRegions);
                GetObjects (resultObjects);

                rects = resultObjects.ToArray ();
                for (int i = 0; i < rects.Length; i++) {
                    //Debug.Log ("detect faces " + rects [i]);
                    Imgproc.rectangle (rgbaMat, new Point (rects [i].x, rects [i].y), new Point (rects [i].x + rects [i].width, rects [i].y + rects [i].height), new Scalar (255, 0, 0, 255), 2);
                }

#if UNITY_WEBGL
                Imgproc.putText (rgbaMat, "WebGL platform does not support multi-threading.", new Point (5, rgbaMat.rows () - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar (255, 255, 255, 255), 1, Imgproc.LINE_AA, false);
#endif

                Utils.fastMatToTexture2D (rgbaMat, texture);
            }
        }
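
        /// <summary>
        /// Re-runs the cascade only inside a window inflated around a previously known
        /// rectangle (by coeffTrackingWindowSize) and clipped to the image bounds; the
        /// biggest object found there is translated back to full-image coordinates and
        /// appended to detectedObjectsInRegions.
        /// </summary>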
        private void DetectInRegion (Mat img, Rect r, List<Rect> detectedObjectsInRegions)
        {
            Rect r0 = new Rect (new Point (), img.size ());
            Rect r1 = new Rect (r.x, r.y, r.width, r.height);
            Rect.inflate (r1, (int)((r1.width * innerParameters.coeffTrackingWindowSize) - r1.width) / 2,
                (int)((r1.height * innerParameters.coeffTrackingWindowSize) - r1.height) / 2);
            r1 = Rect.intersect (r0, r1);

            if (r1 == null || (r1.width <= 0) || (r1.height <= 0)) {
                Debug.Log ("DetectionBasedTracker::detectInRegion: Empty intersection");
                return;
            }

            int d = Math.Min (r.width, r.height);
            d = (int)Math.Round (d * innerParameters.coeffObjectSizeToTrack);

            MatOfRect tmpobjects = new MatOfRect ();
            Mat img1 = new Mat (img, r1); // subimage for rectangle -- without data copying

            cascade.detectMultiScale (img1, tmpobjects, 1.1, 2, 0 | Objdetect.CASCADE_DO_CANNY_PRUNING | Objdetect.CASCADE_SCALE_IMAGE | Objdetect.CASCADE_FIND_BIGGEST_OBJECT, new Size (d, d), new Size ());

            Rect[] tmpobjectsArray = tmpobjects.toArray ();
            int len = tmpobjectsArray.Length;
            for (int i = 0; i < len; i++) {
                Rect tmp = tmpobjectsArray [i];
                Rect curres = new Rect (new Point (tmp.x + r1.x, tmp.y + r1.y), tmp.size ());
                detectedObjectsInRegions.Add (curres);
            }

            // Release the per-call temporaries.
            img1.Dispose ();
            tmpobjects.Dispose ();
        }
        public Point CenterRect (Rect r)
        {
            return new Point (r.x + (r.width / 2), r.y + (r.height / 2));
        }

        private void InitThread ()
        {
            StopThread ();

            grayMat4Thread = new Mat ();

            cascade4Thread = new CascadeClassifier ();
            cascade4Thread.load (haarcascade_frontalface_alt_xml_filepath);
#if !UNITY_WSA_10_0
            if (cascade4Thread.empty ()) {
                Debug.LogError ("cascade4Thread file is not loaded. Please copy it from “OpenCVForUnity/StreamingAssets/” to the “Assets/StreamingAssets/” folder.");
            }
#endif

            shouldDetectInMultiThread = false;

#if !UNITY_WEBGL
            StartThread (ThreadWorker);
#else
            StartCoroutine ("ThreadWorker");
#endif
        }
        private void StartThread (Action action)
        {
            shouldStopThread = false;

#if UNITY_METRO && NETFX_CORE
            System.Threading.Tasks.Task.Run (() => action ());
#elif UNITY_METRO
            action.BeginInvoke (ar => action.EndInvoke (ar), null);
#else
            ThreadPool.QueueUserWorkItem (_ => action ());
#endif
            Debug.Log ("Thread Start");
        }
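
        // StopThread blocks the caller in a spin-wait until the worker observes
        // shouldStopThread and clears isThreadRunning. That matches the original
        // example; a production version might prefer a ManualResetEventSlim or
        // Thread.Join (an alternative sketch, not what this example does).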
        private void StopThread ()
        {
            if (!isThreadRunning)
                return;

            shouldStopThread = true;

            while (isThreadRunning) {
                // Wait for the worker thread to stop.
            }
            Debug.Log ("Thread Stop");
        }
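
        // The non-WebGL worker below also busy-spins ('continue' without sleeping)
        // while waiting for a frame, trading CPU for latency. On WebGL, where
        // threads are unavailable, the same loop runs as a coroutine and yields
        // once per frame instead.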
#if !UNITY_WEBGL
        private void ThreadWorker ()
        {
            isThreadRunning = true;

            while (!shouldStopThread) {
                if (!shouldDetectInMultiThread)
                    continue;

                Detect ();

                shouldDetectInMultiThread = false;
                didUpdateTheDetectionResult = true;
            }

            isThreadRunning = false;
        }
#else
        private IEnumerator ThreadWorker ()
        {
            while (true) {
                while (!shouldDetectInMultiThread) {
                    yield return null;
                }

                Detect ();

                shouldDetectInMultiThread = false;
                didUpdateTheDetectionResult = true;
            }
        }
#endif
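
        // Full-frame detection on the worker's private copy of the gray frame.
        // The minimum object size is 20% of the frame height, which skips small
        // (typically distant or spurious) faces and keeps this pass fast.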
        private void Detect ()
        {
            MatOfRect objects = new MatOfRect ();
            if (cascade4Thread != null)
                cascade4Thread.detectMultiScale (grayMat4Thread, objects, 1.1, 2, Objdetect.CASCADE_SCALE_IMAGE, // TODO: objdetect.CV_HAAR_SCALE_IMAGE
                    new Size (grayMat4Thread.height () * 0.2, grayMat4Thread.height () * 0.2), new Size ());

            //Thread.Sleep(200);

            detectionResult = objects;
        }
        /// <summary>
        /// Raises the destroy event.
        /// </summary>
        void OnDestroy ()
        {
            webCamTextureToMatHelper.Dispose ();

#if UNITY_WEBGL && !UNITY_EDITOR
            if (getFilePath_Coroutine != null) {
                StopCoroutine (getFilePath_Coroutine);
                ((IDisposable)getFilePath_Coroutine).Dispose ();
            }
#endif
        }

        /// <summary>
        /// Raises the back button click event.
        /// </summary>
        public void OnBackButtonClick ()
        {
            SceneManager.LoadScene ("OpenCVForUnityExample");
        }

        /// <summary>
        /// Raises the play button click event.
        /// </summary>
        public void OnPlayButtonClick ()
        {
            webCamTextureToMatHelper.Play ();
        }

        /// <summary>
        /// Raises the pause button click event.
        /// </summary>
        public void OnPauseButtonClick ()
        {
            webCamTextureToMatHelper.Pause ();
        }

        /// <summary>
        /// Raises the stop button click event.
        /// </summary>
        public void OnStopButtonClick ()
        {
            webCamTextureToMatHelper.Stop ();
        }

        /// <summary>
        /// Raises the change camera button click event.
        /// </summary>
        public void OnChangeCameraButtonClick ()
        {
            webCamTextureToMatHelper.requestedIsFrontFacing = !webCamTextureToMatHelper.IsFrontFacing ();
        }
        //
        // tracker
        //
        private void GetObjects (List<Rect> result)
        {
            result.Clear ();

            for (int i = 0; i < trackedObjects.Count; i++) {
                Rect r = CalcTrackedObjectPositionToShow (i);
                if (r.area () == 0) {
                    continue;
                }
                result.Add (r);
                //LOGD("DetectionBasedTracker::process: found a object with SIZE %d x %d, rect={%d, %d, %d x %d}", r.width, r.height, r.x, r.y, r.width, r.height);
            }
        }

        private enum TrackedState : int
        {
            NEW_RECTANGLE = -1,
            INTERSECTED_RECTANGLE = -2
        }
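
        /// <summary>
        /// Matches each detected rectangle to the tracked object whose last position
        /// overlaps it with the largest intersection area, marks detections that merely
        /// intersect an already-claimed rectangle as auxiliary, promotes unmatched
        /// detections to new tracked objects, and finally drops objects that have gone
        /// undetected for longer than maxTrackLifetime (ported from OpenCV's
        /// DetectionBasedTracker).
        /// </summary>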
        private void UpdateTrackedObjects (List<Rect> detectedObjects)
        {
            int N1 = (int)trackedObjects.Count;
            int N2 = (int)detectedObjects.Count;

            for (int i = 0; i < N1; i++) {
                trackedObjects [i].numDetectedFrames++;
            }

            int[] correspondence = new int[N2];
            for (int i = 0; i < N2; i++) {
                correspondence [i] = (int)TrackedState.NEW_RECTANGLE;
            }

            for (int i = 0; i < N1; i++) {
                TrackedObject curObject = trackedObjects [i];

                int bestIndex = -1;
                int bestArea = -1;

                int numpositions = (int)curObject.lastPositions.Count;
                //if (numpositions <= 0) UnityEngine.Debug.LogError ("numpositions > 0 is false");
                Rect prevRect = curObject.lastPositions [numpositions - 1];

                for (int j = 0; j < N2; j++) {
                    if (correspondence [j] >= 0) {
                        //Debug.Log("DetectionBasedTracker::updateTrackedObjects: j=" + j + " is rejected, because it has correspondence=" + correspondence[j]);
                        continue;
                    }
                    if (correspondence [j] != (int)TrackedState.NEW_RECTANGLE) {
                        //Debug.Log("DetectionBasedTracker::updateTrackedObjects: j=" + j + " is rejected, because it is intersected with another rectangle");
                        continue;
                    }

                    Rect r = Rect.intersect (prevRect, detectedObjects [j]);
                    if (r != null && (r.width > 0) && (r.height > 0)) {
                        //LOGD("DetectionBasedTracker::updateTrackedObjects: There is intersection between prevRect and detectedRect, r={%d, %d, %d x %d}",
                        //    r.x, r.y, r.width, r.height);
                        correspondence [j] = (int)TrackedState.INTERSECTED_RECTANGLE;

                        if (r.area () > bestArea) {
                            //LOGD("DetectionBasedTracker::updateTrackedObjects: The area of intersection is %d, it is better than bestArea=%d", r.area(), bestArea);
                            bestIndex = j;
                            bestArea = (int)r.area ();
                        }
                    }
                }

                if (bestIndex >= 0) {
                    //LOGD("DetectionBasedTracker::updateTrackedObjects: The best correspondence for i=%d is j=%d", i, bestIndex);
                    correspondence [bestIndex] = i;

                    for (int j = 0; j < N2; j++) {
                        if (correspondence [j] >= 0)
                            continue;

                        Rect r = Rect.intersect (detectedObjects [j], detectedObjects [bestIndex]);
                        if (r != null && (r.width > 0) && (r.height > 0)) {
                            //LOGD("DetectionBasedTracker::updateTrackedObjects: Found intersection between "
                            //    "rectangles j=%d and bestIndex=%d, rectangle j=%d is marked as intersected", j, bestIndex, j);
                            correspondence [j] = (int)TrackedState.INTERSECTED_RECTANGLE;
                        }
                    }
                } else {
                    //LOGD("DetectionBasedTracker::updateTrackedObjects: There is no correspondence for i=%d ", i);
                    curObject.numFramesNotDetected++;
                }
            }

            //LOGD("DetectionBasedTracker::updateTrackedObjects: start second cycle");
            for (int j = 0; j < N2; j++) {
                int i = correspondence [j];
                if (i >= 0) { // add position
                    //Debug.Log("DetectionBasedTracker::updateTrackedObjects: add position");
                    trackedObjects [i].lastPositions.Add (detectedObjects [j]);
                    while ((int)trackedObjects [i].lastPositions.Count > (int)innerParameters.numLastPositionsToTrack) {
                        trackedObjects [i].lastPositions.RemoveAt (0);
                    }
                    trackedObjects [i].numFramesNotDetected = 0;
                } else if (i == (int)TrackedState.NEW_RECTANGLE) { // new object
                    //Debug.Log("DetectionBasedTracker::updateTrackedObjects: new object");
                    trackedObjects.Add (new TrackedObject (detectedObjects [j]));
                } else {
                    //Debug.Log ("DetectionBasedTracker::updateTrackedObjects: was auxiliary intersection");
                }
            }

            int t = 0;
            TrackedObject it;
            while (t < trackedObjects.Count) {
                it = trackedObjects [t];
                if ((it.numFramesNotDetected > parameters.maxTrackLifetime)
                    ||
                    ((it.numDetectedFrames <= innerParameters.numStepsToWaitBeforeFirstShow)
                    &&
                    (it.numFramesNotDetected > innerParameters.numStepsToTrackWithoutDetectingIfObjectHasNotBeenShown))) {
                    //int numpos = (int)it.lastPositions.Count;
                    //if (numpos <= 0) UnityEngine.Debug.LogError ("numpos > 0 is false");
                    //Rect r = it.lastPositions [numpos - 1];
                    //Debug.Log("DetectionBasedTracker::updateTrackedObjects: deleted object " + r.x + " " + r.y + " " + r.width + " " + r.height);
                    trackedObjects.Remove (it);
                } else {
                    t++;
                }
            }
        }
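
        /// <summary>
        /// Computes the rectangle to display for tracked object i. The size is a
        /// weighted average of the last few detections (weightsSizesSmoothing, most
        /// recent first) and the center a weighted average of their centers
        /// (weightsPositionsSmoothing). Returns an empty Rect while the object is
        /// still too new to show or has not been detected recently enough.
        /// </summary>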
        private Rect CalcTrackedObjectPositionToShow (int i)
        {
            if ((i < 0) || (i >= trackedObjects.Count)) {
                Debug.Log ("DetectionBasedTracker::calcTrackedObjectPositionToShow: ERROR: wrong i=" + i);
                return new Rect ();
            }
            if (trackedObjects [i].numDetectedFrames <= innerParameters.numStepsToWaitBeforeFirstShow) {
                //Debug.Log("DetectionBasedTracker::calcTrackedObjectPositionToShow: " + "trackedObjects[" + i + "].numDetectedFrames=" + trackedObjects[i].numDetectedFrames + " <= numStepsToWaitBeforeFirstShow=" + innerParameters.numStepsToWaitBeforeFirstShow + " --- return empty Rect()");
                return new Rect ();
            }
            if (trackedObjects [i].numFramesNotDetected > innerParameters.numStepsToShowWithoutDetecting) {
                return new Rect ();
            }

            List<Rect> lastPositions = trackedObjects [i].lastPositions;

            int N = lastPositions.Count;
            if (N <= 0) {
                Debug.Log ("DetectionBasedTracker::calcTrackedObjectPositionToShow: ERROR: no positions for i=" + i);
                return new Rect ();
            }

            int Nsize = Math.Min (N, (int)weightsSizesSmoothing.Count);
            int Ncenter = Math.Min (N, (int)weightsPositionsSmoothing.Count);

            Point center = new Point ();
            double w = 0, h = 0;
            if (Nsize > 0) {
                double sum = 0;
                for (int j = 0; j < Nsize; j++) {
                    int k = N - j - 1;
                    w += lastPositions [k].width * weightsSizesSmoothing [j];
                    h += lastPositions [k].height * weightsSizesSmoothing [j];
                    sum += weightsSizesSmoothing [j];
                }
                w /= sum;
                h /= sum;
            } else {
                w = lastPositions [N - 1].width;
                h = lastPositions [N - 1].height;
            }

            if (Ncenter > 0) {
                double sum = 0;
                for (int j = 0; j < Ncenter; j++) {
                    int k = N - j - 1;
                    Point tl = lastPositions [k].tl ();
                    Point br = lastPositions [k].br ();
                    Point c1;
                    //c1=tl;
                    //c1=c1*0.5f;
                    c1 = new Point (tl.x * 0.5f, tl.y * 0.5f);
                    Point c2;
                    //c2=br;
                    //c2=c2*0.5f;
                    c2 = new Point (br.x * 0.5f, br.y * 0.5f);
                    //c1=c1+c2;
                    c1 = new Point (c1.x + c2.x, c1.y + c2.y);

                    //center=center+ (c1 * weightsPositionsSmoothing[j]);
                    center = new Point (center.x + (c1.x * weightsPositionsSmoothing [j]), center.y + (c1.y * weightsPositionsSmoothing [j]));
                    sum += weightsPositionsSmoothing [j];
                }
                //center *= (float)(1 / sum);
                center = new Point (center.x * (1 / sum), center.y * (1 / sum));
            } else {
                int k = N - 1;
                Point tl = lastPositions [k].tl ();
                Point br = lastPositions [k].br ();
                Point c1;
                //c1=tl;
                //c1=c1*0.5f;
                c1 = new Point (tl.x * 0.5f, tl.y * 0.5f);
                Point c2;
                //c2=br;
                //c2=c2*0.5f;
                c2 = new Point (br.x * 0.5f, br.y * 0.5f);
                //center=c1+c2;
                center = new Point (c1.x + c2.x, c1.y + c2.y);
            }

            //Point2f tl=center-(Point2f(w,h)*0.5);
            Point tl2 = new Point (center.x - (w * 0.5f), center.y - (h * 0.5f));
            //Rect res(cvRound(tl.x), cvRound(tl.y), cvRound(w), cvRound(h));
            Rect res = new Rect ((int)Math.Round (tl2.x), (int)Math.Round (tl2.y), (int)Math.Round (w), (int)Math.Round (h));
            //LOGD("DetectionBasedTracker::calcTrackedObjectPositionToShow: Result for i=%d: {%d, %d, %d x %d}", i, res.x, res.y, res.width, res.height);

            return res;
        }
        private struct Parameters
        {
            //public int minObjectSize;
            //public int maxObjectSize;
            //public float scaleFactor;
            //public int minNeighbors;
            public int maxTrackLifetime;
            //public int minDetectionPeriod; // the minimal time between runs of the big object detector (on the whole frame) in ms (1000 means 1 sec), default=0
        };

        private struct InnerParameters
        {
            public int numLastPositionsToTrack;
            public int numStepsToWaitBeforeFirstShow;
            public int numStepsToTrackWithoutDetectingIfObjectHasNotBeenShown;
            public int numStepsToShowWithoutDetecting;
            public float coeffTrackingWindowSize;
            public float coeffObjectSizeToTrack;
            public float coeffObjectSpeedUsingInPrediction;
        };

        private class TrackedObject
        {
            public PositionsVector lastPositions;
            public int numDetectedFrames;
            public int numFramesNotDetected;
            public int id;

            static private int _id = 0;

            public TrackedObject (OpenCVForUnity.CoreModule.Rect rect)
            {
                lastPositions = new PositionsVector ();
                numDetectedFrames = 1;
                numFramesNotDetected = 0;

                lastPositions.Add (rect.clone ());

                _id = GetNextId ();
                id = _id;
            }

            static int GetNextId ()
            {
                _id++;
                return _id;
            }
        }
    }
}