AsynchronousFaceDetectionWebCamExample.cs

using UnityEngine;
using UnityEngine.SceneManagement;
using System;
using System.Collections;
using System.Collections.Generic;
using System.Threading;
using OpenCVForUnity.CoreModule;
using OpenCVForUnity.ObjdetectModule;
using OpenCVForUnity.ImgprocModule;
using OpenCVForUnity.UnityUtils;
using OpenCVForUnity.UnityUtils.Helper;
using Rect = OpenCVForUnity.CoreModule.Rect;
using PositionsVector = System.Collections.Generic.List<OpenCVForUnity.CoreModule.Rect>;

namespace OpenCVForUnityExample
{
    /// <summary>
    /// Asynchronous Face Detection WebCam Example
    /// Referring to https://github.com/Itseez/opencv/blob/master/modules/objdetect/src/detection_based_tracker.cpp
    /// </summary>
    [RequireComponent(typeof(WebCamTextureToMatHelper))]
    public class AsynchronousFaceDetectionWebCamExample : MonoBehaviour
    {
        /// <summary>
        /// The gray mat.
        /// </summary>
        Mat grayMat;

        /// <summary>
        /// The texture.
        /// </summary>
        Texture2D texture;

        /// <summary>
        /// The webcam texture to mat helper.
        /// </summary>
        WebCamTextureToMatHelper webCamTextureToMatHelper;

        /// <summary>
        /// The cascade.
        /// </summary>
        CascadeClassifier cascade;
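
        // Two classifiers are used: this LBP cascade re-detects faces inside small tracked
        // regions on the main thread every frame, while the Haar cascade assigned to
        // cascade4Thread below scans the whole frame on a background thread (LBP cascades
        // are generally lighter-weight, which suits the per-frame region checks).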

        /// <summary>
        /// LBP_CASCADE_FILENAME
        /// </summary>
        protected static readonly string LBP_CASCADE_FILENAME = "OpenCVForUnity/objdetect/lbpcascade_frontalface.xml";

        /// <summary>
        /// The lbp cascade filepath.
        /// </summary>
        string lbp_cascade_filepath;

        /// <summary>
        /// HAAR_CASCADE_FILENAME
        /// </summary>
        protected static readonly string HAAR_CASCADE_FILENAME = "OpenCVForUnity/objdetect/haarcascade_frontalface_alt.xml";

        /// <summary>
        /// The haar cascade filepath.
        /// </summary>
        string haar_cascade_filepath;

        /// <summary>
        /// The regions in which to re-detect objects, seeded from the latest full-frame
        /// detection result or predicted from the tracked objects' previous positions.
        /// </summary>
        Rect[] rectsWhereRegions;

        /// <summary>
        /// The detected objects in regions.
        /// </summary>
        List<Rect> detectedObjectsInRegions = new List<Rect>();

        /// <summary>
        /// The result objects.
        /// </summary>
        List<Rect> resultObjects = new List<Rect>();

        // for Thread
        CascadeClassifier cascade4Thread;
        Mat grayMat4Thread;
        MatOfRect detectionResult;
        System.Object sync = new System.Object();
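
        // The flags below are shared between the Unity main thread and the detection worker;
        // every read and write goes through the 'sync' lock object so each flag behaves as a
        // simple thread-safe boolean.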
        bool _isThreadRunning = false;
        bool isThreadRunning
        {
            get
            {
                lock (sync)
                    return _isThreadRunning;
            }
            set
            {
                lock (sync)
                    _isThreadRunning = value;
            }
        }

        bool _shouldStopThread = false;
        bool shouldStopThread
        {
            get
            {
                lock (sync)
                    return _shouldStopThread;
            }
            set
            {
                lock (sync)
                    _shouldStopThread = value;
            }
        }

        bool _shouldDetectInMultiThread = false;
        bool shouldDetectInMultiThread
        {
            get
            {
                lock (sync)
                    return _shouldDetectInMultiThread;
            }
            set
            {
                lock (sync)
                    _shouldDetectInMultiThread = value;
            }
        }

        bool _didUpdateTheDetectionResult = false;
        bool didUpdateTheDetectionResult
        {
            get
            {
                lock (sync)
                    return _didUpdateTheDetectionResult;
            }
            set
            {
                lock (sync)
                    _didUpdateTheDetectionResult = value;
            }
        }

        /// <summary>
        /// The FPS monitor.
        /// </summary>
        FpsMonitor fpsMonitor;

        // for tracker
        List<TrackedObject> trackedObjects = new List<TrackedObject>();
        List<float> weightsPositionsSmoothing = new List<float>();
        List<float> weightsSizesSmoothing = new List<float>();
        Parameters parameters;
        InnerParameters innerParameters;

#if UNITY_WEBGL
        IEnumerator getFilePath_Coroutine;
#endif

        // Use this for initialization
        void Start()
        {
            fpsMonitor = GetComponent<FpsMonitor>();

            webCamTextureToMatHelper = gameObject.GetComponent<WebCamTextureToMatHelper>();

#if UNITY_WEBGL
            getFilePath_Coroutine = GetFilePath();
            StartCoroutine(getFilePath_Coroutine);
#else
            lbp_cascade_filepath = Utils.getFilePath(LBP_CASCADE_FILENAME);
            haar_cascade_filepath = Utils.getFilePath(HAAR_CASCADE_FILENAME);
            Run();
#endif
        }

#if UNITY_WEBGL
        private IEnumerator GetFilePath()
        {
            var getFilePathAsync_lbpcascade_frontalface_xml_filepath_Coroutine = Utils.getFilePathAsync(LBP_CASCADE_FILENAME, (result) =>
            {
                lbp_cascade_filepath = result;
            });
            yield return getFilePathAsync_lbpcascade_frontalface_xml_filepath_Coroutine;

            var getFilePathAsync_haarcascade_frontalface_alt_xml_filepath_Coroutine = Utils.getFilePathAsync(HAAR_CASCADE_FILENAME, (result) =>
            {
                haar_cascade_filepath = result;
            });
            yield return getFilePathAsync_haarcascade_frontalface_alt_xml_filepath_Coroutine;

            getFilePath_Coroutine = null;

            Run();
        }
#endif

        private void Run()
        {
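            // Smoothing weights used by CalcTrackedObjectPositionToShow: the single position
            // weight means the displayed center comes from the newest detection only, while
            // sizes are averaged over the last three positions with weights 0.5 / 0.3 / 0.2.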
            weightsPositionsSmoothing.Add(1);
            weightsSizesSmoothing.Add(0.5f);
            weightsSizesSmoothing.Add(0.3f);
            weightsSizesSmoothing.Add(0.2f);

            //parameters.minObjectSize = 96;
            //parameters.maxObjectSize = int.MaxValue;
            //parameters.scaleFactor = 1.1f;
            //parameters.minNeighbors = 2;
            parameters.maxTrackLifetime = 5;

            innerParameters.numLastPositionsToTrack = 4;
            innerParameters.numStepsToWaitBeforeFirstShow = 6;
            innerParameters.numStepsToTrackWithoutDetectingIfObjectHasNotBeenShown = 3;
            innerParameters.numStepsToShowWithoutDetecting = 3;
            innerParameters.coeffTrackingWindowSize = 2.0f;
            innerParameters.coeffObjectSizeToTrack = 0.85f;
            innerParameters.coeffObjectSpeedUsingInPrediction = 0.8f;

#if UNITY_ANDROID && !UNITY_EDITOR
            // Avoids the front camera low light issue that occurs in only some Android devices (e.g. Google Pixel, Pixel2).
            webCamTextureToMatHelper.avoidAndroidFrontCameraLowLightIssue = true;
#endif
            webCamTextureToMatHelper.Initialize();
        }

        /// <summary>
        /// Raises the webcam texture to mat helper initialized event.
        /// </summary>
        public void OnWebCamTextureToMatHelperInitialized()
        {
            Debug.Log("OnWebCamTextureToMatHelperInitialized");

            Mat webCamTextureMat = webCamTextureToMatHelper.GetMat();

            texture = new Texture2D(webCamTextureMat.cols(), webCamTextureMat.rows(), TextureFormat.RGBA32, false);
            Utils.matToTexture2D(webCamTextureMat, texture);

            gameObject.GetComponent<Renderer>().material.mainTexture = texture;

            gameObject.transform.localScale = new Vector3(webCamTextureMat.cols(), webCamTextureMat.rows(), 1);
            Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

            if (fpsMonitor != null)
            {
                fpsMonitor.Add("width", webCamTextureMat.width().ToString());
                fpsMonitor.Add("height", webCamTextureMat.height().ToString());
                fpsMonitor.Add("orientation", Screen.orientation.ToString());
            }
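
            // Fit the orthographic camera to the video quad: when the screen is relatively
            // wider than the video, the quad's height fills the screen; otherwise the size is
            // derived from the width so the full frame stays visible horizontally.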
            float width = webCamTextureMat.width();
            float height = webCamTextureMat.height();
            float widthScale = (float)Screen.width / width;
            float heightScale = (float)Screen.height / height;
            if (widthScale < heightScale)
            {
                Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
            }
            else
            {
                Camera.main.orthographicSize = height / 2;
            }

            grayMat = new Mat(webCamTextureMat.rows(), webCamTextureMat.cols(), CvType.CV_8UC1);

            if (string.IsNullOrEmpty(lbp_cascade_filepath))
            {
                Debug.LogError(LBP_CASCADE_FILENAME + " is not loaded. Please move from \"OpenCVForUnity/StreamingAssets/OpenCVForUnity/\" to \"Assets/StreamingAssets/OpenCVForUnity/\" folder.");
            }
            else
            {
                cascade = new CascadeClassifier(lbp_cascade_filepath);
            }

            InitThread();
        }

        /// <summary>
        /// Raises the webcam texture to mat helper disposed event.
        /// </summary>
        public void OnWebCamTextureToMatHelperDisposed()
        {
            Debug.Log("OnWebCamTextureToMatHelperDisposed");

#if !UNITY_WEBGL
            StopThread();
#else
            StopCoroutine("ThreadWorker");
#endif

            if (grayMat4Thread != null)
                grayMat4Thread.Dispose();

            if (cascade4Thread != null)
                cascade4Thread.Dispose();

            if (grayMat != null)
                grayMat.Dispose();

            if (texture != null)
            {
                Texture2D.Destroy(texture);
                texture = null;
            }

            if (cascade != null)
                cascade.Dispose();

            trackedObjects.Clear();
        }

        /// <summary>
        /// Raises the webcam texture to mat helper error occurred event.
        /// </summary>
        /// <param name="errorCode">Error code.</param>
        public void OnWebCamTextureToMatHelperErrorOccurred(WebCamTextureToMatHelper.ErrorCode errorCode)
        {
            Debug.Log("OnWebCamTextureToMatHelperErrorOccurred " + errorCode);
        }

        // Update is called once per frame
        void Update()
        {
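            // Per-frame flow: (1) hand the worker a fresh gray frame whenever it is idle;
            // (2) if the worker finished a full-frame detection, use those rectangles as the
            // regions of interest, otherwise predict regions from each tracked object's last
            // positions and speed; (3) re-detect inside the regions with the LBP cascade and
            // feed the results to the tracker.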
            if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
            {
                Mat rgbaMat = webCamTextureToMatHelper.GetMat();

                if (cascade == null || cascade4Thread == null)
                {
                    Imgproc.putText(rgbaMat, "model file is not loaded.", new Point(5, rgbaMat.rows() - 30), Imgproc.FONT_HERSHEY_SIMPLEX, 0.7, new Scalar(255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
                    Imgproc.putText(rgbaMat, "Please read console message.", new Point(5, rgbaMat.rows() - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 0.7, new Scalar(255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
                    Utils.matToTexture2D(rgbaMat, texture);
                    return;
                }

                Imgproc.cvtColor(rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);
                Imgproc.equalizeHist(grayMat, grayMat);

                if (!shouldDetectInMultiThread)
                {
                    grayMat.copyTo(grayMat4Thread);
                    shouldDetectInMultiThread = true;
                }

                Rect[] rects;
                if (didUpdateTheDetectionResult)
                {
                    didUpdateTheDetectionResult = false;

                    //Debug.Log("DetectionBasedTracker::process: get _rectsWhereRegions were got from resultDetect");
                    rectsWhereRegions = detectionResult.toArray();

                    rects = rectsWhereRegions;
                    for (int i = 0; i < rects.Length; i++)
                    {
                        Imgproc.rectangle(rgbaMat, new Point(rects[i].x, rects[i].y), new Point(rects[i].x + rects[i].width, rects[i].y + rects[i].height), new Scalar(0, 0, 255, 255), 2);
                    }
                }
                else
                {
                    //Debug.Log("DetectionBasedTracker::process: get _rectsWhereRegions from previous positions");
                    rectsWhereRegions = new Rect[trackedObjects.Count];
                    for (int i = 0; i < trackedObjects.Count; i++)
                    {
                        int n = trackedObjects[i].lastPositions.Count;
                        //if (n > 0) UnityEngine.Debug.LogError("n > 0 is false");

                        Rect r = trackedObjects[i].lastPositions[n - 1].clone();
                        if (r.area() == 0)
                        {
                            Debug.Log("DetectionBasedTracker::process: ERROR: ATTENTION: strange algorithm's behavior: trackedObjects[i].rect() is empty");
                            continue;
                        }

                        // correction by speed of rectangle
                        if (n > 1)
                        {
                            Point center = CenterRect(r);
                            Point center_prev = CenterRect(trackedObjects[i].lastPositions[n - 2]);
                            Point shift = new Point((center.x - center_prev.x) * innerParameters.coeffObjectSpeedUsingInPrediction,
                                (center.y - center_prev.y) * innerParameters.coeffObjectSpeedUsingInPrediction);

                            r.x += (int)Math.Round(shift.x);
                            r.y += (int)Math.Round(shift.y);
                        }
                        rectsWhereRegions[i] = r;
                    }

                    rects = rectsWhereRegions;
                    for (int i = 0; i < rects.Length; i++)
                    {
                        Imgproc.rectangle(rgbaMat, new Point(rects[i].x, rects[i].y), new Point(rects[i].x + rects[i].width, rects[i].y + rects[i].height), new Scalar(0, 255, 0, 255), 2);
                    }
                }

                detectedObjectsInRegions.Clear();
                if (rectsWhereRegions.Length > 0)
                {
                    int len = rectsWhereRegions.Length;
                    for (int i = 0; i < len; i++)
                    {
                        DetectInRegion(grayMat, rectsWhereRegions[i], detectedObjectsInRegions);
                    }
                }

                UpdateTrackedObjects(detectedObjectsInRegions);
                GetObjects(resultObjects);

                rects = resultObjects.ToArray();
                for (int i = 0; i < rects.Length; i++)
                {
                    //Debug.Log("detect faces " + rects[i]);
                    Imgproc.rectangle(rgbaMat, new Point(rects[i].x, rects[i].y), new Point(rects[i].x + rects[i].width, rects[i].y + rects[i].height), new Scalar(255, 0, 0, 255), 2);
                }

#if UNITY_WEBGL
                Imgproc.putText(rgbaMat, "WebGL platform does not support multi-threading.", new Point(5, rgbaMat.rows() - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar(255, 255, 255, 255), 1, Imgproc.LINE_AA, false);
#endif

                Utils.matToTexture2D(rgbaMat, texture);
            }
        }

        private void DetectInRegion(Mat img, Rect r, List<Rect> detectedObjectsInRegions)
        {
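            // Inflate the tracked rectangle by coeffTrackingWindowSize to get a search
            // window, clip it to the image bounds, then run the cascade inside that window
            // with a minimum object size of coeffObjectSizeToTrack times the tracked size.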
            Rect r0 = new Rect(new Point(), img.size());
            Rect r1 = new Rect(r.x, r.y, r.width, r.height);
            Rect.inflate(r1, (int)((r1.width * innerParameters.coeffTrackingWindowSize) - r1.width) / 2,
                (int)((r1.height * innerParameters.coeffTrackingWindowSize) - r1.height) / 2);
            r1 = Rect.intersect(r0, r1);

            if (r1 == null || (r1.width <= 0) || (r1.height <= 0))
            {
                Debug.Log("DetectionBasedTracker::detectInRegion: Empty intersection");
                return;
            }

            int d = Math.Min(r.width, r.height);
            d = (int)Math.Round(d * innerParameters.coeffObjectSizeToTrack);

            MatOfRect tmpobjects = new MatOfRect();
            Mat img1 = new Mat(img, r1); // subimage for rectangle -- without data copying

            cascade.detectMultiScale(img1, tmpobjects, 1.1, 2, 0 | Objdetect.CASCADE_DO_CANNY_PRUNING | Objdetect.CASCADE_SCALE_IMAGE | Objdetect.CASCADE_FIND_BIGGEST_OBJECT, new Size(d, d), new Size());

            Rect[] tmpobjectsArray = tmpobjects.toArray();
            int len = tmpobjectsArray.Length;
            for (int i = 0; i < len; i++)
            {
                Rect tmp = tmpobjectsArray[i];
                Rect curres = new Rect(new Point(tmp.x + r1.x, tmp.y + r1.y), tmp.size());
                detectedObjectsInRegions.Add(curres);
            }
        }

        public Point CenterRect(Rect r)
        {
            return new Point(r.x + (r.width / 2), r.y + (r.height / 2));
        }

        private void InitThread()
        {
            StopThread();

            grayMat4Thread = new Mat();

            if (string.IsNullOrEmpty(haar_cascade_filepath))
            {
                Debug.LogError(HAAR_CASCADE_FILENAME + " is not loaded. Please move from \"OpenCVForUnity/StreamingAssets/OpenCVForUnity/\" to \"Assets/StreamingAssets/OpenCVForUnity/\" folder.");
            }
            else
            {
                cascade4Thread = new CascadeClassifier(haar_cascade_filepath);
            }

            shouldDetectInMultiThread = false;

#if !UNITY_WEBGL
            StartThread(ThreadWorker);
#else
            StartCoroutine("ThreadWorker");
#endif
        }

        private void StartThread(Action action)
        {
            shouldStopThread = false;

#if UNITY_METRO && NETFX_CORE
            System.Threading.Tasks.Task.Run(() => action());
#elif UNITY_METRO
            action.BeginInvoke(ar => action.EndInvoke(ar), null);
#else
            ThreadPool.QueueUserWorkItem(_ => action());
#endif
            Debug.Log("Thread Start");
        }

        private void StopThread()
        {
            if (!isThreadRunning)
                return;

            shouldStopThread = true;

            while (isThreadRunning)
            {
                // Spin-wait until the worker observes shouldStopThread and exits.
            }
            Debug.Log("Thread Stop");
        }

#if !UNITY_WEBGL
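        // The worker loop spins until the main thread publishes a frame via
        // shouldDetectInMultiThread, runs one full-frame detection, then raises
        // didUpdateTheDetectionResult for Update() to consume. On WebGL, where threads
        // are unavailable, the same handshake runs as a coroutine on the main thread.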
        private void ThreadWorker()
        {
            isThreadRunning = true;

            while (!shouldStopThread)
            {
                if (!shouldDetectInMultiThread)
                    continue;

                Detect();

                shouldDetectInMultiThread = false;
                didUpdateTheDetectionResult = true;
            }

            isThreadRunning = false;
        }
#else
        private IEnumerator ThreadWorker()
        {
            while (true)
            {
                while (!shouldDetectInMultiThread)
                {
                    yield return null;
                }

                Detect();

                shouldDetectInMultiThread = false;
                didUpdateTheDetectionResult = true;
            }
        }
#endif

        private void Detect()
        {
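            // Full-frame Haar detection on the worker's private gray copy (grayMat4Thread);
            // the minimum detection size is 20% of the frame height, so only reasonably
            // large faces seed new tracking regions.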
            MatOfRect objects = new MatOfRect();
            if (cascade4Thread != null)
                cascade4Thread.detectMultiScale(grayMat4Thread, objects, 1.1, 2, Objdetect.CASCADE_SCALE_IMAGE, // TODO: objdetect.CV_HAAR_SCALE_IMAGE
                    new Size(grayMat4Thread.height() * 0.2, grayMat4Thread.height() * 0.2), new Size());

            //Thread.Sleep(200);

            detectionResult = objects;
        }

        /// <summary>
        /// Raises the destroy event.
        /// </summary>
        void OnDestroy()
        {
            webCamTextureToMatHelper.Dispose();

#if UNITY_WEBGL
            if (getFilePath_Coroutine != null)
            {
                StopCoroutine(getFilePath_Coroutine);
                ((IDisposable)getFilePath_Coroutine).Dispose();
            }
#endif
        }

        /// <summary>
        /// Raises the back button click event.
        /// </summary>
        public void OnBackButtonClick()
        {
            SceneManager.LoadScene("OpenCVForUnityExample");
        }

        /// <summary>
        /// Raises the play button click event.
        /// </summary>
        public void OnPlayButtonClick()
        {
            webCamTextureToMatHelper.Play();
        }

        /// <summary>
        /// Raises the pause button click event.
        /// </summary>
        public void OnPauseButtonClick()
        {
            webCamTextureToMatHelper.Pause();
        }

        /// <summary>
        /// Raises the stop button click event.
        /// </summary>
        public void OnStopButtonClick()
        {
            webCamTextureToMatHelper.Stop();
        }

        /// <summary>
        /// Raises the change camera button click event.
        /// </summary>
        public void OnChangeCameraButtonClick()
        {
            webCamTextureToMatHelper.requestedIsFrontFacing = !webCamTextureToMatHelper.requestedIsFrontFacing;
        }

        //
        // tracker
        //
        private void GetObjects(List<Rect> result)
        {
            result.Clear();

            for (int i = 0; i < trackedObjects.Count; i++)
            {
                Rect r = CalcTrackedObjectPositionToShow(i);
                if (r.area() == 0)
                {
                    continue;
                }
                result.Add(r);
                //LOGD("DetectionBasedTracker::process: found a object with SIZE %d x %d, rect={%d, %d, %d x %d}", r.width, r.height, r.x, r.y, r.width, r.height);
            }
        }

        private enum TrackedState : int
        {
            NEW_RECTANGLE = -1,
            INTERSECTED_RECTANGLE = -2
        }

        private void UpdateTrackedObjects(List<Rect> detectedObjects)
        {
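            // Greedy matching in the spirit of OpenCV's DetectionBasedTracker: each tracked
            // object claims the candidate detection whose intersection with its last position
            // has the largest area. Detections still marked NEW_RECTANGLE afterwards spawn new
            // tracked objects; those marked INTERSECTED_RECTANGLE are dropped as duplicates.
            // Finally, objects that have gone undetected for too long are removed.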
            int N1 = (int)trackedObjects.Count;
            int N2 = (int)detectedObjects.Count;

            for (int i = 0; i < N1; i++)
            {
                trackedObjects[i].numDetectedFrames++;
            }

            int[] correspondence = new int[N2];
            for (int i = 0; i < N2; i++)
            {
                correspondence[i] = (int)TrackedState.NEW_RECTANGLE;
            }

            for (int i = 0; i < N1; i++)
            {
                TrackedObject curObject = trackedObjects[i];

                int bestIndex = -1;
                int bestArea = -1;

                int numpositions = (int)curObject.lastPositions.Count;
                //if (numpositions > 0) UnityEngine.Debug.LogError("numpositions > 0 is false");
                Rect prevRect = curObject.lastPositions[numpositions - 1];

                for (int j = 0; j < N2; j++)
                {
                    if (correspondence[j] >= 0)
                    {
                        //Debug.Log("DetectionBasedTracker::updateTrackedObjects: j=" + j + " is rejected, because it has correspondence=" + correspondence[j]);
                        continue;
                    }
                    if (correspondence[j] != (int)TrackedState.NEW_RECTANGLE)
                    {
                        //Debug.Log("DetectionBasedTracker::updateTrackedObjects: j=" + j + " is rejected, because it is intersected with another rectangle");
                        continue;
                    }

                    Rect r = Rect.intersect(prevRect, detectedObjects[j]);
                    if (r != null && (r.width > 0) && (r.height > 0))
                    {
                        //LOGD("DetectionBasedTracker::updateTrackedObjects: There is intersection between prevRect and detectedRect, r={%d, %d, %d x %d}",
                        //    r.x, r.y, r.width, r.height);
                        correspondence[j] = (int)TrackedState.INTERSECTED_RECTANGLE;

                        if (r.area() > bestArea)
                        {
                            //LOGD("DetectionBasedTracker::updateTrackedObjects: The area of intersection is %d, it is better than bestArea=%d", r.area(), bestArea);
                            bestIndex = j;
                            bestArea = (int)r.area();
                        }
                    }
                }

                if (bestIndex >= 0)
                {
                    //LOGD("DetectionBasedTracker::updateTrackedObjects: The best correspondence for i=%d is j=%d", i, bestIndex);
                    correspondence[bestIndex] = i;

                    for (int j = 0; j < N2; j++)
                    {
                        if (correspondence[j] >= 0)
                            continue;

                        Rect r = Rect.intersect(detectedObjects[j], detectedObjects[bestIndex]);
                        if (r != null && (r.width > 0) && (r.height > 0))
                        {
                            //LOGD("DetectionBasedTracker::updateTrackedObjects: Found intersection between "
                            //    "rectangles j=%d and bestIndex=%d, rectangle j=%d is marked as intersected", j, bestIndex, j);
                            correspondence[j] = (int)TrackedState.INTERSECTED_RECTANGLE;
                        }
                    }
                }
                else
                {
                    //LOGD("DetectionBasedTracker::updateTrackedObjects: There is no correspondence for i=%d ", i);
                    curObject.numFramesNotDetected++;
                }
            }

            //LOGD("DetectionBasedTracker::updateTrackedObjects: start second cycle");
            for (int j = 0; j < N2; j++)
            {
                int i = correspondence[j];
                if (i >= 0) // add position
                {
                    //Debug.Log("DetectionBasedTracker::updateTrackedObjects: add position");
                    trackedObjects[i].lastPositions.Add(detectedObjects[j]);
                    while ((int)trackedObjects[i].lastPositions.Count > (int)innerParameters.numLastPositionsToTrack)
                    {
                        trackedObjects[i].lastPositions.Remove(trackedObjects[i].lastPositions[0]);
                    }
                    trackedObjects[i].numFramesNotDetected = 0;
                }
                else if (i == (int)TrackedState.NEW_RECTANGLE) // new object
                {
                    //Debug.Log("DetectionBasedTracker::updateTrackedObjects: new object");
                    trackedObjects.Add(new TrackedObject(detectedObjects[j]));
                }
                else
                {
                    //Debug.Log("DetectionBasedTracker::updateTrackedObjects: was auxiliary intersection");
                }
            }

            int t = 0;
            TrackedObject it;
            while (t < trackedObjects.Count)
            {
                it = trackedObjects[t];
                if ((it.numFramesNotDetected > parameters.maxTrackLifetime)
                    ||
                    ((it.numDetectedFrames <= innerParameters.numStepsToWaitBeforeFirstShow)
                        &&
                        (it.numFramesNotDetected > innerParameters.numStepsToTrackWithoutDetectingIfObjectHasNotBeenShown)))
                {
                    //int numpos = (int)it.lastPositions.Count;
                    //if (numpos > 0) UnityEngine.Debug.LogError("numpos > 0 is false");
                    //Rect r = it.lastPositions[numpos - 1];
                    //Debug.Log("DetectionBasedTracker::updateTrackedObjects: deleted object " + r.x + " " + r.y + " " + r.width + " " + r.height);
                    trackedObjects.Remove(it);
                }
                else
                {
                    t++;
                }
            }
        }

        private Rect CalcTrackedObjectPositionToShow(int i)
        {
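            // The shown rectangle is smoothed over the newest entries of lastPositions:
            // sizes are averaged with weightsSizesSmoothing (0.5/0.3/0.2) and centers with
            // weightsPositionsSmoothing (a single weight of 1, i.e. the latest center).
            // Nothing is shown until the object has survived the warm-up thresholds below.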
            if ((i < 0) || (i >= trackedObjects.Count))
            {
                Debug.Log("DetectionBasedTracker::calcTrackedObjectPositionToShow: ERROR: wrong i=" + i);
                return new Rect();
            }
            if (trackedObjects[i].numDetectedFrames <= innerParameters.numStepsToWaitBeforeFirstShow)
            {
                //Debug.Log("DetectionBasedTracker::calcTrackedObjectPositionToShow: " + "trackedObjects[" + i + "].numDetectedFrames=" + trackedObjects[i].numDetectedFrames + " <= numStepsToWaitBeforeFirstShow=" + innerParameters.numStepsToWaitBeforeFirstShow + " --- return empty Rect()");
                return new Rect();
            }
            if (trackedObjects[i].numFramesNotDetected > innerParameters.numStepsToShowWithoutDetecting)
            {
                return new Rect();
            }

            List<Rect> lastPositions = trackedObjects[i].lastPositions;

            int N = lastPositions.Count;
            if (N <= 0)
            {
                Debug.Log("DetectionBasedTracker::calcTrackedObjectPositionToShow: ERROR: no positions for i=" + i);
                return new Rect();
            }

            int Nsize = Math.Min(N, (int)weightsSizesSmoothing.Count);
            int Ncenter = Math.Min(N, (int)weightsPositionsSmoothing.Count);

            Point center = new Point();
            double w = 0, h = 0;
            if (Nsize > 0)
            {
                double sum = 0;
                for (int j = 0; j < Nsize; j++)
                {
                    int k = N - j - 1;
                    w += lastPositions[k].width * weightsSizesSmoothing[j];
                    h += lastPositions[k].height * weightsSizesSmoothing[j];
                    sum += weightsSizesSmoothing[j];
                }
                w /= sum;
                h /= sum;
            }
            else
            {
                w = lastPositions[N - 1].width;
                h = lastPositions[N - 1].height;
            }

            if (Ncenter > 0)
            {
                double sum = 0;
                for (int j = 0; j < Ncenter; j++)
                {
                    int k = N - j - 1;
                    Point tl = lastPositions[k].tl();
                    Point br = lastPositions[k].br();
                    Point c1;
                    //c1=tl;
                    //c1=c1*0.5f;
                    c1 = new Point(tl.x * 0.5f, tl.y * 0.5f);
                    Point c2;
                    //c2=br;
                    //c2=c2*0.5f;
                    c2 = new Point(br.x * 0.5f, br.y * 0.5f);
                    //c1=c1+c2;
                    c1 = new Point(c1.x + c2.x, c1.y + c2.y);

                    //center=center+(c1*weightsPositionsSmoothing[j]);
                    center = new Point(center.x + (c1.x * weightsPositionsSmoothing[j]), center.y + (c1.y * weightsPositionsSmoothing[j]));
                    sum += weightsPositionsSmoothing[j];
                }
                //center*=(float)(1/sum);
                center = new Point(center.x * (1 / sum), center.y * (1 / sum));
            }
            else
            {
                int k = N - 1;
                Point tl = lastPositions[k].tl();
                Point br = lastPositions[k].br();
                Point c1;
                //c1=tl;
                //c1=c1*0.5f;
                c1 = new Point(tl.x * 0.5f, tl.y * 0.5f);
                Point c2;
                //c2=br;
                //c2=c2*0.5f;
                c2 = new Point(br.x * 0.5f, br.y * 0.5f);
                //center=c1+c2;
                center = new Point(c1.x + c2.x, c1.y + c2.y);
            }

            //Point2f tl=center-(Point2f(w,h)*0.5);
            Point tl2 = new Point(center.x - (w * 0.5f), center.y - (h * 0.5f));
            //Rect res(cvRound(tl.x), cvRound(tl.y), cvRound(w), cvRound(h));
            Rect res = new Rect((int)Math.Round(tl2.x), (int)Math.Round(tl2.y), (int)Math.Round(w), (int)Math.Round(h));
            //LOGD("DetectionBasedTracker::calcTrackedObjectPositionToShow: Result for i=%d: {%d, %d, %d x %d}", i, res.x, res.y, res.width, res.height);

            return res;
        }

        private struct Parameters
        {
            //public int minObjectSize;
            //public int maxObjectSize;
            //public float scaleFactor;
            //public int minNeighbors;
            public int maxTrackLifetime;
            //public int minDetectionPeriod; // the minimal time between runs of the big object detector (on the whole frame), in ms (1000 means 1 sec), default=0
        };

        private struct InnerParameters
        {
            public int numLastPositionsToTrack;
            public int numStepsToWaitBeforeFirstShow;
            public int numStepsToTrackWithoutDetectingIfObjectHasNotBeenShown;
            public int numStepsToShowWithoutDetecting;
            public float coeffTrackingWindowSize;
            public float coeffObjectSizeToTrack;
            public float coeffObjectSpeedUsingInPrediction;
        };

        private class TrackedObject
        {
            public PositionsVector lastPositions;
            public int numDetectedFrames;
            public int numFramesNotDetected;
            public int id;

            static private int _id = 0;

            public TrackedObject(Rect rect)
            {
                lastPositions = new PositionsVector();

                numDetectedFrames = 1;
                numFramesNotDetected = 0;

                lastPositions.Add(rect.clone());

                id = GetNextId();
            }

            static int GetNextId()
            {
                _id++;
                return _id;
            }
        }
    }
}