// TrackingExample.cs
  1. using UnityEngine;
  2. using UnityEngine.EventSystems;
  3. using UnityEngine.SceneManagement;
  4. using System;
  5. using System.Collections;
  6. using System.Collections.Generic;
  7. using OpenCVForUnity.VideoioModule;
  8. using OpenCVForUnity.TrackingModule;
  9. using OpenCVForUnity.CoreModule;
  10. using OpenCVForUnity.ImgprocModule;
  11. using OpenCVForUnity.UnityUtils;
  12. namespace OpenCVForUnityExample
  13. {
  14. /// <summary>
  15. /// Tracking Example
  16. /// An example of object tracking using the tracking (Tracking API) module.
  17. /// http://docs.opencv.org/trunk/d5/d07/tutorial_multitracker.html
  18. /// </summary>
  19. public class TrackingExample : MonoBehaviour
  20. {
  21. /// <summary>
  22. /// The capture.
  23. /// </summary>
  24. VideoCapture capture;
  25. /// <summary>
  26. /// The rgb mat.
  27. /// </summary>
  28. Mat rgbMat;
  29. /// <summary>
  30. /// The texture.
  31. /// </summary>
  32. Texture2D texture;
  33. /// <summary>
  34. /// The trackers.
  35. /// </summary>
  36. MultiTracker trackers;
  37. /// <summary>
  38. /// The objects.
  39. /// </summary>
  40. MatOfRect2d objects;
  41. /// <summary>
  42. /// The tracking color list.
  43. /// </summary>
  44. List<Scalar> trackingColorList;
  45. /// <summary>
  46. /// The selected point list.
  47. /// </summary>
  48. List<Point> selectedPointList;
  49. /// <summary>
  50. /// The stored touch point.
  51. /// </summary>
  52. Point storedTouchPoint;
  53. /// <summary>
  54. /// The FPS monitor.
  55. /// </summary>
  56. FpsMonitor fpsMonitor;
  57. #if UNITY_WEBGL && !UNITY_EDITOR
  58. IEnumerator getFilePath_Coroutine;
  59. #endif
  60. // Use this for initialization
  61. void Start ()
  62. {
  63. fpsMonitor = GetComponent<FpsMonitor> ();
  64. capture = new VideoCapture ();
  65. #if UNITY_WEBGL && !UNITY_EDITOR
  66. getFilePath_Coroutine = Utils.getFilePathAsync("768x576_mjpeg.mjpeg", (result) => {
  67. getFilePath_Coroutine = null;
  68. capture.open (result);
  69. Init ();
  70. });
  71. StartCoroutine (getFilePath_Coroutine);
  72. #else
  73. capture.open (Utils.getFilePath ("768x576_mjpeg.mjpeg"));
  74. Init ();
  75. #endif
  76. }
  77. private void Init ()
  78. {
  79. rgbMat = new Mat ();
  80. if (capture.isOpened ()) {
  81. Debug.Log ("capture.isOpened() true");
  82. } else {
  83. Debug.Log ("capture.isOpened() false");
  84. }
  85. Debug.Log ("CAP_PROP_FORMAT: " + capture.get (Videoio.CAP_PROP_FORMAT));
  86. Debug.Log ("CAP_PROP_POS_MSEC: " + capture.get (Videoio.CAP_PROP_POS_MSEC));
  87. Debug.Log ("CAP_PROP_POS_FRAMES: " + capture.get (Videoio.CAP_PROP_POS_FRAMES));
  88. Debug.Log ("CAP_PROP_POS_AVI_RATIO: " + capture.get (Videoio.CAP_PROP_POS_AVI_RATIO));
  89. Debug.Log ("CAP_PROP_FRAME_COUNT: " + capture.get (Videoio.CAP_PROP_FRAME_COUNT));
  90. Debug.Log ("CAP_PROP_FPS: " + capture.get (Videoio.CAP_PROP_FPS));
  91. Debug.Log ("CAP_PROP_FRAME_WIDTH: " + capture.get (Videoio.CAP_PROP_FRAME_WIDTH));
  92. Debug.Log ("CAP_PROP_FRAME_HEIGHT: " + capture.get (Videoio.CAP_PROP_FRAME_HEIGHT));
  93. capture.grab ();
  94. capture.retrieve (rgbMat, 0);
  95. int frameWidth = rgbMat.cols ();
  96. int frameHeight = rgbMat.rows ();
  97. texture = new Texture2D (frameWidth, frameHeight, TextureFormat.RGB24, false);
  98. gameObject.transform.localScale = new Vector3 ((float)frameWidth, (float)frameHeight, 1);
  99. float widthScale = (float)Screen.width / (float)frameWidth;
  100. float heightScale = (float)Screen.height / (float)frameHeight;
  101. if (widthScale < heightScale) {
  102. Camera.main.orthographicSize = ((float)frameWidth * (float)Screen.height / (float)Screen.width) / 2;
  103. } else {
  104. Camera.main.orthographicSize = (float)frameHeight / 2;
  105. }
  106. capture.set (Videoio.CAP_PROP_POS_FRAMES, 0);
  107. gameObject.GetComponent<Renderer> ().material.mainTexture = texture;
  108. trackers = MultiTracker.create ();
  109. objects = new MatOfRect2d ();
  110. trackingColorList = new List<Scalar> ();
  111. selectedPointList = new List<Point> ();
  112. }
  113. // Update is called once per frame
  114. void Update ()
  115. {
  116. #if ((UNITY_ANDROID || UNITY_IOS) && !UNITY_EDITOR)
  117. //Touch
  118. int touchCount = Input.touchCount;
  119. if (touchCount == 1)
  120. {
  121. Touch t = Input.GetTouch(0);
  122. if(t.phase == TouchPhase.Ended && !EventSystem.current.IsPointerOverGameObject (t.fingerId)) {
  123. storedTouchPoint = new Point (t.position.x, t.position.y);
  124. //Debug.Log ("touch X " + t.position.x);
  125. //Debug.Log ("touch Y " + t.position.y);
  126. }
  127. }
  128. #else
  129. //Mouse
  130. if (Input.GetMouseButtonUp (0) && !EventSystem.current.IsPointerOverGameObject ()) {
  131. storedTouchPoint = new Point (Input.mousePosition.x, Input.mousePosition.y);
  132. //Debug.Log ("mouse X " + Input.mousePosition.x);
  133. //Debug.Log ("mouse Y " + Input.mousePosition.y);
  134. }
  135. #endif
  136. if (selectedPointList.Count == 1) {
  137. if (storedTouchPoint != null) {
  138. ConvertScreenPointToTexturePoint (storedTouchPoint, storedTouchPoint, gameObject, rgbMat.cols (), rgbMat.rows ());
  139. OnTouch (rgbMat, storedTouchPoint);
  140. storedTouchPoint = null;
  141. }
  142. }
  143. //Loop play
  144. if (capture.get (Videoio.CAP_PROP_POS_FRAMES) >= capture.get (Videoio.CAP_PROP_FRAME_COUNT))
  145. capture.set (Videoio.CAP_PROP_POS_FRAMES, 0);
  146. //error PlayerLoop called recursively! on iOS.reccomend WebCamTexture.
  147. if (selectedPointList.Count != 1 && capture.grab ()) {
  148. capture.retrieve (rgbMat, 0);
  149. Imgproc.cvtColor (rgbMat, rgbMat, Imgproc.COLOR_BGR2RGB);
  150. if (storedTouchPoint != null) {
  151. ConvertScreenPointToTexturePoint (storedTouchPoint, storedTouchPoint, gameObject, rgbMat.cols (), rgbMat.rows ());
  152. OnTouch (rgbMat, storedTouchPoint);
  153. storedTouchPoint = null;
  154. }
  155. if (selectedPointList.Count < 2) {
  156. foreach (var point in selectedPointList) {
  157. Imgproc.circle (rgbMat, point, 6, new Scalar (0, 0, 255), 2);
  158. }
  159. } else {
  160. using (MatOfPoint selectedPointMat = new MatOfPoint (selectedPointList.ToArray ())) {
  161. OpenCVForUnity.CoreModule.Rect region = Imgproc.boundingRect (selectedPointMat);
  162. trackers.add (TrackerKCF.create (), rgbMat, new Rect2d (region.x, region.y, region.width, region.height));
  163. }
  164. selectedPointList.Clear ();
  165. trackingColorList.Add (new Scalar (UnityEngine.Random.Range (0, 255), UnityEngine.Random.Range (0, 255), UnityEngine.Random.Range (0, 255)));
  166. }
  167. trackers.update (rgbMat, objects);
  168. // bool updated = trackers.update (rgbMat, objects);
  169. // Debug.Log ("updated " + updated);
  170. // if (!updated && objects.rows () > 0) {
  171. // OnResetTrackerButtonClick ();
  172. // }
  173. Rect2d[] objectsArray = objects.toArray ();
  174. for (int i = 0; i < objectsArray.Length; i++) {
  175. Imgproc.rectangle (rgbMat, objectsArray [i].tl (), objectsArray [i].br (), trackingColorList [i], 2, 1, 0);
  176. }
  177. if (selectedPointList.Count != 1) {
  178. //Imgproc.putText (rgbMat, "Please touch the screen, and select tracking regions.", new Point (5, rgbMat.rows () - 10), Core.FONT_HERSHEY_SIMPLEX, 0.8, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
  179. if (fpsMonitor != null) {
  180. fpsMonitor.consoleText = "Please touch the screen, and select tracking regions.";
  181. }
  182. } else {
  183. //Imgproc.putText (rgbMat, "Please select the end point of the new tracking region.", new Point (5, rgbMat.rows () - 10), Core.FONT_HERSHEY_SIMPLEX, 0.8, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
  184. if (fpsMonitor != null) {
  185. fpsMonitor.consoleText = "Please select the end point of the new tracking region.";
  186. }
  187. }
  188. Utils.fastMatToTexture2D (rgbMat, texture);
  189. }
  190. }
  191. private void OnTouch (Mat img, Point touchPoint)
  192. {
  193. if (selectedPointList.Count < 2) {
  194. selectedPointList.Add (touchPoint);
  195. if (!(new OpenCVForUnity.CoreModule.Rect (0, 0, img.cols (), img.rows ()).contains (selectedPointList [selectedPointList.Count - 1]))) {
  196. selectedPointList.RemoveAt (selectedPointList.Count - 1);
  197. }
  198. }
  199. }
  200. /// <summary>
  201. /// Converts the screen point to texture point.
  202. /// </summary>
  203. /// <param name="screenPoint">Screen point.</param>
  204. /// <param name="dstPoint">Dst point.</param>
  205. /// <param name="texturQuad">Texture quad.</param>
  206. /// <param name="textureWidth">Texture width.</param>
  207. /// <param name="textureHeight">Texture height.</param>
  208. /// <param name="camera">Camera.</param>
  209. private void ConvertScreenPointToTexturePoint (Point screenPoint, Point dstPoint, GameObject textureQuad, int textureWidth = -1, int textureHeight = -1, Camera camera = null)
  210. {
  211. if (textureWidth < 0 || textureHeight < 0) {
  212. Renderer r = textureQuad.GetComponent<Renderer> ();
  213. if (r != null && r.material != null && r.material.mainTexture != null) {
  214. textureWidth = r.material.mainTexture.width;
  215. textureHeight = r.material.mainTexture.height;
  216. } else {
  217. textureWidth = (int)textureQuad.transform.localScale.x;
  218. textureHeight = (int)textureQuad.transform.localScale.y;
  219. }
  220. }
  221. if (camera == null)
  222. camera = Camera.main;
  223. Vector3 quadPosition = textureQuad.transform.localPosition;
  224. Vector3 quadScale = textureQuad.transform.localScale;
  225. Vector2 tl = camera.WorldToScreenPoint (new Vector3 (quadPosition.x - quadScale.x / 2, quadPosition.y + quadScale.y / 2, quadPosition.z));
  226. Vector2 tr = camera.WorldToScreenPoint (new Vector3 (quadPosition.x + quadScale.x / 2, quadPosition.y + quadScale.y / 2, quadPosition.z));
  227. Vector2 br = camera.WorldToScreenPoint (new Vector3 (quadPosition.x + quadScale.x / 2, quadPosition.y - quadScale.y / 2, quadPosition.z));
  228. Vector2 bl = camera.WorldToScreenPoint (new Vector3 (quadPosition.x - quadScale.x / 2, quadPosition.y - quadScale.y / 2, quadPosition.z));
  229. using (Mat srcRectMat = new Mat (4, 1, CvType.CV_32FC2))
  230. using (Mat dstRectMat = new Mat (4, 1, CvType.CV_32FC2)) {
  231. srcRectMat.put (0, 0, tl.x, tl.y, tr.x, tr.y, br.x, br.y, bl.x, bl.y);
  232. dstRectMat.put (0, 0, 0, 0, quadScale.x, 0, quadScale.x, quadScale.y, 0, quadScale.y);
  233. using (Mat perspectiveTransform = Imgproc.getPerspectiveTransform (srcRectMat, dstRectMat))
  234. using (MatOfPoint2f srcPointMat = new MatOfPoint2f (screenPoint))
  235. using (MatOfPoint2f dstPointMat = new MatOfPoint2f ()) {
  236. Core.perspectiveTransform (srcPointMat, dstPointMat, perspectiveTransform);
  237. dstPoint.x = dstPointMat.get (0, 0) [0] * textureWidth / quadScale.x;
  238. dstPoint.y = dstPointMat.get (0, 0) [1] * textureHeight / quadScale.y;
  239. }
  240. }
  241. }
  242. /// <summary>
  243. /// Raises the destroy event.
  244. /// </summary>
  245. void OnDestroy ()
  246. {
  247. capture.release ();
  248. if (rgbMat != null)
  249. rgbMat.Dispose ();
  250. if (texture != null) {
  251. Texture2D.Destroy (texture);
  252. texture = null;
  253. }
  254. if (trackers != null)
  255. trackers.Dispose ();
  256. if (objects != null)
  257. objects.Dispose ();
  258. #if UNITY_WEBGL && !UNITY_EDITOR
  259. if (getFilePath_Coroutine != null) {
  260. StopCoroutine (getFilePath_Coroutine);
  261. ((IDisposable)getFilePath_Coroutine).Dispose ();
  262. }
  263. #endif
  264. }
  265. /// <summary>
  266. /// Raises the back button click event.
  267. /// </summary>
  268. public void OnBackButtonClick ()
  269. {
  270. SceneManager.LoadScene ("OpenCVForUnityExample");
  271. }
  272. /// <summary>
  273. /// Raises the reset tracker button click event.
  274. /// </summary>
  275. public void OnResetTrackerButtonClick ()
  276. {
  277. if (trackers != null) {
  278. trackers.Dispose ();
  279. trackers = null;
  280. }
  281. if (objects != null) {
  282. objects.Dispose ();
  283. objects = null;
  284. }
  285. trackers = MultiTracker.create ();
  286. objects = new MatOfRect2d ();
  287. trackingColorList.Clear ();
  288. selectedPointList.Clear ();
  289. }
  290. }
  291. }