CamShiftExample.cs

using UnityEngine;
using UnityEngine.EventSystems;
using UnityEngine.SceneManagement;
using System.Collections;
using System.Collections.Generic;
using OpenCVForUnity.CoreModule;
using OpenCVForUnity.ImgprocModule;
using OpenCVForUnity.VideoModule;
using OpenCVForUnity.UnityUtils;
using OpenCVForUnity.UnityUtils.Helper;

namespace OpenCVForUnityExample
{
    /// <summary>
    /// CamShift Example
    /// An example of object tracking using the Video.CamShift function.
    /// Referring to http://www.computervisiononline.com/blog/tutorial-using-camshift-track-objects-video.
    /// http://docs.opencv.org/3.2.0/db/df8/tutorial_py_meanshift.html
    /// </summary>
    [RequireComponent(typeof(WebCamTextureToMatHelper))]
    public class CamShiftExample : MonoBehaviour
    {
        /// <summary>
        /// The texture.
        /// </summary>
        Texture2D texture;

        /// <summary>
        /// The roi point list.
        /// </summary>
        List<Point> roiPointList;

        /// <summary>
        /// The roi rect.
        /// </summary>
        OpenCVForUnity.CoreModule.Rect roiRect;

        /// <summary>
        /// The hsv mat.
        /// </summary>
        Mat hsvMat;

        /// <summary>
        /// The roi hist mat.
        /// </summary>
        Mat roiHistMat;

        /// <summary>
        /// The termination criteria.
        /// </summary>
        TermCriteria termination;

        /// <summary>
        /// The webcam texture to mat helper.
        /// </summary>
        WebCamTextureToMatHelper webCamTextureToMatHelper;

        /// <summary>
        /// The stored touch point.
        /// </summary>
        Point storedTouchPoint;

        /// <summary>
        /// The flag for requesting the start of the CamShift method.
        /// </summary>
        bool shouldStartCamShift = false;

        /// <summary>
        /// The FPS monitor.
        /// </summary>
        FpsMonitor fpsMonitor;

        // Use this for initialization
        void Start()
        {
            fpsMonitor = GetComponent<FpsMonitor>();

            roiPointList = new List<Point>();
            termination = new TermCriteria(TermCriteria.EPS | TermCriteria.COUNT, 10, 1);

            webCamTextureToMatHelper = gameObject.GetComponent<WebCamTextureToMatHelper>();

#if UNITY_ANDROID && !UNITY_EDITOR
            // Avoids the front camera low light issue that occurs in only some Android devices (e.g. Google Pixel, Pixel2).
            webCamTextureToMatHelper.avoidAndroidFrontCameraLowLightIssue = true;
#endif
            webCamTextureToMatHelper.Initialize();
        }

        /// <summary>
        /// Raises the webcam texture to mat helper initialized event.
        /// </summary>
        public void OnWebCamTextureToMatHelperInitialized()
        {
            Debug.Log("OnWebCamTextureToMatHelperInitialized");

            Mat webCamTextureMat = webCamTextureToMatHelper.GetMat();

            texture = new Texture2D(webCamTextureMat.cols(), webCamTextureMat.rows(), TextureFormat.RGBA32, false);
            Utils.matToTexture2D(webCamTextureMat, texture);

            gameObject.GetComponent<Renderer>().material.mainTexture = texture;

            gameObject.transform.localScale = new Vector3(webCamTextureMat.cols(), webCamTextureMat.rows(), 1);
            Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

            if (fpsMonitor != null)
            {
                fpsMonitor.Add("width", webCamTextureMat.width().ToString());
                fpsMonitor.Add("height", webCamTextureMat.height().ToString());
                fpsMonitor.Add("orientation", Screen.orientation.ToString());
                fpsMonitor.consoleText = "Please touch the 4 points surrounding the tracking object.";
            }
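
            // Fit the camera image to the screen: size the orthographic camera by whichever
            // dimension of the texture is the limiting one, so the whole texture stays visible.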
            float width = webCamTextureMat.width();
            float height = webCamTextureMat.height();
            float widthScale = (float)Screen.width / width;
            float heightScale = (float)Screen.height / height;
            if (widthScale < heightScale)
            {
                Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
            }
            else
            {
                Camera.main.orthographicSize = height / 2;
            }

            hsvMat = new Mat(webCamTextureMat.rows(), webCamTextureMat.cols(), CvType.CV_8UC3);
        }

        /// <summary>
        /// Raises the webcam texture to mat helper disposed event.
        /// </summary>
        public void OnWebCamTextureToMatHelperDisposed()
        {
            Debug.Log("OnWebCamTextureToMatHelperDisposed");

            hsvMat.Dispose();
            if (roiHistMat != null)
                roiHistMat.Dispose();
            roiPointList.Clear();

            if (texture != null)
            {
                Texture2D.Destroy(texture);
                texture = null;
            }
        }

        /// <summary>
        /// Raises the webcam texture to mat helper error occurred event.
        /// </summary>
        /// <param name="errorCode">Error code.</param>
        public void OnWebCamTextureToMatHelperErrorOccurred(WebCamTextureToMatHelper.ErrorCode errorCode)
        {
            Debug.Log("OnWebCamTextureToMatHelperErrorOccurred " + errorCode);
        }

        // Update is called once per frame
        void Update()
        {
#if ((UNITY_ANDROID || UNITY_IOS) && !UNITY_EDITOR)
            //Touch
            int touchCount = Input.touchCount;
            if (touchCount == 1)
            {
                Touch t = Input.GetTouch(0);
                if (t.phase == TouchPhase.Ended && !EventSystem.current.IsPointerOverGameObject(t.fingerId))
                {
                    storedTouchPoint = new Point(t.position.x, t.position.y);
                    //Debug.Log ("touch X " + t.position.x);
                    //Debug.Log ("touch Y " + t.position.y);
                }
            }
#else
            //Mouse
            if (Input.GetMouseButtonUp(0) && !EventSystem.current.IsPointerOverGameObject())
            {
                storedTouchPoint = new Point(Input.mousePosition.x, Input.mousePosition.y);
                //Debug.Log ("mouse X " + Input.mousePosition.x);
                //Debug.Log ("mouse Y " + Input.mousePosition.y);
            }
#endif
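
            // Per-frame processing: convert the camera frame to HSV, then either
            // (re)build the ROI hue histogram when four points have just been selected,
            // or track the object with CamShift using the existing histogram.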
            if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
            {
                Mat rgbaMat = webCamTextureToMatHelper.GetMat();

                Imgproc.cvtColor(rgbaMat, hsvMat, Imgproc.COLOR_RGBA2RGB);
                Imgproc.cvtColor(hsvMat, hsvMat, Imgproc.COLOR_RGB2HSV);

                if (storedTouchPoint != null)
                {
                    ConvertScreenPointToTexturePoint(storedTouchPoint, storedTouchPoint, gameObject, rgbaMat.cols(), rgbaMat.rows());
                    OnTouch(rgbaMat, storedTouchPoint);
                    storedTouchPoint = null;
                }

                Point[] points = roiPointList.ToArray();

                if (shouldStartCamShift)
                {
                    shouldStartCamShift = false;

                    using (MatOfPoint roiPointMat = new MatOfPoint(points))
                    {
                        roiRect = Imgproc.boundingRect(roiPointMat);
                    }

                    if (roiHistMat != null)
                    {
                        roiHistMat.Dispose();
                        roiHistMat = null;
                    }
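
                    // Build the ROI's hue histogram: channel 0 (hue) of the HSV image, 16 bins over [0, 180),
                    // normalized to [0, 255] so it can be used for back-projection in later frames.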
                    roiHistMat = new Mat();

                    using (Mat roiHSVMat = new Mat(hsvMat, roiRect))
                    using (Mat maskMat = new Mat())
                    {
                        Imgproc.calcHist(new List<Mat>(new Mat[] { roiHSVMat }), new MatOfInt(0), maskMat, roiHistMat, new MatOfInt(16), new MatOfFloat(0, 180));
                        Core.normalize(roiHistMat, roiHistMat, 0, 255, Core.NORM_MINMAX);

                        //Debug.Log ("roiHist " + roiHistMat.ToString ());
                    }
                }
                else if (points.Length == 4)
                {
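                    // Back-project the ROI hue histogram onto the current frame and run CamShift:
                    // roiRect is updated as the search window and the returned RotatedRect gives the
                    // tracked object's corner points, which are drawn below.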
                    using (Mat backProj = new Mat())
                    {
                        Imgproc.calcBackProject(new List<Mat>(new Mat[] { hsvMat }), new MatOfInt(0), roiHistMat, backProj, new MatOfFloat(0, 180), 1.0);

                        RotatedRect r = Video.CamShift(backProj, roiRect, termination);
                        r.points(points);
                    }
                }

                if (points.Length < 4)
                {
                    for (int i = 0; i < points.Length; i++)
                    {
                        Imgproc.circle(rgbaMat, points[i], 6, new Scalar(0, 0, 255, 255), 2);
                    }
                }
                else
                {
                    for (int i = 0; i < 4; i++)
                    {
                        Imgproc.line(rgbaMat, points[i], points[(i + 1) % 4], new Scalar(255, 0, 0, 255), 2);
                    }
                    Imgproc.rectangle(rgbaMat, roiRect.tl(), roiRect.br(), new Scalar(0, 255, 0, 255), 2);
                }

                //Imgproc.putText (rgbaMat, "W:" + rgbaMat.width () + " H:" + rgbaMat.height () + " SO:" + Screen.orientation, new Point (5, rgbaMat.rows () - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);

                Utils.matToTexture2D(rgbaMat, texture);
            }
        }
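
        /// <summary>
        /// Collects up to four touched points as the ROI corners. Points outside the image are
        /// discarded; once four valid points are stored, a CamShift (re)start is requested.
        /// </summary>
        /// <param name="img">The image the points are validated against.</param>
        /// <param name="touchPoint">The touch point in texture coordinates.</param>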
        private void OnTouch(Mat img, Point touchPoint)
        {
            if (roiPointList.Count == 4)
            {
                roiPointList.Clear();
            }

            if (roiPointList.Count < 4)
            {
                roiPointList.Add(touchPoint);
                if (!(new OpenCVForUnity.CoreModule.Rect(0, 0, img.width(), img.height()).contains(roiPointList[roiPointList.Count - 1])))
                {
                    roiPointList.RemoveAt(roiPointList.Count - 1);
                }

                if (roiPointList.Count == 4)
                {
                    shouldStartCamShift = true;
                }
            }
        }

        /// <summary>
        /// Converts a screen point to a texture point.
        /// </summary>
        /// <param name="screenPoint">Screen point.</param>
        /// <param name="dstPoint">Destination point.</param>
        /// <param name="textureQuad">Texture quad.</param>
        /// <param name="textureWidth">Texture width.</param>
        /// <param name="textureHeight">Texture height.</param>
        /// <param name="camera">Camera.</param>
        private void ConvertScreenPointToTexturePoint(Point screenPoint, Point dstPoint, GameObject textureQuad, int textureWidth = -1, int textureHeight = -1, Camera camera = null)
        {
            if (textureWidth < 0 || textureHeight < 0)
            {
                Renderer r = textureQuad.GetComponent<Renderer>();
                if (r != null && r.material != null && r.material.mainTexture != null)
                {
                    textureWidth = r.material.mainTexture.width;
                    textureHeight = r.material.mainTexture.height;
                }
                else
                {
                    textureWidth = (int)textureQuad.transform.localScale.x;
                    textureHeight = (int)textureQuad.transform.localScale.y;
                }
            }

            if (camera == null)
                camera = Camera.main;

            Vector3 quadPosition = textureQuad.transform.localPosition;
            Vector3 quadScale = textureQuad.transform.localScale;

            // Project the quad's four corners into screen space.
            Vector2 tl = camera.WorldToScreenPoint(new Vector3(quadPosition.x - quadScale.x / 2, quadPosition.y + quadScale.y / 2, quadPosition.z));
            Vector2 tr = camera.WorldToScreenPoint(new Vector3(quadPosition.x + quadScale.x / 2, quadPosition.y + quadScale.y / 2, quadPosition.z));
            Vector2 br = camera.WorldToScreenPoint(new Vector3(quadPosition.x + quadScale.x / 2, quadPosition.y - quadScale.y / 2, quadPosition.z));
            Vector2 bl = camera.WorldToScreenPoint(new Vector3(quadPosition.x - quadScale.x / 2, quadPosition.y - quadScale.y / 2, quadPosition.z));

            // Map the screen point into the quad's local coordinates with a perspective transform,
            // then scale from quad units to texture pixels.
            using (Mat srcRectMat = new Mat(4, 1, CvType.CV_32FC2))
            using (Mat dstRectMat = new Mat(4, 1, CvType.CV_32FC2))
            {
                srcRectMat.put(0, 0, tl.x, tl.y, tr.x, tr.y, br.x, br.y, bl.x, bl.y);
                dstRectMat.put(0, 0, 0, 0, quadScale.x, 0, quadScale.x, quadScale.y, 0, quadScale.y);

                using (Mat perspectiveTransform = Imgproc.getPerspectiveTransform(srcRectMat, dstRectMat))
                using (MatOfPoint2f srcPointMat = new MatOfPoint2f(screenPoint))
                using (MatOfPoint2f dstPointMat = new MatOfPoint2f())
                {
                    Core.perspectiveTransform(srcPointMat, dstPointMat, perspectiveTransform);

                    dstPoint.x = dstPointMat.get(0, 0)[0] * textureWidth / quadScale.x;
                    dstPoint.y = dstPointMat.get(0, 0)[1] * textureHeight / quadScale.y;
                }
            }
        }

        /// <summary>
        /// Raises the destroy event.
        /// </summary>
        void OnDestroy()
        {
            webCamTextureToMatHelper.Dispose();
        }

        /// <summary>
        /// Raises the back button click event.
        /// </summary>
        public void OnBackButtonClick()
        {
            SceneManager.LoadScene("OpenCVForUnityExample");
        }

        /// <summary>
        /// Raises the play button click event.
        /// </summary>
        public void OnPlayButtonClick()
        {
            webCamTextureToMatHelper.Play();
        }

        /// <summary>
        /// Raises the pause button click event.
        /// </summary>
        public void OnPauseButtonClick()
        {
            webCamTextureToMatHelper.Pause();
        }

        /// <summary>
        /// Raises the stop button click event.
        /// </summary>
        public void OnStopButtonClick()
        {
            webCamTextureToMatHelper.Stop();
        }

        /// <summary>
        /// Raises the change camera button click event.
        /// </summary>
        public void OnChangeCameraButtonClick()
        {
            webCamTextureToMatHelper.requestedIsFrontFacing = !webCamTextureToMatHelper.requestedIsFrontFacing;
        }
    }
}