// CountFingersExample.cs
  1. using UnityEngine;
  2. using UnityEngine.EventSystems;
  3. using UnityEngine.SceneManagement;
  4. using UnityEngine.UI;
  5. using System.Collections;
  6. using System.Collections.Generic;
  7. using OpenCVForUnity.CoreModule;
  8. using OpenCVForUnity.ImgprocModule;
  9. using OpenCVForUnity.UnityUtils.Helper;
  10. using OpenCVForUnity.UnityUtils;
  11. namespace OpenCVForUnityExample
  12. {
  13. /// <summary>
  14. /// Count Fingers Example
  15. /// The techniques used are color segmentation using HSV color space to find the hand contour, and convex hull and convex defect algorithms to count the number of fingers.
  16. /// Referring to https://www.youtube.com/watch?v=KuGpOxOcpds.
  17. /// </summary>
  18. [RequireComponent(typeof(WebCamTextureToMatHelper))]
  19. public class CountFingersExample : MonoBehaviour
  20. {
  21. /// <summary>
  22. /// The number of fingers text.
  23. /// </summary>
  24. public UnityEngine.UI.Text numberOfFingersText;
  25. /// <summary>
  26. /// The threashold slider.
  27. /// </summary>
  28. public Slider threasholdSlider;
  29. /// <summary>
  30. /// The texture.
  31. /// </summary>
  32. Texture2D texture;
  33. /// <summary>
  34. /// The BLOB color hsv.
  35. /// </summary>
  36. Scalar blobColorHsv;
  37. ///// <summary>
  38. ///// The BLOB color rgba.
  39. ///// </summary>
  40. //Scalar blobColorRgba;
  41. /// <summary>
  42. /// The detector.
  43. /// </summary>
  44. ColorBlobDetector detector;
  45. /// <summary>
  46. /// The spectrum mat.
  47. /// </summary>
  48. Mat spectrumMat;
  49. /// <summary>
  50. /// Indicates whether is color selected.
  51. /// </summary>
  52. bool isColorSelected = false;
  53. /// <summary>
  54. /// The spectrum size.
  55. /// </summary>
  56. Size SPECTRUM_SIZE;
  57. /// <summary>
  58. /// The contour color.
  59. /// </summary>
  60. Scalar CONTOUR_COLOR;
  61. /// <summary>
  62. /// The contour color white.
  63. /// </summary>
  64. Scalar CONTOUR_COLOR_WHITE;
  65. /// <summary>
  66. /// The number of fingers.
  67. /// </summary>
  68. int numberOfFingers = 0;
  69. /// <summary>
  70. /// The webcam texture to mat helper.
  71. /// </summary>
  72. WebCamTextureToMatHelper webCamTextureToMatHelper;
  73. /// <summary>
  74. /// The stored touch point.
  75. /// </summary>
  76. Point storedTouchPoint;
  77. /// <summary>
  78. /// The FPS monitor.
  79. /// </summary>
  80. FpsMonitor fpsMonitor;
  81. // Use this for initialization
  82. void Start()
  83. {
  84. fpsMonitor = GetComponent<FpsMonitor>();
  85. webCamTextureToMatHelper = gameObject.GetComponent<WebCamTextureToMatHelper>();
  86. #if UNITY_ANDROID && !UNITY_EDITOR
  87. // Avoids the front camera low light issue that occurs in only some Android devices (e.g. Google Pixel, Pixel2).
  88. webCamTextureToMatHelper.avoidAndroidFrontCameraLowLightIssue = true;
  89. #endif
  90. webCamTextureToMatHelper.Initialize();
  91. }
  92. /// <summary>
  93. /// Raises the web cam texture to mat helper initialized event.
  94. /// </summary>
  95. public void OnWebCamTextureToMatHelperInitialized()
  96. {
  97. Debug.Log("OnWebCamTextureToMatHelperInitialized");
  98. Mat webCamTextureMat = webCamTextureToMatHelper.GetMat();
  99. texture = new Texture2D(webCamTextureMat.cols(), webCamTextureMat.rows(), TextureFormat.RGBA32, false);
  100. Utils.matToTexture2D(webCamTextureMat, texture);
  101. gameObject.GetComponent<Renderer>().material.mainTexture = texture;
  102. gameObject.transform.localScale = new Vector3(webCamTextureMat.cols(), webCamTextureMat.rows(), 1);
  103. Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);
  104. if (fpsMonitor != null)
  105. {
  106. fpsMonitor.Add("width", webCamTextureMat.width().ToString());
  107. fpsMonitor.Add("height", webCamTextureMat.height().ToString());
  108. fpsMonitor.Add("orientation", Screen.orientation.ToString());
  109. fpsMonitor.consoleText = "Please touch the area of the open hand.";
  110. }
  111. float width = webCamTextureMat.width();
  112. float height = webCamTextureMat.height();
  113. float widthScale = (float)Screen.width / width;
  114. float heightScale = (float)Screen.height / height;
  115. if (widthScale < heightScale)
  116. {
  117. Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
  118. }
  119. else
  120. {
  121. Camera.main.orthographicSize = height / 2;
  122. }
  123. detector = new ColorBlobDetector();
  124. spectrumMat = new Mat();
  125. //blobColorRgba = new Scalar (255);
  126. blobColorHsv = new Scalar(255);
  127. SPECTRUM_SIZE = new Size(200, 64);
  128. CONTOUR_COLOR = new Scalar(255, 0, 0, 255);
  129. CONTOUR_COLOR_WHITE = new Scalar(255, 255, 255, 255);
  130. }
  131. /// <summary>
  132. /// Raises the web cam texture to mat helper disposed event.
  133. /// </summary>
  134. public void OnWebCamTextureToMatHelperDisposed()
  135. {
  136. Debug.Log("OnWebCamTextureToMatHelperDisposed");
  137. if (spectrumMat != null)
  138. {
  139. spectrumMat.Dispose();
  140. spectrumMat = null;
  141. }
  142. if (texture != null)
  143. {
  144. Texture2D.Destroy(texture);
  145. texture = null;
  146. }
  147. }
  148. /// <summary>
  149. /// Raises the web cam texture to mat helper error occurred event.
  150. /// </summary>
  151. /// <param name="errorCode">Error code.</param>
  152. public void OnWebCamTextureToMatHelperErrorOccurred(WebCamTextureToMatHelper.ErrorCode errorCode)
  153. {
  154. Debug.Log("OnWebCamTextureToMatHelperErrorOccurred " + errorCode);
  155. }
  156. // Update is called once per frame
  157. void Update()
  158. {
  159. #if ((UNITY_ANDROID || UNITY_IOS) && !UNITY_EDITOR)
  160. //Touch
  161. int touchCount = Input.touchCount;
  162. if (touchCount == 1)
  163. {
  164. Touch t = Input.GetTouch (0);
  165. if(t.phase == TouchPhase.Ended && !EventSystem.current.IsPointerOverGameObject (t.fingerId)) {
  166. storedTouchPoint = new Point (t.position.x, t.position.y);
  167. //Debug.Log ("touch X " + t.position.x);
  168. //Debug.Log ("touch Y " + t.position.y);
  169. }
  170. }
  171. #else
  172. //Mouse
  173. if (Input.GetMouseButtonUp(0) && !EventSystem.current.IsPointerOverGameObject())
  174. {
  175. storedTouchPoint = new Point(Input.mousePosition.x, Input.mousePosition.y);
  176. //Debug.Log ("mouse X " + Input.mousePosition.x);
  177. //Debug.Log ("mouse Y " + Input.mousePosition.y);
  178. }
  179. #endif
  180. if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
  181. {
  182. Mat rgbaMat = webCamTextureToMatHelper.GetMat();
  183. if (storedTouchPoint != null)
  184. {
  185. ConvertScreenPointToTexturePoint(storedTouchPoint, storedTouchPoint, gameObject, rgbaMat.cols(), rgbaMat.rows());
  186. OnTouch(rgbaMat, storedTouchPoint);
  187. storedTouchPoint = null;
  188. }
  189. HandPoseEstimationProcess(rgbaMat);
  190. //Imgproc.putText (rgbaMat, "Please touch the area of the open hand.", new Point (5, rgbaMat.rows () - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
  191. Utils.matToTexture2D(rgbaMat, texture);
  192. }
  193. }
  194. private void HandPoseEstimationProcess(Mat rgbaMat)
  195. {
  196. //Imgproc.blur(mRgba, mRgba, new Size(5,5));
  197. Imgproc.GaussianBlur(rgbaMat, rgbaMat, new Size(3, 3), 1, 1);
  198. //Imgproc.medianBlur(mRgba, mRgba, 3);
  199. if (!isColorSelected)
  200. return;
  201. List<MatOfPoint> contours = detector.GetContours();
  202. detector.Process(rgbaMat);
  203. //Debug.Log ("Contours count: " + contours.Count);
  204. if (contours.Count <= 0)
  205. {
  206. return;
  207. }
  208. RotatedRect rect = Imgproc.minAreaRect(new MatOfPoint2f(contours[0].toArray()));
  209. double boundWidth = rect.size.width;
  210. double boundHeight = rect.size.height;
  211. int boundPos = 0;
  212. for (int i = 1; i < contours.Count; i++)
  213. {
  214. rect = Imgproc.minAreaRect(new MatOfPoint2f(contours[i].toArray()));
  215. if (rect.size.width * rect.size.height > boundWidth * boundHeight)
  216. {
  217. boundWidth = rect.size.width;
  218. boundHeight = rect.size.height;
  219. boundPos = i;
  220. }
  221. }
  222. MatOfPoint contour = contours[boundPos];
  223. OpenCVForUnity.CoreModule.Rect boundRect = Imgproc.boundingRect(new MatOfPoint(contour.toArray()));
  224. Imgproc.rectangle(rgbaMat, boundRect.tl(), boundRect.br(), CONTOUR_COLOR_WHITE, 2, 8, 0);
  225. //Debug.Log(
  226. // " Row start [" +
  227. // (int)boundRect.tl().y + "] row end [" +
  228. // (int)boundRect.br().y + "] Col start [" +
  229. // (int)boundRect.tl().x + "] Col end [" +
  230. // (int)boundRect.br().x + "]");
  231. double a = boundRect.br().y - boundRect.tl().y;
  232. a = a * 0.7;
  233. a = boundRect.tl().y + a;
  234. //Debug.Log (" A [" + a + "] br y - tl y = [" + (boundRect.br ().y - boundRect.tl ().y) + "]");
  235. Imgproc.rectangle(rgbaMat, boundRect.tl(), new Point(boundRect.br().x, a), CONTOUR_COLOR, 2, 8, 0);
  236. MatOfPoint2f pointMat = new MatOfPoint2f();
  237. Imgproc.approxPolyDP(new MatOfPoint2f(contour.toArray()), pointMat, 3, true);
  238. contour = new MatOfPoint(pointMat.toArray());
  239. MatOfInt hull = new MatOfInt();
  240. MatOfInt4 convexDefect = new MatOfInt4();
  241. Imgproc.convexHull(new MatOfPoint(contour.toArray()), hull);
  242. if (hull.toArray().Length < 3)
  243. return;
  244. Imgproc.convexityDefects(new MatOfPoint(contour.toArray()), hull, convexDefect);
  245. List<MatOfPoint> hullPoints = new List<MatOfPoint>();
  246. List<Point> listPo = new List<Point>();
  247. for (int j = 0; j < hull.toList().Count; j++)
  248. {
  249. listPo.Add(contour.toList()[hull.toList()[j]]);
  250. }
  251. MatOfPoint e = new MatOfPoint();
  252. e.fromList(listPo);
  253. hullPoints.Add(e);
  254. List<Point> listPoDefect = new List<Point>();
  255. if (convexDefect.rows() > 0)
  256. {
  257. List<int> convexDefectList = convexDefect.toList();
  258. List<Point> contourList = contour.toList();
  259. for (int j = 0; j < convexDefectList.Count; j = j + 4)
  260. {
  261. Point farPoint = contourList[convexDefectList[j + 2]];
  262. int depth = convexDefectList[j + 3];
  263. if (depth > threasholdSlider.value && farPoint.y < a)
  264. {
  265. listPoDefect.Add(contourList[convexDefectList[j + 2]]);
  266. }
  267. //Debug.Log ("convexDefectList [" + j + "] " + convexDefectList [j + 3]);
  268. }
  269. }
  270. //Debug.Log ("hull: " + hull.toList ());
  271. //if (convexDefect.rows () > 0) {
  272. // Debug.Log ("defects: " + convexDefect.toList ());
  273. //}
  274. Imgproc.drawContours(rgbaMat, hullPoints, -1, CONTOUR_COLOR, 3);
  275. //int defectsTotal = (int)convexDefect.total();
  276. //Debug.Log ("Defect total " + defectsTotal);
  277. this.numberOfFingers = listPoDefect.Count;
  278. if (this.numberOfFingers > 5)
  279. this.numberOfFingers = 5;
  280. //Debug.Log ("numberOfFingers " + numberOfFingers);
  281. //Imgproc.putText (rgbaMat, "" + numberOfFingers, new Point (rgbaMat.cols () / 2, rgbaMat.rows () / 2), Imgproc.FONT_HERSHEY_PLAIN, 4.0, new Scalar (255, 255, 255, 255), 6, Imgproc.LINE_AA, false);
  282. numberOfFingersText.text = numberOfFingers.ToString();
  283. foreach (Point p in listPoDefect)
  284. {
  285. Imgproc.circle(rgbaMat, p, 6, new Scalar(255, 0, 255, 255), -1);
  286. }
  287. }
  288. private void OnTouch(Mat img, Point touchPoint)
  289. {
  290. int cols = img.cols();
  291. int rows = img.rows();
  292. int x = (int)touchPoint.x;
  293. int y = (int)touchPoint.y;
  294. //Debug.Log ("Touch image coordinates: (" + x + ", " + y + ")");
  295. if ((x < 0) || (y < 0) || (x > cols) || (y > rows))
  296. return;
  297. OpenCVForUnity.CoreModule.Rect touchedRect = new OpenCVForUnity.CoreModule.Rect();
  298. touchedRect.x = (x > 5) ? x - 5 : 0;
  299. touchedRect.y = (y > 5) ? y - 5 : 0;
  300. touchedRect.width = (x + 5 < cols) ? x + 5 - touchedRect.x : cols - touchedRect.x;
  301. touchedRect.height = (y + 5 < rows) ? y + 5 - touchedRect.y : rows - touchedRect.y;
  302. using (Mat touchedRegionRgba = img.submat(touchedRect))
  303. using (Mat touchedRegionHsv = new Mat())
  304. {
  305. Imgproc.cvtColor(touchedRegionRgba, touchedRegionHsv, Imgproc.COLOR_RGB2HSV_FULL);
  306. // Calculate average color of touched region
  307. blobColorHsv = Core.sumElems(touchedRegionHsv);
  308. int pointCount = touchedRect.width * touchedRect.height;
  309. for (int i = 0; i < blobColorHsv.val.Length; i++)
  310. blobColorHsv.val[i] /= pointCount;
  311. //blobColorRgba = ConverScalarHsv2Rgba (blobColorHsv);
  312. //Debug.Log ("Touched rgba color: (" + mBlobColorRgba.val [0] + ", " + mBlobColorRgba.val [1] +
  313. // ", " + mBlobColorRgba.val [2] + ", " + mBlobColorRgba.val [3] + ")");
  314. detector.SetHsvColor(blobColorHsv);
  315. Imgproc.resize(detector.GetSpectrum(), spectrumMat, SPECTRUM_SIZE);
  316. isColorSelected = true;
  317. }
  318. }
  319. private Scalar ConverScalarHsv2Rgba(Scalar hsvColor)
  320. {
  321. Scalar rgbaColor;
  322. using (Mat pointMatRgba = new Mat())
  323. using (Mat pointMatHsv = new Mat(1, 1, CvType.CV_8UC3, hsvColor))
  324. {
  325. Imgproc.cvtColor(pointMatHsv, pointMatRgba, Imgproc.COLOR_HSV2RGB_FULL, 4);
  326. rgbaColor = new Scalar(pointMatRgba.get(0, 0));
  327. }
  328. return rgbaColor;
  329. }
  330. /// <summary>
  331. /// Converts the screen point to texture point.
  332. /// </summary>
  333. /// <param name="screenPoint">Screen point.</param>
  334. /// <param name="dstPoint">Dst point.</param>
  335. /// <param name="texturQuad">Texture quad.</param>
  336. /// <param name="textureWidth">Texture width.</param>
  337. /// <param name="textureHeight">Texture height.</param>
  338. /// <param name="camera">Camera.</param>
  339. private void ConvertScreenPointToTexturePoint(Point screenPoint, Point dstPoint, GameObject textureQuad, int textureWidth = -1, int textureHeight = -1, Camera camera = null)
  340. {
  341. if (textureWidth < 0 || textureHeight < 0)
  342. {
  343. Renderer r = textureQuad.GetComponent<Renderer>();
  344. if (r != null && r.material != null && r.material.mainTexture != null)
  345. {
  346. textureWidth = r.material.mainTexture.width;
  347. textureHeight = r.material.mainTexture.height;
  348. }
  349. else
  350. {
  351. textureWidth = (int)textureQuad.transform.localScale.x;
  352. textureHeight = (int)textureQuad.transform.localScale.y;
  353. }
  354. }
  355. if (camera == null)
  356. camera = Camera.main;
  357. Vector3 quadPosition = textureQuad.transform.localPosition;
  358. Vector3 quadScale = textureQuad.transform.localScale;
  359. Vector2 tl = camera.WorldToScreenPoint(new Vector3(quadPosition.x - quadScale.x / 2, quadPosition.y + quadScale.y / 2, quadPosition.z));
  360. Vector2 tr = camera.WorldToScreenPoint(new Vector3(quadPosition.x + quadScale.x / 2, quadPosition.y + quadScale.y / 2, quadPosition.z));
  361. Vector2 br = camera.WorldToScreenPoint(new Vector3(quadPosition.x + quadScale.x / 2, quadPosition.y - quadScale.y / 2, quadPosition.z));
  362. Vector2 bl = camera.WorldToScreenPoint(new Vector3(quadPosition.x - quadScale.x / 2, quadPosition.y - quadScale.y / 2, quadPosition.z));
  363. using (Mat srcRectMat = new Mat(4, 1, CvType.CV_32FC2))
  364. using (Mat dstRectMat = new Mat(4, 1, CvType.CV_32FC2))
  365. {
  366. srcRectMat.put(0, 0, tl.x, tl.y, tr.x, tr.y, br.x, br.y, bl.x, bl.y);
  367. dstRectMat.put(0, 0, 0, 0, quadScale.x, 0, quadScale.x, quadScale.y, 0, quadScale.y);
  368. using (Mat perspectiveTransform = Imgproc.getPerspectiveTransform(srcRectMat, dstRectMat))
  369. using (MatOfPoint2f srcPointMat = new MatOfPoint2f(screenPoint))
  370. using (MatOfPoint2f dstPointMat = new MatOfPoint2f())
  371. {
  372. Core.perspectiveTransform(srcPointMat, dstPointMat, perspectiveTransform);
  373. dstPoint.x = dstPointMat.get(0, 0)[0] * textureWidth / quadScale.x;
  374. dstPoint.y = dstPointMat.get(0, 0)[1] * textureHeight / quadScale.y;
  375. }
  376. }
  377. }
  378. /// <summary>
  379. /// Raises the destroy event.
  380. /// </summary>
  381. void OnDestroy()
  382. {
  383. webCamTextureToMatHelper.Dispose();
  384. if (detector != null)
  385. detector.Dispose();
  386. }
  387. /// <summary>
  388. /// Raises the back button click event.
  389. /// </summary>
  390. public void OnBackButtonClick()
  391. {
  392. SceneManager.LoadScene("OpenCVForUnityExample");
  393. }
  394. /// <summary>
  395. /// Raises the play button click event.
  396. /// </summary>
  397. public void OnPlayButtonClick()
  398. {
  399. webCamTextureToMatHelper.Play();
  400. }
  401. /// <summary>
  402. /// Raises the pause button click event.
  403. /// </summary>
  404. public void OnPauseButtonClick()
  405. {
  406. webCamTextureToMatHelper.Pause();
  407. }
  408. /// <summary>
  409. /// Raises the stop button click event.
  410. /// </summary>
  411. public void OnStopButtonClick()
  412. {
  413. webCamTextureToMatHelper.Stop();
  414. }
  415. /// <summary>
  416. /// Raises the change camera button click event.
  417. /// </summary>
  418. public void OnChangeCameraButtonClick()
  419. {
  420. webCamTextureToMatHelper.requestedIsFrontFacing = !webCamTextureToMatHelper.requestedIsFrontFacing;
  421. }
  422. }
  423. }