ArUcoExample.cs

using UnityEngine;
using UnityEngine.UI;
using UnityEngine.SceneManagement;
using System.Collections;
using System.Collections.Generic;
using OpenCVForUnity.CoreModule;
using OpenCVForUnity.Calib3dModule;
using OpenCVForUnity.ArucoModule;
using OpenCVForUnity.UnityUtils;

namespace OpenCVForUnityExample
{
    /// <summary>
    /// ArUco Example
    /// An example of marker-based AR view and camera pose estimation using the aruco (ArUco Marker Detection) module.
    /// Referring to https://github.com/opencv/opencv_contrib/blob/master/modules/aruco/samples/detect_markers.cpp.
    /// http://docs.opencv.org/3.1.0/d5/dae/tutorial_aruco_detection.html
    /// </summary>
    public class ArUcoExample : MonoBehaviour
    {
        /// <summary>
        /// The image texture.
        /// </summary>
        public Texture2D imgTexture;

        [Space (10)]

        /// <summary>
        /// The dictionary identifier.
        /// </summary>
        public ArUcoDictionary dictionaryId = ArUcoDictionary.DICT_6X6_250;

        /// <summary>
        /// The dictionary id dropdown.
        /// </summary>
        public Dropdown dictionaryIdDropdown;

        /// <summary>
        /// Determines if the rejected candidate corners are shown.
        /// </summary>
        public bool showRejectedCorners = false;

        /// <summary>
        /// The show rejected corners toggle.
        /// </summary>
        public Toggle showRejectedCornersToggle;

        /// <summary>
        /// Determines if the pose estimation is applied.
        /// </summary>
        public bool applyEstimationPose = true;

        /// <summary>
        /// The length of the markers' side. Normally, the unit is meters.
        /// </summary>
        public float markerLength = 0.1f;

        /// <summary>
        /// The AR game object.
        /// </summary>
        public GameObject arGameObject;

        /// <summary>
        /// The AR camera.
        /// </summary>
        public Camera arCamera;

        [Space (10)]

        /// <summary>
        /// Determines if the AR camera is moved instead of the AR object.
        /// </summary>
        public bool shouldMoveARCamera = false;

        /// <summary>
        /// The rgb mat.
        /// </summary>
        Mat rgbMat;

        /// <summary>
        /// The texture.
        /// </summary>
        Texture2D texture;

        // Use this for initialization
        void Start ()
        {
            rgbMat = new Mat (imgTexture.height, imgTexture.width, CvType.CV_8UC3);
            texture = new Texture2D (rgbMat.cols (), rgbMat.rows (), TextureFormat.RGBA32, false);
            gameObject.GetComponent<Renderer> ().material.mainTexture = texture;

            dictionaryIdDropdown.value = (int)dictionaryId;
            showRejectedCornersToggle.isOn = showRejectedCorners;

            DetectMarkers ();
        }

        // Update is called once per frame
        void Update ()
        {

        }

        private void DetectMarkers ()
        {
            Utils.texture2DToMat (imgTexture, rgbMat);
            Debug.Log ("rgbMat ToString " + rgbMat.ToString ());

            gameObject.transform.localScale = new Vector3 (imgTexture.width, imgTexture.height, 1);
            Debug.Log ("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

            float width = rgbMat.width ();
            float height = rgbMat.height ();

            float imageSizeScale = 1.0f;
            float widthScale = (float)Screen.width / width;
            float heightScale = (float)Screen.height / height;
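            // Fit the background quad to the screen: set the orthographic size of the main camera
            // so the image fills the view while keeping its aspect ratio.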
            if (widthScale < heightScale) {
                Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
                imageSizeScale = (float)Screen.height / (float)Screen.width;
            } else {
                Camera.main.orthographicSize = height / 2;
            }

            // set camera parameters.
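            // No real calibration data is used in this example: the intrinsic matrix below is a
            // rough pinhole approximation with focal length max(width, height) and the principal
            // point at the image center.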
            int max_d = (int)Mathf.Max (width, height);
            double fx = max_d;
            double fy = max_d;
            double cx = width / 2.0f;
            double cy = height / 2.0f;
            Mat camMatrix = new Mat (3, 3, CvType.CV_64FC1);
            camMatrix.put (0, 0, fx);
            camMatrix.put (0, 1, 0);
            camMatrix.put (0, 2, cx);
            camMatrix.put (1, 0, 0);
            camMatrix.put (1, 1, fy);
            camMatrix.put (1, 2, cy);
            camMatrix.put (2, 0, 0);
            camMatrix.put (2, 1, 0);
            camMatrix.put (2, 2, 1.0f);
            Debug.Log ("camMatrix " + camMatrix.dump ());
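
            // The distortion coefficients are all zero, i.e. the input image is assumed to be
            // free of lens distortion.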
            MatOfDouble distCoeffs = new MatOfDouble (0, 0, 0, 0);
            Debug.Log ("distCoeffs " + distCoeffs.dump ());

            // calibration camera matrix values.
            Size imageSize = new Size (width * imageSizeScale, height * imageSizeScale);
            double apertureWidth = 0;
            double apertureHeight = 0;
            double[] fovx = new double[1];
            double[] fovy = new double[1];
            double[] focalLength = new double[1];
            Point principalPoint = new Point (0, 0);
            double[] aspectratio = new double[1];

            Calib3d.calibrationMatrixValues (camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);

            Debug.Log ("imageSize " + imageSize.ToString ());
            Debug.Log ("apertureWidth " + apertureWidth);
            Debug.Log ("apertureHeight " + apertureHeight);
            Debug.Log ("fovx " + fovx [0]);
            Debug.Log ("fovy " + fovy [0]);
            Debug.Log ("focalLength " + focalLength [0]);
            Debug.Log ("principalPoint " + principalPoint.ToString ());
            Debug.Log ("aspectratio " + aspectratio [0]);

            // To convert between the FOV conventions of OpenCV and Unity.
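            // The numerator is the symmetric full FOV of the pinhole model (principal point at the
            // image center); the denominator is the sum of the half-angles on either side of the
            // actual principal point, so the scale is 1.0 when the principal point is centered.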
            double fovXScale = (2.0 * Mathf.Atan ((float)(imageSize.width / (2.0 * fx)))) / (Mathf.Atan2 ((float)cx, (float)fx) + Mathf.Atan2 ((float)(imageSize.width - cx), (float)fx));
            double fovYScale = (2.0 * Mathf.Atan ((float)(imageSize.height / (2.0 * fy)))) / (Mathf.Atan2 ((float)cy, (float)fy) + Mathf.Atan2 ((float)(imageSize.height - cy), (float)fy));

            Debug.Log ("fovXScale " + fovXScale);
            Debug.Log ("fovYScale " + fovYScale);

            // Adjust Unity Camera FOV https://github.com/opencv/opencv/commit/8ed1945ccd52501f5ab22bdec6aa1f91f1e2cfd4
            if (widthScale < heightScale) {
                arCamera.fieldOfView = (float)(fovx [0] * fovXScale);
            } else {
                arCamera.fieldOfView = (float)(fovy [0] * fovYScale);
            }

            // Display objects near the camera.
            arCamera.nearClipPlane = 0.01f;

            Mat ids = new Mat ();
            List<Mat> corners = new List<Mat> ();
            List<Mat> rejectedCorners = new List<Mat> ();
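            // Output buffers for pose estimation: estimatePoseSingleMarkers fills rvecs/tvecs with
            // one rotation/translation vector per detected marker; rotMat receives the 3x3 rotation
            // matrix obtained from rvec via Rodrigues.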
            Mat rvecs = new Mat ();
            Mat tvecs = new Mat ();
            Mat rotMat = new Mat (3, 3, CvType.CV_64FC1);

            DetectorParameters detectorParams = DetectorParameters.create ();
            Dictionary dictionary = Aruco.getPredefinedDictionary ((int)dictionaryId);

            // detect markers.
            Aruco.detectMarkers (rgbMat, dictionary, corners, ids, detectorParams, rejectedCorners, camMatrix, distCoeffs);

            // if at least one marker detected
            if (ids.total () > 0) {
                Aruco.drawDetectedMarkers (rgbMat, corners, ids, new Scalar (0, 255, 0));

                // estimate pose.
                if (applyEstimationPose) {
                    Aruco.estimatePoseSingleMarkers (corners, markerLength, camMatrix, distCoeffs, rvecs, tvecs);

                    for (int i = 0; i < ids.total (); i++) {
                        using (Mat rvec = new Mat (rvecs, new OpenCVForUnity.CoreModule.Rect (0, i, 1, 1)))
                        using (Mat tvec = new Mat (tvecs, new OpenCVForUnity.CoreModule.Rect (0, i, 1, 1))) {
                            // Since this example processes an RGB image (not BGR), the axis-color correspondences are X: blue, Y: green, Z: red (usually X: red, Y: green, Z: blue).
                            Aruco.drawAxis (rgbMat, camMatrix, distCoeffs, rvec, tvec, markerLength * 0.5f);
                        }

                        // This example displays the AR object only on the first detected marker.
                        if (i == 0) {
                            // Position
                            double[] tvecArr = tvecs.get (i, 0);

                            // Rotation
                            double[] rvecArr = rvecs.get (i, 0);
                            Mat rvec = new Mat (3, 1, CvType.CV_64FC1);
                            rvec.put (0, 0, rvecArr);
                            Calib3d.Rodrigues (rvec, rotMat);

                            double[] rotMatArr = new double[rotMat.total ()];
                            rotMat.get (0, 0, rotMatArr);
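
                            // Pack the 3x3 rotation matrix and the translation vector into a
                            // 4x4 homogeneous transformation matrix (rotation in the upper-left
                            // block, translation in the last column).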
                            Matrix4x4 transformationM = new Matrix4x4 (); // from OpenCV
                            transformationM.SetRow (0, new Vector4 ((float)rotMatArr [0], (float)rotMatArr [1], (float)rotMatArr [2], (float)tvecArr [0]));
                            transformationM.SetRow (1, new Vector4 ((float)rotMatArr [3], (float)rotMatArr [4], (float)rotMatArr [5], (float)tvecArr [1]));
                            transformationM.SetRow (2, new Vector4 ((float)rotMatArr [6], (float)rotMatArr [7], (float)rotMatArr [8], (float)tvecArr [2]));
                            transformationM.SetRow (3, new Vector4 (0, 0, 0, 1));
                            Debug.Log ("transformationM " + transformationM.ToString ());

                            Matrix4x4 invertZM = Matrix4x4.TRS (Vector3.zero, Quaternion.identity, new Vector3 (1, 1, -1));
                            Debug.Log ("invertZM " + invertZM.ToString ());

                            Matrix4x4 invertYM = Matrix4x4.TRS (Vector3.zero, Quaternion.identity, new Vector3 (1, -1, 1));
                            Debug.Log ("invertYM " + invertYM.ToString ());

                            // right-handed coordinates system (OpenCV) to left-handed one (Unity)
                            Matrix4x4 ARM = invertYM * transformationM;

                            // Apply Z-axis inverted matrix.
                            ARM = ARM * invertZM;
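
                            // If shouldMoveARCamera is enabled, the AR object stays fixed and the
                            // AR camera is moved by the inverse transform; otherwise the AR object
                            // is placed relative to the AR camera.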
                            if (shouldMoveARCamera) {
                                ARM = arGameObject.transform.localToWorldMatrix * ARM.inverse;
                                Debug.Log ("ARM " + ARM.ToString ());

                                ARUtils.SetTransformFromMatrix (arCamera.transform, ref ARM);
                            } else {
                                ARM = arCamera.transform.localToWorldMatrix * ARM;
                                Debug.Log ("ARM " + ARM.ToString ());

                                ARUtils.SetTransformFromMatrix (arGameObject.transform, ref ARM);
                            }
                        }
                    }
                }
            }

            if (showRejectedCorners && rejectedCorners.Count > 0)
                Aruco.drawDetectedMarkers (rgbMat, rejectedCorners, new Mat (), new Scalar (255, 0, 0));

            Utils.matToTexture2D (rgbMat, texture);
        }

        private void ResetObjectTransform ()
        {
            // reset the AR camera and AR object transforms.
            Matrix4x4 i = Matrix4x4.identity;
            ARUtils.SetTransformFromMatrix (arCamera.transform, ref i);
            ARUtils.SetTransformFromMatrix (arGameObject.transform, ref i);
        }

        /// <summary>
        /// Raises the destroy event.
        /// </summary>
        void OnDestroy ()
        {
            if (rgbMat != null)
                rgbMat.Dispose ();
        }

        /// <summary>
        /// Raises the back button click event.
        /// </summary>
        public void OnBackButtonClick ()
        {
            SceneManager.LoadScene ("OpenCVForUnityExample");
        }

        /// <summary>
        /// Raises the dictionary id dropdown value changed event.
        /// </summary>
        public void OnDictionaryIdDropdownValueChanged (int result)
        {
            if ((int)dictionaryId != result) {
                dictionaryId = (ArUcoDictionary)result;

                ResetObjectTransform ();
                DetectMarkers ();
            }
        }

        /// <summary>
        /// Raises the show rejected corners toggle value changed event.
        /// </summary>
        public void OnShowRejectedCornersToggleValueChanged ()
        {
            if (showRejectedCorners != showRejectedCornersToggle.isOn) {
                showRejectedCorners = showRejectedCornersToggle.isOn;

                ResetObjectTransform ();
                DetectMarkers ();
            }
        }

        public enum ArUcoDictionary
        {
            DICT_4X4_50 = Aruco.DICT_4X4_50,
            DICT_4X4_100 = Aruco.DICT_4X4_100,
            DICT_4X4_250 = Aruco.DICT_4X4_250,
            DICT_4X4_1000 = Aruco.DICT_4X4_1000,
            DICT_5X5_50 = Aruco.DICT_5X5_50,
            DICT_5X5_100 = Aruco.DICT_5X5_100,
            DICT_5X5_250 = Aruco.DICT_5X5_250,
            DICT_5X5_1000 = Aruco.DICT_5X5_1000,
            DICT_6X6_50 = Aruco.DICT_6X6_50,
            DICT_6X6_100 = Aruco.DICT_6X6_100,
            DICT_6X6_250 = Aruco.DICT_6X6_250,
            DICT_6X6_1000 = Aruco.DICT_6X6_1000,
            DICT_7X7_50 = Aruco.DICT_7X7_50,
            DICT_7X7_100 = Aruco.DICT_7X7_100,
            DICT_7X7_250 = Aruco.DICT_7X7_250,
            DICT_7X7_1000 = Aruco.DICT_7X7_1000,
            DICT_ARUCO_ORIGINAL = Aruco.DICT_ARUCO_ORIGINAL,
        }
    }
}