// ArUcoWebCamExample.cs
  1. using OpenCVForUnity.Calib3dModule;
  2. using OpenCVForUnity.CoreModule;
  3. using OpenCVForUnity.ImgprocModule;
  4. using OpenCVForUnity.ObjdetectModule;
  5. using OpenCVForUnity.UnityUtils;
  6. using OpenCVForUnity.UnityUtils.Helper;
  7. using System.Collections.Generic;
  8. using System.IO;
  9. using System.Xml.Serialization;
  10. using UnityEngine;
  11. using UnityEngine.SceneManagement;
  12. using UnityEngine.UI;
  13. namespace OpenCVForUnityExample
  14. {
  15. /// <summary>
  16. /// ArUco WebCam Example
  17. /// An example of marker-based AR view and camera pose estimation using the objdetect and aruco module.
  18. /// Referring to https://github.com/opencv/opencv_contrib/blob/4.x/modules/aruco/samples/detect_markers.cpp
  19. /// http://docs.opencv.org/3.1.0/d5/dae/tutorial_aruco_detection.html
  20. /// https://github.com/opencv/opencv/blob/4.x/modules/objdetect/test/test_arucodetection.cpp
  21. /// https://github.com/opencv/opencv/blob/4.x/modules/objdetect/test/test_boarddetection.cpp
  22. /// https://github.com/opencv/opencv/blob/4.x/modules/objdetect/test/test_charucodetection.cpp
  23. /// </summary>
  24. [RequireComponent(typeof(WebCamTextureToMatHelper))]
  25. public class ArUcoWebCamExample : MonoBehaviour
  26. {
  27. /// <summary>
  28. /// The marker type.
  29. /// </summary>
  30. public MarkerType markerType = MarkerType.CanonicalMarker;
  31. /// <summary>
  32. /// The marker type dropdown.
  33. /// </summary>
  34. public Dropdown markerTypeDropdown;
  35. /// <summary>
  36. /// The dictionary identifier.
  37. /// </summary>
  38. public ArUcoDictionary dictionaryId = ArUcoDictionary.DICT_6X6_250;
  39. /// <summary>
  40. /// The dictionary id dropdown.
  41. /// </summary>
  42. public Dropdown dictionaryIdDropdown;
  43. /// <summary>
  44. /// Determines if restores the camera parameters when the file exists.
  45. /// </summary>
  46. public bool useStoredCameraParameters = false;
  47. /// <summary>
  48. /// The toggle for switching to use the stored camera parameters.
  49. /// </summary>
  50. public Toggle useStoredCameraParametersToggle;
  51. /// <summary>
  52. /// Determines if shows rejected corners.
  53. /// </summary>
  54. public bool showRejectedCorners = false;
  55. /// <summary>
  56. /// The shows rejected corners toggle.
  57. /// </summary>
  58. public Toggle showRejectedCornersToggle;
  59. /// <summary>
  60. /// Determines if applied the pose estimation.
  61. /// </summary>
  62. public bool applyEstimationPose = true;
  63. /// <summary>
  64. /// Determines if refine marker detection. (only valid for ArUco boards)
  65. /// </summary>
  66. public bool refineMarkerDetection = true;
  67. /// <summary>
  68. /// The shows refine marker detection toggle.
  69. /// </summary>
  70. public Toggle refineMarkerDetectionToggle;
  71. [Space(10)]
  72. /// <summary>
  73. /// The length of the markers' side. Normally, unit is meters.
  74. /// </summary>
  75. public float markerLength = 0.1f;
  76. /// <summary>
  77. /// The AR game object.
  78. /// </summary>
  79. public GameObject arGameObject;
  80. /// <summary>
  81. /// The AR camera.
  82. /// </summary>
  83. public Camera arCamera;
  84. [Space(10)]
  85. /// <summary>
  86. /// Determines if request the AR camera moving.
  87. /// </summary>
  88. public bool shouldMoveARCamera = false;
  89. [Space(10)]
  90. /// <summary>
  91. /// Determines if enable low pass filter.
  92. /// </summary>
  93. public bool enableLowPassFilter;
  94. /// <summary>
  95. /// The enable low pass filter toggle.
  96. /// </summary>
  97. public Toggle enableLowPassFilterToggle;
  98. /// <summary>
  99. /// The position low pass. (Value in meters)
  100. /// </summary>
  101. public float positionLowPass = 0.005f;
  102. /// <summary>
  103. /// The rotation low pass. (Value in degrees)
  104. /// </summary>
  105. public float rotationLowPass = 2f;
  106. /// <summary>
  107. /// The old pose data.
  108. /// </summary>
  109. PoseData oldPoseData;
  110. /// <summary>
  111. /// The texture.
  112. /// </summary>
  113. Texture2D texture;
  114. /// <summary>
  115. /// The webcam texture to mat helper.
  116. /// </summary>
  117. WebCamTextureToMatHelper webCamTextureToMatHelper;
  118. /// <summary>
  119. /// The rgb mat.
  120. /// </summary>
  121. Mat rgbMat;
  122. /// <summary>
  123. /// The undistorted rgb mat.
  124. /// </summary>
  125. Mat undistortedRgbMat;
  126. /// <summary>
  127. /// The cameraparam matrix.
  128. /// </summary>
  129. Mat camMatrix;
  130. /// <summary>
  131. /// The distortion coeffs.
  132. /// </summary>
  133. MatOfDouble distCoeffs;
  134. /// <summary>
  135. /// The transformation matrix for AR.
  136. /// </summary>
  137. Matrix4x4 ARM;
  138. /// <summary>
  139. /// The FPS monitor.
  140. /// </summary>
  141. FpsMonitor fpsMonitor;
  142. // for CanonicalMarker.
  143. Mat ids;
  144. List<Mat> corners;
  145. List<Mat> rejectedCorners;
  146. Mat rotMat;
  147. Dictionary dictionary;
  148. Mat recoveredIdxs;
  149. ArucoDetector arucoDetector;
  150. // for GridBoard.
  151. // number of markers in X direction
  152. const int gridBoradMarkersX = 5;
  153. // number of markers in Y direction
  154. const int gridBoradMarkersY = 7;
  155. // marker side length (normally in meters)
  156. const float gridBoradMarkerLength = 0.04f;
  157. // separation between two markers (same unit as markerLength)
  158. const float gridBoradMarkerSeparation = 0.01f;
  159. GridBoard gridBoard;
  160. // for ChArUcoBoard.
  161. // number of chessboard squares in X direction
  162. const int chArUcoBoradSquaresX = 5;
  163. // number of chessboard squares in Y direction
  164. const int chArUcoBoradSquaresY = 7;
  165. // chessboard square side length (normally in meters)
  166. const float chArUcoBoradSquareLength = 0.04f;
  167. // marker side length (same unit than squareLength)
  168. const float chArUcoBoradMarkerLength = 0.02f;
  169. const int charucoMinMarkers = 2;
  170. Mat charucoCorners;
  171. Mat charucoIds;
  172. CharucoBoard charucoBoard;
  173. CharucoDetector charucoDetector;
  174. // for ChArUcoDiamondMarker.
  175. // size of the diamond squares in pixels
  176. const float diamondSquareLength = 0.1f;
  177. // size of the markers in pixels.
  178. const float diamondMarkerLength = 0.06f;
  179. List<Mat> diamondCorners;
  180. Mat diamondIds;
  181. CharucoBoard charucoDiamondBoard;
  182. CharucoDetector charucoDiamondDetector;
  183. // Use this for initialization
void Start()
{
    // If true, the error log of the native side OpenCV will be displayed on the Unity Editor Console.
    Utils.setDebugMode(true);

    fpsMonitor = GetComponent<FpsMonitor>();

    // Sync the UI widgets with the values serialized in the inspector.
    markerTypeDropdown.value = (int)markerType;
    dictionaryIdDropdown.value = (int)dictionaryId;
    useStoredCameraParametersToggle.isOn = useStoredCameraParameters;
    showRejectedCornersToggle.isOn = showRejectedCorners;
    refineMarkerDetectionToggle.isOn = refineMarkerDetection;
    // Marker refinement is only meaningful for board-based marker types.
    refineMarkerDetectionToggle.interactable = (markerType == MarkerType.GridBoard || markerType == MarkerType.ChArUcoBoard);
    enableLowPassFilterToggle.isOn = enableLowPassFilter;

    webCamTextureToMatHelper = gameObject.GetComponent<WebCamTextureToMatHelper>();

#if UNITY_ANDROID && !UNITY_EDITOR
    // Avoids the front camera low light issue that occurs in only some Android devices (e.g. Google Pixel, Pixel2).
    webCamTextureToMatHelper.avoidAndroidFrontCameraLowLightIssue = true;
#endif
    // Starts the camera asynchronously; OnWebCamTextureToMatHelperInitialized fires when ready.
    webCamTextureToMatHelper.Initialize();
}
  203. /// <summary>
  204. /// Raises the webcam texture to mat helper initialized event.
  205. /// </summary>
public void OnWebCamTextureToMatHelperInitialized()
{
    Debug.Log("OnWebCamTextureToMatHelperInitialized");

    Mat webCamTextureMat = webCamTextureToMatHelper.GetMat();

    // Create the display texture and show the camera image on this GameObject's material.
    texture = new Texture2D(webCamTextureMat.cols(), webCamTextureMat.rows(), TextureFormat.RGBA32, false);
    Utils.matToTexture2D(webCamTextureMat, texture);
    gameObject.GetComponent<Renderer>().material.mainTexture = texture;
    gameObject.transform.localScale = new Vector3(webCamTextureMat.cols(), webCamTextureMat.rows(), 1);
    Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

    if (fpsMonitor != null)
    {
        fpsMonitor.Add("width", webCamTextureMat.width().ToString());
        fpsMonitor.Add("height", webCamTextureMat.height().ToString());
        fpsMonitor.Add("orientation", Screen.orientation.ToString());
    }

    // Fit the orthographic background camera to the screen aspect ratio.
    float width = webCamTextureMat.width();
    float height = webCamTextureMat.height();
    float imageSizeScale = 1.0f;
    float widthScale = (float)Screen.width / width;
    float heightScale = (float)Screen.height / height;
    if (widthScale < heightScale)
    {
        Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
        imageSizeScale = (float)Screen.height / (float)Screen.width;
    }
    else
    {
        Camera.main.orthographicSize = height / 2;
    }

    // Set camera parameters: restore a stored calibration when available, otherwise
    // build an approximate default intrinsic matrix.
    double fx;
    double fy;
    double cx;
    double cy;
    string loadDirectoryPath = Path.Combine(Application.persistentDataPath, "ArUcoCameraCalibrationExample");
    string calibratonDirectoryName = "camera_parameters" + width + "x" + height;
    string loadCalibratonFileDirectoryPath = Path.Combine(loadDirectoryPath, calibratonDirectoryName);
    string loadPath = Path.Combine(loadCalibratonFileDirectoryPath, calibratonDirectoryName + ".xml");
    if (useStoredCameraParameters && File.Exists(loadPath))
    {
        // Restore intrinsics previously saved (presumably by the ArUcoCameraCalibrationExample
        // scene, given the directory name — TODO confirm).
        CameraParameters param;
        XmlSerializer serializer = new XmlSerializer(typeof(CameraParameters));
        using (var stream = new FileStream(loadPath, FileMode.Open))
        {
            param = (CameraParameters)serializer.Deserialize(stream);
        }
        camMatrix = param.GetCameraMatrix();
        distCoeffs = new MatOfDouble(param.GetDistortionCoefficients());
        // Row-major 3x3 layout: [0]=fx, [2]=cx, [4]=fy, [5]=cy.
        fx = param.camera_matrix[0];
        fy = param.camera_matrix[4];
        cx = param.camera_matrix[2];
        cy = param.camera_matrix[5];
        Debug.Log("Loaded CameraParameters from a stored XML file.");
        Debug.Log("loadPath: " + loadPath);
    }
    else
    {
        // No calibration available: approximate focal length with max(width, height),
        // put the principal point at the image center, and assume zero distortion.
        int max_d = (int)Mathf.Max(width, height);
        fx = max_d;
        fy = max_d;
        cx = width / 2.0f;
        cy = height / 2.0f;
        camMatrix = new Mat(3, 3, CvType.CV_64FC1);
        camMatrix.put(0, 0, fx);
        camMatrix.put(0, 1, 0);
        camMatrix.put(0, 2, cx);
        camMatrix.put(1, 0, 0);
        camMatrix.put(1, 1, fy);
        camMatrix.put(1, 2, cy);
        camMatrix.put(2, 0, 0);
        camMatrix.put(2, 1, 0);
        camMatrix.put(2, 2, 1.0f);
        distCoeffs = new MatOfDouble(0, 0, 0, 0);
        Debug.Log("Created a dummy CameraParameters.");
    }
    Debug.Log("camMatrix " + camMatrix.dump());
    Debug.Log("distCoeffs " + distCoeffs.dump());

    // Compute the field of view and related values from the camera matrix.
    Size imageSize = new Size(width * imageSizeScale, height * imageSizeScale);
    double apertureWidth = 0;
    double apertureHeight = 0;
    double[] fovx = new double[1];
    double[] fovy = new double[1];
    double[] focalLength = new double[1];
    Point principalPoint = new Point(0, 0);
    double[] aspectratio = new double[1];
    Calib3d.calibrationMatrixValues(camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);
    Debug.Log("imageSize " + imageSize.ToString());
    Debug.Log("apertureWidth " + apertureWidth);
    Debug.Log("apertureHeight " + apertureHeight);
    Debug.Log("fovx " + fovx[0]);
    Debug.Log("fovy " + fovy[0]);
    Debug.Log("focalLength " + focalLength[0]);
    Debug.Log("principalPoint " + principalPoint.ToString());
    Debug.Log("aspectratio " + aspectratio[0]);

    // To convert the difference of the FOV value of the OpenCV and Unity.
    // (compensates for an off-center principal point)
    double fovXScale = (2.0 * Mathf.Atan((float)(imageSize.width / (2.0 * fx)))) / (Mathf.Atan2((float)cx, (float)fx) + Mathf.Atan2((float)(imageSize.width - cx), (float)fx));
    double fovYScale = (2.0 * Mathf.Atan((float)(imageSize.height / (2.0 * fy)))) / (Mathf.Atan2((float)cy, (float)fy) + Mathf.Atan2((float)(imageSize.height - cy), (float)fy));
    Debug.Log("fovXScale " + fovXScale);
    Debug.Log("fovYScale " + fovYScale);

    // Adjust Unity Camera FOV https://github.com/opencv/opencv/commit/8ed1945ccd52501f5ab22bdec6aa1f91f1e2cfd4
    if (widthScale < heightScale)
    {
        arCamera.fieldOfView = (float)(fovx[0] * fovXScale);
    }
    else
    {
        arCamera.fieldOfView = (float)(fovy[0] * fovYScale);
    }
    // Display objects near the camera.
    arCamera.nearClipPlane = 0.01f;

    // Working buffers reused every frame in Update().
    rgbMat = new Mat(webCamTextureMat.rows(), webCamTextureMat.cols(), CvType.CV_8UC3);
    undistortedRgbMat = new Mat();
    ids = new Mat();
    corners = new List<Mat>();
    rejectedCorners = new List<Mat>();
    rotMat = new Mat(3, 3, CvType.CV_64FC1);
    dictionary = Objdetect.getPredefinedDictionary((int)dictionaryId);
    recoveredIdxs = new Mat();

    // Detector configuration shared by all marker types.
    DetectorParameters detectorParams = new DetectorParameters();
    detectorParams.set_minDistanceToBorder(3);
    detectorParams.set_useAruco3Detection(true);
    detectorParams.set_cornerRefinementMethod(Objdetect.CORNER_REFINE_SUBPIX);
    detectorParams.set_minSideLengthCanonicalImg(16);
    detectorParams.set_errorCorrectionRate(0.8);
    RefineParameters refineParameters = new RefineParameters(10f, 3f, true);
    arucoDetector = new ArucoDetector(dictionary, detectorParams, refineParameters);

    // GridBoard layout used by the GridBoard marker type.
    gridBoard = new GridBoard(new Size(gridBoradMarkersX, gridBoradMarkersY), gridBoradMarkerLength, gridBoradMarkerSeparation, dictionary);

    // ChArUco board detector.
    charucoCorners = new Mat();
    charucoIds = new Mat();
    charucoBoard = new CharucoBoard(new Size(chArUcoBoradSquaresX, chArUcoBoradSquaresY), chArUcoBoradSquareLength, chArUcoBoradMarkerLength, dictionary);
    CharucoParameters charucoParameters = new CharucoParameters();
    charucoParameters.set_cameraMatrix(camMatrix);
    charucoParameters.set_distCoeffs(distCoeffs);
    charucoParameters.set_minMarkers(charucoMinMarkers);
    charucoDetector = new CharucoDetector(charucoBoard, charucoParameters, detectorParams, refineParameters);

    // ChArUco diamond detector (a diamond is modeled as a 3x3 ChArUco board).
    diamondCorners = new List<Mat>();
    diamondIds = new Mat(1, 1, CvType.CV_32SC4);
    charucoDiamondBoard = new CharucoBoard(new Size(3, 3), diamondSquareLength, diamondMarkerLength, dictionary);
    CharucoParameters charucoDiamondParameters = new CharucoParameters();
    charucoDiamondParameters.set_cameraMatrix(camMatrix);
    charucoDiamondParameters.set_distCoeffs(distCoeffs);
    charucoDiamondParameters.set_tryRefineMarkers(true);
    charucoDiamondDetector = new CharucoDetector(charucoDiamondBoard, charucoDiamondParameters, detectorParams, refineParameters);

    // If the WebCam is front facing, flip the Mat horizontally. Required for successful detection of AR markers.
    if (webCamTextureToMatHelper.IsFrontFacing() && !webCamTextureToMatHelper.flipHorizontal)
    {
        webCamTextureToMatHelper.flipHorizontal = true;
    }
    else if (!webCamTextureToMatHelper.IsFrontFacing() && webCamTextureToMatHelper.flipHorizontal)
    {
        webCamTextureToMatHelper.flipHorizontal = false;
    }
}
  360. /// <summary>
  361. /// Raises the webcam texture to mat helper disposed event.
  362. /// </summary>
  363. public void OnWebCamTextureToMatHelperDisposed()
  364. {
  365. Debug.Log("OnWebCamTextureToMatHelperDisposed");
  366. if (rgbMat != null)
  367. rgbMat.Dispose();
  368. if (undistortedRgbMat != null)
  369. undistortedRgbMat.Dispose();
  370. if (texture != null)
  371. {
  372. Texture2D.Destroy(texture);
  373. texture = null;
  374. }
  375. if (arucoDetector != null)
  376. arucoDetector.Dispose();
  377. if (charucoDetector != null)
  378. charucoDetector.Dispose();
  379. if (charucoDiamondDetector != null)
  380. charucoDiamondDetector.Dispose();
  381. if (ids != null)
  382. ids.Dispose();
  383. foreach (var item in corners)
  384. {
  385. item.Dispose();
  386. }
  387. corners.Clear();
  388. foreach (var item in rejectedCorners)
  389. {
  390. item.Dispose();
  391. }
  392. rejectedCorners.Clear();
  393. if (rotMat != null)
  394. rotMat.Dispose();
  395. if (recoveredIdxs != null)
  396. recoveredIdxs.Dispose();
  397. if (gridBoard != null)
  398. gridBoard.Dispose();
  399. if (charucoCorners != null)
  400. charucoCorners.Dispose();
  401. if (charucoIds != null)
  402. charucoIds.Dispose();
  403. if (charucoBoard != null)
  404. charucoBoard.Dispose();
  405. foreach (var item in diamondCorners)
  406. {
  407. item.Dispose();
  408. }
  409. diamondCorners.Clear();
  410. if (diamondIds != null)
  411. diamondIds.Dispose();
  412. if (charucoDiamondBoard != null)
  413. charucoDiamondBoard.Dispose();
  414. }
  415. /// <summary>
  416. /// Raises the webcam texture to mat helper error occurred event.
  417. /// </summary>
  418. /// <param name="errorCode">Error code.</param>
  419. public void OnWebCamTextureToMatHelperErrorOccurred(WebCamTextureToMatHelper.ErrorCode errorCode)
  420. {
  421. Debug.Log("OnWebCamTextureToMatHelperErrorOccurred " + errorCode);
  422. }
  423. // Update is called once per frame
void Update()
{
    if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
    {
        Mat rgbaMat = webCamTextureToMatHelper.GetMat();

        // Detection runs on an RGB copy of the RGBA camera frame.
        Imgproc.cvtColor(rgbaMat, rgbMat, Imgproc.COLOR_RGBA2RGB);

        switch (markerType)
        {
            default:
            case MarkerType.CanonicalMarker:
                // Undistort first so detection and pose estimation run on a rectified image.
                Calib3d.undistort(rgbMat, undistortedRgbMat, camMatrix, distCoeffs);
                arucoDetector.detectMarkers(undistortedRgbMat, corners, ids, rejectedCorners);
                // Draw only when corners and ids are consistent (one corner set per id).
                if (corners.Count == ids.total() || ids.total() == 0)
                    Objdetect.drawDetectedMarkers(undistortedRgbMat, corners, ids, new Scalar(0, 255, 0));
                if (applyEstimationPose)
                {
                    // If at least one marker detected
                    if (ids.total() > 0)
                        EstimatePoseCanonicalMarker(undistortedRgbMat);
                }
                break;
            case MarkerType.GridBoard:
                Calib3d.undistort(rgbMat, undistortedRgbMat, camMatrix, distCoeffs);
                arucoDetector.detectMarkers(undistortedRgbMat, corners, ids, rejectedCorners);
                // Optionally recover markers that the board layout predicts but detection missed.
                if (refineMarkerDetection)
                    arucoDetector.refineDetectedMarkers(undistortedRgbMat, gridBoard, corners, ids, rejectedCorners, camMatrix, distCoeffs, recoveredIdxs);
                if (corners.Count == ids.total() || ids.total() == 0)
                    Objdetect.drawDetectedMarkers(undistortedRgbMat, corners, ids, new Scalar(0, 255, 0));
                if (applyEstimationPose)
                {
                    // If at least one marker detected
                    if (ids.total() > 0)
                        EstimatePoseGridBoard(undistortedRgbMat);
                }
                break;
            case MarkerType.ChArUcoBoard:
                /*
                // (kept for reference) Calling detectBoard directly, without first checking
                // that any markers were found, throws:
                Calib3d.undistort(rgbMat, undistortedRgbMat, camMatrix, distCoeffs);
                ids = new Mat();
                corners = new List<Mat>();
                // When fails to detect any markers, it throws the following error:
                // objdetect::detectBoard_12() : OpenCV(4.8.0-dev) \opencv\modules\objdetect\src\aruco\aruco_board.cpp:39: error: (-215:Assertion failed) detectedIds.total() > 0ull in function 'cv::aruco::Board::Impl::matchImagePoints'
                charucoDetector.detectBoard(undistortedRgbMat, charucoCorners, charucoIds, corners, ids); // error
                if (corners.Count == ids.total() || ids.total() == 0)
                    Objdetect.drawDetectedMarkers(undistortedRgbMat, corners, ids, new Scalar(0, 255, 0));
                if (charucoCorners.total() == charucoIds.total() || charucoIds.total() == 0)
                    Objdetect.drawDetectedCornersCharuco(undistortedRgbMat, charucoCorners, charucoIds, new Scalar(0, 0, 255));
                if (applyEstimationPose)
                {
                    // if at least one charuco corner detected
                    if (charucoIds.total() > 0)
                        EstimatePoseChArUcoBoard(undistortedRgbMat);
                }
                */
                // Workaround: detect markers first, then only run detectBoard when at
                // least one marker exists (detectBoard asserts on an empty detection).
                Calib3d.undistort(rgbMat, undistortedRgbMat, camMatrix, distCoeffs);
                arucoDetector.detectMarkers(undistortedRgbMat, corners, ids, rejectedCorners);
                if (refineMarkerDetection)
                    // https://github.com/opencv/opencv/blob/377be68d923e40900ac5526242bcf221e3f355e5/modules/objdetect/src/aruco/charuco_detector.cpp#L310
                    arucoDetector.refineDetectedMarkers(undistortedRgbMat, charucoBoard, corners, ids, rejectedCorners);
                // If at least one marker detected
                if (ids.total() > 0)
                {
                    charucoDetector.detectBoard(undistortedRgbMat, charucoCorners, charucoIds, corners, ids);
                    if (corners.Count == ids.total() || ids.total() == 0)
                        Objdetect.drawDetectedMarkers(undistortedRgbMat, corners, ids, new Scalar(0, 255, 0));
                    if (charucoCorners.total() == charucoIds.total() || charucoIds.total() == 0)
                        Objdetect.drawDetectedCornersCharuco(undistortedRgbMat, charucoCorners, charucoIds, new Scalar(0, 0, 255));
                    if (applyEstimationPose)
                    {
                        // if at least one charuco board detected
                        if (charucoIds.total() > 0)
                            EstimatePoseChArUcoBoard(undistortedRgbMat);
                    }
                }
                break;
            case MarkerType.ChArUcoDiamondMarker:
                Calib3d.undistort(rgbMat, undistortedRgbMat, camMatrix, distCoeffs);
                // NOTE(review): ids and corners are re-created every frame here without
                // disposing the previous instances, leaking the old native Mat each frame —
                // presumably a workaround for detectDiamonds; confirm and consider disposing first.
                ids = new Mat();
                corners = new List<Mat>();
                charucoDiamondDetector.detectDiamonds(undistortedRgbMat, diamondCorners, diamondIds, corners, ids);
                if (corners.Count == ids.total() || ids.total() == 0)
                    Objdetect.drawDetectedMarkers(undistortedRgbMat, corners, ids, new Scalar(0, 255, 0));
                if (diamondCorners.Count == diamondIds.total() || diamondIds.total() == 0)
                    Objdetect.drawDetectedDiamonds(undistortedRgbMat, diamondCorners, diamondIds, new Scalar(0, 0, 255));
                if (applyEstimationPose)
                {
                    // If at least one diamonds detected
                    if (diamondIds.total() > 0)
                        EstimatePoseChArUcoDiamondMarker(undistortedRgbMat);
                }
                /*
                // (kept for reference) Alternative flow: run detectMarkers first and reuse
                // its output for detectDiamonds.
                Calib3d.undistort(rgbMat, undistortedRgbMat, camMatrix, distCoeffs);
                arucoDetector.detectMarkers(undistortedRgbMat, corners, ids, rejectedCorners);
                // If at least one marker detected
                if (ids.total() > 0)
                {
                    charucoDiamondDetector.detectDiamonds(undistortedRgbMat, diamondCorners, diamondIds, corners, ids);
                    if (corners.Count == ids.total() || ids.total() == 0)
                        Objdetect.drawDetectedMarkers(undistortedRgbMat, corners, ids, new Scalar(0, 255, 0));
                    if (diamondCorners.Count == diamondIds.total() || diamondIds.total() == 0)
                        Objdetect.drawDetectedDiamonds(undistortedRgbMat, diamondCorners, diamondIds, new Scalar(0, 0, 255));
                    if (applyEstimationPose)
                    {
                        // If at least one diamonds detected
                        if (diamondIds.total() > 0)
                            EstimatePoseChArUcoDiamondMarker(undistortedRgbMat);
                    }
                }
                */
                break;
        }

        // Optionally visualize rejected candidates in red for tuning detector parameters.
        if (showRejectedCorners && rejectedCorners.Count > 0)
            Objdetect.drawDetectedMarkers(undistortedRgbMat, rejectedCorners, new Mat(), new Scalar(255, 0, 0));

        //Imgproc.putText (rgbaMat, "W:" + rgbaMat.width () + " H:" + rgbaMat.height () + " SO:" + Screen.orientation, new Point (5, rgbaMat.rows () - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);

        // Copy the annotated frame back into the RGBA buffer and update the display texture.
        Imgproc.cvtColor(undistortedRgbMat, rgbaMat, Imgproc.COLOR_RGB2RGBA);
        Utils.matToTexture2D(rgbaMat, texture);
    }
}
  550. private void EstimatePoseCanonicalMarker(Mat rgbMat)
  551. {
  552. using (MatOfPoint3f objPoints = new MatOfPoint3f(
  553. new Point3(-markerLength / 2f, markerLength / 2f, 0),
  554. new Point3(markerLength / 2f, markerLength / 2f, 0),
  555. new Point3(markerLength / 2f, -markerLength / 2f, 0),
  556. new Point3(-markerLength / 2f, -markerLength / 2f, 0)
  557. ))
  558. {
  559. for (int i = 0; i < corners.Count; i++)
  560. {
  561. using (Mat rvec = new Mat(1, 1, CvType.CV_64FC3))
  562. using (Mat tvec = new Mat(1, 1, CvType.CV_64FC3))
  563. using (Mat corner_4x1 = corners[i].reshape(2, 4)) // 1*4*CV_32FC2 => 4*1*CV_32FC2
  564. using (MatOfPoint2f imagePoints = new MatOfPoint2f(corner_4x1))
  565. {
  566. // Calculate pose for each marker
  567. Calib3d.solvePnP(objPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);
  568. // In this example we are processing with RGB color image, so Axis-color correspondences are X: blue, Y: green, Z: red. (Usually X: red, Y: green, Z: blue)
  569. Calib3d.drawFrameAxes(rgbMat, camMatrix, distCoeffs, rvec, tvec, markerLength * 0.5f);
  570. // This example can display the ARObject on only first detected marker.
  571. if (i == 0)
  572. {
  573. UpdateARObjectTransform(rvec, tvec);
  574. }
  575. }
  576. }
  577. }
  578. }
  579. private void EstimatePoseGridBoard(Mat rgbMat)
  580. {
  581. if (ids.total() == 0)
  582. return;
  583. // https://github.com/opencv/opencv_contrib/blob/f10c84d48b0714f2b408c9e5cccfac1277c8e6cc/modules/aruco/src/aruco.cpp#L43
  584. if (corners.Count != ids.total())
  585. return;
  586. using (Mat rvec = new Mat(1, 1, CvType.CV_64FC3))
  587. using (Mat tvec = new Mat(1, 1, CvType.CV_64FC3))
  588. using (Mat objPoints = new Mat())
  589. using (Mat imgPoints = new Mat())
  590. {
  591. // Get object and image points for the solvePnP function
  592. gridBoard.matchImagePoints(corners, ids, objPoints, imgPoints);
  593. if (imgPoints.total() != objPoints.total())
  594. return;
  595. if (objPoints.total() == 0) // 0 of the detected markers in board
  596. return;
  597. // Find pose
  598. MatOfPoint3f objPoints_p3f = new MatOfPoint3f(objPoints);
  599. MatOfPoint2f imgPoints_p3f = new MatOfPoint2f(imgPoints);
  600. Calib3d.solvePnP(objPoints_p3f, imgPoints_p3f, camMatrix, distCoeffs, rvec, tvec);
  601. // If at least one board marker detected
  602. int markersOfBoardDetected = (int)objPoints.total() / 4;
  603. if (markersOfBoardDetected > 0)
  604. {
  605. // In this example we are processing with RGB color image, so Axis-color correspondences are X: blue, Y: green, Z: red. (Usually X: red, Y: green, Z: blue)
  606. Calib3d.drawFrameAxes(rgbMat, camMatrix, distCoeffs, rvec, tvec, markerLength * 0.5f);
  607. UpdateARObjectTransform(rvec, tvec);
  608. }
  609. }
  610. }
  611. private void EstimatePoseChArUcoBoard(Mat rgbMat)
  612. {
  613. /*
  614. //
  615. using (Mat rvec = new Mat(1, 1, CvType.CV_64FC3))
  616. using (Mat tvec = new Mat(1, 1, CvType.CV_64FC3))
  617. {
  618. bool valid = Aruco.estimatePoseCharucoBoard(charucoCorners, charucoIds, charucoBoard, camMatrix, distCoeffs, rvec, tvec); // error
  619. // if at least one board marker detected
  620. if (valid)
  621. {
  622. // In this example we are processing with RGB color image, so Axis-color correspondences are X: blue, Y: green, Z: red. (Usually X: red, Y: green, Z: blue)
  623. Calib3d.drawFrameAxes(rgbMat, camMatrix, distCoeffs, rvec, tvec, markerLength * 0.5f);
  624. UpdateARObjectTransform(rvec, tvec);
  625. }
  626. }
  627. //
  628. */
  629. //
  630. // https://github.com/opencv/opencv_contrib/blob/f10c84d48b0714f2b408c9e5cccfac1277c8e6cc/modules/aruco/src/aruco.cpp#L63
  631. if (charucoCorners.total() != charucoIds.total())
  632. return;
  633. if (charucoIds.total() < 4)
  634. return;
  635. using (Mat rvec = new Mat(1, 1, CvType.CV_64FC3))
  636. using (Mat tvec = new Mat(1, 1, CvType.CV_64FC3))
  637. using (Mat objPoints = new Mat())
  638. using (Mat imgPoints = new Mat())
  639. {
  640. // Get object and image points for the solvePnP function
  641. List<Mat> charucoCorners_list = new List<Mat>();
  642. for (int i = 0; i < charucoCorners.rows(); i++)
  643. {
  644. charucoCorners_list.Add(charucoCorners.row(i));
  645. }
  646. charucoBoard.matchImagePoints(charucoCorners_list, charucoIds, objPoints, imgPoints);
  647. // Find pose
  648. MatOfPoint3f objPoints_p3f = new MatOfPoint3f(objPoints);
  649. MatOfPoint2f imgPoints_p3f = new MatOfPoint2f(imgPoints);
  650. try
  651. {
  652. Calib3d.solvePnP(objPoints_p3f, imgPoints_p3f, camMatrix, distCoeffs, rvec, tvec);
  653. }
  654. catch (CvException e)
  655. {
  656. Debug.LogWarning("estimatePoseCharucoBoard: " + e);
  657. return;
  658. }
  659. // In this example we are processing with RGB color image, so Axis-color correspondences are X: blue, Y: green, Z: red. (Usually X: red, Y: green, Z: blue)
  660. Calib3d.drawFrameAxes(rgbMat, camMatrix, distCoeffs, rvec, tvec, markerLength * 0.5f);
  661. UpdateARObjectTransform(rvec, tvec);
  662. //
  663. }
  664. }
  665. private void EstimatePoseChArUcoDiamondMarker(Mat rgbMat)
  666. {
  667. using (MatOfPoint3f objPoints = new MatOfPoint3f(
  668. new Point3(-markerLength / 2f, markerLength / 2f, 0),
  669. new Point3(markerLength / 2f, markerLength / 2f, 0),
  670. new Point3(markerLength / 2f, -markerLength / 2f, 0),
  671. new Point3(-markerLength / 2f, -markerLength / 2f, 0)
  672. ))
  673. {
  674. for (int i = 0; i < diamondCorners.Count; i++)
  675. {
  676. using (Mat rvec = new Mat(1, 1, CvType.CV_64FC3))
  677. using (Mat tvec = new Mat(1, 1, CvType.CV_64FC3))
  678. using (Mat corner_4x1 = diamondCorners[i].reshape(2, 4)) // 1*4*CV_32FC2 => 4*1*CV_32FC2
  679. using (MatOfPoint2f imagePoints = new MatOfPoint2f(corner_4x1))
  680. {
  681. // Calculate pose for each marker
  682. Calib3d.solvePnP(objPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);
  683. // In this example we are processing with RGB color image, so Axis-color correspondences are X: blue, Y: green, Z: red. (Usually X: red, Y: green, Z: blue)
  684. Calib3d.drawFrameAxes(rgbMat, camMatrix, distCoeffs, rvec, tvec, markerLength * 0.5f);
  685. // This example can display the ARObject on only first detected marker.
  686. if (i == 0)
  687. {
  688. UpdateARObjectTransform(rvec, tvec);
  689. }
  690. }
  691. }
  692. }
  693. }
  694. private void UpdateARObjectTransform(Mat rvec, Mat tvec)
  695. {
  696. // Convert to unity pose data.
  697. double[] rvecArr = new double[3];
  698. rvec.get(0, 0, rvecArr);
  699. double[] tvecArr = new double[3];
  700. tvec.get(0, 0, tvecArr);
  701. PoseData poseData = ARUtils.ConvertRvecTvecToPoseData(rvecArr, tvecArr);
  702. // Changes in pos/rot below these thresholds are ignored.
  703. if (enableLowPassFilter)
  704. {
  705. ARUtils.LowpassPoseData(ref oldPoseData, ref poseData, positionLowPass, rotationLowPass);
  706. }
  707. oldPoseData = poseData;
  708. // Convert to transform matrix.
  709. ARM = ARUtils.ConvertPoseDataToMatrix(ref poseData, true);
  710. if (shouldMoveARCamera)
  711. {
  712. ARM = arGameObject.transform.localToWorldMatrix * ARM.inverse;
  713. ARUtils.SetTransformFromMatrix(arCamera.transform, ref ARM);
  714. }
  715. else
  716. {
  717. ARM = arCamera.transform.localToWorldMatrix * ARM;
  718. ARUtils.SetTransformFromMatrix(arGameObject.transform, ref ARM);
  719. }
  720. }
  721. private void ResetObjectTransform()
  722. {
  723. // reset AR object transform.
  724. Matrix4x4 i = Matrix4x4.identity;
  725. ARUtils.SetTransformFromMatrix(arCamera.transform, ref i);
  726. ARUtils.SetTransformFromMatrix(arGameObject.transform, ref i);
  727. }
  728. /// <summary>
  729. /// Raises the destroy event.
  730. /// </summary>
void OnDestroy()
{
    // Release the webcam texture and native Mat resources held by the helper.
    webCamTextureToMatHelper.Dispose();

    // Turn OpenCV for Unity debug mode back off (presumably enabled during setup — not visible in this chunk).
    Utils.setDebugMode(false);
}
  736. /// <summary>
  737. /// Raises the back button click event.
  738. /// </summary>
public void OnBackButtonClick()
{
    // Return to the example-list scene.
    SceneManager.LoadScene("OpenCVForUnityExample");
}
  743. /// <summary>
  744. /// Raises the play button click event.
  745. /// </summary>
public void OnPlayButtonClick()
{
    // Resume/start the webcam capture.
    webCamTextureToMatHelper.Play();
}
  750. /// <summary>
  751. /// Raises the pause button click event.
  752. /// </summary>
public void OnPauseButtonClick()
{
    // Pause the webcam capture without releasing it.
    webCamTextureToMatHelper.Pause();
}
  757. /// <summary>
  758. /// Raises the stop button click event.
  759. /// </summary>
public void OnStopButtonClick()
{
    // Stop the webcam capture.
    webCamTextureToMatHelper.Stop();
}
  764. /// <summary>
  765. /// Raises the change camera button click event.
  766. /// </summary>
public void OnChangeCameraButtonClick()
{
    // Toggle between front- and rear-facing cameras; the helper reinitializes on change.
    webCamTextureToMatHelper.requestedIsFrontFacing = !webCamTextureToMatHelper.requestedIsFrontFacing;
}
  771. /// <summary>
  772. /// Raises the marker type dropdown value changed event.
  773. /// </summary>
  774. public void OnMarkerTypeDropdownValueChanged(int result)
  775. {
  776. if ((int)markerType != result)
  777. {
  778. markerType = (MarkerType)result;
  779. refineMarkerDetectionToggle.interactable = (markerType == MarkerType.GridBoard || markerType == MarkerType.ChArUcoBoard);
  780. ResetObjectTransform();
  781. if (webCamTextureToMatHelper.IsInitialized())
  782. webCamTextureToMatHelper.Initialize();
  783. }
  784. }
  785. /// <summary>
  786. /// Raises the dictionary id dropdown value changed event.
  787. /// </summary>
  788. public void OnDictionaryIdDropdownValueChanged(int result)
  789. {
  790. if ((int)dictionaryId != result)
  791. {
  792. dictionaryId = (ArUcoDictionary)result;
  793. dictionary = Objdetect.getPredefinedDictionary((int)dictionaryId);
  794. ResetObjectTransform();
  795. if (webCamTextureToMatHelper.IsInitialized())
  796. webCamTextureToMatHelper.Initialize();
  797. }
  798. }
  799. /// <summary>
  800. /// Raises the use stored camera parameters toggle value changed event.
  801. /// </summary>
  802. public void OnUseStoredCameraParametersToggleValueChanged()
  803. {
  804. if (useStoredCameraParameters != useStoredCameraParametersToggle.isOn)
  805. {
  806. useStoredCameraParameters = useStoredCameraParametersToggle.isOn;
  807. if (webCamTextureToMatHelper != null && webCamTextureToMatHelper.IsInitialized())
  808. webCamTextureToMatHelper.Initialize();
  809. }
  810. }
  811. /// <summary>
  812. /// Raises the show rejected corners toggle value changed event.
  813. /// </summary>
public void OnShowRejectedCornersToggleValueChanged()
{
    // Mirror the UI toggle into the flag read by the detection loop.
    showRejectedCorners = showRejectedCornersToggle.isOn;
}
  818. /// <summary>
  819. /// Raises the refine marker detection toggle value changed event.
  820. /// </summary>
public void OnRefineMarkerDetectionToggleValueChanged()
{
    // Mirror the UI toggle into the flag read by the detection loop.
    refineMarkerDetection = refineMarkerDetectionToggle.isOn;
}
  825. /// <summary>
  826. /// Raises the enable low pass filter toggle value changed event.
  827. /// </summary>
public void OnEnableLowPassFilterToggleValueChanged()
{
    // Mirror the UI toggle into the flag read by UpdateARObjectTransform.
    enableLowPassFilter = enableLowPassFilterToggle.isOn;
}
/// <summary>
/// The ArUco marker layouts this example can detect.
/// NOTE: the ordinal values (0..3) are mapped directly from the marker-type
/// dropdown index (see OnMarkerTypeDropdownValueChanged) — do not reorder.
/// </summary>
public enum MarkerType
{
    CanonicalMarker,      // single standalone marker
    GridBoard,            // planar grid of markers
    ChArUcoBoard,         // chessboard + ArUco hybrid board
    ChArUcoDiamondMarker  // 3x3 diamond (chessboard square ringed by 4 markers)
}
/// <summary>
/// Predefined ArUco dictionaries, with each member pinned to the matching
/// OpenCV Objdetect constant so the enum value can be passed straight to
/// Objdetect.getPredefinedDictionary (see OnDictionaryIdDropdownValueChanged).
/// Naming: DICT_{marker bit grid}_{number of markers in the dictionary}.
/// </summary>
public enum ArUcoDictionary
{
    DICT_4X4_50 = Objdetect.DICT_4X4_50,
    DICT_4X4_100 = Objdetect.DICT_4X4_100,
    DICT_4X4_250 = Objdetect.DICT_4X4_250,
    DICT_4X4_1000 = Objdetect.DICT_4X4_1000,
    DICT_5X5_50 = Objdetect.DICT_5X5_50,
    DICT_5X5_100 = Objdetect.DICT_5X5_100,
    DICT_5X5_250 = Objdetect.DICT_5X5_250,
    DICT_5X5_1000 = Objdetect.DICT_5X5_1000,
    DICT_6X6_50 = Objdetect.DICT_6X6_50,
    DICT_6X6_100 = Objdetect.DICT_6X6_100,
    DICT_6X6_250 = Objdetect.DICT_6X6_250,
    DICT_6X6_1000 = Objdetect.DICT_6X6_1000,
    DICT_7X7_50 = Objdetect.DICT_7X7_50,
    DICT_7X7_100 = Objdetect.DICT_7X7_100,
    DICT_7X7_250 = Objdetect.DICT_7X7_250,
    DICT_7X7_1000 = Objdetect.DICT_7X7_1000,
    DICT_ARUCO_ORIGINAL = Objdetect.DICT_ARUCO_ORIGINAL, // the original ArUco library dictionary
}
  859. }
  860. }