// Tracking2DManager.cs
  1. using System;
  2. using System.Collections;
  3. using System.Runtime.InteropServices;
  4. using UnityEngine;
  5. using EZXR.Glass.SixDof;
  6. using EZXR.Glass.Core;
  7. using EZXR.Glass.Device;
  8. namespace EZXR.Glass.Tracking2D
  9. {
/// <summary>Camera source used for 2D marker localization frames.</summary>
public enum LocCam
{
    /// <summary>Use the RGB camera (currently the only enabled source).</summary>
    LocCam_RGB,
    // LocCam_FisheyeGray
}
  15. public class Tracking2DManager : MonoBehaviour
  16. {
  17. public LocCam locCamType = LocCam.LocCam_RGB;
  18. public AR2dMarkerInfo m_2dMarkerInfo;
  19. public float m_autoDectectInterval = 10.0f;
  20. private bool m_needDetecting = true;
  21. private float m_runTracking2dInterval = 0.01f;
  22. private Tracking2dController m_tracking2dController;
  23. private string m_assetPath;
  24. private NormalRGBCameraDevice rgbCameraDevice;
  25. private Core.EZVIOInputImage locCamImageBuffer;
  26. private bool hasExtractOST = false;
  27. private OSTMatrices ostMatrices = new OSTMatrices();
  28. private float[] imageIntrinsics = new float[8];
  29. private Pose imageTsHeadPose;
  30. private bool m_fisrtFrameArrived = true;
  31. private Matrix4x4 headToImage = Matrix4x4.identity;
  32. private void OnEnable()
  33. {
  34. if (locCamType == LocCam.LocCam_RGB)
  35. {
  36. StartCoroutine(startRGBCamera());
  37. }
  38. }
  39. private void OnDisable()
  40. {
  41. stopTrackSession();
  42. if (locCamType == LocCam.LocCam_RGB)
  43. {
  44. stopRGBCamera();
  45. }
  46. }
  47. // Start is called before the first frame update
  48. void Start()
  49. {
  50. }
  51. // Update is called once per frame
  52. void Update()
  53. {
  54. if (m_tracking2dController == null)
  55. {
  56. return;
  57. }
  58. EZXR_Detect2DResult detect2DResult = m_tracking2dController.GetDetectedMarker();
  59. if (m_needDetecting == true && detect2DResult.state == (int)EZXR_Detect2DState.EZXR_Detect2DState_Tracking)
  60. {
  61. Matrix4x4 cameraToWorldMatrix = Matrix4x4.identity;
  62. // Convert the EZXR_Detect2DResult pose's transform to a Matrix4x4
  63. cameraToWorldMatrix.SetRow(0, new Vector4(detect2DResult.imagePose.transform[0], detect2DResult.imagePose.transform[1], detect2DResult.imagePose.transform[2], detect2DResult.imagePose.transform[3]));
  64. cameraToWorldMatrix.SetRow(1, new Vector4(detect2DResult.imagePose.transform[4], detect2DResult.imagePose.transform[5], detect2DResult.imagePose.transform[6], detect2DResult.imagePose.transform[7]));
  65. cameraToWorldMatrix.SetRow(2, new Vector4(detect2DResult.imagePose.transform[8], detect2DResult.imagePose.transform[9], detect2DResult.imagePose.transform[10], detect2DResult.imagePose.transform[11]));
  66. cameraToWorldMatrix.SetRow(3, new Vector4(detect2DResult.imagePose.transform[12], detect2DResult.imagePose.transform[13], detect2DResult.imagePose.transform[14], detect2DResult.imagePose.transform[15]));
  67. m_2dMarkerInfo?.SetMarkerInfo(cameraToWorldMatrix, detect2DResult.markerAnchor);
  68. m_needDetecting = false;
  69. }
  70. }
  71. public void DetectImageOnce()
  72. {
  73. m_needDetecting = true;
  74. }
  75. public void DetectImageAuto(bool isAutoDectect)
  76. {
  77. if(isAutoDectect == true)
  78. {
  79. if (IsInvoking("DetectImageOnce") == false)
  80. {
  81. InvokeRepeating("DetectImageOnce", 0.0f, m_autoDectectInterval);
  82. }
  83. }
  84. else
  85. {
  86. CancelInvoke("DetectImageOnce");
  87. }
  88. }
  89. public int GetDectectState()
  90. {
  91. return m_needDetecting == true ? 0 : 1;
  92. }
  93. public NormalRGBCameraDevice GetNormalRGBCameraDevice()
  94. {
  95. return rgbCameraDevice;
  96. }
  97. public void startTrackSession(string assetPath)
  98. {
  99. if (assetPath == null)
  100. {
  101. UnityEngine.Debug.LogError("configuration resources assetPath is error");
  102. }
  103. UnityEngine.Debug.Log("startTrackSession : " + assetPath);
  104. if (Application.platform == RuntimePlatform.Android || Application.platform == RuntimePlatform.IPhonePlayer)
  105. {
  106. if (m_tracking2dController == null)
  107. {
  108. m_assetPath = assetPath;
  109. m_tracking2dController = new Tracking2dController();
  110. int state = m_tracking2dController.CreateTracking2d(m_assetPath);
  111. if (state == 0)
  112. {
  113. UnityEngine.Debug.Log("CreateTracking2d failed");
  114. m_tracking2dController = null;
  115. }
  116. else
  117. {
  118. UnityEngine.Debug.Log("CreateTracking2d success");
  119. }
  120. }
  121. }
  122. InvokeRepeating("UpdateCameraImage", 0.5f, m_runTracking2dInterval);
  123. }
  124. private void stopTrackSession()
  125. {
  126. if (m_tracking2dController != null)
  127. {
  128. m_tracking2dController.DestroyTracking2d();
  129. m_tracking2dController = null;
  130. }
  131. }
  132. private IEnumerator startRGBCamera()
  133. {
  134. yield return new WaitUntil(() => SessionManager.Instance != null && SessionManager.Instance.IsInited);
  135. rgbCameraDevice = new NormalRGBCameraDevice();
  136. rgbCameraDevice.Open();
  137. }
  138. private void stopRGBCamera()
  139. {
  140. if (rgbCameraDevice != null)
  141. rgbCameraDevice.Close();
  142. rgbCameraDevice = null;
  143. if (locCamImageBuffer.fullImg != IntPtr.Zero)
  144. {
  145. Marshal.FreeHGlobal(locCamImageBuffer.fullImg);
  146. locCamImageBuffer.fullImg = IntPtr.Zero;
  147. }
  148. }
  149. // test save images
  150. /*
  151. Texture2D texture0 = null;
  152. Texture2D texture1 = null;
  153. Texture2D texture2 = null;
  154. byte[] dataU;
  155. byte[] dataV;
  156. int count = 0;
  157. byte[] byteArray = null;
  158. */
  159. private void UpdateCameraImage()
  160. {
  161. if (m_needDetecting == false)
  162. {
  163. return;
  164. }
  165. if (ARFrame.SessionStatus != EZVIOState.EZVIOCameraState_Tracking)
  166. return;
  167. if (!hasExtractOST)
  168. {
  169. NativeTracking.GetOSTParams(ref ostMatrices);
  170. hasExtractOST = true;
  171. }
  172. if (locCamType == LocCam.LocCam_RGB)
  173. {
  174. if (rgbCameraDevice == null)
  175. {
  176. return;
  177. }
  178. bool isCameraMotionViolently = rgbCameraDevice.isCameraMotionViolently();
  179. if (!isCameraMotionViolently)
  180. {
  181. bool res = false;
  182. res = rgbCameraDevice.getCurrentImage(ref locCamImageBuffer, imageIntrinsics);
  183. if (res)
  184. {
  185. double timestamp_sec = locCamImageBuffer.timestamp;
  186. imageTsHeadPose = ARFrame.GetHistoricalHeadPose(timestamp_sec);
  187. if ((Application.platform == RuntimePlatform.Android || Application.platform == RuntimePlatform.IPhonePlayer) &&
  188. m_tracking2dController != null)
  189. {
  190. int width = (int)locCamImageBuffer.imgRes.width;
  191. int height = (int)locCamImageBuffer.imgRes.height;
  192. float[] intrinsicsArray = new float[8] { imageIntrinsics[0], imageIntrinsics[1], imageIntrinsics[2], imageIntrinsics[3],
  193. imageIntrinsics[4], imageIntrinsics[5], imageIntrinsics[6], imageIntrinsics[7]};
  194. double timestamp = locCamImageBuffer.timestamp;
  195. if(m_fisrtFrameArrived) {
  196. Matrix4x4 imageToHead = new Matrix4x4();
  197. imageToHead.SetRow(0, new Vector4(ostMatrices.T_RGB_Head[0], ostMatrices.T_RGB_Head[1], ostMatrices.T_RGB_Head[2], ostMatrices.T_RGB_Head[3]));
  198. imageToHead.SetRow(1, new Vector4(ostMatrices.T_RGB_Head[4], ostMatrices.T_RGB_Head[5], ostMatrices.T_RGB_Head[6], ostMatrices.T_RGB_Head[7]));
  199. imageToHead.SetRow(2, new Vector4(ostMatrices.T_RGB_Head[8], ostMatrices.T_RGB_Head[9], ostMatrices.T_RGB_Head[10], ostMatrices.T_RGB_Head[11]));
  200. imageToHead.SetRow(3, new Vector4(ostMatrices.T_RGB_Head[12], ostMatrices.T_RGB_Head[13], ostMatrices.T_RGB_Head[14], ostMatrices.T_RGB_Head[15]));
  201. headToImage = imageToHead.inverse;
  202. m_fisrtFrameArrived = false;
  203. }
  204. // image camera pose
  205. Matrix4x4 imageCamTransform;
  206. {
  207. Matrix4x4 headTransform = Matrix4x4.TRS(
  208. new Vector3(imageTsHeadPose.position.x, imageTsHeadPose.position.y, imageTsHeadPose.position.z),
  209. new Quaternion(imageTsHeadPose.rotation.x, imageTsHeadPose.rotation.y, imageTsHeadPose.rotation.z, imageTsHeadPose.rotation.w),
  210. new Vector3(1, 1, 1));
  211. imageCamTransform = headTransform * headToImage;
  212. }
  213. float[] cameraToWorldMatrix_array = new float[16];
  214. for (int i = 0; i < 4; i++)
  215. {
  216. cameraToWorldMatrix_array[i * 4] = imageCamTransform.GetRow(i).x;
  217. cameraToWorldMatrix_array[i * 4 + 1] = imageCamTransform.GetRow(i).y;
  218. cameraToWorldMatrix_array[i * 4 + 2] = imageCamTransform.GetRow(i).z;
  219. cameraToWorldMatrix_array[i * 4 + 3] = imageCamTransform.GetRow(i).w;
  220. }
  221. IntPtr imagePtr = locCamImageBuffer.fullImg;
  222. int state = m_tracking2dController.SetImage(imagePtr, cameraToWorldMatrix_array, intrinsicsArray, timestamp, width, height, (int)EZVIOImageFormat.EZVIOImageFormat_GREY);
  223. if (state == 1)
  224. {
  225. // UnityEngine.Debug.Log("SetImage success");
  226. }
  227. else
  228. {
  229. UnityEngine.Debug.Log("SetImage failed");
  230. }
  231. }
  232. }
  233. }
  234. }
  235. }
  236. }
  237. }