using System;
using System.Collections;
using System.Runtime.InteropServices;
using UnityEngine;
using EZXR.Glass.SixDof;
using EZXR.Glass.Core;
using EZXR.Glass.Device;

namespace EZXR.Glass.Tracking2D
{
    /// <summary>Camera source used for 2D marker localization.</summary>
    public enum LocCam
    {
        LocCam_RGB,
        // LocCam_FisheyeGray
    }

    /// <summary>
    /// Drives 2D image-marker detection: feeds RGB camera frames (paired with the
    /// historical head pose at each frame's timestamp) into the native Tracking2d
    /// session, and publishes the detected marker pose to <see cref="m_2dMarkerInfo"/>.
    /// </summary>
    public class Tracking2DManager : MonoBehaviour
    {
        /// <summary>Which camera supplies localization frames. Only RGB is currently enabled.</summary>
        public LocCam locCamType = LocCam.LocCam_RGB;

        /// <summary>Receiver for the detected marker pose (may be left unassigned).</summary>
        public AR2dMarkerInfo m_2dMarkerInfo;

        /// <summary>Interval in seconds between automatic re-detections (see <see cref="DetectImageAuto"/>).</summary>
        // NOTE: name keeps the original spelling ("Dectect") because it is a public,
        // Unity-serialized field; renaming would break existing scenes/prefabs.
        public float m_autoDectectInterval = 10.0f;

        // True while we want the next camera frame pushed to the native detector;
        // cleared once a tracking result has been consumed in Update().
        private bool m_needDetecting = true;
        // Period of the UpdateCameraImage pump started by startTrackSession().
        private float m_runTracking2dInterval = 0.01f;
        private Tracking2dController m_tracking2dController;
        private string m_assetPath;
        private NormalRGBCameraDevice rgbCameraDevice;
        private Core.EZVIOInputImage locCamImageBuffer;
        private bool hasExtractOST = false;
        private OSTMatrices ostMatrices = new OSTMatrices();
        private float[] imageIntrinsics = new float[8];
        private Pose imageTsHeadPose;
        // Set false after the RGB->Head extrinsic has been cached into headToImage.
        private bool m_firstFrameArrived = true;
        private Matrix4x4 headToImage = Matrix4x4.identity;

        private void OnEnable()
        {
            if (locCamType == LocCam.LocCam_RGB)
            {
                StartCoroutine(startRGBCamera());
            }
        }

        private void OnDisable()
        {
            stopTrackSession();
            if (locCamType == LocCam.LocCam_RGB)
            {
                stopRGBCamera();
            }
        }

        // Update is called once per frame: consume the latest native detection result.
        void Update()
        {
            if (m_tracking2dController == null)
            {
                return;
            }

            EZXR_Detect2DResult detect2DResult = m_tracking2dController.GetDetectedMarker();
            if (m_needDetecting && detect2DResult.state == (int)EZXR_Detect2DState.EZXR_Detect2DState_Tracking)
            {
                // The native pose is a row-major 4x4 camera-to-world transform.
                Matrix4x4 cameraToWorldMatrix = Matrix4x4.identity;
                for (int row = 0; row < 4; row++)
                {
                    cameraToWorldMatrix.SetRow(row, new Vector4(
                        detect2DResult.imagePose.transform[row * 4],
                        detect2DResult.imagePose.transform[row * 4 + 1],
                        detect2DResult.imagePose.transform[row * 4 + 2],
                        detect2DResult.imagePose.transform[row * 4 + 3]));
                }

                m_2dMarkerInfo?.SetMarkerInfo(cameraToWorldMatrix, detect2DResult.markerAnchor);
                // One-shot semantics: stop feeding frames until detection is re-armed.
                m_needDetecting = false;
            }
        }

        /// <summary>Arms a single detection pass; the next camera frame is sent to the detector.</summary>
        public void DetectImageOnce()
        {
            m_needDetecting = true;
        }

        /// <summary>
        /// Enables or disables periodic re-detection every <see cref="m_autoDectectInterval"/> seconds.
        /// </summary>
        /// <param name="isAutoDectect">True to start the repeating trigger, false to cancel it.</param>
        public void DetectImageAuto(bool isAutoDectect)
        {
            if (isAutoDectect)
            {
                if (!IsInvoking(nameof(DetectImageOnce)))
                {
                    InvokeRepeating(nameof(DetectImageOnce), 0.0f, m_autoDectectInterval);
                }
            }
            else
            {
                CancelInvoke(nameof(DetectImageOnce));
            }
        }

        /// <summary>Returns 0 while a detection is pending, 1 once a result has been delivered.</summary>
        // NOTE: method name keeps the original spelling ("Dectect") to stay
        // backward-compatible with existing callers.
        public int GetDectectState()
        {
            return m_needDetecting ? 0 : 1;
        }

        /// <summary>The RGB camera device opened by this manager, or null if not started.</summary>
        public NormalRGBCameraDevice GetNormalRGBCameraDevice()
        {
            return rgbCameraDevice;
        }

        /// <summary>
        /// Creates the native Tracking2d session from a marker-database asset path and
        /// starts the repeating camera-image pump.
        /// </summary>
        /// <param name="assetPath">Path to the marker configuration resources; must not be null.</param>
        public void startTrackSession(string assetPath)
        {
            if (assetPath == null)
            {
                UnityEngine.Debug.LogError("configuration resources assetPath is error");
                // Bug fix: previously execution continued and passed the null
                // path into the native session; bail out instead.
                return;
            }
            UnityEngine.Debug.Log("startTrackSession : " + assetPath);

            if (Application.platform == RuntimePlatform.Android ||
                Application.platform == RuntimePlatform.IPhonePlayer)
            {
                if (m_tracking2dController == null)
                {
                    m_assetPath = assetPath;
                    m_tracking2dController = new Tracking2dController();
                    // Native convention here: 0 = failure, non-zero = success.
                    int state = m_tracking2dController.CreateTracking2d(m_assetPath);
                    if (state == 0)
                    {
                        UnityEngine.Debug.LogError("CreateTracking2d failed");
                        m_tracking2dController = null;
                    }
                    else
                    {
                        UnityEngine.Debug.Log("CreateTracking2d success");
                    }
                }
            }

            // Guard against stacking multiple pumps when startTrackSession is called twice.
            if (!IsInvoking(nameof(UpdateCameraImage)))
            {
                InvokeRepeating(nameof(UpdateCameraImage), 0.5f, m_runTracking2dInterval);
            }
        }

        private void stopTrackSession()
        {
            // Bug fix: the image pump started in startTrackSession() was never
            // cancelled, so it kept firing after the native session was destroyed.
            CancelInvoke(nameof(UpdateCameraImage));

            if (m_tracking2dController != null)
            {
                m_tracking2dController.DestroyTracking2d();
                m_tracking2dController = null;
            }
        }

        // Waits for the 6DoF session to finish initializing before opening the RGB camera.
        private IEnumerator startRGBCamera()
        {
            yield return new WaitUntil(() => SessionManager.Instance != null && SessionManager.Instance.IsInited);
            rgbCameraDevice = new NormalRGBCameraDevice();
            rgbCameraDevice.Open();
        }

        private void stopRGBCamera()
        {
            if (rgbCameraDevice != null)
            {
                rgbCameraDevice.Close();
            }
            rgbCameraDevice = null;

            // Release the frame buffer. Assumes fullImg was allocated with
            // AllocHGlobal by the camera layer — TODO confirm ownership contract.
            if (locCamImageBuffer.fullImg != IntPtr.Zero)
            {
                Marshal.FreeHGlobal(locCamImageBuffer.fullImg);
                locCamImageBuffer.fullImg = IntPtr.Zero;
            }
        }

        // Repeating pump (see startTrackSession): grabs the latest RGB frame,
        // pairs it with the head pose at the frame timestamp, and pushes it to
        // the native 2D tracker.
        private void UpdateCameraImage()
        {
            if (!m_needDetecting)
            {
                return;
            }
            if (ARFrame.SessionStatus != EZVIOState.EZVIOCameraState_Tracking)
            {
                return;
            }

            // Lazily fetch the OST calibration (contains the RGB->Head extrinsic) once.
            if (!hasExtractOST)
            {
                NativeTracking.GetOSTParams(ref ostMatrices);
                hasExtractOST = true;
            }

            if (locCamType != LocCam.LocCam_RGB)
            {
                return;
            }
            if (rgbCameraDevice == null)
            {
                return;
            }

            // Skip frames captured during violent motion: the historical head
            // pose would be too inaccurate for a usable marker pose.
            if (rgbCameraDevice.isCameraMotionViolently())
            {
                return;
            }

            if (!rgbCameraDevice.getCurrentImage(ref locCamImageBuffer, imageIntrinsics))
            {
                return;
            }

            double timestamp_sec = locCamImageBuffer.timestamp;
            imageTsHeadPose = ARFrame.GetHistoricalHeadPose(timestamp_sec);

            if ((Application.platform != RuntimePlatform.Android &&
                 Application.platform != RuntimePlatform.IPhonePlayer) ||
                m_tracking2dController == null)
            {
                return;
            }

            int width = (int)locCamImageBuffer.imgRes.width;
            int height = (int)locCamImageBuffer.imgRes.height;
            // Defensive copy so the native call cannot alias our cached intrinsics.
            float[] intrinsicsArray = (float[])imageIntrinsics.Clone();
            double timestamp = locCamImageBuffer.timestamp;

            // Cache the (fixed) head-from-RGB extrinsic the first time a frame arrives.
            if (m_firstFrameArrived)
            {
                Matrix4x4 imageToHead = new Matrix4x4();
                for (int row = 0; row < 4; row++)
                {
                    imageToHead.SetRow(row, new Vector4(
                        ostMatrices.T_RGB_Head[row * 4],
                        ostMatrices.T_RGB_Head[row * 4 + 1],
                        ostMatrices.T_RGB_Head[row * 4 + 2],
                        ostMatrices.T_RGB_Head[row * 4 + 3]));
                }
                headToImage = imageToHead.inverse;
                m_firstFrameArrived = false;
            }

            // Image-camera pose in world space: world-from-head at the frame
            // timestamp, composed with the head-from-image extrinsic.
            Matrix4x4 imageCamTransform;
            {
                Matrix4x4 headTransform = Matrix4x4.TRS(
                    new Vector3(imageTsHeadPose.position.x, imageTsHeadPose.position.y, imageTsHeadPose.position.z),
                    new Quaternion(imageTsHeadPose.rotation.x, imageTsHeadPose.rotation.y,
                                   imageTsHeadPose.rotation.z, imageTsHeadPose.rotation.w),
                    Vector3.one);
                imageCamTransform = headTransform * headToImage;
            }

            // Flatten row-major for the native API.
            float[] cameraToWorldMatrix_array = new float[16];
            for (int i = 0; i < 4; i++)
            {
                Vector4 row = imageCamTransform.GetRow(i);
                cameraToWorldMatrix_array[i * 4] = row.x;
                cameraToWorldMatrix_array[i * 4 + 1] = row.y;
                cameraToWorldMatrix_array[i * 4 + 2] = row.z;
                cameraToWorldMatrix_array[i * 4 + 3] = row.w;
            }

            IntPtr imagePtr = locCamImageBuffer.fullImg;
            // Native convention: 1 = success.
            int state = m_tracking2dController.SetImage(
                imagePtr, cameraToWorldMatrix_array, intrinsicsArray, timestamp,
                width, height, (int)EZVIOImageFormat.EZVIOImageFormat_GREY);
            if (state != 1)
            {
                UnityEngine.Debug.LogWarning("SetImage failed");
            }
        }
    }
}