using System;
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.UI;
using UnityEngine.Rendering;
using System.IO;
using System.Threading;
using EZXR.Glass.Core;
using EZXR.Glass.SixDof;
using System.Runtime.InteropServices;
using AOT;
using EZXR.Glass.Inputs;
using EZXR.Glass.Device;
namespace EZXR.Glass.Recording
{
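/// <summary>
/// Renders the RGB camera feed onto a background quad so it can be composited with virtual content
/// (e.g. for screen casting or screenshots). A minimal usage sketch, assuming an ARRenderRGB component
/// sits on a camera GameObject in the scene:
/// <code>
/// ARRenderRGB.Instance.HandleARRenderOpen();    // start the RGB camera and background rendering
/// ARRenderRGB.Instance.showPreview(true, 0.5f); // optionally show the preview quad in the HMD at half scale
/// if (ARRenderRGB.Instance.isReady)
///     ARRenderRGB.Instance.HandleARRenderShot(); // draw the current RGB frame into the texture once
/// ARRenderRGB.Instance.HandleARRenderClose();   // stop the camera and tear down the quad
/// </code>
/// </summary>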
public class ARRenderRGB : MonoBehaviour
{
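/// <summary>
/// Singleton instance, assigned in Awake and cleared in OnDestroy.
/// </summary>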
public static ARRenderRGB Instance
{
get
{
return _instance;
}
}
private bool _isReady = false;
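/// <summary>
/// True once the video texture has been created and bound to the background material.
/// </summary>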
public bool isReady
{
get
{
return _isReady;
}
}
private static ARRenderRGB _instance = null;
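/// <summary>
/// Event ID passed to GL.IssuePluginEvent to request drawing the RGB frame on the render thread.
/// </summary>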
private const int RENDER_EVENT_DRAWRGB = 0x0001;
/// <summary>
/// Delegate type for callbacks invoked on the render thread.
/// </summary>
/// <param name="eventID">Identifier for the event.</param>
private delegate void RenderEventDelegate(int eventID);
/// <summary>
/// Static reference to the render-thread callback; kept alive so the function pointer below is not garbage collected.
/// </summary>
private static RenderEventDelegate RenderThreadHandle = new RenderEventDelegate(RunOnRenderThread);
private static IntPtr RenderThreadHandlePtr = Marshal.GetFunctionPointerForDelegate(RenderThreadHandle);
#region params
private GameObject m_VideoQuad;
private float previewQuadScale = 1.0f;
public Material m_BackgroundMaterial;
private Camera m_Camera;
private Texture2D m_VideoTexture;
private static float _imageWidth = 0;
private static float _imageHeight = 0;
private NormalRGBCameraDevice rgbCameraDevice;
private Coroutine drawRgbCameraBackgroundCoroutine;
#endregion
#region unity functions
private void Awake()
{
_instance = this;
m_Camera = GetComponent<Camera>();
rgbCameraDevice = new NormalRGBCameraDevice();
}
private void Start()
{
}
private void OnDisable()
{
}
private void OnDestroy()
{
_instance = null;
}
#endregion
#region custom functions
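/// <summary>
/// Enables RGB background rendering: turns on the camera, queries the RGB camera resolution,
/// starts the camera/texture setup coroutines and the end-of-frame draw loop, and creates the video quad.
/// </summary>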
public void HandleARRenderOpen()
{
if (drawRgbCameraBackgroundCoroutine != null)
return;
m_Camera.enabled = true;
int[] sizeRgbCamera = rgbCameraDevice.getCameraSize();
_imageWidth = sizeRgbCamera[0];
_imageHeight = sizeRgbCamera[1];
StartCoroutine(startRGBCamera());
StartCoroutine(ConfigARRenderCamera());
drawRgbCameraBackgroundCoroutine = StartCoroutine(CallPluginAtEndOfFrames());
ConfigVideoPlane();
}
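/// <summary>
/// Disables RGB background rendering: stops the draw coroutine, closes the RGB camera and destroys the video quad.
/// </summary>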
public void HandleARRenderClose()
{
m_Camera.enabled = false;
if (drawRgbCameraBackgroundCoroutine == null)
return;
StopCoroutine(drawRgbCameraBackgroundCoroutine);
stopRGBCamera();
if (m_VideoQuad != null)
{
Destroy(m_VideoQuad);
m_VideoQuad = null;
}
drawRgbCameraBackgroundCoroutine = null;
}
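/// <summary>
/// Draws the current RGB camera frame into the video texture once (e.g. for a screenshot).
/// </summary>
/// <returns>True if a frame was drawn; false if the renderer is not ready yet.</returns>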
public bool HandleARRenderShot()
{
if (!isReady) return false;
if (rgbCameraDevice != null && m_VideoTexture != null)
{
rgbCameraDevice.DrawRGBVideoFrame(m_VideoTexture.GetNativeTexturePtr());
return true;
}
return false;
}
/// <summary>
/// Render-thread callback dispatched by GL.IssuePluginEvent; routes events by their ID.
/// </summary>
/// <param name="eventID">Identifier for the event.</param>
[MonoPInvokeCallback(typeof(RenderEventDelegate))]
private static void RunOnRenderThread(int eventID)
{
if (eventID == RENDER_EVENT_DRAWRGB)
{
if (_instance != null)
_instance.drawRGBTexture();
}
}
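/// <summary>
/// Runs on the render thread; draws the latest RGB camera frame into the video texture via its native pointer.
/// </summary>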
private void drawRGBTexture()
{
if (rgbCameraDevice != null)
{
if (m_VideoTexture != null)
{
rgbCameraDevice.DrawRGBVideoFrame(m_VideoTexture.GetNativeTexturePtr());
}
}
}
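/// <summary>
/// Overrides the RGB image resolution used when creating the video texture and sizing the video quad.
/// </summary>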
public static void SetRGBResolution(int imageWidth, int imageHeight)
{
_imageWidth = imageWidth;
_imageHeight = imageHeight;
Debug.Log("arrenderrgb set: " + _imageWidth + "," + _imageHeight);
}
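/// <summary>
/// Creates the video quad, sizes it from the RGB camera intrinsics (focal length fx), places it on the
/// second unused layer so only this camera renders it by default, and assigns the background material.
/// </summary>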
private void ConfigVideoPlane()
{
if (m_VideoQuad != null)
return;
m_VideoQuad = GameObject.CreatePrimitive(PrimitiveType.Plane);
m_VideoQuad.SetActive(false);
Collider videoQuadCollider = m_VideoQuad.GetComponent<Collider>();
if (videoQuadCollider != null)
{
Destroy(videoQuadCollider);
}
float[] intrinsic = new float[8];
rgbCameraDevice.getRGBCameraIntrics(intrinsic);
float fx = intrinsic[0];
m_VideoQuad.transform.parent = this.transform;
m_VideoQuad.transform.localRotation = Quaternion.AngleAxis(-90, Vector3.right) * Quaternion.AngleAxis(180, Vector3.up);
m_VideoQuad.transform.localPosition = Vector3.forward * 50;
Debug.Log("arrenderrgb: " + _imageWidth + "," + _imageHeight);
Debug.Log("arrenderrgb: " + _imageWidth + "," + _imageHeight);
m_VideoQuad.transform.localScale = new Vector3(_imageWidth * 5 / fx, 1.0f, _imageHeight * 5 / fx);
// Pick the second unused layer for the screen-casting preview quad
string[] layerNames = new string[32];
for (int i = 0; i < 32; i++)
{
layerNames[i] = LayerMask.LayerToName(i);
}
int count = 0;
for (int i = 0; i < layerNames.Length; i++)
{
if (string.IsNullOrEmpty(layerNames[i]))
{
if (count == 1)
{
m_VideoQuad.layer = i;
m_Camera.cullingMask |= 1 << i;
showPreview(false);
break;
}
else
{
count++;
}
}
}
if (m_BackgroundMaterial == null)
{
m_BackgroundMaterial = new Material(Shader.Find("Unlit/Texture"));
m_BackgroundMaterial.SetTextureScale("_MainTex", new Vector2(1, -1));
}
// Texture coordinates and OpenGL image coordinates are flipped vertically; correct the orientation
//m_BackgroundMaterial.SetVector(
// "_UvTopLeftRight",
// new Vector4(
// 0.0f, 1.0f, 1.0f, 1.0f));
//m_BackgroundMaterial.SetVector(
// "_UvBottomLeftRight",
// new Vector4(0.0f, 0.0f, 1.0f, 0.0f));
m_VideoQuad.GetComponent<MeshRenderer>().material = m_BackgroundMaterial;
m_VideoQuad.SetActive(true);
}
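/// <summary>
/// Shows or hides the video quad in the HMD cameras by toggling its layer in their culling masks,
/// and applies or restores the preview scale.
/// </summary>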
public void showPreview(bool isShow, float scale = 1.0f)
{
if (m_VideoQuad == null)
return;
int quadLayer = m_VideoQuad.layer;
if (!isShow)
{
if (HMDPoseTracker.Instance != null)
{
HMDPoseTracker.Instance.leftCamera.cullingMask &= ~(1 << quadLayer);
HMDPoseTracker.Instance.rightCamera.cullingMask &= ~(1 << quadLayer);
HMDPoseTracker.Instance.centerCamera.cullingMask &= ~(1 << quadLayer);
}
if (m_VideoQuad != null)
{
m_VideoQuad.transform.localScale *= 1.0f / previewQuadScale;
previewQuadScale = 1.0f;
}
}
else
{
if (HMDPoseTracker.Instance != null)
{
HMDPoseTracker.Instance.leftCamera.cullingMask |= 1 << quadLayer;
HMDPoseTracker.Instance.rightCamera.cullingMask |= 1 << quadLayer;
HMDPoseTracker.Instance.centerCamera.cullingMask |= 1 << quadLayer;
}
if (m_VideoQuad != null)
{
m_VideoQuad.transform.localScale *= scale;
previewQuadScale = scale;
}
}
}
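/// <summary>
/// Waits for the RGB camera to start, then creates the video texture (initialized to black)
/// and binds it to the background material.
/// </summary>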
private IEnumerator ConfigARRenderCamera()
{
yield return new WaitUntil(() => rgbCameraDevice.IsStarted());
Debug.Log("UNITY LOG ========= ARRenderRGB, ConfigARRenderCamera");
// Create a texture
m_VideoTexture = new Texture2D((int)_imageWidth, (int)_imageHeight, TextureFormat.RGBA32, false);
// Initialize the texture to black (all-zero RGBA bytes)
byte[] blackBytes = new byte[(int)_imageWidth * (int)_imageHeight * 4];
m_VideoTexture.LoadRawTextureData(blackBytes);
m_VideoTexture.filterMode = FilterMode.Bilinear;
m_VideoTexture.wrapMode = TextureWrapMode.Repeat;
m_VideoTexture.Apply();
m_BackgroundMaterial.SetTexture("_MainTex", m_VideoTexture);
_isReady = true;
Debug.Log("UNITY LOG ========= ARRenderRGB, _isReady = " + isReady);
}
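/// <summary>
/// After the RGB camera has started, issues a render-thread plugin event at the end of every frame
/// while the camera is enabled, so the RGB frame is drawn on the render thread (skipped in the editor).
/// </summary>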
private IEnumerator CallPluginAtEndOfFrames()
{
if (!Application.isEditor)
{
yield return new WaitUntil(() => rgbCameraDevice.IsStarted());
while (m_Camera.enabled)
{
// Wait until all frame rendering is done
yield return new WaitForEndOfFrame();
// Issue a plugin event with arbitrary integer identifier.
// The plugin can distinguish between different
// things it needs to do based on this ID.
// For our simple plugin, it does not matter which ID we pass here.
//GL.IssuePluginEvent(rgbCameraDevice.GetDrawRGBVideoFrameFunc(), 1);
GL.IssuePluginEvent(RenderThreadHandlePtr, RENDER_EVENT_DRAWRGB);
}
}
}
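/// <summary>
/// Waits until the XR session is initialized, then opens the RGB camera.
/// </summary>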
private IEnumerator startRGBCamera()
{
yield return new WaitUntil(() => SessionManager.Instance != null && SessionManager.Instance.IsInited);
Debug.Log("UNITY LOG ========= ARRenderRGB, startRGBCamera");
rgbCameraDevice.Open();
}
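/// <summary>
/// Closes the RGB camera if it is currently running.
/// </summary>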
private void stopRGBCamera()
{
if (rgbCameraDevice.IsStarted())
rgbCameraDevice.Close();
}
#endregion
}
}