using System;
using System.Collections;
using System.Collections.Generic;
using Unity.Collections;
using UnityEngine;
using UnityEngine.UI;
using UnityEngine.XR.ARFoundation;
using UnityEngine.XR.ARSubsystems;
using SC.XR.Unity;

/// <summary>
/// Provides the latest camera image either from ARFoundation (CPU image per frame)
/// or from a plain device webcam, and optionally displays it on a RawImage
/// attached to the same GameObject.
/// </summary>
public class XRRGBCamera : SingletonMono<XRRGBCamera>
{
    // Selects the AR capture path (ARCameraManager CPU images) vs. WebCamTexture.
    // NOTE: "Camrea" typo kept deliberately — public field; renaming would break
    // Inspector serialization and external callers.
    public bool isARCamrea;

    // Kept for backward compatibility with code reading XRRGBCamera.Instance,
    // even though the SingletonMono<T> base presumably exposes its own accessor
    // — TODO confirm against SingletonMono and consolidate.
    public static XRRGBCamera Instance;

    // Most recent camera frame, regardless of which capture path produced it.
    public Texture CaptureImage;

    // Device camera texture used by the non-AR path.
    public WebCamTexture RGBCamTexture;

    public ARCameraManager cameraManager;
    private Material cameraBackgroundMaterial;

    // Conversion target for AR CPU images; reused across frames and only
    // reallocated when the incoming image dimensions change.
    Texture2D cameraTexture;

    // Cached once in Awake so the per-frame callback avoids repeated GetComponent calls.
    private RawImage displayImage;

    private void Awake()
    {
        Instance = this;
        displayImage = GetComponent<RawImage>();
    }

    private void Start()
    {
        if (isARCamrea)
        {
            // Locate the ARCameraManager that delivers camera frames.
            cameraManager = FindObjectOfType<ARCameraManager>();
            if (cameraManager == null)
            {
                // Original code dereferenced the result unconditionally (NRE risk).
                Debug.LogError("XRRGBCamera: no ARCameraManager found in the scene.");
            }
            else
            {
                // Unsubscribe first so a repeated Start never double-subscribes.
                cameraManager.frameReceived -= OnCameraFrameReceived;
                cameraManager.frameReceived += OnCameraFrameReceived;
            }
        }
        playCamera();
    }

    /// <summary>
    /// Starts image capture. In AR mode frames arrive via the frameReceived
    /// callback; otherwise the first available webcam device is opened at
    /// 1280x720 @ 30 fps and bound to the RawImage (if present).
    /// </summary>
    public void playCamera()
    {
        if (isARCamrea)
        {
            // AR path: nothing to start here — frames come from ARCameraManager.
            // Vuforia.VuforiaBehaviour.Instance.enabled = true;
            return;
        }

        WebCamDevice[] devices = WebCamTexture.devices;
        if (devices.Length == 0)
        {
            Debug.Log("No webcam devices found.");
            return;
        }

        // Use the first available device; pick a different index here if needed.
        RGBCamTexture = new WebCamTexture(devices[0].name, 1280, 720, 30);
        Debug.Log("Opening camera: " + devices[0].name);
        RGBCamTexture.Play();
        CaptureImage = RGBCamTexture;

        if (displayImage != null)
        {
            displayImage.texture = CaptureImage;
            displayImage.transform.localEulerAngles = Vector3.zero;
        }
        Debug.Log("Camera started.");
    }

    // Converts the latest AR CPU image to RGBA32 in-place and publishes it
    // through CaptureImage (and the RawImage, if one is attached).
    private void OnCameraFrameReceived(ARCameraFrameEventArgs eventArgs)
    {
        if (cameraManager == null || !cameraManager.TryAcquireLatestCpuImage(out XRCpuImage image))
        {
            return;
        }

        const TextureFormat format = TextureFormat.RGBA32;

        // (Re)allocate the target texture only when dimensions change, and
        // destroy the old one first to avoid leaking native texture memory.
        if (cameraTexture == null || cameraTexture.width != image.width || cameraTexture.height != image.height)
        {
            if (cameraTexture != null)
            {
                Destroy(cameraTexture);
            }
            cameraTexture = new Texture2D(image.width, image.height, format, false);
        }

        // Flip across the Y axis so the converted image is upright for display.
        var conversionParams = new XRCpuImage.ConversionParams(image, format, XRCpuImage.Transformation.MirrorY);

        // Convert directly into the texture's raw buffer — no intermediate copies.
        var rawTextureData = cameraTexture.GetRawTextureData<byte>();
        try
        {
            image.Convert(conversionParams, rawTextureData);
        }
        finally
        {
            // XRCpuImage wraps native resources; always dispose, even on failure.
            image.Dispose();
        }

        // Upload the updated pixel data to the GPU.
        cameraTexture.Apply();
        CaptureImage = cameraTexture;

        if (displayImage != null)
        {
            displayImage.texture = CaptureImage;
            // AR image arrives rotated; compensate on the UI element.
            displayImage.transform.localEulerAngles = new Vector3(0, 0, 180);
        }
    }

    // Intentionally empty; kept public so any existing explicit callers still compile.
    public void Update()
    {
    }

    /// <summary>Stops webcam capture if it is currently running.</summary>
    public void stopCamera()
    {
        if (RGBCamTexture != null && RGBCamTexture.isPlaying)
        {
            RGBCamTexture.Stop();
        }
    }

    private void OnDestroy()
    {
        // Unsubscribe so ARCameraManager never calls back into a destroyed object.
        if (cameraManager != null)
        {
            cameraManager.frameReceived -= OnCameraFrameReceived;
        }
        stopCamera();
        // Release the native texture we allocated for CPU-image conversion.
        if (cameraTexture != null)
        {
            Destroy(cameraTexture);
        }
    }
}