using System;
using System.Collections;
using System.Collections.Generic;
using Unity.Collections;
using UnityEngine;
using UnityEngine.UI;
using UnityEngine.XR.ARFoundation;
using UnityEngine.XR.ARSubsystems;

/// <summary>
/// Supplies the current camera image either from an AR Foundation camera
/// (<see cref="ARCameraManager"/> frame events) or from a plain device webcam
/// (<see cref="WebCamTexture"/>), and pushes it onto an attached
/// <see cref="RawImage"/> and/or <see cref="Renderer"/> for display.
/// </summary>
public class XRRGBCamera : MonoBehaviour
{
    // NOTE(review): "Camrea" is a typo for "Camera", but the field is public
    // (inspector-serialized / possibly referenced elsewhere), so the name is kept.
    public bool isARCamrea;

    // Simple singleton handle, assigned in Awake.
    public static XRRGBCamera Instance;

    // Most recently produced camera texture (the WebCamTexture itself, or the
    // Texture2D converted from the latest AR frame).
    public Texture CaptureImage;

    public WebCamTexture RGBCamTexture;

    // When true, the webcam is started automatically in Start().
    public bool isAuto = true;

    public ARCameraManager cameraManager;

    private Material cameraBackgroundMaterial;

    // Reused CPU-side destination texture for converted AR camera frames;
    // recreated only when the incoming image size changes.
    Texture2D cameraTexture;

    private void Awake()
    {
        Instance = this;
    }

    private void Start()
    {
        if (isARCamrea)
        {
            // Locate the ARCameraManager and subscribe to per-frame updates.
            // Unsubscribe first so a repeated Start() cannot double-subscribe.
            cameraManager = FindObjectOfType<ARCameraManager>();
            cameraManager.frameReceived -= OnCameraFrameReceived;
            cameraManager.frameReceived += OnCameraFrameReceived;
        }

        // Start the webcam automatically when configured to do so.
        if (isAuto)
        {
            playCamera(1280, 720);
        }
    }

    private void OnDestroy()
    {
        // Fix: unsubscribe on destroy so the event does not keep a reference
        // to (and keep invoking) a destroyed component.
        if (cameraManager != null)
        {
            cameraManager.frameReceived -= OnCameraFrameReceived;
        }
    }

    /// <summary>
    /// Starts the plain webcam at the requested resolution. In AR mode this is a
    /// no-op: frames arrive through <see cref="OnCameraFrameReceived"/> instead.
    /// </summary>
    /// <param name="w">Requested capture width in pixels.</param>
    /// <param name="h">Requested capture height in pixels.</param>
    public void playCamera(int w, int h)
    {
        if (isARCamrea)
        {
            // AR mode: the AR session drives the camera.
            // Vuforia.VuforiaBehaviour.Instance.enabled = true;
            return;
        }

        WebCamDevice[] devices = WebCamTexture.devices;
        if (devices.Length == 0)
        {
            Debug.Log("No webcam devices found.");
            return;
        }

        // Use the first available device (pick another index if needed).
        RGBCamTexture = new WebCamTexture(devices[0].name, w, h, 30);
        Debug.Log("开启摄像头" + devices[0].name);

        RGBCamTexture.Play();
        CaptureImage = RGBCamTexture;

        // Webcam frames are displayed without rotation.
        ApplyTextureToDisplay(CaptureImage, 0f);

        Debug.Log("开启摄像头");
    }

    // Handles each AR camera frame: acquires the CPU image, converts it to an
    // RGBA32 Texture2D (mirrored across Y), and pushes it to the display.
    private void OnCameraFrameReceived(ARCameraFrameEventArgs eventArgs)
    {
        Debug.Log("获取OnCameraFrameReceivedOnCameraFrameReceived");

        // Grab the latest CPU-accessible image from the AR camera.
        if (!cameraManager.TryAcquireLatestCpuImage(out XRCpuImage image))
        {
            return;
        }

        var format = TextureFormat.RGBA32;

        // (Re)create the destination texture only when the size changes.
        if (cameraTexture == null || cameraTexture.width != image.width || cameraTexture.height != image.height)
        {
            cameraTexture = new Texture2D(image.width, image.height, format, false);
        }

        // Convert the full image to RGBA32, flipping across the Y axis.
        // (A sub-rectangle could be requested; the full image is used here.)
        XRCpuImage.Transformation m_Transformation = XRCpuImage.Transformation.MirrorY;
        var conversionParams = new XRCpuImage.ConversionParams(image, format, m_Transformation);

        // Convert in place into the texture's raw buffer — no extra copies.
        var rawTextureData = cameraTexture.GetRawTextureData<byte>();
        try
        {
            image.Convert(conversionParams, rawTextureData);
        }
        finally
        {
            // XRCpuImage wraps native memory; always dispose to avoid leaking
            // native resources, even if Convert throws.
            image.Dispose();
        }

        // Upload the updated pixel data to the GPU.
        cameraTexture.Apply();
        CaptureImage = cameraTexture;

        // AR frames are displayed rotated 180° about Z, as in the original code.
        ApplyTextureToDisplay(CaptureImage, 180f);
    }

    // Pushes the texture onto an attached RawImage and/or Renderer (whichever
    // exists) and sets the local Z rotation used to orient the image.
    private void ApplyTextureToDisplay(Texture texture, float zRotation)
    {
        var rawImage = GetComponent<RawImage>();
        if (rawImage != null)
        {
            rawImage.texture = texture;
            rawImage.transform.localEulerAngles = new Vector3(0, 0, zRotation);
        }

        var meshRenderer = GetComponent<Renderer>();
        if (meshRenderer != null)
        {
            meshRenderer.material.mainTexture = texture;
            meshRenderer.transform.localEulerAngles = new Vector3(0, 0, zRotation);
        }
    }

    public void Update()
    {
    }

    /// <summary>Stops the webcam if it is currently playing.</summary>
    public void stopCamera()
    {
        if (RGBCamTexture != null && RGBCamTexture.isPlaying)
        {
            RGBCamTexture.Stop();
        }
    }
}