using Seengene.XDKUnityPluginCloud;
using System;
using System.Collections;
using System.IO;
using System.Text;
using UnityEngine;
using UnityEngine.UI;
namespace NRKernal.NRExamples {
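/// <summary>
/// Periodically captures the latest grayscale tracking-camera frame, previews it on a RawImage,
/// and saves the JPEG together with the camera pose and intrinsics for Seengene cloud relocation.
/// </summary>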
public class MyGrayCameraCaptureController : MonoBehaviour {
private bool m_IsInitialized = false;
[SerializeField] private CameraCalibrationLoader.EyeType m_EyeType = CameraCalibrationLoader.EyeType.LeftEye;
[SerializeField] private bool m_UndistortImage = false;
[SerializeField] private RawImage m_CameraRawImage;
/// <summary>
/// Local path where captured images are saved.
/// </summary>
private string m_ImageSavePath;
/// <summary>
/// Interval between relocation captures, in seconds.
/// </summary>
private float m_RelocationInterval = 1f;
private float m_ElapsedTime;
private CameraCalibration cameraCalibration = null;
// Preview texture reused across captures so a new Texture2D is not allocated on every frame grab.
private Texture2D m_Texture;
private void Awake() {
cameraCalibration = CameraCalibrationLoader.GetCameraCalibration(m_EyeType);
if (cameraCalibration != null) {
Debug.LogFormat("Camera intrinsics initialized: {0}, {1}", m_EyeType, cameraCalibration);
m_IsInitialized = true;
} else {
Debug.LogError("Failed to initialize camera intrinsics!");
}
}
private void Start() {
// Clear the local image cache folder.
m_ImageSavePath = Path.Combine(Application.persistentDataPath, "seengene");
if (Directory.Exists(m_ImageSavePath))
Directory.Delete(m_ImageSavePath, true);
m_ElapsedTime = 0;
}
void Update() {
if (m_IsInitialized) {
m_ElapsedTime += Time.deltaTime;
if (m_ElapsedTime < m_RelocationInterval) return;
m_ElapsedTime = 0f;
GetFrameCameraImageAndPose();
}
}
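/// <summary>
/// Grabs the latest camera frame and its pose from the Svr plugin, previews the image, and saves it locally.
/// </summary>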
private void GetFrameCameraImageAndPose() {
//Debug.Log("SvrManager.Instance.Initialized : " + SvrManager.Instance.Initialized);
if (!SvrManager.Instance.Initialized) return;
int imageWidth = 640;
int imageHeight = 400;
bool outFrameUpdated = true;
uint outCurrFrameIndex = 0;
ulong outFrameExposureNano = 0;
byte[] outFrameData = new byte[imageWidth * imageHeight];
float[] outTRDataArray = new float[7];
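// Assumed layout of the 7-element pose array, based on how it is read below:
// indices 0-3 = rotation quaternion (x, y, z, w), indices 4-6 = position (x, y, z).
// The frame buffer holds one byte per pixel (8-bit grayscale, 640x400).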
SvrPluginAndroid.SvrGetLatestQVRCameraFrameData(ref outFrameUpdated, ref outCurrFrameIndex, ref outFrameExposureNano, outFrameData, outTRDataArray);
//Debug.Log("outFrameUpdated: " + outFrameUpdated);
if (outFrameUpdated) {
Vector3 cameraPosition = new Vector3(outTRDataArray[4], outTRDataArray[5], outTRDataArray[6]);
Quaternion cameraRotation = new Quaternion(outTRDataArray[0], outTRDataArray[1], outTRDataArray[2], outTRDataArray[3]);
// compress grayscale image data into JPG format.
TextureFormat textureFormat = TextureFormat.R8;
byte[] jpegBytes;
// Alternative: pass hard-coded intrinsics and distortion coefficients instead of the loaded calibration:
//float[] cameraIntrinsicData = new float[9] { 266.75388f, 0, 321.64169f, 0, 266.75388f, 196.71561f, 0, 0, 1 };
//float[] disCoeffsData = new float[8] { 0.92348194f, 0, 0, 0, 0, 0, 0, 0 };
//jpegBytes = XDKTools.GetGrayUndistortTextureBytes(outFrameData, textureFormat, imageHeight, imageWidth, cameraIntrinsicData, disCoeffsData);
if (m_UndistortImage) {
jpegBytes = XDKTools.GetGrayUndistortTextureBytes(outFrameData, textureFormat, imageHeight, imageWidth, cameraCalibration);
} else {
jpegBytes = XDKTools.GetGrayTextureBytes(outFrameData, textureFormat, imageHeight, imageWidth);
}
if (m_Texture == null) {
m_Texture = new Texture2D(imageWidth, imageHeight, TextureFormat.R8, false);
}
// LoadImage decodes the JPEG bytes, resizing and reformatting the texture as needed, and uploads it to the GPU.
m_Texture.LoadImage(jpegBytes);
m_CameraRawImage.texture = m_Texture;
m_CameraRawImage.SetNativeSize();
//Debug.Log("设置图片显示完成");
//SaveCameraImageToLocal(frame.timeStamp, jpegBytes);
SaveCameraImageToLocal(outCurrFrameIndex, jpegBytes, cameraPosition, cameraRotation, cameraCalibration);
}
}
private void SaveCameraImageToLocal(UInt64 seq, byte[] imageBuffer, Vector3 cameraPosition, Quaternion cameraRotation, CameraCalibration cameraIntrinsic) {
if (!Directory.Exists(m_ImageSavePath)) Directory.CreateDirectory(m_ImageSavePath);
// Compute the camera pose matrix.
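// Negating position.z together with rotation.z and rotation.w mirrors the pose across the z axis,
// i.e. it converts between Unity's left-handed convention and the right-handed convention the
// relocation backend presumably expects; note that (x, y, -z, -w) is the same rotation as (-x, -y, z, w).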
Vector3 positionIn = cameraPosition;
positionIn.z = -positionIn.z;
Quaternion rotationIn = cameraRotation;
rotationIn.z = -rotationIn.z;
rotationIn.w = -rotationIn.w;
Matrix4x4 matrix = Matrix4x4.TRS(positionIn, rotationIn, Vector3.one);
matrix = matrix.transpose;
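// The 25 values written below (presumably the format the Seengene relocation service expects):
// the transposed 4x4 pose matrix written row by row, followed by the 3x3 intrinsic matrix
// K = [fx 0 cx; 0 fy cy; 0 0 1] in row-major order.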
string poseData = string.Format(
"{0} {1} {2} {3} {4} {5} {6} {7} {8} {9} {10} {11} {12} {13} {14} {15} {16} {17} {18} {19} {20} {21} {22} {23} {24}",
matrix.m00, matrix.m01, matrix.m02, matrix.m03,
matrix.m10, matrix.m11, matrix.m12, matrix.m13,
matrix.m20, matrix.m21, matrix.m22, matrix.m23,
matrix.m30, matrix.m31, matrix.m32, matrix.m33,
cameraIntrinsic.focal_length.x, 0, cameraIntrinsic.principal_point.x,
0, cameraIntrinsic.focal_length.y, cameraIntrinsic.principal_point.y,
0, 0, 1);
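// The "-origin" file keeps the unconverted pose: position (x, y, z) and rotation quaternion (x, y, z, w)
// as reported by the tracker, followed by the same 3x3 intrinsics.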
string poseDataOrigin = string.Format("{0} {1} {2} {3} {4} {5} {6} {7} {8} {9} {10} {11} {12} {13} {14} {15}",
cameraPosition.x, cameraPosition.y, cameraPosition.z, cameraRotation.x, cameraRotation.y, cameraRotation.z, cameraRotation.w,
cameraIntrinsic.focal_length.x, 0, cameraIntrinsic.principal_point.x,
0, cameraIntrinsic.focal_length.y, cameraIntrinsic.principal_point.y,
0, 0, 1);
File.WriteAllBytes(Path.Combine(m_ImageSavePath, seq + ".jpg"), imageBuffer);
File.WriteAllText(Path.Combine(m_ImageSavePath, seq + ".txt"), poseData);
File.WriteAllText(Path.Combine(m_ImageSavePath, seq + "-origin" + ".txt"), poseDataOrigin);
Debug.Log("图片本地保存完成:" + Path.Combine(m_ImageSavePath, seq + ".jpg"));
}
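/// <summary>
/// Minimal variant that only writes the JPEG (matches the commented-out call above).
/// </summary>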
private void SaveCameraImageToLocal(UInt64 seq, byte[] imageBuffer) {
if (!Directory.Exists(m_ImageSavePath)) Directory.CreateDirectory(m_ImageSavePath);
File.WriteAllBytes(Path.Combine(m_ImageSavePath, seq + ".jpg"), imageBuffer);
}
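// A minimal cleanup sketch (not in the original script): release the preview texture when this
// component is destroyed so the decoded image memory is freed promptly.
private void OnDestroy() {
if (m_Texture != null) {
Destroy(m_Texture);
m_Texture = null;
}
}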
}
}