NxrSlam.cs 28 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781782783784785786787788789790791792793794795796797798799800801802803804805806807808809810811812813814
  1. using System.Collections.Generic;
  2. using UnityEngine;
  3. namespace Assets.NXR.Scripts.Slam
  4. {
  5. public class NxrSlam
  6. {
// ---- Native (nxr_slam) P/Invoke bindings ----

// Registers two caller-owned buffers with the native layer for the given
// stream type.  NOTE(review): presumably the plugin writes frames into
// these pointers — confirm against the native plugin source.
[System.Runtime.InteropServices.DllImport("nxr_slam")]
public static extern bool initPtr(int streamType, System.IntPtr dataPtr0, System.IntPtr dataPtr1);
/// <summary>
/// Polls the native side for the latest frame of the given stream.
/// </summary>
/// <param name="streamType">Native stream-type id to query.</param>
/// <param name="width">Receives the frame width in pixels.</param>
/// <param name="height">Receives the frame height in pixels.</param>
/// <returns>0/1 has valid data, -1 no data</returns>
[System.Runtime.InteropServices.DllImport("nxr_slam")]
public static extern int getNxrStreamData(int streamType, ref int width, ref int height);
// Size in bytes of one frame of the given stream.
[System.Runtime.InteropServices.DllImport("nxr_slam")]
public static extern int getNxrStreamSize(int streamType);
// Releases the native-side lock on slot `idx`.  NOTE(review): exact
// locking semantics assumed from the name — verify in the plugin.
[System.Runtime.InteropServices.DllImport("nxr_slam")]
public static extern void unlockStream(int idx);
// NOTE: all of these enums are marshalled to Java by name (valueOf) or by
// ordinal, so members must not be renamed or reordered.

// Which TOF payload(s) the native side delivers per frame.
public enum TOFStreamMode
{
    ONLY_DEPTH, // w*h*4
    ONLY_CLOUD,
    DEPTH_CLOUD,// w*h*12
    NONE,
    CLOUD_ON_LEFTHAND_SLAM
}
// TOF capture frame rates.
public enum TOFFPS
{
    TOF_FPS_5, TOF_FPS_10, TOF_FPS_15, TOF_FPS_20, TOF_FPS_25, TOF_FPS_30
}
// RGB camera output resolutions (TOF = match the TOF sensor).
public enum RgbResolution
{
    RGB_1920x1080, RGB_1280x720, RGB_640x480, RGB_320x240, RGB_2560x1920, TOF
}
// Head-tracking mode.
public enum Mode
{
    MODE_3DOF, MODE_6DOF
}
// Sensor/data source of a stream.
public enum RawDataType
{
    NONE,
    EYETRACKING,
    RGB,
    TOF,
    FISHEYE,
    THERMAL
}
// Pixel/stream encodings reported for RGB frames.
public enum Codec
{
    YUYV,
    YUV420p,
    JPEG,
    NV12,
    BITSTREAM
}
// RGB frame purpose (sub-type when RawDataType is RGB).
public enum RgbType
{
    Preview,
    Capture
}
// RGB pixel formats requested via setRgbFormat.
public enum RgbFormat
{
    FORMAT_NONE,
    FORMAT_YUYV,
    FORMAT_YUV420,
    FORMAT_RGB,
    FORMAT_RGBA,
    FORMAT_H264
}
// TOF frame payload kind (sub-type when RawDataType is TOF).
public enum TofType
{
    Depth_16,
    Depth_32,
    IR,
    Cloud,
    Raw,
    Eeprom
}
// Managed snapshot of one frame delivered by the Java onStreamData
// callback; all fields are copied out of the Java object on construction.
public class NXRStreamData
{
    public RawDataType rawDataType;
    public byte[] data;//rgb or tof camera data
    public byte[] left; // fish eye left camera data
    public byte[] right;// fish eye right camera data
    public int type;// payload sub-type: TofType/RgbType ordinal (see ctor)
    // 224*172*3*4 bytes
    public TofType tofType;
    public RgbType rgbType;
    public Codec rgbCodecFormat;// only for rgb
    public int width;
    public int height;
    public int size; // rgb or tof size
    public int leftSize; // fish eye left size
    public int rightSize;// fish eye right size
    public long timestamp;
    public TOFStreamMode tofStreamMode;
    // Copies every field from the Java-side stream-data object.
    // NOTE(review): assumes javaObject and its "data" field are non-null;
    // a null raw object would throw here — confirm the Java side guarantees it.
    public NXRStreamData(RawDataType dataType, AndroidJavaObject javaObject)
    {
        rawDataType = dataType;
        data = AndroidJNIHelper.ConvertFromJNIArray<byte[]>(javaObject.Get<AndroidJavaObject>("data").GetRawObject());
        // Fisheye frames additionally carry separate left/right eye buffers.
        if (rawDataType == RawDataType.FISHEYE)
        {
            left = AndroidJNIHelper.ConvertFromJNIArray<byte[]>(javaObject.Get<AndroidJavaObject>("left").GetRawObject());
            right = AndroidJNIHelper.ConvertFromJNIArray<byte[]>(javaObject.Get<AndroidJavaObject>("right").GetRawObject());
        }
        type = javaObject.Get<int>("type");
        rgbCodecFormat = (Codec)javaObject.Get<int>("format");
        width = javaObject.Get<int>("width");
        height = javaObject.Get<int>("height");
        size = javaObject.Get<int>("size");
        leftSize = javaObject.Get<int>("leftSize");
        rightSize = javaObject.Get<int>("rightSize");
        timestamp = javaObject.Get<long>("timestamp");
        // Resolve the generic `type` ordinal into the matching typed enum.
        if (rawDataType == RawDataType.RGB)
        {
            rgbType = (RgbType)type;
        }
        else if (rawDataType == RawDataType.TOF)
        {
            tofType = (TofType)type;
        }
    }
}
  127. public class NXRPlaneData
  128. {
  129. public long hostTimestamp = 0L;
  130. public long deviceTimestamp = 0L;
  131. public NXRPlaneData.Point3D[] points;
  132. public NXRPlaneData.Point3D normal = new Point3D();
  133. public int size;
  134. public int id = -1;
  135. public void Refresh()
  136. {
  137. if (
  138. Mathf.Abs((float)(points[0].x - points[size - 1].x)) < Mathf.Epsilon
  139. && Mathf.Abs((float)(points[0].y - points[size - 1].y)) < Mathf.Epsilon
  140. && Mathf.Abs((float)(points[0].z - points[size - 1].z)) < Mathf.Epsilon
  141. )
  142. {
  143. // 首尾相同
  144. size = size - 1;
  145. }
  146. }
  147. public NXRPlaneData(AndroidJavaObject javaObject)
  148. {
  149. if(javaObject != null)
  150. {
  151. hostTimestamp = javaObject.Get<long>("hostTimestamp");
  152. deviceTimestamp = javaObject.Get<long>("deviceTimestamp");
  153. size = javaObject.Get<int>("size");
  154. id = javaObject.Get<int>("id");
  155. AndroidJavaObject normalObj = javaObject.Get<AndroidJavaObject>("normal");
  156. if (normalObj != null)
  157. {
  158. normal.x = normalObj.Get<double>("x");
  159. normal.y = normalObj.Get<double>("y");
  160. normal.z = normalObj.Get<double>("z");
  161. normalObj.Dispose();
  162. }
  163. AndroidJavaObject pointsObj = javaObject.Get<AndroidJavaObject>("points");
  164. if (pointsObj != null)
  165. {
  166. AndroidJavaObject[] pointsArray = AndroidJNIHelper.ConvertFromJNIArray<AndroidJavaObject[]>(pointsObj.GetRawObject());
  167. if (pointsArray != null)
  168. {
  169. if (
  170. Mathf.Abs((float) (pointsArray[0].Get<double>("x") - pointsArray[size-1].Get<double>("x"))) < Mathf.Epsilon
  171. && Mathf.Abs((float)(pointsArray[0].Get<double>("y") - pointsArray[size - 1].Get<double>("y"))) < Mathf.Epsilon
  172. && Mathf.Abs((float)(pointsArray[0].Get<double>("z") - pointsArray[size - 1].Get<double>("z"))) < Mathf.Epsilon
  173. )
  174. {
  175. // 首尾相同
  176. size = size - 1;
  177. }
  178. points = new Point3D[size];
  179. for (int i = 0; i < size; i++)
  180. {
  181. Point3D point3D = new Point3D();
  182. point3D.x = pointsArray[i].Get<double>("x");
  183. point3D.y = pointsArray[i].Get<double>("y");
  184. point3D.z = pointsArray[i].Get<double>("z");
  185. // 右手坐标系转换,绕x轴旋转-180度
  186. point3D.y = point3D.y * (-1);
  187. points[i] = point3D;
  188. pointsArray[i].Dispose();
  189. }
  190. }
  191. pointsObj.Dispose();
  192. }
  193. }
  194. }
  195. public class Point3D
  196. {
  197. public double x;
  198. public double y;
  199. public double z;
  200. public Point3D()
  201. {
  202. }
  203. public Point3D(double x, double y, double z)
  204. {
  205. this.x = x;
  206. this.y = y;
  207. this.z = z;
  208. }
  209. }
  210. }
// Guards lazy construction of the singleton.
private static object syncRoot = new object();
private static NxrSlam _instance = null;
/// <summary>
/// Process-wide singleton, created lazily with double-checked locking.
/// </summary>
public static NxrSlam Instance
{
    get
    {
        if (_instance == null) // first check: skip the lock once created
        {
            lock (syncRoot) // only one thread may construct at a time
            {
                // second check: a thread that waited on the lock must not
                // construct a second instance after the first released it
                if (_instance == null)
                {
                    _instance = new NxrSlam();
                }
            }
        }
        return _instance;
    }
}
// True while an RGB camera stream is active.  NOTE(review): "Steaming" is
// a typo but part of the public API — kept for compatibility.
public bool IsRGBCameraSteaming { get; set; }
// Private: use Instance.  Resolves the Java singleton on device; in the
// editor nxrSlamInstance stays null and all calls degrade to no-ops.
private NxrSlam()
{
#if !UNITY_EDITOR
    GetNxrSlamInstance();
#endif
}
// Cached Java-side singleton (ruiyue.nxr.slam.NXRSlam) and its class.
AndroidJavaObject nxrSlamInstance;
AndroidJavaClass nxrSlamClass;
#if !UNITY_EDITOR
/// <summary>
/// Lazily resolves and caches the Java NXRSlam singleton via its static
/// getInstance() method.
/// </summary>
public AndroidJavaObject GetNxrSlamInstance()
{
    if (nxrSlamClass == null)
    {
        nxrSlamClass = new AndroidJavaClass("ruiyue.nxr.slam.NXRSlam");
    }
    if (nxrSlamClass != null && nxrSlamInstance == null)
    {
        nxrSlamInstance = nxrSlamClass.CallStatic<AndroidJavaObject>("getInstance");
        Debug.Log("NXRSlam getInstance");
    }
    return nxrSlamInstance;
}
#else
// Editor stub: there is no Java VM, so there is no object to return.
private AndroidJavaObject GetJavaObject() { return null;}
#endif
  256. public void EnableNativeLog(bool enable)
  257. {
  258. if (nxrSlamInstance != null)
  259. {
  260. Debug.Log("NXRSlam enableNativeLog");
  261. nxrSlamInstance.Call("enableNativeLog", enable);
  262. }
  263. }
  264. bool Inited = false;
  265. public bool Init()
  266. {
  267. IsRGBCameraSteaming = false;
  268. if (nxrSlamInstance != null)
  269. {
  270. Debug.Log("NXRSlam init");
  271. Inited = true;
  272. return nxrSlamInstance.Call<bool>("init");
  273. }
  274. return false;
  275. }
  276. public void UnInit()
  277. {
  278. if (nxrSlamInstance != null && Inited)
  279. {
  280. StopPlaneDetection();
  281. StopStreaming();
  282. StopBuildMap();
  283. Debug.Log("NXRSlam uninit");
  284. Inited = false;
  285. nxrSlamInstance.Call("uninit");
  286. }
  287. }
  288. /// <summary>
  289. ///
  290. /// </summary>
  291. /// <param name="path">地图路径:sdcard/unity_map.bin</param>
  292. /// <returns></returns>
  293. public bool StartSlamWithMap(string path)
  294. {
  295. // 是否存在
  296. //if (path == null || path.Length == 0 || !File.Exists(path))
  297. //{
  298. // Debug.Log("NXRSlam startSlamWithMap failed, file not exist !!! " + path);
  299. // return false;
  300. //}
  301. if (nxrSlamInstance != null)
  302. {
  303. IsBuildingMap = true;
  304. Debug.Log("NXRSlam startSlamWithMap." + path);
  305. return nxrSlamInstance.Call<bool>("startSlamWithMap", path, new MapBuildCallback());
  306. }
  307. return false;
  308. }
  309. public void StopSlam()
  310. {
  311. if (nxrSlamInstance != null)
  312. {
  313. Debug.Log("NXRSlam stopSlam");
  314. nxrSlamInstance.Call("stopSlam");
  315. }
  316. }
  317. bool IsBuildingMap = false;
  318. public bool StartBuildMap()
  319. {
  320. if (nxrSlamInstance != null)
  321. {
  322. IsBuildingMap = true;
  323. Debug.Log("NXRSlam startRecordMap");
  324. return nxrSlamInstance.Call<bool>("startRecordMap", new MapBuildCallback());
  325. }
  326. return false;
  327. }
  328. /// <summary>
  329. ///
  330. /// </summary>
  331. /// <param name="path">sdcard/unity_map.bin</param>
  332. /// <returns></returns>
  333. public bool StopBuildMap(string path = null)
  334. {
  335. if (nxrSlamInstance != null && IsBuildingMap)
  336. {
  337. IsBuildingMap = false;
  338. //if(path == null)
  339. //{
  340. // path = "system/etc/unity_map.bin";
  341. //}
  342. Debug.Log("NXRSlam stopRecordMap." + path);
  343. return nxrSlamInstance.Call<bool>("stopRecordMap", path);
  344. }
  345. return false;
  346. }
  347. bool IsPlaneDetecting = false;
  348. public bool StartPlaneDetection()
  349. {
  350. if (nxrSlamInstance != null)
  351. {
  352. IsPlaneDetecting = true;
  353. Debug.Log("NXRSlam startPlaneDetection");
  354. return nxrSlamInstance.Call<bool>("startPlaneDetection", new PlaneDetectionCallback());
  355. }
  356. return false;
  357. }
  358. public bool StopPlaneDetection()
  359. {
  360. if (nxrSlamInstance != null && IsPlaneDetecting)
  361. {
  362. IsPlaneDetecting = false;
  363. Debug.Log("NXRSlam stopPlaneDetection");
  364. return nxrSlamInstance.Call<bool>("stopPlaneDetection");
  365. }
  366. return false;
  367. }
  368. public void Pause()
  369. {
  370. Debug.Log("NXRSlam pause");
  371. //StopPlaneDetection();
  372. //StopStreaming();
  373. //StopBuildMap();
  374. }
// Stream types that have been started, so StopStreaming() can stop them all.
List<RawDataType> streamingDataTypeList = new List<RawDataType>();
// Cached AndroidJavaClass handles for the Java-side enum types
// (created lazily on first use to avoid JNI work until needed).
AndroidJavaClass rawDataTypeCls = null;
AndroidJavaClass rgbFormatCls = null;
AndroidJavaClass tofStreamModeCls = null;
AndroidJavaClass tofFpsCls = null;
AndroidJavaClass rgbResolutionCls = null;
  381. public bool StartStreaming(RawDataType rawDataType)
  382. {
  383. if (nxrSlamInstance != null)
  384. {
  385. if (rawDataTypeCls == null)
  386. {
  387. rawDataTypeCls = new AndroidJavaClass("ruiyue.nxr.slam.NXRSlam$RawDataType");
  388. }
  389. if (rgbFormatCls == null)
  390. {
  391. rgbFormatCls = new AndroidJavaClass("ruiyue.nxr.slam.NXRSlam$RgbFormat");
  392. }
  393. if (rawDataType == RawDataType.RGB)
  394. {
  395. AndroidJavaObject rgbFormatObj = rgbFormatCls.CallStatic<AndroidJavaObject>("valueOf", RgbFormat.FORMAT_RGB.ToString());
  396. nxrSlamInstance.Call("setRgbFormat", rgbFormatObj);
  397. IsRGBCameraSteaming = true;
  398. }
  399. AndroidJavaObject rawDataTypeObj = rawDataTypeCls.CallStatic<AndroidJavaObject>("valueOf", rawDataType.ToString());
  400. streamingDataTypeList.Add(rawDataType);
  401. return nxrSlamInstance.Call<bool>("startStreaming", rawDataTypeObj, new StreamDataCallback());
  402. }
  403. return false;
  404. }
  405. public void SetLed(int select, int channel, int brightness)
  406. {
  407. if (nxrSlamInstance != null)
  408. {
  409. nxrSlamInstance.Call("setLed", select, channel, brightness);
  410. }
  411. }
  412. public void SetAec(int aecmode, int aecgain, int aectime)
  413. {
  414. if (nxrSlamInstance != null)
  415. {
  416. nxrSlamInstance.Call("setAec", aecmode, aecgain, aectime);
  417. }
  418. }
  419. TOFStreamMode tofStreamMode = TOFStreamMode.ONLY_DEPTH;
  420. public void SetTofStreamMode(TOFStreamMode mode)
  421. {
  422. if (nxrSlamInstance != null)
  423. {
  424. if (tofStreamModeCls == null)
  425. {
  426. tofStreamModeCls = new AndroidJavaClass("ruiyue.nxr.slam.NXRSlam$TOFStreamMode");
  427. }
  428. AndroidJavaObject tofStreamModeObj = tofStreamModeCls.CallStatic<AndroidJavaObject>("valueOf", mode.ToString());
  429. nxrSlamInstance.Call("setTofStreamMode", tofStreamModeObj);
  430. tofStreamMode = mode;
  431. }
  432. }
  433. public void SetTofFps(TOFFPS fps)
  434. {
  435. if (nxrSlamInstance != null)
  436. {
  437. if (tofFpsCls == null)
  438. {
  439. tofFpsCls = new AndroidJavaClass("ruiyue.nxr.slam.NXRSlam$TOFFPS");
  440. }
  441. AndroidJavaObject tofStreamModeObj = tofFpsCls.CallStatic<AndroidJavaObject>("valueOf", fps.ToString());
  442. nxrSlamInstance.Call("setTOFFps", tofStreamModeObj);
  443. }
  444. }
  445. public void SetRgbResolution(RgbResolution rgbResolution)
  446. {
  447. if (nxrSlamInstance != null)
  448. {
  449. if (rgbResolutionCls == null)
  450. {
  451. rgbResolutionCls = new AndroidJavaClass("ruiyue.nxr.slam.NXRSlam$RgbResolution");
  452. }
  453. AndroidJavaObject tofStreamModeObj = rgbResolutionCls.CallStatic<AndroidJavaObject>("valueOf", rgbResolution.ToString());
  454. nxrSlamInstance.Call("setRgbResolution", tofStreamModeObj);
  455. }
  456. }
  457. public bool StopStreaming(RawDataType rawDataType = RawDataType.NONE)
  458. {
  459. if (nxrSlamInstance != null)
  460. {
  461. if (rawDataType == RawDataType.NONE)
  462. {
  463. foreach(RawDataType type in streamingDataTypeList)
  464. {
  465. AndroidJavaObject rawDataTypeObj = rawDataTypeCls.CallStatic<AndroidJavaObject>("valueOf", type.ToString());
  466. Debug.Log("NXRSlam stopStreaming." + type.ToString());
  467. nxrSlamInstance.Call<bool>("stopStreaming", rawDataTypeObj);
  468. }
  469. IsRGBCameraSteaming = false;
  470. streamingDataTypeList.Clear();
  471. return true;
  472. } else
  473. {
  474. if (rawDataType == RawDataType.RGB) IsRGBCameraSteaming = false;
  475. AndroidJavaObject rawDataTypeObj = rawDataTypeCls.CallStatic<AndroidJavaObject>("valueOf", rawDataType.ToString());
  476. return nxrSlamInstance.Call<bool>("stopStreaming", rawDataTypeObj);
  477. }
  478. }
  479. return false;
  480. }
  481. private Mode CurMode = Mode.MODE_3DOF;
  482. public bool SwitchMode(Mode mode)
  483. {
  484. if (nxrSlamInstance != null)
  485. {
  486. CurMode = mode;
  487. AndroidJavaClass modeCls = new AndroidJavaClass("ruiyue.nxr.slam.NXRSlam$Mode");
  488. AndroidJavaObject modeObj = rawDataTypeCls.CallStatic<AndroidJavaObject>("valueOf", mode.ToString());
  489. return nxrSlamInstance.Call<bool>("switchMode", modeObj);
  490. }
  491. return false;
  492. }
#region
// Raised with the Java-side map status and quality codes.
public delegate void MapStatusAndQualityCallback(int status, int quality);
// Raised with map save/load progress (percent).
public delegate void MapPercentCallback(float percent);
// Raised for every non-RGB frame delivered by the streaming callback.
public delegate void StreamingDataCallback(NXRStreamData data);
// Raised for every detected plane.  NOTE(review): "Callaback" is a typo
// but part of the public API — kept for compatibility.
public delegate void PlaneDetectionCallaback(NXRPlaneData data);
public MapStatusAndQualityCallback MapStatusAndQualityEvent;
public MapPercentCallback MapPercentEvent;
public StreamingDataCallback StreamDataEvent;
public PlaneDetectionCallaback PlaneDetectionEvent;
  502. public class MapBuildCallback : AndroidJavaProxy
  503. {
  504. public MapBuildCallback() : base("ruiyue.nxr.slam.onMapListener")
  505. {
  506. }
  507. public void onMapStatusAndQuality(int status, int quality)
  508. {
  509. if (Instance.MapStatusAndQualityEvent != null)
  510. {
  511. Instance.MapStatusAndQualityEvent(status, quality);
  512. }
  513. }
  514. public void onMapPercent(float percent)
  515. {
  516. if (Instance.MapPercentEvent != null)
  517. {
  518. Instance.MapPercentEvent(percent);
  519. }
  520. }
  521. }
  522. public class StreamDataCallback : AndroidJavaProxy
  523. {
  524. public StreamDataCallback() : base("ruiyue.nxr.slam.onStreamDataListener")
  525. {
  526. }
  527. public void onStreamData(AndroidJavaObject rawDataType, AndroidJavaObject streamData)
  528. {
  529. RawDataType type = (RawDataType)rawDataType.Call<int>("ordinal");
  530. if (type == RawDataType.RGB) return;
  531. NXRStreamData nxrStreamData = new NXRStreamData(type, streamData);
  532. nxrStreamData.tofStreamMode = Instance.tofStreamMode;
  533. if (Instance.StreamDataEvent != null)
  534. {
  535. Instance.StreamDataEvent(nxrStreamData);
  536. }
  537. }
  538. }
  539. public class PlaneDetectionCallback : AndroidJavaProxy
  540. {
  541. public PlaneDetectionCallback() : base("ruiyue.nxr.slam.onPlaneDetectionListener")
  542. {
  543. }
  544. public void onPlaneDetected(AndroidJavaObject planeData)
  545. {
  546. if (planeData != null)
  547. {
  548. NXRPlaneData nxrPlaneData = new NXRPlaneData(planeData);
  549. if (Instance.PlaneDetectionEvent != null)
  550. {
  551. Instance.PlaneDetectionEvent(nxrPlaneData);
  552. }
  553. }
  554. else
  555. {
  556. Debug.LogError("AndroidJavaObject planeData is null !!!");
  557. }
  558. }
  559. }
  560. #endregion
  561. public NxrSlamPlane GeneratePlaneObject(NXRPlaneData planeData)
  562. {
  563. // Debug.Log("GeneratePlaneObject:" + planeData.id);
  564. GameObject obj = new GameObject();
  565. obj.name = "NxrSlamPlane " + planeData.id;
  566. obj.AddComponent<MeshCollider>();
  567. MeshRenderer meshRenderer = obj.AddComponent<MeshRenderer>();
  568. meshRenderer.material = Resources.Load("PlaneMaterial", typeof(Material)) as Material;
  569. obj.AddComponent<MeshFilter>();
  570. NxrSlamPlane plnScript = obj.AddComponent<NxrSlamPlane>();
  571. plnScript.MakePlane(planeData);
  572. return plnScript;
  573. }
/// <summary>
/// Splits one packed camera frame into separate Y/U/V plane buffers
/// (allocated here and returned via the ref parameters).
///
/// UYVY=>YUV422
/// [U0 Y0 V0 Y1] [U1 Y2 V1 Y3] [U2 Y4 V2 Y5]
/// Y = w * h, u = w/2 *h, v = w/2 *h
/// 2bytes=16bits = 8 + 4 + 4
///
/// YUV420
/// Y = w * h, u = w/2 *h/2, v = w/2 *h/2
/// 1.5bytes=12bits = 8 + 2 + 2
///
/// </summary>
/// <param name="buff">Packed source frame (YUV420p planar or UYVY interleaved).</param>
/// <param name="videoW">Frame width in pixels.</param>
/// <param name="videoH">Frame height in pixels.</param>
/// <param name="bufY">Receives the full-resolution luma plane.</param>
/// <param name="bufU">Receives the U chroma plane.</param>
/// <param name="bufV">Receives the V chroma plane.</param>
/// <param name="codec">Source layout; only YUV420p and YUYV are handled.</param>
public void LoadYUV(byte[] buff, int videoW, int videoH, ref byte[] bufY, ref byte[] bufU, ref byte[] bufV, Codec codec = Codec.YUV420p)
{
    // Bytes per pixel: planar 4:2:0 is 1.5, packed 4:2:2 is 2.
    float pixelBytes = codec == Codec.YUV420p ? 1.5f : 2.0f;
    int firstFrameEndIndex = (int)(videoH * videoW * pixelBytes);
    // Plane boundaries in a 4:2:0 frame: Y ends at 8/12 of the total
    // (= w*h), U ends at 10/12; the remainder is V.  Only used below in
    // the YUV420p branch.
    int yIndex = firstFrameEndIndex * 8 / 12;
    int uIndex = firstFrameEndIndex * 10 / 12;
    // int vIndex = firstFrameEndIndex;
    if(codec == Codec.YUV420p)
    {
        // Planar layout: a straight three-way copy by index range.
        bufY = new byte[videoW * videoH];
        bufU = new byte[videoW * videoH >> 2];
        bufV = new byte[videoW * videoH >> 2];
        for (int i = 0; i < firstFrameEndIndex; i++)
        {
            if (i < yIndex)
            {
                bufY[i] = buff[i];
            }
            else if (i < uIndex)
            {
                bufU[i - yIndex] = buff[i];
            }
            else
            {
                bufV[i - uIndex] = buff[i];
            }
        }
    }
    else if(codec == Codec.YUYV)
    {
        // UYVY interleaved layout (see summary): odd bytes are luma,
        // even bytes alternate U, V, U, V, ...
        bufY = new byte[videoW * videoH];
        bufU = new byte[videoW/2 * videoH];
        bufV = new byte[videoW/2 * videoH];
        int uTmpIndex = 0;
        int vTmpIndex = 0;
        bool isUFind = false;  // toggles U/V for the even-index bytes
        for (int i = 0; i < firstFrameEndIndex; i++)
        {
            if (i % 2 == 1)
            {
                // Y
                bufY[i / 2] = buff[i];
            }
            else
            {
                // U / V
                if (!isUFind)
                {
                    bufU[uTmpIndex] = buff[i];
                    isUFind = true;
                    uTmpIndex++;
                }
                else
                {
                    bufV[vTmpIndex] = buff[i];
                    isUFind = false;
                    vTmpIndex++;
                }
            }
        }
    }
    // byte[] bufUV = new byte[videoW * videoH >> 1];
    // If the U and V planes are to be packed together into one RGBA4444
    // texture, the byte order should be UVUVUVUV...  Sampling that texture
    // in a shader then yields the U component in the r/g channels and the
    // V component in the b/a channels.
    //for(int i = 0; i < bufUV.Length; i+=2)
    //{
    //    bufUV[i] = bufU[i >> 1];
    //    bufUV[i + 1] = bufV[i >> 1];
    //}
}
/// <summary>
/// Unpacks an integer of the form b*65536 + g*256 + r into a Color
/// (alpha stays 1 from Color.white).
/// NOTE(review): the channel values are written as 0..255 into Color
/// fields, which Unity normally treats as 0..1 — confirm callers expect
/// this un-normalized form before "fixing" it.
/// </summary>
public Color Int_To_RGB(int color)
{
    int b = color / (256 * 256);              // highest byte
    int g = (color - b * 256 * 256) / 256;    // middle byte
    int r = color - b * 256 * 256 - g * 256;  // lowest byte
    Color outcolor = Color.white;
    outcolor.r = r;
    outcolor.g = g;
    outcolor.b = b;
    return outcolor;
}
  675. public void YUV420P_TO_RGB24(int width, int height, byte[] dataY, byte[] dataU, byte[] dataV, byte[] dataRGB24)
  676. {
  677. int index_y=0, index_u = 0, index_v = 0;
  678. int index_r=0, index_g = 0, index_b = 0;
  679. for(int i=0; i< height; i++)
  680. {
  681. for(int j=0; j<width; j++)
  682. {
  683. index_r = (i * width + j) * 3;
  684. index_g = index_r + 1;
  685. index_b = index_r + 2;
  686. index_y = i * width + j;
  687. index_u = index_y / 4;
  688. index_v = index_y / 4;
  689. byte y = dataY[index_y];
  690. byte u = dataU[index_u];
  691. byte v = dataV[index_v];
  692. float yF = y / 255.0f;
  693. float uF = u / 255.0f;
  694. float vF = v / 255.0f;
  695. byte r = (byte)((yF + 1.4022f * vF - 0.7011f) * 255);
  696. byte g = (byte)((yF + 0.3456f * uF - 0.7145f * vF + 0.53005f) * 255);
  697. byte b = (byte)((yF + 1.771f * uF - 0.8855f) * 255);
  698. dataRGB24[index_r] = r;
  699. dataRGB24[index_g] = g;
  700. dataRGB24[index_b] = b;
  701. }
  702. }
  703. }
  704. }
  705. }
  706. /*
  707. if(RgbCamTexture2D_RGB == null && data.rgbCodecFormat == NxrSlam.Codec.YUV420p)
  708. {
  709. RgbCamTexture2D_RGB_Data = new byte[data.width * data.height * 3];
  710. RgbCamTexture2D_RGB = new Texture2D(data.width, data.height, TextureFormat.RGB24, false);
  711. RgbCameraPreviewII.material.SetTexture("_MainTex", RgbCamTexture2D_RGB);
  712. }
  713. NxrSlam.Instance.YUV420P_TO_RGB24(data.width, data.height, dataY, dataU, dataV, RgbCamTexture2D_RGB_Data);
  714. RgbCamTexture2D_RGB.LoadRawTextureData(RgbCamTexture2D_RGB_Data);
  715. RgbCamTexture2D_RGB.Apply();
  716. */