
Add tip, photo capture, and screen recording features

胡佳骏 1 year ago
commit ff8d68109e
100 changed files with 9203 additions and 293 deletions
  1. +0 -45  Assets/ARSaoTuManager.cs
  2. +0 -43  Assets/ChangeCameraSaoMiao.cs
  3. +30 -14  Assets/DownLoadXR/DownLoadXRManager.cs
  4. +5 -2  Assets/FrameWork/AR.prefab
  5. +39 -0  Assets/FrameWork/Art/Player.renderTexture
  6. +8 -0  Assets/FrameWork/Art/Player.renderTexture.meta
  7. +38 -0  Assets/FrameWork/Art/vrplayer.renderTexture
  8. +8 -0  Assets/FrameWork/Art/vrplayer.renderTexture.meta
  9. +4 -4  Assets/FrameWork/Error.prefab
  10. +7 -8  Assets/FrameWork/Login/Login.prefab
  11. +14 -0  Assets/FrameWork/Login/SmallIconItem.prefab
  12. +314 -0  Assets/FrameWork/PlayerToImage.prefab
  13. +7 -0  Assets/FrameWork/PlayerToImage.prefab.meta
  14. +13 -0  Assets/FrameWork/ProjectManager/Image.prefab
  15. +13 -0  Assets/FrameWork/ProjectManager/Model.prefab
  16. +145 -40  Assets/FrameWork/ProjectManager/Scripts/JinRuRenwu.cs
  17. +15 -6  Assets/FrameWork/ProjectManager/Scripts/Project/XunJian/XunJianDataManager.cs
  18. +90 -0  Assets/FrameWork/ProjectManager/Scripts/Project/XunJian/进入任务/ARSaoTuManager.cs
  19. +0 -0  Assets/FrameWork/ProjectManager/Scripts/Project/XunJian/进入任务/ARSaoTuManager.cs.meta
  20. +77 -0  Assets/FrameWork/ProjectManager/Scripts/Project/XunJian/进入任务/CaoZuoLanManager.cs
  21. +11 -0  Assets/FrameWork/ProjectManager/Scripts/Project/XunJian/进入任务/CaoZuoLanManager.cs.meta
  22. +166 -0  Assets/FrameWork/ProjectManager/Scripts/Project/XunJian/进入任务/ChangeCameraSaoMiao.cs
  23. +0 -0  Assets/FrameWork/ProjectManager/Scripts/Project/XunJian/进入任务/ChangeCameraSaoMiao.cs.meta
  24. +0 -0  Assets/FrameWork/ProjectManager/Scripts/Project/XunJian/进入任务/FollowCameraSaoMiao.cs
  25. +0 -0  Assets/FrameWork/ProjectManager/Scripts/Project/XunJian/进入任务/FollowCameraSaoMiao.cs.meta
  26. +520 -0  Assets/FrameWork/ProjectManager/Scripts/Project/XunJian/进入任务/GameUtility.cs
  27. +11 -0  Assets/FrameWork/ProjectManager/Scripts/Project/XunJian/进入任务/GameUtility.cs.meta
  28. +154 -0  Assets/FrameWork/ProjectManager/Scripts/Project/XunJian/进入任务/Loom.cs
  29. +11 -0  Assets/FrameWork/ProjectManager/Scripts/Project/XunJian/进入任务/Loom.cs.meta
  30. +126 -0  Assets/FrameWork/ProjectManager/Scripts/Project/XunJian/进入任务/PaiZhaoDataManager.cs
  31. +11 -0  Assets/FrameWork/ProjectManager/Scripts/Project/XunJian/进入任务/PaiZhaoDataManager.cs.meta
  32. +142 -0  Assets/FrameWork/ProjectManager/Scripts/Project/XunJian/进入任务/PaiZhaoItem.cs
  33. +11 -0  Assets/FrameWork/ProjectManager/Scripts/Project/XunJian/进入任务/PaiZhaoItem.cs.meta
  34. +153 -0  Assets/FrameWork/ProjectManager/Scripts/Project/XunJian/进入任务/PaiZhaoManager.cs
  35. +11 -0  Assets/FrameWork/ProjectManager/Scripts/Project/XunJian/进入任务/PaiZhaoManager.cs.meta
  36. +122 -0  Assets/FrameWork/ProjectManager/Scripts/Project/XunJian/进入任务/PlayerToImage.cs
  37. +11 -0  Assets/FrameWork/ProjectManager/Scripts/Project/XunJian/进入任务/PlayerToImage.cs.meta
  38. +1 -1  Assets/FrameWork/ProjectManager/Scripts/Project/XunJian/进入任务/RenWuListItemWindow.cs
  39. +2 -21  Assets/FrameWork/ProjectManager/Scripts/Project/XunJian/进入任务/RenWuTypeWindow.cs
  40. +12 -0  Assets/FrameWork/ProjectManager/Scripts/Project/XunJian/进入任务/SaoTuManager.cs
  41. +0 -0  Assets/FrameWork/ProjectManager/Scripts/Project/XunJian/进入任务/SaoTuManager.cs.meta
  42. +18 -0  Assets/FrameWork/ProjectManager/Scripts/Project/XunJian/进入任务/ShowImageManager.cs
  43. +11 -0  Assets/FrameWork/ProjectManager/Scripts/Project/XunJian/进入任务/ShowImageManager.cs.meta
  44. +35 -0  Assets/FrameWork/ProjectManager/Scripts/Project/XunJian/进入任务/ShowModelManager.cs
  45. +11 -0  Assets/FrameWork/ProjectManager/Scripts/Project/XunJian/进入任务/ShowModelManager.cs.meta
  46. +18 -0  Assets/FrameWork/ProjectManager/Scripts/Project/XunJian/进入任务/ShowTextManager.cs
  47. +11 -0  Assets/FrameWork/ProjectManager/Scripts/Project/XunJian/进入任务/ShowTextManager.cs.meta
  48. +27 -0  Assets/FrameWork/ProjectManager/Scripts/Project/XunJian/进入任务/ShowVideoManager.cs
  49. +11 -0  Assets/FrameWork/ProjectManager/Scripts/Project/XunJian/进入任务/ShowVideoManager.cs.meta
  50. +0 -0  Assets/FrameWork/ProjectManager/Scripts/Project/XunJian/进入任务/WindowGenSui.cs
  51. +0 -0  Assets/FrameWork/ProjectManager/Scripts/Project/XunJian/进入任务/WindowGenSui.cs.meta
  52. +21 -0  Assets/FrameWork/ProjectManager/Scripts/ShowInfoTipManager.cs
  53. +11 -0  Assets/FrameWork/ProjectManager/Scripts/ShowInfoTipManager.cs.meta
  54. +7 -0  Assets/FrameWork/ProjectManager/Scripts/TipAndErrorManager.cs
  55. +41 -0  Assets/FrameWork/ProjectManager/Scripts/TipInfoMsgManager.cs
  56. +11 -0  Assets/FrameWork/ProjectManager/Scripts/TipInfoMsgManager.cs.meta
  57. +13 -0  Assets/FrameWork/ProjectManager/Text.prefab
  58. +14 -0  Assets/FrameWork/ProjectManager/Top.prefab
  59. +13 -0  Assets/FrameWork/ProjectManager/Video.prefab
  60. +653 -106  Assets/FrameWork/ProjectManager/XunJian/进入任务/进入任务.prefab
  61. +311 -0  Assets/FrameWork/ProjectManager/paizhaoItem.prefab
  62. +7 -0  Assets/FrameWork/ProjectManager/paizhaoItem.prefab.meta
  63. +889 -0  Assets/FrameWork/ProjectManager/paizhaoList.prefab
  64. +7 -0  Assets/FrameWork/ProjectManager/paizhaoList.prefab.meta
  65. +364 -0  Assets/FrameWork/ProjectManager/拍照.prefab
  66. +13 -0  Assets/FrameWork/SaoMiao.prefab
  67. +16 -0  Assets/FrameWork/Scenes/Edustry.unity
  68. +53 -3  Assets/FrameWork/Scripts/Window/WindowsManager.cs
  69. BIN  Assets/FrameWork/Texture/半透明1.png
  70. +135 -0  Assets/FrameWork/Texture/半透明1.png.meta
  71. BIN  Assets/FrameWork/Texture/半透明2.png
  72. +135 -0  Assets/FrameWork/Texture/半透明2.png.meta
  73. +388 -0  Assets/FrameWork/Tip.prefab
  74. +7 -0  Assets/FrameWork/Tip.prefab.meta
  75. +8 -0  Assets/FrameWork/WindowsItem.asset
  76. +8 -0  Assets/OpenCVForUnity.meta
  77. +9 -0  Assets/OpenCVForUnity/Editor.meta
  78. +127 -0  Assets/OpenCVForUnity/Editor/OpenCVForUnityIOSBuildPostprocessor.cs
  79. +12 -0  Assets/OpenCVForUnity/Editor/OpenCVForUnityIOSBuildPostprocessor.cs.meta
  80. +386 -0  Assets/OpenCVForUnity/Editor/OpenCVForUnityMenuItem.cs
  81. +12 -0  Assets/OpenCVForUnity/Editor/OpenCVForUnityMenuItem.cs.meta
  82. +9 -0  Assets/OpenCVForUnity/Extra.meta
  83. +9 -0  Assets/OpenCVForUnity/Extra/exclude_contrib.meta
  84. +9 -0  Assets/OpenCVForUnity/Extra/exclude_contrib/Android.meta
  85. +9 -0  Assets/OpenCVForUnity/Extra/exclude_contrib/Android/libs.meta
  86. +9 -0  Assets/OpenCVForUnity/Extra/exclude_contrib/Android/libs/arm64-v8a.meta
  87. BIN  Assets/OpenCVForUnity/Extra/exclude_contrib/Android/libs/arm64-v8a/libopencvforunity.so
  88. +27 -0  Assets/OpenCVForUnity/Extra/exclude_contrib/Android/libs/arm64-v8a/libopencvforunity.so.meta
  89. +9 -0  Assets/OpenCVForUnity/Extra/exclude_contrib/Android/libs/armeabi-v7a.meta
  90. BIN  Assets/OpenCVForUnity/Extra/exclude_contrib/Android/libs/armeabi-v7a/libopencvforunity.so
  91. +27 -0  Assets/OpenCVForUnity/Extra/exclude_contrib/Android/libs/armeabi-v7a/libopencvforunity.so.meta
  92. +9 -0  Assets/OpenCVForUnity/Extra/exclude_contrib/Android/libs/x86.meta
  93. BIN  Assets/OpenCVForUnity/Extra/exclude_contrib/Android/libs/x86/libopencvforunity.so
  94. +27 -0  Assets/OpenCVForUnity/Extra/exclude_contrib/Android/libs/x86/libopencvforunity.so.meta
  95. +9 -0  Assets/OpenCVForUnity/Extra/exclude_contrib/iOS.meta
  96. BIN  Assets/OpenCVForUnity/Extra/exclude_contrib/iOS/libopencvforunity.a
  97. +20 -0  Assets/OpenCVForUnity/Extra/exclude_contrib/iOS/libopencvforunity.a.meta
  98. +21 -0  Assets/OpenCVForUnity/Extra/exclude_contrib/iOS/opencv2.framework.meta
  99. +9 -0  Assets/OpenCVForUnity/Extra/exclude_contrib/iOS/opencv2.framework/Headers.meta
  100. +2885 -0  Assets/OpenCVForUnity/Extra/exclude_contrib/iOS/opencv2.framework/Headers/calib3d.hpp

+ 0 - 45
Assets/ARSaoTuManager.cs

@@ -1,45 +0,0 @@
-using easyar;
-using System.Collections;
-using System.Collections.Generic;
-using UnityEngine;
-using static easyar.ImageTargetController;
-
-public class ARSaoTuManager : MonoSingleton<ARSaoTuManager>
-{
-    public List<GameObject> golist=new List<GameObject>();
-    public void Start()
-    {
-       // initARGameObj();
-    }
-
-    void clearAll()
-    {
-        for (int i = 0; i < golist.Count; i++)
-        {
-            Destroy(golist[i]);
-        }
-        golist.Clear();
-    }
-
-    public void initARGameObj(List<ImageFileSourceData> listdata)
-    {
-        clearAll();
-        for (int i = 0; i < listdata.Count; i++)
-        {
-
-            GameObject go = new GameObject();
-            go.name = "TargetImage_"+ listdata[i].Name;
-
-            ImageTargetController im = go.AddComponent<ImageTargetController>();
-            im.ImageFileSource= listdata[i];
-            /*
-            im.ImageFileSource.Path = listdata[i].// Application.streamingAssetsPath + "/Art/16：9.jpg";
-            im.ImageFileSource.Name = "jpg";
-            im.ImageFileSource.Scale = 0.1f;*/
-
-            golist.Add(go);
-        }
-        
-    }
-
-}

+ 0 - 43
Assets/ChangeCameraSaoMiao.cs

@@ -1,43 +0,0 @@
-using System.Collections;
-using System.Collections.Generic;
-using UnityEngine;
-
-public class ChangeCameraSaoMiao : MonoSingleton<ChangeCameraSaoMiao>
-{
-    public GameObject paizhao;
-    public GameObject luxiang;
-    public GameObject saomiao;
-    public void Awake()
-    {
-    }
-
-    public void showpaizhao()
-    {
-        paizhao.SetActive(true);
-        luxiang.SetActive(false);
-        saomiao.SetActive(false);
-
-    }
-    public void showluxiang()
-    {
-        paizhao.SetActive(false);
-        luxiang.SetActive(true);
-        saomiao.SetActive(false);
-
-    }
-    public void showsaomiao()
-    {
-
-        paizhao.SetActive(false);
-        luxiang.SetActive(false);
-        saomiao.SetActive(true);
-    }
-
-    public void close()
-    {
-
-        paizhao.SetActive(false);
-        luxiang.SetActive(false);
-        saomiao.SetActive(false);
-    }
-}

+ 30 - 14
Assets/DownLoadXR/DownLoadXRManager.cs

@@ -70,24 +70,40 @@ public class DownLoadXRManager
                     string path = Application.persistentDataPath + "/DownLoadXR/" + Url.Split("/")[Url.Split("/").Length - 1];
                     Debug.Log("准备存文件===》"+path);
                     FileInfo file = new FileInfo(path);
-                    if (file.Exists)
+                    try
                     {
-                        file.Delete();
+
+                        if (file.Exists)
+                        {
+                            file.Delete();
+                        }
+                        if (!Directory.Exists(Application.persistentDataPath + "/DownLoadXR"))
+                        {
+                            Directory.CreateDirectory(Application.persistentDataPath + "/DownLoadXR");
+                        }
+                        //如果此文件存在则打开  
+                        //sw = file .Append();  
+                        //如果此文件不存在则创建  
+                        sw = file.Create();
+
+                        //以行的形式写入信息  
+                        //sw.WriteLine(info);  
+                        sw.Write(fileBytes, 0, fileBytes.Length);
+                        sw.Close();
+                        sw.Dispose();
                     }
-                    if (!Directory.Exists(Application.persistentDataPath + "/DownLoadXR"))
+                    catch
                     {
-                        Directory.CreateDirectory(Application.persistentDataPath + "/DownLoadXR");
+                        /*
+                        JsonData data = new JsonData();
+                        data["type"] = "fileError";
+                        List<string> backTip = new List<string>();
+                        backTip.Add(data.ToJson());
+                        backTip.Add(data.ToJson());
+                        backTip.Add(data.ToJson());
+                        WindowsManager.Instance.show(WindowConfig.windowType.Error, false, WindowsManager.Instance.getErrorData("文件提示", "文件存储失败"+ path, Color.white, "icon", backTip, false, "", 5, "知道了.", "", "").ToJson());
+                        */
                     }
-                    //如果此文件存在则打开  
-                    //sw = file .Append();  
-                    //如果此文件不存在则创建  
-                    sw = file.Create();
-
-                    //以行的形式写入信息  
-                    //sw.WriteLine(info);  
-                    sw.Write(fileBytes, 0, fileBytes.Length);
-                    sw.Close();
-                    sw.Dispose();
 
                     bytes?.Invoke(path);
                     PlayerPrefs.SetString("DownLoadXR_" + Url, path);
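
The hunk above wraps the manual FileInfo Create/Write/Close/Dispose sequence in a try/catch and creates the DownLoadXR folder on demand. A minimal sketch of the same save step, assuming the same persistentDataPath/DownLoadXR layout (not the project's actual API; File.WriteAllBytes stands in for the manual stream handling):

// Sketch only: same save-path convention as the hunk above, not the project's actual API.
using System;
using System.IO;
using UnityEngine;

public static class DownloadFileWriter
{
    // Returns the saved path, or null when writing fails.
    public static string TrySave(string url, byte[] fileBytes)
    {
        string dir = Path.Combine(Application.persistentDataPath, "DownLoadXR");
        string path = Path.Combine(dir, url.Substring(url.LastIndexOf('/') + 1));
        try
        {
            Directory.CreateDirectory(dir);       // no-op if the folder already exists
            File.WriteAllBytes(path, fileBytes);  // overwrites any previous copy
            return path;
        }
        catch (Exception e)
        {
            Debug.LogError("DownLoadXR save failed: " + path + " (" + e.Message + ")");
            return null;
        }
    }
}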

+ 5 - 2
Assets/FrameWork/AR.prefab

@@ -137,7 +137,7 @@ MonoBehaviour:
   m_EditorClassIdentifier: 
   CenterMode: 0
   HorizontalFlipNormal: 0
-  HorizontalFlipFront: 1
+  HorizontalFlipFront: 0
   specificTargetCenter: {fileID: 0}
 --- !u!1 &2351110679568293635
 GameObject:
@@ -382,6 +382,9 @@ MonoBehaviour:
   m_Name: 
   m_EditorClassIdentifier: 
   global: 1
+  cameraDevice: {fileID: 2351110679746097938}
+  imageOnFind: {fileID: 2351110678720899160}
+  golist: []
 --- !u!1 &2351110680174526689
 GameObject:
   m_ObjectHideFlags: 0
@@ -445,7 +448,7 @@ Camera:
   m_Depth: -1
   m_CullingMask:
     serializedVersion: 2
-    m_Bits: 4294967295
+    m_Bits: 0
   m_RenderingPath: -1
   m_TargetTexture: {fileID: 0}
   m_TargetDisplay: 0

+ 39 - 0
Assets/FrameWork/Art/Player.renderTexture

@@ -0,0 +1,39 @@
+%YAML 1.1
+%TAG !u! tag:unity3d.com,2011:
+--- !u!84 &8400000
+RenderTexture:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  m_Name: Player
+  m_ImageContentsHash:
+    serializedVersion: 2
+    Hash: 00000000000000000000000000000000
+  m_ForcedFallbackFormat: 4
+  m_DownscaleFallback: 0
+  m_IsAlphaChannelOptional: 0
+  serializedVersion: 5
+  m_Width: 320
+  m_Height: 240
+  m_AntiAliasing: 1
+  m_MipCount: -1
+  m_DepthStencilFormat: 0
+  m_ColorFormat: 8
+  m_MipMap: 0
+  m_GenerateMips: 1
+  m_SRGB: 0
+  m_UseDynamicScale: 0
+  m_BindMS: 0
+  m_EnableCompatibleFormat: 1
+  m_TextureSettings:
+    serializedVersion: 2
+    m_FilterMode: 1
+    m_Aniso: 0
+    m_MipBias: 0
+    m_WrapU: 1
+    m_WrapV: 1
+    m_WrapW: 1
+  m_Dimension: 2
+  m_VolumeDepth: 1
+  m_ShadowSamplingMode: 2
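
Player.renderTexture above and vrplayer.renderTexture below both describe a 320x240 capture target. A rough runtime equivalent, assuming a plain 32-bit RGBA color format (the two assets differ mainly in their depth setting):

// Sketch: a runtime equivalent of these 320x240 render texture assets.
// Assumption: 32-bit RGBA color; pass 0 depth bits for Player, 24 for vrplayer (m_DepthFormat: 2).
using UnityEngine;

public static class CaptureTextureFactory
{
    public static RenderTexture Create(int depthBits)
    {
        var rt = new RenderTexture(320, 240, depthBits, RenderTextureFormat.ARGB32);
        rt.antiAliasing = 1;                  // m_AntiAliasing: 1
        rt.filterMode = FilterMode.Bilinear;  // m_FilterMode: 1
        rt.useMipMap = false;                 // m_MipMap: 0
        rt.Create();
        return rt;
    }
}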

+ 8 - 0
Assets/FrameWork/Art/Player.renderTexture.meta

@@ -0,0 +1,8 @@
+fileFormatVersion: 2
+guid: 1038abab12f09df4c91e9308c4f710cf
+NativeFormatImporter:
+  externalObjects: {}
+  mainObjectFileID: 8400000
+  userData: 
+  assetBundleName: 
+  assetBundleVariant: 

+ 38 - 0
Assets/FrameWork/Art/vrplayer.renderTexture

@@ -0,0 +1,38 @@
+%YAML 1.1
+%TAG !u! tag:unity3d.com,2011:
+--- !u!84 &8400000
+RenderTexture:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  m_Name: vrplayer
+  m_ImageContentsHash:
+    serializedVersion: 2
+    Hash: 00000000000000000000000000000000
+  m_ForcedFallbackFormat: 4
+  m_DownscaleFallback: 0
+  m_IsAlphaChannelOptional: 0
+  serializedVersion: 3
+  m_Width: 320
+  m_Height: 240
+  m_AntiAliasing: 1
+  m_MipCount: -1
+  m_DepthFormat: 2
+  m_ColorFormat: 8
+  m_MipMap: 0
+  m_GenerateMips: 1
+  m_SRGB: 0
+  m_UseDynamicScale: 0
+  m_BindMS: 0
+  m_EnableCompatibleFormat: 1
+  m_TextureSettings:
+    serializedVersion: 2
+    m_FilterMode: 1
+    m_Aniso: 0
+    m_MipBias: 0
+    m_WrapU: 1
+    m_WrapV: 1
+    m_WrapW: 1
+  m_Dimension: 2
+  m_VolumeDepth: 1

+ 8 - 0
Assets/FrameWork/Art/vrplayer.renderTexture.meta

@@ -0,0 +1,8 @@
+fileFormatVersion: 2
+guid: bdc0c6abe88c4de4581be67ba6633ff5
+NativeFormatImporter:
+  externalObjects: {}
+  mainObjectFileID: 8400000
+  userData: 
+  assetBundleName: 
+  assetBundleVariant: 

+ 4 - 4
Assets/FrameWork/Error.prefab

@@ -515,7 +515,7 @@ MonoBehaviour:
   m_VertexBufferAutoSizeReduction: 0
   m_useMaxVisibleDescender: 1
   m_pageToDisplay: 1
-  m_margin: {x: 0, y: 0, z: 0, w: 0}
+  m_margin: {x: 0, y: -11.225952, z: 0, w: 26.937805}
   m_isUsingLegacyAnimationComponent: 0
   m_isVolumetricText: 0
   m_hasFontAssetChanged: 0
@@ -861,7 +861,7 @@ MonoBehaviour:
   m_OnCullStateChanged:
     m_PersistentCalls:
       m_Calls: []
-  m_text: "\u5F53\u524D\u8D26\u53F7\u5DF2\u5C01\u7981"
+  m_text: "\u5F53\u524D\u8D26\u53F7\u5DF2\u5C01\u7981\u5F53\u524D\u8D26\u53F7\u5DF2\u5C01\u7981\u5F53\u524D\u8D26\u53F7\u5DF2\u5C01\u7981\u5F53\u524D\u8D26\u53F7\u5DF2\u5C01\u7981\u5F53\u524D\u8D26\u53F7\u5DF2\u5C01\u7981\u5F53\u524D\u8D26\u53F7\u5DF2\u5C01\u7981\u5F53\u524D\u8D26\u53F7\u5DF2\u5C01\u7981"
   m_isRightToLeft: 0
   m_fontAsset: {fileID: 11400000, guid: fb2cd06170cbccb45aeb2aa643b0e4d4, type: 2}
   m_sharedMaterial: {fileID: 8874737445518861689, guid: fb2cd06170cbccb45aeb2aa643b0e4d4, type: 2}
@@ -906,7 +906,7 @@ MonoBehaviour:
   m_charWidthMaxAdj: 0
   m_enableWordWrapping: 1
   m_wordWrappingRatios: 0.4
-  m_overflowMode: 0
+  m_overflowMode: 1
   m_linkedTextComponent: {fileID: 0}
   parentLinkedComponent: {fileID: 0}
   m_enableKerning: 1
@@ -924,7 +924,7 @@ MonoBehaviour:
   m_VertexBufferAutoSizeReduction: 0
   m_useMaxVisibleDescender: 1
   m_pageToDisplay: 1
-  m_margin: {x: 0, y: 0, z: 0, w: 0}
+  m_margin: {x: -67.931335, y: -4.3289185, z: -71.869934, w: 18.299545}
   m_isUsingLegacyAnimationComponent: 0
   m_isVolumetricText: 0
   m_hasFontAssetChanged: 0

+ 7 - 8
Assets/FrameWork/Login/Login.prefab

@@ -860,6 +860,7 @@ RectTransform:
   m_Children:
   - {fileID: 2195083590267680226}
   - {fileID: 6562645088846451702}
+  - {fileID: 4627271264101020060}
   m_Father: {fileID: 1294790462873742252}
   m_RootOrder: 2
   m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
@@ -1543,7 +1544,7 @@ MonoBehaviour:
   m_TargetGraphic: {fileID: 7086420143854824660}
   m_HandleRect: {fileID: 7611750814157746043}
   m_Direction: 2
-  m_Value: 0
+  m_Value: 1
   m_Size: 1
   m_NumberOfSteps: 0
   m_OnValueChanged:
@@ -1729,7 +1730,6 @@ RectTransform:
   - {fileID: 6672823423708142584}
   - {fileID: 1238823503620197604}
   - {fileID: 7644096570010894456}
-  - {fileID: 4627271264101020060}
   m_Father: {fileID: 0}
   m_RootOrder: 0
   m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
@@ -1750,7 +1750,6 @@ MonoBehaviour:
   m_Script: {fileID: 11500000, guid: a95a9fc897a682249871c9f84819b54c, type: 3}
   m_Name: 
   m_EditorClassIdentifier: 
-  data: 
   loginMain: {fileID: 502619497374680189}
   loginPassWord: {fileID: 1879010613933534945}
 --- !u!1 &6041951458396396047
@@ -2210,7 +2209,7 @@ RectTransform:
   m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
   m_AnchorMin: {x: 0, y: 1}
   m_AnchorMax: {x: 1, y: 1}
-  m_AnchoredPosition: {x: 0, y: 0.000030517578}
+  m_AnchoredPosition: {x: 0, y: -0.000030517578}
   m_SizeDelta: {x: 0, y: 300}
   m_Pivot: {x: 0, y: 1}
 --- !u!114 &2392315003609835272
@@ -3498,7 +3497,7 @@ PrefabInstance:
   m_ObjectHideFlags: 0
   serializedVersion: 2
   m_Modification:
-    m_TransformParent: {fileID: 1294790462873742252}
+    m_TransformParent: {fileID: 7644096570010894456}
     m_Modifications:
     - target: {fileID: 4608872851140012201, guid: 98f3a554d0e4f4641a5be4c923a47d83, type: 3}
       propertyPath: m_OnClick.m_PersistentCalls.m_Calls.Array.data[0].m_Target
@@ -3514,7 +3513,7 @@ PrefabInstance:
       objectReference: {fileID: 0}
     - target: {fileID: 4750341258902292937, guid: 98f3a554d0e4f4641a5be4c923a47d83, type: 3}
       propertyPath: m_RootOrder
-      value: 3
+      value: 2
       objectReference: {fileID: 0}
     - target: {fileID: 4750341258902292937, guid: 98f3a554d0e4f4641a5be4c923a47d83, type: 3}
       propertyPath: m_AnchorMax.x
@@ -3570,11 +3569,11 @@ PrefabInstance:
       objectReference: {fileID: 0}
     - target: {fileID: 4750341258902292937, guid: 98f3a554d0e4f4641a5be4c923a47d83, type: 3}
       propertyPath: m_AnchoredPosition.x
-      value: -37
+      value: -3
       objectReference: {fileID: 0}
     - target: {fileID: 4750341258902292937, guid: 98f3a554d0e4f4641a5be4c923a47d83, type: 3}
       propertyPath: m_AnchoredPosition.y
-      value: -606
+      value: -511
       objectReference: {fileID: 0}
     - target: {fileID: 4750341258902292937, guid: 98f3a554d0e4f4641a5be4c923a47d83, type: 3}
       propertyPath: m_LocalEulerAnglesHint.x

+ 14 - 0
Assets/FrameWork/Login/SmallIconItem.prefab

@@ -13,6 +13,7 @@ GameObject:
   - component: {fileID: 3176420488534560842}
   - component: {fileID: 5745037088995240394}
   - component: {fileID: 2378118562381520559}
+  - component: {fileID: 5003701075152160832}
   m_Layer: 5
   m_Name: SmallIconItem
   m_TagString: Untagged
@@ -147,6 +148,19 @@ MonoBehaviour:
           m_StringArgument: 
           m_BoolArgument: 0
         m_CallState: 2
+--- !u!114 &5003701075152160832
+MonoBehaviour:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  m_GameObject: {fileID: 2484990173723969103}
+  m_Enabled: 1
+  m_EditorHideFlags: 0
+  m_Script: {fileID: 11500000, guid: 35172762ec0d5c649988b493bae6ac42, type: 3}
+  m_Name: 
+  m_EditorClassIdentifier: 
+  msg: "\u8BF7\u9009\u62E9\u8D26\u53F7"
 --- !u!1 &6485174692968104930
 GameObject:
   m_ObjectHideFlags: 0

+ 314 - 0
Assets/FrameWork/PlayerToImage.prefab

@@ -0,0 +1,314 @@
+%YAML 1.1
+%TAG !u! tag:unity3d.com,2011:
+--- !u!1 &6371964580600787495
+GameObject:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  serializedVersion: 6
+  m_Component:
+  - component: {fileID: 6371964580600787494}
+  - component: {fileID: 6371964580600787491}
+  - component: {fileID: 6371964580600787492}
+  - component: {fileID: 6371964580600787493}
+  m_Layer: 6
+  m_Name: Canvas
+  m_TagString: Untagged
+  m_Icon: {fileID: 0}
+  m_NavMeshLayer: 0
+  m_StaticEditorFlags: 0
+  m_IsActive: 1
+--- !u!224 &6371964580600787494
+RectTransform:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  m_GameObject: {fileID: 6371964580600787495}
+  m_LocalRotation: {x: 0, y: 0, z: 0, w: 1}
+  m_LocalPosition: {x: 0, y: 0, z: 0}
+  m_LocalScale: {x: 0, y: 0, z: 0}
+  m_ConstrainProportionsScale: 0
+  m_Children:
+  - {fileID: 6371964580815403536}
+  m_Father: {fileID: 6371964581270030861}
+  m_RootOrder: 1
+  m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
+  m_AnchorMin: {x: 0, y: 0}
+  m_AnchorMax: {x: 0, y: 0}
+  m_AnchoredPosition: {x: 0, y: 0}
+  m_SizeDelta: {x: 0, y: 0}
+  m_Pivot: {x: 0, y: 0}
+--- !u!223 &6371964580600787491
+Canvas:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  m_GameObject: {fileID: 6371964580600787495}
+  m_Enabled: 1
+  serializedVersion: 3
+  m_RenderMode: 1
+  m_Camera: {fileID: 6371964580808598764}
+  m_PlaneDistance: 100
+  m_PixelPerfect: 0
+  m_ReceivesEvents: 1
+  m_OverrideSorting: 0
+  m_OverridePixelPerfect: 0
+  m_SortingBucketNormalizedSize: 0
+  m_AdditionalShaderChannelsFlag: 0
+  m_SortingLayerID: 0
+  m_SortingOrder: 0
+  m_TargetDisplay: 0
+--- !u!114 &6371964580600787492
+MonoBehaviour:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  m_GameObject: {fileID: 6371964580600787495}
+  m_Enabled: 1
+  m_EditorHideFlags: 0
+  m_Script: {fileID: 11500000, guid: 0cd44c1031e13a943bb63640046fad76, type: 3}
+  m_Name: 
+  m_EditorClassIdentifier: 
+  m_UiScaleMode: 0
+  m_ReferencePixelsPerUnit: 100
+  m_ScaleFactor: 1
+  m_ReferenceResolution: {x: 800, y: 600}
+  m_ScreenMatchMode: 0
+  m_MatchWidthOrHeight: 0
+  m_PhysicalUnit: 3
+  m_FallbackScreenDPI: 96
+  m_DefaultSpriteDPI: 96
+  m_DynamicPixelsPerUnit: 1
+  m_PresetInfoIsWorld: 0
+--- !u!114 &6371964580600787493
+MonoBehaviour:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  m_GameObject: {fileID: 6371964580600787495}
+  m_Enabled: 1
+  m_EditorHideFlags: 0
+  m_Script: {fileID: 11500000, guid: dc42784cf147c0c48a680349fa168899, type: 3}
+  m_Name: 
+  m_EditorClassIdentifier: 
+  m_IgnoreReversedGraphics: 1
+  m_BlockingObjects: 0
+  m_BlockingMask:
+    serializedVersion: 2
+    m_Bits: 4294967295
+--- !u!1 &6371964580808598767
+GameObject:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  serializedVersion: 6
+  m_Component:
+  - component: {fileID: 6371964580808598766}
+  - component: {fileID: 6371964580808598764}
+  m_Layer: 6
+  m_Name: Camera
+  m_TagString: Untagged
+  m_Icon: {fileID: 0}
+  m_NavMeshLayer: 0
+  m_StaticEditorFlags: 0
+  m_IsActive: 1
+--- !u!4 &6371964580808598766
+Transform:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  m_GameObject: {fileID: 6371964580808598767}
+  m_LocalRotation: {x: 0, y: 0, z: 0, w: 1}
+  m_LocalPosition: {x: 0, y: 0, z: 0}
+  m_LocalScale: {x: 1, y: 1, z: 1}
+  m_ConstrainProportionsScale: 0
+  m_Children: []
+  m_Father: {fileID: 6371964581270030861}
+  m_RootOrder: 0
+  m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
+--- !u!20 &6371964580808598764
+Camera:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  m_GameObject: {fileID: 6371964580808598767}
+  m_Enabled: 1
+  serializedVersion: 2
+  m_ClearFlags: 2
+  m_BackGroundColor: {r: 0, g: 0, b: 0, a: 0}
+  m_projectionMatrixMode: 1
+  m_GateFitMode: 2
+  m_FOVAxisMode: 0
+  m_SensorSize: {x: 36, y: 24}
+  m_LensShift: {x: 0, y: 0}
+  m_FocalLength: 50
+  m_NormalizedViewPortRect:
+    serializedVersion: 2
+    x: 0
+    y: 0
+    width: 1
+    height: 1
+  near clip plane: 0.3
+  far clip plane: 1000
+  field of view: 60
+  orthographic: 0
+  orthographic size: 5
+  m_Depth: 0
+  m_CullingMask:
+    serializedVersion: 2
+    m_Bits: 64
+  m_RenderingPath: -1
+  m_TargetTexture: {fileID: 8400000, guid: bdc0c6abe88c4de4581be67ba6633ff5, type: 2}
+  m_TargetDisplay: 0
+  m_TargetEye: 3
+  m_HDR: 1
+  m_AllowMSAA: 1
+  m_AllowDynamicResolution: 0
+  m_ForceIntoRT: 0
+  m_OcclusionCulling: 1
+  m_StereoConvergence: 10
+  m_StereoSeparation: 0.022
+--- !u!1 &6371964580815403537
+GameObject:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  serializedVersion: 6
+  m_Component:
+  - component: {fileID: 6371964580815403536}
+  - component: {fileID: 6371964580815403566}
+  - component: {fileID: 6371964580815403567}
+  m_Layer: 6
+  m_Name: RawImage
+  m_TagString: Untagged
+  m_Icon: {fileID: 0}
+  m_NavMeshLayer: 0
+  m_StaticEditorFlags: 0
+  m_IsActive: 1
+--- !u!224 &6371964580815403536
+RectTransform:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  m_GameObject: {fileID: 6371964580815403537}
+  m_LocalRotation: {x: 0, y: 0, z: 0, w: 1}
+  m_LocalPosition: {x: 0, y: 0, z: 0}
+  m_LocalScale: {x: 1, y: 1, z: 1}
+  m_ConstrainProportionsScale: 0
+  m_Children: []
+  m_Father: {fileID: 6371964580600787494}
+  m_RootOrder: 0
+  m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
+  m_AnchorMin: {x: 0, y: 0}
+  m_AnchorMax: {x: 1, y: 1}
+  m_AnchoredPosition: {x: 0, y: 0}
+  m_SizeDelta: {x: 0, y: 0}
+  m_Pivot: {x: 0.5, y: 0.5}
+--- !u!222 &6371964580815403566
+CanvasRenderer:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  m_GameObject: {fileID: 6371964580815403537}
+  m_CullTransparentMesh: 1
+--- !u!114 &6371964580815403567
+MonoBehaviour:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  m_GameObject: {fileID: 6371964580815403537}
+  m_Enabled: 1
+  m_EditorHideFlags: 0
+  m_Script: {fileID: 11500000, guid: 1344c3c82d62a2a41a3576d8abb8e3ea, type: 3}
+  m_Name: 
+  m_EditorClassIdentifier: 
+  m_Material: {fileID: 0}
+  m_Color: {r: 1, g: 1, b: 1, a: 1}
+  m_RaycastTarget: 1
+  m_RaycastPadding: {x: 0, y: 0, z: 0, w: 0}
+  m_Maskable: 1
+  m_OnCullStateChanged:
+    m_PersistentCalls:
+      m_Calls: []
+  m_Texture: {fileID: 0}
+  m_UVRect:
+    serializedVersion: 2
+    x: 0
+    y: 0
+    width: 1
+    height: 1
+--- !u!1 &6371964581270030862
+GameObject:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  serializedVersion: 6
+  m_Component:
+  - component: {fileID: 6371964581270030861}
+  - component: {fileID: 6371964581270030860}
+  - component: {fileID: 841037905012157494}
+  m_Layer: 6
+  m_Name: PlayerToImage
+  m_TagString: Untagged
+  m_Icon: {fileID: 0}
+  m_NavMeshLayer: 0
+  m_StaticEditorFlags: 0
+  m_IsActive: 1
+--- !u!4 &6371964581270030861
+Transform:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  m_GameObject: {fileID: 6371964581270030862}
+  m_LocalRotation: {x: 0, y: 0, z: 0, w: 1}
+  m_LocalPosition: {x: 0.117397115, y: -0.15264285, z: 1.5343657}
+  m_LocalScale: {x: 1, y: 1, z: 1}
+  m_ConstrainProportionsScale: 0
+  m_Children:
+  - {fileID: 6371964580808598766}
+  - {fileID: 6371964580600787494}
+  m_Father: {fileID: 0}
+  m_RootOrder: 0
+  m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
+--- !u!114 &6371964581270030860
+MonoBehaviour:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  m_GameObject: {fileID: 6371964581270030862}
+  m_Enabled: 1
+  m_EditorHideFlags: 0
+  m_Script: {fileID: 11500000, guid: 6653168e5a0c3b345b69b7a74503d092, type: 3}
+  m_Name: 
+  m_EditorClassIdentifier: 
+  global: 1
+  ca: {fileID: 6371964580808598764}
+  rimg: {fileID: 6371964580815403567}
+  audioOldVolume: 0
+--- !u!114 &841037905012157494
+MonoBehaviour:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  m_GameObject: {fileID: 6371964581270030862}
+  m_Enabled: 1
+  m_EditorHideFlags: 0
+  m_Script: {fileID: 11500000, guid: eb94bf957aaed554492064dde1c1775e, type: 3}
+  m_Name: 
+  m_EditorClassIdentifier: 
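
The PlayerToImage prefab wires a Camera restricted to layer 6 (m_CullingMask m_Bits: 64) into the vrplayer render texture and shows it through a RawImage on a screen-space-camera Canvas; the RawImage's m_Texture is left at 0 here, so it is presumably assigned at runtime by PlayerToImage.cs (not shown in this excerpt). A minimal sketch of that wiring, with hypothetical field names:

// Sketch with hypothetical names; the actual component here is PlayerToImage.cs (122 lines, added elsewhere in this commit).
using UnityEngine;
using UnityEngine.UI;

public class RenderToRawImageExample : MonoBehaviour
{
    public Camera captureCamera;  // the "Camera" child of this prefab
    public RawImage output;       // the "RawImage" child under the Canvas
    public RenderTexture target;  // e.g. the vrplayer render texture asset

    void Start()
    {
        captureCamera.cullingMask = 1 << 6;   // only layer 6, matching m_Bits: 64
        captureCamera.targetTexture = target; // camera renders off-screen into the texture
        output.texture = target;              // RawImage displays the same texture on the UI
    }
}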

+ 7 - 0
Assets/FrameWork/PlayerToImage.prefab.meta

@@ -0,0 +1,7 @@
+fileFormatVersion: 2
+guid: a2814d80086e5b5408355d76fc96c80f
+PrefabImporter:
+  externalObjects: {}
+  userData: 
+  assetBundleName: 
+  assetBundleVariant: 

+ 13 - 0
Assets/FrameWork/ProjectManager/Image.prefab

@@ -12,6 +12,7 @@ GameObject:
   - component: {fileID: 6399698229834585584}
   - component: {fileID: 6546605293001365448}
   - component: {fileID: 4983239016688348473}
+  - component: {fileID: -3935363561865819814}
   m_Layer: 5
   m_Name: Image
   m_TagString: Untagged
@@ -86,3 +87,15 @@ MonoBehaviour:
   m_Script: {fileID: 11500000, guid: b9df92cc077b8924abbbb78762154fe0, type: 3}
   m_Name: 
   m_EditorClassIdentifier: 
+--- !u!114 &-3935363561865819814
+MonoBehaviour:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  m_GameObject: {fileID: 4458167396202558435}
+  m_Enabled: 1
+  m_EditorHideFlags: 0
+  m_Script: {fileID: 11500000, guid: c87572251ca822141acf06c5ca528a82, type: 3}
+  m_Name: 
+  m_EditorClassIdentifier: 

+ 13 - 0
Assets/FrameWork/ProjectManager/Model.prefab

@@ -12,6 +12,7 @@ GameObject:
   - component: {fileID: 8193688906978589056}
   - component: {fileID: 3114445212997545933}
   - component: {fileID: 5787054142968232577}
+  - component: {fileID: -4583455588524958336}
   m_Layer: 5
   m_Name: Model
   m_TagString: Untagged
@@ -87,6 +88,18 @@ MonoBehaviour:
   m_Script: {fileID: 11500000, guid: b9df92cc077b8924abbbb78762154fe0, type: 3}
   m_Name: 
   m_EditorClassIdentifier: 
+--- !u!114 &-4583455588524958336
+MonoBehaviour:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  m_GameObject: {fileID: 2415235072230918127}
+  m_Enabled: 1
+  m_EditorHideFlags: 0
+  m_Script: {fileID: 11500000, guid: 9b952a506485cf5488d6957fe20cf10f, type: 3}
+  m_Name: 
+  m_EditorClassIdentifier: 
 --- !u!1001 &512708586739425507
 PrefabInstance:
   m_ObjectHideFlags: 0

+ 145 - 40
Assets/FrameWork/ProjectManager/Scripts/JinRuRenwu.cs

@@ -29,6 +29,8 @@ public class JinRuRenwu : WindowSingleton<JinRuRenwu>
     public GameObject Image2;
     public GameObject Text;
 
+    public GameObject paizhaoGo;
+
     public TextMeshProUGUI indexText;
 
 
@@ -36,16 +38,47 @@ public class JinRuRenwu : WindowSingleton<JinRuRenwu>
 
     public GameObject kong;
 
-    public Button NextButton;
 
     public GameObject saotuTip;
     public GameObject StartGo;
 
+
+    public GameObject paizhaoanniu;
+
+    public int PaiZhaoIndex=0;
     public override void OnEnable()
     {
         base.OnEnable();
     }
 
+    public void isPaiZhao(bool isPaizhao)
+    {
+        if(isPaizhao)
+        {
+            changIndex(0);
+        }
+        else
+        {
+
+            changIndex(1);
+        }
+    }
+
+    public void changIndex(int i)
+    {
+        PaiZhaoIndex = i;
+        if(PaiZhaoIndex==0)
+        {
+            if (ChangeCameraSaoMiao.Instance)
+            ChangeCameraSaoMiao.Instance.showpaizhao();
+        }else
+        {
+
+            if (ChangeCameraSaoMiao.Instance)
+                ChangeCameraSaoMiao.Instance.showluxiang();
+        }
+    }
+
     public void initStart()
     {
 
@@ -56,47 +89,49 @@ public class JinRuRenwu : WindowSingleton<JinRuRenwu>
    
     public void GotoStart()
     {
+        XunJianDataManager.Instance.chooseXunJian.nowIndex = -1;
         switch (XunJianDataManager.Instance.chooseXunJian.renwuModelType)
         {
             case RenWuModelType.SaoTu:
-                ChangeCameraSaoMiao.Instance.showsaomiao();
+                ChangeCameraSaoMiao.Instance.showsaomiao(XunJianDataManager.Instance.chooseXunJian.itemList[0]);
                 break;
             case RenWuModelType.DianYun:
                 break;
             case RenWuModelType.DingWeiBan:
                 break;
         }
-
-        TimerMgr.Instance.CreateTimer(()=> { showInit(); },5f);
+        //  TimerMgr.Instance.CreateTimer(()=> { showInit(); },5f);
     }
 
-    public void showInit()
+    public void show()
     {
-
-        ChangeCameraSaoMiao.Instance.close();
+        Debug.Log("showshowshowshowshowshow");
         saotuTip.SetActive(false);
         StartGo.SetActive(true);
-        XunJianDataManager.Instance.chooseXunJian.nowIndex = 0;
-        //RoadManager.Instance.gameObject.SetActive(true);
-        updateListRef();
+        //updateListRef();
         TimerMgr.Instance.CreateTimer(() => {
-
             kong.SetActive(!kong.activeSelf);
         }, 0.01f, 6);
-        if (XunJianDataManager.Instance.chooseXunJian != null)
-            UpdateData();
+        gotoNextItem();
+       // if (XunJianDataManager.Instance.chooseXunJian != null)
+        //    UpdateData();
     }
+    
 
     public override void UpdateData()
     {
         base.UpdateData();
+        ChangeCameraSaoMiao.Instance.close();
         VideoBT.SetActive(false);
         ModelBT.SetActive(false);
         ImageBT.SetActive(false);
         Image2BT.SetActive(false);
         TextBT.SetActive(false);
         rwitem = XunJianDataManager.Instance.chooseXunJian.itemList[XunJianDataManager.Instance.chooseXunJian.nowIndex];
-        LineManager.Instance.setRoad(rwitem.roadList);
+        if(XunJianDataManager.Instance.chooseXunJian.renwuModelType== RenWuModelType.DianYun)
+        {
+            LineManager.Instance.setRoad(rwitem.roadList);
+        }
         indexText.text = (rwitem.index+1).ToString();
         info.text = rwitem.info;
         if (rwitem.typeList.Count > 0)
@@ -153,6 +188,12 @@ public class JinRuRenwu : WindowSingleton<JinRuRenwu>
     }
     private void OnDisable()
     {
+        if(ChangeCameraSaoMiao.Instance)
+        {
+
+            ChangeCameraSaoMiao.Instance.close();
+            ChangeCameraSaoMiao.Instance.checkLuXiang();
+        }
         LineManager.Instance.stop();
         WindowGenSui.Instance.jd = 30;
         //RoadManager.Instance.gameObject.SetActive(false);
@@ -234,6 +275,10 @@ public class JinRuRenwu : WindowSingleton<JinRuRenwu>
                 ct++;
             }
         }
+        if (JinRuRenwu.Instance.paizhaoGo.activeSelf)
+        {
+            ct += 1;
+        }
         if (ct >3)
             WindowGenSui.Instance.jd = 60;
         else if (ct > 0)
@@ -243,6 +288,7 @@ public class JinRuRenwu : WindowSingleton<JinRuRenwu>
 
             WindowGenSui.Instance.jd = 30;
         }
+
     }
     public void closeAll()
     {
@@ -256,21 +302,42 @@ public class JinRuRenwu : WindowSingleton<JinRuRenwu>
     public void showText()
     {
         Text.SetActive(!Text.activeSelf);
-        Text.transform.SetAsFirstSibling();
+        if (!JinRuRenwu.Instance.paizhaoGo.activeSelf)
+        {
+            Text.transform.SetAsFirstSibling();
+        }
+        else
+        {
+            Video.transform.SetSiblingIndex(1);
+        }
         checkJd();
     }
     public void showImage()
     {
 
         Image.SetActive(!Image.activeSelf);
-        Image.transform.SetAsFirstSibling();
-        checkJd();
+        if (!JinRuRenwu.Instance.paizhaoGo.activeSelf)
+        {
+            Image.transform.SetAsFirstSibling();
+        }
+        else
+        {
+            Video.transform.SetSiblingIndex(1);
+        }
+            checkJd();
     }
     public void showImage2()
     {
 
         Image2.SetActive(!Image2.activeSelf);
-        Image2.transform.SetAsFirstSibling();
+        if (!JinRuRenwu.Instance.paizhaoGo.activeSelf)
+        {
+            Image2.transform.SetAsFirstSibling();
+        }
+        else
+        {
+            Video.transform.SetSiblingIndex(1);
+        }
         checkJd();
     }
 
@@ -278,47 +345,65 @@ public class JinRuRenwu : WindowSingleton<JinRuRenwu>
     {
 
         Model.SetActive(!Model.activeSelf);
-        Model.transform.SetAsFirstSibling();
+        if (!JinRuRenwu.Instance.paizhaoGo.activeSelf)
+        {
+            Model.transform.SetAsFirstSibling();
+        }
+        else
+        {
+            Video.transform.SetSiblingIndex(1);
+        }
         checkJd();
     }
     public void showVideo()
     {
 
         Video.SetActive(!Video.activeSelf);
-        Video.transform.SetAsFirstSibling();
+        if (!JinRuRenwu.Instance.paizhaoGo.activeSelf)
+        {
+            Video.transform.SetAsFirstSibling();
+        }
+        else
+        {
+            Video.transform.SetSiblingIndex(1);
+        }
         checkJd();
     }
-
+    public Toggle paizhaoToggle;
     public void GotoPaiZhao()
     {
-        ChangeCameraSaoMiao.Instance.showpaizhao();
+        if(!JinRuRenwu.Instance.paizhaoGo.activeSelf)
+        {
+            PaiZhaoIndex = 0;
+            ChangeCameraSaoMiao.Instance.showpaizhao();
+
+        }else
+        {
+            paizhaoToggle.isOn = true;
+            ChangeCameraSaoMiao.Instance.close();
+        }
+        checkJd();
       //  PaiZhaoGo.SetActive(!PaiZhaoGo.activeSelf);
     }
 
-    public void GotoTask()
+    public void paizhaoAndLuxiang()
     {
-        TaskGo.SetActive(!TaskGo.activeSelf);
-    }
 
-    public void GotoLieBiao()
-    {
-        WindowsManager.Instance.show(WindowConfig.windowType.XunJianLB);
+        if (PaiZhaoIndex == 0)
+        {
+            ChangeCameraSaoMiao.Instance.paizhaoclick();
+        }
+        else
+        {
+            ChangeCameraSaoMiao.Instance.luxiangClick();
+        }
     }
 
-    public void successItem()
-    {
-        rwitem.state = RenWuState.Success;
-        gotoNextItem();
-    }
-    public void failItem()
-    {
-        rwitem.state = RenWuState.Fail;
-        gotoNextItem();
-    }
-    public void gotoNextItem()
+    public void GotoTask()
     {
-        XunJianDataManager.Instance.gotoNext();
+        TaskGo.SetActive(!TaskGo.activeSelf);
     }
+
     public void updateListRef()
     {
         for (int i = updateList.Count-1; i >=0; i--)
@@ -342,4 +427,24 @@ public class JinRuRenwu : WindowSingleton<JinRuRenwu>
             }
         }
     }
+
+
+
+
+    public void successItem()
+    {
+        rwitem.state = RenWuState.Success;
+    }
+    public void failItem()
+    {
+        rwitem.state = RenWuState.Fail;
+    }
+    public void gotoNextItem()
+    {
+        XunJianDataManager.Instance.gotoNext();
+    }
+    public void GotoLieBiao()
+    {
+        WindowsManager.Instance.show(WindowConfig.windowType.XunJianLB);
+    }
 }
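
The five show* toggles in this hunk repeat the same ordering branch (note that each else branch reorders Video rather than the panel that was just toggled). A sketch of how the shared branch could live in one helper inside JinRuRenwu, assuming the same paizhaoGo field and checkJd method:

// Sketch: one helper replacing the repeated branch in showText/showImage/showImage2/showModel/showVideo.
void TogglePanel(GameObject panel)
{
    panel.SetActive(!panel.activeSelf);
    if (!paizhaoGo.activeSelf)
        panel.transform.SetAsFirstSibling();  // normal mode: bring the toggled panel to the front
    else
        panel.transform.SetSiblingIndex(1);   // photo mode: keep the capture view in front
    checkJd();
}
// e.g. public void showImage() { TogglePanel(Image); }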

+ 15 - 6
Assets/FrameWork/ProjectManager/Scripts/Project/XunJian/XunJianDataManager.cs

@@ -1,8 +1,10 @@
+using LitJson;
 using System;
 using System.Collections;
 using System.Collections.Generic;
 using UnityEngine;
 using XRTool.Util;
+using static easyar.ImageTargetController;
 
 public class XunJianDataManager : Singleton<XunJianDataManager>
 {
@@ -43,6 +45,9 @@ public class XunJianDataManager : Singleton<XunJianDataManager>
             for (int j = 0; j < 7; j++)
             {
                 RenWuItem rw = new RenWuItem();
+
+                rw.imageUrl = Application.streamingAssetsPath + "/ARTargetImage/"+(j+1)+".png";
+
                 //任务路线 (空间定位时才有)
                 rw.roadList = new List<Vector3>();
                 Vector3 v3 = new Vector3(UnityEngine.Random.Range(-3.1f, 3.1f), 0, UnityEngine.Random.Range(-3.1f, 3.1f));
@@ -139,17 +144,20 @@ public class XunJianDataManager : Singleton<XunJianDataManager>
             {
                 RenWuListWindow.Instance.UpdateData();
             }
-        }else
+            if (CaoZuoLanManager.Instance)
+                CaoZuoLanManager.Instance.nextInteractable(false);
+        }
+        else
         {
             List<string> backTip = new List<string>();
+            JsonData data = new JsonData();
+            data["type"] = "30001";
             backTip.Add("1");
-            backTip.Add("2");
+            backTip.Add(data.ToJson());
             backTip.Add("3");
-            WindowsManager.Instance.show(WindowConfig.windowType.Error, false, WindowsManager.Instance.getErrorData("提示", "已经是最后一步了!", Color.gray, "icon", backTip, false, "敬请期待", 5).ToJson());
-
+            WindowsManager.Instance.show(WindowConfig.windowType.Error, false, WindowsManager.Instance.getErrorData("提示", "已经是最后一步了!", Color.gray, "icon", backTip, false, "自动退出", 5,"","退出巡检","返回").ToJson());
 
         }
-
     }
 
     public void chooseItem(int i)
@@ -175,7 +183,8 @@ public class XunJianDataManager : Singleton<XunJianDataManager>
 
         public List<Vector3> roadList;
 
-
+        public string imageUrl;
+        public ImageFileSourceData imageData;
         public RenWuState state = RenWuState.None;
     }
     public class RenWuTypeModel

+ 90 - 0
Assets/FrameWork/ProjectManager/Scripts/Project/XunJian/进入任务/ARSaoTuManager.cs

@@ -0,0 +1,90 @@
+using easyar;
+using System;
+using System.Collections;
+using System.Collections.Generic;
+using UnityEngine;
+using static easyar.ImageTargetController;
+using static XunJianDataManager;
+
+public class ARSaoTuManager : MonoSingleton<ARSaoTuManager>
+{
+    public CameraDeviceFrameSource cameraDevice;
+    public ImageTrackerFrameFilter imageOnFind;
+    public List<GameObject> golist=new List<GameObject>();
+    public void Start()
+    {
+        // initARGameObj();
+        imageOnFind.TargetLoad += OnLoad;
+    }
+
+    public void closeDevice()
+    {
+        cameraDevice.Close();
+    }
+    public void startDevice()
+    {
+        cameraDevice.Open();
+    }
+
+    private void OnLoad(ImageTargetController arg1, Target arg2, bool arg3)
+    {
+        if(arg3)
+        {
+            arg1.TargetFound += ()=> {
+
+                if (XunJianDataManager.Instance.chooseXunJian.itemList[XunJianDataManager.Instance.chooseXunJian.nowIndex + 1].imageData!=null&&arg1.name== "TargetImage_" + XunJianDataManager.Instance.chooseXunJian.itemList[ XunJianDataManager.Instance.chooseXunJian.nowIndex+1].imageData.Name)
+                {
+                    Debug.Log("showshowshowshowshowshow");
+                    JinRuRenwu.Instance.show();
+                    closeDevice();
+                }
+
+
+            };
+        }
+    }
+
+    void clearAll()
+    {
+        for (int i = 0; i < golist.Count; i++)
+        {
+            Destroy(golist[i]);
+        }
+        golist.Clear();
+    }
+
+    public void AddARGameObj(ImageFileSourceData listdata)
+    {
+        XRRGBCamera.Instance.stopCamera();
+        startDevice();
+        if (check(listdata))
+        {
+            GameObject go = new GameObject();
+            go.name = "TargetImage_" + listdata.Name;
+
+            ImageTargetController im = go.AddComponent<ImageTargetController>();
+            im.ImageFileSource = listdata;
+            imageOnFind.LoadTarget(im);
+            /*
+            im.ImageFileSource.Path = listdata[i].// Application.streamingAssetsPath + "/Art/16：9.jpg";
+            im.ImageFileSource.Name = "jpg";
+            im.ImageFileSource.Scale = 0.1f;*/
+
+            golist.Add(go);
+
+        }
+    }
+
+    bool check(ImageFileSourceData listdata)
+    {
+        for (int i = 0; i < golist.Count; i++)
+        {
+            if(golist[i].GetComponent<ImageTargetController>().ImageFileSource.Path == listdata.Path)
+            {
+                return false;
+            }
+        }
+        return true;
+    }
+
+}
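
For orientation, a sketch of how this manager gets fed a target; it mirrors what ChangeCameraSaoMiao.showsaomiao does further down in this commit once the target image has been downloaded (localImagePath is a hypothetical local file path):

// Sketch: registering one downloaded target image with this manager (localImagePath is hypothetical).
void RegisterTarget(string localImagePath)
{
    ImageFileSourceData data = new ImageFileSourceData();
    data.Name = localImagePath.Split('/')[localImagePath.Split('/').Length - 1];
    data.Path = localImagePath;
    data.PathType = PathType.Absolute;
    data.Scale = 0.1f;
    ARSaoTuManager.Instance.AddARGameObj(data);
}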

+ 0 - 0
Assets/ARSaoTuManager.cs.meta → Assets/FrameWork/ProjectManager/Scripts/Project/XunJian/进入任务/ARSaoTuManager.cs.meta


+ 77 - 0
Assets/FrameWork/ProjectManager/Scripts/Project/XunJian/进入任务/CaoZuoLanManager.cs

@@ -0,0 +1,77 @@
+using System.Collections;
+using System.Collections.Generic;
+using UnityEngine;
+using UnityEngine.UI;
+using XRTool.Util;
+using static XunJianDataManager;
+
+public class CaoZuoLanManager : MonoSingleton<CaoZuoLanManager>
+{
+    public GameObject duigou;
+    public GameObject dazha;
+    public GameObject xiayibu;
+
+
+    public bool isChoose = false;
+
+    private void OnEnable()
+    {
+        //nextInteractable(false);
+    }
+
+    public void successItem()
+    {
+        JinRuRenwu.Instance.successItem();
+        nextInteractable(true);
+        if (XunJianDataManager.Instance.chooseXunJian.itemList.Count > (XunJianDataManager.Instance.chooseXunJian.nowIndex + 1))
+        {
+            switch (XunJianDataManager.Instance.chooseXunJian.renwuModelType)
+            {
+                case RenWuModelType.SaoTu:
+                    ChangeCameraSaoMiao.Instance.showsaomiao(XunJianDataManager.Instance.chooseXunJian.itemList[XunJianDataManager.Instance.chooseXunJian.nowIndex + 1]);
+                    break;
+                case RenWuModelType.DianYun:
+                    break;
+                case RenWuModelType.DingWeiBan:
+                    break;
+            }
+        }
+    }
+    public void failItem()
+    {
+        JinRuRenwu.Instance.failItem();
+        nextInteractable(true);
+
+        if (XunJianDataManager.Instance.chooseXunJian.itemList.Count > (XunJianDataManager.Instance.chooseXunJian.nowIndex + 1))
+        {
+
+            switch (XunJianDataManager.Instance.chooseXunJian.renwuModelType)
+            {
+                case RenWuModelType.SaoTu:
+                    ChangeCameraSaoMiao.Instance.showsaomiao(XunJianDataManager.Instance.chooseXunJian.itemList[XunJianDataManager.Instance.chooseXunJian.nowIndex + 1]);
+                    break;
+                case RenWuModelType.DianYun:
+                    break;
+                case RenWuModelType.DingWeiBan:
+                    break;
+            }
+
+        }
+    }
+    public void gotoNextItem()
+    {
+        JinRuRenwu.Instance.gotoNextItem();
+    }
+    public void GotoLieBiao()
+    {
+        WindowsManager.Instance.show(WindowConfig.windowType.XunJianLB);
+    }
+
+    public void nextInteractable(bool isActive)
+    {
+        Debug.Log("nextInteractable==>"+isActive);
+        isChoose = isActive;
+        xiayibu.GetComponent<Button>().interactable = isActive;
+
+    }
+}
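
successItem and failItem above differ only in the RenWuState they report; a sketch of the shared tail, assuming the same singletons and the SaoTu flow shown above:

// Sketch: shared continuation for successItem/failItem inside CaoZuoLanManager.
void FinishCurrentItem(bool success)
{
    if (success) JinRuRenwu.Instance.successItem();
    else JinRuRenwu.Instance.failItem();
    nextInteractable(true);

    var xj = XunJianDataManager.Instance.chooseXunJian;
    if (xj.itemList.Count > xj.nowIndex + 1 && xj.renwuModelType == RenWuModelType.SaoTu)
    {
        // Re-arm scanning for the next inspection item's target image.
        ChangeCameraSaoMiao.Instance.showsaomiao(xj.itemList[xj.nowIndex + 1]);
    }
}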

+ 11 - 0
Assets/FrameWork/ProjectManager/Scripts/Project/XunJian/进入任务/CaoZuoLanManager.cs.meta

@@ -0,0 +1,11 @@
+fileFormatVersion: 2
+guid: c5dda28f7d17c78459d80f7e672b6601
+MonoImporter:
+  externalObjects: {}
+  serializedVersion: 2
+  defaultReferences: []
+  executionOrder: 0
+  icon: {instanceID: 0}
+  userData: 
+  assetBundleName: 
+  assetBundleVariant: 

+ 166 - 0
Assets/FrameWork/ProjectManager/Scripts/Project/XunJian/进入任务/ChangeCameraSaoMiao.cs

@@ -0,0 +1,166 @@
+using easyar;
+using LitJson;
+using RenderHeads.Media.AVProMovieCapture;
+using System;
+using System.Collections;
+using System.Collections.Generic;
+using System.IO;
+using TMPro;
+using UnityEngine;
+using XRTool.Util;
+using static easyar.ImageTargetController;
+using static PaiZhaoDataManager;
+using static XunJianDataManager;
+
+public class ChangeCameraSaoMiao : MonoSingleton<ChangeCameraSaoMiao>
+{
+    public CaptureFromTexture caTexture;
+
+    public GameObject paizhao;
+    public GameObject luxiang;
+    public GameObject saomiao;
+    public void Awake()
+    {
+        caTexture.CompletedFileWritingAction += OnCompletedFileWritingAction;
+    }
+
+    public void showpaizhao()
+    {
+        if (!XRRGBCamera.Instance.RGBCamTexture || !XRRGBCamera.Instance.RGBCamTexture.isPlaying)
+            XRRGBCamera.Instance.playCamera(640,480);
+        JinRuRenwu.Instance.paizhaoanniu.SetActive(true);
+        JinRuRenwu.Instance.paizhaoGo.SetActive(true);
+        paizhao.SetActive(true);
+        luxiang.SetActive(false);
+        saomiao.SetActive(false);
+        checkLuXiang();
+
+    }
+    public void showluxiang()
+    {
+        if (!XRRGBCamera.Instance.RGBCamTexture || !XRRGBCamera.Instance.RGBCamTexture.isPlaying)
+            XRRGBCamera.Instance.playCamera(640, 480);
+        JinRuRenwu.Instance.paizhaoanniu.SetActive(true);
+        JinRuRenwu.Instance.paizhaoGo.SetActive(true);
+        paizhao.SetActive(false);
+        luxiang.SetActive(true);
+        saomiao.SetActive(false);
+
+    }
+    public void showsaomiao(RenWuItem ri)
+    {
+        checkLuXiang();
+        JinRuRenwu.Instance.paizhaoanniu.SetActive(false);
+        JinRuRenwu.Instance.paizhaoGo.SetActive(false);
+        paizhao.SetActive(false);
+        luxiang.SetActive(false);
+        saomiao.SetActive(true);
+        RenWuItem rw = ri;
+        DownLoadXRManager.DownLoadForFilePath(DownLoadXRManager.getTestData(rw.imageUrl), (string bytes) => {
+            if(bytes==null)
+            {
+                close();
+                JsonData data = new JsonData();
+                data["type"] = "20001";
+                List<string> backTip = new List<string>();
+                backTip.Add(data.ToJson());
+                backTip.Add(data.ToJson());
+                backTip.Add(data.ToJson());
+                WindowsManager.Instance.show(WindowConfig.windowType.Error, false, WindowsManager.Instance.getErrorData("提示", "识别图未找到!"+ rw.imageUrl, Color.gray, "icon", backTip, true, "识别图错误", 5).ToJson());
+
+            }
+            else
+            {
+                ImageFileSourceData data = new ImageFileSourceData();
+                data.Name = bytes.Split("/")[bytes.Split("/").Length - 1];
+                data.Path = bytes;
+                data.PathType = PathType.Absolute;
+                data.Scale = 0.1f;
+                rw.imageData = data;
+                ARSaoTuManager.Instance.AddARGameObj(data);
+            }
+        }, (float f) => {
+
+
+        });
+    }
+
+    public void close()
+    {
+        checkLuXiang();
+        JinRuRenwu.Instance.paizhaoanniu.SetActive(false);
+        JinRuRenwu.Instance.paizhaoGo.SetActive(false);
+        XRRGBCamera.Instance.stopCamera();
+        paizhao.SetActive(false);
+        luxiang.SetActive(false);
+        saomiao.SetActive(false);
+    }
+
+    public void paizhaoclick()
+    {
+        PaiZhaoDataManager.Instance.addImage(XRRGBCamera.Instance.CaptureImage);
+    }
+    Timer timer;
+    public TextMeshProUGUI timerText;
+    bool isLuXiang;
+    string pathVideo;
+    public void luxiangClick()
+    {
+           
+        if (isLuXiang)
+        {
+            checkLuXiang();
+        }
+        else
+        {
+
+            int count = 0;
+            if (Directory.Exists(Application.persistentDataPath + "/PaiZhao/" + XunJianDataManager.Instance.chooseXunJian.id + "/" + XunJianDataManager.Instance.chooseXunJian.nowIndex))
+                count = Directory.GetFiles(Application.persistentDataPath + "/PaiZhao/" + XunJianDataManager.Instance.chooseXunJian.id + "/" + XunJianDataManager.Instance.chooseXunJian.nowIndex).Length;
+            string name = count.ToString();
+            string path = Application.persistentDataPath + "/PaiZhao/" + XunJianDataManager.Instance.chooseXunJian.id + "/" + XunJianDataManager.Instance.chooseXunJian.nowIndex;
+            pathVideo = path + "/" + name + ".mp4";
+            caTexture.SetSourceTexture(XRRGBCamera.Instance.CaptureImage);
+            caTexture.OutputFolderPath= path;
+            caTexture.FilenamePrefix = name;
+            caTexture.StartCapture();
+            isLuXiang = true;
+
+         timer=   TimerMgr.Instance.CreateTimer(()=> { 
+                FrameOutTime++;
+                timerText.text="REC  "+ TimeToString(FrameOutTime);
+
+            },1f,-1);
+        }    
+    }
+
+    private void OnCompletedFileWritingAction(FileWritingHandler obj)
+    {
+        Debug.Log("saveEnd===>" + pathVideo);
+        paizhaoData data = new paizhaoData();
+        data.url = pathVideo;
+        PaiZhaoManager.Instance.addVidew(data);
+    }
+
+    public string TimeToString(int result)
+    {
+        int hour = (int)result / 3600;
+        int minute = ((int)result - hour * 3600) / 60;
+        int second = (int)result - hour * 3600 - minute * 60;
+        int millisecond = (int)((result - (int)result) * 1000);
+        string data = string.Format("{0:D2}:{1:D2}:{2:D2}", hour, minute, second);
+        return data;
+    }
+    public void checkLuXiang()
+    {
+        if (isLuXiang)
+        {
+            FrameOutTime = 0;
+            TimerMgr.Instance.DestroyTimer(timer);
+            caTexture.StopCapture();
+            isLuXiang = false;
+            timerText.text = "REC  00:00:00" ;
+        }
+    }
+    int FrameOutTime;
+}
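
TimeToString above takes an int, so its millisecond line is always 0 and unused; an equivalent, shorter formatter for the REC label using TimeSpan (sketch only):

// Sketch: equivalent HH:MM:SS formatting for the REC label.
public static string FormatRecTime(int seconds)
{
    System.TimeSpan t = System.TimeSpan.FromSeconds(seconds);
    return string.Format("{0:D2}:{1:D2}:{2:D2}", (int)t.TotalHours, t.Minutes, t.Seconds);
}
// usage: timerText.text = "REC  " + FormatRecTime(FrameOutTime);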

+ 0 - 0
Assets/ChangeCameraSaoMiao.cs.meta → Assets/FrameWork/ProjectManager/Scripts/Project/XunJian/进入任务/ChangeCameraSaoMiao.cs.meta


+ 0 - 0
Assets/FollowCameraSaoMiao.cs → Assets/FrameWork/ProjectManager/Scripts/Project/XunJian/进入任务/FollowCameraSaoMiao.cs


+ 0 - 0
Assets/FollowCameraSaoMiao.cs.meta → Assets/FrameWork/ProjectManager/Scripts/Project/XunJian/进入任务/FollowCameraSaoMiao.cs.meta


+ 520 - 0
Assets/FrameWork/ProjectManager/Scripts/Project/XunJian/进入任务/GameUtility.cs

@@ -0,0 +1,520 @@
+using UnityEngine;
+using System.Collections;
+using System.Collections.Generic;
+using OpenCVForUnity.CoreModule;
+using OpenCVForUnity.ImgcodecsModule;
+using OpenCVForUnity.ImgprocModule;
+using OpenCVForUnity.UnityUtils;
+using UnityEditor;
+using System.Text;
+using System.Web;
+using System;
+using System.IO;
+using System.Linq;
+
+/// <summary>
+/// 游戏工具类
+/// Game Utility Tools
+/// </summary>
+public class GameUtility
+{
+    /// <summary>
+    /// Find Deep child with name
+    /// </summary>
+    public static Transform FindDeepChild(GameObject _target, string _childName)
+    {
+        Transform resultTrs = null;
+        resultTrs = _target.transform.Find(_childName);
+        if (resultTrs == null)
+        {
+            foreach (Transform trs in _target.transform)
+            {
+                resultTrs = GameUtility.FindDeepChild(trs.gameObject, _childName);
+                if (resultTrs != null)
+                    return resultTrs;
+            }
+        }
+        return resultTrs;
+    }
+
+    /// <summary>
+    /// Find component in Target Child
+    /// </summary>
+    public static T FindDeepChild<T>(GameObject _target, string _childName) where T : Component
+    {
+        Transform resultTrs = GameUtility.FindDeepChild(_target, _childName);
+        if (resultTrs != null)
+            return resultTrs.gameObject.GetComponent<T>();
+        return (T)((object)null);
+    }
+
+    /// <summary>
+    /// 添加子节点
+    /// Add child to target
+    /// </summary>
+    public static void AddChildToTarget(Transform target, Transform child)
+    {
+        //child.parent = target;
+        child.SetParent(target, false);
+        child.localScale = Vector3.one;
+        child.localPosition = Vector3.zero;
+        child.localEulerAngles = Vector3.zero;
+
+        ChangeChildLayer(child, target.gameObject.layer);
+    }
+
+    /// <summary>
+    /// 修改子节点Layer
+    /// Change child layer
+    /// </summary>
+    public static void ChangeChildLayer(Transform t, int layer)
+    {
+        t.gameObject.layer = layer;
+        for (int i = 0; i < t.childCount; ++i)
+        {
+            Transform child = t.GetChild(i);
+            child.gameObject.layer = layer;
+            ChangeChildLayer(child, layer);
+        }
+    }
+
+
+    /// Delete all child objects under the given transform
+    public static void DeleteAllGo(Transform go)
+    {
+        List<GameObject> DeleteList = new List<GameObject>();
+        for (int i = 0; i < go.childCount; i++)
+        {
+            DeleteList.Add(go.GetChild(i).gameObject);
+        }
+        for (int i = 0; i < DeleteList.Count; i++)
+        {
+            GameObject.DestroyImmediate(DeleteList[i],true);
+        }
+    }
+    // Delete the single child object at the given index
+    public static void DeleteSingleGo(Transform go, int index)
+    {
+        for (int i = 0; i < go.childCount; i++)
+        {
+            if (i == index)
+                GameObject.Destroy(go.GetChild(i).gameObject);
+        }
+    }
+    private static  Texture2D _tex;
+    /// <summary>
+    /// Generate an image thumbnail with OpenCV
+    /// </summary>
+    /// <param name="path">File path</param>
+    /// <param name="zoom">Scale factor</param>
+    /// <returns></returns>
+    public static Mat zoomMatByOpenCV(string path, double zoom)
+    {
+        //Debug.Log("  zoomMatByOpenCV   path  : "+path);
+        Mat mat = filePathToMat(path);
+        //Debug.Log(mat.cols());
+        //Debug.Log(mat.rows());
+        // Downscale the image
+        Size size = new Size(mat.cols() * zoom, mat.rows() * zoom);
+        Mat zoomMat = new Mat(size, CvType.CV_8UC4);
+        Imgproc.resize(mat, zoomMat, size);
+        //// Convert BGR to RGB
+        Mat imgRGBMat = new Mat(zoomMat.cols(), zoomMat.rows(), CvType.CV_8UC4);
+        Imgproc.cvtColor(zoomMat, imgRGBMat, Imgproc.COLOR_BGR2RGB);
+        zoomMat.release();
+        mat.release();
+        return imgRGBMat;
+        //return mat;
+    }
+    public event Action RefreshEvent;
+    /// <summary>
+    /// Generate an image thumbnail with OpenCV
+    /// </summary>
+    /// <param name="path">File path</param>
+    /// <param name="zoom">Scale factor (ignored when isZD is true)</param>
+    /// <param name="isZD">When true, compute the scale automatically so the image fits within 2048px (or 256px when issuolve is true)</param>
+    /// <param name="issuolve">Use the 256px thumbnail limit instead of 2048px</param>
+    /// <returns></returns>
+    public static Mat zoomByteByOpenCV(string path, double zoom,bool isZD=false,bool issuolve=false)
+    {
+
+        MatOfByte matbytes = filePathToMatByte(path);
+        Mat mat = Imgcodecs.imdecode(matbytes, -1);
+        if (isZD)
+        {
+
+            zoom = 1;
+            float sint = 2048f;
+            if(issuolve)
+            {
+                sint = 256f;
+            }
+            if (mat.cols() > sint)
+            {
+                zoom = (float)(sint / mat.cols());
+            }
+            if (mat.rows() * zoom > sint)
+            {
+                zoom = (float)(sint / (float)(mat.rows() * zoom));
+            }
+        }
+        //Debug.Log(mat.cols());
+        //Debug.Log(mat.rows());
+        // Downscale the image
+        Size size = new Size(mat.cols() * zoom, mat.rows() * zoom);
+        Mat zoomMat = new Mat(size, CvType.CV_8UC4);
+        Imgproc.resize(mat, zoomMat, size);
+        //// Convert BGR to RGB
+        Mat imgRGBMat = new Mat(zoomMat.cols(), zoomMat.rows(), CvType.CV_8UC4);
+        Imgproc.cvtColor(zoomMat, imgRGBMat, Imgproc.COLOR_BGR2RGB);
+        //RefreshEvent?.Invoke();
+        zoomMat.release();
+        mat.release();
+        matbytes.release();
+        return imgRGBMat;
+        //return mat;
+    }
+
+    /// <summary>
+    /// Generate an image thumbnail with OpenCV
+    /// </summary>
+    /// <param name="bytedata">Encoded image bytes</param>
+    /// <param name="zoom">Scale factor</param>
+    /// <returns></returns>
+    public static Mat zoomByteByOpenCV(byte[] bytedata, double zoom)
+    {
+        MatOfByte matbytes = new MatOfByte(bytedata);
+        if (matbytes.nativeObj == IntPtr.Zero)
+        {
+            return null;
+        }
+        Mat mat = Imgcodecs.imdecode(matbytes, -1);
+        //mat.
+        //Debug.Log(mat.cols());
+        //Debug.Log(mat.rows());
+        // Downscale the image
+        Size size = new Size(mat.cols() * zoom, mat.rows() * zoom);
+        Mat zoomMat = new Mat(size, CvType.CV_8UC4);
+        Imgproc.resize(mat, zoomMat, size);
+        //// Convert BGR to RGB
+        Mat imgRGBMat = new Mat(zoomMat.cols(), zoomMat.rows(), CvType.CV_8UC4);
+        Imgproc.cvtColor(zoomMat, imgRGBMat, Imgproc.COLOR_BGR2RGB);
+        zoomMat.release();
+        mat.release();
+        matbytes.release();
+        //RefreshEvent?.Invoke();
+        return imgRGBMat;
+        //return mat;
+    }
+
+
+    /// <summary>
+    /// Safe to call from a worker thread, so it must not touch Unity's Time API
+    /// </summary>
+    /// <param name="matbytes"></param>
+    /// <param name="zoom"></param>
+    /// <returns></returns>
+    public static  Mat zoomMatOfByteByOpenCV(MatOfByte matbytes, double zoom)
+    {
+        Mat mat = Imgcodecs.imdecode(matbytes, -1);
+
+
+        zoom = 1;
+        float sint = 256f;
+        if (mat.cols() > sint)
+        {
+            zoom = (float)(sint / mat.cols());
+        }
+        if (mat.rows() * zoom > sint)
+        {
+            zoom = (float)(sint / (float)(mat.rows() * zoom));
+        }
+
+
+        //Debug.Log(mat.cols());
+        //Debug.Log(mat.rows());
+        // Downscale the image
+        Size size = new Size(mat.cols() * zoom, mat.rows() * zoom);
+        Mat zoomMat = new Mat(size, CvType.CV_8UC4);
+        Imgproc.resize(mat, zoomMat, size);
+        //// Convert BGR to RGB
+        Mat imgRGBMat = new Mat(zoomMat.cols(), zoomMat.rows(), CvType.CV_8UC4);
+        Imgproc.cvtColor(zoomMat, imgRGBMat, Imgproc.COLOR_BGR2RGB);
+        zoomMat.release();
+        mat.release();
+        matbytes.release();
+        return imgRGBMat;
+    }
+
+    /// <summary>
+    /// Load an image Mat from a file path
+    /// </summary>
+    /// <param name="path"></param>
+    /// <returns></returns>
+    public static Mat filePathToMat(string path)
+    {
+        
+        //Imgcodecs.imdecode
+        return Imgcodecs.imread(path);
+    }
+
+    public static MatOfByte filePathToMatByte(string path)
+    {
+        MatOfByte matOfByte = new MatOfByte(LoadSpriteByIOByte(path));
+        //Imgcodecs.imdecode
+        return matOfByte;
+    }
+    /// <summary>
+    /// Convert a Mat to a Texture2D
+    /// </summary>
+    /// <param name="imgRGBMat">RGB image matrix</param>
+    /// <returns></returns>
+    public static Texture2D TextureByMat(Mat imgRGBMat)
+    {
+        //Debug.Log(imgRGBMat.cols());
+        //Debug.Log(imgRGBMat.rows());
+        _tex = new Texture2D(imgRGBMat.cols(), imgRGBMat.rows(), TextureFormat.RGB24, false);
+        Utils.fastMatToTexture2D(imgRGBMat, _tex);
+        return _tex;
+    }
+
+    /// <summary>
+    /// Resize a Texture2D (thumbnail)
+    /// </summary>
+    /// <param name="source"></param>
+    /// <param name="newWidth"></param>
+    /// <param name="newHeight"></param>
+    /// <returns></returns>
+    public static Texture2D Resize(Texture2D source, int newWidth, int newHeight)
+    {
+        source.filterMode = FilterMode.Point;
+        RenderTexture rt = RenderTexture.GetTemporary(newWidth, newHeight);
+        rt.filterMode = FilterMode.Point;
+        RenderTexture.active = rt;
+        Graphics.Blit(source, rt);
+        var nTex = new Texture2D(newWidth, newHeight);
+        nTex.ReadPixels(new UnityEngine.Rect(0, 0, newWidth, newHeight), 0, 0);
+        nTex.Apply();
+        RenderTexture.active = null;
+        return nTex;
+    }
+
+    /// <summary>
+    /// Load a local image via file IO
+    /// </summary>
+    /// <param name="fileurl"></param>
+    /// <returns>Texture2D</returns>
+    public Texture2D LoadSpriteByIO(string fileurl)
+    {
+        // Open a read-only file stream
+        FileStream fileStream = new FileStream(fileurl, FileMode.Open, FileAccess.Read);
+        fileStream.Seek(0, SeekOrigin.Begin);
+        // Allocate a buffer the size of the file
+        byte[] bytes = new byte[fileStream.Length];
+        // Read the file contents
+        fileStream.Read(bytes, 0, (int)fileStream.Length);
+        // Release the file stream
+        fileStream.Close();
+        fileStream.Dispose();
+        fileStream = null;
+
+        // Create the texture (LoadImage resizes it to the actual image dimensions)
+        int width = 300;
+        int height = 372;
+        Texture2D texture2D = new Texture2D(width, height);
+
+        texture2D.LoadImage(bytes);
+
+        //tex = texture2D;
+
+        return texture2D;
+    }
+    /// <summary>
+    /// Encode a Texture2D to PNG bytes
+    /// </summary>
+    /// <param name="temp"></param>
+    /// <returns></returns>
+    public static byte[] GetTextureToByte(Texture2D temp)
+    {
+        // Encode to PNG bytes
+        byte[] photoByte = temp.EncodeToPNG();
+        return photoByte;
+    }
+
+    /// <summary>
+    /// Load a local image via file IO
+    /// </summary>
+    /// <param name="fileurl"></param>
+    /// <returns>byte[]</returns>
+    public static  byte[] LoadSpriteByIOByte(string fileurl)
+    {
+        // Open a read-only file stream
+        FileStream fileStream = new FileStream(fileurl, FileMode.Open, FileAccess.Read);
+        fileStream.Seek(0, SeekOrigin.Begin);
+        // Allocate a buffer the size of the file
+        byte[] bytes = new byte[fileStream.Length];
+        // Read the file contents
+        fileStream.Read(bytes, 0, (int)fileStream.Length);
+        // Release the file stream
+        fileStream.Close();
+        fileStream.Dispose();
+        fileStream = null;
+        return bytes;
+    }
+
+
+    /// <summary>
+    /// Sort folders by name (ascending)
+    /// </summary>
+    /// <param name="dirs"></param>
+    private static void OrderSortAsFolderName(ref DirectoryInfo[] dirs)
+    {
+        Array.Sort(dirs, delegate (DirectoryInfo x, DirectoryInfo y) { return x.Name.CompareTo(y.Name); });
+    }
+    /// <summary>
+    /// Sort folders by name (descending)
+    /// </summary>
+    /// <param name="dirs"></param>
+    private static void ReverseSortAsFolderName(ref DirectoryInfo[] dirs)
+    {
+        Array.Sort(dirs, delegate (DirectoryInfo x, DirectoryInfo y) { return y.Name.CompareTo(x.Name); });
+    }
+    /// <summary>
+    /// Sort folders by creation time (ascending)
+    /// </summary>
+    /// <param name="dirs"></param>
+    private static void OrderSortAsFolderCreationTime(ref DirectoryInfo[] dirs)
+    {
+        Array.Sort(dirs, delegate (DirectoryInfo x, DirectoryInfo y) { return x.CreationTime.CompareTo(y.CreationTime); });
+    }
+
+    /// <summary>
+    /// Sort folders by creation time (descending)
+    /// </summary>
+    /// <param name="dirs"></param>
+    private static void ReverseSortAsFolderCreationTime(ref DirectoryInfo[] dirs)
+    {
+        Array.Sort(dirs, delegate (DirectoryInfo x, DirectoryInfo y) { return y.CreationTime.CompareTo(x.CreationTime); });
+    }
+
+    /// <summary>
+    /// Sort folders by last write time (ascending)
+    /// </summary>
+    /// <param name="dirs"></param>
+    private static void OrderSortAsFolderLastWriteTime(ref DirectoryInfo[] dirs)
+    {
+        Array.Sort(dirs, delegate (DirectoryInfo x, DirectoryInfo y) { return x.LastWriteTime.CompareTo(y.LastWriteTime); });
+    }
+
+    /// <summary>
+    /// Sort folders by last write time (descending)
+    /// </summary>
+    /// <param name="dirs"></param>
+    private static void ReverseSortAsFolderLastWriteTime(ref DirectoryInfo[] dirs)
+    {
+        Array.Sort(dirs, delegate (DirectoryInfo x, DirectoryInfo y) { return y.LastWriteTime.CompareTo(x.LastWriteTime); });
+    }
+
+    static double  unit = 1024;
+    /// <summary>
+    /// C# reports file sizes in raw bytes, which makes them awkward to display directly,
+    /// so this helper formats a byte count into a human-readable string:
+    ///     0 B - 1024 B      shown in B
+    ///     1 KB - 1024 KB    shown in KB
+    ///     1 MB - 1024 MB    shown in MB
+    ///     above 1024 MB     shown in GB
+    /// </summary>
+    /// <param name="lengthOfDocument">File size in bytes (long)</param>
+    /// <returns></returns>
+    public static string GetLength(long lengthOfDocument)
+    {
+
+        if (lengthOfDocument < unit)
+            return lengthOfDocument.ToString("f2") + "B";
+        else if (lengthOfDocument >= unit && lengthOfDocument <= Math.Pow(unit, 2))
+            return (lengthOfDocument / unit).ToString("f2") + "KB";
+        else if (lengthOfDocument > Math.Pow(unit, 2) && lengthOfDocument <= Math.Pow(unit, 3))
+            return (lengthOfDocument / unit / unit).ToString("f2") + "MB";
+        else
+            return (lengthOfDocument / unit / unit / unit).ToString("f2") + "GB";
+    }
+
+    /// <summary>Formats a count into a compact human-readable string:
+    ///     0 - 1,000          shown as-is
+    ///     1 K - 1,000 K      shown in K (thousands)
+    ///     1 M - 1,000 M      shown in M (millions)
+    ///     above 1,000 M      shown in B (billions)
+    /// </summary>
+    /// <param name="lengthOfDocument">Count to format (long)</param>
+    /// <returns></returns>
+    public static string GetNumFormat(long lengthOfDocument)
+    {
+
+        if (lengthOfDocument < 1000)
+            return string.Format(lengthOfDocument.ToString());
+        else if (lengthOfDocument >= 1000 && lengthOfDocument < Math.Pow(1000, 2))
+        {
+            return string.Format((lengthOfDocument / 1000).ToString() + "K");
+
+        }
+            
+        else if (lengthOfDocument >= Math.Pow(1000, 2) && lengthOfDocument < Math.Pow(1000, 3))
+            return string.Format((lengthOfDocument / 1000 / 1000).ToString() + "M");
+        else
+            return string.Format((lengthOfDocument / 1000 / 1000 / 1000).ToString() + "B");
+    }
+
+
+    public static string GetVideoTime(long time)
+    {
+        string timeStr;
+        if (time < 60)
+        {
+            timeStr = string.Format("00:{0:00}", time % 60);
+        }
+        else if (time < 3600)
+        {
+            timeStr = string.Format("{0:00}:{1:00}", time / 60, time % 60);
+        }
+        else
+        {
+            timeStr = string.Format("{0}:{1:00}", time / 3600, (time % 3600) / 60) /*+ string.Format("{0:00}", time % 60)*/;
+        }
+        return timeStr;
+    }
+
+    public static string formatLongToTimeStr(long l)
+    {
+      //  Debug.Log("");
+        int hour = 0;
+        int minute = 0;
+        int second = (int)(l / 1000);
+
+        if (second >= 60)
+        {
+            minute = second / 60;
+            second = second % 60;
+        }
+        if (minute >= 60)
+        {
+            hour = minute / 60;
+            minute = minute % 60;
+        }
+        return hour + ":" + minute + ":" + second;
+    }
+
+    // Convert a Unix timestamp (seconds) to a date string
+    public static string StampToDateTime(string timeStamp)
+    {
+        DateTime startTime = System.TimeZone.CurrentTimeZone.ToLocalTime(new System.DateTime(1970, 1, 1));// Unix epoch in local time
+        DateTime dt = startTime.AddSeconds(double.Parse(timeStamp));
+        return dt.ToString("yyyy/MM/dd");// Format as a date
+    }
+
+
+}
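
A few worked values for the size/count/time formatters above (illustrative only; outputs follow the code as written in this file):

    Debug.Log(GameUtility.GetLength(500));      // "500.00B"
    Debug.Log(GameUtility.GetLength(1536));     // "1.50KB"  (1536 / 1024 = 1.5)
    Debug.Log(GameUtility.GetNumFormat(12500)); // "12K"     (integer division, no rounding)
    Debug.Log(GameUtility.GetVideoTime(30));    // "00:30"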

+ 11 - 0
Assets/FrameWork/ProjectManager/Scripts/Project/XunJian/进入任务/GameUtility.cs.meta

@@ -0,0 +1,11 @@
+fileFormatVersion: 2
+guid: 4b79ba21bf5df8e419586dfed8c963a4
+MonoImporter:
+  externalObjects: {}
+  serializedVersion: 2
+  defaultReferences: []
+  executionOrder: 0
+  icon: {instanceID: 0}
+  userData: 
+  assetBundleName: 
+  assetBundleVariant: 

+ 154 - 0
Assets/FrameWork/ProjectManager/Scripts/Project/XunJian/进入任务/Loom.cs

@@ -0,0 +1,154 @@
+using UnityEngine;
+using System.Collections;
+using System.Collections.Generic;
+using System;
+using System.Threading;
+using System.Linq;
+
+public class Loom : MonoBehaviour
+{
+	public static int maxThreads = 8;
+	static int numThreads;
+
+	private static Loom _current;
+	private int _count;
+	public static Loom Current
+	{
+		get
+		{
+			Initialize();
+			return _current;
+		}
+	}
+
+	void Awake()
+	{
+		_current = this;
+		initialized = true;
+	}
+
+	static bool initialized;
+
+	static void Initialize()
+	{
+		if (!initialized)
+		{
+
+			if (!Application.isPlaying)
+				return;
+			initialized = true;
+			var g = new GameObject("Loom");
+			_current = g.AddComponent<Loom>();
+		}
+
+	}
+
+	private List<Action> _actions = new List<Action>();
+	public struct DelayedQueueItem
+	{
+		public float time;
+		public Action action;
+	}
+	private List<DelayedQueueItem> _delayed = new List<DelayedQueueItem>();
+
+	List<DelayedQueueItem> _currentDelayed = new List<DelayedQueueItem>();
+
+	public static void QueueOnMainThread(Action action)
+	{
+		QueueOnMainThread(action, 0f);
+	}
+	public static void QueueOnMainThread(Action action, float time)
+	{
+		if (time != 0)
+		{
+			lock (Current._delayed)
+			{
+				Current._delayed.Add(new DelayedQueueItem { time = Time.time + time, action = action });
+			}
+		}
+		else
+		{
+			lock (Current._actions)
+			{
+				Current._actions.Add(action);
+			}
+		}
+	}
+
+	public static Thread RunAsync(Action a)
+	{
+		Initialize();
+		while (numThreads >= maxThreads)
+		{
+			Thread.Sleep(1);
+		}
+		Interlocked.Increment(ref numThreads);
+		ThreadPool.QueueUserWorkItem(RunAction, a);
+		return null;
+	}
+
+	private static void RunAction(object action)
+	{
+		try
+		{
+			((Action)action)();
+		}
+		catch
+		{
+		}
+		finally
+		{
+			Interlocked.Decrement(ref numThreads);
+		}
+
+	}
+
+
+	void OnDisable()
+	{
+		if (_current == this)
+		{
+
+			_current = null;
+		}
+	}
+
+
+
+	// Use this for initialization
+	void Start()
+	{
+
+	}
+
+	List<Action> _currentActions = new List<Action>();
+
+	// Update is called once per frame
+	void Update()
+	{
+		lock (_actions)
+		{
+			_currentActions.Clear();
+			_currentActions.AddRange(_actions);
+			_actions.Clear();
+		}
+		foreach (var a in _currentActions)
+		{
+			a();
+		}
+		lock (_delayed)
+		{
+			_currentDelayed.Clear();
+			_currentDelayed.AddRange(_delayed.Where(d => d.time <= Time.time));
+			foreach (var item in _currentDelayed)
+				_delayed.Remove(item);
+		}
+		foreach (var delayed in _currentDelayed)
+		{
+			delayed.action();
+		}
+
+
+
+	}
+}
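
How the two pieces are meant to fit together: the OpenCV decode/resize in GameUtility can run on a Loom worker thread, while the Texture2D must be created back on the main thread. A minimal sketch (illustration only; imagePath and rawImage are placeholder names), mirroring the pattern PaiZhaoItem uses below:

    Loom.RunAsync(() =>
    {
        // Decode and downscale off the main thread (isZD/issuolve clamp the longer edge to 256px)
        Mat thumb = GameUtility.zoomByteByOpenCV(imagePath, 0.1f, true, true);
        Loom.QueueOnMainThread(() =>
        {
            rawImage.texture = GameUtility.TextureByMat(thumb); // Texture2D creation must stay on the main thread
            thumb.release(); // free the native Mat once its pixels have been copied into the texture
        });
    });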

+ 11 - 0
Assets/FrameWork/ProjectManager/Scripts/Project/XunJian/进入任务/Loom.cs.meta

@@ -0,0 +1,11 @@
+fileFormatVersion: 2
+guid: 66d873578f054694888854b4fef7c39e
+MonoImporter:
+  externalObjects: {}
+  serializedVersion: 2
+  defaultReferences: []
+  executionOrder: 0
+  icon: {instanceID: 0}
+  userData: 
+  assetBundleName: 
+  assetBundleVariant: 

+ 126 - 0
Assets/FrameWork/ProjectManager/Scripts/Project/XunJian/进入任务/PaiZhaoDataManager.cs

@@ -0,0 +1,126 @@
+
+using LitJson;
+using System.Collections;
+using System.Collections.Generic;
+using System.IO;
+using UnityEngine;
+using UnityEngine.Rendering;
+using XRTool.Util;
+using static XunJianDataManager;
+
+public class PaiZhaoDataManager : Singleton<PaiZhaoDataManager>
+{
+     List<paizhaoData> paizhaoList = new List<paizhaoData>();
+    public void addImage(Texture tex)
+    {
+        WindowsManager.Instance.StartCoroutine(GetTextureTo2D(tex));
+    }
+    IEnumerator GetTextureTo2D(Texture tex)
+    {
+        var req = AsyncGPUReadback.Request(tex,0, TextureFormat.ARGB32);
+        yield return new WaitUntil(() => req.done);
+        if (!req.hasError)
+        {
+            byte[] bts = new byte[req.layerDataSize];
+            req.GetData<byte>().CopyTo(bts);
+
+            Texture2D texture2D = new Texture2D(tex.width, tex.height, TextureFormat.ARGB32, false);
+            texture2D.LoadRawTextureData(bts);
+
+            bts = texture2D.EncodeToPNG();
+            WindowsManager.Instance.DestroyText2D(texture2D);
+            Debug.Log("准备存文件===》" + bts.Length);
+            int count = 0;
+            if(Directory.Exists(Application.persistentDataPath + "/PaiZhao/" + XunJianDataManager.Instance.chooseXunJian.id + "/" + XunJianDataManager.Instance.chooseXunJian.nowIndex))
+                count= Directory.GetFiles(Application.persistentDataPath + "/PaiZhao/" + XunJianDataManager.Instance.chooseXunJian.id + "/" + XunJianDataManager.Instance.chooseXunJian.nowIndex).Length;
+            // File stream used to write the image
+            //StreamWriter sw;  
+            Stream sw;
+            string name = count+".png";
+            string path = Application.persistentDataPath + "/PaiZhao/" + XunJianDataManager.Instance.chooseXunJian.id + "/" + XunJianDataManager.Instance.chooseXunJian.nowIndex + "/" + name;
+            Debug.Log("准备存文件===》" + path);
+            FileInfo file = new FileInfo(path);
+            try
+            {
+
+                if (file.Exists)
+                {
+                    file.Delete();
+                }
+                if (!Directory.Exists(Application.persistentDataPath + "/PaiZhao"))
+                {
+                    Directory.CreateDirectory(Application.persistentDataPath + "/PaiZhao");
+                }
+                if (!Directory.Exists(Application.persistentDataPath + "/PaiZhao/" + XunJianDataManager.Instance.chooseXunJian.id))
+                {
+                    Directory.CreateDirectory(Application.persistentDataPath + "/PaiZhao/" + XunJianDataManager.Instance.chooseXunJian.id);
+                }
+                if (!Directory.Exists(Application.persistentDataPath + "/PaiZhao/" + XunJianDataManager.Instance.chooseXunJian.id + "/" + XunJianDataManager.Instance.chooseXunJian.nowIndex))
+                {
+                    Directory.CreateDirectory(Application.persistentDataPath + "/PaiZhao/" + XunJianDataManager.Instance.chooseXunJian.id + "/" + XunJianDataManager.Instance.chooseXunJian.nowIndex);
+                }
+                // If the file already existed it could be opened for appending:
+                //sw = file.Append();
+                // Here it never does (it was deleted above), so create it
+                sw = file.Create();
+
+                // Write the raw PNG bytes (line-based writing is not needed)
+                //sw.WriteLine(info);
+                sw.Write(bts, 0, bts.Length);
+                sw.Close();
+                sw.Dispose();
+
+                paizhaoData data = new paizhaoData();
+                data.url = path;
+                paizhaoList.Add(data);
+                PaiZhaoManager.Instance.addVidew(data);
+            }
+            catch
+            {
+                
+                JsonData data = new JsonData();
+                data["type"] = "fileError";
+                List<string> backTip = new List<string>();
+                backTip.Add(data.ToJson());
+                backTip.Add(data.ToJson());
+                backTip.Add(data.ToJson());
+                WindowsManager.Instance.show(WindowConfig.windowType.Error, false, WindowsManager.Instance.getErrorData("提示", "采集失败"+ path, Color.white, "icon", backTip, false, "", 5, "知道了.", "", "").ToJson());
+                
+            }
+
+            //   screenShot.LoadRawTextureData(bts);
+            //  screenShot.Apply();
+            //  tex.SetPixels32(req.GetData<Color32>().ToArray());
+            //  img.texture = screenShot;
+        }
+        else
+        {
+            Debug.LogError("Error AsyncGPUReadbackRequest.hasError");
+        }
+    }
+
+    public List<paizhaoData> updateFile()
+    {
+        if (XunJianDataManager.Instance!=null&& XunJianDataManager.Instance.chooseXunJian!=null)
+        {
+            paizhaoList.Clear();
+            if (Directory.Exists(Application.persistentDataPath + "/PaiZhao/" + XunJianDataManager.Instance.chooseXunJian.id + "/" + XunJianDataManager.Instance.chooseXunJian.nowIndex))
+            {
+                string[] files = Directory.GetFiles(Application.persistentDataPath + "/PaiZhao/" + XunJianDataManager.Instance.chooseXunJian.id + "/" + XunJianDataManager.Instance.chooseXunJian.nowIndex);
+                for (int i = 0; i < files.Length; i++)
+                {
+                    paizhaoData data = new paizhaoData();
+                    data.url = files[i];
+                    paizhaoList.Add(data);
+                }
+            }
+
+        }
+        return paizhaoList;
+    }
+
+    public class paizhaoData
+    {
+        public string url;
+    }
+}
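
For reference, the capture paths built above all follow one layout; the sketch below (reconstructed from the string concatenation in this commit, not an official API) shows the folder a task point's photos and recordings share. Note that the index n is simply Directory.GetFiles(...).Length at capture time, so photos and videos draw from the same counter:

    // {persistentDataPath}/PaiZhao/{chooseXunJian.id}/{chooseXunJian.nowIndex}/{n}.png  (photos)
    // {persistentDataPath}/PaiZhao/{chooseXunJian.id}/{chooseXunJian.nowIndex}/{n}.mp4  (recordings)
    string dir = Application.persistentDataPath + "/PaiZhao/"
        + XunJianDataManager.Instance.chooseXunJian.id + "/"
        + XunJianDataManager.Instance.chooseXunJian.nowIndex;
    Directory.CreateDirectory(dir); // also creates the intermediate PaiZhao/{id} folders in one call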

+ 11 - 0
Assets/FrameWork/ProjectManager/Scripts/Project/XunJian/进入任务/PaiZhaoDataManager.cs.meta

@@ -0,0 +1,11 @@
+fileFormatVersion: 2
+guid: 1c8cedaf109ee7449b550263a924b206
+MonoImporter:
+  externalObjects: {}
+  serializedVersion: 2
+  defaultReferences: []
+  executionOrder: 0
+  icon: {instanceID: 0}
+  userData: 
+  assetBundleName: 
+  assetBundleVariant: 

+ 142 - 0
Assets/FrameWork/ProjectManager/Scripts/Project/XunJian/进入任务/PaiZhaoItem.cs

@@ -0,0 +1,142 @@
+using OpenCVForUnity.CoreModule;
+using System;
+using System.Collections;
+using System.Collections.Generic;
+using System.IO;
+using UnityEngine;
+using UnityEngine.UI;
+using static PaiZhaoDataManager;
+
+public class PaiZhaoItem : MonoBehaviour
+{
+    public bool isMp4;
+    public int index;
+    public GameObject playImg;
+    public Texture tex;
+
+    public paizhaoData data;
+
+    public GameObject item;
+    Mat _mat;
+    public void updateData(paizhaoData data)
+    {
+        this.index = this.transform.GetSiblingIndex()-1;
+        this.data = data;
+        if (tex!=null)
+        {
+                Destroy(tex);
+        }
+        string ext = Path.GetExtension(data.url);
+        switch (ext)
+        {
+            case ".png":
+                isMp4 = false;
+                playImg.SetActive(false) ;
+                Loom.RunAsync(() =>
+                {
+                     _mat = GameUtility.zoomByteByOpenCV(data.url, 0.1f, true, true);
+                    Loom.QueueOnMainThread(()=> {
+                        tex = GameUtility.TextureByMat(_mat);
+                        item.GetComponent<RawImage>().texture = tex;
+                        /*
+                        float xx = 100f / (float)tex.width;
+                        float yy = 100f / (float)tex.height;
+                        if (yy > xx)
+                        {
+                            item.GetComponent<RawImage>().rectTransform.sizeDelta = new Vector2((tex.width * xx), (tex.height * xx));
+
+                        }
+                        else if (xx > yy)
+                        {
+
+                            item.GetComponent<RawImage>().rectTransform.sizeDelta = new Vector2((tex.width * yy), (tex.height * yy));
+                        }
+                        else
+                        {
+                            item.GetComponent<RawImage>().rectTransform.sizeDelta = new Vector2((tex.width * xx), (tex.height * xx));
+
+                        }*/
+                    });
+                });
+               // DownLoadXRManager
+               // 
+                break;
+            case ".mp4":
+                isMp4 = true;
+                playImg.SetActive(true);
+                PlayerToImage.Instance.LoadVideoImgEvent -= onBackImg;
+                PlayerToImage.Instance.LoadVideoImgEvent += onBackImg;
+                PlayerToImage.Instance.LoadVideoImage(data.url);
+
+                break;
+        }
+    }
+    public AVProVideoPlayer av;
+    public void playVideo()
+    {
+        if(playImg.activeSelf)
+        {
+            if(av.IsPlaying())
+            {
+                av.Stop();
+            }
+            else
+            {
+                av.SetUrl(data.url);
+                av.Play();
+            }
+        }
+    }
+
+    public void stopVideo()
+    {
+        if (playImg.activeSelf)
+        {
+            av.Stop();
+        }
+
+    }
+
+    public void showImage()
+    {
+        this.index = this.transform.GetSiblingIndex() - 1;
+        PaiZhaoManager.Instance.clickShowImage(index);
+    }
+    private void OnDestroy()
+    {
+        Destroy(tex);
+    }
+    private void onBackImg(string arg1, Texture2D arg2)
+    {
+        if(arg1 == data.url)
+        {
+            PlayerToImage.Instance.LoadVideoImgEvent -= onBackImg;
+
+            if (item && item.GetComponent<RawImage>())
+            {
+
+                tex = arg2;
+                item.GetComponent<RawImage>().texture = tex;
+
+                /*
+                float xx = 100f / (float)tex.width;
+                float yy = 100f / (float)tex.height;
+                if (yy > xx)
+                {
+                    item.GetComponent<RawImage>().rectTransform.sizeDelta = new Vector2((tex.width * xx), (tex.height * xx));
+
+                }
+                else if (xx > yy)
+                {
+
+                    item.GetComponent<RawImage>().rectTransform.sizeDelta = new Vector2((tex.width * yy), (tex.height * yy));
+                }
+                else
+                {
+                    item.GetComponent<RawImage>().rectTransform.sizeDelta = new Vector2((tex.width * xx), (tex.height * xx));
+
+                }*/
+            }
+        }
+    }
+}

+ 11 - 0
Assets/FrameWork/ProjectManager/Scripts/Project/XunJian/进入任务/PaiZhaoItem.cs.meta

@@ -0,0 +1,11 @@
+fileFormatVersion: 2
+guid: 85b67949d6c314b438fa08b83a770b94
+MonoImporter:
+  externalObjects: {}
+  serializedVersion: 2
+  defaultReferences: []
+  executionOrder: 0
+  icon: {instanceID: 0}
+  userData: 
+  assetBundleName: 
+  assetBundleVariant: 

+ 153 - 0
Assets/FrameWork/ProjectManager/Scripts/Project/XunJian/进入任务/PaiZhaoManager.cs

@@ -0,0 +1,153 @@
+using System.Collections;
+using System.Collections.Generic;
+using System.IO;
+using UnityEngine;
+using UnityEngine.UI;
+using static PaiZhaoDataManager;
+using static WindowConfig;
+
+public class PaiZhaoManager : MonoSingleton<PaiZhaoManager>
+{
+    public GameObject playImg;
+    public RawImage big;
+    List<GameObject> itemList = new List<GameObject>();
+    public GameObject Content;
+    private void OnEnable()
+    {
+        initshow();
+    }
+    int nowIndex;
+    public void showImg(int i)
+    {
+        if(nowIndex!=i)
+        {
+            nowIndex = i;
+            if (itemList[nowIndex])
+                itemList[nowIndex].GetComponent<PaiZhaoItem>().stopVideo();
+        }
+        if(itemList[nowIndex].GetComponent<PaiZhaoItem>().av.IsPlaying())
+        {
+            big.texture = itemList[i].GetComponent<PaiZhaoItem>().av.getTexture();
+            playImg.SetActive(false);
+
+        }else
+        {
+
+            big.texture = itemList[i].GetComponent<PaiZhaoItem>().tex;
+            playImg.SetActive(itemList[i].GetComponent<PaiZhaoItem>().playImg.activeSelf);
+        }
+    }
+
+    private void Update()
+    {
+        RectTransform rt = Content.GetComponent<RectTransform>();
+        int indx = -1;
+        for (int i = 0; i < itemList.Count; i++)
+        {
+            float jia = 0;
+            if(indx<i&& indx!=-1)
+            {
+                jia = 25;
+            }
+            float cha = (itemList[i].GetComponent<RectTransform>().localPosition.x + rt.localPosition.x)+ jia;
+            if (cha > 200&& cha<400)
+            {
+                if(indx==-1)
+                indx = i;
+            }else
+            {
+
+                itemList[i].GetComponent<RectTransform>().sizeDelta = new Vector2(100, 100);
+            }
+        }
+        if(indx!=-1)
+        {
+            itemList[indx].GetComponent<RectTransform>().sizeDelta = new Vector2(150, 100);
+            showImg(indx);
+
+        }
+
+    }
+
+    public void playVideo()
+    {
+        if(itemList[nowIndex])
+        itemList[nowIndex].GetComponent<PaiZhaoItem>().playVideo();
+    }
+
+    public void clickShowImage(int index)
+    {
+        showImg(index);
+        RectTransform rt = Content.GetComponent<RectTransform>();
+        rt.localPosition = new Vector3(-((index+1)*100),0,0);
+    }
+
+    public void initshow()
+    {
+        clear();
+        List<paizhaoData> datalist= PaiZhaoDataManager.Instance.updateFile();
+        for (int i = 0; i < datalist.Count; i++)
+        {
+            GameObject obj =GameObject.Instantiate( WindowsManager.Instance.GetPrefab(windowType.XunJian, "PaiZhaoItem"), Content.transform);
+            obj.transform.SetSiblingIndex(obj.transform.GetSiblingIndex()-1);
+            obj.GetComponent<PaiZhaoItem>().updateData(datalist[i]);
+            itemList.Add(obj);
+        }
+
+        if(datalist.Count<=0)
+        {
+            big.gameObject.SetActive(false);
+        }else
+        {
+            big.gameObject.SetActive(true);
+        }
+    }
+
+    public void RemoveView()
+    {
+        if (itemList[nowIndex])
+            itemList[nowIndex].GetComponent<PaiZhaoItem>().stopVideo();
+        File.Delete(itemList[nowIndex].GetComponent<PaiZhaoItem>().data.url);
+        Destroy(itemList[nowIndex]);
+        itemList.RemoveAt(nowIndex);
+        if (itemList.Count <= 0)
+        {
+            big.gameObject.SetActive(false);
+        }
+        else
+        {
+            big.gameObject.SetActive(true);
+        }
+
+        if(nowIndex>= itemList.Count)
+        {
+            nowIndex = itemList.Count - 1;
+        }
+    }
+
+    public void addVidew(paizhaoData data)
+    {
+        GameObject obj = GameObject.Instantiate(WindowsManager.Instance.GetPrefab(windowType.XunJian, "PaiZhaoItem"), Content.transform);
+        obj.transform.SetSiblingIndex(obj.transform.GetSiblingIndex() - 1);
+        obj.GetComponent<PaiZhaoItem>().updateData(data);
+        itemList.Add(obj);
+        if (itemList.Count <= 0)
+        {
+            big.gameObject.SetActive(false);
+        }
+        else
+        {
+            big.gameObject.SetActive(true);
+        }
+    }
+
+    void clear()
+    {
+        for (int i = 0; i < itemList.Count; i++)
+        {
+            Destroy(itemList[i]);
+        }
+        itemList.Clear();
+
+    }
+}

+ 11 - 0
Assets/FrameWork/ProjectManager/Scripts/Project/XunJian/进入任务/PaiZhaoManager.cs.meta

@@ -0,0 +1,11 @@
+fileFormatVersion: 2
+guid: e89ef601c586bca4dbcc4159ffa958be
+MonoImporter:
+  externalObjects: {}
+  serializedVersion: 2
+  defaultReferences: []
+  executionOrder: 0
+  icon: {instanceID: 0}
+  userData: 
+  assetBundleName: 
+  assetBundleVariant: 

+ 122 - 0
Assets/FrameWork/ProjectManager/Scripts/Project/XunJian/进入任务/PlayerToImage.cs

@@ -0,0 +1,122 @@
+using System;
+using System.Collections;
+using System.Collections.Generic;
+using System.IO;
+using UnityEngine;
+using UnityEngine.UI;
+
+public class PlayerToImage : MonoSingleton<PlayerToImage>
+{
+    public Camera ca;
+    public event Action<string, Texture2D> LoadVideoImgEvent;
+    public Dictionary<string, Texture> VideoImgCaches = new Dictionary<string, Texture>();
+    public RawImage rimg;
+    private AVProVideoPlayer mediaPlayer;
+    public AVProVideoPlayer MediaPlayer
+    {
+        get
+        {
+            if (!mediaPlayer)
+            {
+                mediaPlayer = GetComponent<AVProVideoPlayer>();
+            }
+            return mediaPlayer;
+        }
+    }
+
+    /// <summary>
+    /// Add a video file to the load queue
+    /// </summary>
+    /// <param name="filePath"></param>
+    public void LoadVideoImage(string filePath)
+    {
+        Debug.Log("filename =0= 》" + filePath);
+       VideoQueue.Enqueue(filePath);
+    }
+    public Queue<string> VideoQueue = new Queue<string>();
+    private bool isLoading = false;
+    Texture2D screenShot;
+    public float audioOldVolume = 0;
+    IEnumerator LoadVideo(string url)
+    {
+        // yield return new WaitForSeconds(0.5f);
+        //LoadVideoImgEvent?.Invoke(url, screenShot, (float)0);
+        bool ischeck=true;
+        int ct = 0;
+        Debug.Log("filename =1= 》" + url);
+        while (ischeck)
+        {
+            ct++;
+            if (File.Exists(url)|| ct>10)
+            {
+                ischeck = false;
+            }
+            yield return new WaitForSeconds(0.5f);
+        }
+        try
+        {
+
+            Debug.Log("filename =2= 》" + url);
+            MediaPlayer.SetUrl(url);
+
+            MediaPlayer.GetComponent<MediaPlayerCtrl>().Load(MediaPlayer.GetUrl());
+            MediaPlayer.Play();
+            MediaPlayer.SetVolue(0);
+        }
+        catch
+        {
+
+        }
+
+     
+        yield return new WaitForSeconds(0.5f);
+        if(MediaPlayer.IsPlaying())
+        {
+            Debug.Log("filename =3= 》" + url);
+            double len = MediaPlayer.GetMaxTimer();
+            MediaPlayer.SetSeek(0.1f);
+            yield return new WaitForSeconds(0.5f);
+            rimg.texture = MediaPlayer.getTexture();
+            yield return RenderTexturesScreenCapture();
+            LoadVideoImgEvent?.Invoke(url, screenShot);
+            MediaPlayer.Stop();
+            Debug.Log("filename =4= 》" + url);
+            MediaPlayer.GetComponent<MediaPlayerCtrl>().UnLoad();
+
+        }
+
+        isLoading = false;
+
+    }
+
+    private void Update()
+    {
+        if (!isLoading)
+        {
+            if (VideoQueue.Count > 0)
+            {
+                isLoading = true;
+                StartCoroutine(LoadVideo(VideoQueue.Dequeue()));
+            }
+        }
+        else
+        {
+            if (VideoQueue.Count > 0)
+            {
+                Debug.Log(VideoQueue.Count);
+            }
+        }
+    }
+    IEnumerator RenderTexturesScreenCapture()
+    {
+        yield return new WaitForEndOfFrame();
+        screenShot = new Texture2D(ca.targetTexture.width, ca.targetTexture.height, TextureFormat.RGB24, false);
+
+        RenderTexture.active = ca.targetTexture;
+        screenShot.ReadPixels(new Rect(0, 0, ca.targetTexture.width, ca.targetTexture.height), 0, 0);
+        screenShot.Apply();
+        //Camera.main.targetTexture = null;
+        RenderTexture.active = null;
+        yield return 0;
+    }
+}
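
A minimal consumer sketch for PlayerToImage (illustration only; RequestPoster, videoPath and target are placeholder names) showing the subscribe-then-enqueue pattern PaiZhaoItem relies on:

    void RequestPoster(string videoPath, RawImage target)
    {
        void OnImg(string url, Texture2D frame)
        {
            if (url != videoPath) return;                      // the event fires for every queued file
            PlayerToImage.Instance.LoadVideoImgEvent -= OnImg; // one-shot: unsubscribe once our file comes back
            target.texture = frame;
        }
        PlayerToImage.Instance.LoadVideoImgEvent += OnImg;
        PlayerToImage.Instance.LoadVideoImage(videoPath);      // enqueued; Update() loads one video at a time
    }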

+ 11 - 0
Assets/FrameWork/ProjectManager/Scripts/Project/XunJian/进入任务/PlayerToImage.cs.meta

@@ -0,0 +1,11 @@
+fileFormatVersion: 2
+guid: 6653168e5a0c3b345b69b7a74503d092
+MonoImporter:
+  externalObjects: {}
+  serializedVersion: 2
+  defaultReferences: []
+  executionOrder: 0
+  icon: {instanceID: 0}
+  userData: 
+  assetBundleName: 
+  assetBundleVariant: 

+ 1 - 1
Assets/FrameWork/ProjectManager/Scripts/Project/XunJian/进入任务/RenWuListItemWindow.cs

@@ -51,6 +51,6 @@ public class RenWuListItemWindow : WindowSingleton<RenWuListItemWindow>
 
     public void chooseItem()
     {
-        XunJianDataManager.Instance.chooseItem(data.index);
+       // XunJianDataManager.Instance.chooseItem(data.index);
     }
 }

+ 2 - 21
Assets/FrameWork/ProjectManager/Scripts/Project/XunJian/进入任务/RenWuTypeWindow.cs

@@ -23,9 +23,7 @@ public class RenWuTypeWindow : MonoBehaviour
         {
             case RenWuType.Video:
                 DownLoadXRManager.DownLoadForFilePath(DownLoadXRManager.getTestData(data.url), (string bytes) => {
-                    AVProVideoPlayer avpro =  this.GetComponent<AVProVideoPlayer>();
-                    avpro.SetUrl(bytes);
-                    avpro.Play();
+                    this.GetComponent<ShowVideoManager>().url = bytes;
                 }, (float f) => {
 
 
@@ -46,24 +44,7 @@ public class RenWuTypeWindow : MonoBehaviour
                 break;
             case RenWuType.Model:
                 DownLoadXRManager.DownLoadForFilePath(DownLoadXRManager.getTestData(data.url),(string bytes)=> {
-
-                    TriLibModelLoad.Load(bytes, (AssetLoaderContext ac) => {
-                        ac.RootGameObject.SetActive(false);
-                        Debug.Log("模型加载完成");
-                    }, (AssetLoaderContext ac) => {
-                        Debug.Log("载材质加完成");
-                        ac.RootGameObject.transform.parent = this.transform;
-                        ac.RootGameObject.transform.localPosition = Vector3.zero;
-                        ac.RootGameObject.transform.localEulerAngles = Vector3.zero;
-                        ac.RootGameObject.transform.localScale = Vector3.one;
-                        ac.RootGameObject.SetActive(true);
-                    }, (AssetLoaderContext ac, float f) => {
-
-                        Debug.Log("加载中==》" + f);
-                    }, (IContextualizedError error) => {
-
-                        Debug.Log("加载失败" + error);
-                    });
+                    this.GetComponent<ShowModelManager>().Onupdate(bytes);
                 },(float f)=> { 
                 
                 

+ 12 - 0
Assets/SaoTuManager.cs → Assets/FrameWork/ProjectManager/Scripts/Project/XunJian/进入任务/SaoTuManager.cs

@@ -17,4 +17,16 @@ public class SaoTuManager : MonoBehaviour
         WindowsManager.Instance.show(WindowConfig.windowType.Error, false, WindowsManager.Instance.getErrorData("标识图扫描", "请前往第一个任务点并扫描标识图", Color.white, "icon", backTip, false, "", 5, "我已到达", "", "").ToJson());
 
     }
+    public void showDianYun()
+    {
+
+        JsonData data = new JsonData();
+        data["type"] = "10001";
+        List<string> backTip = new List<string>();
+        backTip.Add(data.ToJson());
+        backTip.Add(data.ToJson());
+        backTip.Add(data.ToJson());
+        WindowsManager.Instance.show(WindowConfig.windowType.Error, false, WindowsManager.Instance.getErrorData("标识图扫描", "请前往第一个任务点并扫描标识图", Color.white, "icon", backTip, false, "", 5, "我已到达", "", "").ToJson());
+
+    }
 }

+ 0 - 0
Assets/SaoTuManager.cs.meta → Assets/FrameWork/ProjectManager/Scripts/Project/XunJian/进入任务/SaoTuManager.cs.meta


+ 18 - 0
Assets/FrameWork/ProjectManager/Scripts/Project/XunJian/进入任务/ShowImageManager.cs

@@ -0,0 +1,18 @@
+using System.Collections;
+using System.Collections.Generic;
+using UnityEngine;
+
+public class ShowImageManager : MonoBehaviour
+{
+    // Start is called before the first frame update
+    void Start()
+    {
+        
+    }
+
+    // Update is called once per frame
+    void Update()
+    {
+        
+    }
+}

+ 11 - 0
Assets/FrameWork/ProjectManager/Scripts/Project/XunJian/进入任务/ShowImageManager.cs.meta

@@ -0,0 +1,11 @@
+fileFormatVersion: 2
+guid: c87572251ca822141acf06c5ca528a82
+MonoImporter:
+  externalObjects: {}
+  serializedVersion: 2
+  defaultReferences: []
+  executionOrder: 0
+  icon: {instanceID: 0}
+  userData: 
+  assetBundleName: 
+  assetBundleVariant: 

+ 35 - 0
Assets/FrameWork/ProjectManager/Scripts/Project/XunJian/进入任务/ShowModelManager.cs

@@ -0,0 +1,35 @@
+using System.Collections;
+using System.Collections.Generic;
+using TriLibCore;
+using UnityEngine;
+
+public class ShowModelManager : MonoBehaviour
+{
+    public GameObject root;
+    public void Onupdate(string url)
+    {
+        if(root!=null)
+        {
+            Destroy(root);
+        }
+
+        TriLibModelLoad.Load(url, (AssetLoaderContext ac) => {
+            ac.RootGameObject.SetActive(false);
+            //Debug.Log("模型加载完成");
+        }, (AssetLoaderContext ac) => {
+            //Debug.Log("载材质加完成");
+            root = ac.RootGameObject;
+            ac.RootGameObject.transform.parent = this.transform;
+            ac.RootGameObject.transform.localPosition = Vector3.zero;
+            ac.RootGameObject.transform.localEulerAngles = Vector3.zero;
+            ac.RootGameObject.transform.localScale = Vector3.one;
+            ac.RootGameObject.SetActive(true);
+        }, (AssetLoaderContext ac, float f) => {
+
+            //Debug.Log("加载中==》" + f);
+        }, (IContextualizedError error) => {
+
+            //Debug.Log("加载失败" + error);
+        });
+    }
+}

+ 11 - 0
Assets/FrameWork/ProjectManager/Scripts/Project/XunJian/进入任务/ShowModelManager.cs.meta

@@ -0,0 +1,11 @@
+fileFormatVersion: 2
+guid: 9b952a506485cf5488d6957fe20cf10f
+MonoImporter:
+  externalObjects: {}
+  serializedVersion: 2
+  defaultReferences: []
+  executionOrder: 0
+  icon: {instanceID: 0}
+  userData: 
+  assetBundleName: 
+  assetBundleVariant: 

+ 18 - 0
Assets/FrameWork/ProjectManager/Scripts/Project/XunJian/进入任务/ShowTextManager.cs

@@ -0,0 +1,18 @@
+using System.Collections;
+using System.Collections.Generic;
+using UnityEngine;
+
+public class ShowTextManager : MonoBehaviour
+{
+    // Start is called before the first frame update
+    void Start()
+    {
+        
+    }
+
+    // Update is called once per frame
+    void Update()
+    {
+        
+    }
+}

+ 11 - 0
Assets/FrameWork/ProjectManager/Scripts/Project/XunJian/进入任务/ShowTextManager.cs.meta

@@ -0,0 +1,11 @@
+fileFormatVersion: 2
+guid: f3c339f25e0741b4eab44003b96024c3
+MonoImporter:
+  externalObjects: {}
+  serializedVersion: 2
+  defaultReferences: []
+  executionOrder: 0
+  icon: {instanceID: 0}
+  userData: 
+  assetBundleName: 
+  assetBundleVariant: 

+ 27 - 0
Assets/FrameWork/ProjectManager/Scripts/Project/XunJian/进入任务/ShowVideoManager.cs

@@ -0,0 +1,27 @@
+using System.Collections;
+using System.Collections.Generic;
+using UnityEngine;
+
+public class ShowVideoManager : MonoBehaviour
+{
+    public string url;
+    AVProVideoPlayer avpro;
+    private void OnEnable()
+    {
+        if(url!=null&& url!="")
+        {
+            avpro = this.GetComponent<AVProVideoPlayer>();
+            avpro.SetUrl(url);
+            avpro.Play();
+
+        }
+    }
+
+    private void OnDisable()
+    {
+        if(avpro!=null)
+        {
+            avpro.Stop();
+        }
+    }
+}

+ 11 - 0
Assets/FrameWork/ProjectManager/Scripts/Project/XunJian/进入任务/ShowVideoManager.cs.meta

@@ -0,0 +1,11 @@
+fileFormatVersion: 2
+guid: 6c43c2e969f7c77408b270b6f63e4f09
+MonoImporter:
+  externalObjects: {}
+  serializedVersion: 2
+  defaultReferences: []
+  executionOrder: 0
+  icon: {instanceID: 0}
+  userData: 
+  assetBundleName: 
+  assetBundleVariant: 

+ 0 - 0
Assets/WindowGenSui.cs → Assets/FrameWork/ProjectManager/Scripts/Project/XunJian/进入任务/WindowGenSui.cs


+ 0 - 0
Assets/WindowGenSui.cs.meta → Assets/FrameWork/ProjectManager/Scripts/Project/XunJian/进入任务/WindowGenSui.cs.meta


+ 21 - 0
Assets/FrameWork/ProjectManager/Scripts/ShowInfoTipManager.cs

@@ -0,0 +1,21 @@
+using System.Collections;
+using System.Collections.Generic;
+using TMPro;
+using UnityEngine;
+
+public class ShowInfoTipManager : MonoSingleton<ShowInfoTipManager>
+{
+    public GameObject go;
+    public TextMeshProUGUI text;
+
+    public void showTip(string msg)
+    {
+        go.gameObject.SetActive(true);
+        text.text = msg;
+    }
+
+    public void closeTip()
+    {
+        go.gameObject.SetActive(false);
+    }
+}

+ 11 - 0
Assets/FrameWork/ProjectManager/Scripts/ShowInfoTipManager.cs.meta

@@ -0,0 +1,11 @@
+fileFormatVersion: 2
+guid: 5c52777cb1ca72c4d98b47db4f52e8e5
+MonoImporter:
+  externalObjects: {}
+  serializedVersion: 2
+  defaultReferences: []
+  executionOrder: 0
+  icon: {instanceID: 0}
+  userData: 
+  assetBundleName: 
+  assetBundleVariant: 

+ 7 - 0
Assets/FrameWork/ProjectManager/Scripts/TipAndErrorManager.cs

@@ -22,6 +22,13 @@ public class TipAndErrorManager : WindowSingleton<TipAndErrorManager>
                 case "10001":
                     JinRuRenwu.Instance.GotoStart();
                     break;
+                case "20001":
+                    JinRuRenwu.Instance.show();
+                    break;
+                case "30001":
+                    CaoZuoLanManager.Instance.GotoLieBiao();
+                    break;
+                    
             }
         }
         catch

+ 41 - 0
Assets/FrameWork/ProjectManager/Scripts/TipInfoMsgManager.cs

@@ -0,0 +1,41 @@
+using System.Collections;
+using System.Collections.Generic;
+using UnityEngine;
+using UnityEngine.EventSystems;
+
+public class TipInfoMsgManager : MonoBehaviour,IPointerHandler
+{
+
+    public string msg;
+    public void OnDrag(PointerEventData eventData)
+    {
+    }
+
+    public void OnPointerClick(PointerEventData eventData)
+    {
+    }
+
+    public void OnPointerDown(PointerEventData eventData)
+    {
+    }
+
+    public void OnPointerEnter(PointerEventData eventData)
+    {
+        ShowInfoTipManager.Instance.showTip(msg);
+    }
+
+    public void OnPointerExit(PointerEventData eventData)
+    {
+        ShowInfoTipManager.Instance.closeTip();
+    }
+
+    public void OnPointerUp(PointerEventData eventData)
+    {
+    }
+    private void OnDisable()
+    {
+        if(ShowInfoTipManager.Instance)
+        ShowInfoTipManager.Instance.closeTip();
+    }
+
+}

+ 11 - 0
Assets/FrameWork/ProjectManager/Scripts/TipInfoMsgManager.cs.meta

@@ -0,0 +1,11 @@
+fileFormatVersion: 2
+guid: 35172762ec0d5c649988b493bae6ac42
+MonoImporter:
+  externalObjects: {}
+  serializedVersion: 2
+  defaultReferences: []
+  executionOrder: 0
+  icon: {instanceID: 0}
+  userData: 
+  assetBundleName: 
+  assetBundleVariant: 

+ 13 - 0
Assets/FrameWork/ProjectManager/Text.prefab

@@ -14,6 +14,7 @@ GameObject:
   - component: {fileID: 5551775021085795401}
   - component: {fileID: 2229417628307891464}
   - component: {fileID: 464504873149100750}
+  - component: {fileID: -1221202283288059475}
   m_Layer: 5
   m_Name: Text
   m_TagString: Untagged
@@ -132,6 +133,18 @@ MonoBehaviour:
   m_Script: {fileID: 11500000, guid: b9df92cc077b8924abbbb78762154fe0, type: 3}
   m_Name: 
   m_EditorClassIdentifier: 
+--- !u!114 &-1221202283288059475
+MonoBehaviour:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  m_GameObject: {fileID: 2240469572346570524}
+  m_Enabled: 1
+  m_EditorHideFlags: 0
+  m_Script: {fileID: 11500000, guid: f3c339f25e0741b4eab44003b96024c3, type: 3}
+  m_Name: 
+  m_EditorClassIdentifier: 
 --- !u!1 &5530050303012494401
 GameObject:
   m_ObjectHideFlags: 0

+ 14 - 0
Assets/FrameWork/ProjectManager/Top.prefab

@@ -537,6 +537,7 @@ GameObject:
   - component: {fileID: 5100120351558288135}
   - component: {fileID: 2859152896656582607}
   - component: {fileID: 1668592583740475880}
+  - component: {fileID: 3518188434427396521}
   m_Layer: 5
   m_Name: LogOut
   m_TagString: Untagged
@@ -655,3 +656,16 @@ MonoBehaviour:
           m_StringArgument: 
           m_BoolArgument: 0
         m_CallState: 2
+--- !u!114 &3518188434427396521
+MonoBehaviour:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  m_GameObject: {fileID: 8569551128891729803}
+  m_Enabled: 1
+  m_EditorHideFlags: 0
+  m_Script: {fileID: 11500000, guid: 35172762ec0d5c649988b493bae6ac42, type: 3}
+  m_Name: 
+  m_EditorClassIdentifier: 
+  msg: "\u9000\u51FA\u8D26\u53F7"

+ 13 - 0
Assets/FrameWork/ProjectManager/Video.prefab

@@ -12,6 +12,7 @@ GameObject:
   - component: {fileID: 6848787593610870211}
   - component: {fileID: 5759322430622227990}
   - component: {fileID: 6375102404521121725}
+  - component: {fileID: 1600708616988324897}
   m_Layer: 5
   m_Name: Video
   m_TagString: Untagged
@@ -86,3 +87,15 @@ MonoBehaviour:
   m_Script: {fileID: 11500000, guid: b9df92cc077b8924abbbb78762154fe0, type: 3}
   m_Name: 
   m_EditorClassIdentifier: 
+--- !u!114 &1600708616988324897
+MonoBehaviour:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  m_GameObject: {fileID: 2413293942919352051}
+  m_Enabled: 1
+  m_EditorHideFlags: 0
+  m_Script: {fileID: 11500000, guid: 6c43c2e969f7c77408b270b6f63e4f09, type: 3}
+  m_Name: 
+  m_EditorClassIdentifier: 

File diff suppressed because it is too large
+ 653 - 106
Assets/FrameWork/ProjectManager/XunJian/进入任务/进入任务.prefab


+ 311 - 0
Assets/FrameWork/ProjectManager/paizhaoItem.prefab

@@ -0,0 +1,311 @@
+%YAML 1.1
+%TAG !u! tag:unity3d.com,2011:
+--- !u!1 &895371214607694078
+GameObject:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  serializedVersion: 6
+  m_Component:
+  - component: {fileID: 5707687602762284230}
+  - component: {fileID: 7309134199506973735}
+  - component: {fileID: 8856316178823262411}
+  - component: {fileID: 8203841934488375}
+  - component: {fileID: 2208364255844893289}
+  - component: {fileID: 6705655446350118404}
+  m_Layer: 5
+  m_Name: paizhaoItem
+  m_TagString: Untagged
+  m_Icon: {fileID: 0}
+  m_NavMeshLayer: 0
+  m_StaticEditorFlags: 0
+  m_IsActive: 1
+--- !u!224 &5707687602762284230
+RectTransform:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  m_GameObject: {fileID: 895371214607694078}
+  m_LocalRotation: {x: 0, y: 0, z: 0, w: 1}
+  m_LocalPosition: {x: 0, y: 0, z: 0}
+  m_LocalScale: {x: 1, y: 1, z: 1}
+  m_ConstrainProportionsScale: 0
+  m_Children:
+  - {fileID: 7546922959049974175}
+  - {fileID: 2390901495743449360}
+  m_Father: {fileID: 0}
+  m_RootOrder: 0
+  m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
+  m_AnchorMin: {x: 0, y: 1}
+  m_AnchorMax: {x: 0, y: 1}
+  m_AnchoredPosition: {x: 50, y: -50}
+  m_SizeDelta: {x: 100, y: 100}
+  m_Pivot: {x: 0.5, y: 0.5}
+--- !u!222 &7309134199506973735
+CanvasRenderer:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  m_GameObject: {fileID: 895371214607694078}
+  m_CullTransparentMesh: 1
+--- !u!114 &8856316178823262411
+MonoBehaviour:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  m_GameObject: {fileID: 895371214607694078}
+  m_Enabled: 1
+  m_EditorHideFlags: 0
+  m_Script: {fileID: 11500000, guid: 85b67949d6c314b438fa08b83a770b94, type: 3}
+  m_Name: 
+  m_EditorClassIdentifier: 
+  index: 0
+  playImg: {fileID: 3810644886565967928}
+  tex: {fileID: 0}
+  item: {fileID: 4361928126334041447}
+  av: {fileID: 6705655446350118404}
+--- !u!114 &8203841934488375
+MonoBehaviour:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  m_GameObject: {fileID: 895371214607694078}
+  m_Enabled: 1
+  m_EditorHideFlags: 0
+  m_Script: {fileID: 11500000, guid: 1344c3c82d62a2a41a3576d8abb8e3ea, type: 3}
+  m_Name: 
+  m_EditorClassIdentifier: 
+  m_Material: {fileID: 0}
+  m_Color: {r: 0.254717, g: 0.254717, b: 0.254717, a: 0}
+  m_RaycastTarget: 1
+  m_RaycastPadding: {x: 0, y: 0, z: 0, w: 0}
+  m_Maskable: 1
+  m_OnCullStateChanged:
+    m_PersistentCalls:
+      m_Calls: []
+  m_Texture: {fileID: 0}
+  m_UVRect:
+    serializedVersion: 2
+    x: 0
+    y: 0
+    width: 1
+    height: 1
+--- !u!114 &2208364255844893289
+MonoBehaviour:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  m_GameObject: {fileID: 895371214607694078}
+  m_Enabled: 1
+  m_EditorHideFlags: 0
+  m_Script: {fileID: 11500000, guid: 4e29b1a8efbd4b44bb3f3716e73f07ff, type: 3}
+  m_Name: 
+  m_EditorClassIdentifier: 
+  m_Navigation:
+    m_Mode: 3
+    m_WrapAround: 0
+    m_SelectOnUp: {fileID: 0}
+    m_SelectOnDown: {fileID: 0}
+    m_SelectOnLeft: {fileID: 0}
+    m_SelectOnRight: {fileID: 0}
+  m_Transition: 1
+  m_Colors:
+    m_NormalColor: {r: 1, g: 1, b: 1, a: 1}
+    m_HighlightedColor: {r: 0.9607843, g: 0.9607843, b: 0.9607843, a: 1}
+    m_PressedColor: {r: 0.78431374, g: 0.78431374, b: 0.78431374, a: 1}
+    m_SelectedColor: {r: 0.9607843, g: 0.9607843, b: 0.9607843, a: 1}
+    m_DisabledColor: {r: 0.78431374, g: 0.78431374, b: 0.78431374, a: 0.5019608}
+    m_ColorMultiplier: 1
+    m_FadeDuration: 0.1
+  m_SpriteState:
+    m_HighlightedSprite: {fileID: 0}
+    m_PressedSprite: {fileID: 0}
+    m_SelectedSprite: {fileID: 0}
+    m_DisabledSprite: {fileID: 0}
+  m_AnimationTriggers:
+    m_NormalTrigger: Normal
+    m_HighlightedTrigger: Highlighted
+    m_PressedTrigger: Pressed
+    m_SelectedTrigger: Selected
+    m_DisabledTrigger: Disabled
+  m_Interactable: 1
+  m_TargetGraphic: {fileID: 8203841934488375}
+  m_OnClick:
+    m_PersistentCalls:
+      m_Calls:
+      - m_Target: {fileID: 8856316178823262411}
+        m_TargetAssemblyTypeName: PaiZhaoItem, Assembly-CSharp
+        m_MethodName: showImage
+        m_Mode: 1
+        m_Arguments:
+          m_ObjectArgument: {fileID: 0}
+          m_ObjectArgumentAssemblyTypeName: UnityEngine.Object, UnityEngine
+          m_IntArgument: 0
+          m_FloatArgument: 0
+          m_StringArgument: 
+          m_BoolArgument: 0
+        m_CallState: 2
+--- !u!114 &6705655446350118404
+MonoBehaviour:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  m_GameObject: {fileID: 895371214607694078}
+  m_Enabled: 1
+  m_EditorHideFlags: 0
+  m_Script: {fileID: 11500000, guid: eb94bf957aaed554492064dde1c1775e, type: 3}
+  m_Name: 
+  m_EditorClassIdentifier: 
+--- !u!1 &3810644886565967928
+GameObject:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  serializedVersion: 6
+  m_Component:
+  - component: {fileID: 2390901495743449360}
+  - component: {fileID: 1148538418558650821}
+  - component: {fileID: 1291598619100303549}
+  m_Layer: 5
+  m_Name: RawImage (2)
+  m_TagString: Untagged
+  m_Icon: {fileID: 0}
+  m_NavMeshLayer: 0
+  m_StaticEditorFlags: 0
+  m_IsActive: 0
+--- !u!224 &2390901495743449360
+RectTransform:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  m_GameObject: {fileID: 3810644886565967928}
+  m_LocalRotation: {x: -0, y: -0, z: -0, w: 1}
+  m_LocalPosition: {x: 0, y: 0, z: 0}
+  m_LocalScale: {x: 1, y: 1, z: 1}
+  m_ConstrainProportionsScale: 0
+  m_Children: []
+  m_Father: {fileID: 5707687602762284230}
+  m_RootOrder: 1
+  m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
+  m_AnchorMin: {x: 0.5, y: 0.5}
+  m_AnchorMax: {x: 0.5, y: 0.5}
+  m_AnchoredPosition: {x: 0, y: 0}
+  m_SizeDelta: {x: 100, y: 100}
+  m_Pivot: {x: 0.5, y: 0.5}
+--- !u!222 &1148538418558650821
+CanvasRenderer:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  m_GameObject: {fileID: 3810644886565967928}
+  m_CullTransparentMesh: 1
+--- !u!114 &1291598619100303549
+MonoBehaviour:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  m_GameObject: {fileID: 3810644886565967928}
+  m_Enabled: 1
+  m_EditorHideFlags: 0
+  m_Script: {fileID: 11500000, guid: 1344c3c82d62a2a41a3576d8abb8e3ea, type: 3}
+  m_Name: 
+  m_EditorClassIdentifier: 
+  m_Material: {fileID: 0}
+  m_Color: {r: 1, g: 1, b: 1, a: 1}
+  m_RaycastTarget: 1
+  m_RaycastPadding: {x: 0, y: 0, z: 0, w: 0}
+  m_Maskable: 1
+  m_OnCullStateChanged:
+    m_PersistentCalls:
+      m_Calls: []
+  m_Texture: {fileID: 2800000, guid: 538ab41c860fe1b49a78a49218e38ace, type: 3}
+  m_UVRect:
+    serializedVersion: 2
+    x: 0
+    y: 0
+    width: 1
+    height: 1
+--- !u!1 &4361928126334041447
+GameObject:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  serializedVersion: 6
+  m_Component:
+  - component: {fileID: 7546922959049974175}
+  - component: {fileID: 969472721799915817}
+  - component: {fileID: 544722747351198316}
+  m_Layer: 5
+  m_Name: RawImage (1)
+  m_TagString: Untagged
+  m_Icon: {fileID: 0}
+  m_NavMeshLayer: 0
+  m_StaticEditorFlags: 0
+  m_IsActive: 1
+--- !u!224 &7546922959049974175
+RectTransform:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  m_GameObject: {fileID: 4361928126334041447}
+  m_LocalRotation: {x: -0, y: -0, z: -0, w: 1}
+  m_LocalPosition: {x: 0, y: 0, z: 0}
+  m_LocalScale: {x: 1, y: 1, z: 1}
+  m_ConstrainProportionsScale: 0
+  m_Children: []
+  m_Father: {fileID: 5707687602762284230}
+  m_RootOrder: 0
+  m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
+  m_AnchorMin: {x: 0, y: 0}
+  m_AnchorMax: {x: 1, y: 1}
+  m_AnchoredPosition: {x: 0, y: 0}
+  m_SizeDelta: {x: -20, y: 0}
+  m_Pivot: {x: 0.5, y: 0.5}
+--- !u!222 &969472721799915817
+CanvasRenderer:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  m_GameObject: {fileID: 4361928126334041447}
+  m_CullTransparentMesh: 1
+--- !u!114 &544722747351198316
+MonoBehaviour:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  m_GameObject: {fileID: 4361928126334041447}
+  m_Enabled: 1
+  m_EditorHideFlags: 0
+  m_Script: {fileID: 11500000, guid: 1344c3c82d62a2a41a3576d8abb8e3ea, type: 3}
+  m_Name: 
+  m_EditorClassIdentifier: 
+  m_Material: {fileID: 0}
+  m_Color: {r: 1, g: 1, b: 1, a: 1}
+  m_RaycastTarget: 1
+  m_RaycastPadding: {x: 0, y: 0, z: 0, w: 0}
+  m_Maskable: 1
+  m_OnCullStateChanged:
+    m_PersistentCalls:
+      m_Calls: []
+  m_Texture: {fileID: 2800000, guid: 7dc90fa7d51f4ef418b206205ce32205, type: 3}
+  m_UVRect:
+    serializedVersion: 2
+    x: 0
+    y: 0
+    width: 1
+    height: 1
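
The paizhaoItem prefab above wires its Button's m_OnClick to a persistent call on PaiZhaoItem.showImage and serializes index, playImg, tex, item and av on the same component. PaiZhaoItem.cs itself is not part of this hunk, so the following is only a minimal sketch of how such a thumbnail item could react to the click; everything beyond the serialized field names (the bigPreview reference and the method body) is an assumption.

using UnityEngine;
using UnityEngine.UI;

// Hypothetical sketch only; not the committed PaiZhaoItem.cs.
public class PaiZhaoItemSketch : MonoBehaviour
{
    public int index;            // position of this capture in the list
    public GameObject playImg;   // play-icon overlay, active only for video entries
    public Texture tex;          // captured texture backing this entry
    public RawImage item;        // small thumbnail RawImage ("RawImage (1)" above)
    public RawImage bigPreview;  // assumed: enlarged RawImage to fill on click

    // Bound like the Button's m_OnClick persistent call in the prefab above.
    public void showImage()
    {
        if (bigPreview == null || tex == null) return;
        bigPreview.texture = tex;               // show this capture full size
        bigPreview.gameObject.SetActive(true);
    }
}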

+ 7 - 0
Assets/FrameWork/ProjectManager/paizhaoItem.prefab.meta

@@ -0,0 +1,7 @@
+fileFormatVersion: 2
+guid: 3e7ff33a2aaa09442ae2d68ab2e243df
+PrefabImporter:
+  externalObjects: {}
+  userData: 
+  assetBundleName: 
+  assetBundleVariant: 

+ 889 - 0
Assets/FrameWork/ProjectManager/paizhaoList.prefab

@@ -0,0 +1,889 @@
+%YAML 1.1
+%TAG !u! tag:unity3d.com,2011:
+--- !u!1 &70856248087562596
+GameObject:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  serializedVersion: 6
+  m_Component:
+  - component: {fileID: 1729325788605878995}
+  - component: {fileID: 8000435183856549967}
+  - component: {fileID: 8611361867541936412}
+  - component: {fileID: 5677694605870762636}
+  m_Layer: 5
+  m_Name: Scroll View
+  m_TagString: Untagged
+  m_Icon: {fileID: 0}
+  m_NavMeshLayer: 0
+  m_StaticEditorFlags: 0
+  m_IsActive: 1
+--- !u!224 &1729325788605878995
+RectTransform:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  m_GameObject: {fileID: 70856248087562596}
+  m_LocalRotation: {x: 0, y: 0, z: 0, w: 1}
+  m_LocalPosition: {x: 0, y: 0, z: 0}
+  m_LocalScale: {x: 1, y: 1, z: 1}
+  m_ConstrainProportionsScale: 0
+  m_Children:
+  - {fileID: 2419238578547944628}
+  m_Father: {fileID: 8949749624788767784}
+  m_RootOrder: 0
+  m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
+  m_AnchorMin: {x: 0, y: 0}
+  m_AnchorMax: {x: 0, y: 0}
+  m_AnchoredPosition: {x: 0, y: -50}
+  m_SizeDelta: {x: 640, y: 100}
+  m_Pivot: {x: 0, y: 0.5}
+--- !u!222 &8000435183856549967
+CanvasRenderer:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  m_GameObject: {fileID: 70856248087562596}
+  m_CullTransparentMesh: 1
+--- !u!114 &8611361867541936412
+MonoBehaviour:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  m_GameObject: {fileID: 70856248087562596}
+  m_Enabled: 1
+  m_EditorHideFlags: 0
+  m_Script: {fileID: 11500000, guid: fe87c0e1cc204ed48ad3b37840f39efc, type: 3}
+  m_Name: 
+  m_EditorClassIdentifier: 
+  m_Material: {fileID: 0}
+  m_Color: {r: 1, g: 1, b: 1, a: 0}
+  m_RaycastTarget: 1
+  m_RaycastPadding: {x: 0, y: 0, z: 0, w: 0}
+  m_Maskable: 1
+  m_OnCullStateChanged:
+    m_PersistentCalls:
+      m_Calls: []
+  m_Sprite: {fileID: 10907, guid: 0000000000000000f000000000000000, type: 0}
+  m_Type: 1
+  m_PreserveAspect: 0
+  m_FillCenter: 1
+  m_FillMethod: 4
+  m_FillAmount: 1
+  m_FillClockwise: 1
+  m_FillOrigin: 0
+  m_UseSpriteMesh: 0
+  m_PixelsPerUnitMultiplier: 1
+--- !u!114 &5677694605870762636
+MonoBehaviour:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  m_GameObject: {fileID: 70856248087562596}
+  m_Enabled: 1
+  m_EditorHideFlags: 0
+  m_Script: {fileID: 11500000, guid: 1aa08ab6e0800fa44ae55d278d1423e3, type: 3}
+  m_Name: 
+  m_EditorClassIdentifier: 
+  m_Content: {fileID: 6284254867043529287}
+  m_Horizontal: 1
+  m_Vertical: 0
+  m_MovementType: 1
+  m_Elasticity: 0.1
+  m_Inertia: 1
+  m_DecelerationRate: 0.135
+  m_ScrollSensitivity: 1
+  m_Viewport: {fileID: 2419238578547944628}
+  m_HorizontalScrollbar: {fileID: 0}
+  m_VerticalScrollbar: {fileID: 0}
+  m_HorizontalScrollbarVisibility: 1
+  m_VerticalScrollbarVisibility: 1
+  m_HorizontalScrollbarSpacing: -3
+  m_VerticalScrollbarSpacing: -3
+  m_OnValueChanged:
+    m_PersistentCalls:
+      m_Calls: []
+--- !u!1 &1634709308374064041
+GameObject:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  serializedVersion: 6
+  m_Component:
+  - component: {fileID: 6534303979265014755}
+  - component: {fileID: 4142364702878761493}
+  m_Layer: 5
+  m_Name: Buttom
+  m_TagString: Untagged
+  m_Icon: {fileID: 0}
+  m_NavMeshLayer: 0
+  m_StaticEditorFlags: 0
+  m_IsActive: 1
+--- !u!224 &6534303979265014755
+RectTransform:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  m_GameObject: {fileID: 1634709308374064041}
+  m_LocalRotation: {x: 0, y: 0, z: 0, w: 1}
+  m_LocalPosition: {x: 0, y: 0, z: 0}
+  m_LocalScale: {x: 1, y: 1, z: 1}
+  m_ConstrainProportionsScale: 0
+  m_Children:
+  - {fileID: 4360556328327235407}
+  m_Father: {fileID: 6284254867043529287}
+  m_RootOrder: 1
+  m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
+  m_AnchorMin: {x: 0, y: 1}
+  m_AnchorMax: {x: 0, y: 1}
+  m_AnchoredPosition: {x: 405, y: -50}
+  m_SizeDelta: {x: 270, y: 100}
+  m_Pivot: {x: 0.5, y: 0.5}
+--- !u!222 &4142364702878761493
+CanvasRenderer:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  m_GameObject: {fileID: 1634709308374064041}
+  m_CullTransparentMesh: 1
+--- !u!1 &2663175551128001469
+GameObject:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  serializedVersion: 6
+  m_Component:
+  - component: {fileID: 8949749624788767784}
+  - component: {fileID: 7419981905106404128}
+  - component: {fileID: 9147873482401368671}
+  - component: {fileID: 7708746070731989160}
+  m_Layer: 5
+  m_Name: paizhaoList
+  m_TagString: Untagged
+  m_Icon: {fileID: 0}
+  m_NavMeshLayer: 0
+  m_StaticEditorFlags: 0
+  m_IsActive: 1
+--- !u!224 &8949749624788767784
+RectTransform:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  m_GameObject: {fileID: 2663175551128001469}
+  m_LocalRotation: {x: 0, y: 0, z: 0, w: 1}
+  m_LocalPosition: {x: 0, y: 0, z: 0}
+  m_LocalScale: {x: 1, y: 1, z: 1}
+  m_ConstrainProportionsScale: 0
+  m_Children:
+  - {fileID: 1729325788605878995}
+  - {fileID: 6220833517895301435}
+  - {fileID: 6777327179038105997}
+  m_Father: {fileID: 0}
+  m_RootOrder: 0
+  m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
+  m_AnchorMin: {x: 0, y: 0}
+  m_AnchorMax: {x: 0, y: 0}
+  m_AnchoredPosition: {x: 0, y: 0}
+  m_SizeDelta: {x: 0, y: 0}
+  m_Pivot: {x: 0.5, y: 0.5}
+--- !u!222 &7419981905106404128
+CanvasRenderer:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  m_GameObject: {fileID: 2663175551128001469}
+  m_CullTransparentMesh: 1
+--- !u!114 &9147873482401368671
+MonoBehaviour:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  m_GameObject: {fileID: 2663175551128001469}
+  m_Enabled: 1
+  m_EditorHideFlags: 0
+  m_Script: {fileID: 11500000, guid: 1344c3c82d62a2a41a3576d8abb8e3ea, type: 3}
+  m_Name: 
+  m_EditorClassIdentifier: 
+  m_Material: {fileID: 0}
+  m_Color: {r: 1, g: 1, b: 1, a: 0}
+  m_RaycastTarget: 0
+  m_RaycastPadding: {x: 0, y: 0, z: 0, w: 0}
+  m_Maskable: 1
+  m_OnCullStateChanged:
+    m_PersistentCalls:
+      m_Calls: []
+  m_Texture: {fileID: 0}
+  m_UVRect:
+    serializedVersion: 2
+    x: 0
+    y: 0
+    width: 1
+    height: 1
+--- !u!114 &7708746070731989160
+MonoBehaviour:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  m_GameObject: {fileID: 2663175551128001469}
+  m_Enabled: 1
+  m_EditorHideFlags: 0
+  m_Script: {fileID: 11500000, guid: e89ef601c586bca4dbcc4159ffa958be, type: 3}
+  m_Name: 
+  m_EditorClassIdentifier: 
+  global: 0
+  playImg: {fileID: 0}
+  big: {fileID: 8132153799750047636}
+  Content: {fileID: 5844032987995997011}
+--- !u!1 &3108169979341904516
+GameObject:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  serializedVersion: 6
+  m_Component:
+  - component: {fileID: 4360556328327235407}
+  - component: {fileID: 2674302272488659106}
+  - component: {fileID: 5720397512942380326}
+  m_Layer: 5
+  m_Name: RawImage (1)
+  m_TagString: Untagged
+  m_Icon: {fileID: 0}
+  m_NavMeshLayer: 0
+  m_StaticEditorFlags: 0
+  m_IsActive: 0
+--- !u!224 &4360556328327235407
+RectTransform:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  m_GameObject: {fileID: 3108169979341904516}
+  m_LocalRotation: {x: -0, y: -0, z: -0, w: 1}
+  m_LocalPosition: {x: 0, y: 0, z: 0}
+  m_LocalScale: {x: 1, y: 1, z: 1}
+  m_ConstrainProportionsScale: 0
+  m_Children: []
+  m_Father: {fileID: 6534303979265014755}
+  m_RootOrder: 0
+  m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
+  m_AnchorMin: {x: 0.5, y: 0.5}
+  m_AnchorMax: {x: 0.5, y: 0.5}
+  m_AnchoredPosition: {x: 0, y: 0}
+  m_SizeDelta: {x: 100, y: 100}
+  m_Pivot: {x: 0.5, y: 0.5}
+--- !u!222 &2674302272488659106
+CanvasRenderer:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  m_GameObject: {fileID: 3108169979341904516}
+  m_CullTransparentMesh: 1
+--- !u!114 &5720397512942380326
+MonoBehaviour:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  m_GameObject: {fileID: 3108169979341904516}
+  m_Enabled: 1
+  m_EditorHideFlags: 0
+  m_Script: {fileID: 11500000, guid: 1344c3c82d62a2a41a3576d8abb8e3ea, type: 3}
+  m_Name: 
+  m_EditorClassIdentifier: 
+  m_Material: {fileID: 0}
+  m_Color: {r: 1, g: 1, b: 1, a: 1}
+  m_RaycastTarget: 1
+  m_RaycastPadding: {x: 0, y: 0, z: 0, w: 0}
+  m_Maskable: 1
+  m_OnCullStateChanged:
+    m_PersistentCalls:
+      m_Calls: []
+  m_Texture: {fileID: 2800000, guid: 46f69f3374c9503469639f738a7a4cde, type: 3}
+  m_UVRect:
+    serializedVersion: 2
+    x: 0
+    y: 0
+    width: 1
+    height: 1
+--- !u!1 &3768396625945034500
+GameObject:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  serializedVersion: 6
+  m_Component:
+  - component: {fileID: 2419238578547944628}
+  - component: {fileID: 8189572720220219611}
+  - component: {fileID: 4974229750032595336}
+  - component: {fileID: 7578423135753975541}
+  m_Layer: 5
+  m_Name: Viewport
+  m_TagString: Untagged
+  m_Icon: {fileID: 0}
+  m_NavMeshLayer: 0
+  m_StaticEditorFlags: 0
+  m_IsActive: 1
+--- !u!224 &2419238578547944628
+RectTransform:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  m_GameObject: {fileID: 3768396625945034500}
+  m_LocalRotation: {x: -0, y: -0, z: -0, w: 1}
+  m_LocalPosition: {x: 0, y: 0, z: 0}
+  m_LocalScale: {x: 1, y: 1, z: 1}
+  m_ConstrainProportionsScale: 0
+  m_Children:
+  - {fileID: 6284254867043529287}
+  m_Father: {fileID: 1729325788605878995}
+  m_RootOrder: 0
+  m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
+  m_AnchorMin: {x: 0, y: 0}
+  m_AnchorMax: {x: 1, y: 1}
+  m_AnchoredPosition: {x: 0, y: 0}
+  m_SizeDelta: {x: 0, y: 0}
+  m_Pivot: {x: 0, y: 1}
+--- !u!222 &8189572720220219611
+CanvasRenderer:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  m_GameObject: {fileID: 3768396625945034500}
+  m_CullTransparentMesh: 1
+--- !u!114 &4974229750032595336
+MonoBehaviour:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  m_GameObject: {fileID: 3768396625945034500}
+  m_Enabled: 1
+  m_EditorHideFlags: 0
+  m_Script: {fileID: 11500000, guid: fe87c0e1cc204ed48ad3b37840f39efc, type: 3}
+  m_Name: 
+  m_EditorClassIdentifier: 
+  m_Material: {fileID: 0}
+  m_Color: {r: 1, g: 1, b: 1, a: 1}
+  m_RaycastTarget: 1
+  m_RaycastPadding: {x: 0, y: 0, z: 0, w: 0}
+  m_Maskable: 1
+  m_OnCullStateChanged:
+    m_PersistentCalls:
+      m_Calls: []
+  m_Sprite: {fileID: 10917, guid: 0000000000000000f000000000000000, type: 0}
+  m_Type: 1
+  m_PreserveAspect: 0
+  m_FillCenter: 1
+  m_FillMethod: 4
+  m_FillAmount: 1
+  m_FillClockwise: 1
+  m_FillOrigin: 0
+  m_UseSpriteMesh: 0
+  m_PixelsPerUnitMultiplier: 1
+--- !u!114 &7578423135753975541
+MonoBehaviour:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  m_GameObject: {fileID: 3768396625945034500}
+  m_Enabled: 1
+  m_EditorHideFlags: 0
+  m_Script: {fileID: 11500000, guid: 31a19414c41e5ae4aae2af33fee712f6, type: 3}
+  m_Name: 
+  m_EditorClassIdentifier: 
+  m_ShowMaskGraphic: 0
+--- !u!1 &5225497099990966800
+GameObject:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  serializedVersion: 6
+  m_Component:
+  - component: {fileID: 6220833517895301435}
+  - component: {fileID: 7284829754055528321}
+  - component: {fileID: 8912663260059073967}
+  m_Layer: 5
+  m_Name: RawImage
+  m_TagString: Untagged
+  m_Icon: {fileID: 0}
+  m_NavMeshLayer: 0
+  m_StaticEditorFlags: 0
+  m_IsActive: 1
+--- !u!224 &6220833517895301435
+RectTransform:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  m_GameObject: {fileID: 5225497099990966800}
+  m_LocalRotation: {x: 0, y: 0, z: 0, w: 1}
+  m_LocalPosition: {x: 0, y: 0, z: 0}
+  m_LocalScale: {x: 1, y: 1, z: 1}
+  m_ConstrainProportionsScale: 0
+  m_Children: []
+  m_Father: {fileID: 8949749624788767784}
+  m_RootOrder: 1
+  m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
+  m_AnchorMin: {x: 0, y: 0}
+  m_AnchorMax: {x: 0, y: 0}
+  m_AnchoredPosition: {x: 0, y: 0}
+  m_SizeDelta: {x: 640, y: 480}
+  m_Pivot: {x: 0, y: 0}
+--- !u!222 &7284829754055528321
+CanvasRenderer:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  m_GameObject: {fileID: 5225497099990966800}
+  m_CullTransparentMesh: 1
+--- !u!114 &8912663260059073967
+MonoBehaviour:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  m_GameObject: {fileID: 5225497099990966800}
+  m_Enabled: 1
+  m_EditorHideFlags: 0
+  m_Script: {fileID: 11500000, guid: 1344c3c82d62a2a41a3576d8abb8e3ea, type: 3}
+  m_Name: 
+  m_EditorClassIdentifier: 
+  m_Material: {fileID: 0}
+  m_Color: {r: 1, g: 1, b: 1, a: 0.078431375}
+  m_RaycastTarget: 1
+  m_RaycastPadding: {x: 0, y: 0, z: 0, w: 0}
+  m_Maskable: 1
+  m_OnCullStateChanged:
+    m_PersistentCalls:
+      m_Calls: []
+  m_Texture: {fileID: 0}
+  m_UVRect:
+    serializedVersion: 2
+    x: 0
+    y: 0
+    width: 1
+    height: 1
+--- !u!1 &5611403938071344045
+GameObject:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  serializedVersion: 6
+  m_Component:
+  - component: {fileID: 1046681076883175317}
+  - component: {fileID: 2656010605129957236}
+  m_Layer: 5
+  m_Name: Top
+  m_TagString: Untagged
+  m_Icon: {fileID: 0}
+  m_NavMeshLayer: 0
+  m_StaticEditorFlags: 0
+  m_IsActive: 1
+--- !u!224 &1046681076883175317
+RectTransform:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  m_GameObject: {fileID: 5611403938071344045}
+  m_LocalRotation: {x: 0, y: 0, z: 0, w: 1}
+  m_LocalPosition: {x: 0, y: 0, z: 0}
+  m_LocalScale: {x: 1, y: 1, z: 1}
+  m_ConstrainProportionsScale: 0
+  m_Children:
+  - {fileID: 2958661771794885324}
+  m_Father: {fileID: 6284254867043529287}
+  m_RootOrder: 0
+  m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
+  m_AnchorMin: {x: 0, y: 1}
+  m_AnchorMax: {x: 0, y: 1}
+  m_AnchoredPosition: {x: 135, y: -50}
+  m_SizeDelta: {x: 270, y: 100}
+  m_Pivot: {x: 0.5, y: 0.5}
+--- !u!222 &2656010605129957236
+CanvasRenderer:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  m_GameObject: {fileID: 5611403938071344045}
+  m_CullTransparentMesh: 1
+--- !u!1 &5844032987995997011
+GameObject:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  serializedVersion: 6
+  m_Component:
+  - component: {fileID: 6284254867043529287}
+  - component: {fileID: 3625880013060399471}
+  - component: {fileID: 1990553899534100246}
+  m_Layer: 5
+  m_Name: Content
+  m_TagString: Untagged
+  m_Icon: {fileID: 0}
+  m_NavMeshLayer: 0
+  m_StaticEditorFlags: 0
+  m_IsActive: 1
+--- !u!224 &6284254867043529287
+RectTransform:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  m_GameObject: {fileID: 5844032987995997011}
+  m_LocalRotation: {x: -0, y: -0, z: -0, w: 1}
+  m_LocalPosition: {x: 0, y: 0, z: 0}
+  m_LocalScale: {x: 1, y: 1, z: 1}
+  m_ConstrainProportionsScale: 0
+  m_Children:
+  - {fileID: 1046681076883175317}
+  - {fileID: 6534303979265014755}
+  m_Father: {fileID: 2419238578547944628}
+  m_RootOrder: 0
+  m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
+  m_AnchorMin: {x: 0, y: 1}
+  m_AnchorMax: {x: 1, y: 1}
+  m_AnchoredPosition: {x: -0.00018481453, y: 0}
+  m_SizeDelta: {x: 0, y: 100}
+  m_Pivot: {x: 0, y: 1}
+--- !u!114 &3625880013060399471
+MonoBehaviour:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  m_GameObject: {fileID: 5844032987995997011}
+  m_Enabled: 1
+  m_EditorHideFlags: 0
+  m_Script: {fileID: 11500000, guid: 3245ec927659c4140ac4f8d17403cc18, type: 3}
+  m_Name: 
+  m_EditorClassIdentifier: 
+  m_HorizontalFit: 2
+  m_VerticalFit: 0
+--- !u!114 &1990553899534100246
+MonoBehaviour:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  m_GameObject: {fileID: 5844032987995997011}
+  m_Enabled: 1
+  m_EditorHideFlags: 0
+  m_Script: {fileID: 11500000, guid: 30649d3a9faa99c48a7b1166b86bf2a0, type: 3}
+  m_Name: 
+  m_EditorClassIdentifier: 
+  m_Padding:
+    m_Left: 0
+    m_Right: 0
+    m_Top: 0
+    m_Bottom: 0
+  m_ChildAlignment: 0
+  m_Spacing: 0
+  m_ChildForceExpandWidth: 1
+  m_ChildForceExpandHeight: 1
+  m_ChildControlWidth: 0
+  m_ChildControlHeight: 0
+  m_ChildScaleWidth: 0
+  m_ChildScaleHeight: 0
+  m_ReverseArrangement: 0
+--- !u!1 &7190674170000630867
+GameObject:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  serializedVersion: 6
+  m_Component:
+  - component: {fileID: 7734590451114863976}
+  - component: {fileID: 1292237656889215327}
+  - component: {fileID: 3855182295300169514}
+  m_Layer: 5
+  m_Name: Big (1)
+  m_TagString: Untagged
+  m_Icon: {fileID: 0}
+  m_NavMeshLayer: 0
+  m_StaticEditorFlags: 0
+  m_IsActive: 1
+--- !u!224 &7734590451114863976
+RectTransform:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  m_GameObject: {fileID: 7190674170000630867}
+  m_LocalRotation: {x: -0, y: -0, z: -0, w: 1}
+  m_LocalPosition: {x: 0, y: 0, z: 0}
+  m_LocalScale: {x: 1, y: 1, z: 1}
+  m_ConstrainProportionsScale: 0
+  m_Children: []
+  m_Father: {fileID: 6777327179038105997}
+  m_RootOrder: 0
+  m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
+  m_AnchorMin: {x: 0.5, y: 0.5}
+  m_AnchorMax: {x: 0.5, y: 0.5}
+  m_AnchoredPosition: {x: 0, y: 0}
+  m_SizeDelta: {x: 100, y: 100}
+  m_Pivot: {x: 0.5, y: 0.5}
+--- !u!222 &1292237656889215327
+CanvasRenderer:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  m_GameObject: {fileID: 7190674170000630867}
+  m_CullTransparentMesh: 1
+--- !u!114 &3855182295300169514
+MonoBehaviour:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  m_GameObject: {fileID: 7190674170000630867}
+  m_Enabled: 1
+  m_EditorHideFlags: 0
+  m_Script: {fileID: 11500000, guid: 1344c3c82d62a2a41a3576d8abb8e3ea, type: 3}
+  m_Name: 
+  m_EditorClassIdentifier: 
+  m_Material: {fileID: 0}
+  m_Color: {r: 1, g: 1, b: 1, a: 1}
+  m_RaycastTarget: 1
+  m_RaycastPadding: {x: 0, y: 0, z: 0, w: 0}
+  m_Maskable: 1
+  m_OnCullStateChanged:
+    m_PersistentCalls:
+      m_Calls: []
+  m_Texture: {fileID: 2800000, guid: 538ab41c860fe1b49a78a49218e38ace, type: 3}
+  m_UVRect:
+    serializedVersion: 2
+    x: 0
+    y: 0
+    width: 1
+    height: 1
+--- !u!1 &8410583486655861759
+GameObject:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  serializedVersion: 6
+  m_Component:
+  - component: {fileID: 6777327179038105997}
+  - component: {fileID: 7966215883991201126}
+  - component: {fileID: 8132153799750047636}
+  - component: {fileID: 428346803059711955}
+  m_Layer: 5
+  m_Name: Big
+  m_TagString: Untagged
+  m_Icon: {fileID: 0}
+  m_NavMeshLayer: 0
+  m_StaticEditorFlags: 0
+  m_IsActive: 1
+--- !u!224 &6777327179038105997
+RectTransform:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  m_GameObject: {fileID: 8410583486655861759}
+  m_LocalRotation: {x: 0, y: 0, z: 0, w: 1}
+  m_LocalPosition: {x: 0, y: 0, z: 0}
+  m_LocalScale: {x: 1, y: 1, z: 1}
+  m_ConstrainProportionsScale: 0
+  m_Children:
+  - {fileID: 7734590451114863976}
+  m_Father: {fileID: 8949749624788767784}
+  m_RootOrder: 2
+  m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
+  m_AnchorMin: {x: 0, y: 0}
+  m_AnchorMax: {x: 0, y: 0}
+  m_AnchoredPosition: {x: 0, y: 0}
+  m_SizeDelta: {x: 640, y: 480}
+  m_Pivot: {x: 0, y: 0}
+--- !u!222 &7966215883991201126
+CanvasRenderer:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  m_GameObject: {fileID: 8410583486655861759}
+  m_CullTransparentMesh: 1
+--- !u!114 &8132153799750047636
+MonoBehaviour:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  m_GameObject: {fileID: 8410583486655861759}
+  m_Enabled: 1
+  m_EditorHideFlags: 0
+  m_Script: {fileID: 11500000, guid: 1344c3c82d62a2a41a3576d8abb8e3ea, type: 3}
+  m_Name: 
+  m_EditorClassIdentifier: 
+  m_Material: {fileID: 0}
+  m_Color: {r: 1, g: 1, b: 1, a: 1}
+  m_RaycastTarget: 1
+  m_RaycastPadding: {x: 0, y: 0, z: 0, w: 0}
+  m_Maskable: 1
+  m_OnCullStateChanged:
+    m_PersistentCalls:
+      m_Calls: []
+  m_Texture: {fileID: 0}
+  m_UVRect:
+    serializedVersion: 2
+    x: 0
+    y: 0
+    width: 1
+    height: 1
+--- !u!114 &428346803059711955
+MonoBehaviour:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  m_GameObject: {fileID: 8410583486655861759}
+  m_Enabled: 1
+  m_EditorHideFlags: 0
+  m_Script: {fileID: 11500000, guid: 4e29b1a8efbd4b44bb3f3716e73f07ff, type: 3}
+  m_Name: 
+  m_EditorClassIdentifier: 
+  m_Navigation:
+    m_Mode: 3
+    m_WrapAround: 0
+    m_SelectOnUp: {fileID: 0}
+    m_SelectOnDown: {fileID: 0}
+    m_SelectOnLeft: {fileID: 0}
+    m_SelectOnRight: {fileID: 0}
+  m_Transition: 1
+  m_Colors:
+    m_NormalColor: {r: 1, g: 1, b: 1, a: 1}
+    m_HighlightedColor: {r: 0.9607843, g: 0.9607843, b: 0.9607843, a: 1}
+    m_PressedColor: {r: 0.78431374, g: 0.78431374, b: 0.78431374, a: 1}
+    m_SelectedColor: {r: 0.9607843, g: 0.9607843, b: 0.9607843, a: 1}
+    m_DisabledColor: {r: 0.78431374, g: 0.78431374, b: 0.78431374, a: 0.5019608}
+    m_ColorMultiplier: 1
+    m_FadeDuration: 0.1
+  m_SpriteState:
+    m_HighlightedSprite: {fileID: 0}
+    m_PressedSprite: {fileID: 0}
+    m_SelectedSprite: {fileID: 0}
+    m_DisabledSprite: {fileID: 0}
+  m_AnimationTriggers:
+    m_NormalTrigger: Normal
+    m_HighlightedTrigger: Highlighted
+    m_PressedTrigger: Pressed
+    m_SelectedTrigger: Selected
+    m_DisabledTrigger: Disabled
+  m_Interactable: 1
+  m_TargetGraphic: {fileID: 8132153799750047636}
+  m_OnClick:
+    m_PersistentCalls:
+      m_Calls:
+      - m_Target: {fileID: 7708746070731989160}
+        m_TargetAssemblyTypeName: PaiZhaoManager, Assembly-CSharp
+        m_MethodName: playVideo
+        m_Mode: 1
+        m_Arguments:
+          m_ObjectArgument: {fileID: 0}
+          m_ObjectArgumentAssemblyTypeName: UnityEngine.Object, UnityEngine
+          m_IntArgument: 0
+          m_FloatArgument: 0
+          m_StringArgument: 
+          m_BoolArgument: 0
+        m_CallState: 2
+--- !u!1 &9024061106273498676
+GameObject:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  serializedVersion: 6
+  m_Component:
+  - component: {fileID: 2958661771794885324}
+  - component: {fileID: 5532402810553229946}
+  - component: {fileID: 5061051152515473727}
+  m_Layer: 5
+  m_Name: RawImage (1)
+  m_TagString: Untagged
+  m_Icon: {fileID: 0}
+  m_NavMeshLayer: 0
+  m_StaticEditorFlags: 0
+  m_IsActive: 0
+--- !u!224 &2958661771794885324
+RectTransform:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  m_GameObject: {fileID: 9024061106273498676}
+  m_LocalRotation: {x: -0, y: -0, z: -0, w: 1}
+  m_LocalPosition: {x: 0, y: 0, z: 0}
+  m_LocalScale: {x: 1, y: 1, z: 1}
+  m_ConstrainProportionsScale: 0
+  m_Children: []
+  m_Father: {fileID: 1046681076883175317}
+  m_RootOrder: 0
+  m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
+  m_AnchorMin: {x: 0.5, y: 0.5}
+  m_AnchorMax: {x: 0.5, y: 0.5}
+  m_AnchoredPosition: {x: 0, y: 0}
+  m_SizeDelta: {x: 100, y: 100}
+  m_Pivot: {x: 0.5, y: 0.5}
+--- !u!222 &5532402810553229946
+CanvasRenderer:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  m_GameObject: {fileID: 9024061106273498676}
+  m_CullTransparentMesh: 1
+--- !u!114 &5061051152515473727
+MonoBehaviour:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  m_GameObject: {fileID: 9024061106273498676}
+  m_Enabled: 1
+  m_EditorHideFlags: 0
+  m_Script: {fileID: 11500000, guid: 1344c3c82d62a2a41a3576d8abb8e3ea, type: 3}
+  m_Name: 
+  m_EditorClassIdentifier: 
+  m_Material: {fileID: 0}
+  m_Color: {r: 1, g: 1, b: 1, a: 1}
+  m_RaycastTarget: 1
+  m_RaycastPadding: {x: 0, y: 0, z: 0, w: 0}
+  m_Maskable: 1
+  m_OnCullStateChanged:
+    m_PersistentCalls:
+      m_Calls: []
+  m_Texture: {fileID: 2800000, guid: 46f69f3374c9503469639f738a7a4cde, type: 3}
+  m_UVRect:
+    serializedVersion: 2
+    x: 0
+    y: 0
+    width: 1
+    height: 1
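
paizhaoList.prefab builds a horizontal ScrollRect whose Content carries a ContentSizeFitter and HorizontalLayoutGroup, plus a full-size "Big" RawImage whose Button calls PaiZhaoManager.playVideo; the root component serializes global, playImg, big and Content. The PaiZhaoManager source is not part of this hunk, so the snippet below is only a rough sketch of that wiring: the itemPrefab field and both method bodies are assumptions, not the committed PaiZhaoManager.cs.

using UnityEngine;
using UnityEngine.UI;

// Hypothetical sketch of a capture-list manager matching the serialized names above.
public class PaiZhaoListSketch : MonoBehaviour
{
    public RawImage big;           // enlarged preview ("Big" object above)
    public RectTransform Content;  // ScrollRect content that receives thumbnails
    public GameObject itemPrefab;  // assumed: the paizhaoItem prefab added earlier

    // Assumed helper: append one captured texture to the horizontal list.
    public void AddCapture(Texture tex)
    {
        GameObject go = Instantiate(itemPrefab, Content);
        RawImage raw = go.GetComponentInChildren<RawImage>();
        if (raw != null) raw.texture = tex;
    }

    // Bound like the "Big" Button's m_OnClick persistent call; here it only
    // toggles the enlarged preview, standing in for the real playVideo logic.
    public void playVideo()
    {
        big.gameObject.SetActive(!big.gameObject.activeSelf);
    }
}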

+ 7 - 0
Assets/FrameWork/ProjectManager/paizhaoList.prefab.meta

@@ -0,0 +1,7 @@
+fileFormatVersion: 2
+guid: cb61b52cc83342e489699f3476640957
+PrefabImporter:
+  externalObjects: {}
+  userData: 
+  assetBundleName: 
+  assetBundleVariant: 

+ 364 - 0
Assets/FrameWork/ProjectManager/拍照.prefab

@@ -84,6 +84,7 @@ GameObject:
   - component: {fileID: 5016533565466159247}
   - component: {fileID: 7262347541084263935}
   - component: {fileID: 4960275866369813479}
+  - component: {fileID: 7803334018238905534}
   m_Layer: 5
   m_Name: "\u62CD\u7167"
   m_TagString: Untagged
@@ -146,6 +147,62 @@ MonoBehaviour:
     y: 0
     width: 1
     height: 1
+--- !u!114 &7803334018238905534
+MonoBehaviour:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  m_GameObject: {fileID: 1703231794821718166}
+  m_Enabled: 1
+  m_EditorHideFlags: 0
+  m_Script: {fileID: 11500000, guid: 4e29b1a8efbd4b44bb3f3716e73f07ff, type: 3}
+  m_Name: 
+  m_EditorClassIdentifier: 
+  m_Navigation:
+    m_Mode: 3
+    m_WrapAround: 0
+    m_SelectOnUp: {fileID: 0}
+    m_SelectOnDown: {fileID: 0}
+    m_SelectOnLeft: {fileID: 0}
+    m_SelectOnRight: {fileID: 0}
+  m_Transition: 1
+  m_Colors:
+    m_NormalColor: {r: 1, g: 1, b: 1, a: 1}
+    m_HighlightedColor: {r: 0.9607843, g: 0.9607843, b: 0.9607843, a: 1}
+    m_PressedColor: {r: 0.78431374, g: 0.78431374, b: 0.78431374, a: 1}
+    m_SelectedColor: {r: 0.9607843, g: 0.9607843, b: 0.9607843, a: 1}
+    m_DisabledColor: {r: 0.78431374, g: 0.78431374, b: 0.78431374, a: 0.5019608}
+    m_ColorMultiplier: 1
+    m_FadeDuration: 0.1
+  m_SpriteState:
+    m_HighlightedSprite: {fileID: 0}
+    m_PressedSprite: {fileID: 0}
+    m_SelectedSprite: {fileID: 0}
+    m_DisabledSprite: {fileID: 0}
+  m_AnimationTriggers:
+    m_NormalTrigger: Normal
+    m_HighlightedTrigger: Highlighted
+    m_PressedTrigger: Pressed
+    m_SelectedTrigger: Selected
+    m_DisabledTrigger: Disabled
+  m_Interactable: 1
+  m_TargetGraphic: {fileID: 0}
+  m_OnClick:
+    m_PersistentCalls:
+      m_Calls:
+      - m_Target: {fileID: 6413818847766296788}
+        m_TargetAssemblyTypeName: ChangeCameraSaoMiao, Assembly-CSharp
+        m_MethodName: paizhaoclick
+        m_Mode: 1
+        m_Arguments:
+          m_ObjectArgument: {fileID: 0}
+          m_ObjectArgumentAssemblyTypeName: UnityEngine.Object, UnityEngine
+          m_IntArgument: 0
+          m_FloatArgument: 0
+          m_StringArgument: 
+          m_BoolArgument: 0
+        m_CallState: 2
 --- !u!1 &1734752791485398805
 GameObject:
   m_ObjectHideFlags: 0
@@ -157,6 +214,7 @@ GameObject:
   - component: {fileID: 7851867678479278875}
   - component: {fileID: 7627018520805612778}
   - component: {fileID: 1304325578614803563}
+  - component: {fileID: 6218150971018074843}
   m_Layer: 5
   m_Name: "\u5F55\u50CF"
   m_TagString: Untagged
@@ -220,6 +278,62 @@ MonoBehaviour:
     y: 0
     width: 1
     height: 1
+--- !u!114 &6218150971018074843
+MonoBehaviour:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  m_GameObject: {fileID: 1734752791485398805}
+  m_Enabled: 1
+  m_EditorHideFlags: 0
+  m_Script: {fileID: 11500000, guid: 4e29b1a8efbd4b44bb3f3716e73f07ff, type: 3}
+  m_Name: 
+  m_EditorClassIdentifier: 
+  m_Navigation:
+    m_Mode: 3
+    m_WrapAround: 0
+    m_SelectOnUp: {fileID: 0}
+    m_SelectOnDown: {fileID: 0}
+    m_SelectOnLeft: {fileID: 0}
+    m_SelectOnRight: {fileID: 0}
+  m_Transition: 1
+  m_Colors:
+    m_NormalColor: {r: 1, g: 1, b: 1, a: 1}
+    m_HighlightedColor: {r: 0.9607843, g: 0.9607843, b: 0.9607843, a: 1}
+    m_PressedColor: {r: 0.78431374, g: 0.78431374, b: 0.78431374, a: 1}
+    m_SelectedColor: {r: 0.9607843, g: 0.9607843, b: 0.9607843, a: 1}
+    m_DisabledColor: {r: 0.78431374, g: 0.78431374, b: 0.78431374, a: 0.5019608}
+    m_ColorMultiplier: 1
+    m_FadeDuration: 0.1
+  m_SpriteState:
+    m_HighlightedSprite: {fileID: 0}
+    m_PressedSprite: {fileID: 0}
+    m_SelectedSprite: {fileID: 0}
+    m_DisabledSprite: {fileID: 0}
+  m_AnimationTriggers:
+    m_NormalTrigger: Normal
+    m_HighlightedTrigger: Highlighted
+    m_PressedTrigger: Pressed
+    m_SelectedTrigger: Selected
+    m_DisabledTrigger: Disabled
+  m_Interactable: 1
+  m_TargetGraphic: {fileID: 1304325578614803563}
+  m_OnClick:
+    m_PersistentCalls:
+      m_Calls:
+      - m_Target: {fileID: 6413818847766296788}
+        m_TargetAssemblyTypeName: ChangeCameraSaoMiao, Assembly-CSharp
+        m_MethodName: luxiangClick
+        m_Mode: 1
+        m_Arguments:
+          m_ObjectArgument: {fileID: 0}
+          m_ObjectArgumentAssemblyTypeName: UnityEngine.Object, UnityEngine
+          m_IntArgument: 0
+          m_FloatArgument: 0
+          m_StringArgument: 
+          m_BoolArgument: 0
+        m_CallState: 2
 --- !u!1 &1820598777092635662
 GameObject:
   m_ObjectHideFlags: 0
@@ -254,6 +368,7 @@ RectTransform:
   - {fileID: 5873708962649615876}
   - {fileID: 5016533565466159247}
   - {fileID: 7851867678479278875}
+  - {fileID: 8921205970611646330}
   m_Father: {fileID: 0}
   m_RootOrder: 0
   m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
@@ -310,9 +425,11 @@ MonoBehaviour:
   m_Name: 
   m_EditorClassIdentifier: 
   global: 1
+  caTexture: {fileID: 7146680759362879240}
   paizhao: {fileID: 1703231794821718166}
   luxiang: {fileID: 1734752791485398805}
   saomiao: {fileID: 1488662223649881619}
+  timerText: {fileID: 7975936281869001249}
 --- !u!1 &3679523137848540852
 GameObject:
   m_ObjectHideFlags: 0
@@ -512,6 +629,253 @@ CanvasRenderer:
   m_PrefabAsset: {fileID: 0}
   m_GameObject: {fileID: 6965139760007539639}
   m_CullTransparentMesh: 1
+--- !u!1 &7339356351296674047
+GameObject:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  serializedVersion: 6
+  m_Component:
+  - component: {fileID: 8921205970611646330}
+  - component: {fileID: 7146680759362879240}
+  m_Layer: 0
+  m_Name: TextureCapture
+  m_TagString: Untagged
+  m_Icon: {fileID: 0}
+  m_NavMeshLayer: 0
+  m_StaticEditorFlags: 0
+  m_IsActive: 1
+--- !u!4 &8921205970611646330
+Transform:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  m_GameObject: {fileID: 7339356351296674047}
+  m_LocalRotation: {x: -0, y: -0, z: -0, w: 1}
+  m_LocalPosition: {x: -1432.2, y: -386.99994, z: 0}
+  m_LocalScale: {x: 1, y: 1, z: 1}
+  m_ConstrainProportionsScale: 0
+  m_Children: []
+  m_Father: {fileID: 646974796005607044}
+  m_RootOrder: 3
+  m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
+--- !u!114 &7146680759362879240
+MonoBehaviour:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  m_GameObject: {fileID: 7339356351296674047}
+  m_Enabled: 1
+  m_EditorHideFlags: 0
+  m_Script: {fileID: 11500000, guid: de5a7a2f274da2340a2de24a1fffee45, type: 3}
+  m_Name: 
+  m_EditorClassIdentifier: 
+  _encoderHintsWindows:
+    videoHints:
+      averageBitrate: 0
+      maximumBitrate: 0
+      quality: 1
+      keyframeInterval: 0
+      allowFastStartStreamingPostProcess: 1
+      supportTransparency: 0
+      useHardwareEncoding: 1
+      injectStereoPacking: 1
+      stereoPacking: 0
+      injectSphericalVideoLayout: 1
+      sphericalVideoLayout: 0
+      enableFragmentedWriting: 0
+      movieFragmentInterval: 120
+      colourSpace: -1
+      sourceWidth: 0
+      sourceHeight: 0
+      androidNoCaptureRotation: 0
+      transparency: 0
+      androidVulkanPreTransform: 0
+    imageHints:
+      quality: 0.85
+      supportTransparency: 0
+      colourSpace: -1
+      sourceWidth: 0
+      sourceHeight: 0
+      transparency: 0
+      androidVulkanPreTransform: 0
+  _encoderHintsMacOS:
+    videoHints:
+      averageBitrate: 0
+      maximumBitrate: 0
+      quality: 1
+      keyframeInterval: 0
+      allowFastStartStreamingPostProcess: 1
+      supportTransparency: 0
+      useHardwareEncoding: 1
+      injectStereoPacking: 1
+      stereoPacking: 0
+      injectSphericalVideoLayout: 1
+      sphericalVideoLayout: 0
+      enableFragmentedWriting: 0
+      movieFragmentInterval: 120
+      colourSpace: -1
+      sourceWidth: 0
+      sourceHeight: 0
+      androidNoCaptureRotation: 0
+      transparency: 0
+      androidVulkanPreTransform: 0
+    imageHints:
+      quality: 0.85
+      supportTransparency: 0
+      colourSpace: -1
+      sourceWidth: 0
+      sourceHeight: 0
+      transparency: 0
+      androidVulkanPreTransform: 0
+  _encoderHintsIOS:
+    videoHints:
+      averageBitrate: 0
+      maximumBitrate: 0
+      quality: 1
+      keyframeInterval: 0
+      allowFastStartStreamingPostProcess: 1
+      supportTransparency: 0
+      useHardwareEncoding: 1
+      injectStereoPacking: 1
+      stereoPacking: 0
+      injectSphericalVideoLayout: 1
+      sphericalVideoLayout: 0
+      enableFragmentedWriting: 0
+      movieFragmentInterval: 120
+      colourSpace: -1
+      sourceWidth: 0
+      sourceHeight: 0
+      androidNoCaptureRotation: 0
+      transparency: 0
+      androidVulkanPreTransform: 0
+    imageHints:
+      quality: 0.85
+      supportTransparency: 0
+      colourSpace: -1
+      sourceWidth: 0
+      sourceHeight: 0
+      transparency: 0
+      androidVulkanPreTransform: 0
+  _encoderHintsAndroid:
+    videoHints:
+      averageBitrate: 0
+      maximumBitrate: 0
+      quality: 1
+      keyframeInterval: 0
+      allowFastStartStreamingPostProcess: 1
+      supportTransparency: 0
+      useHardwareEncoding: 1
+      injectStereoPacking: 1
+      stereoPacking: 0
+      injectSphericalVideoLayout: 1
+      sphericalVideoLayout: 0
+      enableFragmentedWriting: 0
+      movieFragmentInterval: 120
+      colourSpace: -1
+      sourceWidth: 0
+      sourceHeight: 0
+      androidNoCaptureRotation: 0
+      transparency: 0
+      androidVulkanPreTransform: 0
+    imageHints:
+      quality: 0.85
+      supportTransparency: 0
+      colourSpace: -1
+      sourceWidth: 0
+      sourceHeight: 0
+      transparency: 0
+      androidVulkanPreTransform: 0
+  _captureKey: 0
+  _isRealTime: 1
+  _persistAcrossSceneLoads: 0
+  _startTrigger: 0
+  _startDelay: 0
+  _startDelaySeconds: 0
+  _stopMode: 0
+  _stopFrames: 0
+  _stopSeconds: 0
+  _pauseCaptureOnAppPause: 1
+  _videoCodecPriorityWindows:
+  - H264
+  - HEVC
+  - Lagarith Lossless Codec
+  - Uncompressed
+  - x264vfw - H.264/MPEG-4 AVC codec
+  - Xvid MPEG-4 Codec
+  _videoCodecPriorityMacOS:
+  - H264
+  - HEVC
+  - Apple ProRes 422
+  - Apple ProRes 4444
+  _videoCodecPriorityAndroid:
+  - H264
+  - HEVC
+  _audioCodecPriorityWindows:
+  - AAC
+  - Uncompressed
+  _audioCodecPriorityMacOS:
+  - AAC
+  - FLAC
+  - Apple Lossless
+  - Linear PCM
+  - Uncompresssed
+  _audioCodecPriorityAndroid:
+  - AAC
+  _frameRate: 30
+  _timelapseScale: 1
+  _frameUpdateMode: 0
+  _downScale: 1
+  _maxVideoSize: {x: 0, y: 0}
+  _forceVideoCodecIndexWindows: -1
+  _forceVideoCodecIndexMacOS: 0
+  _forceVideoCodecIndexIOS: 0
+  _forceVideoCodecIndexAndroid: 0
+  _forceAudioCodecIndexWindows: -1
+  _forceAudioCodecIndexMacOS: 0
+  _forceAudioCodecIndexIOS: 0
+  _forceAudioCodecIndexAndroid: -1
+  _flipVertically: 0
+  _forceGpuFlush: 0
+  _useWaitForEndOfFrame: 1
+  _androidUpdateMediaGallery: 1
+  _androidNoCaptureRotation: 0
+  _logCaptureStartStop: 1
+  _audioCaptureSource: 1
+  _unityAudioCapture: {fileID: 0}
+  _forceAudioInputDeviceIndex: 0
+  _manualAudioSampleRate: 48000
+  _manualAudioChannelCount: 2
+  _outputTarget: 0
+  _outputFolderType: 2
+  _outputFolderPath: Captures
+  _filenamePrefix: TextureCapture
+  _appendFilenameTimestamp: 1
+  _allowManualFileExtension: 0
+  _filenameExtension: mp4
+  _namedPipePath: \\.\pipe\test_pipe
+  _imageSequenceStartFrame: 0
+  _imageSequenceZeroDigits: 6
+  _imageSequenceFormatWindows: 0
+  _imageSequenceFormatMacOS: 0
+  _imageSequenceFormatIOS: 0
+  _imageSequenceFormatAndroid: 0
+  _renderResolution: 14
+  _renderSize: {x: 1, y: 1}
+  _renderAntiAliasing: -1
+  _useMotionBlur: 0
+  _motionBlurSamples: 0
+  _motionBlurCameras: []
+  _motionBlur: {fileID: 0}
+  _allowVSyncDisable: 1
+  _supportTextureRecreate: 0
+  _minimumDiskSpaceMB: -1
+  _timelineController: {fileID: 0}
+  _videoPlayerController: {fileID: 0}
+  _manualUpdate: 0
 --- !u!1 &7439144582993703312
 GameObject:
   m_ObjectHideFlags: 0
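
The 拍照.prefab changes above add Button components to the 拍照 (photo) and 录像 (record) objects, binding them to ChangeCameraSaoMiao.paizhaoclick and ChangeCameraSaoMiao.luxiangClick, and serialize the new caTexture capture component plus a timerText reference. ChangeCameraSaoMiao.cs is not shown in this hunk, so the following is a loose sketch of what those two handlers typically do, built only on stock Unity APIs; the capture toggling and timer formatting are assumptions, not the committed implementation.

using UnityEngine;
using UnityEngine.UI;

// Hypothetical sketch; the real handlers live in ChangeCameraSaoMiao.cs.
public class CaptureButtonsSketch : MonoBehaviour
{
    public Behaviour caTexture;  // serialized above: capture component to start/stop
    public Text timerText;       // serialized above: elapsed recording time readout

    private bool recording;
    private float startTime;

    // Photo (拍照) button: grab the current frame as a texture (assumed behaviour).
    public void paizhaoclick()
    {
        Texture2D shot = ScreenCapture.CaptureScreenshotAsTexture();
        Debug.Log("Captured frame " + shot.width + "x" + shot.height);
        // ...hand the texture on to the capture list / upload path...
    }

    // Record (录像) button: toggle recording and drive the timer label (assumed behaviour).
    public void luxiangClick()
    {
        recording = !recording;
        startTime = Time.time;
        if (caTexture != null)
            caTexture.enabled = recording;  // stand-in for a real start/stop capture call
    }

    private void Update()
    {
        if (!recording || timerText == null) return;
        int s = Mathf.FloorToInt(Time.time - startTime);
        timerText.text = string.Format("{0:00}:{1:00}", s / 60, s % 60);
    }
}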

+ 13 - 0
Assets/FrameWork/SaoMiao.prefab

@@ -13,6 +13,7 @@ GameObject:
   - component: {fileID: 1620164308813382645}
   - component: {fileID: 1620164308813382644}
   - component: {fileID: 1620164308813382648}
+  - component: {fileID: 8235294428274845642}
   m_Layer: 5
   m_Name: SaoMiao
   m_TagString: Untagged
@@ -115,6 +116,18 @@ MonoBehaviour:
   m_Name: 
   m_EditorClassIdentifier: 
   global: 0
+--- !u!114 &8235294428274845642
+MonoBehaviour:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  m_GameObject: {fileID: 1620164308813382647}
+  m_Enabled: 1
+  m_EditorHideFlags: 0
+  m_Script: {fileID: 11500000, guid: 948b9ab040508dc48a294230f85b39e2, type: 3}
+  m_Name: 
+  m_EditorClassIdentifier: 
 --- !u!1001 &7016956402451150101
 PrefabInstance:
   m_ObjectHideFlags: 0

+ 16 - 0
Assets/FrameWork/Scenes/Edustry.unity

@@ -134,6 +134,7 @@ GameObject:
   - component: {fileID: 549249285}
   - component: {fileID: 549249283}
   - component: {fileID: 549249284}
+  - component: {fileID: 549249286}
   m_Layer: 5
   m_Name: MREduastryStart
   m_TagString: Untagged
@@ -189,6 +190,21 @@ Transform:
   m_Father: {fileID: 0}
   m_RootOrder: 3
   m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
+--- !u!114 &549249286
+MonoBehaviour:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  m_GameObject: {fileID: 549249281}
+  m_Enabled: 1
+  m_EditorHideFlags: 0
+  m_Script: {fileID: 11500000, guid: d4c33e92d8dc2934e919ffa2ff71afd6, type: 3}
+  m_Name: 
+  m_EditorClassIdentifier: 
+  isAuto: 0
+  CaptureImage: {fileID: 0}
+  RGBCamTexture: {fileID: 0}
 --- !u!1 &639320401
 GameObject:
   m_ObjectHideFlags: 0
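
The Edustry scene change adds one more component (fileID 549249286) to MREduastryStart, with isAuto, CaptureImage and RGBCamTexture left unassigned. Its script is not in this hunk; purely as a loose illustration, an RGB camera feed onto a UI RawImage with stock Unity APIs could look like the sketch below, where the class name and the RawImage assumption are hypothetical.

using UnityEngine;
using UnityEngine.UI;

// Loose illustration only; not the committed script behind fileID 549249286.
public class RGBCamFeedSketch : MonoBehaviour
{
    public bool isAuto;            // mirrors the serialized isAuto flag
    public RawImage CaptureImage;  // assumed to be a UI target for camera frames

    private WebCamTexture cam;

    private void Start()
    {
        if (isAuto) StartFeed();
    }

    public void StartFeed()
    {
        if (cam == null) cam = new WebCamTexture();
        cam.Play();
        if (CaptureImage != null) CaptureImage.texture = cam;
    }

    public void StopFeed()
    {
        if (cam != null && cam.isPlaying) cam.Stop();
    }
}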

+ 53 - 3
Assets/FrameWork/Scripts/Window/WindowsManager.cs

@@ -1,4 +1,9 @@
 using LitJson;
+using OpenCVForUnity.CoreModule;
+using OpenCVForUnity.UnityUtils;
+using SC.XR.Unity.Module_InputSystem;
+using SC.XR.Unity.Module_InputSystem.InputDeviceGC.KS;
+using SC.XR.Unity.Module_InputSystem.InputDeviceHead;
 using System;
 using System.Collections;
 using System.Collections.Generic;
@@ -17,11 +22,31 @@ public class WindowsManager : MonoSingleton<WindowsManager>
     public List<windowItemGameObject> windowItemGameObjectList;
     public WindowConfig windowsConfig;
 
-
+    public void DestroyText2D(Texture2D t2)
+    {
+        Destroy(t2);
+    }
+    InputDeviceHeadPart headPart;
+    InputDeviceKSPart LeftPart;
+    InputDeviceKSPart RightPart;
     private void Awake()
     {
         PlayerPrefs.DeleteAll();
         StartCoroutine(CheckIsStart());
+        
+        if (API_GSXR_Module_InputSystem_Head.GSXR_Head != null)
+        {
+            headPart = API_GSXR_Module_InputSystem_Head.GSXR_Head;
+            //headPart.detectorBase.pointerBase.gameObject;
+        }
+        if (API_GSXR_Module_InputSystem_KS.GSXR_KSRight != null)
+        {
+            RightPart = API_GSXR_Module_InputSystem_KS.GSXR_KSRight;
+        }
+        if (API_GSXR_Module_InputSystem_KS.GSXR_KSLeft != null)
+        {
+            LeftPart  = API_GSXR_Module_InputSystem_KS.GSXR_KSLeft;
+        }
     }
 
     void initShowWindow()
@@ -42,6 +67,17 @@ public class WindowsManager : MonoSingleton<WindowsManager>
     public bool isStart = false;
     IEnumerator CheckIsStart()
     {
+        /// Magic code, do not delete: without this main-thread warm-up, using OpenCV from a child thread crashes the app
+        try
+        {
+            Texture2D texture2D = Resources.Load("touxiang") as Texture2D;
+            Mat imgMat = new Mat(texture2D.height, texture2D.width, CvType.CV_8UC4);
+
+            Utils.texture2DToMat(texture2D, imgMat);
+            //  Destroy(texture2D);
+            imgMat.release();
+        }
+        catch { }
         while (HttpSDKAction.Instance.jsonData == "")
         {
             yield return null;
@@ -64,7 +100,12 @@ public class WindowsManager : MonoSingleton<WindowsManager>
         jt.transform.localEulerAngles = Vector3.zero;
         GameObject.Instantiate(GetPrefab(windowType.XunJian, "SaoMiao"), OpenXRCamera.Instance.head.transform);
         GameObject.Instantiate(GetPrefab(windowType.XunJian, "ARSaoTu"));
+        GameObject.Instantiate(GetPrefab(windowType.ProjectMain, "PlayerToImage"));
+
+        tipInfoGameObject = GameObject.Instantiate(GetPrefab(windowType.ProjectMain, "TipInfo"));
     }
+
+    public GameObject tipInfoGameObject;
     public static System.Type GetTypeByName(string name)
     {
         foreach (Assembly assembly in System.AppDomain.CurrentDomain.GetAssemblies())
@@ -316,9 +357,19 @@ public class WindowsManager : MonoSingleton<WindowsManager>
         }
     }
 
+    void updatePart()
+    {
+        if (tipInfoGameObject!=null&&API_GSXR_Module_InputSystem.GSXR_Position!=null)
+        {
+            tipInfoGameObject.transform.position = API_GSXR_Module_InputSystem.GSXR_Position;
+            tipInfoGameObject.transform.LookAt(OpenXRCamera.Instance.head);
+        }
+    }
+
     private void Update()
     {
-        if(tdlist.Count>0&&!isShowTip)
+        updatePart();
+        if (tdlist.Count>0&&!isShowTip)
         {
             TipData td = tdlist.Dequeue();
             Debug.Log("准备显示Tip");
@@ -472,7 +523,6 @@ public class WindowsManager : MonoSingleton<WindowsManager>
         WindowsManager.Instance.show(WindowConfig.windowType.Error, false, WindowsManager.Instance.getErrorData("提示", "敬请期待!", Color.gray, "icon", backTip, false, "敬请期待", 5).ToJson());
 
     }
-
     public class TipData
     {
         public windowType wt;
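
Beyond the OpenCV warm-up, the WindowsManager.cs hunk shows tips being driven from a queue in Update: tdlist is dequeued only while isShowTip is clear, and the new updatePart keeps tipInfoGameObject at GSXR_Position facing the head camera. The enqueue side is not part of this hunk, so the snippet below is only a generic sketch of that queue-plus-flag pattern; the class, the three-second duration and the log message are assumptions rather than the committed code.

using System.Collections;
using System.Collections.Generic;
using UnityEngine;

// Generic sketch of the queued-tip pattern; not the committed WindowsManager code.
public class TipQueueSketch : MonoBehaviour
{
    private readonly Queue<string> tips = new Queue<string>();
    private bool isShowTip;

    // Callers enqueue tip text; Update pops one request at a time.
    public void EnqueueTip(string message)
    {
        tips.Enqueue(message);
    }

    private void Update()
    {
        if (tips.Count > 0 && !isShowTip)
            StartCoroutine(ShowTip(tips.Dequeue()));
    }

    private IEnumerator ShowTip(string message)
    {
        isShowTip = true;
        Debug.Log("Show tip: " + message);    // stand-in for activating the Tip prefab
        yield return new WaitForSeconds(3f);  // assumed display duration
        isShowTip = false;
    }
}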

BIN
Assets/FrameWork/Texture/半透明1.png


+ 135 - 0
Assets/FrameWork/Texture/半透明1.png.meta

@@ -0,0 +1,135 @@
+fileFormatVersion: 2
+guid: fd9ab80445e22b94ea57d15b5945a173
+TextureImporter:
+  internalIDToNameTable: []
+  externalObjects: {}
+  serializedVersion: 12
+  mipmaps:
+    mipMapMode: 0
+    enableMipMap: 1
+    sRGBTexture: 1
+    linearTexture: 0
+    fadeOut: 0
+    borderMipMap: 0
+    mipMapsPreserveCoverage: 0
+    alphaTestReferenceValue: 0.5
+    mipMapFadeDistanceStart: 1
+    mipMapFadeDistanceEnd: 3
+  bumpmap:
+    convertToNormalMap: 0
+    externalNormalMap: 0
+    heightScale: 0.25
+    normalMapFilter: 0
+  isReadable: 0
+  streamingMipmaps: 0
+  streamingMipmapsPriority: 0
+  vTOnly: 0
+  ignoreMasterTextureLimit: 0
+  grayScaleToAlpha: 0
+  generateCubemap: 6
+  cubemapConvolution: 0
+  seamlessCubemap: 0
+  textureFormat: 1
+  maxTextureSize: 2048
+  textureSettings:
+    serializedVersion: 2
+    filterMode: 1
+    aniso: 1
+    mipBias: 0
+    wrapU: 0
+    wrapV: 0
+    wrapW: 0
+  nPOTScale: 0
+  lightmap: 0
+  compressionQuality: 50
+  spriteMode: 1
+  spriteExtrude: 1
+  spriteMeshType: 1
+  alignment: 0
+  spritePivot: {x: 0.5, y: 0.5}
+  spritePixelsToUnits: 100
+  spriteBorder: {x: 0, y: 0, z: 0, w: 0}
+  spriteGenerateFallbackPhysicsShape: 1
+  alphaUsage: 1
+  alphaIsTransparency: 0
+  spriteTessellationDetail: -1
+  textureType: 8
+  textureShape: 1
+  singleChannelComponent: 0
+  flipbookRows: 1
+  flipbookColumns: 1
+  maxTextureSizeSet: 0
+  compressionQualitySet: 0
+  textureFormatSet: 0
+  ignorePngGamma: 0
+  applyGammaDecoding: 0
+  cookieLightType: 0
+  platformSettings:
+  - serializedVersion: 3
+    buildTarget: DefaultTexturePlatform
+    maxTextureSize: 2048
+    resizeAlgorithm: 0
+    textureFormat: -1
+    textureCompression: 1
+    compressionQuality: 50
+    crunchedCompression: 0
+    allowsAlphaSplitting: 0
+    overridden: 0
+    androidETC2FallbackOverride: 0
+    forceMaximumCompressionQuality_BC6H_BC7: 0
+  - serializedVersion: 3
+    buildTarget: Standalone
+    maxTextureSize: 2048
+    resizeAlgorithm: 0
+    textureFormat: -1
+    textureCompression: 1
+    compressionQuality: 50
+    crunchedCompression: 0
+    allowsAlphaSplitting: 0
+    overridden: 0
+    androidETC2FallbackOverride: 0
+    forceMaximumCompressionQuality_BC6H_BC7: 0
+  - serializedVersion: 3
+    buildTarget: Server
+    maxTextureSize: 2048
+    resizeAlgorithm: 0
+    textureFormat: -1
+    textureCompression: 1
+    compressionQuality: 50
+    crunchedCompression: 0
+    allowsAlphaSplitting: 0
+    overridden: 0
+    androidETC2FallbackOverride: 0
+    forceMaximumCompressionQuality_BC6H_BC7: 0
+  - serializedVersion: 3
+    buildTarget: Android
+    maxTextureSize: 2048
+    resizeAlgorithm: 0
+    textureFormat: -1
+    textureCompression: 1
+    compressionQuality: 50
+    crunchedCompression: 0
+    allowsAlphaSplitting: 0
+    overridden: 0
+    androidETC2FallbackOverride: 0
+    forceMaximumCompressionQuality_BC6H_BC7: 0
+  spriteSheet:
+    serializedVersion: 2
+    sprites: []
+    outline: []
+    physicsShape: []
+    bones: []
+    spriteID: 5e97eb03825dee720800000000000000
+    internalID: 0
+    vertices: []
+    indices: 
+    edges: []
+    weights: []
+    secondaryTextures: []
+    nameFileIdTable: {}
+  spritePackingTag: 
+  pSDRemoveMatte: 0
+  pSDShowRemoveMatteOption: 0
+  userData: 
+  assetBundleName: 
+  assetBundleVariant: 

BIN
Assets/FrameWork/Texture/半透明2.png


+ 135 - 0
Assets/FrameWork/Texture/半透明2.png.meta

@@ -0,0 +1,135 @@
+fileFormatVersion: 2
+guid: 78ef8fc8086f8b04fbcddf574708681f
+TextureImporter:
+  internalIDToNameTable: []
+  externalObjects: {}
+  serializedVersion: 12
+  mipmaps:
+    mipMapMode: 0
+    enableMipMap: 1
+    sRGBTexture: 1
+    linearTexture: 0
+    fadeOut: 0
+    borderMipMap: 0
+    mipMapsPreserveCoverage: 0
+    alphaTestReferenceValue: 0.5
+    mipMapFadeDistanceStart: 1
+    mipMapFadeDistanceEnd: 3
+  bumpmap:
+    convertToNormalMap: 0
+    externalNormalMap: 0
+    heightScale: 0.25
+    normalMapFilter: 0
+  isReadable: 0
+  streamingMipmaps: 0
+  streamingMipmapsPriority: 0
+  vTOnly: 0
+  ignoreMasterTextureLimit: 0
+  grayScaleToAlpha: 0
+  generateCubemap: 6
+  cubemapConvolution: 0
+  seamlessCubemap: 0
+  textureFormat: 1
+  maxTextureSize: 2048
+  textureSettings:
+    serializedVersion: 2
+    filterMode: 1
+    aniso: 1
+    mipBias: 0
+    wrapU: 0
+    wrapV: 0
+    wrapW: 0
+  nPOTScale: 0
+  lightmap: 0
+  compressionQuality: 50
+  spriteMode: 1
+  spriteExtrude: 1
+  spriteMeshType: 1
+  alignment: 0
+  spritePivot: {x: 0.5, y: 0.5}
+  spritePixelsToUnits: 100
+  spriteBorder: {x: 16, y: 15, z: 13, w: 15}
+  spriteGenerateFallbackPhysicsShape: 1
+  alphaUsage: 1
+  alphaIsTransparency: 0
+  spriteTessellationDetail: -1
+  textureType: 8
+  textureShape: 1
+  singleChannelComponent: 0
+  flipbookRows: 1
+  flipbookColumns: 1
+  maxTextureSizeSet: 0
+  compressionQualitySet: 0
+  textureFormatSet: 0
+  ignorePngGamma: 0
+  applyGammaDecoding: 0
+  cookieLightType: 0
+  platformSettings:
+  - serializedVersion: 3
+    buildTarget: DefaultTexturePlatform
+    maxTextureSize: 2048
+    resizeAlgorithm: 0
+    textureFormat: -1
+    textureCompression: 1
+    compressionQuality: 50
+    crunchedCompression: 0
+    allowsAlphaSplitting: 0
+    overridden: 0
+    androidETC2FallbackOverride: 0
+    forceMaximumCompressionQuality_BC6H_BC7: 0
+  - serializedVersion: 3
+    buildTarget: Standalone
+    maxTextureSize: 2048
+    resizeAlgorithm: 0
+    textureFormat: -1
+    textureCompression: 1
+    compressionQuality: 50
+    crunchedCompression: 0
+    allowsAlphaSplitting: 0
+    overridden: 0
+    androidETC2FallbackOverride: 0
+    forceMaximumCompressionQuality_BC6H_BC7: 0
+  - serializedVersion: 3
+    buildTarget: Server
+    maxTextureSize: 2048
+    resizeAlgorithm: 0
+    textureFormat: -1
+    textureCompression: 1
+    compressionQuality: 50
+    crunchedCompression: 0
+    allowsAlphaSplitting: 0
+    overridden: 0
+    androidETC2FallbackOverride: 0
+    forceMaximumCompressionQuality_BC6H_BC7: 0
+  - serializedVersion: 3
+    buildTarget: Android
+    maxTextureSize: 2048
+    resizeAlgorithm: 0
+    textureFormat: -1
+    textureCompression: 1
+    compressionQuality: 50
+    crunchedCompression: 0
+    allowsAlphaSplitting: 0
+    overridden: 0
+    androidETC2FallbackOverride: 0
+    forceMaximumCompressionQuality_BC6H_BC7: 0
+  spriteSheet:
+    serializedVersion: 2
+    sprites: []
+    outline: []
+    physicsShape: []
+    bones: []
+    spriteID: 5e97eb03825dee720800000000000000
+    internalID: 1537655665
+    vertices: []
+    indices: 
+    edges: []
+    weights: []
+    secondaryTextures: []
+    nameFileIdTable: {}
+  spritePackingTag: 
+  pSDRemoveMatte: 0
+  pSDShowRemoveMatteOption: 0
+  userData: 
+  assetBundleName: 
+  assetBundleVariant: 

+ 388 - 0
Assets/FrameWork/Tip.prefab

@@ -0,0 +1,388 @@
+%YAML 1.1
+%TAG !u! tag:unity3d.com,2011:
+--- !u!1 &1314959984993192417
+GameObject:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  serializedVersion: 6
+  m_Component:
+  - component: {fileID: 1314959984993192445}
+  - component: {fileID: 1314959984993192444}
+  - component: {fileID: 1314959984993192419}
+  - component: {fileID: 1314959984993192418}
+  - component: {fileID: 1314959984993192446}
+  m_Layer: 5
+  m_Name: Tip
+  m_TagString: Untagged
+  m_Icon: {fileID: 0}
+  m_NavMeshLayer: 0
+  m_StaticEditorFlags: 0
+  m_IsActive: 1
+--- !u!224 &1314959984993192445
+RectTransform:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  m_GameObject: {fileID: 1314959984993192417}
+  m_LocalRotation: {x: 0, y: 0, z: 0, w: 1}
+  m_LocalPosition: {x: 0, y: 0, z: 0}
+  m_LocalScale: {x: 0.001, y: 0.001, z: 0.001}
+  m_ConstrainProportionsScale: 0
+  m_Children:
+  - {fileID: 1314959986119365870}
+  m_Father: {fileID: 0}
+  m_RootOrder: 0
+  m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
+  m_AnchorMin: {x: 0, y: 0}
+  m_AnchorMax: {x: 0, y: 0}
+  m_AnchoredPosition: {x: 0, y: 0}
+  m_SizeDelta: {x: 600, y: 100}
+  m_Pivot: {x: 0.5, y: 0.5}
+--- !u!223 &1314959984993192444
+Canvas:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  m_GameObject: {fileID: 1314959984993192417}
+  m_Enabled: 1
+  serializedVersion: 3
+  m_RenderMode: 2
+  m_Camera: {fileID: 0}
+  m_PlaneDistance: 100
+  m_PixelPerfect: 0
+  m_ReceivesEvents: 1
+  m_OverrideSorting: 0
+  m_OverridePixelPerfect: 0
+  m_SortingBucketNormalizedSize: 0
+  m_AdditionalShaderChannelsFlag: 25
+  m_SortingLayerID: 0
+  m_SortingOrder: 0
+  m_TargetDisplay: 0
+--- !u!114 &1314959984993192419
+MonoBehaviour:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  m_GameObject: {fileID: 1314959984993192417}
+  m_Enabled: 1
+  m_EditorHideFlags: 0
+  m_Script: {fileID: 11500000, guid: 0cd44c1031e13a943bb63640046fad76, type: 3}
+  m_Name: 
+  m_EditorClassIdentifier: 
+  m_UiScaleMode: 0
+  m_ReferencePixelsPerUnit: 100
+  m_ScaleFactor: 1
+  m_ReferenceResolution: {x: 800, y: 600}
+  m_ScreenMatchMode: 0
+  m_MatchWidthOrHeight: 0
+  m_PhysicalUnit: 3
+  m_FallbackScreenDPI: 96
+  m_DefaultSpriteDPI: 96
+  m_DynamicPixelsPerUnit: 1
+  m_PresetInfoIsWorld: 0
+--- !u!114 &1314959984993192418
+MonoBehaviour:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  m_GameObject: {fileID: 1314959984993192417}
+  m_Enabled: 1
+  m_EditorHideFlags: 0
+  m_Script: {fileID: 11500000, guid: dc42784cf147c0c48a680349fa168899, type: 3}
+  m_Name: 
+  m_EditorClassIdentifier: 
+  m_IgnoreReversedGraphics: 1
+  m_BlockingObjects: 0
+  m_BlockingMask:
+    serializedVersion: 2
+    m_Bits: 4294967295
+--- !u!114 &1314959984993192446
+MonoBehaviour:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  m_GameObject: {fileID: 1314959984993192417}
+  m_Enabled: 1
+  m_EditorHideFlags: 0
+  m_Script: {fileID: 11500000, guid: 5c52777cb1ca72c4d98b47db4f52e8e5, type: 3}
+  m_Name: 
+  m_EditorClassIdentifier: 
+  global: 0
+  go: {fileID: 1314959986119365869}
+  text: {fileID: 1314959985958535384}
+--- !u!1 &1314959985958535389
+GameObject:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  serializedVersion: 6
+  m_Component:
+  - component: {fileID: 1314959985958535390}
+  - component: {fileID: 1314959985958535385}
+  - component: {fileID: 1314959985958535384}
+  - component: {fileID: 1314959985958535391}
+  m_Layer: 5
+  m_Name: Text (TMP)
+  m_TagString: Untagged
+  m_Icon: {fileID: 0}
+  m_NavMeshLayer: 0
+  m_StaticEditorFlags: 0
+  m_IsActive: 1
+--- !u!224 &1314959985958535390
+RectTransform:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  m_GameObject: {fileID: 1314959985958535389}
+  m_LocalRotation: {x: 0, y: 0, z: 0, w: 1}
+  m_LocalPosition: {x: 0, y: 0, z: 0}
+  m_LocalScale: {x: 1, y: 1, z: 1}
+  m_ConstrainProportionsScale: 0
+  m_Children: []
+  m_Father: {fileID: 1314959986119365870}
+  m_RootOrder: 0
+  m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
+  m_AnchorMin: {x: 0, y: 1}
+  m_AnchorMax: {x: 0, y: 1}
+  m_AnchoredPosition: {x: 185.765, y: -25}
+  m_SizeDelta: {x: 371.53, y: 50}
+  m_Pivot: {x: 0.5, y: 0.5}
+--- !u!222 &1314959985958535385
+CanvasRenderer:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  m_GameObject: {fileID: 1314959985958535389}
+  m_CullTransparentMesh: 1
+--- !u!114 &1314959985958535384
+MonoBehaviour:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  m_GameObject: {fileID: 1314959985958535389}
+  m_Enabled: 1
+  m_EditorHideFlags: 0
+  m_Script: {fileID: 11500000, guid: f4688fdb7df04437aeb418b961361dc5, type: 3}
+  m_Name: 
+  m_EditorClassIdentifier: 
+  m_Material: {fileID: 0}
+  m_Color: {r: 1, g: 1, b: 1, a: 1}
+  m_RaycastTarget: 1
+  m_RaycastPadding: {x: 0, y: 0, z: 0, w: 0}
+  m_Maskable: 1
+  m_OnCullStateChanged:
+    m_PersistentCalls:
+      m_Calls: []
+  m_text: "122\u7684 \u7684\u963F\u65AF\u987F\u963F\u65AF\u987F\u653E\u5230"
+  m_isRightToLeft: 0
+  m_fontAsset: {fileID: 11400000, guid: fb2cd06170cbccb45aeb2aa643b0e4d4, type: 2}
+  m_sharedMaterial: {fileID: 8874737445518861689, guid: fb2cd06170cbccb45aeb2aa643b0e4d4, type: 2}
+  m_fontSharedMaterials: []
+  m_fontMaterial: {fileID: 0}
+  m_fontMaterials: []
+  m_fontColor32:
+    serializedVersion: 2
+    rgba: 4294967295
+  m_fontColor: {r: 1, g: 1, b: 1, a: 1}
+  m_enableVertexGradient: 0
+  m_colorMode: 3
+  m_fontColorGradient:
+    topLeft: {r: 1, g: 1, b: 1, a: 1}
+    topRight: {r: 1, g: 1, b: 1, a: 1}
+    bottomLeft: {r: 1, g: 1, b: 1, a: 1}
+    bottomRight: {r: 1, g: 1, b: 1, a: 1}
+  m_fontColorGradientPreset: {fileID: 0}
+  m_spriteAsset: {fileID: 0}
+  m_tintAllSprites: 0
+  m_StyleSheet: {fileID: 0}
+  m_TextStyleHashCode: -1183493901
+  m_overrideHtmlColors: 0
+  m_faceColor:
+    serializedVersion: 2
+    rgba: 4294967295
+  m_fontSize: 25
+  m_fontSizeBase: 25
+  m_fontWeight: 400
+  m_enableAutoSizing: 0
+  m_fontSizeMin: 18
+  m_fontSizeMax: 72
+  m_fontStyle: 0
+  m_HorizontalAlignment: 2
+  m_VerticalAlignment: 512
+  m_textAlignment: 65535
+  m_characterSpacing: 0
+  m_wordSpacing: 0
+  m_lineSpacing: 0
+  m_lineSpacingMax: 0
+  m_paragraphSpacing: 0
+  m_charWidthMaxAdj: 0
+  m_enableWordWrapping: 1
+  m_wordWrappingRatios: 0.4
+  m_overflowMode: 0
+  m_linkedTextComponent: {fileID: 0}
+  parentLinkedComponent: {fileID: 0}
+  m_enableKerning: 1
+  m_enableExtraPadding: 0
+  checkPaddingRequired: 0
+  m_isRichText: 1
+  m_parseCtrlCharacters: 1
+  m_isOrthographic: 1
+  m_isCullingEnabled: 0
+  m_horizontalMapping: 0
+  m_verticalMapping: 0
+  m_uvLineOffset: 0
+  m_geometrySortingOrder: 0
+  m_IsTextObjectScaleStatic: 0
+  m_VertexBufferAutoSizeReduction: 0
+  m_useMaxVisibleDescender: 1
+  m_pageToDisplay: 1
+  m_margin: {x: 33.463528, y: 0, z: 39.221794, w: 0}
+  m_isUsingLegacyAnimationComponent: 0
+  m_isVolumetricText: 0
+  m_hasFontAssetChanged: 0
+  m_baseMaterial: {fileID: 0}
+  m_maskOffset: {x: 0, y: 0, z: 0, w: 0}
+--- !u!114 &1314959985958535391
+MonoBehaviour:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  m_GameObject: {fileID: 1314959985958535389}
+  m_Enabled: 1
+  m_EditorHideFlags: 0
+  m_Script: {fileID: 11500000, guid: 3245ec927659c4140ac4f8d17403cc18, type: 3}
+  m_Name: 
+  m_EditorClassIdentifier: 
+  m_HorizontalFit: 2
+  m_VerticalFit: 0
+--- !u!1 &1314959986119365869
+GameObject:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  serializedVersion: 6
+  m_Component:
+  - component: {fileID: 1314959986119365870}
+  - component: {fileID: 1314959986119365866}
+  - component: {fileID: 1314959986119365865}
+  - component: {fileID: 1314959986119365864}
+  - component: {fileID: 1314959986119365871}
+  m_Layer: 5
+  m_Name: Image
+  m_TagString: Untagged
+  m_Icon: {fileID: 0}
+  m_NavMeshLayer: 0
+  m_StaticEditorFlags: 0
+  m_IsActive: 0
+--- !u!224 &1314959986119365870
+RectTransform:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  m_GameObject: {fileID: 1314959986119365869}
+  m_LocalRotation: {x: 0, y: 1, z: 0, w: 0}
+  m_LocalPosition: {x: 0, y: 0, z: 0}
+  m_LocalScale: {x: 1, y: 1, z: 1}
+  m_ConstrainProportionsScale: 0
+  m_Children:
+  - {fileID: 1314959985958535390}
+  m_Father: {fileID: 1314959984993192445}
+  m_RootOrder: 0
+  m_LocalEulerAnglesHint: {x: 0, y: 180, z: 0}
+  m_AnchorMin: {x: 0.5, y: 0.5}
+  m_AnchorMax: {x: 0.5, y: 0.5}
+  m_AnchoredPosition: {x: 0, y: -65}
+  m_SizeDelta: {x: 371.53, y: 49.995}
+  m_Pivot: {x: 0.5, y: 0.5}
+--- !u!222 &1314959986119365866
+CanvasRenderer:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  m_GameObject: {fileID: 1314959986119365869}
+  m_CullTransparentMesh: 1
+--- !u!114 &1314959986119365865
+MonoBehaviour:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  m_GameObject: {fileID: 1314959986119365869}
+  m_Enabled: 1
+  m_EditorHideFlags: 0
+  m_Script: {fileID: 11500000, guid: fe87c0e1cc204ed48ad3b37840f39efc, type: 3}
+  m_Name: 
+  m_EditorClassIdentifier: 
+  m_Material: {fileID: 0}
+  m_Color: {r: 0.1509434, g: 0.1509434, b: 0.1509434, a: 0.39215687}
+  m_RaycastTarget: 0
+  m_RaycastPadding: {x: 0, y: 0, z: 0, w: 0}
+  m_Maskable: 1
+  m_OnCullStateChanged:
+    m_PersistentCalls:
+      m_Calls: []
+  m_Sprite: {fileID: 21300000, guid: 78ef8fc8086f8b04fbcddf574708681f, type: 3}
+  m_Type: 1
+  m_PreserveAspect: 0
+  m_FillCenter: 1
+  m_FillMethod: 4
+  m_FillAmount: 1
+  m_FillClockwise: 1
+  m_FillOrigin: 0
+  m_UseSpriteMesh: 0
+  m_PixelsPerUnitMultiplier: 1
+--- !u!114 &1314959986119365864
+MonoBehaviour:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  m_GameObject: {fileID: 1314959986119365869}
+  m_Enabled: 1
+  m_EditorHideFlags: 0
+  m_Script: {fileID: 11500000, guid: 30649d3a9faa99c48a7b1166b86bf2a0, type: 3}
+  m_Name: 
+  m_EditorClassIdentifier: 
+  m_Padding:
+    m_Left: 0
+    m_Right: 0
+    m_Top: 0
+    m_Bottom: 0
+  m_ChildAlignment: 0
+  m_Spacing: 0
+  m_ChildForceExpandWidth: 1
+  m_ChildForceExpandHeight: 1
+  m_ChildControlWidth: 0
+  m_ChildControlHeight: 0
+  m_ChildScaleWidth: 0
+  m_ChildScaleHeight: 0
+  m_ReverseArrangement: 0
+--- !u!114 &1314959986119365871
+MonoBehaviour:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  m_GameObject: {fileID: 1314959986119365869}
+  m_Enabled: 1
+  m_EditorHideFlags: 0
+  m_Script: {fileID: 11500000, guid: 3245ec927659c4140ac4f8d17403cc18, type: 3}
+  m_Name: 
+  m_EditorClassIdentifier: 
+  m_HorizontalFit: 2
+  m_VerticalFit: 0
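
For context, the Tip prefab above is a small world-space canvas (root scale 0.001) whose root script is referenced only by a GUID. Its serialized fields are global, go (the initially inactive background Image object) and text (the child Text (TMP) component). A minimal sketch of what such a component might look like; the class name and the Show/Hide behaviour are assumptions for illustration, only the field layout comes from the prefab:

using TMPro;
using UnityEngine;

// Sketch only: mirrors the serialized fields visible on the Tip prefab root
// (global, go, text). The real script behind the GUID is not part of this diff,
// so the name and behaviour below are assumed.
public class TipInfoSketch : MonoBehaviour
{
    public bool global;     // serialized as "global: 0"
    public GameObject go;   // background Image object, inactive by default
    public TMP_Text text;   // child "Text (TMP)" component

    // Show a tip by enabling the background and replacing the placeholder text.
    public void Show(string message)
    {
        text.text = message;
        go.SetActive(true);
    }

    public void Hide()
    {
        go.SetActive(false);
    }
}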

+ 7 - 0
Assets/FrameWork/Tip.prefab.meta

@@ -0,0 +1,7 @@
+fileFormatVersion: 2
+guid: c73141cf71d6eac46be594876eb3d9c6
+PrefabImporter:
+  externalObjects: {}
+  userData: 
+  assetBundleName: 
+  assetBundleVariant: 

+ 8 - 0
Assets/FrameWork/WindowsItem.asset

@@ -38,7 +38,15 @@ MonoBehaviour:
       obj: {fileID: 1909995360214599739, guid: f00c43dfe3ee5c84d90c9f05a30b1968, type: 3}
     - name: SaoMiao
       obj: {fileID: 1620164308813382647, guid: b19230ffb2b63cd4899f44f3e8582a97, type: 3}
+    - name: PaiZhaoItem
+      obj: {fileID: 895371214607694078, guid: 3e7ff33a2aaa09442ae2d68ab2e243df, type: 3}
     type: 200001
+  - PrefabList:
+    - name: PlayerToImage
+      obj: {fileID: 6371964581270030862, guid: a2814d80086e5b5408355d76fc96c80f, type: 3}
+    - name: TipInfo
+      obj: {fileID: 1314959984993192417, guid: c73141cf71d6eac46be594876eb3d9c6, type: 3}
+    type: 200002
   listTexture:
   - PrefabList:
       name: icon
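
The WindowsItem.asset change registers the new prefabs by name: PaiZhaoItem is appended to the existing 200001 group, and a new 200002 group maps PlayerToImage and TipInfo to their prefab assets. The ScriptableObject class behind this asset is not part of the diff, so the sketch below only illustrates how such a name-to-prefab registry could be modelled and queried; every type and field name not visible in the YAML above is an assumption:

using System;
using System.Collections.Generic;
using UnityEngine;

// Hypothetical mirror of the WindowsItem.asset layout: groups of (name, prefab)
// pairs keyed by a numeric type such as 200001 or 200002.
[Serializable]
public class PrefabEntry { public string name; public UnityEngine.Object obj; }

[Serializable]
public class PrefabGroup { public List<PrefabEntry> PrefabList; public int type; }

public class WindowsItemSketch : ScriptableObject
{
    public List<PrefabGroup> groups;   // field name assumed, not shown in this hunk

    // Look up a registered prefab such as "TipInfo" or "PlayerToImage" by name.
    public UnityEngine.Object Find(int type, string name)
    {
        var group = groups.Find(g => g.type == type);
        return group?.PrefabList.Find(p => p.name == name)?.obj;
    }
}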

+ 8 - 0
Assets/OpenCVForUnity.meta

@@ -0,0 +1,8 @@
+fileFormatVersion: 2
+guid: b30935a936e5a2a4fa95eb9d546ad044
+folderAsset: yes
+DefaultImporter:
+  externalObjects: {}
+  userData: 
+  assetBundleName: 
+  assetBundleVariant: 

+ 9 - 0
Assets/OpenCVForUnity/Editor.meta

@@ -0,0 +1,9 @@
+fileFormatVersion: 2
+guid: db158ada3f52311488e2345f442cfff7
+folderAsset: yes
+timeCreated: 1430587581
+licenseType: Store
+DefaultImporter:
+  userData: 
+  assetBundleName: 
+  assetBundleVariant: 

+ 127 - 0
Assets/OpenCVForUnity/Editor/OpenCVForUnityIOSBuildPostprocessor.cs

@@ -0,0 +1,127 @@
+#if (UNITY_5 || UNITY_5_3_OR_NEWER) && UNITY_IOS
+using UnityEngine;
+using UnityEditor;
+using UnityEditor.Callbacks;
+using UnityEditor.iOS.Xcode;
+
+using System.Diagnostics;
+
+#if UNITY_2017_2_OR_NEWER
+using UnityEditor.iOS.Xcode.Extensions;
+#endif
+using System;
+using System.Collections;
+using System.IO;
+
+namespace OpenCVForUnity
+{
+    public class OpenCVForUnityIOSBuildPostprocessor : MonoBehaviour
+    {
+        
+        [PostProcessBuild]
+        public static void OnPostprocessBuild (BuildTarget buildTarget, string path)
+        {
+            if (buildTarget == BuildTarget.iOS) {
+
+                string[] guids = UnityEditor.AssetDatabase.FindAssets ("OpenCVForUnityIOSBuildPostprocessor");
+                if (guids.Length == 0) {
+                    UnityEngine.Debug.LogWarning ("SetPluginImportSettings Failed : OpenCVForUnityIOSBuildPostprocessor.cs is missing.");
+                    return;
+                }
+                string opencvForUnityFolderPath = AssetDatabase.GUIDToAssetPath (guids [0]).Substring ("Assets/".Length);
+                opencvForUnityFolderPath = opencvForUnityFolderPath.Substring (0, opencvForUnityFolderPath.LastIndexOf ("Editor/OpenCVForUnityIOSBuildPostprocessor.cs"));
+
+
+                if (PlayerSettings.iOS.sdkVersion == iOSSdkVersion.DeviceSDK) {
+#if UNITY_2018_3_OR_NEWER
+                    RemoveSimulatorArchitectures (path + "/Frameworks/", "opencv2.framework/opencv2");
+#else
+                    RemoveSimulatorArchitectures (path + "/Frameworks/"+opencvForUnityFolderPath+"Plugins/iOS/", "opencv2.framework/opencv2");
+#endif
+                    RemoveSimulatorArchitectures (path + "/Libraries/"+opencvForUnityFolderPath+"Plugins/iOS/", "libopencvforunity.a");
+                }
+
+#if UNITY_5_0 || UNITY_5_1 || UNITY_5_2
+                string projPath = path + "/Unity-iPhone.xcodeproj/project.pbxproj";
+#else
+                string projPath = PBXProject.GetPBXProjectPath (path);
+#endif
+            
+                PBXProject proj = new PBXProject ();
+                proj.ReadFromString (System.IO.File.ReadAllText (projPath));
+                    
+#if UNITY_5_0 || UNITY_5_1 || UNITY_5_2
+                string target = proj.TargetGuidByName ("Unity-iPhone");
+#else
+                string target = proj.TargetGuidByName (PBXProject.GetUnityTargetName ());
+#endif
+
+#if UNITY_2018_1_OR_NEWER
+
+#elif UNITY_2017_2_OR_NEWER
+                string frameworkPath = "Frameworks/"+opencvForUnityFolderPath+"Plugins/iOS/opencv2.framework";
+                string fileGuid = proj.FindFileGuidByProjectPath(frameworkPath);
+
+                proj.AddFileToBuild(target, fileGuid);
+                proj.AddFileToEmbedFrameworks(target, fileGuid);
+                foreach (var configName in proj.BuildConfigNames()) {
+                    var configGuid = proj.BuildConfigByName(target, configName);
+                    proj.SetBuildPropertyForConfig(configGuid, "LD_RUNPATH_SEARCH_PATHS", "$(inherited) @executable_path/Frameworks");
+                }
+#else
+                UnityEngine.Debug.LogError ("If the version of Unity is less than 2017.2, you have to set opencv2.framework to Embedded Binaries manually.");
+#endif
+
+                File.WriteAllText (projPath, proj.WriteToString ());
+
+#if UNITY_5_5_OR_NEWER
+                if ((int)Convert.ToDecimal (PlayerSettings.iOS.targetOSVersionString) < 8) {
+#else
+                if ((int)PlayerSettings.iOS.targetOSVersion < (int)iOSTargetOSVersion.iOS_8_0) {
+#endif
+                    UnityEngine.Debug.LogError ("Please set Target minimum iOS Version to 8.0 or higher.");
+                }
+
+            }
+        }
+
+        /// <summary>
+        /// Removes the simulator architectures.
+        /// </summary>
+        /// <param name="WorkingDirectory">Working directory.</param>
+        /// <param name="filePath">File path.</param>
+        private static void RemoveSimulatorArchitectures (string WorkingDirectory, string filePath)
+        {
+            Process process = new Process ();
+            process.StartInfo.FileName = "/bin/bash";
+            process.StartInfo.WorkingDirectory = WorkingDirectory;
+
+            process.StartInfo.Arguments = "-c \" ";
+
+            process.StartInfo.Arguments += "lipo -remove i386 " + filePath + " -o " + filePath + ";";
+            process.StartInfo.Arguments += "lipo -remove x86_64 " + filePath + " -o " + filePath + ";";
+            process.StartInfo.Arguments += "lipo -info " + filePath + ";";
+
+            process.StartInfo.Arguments += " \"";
+
+            process.StartInfo.UseShellExecute = false;
+            process.StartInfo.RedirectStandardOutput = true;
+            process.StartInfo.RedirectStandardError = true;
+
+            process.Start ();
+
+            string output = process.StandardOutput.ReadToEnd ();
+            string error = process.StandardError.ReadToEnd ();
+
+            process.WaitForExit ();
+            process.Close ();
+
+            if (string.IsNullOrEmpty (error)) {
+                UnityEngine.Debug.Log ("success : " + output);
+            } else {
+                UnityEngine.Debug.LogWarning ("error : " + error);
+            }
+        }
+    }
+}
+#endif
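
In short, this postprocessor runs automatically after an iOS build: for Device SDK builds it strips the i386 and x86_64 simulator slices from opencv2.framework and libopencvforunity.a with lipo, embeds opencv2.framework into the Xcode project on Unity 2017.2-2018.0 (2018.1+ handles embedding itself), and warns when the target minimum iOS version is below 8.0. To confirm the strip worked, an Editor-folder helper along these lines could log the remaining slices after the plugin's postprocessor has run; this helper is an assumption for illustration and not part of the package:

#if UNITY_EDITOR && UNITY_IOS
using System.Diagnostics;
using UnityEditor;
using UnityEditor.Callbacks;

// Assumed project-specific helper: prints the architectures left in the embedded
// opencv2 binary of the generated Xcode project (path matches the 2018.3+ layout
// used by the postprocessor above).
public static class CheckOpenCVArchitectures
{
    [PostProcessBuild(200)] // higher callback order, so it runs after the plugin's postprocessor
    public static void OnPostprocessBuild(BuildTarget target, string path)
    {
        if (target != BuildTarget.iOS) return;

        var psi = new ProcessStartInfo
        {
            FileName = "/usr/bin/lipo",
            Arguments = "-info Frameworks/opencv2.framework/opencv2",
            WorkingDirectory = path,
            UseShellExecute = false,
            RedirectStandardOutput = true
        };
        using (var p = Process.Start(psi))
        {
            UnityEngine.Debug.Log(p.StandardOutput.ReadToEnd());
            p.WaitForExit();
        }
    }
}
#endif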

+ 12 - 0
Assets/OpenCVForUnity/Editor/OpenCVForUnityIOSBuildPostprocessor.cs.meta

@@ -0,0 +1,12 @@
+fileFormatVersion: 2
+guid: 1468bec8532ec6d488cebe62022b2b67
+timeCreated: 1530944474
+licenseType: Store
+MonoImporter:
+  serializedVersion: 2
+  defaultReferences: []
+  executionOrder: 0
+  icon: {instanceID: 0}
+  userData: 
+  assetBundleName: 
+  assetBundleVariant: 

+ 386 - 0
Assets/OpenCVForUnity/Editor/OpenCVForUnityMenuItem.cs

@@ -0,0 +1,386 @@
+#if UNITY_5 || UNITY_5_3_OR_NEWER
+using UnityEngine;
+using UnityEditor;
+
+using System.IO;
+using System.Linq;
+using System.Text.RegularExpressions;
+using System.Collections.Generic;
+using System;
+
+namespace OpenCVForUnity
+{
+    class OpenCVForUnityMenuItem : MonoBehaviour
+    {
+
+        /// <summary>
+        /// Open OpenCV for Unity API Reference.
+        /// </summary>
+        [MenuItem ("Tools/OpenCV for Unity/Open OpenCV for Unity API Reference", false, 12)]
+        public static void OpenOpenCVForUnityAPIReference ()
+        {
+            Application.OpenURL ("http://enoxsoftware.github.io/OpenCVForUnity/3.0.0/doc/html/index.html");
+        }
+
+        /// <summary>
+        /// Open OpenCV C++ API Reference.
+        /// </summary>
+        [MenuItem ("Tools/OpenCV for Unity/Open OpenCV C++ API Reference", false, 13)]
+        public static void OpenOpenCVAPIReference ()
+        {
+            Application.OpenURL ("http://docs.opencv.org/3.3.0/index.html");
+        }
+
+
+        /// <summary>
+        /// Sets the plugin import settings.
+        /// </summary>
+        [MenuItem ("Tools/OpenCV for Unity/Set Plugin Import Settings", false, 1)]
+        public static void SetPluginImportSettings ()
+        {
+            string[] guids = UnityEditor.AssetDatabase.FindAssets ("OpenCVForUnityMenuItem");
+            if (guids.Length == 0) {
+                Debug.LogWarning ("SetPluginImportSettings Failed : OpenCVForUnityMenuItem.cs is missing.");
+                return;
+            }
+            string opencvForUnityFolderPath = AssetDatabase.GUIDToAssetPath (guids [0]).Substring (0, AssetDatabase.GUIDToAssetPath (guids [0]).LastIndexOf ("Editor/OpenCVForUnityMenuItem.cs"));
+
+            string pluginsFolderPath = opencvForUnityFolderPath + "Plugins";
+//            Debug.Log ("pluginsFolderPath " + pluginsFolderPath);
+
+            string extraFolderPath = opencvForUnityFolderPath + "Extra";
+//            Debug.Log ("extraFolderPath " + extraFolderPath);
+
+
+            //Disable Extra folder
+            SetPlugins (GetPluginFilePaths (extraFolderPath + "/exclude_contrib/Android/libs/armeabi-v7a"), null, null);
+            SetPlugins (GetPluginFilePaths (extraFolderPath + "/exclude_contrib/Android/libs/x86"), null, null);
+            SetPlugins (GetPluginFilePaths (extraFolderPath + "/exclude_contrib/Android/libs/arm64-v8a"), null, null);
+            SetPlugins (new string[] { extraFolderPath + "/exclude_contrib/iOS/opencv2.framework" }, null, null);
+            SetPlugins (GetPluginFilePaths (extraFolderPath + "/exclude_contrib/iOS"), null, null);
+           
+
+            //Android
+            SetPlugins (GetPluginFilePaths (pluginsFolderPath + "/Android/libs/armeabi-v7a"), null,
+                new Dictionary<BuildTarget, Dictionary<string, string>> () { {BuildTarget.Android,new Dictionary<string, string> () { {
+                                "CPU",
+                                "ARMv7"
+                            }
+                        }
+                    }
+                });
+            SetPlugins (GetPluginFilePaths (pluginsFolderPath + "/Android/libs/x86"), null,
+                new Dictionary<BuildTarget, Dictionary<string, string>> () { {BuildTarget.Android,new Dictionary<string, string> () { {
+                                "CPU",
+                                "x86"
+                            }
+                        }
+                    }
+                });
+#if UNITY_2018_1_OR_NEWER
+            SetPlugins (GetPluginFilePaths (pluginsFolderPath + "/Android/libs/arm64-v8a"), null,
+                new Dictionary<BuildTarget, Dictionary<string, string>> () { {BuildTarget.Android,new Dictionary<string, string> () { {
+                                "CPU",
+                                "ARM64"
+                            }
+                        }
+                    }
+                });
+#else
+            SetPlugins (GetPluginFilePaths (pluginsFolderPath + "/Android/libs/arm64-v8a"), null, null);
+#endif
+            
+            //iOS
+            SetPlugins (new string[] { pluginsFolderPath + "/iOS/opencv2.framework" }, null,
+                new Dictionary<BuildTarget, Dictionary<string, string>> () { {
+                        BuildTarget.iOS,
+                        new Dictionary<string, string> () { {
+                                "AddToEmbeddedBinaries",
+                                "true"
+                            }
+                        }
+                    }
+                });
+            SetPlugins (GetPluginFilePaths (pluginsFolderPath + "/iOS"), null,
+                new Dictionary<BuildTarget, Dictionary<string, string>> () { {
+                        BuildTarget.iOS,
+                        null
+                    }
+                });
+            
+            //OSX
+            SetPlugins (new string[] { pluginsFolderPath + "/macOS/opencvforunity.bundle" }, new Dictionary<string, string> () { {
+                    "CPU",
+                    "AnyCPU"
+                }, {
+                    "OS",
+                    "OSX"
+                }
+            },
+                new Dictionary<BuildTarget, Dictionary<string, string>> () {
+#if UNITY_2017_3_OR_NEWER
+                    {
+                        BuildTarget.StandaloneOSX,new Dictionary<string, string> () { {
+                                "CPU",
+                                "AnyCPU"
+                            }
+                        }
+                    }
+#else
+                    {
+                        BuildTarget.StandaloneOSXIntel,new Dictionary<string, string> () { {
+                                "CPU",
+                                "x86"
+                            }
+                        }
+                    }, {
+                        BuildTarget.StandaloneOSXIntel64,new Dictionary<string, string> () { {
+                                "CPU",
+                                "x86_64"
+                            }
+                        }
+                    }, {
+                        BuildTarget.StandaloneOSXUniversal,new Dictionary<string, string> () { {
+                                "CPU",
+                                "AnyCPU"
+                            }
+                        }
+                    }
+#endif
+                });
+            
+            //Windows
+            SetPlugins (new string[] { pluginsFolderPath + "/Windows/x86/opencvforunity.dll" }, new Dictionary<string, string> () { {
+                    "CPU",
+                    "x86"
+                }, {
+                    "OS",
+                    "Windows"
+                }
+            },
+                new Dictionary<BuildTarget, Dictionary<string, string>> () { {BuildTarget.StandaloneWindows,new Dictionary<string, string> () { {
+                                "CPU",
+                                "x86"
+                            }
+                        }
+                    }
+                });
+            SetPlugins (new string[] { pluginsFolderPath + "/Windows/x86_64/opencvforunity.dll" }, new Dictionary<string, string> () { {
+                    "CPU",
+                    "x86_64"
+                }, {
+                    "OS",
+                    "Windows"
+                }
+            },
+                new Dictionary<BuildTarget, Dictionary<string, string>> () { {BuildTarget.StandaloneWindows64,new Dictionary<string, string> () { {
+                                "CPU",
+                                "x86_64"
+                            }
+                        }
+                    }
+                });
+            
+            //Linux
+            SetPlugins (GetPluginFilePaths (pluginsFolderPath + "/Linux/x86"), new Dictionary<string, string> () { {
+                    "CPU",
+                    "x86"
+                }, {
+                    "OS",
+                    "Linux"
+                }
+            },
+                new Dictionary<BuildTarget, Dictionary<string, string>> () { {BuildTarget.StandaloneLinux,new Dictionary<string, string> () { {
+                                "CPU",
+                                "x86"
+                            }
+                        }
+                    },
+                });
+            SetPlugins (GetPluginFilePaths (pluginsFolderPath + "/Linux/x86_64"), new Dictionary<string, string> () { {
+                    "CPU",
+                    "x86_64"
+                }, {
+                    "OS",
+                    "Linux"
+                }
+            },
+                new Dictionary<BuildTarget, Dictionary<string, string>> () { {BuildTarget.StandaloneLinux64,new Dictionary<string, string> () { {
+                                "CPU",
+                                "x86_64"
+                            }
+                        }
+                    },
+                });
+            
+            
+            //UWP
+            #if UNITY_5_0 || UNITY_5_1
+            SetPlugins (GetPluginFilePaths (pluginsFolderPath + "/WSA/UWP/ARM"), null, null);
+            SetPlugins (GetPluginFilePaths (pluginsFolderPath + "/WSA/UWP/x64"), null, null);
+            SetPlugins (GetPluginFilePaths (pluginsFolderPath + "/WSA/UWP/x86"), null, null);
+            #else
+            SetPlugins (GetPluginFilePaths (pluginsFolderPath + "/WSA/UWP/ARM"), null,
+                new Dictionary<BuildTarget, Dictionary<string, string>> () { {BuildTarget.WSAPlayer,new Dictionary<string, string> () { {
+                                "SDK",
+                                "UWP"
+                            }, {
+                                "CPU",
+                                "ARM"
+                            }
+                        }
+                    }
+                });
+            SetPlugins (GetPluginFilePaths (pluginsFolderPath + "/WSA/UWP/x64"), null,
+                new Dictionary<BuildTarget, Dictionary<string, string>> () { {BuildTarget.WSAPlayer,new Dictionary<string, string> () { {
+                                "SDK",
+                                "UWP"
+                            }, {
+                                "CPU",
+                                "x64"
+                            }
+                        }
+                    }
+                });
+            SetPlugins (GetPluginFilePaths (pluginsFolderPath + "/WSA/UWP/x86"), null,
+                new Dictionary<BuildTarget, Dictionary<string, string>> () { {BuildTarget.WSAPlayer,new Dictionary<string, string> () { {
+                                "SDK",
+                                "UWP"
+                            }, {
+                                "CPU",
+                                "x86"
+                            }
+                        }
+                    }
+                });
+            #endif
+            
+            //WebGL
+            #if UNITY_2018_2_OR_NEWER
+            SetPlugins (new string[] { pluginsFolderPath + "/WebGL/2018.2/opencvforunity.bc" }, null, new Dictionary<BuildTarget, Dictionary<string, string>> () { {
+                    BuildTarget.WebGL,
+                    null
+                }
+            });
+            SetPlugins (new string[] { pluginsFolderPath + "/WebGL/5.6/opencvforunity.bc" }, null, null);
+            #elif UNITY_5_6_OR_NEWER
+            SetPlugins (new string[] { pluginsFolderPath + "/WebGL/2018.2/opencvforunity.bc" }, null, null);
+            SetPlugins (new string[] { pluginsFolderPath + "/WebGL/5.6/opencvforunity.bc" }, null, new Dictionary<BuildTarget, Dictionary<string, string>> () { {
+                    BuildTarget.WebGL,
+                    null
+                }
+            });
+            #else
+            SetPlugins (new string[] { pluginsFolderPath + "/WebGL/2018.2/opencvforunity.bc" }, null, null);
+            SetPlugins (new string[] { pluginsFolderPath + "/WebGL/5.6/opencvforunity.bc" }, null, null);
+            #endif
+            
+        }
+
+        /// <summary>
+        /// Gets the plugin file paths.
+        /// </summary>
+        /// <returns>The plugin file paths.</returns>
+        /// <param name="folderPath">Folder path.</param>
+        static string[] GetPluginFilePaths (string folderPath)
+        {
+            Regex reg = new Regex (".meta$|.DS_Store$|.zip");
+            try {
+                return Directory.GetFiles (folderPath).Where (f => !reg.IsMatch (f)).ToArray ();
+            } catch (Exception ex) {
+                Debug.LogWarning ("SetPluginImportSettings Failed :" + ex);
+                return null;
+            }
+        }
+
+        /// <summary>
+        /// Sets the plugins.
+        /// </summary>
+        /// <param name="files">Files.</param>
+        /// <param name="editorSettings">Editor settings.</param>
+        /// <param name="settings">Settings.</param>
+        public static void SetPlugins (string[] files, Dictionary<string, string> editorSettings, Dictionary<BuildTarget, Dictionary<string, string>> settings)
+        {
+            if (files == null)
+                return;
+            
+            foreach (string item in files) {
+                
+                PluginImporter pluginImporter = PluginImporter.GetAtPath (item) as PluginImporter;
+                
+                if (pluginImporter != null) {
+                    
+                    pluginImporter.SetCompatibleWithAnyPlatform (false);
+                    pluginImporter.SetCompatibleWithEditor (false);
+                    pluginImporter.SetCompatibleWithPlatform (BuildTarget.Android, false);
+                    pluginImporter.SetCompatibleWithPlatform (BuildTarget.iOS, false);
+                    pluginImporter.SetCompatibleWithPlatform (BuildTarget.StandaloneWindows, false);
+                    pluginImporter.SetCompatibleWithPlatform (BuildTarget.StandaloneWindows64, false);
+#if UNITY_2017_3_OR_NEWER
+                    pluginImporter.SetCompatibleWithPlatform (BuildTarget.StandaloneOSX, false);
+#else
+                    pluginImporter.SetCompatibleWithPlatform (BuildTarget.StandaloneOSXIntel, false);
+                    pluginImporter.SetCompatibleWithPlatform (BuildTarget.StandaloneOSXIntel64, false);
+                    pluginImporter.SetCompatibleWithPlatform (BuildTarget.StandaloneOSXUniversal, false);
+#endif
+                    pluginImporter.SetCompatibleWithPlatform (BuildTarget.StandaloneLinux, false);
+                    pluginImporter.SetCompatibleWithPlatform (BuildTarget.StandaloneLinux64, false);
+                    pluginImporter.SetCompatibleWithPlatform (BuildTarget.StandaloneLinuxUniversal, false);
+                    pluginImporter.SetCompatibleWithPlatform (BuildTarget.WSAPlayer, false);
+                    pluginImporter.SetCompatibleWithPlatform (BuildTarget.WebGL, false);
+                    
+                    
+                    if (editorSettings != null) {
+                        pluginImporter.SetCompatibleWithEditor (true);
+                        
+                        foreach (KeyValuePair<string, string> pair in editorSettings) {
+                            if (pluginImporter.GetEditorData (pair.Key) != pair.Value) {
+                                pluginImporter.SetEditorData (pair.Key, pair.Value);
+                            }
+                        }
+                    }
+                    
+                    if (settings != null) {
+                        foreach (KeyValuePair<BuildTarget, Dictionary<string, string>> settingPair in settings) {
+                            
+                            pluginImporter.SetCompatibleWithPlatform (settingPair.Key, true);
+                            if (settingPair.Value != null) {
+                                foreach (KeyValuePair<string, string> pair in settingPair.Value) {
+                                    if (pluginImporter.GetPlatformData (settingPair.Key, pair.Key) != pair.Value) {
+                                        pluginImporter.SetPlatformData (settingPair.Key, pair.Key, pair.Value);
+                                    }
+                                }
+                            }
+                            
+                        }
+                    } else {
+                        pluginImporter.SetCompatibleWithPlatform (BuildTarget.Android, false);
+                        pluginImporter.SetCompatibleWithPlatform (BuildTarget.iOS, false);
+                        pluginImporter.SetCompatibleWithPlatform (BuildTarget.StandaloneWindows, false);
+                        pluginImporter.SetCompatibleWithPlatform (BuildTarget.StandaloneWindows64, false);
+#if UNITY_2017_3_OR_NEWER
+                        pluginImporter.SetCompatibleWithPlatform (BuildTarget.StandaloneOSX, false);
+#else
+                        pluginImporter.SetCompatibleWithPlatform (BuildTarget.StandaloneOSXIntel, false);
+                        pluginImporter.SetCompatibleWithPlatform (BuildTarget.StandaloneOSXIntel64, false);
+                        pluginImporter.SetCompatibleWithPlatform (BuildTarget.StandaloneOSXUniversal, false);
+#endif
+                        pluginImporter.SetCompatibleWithPlatform (BuildTarget.StandaloneLinux, false);
+                        pluginImporter.SetCompatibleWithPlatform (BuildTarget.StandaloneLinux64, false);
+                        pluginImporter.SetCompatibleWithPlatform (BuildTarget.StandaloneLinuxUniversal, false);
+                        pluginImporter.SetCompatibleWithPlatform (BuildTarget.WSAPlayer, false);
+                        pluginImporter.SetCompatibleWithPlatform (BuildTarget.WebGL, false);
+                    }
+                    
+                    
+                    pluginImporter.SaveAndReimport ();
+                    
+                    Debug.Log ("SetPluginImportSettings Success :" + item);
+                } else {
+                    Debug.LogWarning ("SetPluginImportSettings Failed :" + item);
+                }
+            }
+        }
+    }
+}
+#endif

+ 12 - 0
Assets/OpenCVForUnity/Editor/OpenCVForUnityMenuItem.cs.meta

@@ -0,0 +1,12 @@
+fileFormatVersion: 2
+guid: 94571a6b79d0cf14ba69f68be8bb2852
+timeCreated: 1438870515
+licenseType: Store
+MonoImporter:
+  serializedVersion: 2
+  defaultReferences: []
+  executionOrder: 0
+  icon: {instanceID: 0}
+  userData: 
+  assetBundleName: 
+  assetBundleVariant: 

+ 9 - 0
Assets/OpenCVForUnity/Extra.meta

@@ -0,0 +1,9 @@
+fileFormatVersion: 2
+guid: 0be691e3d2d0871468bbc75565bda60b
+folderAsset: yes
+timeCreated: 1490623195
+licenseType: Store
+DefaultImporter:
+  userData: 
+  assetBundleName: 
+  assetBundleVariant: 

+ 9 - 0
Assets/OpenCVForUnity/Extra/exclude_contrib.meta

@@ -0,0 +1,9 @@
+fileFormatVersion: 2
+guid: 9de5091c7911cb24f8bd16e86393edf1
+folderAsset: yes
+timeCreated: 1495790157
+licenseType: Store
+DefaultImporter:
+  userData: 
+  assetBundleName: 
+  assetBundleVariant: 

+ 9 - 0
Assets/OpenCVForUnity/Extra/exclude_contrib/Android.meta

@@ -0,0 +1,9 @@
+fileFormatVersion: 2
+guid: 7d71a97816ea6fe499bf29b22d80efed
+folderAsset: yes
+timeCreated: 1495790078
+licenseType: Store
+DefaultImporter:
+  userData: 
+  assetBundleName: 
+  assetBundleVariant: 

+ 9 - 0
Assets/OpenCVForUnity/Extra/exclude_contrib/Android/libs.meta

@@ -0,0 +1,9 @@
+fileFormatVersion: 2
+guid: 0d245e2ecdff5d9409de3622f5f1ba8f
+folderAsset: yes
+timeCreated: 1495790308
+licenseType: Store
+DefaultImporter:
+  userData: 
+  assetBundleName: 
+  assetBundleVariant: 

+ 9 - 0
Assets/OpenCVForUnity/Extra/exclude_contrib/Android/libs/arm64-v8a.meta

@@ -0,0 +1,9 @@
+fileFormatVersion: 2
+guid: 67cab65ca22e42746aa47a85654b3b68
+folderAsset: yes
+timeCreated: 1547622860
+licenseType: Store
+DefaultImporter:
+  userData: 
+  assetBundleName: 
+  assetBundleVariant: 

BIN
Assets/OpenCVForUnity/Extra/exclude_contrib/Android/libs/arm64-v8a/libopencvforunity.so


+ 27 - 0
Assets/OpenCVForUnity/Extra/exclude_contrib/Android/libs/arm64-v8a/libopencvforunity.so.meta

@@ -0,0 +1,27 @@
+fileFormatVersion: 2
+guid: 363f4b250a7ab484293029f827708917
+timeCreated: 1547622873
+licenseType: Store
+PluginImporter:
+  serializedVersion: 2
+  iconMap: {}
+  executionOrder: {}
+  isPreloaded: 0
+  isOverridable: 0
+  platformData:
+    data:
+      first:
+        Any: 
+      second:
+        enabled: 0
+        settings: {}
+    data:
+      first:
+        Editor: Editor
+      second:
+        enabled: 0
+        settings:
+          DefaultValueInitialized: true
+  userData: 
+  assetBundleName: 
+  assetBundleVariant: 

+ 9 - 0
Assets/OpenCVForUnity/Extra/exclude_contrib/Android/libs/armeabi-v7a.meta

@@ -0,0 +1,9 @@
+fileFormatVersion: 2
+guid: de61ba97ac8a0e148873b06ad0960d1f
+folderAsset: yes
+timeCreated: 1547622860
+licenseType: Store
+DefaultImporter:
+  userData: 
+  assetBundleName: 
+  assetBundleVariant: 

BIN
Assets/OpenCVForUnity/Extra/exclude_contrib/Android/libs/armeabi-v7a/libopencvforunity.so


+ 27 - 0
Assets/OpenCVForUnity/Extra/exclude_contrib/Android/libs/armeabi-v7a/libopencvforunity.so.meta

@@ -0,0 +1,27 @@
+fileFormatVersion: 2
+guid: 344fcd144b88608458bd9ce9037ab8f5
+timeCreated: 1547622873
+licenseType: Store
+PluginImporter:
+  serializedVersion: 2
+  iconMap: {}
+  executionOrder: {}
+  isPreloaded: 0
+  isOverridable: 0
+  platformData:
+    data:
+      first:
+        Any: 
+      second:
+        enabled: 0
+        settings: {}
+    data:
+      first:
+        Editor: Editor
+      second:
+        enabled: 0
+        settings:
+          DefaultValueInitialized: true
+  userData: 
+  assetBundleName: 
+  assetBundleVariant: 

+ 9 - 0
Assets/OpenCVForUnity/Extra/exclude_contrib/Android/libs/x86.meta

@@ -0,0 +1,9 @@
+fileFormatVersion: 2
+guid: 78ddecdc2f30eac43b7c4968f3226374
+folderAsset: yes
+timeCreated: 1547622860
+licenseType: Store
+DefaultImporter:
+  userData: 
+  assetBundleName: 
+  assetBundleVariant: 

BIN
Assets/OpenCVForUnity/Extra/exclude_contrib/Android/libs/x86/libopencvforunity.so


+ 27 - 0
Assets/OpenCVForUnity/Extra/exclude_contrib/Android/libs/x86/libopencvforunity.so.meta

@@ -0,0 +1,27 @@
+fileFormatVersion: 2
+guid: 2c01a18433116f24bab03bee7c77fc8c
+timeCreated: 1547622873
+licenseType: Store
+PluginImporter:
+  serializedVersion: 2
+  iconMap: {}
+  executionOrder: {}
+  isPreloaded: 0
+  isOverridable: 0
+  platformData:
+    data:
+      first:
+        Any: 
+      second:
+        enabled: 0
+        settings: {}
+    data:
+      first:
+        Editor: Editor
+      second:
+        enabled: 0
+        settings:
+          DefaultValueInitialized: true
+  userData: 
+  assetBundleName: 
+  assetBundleVariant: 

+ 9 - 0
Assets/OpenCVForUnity/Extra/exclude_contrib/iOS.meta

@@ -0,0 +1,9 @@
+fileFormatVersion: 2
+guid: e308d736a598ba0439a65f1f29e42190
+folderAsset: yes
+timeCreated: 1495790078
+licenseType: Store
+DefaultImporter:
+  userData: 
+  assetBundleName: 
+  assetBundleVariant: 

BIN
Assets/OpenCVForUnity/Extra/exclude_contrib/iOS/libopencvforunity.a


+ 20 - 0
Assets/OpenCVForUnity/Extra/exclude_contrib/iOS/libopencvforunity.a.meta

@@ -0,0 +1,20 @@
+fileFormatVersion: 2
+guid: da9747da83248e941adc14b1a60e4521
+timeCreated: 1495800246
+licenseType: Store
+PluginImporter:
+  serializedVersion: 1
+  iconMap: {}
+  executionOrder: {}
+  isPreloaded: 0
+  platformData:
+    Any:
+      enabled: 0
+      settings: {}
+    Editor:
+      enabled: 0
+      settings:
+        DefaultValueInitialized: true
+  userData: 
+  assetBundleName: 
+  assetBundleVariant: 

+ 21 - 0
Assets/OpenCVForUnity/Extra/exclude_contrib/iOS/opencv2.framework.meta

@@ -0,0 +1,21 @@
+fileFormatVersion: 2
+guid: 183de44caf99e8c498bf002f729d3aa1
+folderAsset: yes
+timeCreated: 1495800246
+licenseType: Store
+PluginImporter:
+  serializedVersion: 1
+  iconMap: {}
+  executionOrder: {}
+  isPreloaded: 0
+  platformData:
+    Any:
+      enabled: 0
+      settings: {}
+    Editor:
+      enabled: 0
+      settings:
+        DefaultValueInitialized: true
+  userData: 
+  assetBundleName: 
+  assetBundleVariant: 

+ 9 - 0
Assets/OpenCVForUnity/Extra/exclude_contrib/iOS/opencv2.framework/Headers.meta

@@ -0,0 +1,9 @@
+fileFormatVersion: 2
+guid: a17bca842ef0ff64c9411fb7a0bec2f7
+folderAsset: yes
+timeCreated: 1495798630
+licenseType: Store
+DefaultImporter:
+  userData: 
+  assetBundleName: 
+  assetBundleVariant: 

+ 2885 - 0
Assets/OpenCVForUnity/Extra/exclude_contrib/iOS/opencv2.framework/Headers/calib3d.hpp

@@ -0,0 +1,2885 @@
+/*M///////////////////////////////////////////////////////////////////////////////////////
+//
+//  IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING.
+//
+//  By downloading, copying, installing or using the software you agree to this license.
+//  If you do not agree to this license, do not download, install,
+//  copy or use the software.
+//
+//
+//                          License Agreement
+//                For Open Source Computer Vision Library
+//
+// Copyright (C) 2000-2008, Intel Corporation, all rights reserved.
+// Copyright (C) 2009, Willow Garage Inc., all rights reserved.
+// Copyright (C) 2013, OpenCV Foundation, all rights reserved.
+// Third party copyrights are property of their respective owners.
+//
+// Redistribution and use in source and binary forms, with or without modification,
+// are permitted provided that the following conditions are met:
+//
+//   * Redistribution's of source code must retain the above copyright notice,
+//     this list of conditions and the following disclaimer.
+//
+//   * Redistribution's in binary form must reproduce the above copyright notice,
+//     this list of conditions and the following disclaimer in the documentation
+//     and/or other materials provided with the distribution.
+//
+//   * The name of the copyright holders may not be used to endorse or promote products
+//     derived from this software without specific prior written permission.
+//
+// This software is provided by the copyright holders and contributors "as is" and
+// any express or implied warranties, including, but not limited to, the implied
+// warranties of merchantability and fitness for a particular purpose are disclaimed.
+// In no event shall the Intel Corporation or contributors be liable for any direct,
+// indirect, incidental, special, exemplary, or consequential damages
+// (including, but not limited to, procurement of substitute goods or services;
+// loss of use, data, or profits; or business interruption) however caused
+// and on any theory of liability, whether in contract, strict liability,
+// or tort (including negligence or otherwise) arising in any way out of
+// the use of this software, even if advised of the possibility of such damage.
+//
+//M*/
+
+#ifndef OPENCV_CALIB3D_HPP
+#define OPENCV_CALIB3D_HPP
+
+#include "opencv2/core.hpp"
+#include "opencv2/features2d.hpp"
+#include "opencv2/core/affine.hpp"
+
+/**
+  @defgroup calib3d Camera Calibration and 3D Reconstruction
+
+The functions in this section use a so-called pinhole camera model. In this model, a scene view is
+formed by projecting 3D points into the image plane using a perspective transformation.
+
+\f[s  \; m' = A [R|t] M'\f]
+
+or
+
+\f[s  \vecthree{u}{v}{1} = \vecthreethree{f_x}{0}{c_x}{0}{f_y}{c_y}{0}{0}{1}
+\begin{bmatrix}
+r_{11} & r_{12} & r_{13} & t_1  \\
+r_{21} & r_{22} & r_{23} & t_2  \\
+r_{31} & r_{32} & r_{33} & t_3
+\end{bmatrix}
+\begin{bmatrix}
+X \\
+Y \\
+Z \\
+1
+\end{bmatrix}\f]
+
+where:
+
+-   \f$(X, Y, Z)\f$ are the coordinates of a 3D point in the world coordinate space
+-   \f$(u, v)\f$ are the coordinates of the projection point in pixels
+-   \f$A\f$ is a camera matrix, or a matrix of intrinsic parameters
+-   \f$(cx, cy)\f$ is a principal point that is usually at the image center
+-   \f$fx, fy\f$ are the focal lengths expressed in pixel units.
+
+Thus, if an image from the camera is scaled by a factor, all of these parameters should be scaled
+(multiplied/divided, respectively) by the same factor. The matrix of intrinsic parameters does not
+depend on the scene viewed. So, once estimated, it can be re-used as long as the focal length is
+fixed (in case of zoom lens). The joint rotation-translation matrix \f$[R|t]\f$ is called a matrix of
+extrinsic parameters. It is used to describe the camera motion around a static scene, or vice versa,
+rigid motion of an object in front of a still camera. That is, \f$[R|t]\f$ translates coordinates of a
+point \f$(X, Y, Z)\f$ to a coordinate system, fixed with respect to the camera. The transformation above
+is equivalent to the following (when \f$z \ne 0\f$ ):
+
+\f[\begin{array}{l}
+\vecthree{x}{y}{z} = R  \vecthree{X}{Y}{Z} + t \\
+x' = x/z \\
+y' = y/z \\
+u = f_x*x' + c_x \\
+v = f_y*y' + c_y
+\end{array}\f]
+
+The following figure illustrates the pinhole camera model.
+
+![Pinhole camera model](pics/pinhole_camera_model.png)
+
+Real lenses usually have some distortion, mostly radial distortion and slight tangential distortion.
+So, the above model is extended as:
+
+\f[\begin{array}{l}
+\vecthree{x}{y}{z} = R  \vecthree{X}{Y}{Z} + t \\
+x' = x/z \\
+y' = y/z \\
+x'' = x'  \frac{1 + k_1 r^2 + k_2 r^4 + k_3 r^6}{1 + k_4 r^2 + k_5 r^4 + k_6 r^6} + 2 p_1 x' y' + p_2(r^2 + 2 x'^2) + s_1 r^2 + s_2 r^4 \\
+y'' = y'  \frac{1 + k_1 r^2 + k_2 r^4 + k_3 r^6}{1 + k_4 r^2 + k_5 r^4 + k_6 r^6} + p_1 (r^2 + 2 y'^2) + 2 p_2 x' y' + s_3 r^2 + s_4 r^4 \\
+\text{where} \quad r^2 = x'^2 + y'^2  \\
+u = f_x*x'' + c_x \\
+v = f_y*y'' + c_y
+\end{array}\f]
+
+\f$k_1\f$, \f$k_2\f$, \f$k_3\f$, \f$k_4\f$, \f$k_5\f$, and \f$k_6\f$ are radial distortion coefficients. \f$p_1\f$ and \f$p_2\f$ are
+tangential distortion coefficients. \f$s_1\f$, \f$s_2\f$, \f$s_3\f$, and \f$s_4\f$, are the thin prism distortion
+coefficients. Higher-order coefficients are not considered in OpenCV.
+
+The next figures show two common types of radial distortion: barrel distortion (typically \f$ k_1 < 0 \f$) and pincushion distortion (typically \f$ k_1 > 0 \f$).
+
+![](pics/distortion_examples.png)
+![](pics/distortion_examples2.png)
+
+In some cases the image sensor may be tilted in order to focus an oblique plane in front of the
+camera (Scheimpflug condition). This can be useful for particle image velocimetry (PIV) or
+triangulation with a laser fan. The tilt causes a perspective distortion of \f$x''\f$ and
+\f$y''\f$. This distortion can be modelled in the following way, see e.g. @cite Louhichi07.
+
+\f[\begin{array}{l}
+s\vecthree{x'''}{y'''}{1} =
+\vecthreethree{R_{33}(\tau_x, \tau_y)}{0}{-R_{13}(\tau_x, \tau_y)}
+{0}{R_{33}(\tau_x, \tau_y)}{-R_{23}(\tau_x, \tau_y)}
+{0}{0}{1} R(\tau_x, \tau_y) \vecthree{x''}{y''}{1}\\
+u = f_x*x''' + c_x \\
+v = f_y*y''' + c_y
+\end{array}\f]
+
+where the matrix \f$R(\tau_x, \tau_y)\f$ is defined by two rotations with angular parameter \f$\tau_x\f$
+and \f$\tau_y\f$, respectively,
+
+\f[
+R(\tau_x, \tau_y) =
+\vecthreethree{\cos(\tau_y)}{0}{-\sin(\tau_y)}{0}{1}{0}{\sin(\tau_y)}{0}{\cos(\tau_y)}
+\vecthreethree{1}{0}{0}{0}{\cos(\tau_x)}{\sin(\tau_x)}{0}{-\sin(\tau_x)}{\cos(\tau_x)} =
+\vecthreethree{\cos(\tau_y)}{\sin(\tau_y)\sin(\tau_x)}{-\sin(\tau_y)\cos(\tau_x)}
+{0}{\cos(\tau_x)}{\sin(\tau_x)}
+{\sin(\tau_y)}{-\cos(\tau_y)\sin(\tau_x)}{\cos(\tau_y)\cos(\tau_x)}.
+\f]
+
+In the functions below the coefficients are passed or returned as
+
+\f[(k_1, k_2, p_1, p_2[, k_3[, k_4, k_5, k_6 [, s_1, s_2, s_3, s_4[, \tau_x, \tau_y]]]])\f]
+
+vector. That is, if the vector contains four elements, it means that \f$k_3=0\f$ . The distortion
+coefficients do not depend on the scene viewed. Thus, they also belong to the intrinsic camera
+parameters. And they remain the same regardless of the captured image resolution. If, for example, a
+camera has been calibrated on images of 320 x 240 resolution, absolutely the same distortion
+coefficients can be used for 640 x 480 images from the same camera while \f$f_x\f$, \f$f_y\f$, \f$c_x\f$, and
+\f$c_y\f$ need to be scaled appropriately.
+
+The functions below use the above model to do the following:
+
+-   Project 3D points to the image plane given intrinsic and extrinsic parameters.
+-   Compute extrinsic parameters given intrinsic parameters, a few 3D points, and their
+projections.
+-   Estimate intrinsic and extrinsic camera parameters from several views of a known calibration
+pattern (every view is described by several 3D-2D point correspondences).
+-   Estimate the relative position and orientation of the stereo camera "heads" and compute the
+*rectification* transformation that makes the camera optical axes parallel.
+
+@note
+    -   A calibration sample for 3 cameras in horizontal position can be found at
+        opencv_source_code/samples/cpp/3calibration.cpp
+    -   A calibration sample based on a sequence of images can be found at
+        opencv_source_code/samples/cpp/calibration.cpp
+    -   A calibration sample in order to do 3D reconstruction can be found at
+        opencv_source_code/samples/cpp/build3dmodel.cpp
+    -   A calibration sample of an artificially generated camera and chessboard patterns can be
+        found at opencv_source_code/samples/cpp/calibration_artificial.cpp
+    -   A calibration example on stereo calibration can be found at
+        opencv_source_code/samples/cpp/stereo_calib.cpp
+    -   A calibration example on stereo matching can be found at
+        opencv_source_code/samples/cpp/stereo_match.cpp
+    -   (Python) A camera calibration sample can be found at
+        opencv_source_code/samples/python/calibrate.py
+
+  @{
+    @defgroup calib3d_fisheye Fisheye camera model
+
+    Definitions: Let P be a point in 3D of coordinates X in the world reference frame (stored in the
+    matrix X) The coordinate vector of P in the camera reference frame is:
+
+    \f[Xc = R X + T\f]
+
+    where R is the rotation matrix corresponding to the rotation vector om: R = rodrigues(om); call x, y
+    and z the 3 coordinates of Xc:
+
+    \f[x = Xc_1 \\ y = Xc_2 \\ z = Xc_3\f]
+
+    The pinhole projection coordinates of P is [a; b] where
+
+    \f[a = x / z \ and \ b = y / z \\ r^2 = a^2 + b^2 \\ \theta = atan(r)\f]
+
+    Fisheye distortion:
+
+    \f[\theta_d = \theta (1 + k_1 \theta^2 + k_2 \theta^4 + k_3 \theta^6 + k_4 \theta^8)\f]
+
+    The distorted point coordinates are [x'; y'] where
+
+    \f[x' = (\theta_d / r) a \\ y' = (\theta_d / r) b \f]
+
+    Finally, conversion into pixel coordinates: The final pixel coordinates vector [u; v] where:
+
+    \f[u = f_x (x' + \alpha y') + c_x \\
+    v = f_y y' + c_y\f]
+
+    @defgroup calib3d_c C API
+
+  @}
+ */
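+
+// Example: with f_x = f_y = 800, c_x = 320, c_y = 240 and zero distortion, a point
+// with camera coordinates (x, y, z) = (0.1, -0.05, 2.0) gives x' = 0.05, y' = -0.025
+// and therefore projects to u = 800*0.05 + 320 = 360, v = 800*(-0.025) + 240 = 220.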
+
+namespace cv
+{
+
+//! @addtogroup calib3d
+//! @{
+
+//! type of the robust estimation algorithm
+enum { LMEDS  = 4, //!< least-median of squares algorithm
+       RANSAC = 8, //!< RANSAC algorithm
+       RHO    = 16 //!< RHO algorithm
+     };
+
+enum { SOLVEPNP_ITERATIVE = 0,
+       SOLVEPNP_EPNP      = 1, //!< EPnP: Efficient Perspective-n-Point Camera Pose Estimation @cite lepetit2009epnp
+       SOLVEPNP_P3P       = 2, //!< Complete Solution Classification for the Perspective-Three-Point Problem @cite gao2003complete
+       SOLVEPNP_DLS       = 3, //!< A Direct Least-Squares (DLS) Method for PnP  @cite hesch2011direct
+       SOLVEPNP_UPNP      = 4, //!< Exhaustive Linearization for Robust Camera Pose and Focal Length Estimation @cite penate2013exhaustive
+       SOLVEPNP_AP3P      = 5, //!< An Efficient Algebraic Solution to the Perspective-Three-Point Problem @cite Ke17
+       SOLVEPNP_MAX_COUNT      //!< Used for count
+};
+
+enum { CALIB_CB_ADAPTIVE_THRESH = 1,
+       CALIB_CB_NORMALIZE_IMAGE = 2,
+       CALIB_CB_FILTER_QUADS    = 4,
+       CALIB_CB_FAST_CHECK      = 8,
+       CALIB_CB_EXHAUSTIVE      = 16,
+       CALIB_CB_ACCURACY        = 32
+     };
+
+enum { CALIB_CB_SYMMETRIC_GRID  = 1,
+       CALIB_CB_ASYMMETRIC_GRID = 2,
+       CALIB_CB_CLUSTERING      = 4
+     };
+
+enum { CALIB_NINTRINSIC          = 18,
+       CALIB_USE_INTRINSIC_GUESS = 0x00001,
+       CALIB_FIX_ASPECT_RATIO    = 0x00002,
+       CALIB_FIX_PRINCIPAL_POINT = 0x00004,
+       CALIB_ZERO_TANGENT_DIST   = 0x00008,
+       CALIB_FIX_FOCAL_LENGTH    = 0x00010,
+       CALIB_FIX_K1              = 0x00020,
+       CALIB_FIX_K2              = 0x00040,
+       CALIB_FIX_K3              = 0x00080,
+       CALIB_FIX_K4              = 0x00800,
+       CALIB_FIX_K5              = 0x01000,
+       CALIB_FIX_K6              = 0x02000,
+       CALIB_RATIONAL_MODEL      = 0x04000,
+       CALIB_THIN_PRISM_MODEL    = 0x08000,
+       CALIB_FIX_S1_S2_S3_S4     = 0x10000,
+       CALIB_TILTED_MODEL        = 0x40000,
+       CALIB_FIX_TAUX_TAUY       = 0x80000,
+       CALIB_USE_QR              = 0x100000, //!< use QR instead of SVD decomposition for solving. Faster but potentially less precise
+       CALIB_FIX_TANGENT_DIST    = 0x200000,
+       // only for stereo
+       CALIB_FIX_INTRINSIC       = 0x00100,
+       CALIB_SAME_FOCAL_LENGTH   = 0x00200,
+       // for stereo rectification
+       CALIB_ZERO_DISPARITY      = 0x00400,
+       CALIB_USE_LU              = (1 << 17), //!< use LU instead of SVD decomposition for solving. much faster but potentially less precise
+       CALIB_USE_EXTRINSIC_GUESS = (1 << 22), //!< for stereoCalibrate
+     };
+
+//! the algorithm for finding fundamental matrix
+enum { FM_7POINT = 1, //!< 7-point algorithm
+       FM_8POINT = 2, //!< 8-point algorithm
+       FM_LMEDS  = 4, //!< least-median algorithm. 7-point algorithm is used.
+       FM_RANSAC = 8  //!< RANSAC algorithm. It needs at least 15 points. 7-point algorithm is used.
+     };
+
+
+
+/** @brief Converts a rotation matrix to a rotation vector or vice versa.
+
+@param src Input rotation vector (3x1 or 1x3) or rotation matrix (3x3).
+@param dst Output rotation matrix (3x3) or rotation vector (3x1 or 1x3), respectively.
+@param jacobian Optional output Jacobian matrix, 3x9 or 9x3, which is a matrix of partial
+derivatives of the output array components with respect to the input array components.
+
+\f[\begin{array}{l} \theta \leftarrow norm(r) \\ r  \leftarrow r/ \theta \\ R =  \cos{\theta} I + (1- \cos{\theta} ) r r^T +  \sin{\theta} \vecthreethree{0}{-r_z}{r_y}{r_z}{0}{-r_x}{-r_y}{r_x}{0} \end{array}\f]
+
+Inverse transformation can be also done easily, since
+
+\f[\sin ( \theta ) \vecthreethree{0}{-r_z}{r_y}{r_z}{0}{-r_x}{-r_y}{r_x}{0} = \frac{R - R^T}{2}\f]
+
+A rotation vector is a convenient and most compact representation of a rotation matrix (since any
+rotation matrix has just 3 degrees of freedom). The representation is used in the global 3D geometry
+optimization procedures like calibrateCamera, stereoCalibrate, or solvePnP .
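+
+A minimal usage sketch (the rotation value below is an illustrative assumption, not part of this interface):
+@code
+    Vec3d rvec(0, CV_PI/4, 0);      // assumed rotation: 45 degrees about the Y axis
+    Mat R;
+    Rodrigues(rvec, R);             // 3x3 rotation matrix
+    Mat rvecBack;
+    Rodrigues(R, rvecBack);         // recovers the original rotation vector
+@endcode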
+ */
+CV_EXPORTS_W void Rodrigues( InputArray src, OutputArray dst, OutputArray jacobian = noArray() );
+
+/** @example samples/cpp/tutorial_code/features2D/Homography/pose_from_homography.cpp
+An example program about pose estimation from coplanar points
+
+Check @ref tutorial_homography "the corresponding tutorial" for more details
+*/
+
+/** Levenberg-Marquardt solver. Starting with the specified vector of parameters, it
+    optimizes the target error vector "err"
+    (i.e. it finds a local minimum of the sum of squared error components).
+
+    When needed, it calls user-provided callback.
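+
+    A minimal sketch of a user-provided callback (the data and model below are assumed for
+    illustration): fit a single slope parameter p so that err_i = p*x_i - y_i is minimized
+    in the least-squares sense.
+    @code
+        class LineFitCallback : public LMSolver::Callback
+        {
+        public:
+            LineFitCallback(const std::vector<double>& x, const std::vector<double>& y) : x_(x), y_(y) {}
+            bool compute(InputArray param, OutputArray err, OutputArray J) const CV_OVERRIDE
+            {
+                double p = param.getMat().at<double>(0);       // single CV_64F parameter
+                err.create((int)x_.size(), 1, CV_64F);
+                Mat e = err.getMat();
+                for (size_t i = 0; i < x_.size(); i++)
+                    e.at<double>((int)i) = p * x_[i] - y_[i];  // err_i = actual - ideal
+                if (J.needed())
+                {
+                    J.create((int)x_.size(), 1, CV_64F);
+                    Mat j = J.getMat();
+                    for (size_t i = 0; i < x_.size(); i++)
+                        j.at<double>((int)i) = x_[i];          // d(err_i)/dp
+                }
+                return true;
+            }
+        private:
+            std::vector<double> x_, y_;
+        };
+
+        // usage: start from p = 0 and refine
+        std::vector<double> xs = {1, 2, 3}, ys = {2, 4, 6};
+        Ptr<LMSolver> solver = LMSolver::create(makePtr<LineFitCallback>(xs, ys), 200);
+        Mat p = (Mat_<double>(1, 1) << 0.0);
+        solver->run(p);                                        // p converges towards 2
+    @endcode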
+*/
+class CV_EXPORTS LMSolver : public Algorithm
+{
+public:
+    class CV_EXPORTS Callback
+    {
+    public:
+        virtual ~Callback() {}
+        /**
+         computes error and Jacobian for the specified vector of parameters
+
+         @param param the current vector of parameters
+         @param err output vector of errors: err_i = actual_f_i - ideal_f_i
+         @param J output Jacobian: J_ij = d(err_i)/d(param_j)
+
+         when J=noArray(), it means that it does not need to be computed.
+         Dimensionality of error vector and param vector can be different.
+         The callback should explicitly allocate (with "create" method) each output array
+         (unless it's noArray()).
+        */
+        virtual bool compute(InputArray param, OutputArray err, OutputArray J) const = 0;
+    };
+
+    /**
+       Runs Levenberg-Marquardt algorithm using the passed vector of parameters as the start point.
+       The final vector of parameters (whether the algorithm converged or not) is stored at the same
+       vector. The method returns the number of iterations used. If it equals the previously specified
+       maxIters, the algorithm most likely did not converge.
+
+       @param param initial/final vector of parameters.
+
+       Note that the dimensionality of parameter space is defined by the size of param vector,
+       and the dimensionality of optimized criteria is defined by the size of err vector
+       computed by the callback.
+    */
+    virtual int run(InputOutputArray param) const = 0;
+
+    /**
+       Sets the maximum number of iterations
+       @param maxIters the number of iterations
+    */
+    virtual void setMaxIters(int maxIters) = 0;
+    /**
+       Retrieves the current maximum number of iterations
+    */
+    virtual int getMaxIters() const = 0;
+
+    /**
+       Creates a Levenberg-Marquardt solver
+
+       @param cb callback
+       @param maxIters maximum number of iterations that can be further
+         modified using setMaxIters() method.
+    */
+    static Ptr<LMSolver> create(const Ptr<LMSolver::Callback>& cb, int maxIters);
+};
+
+/** @brief Finds a perspective transformation between two planes.
+
+@param srcPoints Coordinates of the points in the original plane, a matrix of the type CV_32FC2
+or vector\<Point2f\> .
+@param dstPoints Coordinates of the points in the target plane, a matrix of the type CV_32FC2 or
+a vector\<Point2f\> .
+@param method Method used to compute a homography matrix. The following methods are possible:
+-   **0** - a regular method using all the points, i.e., the least squares method
+-   **RANSAC** - RANSAC-based robust method
+-   **LMEDS** - Least-Median robust method
+-   **RHO** - PROSAC-based robust method
+@param ransacReprojThreshold Maximum allowed reprojection error to treat a point pair as an inlier
+(used in the RANSAC and RHO methods only). That is, if
+\f[\| \texttt{dstPoints} _i -  \texttt{convertPointsHomogeneous} ( \texttt{H} * \texttt{srcPoints} _i) \|_2  >  \texttt{ransacReprojThreshold}\f]
+then the point \f$i\f$ is considered as an outlier. If srcPoints and dstPoints are measured in pixels,
+it usually makes sense to set this parameter somewhere in the range of 1 to 10.
+@param mask Optional output mask set by a robust method ( RANSAC or LMEDS ). Note that the input
+mask values are ignored.
+@param maxIters The maximum number of RANSAC iterations.
+@param confidence Confidence level, between 0 and 1.
+
+The function finds and returns the perspective transformation \f$H\f$ between the source and the
+destination planes:
+
+\f[s_i  \vecthree{x'_i}{y'_i}{1} \sim H  \vecthree{x_i}{y_i}{1}\f]
+
+so that the back-projection error
+
+\f[\sum _i \left ( x'_i- \frac{h_{11} x_i + h_{12} y_i + h_{13}}{h_{31} x_i + h_{32} y_i + h_{33}} \right )^2+ \left ( y'_i- \frac{h_{21} x_i + h_{22} y_i + h_{23}}{h_{31} x_i + h_{32} y_i + h_{33}} \right )^2\f]
+
+is minimized. If the parameter method is set to the default value 0, the function uses all the point
+pairs to compute an initial homography estimate with a simple least-squares scheme.
+
+However, if not all of the point pairs ( \f$srcPoints_i\f$, \f$dstPoints_i\f$ ) fit the rigid perspective
+transformation (that is, there are some outliers), this initial estimate will be poor. In this case,
+you can use one of the three robust methods. The methods RANSAC, LMeDS and RHO try many different
+random subsets of the corresponding point pairs (of four pairs each, collinear pairs are discarded), estimate the homography matrix
+using this subset and a simple least-squares algorithm, and then compute the quality/goodness of the
+computed homography (which is the number of inliers for RANSAC or the least median re-projection error for
+LMeDS). The best subset is then used to produce the initial estimate of the homography matrix and
+the mask of inliers/outliers.
+
+Regardless of the method, robust or not, the computed homography matrix is refined further (using
+inliers only in case of a robust method) with the Levenberg-Marquardt method to reduce the
+re-projection error even more.
+
+The methods RANSAC and RHO can handle practically any ratio of outliers but need a threshold to
+distinguish inliers from outliers. The method LMeDS does not need any threshold but it works
+correctly only when there are more than 50% of inliers. Finally, if there are no outliers and the
+noise is rather small, use the default method (method=0).
+
+The function is used to find initial intrinsic and extrinsic matrices. Homography matrix is
+determined up to a scale. Thus, it is normalized so that \f$h_{33}=1\f$. Note that whenever an \f$H\f$ matrix
+cannot be estimated, an empty one will be returned.
+
+@sa
+getAffineTransform, estimateAffine2D, estimateAffinePartial2D, getPerspectiveTransform, warpPerspective,
+perspectiveTransform
+ */
+CV_EXPORTS_W Mat findHomography( InputArray srcPoints, InputArray dstPoints,
+                                 int method = 0, double ransacReprojThreshold = 3,
+                                 OutputArray mask=noArray(), const int maxIters = 2000,
+                                 const double confidence = 0.995);
+
+/** @overload */
+CV_EXPORTS Mat findHomography( InputArray srcPoints, InputArray dstPoints,
+                               OutputArray mask, int method = 0, double ransacReprojThreshold = 3 );
+
+/** @brief Computes an RQ decomposition of 3x3 matrices.
+
+@param src 3x3 input matrix.
+@param mtxR Output 3x3 upper-triangular matrix.
+@param mtxQ Output 3x3 orthogonal matrix.
+@param Qx Optional output 3x3 rotation matrix around x-axis.
+@param Qy Optional output 3x3 rotation matrix around y-axis.
+@param Qz Optional output 3x3 rotation matrix around z-axis.
+
+The function computes a RQ decomposition using the given rotations. This function is used in
+decomposeProjectionMatrix to decompose the left 3x3 submatrix of a projection matrix into a camera
+and a rotation matrix.
+
+It optionally returns three rotation matrices, one for each axis, and the three Euler angles in
+degrees (as the return value) that could be used in OpenGL. Note, there is always more than one
+sequence of rotations about the three principal axes that results in the same orientation of an
+object, e.g. see @cite Slabaugh . Returned three rotation matrices and corresponding three Euler angles
+are only one of the possible solutions.
+ */
+CV_EXPORTS_W Vec3d RQDecomp3x3( InputArray src, OutputArray mtxR, OutputArray mtxQ,
+                                OutputArray Qx = noArray(),
+                                OutputArray Qy = noArray(),
+                                OutputArray Qz = noArray());
+
+/** @brief Decomposes a projection matrix into a rotation matrix and a camera matrix.
+
+@param projMatrix 3x4 input projection matrix P.
+@param cameraMatrix Output 3x3 camera matrix K.
+@param rotMatrix Output 3x3 external rotation matrix R.
+@param transVect Output 4x1 translation vector T.
+@param rotMatrixX Optional 3x3 rotation matrix around x-axis.
+@param rotMatrixY Optional 3x3 rotation matrix around y-axis.
+@param rotMatrixZ Optional 3x3 rotation matrix around z-axis.
+@param eulerAngles Optional three-element vector containing three Euler angles of rotation in
+degrees.
+
+The function computes a decomposition of a projection matrix into a calibration and a rotation
+matrix and the position of a camera.
+
+It optionally returns three rotation matrices, one for each axis, and three Euler angles that could
+be used in OpenGL. Note, there is always more than one sequence of rotations about the three
+principal axes that results in the same orientation of an object, e.g. see @cite Slabaugh . Returned
+three rotation matrices and corresponding three Euler angles are only one of the possible solutions.
+
+The function is based on RQDecomp3x3 .
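+
+A brief sketch (P is assumed to be a known 3x4 projection matrix, e.g. from a stereo rig):
+@code
+    Mat P;                                   // assumed 3x4, CV_64F projection matrix
+    Mat K, R, t;
+    decomposeProjectionMatrix(P, K, R, t);   // K: 3x3 intrinsics, R: 3x3 rotation, t: 4x1 homogeneous
+@endcode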
+ */
+CV_EXPORTS_W void decomposeProjectionMatrix( InputArray projMatrix, OutputArray cameraMatrix,
+                                             OutputArray rotMatrix, OutputArray transVect,
+                                             OutputArray rotMatrixX = noArray(),
+                                             OutputArray rotMatrixY = noArray(),
+                                             OutputArray rotMatrixZ = noArray(),
+                                             OutputArray eulerAngles =noArray() );
+
+/** @brief Computes partial derivatives of the matrix product for each multiplied matrix.
+
+@param A First multiplied matrix.
+@param B Second multiplied matrix.
+@param dABdA First output derivative matrix d(A\*B)/dA of size
+\f$\texttt{A.rows*B.cols} \times \texttt{A.rows*A.cols}\f$ .
+@param dABdB Second output derivative matrix d(A\*B)/dB of size
+\f$\texttt{A.rows*B.cols} \times \texttt{B.rows*B.cols}\f$ .
+
+The function computes partial derivatives of the elements of the matrix product \f$A*B\f$ with regard to
+the elements of each of the two input matrices. The function is used to compute the Jacobian
+matrices in stereoCalibrate but can also be used in any other similar optimization function.
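+
+A tiny sketch (random matrices are used only to illustrate the output sizes):
+@code
+    Mat A(3, 2, CV_64F), B(2, 4, CV_64F);
+    randu(A, -1.0, 1.0);
+    randu(B, -1.0, 1.0);
+    Mat dABdA, dABdB;
+    matMulDeriv(A, B, dABdA, dABdB);   // dABdA: 12x6, dABdB: 12x8
+@endcode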
+ */
+CV_EXPORTS_W void matMulDeriv( InputArray A, InputArray B, OutputArray dABdA, OutputArray dABdB );
+
+/** @brief Combines two rotation-and-shift transformations.
+
+@param rvec1 First rotation vector.
+@param tvec1 First translation vector.
+@param rvec2 Second rotation vector.
+@param tvec2 Second translation vector.
+@param rvec3 Output rotation vector of the superposition.
+@param tvec3 Output translation vector of the superposition.
+@param dr3dr1
+@param dr3dt1
+@param dr3dr2
+@param dr3dt2
+@param dt3dr1
+@param dt3dt1
+@param dt3dr2
+@param dt3dt2 Optional output derivatives of rvec3 or tvec3 with regard to rvec1, rvec2, tvec1 and
+tvec2, respectively.
+
+The functions compute:
+
+\f[\begin{array}{l} \texttt{rvec3} =  \mathrm{rodrigues} ^{-1} \left ( \mathrm{rodrigues} ( \texttt{rvec2} )  \cdot \mathrm{rodrigues} ( \texttt{rvec1} ) \right )  \\ \texttt{tvec3} =  \mathrm{rodrigues} ( \texttt{rvec2} )  \cdot \texttt{tvec1} +  \texttt{tvec2} \end{array} ,\f]
+
+where \f$\mathrm{rodrigues}\f$ denotes a rotation vector to a rotation matrix transformation, and
+\f$\mathrm{rodrigues}^{-1}\f$ denotes the inverse transformation. See Rodrigues for details.
+
+Also, the functions can compute the derivatives of the output vectors with regards to the input
+vectors (see matMulDeriv ). The functions are used inside stereoCalibrate but can also be used in
+your own code where Levenberg-Marquardt or another gradient-based solver is used to optimize a
+function that contains a matrix multiplication.
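+
+A minimal sketch (the two poses below are illustrative assumptions):
+@code
+    Mat rvec1 = Mat::zeros(3, 1, CV_64F);
+    Mat tvec1 = (Mat_<double>(3, 1) << 0, 0, 1);          // 1 unit forward
+    Mat rvec2 = (Mat_<double>(3, 1) << 0, CV_PI/2, 0);    // 90 degrees about Y
+    Mat tvec2 = Mat::zeros(3, 1, CV_64F);
+    Mat rvec3, tvec3;
+    composeRT(rvec1, tvec1, rvec2, tvec2, rvec3, tvec3);  // pose 1 followed by pose 2
+@endcode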
+ */
+CV_EXPORTS_W void composeRT( InputArray rvec1, InputArray tvec1,
+                             InputArray rvec2, InputArray tvec2,
+                             OutputArray rvec3, OutputArray tvec3,
+                             OutputArray dr3dr1 = noArray(), OutputArray dr3dt1 = noArray(),
+                             OutputArray dr3dr2 = noArray(), OutputArray dr3dt2 = noArray(),
+                             OutputArray dt3dr1 = noArray(), OutputArray dt3dt1 = noArray(),
+                             OutputArray dt3dr2 = noArray(), OutputArray dt3dt2 = noArray() );
+
+/** @brief Projects 3D points to an image plane.
+
+@param objectPoints Array of object points, 3xN/Nx3 1-channel or 1xN/Nx1 3-channel (or
+vector\<Point3f\> ), where N is the number of points in the view.
+@param rvec Rotation vector. See Rodrigues for details.
+@param tvec Translation vector.
+@param cameraMatrix Camera matrix \f$A = \vecthreethree{f_x}{0}{c_x}{0}{f_y}{c_y}{0}{0}{1}\f$ .
+@param distCoeffs Input vector of distortion coefficients
+\f$(k_1, k_2, p_1, p_2[, k_3[, k_4, k_5, k_6 [, s_1, s_2, s_3, s_4[, \tau_x, \tau_y]]]])\f$ of
+4, 5, 8, 12 or 14 elements. If the vector is empty, the zero distortion coefficients are assumed.
+@param imagePoints Output array of image points, 2xN/Nx2 1-channel or 1xN/Nx1 2-channel, or
+vector\<Point2f\> .
+@param jacobian Optional output 2Nx(10+\<numDistCoeffs\>) jacobian matrix of derivatives of image
+points with respect to components of the rotation vector, translation vector, focal lengths,
+coordinates of the principal point and the distortion coefficients. In the old interface different
+components of the jacobian are returned via different output parameters.
+@param aspectRatio Optional "fixed aspect ratio" parameter. If the parameter is not 0, the
+function assumes that the aspect ratio (*fx/fy*) is fixed and correspondingly adjusts the jacobian
+matrix.
+
+The function computes projections of 3D points to the image plane given intrinsic and extrinsic
+camera parameters. Optionally, the function computes Jacobians - matrices of partial derivatives of
+image points coordinates (as functions of all the input parameters) with respect to the particular
+parameters, intrinsic and/or extrinsic. The Jacobians are used during the global optimization in
+calibrateCamera, solvePnP, and stereoCalibrate . The function itself can also be used to compute a
+re-projection error given the current intrinsic and extrinsic parameters.
+
+@note By setting rvec=tvec=(0,0,0) or by setting cameraMatrix to a 3x3 identity matrix, or by
+passing zero distortion coefficients, you can get various useful partial cases of the function. This
+means that you can compute the distorted coordinates for a sparse set of points or apply a
+perspective transformation (and also compute the derivatives) in the ideal zero-distortion setup.
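+
+A brief sketch (the intrinsic parameters and pose below are illustrative assumptions):
+@code
+    vector<Point3f> objectPoints = { Point3f(0,0,0), Point3f(0.1f,0,0), Point3f(0,0.1f,0) };
+    Mat rvec = Mat::zeros(3, 1, CV_64F);
+    Mat tvec = (Mat_<double>(3, 1) << 0, 0, 1);           // 1 unit in front of the camera
+    Mat K = (Mat_<double>(3, 3) << 800, 0, 320,
+                                   0, 800, 240,
+                                   0,   0,   1);
+    vector<Point2f> imagePoints;
+    projectPoints(objectPoints, rvec, tvec, K, noArray(), imagePoints);  // zero distortion
+@endcode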
+ */
+CV_EXPORTS_W void projectPoints( InputArray objectPoints,
+                                 InputArray rvec, InputArray tvec,
+                                 InputArray cameraMatrix, InputArray distCoeffs,
+                                 OutputArray imagePoints,
+                                 OutputArray jacobian = noArray(),
+                                 double aspectRatio = 0 );
+
+/** @example samples/cpp/tutorial_code/features2D/Homography/homography_from_camera_displacement.cpp
+An example program about homography from the camera displacement
+
+Check @ref tutorial_homography "the corresponding tutorial" for more details
+*/
+
+/** @brief Finds an object pose from 3D-2D point correspondences.
+
+@param objectPoints Array of object points in the object coordinate space, Nx3 1-channel or
+1xN/Nx1 3-channel, where N is the number of points. vector\<Point3f\> can be also passed here.
+@param imagePoints Array of corresponding image points, Nx2 1-channel or 1xN/Nx1 2-channel,
+where N is the number of points. vector\<Point2f\> can be also passed here.
+@param cameraMatrix Input camera matrix \f$A = \vecthreethree{fx}{0}{cx}{0}{fy}{cy}{0}{0}{1}\f$ .
+@param distCoeffs Input vector of distortion coefficients
+\f$(k_1, k_2, p_1, p_2[, k_3[, k_4, k_5, k_6 [, s_1, s_2, s_3, s_4[, \tau_x, \tau_y]]]])\f$ of
+4, 5, 8, 12 or 14 elements. If the vector is NULL/empty, the zero distortion coefficients are
+assumed.
+@param rvec Output rotation vector (see @ref Rodrigues ) that, together with tvec , brings points from
+the model coordinate system to the camera coordinate system.
+@param tvec Output translation vector.
+@param useExtrinsicGuess Parameter used for #SOLVEPNP_ITERATIVE. If true (1), the function uses
+the provided rvec and tvec values as initial approximations of the rotation and translation
+vectors, respectively, and further optimizes them.
+@param flags Method for solving a PnP problem:
+-   **SOLVEPNP_ITERATIVE** Iterative method is based on Levenberg-Marquardt optimization. In
+this case the function finds such a pose that minimizes reprojection error, that is the sum
+of squared distances between the observed projections imagePoints and the projected (using
+projectPoints ) objectPoints .
+-   **SOLVEPNP_P3P** Method is based on the paper of X.S. Gao, X.-R. Hou, J. Tang, H.-F. Chang
+"Complete Solution Classification for the Perspective-Three-Point Problem" (@cite gao2003complete).
+In this case the function requires exactly four object and image points.
+-   **SOLVEPNP_AP3P** Method is based on the paper of T. Ke, S. Roumeliotis
+"An Efficient Algebraic Solution to the Perspective-Three-Point Problem" (@cite Ke17).
+In this case the function requires exactly four object and image points.
+-   **SOLVEPNP_EPNP** Method has been introduced by F.Moreno-Noguer, V.Lepetit and P.Fua in the
+paper "EPnP: Efficient Perspective-n-Point Camera Pose Estimation" (@cite lepetit2009epnp).
+-   **SOLVEPNP_DLS** Method is based on the paper of Joel A. Hesch and Stergios I. Roumeliotis.
+"A Direct Least-Squares (DLS) Method for PnP" (@cite hesch2011direct).
+-   **SOLVEPNP_UPNP** Method is based on the paper of A.Penate-Sanchez, J.Andrade-Cetto,
+F.Moreno-Noguer. "Exhaustive Linearization for Robust Camera Pose and Focal Length
+Estimation" (@cite penate2013exhaustive). In this case the function also estimates the parameters \f$f_x\f$ and \f$f_y\f$
+assuming that both have the same value. Then the cameraMatrix is updated with the estimated
+focal length.
+
+The function estimates the object pose given a set of object points, their corresponding image
+projections, as well as the camera matrix and the distortion coefficients, see the figure below
+(more precisely, the X-axis of the camera frame is pointing to the right, the Y-axis downward
+and the Z-axis forward).
+
+![](pnp.jpg)
+
+Points expressed in the world frame \f$ \bf{X}_w \f$ are projected into the image plane \f$ \left[ u, v \right] \f$
+using the perspective projection model \f$ \Pi \f$ and the camera intrinsic parameters matrix \f$ \bf{A} \f$:
+
+\f[
+  \begin{align*}
+  \begin{bmatrix}
+  u \\
+  v \\
+  1
+  \end{bmatrix} &=
+  \bf{A} \hspace{0.1em} \Pi \hspace{0.2em} ^{c}\bf{M}_w
+  \begin{bmatrix}
+  X_{w} \\
+  Y_{w} \\
+  Z_{w} \\
+  1
+  \end{bmatrix} \\
+  \begin{bmatrix}
+  u \\
+  v \\
+  1
+  \end{bmatrix} &=
+  \begin{bmatrix}
+  f_x & 0 & c_x \\
+  0 & f_y & c_y \\
+  0 & 0 & 1
+  \end{bmatrix}
+  \begin{bmatrix}
+  1 & 0 & 0 & 0 \\
+  0 & 1 & 0 & 0 \\
+  0 & 0 & 1 & 0
+  \end{bmatrix}
+  \begin{bmatrix}
+  r_{11} & r_{12} & r_{13} & t_x \\
+  r_{21} & r_{22} & r_{23} & t_y \\
+  r_{31} & r_{32} & r_{33} & t_z \\
+  0 & 0 & 0 & 1
+  \end{bmatrix}
+  \begin{bmatrix}
+  X_{w} \\
+  Y_{w} \\
+  Z_{w} \\
+  1
+  \end{bmatrix}
+  \end{align*}
+\f]
+
+The estimated pose is thus the rotation (`rvec`) and the translation (`tvec`) vectors that allow transforming
+a 3D point expressed in the world frame into the camera frame:
+
+\f[
+  \begin{align*}
+  \begin{bmatrix}
+  X_c \\
+  Y_c \\
+  Z_c \\
+  1
+  \end{bmatrix} &=
+  \hspace{0.2em} ^{c}\bf{M}_w
+  \begin{bmatrix}
+  X_{w} \\
+  Y_{w} \\
+  Z_{w} \\
+  1
+  \end{bmatrix} \\
+  \begin{bmatrix}
+  X_c \\
+  Y_c \\
+  Z_c \\
+  1
+  \end{bmatrix} &=
+  \begin{bmatrix}
+  r_{11} & r_{12} & r_{13} & t_x \\
+  r_{21} & r_{22} & r_{23} & t_y \\
+  r_{31} & r_{32} & r_{33} & t_z \\
+  0 & 0 & 0 & 1
+  \end{bmatrix}
+  \begin{bmatrix}
+  X_{w} \\
+  Y_{w} \\
+  Z_{w} \\
+  1
+  \end{bmatrix}
+  \end{align*}
+\f]
+
+@note
+   -   An example of how to use solvePnP for planar augmented reality can be found at
+        opencv_source_code/samples/python/plane_ar.py
+   -   If you are using Python:
+        - Numpy array slices won't work as input because solvePnP requires contiguous
+        arrays (enforced by the assertion using cv::Mat::checkVector() around line 55 of
+        modules/calib3d/src/solvepnp.cpp version 2.4.9)
+        - The P3P algorithm requires image points to be in an array of shape (N,1,2) due
+        to its calling of cv::undistortPoints (around line 75 of modules/calib3d/src/solvepnp.cpp version 2.4.9)
+        which requires 2-channel information.
+        - Thus, given some data D = np.array(...) where D.shape = (N,M), in order to use a subset of
+        it as, e.g., imagePoints, one must effectively copy it into a new array: imagePoints =
+        np.ascontiguousarray(D[:,:2]).reshape((N,1,2))
+   -   The methods **SOLVEPNP_DLS** and **SOLVEPNP_UPNP** cannot be used as the current implementations are
+       unstable and sometimes give completely wrong results. If you pass one of these two
+       flags, **SOLVEPNP_EPNP** method will be used instead.
+   -   The minimum number of points is 4 in the general case. In the case of **SOLVEPNP_P3P** and **SOLVEPNP_AP3P**
+       methods, it is required to use exactly 4 points (the first 3 points are used to estimate all the solutions
+       of the P3P problem, the last one is used to retain the best solution that minimizes the reprojection error).
+   -   With **SOLVEPNP_ITERATIVE** method and `useExtrinsicGuess=true`, the minimum number of points is 3 (3 points
+       are sufficient to compute a pose but there are up to 4 solutions). The initial solution should be close to the
+       global solution to converge.
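+
+A minimal usage sketch (the correspondences and intrinsics are assumed to be known):
+@code
+    vector<Point3f> objectPoints;   // >= 4 model points
+    vector<Point2f> imagePoints;    // corresponding 2D detections
+    // ... fill objectPoints and imagePoints ...
+    Mat K = (Mat_<double>(3, 3) << 800, 0, 320, 0, 800, 240, 0, 0, 1);  // assumed intrinsics
+    Mat distCoeffs = Mat::zeros(4, 1, CV_64F);
+    Mat rvec, tvec;
+    bool ok = solvePnP(objectPoints, imagePoints, K, distCoeffs, rvec, tvec);
+@endcode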
+ */
+CV_EXPORTS_W bool solvePnP( InputArray objectPoints, InputArray imagePoints,
+                            InputArray cameraMatrix, InputArray distCoeffs,
+                            OutputArray rvec, OutputArray tvec,
+                            bool useExtrinsicGuess = false, int flags = SOLVEPNP_ITERATIVE );
+
+/** @brief Finds an object pose from 3D-2D point correspondences using the RANSAC scheme.
+
+@param objectPoints Array of object points in the object coordinate space, Nx3 1-channel or
+1xN/Nx1 3-channel, where N is the number of points. vector\<Point3f\> can be also passed here.
+@param imagePoints Array of corresponding image points, Nx2 1-channel or 1xN/Nx1 2-channel,
+where N is the number of points. vector\<Point2f\> can be also passed here.
+@param cameraMatrix Input camera matrix \f$A = \vecthreethree{fx}{0}{cx}{0}{fy}{cy}{0}{0}{1}\f$ .
+@param distCoeffs Input vector of distortion coefficients
+\f$(k_1, k_2, p_1, p_2[, k_3[, k_4, k_5, k_6 [, s_1, s_2, s_3, s_4[, \tau_x, \tau_y]]]])\f$ of
+4, 5, 8, 12 or 14 elements. If the vector is NULL/empty, the zero distortion coefficients are
+assumed.
+@param rvec Output rotation vector (see Rodrigues ) that, together with tvec , brings points from
+the model coordinate system to the camera coordinate system.
+@param tvec Output translation vector.
+@param useExtrinsicGuess Parameter used for SOLVEPNP_ITERATIVE. If true (1), the function uses
+the provided rvec and tvec values as initial approximations of the rotation and translation
+vectors, respectively, and further optimizes them.
+@param iterationsCount Number of iterations.
+@param reprojectionError Inlier threshold value used by the RANSAC procedure. The parameter value
+is the maximum allowed distance between the observed and computed point projections to consider it
+an inlier.
+@param confidence The probability that the algorithm produces a useful result.
+@param inliers Output vector that contains indices of inliers in objectPoints and imagePoints .
+@param flags Method for solving a PnP problem (see solvePnP ).
+
+The function estimates an object pose given a set of object points, their corresponding image
+projections, as well as the camera matrix and the distortion coefficients. This function finds such
+a pose that minimizes reprojection error, that is, the sum of squared distances between the observed
+projections imagePoints and the projected (using projectPoints ) objectPoints. The use of RANSAC
+makes the function resistant to outliers.
+
+@note
+   -   An example of how to use solvePnPRansac for object detection can be found at
+        opencv_source_code/samples/cpp/tutorial_code/calib3d/real_time_pose_estimation/
+   -   The default method used to estimate the camera pose for the Minimal Sample Sets step
+       is #SOLVEPNP_EPNP. Exceptions are:
+         - if you choose #SOLVEPNP_P3P or #SOLVEPNP_AP3P, these methods will be used.
+         - if the number of input points is equal to 4, #SOLVEPNP_P3P is used.
+   -   The method used to estimate the camera pose using all the inliers is defined by the
+       flags parameters unless it is equal to #SOLVEPNP_P3P or #SOLVEPNP_AP3P. In this case,
+       the method #SOLVEPNP_EPNP will be used instead.
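+
+A short sketch (the correspondences and intrinsics are assumed, as in the solvePnP sketch above):
+@code
+    vector<Point3f> objectPoints;   // 3D model points
+    vector<Point2f> imagePoints;    // corresponding, possibly noisy, 2D detections
+    // ... fill objectPoints and imagePoints ...
+    Mat K = (Mat_<double>(3, 3) << 800, 0, 320, 0, 800, 240, 0, 0, 1);
+    Mat distCoeffs = Mat::zeros(4, 1, CV_64F);
+    Mat rvec, tvec;
+    vector<int> inliers;
+    bool ok = solvePnPRansac(objectPoints, imagePoints, K, distCoeffs,
+                             rvec, tvec, false, 100, 8.0f, 0.99, inliers);
+@endcode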
+ */
+CV_EXPORTS_W bool solvePnPRansac( InputArray objectPoints, InputArray imagePoints,
+                                  InputArray cameraMatrix, InputArray distCoeffs,
+                                  OutputArray rvec, OutputArray tvec,
+                                  bool useExtrinsicGuess = false, int iterationsCount = 100,
+                                  float reprojectionError = 8.0, double confidence = 0.99,
+                                  OutputArray inliers = noArray(), int flags = SOLVEPNP_ITERATIVE );
+/** @brief Finds an object pose from 3 3D-2D point correspondences.
+
+@param objectPoints Array of object points in the object coordinate space, 3x3 1-channel or
+1x3/3x1 3-channel. vector\<Point3f\> can be also passed here.
+@param imagePoints Array of corresponding image points, 3x2 1-channel or 1x3/3x1 2-channel.
+ vector\<Point2f\> can be also passed here.
+@param cameraMatrix Input camera matrix \f$A = \vecthreethree{fx}{0}{cx}{0}{fy}{cy}{0}{0}{1}\f$ .
+@param distCoeffs Input vector of distortion coefficients
+\f$(k_1, k_2, p_1, p_2[, k_3[, k_4, k_5, k_6 [, s_1, s_2, s_3, s_4[, \tau_x, \tau_y]]]])\f$ of
+4, 5, 8, 12 or 14 elements. If the vector is NULL/empty, the zero distortion coefficients are
+assumed.
+@param rvecs Output rotation vectors (see Rodrigues ) that, together with tvecs , bring points from
+the model coordinate system to the camera coordinate system. A P3P problem has up to 4 solutions.
+@param tvecs Output translation vectors.
+@param flags Method for solving a P3P problem:
+-   **SOLVEPNP_P3P** Method is based on the paper of X.S. Gao, X.-R. Hou, J. Tang, H.-F. Chang
+"Complete Solution Classification for the Perspective-Three-Point Problem" (@cite gao2003complete).
+-   **SOLVEPNP_AP3P** Method is based on the paper of Tong Ke and Stergios I. Roumeliotis.
+"An Efficient Algebraic Solution to the Perspective-Three-Point Problem" (@cite Ke17).
+
+The function estimates the object pose given 3 object points, their corresponding image
+projections, as well as the camera matrix and the distortion coefficients.
+ */
+CV_EXPORTS_W int solveP3P( InputArray objectPoints, InputArray imagePoints,
+                           InputArray cameraMatrix, InputArray distCoeffs,
+                           OutputArrayOfArrays rvecs, OutputArrayOfArrays tvecs,
+                           int flags );
+
+/** @brief Finds an initial camera matrix from 3D-2D point correspondences.
+
+@param objectPoints Vector of vectors of the calibration pattern points in the calibration pattern
+coordinate space. In the old interface all the per-view vectors are concatenated. See
+calibrateCamera for details.
+@param imagePoints Vector of vectors of the projections of the calibration pattern points. In the
+old interface all the per-view vectors are concatenated.
+@param imageSize Image size in pixels used to initialize the principal point.
+@param aspectRatio If it is zero or negative, both \f$f_x\f$ and \f$f_y\f$ are estimated independently.
+Otherwise, \f$f_x = f_y * \texttt{aspectRatio}\f$ .
+
+The function estimates and returns an initial camera matrix for the camera calibration process.
+Currently, the function only supports planar calibration patterns, which are patterns where each
+object point has a z-coordinate of 0.
+ */
+CV_EXPORTS_W Mat initCameraMatrix2D( InputArrayOfArrays objectPoints,
+                                     InputArrayOfArrays imagePoints,
+                                     Size imageSize, double aspectRatio = 1.0 );
+
+/** @brief Finds the positions of internal corners of the chessboard.
+
+@param image Source chessboard view. It must be an 8-bit grayscale or color image.
+@param patternSize Number of inner corners per a chessboard row and column
+( patternSize = cv::Size(points_per_row,points_per_column) = cv::Size(columns,rows) ).
+@param corners Output array of detected corners.
+@param flags Various operation flags that can be zero or a combination of the following values:
+-   **CALIB_CB_ADAPTIVE_THRESH** Use adaptive thresholding to convert the image to black
+and white, rather than a fixed threshold level (computed from the average image brightness).
+-   **CALIB_CB_NORMALIZE_IMAGE** Normalize the image gamma with equalizeHist before
+applying fixed or adaptive thresholding.
+-   **CALIB_CB_FILTER_QUADS** Use additional criteria (like contour area, perimeter,
+square-like shape) to filter out false quads extracted at the contour retrieval stage.
+-   **CALIB_CB_FAST_CHECK** Run a fast check on the image that looks for chessboard corners,
+and shortcut the call if none is found. This can drastically speed up the call in the
+degenerate condition when no chessboard is observed.
+
+The function attempts to determine whether the input image is a view of the chessboard pattern and
+locate the internal chessboard corners. The function returns a non-zero value if all of the corners
+are found and they are placed in a certain order (row by row, left to right in every row).
+Otherwise, if the function fails to find all the corners or reorder them, it returns 0. For example,
+a regular chessboard has 8 x 8 squares and 7 x 7 internal corners, that is, points where the black
+squares touch each other. The detected coordinates are approximate, and to determine their positions
+more accurately, the function calls cornerSubPix. You also may use the function cornerSubPix with
+different parameters if returned coordinates are not accurate enough.
+
+Sample usage of detecting and drawing chessboard corners:
+@code
+    Size patternsize(8,6); //interior number of corners
+    Mat gray = ....; //source image
+    vector<Point2f> corners; //this will be filled by the detected corners
+
+    //CALIB_CB_FAST_CHECK saves a lot of time on images
+    //that do not contain any chessboard corners
+    bool patternfound = findChessboardCorners(gray, patternsize, corners,
+            CALIB_CB_ADAPTIVE_THRESH + CALIB_CB_NORMALIZE_IMAGE
+            + CALIB_CB_FAST_CHECK);
+
+    if(patternfound)
+      cornerSubPix(gray, corners, Size(11, 11), Size(-1, -1),
+        TermCriteria(TermCriteria::EPS + TermCriteria::COUNT, 30, 0.1));
+
+    drawChessboardCorners(img, patternsize, Mat(corners), patternfound);
+@endcode
+@note The function requires white space (like a square-thick border, the wider the better) around
+the board to make the detection more robust in various environments. Otherwise, if there is no
+border and the background is dark, the outer black squares cannot be segmented properly and so the
+square grouping and ordering algorithm fails.
+ */
+CV_EXPORTS_W bool findChessboardCorners( InputArray image, Size patternSize, OutputArray corners,
+                                         int flags = CALIB_CB_ADAPTIVE_THRESH + CALIB_CB_NORMALIZE_IMAGE );
+
+/*
+   Checks whether the image contains a chessboard of the specified size or not.
+   If it does, a nonzero value is returned.
+*/
+CV_EXPORTS_W bool checkChessboard(InputArray img, Size size);
+
+/** @brief Finds the positions of internal corners of the chessboard using a sector based approach.
+
+@param image Source chessboard view. It must be an 8-bit grayscale or color image.
+@param patternSize Number of inner corners per a chessboard row and column
+( patternSize = cv::Size(points_per_row,points_per_column) = cv::Size(columns,rows) ).
+@param corners Output array of detected corners.
+@param flags Various operation flags that can be zero or a combination of the following values:
+-   **CALIB_CB_NORMALIZE_IMAGE** Normalize the image gamma with equalizeHist before detection.
+-   **CALIB_CB_EXHAUSTIVE** Run an exhaustive search to improve detection rate.
+-   **CALIB_CB_ACCURACY** Upsample the input image to improve sub-pixel accuracy due to aliasing effects.
+This should be used if an accurate camera calibration is required.
+
+The function is analogous to findChessboardCorners but uses a localized radon
+transformation approximated by box filters, which makes it more robust to all sorts of
+noise and faster on larger images, and it is able to directly return the sub-pixel
+positions of the internal chessboard corners. The method is based on the paper
+@cite duda2018 "Accurate Detection and Localization of Checkerboard Corners for
+Calibration", which demonstrates that the returned sub-pixel positions are more
+accurate than the ones returned by cornerSubPix, allowing a precise camera
+calibration for demanding applications.
+
+@note The function requires a white border with roughly the same width as one
+of the checkerboard fields around the whole board to improve the detection in
+various environments. In addition, because of the localized radon
+transformation it is beneficial to use round corners for the field corners
+which are located on the outside of the board. The following figure illustrates
+a sample checkerboard optimized for the detection. However, any other checkerboard
+can be used as well.
+![Checkerboard](pics/checkerboard_radon.png)
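+
+A brief sketch (gray is an assumed 8-bit grayscale input image):
+@code
+    Mat gray;                         // assumed source image
+    vector<Point2f> corners;
+    bool found = findChessboardCornersSB(gray, Size(8, 6), corners,
+                                         CALIB_CB_NORMALIZE_IMAGE | CALIB_CB_EXHAUSTIVE);
+@endcode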
+ */
+CV_EXPORTS_W bool findChessboardCornersSB(InputArray image,Size patternSize, OutputArray corners,int flags=0);
+
+//! finds subpixel-accurate positions of the chessboard corners
+CV_EXPORTS bool find4QuadCornerSubpix( InputArray img, InputOutputArray corners, Size region_size );
+
+/** @brief Renders the detected chessboard corners.
+
+@param image Destination image. It must be an 8-bit color image.
+@param patternSize Number of inner corners per a chessboard row and column
+(patternSize = cv::Size(points_per_row,points_per_column)).
+@param corners Array of detected corners, the output of findChessboardCorners.
+@param patternWasFound Parameter indicating whether the complete board was found or not. The
+return value of findChessboardCorners should be passed here.
+
+The function draws individual chessboard corners detected either as red circles if the board was not
+found, or as colored corners connected with lines if the board was found.
+ */
+CV_EXPORTS_W void drawChessboardCorners( InputOutputArray image, Size patternSize,
+                                         InputArray corners, bool patternWasFound );
+
+/** @brief Draw axes of the world/object coordinate system from pose estimation. @sa solvePnP
+
+@param image Input/output image. It must have 1 or 3 channels. The number of channels is not altered.
+@param cameraMatrix Input 3x3 floating-point matrix of camera intrinsic parameters.
+\f$A = \vecthreethree{f_x}{0}{c_x}{0}{f_y}{c_y}{0}{0}{1}\f$
+@param distCoeffs Input vector of distortion coefficients
+\f$(k_1, k_2, p_1, p_2[, k_3[, k_4, k_5, k_6 [, s_1, s_2, s_3, s_4[, \tau_x, \tau_y]]]])\f$ of
+4, 5, 8, 12 or 14 elements. If the vector is empty, the zero distortion coefficients are assumed.
+@param rvec Rotation vector (see @ref Rodrigues ) that, together with tvec , brings points from
+the model coordinate system to the camera coordinate system.
+@param tvec Translation vector.
+@param length Length of the painted axes in the same unit as tvec (usually in meters).
+@param thickness Line thickness of the painted axes.
+
+This function draws the axes of the world/object coordinate system w.r.t. the camera frame.
+OX is drawn in red, OY in green and OZ in blue.
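+
+A one-line sketch (image, intrinsics and the pose are assumed, e.g. from a previous solvePnP call):
+@code
+    drawFrameAxes(image, cameraMatrix, distCoeffs, rvec, tvec, 0.1f, 2);  // axes 0.1 unit long
+@endcode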
+ */
+CV_EXPORTS_W void drawFrameAxes(InputOutputArray image, InputArray cameraMatrix, InputArray distCoeffs,
+                                InputArray rvec, InputArray tvec, float length, int thickness=3);
+
+struct CV_EXPORTS_W_SIMPLE CirclesGridFinderParameters
+{
+    CV_WRAP CirclesGridFinderParameters();
+    CV_PROP_RW cv::Size2f densityNeighborhoodSize;
+    CV_PROP_RW float minDensity;
+    CV_PROP_RW int kmeansAttempts;
+    CV_PROP_RW int minDistanceToAddKeypoint;
+    CV_PROP_RW int keypointScale;
+    CV_PROP_RW float minGraphConfidence;
+    CV_PROP_RW float vertexGain;
+    CV_PROP_RW float vertexPenalty;
+    CV_PROP_RW float existingVertexGain;
+    CV_PROP_RW float edgeGain;
+    CV_PROP_RW float edgePenalty;
+    CV_PROP_RW float convexHullFactor;
+    CV_PROP_RW float minRNGEdgeSwitchDist;
+
+    enum GridType
+    {
+      SYMMETRIC_GRID, ASYMMETRIC_GRID
+    };
+    GridType gridType;
+
+    CV_PROP_RW float squareSize; //!< Distance between two adjacent points. Used by CALIB_CB_CLUSTERING.
+    CV_PROP_RW float maxRectifiedDistance; //!< Max deviation from prediction. Used by CALIB_CB_CLUSTERING.
+};
+
+#ifndef DISABLE_OPENCV_3_COMPATIBILITY
+typedef CirclesGridFinderParameters CirclesGridFinderParameters2;
+#endif
+
+/** @brief Finds centers in the grid of circles.
+
+@param image grid view of input circles; it must be an 8-bit grayscale or color image.
+@param patternSize number of circles per row and column
+( patternSize = Size(points_per_row, points_per_column) ).
+@param centers output array of detected centers.
+@param flags various operation flags that can be one of the following values:
+-   **CALIB_CB_SYMMETRIC_GRID** uses symmetric pattern of circles.
+-   **CALIB_CB_ASYMMETRIC_GRID** uses asymmetric pattern of circles.
+-   **CALIB_CB_CLUSTERING** uses a special algorithm for grid detection. It is more robust to
+perspective distortions but much more sensitive to background clutter.
+@param blobDetector feature detector that finds blobs like dark circles on light background.
+@param parameters struct for finding circles in a grid pattern.
+
+The function attempts to determine whether the input image contains a grid of circles. If it does, the
+function locates the centers of the circles. The function returns a non-zero value if all of the centers
+have been found and they have been placed in a certain order (row by row, left to right in every
+row). Otherwise, if the function fails to find all the centers or reorder them, it returns 0.
+
+Sample usage of detecting and drawing the centers of circles:
+@code
+    Size patternsize(7,7); //number of centers
+    Mat gray = ....; //source image
+    vector<Point2f> centers; //this will be filled by the detected centers
+
+    bool patternfound = findCirclesGrid(gray, patternsize, centers);
+
+    drawChessboardCorners(img, patternsize, Mat(centers), patternfound);
+@endcode
+@note The function requires white space (like a square-thick border, the wider the better) around
+the board to make the detection more robust in various environments.
+ */
+CV_EXPORTS_W bool findCirclesGrid( InputArray image, Size patternSize,
+                                   OutputArray centers, int flags,
+                                   const Ptr<FeatureDetector> &blobDetector,
+                                   const CirclesGridFinderParameters& parameters);
+
+/** @overload */
+CV_EXPORTS_W bool findCirclesGrid( InputArray image, Size patternSize,
+                                   OutputArray centers, int flags = CALIB_CB_SYMMETRIC_GRID,
+                                   const Ptr<FeatureDetector> &blobDetector = SimpleBlobDetector::create());
+
+/** @brief Finds the camera intrinsic and extrinsic parameters from several views of a calibration pattern.
+
+@param objectPoints In the new interface it is a vector of vectors of calibration pattern points in
+the calibration pattern coordinate space (e.g. std::vector<std::vector<cv::Vec3f>>). The outer
+vector contains as many elements as the number of the pattern views. If the same calibration pattern
+is shown in each view and it is fully visible, all the vectors will be the same. It is, however,
+possible to use partially occluded patterns, or even different patterns in different views; then
+the vectors will be different. The points are 3D, but since they are in a pattern coordinate system,
+if the rig is planar it may make sense to place the model on the XY coordinate plane so that the
+Z-coordinate of each input object point is 0.
+In the old interface all the vectors of object points from different views are concatenated
+together.
+@param imagePoints In the new interface it is a vector of vectors of the projections of calibration
+pattern points (e.g. std::vector<std::vector<cv::Vec2f>>). imagePoints.size() must be equal to
+objectPoints.size(), and imagePoints[i].size() must be equal to objectPoints[i].size() for each i.
+In the old interface all the vectors of image points from different views are concatenated
+together.
+@param imageSize Size of the image used only to initialize the intrinsic camera matrix.
+@param cameraMatrix Output 3x3 floating-point camera matrix
+\f$A = \vecthreethree{f_x}{0}{c_x}{0}{f_y}{c_y}{0}{0}{1}\f$ . If CALIB_USE_INTRINSIC_GUESS
+and/or CALIB_FIX_ASPECT_RATIO are specified, some or all of fx, fy, cx, cy must be
+initialized before calling the function.
+@param distCoeffs Output vector of distortion coefficients
+\f$(k_1, k_2, p_1, p_2[, k_3[, k_4, k_5, k_6 [, s_1, s_2, s_3, s_4[, \tau_x, \tau_y]]]])\f$ of
+4, 5, 8, 12 or 14 elements.
+@param rvecs Output vector of rotation vectors (see Rodrigues ) estimated for each pattern view
+(e.g. std::vector<cv::Mat>). That is, each k-th rotation vector together with the corresponding
+k-th translation vector (see the next output parameter description) brings the calibration pattern
+from the model coordinate space (in which object points are specified) to the world coordinate
+space, that is, a real position of the calibration pattern in the k-th pattern view (k=0.. *M* -1).
+@param tvecs Output vector of translation vectors estimated for each pattern view.
+@param stdDeviationsIntrinsics Output vector of standard deviations estimated for intrinsic parameters.
+ Order of deviations values:
+\f$(f_x, f_y, c_x, c_y, k_1, k_2, p_1, p_2, k_3, k_4, k_5, k_6 , s_1, s_2, s_3,
+ s_4, \tau_x, \tau_y)\f$ If one of the parameters is not estimated, its deviation is equal to zero.
+@param stdDeviationsExtrinsics Output vector of standard deviations estimated for extrinsic parameters.
+ Order of deviations values: \f$(R_1, T_1, \dotsc , R_M, T_M)\f$ where M is number of pattern views,
+ \f$R_i, T_i\f$ are concatenated 1x3 vectors.
+ @param perViewErrors Output vector of the RMS re-projection error estimated for each pattern view.
+@param flags Different flags that may be zero or a combination of the following values:
+-   **CALIB_USE_INTRINSIC_GUESS** cameraMatrix contains valid initial values of
+fx, fy, cx, cy that are optimized further. Otherwise, (cx, cy) is initially set to the image
+center ( imageSize is used), and focal distances are computed in a least-squares fashion.
+Note, that if intrinsic parameters are known, there is no need to use this function just to
+estimate extrinsic parameters. Use solvePnP instead.
+-   **CALIB_FIX_PRINCIPAL_POINT** The principal point is not changed during the global
+optimization. It stays at the center or at a different location specified when
+CALIB_USE_INTRINSIC_GUESS is set too.
+-   **CALIB_FIX_ASPECT_RATIO** The function considers only fy as a free parameter. The
+ratio fx/fy stays the same as in the input cameraMatrix . When
+CALIB_USE_INTRINSIC_GUESS is not set, the actual input values of fx and fy are
+ignored, only their ratio is computed and used further.
+-   **CALIB_ZERO_TANGENT_DIST** Tangential distortion coefficients \f$(p_1, p_2)\f$ are set
+to zeros and stay zero.
+-   **CALIB_FIX_K1,...,CALIB_FIX_K6** The corresponding radial distortion
+coefficient is not changed during the optimization. If CALIB_USE_INTRINSIC_GUESS is
+set, the coefficient from the supplied distCoeffs matrix is used. Otherwise, it is set to 0.
+-   **CALIB_RATIONAL_MODEL** Coefficients k4, k5, and k6 are enabled. To provide the
+backward compatibility, this extra flag should be explicitly specified to make the
+calibration function use the rational model and return 8 coefficients. If the flag is not
+set, the function computes and returns only 5 distortion coefficients.
+-   **CALIB_THIN_PRISM_MODEL** Coefficients s1, s2, s3 and s4 are enabled. To provide the
+backward compatibility, this extra flag should be explicitly specified to make the
+calibration function use the thin prism model and return 12 coefficients. If the flag is not
+set, the function computes and returns only 5 distortion coefficients.
+-   **CALIB_FIX_S1_S2_S3_S4** The thin prism distortion coefficients are not changed during
+the optimization. If CALIB_USE_INTRINSIC_GUESS is set, the coefficient from the
+supplied distCoeffs matrix is used. Otherwise, it is set to 0.
+-   **CALIB_TILTED_MODEL** Coefficients tauX and tauY are enabled. To provide the
+backward compatibility, this extra flag should be explicitly specified to make the
+calibration function use the tilted sensor model and return 14 coefficients. If the flag is not
+set, the function computes and returns only 5 distortion coefficients.
+-   **CALIB_FIX_TAUX_TAUY** The coefficients of the tilted sensor model are not changed during
+the optimization. If CALIB_USE_INTRINSIC_GUESS is set, the coefficient from the
+supplied distCoeffs matrix is used. Otherwise, it is set to 0.
+@param criteria Termination criteria for the iterative optimization algorithm.
+
+@return the overall RMS re-projection error.
+
+The function estimates the intrinsic camera parameters and extrinsic parameters for each of the
+views. The algorithm is based on @cite Zhang2000 and @cite BouguetMCT . The coordinates of 3D object
+points and their corresponding 2D projections in each view must be specified. That may be achieved
+by using an object with a known geometry and easily detectable feature points. Such an object is
+called a calibration rig or calibration pattern, and OpenCV has built-in support for a chessboard as
+a calibration rig (see findChessboardCorners ). Currently, initialization of intrinsic parameters
+(when CALIB_USE_INTRINSIC_GUESS is not set) is only implemented for planar calibration
+patterns (where Z-coordinates of the object points must be all zeros). 3D calibration rigs can also
+be used as long as initial cameraMatrix is provided.
+
+The algorithm performs the following steps:
+
+-   Compute the initial intrinsic parameters (the option only available for planar calibration
+    patterns) or read them from the input parameters. The distortion coefficients are all set to
+    zeros initially unless some of CALIB_FIX_K? are specified.
+
+-   Estimate the initial camera pose as if the intrinsic parameters have been already known. This is
+    done using solvePnP .
+
+-   Run the global Levenberg-Marquardt optimization algorithm to minimize the reprojection error,
+    that is, the total sum of squared distances between the observed feature points imagePoints and
+    the projected (using the current estimates for camera parameters and the poses) object points
+    objectPoints. See projectPoints for details.
+
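+A compact usage sketch (the board geometry, image size and per-view corner detections are assumed):
+@code
+    Size boardSize(9, 6);
+    float squareSize = 25.f;                       // e.g. 25 mm squares
+    vector<vector<Point2f> > imagePoints;          // filled by findChessboardCorners, one vector per view
+    vector<Point3f> board;
+    for (int i = 0; i < boardSize.height; i++)
+        for (int j = 0; j < boardSize.width; j++)
+            board.push_back(Point3f(j*squareSize, i*squareSize, 0));
+    vector<vector<Point3f> > objectPoints(imagePoints.size(), board);
+    Mat cameraMatrix, distCoeffs;
+    vector<Mat> rvecs, tvecs;
+    double rms = calibrateCamera(objectPoints, imagePoints, Size(640, 480),
+                                 cameraMatrix, distCoeffs, rvecs, tvecs);
+@endcode
+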
+@note
+   If you use a non-square (=non-NxN) grid and findChessboardCorners for calibration, and
+    calibrateCamera returns bad values (zero distortion coefficients, an image center very far from
+    (w/2-0.5,h/2-0.5), and/or large differences between \f$f_x\f$ and \f$f_y\f$ (ratios of 10:1 or more)),
+    then you have probably used patternSize=cvSize(rows,cols) instead of using
+    patternSize=cvSize(cols,rows) in findChessboardCorners .
+
+@sa
+   calibrateCameraRO, findChessboardCorners, solvePnP, initCameraMatrix2D, stereoCalibrate, undistort
+ */
+CV_EXPORTS_AS(calibrateCameraExtended) double calibrateCamera( InputArrayOfArrays objectPoints,
+                                     InputArrayOfArrays imagePoints, Size imageSize,
+                                     InputOutputArray cameraMatrix, InputOutputArray distCoeffs,
+                                     OutputArrayOfArrays rvecs, OutputArrayOfArrays tvecs,
+                                     OutputArray stdDeviationsIntrinsics,
+                                     OutputArray stdDeviationsExtrinsics,
+                                     OutputArray perViewErrors,
+                                     int flags = 0, TermCriteria criteria = TermCriteria(
+                                        TermCriteria::COUNT + TermCriteria::EPS, 30, DBL_EPSILON) );
+
+/** @overload */
+CV_EXPORTS_W double calibrateCamera( InputArrayOfArrays objectPoints,
+                                     InputArrayOfArrays imagePoints, Size imageSize,
+                                     InputOutputArray cameraMatrix, InputOutputArray distCoeffs,
+                                     OutputArrayOfArrays rvecs, OutputArrayOfArrays tvecs,
+                                     int flags = 0, TermCriteria criteria = TermCriteria(
+                                        TermCriteria::COUNT + TermCriteria::EPS, 30, DBL_EPSILON) );
+
+/** @brief Finds the camera intrinsic and extrinsic parameters from several views of a calibration pattern.
+
+This function is an extension of calibrateCamera() with the method of releasing object which was
+proposed in @cite strobl2011iccv. In many common cases with inaccurate, unmeasured, roughly planar
+targets (calibration plates), this method can dramatically improve the precision of the estimated
+camera parameters. Both the object-releasing method and standard method are supported by this
+function. Use the parameter **iFixedPoint** for method selection. In the internal implementation,
+calibrateCamera() is a wrapper for this function.
+
+@param objectPoints Vector of vectors of calibration pattern points in the calibration pattern
+coordinate space. See calibrateCamera() for details. If the object-releasing method is to be used,
+the identical calibration board must be used in each view, it must be fully visible, all
+objectPoints[i] must be the same, and all points should be roughly close to a plane. **The calibration
+target has to be rigid, or at least static if the camera (rather than the calibration target) is
+shifted for grabbing images.**
+@param imagePoints Vector of vectors of the projections of calibration pattern points. See
+calibrateCamera() for details.
+@param imageSize Size of the image used only to initialize the intrinsic camera matrix.
+@param iFixedPoint The index of the 3D object point in objectPoints[0] to be fixed. It also acts as
+a switch for calibration method selection. If the object-releasing method is to be used, pass the
+parameter in the range [1, objectPoints[0].size()-2]; a value outside this range selects the
+standard calibration method. Usually the top-right corner point of the calibration board grid is
+recommended to be fixed when the object-releasing method is utilized. According to
+@cite strobl2011iccv, two other points are also fixed. In this implementation, objectPoints[0].front
+and objectPoints[0].back.z are used. With the object-releasing method, accurate rvecs, tvecs and
+newObjPoints are only possible if the coordinates of these three fixed points are accurate enough.
+@param cameraMatrix Output 3x3 floating-point camera matrix. See calibrateCamera() for details.
+@param distCoeffs Output vector of distortion coefficients. See calibrateCamera() for details.
+@param rvecs Output vector of rotation vectors estimated for each pattern view. See calibrateCamera()
+for details.
+@param tvecs Output vector of translation vectors estimated for each pattern view.
+@param newObjPoints The updated output vector of calibration pattern points. The coordinates might
+be scaled based on three fixed points. The returned coordinates are accurate only if the above
+mentioned three fixed points are accurate. If not needed, noArray() can be passed in. This parameter
+is ignored with standard calibration method.
+@param stdDeviationsIntrinsics Output vector of standard deviations estimated for intrinsic parameters.
+See calibrateCamera() for details.
+@param stdDeviationsExtrinsics Output vector of standard deviations estimated for extrinsic parameters.
+See calibrateCamera() for details.
+@param stdDeviationsObjPoints Output vector of standard deviations estimated for refined coordinates
+of calibration pattern points. It has the same size and order as objectPoints[0] vector. This
+parameter is ignored with standard calibration method.
+ @param perViewErrors Output vector of the RMS re-projection error estimated for each pattern view.
+@param flags Different flags that may be zero or a combination of some predefined values. See
+calibrateCamera() for details. If the object-releasing method is used, the calibration time may
+be much longer. CALIB_USE_QR or CALIB_USE_LU can be used for faster calibration, with potentially
+less precise and less stable results in some rare cases.
+@param criteria Termination criteria for the iterative optimization algorithm.
+
+@return the overall RMS re-projection error.
+
+The function estimates the intrinsic camera parameters and extrinsic parameters for each of the
+views. The algorithm is based on @cite Zhang2000, @cite BouguetMCT and @cite strobl2011iccv. See
+calibrateCamera() for other detailed explanations.
+@sa
+   calibrateCamera, findChessboardCorners, solvePnP, initCameraMatrix2D, stereoCalibrate, undistort
+ */
+CV_EXPORTS_AS(calibrateCameraROExtended) double calibrateCameraRO( InputArrayOfArrays objectPoints,
+                                     InputArrayOfArrays imagePoints, Size imageSize, int iFixedPoint,
+                                     InputOutputArray cameraMatrix, InputOutputArray distCoeffs,
+                                     OutputArrayOfArrays rvecs, OutputArrayOfArrays tvecs,
+                                     OutputArray newObjPoints,
+                                     OutputArray stdDeviationsIntrinsics,
+                                     OutputArray stdDeviationsExtrinsics,
+                                     OutputArray stdDeviationsObjPoints,
+                                     OutputArray perViewErrors,
+                                     int flags = 0, TermCriteria criteria = TermCriteria(
+                                        TermCriteria::COUNT + TermCriteria::EPS, 30, DBL_EPSILON) );
+
+/** @overload */
+CV_EXPORTS_W double calibrateCameraRO( InputArrayOfArrays objectPoints,
+                                     InputArrayOfArrays imagePoints, Size imageSize, int iFixedPoint,
+                                     InputOutputArray cameraMatrix, InputOutputArray distCoeffs,
+                                     OutputArrayOfArrays rvecs, OutputArrayOfArrays tvecs,
+                                     OutputArray newObjPoints,
+                                     int flags = 0, TermCriteria criteria = TermCriteria(
+                                        TermCriteria::COUNT + TermCriteria::EPS, 30, DBL_EPSILON) );
+
+/** @brief Computes useful camera characteristics from the camera matrix.
+
+@param cameraMatrix Input camera matrix that can be estimated by calibrateCamera or
+stereoCalibrate .
+@param imageSize Input image size in pixels.
+@param apertureWidth Physical width in mm of the sensor.
+@param apertureHeight Physical height in mm of the sensor.
+@param fovx Output field of view in degrees along the horizontal sensor axis.
+@param fovy Output field of view in degrees along the vertical sensor axis.
+@param focalLength Focal length of the lens in mm.
+@param principalPoint Principal point in mm.
+@param aspectRatio \f$f_y/f_x\f$
+
+The function computes various useful camera characteristics from the previously estimated camera
+matrix.
+
+@note
+   Keep in mind that the unit 'mm' here stands for whatever unit of measure was chosen for the
+    chessboard pitch, so it can be any unit.
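+
+A minimal sketch of querying the field of view; the 800-pixel focal length and the 3.75 um pixel
+pitch used to derive the aperture size below are illustrative values only:
+@code
+    cv::Mat cameraMatrix = (cv::Mat_<double>(3, 3) << 800, 0, 320, 0, 800, 240, 0, 0, 1);
+    cv::Size imageSize(640, 480);
+    double apertureWidth  = 640 * 0.00375; // sensor width in mm
+    double apertureHeight = 480 * 0.00375; // sensor height in mm
+    double fovx, fovy, focalLength, aspectRatio;
+    cv::Point2d principalPoint;
+    cv::calibrationMatrixValues(cameraMatrix, imageSize, apertureWidth, apertureHeight,
+                                fovx, fovy, focalLength, principalPoint, aspectRatio);
+@endcode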
+ */
+CV_EXPORTS_W void calibrationMatrixValues( InputArray cameraMatrix, Size imageSize,
+                                           double apertureWidth, double apertureHeight,
+                                           CV_OUT double& fovx, CV_OUT double& fovy,
+                                           CV_OUT double& focalLength, CV_OUT Point2d& principalPoint,
+                                           CV_OUT double& aspectRatio );
+
+/** @brief Calibrates the stereo camera.
+
+@param objectPoints Vector of vectors of the calibration pattern points.
+@param imagePoints1 Vector of vectors of the projections of the calibration pattern points,
+observed by the first camera.
+@param imagePoints2 Vector of vectors of the projections of the calibration pattern points,
+observed by the second camera.
+@param cameraMatrix1 Input/output first camera matrix:
+\f$\vecthreethree{f_x^{(j)}}{0}{c_x^{(j)}}{0}{f_y^{(j)}}{c_y^{(j)}}{0}{0}{1}\f$ , \f$j = 0,\, 1\f$ . If
+any of CALIB_USE_INTRINSIC_GUESS , CALIB_FIX_ASPECT_RATIO ,
+CALIB_FIX_INTRINSIC , or CALIB_FIX_FOCAL_LENGTH are specified, some or all of the
+matrix components must be initialized. See the flags description for details.
+@param distCoeffs1 Input/output vector of distortion coefficients
+\f$(k_1, k_2, p_1, p_2[, k_3[, k_4, k_5, k_6 [, s_1, s_2, s_3, s_4[, \tau_x, \tau_y]]]])\f$ of
+4, 5, 8, 12 or 14 elements. The output vector length depends on the flags.
+@param cameraMatrix2 Input/output second camera matrix. The parameter is similar to cameraMatrix1
+@param distCoeffs2 Input/output lens distortion coefficients for the second camera. The parameter
+is similar to distCoeffs1 .
+@param imageSize Size of the image used only to initialize intrinsic camera matrix.
+@param R Output rotation matrix between the 1st and the 2nd camera coordinate systems.
+@param T Output translation vector between the coordinate systems of the cameras.
+@param E Output essential matrix.
+@param F Output fundamental matrix.
+@param perViewErrors Output vector of the RMS re-projection error estimated for each pattern view.
+@param flags Different flags that may be zero or a combination of the following values:
+-   **CALIB_FIX_INTRINSIC** Fix cameraMatrix? and distCoeffs? so that only R, T, E , and F
+matrices are estimated.
+-   **CALIB_USE_INTRINSIC_GUESS** Optimize some or all of the intrinsic parameters
+according to the specified flags. Initial values are provided by the user.
+-   **CALIB_USE_EXTRINSIC_GUESS** R, T contain valid initial values that are optimized further.
+Otherwise R, T are initialized to the median value of the pattern views (each dimension separately).
+-   **CALIB_FIX_PRINCIPAL_POINT** Fix the principal points during the optimization.
+-   **CALIB_FIX_FOCAL_LENGTH** Fix \f$f^{(j)}_x\f$ and \f$f^{(j)}_y\f$ .
+-   **CALIB_FIX_ASPECT_RATIO** Optimize \f$f^{(j)}_y\f$ . Fix the ratio \f$f^{(j)}_x/f^{(j)}_y\f$ .
+-   **CALIB_SAME_FOCAL_LENGTH** Enforce \f$f^{(0)}_x=f^{(1)}_x\f$ and \f$f^{(0)}_y=f^{(1)}_y\f$ .
+-   **CALIB_ZERO_TANGENT_DIST** Set tangential distortion coefficients for each camera to
+zero and keep them fixed.
+-   **CALIB_FIX_K1,...,CALIB_FIX_K6** Do not change the corresponding radial
+distortion coefficient during the optimization. If CALIB_USE_INTRINSIC_GUESS is set,
+the coefficient from the supplied distCoeffs matrix is used. Otherwise, it is set to 0.
+-   **CALIB_RATIONAL_MODEL** Enable coefficients k4, k5, and k6. To provide the backward
+compatibility, this extra flag should be explicitly specified to make the calibration
+function use the rational model and return 8 coefficients. If the flag is not set, the
+function computes and returns only 5 distortion coefficients.
+-   **CALIB_THIN_PRISM_MODEL** Coefficients s1, s2, s3 and s4 are enabled. To provide the
+backward compatibility, this extra flag should be explicitly specified to make the
+calibration function use the thin prism model and return 12 coefficients. If the flag is not
+set, the function computes and returns only 5 distortion coefficients.
+-   **CALIB_FIX_S1_S2_S3_S4** The thin prism distortion coefficients are not changed during
+the optimization. If CALIB_USE_INTRINSIC_GUESS is set, the coefficient from the
+supplied distCoeffs matrix is used. Otherwise, it is set to 0.
+-   **CALIB_TILTED_MODEL** Coefficients tauX and tauY are enabled. To provide the
+backward compatibility, this extra flag should be explicitly specified to make the
+calibration function use the tilted sensor model and return 14 coefficients. If the flag is not
+set, the function computes and returns only 5 distortion coefficients.
+-   **CALIB_FIX_TAUX_TAUY** The coefficients of the tilted sensor model are not changed during
+the optimization. If CALIB_USE_INTRINSIC_GUESS is set, the coefficient from the
+supplied distCoeffs matrix is used. Otherwise, it is set to 0.
+@param criteria Termination criteria for the iterative optimization algorithm.
+
+The function estimates transformation between two cameras making a stereo pair. If you have a stereo
+camera where the relative position and orientation of two cameras is fixed, and if you computed
+poses of an object relative to the first camera and to the second camera, (R1, T1) and (R2, T2),
+respectively (this can be done with solvePnP ), then those poses definitely relate to each other.
+This means that, given ( \f$R_1\f$,\f$T_1\f$ ), it should be possible to compute ( \f$R_2\f$,\f$T_2\f$ ). You only
+need to know the position and orientation of the second camera relative to the first camera. This is
+what the described function does. It computes ( \f$R\f$,\f$T\f$ ) so that:
+
+\f[R_2=R*R_1\f]
+\f[T_2=R*T_1 + T,\f]
+
+Optionally, it computes the essential matrix E:
+
+\f[E= \vecthreethree{0}{-T_2}{T_1}{T_2}{0}{-T_0}{-T_1}{T_0}{0} *R\f]
+
+where \f$T_i\f$ are components of the translation vector \f$T\f$ : \f$T=[T_0, T_1, T_2]^T\f$ . And the function
+can also compute the fundamental matrix F:
+
+\f[F = cameraMatrix2^{-T} E cameraMatrix1^{-1}\f]
+
+Besides the stereo-related information, the function can also perform a full calibration of each of
+two cameras. However, due to the high dimensionality of the parameter space and noise in the input
+data, the function can diverge from the correct solution. If the intrinsic parameters can be
+estimated with high accuracy for each of the cameras individually (for example, using
+calibrateCamera ), you are recommended to do so and then pass CALIB_FIX_INTRINSIC flag to the
+function along with the computed intrinsic parameters. Otherwise, if all the parameters are
+estimated at once, it makes sense to restrict some parameters, for example, pass
+CALIB_SAME_FOCAL_LENGTH and CALIB_ZERO_TANGENT_DIST flags, which is usually a
+reasonable assumption.
+
+Similarly to calibrateCamera , the function minimizes the total re-projection error for all the
+points in all the available views from both cameras. The function returns the final value of the
+re-projection error.
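+
+A minimal sketch of a typical call where the per-camera intrinsics were already estimated with
+calibrateCamera and only the relative pose is refined (the intrinsics K1, D1, K2, D2 and the point
+vectors are assumed to be filled by the caller):
+@code
+    std::vector<std::vector<cv::Point3f>> objectPoints;
+    std::vector<std::vector<cv::Point2f>> imagePoints1, imagePoints2;
+    cv::Mat K1, D1, K2, D2;            // intrinsics from a previous calibrateCamera run
+    cv::Size imageSize(640, 480);
+    cv::Mat R, T, E, F;
+    double rms = cv::stereoCalibrate(objectPoints, imagePoints1, imagePoints2,
+                                     K1, D1, K2, D2, imageSize, R, T, E, F,
+                                     cv::CALIB_FIX_INTRINSIC);
+@endcode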
+ */
+CV_EXPORTS_AS(stereoCalibrateExtended) double stereoCalibrate( InputArrayOfArrays objectPoints,
+                                     InputArrayOfArrays imagePoints1, InputArrayOfArrays imagePoints2,
+                                     InputOutputArray cameraMatrix1, InputOutputArray distCoeffs1,
+                                     InputOutputArray cameraMatrix2, InputOutputArray distCoeffs2,
+                                     Size imageSize, InputOutputArray R,InputOutputArray T, OutputArray E, OutputArray F,
+                                     OutputArray perViewErrors, int flags = CALIB_FIX_INTRINSIC,
+                                     TermCriteria criteria = TermCriteria(TermCriteria::COUNT+TermCriteria::EPS, 30, 1e-6) );
+
+/// @overload
+CV_EXPORTS_W double stereoCalibrate( InputArrayOfArrays objectPoints,
+                                     InputArrayOfArrays imagePoints1, InputArrayOfArrays imagePoints2,
+                                     InputOutputArray cameraMatrix1, InputOutputArray distCoeffs1,
+                                     InputOutputArray cameraMatrix2, InputOutputArray distCoeffs2,
+                                     Size imageSize, OutputArray R,OutputArray T, OutputArray E, OutputArray F,
+                                     int flags = CALIB_FIX_INTRINSIC,
+                                     TermCriteria criteria = TermCriteria(TermCriteria::COUNT+TermCriteria::EPS, 30, 1e-6) );
+
+/** @brief Computes rectification transforms for each head of a calibrated stereo camera.
+
+@param cameraMatrix1 First camera matrix.
+@param distCoeffs1 First camera distortion parameters.
+@param cameraMatrix2 Second camera matrix.
+@param distCoeffs2 Second camera distortion parameters.
+@param imageSize Size of the image used for stereo calibration.
+@param R Rotation matrix between the coordinate systems of the first and the second cameras.
+@param T Translation vector between coordinate systems of the cameras.
+@param R1 Output 3x3 rectification transform (rotation matrix) for the first camera.
+@param R2 Output 3x3 rectification transform (rotation matrix) for the second camera.
+@param P1 Output 3x4 projection matrix in the new (rectified) coordinate systems for the first
+camera.
+@param P2 Output 3x4 projection matrix in the new (rectified) coordinate systems for the second
+camera.
+@param Q Output \f$4 \times 4\f$ disparity-to-depth mapping matrix (see reprojectImageTo3D ).
+@param flags Operation flags that may be zero or CALIB_ZERO_DISPARITY . If the flag is set,
+the function makes the principal points of each camera have the same pixel coordinates in the
+rectified views. And if the flag is not set, the function may still shift the images in the
+horizontal or vertical direction (depending on the orientation of epipolar lines) to maximize the
+useful image area.
+@param alpha Free scaling parameter. If it is -1 or absent, the function performs the default
+scaling. Otherwise, the parameter should be between 0 and 1. alpha=0 means that the rectified
+images are zoomed and shifted so that only valid pixels are visible (no black areas after
+rectification). alpha=1 means that the rectified image is decimated and shifted so that all the
+pixels from the original images from the cameras are retained in the rectified images (no source
+image pixels are lost). Obviously, any intermediate value yields an intermediate result between
+those two extreme cases.
+@param newImageSize New image resolution after rectification. The same size should be passed to
+initUndistortRectifyMap (see the stereo_calib.cpp sample in OpenCV samples directory). When (0,0)
+is passed (default), it is set to the original imageSize . Setting it to a larger value can help you
+preserve details in the original image, especially when there is a big radial distortion.
+@param validPixROI1 Optional output rectangles inside the rectified images where all the pixels
+are valid. If alpha=0 , the ROIs cover the whole images. Otherwise, they are likely to be smaller
+(see the picture below).
+@param validPixROI2 Optional output rectangles inside the rectified images where all the pixels
+are valid. If alpha=0 , the ROIs cover the whole images. Otherwise, they are likely to be smaller
+(see the picture below).
+
+The function computes the rotation matrices for each camera that (virtually) make both camera image
+planes the same plane. Consequently, this makes all the epipolar lines parallel and thus simplifies
+the dense stereo correspondence problem. The function takes the matrices computed by stereoCalibrate
+as input. As output, it provides two rotation matrices and also two projection matrices in the new
+coordinates. The function distinguishes the following two cases:
+
+-   **Horizontal stereo**: the first and the second camera views are shifted relative to each other
+    mainly along the x axis (with possible small vertical shift). In the rectified images, the
+    corresponding epipolar lines in the left and right cameras are horizontal and have the same
+    y-coordinate. P1 and P2 look like:
+
+    \f[\texttt{P1} = \begin{bmatrix} f & 0 & cx_1 & 0 \\ 0 & f & cy & 0 \\ 0 & 0 & 1 & 0 \end{bmatrix}\f]
+
+    \f[\texttt{P2} = \begin{bmatrix} f & 0 & cx_2 & T_x*f \\ 0 & f & cy & 0 \\ 0 & 0 & 1 & 0 \end{bmatrix} ,\f]
+
+    where \f$T_x\f$ is a horizontal shift between the cameras and \f$cx_1=cx_2\f$ if
+    CALIB_ZERO_DISPARITY is set.
+
+-   **Vertical stereo**: the first and the second camera views are shifted relative to each other
+    mainly in vertical direction (and probably a bit in the horizontal direction too). The epipolar
+    lines in the rectified images are vertical and have the same x-coordinate. P1 and P2 look like:
+
+    \f[\texttt{P1} = \begin{bmatrix} f & 0 & cx & 0 \\ 0 & f & cy_1 & 0 \\ 0 & 0 & 1 & 0 \end{bmatrix}\f]
+
+    \f[\texttt{P2} = \begin{bmatrix} f & 0 & cx & 0 \\ 0 & f & cy_2 & T_y*f \\ 0 & 0 & 1 & 0 \end{bmatrix} ,\f]
+
+    where \f$T_y\f$ is a vertical shift between the cameras and \f$cy_1=cy_2\f$ if CALIB_ZERO_DISPARITY is
+    set.
+
+As you can see, the first three columns of P1 and P2 will effectively be the new "rectified" camera
+matrices. The matrices, together with R1 and R2 , can then be passed to initUndistortRectifyMap to
+initialize the rectification map for each camera.
+
+See below the screenshot from the stereo_calib.cpp sample. Some red horizontal lines pass through
+the corresponding image regions. This means that the images are well rectified, which is what most
+stereo correspondence algorithms rely on. The green rectangles are roi1 and roi2 . You see that
+their interiors are all valid pixels.
+
+![image](pics/stereo_undistort.jpg)
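+
+A minimal sketch of the typical rectification pipeline built around this function (K1, D1, K2, D2,
+R, T and imageSize are assumed to come from stereoCalibrate, and `left`/`right` are the raw input
+images):
+@code
+    cv::Mat R1, R2, P1, P2, Q;
+    cv::Rect roi1, roi2;
+    cv::stereoRectify(K1, D1, K2, D2, imageSize, R, T, R1, R2, P1, P2, Q,
+                      cv::CALIB_ZERO_DISPARITY, -1, imageSize, &roi1, &roi2);
+    cv::Mat map1x, map1y, map2x, map2y;
+    cv::initUndistortRectifyMap(K1, D1, R1, P1, imageSize, CV_32FC1, map1x, map1y);
+    cv::initUndistortRectifyMap(K2, D2, R2, P2, imageSize, CV_32FC1, map2x, map2y);
+    cv::Mat rectLeft, rectRight;
+    cv::remap(left,  rectLeft,  map1x, map1y, cv::INTER_LINEAR);
+    cv::remap(right, rectRight, map2x, map2y, cv::INTER_LINEAR);
+@endcode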
+ */
+CV_EXPORTS_W void stereoRectify( InputArray cameraMatrix1, InputArray distCoeffs1,
+                                 InputArray cameraMatrix2, InputArray distCoeffs2,
+                                 Size imageSize, InputArray R, InputArray T,
+                                 OutputArray R1, OutputArray R2,
+                                 OutputArray P1, OutputArray P2,
+                                 OutputArray Q, int flags = CALIB_ZERO_DISPARITY,
+                                 double alpha = -1, Size newImageSize = Size(),
+                                 CV_OUT Rect* validPixROI1 = 0, CV_OUT Rect* validPixROI2 = 0 );
+
+/** @brief Computes a rectification transform for an uncalibrated stereo camera.
+
+@param points1 Array of feature points in the first image.
+@param points2 The corresponding points in the second image. The same formats as in
+findFundamentalMat are supported.
+@param F Input fundamental matrix. It can be computed from the same set of point pairs using
+findFundamentalMat .
+@param imgSize Size of the image.
+@param H1 Output rectification homography matrix for the first image.
+@param H2 Output rectification homography matrix for the second image.
+@param threshold Optional threshold used to filter out the outliers. If the parameter is greater
+than zero, all the point pairs that do not comply with the epipolar geometry (that is, the points
+for which \f$|\texttt{points2[i]}^T*\texttt{F}*\texttt{points1[i]}|>\texttt{threshold}\f$ ) are
+rejected prior to computing the homographies. Otherwise, all the points are considered inliers.
+
+The function computes the rectification transformations without knowing intrinsic parameters of the
+cameras and their relative position in the space, which explains the suffix "uncalibrated". Another
+related difference from stereoRectify is that the function outputs not the rectification
+transformations in the object (3D) space, but the planar perspective transformations encoded by the
+homography matrices H1 and H2 . The function implements the algorithm @cite Hartley99 .
+
+@note
+   While the algorithm does not need to know the intrinsic parameters of the cameras, it heavily
+    depends on the epipolar geometry. Therefore, if the camera lenses have a significant distortion,
+    it would be better to correct it before computing the fundamental matrix and calling this
+    function. For example, distortion coefficients can be estimated for each head of stereo camera
+    separately by using calibrateCamera . Then, the images can be corrected using undistort , or
+    just the point coordinates can be corrected with undistortPoints .
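+
+A minimal sketch, assuming pts1/pts2 are matched (and, if necessary, undistorted) points and F was
+estimated from them with findFundamentalMat:
+@code
+    cv::Mat H1, H2;
+    bool ok = cv::stereoRectifyUncalibrated(pts1, pts2, F, imgSize, H1, H2, 3.0);
+    // The images can then be rectified with cv::warpPerspective using H1 and H2.
+@endcode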
+ */
+CV_EXPORTS_W bool stereoRectifyUncalibrated( InputArray points1, InputArray points2,
+                                             InputArray F, Size imgSize,
+                                             OutputArray H1, OutputArray H2,
+                                             double threshold = 5 );
+
+//! computes the rectification transformations for 3-head camera, where all the heads are on the same line.
+CV_EXPORTS_W float rectify3Collinear( InputArray cameraMatrix1, InputArray distCoeffs1,
+                                      InputArray cameraMatrix2, InputArray distCoeffs2,
+                                      InputArray cameraMatrix3, InputArray distCoeffs3,
+                                      InputArrayOfArrays imgpt1, InputArrayOfArrays imgpt3,
+                                      Size imageSize, InputArray R12, InputArray T12,
+                                      InputArray R13, InputArray T13,
+                                      OutputArray R1, OutputArray R2, OutputArray R3,
+                                      OutputArray P1, OutputArray P2, OutputArray P3,
+                                      OutputArray Q, double alpha, Size newImgSize,
+                                      CV_OUT Rect* roi1, CV_OUT Rect* roi2, int flags );
+
+/** @brief Returns the new camera matrix based on the free scaling parameter.
+
+@param cameraMatrix Input camera matrix.
+@param distCoeffs Input vector of distortion coefficients
+\f$(k_1, k_2, p_1, p_2[, k_3[, k_4, k_5, k_6 [, s_1, s_2, s_3, s_4[, \tau_x, \tau_y]]]])\f$ of
+4, 5, 8, 12 or 14 elements. If the vector is NULL/empty, the zero distortion coefficients are
+assumed.
+@param imageSize Original image size.
+@param alpha Free scaling parameter between 0 (when all the pixels in the undistorted image are
+valid) and 1 (when all the source image pixels are retained in the undistorted image). See
+stereoRectify for details.
+@param newImgSize Image size after rectification. By default, it is set to imageSize .
+@param validPixROI Optional output rectangle that outlines all-good-pixels region in the
+undistorted image. See roi1, roi2 description in stereoRectify .
+@param centerPrincipalPoint Optional flag that indicates whether in the new camera matrix the
+principal point should be at the image center or not. By default, the principal point is chosen to
+best fit a subset of the source image (determined by alpha) to the corrected image.
+@return Output new camera matrix.
+
+The function computes and returns the optimal new camera matrix based on the free scaling parameter.
+By varying this parameter, you may retrieve only sensible pixels (alpha=0), keep all the original
+image pixels if there is valuable information in the corners (alpha=1), or get something in between.
+When alpha\>0 , the undistorted result is likely to have some black pixels corresponding to
+"virtual" pixels outside of the captured distorted image. The original camera matrix, distortion
+coefficients, the computed new camera matrix, and newImageSize should be passed to
+initUndistortRectifyMap to produce the maps for remap .
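+
+A minimal sketch of undistorting an image while keeping all source pixels (alpha=1); `img`,
+`cameraMatrix` and `distCoeffs` are assumed to come from a previous calibration:
+@code
+    cv::Rect validRoi;
+    cv::Mat newK = cv::getOptimalNewCameraMatrix(cameraMatrix, distCoeffs, img.size(), 1.0,
+                                                 img.size(), &validRoi);
+    cv::Mat undistorted;
+    cv::undistort(img, undistorted, cameraMatrix, distCoeffs, newK);
+    cv::Mat cropped = undistorted(validRoi); // optionally keep only the all-good-pixels region
+@endcode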
+ */
+CV_EXPORTS_W Mat getOptimalNewCameraMatrix( InputArray cameraMatrix, InputArray distCoeffs,
+                                            Size imageSize, double alpha, Size newImgSize = Size(),
+                                            CV_OUT Rect* validPixROI = 0,
+                                            bool centerPrincipalPoint = false);
+
+/** @brief Converts points from Euclidean to homogeneous space.
+
+@param src Input vector of N-dimensional points.
+@param dst Output vector of N+1-dimensional points.
+
+The function converts points from Euclidean to homogeneous space by appending 1's to the tuple of
+point coordinates. That is, each point (x1, x2, ..., xn) is converted to (x1, x2, ..., xn, 1).
+ */
+CV_EXPORTS_W void convertPointsToHomogeneous( InputArray src, OutputArray dst );
+
+/** @brief Converts points from homogeneous to Euclidean space.
+
+@param src Input vector of N-dimensional points.
+@param dst Output vector of N-1-dimensional points.
+
+The function converts points from homogeneous to Euclidean space using perspective projection. That
+is, each point (x1, x2, ... x(n-1), xn) is converted to (x1/xn, x2/xn, ..., x(n-1)/xn). When xn=0,
+the output point coordinates will be (0,0,0,...).
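+
+A small illustrative round trip between the two representations:
+@code
+    std::vector<cv::Point2f> pts = { {2.f, 3.f} };
+    std::vector<cv::Point3f> ptsH;
+    cv::convertPointsToHomogeneous(pts, ptsH);    // (2, 3)    -> (2, 3, 1)
+    std::vector<cv::Point2f> ptsE;
+    cv::convertPointsFromHomogeneous(ptsH, ptsE); // (2, 3, 1) -> (2, 3)
+@endcode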
+ */
+CV_EXPORTS_W void convertPointsFromHomogeneous( InputArray src, OutputArray dst );
+
+/** @brief Converts points to/from homogeneous coordinates.
+
+@param src Input array or vector of 2D, 3D, or 4D points.
+@param dst Output vector of 2D, 3D, or 4D points.
+
+The function converts 2D or 3D points from/to homogeneous coordinates by calling either
+convertPointsToHomogeneous or convertPointsFromHomogeneous.
+
+@note The function is obsolete. Use one of the previous two functions instead.
+ */
+CV_EXPORTS void convertPointsHomogeneous( InputArray src, OutputArray dst );
+
+/** @brief Calculates a fundamental matrix from the corresponding points in two images.
+
+@param points1 Array of N points from the first image. The point coordinates should be
+floating-point (single or double precision).
+@param points2 Array of the second image points of the same size and format as points1 .
+@param method Method for computing a fundamental matrix.
+-   **CV_FM_7POINT** for a 7-point algorithm. \f$N = 7\f$
+-   **CV_FM_8POINT** for an 8-point algorithm. \f$N \ge 8\f$
+-   **CV_FM_RANSAC** for the RANSAC algorithm. \f$N \ge 8\f$
+-   **CV_FM_LMEDS** for the LMedS algorithm. \f$N \ge 8\f$
+@param ransacReprojThreshold Parameter used only for RANSAC. It is the maximum distance from a point to an epipolar
+line in pixels, beyond which the point is considered an outlier and is not used for computing the
+final fundamental matrix. It can be set to something like 1-3, depending on the accuracy of the
+point localization, image resolution, and the image noise.
+@param confidence Parameter used for the RANSAC and LMedS methods only. It specifies a desirable level
+of confidence (probability) that the estimated matrix is correct.
+@param mask Output array of N elements, every element of which is set to 0 for outliers and to 1
+for the other points. The array is computed only in the RANSAC and LMedS methods.
+
+The epipolar geometry is described by the following equation:
+
+\f[[p_2; 1]^T F [p_1; 1] = 0\f]
+
+where \f$F\f$ is a fundamental matrix, \f$p_1\f$ and \f$p_2\f$ are corresponding points in the first and the
+second images, respectively.
+
+The function calculates the fundamental matrix using one of four methods listed above and returns
+the found fundamental matrix. Normally just one matrix is found. But in case of the 7-point
+algorithm, the function may return up to 3 solutions ( \f$9 \times 3\f$ matrix that stores all 3
+matrices sequentially).
+
+The calculated fundamental matrix may be passed further to computeCorrespondEpilines that finds the
+epipolar lines corresponding to the specified points. It can also be passed to
+stereoRectifyUncalibrated to compute the rectification transformation. :
+@code
+    // Example. Estimation of fundamental matrix using the RANSAC algorithm
+    int point_count = 100;
+    vector<Point2f> points1(point_count);
+    vector<Point2f> points2(point_count);
+
+    // initialize the points here ...
+    for( int i = 0; i < point_count; i++ )
+    {
+        points1[i] = ...;
+        points2[i] = ...;
+    }
+
+    Mat fundamental_matrix =
+     findFundamentalMat(points1, points2, FM_RANSAC, 3, 0.99);
+@endcode
+ */
+CV_EXPORTS_W Mat findFundamentalMat( InputArray points1, InputArray points2,
+                                     int method = FM_RANSAC,
+                                     double ransacReprojThreshold = 3., double confidence = 0.99,
+                                     OutputArray mask = noArray() );
+
+/** @overload */
+CV_EXPORTS Mat findFundamentalMat( InputArray points1, InputArray points2,
+                                   OutputArray mask, int method = FM_RANSAC,
+                                   double ransacReprojThreshold = 3., double confidence = 0.99 );
+
+/** @brief Calculates an essential matrix from the corresponding points in two images.
+
+@param points1 Array of N (N \>= 5) 2D points from the first image. The point coordinates should
+be floating-point (single or double precision).
+@param points2 Array of the second image points of the same size and format as points1 .
+@param cameraMatrix Camera matrix \f$K = \vecthreethree{f_x}{0}{c_x}{0}{f_y}{c_y}{0}{0}{1}\f$ .
+Note that this function assumes that points1 and points2 are feature points from cameras with the
+same camera matrix.
+@param method Method for computing an essential matrix.
+-   **RANSAC** for the RANSAC algorithm.
+-   **LMEDS** for the LMedS algorithm.
+@param prob Parameter used for the RANSAC or LMedS methods only. It specifies a desirable level of
+confidence (probability) that the estimated matrix is correct.
+@param threshold Parameter used for RANSAC. It is the maximum distance from a point to an epipolar
+line in pixels, beyond which the point is considered an outlier and is not used for computing the
+final fundamental matrix. It can be set to something like 1-3, depending on the accuracy of the
+point localization, image resolution, and the image noise.
+@param mask Output array of N elements, every element of which is set to 0 for outliers and to 1
+for the other points. The array is computed only in the RANSAC and LMedS methods.
+
+This function estimates the essential matrix based on the five-point algorithm solver in @cite Nister03 .
+@cite SteweniusCFS is also a related work. The epipolar geometry is described by the following equation:
+
+\f[[p_2; 1]^T K^{-T} E K^{-1} [p_1; 1] = 0\f]
+
+where \f$E\f$ is an essential matrix, \f$p_1\f$ and \f$p_2\f$ are corresponding points in the first and the
+second images, respectively. The result of this function may be passed further to
+decomposeEssentialMat or recoverPose to recover the relative pose between cameras.
+ */
+CV_EXPORTS_W Mat findEssentialMat( InputArray points1, InputArray points2,
+                                 InputArray cameraMatrix, int method = RANSAC,
+                                 double prob = 0.999, double threshold = 1.0,
+                                 OutputArray mask = noArray() );
+
+/** @overload
+@param points1 Array of N (N \>= 5) 2D points from the first image. The point coordinates should
+be floating-point (single or double precision).
+@param points2 Array of the second image points of the same size and format as points1 .
+@param focal focal length of the camera. Note that this function assumes that points1 and points2
+are feature points from cameras with same focal length and principal point.
+@param pp principal point of the camera.
+@param method Method for computing a fundamental matrix.
+-   **RANSAC** for the RANSAC algorithm.
+-   **LMEDS** for the LMedS algorithm.
+@param threshold Parameter used for RANSAC. It is the maximum distance from a point to an epipolar
+line in pixels, beyond which the point is considered an outlier and is not used for computing the
+final fundamental matrix. It can be set to something like 1-3, depending on the accuracy of the
+point localization, image resolution, and the image noise.
+@param prob Parameter used for the RANSAC or LMedS methods only. It specifies a desirable level of
+confidence (probability) that the estimated matrix is correct.
+@param mask Output array of N elements, every element of which is set to 0 for outliers and to 1
+for the other points. The array is computed only in the RANSAC and LMedS methods.
+
+This function differs from the one above in that it computes the camera matrix from the focal
+length and principal point:
+
+\f[K =
+\begin{bmatrix}
+f & 0 & x_{pp}  \\
+0 & f & y_{pp}  \\
+0 & 0 & 1
+\end{bmatrix}\f]
+ */
+CV_EXPORTS_W Mat findEssentialMat( InputArray points1, InputArray points2,
+                                 double focal = 1.0, Point2d pp = Point2d(0, 0),
+                                 int method = RANSAC, double prob = 0.999,
+                                 double threshold = 1.0, OutputArray mask = noArray() );
+
+/** @brief Decompose an essential matrix to possible rotations and translation.
+
+@param E The input essential matrix.
+@param R1 One possible rotation matrix.
+@param R2 Another possible rotation matrix.
+@param t One possible translation.
+
+This function decomposes an essential matrix E using SVD decomposition @cite HartleyZ00 . In general, 4
+possible poses exist for a given E. They are \f$[R_1, t]\f$, \f$[R_1, -t]\f$, \f$[R_2, t]\f$, \f$[R_2, -t]\f$. By
+decomposing E, you can only recover the direction of the translation, so the function returns unit t.
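+
+A minimal sketch, assuming E was obtained from findEssentialMat:
+@code
+    cv::Mat R1, R2, t;
+    cv::decomposeEssentialMat(E, R1, R2, t);
+    // The four pose hypotheses are [R1, t], [R1, -t], [R2, t], [R2, -t];
+    // recoverPose selects among them automatically via the cheirality check.
+@endcode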
+ */
+CV_EXPORTS_W void decomposeEssentialMat( InputArray E, OutputArray R1, OutputArray R2, OutputArray t );
+
+/** @brief Recover relative camera rotation and translation from an estimated essential matrix and the
+corresponding points in two images, using cheirality check. Returns the number of inliers which pass
+the check.
+
+@param E The input essential matrix.
+@param points1 Array of N 2D points from the first image. The point coordinates should be
+floating-point (single or double precision).
+@param points2 Array of the second image points of the same size and format as points1 .
+@param cameraMatrix Camera matrix \f$K = \vecthreethree{f_x}{0}{c_x}{0}{f_y}{c_y}{0}{0}{1}\f$ .
+Note that this function assumes that points1 and points2 are feature points from cameras with the
+same camera matrix.
+@param R Recovered relative rotation.
+@param t Recovered relative translation.
+@param mask Input/output mask for inliers in points1 and points2.
+:   If it is not empty, then it marks inliers in points1 and points2 for the given essential
+matrix E. Only these inliers will be used to recover the pose. In the output mask, only the inliers
+which pass the cheirality check are kept.
+
+This function decomposes an essential matrix using decomposeEssentialMat and then verifies possible
+pose hypotheses by doing a cheirality check. The cheirality check basically means that the
+triangulated 3D points should have positive depth. Some details can be found in @cite Nister03 .
+
+This function can be used to process output E and mask from findEssentialMat. In this scenario,
+points1 and points2 are the same input for findEssentialMat. :
+@code
+    // Example. Estimation of essential matrix using the RANSAC algorithm
+    int point_count = 100;
+    vector<Point2f> points1(point_count);
+    vector<Point2f> points2(point_count);
+
+    // initialize the points here ...
+    for( int i = 0; i < point_count; i++ )
+    {
+        points1[i] = ...;
+        points2[i] = ...;
+    }
+
+    // camera matrix with both focal lengths = 1, and principal point = (0, 0)
+    Mat cameraMatrix = Mat::eye(3, 3, CV_64F);
+
+    Mat E, R, t, mask;
+
+    E = findEssentialMat(points1, points2, cameraMatrix, RANSAC, 0.999, 1.0, mask);
+    recoverPose(E, points1, points2, cameraMatrix, R, t, mask);
+@endcode
+ */
+CV_EXPORTS_W int recoverPose( InputArray E, InputArray points1, InputArray points2,
+                            InputArray cameraMatrix, OutputArray R, OutputArray t,
+                            InputOutputArray mask = noArray() );
+
+/** @overload
+@param E The input essential matrix.
+@param points1 Array of N 2D points from the first image. The point coordinates should be
+floating-point (single or double precision).
+@param points2 Array of the second image points of the same size and format as points1 .
+@param R Recovered relative rotation.
+@param t Recovered relative translation.
+@param focal Focal length of the camera. Note that this function assumes that points1 and points2
+are feature points from cameras with same focal length and principal point.
+@param pp principal point of the camera.
+@param mask Input/output mask for inliers in points1 and points2.
+:   If it is not empty, then it marks inliers in points1 and points2 for the given essential
+matrix E. Only these inliers will be used to recover the pose. In the output mask, only the inliers
+which pass the cheirality check are kept.
+
+This function differs from the one above in that it computes the camera matrix from the focal
+length and principal point:
+
+\f[K =
+\begin{bmatrix}
+f & 0 & x_{pp}  \\
+0 & f & y_{pp}  \\
+0 & 0 & 1
+\end{bmatrix}\f]
+ */
+CV_EXPORTS_W int recoverPose( InputArray E, InputArray points1, InputArray points2,
+                            OutputArray R, OutputArray t,
+                            double focal = 1.0, Point2d pp = Point2d(0, 0),
+                            InputOutputArray mask = noArray() );
+
+/** @overload
+@param E The input essential matrix.
+@param points1 Array of N 2D points from the first image. The point coordinates should be
+floating-point (single or double precision).
+@param points2 Array of the second image points of the same size and format as points1.
+@param cameraMatrix Camera matrix \f$K = \vecthreethree{f_x}{0}{c_x}{0}{f_y}{c_y}{0}{0}{1}\f$ .
+Note that this function assumes that points1 and points2 are feature points from cameras with the
+same camera matrix.
+@param R Recovered relative rotation.
+@param t Recovered relative translation.
+@param distanceThresh threshold distance which is used to filter out far away points (i.e. infinite points).
+@param mask Input/output mask for inliers in points1 and points2.
+:   If it is not empty, then it marks inliers in points1 and points2 for the given essential
+matrix E. Only these inliers will be used to recover the pose. In the output mask, only the inliers
+which pass the cheirality check are kept.
+@param triangulatedPoints 3d points which were reconstructed by triangulation.
+ */
+
+CV_EXPORTS_W int recoverPose( InputArray E, InputArray points1, InputArray points2,
+                            InputArray cameraMatrix, OutputArray R, OutputArray t, double distanceThresh, InputOutputArray mask = noArray(),
+                            OutputArray triangulatedPoints = noArray());
+
+/** @brief For points in an image of a stereo pair, computes the corresponding epilines in the other image.
+
+@param points Input points. \f$N \times 1\f$ or \f$1 \times N\f$ matrix of type CV_32FC2 or
+vector\<Point2f\> .
+@param whichImage Index of the image (1 or 2) that contains the points .
+@param F Fundamental matrix that can be estimated using findFundamentalMat or stereoRectify .
+@param lines Output vector of the epipolar lines corresponding to the points in the other image.
+Each line \f$ax + by + c=0\f$ is encoded by 3 numbers \f$(a, b, c)\f$ .
+
+For every point in one of the two images of a stereo pair, the function finds the equation of the
+corresponding epipolar line in the other image.
+
+From the fundamental matrix definition (see findFundamentalMat ), line \f$l^{(2)}_i\f$ in the second
+image for the point \f$p^{(1)}_i\f$ in the first image (when whichImage=1 ) is computed as:
+
+\f[l^{(2)}_i = F p^{(1)}_i\f]
+
+And vice versa, when whichImage=2, \f$l^{(1)}_i\f$ is computed from \f$p^{(2)}_i\f$ as:
+
+\f[l^{(1)}_i = F^T p^{(2)}_i\f]
+
+Line coefficients are defined up to a scale. They are normalized so that \f$a_i^2+b_i^2=1\f$ .
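+
+A minimal sketch, assuming F was estimated with findFundamentalMat and pts1 holds points detected
+in the first image:
+@code
+    std::vector<cv::Point2f> pts1;  // points detected in image 1
+    std::vector<cv::Vec3f> lines2;  // epilines in image 2, one (a, b, c) triple per point
+    cv::computeCorrespondEpilines(pts1, 1, F, lines2);
+@endcode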
+ */
+CV_EXPORTS_W void computeCorrespondEpilines( InputArray points, int whichImage,
+                                             InputArray F, OutputArray lines );
+
+/** @brief Reconstructs points by triangulation.
+
+@param projMatr1 3x4 projection matrix of the first camera.
+@param projMatr2 3x4 projection matrix of the second camera.
+@param projPoints1 2xN array of feature points in the first image. In the C++ version it can
+also be a vector of feature points or a two-channel matrix of size 1xN or Nx1.
+@param projPoints2 2xN array of corresponding points in the second image. In the C++ version
+it can also be a vector of feature points or a two-channel matrix of size 1xN or Nx1.
+@param points4D 4xN array of reconstructed points in homogeneous coordinates.
+
+The function reconstructs 3-dimensional points (in homogeneous coordinates) by using their
+observations with a stereo camera. Projection matrices can be obtained from stereoRectify.
+
+@note
+   Keep in mind that all input data should be of float type in order for this function to work.
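+
+A minimal sketch, assuming P1 and P2 are the 3x4 projection matrices returned by stereoRectify and
+pts1/pts2 hold the matched image points (everything is converted to float, as required above):
+@code
+    const int N = 100;                              // number of matched point pairs
+    cv::Mat P1f, P2f;
+    P1.convertTo(P1f, CV_32F);
+    P2.convertTo(P2f, CV_32F);
+    cv::Mat pts1(2, N, CV_32F), pts2(2, N, CV_32F); // filled with corresponding image points
+    cv::Mat points4D;
+    cv::triangulatePoints(P1f, P2f, pts1, pts2, points4D); // 4xN homogeneous coordinates
+@endcode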
+
+@sa
+   reprojectImageTo3D
+ */
+CV_EXPORTS_W void triangulatePoints( InputArray projMatr1, InputArray projMatr2,
+                                     InputArray projPoints1, InputArray projPoints2,
+                                     OutputArray points4D );
+
+/** @brief Refines coordinates of corresponding points.
+
+@param F 3x3 fundamental matrix.
+@param points1 1xN array containing the first set of points.
+@param points2 1xN array containing the second set of points.
+@param newPoints1 The optimized points1.
+@param newPoints2 The optimized points2.
+
+The function implements the Optimal Triangulation Method (see Multiple View Geometry for details).
+For each given point correspondence points1[i] \<-\> points2[i], and a fundamental matrix F, it
+computes the corrected correspondences newPoints1[i] \<-\> newPoints2[i] that minimize the geometric
+error \f$d(points1[i], newPoints1[i])^2 + d(points2[i],newPoints2[i])^2\f$ (where \f$d(a,b)\f$ is the
+geometric distance between points \f$a\f$ and \f$b\f$ ) subject to the epipolar constraint
+\f$newPoints2^T * F * newPoints1 = 0\f$ .
+ */
+CV_EXPORTS_W void correctMatches( InputArray F, InputArray points1, InputArray points2,
+                                  OutputArray newPoints1, OutputArray newPoints2 );
+
+/** @brief Filters off small noise blobs (speckles) in the disparity map
+
+@param img The input 16-bit signed disparity image
+@param newVal The disparity value used to paint-off the speckles
+@param maxSpeckleSize The maximum blob size (in pixels) for it to be considered a speckle. Larger
+blobs are not affected by the algorithm.
+@param maxDiff Maximum difference between neighbor disparity pixels to put them into the same
+blob. Note that since StereoBM, StereoSGBM and possibly other algorithms return a fixed-point
+disparity map, where disparity values are multiplied by 16, this scale factor should be taken into
+account when specifying this parameter value.
+@param buf The optional temporary buffer to avoid memory allocation within the function.
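+
+A minimal sketch for a fixed-point disparity map produced by StereoBM or StereoSGBM (the map `disp`
+is assumed; its values are scaled by 16, so a maxDiff of 16 corresponds to one disparity level):
+@code
+    cv::filterSpeckles(disp, 0 /*newVal*/, 200 /*maxSpeckleSize*/, 16 /*maxDiff*/);
+@endcode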
+ */
+CV_EXPORTS_W void filterSpeckles( InputOutputArray img, double newVal,
+                                  int maxSpeckleSize, double maxDiff,
+                                  InputOutputArray buf = noArray() );
+
+//! computes valid disparity ROI from the valid ROIs of the rectified images (that are returned by cv::stereoRectify())
+CV_EXPORTS_W Rect getValidDisparityROI( Rect roi1, Rect roi2,
+                                        int minDisparity, int numberOfDisparities,
+                                        int SADWindowSize );
+
+//! validates disparity using the left-right check. The matrix "cost" should be computed by the stereo correspondence algorithm
+CV_EXPORTS_W void validateDisparity( InputOutputArray disparity, InputArray cost,
+                                     int minDisparity, int numberOfDisparities,
+                                     int disp12MaxDisp = 1 );
+
+/** @brief Reprojects a disparity image to 3D space.
+
+@param disparity Input single-channel 8-bit unsigned, 16-bit signed, 32-bit signed or 32-bit
+floating-point disparity image. If 16-bit signed format is used, the values are assumed to have no
+fractional bits.
+@param _3dImage Output 3-channel floating-point image of the same size as disparity . Each
+element of _3dImage(x,y) contains 3D coordinates of the point (x,y) computed from the disparity
+map.
+@param Q \f$4 \times 4\f$ perspective transformation matrix that can be obtained with stereoRectify.
+@param handleMissingValues Indicates whether the function should handle missing values (i.e.
+points where the disparity was not computed). If handleMissingValues=true, then pixels with the
+minimal disparity that corresponds to the outliers (see StereoMatcher::compute ) are transformed
+to 3D points with a very large Z value (currently set to 10000).
+@param ddepth The optional output array depth. If it is -1, the output image will have CV_32F
+depth. ddepth can also be set to CV_16S, CV_32S or CV_32F.
+
+The function transforms a single-channel disparity map to a 3-channel image representing a 3D
+surface. That is, for each pixel (x,y) and the corresponding disparity d=disparity(x,y) , it
+computes:
+
+\f[\begin{array}{l} [X \; Y \; Z \; W]^T =  \texttt{Q} *[x \; y \; \texttt{disparity} (x,y) \; 1]^T  \\ \texttt{\_3dImage} (x,y) = (X/W, \; Y/W, \; Z/W) \end{array}\f]
+
+The matrix Q can be an arbitrary \f$4 \times 4\f$ matrix (for example, the one computed by
+stereoRectify). To reproject a sparse set of points {(x,y,d),...} to 3D space, use
+perspectiveTransform .
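+
+A minimal sketch, assuming `disp16` is a fixed-point disparity map from StereoBM/StereoSGBM and Q
+comes from stereoRectify:
+@code
+    cv::Mat dispF;
+    disp16.convertTo(dispF, CV_32F, 1.0 / 16.0); // undo the x16 fixed-point scaling
+    cv::Mat xyz;
+    cv::reprojectImageTo3D(dispF, xyz, Q, true); // 3-channel image of (X, Y, Z) points
+@endcode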
+ */
+CV_EXPORTS_W void reprojectImageTo3D( InputArray disparity,
+                                      OutputArray _3dImage, InputArray Q,
+                                      bool handleMissingValues = false,
+                                      int ddepth = -1 );
+
+/** @brief Calculates the Sampson Distance between two points.
+
+The function cv::sampsonDistance calculates and returns the first order approximation of the geometric error as:
+\f[
+sd( \texttt{pt1} , \texttt{pt2} )=
+\frac{(\texttt{pt2}^t \cdot \texttt{F} \cdot \texttt{pt1})^2}
+{((\texttt{F} \cdot \texttt{pt1})(0))^2 +
+((\texttt{F} \cdot \texttt{pt1})(1))^2 +
+((\texttt{F}^t \cdot \texttt{pt2})(0))^2 +
+((\texttt{F}^t \cdot \texttt{pt2})(1))^2}
+\f]
+The fundamental matrix may be calculated using the cv::findFundamentalMat function. See @cite HartleyZ00 11.4.3 for details.
+@param pt1 first homogeneous 2d point
+@param pt2 second homogeneous 2d point
+@param F fundamental matrix
+@return The computed Sampson distance.
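+
+A minimal numeric sketch (F is assumed to be a 3x3 CV_64F fundamental matrix, e.g. from
+cv::findFundamentalMat):
+@code
+    cv::Mat p1 = (cv::Mat_<double>(3, 1) << 10.0, 20.0, 1.0); // homogeneous point in image 1
+    cv::Mat p2 = (cv::Mat_<double>(3, 1) << 12.0, 21.0, 1.0); // homogeneous point in image 2
+    double err = cv::sampsonDistance(p1, p2, F);
+@endcode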
+*/
+CV_EXPORTS_W double sampsonDistance(InputArray pt1, InputArray pt2, InputArray F);
+
+/** @brief Computes an optimal affine transformation between two 3D point sets.
+
+It computes
+\f[
+\begin{bmatrix}
+x\\
+y\\
+z\\
+\end{bmatrix}
+=
+\begin{bmatrix}
+a_{11} & a_{12} & a_{13}\\
+a_{21} & a_{22} & a_{23}\\
+a_{31} & a_{32} & a_{33}\\
+\end{bmatrix}
+\begin{bmatrix}
+X\\
+Y\\
+Z\\
+\end{bmatrix}
++
+\begin{bmatrix}
+b_1\\
+b_2\\
+b_3\\
+\end{bmatrix}
+\f]
+
+@param src First input 3D point set containing \f$(X,Y,Z)\f$.
+@param dst Second input 3D point set containing \f$(x,y,z)\f$.
+@param out Output 3D affine transformation matrix \f$3 \times 4\f$ of the form
+\f[
+\begin{bmatrix}
+a_{11} & a_{12} & a_{13} & b_1\\
+a_{21} & a_{22} & a_{23} & b_2\\
+a_{31} & a_{32} & a_{33} & b_3\\
+\end{bmatrix}
+\f]
+@param inliers Output vector indicating which points are inliers (1-inlier, 0-outlier).
+@param ransacThreshold Maximum reprojection error in the RANSAC algorithm to consider a point as
+an inlier.
+@param confidence Confidence level, between 0 and 1, for the estimated transformation. Anything
+between 0.95 and 0.99 is usually good enough. Values too close to 1 can slow down the estimation
+significantly. Values lower than 0.8-0.9 can result in an incorrectly estimated transformation.
+
+The function estimates an optimal 3D affine transformation between two 3D point sets using the
+RANSAC algorithm.
+ */
+CV_EXPORTS_W  int estimateAffine3D(InputArray src, InputArray dst,
+                                   OutputArray out, OutputArray inliers,
+                                   double ransacThreshold = 3, double confidence = 0.99);
+
+/** @brief Computes an optimal affine transformation between two 2D point sets.
+
+It computes
+\f[
+\begin{bmatrix}
+x\\
+y\\
+\end{bmatrix}
+=
+\begin{bmatrix}
+a_{11} & a_{12}\\
+a_{21} & a_{22}\\
+\end{bmatrix}
+\begin{bmatrix}
+X\\
+Y\\
+\end{bmatrix}
++
+\begin{bmatrix}
+b_1\\
+b_2\\
+\end{bmatrix}
+\f]
+
+@param from First input 2D point set containing \f$(X,Y)\f$.
+@param to Second input 2D point set containing \f$(x,y)\f$.
+@param inliers Output vector indicating which points are inliers (1-inlier, 0-outlier).
+@param method Robust method used to compute transformation. The following methods are possible:
+-   cv::RANSAC - RANSAC-based robust method
+-   cv::LMEDS - Least-Median robust method
+RANSAC is the default method.
+@param ransacReprojThreshold Maximum reprojection error in the RANSAC algorithm to consider
+a point as an inlier. Applies only to RANSAC.
+@param maxIters The maximum number of robust method iterations.
+@param confidence Confidence level, between 0 and 1, for the estimated transformation. Anything
+between 0.95 and 0.99 is usually good enough. Values too close to 1 can slow down the estimation
+significantly. Values lower than 0.8-0.9 can result in an incorrectly estimated transformation.
+@param refineIters Maximum number of iterations of refining algorithm (Levenberg-Marquardt).
+Passing 0 will disable refining, so the output matrix will be the output of the robust method.
+
+@return Output 2D affine transformation matrix \f$2 \times 3\f$ or empty matrix if transformation
+could not be estimated. The returned matrix has the following form:
+\f[
+\begin{bmatrix}
+a_{11} & a_{12} & b_1\\
+a_{21} & a_{22} & b_2\\
+\end{bmatrix}
+\f]
+
+The function estimates an optimal 2D affine transformation between two 2D point sets using the
+selected robust algorithm.
+
+The computed transformation is then refined further (using only inliers) with the
+Levenberg-Marquardt method to reduce the re-projection error even more.
+
+@note
+The RANSAC method can handle practically any ratio of outliers but needs a threshold to
+distinguish inliers from outliers. The method LMeDS does not need any threshold but it works
+correctly only when there are more than 50% of inliers.
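+
+A minimal sketch (the point sets here are illustrative; in practice they come from feature matching):
+@code
+    std::vector<cv::Point2f> from = { {0, 0}, {1, 0}, {0, 1}, {1, 1} };
+    std::vector<cv::Point2f> to   = { {2, 1}, {3, 1}, {2, 2}, {3, 2} }; // translated by (2, 1)
+    std::vector<uchar> inliers;
+    cv::Mat A = cv::estimateAffine2D(from, to, inliers); // 2x3 CV_64F affine matrix
+@endcode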
+
+@sa estimateAffinePartial2D, getAffineTransform
+*/
+CV_EXPORTS_W cv::Mat estimateAffine2D(InputArray from, InputArray to, OutputArray inliers = noArray(),
+                                  int method = RANSAC, double ransacReprojThreshold = 3,
+                                  size_t maxIters = 2000, double confidence = 0.99,
+                                  size_t refineIters = 10);
+
+/** @brief Computes an optimal limited affine transformation with 4 degrees of freedom between
+two 2D point sets.
+
+@param from First input 2D point set.
+@param to Second input 2D point set.
+@param inliers Output vector indicating which points are inliers.
+@param method Robust method used to compute transformation. The following methods are possible:
+-   cv::RANSAC - RANSAC-based robust method
+-   cv::LMEDS - Least-Median robust method
+RANSAC is the default method.
+@param ransacReprojThreshold Maximum reprojection error in the RANSAC algorithm to consider
+a point as an inlier. Applies only to RANSAC.
+@param maxIters The maximum number of robust method iterations.
+@param confidence Confidence level, between 0 and 1, for the estimated transformation. Anything
+between 0.95 and 0.99 is usually good enough. Values too close to 1 can slow down the estimation
+significantly. Values lower than 0.8-0.9 can result in an incorrectly estimated transformation.
+@param refineIters Maximum number of iterations of refining algorithm (Levenberg-Marquardt).
+Passing 0 will disable refining, so the output matrix will be the output of the robust method.
+
+@return Output 2D affine transformation (4 degrees of freedom) matrix \f$2 \times 3\f$ or
+empty matrix if transformation could not be estimated.
+
+The function estimates an optimal 2D affine transformation with 4 degrees of freedom limited to
+combinations of translation, rotation, and uniform scaling. Uses the selected algorithm for robust
+estimation.
+
+The computed transformation is then refined further (using only inliers) with the
+Levenberg-Marquardt method to reduce the re-projection error even more.
+
+Estimated transformation matrix is:
+\f[ \begin{bmatrix} \cos(\theta) \cdot s & -\sin(\theta) \cdot s & t_x \\
+                \sin(\theta) \cdot s & \cos(\theta) \cdot s & t_y
+\end{bmatrix} \f]
+Where \f$ \theta \f$ is the rotation angle, \f$ s \f$ the scaling factor and \f$ t_x, t_y \f$ are
+translations in \f$ x, y \f$ axes respectively.
+
+@note
+The RANSAC method can handle practically any ratio of outliers but needs a threshold to
+distinguish inliers from outliers. The method LMeDS does not need any threshold but it works
+correctly only when there are more than 50% of inliers.
+
+@sa estimateAffine2D, getAffineTransform
+*/
+CV_EXPORTS_W cv::Mat estimateAffinePartial2D(InputArray from, InputArray to, OutputArray inliers = noArray(),
+                                  int method = RANSAC, double ransacReprojThreshold = 3,
+                                  size_t maxIters = 2000, double confidence = 0.99,
+                                  size_t refineIters = 10);
+
+/** @example samples/cpp/tutorial_code/features2D/Homography/decompose_homography.cpp
+An example program with homography decomposition.
+
+Check @ref tutorial_homography "the corresponding tutorial" for more details.
+*/
+
+/** @brief Decompose a homography matrix to rotation(s), translation(s) and plane normal(s).
+
+@param H The input homography matrix between two images.
+@param K The input intrinsic camera calibration matrix.
+@param rotations Array of rotation matrices.
+@param translations Array of translation matrices.
+@param normals Array of plane normal matrices.
+
+This function extracts relative camera motion between two views observing a planar object from the
+homography H induced by the plane. The intrinsic camera matrix K must also be provided. The function
+may return up to four mathematical solution sets. If point correspondences are available, at least
+two of the solutions may further be invalidated by applying the positive depth constraint (all points
+must be in front of the camera). The decomposition method is described in detail in @cite Malis .
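+
+A minimal sketch, assuming H was estimated with findHomography and K holds the camera intrinsics:
+@code
+    std::vector<cv::Mat> rotations, translations, normals;
+    int nSolutions = cv::decomposeHomographyMat(H, K, rotations, translations, normals);
+    // Up to 4 candidate (R, t, n) triplets are returned; use point correspondences (e.g. via
+    // filterHomographyDecompByVisibleRefpoints) to reject the physically impossible ones.
+@endcode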
+ */
+CV_EXPORTS_W int decomposeHomographyMat(InputArray H,
+                                        InputArray K,
+                                        OutputArrayOfArrays rotations,
+                                        OutputArrayOfArrays translations,
+                                        OutputArrayOfArrays normals);
+
+/** @brief Filters homography decompositions based on additional information.
+
+@param rotations Vector of rotation matrices.
+@param normals Vector of plane normal matrices.
+@param beforePoints Vector of (rectified) visible reference points before the homography is applied
+@param afterPoints Vector of (rectified) visible reference points after the homography is applied
+@param possibleSolutions Vector of int indices representing the viable solution set after filtering
+@param pointsMask optional Mat/Vector of 8u type representing the mask for the inliers as given by the findHomography function
+
+This function is intended to filter the output of the decomposeHomographyMat based on additional
+information as described in @cite Malis . The summary of the method: the decomposeHomographyMat function
+returns 2 unique solutions and their "opposites" for a total of 4 solutions. If we have access to the
+sets of points visible in the camera frame before and after the homography transformation is applied,
+we can determine which are the true potential solutions and which are the opposites by verifying which
+homographies are consistent with all visible reference points being in front of the camera. The inputs
+are left unchanged; the filtered solution set is returned as indices into the existing one.
+
+*/
+CV_EXPORTS_W void filterHomographyDecompByVisibleRefpoints(InputArrayOfArrays rotations,
+                                                           InputArrayOfArrays normals,
+                                                           InputArray beforePoints,
+                                                           InputArray afterPoints,
+                                                           OutputArray possibleSolutions,
+                                                           InputArray pointsMask = noArray());
+
+/** @brief The base class for stereo correspondence algorithms.
+ */
+class CV_EXPORTS_W StereoMatcher : public Algorithm
+{
+public:
+    enum { DISP_SHIFT = 4,
+           DISP_SCALE = (1 << DISP_SHIFT)
+         };
+
+    /** @brief Computes disparity map for the specified stereo pair
+
+    @param left Left 8-bit single-channel image.
+    @param right Right image of the same size and the same type as the left one.
+    @param disparity Output disparity map. It has the same size as the input images. Some algorithms,
+    like StereoBM or StereoSGBM compute 16-bit fixed-point disparity map (where each disparity value
+    has 4 fractional bits), whereas other algorithms output 32-bit floating-point disparity map.
+     */
+    CV_WRAP virtual void compute( InputArray left, InputArray right,
+                                  OutputArray disparity ) = 0;
+
+    CV_WRAP virtual int getMinDisparity() const = 0;
+    CV_WRAP virtual void setMinDisparity(int minDisparity) = 0;
+
+    CV_WRAP virtual int getNumDisparities() const = 0;
+    CV_WRAP virtual void setNumDisparities(int numDisparities) = 0;
+
+    CV_WRAP virtual int getBlockSize() const = 0;
+    CV_WRAP virtual void setBlockSize(int blockSize) = 0;
+
+    CV_WRAP virtual int getSpeckleWindowSize() const = 0;
+    CV_WRAP virtual void setSpeckleWindowSize(int speckleWindowSize) = 0;
+
+    CV_WRAP virtual int getSpeckleRange() const = 0;
+    CV_WRAP virtual void setSpeckleRange(int speckleRange) = 0;
+
+    CV_WRAP virtual int getDisp12MaxDiff() const = 0;
+    CV_WRAP virtual void setDisp12MaxDiff(int disp12MaxDiff) = 0;
+};
+
+
+/** @brief Class for computing stereo correspondence using the block matching algorithm, introduced and
+contributed to OpenCV by K. Konolige.
+ */
+class CV_EXPORTS_W StereoBM : public StereoMatcher
+{
+public:
+    enum { PREFILTER_NORMALIZED_RESPONSE = 0,
+           PREFILTER_XSOBEL              = 1
+         };
+
+    CV_WRAP virtual int getPreFilterType() const = 0;
+    CV_WRAP virtual void setPreFilterType(int preFilterType) = 0;
+
+    CV_WRAP virtual int getPreFilterSize() const = 0;
+    CV_WRAP virtual void setPreFilterSize(int preFilterSize) = 0;
+
+    CV_WRAP virtual int getPreFilterCap() const = 0;
+    CV_WRAP virtual void setPreFilterCap(int preFilterCap) = 0;
+
+    CV_WRAP virtual int getTextureThreshold() const = 0;
+    CV_WRAP virtual void setTextureThreshold(int textureThreshold) = 0;
+
+    CV_WRAP virtual int getUniquenessRatio() const = 0;
+    CV_WRAP virtual void setUniquenessRatio(int uniquenessRatio) = 0;
+
+    CV_WRAP virtual int getSmallerBlockSize() const = 0;
+    CV_WRAP virtual void setSmallerBlockSize(int blockSize) = 0;
+
+    CV_WRAP virtual Rect getROI1() const = 0;
+    CV_WRAP virtual void setROI1(Rect roi1) = 0;
+
+    CV_WRAP virtual Rect getROI2() const = 0;
+    CV_WRAP virtual void setROI2(Rect roi2) = 0;
+
+    /** @brief Creates StereoBM object
+
+    @param numDisparities the disparity search range. For each pixel algorithm will find the best
+    disparity from 0 (default minimum disparity) to numDisparities. The search range can then be
+    shifted by changing the minimum disparity.
+    @param blockSize the linear size of the blocks compared by the algorithm. The size should be odd
+    (as the block is centered at the current pixel). A larger block size implies a smoother, though less
+    accurate, disparity map. A smaller block size gives a more detailed disparity map, but there is a higher
+    chance for the algorithm to find a wrong correspondence.
+
+    The function creates a StereoBM object. You can then call StereoBM::compute() to compute the
+    disparity for a specific stereo pair.
+     */
+    CV_WRAP static Ptr<StereoBM> create(int numDisparities = 0, int blockSize = 21);
+};
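+
+// A minimal usage sketch (not part of the header itself); `leftGray` and `rightGray` are assumed
+// to be rectified 8-bit single-channel cv::Mat images of the same size:
+//
+//     cv::Ptr<cv::StereoBM> bm = cv::StereoBM::create(64, 21);   // numDisparities (divisible by 16), odd blockSize
+//     cv::Mat disp16S;
+//     bm->compute(leftGray, rightGray, disp16S);                 // 16-bit fixed point, 4 fractional bits
+//     cv::Mat disp32F;
+//     disp16S.convertTo(disp32F, CV_32F, 1.0 / cv::StereoBM::DISP_SCALE);  // back to pixel units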
+
+/** @brief The class implements the modified H. Hirschmuller algorithm @cite HH08 that differs from the original
+one as follows:
+
+-   By default, the algorithm is single-pass, which means that it considers only 5 directions
+instead of 8. Set mode=StereoSGBM::MODE_HH in StereoSGBM::create to run the full variant of the
+algorithm, but beware that it may consume a lot of memory.
+-   The algorithm matches blocks, not individual pixels. However, setting blockSize=1 reduces the
+blocks to single pixels.
+-   The mutual information cost function is not implemented. Instead, a simpler Birchfield-Tomasi
+sub-pixel metric from @cite BT98 is used. Color images are supported as well.
+-   Some pre- and post- processing steps from K. Konolige algorithm StereoBM are included, for
+example: pre-filtering (StereoBM::PREFILTER_XSOBEL type) and post-filtering (uniqueness
+check, quadratic interpolation and speckle filtering).
+
+@note
+   -   (Python) An example illustrating the use of the StereoSGBM matching algorithm can be found
+        at opencv_source_code/samples/python/stereo_match.py
+ */
+class CV_EXPORTS_W StereoSGBM : public StereoMatcher
+{
+public:
+    enum
+    {
+        MODE_SGBM = 0,
+        MODE_HH   = 1,
+        MODE_SGBM_3WAY = 2,
+        MODE_HH4  = 3
+    };
+
+    CV_WRAP virtual int getPreFilterCap() const = 0;
+    CV_WRAP virtual void setPreFilterCap(int preFilterCap) = 0;
+
+    CV_WRAP virtual int getUniquenessRatio() const = 0;
+    CV_WRAP virtual void setUniquenessRatio(int uniquenessRatio) = 0;
+
+    CV_WRAP virtual int getP1() const = 0;
+    CV_WRAP virtual void setP1(int P1) = 0;
+
+    CV_WRAP virtual int getP2() const = 0;
+    CV_WRAP virtual void setP2(int P2) = 0;
+
+    CV_WRAP virtual int getMode() const = 0;
+    CV_WRAP virtual void setMode(int mode) = 0;
+
+    /** @brief Creates StereoSGBM object
+
+    @param minDisparity Minimum possible disparity value. Normally, it is zero but sometimes
+    rectification algorithms can shift images, so this parameter needs to be adjusted accordingly.
+    @param numDisparities Maximum disparity minus minimum disparity. The value is always greater than
+    zero. In the current implementation, this parameter must be divisible by 16.
+    @param blockSize Matched block size. It must be an odd number \>=1 . Normally, it should be
+    somewhere in the 3..11 range.
+    @param P1 The first parameter controlling the disparity smoothness. See below.
+    @param P2 The second parameter controlling the disparity smoothness. The larger the values are,
+    the smoother the disparity is. P1 is the penalty on the disparity change by plus or minus 1
+    between neighbor pixels. P2 is the penalty on the disparity change by more than 1 between neighbor
+    pixels. The algorithm requires P2 \> P1 . See stereo_match.cpp sample where some reasonably good
+    P1 and P2 values are shown (like 8\*number_of_image_channels\*SADWindowSize\*SADWindowSize and
+    32\*number_of_image_channels\*SADWindowSize\*SADWindowSize , respectively).
+    @param disp12MaxDiff Maximum allowed difference (in integer pixel units) in the left-right
+    disparity check. Set it to a non-positive value to disable the check.
+    @param preFilterCap Truncation value for the prefiltered image pixels. The algorithm first
+    computes x-derivative at each pixel and clips its value by [-preFilterCap, preFilterCap] interval.
+    The result values are passed to the Birchfield-Tomasi pixel cost function.
+    @param uniquenessRatio Margin in percentage by which the best (minimum) computed cost function
+    value should "win" the second best value to consider the found match correct. Normally, a value
+    within the 5-15 range is good enough.
+    @param speckleWindowSize Maximum size of smooth disparity regions to consider their noise speckles
+    and invalidate. Set it to 0 to disable speckle filtering. Otherwise, set it somewhere in the
+    50-200 range.
+    @param speckleRange Maximum disparity variation within each connected component. If you do speckle
+    filtering, set the parameter to a positive value; it will be implicitly multiplied by 16.
+    Normally, 1 or 2 is good enough.
+    @param mode Set it to StereoSGBM::MODE_HH to run the full-scale two-pass dynamic programming
+    algorithm. It will consume O(W\*H\*numDisparities) bytes, which is large for 640x480 stereo and
+    huge for HD-size pictures. By default, it is set to StereoSGBM::MODE_SGBM.
+
+    The function initializes StereoSGBM with all the default parameters, so you only have to
+    set StereoSGBM::numDisparities at minimum. Every other parameter can be overridden with a
+    custom value through the arguments above.
+     */
+    CV_WRAP static Ptr<StereoSGBM> create(int minDisparity = 0, int numDisparities = 16, int blockSize = 3,
+                                          int P1 = 0, int P2 = 0, int disp12MaxDiff = 0,
+                                          int preFilterCap = 0, int uniquenessRatio = 0,
+                                          int speckleWindowSize = 0, int speckleRange = 0,
+                                          int mode = StereoSGBM::MODE_SGBM);
+};
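+
+// A minimal usage sketch (not part of the header itself); `left` and `right` are assumed to be
+// rectified 8-bit images of the same size, and P1/P2 follow the rule of thumb quoted above:
+//
+//     int blockSize = 5, cn = left.channels();
+//     cv::Ptr<cv::StereoSGBM> sgbm = cv::StereoSGBM::create(
+//         0, 128, blockSize,
+//         8  * cn * blockSize * blockSize,    // P1
+//         32 * cn * blockSize * blockSize,    // P2
+//         1, 63, 10, 100, 32,                 // disp12MaxDiff, preFilterCap, uniquenessRatio, speckleWindowSize, speckleRange
+//         cv::StereoSGBM::MODE_SGBM_3WAY);
+//     cv::Mat disp;
+//     sgbm->compute(left, right, disp);       // divide by StereoMatcher::DISP_SCALE (16) for pixel units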
+
+
+//! cv::undistort mode
+enum UndistortTypes
+{
+    PROJ_SPHERICAL_ORTHO  = 0,
+    PROJ_SPHERICAL_EQRECT = 1
+};
+
+/** @brief Transforms an image to compensate for lens distortion.
+
+The function transforms an image to compensate for radial and tangential lens distortion.
+
+The function is simply a combination of #initUndistortRectifyMap (with unity R ) and #remap
+(with bilinear interpolation). See the former function for details of the transformation being
+performed.
+
+Those pixels in the destination image for which there are no corresponding pixels in the source
+image are filled with zeros (black color).
+
+A particular subset of the source image that will be visible in the corrected image can be regulated
+by newCameraMatrix. You can use #getOptimalNewCameraMatrix to compute the appropriate
+newCameraMatrix depending on your requirements.
+
+The camera matrix and the distortion parameters can be determined using #calibrateCamera. If
+the resolution of images is different from the resolution used at the calibration stage, \f$f_x,
+f_y, c_x\f$ and \f$c_y\f$ need to be scaled accordingly, while the distortion coefficients remain
+the same.
+
+@param src Input (distorted) image.
+@param dst Output (corrected) image that has the same size and type as src .
+@param cameraMatrix Input camera matrix \f$A = \vecthreethree{f_x}{0}{c_x}{0}{f_y}{c_y}{0}{0}{1}\f$ .
+@param distCoeffs Input vector of distortion coefficients
+\f$(k_1, k_2, p_1, p_2[, k_3[, k_4, k_5, k_6[, s_1, s_2, s_3, s_4[, \tau_x, \tau_y]]]])\f$
+of 4, 5, 8, 12 or 14 elements. If the vector is NULL/empty, the zero distortion coefficients are assumed.
+@param newCameraMatrix Camera matrix of the distorted image. By default, it is the same as
+cameraMatrix but you may additionally scale and shift the result by using a different matrix.
+ */
+CV_EXPORTS_W void undistort( InputArray src, OutputArray dst,
+                             InputArray cameraMatrix,
+                             InputArray distCoeffs,
+                             InputArray newCameraMatrix = noArray() );
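+
+// A minimal usage sketch (not part of the header itself); `src`, `cameraMatrix` and `distCoeffs`
+// are assumed to come from a prior #calibrateCamera run:
+//
+//     cv::Mat dst;
+//     cv::undistort(src, dst, cameraMatrix, distCoeffs);                       // keep the original camera matrix
+//     cv::Mat newK = cv::getOptimalNewCameraMatrix(cameraMatrix, distCoeffs,
+//                                                  src.size(), 1.0);           // alpha = 1 keeps all source pixels
+//     cv::undistort(src, dst, cameraMatrix, distCoeffs, newK);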
+
+/** @brief Computes the undistortion and rectification transformation map.
+
+The function computes the joint undistortion and rectification transformation and represents the
+result in the form of maps for remap. The undistorted image looks like the original, as if it were
+captured with a camera using the camera matrix = newCameraMatrix and zero distortion. In case of a
+monocular camera, newCameraMatrix is usually equal to cameraMatrix, or it can be computed by
+#getOptimalNewCameraMatrix for better control over scaling. In case of a stereo camera,
+newCameraMatrix is normally set to P1 or P2 computed by #stereoRectify .
+
+Also, this new camera is oriented differently in the coordinate space, according to R. This, for
+example, helps to align the two heads of a stereo camera so that the epipolar lines on both images
+become horizontal and have the same y-coordinate (in case of a horizontally aligned stereo camera).
+
+The function actually builds the maps for the inverse mapping algorithm that is used by remap. That
+is, for each pixel \f$(u, v)\f$ in the destination (corrected and rectified) image, the function
+computes the corresponding coordinates in the source image (that is, in the original image from
+camera). The following process is applied:
+\f[
+\begin{array}{l}
+x  \leftarrow (u - {c'}_x)/{f'}_x  \\
+y  \leftarrow (v - {c'}_y)/{f'}_y  \\
+{[X\,Y\,W]} ^T  \leftarrow R^{-1}*[x \, y \, 1]^T  \\
+x'  \leftarrow X/W  \\
+y'  \leftarrow Y/W  \\
+r^2  \leftarrow x'^2 + y'^2 \\
+x''  \leftarrow x' \frac{1 + k_1 r^2 + k_2 r^4 + k_3 r^6}{1 + k_4 r^2 + k_5 r^4 + k_6 r^6}
++ 2p_1 x' y' + p_2(r^2 + 2 x'^2)  + s_1 r^2 + s_2 r^4\\
+y''  \leftarrow y' \frac{1 + k_1 r^2 + k_2 r^4 + k_3 r^6}{1 + k_4 r^2 + k_5 r^4 + k_6 r^6}
++ p_1 (r^2 + 2 y'^2) + 2 p_2 x' y' + s_3 r^2 + s_4 r^4 \\
+s\vecthree{x'''}{y'''}{1} =
+\vecthreethree{R_{33}(\tau_x, \tau_y)}{0}{-R_{13}(\tau_x, \tau_y)}
+{0}{R_{33}(\tau_x, \tau_y)}{-R_{23}(\tau_x, \tau_y)}
+{0}{0}{1} R(\tau_x, \tau_y) \vecthree{x''}{y''}{1}\\
+map_x(u,v)  \leftarrow x''' f_x + c_x  \\
+map_y(u,v)  \leftarrow y''' f_y + c_y
+\end{array}
+\f]
+where \f$(k_1, k_2, p_1, p_2[, k_3[, k_4, k_5, k_6[, s_1, s_2, s_3, s_4[, \tau_x, \tau_y]]]])\f$
+are the distortion coefficients.
+
+In case of a stereo camera, this function is called twice: once for each camera head, after
+stereoRectify, which in turn is called after #stereoCalibrate. But if the stereo camera
+was not calibrated, it is still possible to compute the rectification transformations directly from
+the fundamental matrix using #stereoRectifyUncalibrated. For each camera, the function computes
+homography H as the rectification transformation in a pixel domain, not a rotation matrix R in 3D
+space. R can be computed from H as
+\f[\texttt{R} = \texttt{cameraMatrix} ^{-1} \cdot \texttt{H} \cdot \texttt{cameraMatrix}\f]
+where cameraMatrix can be chosen arbitrarily.
+
+@param cameraMatrix Input camera matrix \f$A=\vecthreethree{f_x}{0}{c_x}{0}{f_y}{c_y}{0}{0}{1}\f$ .
+@param distCoeffs Input vector of distortion coefficients
+\f$(k_1, k_2, p_1, p_2[, k_3[, k_4, k_5, k_6[, s_1, s_2, s_3, s_4[, \tau_x, \tau_y]]]])\f$
+of 4, 5, 8, 12 or 14 elements. If the vector is NULL/empty, the zero distortion coefficients are assumed.
+@param R Optional rectification transformation in the object space (3x3 matrix). R1 or R2 ,
+computed by #stereoRectify can be passed here. If the matrix is empty, the identity transformation
+is assumed. In cvInitUndistortMap, R is assumed to be an identity matrix.
+@param newCameraMatrix New camera matrix \f$A'=\vecthreethree{f_x'}{0}{c_x'}{0}{f_y'}{c_y'}{0}{0}{1}\f$.
+@param size Undistorted image size.
+@param m1type Type of the first output map that can be CV_32FC1, CV_32FC2 or CV_16SC2, see #convertMaps
+@param map1 The first output map.
+@param map2 The second output map.
+ */
+CV_EXPORTS_W
+void initUndistortRectifyMap(InputArray cameraMatrix, InputArray distCoeffs,
+                             InputArray R, InputArray newCameraMatrix,
+                             Size size, int m1type, OutputArray map1, OutputArray map2);
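+
+// A minimal usage sketch (not part of the header itself), for the common monocular case where the
+// maps are built once and reused for every frame (cv::remap lives in opencv2/imgproc.hpp):
+//
+//     cv::Mat map1, map2;
+//     cv::initUndistortRectifyMap(cameraMatrix, distCoeffs, cv::Mat() /*R = identity*/,
+//                                 cameraMatrix /*newCameraMatrix*/, imageSize, CV_16SC2, map1, map2);
+//     cv::Mat corrected;
+//     cv::remap(frame, corrected, map1, map2, cv::INTER_LINEAR);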
+
+//! initializes maps for #remap for wide-angle
+CV_EXPORTS
+float initWideAngleProjMap(InputArray cameraMatrix, InputArray distCoeffs,
+                           Size imageSize, int destImageWidth,
+                           int m1type, OutputArray map1, OutputArray map2,
+                           enum UndistortTypes projType = PROJ_SPHERICAL_EQRECT, double alpha = 0);
+static inline
+float initWideAngleProjMap(InputArray cameraMatrix, InputArray distCoeffs,
+                           Size imageSize, int destImageWidth,
+                           int m1type, OutputArray map1, OutputArray map2,
+                           int projType, double alpha = 0)
+{
+    return initWideAngleProjMap(cameraMatrix, distCoeffs, imageSize, destImageWidth,
+                                m1type, map1, map2, (UndistortTypes)projType, alpha);
+}
+
+/** @brief Returns the default new camera matrix.
+
+The function returns the camera matrix that is either an exact copy of the input cameraMatrix (when
+centerPrincipalPoint=false), or the modified one (when centerPrincipalPoint=true).
+
+In the latter case, the new camera matrix will be:
+
+\f[\begin{bmatrix} f_x && 0 && ( \texttt{imgSize.width} -1)*0.5  \\ 0 && f_y && ( \texttt{imgSize.height} -1)*0.5  \\ 0 && 0 && 1 \end{bmatrix} ,\f]
+
+where \f$f_x\f$ and \f$f_y\f$ are \f$(0,0)\f$ and \f$(1,1)\f$ elements of cameraMatrix, respectively.
+
+By default, the undistortion functions in OpenCV (see #initUndistortRectifyMap, #undistort) do not
+move the principal point. However, when you work with stereo, it is important to move the principal
+points in both views to the same y-coordinate (which is required by most stereo correspondence
+algorithms), and maybe to the same x-coordinate too. So, you can form the new camera matrix for
+each view where the principal points are located at the center.
+
+@param cameraMatrix Input camera matrix.
+@param imgsize Camera view image size in pixels.
+@param centerPrincipalPoint Location of the principal point in the new camera matrix. The
+parameter indicates whether this location should be at the image center or not.
+ */
+CV_EXPORTS_W
+Mat getDefaultNewCameraMatrix(InputArray cameraMatrix, Size imgsize = Size(),
+                              bool centerPrincipalPoint = false);
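+
+// A minimal usage sketch (not part of the header itself): undistort while moving the principal
+// point to the image center, with `cameraMatrix`/`distCoeffs` assumed from calibration:
+//
+//     cv::Mat newK = cv::getDefaultNewCameraMatrix(cameraMatrix, src.size(), true);
+//     cv::Mat dst;
+//     cv::undistort(src, dst, cameraMatrix, distCoeffs, newK);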
+
+/** @brief Computes the ideal point coordinates from the observed point coordinates.
+
+The function is similar to #undistort and #initUndistortRectifyMap but it operates on a
+sparse set of points instead of a raster image. The function also performs a reverse transformation
+to #projectPoints. In case of a 3D object, it does not reconstruct its 3D coordinates, but for a
+planar object it does, up to a translation vector, if the proper R is specified.
+
+For each observed point coordinate \f$(u, v)\f$ the function computes:
+\f[
+\begin{array}{l}
+x^{"}  \leftarrow (u - c_x)/f_x  \\
+y^{"}  \leftarrow (v - c_y)/f_y  \\
+(x',y') = undistort(x^{"},y^{"}, \texttt{distCoeffs}) \\
+{[X\,Y\,W]} ^T  \leftarrow R*[x' \, y' \, 1]^T  \\
+x  \leftarrow X/W  \\
+y  \leftarrow Y/W  \\
+\text{only performed if P is specified:} \\
+u'  \leftarrow x {f'}_x + {c'}_x  \\
+v'  \leftarrow y {f'}_y + {c'}_y
+\end{array}
+\f]
+
+where *undistort* is an approximate iterative algorithm that estimates the normalized original
+point coordinates out of the normalized distorted point coordinates ("normalized" means that the
+coordinates do not depend on the camera matrix).
+
+The function can be used both for a stereo camera head and for a monocular camera (when R is empty).
+
+@param src Observed point coordinates, 1xN or Nx1 2-channel (CV_32FC2 or CV_64FC2).
+@param dst Output ideal point coordinates after undistortion and reverse perspective
+transformation. If matrix P is identity or omitted, dst will contain normalized point coordinates.
+@param cameraMatrix Camera matrix \f$\vecthreethree{f_x}{0}{c_x}{0}{f_y}{c_y}{0}{0}{1}\f$ .
+@param distCoeffs Input vector of distortion coefficients
+\f$(k_1, k_2, p_1, p_2[, k_3[, k_4, k_5, k_6[, s_1, s_2, s_3, s_4[, \tau_x, \tau_y]]]])\f$
+of 4, 5, 8, 12 or 14 elements. If the vector is NULL/empty, the zero distortion coefficients are assumed.
+@param R Rectification transformation in the object space (3x3 matrix). R1 or R2 computed by
+#stereoRectify can be passed here. If the matrix is empty, the identity transformation is used.
+@param P New camera matrix (3x3) or new projection matrix (3x4) \f$\begin{bmatrix} {f'}_x & 0 & {c'}_x & t_x \\ 0 & {f'}_y & {c'}_y & t_y \\ 0 & 0 & 1 & t_z \end{bmatrix}\f$. P1 or P2 computed by
+#stereoRectify can be passed here. If the matrix is empty, the identity new camera matrix is used.
+ */
+CV_EXPORTS_W
+void undistortPoints(InputArray src, OutputArray dst,
+                     InputArray cameraMatrix, InputArray distCoeffs,
+                     InputArray R = noArray(), InputArray P = noArray());
+/** @overload
+    @note Default version of #undistortPoints does 5 iterations to compute undistorted points.
+ */
+CV_EXPORTS_AS(undistortPointsIter)
+void undistortPoints(InputArray src, OutputArray dst,
+                     InputArray cameraMatrix, InputArray distCoeffs,
+                     InputArray R, InputArray P, TermCriteria criteria);
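+
+// A minimal usage sketch (not part of the header itself); `observed` holds pixel coordinates of
+// detected features and `cameraMatrix`/`distCoeffs` are assumed to come from calibration:
+//
+//     std::vector<cv::Point2f> observed = { {320.f, 240.f}, {100.f, 50.f} };
+//     std::vector<cv::Point2f> normalized, pixels;
+//     cv::undistortPoints(observed, normalized, cameraMatrix, distCoeffs);     // normalized coordinates
+//     cv::undistortPoints(observed, pixels, cameraMatrix, distCoeffs,
+//                         cv::noArray(), cameraMatrix);                        // P = cameraMatrix -> pixel coordinates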
+
+//! @} calib3d
+
+/** @brief The methods in this namespace use a so-called fisheye camera model.
+  @ingroup calib3d_fisheye
+*/
+namespace fisheye
+{
+//! @addtogroup calib3d_fisheye
+//! @{
+
+    enum{
+        CALIB_USE_INTRINSIC_GUESS   = 1 << 0,
+        CALIB_RECOMPUTE_EXTRINSIC   = 1 << 1,
+        CALIB_CHECK_COND            = 1 << 2,
+        CALIB_FIX_SKEW              = 1 << 3,
+        CALIB_FIX_K1                = 1 << 4,
+        CALIB_FIX_K2                = 1 << 5,
+        CALIB_FIX_K3                = 1 << 6,
+        CALIB_FIX_K4                = 1 << 7,
+        CALIB_FIX_INTRINSIC         = 1 << 8,
+        CALIB_FIX_PRINCIPAL_POINT   = 1 << 9
+    };
+
+    /** @brief Projects points using fisheye model
+
+    @param objectPoints Array of object points, 1xN/Nx1 3-channel (or vector\<Point3f\> ), where N is
+    the number of points in the view.
+    @param imagePoints Output array of image points, 2xN/Nx2 1-channel or 1xN/Nx1 2-channel, or
+    vector\<Point2f\>.
+    @param affine
+    @param K Camera matrix \f$K = \vecthreethree{f_x}{0}{c_x}{0}{f_y}{c_y}{0}{0}{1}\f$.
+    @param D Input vector of distortion coefficients \f$(k_1, k_2, k_3, k_4)\f$.
+    @param alpha The skew coefficient.
+    @param jacobian Optional output 2Nx15 jacobian matrix of derivatives of image points with respect
+    to components of the focal lengths, coordinates of the principal point, distortion coefficients,
+    rotation vector, translation vector, and the skew. In the old interface different components of
+    the jacobian are returned via different output parameters.
+
+    The function computes projections of 3D points to the image plane given intrinsic and extrinsic
+    camera parameters. Optionally, the function computes Jacobians - matrices of partial derivatives of
+    image points coordinates (as functions of all the input parameters) with respect to the particular
+    parameters, intrinsic and/or extrinsic.
+     */
+    CV_EXPORTS void projectPoints(InputArray objectPoints, OutputArray imagePoints, const Affine3d& affine,
+        InputArray K, InputArray D, double alpha = 0, OutputArray jacobian = noArray());
+
+    /** @overload */
+    CV_EXPORTS_W void projectPoints(InputArray objectPoints, OutputArray imagePoints, InputArray rvec, InputArray tvec,
+        InputArray K, InputArray D, double alpha = 0, OutputArray jacobian = noArray());
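+
+    // A minimal usage sketch (not part of the header itself); K (3x3) and D (4x1) are assumed to
+    // come from a prior fisheye::calibrate run:
+    //
+    //     std::vector<cv::Point3f> objectPts = { {0.f, 0.f, 1.f}, {0.1f, 0.f, 1.f} };
+    //     std::vector<cv::Point2f> imagePts;
+    //     cv::Mat rvec = cv::Mat::zeros(3, 1, CV_64F), tvec = cv::Mat::zeros(3, 1, CV_64F);
+    //     cv::fisheye::projectPoints(objectPts, imagePts, rvec, tvec, K, D);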
+
+    /** @brief Distorts 2D points using fisheye model.
+
+    @param undistorted Array of object points, 1xN/Nx1 2-channel (or vector\<Point2f\> ), where N is
+    the number of points in the view.
+    @param K Camera matrix \f$K = \vecthreethree{f_x}{0}{c_x}{0}{f_y}{c_y}{0}{0}{1}\f$.
+    @param D Input vector of distortion coefficients \f$(k_1, k_2, k_3, k_4)\f$.
+    @param alpha The skew coefficient.
+    @param distorted Output array of image points, 1xN/Nx1 2-channel, or vector\<Point2f\> .
+
+    Note that the function assumes the camera matrix of the undistorted points to be identity.
+    This means if you want to transform back points undistorted with undistortPoints() you have to
+    multiply them with \f$P^{-1}\f$.
+     */
+    CV_EXPORTS_W void distortPoints(InputArray undistorted, OutputArray distorted, InputArray K, InputArray D, double alpha = 0);
+
+    /** @brief Undistorts 2D points using fisheye model
+
+    @param distorted Array of object points, 1xN/Nx1 2-channel (or vector\<Point2f\> ), where N is the
+    number of points in the view.
+    @param K Camera matrix \f$K = \vecthreethree{f_x}{0}{c_x}{0}{f_y}{c_y}{0}{0}{1}\f$.
+    @param D Input vector of distortion coefficients \f$(k_1, k_2, k_3, k_4)\f$.
+    @param R Rectification transformation in the object space: 3x3 1-channel, or vector: 3x1/1x3
+    1-channel or 1x1 3-channel
+    @param P New camera matrix (3x3) or new projection matrix (3x4)
+    @param undistorted Output array of image points, 1xN/Nx1 2-channel, or vector\<Point2f\> .
+     */
+    CV_EXPORTS_W void undistortPoints(InputArray distorted, OutputArray undistorted,
+        InputArray K, InputArray D, InputArray R = noArray(), InputArray P  = noArray());
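+
+    // A minimal round-trip sketch (not part of the header itself); `distortedPts` holds pixel
+    // coordinates and K/D are assumed from fisheye calibration. undistortPoints without P yields
+    // normalized coordinates, which is exactly what distortPoints expects as input:
+    //
+    //     std::vector<cv::Point2f> distortedPts = { {320.f, 240.f} }, normalizedPts, redistortedPts;
+    //     cv::fisheye::undistortPoints(distortedPts, normalizedPts, K, D);
+    //     cv::fisheye::distortPoints(normalizedPts, redistortedPts, K, D);   // ~= distortedPts again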
+
+    /** @brief Computes undistortion and rectification maps for image transform by cv::remap(). If D is empty, zero
+    distortion is used; if R or P is empty, identity matrices are used.
+
+    @param K Camera matrix \f$K = \vecthreethree{f_x}{0}{c_x}{0}{f_y}{c_y}{0}{0}{1}\f$.
+    @param D Input vector of distortion coefficients \f$(k_1, k_2, k_3, k_4)\f$.
+    @param R Rectification transformation in the object space: 3x3 1-channel, or vector: 3x1/1x3
+    1-channel or 1x1 3-channel
+    @param P New camera matrix (3x3) or new projection matrix (3x4)
+    @param size Undistorted image size.
+    @param m1type Type of the first output map that can be CV_32FC1 or CV_16SC2 . See convertMaps()
+    for details.
+    @param map1 The first output map.
+    @param map2 The second output map.
+     */
+    CV_EXPORTS_W void initUndistortRectifyMap(InputArray K, InputArray D, InputArray R, InputArray P,
+        const cv::Size& size, int m1type, OutputArray map1, OutputArray map2);
+
+    /** @brief Transforms an image to compensate for fisheye lens distortion.
+
+    @param distorted image with fisheye lens distortion.
+    @param undistorted Output image with compensated fisheye lens distortion.
+    @param K Camera matrix \f$K = \vecthreethree{f_x}{0}{c_x}{0}{f_y}{c_y}{0}{0}{1}\f$.
+    @param D Input vector of distortion coefficients \f$(k_1, k_2, k_3, k_4)\f$.
+    @param Knew Camera matrix of the distorted image. By default, it is the identity matrix but you
+    may additionally scale and shift the result by using a different matrix.
+    @param new_size
+
+    The function transforms an image to compensate for fisheye lens distortion.
+
+    The function is simply a combination of fisheye::initUndistortRectifyMap (with unity R ) and remap
+    (with bilinear interpolation). See the former function for details of the transformation being
+    performed.
+
+    See below the results of undistortImage.
+        -   a\) result of undistort of perspective camera model (all possible coefficients (k_1, k_2, k_3,
+            k_4, k_5, k_6) of distortion were optimized under calibration)
+        -   b\) result of fisheye::undistortImage of fisheye camera model (all possible coefficients (k_1, k_2,
+            k_3, k_4) of fisheye distortion were optimized under calibration)
+        -   c\) original image was captured with fisheye lens
+
+    Pictures a) and b) are almost the same. But if we consider points of the image located far from the
+    center of the image, we can notice that on image a) these points are distorted.
+
+    ![image](pics/fisheye_undistorted.jpg)
+     */
+    CV_EXPORTS_W void undistortImage(InputArray distorted, OutputArray undistorted,
+        InputArray K, InputArray D, InputArray Knew = cv::noArray(), const Size& new_size = Size());
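+
+    // A minimal usage sketch (not part of the header itself). Passing Knew = K keeps the original
+    // focal length; leaving Knew at its identity default usually produces an unusable image:
+    //
+    //     cv::Mat undistorted;
+    //     cv::fisheye::undistortImage(fisheyeImg, undistorted, K, D, K /*Knew*/);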
+
+    /** @brief Estimates new camera matrix for undistortion or rectification.
+
+    @param K Camera matrix \f$K = \vecthreethree{f_x}{0}{c_x}{0}{f_y}{c_y}{0}{0}{1}\f$.
+    @param image_size
+    @param D Input vector of distortion coefficients \f$(k_1, k_2, k_3, k_4)\f$.
+    @param R Rectification transformation in the object space: 3x3 1-channel, or vector: 3x1/1x3
+    1-channel or 1x1 3-channel
+    @param P New camera matrix (3x3) or new projection matrix (3x4)
+    @param balance Sets the new focal length in range between the min focal length and the max focal
+    length. Balance is in range of [0, 1].
+    @param new_size
+    @param fov_scale Divisor for new focal length.
+     */
+    CV_EXPORTS_W void estimateNewCameraMatrixForUndistortRectify(InputArray K, InputArray D, const Size &image_size, InputArray R,
+        OutputArray P, double balance = 0.0, const Size& new_size = Size(), double fov_scale = 1.0);
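+
+    // A minimal usage sketch (not part of the header itself): estimate a new camera matrix with
+    // balance = 0 (crop to valid pixels) and feed it to undistortImage. K/D and `imageSize` are
+    // assumed from fisheye calibration:
+    //
+    //     cv::Mat Knew;
+    //     cv::fisheye::estimateNewCameraMatrixForUndistortRectify(K, D, imageSize,
+    //         cv::Mat::eye(3, 3, CV_64F), Knew, 0.0 /*balance*/);
+    //     cv::Mat undistorted;
+    //     cv::fisheye::undistortImage(fisheyeImg, undistorted, K, D, Knew, imageSize);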
+
+    /** @brief Performs camera calibration
+
+    @param objectPoints vector of vectors of calibration pattern points in the calibration pattern
+    coordinate space.
+    @param imagePoints vector of vectors of the projections of calibration pattern points.
+    imagePoints.size() must be equal to objectPoints.size(), and imagePoints[i].size() must be equal to
+    objectPoints[i].size() for each i.
+    @param image_size Size of the image used only to initialize the intrinsic camera matrix.
+    @param K Output 3x3 floating-point camera matrix
+    \f$A = \vecthreethree{f_x}{0}{c_x}{0}{f_y}{c_y}{0}{0}{1}\f$ . If
+    fisheye::CALIB_USE_INTRINSIC_GUESS is specified, some or all of fx, fy, cx, cy must be
+    initialized before calling the function.
+    @param D Output vector of distortion coefficients \f$(k_1, k_2, k_3, k_4)\f$.
+    @param rvecs Output vector of rotation vectors (see Rodrigues ) estimated for each pattern view.
+    That is, each k-th rotation vector together with the corresponding k-th translation vector (see
+    the next output parameter description) brings the calibration pattern from the model coordinate
+    space (in which object points are specified) to the world coordinate space, that is, a real
+    position of the calibration pattern in the k-th pattern view (k=0.. *M* -1).
+    @param tvecs Output vector of translation vectors estimated for each pattern view.
+    @param flags Different flags that may be zero or a combination of the following values:
+    -   **fisheye::CALIB_USE_INTRINSIC_GUESS** cameraMatrix contains valid initial values of
+    fx, fy, cx, cy that are optimized further. Otherwise, (cx, cy) is initially set to the image
+    center ( imageSize is used), and focal distances are computed in a least-squares fashion.
+    -   **fisheye::CALIB_RECOMPUTE_EXTRINSIC** Extrinsic parameters will be recomputed after each
+    iteration of intrinsic optimization.
+    -   **fisheye::CALIB_CHECK_COND** The function will check the validity of the condition number.
+    -   **fisheye::CALIB_FIX_SKEW** The skew coefficient (alpha) is set to zero and stays zero.
+    -   **fisheye::CALIB_FIX_K1..fisheye::CALIB_FIX_K4** Selected distortion coefficients
+    are set to zeros and stay zero.
+    -   **fisheye::CALIB_FIX_PRINCIPAL_POINT** The principal point is not changed during the global
+    optimization. It stays at the center or at a different location specified when
+    CALIB_USE_INTRINSIC_GUESS is also set.
+    @param criteria Termination criteria for the iterative optimization algorithm.
+     */
+    CV_EXPORTS_W double calibrate(InputArrayOfArrays objectPoints, InputArrayOfArrays imagePoints, const Size& image_size,
+        InputOutputArray K, InputOutputArray D, OutputArrayOfArrays rvecs, OutputArrayOfArrays tvecs, int flags = 0,
+            TermCriteria criteria = TermCriteria(TermCriteria::COUNT + TermCriteria::EPS, 100, DBL_EPSILON));
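+
+    // A minimal usage sketch (not part of the header itself); `objectPoints`/`imagePoints` are
+    // assumed to be filled from a detected calibration pattern, one inner vector per view:
+    //
+    //     cv::Mat K, D;
+    //     std::vector<cv::Mat> rvecs, tvecs;
+    //     int flags = cv::fisheye::CALIB_RECOMPUTE_EXTRINSIC |
+    //                 cv::fisheye::CALIB_CHECK_COND |
+    //                 cv::fisheye::CALIB_FIX_SKEW;
+    //     double rms = cv::fisheye::calibrate(objectPoints, imagePoints, imageSize,
+    //                                         K, D, rvecs, tvecs, flags);   // rms reprojection error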
+
+    /** @brief Stereo rectification for fisheye camera model
+
+    @param K1 First camera matrix.
+    @param D1 First camera distortion parameters.
+    @param K2 Second camera matrix.
+    @param D2 Second camera distortion parameters.
+    @param imageSize Size of the image used for stereo calibration.
+    @param R Rotation matrix between the coordinate systems of the first and the second
+    cameras.
+    @param tvec Translation vector between coordinate systems of the cameras.
+    @param R1 Output 3x3 rectification transform (rotation matrix) for the first camera.
+    @param R2 Output 3x3 rectification transform (rotation matrix) for the second camera.
+    @param P1 Output 3x4 projection matrix in the new (rectified) coordinate systems for the first
+    camera.
+    @param P2 Output 3x4 projection matrix in the new (rectified) coordinate systems for the second
+    camera.
+    @param Q Output \f$4 \times 4\f$ disparity-to-depth mapping matrix (see reprojectImageTo3D ).
+    @param flags Operation flags that may be zero or CALIB_ZERO_DISPARITY . If the flag is set,
+    the function makes the principal points of each camera have the same pixel coordinates in the
+    rectified views. And if the flag is not set, the function may still shift the images in the
+    horizontal or vertical direction (depending on the orientation of epipolar lines) to maximize the
+    useful image area.
+    @param newImageSize New image resolution after rectification. The same size should be passed to
+    initUndistortRectifyMap (see the stereo_calib.cpp sample in OpenCV samples directory). When (0,0)
+    is passed (default), it is set to the original imageSize . Setting it to a larger value can help you
+    preserve details in the original image, especially when there is a big radial distortion.
+    @param balance Sets the new focal length in range between the min focal length and the max focal
+    length. Balance is in range of [0, 1].
+    @param fov_scale Divisor for new focal length.
+     */
+    CV_EXPORTS_W void stereoRectify(InputArray K1, InputArray D1, InputArray K2, InputArray D2, const Size &imageSize, InputArray R, InputArray tvec,
+        OutputArray R1, OutputArray R2, OutputArray P1, OutputArray P2, OutputArray Q, int flags, const Size &newImageSize = Size(),
+        double balance = 0.0, double fov_scale = 1.0);
+
+    /** @brief Performs stereo calibration
+
+    @param objectPoints Vector of vectors of the calibration pattern points.
+    @param imagePoints1 Vector of vectors of the projections of the calibration pattern points,
+    observed by the first camera.
+    @param imagePoints2 Vector of vectors of the projections of the calibration pattern points,
+    observed by the second camera.
+    @param K1 Input/output first camera matrix:
+    \f$\vecthreethree{f_x^{(j)}}{0}{c_x^{(j)}}{0}{f_y^{(j)}}{c_y^{(j)}}{0}{0}{1}\f$ , \f$j = 0,\, 1\f$ . If
+    any of fisheye::CALIB_USE_INTRINSIC_GUESS , fisheye::CALIB_FIX_INTRINSIC are specified,
+    some or all of the matrix components must be initialized.
+    @param D1 Input/output vector of distortion coefficients \f$(k_1, k_2, k_3, k_4)\f$ of 4 elements.
+    @param K2 Input/output second camera matrix. The parameter is similar to K1 .
+    @param D2 Input/output lens distortion coefficients for the second camera. The parameter is
+    similar to D1 .
+    @param imageSize Size of the image used only to initialize intrinsic camera matrix.
+    @param R Output rotation matrix between the 1st and the 2nd camera coordinate systems.
+    @param T Output translation vector between the coordinate systems of the cameras.
+    @param flags Different flags that may be zero or a combination of the following values:
+    -   **fisheye::CALIB_FIX_INTRINSIC** Fix K1, K2 and D1, D2 so that only the R and T matrices
+    are estimated.
+    -   **fisheye::CALIB_USE_INTRINSIC_GUESS** K1, K2 contain valid initial values of
+    fx, fy, cx, cy that are optimized further. Otherwise, (cx, cy) is initially set to the image
+    center (imageSize is used), and focal distances are computed in a least-squares fashion.
+    -   **fisheye::CALIB_RECOMPUTE_EXTRINSIC** Extrinsic parameters will be recomputed after each
+    iteration of intrinsic optimization.
+    -   **fisheye::CALIB_CHECK_COND** The function will check the validity of the condition number.
+    -   **fisheye::CALIB_FIX_SKEW** The skew coefficient (alpha) is set to zero and stays zero.
+    -   **fisheye::CALIB_FIX_K1..4** Selected distortion coefficients are set to zeros and stay
+    zero.
+    @param criteria Termination criteria for the iterative optimization algorithm.
+     */
+    CV_EXPORTS_W double stereoCalibrate(InputArrayOfArrays objectPoints, InputArrayOfArrays imagePoints1, InputArrayOfArrays imagePoints2,
+                                  InputOutputArray K1, InputOutputArray D1, InputOutputArray K2, InputOutputArray D2, Size imageSize,
+                                  OutputArray R, OutputArray T, int flags = fisheye::CALIB_FIX_INTRINSIC,
+                                  TermCriteria criteria = TermCriteria(TermCriteria::COUNT + TermCriteria::EPS, 100, DBL_EPSILON));
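+
+    // A minimal usage sketch (not part of the header itself): calibrate the stereo extrinsics with
+    // fixed per-camera intrinsics, then derive rectification transforms. K1/D1/K2/D2 are assumed
+    // from prior per-camera fisheye::calibrate runs:
+    //
+    //     cv::Mat R, T;
+    //     cv::fisheye::stereoCalibrate(objectPoints, imagePoints1, imagePoints2,
+    //                                  K1, D1, K2, D2, imageSize, R, T,
+    //                                  cv::fisheye::CALIB_FIX_INTRINSIC);
+    //     cv::Mat R1, R2, P1, P2, Q;
+    //     cv::fisheye::stereoRectify(K1, D1, K2, D2, imageSize, R, T,
+    //                                R1, R2, P1, P2, Q, cv::CALIB_ZERO_DISPARITY);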
+
+//! @} calib3d_fisheye
+} // end namespace fisheye
+
+} //end namespace cv
+
+#if 0 //def __cplusplus
+//////////////////////////////////////////////////////////////////////////////////////////
+class CV_EXPORTS CvLevMarq
+{
+public:
+    CvLevMarq();
+    CvLevMarq( int nparams, int nerrs, CvTermCriteria criteria=
+              cvTermCriteria(CV_TERMCRIT_EPS+CV_TERMCRIT_ITER,30,DBL_EPSILON),
+              bool completeSymmFlag=false );
+    ~CvLevMarq();
+    void init( int nparams, int nerrs, CvTermCriteria criteria=
+              cvTermCriteria(CV_TERMCRIT_EPS+CV_TERMCRIT_ITER,30,DBL_EPSILON),
+              bool completeSymmFlag=false );
+    bool update( const CvMat*& param, CvMat*& J, CvMat*& err );
+    bool updateAlt( const CvMat*& param, CvMat*& JtJ, CvMat*& JtErr, double*& errNorm );
+
+    void clear();
+    void step();
+    enum { DONE=0, STARTED=1, CALC_J=2, CHECK_ERR=3 };
+
+    cv::Ptr<CvMat> mask;
+    cv::Ptr<CvMat> prevParam;
+    cv::Ptr<CvMat> param;
+    cv::Ptr<CvMat> J;
+    cv::Ptr<CvMat> err;
+    cv::Ptr<CvMat> JtJ;
+    cv::Ptr<CvMat> JtJN;
+    cv::Ptr<CvMat> JtErr;
+    cv::Ptr<CvMat> JtJV;
+    cv::Ptr<CvMat> JtJW;
+    double prevErrNorm, errNorm;
+    int lambdaLg10;
+    CvTermCriteria criteria;
+    int state;
+    int iters;
+    bool completeSymmFlag;
+    int solveMethod;
+};
+#endif
+
+#endif

Some files were not shown because too many files changed in this diff