armv7-apple-ios.swiftinterface

// swift-interface-format-version: 1.0
// swift-compiler-version: Apple Swift version 5.3.2 (swiftlang-1200.0.45 clang-1200.0.32.28)
// swift-module-flags: -target armv7s-apple-ios9.0 -enable-objc-interop -enable-library-evolution -swift-version 5 -enforce-exclusivity=checked -O -module-name opencv2
import Foundation
import Swift
@_exported import opencv2
extension Aruco {
@available(*, deprecated)
@nonobjc public class func detectMarkers(image: opencv2.Mat, dictionary: opencv2.Dictionary, corners: inout [opencv2.Mat], ids: opencv2.Mat, parameters: opencv2.DetectorParameters, rejectedImgPoints: inout [opencv2.Mat])
}
extension Aruco {
@available(*, deprecated)
@nonobjc public class func detectMarkers(image: opencv2.Mat, dictionary: opencv2.Dictionary, corners: inout [opencv2.Mat], ids: opencv2.Mat, parameters: opencv2.DetectorParameters)
}
extension Aruco {
@available(*, deprecated)
@nonobjc public class func detectMarkers(image: opencv2.Mat, dictionary: opencv2.Dictionary, corners: inout [opencv2.Mat], ids: opencv2.Mat)
}
extension Aruco {
@available(*, deprecated)
@nonobjc public class func refineDetectedMarkers(image: opencv2.Mat, board: opencv2.Board, detectedCorners: inout [opencv2.Mat], detectedIds: opencv2.Mat, rejectedCorners: inout [opencv2.Mat], cameraMatrix: opencv2.Mat, distCoeffs: opencv2.Mat, minRepDistance: Swift.Float, errorCorrectionRate: Swift.Float, checkAllOrders: Swift.Bool, recoveredIdxs: opencv2.Mat, parameters: opencv2.DetectorParameters)
}
extension Aruco {
@available(*, deprecated)
@nonobjc public class func refineDetectedMarkers(image: opencv2.Mat, board: opencv2.Board, detectedCorners: inout [opencv2.Mat], detectedIds: opencv2.Mat, rejectedCorners: inout [opencv2.Mat], cameraMatrix: opencv2.Mat, distCoeffs: opencv2.Mat, minRepDistance: Swift.Float, errorCorrectionRate: Swift.Float, checkAllOrders: Swift.Bool, recoveredIdxs: opencv2.Mat)
}
extension Aruco {
@available(*, deprecated)
@nonobjc public class func refineDetectedMarkers(image: opencv2.Mat, board: opencv2.Board, detectedCorners: inout [opencv2.Mat], detectedIds: opencv2.Mat, rejectedCorners: inout [opencv2.Mat], cameraMatrix: opencv2.Mat, distCoeffs: opencv2.Mat, minRepDistance: Swift.Float, errorCorrectionRate: Swift.Float, checkAllOrders: Swift.Bool)
}
extension Aruco {
@available(*, deprecated)
@nonobjc public class func refineDetectedMarkers(image: opencv2.Mat, board: opencv2.Board, detectedCorners: inout [opencv2.Mat], detectedIds: opencv2.Mat, rejectedCorners: inout [opencv2.Mat], cameraMatrix: opencv2.Mat, distCoeffs: opencv2.Mat, minRepDistance: Swift.Float, errorCorrectionRate: Swift.Float)
}
extension Aruco {
@available(*, deprecated)
@nonobjc public class func refineDetectedMarkers(image: opencv2.Mat, board: opencv2.Board, detectedCorners: inout [opencv2.Mat], detectedIds: opencv2.Mat, rejectedCorners: inout [opencv2.Mat], cameraMatrix: opencv2.Mat, distCoeffs: opencv2.Mat, minRepDistance: Swift.Float)
}
extension Aruco {
@available(*, deprecated)
@nonobjc public class func refineDetectedMarkers(image: opencv2.Mat, board: opencv2.Board, detectedCorners: inout [opencv2.Mat], detectedIds: opencv2.Mat, rejectedCorners: inout [opencv2.Mat], cameraMatrix: opencv2.Mat, distCoeffs: opencv2.Mat)
}
extension Aruco {
@available(*, deprecated)
@nonobjc public class func refineDetectedMarkers(image: opencv2.Mat, board: opencv2.Board, detectedCorners: inout [opencv2.Mat], detectedIds: opencv2.Mat, rejectedCorners: inout [opencv2.Mat], cameraMatrix: opencv2.Mat)
}
extension Aruco {
@available(*, deprecated)
@nonobjc public class func refineDetectedMarkers(image: opencv2.Mat, board: opencv2.Board, detectedCorners: inout [opencv2.Mat], detectedIds: opencv2.Mat, rejectedCorners: inout [opencv2.Mat])
}
extension Aruco {
@nonobjc public class func calibrateCameraAruco(corners: [opencv2.Mat], ids: opencv2.Mat, counter: opencv2.Mat, board: opencv2.Board, imageSize: opencv2.Size2i, cameraMatrix: opencv2.Mat, distCoeffs: opencv2.Mat, rvecs: inout [opencv2.Mat], tvecs: inout [opencv2.Mat], stdDeviationsIntrinsics: opencv2.Mat, stdDeviationsExtrinsics: opencv2.Mat, perViewErrors: opencv2.Mat, flags: Swift.Int32, criteria: opencv2.TermCriteria) -> Swift.Double
}
extension Aruco {
@nonobjc public class func calibrateCameraAruco(corners: [opencv2.Mat], ids: opencv2.Mat, counter: opencv2.Mat, board: opencv2.Board, imageSize: opencv2.Size2i, cameraMatrix: opencv2.Mat, distCoeffs: opencv2.Mat, rvecs: inout [opencv2.Mat], tvecs: inout [opencv2.Mat], stdDeviationsIntrinsics: opencv2.Mat, stdDeviationsExtrinsics: opencv2.Mat, perViewErrors: opencv2.Mat, flags: Swift.Int32) -> Swift.Double
}
extension Aruco {
@nonobjc public class func calibrateCameraAruco(corners: [opencv2.Mat], ids: opencv2.Mat, counter: opencv2.Mat, board: opencv2.Board, imageSize: opencv2.Size2i, cameraMatrix: opencv2.Mat, distCoeffs: opencv2.Mat, rvecs: inout [opencv2.Mat], tvecs: inout [opencv2.Mat], stdDeviationsIntrinsics: opencv2.Mat, stdDeviationsExtrinsics: opencv2.Mat, perViewErrors: opencv2.Mat) -> Swift.Double
}
extension Aruco {
@nonobjc public class func calibrateCameraAruco(corners: [opencv2.Mat], ids: opencv2.Mat, counter: opencv2.Mat, board: opencv2.Board, imageSize: opencv2.Size2i, cameraMatrix: opencv2.Mat, distCoeffs: opencv2.Mat, rvecs: inout [opencv2.Mat], tvecs: inout [opencv2.Mat], flags: Swift.Int32, criteria: opencv2.TermCriteria) -> Swift.Double
}
extension Aruco {
@nonobjc public class func calibrateCameraAruco(corners: [opencv2.Mat], ids: opencv2.Mat, counter: opencv2.Mat, board: opencv2.Board, imageSize: opencv2.Size2i, cameraMatrix: opencv2.Mat, distCoeffs: opencv2.Mat, rvecs: inout [opencv2.Mat], tvecs: inout [opencv2.Mat], flags: Swift.Int32) -> Swift.Double
}
extension Aruco {
@nonobjc public class func calibrateCameraAruco(corners: [opencv2.Mat], ids: opencv2.Mat, counter: opencv2.Mat, board: opencv2.Board, imageSize: opencv2.Size2i, cameraMatrix: opencv2.Mat, distCoeffs: opencv2.Mat, rvecs: inout [opencv2.Mat], tvecs: inout [opencv2.Mat]) -> Swift.Double
}
extension Aruco {
@nonobjc public class func calibrateCameraAruco(corners: [opencv2.Mat], ids: opencv2.Mat, counter: opencv2.Mat, board: opencv2.Board, imageSize: opencv2.Size2i, cameraMatrix: opencv2.Mat, distCoeffs: opencv2.Mat, rvecs: inout [opencv2.Mat]) -> Swift.Double
}
extension Aruco {
@nonobjc public class func calibrateCameraCharuco(charucoCorners: [opencv2.Mat], charucoIds: [opencv2.Mat], board: opencv2.CharucoBoard, imageSize: opencv2.Size2i, cameraMatrix: opencv2.Mat, distCoeffs: opencv2.Mat, rvecs: inout [opencv2.Mat], tvecs: inout [opencv2.Mat], stdDeviationsIntrinsics: opencv2.Mat, stdDeviationsExtrinsics: opencv2.Mat, perViewErrors: opencv2.Mat, flags: Swift.Int32, criteria: opencv2.TermCriteria) -> Swift.Double
}
extension Aruco {
@nonobjc public class func calibrateCameraCharuco(charucoCorners: [opencv2.Mat], charucoIds: [opencv2.Mat], board: opencv2.CharucoBoard, imageSize: opencv2.Size2i, cameraMatrix: opencv2.Mat, distCoeffs: opencv2.Mat, rvecs: inout [opencv2.Mat], tvecs: inout [opencv2.Mat], stdDeviationsIntrinsics: opencv2.Mat, stdDeviationsExtrinsics: opencv2.Mat, perViewErrors: opencv2.Mat, flags: Swift.Int32) -> Swift.Double
}
extension Aruco {
@nonobjc public class func calibrateCameraCharuco(charucoCorners: [opencv2.Mat], charucoIds: [opencv2.Mat], board: opencv2.CharucoBoard, imageSize: opencv2.Size2i, cameraMatrix: opencv2.Mat, distCoeffs: opencv2.Mat, rvecs: inout [opencv2.Mat], tvecs: inout [opencv2.Mat], stdDeviationsIntrinsics: opencv2.Mat, stdDeviationsExtrinsics: opencv2.Mat, perViewErrors: opencv2.Mat) -> Swift.Double
}
extension Aruco {
@nonobjc public class func calibrateCameraCharuco(charucoCorners: [opencv2.Mat], charucoIds: [opencv2.Mat], board: opencv2.CharucoBoard, imageSize: opencv2.Size2i, cameraMatrix: opencv2.Mat, distCoeffs: opencv2.Mat, rvecs: inout [opencv2.Mat], tvecs: inout [opencv2.Mat], flags: Swift.Int32, criteria: opencv2.TermCriteria) -> Swift.Double
}
extension Aruco {
@nonobjc public class func calibrateCameraCharuco(charucoCorners: [opencv2.Mat], charucoIds: [opencv2.Mat], board: opencv2.CharucoBoard, imageSize: opencv2.Size2i, cameraMatrix: opencv2.Mat, distCoeffs: opencv2.Mat, rvecs: inout [opencv2.Mat], tvecs: inout [opencv2.Mat], flags: Swift.Int32) -> Swift.Double
}
extension Aruco {
@nonobjc public class func calibrateCameraCharuco(charucoCorners: [opencv2.Mat], charucoIds: [opencv2.Mat], board: opencv2.CharucoBoard, imageSize: opencv2.Size2i, cameraMatrix: opencv2.Mat, distCoeffs: opencv2.Mat, rvecs: inout [opencv2.Mat], tvecs: inout [opencv2.Mat]) -> Swift.Double
}
extension Aruco {
@nonobjc public class func calibrateCameraCharuco(charucoCorners: [opencv2.Mat], charucoIds: [opencv2.Mat], board: opencv2.CharucoBoard, imageSize: opencv2.Size2i, cameraMatrix: opencv2.Mat, distCoeffs: opencv2.Mat, rvecs: inout [opencv2.Mat]) -> Swift.Double
}
extension Aruco {
@available(*, deprecated)
@nonobjc public class func detectCharucoDiamond(image: opencv2.Mat, markerCorners: [opencv2.Mat], markerIds: opencv2.Mat, squareMarkerLengthRate: Swift.Float, diamondCorners: inout [opencv2.Mat], diamondIds: opencv2.Mat, cameraMatrix: opencv2.Mat, distCoeffs: opencv2.Mat, dictionary: opencv2.Dictionary)
}
extension Aruco {
@available(*, deprecated)
@nonobjc public class func detectCharucoDiamond(image: opencv2.Mat, markerCorners: [opencv2.Mat], markerIds: opencv2.Mat, squareMarkerLengthRate: Swift.Float, diamondCorners: inout [opencv2.Mat], diamondIds: opencv2.Mat, cameraMatrix: opencv2.Mat, distCoeffs: opencv2.Mat)
}
extension Aruco {
@available(*, deprecated)
@nonobjc public class func detectCharucoDiamond(image: opencv2.Mat, markerCorners: [opencv2.Mat], markerIds: opencv2.Mat, squareMarkerLengthRate: Swift.Float, diamondCorners: inout [opencv2.Mat], diamondIds: opencv2.Mat, cameraMatrix: opencv2.Mat)
}
extension Aruco {
@available(*, deprecated)
@nonobjc public class func detectCharucoDiamond(image: opencv2.Mat, markerCorners: [opencv2.Mat], markerIds: opencv2.Mat, squareMarkerLengthRate: Swift.Float, diamondCorners: inout [opencv2.Mat], diamondIds: opencv2.Mat)
}
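// Hedged usage sketch (not part of the generated interface): calling the
// deprecated Aruco.detectMarkers overload declared above. Assumes `image` is
// a valid grayscale or BGR Mat and `dictionary` is an opencv2.Dictionary
// obtained elsewhere; no dictionary factory is declared in this excerpt.
// func detectMarkersSketch(image: Mat, dictionary: opencv2.Dictionary) {
//     var corners = [Mat]()   // one Mat of four corner points per detected marker
//     let ids = Mat()         // one marker id per row, filled by the call
//     Aruco.detectMarkers(image: image, dictionary: dictionary,
//                         corners: &corners, ids: ids)
//     print("detected \(corners.count) markers")
// }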
extension Calib3d {
@nonobjc public class func solveP3P(objectPoints: opencv2.Mat, imagePoints: opencv2.Mat, cameraMatrix: opencv2.Mat, distCoeffs: opencv2.Mat, rvecs: inout [opencv2.Mat], tvecs: inout [opencv2.Mat], flags: Swift.Int32) -> Swift.Int32
}
extension Calib3d {
@nonobjc public class func solvePnPGeneric(objectPoints: opencv2.Mat, imagePoints: opencv2.Mat, cameraMatrix: opencv2.Mat, distCoeffs: opencv2.Mat, rvecs: inout [opencv2.Mat], tvecs: inout [opencv2.Mat], useExtrinsicGuess: Swift.Bool, flags: opencv2.SolvePnPMethod, rvec: opencv2.Mat, tvec: opencv2.Mat, reprojectionError: opencv2.Mat) -> Swift.Int32
}
extension Calib3d {
@nonobjc public class func solvePnPGeneric(objectPoints: opencv2.Mat, imagePoints: opencv2.Mat, cameraMatrix: opencv2.Mat, distCoeffs: opencv2.Mat, rvecs: inout [opencv2.Mat], tvecs: inout [opencv2.Mat], useExtrinsicGuess: Swift.Bool, flags: opencv2.SolvePnPMethod, rvec: opencv2.Mat, tvec: opencv2.Mat) -> Swift.Int32
}
extension Calib3d {
@nonobjc public class func solvePnPGeneric(objectPoints: opencv2.Mat, imagePoints: opencv2.Mat, cameraMatrix: opencv2.Mat, distCoeffs: opencv2.Mat, rvecs: inout [opencv2.Mat], tvecs: inout [opencv2.Mat], useExtrinsicGuess: Swift.Bool, flags: opencv2.SolvePnPMethod, rvec: opencv2.Mat) -> Swift.Int32
}
extension Calib3d {
@nonobjc public class func solvePnPGeneric(objectPoints: opencv2.Mat, imagePoints: opencv2.Mat, cameraMatrix: opencv2.Mat, distCoeffs: opencv2.Mat, rvecs: inout [opencv2.Mat], tvecs: inout [opencv2.Mat], useExtrinsicGuess: Swift.Bool, flags: opencv2.SolvePnPMethod) -> Swift.Int32
}
extension Calib3d {
@nonobjc public class func solvePnPGeneric(objectPoints: opencv2.Mat, imagePoints: opencv2.Mat, cameraMatrix: opencv2.Mat, distCoeffs: opencv2.Mat, rvecs: inout [opencv2.Mat], tvecs: inout [opencv2.Mat], useExtrinsicGuess: Swift.Bool) -> Swift.Int32
}
extension Calib3d {
@nonobjc public class func solvePnPGeneric(objectPoints: opencv2.Mat, imagePoints: opencv2.Mat, cameraMatrix: opencv2.Mat, distCoeffs: opencv2.Mat, rvecs: inout [opencv2.Mat], tvecs: inout [opencv2.Mat]) -> Swift.Int32
}
extension Calib3d {
@nonobjc public class func calibrateCamera(objectPoints: [opencv2.Mat], imagePoints: [opencv2.Mat], imageSize: opencv2.Size2i, cameraMatrix: opencv2.Mat, distCoeffs: opencv2.Mat, rvecs: inout [opencv2.Mat], tvecs: inout [opencv2.Mat], stdDeviationsIntrinsics: opencv2.Mat, stdDeviationsExtrinsics: opencv2.Mat, perViewErrors: opencv2.Mat, flags: Swift.Int32, criteria: opencv2.TermCriteria) -> Swift.Double
}
extension Calib3d {
@nonobjc public class func calibrateCamera(objectPoints: [opencv2.Mat], imagePoints: [opencv2.Mat], imageSize: opencv2.Size2i, cameraMatrix: opencv2.Mat, distCoeffs: opencv2.Mat, rvecs: inout [opencv2.Mat], tvecs: inout [opencv2.Mat], stdDeviationsIntrinsics: opencv2.Mat, stdDeviationsExtrinsics: opencv2.Mat, perViewErrors: opencv2.Mat, flags: Swift.Int32) -> Swift.Double
}
extension Calib3d {
@nonobjc public class func calibrateCamera(objectPoints: [opencv2.Mat], imagePoints: [opencv2.Mat], imageSize: opencv2.Size2i, cameraMatrix: opencv2.Mat, distCoeffs: opencv2.Mat, rvecs: inout [opencv2.Mat], tvecs: inout [opencv2.Mat], stdDeviationsIntrinsics: opencv2.Mat, stdDeviationsExtrinsics: opencv2.Mat, perViewErrors: opencv2.Mat) -> Swift.Double
}
extension Calib3d {
@nonobjc public class func calibrateCamera(objectPoints: [opencv2.Mat], imagePoints: [opencv2.Mat], imageSize: opencv2.Size2i, cameraMatrix: opencv2.Mat, distCoeffs: opencv2.Mat, rvecs: inout [opencv2.Mat], tvecs: inout [opencv2.Mat], flags: Swift.Int32, criteria: opencv2.TermCriteria) -> Swift.Double
}
extension Calib3d {
@nonobjc public class func calibrateCamera(objectPoints: [opencv2.Mat], imagePoints: [opencv2.Mat], imageSize: opencv2.Size2i, cameraMatrix: opencv2.Mat, distCoeffs: opencv2.Mat, rvecs: inout [opencv2.Mat], tvecs: inout [opencv2.Mat], flags: Swift.Int32) -> Swift.Double
}
extension Calib3d {
@nonobjc public class func calibrateCamera(objectPoints: [opencv2.Mat], imagePoints: [opencv2.Mat], imageSize: opencv2.Size2i, cameraMatrix: opencv2.Mat, distCoeffs: opencv2.Mat, rvecs: inout [opencv2.Mat], tvecs: inout [opencv2.Mat]) -> Swift.Double
}
extension Calib3d {
@nonobjc public class func calibrateCameraRO(objectPoints: [opencv2.Mat], imagePoints: [opencv2.Mat], imageSize: opencv2.Size2i, iFixedPoint: Swift.Int32, cameraMatrix: opencv2.Mat, distCoeffs: opencv2.Mat, rvecs: inout [opencv2.Mat], tvecs: inout [opencv2.Mat], newObjPoints: opencv2.Mat, stdDeviationsIntrinsics: opencv2.Mat, stdDeviationsExtrinsics: opencv2.Mat, stdDeviationsObjPoints: opencv2.Mat, perViewErrors: opencv2.Mat, flags: Swift.Int32, criteria: opencv2.TermCriteria) -> Swift.Double
}
extension Calib3d {
@nonobjc public class func calibrateCameraRO(objectPoints: [opencv2.Mat], imagePoints: [opencv2.Mat], imageSize: opencv2.Size2i, iFixedPoint: Swift.Int32, cameraMatrix: opencv2.Mat, distCoeffs: opencv2.Mat, rvecs: inout [opencv2.Mat], tvecs: inout [opencv2.Mat], newObjPoints: opencv2.Mat, stdDeviationsIntrinsics: opencv2.Mat, stdDeviationsExtrinsics: opencv2.Mat, stdDeviationsObjPoints: opencv2.Mat, perViewErrors: opencv2.Mat, flags: Swift.Int32) -> Swift.Double
}
extension Calib3d {
@nonobjc public class func calibrateCameraRO(objectPoints: [opencv2.Mat], imagePoints: [opencv2.Mat], imageSize: opencv2.Size2i, iFixedPoint: Swift.Int32, cameraMatrix: opencv2.Mat, distCoeffs: opencv2.Mat, rvecs: inout [opencv2.Mat], tvecs: inout [opencv2.Mat], newObjPoints: opencv2.Mat, stdDeviationsIntrinsics: opencv2.Mat, stdDeviationsExtrinsics: opencv2.Mat, stdDeviationsObjPoints: opencv2.Mat, perViewErrors: opencv2.Mat) -> Swift.Double
}
extension Calib3d {
@nonobjc public class func calibrateCameraRO(objectPoints: [opencv2.Mat], imagePoints: [opencv2.Mat], imageSize: opencv2.Size2i, iFixedPoint: Swift.Int32, cameraMatrix: opencv2.Mat, distCoeffs: opencv2.Mat, rvecs: inout [opencv2.Mat], tvecs: inout [opencv2.Mat], newObjPoints: opencv2.Mat, flags: Swift.Int32, criteria: opencv2.TermCriteria) -> Swift.Double
}
extension Calib3d {
@nonobjc public class func calibrateCameraRO(objectPoints: [opencv2.Mat], imagePoints: [opencv2.Mat], imageSize: opencv2.Size2i, iFixedPoint: Swift.Int32, cameraMatrix: opencv2.Mat, distCoeffs: opencv2.Mat, rvecs: inout [opencv2.Mat], tvecs: inout [opencv2.Mat], newObjPoints: opencv2.Mat, flags: Swift.Int32) -> Swift.Double
}
extension Calib3d {
@nonobjc public class func calibrateCameraRO(objectPoints: [opencv2.Mat], imagePoints: [opencv2.Mat], imageSize: opencv2.Size2i, iFixedPoint: Swift.Int32, cameraMatrix: opencv2.Mat, distCoeffs: opencv2.Mat, rvecs: inout [opencv2.Mat], tvecs: inout [opencv2.Mat], newObjPoints: opencv2.Mat) -> Swift.Double
}
extension Calib3d {
@nonobjc public class func stereoCalibrate(objectPoints: [opencv2.Mat], imagePoints1: [opencv2.Mat], imagePoints2: [opencv2.Mat], cameraMatrix1: opencv2.Mat, distCoeffs1: opencv2.Mat, cameraMatrix2: opencv2.Mat, distCoeffs2: opencv2.Mat, imageSize: opencv2.Size2i, R: opencv2.Mat, T: opencv2.Mat, E: opencv2.Mat, F: opencv2.Mat, rvecs: inout [opencv2.Mat], tvecs: inout [opencv2.Mat], perViewErrors: opencv2.Mat, flags: Swift.Int32, criteria: opencv2.TermCriteria) -> Swift.Double
}
extension Calib3d {
@nonobjc public class func stereoCalibrate(objectPoints: [opencv2.Mat], imagePoints1: [opencv2.Mat], imagePoints2: [opencv2.Mat], cameraMatrix1: opencv2.Mat, distCoeffs1: opencv2.Mat, cameraMatrix2: opencv2.Mat, distCoeffs2: opencv2.Mat, imageSize: opencv2.Size2i, R: opencv2.Mat, T: opencv2.Mat, E: opencv2.Mat, F: opencv2.Mat, rvecs: inout [opencv2.Mat], tvecs: inout [opencv2.Mat], perViewErrors: opencv2.Mat, flags: Swift.Int32) -> Swift.Double
}
extension Calib3d {
@nonobjc public class func stereoCalibrate(objectPoints: [opencv2.Mat], imagePoints1: [opencv2.Mat], imagePoints2: [opencv2.Mat], cameraMatrix1: opencv2.Mat, distCoeffs1: opencv2.Mat, cameraMatrix2: opencv2.Mat, distCoeffs2: opencv2.Mat, imageSize: opencv2.Size2i, R: opencv2.Mat, T: opencv2.Mat, E: opencv2.Mat, F: opencv2.Mat, rvecs: inout [opencv2.Mat], tvecs: inout [opencv2.Mat], perViewErrors: opencv2.Mat) -> Swift.Double
}
extension Calib3d {
@nonobjc public class func decomposeHomographyMat(H: opencv2.Mat, K: opencv2.Mat, rotations: inout [opencv2.Mat], translations: inout [opencv2.Mat], normals: inout [opencv2.Mat]) -> Swift.Int32
}
extension Calib3d {
@nonobjc public class func calibrate(objectPoints: [opencv2.Mat], imagePoints: [opencv2.Mat], image_size: opencv2.Size2i, K: opencv2.Mat, D: opencv2.Mat, rvecs: inout [opencv2.Mat], tvecs: inout [opencv2.Mat], flags: Swift.Int32, criteria: opencv2.TermCriteria) -> Swift.Double
}
extension Calib3d {
@nonobjc public class func calibrate(objectPoints: [opencv2.Mat], imagePoints: [opencv2.Mat], image_size: opencv2.Size2i, K: opencv2.Mat, D: opencv2.Mat, rvecs: inout [opencv2.Mat], tvecs: inout [opencv2.Mat], flags: Swift.Int32) -> Swift.Double
}
extension Calib3d {
@nonobjc public class func calibrate(objectPoints: [opencv2.Mat], imagePoints: [opencv2.Mat], image_size: opencv2.Size2i, K: opencv2.Mat, D: opencv2.Mat, rvecs: inout [opencv2.Mat], tvecs: inout [opencv2.Mat]) -> Swift.Double
}
extension Calib3d {
@nonobjc public class func stereoCalibrate(objectPoints: [opencv2.Mat], imagePoints1: [opencv2.Mat], imagePoints2: [opencv2.Mat], K1: opencv2.Mat, D1: opencv2.Mat, K2: opencv2.Mat, D2: opencv2.Mat, imageSize: opencv2.Size2i, R: opencv2.Mat, T: opencv2.Mat, rvecs: inout [opencv2.Mat], tvecs: inout [opencv2.Mat], flags: Swift.Int32, criteria: opencv2.TermCriteria) -> Swift.Double
}
extension Calib3d {
@nonobjc public class func stereoCalibrate(objectPoints: [opencv2.Mat], imagePoints1: [opencv2.Mat], imagePoints2: [opencv2.Mat], K1: opencv2.Mat, D1: opencv2.Mat, K2: opencv2.Mat, D2: opencv2.Mat, imageSize: opencv2.Size2i, R: opencv2.Mat, T: opencv2.Mat, rvecs: inout [opencv2.Mat], tvecs: inout [opencv2.Mat], flags: Swift.Int32) -> Swift.Double
}
extension Calib3d {
@nonobjc public class func stereoCalibrate(objectPoints: [opencv2.Mat], imagePoints1: [opencv2.Mat], imagePoints2: [opencv2.Mat], K1: opencv2.Mat, D1: opencv2.Mat, K2: opencv2.Mat, D2: opencv2.Mat, imageSize: opencv2.Size2i, R: opencv2.Mat, T: opencv2.Mat, rvecs: inout [opencv2.Mat], tvecs: inout [opencv2.Mat]) -> Swift.Double
}
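// Hedged usage sketch (not part of the generated interface): the shortest
// Calib3d.calibrateCamera overload above. Assumes `objectPoints` and
// `imagePoints` hold one Mat of 3D / 2D points per calibration view.
// func calibrateSketch(objectPoints: [Mat], imagePoints: [Mat], imageSize: Size2i) -> Double {
//     let cameraMatrix = Mat()   // receives the 3x3 intrinsic matrix
//     let distCoeffs = Mat()     // receives the distortion coefficients
//     var rvecs = [Mat]()        // per-view rotation vectors
//     var tvecs = [Mat]()        // per-view translation vectors
//     return Calib3d.calibrateCamera(objectPoints: objectPoints,
//                                    imagePoints: imagePoints,
//                                    imageSize: imageSize,
//                                    cameraMatrix: cameraMatrix,
//                                    distCoeffs: distCoeffs,
//                                    rvecs: &rvecs, tvecs: &tvecs)
//     // return value is the RMS re-projection error
// }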
extension ByteVector {
public convenience init(_ array: [Swift.Int8])
public convenience init(_ array: [Swift.UInt8])
public subscript(index: Swift.Int) -> Swift.Int8 {
get
}
public var array: [Swift.Int8] {
get
}
public var unsignedArray: [Swift.UInt8] {
get
}
}
extension ByteVector : Swift.Sequence {
public typealias Iterator = opencv2.ByteVectorIterator
public func makeIterator() -> opencv2.ByteVectorIterator
public typealias Element = opencv2.ByteVectorIterator.Element
}
public struct ByteVectorIterator : Swift.IteratorProtocol {
public typealias Element = Swift.Int8
public mutating func next() -> Swift.Int8?
}
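// Hedged usage sketch (not part of the generated interface): the ByteVector
// bridging above; build one from a Swift array, index it, and iterate via
// the Sequence conformance.
// func byteVectorSketch() {
//     let bytes = ByteVector([1, 2, 3] as [Int8])
//     let first = bytes[0]               // subscript getter -> Int8
//     let unsigned = bytes.unsignedArray // same storage viewed as [UInt8]
//     for b in bytes { print(b) }        // drives ByteVectorIterator.next()
//     print(first, unsigned)
// }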
extension Core {
@nonobjc public class func meanStdDev(src: opencv2.Mat, mean: inout [Swift.Double], stddev: inout [Swift.Double], mask: opencv2.Mat)
}
extension Core {
@nonobjc public class func meanStdDev(src: opencv2.Mat, mean: inout [Swift.Double], stddev: inout [Swift.Double])
}
extension Core {
@nonobjc public class func split(m: opencv2.Mat, mv: inout [opencv2.Mat])
}
extension Core {
@nonobjc public class func mixChannels(src: [opencv2.Mat], dst: [opencv2.Mat], fromTo: [Swift.Int32])
}
extension Core {
@nonobjc public class func transposeND(src: opencv2.Mat, order: [Swift.Int32], dst: opencv2.Mat)
}
extension CvType {
public static var CV_8U: Swift.Int32
public static var CV_8S: Swift.Int32
public static var CV_16U: Swift.Int32
public static var CV_16S: Swift.Int32
public static var CV_32S: Swift.Int32
public static var CV_32F: Swift.Int32
public static var CV_64F: Swift.Int32
public static var CV_16F: Swift.Int32
public static var CV_8UC1: Swift.Int32
public static var CV_8UC2: Swift.Int32
public static var CV_8UC3: Swift.Int32
public static var CV_8UC4: Swift.Int32
public static var CV_8SC1: Swift.Int32
public static var CV_8SC2: Swift.Int32
public static var CV_8SC3: Swift.Int32
public static var CV_8SC4: Swift.Int32
public static var CV_16UC1: Swift.Int32
public static var CV_16UC2: Swift.Int32
public static var CV_16UC3: Swift.Int32
public static var CV_16UC4: Swift.Int32
public static var CV_16SC1: Swift.Int32
public static var CV_16SC2: Swift.Int32
public static var CV_16SC3: Swift.Int32
public static var CV_16SC4: Swift.Int32
public static var CV_32SC1: Swift.Int32
public static var CV_32SC2: Swift.Int32
public static var CV_32SC3: Swift.Int32
public static var CV_32SC4: Swift.Int32
public static var CV_32FC1: Swift.Int32
public static var CV_32FC2: Swift.Int32
public static var CV_32FC3: Swift.Int32
public static var CV_32FC4: Swift.Int32
public static var CV_64FC1: Swift.Int32
public static var CV_64FC2: Swift.Int32
public static var CV_64FC3: Swift.Int32
public static var CV_64FC4: Swift.Int32
public static var CV_16FC1: Swift.Int32
public static var CV_16FC2: Swift.Int32
public static var CV_16FC3: Swift.Int32
public static var CV_16FC4: Swift.Int32
public static var CV_CN_MAX: Swift.Int
public static var CV_CN_SHIFT: Swift.Int
public static var CV_DEPTH_MAX: Swift.Int
public static func CV_8UC(_ channels: Swift.Int32) -> Swift.Int32
public static func CV_8SC(_ channels: Swift.Int32) -> Swift.Int32
public static func CV_16UC(_ channels: Swift.Int32) -> Swift.Int32
public static func CV_16SC(_ channels: Swift.Int32) -> Swift.Int32
public static func CV_32SC(_ channels: Swift.Int32) -> Swift.Int32
public static func CV_32FC(_ channels: Swift.Int32) -> Swift.Int32
public static func CV_64FC(_ channels: Swift.Int32) -> Swift.Int32
public static func CV_16FC(_ channels: Swift.Int32) -> Swift.Int32
}
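// Hedged usage sketch (not part of the generated interface): the CvType
// helpers above. The CV_<depth>C(_:) functions compute the same codes as the
// fixed CV_8UC4-style constants, which is handy when the channel count is
// only known at runtime.
// func cvTypeSketch() {
//     assert(CvType.CV_8UC(4) == CvType.CV_8UC4)
//     // A 1x2 single-channel 8-bit Mat built from raw bytes, using the
//     // Mat(rows:cols:type:data:) initializer declared later in this file.
//     let m = Mat(rows: 1, cols: 2, type: CvType.CV_8UC1, data: [7, 8] as [Int8])
//     print(m)
// }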
extension DoubleVector {
public convenience init(_ array: [Swift.Double])
public subscript(index: Swift.Int) -> Swift.Double {
get
}
public var array: [Swift.Double] {
get
}
}
extension DoubleVector : Swift.Sequence {
public typealias Iterator = opencv2.DoubleVectorIterator
public func makeIterator() -> opencv2.DoubleVectorIterator
public typealias Element = opencv2.DoubleVectorIterator.Element
}
public struct DoubleVectorIterator : Swift.IteratorProtocol {
public typealias Element = Swift.Double
public mutating func next() -> Swift.Double?
}
extension FloatVector {
public convenience init(_ array: [Swift.Float])
public subscript(index: Swift.Int) -> Swift.Float {
get
}
public var array: [Swift.Float] {
get
}
}
extension FloatVector : Swift.Sequence {
public typealias Iterator = opencv2.FloatVectorIterator
public func makeIterator() -> opencv2.FloatVectorIterator
public typealias Element = opencv2.FloatVectorIterator.Element
}
public struct FloatVectorIterator : Swift.IteratorProtocol {
public typealias Element = Swift.Float
public mutating func next() -> Swift.Float?
}
extension IntVector {
public convenience init(_ array: [Swift.Int32])
public subscript(index: Swift.Int) -> Swift.Int32 {
get
}
public var array: [Swift.Int32] {
get
}
}
extension IntVector : Swift.Sequence {
public typealias Iterator = opencv2.IntVectorIterator
public func makeIterator() -> opencv2.IntVectorIterator
public typealias Element = opencv2.IntVectorIterator.Element
}
public struct IntVectorIterator : Swift.IteratorProtocol {
public typealias Element = Swift.Int32
public mutating func next() -> Swift.Int32?
}
public typealias T2<T> = (T, T)
public typealias T3<T> = (T, T, T)
public typealias T4<T> = (T, T, T, T)
extension Mat {
public convenience init(rows: Swift.Int32, cols: Swift.Int32, type: Swift.Int32, data: [Swift.Int8])
public convenience init(rows: Swift.Int32, cols: Swift.Int32, type: Swift.Int32, data: [Swift.Int8], step: Swift.Int)
@discardableResult
public func get(indices: [Swift.Int32], data: inout [Swift.Int8]) throws -> Swift.Int32
@discardableResult
public func get(indices: [Swift.Int32], data: inout [Swift.UInt8]) throws -> Swift.Int32
@discardableResult
public func get(indices: [Swift.Int32], data: inout [Swift.Double]) throws -> Swift.Int32
@discardableResult
public func get(indices: [Swift.Int32], data: inout [Swift.Float]) throws -> Swift.Int32
@discardableResult
public func get(indices: [Swift.Int32], data: inout [Swift.Int32]) throws -> Swift.Int32
@discardableResult
public func get(indices: [Swift.Int32], data: inout [Swift.Int16]) throws -> Swift.Int32
@discardableResult
public func get(indices: [Swift.Int32], data: inout [Swift.UInt16]) throws -> Swift.Int32
@discardableResult
public func get(row: Swift.Int32, col: Swift.Int32, data: inout [Swift.Int8]) throws -> Swift.Int32
@discardableResult
public func get(row: Swift.Int32, col: Swift.Int32, data: inout [Swift.UInt8]) throws -> Swift.Int32
@discardableResult
public func get(row: Swift.Int32, col: Swift.Int32, data: inout [Swift.Double]) throws -> Swift.Int32
@discardableResult
public func get(row: Swift.Int32, col: Swift.Int32, data: inout [Swift.Float]) throws -> Swift.Int32
@discardableResult
public func get(row: Swift.Int32, col: Swift.Int32, data: inout [Swift.Int32]) throws -> Swift.Int32
@discardableResult
public func get(row: Swift.Int32, col: Swift.Int32, data: inout [Swift.Int16]) throws -> Swift.Int32
@discardableResult
public func get(row: Swift.Int32, col: Swift.Int32, data: inout [Swift.UInt16]) throws -> Swift.Int32
@discardableResult
public func put(indices: [Swift.Int32], data: [Swift.Int8]) throws -> Swift.Int32
@discardableResult
public func put(indices: [Swift.Int32], data: [Swift.UInt8]) throws -> Swift.Int32
@discardableResult
public func put(indices: [Swift.Int32], data: [Swift.Int8], offset: Swift.Int, length: Swift.Int32) throws -> Swift.Int32
@discardableResult
public func put(indices: [Swift.Int32], data: [Swift.Double]) throws -> Swift.Int32
@discardableResult
public func put(indices: [Swift.Int32], data: [Swift.Float]) throws -> Swift.Int32
@discardableResult
public func put(indices: [Swift.Int32], data: [Swift.Int32]) throws -> Swift.Int32
@discardableResult
public func put(indices: [Swift.Int32], data: [Swift.Int16]) throws -> Swift.Int32
@discardableResult
public func put(indices: [Swift.Int32], data: [Swift.UInt16]) throws -> Swift.Int32
@discardableResult
public func put(row: Swift.Int32, col: Swift.Int32, data: [Swift.Int8]) throws -> Swift.Int32
@discardableResult
public func put(row: Swift.Int32, col: Swift.Int32, data: [Swift.UInt8]) throws -> Swift.Int32
@discardableResult
public func put(row: Swift.Int32, col: Swift.Int32, data: [Swift.Int8], offset: Swift.Int, length: Swift.Int32) throws -> Swift.Int32
@discardableResult
public func put(row: Swift.Int32, col: Swift.Int32, data: [Swift.Double]) throws -> Swift.Int32
@discardableResult
public func put(row: Swift.Int32, col: Swift.Int32, data: [Swift.Float]) throws -> Swift.Int32
@discardableResult
public func put(row: Swift.Int32, col: Swift.Int32, data: [Swift.Int32]) throws -> Swift.Int32
@discardableResult
public func put(row: Swift.Int32, col: Swift.Int32, data: [Swift.Int16]) throws -> Swift.Int32
@discardableResult
public func put(row: Swift.Int32, col: Swift.Int32, data: [Swift.UInt16]) throws -> Swift.Int32
@discardableResult
public func get(row: Swift.Int32, col: Swift.Int32) -> [Swift.Double]
@discardableResult
public func get(indices: [Swift.Int32]) -> [Swift.Double]
}
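// Hedged usage sketch (not part of the generated interface): the throwing
// get/put accessors above. Assumes `m` is a CV_32SC1 Mat with at least four
// elements in row 0; the calls throw if the buffer does not match the Mat
// layout, and the `as [Int32]` cast selects the Int32 overload.
// func putGetSketch(m: Mat) throws {
//     try m.put(row: 0, col: 0, data: [1, 2, 3, 4] as [Int32])  // write 4 elements
//     var readBack = [Int32](repeating: 0, count: 4)
//     try m.get(row: 0, col: 0, data: &readBack)                // read them back
//     let boxed: [Double] = m.get(row: 0, col: 0)               // type-agnostic variant
//     print(readBack, boxed)
// }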
public protocol Atable {
static func getAt(m: opencv2.Mat, indices: [Swift.Int32]) -> Self
static func putAt(m: opencv2.Mat, indices: [Swift.Int32], v: Self)
static func getAt2c(m: opencv2.Mat, indices: [Swift.Int32]) -> (Self, Self)
static func putAt2c(m: opencv2.Mat, indices: [Swift.Int32], v: (Self, Self))
static func getAt3c(m: opencv2.Mat, indices: [Swift.Int32]) -> (Self, Self, Self)
static func putAt3c(m: opencv2.Mat, indices: [Swift.Int32], v: (Self, Self, Self))
static func getAt4c(m: opencv2.Mat, indices: [Swift.Int32]) -> (Self, Self, Self, Self)
static func putAt4c(m: opencv2.Mat, indices: [Swift.Int32], v: (Self, Self, Self, Self))
}
@_hasMissingDesignatedInitializers public class MatAt<N> where N : opencv2.Atable {
public var v: N {
get
set(value)
}
public var v2c: (N, N) {
get
set(value)
}
public var v3c: (N, N, N) {
get
set(value)
}
public var v4c: (N, N, N, N) {
get
set(value)
}
@objc deinit
}
extension UInt8 : opencv2.Atable {
public static func getAt(m: opencv2.Mat, indices: [Swift.Int32]) -> Swift.UInt8
public static func putAt(m: opencv2.Mat, indices: [Swift.Int32], v: Swift.UInt8)
public static func getAt2c(m: opencv2.Mat, indices: [Swift.Int32]) -> (Swift.UInt8, Swift.UInt8)
public static func putAt2c(m: opencv2.Mat, indices: [Swift.Int32], v: (Swift.UInt8, Swift.UInt8))
public static func getAt3c(m: opencv2.Mat, indices: [Swift.Int32]) -> (Swift.UInt8, Swift.UInt8, Swift.UInt8)
public static func putAt3c(m: opencv2.Mat, indices: [Swift.Int32], v: (Swift.UInt8, Swift.UInt8, Swift.UInt8))
public static func getAt4c(m: opencv2.Mat, indices: [Swift.Int32]) -> (Swift.UInt8, Swift.UInt8, Swift.UInt8, Swift.UInt8)
public static func putAt4c(m: opencv2.Mat, indices: [Swift.Int32], v: (Swift.UInt8, Swift.UInt8, Swift.UInt8, Swift.UInt8))
}
extension Int8 : opencv2.Atable {
public static func getAt(m: opencv2.Mat, indices: [Swift.Int32]) -> Swift.Int8
public static func putAt(m: opencv2.Mat, indices: [Swift.Int32], v: Swift.Int8)
public static func getAt2c(m: opencv2.Mat, indices: [Swift.Int32]) -> (Swift.Int8, Swift.Int8)
public static func putAt2c(m: opencv2.Mat, indices: [Swift.Int32], v: (Swift.Int8, Swift.Int8))
public static func getAt3c(m: opencv2.Mat, indices: [Swift.Int32]) -> (Swift.Int8, Swift.Int8, Swift.Int8)
public static func putAt3c(m: opencv2.Mat, indices: [Swift.Int32], v: (Swift.Int8, Swift.Int8, Swift.Int8))
public static func getAt4c(m: opencv2.Mat, indices: [Swift.Int32]) -> (Swift.Int8, Swift.Int8, Swift.Int8, Swift.Int8)
public static func putAt4c(m: opencv2.Mat, indices: [Swift.Int32], v: (Swift.Int8, Swift.Int8, Swift.Int8, Swift.Int8))
}
extension Double : opencv2.Atable {
public static func getAt(m: opencv2.Mat, indices: [Swift.Int32]) -> Swift.Double
public static func putAt(m: opencv2.Mat, indices: [Swift.Int32], v: Swift.Double)
public static func getAt2c(m: opencv2.Mat, indices: [Swift.Int32]) -> (Swift.Double, Swift.Double)
public static func putAt2c(m: opencv2.Mat, indices: [Swift.Int32], v: (Swift.Double, Swift.Double))
public static func getAt3c(m: opencv2.Mat, indices: [Swift.Int32]) -> (Swift.Double, Swift.Double, Swift.Double)
public static func putAt3c(m: opencv2.Mat, indices: [Swift.Int32], v: (Swift.Double, Swift.Double, Swift.Double))
public static func getAt4c(m: opencv2.Mat, indices: [Swift.Int32]) -> (Swift.Double, Swift.Double, Swift.Double, Swift.Double)
public static func putAt4c(m: opencv2.Mat, indices: [Swift.Int32], v: (Swift.Double, Swift.Double, Swift.Double, Swift.Double))
}
extension Float : opencv2.Atable {
public static func getAt(m: opencv2.Mat, indices: [Swift.Int32]) -> Swift.Float
public static func putAt(m: opencv2.Mat, indices: [Swift.Int32], v: Swift.Float)
public static func getAt2c(m: opencv2.Mat, indices: [Swift.Int32]) -> (Swift.Float, Swift.Float)
public static func putAt2c(m: opencv2.Mat, indices: [Swift.Int32], v: (Swift.Float, Swift.Float))
public static func getAt3c(m: opencv2.Mat, indices: [Swift.Int32]) -> (Swift.Float, Swift.Float, Swift.Float)
public static func putAt3c(m: opencv2.Mat, indices: [Swift.Int32], v: (Swift.Float, Swift.Float, Swift.Float))
public static func getAt4c(m: opencv2.Mat, indices: [Swift.Int32]) -> (Swift.Float, Swift.Float, Swift.Float, Swift.Float)
public static func putAt4c(m: opencv2.Mat, indices: [Swift.Int32], v: (Swift.Float, Swift.Float, Swift.Float, Swift.Float))
}
extension Int32 : opencv2.Atable {
public static func getAt(m: opencv2.Mat, indices: [Swift.Int32]) -> Swift.Int32
public static func putAt(m: opencv2.Mat, indices: [Swift.Int32], v: Swift.Int32)
public static func getAt2c(m: opencv2.Mat, indices: [Swift.Int32]) -> (Swift.Int32, Swift.Int32)
public static func putAt2c(m: opencv2.Mat, indices: [Swift.Int32], v: (Swift.Int32, Swift.Int32))
public static func getAt3c(m: opencv2.Mat, indices: [Swift.Int32]) -> (Swift.Int32, Swift.Int32, Swift.Int32)
public static func putAt3c(m: opencv2.Mat, indices: [Swift.Int32], v: (Swift.Int32, Swift.Int32, Swift.Int32))
public static func getAt4c(m: opencv2.Mat, indices: [Swift.Int32]) -> (Swift.Int32, Swift.Int32, Swift.Int32, Swift.Int32)
public static func putAt4c(m: opencv2.Mat, indices: [Swift.Int32], v: (Swift.Int32, Swift.Int32, Swift.Int32, Swift.Int32))
}
extension UInt16 : opencv2.Atable {
public static func getAt(m: opencv2.Mat, indices: [Swift.Int32]) -> Swift.UInt16
public static func putAt(m: opencv2.Mat, indices: [Swift.Int32], v: Swift.UInt16)
public static func getAt2c(m: opencv2.Mat, indices: [Swift.Int32]) -> (Swift.UInt16, Swift.UInt16)
public static func putAt2c(m: opencv2.Mat, indices: [Swift.Int32], v: (Swift.UInt16, Swift.UInt16))
public static func getAt3c(m: opencv2.Mat, indices: [Swift.Int32]) -> (Swift.UInt16, Swift.UInt16, Swift.UInt16)
public static func putAt3c(m: opencv2.Mat, indices: [Swift.Int32], v: (Swift.UInt16, Swift.UInt16, Swift.UInt16))
public static func getAt4c(m: opencv2.Mat, indices: [Swift.Int32]) -> (Swift.UInt16, Swift.UInt16, Swift.UInt16, Swift.UInt16)
public static func putAt4c(m: opencv2.Mat, indices: [Swift.Int32], v: (Swift.UInt16, Swift.UInt16, Swift.UInt16, Swift.UInt16))
}
extension Int16 : opencv2.Atable {
public static func getAt(m: opencv2.Mat, indices: [Swift.Int32]) -> Swift.Int16
public static func putAt(m: opencv2.Mat, indices: [Swift.Int32], v: Swift.Int16)
public static func getAt2c(m: opencv2.Mat, indices: [Swift.Int32]) -> (Swift.Int16, Swift.Int16)
public static func putAt2c(m: opencv2.Mat, indices: [Swift.Int32], v: (Swift.Int16, Swift.Int16))
public static func getAt3c(m: opencv2.Mat, indices: [Swift.Int32]) -> (Swift.Int16, Swift.Int16, Swift.Int16)
public static func putAt3c(m: opencv2.Mat, indices: [Swift.Int32], v: (Swift.Int16, Swift.Int16, Swift.Int16))
public static func getAt4c(m: opencv2.Mat, indices: [Swift.Int32]) -> (Swift.Int16, Swift.Int16, Swift.Int16, Swift.Int16)
public static func putAt4c(m: opencv2.Mat, indices: [Swift.Int32], v: (Swift.Int16, Swift.Int16, Swift.Int16, Swift.Int16))
}
extension Mat {
public func at<N>(row: Swift.Int32, col: Swift.Int32) -> opencv2.MatAt<N> where N : opencv2.Atable
public func at<N>(indices: [Swift.Int32]) -> opencv2.MatAt<N> where N : opencv2.Atable
}
extension Mat {
public static func * (lhs: opencv2.Mat, rhs: opencv2.Mat) -> opencv2.Mat
}
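// Hedged usage sketch (not part of the generated interface): the generic
// at() accessor and the Mat * operator above. Assumes `gray` is CV_32FC1 and
// `rgba` is CV_8UC4; MatAt's element type N is inferred from context.
// func matAtSketch(gray: Mat, rgba: Mat) {
//     let cell: MatAt<Float> = gray.at(row: 0, col: 0)
//     cell.v += 1.0                                    // single-channel read-modify-write
//     let px: T4<UInt8> = rgba.at(row: 0, col: 0).v4c  // four channels as a tuple
//     print(px)
// }
// // The * operator multiplies two compatible Mats (e.g. CV_32F, inner dims equal).
// func matMulSketch(a: Mat, b: Mat) -> Mat {
//     return a * b
// }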
public typealias Rect = opencv2.Rect2i
public typealias Point = opencv2.Point2i
public typealias Size = opencv2.Size2i
extension Dnn {
@nonobjc public class func readNetFromDarknet(bufferCfg: [Swift.UInt8], bufferModel: [Swift.UInt8]) -> opencv2.Net
}
extension Dnn {
@nonobjc public class func readNetFromDarknet(bufferCfg: [Swift.UInt8]) -> opencv2.Net
}
extension Dnn {
@nonobjc public class func readNetFromCaffe(bufferProto: [Swift.UInt8], bufferModel: [Swift.UInt8]) -> opencv2.Net
}
extension Dnn {
@nonobjc public class func readNetFromCaffe(bufferProto: [Swift.UInt8]) -> opencv2.Net
}
extension Dnn {
@nonobjc public class func readNetFromTensorflow(bufferModel: [Swift.UInt8], bufferConfig: [Swift.UInt8]) -> opencv2.Net
}
extension Dnn {
@nonobjc public class func readNetFromTensorflow(bufferModel: [Swift.UInt8]) -> opencv2.Net
}
extension Dnn {
@nonobjc public class func readNetFromTFLite(bufferModel: [Swift.UInt8]) -> opencv2.Net
}
extension Dnn {
@nonobjc public class func readNet(framework: Swift.String, bufferModel: [Swift.UInt8], bufferConfig: [Swift.UInt8]) -> opencv2.Net
}
extension Dnn {
@nonobjc public class func readNet(framework: Swift.String, bufferModel: [Swift.UInt8]) -> opencv2.Net
}
extension Dnn {
@nonobjc public class func readNetFromModelOptimizer(bufferModelConfig: [Swift.UInt8], bufferWeights: [Swift.UInt8]) -> opencv2.Net
}
extension Dnn {
@nonobjc public class func readNetFromONNX(buffer: [Swift.UInt8]) -> opencv2.Net
}
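// Hedged usage sketch (not part of the generated interface): the in-memory
// model loaders above, here readNetFromONNX. The bundle resource name
// "model.onnx" is illustrative only.
// func loadNetSketch() -> Net? {
//     guard let url = Bundle.main.url(forResource: "model", withExtension: "onnx"),
//           let data = try? Data(contentsOf: url) else { return nil }
//     return Dnn.readNetFromONNX(buffer: [UInt8](data))
// }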
extension Dnn {
@nonobjc public class func imagesFromBlob(blob_: opencv2.Mat, images_: inout [opencv2.Mat])
}
extension Dnn {
@nonobjc public class func NMSBoxes(bboxes: [opencv2.Rect2d], scores: [Swift.Float], score_threshold: Swift.Float, nms_threshold: Swift.Float, indices: inout [Swift.Int32], eta: Swift.Float, top_k: Swift.Int32)
}
extension Dnn {
@nonobjc public class func NMSBoxes(bboxes: [opencv2.Rect2d], scores: [Swift.Float], score_threshold: Swift.Float, nms_threshold: Swift.Float, indices: inout [Swift.Int32], eta: Swift.Float)
}
extension Dnn {
@nonobjc public class func NMSBoxes(bboxes: [opencv2.Rect2d], scores: [Swift.Float], score_threshold: Swift.Float, nms_threshold: Swift.Float, indices: inout [Swift.Int32])
}
extension Dnn {
@nonobjc public class func NMSBoxes(bboxes: [opencv2.RotatedRect], scores: [Swift.Float], score_threshold: Swift.Float, nms_threshold: Swift.Float, indices: inout [Swift.Int32], eta: Swift.Float, top_k: Swift.Int32)
}
extension Dnn {
@nonobjc public class func NMSBoxes(bboxes: [opencv2.RotatedRect], scores: [Swift.Float], score_threshold: Swift.Float, nms_threshold: Swift.Float, indices: inout [Swift.Int32], eta: Swift.Float)
}
extension Dnn {
@nonobjc public class func NMSBoxes(bboxes: [opencv2.RotatedRect], scores: [Swift.Float], score_threshold: Swift.Float, nms_threshold: Swift.Float, indices: inout [Swift.Int32])
}
extension Dnn {
@nonobjc public class func NMSBoxesBatched(bboxes: [opencv2.Rect2d], scores: [Swift.Float], class_ids: [Swift.Int32], score_threshold: Swift.Float, nms_threshold: Swift.Float, indices: inout [Swift.Int32], eta: Swift.Float, top_k: Swift.Int32)
}
extension Dnn {
@nonobjc public class func NMSBoxesBatched(bboxes: [opencv2.Rect2d], scores: [Swift.Float], class_ids: [Swift.Int32], score_threshold: Swift.Float, nms_threshold: Swift.Float, indices: inout [Swift.Int32], eta: Swift.Float)
}
extension Dnn {
@nonobjc public class func NMSBoxesBatched(bboxes: [opencv2.Rect2d], scores: [Swift.Float], class_ids: [Swift.Int32], score_threshold: Swift.Float, nms_threshold: Swift.Float, indices: inout [Swift.Int32])
}
extension Dnn {
@nonobjc public class func softNMSBoxes(bboxes: [opencv2.Rect2i], scores: [Swift.Float], updated_scores: inout [Swift.Float], score_threshold: Swift.Float, nms_threshold: Swift.Float, indices: inout [Swift.Int32], top_k: Darwin.size_t, sigma: Swift.Float, method: opencv2.SoftNMSMethod)
}
extension Dnn {
@nonobjc public class func softNMSBoxes(bboxes: [opencv2.Rect2i], scores: [Swift.Float], updated_scores: inout [Swift.Float], score_threshold: Swift.Float, nms_threshold: Swift.Float, indices: inout [Swift.Int32], top_k: Darwin.size_t, sigma: Swift.Float)
}
extension Dnn {
@nonobjc public class func softNMSBoxes(bboxes: [opencv2.Rect2i], scores: [Swift.Float], updated_scores: inout [Swift.Float], score_threshold: Swift.Float, nms_threshold: Swift.Float, indices: inout [Swift.Int32], top_k: Darwin.size_t)
}
extension Dnn {
@nonobjc public class func softNMSBoxes(bboxes: [opencv2.Rect2i], scores: [Swift.Float], updated_scores: inout [Swift.Float], score_threshold: Swift.Float, nms_threshold: Swift.Float, indices: inout [Swift.Int32])
}
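// Hedged usage sketch (not part of the generated interface): non-maximum
// suppression via the Rect2d NMSBoxes overload above. Assumes `boxes` and
// `scores` are parallel arrays; `keep` receives the indices of kept boxes,
// and the 0.5 / 0.4 thresholds are illustrative.
// func nmsSketch(boxes: [Rect2d], scores: [Float]) -> [Int32] {
//     var keep = [Int32]()
//     Dnn.NMSBoxes(bboxes: boxes, scores: scores,
//                  score_threshold: 0.5, nms_threshold: 0.4, indices: &keep)
//     return keep
// }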
extension DetectionModel {
@nonobjc public func detect(frame: opencv2.Mat, classIds: inout [Swift.Int32], confidences: inout [Swift.Float], boxes: inout [opencv2.Rect2i], confThreshold: Swift.Float, nmsThreshold: Swift.Float)
}
extension DetectionModel {
@nonobjc public func detect(frame: opencv2.Mat, classIds: inout [Swift.Int32], confidences: inout [Swift.Float], boxes: inout [opencv2.Rect2i], confThreshold: Swift.Float)
}
extension DetectionModel {
@nonobjc public func detect(frame: opencv2.Mat, classIds: inout [Swift.Int32], confidences: inout [Swift.Float], boxes: inout [opencv2.Rect2i])
}
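// Hedged usage sketch (not part of the generated interface):
// DetectionModel.detect above. Assumes `model` was constructed and
// configured elsewhere; the three inout arrays come back parallel, one entry
// per detection.
// func detectSketch(model: DetectionModel, frame: Mat) {
//     var classIds = [Int32](), confidences = [Float](), boxes = [Rect2i]()
//     model.detect(frame: frame, classIds: &classIds, confidences: &confidences,
//                  boxes: &boxes, confThreshold: 0.5, nmsThreshold: 0.4)
//     for (i, box) in boxes.enumerated() {
//         print("class \(classIds[i]) conf \(confidences[i]) at \(box)")
//     }
// }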
extension Layer {
@nonobjc public func finalize(inputs: [opencv2.Mat], outputs: inout [opencv2.Mat])
}
extension Layer {
@available(*, deprecated)
@nonobjc public func run(inputs: [opencv2.Mat], outputs: inout [opencv2.Mat], internals: inout [opencv2.Mat])
}
extension Model {
@nonobjc public func predict(frame: opencv2.Mat, outs: inout [opencv2.Mat])
}
extension Net {
@nonobjc public class func readFromModelOptimizer(bufferModelConfig: [Swift.UInt8], bufferWeights: [Swift.UInt8]) -> opencv2.Net
}
extension Net {
@nonobjc public func forward(outputBlobs: inout [opencv2.Mat], outputName: Swift.String)
}
extension Net {
@nonobjc public func forward(outputBlobs: inout [opencv2.Mat])
}
extension Net {
@nonobjc public func forward(outputBlobs: inout [opencv2.Mat], outBlobNames: [Swift.String])
}
extension Net {
@nonobjc public func forwardAndRetrieve(outputBlobs: inout [[opencv2.Mat]], outBlobNames: [Swift.String])
}
extension Net {
@nonobjc public func getInputDetails(scales: inout [Swift.Float], zeropoints: inout [Swift.Int32])
}
extension Net {
@nonobjc public func getOutputDetails(scales: inout [Swift.Float], zeropoints: inout [Swift.Int32])
}
extension Net {
@nonobjc public func getLayersShapes(netInputShapes: [opencv2.IntVector], layersIds: inout [Swift.Int32], inLayersShapes: inout [[opencv2.IntVector]], outLayersShapes: inout [[opencv2.IntVector]])
}
extension Net {
@nonobjc public func getLayersShapes(netInputShape: opencv2.IntVector, layersIds: inout [Swift.Int32], inLayersShapes: inout [[opencv2.IntVector]], outLayersShapes: inout [[opencv2.IntVector]])
}
extension Net {
@nonobjc public func getLayerTypes(layersTypes: inout [Swift.String])
}
extension Net {
@nonobjc public func getPerfProfile(timings: inout [Swift.Double]) -> Swift.Int
}
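// Hedged usage sketch (not part of the generated interface): Net.forward and
// getPerfProfile above. Assumes the input blob was already set on the net
// (setInput is on the Objective-C side and not declared in this excerpt).
// func forwardSketch(net: Net) {
//     var outputs = [Mat]()
//     net.forward(outputBlobs: &outputs)   // run to the default output layer
//     var timings = [Double]()
//     let ticks = net.getPerfProfile(timings: &timings)
//     print("inference took \(ticks) ticks over \(timings.count) layers")
// }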
  647. extension TextDetectionModel {
  648. @nonobjc public func detect(frame: opencv2.Mat, detections: inout [[opencv2.Point2i]], confidences: inout [Swift.Float])
  649. }
  650. extension TextDetectionModel {
  651. @nonobjc public func detect(frame: opencv2.Mat, detections: inout [[opencv2.Point2i]])
  652. }
  653. extension TextDetectionModel {
  654. @nonobjc public func detectTextRectangles(frame: opencv2.Mat, detections: inout [opencv2.RotatedRect], confidences: inout [Swift.Float])
  655. }
  656. extension TextDetectionModel {
  657. @nonobjc public func detectTextRectangles(frame: opencv2.Mat, detections: inout [opencv2.RotatedRect])
  658. }
  659. extension TextRecognitionModel {
  660. @nonobjc public func recognize(frame: opencv2.Mat, roiRects: [opencv2.Mat], results: inout [Swift.String])
  661. }
  662. extension Facemark {
  663. @nonobjc public func fit(image: opencv2.Mat, faces: opencv2.Mat, landmarks: inout [opencv2.Mat]) -> Swift.Bool
  664. }
  665. extension Features2d {
  666. @nonobjc public class func drawMatches(img1: opencv2.Mat, keypoints1: [opencv2.KeyPoint], img2: opencv2.Mat, keypoints2: [opencv2.KeyPoint], matches1to2: [opencv2.DMatch], outImg: opencv2.Mat, matchColor: opencv2.Scalar, singlePointColor: opencv2.Scalar, matchesMask: [Swift.Int8], flags: opencv2.DrawMatchesFlags)
  667. }
  668. extension Features2d {
  669. @nonobjc public class func drawMatches(img1: opencv2.Mat, keypoints1: [opencv2.KeyPoint], img2: opencv2.Mat, keypoints2: [opencv2.KeyPoint], matches1to2: [opencv2.DMatch], outImg: opencv2.Mat, matchColor: opencv2.Scalar, singlePointColor: opencv2.Scalar, matchesMask: [Swift.Int8])
  670. }
  671. extension Features2d {
  672. @nonobjc public class func drawMatches(img1: opencv2.Mat, keypoints1: [opencv2.KeyPoint], img2: opencv2.Mat, keypoints2: [opencv2.KeyPoint], matches1to2: [opencv2.DMatch], outImg: opencv2.Mat, matchesThickness: Swift.Int32, matchColor: opencv2.Scalar, singlePointColor: opencv2.Scalar, matchesMask: [Swift.Int8], flags: opencv2.DrawMatchesFlags)
  673. }
  674. extension Features2d {
  675. @nonobjc public class func drawMatches(img1: opencv2.Mat, keypoints1: [opencv2.KeyPoint], img2: opencv2.Mat, keypoints2: [opencv2.KeyPoint], matches1to2: [opencv2.DMatch], outImg: opencv2.Mat, matchesThickness: Swift.Int32, matchColor: opencv2.Scalar, singlePointColor: opencv2.Scalar, matchesMask: [Swift.Int8])
  676. }
  677. extension Features2d {
  678. @nonobjc public class func drawMatches(img1: opencv2.Mat, keypoints1: [opencv2.KeyPoint], img2: opencv2.Mat, keypoints2: [opencv2.KeyPoint], matches1to2: [[opencv2.DMatch]], outImg: opencv2.Mat, matchColor: opencv2.Scalar, singlePointColor: opencv2.Scalar, matchesMask: [[Swift.Int8]], flags: opencv2.DrawMatchesFlags)
  679. }
  680. extension Features2d {
  681. @nonobjc public class func drawMatches(img1: opencv2.Mat, keypoints1: [opencv2.KeyPoint], img2: opencv2.Mat, keypoints2: [opencv2.KeyPoint], matches1to2: [[opencv2.DMatch]], outImg: opencv2.Mat, matchColor: opencv2.Scalar, singlePointColor: opencv2.Scalar, matchesMask: [[Swift.Int8]])
  682. }
extension AffineFeature {
  @nonobjc public func setViewParams(tilts: [Swift.Float], rolls: [Swift.Float])
}
extension AffineFeature {
  @nonobjc public func getViewParams(tilts: [Swift.Float], rolls: [Swift.Float])
}
extension BRISK {
  @nonobjc public class func create(radiusList: [Swift.Float], numberList: [Swift.Int32], dMax: Swift.Float, dMin: Swift.Float, indexChange: [Swift.Int32]) -> opencv2.BRISK
}
extension BRISK {
  @nonobjc public class func create(radiusList: [Swift.Float], numberList: [Swift.Int32], dMax: Swift.Float, dMin: Swift.Float) -> opencv2.BRISK
}
extension BRISK {
  @nonobjc public class func create(radiusList: [Swift.Float], numberList: [Swift.Int32], dMax: Swift.Float) -> opencv2.BRISK
}
extension BRISK {
  @nonobjc public class func create(radiusList: [Swift.Float], numberList: [Swift.Int32]) -> opencv2.BRISK
}
extension BRISK {
  @nonobjc public class func create(thresh: Swift.Int32, octaves: Swift.Int32, radiusList: [Swift.Float], numberList: [Swift.Int32], dMax: Swift.Float, dMin: Swift.Float, indexChange: [Swift.Int32]) -> opencv2.BRISK
}
extension BRISK {
  @nonobjc public class func create(thresh: Swift.Int32, octaves: Swift.Int32, radiusList: [Swift.Float], numberList: [Swift.Int32], dMax: Swift.Float, dMin: Swift.Float) -> opencv2.BRISK
}
extension BRISK {
  @nonobjc public class func create(thresh: Swift.Int32, octaves: Swift.Int32, radiusList: [Swift.Float], numberList: [Swift.Int32], dMax: Swift.Float) -> opencv2.BRISK
}
extension BRISK {
  @nonobjc public class func create(thresh: Swift.Int32, octaves: Swift.Int32, radiusList: [Swift.Float], numberList: [Swift.Int32]) -> opencv2.BRISK
}
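/*
 Usage sketch: building a BRISK detector with a custom sampling pattern via the
 create(thresh:octaves:radiusList:numberList:) overload above. The list values
 are arbitrary examples (sampling radii in pixels, points per ring).

 let brisk = BRISK.create(thresh: 30, octaves: 3,
                          radiusList: [0.0, 2.5, 4.0, 6.0],
                          numberList: [1, 10, 15, 20])
 var keypoints = [KeyPoint]()
 brisk.detect(image: Imgcodecs.imread(filename: "scene.png"), keypoints: &keypoints)
*/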
extension DescriptorMatcher {
  @nonobjc public func match(queryDescriptors: opencv2.Mat, trainDescriptors: opencv2.Mat, matches: inout [opencv2.DMatch], mask: opencv2.Mat)
}
extension DescriptorMatcher {
  @nonobjc public func match(queryDescriptors: opencv2.Mat, trainDescriptors: opencv2.Mat, matches: inout [opencv2.DMatch])
}
extension DescriptorMatcher {
  @nonobjc public func knnMatch(queryDescriptors: opencv2.Mat, trainDescriptors: opencv2.Mat, matches: inout [[opencv2.DMatch]], k: Swift.Int32, mask: opencv2.Mat, compactResult: Swift.Bool)
}
extension DescriptorMatcher {
  @nonobjc public func knnMatch(queryDescriptors: opencv2.Mat, trainDescriptors: opencv2.Mat, matches: inout [[opencv2.DMatch]], k: Swift.Int32, mask: opencv2.Mat)
}
extension DescriptorMatcher {
  @nonobjc public func knnMatch(queryDescriptors: opencv2.Mat, trainDescriptors: opencv2.Mat, matches: inout [[opencv2.DMatch]], k: Swift.Int32)
}
extension DescriptorMatcher {
  @nonobjc public func radiusMatch(queryDescriptors: opencv2.Mat, trainDescriptors: opencv2.Mat, matches: inout [[opencv2.DMatch]], maxDistance: Swift.Float, mask: opencv2.Mat, compactResult: Swift.Bool)
}
extension DescriptorMatcher {
  @nonobjc public func radiusMatch(queryDescriptors: opencv2.Mat, trainDescriptors: opencv2.Mat, matches: inout [[opencv2.DMatch]], maxDistance: Swift.Float, mask: opencv2.Mat)
}
extension DescriptorMatcher {
  @nonobjc public func radiusMatch(queryDescriptors: opencv2.Mat, trainDescriptors: opencv2.Mat, matches: inout [[opencv2.DMatch]], maxDistance: Swift.Float)
}
extension DescriptorMatcher {
  @nonobjc public func match(queryDescriptors: opencv2.Mat, matches: inout [opencv2.DMatch], masks: [opencv2.Mat])
}
extension DescriptorMatcher {
  @nonobjc public func match(queryDescriptors: opencv2.Mat, matches: inout [opencv2.DMatch])
}
extension DescriptorMatcher {
  @nonobjc public func knnMatch(queryDescriptors: opencv2.Mat, matches: inout [[opencv2.DMatch]], k: Swift.Int32, masks: [opencv2.Mat], compactResult: Swift.Bool)
}
extension DescriptorMatcher {
  @nonobjc public func knnMatch(queryDescriptors: opencv2.Mat, matches: inout [[opencv2.DMatch]], k: Swift.Int32, masks: [opencv2.Mat])
}
extension DescriptorMatcher {
  @nonobjc public func knnMatch(queryDescriptors: opencv2.Mat, matches: inout [[opencv2.DMatch]], k: Swift.Int32)
}
extension DescriptorMatcher {
  @nonobjc public func radiusMatch(queryDescriptors: opencv2.Mat, matches: inout [[opencv2.DMatch]], maxDistance: Swift.Float, masks: [opencv2.Mat], compactResult: Swift.Bool)
}
extension DescriptorMatcher {
  @nonobjc public func radiusMatch(queryDescriptors: opencv2.Mat, matches: inout [[opencv2.DMatch]], maxDistance: Swift.Float, masks: [opencv2.Mat])
}
extension DescriptorMatcher {
  @nonobjc public func radiusMatch(queryDescriptors: opencv2.Mat, matches: inout [[opencv2.DMatch]], maxDistance: Swift.Float)
}
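/*
 Usage sketch: Lowe's ratio test on top of the two-set knnMatch overload above.
 desc1/desc2 stand for descriptor Mats produced by any Feature2D; the BFMatcher
 default initializer is an assumption, not a signature from this file.

 let matcher = BFMatcher()
 var knn = [[DMatch]]()
 matcher.knnMatch(queryDescriptors: desc1, trainDescriptors: desc2, matches: &knn, k: 2)
 // Keep a match only when it is clearly better than the runner-up.
 let good = knn.compactMap { pair -> DMatch? in
     guard pair.count == 2 else { return nil }
     return pair[0].distance < 0.75 * pair[1].distance ? pair[0] : nil
 }
*/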
extension Feature2D {
  @nonobjc public func detect(image: opencv2.Mat, keypoints: inout [opencv2.KeyPoint], mask: opencv2.Mat)
}
extension Feature2D {
  @nonobjc public func detect(image: opencv2.Mat, keypoints: inout [opencv2.KeyPoint])
}
extension Feature2D {
  @nonobjc public func detect(images: [opencv2.Mat], keypoints: inout [[opencv2.KeyPoint]], masks: [opencv2.Mat])
}
extension Feature2D {
  @nonobjc public func detect(images: [opencv2.Mat], keypoints: inout [[opencv2.KeyPoint]])
}
extension Feature2D {
  @nonobjc public func compute(image: opencv2.Mat, keypoints: inout [opencv2.KeyPoint], descriptors: opencv2.Mat)
}
extension Feature2D {
  @nonobjc public func compute(images: [opencv2.Mat], keypoints: inout [[opencv2.KeyPoint]], descriptors: inout [opencv2.Mat])
}
extension Feature2D {
  @nonobjc public func detectAndCompute(image: opencv2.Mat, mask: opencv2.Mat, keypoints: inout [opencv2.KeyPoint], descriptors: opencv2.Mat, useProvidedKeypoints: Swift.Bool)
}
extension Feature2D {
  @nonobjc public func detectAndCompute(image: opencv2.Mat, mask: opencv2.Mat, keypoints: inout [opencv2.KeyPoint], descriptors: opencv2.Mat)
}
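/*
 Usage sketch: detecting keypoints and computing descriptors in one call with
 the detectAndCompute overload above. An empty Mat() as the mask means "use the
 whole image"; SIFT is just one Feature2D that works here, and the file name is
 an example.

 let image = Imgcodecs.imread(filename: "scene.png")
 let sift = SIFT.create()
 var keypoints = [KeyPoint]()
 let descriptors = Mat()
 sift.detectAndCompute(image: image, mask: Mat(),
                       keypoints: &keypoints, descriptors: descriptors)
*/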
extension MSER {
  @nonobjc public func detectRegions(image: opencv2.Mat, msers: inout [[opencv2.Point2i]], bboxes: inout [opencv2.Rect2i])
}
extension Imgcodecs {
  @nonobjc public class func imreadmulti(filename: Swift.String, mats: inout [opencv2.Mat], flags: Swift.Int32) -> Swift.Bool
}
extension Imgcodecs {
  @nonobjc public class func imreadmulti(filename: Swift.String, mats: inout [opencv2.Mat]) -> Swift.Bool
}
extension Imgcodecs {
  @nonobjc public class func imreadmulti(filename: Swift.String, mats: inout [opencv2.Mat], start: Swift.Int32, count: Swift.Int32, flags: Swift.Int32) -> Swift.Bool
}
extension Imgcodecs {
  @nonobjc public class func imreadmulti(filename: Swift.String, mats: inout [opencv2.Mat], start: Swift.Int32, count: Swift.Int32) -> Swift.Bool
}
extension Imgcodecs {
  @nonobjc public class func imwrite(filename: Swift.String, img: opencv2.Mat, params: [Swift.Int32]) -> Swift.Bool
}
extension Imgcodecs {
  @nonobjc public class func imwritemulti(filename: Swift.String, img: [opencv2.Mat], params: [Swift.Int32]) -> Swift.Bool
}
extension Imgcodecs {
  @nonobjc public class func imdecodemulti(buf: opencv2.Mat, flags: Swift.Int32, mats: inout [opencv2.Mat]) -> Swift.Bool
}
extension Imgcodecs {
  @nonobjc public class func imencode(ext: Swift.String, img: opencv2.Mat, buf: inout [Swift.UInt8], params: [Swift.Int32]) -> Swift.Bool
}
extension Imgcodecs {
  @nonobjc public class func imencode(ext: Swift.String, img: opencv2.Mat, buf: inout [Swift.UInt8]) -> Swift.Bool
}
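/*
 Usage sketch: in-memory JPEG encoding with the imencode overload above, then
 wrapping the bytes in Foundation's Data. The quality flag travels through the
 [Int32] params array; ImwriteFlags is assumed to be the generated flags enum.

 import Foundation

 let image = Imgcodecs.imread(filename: "photo.png")
 var buf = [UInt8]()
 if Imgcodecs.imencode(ext: ".jpg", img: image, buf: &buf,
                       params: [ImwriteFlags.IMWRITE_JPEG_QUALITY.rawValue, 90]) {
     let jpegData = Data(buf) // ready to upload or cache
     _ = jpegData
 }
*/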
extension Imgproc {
  @nonobjc public class func goodFeaturesToTrack(image: opencv2.Mat, corners: inout [opencv2.Point2i], maxCorners: Swift.Int32, qualityLevel: Swift.Double, minDistance: Swift.Double, mask: opencv2.Mat, blockSize: Swift.Int32, useHarrisDetector: Swift.Bool, k: Swift.Double)
}
extension Imgproc {
  @nonobjc public class func goodFeaturesToTrack(image: opencv2.Mat, corners: inout [opencv2.Point2i], maxCorners: Swift.Int32, qualityLevel: Swift.Double, minDistance: Swift.Double, mask: opencv2.Mat, blockSize: Swift.Int32, useHarrisDetector: Swift.Bool)
}
extension Imgproc {
  @nonobjc public class func goodFeaturesToTrack(image: opencv2.Mat, corners: inout [opencv2.Point2i], maxCorners: Swift.Int32, qualityLevel: Swift.Double, minDistance: Swift.Double, mask: opencv2.Mat, blockSize: Swift.Int32)
}
extension Imgproc {
  @nonobjc public class func goodFeaturesToTrack(image: opencv2.Mat, corners: inout [opencv2.Point2i], maxCorners: Swift.Int32, qualityLevel: Swift.Double, minDistance: Swift.Double, mask: opencv2.Mat)
}
extension Imgproc {
  @nonobjc public class func goodFeaturesToTrack(image: opencv2.Mat, corners: inout [opencv2.Point2i], maxCorners: Swift.Int32, qualityLevel: Swift.Double, minDistance: Swift.Double)
}
extension Imgproc {
  @nonobjc public class func goodFeaturesToTrack(image: opencv2.Mat, corners: inout [opencv2.Point2i], maxCorners: Swift.Int32, qualityLevel: Swift.Double, minDistance: Swift.Double, mask: opencv2.Mat, blockSize: Swift.Int32, gradientSize: Swift.Int32, useHarrisDetector: Swift.Bool, k: Swift.Double)
}
extension Imgproc {
  @nonobjc public class func goodFeaturesToTrack(image: opencv2.Mat, corners: inout [opencv2.Point2i], maxCorners: Swift.Int32, qualityLevel: Swift.Double, minDistance: Swift.Double, mask: opencv2.Mat, blockSize: Swift.Int32, gradientSize: Swift.Int32, useHarrisDetector: Swift.Bool)
}
extension Imgproc {
  @nonobjc public class func goodFeaturesToTrack(image: opencv2.Mat, corners: inout [opencv2.Point2i], maxCorners: Swift.Int32, qualityLevel: Swift.Double, minDistance: Swift.Double, mask: opencv2.Mat, blockSize: Swift.Int32, gradientSize: Swift.Int32)
}
extension Imgproc {
  @nonobjc public class func calcHist(images: [opencv2.Mat], channels: [Swift.Int32], mask: opencv2.Mat, hist: opencv2.Mat, histSize: [Swift.Int32], ranges: [Swift.Float], accumulate: Swift.Bool)
}
extension Imgproc {
  @nonobjc public class func calcHist(images: [opencv2.Mat], channels: [Swift.Int32], mask: opencv2.Mat, hist: opencv2.Mat, histSize: [Swift.Int32], ranges: [Swift.Float])
}
extension Imgproc {
  @nonobjc public class func calcBackProject(images: [opencv2.Mat], channels: [Swift.Int32], hist: opencv2.Mat, dst: opencv2.Mat, ranges: [Swift.Float], scale: Swift.Double)
}
extension Imgproc {
  @nonobjc public class func findContours(image: opencv2.Mat, contours: inout [[opencv2.Point2i]], hierarchy: opencv2.Mat, mode: opencv2.RetrievalModes, method: opencv2.ContourApproximationModes, offset: opencv2.Point2i)
}
extension Imgproc {
  @nonobjc public class func findContours(image: opencv2.Mat, contours: inout [[opencv2.Point2i]], hierarchy: opencv2.Mat, mode: opencv2.RetrievalModes, method: opencv2.ContourApproximationModes)
}
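/*
 Usage sketch: binarize an image, then extract its outer contours with the
 findContours overload above. The enum cases (.COLOR_BGR2GRAY, .RETR_EXTERNAL,
 .CHAIN_APPROX_SIMPLE) follow the generated Swift enums; threshold's type
 parameter is a raw Int32 in the bindings, hence the .rawValue.

 let image = Imgcodecs.imread(filename: "shapes.png")
 let gray = Mat(), binary = Mat()
 Imgproc.cvtColor(src: image, dst: gray, code: .COLOR_BGR2GRAY)
 _ = Imgproc.threshold(src: gray, dst: binary, thresh: 128, maxval: 255,
                       type: ThresholdTypes.THRESH_BINARY.rawValue)
 var contours = [[Point2i]]()
 Imgproc.findContours(image: binary, contours: &contours, hierarchy: Mat(),
                      mode: .RETR_EXTERNAL, method: .CHAIN_APPROX_SIMPLE)
*/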
extension Imgproc {
  @nonobjc public class func approxPolyDP(curve: [opencv2.Point2f], approxCurve: inout [opencv2.Point2f], epsilon: Swift.Double, closed: Swift.Bool)
}
extension Imgproc {
  @nonobjc public class func convexHull(points: [opencv2.Point2i], hull: inout [Swift.Int32], clockwise: Swift.Bool)
}
extension Imgproc {
  @nonobjc public class func convexHull(points: [opencv2.Point2i], hull: inout [Swift.Int32])
}
extension Imgproc {
  @nonobjc public class func convexityDefects(contour: [opencv2.Point2i], convexhull: [Swift.Int32], convexityDefects: inout [opencv2.Int4])
}
extension Imgproc {
  @nonobjc public class func ellipse2Poly(center: opencv2.Point2i, axes: opencv2.Size2i, angle: Swift.Int32, arcStart: Swift.Int32, arcEnd: Swift.Int32, delta: Swift.Int32, pts: inout [opencv2.Point2i])
}
extension Subdiv2D {
  @nonobjc public func getEdgeList(edgeList: inout [opencv2.Float4])
}
extension Subdiv2D {
  @nonobjc public func getLeadingEdgeList(leadingEdgeList: inout [Swift.Int32])
}
extension Subdiv2D {
  @nonobjc public func getTriangleList(triangleList: inout [opencv2.Float6])
}
extension Subdiv2D {
  @nonobjc public func getVoronoiFacetList(idx: [Swift.Int32], facetList: inout [[opencv2.Point2f]], facetCenters: inout [opencv2.Point2f])
}
extension EM {
  @nonobjc public func getCovs(covs: inout [opencv2.Mat])
}
extension Objdetect {
  @nonobjc public class func groupRectangles(rectList: inout [opencv2.Rect2i], weights: inout [Swift.Int32], groupThreshold: Swift.Int32, eps: Swift.Double)
}
extension Objdetect {
  @nonobjc public class func groupRectangles(rectList: inout [opencv2.Rect2i], weights: inout [Swift.Int32], groupThreshold: Swift.Int32)
}
extension CascadeClassifier {
  @nonobjc public func detectMultiScale(image: opencv2.Mat, objects: inout [opencv2.Rect2i], scaleFactor: Swift.Double, minNeighbors: Swift.Int32, flags: Swift.Int32, minSize: opencv2.Size2i, maxSize: opencv2.Size2i)
}
extension CascadeClassifier {
  @nonobjc public func detectMultiScale(image: opencv2.Mat, objects: inout [opencv2.Rect2i], scaleFactor: Swift.Double, minNeighbors: Swift.Int32, flags: Swift.Int32, minSize: opencv2.Size2i)
}
extension CascadeClassifier {
  @nonobjc public func detectMultiScale(image: opencv2.Mat, objects: inout [opencv2.Rect2i], scaleFactor: Swift.Double, minNeighbors: Swift.Int32, flags: Swift.Int32)
}
extension CascadeClassifier {
  @nonobjc public func detectMultiScale(image: opencv2.Mat, objects: inout [opencv2.Rect2i], scaleFactor: Swift.Double, minNeighbors: Swift.Int32)
}
extension CascadeClassifier {
  @nonobjc public func detectMultiScale(image: opencv2.Mat, objects: inout [opencv2.Rect2i], scaleFactor: Swift.Double)
}
extension CascadeClassifier {
  @nonobjc public func detectMultiScale(image: opencv2.Mat, objects: inout [opencv2.Rect2i])
}
extension CascadeClassifier {
  @nonobjc public func detectMultiScale(image: opencv2.Mat, objects: inout [opencv2.Rect2i], numDetections: inout [Swift.Int32], scaleFactor: Swift.Double, minNeighbors: Swift.Int32, flags: Swift.Int32, minSize: opencv2.Size2i, maxSize: opencv2.Size2i)
}
extension CascadeClassifier {
  @nonobjc public func detectMultiScale(image: opencv2.Mat, objects: inout [opencv2.Rect2i], numDetections: inout [Swift.Int32], scaleFactor: Swift.Double, minNeighbors: Swift.Int32, flags: Swift.Int32, minSize: opencv2.Size2i)
}
extension CascadeClassifier {
  @nonobjc public func detectMultiScale(image: opencv2.Mat, objects: inout [opencv2.Rect2i], numDetections: inout [Swift.Int32], scaleFactor: Swift.Double, minNeighbors: Swift.Int32, flags: Swift.Int32)
}
extension CascadeClassifier {
  @nonobjc public func detectMultiScale(image: opencv2.Mat, objects: inout [opencv2.Rect2i], numDetections: inout [Swift.Int32], scaleFactor: Swift.Double, minNeighbors: Swift.Int32)
}
extension CascadeClassifier {
  @nonobjc public func detectMultiScale(image: opencv2.Mat, objects: inout [opencv2.Rect2i], numDetections: inout [Swift.Int32], scaleFactor: Swift.Double)
}
extension CascadeClassifier {
  @nonobjc public func detectMultiScale(image: opencv2.Mat, objects: inout [opencv2.Rect2i], numDetections: inout [Swift.Int32])
}
extension CascadeClassifier {
  @nonobjc public func detectMultiScale(image: opencv2.Mat, objects: inout [opencv2.Rect2i], rejectLevels: inout [Swift.Int32], levelWeights: inout [Swift.Double], scaleFactor: Swift.Double, minNeighbors: Swift.Int32, flags: Swift.Int32, minSize: opencv2.Size2i, maxSize: opencv2.Size2i, outputRejectLevels: Swift.Bool)
}
extension CascadeClassifier {
  @nonobjc public func detectMultiScale(image: opencv2.Mat, objects: inout [opencv2.Rect2i], rejectLevels: inout [Swift.Int32], levelWeights: inout [Swift.Double], scaleFactor: Swift.Double, minNeighbors: Swift.Int32, flags: Swift.Int32, minSize: opencv2.Size2i, maxSize: opencv2.Size2i)
}
extension CascadeClassifier {
  @nonobjc public func detectMultiScale(image: opencv2.Mat, objects: inout [opencv2.Rect2i], rejectLevels: inout [Swift.Int32], levelWeights: inout [Swift.Double], scaleFactor: Swift.Double, minNeighbors: Swift.Int32, flags: Swift.Int32, minSize: opencv2.Size2i)
}
extension CascadeClassifier {
  @nonobjc public func detectMultiScale(image: opencv2.Mat, objects: inout [opencv2.Rect2i], rejectLevels: inout [Swift.Int32], levelWeights: inout [Swift.Double], scaleFactor: Swift.Double, minNeighbors: Swift.Int32, flags: Swift.Int32)
}
extension CascadeClassifier {
  @nonobjc public func detectMultiScale(image: opencv2.Mat, objects: inout [opencv2.Rect2i], rejectLevels: inout [Swift.Int32], levelWeights: inout [Swift.Double], scaleFactor: Swift.Double, minNeighbors: Swift.Int32)
}
extension CascadeClassifier {
  @nonobjc public func detectMultiScale(image: opencv2.Mat, objects: inout [opencv2.Rect2i], rejectLevels: inout [Swift.Int32], levelWeights: inout [Swift.Double], scaleFactor: Swift.Double)
}
extension CascadeClassifier {
  @nonobjc public func detectMultiScale(image: opencv2.Mat, objects: inout [opencv2.Rect2i], rejectLevels: inout [Swift.Int32], levelWeights: inout [Swift.Double])
}
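/*
 Usage sketch: frontal-face detection with the parameterless detectMultiScale
 overload above. The cascade file path and the CascadeClassifier(filename:)
 initializer are assumptions in this sketch; detection conventionally runs on a
 grayscale image.

 let cascade = CascadeClassifier(filename: "haarcascade_frontalface_default.xml")
 let image = Imgcodecs.imread(filename: "group.png")
 let gray = Mat()
 Imgproc.cvtColor(src: image, dst: gray, code: .COLOR_BGR2GRAY)
 var faces = [Rect2i]()
 cascade.detectMultiScale(image: gray, objects: &faces)
 print("found \(faces.count) face(s)")
*/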
extension GraphicalCodeDetector {
  @nonobjc public func decodeMulti(img: opencv2.Mat, points: opencv2.Mat, decoded_info: inout [Swift.String], straight_code: inout [opencv2.Mat]) -> Swift.Bool
}
extension GraphicalCodeDetector {
  @nonobjc public func decodeMulti(img: opencv2.Mat, points: opencv2.Mat, decoded_info: inout [Swift.String]) -> Swift.Bool
}
extension GraphicalCodeDetector {
  @nonobjc public func detectAndDecodeMulti(img: opencv2.Mat, decoded_info: inout [Swift.String], points: opencv2.Mat, straight_code: inout [opencv2.Mat]) -> Swift.Bool
}
extension GraphicalCodeDetector {
  @nonobjc public func detectAndDecodeMulti(img: opencv2.Mat, decoded_info: inout [Swift.String], points: opencv2.Mat) -> Swift.Bool
}
extension GraphicalCodeDetector {
  @nonobjc public func detectAndDecodeMulti(img: opencv2.Mat, decoded_info: inout [Swift.String]) -> Swift.Bool
}
extension HOGDescriptor {
  @nonobjc public func compute(img: opencv2.Mat, descriptors: inout [Swift.Float], winStride: opencv2.Size2i, padding: opencv2.Size2i, locations: [opencv2.Point2i])
}
extension HOGDescriptor {
  @nonobjc public func compute(img: opencv2.Mat, descriptors: inout [Swift.Float], winStride: opencv2.Size2i, padding: opencv2.Size2i)
}
extension HOGDescriptor {
  @nonobjc public func compute(img: opencv2.Mat, descriptors: inout [Swift.Float], winStride: opencv2.Size2i)
}
extension HOGDescriptor {
  @nonobjc public func compute(img: opencv2.Mat, descriptors: inout [Swift.Float])
}
extension HOGDescriptor {
  @nonobjc public func detect(img: opencv2.Mat, foundLocations: inout [opencv2.Point2i], weights: inout [Swift.Double], hitThreshold: Swift.Double, winStride: opencv2.Size2i, padding: opencv2.Size2i, searchLocations: [opencv2.Point2i])
}
extension HOGDescriptor {
  @nonobjc public func detect(img: opencv2.Mat, foundLocations: inout [opencv2.Point2i], weights: inout [Swift.Double], hitThreshold: Swift.Double, winStride: opencv2.Size2i, padding: opencv2.Size2i)
}
extension HOGDescriptor {
  @nonobjc public func detect(img: opencv2.Mat, foundLocations: inout [opencv2.Point2i], weights: inout [Swift.Double], hitThreshold: Swift.Double, winStride: opencv2.Size2i)
}
extension HOGDescriptor {
  @nonobjc public func detect(img: opencv2.Mat, foundLocations: inout [opencv2.Point2i], weights: inout [Swift.Double], hitThreshold: Swift.Double)
}
extension HOGDescriptor {
  @nonobjc public func detect(img: opencv2.Mat, foundLocations: inout [opencv2.Point2i], weights: inout [Swift.Double])
}
extension HOGDescriptor {
  @nonobjc public func detectMultiScale(img: opencv2.Mat, foundLocations: inout [opencv2.Rect2i], foundWeights: inout [Swift.Double], hitThreshold: Swift.Double, winStride: opencv2.Size2i, padding: opencv2.Size2i, scale: Swift.Double, groupThreshold: Swift.Double, useMeanshiftGrouping: Swift.Bool)
}
extension HOGDescriptor {
  @nonobjc public func detectMultiScale(img: opencv2.Mat, foundLocations: inout [opencv2.Rect2i], foundWeights: inout [Swift.Double], hitThreshold: Swift.Double, winStride: opencv2.Size2i, padding: opencv2.Size2i, scale: Swift.Double, groupThreshold: Swift.Double)
}
extension HOGDescriptor {
  @nonobjc public func detectMultiScale(img: opencv2.Mat, foundLocations: inout [opencv2.Rect2i], foundWeights: inout [Swift.Double], hitThreshold: Swift.Double, winStride: opencv2.Size2i, padding: opencv2.Size2i, scale: Swift.Double)
}
extension HOGDescriptor {
  @nonobjc public func detectMultiScale(img: opencv2.Mat, foundLocations: inout [opencv2.Rect2i], foundWeights: inout [Swift.Double], hitThreshold: Swift.Double, winStride: opencv2.Size2i, padding: opencv2.Size2i)
}
extension HOGDescriptor {
  @nonobjc public func detectMultiScale(img: opencv2.Mat, foundLocations: inout [opencv2.Rect2i], foundWeights: inout [Swift.Double], hitThreshold: Swift.Double, winStride: opencv2.Size2i)
}
extension HOGDescriptor {
  @nonobjc public func detectMultiScale(img: opencv2.Mat, foundLocations: inout [opencv2.Rect2i], foundWeights: inout [Swift.Double], hitThreshold: Swift.Double)
}
extension HOGDescriptor {
  @nonobjc public func detectMultiScale(img: opencv2.Mat, foundLocations: inout [opencv2.Rect2i], foundWeights: inout [Swift.Double])
}
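/*
 Usage sketch: pedestrian detection with the shortest detectMultiScale overload
 above. setSVMDetector and getDefaultPeopleDetector mirror the C++ HOGDescriptor
 API and are assumed to be bridged compatibly; verify against the generated
 headers before relying on this.

 let hog = HOGDescriptor()
 hog.setSVMDetector(svmdetector: HOGDescriptor.getDefaultPeopleDetector()) // assumed bridging
 let image = Imgcodecs.imread(filename: "street.png")
 var people = [Rect2i]()
 var weights = [Double]()
 hog.detectMultiScale(img: image, foundLocations: &people, foundWeights: &weights)
*/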
extension QRCodeEncoder {
  @nonobjc public func encodeStructuredAppend(encoded_info: Swift.String, qrcodes: inout [opencv2.Mat])
}
extension ArucoDetector {
  @nonobjc public func detectMarkers(image: opencv2.Mat, corners: inout [opencv2.Mat], ids: opencv2.Mat, rejectedImgPoints: inout [opencv2.Mat])
}
extension ArucoDetector {
  @nonobjc public func detectMarkers(image: opencv2.Mat, corners: inout [opencv2.Mat], ids: opencv2.Mat)
}
extension ArucoDetector {
  @nonobjc public func refineDetectedMarkers(image: opencv2.Mat, board: opencv2.Board, detectedCorners: inout [opencv2.Mat], detectedIds: opencv2.Mat, rejectedCorners: inout [opencv2.Mat], cameraMatrix: opencv2.Mat, distCoeffs: opencv2.Mat, recoveredIdxs: opencv2.Mat)
}
extension ArucoDetector {
  @nonobjc public func refineDetectedMarkers(image: opencv2.Mat, board: opencv2.Board, detectedCorners: inout [opencv2.Mat], detectedIds: opencv2.Mat, rejectedCorners: inout [opencv2.Mat], cameraMatrix: opencv2.Mat, distCoeffs: opencv2.Mat)
}
extension ArucoDetector {
  @nonobjc public func refineDetectedMarkers(image: opencv2.Mat, board: opencv2.Board, detectedCorners: inout [opencv2.Mat], detectedIds: opencv2.Mat, rejectedCorners: inout [opencv2.Mat], cameraMatrix: opencv2.Mat)
}
extension ArucoDetector {
  @nonobjc public func refineDetectedMarkers(image: opencv2.Mat, board: opencv2.Board, detectedCorners: inout [opencv2.Mat], detectedIds: opencv2.Mat, rejectedCorners: inout [opencv2.Mat])
}
extension CharucoDetector {
  @nonobjc public func detectBoard(image: opencv2.Mat, charucoCorners: opencv2.Mat, charucoIds: opencv2.Mat, markerCorners: inout [opencv2.Mat], markerIds: opencv2.Mat)
}
extension CharucoDetector {
  @nonobjc public func detectBoard(image: opencv2.Mat, charucoCorners: opencv2.Mat, charucoIds: opencv2.Mat, markerCorners: inout [opencv2.Mat])
}
extension CharucoDetector {
  @nonobjc public func detectDiamonds(image: opencv2.Mat, diamondCorners: inout [opencv2.Mat], diamondIds: opencv2.Mat, markerCorners: inout [opencv2.Mat], markerIds: opencv2.Mat)
}
extension CharucoDetector {
  @nonobjc public func detectDiamonds(image: opencv2.Mat, diamondCorners: inout [opencv2.Mat], diamondIds: opencv2.Mat, markerCorners: inout [opencv2.Mat])
}
extension CharucoDetector {
  @nonobjc public func detectDiamonds(image: opencv2.Mat, diamondCorners: inout [opencv2.Mat], diamondIds: opencv2.Mat)
}
extension BarcodeDetector {
  @nonobjc public func decodeWithType(img: opencv2.Mat, points: opencv2.Mat, decoded_info: inout [Swift.String], decoded_type: inout [Swift.String]) -> Swift.Bool
}
extension BarcodeDetector {
  @nonobjc public func detectAndDecodeWithType(img: opencv2.Mat, decoded_info: inout [Swift.String], decoded_type: inout [Swift.String], points: opencv2.Mat) -> Swift.Bool
}
extension BarcodeDetector {
  @nonobjc public func detectAndDecodeWithType(img: opencv2.Mat, decoded_info: inout [Swift.String], decoded_type: inout [Swift.String]) -> Swift.Bool
}
extension Photo {
  @nonobjc public class func fastNlMeansDenoising(src: opencv2.Mat, dst: opencv2.Mat, hVector: [Swift.Float], templateWindowSize: Swift.Int32, searchWindowSize: Swift.Int32, normType: Swift.Int32)
}
extension Photo {
  @nonobjc public class func fastNlMeansDenoising(src: opencv2.Mat, dst: opencv2.Mat, hVector: [Swift.Float], templateWindowSize: Swift.Int32, searchWindowSize: Swift.Int32)
}
extension Photo {
  @nonobjc public class func fastNlMeansDenoising(src: opencv2.Mat, dst: opencv2.Mat, hVector: [Swift.Float], templateWindowSize: Swift.Int32)
}
extension Photo {
  @nonobjc public class func fastNlMeansDenoising(src: opencv2.Mat, dst: opencv2.Mat, hVector: [Swift.Float])
}
extension Photo {
  @nonobjc public class func fastNlMeansDenoisingMulti(srcImgs: [opencv2.Mat], dst: opencv2.Mat, imgToDenoiseIndex: Swift.Int32, temporalWindowSize: Swift.Int32, hVector: [Swift.Float], templateWindowSize: Swift.Int32, searchWindowSize: Swift.Int32, normType: Swift.Int32)
}
extension Photo {
  @nonobjc public class func fastNlMeansDenoisingMulti(srcImgs: [opencv2.Mat], dst: opencv2.Mat, imgToDenoiseIndex: Swift.Int32, temporalWindowSize: Swift.Int32, hVector: [Swift.Float], templateWindowSize: Swift.Int32, searchWindowSize: Swift.Int32)
}
extension Photo {
  @nonobjc public class func fastNlMeansDenoisingMulti(srcImgs: [opencv2.Mat], dst: opencv2.Mat, imgToDenoiseIndex: Swift.Int32, temporalWindowSize: Swift.Int32, hVector: [Swift.Float], templateWindowSize: Swift.Int32)
}
extension Photo {
  @nonobjc public class func fastNlMeansDenoisingMulti(srcImgs: [opencv2.Mat], dst: opencv2.Mat, imgToDenoiseIndex: Swift.Int32, temporalWindowSize: Swift.Int32, hVector: [Swift.Float])
}
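/*
 Usage sketch: grayscale non-local-means denoising via the shortest
 fastNlMeansDenoising overload above. hVector carries one filter strength per
 channel, so a single value suffices for a one-channel image; the file name and
 strength 10.0 are example values.

 let noisy = Imgcodecs.imread(filename: "noisy.png",
                              flags: ImreadModes.IMREAD_GRAYSCALE.rawValue)
 let clean = Mat()
 Photo.fastNlMeansDenoising(src: noisy, dst: clean, hVector: [10.0])
*/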
extension SinusoidalPattern {
  @nonobjc public func findProCamMatches(projUnwrappedPhaseMap: opencv2.Mat, camUnwrappedPhaseMap: opencv2.Mat, matches: inout [opencv2.Mat])
}
extension StructuredLightPattern {
  @nonobjc public func generate(patternImages: inout [opencv2.Mat]) -> Swift.Bool
}
extension Text {
  @nonobjc public class func detectTextSWT(input: opencv2.Mat, result: inout [opencv2.Rect2i], dark_on_light: Swift.Bool, draw: opencv2.Mat, chainBBs: opencv2.Mat)
}
extension Text {
  @nonobjc public class func detectTextSWT(input: opencv2.Mat, result: inout [opencv2.Rect2i], dark_on_light: Swift.Bool, draw: opencv2.Mat)
}
extension Text {
  @nonobjc public class func detectTextSWT(input: opencv2.Mat, result: inout [opencv2.Rect2i], dark_on_light: Swift.Bool)
}
extension Text {
  @nonobjc public class func computeNMChannels(_src: opencv2.Mat, _channels: inout [opencv2.Mat], _mode: Swift.Int32)
}
extension Text {
  @nonobjc public class func computeNMChannels(_src: opencv2.Mat, _channels: inout [opencv2.Mat])
}
extension Text {
  @nonobjc public class func erGrouping(image: opencv2.Mat, channel: opencv2.Mat, regions: [[opencv2.Point2i]], groups_rects: inout [opencv2.Rect2i], method: opencv2.erGrouping_Modes, filename: Swift.String, minProbablity: Swift.Float)
}
extension Text {
  @nonobjc public class func erGrouping(image: opencv2.Mat, channel: opencv2.Mat, regions: [[opencv2.Point2i]], groups_rects: inout [opencv2.Rect2i], method: opencv2.erGrouping_Modes, filename: Swift.String)
}
extension Text {
  @nonobjc public class func erGrouping(image: opencv2.Mat, channel: opencv2.Mat, regions: [[opencv2.Point2i]], groups_rects: inout [opencv2.Rect2i], method: opencv2.erGrouping_Modes)
}
extension Text {
  @nonobjc public class func erGrouping(image: opencv2.Mat, channel: opencv2.Mat, regions: [[opencv2.Point2i]], groups_rects: inout [opencv2.Rect2i])
}
extension Text {
  @nonobjc public class func detectRegions(image: opencv2.Mat, er_filter1: opencv2.ERFilter, er_filter2: opencv2.ERFilter, regions: inout [[opencv2.Point2i]])
}
extension Text {
  @nonobjc public class func detectRegions(image: opencv2.Mat, er_filter1: opencv2.ERFilter, er_filter2: opencv2.ERFilter, groups_rects: inout [opencv2.Rect2i], method: opencv2.erGrouping_Modes, filename: Swift.String, minProbability: Swift.Float)
}
extension Text {
  @nonobjc public class func detectRegions(image: opencv2.Mat, er_filter1: opencv2.ERFilter, er_filter2: opencv2.ERFilter, groups_rects: inout [opencv2.Rect2i], method: opencv2.erGrouping_Modes, filename: Swift.String)
}
extension Text {
  @nonobjc public class func detectRegions(image: opencv2.Mat, er_filter1: opencv2.ERFilter, er_filter2: opencv2.ERFilter, groups_rects: inout [opencv2.Rect2i], method: opencv2.erGrouping_Modes)
}
extension Text {
  @nonobjc public class func detectRegions(image: opencv2.Mat, er_filter1: opencv2.ERFilter, er_filter2: opencv2.ERFilter, groups_rects: inout [opencv2.Rect2i])
}
extension TextDetector {
  @nonobjc public func detect(inputImage: opencv2.Mat, Bbox: inout [opencv2.Rect2i], confidence: inout [Swift.Float])
}
extension Video {
  @nonobjc public class func buildOpticalFlowPyramid(img: opencv2.Mat, pyramid: inout [opencv2.Mat], winSize: opencv2.Size2i, maxLevel: Swift.Int32, withDerivatives: Swift.Bool, pyrBorder: Swift.Int32, derivBorder: Swift.Int32, tryReuseInputImage: Swift.Bool) -> Swift.Int32
}
extension Video {
  @nonobjc public class func buildOpticalFlowPyramid(img: opencv2.Mat, pyramid: inout [opencv2.Mat], winSize: opencv2.Size2i, maxLevel: Swift.Int32, withDerivatives: Swift.Bool, pyrBorder: Swift.Int32, derivBorder: Swift.Int32) -> Swift.Int32
}
extension Video {
  @nonobjc public class func buildOpticalFlowPyramid(img: opencv2.Mat, pyramid: inout [opencv2.Mat], winSize: opencv2.Size2i, maxLevel: Swift.Int32, withDerivatives: Swift.Bool, pyrBorder: Swift.Int32) -> Swift.Int32
}
extension Video {
  @nonobjc public class func buildOpticalFlowPyramid(img: opencv2.Mat, pyramid: inout [opencv2.Mat], winSize: opencv2.Size2i, maxLevel: Swift.Int32, withDerivatives: Swift.Bool) -> Swift.Int32
}
extension Video {
  @nonobjc public class func buildOpticalFlowPyramid(img: opencv2.Mat, pyramid: inout [opencv2.Mat], winSize: opencv2.Size2i, maxLevel: Swift.Int32) -> Swift.Int32
}
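/*
 Usage sketch: precomputing an image pyramid for sparse Lucas-Kanade tracking
 with the shortest buildOpticalFlowPyramid overload above; the return value is
 the number of pyramid levels actually built. Window size 21x21 and 3 levels
 are example values.

 let gray = Imgcodecs.imread(filename: "frame.png",
                             flags: ImreadModes.IMREAD_GRAYSCALE.rawValue)
 var pyramid = [Mat]()
 let levels = Video.buildOpticalFlowPyramid(img: gray, pyramid: &pyramid,
                                            winSize: Size2i(width: 21, height: 21),
                                            maxLevel: 3)
 // Reuse `pyramid` across calcOpticalFlowPyrLK calls on the same frame.
 _ = levels
*/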
extension VideoCapture {
  @nonobjc public convenience init(filename: Swift.String, apiPreference: Swift.Int32, params: [Swift.Int32])
}
extension VideoCapture {
  @nonobjc public convenience init(index: Swift.Int32, apiPreference: Swift.Int32, params: [Swift.Int32])
}
extension VideoCapture {
  @nonobjc public func open(filename: Swift.String, apiPreference: Swift.Int32, params: [Swift.Int32]) -> Swift.Bool
}
extension VideoCapture {
  @nonobjc public func open(index: Swift.Int32, apiPreference: Swift.Int32, params: [Swift.Int32]) -> Swift.Bool
}
extension VideoWriter {
  @nonobjc public convenience init(filename: Swift.String, fourcc: Swift.Int32, fps: Swift.Double, frameSize: opencv2.Size2i, params: [Swift.Int32])
}
extension VideoWriter {
  @nonobjc public convenience init(filename: Swift.String, apiPreference: Swift.Int32, fourcc: Swift.Int32, fps: Swift.Double, frameSize: opencv2.Size2i, params: [Swift.Int32])
}
extension VideoWriter {
  @nonobjc public func open(filename: Swift.String, fourcc: Swift.Int32, fps: Swift.Double, frameSize: opencv2.Size2i, params: [Swift.Int32]) -> Swift.Bool
}
extension VideoWriter {
  @nonobjc public func open(filename: Swift.String, apiPreference: Swift.Int32, fourcc: Swift.Int32, fps: Swift.Double, frameSize: opencv2.Size2i, params: [Swift.Int32]) -> Swift.Bool
}
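/*
 Usage sketch: opening the default camera and re-encoding frames with the
 params-based initializers above. apiPreference 0 is CAP_ANY; the MJPG fourcc
 is computed inline rather than assuming a fourcc helper in the bindings, and
 read(image:)/write(image:) are assumed to bridge the C++ API the usual way.

 let cap = VideoCapture(index: 0, apiPreference: 0, params: [])
 let mjpg = Int32(UInt32(UInt8(ascii: "M"))
     | UInt32(UInt8(ascii: "J")) << 8
     | UInt32(UInt8(ascii: "P")) << 16
     | UInt32(UInt8(ascii: "G")) << 24)
 let writer = VideoWriter(filename: "out.avi", fourcc: mjpg, fps: 30,
                          frameSize: Size2i(width: 640, height: 480), params: [])
 let frame = Mat()
 while cap.read(image: frame) { writer.write(image: frame) }
*/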
extension WeChatQRCode {
  @nonobjc public func detectAndDecode(img: opencv2.Mat, points: inout [opencv2.Mat]) -> [Swift.String]
}
extension Xfeatures2d {
  @nonobjc public class func matchGMS(size1: opencv2.Size2i, size2: opencv2.Size2i, keypoints1: [opencv2.KeyPoint], keypoints2: [opencv2.KeyPoint], matches1to2: [opencv2.DMatch], matchesGMS: inout [opencv2.DMatch], withRotation: Swift.Bool, withScale: Swift.Bool, thresholdFactor: Swift.Double)
}
extension Xfeatures2d {
  @nonobjc public class func matchGMS(size1: opencv2.Size2i, size2: opencv2.Size2i, keypoints1: [opencv2.KeyPoint], keypoints2: [opencv2.KeyPoint], matches1to2: [opencv2.DMatch], matchesGMS: inout [opencv2.DMatch], withRotation: Swift.Bool, withScale: Swift.Bool)
}
extension Xfeatures2d {
  @nonobjc public class func matchGMS(size1: opencv2.Size2i, size2: opencv2.Size2i, keypoints1: [opencv2.KeyPoint], keypoints2: [opencv2.KeyPoint], matches1to2: [opencv2.DMatch], matchesGMS: inout [opencv2.DMatch], withRotation: Swift.Bool)
}
extension Xfeatures2d {
  @nonobjc public class func matchGMS(size1: opencv2.Size2i, size2: opencv2.Size2i, keypoints1: [opencv2.KeyPoint], keypoints2: [opencv2.KeyPoint], matches1to2: [opencv2.DMatch], matchesGMS: inout [opencv2.DMatch])
}
extension Xfeatures2d {
  @nonobjc public class func matchLOGOS(keypoints1: [opencv2.KeyPoint], keypoints2: [opencv2.KeyPoint], nn1: [Swift.Int32], nn2: [Swift.Int32], matches1to2: [opencv2.DMatch])
}
extension FREAK {
  @nonobjc public class func create(orientationNormalized: Swift.Bool, scaleNormalized: Swift.Bool, patternScale: Swift.Float, nOctaves: Swift.Int32, selectedPairs: [Swift.Int32]) -> opencv2.FREAK
}
extension PCTSignatures {
  @nonobjc public class func create(initSamplingPoints: [opencv2.Point2f], initClusterSeedIndexes: [Swift.Int32]) -> opencv2.PCTSignatures
}
extension PCTSignatures {
  @nonobjc public func setWeights(weights: [Swift.Float])
}
extension PCTSignatures {
  @nonobjc public func setTranslations(translations: [Swift.Float])
}
extension PCTSignatures {
  @nonobjc public func setInitSeedIndexes(initSeedIndexes: [Swift.Int32])
}
extension PCTSignaturesSQFD {
  @nonobjc public func computeQuadraticFormDistances(sourceSignature: opencv2.Mat, imageSignatures: [opencv2.Mat], distances: [Swift.Float])
}
extension EdgeBoxes {
  @nonobjc public func getBoundingBoxes(edge_map: opencv2.Mat, orientation_map: opencv2.Mat, boxes: inout [opencv2.Rect2i], scores: opencv2.Mat)
}
extension EdgeBoxes {
  @nonobjc public func getBoundingBoxes(edge_map: opencv2.Mat, orientation_map: opencv2.Mat, boxes: inout [opencv2.Rect2i])
}
extension SelectiveSearchSegmentation {
  @nonobjc public func process(rects: inout [opencv2.Rect2i])
}