- // (review) Extraction artifact removed here: a pasted line-number gutter (1–1213) that is not part of the module interface.
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213 |
- // swift-interface-format-version: 1.0
- // swift-compiler-version: Apple Swift version 5.3.2 (swiftlang-1200.0.45 clang-1200.0.32.28)
- // swift-module-flags: -target arm64-apple-ios9.0 -enable-objc-interop -enable-library-evolution -swift-version 5 -enforce-exclusivity=checked -O -module-name opencv2
- import Foundation
- import Swift
- @_exported import opencv2
- // MARK: - Aruco: deprecated marker detection / refinement
- // NOTE(review): generated .swiftinterface declarations (see the
- // swift-interface-format-version header) — do not hand-edit signatures;
- // regenerate from the module instead.
- // detectMarkers: three deprecated overloads of the same call, each variant
- // dropping trailing parameters of the fullest form.
- extension Aruco {
- @available(*, deprecated)
- @nonobjc public class func detectMarkers(image: opencv2.Mat, dictionary: opencv2.Dictionary, corners: inout [opencv2.Mat], ids: opencv2.Mat, parameters: opencv2.DetectorParameters, rejectedImgPoints: inout [opencv2.Mat])
- }
- extension Aruco {
- @available(*, deprecated)
- @nonobjc public class func detectMarkers(image: opencv2.Mat, dictionary: opencv2.Dictionary, corners: inout [opencv2.Mat], ids: opencv2.Mat, parameters: opencv2.DetectorParameters)
- }
- extension Aruco {
- @available(*, deprecated)
- @nonobjc public class func detectMarkers(image: opencv2.Mat, dictionary: opencv2.Dictionary, corners: inout [opencv2.Mat], ids: opencv2.Mat)
- }
- // refineDetectedMarkers: eight deprecated overloads; trailing parameters
- // (parameters, recoveredIdxs, checkAllOrders, errorCorrectionRate,
- // minRepDistance, distCoeffs, cameraMatrix) are dropped one by one.
- extension Aruco {
- @available(*, deprecated)
- @nonobjc public class func refineDetectedMarkers(image: opencv2.Mat, board: opencv2.Board, detectedCorners: inout [opencv2.Mat], detectedIds: opencv2.Mat, rejectedCorners: inout [opencv2.Mat], cameraMatrix: opencv2.Mat, distCoeffs: opencv2.Mat, minRepDistance: Swift.Float, errorCorrectionRate: Swift.Float, checkAllOrders: Swift.Bool, recoveredIdxs: opencv2.Mat, parameters: opencv2.DetectorParameters)
- }
- extension Aruco {
- @available(*, deprecated)
- @nonobjc public class func refineDetectedMarkers(image: opencv2.Mat, board: opencv2.Board, detectedCorners: inout [opencv2.Mat], detectedIds: opencv2.Mat, rejectedCorners: inout [opencv2.Mat], cameraMatrix: opencv2.Mat, distCoeffs: opencv2.Mat, minRepDistance: Swift.Float, errorCorrectionRate: Swift.Float, checkAllOrders: Swift.Bool, recoveredIdxs: opencv2.Mat)
- }
- extension Aruco {
- @available(*, deprecated)
- @nonobjc public class func refineDetectedMarkers(image: opencv2.Mat, board: opencv2.Board, detectedCorners: inout [opencv2.Mat], detectedIds: opencv2.Mat, rejectedCorners: inout [opencv2.Mat], cameraMatrix: opencv2.Mat, distCoeffs: opencv2.Mat, minRepDistance: Swift.Float, errorCorrectionRate: Swift.Float, checkAllOrders: Swift.Bool)
- }
- extension Aruco {
- @available(*, deprecated)
- @nonobjc public class func refineDetectedMarkers(image: opencv2.Mat, board: opencv2.Board, detectedCorners: inout [opencv2.Mat], detectedIds: opencv2.Mat, rejectedCorners: inout [opencv2.Mat], cameraMatrix: opencv2.Mat, distCoeffs: opencv2.Mat, minRepDistance: Swift.Float, errorCorrectionRate: Swift.Float)
- }
- extension Aruco {
- @available(*, deprecated)
- @nonobjc public class func refineDetectedMarkers(image: opencv2.Mat, board: opencv2.Board, detectedCorners: inout [opencv2.Mat], detectedIds: opencv2.Mat, rejectedCorners: inout [opencv2.Mat], cameraMatrix: opencv2.Mat, distCoeffs: opencv2.Mat, minRepDistance: Swift.Float)
- }
- extension Aruco {
- @available(*, deprecated)
- @nonobjc public class func refineDetectedMarkers(image: opencv2.Mat, board: opencv2.Board, detectedCorners: inout [opencv2.Mat], detectedIds: opencv2.Mat, rejectedCorners: inout [opencv2.Mat], cameraMatrix: opencv2.Mat, distCoeffs: opencv2.Mat)
- }
- extension Aruco {
- @available(*, deprecated)
- @nonobjc public class func refineDetectedMarkers(image: opencv2.Mat, board: opencv2.Board, detectedCorners: inout [opencv2.Mat], detectedIds: opencv2.Mat, rejectedCorners: inout [opencv2.Mat], cameraMatrix: opencv2.Mat)
- }
- extension Aruco {
- @available(*, deprecated)
- @nonobjc public class func refineDetectedMarkers(image: opencv2.Mat, board: opencv2.Board, detectedCorners: inout [opencv2.Mat], detectedIds: opencv2.Mat, rejectedCorners: inout [opencv2.Mat])
- }
- // MARK: - Aruco: calibration and ChArUco diamond detection
- // NOTE(review): generated .swiftinterface declarations — do not hand-edit.
- // calibrateCameraAruco: seven overloads returning the calibration error as
- // Double; variants drop trailing parameters (criteria, flags, the
- // stdDeviations*/perViewErrors outputs, tvecs).
- extension Aruco {
- @nonobjc public class func calibrateCameraAruco(corners: [opencv2.Mat], ids: opencv2.Mat, counter: opencv2.Mat, board: opencv2.Board, imageSize: opencv2.Size2i, cameraMatrix: opencv2.Mat, distCoeffs: opencv2.Mat, rvecs: inout [opencv2.Mat], tvecs: inout [opencv2.Mat], stdDeviationsIntrinsics: opencv2.Mat, stdDeviationsExtrinsics: opencv2.Mat, perViewErrors: opencv2.Mat, flags: Swift.Int32, criteria: opencv2.TermCriteria) -> Swift.Double
- }
- extension Aruco {
- @nonobjc public class func calibrateCameraAruco(corners: [opencv2.Mat], ids: opencv2.Mat, counter: opencv2.Mat, board: opencv2.Board, imageSize: opencv2.Size2i, cameraMatrix: opencv2.Mat, distCoeffs: opencv2.Mat, rvecs: inout [opencv2.Mat], tvecs: inout [opencv2.Mat], stdDeviationsIntrinsics: opencv2.Mat, stdDeviationsExtrinsics: opencv2.Mat, perViewErrors: opencv2.Mat, flags: Swift.Int32) -> Swift.Double
- }
- extension Aruco {
- @nonobjc public class func calibrateCameraAruco(corners: [opencv2.Mat], ids: opencv2.Mat, counter: opencv2.Mat, board: opencv2.Board, imageSize: opencv2.Size2i, cameraMatrix: opencv2.Mat, distCoeffs: opencv2.Mat, rvecs: inout [opencv2.Mat], tvecs: inout [opencv2.Mat], stdDeviationsIntrinsics: opencv2.Mat, stdDeviationsExtrinsics: opencv2.Mat, perViewErrors: opencv2.Mat) -> Swift.Double
- }
- extension Aruco {
- @nonobjc public class func calibrateCameraAruco(corners: [opencv2.Mat], ids: opencv2.Mat, counter: opencv2.Mat, board: opencv2.Board, imageSize: opencv2.Size2i, cameraMatrix: opencv2.Mat, distCoeffs: opencv2.Mat, rvecs: inout [opencv2.Mat], tvecs: inout [opencv2.Mat], flags: Swift.Int32, criteria: opencv2.TermCriteria) -> Swift.Double
- }
- extension Aruco {
- @nonobjc public class func calibrateCameraAruco(corners: [opencv2.Mat], ids: opencv2.Mat, counter: opencv2.Mat, board: opencv2.Board, imageSize: opencv2.Size2i, cameraMatrix: opencv2.Mat, distCoeffs: opencv2.Mat, rvecs: inout [opencv2.Mat], tvecs: inout [opencv2.Mat], flags: Swift.Int32) -> Swift.Double
- }
- extension Aruco {
- @nonobjc public class func calibrateCameraAruco(corners: [opencv2.Mat], ids: opencv2.Mat, counter: opencv2.Mat, board: opencv2.Board, imageSize: opencv2.Size2i, cameraMatrix: opencv2.Mat, distCoeffs: opencv2.Mat, rvecs: inout [opencv2.Mat], tvecs: inout [opencv2.Mat]) -> Swift.Double
- }
- extension Aruco {
- @nonobjc public class func calibrateCameraAruco(corners: [opencv2.Mat], ids: opencv2.Mat, counter: opencv2.Mat, board: opencv2.Board, imageSize: opencv2.Size2i, cameraMatrix: opencv2.Mat, distCoeffs: opencv2.Mat, rvecs: inout [opencv2.Mat]) -> Swift.Double
- }
- // calibrateCameraCharuco: same seven-variant overload family as above, but
- // taking per-view ChArUco corner/ID arrays and a CharucoBoard.
- extension Aruco {
- @nonobjc public class func calibrateCameraCharuco(charucoCorners: [opencv2.Mat], charucoIds: [opencv2.Mat], board: opencv2.CharucoBoard, imageSize: opencv2.Size2i, cameraMatrix: opencv2.Mat, distCoeffs: opencv2.Mat, rvecs: inout [opencv2.Mat], tvecs: inout [opencv2.Mat], stdDeviationsIntrinsics: opencv2.Mat, stdDeviationsExtrinsics: opencv2.Mat, perViewErrors: opencv2.Mat, flags: Swift.Int32, criteria: opencv2.TermCriteria) -> Swift.Double
- }
- extension Aruco {
- @nonobjc public class func calibrateCameraCharuco(charucoCorners: [opencv2.Mat], charucoIds: [opencv2.Mat], board: opencv2.CharucoBoard, imageSize: opencv2.Size2i, cameraMatrix: opencv2.Mat, distCoeffs: opencv2.Mat, rvecs: inout [opencv2.Mat], tvecs: inout [opencv2.Mat], stdDeviationsIntrinsics: opencv2.Mat, stdDeviationsExtrinsics: opencv2.Mat, perViewErrors: opencv2.Mat, flags: Swift.Int32) -> Swift.Double
- }
- extension Aruco {
- @nonobjc public class func calibrateCameraCharuco(charucoCorners: [opencv2.Mat], charucoIds: [opencv2.Mat], board: opencv2.CharucoBoard, imageSize: opencv2.Size2i, cameraMatrix: opencv2.Mat, distCoeffs: opencv2.Mat, rvecs: inout [opencv2.Mat], tvecs: inout [opencv2.Mat], stdDeviationsIntrinsics: opencv2.Mat, stdDeviationsExtrinsics: opencv2.Mat, perViewErrors: opencv2.Mat) -> Swift.Double
- }
- extension Aruco {
- @nonobjc public class func calibrateCameraCharuco(charucoCorners: [opencv2.Mat], charucoIds: [opencv2.Mat], board: opencv2.CharucoBoard, imageSize: opencv2.Size2i, cameraMatrix: opencv2.Mat, distCoeffs: opencv2.Mat, rvecs: inout [opencv2.Mat], tvecs: inout [opencv2.Mat], flags: Swift.Int32, criteria: opencv2.TermCriteria) -> Swift.Double
- }
- extension Aruco {
- @nonobjc public class func calibrateCameraCharuco(charucoCorners: [opencv2.Mat], charucoIds: [opencv2.Mat], board: opencv2.CharucoBoard, imageSize: opencv2.Size2i, cameraMatrix: opencv2.Mat, distCoeffs: opencv2.Mat, rvecs: inout [opencv2.Mat], tvecs: inout [opencv2.Mat], flags: Swift.Int32) -> Swift.Double
- }
- extension Aruco {
- @nonobjc public class func calibrateCameraCharuco(charucoCorners: [opencv2.Mat], charucoIds: [opencv2.Mat], board: opencv2.CharucoBoard, imageSize: opencv2.Size2i, cameraMatrix: opencv2.Mat, distCoeffs: opencv2.Mat, rvecs: inout [opencv2.Mat], tvecs: inout [opencv2.Mat]) -> Swift.Double
- }
- extension Aruco {
- @nonobjc public class func calibrateCameraCharuco(charucoCorners: [opencv2.Mat], charucoIds: [opencv2.Mat], board: opencv2.CharucoBoard, imageSize: opencv2.Size2i, cameraMatrix: opencv2.Mat, distCoeffs: opencv2.Mat, rvecs: inout [opencv2.Mat]) -> Swift.Double
- }
- // detectCharucoDiamond: four deprecated overloads; variants drop the
- // dictionary, distCoeffs and cameraMatrix trailing parameters.
- extension Aruco {
- @available(*, deprecated)
- @nonobjc public class func detectCharucoDiamond(image: opencv2.Mat, markerCorners: [opencv2.Mat], markerIds: opencv2.Mat, squareMarkerLengthRate: Swift.Float, diamondCorners: inout [opencv2.Mat], diamondIds: opencv2.Mat, cameraMatrix: opencv2.Mat, distCoeffs: opencv2.Mat, dictionary: opencv2.Dictionary)
- }
- extension Aruco {
- @available(*, deprecated)
- @nonobjc public class func detectCharucoDiamond(image: opencv2.Mat, markerCorners: [opencv2.Mat], markerIds: opencv2.Mat, squareMarkerLengthRate: Swift.Float, diamondCorners: inout [opencv2.Mat], diamondIds: opencv2.Mat, cameraMatrix: opencv2.Mat, distCoeffs: opencv2.Mat)
- }
- extension Aruco {
- @available(*, deprecated)
- @nonobjc public class func detectCharucoDiamond(image: opencv2.Mat, markerCorners: [opencv2.Mat], markerIds: opencv2.Mat, squareMarkerLengthRate: Swift.Float, diamondCorners: inout [opencv2.Mat], diamondIds: opencv2.Mat, cameraMatrix: opencv2.Mat)
- }
- extension Aruco {
- @available(*, deprecated)
- @nonobjc public class func detectCharucoDiamond(image: opencv2.Mat, markerCorners: [opencv2.Mat], markerIds: opencv2.Mat, squareMarkerLengthRate: Swift.Float, diamondCorners: inout [opencv2.Mat], diamondIds: opencv2.Mat)
- }
- // MARK: - Calib3d: pose estimation and camera calibration
- // NOTE(review): generated .swiftinterface declarations — do not hand-edit.
- // @nonobjc overload families bridging array-valued results through
- // `inout [Mat]` parameters; within each family, successive overloads drop
- // trailing parameters.
- extension Calib3d {
- @nonobjc public class func solveP3P(objectPoints: opencv2.Mat, imagePoints: opencv2.Mat, cameraMatrix: opencv2.Mat, distCoeffs: opencv2.Mat, rvecs: inout [opencv2.Mat], tvecs: inout [opencv2.Mat], flags: Swift.Int32) -> Swift.Int32
- }
- // solvePnPGeneric: six overloads returning Int32; variants drop
- // reprojectionError, tvec, rvec, flags and useExtrinsicGuess.
- extension Calib3d {
- @nonobjc public class func solvePnPGeneric(objectPoints: opencv2.Mat, imagePoints: opencv2.Mat, cameraMatrix: opencv2.Mat, distCoeffs: opencv2.Mat, rvecs: inout [opencv2.Mat], tvecs: inout [opencv2.Mat], useExtrinsicGuess: Swift.Bool, flags: opencv2.SolvePnPMethod, rvec: opencv2.Mat, tvec: opencv2.Mat, reprojectionError: opencv2.Mat) -> Swift.Int32
- }
- extension Calib3d {
- @nonobjc public class func solvePnPGeneric(objectPoints: opencv2.Mat, imagePoints: opencv2.Mat, cameraMatrix: opencv2.Mat, distCoeffs: opencv2.Mat, rvecs: inout [opencv2.Mat], tvecs: inout [opencv2.Mat], useExtrinsicGuess: Swift.Bool, flags: opencv2.SolvePnPMethod, rvec: opencv2.Mat, tvec: opencv2.Mat) -> Swift.Int32
- }
- extension Calib3d {
- @nonobjc public class func solvePnPGeneric(objectPoints: opencv2.Mat, imagePoints: opencv2.Mat, cameraMatrix: opencv2.Mat, distCoeffs: opencv2.Mat, rvecs: inout [opencv2.Mat], tvecs: inout [opencv2.Mat], useExtrinsicGuess: Swift.Bool, flags: opencv2.SolvePnPMethod, rvec: opencv2.Mat) -> Swift.Int32
- }
- extension Calib3d {
- @nonobjc public class func solvePnPGeneric(objectPoints: opencv2.Mat, imagePoints: opencv2.Mat, cameraMatrix: opencv2.Mat, distCoeffs: opencv2.Mat, rvecs: inout [opencv2.Mat], tvecs: inout [opencv2.Mat], useExtrinsicGuess: Swift.Bool, flags: opencv2.SolvePnPMethod) -> Swift.Int32
- }
- extension Calib3d {
- @nonobjc public class func solvePnPGeneric(objectPoints: opencv2.Mat, imagePoints: opencv2.Mat, cameraMatrix: opencv2.Mat, distCoeffs: opencv2.Mat, rvecs: inout [opencv2.Mat], tvecs: inout [opencv2.Mat], useExtrinsicGuess: Swift.Bool) -> Swift.Int32
- }
- extension Calib3d {
- @nonobjc public class func solvePnPGeneric(objectPoints: opencv2.Mat, imagePoints: opencv2.Mat, cameraMatrix: opencv2.Mat, distCoeffs: opencv2.Mat, rvecs: inout [opencv2.Mat], tvecs: inout [opencv2.Mat]) -> Swift.Int32
- }
- // calibrateCamera: six overloads returning the calibration error as Double.
- extension Calib3d {
- @nonobjc public class func calibrateCamera(objectPoints: [opencv2.Mat], imagePoints: [opencv2.Mat], imageSize: opencv2.Size2i, cameraMatrix: opencv2.Mat, distCoeffs: opencv2.Mat, rvecs: inout [opencv2.Mat], tvecs: inout [opencv2.Mat], stdDeviationsIntrinsics: opencv2.Mat, stdDeviationsExtrinsics: opencv2.Mat, perViewErrors: opencv2.Mat, flags: Swift.Int32, criteria: opencv2.TermCriteria) -> Swift.Double
- }
- extension Calib3d {
- @nonobjc public class func calibrateCamera(objectPoints: [opencv2.Mat], imagePoints: [opencv2.Mat], imageSize: opencv2.Size2i, cameraMatrix: opencv2.Mat, distCoeffs: opencv2.Mat, rvecs: inout [opencv2.Mat], tvecs: inout [opencv2.Mat], stdDeviationsIntrinsics: opencv2.Mat, stdDeviationsExtrinsics: opencv2.Mat, perViewErrors: opencv2.Mat, flags: Swift.Int32) -> Swift.Double
- }
- extension Calib3d {
- @nonobjc public class func calibrateCamera(objectPoints: [opencv2.Mat], imagePoints: [opencv2.Mat], imageSize: opencv2.Size2i, cameraMatrix: opencv2.Mat, distCoeffs: opencv2.Mat, rvecs: inout [opencv2.Mat], tvecs: inout [opencv2.Mat], stdDeviationsIntrinsics: opencv2.Mat, stdDeviationsExtrinsics: opencv2.Mat, perViewErrors: opencv2.Mat) -> Swift.Double
- }
- extension Calib3d {
- @nonobjc public class func calibrateCamera(objectPoints: [opencv2.Mat], imagePoints: [opencv2.Mat], imageSize: opencv2.Size2i, cameraMatrix: opencv2.Mat, distCoeffs: opencv2.Mat, rvecs: inout [opencv2.Mat], tvecs: inout [opencv2.Mat], flags: Swift.Int32, criteria: opencv2.TermCriteria) -> Swift.Double
- }
- extension Calib3d {
- @nonobjc public class func calibrateCamera(objectPoints: [opencv2.Mat], imagePoints: [opencv2.Mat], imageSize: opencv2.Size2i, cameraMatrix: opencv2.Mat, distCoeffs: opencv2.Mat, rvecs: inout [opencv2.Mat], tvecs: inout [opencv2.Mat], flags: Swift.Int32) -> Swift.Double
- }
- extension Calib3d {
- @nonobjc public class func calibrateCamera(objectPoints: [opencv2.Mat], imagePoints: [opencv2.Mat], imageSize: opencv2.Size2i, cameraMatrix: opencv2.Mat, distCoeffs: opencv2.Mat, rvecs: inout [opencv2.Mat], tvecs: inout [opencv2.Mat]) -> Swift.Double
- }
- // calibrateCameraRO: six overloads of the release-object calibration
- // variant (takes iFixedPoint and a newObjPoints output Mat).
- extension Calib3d {
- @nonobjc public class func calibrateCameraRO(objectPoints: [opencv2.Mat], imagePoints: [opencv2.Mat], imageSize: opencv2.Size2i, iFixedPoint: Swift.Int32, cameraMatrix: opencv2.Mat, distCoeffs: opencv2.Mat, rvecs: inout [opencv2.Mat], tvecs: inout [opencv2.Mat], newObjPoints: opencv2.Mat, stdDeviationsIntrinsics: opencv2.Mat, stdDeviationsExtrinsics: opencv2.Mat, stdDeviationsObjPoints: opencv2.Mat, perViewErrors: opencv2.Mat, flags: Swift.Int32, criteria: opencv2.TermCriteria) -> Swift.Double
- }
- extension Calib3d {
- @nonobjc public class func calibrateCameraRO(objectPoints: [opencv2.Mat], imagePoints: [opencv2.Mat], imageSize: opencv2.Size2i, iFixedPoint: Swift.Int32, cameraMatrix: opencv2.Mat, distCoeffs: opencv2.Mat, rvecs: inout [opencv2.Mat], tvecs: inout [opencv2.Mat], newObjPoints: opencv2.Mat, stdDeviationsIntrinsics: opencv2.Mat, stdDeviationsExtrinsics: opencv2.Mat, stdDeviationsObjPoints: opencv2.Mat, perViewErrors: opencv2.Mat, flags: Swift.Int32) -> Swift.Double
- }
- extension Calib3d {
- @nonobjc public class func calibrateCameraRO(objectPoints: [opencv2.Mat], imagePoints: [opencv2.Mat], imageSize: opencv2.Size2i, iFixedPoint: Swift.Int32, cameraMatrix: opencv2.Mat, distCoeffs: opencv2.Mat, rvecs: inout [opencv2.Mat], tvecs: inout [opencv2.Mat], newObjPoints: opencv2.Mat, stdDeviationsIntrinsics: opencv2.Mat, stdDeviationsExtrinsics: opencv2.Mat, stdDeviationsObjPoints: opencv2.Mat, perViewErrors: opencv2.Mat) -> Swift.Double
- }
- extension Calib3d {
- @nonobjc public class func calibrateCameraRO(objectPoints: [opencv2.Mat], imagePoints: [opencv2.Mat], imageSize: opencv2.Size2i, iFixedPoint: Swift.Int32, cameraMatrix: opencv2.Mat, distCoeffs: opencv2.Mat, rvecs: inout [opencv2.Mat], tvecs: inout [opencv2.Mat], newObjPoints: opencv2.Mat, flags: Swift.Int32, criteria: opencv2.TermCriteria) -> Swift.Double
- }
- extension Calib3d {
- @nonobjc public class func calibrateCameraRO(objectPoints: [opencv2.Mat], imagePoints: [opencv2.Mat], imageSize: opencv2.Size2i, iFixedPoint: Swift.Int32, cameraMatrix: opencv2.Mat, distCoeffs: opencv2.Mat, rvecs: inout [opencv2.Mat], tvecs: inout [opencv2.Mat], newObjPoints: opencv2.Mat, flags: Swift.Int32) -> Swift.Double
- }
- extension Calib3d {
- @nonobjc public class func calibrateCameraRO(objectPoints: [opencv2.Mat], imagePoints: [opencv2.Mat], imageSize: opencv2.Size2i, iFixedPoint: Swift.Int32, cameraMatrix: opencv2.Mat, distCoeffs: opencv2.Mat, rvecs: inout [opencv2.Mat], tvecs: inout [opencv2.Mat], newObjPoints: opencv2.Mat) -> Swift.Double
- }
- // stereoCalibrate: three overloads taking cameraMatrix/distCoeffs pairs
- // plus R/T/E/F output matrices.
- extension Calib3d {
- @nonobjc public class func stereoCalibrate(objectPoints: [opencv2.Mat], imagePoints1: [opencv2.Mat], imagePoints2: [opencv2.Mat], cameraMatrix1: opencv2.Mat, distCoeffs1: opencv2.Mat, cameraMatrix2: opencv2.Mat, distCoeffs2: opencv2.Mat, imageSize: opencv2.Size2i, R: opencv2.Mat, T: opencv2.Mat, E: opencv2.Mat, F: opencv2.Mat, rvecs: inout [opencv2.Mat], tvecs: inout [opencv2.Mat], perViewErrors: opencv2.Mat, flags: Swift.Int32, criteria: opencv2.TermCriteria) -> Swift.Double
- }
- extension Calib3d {
- @nonobjc public class func stereoCalibrate(objectPoints: [opencv2.Mat], imagePoints1: [opencv2.Mat], imagePoints2: [opencv2.Mat], cameraMatrix1: opencv2.Mat, distCoeffs1: opencv2.Mat, cameraMatrix2: opencv2.Mat, distCoeffs2: opencv2.Mat, imageSize: opencv2.Size2i, R: opencv2.Mat, T: opencv2.Mat, E: opencv2.Mat, F: opencv2.Mat, rvecs: inout [opencv2.Mat], tvecs: inout [opencv2.Mat], perViewErrors: opencv2.Mat, flags: Swift.Int32) -> Swift.Double
- }
- extension Calib3d {
- @nonobjc public class func stereoCalibrate(objectPoints: [opencv2.Mat], imagePoints1: [opencv2.Mat], imagePoints2: [opencv2.Mat], cameraMatrix1: opencv2.Mat, distCoeffs1: opencv2.Mat, cameraMatrix2: opencv2.Mat, distCoeffs2: opencv2.Mat, imageSize: opencv2.Size2i, R: opencv2.Mat, T: opencv2.Mat, E: opencv2.Mat, F: opencv2.Mat, rvecs: inout [opencv2.Mat], tvecs: inout [opencv2.Mat], perViewErrors: opencv2.Mat) -> Swift.Double
- }
- extension Calib3d {
- @nonobjc public class func decomposeHomographyMat(H: opencv2.Mat, K: opencv2.Mat, rotations: inout [opencv2.Mat], translations: inout [opencv2.Mat], normals: inout [opencv2.Mat]) -> Swift.Int32
- }
- // calibrate / stereoCalibrate taking K/D parameter names — presumably the
- // fisheye-model variants (matches OpenCV's fisheye:: naming); confirm
- // against the Objective-C headers.
- extension Calib3d {
- @nonobjc public class func calibrate(objectPoints: [opencv2.Mat], imagePoints: [opencv2.Mat], image_size: opencv2.Size2i, K: opencv2.Mat, D: opencv2.Mat, rvecs: inout [opencv2.Mat], tvecs: inout [opencv2.Mat], flags: Swift.Int32, criteria: opencv2.TermCriteria) -> Swift.Double
- }
- extension Calib3d {
- @nonobjc public class func calibrate(objectPoints: [opencv2.Mat], imagePoints: [opencv2.Mat], image_size: opencv2.Size2i, K: opencv2.Mat, D: opencv2.Mat, rvecs: inout [opencv2.Mat], tvecs: inout [opencv2.Mat], flags: Swift.Int32) -> Swift.Double
- }
- extension Calib3d {
- @nonobjc public class func calibrate(objectPoints: [opencv2.Mat], imagePoints: [opencv2.Mat], image_size: opencv2.Size2i, K: opencv2.Mat, D: opencv2.Mat, rvecs: inout [opencv2.Mat], tvecs: inout [opencv2.Mat]) -> Swift.Double
- }
- extension Calib3d {
- @nonobjc public class func stereoCalibrate(objectPoints: [opencv2.Mat], imagePoints1: [opencv2.Mat], imagePoints2: [opencv2.Mat], K1: opencv2.Mat, D1: opencv2.Mat, K2: opencv2.Mat, D2: opencv2.Mat, imageSize: opencv2.Size2i, R: opencv2.Mat, T: opencv2.Mat, rvecs: inout [opencv2.Mat], tvecs: inout [opencv2.Mat], flags: Swift.Int32, criteria: opencv2.TermCriteria) -> Swift.Double
- }
- extension Calib3d {
- @nonobjc public class func stereoCalibrate(objectPoints: [opencv2.Mat], imagePoints1: [opencv2.Mat], imagePoints2: [opencv2.Mat], K1: opencv2.Mat, D1: opencv2.Mat, K2: opencv2.Mat, D2: opencv2.Mat, imageSize: opencv2.Size2i, R: opencv2.Mat, T: opencv2.Mat, rvecs: inout [opencv2.Mat], tvecs: inout [opencv2.Mat], flags: Swift.Int32) -> Swift.Double
- }
- extension Calib3d {
- @nonobjc public class func stereoCalibrate(objectPoints: [opencv2.Mat], imagePoints1: [opencv2.Mat], imagePoints2: [opencv2.Mat], K1: opencv2.Mat, D1: opencv2.Mat, K2: opencv2.Mat, D2: opencv2.Mat, imageSize: opencv2.Size2i, R: opencv2.Mat, T: opencv2.Mat, rvecs: inout [opencv2.Mat], tvecs: inout [opencv2.Mat]) -> Swift.Double
- }
- // MARK: - ByteVector: Swift array bridging and Sequence conformance
- // Construction from [Int8] or [UInt8], Int-indexed read-only subscript,
- // and read-only `array`/`unsignedArray` views; Sequence iteration yields
- // Int8 elements via ByteVectorIterator.
- extension ByteVector {
- public convenience init(_ array: [Swift.Int8])
- public convenience init(_ array: [Swift.UInt8])
- public subscript(index: Swift.Int) -> Swift.Int8 {
- get
- }
- public var array: [Swift.Int8] {
- get
- }
- public var unsignedArray: [Swift.UInt8] {
- get
- }
- }
- extension ByteVector : Swift.Sequence {
- public typealias Iterator = opencv2.ByteVectorIterator
- public func makeIterator() -> opencv2.ByteVectorIterator
- public typealias Element = opencv2.ByteVectorIterator.Element
- }
- // Value-type iterator over a ByteVector's Int8 elements.
- public struct ByteVectorIterator : Swift.IteratorProtocol {
- public typealias Element = Swift.Int8
- public mutating func next() -> Swift.Int8?
- }
- // MARK: - Core: Swift-array-bridged helpers
- // meanStdDev writes per-channel results into inout [Double] arrays;
- // split writes channel planes into an inout [Mat].
- extension Core {
- @nonobjc public class func meanStdDev(src: opencv2.Mat, mean: inout [Swift.Double], stddev: inout [Swift.Double], mask: opencv2.Mat)
- }
- extension Core {
- @nonobjc public class func meanStdDev(src: opencv2.Mat, mean: inout [Swift.Double], stddev: inout [Swift.Double])
- }
- extension Core {
- @nonobjc public class func split(m: opencv2.Mat, mv: inout [opencv2.Mat])
- }
- extension Core {
- @nonobjc public class func mixChannels(src: [opencv2.Mat], dst: [opencv2.Mat], fromTo: [Swift.Int32])
- }
- extension Core {
- @nonobjc public class func transposeND(src: opencv2.Mat, order: [Swift.Int32], dst: opencv2.Mat)
- }
- // MARK: - CvType: pixel-type constants and factories
- // Depth constants (CV_8U … CV_16F), fixed-channel combinations
- // (CV_<depth>C1 … C4), layout constants (CV_CN_MAX / CV_CN_SHIFT /
- // CV_DEPTH_MAX), and CV_<depth>C(n) functions that build a type code for
- // an arbitrary channel count.
- extension CvType {
- public static var CV_8U: Swift.Int32
- public static var CV_8S: Swift.Int32
- public static var CV_16U: Swift.Int32
- public static var CV_16S: Swift.Int32
- public static var CV_32S: Swift.Int32
- public static var CV_32F: Swift.Int32
- public static var CV_64F: Swift.Int32
- public static var CV_16F: Swift.Int32
- public static var CV_8UC1: Swift.Int32
- public static var CV_8UC2: Swift.Int32
- public static var CV_8UC3: Swift.Int32
- public static var CV_8UC4: Swift.Int32
- public static var CV_8SC1: Swift.Int32
- public static var CV_8SC2: Swift.Int32
- public static var CV_8SC3: Swift.Int32
- public static var CV_8SC4: Swift.Int32
- public static var CV_16UC1: Swift.Int32
- public static var CV_16UC2: Swift.Int32
- public static var CV_16UC3: Swift.Int32
- public static var CV_16UC4: Swift.Int32
- public static var CV_16SC1: Swift.Int32
- public static var CV_16SC2: Swift.Int32
- public static var CV_16SC3: Swift.Int32
- public static var CV_16SC4: Swift.Int32
- public static var CV_32SC1: Swift.Int32
- public static var CV_32SC2: Swift.Int32
- public static var CV_32SC3: Swift.Int32
- public static var CV_32SC4: Swift.Int32
- public static var CV_32FC1: Swift.Int32
- public static var CV_32FC2: Swift.Int32
- public static var CV_32FC3: Swift.Int32
- public static var CV_32FC4: Swift.Int32
- public static var CV_64FC1: Swift.Int32
- public static var CV_64FC2: Swift.Int32
- public static var CV_64FC3: Swift.Int32
- public static var CV_64FC4: Swift.Int32
- public static var CV_16FC1: Swift.Int32
- public static var CV_16FC2: Swift.Int32
- public static var CV_16FC3: Swift.Int32
- public static var CV_16FC4: Swift.Int32
- public static var CV_CN_MAX: Swift.Int
- public static var CV_CN_SHIFT: Swift.Int
- public static var CV_DEPTH_MAX: Swift.Int
- public static func CV_8UC(_ channels: Swift.Int32) -> Swift.Int32
- public static func CV_8SC(_ channels: Swift.Int32) -> Swift.Int32
- public static func CV_16UC(_ channels: Swift.Int32) -> Swift.Int32
- public static func CV_16SC(_ channels: Swift.Int32) -> Swift.Int32
- public static func CV_32SC(_ channels: Swift.Int32) -> Swift.Int32
- public static func CV_32FC(_ channels: Swift.Int32) -> Swift.Int32
- public static func CV_64FC(_ channels: Swift.Int32) -> Swift.Int32
- public static func CV_16FC(_ channels: Swift.Int32) -> Swift.Int32
- }
- // MARK: - DoubleVector: Swift array bridging and Sequence conformance
- // Construction from [Double], Int-indexed read-only subscript, read-only
- // `array` view; Sequence iteration yields Double elements.
- extension DoubleVector {
- public convenience init(_ array: [Swift.Double])
- public subscript(index: Swift.Int) -> Swift.Double {
- get
- }
- public var array: [Swift.Double] {
- get
- }
- }
- extension DoubleVector : Swift.Sequence {
- public typealias Iterator = opencv2.DoubleVectorIterator
- public func makeIterator() -> opencv2.DoubleVectorIterator
- public typealias Element = opencv2.DoubleVectorIterator.Element
- }
- // Value-type iterator over a DoubleVector's Double elements.
- public struct DoubleVectorIterator : Swift.IteratorProtocol {
- public typealias Element = Swift.Double
- public mutating func next() -> Swift.Double?
- }
- // MARK: - FloatVector: Swift array bridging and Sequence conformance
- // Construction from [Float], Int-indexed read-only subscript, read-only
- // `array` view; Sequence iteration yields Float elements.
- extension FloatVector {
- public convenience init(_ array: [Swift.Float])
- public subscript(index: Swift.Int) -> Swift.Float {
- get
- }
- public var array: [Swift.Float] {
- get
- }
- }
- extension FloatVector : Swift.Sequence {
- public typealias Iterator = opencv2.FloatVectorIterator
- public func makeIterator() -> opencv2.FloatVectorIterator
- public typealias Element = opencv2.FloatVectorIterator.Element
- }
- // Value-type iterator over a FloatVector's Float elements.
- public struct FloatVectorIterator : Swift.IteratorProtocol {
- public typealias Element = Swift.Float
- public mutating func next() -> Swift.Float?
- }
- // MARK: - IntVector: Swift array bridging and Sequence conformance
- // Construction from [Int32], Int-indexed read-only subscript, read-only
- // `array` view; Sequence iteration yields Int32 elements.
- extension IntVector {
- public convenience init(_ array: [Swift.Int32])
- public subscript(index: Swift.Int) -> Swift.Int32 {
- get
- }
- public var array: [Swift.Int32] {
- get
- }
- }
- extension IntVector : Swift.Sequence {
- public typealias Iterator = opencv2.IntVectorIterator
- public func makeIterator() -> opencv2.IntVectorIterator
- public typealias Element = opencv2.IntVectorIterator.Element
- }
- // Value-type iterator over an IntVector's Int32 elements.
- public struct IntVectorIterator : Swift.IteratorProtocol {
- public typealias Element = Swift.Int32
- public mutating func next() -> Swift.Int32?
- }
- // Homogeneous tuple typealiases of arity 2–4 (all elements share type T).
- public typealias T2<T> = (T, T)
- public typealias T3<T> = (T, T, T)
- public typealias T4<T> = (T, T, T, T)
- /// Typed raw-buffer accessors for `Mat`.
- /// Overloads cover the scalar element types Int8/UInt8/Int16/UInt16/Int32/Float/Double,
- /// addressed either by `(row, col)` or by an N-dimensional `indices` array.
- /// The throwing get/put variants return an `Int32` count — presumably the number of
- /// elements (or bytes) transferred; TODO confirm against the implementation.
- extension Mat {
- /// Creates a Mat of the given shape/type with its contents copied from `data`.
- public convenience init(rows: Swift.Int32, cols: Swift.Int32, type: Swift.Int32, data: [Swift.Int8])
- /// As above, with an explicit row `step` (stride) — assumed to be in bytes; verify with caller.
- public convenience init(rows: Swift.Int32, cols: Swift.Int32, type: Swift.Int32, data: [Swift.Int8], step: Swift.Int)
- // MARK: Reads addressed by an N-dimensional index vector
- @discardableResult
- public func get(indices: [Swift.Int32], data: inout [Swift.Int8]) throws -> Swift.Int32
- @discardableResult
- public func get(indices: [Swift.Int32], data: inout [Swift.UInt8]) throws -> Swift.Int32
- @discardableResult
- public func get(indices: [Swift.Int32], data: inout [Swift.Double]) throws -> Swift.Int32
- @discardableResult
- public func get(indices: [Swift.Int32], data: inout [Swift.Float]) throws -> Swift.Int32
- @discardableResult
- public func get(indices: [Swift.Int32], data: inout [Swift.Int32]) throws -> Swift.Int32
- @discardableResult
- public func get(indices: [Swift.Int32], data: inout [Swift.Int16]) throws -> Swift.Int32
- @discardableResult
- public func get(indices: [Swift.Int32], data: inout [Swift.UInt16]) throws -> Swift.Int32
- // MARK: Reads addressed by (row, col)
- @discardableResult
- public func get(row: Swift.Int32, col: Swift.Int32, data: inout [Swift.Int8]) throws -> Swift.Int32
- @discardableResult
- public func get(row: Swift.Int32, col: Swift.Int32, data: inout [Swift.UInt8]) throws -> Swift.Int32
- @discardableResult
- public func get(row: Swift.Int32, col: Swift.Int32, data: inout [Swift.Double]) throws -> Swift.Int32
- @discardableResult
- public func get(row: Swift.Int32, col: Swift.Int32, data: inout [Swift.Float]) throws -> Swift.Int32
- @discardableResult
- public func get(row: Swift.Int32, col: Swift.Int32, data: inout [Swift.Int32]) throws -> Swift.Int32
- @discardableResult
- public func get(row: Swift.Int32, col: Swift.Int32, data: inout [Swift.Int16]) throws -> Swift.Int32
- @discardableResult
- public func get(row: Swift.Int32, col: Swift.Int32, data: inout [Swift.UInt16]) throws -> Swift.Int32
- // MARK: Writes addressed by an N-dimensional index vector
- @discardableResult
- public func put(indices: [Swift.Int32], data: [Swift.Int8]) throws -> Swift.Int32
- @discardableResult
- public func put(indices: [Swift.Int32], data: [Swift.UInt8]) throws -> Swift.Int32
- // Int8 variant with an offset/length window into `data`.
- @discardableResult
- public func put(indices: [Swift.Int32], data: [Swift.Int8], offset: Swift.Int, length: Swift.Int32) throws -> Swift.Int32
- @discardableResult
- public func put(indices: [Swift.Int32], data: [Swift.Double]) throws -> Swift.Int32
- @discardableResult
- public func put(indices: [Swift.Int32], data: [Swift.Float]) throws -> Swift.Int32
- @discardableResult
- public func put(indices: [Swift.Int32], data: [Swift.Int32]) throws -> Swift.Int32
- @discardableResult
- public func put(indices: [Swift.Int32], data: [Swift.Int16]) throws -> Swift.Int32
- @discardableResult
- public func put(indices: [Swift.Int32], data: [Swift.UInt16]) throws -> Swift.Int32
- // MARK: Writes addressed by (row, col)
- @discardableResult
- public func put(row: Swift.Int32, col: Swift.Int32, data: [Swift.Int8]) throws -> Swift.Int32
- @discardableResult
- public func put(row: Swift.Int32, col: Swift.Int32, data: [Swift.UInt8]) throws -> Swift.Int32
- // Int8 variant with an offset/length window into `data`.
- @discardableResult
- public func put(row: Swift.Int32, col: Swift.Int32, data: [Swift.Int8], offset: Swift.Int, length: Swift.Int32) throws -> Swift.Int32
- @discardableResult
- public func put(row: Swift.Int32, col: Swift.Int32, data: [Swift.Double]) throws -> Swift.Int32
- @discardableResult
- public func put(row: Swift.Int32, col: Swift.Int32, data: [Swift.Float]) throws -> Swift.Int32
- @discardableResult
- public func put(row: Swift.Int32, col: Swift.Int32, data: [Swift.Int32]) throws -> Swift.Int32
- @discardableResult
- public func put(row: Swift.Int32, col: Swift.Int32, data: [Swift.Int16]) throws -> Swift.Int32
- @discardableResult
- public func put(row: Swift.Int32, col: Swift.Int32, data: [Swift.UInt16]) throws -> Swift.Int32
- // MARK: Non-throwing reads that return the element as [Double]
- @discardableResult
- public func get(row: Swift.Int32, col: Swift.Int32) -> [Swift.Double]
- @discardableResult
- public func get(indices: [Swift.Int32]) -> [Swift.Double]
- }
- /// Element types that can be read from / written to a `Mat` at a given index.
- /// The `2c`/`3c`/`4c` variants move 2-, 3- and 4-element tuples of `Self`
- /// in one call (multi-channel access).
- public protocol Atable {
- static func getAt(m: opencv2.Mat, indices: [Swift.Int32]) -> Self
- static func putAt(m: opencv2.Mat, indices: [Swift.Int32], v: Self)
- static func getAt2c(m: opencv2.Mat, indices: [Swift.Int32]) -> (Self, Self)
- static func putAt2c(m: opencv2.Mat, indices: [Swift.Int32], v: (Self, Self))
- static func getAt3c(m: opencv2.Mat, indices: [Swift.Int32]) -> (Self, Self, Self)
- static func putAt3c(m: opencv2.Mat, indices: [Swift.Int32], v: (Self, Self, Self))
- static func getAt4c(m: opencv2.Mat, indices: [Swift.Int32]) -> (Self, Self, Self, Self)
- static func putAt4c(m: opencv2.Mat, indices: [Swift.Int32], v: (Self, Self, Self, Self))
- }
- /// Read/write proxy for a single `Mat` element of type `N`.
- /// Each property pair delegates to the corresponding `Atable` get/put
- /// requirement; `v` is the scalar value, `v2c`/`v3c`/`v4c` the 2-/3-/4-channel tuples.
- /// Not directly constructible (designated initializers are unavailable) —
- /// presumably obtained via `Mat.at(...)`; see the `Mat` extension in this module.
- @_hasMissingDesignatedInitializers public class MatAt<N> where N : opencv2.Atable {
- /// Single-channel element value.
- public var v: N {
- get
- set(value)
- }
- /// Two-channel element value.
- public var v2c: (N, N) {
- get
- set(value)
- }
- /// Three-channel element value.
- public var v3c: (N, N, N) {
- get
- set(value)
- }
- /// Four-channel element value.
- public var v4c: (N, N, N, N) {
- get
- set(value)
- }
- @objc deinit
- }
- // MARK: - Atable conformances
- // One conformance per scalar element type supported by Mat
- // (UInt8, Int8, Double, Float, Int32, UInt16, Int16); all follow the
- // same getAt/putAt + 2c/3c/4c shape declared by `Atable`.
- extension UInt8 : opencv2.Atable {
- public static func getAt(m: opencv2.Mat, indices: [Swift.Int32]) -> Swift.UInt8
- public static func putAt(m: opencv2.Mat, indices: [Swift.Int32], v: Swift.UInt8)
- public static func getAt2c(m: opencv2.Mat, indices: [Swift.Int32]) -> (Swift.UInt8, Swift.UInt8)
- public static func putAt2c(m: opencv2.Mat, indices: [Swift.Int32], v: (Swift.UInt8, Swift.UInt8))
- public static func getAt3c(m: opencv2.Mat, indices: [Swift.Int32]) -> (Swift.UInt8, Swift.UInt8, Swift.UInt8)
- public static func putAt3c(m: opencv2.Mat, indices: [Swift.Int32], v: (Swift.UInt8, Swift.UInt8, Swift.UInt8))
- public static func getAt4c(m: opencv2.Mat, indices: [Swift.Int32]) -> (Swift.UInt8, Swift.UInt8, Swift.UInt8, Swift.UInt8)
- public static func putAt4c(m: opencv2.Mat, indices: [Swift.Int32], v: (Swift.UInt8, Swift.UInt8, Swift.UInt8, Swift.UInt8))
- }
- extension Int8 : opencv2.Atable {
- public static func getAt(m: opencv2.Mat, indices: [Swift.Int32]) -> Swift.Int8
- public static func putAt(m: opencv2.Mat, indices: [Swift.Int32], v: Swift.Int8)
- public static func getAt2c(m: opencv2.Mat, indices: [Swift.Int32]) -> (Swift.Int8, Swift.Int8)
- public static func putAt2c(m: opencv2.Mat, indices: [Swift.Int32], v: (Swift.Int8, Swift.Int8))
- public static func getAt3c(m: opencv2.Mat, indices: [Swift.Int32]) -> (Swift.Int8, Swift.Int8, Swift.Int8)
- public static func putAt3c(m: opencv2.Mat, indices: [Swift.Int32], v: (Swift.Int8, Swift.Int8, Swift.Int8))
- public static func getAt4c(m: opencv2.Mat, indices: [Swift.Int32]) -> (Swift.Int8, Swift.Int8, Swift.Int8, Swift.Int8)
- public static func putAt4c(m: opencv2.Mat, indices: [Swift.Int32], v: (Swift.Int8, Swift.Int8, Swift.Int8, Swift.Int8))
- }
- extension Double : opencv2.Atable {
- public static func getAt(m: opencv2.Mat, indices: [Swift.Int32]) -> Swift.Double
- public static func putAt(m: opencv2.Mat, indices: [Swift.Int32], v: Swift.Double)
- public static func getAt2c(m: opencv2.Mat, indices: [Swift.Int32]) -> (Swift.Double, Swift.Double)
- public static func putAt2c(m: opencv2.Mat, indices: [Swift.Int32], v: (Swift.Double, Swift.Double))
- public static func getAt3c(m: opencv2.Mat, indices: [Swift.Int32]) -> (Swift.Double, Swift.Double, Swift.Double)
- public static func putAt3c(m: opencv2.Mat, indices: [Swift.Int32], v: (Swift.Double, Swift.Double, Swift.Double))
- public static func getAt4c(m: opencv2.Mat, indices: [Swift.Int32]) -> (Swift.Double, Swift.Double, Swift.Double, Swift.Double)
- public static func putAt4c(m: opencv2.Mat, indices: [Swift.Int32], v: (Swift.Double, Swift.Double, Swift.Double, Swift.Double))
- }
- extension Float : opencv2.Atable {
- public static func getAt(m: opencv2.Mat, indices: [Swift.Int32]) -> Swift.Float
- public static func putAt(m: opencv2.Mat, indices: [Swift.Int32], v: Swift.Float)
- public static func getAt2c(m: opencv2.Mat, indices: [Swift.Int32]) -> (Swift.Float, Swift.Float)
- public static func putAt2c(m: opencv2.Mat, indices: [Swift.Int32], v: (Swift.Float, Swift.Float))
- public static func getAt3c(m: opencv2.Mat, indices: [Swift.Int32]) -> (Swift.Float, Swift.Float, Swift.Float)
- public static func putAt3c(m: opencv2.Mat, indices: [Swift.Int32], v: (Swift.Float, Swift.Float, Swift.Float))
- public static func getAt4c(m: opencv2.Mat, indices: [Swift.Int32]) -> (Swift.Float, Swift.Float, Swift.Float, Swift.Float)
- public static func putAt4c(m: opencv2.Mat, indices: [Swift.Int32], v: (Swift.Float, Swift.Float, Swift.Float, Swift.Float))
- }
- extension Int32 : opencv2.Atable {
- public static func getAt(m: opencv2.Mat, indices: [Swift.Int32]) -> Swift.Int32
- public static func putAt(m: opencv2.Mat, indices: [Swift.Int32], v: Swift.Int32)
- public static func getAt2c(m: opencv2.Mat, indices: [Swift.Int32]) -> (Swift.Int32, Swift.Int32)
- public static func putAt2c(m: opencv2.Mat, indices: [Swift.Int32], v: (Swift.Int32, Swift.Int32))
- public static func getAt3c(m: opencv2.Mat, indices: [Swift.Int32]) -> (Swift.Int32, Swift.Int32, Swift.Int32)
- public static func putAt3c(m: opencv2.Mat, indices: [Swift.Int32], v: (Swift.Int32, Swift.Int32, Swift.Int32))
- public static func getAt4c(m: opencv2.Mat, indices: [Swift.Int32]) -> (Swift.Int32, Swift.Int32, Swift.Int32, Swift.Int32)
- public static func putAt4c(m: opencv2.Mat, indices: [Swift.Int32], v: (Swift.Int32, Swift.Int32, Swift.Int32, Swift.Int32))
- }
- extension UInt16 : opencv2.Atable {
- public static func getAt(m: opencv2.Mat, indices: [Swift.Int32]) -> Swift.UInt16
- public static func putAt(m: opencv2.Mat, indices: [Swift.Int32], v: Swift.UInt16)
- public static func getAt2c(m: opencv2.Mat, indices: [Swift.Int32]) -> (Swift.UInt16, Swift.UInt16)
- public static func putAt2c(m: opencv2.Mat, indices: [Swift.Int32], v: (Swift.UInt16, Swift.UInt16))
- public static func getAt3c(m: opencv2.Mat, indices: [Swift.Int32]) -> (Swift.UInt16, Swift.UInt16, Swift.UInt16)
- public static func putAt3c(m: opencv2.Mat, indices: [Swift.Int32], v: (Swift.UInt16, Swift.UInt16, Swift.UInt16))
- public static func getAt4c(m: opencv2.Mat, indices: [Swift.Int32]) -> (Swift.UInt16, Swift.UInt16, Swift.UInt16, Swift.UInt16)
- public static func putAt4c(m: opencv2.Mat, indices: [Swift.Int32], v: (Swift.UInt16, Swift.UInt16, Swift.UInt16, Swift.UInt16))
- }
- extension Int16 : opencv2.Atable {
- public static func getAt(m: opencv2.Mat, indices: [Swift.Int32]) -> Swift.Int16
- public static func putAt(m: opencv2.Mat, indices: [Swift.Int32], v: Swift.Int16)
- public static func getAt2c(m: opencv2.Mat, indices: [Swift.Int32]) -> (Swift.Int16, Swift.Int16)
- public static func putAt2c(m: opencv2.Mat, indices: [Swift.Int32], v: (Swift.Int16, Swift.Int16))
- public static func getAt3c(m: opencv2.Mat, indices: [Swift.Int32]) -> (Swift.Int16, Swift.Int16, Swift.Int16)
- public static func putAt3c(m: opencv2.Mat, indices: [Swift.Int32], v: (Swift.Int16, Swift.Int16, Swift.Int16))
- public static func getAt4c(m: opencv2.Mat, indices: [Swift.Int32]) -> (Swift.Int16, Swift.Int16, Swift.Int16, Swift.Int16)
- public static func putAt4c(m: opencv2.Mat, indices: [Swift.Int32], v: (Swift.Int16, Swift.Int16, Swift.Int16, Swift.Int16))
- }
- /// Element access returning a `MatAt` proxy for any `Atable` element type,
- /// addressed by `(row, col)` or by an N-dimensional index vector.
- extension Mat {
- public func at<N>(row: Swift.Int32, col: Swift.Int32) -> opencv2.MatAt<N> where N : opencv2.Atable
- public func at<N>(indices: [Swift.Int32]) -> opencv2.MatAt<N> where N : opencv2.Atable
- }
- /// Matrix multiplication operator for two `Mat`s.
- extension Mat {
- public static func * (lhs: opencv2.Mat, rhs: opencv2.Mat) -> opencv2.Mat
- }
- /// Convenience aliases: the unsuffixed geometry names map to the Int32 ("2i") variants.
- public typealias Rect = opencv2.Rect2i
- public typealias Point = opencv2.Point2i
- public typealias Size = opencv2.Size2i
- // MARK: - Dnn Swift overloads
- // Buffer-based network loaders (Darknet/Caffe/TensorFlow/TFLite/ONNX/
- // Model Optimizer), blob helpers, and non-maximum-suppression variants.
- // Each overload drops trailing parameters — presumably mapping to the C++
- // defaults; confirm against the OpenCV dnn documentation.
- extension Dnn {
- @nonobjc public class func readNetFromDarknet(bufferCfg: [Swift.UInt8], bufferModel: [Swift.UInt8]) -> opencv2.Net
- }
- extension Dnn {
- @nonobjc public class func readNetFromDarknet(bufferCfg: [Swift.UInt8]) -> opencv2.Net
- }
- extension Dnn {
- @nonobjc public class func readNetFromCaffe(bufferProto: [Swift.UInt8], bufferModel: [Swift.UInt8]) -> opencv2.Net
- }
- extension Dnn {
- @nonobjc public class func readNetFromCaffe(bufferProto: [Swift.UInt8]) -> opencv2.Net
- }
- extension Dnn {
- @nonobjc public class func readNetFromTensorflow(bufferModel: [Swift.UInt8], bufferConfig: [Swift.UInt8]) -> opencv2.Net
- }
- extension Dnn {
- @nonobjc public class func readNetFromTensorflow(bufferModel: [Swift.UInt8]) -> opencv2.Net
- }
- extension Dnn {
- @nonobjc public class func readNetFromTFLite(bufferModel: [Swift.UInt8]) -> opencv2.Net
- }
- extension Dnn {
- @nonobjc public class func readNet(framework: Swift.String, bufferModel: [Swift.UInt8], bufferConfig: [Swift.UInt8]) -> opencv2.Net
- }
- extension Dnn {
- @nonobjc public class func readNet(framework: Swift.String, bufferModel: [Swift.UInt8]) -> opencv2.Net
- }
- extension Dnn {
- @nonobjc public class func readNetFromModelOptimizer(bufferModelConfig: [Swift.UInt8], bufferWeights: [Swift.UInt8]) -> opencv2.Net
- }
- extension Dnn {
- @nonobjc public class func readNetFromONNX(buffer: [Swift.UInt8]) -> opencv2.Net
- }
- // Splits a 4D blob back into individual images.
- extension Dnn {
- @nonobjc public class func imagesFromBlob(blob_: opencv2.Mat, images_: inout [opencv2.Mat])
- }
- // Non-maximum suppression over axis-aligned (Rect2d) boxes;
- // surviving box indices are written to `indices`.
- extension Dnn {
- @nonobjc public class func NMSBoxes(bboxes: [opencv2.Rect2d], scores: [Swift.Float], score_threshold: Swift.Float, nms_threshold: Swift.Float, indices: inout [Swift.Int32], eta: Swift.Float, top_k: Swift.Int32)
- }
- extension Dnn {
- @nonobjc public class func NMSBoxes(bboxes: [opencv2.Rect2d], scores: [Swift.Float], score_threshold: Swift.Float, nms_threshold: Swift.Float, indices: inout [Swift.Int32], eta: Swift.Float)
- }
- extension Dnn {
- @nonobjc public class func NMSBoxes(bboxes: [opencv2.Rect2d], scores: [Swift.Float], score_threshold: Swift.Float, nms_threshold: Swift.Float, indices: inout [Swift.Int32])
- }
- // NMS over rotated rectangles.
- extension Dnn {
- @nonobjc public class func NMSBoxes(bboxes: [opencv2.RotatedRect], scores: [Swift.Float], score_threshold: Swift.Float, nms_threshold: Swift.Float, indices: inout [Swift.Int32], eta: Swift.Float, top_k: Swift.Int32)
- }
- extension Dnn {
- @nonobjc public class func NMSBoxes(bboxes: [opencv2.RotatedRect], scores: [Swift.Float], score_threshold: Swift.Float, nms_threshold: Swift.Float, indices: inout [Swift.Int32], eta: Swift.Float)
- }
- extension Dnn {
- @nonobjc public class func NMSBoxes(bboxes: [opencv2.RotatedRect], scores: [Swift.Float], score_threshold: Swift.Float, nms_threshold: Swift.Float, indices: inout [Swift.Int32])
- }
- // Class-aware NMS: boxes are additionally partitioned by `class_ids`.
- extension Dnn {
- @nonobjc public class func NMSBoxesBatched(bboxes: [opencv2.Rect2d], scores: [Swift.Float], class_ids: [Swift.Int32], score_threshold: Swift.Float, nms_threshold: Swift.Float, indices: inout [Swift.Int32], eta: Swift.Float, top_k: Swift.Int32)
- }
- extension Dnn {
- @nonobjc public class func NMSBoxesBatched(bboxes: [opencv2.Rect2d], scores: [Swift.Float], class_ids: [Swift.Int32], score_threshold: Swift.Float, nms_threshold: Swift.Float, indices: inout [Swift.Int32], eta: Swift.Float)
- }
- extension Dnn {
- @nonobjc public class func NMSBoxesBatched(bboxes: [opencv2.Rect2d], scores: [Swift.Float], class_ids: [Swift.Int32], score_threshold: Swift.Float, nms_threshold: Swift.Float, indices: inout [Swift.Int32])
- }
- // Soft-NMS: rescales scores (written to `updated_scores`) instead of
- // hard-dropping overlapping boxes; `method`/`sigma` select the decay.
- extension Dnn {
- @nonobjc public class func softNMSBoxes(bboxes: [opencv2.Rect2i], scores: [Swift.Float], updated_scores: inout [Swift.Float], score_threshold: Swift.Float, nms_threshold: Swift.Float, indices: inout [Swift.Int32], top_k: Darwin.size_t, sigma: Swift.Float, method: opencv2.SoftNMSMethod)
- }
- extension Dnn {
- @nonobjc public class func softNMSBoxes(bboxes: [opencv2.Rect2i], scores: [Swift.Float], updated_scores: inout [Swift.Float], score_threshold: Swift.Float, nms_threshold: Swift.Float, indices: inout [Swift.Int32], top_k: Darwin.size_t, sigma: Swift.Float)
- }
- extension Dnn {
- @nonobjc public class func softNMSBoxes(bboxes: [opencv2.Rect2i], scores: [Swift.Float], updated_scores: inout [Swift.Float], score_threshold: Swift.Float, nms_threshold: Swift.Float, indices: inout [Swift.Int32], top_k: Darwin.size_t)
- }
- extension Dnn {
- @nonobjc public class func softNMSBoxes(bboxes: [opencv2.Rect2i], scores: [Swift.Float], updated_scores: inout [Swift.Float], score_threshold: Swift.Float, nms_threshold: Swift.Float, indices: inout [Swift.Int32])
- }
- /// Object detection on a frame: fills parallel arrays of class ids,
- /// confidences and bounding boxes; shorter overloads omit the thresholds.
- extension DetectionModel {
- @nonobjc public func detect(frame: opencv2.Mat, classIds: inout [Swift.Int32], confidences: inout [Swift.Float], boxes: inout [opencv2.Rect2i], confThreshold: Swift.Float, nmsThreshold: Swift.Float)
- }
- extension DetectionModel {
- @nonobjc public func detect(frame: opencv2.Mat, classIds: inout [Swift.Int32], confidences: inout [Swift.Float], boxes: inout [opencv2.Rect2i], confThreshold: Swift.Float)
- }
- extension DetectionModel {
- @nonobjc public func detect(frame: opencv2.Mat, classIds: inout [Swift.Int32], confidences: inout [Swift.Float], boxes: inout [opencv2.Rect2i])
- }
- /// Layer-level helpers: `finalize` computes outputs from inputs;
- /// `run` is deprecated upstream.
- extension Layer {
- @nonobjc public func finalize(inputs: [opencv2.Mat], outputs: inout [opencv2.Mat])
- }
- extension Layer {
- @available(*, deprecated)
- @nonobjc public func run(inputs: [opencv2.Mat], outputs: inout [opencv2.Mat], internals: inout [opencv2.Mat])
- }
- /// Runs the model on `frame`, writing the raw output blobs to `outs`.
- extension Model {
- @nonobjc public func predict(frame: opencv2.Mat, outs: inout [opencv2.Mat])
- }
- // MARK: - Net Swift overloads
- // Forward passes write results into `outputBlobs`; introspection helpers
- // report quantization details, layer shapes/types and timing.
- extension Net {
- @nonobjc public class func readFromModelOptimizer(bufferModelConfig: [Swift.UInt8], bufferWeights: [Swift.UInt8]) -> opencv2.Net
- }
- // Forward pass up to a named output layer.
- extension Net {
- @nonobjc public func forward(outputBlobs: inout [opencv2.Mat], outputName: Swift.String)
- }
- extension Net {
- @nonobjc public func forward(outputBlobs: inout [opencv2.Mat])
- }
- // Forward pass collecting several named outputs at once.
- extension Net {
- @nonobjc public func forward(outputBlobs: inout [opencv2.Mat], outBlobNames: [Swift.String])
- }
- extension Net {
- @nonobjc public func forwardAndRetrieve(outputBlobs: inout [[opencv2.Mat]], outBlobNames: [Swift.String])
- }
- // Quantization scales/zero-points for the network inputs and outputs.
- extension Net {
- @nonobjc public func getInputDetails(scales: inout [Swift.Float], zeropoints: inout [Swift.Int32])
- }
- extension Net {
- @nonobjc public func getOutputDetails(scales: inout [Swift.Float], zeropoints: inout [Swift.Int32])
- }
- // Per-layer input/output shapes for the given input shape(s).
- extension Net {
- @nonobjc public func getLayersShapes(netInputShapes: [opencv2.IntVector], layersIds: inout [Swift.Int32], inLayersShapes: inout [[opencv2.IntVector]], outLayersShapes: inout [[opencv2.IntVector]])
- }
- extension Net {
- @nonobjc public func getLayersShapes(netInputShape: opencv2.IntVector, layersIds: inout [Swift.Int32], inLayersShapes: inout [[opencv2.IntVector]], outLayersShapes: inout [[opencv2.IntVector]])
- }
- extension Net {
- @nonobjc public func getLayerTypes(layersTypes: inout [Swift.String])
- }
- // Per-layer timings; the Int return is presumably total ticks — confirm upstream.
- extension Net {
- @nonobjc public func getPerfProfile(timings: inout [Swift.Double]) -> Swift.Int
- }
- /// Text detection: quadrilateral results as point lists, or rotated
- /// rectangles via `detectTextRectangles`; optional per-detection confidences.
- extension TextDetectionModel {
- @nonobjc public func detect(frame: opencv2.Mat, detections: inout [[opencv2.Point2i]], confidences: inout [Swift.Float])
- }
- extension TextDetectionModel {
- @nonobjc public func detect(frame: opencv2.Mat, detections: inout [[opencv2.Point2i]])
- }
- extension TextDetectionModel {
- @nonobjc public func detectTextRectangles(frame: opencv2.Mat, detections: inout [opencv2.RotatedRect], confidences: inout [Swift.Float])
- }
- extension TextDetectionModel {
- @nonobjc public func detectTextRectangles(frame: opencv2.Mat, detections: inout [opencv2.RotatedRect])
- }
- /// Recognizes text in the given ROIs of `frame`, one result string per ROI.
- extension TextRecognitionModel {
- @nonobjc public func recognize(frame: opencv2.Mat, roiRects: [opencv2.Mat], results: inout [Swift.String])
- }
- /// Fits facial landmarks for the detected `faces`; returns success.
- extension Facemark {
- @nonobjc public func fit(image: opencv2.Mat, faces: opencv2.Mat, landmarks: inout [opencv2.Mat]) -> Swift.Bool
- }
- // MARK: - Features2d drawMatches overloads
- // Render keypoint matches between two images into `outImg`; variants take
- // flat or nested match lists, an optional mask, and optional thickness/flags.
- extension Features2d {
- @nonobjc public class func drawMatches(img1: opencv2.Mat, keypoints1: [opencv2.KeyPoint], img2: opencv2.Mat, keypoints2: [opencv2.KeyPoint], matches1to2: [opencv2.DMatch], outImg: opencv2.Mat, matchColor: opencv2.Scalar, singlePointColor: opencv2.Scalar, matchesMask: [Swift.Int8], flags: opencv2.DrawMatchesFlags)
- }
- extension Features2d {
- @nonobjc public class func drawMatches(img1: opencv2.Mat, keypoints1: [opencv2.KeyPoint], img2: opencv2.Mat, keypoints2: [opencv2.KeyPoint], matches1to2: [opencv2.DMatch], outImg: opencv2.Mat, matchColor: opencv2.Scalar, singlePointColor: opencv2.Scalar, matchesMask: [Swift.Int8])
- }
- extension Features2d {
- @nonobjc public class func drawMatches(img1: opencv2.Mat, keypoints1: [opencv2.KeyPoint], img2: opencv2.Mat, keypoints2: [opencv2.KeyPoint], matches1to2: [opencv2.DMatch], outImg: opencv2.Mat, matchesThickness: Swift.Int32, matchColor: opencv2.Scalar, singlePointColor: opencv2.Scalar, matchesMask: [Swift.Int8], flags: opencv2.DrawMatchesFlags)
- }
- extension Features2d {
- @nonobjc public class func drawMatches(img1: opencv2.Mat, keypoints1: [opencv2.KeyPoint], img2: opencv2.Mat, keypoints2: [opencv2.KeyPoint], matches1to2: [opencv2.DMatch], outImg: opencv2.Mat, matchesThickness: Swift.Int32, matchColor: opencv2.Scalar, singlePointColor: opencv2.Scalar, matchesMask: [Swift.Int8])
- }
- extension Features2d {
- @nonobjc public class func drawMatches(img1: opencv2.Mat, keypoints1: [opencv2.KeyPoint], img2: opencv2.Mat, keypoints2: [opencv2.KeyPoint], matches1to2: [[opencv2.DMatch]], outImg: opencv2.Mat, matchColor: opencv2.Scalar, singlePointColor: opencv2.Scalar, matchesMask: [[Swift.Int8]], flags: opencv2.DrawMatchesFlags)
- }
- extension Features2d {
- @nonobjc public class func drawMatches(img1: opencv2.Mat, keypoints1: [opencv2.KeyPoint], img2: opencv2.Mat, keypoints2: [opencv2.KeyPoint], matches1to2: [[opencv2.DMatch]], outImg: opencv2.Mat, matchColor: opencv2.Scalar, singlePointColor: opencv2.Scalar, matchesMask: [[Swift.Int8]])
- }
- /// View-simulation parameter accessors (tilt/roll pairs).
- extension AffineFeature {
- @nonobjc public func setViewParams(tilts: [Swift.Float], rolls: [Swift.Float])
- }
- extension AffineFeature {
- @nonobjc public func getViewParams(tilts: [Swift.Float], rolls: [Swift.Float])
- }
- // MARK: - BRISK factory overloads
- // Create a BRISK detector from a custom sampling pattern
- // (radiusList/numberList), optionally with thresh/octaves and dMax/dMin/indexChange;
- // shorter overloads drop trailing parameters.
- extension BRISK {
- @nonobjc public class func create(radiusList: [Swift.Float], numberList: [Swift.Int32], dMax: Swift.Float, dMin: Swift.Float, indexChange: [Swift.Int32]) -> opencv2.BRISK
- }
- extension BRISK {
- @nonobjc public class func create(radiusList: [Swift.Float], numberList: [Swift.Int32], dMax: Swift.Float, dMin: Swift.Float) -> opencv2.BRISK
- }
- extension BRISK {
- @nonobjc public class func create(radiusList: [Swift.Float], numberList: [Swift.Int32], dMax: Swift.Float) -> opencv2.BRISK
- }
- extension BRISK {
- @nonobjc public class func create(radiusList: [Swift.Float], numberList: [Swift.Int32]) -> opencv2.BRISK
- }
- extension BRISK {
- @nonobjc public class func create(thresh: Swift.Int32, octaves: Swift.Int32, radiusList: [Swift.Float], numberList: [Swift.Int32], dMax: Swift.Float, dMin: Swift.Float, indexChange: [Swift.Int32]) -> opencv2.BRISK
- }
- extension BRISK {
- @nonobjc public class func create(thresh: Swift.Int32, octaves: Swift.Int32, radiusList: [Swift.Float], numberList: [Swift.Int32], dMax: Swift.Float, dMin: Swift.Float) -> opencv2.BRISK
- }
- extension BRISK {
- @nonobjc public class func create(thresh: Swift.Int32, octaves: Swift.Int32, radiusList: [Swift.Float], numberList: [Swift.Int32], dMax: Swift.Float) -> opencv2.BRISK
- }
- extension BRISK {
- @nonobjc public class func create(thresh: Swift.Int32, octaves: Swift.Int32, radiusList: [Swift.Float], numberList: [Swift.Int32]) -> opencv2.BRISK
- }
- // MARK: - DescriptorMatcher Swift overloads
- // Two families: explicit-train-set variants (queryDescriptors + trainDescriptors)
- // and variants that match against the matcher's stored train collection
- // (masks per trained image). `match` = best match, `knnMatch` = k best,
- // `radiusMatch` = all within maxDistance.
- extension DescriptorMatcher {
- @nonobjc public func match(queryDescriptors: opencv2.Mat, trainDescriptors: opencv2.Mat, matches: inout [opencv2.DMatch], mask: opencv2.Mat)
- }
- extension DescriptorMatcher {
- @nonobjc public func match(queryDescriptors: opencv2.Mat, trainDescriptors: opencv2.Mat, matches: inout [opencv2.DMatch])
- }
- extension DescriptorMatcher {
- @nonobjc public func knnMatch(queryDescriptors: opencv2.Mat, trainDescriptors: opencv2.Mat, matches: inout [[opencv2.DMatch]], k: Swift.Int32, mask: opencv2.Mat, compactResult: Swift.Bool)
- }
- extension DescriptorMatcher {
- @nonobjc public func knnMatch(queryDescriptors: opencv2.Mat, trainDescriptors: opencv2.Mat, matches: inout [[opencv2.DMatch]], k: Swift.Int32, mask: opencv2.Mat)
- }
- extension DescriptorMatcher {
- @nonobjc public func knnMatch(queryDescriptors: opencv2.Mat, trainDescriptors: opencv2.Mat, matches: inout [[opencv2.DMatch]], k: Swift.Int32)
- }
- extension DescriptorMatcher {
- @nonobjc public func radiusMatch(queryDescriptors: opencv2.Mat, trainDescriptors: opencv2.Mat, matches: inout [[opencv2.DMatch]], maxDistance: Swift.Float, mask: opencv2.Mat, compactResult: Swift.Bool)
- }
- extension DescriptorMatcher {
- @nonobjc public func radiusMatch(queryDescriptors: opencv2.Mat, trainDescriptors: opencv2.Mat, matches: inout [[opencv2.DMatch]], maxDistance: Swift.Float, mask: opencv2.Mat)
- }
- extension DescriptorMatcher {
- @nonobjc public func radiusMatch(queryDescriptors: opencv2.Mat, trainDescriptors: opencv2.Mat, matches: inout [[opencv2.DMatch]], maxDistance: Swift.Float)
- }
- // Variants matching against the stored train collection.
- extension DescriptorMatcher {
- @nonobjc public func match(queryDescriptors: opencv2.Mat, matches: inout [opencv2.DMatch], masks: [opencv2.Mat])
- }
- extension DescriptorMatcher {
- @nonobjc public func match(queryDescriptors: opencv2.Mat, matches: inout [opencv2.DMatch])
- }
- extension DescriptorMatcher {
- @nonobjc public func knnMatch(queryDescriptors: opencv2.Mat, matches: inout [[opencv2.DMatch]], k: Swift.Int32, masks: [opencv2.Mat], compactResult: Swift.Bool)
- }
- extension DescriptorMatcher {
- @nonobjc public func knnMatch(queryDescriptors: opencv2.Mat, matches: inout [[opencv2.DMatch]], k: Swift.Int32, masks: [opencv2.Mat])
- }
- extension DescriptorMatcher {
- @nonobjc public func knnMatch(queryDescriptors: opencv2.Mat, matches: inout [[opencv2.DMatch]], k: Swift.Int32)
- }
- extension DescriptorMatcher {
- @nonobjc public func radiusMatch(queryDescriptors: opencv2.Mat, matches: inout [[opencv2.DMatch]], maxDistance: Swift.Float, masks: [opencv2.Mat], compactResult: Swift.Bool)
- }
- extension DescriptorMatcher {
- @nonobjc public func radiusMatch(queryDescriptors: opencv2.Mat, matches: inout [[opencv2.DMatch]], maxDistance: Swift.Float, masks: [opencv2.Mat])
- }
- extension DescriptorMatcher {
- @nonobjc public func radiusMatch(queryDescriptors: opencv2.Mat, matches: inout [[opencv2.DMatch]], maxDistance: Swift.Float)
- }
- // MARK: - Feature2D Swift overloads
- // Keypoint detection and descriptor computation, single-image and batch
- // forms; `detectAndCompute` does both in one pass (optionally reusing
- // caller-provided keypoints).
- extension Feature2D {
- @nonobjc public func detect(image: opencv2.Mat, keypoints: inout [opencv2.KeyPoint], mask: opencv2.Mat)
- }
- extension Feature2D {
- @nonobjc public func detect(image: opencv2.Mat, keypoints: inout [opencv2.KeyPoint])
- }
- extension Feature2D {
- @nonobjc public func detect(images: [opencv2.Mat], keypoints: inout [[opencv2.KeyPoint]], masks: [opencv2.Mat])
- }
- extension Feature2D {
- @nonobjc public func detect(images: [opencv2.Mat], keypoints: inout [[opencv2.KeyPoint]])
- }
- extension Feature2D {
- @nonobjc public func compute(image: opencv2.Mat, keypoints: inout [opencv2.KeyPoint], descriptors: opencv2.Mat)
- }
- extension Feature2D {
- @nonobjc public func compute(images: [opencv2.Mat], keypoints: inout [[opencv2.KeyPoint]], descriptors: inout [opencv2.Mat])
- }
- extension Feature2D {
- @nonobjc public func detectAndCompute(image: opencv2.Mat, mask: opencv2.Mat, keypoints: inout [opencv2.KeyPoint], descriptors: opencv2.Mat, useProvidedKeypoints: Swift.Bool)
- }
- extension Feature2D {
- @nonobjc public func detectAndCompute(image: opencv2.Mat, mask: opencv2.Mat, keypoints: inout [opencv2.KeyPoint], descriptors: opencv2.Mat)
- }
- /// MSER region detection: point lists per region plus their bounding boxes.
- extension MSER {
- @nonobjc public func detectRegions(image: opencv2.Mat, msers: inout [[opencv2.Point2i]], bboxes: inout [opencv2.Rect2i])
- }
- // MARK: - Imgcodecs Swift overloads
- // Multi-page reads, file/stream encoding and decoding; all return a
- // success Bool.
- extension Imgcodecs {
- @nonobjc public class func imreadmulti(filename: Swift.String, mats: inout [opencv2.Mat], flags: Swift.Int32) -> Swift.Bool
- }
- extension Imgcodecs {
- @nonobjc public class func imreadmulti(filename: Swift.String, mats: inout [opencv2.Mat]) -> Swift.Bool
- }
- // Page-range variants (start/count).
- extension Imgcodecs {
- @nonobjc public class func imreadmulti(filename: Swift.String, mats: inout [opencv2.Mat], start: Swift.Int32, count: Swift.Int32, flags: Swift.Int32) -> Swift.Bool
- }
- extension Imgcodecs {
- @nonobjc public class func imreadmulti(filename: Swift.String, mats: inout [opencv2.Mat], start: Swift.Int32, count: Swift.Int32) -> Swift.Bool
- }
- extension Imgcodecs {
- @nonobjc public class func imwrite(filename: Swift.String, img: opencv2.Mat, params: [Swift.Int32]) -> Swift.Bool
- }
- extension Imgcodecs {
- @nonobjc public class func imwritemulti(filename: Swift.String, img: [opencv2.Mat], params: [Swift.Int32]) -> Swift.Bool
- }
- extension Imgcodecs {
- @nonobjc public class func imdecodemulti(buf: opencv2.Mat, flags: Swift.Int32, mats: inout [opencv2.Mat]) -> Swift.Bool
- }
- // In-memory encode: `ext` selects the codec; output bytes land in `buf`.
- extension Imgcodecs {
- @nonobjc public class func imencode(ext: Swift.String, img: opencv2.Mat, buf: inout [Swift.UInt8], params: [Swift.Int32]) -> Swift.Bool
- }
- extension Imgcodecs {
- @nonobjc public class func imencode(ext: Swift.String, img: opencv2.Mat, buf: inout [Swift.UInt8]) -> Swift.Bool
- }
- // MARK: - Imgproc Swift overloads
- // Corner detection, histograms, contours and polygon utilities.
- extension Imgproc {
- @nonobjc public class func goodFeaturesToTrack(image: opencv2.Mat, corners: inout [opencv2.Point2i], maxCorners: Swift.Int32, qualityLevel: Swift.Double, minDistance: Swift.Double, mask: opencv2.Mat, blockSize: Swift.Int32, useHarrisDetector: Swift.Bool, k: Swift.Double)
- }
- extension Imgproc {
- @nonobjc public class func goodFeaturesToTrack(image: opencv2.Mat, corners: inout [opencv2.Point2i], maxCorners: Swift.Int32, qualityLevel: Swift.Double, minDistance: Swift.Double, mask: opencv2.Mat, blockSize: Swift.Int32, useHarrisDetector: Swift.Bool)
- }
- extension Imgproc {
- @nonobjc public class func goodFeaturesToTrack(image: opencv2.Mat, corners: inout [opencv2.Point2i], maxCorners: Swift.Int32, qualityLevel: Swift.Double, minDistance: Swift.Double, mask: opencv2.Mat, blockSize: Swift.Int32)
- }
- extension Imgproc {
- @nonobjc public class func goodFeaturesToTrack(image: opencv2.Mat, corners: inout [opencv2.Point2i], maxCorners: Swift.Int32, qualityLevel: Swift.Double, minDistance: Swift.Double, mask: opencv2.Mat)
- }
- extension Imgproc {
- @nonobjc public class func goodFeaturesToTrack(image: opencv2.Mat, corners: inout [opencv2.Point2i], maxCorners: Swift.Int32, qualityLevel: Swift.Double, minDistance: Swift.Double)
- }
- // Variants with an explicit Sobel gradientSize.
- extension Imgproc {
- @nonobjc public class func goodFeaturesToTrack(image: opencv2.Mat, corners: inout [opencv2.Point2i], maxCorners: Swift.Int32, qualityLevel: Swift.Double, minDistance: Swift.Double, mask: opencv2.Mat, blockSize: Swift.Int32, gradientSize: Swift.Int32, useHarrisDetector: Swift.Bool, k: Swift.Double)
- }
- extension Imgproc {
- @nonobjc public class func goodFeaturesToTrack(image: opencv2.Mat, corners: inout [opencv2.Point2i], maxCorners: Swift.Int32, qualityLevel: Swift.Double, minDistance: Swift.Double, mask: opencv2.Mat, blockSize: Swift.Int32, gradientSize: Swift.Int32, useHarrisDetector: Swift.Bool)
- }
- extension Imgproc {
- @nonobjc public class func goodFeaturesToTrack(image: opencv2.Mat, corners: inout [opencv2.Point2i], maxCorners: Swift.Int32, qualityLevel: Swift.Double, minDistance: Swift.Double, mask: opencv2.Mat, blockSize: Swift.Int32, gradientSize: Swift.Int32)
- }
- // Histogram computation / back-projection.
- extension Imgproc {
- @nonobjc public class func calcHist(images: [opencv2.Mat], channels: [Swift.Int32], mask: opencv2.Mat, hist: opencv2.Mat, histSize: [Swift.Int32], ranges: [Swift.Float], accumulate: Swift.Bool)
- }
- extension Imgproc {
- @nonobjc public class func calcHist(images: [opencv2.Mat], channels: [Swift.Int32], mask: opencv2.Mat, hist: opencv2.Mat, histSize: [Swift.Int32], ranges: [Swift.Float])
- }
- extension Imgproc {
- @nonobjc public class func calcBackProject(images: [opencv2.Mat], channels: [Swift.Int32], hist: opencv2.Mat, dst: opencv2.Mat, ranges: [Swift.Float], scale: Swift.Double)
- }
- // Contour extraction and polygon utilities.
- extension Imgproc {
- @nonobjc public class func findContours(image: opencv2.Mat, contours: inout [[opencv2.Point2i]], hierarchy: opencv2.Mat, mode: opencv2.RetrievalModes, method: opencv2.ContourApproximationModes, offset: opencv2.Point2i)
- }
- extension Imgproc {
- @nonobjc public class func findContours(image: opencv2.Mat, contours: inout [[opencv2.Point2i]], hierarchy: opencv2.Mat, mode: opencv2.RetrievalModes, method: opencv2.ContourApproximationModes)
- }
- extension Imgproc {
- @nonobjc public class func approxPolyDP(curve: [opencv2.Point2f], approxCurve: inout [opencv2.Point2f], epsilon: Swift.Double, closed: Swift.Bool)
- }
- extension Imgproc {
- @nonobjc public class func convexHull(points: [opencv2.Point2i], hull: inout [Swift.Int32], clockwise: Swift.Bool)
- }
- extension Imgproc {
- @nonobjc public class func convexHull(points: [opencv2.Point2i], hull: inout [Swift.Int32])
- }
- extension Imgproc {
- @nonobjc public class func convexityDefects(contour: [opencv2.Point2i], convexhull: [Swift.Int32], convexityDefects: inout [opencv2.Int4])
- }
- extension Imgproc {
- @nonobjc public class func ellipse2Poly(center: opencv2.Point2i, axes: opencv2.Size2i, angle: Swift.Int32, arcStart: Swift.Int32, arcEnd: Swift.Int32, delta: Swift.Int32, pts: inout [opencv2.Point2i])
- }
- /// Delaunay/Voronoi subdivision accessors: edge, triangle and facet lists.
- extension Subdiv2D {
- @nonobjc public func getEdgeList(edgeList: inout [opencv2.Float4])
- }
- extension Subdiv2D {
- @nonobjc public func getLeadingEdgeList(leadingEdgeList: inout [Swift.Int32])
- }
- extension Subdiv2D {
- @nonobjc public func getTriangleList(triangleList: inout [opencv2.Float6])
- }
- extension Subdiv2D {
- @nonobjc public func getVoronoiFacetList(idx: [Swift.Int32], facetList: inout [[opencv2.Point2f]], facetCenters: inout [opencv2.Point2f])
- }
- /// Covariance matrices of the mixture components.
- extension EM {
- @nonobjc public func getCovs(covs: inout [opencv2.Mat])
- }
- /// Clusters similar rectangles in place; `weights` receives cluster sizes —
- /// presumably; confirm against the objdetect documentation.
- extension Objdetect {
- @nonobjc public class func groupRectangles(rectList: inout [opencv2.Rect2i], weights: inout [Swift.Int32], groupThreshold: Swift.Int32, eps: Swift.Double)
- }
- extension Objdetect {
- @nonobjc public class func groupRectangles(rectList: inout [opencv2.Rect2i], weights: inout [Swift.Int32], groupThreshold: Swift.Int32)
- }
- extension CascadeClassifier {
- @nonobjc public func detectMultiScale(image: opencv2.Mat, objects: inout [opencv2.Rect2i], scaleFactor: Swift.Double, minNeighbors: Swift.Int32, flags: Swift.Int32, minSize: opencv2.Size2i, maxSize: opencv2.Size2i)
- }
- extension CascadeClassifier {
- @nonobjc public func detectMultiScale(image: opencv2.Mat, objects: inout [opencv2.Rect2i], scaleFactor: Swift.Double, minNeighbors: Swift.Int32, flags: Swift.Int32, minSize: opencv2.Size2i)
- }
- extension CascadeClassifier {
- @nonobjc public func detectMultiScale(image: opencv2.Mat, objects: inout [opencv2.Rect2i], scaleFactor: Swift.Double, minNeighbors: Swift.Int32, flags: Swift.Int32)
- }
- extension CascadeClassifier {
- @nonobjc public func detectMultiScale(image: opencv2.Mat, objects: inout [opencv2.Rect2i], scaleFactor: Swift.Double, minNeighbors: Swift.Int32)
- }
- extension CascadeClassifier {
- @nonobjc public func detectMultiScale(image: opencv2.Mat, objects: inout [opencv2.Rect2i], scaleFactor: Swift.Double)
- }
- extension CascadeClassifier {
- @nonobjc public func detectMultiScale(image: opencv2.Mat, objects: inout [opencv2.Rect2i])
- }
- extension CascadeClassifier {
- @nonobjc public func detectMultiScale(image: opencv2.Mat, objects: inout [opencv2.Rect2i], numDetections: inout [Swift.Int32], scaleFactor: Swift.Double, minNeighbors: Swift.Int32, flags: Swift.Int32, minSize: opencv2.Size2i, maxSize: opencv2.Size2i)
- }
- extension CascadeClassifier {
- @nonobjc public func detectMultiScale(image: opencv2.Mat, objects: inout [opencv2.Rect2i], numDetections: inout [Swift.Int32], scaleFactor: Swift.Double, minNeighbors: Swift.Int32, flags: Swift.Int32, minSize: opencv2.Size2i)
- }
- extension CascadeClassifier {
- @nonobjc public func detectMultiScale(image: opencv2.Mat, objects: inout [opencv2.Rect2i], numDetections: inout [Swift.Int32], scaleFactor: Swift.Double, minNeighbors: Swift.Int32, flags: Swift.Int32)
- }
- extension CascadeClassifier {
- @nonobjc public func detectMultiScale(image: opencv2.Mat, objects: inout [opencv2.Rect2i], numDetections: inout [Swift.Int32], scaleFactor: Swift.Double, minNeighbors: Swift.Int32)
- }
- extension CascadeClassifier {
- @nonobjc public func detectMultiScale(image: opencv2.Mat, objects: inout [opencv2.Rect2i], numDetections: inout [Swift.Int32], scaleFactor: Swift.Double)
- }
- extension CascadeClassifier {
- @nonobjc public func detectMultiScale(image: opencv2.Mat, objects: inout [opencv2.Rect2i], numDetections: inout [Swift.Int32])
- }
- extension CascadeClassifier {
- @nonobjc public func detectMultiScale(image: opencv2.Mat, objects: inout [opencv2.Rect2i], rejectLevels: inout [Swift.Int32], levelWeights: inout [Swift.Double], scaleFactor: Swift.Double, minNeighbors: Swift.Int32, flags: Swift.Int32, minSize: opencv2.Size2i, maxSize: opencv2.Size2i, outputRejectLevels: Swift.Bool)
- }
- extension CascadeClassifier {
- @nonobjc public func detectMultiScale(image: opencv2.Mat, objects: inout [opencv2.Rect2i], rejectLevels: inout [Swift.Int32], levelWeights: inout [Swift.Double], scaleFactor: Swift.Double, minNeighbors: Swift.Int32, flags: Swift.Int32, minSize: opencv2.Size2i, maxSize: opencv2.Size2i)
- }
- extension CascadeClassifier {
- @nonobjc public func detectMultiScale(image: opencv2.Mat, objects: inout [opencv2.Rect2i], rejectLevels: inout [Swift.Int32], levelWeights: inout [Swift.Double], scaleFactor: Swift.Double, minNeighbors: Swift.Int32, flags: Swift.Int32, minSize: opencv2.Size2i)
- }
- extension CascadeClassifier {
- @nonobjc public func detectMultiScale(image: opencv2.Mat, objects: inout [opencv2.Rect2i], rejectLevels: inout [Swift.Int32], levelWeights: inout [Swift.Double], scaleFactor: Swift.Double, minNeighbors: Swift.Int32, flags: Swift.Int32)
- }
- extension CascadeClassifier {
- @nonobjc public func detectMultiScale(image: opencv2.Mat, objects: inout [opencv2.Rect2i], rejectLevels: inout [Swift.Int32], levelWeights: inout [Swift.Double], scaleFactor: Swift.Double, minNeighbors: Swift.Int32)
- }
- extension CascadeClassifier {
- @nonobjc public func detectMultiScale(image: opencv2.Mat, objects: inout [opencv2.Rect2i], rejectLevels: inout [Swift.Int32], levelWeights: inout [Swift.Double], scaleFactor: Swift.Double)
- }
- extension CascadeClassifier {
- @nonobjc public func detectMultiScale(image: opencv2.Mat, objects: inout [opencv2.Rect2i], rejectLevels: inout [Swift.Int32], levelWeights: inout [Swift.Double])
- }
- extension GraphicalCodeDetector {
- @nonobjc public func decodeMulti(img: opencv2.Mat, points: opencv2.Mat, decoded_info: inout [Swift.String], straight_code: inout [opencv2.Mat]) -> Swift.Bool
- }
- extension GraphicalCodeDetector {
- @nonobjc public func decodeMulti(img: opencv2.Mat, points: opencv2.Mat, decoded_info: inout [Swift.String]) -> Swift.Bool
- }
- extension GraphicalCodeDetector {
- @nonobjc public func detectAndDecodeMulti(img: opencv2.Mat, decoded_info: inout [Swift.String], points: opencv2.Mat, straight_code: inout [opencv2.Mat]) -> Swift.Bool
- }
- extension GraphicalCodeDetector {
- @nonobjc public func detectAndDecodeMulti(img: opencv2.Mat, decoded_info: inout [Swift.String], points: opencv2.Mat) -> Swift.Bool
- }
- extension GraphicalCodeDetector {
- @nonobjc public func detectAndDecodeMulti(img: opencv2.Mat, decoded_info: inout [Swift.String]) -> Swift.Bool
- }
- extension HOGDescriptor {
- @nonobjc public func compute(img: opencv2.Mat, descriptors: inout [Swift.Float], winStride: opencv2.Size2i, padding: opencv2.Size2i, locations: [opencv2.Point2i])
- }
- extension HOGDescriptor {
- @nonobjc public func compute(img: opencv2.Mat, descriptors: inout [Swift.Float], winStride: opencv2.Size2i, padding: opencv2.Size2i)
- }
- extension HOGDescriptor {
- @nonobjc public func compute(img: opencv2.Mat, descriptors: inout [Swift.Float], winStride: opencv2.Size2i)
- }
- extension HOGDescriptor {
- @nonobjc public func compute(img: opencv2.Mat, descriptors: inout [Swift.Float])
- }
- extension HOGDescriptor {
- @nonobjc public func detect(img: opencv2.Mat, foundLocations: inout [opencv2.Point2i], weights: inout [Swift.Double], hitThreshold: Swift.Double, winStride: opencv2.Size2i, padding: opencv2.Size2i, searchLocations: [opencv2.Point2i])
- }
- extension HOGDescriptor {
- @nonobjc public func detect(img: opencv2.Mat, foundLocations: inout [opencv2.Point2i], weights: inout [Swift.Double], hitThreshold: Swift.Double, winStride: opencv2.Size2i, padding: opencv2.Size2i)
- }
- extension HOGDescriptor {
- @nonobjc public func detect(img: opencv2.Mat, foundLocations: inout [opencv2.Point2i], weights: inout [Swift.Double], hitThreshold: Swift.Double, winStride: opencv2.Size2i)
- }
- extension HOGDescriptor {
- @nonobjc public func detect(img: opencv2.Mat, foundLocations: inout [opencv2.Point2i], weights: inout [Swift.Double], hitThreshold: Swift.Double)
- }
- extension HOGDescriptor {
- @nonobjc public func detect(img: opencv2.Mat, foundLocations: inout [opencv2.Point2i], weights: inout [Swift.Double])
- }
- extension HOGDescriptor {
- @nonobjc public func detectMultiScale(img: opencv2.Mat, foundLocations: inout [opencv2.Rect2i], foundWeights: inout [Swift.Double], hitThreshold: Swift.Double, winStride: opencv2.Size2i, padding: opencv2.Size2i, scale: Swift.Double, groupThreshold: Swift.Double, useMeanshiftGrouping: Swift.Bool)
- }
- extension HOGDescriptor {
- @nonobjc public func detectMultiScale(img: opencv2.Mat, foundLocations: inout [opencv2.Rect2i], foundWeights: inout [Swift.Double], hitThreshold: Swift.Double, winStride: opencv2.Size2i, padding: opencv2.Size2i, scale: Swift.Double, groupThreshold: Swift.Double)
- }
- extension HOGDescriptor {
- @nonobjc public func detectMultiScale(img: opencv2.Mat, foundLocations: inout [opencv2.Rect2i], foundWeights: inout [Swift.Double], hitThreshold: Swift.Double, winStride: opencv2.Size2i, padding: opencv2.Size2i, scale: Swift.Double)
- }
- extension HOGDescriptor {
- @nonobjc public func detectMultiScale(img: opencv2.Mat, foundLocations: inout [opencv2.Rect2i], foundWeights: inout [Swift.Double], hitThreshold: Swift.Double, winStride: opencv2.Size2i, padding: opencv2.Size2i)
- }
- extension HOGDescriptor {
- @nonobjc public func detectMultiScale(img: opencv2.Mat, foundLocations: inout [opencv2.Rect2i], foundWeights: inout [Swift.Double], hitThreshold: Swift.Double, winStride: opencv2.Size2i)
- }
- extension HOGDescriptor {
- @nonobjc public func detectMultiScale(img: opencv2.Mat, foundLocations: inout [opencv2.Rect2i], foundWeights: inout [Swift.Double], hitThreshold: Swift.Double)
- }
- extension HOGDescriptor {
- @nonobjc public func detectMultiScale(img: opencv2.Mat, foundLocations: inout [opencv2.Rect2i], foundWeights: inout [Swift.Double])
- }
- extension QRCodeEncoder {
- @nonobjc public func encodeStructuredAppend(encoded_info: Swift.String, qrcodes: inout [opencv2.Mat])
- }
- extension ArucoDetector {
- @nonobjc public func detectMarkers(image: opencv2.Mat, corners: inout [opencv2.Mat], ids: opencv2.Mat, rejectedImgPoints: inout [opencv2.Mat])
- }
- extension ArucoDetector {
- @nonobjc public func detectMarkers(image: opencv2.Mat, corners: inout [opencv2.Mat], ids: opencv2.Mat)
- }
- extension ArucoDetector {
- @nonobjc public func refineDetectedMarkers(image: opencv2.Mat, board: opencv2.Board, detectedCorners: inout [opencv2.Mat], detectedIds: opencv2.Mat, rejectedCorners: inout [opencv2.Mat], cameraMatrix: opencv2.Mat, distCoeffs: opencv2.Mat, recoveredIdxs: opencv2.Mat)
- }
- extension ArucoDetector {
- @nonobjc public func refineDetectedMarkers(image: opencv2.Mat, board: opencv2.Board, detectedCorners: inout [opencv2.Mat], detectedIds: opencv2.Mat, rejectedCorners: inout [opencv2.Mat], cameraMatrix: opencv2.Mat, distCoeffs: opencv2.Mat)
- }
- extension ArucoDetector {
- @nonobjc public func refineDetectedMarkers(image: opencv2.Mat, board: opencv2.Board, detectedCorners: inout [opencv2.Mat], detectedIds: opencv2.Mat, rejectedCorners: inout [opencv2.Mat], cameraMatrix: opencv2.Mat)
- }
- extension ArucoDetector {
- @nonobjc public func refineDetectedMarkers(image: opencv2.Mat, board: opencv2.Board, detectedCorners: inout [opencv2.Mat], detectedIds: opencv2.Mat, rejectedCorners: inout [opencv2.Mat])
- }
- extension CharucoDetector {
- @nonobjc public func detectBoard(image: opencv2.Mat, charucoCorners: opencv2.Mat, charucoIds: opencv2.Mat, markerCorners: inout [opencv2.Mat], markerIds: opencv2.Mat)
- }
- extension CharucoDetector {
- @nonobjc public func detectBoard(image: opencv2.Mat, charucoCorners: opencv2.Mat, charucoIds: opencv2.Mat, markerCorners: inout [opencv2.Mat])
- }
- extension CharucoDetector {
- @nonobjc public func detectDiamonds(image: opencv2.Mat, diamondCorners: inout [opencv2.Mat], diamondIds: opencv2.Mat, markerCorners: inout [opencv2.Mat], markerIds: opencv2.Mat)
- }
- extension CharucoDetector {
- @nonobjc public func detectDiamonds(image: opencv2.Mat, diamondCorners: inout [opencv2.Mat], diamondIds: opencv2.Mat, markerCorners: inout [opencv2.Mat])
- }
- extension CharucoDetector {
- @nonobjc public func detectDiamonds(image: opencv2.Mat, diamondCorners: inout [opencv2.Mat], diamondIds: opencv2.Mat)
- }
- extension BarcodeDetector {
- @nonobjc public func decodeWithType(img: opencv2.Mat, points: opencv2.Mat, decoded_info: inout [Swift.String], decoded_type: inout [Swift.String]) -> Swift.Bool
- }
- extension BarcodeDetector {
- @nonobjc public func detectAndDecodeWithType(img: opencv2.Mat, decoded_info: inout [Swift.String], decoded_type: inout [Swift.String], points: opencv2.Mat) -> Swift.Bool
- }
- extension BarcodeDetector {
- @nonobjc public func detectAndDecodeWithType(img: opencv2.Mat, decoded_info: inout [Swift.String], decoded_type: inout [Swift.String]) -> Swift.Bool
- }
- extension Photo {
- @nonobjc public class func fastNlMeansDenoising(src: opencv2.Mat, dst: opencv2.Mat, hVector: [Swift.Float], templateWindowSize: Swift.Int32, searchWindowSize: Swift.Int32, normType: Swift.Int32)
- }
- extension Photo {
- @nonobjc public class func fastNlMeansDenoising(src: opencv2.Mat, dst: opencv2.Mat, hVector: [Swift.Float], templateWindowSize: Swift.Int32, searchWindowSize: Swift.Int32)
- }
- extension Photo {
- @nonobjc public class func fastNlMeansDenoising(src: opencv2.Mat, dst: opencv2.Mat, hVector: [Swift.Float], templateWindowSize: Swift.Int32)
- }
- extension Photo {
- @nonobjc public class func fastNlMeansDenoising(src: opencv2.Mat, dst: opencv2.Mat, hVector: [Swift.Float])
- }
- extension Photo {
- @nonobjc public class func fastNlMeansDenoisingMulti(srcImgs: [opencv2.Mat], dst: opencv2.Mat, imgToDenoiseIndex: Swift.Int32, temporalWindowSize: Swift.Int32, hVector: [Swift.Float], templateWindowSize: Swift.Int32, searchWindowSize: Swift.Int32, normType: Swift.Int32)
- }
- extension Photo {
- @nonobjc public class func fastNlMeansDenoisingMulti(srcImgs: [opencv2.Mat], dst: opencv2.Mat, imgToDenoiseIndex: Swift.Int32, temporalWindowSize: Swift.Int32, hVector: [Swift.Float], templateWindowSize: Swift.Int32, searchWindowSize: Swift.Int32)
- }
- extension Photo {
- @nonobjc public class func fastNlMeansDenoisingMulti(srcImgs: [opencv2.Mat], dst: opencv2.Mat, imgToDenoiseIndex: Swift.Int32, temporalWindowSize: Swift.Int32, hVector: [Swift.Float], templateWindowSize: Swift.Int32)
- }
- extension Photo {
- @nonobjc public class func fastNlMeansDenoisingMulti(srcImgs: [opencv2.Mat], dst: opencv2.Mat, imgToDenoiseIndex: Swift.Int32, temporalWindowSize: Swift.Int32, hVector: [Swift.Float])
- }
- extension SinusoidalPattern {
- @nonobjc public func findProCamMatches(projUnwrappedPhaseMap: opencv2.Mat, camUnwrappedPhaseMap: opencv2.Mat, matches: inout [opencv2.Mat])
- }
- extension StructuredLightPattern {
- @nonobjc public func generate(patternImages: inout [opencv2.Mat]) -> Swift.Bool
- }
- extension Text {
- @nonobjc public class func detectTextSWT(input: opencv2.Mat, result: inout [opencv2.Rect2i], dark_on_light: Swift.Bool, draw: opencv2.Mat, chainBBs: opencv2.Mat)
- }
- extension Text {
- @nonobjc public class func detectTextSWT(input: opencv2.Mat, result: inout [opencv2.Rect2i], dark_on_light: Swift.Bool, draw: opencv2.Mat)
- }
- extension Text {
- @nonobjc public class func detectTextSWT(input: opencv2.Mat, result: inout [opencv2.Rect2i], dark_on_light: Swift.Bool)
- }
- extension Text {
- @nonobjc public class func computeNMChannels(_src: opencv2.Mat, _channels: inout [opencv2.Mat], _mode: Swift.Int32)
- }
- extension Text {
- @nonobjc public class func computeNMChannels(_src: opencv2.Mat, _channels: inout [opencv2.Mat])
- }
- extension Text {
- @nonobjc public class func erGrouping(image: opencv2.Mat, channel: opencv2.Mat, regions: [[opencv2.Point2i]], groups_rects: inout [opencv2.Rect2i], method: opencv2.erGrouping_Modes, filename: Swift.String, minProbablity: Swift.Float)
- }
- extension Text {
- @nonobjc public class func erGrouping(image: opencv2.Mat, channel: opencv2.Mat, regions: [[opencv2.Point2i]], groups_rects: inout [opencv2.Rect2i], method: opencv2.erGrouping_Modes, filename: Swift.String)
- }
- extension Text {
- @nonobjc public class func erGrouping(image: opencv2.Mat, channel: opencv2.Mat, regions: [[opencv2.Point2i]], groups_rects: inout [opencv2.Rect2i], method: opencv2.erGrouping_Modes)
- }
- extension Text {
- @nonobjc public class func erGrouping(image: opencv2.Mat, channel: opencv2.Mat, regions: [[opencv2.Point2i]], groups_rects: inout [opencv2.Rect2i])
- }
- extension Text {
- @nonobjc public class func detectRegions(image: opencv2.Mat, er_filter1: opencv2.ERFilter, er_filter2: opencv2.ERFilter, regions: inout [[opencv2.Point2i]])
- }
- extension Text {
- @nonobjc public class func detectRegions(image: opencv2.Mat, er_filter1: opencv2.ERFilter, er_filter2: opencv2.ERFilter, groups_rects: inout [opencv2.Rect2i], method: opencv2.erGrouping_Modes, filename: Swift.String, minProbability: Swift.Float)
- }
- extension Text {
- @nonobjc public class func detectRegions(image: opencv2.Mat, er_filter1: opencv2.ERFilter, er_filter2: opencv2.ERFilter, groups_rects: inout [opencv2.Rect2i], method: opencv2.erGrouping_Modes, filename: Swift.String)
- }
- extension Text {
- @nonobjc public class func detectRegions(image: opencv2.Mat, er_filter1: opencv2.ERFilter, er_filter2: opencv2.ERFilter, groups_rects: inout [opencv2.Rect2i], method: opencv2.erGrouping_Modes)
- }
- extension Text {
- @nonobjc public class func detectRegions(image: opencv2.Mat, er_filter1: opencv2.ERFilter, er_filter2: opencv2.ERFilter, groups_rects: inout [opencv2.Rect2i])
- }
- extension TextDetector {
- @nonobjc public func detect(inputImage: opencv2.Mat, Bbox: inout [opencv2.Rect2i], confidence: inout [Swift.Float])
- }
- extension Video {
- @nonobjc public class func buildOpticalFlowPyramid(img: opencv2.Mat, pyramid: inout [opencv2.Mat], winSize: opencv2.Size2i, maxLevel: Swift.Int32, withDerivatives: Swift.Bool, pyrBorder: Swift.Int32, derivBorder: Swift.Int32, tryReuseInputImage: Swift.Bool) -> Swift.Int32
- }
- extension Video {
- @nonobjc public class func buildOpticalFlowPyramid(img: opencv2.Mat, pyramid: inout [opencv2.Mat], winSize: opencv2.Size2i, maxLevel: Swift.Int32, withDerivatives: Swift.Bool, pyrBorder: Swift.Int32, derivBorder: Swift.Int32) -> Swift.Int32
- }
- extension Video {
- @nonobjc public class func buildOpticalFlowPyramid(img: opencv2.Mat, pyramid: inout [opencv2.Mat], winSize: opencv2.Size2i, maxLevel: Swift.Int32, withDerivatives: Swift.Bool, pyrBorder: Swift.Int32) -> Swift.Int32
- }
- extension Video {
- @nonobjc public class func buildOpticalFlowPyramid(img: opencv2.Mat, pyramid: inout [opencv2.Mat], winSize: opencv2.Size2i, maxLevel: Swift.Int32, withDerivatives: Swift.Bool) -> Swift.Int32
- }
- extension Video {
- @nonobjc public class func buildOpticalFlowPyramid(img: opencv2.Mat, pyramid: inout [opencv2.Mat], winSize: opencv2.Size2i, maxLevel: Swift.Int32) -> Swift.Int32
- }
- extension VideoCapture {
- @nonobjc public convenience init(filename: Swift.String, apiPreference: Swift.Int32, params: [Swift.Int32])
- }
- extension VideoCapture {
- @nonobjc public convenience init(index: Swift.Int32, apiPreference: Swift.Int32, params: [Swift.Int32])
- }
- extension VideoCapture {
- @nonobjc public func open(filename: Swift.String, apiPreference: Swift.Int32, params: [Swift.Int32]) -> Swift.Bool
- }
- extension VideoCapture {
- @nonobjc public func open(index: Swift.Int32, apiPreference: Swift.Int32, params: [Swift.Int32]) -> Swift.Bool
- }
- extension VideoWriter {
- @nonobjc public convenience init(filename: Swift.String, fourcc: Swift.Int32, fps: Swift.Double, frameSize: opencv2.Size2i, params: [Swift.Int32])
- }
- extension VideoWriter {
- @nonobjc public convenience init(filename: Swift.String, apiPreference: Swift.Int32, fourcc: Swift.Int32, fps: Swift.Double, frameSize: opencv2.Size2i, params: [Swift.Int32])
- }
- extension VideoWriter {
- @nonobjc public func open(filename: Swift.String, fourcc: Swift.Int32, fps: Swift.Double, frameSize: opencv2.Size2i, params: [Swift.Int32]) -> Swift.Bool
- }
- extension VideoWriter {
- @nonobjc public func open(filename: Swift.String, apiPreference: Swift.Int32, fourcc: Swift.Int32, fps: Swift.Double, frameSize: opencv2.Size2i, params: [Swift.Int32]) -> Swift.Bool
- }
- extension WeChatQRCode {
- @nonobjc public func detectAndDecode(img: opencv2.Mat, points: inout [opencv2.Mat]) -> [Swift.String]
- }
- extension Xfeatures2d {
- @nonobjc public class func matchGMS(size1: opencv2.Size2i, size2: opencv2.Size2i, keypoints1: [opencv2.KeyPoint], keypoints2: [opencv2.KeyPoint], matches1to2: [opencv2.DMatch], matchesGMS: inout [opencv2.DMatch], withRotation: Swift.Bool, withScale: Swift.Bool, thresholdFactor: Swift.Double)
- }
- extension Xfeatures2d {
- @nonobjc public class func matchGMS(size1: opencv2.Size2i, size2: opencv2.Size2i, keypoints1: [opencv2.KeyPoint], keypoints2: [opencv2.KeyPoint], matches1to2: [opencv2.DMatch], matchesGMS: inout [opencv2.DMatch], withRotation: Swift.Bool, withScale: Swift.Bool)
- }
- extension Xfeatures2d {
- @nonobjc public class func matchGMS(size1: opencv2.Size2i, size2: opencv2.Size2i, keypoints1: [opencv2.KeyPoint], keypoints2: [opencv2.KeyPoint], matches1to2: [opencv2.DMatch], matchesGMS: inout [opencv2.DMatch], withRotation: Swift.Bool)
- }
- extension Xfeatures2d {
- @nonobjc public class func matchGMS(size1: opencv2.Size2i, size2: opencv2.Size2i, keypoints1: [opencv2.KeyPoint], keypoints2: [opencv2.KeyPoint], matches1to2: [opencv2.DMatch], matchesGMS: inout [opencv2.DMatch])
- }
- extension Xfeatures2d {
- @nonobjc public class func matchLOGOS(keypoints1: [opencv2.KeyPoint], keypoints2: [opencv2.KeyPoint], nn1: [Swift.Int32], nn2: [Swift.Int32], matches1to2: [opencv2.DMatch])
- }
- extension FREAK {
- @nonobjc public class func create(orientationNormalized: Swift.Bool, scaleNormalized: Swift.Bool, patternScale: Swift.Float, nOctaves: Swift.Int32, selectedPairs: [Swift.Int32]) -> opencv2.FREAK
- }
- extension PCTSignatures {
- @nonobjc public class func create(initSamplingPoints: [opencv2.Point2f], initClusterSeedIndexes: [Swift.Int32]) -> opencv2.PCTSignatures
- }
- extension PCTSignatures {
- @nonobjc public func setWeights(weights: [Swift.Float])
- }
- extension PCTSignatures {
- @nonobjc public func setTranslations(translations: [Swift.Float])
- }
- extension PCTSignatures {
- @nonobjc public func setInitSeedIndexes(initSeedIndexes: [Swift.Int32])
- }
- extension PCTSignaturesSQFD {
- @nonobjc public func computeQuadraticFormDistances(sourceSignature: opencv2.Mat, imageSignatures: [opencv2.Mat], distances: [Swift.Float])
- }
- extension EdgeBoxes {
- @nonobjc public func getBoundingBoxes(edge_map: opencv2.Mat, orientation_map: opencv2.Mat, boxes: inout [opencv2.Rect2i], scores: opencv2.Mat)
- }
- extension EdgeBoxes {
- @nonobjc public func getBoundingBoxes(edge_map: opencv2.Mat, orientation_map: opencv2.Mat, boxes: inout [opencv2.Rect2i])
- }
- extension SelectiveSearchSegmentation {
- @nonobjc public func process(rects: inout [opencv2.Rect2i])
- }
|