using OpenCVForUnity.CoreModule;
using OpenCVForUnity.UtilsModule;
using System;
using System.Collections.Generic;
using System.Runtime.InteropServices;

namespace OpenCVForUnity.MlModule
{

    // C++: class SVM
    /**
     * Support Vector Machines.
     *
     * SEE: REF: ml_intro_svm
     */
    public class SVM : StatModel
    {

        protected override void Dispose(bool disposing)
        {
            try
            {
                if (disposing)
                {
                }
                if (IsEnabledDispose)
                {
                    if (nativeObj != IntPtr.Zero)
                        ml_SVM_delete(nativeObj);
                    nativeObj = IntPtr.Zero;
                }
            }
            finally
            {
                base.Dispose(disposing);
            }
        }

        protected internal SVM(IntPtr addr) : base(addr) { }

        // internal usage only
        public static new SVM __fromPtr__(IntPtr addr) { return new SVM(addr); }

        // C++: enum cv.ml.SVM.KernelTypes
        public const int CUSTOM = -1;
        public const int LINEAR = 0;
        public const int POLY = 1;
        public const int RBF = 2;
        public const int SIGMOID = 3;
        public const int CHI2 = 4;
        public const int INTER = 5;

        // C++: enum cv.ml.SVM.ParamTypes
        public const int C = 0;
        public const int GAMMA = 1;
        public const int P = 2;
        public const int NU = 3;
        public const int COEF = 4;
        public const int DEGREE = 5;

        // C++: enum cv.ml.SVM.Types
        public const int C_SVC = 100;
        public const int NU_SVC = 101;
        public const int ONE_CLASS = 102;
        public const int EPS_SVR = 103;
        public const int NU_SVR = 104;

        //
        // C++: int cv::ml::SVM::getType()
        //

        /**
         * SEE: setType
         * return automatically generated
         */
        public int getType()
        {
            ThrowIfDisposed();
            return ml_SVM_getType_10(nativeObj);
        }

        //
        // C++: void cv::ml::SVM::setType(int val)
        //

        /**
         * SEE: getType
         * param val automatically generated
         */
        public void setType(int val)
        {
            ThrowIfDisposed();
            ml_SVM_setType_10(nativeObj, val);
        }

        //
        // C++: double cv::ml::SVM::getGamma()
        //

        /**
         * SEE: setGamma
         * return automatically generated
         */
        public double getGamma()
        {
            ThrowIfDisposed();
            return ml_SVM_getGamma_10(nativeObj);
        }

        //
        // C++: void cv::ml::SVM::setGamma(double val)
        //

        /**
         * SEE: getGamma
         * param val automatically generated
         */
        public void setGamma(double val)
        {
            ThrowIfDisposed();
            ml_SVM_setGamma_10(nativeObj, val);
        }

        //
        // C++: double cv::ml::SVM::getCoef0()
        //

        /**
         * SEE: setCoef0
         * return automatically generated
         */
        public double getCoef0()
        {
            ThrowIfDisposed();
            return ml_SVM_getCoef0_10(nativeObj);
        }

        //
        // C++: void cv::ml::SVM::setCoef0(double val)
        //

        /**
         * SEE: getCoef0
         * param val automatically generated
         */
        public void setCoef0(double val)
        {
            ThrowIfDisposed();
            ml_SVM_setCoef0_10(nativeObj, val);
        }

        //
        // C++: double cv::ml::SVM::getDegree()
        //

        /**
         * SEE: setDegree
         * return automatically generated
         */
        public double getDegree()
        {
            ThrowIfDisposed();
            return ml_SVM_getDegree_10(nativeObj);
        }

        //
        // C++: void cv::ml::SVM::setDegree(double val)
        //

        /**
         * SEE: getDegree
         * param val automatically generated
         */
        public void setDegree(double val)
        {
            ThrowIfDisposed();
            ml_SVM_setDegree_10(nativeObj, val);
        }

        //
        // C++: double cv::ml::SVM::getC()
        //

        /**
         * SEE: setC
         * return automatically generated
         */
        public double getC()
        {
            ThrowIfDisposed();
            return ml_SVM_getC_10(nativeObj);
        }

        //
        // C++: void cv::ml::SVM::setC(double val)
        //

        /**
         * SEE: getC
         * param val automatically generated
         */
        public void setC(double val)
        {
            ThrowIfDisposed();
            ml_SVM_setC_10(nativeObj, val);
        }

        //
        // C++: double cv::ml::SVM::getNu()
        //

        /**
         * SEE: setNu
         * return automatically generated
         */
        public double getNu()
        {
            ThrowIfDisposed();
            return ml_SVM_getNu_10(nativeObj);
        }

        //
        // C++: void cv::ml::SVM::setNu(double val)
        //

        /**
         * SEE: getNu
         * param val automatically generated
         */
        public void setNu(double val)
        {
            ThrowIfDisposed();
            ml_SVM_setNu_10(nativeObj, val);
        }

        //
        // C++: double cv::ml::SVM::getP()
        //

        /**
         * SEE: setP
         * return automatically generated
         */
        public double getP()
        {
            ThrowIfDisposed();
            return ml_SVM_getP_10(nativeObj);
        }

        //
        // C++: void cv::ml::SVM::setP(double val)
        //

        /**
         * SEE: getP
         * param val automatically generated
         */
        public void setP(double val)
        {
            ThrowIfDisposed();
            ml_SVM_setP_10(nativeObj, val);
        }

        //
        // C++: Mat cv::ml::SVM::getClassWeights()
        //

        /**
         * SEE: setClassWeights
         * return automatically generated
         */
        public Mat getClassWeights()
        {
            ThrowIfDisposed();
            return new Mat(DisposableObject.ThrowIfNullIntPtr(ml_SVM_getClassWeights_10(nativeObj)));
        }

        //
        // C++: void cv::ml::SVM::setClassWeights(Mat val)
        //

        /**
         * SEE: getClassWeights
         * param val automatically generated
         */
        public void setClassWeights(Mat val)
        {
            ThrowIfDisposed();
            if (val != null) val.ThrowIfDisposed();
            ml_SVM_setClassWeights_10(nativeObj, val.nativeObj);
        }

        //
        // C++: TermCriteria cv::ml::SVM::getTermCriteria()
        //

        /**
         * SEE: setTermCriteria
         * return automatically generated
         */
        public TermCriteria getTermCriteria()
        {
            ThrowIfDisposed();
            double[] tmpArray = new double[3];
            ml_SVM_getTermCriteria_10(nativeObj, tmpArray);
            TermCriteria retVal = new TermCriteria(tmpArray);
            return retVal;
        }

        //
        // C++: void cv::ml::SVM::setTermCriteria(TermCriteria val)
        //

        /**
         * SEE: getTermCriteria
         * param val automatically generated
         */
        public void setTermCriteria(TermCriteria val)
        {
            ThrowIfDisposed();
            ml_SVM_setTermCriteria_10(nativeObj, val.type, val.maxCount, val.epsilon);
        }
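
        // Usage sketch (illustrative only, not part of the generated wrapper): cap the solver's
        // iterations and tolerance through the termination criteria. Assumes OpenCVForUnity's
        // TermCriteria type with COUNT/EPS flags and a (type, maxCount, epsilon) constructor.
        //
        //   svm.setTermCriteria(new TermCriteria(TermCriteria.COUNT + TermCriteria.EPS, 1000, 1e-6));
        //   TermCriteria crit = svm.getTermCriteria();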

        //
        // C++: int cv::ml::SVM::getKernelType()
        //

        /**
         * Type of a %SVM kernel.
         * See SVM::KernelTypes. Default value is SVM::RBF.
         * return automatically generated
         */
        public int getKernelType()
        {
            ThrowIfDisposed();
            return ml_SVM_getKernelType_10(nativeObj);
        }

        //
        // C++: void cv::ml::SVM::setKernel(int kernelType)
        //

        /**
         * Initialize with one of predefined kernels.
         * See SVM::KernelTypes.
         * param kernelType automatically generated
         */
        public void setKernel(int kernelType)
        {
            ThrowIfDisposed();
            ml_SVM_setKernel_10(nativeObj, kernelType);
        }

        //
        // C++: bool cv::ml::SVM::trainAuto(Mat samples, int layout, Mat responses, int kFold = 10, Ptr_ParamGrid Cgrid = SVM::getDefaultGridPtr(SVM::C), Ptr_ParamGrid gammaGrid = SVM::getDefaultGridPtr(SVM::GAMMA), Ptr_ParamGrid pGrid = SVM::getDefaultGridPtr(SVM::P), Ptr_ParamGrid nuGrid = SVM::getDefaultGridPtr(SVM::NU), Ptr_ParamGrid coeffGrid = SVM::getDefaultGridPtr(SVM::COEF), Ptr_ParamGrid degreeGrid = SVM::getDefaultGridPtr(SVM::DEGREE), bool balanced = false)
        //

        /**
         * Trains an %SVM with optimal parameters
         *
         * param samples training samples
         * param layout See ml::SampleTypes.
         * param responses vector of responses associated with the training samples.
         * param kFold Cross-validation parameter. The training set is divided into kFold subsets. One
         * subset is used to test the model, the others form the train set. So, the %SVM algorithm is
         * executed kFold times.
         * param Cgrid grid for C
         * param gammaGrid grid for gamma
         * param pGrid grid for p
         * param nuGrid grid for nu
         * param coeffGrid grid for coeff
         * param degreeGrid grid for degree
         * param balanced If true and the problem is 2-class classification then the method creates more
         * balanced cross-validation subsets, that is, the proportions between classes in the subsets are
         * kept close to the proportions in the whole train dataset.
         *
         * The method trains the %SVM model automatically by choosing the optimal parameters C, gamma, p,
         * nu, coef0, degree. Parameters are considered optimal when the cross-validation
         * estimate of the test set error is minimal.
         *
         * This function only makes use of SVM::getDefaultGrid for parameter optimization and thus only
         * offers rudimentary parameter options.
         *
         * This function works for the classification (SVM::C_SVC or SVM::NU_SVC) as well as for the
         * regression (SVM::EPS_SVR or SVM::NU_SVR). If it is SVM::ONE_CLASS, no optimization is made and
         * the usual %SVM with parameters specified in params is executed.
         * return automatically generated
         */
        public bool trainAuto(Mat samples, int layout, Mat responses, int kFold, ParamGrid Cgrid, ParamGrid gammaGrid, ParamGrid pGrid, ParamGrid nuGrid, ParamGrid coeffGrid, ParamGrid degreeGrid, bool balanced)
        {
            ThrowIfDisposed();
            if (samples != null) samples.ThrowIfDisposed();
            if (responses != null) responses.ThrowIfDisposed();
            if (Cgrid != null) Cgrid.ThrowIfDisposed();
            if (gammaGrid != null) gammaGrid.ThrowIfDisposed();
            if (pGrid != null) pGrid.ThrowIfDisposed();
            if (nuGrid != null) nuGrid.ThrowIfDisposed();
            if (coeffGrid != null) coeffGrid.ThrowIfDisposed();
            if (degreeGrid != null) degreeGrid.ThrowIfDisposed();
            return ml_SVM_trainAuto_10(nativeObj, samples.nativeObj, layout, responses.nativeObj, kFold, Cgrid.getNativeObjAddr(), gammaGrid.getNativeObjAddr(), pGrid.getNativeObjAddr(), nuGrid.getNativeObjAddr(), coeffGrid.getNativeObjAddr(), degreeGrid.getNativeObjAddr(), balanced);
        }
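
        // Usage sketch (illustrative only, not part of the generated wrapper): run the automatic
        // parameter search with the default grids for every parameter. Assumes `samples` is a
        // CV_32F Mat with one sample per row, `responses` holds the labels, and Ml.ROW_SAMPLE
        // comes from this module's Ml constants.
        //
        //   bool ok = svm.trainAuto(samples, Ml.ROW_SAMPLE, responses, 10,
        //       SVM.getDefaultGridPtr(SVM.C), SVM.getDefaultGridPtr(SVM.GAMMA),
        //       SVM.getDefaultGridPtr(SVM.P), SVM.getDefaultGridPtr(SVM.NU),
        //       SVM.getDefaultGridPtr(SVM.COEF), SVM.getDefaultGridPtr(SVM.DEGREE), true);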

        /**
         * Trains an %SVM with optimal parameters
         *
         * param samples training samples
         * param layout See ml::SampleTypes.
         * param responses vector of responses associated with the training samples.
         * param kFold Cross-validation parameter. The training set is divided into kFold subsets. One
         * subset is used to test the model, the others form the train set. So, the %SVM algorithm is
         * executed kFold times.
         * param Cgrid grid for C
         * param gammaGrid grid for gamma
         * param pGrid grid for p
         * param nuGrid grid for nu
         * param coeffGrid grid for coeff
         * param degreeGrid grid for degree
         *
         * The method trains the %SVM model automatically by choosing the optimal parameters C, gamma, p,
         * nu, coef0, degree. Parameters are considered optimal when the cross-validation
         * estimate of the test set error is minimal.
         *
         * This function only makes use of SVM::getDefaultGrid for parameter optimization and thus only
         * offers rudimentary parameter options.
         *
         * This function works for the classification (SVM::C_SVC or SVM::NU_SVC) as well as for the
         * regression (SVM::EPS_SVR or SVM::NU_SVR). If it is SVM::ONE_CLASS, no optimization is made and
         * the usual %SVM with parameters specified in params is executed.
         * return automatically generated
         */
        public bool trainAuto(Mat samples, int layout, Mat responses, int kFold, ParamGrid Cgrid, ParamGrid gammaGrid, ParamGrid pGrid, ParamGrid nuGrid, ParamGrid coeffGrid, ParamGrid degreeGrid)
        {
            ThrowIfDisposed();
            if (samples != null) samples.ThrowIfDisposed();
            if (responses != null) responses.ThrowIfDisposed();
            if (Cgrid != null) Cgrid.ThrowIfDisposed();
            if (gammaGrid != null) gammaGrid.ThrowIfDisposed();
            if (pGrid != null) pGrid.ThrowIfDisposed();
            if (nuGrid != null) nuGrid.ThrowIfDisposed();
            if (coeffGrid != null) coeffGrid.ThrowIfDisposed();
            if (degreeGrid != null) degreeGrid.ThrowIfDisposed();
            return ml_SVM_trainAuto_11(nativeObj, samples.nativeObj, layout, responses.nativeObj, kFold, Cgrid.getNativeObjAddr(), gammaGrid.getNativeObjAddr(), pGrid.getNativeObjAddr(), nuGrid.getNativeObjAddr(), coeffGrid.getNativeObjAddr(), degreeGrid.getNativeObjAddr());
        }

        /**
         * Trains an %SVM with optimal parameters
         *
         * param samples training samples
         * param layout See ml::SampleTypes.
         * param responses vector of responses associated with the training samples.
         * param kFold Cross-validation parameter. The training set is divided into kFold subsets. One
         * subset is used to test the model, the others form the train set. So, the %SVM algorithm is
         * executed kFold times.
         * param Cgrid grid for C
         * param gammaGrid grid for gamma
         * param pGrid grid for p
         * param nuGrid grid for nu
         * param coeffGrid grid for coeff
         *
         * The method trains the %SVM model automatically by choosing the optimal parameters C, gamma, p,
         * nu, coef0, degree. Parameters are considered optimal when the cross-validation
         * estimate of the test set error is minimal.
         *
         * This function only makes use of SVM::getDefaultGrid for parameter optimization and thus only
         * offers rudimentary parameter options.
         *
         * This function works for the classification (SVM::C_SVC or SVM::NU_SVC) as well as for the
         * regression (SVM::EPS_SVR or SVM::NU_SVR). If it is SVM::ONE_CLASS, no optimization is made and
         * the usual %SVM with parameters specified in params is executed.
         * return automatically generated
         */
        public bool trainAuto(Mat samples, int layout, Mat responses, int kFold, ParamGrid Cgrid, ParamGrid gammaGrid, ParamGrid pGrid, ParamGrid nuGrid, ParamGrid coeffGrid)
        {
            ThrowIfDisposed();
            if (samples != null) samples.ThrowIfDisposed();
            if (responses != null) responses.ThrowIfDisposed();
            if (Cgrid != null) Cgrid.ThrowIfDisposed();
            if (gammaGrid != null) gammaGrid.ThrowIfDisposed();
            if (pGrid != null) pGrid.ThrowIfDisposed();
            if (nuGrid != null) nuGrid.ThrowIfDisposed();
            if (coeffGrid != null) coeffGrid.ThrowIfDisposed();
            return ml_SVM_trainAuto_12(nativeObj, samples.nativeObj, layout, responses.nativeObj, kFold, Cgrid.getNativeObjAddr(), gammaGrid.getNativeObjAddr(), pGrid.getNativeObjAddr(), nuGrid.getNativeObjAddr(), coeffGrid.getNativeObjAddr());
        }

        /**
         * Trains an %SVM with optimal parameters
         *
         * param samples training samples
         * param layout See ml::SampleTypes.
         * param responses vector of responses associated with the training samples.
         * param kFold Cross-validation parameter. The training set is divided into kFold subsets. One
         * subset is used to test the model, the others form the train set. So, the %SVM algorithm is
         * executed kFold times.
         * param Cgrid grid for C
         * param gammaGrid grid for gamma
         * param pGrid grid for p
         * param nuGrid grid for nu
         *
         * The method trains the %SVM model automatically by choosing the optimal parameters C, gamma, p,
         * nu, coef0, degree. Parameters are considered optimal when the cross-validation
         * estimate of the test set error is minimal.
         *
         * This function only makes use of SVM::getDefaultGrid for parameter optimization and thus only
         * offers rudimentary parameter options.
         *
         * This function works for the classification (SVM::C_SVC or SVM::NU_SVC) as well as for the
         * regression (SVM::EPS_SVR or SVM::NU_SVR). If it is SVM::ONE_CLASS, no optimization is made and
         * the usual %SVM with parameters specified in params is executed.
         * return automatically generated
         */
        public bool trainAuto(Mat samples, int layout, Mat responses, int kFold, ParamGrid Cgrid, ParamGrid gammaGrid, ParamGrid pGrid, ParamGrid nuGrid)
        {
            ThrowIfDisposed();
            if (samples != null) samples.ThrowIfDisposed();
            if (responses != null) responses.ThrowIfDisposed();
            if (Cgrid != null) Cgrid.ThrowIfDisposed();
            if (gammaGrid != null) gammaGrid.ThrowIfDisposed();
            if (pGrid != null) pGrid.ThrowIfDisposed();
            if (nuGrid != null) nuGrid.ThrowIfDisposed();
            return ml_SVM_trainAuto_13(nativeObj, samples.nativeObj, layout, responses.nativeObj, kFold, Cgrid.getNativeObjAddr(), gammaGrid.getNativeObjAddr(), pGrid.getNativeObjAddr(), nuGrid.getNativeObjAddr());
        }

        /**
         * Trains an %SVM with optimal parameters
         *
         * param samples training samples
         * param layout See ml::SampleTypes.
         * param responses vector of responses associated with the training samples.
         * param kFold Cross-validation parameter. The training set is divided into kFold subsets. One
         * subset is used to test the model, the others form the train set. So, the %SVM algorithm is
         * executed kFold times.
         * param Cgrid grid for C
         * param gammaGrid grid for gamma
         * param pGrid grid for p
         *
         * The method trains the %SVM model automatically by choosing the optimal parameters C, gamma, p,
         * nu, coef0, degree. Parameters are considered optimal when the cross-validation
         * estimate of the test set error is minimal.
         *
         * This function only makes use of SVM::getDefaultGrid for parameter optimization and thus only
         * offers rudimentary parameter options.
         *
         * This function works for the classification (SVM::C_SVC or SVM::NU_SVC) as well as for the
         * regression (SVM::EPS_SVR or SVM::NU_SVR). If it is SVM::ONE_CLASS, no optimization is made and
         * the usual %SVM with parameters specified in params is executed.
         * return automatically generated
         */
        public bool trainAuto(Mat samples, int layout, Mat responses, int kFold, ParamGrid Cgrid, ParamGrid gammaGrid, ParamGrid pGrid)
        {
            ThrowIfDisposed();
            if (samples != null) samples.ThrowIfDisposed();
            if (responses != null) responses.ThrowIfDisposed();
            if (Cgrid != null) Cgrid.ThrowIfDisposed();
            if (gammaGrid != null) gammaGrid.ThrowIfDisposed();
            if (pGrid != null) pGrid.ThrowIfDisposed();
            return ml_SVM_trainAuto_14(nativeObj, samples.nativeObj, layout, responses.nativeObj, kFold, Cgrid.getNativeObjAddr(), gammaGrid.getNativeObjAddr(), pGrid.getNativeObjAddr());
        }

        /**
         * Trains an %SVM with optimal parameters
         *
         * param samples training samples
         * param layout See ml::SampleTypes.
         * param responses vector of responses associated with the training samples.
         * param kFold Cross-validation parameter. The training set is divided into kFold subsets. One
         * subset is used to test the model, the others form the train set. So, the %SVM algorithm is
         * executed kFold times.
         * param Cgrid grid for C
         * param gammaGrid grid for gamma
         *
         * The method trains the %SVM model automatically by choosing the optimal parameters C, gamma, p,
         * nu, coef0, degree. Parameters are considered optimal when the cross-validation
         * estimate of the test set error is minimal.
         *
         * This function only makes use of SVM::getDefaultGrid for parameter optimization and thus only
         * offers rudimentary parameter options.
         *
         * This function works for the classification (SVM::C_SVC or SVM::NU_SVC) as well as for the
         * regression (SVM::EPS_SVR or SVM::NU_SVR). If it is SVM::ONE_CLASS, no optimization is made and
         * the usual %SVM with parameters specified in params is executed.
         * return automatically generated
         */
        public bool trainAuto(Mat samples, int layout, Mat responses, int kFold, ParamGrid Cgrid, ParamGrid gammaGrid)
        {
            ThrowIfDisposed();
            if (samples != null) samples.ThrowIfDisposed();
            if (responses != null) responses.ThrowIfDisposed();
            if (Cgrid != null) Cgrid.ThrowIfDisposed();
            if (gammaGrid != null) gammaGrid.ThrowIfDisposed();
            return ml_SVM_trainAuto_15(nativeObj, samples.nativeObj, layout, responses.nativeObj, kFold, Cgrid.getNativeObjAddr(), gammaGrid.getNativeObjAddr());
        }

        /**
         * Trains an %SVM with optimal parameters
         *
         * param samples training samples
         * param layout See ml::SampleTypes.
         * param responses vector of responses associated with the training samples.
         * param kFold Cross-validation parameter. The training set is divided into kFold subsets. One
         * subset is used to test the model, the others form the train set. So, the %SVM algorithm is
         * executed kFold times.
         * param Cgrid grid for C
         *
         * The method trains the %SVM model automatically by choosing the optimal parameters C, gamma, p,
         * nu, coef0, degree. Parameters are considered optimal when the cross-validation
         * estimate of the test set error is minimal.
         *
         * This function only makes use of SVM::getDefaultGrid for parameter optimization and thus only
         * offers rudimentary parameter options.
         *
         * This function works for the classification (SVM::C_SVC or SVM::NU_SVC) as well as for the
         * regression (SVM::EPS_SVR or SVM::NU_SVR). If it is SVM::ONE_CLASS, no optimization is made and
         * the usual %SVM with parameters specified in params is executed.
         * return automatically generated
         */
        public bool trainAuto(Mat samples, int layout, Mat responses, int kFold, ParamGrid Cgrid)
        {
            ThrowIfDisposed();
            if (samples != null) samples.ThrowIfDisposed();
            if (responses != null) responses.ThrowIfDisposed();
            if (Cgrid != null) Cgrid.ThrowIfDisposed();
            return ml_SVM_trainAuto_16(nativeObj, samples.nativeObj, layout, responses.nativeObj, kFold, Cgrid.getNativeObjAddr());
        }

        /**
         * Trains an %SVM with optimal parameters
         *
         * param samples training samples
         * param layout See ml::SampleTypes.
         * param responses vector of responses associated with the training samples.
         * param kFold Cross-validation parameter. The training set is divided into kFold subsets. One
         * subset is used to test the model, the others form the train set. So, the %SVM algorithm is
         * executed kFold times.
         *
         * The method trains the %SVM model automatically by choosing the optimal parameters C, gamma, p,
         * nu, coef0, degree. Parameters are considered optimal when the cross-validation
         * estimate of the test set error is minimal.
         *
         * This function only makes use of SVM::getDefaultGrid for parameter optimization and thus only
         * offers rudimentary parameter options.
         *
         * This function works for the classification (SVM::C_SVC or SVM::NU_SVC) as well as for the
         * regression (SVM::EPS_SVR or SVM::NU_SVR). If it is SVM::ONE_CLASS, no optimization is made and
         * the usual %SVM with parameters specified in params is executed.
         * return automatically generated
         */
        public bool trainAuto(Mat samples, int layout, Mat responses, int kFold)
        {
            ThrowIfDisposed();
            if (samples != null) samples.ThrowIfDisposed();
            if (responses != null) responses.ThrowIfDisposed();
            return ml_SVM_trainAuto_17(nativeObj, samples.nativeObj, layout, responses.nativeObj, kFold);
        }

        /**
         * Trains an %SVM with optimal parameters
         *
         * param samples training samples
         * param layout See ml::SampleTypes.
         * param responses vector of responses associated with the training samples.
         *
         * The method trains the %SVM model automatically by choosing the optimal parameters C, gamma, p,
         * nu, coef0, degree. Parameters are considered optimal when the cross-validation
         * estimate of the test set error is minimal.
         *
         * This function only makes use of SVM::getDefaultGrid for parameter optimization and thus only
         * offers rudimentary parameter options.
         *
         * This function works for the classification (SVM::C_SVC or SVM::NU_SVC) as well as for the
         * regression (SVM::EPS_SVR or SVM::NU_SVR). If it is SVM::ONE_CLASS, no optimization is made and
         * the usual %SVM with parameters specified in params is executed.
         * return automatically generated
         */
        public bool trainAuto(Mat samples, int layout, Mat responses)
        {
            ThrowIfDisposed();
            if (samples != null) samples.ThrowIfDisposed();
            if (responses != null) responses.ThrowIfDisposed();
            return ml_SVM_trainAuto_18(nativeObj, samples.nativeObj, layout, responses.nativeObj);
        }

        //
        // C++: Mat cv::ml::SVM::getSupportVectors()
        //

        /**
         * Retrieves all the support vectors
         *
         * The method returns all the support vectors as a floating-point matrix, where support vectors are
         * stored as matrix rows.
         * return automatically generated
         */
        public Mat getSupportVectors()
        {
            ThrowIfDisposed();
            return new Mat(DisposableObject.ThrowIfNullIntPtr(ml_SVM_getSupportVectors_10(nativeObj)));
        }

        //
        // C++: Mat cv::ml::SVM::getUncompressedSupportVectors()
        //

        /**
         * Retrieves all the uncompressed support vectors of a linear %SVM
         *
         * The method returns all the uncompressed support vectors of a linear %SVM that the compressed
         * support vector, used for prediction, was derived from. They are returned in a floating-point
         * matrix, where the support vectors are stored as matrix rows.
         * return automatically generated
         */
        public Mat getUncompressedSupportVectors()
        {
            ThrowIfDisposed();
            return new Mat(DisposableObject.ThrowIfNullIntPtr(ml_SVM_getUncompressedSupportVectors_10(nativeObj)));
        }

        //
        // C++: double cv::ml::SVM::getDecisionFunction(int i, Mat& alpha, Mat& svidx)
        //

        /**
         * Retrieves the decision function
         *
         * param i the index of the decision function. If the problem solved is regression, 1-class or
         * 2-class classification, then there will be just one decision function and the index should
         * always be 0. Otherwise, in the case of N-class classification, there will be \(N(N-1)/2\)
         * decision functions.
         * param alpha the optional output vector for weights, corresponding to different support vectors.
         * In the case of linear %SVM all the alpha's will be 1's.
         * param svidx the optional output vector of indices of support vectors within the matrix of
         * support vectors (which can be retrieved by SVM::getSupportVectors). In the case of linear
         * %SVM each decision function consists of a single "compressed" support vector.
         *
         * The method returns rho parameter of the decision function, a scalar subtracted from the weighted
         * sum of kernel responses.
         * return automatically generated
         */
        public double getDecisionFunction(int i, Mat alpha, Mat svidx)
        {
            ThrowIfDisposed();
            if (alpha != null) alpha.ThrowIfDisposed();
            if (svidx != null) svidx.ThrowIfDisposed();
            return ml_SVM_getDecisionFunction_10(nativeObj, i, alpha.nativeObj, svidx.nativeObj);
        }
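
        // Usage sketch (illustrative only, not part of the generated wrapper): inspect a trained
        // model. For regression, 1-class, or 2-class problems there is a single decision function
        // at index 0; `alpha` and `svidx` are filled by the call and `rho` is the returned offset.
        //
        //   Mat sv = svm.getSupportVectors();      // one support vector per row
        //   Mat alpha = new Mat();
        //   Mat svidx = new Mat();
        //   double rho = svm.getDecisionFunction(0, alpha, svidx);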

        //
        // C++: static Ptr_ParamGrid cv::ml::SVM::getDefaultGridPtr(int param_id)
        //

        /**
         * Generates a grid for %SVM parameters.
         *
         * param param_id %SVM parameters IDs that must be one of the SVM::ParamTypes. The grid is
         * generated for the parameter with this ID.
         *
         * The function generates a grid pointer for the specified parameter of the %SVM algorithm.
         * The grid may be passed to the function SVM::trainAuto.
         * return automatically generated
         */
        public static ParamGrid getDefaultGridPtr(int param_id)
        {
            return ParamGrid.__fromPtr__(DisposableObject.ThrowIfNullIntPtr(ml_SVM_getDefaultGridPtr_10(param_id)));
        }

        //
        // C++: static Ptr_SVM cv::ml::SVM::create()
        //

        /**
         * Creates empty model.
         * Use StatModel::train to train the model. Since %SVM has several parameters, you may want to
         * find the best parameters for your problem; this can be done with SVM::trainAuto.
         * return automatically generated
         */
        public static SVM create()
        {
            return SVM.__fromPtr__(DisposableObject.ThrowIfNullIntPtr(ml_SVM_create_10()));
        }
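
        // Usage sketch (illustrative only, not part of the generated wrapper): create, configure
        // and train a linear C-SVC, then predict one row. Assumes the OpenCVForUnity CoreModule
        // types (Mat, CvType) and Ml.ROW_SAMPLE; `samples` is a CV_32F Mat with one sample per
        // row, `responses` a CV_32S column of labels, and `testRow` a 1 x N CV_32F Mat.
        //
        //   using (SVM svm = SVM.create())
        //   {
        //       svm.setType(SVM.C_SVC);
        //       svm.setKernel(SVM.LINEAR);
        //       svm.setC(1.0);
        //       svm.train(samples, Ml.ROW_SAMPLE, responses);
        //       float label = svm.predict(testRow);
        //   }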

        //
        // C++: static Ptr_SVM cv::ml::SVM::load(String filepath)
        //

        /**
         * Loads and creates a serialized svm from a file
         *
         * Use SVM::save to serialize and store an SVM to disk.
         * Load the SVM from this file again, by calling this function with the path to the file.
         *
         * param filepath path to serialized svm
         * return automatically generated
         */
        public static SVM load(string filepath)
        {
            return SVM.__fromPtr__(DisposableObject.ThrowIfNullIntPtr(ml_SVM_load_10(filepath)));
        }
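
        // Usage sketch (illustrative only, not part of the generated wrapper): round-trip a
        // trained model through the serialized file format. Assumes `save` is inherited from the
        // Algorithm base class; the file path is only an example.
        //
        //   svm.save("svm_model.xml");
        //   SVM restored = SVM.load("svm_model.xml");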

#if (UNITY_IOS || UNITY_WEBGL) && !UNITY_EDITOR
        const string LIBNAME = "__Internal";
#else
        const string LIBNAME = "opencvforunity";
#endif

        // C++: int cv::ml::SVM::getType()
        [DllImport(LIBNAME)]
        private static extern int ml_SVM_getType_10(IntPtr nativeObj);

        // C++: void cv::ml::SVM::setType(int val)
        [DllImport(LIBNAME)]
        private static extern void ml_SVM_setType_10(IntPtr nativeObj, int val);

        // C++: double cv::ml::SVM::getGamma()
        [DllImport(LIBNAME)]
        private static extern double ml_SVM_getGamma_10(IntPtr nativeObj);

        // C++: void cv::ml::SVM::setGamma(double val)
        [DllImport(LIBNAME)]
        private static extern void ml_SVM_setGamma_10(IntPtr nativeObj, double val);

        // C++: double cv::ml::SVM::getCoef0()
        [DllImport(LIBNAME)]
        private static extern double ml_SVM_getCoef0_10(IntPtr nativeObj);

        // C++: void cv::ml::SVM::setCoef0(double val)
        [DllImport(LIBNAME)]
        private static extern void ml_SVM_setCoef0_10(IntPtr nativeObj, double val);

        // C++: double cv::ml::SVM::getDegree()
        [DllImport(LIBNAME)]
        private static extern double ml_SVM_getDegree_10(IntPtr nativeObj);

        // C++: void cv::ml::SVM::setDegree(double val)
        [DllImport(LIBNAME)]
        private static extern void ml_SVM_setDegree_10(IntPtr nativeObj, double val);

        // C++: double cv::ml::SVM::getC()
        [DllImport(LIBNAME)]
        private static extern double ml_SVM_getC_10(IntPtr nativeObj);

        // C++: void cv::ml::SVM::setC(double val)
        [DllImport(LIBNAME)]
        private static extern void ml_SVM_setC_10(IntPtr nativeObj, double val);

        // C++: double cv::ml::SVM::getNu()
        [DllImport(LIBNAME)]
        private static extern double ml_SVM_getNu_10(IntPtr nativeObj);

        // C++: void cv::ml::SVM::setNu(double val)
        [DllImport(LIBNAME)]
        private static extern void ml_SVM_setNu_10(IntPtr nativeObj, double val);

        // C++: double cv::ml::SVM::getP()
        [DllImport(LIBNAME)]
        private static extern double ml_SVM_getP_10(IntPtr nativeObj);

        // C++: void cv::ml::SVM::setP(double val)
        [DllImport(LIBNAME)]
        private static extern void ml_SVM_setP_10(IntPtr nativeObj, double val);

        // C++: Mat cv::ml::SVM::getClassWeights()
        [DllImport(LIBNAME)]
        private static extern IntPtr ml_SVM_getClassWeights_10(IntPtr nativeObj);

        // C++: void cv::ml::SVM::setClassWeights(Mat val)
        [DllImport(LIBNAME)]
        private static extern void ml_SVM_setClassWeights_10(IntPtr nativeObj, IntPtr val_nativeObj);

        // C++: TermCriteria cv::ml::SVM::getTermCriteria()
        [DllImport(LIBNAME)]
        private static extern void ml_SVM_getTermCriteria_10(IntPtr nativeObj, double[] retVal);

        // C++: void cv::ml::SVM::setTermCriteria(TermCriteria val)
        [DllImport(LIBNAME)]
        private static extern void ml_SVM_setTermCriteria_10(IntPtr nativeObj, int val_type, int val_maxCount, double val_epsilon);

        // C++: int cv::ml::SVM::getKernelType()
        [DllImport(LIBNAME)]
        private static extern int ml_SVM_getKernelType_10(IntPtr nativeObj);

        // C++: void cv::ml::SVM::setKernel(int kernelType)
        [DllImport(LIBNAME)]
        private static extern void ml_SVM_setKernel_10(IntPtr nativeObj, int kernelType);

        // C++: bool cv::ml::SVM::trainAuto(Mat samples, int layout, Mat responses, int kFold = 10, Ptr_ParamGrid Cgrid = SVM::getDefaultGridPtr(SVM::C), Ptr_ParamGrid gammaGrid = SVM::getDefaultGridPtr(SVM::GAMMA), Ptr_ParamGrid pGrid = SVM::getDefaultGridPtr(SVM::P), Ptr_ParamGrid nuGrid = SVM::getDefaultGridPtr(SVM::NU), Ptr_ParamGrid coeffGrid = SVM::getDefaultGridPtr(SVM::COEF), Ptr_ParamGrid degreeGrid = SVM::getDefaultGridPtr(SVM::DEGREE), bool balanced = false)
        [DllImport(LIBNAME)]
        [return: MarshalAs(UnmanagedType.U1)]
        private static extern bool ml_SVM_trainAuto_10(IntPtr nativeObj, IntPtr samples_nativeObj, int layout, IntPtr responses_nativeObj, int kFold, IntPtr Cgrid_nativeObj, IntPtr gammaGrid_nativeObj, IntPtr pGrid_nativeObj, IntPtr nuGrid_nativeObj, IntPtr coeffGrid_nativeObj, IntPtr degreeGrid_nativeObj, [MarshalAs(UnmanagedType.U1)] bool balanced);

        [DllImport(LIBNAME)]
        [return: MarshalAs(UnmanagedType.U1)]
        private static extern bool ml_SVM_trainAuto_11(IntPtr nativeObj, IntPtr samples_nativeObj, int layout, IntPtr responses_nativeObj, int kFold, IntPtr Cgrid_nativeObj, IntPtr gammaGrid_nativeObj, IntPtr pGrid_nativeObj, IntPtr nuGrid_nativeObj, IntPtr coeffGrid_nativeObj, IntPtr degreeGrid_nativeObj);

        [DllImport(LIBNAME)]
        [return: MarshalAs(UnmanagedType.U1)]
        private static extern bool ml_SVM_trainAuto_12(IntPtr nativeObj, IntPtr samples_nativeObj, int layout, IntPtr responses_nativeObj, int kFold, IntPtr Cgrid_nativeObj, IntPtr gammaGrid_nativeObj, IntPtr pGrid_nativeObj, IntPtr nuGrid_nativeObj, IntPtr coeffGrid_nativeObj);

        [DllImport(LIBNAME)]
        [return: MarshalAs(UnmanagedType.U1)]
        private static extern bool ml_SVM_trainAuto_13(IntPtr nativeObj, IntPtr samples_nativeObj, int layout, IntPtr responses_nativeObj, int kFold, IntPtr Cgrid_nativeObj, IntPtr gammaGrid_nativeObj, IntPtr pGrid_nativeObj, IntPtr nuGrid_nativeObj);

        [DllImport(LIBNAME)]
        [return: MarshalAs(UnmanagedType.U1)]
        private static extern bool ml_SVM_trainAuto_14(IntPtr nativeObj, IntPtr samples_nativeObj, int layout, IntPtr responses_nativeObj, int kFold, IntPtr Cgrid_nativeObj, IntPtr gammaGrid_nativeObj, IntPtr pGrid_nativeObj);

        [DllImport(LIBNAME)]
        [return: MarshalAs(UnmanagedType.U1)]
        private static extern bool ml_SVM_trainAuto_15(IntPtr nativeObj, IntPtr samples_nativeObj, int layout, IntPtr responses_nativeObj, int kFold, IntPtr Cgrid_nativeObj, IntPtr gammaGrid_nativeObj);

        [DllImport(LIBNAME)]
        [return: MarshalAs(UnmanagedType.U1)]
        private static extern bool ml_SVM_trainAuto_16(IntPtr nativeObj, IntPtr samples_nativeObj, int layout, IntPtr responses_nativeObj, int kFold, IntPtr Cgrid_nativeObj);

        [DllImport(LIBNAME)]
        [return: MarshalAs(UnmanagedType.U1)]
        private static extern bool ml_SVM_trainAuto_17(IntPtr nativeObj, IntPtr samples_nativeObj, int layout, IntPtr responses_nativeObj, int kFold);

        [DllImport(LIBNAME)]
        [return: MarshalAs(UnmanagedType.U1)]
        private static extern bool ml_SVM_trainAuto_18(IntPtr nativeObj, IntPtr samples_nativeObj, int layout, IntPtr responses_nativeObj);

        // C++: Mat cv::ml::SVM::getSupportVectors()
        [DllImport(LIBNAME)]
        private static extern IntPtr ml_SVM_getSupportVectors_10(IntPtr nativeObj);

        // C++: Mat cv::ml::SVM::getUncompressedSupportVectors()
        [DllImport(LIBNAME)]
        private static extern IntPtr ml_SVM_getUncompressedSupportVectors_10(IntPtr nativeObj);

        // C++: double cv::ml::SVM::getDecisionFunction(int i, Mat& alpha, Mat& svidx)
        [DllImport(LIBNAME)]
        private static extern double ml_SVM_getDecisionFunction_10(IntPtr nativeObj, int i, IntPtr alpha_nativeObj, IntPtr svidx_nativeObj);

        // C++: static Ptr_ParamGrid cv::ml::SVM::getDefaultGridPtr(int param_id)
        [DllImport(LIBNAME)]
        private static extern IntPtr ml_SVM_getDefaultGridPtr_10(int param_id);

        // C++: static Ptr_SVM cv::ml::SVM::create()
        [DllImport(LIBNAME)]
        private static extern IntPtr ml_SVM_create_10();

        // C++: static Ptr_SVM cv::ml::SVM::load(String filepath)
        [DllImport(LIBNAME)]
        private static extern IntPtr ml_SVM_load_10(string filepath);

        // native support for java finalize()
        [DllImport(LIBNAME)]
        private static extern void ml_SVM_delete(IntPtr nativeObj);

    }
}