12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
370470570670770870971071171271371471571671771871972072172272372472572672772872973073173273373473573673773873974074174274374474574674774874975075175275375475575675775875976076176276376476576676776876977077177277377477577677777877978078178278378478578678778878979079179279379479579679779879980080180280380480580680780880981081181281381481581681781881982082182282382482582682782882983083183283383483583683783883984084184284384484584684784884985085185285385485585685785885986086186286386486586686786886987087187287387487587687787887988088188288388488588688788888989089189289389489589689789889990090190290390490590690790890991091191291391491591691791891992092192292392492592692792892993093193293393493593693793893994094194294394494594694794894995095195295395495595695795895996096196296396496596696796896997097197297397497597697797897998098198298398498598698798898999099199299399499599699799899910001001100210031004100510061007100810091010101110121013101410151016101710181019102010211022102310241025102610271028102910301031103210331034103510361037103810391040104110421043104410451046 |
- using OpenCVForUnity.CoreModule;
- using OpenCVForUnity.UtilsModule;
- using System;
- using System.Collections.Generic;
- using System.Runtime.InteropServices;
- namespace OpenCVForUnity.MlModule
- {
- // C++: class EM
- /**
- * The class implements the Expectation Maximization algorithm.
- *
- * SEE: REF: ml_intro_em
- */
- public class EM : StatModel
- {
- protected override void Dispose(bool disposing)
- {
- try
- {
- if (disposing)
- {
- }
- if (IsEnabledDispose)
- {
- if (nativeObj != IntPtr.Zero)
- ml_EM_delete(nativeObj);
- nativeObj = IntPtr.Zero;
- }
- }
- finally
- {
- base.Dispose(disposing);
- }
- }
- protected internal EM(IntPtr addr) : base(addr) { }
- // internal usage only
- public static new EM __fromPtr__(IntPtr addr) { return new EM(addr); }
        // C++: enum <unnamed>
        public const int DEFAULT_NCLUSTERS = 5;    // default number of mixture components
        public const int DEFAULT_MAX_ITERS = 100;  // default training iteration limit
        public const int START_E_STEP = 1;         // start training with the Expectation step
        public const int START_M_STEP = 2;         // start training with the Maximization step
        public const int START_AUTO_STEP = 0;      // let the implementation choose the start step
        // C++: enum cv.ml.EM.Types
        // Covariance matrix types — values mirror cv::ml::EM::Types (see OpenCV docs for exact semantics).
        public const int COV_MAT_SPHERICAL = 0;
        public const int COV_MAT_DIAGONAL = 1;
        public const int COV_MAT_GENERIC = 2;
        public const int COV_MAT_DEFAULT = COV_MAT_DIAGONAL;
- //
- // C++: int cv::ml::EM::getClustersNumber()
- //
- /**
- * SEE: setClustersNumber
- * return automatically generated
- */
- public int getClustersNumber()
- {
- ThrowIfDisposed();
- return ml_EM_getClustersNumber_10(nativeObj);
- }
- //
- // C++: void cv::ml::EM::setClustersNumber(int val)
- //
- /**
- * getClustersNumber SEE: getClustersNumber
- * param val automatically generated
- */
- public void setClustersNumber(int val)
- {
- ThrowIfDisposed();
- ml_EM_setClustersNumber_10(nativeObj, val);
- }
- //
- // C++: int cv::ml::EM::getCovarianceMatrixType()
- //
- /**
- * SEE: setCovarianceMatrixType
- * return automatically generated
- */
- public int getCovarianceMatrixType()
- {
- ThrowIfDisposed();
- return ml_EM_getCovarianceMatrixType_10(nativeObj);
- }
- //
- // C++: void cv::ml::EM::setCovarianceMatrixType(int val)
- //
- /**
- * getCovarianceMatrixType SEE: getCovarianceMatrixType
- * param val automatically generated
- */
- public void setCovarianceMatrixType(int val)
- {
- ThrowIfDisposed();
- ml_EM_setCovarianceMatrixType_10(nativeObj, val);
- }
- //
- // C++: TermCriteria cv::ml::EM::getTermCriteria()
- //
- /**
- * SEE: setTermCriteria
- * return automatically generated
- */
- public TermCriteria getTermCriteria()
- {
- ThrowIfDisposed();
- double[] tmpArray = new double[3];
- ml_EM_getTermCriteria_10(nativeObj, tmpArray);
- TermCriteria retVal = new TermCriteria(tmpArray);
- return retVal;
- }
- //
- // C++: void cv::ml::EM::setTermCriteria(TermCriteria val)
- //
- /**
- * getTermCriteria SEE: getTermCriteria
- * param val automatically generated
- */
- public void setTermCriteria(TermCriteria val)
- {
- ThrowIfDisposed();
- ml_EM_setTermCriteria_10(nativeObj, val.type, val.maxCount, val.epsilon);
- }
- //
- // C++: Mat cv::ml::EM::getWeights()
- //
- /**
- * Returns weights of the mixtures
- *
- * Returns vector with the number of elements equal to the number of mixtures.
- * return automatically generated
- */
- public Mat getWeights()
- {
- ThrowIfDisposed();
- return new Mat(DisposableObject.ThrowIfNullIntPtr(ml_EM_getWeights_10(nativeObj)));
- }
- //
- // C++: Mat cv::ml::EM::getMeans()
- //
- /**
- * Returns the cluster centers (means of the Gaussian mixture)
- *
- * Returns matrix with the number of rows equal to the number of mixtures and number of columns
- * equal to the space dimensionality.
- * return automatically generated
- */
- public Mat getMeans()
- {
- ThrowIfDisposed();
- return new Mat(DisposableObject.ThrowIfNullIntPtr(ml_EM_getMeans_10(nativeObj)));
- }
- //
- // C++: void cv::ml::EM::getCovs(vector_Mat& covs)
- //
- /**
- * Returns covariation matrices
- *
- * Returns vector of covariation matrices. Number of matrices is the number of gaussian mixtures,
- * each matrix is a square floating-point matrix NxN, where N is the space dimensionality.
- * param covs automatically generated
- */
- public void getCovs(List<Mat> covs)
- {
- ThrowIfDisposed();
- Mat covs_mat = new Mat();
- ml_EM_getCovs_10(nativeObj, covs_mat.nativeObj);
- Converters.Mat_to_vector_Mat(covs_mat, covs);
- covs_mat.release();
- }
- //
- // C++: float cv::ml::EM::predict(Mat samples, Mat& results = Mat(), int flags = 0)
- //
- /**
- * Returns posterior probabilities for the provided samples
- *
- * param samples The input samples, floating-point matrix
- * param results The optional output \( nSamples \times nClusters\) matrix of results. It contains
- * posterior probabilities for each sample from the input
- * param flags This parameter will be ignored
- * return automatically generated
- */
- public override float predict(Mat samples, Mat results, int flags)
- {
- ThrowIfDisposed();
- if (samples != null) samples.ThrowIfDisposed();
- if (results != null) results.ThrowIfDisposed();
- return ml_EM_predict_10(nativeObj, samples.nativeObj, results.nativeObj, flags);
- }
- /**
- * Returns posterior probabilities for the provided samples
- *
- * param samples The input samples, floating-point matrix
- * param results The optional output \( nSamples \times nClusters\) matrix of results. It contains
- * posterior probabilities for each sample from the input
- * return automatically generated
- */
- public override float predict(Mat samples, Mat results)
- {
- ThrowIfDisposed();
- if (samples != null) samples.ThrowIfDisposed();
- if (results != null) results.ThrowIfDisposed();
- return ml_EM_predict_11(nativeObj, samples.nativeObj, results.nativeObj);
- }
- /**
- * Returns posterior probabilities for the provided samples
- *
- * param samples The input samples, floating-point matrix
- * posterior probabilities for each sample from the input
- * return automatically generated
- */
- public override float predict(Mat samples)
- {
- ThrowIfDisposed();
- if (samples != null) samples.ThrowIfDisposed();
- return ml_EM_predict_12(nativeObj, samples.nativeObj);
- }
- //
- // C++: Vec2d cv::ml::EM::predict2(Mat sample, Mat& probs)
- //
- /**
- * Returns a likelihood logarithm value and an index of the most probable mixture component
- * for the given sample.
- *
- * param sample A sample for classification. It should be a one-channel matrix of
- * \(1 \times dims\) or \(dims \times 1\) size.
- * param probs Optional output matrix that contains posterior probabilities of each component
- * given the sample. It has \(1 \times nclusters\) size and CV_64FC1 type.
- *
- * The method returns a two-element double vector. Zero element is a likelihood logarithm value for
- * the sample. First element is an index of the most probable mixture component for the given
- * sample.
- * return automatically generated
- */
- public double[] predict2(Mat sample, Mat probs)
- {
- ThrowIfDisposed();
- if (sample != null) sample.ThrowIfDisposed();
- if (probs != null) probs.ThrowIfDisposed();
- double[] retVal = new double[2];
- ml_EM_predict2_10(nativeObj, sample.nativeObj, probs.nativeObj, retVal);
- return retVal;
- }
- //
- // C++: bool cv::ml::EM::trainEM(Mat samples, Mat& logLikelihoods = Mat(), Mat& labels = Mat(), Mat& probs = Mat())
- //
- /**
- * Estimate the Gaussian mixture parameters from a samples set.
- *
- * This variation starts with Expectation step. Initial values of the model parameters will be
- * estimated by the k-means algorithm.
- *
- * Unlike many of the ML models, %EM is an unsupervised learning algorithm and it does not take
- * responses (class labels or function values) as input. Instead, it computes the *Maximum
- * Likelihood Estimate* of the Gaussian mixture parameters from an input sample set, stores all the
- * parameters inside the structure: \(p_{i,k}\) in probs, \(a_k\) in means , \(S_k\) in
- * covs[k], \(\pi_k\) in weights , and optionally computes the output "class label" for each
- * sample: \(\texttt{labels}_i=\texttt{arg max}_k(p_{i,k}), i=1..N\) (indices of the most
- * probable mixture component for each sample).
- *
- * The trained model can be used further for prediction, just like any other classifier. The
- * trained model is similar to the NormalBayesClassifier.
- *
- * param samples Samples from which the Gaussian mixture model will be estimated. It should be a
- * one-channel matrix, each row of which is a sample. If the matrix does not have CV_64F type
- * it will be converted to the inner matrix of such type for the further computing.
- * param logLikelihoods The optional output matrix that contains a likelihood logarithm value for
- * each sample. It has \(nsamples \times 1\) size and CV_64FC1 type.
- * param labels The optional output "class label" for each sample:
- * \(\texttt{labels}_i=\texttt{arg max}_k(p_{i,k}), i=1..N\) (indices of the most probable
- * mixture component for each sample). It has \(nsamples \times 1\) size and CV_32SC1 type.
- * param probs The optional output matrix that contains posterior probabilities of each Gaussian
- * mixture component given the each sample. It has \(nsamples \times nclusters\) size and
- * CV_64FC1 type.
- * return automatically generated
- */
- public bool trainEM(Mat samples, Mat logLikelihoods, Mat labels, Mat probs)
- {
- ThrowIfDisposed();
- if (samples != null) samples.ThrowIfDisposed();
- if (logLikelihoods != null) logLikelihoods.ThrowIfDisposed();
- if (labels != null) labels.ThrowIfDisposed();
- if (probs != null) probs.ThrowIfDisposed();
- return ml_EM_trainEM_10(nativeObj, samples.nativeObj, logLikelihoods.nativeObj, labels.nativeObj, probs.nativeObj);
- }
- /**
- * Estimate the Gaussian mixture parameters from a samples set.
- *
- * This variation starts with Expectation step. Initial values of the model parameters will be
- * estimated by the k-means algorithm.
- *
- * Unlike many of the ML models, %EM is an unsupervised learning algorithm and it does not take
- * responses (class labels or function values) as input. Instead, it computes the *Maximum
- * Likelihood Estimate* of the Gaussian mixture parameters from an input sample set, stores all the
- * parameters inside the structure: \(p_{i,k}\) in probs, \(a_k\) in means , \(S_k\) in
- * covs[k], \(\pi_k\) in weights , and optionally computes the output "class label" for each
- * sample: \(\texttt{labels}_i=\texttt{arg max}_k(p_{i,k}), i=1..N\) (indices of the most
- * probable mixture component for each sample).
- *
- * The trained model can be used further for prediction, just like any other classifier. The
- * trained model is similar to the NormalBayesClassifier.
- *
- * param samples Samples from which the Gaussian mixture model will be estimated. It should be a
- * one-channel matrix, each row of which is a sample. If the matrix does not have CV_64F type
- * it will be converted to the inner matrix of such type for the further computing.
- * param logLikelihoods The optional output matrix that contains a likelihood logarithm value for
- * each sample. It has \(nsamples \times 1\) size and CV_64FC1 type.
- * param labels The optional output "class label" for each sample:
- * \(\texttt{labels}_i=\texttt{arg max}_k(p_{i,k}), i=1..N\) (indices of the most probable
- * mixture component for each sample). It has \(nsamples \times 1\) size and CV_32SC1 type.
- * mixture component given the each sample. It has \(nsamples \times nclusters\) size and
- * CV_64FC1 type.
- * return automatically generated
- */
- public bool trainEM(Mat samples, Mat logLikelihoods, Mat labels)
- {
- ThrowIfDisposed();
- if (samples != null) samples.ThrowIfDisposed();
- if (logLikelihoods != null) logLikelihoods.ThrowIfDisposed();
- if (labels != null) labels.ThrowIfDisposed();
- return ml_EM_trainEM_11(nativeObj, samples.nativeObj, logLikelihoods.nativeObj, labels.nativeObj);
- }
- /**
- * Estimate the Gaussian mixture parameters from a samples set.
- *
- * This variation starts with Expectation step. Initial values of the model parameters will be
- * estimated by the k-means algorithm.
- *
- * Unlike many of the ML models, %EM is an unsupervised learning algorithm and it does not take
- * responses (class labels or function values) as input. Instead, it computes the *Maximum
- * Likelihood Estimate* of the Gaussian mixture parameters from an input sample set, stores all the
- * parameters inside the structure: \(p_{i,k}\) in probs, \(a_k\) in means , \(S_k\) in
- * covs[k], \(\pi_k\) in weights , and optionally computes the output "class label" for each
- * sample: \(\texttt{labels}_i=\texttt{arg max}_k(p_{i,k}), i=1..N\) (indices of the most
- * probable mixture component for each sample).
- *
- * The trained model can be used further for prediction, just like any other classifier. The
- * trained model is similar to the NormalBayesClassifier.
- *
- * param samples Samples from which the Gaussian mixture model will be estimated. It should be a
- * one-channel matrix, each row of which is a sample. If the matrix does not have CV_64F type
- * it will be converted to the inner matrix of such type for the further computing.
- * param logLikelihoods The optional output matrix that contains a likelihood logarithm value for
- * each sample. It has \(nsamples \times 1\) size and CV_64FC1 type.
- * \(\texttt{labels}_i=\texttt{arg max}_k(p_{i,k}), i=1..N\) (indices of the most probable
- * mixture component for each sample). It has \(nsamples \times 1\) size and CV_32SC1 type.
- * mixture component given the each sample. It has \(nsamples \times nclusters\) size and
- * CV_64FC1 type.
- * return automatically generated
- */
- public bool trainEM(Mat samples, Mat logLikelihoods)
- {
- ThrowIfDisposed();
- if (samples != null) samples.ThrowIfDisposed();
- if (logLikelihoods != null) logLikelihoods.ThrowIfDisposed();
- return ml_EM_trainEM_12(nativeObj, samples.nativeObj, logLikelihoods.nativeObj);
- }
- /**
- * Estimate the Gaussian mixture parameters from a samples set.
- *
- * This variation starts with Expectation step. Initial values of the model parameters will be
- * estimated by the k-means algorithm.
- *
- * Unlike many of the ML models, %EM is an unsupervised learning algorithm and it does not take
- * responses (class labels or function values) as input. Instead, it computes the *Maximum
- * Likelihood Estimate* of the Gaussian mixture parameters from an input sample set, stores all the
- * parameters inside the structure: \(p_{i,k}\) in probs, \(a_k\) in means , \(S_k\) in
- * covs[k], \(\pi_k\) in weights , and optionally computes the output "class label" for each
- * sample: \(\texttt{labels}_i=\texttt{arg max}_k(p_{i,k}), i=1..N\) (indices of the most
- * probable mixture component for each sample).
- *
- * The trained model can be used further for prediction, just like any other classifier. The
- * trained model is similar to the NormalBayesClassifier.
- *
- * param samples Samples from which the Gaussian mixture model will be estimated. It should be a
- * one-channel matrix, each row of which is a sample. If the matrix does not have CV_64F type
- * it will be converted to the inner matrix of such type for the further computing.
- * each sample. It has \(nsamples \times 1\) size and CV_64FC1 type.
- * \(\texttt{labels}_i=\texttt{arg max}_k(p_{i,k}), i=1..N\) (indices of the most probable
- * mixture component for each sample). It has \(nsamples \times 1\) size and CV_32SC1 type.
- * mixture component given the each sample. It has \(nsamples \times nclusters\) size and
- * CV_64FC1 type.
- * return automatically generated
- */
- public bool trainEM(Mat samples)
- {
- ThrowIfDisposed();
- if (samples != null) samples.ThrowIfDisposed();
- return ml_EM_trainEM_13(nativeObj, samples.nativeObj);
- }
- //
- // C++: bool cv::ml::EM::trainE(Mat samples, Mat means0, Mat covs0 = Mat(), Mat weights0 = Mat(), Mat& logLikelihoods = Mat(), Mat& labels = Mat(), Mat& probs = Mat())
- //
- /**
- * Estimate the Gaussian mixture parameters from a samples set.
- *
- * This variation starts with Expectation step. You need to provide initial means \(a_k\) of
- * mixture components. Optionally you can pass initial weights \(\pi_k\) and covariance matrices
- * \(S_k\) of mixture components.
- *
- * param samples Samples from which the Gaussian mixture model will be estimated. It should be a
- * one-channel matrix, each row of which is a sample. If the matrix does not have CV_64F type
- * it will be converted to the inner matrix of such type for the further computing.
- * param means0 Initial means \(a_k\) of mixture components. It is a one-channel matrix of
- * \(nclusters \times dims\) size. If the matrix does not have CV_64F type it will be
- * converted to the inner matrix of such type for the further computing.
- * param covs0 The vector of initial covariance matrices \(S_k\) of mixture components. Each of
- * covariance matrices is a one-channel matrix of \(dims \times dims\) size. If the matrices
- * do not have CV_64F type they will be converted to the inner matrices of such type for the
- * further computing.
- * param weights0 Initial weights \(\pi_k\) of mixture components. It should be a one-channel
- * floating-point matrix with \(1 \times nclusters\) or \(nclusters \times 1\) size.
- * param logLikelihoods The optional output matrix that contains a likelihood logarithm value for
- * each sample. It has \(nsamples \times 1\) size and CV_64FC1 type.
- * param labels The optional output "class label" for each sample:
- * \(\texttt{labels}_i=\texttt{arg max}_k(p_{i,k}), i=1..N\) (indices of the most probable
- * mixture component for each sample). It has \(nsamples \times 1\) size and CV_32SC1 type.
- * param probs The optional output matrix that contains posterior probabilities of each Gaussian
- * mixture component given the each sample. It has \(nsamples \times nclusters\) size and
- * CV_64FC1 type.
- * return automatically generated
- */
- public bool trainE(Mat samples, Mat means0, Mat covs0, Mat weights0, Mat logLikelihoods, Mat labels, Mat probs)
- {
- ThrowIfDisposed();
- if (samples != null) samples.ThrowIfDisposed();
- if (means0 != null) means0.ThrowIfDisposed();
- if (covs0 != null) covs0.ThrowIfDisposed();
- if (weights0 != null) weights0.ThrowIfDisposed();
- if (logLikelihoods != null) logLikelihoods.ThrowIfDisposed();
- if (labels != null) labels.ThrowIfDisposed();
- if (probs != null) probs.ThrowIfDisposed();
- return ml_EM_trainE_10(nativeObj, samples.nativeObj, means0.nativeObj, covs0.nativeObj, weights0.nativeObj, logLikelihoods.nativeObj, labels.nativeObj, probs.nativeObj);
- }
- /**
- * Estimate the Gaussian mixture parameters from a samples set.
- *
- * This variation starts with Expectation step. You need to provide initial means \(a_k\) of
- * mixture components. Optionally you can pass initial weights \(\pi_k\) and covariance matrices
- * \(S_k\) of mixture components.
- *
- * param samples Samples from which the Gaussian mixture model will be estimated. It should be a
- * one-channel matrix, each row of which is a sample. If the matrix does not have CV_64F type
- * it will be converted to the inner matrix of such type for the further computing.
- * param means0 Initial means \(a_k\) of mixture components. It is a one-channel matrix of
- * \(nclusters \times dims\) size. If the matrix does not have CV_64F type it will be
- * converted to the inner matrix of such type for the further computing.
- * param covs0 The vector of initial covariance matrices \(S_k\) of mixture components. Each of
- * covariance matrices is a one-channel matrix of \(dims \times dims\) size. If the matrices
- * do not have CV_64F type they will be converted to the inner matrices of such type for the
- * further computing.
- * param weights0 Initial weights \(\pi_k\) of mixture components. It should be a one-channel
- * floating-point matrix with \(1 \times nclusters\) or \(nclusters \times 1\) size.
- * param logLikelihoods The optional output matrix that contains a likelihood logarithm value for
- * each sample. It has \(nsamples \times 1\) size and CV_64FC1 type.
- * param labels The optional output "class label" for each sample:
- * \(\texttt{labels}_i=\texttt{arg max}_k(p_{i,k}), i=1..N\) (indices of the most probable
- * mixture component for each sample). It has \(nsamples \times 1\) size and CV_32SC1 type.
- * mixture component given the each sample. It has \(nsamples \times nclusters\) size and
- * CV_64FC1 type.
- * return automatically generated
- */
- public bool trainE(Mat samples, Mat means0, Mat covs0, Mat weights0, Mat logLikelihoods, Mat labels)
- {
- ThrowIfDisposed();
- if (samples != null) samples.ThrowIfDisposed();
- if (means0 != null) means0.ThrowIfDisposed();
- if (covs0 != null) covs0.ThrowIfDisposed();
- if (weights0 != null) weights0.ThrowIfDisposed();
- if (logLikelihoods != null) logLikelihoods.ThrowIfDisposed();
- if (labels != null) labels.ThrowIfDisposed();
- return ml_EM_trainE_11(nativeObj, samples.nativeObj, means0.nativeObj, covs0.nativeObj, weights0.nativeObj, logLikelihoods.nativeObj, labels.nativeObj);
- }
- /**
- * Estimate the Gaussian mixture parameters from a samples set.
- *
- * This variation starts with Expectation step. You need to provide initial means \(a_k\) of
- * mixture components. Optionally you can pass initial weights \(\pi_k\) and covariance matrices
- * \(S_k\) of mixture components.
- *
- * param samples Samples from which the Gaussian mixture model will be estimated. It should be a
- * one-channel matrix, each row of which is a sample. If the matrix does not have CV_64F type
- * it will be converted to the inner matrix of such type for the further computing.
- * param means0 Initial means \(a_k\) of mixture components. It is a one-channel matrix of
- * \(nclusters \times dims\) size. If the matrix does not have CV_64F type it will be
- * converted to the inner matrix of such type for the further computing.
- * param covs0 The vector of initial covariance matrices \(S_k\) of mixture components. Each of
- * covariance matrices is a one-channel matrix of \(dims \times dims\) size. If the matrices
- * do not have CV_64F type they will be converted to the inner matrices of such type for the
- * further computing.
- * param weights0 Initial weights \(\pi_k\) of mixture components. It should be a one-channel
- * floating-point matrix with \(1 \times nclusters\) or \(nclusters \times 1\) size.
- * param logLikelihoods The optional output matrix that contains a likelihood logarithm value for
- * each sample. It has \(nsamples \times 1\) size and CV_64FC1 type.
- * \(\texttt{labels}_i=\texttt{arg max}_k(p_{i,k}), i=1..N\) (indices of the most probable
- * mixture component for each sample). It has \(nsamples \times 1\) size and CV_32SC1 type.
- * mixture component given the each sample. It has \(nsamples \times nclusters\) size and
- * CV_64FC1 type.
- * return automatically generated
- */
- public bool trainE(Mat samples, Mat means0, Mat covs0, Mat weights0, Mat logLikelihoods)
- {
- ThrowIfDisposed();
- if (samples != null) samples.ThrowIfDisposed();
- if (means0 != null) means0.ThrowIfDisposed();
- if (covs0 != null) covs0.ThrowIfDisposed();
- if (weights0 != null) weights0.ThrowIfDisposed();
- if (logLikelihoods != null) logLikelihoods.ThrowIfDisposed();
- return ml_EM_trainE_12(nativeObj, samples.nativeObj, means0.nativeObj, covs0.nativeObj, weights0.nativeObj, logLikelihoods.nativeObj);
- }
- /**
- * Estimate the Gaussian mixture parameters from a samples set.
- *
- * This variation starts with Expectation step. You need to provide initial means \(a_k\) of
- * mixture components. Optionally you can pass initial weights \(\pi_k\) and covariance matrices
- * \(S_k\) of mixture components.
- *
- * param samples Samples from which the Gaussian mixture model will be estimated. It should be a
- * one-channel matrix, each row of which is a sample. If the matrix does not have CV_64F type
- * it will be converted to the inner matrix of such type for the further computing.
- * param means0 Initial means \(a_k\) of mixture components. It is a one-channel matrix of
- * \(nclusters \times dims\) size. If the matrix does not have CV_64F type it will be
- * converted to the inner matrix of such type for the further computing.
- * param covs0 The vector of initial covariance matrices \(S_k\) of mixture components. Each of
- * covariance matrices is a one-channel matrix of \(dims \times dims\) size. If the matrices
- * do not have CV_64F type they will be converted to the inner matrices of such type for the
- * further computing.
- * param weights0 Initial weights \(\pi_k\) of mixture components. It should be a one-channel
- * floating-point matrix with \(1 \times nclusters\) or \(nclusters \times 1\) size.
- * each sample. It has \(nsamples \times 1\) size and CV_64FC1 type.
- * \(\texttt{labels}_i=\texttt{arg max}_k(p_{i,k}), i=1..N\) (indices of the most probable
- * mixture component for each sample). It has \(nsamples \times 1\) size and CV_32SC1 type.
- * mixture component given the each sample. It has \(nsamples \times nclusters\) size and
- * CV_64FC1 type.
- * return automatically generated
- */
- public bool trainE(Mat samples, Mat means0, Mat covs0, Mat weights0)
- {
- ThrowIfDisposed();
- if (samples != null) samples.ThrowIfDisposed();
- if (means0 != null) means0.ThrowIfDisposed();
- if (covs0 != null) covs0.ThrowIfDisposed();
- if (weights0 != null) weights0.ThrowIfDisposed();
- return ml_EM_trainE_13(nativeObj, samples.nativeObj, means0.nativeObj, covs0.nativeObj, weights0.nativeObj);
- }
- /**
- * Estimate the Gaussian mixture parameters from a samples set.
- *
- * This variation starts with Expectation step. You need to provide initial means \(a_k\) of
- * mixture components. Optionally you can pass initial weights \(\pi_k\) and covariance matrices
- * \(S_k\) of mixture components.
- *
- * param samples Samples from which the Gaussian mixture model will be estimated. It should be a
- * one-channel matrix, each row of which is a sample. If the matrix does not have CV_64F type
- * it will be converted to the inner matrix of such type for the further computing.
- * param means0 Initial means \(a_k\) of mixture components. It is a one-channel matrix of
- * \(nclusters \times dims\) size. If the matrix does not have CV_64F type it will be
- * converted to the inner matrix of such type for the further computing.
- * param covs0 The vector of initial covariance matrices \(S_k\) of mixture components. Each of
- * covariance matrices is a one-channel matrix of \(dims \times dims\) size. If the matrices
- * do not have CV_64F type they will be converted to the inner matrices of such type for the
- * further computing.
- * floating-point matrix with \(1 \times nclusters\) or \(nclusters \times 1\) size.
- * each sample. It has \(nsamples \times 1\) size and CV_64FC1 type.
- * \(\texttt{labels}_i=\texttt{arg max}_k(p_{i,k}), i=1..N\) (indices of the most probable
- * mixture component for each sample). It has \(nsamples \times 1\) size and CV_32SC1 type.
- * mixture component given the each sample. It has \(nsamples \times nclusters\) size and
- * CV_64FC1 type.
- * return automatically generated
- */
- public bool trainE(Mat samples, Mat means0, Mat covs0)
- {
- ThrowIfDisposed();
- if (samples != null) samples.ThrowIfDisposed();
- if (means0 != null) means0.ThrowIfDisposed();
- if (covs0 != null) covs0.ThrowIfDisposed();
- return ml_EM_trainE_14(nativeObj, samples.nativeObj, means0.nativeObj, covs0.nativeObj);
- }
/**
 * Estimate the Gaussian mixture parameters from a samples set.
 *
 * This variation starts with the Expectation step: you must provide the initial
 * means \(a_k\) of the mixture components. The optional initial weights,
 * covariances and output matrices of the fuller overloads are left at their
 * native defaults.
 *
 * param samples Samples from which the Gaussian mixture model will be estimated. It should be a
 * one-channel matrix, each row of which is a sample. If the matrix does not have CV_64F type
 * it will be converted to the inner matrix of such type for the further computing.
 * param means0 Initial means \(a_k\) of mixture components. It is a one-channel matrix of
 * \(nclusters \times dims\) size. If the matrix does not have CV_64F type it will be
 * converted to the inner matrix of such type for the further computing.
 * return the boolean result reported by the native cv::ml::EM::trainE call.
 */
public bool trainE(Mat samples, Mat means0)
{
    ThrowIfDisposed();
    // Reject wrappers whose native Mat has already been released.
    samples?.ThrowIfDisposed();
    means0?.ThrowIfDisposed();

    return ml_EM_trainE_15(nativeObj, samples.nativeObj, means0.nativeObj);
}
- //
- // C++: bool cv::ml::EM::trainM(Mat samples, Mat probs0, Mat& logLikelihoods = Mat(), Mat& labels = Mat(), Mat& probs = Mat())
- //
/**
 * Estimate the Gaussian mixture parameters from a samples set.
 *
 * This variation starts with the Maximization step. You need to provide initial
 * probabilities \(p_{i,k}\) to use this option.
 *
 * param samples Samples from which the Gaussian mixture model will be estimated. It should be a
 * one-channel matrix, each row of which is a sample. If the matrix does not have CV_64F type
 * it will be converted to the inner matrix of such type for the further computing.
 * param probs0 Initial probabilities \(p_{i,k}\) of each sample belonging to each mixture component.
 * param logLikelihoods The optional output matrix that contains a likelihood logarithm value for
 * each sample. It has \(nsamples \times 1\) size and CV_64FC1 type.
 * param labels The optional output "class label" for each sample:
 * \(\texttt{labels}_i=\texttt{arg max}_k(p_{i,k}), i=1..N\) (indices of the most probable
 * mixture component for each sample). It has \(nsamples \times 1\) size and CV_32SC1 type.
 * param probs The optional output matrix that contains posterior probabilities of each Gaussian
 * mixture component given the each sample. It has \(nsamples \times nclusters\) size and
 * CV_64FC1 type.
 * return the boolean result reported by the native cv::ml::EM::trainM call.
 */
public bool trainM(Mat samples, Mat probs0, Mat logLikelihoods, Mat labels, Mat probs)
{
    ThrowIfDisposed();
    // Reject wrappers whose native Mat has already been released.
    samples?.ThrowIfDisposed();
    probs0?.ThrowIfDisposed();
    logLikelihoods?.ThrowIfDisposed();
    labels?.ThrowIfDisposed();
    probs?.ThrowIfDisposed();

    return ml_EM_trainM_10(nativeObj, samples.nativeObj, probs0.nativeObj, logLikelihoods.nativeObj, labels.nativeObj, probs.nativeObj);
}
/**
 * Estimate the Gaussian mixture parameters from a samples set.
 *
 * This variation starts with the Maximization step. You need to provide initial
 * probabilities \(p_{i,k}\) to use this option. The optional {@code probs}
 * output of the full overload is left at its native default.
 *
 * param samples Samples from which the Gaussian mixture model will be estimated. It should be a
 * one-channel matrix, each row of which is a sample. If the matrix does not have CV_64F type
 * it will be converted to the inner matrix of such type for the further computing.
 * param probs0 Initial probabilities \(p_{i,k}\) of each sample belonging to each mixture component.
 * param logLikelihoods The optional output matrix that contains a likelihood logarithm value for
 * each sample. It has \(nsamples \times 1\) size and CV_64FC1 type.
 * param labels The optional output "class label" for each sample:
 * \(\texttt{labels}_i=\texttt{arg max}_k(p_{i,k}), i=1..N\) (indices of the most probable
 * mixture component for each sample). It has \(nsamples \times 1\) size and CV_32SC1 type.
 * return the boolean result reported by the native cv::ml::EM::trainM call.
 */
public bool trainM(Mat samples, Mat probs0, Mat logLikelihoods, Mat labels)
{
    ThrowIfDisposed();
    // Reject wrappers whose native Mat has already been released.
    samples?.ThrowIfDisposed();
    probs0?.ThrowIfDisposed();
    logLikelihoods?.ThrowIfDisposed();
    labels?.ThrowIfDisposed();

    return ml_EM_trainM_11(nativeObj, samples.nativeObj, probs0.nativeObj, logLikelihoods.nativeObj, labels.nativeObj);
}
/**
 * Estimate the Gaussian mixture parameters from a samples set.
 *
 * This variation starts with the Maximization step. You need to provide initial
 * probabilities \(p_{i,k}\) to use this option. The optional {@code labels} and
 * {@code probs} outputs of the fuller overloads are left at their native defaults.
 *
 * param samples Samples from which the Gaussian mixture model will be estimated. It should be a
 * one-channel matrix, each row of which is a sample. If the matrix does not have CV_64F type
 * it will be converted to the inner matrix of such type for the further computing.
 * param probs0 Initial probabilities \(p_{i,k}\) of each sample belonging to each mixture component.
 * param logLikelihoods The optional output matrix that contains a likelihood logarithm value for
 * each sample. It has \(nsamples \times 1\) size and CV_64FC1 type.
 * return the boolean result reported by the native cv::ml::EM::trainM call.
 */
public bool trainM(Mat samples, Mat probs0, Mat logLikelihoods)
{
    ThrowIfDisposed();
    // Reject wrappers whose native Mat has already been released.
    samples?.ThrowIfDisposed();
    probs0?.ThrowIfDisposed();
    logLikelihoods?.ThrowIfDisposed();

    return ml_EM_trainM_12(nativeObj, samples.nativeObj, probs0.nativeObj, logLikelihoods.nativeObj);
}
/**
 * Estimate the Gaussian mixture parameters from a samples set.
 *
 * This variation starts with the Maximization step. You need to provide initial
 * probabilities \(p_{i,k}\) to use this option. All optional output matrices of
 * the fuller overloads are left at their native defaults.
 *
 * param samples Samples from which the Gaussian mixture model will be estimated. It should be a
 * one-channel matrix, each row of which is a sample. If the matrix does not have CV_64F type
 * it will be converted to the inner matrix of such type for the further computing.
 * param probs0 Initial probabilities \(p_{i,k}\) of each sample belonging to each mixture component.
 * return the boolean result reported by the native cv::ml::EM::trainM call.
 */
public bool trainM(Mat samples, Mat probs0)
{
    ThrowIfDisposed();
    // Reject wrappers whose native Mat has already been released.
    samples?.ThrowIfDisposed();
    probs0?.ThrowIfDisposed();

    return ml_EM_trainM_13(nativeObj, samples.nativeObj, probs0.nativeObj);
}
- //
- // C++: static Ptr_EM cv::ml::EM::create()
- //
/**
 * Creates an empty %EM model.
 *
 * The model should then be trained using the StatModel::train(traindata, flags)
 * method. Alternatively, you can use one of the EM::train\* methods or load a
 * serialized model from file via Algorithm::load&lt;EM&gt;(filename).
 *
 * return a managed EM wrapper around the newly created native object.
 */
public static EM create()
{
    // Fail fast if the native side returned a null pointer.
    IntPtr ptr = DisposableObject.ThrowIfNullIntPtr(ml_EM_create_10());
    return EM.__fromPtr__(ptr);
}
- //
- // C++: static Ptr_EM cv::ml::EM::load(String filepath, String nodeName = String())
- //
/**
 * Loads and creates a serialized EM from a file.
 *
 * Use EM::save to serialize and store an EM to disk, then load it again by
 * calling this function with the path to that file. The node inside the file
 * that contains the classifier can be selected explicitly.
 *
 * param filepath path to the serialized EM
 * param nodeName name of the node containing the classifier
 * return a managed EM wrapper around the deserialized native object.
 */
public static EM load(string filepath, string nodeName)
{
    // Fail fast if the native side could not load the model (null pointer).
    IntPtr ptr = DisposableObject.ThrowIfNullIntPtr(ml_EM_load_10(filepath, nodeName));
    return EM.__fromPtr__(ptr);
}
/**
 * Loads and creates a serialized EM from a file.
 *
 * Use EM::save to serialize and store an EM to disk, then load it again by
 * calling this function with the path to that file. This overload reads from
 * the default classifier node.
 *
 * param filepath path to the serialized EM
 * return a managed EM wrapper around the deserialized native object.
 */
public static EM load(string filepath)
{
    // Fail fast if the native side could not load the model (null pointer).
    IntPtr ptr = DisposableObject.ThrowIfNullIntPtr(ml_EM_load_11(filepath));
    return EM.__fromPtr__(ptr);
}
// Name of the native library targeted by the DllImport declarations below.
// On iOS/WebGL player builds the plugin is linked statically, so the special
// "__Internal" name is used; everywhere else the dynamic library is loaded.
#if (UNITY_IOS || UNITY_WEBGL) && !UNITY_EDITOR
const string LIBNAME = "__Internal";
#else
const string LIBNAME = "opencvforunity";
#endif
// -------------------------------------------------------------------------
// P/Invoke declarations mirroring the wrapped native cv::ml::EM entry points.
// One extern per C++ overload; the numeric suffix (_10, _11, ...) distinguishes
// overloads of the same function. Signatures must match the native plugin
// exactly, so these declarations are not to be edited by hand.
// [return: MarshalAs(UnmanagedType.U1)] marshals the native 1-byte C++ bool.
// -------------------------------------------------------------------------
// C++: int cv::ml::EM::getClustersNumber()
[DllImport(LIBNAME)]
private static extern int ml_EM_getClustersNumber_10(IntPtr nativeObj);
// C++: void cv::ml::EM::setClustersNumber(int val)
[DllImport(LIBNAME)]
private static extern void ml_EM_setClustersNumber_10(IntPtr nativeObj, int val);
// C++: int cv::ml::EM::getCovarianceMatrixType()
[DllImport(LIBNAME)]
private static extern int ml_EM_getCovarianceMatrixType_10(IntPtr nativeObj);
// C++: void cv::ml::EM::setCovarianceMatrixType(int val)
[DllImport(LIBNAME)]
private static extern void ml_EM_setCovarianceMatrixType_10(IntPtr nativeObj, int val);
// C++: TermCriteria cv::ml::EM::getTermCriteria()
// The TermCriteria value is returned through the retVal array (type, maxCount, epsilon).
[DllImport(LIBNAME)]
private static extern void ml_EM_getTermCriteria_10(IntPtr nativeObj, double[] retVal);
// C++: void cv::ml::EM::setTermCriteria(TermCriteria val)
[DllImport(LIBNAME)]
private static extern void ml_EM_setTermCriteria_10(IntPtr nativeObj, int val_type, int val_maxCount, double val_epsilon);
// C++: Mat cv::ml::EM::getWeights()
[DllImport(LIBNAME)]
private static extern IntPtr ml_EM_getWeights_10(IntPtr nativeObj);
// C++: Mat cv::ml::EM::getMeans()
[DllImport(LIBNAME)]
private static extern IntPtr ml_EM_getMeans_10(IntPtr nativeObj);
// C++: void cv::ml::EM::getCovs(vector_Mat& covs)
[DllImport(LIBNAME)]
private static extern void ml_EM_getCovs_10(IntPtr nativeObj, IntPtr covs_mat_nativeObj);
// C++: float cv::ml::EM::predict(Mat samples, Mat& results = Mat(), int flags = 0)
[DllImport(LIBNAME)]
private static extern float ml_EM_predict_10(IntPtr nativeObj, IntPtr samples_nativeObj, IntPtr results_nativeObj, int flags);
[DllImport(LIBNAME)]
private static extern float ml_EM_predict_11(IntPtr nativeObj, IntPtr samples_nativeObj, IntPtr results_nativeObj);
[DllImport(LIBNAME)]
private static extern float ml_EM_predict_12(IntPtr nativeObj, IntPtr samples_nativeObj);
// C++: Vec2d cv::ml::EM::predict2(Mat sample, Mat& probs)
// The Vec2d result is returned through the retVal array.
[DllImport(LIBNAME)]
private static extern void ml_EM_predict2_10(IntPtr nativeObj, IntPtr sample_nativeObj, IntPtr probs_nativeObj, double[] retVal);
// C++: bool cv::ml::EM::trainEM(Mat samples, Mat& logLikelihoods = Mat(), Mat& labels = Mat(), Mat& probs = Mat())
[DllImport(LIBNAME)]
[return: MarshalAs(UnmanagedType.U1)]
private static extern bool ml_EM_trainEM_10(IntPtr nativeObj, IntPtr samples_nativeObj, IntPtr logLikelihoods_nativeObj, IntPtr labels_nativeObj, IntPtr probs_nativeObj);
[DllImport(LIBNAME)]
[return: MarshalAs(UnmanagedType.U1)]
private static extern bool ml_EM_trainEM_11(IntPtr nativeObj, IntPtr samples_nativeObj, IntPtr logLikelihoods_nativeObj, IntPtr labels_nativeObj);
[DllImport(LIBNAME)]
[return: MarshalAs(UnmanagedType.U1)]
private static extern bool ml_EM_trainEM_12(IntPtr nativeObj, IntPtr samples_nativeObj, IntPtr logLikelihoods_nativeObj);
[DllImport(LIBNAME)]
[return: MarshalAs(UnmanagedType.U1)]
private static extern bool ml_EM_trainEM_13(IntPtr nativeObj, IntPtr samples_nativeObj);
// C++: bool cv::ml::EM::trainE(Mat samples, Mat means0, Mat covs0 = Mat(), Mat weights0 = Mat(), Mat& logLikelihoods = Mat(), Mat& labels = Mat(), Mat& probs = Mat())
[DllImport(LIBNAME)]
[return: MarshalAs(UnmanagedType.U1)]
private static extern bool ml_EM_trainE_10(IntPtr nativeObj, IntPtr samples_nativeObj, IntPtr means0_nativeObj, IntPtr covs0_nativeObj, IntPtr weights0_nativeObj, IntPtr logLikelihoods_nativeObj, IntPtr labels_nativeObj, IntPtr probs_nativeObj);
[DllImport(LIBNAME)]
[return: MarshalAs(UnmanagedType.U1)]
private static extern bool ml_EM_trainE_11(IntPtr nativeObj, IntPtr samples_nativeObj, IntPtr means0_nativeObj, IntPtr covs0_nativeObj, IntPtr weights0_nativeObj, IntPtr logLikelihoods_nativeObj, IntPtr labels_nativeObj);
[DllImport(LIBNAME)]
[return: MarshalAs(UnmanagedType.U1)]
private static extern bool ml_EM_trainE_12(IntPtr nativeObj, IntPtr samples_nativeObj, IntPtr means0_nativeObj, IntPtr covs0_nativeObj, IntPtr weights0_nativeObj, IntPtr logLikelihoods_nativeObj);
[DllImport(LIBNAME)]
[return: MarshalAs(UnmanagedType.U1)]
private static extern bool ml_EM_trainE_13(IntPtr nativeObj, IntPtr samples_nativeObj, IntPtr means0_nativeObj, IntPtr covs0_nativeObj, IntPtr weights0_nativeObj);
[DllImport(LIBNAME)]
[return: MarshalAs(UnmanagedType.U1)]
private static extern bool ml_EM_trainE_14(IntPtr nativeObj, IntPtr samples_nativeObj, IntPtr means0_nativeObj, IntPtr covs0_nativeObj);
[DllImport(LIBNAME)]
[return: MarshalAs(UnmanagedType.U1)]
private static extern bool ml_EM_trainE_15(IntPtr nativeObj, IntPtr samples_nativeObj, IntPtr means0_nativeObj);
// C++: bool cv::ml::EM::trainM(Mat samples, Mat probs0, Mat& logLikelihoods = Mat(), Mat& labels = Mat(), Mat& probs = Mat())
[DllImport(LIBNAME)]
[return: MarshalAs(UnmanagedType.U1)]
private static extern bool ml_EM_trainM_10(IntPtr nativeObj, IntPtr samples_nativeObj, IntPtr probs0_nativeObj, IntPtr logLikelihoods_nativeObj, IntPtr labels_nativeObj, IntPtr probs_nativeObj);
[DllImport(LIBNAME)]
[return: MarshalAs(UnmanagedType.U1)]
private static extern bool ml_EM_trainM_11(IntPtr nativeObj, IntPtr samples_nativeObj, IntPtr probs0_nativeObj, IntPtr logLikelihoods_nativeObj, IntPtr labels_nativeObj);
[DllImport(LIBNAME)]
[return: MarshalAs(UnmanagedType.U1)]
private static extern bool ml_EM_trainM_12(IntPtr nativeObj, IntPtr samples_nativeObj, IntPtr probs0_nativeObj, IntPtr logLikelihoods_nativeObj);
[DllImport(LIBNAME)]
[return: MarshalAs(UnmanagedType.U1)]
private static extern bool ml_EM_trainM_13(IntPtr nativeObj, IntPtr samples_nativeObj, IntPtr probs0_nativeObj);
// C++: static Ptr_EM cv::ml::EM::create()
[DllImport(LIBNAME)]
private static extern IntPtr ml_EM_create_10();
// C++: static Ptr_EM cv::ml::EM::load(String filepath, String nodeName = String())
[DllImport(LIBNAME)]
private static extern IntPtr ml_EM_load_10(string filepath, string nodeName);
[DllImport(LIBNAME)]
private static extern IntPtr ml_EM_load_11(string filepath);
// native support for java finalize()
[DllImport(LIBNAME)]
private static extern void ml_EM_delete(IntPtr nativeObj);
- }
- }
|