tracker.hpp
  1. /*M///////////////////////////////////////////////////////////////////////////////////////
  2. //
  3. // IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING.
  4. //
  5. // By downloading, copying, installing or using the software you agree to this license.
  6. // If you do not agree to this license, do not download, install,
  7. // copy or use the software.
  8. //
  9. //
  10. // License Agreement
  11. // For Open Source Computer Vision Library
  12. //
  13. // Copyright (C) 2013, OpenCV Foundation, all rights reserved.
  14. // Third party copyrights are property of their respective owners.
  15. //
  16. // Redistribution and use in source and binary forms, with or without modification,
  17. // are permitted provided that the following conditions are met:
  18. //
  19. // * Redistribution's of source code must retain the above copyright notice,
  20. // this list of conditions and the following disclaimer.
  21. //
  22. // * Redistribution's in binary form must reproduce the above copyright notice,
  23. // this list of conditions and the following disclaimer in the documentation
  24. // and/or other materials provided with the distribution.
  25. //
  26. // * The name of the copyright holders may not be used to endorse or promote products
  27. // derived from this software without specific prior written permission.
  28. //
  29. // This software is provided by the copyright holders and contributors "as is" and
  30. // any express or implied warranties, including, but not limited to, the implied
  31. // warranties of merchantability and fitness for a particular purpose are disclaimed.
  32. // In no event shall the Intel Corporation or contributors be liable for any direct,
  33. // indirect, incidental, special, exemplary, or consequential damages
  34. // (including, but not limited to, procurement of substitute goods or services;
  35. // loss of use, data, or profits; or business interruption) however caused
  36. // and on any theory of liability, whether in contract, strict liability,
  37. // or tort (including negligence or otherwise) arising in any way out of
  38. // the use of this software, even if advised of the possibility of such damage.
  39. //
  40. //M*/
  41. #ifndef __OPENCV_TRACKER_HPP__
  42. #define __OPENCV_TRACKER_HPP__
  43. #include "opencv2/core.hpp"
  44. #include "opencv2/imgproc/types_c.h"
  45. #include "feature.hpp"
  46. #include "onlineMIL.hpp"
  47. #include "onlineBoosting.hpp"
  48. /*
  49. * Partially based on:
  50. * ====================================================================================================================
  51. * - [AAM] S. Salti, A. Cavallaro, L. Di Stefano, Adaptive Appearance Modeling for Video Tracking: Survey and Evaluation
  52. * - [AMVOT] X. Li, W. Hu, C. Shen, Z. Zhang, A. Dick, A. van den Hengel, A Survey of Appearance Models in Visual Object Tracking
  53. *
  54. * This Tracking API has been designed with PlantUML. If you modify this API please change UML files under modules/tracking/doc/uml
  55. *
  56. */
  57. namespace cv
  58. {
  59. //! @addtogroup tracking
  60. //! @{
  61. /************************************ TrackerFeature Base Classes ************************************/
  62. /** @brief Abstract base class for TrackerFeature that represents the feature.
  63. */
  64. class CV_EXPORTS TrackerFeature
  65. {
  66. public:
  67. virtual ~TrackerFeature();
  68. /** @brief Compute the features in the images collection
  69. @param images The images
  70. @param response The output response
  71. */
  72. void compute( const std::vector<Mat>& images, Mat& response );
  73. /** @brief Create TrackerFeature by tracker feature type
  74. @param trackerFeatureType The TrackerFeature name
  75. The modes available now:
  76. - "HAAR" -- Haar Feature-based
  77. The modes that will be available soon:
  78. - "HOG" -- Histogram of Oriented Gradients features
  79. - "LBP" -- Local Binary Pattern features
  80. - "FEATURE2D" -- All types of Feature2D
  81. */
  82. static Ptr<TrackerFeature> create( const String& trackerFeatureType );
  83. /** @brief Identify most effective features
  84. @param response Collection of response for the specific TrackerFeature
  85. @param npoints Max number of features
  86. @note This method modifies the response parameter
  87. */
  88. virtual void selection( Mat& response, int npoints ) = 0;
  89. /** @brief Get the name of the specific TrackerFeature
  90. */
  91. String getClassName() const;
  92. protected:
  93. virtual bool computeImpl( const std::vector<Mat>& images, Mat& response ) = 0;
  94. String className;
  95. };
  96. /** @brief Class that manages the extraction and selection of features
  97. @cite AAM Feature Extraction and Feature Set Refinement (Feature Processing and Feature Selection).
  98. See table I and section III C @cite AMVOT Appearance modelling -\> Visual representation (Table II,
  99. section 3.1 - 3.2)
  100. TrackerFeatureSet is an aggregation of TrackerFeature
  101. @sa
  102. TrackerFeature
  103. */
  104. class CV_EXPORTS TrackerFeatureSet
  105. {
  106. public:
  107. TrackerFeatureSet();
  108. ~TrackerFeatureSet();
  109. /** @brief Extract features from the images collection
  110. @param images The input images
  111. */
  112. void extraction( const std::vector<Mat>& images );
  113. /** @brief Identify most effective features for all feature types (optional)
  114. */
  115. void selection();
  116. /** @brief Remove outliers for all feature types (optional)
  117. */
  118. void removeOutliers();
  119. /** @brief Add TrackerFeature in the collection. Return true if TrackerFeature is added, false otherwise
  120. @param trackerFeatureType The TrackerFeature name
  121. The modes available now:
  122. - "HAAR" -- Haar Feature-based
  123. The modes that will be available soon:
  124. - "HOG" -- Histogram of Oriented Gradients features
  125. - "LBP" -- Local Binary Pattern features
  126. - "FEATURE2D" -- All types of Feature2D
  127. Example of TrackerFeatureSet::addTrackerFeature:
  128. @code
  129. //sample usage:
  130. Ptr<TrackerFeature> trackerFeature = new TrackerFeatureHAAR( HAARparameters );
  131. featureSet->addTrackerFeature( trackerFeature );
  132. //or add the HAAR feature extractor by name, with default parameters
  133. //featureSet->addTrackerFeature( "HAAR" );
  134. @endcode
  135. @note If you use the second method, you must initialize the TrackerFeature
  136. */
  137. bool addTrackerFeature( String trackerFeatureType );
  138. /** @overload
  139. @param feature The TrackerFeature class
  140. */
  141. bool addTrackerFeature( Ptr<TrackerFeature>& feature );
  142. /** @brief Get the TrackerFeature collection (TrackerFeature name, TrackerFeature pointer)
  143. */
  144. const std::vector<std::pair<String, Ptr<TrackerFeature> > >& getTrackerFeature() const;
  145. /** @brief Get the responses
  146. @note Be sure to call extraction before getResponses.
  147. */
  148. const std::vector<Mat>& getResponses() const;
  149. private:
  150. void clearResponses();
  151. bool blockAddTrackerFeature;
  152. std::vector<std::pair<String, Ptr<TrackerFeature> > > features; //list of features
  153. std::vector<Mat> responses; //list of response after compute
  154. };
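/* Illustrative sketch (not part of the original header): typical in-tracker use of TrackerFeatureSet
   with a HAAR feature extractor. The `patches` vector is assumed to be filled by a TrackerSampler.
@code
    cv::TrackerFeatureHAAR::Params HAARparameters;                        // default HAAR parameters
    cv::Ptr<cv::TrackerFeature> trackerFeature = cv::makePtr<cv::TrackerFeatureHAAR>( HAARparameters );
    cv::Ptr<cv::TrackerFeatureSet> featureSet = cv::makePtr<cv::TrackerFeatureSet>();
    featureSet->addTrackerFeature( trackerFeature );
    std::vector<cv::Mat> patches;                                         // assumed: patches produced by a TrackerSampler
    featureSet->extraction( patches );                                    // compute the features on every patch
    const std::vector<cv::Mat>& responses = featureSet->getResponses();   // one response Mat per TrackerFeature
@endcode
*/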
  155. /************************************ TrackerSampler Base Classes ************************************/
  156. /** @brief Abstract base class for TrackerSamplerAlgorithm that represents the algorithm for the specific
  157. sampler.
  158. */
  159. class CV_EXPORTS TrackerSamplerAlgorithm
  160. {
  161. public:
  162. /**
  163. * \brief Destructor
  164. */
  165. virtual ~TrackerSamplerAlgorithm();
  166. /** @brief Create TrackerSamplerAlgorithm by tracker sampler type.
  167. @param trackerSamplerType The trackerSamplerType name
  168. The modes available now:
  169. - "CSC" -- Current State Center
  170. - "CS" -- Current State
  171. */
  172. static Ptr<TrackerSamplerAlgorithm> create( const String& trackerSamplerType );
  173. /** @brief Computes the regions starting from a position in an image.
  174. Return true if samples are computed, false otherwise
  175. @param image The current frame
  176. @param boundingBox The bounding box from which regions can be calculated
  177. @param sample The computed samples @cite AAM Fig. 1 variable Sk
  178. */
  179. bool sampling( const Mat& image, Rect boundingBox, std::vector<Mat>& sample );
  180. /** @brief Get the name of the specific TrackerSamplerAlgorithm
  181. */
  182. String getClassName() const;
  183. protected:
  184. String className;
  185. virtual bool samplingImpl( const Mat& image, Rect boundingBox, std::vector<Mat>& sample ) = 0;
  186. };
  191. /** @brief Class that manages the sampler in order to select regions for updating the model of the tracker
  192. @cite AAM Sampling and Labeling. See table I and section III B
  193. TrackerSampler is an aggregation of TrackerSamplerAlgorithm
  194. @sa
  195. TrackerSamplerAlgorithm
  196. */
  197. class CV_EXPORTS TrackerSampler
  198. {
  199. public:
  200. /**
  201. * \brief Constructor
  202. */
  203. TrackerSampler();
  204. /**
  205. * \brief Destructor
  206. */
  207. ~TrackerSampler();
  208. /** @brief Computes the regions starting from a position in an image
  209. @param image The current frame
  210. @param boundingBox The bounding box from which regions can be calculated
  211. */
  212. void sampling( const Mat& image, Rect boundingBox );
  213. /** @brief Return the collection of the TrackerSamplerAlgorithm
  214. */
  215. const std::vector<std::pair<String, Ptr<TrackerSamplerAlgorithm> > >& getSamplers() const;
  216. /** @brief Return the samples from all TrackerSamplerAlgorithm, @cite AAM Fig. 1 variable Sk
  217. */
  218. const std::vector<Mat>& getSamples() const;
  219. /** @brief Add TrackerSamplerAlgorithm in the collection. Return true if sampler is added, false otherwise
  220. @param trackerSamplerAlgorithmType The TrackerSamplerAlgorithm name
  221. The modes available now:
  222. - "CSC" -- Current State Center
  223. - "CS" -- Current State
  224. - "PF" -- Particle Filtering
  225. Example of TrackerSampler::addTrackerSamplerAlgorithm:
  226. @code
  227. TrackerSamplerCSC::Params CSCparameters;
  228. Ptr<TrackerSamplerAlgorithm> CSCSampler = new TrackerSamplerCSC( CSCparameters );
  229. if( !sampler->addTrackerSamplerAlgorithm( CSCSampler ) )
  230. return false;
  231. //or add CSC sampler with default parameters
  232. //sampler->addTrackerSamplerAlgorithm( "CSC" );
  233. @endcode
  234. @note If you use the second method, you must initialize the TrackerSamplerAlgorithm
  235. */
  236. bool addTrackerSamplerAlgorithm( String trackerSamplerAlgorithmType );
  237. /** @overload
  238. @param sampler The TrackerSamplerAlgorithm
  239. */
  240. bool addTrackerSamplerAlgorithm( Ptr<TrackerSamplerAlgorithm>& sampler );
  241. private:
  242. std::vector<std::pair<String, Ptr<TrackerSamplerAlgorithm> > > samplers;
  243. std::vector<Mat> samples;
  244. bool blockAddTrackerSampler;
  245. void clearSamples();
  246. };
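/* Illustrative sketch (not part of the original header): typical in-tracker use of the TrackerSampler
   aggregate with a CSC sampling algorithm. `frame` and `boundingBox` are assumed to be provided by the
   tracker's initImpl/updateImpl.
@code
    cv::Ptr<cv::TrackerSampler> sampler = cv::makePtr<cv::TrackerSampler>();
    cv::TrackerSamplerCSC::Params CSCparameters;                          // default CSC parameters
    cv::Ptr<cv::TrackerSamplerAlgorithm> CSCSampler = cv::makePtr<cv::TrackerSamplerCSC>( CSCparameters );
    sampler->addTrackerSamplerAlgorithm( CSCSampler );
    sampler->sampling( frame, boundingBox );                              // gather samples around the current target
    const std::vector<cv::Mat>& samples = sampler->getSamples();          // @cite AAM Fig. 1 variable Sk
@endcode
*/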
  247. /************************************ TrackerModel Base Classes ************************************/
  248. /** @brief Abstract base class for TrackerTargetState that represents a possible state of the target.
  249. See @cite AAM \f$\hat{x}^{i}_{k}\f$ all the state candidates.
  250. Inherit from this class for your own target state; in your implementation you can add scale variation,
  251. width, height, orientation, etc.
  252. */
  253. class CV_EXPORTS TrackerTargetState
  254. {
  255. public:
  256. virtual ~TrackerTargetState() {}
  260. /**
  261. * \brief Get the position
  262. * \return The position
  263. */
  264. Point2f getTargetPosition() const;
  265. /**
  266. * \brief Set the position
  267. * \param position The position
  268. */
  269. void setTargetPosition( const Point2f& position );
  270. /**
  271. * \brief Get the width of the target
  272. * \return The width of the target
  273. */
  274. int getTargetWidth() const;
  275. /**
  276. * \brief Set the width of the target
  277. * \param width The width of the target
  278. */
  279. void setTargetWidth( int width );
  280. /**
  281. * \brief Get the height of the target
  282. * \return The height of the target
  283. */
  284. int getTargetHeight() const;
  285. /**
  286. * \brief Set the height of the target
  287. * \param height The height of the target
  288. */
  289. void setTargetHeight( int height );
  290. protected:
  291. Point2f targetPosition;
  292. int targetWidth;
  293. int targetHeight;
  294. };
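/* Illustrative sketch (not part of the original header): extending TrackerTargetState with an extra
   degree of freedom (orientation), as suggested above. The class name is hypothetical.
@code
    class RotatedTargetState : public cv::TrackerTargetState
    {
    public:
        void setTargetOrientation( float angleDeg ) { orientation = angleDeg; }
        float getTargetOrientation() const { return orientation; }
    private:
        float orientation;   // additional state beyond position, width and height
    };
@endcode
*/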
  295. /** @brief Represents the model of the target at frame \f$k\f$ (all states and scores)
  296. See @cite AAM The set of the pair \f$\langle \hat{x}^{i}_{k}, C^{i}_{k} \rangle\f$
  297. @sa TrackerTargetState
  298. */
  299. typedef std::vector<std::pair<Ptr<TrackerTargetState>, float> > ConfidenceMap;
  300. /** @brief Represents the estimated states for all frames
  301. @cite AAM \f$x_{k}\f$ is the trajectory of the target up to time \f$k\f$
  302. @sa TrackerTargetState
  303. */
  304. typedef std::vector<Ptr<TrackerTargetState> > Trajectory;
  305. /** @brief Abstract base class for TrackerStateEstimator that estimates the most likely target state.
  306. See @cite AAM State estimator
  307. See @cite AMVOT Statistical modeling (Fig. 3), Table III (generative) - IV (discriminative) - V (hybrid)
  308. */
  309. class CV_EXPORTS TrackerStateEstimator
  310. {
  311. public:
  312. virtual ~TrackerStateEstimator();
  313. /** @brief Estimate the most likely target state, return the estimated state
  314. @param confidenceMaps The overall appearance model as a list of ConfidenceMap
  315. */
  316. Ptr<TrackerTargetState> estimate( const std::vector<ConfidenceMap>& confidenceMaps );
  317. /** @brief Update the ConfidenceMap with the scores
  318. @param confidenceMaps The overall appearance model as a list of ConfidenceMap
  319. */
  320. void update( std::vector<ConfidenceMap>& confidenceMaps );
  321. /** @brief Create TrackerStateEstimator by tracker state estimator type
  322. @param trackerStateEstimatorType The TrackerStateEstimator name
  323. The modes available now:
  324. - "BOOSTING" -- Boosting-based discriminative appearance models. See @cite AMVOT section 4.4
  325. The modes available soon:
  326. - "SVM" -- SVM-based discriminative appearance models. See @cite AMVOT section 4.5
  327. */
  328. static Ptr<TrackerStateEstimator> create( const String& trackerStateEstimatorType );
  329. /** @brief Get the name of the specific TrackerStateEstimator
  330. */
  331. String getClassName() const;
  332. protected:
  333. virtual Ptr<TrackerTargetState> estimateImpl( const std::vector<ConfidenceMap>& confidenceMaps ) = 0;
  334. virtual void updateImpl( std::vector<ConfidenceMap>& confidenceMaps ) = 0;
  335. String className;
  336. };
  337. /** @brief Abstract class that represents the model of the target. It must be instantiated by a specialized
  338. tracker.
  339. See @cite AAM \f$A_{k}\f$
  340. Inherit from this class with your own TrackerModel
  341. */
  342. class CV_EXPORTS TrackerModel
  343. {
  344. public:
  345. /**
  346. * \brief Constructor
  347. */
  348. TrackerModel();
  349. /**
  350. * \brief Destructor
  351. */
  352. virtual ~TrackerModel();
  353. /** @brief Set the TrackerStateEstimator; return true if the tracker state estimator is added, false otherwise
  354. @param trackerStateEstimator The TrackerStateEstimator
  355. @note You can add only one TrackerStateEstimator
  356. */
  357. bool setTrackerStateEstimator( Ptr<TrackerStateEstimator> trackerStateEstimator );
  358. /** @brief Estimate the most likely target location
  359. @cite AAM ME, Model Estimation table I
  360. @param responses Features extracted from TrackerFeatureSet
  361. */
  362. void modelEstimation( const std::vector<Mat>& responses );
  363. /** @brief Update the model
  364. @cite AAM MU, Model Update table I
  365. */
  366. void modelUpdate();
  367. /** @brief Run the TrackerStateEstimator, return true if it is possible to estimate a new state, false otherwise
  368. */
  369. bool runStateEstimator();
  370. /** @brief Set the current TrackerTargetState in the Trajectory
  371. @param lastTargetState The current TrackerTargetState
  372. */
  373. void setLastTargetState( const Ptr<TrackerTargetState>& lastTargetState );
  374. /** @brief Get the last TrackerTargetState from Trajectory
  375. */
  376. Ptr<TrackerTargetState> getLastTargetState() const;
  377. /** @brief Get the list of the ConfidenceMap
  378. */
  379. const std::vector<ConfidenceMap>& getConfidenceMaps() const;
  380. /** @brief Get the last ConfidenceMap for the current frame
  381. */
  382. const ConfidenceMap& getLastConfidenceMap() const;
  383. /** @brief Get the TrackerStateEstimator
  384. */
  385. Ptr<TrackerStateEstimator> getTrackerStateEstimator() const;
  386. private:
  387. void clearCurrentConfidenceMap();
  388. protected:
  389. std::vector<ConfidenceMap> confidenceMaps;
  390. Ptr<TrackerStateEstimator> stateEstimator;
  391. ConfidenceMap currentConfidenceMap;
  392. Trajectory trajectory;
  393. int maxCMLength;
  394. virtual void modelEstimationImpl( const std::vector<Mat>& responses ) = 0;
  395. virtual void modelUpdateImpl() = 0;
  396. };
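/* Illustrative sketch (not part of the original header): the two hooks a specialized tracker implements
   when deriving its own model. The class name and the comments in the bodies are hypothetical.
@code
    class MyTrackerModel : public cv::TrackerModel
    {
    protected:
        void modelEstimationImpl( const std::vector<cv::Mat>& responses ) CV_OVERRIDE
        {
            // build currentConfidenceMap (state candidates + scores) from the feature responses
        }
        void modelUpdateImpl() CV_OVERRIDE
        {
            // refresh the appearance model after the state estimation step
        }
    };
@endcode
*/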
  397. /************************************ Tracker Base Class ************************************/
  398. /** @brief Base abstract class for the long-term tracker:
  399. */
  400. class CV_EXPORTS_W Tracker : public virtual Algorithm
  401. {
  402. public:
  403. virtual ~Tracker() CV_OVERRIDE;
  404. /** @brief Initialize the tracker with a known bounding box that surrounds the target
  405. @param image The initial frame
  406. @param boundingBox The initial bounding box
  407. @return True if initialization went successfully, false otherwise
  408. */
  409. CV_WRAP bool init( InputArray image, const Rect2d& boundingBox );
  410. /** @brief Update the tracker, find the new most likely bounding box for the target
  411. @param image The current frame
  412. @param boundingBox The bounding box that represents the new target location if true was returned; not
  413. modified otherwise
  414. @return True means that the target was located and false means that the tracker cannot locate the target in the
  415. current frame. Note that the latter *does not* imply that the tracker has failed; the target may simply be
  416. missing from the frame (say, out of sight)
  417. */
  418. CV_WRAP bool update( InputArray image, CV_OUT Rect2d& boundingBox );
  419. virtual void read( const FileNode& fn ) CV_OVERRIDE = 0;
  420. virtual void write( FileStorage& fs ) const CV_OVERRIDE = 0;
  421. protected:
  422. virtual bool initImpl( const Mat& image, const Rect2d& boundingBox ) = 0;
  423. virtual bool updateImpl( const Mat& image, Rect2d& boundingBox ) = 0;
  424. bool isInit;
  425. Ptr<TrackerFeatureSet> featureSet;
  426. Ptr<TrackerSampler> sampler;
  427. Ptr<TrackerModel> model;
  428. };
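/* Illustrative sketch (not part of the original header): the basic init/update loop shared by the
   concrete trackers declared below. The video file name, the initial ROI and the choice of TrackerKCF
   are assumptions made only for this example (requires opencv2/videoio.hpp).
@code
    cv::VideoCapture cap( "video.avi" );                       // assumed input sequence
    cv::Mat frame;
    cap >> frame;
    cv::Rect2d roi( 100, 100, 80, 120 );                       // assumed initial bounding box
    cv::Ptr<cv::Tracker> tracker = cv::TrackerKCF::create();   // any concrete tracker can be used here
    tracker->init( frame, roi );
    while ( cap.read( frame ) )
    {
        bool located = tracker->update( frame, roi );
        // located == false does not necessarily mean failure: the target may simply be out of sight
    }
@endcode
*/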
  429. /************************************ Specific TrackerStateEstimator Classes ************************************/
  430. /** @brief TrackerStateEstimator based on Boosting
  431. */
  432. class CV_EXPORTS TrackerStateEstimatorMILBoosting : public TrackerStateEstimator
  433. {
  434. public:
  435. /**
  436. * Implementation of the target state for TrackerStateEstimatorMILBoosting
  437. */
  438. class TrackerMILTargetState : public TrackerTargetState
  439. {
  440. public:
  441. /**
  442. * \brief Constructor
  443. * \param position Top left corner of the bounding box
  444. * \param width Width of the bounding box
  445. * \param height Height of the bounding box
  446. * \param foreground label for target or background
  447. * \param features features extracted
  448. */
  449. TrackerMILTargetState( const Point2f& position, int width, int height, bool foreground, const Mat& features );
  450. /**
  451. * \brief Destructor
  452. */
  453. ~TrackerMILTargetState() {}
  457. /** @brief Set label: true for target foreground, false for background
  458. @param foreground Label for background/foreground
  459. */
  460. void setTargetFg( bool foreground );
  461. /** @brief Set the features extracted from TrackerFeatureSet
  462. @param features The features extracted
  463. */
  464. void setFeatures( const Mat& features );
  465. /** @brief Get the label. Return true for target foreground, false for background
  466. */
  467. bool isTargetFg() const;
  468. /** @brief Get the features extracted
  469. */
  470. Mat getFeatures() const;
  471. private:
  472. bool isTarget;
  473. Mat targetFeatures;
  474. };
  475. /** @brief Constructor
  476. @param nFeatures Number of features for each sample
  477. */
  478. TrackerStateEstimatorMILBoosting( int nFeatures = 250 );
  479. ~TrackerStateEstimatorMILBoosting();
  480. /** @brief Set the current confidenceMap
  481. @param confidenceMap The current ConfidenceMap
  482. */
  483. void setCurrentConfidenceMap( ConfidenceMap& confidenceMap );
  484. protected:
  485. Ptr<TrackerTargetState> estimateImpl( const std::vector<ConfidenceMap>& confidenceMaps ) CV_OVERRIDE;
  486. void updateImpl( std::vector<ConfidenceMap>& confidenceMaps ) CV_OVERRIDE;
  487. private:
  488. uint max_idx( const std::vector<float> &v );
  489. void prepareData( const ConfidenceMap& confidenceMap, Mat& positive, Mat& negative );
  490. ClfMilBoost boostMILModel;
  491. bool trained;
  492. int numFeatures;
  493. ConfidenceMap currentConfidenceMap;
  494. };
  495. /** @brief TrackerStateEstimatorAdaBoosting based on AdaBoost
  496. */
  497. class CV_EXPORTS TrackerStateEstimatorAdaBoosting : public TrackerStateEstimator
  498. {
  499. public:
  500. /** @brief Implementation of the target state for TrackerStateEstimatorAdaBoosting
  501. */
  502. class TrackerAdaBoostingTargetState : public TrackerTargetState
  503. {
  504. public:
  505. /**
  506. * \brief Constructor
  507. * \param position Top left corner of the bounding box
  508. * \param width Width of the bounding box
  509. * \param height Height of the bounding box
  510. * \param foreground label for target or background
  511. * \param responses list of features
  512. */
  513. TrackerAdaBoostingTargetState( const Point2f& position, int width, int height, bool foreground, const Mat& responses );
  514. /**
  515. * \brief Destructor
  516. */
  517. ~TrackerAdaBoostingTargetState() {}
  521. /** @brief Set the features extracted from TrackerFeatureSet
  522. @param responses The features extracted
  523. */
  524. void setTargetResponses( const Mat& responses );
  525. /** @brief Set label: true for target foreground, false for background
  526. @param foreground Label for background/foreground
  527. */
  528. void setTargetFg( bool foreground );
  529. /** @brief Get the features extracted
  530. */
  531. Mat getTargetResponses() const;
  532. /** @brief Get the label. Return true for target foreground, false for background
  533. */
  534. bool isTargetFg() const;
  535. private:
  536. bool isTarget;
  537. Mat targetResponses;
  538. };
  539. /** @brief Constructor
  540. @param numClassifer Number of base classifiers
  541. @param initIterations Number of iterations in the initialization
  542. @param nFeatures Number of features/weak classifiers
  543. @param patchSize tracking rect
  544. @param ROI initial ROI
  545. */
  546. TrackerStateEstimatorAdaBoosting( int numClassifer, int initIterations, int nFeatures, Size patchSize, const Rect& ROI );
  547. /**
  548. * \brief Destructor
  549. */
  550. ~TrackerStateEstimatorAdaBoosting();
  551. /** @brief Get the sampling ROI
  552. */
  553. Rect getSampleROI() const;
  554. /** @brief Set the sampling ROI
  555. @param ROI the sampling ROI
  556. */
  557. void setSampleROI( const Rect& ROI );
  558. /** @brief Set the current confidenceMap
  559. @param confidenceMap The current ConfidenceMap
  560. */
  561. void setCurrentConfidenceMap( ConfidenceMap& confidenceMap );
  562. /** @brief Get the list of the selected weak classifiers for the classification step
  563. */
  564. std::vector<int> computeSelectedWeakClassifier();
  565. /** @brief Get the list of the weak classifiers that should be replaced
  566. */
  567. std::vector<int> computeReplacedClassifier();
  568. /** @brief Get the list of the weak classifiers that replace those to be replaced
  569. */
  570. std::vector<int> computeSwappedClassifier();
  571. protected:
  572. Ptr<TrackerTargetState> estimateImpl( const std::vector<ConfidenceMap>& confidenceMaps ) CV_OVERRIDE;
  573. void updateImpl( std::vector<ConfidenceMap>& confidenceMaps ) CV_OVERRIDE;
  574. Ptr<StrongClassifierDirectSelection> boostClassifier;
  575. private:
  576. int numBaseClassifier;
  577. int iterationInit;
  578. int numFeatures;
  579. bool trained;
  580. Size initPatchSize;
  581. Rect sampleROI;
  582. std::vector<int> replacedClassifier;
  583. std::vector<int> swappedClassifier;
  584. ConfidenceMap currentConfidenceMap;
  585. };
  586. /**
  587. * \brief TrackerStateEstimator based on SVM
  588. */
  589. class CV_EXPORTS TrackerStateEstimatorSVM : public TrackerStateEstimator
  590. {
  591. public:
  592. TrackerStateEstimatorSVM();
  593. ~TrackerStateEstimatorSVM();
  594. protected:
  595. Ptr<TrackerTargetState> estimateImpl( const std::vector<ConfidenceMap>& confidenceMaps ) CV_OVERRIDE;
  596. void updateImpl( std::vector<ConfidenceMap>& confidenceMaps ) CV_OVERRIDE;
  597. };
  598. /************************************ Specific TrackerSamplerAlgorithm Classes ************************************/
  599. /** @brief TrackerSampler based on CSC (current state centered), used by MIL algorithm TrackerMIL
  600. */
  601. class CV_EXPORTS TrackerSamplerCSC : public TrackerSamplerAlgorithm
  602. {
  603. public:
  604. enum
  605. {
  606. MODE_INIT_POS = 1, //!< mode for init positive samples
  607. MODE_INIT_NEG = 2, //!< mode for init negative samples
  608. MODE_TRACK_POS = 3, //!< mode for update positive samples
  609. MODE_TRACK_NEG = 4, //!< mode for update negative samples
  610. MODE_DETECT = 5 //!< mode for detect samples
  611. };
  612. struct CV_EXPORTS Params
  613. {
  614. Params();
  615. float initInRad; //!< radius for gathering positive instances during init
  616. float trackInPosRad; //!< radius for gathering positive instances during tracking
  617. float searchWinSize; //!< size of search window
  618. int initMaxNegNum; //!< # negative samples to use during init
  619. int trackMaxPosNum; //!< # positive samples to use during training
  620. int trackMaxNegNum; //!< # negative samples to use during training
  621. };
  622. /** @brief Constructor
  623. @param parameters TrackerSamplerCSC parameters TrackerSamplerCSC::Params
  624. */
  625. TrackerSamplerCSC( const TrackerSamplerCSC::Params &parameters = TrackerSamplerCSC::Params() );
  626. /** @brief Set the sampling mode of TrackerSamplerCSC
  627. @param samplingMode The sampling mode
  628. The modes are:
  629. - "MODE_INIT_POS = 1" -- for the positive sampling in initialization step
  630. - "MODE_INIT_NEG = 2" -- for the negative sampling in initialization step
  631. - "MODE_TRACK_POS = 3" -- for the positive sampling in update step
  632. - "MODE_TRACK_NEG = 4" -- for the negative sampling in update step
  633. - "MODE_DETECT = 5" -- for the sampling in detection step
  634. */
  635. void setMode( int samplingMode );
  636. ~TrackerSamplerCSC();
  637. protected:
  638. bool samplingImpl( const Mat& image, Rect boundingBox, std::vector<Mat>& sample ) CV_OVERRIDE;
  639. private:
  640. Params params;
  641. int mode;
  642. RNG rng;
  643. std::vector<Mat> sampleImage( const Mat& img, int x, int y, int w, int h, float inrad, float outrad = 0, int maxnum = 1000000 );
  644. };
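/* Illustrative sketch (not part of the original header): selecting a CSC sampling mode before gathering
   samples. `frame` and `boundingBox` are assumed to be provided by the tracker; the radius value is
   only an example.
@code
    cv::TrackerSamplerCSC::Params params;
    params.initInRad = 3.0f;                                   // example radius for positive samples at init
    cv::TrackerSamplerCSC cscSampler( params );
    cscSampler.setMode( cv::TrackerSamplerCSC::MODE_INIT_POS );
    std::vector<cv::Mat> positiveSamples;
    cscSampler.sampling( frame, boundingBox, positiveSamples );
@endcode
*/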
  645. /** @brief TrackerSampler based on CS (current state), used by algorithm TrackerBoosting
  646. */
  647. class CV_EXPORTS TrackerSamplerCS : public TrackerSamplerAlgorithm
  648. {
  649. public:
  650. enum
  651. {
  652. MODE_POSITIVE = 1, //!< mode for positive samples
  653. MODE_NEGATIVE = 2, //!< mode for negative samples
  654. MODE_CLASSIFY = 3 //!< mode for classify samples
  655. };
  656. struct CV_EXPORTS Params
  657. {
  658. Params();
  659. float overlap; //!<overlapping for the search windows
  660. float searchFactor; //!<search region parameter
  661. };
  662. /** @brief Constructor
  663. @param parameters TrackerSamplerCS parameters TrackerSamplerCS::Params
  664. */
  665. TrackerSamplerCS( const TrackerSamplerCS::Params &parameters = TrackerSamplerCS::Params() );
  666. /** @brief Set the sampling mode of TrackerSamplerCS
  667. @param samplingMode The sampling mode
  668. The modes are:
  669. - "MODE_POSITIVE = 1" -- for the positive sampling
  670. - "MODE_NEGATIVE = 2" -- for the negative sampling
  671. - "MODE_CLASSIFY = 3" -- for the sampling in classification step
  672. */
  673. void setMode( int samplingMode );
  674. ~TrackerSamplerCS();
  675. bool samplingImpl( const Mat& image, Rect boundingBox, std::vector<Mat>& sample ) CV_OVERRIDE;
  676. Rect getROI() const;
  677. private:
  678. Rect getTrackingROI( float searchFactor );
  679. Rect RectMultiply( const Rect & rect, float f );
  680. std::vector<Mat> patchesRegularScan( const Mat& image, Rect trackingROI, Size patchSize );
  681. void setCheckedROI( Rect imageROI );
  682. Params params;
  683. int mode;
  684. Rect trackedPatch;
  685. Rect validROI;
  686. Rect ROI;
  687. };
  688. /** @brief This sampler is based on particle filtering.
  689. In principle, it can be thought of as performing some sort of optimization (and indeed, this
  690. tracker uses OpenCV's optim module), where the tracker seeks the rectangle in the given frame
  691. which is the most *"similar"* to the initial rectangle (the one given through the constructor).
  692. The optimization performed is stochastic and somewhat resembles genetic algorithms, where on each new
  693. image received (submitted via TrackerSamplerPF::sampling()) we start with the region bounded by
  694. boundingBox, then generate several "perturbed" boxes and keep the ones most similar to the original.
  695. This selection round is repeated several times. At the end, we hope that only the most promising boxes
  696. remain, and these are combined to produce the subrectangle of the image, which is put as the sole
  697. element in the array sample.
  698. It should be noted that the definition of "similarity" between two rectangles is based on comparing
  699. their histograms. As experiments show, the tracker is *not* very successful if the target is assumed to
  700. change its dimensions strongly.
  701. */
  702. class CV_EXPORTS TrackerSamplerPF : public TrackerSamplerAlgorithm
  703. {
  704. public:
  705. /** @brief This structure contains all the parameters that can be varied during the course of sampling
  706. algorithm. Below is the structure exposed, together with its members briefly explained with
  707. reference to the above discussion on algorithm's working.
  708. */
  709. struct CV_EXPORTS Params
  710. {
  711. Params();
  712. int iterationNum; //!< number of selection rounds
  713. int particlesNum; //!< number of "perturbed" boxes on each round
  714. double alpha; //!< with each new round we exponentially decrease the amount of "perturbing" we allow (like in simulated annealing)
  715. //!< and this very alpha controls how fast annealing happens, i.e. how fast perturbing decreases
  716. Mat_<double> std; //!< initial values for perturbing (1-by-4 array, as each rectangle is given by 4 values -- coordinates of opposite vertices,
  717. //!< hence we have 4 values to perturb)
  718. };
  719. /** @brief Constructor
  720. @param chosenRect Initial rectangle that is supposed to contain the target we'd like to track.
  721. @param parameters
  722. */
  723. TrackerSamplerPF(const Mat& chosenRect,const TrackerSamplerPF::Params &parameters = TrackerSamplerPF::Params());
  724. protected:
  725. bool samplingImpl( const Mat& image, Rect boundingBox, std::vector<Mat>& sample ) CV_OVERRIDE;
  726. private:
  727. Params params;
  728. Ptr<MinProblemSolver> _solver;
  729. Ptr<MinProblemSolver::Function> _function;
  730. };
  731. /************************************ Specific TrackerFeature Classes ************************************/
  732. /**
  733. * \brief TrackerFeature based on Feature2D
  734. */
  735. class CV_EXPORTS TrackerFeatureFeature2d : public TrackerFeature
  736. {
  737. public:
  738. /**
  739. * \brief Constructor
  740. * \param detectorType string of FeatureDetector
  741. * \param descriptorType string of DescriptorExtractor
  742. */
  743. TrackerFeatureFeature2d( String detectorType, String descriptorType );
  744. ~TrackerFeatureFeature2d() CV_OVERRIDE;
  745. void selection( Mat& response, int npoints ) CV_OVERRIDE;
  746. protected:
  747. bool computeImpl( const std::vector<Mat>& images, Mat& response ) CV_OVERRIDE;
  748. private:
  749. std::vector<KeyPoint> keypoints;
  750. };
  751. /**
  752. * \brief TrackerFeature based on HOG
  753. */
  754. class CV_EXPORTS TrackerFeatureHOG : public TrackerFeature
  755. {
  756. public:
  757. TrackerFeatureHOG();
  758. ~TrackerFeatureHOG() CV_OVERRIDE;
  759. void selection( Mat& response, int npoints ) CV_OVERRIDE;
  760. protected:
  761. bool computeImpl( const std::vector<Mat>& images, Mat& response ) CV_OVERRIDE;
  762. };
  763. /** @brief TrackerFeature based on HAAR features, used by TrackerMIL and many other algorithms
  764. @note HAAR features implementation is copied from apps/traincascade and modified according to MIL
  765. */
  766. class CV_EXPORTS TrackerFeatureHAAR : public TrackerFeature
  767. {
  768. public:
  769. struct CV_EXPORTS Params
  770. {
  771. Params();
  772. int numFeatures; //!< # of rects
  773. Size rectSize; //!< rect size
  774. bool isIntegral; //!< true if input images are integral, false otherwise
  775. };
  776. /** @brief Constructor
  777. @param parameters TrackerFeatureHAAR parameters TrackerFeatureHAAR::Params
  778. */
  779. TrackerFeatureHAAR( const TrackerFeatureHAAR::Params &parameters = TrackerFeatureHAAR::Params() );
  780. ~TrackerFeatureHAAR() CV_OVERRIDE;
  781. /** @brief Compute the features only for the selected indices in the images collection
  782. @param selFeatures indices of selected features
  783. @param images The images
  784. @param response Collection of response for the specific TrackerFeature
  785. */
  786. bool extractSelected( const std::vector<int> selFeatures, const std::vector<Mat>& images, Mat& response );
  787. /** @brief Identify most effective features
  788. @param response Collection of response for the specific TrackerFeature
  789. @param npoints Max number of features
  790. @note This method modifies the response parameter
  791. */
  792. void selection( Mat& response, int npoints ) CV_OVERRIDE;
  793. /** @brief Swap the feature in position source with the feature in position target
  794. @param source The source position
  795. @param target The target position
  796. */
  797. bool swapFeature( int source, int target );
  798. /** @brief Swap the feature in position id with the feature input
  799. @param id The position
  800. @param feature The feature
  801. */
  802. bool swapFeature( int id, CvHaarEvaluator::FeatureHaar& feature );
  803. /** @brief Get the feature in position id
  804. @param id The position
  805. */
  806. CvHaarEvaluator::FeatureHaar& getFeatureAt( int id );
  807. protected:
  808. bool computeImpl( const std::vector<Mat>& images, Mat& response ) CV_OVERRIDE;
  809. private:
  810. Params params;
  811. Ptr<CvHaarEvaluator> featureEvaluator;
  812. };
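/* Illustrative sketch (not part of the original header): configuring TrackerFeatureHAAR and computing
   responses on a set of patches. The parameter values are examples; `patches` is assumed to be filled
   by a sampler.
@code
    cv::TrackerFeatureHAAR::Params params;
    params.numFeatures = 250;                                  // number of Haar-like rect features
    params.rectSize = cv::Size( 30, 30 );                      // size of the patches the features are built for
    params.isIntegral = false;                                 // patches are plain (not integral) images
    cv::TrackerFeatureHAAR haarFeature( params );
    std::vector<cv::Mat> patches;                              // assumed: sampled patches
    cv::Mat response;
    haarFeature.compute( patches, response );                  // feature responses for all patches
@endcode
*/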
  813. /**
  814. * \brief TrackerFeature based on LBP
  815. */
  816. class CV_EXPORTS TrackerFeatureLBP : public TrackerFeature
  817. {
  818. public:
  819. TrackerFeatureLBP();
  820. ~TrackerFeatureLBP();
  821. void selection( Mat& response, int npoints ) CV_OVERRIDE;
  822. protected:
  823. bool computeImpl( const std::vector<Mat>& images, Mat& response ) CV_OVERRIDE;
  824. };
  825. /************************************ Specific Tracker Classes ************************************/
  826. /** @brief The MIL algorithm trains a classifier in an online manner to separate the object from the
  827. background.
  828. Multiple Instance Learning avoids the drift problem, yielding robust tracking. The implementation is
  829. based on @cite MIL .
  830. Original code can be found here <http://vision.ucsd.edu/~bbabenko/project_miltrack.shtml>
  831. */
  832. class CV_EXPORTS_W TrackerMIL : public Tracker
  833. {
  834. public:
  835. struct CV_EXPORTS Params
  836. {
  837. Params();
  838. //parameters for sampler
  839. float samplerInitInRadius; //!< radius for gathering positive instances during init
  840. int samplerInitMaxNegNum; //!< # negative samples to use during init
  841. float samplerSearchWinSize; //!< size of search window
  842. float samplerTrackInRadius; //!< radius for gathering positive instances during tracking
  843. int samplerTrackMaxPosNum; //!< # positive samples to use during tracking
  844. int samplerTrackMaxNegNum; //!< # negative samples to use during tracking
  845. int featureSetNumFeatures; //!< # features
  846. void read( const FileNode& fn );
  847. void write( FileStorage& fs ) const;
  848. };
  849. /** @brief Constructor
  850. @param parameters MIL parameters TrackerMIL::Params
  851. */
  852. static Ptr<TrackerMIL> create(const TrackerMIL::Params &parameters);
  853. CV_WRAP static Ptr<TrackerMIL> create();
  854. virtual ~TrackerMIL() CV_OVERRIDE {}
  855. };
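/* Illustrative sketch (not part of the original header): tuning TrackerMIL through its Params before
   creation. The values shown are only examples.
@code
    cv::TrackerMIL::Params params;
    params.samplerSearchWinSize = 25.0f;                       // example search window size
    params.featureSetNumFeatures = 250;                        // example number of HAAR features
    cv::Ptr<cv::TrackerMIL> tracker = cv::TrackerMIL::create( params );
@endcode
*/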
  856. /** @brief This is a real-time object tracker based on a novel on-line version of the AdaBoost algorithm.
  857. The classifier uses the surrounding background as negative examples in the update step to avoid the
  858. drifting problem. The implementation is based on @cite OLB .
  859. */
  860. class CV_EXPORTS_W TrackerBoosting : public Tracker
  861. {
  862. public:
  863. struct CV_EXPORTS Params
  864. {
  865. Params();
  866. int numClassifiers; //!<the number of classifiers to use in a OnlineBoosting algorithm
  867. float samplerOverlap; //!<search region parameters to use in a OnlineBoosting algorithm
  868. float samplerSearchFactor; //!< search region parameters to use in a OnlineBoosting algorithm
  869. int iterationInit; //!<the initial iterations
  870. int featureSetNumFeatures; //!< # features
  871. /**
  872. * \brief Read parameters from a file
  873. */
  874. void read( const FileNode& fn );
  875. /**
  876. * \brief Write parameters to a file
  877. */
  878. void write( FileStorage& fs ) const;
  879. };
  880. /** @brief Constructor
  881. @param parameters BOOSTING parameters TrackerBoosting::Params
  882. */
  883. static Ptr<TrackerBoosting> create(const TrackerBoosting::Params &parameters);
  884. CV_WRAP static Ptr<TrackerBoosting> create();
  885. virtual ~TrackerBoosting() CV_OVERRIDE {}
  886. };
  887. /** @brief Median Flow tracker implementation.
  888. Implementation of a paper @cite MedianFlow .
  889. The tracker is suitable for very smooth and predictable movements when the object is visible throughout
  890. the whole sequence. It is quite fast and accurate for this type of problem (in particular, it was shown
  891. by the authors to outperform MIL). During implementation, the code at
  892. <http://www.aonsquared.co.uk/node/5>, courtesy of the author Arthur Amarra, was used for
  893. reference purposes.
  894. */
  895. class CV_EXPORTS_W TrackerMedianFlow : public Tracker
  896. {
  897. public:
  898. struct CV_EXPORTS Params
  899. {
  900. Params(); //!<default constructor
  901. //!<note that the default values of parameters are recommended for most of use cases
  902. int pointsInGrid; //!<square root of the number of keypoints used; increase it to trade
  903. //!<speed for accuracy
  904. cv::Size winSize; //!<window size parameter for Lucas-Kanade optical flow
  905. int maxLevel; //!<maximal pyramid level number for Lucas-Kanade optical flow
  906. TermCriteria termCriteria; //!<termination criteria for Lucas-Kanade optical flow
  907. cv::Size winSizeNCC; //!<window size around a point for normalized cross-correlation check
  908. double maxMedianLengthOfDisplacementDifference; //!<criterion for losing the tracked object
  909. void read( const FileNode& /*fn*/ );
  910. void write( FileStorage& /*fs*/ ) const;
  911. };
  912. /** @brief Constructor
  913. @param parameters Median Flow parameters TrackerMedianFlow::Params
  914. */
  915. static Ptr<TrackerMedianFlow> create(const TrackerMedianFlow::Params &parameters);
  916. CV_WRAP static Ptr<TrackerMedianFlow> create();
  917. virtual ~TrackerMedianFlow() CV_OVERRIDE {}
  918. };
  919. /** @brief TLD is a novel tracking framework that explicitly decomposes the long-term tracking task into
  920. tracking, learning and detection.
  921. The tracker follows the object from frame to frame. The detector localizes all appearances that
  922. have been observed so far and corrects the tracker if necessary. The learning estimates detector's
  923. errors and updates it to avoid these errors in the future. The implementation is based on @cite TLD .
  924. The Median Flow algorithm (see cv::TrackerMedianFlow) was chosen as a tracking component in this
  925. implementation, following the authors. The tracker is supposed to be able to handle rapid motions, partial
  926. occlusions, object absence, etc.
  927. */
  928. class CV_EXPORTS_W TrackerTLD : public Tracker
  929. {
  930. public:
  931. struct CV_EXPORTS Params
  932. {
  933. Params();
  934. void read( const FileNode& /*fn*/ );
  935. void write( FileStorage& /*fs*/ ) const;
  936. };
  937. /** @brief Constructor
  938. @param parameters TLD parameters TrackerTLD::Params
  939. */
  940. static Ptr<TrackerTLD> create(const TrackerTLD::Params &parameters);
  941. CV_WRAP static Ptr<TrackerTLD> create();
  942. virtual ~TrackerTLD() CV_OVERRIDE {}
  943. };
  944. /** @brief KCF is a novel tracking framework that utilizes properties of circulant matrices to enhance the processing speed.
  945. * This tracking method is an implementation of @cite KCF_ECCV which is extended to KCF with color-names features (@cite KCF_CN).
  946. * The original paper of KCF is available at <http://www.robots.ox.ac.uk/~joao/publications/henriques_tpami2015.pdf>
  947. * as well as the matlab implementation. For more information about KCF with color-names features, please refer to
  948. * <http://www.cvl.isy.liu.se/research/objrec/visualtracking/colvistrack/index.html>.
  949. */
  950. class CV_EXPORTS_W TrackerKCF : public Tracker
  951. {
  952. public:
  953. /**
  954. * \brief Feature type to be used in the tracking: grayscale, colornames, compressed color-names
  955. * The modes available now:
  956. - "GRAY" -- Use grayscale values as the feature
  957. - "CN" -- Color-names feature
  958. */
  959. enum MODE {
  960. GRAY = (1 << 0),
  961. CN = (1 << 1),
  962. CUSTOM = (1 << 2)
  963. };
  964. struct CV_EXPORTS Params
  965. {
  966. /**
  967. * \brief Constructor
  968. */
  969. Params();
  970. /**
  971. * \brief Read parameters from a file
  972. */
  973. void read(const FileNode& /*fn*/);
  974. /**
  975. * \brief Write parameters to a file
  976. */
  977. void write(FileStorage& /*fs*/) const;
  978. float detect_thresh; //!< detection confidence threshold
  979. float sigma; //!< gaussian kernel bandwidth
  980. float lambda; //!< regularization
  981. float interp_factor; //!< linear interpolation factor for adaptation
  982. float output_sigma_factor; //!< spatial bandwidth (proportional to target)
  983. float pca_learning_rate; //!< compression learning rate
  984. bool resize; //!< activate the resize feature to improve the processing speed
  985. bool split_coeff; //!< split the training coefficients into two matrices
  986. bool wrap_kernel; //!< wrap around the kernel values
  987. bool compress_feature; //!< activate the pca method to compress the features
  988. int max_patch_size; //!< threshold for the ROI size
  989. int compressed_size; //!< feature size after compression
  990. int desc_pca; //!< compressed descriptors of TrackerKCF::MODE
  991. int desc_npca; //!< non-compressed descriptors of TrackerKCF::MODE
  992. };
  993. virtual void setFeatureExtractor(void(*)(const Mat, const Rect, Mat&), bool pca_func = false) = 0;
  994. /** @brief Constructor
  995. @param parameters KCF parameters TrackerKCF::Params
  996. */
  997. static Ptr<TrackerKCF> create(const TrackerKCF::Params &parameters);
  998. CV_WRAP static Ptr<TrackerKCF> create();
  999. virtual ~TrackerKCF() CV_OVERRIDE {}
  1000. };
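/* Illustrative sketch (not part of the original header): plugging a custom feature extractor into
   TrackerKCF. The extractor below is hypothetical; it only has to match the
   void(*)(const Mat, const Rect, Mat&) signature (cv::cvtColor requires opencv2/imgproc.hpp).
   Setting the CUSTOM descriptor flag in Params is an assumed configuration for this example.
@code
    static void grayFeature( const cv::Mat img, const cv::Rect roi, cv::Mat& feat )
    {
        cv::cvtColor( img( roi ), feat, cv::COLOR_BGR2GRAY );  // any per-ROI feature map works here
        feat.convertTo( feat, CV_32F, 1.0 / 255.0 );           // normalize the feature values
    }

    cv::TrackerKCF::Params params;
    params.desc_npca = cv::TrackerKCF::CUSTOM;                 // assumed: use a custom, non-compressed descriptor
    cv::Ptr<cv::TrackerKCF> tracker = cv::TrackerKCF::create( params );
    tracker->setFeatureExtractor( grayFeature );               // second argument (pca_func) defaults to false
@endcode
*/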
  1001. /** @brief GOTURN (@cite GOTURN) is a kind of tracker based on Convolutional Neural Networks (CNN). While taking all the advantages of CNN trackers,
  1002. * GOTURN is much faster due to its offline training, with no online fine-tuning.
  1003. * GOTURN tracker addresses the problem of single target tracking: given a bounding box label of an object in the first frame of the video,
  1004. * we track that object through the rest of the video. NOTE: Current method of GOTURN does not handle occlusions; however, it is fairly
  1005. * robust to viewpoint changes, lighting changes, and deformations.
  1006. * Inputs of GOTURN are two RGB patches representing Target and Search patches resized to 227x227.
  1007. * Outputs of GOTURN are predicted bounding box coordinates, relative to Search patch coordinate system, in format X1,Y1,X2,Y2.
  1008. * Original paper is here: <http://davheld.github.io/GOTURN/GOTURN.pdf>
  1009. * As well as the original authors' implementation: <https://github.com/davheld/GOTURN#train-the-tracker>
  1010. * The implementation of the training algorithm is placed separately here due to third-party dependencies:
  1011. * <https://github.com/Auron-X/GOTURN_Training_Toolkit>
  1012. * GOTURN architecture goturn.prototxt and trained model goturn.caffemodel are accessible on opencv_extra GitHub repository.
  1013. */
  1014. class CV_EXPORTS_W TrackerGOTURN : public Tracker
  1015. {
  1016. public:
  1017. struct CV_EXPORTS Params
  1018. {
  1019. Params();
  1020. void read(const FileNode& /*fn*/);
  1021. void write(FileStorage& /*fs*/) const;
  1022. };
  1023. /** @brief Constructor
  1024. @param parameters GOTURN parameters TrackerGOTURN::Params
  1025. */
  1026. static Ptr<TrackerGOTURN> create(const TrackerGOTURN::Params &parameters);
  1027. CV_WRAP static Ptr<TrackerGOTURN> create();
  1028. virtual ~TrackerGOTURN() CV_OVERRIDE {}
  1029. };
  1030. /** @brief the MOSSE (Minimum Output Sum of Squared Error) tracker
  1031. Note that this tracker works with grayscale images; if BGR images are passed, they will be converted internally.
  1032. @cite MOSSE Visual Object Tracking using Adaptive Correlation Filters
  1033. */
  1034. class CV_EXPORTS_W TrackerMOSSE : public Tracker
  1035. {
  1036. public:
  1037. /** @brief Constructor
  1038. */
  1039. CV_WRAP static Ptr<TrackerMOSSE> create();
  1040. virtual ~TrackerMOSSE() CV_OVERRIDE {}
  1041. };
  1042. /************************************ MultiTracker Class ---By Laksono Kurnianggoro--- ************************************/
  1043. /** @brief This class is used to track multiple objects using the specified tracker algorithm.
  1044. * The MultiTracker is a naive implementation of multiple object tracking.
  1045. * It processes the tracked objects independently, without any optimization across the tracked objects.
  1046. */
  1047. class CV_EXPORTS_W MultiTracker : public Algorithm
  1048. {
  1049. public:
  1050. /**
  1051. * \brief Constructor.
  1052. */
  1053. CV_WRAP MultiTracker();
  1054. /**
  1055. * \brief Destructor
  1056. */
  1057. ~MultiTracker() CV_OVERRIDE;
  1058. /**
  1059. * \brief Add a new object to be tracked.
  1060. *
  1061. * @param newTracker tracking algorithm to be used
  1062. * @param image input image
  1063. * @param boundingBox a rectangle representing the ROI of the tracked object
  1064. */
  1065. CV_WRAP bool add(Ptr<Tracker> newTracker, InputArray image, const Rect2d& boundingBox);
  1066. /**
  1067. * \brief Add a set of objects to be tracked.
  1068. * @param newTrackers list of tracking algorithms to be used
  1069. * @param image input image
  1070. * @param boundingBox list of the tracked objects
  1071. */
  1072. bool add(std::vector<Ptr<Tracker> > newTrackers, InputArray image, std::vector<Rect2d> boundingBox);
  1073. /**
  1074. * \brief Update the current tracking status.
  1075. * The result will be saved in the internal storage.
  1076. * @param image input image
  1077. */
  1078. bool update(InputArray image);
  1079. /**
  1080. * \brief Update the current tracking status.
  1081. * @param image input image
  1082. * @param boundingBox the tracking result, representing a list of ROIs of the tracked objects.
  1083. */
  1084. CV_WRAP bool update(InputArray image, CV_OUT std::vector<Rect2d> & boundingBox);
  1085. /**
  1086. * \brief Returns a reference to the storage of the tracked objects; each object corresponds to one tracker algorithm
  1087. */
  1088. CV_WRAP const std::vector<Rect2d>& getObjects() const;
  1089. /**
  1090. * \brief Returns a pointer to a new instance of MultiTracker
  1091. */
  1092. CV_WRAP static Ptr<MultiTracker> create();
  1093. protected:
  1094. //!< storage for the tracker algorithms.
  1095. std::vector< Ptr<Tracker> > trackerList;
  1096. //!< storage for the tracked objects, each object corresponds to one tracker algorithm.
  1097. std::vector<Rect2d> objects;
  1098. };
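/* Illustrative sketch (not part of the original header): tracking two objects independently with
   MultiTracker. The frame source (`cap`, `frame`) and the initial ROIs are assumed for illustration.
@code
    cv::MultiTracker trackers;
    trackers.add( cv::TrackerKCF::create(), frame, cv::Rect2d( 10, 10, 50, 80 ) );
    trackers.add( cv::TrackerMIL::create(), frame, cv::Rect2d( 200, 120, 60, 60 ) );
    std::vector<cv::Rect2d> boxes;
    while ( cap.read( frame ) )
    {
        trackers.update( frame, boxes );                       // boxes[i] follows the i-th added object
    }
@endcode
*/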
  1099. /************************************ Multi-Tracker Classes ---By Tyan Vladimir---************************************/
  1100. /** @brief Base abstract class for the long-term Multi Object Trackers:
  1101. @sa Tracker, MultiTrackerTLD
  1102. */
  1103. class CV_EXPORTS MultiTracker_Alt
  1104. {
  1105. public:
  1106. /** @brief Constructor for Multitracker
  1107. */
  1108. MultiTracker_Alt()
  1109. {
  1110. targetNum = 0;
  1111. }
  1112. /** @brief Add a new target to a tracking-list and initialize the tracker with a known bounding box that surrounds the target
  1113. @param image The initial frame
  1114. @param boundingBox The initial bounding box of target
  1115. @param tracker_algorithm Multi-tracker algorithm
  1116. @return True if new target initialization went successfully, false otherwise
  1117. */
  1118. bool addTarget(InputArray image, const Rect2d& boundingBox, Ptr<Tracker> tracker_algorithm);
  1119. /** @brief Update all trackers from the tracking-list, find the new most likely bounding boxes for the targets
  1120. @param image The current frame
  1121. @return True means that all targets were located and false means that the tracker couldn't locate one of the targets in the
  1122. current frame. Note that the latter *does not* imply that the tracker has failed; the target may simply be
  1123. missing from the frame (say, out of sight)
  1124. */
  1125. bool update(InputArray image);
  1126. /** @brief Current number of targets in tracking-list
  1127. */
  1128. int targetNum;
  1129. /** @brief Trackers list for Multi-Object-Tracker
  1130. */
  1131. std::vector <Ptr<Tracker> > trackers;
  1132. /** @brief Bounding Boxes list for Multi-Object-Tracker
  1133. */
  1134. std::vector <Rect2d> boundingBoxes;
  1135. /** @brief List of randomly generated colors for bounding boxes display
  1136. */
  1137. std::vector<Scalar> colors;
  1138. };
  1139. /** @brief Multi Object Tracker for TLD. TLD is a novel tracking framework that explicitly decomposes
  1140. the long-term tracking task into tracking, learning and detection.
  1141. The tracker follows the object from frame to frame. The detector localizes all appearances that
  1142. have been observed so far and corrects the tracker if necessary. The learning estimates detector's
  1143. errors and updates it to avoid these errors in the future. The implementation is based on @cite TLD .
  1144. The Median Flow algorithm (see cv::TrackerMedianFlow) was chosen as a tracking component in this
  1145. implementation, following the authors. The tracker is supposed to be able to handle rapid motions, partial
  1146. occlusions, object absence, etc.
  1147. @sa Tracker, MultiTracker, TrackerTLD
  1148. */
  1149. class CV_EXPORTS MultiTrackerTLD : public MultiTracker_Alt
  1150. {
  1151. public:
  1152. /** @brief Update all trackers from the tracking-list, find the new most likely bounding boxes for the targets by an
  1153. optimized update method that uses some techniques to speed up calculations specifically for multi-object TLD. The only limitation
  1154. is that all target bounding boxes should have approximately the same aspect ratio. The speed boost is around 20%.
  1155. @param image The current frame.
  1156. @return True means that all targets were located and false means that the tracker couldn't locate one of the targets in the
  1157. current frame. Note that the latter *does not* imply that the tracker has failed; the target may simply be
  1158. missing from the frame (say, out of sight)
  1159. */
  1160. bool update_opt(InputArray image);
  1161. };
  1162. //! @}
  1163. /*********************************** CSRT ************************************/
  1164. /** @brief Discriminative Correlation Filter Tracker with Channel and Spatial Reliability
  1165. */
  1166. class CV_EXPORTS_W TrackerCSRT : public Tracker
  1167. {
  1168. public:
  1169. struct CV_EXPORTS Params
  1170. {
  1171. /**
  1172. * \brief Constructor
  1173. */
  1174. Params();
  1175. /**
  1176. * \brief Read parameters from file
  1177. */
  1178. void read(const FileNode& /*fn*/);
  1179. /**
  1180. * \brief Write parameters to a file
  1181. */
  1182. void write(cv::FileStorage& fs) const;
  1183. bool use_hog;
  1184. bool use_color_names;
  1185. bool use_gray;
  1186. bool use_rgb;
  1187. bool use_channel_weights;
  1188. bool use_segmentation;
  1189. std::string window_function; //!< Window function: "hann", "cheb", "kaiser"
  1190. float kaiser_alpha;
  1191. float cheb_attenuation;
  1192. float template_size;
  1193. float gsl_sigma;
  1194. float hog_orientations;
  1195. float hog_clip;
  1196. float padding;
  1197. float filter_lr;
  1198. float weights_lr;
  1199. int num_hog_channels_used;
  1200. int admm_iterations;
  1201. int histogram_bins;
  1202. float histogram_lr;
  1203. int background_ratio;
  1204. int number_of_scales;
  1205. float scale_sigma_factor;
  1206. float scale_model_max_area;
  1207. float scale_lr;
  1208. float scale_step;
  1209. float psr_threshold; //!< we consider the target lost if the PSR is lower than this.
  1210. };
  1211. /** @brief Constructor
  1212. @param parameters CSRT parameters TrackerCSRT::Params
  1213. */
  1214. static Ptr<TrackerCSRT> create(const TrackerCSRT::Params &parameters);
  1215. CV_WRAP static Ptr<TrackerCSRT> create();
  1216. CV_WRAP virtual void setInitialMask(InputArray mask) = 0;
  1217. virtual ~TrackerCSRT() CV_OVERRIDE {}
  1218. };
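/* Illustrative sketch (not part of the original header): creating TrackerCSRT with a tuned parameter
   and an optional segmentation mask. The threshold value, `mask`, `frame` and `roi` are assumptions
   made only for this example.
@code
    cv::TrackerCSRT::Params params;
    params.psr_threshold = 0.045f;                             // example: stricter target-lost criterion
    cv::Ptr<cv::TrackerCSRT> tracker = cv::TrackerCSRT::create( params );
    cv::Mat mask;                                              // assumed: binary mask of the target inside the ROI
    tracker->setInitialMask( mask );
    tracker->init( frame, roi );
@endcode
*/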
  1219. } /* namespace cv */
  1220. #endif