GrayCodePattern.h

//
// This file is auto-generated. Please don't modify it!
//
#pragma once
#ifdef __cplusplus
//#import "opencv.hpp"
#import "opencv2/structured_light.hpp"
#import "opencv2/structured_light/graycodepattern.hpp"
#else
#define CV_EXPORTS
#endif
#import <Foundation/Foundation.h>
#import "StructuredLightPattern.h"
@class Mat;
@class Point2i;
NS_ASSUME_NONNULL_BEGIN
// C++: class GrayCodePattern
/**
* Class implementing the Gray-code pattern, based on CITE: UNDERWORLD.
*
* The generation of the pattern images is performed with Gray encoding using the traditional white and black colors.
*
* The information about the two image axes x, y is encoded separately into two different pattern sequences.
* A projector P with resolution (P_res_x, P_res_y) will result in Ncols = log2(P_res_x) encoded pattern images representing the columns, and
* in Nrows = log2(P_res_y) encoded pattern images representing the rows (both rounded up to the nearest integer).
* For example, a projector with resolution 1024x768 will result in Ncols = 10 and Nrows = 10.
*
* However, the generated pattern sequence consists of both regular color and color-inverted images: inverted pattern images are images
* with the same structure as the original but with inverted colors.
* This provides an effective method for easily determining the intensity value of each pixel when it is lit (highest value) and
* when it is not lit (lowest value). So for a projector with resolution 1024x768, the total number of pattern images will be Ncols * 2 + Nrows * 2 = 40.
*
*
* Member of `Structured_light`
*/
CV_EXPORTS @interface GrayCodePattern : StructuredLightPattern
#ifdef __cplusplus
@property(readonly)cv::Ptr<cv::structured_light::GrayCodePattern> nativePtrGrayCodePattern;
#endif
#ifdef __cplusplus
- (instancetype)initWithNativePtr:(cv::Ptr<cv::structured_light::GrayCodePattern>)nativePtr;
+ (instancetype)fromNative:(cv::Ptr<cv::structured_light::GrayCodePattern>)nativePtr;
#endif
#pragma mark - Methods
//
// static Ptr_GrayCodePattern cv::structured_light::GrayCodePattern::create(int width, int height)
//
/**
* Constructor
*
* @param width Horizontal resolution of the projector (number of columns to encode).
* @param height Vertical resolution of the projector (number of rows to encode).
*/
+ (GrayCodePattern*)create:(int)width height:(int)height NS_SWIFT_NAME(create(width:height:));
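//
// Usage sketch: creating the pattern for a 1024x768 projector and generating its
// images. `generate` is inherited from StructuredLightPattern; the exact
// NSMutableArray<Mat*> signature shown for it, and the variable names, are
// assumptions for illustration.
//
//   GrayCodePattern *pattern = [GrayCodePattern create:1024 height:768];
//   NSMutableArray<Mat*> *patternImages = [NSMutableArray array];
//   BOOL generated = [pattern generate:patternImages];
//   // For 1024x768: Ncols = Nrows = 10, so Ncols * 2 + Nrows * 2 = 40 images.
//   size_t count = [pattern getNumberOfPatternImages];
//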
//
// size_t cv::structured_light::GrayCodePattern::getNumberOfPatternImages()
//
/**
* Get the number of pattern images needed for the Gray-code pattern.
*
* @return The number of pattern images needed for the Gray-code pattern.
*
*/
- (size_t)getNumberOfPatternImages NS_SWIFT_NAME(getNumberOfPatternImages());
//
// void cv::structured_light::GrayCodePattern::setWhiteThreshold(size_t value)
//
/**
* Sets the value for white threshold, needed for decoding.
*
* The white threshold is a number in the range 0-255 representing the minimum brightness difference required, between a Gray-code pattern image and its inverse, for a pixel to be considered valid; it is used in the getProjPixel method.
*
* @param value The desired white threshold value.
*
*/
- (void)setWhiteThreshold:(size_t)value NS_SWIFT_NAME(setWhiteThreshold(value:));
//
// void cv::structured_light::GrayCodePattern::setBlackThreshold(size_t value)
//
/**
* Sets the value for black threshold, needed for decoding (shadow masks computation).
*
* The black threshold is a number in the range 0-255 representing the minimum brightness difference required, between the fully illuminated (white) and the not illuminated (black) images, for a pixel to be considered valid; it is used in the computeShadowMasks method.
*
* @param value The desired black threshold value.
*
*/
- (void)setBlackThreshold:(size_t)value NS_SWIFT_NAME(setBlackThreshold(value:));
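//
// Usage sketch: setting both thresholds before decoding. The values below are only
// illustrative; suitable values depend on the capture setup. `pattern` is assumed
// to be a GrayCodePattern created as in the sketch above.
//
//   [pattern setWhiteThreshold:5];
//   [pattern setBlackThreshold:40];
//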
//
// void cv::structured_light::GrayCodePattern::getImagesForShadowMasks(Mat& blackImage, Mat& whiteImage)
//
/**
* Generates the all-black and all-white images needed for shadow masks computation.
*
* To identify shadow regions, i.e. the regions of the two images where the pixels are not lit by the projector's light and thus carry no coded information,
* the 3DUNDERWORLD algorithm computes a shadow mask for the two camera views, starting from a white and a black image captured by each camera.
* This method generates these two additional images to project.
*
* @param blackImage The generated all-black CV_8U image, at projector's resolution.
* @param whiteImage The generated all-white CV_8U image, at projector's resolution.
*/
- (void)getImagesForShadowMasks:(Mat*)blackImage whiteImage:(Mat*)whiteImage NS_SWIFT_NAME(getImagesForShadowMasks(blackImage:whiteImage:));
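//
// Usage sketch: generating the extra all-black and all-white images to project in
// addition to the Gray-code sequence; `pattern` is assumed to be a GrayCodePattern
// created as above.
//
//   Mat *blackImage = [[Mat alloc] init];
//   Mat *whiteImage = [[Mat alloc] init];
//   [pattern getImagesForShadowMasks:blackImage whiteImage:whiteImage];
//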
//
// bool cv::structured_light::GrayCodePattern::getProjPixel(vector_Mat patternImages, int x, int y, Point& projPix)
//
/**
* For an (x,y) pixel of a camera, returns the corresponding projector pixel.
*
* The function decodes a pixel in the pattern images acquired by a camera into the corresponding decimal numbers representing the projector's column and row,
* providing a mapping between the camera's and the projector's pixels.
*
* @param patternImages The pattern images acquired by the camera, stored in a grayscale vector<Mat>.
* @param x x coordinate of the image pixel.
* @param y y coordinate of the image pixel.
* @param projPix Projector's pixel corresponding to the camera's pixel: projPix.x and projPix.y are the image coordinates of the projector's pixel corresponding to the pixel being decoded in the camera.
*/
- (BOOL)getProjPixel:(NSArray<Mat*>*)patternImages x:(int)x y:(int)y projPix:(Point2i*)projPix NS_SWIFT_NAME(getProjPixel(patternImages:x:y:projPix:));
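//
// Usage sketch: decoding a single camera pixel into its projector pixel.
// `capturedImages` stands for the grayscale pattern images acquired by the camera
// (an NSArray<Mat*>*), and the Point2i initializer shown is an assumption about the
// Objective-C wrapper.
//
//   Point2i *projPix = [[Point2i alloc] init];
//   BOOL result = [pattern getProjPixel:capturedImages x:320 y:240 projPix:projPix];
//   // projPix.x and projPix.y hold the decoded projector column and row.
//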
@end
NS_ASSUME_NONNULL_END