// CameraCapture.mm
#if !PLATFORM_TVOS && UNITY_USES_WEBCAM

#include "CameraCapture.h"
#include "AVCapture.h"
#include "CMVideoSampling.h"
#include "CVTextureCache.h"

#import <CoreVideo/CoreVideo.h>

#include <cmath>
#include <malloc/malloc.h>
  8. static NSMutableArray<CameraCaptureController*> *activeColorAndDepthCameraControllers = nil;
@implementation CameraCaptureController
{
    AVCaptureDevice*            _captureDevice;         // hardware camera this controller drives
    AVCaptureSession*           _captureSession;
    AVCaptureDeviceInput*       _captureInput;
    AVCaptureVideoDataOutput*   _captureOutput;         // BGRA color frames
    AVCaptureDepthDataOutput*   _captureDepthOutput;    // non-nil only for color+depth capture
    AVCaptureDataOutputSynchronizer* _captureSynchronizer; // pairs color and depth frames

    @public bool _isDepth;          // true when this controller delivers the depth stream to Unity

    uint8_t* _pixelBufferCopy;      // scratch copy used by capturePixelBufferToMemBuffer (freed in stop)

    CMVideoSampling _cmVideoSampling;   // texture-cache sampling state for captured frames
    NSString* _preset;                  // session preset, kept so the session can be re-created

    CGPoint _focusPoint;
    AVCaptureFocusMode _focusMode;

    @public void* _userData;            // opaque pointer passed back to Unity on every frame
    @public size_t _width, _height;     // dimensions of the most recently captured frame
}
  26. - (bool)initCapture:(AVCaptureDevice*)device
  27. {
  28. if (UnityGetAVCapturePermission(avVideoCapture) == avCapturePermissionDenied)
  29. return false;
  30. self.captureDevice = device;
  31. self.captureInput = [AVCaptureDeviceInput deviceInputWithDevice: device error: nil];
  32. self.captureOutput = [[AVCaptureVideoDataOutput alloc] init];
  33. if (self.captureOutput == nil || self.captureInput == nil)
  34. return false;
  35. self.captureOutput.alwaysDiscardsLateVideoFrames = YES;
  36. NSDictionary* options = @{ (NSString*)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA) };
  37. [self.captureOutput setVideoSettings: options];
  38. CMVideoSampling_Initialize(&self->_cmVideoSampling);
  39. _width = _height = 0;
  40. _focusPoint = CGPointMake(0.5, 0.5); // default focus point is center
  41. _focusMode = AVCaptureFocusModeContinuousAutoFocus;
  42. _pixelBufferCopy = nullptr;
  43. return true;
  44. }
  45. - (void)setCaptureFPS:(float)fps
  46. {
  47. if ([self.captureDevice lockForConfiguration: nil])
  48. {
  49. if (self.captureDevice.activeFormat)
  50. {
  51. fps = [self pickAvailableFrameRate: fps];
  52. self.captureDevice.activeVideoMinFrameDuration = CMTimeMake(1, fps);
  53. self.captureDevice.activeVideoMaxFrameDuration = CMTimeMake(1, fps);
  54. }
  55. else
  56. {
  57. // In some corner cases (seeing this on iPod iOS 6.1.5) activeFormat is null.
  58. #pragma clang diagnostic push
  59. #pragma clang diagnostic ignored "-Wdeprecated-declarations"
  60. self.captureOutput.minFrameDuration = CMTimeMake(1, fps);
  61. #pragma clang diagnostic pop
  62. }
  63. [self.captureDevice unlockForConfiguration];
  64. }
  65. }
  66. - (bool)initCapture:(AVCaptureDevice*)device preset:(NSString*)preset fps:(float)fps
  67. {
  68. if (![self initCapture: device])
  69. return false;
  70. self.captureSession = [[AVCaptureSession alloc] init];
  71. [self.captureSession addInput: self.captureInput];
  72. [self.captureSession addOutput: self.captureOutput];
  73. // queue on main thread to simplify gles life
  74. [self.captureOutput setSampleBufferDelegate: self queue: dispatch_get_main_queue()];
  75. self->_preset = preset;
  76. [self.captureSession setSessionPreset: preset];
  77. [self setCaptureFPS: fps];
  78. return true;
  79. }
  80. - (void)captureOutput:(AVCaptureOutput*)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection*)connection
  81. {
  82. intptr_t tex = (intptr_t)CMVideoSampling_SampleBuffer(&self->_cmVideoSampling, sampleBuffer, &_width, &_height);
  83. UnityDidCaptureVideoFrame(tex, self->_userData);
  84. }
  85. - (void)capturePixelBufferToMemBuffer:(uint8_t*)dst
  86. {
  87. CVPixelBufferRef pbuf = (CVPixelBufferRef)self->_cmVideoSampling.cvImageBuffer;
  88. const size_t srcRowSize = CVPixelBufferGetBytesPerRow(pbuf);
  89. const size_t bufSize = srcRowSize * self->_height;
  90. if (self->_pixelBufferCopy == nullptr)
  91. {
  92. self->_pixelBufferCopy = (uint8_t*)::malloc(bufSize);
  93. }
  94. // while not the best way memory-wise, we want to minimize stalling
  95. CVPixelBufferLockBaseAddress(pbuf, kCVPixelBufferLock_ReadOnly);
  96. {
  97. ::memcpy(self->_pixelBufferCopy, CVPixelBufferGetBaseAddress(pbuf), bufSize);
  98. }
  99. CVPixelBufferUnlockBaseAddress(pbuf, kCVPixelBufferLock_ReadOnly);
  100. OSType pixelFormat = CVPixelBufferGetPixelFormatType(pbuf);
  101. size_t bpp = 0;
  102. switch (pixelFormat)
  103. {
  104. case kCVPixelFormatType_32BGRA:
  105. bpp = 4;
  106. break;
  107. case kCVPixelFormatType_DepthFloat16:
  108. bpp = 2;
  109. break;
  110. default:
  111. assert(false);
  112. break;
  113. }
  114. const size_t dstRowSize = self->_width * bpp;
  115. uint8_t* src = self->_pixelBufferCopy + (self->_height - 1) * srcRowSize;
  116. for (size_t i = 0; i < self->_height; ++i)
  117. {
  118. ::memcpy(dst, src, dstRowSize);
  119. dst += dstRowSize;
  120. src -= srcRowSize;
  121. }
  122. }
  123. - (int)isCVTextureFlipped
  124. {
  125. return IsCVTextureFlipped(self->_cmVideoSampling.cvTextureCacheTexture);
  126. }
  127. + (BOOL)focusPointSupported:(AVCaptureDevice*)captureDevice withFocusMode:(AVCaptureFocusMode)focusMode
  128. {
  129. return captureDevice.focusPointOfInterestSupported && [captureDevice isFocusModeSupported: focusMode];
  130. }
  131. - (int)setFocusPointWithX:(float)x Y:(float)y
  132. {
  133. if (x < 0 || x > 1 || y < 0 || y > 1)
  134. {
  135. _focusPoint = CGPointMake(0.5, 0.5); // default value for iOS
  136. _focusMode = AVCaptureFocusModeContinuousAutoFocus;
  137. }
  138. else
  139. {
  140. _focusPoint = CGPointMake(x, 1.0 - y);
  141. _focusMode = AVCaptureFocusModeAutoFocus;
  142. }
  143. return [self setFocusPoint];
  144. }
  145. - (int)setFocusPoint
  146. {
  147. if (self.captureDevice != nil && [CameraCaptureController focusPointSupported: self.captureDevice withFocusMode: _focusMode])
  148. {
  149. if ([self.captureDevice lockForConfiguration: nil])
  150. {
  151. self.captureDevice.focusPointOfInterest = _focusPoint;
  152. self.captureDevice.focusMode = _focusMode;
  153. [self.captureDevice unlockForConfiguration];
  154. return 1;
  155. }
  156. }
  157. return 0;
  158. }
  159. + (NSMutableArray<CameraCaptureController*>*)getActiveColorAndDepthCameraControllers
  160. {
  161. if (activeColorAndDepthCameraControllers == nil)
  162. {
  163. activeColorAndDepthCameraControllers = [[NSMutableArray alloc] init];
  164. }
  165. return activeColorAndDepthCameraControllers;
  166. }
  167. + (void)addColorAndDepthCameraController:(CameraCaptureController*)controller
  168. {
  169. CameraCaptureController* prevController = [self findColorAndDepthCameraController: controller.captureDevice isDepth: controller->_isDepth];
  170. if (prevController != nil)
  171. [prevController pause];
  172. CameraCaptureController* otherController = [self findColorAndDepthCameraController: controller.captureDevice isDepth: !controller->_isDepth];
  173. if (otherController != nil)
  174. {
  175. [otherController.captureSession stopRunning];
  176. [otherController clearColorAndDepthCameraCaptureSession];
  177. }
  178. [[self getActiveColorAndDepthCameraControllers] addObject: controller];
  179. }
  180. + (void)removeColorAndDepthCameraController:(CameraCaptureController*)controller
  181. {
  182. [[self getActiveColorAndDepthCameraControllers] removeObject: controller];
  183. CameraCaptureController* otherController = [self findColorAndDepthCameraController: controller.captureDevice isDepth: !controller->_isDepth];
  184. if (otherController != nil)
  185. {
  186. [otherController initColorAndDepthCameraCaptureSession];
  187. [otherController.captureSession startRunning];
  188. }
  189. }
  190. + (void)clearColorAndDepthCameraControllers
  191. {
  192. NSMutableArray<CameraCaptureController*>* activeColorAndDepthCameraControllers = [self getActiveColorAndDepthCameraControllers];
  193. for (CameraCaptureController *controller in activeColorAndDepthCameraControllers)
  194. {
  195. if (controller.captureSession != nil)
  196. {
  197. [controller.captureSession stopRunning];
  198. [controller clearColorAndDepthCameraCaptureSession];
  199. }
  200. }
  201. [activeColorAndDepthCameraControllers removeAllObjects];
  202. }
  203. + (CameraCaptureController*)findColorAndDepthCameraController:(AVCaptureDevice*)device isDepth:(bool)isDepth
  204. {
  205. for (CameraCaptureController *controller in [self getActiveColorAndDepthCameraControllers])
  206. {
  207. if (controller.captureDevice == device && controller->_isDepth == isDepth)
  208. return controller;
  209. }
  210. return nil;
  211. }
// Second-stage init for synchronized color+depth capture (TrueDepth/Dual devices).
// isDepth selects which of the two streams this controller reports to Unity.
// Note the ordering: the session must exist before setCaptureFPS, and the
// synchronizer is built over both outputs after the session is wired.
- (bool)initColorAndDepthCameraCapture:(AVCaptureDevice*)device preset:(NSString*)preset fps:(float)fps isDepth:(bool)isDepth
{
    if (![self initCapture: device])
        return false;
    self.captureDepthOutput = [[AVCaptureDepthDataOutput alloc] init];
    if (self.captureDepthOutput == nil)
        return false;
    self.captureDepthOutput.filteringEnabled = YES; // getting filtered depth data to avoid invalid values
    self.captureDepthOutput.alwaysDiscardsLateDepthData = YES;
    self->_preset = preset;
    [self initColorAndDepthCameraCaptureSession];
    [self setCaptureFPS: fps];
    NSArray<AVCaptureOutput*> *outputs = [NSArray arrayWithObjects: self.captureOutput, self.captureDepthOutput, nil];
    self.captureSynchronizer = [[AVCaptureDataOutputSynchronizer alloc] initWithDataOutputs: outputs];
    // queue on main thread to simplify gles life
    [self.captureSynchronizer setDelegate: self queue: dispatch_get_main_queue()];
    _isDepth = isDepth;
    return true;
}
  231. - (void)initColorAndDepthCameraCaptureSession
  232. {
  233. self.captureSession = [[AVCaptureSession alloc] init];
  234. [self.captureSession setSessionPreset: self->_preset];
  235. [self.captureSession addInput: self.captureInput];
  236. [self.captureSession addOutput: self.captureOutput];
  237. [self.captureSession addOutput: self.captureDepthOutput];
  238. }
  239. - (void)clearColorAndDepthCameraCaptureSession
  240. {
  241. [self.captureSession removeInput: self.captureInput];
  242. [self.captureSession removeOutput: self.captureOutput];
  243. [self.captureSession removeOutput: self.captureDepthOutput];
  244. self.captureSession = nil;
  245. }
// AVCaptureDataOutputSynchronizerDelegate: a synchronized color+depth pair arrived.
// Routes the color buffer to the color-stream controller and the depth buffer to
// the depth-stream controller; either may be self, the other is looked up in the
// shared list (and may legitimately be absent).
- (void)dataOutputSynchronizer:(AVCaptureDataOutputSynchronizer *)synchronizer didOutputSynchronizedDataCollection:(AVCaptureSynchronizedDataCollection *)synchronizedDataCollection
{
    AVCaptureSynchronizedSampleBufferData *sampleData = (AVCaptureSynchronizedSampleBufferData*)[synchronizedDataCollection synchronizedDataForCaptureOutput: self.captureOutput];
    // the color frame may have been dropped by the synchronizer
    if (CMSampleBufferGetImageBuffer(sampleData.sampleBuffer) != nil)
    {
        CameraCaptureController* colorController = !self->_isDepth ? self : [CameraCaptureController findColorAndDepthCameraController: self.captureDevice isDepth: false];
        if (colorController != nil)
        {
            intptr_t tex = (intptr_t)CMVideoSampling_SampleBuffer(&colorController->_cmVideoSampling, sampleData.sampleBuffer, &(colorController->_width), &(colorController->_height));
            UnityDidCaptureVideoFrame(tex, colorController->_userData);
        }
    }
    AVCaptureSynchronizedDepthData *depthData = (AVCaptureSynchronizedDepthData*)[synchronizedDataCollection synchronizedDataForCaptureOutput: self.captureDepthOutput];
    if (depthData.depthData.depthDataMap != nil)
    {
        CameraCaptureController* depthController = self->_isDepth ? self : [CameraCaptureController findColorAndDepthCameraController: self.captureDevice isDepth: true];
        if (depthController != nil)
        {
            // depth is normalized to FP16 before sampling into the texture
            intptr_t tex = (intptr_t)CMVideoSampling_ImageBuffer(&depthController->_cmVideoSampling, [depthData.depthData depthDataByConvertingToDepthDataType: kCVPixelFormatType_DepthFloat16].depthDataMap, &(depthController->_width), &(depthController->_height));
            UnityDidCaptureVideoFrame(tex, depthController->_userData);
        }
    }
}
  269. - (void)start
  270. {
  271. if (self.captureDepthOutput != nil)
  272. {
  273. [CameraCaptureController addColorAndDepthCameraController: self];
  274. }
  275. else
  276. {
  277. [CameraCaptureController clearColorAndDepthCameraControllers];
  278. }
  279. [self.captureSession startRunning];
  280. }
  281. - (void)pause
  282. {
  283. [self.captureSession stopRunning];
  284. if (self.captureDepthOutput != nil)
  285. {
  286. [CameraCaptureController removeColorAndDepthCameraController: self];
  287. }
  288. }
// Fully tears down capture and frees all resources. Unlike pause, the
// controller cannot be restarted afterwards.
- (void)stop
{
    [self.captureSession stopRunning];
    [self.captureSession removeInput: self.captureInput];
    [self.captureSession removeOutput: self.captureOutput];
    self.captureInput = nil;
    self.captureOutput = nil;
    if (self.captureDepthOutput != nil)
    {
        // depth-capable controllers also own a synchronizer and are registered
        // in the shared color+depth list
        self.captureSynchronizer = nil;
        [self.captureSession removeOutput: self.captureDepthOutput];
        self.captureDepthOutput = nil;
        [CameraCaptureController removeColorAndDepthCameraController: self];
    }
    self.captureDevice = nil;
    self.captureSession = nil;
    // scratch buffer allocated lazily by capturePixelBufferToMemBuffer
    if (self->_pixelBufferCopy != nullptr)
    {
        ::free(self->_pixelBufferCopy);
        self->_pixelBufferCopy = nullptr;
    }
    CMVideoSampling_Uninitialize(&self->_cmVideoSampling);
}
  312. - (float)pickAvailableFrameRate:(float)fps
  313. {
  314. AVFrameRateRange* bestRange = nil;
  315. float minDiff = INFINITY;
  316. float epsilon = 0.1;
  317. fps = fps > epsilon ? fps : 24;
  318. for (AVFrameRateRange* rate in self.captureDevice.activeFormat.videoSupportedFrameRateRanges)
  319. {
  320. if (fps + epsilon > rate.minFrameRate && fps - epsilon < rate.maxFrameRate)
  321. return fps;
  322. else
  323. {
  324. float diff = ::fmin(::fabs(fps - rate.minFrameRate), ::fabs(fps - rate.maxFrameRate));
  325. if (diff < minDiff)
  326. {
  327. minDiff = diff;
  328. bestRange = rate;
  329. }
  330. }
  331. }
  332. return fps > bestRange.maxFrameRate ? bestRange.maxFrameRate : bestRange.minFrameRate;
  333. }
// Property storage is backed by the explicitly declared ivars above.
@synthesize captureDevice = _captureDevice;
@synthesize captureSession = _captureSession;
@synthesize captureOutput = _captureOutput;
@synthesize captureInput = _captureInput;
@synthesize captureDepthOutput = _captureDepthOutput;
@synthesize captureSynchronizer = _captureSynchronizer;
@end
  341. // Preset for getting depth data with max resolution available
  342. static NSString* const depthCaptureSessionPreset = AVCaptureSessionPresetPhoto;
  343. static NSMutableArray<CameraCaptureDevice*> *videoCaptureDevices = nil;
@implementation CameraCaptureDevice
{
    @public AVCaptureDevice* _device;
    @public int _frontFacing;               // 1 when the camera faces the user
    @public int _autoFocusPointSupported;   // 1 when tap-to-focus is available
    @public WebCamKind _kind;
    @public NSMutableArray<NSValue*>* _resolutions; // CGSize per accepted preset
    NSMutableArray<NSString*>* _resPresets;         // preset names, parallel to _resolutions
}
  353. - (bool)isColorAndDepthCaptureDevice
  354. {
  355. for (AVCaptureDeviceFormat *format in [self->_device formats])
  356. {
  357. if ([format supportedDepthDataFormats].count > 0)
  358. return true;
  359. }
  360. return false;
  361. }
  362. - (WebCamKind)getKind
  363. {
  364. if ([self->_device.localizedName containsString: @"Telephoto"])
  365. return kWebCamTelephoto;
  366. if ([self->_device.localizedName containsString: @"Ultra Wide"])
  367. return kWebCamUltraWideAngle;
  368. if ([self->_device.localizedName containsString: @"Dual"] && [self isColorAndDepthCaptureDevice])
  369. return kWebCamColorAndDepth;
  370. if ([self->_device.localizedName containsString: @"TrueDepth"] && [self isColorAndDepthCaptureDevice])
  371. return kWebCamColorAndDepth;
  372. return kWebCamWideAngle;
  373. }
  374. - (void)fillCaptureDeviceResolutions
  375. {
  376. static NSString* preset[] =
  377. {
  378. AVCaptureSessionPresetLow, // usually 192x144
  379. AVCaptureSessionPreset352x288,
  380. AVCaptureSessionPresetMedium, // usually 480x320
  381. AVCaptureSessionPreset640x480,
  382. AVCaptureSessionPreset1280x720,
  383. AVCaptureSessionPreset1920x1080, // usually the same as AVCaptureSessionPresetHigh
  384. AVCaptureSessionPreset3840x2160,
  385. };
  386. const int count = sizeof(preset) / sizeof(preset[0]);
  387. self->_resolutions = [NSMutableArray arrayWithCapacity: count];
  388. self->_resPresets = [NSMutableArray arrayWithCapacity: count];
  389. AVCaptureInput* captureInput = [AVCaptureDeviceInput deviceInputWithDevice: self->_device error: nil];
  390. //Don't attempt to setup an AVCaptureSession if the user has explicitly denied permission to use the camera.
  391. if (captureInput != nil)
  392. {
  393. AVCaptureSession* captureSession = [[AVCaptureSession alloc] init];
  394. [captureSession addInput: captureInput];
  395. if (self->_kind == kWebCamColorAndDepth)
  396. {
  397. AVCaptureDepthDataOutput* captureDepthOutput = [[AVCaptureDepthDataOutput alloc] init];
  398. if ([captureSession canSetSessionPreset: depthCaptureSessionPreset])
  399. {
  400. [captureSession setSessionPreset: AVCaptureSessionPresetPhoto];
  401. [captureSession addOutput: captureDepthOutput];
  402. CMVideoDimensions dim = CMVideoFormatDescriptionGetDimensions(self->_device.activeDepthDataFormat.formatDescription); // for ColorAndDepth camera return depth buffer resolution
  403. [self->_resolutions addObject: [NSValue valueWithCGSize: CGSizeMake(dim.width, dim.height)]];
  404. [self->_resPresets addObject: AVCaptureSessionPresetPhoto];
  405. }
  406. }
  407. else
  408. {
  409. for (int i = 0; i < count; ++i)
  410. {
  411. if ([captureSession canSetSessionPreset: preset[i]])
  412. {
  413. [captureSession setSessionPreset: preset[i]];
  414. CMVideoDimensions dim = CMVideoFormatDescriptionGetDimensions(self->_device.activeFormat.formatDescription);
  415. [self->_resolutions addObject: [NSValue valueWithCGSize: CGSizeMake(dim.width, dim.height)]];
  416. [self->_resPresets addObject: preset[i]];
  417. }
  418. }
  419. }
  420. }
  421. }
  422. - (NSString*)pickPresetFromWidth:(int)w height:(int)h
  423. {
  424. if (self->_kind == kWebCamColorAndDepth)
  425. {
  426. return depthCaptureSessionPreset;
  427. }
  428. int requestedWidth = w > 0 ? w : 640;
  429. int requestedHeight = h > 0 ? h : 480;
  430. if (requestedHeight > requestedWidth) // hardware camera frame is landscape oriented
  431. std::swap(requestedWidth, requestedHeight);
  432. NSInteger ret = -1;
  433. double bestMatch = std::numeric_limits<double>::max();
  434. for (NSInteger i = 0, n = [_resolutions count]; i < n; ++i)
  435. {
  436. double width = [self->_resolutions[i] CGSizeValue].width;
  437. double height = [self->_resolutions[i] CGSizeValue].height;
  438. double match = std::abs(std::log(requestedWidth / width)) + std::abs(std::log(requestedHeight / height));
  439. if (match < bestMatch)
  440. {
  441. ret = i;
  442. bestMatch = match;
  443. }
  444. }
  445. NSAssert(ret != -1, @"Cannot pick capture preset");
  446. return ret != -1 ? self->_resPresets[ret] : AVCaptureSessionPresetHigh;
  447. }
  448. - (CameraCaptureDevice*)initWithDevice:(AVCaptureDevice*)device
  449. {
  450. self->_device = device;
  451. self->_frontFacing = device.position == AVCaptureDevicePositionFront ? 1 : 0;
  452. self->_autoFocusPointSupported = [CameraCaptureController focusPointSupported: device withFocusMode: AVCaptureFocusModeAutoFocus] ? 1 : 0;
  453. self->_kind = [self getKind];
  454. [self fillCaptureDeviceResolutions];
  455. return self;
  456. }
  457. - (bool)initCaptureForController:(CameraCaptureController*)controller width:(int)w height:(int)h fps:(float)fps isDepth:(bool)isDepth
  458. {
  459. bool initResult = false;
  460. NSString *preset = [self pickPresetFromWidth: w height: h];
  461. if ([self isColorAndDepthCaptureDevice])
  462. {
  463. initResult = [controller initColorAndDepthCameraCapture: self->_device preset: preset fps: fps isDepth: isDepth];
  464. }
  465. else
  466. {
  467. assert(!isDepth);
  468. initResult = [controller initCapture: self->_device preset: preset fps: fps];
  469. }
  470. return initResult;
  471. }
  472. + (bool)initialized
  473. {
  474. return videoCaptureDevices != nil;
  475. }
  476. + (void)createCameraCaptureDevicesArray
  477. {
  478. videoCaptureDevices = [NSMutableArray arrayWithCapacity: 2];
  479. }
  480. + (void)addCameraCaptureDevice:(AVCaptureDevice*)device
  481. {
  482. [videoCaptureDevices addObject: [[CameraCaptureDevice alloc] initWithDevice: device]];
  483. }
  484. @end
// Enumerates cameras, invoking callback once per device with its metadata.
// The device list is built once and cached; resolutions are flattened
// [w0, h0, w1, h1, ...] int pairs, valid only for the duration of the callback.
extern "C" void UnityEnumVideoCaptureDevices(void* udata, void(*callback)(void* udata, const char* name, int frontFacing, int autoFocusPointSupported, int kind, const int* resolutions, int resCount))
{
    if (![CameraCaptureDevice initialized])
    {
        [CameraCaptureDevice createCameraCaptureDevicesArray];
        NSMutableArray<AVCaptureDeviceType>* captureDevices = [NSMutableArray arrayWithObjects: AVCaptureDeviceTypeBuiltInWideAngleCamera, AVCaptureDeviceTypeBuiltInTelephotoCamera, nil];
        [captureDevices addObject: AVCaptureDeviceTypeBuiltInDualCamera];
        [captureDevices addObject: AVCaptureDeviceTypeBuiltInTrueDepthCamera];
        // device types introduced with iOS 13
        if (UnityiOS130orNewer())
        {
            [captureDevices addObject: AVCaptureDeviceTypeBuiltInUltraWideCamera];
            [captureDevices addObject: AVCaptureDeviceTypeBuiltInDualWideCamera];
            [captureDevices addObject: AVCaptureDeviceTypeBuiltInTripleCamera];
        }
        AVCaptureDeviceDiscoverySession *captureDeviceDiscoverySession = [AVCaptureDeviceDiscoverySession discoverySessionWithDeviceTypes: captureDevices mediaType: AVMediaTypeVideo position: AVCaptureDevicePositionUnspecified];
        for (AVCaptureDevice* device in [captureDeviceDiscoverySession devices])
        {
            [CameraCaptureDevice addCameraCaptureDevice: device];
        }
    }
    // we should not provide camera devices information while access has not been granted
    // but we need to try to enumerate camera devices anyway to trigger permission request dialog
    if ([AVCaptureDevice authorizationStatusForMediaType: AVMediaTypeVideo] != AVAuthorizationStatusAuthorized)
        return;
    for (CameraCaptureDevice *cameraCaptureDevice in videoCaptureDevices)
    {
        int resCount = (int)[cameraCaptureDevice->_resolutions count];
        int *resolutions = new int[resCount * 2];
        for (int i = 0; i < resCount; ++i)
        {
            resolutions[i * 2] = (int)[cameraCaptureDevice->_resolutions[i] CGSizeValue].width;
            resolutions[i * 2 + 1] = (int)[cameraCaptureDevice->_resolutions[i] CGSizeValue].height;
        }
        callback(udata, [cameraCaptureDevice->_device.localizedName UTF8String], cameraCaptureDevice->_frontFacing, cameraCaptureDevice->_autoFocusPointSupported, cameraCaptureDevice->_kind, resolutions, resCount);
        delete[] resolutions;
    }
}
  522. extern "C" void* UnityInitCameraCapture(int deviceIndex, int w, int h, int fps, int isDepth, void* udata)
  523. {
  524. if (videoCaptureDevices != nil && deviceIndex < videoCaptureDevices.count)
  525. {
  526. CameraCaptureController* controller = [CameraCaptureController alloc];
  527. bool initResult = [videoCaptureDevices[deviceIndex] initCaptureForController: controller width: w height: h fps: (float)fps isDepth: (isDepth != 0)];
  528. if (initResult)
  529. {
  530. controller->_userData = udata;
  531. return (__bridge_retained void*)controller;
  532. }
  533. controller = nil;
  534. }
  535. return 0;
  536. }
  537. extern "C" void UnityStartCameraCapture(void* capture)
  538. {
  539. [(__bridge CameraCaptureController*)capture start];
  540. }
  541. extern "C" void UnityPauseCameraCapture(void* capture)
  542. {
  543. [(__bridge CameraCaptureController*)capture pause];
  544. }
  545. extern "C" void UnityStopCameraCapture(void* capture)
  546. {
  547. CameraCaptureController* controller = (__bridge_transfer CameraCaptureController*)capture;
  548. [controller stop];
  549. controller = nil;
  550. }
  551. extern "C" void UnityCameraCaptureExtents(void* capture, int* w, int* h)
  552. {
  553. CameraCaptureController* controller = (__bridge CameraCaptureController*)capture;
  554. if (controller == nil)
  555. return;
  556. *w = (int)controller->_width;
  557. *h = (int)controller->_height;
  558. }
  559. extern "C" void UnityCameraCaptureReadToMemory(void* capture, void* dst_, int w, int h)
  560. {
  561. CameraCaptureController* controller = (__bridge CameraCaptureController*)capture;
  562. if (controller == nil)
  563. return;
  564. assert(w == controller->_width && h == controller->_height);
  565. [controller capturePixelBufferToMemBuffer: (uint8_t*)dst_];
  566. }
  567. extern "C" int UnityCameraCaptureVideoRotationDeg(void* capture)
  568. {
  569. CameraCaptureController* controller = (__bridge CameraCaptureController*)capture;
  570. if (controller == nil)
  571. return 0;
  572. // all cams are landscape.
  573. switch (UnityCurrentOrientation())
  574. {
  575. case portrait: return 90;
  576. case portraitUpsideDown: return 270;
  577. case landscapeLeft: return controller.captureDevice.position == AVCaptureDevicePositionFront ? 180 : 0;
  578. case landscapeRight: return controller.captureDevice.position == AVCaptureDevicePositionFront ? 0 : 180;
  579. default: assert(false && "bad orientation returned from UnityCurrentOrientation()"); break;
  580. }
  581. return 0;
  582. }
  583. extern "C" int UnityCameraCaptureVerticallyMirrored(void* capture)
  584. {
  585. CameraCaptureController* controller = (__bridge CameraCaptureController*)capture;
  586. if (controller == nil)
  587. return 0;
  588. return [controller isCVTextureFlipped];
  589. }
  590. extern "C" int UnityCameraCaptureSetAutoFocusPoint(void* capture, float x, float y)
  591. {
  592. CameraCaptureController* controller = (__bridge CameraCaptureController*)capture;
  593. if (controller == nil)
  594. return 0;
  595. return [controller setFocusPointWithX: x Y: y];
  596. }
#else

// STUBBED OUT UNTIL DEVELOPER FINDs AN AWESOME CAMERA SOLUTION FOR APPLE TV //
// tvOS has no camera hardware: every entry point is a no-op returning zero.
extern "C" void UnityEnumVideoCaptureDevices(void* udata, void(*callback)(void* udata, const char* name, int frontFacing, int autoFocusPointSupported, int kind, const int* resolutions, int resCount))
{
}

extern "C" void* UnityInitCameraCapture(int deviceIndex, int w, int h, int fps, int isDepth, void* udata)
{
    return 0;
}

extern "C" void UnityStartCameraCapture(void* capture)
{
}

extern "C" void UnityPauseCameraCapture(void* capture)
{
}

extern "C" void UnityStopCameraCapture(void* capture)
{
}

extern "C" void UnityCameraCaptureExtents(void* capture, int* w, int* h)
{
}

extern "C" void UnityCameraCaptureReadToMemory(void* capture, void* dst_, int w, int h)
{
}

extern "C" int UnityCameraCaptureVideoRotationDeg(void* capture)
{
    return 0;
}

extern "C" int UnityCameraCaptureVerticallyMirrored(void* capture)
{
    return 0;
}

extern "C" int UnityCameraCaptureSetAutoFocusPoint(void* capture, float x, float y)
{
    return 0;
}

#endif