// VideoPlayer.mm
  1. #include "VideoPlayer.h"
  2. #include "CVTextureCache.h"
  3. #include "CMVideoSampling.h"
  4. #import <AVFoundation/AVFoundation.h>
// KVO context markers: unique pointer values used in observeValueForKeyPath:
// to distinguish our own observations (item status / player currentItem)
// from any observations installed by superclasses.
static void* _ObserveItemStatusContext = (void*)0x1;
static void* _ObservePlayerItemContext = (void*)0x2;
  7. @implementation VideoPlayerView
  8. + (Class)layerClass
  9. {
  10. return [AVPlayerLayer class];
  11. }
  12. - (AVPlayer*)player
  13. {
  14. return [(AVPlayerLayer*)[self layer] player];
  15. }
  16. - (void)setPlayer:(AVPlayer*)player
  17. {
  18. [(AVPlayerLayer*)[self layer] setPlayer: player];
  19. }
  20. - (void)dealloc
  21. {
  22. self.player = nil;
  23. }
  24. @end
@implementation VideoPlayer
{
    AVPlayerItem* _playerItem;           // current item being played
    AVPlayer* _player;                   // underlying AVPlayer (created lazily in prepareAsset:)
    AVAssetReader* _reader;              // used only for play-to-texture (local file URLs)
    AVAssetReaderTrackOutput* _videoOut; // vends BGRA samples for the reader path
    CMSampleBufferRef _cmSampleBuffer;   // last sample copied from _videoOut (owned; CFRelease'd)
    CMVideoSampling _videoSampling;      // CV texture-cache based frame sampler
    CMTime _duration;                    // item duration; kCMTimeZero until known
    CMTime _curTime;                     // playhead time seen by the last curFrameTexture call
    CMTime _curFrameTimestamp;           // PTS of the sample currently held
    CMTime _lastFrameTimestamp;          // PTS of the last sample actually uploaded to texture
    CGSize _videoSize;                   // natural size of the video track
    BOOL _playerReady;                   // set when both _assetReady and _itemReady are YES
    // we need to have both because the order of asset/item getting ready is not strict
    BOOL _assetReady;
    BOOL _itemReady;
}
@synthesize delegate;
@synthesize player = _player;
  45. - (BOOL)readyToPlay { return _playerReady; }
  46. - (CGSize)videoSize { return _videoSize; }
  47. - (CMTime)duration { return _duration; }
  48. - (float)durationSeconds { return CMTIME_IS_VALID(_duration) ? (float)CMTimeGetSeconds(_duration) : 0.0f; }
  49. + (BOOL)CanPlayToTexture:(NSURL*)url { return [url isFileURL]; }
  50. + (BOOL)CheckScalingModeAspectFill:(CGSize)videoSize screenSize:(CGSize)screenSize
  51. {
  52. BOOL ret = NO;
  53. CGFloat screenAspect = (screenSize.width / screenSize.height);
  54. CGFloat videoAspect = (videoSize.width / videoSize.height);
  55. CGFloat width = ceilf(videoSize.width * videoAspect / screenAspect);
  56. CGFloat height = ceilf(videoSize.height * videoAspect / screenAspect);
  57. // Do additional input video and device resolution aspect ratio
  58. // rounding check to see if the width and height values are still
  59. // the ~same.
  60. //
  61. // If they still match, we can change the video scaling mode from
  62. // aspectFit to aspectFill, this works around some off-by-one scaling
  63. // errors with certain screen size and video resolution combos
  64. //
  65. // TODO: Shouldn't harm to extend width/height check to
  66. // match values within -1..+1 range from the original
  67. if (videoSize.width == width && videoSize.height == height)
  68. {
  69. ret = YES;
  70. }
  71. return ret;
  72. }
  73. - (void)reportError:(NSError*)error category:(const char*)category
  74. {
  75. ::printf("[%s]Error: %s\n", category, [[error localizedDescription] UTF8String]);
  76. ::printf("%s\n", [[error localizedFailureReason] UTF8String]);
  77. [delegate onPlayerError: error];
  78. }
  79. - (void)reportErrorWithString:(const char*)error category:(const char*)category
  80. {
  81. ::printf("[%s]Error: %s\n", category, error);
  82. [delegate onPlayerError: nil];
  83. }
  84. - (id)init
  85. {
  86. if ((self = [super init]))
  87. {
  88. _duration = _curTime = kCMTimeZero;
  89. _curFrameTimestamp = _lastFrameTimestamp = kCMTimeZero;
  90. }
  91. return self;
  92. }
  93. - (void)cleanupCVTextureCache
  94. {
  95. if (_cmSampleBuffer)
  96. {
  97. CFRelease(_cmSampleBuffer);
  98. _cmSampleBuffer = 0;
  99. }
  100. CMVideoSampling_Uninitialize(&_videoSampling);
  101. }
  102. - (void)cleanupAssetReader
  103. {
  104. if (_reader)
  105. [_reader cancelReading];
  106. _reader = nil;
  107. _videoOut = nil;
  108. }
// Tears down the player and the current item, removing every observer and
// notification subscription installed in prepareAsset:. Safe to call when
// nothing is loaded (both branches are nil-guarded).
- (void)cleanupPlayer
{
    if (_player)
    {
        [[NSNotificationCenter defaultCenter] removeObserver: self name: AVAudioSessionRouteChangeNotification object: nil];
        // NOTE(review): the "status" observer was added to _playerItem in
        // prepareAsset:; removing it via _player.currentItem assumes the two
        // are the same object at this point — confirm.
        [_player.currentItem removeObserver: self forKeyPath: @"status"];
        [_player removeObserver: self forKeyPath: @"currentItem"];
        [_player pause];
        _player = nil;
    }
    if (_playerItem)
    {
        [[NSNotificationCenter defaultCenter] removeObserver: self name: AVPlayerItemDidPlayToEndTimeNotification object: _playerItem];
        _playerItem = nil;
    }
}
  125. - (void)unloadPlayer
  126. {
  127. [self cleanupCVTextureCache];
  128. [self cleanupAssetReader];
  129. [self cleanupPlayer];
  130. _videoSize = CGSizeMake(0, 0);
  131. _duration = _curTime = kCMTimeZero;
  132. _curFrameTimestamp = _lastFrameTimestamp = kCMTimeZero;
  133. self->_playerReady = self->_assetReady = self->_itemReady = NO;
  134. }
  135. - (BOOL)loadVideo:(NSURL*)url
  136. {
  137. AVURLAsset* asset = [AVURLAsset URLAssetWithURL: url options: nil];
  138. if (!asset)
  139. return NO;
  140. NSArray* requestedKeys = @[@"tracks", @"playable"];
  141. [asset loadValuesAsynchronouslyForKeys: requestedKeys completionHandler:^{
  142. dispatch_async(dispatch_get_main_queue(), ^{
  143. [self prepareAsset: asset withKeys: requestedKeys];
  144. });
  145. }];
  146. return YES;
  147. }
  148. - (BOOL)_playWithPrepareBlock:(BOOL (^)())preparePlaybackBlock
  149. {
  150. if (!_playerReady)
  151. return NO;
  152. if (preparePlaybackBlock && preparePlaybackBlock() == NO)
  153. return NO;
  154. // do not do seekTo and setRate here, it seems that http streaming may hang sometimes if you do so. go figure
  155. _curFrameTimestamp = _lastFrameTimestamp = kCMTimeZero;
  156. [_player play];
  157. return YES;
  158. }
  159. - (BOOL)playToView:(VideoPlayerView*)view
  160. {
  161. return [self _playWithPrepareBlock:^() {
  162. view.player = self->_player;
  163. return YES;
  164. }];
  165. }
  166. - (BOOL)playToTexture
  167. {
  168. return [self _playWithPrepareBlock:^() {
  169. return [self prepareReader];
  170. }];
  171. }
  172. - (BOOL)playVideoPlayer
  173. {
  174. return [self _playWithPrepareBlock: nil];
  175. }
  176. - (BOOL)isPlaying { return _playerReady && _player.rate != 0.0f; }
  177. - (void)pause
  178. {
  179. if (_playerReady && _player.rate != 0.0f)
  180. [_player pause];
  181. }
  182. - (void)resume
  183. {
  184. if (_playerReady && _player.rate == 0.0f)
  185. {
  186. [self seekToTimestamp: _player.currentTime];
  187. [_player play];
  188. }
  189. }
  190. - (void)rewind { [self seekToTimestamp: kCMTimeZero]; }
  191. - (void)seekTo:(float)timeSeconds { [self seekToTimestamp: CMTimeMakeWithSeconds(timeSeconds, 1)]; }
  192. - (void)seekToTimestamp:(CMTime)time
  193. {
  194. [_player seekToTime: time toleranceBefore: kCMTimeZero toleranceAfter: kCMTimeZero];
  195. _curFrameTimestamp = _lastFrameTimestamp = time;
  196. }
// Advances the asset reader to the frame matching the player's current time
// and returns the sampled texture handle (as intptr_t).
// Returns 0 when no reader is active or the stream has ended.
- (intptr_t)curFrameTexture
{
    if (!_reader)
        return 0;

    intptr_t curTex = CMVideoSampling_LastSampledTexture(&_videoSampling);
    CMTime time = [_player currentTime];

    // if we have changed audio route and due to current category apple decided to pause playback - resume automatically
    // NOTE(review): _AudioRouteWasChanged is defined further down this file;
    // in ObjC++ a file-scope static must be visible before first use —
    // confirm this translation unit compiles in this order.
    if (_AudioRouteWasChanged && _player.rate == 0.0f)
        _player.rate = 1.0f;

    // Nothing to do if the playhead hasn't moved or the reader stopped.
    if (CMTimeCompare(time, _curTime) == 0 || _reader.status != AVAssetReaderStatusReading)
        return curTex;
    _curTime = time;

    // Drain samples until the held frame's PTS catches up with the playhead.
    while (_reader.status == AVAssetReaderStatusReading && CMTimeCompare(_curFrameTimestamp, _curTime) <= 0)
    {
        if (_cmSampleBuffer)
            CFRelease(_cmSampleBuffer);

        // TODO: properly handle ending
        _cmSampleBuffer = [_videoOut copyNextSampleBuffer];
        if (_cmSampleBuffer == 0)
        {
            // End of stream (or read failure): drop the texture cache and bail.
            [self cleanupCVTextureCache];
            return 0;
        }
        _curFrameTimestamp = CMSampleBufferGetPresentationTimeStamp(_cmSampleBuffer);
    }

    // Upload only when we actually advanced to a newer frame.
    if (CMTimeCompare(_lastFrameTimestamp, _curFrameTimestamp) < 0)
    {
        _lastFrameTimestamp = _curFrameTimestamp;
        size_t w, h;
        curTex = CMVideoSampling_SampleBuffer(&_videoSampling, _cmSampleBuffer, &w, &h);
        _videoSize = CGSizeMake(w, h);
    }

    return curTex;
}
  231. - (BOOL)setAudioVolume:(float)volume
  232. {
  233. #if !PLATFORM_VISIONOS
  234. if (!_playerReady)
  235. return NO;
  236. NSArray* audio = [_playerItem.asset tracksWithMediaType: AVMediaTypeAudio];
  237. NSMutableArray* params = [NSMutableArray array];
  238. for (AVAssetTrack* track in audio)
  239. {
  240. AVMutableAudioMixInputParameters* inputParams = [AVMutableAudioMixInputParameters audioMixInputParameters];
  241. [inputParams setVolume: volume atTime: kCMTimeZero];
  242. [inputParams setTrackID: [track trackID]];
  243. [params addObject: inputParams];
  244. }
  245. AVMutableAudioMix* audioMix = [AVMutableAudioMix audioMix];
  246. [audioMix setInputParameters: params];
  247. [_playerItem setAudioMix: audioMix];
  248. return YES;
  249. #else
  250. return YES;
  251. #endif
  252. }
// AVPlayerItemDidPlayToEndTimeNotification handler — forwards completion to
// the delegate.
- (void)playerItemDidReachEnd:(NSNotification*)notification
{
    [delegate onPlayerDidFinishPlayingVideo];
}

// Set when the audio route changes (e.g. headphones unplugged); checked in
// curFrameTexture to auto-resume playback Apple may have paused.
// NOTE(review): this static is declared mid-file but referenced by
// curFrameTexture above — confirm a declaration is visible at that use site.
static bool _AudioRouteWasChanged = false;
- (void)audioRouteChanged:(NSNotification*)notification
{
    _AudioRouteWasChanged = true;
}
// KVO callback for the two observations installed by prepareAsset::
//   _ObserveItemStatusContext -> AVPlayerItem "status"
//   _ObservePlayerItemContext -> AVPlayer "currentItem"
// The player is reported ready only once BOTH the asset metadata and the
// current item are in place (readiness order is not deterministic — see the
// _assetReady/_itemReady ivar comment).
- (void)observeValueForKeyPath:(NSString*)path ofObject:(id)object change:(NSDictionary*)change context:(void*)context
{
    BOOL reportPlayerReady = NO;
    if (context == _ObserveItemStatusContext)
    {
        AVPlayerStatus status = (AVPlayerStatus)[[change objectForKey: NSKeyValueChangeNewKey] integerValue];
        switch (status)
        {
            case AVPlayerStatusUnknown:
                break;

            case AVPlayerStatusReadyToPlay:
            {
#if !PLATFORM_VISIONOS
                // Cache the natural size of the first video track.
                NSArray* video = [_playerItem.asset tracksWithMediaType: AVMediaTypeVideo];
                if ([video count])
                    _videoSize = [(AVAssetTrack*)[video objectAtIndex: 0] naturalSize];
#endif
                _duration = [_playerItem duration];
                _assetReady = YES;
                reportPlayerReady = _itemReady;
            }
            break;

            case AVPlayerStatusFailed:
            {
                AVPlayerItem *playerItem = (AVPlayerItem*)object;
                [self reportError: playerItem.error category: "prepareAsset"];
            }
            break;
        }
    }
    else if (context == _ObservePlayerItemContext)
    {
        // currentItem became non-null: the player now has an item attached.
        if ([change objectForKey: NSKeyValueChangeNewKey] != (id)[NSNull null])
        {
            _itemReady = YES;
            reportPlayerReady = _assetReady;
        }
    }
    else
    {
        // Not one of our contexts — forward to super, as KVO requires.
        [super observeValueForKeyPath: path ofObject: object change: change context: context];
    }

    if (reportPlayerReady)
    {
        _playerReady = YES;
        [delegate onPlayerReady];
    }
}
// Completion handler for loadVideo: — runs on the main queue once the
// requested asset keys have loaded. Validates the asset, (re)creates the
// player item with its observers, and lazily creates the shared AVPlayer.
- (void)prepareAsset:(AVAsset*)asset withKeys:(NSArray*)requestedKeys
{
    // check successful loading of every requested key
    for (NSString* key in requestedKeys)
    {
        NSError* error = nil;
        AVKeyValueStatus keyStatus = [asset statusOfValueForKey: key error: &error];
        if (keyStatus == AVKeyValueStatusFailed)
        {
            [self reportError: error category: "prepareAsset"];
            return;
        }
    }

    if (!asset.playable)
    {
        [self reportErrorWithString: "Item cannot be played" category: "prepareAsset"];
        return;
    }

    // Replace any previous item, detaching its observers first.
    if (_playerItem)
    {
        [_playerItem removeObserver: self forKeyPath: @"status"];
        [[NSNotificationCenter defaultCenter] removeObserver: self name: AVPlayerItemDidPlayToEndTimeNotification object: _playerItem];
        _playerItem = nil;
    }

    _playerItem = [AVPlayerItem playerItemWithAsset: asset];
    [_playerItem addObserver: self forKeyPath: @"status"
        options: NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew
        context: _ObserveItemStatusContext
    ];
    [[NSNotificationCenter defaultCenter] addObserver: self selector: @selector(playerItemDidReachEnd:)
        name: AVPlayerItemDidPlayToEndTimeNotification object: _playerItem
    ];

    // The AVPlayer itself is created once and reused across loads.
    if (!_player)
    {
        _player = [AVPlayer playerWithPlayerItem: _playerItem];
        [_player addObserver: self forKeyPath: @"currentItem"
            options: NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew
            context: _ObservePlayerItemContext
        ];
#if !PLATFORM_VISIONOS
        [_player setAllowsExternalPlayback: NO];
#endif
        // we want to subscribe to route change notifications, for that we need audio session active
        // and in case FMOD wasnt used up to this point it is still not active
        [[AVAudioSession sharedInstance] setActive: YES error: nil];
        [[NSNotificationCenter defaultCenter] addObserver: self selector: @selector(audioRouteChanged:)
            name: AVAudioSessionRouteChangeNotification object: nil
        ];
    }

    // Attach the new item (or rewind if the player already holds it).
    if (_player.currentItem != _playerItem)
        [_player replaceCurrentItemWithPlayerItem: _playerItem];
    else
        [_player seekToTime: kCMTimeZero];
}
  364. - (BOOL)prepareReader
  365. {
  366. if (!_playerReady)
  367. return NO;
  368. [self cleanupAssetReader];
  369. AVURLAsset* asset = (AVURLAsset*)_playerItem.asset;
  370. if (![asset.URL isFileURL])
  371. {
  372. [self reportErrorWithString: "non-file url. no video to texture." category: "prepareReader"];
  373. return NO;
  374. }
  375. NSError* error = nil;
  376. _reader = [AVAssetReader assetReaderWithAsset: _playerItem.asset error: &error];
  377. if (error)
  378. [self reportError: error category: "prepareReader"];
  379. _reader.timeRange = CMTimeRangeMake(kCMTimeZero, _duration);
  380. #if !PLATFORM_VISIONOS
  381. AVAssetTrack* videoTrack = [[_playerItem.asset tracksWithMediaType: AVMediaTypeVideo] objectAtIndex: 0];
  382. NSDictionary* options = @{ (NSString*)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA) };
  383. _videoOut = [[AVAssetReaderTrackOutput alloc] initWithTrack: videoTrack outputSettings: options];
  384. _videoOut.alwaysCopiesSampleData = NO;
  385. if (![_reader canAddOutput: _videoOut])
  386. {
  387. [self reportErrorWithString: "canAddOutput returned false" category: "prepareReader"];
  388. return NO;
  389. }
  390. [_reader addOutput: _videoOut];
  391. if (![_reader startReading])
  392. {
  393. [self reportError: [_reader error] category: "prepareReader"];
  394. return NO;
  395. }
  396. [self cleanupCVTextureCache];
  397. CMVideoSampling_Initialize(&_videoSampling);
  398. return YES;
  399. #else
  400. return NO;
  401. #endif
  402. }
  403. @end