From 75f615b22397a5ef7810b7278e5a518d5924c1f6 Mon Sep 17 00:00:00 2001 From: Gareth Reese Date: Tue, 1 Mar 2016 10:25:45 +0000 Subject: [PATCH 01/13] Rearranged the method order to put the construction in one place and moved construction of the playerNode to -constructPlayerNode for DRY --- AsyncDisplayKit/ASVideoNode.mm | 198 ++++++++++++++++----------------- 1 file changed, 99 insertions(+), 99 deletions(-) diff --git a/AsyncDisplayKit/ASVideoNode.mm b/AsyncDisplayKit/ASVideoNode.mm index 5e7d93a197..5fbe5e13fb 100644 --- a/AsyncDisplayKit/ASVideoNode.mm +++ b/AsyncDisplayKit/ASVideoNode.mm @@ -14,14 +14,14 @@ ASDN::RecursiveMutex _videoLock; __weak id _delegate; - + BOOL _shouldBePlaying; BOOL _shouldAutorepeat; BOOL _shouldAutoplay; BOOL _muted; - + AVAsset *_asset; AVPlayerItem *_currentItem; @@ -41,12 +41,15 @@ @implementation ASVideoNode + +#pragma mark - Construction and Layout + - (instancetype)init { if (!(self = [super init])) { return nil; } - + _previewQueue = dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0); self.playButton = [[ASDefaultPlayButton alloc] init]; @@ -54,10 +57,89 @@ self.gravity = AVLayerVideoGravityResizeAspect; [self addTarget:self action:@selector(tapped) forControlEvents:ASControlNodeEventTouchUpInside]; - + return self; } +- (instancetype)initWithViewBlock:(ASDisplayNodeViewBlock)viewBlock didLoadBlock:(ASDisplayNodeDidLoadBlock)didLoadBlock +{ + ASDisplayNodeAssertNotSupported(); + return nil; +} + +- (ASDisplayNode*) constructPlayerNode { + ASDisplayNode* playerNode = [[ASDisplayNode alloc] initWithLayerBlock:^CALayer *{ + AVPlayerLayer *playerLayer = [[AVPlayerLayer alloc] init]; + if (!_player) { + _player = [AVPlayer playerWithPlayerItem:[[AVPlayerItem alloc] initWithAsset:_asset]]; + _player.muted = _muted; + } + playerLayer.player = _player; + playerLayer.videoGravity = [self gravity]; + return playerLayer; + }]; + + return playerNode; +} + +- (void)didLoad +{ + [super didLoad]; + [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(didPlayToEnd:) name:AVPlayerItemDidPlayToEndTimeNotification object:nil]; + + if (_shouldBePlaying) { + _playerNode = [self constructPlayerNode]; + [self insertSubnode:_playerNode atIndex:0]; + } else { + dispatch_async(_previewQueue, ^{ + AVAssetImageGenerator *imageGenerator = [[AVAssetImageGenerator alloc] initWithAsset:_asset]; + imageGenerator.appliesPreferredTrackTransform = YES; + [imageGenerator generateCGImagesAsynchronouslyForTimes:@[[NSValue valueWithCMTime:CMTimeMake(0, 1)]] completionHandler:^(CMTime requestedTime, CGImageRef _Nullable image, CMTime actualTime, AVAssetImageGeneratorResult result, NSError * _Nullable error) { + UIImage *theImage = [UIImage imageWithCGImage:image]; + + _placeholderImageNode = [[ASImageNode alloc] init]; + _placeholderImageNode.layerBacked = YES; + _placeholderImageNode.image = theImage; + + if ([_gravity isEqualToString:AVLayerVideoGravityResize]) { + _placeholderImageNode.contentMode = UIViewContentModeRedraw; + } + if ([_gravity isEqualToString:AVLayerVideoGravityResizeAspect]) { + _placeholderImageNode.contentMode = UIViewContentModeScaleAspectFit; + } + if ([_gravity isEqual:AVLayerVideoGravityResizeAspectFill]) { + _placeholderImageNode.contentMode = UIViewContentModeScaleAspectFill; + } + + dispatch_async(dispatch_get_main_queue(), ^{ + _placeholderImageNode.frame = self.bounds; + [self insertSubnode:_placeholderImageNode atIndex:0]; + }); + }]; + }); + } +} + +- (void)layout +{ + [super layout]; + + CGRect bounds = self.bounds; + + 
_placeholderImageNode.frame = bounds; + _playerNode.frame = bounds; + _playerNode.layer.frame = bounds; + + _playButton.frame = bounds; + + CGFloat horizontalDiff = (bounds.size.width - _playButton.bounds.size.width)/2; + CGFloat verticalDiff = (bounds.size.height - _playButton.bounds.size.height)/2; + _playButton.hitTestSlop = UIEdgeInsetsMake(-verticalDiff, -horizontalDiff, -verticalDiff, -horizontalDiff); + + _spinner.bounds = CGRectMake(0, 0, 44, 44); + _spinner.position = CGPointMake(bounds.size.width/2, bounds.size.height/2); +} + - (void)interfaceStateDidChange:(ASInterfaceState)newState fromState:(ASInterfaceState)oldState { if (!(newState & ASInterfaceStateVisible)) { @@ -106,74 +188,6 @@ } } -- (void)layout -{ - [super layout]; - - CGRect bounds = self.bounds; - - _placeholderImageNode.frame = bounds; - _playerNode.frame = bounds; - _playerNode.layer.frame = bounds; - - _playButton.frame = bounds; - - CGFloat horizontalDiff = (bounds.size.width - _playButton.bounds.size.width)/2; - CGFloat verticalDiff = (bounds.size.height - _playButton.bounds.size.height)/2; - _playButton.hitTestSlop = UIEdgeInsetsMake(-verticalDiff, -horizontalDiff, -verticalDiff, -horizontalDiff); - - _spinner.bounds = CGRectMake(0, 0, 44, 44); - _spinner.position = CGPointMake(bounds.size.width/2, bounds.size.height/2); -} - -- (void)didLoad -{ - [super didLoad]; - [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(didPlayToEnd:) name:AVPlayerItemDidPlayToEndTimeNotification object:nil]; - - if (_shouldBePlaying) { - _playerNode = [[ASDisplayNode alloc] initWithLayerBlock:^CALayer *{ - AVPlayerLayer *playerLayer = [[AVPlayerLayer alloc] init]; - if (!_player) { - _player = [AVPlayer playerWithPlayerItem:[[AVPlayerItem alloc] initWithAsset:_asset]]; - _player.muted = _muted; - } - playerLayer.player = _player; - playerLayer.videoGravity = [self gravity]; - return playerLayer; - }]; - - [self insertSubnode:_playerNode atIndex:0]; - } else { - dispatch_async(_previewQueue, ^{ - AVAssetImageGenerator *imageGenerator = [[AVAssetImageGenerator alloc] initWithAsset:_asset]; - imageGenerator.appliesPreferredTrackTransform = YES; - [imageGenerator generateCGImagesAsynchronouslyForTimes:@[[NSValue valueWithCMTime:CMTimeMake(0, 1)]] completionHandler:^(CMTime requestedTime, CGImageRef _Nullable image, CMTime actualTime, AVAssetImageGeneratorResult result, NSError * _Nullable error) { - UIImage *theImage = [UIImage imageWithCGImage:image]; - - _placeholderImageNode = [[ASImageNode alloc] init]; - _placeholderImageNode.layerBacked = YES; - _placeholderImageNode.image = theImage; - - if ([_gravity isEqualToString:AVLayerVideoGravityResize]) { - _placeholderImageNode.contentMode = UIViewContentModeRedraw; - } - if ([_gravity isEqualToString:AVLayerVideoGravityResizeAspect]) { - _placeholderImageNode.contentMode = UIViewContentModeScaleAspectFit; - } - if ([_gravity isEqual:AVLayerVideoGravityResizeAspectFill]) { - _placeholderImageNode.contentMode = UIViewContentModeScaleAspectFill; - } - - dispatch_async(dispatch_get_main_queue(), ^{ - _placeholderImageNode.frame = self.bounds; - [self insertSubnode:_placeholderImageNode atIndex:0]; - }); - }]; - }); - } -} - - (void)tapped { if (self.delegate && [self.delegate respondsToSelector:@selector(videoNodeWasTapped:)]) { @@ -187,28 +201,22 @@ } } -- (instancetype)initWithViewBlock:(ASDisplayNodeViewBlock)viewBlock didLoadBlock:(ASDisplayNodeDidLoadBlock)didLoadBlock -{ - ASDisplayNodeAssertNotSupported(); - return nil; -} - - (void)fetchData { [super 
fetchData]; - + @try { [_currentItem removeObserver:self forKeyPath:NSStringFromSelector(@selector(status))]; } @catch (NSException * __unused exception) { NSLog(@"unnecessary removal in fetch data"); } - + { ASDN::MutexLocker l(_videoLock); _currentItem = [[AVPlayerItem alloc] initWithAsset:_asset]; [_currentItem addObserver:self forKeyPath:NSStringFromSelector(@selector(status)) options:NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew context:NULL]; - + if (_player) { [_player replaceCurrentItemWithPlayerItem:_currentItem]; } else { @@ -246,13 +254,14 @@ } ((AVPlayerLayer *)_playerNode.layer).player = _player; } - + if (_shouldBePlaying) { [self play]; } } } + #pragma mark - Video Properties - (void)setPlayButton:(ASButtonNode *)playButton @@ -278,11 +287,11 @@ ASDN::MutexLocker l(_videoLock); if (ASObjectIsEqual(asset, _asset) - || ([asset isKindOfClass:[AVURLAsset class]] - && [_asset isKindOfClass:[AVURLAsset class]] - && ASObjectIsEqual(((AVURLAsset *)asset).URL, ((AVURLAsset *)_asset).URL))) { - return; - } + || ([asset isKindOfClass:[AVURLAsset class]] + && [_asset isKindOfClass:[AVURLAsset class]] + && ASObjectIsEqual(((AVURLAsset *)asset).URL, ((AVURLAsset *)_asset).URL))) { + return; + } _asset = asset; @@ -323,14 +332,14 @@ - (BOOL)muted { ASDN::MutexLocker l(_videoLock); - + return _muted; } - (void)setMuted:(BOOL)muted { ASDN::MutexLocker l(_videoLock); - + _muted = muted; } @@ -350,16 +359,7 @@ } if (!_playerNode) { - _playerNode = [[ASDisplayNode alloc] initWithLayerBlock:^CALayer *{ - AVPlayerLayer *playerLayer = [[AVPlayerLayer alloc] init]; - if (!_player) { - _player = [AVPlayer playerWithPlayerItem:[[AVPlayerItem alloc] initWithAsset:_asset]]; - _player.muted = _muted; - } - playerLayer.player = _player; - playerLayer.videoGravity = [self gravity]; - return playerLayer; - }]; + _playerNode = [self constructPlayerNode]; if ([self.subnodes containsObject:_playButton]) { [self insertSubnode:_playerNode belowSubnode:_playButton]; From f9c8c043a16e61805e0fd480486d01738385792f Mon Sep 17 00:00:00 2001 From: Gareth Reese Date: Tue, 1 Mar 2016 12:08:09 +0000 Subject: [PATCH 02/13] First working version for HLS streams --- AsyncDisplayKit/ASVideoNode.h | 6 ++- AsyncDisplayKit/ASVideoNode.mm | 52 ++++++++++++++++--- examples/VideoTableView/Sample/NicCageNode.mm | 28 ++++++++-- 3 files changed, 74 insertions(+), 12 deletions(-) diff --git a/AsyncDisplayKit/ASVideoNode.h b/AsyncDisplayKit/ASVideoNode.h index ebc9957c0c..4f3d7eea67 100644 --- a/AsyncDisplayKit/ASVideoNode.h +++ b/AsyncDisplayKit/ASVideoNode.h @@ -15,7 +15,7 @@ // in an issue on GitHub: https://github.com/facebook/AsyncDisplayKit/issues @interface ASVideoNode : ASControlNode -@property (atomic, strong, readwrite) AVAsset *asset; +@property (atomic, strong, readonly) AVAsset *asset; @property (atomic, strong, readonly) AVPlayer *player; @property (atomic, strong, readonly) AVPlayerItem *currentItem; @@ -31,6 +31,10 @@ @property (atomic, weak, readwrite) id delegate; +// Mirror the construction of AVPlayerItem with the URL or AVAsset +- (instancetype)initWithURL:(NSURL*)url; +- (instancetype)initWithAsset:(AVAsset*)asset; + - (void)play; - (void)pause; diff --git a/AsyncDisplayKit/ASVideoNode.mm b/AsyncDisplayKit/ASVideoNode.mm index 5fbe5e13fb..210df9c762 100644 --- a/AsyncDisplayKit/ASVideoNode.mm +++ b/AsyncDisplayKit/ASVideoNode.mm @@ -23,6 +23,7 @@ BOOL _muted; AVAsset *_asset; + NSURL *_url; AVPlayerItem *_currentItem; AVPlayer *_player; @@ -41,26 +42,51 @@ @implementation ASVideoNode +//TODO: 
Have a bash at getting the preview images sorted for the URL types - might need to observe until it's loaded + +//TODO: Have a bash at supplying a preview image node so that we're deferring the construction of the video as it eats memory at the moment +// [[[[playerItem tracks] objectAtIndex:0] assetTrack] asset] #pragma mark - Construction and Layout -- (instancetype)init +- (instancetype)initWithURL:(NSURL*)url { + ASDisplayNodeAssertNotNil(url, @"URL must be supplied in initWithURL:"); if (!(self = [super init])) { return nil; } + _url = url; + return [self commonInit]; +} + +- (instancetype)initWithAsset:(AVAsset*)asset +{ + ASDisplayNodeAssertNotNil(asset, @"Asset must be supplied in initWithAsset:"); + if (!(self = [super init])) { + return nil; + } + _asset = asset; + return [self commonInit]; +} + +- (instancetype)commonInit +{ _previewQueue = dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0); self.playButton = [[ASDefaultPlayButton alloc] init]; - self.gravity = AVLayerVideoGravityResizeAspect; - [self addTarget:self action:@selector(tapped) forControlEvents:ASControlNodeEventTouchUpInside]; return self; } +- (instancetype)init +{ + ASDisplayNodeAssertNotSupported(); + return nil; +} + - (instancetype)initWithViewBlock:(ASDisplayNodeViewBlock)viewBlock didLoadBlock:(ASDisplayNodeDidLoadBlock)didLoadBlock { ASDisplayNodeAssertNotSupported(); @@ -71,7 +97,7 @@ ASDisplayNode* playerNode = [[ASDisplayNode alloc] initWithLayerBlock:^CALayer *{ AVPlayerLayer *playerLayer = [[AVPlayerLayer alloc] init]; if (!_player) { - _player = [AVPlayer playerWithPlayerItem:[[AVPlayerItem alloc] initWithAsset:_asset]]; + _player = [AVPlayer playerWithPlayerItem:[self constructPlayerItemFromInitData]]; _player.muted = _muted; } playerLayer.player = _player; @@ -82,6 +108,18 @@ return playerNode; } +- (AVPlayerItem*) constructPlayerItemFromInitData { + ASDisplayNodeAssert(_asset || _url, @"Must be initialised with an AVAsset or URL"); + + if (_asset) { + return [[AVPlayerItem alloc] initWithAsset:_asset]; + } else if (_url) { + return [[AVPlayerItem alloc] initWithURL:_url]; + } + + return nil; +} + - (void)didLoad { [super didLoad]; @@ -90,7 +128,7 @@ if (_shouldBePlaying) { _playerNode = [self constructPlayerNode]; [self insertSubnode:_playerNode atIndex:0]; - } else { + } else if (_asset) { dispatch_async(_previewQueue, ^{ AVAssetImageGenerator *imageGenerator = [[AVAssetImageGenerator alloc] initWithAsset:_asset]; imageGenerator.appliesPreferredTrackTransform = YES; @@ -214,7 +252,7 @@ { ASDN::MutexLocker l(_videoLock); - _currentItem = [[AVPlayerItem alloc] initWithAsset:_asset]; + _currentItem = [self constructPlayerItemFromInitData]; [_currentItem addObserver:self forKeyPath:NSStringFromSelector(@selector(status)) options:NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew context:NULL]; if (_player) { @@ -249,7 +287,7 @@ if (isVisible) { if (_playerNode.isNodeLoaded) { if (!_player) { - _player = [AVPlayer playerWithPlayerItem:[[AVPlayerItem alloc] initWithAsset:_asset]]; + _player = [AVPlayer playerWithPlayerItem:[self constructPlayerItemFromInitData]]; _player.muted = _muted; } ((AVPlayerLayer *)_playerNode.layer).player = _player; diff --git a/examples/VideoTableView/Sample/NicCageNode.mm b/examples/VideoTableView/Sample/NicCageNode.mm index c2277762ed..b0cf7ebc5b 100644 --- a/examples/VideoTableView/Sample/NicCageNode.mm +++ b/examples/VideoTableView/Sample/NicCageNode.mm @@ -80,16 +80,36 @@ static const CGFloat kInnerPadding = 10.0f; return nil; _kittenSize = size; - - 
_videoNode = [[ASVideoNode alloc] init]; + + u_int32_t videoInitMethod = arc4random_uniform(3); + NSArray* methodArray = @[@"AVAsset", @"File URL", @"HLS URL"]; + + switch (videoInitMethod) { + case 0: + // Construct an AVAsset from a URL + _videoNode = [[ASVideoNode alloc] initWithAsset: + [AVAsset assetWithURL:[NSURL URLWithString:@"https://files.parsetfss.com/8a8a3b0c-619e-4e4d-b1d5-1b5ba9bf2b42/tfss-753fe655-86bb-46da-89b7-aa59c60e49c0-niccage.mp4"]]]; + break; + + case 1: + // Construct the video node directly from the .mp4 URL + _videoNode = [[ASVideoNode alloc] initWithURL:[NSURL URLWithString:@"https://files.parsetfss.com/8a8a3b0c-619e-4e4d-b1d5-1b5ba9bf2b42/tfss-753fe655-86bb-46da-89b7-aa59c60e49c0-niccage.mp4"]]; + break; + + case 2: + // Construct the video node from an HTTP Live Streaming URL + // URL from https://developer.apple.com/library/ios/documentation/AudioVideo/Conceptual/AVFoundationPG/Articles/02_Playback.html + _videoNode = [[ASVideoNode alloc] initWithURL:[NSURL URLWithString:@"http://devimages.apple.com/iphone/samples/bipbop/bipbopall.m3u8"]]; + break; + } + // _videoNode.shouldAutoplay = YES; _videoNode.backgroundColor = ASDisplayNodeDefaultPlaceholderColor(); - _videoNode.asset = [AVAsset assetWithURL:[NSURL URLWithString:@"https://files.parsetfss.com/8a8a3b0c-619e-4e4d-b1d5-1b5ba9bf2b42/tfss-753fe655-86bb-46da-89b7-aa59c60e49c0-niccage.mp4"]]; [self addSubnode:_videoNode]; _textNode = [[ASTextNode alloc] init]; - _textNode.attributedString = [[NSAttributedString alloc] initWithString:[self kittyIpsum] + _textNode.attributedString = [[NSAttributedString alloc] initWithString:[NSString stringWithFormat:@"%@ %@", methodArray[videoInitMethod], [self kittyIpsum]] attributes:[self textStyle]]; [self addSubnode:_textNode]; From 6318db45c8ada56aab04297e13edd0041de6c7c2 Mon Sep 17 00:00:00 2001 From: Gareth Reese Date: Tue, 1 Mar 2016 13:06:29 +0000 Subject: [PATCH 03/13] Updated the Videos sample app to cover the API changes --- AsyncDisplayKit/ASVideoNode.mm | 2 ++ examples/Videos/Sample/ViewController.m | 14 +++++--------- 2 files changed, 7 insertions(+), 9 deletions(-) diff --git a/AsyncDisplayKit/ASVideoNode.mm b/AsyncDisplayKit/ASVideoNode.mm index 210df9c762..3f41c06281 100644 --- a/AsyncDisplayKit/ASVideoNode.mm +++ b/AsyncDisplayKit/ASVideoNode.mm @@ -47,6 +47,8 @@ //TODO: Have a bash at supplying a preview image node so that we're deferring the construction of the video as it eats memory at the moment // [[[[playerItem tracks] objectAtIndex:0] assetTrack] asset] +//TODO: URL file videos don't seem to repeat + #pragma mark - Construction and Layout - (instancetype)initWithURL:(NSURL*)url diff --git a/examples/Videos/Sample/ViewController.m b/examples/Videos/Sample/ViewController.m index 5c4c0488ef..3d4698028b 100644 --- a/examples/Videos/Sample/ViewController.m +++ b/examples/Videos/Sample/ViewController.m @@ -33,9 +33,8 @@ - (ASVideoNode *)guitarVideo; { - ASVideoNode *videoNode = [[ASVideoNode alloc] init]; - - videoNode.asset = [AVAsset assetWithURL:[NSURL URLWithString:@"https://files.parsetfss.com/8a8a3b0c-619e-4e4d-b1d5-1b5ba9bf2b42/tfss-3045b261-7e93-4492-b7e5-5d6358376c9f-editedLiveAndDie.mov"]]; + AVAsset* asset = [AVAsset assetWithURL:[NSURL URLWithString:@"https://files.parsetfss.com/8a8a3b0c-619e-4e4d-b1d5-1b5ba9bf2b42/tfss-3045b261-7e93-4492-b7e5-5d6358376c9f-editedLiveAndDie.mov"]]; + ASVideoNode *videoNode = [[ASVideoNode alloc] initWithAsset:asset]; videoNode.frame = CGRectMake(0, 0, [UIScreen mainScreen].bounds.size.width, [UIScreen 
mainScreen].bounds.size.height/3); @@ -48,12 +47,11 @@ - (ASVideoNode *)nicCageVideo; { - ASVideoNode *nicCageVideo = [[ASVideoNode alloc] init]; + AVAsset* asset = [AVAsset assetWithURL:[NSURL URLWithString:@"https://files.parsetfss.com/8a8a3b0c-619e-4e4d-b1d5-1b5ba9bf2b42/tfss-753fe655-86bb-46da-89b7-aa59c60e49c0-niccage.mp4"]]; + ASVideoNode *nicCageVideo = [[ASVideoNode alloc] initWithAsset:asset]; nicCageVideo.delegate = self; - nicCageVideo.asset = [AVAsset assetWithURL:[NSURL URLWithString:@"https://files.parsetfss.com/8a8a3b0c-619e-4e4d-b1d5-1b5ba9bf2b42/tfss-753fe655-86bb-46da-89b7-aa59c60e49c0-niccage.mp4"]]; - nicCageVideo.frame = CGRectMake([UIScreen mainScreen].bounds.size.width/2, [UIScreen mainScreen].bounds.size.height/3, [UIScreen mainScreen].bounds.size.width/2, [UIScreen mainScreen].bounds.size.height/3); nicCageVideo.gravity = AVLayerVideoGravityResize; @@ -68,10 +66,8 @@ - (ASVideoNode *)simonVideo; { - ASVideoNode *simonVideo = [[ASVideoNode alloc] init]; - NSURL *url = [NSURL fileURLWithPath:[[NSBundle mainBundle] pathForResource:@"simon" ofType:@"mp4"]]; - simonVideo.asset = [AVAsset assetWithURL:url]; + ASVideoNode *simonVideo = [[ASVideoNode alloc] initWithURL:url]; simonVideo.frame = CGRectMake(0, [UIScreen mainScreen].bounds.size.height - ([UIScreen mainScreen].bounds.size.height/3), [UIScreen mainScreen].bounds.size.width/2, [UIScreen mainScreen].bounds.size.height/3); From 4060f40d324e0cbad01d05b8d69eb7efa601d04e Mon Sep 17 00:00:00 2001 From: Gareth Reese Date: Wed, 2 Mar 2016 13:53:19 +0000 Subject: [PATCH 04/13] Added preview images for single file URLs. Unfortunately doesn't seem possible for HLS video. --- AsyncDisplayKit/ASVideoNode.mm | 85 ++++++++++++++++++++++------------ 1 file changed, 55 insertions(+), 30 deletions(-) diff --git a/AsyncDisplayKit/ASVideoNode.mm b/AsyncDisplayKit/ASVideoNode.mm index 3f41c06281..c09bf6f8c4 100644 --- a/AsyncDisplayKit/ASVideoNode.mm +++ b/AsyncDisplayKit/ASVideoNode.mm @@ -42,13 +42,15 @@ @implementation ASVideoNode -//TODO: Have a bash at getting the preview images sorted for the URL types - might need to observe until it's loaded - //TODO: Have a bash at supplying a preview image node so that we're deferring the construction of the video as it eats memory at the moment // [[[[playerItem tracks] objectAtIndex:0] assetTrack] asset] //TODO: URL file videos don't seem to repeat +//TODO: Have a look at any unit tests + +//TODO: The preview image doesn't seem to scale with the video layout when you click on the item + #pragma mark - Construction and Layout - (instancetype)initWithURL:(NSURL*)url @@ -99,7 +101,8 @@ ASDisplayNode* playerNode = [[ASDisplayNode alloc] initWithLayerBlock:^CALayer *{ AVPlayerLayer *playerLayer = [[AVPlayerLayer alloc] init]; if (!_player) { - _player = [AVPlayer playerWithPlayerItem:[self constructPlayerItemFromInitData]]; + _currentItem = [self constructPlayerItemFromInitData]; + _player = [AVPlayer playerWithPlayerItem:_currentItem]; _player.muted = _muted; } playerLayer.player = _player; @@ -131,32 +134,7 @@ _playerNode = [self constructPlayerNode]; [self insertSubnode:_playerNode atIndex:0]; } else if (_asset) { - dispatch_async(_previewQueue, ^{ - AVAssetImageGenerator *imageGenerator = [[AVAssetImageGenerator alloc] initWithAsset:_asset]; - imageGenerator.appliesPreferredTrackTransform = YES; - [imageGenerator generateCGImagesAsynchronouslyForTimes:@[[NSValue valueWithCMTime:CMTimeMake(0, 1)]] completionHandler:^(CMTime requestedTime, CGImageRef _Nullable image, CMTime actualTime, 
AVAssetImageGeneratorResult result, NSError * _Nullable error) { - UIImage *theImage = [UIImage imageWithCGImage:image]; - - _placeholderImageNode = [[ASImageNode alloc] init]; - _placeholderImageNode.layerBacked = YES; - _placeholderImageNode.image = theImage; - - if ([_gravity isEqualToString:AVLayerVideoGravityResize]) { - _placeholderImageNode.contentMode = UIViewContentModeRedraw; - } - if ([_gravity isEqualToString:AVLayerVideoGravityResizeAspect]) { - _placeholderImageNode.contentMode = UIViewContentModeScaleAspectFit; - } - if ([_gravity isEqual:AVLayerVideoGravityResizeAspectFill]) { - _placeholderImageNode.contentMode = UIViewContentModeScaleAspectFill; - } - - dispatch_async(dispatch_get_main_queue(), ^{ - _placeholderImageNode.frame = self.bounds; - [self insertSubnode:_placeholderImageNode atIndex:0]; - }); - }]; - }); + [self setPlaceholderImagefromAsset:_asset]; } } @@ -180,6 +158,41 @@ _spinner.position = CGPointMake(bounds.size.width/2, bounds.size.height/2); } +- (void)setPlaceholderImagefromAsset:(AVAsset*)asset { + dispatch_async(_previewQueue, ^{ + AVAssetImageGenerator *imageGenerator = [[AVAssetImageGenerator alloc] initWithAsset:_asset]; + imageGenerator.appliesPreferredTrackTransform = YES; + [imageGenerator generateCGImagesAsynchronouslyForTimes:@[[NSValue valueWithCMTime:CMTimeMake(0, 1)]] completionHandler:^(CMTime requestedTime, CGImageRef _Nullable image, CMTime actualTime, AVAssetImageGeneratorResult result, NSError * _Nullable error) { + + // Unfortunately it's not possible to generate a preview image for an HTTP live stream asset, so we'll give up here + // http://stackoverflow.com/questions/32112205/m3u8-file-avassetimagegenerator-error + if (image) { + UIImage *theImage = [UIImage imageWithCGImage:image]; + + _placeholderImageNode = [[ASImageNode alloc] init]; + _placeholderImageNode.layerBacked = YES; + _placeholderImageNode.image = theImage; + + if ([_gravity isEqualToString:AVLayerVideoGravityResize]) { + _placeholderImageNode.contentMode = UIViewContentModeRedraw; + } + if ([_gravity isEqualToString:AVLayerVideoGravityResizeAspect]) { + _placeholderImageNode.contentMode = UIViewContentModeScaleAspectFit; + } + if ([_gravity isEqual:AVLayerVideoGravityResizeAspectFill]) { + _placeholderImageNode.contentMode = UIViewContentModeScaleAspectFill; + } + + dispatch_async(dispatch_get_main_queue(), ^{ + _placeholderImageNode.frame = self.bounds; + [self insertSubnode:_placeholderImageNode atIndex:0]; + [self setNeedsLayout]; + }); + } + }]; + }); +} + - (void)interfaceStateDidChange:(ASInterfaceState)newState fromState:(ASInterfaceState)oldState { if (!(newState & ASInterfaceStateVisible)) { @@ -205,6 +218,17 @@ [_spinner removeFromSupernode]; _spinner = nil; } + + // If we don't yet have a placeholder image update it now that we should have data available for it + if (!_placeholderImageNode) { + if (_currentItem && + _currentItem.tracks.count > 0 && + _currentItem.tracks[0].assetTrack && + _currentItem.tracks[0].assetTrack.asset) { + _asset = _currentItem.tracks[0].assetTrack.asset; + [self setPlaceholderImagefromAsset:_asset]; + } + } } if ([[change objectForKey:@"new"] integerValue] == AVPlayerItemStatusFailed) { @@ -289,7 +313,8 @@ if (isVisible) { if (_playerNode.isNodeLoaded) { if (!_player) { - _player = [AVPlayer playerWithPlayerItem:[self constructPlayerItemFromInitData]]; + _currentItem = [self constructPlayerItemFromInitData]; + _player = [AVPlayer playerWithPlayerItem:_currentItem]; _player.muted = _muted; } ((AVPlayerLayer 
*)_playerNode.layer).player = _player; From 2e53770d0e1ce62a0737721bdd68b647dc15cbf1 Mon Sep 17 00:00:00 2001 From: Gareth Reese Date: Wed, 2 Mar 2016 14:00:25 +0000 Subject: [PATCH 05/13] Didn't need the relayout --- AsyncDisplayKit/ASVideoNode.mm | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/AsyncDisplayKit/ASVideoNode.mm b/AsyncDisplayKit/ASVideoNode.mm index c09bf6f8c4..9b891b89b9 100644 --- a/AsyncDisplayKit/ASVideoNode.mm +++ b/AsyncDisplayKit/ASVideoNode.mm @@ -43,7 +43,6 @@ @implementation ASVideoNode //TODO: Have a bash at supplying a preview image node so that we're deferring the construction of the video as it eats memory at the moment -// [[[[playerItem tracks] objectAtIndex:0] assetTrack] asset] //TODO: URL file videos don't seem to repeat @@ -186,7 +185,7 @@ dispatch_async(dispatch_get_main_queue(), ^{ _placeholderImageNode.frame = self.bounds; [self insertSubnode:_placeholderImageNode atIndex:0]; - [self setNeedsLayout]; + // [self setNeedsLayout]; }); } }]; From 1eebe4d31acaf4ad528503e2af1b4926ab93f81d Mon Sep 17 00:00:00 2001 From: Gareth Reese Date: Thu, 3 Mar 2016 09:46:58 +0000 Subject: [PATCH 06/13] Fixed an issue where the preview image doesn't seem to scale with the video layout when you click on the item --- AsyncDisplayKit/ASVideoNode.mm | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/AsyncDisplayKit/ASVideoNode.mm b/AsyncDisplayKit/ASVideoNode.mm index 9b891b89b9..96e64ca219 100644 --- a/AsyncDisplayKit/ASVideoNode.mm +++ b/AsyncDisplayKit/ASVideoNode.mm @@ -48,7 +48,6 @@ //TODO: Have a look at any unit tests -//TODO: The preview image doesn't seem to scale with the video layout when you click on the item #pragma mark - Construction and Layout @@ -157,7 +156,12 @@ _spinner.position = CGPointMake(bounds.size.width/2, bounds.size.height/2); } -- (void)setPlaceholderImagefromAsset:(AVAsset*)asset { +- (void)setPlaceholderImagefromAsset:(AVAsset*)asset +{ + // Construct the preview image early on to avoid multiple threads trying to set it + if (!_placeholderImageNode) + _placeholderImageNode = [[ASImageNode alloc] init]; + dispatch_async(_previewQueue, ^{ AVAssetImageGenerator *imageGenerator = [[AVAssetImageGenerator alloc] initWithAsset:_asset]; imageGenerator.appliesPreferredTrackTransform = YES; From 7c20ba0cdf3e23a9fb522fcf868d8960d0c2c41f Mon Sep 17 00:00:00 2001 From: Gareth Reese Date: Thu, 3 Mar 2016 13:34:38 +0000 Subject: [PATCH 07/13] HLS videos now repeat at the end fine --- AsyncDisplayKit/ASVideoNode.mm | 78 +++++++++++++++++----------------- 1 file changed, 40 insertions(+), 38 deletions(-) diff --git a/AsyncDisplayKit/ASVideoNode.mm b/AsyncDisplayKit/ASVideoNode.mm index 96e64ca219..01e2ad6e85 100644 --- a/AsyncDisplayKit/ASVideoNode.mm +++ b/AsyncDisplayKit/ASVideoNode.mm @@ -25,7 +25,7 @@ AVAsset *_asset; NSURL *_url; - AVPlayerItem *_currentItem; + AVPlayerItem *_currentPlayerItem; AVPlayer *_player; ASImageNode *_placeholderImageNode; @@ -44,10 +44,10 @@ //TODO: Have a bash at supplying a preview image node so that we're deferring the construction of the video as it eats memory at the moment -//TODO: URL file videos don't seem to repeat - //TODO: Have a look at any unit tests +//TODO: URL-based streams show a black square when paused, the AVAsset ones pause fine + #pragma mark - Construction and Layout @@ -99,8 +99,8 @@ ASDisplayNode* playerNode = [[ASDisplayNode alloc] initWithLayerBlock:^CALayer *{ AVPlayerLayer *playerLayer = [[AVPlayerLayer alloc] init]; if (!_player) { - _currentItem = [self 
constructPlayerItemFromInitData]; - _player = [AVPlayer playerWithPlayerItem:_currentItem]; + [self constructCurrentPlayerItemFromInitData]; + _player = [AVPlayer playerWithPlayerItem:_currentPlayerItem]; _player.muted = _muted; } playerLayer.player = _player; @@ -111,22 +111,24 @@ return playerNode; } -- (AVPlayerItem*) constructPlayerItemFromInitData { +- (void) constructCurrentPlayerItemFromInitData { ASDisplayNodeAssert(_asset || _url, @"Must be initialised with an AVAsset or URL"); + if (_currentPlayerItem) + [[NSNotificationCenter defaultCenter] removeObserver:self name:AVPlayerItemDidPlayToEndTimeNotification object:nil]; if (_asset) { - return [[AVPlayerItem alloc] initWithAsset:_asset]; + _currentPlayerItem = [[AVPlayerItem alloc] initWithAsset:_asset]; } else if (_url) { - return [[AVPlayerItem alloc] initWithURL:_url]; + _currentPlayerItem = [[AVPlayerItem alloc] initWithURL:_url]; } - return nil; + if (_currentPlayerItem) + [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(didPlayToEnd:) name:AVPlayerItemDidPlayToEndTimeNotification object:_currentPlayerItem]; } - (void)didLoad { [super didLoad]; - [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(didPlayToEnd:) name:AVPlayerItemDidPlayToEndTimeNotification object:nil]; if (_shouldBePlaying) { _playerNode = [self constructPlayerNode]; @@ -224,11 +226,11 @@ // If we don't yet have a placeholder image update it now that we should have data available for it if (!_placeholderImageNode) { - if (_currentItem && - _currentItem.tracks.count > 0 && - _currentItem.tracks[0].assetTrack && - _currentItem.tracks[0].assetTrack.asset) { - _asset = _currentItem.tracks[0].assetTrack.asset; + if (_currentPlayerItem && + _currentPlayerItem.tracks.count > 0 && + _currentPlayerItem.tracks[0].assetTrack && + _currentPlayerItem.tracks[0].assetTrack.asset) { + _asset = _currentPlayerItem.tracks[0].assetTrack.asset; [self setPlaceholderImagefromAsset:_asset]; } } @@ -241,17 +243,15 @@ - (void)didPlayToEnd:(NSNotification *)notification { - if (ASObjectIsEqual([[notification object] asset], _asset)) { - if ([_delegate respondsToSelector:@selector(videoPlaybackDidFinish:)]) { - [_delegate videoPlaybackDidFinish:self]; - } - [_player seekToTime:CMTimeMakeWithSeconds(0, 1)]; - - if (_shouldAutorepeat) { - [self play]; - } else { - [self pause]; - } + if ([_delegate respondsToSelector:@selector(videoPlaybackDidFinish:)]) { + [_delegate videoPlaybackDidFinish:self]; + } + [_player seekToTime:CMTimeMakeWithSeconds(0, 1)]; + + if (_shouldAutorepeat) { + [self play]; + } else { + [self pause]; } } @@ -273,7 +273,7 @@ [super fetchData]; @try { - [_currentItem removeObserver:self forKeyPath:NSStringFromSelector(@selector(status))]; + [_currentPlayerItem removeObserver:self forKeyPath:NSStringFromSelector(@selector(status))]; } @catch (NSException * __unused exception) { NSLog(@"unnecessary removal in fetch data"); @@ -281,13 +281,13 @@ { ASDN::MutexLocker l(_videoLock); - _currentItem = [self constructPlayerItemFromInitData]; - [_currentItem addObserver:self forKeyPath:NSStringFromSelector(@selector(status)) options:NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew context:NULL]; + [self constructCurrentPlayerItemFromInitData]; + [_currentPlayerItem addObserver:self forKeyPath:NSStringFromSelector(@selector(status)) options:NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew context:NULL]; if (_player) { - [_player replaceCurrentItemWithPlayerItem:_currentItem]; + [_player 
replaceCurrentItemWithPlayerItem:_currentPlayerItem]; } else { - _player = [[AVPlayer alloc] initWithPlayerItem:_currentItem]; + _player = [[AVPlayer alloc] initWithPlayerItem:_currentPlayerItem]; _player.muted = _muted; } } @@ -316,8 +316,8 @@ if (isVisible) { if (_playerNode.isNodeLoaded) { if (!_player) { - _currentItem = [self constructPlayerItemFromInitData]; - _player = [AVPlayer playerWithPlayerItem:_currentItem]; + [self constructCurrentPlayerItemFromInitData]; + _player = [AVPlayer playerWithPlayerItem:_currentPlayerItem]; _player.muted = _muted; } ((AVPlayerLayer *)_playerNode.layer).player = _player; @@ -451,7 +451,7 @@ - (BOOL)ready { - return _currentItem.status == AVPlayerItemStatusReadyToPlay; + return _currentPlayerItem.status == AVPlayerItemStatusReadyToPlay; } - (void)pause @@ -484,13 +484,13 @@ - (AVPlayerItem *)curentItem { ASDN::MutexLocker l(_videoLock); - return _currentItem; + return _currentPlayerItem; } - (void)setCurrentItem:(AVPlayerItem *)currentItem { ASDN::MutexLocker l(_videoLock); - _currentItem = currentItem; + _currentPlayerItem = currentItem; } - (ASDisplayNode *)playerNode @@ -509,9 +509,11 @@ - (void)dealloc { - [[NSNotificationCenter defaultCenter] removeObserver:self name:AVPlayerItemDidPlayToEndTimeNotification object:nil]; + if (_currentPlayerItem) + [[NSNotificationCenter defaultCenter] removeObserver:self name:AVPlayerItemDidPlayToEndTimeNotification object:nil]; + @try { - [_currentItem removeObserver:self forKeyPath:NSStringFromSelector(@selector(status))]; + [_currentPlayerItem removeObserver:self forKeyPath:NSStringFromSelector(@selector(status))]; } @catch (NSException * __unused exception) { NSLog(@"unnecessary removal in dealloc"); From 57844cb94df7226877d8081e45a1b76a20fa40c0 Mon Sep 17 00:00:00 2001 From: Gareth Reese Date: Thu, 3 Mar 2016 13:39:47 +0000 Subject: [PATCH 08/13] Fixed an issue where the preview image wasn't resizing correctly when the video was playing --- AsyncDisplayKit/ASVideoNode.mm | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/AsyncDisplayKit/ASVideoNode.mm b/AsyncDisplayKit/ASVideoNode.mm index 01e2ad6e85..412eacc7f3 100644 --- a/AsyncDisplayKit/ASVideoNode.mm +++ b/AsyncDisplayKit/ASVideoNode.mm @@ -43,8 +43,7 @@ @implementation ASVideoNode //TODO: Have a bash at supplying a preview image node so that we're deferring the construction of the video as it eats memory at the moment - -//TODO: Have a look at any unit tests +// - or could keep the API the same and try to avoid starting up the video player until we need to //TODO: URL-based streams show a black square when paused, the AVAsset ones pause fine @@ -232,6 +231,7 @@ _currentPlayerItem.tracks[0].assetTrack.asset) { _asset = _currentPlayerItem.tracks[0].assetTrack.asset; [self setPlaceholderImagefromAsset:_asset]; + [self setNeedsLayout]; } } } From f1ae1cad42b81fb389c1477ca1b955488358cdb3 Mon Sep 17 00:00:00 2001 From: Gareth Reese Date: Thu, 3 Mar 2016 14:52:02 +0000 Subject: [PATCH 09/13] Added some logging for AVPlayerItem error conditions --- AsyncDisplayKit/ASVideoNode.mm | 74 +++++++++++++++++++++++----------- 1 file changed, 51 insertions(+), 23 deletions(-) diff --git a/AsyncDisplayKit/ASVideoNode.mm b/AsyncDisplayKit/ASVideoNode.mm index 412eacc7f3..d0446179eb 100644 --- a/AsyncDisplayKit/ASVideoNode.mm +++ b/AsyncDisplayKit/ASVideoNode.mm @@ -42,10 +42,7 @@ @implementation ASVideoNode -//TODO: Have a bash at supplying a preview image node so that we're deferring the construction of the video as it eats memory at the moment -// - 
or could keep the API the same and try to avoid starting up the video player until we need to - -//TODO: URL-based streams show a black square when paused, the AVAsset ones pause fine +//TODO: Have a bash at supplying a preview image node for use with HLS videos as we can't have a priview with those #pragma mark - Construction and Layout @@ -112,8 +109,7 @@ - (void) constructCurrentPlayerItemFromInitData { ASDisplayNodeAssert(_asset || _url, @"Must be initialised with an AVAsset or URL"); - if (_currentPlayerItem) - [[NSNotificationCenter defaultCenter] removeObserver:self name:AVPlayerItemDidPlayToEndTimeNotification object:nil]; + [self removePlayerItemObservers]; if (_asset) { _currentPlayerItem = [[AVPlayerItem alloc] initWithAsset:_asset]; @@ -121,8 +117,20 @@ _currentPlayerItem = [[AVPlayerItem alloc] initWithURL:_url]; } - if (_currentPlayerItem) + if (_currentPlayerItem) { [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(didPlayToEnd:) name:AVPlayerItemDidPlayToEndTimeNotification object:_currentPlayerItem]; + [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(errorWhilePlaying:) name:AVPlayerItemFailedToPlayToEndTimeNotification object:_currentPlayerItem]; + [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(errorWhilePlaying:) name:AVPlayerItemNewErrorLogEntryNotification object:_currentPlayerItem]; + } +} + +- (void) removePlayerItemObservers +{ + if (_currentPlayerItem) { + [[NSNotificationCenter defaultCenter] removeObserver:self name:AVPlayerItemDidPlayToEndTimeNotification object:nil]; + [[NSNotificationCenter defaultCenter] removeObserver:self name:AVPlayerItemFailedToPlayToEndTimeNotification object:nil]; + [[NSNotificationCenter defaultCenter] removeObserver:self name:AVPlayerItemNewErrorLogEntryNotification object:nil]; + } } - (void)didLoad @@ -241,20 +249,6 @@ } } -- (void)didPlayToEnd:(NSNotification *)notification -{ - if ([_delegate respondsToSelector:@selector(videoPlaybackDidFinish:)]) { - [_delegate videoPlaybackDidFinish:self]; - } - [_player seekToTime:CMTimeMakeWithSeconds(0, 1)]; - - if (_shouldAutorepeat) { - [self play]; - } else { - [self pause]; - } -} - - (void)tapped { if (self.delegate && [self.delegate respondsToSelector:@selector(videoNodeWasTapped:)]) { @@ -473,6 +467,41 @@ return (_player.rate > 0 && !_player.error); } + +#pragma mark - Playback observers + +- (void)didPlayToEnd:(NSNotification *)notification +{ + if ([_delegate respondsToSelector:@selector(videoPlaybackDidFinish:)]) { + [_delegate videoPlaybackDidFinish:self]; + } + [_player seekToTime:CMTimeMakeWithSeconds(0, 1)]; + + if (_shouldAutorepeat) { + [self play]; + } else { + [self pause]; + } +} + +- (void)errorWhilePlaying:(NSNotification *)notification +{ + if ([notification.name isEqualToString:AVPlayerItemFailedToPlayToEndTimeNotification]) { + NSLog(@"Failed to play video"); + } + else if ([notification.name isEqualToString:AVPlayerItemNewErrorLogEntryNotification]) { + AVPlayerItem* item = (AVPlayerItem*)notification.object; + AVPlayerItemErrorLogEvent* logEvent = item.errorLog.events.lastObject; + NSLog(@"AVPlayerItem error log entry added for video with error %@ status %@", item.error, + (item.status == AVPlayerItemStatusFailed ? 
@"FAILED" : [NSString stringWithFormat:@"%ld", (long)item.status])); + NSLog(@"Item is %@", item); + + if (logEvent) + NSLog(@"Log code %ld domain %@ comment %@", logEvent.errorStatusCode, logEvent.errorDomain, logEvent.errorComment); + } +} + + #pragma mark - Property Accessors for Tests - (ASDisplayNode *)spinner @@ -509,8 +538,7 @@ - (void)dealloc { - if (_currentPlayerItem) - [[NSNotificationCenter defaultCenter] removeObserver:self name:AVPlayerItemDidPlayToEndTimeNotification object:nil]; + [self removePlayerItemObservers]; @try { [_currentPlayerItem removeObserver:self forKeyPath:NSStringFromSelector(@selector(status))]; From 195c651811ec798c996393ee7d2bc0d5fc9f1922 Mon Sep 17 00:00:00 2001 From: Gareth Reese Date: Thu, 3 Mar 2016 15:04:42 +0000 Subject: [PATCH 10/13] Fixed up the test app to do various video types --- examples/VideoTableView/Sample/NicCageNode.mm | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/examples/VideoTableView/Sample/NicCageNode.mm b/examples/VideoTableView/Sample/NicCageNode.mm index b0cf7ebc5b..eecd4bbedb 100644 --- a/examples/VideoTableView/Sample/NicCageNode.mm +++ b/examples/VideoTableView/Sample/NicCageNode.mm @@ -82,7 +82,9 @@ static const CGFloat kInnerPadding = 10.0f; _kittenSize = size; u_int32_t videoInitMethod = arc4random_uniform(3); + u_int32_t autoPlay = arc4random_uniform(2); NSArray* methodArray = @[@"AVAsset", @"File URL", @"HLS URL"]; + NSArray* autoPlayArray = @[@"paused", @"auto play"]; switch (videoInitMethod) { case 0: @@ -103,13 +105,16 @@ static const CGFloat kInnerPadding = 10.0f; break; } -// _videoNode.shouldAutoplay = YES; + if (autoPlay == 1) + _videoNode.shouldAutoplay = YES; + + _videoNode.shouldAutorepeat = YES; _videoNode.backgroundColor = ASDisplayNodeDefaultPlaceholderColor(); [self addSubnode:_videoNode]; _textNode = [[ASTextNode alloc] init]; - _textNode.attributedString = [[NSAttributedString alloc] initWithString:[NSString stringWithFormat:@"%@ %@", methodArray[videoInitMethod], [self kittyIpsum]] + _textNode.attributedString = [[NSAttributedString alloc] initWithString:[NSString stringWithFormat:@"%@ %@ %@", methodArray[videoInitMethod], autoPlayArray[autoPlay], [self kittyIpsum]] attributes:[self textStyle]]; [self addSubnode:_textNode]; From fb1c5e8ee6e2ee696b6d8ffd675a84044691ba14 Mon Sep 17 00:00:00 2001 From: Gareth Reese Date: Fri, 4 Mar 2016 16:50:39 +0000 Subject: [PATCH 11/13] Fixed an issue where the preview image could appear twice --- AsyncDisplayKit/ASVideoNode.mm | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/AsyncDisplayKit/ASVideoNode.mm b/AsyncDisplayKit/ASVideoNode.mm index d0446179eb..bc05f7d68c 100644 --- a/AsyncDisplayKit/ASVideoNode.mm +++ b/AsyncDisplayKit/ASVideoNode.mm @@ -178,7 +178,7 @@ // Unfortunately it's not possible to generate a preview image for an HTTP live stream asset, so we'll give up here // http://stackoverflow.com/questions/32112205/m3u8-file-avassetimagegenerator-error - if (image) { + if (image && _placeholderImageNode.image == nil) { UIImage *theImage = [UIImage imageWithCGImage:image]; _placeholderImageNode = [[ASImageNode alloc] init]; From f7ff8b4173808166d82b6cc5517e5499355ddf29 Mon Sep 17 00:00:00 2001 From: Gareth Reese Date: Tue, 8 Mar 2016 08:30:13 +0000 Subject: [PATCH 12/13] Updated tests to cover the new API and repeated all of the test logic for URL initialisation to ensure that the functionality is the same --- AsyncDisplayKit/ASVideoNode.mm | 2 +- AsyncDisplayKitTests/ASVideoNodeTests.m | 160 
++++++++++++++++++------ 2 files changed, 123 insertions(+), 39 deletions(-) diff --git a/AsyncDisplayKit/ASVideoNode.mm b/AsyncDisplayKit/ASVideoNode.mm index bc05f7d68c..54b6dcdf64 100644 --- a/AsyncDisplayKit/ASVideoNode.mm +++ b/AsyncDisplayKit/ASVideoNode.mm @@ -497,7 +497,7 @@ NSLog(@"Item is %@", item); if (logEvent) - NSLog(@"Log code %ld domain %@ comment %@", logEvent.errorStatusCode, logEvent.errorDomain, logEvent.errorComment); + NSLog(@"Log code %ld domain %@ comment %@", (long)logEvent.errorStatusCode, logEvent.errorDomain, logEvent.errorComment); } } diff --git a/AsyncDisplayKitTests/ASVideoNodeTests.m b/AsyncDisplayKitTests/ASVideoNodeTests.m index d14329d936..7c663eeca9 100644 --- a/AsyncDisplayKitTests/ASVideoNodeTests.m +++ b/AsyncDisplayKitTests/ASVideoNodeTests.m @@ -15,6 +15,7 @@ ASVideoNode *_videoNode; AVURLAsset *_firstAsset; AVAsset *_secondAsset; + NSURL *_url; } @end @@ -42,103 +43,151 @@ - (void)setUp { - _videoNode = [[ASVideoNode alloc] init]; + // _videoNode = [[ASVideoNode alloc] init]; _firstAsset = [AVURLAsset assetWithURL:[NSURL URLWithString:@"firstURL"]]; _secondAsset = [AVAsset assetWithURL:[NSURL URLWithString:@"secondURL"]]; + _url = [NSURL URLWithString:@"testURL"]; } -- (void)testVideoNodeReplacesAVPlayerItemWhenNewURLIsSet -{ - _videoNode.interfaceState = ASInterfaceStateFetchData; - _videoNode.asset = _firstAsset; - - AVPlayerItem *item = [_videoNode currentItem]; - - _videoNode.asset = _secondAsset; - AVPlayerItem *secondItem = [_videoNode currentItem]; - - XCTAssertNotEqualObjects(item, secondItem); -} - -- (void)testVideoNodeDoesNotReplaceAVPlayerItemWhenSameURLIsSet -{ - _videoNode.interfaceState = ASInterfaceStateFetchData; - - _videoNode.asset = _firstAsset; - AVPlayerItem *item = [_videoNode currentItem]; - - _videoNode.asset = [AVAsset assetWithURL:_firstAsset.URL]; - AVPlayerItem *secondItem = [_videoNode currentItem]; - - XCTAssertEqualObjects(item, secondItem); -} - (void)testSpinnerDefaultsToNil { XCTAssertNil(_videoNode.spinner); } + - (void)testOnPlayIfVideoIsNotReadyInitializeSpinnerAndAddAsSubnode +{ + _videoNode = [[ASVideoNode alloc] initWithAsset:_firstAsset]; + [self doOnPlayIfVideoIsNotReadyInitializeSpinnerAndAddAsSubnodeWithUrl]; +} + +- (void)testOnPlayIfVideoIsNotReadyInitializeSpinnerAndAddAsSubnodeWithUrl +{ + _videoNode = [[ASVideoNode alloc] initWithURL:_url]; + [self doOnPlayIfVideoIsNotReadyInitializeSpinnerAndAddAsSubnodeWithUrl]; +} + +- (void)doOnPlayIfVideoIsNotReadyInitializeSpinnerAndAddAsSubnodeWithUrl { _videoNode.interfaceState = ASInterfaceStateFetchData; - _videoNode.asset = _firstAsset; [_videoNode play]; XCTAssertNotNil(_videoNode.spinner); } + - (void)testOnPauseSpinnerIsPausedIfPresent +{ + _videoNode = [[ASVideoNode alloc] initWithAsset:_firstAsset]; + [self doOnPauseSpinnerIsPausedIfPresentWithURL]; +} + +- (void)testOnPauseSpinnerIsPausedIfPresentWithURL +{ + _videoNode = [[ASVideoNode alloc] initWithURL:_url]; + [self doOnPauseSpinnerIsPausedIfPresentWithURL]; +} + +- (void)doOnPauseSpinnerIsPausedIfPresentWithURL { _videoNode.interfaceState = ASInterfaceStateFetchData; - _videoNode.asset = _firstAsset; [_videoNode play]; - [_videoNode pause]; XCTAssertFalse(((UIActivityIndicatorView *)_videoNode.spinner.view).isAnimating); } + - (void)testOnVideoReadySpinnerIsStoppedAndRemoved { - _videoNode.interfaceState = ASInterfaceStateFetchData; - _videoNode.asset = _firstAsset; + _videoNode = [[ASVideoNode alloc] initWithAsset:_firstAsset]; + [self doOnVideoReadySpinnerIsStoppedAndRemovedWithURL]; +} +- 
(void)testOnVideoReadySpinnerIsStoppedAndRemovedWithURL +{ + _videoNode = [[ASVideoNode alloc] initWithURL:_url]; + [self doOnVideoReadySpinnerIsStoppedAndRemovedWithURL]; +} + +- (void)doOnVideoReadySpinnerIsStoppedAndRemovedWithURL +{ + _videoNode.interfaceState = ASInterfaceStateFetchData; + [_videoNode play]; [_videoNode observeValueForKeyPath:@"status" ofObject:[_videoNode currentItem] change:@{@"new" : @(AVPlayerItemStatusReadyToPlay)} context:NULL]; XCTAssertFalse(((UIActivityIndicatorView *)_videoNode.spinner.view).isAnimating); } + - (void)testPlayerDefaultsToNil { + _videoNode = [[ASVideoNode alloc] initWithAsset:_firstAsset]; + XCTAssertNil(_videoNode.player); +} + +- (void)testPlayerDefaultsToNilWithURL +{ + _videoNode = [[ASVideoNode alloc] initWithURL:_url]; XCTAssertNil(_videoNode.player); } - (void)testPlayerIsCreatedInFetchData { - _videoNode.asset = _firstAsset; - + _videoNode = [[ASVideoNode alloc] initWithAsset:_firstAsset]; _videoNode.interfaceState = ASInterfaceStateFetchData; XCTAssertNotNil(_videoNode.player); } +- (void)testPlayerIsCreatedInFetchDataWithURL +{ + _videoNode = [[ASVideoNode alloc] initWithURL:_url]; + _videoNode.interfaceState = ASInterfaceStateFetchData; + + XCTAssertNotNil(_videoNode.player); +} + + - (void)testPlayerLayerNodeIsAddedOnDidLoadIfVisibleAndAutoPlaying { - _videoNode.asset = _firstAsset; + _videoNode = [[ASVideoNode alloc] initWithAsset:_firstAsset]; + [self doPlayerLayerNodeIsAddedOnDidLoadIfVisibleAndAutoPlayingWithURL]; +} +- (void)testPlayerLayerNodeIsAddedOnDidLoadIfVisibleAndAutoPlayingWithURL +{ + _videoNode = [[ASVideoNode alloc] initWithURL:_url]; + [self doPlayerLayerNodeIsAddedOnDidLoadIfVisibleAndAutoPlayingWithURL]; +} + +- (void)doPlayerLayerNodeIsAddedOnDidLoadIfVisibleAndAutoPlayingWithURL +{ [_videoNode setInterfaceState:ASInterfaceStateNone]; [_videoNode didLoad]; XCTAssert(![_videoNode.subnodes containsObject:_videoNode.playerNode]); } + - (void)testPlayerLayerNodeIsNotAddedIfVisibleButShouldNotBePlaying { - _videoNode.asset = _firstAsset; + _videoNode = [[ASVideoNode alloc] initWithAsset:_firstAsset]; + [self doPlayerLayerNodeIsNotAddedIfVisibleButShouldNotBePlaying]; +} +- (void)testPlayerLayerNodeIsNotAddedIfVisibleButShouldNotBePlayingWithUrl +{ + _videoNode = [[ASVideoNode alloc] initWithURL:_url]; + [self doPlayerLayerNodeIsNotAddedIfVisibleButShouldNotBePlaying]; +} + +- (void)doPlayerLayerNodeIsNotAddedIfVisibleButShouldNotBePlaying +{ [_videoNode pause]; [_videoNode setInterfaceState:ASInterfaceStateVisible]; [_videoNode didLoad]; @@ -149,7 +198,18 @@ - (void)testVideoStartsPlayingOnDidDidBecomeVisibleWhenShouldAutoplay { - _videoNode.asset = _firstAsset; + _videoNode = [[ASVideoNode alloc] initWithAsset:_firstAsset]; + [self doVideoStartsPlayingOnDidDidBecomeVisibleWhenShouldAutoplay]; +} + +- (void)testVideoStartsPlayingOnDidDidBecomeVisibleWhenShouldAutoplayWithURL +{ + _videoNode = [[ASVideoNode alloc] initWithURL:_url]; + [self doVideoStartsPlayingOnDidDidBecomeVisibleWhenShouldAutoplay]; +} + +- (void)doVideoStartsPlayingOnDidDidBecomeVisibleWhenShouldAutoplay +{ _videoNode.shouldAutoplay = YES; _videoNode.playerNode = [[ASDisplayNode alloc] initWithLayerBlock:^CALayer *{ AVPlayerLayer *playerLayer = [[AVPlayerLayer alloc] init]; @@ -162,9 +222,21 @@ XCTAssertTrue(_videoNode.shouldBePlaying); } + - (void)testVideoShouldPauseWhenItLeavesVisibleButShouldKnowPlayingShouldRestartLater { - _videoNode.asset = _firstAsset; + _videoNode = [[ASVideoNode alloc] initWithAsset:_firstAsset]; + [self 
doVideoShouldPauseWhenItLeavesVisibleButShouldKnowPlayingShouldRestartLater]; +} + +- (void)testVideoShouldPauseWhenItLeavesVisibleButShouldKnowPlayingShouldRestartLaterWithURL +{ + _videoNode = [[ASVideoNode alloc] initWithURL:_url]; + [self doVideoShouldPauseWhenItLeavesVisibleButShouldKnowPlayingShouldRestartLater]; +} + +- (void)doVideoShouldPauseWhenItLeavesVisibleButShouldKnowPlayingShouldRestartLater +{ [_videoNode play]; [_videoNode interfaceStateDidChange:ASInterfaceStateNone fromState:ASInterfaceStateVisible]; @@ -173,9 +245,21 @@ XCTAssertTrue(_videoNode.shouldBePlaying); } + - (void)testVideoThatIsPlayingWhenItLeavesVisibleRangeStartsAgainWhenItComesBack { - _videoNode.asset = _firstAsset; + _videoNode = [[ASVideoNode alloc] initWithAsset:_firstAsset]; + [self doVideoThatIsPlayingWhenItLeavesVisibleRangeStartsAgainWhenItComesBack]; +} + +- (void)testVideoThatIsPlayingWhenItLeavesVisibleRangeStartsAgainWhenItComesBackWithURL +{ + _videoNode = [[ASVideoNode alloc] initWithURL:_url]; + [self doVideoThatIsPlayingWhenItLeavesVisibleRangeStartsAgainWhenItComesBack]; +} + +- (void)doVideoThatIsPlayingWhenItLeavesVisibleRangeStartsAgainWhenItComesBack +{ [_videoNode play]; [_videoNode interfaceStateDidChange:ASInterfaceStateVisible fromState:ASInterfaceStateNone]; From 9e28e0792556b280e376163aa275a16cb48695b3 Mon Sep 17 00:00:00 2001 From: Gareth Reese Date: Thu, 7 Apr 2016 14:08:45 +0100 Subject: [PATCH 13/13] ASVideoNode updates based on review comments in https://github.com/facebook/AsyncDisplayKit/pull/1323 --- AsyncDisplayKit/ASVideoNode.h | 9 +- AsyncDisplayKit/ASVideoNode.mm | 117 +++++++++--------- AsyncDisplayKitTests/ASVideoNodeTests.m | 43 ++++--- examples/VideoTableView/Sample/NicCageNode.mm | 10 +- examples/Videos/Sample/ViewController.m | 9 +- 5 files changed, 97 insertions(+), 91 deletions(-) diff --git a/AsyncDisplayKit/ASVideoNode.h b/AsyncDisplayKit/ASVideoNode.h index 3a45b2f04c..1615ce7cdb 100644 --- a/AsyncDisplayKit/ASVideoNode.h +++ b/AsyncDisplayKit/ASVideoNode.h @@ -18,7 +18,10 @@ // in an issue on GitHub: https://github.com/facebook/AsyncDisplayKit/issues @interface ASVideoNode : ASControlNode -@property (atomic, strong, readonly) AVAsset *asset; + +@property (atomic, strong, readwrite) NSURL *url; +@property (atomic, strong, readwrite) AVAsset *asset; + @property (atomic, strong, readonly) AVPlayer *player; @property (atomic, strong, readonly) AVPlayerItem *currentItem; @@ -34,9 +37,7 @@ @property (atomic, weak, readwrite) id delegate; -// Mirror the construction of AVPlayerItem with the URL or AVAsset -- (instancetype)initWithURL:(NSURL*)url; -- (instancetype)initWithAsset:(AVAsset*)asset; +- (instancetype)init; - (void)play; - (void)pause; diff --git a/AsyncDisplayKit/ASVideoNode.mm b/AsyncDisplayKit/ASVideoNode.mm index 94084d215b..17ad3cb117 100644 --- a/AsyncDisplayKit/ASVideoNode.mm +++ b/AsyncDisplayKit/ASVideoNode.mm @@ -47,30 +47,9 @@ #pragma mark - Construction and Layout -- (instancetype)initWithURL:(NSURL*)url +- (instancetype)init { - ASDisplayNodeAssertNotNil(url, @"URL must be supplied in initWithURL:"); - if (!(self = [super init])) { - return nil; - } - - _url = url; - return [self commonInit]; -} - -- (instancetype)initWithAsset:(AVAsset*)asset -{ - ASDisplayNodeAssertNotNil(asset, @"Asset must be supplied in initWithAsset:"); - if (!(self = [super init])) { - return nil; - } - _asset = asset; - return [self commonInit]; -} - -- (instancetype)commonInit -{ - _previewQueue = dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0); + 
   _previewQueue = dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0);
 
   self.playButton = [[ASDefaultPlayButton alloc] init];
 
   self.gravity = AVLayerVideoGravityResizeAspect;
 
@@ -79,19 +58,14 @@
   return self;
 }
 
-- (instancetype)init
-{
-  ASDisplayNodeAssertNotSupported();
-  return nil;
-}
-
 - (instancetype)initWithViewBlock:(ASDisplayNodeViewBlock)viewBlock didLoadBlock:(ASDisplayNodeDidLoadBlock)didLoadBlock
 {
   ASDisplayNodeAssertNotSupported();
   return nil;
 }
 
-- (ASDisplayNode*) constructPlayerNode {
+- (ASDisplayNode*)constructPlayerNode
+{
   ASDisplayNode* playerNode = [[ASDisplayNode alloc] initWithLayerBlock:^CALayer *{
     AVPlayerLayer *playerLayer = [[AVPlayerLayer alloc] init];
     if (!_player) {
@@ -107,8 +81,9 @@
   return playerNode;
 }
 
-- (void) constructCurrentPlayerItemFromInitData {
-  ASDisplayNodeAssert(_asset || _url, @"Must be initialised with an AVAsset or URL");
+- (void)constructCurrentPlayerItemFromInitData
+{
+  ASDisplayNodeAssert(_asset || _url, @"ASVideoNode must be initialised with either an AVAsset or URL");
   [self removePlayerItemObservers];
 
   if (_asset) {
@@ -124,7 +99,7 @@
   }
 }
 
-- (void) removePlayerItemObservers
+- (void)removePlayerItemObservers
 {
   if (_currentPlayerItem) {
     [[NSNotificationCenter defaultCenter] removeObserver:self name:AVPlayerItemDidPlayToEndTimeNotification object:nil];
@@ -167,14 +142,16 @@
 
 - (void)setPlaceholderImagefromAsset:(AVAsset*)asset
 {
-  // Construct the preview image early on to avoid multiple threads trying to set it
+  ASDN::MutexLocker l(_videoLock);
   if (!_placeholderImageNode)
     _placeholderImageNode = [[ASImageNode alloc] init];
 
   dispatch_async(_previewQueue, ^{
     AVAssetImageGenerator *imageGenerator = [[AVAssetImageGenerator alloc] initWithAsset:_asset];
     imageGenerator.appliesPreferredTrackTransform = YES;
-    [imageGenerator generateCGImagesAsynchronouslyForTimes:@[[NSValue valueWithCMTime:CMTimeMake(0, 1)]] completionHandler:^(CMTime requestedTime, CGImageRef _Nullable image, CMTime actualTime, AVAssetImageGeneratorResult result, NSError * _Nullable error) {
+    NSArray *times = @[[NSValue valueWithCMTime:CMTimeMake(0, 1)]];
+
+    [imageGenerator generateCGImagesAsynchronouslyForTimes:times completionHandler:^(CMTime requestedTime, CGImageRef _Nullable image, CMTime actualTime, AVAssetImageGeneratorResult result, NSError * _Nullable error) {
 
       // Unfortunately it's not possible to generate a preview image for an HTTP live stream asset, so we'll give up here
       // http://stackoverflow.com/questions/32112205/m3u8-file-avassetimagegenerator-error
@@ -198,7 +175,6 @@
         dispatch_async(dispatch_get_main_queue(), ^{
           _placeholderImageNode.frame = self.bounds;
           [self insertSubnode:_placeholderImageNode atIndex:0];
-          // [self setNeedsLayout];
         });
       }
     }];
@@ -208,9 +184,12 @@
 - (void)interfaceStateDidChange:(ASInterfaceState)newState fromState:(ASInterfaceState)oldState
 {
   [super interfaceStateDidChange:newState fromState:oldState];
+
+  BOOL nowVisible = ASInterfaceStateIncludesVisible(newState);
+  BOOL wasVisible = ASInterfaceStateIncludesVisible(oldState);
 
-  if (!(newState & ASInterfaceStateVisible)) {
-    if (oldState & ASInterfaceStateVisible) {
+  if (!nowVisible) {
+    if (wasVisible) {
       if (_shouldBePlaying) {
         [self pause];
         _shouldBePlaying = YES;
@@ -227,28 +206,29 @@
 
 - (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context
 {
-  if ([[change objectForKey:@"new"] integerValue] == AVPlayerItemStatusReadyToPlay) {
-    if ([self.subnodes containsObject:_spinner]) {
-      [_spinner removeFromSupernode];
-      _spinner = nil;
-    }
-
-    // If we don't yet have a placeholder image update it now that we should have data available for it
-    if (!_placeholderImageNode) {
-      if (_currentPlayerItem &&
-          _currentPlayerItem.tracks.count > 0 &&
-          _currentPlayerItem.tracks[0].assetTrack &&
-          _currentPlayerItem.tracks[0].assetTrack.asset) {
-        _asset = _currentPlayerItem.tracks[0].assetTrack.asset;
-        [self setPlaceholderImagefromAsset:_asset];
-        [self setNeedsLayout];
+  if (object == _currentPlayerItem && [keyPath isEqualToString:@"status"]) {
+    if (_currentPlayerItem.status == AVPlayerItemStatusReadyToPlay) {
+      if ([self.subnodes containsObject:_spinner]) {
+        [_spinner removeFromSupernode];
+        _spinner = nil;
       }
+
+      // If we don't yet have a placeholder image update it now that we should have data available for it
+      if (!_placeholderImageNode) {
+        if (_currentPlayerItem &&
+            _currentPlayerItem.tracks.count > 0 &&
+            _currentPlayerItem.tracks[0].assetTrack &&
+            _currentPlayerItem.tracks[0].assetTrack.asset) {
+          _asset = _currentPlayerItem.tracks[0].assetTrack.asset;
+          [self setPlaceholderImagefromAsset:_asset];
+          [self setNeedsLayout];
+        }
+      }
+
+    } else if (_currentPlayerItem.status == AVPlayerItemStatusFailed) {
+    }
   }
-
-  if ([[change objectForKey:@"new"] integerValue] == AVPlayerItemStatusFailed) {
-
-  }
 }
 
 - (void)tapped
@@ -373,6 +353,27 @@
   return _asset;
 }
 
+- (void)setUrl:(NSURL *)url
+{
+  ASDN::MutexLocker l(_videoLock);
+
+  if (ASObjectIsEqual(url, _url))
+    return;
+
+  _url = url;
+
+  // FIXME: Adopt -setNeedsFetchData when it is available
+  if (self.interfaceState & ASInterfaceStateFetchData) {
+    [self fetchData];
+  }
+}
+
+- (NSURL *)url
+{
+  ASDN::MutexLocker l(_videoLock);
+  return _url;
+}
+
 - (AVPlayer *)player
 {
   ASDN::MutexLocker l(_videoLock);
@@ -442,7 +443,7 @@
     _playButton.alpha = 0.0;
   }];
 
-  if (![self ready] && _shouldBePlaying && (self.interfaceState & ASInterfaceStateVisible)) {
+  if (![self ready] && _shouldBePlaying && ASInterfaceStateIncludesVisible(self.interfaceState)) {
     [self addSubnode:_spinner];
     [(UIActivityIndicatorView *)_spinner.view startAnimating];
   }
diff --git a/AsyncDisplayKitTests/ASVideoNodeTests.m b/AsyncDisplayKitTests/ASVideoNodeTests.m
index de5fe2dd38..0999918059 100644
--- a/AsyncDisplayKitTests/ASVideoNodeTests.m
+++ b/AsyncDisplayKitTests/ASVideoNodeTests.m
@@ -50,7 +50,7 @@
 
 - (void)setUp
 {
-  // _videoNode = [[ASVideoNode alloc] init];
+  _videoNode = [[ASVideoNode alloc] init];
   _firstAsset = [AVURLAsset assetWithURL:[NSURL URLWithString:@"firstURL"]];
   _secondAsset = [AVAsset assetWithURL:[NSURL URLWithString:@"secondURL"]];
   _url = [NSURL URLWithString:@"testURL"];
@@ -65,20 +65,19 @@
 
 - (void)testOnPlayIfVideoIsNotReadyInitializeSpinnerAndAddAsSubnode
 {
-  _videoNode = [[ASVideoNode alloc] initWithAsset:_firstAsset];
+  _videoNode.asset = _firstAsset;
   [self doOnPlayIfVideoIsNotReadyInitializeSpinnerAndAddAsSubnodeWithUrl];
 }
 
 - (void)testOnPlayIfVideoIsNotReadyInitializeSpinnerAndAddAsSubnodeWithUrl
 {
-  _videoNode = [[ASVideoNode alloc] initWithURL:_url];
+  _videoNode.url = _url;
   [self doOnPlayIfVideoIsNotReadyInitializeSpinnerAndAddAsSubnodeWithUrl];
 }
 
 - (void)doOnPlayIfVideoIsNotReadyInitializeSpinnerAndAddAsSubnodeWithUrl
 {
   _videoNode.interfaceState = ASInterfaceStateFetchData;
-
   [_videoNode play];
 
   XCTAssertNotNil(_videoNode.spinner);
@@ -87,13 +86,13 @@
 
 - (void)testOnPauseSpinnerIsPausedIfPresent
 {
-  _videoNode = [[ASVideoNode alloc] initWithAsset:_firstAsset];
+  _videoNode.asset = _firstAsset;
   [self doOnPauseSpinnerIsPausedIfPresentWithURL];
 }
 
 - (void)testOnPauseSpinnerIsPausedIfPresentWithURL
 {
-  _videoNode = [[ASVideoNode alloc] initWithURL:_url];
+  _videoNode.url = _url;
   [self doOnPauseSpinnerIsPausedIfPresentWithURL];
 }
 
@@ -110,13 +109,13 @@
 
 - (void)testOnVideoReadySpinnerIsStoppedAndRemoved
 {
-  _videoNode = [[ASVideoNode alloc] initWithAsset:_firstAsset];
+  _videoNode.asset = _firstAsset;
   [self doOnVideoReadySpinnerIsStoppedAndRemovedWithURL];
 }
 
 - (void)testOnVideoReadySpinnerIsStoppedAndRemovedWithURL
 {
-  _videoNode = [[ASVideoNode alloc] initWithURL:_url];
+  _videoNode.url = _url;
   [self doOnVideoReadySpinnerIsStoppedAndRemovedWithURL];
 }
 
@@ -133,19 +132,19 @@
 
 - (void)testPlayerDefaultsToNil
 {
-  _videoNode = [[ASVideoNode alloc] initWithAsset:_firstAsset];
+  _videoNode.asset = _firstAsset;
 
   XCTAssertNil(_videoNode.player);
 }
 
 - (void)testPlayerDefaultsToNilWithURL
 {
-  _videoNode = [[ASVideoNode alloc] initWithURL:_url];
+  _videoNode.url = _url;
 
   XCTAssertNil(_videoNode.player);
 }
 
 - (void)testPlayerIsCreatedInFetchData
 {
-  _videoNode = [[ASVideoNode alloc] initWithAsset:_firstAsset];
+  _videoNode.asset = _firstAsset;
   _videoNode.interfaceState = ASInterfaceStateFetchData;
 
   XCTAssertNotNil(_videoNode.player);
@@ -153,7 +152,7 @@
 
 - (void)testPlayerIsCreatedInFetchDataWithURL
 {
-  _videoNode = [[ASVideoNode alloc] initWithURL:_url];
+  _videoNode.url = _url;
   _videoNode.interfaceState = ASInterfaceStateFetchData;
 
   XCTAssertNotNil(_videoNode.player);
@@ -162,13 +161,13 @@
 
 - (void)testPlayerLayerNodeIsAddedOnDidLoadIfVisibleAndAutoPlaying
 {
-  _videoNode = [[ASVideoNode alloc] initWithAsset:_firstAsset];
+  _videoNode.asset = _firstAsset;
   [self doPlayerLayerNodeIsAddedOnDidLoadIfVisibleAndAutoPlayingWithURL];
 }
 
 - (void)testPlayerLayerNodeIsAddedOnDidLoadIfVisibleAndAutoPlayingWithURL
 {
-  _videoNode = [[ASVideoNode alloc] initWithURL:_url];
+  _videoNode.url = _url;
   [self doPlayerLayerNodeIsAddedOnDidLoadIfVisibleAndAutoPlayingWithURL];
 }
 
@@ -183,13 +182,13 @@
 
 - (void)testPlayerLayerNodeIsNotAddedIfVisibleButShouldNotBePlaying
 {
-  _videoNode = [[ASVideoNode alloc] initWithAsset:_firstAsset];
+  _videoNode.asset = _firstAsset;
   [self doPlayerLayerNodeIsNotAddedIfVisibleButShouldNotBePlaying];
 }
 
 - (void)testPlayerLayerNodeIsNotAddedIfVisibleButShouldNotBePlayingWithUrl
 {
-  _videoNode = [[ASVideoNode alloc] initWithURL:_url];
+  _videoNode.url = _url;
   [self doPlayerLayerNodeIsNotAddedIfVisibleButShouldNotBePlaying];
 }
 
@@ -205,13 +204,13 @@
 
 - (void)testVideoStartsPlayingOnDidDidBecomeVisibleWhenShouldAutoplay
 {
-  _videoNode = [[ASVideoNode alloc] initWithAsset:_firstAsset];
+  _videoNode.asset = _firstAsset;
   [self doVideoStartsPlayingOnDidDidBecomeVisibleWhenShouldAutoplay];
 }
 
 - (void)testVideoStartsPlayingOnDidDidBecomeVisibleWhenShouldAutoplayWithURL
 {
-  _videoNode = [[ASVideoNode alloc] initWithURL:_url];
+  _videoNode.url = _url;
   [self doVideoStartsPlayingOnDidDidBecomeVisibleWhenShouldAutoplay];
 }
 
@@ -232,13 +231,13 @@
 
 - (void)testVideoShouldPauseWhenItLeavesVisibleButShouldKnowPlayingShouldRestartLater
 {
-  _videoNode = [[ASVideoNode alloc] initWithAsset:_firstAsset];
+  _videoNode.asset = _firstAsset;
   [self doVideoShouldPauseWhenItLeavesVisibleButShouldKnowPlayingShouldRestartLater];
 }
 
 - (void)testVideoShouldPauseWhenItLeavesVisibleButShouldKnowPlayingShouldRestartLaterWithURL
 {
-  _videoNode = [[ASVideoNode alloc] initWithURL:_url];
+  _videoNode.url = _url;
   [self doVideoShouldPauseWhenItLeavesVisibleButShouldKnowPlayingShouldRestartLater];
 }
 
@@ -255,13 +254,13 @@
 
 - (void)testVideoThatIsPlayingWhenItLeavesVisibleRangeStartsAgainWhenItComesBack
 {
-  _videoNode = [[ASVideoNode alloc] initWithAsset:_firstAsset];
+  _videoNode.asset = _firstAsset;
   [self doVideoThatIsPlayingWhenItLeavesVisibleRangeStartsAgainWhenItComesBack];
 }
 
 - (void)testVideoThatIsPlayingWhenItLeavesVisibleRangeStartsAgainWhenItComesBackWithURL
 {
-  _videoNode = [[ASVideoNode alloc] initWithURL:_url];
+  _videoNode.url = _url;
   [self doVideoThatIsPlayingWhenItLeavesVisibleRangeStartsAgainWhenItComesBack];
 }
 
diff --git a/examples/VideoTableView/Sample/NicCageNode.mm b/examples/VideoTableView/Sample/NicCageNode.mm
index eecd4bbedb..f430a10db9 100644
--- a/examples/VideoTableView/Sample/NicCageNode.mm
+++ b/examples/VideoTableView/Sample/NicCageNode.mm
@@ -89,19 +89,21 @@ static const CGFloat kInnerPadding = 10.0f;
     switch (videoInitMethod) {
       case 0:
         // Construct an AVAsset from a URL
-        _videoNode = [[ASVideoNode alloc] initWithAsset:
-                      [AVAsset assetWithURL:[NSURL URLWithString:@"https://files.parsetfss.com/8a8a3b0c-619e-4e4d-b1d5-1b5ba9bf2b42/tfss-753fe655-86bb-46da-89b7-aa59c60e49c0-niccage.mp4"]]];
+        _videoNode = [[ASVideoNode alloc] init];
+        _videoNode.asset = [AVAsset assetWithURL:[NSURL URLWithString:@"https://files.parsetfss.com/8a8a3b0c-619e-4e4d-b1d5-1b5ba9bf2b42/tfss-753fe655-86bb-46da-89b7-aa59c60e49c0-niccage.mp4"]];
         break;
 
       case 1:
         // Construct the video node directly from the .mp4 URL
-        _videoNode = [[ASVideoNode alloc] initWithURL:[NSURL URLWithString:@"https://files.parsetfss.com/8a8a3b0c-619e-4e4d-b1d5-1b5ba9bf2b42/tfss-753fe655-86bb-46da-89b7-aa59c60e49c0-niccage.mp4"]];
+        _videoNode = [[ASVideoNode alloc] init];
+        _videoNode.url = [NSURL URLWithString:@"https://files.parsetfss.com/8a8a3b0c-619e-4e4d-b1d5-1b5ba9bf2b42/tfss-753fe655-86bb-46da-89b7-aa59c60e49c0-niccage.mp4"];
        break;
 
       case 2:
         // Construct the video node from an HTTP Live Streaming URL
         // URL from https://developer.apple.com/library/ios/documentation/AudioVideo/Conceptual/AVFoundationPG/Articles/02_Playback.html
-        _videoNode = [[ASVideoNode alloc] initWithURL:[NSURL URLWithString:@"http://devimages.apple.com/iphone/samples/bipbop/bipbopall.m3u8"]];
+        _videoNode = [[ASVideoNode alloc] init];
+        _videoNode.url = [NSURL URLWithString:@"http://devimages.apple.com/iphone/samples/bipbop/bipbopall.m3u8"];
         break;
     }
 
diff --git a/examples/Videos/Sample/ViewController.m b/examples/Videos/Sample/ViewController.m
index a987785e10..def29c0122 100644
--- a/examples/Videos/Sample/ViewController.m
+++ b/examples/Videos/Sample/ViewController.m
@@ -34,7 +34,8 @@
 - (ASVideoNode *)guitarVideo;
 {
   AVAsset* asset = [AVAsset assetWithURL:[NSURL URLWithString:@"https://files.parsetfss.com/8a8a3b0c-619e-4e4d-b1d5-1b5ba9bf2b42/tfss-3045b261-7e93-4492-b7e5-5d6358376c9f-editedLiveAndDie.mov"]];
 
-  ASVideoNode *videoNode = [[ASVideoNode alloc] initWithAsset:asset];
+  ASVideoNode *videoNode = [[ASVideoNode alloc] init];
+  videoNode.asset = asset;
 
   videoNode.frame = CGRectMake(0, 0, [UIScreen mainScreen].bounds.size.width, [UIScreen mainScreen].bounds.size.height/3);
@@ -48,7 +49,8 @@
 - (ASVideoNode *)nicCageVideo;
 {
   AVAsset* asset = [AVAsset assetWithURL:[NSURL URLWithString:@"https://files.parsetfss.com/8a8a3b0c-619e-4e4d-b1d5-1b5ba9bf2b42/tfss-753fe655-86bb-46da-89b7-aa59c60e49c0-niccage.mp4"]];
 
-  ASVideoNode *nicCageVideo = [[ASVideoNode alloc] initWithAsset:asset];
+  ASVideoNode *nicCageVideo = [[ASVideoNode alloc] init];
+  nicCageVideo.asset = asset;
 
   nicCageVideo.delegate = self;
@@ -67,7 +69,8 @@
 - (ASVideoNode *)simonVideo;
 {
   NSURL *url = [NSURL fileURLWithPath:[[NSBundle mainBundle] pathForResource:@"simon" ofType:@"mp4"]];
 
-  ASVideoNode *simonVideo = [[ASVideoNode alloc] initWithURL:url];
+  ASVideoNode *simonVideo = [[ASVideoNode alloc] init];
+  simonVideo.url = url;
 
   simonVideo.frame = CGRectMake(0, [UIScreen mainScreen].bounds.size.height - ([UIScreen mainScreen].bounds.size.height/3), [UIScreen mainScreen].bounds.size.width/2, [UIScreen mainScreen].bounds.size.height/3);