[ASVideoNode] Cleanup from recent diffs, standardizing on .asset rather than .url.

Scott Goodson 2016-04-17 20:12:37 -07:00
parent 94d0d908dc
commit 11744b7f31
5 changed files with 69 additions and 78 deletions
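
Migration note: the public `url` property is removed, so call sites now construct an AVAsset themselves and assign it to the node. A minimal sketch of the migration, with the statements shown as they would appear inside whatever method configures the node (the URL is only a placeholder, not one of the sample assets):

  #import <AsyncDisplayKit/AsyncDisplayKit.h>
  #import <AVFoundation/AVFoundation.h>

  ASVideoNode *videoNode = [[ASVideoNode alloc] init];
  // Before this commit: videoNode.url = [NSURL URLWithString:@"https://example.com/clip.mp4"];
  // After: wrap the URL in an AVAsset and set the asset property instead.
  videoNode.asset = [AVAsset assetWithURL:[NSURL URLWithString:@"https://example.com/clip.mp4"]];
  [videoNode play];

This is the same pattern the tests and sample apps below adopt via [AVAsset assetWithURL:].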

View File

@@ -19,7 +19,12 @@
 @interface ASVideoNode : ASControlNode
 
-@property (atomic, strong, readwrite) NSURL *url;
+- (instancetype)init;  // ASVideoNode is created with a simple alloc/init.
+
+- (void)play;
+- (void)pause;
+- (BOOL)isPlaying;
 
 @property (atomic, strong, readwrite) AVAsset *asset;
 @property (atomic, strong, readonly) AVPlayer *player;
@@ -37,13 +42,6 @@
 @property (atomic, weak, readwrite) id<ASVideoNodeDelegate> delegate;
 
-- (instancetype)init;
-- (void)play;
-- (void)pause;
-- (BOOL)isPlaying;
-
 @end
 
 @protocol ASVideoNodeDelegate <NSObject>

View File

@@ -23,33 +23,31 @@
   BOOL _muted;
 
   AVAsset *_asset;
-  NSURL *_url;
 
   AVPlayerItem *_currentPlayerItem;
   AVPlayer *_player;
 
-  ASImageNode *_placeholderImageNode;
+  ASImageNode *_placeholderImageNode;  // TODO: Make ASVideoNode an ASImageNode subclass; remove this.
   ASButtonNode *_playButton;
   ASDisplayNode *_playerNode;
   ASDisplayNode *_spinner;
   NSString *_gravity;
-  dispatch_queue_t _previewQueue;
 }
 
 @end
 
 @implementation ASVideoNode
 
-//TODO: Have a bash at supplying a preview image node for use with HLS videos as we can't have a priview with those
+// TODO: Support preview images with HTTP Live Streaming videos.
 
 #pragma mark - Construction and Layout
 
 - (instancetype)init
 {
-  _previewQueue = dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0);
+  if (!(self = [super init])) {
+    return nil;
+  }
 
   self.playButton = [[ASDefaultPlayButton alloc] init];
   self.gravity = AVLayerVideoGravityResizeAspect;
@@ -64,9 +62,11 @@
   return nil;
 }
 
-- (ASDisplayNode*)constructPlayerNode
+- (ASDisplayNode *)constructPlayerNode
 {
-  ASDisplayNode* playerNode = [[ASDisplayNode alloc] initWithLayerBlock:^CALayer *{
+  ASDisplayNode *playerNode = [[ASDisplayNode alloc] initWithLayerBlock:^CALayer *{
+    ASDN::MutexLocker l(_videoLock);
     AVPlayerLayer *playerLayer = [[AVPlayerLayer alloc] init];
     if (!_player) {
       [self constructCurrentPlayerItemFromInitData];
@@ -83,13 +83,13 @@
 - (void)constructCurrentPlayerItemFromInitData
 {
-  ASDisplayNodeAssert(_asset || _url, @"ASVideoNode must be initialised with either an AVAsset or URL");
+  ASDN::MutexLocker l(_videoLock);
+  ASDisplayNodeAssert(_asset, @"ASVideoNode must be initialized with an AVAsset");
 
   [self removePlayerItemObservers];
 
   if (_asset) {
     _currentPlayerItem = [[AVPlayerItem alloc] initWithAsset:_asset];
-  } else if (_url) {
-    _currentPlayerItem = [[AVPlayerItem alloc] initWithURL:_url];
   }
 
   if (_currentPlayerItem) {
@@ -101,6 +101,8 @@
 - (void)removePlayerItemObservers
 {
+  ASDN::MutexLocker l(_videoLock);
+
   if (_currentPlayerItem) {
     [[NSNotificationCenter defaultCenter] removeObserver:self name:AVPlayerItemDidPlayToEndTimeNotification object:nil];
     [[NSNotificationCenter defaultCenter] removeObserver:self name:AVPlayerItemFailedToPlayToEndTimeNotification object:nil];
@@ -112,6 +114,8 @@
 {
   [super didLoad];
 
+  ASDN::MutexLocker l(_videoLock);
+
   if (_shouldBePlaying) {
     _playerNode = [self constructPlayerNode];
     [self insertSubnode:_playerNode atIndex:0];
@@ -126,10 +130,10 @@
   CGRect bounds = self.bounds;
 
+  ASDN::MutexLocker l(_videoLock);
   _placeholderImageNode.frame = bounds;
   _playerNode.frame = bounds;
-  _playerNode.layer.frame = bounds;
   _playButton.frame = bounds;
 
   CGFloat horizontalDiff = (bounds.size.width - _playButton.bounds.size.width)/2;
@@ -142,39 +146,44 @@
 - (void)setPlaceholderImagefromAsset:(AVAsset*)asset
 {
-  ASDN::MutexLocker l(_videoLock);
-  if (!_placeholderImageNode)
-    _placeholderImageNode = [[ASImageNode alloc] init];
-  dispatch_async(_previewQueue, ^{
+  ASPerformBlockOnBackgroundThread(^{
+    ASDN::MutexLocker l(_videoLock);
     AVAssetImageGenerator *imageGenerator = [[AVAssetImageGenerator alloc] initWithAsset:_asset];
     imageGenerator.appliesPreferredTrackTransform = YES;
 
     NSArray *times = @[[NSValue valueWithCMTime:CMTimeMake(0, 1)]];
     [imageGenerator generateCGImagesAsynchronouslyForTimes:times completionHandler:^(CMTime requestedTime, CGImageRef _Nullable image, CMTime actualTime, AVAssetImageGeneratorResult result, NSError * _Nullable error) {
+      ASDN::MutexLocker l(_videoLock);
       // Unfortunately it's not possible to generate a preview image for an HTTP live stream asset, so we'll give up here
       // http://stackoverflow.com/questions/32112205/m3u8-file-avassetimagegenerator-error
       if (image && _placeholderImageNode.image == nil) {
         UIImage *theImage = [UIImage imageWithCGImage:image];
 
-        _placeholderImageNode = [[ASImageNode alloc] init];
-        _placeholderImageNode.layerBacked = YES;
+        if (!_placeholderImageNode) {
+          _placeholderImageNode = [[ASImageNode alloc] init];
+          _placeholderImageNode.layerBacked = YES;
+        }
         _placeholderImageNode.image = theImage;
 
         if ([_gravity isEqualToString:AVLayerVideoGravityResize]) {
           _placeholderImageNode.contentMode = UIViewContentModeRedraw;
         }
-        if ([_gravity isEqualToString:AVLayerVideoGravityResizeAspect]) {
+        else if ([_gravity isEqualToString:AVLayerVideoGravityResizeAspect]) {
           _placeholderImageNode.contentMode = UIViewContentModeScaleAspectFit;
         }
-        if ([_gravity isEqual:AVLayerVideoGravityResizeAspectFill]) {
+        else if ([_gravity isEqualToString:AVLayerVideoGravityResizeAspectFill]) {
          _placeholderImageNode.contentMode = UIViewContentModeScaleAspectFill;
         }
 
         dispatch_async(dispatch_get_main_queue(), ^{
-          _placeholderImageNode.frame = self.bounds;
+          ASDN::MutexLocker l(_videoLock);
           [self insertSubnode:_placeholderImageNode atIndex:0];
+          [self setNeedsLayout];
         });
       }
     }];
@@ -185,8 +194,10 @@
 {
   [super interfaceStateDidChange:newState fromState:oldState];
 
   BOOL nowVisible = ASInterfaceStateIncludesVisible(newState);
   BOOL wasVisible = ASInterfaceStateIncludesVisible(oldState);
 
+  ASDN::MutexLocker l(_videoLock);
+
   if (!nowVisible) {
     if (wasVisible) {
@@ -206,6 +217,8 @@
 - (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context
 {
+  ASDN::MutexLocker l(_videoLock);
+
   if (object == _currentPlayerItem && [keyPath isEqualToString:@"status"]) {
     if (_currentPlayerItem.status == AVPlayerItemStatusReadyToPlay) {
       if ([self.subnodes containsObject:_spinner]) {
@@ -214,7 +227,7 @@
       }
 
       // If we don't yet have a placeholder image update it now that we should have data available for it
-      if (!_placeholderImageNode) {
+      if (_placeholderImageNode.image == nil) {
        if (_currentPlayerItem &&
            _currentPlayerItem.tracks.count > 0 &&
            _currentPlayerItem.tracks[0].assetTrack &&
@@ -353,27 +366,6 @@
   return _asset;
 }
 
-- (void)setUrl:(NSURL *)url
-{
-  ASDN::MutexLocker l(_videoLock);
-
-  if (ASObjectIsEqual(url, _url))
-    return;
-
-  _url = url;
-
-  // FIXME: Adopt -setNeedsFetchData when it is available
-  if (self.interfaceState & ASInterfaceStateFetchData) {
-    [self fetchData];
-  }
-}
-
-- (NSURL *)url
-{
-  ASDN::MutexLocker l(_videoLock);
-  return _url;
-}
-
 - (AVPlayer *)player
 {
   ASDN::MutexLocker l(_videoLock);

View File

@@ -71,7 +71,7 @@
 - (void)testOnPlayIfVideoIsNotReadyInitializeSpinnerAndAddAsSubnodeWithUrl
 {
-  _videoNode.url = _url;
+  _videoNode.asset = [AVAsset assetWithURL:_url];
   [self doOnPlayIfVideoIsNotReadyInitializeSpinnerAndAddAsSubnodeWithUrl];
 }
@@ -92,7 +92,7 @@
 - (void)testOnPauseSpinnerIsPausedIfPresentWithURL
 {
-  _videoNode.url = _url;
+  _videoNode.asset = [AVAsset assetWithURL:_url];
   [self doOnPauseSpinnerIsPausedIfPresentWithURL];
 }
@@ -115,7 +115,7 @@
 - (void)testOnVideoReadySpinnerIsStoppedAndRemovedWithURL
 {
-  _videoNode.url = _url;
+  _videoNode.asset = [AVAsset assetWithURL:_url];
   [self doOnVideoReadySpinnerIsStoppedAndRemovedWithURL];
 }
@@ -138,7 +138,7 @@
 - (void)testPlayerDefaultsToNilWithURL
 {
-  _videoNode.url = _url;
+  _videoNode.asset = [AVAsset assetWithURL:_url];
   XCTAssertNil(_videoNode.player);
 }
@@ -152,7 +152,7 @@
 - (void)testPlayerIsCreatedInFetchDataWithURL
 {
-  _videoNode.url = _url;
+  _videoNode.asset = [AVAsset assetWithURL:_url];
   _videoNode.interfaceState = ASInterfaceStateFetchData;
   XCTAssertNotNil(_videoNode.player);
@@ -167,7 +167,7 @@
 - (void)testPlayerLayerNodeIsAddedOnDidLoadIfVisibleAndAutoPlayingWithURL
 {
-  _videoNode.url = _url;
+  _videoNode.asset = [AVAsset assetWithURL:_url];
   [self doPlayerLayerNodeIsAddedOnDidLoadIfVisibleAndAutoPlayingWithURL];
 }
@@ -188,7 +188,7 @@
 - (void)testPlayerLayerNodeIsNotAddedIfVisibleButShouldNotBePlayingWithUrl
 {
-  _videoNode.url = _url;
+  _videoNode.asset = [AVAsset assetWithURL:_url];
   [self doPlayerLayerNodeIsNotAddedIfVisibleButShouldNotBePlaying];
 }
@@ -210,7 +210,7 @@
 - (void)testVideoStartsPlayingOnDidDidBecomeVisibleWhenShouldAutoplayWithURL
 {
-  _videoNode.url = _url;
+  _videoNode.asset = [AVAsset assetWithURL:_url];
   [self doVideoStartsPlayingOnDidDidBecomeVisibleWhenShouldAutoplay];
 }
@@ -237,7 +237,7 @@
 - (void)testVideoShouldPauseWhenItLeavesVisibleButShouldKnowPlayingShouldRestartLaterWithURL
 {
-  _videoNode.url = _url;
+  _videoNode.asset = [AVAsset assetWithURL:_url];
   [self doVideoShouldPauseWhenItLeavesVisibleButShouldKnowPlayingShouldRestartLater];
 }
@@ -260,7 +260,7 @@
 - (void)testVideoThatIsPlayingWhenItLeavesVisibleRangeStartsAgainWhenItComesBackWithURL
 {
-  _videoNode.url = _url;
+  _videoNode.asset = [AVAsset assetWithURL:_url];
   [self doVideoThatIsPlayingWhenItLeavesVisibleRangeStartsAgainWhenItComesBack];
 }

View File

@@ -96,14 +96,14 @@ static const CGFloat kInnerPadding = 10.0f;
     case 1:
       // Construct the video node directly from the .mp4 URL
       _videoNode = [[ASVideoNode alloc] init];
-      _videoNode.url = [NSURL URLWithString:@"https://files.parsetfss.com/8a8a3b0c-619e-4e4d-b1d5-1b5ba9bf2b42/tfss-753fe655-86bb-46da-89b7-aa59c60e49c0-niccage.mp4"];
+      _videoNode.asset = [AVAsset assetWithURL:[NSURL URLWithString:@"https://files.parsetfss.com/8a8a3b0c-619e-4e4d-b1d5-1b5ba9bf2b42/tfss-753fe655-86bb-46da-89b7-aa59c60e49c0-niccage.mp4"]];
       break;
 
     case 2:
       // Construct the video node from an HTTP Live Streaming URL
       // URL from https://developer.apple.com/library/ios/documentation/AudioVideo/Conceptual/AVFoundationPG/Articles/02_Playback.html
       _videoNode = [[ASVideoNode alloc] init];
-      _videoNode.url = [NSURL URLWithString:@"http://devimages.apple.com/iphone/samples/bipbop/bipbopall.m3u8"];
+      _videoNode.asset = [AVAsset assetWithURL:[NSURL URLWithString:@"http://devimages.apple.com/iphone/samples/bipbop/bipbopall.m3u8"]];
       break;
   }

View File

@@ -33,9 +33,9 @@
 - (ASVideoNode *)guitarVideo;
 {
-  AVAsset* asset = [AVAsset assetWithURL:[NSURL URLWithString:@"https://files.parsetfss.com/8a8a3b0c-619e-4e4d-b1d5-1b5ba9bf2b42/tfss-3045b261-7e93-4492-b7e5-5d6358376c9f-editedLiveAndDie.mov"]];
   ASVideoNode *videoNode = [[ASVideoNode alloc] init];
-  videoNode.asset = asset;
+  videoNode.asset = [AVAsset assetWithURL:[NSURL URLWithString:@"https://files.parsetfss.com/8a8a3b0c-619e-4e4d-b1d5-1b5ba9bf2b42/tfss-3045b261-7e93-4492-b7e5-5d6358376c9f-editedLiveAndDie.mov"]];
   videoNode.frame = CGRectMake(0, 0, [UIScreen mainScreen].bounds.size.width, [UIScreen mainScreen].bounds.size.height/3);
@@ -48,12 +48,12 @@
 - (ASVideoNode *)nicCageVideo;
 {
-  AVAsset* asset = [AVAsset assetWithURL:[NSURL URLWithString:@"https://files.parsetfss.com/8a8a3b0c-619e-4e4d-b1d5-1b5ba9bf2b42/tfss-753fe655-86bb-46da-89b7-aa59c60e49c0-niccage.mp4"]];
   ASVideoNode *nicCageVideo = [[ASVideoNode alloc] init];
-  nicCageVideo.asset = asset;
   nicCageVideo.delegate = self;
+  nicCageVideo.asset = [AVAsset assetWithURL:[NSURL URLWithString:@"https://files.parsetfss.com/8a8a3b0c-619e-4e4d-b1d5-1b5ba9bf2b42/tfss-753fe655-86bb-46da-89b7-aa59c60e49c0-niccage.mp4"]];
   nicCageVideo.frame = CGRectMake([UIScreen mainScreen].bounds.size.width/2, [UIScreen mainScreen].bounds.size.height/3, [UIScreen mainScreen].bounds.size.width/2, [UIScreen mainScreen].bounds.size.height/3);
   nicCageVideo.gravity = AVLayerVideoGravityResize;
@@ -68,9 +68,10 @@
 - (ASVideoNode *)simonVideo;
 {
-  NSURL *url = [NSURL fileURLWithPath:[[NSBundle mainBundle] pathForResource:@"simon" ofType:@"mp4"]];
   ASVideoNode *simonVideo = [[ASVideoNode alloc] init];
-  simonVideo.url = url;
+  NSURL *url = [NSURL fileURLWithPath:[[NSBundle mainBundle] pathForResource:@"simon" ofType:@"mp4"]];
+  simonVideo.asset = [AVAsset assetWithURL:url];
   simonVideo.frame = CGRectMake(0, [UIScreen mainScreen].bounds.size.height - ([UIScreen mainScreen].bounds.size.height/3), [UIScreen mainScreen].bounds.size.width/2, [UIScreen mainScreen].bounds.size.height/3);