First working version for HLS streams

This commit is contained in:
Gareth Reese
2016-03-01 12:08:09 +00:00
parent 75f615b223
commit f9c8c043a1
3 changed files with 74 additions and 12 deletions

View File

@@ -15,7 +15,7 @@
// in an issue on GitHub: https://github.com/facebook/AsyncDisplayKit/issues
@interface ASVideoNode : ASControlNode
@property (atomic, strong, readwrite) AVAsset *asset;
@property (atomic, strong, readonly) AVAsset *asset;
@property (atomic, strong, readonly) AVPlayer *player;
@property (atomic, strong, readonly) AVPlayerItem *currentItem;
@@ -31,6 +31,10 @@
@property (atomic, weak, readwrite) id<ASVideoNodeDelegate> delegate;
// Mirror the construction of AVPlayerItem with the URL or AVAsset
- (instancetype)initWithURL:(NSURL*)url;
- (instancetype)initWithAsset:(AVAsset*)asset;
- (void)play;
- (void)pause;

View File

@@ -23,6 +23,7 @@
BOOL _muted;
AVAsset *_asset;
NSURL *_url;
AVPlayerItem *_currentItem;
AVPlayer *_player;
@@ -41,26 +42,51 @@
@implementation ASVideoNode
//TODO: Have a bash at getting the preview images sorted for the URL types - might need to observe until it's loaded
//TODO: Have a bash at supplying a preview image node so that we're deferring the construction of the video as it eats memory at the moment
// [[[[playerItem tracks] objectAtIndex:0] assetTrack] asset]
#pragma mark - Construction and Layout
- (instancetype)init
/// Convenience initializer mirroring -[AVPlayerItem initWithURL:].
/// Stores the URL; the AVPlayerItem is constructed lazily later
/// (see constructPlayerItemFromInitData), which is what enables
/// HLS (.m3u8) playback as well as plain file URLs.
/// @param url The media URL. Must not be nil (debug-asserted).
- (instancetype)initWithURL:(NSURL*)url
{
  ASDisplayNodeAssertNotNil(url, @"URL must be supplied in initWithURL:");

  self = [super init];
  if (self == nil) {
    return nil;
  }

  _url = url;

  // Shared setup for both initWithURL: and initWithAsset:.
  return [self commonInit];
}
/// Convenience initializer mirroring -[AVPlayerItem initWithAsset:].
/// Stores the asset; player-item construction is deferred to
/// constructPlayerItemFromInitData.
/// @param asset The media asset. Must not be nil (debug-asserted).
- (instancetype)initWithAsset:(AVAsset*)asset
{
  ASDisplayNodeAssertNotNil(asset, @"Asset must be supplied in initWithAsset:");

  self = [super init];
  if (self == nil) {
    return nil;
  }

  _asset = asset;

  // Shared setup for both initWithURL: and initWithAsset:.
  return [self commonInit];
}
/// Setup shared by both designated entry points (initWithURL:/initWithAsset:).
/// Returns self so callers can `return [self commonInit];` directly.
- (instancetype)commonInit
{
  // Default visual configuration.
  self.gravity = AVLayerVideoGravityResizeAspect;
  self.playButton = [[ASDefaultPlayButton alloc] init];

  // Preview-image generation happens off the main thread on this queue.
  _previewQueue = dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0);

  // Tap anywhere on the node toggles playback.
  [self addTarget:self action:@selector(tapped) forControlEvents:ASControlNodeEventTouchUpInside];

  return self;
}
// Plain -init is not supported: the node needs either a URL or an AVAsset,
// so callers must use initWithURL: or initWithAsset: instead.
// NOTE(review): consider also marking -init NS_UNAVAILABLE in the header so
// misuse is caught at compile time rather than via this runtime assert.
- (instancetype)init
{
  ASDisplayNodeAssertNotSupported();
  return nil;
}
- (instancetype)initWithViewBlock:(ASDisplayNodeViewBlock)viewBlock didLoadBlock:(ASDisplayNodeDidLoadBlock)didLoadBlock
{
ASDisplayNodeAssertNotSupported();
@@ -71,7 +97,7 @@
ASDisplayNode* playerNode = [[ASDisplayNode alloc] initWithLayerBlock:^CALayer *{
AVPlayerLayer *playerLayer = [[AVPlayerLayer alloc] init];
if (!_player) {
_player = [AVPlayer playerWithPlayerItem:[[AVPlayerItem alloc] initWithAsset:_asset]];
_player = [AVPlayer playerWithPlayerItem:[self constructPlayerItemFromInitData]];
_player.muted = _muted;
}
playerLayer.player = _player;
@@ -82,6 +108,18 @@
return playerNode;
}
/// Builds a fresh AVPlayerItem from whichever source this node was
/// initialised with: an AVAsset takes precedence, otherwise the URL
/// (the URL path is what supports HLS streams, which cannot be wrapped
/// in a plain AVAsset for playback).
/// @return A new AVPlayerItem, or nil in release builds when neither
///         source was supplied (debug builds assert instead).
- (AVPlayerItem *)constructPlayerItemFromInitData
{
  ASDisplayNodeAssert(_asset || _url, @"Must be initialised with an AVAsset or URL");

  if (_asset != nil) {
    return [[AVPlayerItem alloc] initWithAsset:_asset];
  }
  if (_url != nil) {
    return [[AVPlayerItem alloc] initWithURL:_url];
  }
  return nil;
}
- (void)didLoad
{
[super didLoad];
@@ -90,7 +128,7 @@
if (_shouldBePlaying) {
_playerNode = [self constructPlayerNode];
[self insertSubnode:_playerNode atIndex:0];
} else {
} else if (_asset) {
dispatch_async(_previewQueue, ^{
AVAssetImageGenerator *imageGenerator = [[AVAssetImageGenerator alloc] initWithAsset:_asset];
imageGenerator.appliesPreferredTrackTransform = YES;
@@ -214,7 +252,7 @@
{
ASDN::MutexLocker l(_videoLock);
_currentItem = [[AVPlayerItem alloc] initWithAsset:_asset];
_currentItem = [self constructPlayerItemFromInitData];
[_currentItem addObserver:self forKeyPath:NSStringFromSelector(@selector(status)) options:NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew context:NULL];
if (_player) {
@@ -249,7 +287,7 @@
if (isVisible) {
if (_playerNode.isNodeLoaded) {
if (!_player) {
_player = [AVPlayer playerWithPlayerItem:[[AVPlayerItem alloc] initWithAsset:_asset]];
_player = [AVPlayer playerWithPlayerItem:[self constructPlayerItemFromInitData]];
_player.muted = _muted;
}
((AVPlayerLayer *)_playerNode.layer).player = _player;

View File

@@ -80,16 +80,36 @@ static const CGFloat kInnerPadding = 10.0f;
return nil;
_kittenSize = size;
_videoNode = [[ASVideoNode alloc] init];
u_int32_t videoInitMethod = arc4random_uniform(3);
NSArray* methodArray = @[@"AVAsset", @"File URL", @"HLS URL"];
switch (videoInitMethod) {
case 0:
// Construct an AVAsset from a URL
_videoNode = [[ASVideoNode alloc] initWithAsset:
[AVAsset assetWithURL:[NSURL URLWithString:@"https://files.parsetfss.com/8a8a3b0c-619e-4e4d-b1d5-1b5ba9bf2b42/tfss-753fe655-86bb-46da-89b7-aa59c60e49c0-niccage.mp4"]]];
break;
case 1:
// Construct the video node directly from the .mp4 URL
_videoNode = [[ASVideoNode alloc] initWithURL:[NSURL URLWithString:@"https://files.parsetfss.com/8a8a3b0c-619e-4e4d-b1d5-1b5ba9bf2b42/tfss-753fe655-86bb-46da-89b7-aa59c60e49c0-niccage.mp4"]];
break;
case 2:
// Construct the video node from an HTTP Live Streaming URL
// URL from https://developer.apple.com/library/ios/documentation/AudioVideo/Conceptual/AVFoundationPG/Articles/02_Playback.html
_videoNode = [[ASVideoNode alloc] initWithURL:[NSURL URLWithString:@"http://devimages.apple.com/iphone/samples/bipbop/bipbopall.m3u8"]];
break;
}
// _videoNode.shouldAutoplay = YES;
_videoNode.backgroundColor = ASDisplayNodeDefaultPlaceholderColor();
_videoNode.asset = [AVAsset assetWithURL:[NSURL URLWithString:@"https://files.parsetfss.com/8a8a3b0c-619e-4e4d-b1d5-1b5ba9bf2b42/tfss-753fe655-86bb-46da-89b7-aa59c60e49c0-niccage.mp4"]];
[self addSubnode:_videoNode];
_textNode = [[ASTextNode alloc] init];
_textNode.attributedString = [[NSAttributedString alloc] initWithString:[self kittyIpsum]
_textNode.attributedString = [[NSAttributedString alloc] initWithString:[NSString stringWithFormat:@"%@ %@", methodArray[videoInitMethod], [self kittyIpsum]]
attributes:[self textStyle]];
[self addSubnode:_textNode];