Video avatar improvements

Ilya Laktyushin 2020-06-20 03:13:03 +03:00
parent 48322ade2c
commit 473c6255bd
12 changed files with 262 additions and 86 deletions

View File

@@ -12,11 +12,13 @@ typedef enum
TGMediaVideoConversionPresetCompressedHigh,
TGMediaVideoConversionPresetCompressedVeryHigh,
TGMediaVideoConversionPresetAnimation,
TGMediaVideoConversionPresetVideoMessage
TGMediaVideoConversionPresetVideoMessage,
TGMediaVideoConversionPresetProfile
} TGMediaVideoConversionPreset;
@interface TGVideoEditAdjustments : NSObject <TGMediaEditAdjustments>
@property (nonatomic, readonly) NSTimeInterval videoStartValue;
@property (nonatomic, readonly) NSTimeInterval trimStartValue;
@property (nonatomic, readonly) NSTimeInterval trimEndValue;
@property (nonatomic, readonly) TGMediaVideoConversionPreset preset;
@@ -29,7 +31,7 @@ typedef enum
- (NSDictionary *)dictionary;
- (instancetype)editAdjustmentsWithPreset:(TGMediaVideoConversionPreset)preset maxDuration:(NSTimeInterval)maxDuration;
- (instancetype)editAdjustmentsWithPreset:(TGMediaVideoConversionPreset)preset maxDuration:(NSTimeInterval)maxDuration videoStartValue:(NSTimeInterval)videoStartValue;
+ (instancetype)editAdjustmentsWithOriginalSize:(CGSize)originalSize preset:(TGMediaVideoConversionPreset)preset;
+ (instancetype)editAdjustmentsWithPhotoEditorValues:(PGPhotoEditorValues *)values;
+ (instancetype)editAdjustmentsWithDictionary:(NSDictionary *)dictionary;

View File

@@ -344,9 +344,26 @@
if (TGOrientationIsSideward(adjustments.cropOrientation, NULL))
outputDimensions = CGSizeMake(outputDimensions.height, outputDimensions.width);
AVMutableCompositionTrack *trimVideoTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[trimVideoTrack insertTimeRange:timeRange ofTrack:videoTrack atTime:kCMTimeZero error:NULL];
CMTimeRange instructionTimeRange = CMTimeRangeMake(kCMTimeZero, timeRange.duration);
AVMutableCompositionTrack *compositionTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
if (adjustments.videoStartValue > 0.0 && adjustments.videoStartValue > adjustments.trimStartValue) {
NSTimeInterval trimEndValue = adjustments.trimEndValue > adjustments.trimStartValue ? adjustments.trimEndValue : CMTimeGetSeconds(videoTrack.timeRange.duration);
CMTimeRange firstRange = CMTimeRangeMake(CMTimeMakeWithSeconds(adjustments.videoStartValue, NSEC_PER_SEC), CMTimeMakeWithSeconds(trimEndValue - adjustments.videoStartValue, NSEC_PER_SEC));
NSError *error;
[compositionTrack insertTimeRange:firstRange ofTrack:videoTrack atTime:kCMTimeZero error:&error];
NSLog(@"");
[compositionTrack insertTimeRange:CMTimeRangeMake(CMTimeMakeWithSeconds(adjustments.trimStartValue, NSEC_PER_SEC), CMTimeMakeWithSeconds(adjustments.videoStartValue - adjustments.trimStartValue, NSEC_PER_SEC)) ofTrack:videoTrack atTime:firstRange.duration error:&error];
NSLog(@"");
// instructionTimeRange = CMTimeRangeMake(kCMTimeZero, );
} else {
[compositionTrack insertTimeRange:timeRange ofTrack:videoTrack atTime:kCMTimeZero error:NULL];
}
CMTime frameDuration = CMTimeMake(1, 30);
if (videoTrack.nominalFrameRate > 0)
frameDuration = CMTimeMake(1, (int32_t)videoTrack.nominalFrameRate);
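The branch added above is the heart of the cover-frame behaviour: when the chosen cover position (videoStartValue) falls inside the trimmed range, the converter inserts the segment from the cover position to the trim end first and then appends the segment from the trim start up to the cover position, so the exported profile video opens on the cover frame while still containing the whole trimmed clip. A minimal Swift sketch of the same time-range arithmetic; the helper and its guard condition are illustrative, not taken from the commit:

import Foundation
import CoreMedia

// Purely illustrative: the commit does this inline in Objective-C on an
// AVMutableCompositionTrack. Splits a trimmed clip into the segments that
// are inserted when a cover position is set inside the trimmed range.
func coverFirstRanges(trimStart: TimeInterval,
                      trimEnd: TimeInterval,
                      coverPosition: TimeInterval) -> [CMTimeRange] {
    let scale = CMTimeScale(NSEC_PER_SEC)
    func time(_ seconds: TimeInterval) -> CMTime {
        return CMTime(seconds: seconds, preferredTimescale: scale)
    }
    guard coverPosition > trimStart, coverPosition < trimEnd else {
        // No reordering needed: a single range covering the whole trimmed clip.
        return [CMTimeRange(start: time(trimStart), duration: time(trimEnd - trimStart))]
    }
    // Inserted first, so playback opens on the chosen cover frame.
    let head = CMTimeRange(start: time(coverPosition), duration: time(trimEnd - coverPosition))
    // Appended after the head, covering the part before the cover frame.
    let tail = CMTimeRange(start: time(trimStart), duration: time(coverPosition - trimStart))
    return [head, tail]
}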
@@ -456,18 +473,18 @@
}];
} else {
videoComposition = [AVMutableVideoComposition videoComposition];
bool mirrored = false;
UIImageOrientation videoOrientation = TGVideoOrientationForAsset(avAsset, &mirrored);
CGAffineTransform transform = TGVideoTransformForOrientation(videoOrientation, videoTrack.naturalSize, cropRect, mirrored);
CGAffineTransform rotationTransform = TGVideoTransformForCrop(adjustments.cropOrientation, cropRect.size, adjustments.cropMirrored);
CGAffineTransform finalTransform = CGAffineTransformConcat(transform, rotationTransform);
AVMutableVideoCompositionLayerInstruction *transformer = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:trimVideoTrack];
AVMutableVideoCompositionLayerInstruction *transformer = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:compositionTrack];
[transformer setTransform:finalTransform atTime:kCMTimeZero];
AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, timeRange.duration);
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, compositionTrack.timeRange.duration);
instruction.layerInstructions = [NSArray arrayWithObject:transformer];
videoComposition.instructions = [NSArray arrayWithObject:instruction];
}
@@ -1234,9 +1251,11 @@ static CGFloat progressOfSampleBufferInTimeRange(CMSampleBufferRef sampleBuffer,
return (CGSize){ 1920.0f, 1920.0f };
case TGMediaVideoConversionPresetVideoMessage:
{
return (CGSize){ 240.0f, 240.0f };
}
case TGMediaVideoConversionPresetProfile:
return (CGSize){ 800.0f, 800.0f };
default:
return (CGSize){ 848.0f, 848.0f };
}
@@ -1320,6 +1339,9 @@ static CGFloat progressOfSampleBufferInTimeRange(CMSampleBufferRef sampleBuffer,
case TGMediaVideoConversionPresetVideoMessage:
return 300;
case TGMediaVideoConversionPresetProfile:
return 1000;
default:
return 900;
}
@@ -1347,6 +1369,10 @@ static CGFloat progressOfSampleBufferInTimeRange(CMSampleBufferRef sampleBuffer,
case TGMediaVideoConversionPresetVideoMessage:
return 32;
case TGMediaVideoConversionPresetAnimation:
case TGMediaVideoConversionPresetProfile:
return 32;
default:
return 32;
}
@@ -1371,6 +1397,10 @@ static CGFloat progressOfSampleBufferInTimeRange(CMSampleBufferRef sampleBuffer,
case TGMediaVideoConversionPresetCompressedVeryHigh:
return 2;
case TGMediaVideoConversionPresetAnimation:
case TGMediaVideoConversionPresetProfile:
return 0;
default:
return 1;
}
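Read together, the switch statements above give the new TGMediaVideoConversionPresetProfile its conversion parameters: 800×800 maximum dimensions, what appears to be a 1000 kbps video bitrate, a 32 kbps audio bitrate, and zero audio channels, i.e. the audio track is dropped for profile videos. A compact Swift restatement of those values; the type and property names are hypothetical, only the numbers come from the tables above:

import CoreGraphics

// Hypothetical summary type; the real values live in the Objective-C switch
// statements above.
struct ProfileConversionParameters {
    let maximumDimensions: CGSize   // output is fitted into this box
    let videoBitrateKbps: Int
    let audioBitrateKbps: Int
    let audioChannelCount: Int      // 0 = audio track dropped entirely
}

let profilePreset = ProfileConversionParameters(
    maximumDimensions: CGSize(width: 800.0, height: 800.0),
    videoBitrateKbps: 1000,
    audioBitrateKbps: 32,
    audioChannelCount: 0
)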

View File

@@ -1641,7 +1641,6 @@
}
UIImage *fullImage = nil;
UIImage *thumbnailImage = nil;
if (adjustments.toolsApplied) {
image = [PGPhotoEditor resultImageForImage:image adjustments:adjustments];
@@ -1662,7 +1661,7 @@
TGDispatchOnMainThread(^{
if (self.didFinishEditingVideo != nil)
self.didFinishEditingVideo(asset.URL, [adjustments editAdjustmentsWithPreset:TGMediaVideoConversionPresetAnimation maxDuration:0.0], fullImage, nil, true);
self.didFinishEditingVideo(asset.URL, [adjustments editAdjustmentsWithPreset:TGMediaVideoConversionPresetProfile maxDuration:0.0 videoStartValue:coverPosition], fullImage, nil, true);
[self transitionOutSaving:true completion:^
{

View File

@@ -651,7 +651,7 @@ const NSTimeInterval TGPhotoQualityPreviewDuration = 15.0f;
[self _updateVideoDuration:((TGMediaAsset *)self.item).videoDuration hasAudio:true];
TGVideoEditAdjustments *adjustments = [self.photoEditor exportAdjustments];
adjustments = [adjustments editAdjustmentsWithPreset:self.preset maxDuration:TGPhotoQualityPreviewDuration];
adjustments = [adjustments editAdjustmentsWithPreset:self.preset maxDuration:TGPhotoQualityPreviewDuration videoStartValue:0.0];
__block NSTimeInterval delay = 0.0;
__weak TGPhotoQualityController *weakSelf = self;

View File

@@ -153,7 +153,7 @@ const NSTimeInterval TGVideoEditMaximumGifDuration = 30.5;
return adjustments;
}
- (instancetype)editAdjustmentsWithPreset:(TGMediaVideoConversionPreset)preset maxDuration:(NSTimeInterval)maxDuration
- (instancetype)editAdjustmentsWithPreset:(TGMediaVideoConversionPreset)preset maxDuration:(NSTimeInterval)maxDuration videoStartValue:(NSTimeInterval)videoStartValue
{
TGVideoEditAdjustments *adjustments = [[[self class] alloc] init];
adjustments->_originalSize = _originalSize;
@@ -168,6 +168,7 @@ const NSTimeInterval TGVideoEditMaximumGifDuration = 30.5;
adjustments->_sendAsGif = _sendAsGif;
adjustments->_preset = preset;
adjustments->_toolValues = _toolValues;
adjustments->_videoStartValue = videoStartValue;
if (maxDuration > DBL_EPSILON)
{

View File

@@ -24,8 +24,8 @@ public enum AvatarGalleryEntry: Equatable {
switch self {
case .topImage:
return .topImage
case let .image(image):
return .image(image.0)
case let .image(id, _, _, _, _, _, indexData, _):
return .image(id)
}
}
@@ -40,7 +40,7 @@ public enum AvatarGalleryEntry: Equatable {
public var videoRepresentations: [TelegramMediaImage.VideoRepresentation] {
switch self {
case let .topImage(representations, _):
case .topImage:
return []
case let .image(_, _, _, videoRepresentations, _, _, _, _):
return videoRepresentations
@@ -106,7 +106,7 @@ public func fetchedAvatarGalleryEntries(account: Account, peer: Peer) -> Signal<
var index: Int32 = 0
for photo in photos {
let indexData = GalleryItemIndexData(position: index, totalCount: Int32(photos.count))
if result.isEmpty, let first = initialEntries.first {
if result.isEmpty, let first = initialEntries.first, photo.image.videoRepresentations.isEmpty {
result.append(.image(photo.image.imageId, photo.image.reference, first.representations, first.videoRepresentations, peer, photo.date, indexData, photo.messageId))
} else {
result.append(.image(photo.image.imageId, photo.image.reference, photo.image.representations.map({ ImageRepresentationWithReference(representation: $0, reference: MediaResourceReference.standalone(resource: $0.resource)) }), photo.image.videoRepresentations, peer, photo.date, indexData, photo.messageId))
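The added photo.image.videoRepresentations.isEmpty condition means the first gallery entry only reuses the representations of the entry that is already on screen when the profile photo has no video; a video avatar is instead built from the photo itself, so its video representations are not lost. A simplified Swift sketch of that decision; the enum and parameter names are hypothetical:

enum RepresentationSource {
    case initialEntry   // reuse what is already on screen (avoids a flicker)
    case photo          // build from the fetched photo, keeping its video
}

// Hypothetical simplification of the branch above.
func representationSource(isFirstResult: Bool,
                          hasInitialEntry: Bool,
                          photoHasVideo: Bool) -> RepresentationSource {
    if isFirstResult && hasInitialEntry && !photoHasVideo {
        return .initialEntry
    } else {
        return .photo
    }
}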

View File

@@ -104,6 +104,18 @@ class PeerAvatarImageGalleryItem: GalleryItem {
}
}
private class PeerAvatarImageGalleryContentNode: ASDisplayNode {
override func layout() {
super.layout()
if let subnodes = self.subnodes {
for node in subnodes {
node.frame = self.bounds
}
}
}
}
final class PeerAvatarImageGalleryItemNode: ZoomableContentGalleryItemNode {
private let context: AccountContext
private let peer: Peer
@@ -111,6 +123,7 @@ final class PeerAvatarImageGalleryItemNode: ZoomableContentGalleryItemNode {
private var entry: AvatarGalleryEntry?
private let contentNode: PeerAvatarImageGalleryContentNode
private let imageNode: TransformImageNode
private var videoNode: UniversalVideoNode?
private var videoContent: NativeVideoContent?
@@ -130,6 +143,7 @@ final class PeerAvatarImageGalleryItemNode: ZoomableContentGalleryItemNode {
self.peer = peer
self.sourceHasRoundCorners = sourceHasRoundCorners
self.contentNode = PeerAvatarImageGalleryContentNode()
self.imageNode = TransformImageNode()
self.footerContentNode = AvatarGalleryItemFooterContentNode(context: context, presentationData: presentationData)
@@ -139,6 +153,8 @@ final class PeerAvatarImageGalleryItemNode: ZoomableContentGalleryItemNode {
super.init()
self.contentNode.addSubnode(self.imageNode)
self.imageNode.imageUpdated = { [weak self] _ in
self?._ready.set(.single(Void()))
}
@@ -155,7 +171,14 @@ final class PeerAvatarImageGalleryItemNode: ZoomableContentGalleryItemNode {
self.footerContentNode.share = { [weak self] interaction in
if let strongSelf = self, let entry = strongSelf.entry, !entry.representations.isEmpty {
let shareController = ShareController(context: strongSelf.context, subject: .image(entry.representations), preferredAction: .saveToCameraRoll)
let subject: ShareControllerSubject
if let video = entry.videoRepresentations.last {
let videoFileReference = FileMediaReference.standalone(media: TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.LocalFile, id: 0), partialReference: nil, resource: video.resource, previewRepresentations: [], videoThumbnails: [], immediateThumbnailData: nil, mimeType: "video/mp4", size: nil, attributes: [.Animated, .Video(duration: 0, size: video.dimensions, flags: [])]))
subject = .media(videoFileReference.abstract)
} else {
subject = .image(entry.representations)
}
let shareController = ShareController(context: strongSelf.context, subject: subject, preferredAction: .saveToCameraRoll)
interaction.presentController(shareController, nil)
}
}
@@ -202,51 +225,88 @@ final class PeerAvatarImageGalleryItemNode: ZoomableContentGalleryItemNode {
representations = imageRepresentations
}
self.imageNode.setSignal(chatAvatarGalleryPhoto(account: self.context.account, representations: representations), dispatchOnDisplayLink: false)
self.zoomableContent = (largestSize.dimensions.cgSize, self.imageNode)
self.zoomableContent = (largestSize.dimensions.cgSize, self.contentNode)
if let largestIndex = representations.firstIndex(where: { $0.representation == largestSize }) {
self.fetchDisposable.set(fetchedMediaResource(mediaBox: self.context.account.postbox.mediaBox, reference: representations[largestIndex].reference).start())
}
self.statusDisposable.set((self.context.account.postbox.mediaBox.resourceStatus(largestSize.resource)
|> deliverOnMainQueue).start(next: { [weak self] status in
if let strongSelf = self {
let previousStatus = strongSelf.status
strongSelf.status = status
switch status {
case .Remote:
strongSelf.statusNode.isHidden = false
strongSelf.statusNodeContainer.isUserInteractionEnabled = true
strongSelf.statusNode.transitionToState(.download(.white), completion: {})
case let .Fetching(_, progress):
strongSelf.statusNode.isHidden = false
strongSelf.statusNodeContainer.isUserInteractionEnabled = true
let adjustedProgress = max(progress, 0.027)
strongSelf.statusNode.transitionToState(.progress(color: .white, lineWidth: nil, value: CGFloat(adjustedProgress), cancelEnabled: true), completion: {})
case .Local:
if let previousStatus = previousStatus, case .Fetching = previousStatus {
strongSelf.statusNode.transitionToState(.progress(color: .white, lineWidth: nil, value: 1.0, cancelEnabled: true), completion: {
if let strongSelf = self {
strongSelf.statusNode.alpha = 0.0
strongSelf.statusNodeContainer.isUserInteractionEnabled = false
strongSelf.statusNode.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2, completion: { _ in
if let strongSelf = self {
strongSelf.statusNode.transitionToState(.none, animated: false, completion: {})
}
})
}
})
} else if !strongSelf.statusNode.isHidden && !strongSelf.statusNode.alpha.isZero {
strongSelf.statusNode.alpha = 0.0
strongSelf.statusNodeContainer.isUserInteractionEnabled = false
strongSelf.statusNode.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2, completion: { _ in
if let strongSelf = self {
strongSelf.statusNode.transitionToState(.none, animated: false, completion: {})
}
})
}
// self.statusDisposable.set((self.context.account.postbox.mediaBox.resourceStatus(largestSize.resource)
// |> deliverOnMainQueue).start(next: { [weak self] status in
// if let strongSelf = self {
// let previousStatus = strongSelf.status
// strongSelf.status = status
// switch status {
// case .Remote:
// strongSelf.statusNode.isHidden = false
// strongSelf.statusNodeContainer.isUserInteractionEnabled = true
// strongSelf.statusNode.transitionToState(.download(.white), completion: {})
// case let .Fetching(_, progress):
// strongSelf.statusNode.isHidden = false
// strongSelf.statusNodeContainer.isUserInteractionEnabled = true
// let adjustedProgress = max(progress, 0.027)
// strongSelf.statusNode.transitionToState(.progress(color: .white, lineWidth: nil, value: CGFloat(adjustedProgress), cancelEnabled: true), completion: {})
// case .Local:
// if let previousStatus = previousStatus, case .Fetching = previousStatus {
// strongSelf.statusNode.transitionToState(.progress(color: .white, lineWidth: nil, value: 1.0, cancelEnabled: true), completion: {
// if let strongSelf = self {
// strongSelf.statusNode.alpha = 0.0
// strongSelf.statusNodeContainer.isUserInteractionEnabled = false
// strongSelf.statusNode.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2, completion: { _ in
// if let strongSelf = self {
// strongSelf.statusNode.transitionToState(.none, animated: false, completion: {})
// }
// })
// }
// })
// } else if !strongSelf.statusNode.isHidden && !strongSelf.statusNode.alpha.isZero {
// strongSelf.statusNode.alpha = 0.0
// strongSelf.statusNodeContainer.isUserInteractionEnabled = false
// strongSelf.statusNode.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2, completion: { _ in
// if let strongSelf = self {
// strongSelf.statusNode.transitionToState(.none, animated: false, completion: {})
// }
// })
// }
// }
// }
// }))
var id: Int64?
if case let .image(image) = entry {
id = image.0.id
}
if let video = entry.videoRepresentations.last, let id = id {
let mediaManager = self.context.sharedContext.mediaManager
let videoFileReference = FileMediaReference.standalone(media: TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.LocalFile, id: 0), partialReference: nil, resource: video.resource, previewRepresentations: representations.map { $0.representation }, videoThumbnails: [], immediateThumbnailData: nil, mimeType: "video/mp4", size: nil, attributes: [.Animated, .Video(duration: 0, size: video.dimensions, flags: [])]))
let videoContent = NativeVideoContent(id: .profileVideo(id), fileReference: videoFileReference, streamVideo: .none, loopVideo: true, enableSound: false, fetchAutomatically: true, onlyFullSizeThumbnail: false, continuePlayingWithoutSoundOnLostAudioSession: false, placeholderColor: .black)
let videoNode = UniversalVideoNode(postbox: self.context.account.postbox, audioSession: mediaManager.audioSession, manager: mediaManager.universalVideoManager, decoration: GalleryVideoDecoration(), content: videoContent, priority: .embedded)
videoNode.isUserInteractionEnabled = false
videoNode.ownsContentNodeUpdated = { [weak self] owns in
if let strongSelf = self {
strongSelf.videoNode?.isHidden = !owns
}
}
}))
videoNode.canAttachContent = true
if videoNode.hasAttachedContext {
videoNode.play()
}
self.videoContent = videoContent
self.videoNode = videoNode
videoNode.updateLayout(size: largestSize.dimensions.cgSize, transition: .immediate)
self.contentNode.addSubnode(videoNode)
} else if let videoNode = self.videoNode {
self.videoContent = nil
self.videoNode = nil
videoNode.removeFromSupernode()
}
self.imageNode.frame = self.contentNode.bounds
self.videoNode?.frame = self.contentNode.bounds
} else {
self._ready.set(.single(Void()))
}
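Since the zoomable content is now the shared contentNode rather than the imageNode, the still image and the looping, muted profile video share the same zoom and transition geometry, and the video node stays hidden until it actually owns its content node, so the still avatar remains visible in the meantime. A minimal sketch of that hide-until-owned wiring; the node type is a hypothetical stand-in for UniversalVideoNode:

// Hypothetical stand-in for the parts of UniversalVideoNode used above.
final class LoopingAvatarVideoNode {
    var isHidden = true
    var hasAttachedContext = false
    var ownsContentNodeUpdated: ((Bool) -> Void)?
    var canAttachContent = false {
        didSet { /* in the real node this attaches the shared player content */ }
    }
    func play() { /* starts looped, muted playback */ }
}

func attachProfileVideo(_ videoNode: LoopingAvatarVideoNode) {
    // Only reveal the video once this node owns the shared content node,
    // so the still avatar keeps showing until playback is ready.
    videoNode.ownsContentNodeUpdated = { [weak videoNode] owns in
        videoNode?.isHidden = !owns
    }
    videoNode.canAttachContent = true
    if videoNode.hasAttachedContext {
        videoNode.play()
    }
}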
@@ -254,11 +314,11 @@ final class PeerAvatarImageGalleryItemNode: ZoomableContentGalleryItemNode {
}
override func animateIn(from node: (ASDisplayNode, CGRect, () -> (UIView?, UIView?)), addToTransitionSurface: (UIView) -> Void, completion: @escaping () -> Void) {
var transformedFrame = node.0.view.convert(node.0.view.bounds, to: self.imageNode.view)
let transformedSuperFrame = node.0.view.convert(node.0.view.bounds, to: self.imageNode.view.superview)
var transformedFrame = node.0.view.convert(node.0.view.bounds, to: self.contentNode.view)
let transformedSuperFrame = node.0.view.convert(node.0.view.bounds, to: self.contentNode.view.superview)
let transformedSelfFrame = node.0.view.convert(node.0.view.bounds, to: self.view)
let transformedCopyViewFinalFrame = self.imageNode.view.convert(self.imageNode.view.bounds, to: self.view)
let scaledLocalImageViewBounds = self.imageNode.view.bounds
let transformedCopyViewFinalFrame = self.contentNode.view.convert(self.contentNode.view.bounds, to: self.view)
let scaledLocalImageViewBounds = self.contentNode.view.bounds
let copyViewContents = node.2().0!
let copyView = UIView()
@@ -276,7 +336,7 @@ final class PeerAvatarImageGalleryItemNode: ZoomableContentGalleryItemNode {
var transformedSurfaceFinalFrame: CGRect?
if let contentSurface = surfaceCopyView.superview {
transformedSurfaceFrame = node.0.view.convert(node.0.view.bounds, to: contentSurface)
transformedSurfaceFinalFrame = self.imageNode.view.convert(scaledLocalImageViewBounds, to: contentSurface)
transformedSurfaceFinalFrame = self.contentNode.view.convert(scaledLocalImageViewBounds, to: contentSurface)
}
if let transformedSurfaceFrame = transformedSurfaceFrame, let transformedSurfaceFinalFrame = transformedSurfaceFinalFrame {
@@ -306,22 +366,22 @@ final class PeerAvatarImageGalleryItemNode: ZoomableContentGalleryItemNode {
let scale = CGSize(width: transformedCopyViewFinalFrame.size.width / transformedSelfFrame.size.width, height: transformedCopyViewFinalFrame.size.height / transformedSelfFrame.size.height)
copyView.layer.animate(from: NSValue(caTransform3D: CATransform3DIdentity), to: NSValue(caTransform3D: CATransform3DMakeScale(scale.width, scale.height, 1.0)), keyPath: "transform", timingFunction: kCAMediaTimingFunctionSpring, duration: 0.25, removeOnCompletion: false)
self.imageNode.layer.animatePosition(from: CGPoint(x: transformedSuperFrame.midX, y: transformedSuperFrame.midY), to: self.imageNode.layer.position, duration: 0.25, timingFunction: kCAMediaTimingFunctionSpring, completion: { _ in
self.contentNode.layer.animatePosition(from: CGPoint(x: transformedSuperFrame.midX, y: transformedSuperFrame.midY), to: self.contentNode.layer.position, duration: 0.25, timingFunction: kCAMediaTimingFunctionSpring, completion: { _ in
completion()
})
self.imageNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.07)
self.contentNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.07)
transformedFrame.origin = CGPoint()
//self.imageNode.layer.animateBounds(from: transformedFrame, to: self.imageNode.layer.bounds, duration: 0.25, timingFunction: kCAMediaTimingFunctionSpring)
let transform = CATransform3DScale(self.imageNode.layer.transform, transformedFrame.size.width / self.imageNode.layer.bounds.size.width, transformedFrame.size.height / self.imageNode.layer.bounds.size.height, 1.0)
self.imageNode.layer.animate(from: NSValue(caTransform3D: transform), to: NSValue(caTransform3D: self.imageNode.layer.transform), keyPath: "transform", timingFunction: kCAMediaTimingFunctionSpring, duration: 0.25)
let transform = CATransform3DScale(self.contentNode.layer.transform, transformedFrame.size.width / self.contentNode.layer.bounds.size.width, transformedFrame.size.height / self.contentNode.layer.bounds.size.height, 1.0)
self.contentNode.layer.animate(from: NSValue(caTransform3D: transform), to: NSValue(caTransform3D: self.contentNode.layer.transform), keyPath: "transform", timingFunction: kCAMediaTimingFunctionSpring, duration: 0.25)
self.imageNode.clipsToBounds = true
self.contentNode.clipsToBounds = true
if self.sourceHasRoundCorners {
self.imageNode.layer.animate(from: (self.imageNode.frame.width / 2.0) as NSNumber, to: 0.0 as NSNumber, keyPath: "cornerRadius", timingFunction: CAMediaTimingFunctionName.default.rawValue, duration: 0.18, removeOnCompletion: false, completion: { [weak self] value in
self.contentNode.layer.animate(from: (self.contentNode.frame.width / 2.0) as NSNumber, to: 0.0 as NSNumber, keyPath: "cornerRadius", timingFunction: CAMediaTimingFunctionName.default.rawValue, duration: 0.18, removeOnCompletion: false, completion: { [weak self] value in
if value {
self?.imageNode.clipsToBounds = false
self?.contentNode.clipsToBounds = false
}
})
}
@@ -332,10 +392,10 @@ final class PeerAvatarImageGalleryItemNode: ZoomableContentGalleryItemNode {
}
override func animateOut(to node: (ASDisplayNode, CGRect, () -> (UIView?, UIView?)), addToTransitionSurface: (UIView) -> Void, completion: @escaping () -> Void) {
var transformedFrame = node.0.view.convert(node.0.view.bounds, to: self.imageNode.view)
let transformedSuperFrame = node.0.view.convert(node.0.view.bounds, to: self.imageNode.view.superview)
var transformedFrame = node.0.view.convert(node.0.view.bounds, to: self.contentNode.view)
let transformedSuperFrame = node.0.view.convert(node.0.view.bounds, to: self.contentNode.view.superview)
let transformedSelfFrame = node.0.view.convert(node.0.view.bounds, to: self.view)
let transformedCopyViewInitialFrame = self.imageNode.view.convert(self.imageNode.view.bounds, to: self.view)
let transformedCopyViewInitialFrame = self.contentNode.view.convert(self.contentNode.view.bounds, to: self.view)
var positionCompleted = false
var boundsCompleted = false
@@ -358,7 +418,7 @@ final class PeerAvatarImageGalleryItemNode: ZoomableContentGalleryItemNode {
var transformedSurfaceCopyViewInitialFrame: CGRect?
if let contentSurface = surfaceCopyView.superview {
transformedSurfaceFrame = node.0.view.convert(node.0.view.bounds, to: contentSurface)
transformedSurfaceCopyViewInitialFrame = self.imageNode.view.convert(self.imageNode.view.bounds, to: contentSurface)
transformedSurfaceCopyViewInitialFrame = self.contentNode.view.convert(self.contentNode.view.bounds, to: contentSurface)
}
let durationFactor = 1.0
@@ -393,24 +453,24 @@ final class PeerAvatarImageGalleryItemNode: ZoomableContentGalleryItemNode {
intermediateCompletion()
})
self.imageNode.layer.animatePosition(from: self.imageNode.layer.position, to: CGPoint(x: transformedSuperFrame.midX, y: transformedSuperFrame.midY), duration: 0.25 * durationFactor, timingFunction: kCAMediaTimingFunctionSpring, removeOnCompletion: false, completion: { _ in
self.contentNode.layer.animatePosition(from: self.contentNode.layer.position, to: CGPoint(x: transformedSuperFrame.midX, y: transformedSuperFrame.midY), duration: 0.25 * durationFactor, timingFunction: kCAMediaTimingFunctionSpring, removeOnCompletion: false, completion: { _ in
positionCompleted = true
intermediateCompletion()
})
self.imageNode.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.25 * durationFactor, removeOnCompletion: false)
self.contentNode.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.25 * durationFactor, removeOnCompletion: false)
transformedFrame.origin = CGPoint()
let transform = CATransform3DScale(self.imageNode.layer.transform, transformedFrame.size.width / self.imageNode.layer.bounds.size.width, transformedFrame.size.height / self.imageNode.layer.bounds.size.height, 1.0)
self.imageNode.layer.animate(from: NSValue(caTransform3D: self.imageNode.layer.transform), to: NSValue(caTransform3D: transform), keyPath: "transform", timingFunction: kCAMediaTimingFunctionSpring, duration: 0.25 * durationFactor, removeOnCompletion: false, completion: { _ in
let transform = CATransform3DScale(self.contentNode.layer.transform, transformedFrame.size.width / self.contentNode.layer.bounds.size.width, transformedFrame.size.height / self.contentNode.layer.bounds.size.height, 1.0)
self.contentNode.layer.animate(from: NSValue(caTransform3D: self.contentNode.layer.transform), to: NSValue(caTransform3D: transform), keyPath: "transform", timingFunction: kCAMediaTimingFunctionSpring, duration: 0.25 * durationFactor, removeOnCompletion: false, completion: { _ in
boundsCompleted = true
intermediateCompletion()
})
self.imageNode.clipsToBounds = true
self.contentNode.clipsToBounds = true
if self.sourceHasRoundCorners {
self.imageNode.layer.animate(from: 0.0 as NSNumber, to: (self.imageNode.frame.width / 2.0) as NSNumber, keyPath: "cornerRadius", timingFunction: CAMediaTimingFunctionName.default.rawValue, duration: 0.18 * durationFactor, removeOnCompletion: false)
self.contentNode.layer.animate(from: 0.0 as NSNumber, to: (self.contentNode.frame.width / 2.0) as NSNumber, keyPath: "cornerRadius", timingFunction: CAMediaTimingFunctionName.default.rawValue, duration: 0.18 * durationFactor, removeOnCompletion: false)
}
self.statusNodeContainer.layer.animatePosition(from: self.statusNodeContainer.position, to: CGPoint(x: transformedSuperFrame.midX, y: transformedSuperFrame.midY), duration: 0.25, timingFunction: kCAMediaTimingFunctionSpring, removeOnCompletion: false)

View File

@@ -1013,7 +1013,7 @@ public func settingsController(context: AccountContext, accountManager: AccountM
})
hiddenAvatarRepresentationDisposable.set((galleryController.hiddenMedia |> deliverOnMainQueue).start(next: { entry in
avatarAndNameInfoContext.hiddenAvatarRepresentation = entry?.representations.first?.representation
avatarAndNameInfoContext.hiddenAvatarRepresentation = entry?.representations.last?.representation
updateHiddenAvatarImpl?()
}))
presentControllerImpl?(galleryController, AvatarGalleryControllerPresentationArguments(transitionArguments: { entry in

View File

@@ -467,7 +467,7 @@ private func groupStatsControllerEntries(state: GroupStatsState, data: GroupStat
if !data.topWeekdaysGraph.isEmpty {
entries.append(.topWeekdaysTitle(presentationData.theme, presentationData.strings.Stats_GroupTopWeekdaysTitle))
entries.append(.topWeekdaysGraph(presentationData.theme, presentationData.strings, presentationData.dateTimeFormat, data.topWeekdaysGraph, .area))
entries.append(.topWeekdaysGraph(presentationData.theme, presentationData.strings, presentationData.dateTimeFormat, data.topWeekdaysGraph, .pie))
}
if let peers = peers {

View File

@@ -173,7 +173,7 @@ private final class ChatEmptyNodeNearbyChatContent: ASDisplayNode, ChatEmptyNode
let index = ItemCollectionItemIndex(index: 0, id: 0)
let collectionId = ItemCollectionId(namespace: 0, id: 0)
let stickerPackItem = StickerPackItem(index: index, file: sticker.file, indexKeys: [])
let item = ChatMediaInputStickerGridItem(account: strongSelf.account, collectionId: collectionId, stickerPackInfo: nil, index: ItemCollectionViewEntryIndex(collectionIndex: 0, collectionId: collectionId, itemIndex: index), stickerItem: stickerPackItem, canManagePeerSpecificPack: nil, interfaceInteraction: nil, inputNodeInteraction: inputNodeInteraction, hasAccessory: false, theme: interfaceState.theme, selected: {})
let item = ChatMediaInputStickerGridItem(account: strongSelf.account, collectionId: collectionId, stickerPackInfo: nil, index: ItemCollectionViewEntryIndex(collectionIndex: 0, collectionId: collectionId, itemIndex: index), stickerItem: stickerPackItem, canManagePeerSpecificPack: nil, interfaceInteraction: nil, inputNodeInteraction: inputNodeInteraction, hasAccessory: false, theme: interfaceState.theme, large: true, selected: {})
strongSelf.stickerItem = item
strongSelf.stickerNode.updateLayout(item: item, size: stickerSize, isVisible: true, synchronousLoads: true)
strongSelf.stickerNode.isVisibleInGrid = true

View File

@@ -123,16 +123,18 @@ final class ChatMediaInputStickerGridItem: GridItem {
let interfaceInteraction: ChatControllerInteraction?
let inputNodeInteraction: ChatMediaInputNodeInteraction
let theme: PresentationTheme
let large: Bool
let section: GridSection?
init(account: Account, collectionId: ItemCollectionId, stickerPackInfo: StickerPackCollectionInfo?, index: ItemCollectionViewEntryIndex, stickerItem: StickerPackItem, canManagePeerSpecificPack: Bool?, interfaceInteraction: ChatControllerInteraction?, inputNodeInteraction: ChatMediaInputNodeInteraction, hasAccessory: Bool, theme: PresentationTheme, selected: @escaping () -> Void) {
init(account: Account, collectionId: ItemCollectionId, stickerPackInfo: StickerPackCollectionInfo?, index: ItemCollectionViewEntryIndex, stickerItem: StickerPackItem, canManagePeerSpecificPack: Bool?, interfaceInteraction: ChatControllerInteraction?, inputNodeInteraction: ChatMediaInputNodeInteraction, hasAccessory: Bool, theme: PresentationTheme, large: Bool = false, selected: @escaping () -> Void) {
self.account = account
self.index = index
self.stickerItem = stickerItem
self.interfaceInteraction = interfaceInteraction
self.inputNodeInteraction = inputNodeInteraction
self.theme = theme
self.large = large
self.selected = selected
if collectionId.namespace == ChatMediaInputPanelAuxiliaryNamespace.savedStickers.rawValue {
self.section = nil
@@ -276,7 +278,8 @@ final class ChatMediaInputStickerGridItemNode: GridItemNode {
}
}
let dimensions = item.stickerItem.file.dimensions ?? PixelDimensions(width: 512, height: 512)
self.imageNode.setSignal(chatMessageAnimatedSticker(postbox: item.account.postbox, file: item.stickerItem.file, small: false, size: dimensions.cgSize.aspectFitted(CGSize(width: 160.0, height: 160.0))))
let fittedSize = item.large ? CGSize(width: 384.0, height: 384.0) : CGSize(width: 160.0, height: 160.0)
self.imageNode.setSignal(chatMessageAnimatedSticker(postbox: item.account.postbox, file: item.stickerItem.file, small: false, size: dimensions.cgSize.aspectFitted(fittedSize)))
self.updateVisibility()
self.stickerFetchedDisposable.set(freeMediaFileResourceInteractiveFetched(account: item.account, fileReference: stickerPackFileReference(item.stickerItem.file), resource: item.stickerItem.file.resource).start())
} else {
@@ -287,8 +290,8 @@ final class ChatMediaInputStickerGridItemNode: GridItemNode {
self.imageNode.isHidden = false
self.didSetUpAnimationNode = false
}
self.imageNode.setSignal(chatMessageSticker(account: item.account, file: item.stickerItem.file, small: true, synchronousLoad: synchronousLoads && isVisible))
self.stickerFetchedDisposable.set(freeMediaFileResourceInteractiveFetched(account: item.account, fileReference: stickerPackFileReference(item.stickerItem.file), resource: chatMessageStickerResource(file: item.stickerItem.file, small: true)).start())
self.imageNode.setSignal(chatMessageSticker(account: item.account, file: item.stickerItem.file, small: !item.large, synchronousLoad: synchronousLoads && isVisible))
self.stickerFetchedDisposable.set(freeMediaFileResourceInteractiveFetched(account: item.account, fileReference: stickerPackFileReference(item.stickerItem.file), resource: chatMessageStickerResource(file: item.stickerItem.file, small: !item.large)).start())
}
self.currentState = (item.account, item.stickerItem, dimensions.cgSize)
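With the new large flag (which the empty-chat placeholder now passes), the grid item renders the sticker into a 384×384 box instead of 160×160 and requests the non-small image and resource variants. A small sketch of the size selection; the aspectFitted helper here is an assumption mirroring the CGSize extension used in the project:

import CoreGraphics

// Assumed helper: fits a size into a bounding box, preserving aspect ratio.
func aspectFitted(_ size: CGSize, into boundingSize: CGSize) -> CGSize {
    let scale = min(boundingSize.width / max(size.width, 1.0),
                    boundingSize.height / max(size.height, 1.0))
    return CGSize(width: floor(size.width * scale), height: floor(size.height * scale))
}

let dimensions = CGSize(width: 512.0, height: 512.0)   // default sticker dimensions
let isLarge = true                                      // item.large in the grid item
let boundingSize = isLarge ? CGSize(width: 384.0, height: 384.0)
                           : CGSize(width: 160.0, height: 160.0)
let fittedSize = aspectFitted(dimensions, into: boundingSize)   // 384×384 here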

View File

@@ -15,6 +15,7 @@ import TelegramStringFormatting
import ActivityIndicator
import TelegramUniversalVideoContent
import GalleryUI
import UniversalMediaPlayer
enum PeerInfoHeaderButtonKey: Hashable {
case message
@@ -174,6 +175,13 @@ final class PeerInfoAvatarListItemNode: ASDisplayNode {
let isReady = Promise<Bool>()
private var didSetReady: Bool = false
private var statusPromise = Promise<MediaPlayerStatus?>()
var mediaStatus: Signal<MediaPlayerStatus?, NoError> {
get {
return self.statusPromise.get()
}
}
init(context: AccountContext) {
self.context = context
self.imageNode = TransformImageNode()
@@ -227,6 +235,15 @@ final class PeerInfoAvatarListItemNode: ASDisplayNode {
self.videoNode = videoNode
self.addSubnode(videoNode)
self.statusPromise.set(videoNode.status)
} else if let videoNode = self.videoNode {
self.videoContent = nil
self.videoNode = nil
videoNode.removeFromSupernode()
self.statusPromise.set(.single(nil))
}
}
@@ -268,6 +285,7 @@ final class PeerInfoAvatarListContainerNode: ASDisplayNode {
private var items: [PeerInfoAvatarListItem] = []
private var itemNodes: [WrappedMediaResourceId: PeerInfoAvatarListItemNode] = [:]
private var stripNodes: [ASImageNode] = []
private var stripWidth: CGFloat = 0.0
private let activeStripImage: UIImage
private var appliedStripNodeCurrentIndex: Int?
private var currentIndex: Int = 0
@@ -276,6 +294,7 @@ final class PeerInfoAvatarListContainerNode: ASDisplayNode {
private var validLayout: CGSize?
private let disposable = MetaDisposable()
private let positionDisposable = MetaDisposable()
private var initializedList = false
let isReady = Promise<Bool>()
@@ -297,6 +316,55 @@ final class PeerInfoAvatarListContainerNode: ASDisplayNode {
}
}
private var playerUpdateTimer: SwiftSignalKit.Timer?
private var playerStatus: MediaPlayerStatus? {
didSet {
if self.playerStatus != oldValue {
if let playerStatus = playerStatus, case .playing = playerStatus.status {
self.ensureHasTimer()
} else {
self.stopTimer()
}
self.updateStatus()
}
}
}
private func ensureHasTimer() {
if self.playerUpdateTimer == nil {
let timer = SwiftSignalKit.Timer(timeout: 0.016, repeat: true, completion: { [weak self] in
self?.updateStatus()
}, queue: Queue.mainQueue())
self.playerUpdateTimer = timer
timer.start()
}
}
private func updateStatus() {
var position: CGFloat = 1.0
if let playerStatus = self.playerStatus {
var playerPosition: Double
if !playerStatus.generationTimestamp.isZero, case .playing = playerStatus.status {
playerPosition = playerStatus.timestamp + (CACurrentMediaTime() - playerStatus.generationTimestamp)
} else {
playerPosition = playerStatus.timestamp
}
position = CGFloat(playerPosition / playerStatus.duration)
}
if let appliedStripNodeCurrentIndex = self.appliedStripNodeCurrentIndex {
var frame = self.stripNodes[appliedStripNodeCurrentIndex].frame
frame.size.width = self.stripWidth * position
self.stripNodes[appliedStripNodeCurrentIndex].frame = frame
}
}
private func stopTimer() {
self.playerUpdateTimer?.invalidate()
self.playerUpdateTimer = nil
}
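updateStatus above does not poll the player every frame: the last MediaPlayerStatus carries the playback timestamp together with the generationTimestamp at which it was captured, so while the status is .playing the current position is extrapolated by adding the wall-clock time elapsed since the capture, then divided by the duration to scale the active strip. A small Swift sketch of the same arithmetic; the snapshot type is a stand-in and the clamping is an added assumption:

import QuartzCore // CACurrentMediaTime, CGFloat

// Minimal stand-in for the MediaPlayerStatus fields used above.
struct PlaybackSnapshot {
    let timestamp: Double            // playback position when the status was generated
    let generationTimestamp: Double  // CACurrentMediaTime() at generation
    let duration: Double
    let isPlaying: Bool
}

// Fraction of the clip that has played, extrapolated from the last snapshot.
func playbackFraction(_ status: PlaybackSnapshot, now: Double = CACurrentMediaTime()) -> CGFloat {
    var position = status.timestamp
    if status.isPlaying && status.generationTimestamp != 0.0 {
        position += now - status.generationTimestamp
    }
    guard status.duration > 0.0 else { return 1.0 }
    return CGFloat(max(0.0, min(1.0, position / status.duration)))
}

// Example: a snapshot taken 0.5 s ago at 1.2 s into a 4 s clip gives (1.2 + 0.5) / 4 = 0.425.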
init(context: AccountContext) {
self.context = context
@@ -469,6 +537,7 @@ final class PeerInfoAvatarListContainerNode: ASDisplayNode {
deinit {
self.disposable.dispose()
self.positionDisposable.dispose()
}
override func hitTest(_ point: CGPoint, with event: UIEvent?) -> UIView? {
@@ -679,6 +748,17 @@ final class PeerInfoAvatarListContainerNode: ASDisplayNode {
if self.currentIndex >= 0 && self.currentIndex < self.stripNodes.count {
self.stripNodes[self.currentIndex].alpha = 1.0
}
if let currentItemNode = self.currentItemNode {
self.positionDisposable.set((currentItemNode.mediaStatus
|> deliverOnMainQueue).start(next: { [weak self] status in
if let strongSelf = self {
strongSelf.playerStatus = status
}
}))
} else {
self.positionDisposable.set(nil)
}
}
if hadOneStripNode && self.stripNodes.count > 1 {
self.stripContainerNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.25)
@@ -686,6 +766,7 @@ final class PeerInfoAvatarListContainerNode: ASDisplayNode {
let stripInset: CGFloat = 8.0
let stripSpacing: CGFloat = 4.0
let stripWidth: CGFloat = max(5.0, floor((size.width - stripInset * 2.0 - stripSpacing * CGFloat(self.stripNodes.count - 1)) / CGFloat(self.stripNodes.count)))
self.stripWidth = stripWidth
let currentStripMinX = stripInset + CGFloat(self.currentIndex) * (stripWidth + stripSpacing)
let currentStripMidX = floor(currentStripMinX + stripWidth / 2.0)
let lastStripMaxX = stripInset + CGFloat(self.stripNodes.count - 1) * (stripWidth + stripSpacing) + stripWidth
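The strip layout divides the available width evenly among all indicator segments: with an 8 pt inset on each side and 4 pt spacing between segments, each of the n strips gets floor((width − 16 − 4·(n − 1)) / n) points, never less than 5 pt, and stripWidth is stored so updateStatus above can scale the active segment by the playback fraction. A quick Swift check of that formula, with illustrative values:

import CoreGraphics

func stripWidth(containerWidth: CGFloat, stripCount: Int,
                inset: CGFloat = 8.0, spacing: CGFloat = 4.0) -> CGFloat {
    // Same formula as the layout code above.
    return max(5.0, floor((containerWidth - inset * 2.0 - spacing * CGFloat(stripCount - 1)) / CGFloat(stripCount)))
}

// Example: a 375 pt wide header with 6 avatars →
// floor((375 − 16 − 20) / 6) = floor(56.5) = 56 pt per strip.
let width = stripWidth(containerWidth: 375.0, stripCount: 6)   // 56.0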