From cba1a833afc4f1cc6cc6395828fa6e49f2afd293 Mon Sep 17 00:00:00 2001
From: Ilya Laktyushin
Date: Fri, 21 May 2021 22:03:29 +0400
Subject: [PATCH 1/2] Video Chat Improvements

---
 submodules/AudioBlob/Sources/BlobView.swift  |  12 +-
 .../Sources/VoiceChatController.swift        |   9 +-
 .../Sources/VoiceChatMainStageNode.swift     | 141 +++++++++++++++++-
 .../Sources/VoiceChatTileGridNode.swift      |   3 +-
 .../Sources/VoiceChatTileItemNode.swift      | 121 ++++++++++++++-
 5 files changed, 273 insertions(+), 13 deletions(-)

diff --git a/submodules/AudioBlob/Sources/BlobView.swift b/submodules/AudioBlob/Sources/BlobView.swift
index 7296b045b8..27d6080366 100644
--- a/submodules/AudioBlob/Sources/BlobView.swift
+++ b/submodules/AudioBlob/Sources/BlobView.swift
@@ -13,7 +13,7 @@ public final class VoiceBlobView: UIView, TGModernConversationInputMicButtonDeco
     private var displayLinkAnimator: ConstantDisplayLinkAnimator?
 
     private var audioLevel: CGFloat = 0
-    private var presentationAudioLevel: CGFloat = 0
+    public var presentationAudioLevel: CGFloat = 0
 
     private(set) var isAnimating = false
 
@@ -104,11 +104,17 @@ public final class VoiceBlobView: UIView, TGModernConversationInputMicButtonDeco
     }
 
     public func startAnimating() {
+        self.startAnimating(immediately: false)
+    }
+
+    public func startAnimating(immediately: Bool = false) {
         guard !isAnimating else { return }
         isAnimating = true
 
-        mediumBlob.layer.animateScale(from: 0.75, to: 1, duration: 0.35, removeOnCompletion: false)
-        bigBlob.layer.animateScale(from: 0.75, to: 1, duration: 0.35, removeOnCompletion: false)
+        if !immediately {
+            mediumBlob.layer.animateScale(from: 0.75, to: 1, duration: 0.35, removeOnCompletion: false)
+            bigBlob.layer.animateScale(from: 0.75, to: 1, duration: 0.35, removeOnCompletion: false)
+        }
 
         updateBlobsState()
 
diff --git a/submodules/TelegramCallsUI/Sources/VoiceChatController.swift b/submodules/TelegramCallsUI/Sources/VoiceChatController.swift
index de2d12743f..f7343e3bfc 100644
--- a/submodules/TelegramCallsUI/Sources/VoiceChatController.swift
+++ b/submodules/TelegramCallsUI/Sources/VoiceChatController.swift
@@ -3194,7 +3194,10 @@ public final class VoiceChatController: ViewController {
             let sideInset: CGFloat = 14.0
 
             let bottomPanelCoverHeight = bottomAreaHeight + layout.intrinsicInsets.bottom
-            let bottomGradientFrame = CGRect(origin: CGPoint(x: 0.0, y: layout.size.height - bottomPanelCoverHeight), size: CGSize(width: size.width, height: bottomGradientHeight))
+            var bottomGradientFrame = CGRect(origin: CGPoint(x: 0.0, y: layout.size.height - bottomPanelCoverHeight), size: CGSize(width: size.width, height: bottomGradientHeight))
+            if isLandscape {
+                bottomGradientFrame.origin.y = layout.size.height
+            }
 
             let transitionContainerFrame = CGRect(x: 0.0, y: 0.0, width: layout.size.width, height: layout.size.height)
             transition.updateFrame(node: self.transitionContainerNode, frame: transitionContainerFrame)
@@ -3203,7 +3206,7 @@
                 transition.updateFrame(layer: self.transitionMaskTopFillLayer, frame: CGRect(x: 0.0, y: 0.0, width: transitionContainerFrame.width, height: topPanelFrame.maxY))
                 transition.updateFrame(layer: self.transitionMaskFillLayer, frame: CGRect(x: 0.0, y: topPanelFrame.maxY, width: transitionContainerFrame.width, height: bottomGradientFrame.minY - topPanelFrame.maxY))
                 transition.updateFrame(layer: self.transitionMaskGradientLayer, frame: CGRect(x: 0.0, y: bottomGradientFrame.minY, width: transitionContainerFrame.width, height: bottomGradientFrame.height))
-                transition.updateFrame(layer: self.transitionMaskBottomFillLayer, frame: CGRect(x: 0.0, y: bottomGradientFrame.minY, width: transitionContainerFrame.width, height: transitionContainerFrame.height - bottomGradientFrame.minY))
+                transition.updateFrame(layer: self.transitionMaskBottomFillLayer, frame: CGRect(x: 0.0, y: bottomGradientFrame.minY, width: transitionContainerFrame.width, height: max(0.0, transitionContainerFrame.height - bottomGradientFrame.minY)))
             }
             if transition.isAnimated {
                 updateMaskLayers()
@@ -3214,11 +3217,9 @@
                 CATransaction.commit()
             }
 
-            var isFullscreen = false
             var bottomInset: CGFloat = 0.0
             var bottomEdgeInset: CGFloat = 0.0
             if case let .fullscreen(controlsHidden) = self.effectiveDisplayMode {
-                isFullscreen = true
                 if !controlsHidden {
                     bottomInset = 80.0
                 }
diff --git a/submodules/TelegramCallsUI/Sources/VoiceChatMainStageNode.swift b/submodules/TelegramCallsUI/Sources/VoiceChatMainStageNode.swift
index 80d3e9e2b6..2ebd27d0c1 100644
--- a/submodules/TelegramCallsUI/Sources/VoiceChatMainStageNode.swift
+++ b/submodules/TelegramCallsUI/Sources/VoiceChatMainStageNode.swift
@@ -399,7 +399,7 @@ final class VoiceChatMainStageNode: ASDisplayNode {
                 strongSelf.speakingContainerNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.3)
                 strongSelf.speakingContainerNode.layer.animateScale(from: 0.01, to: 1.0, duration: 0.3, timingFunction: kCAMediaTimingFunctionSpring)
 
-                let blobFrame = strongSelf.speakingAvatarNode.frame.insetBy(dx: -7.0, dy: -7.0)
+                let blobFrame = strongSelf.speakingAvatarNode.frame.insetBy(dx: -10.0, dy: -10.0)
                 strongSelf.speakingAudioLevelDisposable.set((getAudioLevel(peerId)
                 |> deliverOnMainQueue).start(next: { [weak self] value in
                     guard let strongSelf = self else { return }
@@ -427,7 +427,7 @@
 
                         let avatarScale: CGFloat
                         if value > 0.02 {
-                            audioLevelView.startAnimating()
+                            audioLevelView.startAnimating(immediately: true)
                             avatarScale = 1.03 + level * 0.13
                             audioLevelView.setColor(wavesColor, animated: true)
                         } else {
@@ -528,7 +528,7 @@
 
                     let avatarScale: CGFloat
                     if value > 0.02 {
-                        audioLevelView.startAnimating()
+                        audioLevelView.startAnimating(immediately: true)
                         avatarScale = 1.03 + level * 0.13
                         audioLevelView.setColor(wavesColor, animated: true)
 
@@ -744,7 +744,7 @@
         self.speakingEffectView?.frame = CGRect(origin: CGPoint(), size: speakingContainerSize)
         self.speakingAvatarNode.frame = CGRect(origin: CGPoint(x: 4.0, y: 4.0), size: speakingAvatarSize)
         self.speakingTitleNode.frame = CGRect(origin: CGPoint(x: 4.0 + speakingAvatarSize.width + 14.0, y: floorToScreenPixels((38.0 - speakingTitleSize.height) / 2.0)), size: speakingTitleSize)
-        transition.updateFrame(node: self.speakingContainerNode, frame: CGRect(origin: CGPoint(x: floorToScreenPixels((size.width - speakingContainerSize.width) / 2.0), y: size.height - bottomInset - speakingContainerSize.height - 44.0), size: speakingContainerSize))
+        transition.updateFrame(node: self.speakingContainerNode, frame: CGRect(origin: CGPoint(x: floorToScreenPixels((size.width - speakingContainerSize.width) / 2.0), y: 46.0), size: speakingContainerSize))
     }
 
     func flipVideoIfNeeded() {
@@ -754,3 +754,136 @@
         self.currentVideoNode?.flip(withBackground: false)
     }
 }
+
+private let blue = UIColor(rgb: 0x007fff)
+private let lightBlue = UIColor(rgb: 0x00affe)
+private let green = UIColor(rgb: 0x33c659)
+private let activeBlue = UIColor(rgb: 0x00a0b9)
+private let purple = UIColor(rgb: 0x3252ef)
+private let pink = UIColor(rgb: 0xef436c)
+
+class VoiceChatBlobNode: ASDisplayNode {
+    enum Gradient {
+        case speaking
+        case active
+        case connecting
+        case muted
+    }
+    private let size: CGSize
+
+    private let blobView: VoiceBlobView
+    private let foregroundGradientLayer = CAGradientLayer()
+
+    private let hierarchyTrackingNode: HierarchyTrackingNode
+    private var isCurrentlyInHierarchy = false
+
+    init(size: CGSize) {
+        self.size = size
+        self.blobView = VoiceBlobView(
+            frame: CGRect(origin: CGPoint(), size: size),
+            maxLevel: 1.5,
+            smallBlobRange: (0, 0),
+            mediumBlobRange: (0.69, 0.87),
+            bigBlobRange: (0.71, 1.0)
+        )
+        self.blobView.setColor(.white)
+
+        self.foregroundGradientLayer.type = .radial
+        self.foregroundGradientLayer.colors = [lightBlue.cgColor, blue.cgColor, blue.cgColor]
+        self.foregroundGradientLayer.locations = [0.0, 0.55, 1.0]
+        self.foregroundGradientLayer.startPoint = CGPoint(x: 1.0, y: 0.0)
+        self.foregroundGradientLayer.endPoint = CGPoint(x: 0.0, y: 1.0)
+
+        var updateInHierarchy: ((Bool) -> Void)?
+        self.hierarchyTrackingNode = HierarchyTrackingNode({ value in
+            updateInHierarchy?(value)
+        })
+
+        super.init()
+
+        updateInHierarchy = { [weak self] value in
+            if let strongSelf = self {
+                strongSelf.isCurrentlyInHierarchy = value
+                strongSelf.updateAnimations()
+            }
+        }
+    }
+
+    override func didLoad() {
+        super.didLoad()
+
+        self.view.mask = self.blobView
+        self.layer.addSublayer(self.foregroundGradientLayer)
+    }
+
+    func updateAnimations() {
+        if !self.isCurrentlyInHierarchy {
+            self.foregroundGradientLayer.removeAllAnimations()
+            self.blobView.stopAnimating()
+            return
+        }
+        self.setupGradientAnimations()
+        self.blobView.startAnimating(immediately: true)
+    }
+
+    func updateLevel(_ level: CGFloat) {
+        self.blobView.updateLevel(level)
+    }
+
+    private func setupGradientAnimations() {
+        if let _ = self.foregroundGradientLayer.animation(forKey: "movement") {
+        } else {
+            let previousValue = self.foregroundGradientLayer.startPoint
+            let newValue: CGPoint
+            if self.blobView.presentationAudioLevel > 0.22 {
+                newValue = CGPoint(x: CGFloat.random(in: 0.9 ..< 1.0), y: CGFloat.random(in: 0.15 ..< 0.35))
+            } else if self.blobView.presentationAudioLevel > 0.01 {
+                newValue = CGPoint(x: CGFloat.random(in: 0.57 ..< 0.85), y: CGFloat.random(in: 0.15 ..< 0.45))
+            } else {
+                newValue = CGPoint(x: CGFloat.random(in: 0.6 ..< 0.75), y: CGFloat.random(in: 0.25 ..< 0.45))
+            }
+            self.foregroundGradientLayer.startPoint = newValue
+
+            CATransaction.begin()
+
+            let animation = CABasicAnimation(keyPath: "startPoint")
+            animation.duration = Double.random(in: 0.8 ..< 1.4)
+            animation.fromValue = previousValue
+            animation.toValue = newValue
+
+            CATransaction.setCompletionBlock { [weak self] in
+                if let isCurrentlyInHierarchy = self?.isCurrentlyInHierarchy, isCurrentlyInHierarchy {
+                    self?.setupGradientAnimations()
+                }
+            }
+
+            self.foregroundGradientLayer.add(animation, forKey: "movement")
+            CATransaction.commit()
+        }
+    }
+
+    func updateGlowAndGradientAnimations(type: Gradient, animated: Bool = true) {
+        let initialColors = self.foregroundGradientLayer.colors
+        let targetColors: [CGColor]
+        switch type {
+            case .speaking:
+                targetColors = [activeBlue.cgColor, green.cgColor, green.cgColor]
+            case .active:
+                targetColors = [lightBlue.cgColor, blue.cgColor, blue.cgColor]
+            case .connecting:
+                targetColors = [lightBlue.cgColor, blue.cgColor, blue.cgColor]
+            case .muted:
+                targetColors = [pink.cgColor, purple.cgColor, purple.cgColor]
+        }
+        self.foregroundGradientLayer.colors = targetColors
+        if animated {
+            self.foregroundGradientLayer.animate(from: initialColors as AnyObject, to: targetColors as AnyObject, keyPath: "colors", timingFunction: CAMediaTimingFunctionName.linear.rawValue, duration: 0.3)
+        }
+    }
+
+    override func layout() {
+        super.layout()
+
+        self.blobView.frame = CGRect(x: 0.0, y: 0.0, width: self.bounds.width, height: self.bounds.height)
+    }
+}
diff --git a/submodules/TelegramCallsUI/Sources/VoiceChatTileGridNode.swift b/submodules/TelegramCallsUI/Sources/VoiceChatTileGridNode.swift
index cd128944c7..073174dd6c 100644
--- a/submodules/TelegramCallsUI/Sources/VoiceChatTileGridNode.swift
+++ b/submodules/TelegramCallsUI/Sources/VoiceChatTileGridNode.swift
@@ -213,10 +213,11 @@ final class VoiceChatTilesGridItemNode: ListViewItemNode {
                 if currentItem == nil {
                     tileGridNode.frame = CGRect(x: params.leftInset, y: 0.0, width: tileGridSize.width, height: 0.0)
                     strongSelf.backgroundNode.frame = tileGridNode.frame
-                    strongSelf.cornersNode.frame = CGRect(x: 14.0, y: 0.0, width: tileGridSize.width, height: 50.0)
+                    strongSelf.cornersNode.frame = CGRect(x: params.leftInset, y: layout.size.height, width: tileGridSize.width, height: 50.0)
                 } else {
                     transition.updateFrame(node: tileGridNode, frame: CGRect(origin: CGPoint(x: params.leftInset, y: 0.0), size: tileGridSize))
                     transition.updateFrame(node: strongSelf.backgroundNode, frame: CGRect(origin: CGPoint(x: params.leftInset, y: 0.0), size: tileGridSize))
+                    strongSelf.cornersNode.frame = CGRect(x: params.leftInset, y: layout.size.height, width: tileGridSize.width, height: 50.0)
                 }
             }
         })
diff --git a/submodules/TelegramCallsUI/Sources/VoiceChatTileItemNode.swift b/submodules/TelegramCallsUI/Sources/VoiceChatTileItemNode.swift
index 77da053407..56773baa86 100644
--- a/submodules/TelegramCallsUI/Sources/VoiceChatTileItemNode.swift
+++ b/submodules/TelegramCallsUI/Sources/VoiceChatTileItemNode.swift
@@ -450,6 +450,125 @@ final class VoiceChatTileItemNode: ASDisplayNode {
     }
 }
 
-private class VoiceChatTileHighlightNode: ASDisplayNode {
+private let blue = UIColor(rgb: 0x007fff)
+private let lightBlue = UIColor(rgb: 0x00affe)
+private let green = UIColor(rgb: 0x33c659)
+private let activeBlue = UIColor(rgb: 0x00a0b9)
+private let purple = UIColor(rgb: 0x3252ef)
+private let pink = UIColor(rgb: 0xef436c)
+
+class VoiceChatTileHighlightNode: ASDisplayNode {
+    enum Gradient {
+        case speaking
+        case active
+        case connecting
+        case muted
+    }
+    private let maskView: UIView
+    private let maskLayer = CAShapeLayer()
+
+    private let foregroundGradientLayer = CAGradientLayer()
+
+    private let hierarchyTrackingNode: HierarchyTrackingNode
+    private var isCurrentlyInHierarchy = false
+
+    private var audioLevel: CGFloat = 0.0
+    private var presentationAudioLevel: CGFloat = 0.0
+
+    private var displayLinkAnimator: ConstantDisplayLinkAnimator?
+
+    override init() {
+        self.maskView = UIView()
+        self.maskView.layer.addSublayer(self.maskLayer)
+
+        var updateInHierarchy: ((Bool) -> Void)?
+        self.hierarchyTrackingNode = HierarchyTrackingNode({ value in
+            updateInHierarchy?(value)
+        })
+
+        super.init()
+
+        updateInHierarchy = { [weak self] value in
+            if let strongSelf = self {
+                strongSelf.isCurrentlyInHierarchy = value
+                strongSelf.updateAnimations()
+            }
+        }
+
+        displayLinkAnimator = ConstantDisplayLinkAnimator() { [weak self] in
+            guard let strongSelf = self else { return }
+
+            strongSelf.presentationAudioLevel = strongSelf.presentationAudioLevel * 0.9 + strongSelf.audioLevel * 0.1
+        }
+    }
+
+    override func didLoad() {
+        super.didLoad()
+
+        self.view.mask = self.maskView
+    }
+
+    func updateAnimations() {
+        if !self.isCurrentlyInHierarchy {
+            self.foregroundGradientLayer.removeAllAnimations()
+            return
+        }
+        self.setupGradientAnimations()
+    }
+
+    func updateLevel(_ level: CGFloat) {
+        self.audioLevel = level
+    }
+
+    private func setupGradientAnimations() {
+        if let _ = self.foregroundGradientLayer.animation(forKey: "movement") {
+        } else {
+            let previousValue = self.foregroundGradientLayer.startPoint
+            let newValue: CGPoint
+            if self.presentationAudioLevel > 0.22 {
+                newValue = CGPoint(x: CGFloat.random(in: 0.9 ..< 1.0), y: CGFloat.random(in: 0.15 ..< 0.35))
+            } else if self.presentationAudioLevel > 0.01 {
+                newValue = CGPoint(x: CGFloat.random(in: 0.57 ..< 0.85), y: CGFloat.random(in: 0.15 ..< 0.45))
+            } else {
+                newValue = CGPoint(x: CGFloat.random(in: 0.6 ..< 0.75), y: CGFloat.random(in: 0.25 ..< 0.45))
+            }
+            self.foregroundGradientLayer.startPoint = newValue
+
+            CATransaction.begin()
+
+            let animation = CABasicAnimation(keyPath: "startPoint")
+            animation.duration = Double.random(in: 0.8 ..< 1.4)
+            animation.fromValue = previousValue
+            animation.toValue = newValue
+
+            CATransaction.setCompletionBlock { [weak self] in
+                if let isCurrentlyInHierarchy = self?.isCurrentlyInHierarchy, isCurrentlyInHierarchy {
+                    self?.setupGradientAnimations()
+                }
+            }
+
+            self.foregroundGradientLayer.add(animation, forKey: "movement")
+            CATransaction.commit()
+        }
+    }
+
+    func updateGlowAndGradientAnimations(type: Gradient, animated: Bool = true) {
+        let initialColors = self.foregroundGradientLayer.colors
+        let targetColors: [CGColor]
+        switch type {
+            case .speaking:
+                targetColors = [activeBlue.cgColor, green.cgColor, green.cgColor]
+            case .active:
+                targetColors = [lightBlue.cgColor, blue.cgColor, blue.cgColor]
+            case .connecting:
+                targetColors = [lightBlue.cgColor, blue.cgColor, blue.cgColor]
+            case .muted:
+                targetColors = [pink.cgColor, purple.cgColor, purple.cgColor]
+        }
+        self.foregroundGradientLayer.colors = targetColors
+        if animated {
+            self.foregroundGradientLayer.animate(from: initialColors as AnyObject, to: targetColors as AnyObject, keyPath: "colors", timingFunction: CAMediaTimingFunctionName.linear.rawValue, duration: 0.3)
+        }
+    }
 }

From 039228439dd0e8fa1dcddad20f55157b7daa2209 Mon Sep 17 00:00:00 2001
From: Ilya Laktyushin
Date: Fri, 21 May 2021 22:19:16 +0400
Subject: [PATCH 2/2] Fix animated emoji blinking after playing

---
 .../Sources/ChatMessageAnimatedStickerItemNode.swift | 8 +++++---
 1 file changed, 5 insertions(+), 3 deletions(-)

diff --git a/submodules/TelegramUI/Sources/ChatMessageAnimatedStickerItemNode.swift b/submodules/TelegramUI/Sources/ChatMessageAnimatedStickerItemNode.swift
index 2ccf6bcf25..bbc72d23c8 100644
--- a/submodules/TelegramUI/Sources/ChatMessageAnimatedStickerItemNode.swift
+++ b/submodules/TelegramUI/Sources/ChatMessageAnimatedStickerItemNode.swift
@@ -302,10 +302,10 @@ class ChatMessageAnimatedStickerItemNode: ChatMessageItemView {
     }
 
     private func removePlaceholder(animated: Bool) {
+        self.placeholderNode.alpha = 0.0
         if !animated {
             self.placeholderNode.removeFromSupernode()
         } else {
-            self.placeholderNode.alpha = 0.0
             self.placeholderNode.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2, completion: { [weak self] _ in
                 self?.placeholderNode.removeFromSupernode()
             })
@@ -425,8 +425,10 @@ class ChatMessageAnimatedStickerItemNode: ChatMessageItemView {
                 if !strongSelf.enableSynchronousImageApply {
                     let current = CACurrentMediaTime()
                     if let setupTimestamp = strongSelf.setupTimestamp, current - setupTimestamp > 0.3 {
-                        strongSelf.animationNode?.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2)
-                        strongSelf.removePlaceholder(animated: true)
+                        if !strongSelf.placeholderNode.alpha.isZero {
+                            strongSelf.animationNode?.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2)
+                            strongSelf.removePlaceholder(animated: true)
+                        }
                     } else {
                         strongSelf.removePlaceholder(animated: false)
                     }