From 29df2fd22915a8c44c324518c211e6ad811ef0b6 Mon Sep 17 00:00:00 2001 From: Ilya Laktyushin Date: Sat, 29 May 2021 14:35:12 +0400 Subject: [PATCH 1/5] Video Chat Improvements --- submodules/AudioBlob/Sources/BlobView.swift | 7 ++ .../Sources/VoiceChatActionButton.swift | 13 ++-- .../VoiceChatCameraPreviewController.swift | 4 +- .../Sources/VoiceChatController.swift | 65 +++++++++--------- .../Sources/VoiceChatMainStageNode.swift | 31 +++++++-- .../Sources/VoiceChatTileItemNode.swift | 68 ++++++++++++++++--- 6 files changed, 136 insertions(+), 52 deletions(-) diff --git a/submodules/AudioBlob/Sources/BlobView.swift b/submodules/AudioBlob/Sources/BlobView.swift index 00456d0b53..7eaaa19b9e 100644 --- a/submodules/AudioBlob/Sources/BlobView.swift +++ b/submodules/AudioBlob/Sources/BlobView.swift @@ -94,6 +94,10 @@ public final class VoiceBlobView: UIView, TGModernConversationInputMicButtonDeco } public func updateLevel(_ level: CGFloat) { + self.updateLevel(level, immediately: false) + } + + public func updateLevel(_ level: CGFloat, immediately: Bool = false) { let normalizedLevel = min(1, max(level / maxLevel, 0)) smallBlob.updateSpeedLevel(to: normalizedLevel) @@ -101,6 +105,9 @@ public final class VoiceBlobView: UIView, TGModernConversationInputMicButtonDeco bigBlob.updateSpeedLevel(to: normalizedLevel) audioLevel = normalizedLevel + if immediately { + presentationAudioLevel = normalizedLevel + } } public func startAnimating() { diff --git a/submodules/TelegramCallsUI/Sources/VoiceChatActionButton.swift b/submodules/TelegramCallsUI/Sources/VoiceChatActionButton.swift index 1ef137b6db..6f7e8bdd14 100644 --- a/submodules/TelegramCallsUI/Sources/VoiceChatActionButton.swift +++ b/submodules/TelegramCallsUI/Sources/VoiceChatActionButton.swift @@ -206,7 +206,7 @@ final class VoiceChatActionButton: HighlightTrackingButtonNode { self.activeDisposable.dispose() } - func updateLevel(_ level: CGFloat) { + func updateLevel(_ level: CGFloat, immediately: Bool = false) { self.backgroundNode.audioLevel = level } @@ -545,10 +545,12 @@ private final class VoiceChatActionButtonBackgroundNode: ASDisplayNode { var audioLevel: CGFloat = 0.0 { didSet { - self.maskBlobView.updateLevel(audioLevel) + self.maskBlobView.updateLevel(self.audioLevel, immediately: false) } } + + var updatedActive: ((Bool) -> Void)? var updatedColors: ((UIColor?, UIColor?) -> Void)? 
@@ -1299,13 +1301,16 @@ private final class VoiceBlobView: UIView { bigBlob.setColor(color.withAlphaComponent(0.21)) } - public func updateLevel(_ level: CGFloat) { + public func updateLevel(_ level: CGFloat, immediately: Bool) { let normalizedLevel = min(1, max(level / maxLevel, 0)) mediumBlob.updateSpeedLevel(to: normalizedLevel) bigBlob.updateSpeedLevel(to: normalizedLevel) audioLevel = normalizedLevel + if immediately { + presentationAudioLevel = normalizedLevel + } } public func startAnimating() { @@ -1450,7 +1455,7 @@ final class BlobView: UIView { let animation = CABasicAnimation(keyPath: "path") let previousPath = self.shapeLayer.path self.shapeLayer.path = nextPath - animation.duration = CFTimeInterval(1 / (minSpeed + (maxSpeed - minSpeed) * speedLevel)) + animation.duration = CFTimeInterval(1.0 / (minSpeed + (maxSpeed - minSpeed) * speedLevel)) animation.timingFunction = CAMediaTimingFunction(name: .easeInEaseOut) animation.fromValue = previousPath animation.toValue = nextPath diff --git a/submodules/TelegramCallsUI/Sources/VoiceChatCameraPreviewController.swift b/submodules/TelegramCallsUI/Sources/VoiceChatCameraPreviewController.swift index 970be25abe..33e65761c2 100644 --- a/submodules/TelegramCallsUI/Sources/VoiceChatCameraPreviewController.swift +++ b/submodules/TelegramCallsUI/Sources/VoiceChatCameraPreviewController.swift @@ -13,6 +13,8 @@ import PresentationDataUtils import UIKitRuntimeUtils import ReplayKit +private let accentColor: UIColor = UIColor(rgb: 0x007aff) + final class VoiceChatCameraPreviewController: ViewController { private var controllerNode: VoiceChatCameraPreviewControllerNode { return self.displayNode as! VoiceChatCameraPreviewControllerNode @@ -183,7 +185,7 @@ private class VoiceChatCameraPreviewControllerNode: ViewControllerTracingNode, U self.titleNode = ASTextNode() self.titleNode.attributedText = NSAttributedString(string: title, font: Font.bold(17.0), textColor: textColor) - self.cameraButton = SolidRoundedButtonNode(theme: SolidRoundedButtonTheme(theme: self.presentationData.theme), font: .bold, height: 52.0, cornerRadius: 11.0, gloss: false) + self.cameraButton = SolidRoundedButtonNode(theme: SolidRoundedButtonTheme(backgroundColor: accentColor, foregroundColor: .white), font: .bold, height: 52.0, cornerRadius: 11.0, gloss: false) self.cameraButton.title = self.presentationData.strings.VoiceChat_VideoPreviewShareCamera self.screenButton = SolidRoundedButtonNode(theme: SolidRoundedButtonTheme(backgroundColor: buttonColor, foregroundColor: buttonTextColor), font: .bold, height: 52.0, cornerRadius: 11.0, gloss: false) diff --git a/submodules/TelegramCallsUI/Sources/VoiceChatController.swift b/submodules/TelegramCallsUI/Sources/VoiceChatController.swift index 584cd52c68..df06634835 100644 --- a/submodules/TelegramCallsUI/Sources/VoiceChatController.swift +++ b/submodules/TelegramCallsUI/Sources/VoiceChatController.swift @@ -87,6 +87,8 @@ func decorationTopCornersImage(dark: Bool) -> UIImage? { })?.stretchableImage(withLeftCapWidth: 25, topCapHeight: 32) } + + func decorationBottomCornersImage(dark: Bool) -> UIImage? { return generateImage(CGSize(width: 50.0, height: 110.0), rotatedContext: { (size, context) in let bounds = CGRect(origin: CGPoint(), size: size) @@ -414,7 +416,7 @@ public final class VoiceChatController: ViewController { } } - func tileItem(context: AccountContext, presentationData: PresentationData, interaction: Interaction, videoEndpointId: String, videoReady: Bool) -> VoiceChatTileItem? 
{ + func tileItem(context: AccountContext, presentationData: PresentationData, interaction: Interaction, videoEndpointId: String, videoReady: Bool, showAsPresentation: Bool) -> VoiceChatTileItem? { guard case let .peer(peerEntry, _) = self else { return nil } @@ -485,7 +487,7 @@ public final class VoiceChatController: ViewController { text = .text(about, textIcon, .generic) } - return VoiceChatTileItem(account: context.account, peer: peerEntry.peer, videoEndpointId: videoEndpointId, videoReady: videoReady, strings: presentationData.strings, nameDisplayOrder: presentationData.nameDisplayOrder, speaking: speaking, icon: icon, text: text, additionalText: additionalText, action: { + return VoiceChatTileItem(account: context.account, peer: peerEntry.peer, videoEndpointId: videoEndpointId, videoReady: videoReady, strings: presentationData.strings, nameDisplayOrder: presentationData.nameDisplayOrder, speaking: speaking, icon: showAsPresentation ? .presentation : icon, text: text, additionalText: additionalText, action: { interaction.switchToPeer(peer.id, videoEndpointId, true) }, contextAction: { node, gesture in interaction.peerContextAction(peerEntry, node, gesture) @@ -786,6 +788,7 @@ public final class VoiceChatController: ViewController { private var animatingExpansion = false private var animatingAppearance = false private var animatingButtonsSwap = false + private var animatingMainStage = false private var panGestureArguments: (topInset: CGFloat, offset: CGFloat)? private var isPanning = false @@ -865,8 +868,7 @@ public final class VoiceChatController: ViewController { private var timeoutedEndpointIds = Set() private var readyVideoDisposables = DisposableDict() - private var endpointToPeerId: [String: PeerId] = [:] - private var peerIdToEndpoint: [PeerId: String] = [:] + private var peerIdToEndpointId: [PeerId: String] = [:] private var currentSpeakers: [PeerId] = [] private var currentDominantSpeaker: (PeerId, String?, Double)? @@ -1114,7 +1116,7 @@ public final class VoiceChatController: ViewController { }, switchToPeer: { [weak self] peerId, videoEndpointId, expand in if let strongSelf = self { if expand, let videoEndpointId = videoEndpointId { - strongSelf.currentDominantSpeaker = (peerId, videoEndpointId, CACurrentMediaTime()) + strongSelf.currentDominantSpeaker = (peerId, videoEndpointId, CACurrentMediaTime() + 3.0) strongSelf.updateDisplayMode(.fullscreen(controlsHidden: false)) } else { strongSelf.currentForcedSpeaker = nil @@ -1883,7 +1885,7 @@ public final class VoiceChatController: ViewController { var maxLevelWithVideo: (PeerId, Float)? 
for (peerId, source, level, hasSpeech) in levels { - let hasVideo = strongSelf.peerIdToEndpoint[peerId] != nil + let hasVideo = strongSelf.peerIdToEndpointId[peerId] != nil if hasSpeech && source != 0 && hasVideo { if let (_, currentLevel) = maxLevelWithVideo { if currentLevel < level { @@ -1898,7 +1900,7 @@ public final class VoiceChatController: ViewController { if maxLevelWithVideo == nil { if let (peerId, _, _) = strongSelf.currentDominantSpeaker { maxLevelWithVideo = (peerId, 0.0) - } else if strongSelf.peerIdToEndpoint.count > 0 { + } else if strongSelf.peerIdToEndpointId.count > 0 { for entry in strongSelf.currentFullscreenEntries { if case let .peer(peerEntry, _) = entry { if let _ = peerEntry.effectiveVideoEndpointId { @@ -2111,7 +2113,7 @@ public final class VoiceChatController: ViewController { } self.mainStageNode.back = { [weak self] in - if let strongSelf = self { + if let strongSelf = self, !strongSelf.isPanning && !strongSelf.animatingExpansion && !strongSelf.mainStageNode.animating { strongSelf.currentForcedSpeaker = nil strongSelf.updateDisplayMode(.modal(isExpanded: true, isFilled: true), fromPan: true) strongSelf.effectiveSpeaker = nil @@ -3489,7 +3491,7 @@ public final class VoiceChatController: ViewController { let listMaxY = listTopInset + listSize.height let bottomOffset = min(0.0, bottomEdge - listMaxY) + layout.size.height - bottomPanelHeight - let bottomCornersFrame = CGRect(origin: CGPoint(x: sideInset + floorToScreenPixels((size.width - contentWidth) / 2.0), y: -50.0 + bottomOffset + bottomGradientHeight), size: CGSize(width: contentWidth - sideInset * 2.0, height: 50.0 + 40.0)) + let bottomCornersFrame = CGRect(origin: CGPoint(x: sideInset + floorToScreenPixels((size.width - contentWidth) / 2.0), y: -50.0 + bottomOffset + bottomGradientHeight), size: CGSize(width: contentWidth - sideInset * 2.0, height: 50.0 + 60.0)) let previousBottomCornersFrame = self.bottomCornersNode.frame if !bottomCornersFrame.equalTo(previousBottomCornersFrame) { self.bottomCornersNode.frame = bottomCornersFrame @@ -4362,11 +4364,11 @@ public final class VoiceChatController: ViewController { }) } - private func updateMembers(maybeUpdateVideo: Bool = true) { - self.updateMembers(muteState: self.effectiveMuteState, callMembers: self.currentCallMembers ?? ([], nil), invitedPeers: self.currentInvitedPeers ?? [], speakingPeers: self.currentSpeakingPeers ?? Set(), maybeUpdateVideo: maybeUpdateVideo) + private func updateMembers(maybeUpdateVideo: Bool = true, force: Bool = false) { + self.updateMembers(muteState: self.effectiveMuteState, callMembers: self.currentCallMembers ?? ([], nil), invitedPeers: self.currentInvitedPeers ?? [], speakingPeers: self.currentSpeakingPeers ?? 
Set(), maybeUpdateVideo: maybeUpdateVideo, force: force) } - private func updateMembers(muteState: GroupCallParticipantsContext.Participant.MuteState?, callMembers: ([GroupCallParticipantsContext.Participant], String?), invitedPeers: [Peer], speakingPeers: Set, maybeUpdateVideo: Bool = true) { + private func updateMembers(muteState: GroupCallParticipantsContext.Participant.MuteState?, callMembers: ([GroupCallParticipantsContext.Participant], String?), invitedPeers: [Peer], speakingPeers: Set, maybeUpdateVideo: Bool = true, force: Bool = false) { var disableAnimation = false if self.currentCallMembers?.1 != callMembers.1 { disableAnimation = true @@ -4384,7 +4386,7 @@ public final class VoiceChatController: ViewController { var processedPeerIds = Set() var processedFullscreenPeerIds = Set() - var endpointIdToPeerId: [String: PeerId] = [:] + var peerIdToCameraEndpointId: [PeerId: String] = [:] var peerIdToEndpointId: [PeerId: String] = [:] var requestedVideoChannels: [PresentationGroupCallRequestedVideo] = [] @@ -4446,10 +4448,7 @@ public final class VoiceChatController: ViewController { } if let videoEndpointId = member.videoEndpointId { - endpointIdToPeerId[videoEndpointId] = member.peer.id - } - if let presentationEndpointId = member.presentationEndpointId { - endpointIdToPeerId[presentationEndpointId] = member.peer.id + peerIdToCameraEndpointId[member.peer.id] = videoEndpointId } if let anyEndpointId = member.presentationEndpointId ?? member.videoEndpointId { peerIdToEndpointId[member.peer.id] = anyEndpointId @@ -4485,7 +4484,7 @@ public final class VoiceChatController: ViewController { self.videoOrder.append(videoEndpointId) } } - if let tileItem = ListEntry.peer(peerEntry, 0).tileItem(context: self.context, presentationData: self.presentationData, interaction: interaction, videoEndpointId: videoEndpointId, videoReady: self.readyVideoEndpointIds.contains(videoEndpointId)) { + if let tileItem = ListEntry.peer(peerEntry, 0).tileItem(context: self.context, presentationData: self.presentationData, interaction: interaction, videoEndpointId: videoEndpointId, videoReady: self.readyVideoEndpointIds.contains(videoEndpointId), showAsPresentation: peerIdToCameraEndpointId[peerEntry.peer.id] != nil) { isTile = true tileByVideoEndpoint[videoEndpointId] = tileItem } @@ -4501,7 +4500,7 @@ public final class VoiceChatController: ViewController { self.videoOrder.append(videoEndpointId) } } - if let tileItem = ListEntry.peer(peerEntry, 0).tileItem(context: self.context, presentationData: self.presentationData, interaction: interaction, videoEndpointId: videoEndpointId, videoReady: self.readyVideoEndpointIds.contains(videoEndpointId)) { + if let tileItem = ListEntry.peer(peerEntry, 0).tileItem(context: self.context, presentationData: self.presentationData, interaction: interaction, videoEndpointId: videoEndpointId, videoReady: self.readyVideoEndpointIds.contains(videoEndpointId), showAsPresentation: false) { isTile = true tileByVideoEndpoint[videoEndpointId] = tileItem } @@ -4597,7 +4596,7 @@ public final class VoiceChatController: ViewController { self.requestedVideoChannels = requestedVideoChannels - guard self.didSetDataReady && !self.isPanning && !self.animatingExpansion else { + guard self.didSetDataReady && (force || (!self.isPanning && !self.animatingExpansion && !self.animatingMainStage)) else { return } @@ -4619,8 +4618,7 @@ public final class VoiceChatController: ViewController { self.updateRequestedVideoChannels() - self.endpointToPeerId = endpointIdToPeerId - self.peerIdToEndpoint = 
peerIdToEndpointId + self.peerIdToEndpointId = peerIdToEndpointId if !tileItems.isEmpty { entries.insert(.tiles(tileItems), at: 0) @@ -4835,7 +4833,7 @@ public final class VoiceChatController: ViewController { self.effectiveSpeaker = effectiveSpeaker if updateMembers { - self.updateMembers(maybeUpdateVideo: false) + self.updateMembers(maybeUpdateVideo: false, force: force) } self.mainStageNode.update(peer: effectiveSpeaker, waitForFullSize: waitForFullSize, completion: { completion?() @@ -5010,6 +5008,7 @@ public final class VoiceChatController: ViewController { self.panGestureArguments = nil self.fullscreenListContainer.subnodeTransform = CATransform3DIdentity if abs(translation.y) > 100.0 || abs(velocity.y) > 300.0 { + self.mainStageBackgroundNode.layer.removeAllAnimations() self.currentForcedSpeaker = nil self.updateDisplayMode(.modal(isExpanded: true, isFilled: true), fromPan: true) self.effectiveSpeaker = nil @@ -5551,7 +5550,7 @@ public final class VoiceChatController: ViewController { } private func updateDisplayMode(_ displayMode: DisplayMode, fromPan: Bool = false) { - guard !self.animatingExpansion && !self.mainStageNode.animating else { + guard !self.animatingExpansion && !self.animatingMainStage && !self.mainStageNode.animating else { return } self.updateMembers() @@ -5562,6 +5561,11 @@ public final class VoiceChatController: ViewController { isFullscreen = true } + if case .fullscreen = previousDisplayMode, case .fullscreen = displayMode { + } else { + self.animatingMainStage = true + } + let completion = { self.displayMode = displayMode self.updateDecorationsColors() @@ -5585,8 +5589,6 @@ public final class VoiceChatController: ViewController { } } - self.animatingExpansion = true - let completion = { let effectiveSpeakerPeerId = self.effectiveSpeaker?.0 @@ -5606,7 +5608,9 @@ public final class VoiceChatController: ViewController { let transitionStartPosition = otherItemNode.view.convert(CGPoint(x: otherItemNode.frame.width / 2.0, y: otherItemNode.frame.height), to: self.fullscreenListContainer.view.superview) self.fullscreenListContainer.layer.animatePosition(from: transitionStartPosition, to: self.fullscreenListContainer.position, duration: 0.55, timingFunction: kCAMediaTimingFunctionSpring) - self.mainStageNode.animateTransitionIn(from: otherItemNode, transition: transition) + self.mainStageNode.animateTransitionIn(from: otherItemNode, transition: transition, completion: { [weak self] in + self?.animatingMainStage = false + }) self.mainStageNode.alpha = 1.0 self.mainStageBackgroundNode.alpha = 1.0 @@ -5669,9 +5673,7 @@ public final class VoiceChatController: ViewController { fullscreenItemNodes[String(item.peer.id.toInt64()) + "_" + (item.videoEndpointId ?? "")] = itemNode } } - - self.animatingExpansion = true - + let completion = { let effectiveSpeakerPeerId = self.effectiveSpeaker?.0 var targetTileNode: VoiceChatTileItemNode? 
@@ -5727,6 +5729,7 @@ public final class VoiceChatController: ViewController { strongSelf.contentContainer.insertSubnode(strongSelf.mainStageContainerNode, belowSubnode: strongSelf.transitionContainerNode) strongSelf.isPanning = false + strongSelf.animatingMainStage = false }) self.listContainer.layer.animateScale(from: 0.86, to: 1.0, duration: 0.55, timingFunction: kCAMediaTimingFunctionSpring) @@ -5757,8 +5760,6 @@ public final class VoiceChatController: ViewController { completion() } } else if case .fullscreen = self.displayMode { - self.animatingExpansion = true - if let (layout, navigationHeight) = self.validLayout { let transition: ContainedViewLayoutTransition = .animated(duration: 0.4, curve: .spring) self.containerLayoutUpdated(layout, navigationHeight: navigationHeight, transition: transition) diff --git a/submodules/TelegramCallsUI/Sources/VoiceChatMainStageNode.swift b/submodules/TelegramCallsUI/Sources/VoiceChatMainStageNode.swift index 8acabbefd4..4c7203ae68 100644 --- a/submodules/TelegramCallsUI/Sources/VoiceChatMainStageNode.swift +++ b/submodules/TelegramCallsUI/Sources/VoiceChatMainStageNode.swift @@ -290,12 +290,13 @@ final class VoiceChatMainStageNode: ASDisplayNode { private var animatingOut = false private var appeared = false - func animateTransitionIn(from sourceNode: ASDisplayNode, transition: ContainedViewLayoutTransition) { + func animateTransitionIn(from sourceNode: ASDisplayNode, transition: ContainedViewLayoutTransition, completion: @escaping () -> Void) { guard let sourceNode = sourceNode as? VoiceChatTileItemNode, let _ = sourceNode.item, let (_, sideInset, bottomInset, isLandscape) = self.validLayout else { return } self.appeared = true + self.backgroundNode.alpha = 0.0 self.topFadeNode.alpha = 0.0 self.titleNode.alpha = 0.0 self.microphoneNode.alpha = 0.0 @@ -331,6 +332,7 @@ final class VoiceChatMainStageNode: ASDisplayNode { transition.updateFrame(node: self, frame: targetFrame, completion: { [weak self] _ in sourceNode.alpha = 1.0 self?.animatingIn = false + completion() }) } @@ -346,6 +348,11 @@ final class VoiceChatMainStageNode: ASDisplayNode { alphaTransition.updateAlpha(node: self.backgroundNode, alpha: 0.0) } else { self.backgroundNode.alpha = 0.0 + + self.microphoneNode.alpha = 1.0 + self.titleNode.alpha = 1.0 + self.bottomFadeNode.alpha = 1.0 + self.bottomFillNode.alpha = 1.0 } alphaTransition.updateAlpha(node: self.topFadeNode, alpha: 0.0) alphaTransition.updateAlpha(node: self.titleNode, alpha: 0.0) @@ -572,14 +579,17 @@ final class VoiceChatMainStageNode: ASDisplayNode { self.update(size: size, sideInset: sideInset, bottomInset: bottomInset, isLandscape: isLandscape, transition: .immediate) } - self.audioLevelNode.updateGlowAndGradientAnimations(type: gradient, animated: true) - self.pinButtonTitleNode.isHidden = !pinned self.pinButtonIconNode.image = !pinned ? 
generateTintedImage(image: UIImage(bundleImageName: "Call/Pin"), color: .white) : generateTintedImage(image: UIImage(bundleImageName: "Call/Unpin"), color: .white) self.audioLevelNode.startAnimating(immediately: true) if let getAudioLevel = self.getAudioLevel, previousPeerEntry?.peer.id != peerEntry.peer.id { + self.avatarNode.layer.removeAllAnimations() + self.avatarNode.transform = CATransform3DIdentity + self.audioLevelNode.updateGlowAndGradientAnimations(type: .active, animated: false) + self.audioLevelNode.updateLevel(0.0, immediately: true) + self.audioLevelNode.isHidden = self.currentPeer?.1 != nil self.audioLevelDisposable.set((getAudioLevel(peerEntry.peer.id) |> deliverOnMainQueue).start(next: { [weak self] value in @@ -589,7 +599,7 @@ final class VoiceChatMainStageNode: ASDisplayNode { let level = min(1.5, max(0.0, CGFloat(value))) - strongSelf.audioLevelNode.updateLevel(CGFloat(value)) + strongSelf.audioLevelNode.updateLevel(CGFloat(value), immediately: false) let avatarScale: CGFloat if value > 0.02 { @@ -603,6 +613,8 @@ final class VoiceChatMainStageNode: ASDisplayNode { })) } + self.audioLevelNode.updateGlowAndGradientAnimations(type: gradient, animated: true) + self.microphoneNode.update(state: VoiceChatMicrophoneNode.State(muted: muted, filled: true, color: microphoneColor), animated: true) } @@ -932,8 +944,8 @@ class VoiceChatBlobNode: ASDisplayNode { self.blobView.startAnimating(immediately: true) } - func updateLevel(_ level: CGFloat) { - self.blobView.updateLevel(level) + func updateLevel(_ level: CGFloat, immediately: Bool) { + self.blobView.updateLevel(level, immediately: immediately) } func startAnimating(immediately: Bool) { @@ -996,9 +1008,14 @@ class VoiceChatBlobNode: ASDisplayNode { case .muted: targetColors = [pink.cgColor, purple.cgColor, purple.cgColor] } - self.foregroundGradientLayer.colors = targetColors if animated { + self.foregroundGradientLayer.colors = targetColors self.foregroundGradientLayer.animate(from: initialColors as AnyObject, to: targetColors as AnyObject, keyPath: "colors", timingFunction: CAMediaTimingFunctionName.linear.rawValue, duration: 0.3) + } else { + CATransaction.begin() + CATransaction.setDisableActions(true) + self.foregroundGradientLayer.colors = targetColors + CATransaction.commit() } } diff --git a/submodules/TelegramCallsUI/Sources/VoiceChatTileItemNode.swift b/submodules/TelegramCallsUI/Sources/VoiceChatTileItemNode.swift index c6ae25add3..6512e82482 100644 --- a/submodules/TelegramCallsUI/Sources/VoiceChatTileItemNode.swift +++ b/submodules/TelegramCallsUI/Sources/VoiceChatTileItemNode.swift @@ -69,7 +69,7 @@ final class VoiceChatTileItem: Equatable { if lhs.videoReady != rhs.videoReady { return false } - if lhs.speaking != rhs.speaking { + if lhs.icon != rhs.icon { return false } if lhs.text != rhs.text { @@ -78,6 +78,9 @@ final class VoiceChatTileItem: Equatable { if lhs.additionalText != rhs.additionalText { return false } + if lhs.speaking != rhs.speaking { + return false + } if lhs.icon != rhs.icon { return false } @@ -113,7 +116,7 @@ final class VoiceChatTileItemNode: ASDisplayNode { let fadeNode: ASDisplayNode private var shimmerNode: VoiceChatTileShimmeringNode? private let titleNode: ImmediateTextNode - private let iconNode: ASImageNode + private var iconNode: ASImageNode? private var animationNode: VoiceChatMicrophoneNode? 
var highlightNode: VoiceChatTileHighlightNode private let statusNode: VoiceChatParticipantStatusNode @@ -157,10 +160,6 @@ final class VoiceChatTileItemNode: ASDisplayNode { self.statusNode = VoiceChatParticipantStatusNode() - self.iconNode = ASImageNode() - self.iconNode.displaysAsynchronously = false - self.iconNode.displayWithoutProcessing = true - self.highlightNode = VoiceChatTileHighlightNode() self.highlightNode.alpha = 0.0 self.highlightNode.updateGlowAndGradientAnimations(type: .speaking) @@ -179,11 +178,10 @@ final class VoiceChatTileItemNode: ASDisplayNode { self.contentNode.addSubnode(self.fadeNode) self.contentNode.addSubnode(self.infoNode) self.infoNode.addSubnode(self.titleNode) - self.infoNode.addSubnode(self.iconNode) self.contentNode.addSubnode(self.highlightNode) self.containerNode.shouldBegin = { [weak self] location in - guard let _ = self else { + guard let strongSelf = self, let item = strongSelf.item, item.videoReady else { return false } return true @@ -378,6 +376,56 @@ final class VoiceChatTileItemNode: ASDisplayNode { self.animationNode = nil animationNode.removeFromSupernode() } + + var hadMicrophoneNode = false + var hadIconNode = false + var nodeToAnimateIn: ASDisplayNode? + + if case let .microphone(muted) = item.icon { + let animationNode: VoiceChatMicrophoneNode + if let current = self.animationNode { + animationNode = current + } else { + animationNode = VoiceChatMicrophoneNode() + self.animationNode = animationNode + self.infoNode.addSubnode(animationNode) + } + animationNode.alpha = 1.0 + animationNode.update(state: VoiceChatMicrophoneNode.State(muted: muted, filled: true, color: microphoneColor), animated: true) + } else if let animationNode = self.animationNode { + hadMicrophoneNode = true + self.animationNode = nil + animationNode.removeFromSupernode() + } + + if case .presentation = item.icon { + let iconNode: ASImageNode + if let current = self.iconNode { + iconNode = current + } else { + iconNode = ASImageNode() + iconNode.displaysAsynchronously = false + iconNode.contentMode = .center + self.iconNode = iconNode + self.infoNode.addSubnode(iconNode) + + nodeToAnimateIn = iconNode + } + + iconNode.image = generateTintedImage(image: UIImage(bundleImageName: "Call/StatusScreen"), color: .white) + } else if let iconNode = self.iconNode { + hadIconNode = true + self.iconNode = nil + iconNode.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2, removeOnCompletion: false) + iconNode.layer.animateScale(from: 1.0, to: 0.001, duration: 0.2, removeOnCompletion: false, completion: { [weak iconNode] _ in + iconNode?.removeFromSupernode() + }) + } + + if let node = nodeToAnimateIn, hadMicrophoneNode || hadIconNode { + node.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2) + node.layer.animateScale(from: 0.001, to: 1.0, duration: 0.2) + } } let bounds = CGRect(origin: CGPoint(), size: size) @@ -415,6 +463,10 @@ final class VoiceChatTileItemNode: ASDisplayNode { let titleSize = self.titleNode.updateLayout(CGSize(width: size.width - 50.0, height: size.height)) self.titleNode.frame = CGRect(origin: CGPoint(x: 30.0, y: size.height - titleSize.height - 8.0), size: titleSize) + if let iconNode = self.iconNode, let image = iconNode.image { + transition.updateFrame(node: iconNode, frame: CGRect(origin: CGPoint(x: floorToScreenPixels(16.0 - image.size.width / 2.0), y: floorToScreenPixels(size.height - 15.0 - image.size.height / 2.0)), size: image.size)) + } + if let animationNode = self.animationNode { let animationSize = CGSize(width: 36.0, height: 36.0) 
animationNode.bounds = CGRect(origin: CGPoint(), size: animationSize) From 95779538c9f902ca3ffb86d924b046a7d2cec684 Mon Sep 17 00:00:00 2001 From: Ilya Laktyushin Date: Sat, 29 May 2021 15:02:33 +0400 Subject: [PATCH 2/5] Video Chat Improvements --- .../TelegramCallsUI/Sources/VoiceChatController.swift | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/submodules/TelegramCallsUI/Sources/VoiceChatController.swift b/submodules/TelegramCallsUI/Sources/VoiceChatController.swift index df06634835..3afeda8d55 100644 --- a/submodules/TelegramCallsUI/Sources/VoiceChatController.swift +++ b/submodules/TelegramCallsUI/Sources/VoiceChatController.swift @@ -3874,7 +3874,7 @@ public final class VoiceChatController: ViewController { ) fullscreenListTransform = CATransform3DIdentity fullscreenListUpdateSizeAndInsets = ListViewUpdateSizeAndInsets(size: CGSize(width: fullscreenListHeight, height: layout.size.height), insets: UIEdgeInsets(top: fullscreenListInset, left: 0.0, bottom: fullscreenListInset, right: 0.0), duration: duration, curve: curve) - fullscreenListContainerFrame = CGRect(x: layout.size.width - min(self.effectiveBottomAreaHeight, fullscreenBottomAreaHeight) - layout.safeInsets.right - fullscreenListHeight, y: layout.size.height / 2.0, width: layout.size.width, height: fullscreenListHeight) + fullscreenListContainerFrame = CGRect(x: layout.size.width - min(self.effectiveBottomAreaHeight, fullscreenBottomAreaHeight) - layout.safeInsets.right - fullscreenListHeight, y: 0.0, width: fullscreenListHeight, height: layout.size.height) } else { fullscreenListWidth = layout.size.width fullscreenListPosition = CGPoint( @@ -4835,6 +4835,12 @@ public final class VoiceChatController: ViewController { if updateMembers { self.updateMembers(maybeUpdateVideo: false, force: force) } + + var waitForFullSize = waitForFullSize + if let (_, maybeVideoEndpointId) = effectiveSpeaker, let videoEndpointId = maybeVideoEndpointId, !self.readyVideoEndpointIds.contains(videoEndpointId) { + waitForFullSize = false + } + self.mainStageNode.update(peer: effectiveSpeaker, waitForFullSize: waitForFullSize, completion: { completion?() }) @@ -5562,6 +5568,7 @@ public final class VoiceChatController: ViewController { } if case .fullscreen = previousDisplayMode, case .fullscreen = displayMode { + self.animatingExpansion = true } else { self.animatingMainStage = true } From bee3bd23148c148d7dfa10dd84c0cb143916aad9 Mon Sep 17 00:00:00 2001 From: Ilya Laktyushin Date: Sat, 29 May 2021 18:09:41 +0400 Subject: [PATCH 3/5] Video Chat Fixes --- submodules/TelegramCallsUI/Sources/VoiceChatController.swift | 2 +- .../Sources/VoiceChatFullscreenParticipantItem.swift | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/submodules/TelegramCallsUI/Sources/VoiceChatController.swift b/submodules/TelegramCallsUI/Sources/VoiceChatController.swift index 3afeda8d55..631df480de 100644 --- a/submodules/TelegramCallsUI/Sources/VoiceChatController.swift +++ b/submodules/TelegramCallsUI/Sources/VoiceChatController.swift @@ -4837,7 +4837,7 @@ public final class VoiceChatController: ViewController { } var waitForFullSize = waitForFullSize - if let (_, maybeVideoEndpointId) = effectiveSpeaker, let videoEndpointId = maybeVideoEndpointId, !self.readyVideoEndpointIds.contains(videoEndpointId) { + if let (_, maybeVideoEndpointId) = effectiveSpeaker, let videoEndpointId = maybeVideoEndpointId, !self.readyVideoEndpointIds.contains(videoEndpointId), entries == nil { waitForFullSize = false } diff --git 
a/submodules/TelegramCallsUI/Sources/VoiceChatFullscreenParticipantItem.swift b/submodules/TelegramCallsUI/Sources/VoiceChatFullscreenParticipantItem.swift index f52b2ec39d..e835c09ccb 100644 --- a/submodules/TelegramCallsUI/Sources/VoiceChatFullscreenParticipantItem.swift +++ b/submodules/TelegramCallsUI/Sources/VoiceChatFullscreenParticipantItem.swift @@ -334,7 +334,7 @@ class VoiceChatFullscreenParticipantItemNode: ItemListRevealOptionsItemNode { self.videoFadeNode.alpha = 1.0 self.videoFadeNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2, timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue) - } else if initialAnimate { + } else { videoNode.updateLayout(size: videoSize, layoutMode: .fillOrFitToSquare, transition: .immediate) self.videoFadeNode.alpha = 1.0 } From 7a72ead1e532ee63cedfa45049dfa6b94bbff377 Mon Sep 17 00:00:00 2001 From: Ilya Laktyushin Date: Sat, 29 May 2021 19:06:26 +0400 Subject: [PATCH 4/5] Video Chat Improvements --- .../Sources/VoiceChatController.swift | 3 +- .../VoiceChatFullscreenParticipantItem.swift | 56 +++++++++---------- .../Sources/VoiceChatMainStageNode.swift | 12 ++-- 3 files changed, 36 insertions(+), 35 deletions(-) diff --git a/submodules/TelegramCallsUI/Sources/VoiceChatController.swift b/submodules/TelegramCallsUI/Sources/VoiceChatController.swift index 631df480de..6547086080 100644 --- a/submodules/TelegramCallsUI/Sources/VoiceChatController.swift +++ b/submodules/TelegramCallsUI/Sources/VoiceChatController.swift @@ -5621,7 +5621,8 @@ public final class VoiceChatController: ViewController { self.mainStageNode.alpha = 1.0 self.mainStageBackgroundNode.alpha = 1.0 - self.mainStageBackgroundNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.13, completion: { _ in + self.mainStageBackgroundNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.13, completion: { [weak otherItemNode] _ in + otherItemNode?.alpha = 0.0 completion() }) } else { diff --git a/submodules/TelegramCallsUI/Sources/VoiceChatFullscreenParticipantItem.swift b/submodules/TelegramCallsUI/Sources/VoiceChatFullscreenParticipantItem.swift index e835c09ccb..26586fff78 100644 --- a/submodules/TelegramCallsUI/Sources/VoiceChatFullscreenParticipantItem.swift +++ b/submodules/TelegramCallsUI/Sources/VoiceChatFullscreenParticipantItem.swift @@ -549,24 +549,25 @@ class VoiceChatFullscreenParticipantItemNode: ItemListRevealOptionsItemNode { let videoContainerScale = tileSize.width / videoSize.width + let appearanceDuration: Double = 0.25 + let apperanceTransition = ContainedViewLayoutTransition.animated(duration: appearanceDuration, curve: .easeInOut) let videoNode = item.getVideo() if let currentVideoNode = strongSelf.videoNode, currentVideoNode !== videoNode { if videoNode == nil { - let transition = ContainedViewLayoutTransition.animated(duration: 0.2, curve: .easeInOut) if strongSelf.avatarNode.alpha.isZero { strongSelf.animatingSelection = true - strongSelf.videoContainerNode.layer.animateScale(from: videoContainerScale, to: 0.001, duration: 0.2) - strongSelf.avatarNode.layer.animateScale(from: 0.0, to: 1.0, duration: 0.2, completion: { [weak self] _ in + strongSelf.videoContainerNode.layer.animateScale(from: videoContainerScale, to: 0.001, duration: appearanceDuration) + strongSelf.avatarNode.layer.animateScale(from: 0.0, to: 1.0, duration: appearanceDuration, completion: { [weak self] _ in self?.animatingSelection = false }) - strongSelf.videoContainerNode.layer.animatePosition(from: CGPoint(), to: CGPoint(x: 0.0, y: -9.0), duration: 0.2, additive: true) - 
strongSelf.audioLevelView?.layer.animateScale(from: 0.0, to: 1.0, duration: 0.2) + strongSelf.videoContainerNode.layer.animatePosition(from: CGPoint(), to: CGPoint(x: 0.0, y: -9.0), duration: appearanceDuration, additive: true) + strongSelf.audioLevelView?.layer.animateScale(from: 0.0, to: 1.0, duration: appearanceDuration) } - transition.updateAlpha(node: currentVideoNode, alpha: 0.0) - transition.updateAlpha(node: strongSelf.videoFadeNode, alpha: 0.0) - transition.updateAlpha(node: strongSelf.avatarNode, alpha: 1.0) + apperanceTransition.updateAlpha(node: currentVideoNode, alpha: 0.0) + apperanceTransition.updateAlpha(node: strongSelf.videoFadeNode, alpha: 0.0) + apperanceTransition.updateAlpha(node: strongSelf.avatarNode, alpha: 1.0) if let audioLevelView = strongSelf.audioLevelView { - transition.updateAlpha(layer: audioLevelView.layer, alpha: 1.0) + apperanceTransition.updateAlpha(layer: audioLevelView.layer, alpha: 1.0) } } else { currentVideoNode.removeFromSupernode() @@ -874,42 +875,41 @@ class VoiceChatFullscreenParticipantItemNode: ItemListRevealOptionsItemNode { let canUpdateAvatarVisibility = !strongSelf.isExtracted && !strongSelf.animatingExtraction if let videoNode = videoNode { - let transition = ContainedViewLayoutTransition.animated(duration: 0.2, curve: .easeInOut) if !strongSelf.isExtracted && !strongSelf.animatingExtraction { if currentItem != nil { if item.active { if strongSelf.avatarNode.alpha.isZero { strongSelf.animatingSelection = true - strongSelf.videoContainerNode.layer.animateScale(from: videoContainerScale, to: 0.001, duration: 0.2) - strongSelf.avatarNode.layer.animateScale(from: 0.0, to: 1.0, duration: 0.2, completion: { [weak self] _ in + strongSelf.videoContainerNode.layer.animateScale(from: videoContainerScale, to: 0.001, duration: appearanceDuration) + strongSelf.avatarNode.layer.animateScale(from: 0.0, to: 1.0, duration: appearanceDuration, completion: { [weak self] _ in self?.animatingSelection = false }) - strongSelf.videoContainerNode.layer.animatePosition(from: CGPoint(), to: CGPoint(x: 0.0, y: -9.0), duration: 0.2, additive: true) - strongSelf.audioLevelView?.layer.animateScale(from: 0.0, to: 1.0, duration: 0.2) + strongSelf.videoContainerNode.layer.animatePosition(from: CGPoint(), to: CGPoint(x: 0.0, y: -9.0), duration: appearanceDuration, additive: true) + strongSelf.audioLevelView?.layer.animateScale(from: 0.0, to: 1.0, duration: appearanceDuration) } if videoNodeUpdated { videoNode.alpha = 0.0 strongSelf.videoFadeNode.alpha = 0.0 } else { - transition.updateAlpha(node: videoNode, alpha: 0.0) - transition.updateAlpha(node: strongSelf.videoFadeNode, alpha: 0.0) + apperanceTransition.updateAlpha(node: videoNode, alpha: 0.0) + apperanceTransition.updateAlpha(node: strongSelf.videoFadeNode, alpha: 0.0) } - transition.updateAlpha(node: strongSelf.avatarNode, alpha: 1.0) + apperanceTransition.updateAlpha(node: strongSelf.avatarNode, alpha: 1.0) if let audioLevelView = strongSelf.audioLevelView { - transition.updateAlpha(layer: audioLevelView.layer, alpha: 1.0) + apperanceTransition.updateAlpha(layer: audioLevelView.layer, alpha: 1.0) } } else { if !strongSelf.avatarNode.alpha.isZero { - strongSelf.videoContainerNode.layer.animateScale(from: 0.001, to: videoContainerScale, duration: 0.2) - strongSelf.avatarNode.layer.animateScale(from: 1.0, to: 0.001, duration: 0.2) - strongSelf.audioLevelView?.layer.animateScale(from: 1.0, to: 0.001, duration: 0.2) - strongSelf.videoContainerNode.layer.animatePosition(from: CGPoint(x: 0.0, y: -9.0), to: CGPoint(), 
duration: 0.2, additive: true) + strongSelf.videoContainerNode.layer.animateScale(from: 0.001, to: videoContainerScale, duration: appearanceDuration) + strongSelf.avatarNode.layer.animateScale(from: 1.0, to: 0.001, duration: appearanceDuration) + strongSelf.audioLevelView?.layer.animateScale(from: 1.0, to: 0.001, duration: appearanceDuration) + strongSelf.videoContainerNode.layer.animatePosition(from: CGPoint(x: 0.0, y: -9.0), to: CGPoint(), duration: appearanceDuration, additive: true) } - transition.updateAlpha(node: videoNode, alpha: 1.0) - transition.updateAlpha(node: strongSelf.videoFadeNode, alpha: 1.0) - transition.updateAlpha(node: strongSelf.avatarNode, alpha: 0.0) + apperanceTransition.updateAlpha(node: videoNode, alpha: 1.0) + apperanceTransition.updateAlpha(node: strongSelf.videoFadeNode, alpha: 1.0) + apperanceTransition.updateAlpha(node: strongSelf.avatarNode, alpha: 0.0) if let audioLevelView = strongSelf.audioLevelView { - transition.updateAlpha(layer: audioLevelView.layer, alpha: 0.0) + apperanceTransition.updateAlpha(layer: audioLevelView.layer, alpha: 0.0) } } } else { @@ -944,8 +944,8 @@ class VoiceChatFullscreenParticipantItemNode: ItemListRevealOptionsItemNode { videoNode.alpha = 0.0 } else { strongSelf.avatarNode.alpha = 0.0 - strongSelf.avatarNode.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2) - videoNode.layer.animateScale(from: 0.01, to: 1.0, duration: 0.2) + strongSelf.avatarNode.layer.animateAlpha(from: 1.0, to: 0.0, duration: appearanceDuration) + videoNode.layer.animateScale(from: 0.01, to: 1.0, duration: appearanceDuration) videoNode.alpha = 1.0 } } else { diff --git a/submodules/TelegramCallsUI/Sources/VoiceChatMainStageNode.swift b/submodules/TelegramCallsUI/Sources/VoiceChatMainStageNode.swift index 4c7203ae68..a73b0e35af 100644 --- a/submodules/TelegramCallsUI/Sources/VoiceChatMainStageNode.swift +++ b/submodules/TelegramCallsUI/Sources/VoiceChatMainStageNode.swift @@ -321,9 +321,7 @@ final class VoiceChatMainStageNode: ASDisplayNode { infoFrame.origin.y = targetFrame.height - infoFrame.height - (sideInset.isZero ? 
bottomInset : 14.0) transition.updateFrame(view: snapshotView, frame: infoFrame) } - - sourceNode.alpha = 0.0 - + self.animatingIn = true let startLocalFrame = sourceNode.view.convert(sourceNode.bounds, to: self.supernode?.view) self.update(size: startLocalFrame.size, sideInset: sideInset, bottomInset: bottomInset, isLandscape: isLandscape, force: true, transition: .immediate) @@ -695,15 +693,17 @@ final class VoiceChatMainStageNode: ASDisplayNode { |> filter { $0 } |> take(1) |> deliverOnMainQueue).start(next: { [weak self] _ in - Queue.mainQueue().after(0.07) { + Queue.mainQueue().after(0.1) { if let strongSelf = self { if let (size, sideInset, bottomInset, isLandscape) = strongSelf.validLayout { strongSelf.update(size: size, sideInset: sideInset, bottomInset: bottomInset, isLandscape: isLandscape, transition: .immediate) } } - completion?() - + Queue.mainQueue().after(0.02) { + completion?() + } + if delayTransition { if let videoNode = strongSelf.currentVideoNode { videoNode.alpha = 1.0 From 0bba45699b8f3d98a94a81630c7b4fae7b6da547 Mon Sep 17 00:00:00 2001 From: Ilya Laktyushin Date: Sun, 30 May 2021 10:51:41 +0400 Subject: [PATCH 5/5] Video Chat Improvements --- .../Sources/GroupVideoNode.swift | 2 +- .../VoiceChatCameraPreviewController.swift | 3 +- .../Sources/VoiceChatController.swift | 70 ++++++++++++++----- 3 files changed, 53 insertions(+), 22 deletions(-) diff --git a/submodules/TelegramCallsUI/Sources/GroupVideoNode.swift b/submodules/TelegramCallsUI/Sources/GroupVideoNode.swift index 8c439a5094..11220ccc2a 100644 --- a/submodules/TelegramCallsUI/Sources/GroupVideoNode.swift +++ b/submodules/TelegramCallsUI/Sources/GroupVideoNode.swift @@ -226,7 +226,7 @@ final class GroupVideoNode: ASDisplayNode { let fittedSize = rotatedVideoSize.aspectFitted(containerSize) let filledSize = rotatedVideoSize.aspectFilled(containerSize) - let filledToSquareSize = rotatedVideoSize.aspectFilled(CGSize(width: containerSize.height, height: containerSize.height)) + let filledToSquareSize = rotatedVideoSize.aspectFilled(CGSize(width: size.height, height: size.height)) switch layoutMode { case .fillOrFitToSquare: diff --git a/submodules/TelegramCallsUI/Sources/VoiceChatCameraPreviewController.swift b/submodules/TelegramCallsUI/Sources/VoiceChatCameraPreviewController.swift index 33e65761c2..7719bd0701 100644 --- a/submodules/TelegramCallsUI/Sources/VoiceChatCameraPreviewController.swift +++ b/submodules/TelegramCallsUI/Sources/VoiceChatCameraPreviewController.swift @@ -452,7 +452,6 @@ private class VoiceChatCameraPreviewControllerNode: ViewControllerTracingNode, U contentHeight = titleHeight + bottomInset + 52.0 + 17.0 + innerContentHeight + buttonOffset } - let previewInset: CGFloat = 16.0 let sideInset = floor((layout.size.width - width) / 2.0) let contentContainerFrame = CGRect(origin: CGPoint(x: sideInset, y: layout.size.height - contentHeight), size: CGSize(width: width, height: contentHeight)) @@ -480,7 +479,7 @@ private class VoiceChatCameraPreviewControllerNode: ViewControllerTracingNode, U previewSize = CGSize(width: min(contentFrame.width - layout.safeInsets.left - layout.safeInsets.right, previewHeight * 1.7778), height: previewHeight) previewFrame = CGRect(origin: CGPoint(x: floorToScreenPixels((contentFrame.width - previewSize.width) / 2.0), y: 0.0), size: previewSize) } else { - previewSize = CGSize(width: contentFrame.width - previewInset * 2.0, height: contentHeight - 243.0 - bottomInset) + previewSize = CGSize(width: contentFrame.width - previewInset * 2.0, height: contentHeight - 
243.0 - bottomInset + (120.0 - buttonOffset)) previewFrame = CGRect(origin: CGPoint(x: previewInset, y: 56.0), size: previewSize) } transition.updateFrame(node: self.previewContainerNode, frame: previewFrame) diff --git a/submodules/TelegramCallsUI/Sources/VoiceChatController.swift b/submodules/TelegramCallsUI/Sources/VoiceChatController.swift index 6547086080..e8fbe6ffc6 100644 --- a/submodules/TelegramCallsUI/Sources/VoiceChatController.swift +++ b/submodules/TelegramCallsUI/Sources/VoiceChatController.swift @@ -884,6 +884,7 @@ public final class VoiceChatController: ViewController { private var ignoreConnectingTimer: SwiftSignalKit.Timer? private var displayUnmuteTooltipTimer: SwiftSignalKit.Timer? + private var dismissUnmuteTooltipTimer: SwiftSignalKit.Timer? private var lastUnmuteTooltipDisplayTimestamp: Double? private var displayMode: DisplayMode = .modal(isExpanded: false, isFilled: false) { @@ -1945,29 +1946,50 @@ public final class VoiceChatController: ViewController { } if let state = strongSelf.callState, state.muteState == nil || strongSelf.pushingToTalk { strongSelf.displayUnmuteTooltipTimer?.invalidate() + strongSelf.displayUnmuteTooltipTimer = nil + strongSelf.dismissUnmuteTooltipTimer?.invalidate() + strongSelf.dismissUnmuteTooltipTimer = nil } else { if isSpeaking { var shouldDisplayTooltip = false - if let previousTimstamp = strongSelf.lastUnmuteTooltipDisplayTimestamp, CACurrentMediaTime() < previousTimstamp + 60.0 { + if let previousTimstamp = strongSelf.lastUnmuteTooltipDisplayTimestamp, CACurrentMediaTime() > previousTimstamp + 45.0 { shouldDisplayTooltip = true } else if strongSelf.lastUnmuteTooltipDisplayTimestamp == nil { shouldDisplayTooltip = true } if shouldDisplayTooltip { - let timer = SwiftSignalKit.Timer(timeout: 2.0, repeat: false, completion: { [weak self] in - guard let strongSelf = self else { - return - } - strongSelf.lastUnmuteTooltipDisplayTimestamp = CACurrentMediaTime() - strongSelf.displayUnmuteTooltip() - strongSelf.displayUnmuteTooltipTimer?.invalidate() - strongSelf.displayUnmuteTooltipTimer = nil - }, queue: Queue.mainQueue()) - timer.start() - strongSelf.displayUnmuteTooltipTimer = timer + strongSelf.dismissUnmuteTooltipTimer?.invalidate() + strongSelf.dismissUnmuteTooltipTimer = nil + + if strongSelf.displayUnmuteTooltipTimer == nil { + let timer = SwiftSignalKit.Timer(timeout: 1.0, repeat: false, completion: { [weak self] in + guard let strongSelf = self else { + return + } + strongSelf.lastUnmuteTooltipDisplayTimestamp = CACurrentMediaTime() + strongSelf.displayUnmuteTooltip() + strongSelf.displayUnmuteTooltipTimer?.invalidate() + strongSelf.displayUnmuteTooltipTimer = nil + strongSelf.dismissUnmuteTooltipTimer?.invalidate() + strongSelf.dismissUnmuteTooltipTimer = nil + }, queue: Queue.mainQueue()) + timer.start() + strongSelf.displayUnmuteTooltipTimer = timer + } } - } else { - strongSelf.displayUnmuteTooltipTimer?.invalidate() + } else if strongSelf.dismissUnmuteTooltipTimer == nil && strongSelf.displayUnmuteTooltipTimer != nil { + let timer = SwiftSignalKit.Timer(timeout: 0.4, repeat: false, completion: { [weak self] in + guard let strongSelf = self else { + return + } + strongSelf.displayUnmuteTooltipTimer?.invalidate() + strongSelf.displayUnmuteTooltipTimer = nil + + strongSelf.dismissUnmuteTooltipTimer?.invalidate() + strongSelf.dismissUnmuteTooltipTimer = nil + }, queue: Queue.mainQueue()) + timer.start() + strongSelf.dismissUnmuteTooltipTimer = timer } } }) @@ -2001,7 +2023,9 @@ public final class VoiceChatController: 
ViewController { var visiblePeerIds = Set() strongSelf.fullscreenListNode.forEachVisibleItemNode { itemNode in if let itemNode = itemNode as? VoiceChatFullscreenParticipantItemNode, let item = itemNode.item { - visiblePeerIds.insert(item.peer.id) + if item.videoEndpointId == nil { + visiblePeerIds.insert(item.peer.id) + } } } strongSelf.mainStageNode.update(visiblePeerIds: visiblePeerIds) @@ -3743,8 +3767,8 @@ public final class VoiceChatController: ViewController { self.switchCameraButton.update(size: audioButtonSize, content: CallControllerButtonItemNode.Content(appearance: normalButtonAppearance, image: .flipCamera), text: "", transition: transition) - transition.updateAlpha(node: self.switchCameraButton, alpha: hasVideo ? 1.0 : 0.0) - transition.updateTransformScale(node: self.switchCameraButton, scale: hasVideo ? 1.0 : 0.0) + transition.updateAlpha(node: self.switchCameraButton, alpha: hasCameraButton && hasVideo ? 1.0 : 0.0) + transition.updateTransformScale(node: self.switchCameraButton, scale: hasCameraButton && hasVideo ? 1.0 : 0.0) transition.updateTransformScale(node: self.cameraButton, scale: hasCameraButton ? 1.0 : 0.0) @@ -5487,8 +5511,16 @@ public final class VoiceChatController: ViewController { private func displayUnmuteTooltip() { let location = self.actionButton.view.convert(self.actionButton.bounds, to: self.view).center - let point = CGRect(origin: CGPoint(x: location.x - 5.0, y: location.y - 5.0 - 68.0), size: CGSize(width: 10.0, height: 10.0)) - self.controller?.present(TooltipScreen(text: self.presentationData.strings.VoiceChat_UnmuteSuggestion, style: .gradient(UIColor(rgb: 0x1d446c), UIColor(rgb: 0x193e63)), icon: nil, location: .point(point, .bottom), displayDuration: .custom(3.0), shouldDismissOnTouch: { _ in + var point = CGRect(origin: CGPoint(x: location.x - 5.0, y: location.y - 5.0 - 68.0), size: CGSize(width: 10.0, height: 10.0)) + var position: TooltipScreen.ArrowPosition = .bottom + if self.isLandscape { + point.origin.x = location.x - 5.0 - 36.0 + point.origin.y = location.y - 5.0 + position = .right + } else if case .fullscreen = self.displayMode { + point.origin.y += 32.0 + } + self.controller?.present(TooltipScreen(text: self.presentationData.strings.VoiceChat_UnmuteSuggestion, style: .gradient(UIColor(rgb: 0x1d446c), UIColor(rgb: 0x193e63)), icon: nil, location: .point(point, position), displayDuration: .custom(8.0), shouldDismissOnTouch: { _ in return .dismiss(consume: false) }), in: .window(.root)) }
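
Patch 1/5 threads an `immediately:` flag through `updateLevel(_:)` on the audio blob so that, when the main stage switches to a new speaker, the presented level snaps to the new value instead of easing from the previous speaker's state; patch 4/5 applies the same idea to the gradient colors via `CATransaction.setDisableActions(true)`. A minimal sketch of that pattern follows, assuming illustrative names (`BlobLevelSketch`, `tick()`) that are not part of the codebase:

import UIKit

// Sketch of the "update immediately" pattern: the presentation level normally
// eases toward the target level on a display link, but on a speaker switch the
// level and gradient colors are applied without any animation.
final class BlobLevelSketch: UIView {
    private let gradientLayer = CAGradientLayer()

    // Target level fed by the audio pipeline; presentation level is what is drawn.
    private(set) var audioLevel: CGFloat = 0.0
    private(set) var presentationAudioLevel: CGFloat = 0.0

    func updateLevel(_ level: CGFloat, immediately: Bool = false) {
        let normalized = min(1.0, max(0.0, level))
        self.audioLevel = normalized
        if immediately {
            // Skip the display-link interpolation so the blob does not animate
            // from the previous speaker's level.
            self.presentationAudioLevel = normalized
        }
    }

    // Called each frame by a CADisplayLink elsewhere (not shown).
    func tick() {
        self.presentationAudioLevel += (self.audioLevel - self.presentationAudioLevel) * 0.3
    }

    func setGradientColors(_ colors: [CGColor], animated: Bool) {
        if animated {
            // A standalone sublayer gets an implicit animation for `colors`;
            // the patch drives an explicit animation here instead.
            self.gradientLayer.colors = colors
        } else {
            // Same idea as patch 4/5: suppress the implicit CALayer animation
            // when the colors must change within a single frame.
            CATransaction.begin()
            CATransaction.setDisableActions(true)
            self.gradientLayer.colors = colors
            CATransaction.commit()
        }
    }
}

Snapping `presentationAudioLevel` only on speaker changes presumably keeps the usual smoothing everywhere else while avoiding a one-frame "ghost" of the previous speaker's blob.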
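
Patch 5/5 replaces the single unmute-tooltip timer with a show/dismiss timer pair: roughly one second of sustained speech while muted is required before the tooltip appears, a 0.4-second pause cancels a pending tooltip, and the tooltip is shown at most once per 45 seconds. The sketch below assumes those same thresholds; the real code uses SwiftSignalKit.Timer on the main queue, whereas Foundation's Timer is used here only to keep the example self-contained, and `UnmuteTooltipDebouncer` is an illustrative name.

import Foundation
import QuartzCore

// Debounce with hysteresis: require sustained speech to show the tooltip,
// tolerate brief pauses, and rate-limit repeated displays.
final class UnmuteTooltipDebouncer {
    private var displayTimer: Timer?
    private var dismissTimer: Timer?
    private var lastDisplayTimestamp: Double?

    var display: () -> Void = {}

    func update(isMutedAndSpeaking: Bool) {
        if isMutedAndSpeaking {
            self.dismissTimer?.invalidate()
            self.dismissTimer = nil
            guard self.displayTimer == nil else { return }
            if let last = self.lastDisplayTimestamp, CACurrentMediaTime() <= last + 45.0 {
                return // shown recently, do not nag
            }
            self.displayTimer = Timer.scheduledTimer(withTimeInterval: 1.0, repeats: false) { [weak self] _ in
                guard let self = self else { return }
                self.lastDisplayTimestamp = CACurrentMediaTime()
                self.displayTimer = nil
                self.display()
            }
        } else if self.displayTimer != nil, self.dismissTimer == nil {
            // Grace period so a brief pause in speech does not reset the pending tooltip.
            self.dismissTimer = Timer.scheduledTimer(withTimeInterval: 0.4, repeats: false) { [weak self] _ in
                self?.displayTimer?.invalidate()
                self?.displayTimer = nil
                self?.dismissTimer = nil
            }
        }
    }

    // Mirrors the unmuted/push-to-talk branch of the patch: cancel everything.
    func reset() {
        self.displayTimer?.invalidate()
        self.displayTimer = nil
        self.dismissTimer?.invalidate()
        self.dismissTimer = nil
    }
}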