diff --git a/submodules/AvatarNode/Sources/AvatarNode.swift b/submodules/AvatarNode/Sources/AvatarNode.swift index a8a29c486a..3fcfba0dd7 100644 --- a/submodules/AvatarNode/Sources/AvatarNode.swift +++ b/submodules/AvatarNode/Sources/AvatarNode.swift @@ -67,7 +67,7 @@ private class AvatarNodeParameters: NSObject { } } -private func calculateColors(context: AccountContext?, explicitColorIndex: Int?, peerId: EnginePeer.Id?, nameColor: PeerNameColor?, icon: AvatarNodeIcon, theme: PresentationTheme?) -> [UIColor] { +public func calculateAvatarColors(context: AccountContext?, explicitColorIndex: Int?, peerId: EnginePeer.Id?, nameColor: PeerNameColor?, icon: AvatarNodeIcon, theme: PresentationTheme?) -> [UIColor] { let colorIndex: Int if let explicitColorIndex = explicitColorIndex { colorIndex = explicitColorIndex @@ -183,7 +183,7 @@ private func ==(lhs: AvatarNodeState, rhs: AvatarNodeState) -> Bool { } } -private enum AvatarNodeIcon: Equatable { +public enum AvatarNodeIcon: Equatable { case none case savedMessagesIcon case repliesIcon @@ -577,7 +577,7 @@ public final class AvatarNode: ASDisplayNode { self.editOverlayNode?.isHidden = true } - parameters = AvatarNodeParameters(theme: theme, accountPeerId: accountPeerId, peerId: peer.id, colors: calculateColors(context: nil, explicitColorIndex: nil, peerId: peer.id, nameColor: peer.nameColor, icon: icon, theme: theme), letters: peer.displayLetters, font: self.font, icon: icon, explicitColorIndex: nil, hasImage: true, clipStyle: clipStyle) + parameters = AvatarNodeParameters(theme: theme, accountPeerId: accountPeerId, peerId: peer.id, colors: calculateAvatarColors(context: nil, explicitColorIndex: nil, peerId: peer.id, nameColor: peer.nameColor, icon: icon, theme: theme), letters: peer.displayLetters, font: self.font, icon: icon, explicitColorIndex: nil, hasImage: true, clipStyle: clipStyle) } else { self.imageReady.set(.single(true)) self.displaySuspended = false @@ -586,7 +586,7 @@ public final class AvatarNode: ASDisplayNode { } self.editOverlayNode?.isHidden = true - let colors = calculateColors(context: nil, explicitColorIndex: nil, peerId: peer?.id ?? EnginePeer.Id(0), nameColor: peer?.nameColor, icon: icon, theme: theme) + let colors = calculateAvatarColors(context: nil, explicitColorIndex: nil, peerId: peer?.id ?? EnginePeer.Id(0), nameColor: peer?.nameColor, icon: icon, theme: theme) parameters = AvatarNodeParameters(theme: theme, accountPeerId: accountPeerId, peerId: peer?.id ?? EnginePeer.Id(0), colors: colors, letters: peer?.displayLetters ?? 
[], font: self.font, icon: icon, explicitColorIndex: nil, hasImage: false, clipStyle: clipStyle) if let badgeView = self.badgeView { @@ -754,7 +754,7 @@ public final class AvatarNode: ASDisplayNode { self.editOverlayNode?.isHidden = true } - parameters = AvatarNodeParameters(theme: theme, accountPeerId: account.peerId, peerId: peer.id, colors: calculateColors(context: genericContext, explicitColorIndex: nil, peerId: peer.id, nameColor: peer.nameColor, icon: icon, theme: theme), letters: peer.displayLetters, font: self.font, icon: icon, explicitColorIndex: nil, hasImage: true, clipStyle: clipStyle) + parameters = AvatarNodeParameters(theme: theme, accountPeerId: account.peerId, peerId: peer.id, colors: calculateAvatarColors(context: genericContext, explicitColorIndex: nil, peerId: peer.id, nameColor: peer.nameColor, icon: icon, theme: theme), letters: peer.displayLetters, font: self.font, icon: icon, explicitColorIndex: nil, hasImage: true, clipStyle: clipStyle) } else { self.imageReady.set(.single(true)) self.displaySuspended = false @@ -763,7 +763,7 @@ public final class AvatarNode: ASDisplayNode { } self.editOverlayNode?.isHidden = true - let colors = calculateColors(context: genericContext, explicitColorIndex: nil, peerId: peer?.id ?? EnginePeer.Id(0), nameColor: peer?.nameColor, icon: icon, theme: theme) + let colors = calculateAvatarColors(context: genericContext, explicitColorIndex: nil, peerId: peer?.id ?? EnginePeer.Id(0), nameColor: peer?.nameColor, icon: icon, theme: theme) parameters = AvatarNodeParameters(theme: theme, accountPeerId: account.peerId, peerId: peer?.id ?? EnginePeer.Id(0), colors: colors, letters: peer?.displayLetters ?? [], font: self.font, icon: icon, explicitColorIndex: nil, hasImage: false, clipStyle: clipStyle) if let badgeView = self.badgeView { @@ -800,9 +800,9 @@ public final class AvatarNode: ASDisplayNode { let parameters: AvatarNodeParameters if let icon = icon, case .phone = icon { - parameters = AvatarNodeParameters(theme: nil, accountPeerId: nil, peerId: nil, colors: calculateColors(context: nil, explicitColorIndex: explicitIndex, peerId: nil, nameColor: nil, icon: .phoneIcon, theme: nil), letters: [], font: self.font, icon: .phoneIcon, explicitColorIndex: explicitIndex, hasImage: false, clipStyle: .round) + parameters = AvatarNodeParameters(theme: nil, accountPeerId: nil, peerId: nil, colors: calculateAvatarColors(context: nil, explicitColorIndex: explicitIndex, peerId: nil, nameColor: nil, icon: .phoneIcon, theme: nil), letters: [], font: self.font, icon: .phoneIcon, explicitColorIndex: explicitIndex, hasImage: false, clipStyle: .round) } else { - parameters = AvatarNodeParameters(theme: nil, accountPeerId: nil, peerId: nil, colors: calculateColors(context: nil, explicitColorIndex: explicitIndex, peerId: nil, nameColor: nil, icon: .none, theme: nil), letters: letters, font: self.font, icon: .none, explicitColorIndex: explicitIndex, hasImage: false, clipStyle: .round) + parameters = AvatarNodeParameters(theme: nil, accountPeerId: nil, peerId: nil, colors: calculateAvatarColors(context: nil, explicitColorIndex: explicitIndex, peerId: nil, nameColor: nil, icon: .none, theme: nil), letters: letters, font: self.font, icon: .none, explicitColorIndex: explicitIndex, hasImage: false, clipStyle: .round) } self.displaySuspended = true diff --git a/submodules/Components/BundleIconComponent/Sources/BundleIconComponent.swift b/submodules/Components/BundleIconComponent/Sources/BundleIconComponent.swift index d6165246af..b561f12bee 100644 --- 
a/submodules/Components/BundleIconComponent/Sources/BundleIconComponent.swift +++ b/submodules/Components/BundleIconComponent/Sources/BundleIconComponent.swift @@ -8,11 +8,15 @@ public final class BundleIconComponent: Component { public let name: String public let tintColor: UIColor? public let maxSize: CGSize? + public let shadowColor: UIColor? + public let shadowBlur: CGFloat - public init(name: String, tintColor: UIColor?, maxSize: CGSize? = nil) { + public init(name: String, tintColor: UIColor?, maxSize: CGSize? = nil, shadowColor: UIColor? = nil, shadowBlur: CGFloat = 0.0) { self.name = name self.tintColor = tintColor self.maxSize = maxSize + self.shadowColor = shadowColor + self.shadowBlur = shadowBlur } public static func ==(lhs: BundleIconComponent, rhs: BundleIconComponent) -> Bool { @@ -25,6 +29,12 @@ public final class BundleIconComponent: Component { if lhs.maxSize != rhs.maxSize { return false } + if lhs.shadowColor != rhs.shadowColor { + return false + } + if lhs.shadowBlur != rhs.shadowBlur { + return false + } return true } @@ -40,12 +50,24 @@ public final class BundleIconComponent: Component { } func update(component: BundleIconComponent, availableSize: CGSize, transition: ComponentTransition) -> CGSize { - if self.component?.name != component.name || self.component?.tintColor != component.tintColor { + if self.component?.name != component.name || self.component?.tintColor != component.tintColor || self.component?.shadowColor != component.shadowColor || self.component?.shadowBlur != component.shadowBlur { + var image: UIImage? if let tintColor = component.tintColor { - self.image = generateTintedImage(image: UIImage(bundleImageName: component.name), color: tintColor, backgroundColor: nil) + image = generateTintedImage(image: UIImage(bundleImageName: component.name), color: tintColor, backgroundColor: nil) } else { - self.image = UIImage(bundleImageName: component.name) + image = UIImage(bundleImageName: component.name) } + if let imageValue = image, let shadowColor = component.shadowColor, component.shadowBlur != 0.0 { + image = generateImage(CGSize(width: imageValue.size.width + component.shadowBlur * 2.0, height: imageValue.size.height + component.shadowBlur * 2.0), contextGenerator: { size, context in + context.clear(CGRect(origin: CGPoint(), size: size)) + context.setShadow(offset: CGSize(), blur: component.shadowBlur, color: shadowColor.cgColor) + + if let cgImage = imageValue.cgImage { + context.draw(cgImage, in: CGRect(origin: CGPoint(x: component.shadowBlur, y: component.shadowBlur), size: imageValue.size)) + } + }) + } + self.image = image } self.component = component diff --git a/submodules/TelegramCallsUI/Sources/VideoChatActionButtonComponent.swift b/submodules/TelegramCallsUI/Sources/VideoChatActionButtonComponent.swift index 8d6b3f69bd..656494c344 100644 --- a/submodules/TelegramCallsUI/Sources/VideoChatActionButtonComponent.swift +++ b/submodules/TelegramCallsUI/Sources/VideoChatActionButtonComponent.swift @@ -36,13 +36,13 @@ final class VideoChatActionButtonComponent: Component { case leave } - case audio(audio: Audio) + case audio(audio: Audio, isEnabled: Bool) case video(isActive: Bool) case leave fileprivate var iconType: IconType { switch self { - case let .audio(audio): + case let .audio(audio, _): let mappedAudio: IconType.Audio switch audio { case .none, .builtin, .speaker: @@ -136,14 +136,16 @@ final class VideoChatActionButtonComponent: Component { let titleText: String let backgroundColor: UIColor let iconDiameter: CGFloat + var isEnabled: Bool = 
true switch component.content { - case let .audio(audio): + case let .audio(audio, isEnabledValue): var isActive = false switch audio { case .none, .builtin: titleText = component.strings.Call_Speaker case .speaker: - isActive = true + isEnabled = isEnabledValue + isActive = isEnabledValue titleText = component.strings.Call_Speaker case .headphones: titleText = component.strings.Call_Audio @@ -276,8 +278,11 @@ final class VideoChatActionButtonComponent: Component { self.addSubview(iconView) } transition.setFrame(view: iconView, frame: iconFrame) + transition.setAlpha(view: iconView, alpha: isEnabled ? 1.0 : 0.6) } + self.isEnabled = isEnabled + return size } } diff --git a/submodules/TelegramCallsUI/Sources/VideoChatMuteIconComponent.swift b/submodules/TelegramCallsUI/Sources/VideoChatMuteIconComponent.swift index 3bc92c4bcf..748a319b13 100644 --- a/submodules/TelegramCallsUI/Sources/VideoChatMuteIconComponent.swift +++ b/submodules/TelegramCallsUI/Sources/VideoChatMuteIconComponent.swift @@ -16,13 +16,19 @@ final class VideoChatMuteIconComponent: Component { let color: UIColor let content: Content + let shadowColor: UIColor? + let shadowBlur: CGFloat init( color: UIColor, - content: Content + content: Content, + shadowColor: UIColor? = nil, + shadowBlur: CGFloat = 0.0 ) { self.color = color self.content = content + self.shadowColor = shadowColor + self.shadowBlur = shadowBlur } static func ==(lhs: VideoChatMuteIconComponent, rhs: VideoChatMuteIconComponent) -> Bool { @@ -32,6 +38,12 @@ final class VideoChatMuteIconComponent: Component { if lhs.content != rhs.content { return false } + if lhs.shadowColor != rhs.shadowColor { + return false + } + if lhs.shadowBlur != rhs.shadowBlur { + return false + } return true } @@ -75,9 +87,9 @@ final class VideoChatMuteIconComponent: Component { } let animationSize = availableSize - let animationFrame = animationSize.centered(in: CGRect(origin: CGPoint(), size: availableSize)) + let animationFrame = animationSize.centered(in: CGRect(origin: CGPoint(), size: availableSize)).insetBy(dx: -component.shadowBlur, dy: -component.shadowBlur) transition.setFrame(view: icon.view, frame: animationFrame) - icon.update(state: VoiceChatMicrophoneNode.State(muted: isMuted, filled: isFilled, color: component.color), animated: !transition.animation.isImmediate) + icon.update(state: VoiceChatMicrophoneNode.State(muted: isMuted, filled: isFilled, color: component.color, shadowColor: component.shadowColor, shadowBlur: component.shadowBlur), animated: !transition.animation.isImmediate) } else { if let icon = self.icon { self.icon = nil @@ -97,7 +109,9 @@ final class VideoChatMuteIconComponent: Component { transition: transition, component: AnyComponent(BundleIconComponent( name: "Call/StatusScreen", - tintColor: component.color + tintColor: component.color, + shadowColor: component.shadowColor, + shadowBlur: component.shadowBlur )), environment: {}, containerSize: availableSize diff --git a/submodules/TelegramCallsUI/Sources/VideoChatParticipantVideoComponent.swift b/submodules/TelegramCallsUI/Sources/VideoChatParticipantVideoComponent.swift index d295c40a7f..787908ccba 100644 --- a/submodules/TelegramCallsUI/Sources/VideoChatParticipantVideoComponent.swift +++ b/submodules/TelegramCallsUI/Sources/VideoChatParticipantVideoComponent.swift @@ -12,6 +12,9 @@ import AccountContext import SwiftSignalKit import DirectMediaImageCache import FastBlur +import ContextUI +import ComponentDisplayAdapters +import AvatarNode private func blurredAvatarImage(_ dataImage: UIImage) -> 
UIImage? { let imageContextSize = CGSize(width: 64.0, height: 64.0) @@ -35,6 +38,7 @@ private let activityBorderImage: UIImage = { }() final class VideoChatParticipantVideoComponent: Component { + let theme: PresentationTheme let strings: PresentationStrings let call: PresentationGroupCall let participant: GroupCallParticipantsContext.Participant @@ -47,8 +51,12 @@ final class VideoChatParticipantVideoComponent: Component { let controlInsets: UIEdgeInsets let interfaceOrientation: UIInterfaceOrientation let action: (() -> Void)? + let contextAction: ((EnginePeer, ContextExtractedContentContainingView, ContextGesture) -> Void)? + let activatePinch: ((PinchSourceContainerNode) -> Void)? + let deactivatedPinch: (() -> Void)? init( + theme: PresentationTheme, strings: PresentationStrings, call: PresentationGroupCall, participant: GroupCallParticipantsContext.Participant, @@ -60,8 +68,12 @@ final class VideoChatParticipantVideoComponent: Component { contentInsets: UIEdgeInsets, controlInsets: UIEdgeInsets, interfaceOrientation: UIInterfaceOrientation, - action: (() -> Void)? + action: (() -> Void)?, + contextAction: ((EnginePeer, ContextExtractedContentContainingView, ContextGesture) -> Void)?, + activatePinch: ((PinchSourceContainerNode) -> Void)?, + deactivatedPinch: (() -> Void)? ) { + self.theme = theme self.strings = strings self.call = call self.participant = participant @@ -74,6 +86,9 @@ final class VideoChatParticipantVideoComponent: Component { self.controlInsets = controlInsets self.interfaceOrientation = interfaceOrientation self.action = action + self.contextAction = contextAction + self.activatePinch = activatePinch + self.deactivatedPinch = deactivatedPinch } static func ==(lhs: VideoChatParticipantVideoComponent, rhs: VideoChatParticipantVideoComponent) -> Bool { @@ -107,6 +122,15 @@ final class VideoChatParticipantVideoComponent: Component { if (lhs.action == nil) != (rhs.action == nil) { return false } + if (lhs.contextAction == nil) != (rhs.contextAction == nil) { + return false + } + if (lhs.activatePinch == nil) != (rhs.activatePinch == nil) { + return false + } + if (lhs.deactivatedPinch == nil) != (rhs.deactivatedPinch == nil) { + return false + } return true } @@ -144,7 +168,7 @@ final class VideoChatParticipantVideoComponent: Component { } } - final class View: HighlightTrackingButton { + final class View: ContextControllerSourceView { private var component: VideoChatParticipantVideoComponent? private weak var componentState: EmptyComponentState? private var isUpdating: Bool = false @@ -158,6 +182,8 @@ final class VideoChatParticipantVideoComponent: Component { private var blurredAvatarDisposable: Disposable? private var blurredAvatarView: UIImageView? + private let pinchContainerNode: PinchSourceContainerNode + private let extractedContainerView: ContextExtractedContentContainingView private var videoSource: AdaptedCallVideoSource? private var videoDisposable: Disposable? private var videoBackgroundLayer: SimpleLayer? 
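The shadow changes in BundleIconComponent, VideoChatMuteIconComponent and VoiceChatMicrophoneNode all follow the same idea: rather than setting CALayer shadow properties on the host view (the removed //TODO:release blocks later in this patch), the drop shadow is baked into the rendered bitmap itself, padding the image by shadowBlur on each side and drawing through CGContext.setShadow. A minimal standalone sketch of that technique — iconWithBakedShadow is a hypothetical helper name, and UIGraphicsImageRenderer stands in for the Display module's generateImage helper the patch actually uses:

    import UIKit

    // Sketch only: bake a blurred drop shadow into an icon bitmap, padded by
    // `shadowBlur` on each side, mirroring what BundleIconComponent does in this
    // patch with generateImage + CGContext.setShadow.
    func iconWithBakedShadow(_ icon: UIImage, shadowColor: UIColor, shadowBlur: CGFloat) -> UIImage {
        let paddedSize = CGSize(width: icon.size.width + shadowBlur * 2.0,
                                height: icon.size.height + shadowBlur * 2.0)
        return UIGraphicsImageRenderer(size: paddedSize).image { rendererContext in
            let context = rendererContext.cgContext
            // The shadow is drawn as part of the image, so the hosting view needs
            // no layer shadow configuration at all.
            context.setShadow(offset: .zero, blur: shadowBlur, color: shadowColor.cgColor)
            icon.draw(in: CGRect(origin: CGPoint(x: shadowBlur, y: shadowBlur), size: icon.size))
        }
    }
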
@@ -173,16 +199,44 @@ final class VideoChatParticipantVideoComponent: Component { override init(frame: CGRect) { self.backgroundGradientView = UIImageView() + self.pinchContainerNode = PinchSourceContainerNode() + self.extractedContainerView = ContextExtractedContentContainingView() super.init(frame: frame) - self.addSubview(self.backgroundGradientView) + self.addSubview(self.extractedContainerView) + self.targetViewForActivationProgress = self.extractedContainerView + + self.extractedContainerView.contentView.addSubview(self.pinchContainerNode.view) + self.pinchContainerNode.contentNode.view.addSubview(self.backgroundGradientView) //TODO:release optimize - self.clipsToBounds = true - self.layer.cornerRadius = 10.0 + self.pinchContainerNode.contentNode.view.layer.cornerRadius = 10.0 + self.pinchContainerNode.contentNode.view.clipsToBounds = true - self.addTarget(self, action: #selector(self.pressed), for: .touchUpInside) + self.addGestureRecognizer(UITapGestureRecognizer(target: self, action: #selector(self.tapGesture(_:)))) + + self.pinchContainerNode.activate = { [weak self] sourceNode in + guard let self, let component = self.component else { + return + } + component.activatePinch?(sourceNode) + } + self.pinchContainerNode.animatedOut = { [weak self] in + guard let self, let component = self.component else { + return + } + + component.deactivatedPinch?() + } + + self.activated = { [weak self] gesture, _ in + guard let self, let component = self.component else { + gesture.cancel() + return + } + component.contextAction?(EnginePeer(component.participant.peer), self.extractedContainerView, gesture) + } } required init?(coder: NSCoder) { @@ -194,11 +248,13 @@ final class VideoChatParticipantVideoComponent: Component { self.blurredAvatarDisposable?.dispose() } - @objc private func pressed() { - guard let component = self.component, let action = component.action else { - return + @objc private func tapGesture(_ recognizer: UITapGestureRecognizer) { + if case .ended = recognizer.state { + guard let component = self.component, let action = component.action else { + return + } + action() } - action() } func update(component: VideoChatParticipantVideoComponent, availableSize: CGSize, state: EmptyComponentState, environment: Environment, transition: ComponentTransition) -> CGSize { @@ -211,6 +267,19 @@ final class VideoChatParticipantVideoComponent: Component { self.component = component self.componentState = state + self.isGestureEnabled = !component.isExpanded + + self.pinchContainerNode.isPinchGestureEnabled = component.activatePinch != nil + transition.setPosition(view: self.pinchContainerNode.view, position: CGRect(origin: CGPoint(), size: availableSize).center) + transition.setBounds(view: self.pinchContainerNode.view, bounds: CGRect(origin: CGPoint(), size: availableSize)) + self.pinchContainerNode.update(size: availableSize, transition: transition.containedViewLayoutTransition) + + transition.setPosition(view: self.extractedContainerView, position: CGRect(origin: CGPoint(), size: availableSize).center) + transition.setBounds(view: self.extractedContainerView, bounds: CGRect(origin: CGPoint(), size: availableSize)) + self.extractedContainerView.contentRect = CGRect(origin: CGPoint(), size: availableSize) + + transition.setFrame(view: self.pinchContainerNode.contentNode.view, frame: CGRect(origin: CGPoint(), size: availableSize)) + transition.setFrame(view: self.backgroundGradientView, frame: CGRect(origin: CGPoint(), size: availableSize)) let alphaTransition: ComponentTransition @@ -229,14 
+298,10 @@ final class VideoChatParticipantVideoComponent: Component { let controlsAlpha: CGFloat = component.isUIHidden ? 0.0 : 1.0 - let nameColor = component.participant.peer.nameColor ?? .blue - let nameColors = component.call.accountContext.peerNameColors.get(nameColor, dark: true) - if previousComponent == nil { - self.backgroundGradientView.image = generateGradientImage(size: CGSize(width: 8.0, height: 32.0), colors: [ - nameColors.main.withMultiplied(hue: 1.0, saturation: 1.1, brightness: 1.3), - nameColors.main.withMultiplied(hue: 1.0, saturation: 1.2, brightness: 1.0) - ], locations: [0.0, 1.0], direction: .vertical) + let colors = calculateAvatarColors(context: component.call.accountContext, explicitColorIndex: nil, peerId: component.participant.peer.id, nameColor: component.participant.peer.nameColor, icon: .none, theme: component.theme) + + self.backgroundGradientView.image = generateGradientImage(size: CGSize(width: 8.0, height: 32.0), colors: colors.reversed(), locations: [0.0, 1.0], direction: .vertical) } if let smallProfileImage = component.participant.peer.smallProfileImage { @@ -249,7 +314,7 @@ final class VideoChatParticipantVideoComponent: Component { blurredAvatarView = UIImageView() blurredAvatarView.contentMode = .scaleAspectFill self.blurredAvatarView = blurredAvatarView - self.insertSubview(blurredAvatarView, aboveSubview: self.backgroundGradientView) + self.pinchContainerNode.contentNode.view.insertSubview(blurredAvatarView, aboveSubview: self.backgroundGradientView) blurredAvatarView.frame = CGRect(origin: CGPoint(), size: availableSize) } @@ -292,7 +357,9 @@ final class VideoChatParticipantVideoComponent: Component { transition: transition, component: AnyComponent(VideoChatMuteIconComponent( color: .white, - content: component.isPresentation ? .screenshare : .mute(isFilled: true, isMuted: component.participant.muteState != nil && !component.isSpeaking) + content: component.isPresentation ? 
.screenshare : .mute(isFilled: true, isMuted: component.participant.muteState != nil && !component.isSpeaking), + shadowColor: UIColor(white: 0.0, alpha: 0.7), + shadowBlur: 8.0 )), environment: {}, containerSize: CGSize(width: 36.0, height: 36.0) @@ -305,14 +372,8 @@ final class VideoChatParticipantVideoComponent: Component { } if let muteStatusView = self.muteStatus.view { if muteStatusView.superview == nil { - self.addSubview(muteStatusView) + self.pinchContainerNode.contentNode.view.addSubview(muteStatusView) muteStatusView.alpha = controlsAlpha - - //TODO:release - muteStatusView.layer.shadowOpacity = 0.7 - muteStatusView.layer.shadowColor = UIColor(white: 0.0, alpha: 1.0).cgColor - muteStatusView.layer.shadowOffset = CGSize(width: 0.0, height: 1.0) - muteStatusView.layer.shadowRadius = 8.0 } transition.setPosition(view: muteStatusView, position: muteStatusFrame.center) transition.setBounds(view: muteStatusView, bounds: CGRect(origin: CGPoint(), size: muteStatusFrame.size)) @@ -320,31 +381,29 @@ final class VideoChatParticipantVideoComponent: Component { alphaTransition.setAlpha(view: muteStatusView, alpha: controlsAlpha) } + let titleInnerInsets = UIEdgeInsets(top: 8.0, left: 8.0, bottom: 8.0, right: 8.0) let titleSize = self.title.update( transition: .immediate, component: AnyComponent(MultilineTextComponent( - text: .plain(NSAttributedString(string: component.participant.peer.debugDisplayTitle, font: Font.semibold(16.0), textColor: .white)) + text: .plain(NSAttributedString(string: component.participant.peer.debugDisplayTitle, font: Font.semibold(16.0), textColor: .white)), + insets: titleInnerInsets, + textShadowColor: UIColor(white: 0.0, alpha: 0.7), + textShadowBlur: 8.0 )), environment: {}, containerSize: CGSize(width: availableSize.width - 8.0 * 2.0 - 4.0, height: 100.0) ) let titleFrame: CGRect if component.isExpanded { - titleFrame = CGRect(origin: CGPoint(x: 36.0, y: availableSize.height - component.controlInsets.bottom - 8.0 - titleSize.height), size: titleSize) + titleFrame = CGRect(origin: CGPoint(x: 36.0 - titleInnerInsets.left, y: availableSize.height - component.controlInsets.bottom - 8.0 - titleSize.height + titleInnerInsets.top), size: titleSize) } else { - titleFrame = CGRect(origin: CGPoint(x: 29.0, y: availableSize.height - component.controlInsets.bottom - 4.0 - titleSize.height), size: titleSize) + titleFrame = CGRect(origin: CGPoint(x: 29.0 - titleInnerInsets.left, y: availableSize.height - component.controlInsets.bottom - 4.0 - titleSize.height + titleInnerInsets.top + 1.0), size: titleSize) } if let titleView = self.title.view { if titleView.superview == nil { titleView.layer.anchorPoint = CGPoint() - self.addSubview(titleView) + self.pinchContainerNode.contentNode.view.addSubview(titleView) titleView.alpha = controlsAlpha - - //TODO:release - titleView.layer.shadowOpacity = 0.7 - titleView.layer.shadowColor = UIColor(white: 0.0, alpha: 1.0).cgColor - titleView.layer.shadowOffset = CGSize(width: 0.0, height: 1.0) - titleView.layer.shadowRadius = 8.0 } transition.setPosition(view: titleView, position: titleFrame.origin) titleView.bounds = CGRect(origin: CGPoint(), size: titleFrame.size) @@ -377,9 +436,9 @@ final class VideoChatParticipantVideoComponent: Component { videoBackgroundLayer.opacity = 0.0 self.videoBackgroundLayer = videoBackgroundLayer if let blurredAvatarView = self.blurredAvatarView { - self.layer.insertSublayer(videoBackgroundLayer, above: blurredAvatarView.layer) + 
self.pinchContainerNode.contentNode.view.layer.insertSublayer(videoBackgroundLayer, above: blurredAvatarView.layer) } else { - self.layer.insertSublayer(videoBackgroundLayer, above: self.backgroundGradientView.layer) + self.pinchContainerNode.contentNode.view.layer.insertSublayer(videoBackgroundLayer, above: self.backgroundGradientView.layer) } videoBackgroundLayer.isHidden = true } @@ -391,8 +450,8 @@ final class VideoChatParticipantVideoComponent: Component { videoLayer = PrivateCallVideoLayer() self.videoLayer = videoLayer videoLayer.opacity = 0.0 - self.layer.insertSublayer(videoLayer.blurredLayer, above: videoBackgroundLayer) - self.layer.insertSublayer(videoLayer, above: videoLayer.blurredLayer) + self.pinchContainerNode.contentNode.view.layer.insertSublayer(videoLayer.blurredLayer, above: videoBackgroundLayer) + self.pinchContainerNode.contentNode.view.layer.insertSublayer(videoLayer, above: videoLayer.blurredLayer) videoLayer.blurredLayer.opacity = 0.0 @@ -537,7 +596,7 @@ final class VideoChatParticipantVideoComponent: Component { if videoStatusView.superview == nil { videoStatusView.isUserInteractionEnabled = false videoStatusView.alpha = 0.0 - self.addSubview(videoStatusView) + self.pinchContainerNode.contentNode.view.addSubview(videoStatusView) } videoStatusTransition.setFrame(view: videoStatusView, frame: CGRect(origin: CGPoint(), size: availableSize)) videoAlphaTransition.setAlpha(view: videoStatusView, alpha: 1.0) @@ -557,7 +616,7 @@ final class VideoChatParticipantVideoComponent: Component { self.loadingEffectView = loadingEffectView loadingEffectView.alpha = 0.0 loadingEffectView.isUserInteractionEnabled = false - self.addSubview(loadingEffectView) + self.pinchContainerNode.contentNode.view.addSubview(loadingEffectView) if let referenceLocation = self.referenceLocation { self.updateHorizontalReferenceLocation(containerWidth: referenceLocation.containerWidth, positionX: referenceLocation.positionX, transition: .immediate) } @@ -578,7 +637,7 @@ final class VideoChatParticipantVideoComponent: Component { } else { activityBorderView = UIImageView() self.activityBorderView = activityBorderView - self.addSubview(activityBorderView) + self.pinchContainerNode.contentNode.view.addSubview(activityBorderView) activityBorderView.image = activityBorderImage activityBorderView.tintColor = UIColor(rgb: 0x33C758) diff --git a/submodules/TelegramCallsUI/Sources/VideoChatParticipantsComponent.swift b/submodules/TelegramCallsUI/Sources/VideoChatParticipantsComponent.swift index dda1b187c8..e4d904eb72 100644 --- a/submodules/TelegramCallsUI/Sources/VideoChatParticipantsComponent.swift +++ b/submodules/TelegramCallsUI/Sources/VideoChatParticipantsComponent.swift @@ -10,6 +10,7 @@ import SwiftSignalKit import MultilineTextComponent import TelegramPresentationData import PeerListItemComponent +import ContextUI final class VideoChatParticipantsComponent: Component { struct Layout: Equatable { @@ -645,6 +646,8 @@ final class VideoChatParticipantsComponent: Component { private var appliedGridIsEmpty: Bool = true + private var isPinchToZoomActive: Bool = false + private var currentLoadMoreToken: String? private var mainScrollViewEventCycleState: EventCycleState? 
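Pulled together from the hunks above and below, the pinch-to-zoom path works like this: the expanded video tile hosts its content in a PinchSourceContainerNode and forwards gesture activation to the participants grid, which presents a ContextUI PinchController over the whole screen and treats the tile chrome as hidden (isUIHidden || isPinchToZoomActive) until the pinch animates out. A condensed sketch using the patch's own types and calls, not a drop-in snippet:

    // Tile side (VideoChatParticipantVideoComponent): forward pinch activation upward.
    pinchContainerNode.activate = { [weak self] sourceNode in
        guard let self, let component = self.component else { return }
        component.activatePinch?(sourceNode)
    }
    pinchContainerNode.animatedOut = { [weak self] in
        guard let self, let component = self.component else { return }
        component.deactivatedPinch?()
    }

    // Grid side (VideoChatParticipantsComponent): only the expanded item gets the closures,
    // and while a pinch is live all tile controls are treated as hidden.
    activatePinch: isItemExpanded ? { [weak self] sourceNode in
        guard let self, let component = self.component else { return }
        self.isPinchToZoomActive = true
        self.state?.updated(transition: .immediate, isLocal: true)
        let pinchController = PinchController(sourceNode: sourceNode, getContentAreaInScreenSpace: {
            return UIScreen.main.bounds
        })
        component.call.accountContext.sharedContext.mainWindow?.presentInGlobalOverlay(pinchController)
    } : nil,
    deactivatedPinch: isItemExpanded ? { [weak self] in
        guard let self else { return }
        self.isPinchToZoomActive = false
        self.state?.updated(transition: .spring(duration: 0.4), isLocal: true)
    } : nil
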
@@ -986,7 +989,10 @@ final class VideoChatParticipantsComponent: Component { var itemControlInsets: UIEdgeInsets if isItemExpanded { itemControlInsets = itemContentInsets - itemControlInsets.bottom = max(itemControlInsets.bottom, 96.0) + if let expandedVideoState = component.expandedVideoState, expandedVideoState.isUIHidden { + } else { + itemControlInsets.bottom = max(itemControlInsets.bottom, 96.0) + } } else { itemControlInsets = itemContentInsets } @@ -1003,6 +1009,7 @@ final class VideoChatParticipantsComponent: Component { let _ = itemView.view.update( transition: itemTransition, component: AnyComponent(VideoChatParticipantVideoComponent( + theme: component.theme, strings: component.strings, call: component.call, participant: videoParticipant.participant, @@ -1010,7 +1017,7 @@ final class VideoChatParticipantsComponent: Component { isPresentation: videoParticipant.isPresentation, isSpeaking: component.speakingParticipants.contains(videoParticipant.participant.peer.id), isExpanded: isItemExpanded, - isUIHidden: isItemUIHidden, + isUIHidden: isItemUIHidden || self.isPinchToZoomActive, contentInsets: itemContentInsets, controlInsets: itemControlInsets, interfaceOrientation: component.interfaceOrientation, @@ -1032,7 +1039,31 @@ final class VideoChatParticipantsComponent: Component { component.updateMainParticipant(videoParticipantKey, nil) } } - } + }, + contextAction: !isItemExpanded ? { [weak self] peer, sourceView, gesture in + guard let self, let component = self.component else { + return + } + component.openParticipantContextMenu(peer.id, sourceView, gesture) + } : nil, + activatePinch: isItemExpanded ? { [weak self] sourceNode in + guard let self, let component = self.component else { + return + } + self.isPinchToZoomActive = true + self.state?.updated(transition: .immediate, isLocal: true) + let pinchController = PinchController(sourceNode: sourceNode, getContentAreaInScreenSpace: { + return UIScreen.main.bounds + }) + component.call.accountContext.sharedContext.mainWindow?.presentInGlobalOverlay(pinchController) + } : nil, + deactivatedPinch: isItemExpanded ? { [weak self] in + guard let self else { + return + } + self.isPinchToZoomActive = false + self.state?.updated(transition: .spring(duration: 0.4), isLocal: true) + } : nil )), environment: {}, containerSize: itemFrame.size @@ -1158,7 +1189,7 @@ final class VideoChatParticipantsComponent: Component { if participant.peer.id == component.call.accountContext.account.peerId { subtitle = PeerListItemComponent.Subtitle(text: "this is you", color: .accent) } else if component.speakingParticipants.contains(participant.peer.id) { - if let volume = participant.volume, volume != 10000 { + if let volume = participant.volume, volume / 100 != 100 { subtitle = PeerListItemComponent.Subtitle(text: "\(volume / 100)% speaking", color: .constructive) } else { subtitle = PeerListItemComponent.Subtitle(text: "speaking", color: .constructive) @@ -1322,17 +1353,8 @@ final class VideoChatParticipantsComponent: Component { )) }*/ - let expandedControlsAlpha: CGFloat = expandedVideoState.isUIHidden ? 0.0 : 1.0 + let expandedControlsAlpha: CGFloat = (expandedVideoState.isUIHidden || self.isPinchToZoomActive) ? 
0.0 : 1.0 let expandedThumbnailsAlpha: CGFloat = expandedControlsAlpha - /*if itemLayout.layout.videoColumn == nil { - if expandedVideoState.isUIHidden { - expandedThumbnailsAlpha = 0.0 - } else { - expandedThumbnailsAlpha = 1.0 - } - } else { - expandedThumbnailsAlpha = 0.0 - }*/ var expandedThumbnailsTransition = transition let expandedThumbnailsView: ComponentView diff --git a/submodules/TelegramCallsUI/Sources/VideoChatScreen.swift b/submodules/TelegramCallsUI/Sources/VideoChatScreen.swift index d013e312fb..79e0ba3204 100644 --- a/submodules/TelegramCallsUI/Sources/VideoChatScreen.swift +++ b/submodules/TelegramCallsUI/Sources/VideoChatScreen.swift @@ -22,6 +22,7 @@ import ShareController import AvatarNode import TelegramAudio import LegacyComponents +import TooltipUI final class VideoChatScreenComponent: Component { typealias EnvironmentType = ViewControllerComponentContainer.Environment @@ -83,6 +84,7 @@ final class VideoChatScreenComponent: Component { var scheduleInfo: ComponentView? var reconnectedAsEventsDisposable: Disposable? + var memberEventsDisposable: Disposable? var peer: EnginePeer? var callState: PresentationGroupCallState? @@ -144,6 +146,7 @@ final class VideoChatScreenComponent: Component { self.membersDisposable?.dispose() self.applicationStateDisposable?.dispose() self.reconnectedAsEventsDisposable?.dispose() + self.memberEventsDisposable?.dispose() self.displayAsPeersDisposable?.dispose() self.audioOutputStateDisposable?.dispose() self.inviteLinksDisposable?.dispose() @@ -819,7 +822,7 @@ final class VideoChatScreenComponent: Component { self.members = members - if let members, let _ = self.expandedParticipantsVideoState { + if let members, let expandedParticipantsVideoState = self.expandedParticipantsVideoState, !expandedParticipantsVideoState.isUIHidden { var videoCount = 0 for participant in members.participants { if participant.presentationDescription != nil { @@ -1008,6 +1011,31 @@ final class VideoChatScreenComponent: Component { } self.presentUndoOverlay(content: .invitedToVoiceChat(context: component.call.accountContext, peer: peer, title: nil, text: text, action: nil, duration: 3), action: { _ in return false }) }) + + self.memberEventsDisposable = (component.call.memberEvents + |> deliverOnMainQueue).start(next: { [weak self] event in + guard let self, let members = self.members, let component = self.component, let environment = self.environment else { + return + } + if event.joined { + var displayEvent = false + if case let .channel(channel) = self.peer, case .broadcast = channel.info { + displayEvent = false + } + if members.totalCount < 250 { + displayEvent = true + } else if event.peer.isVerified { + displayEvent = true + } else if event.isContact || event.isInChatList { + displayEvent = true + } + + if displayEvent { + let text = environment.strings.VoiceChat_PeerJoinedText(event.peer.displayTitle(strings: environment.strings, displayOrder: component.call.accountContext.sharedContext.currentPresentationData.with({ $0 }).nameDisplayOrder)).string + self.presentUndoOverlay(content: .invitedToVoiceChat(context: component.call.accountContext, peer: event.peer, title: nil, text: text, action: nil, duration: 3), action: { _ in return false }) + } + } + }) } self.isPresentedValue.set(environment.isVisible) @@ -1072,7 +1100,7 @@ final class VideoChatScreenComponent: Component { } else { containerOffset = verticalPanState.fraction * availableSize.height } - self.containerView.layer.cornerRadius = environment.deviceMetrics.screenCornerRadius + 
self.containerView.layer.cornerRadius = containerOffset.isZero ? 0.0 : environment.deviceMetrics.screenCornerRadius } transition.setFrame(view: self.containerView, frame: CGRect(origin: CGPoint(x: 0.0, y: containerOffset), size: availableSize), completion: { [weak self] completed in @@ -1249,13 +1277,49 @@ final class VideoChatScreenComponent: Component { } else { idleTitleStatusText = " " } + + let canManageCall = self.callState?.canManageCall ?? false + let titleSize = self.title.update( transition: transition, component: AnyComponent(VideoChatTitleComponent( title: self.callState?.title ?? self.peer?.debugDisplayTitle ?? " ", status: idleTitleStatusText, isRecording: self.callState?.recordingStartTimestamp != nil, - strings: environment.strings + strings: environment.strings, + tapAction: self.callState?.recordingStartTimestamp != nil ? { [weak self] in + guard let self, let component = self.component, let environment = self.environment else { + return + } + guard let titleView = self.title.view as? VideoChatTitleComponent.View, let recordingIndicatorView = titleView.recordingIndicatorView else { + return + } + var hasTooltipAlready = false + environment.controller()?.forEachController { controller -> Bool in + if controller is TooltipScreen { + hasTooltipAlready = true + } + return true + } + if !hasTooltipAlready { + let location = recordingIndicatorView.convert(recordingIndicatorView.bounds, to: self) + let text: String + if case let .channel(channel) = self.peer, case .broadcast = channel.info { + text = environment.strings.LiveStream_RecordingInProgress + } else { + text = environment.strings.VoiceChat_RecordingInProgress + } + environment.controller()?.present(TooltipScreen(account: component.call.accountContext.account, sharedContext: component.call.accountContext.sharedContext, text: .plain(text: text), icon: nil, location: .point(location.offsetBy(dx: 1.0, dy: 0.0), .top), displayDuration: .custom(3.0), shouldDismissOnTouch: { _, _ in + return .dismiss(consume: true) + }), in: .current) + } + } : nil, + longTapAction: canManageCall ? { [weak self] in + guard let self else { + return + } + self.openTitleEditing() + } : nil )), environment: {}, containerSize: CGSize(width: availableSize.width - sideInset * 2.0 - navigationButtonAreaWidth * 2.0 - 4.0 * 2.0, height: 100.0) @@ -1263,7 +1327,6 @@ final class VideoChatScreenComponent: Component { let titleFrame = CGRect(origin: CGPoint(x: floor((availableSize.width - titleSize.width) * 0.5), y: topInset + floor((navigationBarHeight - titleSize.height) * 0.5)), size: titleSize) if let titleView = self.title.view { if titleView.superview == nil { - titleView.isUserInteractionEnabled = false self.containerView.addSubview(titleView) } transition.setFrame(view: titleView, frame: titleFrame) @@ -1436,7 +1499,7 @@ final class VideoChatScreenComponent: Component { component: AnyComponent(VideoChatParticipantsComponent( call: component.call, participants: mappedParticipants, - speakingParticipants: members?.speakingParticipants ?? Set(), + speakingParticipants: self.members?.speakingParticipants ?? 
Set(), expandedVideoState: self.expandedParticipantsVideoState, theme: environment.theme, strings: environment.strings, @@ -1699,7 +1762,9 @@ final class VideoChatScreenComponent: Component { let videoButtonContent: VideoChatActionButtonComponent.Content if let callState = self.callState, let muteState = callState.muteState, !muteState.canUnmute { var buttonAudio: VideoChatActionButtonComponent.Content.Audio = .speaker + var buttonIsEnabled = false if let (availableOutputs, maybeCurrentOutput) = self.audioOutputState, let currentOutput = maybeCurrentOutput { + buttonIsEnabled = availableOutputs.count > 1 switch currentOutput { case .builtin: buttonAudio = .builtin @@ -1723,7 +1788,7 @@ final class VideoChatScreenComponent: Component { buttonAudio = .none } } - videoButtonContent = .audio(audio: buttonAudio) + videoButtonContent = .audio(audio: buttonAudio, isEnabled: buttonIsEnabled) } else { //TODO:release videoButtonContent = .video(isActive: false) diff --git a/submodules/TelegramCallsUI/Sources/VideoChatTitleComponent.swift b/submodules/TelegramCallsUI/Sources/VideoChatTitleComponent.swift index 6b36289463..b1818a7e93 100644 --- a/submodules/TelegramCallsUI/Sources/VideoChatTitleComponent.swift +++ b/submodules/TelegramCallsUI/Sources/VideoChatTitleComponent.swift @@ -12,17 +12,23 @@ final class VideoChatTitleComponent: Component { let status: String let isRecording: Bool let strings: PresentationStrings + let tapAction: (() -> Void)? + let longTapAction: (() -> Void)? init( title: String, status: String, isRecording: Bool, - strings: PresentationStrings + strings: PresentationStrings, + tapAction: (() -> Void)?, + longTapAction: (() -> Void)? ) { self.title = title self.status = status self.isRecording = isRecording self.strings = strings + self.tapAction = tapAction + self.longTapAction = longTapAction } static func ==(lhs: VideoChatTitleComponent, rhs: VideoChatTitleComponent) -> Bool { @@ -38,6 +44,12 @@ final class VideoChatTitleComponent: Component { if lhs.strings !== rhs.strings { return false } + if (lhs.tapAction == nil) != (rhs.tapAction == nil) { + return false + } + if (lhs.longTapAction == nil) != (rhs.longTapAction == nil) { + return false + } return true } @@ -55,6 +67,12 @@ final class VideoChatTitleComponent: Component { private var currentActivityStatus: String? private var currentSize: CGSize? + private var tapRecognizer: TapLongTapOrDoubleTapGestureRecognizer? + + public var recordingIndicatorView: UIView? 
{ + return self.recordingImageView + } + override init(frame: CGRect) { self.hierarchyTrackingLayer = HierarchyTrackingLayer() @@ -67,12 +85,33 @@ final class VideoChatTitleComponent: Component { } self.updateAnimations() } + + let tapRecognizer = TapLongTapOrDoubleTapGestureRecognizer(target: self, action: #selector(self.tapGesture(_:))) + tapRecognizer.tapActionAtPoint = { _ in + return .waitForSingleTap + } + self.addGestureRecognizer(tapRecognizer) } required init?(coder: NSCoder) { fatalError("init(coder:) has not been implemented") } + @objc private func tapGesture(_ recognizer: TapLongTapOrDoubleTapGestureRecognizer) { + guard let component = self.component else { + return + } + if case .ended = recognizer.state { + if let (gesture, _) = recognizer.lastRecognizedGestureAndLocation { + if case .tap = gesture { + component.tapAction?() + } else if case .longTap = gesture { + component.longTapAction?() + } + } + } + } + private func updateAnimations() { if let recordingImageView = self.recordingImageView { if recordingImageView.layer.animation(forKey: "blink") == nil { @@ -153,15 +192,22 @@ final class VideoChatTitleComponent: Component { self.component = component + self.tapRecognizer?.isEnabled = component.longTapAction != nil || component.tapAction != nil + let spacing: CGFloat = 1.0 + var maxTitleWidth = availableSize.width + if component.isRecording { + maxTitleWidth -= 10.0 + } + let titleSize = self.title.update( transition: .immediate, component: AnyComponent(MultilineTextComponent( text: .plain(NSAttributedString(string: component.title, font: Font.semibold(17.0), textColor: .white)) )), environment: {}, - containerSize: CGSize(width: availableSize.width, height: 100.0) + containerSize: CGSize(width: maxTitleWidth, height: 100.0) ) let statusComponent: AnyComponent @@ -181,15 +227,18 @@ final class VideoChatTitleComponent: Component { let titleFrame = CGRect(origin: CGPoint(x: floor((size.width - titleSize.width) * 0.5), y: 0.0), size: titleSize) if let titleView = self.title.view { if titleView.superview == nil { + titleView.layer.anchorPoint = CGPoint() + titleView.isUserInteractionEnabled = false self.addSubview(titleView) } - transition.setPosition(view: titleView, position: titleFrame.center) + transition.setPosition(view: titleView, position: titleFrame.origin) titleView.bounds = CGRect(origin: CGPoint(), size: titleFrame.size) } let statusFrame = CGRect(origin: CGPoint(x: floor((size.width - statusSize.width) * 0.5), y: titleFrame.maxY + spacing), size: statusSize) if let statusView = self.status.view { if statusView.superview == nil { + statusView.isUserInteractionEnabled = false self.addSubview(statusView) } transition.setPosition(view: statusView, position: statusFrame.center) diff --git a/submodules/TelegramCallsUI/Sources/VoiceChatMicrophoneNode.swift b/submodules/TelegramCallsUI/Sources/VoiceChatMicrophoneNode.swift index 0edd0f7c02..c644942dc0 100644 --- a/submodules/TelegramCallsUI/Sources/VoiceChatMicrophoneNode.swift +++ b/submodules/TelegramCallsUI/Sources/VoiceChatMicrophoneNode.swift @@ -5,12 +5,16 @@ import Display private final class VoiceChatMicrophoneNodeDrawingState: NSObject { let color: UIColor + let shadowColor: UIColor? 
+ let shadowBlur: CGFloat let filled: Bool let transition: CGFloat let reverse: Bool - init(color: UIColor, filled: Bool, transition: CGFloat, reverse: Bool) { + init(color: UIColor, shadowColor: UIColor?, shadowBlur: CGFloat, filled: Bool, transition: CGFloat, reverse: Bool) { self.color = color + self.shadowColor = shadowColor + self.shadowBlur = shadowBlur self.filled = filled self.transition = transition self.reverse = reverse @@ -24,11 +28,15 @@ final class VoiceChatMicrophoneNode: ASDisplayNode { let muted: Bool let color: UIColor let filled: Bool + let shadowColor: UIColor? + let shadowBlur: CGFloat - init(muted: Bool, filled: Bool, color: UIColor) { + init(muted: Bool, filled: Bool, color: UIColor, shadowColor: UIColor? = nil, shadowBlur: CGFloat = 0.0) { self.muted = muted self.filled = filled self.color = color + self.shadowColor = shadowColor + self.shadowBlur = shadowBlur } static func ==(lhs: State, rhs: State) -> Bool { @@ -41,6 +49,12 @@ final class VoiceChatMicrophoneNode: ASDisplayNode { if lhs.filled != rhs.filled { return false } + if lhs.shadowColor != rhs.shadowColor { + return false + } + if lhs.shadowBlur != rhs.shadowBlur { + return false + } return true } } @@ -122,6 +136,8 @@ final class VoiceChatMicrophoneNode: ASDisplayNode { override public func drawParameters(forAsyncLayer layer: _ASDisplayLayer) -> NSObjectProtocol? { var transitionFraction: CGFloat = self.state.muted ? 1.0 : 0.0 var color = self.state.color + var shadowColor = self.state.shadowColor + var shadowBlur = self.state.shadowBlur var reverse = false if let transitionContext = self.transitionContext { @@ -138,9 +154,17 @@ final class VoiceChatMicrophoneNode: ASDisplayNode { if transitionContext.previousState.color.rgb != color.rgb { color = transitionContext.previousState.color.interpolateTo(color, fraction: t)! } + + if let previousShadowColor = transitionContext.previousState.shadowColor, let shadowColorValue = shadowColor, previousShadowColor.rgb != shadowColorValue.rgb { + shadowColor = previousShadowColor.interpolateTo(shadowColorValue, fraction: t)! + } + + if transitionContext.previousState.shadowBlur != shadowBlur { + shadowBlur = transitionContext.previousState.shadowBlur * (1.0 - t) + shadowBlur * t + } } - return VoiceChatMicrophoneNodeDrawingState(color: color, filled: self.state.filled, transition: transitionFraction, reverse: reverse) + return VoiceChatMicrophoneNodeDrawingState(color: color, shadowColor: shadowColor, shadowBlur: shadowBlur, filled: self.state.filled, transition: transitionFraction, reverse: reverse) } @objc override public class func draw(_ bounds: CGRect, withParameters parameters: Any?, isCancelled: () -> Bool, isRasterizing: Bool) { @@ -155,9 +179,18 @@ final class VoiceChatMicrophoneNode: ASDisplayNode { guard let parameters = parameters as? 
VoiceChatMicrophoneNodeDrawingState else { return } + + var bounds = bounds + bounds = bounds.insetBy(dx: parameters.shadowBlur, dy: parameters.shadowBlur) + + context.translateBy(x: bounds.minX, y: bounds.minY) context.setFillColor(parameters.color.cgColor) + if let shadowColor = parameters.shadowColor, parameters.shadowBlur != 0.0 { + context.setShadow(offset: CGSize(), blur: parameters.shadowBlur, color: shadowColor.cgColor) + } + var clearLineWidth: CGFloat = 2.0 var lineWidth: CGFloat = 1.0 + UIScreenPixel if bounds.size.width > 36.0 { diff --git a/submodules/TelegramUI/Components/Chat/ChatMessageAnimatedStickerItemNode/Sources/ChatMessageAnimatedStickerItemNode.swift b/submodules/TelegramUI/Components/Chat/ChatMessageAnimatedStickerItemNode/Sources/ChatMessageAnimatedStickerItemNode.swift index acf4bc4dbd..b286626bdd 100644 --- a/submodules/TelegramUI/Components/Chat/ChatMessageAnimatedStickerItemNode/Sources/ChatMessageAnimatedStickerItemNode.swift +++ b/submodules/TelegramUI/Components/Chat/ChatMessageAnimatedStickerItemNode/Sources/ChatMessageAnimatedStickerItemNode.swift @@ -343,15 +343,7 @@ public class ChatMessageAnimatedStickerItemNode: ChatMessageItemView { } if oldValue != self.visibility { - switch self.visibility { - case .none: - self.textNode.visibilityRect = nil - case let .visible(_, subRect): - var subRect = subRect - subRect.origin.x = 0.0 - subRect.size.width = 10000.0 - self.textNode.visibilityRect = subRect - } + self.updateVisibility() } } } @@ -594,6 +586,21 @@ public class ChatMessageAnimatedStickerItemNode: ChatMessageItemView { let isPlaying = self.visibilityStatus == true && !self.forceStopAnimations + var effectiveVisibility = self.visibility + if !isPlaying { + effectiveVisibility = .none + } + + switch effectiveVisibility { + case .none: + self.textNode.visibilityRect = nil + case let .visible(_, subRect): + var subRect = subRect + subRect.origin.x = 0.0 + subRect.size.width = 10000.0 + self.textNode.visibilityRect = subRect + } + var canPlayEffects = isPlaying if !item.controllerInteraction.canReadHistory { canPlayEffects = false diff --git a/submodules/TelegramUI/Components/Chat/ChatMessageBubbleItemNode/Sources/ChatMessageBubbleItemNode.swift b/submodules/TelegramUI/Components/Chat/ChatMessageBubbleItemNode/Sources/ChatMessageBubbleItemNode.swift index 59ac95207b..8a36726273 100644 --- a/submodules/TelegramUI/Components/Chat/ChatMessageBubbleItemNode/Sources/ChatMessageBubbleItemNode.swift +++ b/submodules/TelegramUI/Components/Chat/ChatMessageBubbleItemNode/Sources/ChatMessageBubbleItemNode.swift @@ -680,22 +680,6 @@ public class ChatMessageBubbleItemNode: ChatMessageItemView, ChatMessagePreviewI override public var visibility: ListViewItemNodeVisibility { didSet { if self.visibility != oldValue { - for contentNode in self.contentNodes { - contentNode.visibility = mapVisibility(self.visibility, boundsSize: self.bounds.size, insets: self.insets, to: contentNode) - } - - if let threadInfoNode = self.threadInfoNode { - threadInfoNode.visibility = self.visibility != .none - } - - if let replyInfoNode = self.replyInfoNode { - replyInfoNode.visibility = self.visibility != .none - } - - if let unlockButtonNode = self.unlockButtonNode { - unlockButtonNode.visibility = self.visibility != .none - } - self.visibilityStatus = self.visibility != .none self.updateVisibility() @@ -718,6 +702,8 @@ public class ChatMessageBubbleItemNode: ChatMessageItemView, ChatMessagePreviewI } } + private var forceStopAnimations: Bool = false + required public init(rotated: Bool) { 
self.mainContextSourceNode = ContextExtractedContentContainingNode() self.mainContainerNode = ContextControllerSourceNode() @@ -6207,6 +6193,11 @@ public class ChatMessageBubbleItemNode: ChatMessageItemView, ChatMessagePreviewI return false } + override public func updateStickerSettings(forceStopAnimations: Bool) { + self.forceStopAnimations = forceStopAnimations + self.updateVisibility() + } + private func updateVisibility() { guard let item = self.item else { return @@ -6223,11 +6214,35 @@ public class ChatMessageBubbleItemNode: ChatMessageItemView, ChatMessagePreviewI if !item.controllerInteraction.canReadHistory { isPlaying = false } + if self.forceStopAnimations { + isPlaying = false + } if !isPlaying { self.removeEffectAnimations() } + var effectiveVisibility = self.visibility + if !isPlaying { + effectiveVisibility = .none + } + + for contentNode in self.contentNodes { + contentNode.visibility = mapVisibility(effectiveVisibility, boundsSize: self.bounds.size, insets: self.insets, to: contentNode) + } + + if let threadInfoNode = self.threadInfoNode { + threadInfoNode.visibility = effectiveVisibility != .none + } + + if let replyInfoNode = self.replyInfoNode { + replyInfoNode.visibility = effectiveVisibility != .none + } + + if let unlockButtonNode = self.unlockButtonNode { + unlockButtonNode.visibility = effectiveVisibility != .none + } + if isPlaying { var alreadySeen = true if item.message.flags.contains(.Incoming) { diff --git a/submodules/TelegramUI/Components/Chat/ChatMessageInteractiveMediaNode/Sources/ChatMessageInteractiveMediaNode.swift b/submodules/TelegramUI/Components/Chat/ChatMessageInteractiveMediaNode/Sources/ChatMessageInteractiveMediaNode.swift index 707944d151..6cfd0d6f48 100644 --- a/submodules/TelegramUI/Components/Chat/ChatMessageInteractiveMediaNode/Sources/ChatMessageInteractiveMediaNode.swift +++ b/submodules/TelegramUI/Components/Chat/ChatMessageInteractiveMediaNode/Sources/ChatMessageInteractiveMediaNode.swift @@ -1425,6 +1425,8 @@ public final class ChatMessageInteractiveMediaNode: ASDisplayNode, GalleryItemTr if let strongSelf = self { if file.isAnimated { strongSelf.fetchDisposable.set(fetchedMediaResource(mediaBox: context.account.postbox.mediaBox, userLocation: .peer(message.id.peerId), userContentType: MediaResourceUserContentType(file: file), reference: AnyMediaReference.message(message: MessageReference(message), media: file).resourceReference(file.resource), statsCategory: statsCategoryForFileWithAttributes(file.attributes)).startStrict()) + } else if NativeVideoContent.isHLSVideo(file: file) { + strongSelf.fetchDisposable.set(nil) } else { strongSelf.fetchDisposable.set(messageMediaFileInteractiveFetched(context: context, message: message, file: file, userInitiated: manual, storeToDownloadsPeerId: storeToDownloadsPeerId).startStrict()) } @@ -1659,16 +1661,12 @@ public final class ChatMessageInteractiveMediaNode: ASDisplayNode, GalleryItemTr let loopVideo = updatedVideoFile.isAnimated let videoContent: UniversalVideoContent - if !"".isEmpty && NativeVideoContent.isHLSVideo(file: updatedVideoFile) { - videoContent = HLSVideoContent(id: .message(message.id, message.stableId, updatedVideoFile.fileId), userLocation: .peer(message.id.peerId), fileReference: .message(message: MessageReference(message), media: updatedVideoFile), streamVideo: true, loopVideo: loopVideo) - } else { - videoContent = NativeVideoContent(id: .message(message.stableId, updatedVideoFile.fileId), userLocation: .peer(message.id.peerId), fileReference: .message(message: 
MessageReference(message), media: updatedVideoFile), streamVideo: streamVideo ? .conservative : .none, loopVideo: loopVideo, enableSound: false, fetchAutomatically: false, onlyFullSizeThumbnail: (onlyFullSizeVideoThumbnail ?? false), continuePlayingWithoutSoundOnLostAudioSession: isInlinePlayableVideo, placeholderColor: emptyColor, captureProtected: message.isCopyProtected() || isExtendedMedia, storeAfterDownload: { [weak context] in - guard let context, let peerId else { - return - } - let _ = storeDownloadedMedia(storeManager: context.downloadedMediaStoreManager, media: .message(message: MessageReference(message), media: updatedVideoFile), peerId: peerId).startStandalone() - }) - } + videoContent = NativeVideoContent(id: .message(message.stableId, updatedVideoFile.fileId), userLocation: .peer(message.id.peerId), fileReference: .message(message: MessageReference(message), media: updatedVideoFile), streamVideo: streamVideo ? .conservative : .none, loopVideo: loopVideo, enableSound: false, fetchAutomatically: false, onlyFullSizeThumbnail: (onlyFullSizeVideoThumbnail ?? false), continuePlayingWithoutSoundOnLostAudioSession: isInlinePlayableVideo, placeholderColor: emptyColor, captureProtected: message.isCopyProtected() || isExtendedMedia, storeAfterDownload: { [weak context] in + guard let context, let peerId else { + return + } + let _ = storeDownloadedMedia(storeManager: context.downloadedMediaStoreManager, media: .message(message: MessageReference(message), media: updatedVideoFile), peerId: peerId).startStandalone() + }) let videoNode = UniversalVideoNode(accountId: context.account.id, postbox: context.account.postbox, audioSession: mediaManager.audioSession, manager: mediaManager.universalVideoManager, decoration: decoration, content: videoContent, priority: .embedded) videoNode.isUserInteractionEnabled = false videoNode.ownsContentNodeUpdated = { [weak self] owns in @@ -1850,7 +1848,32 @@ public final class ChatMessageInteractiveMediaNode: ASDisplayNode, GalleryItemTr } } - if case .full = automaticDownload { + if automaticDownload != .none, let file = media as? TelegramMediaFile, NativeVideoContent.isHLSVideo(file: file) { + let postbox = context.account.postbox + let fetchSignal = HLSVideoContent.minimizedHLSQualityPreloadData(postbox: context.account.postbox, file: .message(message: MessageReference(message), media: file), userLocation: .peer(message.id.peerId), prefixSeconds: 10, autofetchPlaylist: true) + |> mapToSignal { fileAndRange -> Signal in + guard let fileAndRange else { + return .complete() + } + return freeMediaFileResourceInteractiveFetched(postbox: postbox, userLocation: .peer(message.id.peerId), fileReference: fileAndRange.0, resource: fileAndRange.0.media.resource, range: (fileAndRange.1, .default)) + |> ignoreValues + |> `catch` { _ -> Signal in + return .complete() + } + } + + let visibilityAwareFetchSignal = strongSelf.visibilityPromise.get() + |> mapToSignal { visibility -> Signal in + if visibility { + return fetchSignal + |> mapToSignal { _ -> Signal in + } + } else { + return .complete() + } + } + strongSelf.fetchDisposable.set(visibilityAwareFetchSignal.startStrict()) + } else if case .full = automaticDownload { if let _ = media as? TelegramMediaImage { updatedFetchControls.fetch(false) } else if let image = media as? 
TelegramMediaWebFile { diff --git a/submodules/TelegramUI/Components/Chat/ChatMessageMediaBubbleContentNode/BUILD b/submodules/TelegramUI/Components/Chat/ChatMessageMediaBubbleContentNode/BUILD index 5de32fcb35..e5008a8e93 100644 --- a/submodules/TelegramUI/Components/Chat/ChatMessageMediaBubbleContentNode/BUILD +++ b/submodules/TelegramUI/Components/Chat/ChatMessageMediaBubbleContentNode/BUILD @@ -24,6 +24,7 @@ swift_library( "//submodules/TelegramUI/Components/Chat/ChatMessageBubbleContentNode", "//submodules/TelegramUI/Components/Chat/ChatMessageItemCommon", "//submodules/TelegramUI/Components/Chat/ChatMessageInteractiveMediaNode", + "//submodules/TelegramUniversalVideoContent", ], visibility = [ "//visibility:public", diff --git a/submodules/TelegramUI/Components/Chat/ChatMessageMediaBubbleContentNode/Sources/ChatMessageMediaBubbleContentNode.swift b/submodules/TelegramUI/Components/Chat/ChatMessageMediaBubbleContentNode/Sources/ChatMessageMediaBubbleContentNode.swift index dbe1880c41..fbf52337fa 100644 --- a/submodules/TelegramUI/Components/Chat/ChatMessageMediaBubbleContentNode/Sources/ChatMessageMediaBubbleContentNode.swift +++ b/submodules/TelegramUI/Components/Chat/ChatMessageMediaBubbleContentNode/Sources/ChatMessageMediaBubbleContentNode.swift @@ -16,6 +16,7 @@ import ChatMessageItemCommon import ChatMessageInteractiveMediaNode import ChatControllerInteraction import InvisibleInkDustNode +import TelegramUniversalVideoContent public class ChatMessageMediaBubbleContentNode: ChatMessageBubbleContentNode { override public var supportsMosaic: Bool { @@ -163,7 +164,9 @@ public class ChatMessageMediaBubbleContentNode: ChatMessageBubbleContentNode { automaticPlayback = item.context.account.postbox.mediaBox.completedResourcePath(telegramFile.resource) != nil } } else if (telegramFile.isVideo && !telegramFile.isAnimated) && item.context.sharedContext.energyUsageSettings.autoplayVideo { - if case .full = automaticDownload { + if NativeVideoContent.isHLSVideo(file: telegramFile) { + automaticPlayback = true + } else if case .full = automaticDownload { automaticPlayback = true } else { automaticPlayback = item.context.account.postbox.mediaBox.completedResourcePath(telegramFile.resource) != nil @@ -207,7 +210,9 @@ public class ChatMessageMediaBubbleContentNode: ChatMessageBubbleContentNode { automaticPlayback = item.context.account.postbox.mediaBox.completedResourcePath(telegramFile.resource) != nil } } else if (telegramFile.isVideo && !telegramFile.isAnimated) && item.context.sharedContext.energyUsageSettings.autoplayVideo { - if case .full = automaticDownload { + if NativeVideoContent.isHLSVideo(file: telegramFile) { + automaticPlayback = true + } else if case .full = automaticDownload { automaticPlayback = true } else { automaticPlayback = item.context.account.postbox.mediaBox.completedResourcePath(telegramFile.resource) != nil diff --git a/submodules/TelegramUI/Components/Chat/ChatMessageStickerItemNode/Sources/ChatMessageStickerItemNode.swift b/submodules/TelegramUI/Components/Chat/ChatMessageStickerItemNode/Sources/ChatMessageStickerItemNode.swift index c63976ce11..c6c2ccdcc1 100644 --- a/submodules/TelegramUI/Components/Chat/ChatMessageStickerItemNode/Sources/ChatMessageStickerItemNode.swift +++ b/submodules/TelegramUI/Components/Chat/ChatMessageStickerItemNode/Sources/ChatMessageStickerItemNode.swift @@ -101,6 +101,8 @@ public class ChatMessageStickerItemNode: ChatMessageItemView { } } + private var forceStopAnimations: Bool = false + required public init(rotated: Bool) { 
self.contextSourceNode = ContextExtractedContentContainingNode() self.containerNode = ContextControllerSourceNode() @@ -2160,6 +2162,9 @@ public class ChatMessageStickerItemNode: ChatMessageItemView { if !item.controllerInteraction.canReadHistory { isPlaying = false } + if self.forceStopAnimations { + isPlaying = false + } if !isPlaying { self.removeEffectAnimations() @@ -2191,6 +2196,11 @@ public class ChatMessageStickerItemNode: ChatMessageItemView { } } + override public func updateStickerSettings(forceStopAnimations: Bool) { + self.forceStopAnimations = forceStopAnimations + self.updateVisibility() + } + override public func messageEffectTargetView() -> UIView? { if let result = self.dateAndStatusNode.messageEffectTargetView() { return result diff --git a/submodules/TelegramUI/Sources/ChatController.swift b/submodules/TelegramUI/Sources/ChatController.swift index b84d45e5ef..86d8e703ae 100644 --- a/submodules/TelegramUI/Sources/ChatController.swift +++ b/submodules/TelegramUI/Sources/ChatController.swift @@ -6946,12 +6946,21 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G } }) - self.stickerSettingsDisposable = combineLatest(queue: Queue.mainQueue(), context.sharedContext.accountManager.sharedData(keys: [ApplicationSpecificSharedDataKeys.stickerSettings]), self.disableStickerAnimationsPromise.get()).startStrict(next: { [weak self] sharedData, disableStickerAnimations in + self.stickerSettingsDisposable = combineLatest(queue: Queue.mainQueue(), + context.sharedContext.accountManager.sharedData(keys: [ApplicationSpecificSharedDataKeys.stickerSettings]), + self.disableStickerAnimationsPromise.get(), + context.sharedContext.hasGroupCallOnScreen + ).startStrict(next: { [weak self] sharedData, disableStickerAnimations, hasGroupCallOnScreen in var stickerSettings = StickerSettings.defaultSettings if let value = sharedData.entries[ApplicationSpecificSharedDataKeys.stickerSettings]?.get(StickerSettings.self) { stickerSettings = value } + var disableStickerAnimations = disableStickerAnimations + if hasGroupCallOnScreen { + disableStickerAnimations = true + } + let chatStickerSettings = ChatInterfaceStickerSettings(stickerSettings: stickerSettings) if let strongSelf = self, strongSelf.stickerSettings != chatStickerSettings || strongSelf.disableStickerAnimationsValue != disableStickerAnimations { strongSelf.stickerSettings = chatStickerSettings diff --git a/submodules/TgVoipWebrtc/tgcalls b/submodules/TgVoipWebrtc/tgcalls index 846f704048..b6e7349b98 160000 --- a/submodules/TgVoipWebrtc/tgcalls +++ b/submodules/TgVoipWebrtc/tgcalls @@ -1 +1 @@ -Subproject commit 846f7040480f52b8bc0382fb9e2e78e8ef60c633 +Subproject commit b6e7349b98c5d3999f45e9468eee068aff86ee37
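The ChatController change ties the calls UI back into chat rendering: whenever a group call is on screen, sticker and effect animations in visible chats are force-stopped by folding sharedContext.hasGroupCallOnScreen into the existing stickerSettings combineLatest; the flag then reaches the item nodes through the updateStickerSettings(forceStopAnimations:) overrides added above. Condensed from the hunk, with the unchanged tail of the handler elided:

    self.stickerSettingsDisposable = combineLatest(queue: Queue.mainQueue(),
        context.sharedContext.accountManager.sharedData(keys: [ApplicationSpecificSharedDataKeys.stickerSettings]),
        self.disableStickerAnimationsPromise.get(),
        context.sharedContext.hasGroupCallOnScreen
    ).startStrict(next: { [weak self] sharedData, disableStickerAnimations, hasGroupCallOnScreen in
        var stickerSettings = StickerSettings.defaultSettings
        if let value = sharedData.entries[ApplicationSpecificSharedDataKeys.stickerSettings]?.get(StickerSettings.self) {
            stickerSettings = value
        }

        // An on-screen group call overrides the per-chat flag for as long as it is visible.
        var disableStickerAnimations = disableStickerAnimations
        if hasGroupCallOnScreen {
            disableStickerAnimations = true
        }

        // ... unchanged: build ChatInterfaceStickerSettings and propagate the values if they differ.
    })
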