diff --git a/submodules/SettingsUI/Sources/Data and Storage/ProxyServerActionSheetController.swift b/submodules/SettingsUI/Sources/Data and Storage/ProxyServerActionSheetController.swift index 610a18a8e9..48b0ac5ebf 100644 --- a/submodules/SettingsUI/Sources/Data and Storage/ProxyServerActionSheetController.swift +++ b/submodules/SettingsUI/Sources/Data and Storage/ProxyServerActionSheetController.swift @@ -12,6 +12,7 @@ import ActivityIndicator import OverlayStatusController import AccountContext import PresentationDataUtils +import UrlEscaping public final class ProxyServerActionSheetController: ActionSheetController { private var presentationDisposable: Disposable? @@ -131,7 +132,7 @@ private final class ProxyServerInfoItemNode: ActionSheetItemNode { let serverTextNode = ImmediateTextNode() serverTextNode.isUserInteractionEnabled = false serverTextNode.displaysAsynchronously = false - serverTextNode.attributedText = NSAttributedString(string: server.host, font: textFont, textColor: theme.primaryTextColor) + serverTextNode.attributedText = NSAttributedString(string: urlEncodedStringFromString(server.host), font: textFont, textColor: theme.primaryTextColor) fieldNodes.append((serverTitleNode, serverTextNode)) let portTitleNode = ImmediateTextNode() diff --git a/submodules/SettingsUI/Sources/Data and Storage/ProxySettingsServerItem.swift b/submodules/SettingsUI/Sources/Data and Storage/ProxySettingsServerItem.swift index 5d7e756311..3e68942278 100644 --- a/submodules/SettingsUI/Sources/Data and Storage/ProxySettingsServerItem.swift +++ b/submodules/SettingsUI/Sources/Data and Storage/ProxySettingsServerItem.swift @@ -10,6 +10,7 @@ import TelegramPresentationData import ItemListUI import PresentationDataUtils import ActivityIndicator +import UrlEscaping private let activitySize = CGSize(width: 24.0, height: 24.0) @@ -236,7 +237,7 @@ private final class ProxySettingsServerItemNode: ItemListRevealOptionsItemNode { } let titleAttributedString = NSMutableAttributedString() - titleAttributedString.append(NSAttributedString(string: item.server.host, font: titleFont, textColor: item.theme.list.itemPrimaryTextColor)) + titleAttributedString.append(NSAttributedString(string: urlEncodedStringFromString(item.server.host), font: titleFont, textColor: item.theme.list.itemPrimaryTextColor)) titleAttributedString.append(NSAttributedString(string: ":\(item.server.port)", font: titleFont, textColor: item.theme.list.itemSecondaryTextColor)) let statusAttributedString = NSAttributedString(string: item.label, font: statusFont, textColor: item.labelAccent ? item.theme.list.itemAccentColor : item.theme.list.itemSecondaryTextColor) diff --git a/submodules/ShareController/Sources/ShareController.swift b/submodules/ShareController/Sources/ShareController.swift index dcf21b5996..34d1aecd33 100644 --- a/submodules/ShareController/Sources/ShareController.swift +++ b/submodules/ShareController/Sources/ShareController.swift @@ -708,7 +708,19 @@ public final class ShareController: ViewController { } else { authorPeerId = accountPeerId } - collectableItems.append(CollectableExternalShareItem(url: url, text: message.text, author: authorPeerId, timestamp: message.timestamp, mediaReference: selectedMedia.flatMap({ AnyMediaReference.message(message: MessageReference(message), media: $0) }))) + + var restrictedText: String? + for attribute in message.attributes { + if let attribute = attribute as? 
RestrictedContentMessageAttribute { + restrictedText = attribute.platformText(platform: "ios", contentSettings: strongSelf.currentContext.currentContentSettings.with { $0 }) ?? "" + } + } + + if let restrictedText = restrictedText { + collectableItems.append(CollectableExternalShareItem(url: url, text: restrictedText, author: authorPeerId, timestamp: message.timestamp, mediaReference: nil)) + } else { + collectableItems.append(CollectableExternalShareItem(url: url, text: message.text, author: authorPeerId, timestamp: message.timestamp, mediaReference: selectedMedia.flatMap({ AnyMediaReference.message(message: MessageReference(message), media: $0) }))) + } } case .fromExternal: break diff --git a/submodules/TelegramCallsUI/Sources/GroupVideoNode.swift b/submodules/TelegramCallsUI/Sources/GroupVideoNode.swift new file mode 100644 index 0000000000..1200ac2778 --- /dev/null +++ b/submodules/TelegramCallsUI/Sources/GroupVideoNode.swift @@ -0,0 +1,231 @@ +import Foundation +import UIKit +import AsyncDisplayKit +import Display +import SwiftSignalKit +import AccountContext + +final class GroupVideoNode: ASDisplayNode { + private let videoViewContainer: UIView + private let videoView: PresentationCallVideoView + + private let backdropVideoViewContainer: UIView + private let backdropVideoView: PresentationCallVideoView? + private var backdropEffectView: UIVisualEffectView? + + private var effectView: UIVisualEffectView? + private var isBlurred: Bool = false + + private var validLayout: (CGSize, Bool)? + + var tapped: (() -> Void)? + + private let readyPromise = ValuePromise(false) + var ready: Signal { + return self.readyPromise.get() + } + + init(videoView: PresentationCallVideoView, backdropVideoView: PresentationCallVideoView?) { + self.videoViewContainer = UIView() + self.videoView = videoView + + self.backdropVideoViewContainer = UIView() + self.backdropVideoView = backdropVideoView + + super.init() + + self.isUserInteractionEnabled = false + + if let backdropVideoView = backdropVideoView { + self.backdropVideoViewContainer.addSubview(backdropVideoView.view) + self.view.addSubview(self.backdropVideoViewContainer) + + if #available(iOS 13.0, *) { + let backdropEffectView = UIVisualEffectView(effect: UIBlurEffect(style: .systemThinMaterialDark)) + self.view.addSubview(backdropEffectView) + self.backdropEffectView = backdropEffectView + } else { + } + } + + self.videoViewContainer.addSubview(self.videoView.view) + self.view.addSubview(self.videoViewContainer) + + self.clipsToBounds = true + + videoView.setOnFirstFrameReceived({ [weak self] _ in + Queue.mainQueue().async { + guard let strongSelf = self else { + return + } + strongSelf.readyPromise.set(true) + if let (size, isLandscape) = strongSelf.validLayout { + strongSelf.updateLayout(size: size, isLandscape: isLandscape, transition: .immediate) + } + } + }) + + videoView.setOnOrientationUpdated({ [weak self] _, _ in + Queue.mainQueue().async { + guard let strongSelf = self else { + return + } + if let (size, isLandscape) = strongSelf.validLayout { + strongSelf.updateLayout(size: size, isLandscape: isLandscape, transition: .immediate) + } + } + }) + + self.view.addGestureRecognizer(UITapGestureRecognizer(target: self, action: #selector(self.tapGesture(_:)))) + } + + func updateIsBlurred(isBlurred: Bool, light: Bool = false, animated: Bool = true) { + if self.isBlurred == isBlurred { + return + } + self.isBlurred = isBlurred + + if isBlurred { + if self.effectView == nil { + let effectView = UIVisualEffectView() + self.effectView = effectView + 
effectView.frame = self.bounds + self.view.addSubview(effectView) + } + if animated { + UIView.animate(withDuration: 0.3, animations: { + self.effectView?.effect = UIBlurEffect(style: light ? .light : .dark) + }) + } else { + self.effectView?.effect = UIBlurEffect(style: light ? .light : .dark) + } + } else if let effectView = self.effectView { + self.effectView = nil + UIView.animate(withDuration: 0.3, animations: { + effectView.effect = nil + }, completion: { [weak effectView] _ in + effectView?.removeFromSuperview() + }) + } + } + + func flip(withBackground: Bool) { + if withBackground { + self.backgroundColor = .black + } + UIView.transition(with: withBackground ? self.videoViewContainer : self.view, duration: 0.4, options: [.transitionFlipFromLeft, .curveEaseOut], animations: { + UIView.performWithoutAnimation { + self.updateIsBlurred(isBlurred: true, light: true, animated: false) + } + }) { finished in + self.backgroundColor = nil + Queue.mainQueue().after(0.5) { + self.updateIsBlurred(isBlurred: false) + } + } + } + + @objc private func tapGesture(_ recognizer: UITapGestureRecognizer) { + if case .ended = recognizer.state { + self.tapped?() + } + } + + func updateLayout(size: CGSize, isLandscape: Bool, transition: ContainedViewLayoutTransition) { + self.validLayout = (size, isLandscape) + transition.updateFrameAsPositionAndBounds(layer: self.videoViewContainer.layer, frame: CGRect(origin: CGPoint(), size: size)) + transition.updateFrameAsPositionAndBounds(layer: self.backdropVideoViewContainer.layer, frame: CGRect(origin: CGPoint(), size: size)) + + let orientation = self.videoView.getOrientation() + var aspect = self.videoView.getAspect() + if aspect <= 0.01 { + aspect = 3.0 / 4.0 + } + + let rotatedAspect: CGFloat + let angle: CGFloat + let switchOrientation: Bool + switch orientation { + case .rotation0: + angle = 0.0 + rotatedAspect = 1 / aspect + switchOrientation = false + case .rotation90: + angle = CGFloat.pi / 2.0 + rotatedAspect = aspect + switchOrientation = true + case .rotation180: + angle = CGFloat.pi + rotatedAspect = 1 / aspect + switchOrientation = false + case .rotation270: + angle = CGFloat.pi * 3.0 / 2.0 + rotatedAspect = aspect + switchOrientation = true + } + + var rotatedVideoSize = CGSize(width: 100.0, height: rotatedAspect * 100.0) + + var containerSize = size + if switchOrientation { + rotatedVideoSize = CGSize(width: rotatedVideoSize.height, height: rotatedVideoSize.width) + containerSize = CGSize(width: containerSize.height, height: containerSize.width) + } + + let fittedSize = rotatedVideoSize.aspectFitted(containerSize) + let filledSize = rotatedVideoSize.aspectFilled(containerSize) + + if isLandscape { + rotatedVideoSize = fittedSize + } else { + rotatedVideoSize = filledSize + } + + var rotatedVideoFrame = CGRect(origin: CGPoint(x: floor((size.width - rotatedVideoSize.width) / 2.0), y: floor((size.height - rotatedVideoSize.height) / 2.0)), size: rotatedVideoSize) + rotatedVideoFrame.origin.x = floor(rotatedVideoFrame.origin.x) + rotatedVideoFrame.origin.y = floor(rotatedVideoFrame.origin.y) + rotatedVideoFrame.size.width = ceil(rotatedVideoFrame.size.width) + rotatedVideoFrame.size.height = ceil(rotatedVideoFrame.size.height) + + let videoSize = rotatedVideoFrame.size.aspectFilled(CGSize(width: 1080.0, height: 1080.0)) + transition.updatePosition(layer: self.videoView.view.layer, position: rotatedVideoFrame.center) + transition.updateBounds(layer: self.videoView.view.layer, bounds: CGRect(origin: CGPoint(), size: videoSize)) + + let transformScale: 
CGFloat = rotatedVideoFrame.width / videoSize.width + transition.updateTransformScale(layer: self.videoViewContainer.layer, scale: transformScale) + + let transition: ContainedViewLayoutTransition = .immediate + transition.updateTransformRotation(view: self.videoView.view, angle: angle) + + if let backdropVideoView = self.backdropVideoView { + rotatedVideoSize = filledSize + var rotatedVideoFrame = CGRect(origin: CGPoint(x: floor((size.width - rotatedVideoSize.width) / 2.0), y: floor((size.height - rotatedVideoSize.height) / 2.0)), size: rotatedVideoSize) + rotatedVideoFrame.origin.x = floor(rotatedVideoFrame.origin.x) + rotatedVideoFrame.origin.y = floor(rotatedVideoFrame.origin.y) + rotatedVideoFrame.size.width = ceil(rotatedVideoFrame.size.width) + rotatedVideoFrame.size.height = ceil(rotatedVideoFrame.size.height) + + let videoSize = rotatedVideoFrame.size.aspectFilled(CGSize(width: 1080.0, height: 1080.0)) + transition.updatePosition(layer: backdropVideoView.view.layer, position: rotatedVideoFrame.center) + transition.updateBounds(layer: backdropVideoView.view.layer, bounds: CGRect(origin: CGPoint(), size: videoSize)) + + let transformScale: CGFloat = rotatedVideoFrame.width / videoSize.width + transition.updateTransformScale(layer: self.backdropVideoViewContainer.layer, scale: transformScale) + + let transition: ContainedViewLayoutTransition = .immediate + transition.updateTransformRotation(view: backdropVideoView.view, angle: angle) + } + + if let backdropEffectView = self.backdropEffectView { + transition.updateFrame(view: backdropEffectView, frame: self.bounds) + } + + if let effectView = self.effectView { + transition.updateFrame(view: effectView, frame: self.bounds) + } + + // TODO: properly fix the issue + // On iOS 13 and later metal layer transformation is broken if the layer does not require compositing + self.videoView.view.alpha = 0.995 + } +} diff --git a/submodules/TelegramCallsUI/Sources/VoiceChatActionItem.swift b/submodules/TelegramCallsUI/Sources/VoiceChatActionItem.swift index bbdea52471..a07cd0ff4c 100644 --- a/submodules/TelegramCallsUI/Sources/VoiceChatActionItem.swift +++ b/submodules/TelegramCallsUI/Sources/VoiceChatActionItem.swift @@ -155,12 +155,12 @@ class VoiceChatActionItemNode: ListViewItemNode { let titleFont = Font.regular(17.0) - var leftInset: CGFloat = 16.0 + params.leftInset + var leftInset: CGFloat = 8.0 + params.leftInset if case .generic = item.icon { leftInset += 49.0 } - let (titleLayout, titleApply) = makeTitleLayout(TextNodeLayoutArguments(attributedString: NSAttributedString(string: item.title, font: titleFont, textColor: UIColor(rgb: 0xffffff)), backgroundColor: nil, maximumNumberOfLines: 1, truncationType: .end, constrainedSize: CGSize(width: params.width - 10.0 - leftInset - params.rightInset, height: CGFloat.greatestFiniteMagnitude), alignment: .natural, cutout: nil, insets: UIEdgeInsets())) + let (titleLayout, titleApply) = makeTitleLayout(TextNodeLayoutArguments(attributedString: NSAttributedString(string: item.title, font: titleFont, textColor: item.presentationData.theme.list.itemAccentColor), backgroundColor: nil, maximumNumberOfLines: 1, truncationType: .end, constrainedSize: CGSize(width: params.width - 10.0 - leftInset - params.rightInset, height: CGFloat.greatestFiniteMagnitude), alignment: .natural, cutout: nil, insets: UIEdgeInsets())) let contentHeight: CGFloat = 12.0 * 2.0 + titleLayout.size.height @@ -182,9 +182,9 @@ class VoiceChatActionItemNode: ListViewItemNode { strongSelf.bottomStripeNode.backgroundColor = 
UIColor(rgb: 0xffffff, alpha: 0.08) strongSelf.highlightedBackgroundNode.backgroundColor = item.presentationData.theme.list.itemHighlightedBackgroundColor - strongSelf.iconNode.image = generateTintedImage(image: item.icon.image, color: UIColor(rgb: 0xffffff)) + strongSelf.iconNode.image = generateTintedImage(image: item.icon.image, color: item.presentationData.theme.list.itemAccentColor) } else if updatedContent { - strongSelf.iconNode.image = generateTintedImage(image: item.icon.image, color: UIColor(rgb: 0xffffff)) + strongSelf.iconNode.image = generateTintedImage(image: item.icon.image, color: item.presentationData.theme.list.itemAccentColor) } let _ = titleApply() @@ -192,7 +192,7 @@ class VoiceChatActionItemNode: ListViewItemNode { let titleOffset = leftInset let hideBottomStripe: Bool = last if let image = item.icon.image { - let iconFrame = CGRect(origin: CGPoint(x: params.leftInset + floor((leftInset - params.leftInset - image.size.width) / 2.0) + 3.0, y: floor((contentSize.height - image.size.height) / 2.0)), size: image.size) + let iconFrame = CGRect(origin: CGPoint(x: params.leftInset + floor((leftInset - params.leftInset - image.size.width) / 2.0) - 1.0, y: floor((contentSize.height - image.size.height) / 2.0)), size: image.size) strongSelf.iconNode.frame = iconFrame } @@ -208,7 +208,7 @@ class VoiceChatActionItemNode: ListViewItemNode { strongSelf.bottomStripeNode.frame = CGRect(origin: CGPoint(x: leftInset, y: contentSize.height - separatorHeight), size: CGSize(width: params.width - leftInset, height: separatorHeight)) - strongSelf.titleNode.frame = CGRect(origin: CGPoint(x: titleOffset, y: floor((contentSize.height - titleLayout.size.height) / 2.0)), size: titleLayout.size) + strongSelf.titleNode.frame = CGRect(origin: CGPoint(x: titleOffset + 1.0, y: floor((contentSize.height - titleLayout.size.height) / 2.0)), size: titleLayout.size) strongSelf.highlightedBackgroundNode.frame = CGRect(origin: CGPoint(x: 0.0, y: -UIScreenPixel), size: CGSize(width: params.width, height: contentSize.height + UIScreenPixel + UIScreenPixel)) } diff --git a/submodules/TelegramCallsUI/Sources/VoiceChatController.swift b/submodules/TelegramCallsUI/Sources/VoiceChatController.swift index 7fefabb76f..6c4500e829 100644 --- a/submodules/TelegramCallsUI/Sources/VoiceChatController.swift +++ b/submodules/TelegramCallsUI/Sources/VoiceChatController.swift @@ -32,16 +32,15 @@ import WebSearchUI import MapResourceToAvatarSizes import SolidRoundedButtonNode -private let panelBackgroundColor = UIColor(rgb: 0x1c1c1e) -private let secondaryPanelBackgroundColor = UIColor(rgb: 0x2c2c2e) -private let fullscreenBackgroundColor = UIColor(rgb: 0x000000) -private let dimColor = UIColor(white: 0.0, alpha: 0.5) +let panelBackgroundColor = UIColor(rgb: 0x1c1c1e) +let secondaryPanelBackgroundColor = UIColor(rgb: 0x2c2c2e) +let fullscreenBackgroundColor = UIColor(rgb: 0x000000) private let smallButtonSize = CGSize(width: 36.0, height: 36.0) private let sideButtonSize = CGSize(width: 56.0, height: 56.0) -private let mainVideoHeight: CGFloat = 240.0 private let topPanelHeight: CGFloat = 63.0 private let bottomAreaHeight: CGFloat = 205.0 private let fullscreenBottomAreaHeight: CGFloat = 80.0 +private let bottomGradientHeight: CGFloat = 70.0 private func decorationCornersImage(top: Bool, bottom: Bool, dark: Bool) -> UIImage? 
{ if !top && !bottom { @@ -69,428 +68,17 @@ private func decorationCornersImage(top: Bool, bottom: Bool, dark: Bool) -> UIIm })?.stretchableImage(withLeftCapWidth: 25, topCapHeight: 25) } -final class GroupVideoNode: ASDisplayNode { - private let videoViewContainer: UIView - private let videoView: PresentationCallVideoView - - private var effectView: UIVisualEffectView? - private var isBlurred: Bool = false - - private var validLayout: (CGSize, Bool)? - - var tapped: (() -> Void)? - - private let readyPromise = ValuePromise(false) - var ready: Signal { - return self.readyPromise.get() - } - - init(videoView: PresentationCallVideoView) { - self.videoViewContainer = UIView() - self.videoView = videoView +private func decorationBottomGradientImage(dark: Bool) -> UIImage? { + return generateImage(CGSize(width: 1.0, height: bottomGradientHeight), rotatedContext: { size, context in + let bounds = CGRect(origin: CGPoint(), size: size) + context.clear(bounds) - super.init() - - self.isUserInteractionEnabled = false - - self.videoViewContainer.addSubview(self.videoView.view) - self.view.addSubview(self.videoViewContainer) - - self.clipsToBounds = true - - videoView.setOnFirstFrameReceived({ [weak self] _ in - Queue.mainQueue().async { - guard let strongSelf = self else { - return - } - strongSelf.readyPromise.set(true) - if let (size, isLandscape) = strongSelf.validLayout { - strongSelf.updateLayout(size: size, isLandscape: isLandscape, transition: .immediate) - } - } - }) - - videoView.setOnOrientationUpdated({ [weak self] _, _ in - Queue.mainQueue().async { - guard let strongSelf = self else { - return - } - if let (size, isLandscape) = strongSelf.validLayout { - strongSelf.updateLayout(size: size, isLandscape: isLandscape, transition: .immediate) - } - } - }) - - self.view.addGestureRecognizer(UITapGestureRecognizer(target: self, action: #selector(self.tapGesture(_:)))) - } - - func updateIsBlurred(isBlurred: Bool, light: Bool = false, animated: Bool = true) { - if self.isBlurred == isBlurred { - return - } - self.isBlurred = isBlurred - - if isBlurred { - if self.effectView == nil { - let effectView = UIVisualEffectView() - self.effectView = effectView - effectView.frame = self.bounds - self.view.addSubview(effectView) - } - if animated { - UIView.animate(withDuration: 0.3, animations: { - self.effectView?.effect = UIBlurEffect(style: light ? .light : .dark) - }) - } else { - self.effectView?.effect = UIBlurEffect(style: light ? .light : .dark) - } - } else if let effectView = self.effectView { - self.effectView = nil - UIView.animate(withDuration: 0.3, animations: { - effectView.effect = nil - }, completion: { [weak effectView] _ in - effectView?.removeFromSuperview() - }) - } - } - - func flip(withBackground: Bool) { - if withBackground { - self.backgroundColor = .black - } - UIView.transition(with: withBackground ? 
self.videoViewContainer : self.view, duration: 0.4, options: [.transitionFlipFromLeft, .curveEaseOut], animations: { - UIView.performWithoutAnimation { - self.updateIsBlurred(isBlurred: true, light: true, animated: false) - } - }) { finished in - self.backgroundColor = nil - Queue.mainQueue().after(0.5) { - self.updateIsBlurred(isBlurred: false) - } - } - } - - @objc private func tapGesture(_ recognizer: UITapGestureRecognizer) { - if case .ended = recognizer.state { - self.tapped?() - } - } - - func updateLayout(size: CGSize, isLandscape: Bool, transition: ContainedViewLayoutTransition) { - self.validLayout = (size, isLandscape) - transition.updateFrameAsPositionAndBounds(layer: self.videoViewContainer.layer, frame: CGRect(origin: CGPoint(), size: size)) - - let orientation = self.videoView.getOrientation() - var aspect = self.videoView.getAspect() - if aspect <= 0.01 { - aspect = 3.0 / 4.0 - } - - let rotatedAspect: CGFloat - let angle: CGFloat - let switchOrientation: Bool - switch orientation { - case .rotation0: - angle = 0.0 - rotatedAspect = 1 / aspect - switchOrientation = false - case .rotation90: - angle = CGFloat.pi / 2.0 - rotatedAspect = aspect - switchOrientation = true - case .rotation180: - angle = CGFloat.pi - rotatedAspect = 1 / aspect - switchOrientation = false - case .rotation270: - angle = CGFloat.pi * 3.0 / 2.0 - rotatedAspect = aspect - switchOrientation = true - } - - var rotatedVideoSize = CGSize(width: 100.0, height: rotatedAspect * 100.0) - - var containerSize = size - if switchOrientation { - rotatedVideoSize = CGSize(width: rotatedVideoSize.height, height: rotatedVideoSize.width) - containerSize = CGSize(width: containerSize.height, height: containerSize.width) - } - - if isLandscape { - rotatedVideoSize = rotatedVideoSize.aspectFitted(containerSize) - } else { - rotatedVideoSize = rotatedVideoSize.aspectFilled(containerSize) - } - - var rotatedVideoFrame = CGRect(origin: CGPoint(x: floor((size.width - rotatedVideoSize.width) / 2.0), y: floor((size.height - rotatedVideoSize.height) / 2.0)), size: rotatedVideoSize) - rotatedVideoFrame.origin.x = floor(rotatedVideoFrame.origin.x) - rotatedVideoFrame.origin.y = floor(rotatedVideoFrame.origin.y) - rotatedVideoFrame.size.width = ceil(rotatedVideoFrame.size.width) - rotatedVideoFrame.size.height = ceil(rotatedVideoFrame.size.height) - - let videoSize = rotatedVideoFrame.size.aspectFilled(CGSize(width: 1080.0, height: 1080.0)) - transition.updatePosition(layer: self.videoView.view.layer, position: rotatedVideoFrame.center) - transition.updateBounds(layer: self.videoView.view.layer, bounds: CGRect(origin: CGPoint(), size: videoSize)) - - let transformScale: CGFloat = rotatedVideoFrame.width / videoSize.width - transition.updateTransformScale(layer: self.videoViewContainer.layer, scale: transformScale) - - let transition: ContainedViewLayoutTransition = .immediate - transition.updateTransformRotation(view: self.videoView.view, angle: angle) - - if let effectView = self.effectView { - transition.updateFrame(view: effectView, frame: self.bounds) - } - - // TODO: properly fix the issue - // On iOS 13 and later metal layer transformation is broken if the layer does not require compositing - self.videoView.view.alpha = 0.995 - } -} - -private final class MainVideoContainerNode: ASDisplayNode { - private let context: AccountContext - private let call: PresentationGroupCall - - private var backdropVideoNode: GroupVideoNode? - - private var currentVideoNode: GroupVideoNode? - private var candidateVideoNode: GroupVideoNode? 
- - private let otherVideoButtonNode: HighlightTrackingButtonNode - fileprivate let otherVideoWrapperNode: ASDisplayNode - private let otherVideoShadowNode: ASImageNode - private var otherVideoNode: GroupVideoNode? - - private let topCornersNode: ASImageNode - private let bottomCornersNode: ASImageNode - private let bottomEdgeNode: ASDisplayNode - private let fadeNode: ASImageNode - private var currentPeer: (PeerId, String, String?)? - - private var validLayout: (CGSize, CGFloat, Bool)? - - var tapped: (() -> Void)? - var otherVideoTapped: (() -> Void)? - - private let videoReadyDisposable = MetaDisposable() - private let otherVideoReadyDisposable = MetaDisposable() - - init(context: AccountContext, call: PresentationGroupCall) { - self.context = context - self.call = call - - self.topCornersNode = ASImageNode() - self.topCornersNode.displaysAsynchronously = false - self.topCornersNode.image = decorationCornersImage(top: true, bottom: false, dark: true) - - self.bottomCornersNode = ASImageNode() - self.bottomCornersNode.displaysAsynchronously = false - self.bottomCornersNode.image = decorationCornersImage(top: false, bottom: true, dark: true) - - self.bottomEdgeNode = ASDisplayNode() - self.bottomEdgeNode.backgroundColor = UIColor(rgb: 0x000000) - - self.fadeNode = ASImageNode() - self.fadeNode.displaysAsynchronously = false - self.fadeNode.displayWithoutProcessing = true - self.fadeNode.contentMode = .scaleToFill - self.fadeNode.image = generateImage(CGSize(width: 1.0, height: 50.0), rotatedContext: { size, context in - let bounds = CGRect(origin: CGPoint(), size: size) - context.clear(bounds) - - let colorsArray = [UIColor(rgb: 0x000000, alpha: 0.0).cgColor, UIColor(rgb: 0x000000, alpha: 0.7).cgColor] as CFArray - var locations: [CGFloat] = [0.0, 1.0] - let gradient = CGGradient(colorsSpace: deviceColorSpace, colors: colorsArray, locations: &locations)! 
- context.drawLinearGradient(gradient, start: CGPoint(), end: CGPoint(x: 0.0, y: size.height), options: CGGradientDrawingOptions()) - }) - - self.otherVideoShadowNode = ASImageNode() - self.otherVideoShadowNode.displaysAsynchronously = false - self.otherVideoShadowNode.image = generateImage(CGSize(width: 56.0, height: 56.0), rotatedContext: { size, context in - context.clear(CGRect(origin: CGPoint(), size: size)) - context.setShadow(offset: CGSize(), blur: 6.0, color: UIColor(white: 0.0, alpha: 0.75).cgColor) - context.setFillColor(UIColor(white: 0.0, alpha: 0.75).cgColor) - context.addPath(UIBezierPath(roundedRect: CGRect(x: 8.0, y: 8.0, width: 40.0, height: 40.0), cornerRadius: 5.5).cgPath) - context.fillPath() - }) - - self.otherVideoButtonNode = HighlightTrackingButtonNode() - self.otherVideoButtonNode.clipsToBounds = true - if #available(iOS 13.0, *) { - self.otherVideoButtonNode.layer.cornerCurve = .continuous - } - self.otherVideoButtonNode.cornerRadius = 5.5 - - self.otherVideoWrapperNode = ASDisplayNode() - self.otherVideoWrapperNode.alpha = 0.0 - self.otherVideoWrapperNode.transform = CATransform3DMakeScale(0.001, 0.001, 1.0) - - super.init() - - self.clipsToBounds = true - self.backgroundColor = UIColor(rgb: 0x1c1c1e) - - self.addSubnode(self.topCornersNode) - self.addSubnode(self.otherVideoWrapperNode) - self.addSubnode(self.fadeNode) - self.addSubnode(self.bottomCornersNode) - self.addSubnode(self.bottomEdgeNode) - - self.otherVideoWrapperNode.addSubnode(self.otherVideoShadowNode) - self.otherVideoWrapperNode.addSubnode(self.otherVideoButtonNode) - - self.otherVideoButtonNode.addTarget(self, action: #selector(self.otherVideoPressed), forControlEvents: .touchUpInside) - } - - deinit { - self.videoReadyDisposable.dispose() - self.otherVideoReadyDisposable.dispose() - } - - override func didLoad() { - super.didLoad() - - self.view.addGestureRecognizer(UITapGestureRecognizer(target: self, action: #selector(self.tap))) - } - - @objc private func tap() { - self.tapped?() - } - - @objc private func otherVideoPressed() { - self.otherVideoTapped?() - } - - func updatePeer(peer: (peerId: PeerId, endpointId: String, otherEndpointId: String?)?, waitForFullSize: Bool, completion: (() -> Void)? 
= nil) { - if self.currentPeer?.0 == peer?.0 && self.currentPeer?.1 == peer?.1 && self.currentPeer?.2 == peer?.2 { - return - } - let previousPeer = self.currentPeer - self.currentPeer = peer - if let (_, endpointId, otherEndpointId) = peer { - let transition = ContainedViewLayoutTransition.animated(duration: 0.2, curve: .easeInOut) - if let otherEndpointId = otherEndpointId { - if otherEndpointId != previousPeer?.2 { - self.call.makeIncomingVideoView(endpointId: otherEndpointId) { [weak self] videoView in - guard let strongSelf = self, let videoView = videoView else { - return - } - - let videoNode = GroupVideoNode(videoView: videoView) - if let currentVideoNode = strongSelf.otherVideoNode { - currentVideoNode.removeFromSupernode() - strongSelf.otherVideoNode = nil - } - strongSelf.otherVideoNode = videoNode - strongSelf.otherVideoButtonNode.addSubnode(videoNode) - if let (size, sideInset, isLandscape) = strongSelf.validLayout { - strongSelf.update(size: size, sideInset: sideInset, isLandscape: isLandscape, transition: .immediate) - } - - if strongSelf.otherVideoWrapperNode.alpha.isZero { - strongSelf.otherVideoReadyDisposable.set((videoNode.ready - |> filter { $0 } - |> take(1) - |> deliverOnMainQueue).start(next: { [weak self] _ in - if let strongSelf = self { - transition.updateAlpha(node: strongSelf.otherVideoWrapperNode, alpha: 1.0) - transition.updateTransformScale(node: strongSelf.otherVideoWrapperNode, scale: 1.0) - } - })) - } - } - } - } else { - if let otherVideoNode = self.otherVideoNode { - self.otherVideoNode = nil - self.otherVideoReadyDisposable.set(nil) - let completion = { - otherVideoNode.removeFromSupernode() - } - if !self.otherVideoWrapperNode.alpha.isZero { - transition.updateAlpha(node: self.otherVideoWrapperNode, alpha: 0.0, completion: { finished in - completion() - }) - transition.updateTransformScale(node: self.otherVideoWrapperNode, scale: 0.001) - } else { - completion() - } - } - } - if endpointId != previousPeer?.1 { - self.call.makeIncomingVideoView(endpointId: endpointId, completion: { [weak self] videoView in - Queue.mainQueue().async { - guard let strongSelf = self, let videoView = videoView else { - return - } - - let videoNode = GroupVideoNode(videoView: videoView) - if let currentVideoNode = strongSelf.currentVideoNode { - strongSelf.currentVideoNode = nil - - currentVideoNode.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2, removeOnCompletion: false, completion: { [weak currentVideoNode] _ in - currentVideoNode?.removeFromSupernode() - }) - } - strongSelf.currentVideoNode = videoNode - strongSelf.insertSubnode(videoNode, belowSubnode: strongSelf.topCornersNode) - if let (size, sideInset, isLandscape) = strongSelf.validLayout { - strongSelf.update(size: size, sideInset: sideInset, isLandscape: isLandscape, transition: .immediate) - } - - if waitForFullSize { - strongSelf.videoReadyDisposable.set((videoNode.ready - |> filter { $0 } - |> take(1) - |> deliverOnMainQueue).start(next: { _ in - completion?() - })) - } else { - strongSelf.videoReadyDisposable.set(nil) - completion?() - } - } - }) - } - } else { - self.videoReadyDisposable.set(nil) - self.otherVideoReadyDisposable.set(nil) - if let currentVideoNode = self.currentVideoNode { - currentVideoNode.removeFromSupernode() - self.currentVideoNode = nil - } - } - } - - func update(size: CGSize, sideInset: CGFloat, isLandscape: Bool, transition: ContainedViewLayoutTransition) { - self.validLayout = (size, sideInset, isLandscape) - - if let currentVideoNode = self.currentVideoNode { - 
transition.updateFrame(node: currentVideoNode, frame: CGRect(origin: CGPoint(), size: size)) - currentVideoNode.updateLayout(size: size, isLandscape: true, transition: transition) - } - - let smallVideoSize = CGSize(width: 40.0, height: 40.0) - transition.updateFrame(node: self.otherVideoWrapperNode, frame: CGRect(origin: CGPoint(x: sideInset.isZero ? size.width : size.width - smallVideoSize.width - 8.0 - sideInset, y: 8.0), size: smallVideoSize).insetBy(dx: -8.0, dy: -8.0)) - transition.updateFrame(node: self.otherVideoShadowNode, frame: CGRect(origin: CGPoint(), size: CGSize(width: 56.0, height: 56.0))) - transition.updateFrame(node: self.otherVideoButtonNode, frame: CGRect(origin: CGPoint(x: 8.0, y: 8.0), size: smallVideoSize)) - - if let otherVideoNode = self.otherVideoNode { - otherVideoNode.frame = CGRect(origin: CGPoint(), size: smallVideoSize) - otherVideoNode.updateLayout(size: smallVideoSize, isLandscape: false, transition: transition) - } - - transition.updateFrame(node: self.topCornersNode, frame: CGRect(x: sideInset, y: 0.0, width: size.width - sideInset * 2.0, height: 50.0)) - transition.updateFrame(node: self.bottomCornersNode, frame: CGRect(x: sideInset, y: size.height - 6.0 - 50.0, width: size.width - sideInset * 2.0, height: 50.0)) - transition.updateFrame(node: self.bottomEdgeNode, frame: CGRect(x: sideInset, y: size.height - 6.0, width: size.width - sideInset * 2.0, height: 6.0)) - - var fadeHeight: CGFloat = 50.0 - if size.width < size.height { - fadeHeight = 140.0 - } - transition.updateFrame(node: self.fadeNode, frame: CGRect(x: sideInset, y: size.height - 6.0 - fadeHeight, width: size.width - sideInset * 2.0, height: fadeHeight)) - } + let color = dark ? fullscreenBackgroundColor : panelBackgroundColor + let colorsArray = [color.withAlphaComponent(0.0).cgColor, color.cgColor] as CFArray + var locations: [CGFloat] = [0.0, 1.0] + let gradient = CGGradient(colorsSpace: deviceColorSpace, colors: colorsArray, locations: &locations)! + context.drawLinearGradient(gradient, start: CGPoint(), end: CGPoint(x: 0.0, y: size.height), options: CGGradientDrawingOptions()) + }) } public final class VoiceChatController: ViewController { @@ -507,17 +95,12 @@ public final class VoiceChatController: ViewController { let animated: Bool } - private struct State: Equatable { - var revealedPeerId: PeerId? - } - private final class Interaction { let updateIsMuted: (PeerId, Bool) -> Void let pinPeer: (PeerId) -> Void let togglePeerVideo: (PeerId) -> Void let openInvite: () -> Void let peerContextAction: (PeerEntry, ASDisplayNode, ContextGesture?) -> Void - let setPeerIdWithRevealedOptions: (PeerId?, PeerId?) -> Void let getPeerVideo: (String, Bool) -> GroupVideoNode? var isExpanded: Bool = false @@ -531,7 +114,6 @@ public final class VoiceChatController: ViewController { togglePeerVideo: @escaping (PeerId) -> Void, openInvite: @escaping () -> Void, peerContextAction: @escaping (PeerEntry, ASDisplayNode, ContextGesture?) -> Void, - setPeerIdWithRevealedOptions: @escaping (PeerId?, PeerId?) -> Void, getPeerVideo: @escaping (String, Bool) -> GroupVideoNode? ) { self.updateIsMuted = updateIsMuted @@ -539,7 +121,6 @@ public final class VoiceChatController: ViewController { self.togglePeerVideo = togglePeerVideo self.openInvite = openInvite self.peerContextAction = peerContextAction - self.setPeerIdWithRevealedOptions = setPeerIdWithRevealedOptions self.getPeerVideo = getPeerVideo } @@ -591,58 +172,52 @@ public final class VoiceChatController: ViewController { var peer: Peer var about: String? 
var isMyPeer: Bool - var ssrc: UInt32? var videoEndpointId: String? - var screencastEndpointId: String? + var presentationEndpointId: String? var activityTimestamp: Int32 var state: State var muteState: GroupCallParticipantsContext.Participant.MuteState? - var revealed: Bool? var canManageCall: Bool var volume: Int32? var raisedHand: Bool var displayRaisedHandStatus: Bool - var pinned: Bool - var style: VoiceChatParticipantItem.LayoutStyle + var active: Bool + var isLandscape: Bool var effectiveVideoEndpointId: String? { - return self.screencastEndpointId ?? self.videoEndpointId + return self.presentationEndpointId ?? self.videoEndpointId } init( peer: Peer, about: String?, isMyPeer: Bool, - ssrc: UInt32?, videoEndpointId: String?, - screencastEndpointId: String?, + presentationEndpointId: String?, activityTimestamp: Int32, state: State, muteState: GroupCallParticipantsContext.Participant.MuteState?, - revealed: Bool?, canManageCall: Bool, volume: Int32?, raisedHand: Bool, displayRaisedHandStatus: Bool, - pinned: Bool, - style: VoiceChatParticipantItem.LayoutStyle + active: Bool, + isLandscape: Bool ) { self.peer = peer self.about = about self.isMyPeer = isMyPeer - self.ssrc = ssrc self.videoEndpointId = videoEndpointId - self.screencastEndpointId = screencastEndpointId + self.presentationEndpointId = presentationEndpointId self.activityTimestamp = activityTimestamp self.state = state self.muteState = muteState - self.revealed = revealed self.canManageCall = canManageCall self.volume = volume self.raisedHand = raisedHand self.displayRaisedHandStatus = displayRaisedHandStatus - self.pinned = pinned - self.style = style + self.active = active + self.isLandscape = isLandscape } var stableId: PeerId { @@ -659,13 +234,10 @@ public final class VoiceChatController: ViewController { if lhs.isMyPeer != rhs.isMyPeer { return false } - if lhs.ssrc != rhs.ssrc { - return false - } if lhs.videoEndpointId != rhs.videoEndpointId { return false } - if lhs.screencastEndpointId != rhs.screencastEndpointId { + if lhs.presentationEndpointId != rhs.presentationEndpointId { return false } if lhs.activityTimestamp != rhs.activityTimestamp { @@ -677,9 +249,6 @@ public final class VoiceChatController: ViewController { if lhs.muteState != rhs.muteState { return false } - if lhs.revealed != rhs.revealed { - return false - } if lhs.canManageCall != rhs.canManageCall { return false } @@ -692,10 +261,10 @@ public final class VoiceChatController: ViewController { if lhs.displayRaisedHandStatus != rhs.displayRaisedHandStatus { return false } - if lhs.pinned != rhs.pinned { + if lhs.active != rhs.active { return false } - if lhs.style != rhs.style { + if lhs.isLandscape != rhs.isLandscape { return false } return true @@ -710,6 +279,7 @@ public final class VoiceChatController: ViewController { } private enum EntryId: Hashable { + case tiles case invite case peerId(PeerId) @@ -719,6 +289,13 @@ public final class VoiceChatController: ViewController { static func ==(lhs: EntryId, rhs: EntryId) -> Bool { switch lhs { + case .tiles: + switch rhs { + case .tiles: + return true + default: + return false + } case .invite: switch rhs { case .invite: @@ -738,11 +315,14 @@ public final class VoiceChatController: ViewController { } private enum ListEntry: Comparable, Identifiable { + case tiles([VoiceChatTileItem]) case invite(PresentationTheme, PresentationStrings, String, Bool) case peer(PeerEntry) var stableId: EntryId { switch self { + case .tiles: + return .tiles case .invite: return .invite case let .peer(peerEntry): @@ -752,6 
+332,12 @@ public final class VoiceChatController: ViewController { static func ==(lhs: ListEntry, rhs: ListEntry) -> Bool { switch lhs { + case let .tiles(lhsTiles): + if case let .tiles(rhsTiles) = rhs, lhsTiles == rhsTiles { + return true + } else { + return false + } case let .invite(lhsTheme, lhsStrings, lhsText, lhsIsLink): if case let .invite(rhsTheme, rhsStrings, rhsText, rhsIsLink) = rhs, lhsTheme === rhsTheme, lhsStrings === rhsStrings, lhsText == rhsText, lhsIsLink == rhsIsLink { return true @@ -770,20 +356,84 @@ public final class VoiceChatController: ViewController { static func <(lhs: ListEntry, rhs: ListEntry) -> Bool { switch lhs { - case .invite: + case .tiles: return true + case .invite: + return false case let .peer(lhsPeerEntry): switch rhs { - case .invite: + case .tiles: return false case let .peer(rhsPeerEntry): return lhsPeerEntry < rhsPeerEntry + case .invite: + return true } } } - func item(context: AccountContext, presentationData: PresentationData, interaction: Interaction, transparent: Bool) -> ListViewItem { + func fullscreenItem(context: AccountContext, presentationData: PresentationData, interaction: Interaction) -> ListViewItem { switch self { + case .tiles: + return VoiceChatActionItem(presentationData: ItemListPresentationData(presentationData), title: "", icon: .none, action: { + }) + case .invite: + return VoiceChatActionItem(presentationData: ItemListPresentationData(presentationData), title: "", icon: .generic(UIImage(bundleImageName: "Chat/Context Menu/AddUser")!), action: { + interaction.openInvite() + }) + case let .peer(peerEntry): + var color: VoiceChatFullscreenParticipantItem.Color = .generic + let icon: VoiceChatFullscreenParticipantItem.Icon + + var state = peerEntry.state + if let muteState = peerEntry.muteState, case .speaking = state, muteState.mutedByYou || !muteState.canUnmute { + state = .listening + } + + switch state { + case .listening: + if let muteState = peerEntry.muteState, muteState.mutedByYou { + color = .destructive + icon = .microphone(true, UIColor(rgb: 0xff3b30)) + } else { + icon = .microphone(peerEntry.muteState != nil, UIColor.white) + } + case .speaking: + if let muteState = peerEntry.muteState, muteState.mutedByYou { + color = .destructive + icon = .microphone(true, UIColor(rgb: 0xff3b30)) + } else { + icon = .microphone(false, UIColor(rgb: 0x34c759)) + } + case .raisedHand: + color = .accent + icon = .wantsToSpeak + case .invited: + icon = .none + } + + return VoiceChatFullscreenParticipantItem(presentationData: ItemListPresentationData(presentationData), nameDisplayOrder: presentationData.nameDisplayOrder, context: context, peer: peerEntry.peer, icon: icon, color: color, isLandscape: peerEntry.isLandscape, active: peerEntry.active, getAudioLevel: { return interaction.getAudioLevel(peerEntry.peer.id) }, getVideo: { + if let endpointId = peerEntry.effectiveVideoEndpointId { + return interaction.getPeerVideo(endpointId, true) + } else { + return nil + } + }, action: { _ in + interaction.pinPeer(peerEntry.peer.id) + }, contextAction: { node, gesture in +// interaction.peerContextAction(peerEntry, node, gesture) + }, getUpdatingAvatar: { + return interaction.updateAvatarPromise.get() + }) + } + } + + func item(context: AccountContext, presentationData: PresentationData, interaction: Interaction) -> ListViewItem { + switch self { + case let .tiles(tiles): + return VoiceChatTilesGridItem(context: context, tiles: tiles, getIsExpanded: { + return interaction.isExpanded + }) case let .invite(_, _, text, isLink): return 
VoiceChatActionItem(presentationData: ItemListPresentationData(presentationData), title: text, icon: .generic(UIImage(bundleImageName: isLink ? "Chat/Context Menu/Link" : "Chat/Context Menu/AddUser")!), action: { interaction.openInvite() @@ -801,28 +451,17 @@ public final class VoiceChatController: ViewController { } var textIcon = VoiceChatParticipantItem.ParticipantText.TextIcon() - if !transparent { - if peerEntry.videoEndpointId != nil { - textIcon.insert(.video) - } - if peerEntry.screencastEndpointId != nil { - textIcon.insert(.screen) - } - } let yourText: String - if transparent { - yourText = presentationData.strings.VoiceChat_You + if (peerEntry.about?.isEmpty ?? true) && peer.smallProfileImage == nil { + yourText = presentationData.strings.VoiceChat_TapToAddPhotoOrBio + } else if peer.smallProfileImage == nil { + yourText = presentationData.strings.VoiceChat_TapToAddPhoto + } else if (peerEntry.about?.isEmpty ?? true) { + yourText = presentationData.strings.VoiceChat_TapToAddBio } else { - if (peerEntry.about?.isEmpty ?? true) && peer.smallProfileImage == nil { - yourText = presentationData.strings.VoiceChat_TapToAddPhotoOrBio - } else if peer.smallProfileImage == nil { - yourText = presentationData.strings.VoiceChat_TapToAddPhoto - } else if (peerEntry.about?.isEmpty ?? true) { - yourText = presentationData.strings.VoiceChat_TapToAddBio - } else { - yourText = presentationData.strings.VoiceChat_You - } + yourText = presentationData.strings.VoiceChat_You } + switch state { case .listening: if peerEntry.isMyPeer { @@ -874,31 +513,14 @@ public final class VoiceChatController: ViewController { if let about = peerEntry.about, !about.isEmpty { expandedText = .text(about, textIcon, .generic) } - - let revealOptions: [VoiceChatParticipantItem.RevealOption] = [] - - return VoiceChatParticipantItem(presentationData: ItemListPresentationData(presentationData), dateTimeFormat: presentationData.dateTimeFormat, nameDisplayOrder: presentationData.nameDisplayOrder, context: context, peer: peer, ssrc: peerEntry.ssrc, presence: nil, text: text, expandedText: expandedText, icon: icon, style: peerEntry.style, enabled: true, transparent: transparent, pinned: peerEntry.pinned, selectable: true, getAudioLevel: { return interaction.getAudioLevel(peer.id) }, getVideo: { - return nil - if let endpointId = peerEntry.effectiveVideoEndpointId { - return interaction.getPeerVideo(endpointId, peerEntry.style != .list) - } else { - return nil - } - }, revealOptions: revealOptions, revealed: peerEntry.revealed, setPeerIdWithRevealedOptions: { peerId, fromPeerId in - interaction.setPeerIdWithRevealedOptions(peerId, fromPeerId) - }, action: { node in - if case .list = peerEntry.style { + + return VoiceChatParticipantItem(presentationData: ItemListPresentationData(presentationData), dateTimeFormat: presentationData.dateTimeFormat, nameDisplayOrder: presentationData.nameDisplayOrder, context: context, peer: peer, text: text, expandedText: expandedText, icon: icon, getAudioLevel: { return interaction.getAudioLevel(peer.id) }, action: { node in + if let node = node { interaction.peerContextAction(peerEntry, node, nil) - } else if peerEntry.effectiveVideoEndpointId != nil { - if peerEntry.pinned && peerEntry.videoEndpointId != nil && peerEntry.screencastEndpointId != nil { - interaction.togglePeerVideo(peer.id) - } else { - interaction.pinPeer(peer.id) - } } - }, contextAction: peerEntry.style == .list ? 
{ node, gesture in + }, contextAction: { node, gesture in interaction.peerContextAction(peerEntry, node, gesture) - } : nil, getIsExpanded: { + }, getIsExpanded: { return interaction.isExpanded }, getUpdatingAvatar: { return interaction.updateAvatarPromise.get() @@ -911,8 +533,18 @@ public final class VoiceChatController: ViewController { let (deleteIndices, indicesAndItems, updateIndices) = mergeListsStableWithUpdates(leftList: fromEntries, rightList: toEntries) let deletions = deleteIndices.map { ListViewDeleteItem(index: $0, directionHint: nil) } - let insertions = indicesAndItems.map { ListViewInsertItem(index: $0.0, previousIndex: $0.2, item: $0.1.item(context: context, presentationData: presentationData, interaction: interaction, transparent: false), directionHint: nil) } - let updates = updateIndices.map { ListViewUpdateItem(index: $0.0, previousIndex: $0.2, item: $0.1.item(context: context, presentationData: presentationData, interaction: interaction, transparent: false), directionHint: nil) } + let insertions = indicesAndItems.map { ListViewInsertItem(index: $0.0, previousIndex: $0.2, item: $0.1.item(context: context, presentationData: presentationData, interaction: interaction), directionHint: nil) } + let updates = updateIndices.map { ListViewUpdateItem(index: $0.0, previousIndex: $0.2, item: $0.1.item(context: context, presentationData: presentationData, interaction: interaction), directionHint: nil) } + + return ListTransition(deletions: deletions, insertions: insertions, updates: updates, isLoading: isLoading, isEmpty: isEmpty, canInvite: canInvite, crossFade: crossFade, count: toEntries.count, animated: animated) + } + + private func preparedFullscreenTransition(from fromEntries: [ListEntry], to toEntries: [ListEntry], isLoading: Bool, isEmpty: Bool, canInvite: Bool, crossFade: Bool, animated: Bool, context: AccountContext, presentationData: PresentationData, interaction: Interaction) -> ListTransition { + let (deleteIndices, indicesAndItems, updateIndices) = mergeListsStableWithUpdates(leftList: fromEntries, rightList: toEntries) + + let deletions = deleteIndices.map { ListViewDeleteItem(index: $0, directionHint: nil) } + let insertions = indicesAndItems.map { ListViewInsertItem(index: $0.0, previousIndex: $0.2, item: $0.1.fullscreenItem(context: context, presentationData: presentationData, interaction: interaction), directionHint: nil) } + let updates = updateIndices.map { ListViewUpdateItem(index: $0.0, previousIndex: $0.2, item: $0.1.fullscreenItem(context: context, presentationData: presentationData, interaction: interaction), directionHint: nil) } return ListTransition(deletions: deletions, insertions: insertions, updates: updates, isLoading: isLoading, isEmpty: isEmpty, canInvite: canInvite, crossFade: crossFade, count: toEntries.count, animated: animated) } @@ -930,12 +562,9 @@ public final class VoiceChatController: ViewController { private let dimNode: ASDisplayNode private let contentContainer: ASDisplayNode private let backgroundNode: ASDisplayNode - private let mainStageVideoClippingNode: ASDisplayNode - private var mainStageVideoContainerNode: MainVideoContainerNode? 
- private var mainParticipantNode: VoiceChatParticipantItemNode private var toggleFullscreenButton: HighlightTrackingButtonNode private let listNode: ListView - private let tileListNode: ListView + private let fullscreenListNode: ListView private let topPanelNode: ASDisplayNode private let topPanelEdgeNode: ASDisplayNode private let topPanelBackgroundNode: ASDisplayNode @@ -945,6 +574,7 @@ public final class VoiceChatController: ViewController { private let topCornersNode: ASImageNode private let bottomPanelCoverNode: ASDisplayNode fileprivate let bottomPanelNode: ASDisplayNode + private let bottomGradientNode: ASImageNode private let bottomPanelBackgroundNode: ASDisplayNode private let bottomCornersNode: ASImageNode fileprivate let audioButton: CallControllerButtonItemNode @@ -954,6 +584,8 @@ public final class VoiceChatController: ViewController { fileprivate let actionButton: VoiceChatActionButton private let leftBorderNode: ASDisplayNode private let rightBorderNode: ASDisplayNode + private let mainVideoNode: VoiceChatMainVideoContainerNode + private let mainVideoContainerNode: ASDisplayNode private let transitionContainerNode: ASDisplayNode private var isScheduling = false @@ -967,7 +599,7 @@ public final class VoiceChatController: ViewController { private let titleNode: VoiceChatTitleNode private var enqueuedTransitions: [ListTransition] = [] - private var enqueuedTileTransitions: [ListTransition] = [] + private var enqueuedFullscreenTransitions: [ListTransition] = [] private var validLayout: (ContainerViewLayout, CGFloat)? private var didSetContentsReady: Bool = false @@ -979,6 +611,7 @@ public final class VoiceChatController: ViewController { private var animatingInsertion = false private var animatingExpansion = false private var animatingAppearance = false + private var animatingButtonsSwap = false private var panGestureArguments: (topInset: CGFloat, offset: CGFloat)? private var peer: Peer? @@ -989,14 +622,13 @@ public final class VoiceChatController: ViewController { private var currentInvitedPeers: [Peer]? private var currentSpeakingPeers: Set? private var currentContentOffset: CGFloat? - private var ignoreScrolling = false private var currentNormalButtonColor: UIColor? private var currentActiveButtonColor: UIColor? private var switchedToCameraPeers = Set() private var currentEntries: [ListEntry] = [] - private var currentTileEntries: [ListEntry] = [] - private var pinnedEntry: ListEntry? + private var currentFullscreenEntries: [ListEntry] = [] + private var videoNodesOrder: [String] = [] private var peerViewDisposable: Disposable? private let leaveDisposable = MetaDisposable() @@ -1050,14 +682,16 @@ public final class VoiceChatController: ViewController { private var requestedVideoSources = Set() private var requestedVideoChannels: [PresentationGroupCallRequestedVideo] = [] - private var videoNodes: [(String, GroupVideoNode)] = [] + private var videoNodes: [String: GroupVideoNode] = [:] + private var readyVideoNodes = Set() + private var readyVideoDisposables = DisposableDict() + private var endpointToPeerId: [String: PeerId] = [:] private var peerIdToEndpoint: [PeerId: String] = [:] - + private var currentDominantSpeakerWithVideo: PeerId? private var currentForcedSpeakerWithVideo: PeerId? private var effectiveSpeakerWithVideo: (PeerId, String)? 
-        private let updateSpeakerWithVideoDisposable = MetaDisposable()
         private var updateAvatarDisposable = MetaDisposable()
         private let updateAvatarPromise = Promise<(TelegramMediaImageRepresentation, Float)?>(nil)
@@ -1067,11 +701,38 @@ public final class VoiceChatController: ViewController {
         private var ignoreConnectingTimer: SwiftSignalKit.Timer?
         private enum DisplayMode {
-            case `default`
+            case modal(isExpanded: Bool, isFilled: Bool)
             case fullscreen(controlsHidden: Bool)
         }
-        private var displayMode: DisplayMode = .default
+        private var displayMode: DisplayMode = .modal(isExpanded: false, isFilled: false) {
+            didSet {
+                if case let .modal(isExpanded, _) = self.displayMode {
+                    self.itemInteraction?.isExpanded = isExpanded
+                } else {
+                    self.itemInteraction?.isExpanded = true
+                }
+            }
+        }
+
+        private var effectiveDisplayMode: DisplayMode {
+            let currentDisplayMode = self.displayMode
+            switch currentDisplayMode {
+                case .modal:
+                    return self.isLandscape ? .fullscreen(controlsHidden: false) : currentDisplayMode
+                case .fullscreen:
+                    return currentDisplayMode
+            }
+        }
+
+        private var isExpanded: Bool {
+            switch self.displayMode {
+                case .modal(true, _), .fullscreen:
+                    return true
+                default:
+                    return false
+            }
+        }
         init(controller: VoiceChatController, sharedContext: SharedAccountContext, call: PresentationGroupCall) {
             self.controller = controller
@@ -1088,7 +749,7 @@ public final class VoiceChatController: ViewController {
             self.currentSubtitle = self.presentationData.strings.SocksProxySetup_ProxyStatusConnecting
             self.dimNode = ASDisplayNode()
-            self.dimNode.backgroundColor = dimColor
+            self.dimNode.backgroundColor = UIColor(white: 0.0, alpha: 0.5)
             self.contentContainer = ASDisplayNode()
             self.contentContainer.isHidden = true
@@ -1097,13 +758,6 @@ public final class VoiceChatController: ViewController {
             self.backgroundNode.backgroundColor = self.isScheduling ? panelBackgroundColor : secondaryPanelBackgroundColor
             self.backgroundNode.clipsToBounds = false
-            self.mainStageVideoClippingNode = ASDisplayNode()
-            self.mainStageVideoClippingNode.clipsToBounds = true
-
-            self.mainStageVideoContainerNode = MainVideoContainerNode(context: call.accountContext, call: call)
-
-
-            self.mainParticipantNode = VoiceChatParticipantItemNode()
             self.toggleFullscreenButton = HighlightTrackingButtonNode()
             self.toggleFullscreenButton.alpha = 0.65
@@ -1117,11 +771,11 @@ public final class VoiceChatController: ViewController {
                 return presentationData.strings.VoiceOver_ScrollStatus(row, count).0
             }
-            self.tileListNode = ListView()
-            self.tileListNode.transform = CATransform3DMakeRotation(-CGFloat(CGFloat.pi / 2.0), 0.0, 0.0, 1.0)
-            self.tileListNode.clipsToBounds = true
-            self.tileListNode.isHidden = true
-            self.tileListNode.accessibilityPageScrolledString = { row, count in
+            self.fullscreenListNode = ListView()
+            self.fullscreenListNode.transform = CATransform3DMakeRotation(-CGFloat(CGFloat.pi / 2.0), 0.0, 0.0, 1.0)
+            self.fullscreenListNode.clipsToBounds = true
+            self.fullscreenListNode.isHidden = true
+            self.fullscreenListNode.accessibilityPageScrolledString = { row, count in
                 return presentationData.strings.VoiceOver_ScrollStatus(row, count).0
             }
@@ -1162,6 +816,11 @@ public final class VoiceChatController: ViewController {
             self.bottomPanelBackgroundNode.backgroundColor = panelBackgroundColor
             self.bottomPanelBackgroundNode.isUserInteractionEnabled = false
+            self.bottomGradientNode = ASImageNode()
+            self.bottomGradientNode.displaysAsynchronously = false
+            self.bottomGradientNode.contentMode = .scaleToFill
+            self.bottomGradientNode.image = decorationBottomGradientImage(dark: false)
+
             self.bottomCornersNode = ASImageNode()
             self.bottomCornersNode.displaysAsynchronously = false
             self.bottomCornersNode.displayWithoutProcessing = true
@@ -1195,6 +854,13 @@ public final class VoiceChatController: ViewController {
             self.rightBorderNode.isUserInteractionEnabled = false
             self.rightBorderNode.clipsToBounds = false
+            self.mainVideoNode = VoiceChatMainVideoContainerNode(context: self.context, call: self.call)
+
+            self.mainVideoContainerNode = ASDisplayNode()
+            self.mainVideoContainerNode.clipsToBounds = true
+            self.mainVideoContainerNode.isUserInteractionEnabled = false
+            self.mainVideoContainerNode.isHidden = true
+
             self.transitionContainerNode = ASDisplayNode()
             self.transitionContainerNode.clipsToBounds = true
             self.transitionContainerNode.isUserInteractionEnabled = false
@@ -1218,12 +884,6 @@ public final class VoiceChatController: ViewController {
             super.init()
-            let statePromise = ValuePromise(State(), ignoreRepeated: true)
-            let stateValue = Atomic(value: State())
-            let updateState: ((State) -> State) -> Void = { f in
-                statePromise.set(stateValue.modify { f($0) })
-            }
-
             let context = self.context
             let currentAccountPeer = self.context.account.postbox.loadedPeerWithId(context.account.peerId)
             |> map { peer in
@@ -1256,18 +916,26 @@ public final class VoiceChatController: ViewController {
                let _ = self?.call.updateMuteState(peerId: peerId, isMuted: isMuted)
            }, pinPeer: { [weak self] peerId in
                if let strongSelf = self {
-                    if peerId != strongSelf.currentForcedSpeakerWithVideo {
-                        strongSelf.currentForcedSpeakerWithVideo = peerId
-                    } else {
-                        strongSelf.currentForcedSpeakerWithVideo = nil
+                    if peerId != strongSelf.currentDominantSpeakerWithVideo {
+                        strongSelf.currentDominantSpeakerWithVideo = peerId
                    }
-                    strongSelf.updateMainStageVideo(waitForFullSize: false)
+                    strongSelf.updateMainVideo(waitForFullSize: false, updateMembers: true, force: true)
                }
            }, togglePeerVideo: { [weak self] peerId in
                guard let strongSelf = self else {
                    return
                }
-                strongSelf.mainStageVideoContainerNode?.otherVideoTapped?()
+                if let strongSelf = self {
+                    if peerId != strongSelf.currentForcedSpeakerWithVideo {
+                        strongSelf.currentForcedSpeakerWithVideo = peerId
+                    }
+                    for entry in strongSelf.currentEntries {
+                        if case let .peer(peerEntry) = entry, peerEntry.peer.id == peerId, let endpoint = peerEntry.effectiveVideoEndpointId {
+                            strongSelf.effectiveSpeakerWithVideo = (peerId, endpoint)
+                            strongSelf.mainVideoNode.updatePeer(peer: strongSelf.effectiveSpeakerWithVideo, waitForFullSize: false)
+                        }
+                    }
+                }
            }, openInvite: { [weak self] in
                guard let strongSelf = self else {
                    return
@@ -1577,22 +1245,7 @@ public final class VoiceChatController: ViewController {
                        return generateTintedImage(image: UIImage(bundleImageName: "Chat/Context Menu/Tip"), color: theme.actionSheet.primaryTextColor)
                    }), true))
                }
-
-                for (endpointId, _) in strongSelf.videoNodes {
-                    if entry.videoEndpointId == endpointId || entry.screencastEndpointId == endpointId {
-                        items.append(.action(ContextMenuActionItem(text: strongSelf.currentForcedSpeakerWithVideo == peer.id ? strongSelf.presentationData.strings.VoiceChat_UnpinVideo : strongSelf.presentationData.strings.VoiceChat_PinVideo, icon: { theme in
-                            return generateTintedImage(image: UIImage(bundleImageName: "Chat/Context Menu/Pin"), color: theme.actionSheet.primaryTextColor)
-                        }, action: { _, f in
-                            guard let strongSelf = self else {
-                                return
-                            }
-                            strongSelf.itemInteraction?.pinPeer(peer.id)
-                            f(.default)
-                        })))
-                        break
-                    }
-                }
-
+
                if peer.id == strongSelf.callState?.myPeerId {
                    if entry.raisedHand {
                        items.append(.action(ContextMenuActionItem(text: strongSelf.presentationData.strings.VoiceChat_CancelSpeakRequest, icon: { theme in
@@ -1614,7 +1267,6 @@ public final class VoiceChatController: ViewController {
                            }
                            f(.default)
-
                            Queue.mainQueue().after(0.1) {
                                strongSelf.openAvatarForEditing(fromGallery: false, completion: {})
                            }
@@ -1827,7 +1479,6 @@ public final class VoiceChatController: ViewController {
                    return itemsForEntry(entry, muteState)
                }
-
                let dismissPromise = ValuePromise(false)
                let source = VoiceChatContextExtractedContentSource(controller: controller, sourceNode: sourceNode, keepInPlace: false, blurBackground: true, centerVertically: entry.peer.smallProfileImage != nil, shouldBeDismissed: dismissPromise.get())
                sourceNode.requestDismiss = {
@@ -1837,12 +1488,6 @@ public final class VoiceChatController: ViewController {
                let contextController = ContextController(account: strongSelf.context.account, presentationData: strongSelf.presentationData.withUpdated(theme: strongSelf.darkTheme), source: .extracted(source), items: items, reactionItems: [], gesture: gesture)
                contextController.useComplexItemsTransitionAnimation = true
                strongSelf.controller?.presentInGlobalOverlay(contextController)
-            }, setPeerIdWithRevealedOptions: { peerId, _ in
-                updateState { state in
-                    var updated = state
-                    updated.revealedPeerId = peerId
-                    return updated
-                }
            }, getPeerVideo: { [weak self] endpointId, tile in
                guard let strongSelf = self else {
                    return nil
@@ -1873,10 +1518,8 @@ public final class VoiceChatController: ViewController {
            self.topPanelNode.addSubnode(self.topCornersNode)
            self.bottomPanelNode.addSubnode(self.audioButton)
-            if let _ = self.mainStageVideoContainerNode {
-                self.bottomPanelNode.addSubnode(self.cameraButton)
-                self.bottomPanelNode.addSubnode(self.switchCameraButton)
-            }
+            self.bottomPanelNode.addSubnode(self.cameraButton)
+            self.bottomPanelNode.addSubnode(self.switchCameraButton)
            self.bottomPanelNode.addSubnode(self.leaveButton)
            self.bottomPanelNode.addSubnode(self.actionButton)
            self.bottomPanelNode.addSubnode(self.scheduleCancelButton)
@@ -1885,25 +1528,25 @@ public final class VoiceChatController: ViewController {
            self.addSubnode(self.contentContainer)
            self.contentContainer.addSubnode(self.backgroundNode)
-            if let mainVideoContainer = self.mainStageVideoContainerNode {
-                self.contentContainer.addSubnode(self.mainStageVideoClippingNode)
-                self.mainStageVideoClippingNode.addSubnode(mainVideoContainer)
-                self.mainStageVideoClippingNode.addSubnode(self.mainParticipantNode)
-                self.mainStageVideoClippingNode.addSubnode(self.toggleFullscreenButton)
-            }
            self.contentContainer.addSubnode(self.listNode)
            self.contentContainer.addSubnode(self.topPanelNode)
            self.contentContainer.addSubnode(self.leftBorderNode)
            self.contentContainer.addSubnode(self.rightBorderNode)
-            self.contentContainer.addSubnode(self.bottomPanelCoverNode)
+//            self.contentContainer.addSubnode(self.bottomPanelCoverNode)
            self.contentContainer.addSubnode(self.bottomCornersNode)
+            self.contentContainer.addSubnode(self.bottomGradientNode)
            self.contentContainer.addSubnode(self.bottomPanelBackgroundNode)
            self.contentContainer.addSubnode(self.bottomPanelNode)
+            self.contentContainer.addSubnode(self.mainVideoContainerNode)
            self.contentContainer.addSubnode(self.timerNode)
            self.contentContainer.addSubnode(self.scheduleTextNode)
-            self.contentContainer.addSubnode(self.tileListNode)
+            self.contentContainer.addSubnode(self.fullscreenListNode)
            self.addSubnode(self.transitionContainerNode)
+            self.mainVideoContainerNode.addSubnode(self.mainVideoNode)
+
+            self.updateDecorationsColors()
+
            self.toggleFullscreenButton.addTarget(self, action: #selector(self.toggleFullscreenPressed), forControlEvents: .touchUpInside)
            let invitedPeers: Signal<[Peer], NoError> = self.call.invitedPeers
@@ -2072,7 +1715,7 @@ public final class VoiceChatController: ViewController {
                if let (peerId, _) = maxLevelWithVideo {
                    strongSelf.currentDominantSpeakerWithVideo = peerId
-                    strongSelf.updateMainStageVideo(waitForFullSize: false)
+//                    strongSelf.updateMainStageVideo(waitForFullSize: false)
                }
                strongSelf.itemInteraction?.updateAudioLevels(levels)
@@ -2091,12 +1734,12 @@ public final class VoiceChatController: ViewController {
            })
            self.leaveButton.addTarget(self, action: #selector(self.leavePressed), forControlEvents: .touchUpInside)
-            self.actionButton.addTarget(self, action: #selector(self.actionButtonPressed), forControlEvents: .touchUpInside)
-            self.audioButton.addTarget(self, action: #selector(self.audioOutputPressed), forControlEvents: .touchUpInside)
+            self.actionButton.addTarget(self, action: #selector(self.actionPressed), forControlEvents: .touchUpInside)
+            self.audioButton.addTarget(self, action: #selector(self.audioPressed), forControlEvents: .touchUpInside)
            self.cameraButton.addTarget(self, action: #selector(self.cameraPressed), forControlEvents: .touchUpInside)
            self.switchCameraButton.addTarget(self, action: #selector(self.switchCameraPressed), forControlEvents: .touchUpInside)
            self.optionsButton.contextAction = { [weak self] sourceNode, gesture in
-                self?.openContextMenu(sourceNode: sourceNode, gesture: gesture)
+                self?.openSettingsMenu(sourceNode: sourceNode, gesture: gesture)
            }
            self.optionsButton.addTarget(self, action: #selector(self.optionsPressed), forControlEvents: .touchUpInside)
            self.closeButton.addTarget(self, action:
#selector(self.closePressed), forControlEvents: .touchUpInside) @@ -2107,14 +1750,14 @@ public final class VoiceChatController: ViewController { let animated = strongSelf.currentNormalButtonColor != nil strongSelf.currentNormalButtonColor = normalColor strongSelf.currentActiveButtonColor = activeColor - strongSelf.updateButtons(animated: animated) + strongSelf.updateButtons(transition: animated ? .animated(duration: 0.3, curve: .linear) : .immediate) } }) self.listNode.updateFloatingHeaderOffset = { [weak self] offset, transition in if let strongSelf = self { strongSelf.currentContentOffset = offset - if !strongSelf.animatingExpansion && !strongSelf.animatingInsertion && strongSelf.panGestureArguments == nil && !strongSelf.animatingAppearance { + if !(strongSelf.animatingExpansion || strongSelf.animatingInsertion || strongSelf.animatingAppearance) && strongSelf.panGestureArguments == nil { strongSelf.updateDecorationsLayout(transition: transition) } } @@ -2159,93 +1802,6 @@ public final class VoiceChatController: ViewController { strongSelf.callStateDidReset() })) - /*self.voiceSourcesDisposable.set((self.call.incomingVideoSources - |> deliverOnMainQueue).start(next: { [weak self] endpointIds in - guard let strongSelf = self else { - return - } - var validSources = Set() - for endpointId in endpointIds { - validSources.insert(endpointId) - - if !strongSelf.requestedVideoSources.contains(endpointId) { - strongSelf.requestedVideoSources.insert(endpointId) - strongSelf.call.makeIncomingVideoView(endpointId: endpointId, completion: { videoView in - Queue.mainQueue().async { - guard let strongSelf = self, let videoView = videoView else { - return - } - let videoNode = GroupVideoNode(videoView: videoView) - strongSelf.videoNodes.append((endpointId, videoNode)) - - if let _ = strongSelf.validLayout { - loop: for i in 0 ..< strongSelf.currentEntries.count { - let entry = strongSelf.currentEntries[i] - let tileEntry = strongSelf.currentTileEntries[i] - switch entry { - case let .peer(peerEntry): - if peerEntry.effectiveVideoEndpointId == endpointId { - let presentationData = strongSelf.presentationData.withUpdated(theme: strongSelf.darkTheme) - strongSelf.listNode.transaction(deleteIndices: [], insertIndicesAndItems: [], updateIndicesAndItems: [ListViewUpdateItem(index: i, previousIndex: i, item: entry.item(context: strongSelf.context, presentationData: presentationData, interaction: strongSelf.itemInteraction!, transparent: false), directionHint: nil)], options: [.Synchronous], updateOpaqueState: nil) - strongSelf.tileListNode.transaction(deleteIndices: [], insertIndicesAndItems: [], updateIndicesAndItems: [ListViewUpdateItem(index: i, previousIndex: i, item: tileEntry.item(context: strongSelf.context, presentationData: presentationData, interaction: strongSelf.itemInteraction!, transparent: false), directionHint: nil)], options: [.Synchronous], updateOpaqueState: nil) - break loop - } - default: - break - } - } - } - } - }) - } - } - - var removeRequestedVideoSources: [String] = [] - for source in strongSelf.requestedVideoSources { - if !validSources.contains(source) { - removeRequestedVideoSources.append(source) - } - } - for source in removeRequestedVideoSources { - strongSelf.requestedVideoSources.remove(source) - } - - for i in (0 ..< strongSelf.videoNodes.count).reversed() { - if !validSources.contains(strongSelf.videoNodes[i].0) { - let endpointId = strongSelf.videoNodes[i].0 - strongSelf.videoNodes.remove(at: i) - - loop: for j in 0 ..< strongSelf.currentEntries.count { - let entry = 
strongSelf.currentEntries[j] - let tileEntry = strongSelf.currentTileEntries[j] - switch entry { - case let .peer(peerEntry): - if peerEntry.effectiveVideoEndpointId == endpointId { - let presentationData = strongSelf.presentationData.withUpdated(theme: strongSelf.darkTheme) - strongSelf.listNode.transaction(deleteIndices: [], insertIndicesAndItems: [], updateIndicesAndItems: [ListViewUpdateItem(index: j, previousIndex: j, item: entry.item(context: strongSelf.context, presentationData: presentationData, interaction: strongSelf.itemInteraction!, transparent: false), directionHint: nil)], options: [.Synchronous], updateOpaqueState: nil) - strongSelf.tileListNode.transaction(deleteIndices: [], insertIndicesAndItems: [], updateIndicesAndItems: [ListViewUpdateItem(index: j, previousIndex: j, item: tileEntry.item(context: strongSelf.context, presentationData: presentationData, interaction: strongSelf.itemInteraction!, transparent: false), directionHint: nil)], options: [.Synchronous], updateOpaqueState: nil) - break loop - } - default: - break - } - } - } - } - - if let (peerId, endpointId) = strongSelf.effectiveSpeakerWithVideo { - if !validSources.contains(endpointId) { - if peerId == strongSelf.currentForcedSpeakerWithVideo { - strongSelf.currentForcedSpeakerWithVideo = nil - } - if peerId == strongSelf.currentDominantSpeakerWithVideo { - strongSelf.currentDominantSpeakerWithVideo = nil - } - strongSelf.updateMainStageVideo(waitForFullSize: false) - } - } - }))*/ - self.titleNode.tapped = { [weak self] in if let strongSelf = self, !strongSelf.isScheduling { if strongSelf.callState?.canManageCall ?? false { @@ -2268,158 +1824,6 @@ public final class VoiceChatController: ViewController { } } - self.mainStageVideoContainerNode?.tapped = { [weak self] in - if let strongSelf = self, !strongSelf.animatingExpansion { - var effectiveDisplayMode = strongSelf.displayMode - var isLandscape = false - if let (layout, _) = strongSelf.validLayout, layout.size.width > layout.size.height, case .compact = layout.metrics.widthClass { - isLandscape = true - if case .fullscreen = effectiveDisplayMode { - } else { - effectiveDisplayMode = .fullscreen(controlsHidden: false) - } - } - - switch effectiveDisplayMode { - case .default: - strongSelf.displayMode = .fullscreen(controlsHidden: false) - case let .fullscreen(controlsHidden): - if controlsHidden { - if !isLandscape { - strongSelf.displayMode = .default - } else { - strongSelf.displayMode = .fullscreen(controlsHidden: false) - } - } else { - strongSelf.displayMode = .fullscreen(controlsHidden: true) - } - } - - if case .default = effectiveDisplayMode, case .fullscreen = strongSelf.displayMode { - strongSelf.tileListNode.isHidden = false - - var minimalVisiblePeerid: (PeerId, CGFloat)? - var verticalItemNodes: [PeerId: VoiceChatParticipantItemNode] = [:] - strongSelf.listNode.forEachItemNode { itemNode in - if let itemNode = itemNode as? 
VoiceChatParticipantItemNode, let item = itemNode.item { - let convertedFrame = itemNode.view.convert(itemNode.bounds, to: strongSelf.transitionContainerNode.view) - if let (_, y) = minimalVisiblePeerid { - if convertedFrame.minY >= 0.0 && convertedFrame.minY < y { - minimalVisiblePeerid = (item.peer.id, convertedFrame.minY) - } - } else { - if convertedFrame.minY >= 0.0 { - minimalVisiblePeerid = (item.peer.id, convertedFrame.minY) - } - } - verticalItemNodes[item.peer.id] = itemNode - } - } - - strongSelf.animatingExpansion = true - - let completion = { - strongSelf.tileListNode.forEachItemNode { itemNode in - if let itemNode = itemNode as? VoiceChatParticipantItemNode, let item = itemNode.item, let otherItemNode = verticalItemNodes[item.peer.id] { - itemNode.animateTransitionIn(from: otherItemNode, containerNode: strongSelf) - } - } - - strongSelf.updateIsFullscreen(strongSelf.isFullscreen, force: true) - - if let (layout, navigationHeight) = strongSelf.validLayout { - strongSelf.containerLayoutUpdated(layout, navigationHeight: navigationHeight, transition: .animated(duration: 0.3, curve: .easeInOut)) - strongSelf.updateDecorationsLayout(transition: .animated(duration: 0.3, curve: .easeInOut)) - } - } - if let (peerId, _) = minimalVisiblePeerid { - var index = 0 - for item in strongSelf.currentEntries { - if case let .peer(entry) = item, entry.peer.id == peerId { - break - } else { - index += 1 - } - } - strongSelf.tileListNode.transaction(deleteIndices: [], insertIndicesAndItems: [], updateIndicesAndItems: [], options: [.Synchronous, .LowLatency], scrollToItem: ListViewScrollToItem(index: index, position: .top(0.0), animated: false, curve: .Default(duration: nil), directionHint: .Up), updateSizeAndInsets: nil, stationaryItemRange: nil, updateOpaqueState: nil, completion: { _ in - completion() - }) - } else { - completion() - } - } else if case .fullscreen = effectiveDisplayMode, case .default = strongSelf.displayMode { - var minimalVisiblePeerid: (PeerId, CGFloat)? - var tileItemNodes: [PeerId: VoiceChatParticipantItemNode] = [:] - strongSelf.tileListNode.forEachItemNode { itemNode in - if let itemNode = itemNode as? VoiceChatParticipantItemNode, let item = itemNode.item { - let convertedFrame = itemNode.view.convert(itemNode.bounds, to: strongSelf.transitionContainerNode.view) - if let (_, x) = minimalVisiblePeerid { - if convertedFrame.minX >= 0.0 && convertedFrame.minX < x { - minimalVisiblePeerid = (item.peer.id, convertedFrame.minX) - } - } else if convertedFrame.minX >= 0.0 { - minimalVisiblePeerid = (item.peer.id, convertedFrame.minX) - } - tileItemNodes[item.peer.id] = itemNode - } - } - - strongSelf.animatingExpansion = true - - let completion = { - strongSelf.listNode.forEachItemNode { itemNode in - if let itemNode = itemNode as? 
VoiceChatParticipantItemNode, let item = itemNode.item, let otherItemNode = tileItemNodes[item.peer.id] { - itemNode.animateTransitionIn(from: otherItemNode, containerNode: strongSelf.transitionContainerNode) - } - } - - strongSelf.updateIsFullscreen(strongSelf.isFullscreen, force: true) - - if let (layout, navigationHeight) = strongSelf.validLayout { - strongSelf.containerLayoutUpdated(layout, navigationHeight: navigationHeight, transition: .animated(duration: 0.3, curve: .easeInOut)) - strongSelf.updateDecorationsLayout(transition: .animated(duration: 0.3, curve: .easeInOut)) - } - } - if let (peerId, _) = minimalVisiblePeerid { - var index = 0 - for item in strongSelf.currentEntries { - if case let .peer(entry) = item, entry.peer.id == peerId { - break - } else { - index += 1 - } - } - strongSelf.listNode.transaction(deleteIndices: [], insertIndicesAndItems: [], updateIndicesAndItems: [], options: [.Synchronous, .LowLatency], scrollToItem: ListViewScrollToItem(index: index, position: .top(0.0), animated: false, curve: .Default(duration: nil), directionHint: .Up), updateSizeAndInsets: nil, stationaryItemRange: nil, updateOpaqueState: nil, completion: { _ in - completion() - }) - } else { - completion() - } - } else if case .fullscreen = strongSelf.displayMode { - strongSelf.animatingExpansion = true - strongSelf.updateIsFullscreen(strongSelf.isFullscreen, force: true) - - if let (layout, navigationHeight) = strongSelf.validLayout { - strongSelf.containerLayoutUpdated(layout, navigationHeight: navigationHeight, transition: .animated(duration: 0.3, curve: .easeInOut)) - strongSelf.updateDecorationsLayout(transition: .animated(duration: 0.3, curve: .easeInOut)) - } - } - } - } - self.mainStageVideoContainerNode?.otherVideoTapped = { [weak self] in - if let strongSelf = self, let peerId = strongSelf.effectiveSpeakerWithVideo?.0 { - let switchingToCamera = !strongSelf.switchedToCameraPeers.contains(peerId) - if switchingToCamera { - strongSelf.switchedToCameraPeers.insert(peerId) - } else { - strongSelf.switchedToCameraPeers.remove(peerId) - } - strongSelf.updateMainStageVideo(waitForFullSize: false, force: true) - strongSelf.displayToggleVideoSourceTooltip(screencast: !switchingToCamera) - } - } - - self.scheduleCancelButton.pressed = { [weak self] in if let strongSelf = self { strongSelf.dismissScheduled() @@ -2437,6 +1841,12 @@ public final class VoiceChatController: ViewController { } } } + + self.mainVideoNode.tapped = { [weak self] in + if let strongSelf = self { + strongSelf.toggleDisplayMode() + } + } } deinit { @@ -2456,18 +1866,18 @@ public final class VoiceChatController: ViewController { self.voiceSourcesDisposable.dispose() self.updateAvatarDisposable.dispose() self.ignoreConnectingTimer?.invalidate() - self.updateSpeakerWithVideoDisposable.dispose() + self.readyVideoDisposables.dispose() } @objc private func toggleFullscreenPressed() { - if case .default = self.displayMode { + if case .modal = self.displayMode { } else { self.displayMode = .fullscreen(controlsHidden: true) } - self.mainStageVideoContainerNode?.tapped?() + } - private func openContextMenu(sourceNode: ASDisplayNode, gesture: ContextGesture?) { + private func openSettingsMenu(sourceNode: ASDisplayNode, gesture: ContextGesture?) 
{ let canManageCall = !self.optionsButtonIsAvatar let items: Signal<[ContextMenuItem], NoError> if canManageCall { @@ -2619,7 +2029,7 @@ public final class VoiceChatController: ViewController { })))*/ if let callState = strongSelf.callState, callState.canManageCall { - let isScheduled = strongSelf.callState?.scheduleTimestamp != nil + let isScheduled = strongSelf.isScheduled items.append(.action(ContextMenuActionItem(text: isScheduled ? strongSelf.presentationData.strings.VoiceChat_CancelVoiceChat : strongSelf.presentationData.strings.VoiceChat_EndVoiceChat, textColor: .destructive, icon: { theme in return generateTintedImage(image: UIImage(bundleImageName: "Chat/Context Menu/Clear"), color: theme.actionSheet.destructiveActionTextColor) }, action: { _, f in @@ -2870,7 +2280,7 @@ public final class VoiceChatController: ViewController { pickerView.timeZone = TimeZone.current pickerView.minuteInterval = 1 self.contentContainer.view.addSubview(pickerView) - pickerView.addTarget(self, action: #selector(self.datePickerUpdated), for: .valueChanged) + pickerView.addTarget(self, action: #selector(self.scheduleDatePickerUpdated), for: .valueChanged) if #available(iOS 13.4, *) { pickerView.preferredDatePickerStyle = .wheels } @@ -2917,7 +2327,7 @@ public final class VoiceChatController: ViewController { } } - @objc private func datePickerUpdated() { + @objc private func scheduleDatePickerUpdated() { self.updateScheduleButtonTitle() } @@ -2991,7 +2401,7 @@ public final class VoiceChatController: ViewController { } private func transitionToCall() { - self.updateIsFullscreen(false, force: true) + self.updateDecorationsColors() self.listNode.alpha = 1.0 self.listNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2) @@ -3012,10 +2422,8 @@ public final class VoiceChatController: ViewController { } @objc private func optionsPressed() { - if self.optionsButton.isUserInteractionEnabled { - self.optionsButton.play() - self.optionsButton.contextAction?(self.optionsButton.containerNode, nil) - } + self.optionsButton.play() + self.optionsButton.contextAction?(self.optionsButton.containerNode, nil) } @objc private func closePressed() { @@ -3028,7 +2436,6 @@ public final class VoiceChatController: ViewController { self.controller?.dismissAllTooltips() if let callState = self.callState, callState.canManageCall { - let isScheduled = callState.scheduleTimestamp != nil let action: () -> Void = { [weak self] in guard let strongSelf = self else { return @@ -3044,12 +2451,12 @@ public final class VoiceChatController: ViewController { var items: [ActionSheetItem] = [] items.append(ActionSheetTextItem(title: self.presentationData.strings.VoiceChat_LeaveConfirmation)) - items.append(ActionSheetButtonItem(title: isScheduled ? self.presentationData.strings.VoiceChat_LeaveAndCancelVoiceChat : self.presentationData.strings.VoiceChat_LeaveAndEndVoiceChat, color: .destructive, action: { [weak self, weak actionSheet] in + items.append(ActionSheetButtonItem(title: self.isScheduled ? self.presentationData.strings.VoiceChat_LeaveAndCancelVoiceChat : self.presentationData.strings.VoiceChat_LeaveAndEndVoiceChat, color: .destructive, action: { [weak self, weak actionSheet] in actionSheet?.dismissAnimated() if let strongSelf = self { if let (members, _) = strongSelf.currentCallMembers, members.count >= 10 || true { - let alertController = textAlertController(context: strongSelf.context, forceTheme: strongSelf.darkTheme, title: isScheduled ? 
strongSelf.presentationData.strings.VoiceChat_CancelConfirmationTitle : strongSelf.presentationData.strings.VoiceChat_EndConfirmationTitle, text: isScheduled ? strongSelf.presentationData.strings.VoiceChat_CancelConfirmationText : strongSelf.presentationData.strings.VoiceChat_EndConfirmationText, actions: [TextAlertAction(type: .defaultAction, title: strongSelf.presentationData.strings.Common_Cancel, action: {}), TextAlertAction(type: .genericAction, title: isScheduled ? strongSelf.presentationData.strings.VoiceChat_CancelConfirmationEnd : strongSelf.presentationData.strings.VoiceChat_EndConfirmationEnd, action: { + let alertController = textAlertController(context: strongSelf.context, forceTheme: strongSelf.darkTheme, title: strongSelf.isScheduled ? strongSelf.presentationData.strings.VoiceChat_CancelConfirmationTitle : strongSelf.presentationData.strings.VoiceChat_EndConfirmationTitle, text: strongSelf.isScheduled ? strongSelf.presentationData.strings.VoiceChat_CancelConfirmationText : strongSelf.presentationData.strings.VoiceChat_EndConfirmationText, actions: [TextAlertAction(type: .defaultAction, title: strongSelf.presentationData.strings.Common_Cancel, action: {}), TextAlertAction(type: .genericAction, title: strongSelf.isScheduled ? strongSelf.presentationData.strings.VoiceChat_CancelConfirmationEnd : strongSelf.presentationData.strings.VoiceChat_EndConfirmationEnd, action: { action() })]) strongSelf.controller?.present(alertController, in: .window(.root)) @@ -3195,23 +2602,23 @@ public final class VoiceChatController: ViewController { }) } - private var pressTimer: SwiftSignalKit.Timer? - private func startPressTimer() { - self.pressTimer?.invalidate() + private var actionButtonPressTimer: SwiftSignalKit.Timer? + private func startActionButtonPressTimer() { + self.actionButtonPressTimer?.invalidate() let pressTimer = SwiftSignalKit.Timer(timeout: 0.185, repeat: false, completion: { [weak self] in - self?.pressTimerFired() - self?.pressTimer = nil + self?.actionButtonPressTimerFired() + self?.actionButtonPressTimer = nil }, queue: Queue.mainQueue()) - self.pressTimer = pressTimer + self.actionButtonPressTimer = pressTimer pressTimer.start() } - private func stopPressTimer() { - self.pressTimer?.invalidate() - self.pressTimer = nil + private func stopActionButtonPressTimer() { + self.actionButtonPressTimer?.invalidate() + self.actionButtonPressTimer = nil } - private func pressTimerFired() { + private func actionButtonPressTimerFired() { guard let callState = self.callState else { return } @@ -3288,7 +2695,7 @@ public final class VoiceChatController: ViewController { case .began: self.actionButton.pressing = true self.hapticFeedback.impact(.light) - self.startPressTimer() + self.startActionButtonPressTimer() if let (layout, navigationHeight) = self.validLayout { self.containerLayoutUpdated(layout, navigationHeight: navigationHeight, transition: .animated(duration: 0.3, curve: .spring)) } @@ -3296,8 +2703,8 @@ public final class VoiceChatController: ViewController { self.pushingToTalk = false self.actionButton.pressing = false - if self.pressTimer != nil { - self.stopPressTimer() + if self.actionButtonPressTimer != nil { + self.stopActionButtonPressTimer() self.call.toggleIsMuted() } else { self.hapticFeedback.impact(.light) @@ -3317,13 +2724,13 @@ public final class VoiceChatController: ViewController { } } - @objc private func actionButtonPressed() { + @objc private func actionPressed() { if self.isScheduling { self.schedule() } } - @objc private func audioOutputPressed() { + @objc 
private func audioPressed() { self.hapticFeedback.impact(.light) if let _ = self.callState?.scheduleTimestamp { @@ -3425,14 +2832,13 @@ public final class VoiceChatController: ViewController { } } - private var animatingButtons = false @objc private func cameraPressed() { if self.call.hasVideo || self.call.hasScreencast { self.call.disableVideo() self.call.disableScreencast() if let (layout, navigationHeight) = self.validLayout { - self.animatingButtons = true + self.animatingButtonsSwap = true self.containerLayoutUpdated(layout, navigationHeight: navigationHeight, transition: .animated(duration: 0.3, curve: .linear)) } } else { @@ -3440,13 +2846,13 @@ public final class VoiceChatController: ViewController { guard let strongSelf = self, let view = view else { return } - let cameraNode = GroupVideoNode(videoView: view) + let cameraNode = GroupVideoNode(videoView: view, backdropVideoView: nil) let controller = VoiceChatCameraPreviewController(context: strongSelf.context, cameraNode: cameraNode, shareCamera: { [weak self] videoNode in if let strongSelf = self { strongSelf.call.requestVideo() if let (layout, navigationHeight) = strongSelf.validLayout { - strongSelf.animatingButtons = true + strongSelf.animatingButtonsSwap = true strongSelf.containerLayoutUpdated(layout, navigationHeight: navigationHeight, transition: .animated(duration: 0.3, curve: .linear)) } } @@ -3479,15 +2885,20 @@ public final class VoiceChatController: ViewController { private var effectiveBottomAreaHeight: CGFloat { switch self.displayMode { - case .default: + case .modal: return bottomAreaHeight case let .fullscreen(controlsHidden): return controlsHidden ? 0.0 : fullscreenBottomAreaHeight } } - private var hasMainVideo: Bool { - return self.mainStageVideoContainerNode != nil && self.effectiveSpeakerWithVideo != nil + private var isFullscreen: Bool { + switch self.effectiveDisplayMode { + case .fullscreen(_), .modal(_, true): + return true + default: + return false + } } private var bringVideoToBackOnCompletion = false @@ -3523,10 +2934,10 @@ public final class VoiceChatController: ViewController { contentWidth = isLandscape ? min(530.0, size.width - 210.0) : size.width } - let listSize = CGSize(width: contentWidth, height: layout.size.height - listTopInset - bottomPanelHeight) + let listSize = CGSize(width: contentWidth, height: layout.size.height - listTopInset - bottomPanelHeight + bottomGradientHeight) let topInset: CGFloat if let (panInitialTopInset, panOffset) = self.panGestureArguments { - if self.isExpanded && !self.hasMainVideo { + if self.isExpanded { topInset = min(self.topInset ?? listSize.height, panInitialTopInset + max(0.0, panOffset)) } else { topInset = max(0.0, panInitialTopInset + min(0.0, panOffset)) @@ -3547,8 +2958,7 @@ public final class VoiceChatController: ViewController { } } - let currentContentOffset = self.currentContentOffset ?? 0.0 - let offset = (bottomEdge.isZero ? 
0.0 : currentContentOffset) + topInset + let offset = topInset if bottomEdge.isZero { bottomEdge = self.listNode.frame.minY + 46.0 + 56.0 @@ -3562,89 +2972,27 @@ public final class VoiceChatController: ViewController { } else { topPanelFrame = CGRect(origin: CGPoint(x: 0.0, y: panelOffset), size: CGSize(width: size.width, height: topPanelHeight)) } - let sideInset: CGFloat = 16.0 - if let mainVideoContainer = self.mainStageVideoContainerNode { - let videoClippingFrame: CGRect - let videoContainerFrame: CGRect - let videoInset: CGFloat - let videoHeight: CGFloat - var isFullscreen = false - if isLandscape { - videoInset = 0.0 - videoClippingFrame = CGRect(x: layout.safeInsets.left, y: 0.0, width: layout.size.width - layout.safeInsets.left - layout.safeInsets.right - fullscreenBottomAreaHeight, height: layout.size.height + 6.0) - videoContainerFrame = CGRect(origin: CGPoint(), size: videoClippingFrame.size) - videoHeight = videoClippingFrame.height - } else { - let videoY: CGFloat - switch effectiveDisplayMode { - case .default: - videoInset = sideInset - videoHeight = min(mainVideoHeight, layout.size.width) - videoY = topPanelFrame.maxY - case .fullscreen: - videoInset = 0.0 - videoHeight = layout.size.height - (layout.statusBarHeight ?? 0.0) - layout.intrinsicInsets.bottom - fullscreenBottomAreaHeight - 6.0 - videoY = layout.statusBarHeight ?? 20.0 - isFullscreen = true - } - videoClippingFrame = CGRect(origin: CGPoint(x: videoInset, y: videoY), size: CGSize(width: layout.size.width - videoInset * 2.0, height: self.hasMainVideo ? videoHeight : 0.0)) - videoContainerFrame = CGRect(origin: CGPoint(x: -videoInset, y: 0.0), size: CGSize(width: layout.size.width, height: videoHeight)) - } - - let topEdgeY = topPanelFrame.maxY + min(mainVideoHeight, layout.size.width) - let bottomEdgeY = isFullscreen ? layout.size.height : layout.size.height - bottomAreaHeight - layout.intrinsicInsets.bottom - transition.updateFrame(node: self.transitionContainerNode, frame: CGRect(x: sideInset, y: topEdgeY, width: layout.size.width - sideInset * 2.0, height: max(0.0, bottomEdgeY - topEdgeY))) - - let offset: CGFloat - var mainParticipantNodeWidth = videoClippingFrame.width - if case let .fullscreen(controlsHidden) = effectiveDisplayMode { - if isLandscape { - offset = 56.0 + 6.0 + layout.intrinsicInsets.bottom - mainParticipantNodeWidth -= controlsHidden ? 66.0 : 140.0 - } else { - offset = controlsHidden ? 
66.0 : 140.0 - mainParticipantNodeWidth -= 50.0 - } - } else { - offset = 56.0 + 6.0 - mainParticipantNodeWidth -= 50.0 - } - - if let entry = self.pinnedEntry, let interaction = self.itemInteraction { - self.mainParticipantNode.isHidden = false - let item = entry.item(context: self.context, presentationData: self.presentationData, interaction: interaction, transparent: true) - let itemNode = self.mainParticipantNode - item.updateNode(async: { $0() }, node: { - return itemNode - }, params: ListViewItemLayoutParams(width: mainParticipantNodeWidth, leftInset: 0.0, rightInset: 0.0, availableHeight: self.bounds.height), previousItem: nil, nextItem: nil, animation: .System(duration: 0.2), completion: { (layout, apply) in - itemNode.contentSize = layout.contentSize - itemNode.insets = layout.insets - itemNode.isUserInteractionEnabled = false - - apply(ListViewItemApply(isOnScreen: true)) - }) - } else { - self.mainParticipantNode.isHidden = true - } - - transition.updateFrame(node: self.mainParticipantNode, frame: CGRect(x: 0.0, y: videoClippingFrame.height - offset, width: mainParticipantNodeWidth, height: 56.0)) - transition.updateFrame(node: self.toggleFullscreenButton, frame: CGRect(x: mainParticipantNodeWidth + 1.0, y: videoClippingFrame.height - offset + 7.0, width: 44.0, height: 44.0)) - - transition.updateFrame(node: self.mainStageVideoClippingNode, frame: videoClippingFrame) - transition.updateFrame(node: mainVideoContainer, frame: videoContainerFrame, completion: { [weak self] _ in - if let strongSelf = self { - strongSelf.animatingExpansion = false - - if strongSelf.bringVideoToBackOnCompletion { - strongSelf.tileListNode.isHidden = true - strongSelf.bringVideoToBackOnCompletion = false - strongSelf.contentContainer.insertSubnode(strongSelf.mainStageVideoClippingNode, belowSubnode: strongSelf.tileListNode) - } - } - }) - mainVideoContainer.update(size: videoContainerFrame.size, sideInset: videoInset, isLandscape: isLandscape, transition: transition) + let sideInset: CGFloat = 14.0 + + let topEdgeY = topPanelFrame.maxY + let bottomEdgeY = self.isFullscreen ? layout.size.height : layout.size.height - bottomAreaHeight - layout.intrinsicInsets.bottom + transition.updateFrame(node: self.transitionContainerNode, frame: CGRect(x: sideInset, y: topEdgeY, width: layout.size.width - sideInset * 2.0, height: max(0.0, bottomEdgeY - topEdgeY))) + + var isFullscreen = false + if case .fullscreen = self.effectiveDisplayMode { + isFullscreen = true } + transition.updateAlpha(node: self.bottomGradientNode, alpha: isFullscreen ? 0.0 : 1.0) + + let videoTopEdgeY = isLandscape ? 0.0 : layoutTopInset + let videoBottomEdgeY = self.isLandscape ? layout.size.height : layout.size.height - layout.intrinsicInsets.bottom - 84.0 + let videoFrame = CGRect(x: 0.0, y: videoTopEdgeY, width: isLandscape ? 
layout.size.width - layout.safeInsets.right - 84.0 : layout.size.width, height: videoBottomEdgeY - videoTopEdgeY) + transition.updateFrame(node: self.mainVideoContainerNode, frame: videoFrame) + if !self.mainVideoNode.animating { + transition.updateFrame(node: self.mainVideoNode, frame: CGRect(origin: CGPoint(), size: videoFrame.size)) + } + self.mainVideoNode.update(size: videoFrame.size, sideInset: 0.0, isLandscape: true, transition: transition) let backgroundFrame = CGRect(origin: CGPoint(x: 0.0, y: topPanelFrame.maxY), size: CGSize(width: size.width, height: layout.size.height)) @@ -3687,7 +3035,7 @@ public final class VoiceChatController: ViewController { let listMaxY = listTopInset + listSize.height let bottomOffset: CGFloat = min(0.0, bottomEdge - listMaxY) + layout.size.height - bottomPanelHeight - let bottomDelta = self.effectiveBottomAreaHeight - bottomAreaHeight + let bottomDelta = self.effectiveBottomAreaHeight - bottomAreaHeight + bottomGradientHeight let bottomCornersFrame = CGRect(origin: CGPoint(x: sideInset + floorToScreenPixels((size.width - contentWidth) / 2.0), y: -50.0 + bottomOffset + bottomDelta), size: CGSize(width: contentWidth - sideInset * 2.0, height: 50.0)) let previousBottomCornersFrame = self.bottomCornersNode.frame @@ -3701,13 +3049,16 @@ public final class VoiceChatController: ViewController { } } - var isFullscreen = false - func updateIsFullscreen(_ isFullscreen: Bool, force: Bool = false) { - guard self.isFullscreen != isFullscreen || force, let (layout, _) = self.validLayout else { + private var decorationsAreDark: Bool? + private func updateDecorationsColors() { + guard let (layout, _) = self.validLayout else { return } - self.isFullscreen = isFullscreen - + + let isFullscreen = self.isFullscreen + let isLandscape = self.isLandscape + let effectiveDisplayMode = self.effectiveDisplayMode + self.controller?.statusBar.updateStatusBarStyle(isFullscreen ? .White : .Ignore, animated: true) var size = layout.size @@ -3728,16 +3079,6 @@ public final class VoiceChatController: ViewController { topEdgeFrame = CGRect(x: 0.0, y: 0.0, width: size.width, height: topPanelHeight) } - var isLandscape = false - var effectiveDisplayMode = self.displayMode - if case .compact = layout.metrics.widthClass, layout.size.width > layout.size.height { - isLandscape = true - if case .fullscreen = effectiveDisplayMode { - } else { - effectiveDisplayMode = .fullscreen(controlsHidden: false) - } - } - let backgroundColor: UIColor if case .fullscreen = effectiveDisplayMode { if isLandscape { @@ -3761,31 +3102,51 @@ public final class VoiceChatController: ViewController { transition.updateBackgroundColor(node: self.leftBorderNode, color: isFullscreen ? fullscreenBackgroundColor : panelBackgroundColor) transition.updateBackgroundColor(node: self.rightBorderNode, color: isFullscreen ? fullscreenBackgroundColor : panelBackgroundColor) - if let snapshotView = self.topCornersNode.view.snapshotContentTree() { - snapshotView.frame = self.topCornersNode.frame - self.topPanelNode.view.addSubview(snapshotView) - - snapshotView.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.3, removeOnCompletion: false, completion: { [weak snapshotView] _ in - snapshotView?.removeFromSuperview() - }) + var gridNode: VoiceChatTilesGridItemNode? + self.listNode.forEachItemNode { itemNode in + if let itemNode = itemNode as? VoiceChatTilesGridItemNode { + gridNode = itemNode + } + } + if let gridNode = gridNode { + transition.updateBackgroundColor(node: gridNode.backgroundNode, color: isFullscreen ? 
fullscreenBackgroundColor : panelBackgroundColor) } - self.topCornersNode.image = decorationCornersImage(top: true, bottom: false, dark: isFullscreen) - if let snapshotView = self.bottomCornersNode.view.snapshotContentTree() { - snapshotView.frame = self.bottomCornersNode.bounds - self.bottomCornersNode.view.addSubview(snapshotView) + let previousDark = self.decorationsAreDark + self.decorationsAreDark = isFullscreen + if previousDark != self.decorationsAreDark { + if let snapshotView = self.topCornersNode.view.snapshotContentTree() { + snapshotView.frame = self.topCornersNode.frame + self.topPanelNode.view.addSubview(snapshotView) + + snapshotView.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.3, timingFunction: CAMediaTimingFunctionName.linear.rawValue, removeOnCompletion: false, completion: { [weak snapshotView] _ in + snapshotView?.removeFromSuperview() + }) + } + self.topCornersNode.image = decorationCornersImage(top: true, bottom: false, dark: isFullscreen) - snapshotView.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.3, removeOnCompletion: false, completion: { [weak snapshotView] _ in - snapshotView?.removeFromSuperview() - }) - } - self.bottomCornersNode.image = decorationCornersImage(top: false, bottom: true, dark: isFullscreen) + if let snapshotView = self.bottomCornersNode.view.snapshotContentTree() { + snapshotView.frame = self.bottomCornersNode.bounds + self.bottomCornersNode.view.addSubview(snapshotView) + + snapshotView.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.3, timingFunction: CAMediaTimingFunctionName.linear.rawValue, removeOnCompletion: false, completion: { [weak snapshotView] _ in + snapshotView?.removeFromSuperview() + }) + } + self.bottomCornersNode.image = decorationCornersImage(top: false, bottom: true, dark: isFullscreen) + + UIView.transition(with: self.bottomGradientNode.view, duration: 0.3, options: [.transitionCrossDissolve, .curveLinear]) { + self.bottomGradientNode.image = decorationBottomGradientImage(dark: isFullscreen) + } completion: { _ in + } + self.closeButton.setContent(.image(closeButtonImage(dark: isFullscreen)), animated: transition.isAnimated) + } + if !self.optionsButtonIsAvatar { self.optionsButton.setContent(.more(optionsCircleImage(dark: isFullscreen)), animated: transition.isAnimated) } - self.closeButton.setContent(.image(closeButtonImage(dark: isFullscreen)), animated: transition.isAnimated) - + self.updateTitle(transition: transition) } @@ -3793,10 +3154,11 @@ public final class VoiceChatController: ViewController { guard let _ = self.validLayout else { return } + var title = self.currentTitle if self.isScheduling { title = self.presentationData.strings.ScheduleVoiceChat_Title - } else if !self.isFullscreen && !self.currentTitleIsCustom { + } else if case .modal(_, false) = self.displayMode, !self.currentTitleIsCustom { if let navigationController = self.controller?.navigationController as? NavigationController { for controller in navigationController.viewControllers.reversed() { if let controller = controller as? 
ChatController, case let .peer(peerId) = controller.chatLocation, peerId == self.call.peerId { @@ -3820,7 +3182,7 @@ public final class VoiceChatController: ViewController { self.titleNode.update(size: CGSize(width: self.titleNode.bounds.width, height: 44.0), title: title, subtitle: subtitle, slide: slide, transition: transition) } - private func updateButtons(animated: Bool) { + private func updateButtons(transition: ContainedViewLayoutTransition) { var audioMode: CallControllerButtonsSpeakerMode = .none //var hasAudioRouteMenu: Bool = false if let (availableOutputs, maybeCurrentOutput) = self.audioOutputState, let currentOutput = maybeCurrentOutput { @@ -3886,10 +3248,10 @@ public final class VoiceChatController: ViewController { let isScheduled = self.isScheduling || self.callState?.scheduleTimestamp != nil - var soundEnabled = true + var isSoundEnabled = true if isScheduled { if let callState = self.callState, let peer = self.peer, !callState.canManageCall && (peer.addressName?.isEmpty ?? true) { - soundEnabled = false + isSoundEnabled = false } else { soundImage = .share soundTitle = self.presentationData.strings.VoiceChat_ShareShort @@ -3908,7 +3270,7 @@ public final class VoiceChatController: ViewController { } switch effectiveDisplayMode { - case .default: + case .modal: videoButtonSize = smallButtonSize buttonsTitleAlpha = 1.0 case .fullscreen: @@ -3917,19 +3279,18 @@ public final class VoiceChatController: ViewController { } let hasVideo = self.call.hasVideo || self.call.hasScreencast - let transition: ContainedViewLayoutTransition = animated ? .animated(duration: 0.3, curve: .linear) : .immediate self.cameraButton.update(size: hasVideo ? sideButtonSize : videoButtonSize, content: CallControllerButtonItemNode.Content(appearance: hasVideo ? activeButtonAppearance : normalButtonAppearance, image: hasVideo ? .cameraOn : .cameraOff), text: self.presentationData.strings.VoiceChat_Video, transition: transition) + + self.switchCameraButton.update(size: videoButtonSize, content: CallControllerButtonItemNode.Content(appearance: normalButtonAppearance, image: .flipCamera), text: "", transition: transition) transition.updateAlpha(node: self.switchCameraButton, alpha: hasVideo ? 1.0 : 0.0) transition.updateTransformScale(node: self.switchCameraButton, scale: hasVideo ? 1.0 : 0.0) - + transition.updateAlpha(node: self.audioButton, alpha: hasVideo ? 0.0 : 1.0) transition.updateTransformScale(node: self.audioButton, scale: hasVideo ? 
0.0 : 1.0) - self.switchCameraButton.update(size: videoButtonSize, content: CallControllerButtonItemNode.Content(appearance: normalButtonAppearance, image: .flipCamera), text: "", transition: transition) - - self.audioButton.update(size: sideButtonSize, content: CallControllerButtonItemNode.Content(appearance: soundAppearance, image: soundImage, isEnabled: soundEnabled), text: soundTitle, transition: transition) - self.audioButton.isUserInteractionEnabled = soundEnabled + self.audioButton.update(size: sideButtonSize, content: CallControllerButtonItemNode.Content(appearance: soundAppearance, image: soundImage, isEnabled: isSoundEnabled), text: soundTitle, transition: transition) + self.audioButton.isUserInteractionEnabled = isSoundEnabled self.leaveButton.update(size: sideButtonSize, content: CallControllerButtonItemNode.Content(appearance: .color(.custom(0xff3b30, 0.3)), image: .cancel), text: self.presentationData.strings.VoiceChat_Leave, transition: .immediate) @@ -3950,55 +3311,36 @@ public final class VoiceChatController: ViewController { size.width = floor(min(size.width, size.height) * 0.5) contentWidth = size.width } else { - contentWidth = isLandscape ? min(530.0, size.width - 210.0) : size.width + contentWidth = self.isLandscape ? min(530.0, size.width - 210.0) : size.width } - - let isScheduled = self.isScheduling || self.callState?.scheduleTimestamp != nil - + var previousIsLandscape = false if let previousLayout = previousLayout, case .compact = previousLayout.metrics.widthClass, previousLayout.size.width > previousLayout.size.height { previousIsLandscape = true } - var isLandscape = false - var effectiveDisplayMode = self.displayMode - if case .compact = layout.metrics.widthClass, layout.size.width > layout.size.height { - isLandscape = true - - if !self.isFullscreen { - self.isExpanded = true - self.updateIsFullscreen(true) - } - if self.hasMainVideo { - self.tileListNode.isHidden = false - } - if case .fullscreen = effectiveDisplayMode { - } else { - effectiveDisplayMode = .fullscreen(controlsHidden: false) - } - } else if case .default = effectiveDisplayMode { - if self.hasMainVideo { - self.tileListNode.isHidden = true - } - } + let isLandscape = self.isLandscape + let effectiveDisplayMode = self.effectiveDisplayMode if previousIsLandscape != isLandscape { + self.updateDecorationsColors() + self.updateDecorationsLayout(transition: transition) self.updateMembers(muteState: self.effectiveMuteState, callMembers: self.currentCallMembers ?? ([], nil), invitedPeers: self.currentInvitedPeers ?? [], speakingPeers: self.currentSpeakingPeers ?? 
Set()) } - - if let videoIndex = self.contentContainer.subnodes?.firstIndex(where: { $0 === self.mainStageVideoClippingNode }), let listIndex = self.contentContainer.subnodes?.firstIndex(where: { $0 === self.listNode }) { - switch effectiveDisplayMode { - case .default: - if listIndex < videoIndex { - self.bringVideoToBackOnCompletion = true - } - case .fullscreen: - if listIndex > videoIndex { - self.contentContainer.insertSubnode(self.mainStageVideoClippingNode, belowSubnode: self.tileListNode) - } - } - } - +// +// if let videoIndex = self.contentContainer.subnodes?.firstIndex(where: { $0 === self.mainStageVideoClippingNode }), let listIndex = self.contentContainer.subnodes?.firstIndex(where: { $0 === self.listNode }) { +// switch effectiveDisplayMode { +// case .modal: +// if listIndex < videoIndex { +// self.bringVideoToBackOnCompletion = true +// } +// case .fullscreen: +// if listIndex > videoIndex { +// self.contentContainer.insertSubnode(self.mainStageVideoClippingNode, belowSubnode: self.fullscreenListNode) +// } +// } +// } +// transition.updateFrame(node: self.titleNode, frame: CGRect(origin: CGPoint(x: floorToScreenPixels((size.width - contentWidth) / 2.0), y: 10.0), size: CGSize(width: contentWidth, height: 44.0))) self.updateTitle(transition: transition) @@ -4009,11 +3351,11 @@ public final class VoiceChatController: ViewController { transition.updateFrame(node: self.contentContainer, frame: CGRect(origin: CGPoint(x: floorToScreenPixels((layout.size.width - size.width) / 2.0), y: 0.0), size: size)) let layoutTopInset: CGFloat = max(layout.statusBarHeight ?? 0.0, layout.safeInsets.top) - let sideInset: CGFloat = 16.0 + let sideInset: CGFloat = 14.0 - var insets = UIEdgeInsets() - insets.left = sideInset + (isLandscape ? 0.0 : layout.safeInsets.left) - insets.right = sideInset + (isLandscape ? 0.0 : layout.safeInsets.right) + var listInsets = UIEdgeInsets() + listInsets.left = sideInset + (isLandscape ? 0.0 : layout.safeInsets.left) + listInsets.right = sideInset + (isLandscape ? 0.0 : layout.safeInsets.right) let topEdgeOffset: CGFloat if let statusBarHeight = layout.statusBarHeight { @@ -4037,19 +3379,15 @@ public final class VoiceChatController: ViewController { let bottomPanelHeight = self.effectiveBottomAreaHeight + layout.intrinsicInsets.bottom var listTopInset = layoutTopInset + topPanelHeight - var topCornersY = topPanelHeight + let topCornersY = topPanelHeight if isLandscape { listTopInset = topPanelHeight - } else if self.hasMainVideo && self.isExpanded { - let videoContainerHeight = min(mainVideoHeight, layout.size.width) - listTopInset += videoContainerHeight - topCornersY += videoContainerHeight } - let listSize = CGSize(width: contentWidth, height: layout.size.height - listTopInset - (isLandscape ? layout.intrinsicInsets.bottom : bottomPanelHeight)) + let listSize = CGSize(width: contentWidth, height: layout.size.height - listTopInset - (isLandscape ? layout.intrinsicInsets.bottom : bottomPanelHeight) + bottomGradientHeight) let topInset: CGFloat if let (panInitialTopInset, panOffset) = self.panGestureArguments { - if self.isExpanded && !self.hasMainVideo { + if self.isExpanded { topInset = min(self.topInset ?? listSize.height, panInitialTopInset + max(0.0, panOffset)) } else { topInset = max(0.0, panInitialTopInset + min(0.0, panOffset)) @@ -4057,41 +3395,43 @@ public final class VoiceChatController: ViewController { } else if let currentTopInset = self.topInset { topInset = self.isExpanded ? 
0.0 : currentTopInset } else { - topInset = listSize.height - 46.0 - floor(56.0 * 3.5) + topInset = listSize.height - 46.0 - floor(56.0 * 3.5) - bottomGradientHeight } transition.updateFrame(node: self.listNode, frame: CGRect(origin: CGPoint(x: floorToScreenPixels((size.width - contentWidth) / 2.0), y: listTopInset + topInset), size: listSize)) - let (duration, curve) = listViewAnimationDurationAndCurve(transition: transition) - self.listNode.transaction(deleteIndices: [], insertIndicesAndItems: [], updateIndicesAndItems: [], options: [.Synchronous, .LowLatency], scrollToItem: nil, updateSizeAndInsets: ListViewUpdateSizeAndInsets(size: listSize, insets: insets, duration: duration, curve: curve), stationaryItemRange: nil, updateOpaqueState: nil, completion: { _ in }) + listInsets.bottom = bottomGradientHeight - let tileListWidth: CGFloat - let tileListHeight: CGFloat = 84.0 - let tileListPosition: CGPoint - let tileListTransform: CATransform3D - let tileListInset: CGFloat = 16.0 - let tileListUpdateSizeAndInsets: ListViewUpdateSizeAndInsets + let (duration, curve) = listViewAnimationDurationAndCurve(transition: transition) + self.listNode.transaction(deleteIndices: [], insertIndicesAndItems: [], updateIndicesAndItems: [], options: [.Synchronous, .LowLatency], scrollToItem: nil, updateSizeAndInsets: ListViewUpdateSizeAndInsets(size: listSize, insets: listInsets, duration: duration, curve: curve), stationaryItemRange: nil, updateOpaqueState: nil, completion: { _ in }) + + let fullscreenListWidth: CGFloat + let fullscreenListHeight: CGFloat = 84.0 + let fullscreenListPosition: CGPoint + let fullscreenListTransform: CATransform3D + let fullscreenListInset: CGFloat = 14.0 + let fullscreenListUpdateSizeAndInsets: ListViewUpdateSizeAndInsets if isLandscape { - tileListWidth = layout.size.height - tileListPosition = CGPoint( - x: layout.size.width - min(self.effectiveBottomAreaHeight, fullscreenBottomAreaHeight) - layout.safeInsets.right - tileListHeight / 2.0, + fullscreenListWidth = layout.size.height + fullscreenListPosition = CGPoint( + x: layout.size.width - min(self.effectiveBottomAreaHeight, fullscreenBottomAreaHeight) - layout.safeInsets.right - fullscreenListHeight / 2.0, y: layout.size.height / 2.0 ) - tileListTransform = CATransform3DIdentity - tileListUpdateSizeAndInsets = ListViewUpdateSizeAndInsets(size: CGSize(width: tileListHeight, height: layout.size.height), insets: UIEdgeInsets(top: tileListInset, left: 0.0, bottom: tileListInset, right: 0.0), duration: duration, curve: curve) + fullscreenListTransform = CATransform3DIdentity + fullscreenListUpdateSizeAndInsets = ListViewUpdateSizeAndInsets(size: CGSize(width: fullscreenListHeight, height: layout.size.height), insets: UIEdgeInsets(top: fullscreenListInset, left: 0.0, bottom: fullscreenListInset, right: 0.0), duration: duration, curve: curve) } else { - tileListWidth = layout.size.width - tileListPosition = CGPoint( + fullscreenListWidth = layout.size.width + fullscreenListPosition = CGPoint( x: layout.safeInsets.left + layout.size.width / 2.0, - y: layout.size.height - min(bottomPanelHeight, fullscreenBottomAreaHeight + layout.intrinsicInsets.bottom) - tileListHeight / 2.0 + y: layout.size.height - min(bottomPanelHeight, fullscreenBottomAreaHeight + layout.intrinsicInsets.bottom) - fullscreenListHeight / 2.0 + 4.0 ) - tileListTransform = CATransform3DMakeRotation(-CGFloat(CGFloat.pi / 2.0), 0.0, 0.0, 1.0) - tileListUpdateSizeAndInsets = ListViewUpdateSizeAndInsets(size: CGSize(width: tileListHeight, height: layout.size.width), 
insets: UIEdgeInsets(top: tileListInset + layout.safeInsets.left, left: 0.0, bottom: tileListInset + layout.safeInsets.left, right: 0.0), duration: duration, curve: curve) + fullscreenListTransform = CATransform3DMakeRotation(-CGFloat(CGFloat.pi / 2.0), 0.0, 0.0, 1.0) + fullscreenListUpdateSizeAndInsets = ListViewUpdateSizeAndInsets(size: CGSize(width: fullscreenListHeight, height: layout.size.width), insets: UIEdgeInsets(top: fullscreenListInset + layout.safeInsets.left, left: 0.0, bottom: fullscreenListInset + layout.safeInsets.left, right: 0.0), duration: duration, curve: curve) } - self.tileListNode.bounds = CGRect(x: 0.0, y: 0.0, width: tileListHeight, height: tileListWidth) - transition.updatePosition(node: self.tileListNode, position: tileListPosition) - self.tileListNode.transform = tileListTransform - self.tileListNode.transaction(deleteIndices: [], insertIndicesAndItems: [], updateIndicesAndItems: [], options: [.Synchronous, .LowLatency], scrollToItem: nil, updateSizeAndInsets: tileListUpdateSizeAndInsets, stationaryItemRange: nil, updateOpaqueState: nil, completion: { _ in }) + self.fullscreenListNode.bounds = CGRect(x: 0.0, y: 0.0, width: fullscreenListHeight, height: fullscreenListWidth) + transition.updatePosition(node: self.fullscreenListNode, position: fullscreenListPosition) + self.fullscreenListNode.transform = fullscreenListTransform + self.fullscreenListNode.transaction(deleteIndices: [], insertIndicesAndItems: [], updateIndicesAndItems: [], options: [.Synchronous, .LowLatency], scrollToItem: nil, updateSizeAndInsets: fullscreenListUpdateSizeAndInsets, stationaryItemRange: nil, updateOpaqueState: nil, completion: { _ in }) transition.updateFrame(node: self.topCornersNode, frame: CGRect(origin: CGPoint(x: sideInset + floorToScreenPixels((size.width - contentWidth) / 2.0), y: topCornersY), size: CGSize(width: contentWidth - sideInset * 2.0, height: 50.0))) @@ -4099,17 +3439,12 @@ public final class VoiceChatController: ViewController { let bottomPanelCoverHeight = bottomAreaHeight + layout.intrinsicInsets.bottom let bottomPanelCoverFrame = CGRect(origin: CGPoint(x: 0.0, y: layout.size.height - bottomPanelCoverHeight), size: CGSize(width: size.width, height: bottomPanelCoverHeight)) if isLandscape { -// transition.updateAlpha(node: self.closeButton, alpha: 0.0) -// transition.updateAlpha(node: self.optionsButton, alpha: 0.0) -// transition.updateAlpha(node: self.titleNode, alpha: 0.0) bottomPanelFrame = CGRect(origin: CGPoint(x: layout.size.width - fullscreenBottomAreaHeight - layout.safeInsets.right, y: 0.0), size: CGSize(width: fullscreenBottomAreaHeight + layout.safeInsets.right, height: layout.size.height)) - } else { -// transition.updateAlpha(node: self.closeButton, alpha: 1.0) -// transition.updateAlpha(node: self.optionsButton, alpha: self.optionsButton.isUserInteractionEnabled ? 1.0 : 0.0) -// transition.updateAlpha(node: self.titleNode, alpha: 1.0) } + let bottomGradientFrame = CGRect(origin: CGPoint(x: 0.0, y: layout.size.height - bottomPanelCoverHeight), size: CGSize(width: size.width, height: bottomGradientHeight)) transition.updateAlpha(node: self.optionsButton, alpha: self.optionsButton.isUserInteractionEnabled ? 
1.0 : 0.0) transition.updateFrame(node: self.bottomPanelCoverNode, frame: bottomPanelCoverFrame) + transition.updateFrame(node: self.bottomGradientNode, frame: bottomGradientFrame) transition.updateFrame(node: self.bottomPanelNode, frame: bottomPanelFrame) if let pickerView = self.pickerView { @@ -4137,7 +3472,7 @@ public final class VoiceChatController: ViewController { let forthButtonFrame: CGRect let leftButtonFrame: CGRect - if self.mainStageVideoContainerNode == nil || isScheduled { + if self.isScheduled { leftButtonFrame = CGRect(origin: CGPoint(x: sideButtonOrigin, y: floor((self.effectiveBottomAreaHeight - sideButtonSize.height) / 2.0)), size: sideButtonSize) } else { leftButtonFrame = CGRect(origin: CGPoint(x: sideButtonOrigin, y: floor((self.effectiveBottomAreaHeight - sideButtonSize.height - upperButtonDistance - cameraButtonSize.height) / 2.0) + upperButtonDistance + cameraButtonSize.height), size: sideButtonSize) @@ -4146,7 +3481,7 @@ public final class VoiceChatController: ViewController { let smallButtons: Bool switch effectiveDisplayMode { - case .default: + case .modal: smallButtons = false firstButtonFrame = CGRect(origin: CGPoint(x: floor(leftButtonFrame.midX - cameraButtonSize.width / 2.0), y: leftButtonFrame.minY - upperButtonDistance - cameraButtonSize.height), size: cameraButtonSize) secondButtonFrame = leftButtonFrame @@ -4158,7 +3493,7 @@ public final class VoiceChatController: ViewController { if isLandscape { let sideInset: CGFloat let buttonsCount: Int - if self.mainStageVideoContainerNode == nil { + if false { sideInset = 42.0 buttonsCount = 3 } else { @@ -4283,22 +3618,24 @@ public final class VoiceChatController: ViewController { self.scheduleCancelButton.frame = CGRect(x: 16.0, y: 137.0, width: size.width - 32.0, height: buttonHeight) if self.actionButton.supernode === self.bottomPanelNode { - transition.updateFrame(node: self.actionButton, frame: thirdButtonFrame) + transition.updateFrame(node: self.actionButton, frame: thirdButtonFrame, completion: transition.isAnimated ? { [weak self] _ in + self?.animatingExpansion = false + } : nil) } - self.updateButtons(animated: !isFirstTime) + self.updateButtons(transition: !isFirstTime ? 
.animated(duration: 0.3, curve: .linear) : .immediate) if self.audioButton.supernode === self.bottomPanelNode { transition.updateFrameAsPositionAndBounds(node: self.switchCameraButton, frame: firstButtonFrame) - if !self.animatingButtons || transition.isAnimated { + if !self.animatingButtonsSwap || transition.isAnimated { if self.call.hasVideo { transition.updateFrameAsPositionAndBounds(node: self.cameraButton, frame: secondButtonFrame, completion: { [weak self] _ in - self?.animatingButtons = false + self?.animatingButtonsSwap = false }) } else { transition.updateFrameAsPositionAndBounds(node: self.cameraButton, frame: firstButtonFrame, completion: { [weak self] _ in - self?.animatingButtons = false + self?.animatingButtonsSwap = false }) } } @@ -4309,8 +3646,8 @@ public final class VoiceChatController: ViewController { while !self.enqueuedTransitions.isEmpty { self.dequeueTransition() } - while !self.enqueuedTileTransitions.isEmpty { - self.dequeueTransition() + while !self.enqueuedFullscreenTransitions.isEmpty { + self.dequeueFullscreenTransition() } } } @@ -4320,10 +3657,6 @@ public final class VoiceChatController: ViewController { return } - if self.hasMainVideo && !self.isFullscreen { - self.updateIsFullscreen(true) - } - self.updateDecorationsLayout(transition: .immediate) self.animatingAppearance = true @@ -4397,12 +3730,12 @@ public final class VoiceChatController: ViewController { } } - private func enqueueTileTransition(_ transition: ListTransition) { - self.enqueuedTileTransitions.append(transition) + private func enqueueFullscreenTransition(_ transition: ListTransition) { + self.enqueuedFullscreenTransitions.append(transition) if let _ = self.validLayout { - while !self.enqueuedTileTransitions.isEmpty { - self.dequeueTileTransition() + while !self.enqueuedFullscreenTransitions.isEmpty { + self.dequeueFullscreenTransition() } } } @@ -4421,7 +3754,7 @@ public final class VoiceChatController: ViewController { self.listNode.alpha = 0.0 self.listNode.isUserInteractionEnabled = false self.backgroundNode.backgroundColor = panelBackgroundColor - self.updateIsFullscreen(false) + self.updateDecorationsColors() } else if callState.scheduleTimestamp == nil && !self.isScheduling && self.listNode.alpha == 0.0 { self.transitionToCall() } @@ -4441,20 +3774,7 @@ public final class VoiceChatController: ViewController { } options.insert(.LowLatency) options.insert(.PreferSynchronousResourceLoading) - - var itemsHeight: CGFloat = 0.0 - var itemsCount = transition.count - if transition.canInvite { - itemsHeight += 46.0 - itemsCount -= 1 - } - itemsHeight += CGFloat(itemsCount) * 56.0 - - let sideInset: CGFloat = 16.0 - var insets = UIEdgeInsets() - insets.left = layout.safeInsets.left + sideInset - insets.right = layout.safeInsets.right + sideInset - + var size = layout.size if case .regular = layout.metrics.widthClass { size.width = floor(min(size.width, size.height) * 0.5) @@ -4463,18 +3783,10 @@ public final class VoiceChatController: ViewController { let bottomPanelHeight = self.isLandscape ? layout.intrinsicInsets.bottom : self.effectiveBottomAreaHeight + layout.intrinsicInsets.bottom let layoutTopInset: CGFloat = max(layout.statusBarHeight ?? 
0.0, layout.safeInsets.top) let listTopInset = layoutTopInset + topPanelHeight - let listSize = CGSize(width: size.width, height: layout.size.height - listTopInset - bottomPanelHeight) + let listSize = CGSize(width: size.width, height: layout.size.height - listTopInset - bottomPanelHeight + bottomGradientHeight) - self.topInset = listSize.height - 46.0 - floor(56.0 * 3.5) + self.topInset = listSize.height - 46.0 - floor(56.0 * 3.5) - bottomGradientHeight - let targetY = listTopInset + (self.topInset ?? listSize.height) - - if isFirstTime { - var frame = self.listNode.frame - frame.origin.y = targetY - self.listNode.frame = frame - } - if transition.animated { self.animatingInsertion = true } @@ -4495,26 +3807,21 @@ public final class VoiceChatController: ViewController { }) } - private func dequeueTileTransition() { - guard let _ = self.validLayout, let transition = self.enqueuedTileTransitions.first else { + private func dequeueFullscreenTransition() { + guard let _ = self.validLayout, let transition = self.enqueuedFullscreenTransitions.first else { return } - self.enqueuedTileTransitions.remove(at: 0) + self.enqueuedFullscreenTransitions.remove(at: 0) var options = ListViewDeleteAndInsertOptions() let isFirstTime = self.isFirstTime if !isFirstTime { - if transition.crossFade { - options.insert(.AnimateCrossfade) - } if transition.animated { options.insert(.AnimateInsertion) } } - options.insert(.LowLatency) - options.insert(.PreferSynchronousResourceLoading) - self.tileListNode.transaction(deleteIndices: transition.deletions, insertIndicesAndItems: transition.insertions, updateIndicesAndItems: transition.updates, options: options, scrollToItem: nil, updateSizeAndInsets: nil, updateOpaqueState: nil, completion: { _ in + self.fullscreenListNode.transaction(deleteIndices: transition.deletions, insertIndicesAndItems: transition.insertions, updateIndicesAndItems: transition.updates, options: options, scrollToItem: nil, updateSizeAndInsets: nil, updateOpaqueState: nil, completion: { _ in }) } @@ -4529,34 +3836,19 @@ public final class VoiceChatController: ViewController { self.currentInvitedPeers = invitedPeers var entries: [ListEntry] = [] - var tileEntries: [ListEntry] = [] + var fullscreenEntries: [ListEntry] = [] var index: Int32 = 0 var processedPeerIds = Set() - - var canInvite = true - var inviteIsLink = false - if let peer = self.peer as? TelegramChannel { - if peer.flags.contains(.isGigagroup) || (peer.addressName?.isEmpty ?? true) { - if peer.flags.contains(.isCreator) || peer.adminRights != nil { - } else { - canInvite = false - } - } - if case .broadcast = peer.info, !(peer.addressName?.isEmpty ?? true) { - inviteIsLink = true - } - } - - if false, canInvite { - entries.append(.invite(self.presentationData.theme, self.presentationData.strings, inviteIsLink ? self.presentationData.strings.VoiceChat_Share : self.presentationData.strings.VoiceChat_InviteMember, inviteIsLink)) - } - + var endpointIdToPeerId: [String: PeerId] = [:] var peerIdToEndpointId: [PeerId: String] = [:] var requestedVideoChannels: [PresentationGroupCallRequestedVideo] = [] + var tileItems: [VoiceChatTileItem] = [] + var tileMap: [String: VoiceChatTileItem] = [:] + + var fullscreenById: [PeerId: ListEntry] = [:] - var pinnedEntry: ListEntry? 
for member in callMembers.0 { if processedPeerIds.contains(member.peer.id) { continue @@ -4612,8 +3904,8 @@ public final class VoiceChatController: ViewController { if let videoEndpointId = member.videoEndpointId { endpointIdToPeerId[videoEndpointId] = member.peer.id } - if let screencastEndpointId = member.presentationEndpointId { - endpointIdToPeerId[screencastEndpointId] = member.peer.id + if let presentationEndpointId = member.presentationEndpointId { + endpointIdToPeerId[presentationEndpointId] = member.peer.id } if let anyEndpointId = member.presentationEndpointId ?? member.videoEndpointId { peerIdToEndpointId[member.peer.id] = anyEndpointId @@ -4623,77 +3915,102 @@ public final class VoiceChatController: ViewController { peer: memberPeer, about: member.about, isMyPeer: self.callState?.myPeerId == member.peer.id, - ssrc: member.ssrc, videoEndpointId: member.videoEndpointId, - screencastEndpointId: member.presentationEndpointId, + presentationEndpointId: member.presentationEndpointId, activityTimestamp: Int32.max - 1 - index, state: memberState, muteState: memberMuteState, - revealed: false, canManageCall: self.callState?.canManageCall ?? false, volume: member.volume, raisedHand: member.hasRaiseHand, displayRaisedHandStatus: self.displayedRaisedHands.contains(member.peer.id), - pinned: memberPeer.id == self.effectiveSpeakerWithVideo?.0, - style: .list + active: memberPeer.id == self.effectiveSpeakerWithVideo?.0, + isLandscape: false )) - entries.append(entry) - let tileEntry: ListEntry = .peer(PeerEntry( + var isTile = false + if let interaction = self.itemInteraction { + if let videoEndpointId = member.presentationEndpointId, self.readyVideoNodes.contains(videoEndpointId) { + if !self.videoNodesOrder.contains(videoEndpointId) { + self.videoNodesOrder.append(videoEndpointId) + } + isTile = true + tileMap[videoEndpointId] = VoiceChatTileItem(peer: member.peer, videoEndpointId: videoEndpointId, strings: self.presentationData.strings, nameDisplayOrder: self.presentationData.nameDisplayOrder, speaking: speakingPeers.contains(member.peer.id), icon: .microphone(true), action: { [weak self] in + if let strongSelf = self { + strongSelf.currentDominantSpeakerWithVideo = member.peer.id + strongSelf.effectiveSpeakerWithVideo = (member.peer.id, videoEndpointId) + strongSelf.toggleDisplayMode() + } + }, getVideo: { + return interaction.getPeerVideo(videoEndpointId, false) + }, getAudioLevel: { + return interaction.getAudioLevel(member.peer.id) + }) + } + if let videoEndpointId = member.videoEndpointId, self.readyVideoNodes.contains(videoEndpointId) { + if !self.videoNodesOrder.contains(videoEndpointId) { + self.videoNodesOrder.append(videoEndpointId) + } + isTile = true + tileMap[videoEndpointId] = VoiceChatTileItem(peer: member.peer, videoEndpointId: videoEndpointId, strings: self.presentationData.strings, nameDisplayOrder: self.presentationData.nameDisplayOrder, speaking: speakingPeers.contains(member.peer.id), icon: .microphone(true), action: { [weak self] in + if let strongSelf = self { + strongSelf.currentDominantSpeakerWithVideo = member.peer.id + strongSelf.effectiveSpeakerWithVideo = (member.peer.id, videoEndpointId) + strongSelf.toggleDisplayMode() + } + }, getVideo: { + return interaction.getPeerVideo(videoEndpointId, false) + }, getAudioLevel: { + return interaction.getAudioLevel(member.peer.id) + }) + } + } + + if !isTile { + entries.append(entry) + } + + let fullscreenEntry: ListEntry = .peer(PeerEntry( peer: memberPeer, about: member.about, isMyPeer: self.callState?.myPeerId == 
member.peer.id, - ssrc: member.ssrc, videoEndpointId: member.videoEndpointId, - screencastEndpointId: member.presentationEndpointId, + presentationEndpointId: member.presentationEndpointId, activityTimestamp: Int32.max - 1 - index, state: memberState, muteState: memberMuteState, - revealed: false, canManageCall: self.callState?.canManageCall ?? false, volume: member.volume, raisedHand: member.hasRaiseHand, displayRaisedHandStatus: self.displayedRaisedHands.contains(member.peer.id), - pinned: memberPeer.id == self.effectiveSpeakerWithVideo?.0, - style: .tile(isLandscape: self.isLandscape) + active: memberPeer.id == self.effectiveSpeakerWithVideo?.0, + isLandscape: self.isLandscape )) - tileEntries.append(tileEntry) + fullscreenEntries.append(fullscreenEntry) index += 1 - - if memberPeer.id == self.effectiveSpeakerWithVideo?.0 { - pinnedEntry = .peer(PeerEntry( - peer: memberPeer, - about: nil, - isMyPeer: self.callState?.myPeerId == member.peer.id, - ssrc: member.ssrc, - videoEndpointId: member.videoEndpointId, - screencastEndpointId: member.presentationEndpointId, - activityTimestamp: Int32.max - 1 - index, - state: memberState, - muteState: memberMuteState, - revealed: false, - canManageCall: self.callState?.canManageCall ?? false, - volume: member.volume, - raisedHand: member.hasRaiseHand, - displayRaisedHandStatus: self.displayedRaisedHands.contains(member.peer.id), - pinned: memberPeer.id == self.currentForcedSpeakerWithVideo, - style: .list - )) - } + + if self.callState?.networkState == .connecting { + } else { + if var videoChannel = member.requestedVideoChannel(quality: .medium) { + if self.effectiveSpeakerWithVideo?.1 == videoChannel.endpointId { + videoChannel.quality = .full + } - if var videoChannel = member.requestedVideoChannel(quality: .thumbnail) { - if self.effectiveSpeakerWithVideo?.1 == videoChannel.endpointId { - videoChannel.quality = .full + requestedVideoChannels.append(videoChannel) } + if var presentationChannel = member.requestedPresentationVideoChannel(quality: .medium) { + if self.effectiveSpeakerWithVideo?.1 == presentationChannel.endpointId { + presentationChannel.quality = .full + } - requestedVideoChannels.append(videoChannel) - } - if var presentationChannel = member.requestedVideoChannel(quality: .thumbnail) { - if self.effectiveSpeakerWithVideo?.1 == presentationChannel.endpointId { - presentationChannel.quality = .full + requestedVideoChannels.append(presentationChannel) } - - requestedVideoChannels.append(presentationChannel) + } + } + + for tileEndpoint in self.videoNodesOrder { + if let tileItem = tileMap[tileEndpoint] { + tileItems.append(tileItem) } } @@ -4710,19 +4027,17 @@ public final class VoiceChatController: ViewController { peer: peer, about: nil, isMyPeer: false, - ssrc: nil, videoEndpointId: nil, - screencastEndpointId: nil, + presentationEndpointId: nil, activityTimestamp: Int32.max - 1 - index, state: .invited, muteState: nil, - revealed: false, canManageCall: false, volume: nil, raisedHand: false, displayRaisedHandStatus: false, - pinned: false, - style: .list + active: false, + isLandscape: false ))) index += 1 } @@ -4734,31 +4049,32 @@ public final class VoiceChatController: ViewController { self.endpointToPeerId = endpointIdToPeerId self.peerIdToEndpoint = peerIdToEndpointId - let previousPinnedEntry = self.pinnedEntry - self.pinnedEntry = pinnedEntry - var previousPinnedPeerEntry: PeerEntry? - var pinnedPeerEntry: PeerEntry? 
- - if let previousPinnedEntry = previousPinnedEntry, case let .peer(previousPeerEntry) = previousPinnedEntry { - previousPinnedPeerEntry = previousPeerEntry - } - if let pinnedEntry = pinnedEntry, case let .peer(peerEntry) = pinnedEntry { - pinnedPeerEntry = peerEntry - } - if previousPinnedPeerEntry?.peer.id != pinnedPeerEntry?.peer.id { - self.updateDecorationsLayout(transition: .animated(duration: 0.2, curve: .easeInOut)) + if !tileItems.isEmpty { + entries.insert(.tiles(tileItems), at: 0) } - if updatePinnedPeer && (previousPinnedPeerEntry?.videoEndpointId != pinnedPeerEntry?.videoEndpointId || previousPinnedPeerEntry?.screencastEndpointId != pinnedPeerEntry?.screencastEndpointId) { - self.updateMainStageVideo(waitForFullSize: false, currentEntries: entries, updateMembers: true, force: true) - return + var canInvite = true + var inviteIsLink = false + if let peer = self.peer as? TelegramChannel { + if peer.flags.contains(.isGigagroup) || (peer.addressName?.isEmpty ?? true) { + if peer.flags.contains(.isCreator) || peer.adminRights != nil { + } else { + canInvite = false + } + } + if case .broadcast = peer.info, !(peer.addressName?.isEmpty ?? true) { + inviteIsLink = true + } + } + if canInvite { + entries.append(.invite(self.presentationData.theme, self.presentationData.strings, inviteIsLink ? self.presentationData.strings.VoiceChat_Share : self.presentationData.strings.VoiceChat_InviteMember, inviteIsLink)) } let previousEntries = self.currentEntries - let previousTileEntries = self.currentTileEntries + let previousFullscreenEntries = self.currentFullscreenEntries self.currentEntries = entries - self.currentTileEntries = tileEntries + self.currentFullscreenEntries = fullscreenEntries if previousEntries.count == entries.count { var allEqual = true @@ -4768,9 +4084,6 @@ public final class VoiceChatController: ViewController { if lhsPeer.isMyPeer != rhsPeer.isMyPeer { allEqual = false break - } else if lhsPeer.pinned || rhsPeer.pinned { - allEqual = false - break } } else { allEqual = false @@ -4786,11 +4099,11 @@ public final class VoiceChatController: ViewController { } let presentationData = self.presentationData.withUpdated(theme: self.darkTheme) - let transition = preparedTransition(from: previousEntries, to: entries, isLoading: false, isEmpty: false, canInvite: canInvite, crossFade: false, animated: !disableAnimation, context: self.context, presentationData: presentationData, interaction: self.itemInteraction!) + let transition = self.preparedTransition(from: previousEntries, to: entries, isLoading: false, isEmpty: false, canInvite: canInvite, crossFade: false, animated: !disableAnimation, context: self.context, presentationData: presentationData, interaction: self.itemInteraction!) self.enqueueTransition(transition) - let tileTransition = preparedTransition(from: previousTileEntries, to: tileEntries, isLoading: false, isEmpty: false, canInvite: canInvite, crossFade: false, animated: !disableAnimation, context: self.context, presentationData: presentationData, interaction: self.itemInteraction!) - self.enqueueTileTransition(tileTransition) + let fullscreenTransition = self.preparedFullscreenTransition(from: previousFullscreenEntries, to: fullscreenEntries, isLoading: false, isEmpty: false, canInvite: canInvite, crossFade: false, animated: true, context: self.context, presentationData: presentationData, interaction: self.itemInteraction!) 
+ self.enqueueFullscreenTransition(fullscreenTransition) } private func callStateDidReset() { @@ -4808,29 +4121,28 @@ public final class VoiceChatController: ViewController { self.requestedVideoSources.insert(channel.endpointId) self.call.makeIncomingVideoView(endpointId: channel.endpointId, completion: { [weak self] videoView in Queue.mainQueue().async { - guard let strongSelf = self, let videoView = videoView else { - return - } - let videoNode = GroupVideoNode(videoView: videoView) - strongSelf.videoNodes.append((channel.endpointId, videoNode)) - - if let _ = strongSelf.validLayout { - loop: for i in 0 ..< strongSelf.currentEntries.count { - let entry = strongSelf.currentEntries[i] - let tileEntry = strongSelf.currentTileEntries[i] - switch entry { - case let .peer(peerEntry): - if peerEntry.effectiveVideoEndpointId == channel.endpointId { - let presentationData = strongSelf.presentationData.withUpdated(theme: strongSelf.darkTheme) - strongSelf.listNode.transaction(deleteIndices: [], insertIndicesAndItems: [], updateIndicesAndItems: [ListViewUpdateItem(index: i, previousIndex: i, item: entry.item(context: strongSelf.context, presentationData: presentationData, interaction: strongSelf.itemInteraction!, transparent: false), directionHint: nil)], options: [.Synchronous], updateOpaqueState: nil) - strongSelf.tileListNode.transaction(deleteIndices: [], insertIndicesAndItems: [], updateIndicesAndItems: [ListViewUpdateItem(index: i, previousIndex: i, item: tileEntry.item(context: strongSelf.context, presentationData: presentationData, interaction: strongSelf.itemInteraction!, transparent: false), directionHint: nil)], options: [.Synchronous], updateOpaqueState: nil) - break loop + self?.call.makeIncomingVideoView(endpointId: channel.endpointId, completion: { [weak self] backdropVideoView in + Queue.mainQueue().async { + guard let strongSelf = self, let videoView = videoView else { + return + } + let videoNode = GroupVideoNode(videoView: videoView, backdropVideoView: backdropVideoView) + strongSelf.readyVideoDisposables.set((videoNode.ready + |> filter { $0 } + |> take(1) + ).start(next: { [weak self] _ in + if let strongSelf = self { + strongSelf.readyVideoNodes.insert(channel.endpointId) + strongSelf.updateMembers(muteState: strongSelf.effectiveMuteState, callMembers: strongSelf.currentCallMembers ?? ([], nil), invitedPeers: strongSelf.currentInvitedPeers ?? [], speakingPeers: strongSelf.currentSpeakingPeers ?? Set()) } - default: - break + }), forKey: channel.endpointId) + strongSelf.videoNodes[channel.endpointId] = videoNode + + if let _ = strongSelf.validLayout { + strongSelf.updateMembers(muteState: strongSelf.effectiveMuteState, callMembers: strongSelf.currentCallMembers ?? ([], nil), invitedPeers: strongSelf.currentInvitedPeers ?? [], speakingPeers: strongSelf.currentSpeakingPeers ?? 
Set()) } } - } + }) } }) } @@ -4846,55 +4158,35 @@ public final class VoiceChatController: ViewController { self.requestedVideoSources.remove(source) } - for i in (0 ..< self.videoNodes.count).reversed() { - if !validSources.contains(self.videoNodes[i].0) { - let endpointId = self.videoNodes[i].0 - self.videoNodes.remove(at: i) - - loop: for j in 0 ..< self.currentEntries.count { - let entry = self.currentEntries[j] - let tileEntry = self.currentTileEntries[j] - switch entry { - case let .peer(peerEntry): - if peerEntry.effectiveVideoEndpointId == endpointId { - let presentationData = self.presentationData.withUpdated(theme: self.darkTheme) - self.listNode.transaction(deleteIndices: [], insertIndicesAndItems: [], updateIndicesAndItems: [ListViewUpdateItem(index: j, previousIndex: j, item: entry.item(context: self.context, presentationData: presentationData, interaction: self.itemInteraction!, transparent: false), directionHint: nil)], options: [.Synchronous], updateOpaqueState: nil) - self.tileListNode.transaction(deleteIndices: [], insertIndicesAndItems: [], updateIndicesAndItems: [ListViewUpdateItem(index: j, previousIndex: j, item: tileEntry.item(context: self.context, presentationData: presentationData, interaction: self.itemInteraction!, transparent: false), directionHint: nil)], options: [.Synchronous], updateOpaqueState: nil) - break loop - } - default: - break - } - } + for (videoEndpointId, _) in self.videoNodes { + if !validSources.contains(videoEndpointId) { + self.videoNodes[videoEndpointId] = nil + self.readyVideoDisposables.set(nil, forKey: videoEndpointId) + +// loop: for j in 0 ..< self.currentFullscreenEntries.count { +// let fullscreenEntry = self.currentFullscreenEntries[j] +// switch fullscreenEntry { +// case let .peer(peerEntry): +// if peerEntry.effectiveVideoEndpointId == videoEndpointId { +// let presentationData = self.presentationData.withUpdated(theme: self.darkTheme) +// self.fullscreenListNode.transaction(deleteIndices: [], insertIndicesAndItems: [], updateIndicesAndItems: [ListViewUpdateItem(index: j, previousIndex: j, item: fullscreenEntry.item(context: self.context, presentationData: presentationData, interaction: self.itemInteraction!), directionHint: nil)], options: [.Synchronous], updateOpaqueState: nil) +// break loop +// } +// default: +// break +// } +// } } } - - if let (peerId, endpointId) = self.effectiveSpeakerWithVideo { - if !validSources.contains(endpointId) { - if peerId == self.currentForcedSpeakerWithVideo { - self.currentForcedSpeakerWithVideo = nil - } - if peerId == self.currentDominantSpeakerWithVideo { - self.currentDominantSpeakerWithVideo = nil - } - self.updateMainStageVideo(waitForFullSize: false) - } - } - } - - private func updateRequestedVideoChannels() { - self.filterRequestedVideoChannels(channels: self.requestedVideoChannels) - - self.call.setRequestedVideoList(items: self.requestedVideoChannels) } - private func updateMainStageVideo(waitForFullSize: Bool, currentEntries: [ListEntry]? = nil, updateMembers: Bool = true, force: Bool = false) { + private func updateMainVideo(waitForFullSize: Bool, currentEntries: [ListEntry]? = nil, updateMembers: Bool = true, force: Bool = false) { let effectiveMainParticipant = self.currentForcedSpeakerWithVideo ?? self.currentDominantSpeakerWithVideo guard effectiveMainParticipant != self.effectiveSpeakerWithVideo?.0 || force else { return } - let currentEntries = currentEntries ?? self.currentEntries + let currentEntries = currentEntries ?? 
self.currentFullscreenEntries var effectivePeer: (PeerId, String, String?)? = nil var anyPeer: (PeerId, String, String?)? = nil @@ -4911,8 +4203,8 @@ public final class VoiceChatController: ViewController { var otherEndpointId: String? if effectiveEndpointId != peer.videoEndpointId { otherEndpointId = peer.videoEndpointId - } else if effectiveEndpointId != peer.screencastEndpointId { - otherEndpointId = peer.screencastEndpointId + } else if effectiveEndpointId != peer.presentationEndpointId { + otherEndpointId = peer.presentationEndpointId } if let endpointId = effectiveEndpointId { @@ -4927,8 +4219,8 @@ public final class VoiceChatController: ViewController { var otherEndpointId: String? if effectiveEndpointId != peer.videoEndpointId { otherEndpointId = peer.videoEndpointId - } else if effectiveEndpointId != peer.screencastEndpointId { - otherEndpointId = peer.screencastEndpointId + } else if effectiveEndpointId != peer.presentationEndpointId { + otherEndpointId = peer.presentationEndpointId } if let endpointId = effectiveEndpointId { @@ -4951,91 +4243,21 @@ public final class VoiceChatController: ViewController { effectivePeer = anyPeer } - let completion = { - var updateLayout = false - if self.effectiveSpeakerWithVideo != nil && !self.isExpanded { - self.isExpanded = true - updateLayout = true - } else if self.effectiveSpeakerWithVideo == nil && self.isExpanded { - self.isExpanded = false - updateLayout = true - } - - if updateLayout { - self.updateIsFullscreen(self.isExpanded) - self.animatingExpansion = true - let transition = ContainedViewLayoutTransition.animated(duration: 0.3, curve: .spring) - if let (layout, navigationHeight) = self.validLayout { - self.containerLayoutUpdated(layout, navigationHeight: navigationHeight, transition: transition) - } - self.updateDecorationsLayout(transition: transition, completion: { - self.animatingExpansion = false - }) - } - } - - var waitForFullSize = waitForFullSize - if !self.isExpanded { - waitForFullSize = effectivePeer != nil - self.mainStageVideoClippingNode.alpha = 0.0 - } - self.effectiveSpeakerWithVideo = effectivePeer.flatMap { ($0.0, $0.1) } if updateMembers { self.updateMembers(muteState: self.effectiveMuteState, callMembers: self.currentCallMembers ?? ([], nil), invitedPeers: self.currentInvitedPeers ?? [], speakingPeers: self.currentSpeakingPeers ?? 
Set(), updatePinnedPeer: false) } - self.mainStageVideoContainerNode?.updatePeer(peer: effectivePeer, waitForFullSize: waitForFullSize, completion: { [weak self] in - if waitForFullSize { - completion() + self.mainVideoNode.updatePeer(peer: self.effectiveSpeakerWithVideo, waitForFullSize: false) + } - if let strongSelf = self, strongSelf.mainStageVideoClippingNode.alpha.isZero { - strongSelf.mainStageVideoClippingNode.alpha = 1.0 - strongSelf.mainStageVideoClippingNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.4) - } - } - }) - - //self.call.setFullSizeVideo(endpointId: effectivePeer?.1) - - /*self.updateSpeakerWithVideoDisposable.set((self.call.incomingVideoSources - |> mapToSignal { videoSources -> Signal in - if let (_, endpointId, otherEndpointId) = effectivePeer { - var exists = true - if !videoSources.contains(endpointId) { - exists = false - } - if let otherEndpointId = otherEndpointId, !videoSources.contains(otherEndpointId) { - exists = false - } - if exists { - return .single(true) - } - } - return .complete() - } - |> take(1) - |> deliverOnMainQueue - ).start(next: { [weak self] _ in - if let strongSelf = self { - strongSelf.mainStageVideoContainerNode?.updatePeer(peer: effectivePeer, waitForFullSize: waitForFullSize, completion: { [weak self] in - if waitForFullSize { - completion() - - if let strongSelf = self, strongSelf.mainStageVideoClippingNode.alpha.isZero { - strongSelf.mainStageVideoClippingNode.alpha = 1.0 - strongSelf.mainStageVideoClippingNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.4) - } - } - }) - } - }))*/ - - if !waitForFullSize { - completion() + private func updateRequestedVideoChannels() { + Queue.mainQueue().after(0.3) { + self.call.setRequestedVideoList(items: self.requestedVideoChannels) + self.filterRequestedVideoChannels(channels: self.requestedVideoChannels) } } - + override func gestureRecognizerShouldBegin(_ gestureRecognizer: UIGestureRecognizer) -> Bool { if gestureRecognizer is UILongPressGestureRecognizer { return !self.isScheduling @@ -5063,15 +4285,8 @@ public final class VoiceChatController: ViewController { return false } - private var isExpanded = false { - didSet { - self.itemInteraction?.isExpanded = self.isExpanded - } - } - @objc func panGesture(_ recognizer: UIPanGestureRecognizer) { let contentOffset = self.listNode.visibleContentOffset() - let isScheduling = self.isScheduling || self.callState?.scheduleTimestamp != nil switch recognizer.state { case .began: let topInset: CGFloat @@ -5087,36 +4302,44 @@ public final class VoiceChatController: ViewController { self.controller?.dismissAllTooltips() case .changed: var translation = recognizer.translation(in: self.contentContainer.view).y - if isScheduling && translation < 0.0 { + if self.isScheduled && translation < 0.0 { return } - - var topInset: CGFloat = 0.0 - if let (currentTopInset, currentPanOffset) = self.panGestureArguments { - topInset = currentTopInset - - if case let .known(value) = contentOffset, value <= 0.5 { - } else { - translation = currentPanOffset - if self.isExpanded { - recognizer.setTranslation(CGPoint(), in: self.contentContainer.view) + + if case let .modal(isExpanded, previousIsFilled) = self.effectiveDisplayMode { + var topInset: CGFloat = 0.0 + if let (currentTopInset, currentPanOffset) = self.panGestureArguments { + topInset = currentTopInset + + if case let .known(value) = contentOffset, value <= 0.5 { + } else { + translation = currentPanOffset + if self.isExpanded { + recognizer.setTranslation(CGPoint(), in: self.contentContainer.view) + 
} } + + self.panGestureArguments = (currentTopInset, translation) } - - self.panGestureArguments = (currentTopInset, translation) - } - - let currentOffset = topInset + translation - if currentOffset < 20.0 { - self.updateIsFullscreen(true) - } else if currentOffset > 40.0 { - self.updateIsFullscreen(false) - } - - if self.isExpanded && !self.hasMainVideo { - } else { - if currentOffset > 0.0 { - self.listNode.scroller.panGestureRecognizer.setTranslation(CGPoint(), in: self.listNode.scroller) + + let currentOffset = topInset + translation + + var isFilled = previousIsFilled + if currentOffset < 20.0 { + isFilled = true + } else if currentOffset > 40.0 { + isFilled = false + } + if isFilled != previousIsFilled { + self.displayMode = .modal(isExpanded: isExpanded, isFilled: isFilled) + self.updateDecorationsColors() + } + + if self.isExpanded { + } else { + if currentOffset > 0.0 { + self.listNode.scroller.panGestureRecognizer.setTranslation(CGPoint(), in: self.listNode.scroller) + } } } @@ -5125,7 +4348,12 @@ public final class VoiceChatController: ViewController { self.updateDecorationsLayout(transition: .immediate) } - if !self.isExpanded || self.hasMainVideo { + var translateBounds = !self.isExpanded + if case .fullscreen = self.effectiveDisplayMode { + translateBounds = true + } + + if translateBounds { var bounds = self.contentContainer.bounds bounds.origin.y = -translation bounds.origin.y = min(0.0, bounds.origin.y) @@ -5159,11 +4387,11 @@ public final class VoiceChatController: ViewController { topInset = self.listNode.frame.height } - if self.isExpanded && !self.hasMainVideo { + if case .modal(true, _) = self.effectiveDisplayMode { self.panGestureArguments = nil if velocity.y > 300.0 || offset > topInset / 2.0 { - self.isExpanded = false - self.updateIsFullscreen(false) + self.displayMode = .modal(isExpanded: false, isFilled: false) + self.updateDecorationsColors() self.animatingExpansion = true self.listNode.scroller.setContentOffset(CGPoint(), animated: false) @@ -5174,7 +4402,8 @@ public final class VoiceChatController: ViewController { self.animatingExpansion = false }) } else { - self.updateIsFullscreen(true) + self.displayMode = .modal(isExpanded: true, isFilled: true) + self.updateDecorationsColors() self.animatingExpansion = true if let (layout, navigationHeight) = self.validLayout { @@ -5194,15 +4423,17 @@ public final class VoiceChatController: ViewController { self.controller?.dismiss(closing: false, manual: true) } dismissing = true - } else if !isScheduling && (velocity.y < -300.0 || offset < topInset / 2.0) { + } else if !self.isScheduling && (velocity.y < -300.0 || offset < topInset / 2.0) { if velocity.y > -1500.0 && !self.isFullscreen { DispatchQueue.main.async { self.listNode.transaction(deleteIndices: [], insertIndicesAndItems: [], updateIndicesAndItems: [], options: [.Synchronous, .LowLatency], scrollToItem: ListViewScrollToItem(index: 0, position: .top(0.0), animated: true, curve: .Default(duration: nil), directionHint: .Up), updateSizeAndInsets: nil, stationaryItemRange: nil, updateOpaqueState: nil, completion: { _ in }) } } - self.isExpanded = true - self.updateIsFullscreen(true) + if case .modal = self.effectiveDisplayMode { + self.displayMode = .modal(isExpanded: true, isFilled: true) + } + self.updateDecorationsColors() self.animatingExpansion = true if let (layout, navigationHeight) = self.validLayout { @@ -5211,8 +4442,8 @@ public final class VoiceChatController: ViewController { self.updateDecorationsLayout(transition: .animated(duration: 0.3, curve: 
.easeInOut), completion: { self.animatingExpansion = false }) - } else if !isScheduling && !self.hasMainVideo { - self.updateIsFullscreen(false) + } else if !self.isScheduling { + self.updateDecorationsColors() self.animatingExpansion = true self.listNode.scroller.setContentOffset(CGPoint(), animated: false) @@ -5223,7 +4454,7 @@ public final class VoiceChatController: ViewController { self.animatingExpansion = false }) } - if !dismissing && self.hasMainVideo { + if !dismissing { var bounds = self.contentContainer.bounds let previousBounds = bounds bounds.origin.y = 0.0 @@ -5256,11 +4487,9 @@ public final class VoiceChatController: ViewController { if result === self.topPanelNode.view { return self.view } - if result === self.bottomPanelNode.view { return self.view } - if !self.bounds.contains(point) { return nil } @@ -5575,29 +4804,214 @@ public final class VoiceChatController: ViewController { private func displayToggleVideoSourceTooltip(screencast: Bool) { - guard let videoContainerNode = self.mainStageVideoContainerNode, let peerId = self.effectiveSpeakerWithVideo?.0 else { +// guard let videoContainerNode = self.mainStageVideoContainerNode else { +// return +// } +// +// let location = videoContainerNode.view.convert(videoContainerNode.otherVideoWrapperNode.frame, to: nil) +// self.controller?.present(TooltipScreen(text: screencast ? self.presentationData.strings.VoiceChat_TapToViewCameraVideo : self.presentationData.strings.VoiceChat_TapToViewScreenVideo, icon: nil, location: .point(location.offsetBy(dx: -9.0, dy: 0.0), .right), displayDuration: .custom(3.0), shouldDismissOnTouch: { _ in +// return .dismiss(consume: false) +// }), in: .window(.root)) + } + + private var isScheduled: Bool { + return self.isScheduling || self.callState?.scheduleTimestamp != nil + } + + private func toggleDisplayMode() { + guard !self.animatingExpansion else { return } - switch self.displayMode { - case .default: - let location = videoContainerNode.view.convert(videoContainerNode.otherVideoWrapperNode.frame, to: nil) - self.controller?.present(TooltipScreen(text: screencast ? self.presentationData.strings.VoiceChat_TapToViewCameraVideo : self.presentationData.strings.VoiceChat_TapToViewScreenVideo, icon: nil, location: .point(location.offsetBy(dx: -9.0, dy: 0.0), .right), displayDuration: .custom(3.0), shouldDismissOnTouch: { _ in - return .dismiss(consume: false) - }), in: .window(.root)) - case .fullscreen: - var sourceNode: ASDisplayNode? - self.tileListNode.forEachItemNode { itemNode in - if let itemNode = itemNode as? VoiceChatParticipantItemNode, let item = itemNode.item, item.peer.id == peerId { - sourceNode = itemNode + self.updateMembers(muteState: self.effectiveMuteState, callMembers: self.currentCallMembers ?? ([], nil), invitedPeers: self.currentInvitedPeers ?? [], speakingPeers: self.currentSpeakingPeers ?? 
Set()) + + let effectiveDisplayMode = self.displayMode + let nextDisplayMode: DisplayMode + let isLandscape = self.isLandscape + var isFullscreen = false + + switch effectiveDisplayMode { + case .modal: + isFullscreen = true + nextDisplayMode = .fullscreen(controlsHidden: false) + case let .fullscreen(controlsHidden): + if controlsHidden { + if !isLandscape { + nextDisplayMode = .modal(isExpanded: true, isFilled: true) + } else { + isFullscreen = true + nextDisplayMode = .fullscreen(controlsHidden: false) + } + } else { + isFullscreen = true + nextDisplayMode = .fullscreen(controlsHidden: true) + } + } + + let completion = { + self.displayMode = nextDisplayMode + + self.updateDecorationsColors() + + self.mainVideoContainerNode.isHidden = false + self.mainVideoContainerNode.isUserInteractionEnabled = isFullscreen + + if case .modal = effectiveDisplayMode, case .fullscreen = self.displayMode { + self.fullscreenListNode.isHidden = false + + var minimalVisiblePeerid: (PeerId, CGFloat)? + var verticalItemNodes: [PeerId: ASDisplayNode] = [:] + self.listNode.forEachItemNode { itemNode in + if let itemNode = itemNode as? VoiceChatTilesGridItemNode { + for tileNode in itemNode.tileNodes { + let convertedFrame = tileNode.view.convert(tileNode.bounds, to: self.transitionContainerNode.view) + if let item = tileNode.item { + if let (_, y) = minimalVisiblePeerid { + if convertedFrame.minY >= 0.0 && convertedFrame.minY < y { + minimalVisiblePeerid = (item.peer.id, convertedFrame.minY) + } + } else { + if convertedFrame.minY >= 0.0 { + minimalVisiblePeerid = (item.peer.id, convertedFrame.minY) + } + } + verticalItemNodes[item.peer.id] = tileNode + } + } + } else if let itemNode = itemNode as? VoiceChatParticipantItemNode, let item = itemNode.item { + let convertedFrame = itemNode.view.convert(itemNode.bounds, to: self.transitionContainerNode.view) + if let (_, y) = minimalVisiblePeerid { + if convertedFrame.minY >= 0.0 && convertedFrame.minY < y { + minimalVisiblePeerid = (item.peer.id, convertedFrame.minY) + } + } else { + if convertedFrame.minY >= 0.0 { + minimalVisiblePeerid = (item.peer.id, convertedFrame.minY) + } + } + verticalItemNodes[item.peer.id] = itemNode } } - if let sourceNode = sourceNode { - let location = sourceNode.view.convert(sourceNode.bounds, to: nil) - self.controller?.present(TooltipScreen(text: screencast ? self.presentationData.strings.VoiceChat_TapToViewCameraVideo : self.presentationData.strings.VoiceChat_TapToViewScreenVideo, icon: nil, location: .point(location.offsetBy(dx: 0.0, dy: -9.0), .bottom), displayDuration: .custom(3.0), shouldDismissOnTouch: { _ in - return .dismiss(consume: false) - }), in: .window(.root)) + + self.animatingExpansion = true + + let completion = { + let effectiveSpeakerPeerId = self.effectiveSpeakerWithVideo?.0 + if let effectiveSpeakerPeerId = effectiveSpeakerPeerId, let otherItemNode = verticalItemNodes[effectiveSpeakerPeerId] { + self.mainVideoNode.animateTransitionIn(from: otherItemNode) + } + + self.fullscreenListNode.forEachItemNode { itemNode in + if let itemNode = itemNode as? 
VoiceChatFullscreenParticipantItemNode, let item = itemNode.item, let otherItemNode = verticalItemNodes[item.peer.id] { + itemNode.animateTransitionIn(from: otherItemNode, containerNode: self, animate: item.peer.id != effectiveSpeakerPeerId) + } + } + + if let (layout, navigationHeight) = self.validLayout { + self.containerLayoutUpdated(layout, navigationHeight: navigationHeight, transition: .animated(duration: 0.3, curve: .easeInOut)) + self.updateDecorationsLayout(transition: .animated(duration: 0.3, curve: .easeInOut)) + } } + if false, let (peerId, _) = minimalVisiblePeerid { + var index = 0 + for item in self.currentEntries { + if case let .peer(entry) = item, entry.peer.id == peerId { + break + } else { + index += 1 + } + } + self.fullscreenListNode.transaction(deleteIndices: [], insertIndicesAndItems: [], updateIndicesAndItems: [], options: [.Synchronous, .LowLatency], scrollToItem: ListViewScrollToItem(index: index, position: .top(0.0), animated: false, curve: .Default(duration: nil), directionHint: .Up), updateSizeAndInsets: nil, stationaryItemRange: nil, updateOpaqueState: nil, completion: { _ in + completion() + }) + } else { + completion() + } + } else if case .fullscreen = effectiveDisplayMode, case .modal = self.displayMode { + var minimalVisiblePeerid: (PeerId, CGFloat)? + var fullscreenItemNodes: [PeerId: VoiceChatFullscreenParticipantItemNode] = [:] + self.fullscreenListNode.forEachItemNode { itemNode in + if let itemNode = itemNode as? VoiceChatFullscreenParticipantItemNode, let item = itemNode.item { + let convertedFrame = itemNode.view.convert(itemNode.bounds, to: self.transitionContainerNode.view) + if let (_, x) = minimalVisiblePeerid { + if convertedFrame.minX >= 0.0 && convertedFrame.minX < x { + minimalVisiblePeerid = (item.peer.id, convertedFrame.minX) + } + } else if convertedFrame.minX >= 0.0 { + minimalVisiblePeerid = (item.peer.id, convertedFrame.minX) + } + fullscreenItemNodes[item.peer.id] = itemNode + } + } + + self.animatingExpansion = true + + let completion = { + let effectiveSpeakerPeerId = self.effectiveSpeakerWithVideo?.0 + var targetTileNode: VoiceChatTileItemNode? + + self.listNode.forEachItemNode { itemNode in + if let itemNode = itemNode as? VoiceChatTilesGridItemNode { + for tileNode in itemNode.tileNodes { + if let item = tileNode.item, let otherItemNode = fullscreenItemNodes[item.peer.id] { + tileNode.animateTransitionIn(from: otherItemNode, containerNode: self.transitionContainerNode, animate: item.peer.id != effectiveSpeakerPeerId) + + if item.peer.id == effectiveSpeakerPeerId { + targetTileNode = tileNode + } + } + } + } else if let itemNode = itemNode as? 
VoiceChatParticipantItemNode, let item = itemNode.item, let otherItemNode = fullscreenItemNodes[item.peer.id] { + itemNode.animateTransitionIn(from: otherItemNode, containerNode: self.transitionContainerNode) + } + } + + if let targetTileNode = targetTileNode { + self.mainVideoNode.animateTransitionOut(to: targetTileNode, completion: { [weak self] in + self?.effectiveSpeakerWithVideo = nil + self?.mainVideoNode.updatePeer(peer: nil, waitForFullSize: false) + self?.fullscreenListNode.isHidden = true + self?.mainVideoContainerNode.isHidden = true + }) + } + + if let (layout, navigationHeight) = self.validLayout { + self.containerLayoutUpdated(layout, navigationHeight: navigationHeight, transition: .animated(duration: 0.3, curve: .easeInOut)) + self.updateDecorationsLayout(transition: .animated(duration: 0.3, curve: .easeInOut)) + } + } + if false, let (peerId, _) = minimalVisiblePeerid { + var index = 0 + for item in self.currentEntries { + if case let .peer(entry) = item, entry.peer.id == peerId { + break + } else { + index += 1 + } + } + self.listNode.transaction(deleteIndices: [], insertIndicesAndItems: [], updateIndicesAndItems: [], options: [.Synchronous, .LowLatency], scrollToItem: ListViewScrollToItem(index: index, position: .top(0.0), animated: false, curve: .Default(duration: nil), directionHint: .Up), updateSizeAndInsets: nil, stationaryItemRange: nil, updateOpaqueState: nil, completion: { _ in + completion() + }) + } else { + completion() + } + } else if case .fullscreen = self.displayMode { + self.animatingExpansion = true + // self.updateIsFullscreen(strongSelf.isFullscreen, force: true) + + if let (layout, navigationHeight) = self.validLayout { + self.containerLayoutUpdated(layout, navigationHeight: navigationHeight, transition: .animated(duration: 0.3, curve: .easeInOut)) + self.updateDecorationsLayout(transition: .animated(duration: 0.3, curve: .easeInOut)) + } + } + } + + if case .fullscreen(false) = nextDisplayMode, case .modal = effectiveDisplayMode { + self.mainVideoNode.updatePeer(peer: self.effectiveSpeakerWithVideo, waitForFullSize: true, completion: { + completion() + }) + } else { + completion() } } } @@ -5861,3 +5275,192 @@ private final class VoiceChatContextReferenceContentSource: ContextReferenceCont return ContextControllerReferenceViewInfo(referenceNode: self.sourceNode, contentAreaInScreenSpace: UIScreen.main.bounds) } } + +final class VoiceChatMainVideoContainerNode: ASDisplayNode { + private let context: AccountContext + private let call: PresentationGroupCall + + private var currentVideoNode: GroupVideoNode? + private var candidateVideoNode: GroupVideoNode? + + private let backgroundNode: ASDisplayNode + private let fadeNode: ASImageNode + private var currentPeer: (PeerId, String)? + + private var validLayout: (CGSize, CGFloat, Bool)? + + var tapped: (() -> Void)? + var otherVideoTapped: (() -> Void)? 
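// Editor's sketch (not part of the patch): both this container and the controller hunks above gate any
// visible change on the wrapped GroupVideoNode reporting its first frame via its `ready` signal. A minimal
// illustration of that readiness-gating pattern, assuming SwiftSignalKit's Signal operators; `readyVideoEndpoints`
// and `rebuildEntries()` are hypothetical stand-ins for the owning node's bookkeeping, and DisposableDict mirrors
// the `readyVideoDisposables` container used in the controller.
private var readyVideoEndpoints = Set<String>()
private let videoReadyDisposables = DisposableDict<String>()

private func attach(_ videoNode: GroupVideoNode, endpointId: String) {
    self.videoReadyDisposables.set((videoNode.ready
    |> filter { $0 }              // ignore "not ready yet" values
    |> take(1)                    // only the first rendered frame matters
    |> deliverOnMainQueue).start(next: { [weak self] _ in
        self?.readyVideoEndpoints.insert(endpointId)
        self?.rebuildEntries()    // hypothetical: regenerate list/tile entries now that the video is usable
    }), forKey: endpointId)
}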
+ + private let videoReadyDisposable = MetaDisposable() + + init(context: AccountContext, call: PresentationGroupCall) { + self.context = context + self.call = call + + self.backgroundNode = ASDisplayNode() + self.backgroundNode.alpha = 0.0 + self.backgroundNode.backgroundColor = UIColor(rgb: 0x1c1c1e) + + self.fadeNode = ASImageNode() + self.fadeNode.alpha = 0.0 + self.fadeNode.displaysAsynchronously = false + self.fadeNode.displayWithoutProcessing = true + self.fadeNode.contentMode = .scaleToFill + self.fadeNode.image = generateImage(CGSize(width: 1.0, height: 50.0), rotatedContext: { size, context in + let bounds = CGRect(origin: CGPoint(), size: size) + context.clear(bounds) + + let colorsArray = [UIColor(rgb: 0x000000, alpha: 0.0).cgColor, UIColor(rgb: 0x000000, alpha: 0.7).cgColor] as CFArray + var locations: [CGFloat] = [0.0, 1.0] + let gradient = CGGradient(colorsSpace: deviceColorSpace, colors: colorsArray, locations: &locations)! + context.drawLinearGradient(gradient, start: CGPoint(), end: CGPoint(x: 0.0, y: size.height), options: CGGradientDrawingOptions()) + }) + + super.init() + + self.clipsToBounds = true + self.cornerRadius = 11.0 + + self.addSubnode(self.backgroundNode) + self.addSubnode(self.fadeNode) + } + + deinit { + self.videoReadyDisposable.dispose() + } + + override func didLoad() { + super.didLoad() + + self.view.addGestureRecognizer(UITapGestureRecognizer(target: self, action: #selector(self.tap))) + } + + @objc private func tap() { + self.tapped?() + } + + var animating = false + fileprivate func animateTransitionIn(from sourceNode: ASDisplayNode) { + guard let sourceNode = sourceNode as? VoiceChatTileItemNode, let _ = sourceNode.item else { + return + } + + let alphaTransition = ContainedViewLayoutTransition.animated(duration: 0.2, curve: .linear) + alphaTransition.updateAlpha(node: self.backgroundNode, alpha: 1.0) + alphaTransition.updateAlpha(node: self.fadeNode, alpha: 1.0) + + self.animating = true + let transition = ContainedViewLayoutTransition.animated(duration: 0.2, curve: .easeInOut) + let targetFrame = self.frame + let startLocalFrame = sourceNode.view.convert(sourceNode.bounds, to: self.supernode?.view) + self.update(size: startLocalFrame.size, sideInset: 0.0, isLandscape: true, force: true, transition: .immediate) + self.frame = startLocalFrame + self.update(size: targetFrame.size, sideInset: 0.0, isLandscape: true, force: true, transition: transition) + transition.updateFrame(node: self, frame: targetFrame, completion: { [weak self] _ in + self?.animating = false + }) + } + + fileprivate func animateTransitionOut(to targetNode: ASDisplayNode, completion: @escaping () -> Void) { + guard let targetNode = targetNode as? 
VoiceChatTileItemNode, let _ = targetNode.item else { + return + } + + let alphaTransition = ContainedViewLayoutTransition.animated(duration: 0.2, curve: .linear) + alphaTransition.updateAlpha(node: self.backgroundNode, alpha: 0.0) + alphaTransition.updateAlpha(node: self.fadeNode, alpha: 0.0) + + self.animating = true + let transition = ContainedViewLayoutTransition.animated(duration: 0.2, curve: .easeInOut) + let initialFrame = self.frame + let targetFrame = targetNode.view.convert(targetNode.bounds, to: self.supernode?.view) + self.update(size: targetFrame.size, sideInset: 0.0, isLandscape: true, force: true, transition: transition) + transition.updateFrame(node: self, frame: targetFrame, completion: { [weak self] _ in + if let strongSelf = self { + completion() + strongSelf.animating = false + strongSelf.frame = initialFrame + strongSelf.update(size: initialFrame.size, sideInset: 0.0, isLandscape: true, transition: .immediate) + } + }) + } + + func updatePeer(peer: (peerId: PeerId, endpointId: String)?, waitForFullSize: Bool, completion: (() -> Void)? = nil) { + if self.currentPeer?.0 == peer?.0 && self.currentPeer?.1 == peer?.1 { + completion?() + return + } + let previousPeer = self.currentPeer + self.currentPeer = peer + if let (_, endpointId) = peer { + if endpointId != previousPeer?.1 { + self.call.makeIncomingVideoView(endpointId: endpointId, completion: { [weak self] videoView in + Queue.mainQueue().async { + guard let strongSelf = self, let videoView = videoView else { + return + } + + let videoNode = GroupVideoNode(videoView: videoView, backdropVideoView: nil) + if let currentVideoNode = strongSelf.currentVideoNode { + strongSelf.currentVideoNode = nil + + currentVideoNode.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2, removeOnCompletion: false, completion: { [weak currentVideoNode] _ in + currentVideoNode?.removeFromSupernode() + }) + } + strongSelf.currentVideoNode = videoNode + strongSelf.insertSubnode(videoNode, aboveSubnode: strongSelf.backgroundNode) + if let (size, sideInset, isLandscape) = strongSelf.validLayout { + strongSelf.update(size: size, sideInset: sideInset, isLandscape: isLandscape, transition: .immediate) + } + + if waitForFullSize { + strongSelf.videoReadyDisposable.set((videoNode.ready + |> filter { $0 } + |> take(1) + |> deliverOnMainQueue).start(next: { _ in + Queue.mainQueue().after(0.01) { + completion?() + } + })) + } else { + strongSelf.videoReadyDisposable.set(nil) + completion?() + } + } + }) + } else { + completion?() + } + } else { + self.videoReadyDisposable.set(nil) + if let currentVideoNode = self.currentVideoNode { + currentVideoNode.removeFromSupernode() + self.currentVideoNode = nil + } + completion?() + } + } + + func update(size: CGSize, sideInset: CGFloat, isLandscape: Bool, force: Bool = false, transition: ContainedViewLayoutTransition) { + self.validLayout = (size, sideInset, isLandscape) + + if self.animating && !force { + return + } + + if let currentVideoNode = self.currentVideoNode { + transition.updateFrame(node: currentVideoNode, frame: CGRect(origin: CGPoint(), size: size)) + currentVideoNode.updateLayout(size: size, isLandscape: isLandscape, transition: transition) + } + + transition.updateFrame(node: self.backgroundNode, frame: CGRect(origin: CGPoint(), size: size)) + + var fadeHeight: CGFloat = 50.0 + if size.width < size.height { + fadeHeight = 140.0 + } + transition.updateFrame(node: self.fadeNode, frame: CGRect(x: sideInset, y: size.height - fadeHeight, width: size.width - sideInset * 2.0, height: fadeHeight)) + } 
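// Editor's sketch (not part of the patch): how a parent node might drive the container defined above,
// using only the API introduced in this file (init(context:call:), update(size:sideInset:isLandscape:transition:),
// updatePeer(peer:waitForFullSize:completion:), tapped). The surrounding names (context, call, layout, topInset,
// transition, speakerPeerId, videoEndpointId) and the frame values are assumed to be in scope and are hypothetical.
let mainVideoContainer = VoiceChatMainVideoContainerNode(context: context, call: call)
mainVideoContainer.tapped = { [weak self] in
    self?.toggleDisplayMode()     // e.g. switch between the modal and fullscreen presentations
}
self.addSubnode(mainVideoContainer)

// On layout: size the container first, then bind it to the current speaker's endpoint.
let videoFrame = CGRect(x: 0.0, y: topInset, width: layout.size.width, height: 200.0)   // assumed frame
transition.updateFrame(node: mainVideoContainer, frame: videoFrame)
mainVideoContainer.update(size: videoFrame.size, sideInset: 0.0, isLandscape: false, transition: transition)

// waitForFullSize: true defers the completion until the new endpoint has produced a full-size frame,
// so any reveal animation can be scheduled only once there is something to show.
mainVideoContainer.updatePeer(peer: (peerId: speakerPeerId, endpointId: videoEndpointId), waitForFullSize: true, completion: {
    // safe to fade the container in / start the expansion transition here
})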
+} diff --git a/submodules/TelegramCallsUI/Sources/VoiceChatFullscreenParticipantItem.swift b/submodules/TelegramCallsUI/Sources/VoiceChatFullscreenParticipantItem.swift new file mode 100644 index 0000000000..df49290f99 --- /dev/null +++ b/submodules/TelegramCallsUI/Sources/VoiceChatFullscreenParticipantItem.swift @@ -0,0 +1,1204 @@ +import Foundation +import UIKit +import Display +import AsyncDisplayKit +import SwiftSignalKit +import Postbox +import TelegramCore +import SyncCore +import TelegramPresentationData +import TelegramUIPreferences +import ItemListUI +import PresentationDataUtils +import AvatarNode +import TelegramStringFormatting +import PeerPresenceStatusManager +import ContextUI +import AccountContext +import LegacyComponents +import AudioBlob +import PeerInfoAvatarListNode + +private let avatarFont = avatarPlaceholderFont(size: floor(50.0 * 16.0 / 37.0)) +private let tileSize = CGSize(width: 84.0, height: 84.0) +private let backgroundCornerRadius: CGFloat = 11.0 +private let videoCornerRadius: CGFloat = 23.0 +private let avatarSize: CGFloat = 50.0 +private let videoSize = CGSize(width: 180.0, height: 180.0) + +private let accentColor: UIColor = UIColor(rgb: 0x007aff) +private let constructiveColor: UIColor = UIColor(rgb: 0x34c759) +private let destructiveColor: UIColor = UIColor(rgb: 0xff3b30) + +private let borderLineWidth: CGFloat = 2.0 +private let borderImage = generateImage(CGSize(width: tileSize.width, height: tileSize.height), rotatedContext: { size, context in + let bounds = CGRect(origin: CGPoint(), size: size) + context.clear(bounds) + + context.setLineWidth(borderLineWidth) + context.setStrokeColor(constructiveColor.cgColor) + + context.addPath(UIBezierPath(roundedRect: bounds.insetBy(dx: (borderLineWidth - UIScreenPixel) / 2.0, dy: (borderLineWidth - UIScreenPixel) / 2.0), cornerRadius: backgroundCornerRadius - UIScreenPixel).cgPath) + context.strokePath() +}) + +private let fadeImage = generateImage(CGSize(width: 1.0, height: 30.0), rotatedContext: { size, context in + let bounds = CGRect(origin: CGPoint(), size: size) + context.clear(bounds) + + let colorsArray = [UIColor(rgb: 0x000000, alpha: 0.0).cgColor, UIColor(rgb: 0x000000, alpha: 0.7).cgColor] as CFArray + var locations: [CGFloat] = [0.0, 1.0] + let gradient = CGGradient(colorsSpace: deviceColorSpace, colors: colorsArray, locations: &locations)! + context.drawLinearGradient(gradient, start: CGPoint(), end: CGPoint(x: 0.0, y: size.height), options: CGGradientDrawingOptions()) +}) + +final class VoiceChatFullscreenParticipantItem: ListViewItem { + enum Icon { + case none + case microphone(Bool, UIColor) + case invite(Bool) + case wantsToSpeak + } + + enum Color { + case generic + case accent + case constructive + case destructive + } + + let presentationData: ItemListPresentationData + let nameDisplayOrder: PresentationPersonNameOrder + let context: AccountContext + let peer: Peer + let icon: Icon + let color: Color + let isLandscape: Bool + let active: Bool + let getAudioLevel: (() -> Signal)? + let getVideo: () -> GroupVideoNode? + let action: ((ASDisplayNode?) -> Void)? + let contextAction: ((ASDisplayNode, ContextGesture?) -> Void)? 
+ let getUpdatingAvatar: () -> Signal<(TelegramMediaImageRepresentation, Float)?, NoError> + + public let selectable: Bool = true + + public init(presentationData: ItemListPresentationData, nameDisplayOrder: PresentationPersonNameOrder, context: AccountContext, peer: Peer, icon: Icon, color: Color, isLandscape: Bool, active: Bool, getAudioLevel: (() -> Signal)?, getVideo: @escaping () -> GroupVideoNode?, action: ((ASDisplayNode?) -> Void)?, contextAction: ((ASDisplayNode, ContextGesture?) -> Void)? = nil, getUpdatingAvatar: @escaping () -> Signal<(TelegramMediaImageRepresentation, Float)?, NoError>) { + self.presentationData = presentationData + self.nameDisplayOrder = nameDisplayOrder + self.context = context + self.peer = peer + self.icon = icon + self.color = color + self.isLandscape = isLandscape + self.active = active + self.getAudioLevel = getAudioLevel + self.getVideo = getVideo + self.action = action + self.contextAction = contextAction + self.getUpdatingAvatar = getUpdatingAvatar + } + + public func nodeConfiguredForParams(async: @escaping (@escaping () -> Void) -> Void, params: ListViewItemLayoutParams, synchronousLoads: Bool, previousItem: ListViewItem?, nextItem: ListViewItem?, completion: @escaping (ListViewItemNode, @escaping () -> (Signal?, (ListViewItemApply) -> Void)) -> Void) { + async { + let node = VoiceChatFullscreenParticipantItemNode() + let (layout, apply) = node.asyncLayout()(self, params, previousItem == nil, nextItem == nil) + + node.contentSize = layout.contentSize + node.insets = layout.insets + + Queue.mainQueue().async { + completion(node, { + return (node.avatarNode.ready, { _ in apply(synchronousLoads, false) }) + }) + } + } + } + + public func updateNode(async: @escaping (@escaping () -> Void) -> Void, node: @escaping () -> ListViewItemNode, params: ListViewItemLayoutParams, previousItem: ListViewItem?, nextItem: ListViewItem?, animation: ListViewItemUpdateAnimation, completion: @escaping (ListViewItemNodeLayout, @escaping (ListViewItemApply) -> Void) -> Void) { + Queue.mainQueue().async { + if let nodeValue = node() as? VoiceChatFullscreenParticipantItemNode { + let makeLayout = nodeValue.asyncLayout() + + var animated = true + if case .None = animation { + animated = false + } + + async { + let (layout, apply) = makeLayout(self, params, previousItem == nil, nextItem == nil) + Queue.mainQueue().async { + completion(layout, { _ in + apply(false, animated) + }) + } + } + } + } + } + + public func selected(listView: ListView) { + listView.clearHighlightAnimated(true) + } +} + +class VoiceChatFullscreenParticipantItemNode: ItemListRevealOptionsItemNode { + let contextSourceNode: ContextExtractedContentContainingNode + private let containerNode: ContextControllerSourceNode + let backgroundImageNode: ASImageNode + private let extractedBackgroundImageNode: ASImageNode + let offsetContainerNode: ASDisplayNode + let borderImageNode: ASImageNode + + private var extractedRect: CGRect? + private var nonExtractedRect: CGRect? + private var extractedVerticalOffset: CGFloat? + + let avatarNode: AvatarNode + let contentWrapperNode: ASDisplayNode + private let titleNode: TextNode + private var credibilityIconNode: ASImageNode? + + private let actionContainerNode: ASDisplayNode + private var animationNode: VoiceChatMicrophoneNode? + private var iconNode: ASImageNode? + private var raiseHandNode: VoiceChatRaiseHandNode? + private var actionButtonNode: HighlightableButtonNode + + private var audioLevelView: VoiceBlobView? 
+ private let audioLevelDisposable = MetaDisposable() + private var didSetupAudioLevel = false + + private var absoluteLocation: (CGRect, CGSize)? + + private var layoutParams: (VoiceChatFullscreenParticipantItem, ListViewItemLayoutParams, Bool, Bool)? + private var isExtracted = false + private var animatingExtraction = false + private var wavesColor: UIColor? + + let videoContainerNode: ASDisplayNode + private let videoFadeNode: ASImageNode + var videoNode: GroupVideoNode? + private let videoReadyDisposable = MetaDisposable() + private var videoReadyDelayed = false + private var videoReady = false + + private var raiseHandTimer: SwiftSignalKit.Timer? + + var item: VoiceChatFullscreenParticipantItem? { + return self.layoutParams?.0 + } + + private var currentTitle: String? + + init() { + self.contextSourceNode = ContextExtractedContentContainingNode() + self.containerNode = ContextControllerSourceNode() + + self.backgroundImageNode = ASImageNode() + self.backgroundImageNode.clipsToBounds = true + self.backgroundImageNode.displaysAsynchronously = false + self.backgroundImageNode.alpha = 0.0 + + self.extractedBackgroundImageNode = ASImageNode() + self.extractedBackgroundImageNode.clipsToBounds = true + self.extractedBackgroundImageNode.displaysAsynchronously = false + self.extractedBackgroundImageNode.alpha = 0.0 + + self.borderImageNode = ASImageNode() + self.borderImageNode.displaysAsynchronously = false + self.borderImageNode.image = borderImage + self.borderImageNode.isHidden = true + + self.offsetContainerNode = ASDisplayNode() + + self.avatarNode = AvatarNode(font: avatarFont) + self.avatarNode.frame = CGRect(origin: CGPoint(), size: CGSize(width: avatarSize, height: avatarSize)) + + self.contentWrapperNode = ASDisplayNode() + + self.videoContainerNode = ASDisplayNode() + self.videoContainerNode.clipsToBounds = true + + self.videoFadeNode = ASImageNode() + self.videoFadeNode.displaysAsynchronously = false + self.videoFadeNode.displayWithoutProcessing = true + self.videoFadeNode.contentMode = .scaleToFill + self.videoFadeNode.image = fadeImage + self.videoContainerNode.addSubnode(videoFadeNode) + + self.titleNode = TextNode() + self.titleNode.isUserInteractionEnabled = false + self.titleNode.contentMode = .left + self.titleNode.contentsScale = UIScreen.main.scale + + self.actionContainerNode = ASDisplayNode() + self.actionButtonNode = HighlightableButtonNode() + + super.init(layerBacked: false, dynamicBounce: false, rotated: false, seeThrough: false) + + self.isAccessibilityElement = true + + self.containerNode.addSubnode(self.contextSourceNode) + self.containerNode.targetNodeForActivationProgress = self.contextSourceNode.contentNode + self.addSubnode(self.containerNode) + + self.contextSourceNode.contentNode.addSubnode(self.backgroundImageNode) + self.backgroundImageNode.addSubnode(self.extractedBackgroundImageNode) + self.contextSourceNode.contentNode.addSubnode(self.offsetContainerNode) + self.offsetContainerNode.addSubnode(self.videoContainerNode) + self.offsetContainerNode.addSubnode(self.contentWrapperNode) + self.contentWrapperNode.addSubnode(self.titleNode) + self.contentWrapperNode.addSubnode(self.actionContainerNode) + self.actionContainerNode.addSubnode(self.actionButtonNode) + self.offsetContainerNode.addSubnode(self.avatarNode) + self.contextSourceNode.contentNode.addSubnode(self.borderImageNode) + self.containerNode.targetNodeForActivationProgress = self.contextSourceNode.contentNode + + self.containerNode.shouldBegin = { [weak self] location in + guard let strongSelf = 
self else { + return false + } + return true + } + self.containerNode.activated = { [weak self] gesture, _ in + guard let strongSelf = self, let item = strongSelf.layoutParams?.0, let contextAction = item.contextAction else { + gesture.cancel() + return + } + contextAction(strongSelf.contextSourceNode, gesture) + } + +// self.contextSourceNode.willUpdateIsExtractedToContextPreview = { [weak self] isExtracted, transition in +// guard let strongSelf = self, let item = strongSelf.layoutParams?.0 else { +// return +// } +// +// strongSelf.isExtracted = isExtracted +// +// let inset: CGFloat = 12.0 +//// if isExtracted { +//// strongSelf.contextSourceNode.contentNode.customHitTest = { [weak self] point in +//// if let strongSelf = self { +//// if let avatarListWrapperNode = strongSelf.avatarListWrapperNode, avatarListWrapperNode.frame.contains(point) { +//// return strongSelf.avatarListNode?.view +//// } +//// } +//// return nil +//// } +//// } else { +//// strongSelf.contextSourceNode.contentNode.customHitTest = nil +//// } +// +// let extractedVerticalOffset = strongSelf.extractedVerticalOffset ?? 0.0 +// if let extractedRect = strongSelf.extractedRect, let nonExtractedRect = strongSelf.nonExtractedRect { +// let rect: CGRect +// if isExtracted { +// if extractedVerticalOffset > 0.0 { +// rect = CGRect(x: extractedRect.minX, y: extractedRect.minY + extractedVerticalOffset, width: extractedRect.width, height: extractedRect.height - extractedVerticalOffset) +// } else { +// rect = extractedRect +// } +// } else { +// rect = nonExtractedRect +// } +// +// let springDuration: Double = isExtracted ? 0.42 : 0.3 +// let springDamping: CGFloat = isExtracted ? 104.0 : 1000.0 +// +// let itemBackgroundColor: UIColor = item.getIsExpanded() ? UIColor(rgb: 0x1c1c1e) : UIColor(rgb: 0x2c2c2e) +// +// if !extractedVerticalOffset.isZero { +// let radiusTransition = ContainedViewLayoutTransition.animated(duration: 0.2, curve: .easeInOut) +// if isExtracted { +// strongSelf.backgroundImageNode.image = generateImage(CGSize(width: backgroundCornerRadius * 2.0, height: backgroundCornerRadius * 2.0), rotatedContext: { (size, context) in +// let bounds = CGRect(origin: CGPoint(), size: size) +// context.clear(bounds) +// +// context.setFillColor(itemBackgroundColor.cgColor) +// context.fillEllipse(in: bounds) +// context.fill(CGRect(x: 0.0, y: 0.0, width: size.width, height: size.height / 2.0)) +// })?.stretchableImage(withLeftCapWidth: Int(backgroundCornerRadius), topCapHeight: Int(backgroundCornerRadius)) +// strongSelf.extractedBackgroundImageNode.image = generateImage(CGSize(width: backgroundCornerRadius * 2.0, height: backgroundCornerRadius * 2.0), rotatedContext: { (size, context) in +// let bounds = CGRect(origin: CGPoint(), size: size) +// context.clear(bounds) +// +// context.setFillColor(item.presentationData.theme.list.itemBlocksBackgroundColor.cgColor) +// context.fillEllipse(in: bounds) +// context.fill(CGRect(x: 0.0, y: 0.0, width: size.width, height: size.height / 2.0)) +// })?.stretchableImage(withLeftCapWidth: Int(backgroundCornerRadius), topCapHeight: Int(backgroundCornerRadius)) +// strongSelf.backgroundImageNode.cornerRadius = backgroundCornerRadius +// +// strongSelf.avatarNode.transform = CATransform3DIdentity +// var avatarInitialRect = strongSelf.avatarNode.view.convert(strongSelf.avatarNode.bounds, to: strongSelf.offsetContainerNode.supernode?.view) +// if strongSelf.avatarTransitionNode == nil { +// transition.updateCornerRadius(node: strongSelf.backgroundImageNode, cornerRadius: 0.0) +// 
+// let targetRect = CGRect(x: extractedRect.minX, y: extractedRect.minY, width: extractedRect.width, height: extractedRect.width) +// let initialScale = avatarInitialRect.width / targetRect.width +// avatarInitialRect.origin.y += backgroundCornerRadius / 2.0 * initialScale +// +// let avatarListWrapperNode = PinchSourceContainerNode() +// avatarListWrapperNode.clipsToBounds = true +// avatarListWrapperNode.cornerRadius = backgroundCornerRadius +// avatarListWrapperNode.activate = { [weak self] sourceNode in +// guard let strongSelf = self else { +// return +// } +// strongSelf.avatarListNode?.controlsContainerNode.alpha = 0.0 +// let pinchController = PinchController(sourceNode: sourceNode, getContentAreaInScreenSpace: { +// return UIScreen.main.bounds +// }) +// item.context.sharedContext.mainWindow?.presentInGlobalOverlay(pinchController) +// } +// avatarListWrapperNode.deactivated = { [weak self] in +// guard let strongSelf = self else { +// return +// } +// strongSelf.avatarListWrapperNode?.contentNode.layer.animate(from: 0.0 as NSNumber, to: backgroundCornerRadius as NSNumber, keyPath: "cornerRadius", timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue, duration: 0.3, completion: { _ in +// }) +// } +// avatarListWrapperNode.update(size: targetRect.size, transition: .immediate) +// avatarListWrapperNode.frame = CGRect(x: targetRect.minX, y: targetRect.minY, width: targetRect.width, height: targetRect.height + backgroundCornerRadius) +// avatarListWrapperNode.animatedOut = { [weak self] in +// guard let strongSelf = self else { +// return +// } +// strongSelf.avatarListNode?.controlsContainerNode.alpha = 1.0 +// strongSelf.avatarListNode?.controlsContainerNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.25) +// } +// +// let transitionNode = ASImageNode() +// transitionNode.clipsToBounds = true +// transitionNode.displaysAsynchronously = false +// transitionNode.displayWithoutProcessing = true +// transitionNode.image = strongSelf.avatarNode.unroundedImage +// transitionNode.frame = CGRect(origin: CGPoint(), size: targetRect.size) +// transitionNode.cornerRadius = targetRect.width / 2.0 +// radiusTransition.updateCornerRadius(node: transitionNode, cornerRadius: 0.0) +// +// strongSelf.avatarNode.isHidden = true +// avatarListWrapperNode.contentNode.addSubnode(transitionNode) +// +// strongSelf.videoContainerNode.position = CGPoint(x: avatarListWrapperNode.frame.width / 2.0, y: avatarListWrapperNode.frame.height / 2.0) +// strongSelf.videoContainerNode.cornerRadius = tileSize.width / 2.0 +// strongSelf.videoContainerNode.transform = CATransform3DMakeScale(avatarListWrapperNode.frame.width / tileSize.width * 1.05, avatarListWrapperNode.frame.height / tileSize.width * 1.05, 1.0) +// avatarListWrapperNode.contentNode.addSubnode(strongSelf.videoContainerNode) +// +// strongSelf.avatarTransitionNode = transitionNode +// +// let avatarListContainerNode = ASDisplayNode() +// avatarListContainerNode.clipsToBounds = true +// avatarListContainerNode.frame = CGRect(origin: CGPoint(), size: targetRect.size) +// avatarListContainerNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2) +// avatarListContainerNode.cornerRadius = targetRect.width / 2.0 +// +// avatarListWrapperNode.layer.animateSpring(from: initialScale as NSNumber, to: 1.0 as NSNumber, keyPath: "transform.scale", duration: springDuration, initialVelocity: 0.0, damping: springDamping) +// avatarListWrapperNode.layer.animateSpring(from: NSValue(cgPoint: avatarInitialRect.center), to: NSValue(cgPoint: 
avatarListWrapperNode.position), keyPath: "position", duration: springDuration, initialVelocity: 0.0, damping: springDamping, completion: { [weak self] _ in +// if let strongSelf = self, let avatarListNode = strongSelf.avatarListNode { +// avatarListNode.currentItemNode?.addSubnode(strongSelf.videoContainerNode) +// } +// }) +// +// radiusTransition.updateCornerRadius(node: avatarListContainerNode, cornerRadius: 0.0) +// radiusTransition.updateCornerRadius(node: strongSelf.videoContainerNode, cornerRadius: 0.0) +// +// let avatarListNode = PeerInfoAvatarListContainerNode(context: item.context) +// avatarListWrapperNode.contentNode.clipsToBounds = true +// avatarListNode.backgroundColor = .clear +// avatarListNode.peer = item.peer +// avatarListNode.firstFullSizeOnly = true +// avatarListNode.offsetLocation = true +// avatarListNode.customCenterTapAction = { [weak self] in +// self?.contextSourceNode.requestDismiss?() +// } +// avatarListNode.frame = CGRect(x: targetRect.width / 2.0, y: targetRect.height / 2.0, width: targetRect.width, height: targetRect.height) +// avatarListNode.controlsClippingNode.frame = CGRect(x: -targetRect.width / 2.0, y: -targetRect.height / 2.0, width: targetRect.width, height: targetRect.height) +// avatarListNode.controlsClippingOffsetNode.frame = CGRect(origin: CGPoint(x: targetRect.width / 2.0, y: targetRect.height / 2.0), size: CGSize()) +// avatarListNode.stripContainerNode.frame = CGRect(x: 0.0, y: 13.0, width: targetRect.width, height: 2.0) +// +// avatarListContainerNode.addSubnode(avatarListNode) +// avatarListContainerNode.addSubnode(avatarListNode.controlsClippingOffsetNode) +// avatarListWrapperNode.contentNode.addSubnode(avatarListContainerNode) +// +// avatarListNode.update(size: targetRect.size, peer: item.peer, customNode: strongSelf.videoContainerNode, additionalEntry: item.getUpdatingAvatar(), isExpanded: true, transition: .immediate) +// strongSelf.offsetContainerNode.supernode?.addSubnode(avatarListWrapperNode) +// +// strongSelf.audioLevelView?.alpha = 0.0 +// +// strongSelf.avatarListWrapperNode = avatarListWrapperNode +// strongSelf.avatarListContainerNode = avatarListContainerNode +// strongSelf.avatarListNode = avatarListNode +// } +// } else if let transitionNode = strongSelf.avatarTransitionNode, let avatarListWrapperNode = strongSelf.avatarListWrapperNode, let avatarListContainerNode = strongSelf.avatarListContainerNode { +// strongSelf.animatingExtraction = true +// +// transition.updateCornerRadius(node: strongSelf.backgroundImageNode, cornerRadius: backgroundCornerRadius) +// +// var avatarInitialRect = CGRect(origin: strongSelf.avatarNode.frame.origin, size: strongSelf.avatarNode.frame.size) +// let targetScale = avatarInitialRect.width / avatarListContainerNode.frame.width +// avatarInitialRect.origin.y += backgroundCornerRadius / 2.0 * targetScale +// +// strongSelf.avatarTransitionNode = nil +// strongSelf.avatarListWrapperNode = nil +// strongSelf.avatarListContainerNode = nil +// strongSelf.avatarListNode = nil +// +// avatarListContainerNode.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2, removeOnCompletion: false, completion: { [weak avatarListContainerNode] _ in +// avatarListContainerNode?.removeFromSupernode() +// }) +// +// avatarListWrapperNode.contentNode.insertSubnode(strongSelf.videoContainerNode, aboveSubnode: transitionNode) +// +// avatarListWrapperNode.layer.animate(from: 1.0 as NSNumber, to: targetScale as NSNumber, keyPath: "transform.scale", timingFunction: 
CAMediaTimingFunctionName.easeInEaseOut.rawValue, duration: 0.2, removeOnCompletion: false) +// avatarListWrapperNode.layer.animate(from: NSValue(cgPoint: avatarListWrapperNode.position), to: NSValue(cgPoint: avatarInitialRect.center), keyPath: "position", timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue, duration: 0.2, removeOnCompletion: false, completion: { [weak transitionNode, weak self] _ in +// transitionNode?.removeFromSupernode() +// self?.avatarNode.isHidden = false +// +// self?.audioLevelView?.alpha = 1.0 +// self?.audioLevelView?.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2) +// +// if let strongSelf = self { +// strongSelf.animatingExtraction = false +// +// strongSelf.offsetContainerNode.insertSubnode(strongSelf.videoContainerNode, belowSubnode: strongSelf.contentWrapperNode) +// +// switch item.style { +// case .list: +// strongSelf.videoFadeNode.alpha = 0.0 +// strongSelf.videoContainerNode.position = strongSelf.avatarNode.position +// strongSelf.videoContainerNode.cornerRadius = tileSize.width / 2.0 +// strongSelf.videoContainerNode.transform = CATransform3DMakeScale(avatarSize / tileSize.width, avatarSize / tileSize.width, 1.0) +// case .tile: +// strongSelf.videoFadeNode.alpha = 1.0 +// strongSelf.videoContainerNode.position = CGPoint(x: tileSize.width / 2.0, y: tileSize.height / 2.0) +// strongSelf.videoContainerNode.cornerRadius = backgroundCornerRadius +// strongSelf.videoContainerNode.transform = CATransform3DMakeScale(1.0, 1.0, 1.0) +// } +// } +// }) +// +// radiusTransition.updateCornerRadius(node: avatarListContainerNode, cornerRadius: avatarListContainerNode.frame.width / 2.0) +// radiusTransition.updateCornerRadius(node: transitionNode, cornerRadius: avatarListContainerNode.frame.width / 2.0) +// radiusTransition.updateCornerRadius(node: strongSelf.videoContainerNode, cornerRadius: tileSize.width / 2.0) +// } +// +// let alphaTransition = ContainedViewLayoutTransition.animated(duration: 0.2, curve: .easeInOut) +// alphaTransition.updateAlpha(node: strongSelf.statusNode, alpha: isExtracted ? 0.0 : 1.0) +// alphaTransition.updateAlpha(node: strongSelf.expandedStatusNode, alpha: isExtracted ? 1.0 : 0.0) +// alphaTransition.updateAlpha(node: strongSelf.actionContainerNode, alpha: isExtracted ? 0.0 : 1.0, delay: isExtracted ? 0.0 : 0.1) +// +// let offsetInitialSublayerTransform = strongSelf.offsetContainerNode.layer.sublayerTransform +// strongSelf.offsetContainerNode.layer.sublayerTransform = CATransform3DMakeTranslation(isExtracted ? -33 : 0.0, isExtracted ? extractedVerticalOffset : 0.0, 0.0) +// +// let actionInitialSublayerTransform = strongSelf.actionContainerNode.layer.sublayerTransform +// strongSelf.actionContainerNode.layer.sublayerTransform = CATransform3DMakeTranslation(isExtracted ? 
21.0 : 0.0, 0.0, 0.0) +// +// let initialBackgroundPosition = strongSelf.backgroundImageNode.position +// strongSelf.backgroundImageNode.layer.position = rect.center +// let initialBackgroundBounds = strongSelf.backgroundImageNode.bounds +// strongSelf.backgroundImageNode.layer.bounds = CGRect(origin: CGPoint(), size: rect.size) +// +// let initialExtractedBackgroundPosition = strongSelf.extractedBackgroundImageNode.position +// strongSelf.extractedBackgroundImageNode.layer.position = CGPoint(x: rect.size.width / 2.0, y: rect.size.height / 2.0) +// let initialExtractedBackgroundBounds = strongSelf.extractedBackgroundImageNode.bounds +// strongSelf.extractedBackgroundImageNode.layer.bounds = strongSelf.backgroundImageNode.layer.bounds +// if isExtracted { +// strongSelf.offsetContainerNode.layer.animateSpring(from: NSValue(caTransform3D: offsetInitialSublayerTransform), to: NSValue(caTransform3D: strongSelf.offsetContainerNode.layer.sublayerTransform), keyPath: "sublayerTransform", duration: springDuration, delay: 0.0, initialVelocity: 0.0, damping: springDamping) +// strongSelf.actionContainerNode.layer.animateSpring(from: NSValue(caTransform3D: actionInitialSublayerTransform), to: NSValue(caTransform3D: strongSelf.actionContainerNode.layer.sublayerTransform), keyPath: "sublayerTransform", duration: springDuration, delay: 0.0, initialVelocity: 0.0, damping: springDamping) +// strongSelf.backgroundImageNode.layer.animateSpring(from: NSValue(cgPoint: initialBackgroundPosition), to: NSValue(cgPoint: strongSelf.backgroundImageNode.position), keyPath: "position", duration: springDuration, delay: 0.0, initialVelocity: 0.0, damping: springDamping) +// strongSelf.backgroundImageNode.layer.animateSpring(from: NSValue(cgRect: initialBackgroundBounds), to: NSValue(cgRect: strongSelf.backgroundImageNode.bounds), keyPath: "bounds", duration: springDuration, initialVelocity: 0.0, damping: springDamping) +// strongSelf.extractedBackgroundImageNode.layer.animateSpring(from: NSValue(cgPoint: initialExtractedBackgroundPosition), to: NSValue(cgPoint: strongSelf.extractedBackgroundImageNode.position), keyPath: "position", duration: springDuration, delay: 0.0, initialVelocity: 0.0, damping: springDamping) +// strongSelf.extractedBackgroundImageNode.layer.animateSpring(from: NSValue(cgRect: initialExtractedBackgroundBounds), to: NSValue(cgRect: strongSelf.extractedBackgroundImageNode.bounds), keyPath: "bounds", duration: springDuration, initialVelocity: 0.0, damping: springDamping) +// } else { +// strongSelf.offsetContainerNode.layer.animate(from: NSValue(caTransform3D: offsetInitialSublayerTransform), to: NSValue(caTransform3D: strongSelf.offsetContainerNode.layer.sublayerTransform), keyPath: "sublayerTransform", timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue, duration: 0.2) +// strongSelf.actionContainerNode.layer.animate(from: NSValue(caTransform3D: actionInitialSublayerTransform), to: NSValue(caTransform3D: strongSelf.actionContainerNode.layer.sublayerTransform), keyPath: "sublayerTransform", timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue, duration: 0.2) +// strongSelf.backgroundImageNode.layer.animate(from: NSValue(cgPoint: initialBackgroundPosition), to: NSValue(cgPoint: strongSelf.backgroundImageNode.position), keyPath: "position", timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue, duration: 0.2) +// strongSelf.backgroundImageNode.layer.animate(from: NSValue(cgRect: initialBackgroundBounds), to: NSValue(cgRect: strongSelf.backgroundImageNode.bounds), 
keyPath: "bounds", timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue, duration: 0.2) +// strongSelf.extractedBackgroundImageNode.layer.animate(from: NSValue(cgPoint: initialExtractedBackgroundPosition), to: NSValue(cgPoint: strongSelf.extractedBackgroundImageNode.position), keyPath: "position", timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue, duration: 0.2) +// strongSelf.extractedBackgroundImageNode.layer.animate(from: NSValue(cgRect: initialExtractedBackgroundBounds), to: NSValue(cgRect: strongSelf.extractedBackgroundImageNode.bounds), keyPath: "bounds", timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue, duration: 0.2) +// } +// +// if isExtracted { +// strongSelf.backgroundImageNode.alpha = 1.0 +// strongSelf.extractedBackgroundImageNode.alpha = 1.0 +// strongSelf.extractedBackgroundImageNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.1, delay: 0.1, timingFunction: CAMediaTimingFunctionName.easeOut.rawValue) +// } else { +// strongSelf.extractedBackgroundImageNode.alpha = 0.0 +// strongSelf.extractedBackgroundImageNode.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2, delay: 0.0, timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue, removeOnCompletion: false, completion: { [weak self] _ in +// if let strongSelf = self { +// if strongSelf.item?.style == .list { +// strongSelf.backgroundImageNode.image = nil +// } +// strongSelf.extractedBackgroundImageNode.image = nil +// strongSelf.extractedBackgroundImageNode.layer.removeAllAnimations() +// } +// }) +// } +// } else { +// if isExtracted { +// strongSelf.backgroundImageNode.alpha = 0.0 +// strongSelf.extractedBackgroundImageNode.alpha = 1.0 +// strongSelf.backgroundImageNode.image = generateStretchableFilledCircleImage(diameter: backgroundCornerRadius * 2.0, color: itemBackgroundColor) +// strongSelf.extractedBackgroundImageNode.image = generateStretchableFilledCircleImage(diameter: backgroundCornerRadius * 2.0, color: item.presentationData.theme.list.itemBlocksBackgroundColor) +// } +// +// transition.updateFrame(node: strongSelf.backgroundImageNode, frame: rect) +// transition.updateFrame(node: strongSelf.extractedBackgroundImageNode, frame: CGRect(origin: CGPoint(), size: rect.size)) +// +// transition.updateAlpha(node: strongSelf.statusNode, alpha: isExtracted ? 0.0 : 1.0) +// transition.updateAlpha(node: strongSelf.expandedStatusNode, alpha: isExtracted ? 1.0 : 0.0) +// transition.updateAlpha(node: strongSelf.actionContainerNode, alpha: isExtracted ? 0.0 : 1.0) +// +// transition.updateSublayerTransformOffset(layer: strongSelf.offsetContainerNode.layer, offset: CGPoint(x: isExtracted ? inset : 0.0, y: isExtracted ? extractedVerticalOffset : 0.0)) +// transition.updateSublayerTransformOffset(layer: strongSelf.actionContainerNode.layer, offset: CGPoint(x: isExtracted ? -24.0 : 0.0, y: 0.0)) +// +// transition.updateAlpha(node: strongSelf.backgroundImageNode, alpha: isExtracted ? 
1.0 : 0.0, completion: { _ in +// if !isExtracted { +// self?.backgroundImageNode.image = nil +// self?.extractedBackgroundImageNode.image = nil +// } +// }) +// } +// } +// } + } + + deinit { + self.videoReadyDisposable.dispose() + self.audioLevelDisposable.dispose() + self.raiseHandTimer?.invalidate() + } + + override func selected() { + super.selected() + self.layoutParams?.0.action?(self.contextSourceNode) + } + + func animateTransitionIn(from sourceNode: ASDisplayNode, containerNode: ASDisplayNode, animate: Bool = true) { + guard let item = self.item else { + return + } + + let initialAnimate = animate + if let sourceNode = sourceNode as? VoiceChatTileItemNode { + var startContainerPosition = sourceNode.view.convert(sourceNode.bounds, to: containerNode.view).center + var animate = initialAnimate + if startContainerPosition.y > containerNode.frame.height - 238.0 { + animate = false + } + + if let videoNode = sourceNode.videoNode { + if item.active { + self.avatarNode.alpha = 1.0 + videoNode.alpha = 0.0 + startContainerPosition = startContainerPosition.offsetBy(dx: 0.0, dy: 9.0) + } else { + self.avatarNode.alpha = 0.0 + } + + sourceNode.videoNode = nil + self.videoNode = videoNode + self.videoContainerNode.insertSubnode(videoNode, at: 0) + + if animate { + let transition = ContainedViewLayoutTransition.animated(duration: 0.2, curve: .easeInOut) + videoNode.updateLayout(size: videoSize, isLandscape: true, transition: transition) + + let scale = sourceNode.bounds.width / videoSize.width + self.videoContainerNode.layer.animateScale(from: sourceNode.bounds.width / videoSize.width, to: tileSize.width / videoSize.width, duration: 0.2, timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue) + self.videoContainerNode.layer.animate(from: backgroundCornerRadius * (1.0 / scale) as NSNumber, to: videoCornerRadius as NSNumber, keyPath: "cornerRadius", timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue, duration: 0.2, removeOnCompletion: false, completion: { _ in + }) + + self.videoFadeNode.alpha = 1.0 + self.videoFadeNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2, timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue) + } else if initialAnimate { + videoNode.updateLayout(size: videoSize, isLandscape: true, transition: .immediate) + self.videoFadeNode.alpha = 1.0 + } + } + + if animate { + let initialPosition = self.contextSourceNode.position + let targetContainerPosition = self.contextSourceNode.view.convert(self.contextSourceNode.bounds, to: containerNode.view).center + + self.contextSourceNode.position = targetContainerPosition + containerNode.addSubnode(self.contextSourceNode) + + self.contextSourceNode.layer.animatePosition(from: startContainerPosition, to: targetContainerPosition, duration: 0.2, timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue, completion: { [weak self] _ in + if let strongSelf = self { + strongSelf.contextSourceNode.position = initialPosition + strongSelf.containerNode.addSubnode(strongSelf.contextSourceNode) + } + }) + + if item.active { + self.borderImageNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2) + self.borderImageNode.layer.animateScale(from: 0.001, to: 1.0, duration: 0.2, timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue) + } + + self.backgroundImageNode.layer.animateScale(from: 0.001, to: 1.0, duration: 0.2, timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue) + self.backgroundImageNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.15, timingFunction: 
CAMediaTimingFunctionName.easeInEaseOut.rawValue) + self.contentWrapperNode.layer.animateScale(from: 0.001, to: 1.0, duration: 0.2, timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue) + self.contentWrapperNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2, timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue) + } else if !initialAnimate { + self.contextSourceNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2) + self.contextSourceNode.layer.animateScale(from: 0.0, to: 1.0, duration: 0.2) + } + } else if let sourceNode = sourceNode as? VoiceChatParticipantItemNode, let _ = sourceNode.item { + var startContainerPosition = sourceNode.avatarNode.view.convert(sourceNode.avatarNode.bounds, to: containerNode.view).center + var animate = true + if startContainerPosition.y > containerNode.frame.height - 238.0 { + animate = false + } + startContainerPosition = startContainerPosition.offsetBy(dx: 0.0, dy: 9.0) + + if animate { + sourceNode.avatarNode.alpha = 0.0 + + let initialPosition = self.contextSourceNode.position + let targetContainerPosition = self.contextSourceNode.view.convert(self.contextSourceNode.bounds, to: containerNode.view).center + + self.contextSourceNode.position = targetContainerPosition + containerNode.addSubnode(self.contextSourceNode) + + let timingFunction = CAMediaTimingFunctionName.easeInEaseOut.rawValue + self.contextSourceNode.layer.animatePosition(from: startContainerPosition, to: targetContainerPosition, duration: 0.2, timingFunction: timingFunction, completion: { [weak self, weak sourceNode] _ in + if let strongSelf = self { + sourceNode?.avatarNode.alpha = 1.0 + strongSelf.contextSourceNode.position = initialPosition + strongSelf.containerNode.addSubnode(strongSelf.contextSourceNode) + } + }) + + if item.active { + self.borderImageNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2) + self.borderImageNode.layer.animateScale(from: 0.001, to: 1.0, duration: 0.2, timingFunction: timingFunction) + } + + self.avatarNode.layer.animateScale(from: 0.8, to: 1.0, duration: 0.2) + + self.backgroundImageNode.layer.animateScale(from: 0.001, to: 1.0, duration: 0.2, timingFunction: timingFunction) + self.backgroundImageNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.15, timingFunction: timingFunction) + self.contentWrapperNode.layer.animateScale(from: 0.001, to: 1.0, duration: 0.2, timingFunction: timingFunction) + self.contentWrapperNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2, timingFunction: timingFunction) + } + } + } + + func asyncLayout() -> (_ item: VoiceChatFullscreenParticipantItem, _ params: ListViewItemLayoutParams, _ first: Bool, _ last: Bool) -> (ListViewItemNodeLayout, (Bool, Bool) -> Void) { + let makeTitleLayout = TextNode.asyncLayout(self.titleNode) + + let currentItem = self.layoutParams?.0 + let hasVideo = self.videoNode != nil + + return { item, params, first, last in + let titleFont = Font.semibold(12.0) + var titleAttributedString: NSAttributedString? + + var titleColor = item.presentationData.theme.list.itemPrimaryTextColor + if !hasVideo || item.active { + switch item.color { + case .generic: + titleColor = item.presentationData.theme.list.itemPrimaryTextColor + case .accent: + titleColor = item.presentationData.theme.list.itemAccentColor + case .constructive: + titleColor = constructiveColor + case .destructive: + titleColor = destructiveColor + } + } + let currentBoldFont: UIFont = titleFont + + if let user = item.peer as? 
TelegramUser { + if let firstName = user.firstName, let lastName = user.lastName, !firstName.isEmpty, !lastName.isEmpty { + titleAttributedString = NSAttributedString(string: firstName, font: titleFont, textColor: titleColor) + } else if let firstName = user.firstName, !firstName.isEmpty { + titleAttributedString = NSAttributedString(string: firstName, font: currentBoldFont, textColor: titleColor) + } else if let lastName = user.lastName, !lastName.isEmpty { + titleAttributedString = NSAttributedString(string: lastName, font: currentBoldFont, textColor: titleColor) + } else { + titleAttributedString = NSAttributedString(string: item.presentationData.strings.User_DeletedAccount, font: currentBoldFont, textColor: titleColor) + } + } else if let group = item.peer as? TelegramGroup { + titleAttributedString = NSAttributedString(string: group.title, font: currentBoldFont, textColor: titleColor) + } else if let channel = item.peer as? TelegramChannel { + titleAttributedString = NSAttributedString(string: channel.title, font: currentBoldFont, textColor: titleColor) + } + + var wavesColor = UIColor(rgb: 0x34c759) + switch item.color { + case .accent: + wavesColor = accentColor + case .destructive: + wavesColor = destructiveColor + default: + break + } + + let leftInset: CGFloat = 58.0 + params.leftInset + + var titleIconsWidth: CGFloat = 0.0 + var currentCredibilityIconImage: UIImage? + var credibilityIconOffset: CGFloat = 0.0 + if item.peer.isScam { + currentCredibilityIconImage = PresentationResourcesChatList.scamIcon(item.presentationData.theme, strings: item.presentationData.strings, type: .regular) + credibilityIconOffset = 2.0 + } else if item.peer.isFake { + currentCredibilityIconImage = PresentationResourcesChatList.fakeIcon(item.presentationData.theme, strings: item.presentationData.strings, type: .regular) + credibilityIconOffset = 2.0 + } else if item.peer.isVerified { + currentCredibilityIconImage = PresentationResourcesChatList.verifiedIcon(item.presentationData.theme) + credibilityIconOffset = 3.0 + } + + if let currentCredibilityIconImage = currentCredibilityIconImage { + titleIconsWidth += 4.0 + currentCredibilityIconImage.size.width + } + + let constrainedWidth = params.width - 24.0 - 10.0 + let (titleLayout, titleApply) = makeTitleLayout(TextNodeLayoutArguments(attributedString: titleAttributedString, backgroundColor: nil, maximumNumberOfLines: 1, truncationType: .end, constrainedSize: CGSize(width: constrainedWidth, height: CGFloat.greatestFiniteMagnitude), alignment: .natural, cutout: nil, insets: UIEdgeInsets())) + + let contentSize = tileSize + let insets = UIEdgeInsets(top: 0.0, left: 0.0, bottom: !last ? 
6.0 : 0.0, right: 0.0) + + let layout = ListViewItemNodeLayout(contentSize: contentSize, insets: insets) + + return (layout, { [weak self] synchronousLoad, animated in + if let strongSelf = self { + let hadItem = strongSelf.layoutParams?.0 != nil + strongSelf.layoutParams = (item, params, first, last) + strongSelf.currentTitle = titleAttributedString?.string + strongSelf.wavesColor = wavesColor + + let videoNode = item.getVideo() + if let current = strongSelf.videoNode, current !== videoNode { + current.removeFromSupernode() + strongSelf.videoReadyDisposable.set(nil) + } + + let videoNodeUpdated = strongSelf.videoNode !== videoNode + strongSelf.videoNode = videoNode + + let nonExtractedRect: CGRect + let avatarFrame: CGRect + let titleFrame: CGRect + let animationSize: CGSize + let animationFrame: CGRect + let animationScale: CGFloat + + nonExtractedRect = CGRect(origin: CGPoint(), size: layout.contentSize) + strongSelf.containerNode.transform = CATransform3DMakeRotation(item.isLandscape ? 0.0 : CGFloat.pi / 2.0, 0.0, 0.0, 1.0) + avatarFrame = CGRect(origin: CGPoint(x: floor((layout.size.width - avatarSize) / 2.0), y: 7.0), size: CGSize(width: avatarSize, height: avatarSize)) + + animationSize = CGSize(width: 36.0, height: 36.0) + animationScale = 0.66667 + animationFrame = CGRect(x: layout.size.width - 29.0, y: 54.0, width: 24.0, height: 24.0) + titleFrame = CGRect(origin: CGPoint(x: 8.0, y: 63.0), size: titleLayout.size) + + var extractedRect = CGRect(origin: CGPoint(), size: layout.contentSize).insetBy(dx: 16.0 + params.leftInset, dy: 0.0) + var extractedHeight = extractedRect.height + var extractedVerticalOffset: CGFloat = 0.0 + if item.peer.smallProfileImage != nil || strongSelf.videoNode != nil { + extractedVerticalOffset = extractedRect.width + extractedHeight += extractedVerticalOffset + } + + extractedRect.size.height = extractedHeight + + strongSelf.extractedVerticalOffset = extractedVerticalOffset + strongSelf.extractedRect = extractedRect + strongSelf.nonExtractedRect = nonExtractedRect + + if strongSelf.isExtracted { + var extractedRect = extractedRect + if !extractedVerticalOffset.isZero { + extractedRect = CGRect(x: extractedRect.minX, y: extractedRect.minY + extractedVerticalOffset, width: extractedRect.width, height: extractedRect.height - extractedVerticalOffset) + } + strongSelf.backgroundImageNode.frame = extractedRect + } else { + strongSelf.backgroundImageNode.frame = nonExtractedRect + } + if strongSelf.backgroundImageNode.image == nil { + strongSelf.backgroundImageNode.image = generateStretchableFilledCircleImage(diameter: backgroundCornerRadius * 2.0, color: UIColor(rgb: 0x1c1c1e)) + strongSelf.backgroundImageNode.alpha = 1.0 + } + strongSelf.extractedBackgroundImageNode.frame = strongSelf.backgroundImageNode.bounds + strongSelf.contextSourceNode.contentRect = extractedRect + + let contentBounds = CGRect(origin: CGPoint(), size: layout.contentSize) + strongSelf.containerNode.frame = contentBounds + strongSelf.contextSourceNode.frame = contentBounds + strongSelf.contentWrapperNode.frame = contentBounds + strongSelf.offsetContainerNode.frame = contentBounds + strongSelf.contextSourceNode.contentNode.frame = contentBounds + strongSelf.actionContainerNode.frame = contentBounds + strongSelf.borderImageNode.frame = contentBounds + + strongSelf.containerNode.isGestureEnabled = item.contextAction != nil + + strongSelf.accessibilityLabel = titleAttributedString?.string + var combinedValueString = "" +// if let statusString = statusAttributedString?.string, 
!statusString.isEmpty { +// combinedValueString.append(statusString) +// } + + strongSelf.accessibilityValue = combinedValueString + + let transition: ContainedViewLayoutTransition + if animated && hadItem { + transition = ContainedViewLayoutTransition.animated(duration: 0.3, curve: .easeInOut) + } else { + transition = .immediate + } + + let _ = titleApply() + transition.updateFrame(node: strongSelf.titleNode, frame: titleFrame) + + if let currentCredibilityIconImage = currentCredibilityIconImage { + let iconNode: ASImageNode + if let current = strongSelf.credibilityIconNode { + iconNode = current + } else { + iconNode = ASImageNode() + iconNode.isLayerBacked = true + iconNode.displaysAsynchronously = false + iconNode.displayWithoutProcessing = true + strongSelf.offsetContainerNode.addSubnode(iconNode) + strongSelf.credibilityIconNode = iconNode + } + iconNode.image = currentCredibilityIconImage + transition.updateFrame(node: iconNode, frame: CGRect(origin: CGPoint(x: leftInset + titleLayout.size.width + 3.0, y: credibilityIconOffset), size: currentCredibilityIconImage.size)) + } else if let credibilityIconNode = strongSelf.credibilityIconNode { + strongSelf.credibilityIconNode = nil + credibilityIconNode.removeFromSupernode() + } + + transition.updateFrameAsPositionAndBounds(node: strongSelf.avatarNode, frame: avatarFrame) + + let blobFrame = avatarFrame.insetBy(dx: -14.0, dy: -14.0) + if let getAudioLevel = item.getAudioLevel { + if !strongSelf.didSetupAudioLevel || currentItem?.peer.id != item.peer.id { + strongSelf.audioLevelView?.frame = blobFrame + strongSelf.didSetupAudioLevel = true + strongSelf.audioLevelDisposable.set((getAudioLevel() + |> deliverOnMainQueue).start(next: { value in + guard let strongSelf = self else { + return + } + + if strongSelf.audioLevelView == nil, value > 0.0 { + let audioLevelView = VoiceBlobView( + frame: blobFrame, + maxLevel: 1.5, + smallBlobRange: (0, 0), + mediumBlobRange: (0.69, 0.87), + bigBlobRange: (0.71, 1.0) + ) + + let maskRect = CGRect(origin: .zero, size: blobFrame.size) + let playbackMaskLayer = CAShapeLayer() + playbackMaskLayer.frame = maskRect + playbackMaskLayer.fillRule = .evenOdd + let maskPath = UIBezierPath() + maskPath.append(UIBezierPath(roundedRect: maskRect.insetBy(dx: 14, dy: 14), cornerRadius: 22)) + maskPath.append(UIBezierPath(rect: maskRect)) + playbackMaskLayer.path = maskPath.cgPath + audioLevelView.layer.mask = playbackMaskLayer + + audioLevelView.setColor(wavesColor) + audioLevelView.alpha = strongSelf.isExtracted ? 0.0 : 1.0 + + strongSelf.audioLevelView = audioLevelView + strongSelf.offsetContainerNode.view.insertSubview(audioLevelView, at: 0) + } + + let level = min(1.0, max(0.0, CGFloat(value))) + if let audioLevelView = strongSelf.audioLevelView { + audioLevelView.updateLevel(CGFloat(value)) + + let avatarScale: CGFloat + if value > 0.0 { + audioLevelView.startAnimating() + avatarScale = 1.03 + level * 0.13 + if let wavesColor = strongSelf.wavesColor { + audioLevelView.setColor(wavesColor, animated: true) + } + } else { + audioLevelView.stopAnimating(duration: 0.5) + avatarScale = 1.0 + } + + let transition: ContainedViewLayoutTransition = .animated(duration: 0.15, curve: .easeInOut) + transition.updateTransformScale(node: strongSelf.avatarNode, scale: strongSelf.isExtracted ? 
1.0 : avatarScale, beginWithCurrentState: true) + } + })) + } + } else if let audioLevelView = strongSelf.audioLevelView { + strongSelf.audioLevelView = nil + audioLevelView.removeFromSuperview() + + strongSelf.audioLevelDisposable.set(nil) + } + + var overrideImage: AvatarNodeImageOverride? + if item.peer.isDeleted { + overrideImage = .deletedIcon + } + strongSelf.avatarNode.setPeer(context: item.context, theme: item.presentationData.theme, peer: item.peer, overrideImage: overrideImage, emptyColor: item.presentationData.theme.list.mediaPlaceholderColor, synchronousLoad: synchronousLoad, storeUnrounded: true) + + var hadMicrophoneNode = false + var hadRaiseHandNode = false + var hadIconNode = false + var nodeToAnimateIn: ASDisplayNode? + + if case let .microphone(muted, color) = item.icon { + let animationNode: VoiceChatMicrophoneNode + if let current = strongSelf.animationNode { + animationNode = current + } else { + animationNode = VoiceChatMicrophoneNode() + strongSelf.animationNode = animationNode + strongSelf.actionButtonNode.addSubnode(animationNode) + + nodeToAnimateIn = animationNode + } + var color = color + if color.rgb == 0x979797 { + color = UIColor(rgb: 0xffffff) + } + animationNode.update(state: VoiceChatMicrophoneNode.State(muted: muted, filled: true, color: color), animated: true) + strongSelf.actionButtonNode.isUserInteractionEnabled = false + } else if let animationNode = strongSelf.animationNode { + hadMicrophoneNode = true + strongSelf.animationNode = nil + animationNode.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2, removeOnCompletion: false) + animationNode.layer.animateScale(from: 1.0, to: 0.001, duration: 0.2, removeOnCompletion: false, completion: { [weak animationNode] _ in + animationNode?.removeFromSupernode() + }) + } + + if case .wantsToSpeak = item.icon { + let raiseHandNode: VoiceChatRaiseHandNode + if let current = strongSelf.raiseHandNode { + raiseHandNode = current + } else { + raiseHandNode = VoiceChatRaiseHandNode(color: item.presentationData.theme.list.itemAccentColor) + raiseHandNode.contentMode = .center + strongSelf.raiseHandNode = raiseHandNode + strongSelf.actionButtonNode.addSubnode(raiseHandNode) + + nodeToAnimateIn = raiseHandNode + raiseHandNode.playRandomAnimation() + + strongSelf.raiseHandTimer = SwiftSignalKit.Timer(timeout: Double.random(in: 8.0 ... 
10.5), repeat: true, completion: { + self?.raiseHandNode?.playRandomAnimation() + }, queue: Queue.mainQueue()) + strongSelf.raiseHandTimer?.start() + } + strongSelf.actionButtonNode.isUserInteractionEnabled = false + } else if let raiseHandNode = strongSelf.raiseHandNode { + hadRaiseHandNode = true + strongSelf.raiseHandNode = nil + if let raiseHandTimer = strongSelf.raiseHandTimer { + strongSelf.raiseHandTimer = nil + raiseHandTimer.invalidate() + } + raiseHandNode.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2, removeOnCompletion: false) + raiseHandNode.layer.animateScale(from: 1.0, to: 0.001, duration: 0.2, removeOnCompletion: false, completion: { [weak raiseHandNode] _ in + raiseHandNode?.removeFromSupernode() + }) + } + + if case let .invite(invited) = item.icon { + let iconNode: ASImageNode + if let current = strongSelf.iconNode { + iconNode = current + } else { + iconNode = ASImageNode() + iconNode.contentMode = .center + strongSelf.iconNode = iconNode + strongSelf.actionButtonNode.addSubnode(iconNode) + + nodeToAnimateIn = iconNode + } + + if invited { + iconNode.image = generateTintedImage(image: UIImage(bundleImageName: "Call/Context Menu/Invited"), color: UIColor(rgb: 0x979797)) + } else { + iconNode.image = generateTintedImage(image: UIImage(bundleImageName: "Chat/Context Menu/AddUser"), color: item.presentationData.theme.list.itemAccentColor) + } + strongSelf.actionButtonNode.isUserInteractionEnabled = false + } else if let iconNode = strongSelf.iconNode { + hadIconNode = true + strongSelf.iconNode = nil + iconNode.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2, removeOnCompletion: false) + iconNode.layer.animateScale(from: 1.0, to: 0.001, duration: 0.2, removeOnCompletion: false, completion: { [weak iconNode] _ in + iconNode?.removeFromSupernode() + }) + } + + if let node = nodeToAnimateIn, hadMicrophoneNode || hadRaiseHandNode || hadIconNode { + node.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2) + node.layer.animateScale(from: 0.001, to: 1.0, duration: 0.2) + } + + let videoContainerScale = tileSize.width / videoSize.width + + if !strongSelf.isExtracted && !strongSelf.animatingExtraction { + strongSelf.videoFadeNode.frame = CGRect(x: 0.0, y: videoSize.height - 75.0, width: videoSize.width, height: 75.0) + strongSelf.videoContainerNode.bounds = CGRect(origin: CGPoint(), size: videoSize) + + if let videoNode = strongSelf.videoNode { + strongSelf.videoFadeNode.alpha = videoNode.alpha + } else { + strongSelf.videoFadeNode.alpha = 0.0 + } + strongSelf.videoContainerNode.position = CGPoint(x: tileSize.width / 2.0, y: tileSize.height / 2.0) + strongSelf.videoContainerNode.cornerRadius = videoCornerRadius + strongSelf.videoContainerNode.transform = CATransform3DMakeScale(videoContainerScale, videoContainerScale, 1.0) + } + + strongSelf.borderImageNode.isHidden = !item.active + + let canUpdateAvatarVisibility = !strongSelf.isExtracted && !strongSelf.animatingExtraction + + if let videoNode = videoNode { + let transition = ContainedViewLayoutTransition.animated(duration: 0.2, curve: .easeInOut) + if !strongSelf.isExtracted && !strongSelf.animatingExtraction { + if currentItem != nil { + if item.active { + if strongSelf.avatarNode.alpha.isZero { + strongSelf.videoContainerNode.layer.animateScale(from: videoContainerScale, to: 0.001, duration: 0.2) + strongSelf.avatarNode.layer.animateScale(from: 0.0, to: 1.0, duration: 0.2) + strongSelf.videoContainerNode.layer.animatePosition(from: CGPoint(), to: CGPoint(x: 0.0, y: -9.0), duration: 0.2, additive: true) + } + 
transition.updateAlpha(node: videoNode, alpha: 0.0) + transition.updateAlpha(node: strongSelf.videoFadeNode, alpha: 0.0) + transition.updateAlpha(node: strongSelf.avatarNode, alpha: 1.0) + } else { + if !strongSelf.avatarNode.alpha.isZero { + strongSelf.videoContainerNode.layer.animateScale(from: 0.001, to: videoContainerScale, duration: 0.2) + strongSelf.avatarNode.layer.animateScale(from: 1.0, to: 0.001, duration: 0.2) + strongSelf.videoContainerNode.layer.animatePosition(from: CGPoint(x: 0.0, y: -9.0), to: CGPoint(), duration: 0.2, additive: true) + } + transition.updateAlpha(node: videoNode, alpha: 1.0) + transition.updateAlpha(node: strongSelf.videoFadeNode, alpha: 1.0) + transition.updateAlpha(node: strongSelf.avatarNode, alpha: 0.0) + } + } else { + if item.active { + videoNode.alpha = 0.0 + if canUpdateAvatarVisibility { + strongSelf.avatarNode.alpha = 1.0 + } + } else if strongSelf.videoReady { + videoNode.alpha = 1.0 + strongSelf.avatarNode.alpha = 0.0 + } + } + } + + videoNode.updateLayout(size: videoSize, isLandscape: true, transition: .immediate) + if !strongSelf.isExtracted && !strongSelf.animatingExtraction { + if videoNode.supernode !== strongSelf.videoContainerNode { + videoNode.clipsToBounds = true + strongSelf.videoContainerNode.addSubnode(videoNode) + } + + videoNode.position = CGPoint(x: videoSize.width / 2.0, y: videoSize.height / 2.0) + videoNode.bounds = CGRect(origin: CGPoint(), size: videoSize) + } + + if videoNodeUpdated { + strongSelf.videoReadyDelayed = false + strongSelf.videoReadyDisposable.set((videoNode.ready + |> deliverOnMainQueue).start(next: { [weak self] ready in + if let strongSelf = self { + if !ready { + strongSelf.videoReadyDelayed = true + } + strongSelf.videoReady = ready + if let videoNode = strongSelf.videoNode, ready { + if strongSelf.videoReadyDelayed { + Queue.mainQueue().after(0.15) { + guard let currentItem = strongSelf.item else { + return + } + if currentItem.active { + if canUpdateAvatarVisibility { + strongSelf.avatarNode.alpha = 1.0 + } + videoNode.alpha = 0.0 + } else { + strongSelf.avatarNode.alpha = 0.0 + strongSelf.avatarNode.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2) + videoNode.layer.animateScale(from: 0.01, to: 1.0, duration: 0.2) + videoNode.alpha = 1.0 + } + } + } else { + if item.active { + if canUpdateAvatarVisibility { + strongSelf.avatarNode.alpha = 1.0 + } + videoNode.alpha = 0.0 + } else { + strongSelf.avatarNode.alpha = 0.0 + videoNode.alpha = 1.0 + } + } + } + } + })) + } + } else if canUpdateAvatarVisibility { + strongSelf.avatarNode.alpha = 1.0 + } + + strongSelf.iconNode?.frame = CGRect(origin: CGPoint(), size: animationSize) + strongSelf.animationNode?.frame = CGRect(origin: CGPoint(), size: animationSize) + strongSelf.raiseHandNode?.frame = CGRect(origin: CGPoint(), size: animationSize).insetBy(dx: -6.0, dy: -6.0).offsetBy(dx: -2.0, dy: 0.0) + + strongSelf.actionButtonNode.transform = CATransform3DMakeScale(animationScale, animationScale, 1.0) +// strongSelf.actionButtonNode.frame = animationFrame + transition.updateFrame(node: strongSelf.actionButtonNode, frame: animationFrame) + + strongSelf.updateIsHighlighted(transition: transition) + } + }) + } + } + + var isHighlighted = false + func updateIsHighlighted(transition: ContainedViewLayoutTransition) { + + } + + override func setHighlighted(_ highlighted: Bool, at point: CGPoint, animated: Bool) { + super.setHighlighted(highlighted, at: point, animated: animated) + + self.isHighlighted = highlighted + + self.updateIsHighlighted(transition: (animated 
&& !highlighted) ? .animated(duration: 0.3, curve: .easeInOut) : .immediate) + } + + override func animateInsertion(_ currentTimestamp: Double, duration: Double, short: Bool) { + self.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.4) + } + + override func animateRemoved(_ currentTimestamp: Double, duration: Double) { + self.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.15, removeOnCompletion: false) + } + + override func header() -> ListViewItemHeader? { + return nil + } + + override func updateAbsoluteRect(_ rect: CGRect, within containerSize: CGSize) { + var rect = rect + rect.origin.y += self.insets.top + self.absoluteLocation = (rect, containerSize) + } +} diff --git a/submodules/TelegramCallsUI/Sources/VoiceChatJoinScreen.swift b/submodules/TelegramCallsUI/Sources/VoiceChatJoinScreen.swift index a0f81d690a..23844806f1 100644 --- a/submodules/TelegramCallsUI/Sources/VoiceChatJoinScreen.swift +++ b/submodules/TelegramCallsUI/Sources/VoiceChatJoinScreen.swift @@ -515,10 +515,16 @@ public final class VoiceChatJoinScreen: ViewController { self.dimNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.4) let offset = self.bounds.size.height - self.contentBackgroundNode.frame.minY - let dimPosition = self.dimNode.layer.position - self.dimNode.layer.animatePosition(from: CGPoint(x: dimPosition.x, y: dimPosition.y - offset), to: dimPosition, duration: 0.3, timingFunction: kCAMediaTimingFunctionSpring) - self.layer.animateBoundsOriginYAdditive(from: -offset, to: 0.0, duration: 0.3, timingFunction: kCAMediaTimingFunctionSpring) + + let transition = ContainedViewLayoutTransition.animated(duration: 0.4, curve: .spring) + let targetBounds = self.bounds + self.bounds = self.bounds.offsetBy(dx: 0.0, dy: -offset) + self.dimNode.position = CGPoint(x: dimPosition.x, y: dimPosition.y - offset) + transition.animateView({ + self.bounds = targetBounds + self.dimNode.position = dimPosition + }) } } diff --git a/submodules/TelegramCallsUI/Sources/VoiceChatParticipantItem.swift b/submodules/TelegramCallsUI/Sources/VoiceChatParticipantItem.swift index 7686e94cb9..be870f5cab 100644 --- a/submodules/TelegramCallsUI/Sources/VoiceChatParticipantItem.swift +++ b/submodules/TelegramCallsUI/Sources/VoiceChatParticipantItem.swift @@ -12,7 +12,6 @@ import ItemListUI import PresentationDataUtils import AvatarNode import TelegramStringFormatting -import PeerPresenceStatusManager import ContextUI import AccountContext import LegacyComponents @@ -20,13 +19,8 @@ import AudioBlob import PeerInfoAvatarListNode final class VoiceChatParticipantItem: ListViewItem { - enum LayoutStyle: Equatable { - case list - case tile(isLandscape: Bool) - } - enum ParticipantText: Equatable { - public struct TextIcon: OptionSet { + struct TextIcon: OptionSet { public var rawValue: Int32 public init(rawValue: Int32) { @@ -42,14 +36,13 @@ final class VoiceChatParticipantItem: ListViewItem { public static let screen = TextIcon(rawValue: 1 << 2) } - public enum TextColor { + enum TextColor { case generic case accent case constructive case destructive } - case presence case text(String, TextIcon, TextColor) case none } @@ -61,71 +54,32 @@ final class VoiceChatParticipantItem: ListViewItem { case wantsToSpeak } - struct RevealOption { - enum RevealOptionType { - case neutral - case warning - case destructive - case accent - } - - var type: RevealOptionType - var title: String - var action: () -> Void - - init(type: RevealOptionType, title: String, action: @escaping () -> Void) { - self.type = type - self.title = title - self.action = action 
- } - } - let presentationData: ItemListPresentationData let dateTimeFormat: PresentationDateTimeFormat let nameDisplayOrder: PresentationPersonNameOrder let context: AccountContext let peer: Peer - let ssrc: UInt32? - let presence: PeerPresence? let text: ParticipantText let expandedText: ParticipantText? let icon: Icon - let style: LayoutStyle - let enabled: Bool - let transparent: Bool - let pinned: Bool - public let selectable: Bool let getAudioLevel: (() -> Signal)? - let getVideo: () -> GroupVideoNode? - let revealOptions: [RevealOption] - let revealed: Bool? - let setPeerIdWithRevealedOptions: (PeerId?, PeerId?) -> Void - let action: ((ASDisplayNode) -> Void)? + let action: ((ASDisplayNode?) -> Void)? let contextAction: ((ASDisplayNode, ContextGesture?) -> Void)? let getIsExpanded: () -> Bool let getUpdatingAvatar: () -> Signal<(TelegramMediaImageRepresentation, Float)?, NoError> - public init(presentationData: ItemListPresentationData, dateTimeFormat: PresentationDateTimeFormat, nameDisplayOrder: PresentationPersonNameOrder, context: AccountContext, peer: Peer, ssrc: UInt32?, presence: PeerPresence?, text: ParticipantText, expandedText: ParticipantText?, icon: Icon, style: LayoutStyle, enabled: Bool, transparent: Bool, pinned: Bool, selectable: Bool, getAudioLevel: (() -> Signal)?, getVideo: @escaping () -> GroupVideoNode?, revealOptions: [RevealOption], revealed: Bool?, setPeerIdWithRevealedOptions: @escaping (PeerId?, PeerId?) -> Void, action: ((ASDisplayNode) -> Void)?, contextAction: ((ASDisplayNode, ContextGesture?) -> Void)? = nil, getIsExpanded: @escaping () -> Bool, getUpdatingAvatar: @escaping () -> Signal<(TelegramMediaImageRepresentation, Float)?, NoError>) { + public let selectable: Bool = true + + public init(presentationData: ItemListPresentationData, dateTimeFormat: PresentationDateTimeFormat, nameDisplayOrder: PresentationPersonNameOrder, context: AccountContext, peer: Peer, text: ParticipantText, expandedText: ParticipantText?, icon: Icon, getAudioLevel: (() -> Signal)?, action: ((ASDisplayNode?) -> Void)?, contextAction: ((ASDisplayNode, ContextGesture?) -> Void)? 
= nil, getIsExpanded: @escaping () -> Bool, getUpdatingAvatar: @escaping () -> Signal<(TelegramMediaImageRepresentation, Float)?, NoError>) { self.presentationData = presentationData self.dateTimeFormat = dateTimeFormat self.nameDisplayOrder = nameDisplayOrder self.context = context self.peer = peer - self.ssrc = ssrc - self.presence = presence self.text = text self.expandedText = expandedText self.icon = icon - self.style = style - self.enabled = enabled - self.transparent = transparent - self.pinned = pinned - self.selectable = selectable self.getAudioLevel = getAudioLevel - self.getVideo = getVideo - self.revealOptions = revealOptions - self.revealed = revealed - self.setPeerIdWithRevealedOptions = setPeerIdWithRevealedOptions self.action = action self.contextAction = contextAction self.getIsExpanded = getIsExpanded @@ -135,7 +89,7 @@ final class VoiceChatParticipantItem: ListViewItem { public func nodeConfiguredForParams(async: @escaping (@escaping () -> Void) -> Void, params: ListViewItemLayoutParams, synchronousLoads: Bool, previousItem: ListViewItem?, nextItem: ListViewItem?, completion: @escaping (ListViewItemNode, @escaping () -> (Signal?, (ListViewItemApply) -> Void)) -> Void) { async { let node = VoiceChatParticipantItemNode() - let (layout, apply) = node.asyncLayout()(self, params, previousItem == nil, nextItem == nil) + let (layout, apply) = node.asyncLayout()(self, params, previousItem == nil || previousItem is VoiceChatTilesGridItem, nextItem == nil) node.contentSize = layout.contentSize node.insets = layout.insets @@ -159,7 +113,7 @@ final class VoiceChatParticipantItem: ListViewItem { } async { - let (layout, apply) = makeLayout(self, params, previousItem == nil, nextItem == nil) + let (layout, apply) = makeLayout(self, params, previousItem == nil || previousItem is VoiceChatTilesGridItem, nextItem == nil) Queue.mainQueue().async { completion(layout, { _ in apply(false, animated) @@ -184,28 +138,6 @@ private let accentColor: UIColor = UIColor(rgb: 0x007aff) private let constructiveColor: UIColor = UIColor(rgb: 0x34c759) private let destructiveColor: UIColor = UIColor(rgb: 0xff3b30) -private let borderLineWidth: CGFloat = 2.0 -private let borderImage = generateImage(CGSize(width: tileSize.width, height: tileSize.height), rotatedContext: { size, context in - let bounds = CGRect(origin: CGPoint(), size: size) - context.clear(bounds) - - context.setLineWidth(borderLineWidth) - context.setStrokeColor(constructiveColor.cgColor) - - context.addPath(UIBezierPath(roundedRect: bounds.insetBy(dx: (borderLineWidth - UIScreenPixel) / 2.0, dy: (borderLineWidth - UIScreenPixel) / 2.0), cornerRadius: backgroundCornerRadius - UIScreenPixel).cgPath) - context.strokePath() -}) - -private let fadeImage = generateImage(CGSize(width: 1.0, height: 30.0), rotatedContext: { size, context in - let bounds = CGRect(origin: CGPoint(), size: size) - context.clear(bounds) - - let colorsArray = [UIColor(rgb: 0x000000, alpha: 0.0).cgColor, UIColor(rgb: 0x000000, alpha: 0.7).cgColor] as CFArray - var locations: [CGFloat] = [0.0, 1.0] - let gradient = CGGradient(colorsSpace: deviceColorSpace, colors: colorsArray, locations: &locations)! 
-    context.drawLinearGradient(gradient, start: CGPoint(), end: CGPoint(x: 0.0, y: size.height), options: CGGradientDrawingOptions())
-})
-
 private class VoiceChatParticipantStatusNode: ASDisplayNode {
     private var iconNodes: [ASImageNode]
     private let textNode: TextNode
@@ -317,21 +249,18 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode {
     private let topStripeNode: ASDisplayNode
     private let bottomStripeNode: ASDisplayNode
     private let highlightedBackgroundNode: ASDisplayNode
-    private var disabledOverlayNode: ASDisplayNode?
     
     let contextSourceNode: ContextExtractedContentContainingNode
     private let containerNode: ContextControllerSourceNode
     private let backgroundImageNode: ASImageNode
     private let extractedBackgroundImageNode: ASImageNode
     private let offsetContainerNode: ASDisplayNode
-    private let borderImageNode: ASImageNode
     
     private var extractedRect: CGRect?
     private var nonExtractedRect: CGRect?
     private var extractedVerticalOffset: CGFloat?
     
-    fileprivate let avatarNode: AvatarNode
-    private let pinIconNode: ASImageNode
+    let avatarNode: AvatarNode
     private let contentWrapperNode: ASDisplayNode
     private let titleNode: TextNode
     private let statusNode: VoiceChatParticipantStatusNode
@@ -355,19 +284,11 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode {
     
     private var absoluteLocation: (CGRect, CGSize)?
     
-    private var peerPresenceManager: PeerPresenceStatusManager?
     private var layoutParams: (VoiceChatParticipantItem, ListViewItemLayoutParams, Bool, Bool)?
     private var isExtracted = false
    private var animatingExtraction = false
     private var wavesColor: UIColor?
-    
-    private let videoContainerNode: ASDisplayNode
-    private let videoFadeNode: ASImageNode
-    private var videoNode: GroupVideoNode?
-    private let videoReadyDisposable = MetaDisposable()
-    private var videoReadyDelayed = false
-    private var videoReady = false
-    
+    
     private var raiseHandTimer: SwiftSignalKit.Timer?
     
     var item: VoiceChatParticipantItem?
{ @@ -395,35 +316,14 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode { self.extractedBackgroundImageNode.clipsToBounds = true self.extractedBackgroundImageNode.displaysAsynchronously = false self.extractedBackgroundImageNode.alpha = 0.0 - - self.borderImageNode = ASImageNode() - self.borderImageNode.displaysAsynchronously = false - self.borderImageNode.image = borderImage - self.borderImageNode.isHidden = true - + self.offsetContainerNode = ASDisplayNode() self.avatarNode = AvatarNode(font: avatarFont) self.avatarNode.frame = CGRect(origin: CGPoint(), size: CGSize(width: 40.0, height: 40.0)) - self.pinIconNode = ASImageNode() - self.pinIconNode.alpha = 0.65 - self.pinIconNode.displaysAsynchronously = false - self.pinIconNode.displayWithoutProcessing = true - self.pinIconNode.image = generateTintedImage(image: UIImage(bundleImageName: "Chat/Context Menu/Pin"), color: UIColor(rgb: 0xffffff)) - self.contentWrapperNode = ASDisplayNode() - - self.videoContainerNode = ASDisplayNode() - self.videoContainerNode.clipsToBounds = true - - self.videoFadeNode = ASImageNode() - self.videoFadeNode.displaysAsynchronously = false - self.videoFadeNode.displayWithoutProcessing = true - self.videoFadeNode.contentMode = .scaleToFill - self.videoFadeNode.image = fadeImage - self.videoContainerNode.addSubnode(videoFadeNode) - + self.titleNode = TextNode() self.titleNode.isUserInteractionEnabled = false self.titleNode.contentMode = .left @@ -453,27 +353,17 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode { self.contextSourceNode.contentNode.addSubnode(self.backgroundImageNode) self.backgroundImageNode.addSubnode(self.extractedBackgroundImageNode) self.contextSourceNode.contentNode.addSubnode(self.offsetContainerNode) - self.offsetContainerNode.addSubnode(self.videoContainerNode) self.offsetContainerNode.addSubnode(self.contentWrapperNode) self.contentWrapperNode.addSubnode(self.titleNode) self.contentWrapperNode.addSubnode(self.statusNode) self.contentWrapperNode.addSubnode(self.expandedStatusNode) self.contentWrapperNode.addSubnode(self.actionContainerNode) self.actionContainerNode.addSubnode(self.actionButtonNode) - self.offsetContainerNode.addSubnode(self.pinIconNode) self.offsetContainerNode.addSubnode(self.avatarNode) - self.contextSourceNode.contentNode.addSubnode(self.borderImageNode) self.containerNode.targetNodeForActivationProgress = self.contextSourceNode.contentNode self.actionButtonNode.addTarget(self, action: #selector(self.actionButtonPressed), forControlEvents: .touchUpInside) - - self.peerPresenceManager = PeerPresenceStatusManager(update: { [weak self] in - if let strongSelf = self, let layoutParams = strongSelf.layoutParams { - let (_, apply) = strongSelf.asyncLayout()(layoutParams.0, layoutParams.1, layoutParams.2, layoutParams.3) - apply(false, true) - } - }) - + self.containerNode.shouldBegin = { [weak self] location in guard let strongSelf = self else { return false @@ -602,10 +492,7 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode { strongSelf.avatarNode.isHidden = true avatarListWrapperNode.contentNode.addSubnode(transitionNode) - strongSelf.videoContainerNode.position = CGPoint(x: avatarListWrapperNode.frame.width / 2.0, y: avatarListWrapperNode.frame.height / 2.0) - strongSelf.videoContainerNode.cornerRadius = tileSize.width / 2.0 - strongSelf.videoContainerNode.transform = CATransform3DMakeScale(avatarListWrapperNode.frame.width / tileSize.width * 1.05, avatarListWrapperNode.frame.height / tileSize.width * 1.05, 1.0) - 
avatarListWrapperNode.contentNode.addSubnode(strongSelf.videoContainerNode) + strongSelf.avatarTransitionNode = transitionNode @@ -617,13 +504,9 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode { avatarListWrapperNode.layer.animateSpring(from: initialScale as NSNumber, to: 1.0 as NSNumber, keyPath: "transform.scale", duration: springDuration, initialVelocity: 0.0, damping: springDamping) avatarListWrapperNode.layer.animateSpring(from: NSValue(cgPoint: avatarInitialRect.center), to: NSValue(cgPoint: avatarListWrapperNode.position), keyPath: "position", duration: springDuration, initialVelocity: 0.0, damping: springDamping, completion: { [weak self] _ in - if let strongSelf = self, let avatarListNode = strongSelf.avatarListNode { - avatarListNode.currentItemNode?.addSubnode(strongSelf.videoContainerNode) - } }) radiusTransition.updateCornerRadius(node: avatarListContainerNode, cornerRadius: 0.0) - radiusTransition.updateCornerRadius(node: strongSelf.videoContainerNode, cornerRadius: 0.0) let avatarListNode = PeerInfoAvatarListContainerNode(context: item.context) avatarListWrapperNode.contentNode.clipsToBounds = true @@ -643,7 +526,7 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode { avatarListContainerNode.addSubnode(avatarListNode.controlsClippingOffsetNode) avatarListWrapperNode.contentNode.addSubnode(avatarListContainerNode) - avatarListNode.update(size: targetRect.size, peer: item.peer, customNode: strongSelf.videoContainerNode, additionalEntry: item.getUpdatingAvatar(), isExpanded: true, transition: .immediate) + avatarListNode.update(size: targetRect.size, peer: item.peer, customNode: nil, additionalEntry: item.getUpdatingAvatar(), isExpanded: true, transition: .immediate) strongSelf.offsetContainerNode.supernode?.addSubnode(avatarListWrapperNode) strongSelf.audioLevelView?.alpha = 0.0 @@ -669,9 +552,7 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode { avatarListContainerNode.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2, removeOnCompletion: false, completion: { [weak avatarListContainerNode] _ in avatarListContainerNode?.removeFromSupernode() }) - - avatarListWrapperNode.contentNode.insertSubnode(strongSelf.videoContainerNode, aboveSubnode: transitionNode) - + avatarListWrapperNode.layer.animate(from: 1.0 as NSNumber, to: targetScale as NSNumber, keyPath: "transform.scale", timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue, duration: 0.2, removeOnCompletion: false) avatarListWrapperNode.layer.animate(from: NSValue(cgPoint: avatarListWrapperNode.position), to: NSValue(cgPoint: avatarInitialRect.center), keyPath: "position", timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue, duration: 0.2, removeOnCompletion: false, completion: { [weak transitionNode, weak self] _ in transitionNode?.removeFromSupernode() @@ -682,27 +563,11 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode { if let strongSelf = self { strongSelf.animatingExtraction = false - - strongSelf.offsetContainerNode.insertSubnode(strongSelf.videoContainerNode, belowSubnode: strongSelf.contentWrapperNode) - - switch item.style { - case .list: - strongSelf.videoFadeNode.alpha = 0.0 - strongSelf.videoContainerNode.position = strongSelf.avatarNode.position - strongSelf.videoContainerNode.cornerRadius = tileSize.width / 2.0 - strongSelf.videoContainerNode.transform = CATransform3DMakeScale(avatarSize / tileSize.width, avatarSize / tileSize.width, 1.0) - case .tile: - strongSelf.videoFadeNode.alpha = 1.0 - 
strongSelf.videoContainerNode.position = CGPoint(x: tileSize.width / 2.0, y: tileSize.height / 2.0) - strongSelf.videoContainerNode.cornerRadius = backgroundCornerRadius - strongSelf.videoContainerNode.transform = CATransform3DMakeScale(1.0, 1.0, 1.0) - } } }) radiusTransition.updateCornerRadius(node: avatarListContainerNode, cornerRadius: avatarListContainerNode.frame.width / 2.0) radiusTransition.updateCornerRadius(node: transitionNode, cornerRadius: avatarListContainerNode.frame.width / 2.0) - radiusTransition.updateCornerRadius(node: strongSelf.videoContainerNode, cornerRadius: tileSize.width / 2.0) } let alphaTransition = ContainedViewLayoutTransition.animated(duration: 0.2, curve: .easeInOut) @@ -749,9 +614,7 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode { strongSelf.extractedBackgroundImageNode.alpha = 0.0 strongSelf.extractedBackgroundImageNode.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2, delay: 0.0, timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue, removeOnCompletion: false, completion: { [weak self] _ in if let strongSelf = self { - if strongSelf.item?.style == .list { - strongSelf.backgroundImageNode.image = nil - } + strongSelf.backgroundImageNode.image = nil strongSelf.extractedBackgroundImageNode.image = nil strongSelf.extractedBackgroundImageNode.layer.removeAllAnimations() } @@ -787,7 +650,6 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode { } deinit { - self.videoReadyDisposable.dispose() self.audioLevelDisposable.dispose() self.raiseHandTimer?.invalidate() } @@ -797,170 +659,69 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode { self.layoutParams?.0.action?(self.contextSourceNode) } - func animateTransitionIn(from sourceNode: VoiceChatParticipantItemNode, containerNode: ASDisplayNode) { - guard let item = self.item, let sourceItem = sourceNode.item, sourceItem.style != item.style else { + func animateTransitionIn(from sourceNode: ASDisplayNode, containerNode: ASDisplayNode) { + guard let _ = self.item, let sourceNode = sourceNode as? 
VoiceChatFullscreenParticipantItemNode, let _ = sourceNode.item else { return } - - switch sourceItem.style { - case .list: - var startContainerPosition = sourceNode.avatarNode.view.convert(sourceNode.avatarNode.bounds, to: containerNode.view).center - var animate = true - if startContainerPosition.y > containerNode.frame.height - 238.0 { - animate = false - } - - if let videoNode = sourceNode.videoNode { - if item.pinned { - self.avatarNode.alpha = 1.0 - videoNode.alpha = 0.0 - startContainerPosition = startContainerPosition.offsetBy(dx: 0.0, dy: 9.0) - } else { - self.avatarNode.alpha = 0.0 - } - - sourceNode.videoNode = nil - self.videoNode = videoNode - - if animate { - self.videoContainerNode.layer.animateScale(from: avatarSize / tileSize.width, to: 1.0, duration: 0.2, timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue) - } - self.videoContainerNode.insertSubnode(videoNode, at: 0) - - if animate { - self.videoContainerNode.layer.animate(from: (tileSize.width / 2.0) as NSNumber, to: backgroundCornerRadius as NSNumber, keyPath: "cornerRadius", timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue, duration: 0.2, removeOnCompletion: false, completion: { _ in - }) - } - } else { - startContainerPosition = startContainerPosition.offsetBy(dx: 0.0, dy: 9.0) - } - if animate { - sourceNode.avatarNode.alpha = 0.0 - - let initialPosition = self.contextSourceNode.position - let targetContainerPosition = self.contextSourceNode.view.convert(self.contextSourceNode.bounds, to: containerNode.view).center - - self.contextSourceNode.position = targetContainerPosition - containerNode.addSubnode(self.contextSourceNode) - - self.contextSourceNode.layer.animatePosition(from: startContainerPosition, to: targetContainerPosition, duration: 0.3, timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue, completion: { [weak self, weak sourceNode] _ in - if let strongSelf = self { - sourceNode?.avatarNode.alpha = 1.0 - strongSelf.contextSourceNode.position = initialPosition - strongSelf.containerNode.addSubnode(strongSelf.contextSourceNode) - } - }) - - self.videoFadeNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2, timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue) - - if item.pinned { - self.borderImageNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.3) - self.borderImageNode.layer.animateScale(from: 0.001, to: 1.0, duration: 0.3, timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue) - } - - self.backgroundImageNode.layer.animateScale(from: 0.001, to: 1.0, duration: 0.3, timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue) - self.backgroundImageNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.15, timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue) - self.contentWrapperNode.layer.animateScale(from: 0.001, to: 1.0, duration: 0.3, timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue) - self.contentWrapperNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.3, timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue) + let startContainerAvatarPosition = sourceNode.avatarNode.view.convert(sourceNode.avatarNode.bounds, to: containerNode.view).center + var animate = true + if startContainerAvatarPosition.x < -tileSize.width || startContainerAvatarPosition.x > containerNode.frame.width + tileSize.width { + animate = false + } + if animate { + sourceNode.avatarNode.alpha = 0.0 + + let initialAvatarPosition = self.avatarNode.position + let targetContainerAvatarPosition = 
self.avatarNode.view.convert(self.avatarNode.bounds, to: containerNode.view).center + + let startContainerBackgroundPosition = sourceNode.backgroundImageNode.view.convert(sourceNode.backgroundImageNode.bounds, to: containerNode.view).center + let startContainerContentPosition = sourceNode.contentWrapperNode.view.convert(sourceNode.contentWrapperNode.bounds, to: containerNode.view).center + + let initialBackgroundPosition = sourceNode.backgroundImageNode.position + let initialContentPosition = sourceNode.contentWrapperNode.position + + sourceNode.backgroundImageNode.position = targetContainerAvatarPosition + sourceNode.contentWrapperNode.position = targetContainerAvatarPosition + containerNode.addSubnode(sourceNode.backgroundImageNode) + containerNode.addSubnode(sourceNode.contentWrapperNode) + + sourceNode.borderImageNode.alpha = 0.0 + + let timingFunction = CAMediaTimingFunctionName.easeInEaseOut.rawValue + sourceNode.backgroundImageNode.layer.animatePosition(from: startContainerBackgroundPosition, to: targetContainerAvatarPosition, duration: 0.2, timingFunction: timingFunction, completion: { [weak sourceNode] _ in + if let sourceNode = sourceNode { + sourceNode.backgroundImageNode.alpha = 1.0 + sourceNode.borderImageNode.alpha = 1.0 + sourceNode.backgroundImageNode.position = initialBackgroundPosition + sourceNode.contextSourceNode.contentNode.insertSubnode(sourceNode.backgroundImageNode, at: 0) } - case .tile: - let startContainerAvatarPosition = sourceNode.avatarNode.view.convert(sourceNode.avatarNode.bounds, to: containerNode.view).center - var animate = true - if startContainerAvatarPosition.x < -tileSize.width || startContainerAvatarPosition.x > containerNode.frame.width + tileSize.width { - animate = false + }) + + sourceNode.contentWrapperNode.layer.animatePosition(from: startContainerContentPosition, to: targetContainerAvatarPosition, duration: 0.2, timingFunction: timingFunction, completion: { [weak sourceNode] _ in + if let sourceNode = sourceNode { + sourceNode.avatarNode.alpha = 1.0 + sourceNode.contentWrapperNode.position = initialContentPosition + sourceNode.offsetContainerNode.insertSubnode(sourceNode.contentWrapperNode, aboveSubnode: sourceNode.videoContainerNode) } - - if let videoNode = sourceNode.videoNode { - if item.pinned { - self.avatarNode.alpha = 1.0 - videoNode.alpha = 0.0 - } else { - self.avatarNode.alpha = 0.0 - videoNode.alpha = 1.0 - } - - sourceNode.videoNode = nil - self.videoNode = videoNode - self.videoContainerNode.insertSubnode(videoNode, at: 0) - } - - if animate { - sourceNode.avatarNode.alpha = 0.0 - sourceNode.videoFadeNode.alpha = 0.0 - - let initialAvatarPosition = self.avatarNode.position - let targetContainerAvatarPosition = self.avatarNode.view.convert(self.avatarNode.bounds, to: containerNode.view).center - - let startContainerBackgroundPosition = sourceNode.backgroundImageNode.view.convert(sourceNode.backgroundImageNode.bounds, to: containerNode.view).center - let startContainerContentPosition = sourceNode.contentWrapperNode.view.convert(sourceNode.contentWrapperNode.bounds, to: containerNode.view).center - let startContainerVideoPosition = sourceNode.videoContainerNode.view.convert(sourceNode.videoContainerNode.bounds, to: containerNode.view).center - - let initialBackgroundPosition = sourceNode.backgroundImageNode.position - let initialContentPosition = sourceNode.contentWrapperNode.position - - sourceNode.backgroundImageNode.position = targetContainerAvatarPosition - sourceNode.contentWrapperNode.position = targetContainerAvatarPosition 
- containerNode.addSubnode(sourceNode.backgroundImageNode) - containerNode.addSubnode(sourceNode.contentWrapperNode) - - if self.videoNode != nil { - sourceNode.backgroundImageNode.alpha = 0.0 - } - - sourceNode.borderImageNode.alpha = 0.0 - - sourceNode.backgroundImageNode.layer.animatePosition(from: startContainerBackgroundPosition, to: targetContainerAvatarPosition, duration: 0.3, timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue, completion: { [weak sourceNode] _ in - if let sourceNode = sourceNode { - sourceNode.backgroundImageNode.alpha = 1.0 - sourceNode.borderImageNode.alpha = 1.0 - sourceNode.backgroundImageNode.position = initialBackgroundPosition - sourceNode.contextSourceNode.contentNode.insertSubnode(sourceNode.backgroundImageNode, at: 0) - } - }) - - sourceNode.contentWrapperNode.layer.animatePosition(from: startContainerContentPosition, to: targetContainerAvatarPosition, duration: 0.3, timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue, completion: { [weak sourceNode] _ in - if let sourceNode = sourceNode { - sourceNode.avatarNode.alpha = 1.0 - sourceNode.videoFadeNode.alpha = 1.0 - sourceNode.contentWrapperNode.position = initialContentPosition - sourceNode.offsetContainerNode.insertSubnode(sourceNode.contentWrapperNode, aboveSubnode: sourceNode.videoContainerNode) - } - }) - - - self.avatarNode.position = targetContainerAvatarPosition - containerNode.addSubnode(self.avatarNode) - - self.avatarNode.layer.animatePosition(from: startContainerAvatarPosition, to: targetContainerAvatarPosition, duration: 0.3, timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue, completion: { [weak self] _ in - if let strongSelf = self { - strongSelf.avatarNode.position = initialAvatarPosition - strongSelf.offsetContainerNode.addSubnode(strongSelf.avatarNode) - } - }) - - - self.videoContainerNode.position = targetContainerAvatarPosition - containerNode.addSubnode(self.videoContainerNode) - - self.videoContainerNode.layer.animatePosition(from: startContainerVideoPosition, to: targetContainerAvatarPosition, duration: 0.3, timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue, completion: { [weak self] _ in - if let strongSelf = self { - strongSelf.videoContainerNode.position = initialAvatarPosition - strongSelf.offsetContainerNode.insertSubnode(strongSelf.videoContainerNode, belowSubnode: strongSelf.contentWrapperNode) - } - }) - - self.videoContainerNode.layer.animateScale(from: 1.0, to: avatarSize / tileSize.width, duration: 0.2, timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue) - self.videoContainerNode.layer.animate(from: backgroundCornerRadius as NSNumber, to: (tileSize.width / 2.0) as NSNumber, keyPath: "cornerRadius", timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue, duration: 0.2, removeOnCompletion: false, completion: { _ in - }) - - self.videoFadeNode.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2, timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue) - - sourceNode.backgroundImageNode.layer.animateScale(from: 1.0, to: 0.001, duration: 0.35, timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue) - sourceNode.backgroundImageNode.layer.animateAlpha(from: sourceNode.backgroundImageNode.alpha, to: 0.0, duration: 0.35, timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue) - sourceNode.contentWrapperNode.layer.animateScale(from: 1.0, to: 0.001, duration: 0.35, timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue) - 
sourceNode.contentWrapperNode.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.35, timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue) + }) + + self.avatarNode.position = targetContainerAvatarPosition + containerNode.addSubnode(self.avatarNode) + + self.avatarNode.layer.animateScale(from: 1.25, to: 1.0, duration: 0.2, timingFunction: timingFunction) + + self.avatarNode.layer.animatePosition(from: startContainerAvatarPosition, to: targetContainerAvatarPosition, duration: 0.2, timingFunction: timingFunction, completion: { [weak self] _ in + if let strongSelf = self { + strongSelf.avatarNode.position = initialAvatarPosition + strongSelf.offsetContainerNode.addSubnode(strongSelf.avatarNode) } + }) + + sourceNode.backgroundImageNode.layer.animateScale(from: 1.0, to: 0.001, duration: 0.25, timingFunction: timingFunction) + sourceNode.backgroundImageNode.layer.animateAlpha(from: sourceNode.backgroundImageNode.alpha, to: 0.0, duration: 0.35, timingFunction: timingFunction) + sourceNode.contentWrapperNode.layer.animateScale(from: 1.0, to: 0.001, duration: 0.25, timingFunction: timingFunction) + sourceNode.contentWrapperNode.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.25, timingFunction: timingFunction) } } @@ -968,11 +729,9 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode { let makeTitleLayout = TextNode.asyncLayout(self.titleNode) let makeStatusLayout = self.statusNode.asyncLayout() let makeExpandedStatusLayout = self.expandedStatusNode.asyncLayout() - var currentDisabledOverlayNode = self.disabledOverlayNode let currentItem = self.layoutParams?.0 let currentTitle = self.currentTitle - let hasVideo = self.videoNode != nil return { item, params, first, last in var updatedTheme: PresentationTheme? @@ -980,69 +739,38 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode { updatedTheme = item.presentationData.theme } - var titleFont = item.style == .list ? Font.regular(17.0) : Font.regular(12.0) - + let titleFont = Font.regular(17.0) var titleAttributedString: NSAttributedString? + let titleColor = item.presentationData.theme.list.itemPrimaryTextColor let rightInset: CGFloat = params.rightInset - - var titleColor = item.presentationData.theme.list.itemPrimaryTextColor - if case .list = item.style, item.transparent{ - titleFont = Font.semibold(17.0) - titleColor = UIColor(rgb: 0xffffff, alpha: 0.65) - } else if case .tile = item.style, !hasVideo { - switch item.text { - case let .text(_, _, textColor): - switch textColor { - case .generic: - titleColor = item.presentationData.theme.list.itemPrimaryTextColor - case .accent: - if item.peer.id != item.context.account.peerId { - titleColor = item.presentationData.theme.list.itemAccentColor - } - case .constructive: - titleColor = constructiveColor - case .destructive: - titleColor = destructiveColor - } - default: - break - } - - } - let currentBoldFont: UIFont = titleFont var updatedTitle = false if let user = item.peer as? 
TelegramUser { if let firstName = user.firstName, let lastName = user.lastName, !firstName.isEmpty, !lastName.isEmpty { - switch item.style { - case .list: - let string = NSMutableAttributedString() - switch item.nameDisplayOrder { - case .firstLast: - string.append(NSAttributedString(string: firstName, font: titleFont, textColor: titleColor)) - string.append(NSAttributedString(string: " ", font: titleFont, textColor: titleColor)) - string.append(NSAttributedString(string: lastName, font: currentBoldFont, textColor: titleColor)) - case .lastFirst: - string.append(NSAttributedString(string: lastName, font: currentBoldFont, textColor: titleColor)) - string.append(NSAttributedString(string: " ", font: titleFont, textColor: titleColor)) - string.append(NSAttributedString(string: firstName, font: titleFont, textColor: titleColor)) - } - titleAttributedString = string - case .tile: - titleAttributedString = NSAttributedString(string: firstName, font: titleFont, textColor: titleColor) - } + let string = NSMutableAttributedString() + switch item.nameDisplayOrder { + case .firstLast: + string.append(NSAttributedString(string: firstName, font: titleFont, textColor: titleColor)) + string.append(NSAttributedString(string: " ", font: titleFont, textColor: titleColor)) + string.append(NSAttributedString(string: lastName, font: titleFont, textColor: titleColor)) + case .lastFirst: + string.append(NSAttributedString(string: lastName, font: titleFont, textColor: titleColor)) + string.append(NSAttributedString(string: " ", font: titleFont, textColor: titleColor)) + string.append(NSAttributedString(string: firstName, font: titleFont, textColor: titleColor)) + } + titleAttributedString = string } else if let firstName = user.firstName, !firstName.isEmpty { - titleAttributedString = NSAttributedString(string: firstName, font: currentBoldFont, textColor: titleColor) + titleAttributedString = NSAttributedString(string: firstName, font: titleFont, textColor: titleColor) } else if let lastName = user.lastName, !lastName.isEmpty { - titleAttributedString = NSAttributedString(string: lastName, font: currentBoldFont, textColor: titleColor) + titleAttributedString = NSAttributedString(string: lastName, font: titleFont, textColor: titleColor) } else { - titleAttributedString = NSAttributedString(string: item.presentationData.strings.User_DeletedAccount, font: currentBoldFont, textColor: titleColor) + titleAttributedString = NSAttributedString(string: item.presentationData.strings.User_DeletedAccount, font: titleFont, textColor: titleColor) } } else if let group = item.peer as? TelegramGroup { - titleAttributedString = NSAttributedString(string: group.title, font: currentBoldFont, textColor: titleColor) + titleAttributedString = NSAttributedString(string: group.title, font: titleFont, textColor: titleColor) } else if let channel = item.peer as? 
TelegramChannel { - titleAttributedString = NSAttributedString(string: channel.title, font: currentBoldFont, textColor: titleColor) + titleAttributedString = NSAttributedString(string: channel.title, font: titleFont, textColor: titleColor) } if let currentTitle = currentTitle, currentTitle != titleAttributedString?.string { updatedTitle = true @@ -1087,16 +815,10 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode { expandedRightInset = 0.0 } - let constrainedWidth: CGFloat - switch item.style { - case .list: - constrainedWidth = params.width - leftInset - 12.0 - rightInset - 30.0 - titleIconsWidth - case .tile: - constrainedWidth = params.width - 24.0 - 10.0 - } + let constrainedWidth = params.width - leftInset - 12.0 - rightInset - 30.0 - titleIconsWidth let (titleLayout, titleApply) = makeTitleLayout(TextNodeLayoutArguments(attributedString: titleAttributedString, backgroundColor: nil, maximumNumberOfLines: 1, truncationType: .end, constrainedSize: CGSize(width: constrainedWidth, height: CGFloat.greatestFiniteMagnitude), alignment: .natural, cutout: nil, insets: UIEdgeInsets())) - let (statusLayout, statusApply) = makeStatusLayout(CGSize(width: params.width - leftInset - 8.0 - rightInset - 30.0, height: CGFloat.greatestFiniteMagnitude), item.text, item.transparent && item.style == .list) + let (statusLayout, statusApply) = makeStatusLayout(CGSize(width: params.width - leftInset - 8.0 - rightInset - 30.0, height: CGFloat.greatestFiniteMagnitude), item.text, false) let (expandedStatusLayout, expandedStatusApply) = makeExpandedStatusLayout(CGSize(width: params.width - leftInset - 8.0 - rightInset - expandedRightInset, height: CGFloat.greatestFiniteMagnitude), item.expandedText ?? item.text, false) let titleSpacing: CGFloat = statusLayout.height == 0.0 ? 0.0 : 1.0 @@ -1104,84 +826,27 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode { let minHeight: CGFloat = titleLayout.size.height + verticalInset * 2.0 let rawHeight: CGFloat = verticalInset * 2.0 + titleLayout.size.height + titleSpacing + statusLayout.height - let contentSize: CGSize - let insets: UIEdgeInsets - switch item.style { - case .list: - contentSize = CGSize(width: params.width, height: max(minHeight, rawHeight)) - insets = UIEdgeInsets(top: 0.0, left: 0.0, bottom: item.transparent ? 6.0 : 0.0, right: 0.0) - case .tile: - contentSize = tileSize - insets = UIEdgeInsets(top: 0.0, left: 0.0, bottom: !last ? 6.0 : 0.0, right: 0.0) - } - + let contentSize = CGSize(width: params.width, height: max(minHeight, rawHeight)) + let insets = UIEdgeInsets() let separatorHeight = UIScreenPixel let layout = ListViewItemNodeLayout(contentSize: contentSize, insets: insets) let layoutSize = layout.size - if !item.enabled { - if currentDisabledOverlayNode == nil { - currentDisabledOverlayNode = ASDisplayNode() - currentDisabledOverlayNode?.backgroundColor = item.presentationData.theme.list.itemBlocksBackgroundColor.withAlphaComponent(0.5) - } - } else { - currentDisabledOverlayNode = nil - } - var animateStatusTransitionFromUp: Bool? 
if let currentItem = currentItem { - if case .presence = currentItem.text, case let .text(_, _, newColor) = item.text { + if case let .text(_, _, currentColor) = currentItem.text, case let .text(_, _, newColor) = item.text, currentColor != newColor { animateStatusTransitionFromUp = newColor == .constructive - } else if case let .text(_, _, currentColor) = currentItem.text, case let .text(_, _, newColor) = item.text, currentColor != newColor { - animateStatusTransitionFromUp = newColor == .constructive - } else if case .text = currentItem.text, case .presence = item.text { - animateStatusTransitionFromUp = false } } - - let peerRevealOptions: [ItemListRevealOption] - var mappedOptions: [ItemListRevealOption] = [] - var index: Int32 = 0 - for option in item.revealOptions { - let color: UIColor - let textColor: UIColor - switch option.type { - case .neutral: - color = item.presentationData.theme.list.itemDisclosureActions.constructive.fillColor - textColor = item.presentationData.theme.list.itemDisclosureActions.constructive.foregroundColor - case .warning: - color = item.presentationData.theme.list.itemDisclosureActions.warning.fillColor - textColor = item.presentationData.theme.list.itemDisclosureActions.warning.foregroundColor - case .destructive: - color = item.presentationData.theme.list.itemDisclosureActions.destructive.fillColor - textColor = item.presentationData.theme.list.itemDisclosureActions.destructive.foregroundColor - case .accent: - color = item.presentationData.theme.list.itemDisclosureActions.accent.fillColor - textColor = item.presentationData.theme.list.itemDisclosureActions.accent.foregroundColor - } - mappedOptions.append(ItemListRevealOption(key: index, title: option.title, icon: .none, color: color, textColor: textColor)) - index += 1 - } - peerRevealOptions = mappedOptions - + return (layout, { [weak self] synchronousLoad, animated in if let strongSelf = self { let hadItem = strongSelf.layoutParams?.0 != nil strongSelf.layoutParams = (item, params, first, last) strongSelf.currentTitle = titleAttributedString?.string strongSelf.wavesColor = wavesColor - - let videoSize = tileSize - let videoNode = !item.transparent ? 
item.getVideo() : nil - if let current = strongSelf.videoNode, current !== videoNode { - current.removeFromSupernode() - strongSelf.videoReadyDisposable.set(nil) - } - - let videoNodeUpdated = strongSelf.videoNode !== videoNode - strongSelf.videoNode = videoNode - + let nonExtractedRect: CGRect let avatarFrame: CGRect let titleFrame: CGRect @@ -1189,33 +854,17 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode { let animationFrame: CGRect let animationScale: CGFloat - switch item.style { - case .list: - nonExtractedRect = CGRect(origin: CGPoint(x: 16.0, y: 0.0), size: CGSize(width: layout.contentSize.width - 32.0, height: layout.contentSize.height)) - avatarFrame = CGRect(origin: CGPoint(x: params.leftInset + 8.0, y: floorToScreenPixels((layout.contentSize.height - avatarSize) / 2.0)), size: CGSize(width: avatarSize, height: avatarSize)) - animationSize = CGSize(width: 36.0, height: 36.0) - animationScale = 1.0 - animationFrame = CGRect(x: params.width - animationSize.width - 6.0 - params.rightInset, y: floor((layout.contentSize.height - animationSize.height) / 2.0) + 1.0, width: animationSize.width, height: animationSize.height) - titleFrame = CGRect(origin: CGPoint(x: leftInset, y: verticalInset + verticalOffset), size: titleLayout.size) - case let .tile(isLandscape): - nonExtractedRect = CGRect(origin: CGPoint(), size: layout.contentSize) - strongSelf.containerNode.transform = CATransform3DMakeRotation(isLandscape ? 0.0 : CGFloat.pi / 2.0, 0.0, 0.0, 1.0) - strongSelf.statusNode.isHidden = true - strongSelf.expandedStatusNode.isHidden = true - avatarFrame = CGRect(origin: CGPoint(x: floor((layout.size.width - avatarSize) / 2.0), y: 13.0), size: CGSize(width: avatarSize, height: avatarSize)) - - let textWidth: CGFloat = 24.0 + titleLayout.size.width - let textOrigin: CGFloat = floor((layout.size.width - textWidth) / 2.0) - 4.0 - animationSize = CGSize(width: 36.0, height: 36.0) - animationScale = 0.66667 - animationFrame = CGRect(x: textOrigin, y: 53.0, width: 24.0, height: 24.0) - titleFrame = CGRect(origin: CGPoint(x: textOrigin + 24.0, y: 61.0), size: titleLayout.size) - } - + nonExtractedRect = CGRect(origin: CGPoint(x: 16.0, y: 0.0), size: CGSize(width: layout.contentSize.width - 32.0, height: layout.contentSize.height)) + avatarFrame = CGRect(origin: CGPoint(x: params.leftInset + 8.0, y: floorToScreenPixels((layout.contentSize.height - avatarSize) / 2.0)), size: CGSize(width: avatarSize, height: avatarSize)) + animationSize = CGSize(width: 36.0, height: 36.0) + animationScale = 1.0 + animationFrame = CGRect(x: params.width - animationSize.width - 6.0 - params.rightInset, y: floor((layout.contentSize.height - animationSize.height) / 2.0) + 1.0, width: animationSize.width, height: animationSize.height) + titleFrame = CGRect(origin: CGPoint(x: leftInset, y: verticalInset + verticalOffset), size: titleLayout.size) + var extractedRect = CGRect(origin: CGPoint(), size: layout.contentSize).insetBy(dx: 16.0 + params.leftInset, dy: 0.0) var extractedHeight = extractedRect.height + expandedStatusLayout.height - statusLayout.height var extractedVerticalOffset: CGFloat = 0.0 - if item.peer.smallProfileImage != nil || strongSelf.videoNode != nil { + if item.peer.smallProfileImage != nil { extractedVerticalOffset = extractedRect.width extractedHeight += extractedVerticalOffset } @@ -1235,10 +884,6 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode { } else { strongSelf.backgroundImageNode.frame = nonExtractedRect } - if case .tile = item.style, 
strongSelf.backgroundImageNode.image == nil { - strongSelf.backgroundImageNode.image = generateStretchableFilledCircleImage(diameter: backgroundCornerRadius * 2.0, color: UIColor(rgb: 0x1c1c1e)) - strongSelf.backgroundImageNode.alpha = 1.0 - } strongSelf.extractedBackgroundImageNode.frame = strongSelf.backgroundImageNode.bounds strongSelf.contextSourceNode.contentRect = extractedRect @@ -1249,7 +894,6 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode { strongSelf.offsetContainerNode.frame = contentBounds strongSelf.contextSourceNode.contentNode.frame = contentBounds strongSelf.actionContainerNode.frame = contentBounds - strongSelf.borderImageNode.frame = contentBounds strongSelf.containerNode.isGestureEnabled = item.contextAction != nil @@ -1274,31 +918,9 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode { transition = .immediate } - if let currentDisabledOverlayNode = currentDisabledOverlayNode { - if currentDisabledOverlayNode != strongSelf.disabledOverlayNode { - strongSelf.disabledOverlayNode = currentDisabledOverlayNode - strongSelf.addSubnode(currentDisabledOverlayNode) - currentDisabledOverlayNode.alpha = 0.0 - transition.updateAlpha(node: currentDisabledOverlayNode, alpha: 1.0) - currentDisabledOverlayNode.frame = CGRect(origin: CGPoint(), size: CGSize(width: layout.contentSize.width, height: layout.contentSize.height - separatorHeight)) - } else { - transition.updateFrame(node: currentDisabledOverlayNode, frame: CGRect(origin: CGPoint(), size: CGSize(width: layout.contentSize.width, height: layout.contentSize.height - separatorHeight))) - } - } else if let disabledOverlayNode = strongSelf.disabledOverlayNode { - transition.updateAlpha(node: disabledOverlayNode, alpha: 0.0, completion: { [weak disabledOverlayNode] _ in - disabledOverlayNode?.removeFromSupernode() - }) - strongSelf.disabledOverlayNode = nil - } - if updatedTitle, let snapshotView = strongSelf.titleNode.view.snapshotContentTree() { strongSelf.titleNode.view.superview?.insertSubview(snapshotView, aboveSubview: strongSelf.titleNode.view) - if item.transparent { - snapshotView.layer.animatePosition(from: CGPoint(), to: CGPoint(x: 0.0, y: -20.0), duration: 0.2, removeOnCompletion: false, additive: true) - strongSelf.titleNode.layer.animatePosition(from: CGPoint(x: 0.0, y: 20.0), to: CGPoint(), duration: 0.2, additive: true) - } - snapshotView.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2, removeOnCompletion: false, completion: { [weak snapshotView] _ in snapshotView?.removeFromSuperview() }) @@ -1332,8 +954,8 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode { strongSelf.insertSubnode(strongSelf.bottomStripeNode, at: 1) } - strongSelf.topStripeNode.isHidden = first || item.style != .list || item.transparent - strongSelf.bottomStripeNode.isHidden = last || item.style != .list || item.transparent + strongSelf.topStripeNode.isHidden = first + strongSelf.bottomStripeNode.isHidden = last transition.updateFrame(node: strongSelf.topStripeNode, frame: CGRect(origin: CGPoint(x: leftInset, y: -min(insets.top, separatorHeight)), size: CGSize(width: layoutSize.width, height: separatorHeight))) transition.updateFrame(node: strongSelf.bottomStripeNode, frame: CGRect(origin: CGPoint(x: leftInset, y: contentSize.height + -separatorHeight), size: CGSize(width: layoutSize.width - leftInset, height: separatorHeight))) @@ -1394,7 +1016,7 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode { audioLevelView.layer.mask = playbackMaskLayer 
audioLevelView.setColor(wavesColor) - audioLevelView.alpha = strongSelf.isExtracted || (strongSelf.item?.transparent == true) ? 0.0 : 1.0 + audioLevelView.alpha = 1.0 strongSelf.audioLevelView = audioLevelView strongSelf.offsetContainerNode.view.insertSubview(audioLevelView, at: 0) @@ -1452,13 +1074,7 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode { nodeToAnimateIn = animationNode } - var color = color - if item.transparent { - color = UIColor(rgb: 0xffffff) - } else if color.rgb == 0x979797 && item.style != .list { - color = UIColor(rgb: 0xffffff) - } - animationNode.alpha = item.transparent && item.style == .list ? 0.65 : 1.0 + animationNode.alpha = 1.0 animationNode.update(state: VoiceChatMicrophoneNode.State(muted: muted, filled: false, color: color), animated: true) strongSelf.actionButtonNode.isUserInteractionEnabled = false } else if let animationNode = strongSelf.animationNode { @@ -1535,179 +1151,16 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode { node.layer.animateScale(from: 0.001, to: 1.0, duration: 0.2) } - if !strongSelf.isExtracted && !strongSelf.animatingExtraction { - strongSelf.videoFadeNode.frame = CGRect(x: 0.0, y: tileSize.height - 30.0, width: tileSize.width, height: 30.0) - strongSelf.videoContainerNode.bounds = CGRect(origin: CGPoint(), size: tileSize) - switch item.style { - case .list: - strongSelf.videoFadeNode.alpha = 0.0 - strongSelf.videoContainerNode.position = strongSelf.avatarNode.position - strongSelf.videoContainerNode.cornerRadius = tileSize.width / 2.0 - strongSelf.videoContainerNode.transform = CATransform3DMakeScale(avatarSize / tileSize.width, avatarSize / tileSize.width, 1.0) - case .tile: - strongSelf.videoFadeNode.alpha = 1.0 - strongSelf.videoContainerNode.position = CGPoint(x: tileSize.width / 2.0, y: tileSize.height / 2.0) - strongSelf.videoContainerNode.cornerRadius = backgroundCornerRadius - strongSelf.videoContainerNode.transform = CATransform3DMakeScale(1.0, 1.0, 1.0) - } - } - - strongSelf.borderImageNode.isHidden = !item.pinned || item.style == .list - - let canUpdateAvatarVisibility = !strongSelf.isExtracted && !strongSelf.animatingExtraction - - if let videoNode = videoNode { - let transition = ContainedViewLayoutTransition.animated(duration: 0.2, curve: .easeInOut) - if !strongSelf.isExtracted && !strongSelf.animatingExtraction { - if currentItem != nil { - if case .tile = item.style { - if item.pinned { - if strongSelf.avatarNode.alpha.isZero { - strongSelf.videoContainerNode.layer.animateScale(from: 1.0, to: 0.001, duration: 0.2) - strongSelf.avatarNode.layer.animateScale(from: 0.0, to: 1.0, duration: 0.2) - } - transition.updateAlpha(node: videoNode, alpha: 0.0) - transition.updateAlpha(node: strongSelf.videoFadeNode, alpha: 0.0) - transition.updateAlpha(node: strongSelf.avatarNode, alpha: 1.0) - } else { - if !strongSelf.avatarNode.alpha.isZero { - strongSelf.videoContainerNode.layer.animateScale(from: 0.001, to: 1.0, duration: 0.2) - strongSelf.avatarNode.layer.animateScale(from: 1.0, to: 0.001, duration: 0.2) - } - transition.updateAlpha(node: videoNode, alpha: 1.0) - transition.updateAlpha(node: strongSelf.videoFadeNode, alpha: 1.0) - transition.updateAlpha(node: strongSelf.avatarNode, alpha: 0.0) - } - } else { - if item.pinned { - videoNode.alpha = 0.0 - if canUpdateAvatarVisibility { - strongSelf.avatarNode.alpha = 1.0 - } - } else if strongSelf.videoReady { - videoNode.alpha = 1.0 - strongSelf.avatarNode.alpha = 0.0 - } - } - } else { - if item.pinned { - videoNode.alpha = 0.0 - if 
canUpdateAvatarVisibility { - strongSelf.avatarNode.alpha = 1.0 - } - } else if strongSelf.videoReady { - videoNode.alpha = 1.0 - strongSelf.avatarNode.alpha = 0.0 - } - } - } - - videoNode.updateLayout(size: videoSize, isLandscape: false, transition: .immediate) - if !strongSelf.isExtracted && !strongSelf.animatingExtraction { - if videoNode.supernode !== strongSelf.videoContainerNode { - videoNode.clipsToBounds = true - strongSelf.videoContainerNode.addSubnode(videoNode) - } - - videoNode.position = CGPoint(x: videoSize.width / 2.0, y: videoSize.height / 2.0) - videoNode.bounds = CGRect(origin: CGPoint(), size: videoSize) - } - - if videoNodeUpdated { - strongSelf.videoReadyDelayed = false - strongSelf.videoReadyDisposable.set((videoNode.ready - |> deliverOnMainQueue).start(next: { [weak self] ready in - if let strongSelf = self { - if !ready { - strongSelf.videoReadyDelayed = true - } - strongSelf.videoReady = ready - if let videoNode = strongSelf.videoNode, ready && !item.transparent { - if strongSelf.videoReadyDelayed { - Queue.mainQueue().after(0.15) { - guard let currentItem = strongSelf.item else { - return - } - switch currentItem.style { - case .list: - if currentItem.pinned { - if canUpdateAvatarVisibility { - strongSelf.avatarNode.alpha = 1.0 - } - videoNode.alpha = 0.0 - } else { - strongSelf.avatarNode.alpha = 0.0 - strongSelf.avatarNode.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2) - videoNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2) - videoNode.alpha = 1.0 - } - case .tile: - if currentItem.pinned { - if canUpdateAvatarVisibility { - strongSelf.avatarNode.alpha = 1.0 - } - videoNode.alpha = 0.0 - } else { - strongSelf.avatarNode.alpha = 0.0 - strongSelf.avatarNode.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2) - videoNode.layer.animateScale(from: 0.01, to: 1.0, duration: 0.2) - videoNode.alpha = 1.0 - } - } - } - } else { - if item.pinned { - if canUpdateAvatarVisibility { - strongSelf.avatarNode.alpha = 1.0 - } - videoNode.alpha = 0.0 - } else { - strongSelf.avatarNode.alpha = 0.0 - videoNode.alpha = 1.0 - } - } - } - } - })) - } - } else if canUpdateAvatarVisibility { - strongSelf.avatarNode.alpha = 1.0 - } - - switch item.style { - case .list: - strongSelf.audioLevelView?.alpha = item.transparent ? 0.0 : 1.0 - strongSelf.avatarNode.isHidden = item.transparent || strongSelf.isExtracted - strongSelf.videoContainerNode.isHidden = item.transparent - strongSelf.pinIconNode.isHidden = !item.transparent - if item.transparent && currentItem?.pinned != item.pinned { - strongSelf.pinIconNode.image = generateTintedImage(image: UIImage(bundleImageName: item.pinned ? 
"Chat/Context Menu/Pin" : "Chat/Context Menu/Unpin"), color: UIColor(rgb: 0xffffff)) - } - case .tile: - strongSelf.pinIconNode.isHidden = true - strongSelf.videoContainerNode.isHidden = item.transparent - } - - if let image = strongSelf.pinIconNode.image { - strongSelf.pinIconNode.frame = CGRect(origin: CGPoint(x: 16.0, y: 17.0), size: image.size) - } + strongSelf.avatarNode.isHidden = strongSelf.isExtracted strongSelf.iconNode?.frame = CGRect(origin: CGPoint(), size: animationSize) strongSelf.animationNode?.frame = CGRect(origin: CGPoint(), size: animationSize) strongSelf.raiseHandNode?.frame = CGRect(origin: CGPoint(), size: animationSize).insetBy(dx: -6.0, dy: -6.0).offsetBy(dx: -2.0, dy: 0.0) strongSelf.actionButtonNode.transform = CATransform3DMakeScale(animationScale, animationScale, 1.0) -// strongSelf.actionButtonNode.frame = animationFrame transition.updateFrame(node: strongSelf.actionButtonNode, frame: animationFrame) - - if let presence = item.presence as? TelegramUserPresence { - strongSelf.peerPresenceManager?.reset(presence: presence) - } - + strongSelf.updateIsHighlighted(transition: transition) - - strongSelf.setRevealOptions((left: [], right: peerRevealOptions)) - strongSelf.setRevealOptionsOpened(item.revealed ?? false, animated: animated) } }) } @@ -1715,51 +1168,36 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode { var isHighlighted = false func updateIsHighlighted(transition: ContainedViewLayoutTransition) { - guard let item = self.item else { - return - } - switch item.style { - case .list: - if self.isHighlighted { - self.highlightedBackgroundNode.alpha = 1.0 - if self.highlightedBackgroundNode.supernode == nil { - var anchorNode: ASDisplayNode? - if self.bottomStripeNode.supernode != nil { - anchorNode = self.bottomStripeNode - } else if self.topStripeNode.supernode != nil { - anchorNode = self.topStripeNode - } - if let anchorNode = anchorNode { - self.insertSubnode(self.highlightedBackgroundNode, aboveSubnode: anchorNode) - } else { - self.addSubnode(self.highlightedBackgroundNode) - } - } - } else { - if self.highlightedBackgroundNode.supernode != nil { - if transition.isAnimated { - self.highlightedBackgroundNode.layer.animateAlpha(from: self.highlightedBackgroundNode.alpha, to: 0.0, duration: 0.4, completion: { [weak self] completed in - if let strongSelf = self { - if completed { - strongSelf.highlightedBackgroundNode.removeFromSupernode() - } - } - }) - self.highlightedBackgroundNode.alpha = 0.0 - } else { - self.highlightedBackgroundNode.removeFromSupernode() - } - } + if self.isHighlighted { + self.highlightedBackgroundNode.alpha = 1.0 + if self.highlightedBackgroundNode.supernode == nil { + var anchorNode: ASDisplayNode? 
+ if self.bottomStripeNode.supernode != nil { + anchorNode = self.bottomStripeNode + } else if self.topStripeNode.supernode != nil { + anchorNode = self.topStripeNode } - case .tile: - break -// if self.isHighlighted { -// let transition: ContainedViewLayoutTransition = .animated(duration: 0.3, curve: .spring) -// transition.updateSublayerTransformScale(node: self, scale: 0.9) -// } else { -// let transition: ContainedViewLayoutTransition = .animated(duration: 0.5, curve: .spring) -// transition.updateSublayerTransformScale(node: self, scale: 1.0) -// } + if let anchorNode = anchorNode { + self.insertSubnode(self.highlightedBackgroundNode, aboveSubnode: anchorNode) + } else { + self.addSubnode(self.highlightedBackgroundNode) + } + } + } else { + if self.highlightedBackgroundNode.supernode != nil { + if transition.isAnimated { + self.highlightedBackgroundNode.layer.animateAlpha(from: self.highlightedBackgroundNode.alpha, to: 0.0, duration: 0.4, completion: { [weak self] completed in + if let strongSelf = self { + if completed { + strongSelf.highlightedBackgroundNode.removeFromSupernode() + } + } + }) + self.highlightedBackgroundNode.alpha = 0.0 + } else { + self.highlightedBackgroundNode.removeFromSupernode() + } + } } } @@ -1794,47 +1232,4 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode { contextAction(self.contextSourceNode, nil) } } - - override func updateRevealOffset(offset: CGFloat, transition: ContainedViewLayoutTransition) { - super.updateRevealOffset(offset: offset, transition: transition) - - if let _ = self.layoutParams?.0, let params = self.layoutParams?.1 { - let leftInset: CGFloat = 65.0 + params.leftInset - - var avatarFrame = self.avatarNode.frame - avatarFrame.origin.x = offset + leftInset - 50.0 - transition.updateFrame(node: self.avatarNode, frame: avatarFrame) - - var titleFrame = self.titleNode.frame - titleFrame.origin.x = leftInset + offset - transition.updateFrame(node: self.titleNode, frame: titleFrame) - - var statusFrame = self.statusNode.frame - let previousStatusFrame = statusFrame - statusFrame.origin.x = leftInset + offset - self.statusNode.frame = statusFrame - transition.animatePositionAdditive(node: self.statusNode, offset: CGPoint(x: previousStatusFrame.minX - statusFrame.minX, y: 0)) - } - } - - override func revealOptionsInteractivelyOpened() { - if let item = self.layoutParams?.0 { - item.setPeerIdWithRevealedOptions(item.peer.id, nil) - } - } - - override func revealOptionsInteractivelyClosed() { - if let item = self.layoutParams?.0 { - item.setPeerIdWithRevealedOptions(nil, item.peer.id) - } - } - - override func revealOptionSelected(_ option: ItemListRevealOption, animated: Bool) { - if let item = self.layoutParams?.0 { - item.revealOptions[Int(option.key)].action() - } - - self.setRevealOptionsOpened(false, animated: true) - self.revealOptionsInteractivelyClosed() - } } diff --git a/submodules/TelegramCallsUI/Sources/VoiceChatPeerProfileNode.swift b/submodules/TelegramCallsUI/Sources/VoiceChatPeerProfileNode.swift new file mode 100644 index 0000000000..c07919819a --- /dev/null +++ b/submodules/TelegramCallsUI/Sources/VoiceChatPeerProfileNode.swift @@ -0,0 +1,8 @@ +// +// VoiceChatPeerProfileNode.swift +// _idx_TelegramCallsUI_5BDA0798_ios_min9.0 +// +// Created by Ilya Laktyushin on 11.05.2021. 
+// + +import Foundation diff --git a/submodules/TelegramCallsUI/Sources/VoiceChatTileGridNode.swift b/submodules/TelegramCallsUI/Sources/VoiceChatTileGridNode.swift new file mode 100644 index 0000000000..dc51ff4a7b --- /dev/null +++ b/submodules/TelegramCallsUI/Sources/VoiceChatTileGridNode.swift @@ -0,0 +1,217 @@ +import Foundation +import UIKit +import AsyncDisplayKit +import Display +import SwiftSignalKit +import AccountContext + +private let tileSpacing: CGFloat = 4.0 +private let tileHeight: CGFloat = 180.0 + +final class VoiceChatTileGridNode: ASDisplayNode { + private let context: AccountContext + + private var items: [VoiceChatTileItem] = [] + fileprivate var itemNodes: [String: VoiceChatTileItemNode] = [:] + private var isFirstTime = true + + init(context: AccountContext) { + self.context = context + + super.init() + + self.clipsToBounds = true + } + + func update(size: CGSize, items: [VoiceChatTileItem], transition: ContainedViewLayoutTransition) -> CGSize { + self.items = items + + var validIds: [String] = [] + + let halfWidth = floorToScreenPixels((size.width - tileSpacing) / 2.0) + let lastItemIsWide = items.count % 2 != 0 + + for i in 0 ..< self.items.count { + let item = self.items[i] + let isLast = i == self.items.count - 1 + + let itemSize = CGSize( + width: isLast && lastItemIsWide ? size.width : halfWidth, + height: tileHeight + ) + let col = CGFloat(i % 2) + let row = floor(CGFloat(i) / 2.0) + let itemFrame = CGRect(origin: CGPoint(x: col * (halfWidth + tileSpacing), y: row * (tileHeight + tileSpacing)), size: itemSize) + + validIds.append(item.id) + var itemNode: VoiceChatTileItemNode? + var wasAdded = false + if let current = self.itemNodes[item.id] { + itemNode = current + current.update(size: itemSize, item: item, transition: transition) + } else { + wasAdded = true + let addedItemNode = VoiceChatTileItemNode(context: self.context) + itemNode = addedItemNode + addedItemNode.update(size: itemSize, item: item, transition: .immediate) + self.itemNodes[self.items[i].id] = addedItemNode + self.addSubnode(addedItemNode) + } + if let itemNode = itemNode { + if wasAdded { + itemNode.frame = itemFrame + if self.isFirstTime { + self.isFirstTime = false + } else { + itemNode.layer.animateScale(from: 0.0, to: 1.0, duration: 0.2) + itemNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2) + } + } else { + transition.updateFrame(node: itemNode, frame: itemFrame) + } + } + } + + var removeIds: [String] = [] + for (id, _) in self.itemNodes { + if !validIds.contains(id) { + removeIds.append(id) + } + } + for id in removeIds { + if let itemNode = self.itemNodes.removeValue(forKey: id) { + itemNode.removeFromSupernode() + } + } + + let rowCount = ceil(CGFloat(self.items.count) / 2.0) + return CGSize(width: size.width, height: rowCount * (tileHeight + tileSpacing)) + } +} + +final class VoiceChatTilesGridItem: ListViewItem { + let context: AccountContext + let tiles: [VoiceChatTileItem] + let getIsExpanded: () -> Bool + + init(context: AccountContext, tiles: [VoiceChatTileItem], getIsExpanded: @escaping () -> Bool) { + self.context = context + self.tiles = tiles + self.getIsExpanded = getIsExpanded + } + + func nodeConfiguredForParams(async: @escaping (@escaping () -> Void) -> Void, params: ListViewItemLayoutParams, synchronousLoads: Bool, previousItem: ListViewItem?, nextItem: ListViewItem?, completion: @escaping (ListViewItemNode, @escaping () -> (Signal?, (ListViewItemApply) -> Void)) -> Void) { + async { + let node = VoiceChatTilesGridItemNode() + let (layout, apply) = 
node.asyncLayout()(self, params) + + node.contentSize = layout.contentSize + node.insets = layout.insets + + Queue.mainQueue().async { + completion(node, { + return (nil, { _ in apply() }) + }) + } + } + } + + func updateNode(async: @escaping (@escaping () -> Void) -> Void, node: @escaping () -> ListViewItemNode, params: ListViewItemLayoutParams, previousItem: ListViewItem?, nextItem: ListViewItem?, animation: ListViewItemUpdateAnimation, completion: @escaping (ListViewItemNodeLayout, @escaping (ListViewItemApply) -> Void) -> Void) { + Queue.mainQueue().async { + if let nodeValue = node() as? VoiceChatTilesGridItemNode { + let makeLayout = nodeValue.asyncLayout() + + async { + let (layout, apply) = makeLayout(self, params) + Queue.mainQueue().async { + completion(layout, { _ in + apply() + }) + } + } + } + } + } +} + +final class VoiceChatTilesGridItemNode: ListViewItemNode { + private var item: VoiceChatTilesGridItem? + + private var tileGridNode: VoiceChatTileGridNode? + let backgroundNode: ASDisplayNode + let cornersNode: ASImageNode + + var tileNodes: [VoiceChatTileItemNode] { + if let values = self.tileGridNode?.itemNodes.values { + return Array(values) + } else { + return [] + } + } + + init() { + self.backgroundNode = ASDisplayNode() + + self.cornersNode = ASImageNode() + self.cornersNode.displaysAsynchronously = false + + super.init(layerBacked: false, dynamicBounce: false) + + self.clipsToBounds = true + + self.addSubnode(self.backgroundNode) + } + + override func animateFrameTransition(_ progress: CGFloat, _ currentValue: CGFloat) { + super.animateFrameTransition(progress, currentValue) + + if let tileGridNode = self.tileGridNode { + var gridFrame = tileGridNode.frame + gridFrame.size.height = currentValue + tileGridNode.frame = gridFrame + } + + var backgroundFrame = self.backgroundNode.frame + backgroundFrame.size.height = currentValue + self.backgroundNode.frame = backgroundFrame + } + + func asyncLayout() -> (_ item: VoiceChatTilesGridItem, _ params: ListViewItemLayoutParams) -> (ListViewItemNodeLayout, () -> Void) { + let currentItem = self.item + return { item, params in + let rowCount = ceil(CGFloat(item.tiles.count) / 2.0) + let contentSize = CGSize(width: params.width, height: rowCount * (tileHeight + tileSpacing)) + let layout = ListViewItemNodeLayout(contentSize: contentSize, insets: UIEdgeInsets()) + return (layout, { [weak self] in + if let strongSelf = self { + strongSelf.item = item + + let tileGridNode: VoiceChatTileGridNode + if let current = strongSelf.tileGridNode { + tileGridNode = current + } else { + strongSelf.backgroundNode.backgroundColor = item.getIsExpanded() ? fullscreenBackgroundColor : panelBackgroundColor + + tileGridNode = VoiceChatTileGridNode(context: item.context) + strongSelf.addSubnode(tileGridNode) + strongSelf.tileGridNode = tileGridNode + } + + let transition: ContainedViewLayoutTransition = currentItem == nil ? 
.immediate : .animated(duration: 0.3, curve: .spring)
+                    let tileGridSize = tileGridNode.update(size: CGSize(width: params.width - params.leftInset - params.rightInset, height: CGFloat.greatestFiniteMagnitude), items: item.tiles, transition: transition)
+                    if currentItem == nil {
+                        let transition: ContainedViewLayoutTransition = .animated(duration: 0.3, curve: .easeInOut)
+                        tileGridNode.frame = CGRect(x: params.leftInset, y: 0.0, width: tileGridSize.width, height: 0.0)
+//                        transition.updateFrame(node: tileGridNode, frame: CGRect(origin: CGPoint(x: params.leftInset, y: 0.0), size: tileGridSize))
+
+                        strongSelf.backgroundNode.frame = tileGridNode.frame
+//                        transition.updateFrame(node: strongSelf.backgroundNode, frame: CGRect(origin: CGPoint(x: params.leftInset, y: 0.0), size: tileGridSize))
+                    } else {
+                        transition.updateFrame(node: tileGridNode, frame: CGRect(origin: CGPoint(x: params.leftInset, y: 0.0), size: tileGridSize))
+                        transition.updateFrame(node: strongSelf.backgroundNode, frame: CGRect(origin: CGPoint(x: params.leftInset, y: 0.0), size: tileGridSize))
+                    }
+                }
+            })
+        }
+    }
+}
diff --git a/submodules/TelegramCallsUI/Sources/VoiceChatTileItemNode.swift b/submodules/TelegramCallsUI/Sources/VoiceChatTileItemNode.swift
new file mode 100644
index 0000000000..9f915e4b43
--- /dev/null
+++ b/submodules/TelegramCallsUI/Sources/VoiceChatTileItemNode.swift
@@ -0,0 +1,348 @@
+import Foundation
+import UIKit
+import AsyncDisplayKit
+import Display
+import SwiftSignalKit
+import Postbox
+import SyncCore
+import TelegramCore
+import AccountContext
+import TelegramUIPreferences
+import TelegramPresentationData
+
+private let backgroundCornerRadius: CGFloat = 11.0
+private let constructiveColor: UIColor = UIColor(rgb: 0x34c759)
+private let borderLineWidth: CGFloat = 2.0
+private let borderImage = generateImage(CGSize(width: 24.0, height: 24.0), rotatedContext: { size, context in
+    let bounds = CGRect(origin: CGPoint(), size: size)
+    context.clear(bounds)
+
+    context.setLineWidth(borderLineWidth)
+    context.setStrokeColor(constructiveColor.cgColor)
+
+    context.addPath(UIBezierPath(roundedRect: bounds.insetBy(dx: (borderLineWidth - UIScreenPixel) / 2.0, dy: (borderLineWidth - UIScreenPixel) / 2.0), cornerRadius: backgroundCornerRadius - UIScreenPixel).cgPath)
+    context.strokePath()
+})
+
+private let fadeHeight: CGFloat = 50.0
+
+final class VoiceChatTileItem: Equatable {
+    enum Icon: Equatable {
+        case none
+        case microphone(Bool)
+        case presentation
+    }
+
+    let peer: Peer
+    let videoEndpointId: String
+    let icon: Icon
+    let strings: PresentationStrings
+    let nameDisplayOrder: PresentationPersonNameOrder
+    let speaking: Bool
+    let action: () -> Void
+    let getVideo: () -> GroupVideoNode?
+    let getAudioLevel: (() -> Signal<Float, NoError>)?
+
+    var id: String {
+        return self.videoEndpointId
+    }
+
+    init(peer: Peer, videoEndpointId: String, strings: PresentationStrings, nameDisplayOrder: PresentationPersonNameOrder, speaking: Bool, icon: Icon, action: @escaping () -> Void, getVideo: @escaping () -> GroupVideoNode?, getAudioLevel: (() -> Signal<Float, NoError>)?) 
{ + self.peer = peer + self.videoEndpointId = videoEndpointId + self.strings = strings + self.nameDisplayOrder = nameDisplayOrder + self.icon = icon + self.speaking = speaking + self.action = action + self.getVideo = getVideo + self.getAudioLevel = getAudioLevel + } + + static func == (lhs: VoiceChatTileItem, rhs: VoiceChatTileItem) -> Bool { + if !arePeersEqual(lhs.peer, rhs.peer) { + return false + } + if lhs.videoEndpointId != rhs.videoEndpointId { + return false + } + if lhs.speaking != rhs.speaking { + return false + } + if lhs.icon != rhs.icon { + return false + } + return true + } +} + +private var fadeImage: UIImage? = { + return generateImage(CGSize(width: 1.0, height: fadeHeight), rotatedContext: { size, context in + let bounds = CGRect(origin: CGPoint(), size: size) + context.clear(bounds) + + let colorsArray = [UIColor(rgb: 0x000000, alpha: 0.0).cgColor, UIColor(rgb: 0x000000, alpha: 0.7).cgColor] as CFArray + var locations: [CGFloat] = [0.0, 1.0] + let gradient = CGGradient(colorsSpace: deviceColorSpace, colors: colorsArray, locations: &locations)! + context.drawLinearGradient(gradient, start: CGPoint(), end: CGPoint(x: 0.0, y: size.height), options: CGGradientDrawingOptions()) + }) +}() + +final class VoiceChatTileItemNode: ASDisplayNode { + private let context: AccountContext + + let contextSourceNode: ContextExtractedContentContainingNode + private let containerNode: ContextControllerSourceNode + private let backgroundNode: ASDisplayNode + var videoNode: GroupVideoNode? + private let fadeNode: ASImageNode + private let titleNode: ImmediateTextNode + private let iconNode: ASImageNode + private var animationNode: VoiceChatMicrophoneNode? + private var highlightNode: ASImageNode + + private var validLayout: CGSize? + var item: VoiceChatTileItem? 
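+    // Caches the most recently applied tile item; update(size:item:transition:) below diffs
+    // against it (and against the previous videoEndpointId) so the video node, title text and
+    // microphone icon are only rebuilt when the corresponding fields actually change.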
+ + private let audioLevelDisposable = MetaDisposable() + + init(context: AccountContext) { + self.context = context + + self.contextSourceNode = ContextExtractedContentContainingNode() + self.containerNode = ContextControllerSourceNode() + + self.backgroundNode = ASDisplayNode() + self.backgroundNode.backgroundColor = panelBackgroundColor + + self.fadeNode = ASImageNode() + self.fadeNode.displaysAsynchronously = false + self.fadeNode.displayWithoutProcessing = true + self.fadeNode.contentMode = .scaleToFill + self.fadeNode.image = fadeImage + + self.titleNode = ImmediateTextNode() + + self.iconNode = ASImageNode() + self.iconNode.displaysAsynchronously = false + self.iconNode.displayWithoutProcessing = true + + self.highlightNode = ASImageNode() + self.highlightNode.contentMode = .scaleToFill + self.highlightNode.image = borderImage?.stretchableImage(withLeftCapWidth: 12, topCapHeight: 12) + self.highlightNode.alpha = 0.0 + + super.init() + + self.clipsToBounds = true + + self.contextSourceNode.contentNode.clipsToBounds = true + self.contextSourceNode.contentNode.cornerRadius = 11.0 + + self.containerNode.addSubnode(self.contextSourceNode) + self.containerNode.targetNodeForActivationProgress = self.contextSourceNode.contentNode + self.addSubnode(self.containerNode) + + self.contextSourceNode.contentNode.addSubnode(self.backgroundNode) + self.contextSourceNode.contentNode.addSubnode(self.fadeNode) + self.contextSourceNode.contentNode.addSubnode(self.titleNode) + self.contextSourceNode.contentNode.addSubnode(self.iconNode) + self.contextSourceNode.contentNode.addSubnode(self.highlightNode) + } + + deinit { + self.audioLevelDisposable.dispose() + } + + override func didLoad() { + super.didLoad() + + self.view.addGestureRecognizer(UITapGestureRecognizer(target: self, action: #selector(self.tap))) + } + + @objc private func tap() { + self.item?.action() + } + + func update(size: CGSize, item: VoiceChatTileItem, transition: ContainedViewLayoutTransition) { + guard self.validLayout != size || self.item != item else { + return + } + + var itemTransition = transition + if self.item != item { + let previousItem = self.item + self.item = item + + if false, let getAudioLevel = item.getAudioLevel { + self.audioLevelDisposable.set((getAudioLevel() + |> deliverOnMainQueue).start(next: { [weak self] value in + guard let strongSelf = self else { + return + } + + let transition: ContainedViewLayoutTransition = .animated(duration: 0.25, curve: .easeInOut) + if value > 0.4 { + transition.updateAlpha(node: strongSelf.highlightNode, alpha: 1.0) + } else { + transition.updateAlpha(node: strongSelf.highlightNode, alpha: 0.0) + } + })) + } + + let transition: ContainedViewLayoutTransition = .animated(duration: 0.25, curve: .easeInOut) + if item.speaking { + transition.updateAlpha(node: self.highlightNode, alpha: 1.0) + } else { + transition.updateAlpha(node: self.highlightNode, alpha: 0.0) + } + + if previousItem?.videoEndpointId != item.videoEndpointId || self.videoNode == nil { + if let current = self.videoNode { + self.videoNode = nil + current.removeFromSupernode() + } + + if let videoNode = item.getVideo() { + itemTransition = .immediate + self.videoNode = videoNode + self.contextSourceNode.contentNode.insertSubnode(videoNode, at: 1) + } + } + + let titleFont = Font.semibold(13.0) + let titleColor = UIColor.white + var titleAttributedString: NSAttributedString? + if let user = item.peer as? 
TelegramUser { + if let firstName = user.firstName, let lastName = user.lastName, !firstName.isEmpty, !lastName.isEmpty { + let string = NSMutableAttributedString() + switch item.nameDisplayOrder { + case .firstLast: + string.append(NSAttributedString(string: firstName, font: titleFont, textColor: titleColor)) + string.append(NSAttributedString(string: " ", font: titleFont, textColor: titleColor)) + string.append(NSAttributedString(string: lastName, font: titleFont, textColor: titleColor)) + case .lastFirst: + string.append(NSAttributedString(string: lastName, font: titleFont, textColor: titleColor)) + string.append(NSAttributedString(string: " ", font: titleFont, textColor: titleColor)) + string.append(NSAttributedString(string: firstName, font: titleFont, textColor: titleColor)) + } + titleAttributedString = string + } else if let firstName = user.firstName, !firstName.isEmpty { + titleAttributedString = NSAttributedString(string: firstName, font: titleFont, textColor: titleColor) + } else if let lastName = user.lastName, !lastName.isEmpty { + titleAttributedString = NSAttributedString(string: lastName, font: titleFont, textColor: titleColor) + } else { + titleAttributedString = NSAttributedString(string: item.strings.User_DeletedAccount, font: titleFont, textColor: titleColor) + } + } else if let group = item.peer as? TelegramGroup { + titleAttributedString = NSAttributedString(string: group.title, font: titleFont, textColor: titleColor) + } else if let channel = item.peer as? TelegramChannel { + titleAttributedString = NSAttributedString(string: channel.title, font: titleFont, textColor: titleColor) + } + self.titleNode.attributedText = titleAttributedString + + if case let .microphone(muted) = item.icon { + let animationNode: VoiceChatMicrophoneNode + if let current = self.animationNode { + animationNode = current + } else { + animationNode = VoiceChatMicrophoneNode() + self.animationNode = animationNode + self.contextSourceNode.contentNode.addSubnode(animationNode) + } + animationNode.alpha = 1.0 + animationNode.update(state: VoiceChatMicrophoneNode.State(muted: muted, filled: true, color: UIColor.white), animated: true) + } else if let animationNode = self.animationNode { + self.animationNode = nil + animationNode.removeFromSupernode() + } + } + + let bounds = CGRect(origin: CGPoint(), size: size) + self.containerNode.frame = bounds + self.contextSourceNode.frame = bounds + self.contextSourceNode.contentNode.frame = bounds + + if let videoNode = self.videoNode { + transition.updateFrame(node: videoNode, frame: bounds) + videoNode.updateLayout(size: size, isLandscape: true, transition: itemTransition) + } + + transition.updateFrame(node: self.backgroundNode, frame: bounds) + transition.updateFrame(node: self.highlightNode, frame: bounds) + transition.updateFrame(node: self.fadeNode, frame: CGRect(x: 0.0, y: size.height - fadeHeight, width: size.width, height: fadeHeight)) + + let titleSize = self.titleNode.updateLayout(CGSize(width: size.width - 50.0, height: size.height)) + self.titleNode.frame = CGRect(origin: CGPoint(x: 11.0, y: size.height - titleSize.height - 8.0), size: titleSize) + + if let animationNode = self.animationNode { + let animationSize = CGSize(width: 36.0, height: 36.0) + animationNode.bounds = CGRect(origin: CGPoint(), size: animationSize) + animationNode.transform = CATransform3DMakeScale(0.66667, 0.66667, 1.0) + transition.updatePosition(node: animationNode, position: CGPoint(x: size.width - 19.0, y: size.height - 15.0)) + } + } + + func animateTransitionIn(from 
sourceNode: ASDisplayNode, containerNode: ASDisplayNode, animate: Bool = true) { + guard let _ = self.item else { + return + } + + if let sourceNode = sourceNode as? VoiceChatFullscreenParticipantItemNode, let _ = sourceNode.item { + let initialAnimate = animate + + var startContainerPosition = sourceNode.view.convert(sourceNode.bounds, to: containerNode.view).center + var animate = initialAnimate +// if startContainerPosition.y > containerNode.frame.height - 238.0 { +// animate = false +// } + + if let videoNode = sourceNode.videoNode { + sourceNode.videoNode = nil + videoNode.alpha = 1.0 + self.videoNode = videoNode + self.contextSourceNode.contentNode.insertSubnode(videoNode, at: 1) + + if animate { +// self.videoContainerNode.layer.animateScale(from: sourceNode.bounds.width / videoSize.width, to: tileSize.width / videoSize.width, duration: 0.2, timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue) +// self.videoContainerNode.layer.animate(from: (tileSize.width / 2.0) as NSNumber, to: videoCornerRadius as NSNumber, keyPath: "cornerRadius", timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue, duration: 0.2, removeOnCompletion: false, completion: { _ in +// }) + } + } + + sourceNode.isHidden = true + Queue.mainQueue().after(0.25) { + sourceNode.isHidden = false + } + + if animate { + let initialPosition = self.contextSourceNode.position + let targetContainerPosition = self.contextSourceNode.view.convert(self.contextSourceNode.bounds, to: containerNode.view).center + + self.contextSourceNode.position = targetContainerPosition + containerNode.addSubnode(self.contextSourceNode) + + self.contextSourceNode.layer.animateScale(from: 0.467, to: 1.0, duration: 0.2, timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue) + self.contextSourceNode.layer.animatePosition(from: startContainerPosition, to: targetContainerPosition, duration: 0.2, timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue, completion: { [weak self] _ in + if let strongSelf = self { + strongSelf.contextSourceNode.position = initialPosition + strongSelf.containerNode.addSubnode(strongSelf.contextSourceNode) + } + }) + + let transition = ContainedViewLayoutTransition.animated(duration: 0.2, curve: .easeInOut) + self.videoNode?.updateLayout(size: self.bounds.size, isLandscape: true, transition: transition) + self.videoNode?.frame = self.bounds + } else if !initialAnimate { + self.videoNode?.updateLayout(size: self.bounds.size, isLandscape: true, transition: .immediate) + self.videoNode?.frame = self.bounds + } + + self.fadeNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.3) + } + } +} + +private class VoiceChatTileHighlightNode: ASDisplayNode { + +} diff --git a/submodules/TelegramUI/Sources/ChatInterfaceStateContextMenus.swift b/submodules/TelegramUI/Sources/ChatInterfaceStateContextMenus.swift index 7d2c72e85a..885d390aca 100644 --- a/submodules/TelegramUI/Sources/ChatInterfaceStateContextMenus.swift +++ b/submodules/TelegramUI/Sources/ChatInterfaceStateContextMenus.swift @@ -550,13 +550,21 @@ func contextMenuForChatPresentationInterfaceState(chatPresentationInterfaceState } else { let copyTextWithEntities = { var messageEntities: [MessageTextEntity]? + var restrictedText: String? for attribute in message.attributes { if let attribute = attribute as? TextEntitiesMessageAttribute { messageEntities = attribute.entities - break + } + if let attribute = attribute as? 
RestrictedContentMessageAttribute { + restrictedText = attribute.platformText(platform: "ios", contentSettings: context.currentContentSettings.with { $0 }) ?? "" } } - storeMessageTextInPasteboard(message.text, entities: messageEntities) + + if let restrictedText = restrictedText { + storeMessageTextInPasteboard(restrictedText, entities: nil) + } else { + storeMessageTextInPasteboard(message.text, entities: messageEntities) + } Queue.mainQueue().after(0.2, { let content: UndoOverlayContent = .copy(text: chatPresentationInterfaceState.strings.Conversation_MessageCopied)