Video Chat Improvements

This commit is contained in:
Ilya Laktyushin 2021-05-19 16:52:43 +04:00
parent 981367c33c
commit 877d7459bf
19 changed files with 1503 additions and 522 deletions

View File

@ -107,8 +107,8 @@ public final class VoiceBlobView: UIView, TGModernConversationInputMicButtonDeco
guard !isAnimating else { return }
isAnimating = true
mediumBlob.layer.animateScale(from: 0.5, to: 1, duration: 0.15, removeOnCompletion: false)
bigBlob.layer.animateScale(from: 0.5, to: 1, duration: 0.15, removeOnCompletion: false)
mediumBlob.layer.animateScale(from: 0.75, to: 1, duration: 0.35, removeOnCompletion: false)
bigBlob.layer.animateScale(from: 0.75, to: 1, duration: 0.35, removeOnCompletion: false)
updateBlobsState()
@ -123,8 +123,8 @@ public final class VoiceBlobView: UIView, TGModernConversationInputMicButtonDeco
guard isAnimating else { return }
isAnimating = false
mediumBlob.layer.animateScale(from: 1.0, to: 0.5, duration: duration, removeOnCompletion: false)
bigBlob.layer.animateScale(from: 1.0, to: 0.5, duration: duration, removeOnCompletion: false)
mediumBlob.layer.animateScale(from: 1.0, to: 0.75, duration: duration, removeOnCompletion: false)
bigBlob.layer.animateScale(from: 1.0, to: 0.75, duration: duration, removeOnCompletion: false)
updateBlobsState()

View File

@ -1390,6 +1390,10 @@ private final class ContextControllerNode: ViewControllerTracingNode, UIScrollVi
} else {
overflowOffset = min(0.0, originalContentFrame.minY - contentTopInset)
contentContainerFrame = originalContentFrame.offsetBy(dx: -contentParentNode.contentRect.minX, dy: -overflowOffset - contentParentNode.contentRect.minY)
if contentContainerFrame.maxX > layout.size.width {
contentContainerFrame = CGRect(origin: CGPoint(x: floor((layout.size.width - contentContainerFrame.width) / 2.0), y: contentContainerFrame.minY), size: contentContainerFrame.size)
}
}
if case let .extracted(source) = self.source, source.centerVertically {

View File

@ -185,7 +185,10 @@
- (void)cleanup
{
[_diskCache cleanup];
TGModernCache *diskCache = _diskCache;
TGDispatchAfter(10.0, dispatch_get_main_queue(), ^{
[diskCache cleanup];
});
[[NSFileManager defaultManager] removeItemAtPath:_fullSizeResultsUrl.path error:nil];
[[NSFileManager defaultManager] removeItemAtPath:_paintingImagesUrl.path error:nil];
@ -991,7 +994,7 @@
+ (NSUInteger)diskMemoryLimit
{
return 64 * 1024 * 1024;
return 512 * 1024 * 1024;
}
+ (NSUInteger)imageSoftMemoryLimit

View File

@ -146,7 +146,6 @@
{
_context = context;
_actionHandle = [[ASHandle alloc] initWithDelegate:self releaseOnMainThread:true];
_standaloneEditingContext = [[TGMediaEditingContext alloc] init];
self.automaticallyManageScrollViewInsets = false;
self.autoManageStatusBarBackground = false;
@ -2182,10 +2181,14 @@
- (TGMediaEditingContext *)editingContext
{
if (_editingContext)
if (_editingContext) {
return _editingContext;
else
} else {
if (_standaloneEditingContext == nil) {
_standaloneEditingContext = [[TGMediaEditingContext alloc] init];
}
return _standaloneEditingContext;
}
}
- (void)doneButtonLongPressed:(UIButton *)sender

View File

@ -40,12 +40,15 @@ final class GroupVideoNode: ASDisplayNode {
self.backdropVideoViewContainer.addSubview(backdropVideoView.view)
self.view.addSubview(self.backdropVideoViewContainer)
let effect: UIVisualEffect
if #available(iOS 13.0, *) {
let backdropEffectView = UIVisualEffectView(effect: UIBlurEffect(style: .systemThinMaterialDark))
effect = UIBlurEffect(style: .systemThinMaterialDark)
} else {
effect = UIBlurEffect(style: .dark)
}
let backdropEffectView = UIVisualEffectView(effect: effect)
self.view.addSubview(backdropEffectView)
self.backdropEffectView = backdropEffectView
} else {
}
}
self.videoViewContainer.addSubview(self.videoView.view)
@ -131,10 +134,15 @@ final class GroupVideoNode: ASDisplayNode {
}
}
var aspectRatio: CGFloat {
return self.videoView.getAspect()
}
func updateLayout(size: CGSize, isLandscape: Bool, transition: ContainedViewLayoutTransition) {
self.validLayout = (size, isLandscape)
transition.updateFrameAsPositionAndBounds(layer: self.videoViewContainer.layer, frame: CGRect(origin: CGPoint(), size: size))
transition.updateFrameAsPositionAndBounds(layer: self.backdropVideoViewContainer.layer, frame: CGRect(origin: CGPoint(), size: size))
let bounds = CGRect(origin: CGPoint(), size: size)
transition.updateFrameAsPositionAndBounds(layer: self.videoViewContainer.layer, frame: bounds)
transition.updateFrameAsPositionAndBounds(layer: self.backdropVideoViewContainer.layer, frame: bounds)
let orientation = self.videoView.getOrientation()
var aspect = self.videoView.getAspect()
@ -194,9 +202,6 @@ final class GroupVideoNode: ASDisplayNode {
let transformScale: CGFloat = rotatedVideoFrame.width / videoSize.width
transition.updateTransformScale(layer: self.videoViewContainer.layer, scale: transformScale)
let transition: ContainedViewLayoutTransition = .immediate
transition.updateTransformRotation(view: self.videoView.view, angle: angle)
if let backdropVideoView = self.backdropVideoView {
rotatedVideoSize = filledSize
var rotatedVideoFrame = CGRect(origin: CGPoint(x: floor((size.width - rotatedVideoSize.width) / 2.0), y: floor((size.height - rotatedVideoSize.height) / 2.0)), size: rotatedVideoSize)
@ -217,11 +222,16 @@ final class GroupVideoNode: ASDisplayNode {
}
if let backdropEffectView = self.backdropEffectView {
transition.updateFrame(view: backdropEffectView, frame: self.bounds)
let maxSide = max(bounds.width, bounds.height)
let squareBounds = CGRect(x: (bounds.width - maxSide) / 2.0, y: (bounds.width - maxSide) / 2.0, width: maxSide, height: maxSide)
transition.updateFrame(view: backdropEffectView, frame: squareBounds)
}
let transition: ContainedViewLayoutTransition = .immediate
transition.updateTransformRotation(view: self.videoView.view, angle: angle)
if let effectView = self.effectView {
transition.updateFrame(view: effectView, frame: self.bounds)
transition.updateFrame(view: effectView, frame: bounds)
}
// TODO: properly fix the issue

View File

@ -657,7 +657,7 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
self.temporaryJoinTimestamp = Int32(CFAbsoluteTimeGetCurrent() + NSTimeIntervalSince1970)
self.isVideoEnabled = accountContext.sharedContext.immediateExperimentalUISettings.demoVideoChats
self.isVideoEnabled = true
self.hasVideo = false
self.hasScreencast = false

View File

@ -61,6 +61,7 @@ final class VoiceChatActionButton: HighlightTrackingButtonNode {
private let containerNode: ASDisplayNode
private let backgroundNode: VoiceChatActionButtonBackgroundNode
private let iconNode: VoiceChatActionButtonIconNode
private let labelContainerNode: ASDisplayNode
let titleLabel: ImmediateTextNode
private let subtitleLabel: ImmediateTextNode
private let buttonTitleLabel: ImmediateTextNode
@ -138,6 +139,7 @@ final class VoiceChatActionButton: HighlightTrackingButtonNode {
self.backgroundNode = VoiceChatActionButtonBackgroundNode()
self.iconNode = VoiceChatActionButtonIconNode(isColored: false)
self.labelContainerNode = ASDisplayNode()
self.titleLabel = ImmediateTextNode()
self.subtitleLabel = ImmediateTextNode()
self.buttonTitleLabel = ImmediateTextNode()
@ -147,8 +149,9 @@ final class VoiceChatActionButton: HighlightTrackingButtonNode {
super.init()
self.addSubnode(self.bottomNode)
self.addSubnode(self.titleLabel)
self.addSubnode(self.subtitleLabel)
self.labelContainerNode.addSubnode(self.titleLabel)
self.labelContainerNode.addSubnode(self.subtitleLabel)
self.addSubnode(self.labelContainerNode)
self.addSubnode(self.containerNode)
self.containerNode.addSubnode(self.backgroundNode)
@ -242,6 +245,8 @@ final class VoiceChatActionButton: HighlightTrackingButtonNode {
let subtitleSize = self.subtitleLabel.updateLayout(CGSize(width: size.width, height: .greatestFiniteMagnitude))
let totalHeight = titleSize.height + subtitleSize.height + 1.0
self.labelContainerNode.frame = CGRect(origin: CGPoint(), size: size)
self.titleLabel.frame = CGRect(origin: CGPoint(x: floor((size.width - titleSize.width) / 2.0), y: floor((size.height - totalHeight) / 2.0) + 84.0), size: titleSize)
self.subtitleLabel.frame = CGRect(origin: CGPoint(x: floor((size.width - subtitleSize.width) / 2.0), y: self.titleLabel.frame.maxY + 1.0), size: subtitleSize)
@ -272,17 +277,19 @@ final class VoiceChatActionButton: HighlightTrackingButtonNode {
transition.updateAlpha(node: self.subtitleLabel, alpha: 0.0)
transition.updateAlpha(layer: self.backgroundNode.maskProgressLayer, alpha: 0.0)
} else {
let transition: ContainedViewLayoutTransition = animated ? .animated(duration: 0.2, curve: .easeInOut) : .immediate
let transition: ContainedViewLayoutTransition = animated ? .animated(duration: 0.4, curve: .spring) : .immediate
if small {
transition.updateTransformScale(node: self.backgroundNode, scale: self.pressing ? smallScale * 0.9 : smallScale, delay: 0.05)
transition.updateTransformScale(node: self.iconNode, scale: self.pressing ? smallIconScale * 0.9 : smallIconScale, delay: 0.05)
transition.updateTransformScale(node: self.backgroundNode, scale: self.pressing ? smallScale * 0.9 : smallScale, delay: 0.0)
transition.updateTransformScale(node: self.iconNode, scale: self.pressing ? smallIconScale * 0.9 : smallIconScale, delay: 0.0)
transition.updateAlpha(node: self.titleLabel, alpha: 0.0)
transition.updateAlpha(node: self.subtitleLabel, alpha: 0.0)
transition.updateSublayerTransformOffset(layer: self.labelContainerNode.layer, offset: CGPoint(x: 0.0, y: -40.0))
} else {
transition.updateTransformScale(node: self.backgroundNode, scale: 1.0, delay: 0.05)
transition.updateTransformScale(node: self.iconNode, scale: self.pressing ? 0.9 : 1.0, delay: 0.05)
transition.updateTransformScale(node: self.backgroundNode, scale: 1.0, delay: 0.0)
transition.updateTransformScale(node: self.iconNode, scale: self.pressing ? 0.9 : 1.0, delay: 0.0)
transition.updateAlpha(node: self.titleLabel, alpha: 1.0, delay: 0.05)
transition.updateAlpha(node: self.subtitleLabel, alpha: 1.0, delay: 0.05)
transition.updateSublayerTransformOffset(layer: self.labelContainerNode.layer, offset: CGPoint())
}
transition.updateAlpha(layer: self.backgroundNode.maskProgressLayer, alpha: 1.0)
}

View File

@ -23,12 +23,12 @@ final class VoiceChatCameraPreviewController: ViewController {
private var animatedIn = false
private let cameraNode: GroupVideoNode
private let shareCamera: (ASDisplayNode) -> Void
private let shareCamera: (ASDisplayNode, Bool) -> Void
private let switchCamera: () -> Void
private var presentationDataDisposable: Disposable?
init(context: AccountContext, cameraNode: GroupVideoNode, shareCamera: @escaping (ASDisplayNode) -> Void, switchCamera: @escaping () -> Void) {
init(context: AccountContext, cameraNode: GroupVideoNode, shareCamera: @escaping (ASDisplayNode, Bool) -> Void, switchCamera: @escaping () -> Void) {
self.context = context
self.cameraNode = cameraNode
self.shareCamera = shareCamera
@ -60,9 +60,9 @@ final class VoiceChatCameraPreviewController: ViewController {
override public func loadDisplayNode() {
self.displayNode = VoiceChatCameraPreviewControllerNode(controller: self, context: self.context, cameraNode: self.cameraNode)
self.controllerNode.shareCamera = { [weak self] in
self.controllerNode.shareCamera = { [weak self] unmuted in
if let strongSelf = self {
strongSelf.shareCamera(strongSelf.cameraNode)
strongSelf.shareCamera(strongSelf.cameraNode, unmuted)
strongSelf.dismiss()
}
}
@ -121,6 +121,10 @@ private class VoiceChatCameraPreviewControllerNode: ViewControllerTracingNode, U
private var broadcastPickerView: UIView?
private let cancelButton: SolidRoundedButtonNode
private let microphoneButton: HighlightTrackingButtonNode
private let microphoneEffectView: UIVisualEffectView
private let microphoneIconNode: VoiceChatMicrophoneNode
private let switchCameraButton: HighlightTrackingButtonNode
private let switchCameraEffectView: UIVisualEffectView
private let switchCameraIconNode: ASImageNode
@ -129,7 +133,7 @@ private class VoiceChatCameraPreviewControllerNode: ViewControllerTracingNode, U
private var applicationStateDisposable: Disposable?
var shareCamera: (() -> Void)?
var shareCamera: ((Bool) -> Void)?
var switchCamera: (() -> Void)?
var dismiss: (() -> Void)?
var cancel: (() -> Void)?
@ -196,6 +200,16 @@ private class VoiceChatCameraPreviewControllerNode: ViewControllerTracingNode, U
self.previewContainerNode.cornerRadius = 11.0
self.previewContainerNode.backgroundColor = .black
self.microphoneButton = HighlightTrackingButtonNode()
self.microphoneButton.isSelected = true
self.microphoneEffectView = UIVisualEffectView(effect: UIBlurEffect(style: .dark))
self.microphoneEffectView.clipsToBounds = true
self.microphoneEffectView.layer.cornerRadius = 24.0
self.microphoneEffectView.isUserInteractionEnabled = false
self.microphoneIconNode = VoiceChatMicrophoneNode()
self.microphoneIconNode.update(state: .init(muted: false, filled: true, color: .white), animated: false)
self.switchCameraButton = HighlightTrackingButtonNode()
self.switchCameraEffectView = UIVisualEffectView(effect: UIBlurEffect(style: .dark))
self.switchCameraEffectView.clipsToBounds = true
@ -234,13 +248,16 @@ private class VoiceChatCameraPreviewControllerNode: ViewControllerTracingNode, U
self.contentContainerNode.addSubnode(self.previewContainerNode)
self.previewContainerNode.addSubnode(self.cameraNode)
self.previewContainerNode.addSubnode(self.microphoneButton)
self.microphoneButton.view.addSubview(self.microphoneEffectView)
self.microphoneButton.addSubnode(self.microphoneIconNode)
self.previewContainerNode.addSubnode(self.switchCameraButton)
self.switchCameraButton.view.addSubview(self.switchCameraEffectView)
self.switchCameraButton.addSubnode(self.switchCameraIconNode)
self.cameraButton.pressed = { [weak self] in
if let strongSelf = self {
strongSelf.shareCamera?()
strongSelf.shareCamera?(strongSelf.microphoneButton.isSelected)
}
}
self.cancelButton.pressed = { [weak self] in
@ -249,6 +266,19 @@ private class VoiceChatCameraPreviewControllerNode: ViewControllerTracingNode, U
}
}
self.microphoneButton.addTarget(self, action: #selector(self.microphonePressed), forControlEvents: .touchUpInside)
self.microphoneButton.highligthedChanged = { [weak self] highlighted in
if let strongSelf = self {
if highlighted {
let transition: ContainedViewLayoutTransition = .animated(duration: 0.3, curve: .spring)
transition.updateSublayerTransformScale(node: strongSelf.microphoneButton, scale: 0.9)
} else {
let transition: ContainedViewLayoutTransition = .animated(duration: 0.5, curve: .spring)
transition.updateSublayerTransformScale(node: strongSelf.microphoneButton, scale: 1.0)
}
}
}
self.switchCameraButton.addTarget(self, action: #selector(self.switchCameraPressed), forControlEvents: .touchUpInside)
self.switchCameraButton.highligthedChanged = { [weak self] highlighted in
if let strongSelf = self {
@ -263,6 +293,11 @@ private class VoiceChatCameraPreviewControllerNode: ViewControllerTracingNode, U
}
}
@objc private func microphonePressed() {
self.microphoneButton.isSelected = !self.microphoneButton.isSelected
self.microphoneIconNode.update(state: .init(muted: !self.microphoneButton.isSelected, filled: true, color: .white), animated: true)
}
@objc private func switchCameraPressed() {
self.switchCamera?()
@ -403,6 +438,12 @@ private class VoiceChatCameraPreviewControllerNode: ViewControllerTracingNode, U
self.cameraNode.frame = CGRect(origin: CGPoint(), size: previewSize)
self.cameraNode.updateLayout(size: previewSize, isLandscape: false, transition: .immediate)
let microphoneFrame = CGRect(x: 16.0, y: previewSize.height - 48.0 - 16.0, width: 48.0, height: 48.0)
transition.updateFrame(node: self.microphoneButton, frame: microphoneFrame)
transition.updateFrame(view: self.microphoneEffectView, frame: CGRect(origin: CGPoint(), size: microphoneFrame.size))
transition.updateFrameAsPositionAndBounds(node: self.microphoneIconNode, frame: CGRect(origin: CGPoint(x: 1.0, y: 0.0), size: microphoneFrame.size).insetBy(dx: 6.0, dy: 6.0))
self.microphoneIconNode.transform = CATransform3DMakeScale(1.2, 1.2, 1.0)
let switchCameraFrame = CGRect(x: previewSize.width - 48.0 - 16.0, y: previewSize.height - 48.0 - 16.0, width: 48.0, height: 48.0)
transition.updateFrame(node: self.switchCameraButton, frame: switchCameraFrame)
transition.updateFrame(view: self.switchCameraEffectView, frame: CGRect(origin: CGPoint(), size: switchCameraFrame.size))

File diff suppressed because it is too large Load Diff

View File

@ -72,6 +72,7 @@ final class VoiceChatFullscreenParticipantItem: ListViewItem {
let context: AccountContext
let peer: Peer
let icon: Icon
let text: VoiceChatParticipantItem.ParticipantText
let color: Color
let isLandscape: Bool
let active: Bool
@ -83,12 +84,13 @@ final class VoiceChatFullscreenParticipantItem: ListViewItem {
public let selectable: Bool = true
public init(presentationData: ItemListPresentationData, nameDisplayOrder: PresentationPersonNameOrder, context: AccountContext, peer: Peer, icon: Icon, color: Color, isLandscape: Bool, active: Bool, getAudioLevel: (() -> Signal<Float, NoError>)?, getVideo: @escaping () -> GroupVideoNode?, action: ((ASDisplayNode?) -> Void)?, contextAction: ((ASDisplayNode, ContextGesture?) -> Void)? = nil, getUpdatingAvatar: @escaping () -> Signal<(TelegramMediaImageRepresentation, Float)?, NoError>) {
public init(presentationData: ItemListPresentationData, nameDisplayOrder: PresentationPersonNameOrder, context: AccountContext, peer: Peer, icon: Icon, text: VoiceChatParticipantItem.ParticipantText, color: Color, isLandscape: Bool, active: Bool, getAudioLevel: (() -> Signal<Float, NoError>)?, getVideo: @escaping () -> GroupVideoNode?, action: ((ASDisplayNode?) -> Void)?, contextAction: ((ASDisplayNode, ContextGesture?) -> Void)? = nil, getUpdatingAvatar: @escaping () -> Signal<(TelegramMediaImageRepresentation, Float)?, NoError>) {
self.presentationData = presentationData
self.nameDisplayOrder = nameDisplayOrder
self.context = context
self.peer = peer
self.icon = icon
self.text = text
self.color = color
self.isLandscape = isLandscape
self.active = active
@ -157,6 +159,7 @@ class VoiceChatFullscreenParticipantItemNode: ItemListRevealOptionsItemNode {
let avatarNode: AvatarNode
let contentWrapperNode: ASDisplayNode
private let titleNode: TextNode
private let statusNode: VoiceChatParticipantStatusNode
private var credibilityIconNode: ASImageNode?
private let actionContainerNode: ASDisplayNode
@ -174,6 +177,7 @@ class VoiceChatFullscreenParticipantItemNode: ItemListRevealOptionsItemNode {
private var layoutParams: (VoiceChatFullscreenParticipantItem, ListViewItemLayoutParams, Bool, Bool)?
private var isExtracted = false
private var animatingExtraction = false
private var animatingSelection = false
private var wavesColor: UIColor?
let videoContainerNode: ASDisplayNode
@ -183,14 +187,14 @@ class VoiceChatFullscreenParticipantItemNode: ItemListRevealOptionsItemNode {
private var videoReadyDelayed = false
private var videoReady = false
private var profileNode: VoiceChatPeerProfileNode?
private var raiseHandTimer: SwiftSignalKit.Timer?
var item: VoiceChatFullscreenParticipantItem? {
return self.layoutParams?.0
}
private var currentTitle: String?
init() {
self.contextSourceNode = ContextExtractedContentContainingNode()
self.containerNode = ContextControllerSourceNode()
@ -232,6 +236,8 @@ class VoiceChatFullscreenParticipantItemNode: ItemListRevealOptionsItemNode {
self.titleNode.contentMode = .left
self.titleNode.contentsScale = UIScreen.main.scale
self.statusNode = VoiceChatParticipantStatusNode()
self.actionContainerNode = ASDisplayNode()
self.actionButtonNode = HighlightableButtonNode()
@ -256,7 +262,7 @@ class VoiceChatFullscreenParticipantItemNode: ItemListRevealOptionsItemNode {
self.containerNode.targetNodeForActivationProgress = self.contextSourceNode.contentNode
self.containerNode.shouldBegin = { [weak self] location in
guard let strongSelf = self else {
guard let _ = self else {
return false
}
return true
@ -268,6 +274,12 @@ class VoiceChatFullscreenParticipantItemNode: ItemListRevealOptionsItemNode {
}
contextAction(strongSelf.contextSourceNode, gesture)
}
self.contextSourceNode.willUpdateIsExtractedToContextPreview = { [weak self] isExtracted, transition in
guard let strongSelf = self, let _ = strongSelf.item else {
return
}
strongSelf.updateIsExtracted(isExtracted, transition: transition)
}
// self.contextSourceNode.willUpdateIsExtractedToContextPreview = { [weak self] isExtracted, transition in
// guard let strongSelf = self, let item = strongSelf.layoutParams?.0 else {
@ -575,10 +587,16 @@ class VoiceChatFullscreenParticipantItemNode: ItemListRevealOptionsItemNode {
self.layoutParams?.0.action?(self.contextSourceNode)
}
func animateTransitionIn(from sourceNode: ASDisplayNode, containerNode: ASDisplayNode, animate: Bool = true) {
func animateTransitionIn(from sourceNode: ASDisplayNode, containerNode: ASDisplayNode, transition: ContainedViewLayoutTransition, animate: Bool = true) {
guard let item = self.item else {
return
}
var duration: Double = 0.2
var timingFunction: String = CAMediaTimingFunctionName.easeInEaseOut.rawValue
if case let .animated(transitionDuration, curve) = transition {
duration = transitionDuration
timingFunction = curve.timingFunction
}
let initialAnimate = animate
if let sourceNode = sourceNode as? VoiceChatTileItemNode {
@ -602,12 +620,11 @@ class VoiceChatFullscreenParticipantItemNode: ItemListRevealOptionsItemNode {
self.videoContainerNode.insertSubnode(videoNode, at: 0)
if animate {
let transition = ContainedViewLayoutTransition.animated(duration: 0.2, curve: .easeInOut)
videoNode.updateLayout(size: videoSize, isLandscape: true, transition: transition)
let scale = sourceNode.bounds.width / videoSize.width
self.videoContainerNode.layer.animateScale(from: sourceNode.bounds.width / videoSize.width, to: tileSize.width / videoSize.width, duration: 0.2, timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue)
self.videoContainerNode.layer.animate(from: backgroundCornerRadius * (1.0 / scale) as NSNumber, to: videoCornerRadius as NSNumber, keyPath: "cornerRadius", timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue, duration: 0.2, removeOnCompletion: false, completion: { _ in
self.videoContainerNode.layer.animateScale(from: sourceNode.bounds.width / videoSize.width, to: tileSize.width / videoSize.width, duration: duration, timingFunction: timingFunction)
self.videoContainerNode.layer.animate(from: backgroundCornerRadius * (1.0 / scale) as NSNumber, to: videoCornerRadius as NSNumber, keyPath: "cornerRadius", timingFunction: timingFunction, duration: duration, removeOnCompletion: false, completion: { _ in
})
self.videoFadeNode.alpha = 1.0
@ -625,7 +642,7 @@ class VoiceChatFullscreenParticipantItemNode: ItemListRevealOptionsItemNode {
self.contextSourceNode.position = targetContainerPosition
containerNode.addSubnode(self.contextSourceNode)
self.contextSourceNode.layer.animatePosition(from: startContainerPosition, to: targetContainerPosition, duration: 0.2, timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue, completion: { [weak self] _ in
self.contextSourceNode.layer.animatePosition(from: startContainerPosition, to: targetContainerPosition, duration: duration, timingFunction: timingFunction, completion: { [weak self] _ in
if let strongSelf = self {
strongSelf.contextSourceNode.position = initialPosition
strongSelf.containerNode.addSubnode(strongSelf.contextSourceNode)
@ -634,16 +651,16 @@ class VoiceChatFullscreenParticipantItemNode: ItemListRevealOptionsItemNode {
if item.active {
self.borderImageNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2)
self.borderImageNode.layer.animateScale(from: 0.001, to: 1.0, duration: 0.2, timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue)
self.borderImageNode.layer.animateScale(from: 0.001, to: 1.0, duration: 0.2, timingFunction: timingFunction)
}
self.backgroundImageNode.layer.animateScale(from: 0.001, to: 1.0, duration: 0.2, timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue)
self.backgroundImageNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.15, timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue)
self.contentWrapperNode.layer.animateScale(from: 0.001, to: 1.0, duration: 0.2, timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue)
self.contentWrapperNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2, timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue)
self.backgroundImageNode.layer.animateScale(from: 0.001, to: 1.0, duration: duration, timingFunction: timingFunction)
self.backgroundImageNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: duration, timingFunction: timingFunction)
self.contentWrapperNode.layer.animateScale(from: 0.001, to: 1.0, duration: duration, timingFunction: timingFunction)
self.contentWrapperNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: duration, timingFunction: timingFunction)
} else if !initialAnimate {
self.contextSourceNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2)
self.contextSourceNode.layer.animateScale(from: 0.0, to: 1.0, duration: 0.2)
self.contextSourceNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: duration, timingFunction: timingFunction)
self.contextSourceNode.layer.animateScale(from: 0.0, to: 1.0, duration: duration, timingFunction: timingFunction)
}
} else if let sourceNode = sourceNode as? VoiceChatParticipantItemNode, let _ = sourceNode.item {
var startContainerPosition = sourceNode.avatarNode.view.convert(sourceNode.avatarNode.bounds, to: containerNode.view).center
@ -662,8 +679,7 @@ class VoiceChatFullscreenParticipantItemNode: ItemListRevealOptionsItemNode {
self.contextSourceNode.position = targetContainerPosition
containerNode.addSubnode(self.contextSourceNode)
let timingFunction = CAMediaTimingFunctionName.easeInEaseOut.rawValue
self.contextSourceNode.layer.animatePosition(from: startContainerPosition, to: targetContainerPosition, duration: 0.2, timingFunction: timingFunction, completion: { [weak self, weak sourceNode] _ in
self.contextSourceNode.layer.animatePosition(from: startContainerPosition, to: targetContainerPosition, duration: duration, timingFunction: timingFunction, completion: { [weak self, weak sourceNode] _ in
if let strongSelf = self {
sourceNode?.avatarNode.alpha = 1.0
strongSelf.contextSourceNode.position = initialPosition
@ -676,18 +692,49 @@ class VoiceChatFullscreenParticipantItemNode: ItemListRevealOptionsItemNode {
self.borderImageNode.layer.animateScale(from: 0.001, to: 1.0, duration: 0.2, timingFunction: timingFunction)
}
self.avatarNode.layer.animateScale(from: 0.8, to: 1.0, duration: 0.2)
self.avatarNode.layer.animateScale(from: 0.8, to: 1.0, duration: duration, timingFunction: timingFunction)
self.backgroundImageNode.layer.animateScale(from: 0.001, to: 1.0, duration: 0.2, timingFunction: timingFunction)
self.backgroundImageNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.15, timingFunction: timingFunction)
self.contentWrapperNode.layer.animateScale(from: 0.001, to: 1.0, duration: 0.2, timingFunction: timingFunction)
self.contentWrapperNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2, timingFunction: timingFunction)
self.backgroundImageNode.layer.animateScale(from: 0.001, to: 1.0, duration: duration, timingFunction: timingFunction)
self.backgroundImageNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: duration, timingFunction: timingFunction)
self.contentWrapperNode.layer.animateScale(from: 0.001, to: 1.0, duration: duration, timingFunction: timingFunction)
self.contentWrapperNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: duration, timingFunction: timingFunction)
}
}
}
private func updateIsExtracted(_ isExtracted: Bool, transition: ContainedViewLayoutTransition) {
guard self.isExtracted != isExtracted, let extractedRect = self.extractedRect, let nonExtractedRect = self.nonExtractedRect, let item = self.item else {
return
}
self.isExtracted = isExtracted
if isExtracted {
let profileNode = VoiceChatPeerProfileNode(context: item.context, size: extractedRect.size, peer: item.peer, text: item.text, customNode: self.videoContainerNode, additionalEntry: .single(nil), requestDismiss: { [weak self] in
self?.contextSourceNode.requestDismiss?()
})
self.profileNode = profileNode
self.contextSourceNode.contentNode.addSubnode(profileNode)
profileNode.animateIn(from: self, targetRect: extractedRect, transition: transition)
self.contextSourceNode.contentNode.customHitTest = { [weak self] point in
if let strongSelf = self, let profileNode = strongSelf.profileNode {
if profileNode.avatarListWrapperNode.frame.contains(point) {
return profileNode.avatarListNode.view
}
}
return nil
}
} else if let profileNode = self.profileNode {
self.profileNode = nil
profileNode.animateOut(to: self, targetRect: nonExtractedRect, transition: transition)
self.contextSourceNode.contentNode.customHitTest = nil
}
}
func asyncLayout() -> (_ item: VoiceChatFullscreenParticipantItem, _ params: ListViewItemLayoutParams, _ first: Bool, _ last: Bool) -> (ListViewItemNodeLayout, (Bool, Bool) -> Void) {
let makeTitleLayout = TextNode.asyncLayout(self.titleNode)
let makeStatusLayout = self.statusNode.asyncLayout()
let currentItem = self.layoutParams?.0
let hasVideo = self.videoNode != nil
@ -760,6 +807,9 @@ class VoiceChatFullscreenParticipantItemNode: ItemListRevealOptionsItemNode {
let constrainedWidth = params.width - 24.0 - 10.0
let (titleLayout, titleApply) = makeTitleLayout(TextNodeLayoutArguments(attributedString: titleAttributedString, backgroundColor: nil, maximumNumberOfLines: 1, truncationType: .end, constrainedSize: CGSize(width: constrainedWidth, height: CGFloat.greatestFiniteMagnitude), alignment: .natural, cutout: nil, insets: UIEdgeInsets()))
let availableWidth = params.availableHeight
let (statusLayout, _) = makeStatusLayout(CGSize(width: availableWidth - 30.0, height: CGFloat.greatestFiniteMagnitude), item.text, true)
let contentSize = tileSize
let insets = UIEdgeInsets(top: 0.0, left: 0.0, bottom: !last ? 6.0 : 0.0, right: 0.0)
@ -769,7 +819,6 @@ class VoiceChatFullscreenParticipantItemNode: ItemListRevealOptionsItemNode {
if let strongSelf = self {
let hadItem = strongSelf.layoutParams?.0 != nil
strongSelf.layoutParams = (item, params, first, last)
strongSelf.currentTitle = titleAttributedString?.string
strongSelf.wavesColor = wavesColor
let videoNode = item.getVideo()
@ -877,7 +926,7 @@ class VoiceChatFullscreenParticipantItemNode: ItemListRevealOptionsItemNode {
transition.updateFrameAsPositionAndBounds(node: strongSelf.avatarNode, frame: avatarFrame)
let blobFrame = avatarFrame.insetBy(dx: -14.0, dy: -14.0)
let blobFrame = avatarFrame.insetBy(dx: -18.0, dy: -18.0)
if let getAudioLevel = item.getAudioLevel {
if !strongSelf.didSetupAudioLevel || currentItem?.peer.id != item.peer.id {
strongSelf.audioLevelView?.frame = blobFrame
@ -902,7 +951,7 @@ class VoiceChatFullscreenParticipantItemNode: ItemListRevealOptionsItemNode {
playbackMaskLayer.frame = maskRect
playbackMaskLayer.fillRule = .evenOdd
let maskPath = UIBezierPath()
maskPath.append(UIBezierPath(roundedRect: maskRect.insetBy(dx: 14, dy: 14), cornerRadius: 22))
maskPath.append(UIBezierPath(roundedRect: maskRect.insetBy(dx: 18, dy: 18), cornerRadius: 22))
maskPath.append(UIBezierPath(rect: maskRect))
playbackMaskLayer.path = maskPath.cgPath
audioLevelView.layer.mask = playbackMaskLayer
@ -912,6 +961,10 @@ class VoiceChatFullscreenParticipantItemNode: ItemListRevealOptionsItemNode {
strongSelf.audioLevelView = audioLevelView
strongSelf.offsetContainerNode.view.insertSubview(audioLevelView, at: 0)
if let item = strongSelf.item, strongSelf.videoNode != nil || item.active {
audioLevelView.alpha = 0.0
}
}
let level = min(1.0, max(0.0, CGFloat(value)))
@ -926,13 +979,14 @@ class VoiceChatFullscreenParticipantItemNode: ItemListRevealOptionsItemNode {
audioLevelView.setColor(wavesColor, animated: true)
}
} else {
audioLevelView.stopAnimating(duration: 0.5)
avatarScale = 1.0
}
if !strongSelf.animatingSelection {
let transition: ContainedViewLayoutTransition = .animated(duration: 0.15, curve: .easeInOut)
transition.updateTransformScale(node: strongSelf.avatarNode, scale: strongSelf.isExtracted ? 1.0 : avatarScale, beginWithCurrentState: true)
}
}
}))
}
} else if let audioLevelView = strongSelf.audioLevelView {
@ -1073,19 +1127,27 @@ class VoiceChatFullscreenParticipantItemNode: ItemListRevealOptionsItemNode {
strongSelf.videoContainerNode.layer.animateScale(from: videoContainerScale, to: 0.001, duration: 0.2)
strongSelf.avatarNode.layer.animateScale(from: 0.0, to: 1.0, duration: 0.2)
strongSelf.videoContainerNode.layer.animatePosition(from: CGPoint(), to: CGPoint(x: 0.0, y: -9.0), duration: 0.2, additive: true)
strongSelf.audioLevelView?.layer.animateScale(from: 0.0, to: 1.0, duration: 0.2)
}
transition.updateAlpha(node: videoNode, alpha: 0.0)
transition.updateAlpha(node: strongSelf.videoFadeNode, alpha: 0.0)
transition.updateAlpha(node: strongSelf.avatarNode, alpha: 1.0)
if let audioLevelView = strongSelf.audioLevelView {
transition.updateAlpha(layer: audioLevelView.layer, alpha: 1.0)
}
} else {
if !strongSelf.avatarNode.alpha.isZero {
strongSelf.videoContainerNode.layer.animateScale(from: 0.001, to: videoContainerScale, duration: 0.2)
strongSelf.avatarNode.layer.animateScale(from: 1.0, to: 0.001, duration: 0.2)
strongSelf.audioLevelView?.layer.animateScale(from: 1.0, to: 0.001, duration: 0.2)
strongSelf.videoContainerNode.layer.animatePosition(from: CGPoint(x: 0.0, y: -9.0), to: CGPoint(), duration: 0.2, additive: true)
}
transition.updateAlpha(node: videoNode, alpha: 1.0)
transition.updateAlpha(node: strongSelf.videoFadeNode, alpha: 1.0)
transition.updateAlpha(node: strongSelf.avatarNode, alpha: 0.0)
if let audioLevelView = strongSelf.audioLevelView {
transition.updateAlpha(layer: audioLevelView.layer, alpha: 0.0)
}
}
} else {
if item.active {

View File

@ -169,13 +169,13 @@ public final class VoiceChatOverlayController: ViewController {
if reclaim {
self.dismissed = true
let targetPosition = CGPoint(x: layout.size.width / 2.0, y: layout.size.height - layout.intrinsicInsets.bottom - 205.0 / 2.0 - 2.0)
let targetPosition = CGPoint(x: layout.size.width / 2.0, y: layout.size.height - layout.intrinsicInsets.bottom - bottomAreaHeight / 2.0 - 3.0)
if self.isSlidOffscreen {
self.isSlidOffscreen = false
self.isButtonHidden = true
actionButton.layer.sublayerTransform = CATransform3DIdentity
actionButton.update(snap: false, animated: false)
actionButton.position = CGPoint(x: targetPosition.x, y: 205.0 / 2.0)
actionButton.position = CGPoint(x: targetPosition.x, y: bottomAreaHeight / 2.0)
leftButton.isHidden = false
rightButton.isHidden = false
@ -191,7 +191,7 @@ public final class VoiceChatOverlayController: ViewController {
actionButton.layer.removeAllAnimations()
actionButton.layer.sublayerTransform = CATransform3DIdentity
actionButton.update(snap: false, animated: false)
actionButton.position = CGPoint(x: targetPosition.x, y: 205.0 / 2.0)
actionButton.position = CGPoint(x: targetPosition.x, y: bottomAreaHeight / 2.0)
leftButton.isHidden = false
rightButton.isHidden = false

View File

@ -138,7 +138,7 @@ private let accentColor: UIColor = UIColor(rgb: 0x007aff)
private let constructiveColor: UIColor = UIColor(rgb: 0x34c759)
private let destructiveColor: UIColor = UIColor(rgb: 0xff3b30)
private class VoiceChatParticipantStatusNode: ASDisplayNode {
class VoiceChatParticipantStatusNode: ASDisplayNode {
private var iconNodes: [ASImageNode]
private let textNode: TextNode
@ -156,10 +156,10 @@ private class VoiceChatParticipantStatusNode: ASDisplayNode {
self.addSubnode(self.textNode)
}
func asyncLayout() -> (_ size: CGSize, _ text: VoiceChatParticipantItem.ParticipantText, _ transparent: Bool) -> (CGSize, () -> Void) {
func asyncLayout() -> (_ size: CGSize, _ text: VoiceChatParticipantItem.ParticipantText, _ expanded: Bool) -> (CGSize, () -> Void) {
let makeTextLayout = TextNode.asyncLayout(self.textNode)
return { size, text, transparent in
return { size, text, expanded in
let statusFont = Font.regular(14.0)
var attributedString: NSAttributedString?
@ -184,9 +184,6 @@ private class VoiceChatParticipantStatusNode: ASDisplayNode {
case .destructive:
textColorValue = destructiveColor
}
if transparent {
textColorValue = UIColor(rgb: 0xffffff, alpha: 0.65)
}
color = textColorValue
attributedString = NSAttributedString(string: text, font: statusFont, textColor: textColorValue)
default:
@ -207,7 +204,7 @@ private class VoiceChatParticipantStatusNode: ASDisplayNode {
icons.append(image)
}
let (textLayout, textApply) = makeTextLayout(TextNodeLayoutArguments(attributedString: attributedString, backgroundColor: nil, maximumNumberOfLines: 1, truncationType: .end, constrainedSize: CGSize(width: size.width - (iconSize.width + spacing) * CGFloat(icons.count), height: CGFloat.greatestFiniteMagnitude), alignment: .natural, cutout: nil, insets: UIEdgeInsets()))
let (textLayout, textApply) = makeTextLayout(TextNodeLayoutArguments(attributedString: attributedString, backgroundColor: nil, maximumNumberOfLines: expanded ? 4 : 1, truncationType: .end, constrainedSize: CGSize(width: size.width - (iconSize.width + spacing) * CGFloat(icons.count), height: CGFloat.greatestFiniteMagnitude), alignment: .natural, cutout: nil, insets: UIEdgeInsets()))
var contentSize = textLayout.size
contentSize.width += (iconSize.width + spacing) * CGFloat(icons.count)
@ -388,7 +385,7 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode {
strongSelf.isExtracted = isExtracted
let inset: CGFloat = 12.0
let inset: CGFloat = 0.0
if isExtracted {
strongSelf.contextSourceNode.contentNode.customHitTest = { [weak self] point in
if let strongSelf = self {
@ -492,8 +489,6 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode {
strongSelf.avatarNode.isHidden = true
avatarListWrapperNode.contentNode.addSubnode(transitionNode)
strongSelf.avatarTransitionNode = transitionNode
let avatarListContainerNode = ASDisplayNode()
@ -503,8 +498,7 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode {
avatarListContainerNode.cornerRadius = targetRect.width / 2.0
avatarListWrapperNode.layer.animateSpring(from: initialScale as NSNumber, to: 1.0 as NSNumber, keyPath: "transform.scale", duration: springDuration, initialVelocity: 0.0, damping: springDamping)
avatarListWrapperNode.layer.animateSpring(from: NSValue(cgPoint: avatarInitialRect.center), to: NSValue(cgPoint: avatarListWrapperNode.position), keyPath: "position", duration: springDuration, initialVelocity: 0.0, damping: springDamping, completion: { [weak self] _ in
})
avatarListWrapperNode.layer.animateSpring(from: NSValue(cgPoint: avatarInitialRect.center), to: NSValue(cgPoint: avatarListWrapperNode.position), keyPath: "position", duration: springDuration, initialVelocity: 0.0, damping: springDamping)
radiusTransition.updateCornerRadius(node: avatarListContainerNode, cornerRadius: 0.0)
@ -576,10 +570,10 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode {
alphaTransition.updateAlpha(node: strongSelf.actionContainerNode, alpha: isExtracted ? 0.0 : 1.0, delay: isExtracted ? 0.0 : 0.1)
let offsetInitialSublayerTransform = strongSelf.offsetContainerNode.layer.sublayerTransform
strongSelf.offsetContainerNode.layer.sublayerTransform = CATransform3DMakeTranslation(isExtracted ? -33 : 0.0, isExtracted ? extractedVerticalOffset : 0.0, 0.0)
strongSelf.offsetContainerNode.layer.sublayerTransform = CATransform3DMakeTranslation(isExtracted ? -43 : 0.0, isExtracted ? extractedVerticalOffset : 0.0, 0.0)
let actionInitialSublayerTransform = strongSelf.actionContainerNode.layer.sublayerTransform
strongSelf.actionContainerNode.layer.sublayerTransform = CATransform3DMakeTranslation(isExtracted ? 21.0 : 0.0, 0.0, 0.0)
strongSelf.actionContainerNode.layer.sublayerTransform = CATransform3DMakeTranslation(isExtracted ? 43.0 : 0.0, 0.0, 0.0)
let initialBackgroundPosition = strongSelf.backgroundImageNode.position
strongSelf.backgroundImageNode.layer.position = rect.center
@ -636,7 +630,7 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode {
transition.updateAlpha(node: strongSelf.actionContainerNode, alpha: isExtracted ? 0.0 : 1.0)
transition.updateSublayerTransformOffset(layer: strongSelf.offsetContainerNode.layer, offset: CGPoint(x: isExtracted ? inset : 0.0, y: isExtracted ? extractedVerticalOffset : 0.0))
transition.updateSublayerTransformOffset(layer: strongSelf.actionContainerNode.layer, offset: CGPoint(x: isExtracted ? -24.0 : 0.0, y: 0.0))
transition.updateSublayerTransformOffset(layer: strongSelf.actionContainerNode.layer, offset: CGPoint(x: isExtracted ? -inset * 2.0 : 0.0, y: 0.0))
transition.updateAlpha(node: strongSelf.backgroundImageNode, alpha: isExtracted ? 1.0 : 0.0, completion: { _ in
if !isExtracted {
@ -659,10 +653,16 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode {
self.layoutParams?.0.action?(self.contextSourceNode)
}
func animateTransitionIn(from sourceNode: ASDisplayNode, containerNode: ASDisplayNode) {
func animateTransitionIn(from sourceNode: ASDisplayNode, containerNode: ASDisplayNode, transition: ContainedViewLayoutTransition) {
guard let _ = self.item, let sourceNode = sourceNode as? VoiceChatFullscreenParticipantItemNode, let _ = sourceNode.item else {
return
}
var duration: Double = 0.2
var timingFunction: String = CAMediaTimingFunctionName.easeInEaseOut.rawValue
if case let .animated(transitionDuration, curve) = transition {
duration = transitionDuration
timingFunction = curve.timingFunction
}
let startContainerAvatarPosition = sourceNode.avatarNode.view.convert(sourceNode.avatarNode.bounds, to: containerNode.view).center
var animate = true
@ -673,13 +673,13 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode {
sourceNode.avatarNode.alpha = 0.0
let initialAvatarPosition = self.avatarNode.position
let targetContainerAvatarPosition = self.avatarNode.view.convert(self.avatarNode.bounds, to: containerNode.view).center
let initialBackgroundPosition = sourceNode.backgroundImageNode.position
let initialContentPosition = sourceNode.contentWrapperNode.position
let startContainerBackgroundPosition = sourceNode.backgroundImageNode.view.convert(sourceNode.backgroundImageNode.bounds, to: containerNode.view).center
let startContainerContentPosition = sourceNode.contentWrapperNode.view.convert(sourceNode.contentWrapperNode.bounds, to: containerNode.view).center
let initialBackgroundPosition = sourceNode.backgroundImageNode.position
let initialContentPosition = sourceNode.contentWrapperNode.position
let targetContainerAvatarPosition = self.avatarNode.view.convert(self.avatarNode.bounds, to: containerNode.view).center
sourceNode.backgroundImageNode.position = targetContainerAvatarPosition
sourceNode.contentWrapperNode.position = targetContainerAvatarPosition
@ -688,8 +688,7 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode {
sourceNode.borderImageNode.alpha = 0.0
let timingFunction = CAMediaTimingFunctionName.easeInEaseOut.rawValue
sourceNode.backgroundImageNode.layer.animatePosition(from: startContainerBackgroundPosition, to: targetContainerAvatarPosition, duration: 0.2, timingFunction: timingFunction, completion: { [weak sourceNode] _ in
sourceNode.backgroundImageNode.layer.animatePosition(from: startContainerBackgroundPosition, to: targetContainerAvatarPosition, duration: duration, timingFunction: timingFunction, completion: { [weak sourceNode] _ in
if let sourceNode = sourceNode {
sourceNode.backgroundImageNode.alpha = 1.0
sourceNode.borderImageNode.alpha = 1.0
@ -698,7 +697,7 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode {
}
})
sourceNode.contentWrapperNode.layer.animatePosition(from: startContainerContentPosition, to: targetContainerAvatarPosition, duration: 0.2, timingFunction: timingFunction, completion: { [weak sourceNode] _ in
sourceNode.contentWrapperNode.layer.animatePosition(from: startContainerContentPosition, to: targetContainerAvatarPosition, duration: duration, timingFunction: timingFunction, completion: { [weak sourceNode] _ in
if let sourceNode = sourceNode {
sourceNode.avatarNode.alpha = 1.0
sourceNode.contentWrapperNode.position = initialContentPosition
@ -709,19 +708,18 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode {
self.avatarNode.position = targetContainerAvatarPosition
containerNode.addSubnode(self.avatarNode)
self.avatarNode.layer.animateScale(from: 1.25, to: 1.0, duration: 0.2, timingFunction: timingFunction)
self.avatarNode.layer.animatePosition(from: startContainerAvatarPosition, to: targetContainerAvatarPosition, duration: 0.2, timingFunction: timingFunction, completion: { [weak self] _ in
self.avatarNode.layer.animateScale(from: 1.25, to: 1.0, duration: duration, timingFunction: timingFunction)
self.avatarNode.layer.animatePosition(from: startContainerAvatarPosition, to: targetContainerAvatarPosition, duration: duration, timingFunction: timingFunction, completion: { [weak self] _ in
if let strongSelf = self {
strongSelf.avatarNode.position = initialAvatarPosition
strongSelf.offsetContainerNode.addSubnode(strongSelf.avatarNode)
}
})
sourceNode.backgroundImageNode.layer.animateScale(from: 1.0, to: 0.001, duration: 0.25, timingFunction: timingFunction)
sourceNode.backgroundImageNode.layer.animateAlpha(from: sourceNode.backgroundImageNode.alpha, to: 0.0, duration: 0.35, timingFunction: timingFunction)
sourceNode.contentWrapperNode.layer.animateScale(from: 1.0, to: 0.001, duration: 0.25, timingFunction: timingFunction)
sourceNode.contentWrapperNode.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.25, timingFunction: timingFunction)
sourceNode.backgroundImageNode.layer.animateScale(from: 1.0, to: 0.001, duration: duration, timingFunction: timingFunction)
sourceNode.backgroundImageNode.layer.animateAlpha(from: sourceNode.backgroundImageNode.alpha, to: 0.0, duration: duration, timingFunction: timingFunction)
sourceNode.contentWrapperNode.layer.animateScale(from: 1.0, to: 0.001, duration: duration, timingFunction: timingFunction)
sourceNode.contentWrapperNode.layer.animateAlpha(from: 1.0, to: 0.0, duration: duration, timingFunction: timingFunction)
}
}
@ -819,7 +817,7 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode {
let (titleLayout, titleApply) = makeTitleLayout(TextNodeLayoutArguments(attributedString: titleAttributedString, backgroundColor: nil, maximumNumberOfLines: 1, truncationType: .end, constrainedSize: CGSize(width: constrainedWidth, height: CGFloat.greatestFiniteMagnitude), alignment: .natural, cutout: nil, insets: UIEdgeInsets()))
let (statusLayout, statusApply) = makeStatusLayout(CGSize(width: params.width - leftInset - 8.0 - rightInset - 30.0, height: CGFloat.greatestFiniteMagnitude), item.text, false)
let (expandedStatusLayout, expandedStatusApply) = makeExpandedStatusLayout(CGSize(width: params.width - leftInset - 8.0 - rightInset - expandedRightInset, height: CGFloat.greatestFiniteMagnitude), item.expandedText ?? item.text, false)
let (expandedStatusLayout, expandedStatusApply) = makeExpandedStatusLayout(CGSize(width: params.width - leftInset - 8.0 - rightInset - expandedRightInset, height: CGFloat.greatestFiniteMagnitude), item.expandedText ?? item.text, true)
let titleSpacing: CGFloat = statusLayout.height == 0.0 ? 0.0 : 1.0
@ -861,7 +859,7 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode {
animationFrame = CGRect(x: params.width - animationSize.width - 6.0 - params.rightInset, y: floor((layout.contentSize.height - animationSize.height) / 2.0) + 1.0, width: animationSize.width, height: animationSize.height)
titleFrame = CGRect(origin: CGPoint(x: leftInset, y: verticalInset + verticalOffset), size: titleLayout.size)
var extractedRect = CGRect(origin: CGPoint(), size: layout.contentSize).insetBy(dx: 16.0 + params.leftInset, dy: 0.0)
var extractedRect = CGRect(origin: CGPoint(), size: layout.contentSize).insetBy(dx: params.leftInset, dy: 0.0)
var extractedHeight = extractedRect.height + expandedStatusLayout.height - statusLayout.height
var extractedVerticalOffset: CGFloat = 0.0
if item.peer.smallProfileImage != nil {
@ -996,7 +994,7 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode {
return
}
if false, strongSelf.audioLevelView == nil, value > 0.0 {
if strongSelf.audioLevelView == nil, value > 0.0 {
let audioLevelView = VoiceBlobView(
frame: blobFrame,
maxLevel: 1.5,
@ -1034,7 +1032,6 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode {
audioLevelView.setColor(wavesColor, animated: true)
}
} else {
audioLevelView.stopAnimating(duration: 0.5)
avatarScale = 1.0
}

View File

@ -1,8 +1,341 @@
//
// VoiceChatPeerProfileNode.swift
// _idx_TelegramCallsUI_5BDA0798_ios_min9.0
//
// Created by Ilya Laktyushin on 11.05.2021.
//
import Foundation
import UIKit
import Display
import AsyncDisplayKit
import SwiftSignalKit
import Postbox
import TelegramCore
import SyncCore
import TelegramPresentationData
import TelegramUIPreferences
import PresentationDataUtils
import AvatarNode
import TelegramStringFormatting
import ContextUI
import AccountContext
import LegacyComponents
import PeerInfoAvatarListNode
private let backgroundCornerRadius: CGFloat = 14.0
final class VoiceChatPeerProfileNode: ASDisplayNode {
private let context: AccountContext
private let size: CGSize
private var peer: Peer
private var text: VoiceChatParticipantItem.ParticipantText
private let customNode: ASDisplayNode?
private let additionalEntry: Signal<(TelegramMediaImageRepresentation, Float)?, NoError>
private let backgroundImageNode: ASImageNode
private let avatarListContainerNode: ASDisplayNode
let avatarListWrapperNode: PinchSourceContainerNode
let avatarListNode: PeerInfoAvatarListContainerNode
private var videoFadeNode: ASImageNode
private let infoNode: ASDisplayNode
private let titleNode: ImmediateTextNode
private let statusNode: VoiceChatParticipantStatusNode
private var videoNode: GroupVideoNode?
private var appeared = false
/// Builds the expanded peer-profile card shown in a voice chat.
///
/// Constructs the full node hierarchy up front (background, pinch-to-zoom
/// avatar gallery, fading video overlay, title/status info) and wires the
/// pinch-gesture lifecycle callbacks. Ends by running an initial,
/// non-animated layout pass via `updateInfo(size:animate:)`.
///
/// - Parameters:
///   - context: Account context used for presentation data and to present
///     the pinch overlay on the main window.
///   - size: Initial card size, stored and reused as the fixed layout width.
///   - peer: The peer whose profile is displayed; also fed to the avatar list.
///   - text: Status text rendered below the title.
///   - customNode: Optional node forwarded to the avatar list
///     (semantics defined by `PeerInfoAvatarListContainerNode` — not visible here).
///   - additionalEntry: Extra avatar-list entry signal, forwarded as-is.
///   - requestDismiss: Invoked when the user taps the center of the avatar list.
init(context: AccountContext, size: CGSize, peer: Peer, text: VoiceChatParticipantItem.ParticipantText, customNode: ASDisplayNode? = nil, additionalEntry: Signal<(TelegramMediaImageRepresentation, Float)?, NoError>, requestDismiss: (() -> Void)?) {
    self.context = context
    self.size = size
    self.peer = peer
    self.text = text
    self.customNode = customNode
    self.additionalEntry = additionalEntry

    // Rounded background card; image content is set later (see animateIn),
    // so rendering is kept synchronous and unprocessed.
    self.backgroundImageNode = ASImageNode()
    self.backgroundImageNode.clipsToBounds = true
    self.backgroundImageNode.displaysAsynchronously = false
    self.backgroundImageNode.displayWithoutProcessing = true

    // Gradient-fade overlay placed over the bottom of a video tile during transitions.
    self.videoFadeNode = ASImageNode()
    self.videoFadeNode.displaysAsynchronously = false
    self.videoFadeNode.contentMode = .scaleToFill

    // Container clipped to a circle/rounded rect during the expand transition.
    self.avatarListContainerNode = ASDisplayNode()
    self.avatarListContainerNode.clipsToBounds = true

    // Pinch-to-zoom wrapper around the avatar gallery.
    self.avatarListWrapperNode = PinchSourceContainerNode()
    self.avatarListWrapperNode.clipsToBounds = true
    self.avatarListWrapperNode.cornerRadius = backgroundCornerRadius

    // Swipeable avatar gallery; center tap dismisses the whole profile card.
    self.avatarListNode = PeerInfoAvatarListContainerNode(context: context)
    self.avatarListNode.backgroundColor = .clear
    self.avatarListNode.peer = peer
    self.avatarListNode.firstFullSizeOnly = true
    self.avatarListNode.offsetLocation = true
    self.avatarListNode.customCenterTapAction = {
        requestDismiss?()
    }

    // Title + status live in a single info container anchored to the bottom.
    self.infoNode = ASDisplayNode()

    self.titleNode = ImmediateTextNode()
    self.titleNode.isUserInteractionEnabled = false
    self.titleNode.contentMode = .left
    self.titleNode.contentsScale = UIScreen.main.scale

    self.statusNode = VoiceChatParticipantStatusNode()
    self.statusNode.isUserInteractionEnabled = false

    super.init()

    self.clipsToBounds = true

    // Z-order matters: background at the back, then info, then the video fade,
    // with the avatar gallery on top.
    self.addSubnode(self.backgroundImageNode)
    self.addSubnode(self.infoNode)
    self.addSubnode(self.videoFadeNode)
    self.addSubnode(self.avatarListWrapperNode)
    self.infoNode.addSubnode(self.titleNode)
    self.infoNode.addSubnode(self.statusNode)

    self.avatarListContainerNode.addSubnode(self.avatarListNode)
    self.avatarListContainerNode.addSubnode(self.avatarListNode.controlsClippingOffsetNode)
    self.avatarListWrapperNode.contentNode.addSubnode(self.avatarListContainerNode)

    // Pinch began: hide the gallery controls and present the zoom overlay
    // over the whole screen. `self` is captured weakly to avoid a retain
    // cycle through the stored closure.
    self.avatarListWrapperNode.activate = { [weak self] sourceNode in
        guard let strongSelf = self else {
            return
        }
        strongSelf.avatarListNode.controlsContainerNode.alpha = 0.0
        let pinchController = PinchController(sourceNode: sourceNode, getContentAreaInScreenSpace: {
            return UIScreen.main.bounds
        })
        context.sharedContext.mainWindow?.presentInGlobalOverlay(pinchController)
    }
    // Pinch releasing: animate the corner radius back on while the content settles.
    self.avatarListWrapperNode.deactivated = { [weak self] in
        guard let strongSelf = self else {
            return
        }
        strongSelf.avatarListWrapperNode.contentNode.layer.animate(from: 0.0 as NSNumber, to: backgroundCornerRadius as NSNumber, keyPath: "cornerRadius", timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue, duration: 0.3, completion: { _ in
        })
    }
    // Pinch fully dismissed: fade the gallery controls back in.
    self.avatarListWrapperNode.animatedOut = { [weak self] in
        guard let strongSelf = self else {
            return
        }
        strongSelf.avatarListNode.controlsContainerNode.alpha = 1.0
        strongSelf.avatarListNode.controlsContainerNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.25)
    }

    // Initial synchronous layout of the title/status block.
    self.updateInfo(size: size, animate: false)
}
/// Re-renders the title and status text and pins the info block to the
/// bottom edge of the given size, optionally spring-animating the move.
///
/// The title is derived from the peer: for users it honors the current
/// `nameDisplayOrder` (first/last vs. last/first) and falls back to the
/// "Deleted Account" string; for groups and channels it is the title.
///
/// - Parameters:
///   - size: Target size of the card; only its height is used for
///     positioning — text widths use the stored `self.size.width`.
///     NOTE(review): the mixed use of `size` vs. `self.size` looks
///     intentional (fixed width, animated height) — confirm with callers.
///   - animate: When true, springs `infoNode` from its current frame to the
///     new one; the first animated call (before `appeared` is set) uses a
///     softer, longer spring.
func updateInfo(size: CGSize, animate: Bool) {
    let presentationData = self.context.sharedContext.currentPresentationData.with { $0 }

    let titleFont = Font.regular(17.0)
    let titleColor = UIColor.white
    var titleAttributedString: NSAttributedString?
    if let user = self.peer as? TelegramUser {
        if let firstName = user.firstName, let lastName = user.lastName, !firstName.isEmpty, !lastName.isEmpty {
            let string = NSMutableAttributedString()
            // Respect the user's configured name ordering.
            switch presentationData.nameDisplayOrder {
                case .firstLast:
                    string.append(NSAttributedString(string: firstName, font: titleFont, textColor: titleColor))
                    string.append(NSAttributedString(string: " ", font: titleFont, textColor: titleColor))
                    string.append(NSAttributedString(string: lastName, font: titleFont, textColor: titleColor))
                case .lastFirst:
                    string.append(NSAttributedString(string: lastName, font: titleFont, textColor: titleColor))
                    string.append(NSAttributedString(string: " ", font: titleFont, textColor: titleColor))
                    string.append(NSAttributedString(string: firstName, font: titleFont, textColor: titleColor))
            }
            titleAttributedString = string
        } else if let firstName = user.firstName, !firstName.isEmpty {
            titleAttributedString = NSAttributedString(string: firstName, font: titleFont, textColor: titleColor)
        } else if let lastName = user.lastName, !lastName.isEmpty {
            titleAttributedString = NSAttributedString(string: lastName, font: titleFont, textColor: titleColor)
        } else {
            // A user with no name parts is a deleted account.
            titleAttributedString = NSAttributedString(string: presentationData.strings.User_DeletedAccount, font: titleFont, textColor: titleColor)
        }
    } else if let group = peer as? TelegramGroup {
        titleAttributedString = NSAttributedString(string: group.title, font: titleFont, textColor: titleColor)
    } else if let channel = peer as? TelegramChannel {
        titleAttributedString = NSAttributedString(string: channel.title, font: titleFont, textColor: titleColor)
    }
    self.titleNode.attributedText = titleAttributedString

    // 24.0 = 12pt horizontal padding on each side (title/status start at x = 14.0).
    let titleSize = self.titleNode.updateLayout(CGSize(width: self.size.width - 24.0, height: size.height))

    let makeStatusLayout = self.statusNode.asyncLayout()
    // `true` requests the expanded (multi-line) status layout.
    let (statusLayout, statusApply) = makeStatusLayout(CGSize(width: self.size.width - 24.0, height: CGFloat.greatestFiniteMagnitude), self.text, true)
    let _ = statusApply()

    self.titleNode.frame = CGRect(origin: CGPoint(x: 14.0, y: 0.0), size: titleSize)
    self.statusNode.frame = CGRect(origin: CGPoint(x: 14.0, y: titleSize.height + 3.0), size: statusLayout)

    // Title + 3pt gap + status + 8pt bottom padding, anchored to the bottom edge.
    let totalHeight = titleSize.height + statusLayout.height + 3.0 + 8.0
    let infoFrame = CGRect(x: 0.0, y: size.height - totalHeight, width: self.size.width, height: totalHeight)

    if animate {
        // First appearance uses a softer spring; subsequent updates a stiff,
        // effectively critically-damped one.
        let springDuration: Double = !self.appeared ? 0.42 : 0.3
        let springDamping: CGFloat = !self.appeared ? 104.0 : 1000.0

        // Set the final model values first, then animate from the captured
        // previous position/bounds so the layer lands exactly on the new frame.
        let initialInfoPosition = self.infoNode.position
        self.infoNode.layer.position = infoFrame.center
        let initialInfoBounds = self.infoNode.bounds
        self.infoNode.layer.bounds = CGRect(origin: CGPoint(), size: infoFrame.size)

        self.infoNode.layer.animateSpring(from: NSValue(cgPoint: initialInfoPosition), to: NSValue(cgPoint: self.infoNode.position), keyPath: "position", duration: springDuration, delay: 0.0, initialVelocity: 0.0, damping: springDamping)
        self.infoNode.layer.animateSpring(from: NSValue(cgRect: initialInfoBounds), to: NSValue(cgRect: self.infoNode.bounds), keyPath: "bounds", duration: springDuration, initialVelocity: 0.0, damping: springDamping)
    } else {
        self.infoNode.frame = infoFrame
    }
}
/// Expands the profile card out of a participant tile.
///
/// When `sourceNode` is a `VoiceChatTileItemNode`, this spring-animates the
/// avatar gallery from the tile's rect to `targetRect`, re-parents the
/// tile's video container into this node (and, on completion, into the
/// current avatar item), cross-fades a snapshot of the tile's info row, and
/// grows the rounded background underneath. The fullscreen-item branch is
/// currently a stub (empty body).
///
/// - Parameters:
///   - sourceNode: The tile the card expands from; only
///     `VoiceChatTileItemNode` is handled.
///   - targetRect: Final card rect; its width also defines the (square)
///     avatar gallery size.
///   - transition: Drives the video/snapshot/fade frame changes; the
///     gallery and background use fixed springs (0.42s, damping 104).
func animateIn(from sourceNode: ASDisplayNode, targetRect: CGRect, transition: ContainedViewLayoutTransition) {
    let radiusTransition = ContainedViewLayoutTransition.animated(duration: 0.2, curve: .easeInOut)
    let springDuration: Double = 0.42
    let springDamping: CGFloat = 104.0

    if let sourceNode = sourceNode as? VoiceChatTileItemNode {
        let sourceRect = sourceNode.bounds
        self.backgroundImageNode.frame = sourceNode.bounds

        // Lay out the info block at the source size first, then animate it
        // to the target size (updateInfo springs between the two frames).
        self.updateInfo(size: sourceNode.bounds.size, animate: false)
        self.updateInfo(size: targetRect.size, animate: true)

        // Stretchable background: top half rounded (ellipse), bottom half
        // square, so only the top corners of the card appear rounded.
        self.backgroundImageNode.image = generateImage(CGSize(width: backgroundCornerRadius * 2.0, height: backgroundCornerRadius * 2.0), rotatedContext: { (size, context) in
            let bounds = CGRect(origin: CGPoint(), size: size)
            context.clear(bounds)

            context.setFillColor(UIColor(rgb: 0x1c1c1e).cgColor)
            context.fillEllipse(in: bounds)
            context.fill(CGRect(x: 0.0, y: 0.0, width: size.width, height: size.height / 2.0))
        })?.stretchableImage(withLeftCapWidth: Int(backgroundCornerRadius), topCapHeight: Int(backgroundCornerRadius))
        self.backgroundImageNode.cornerRadius = backgroundCornerRadius

        transition.updateCornerRadius(node: self.backgroundImageNode, cornerRadius: 0.0)

        let initialRect = sourceNode.frame
        let initialScale: CGFloat = sourceRect.width / targetRect.width

        // The avatar gallery is a square of the target width; the wrapper is
        // extended downward by the corner radius so it tucks under the background.
        let targetSize = CGSize(width: targetRect.size.width, height: targetRect.size.width)
        self.avatarListWrapperNode.update(size: targetSize, transition: .immediate)
        self.avatarListWrapperNode.frame = CGRect(x: targetRect.minX, y: targetRect.minY, width: targetRect.width, height: targetRect.width + backgroundCornerRadius)

        self.avatarListContainerNode.frame = CGRect(origin: CGPoint(), size: targetSize)
        self.avatarListContainerNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2)
        self.avatarListContainerNode.cornerRadius = targetRect.width / 2.0

        // Steal the tile's live video: resize it to the gallery square and
        // re-parent its container beneath the gallery wrapper.
        if let videoNode = sourceNode.videoNode {
            videoNode.updateLayout(size: targetSize, isLandscape: true, transition: transition)
            transition.updateFrame(node: videoNode, frame: CGRect(origin: CGPoint(), size: targetSize))
            transition.updateFrame(node: sourceNode.videoContainerNode, frame: CGRect(origin: CGPoint(), size: CGSize(width: targetSize.width, height: targetSize.height + backgroundCornerRadius)))
            sourceNode.videoContainerNode.cornerRadius = backgroundCornerRadius
        }
        self.insertSubnode(sourceNode.videoContainerNode, belowSubnode: self.avatarListWrapperNode)

        // Cross-fade a snapshot of the tile's info row out while the card's
        // own info block animates in; the fade overlay follows the video.
        if let snapshotView = sourceNode.infoNode.view.snapshotView(afterScreenUpdates: false) {
            self.videoFadeNode.image = sourceNode.fadeNode.image
            self.videoFadeNode.frame = CGRect(x: 0.0, y: sourceRect.height - sourceNode.fadeNode.frame.height, width: sourceRect.width, height: sourceNode.fadeNode.frame.height)
            self.insertSubnode(self.videoFadeNode, aboveSubnode: sourceNode.videoContainerNode)

            self.view.insertSubview(snapshotView, aboveSubview: sourceNode.videoContainerNode.view)
            snapshotView.frame = sourceRect
            transition.updateFrame(view: snapshotView, frame: CGRect(origin: CGPoint(x: 0.0, y: targetSize.height - snapshotView.frame.size.height), size: snapshotView.frame.size))
            snapshotView.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2, removeOnCompletion: false, completion: { _ in
                snapshotView.removeFromSuperview()
            })

            transition.updateFrame(node: self.videoFadeNode, frame: CGRect(origin: CGPoint(x: 0.0, y: targetSize.height - self.videoFadeNode.frame.size.height), size: CGSize(width: targetSize.width, height: self.videoFadeNode.frame.height)))
            self.videoFadeNode.alpha = 0.0
            self.videoFadeNode.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2)
        }

        // Spring the gallery from the tile's scale/position to its final
        // place; once settled, move the video inside the current avatar item.
        self.avatarListWrapperNode.layer.animateSpring(from: initialScale as NSNumber, to: 1.0 as NSNumber, keyPath: "transform.scale", duration: springDuration, initialVelocity: 0.0, damping: springDamping)
        self.avatarListWrapperNode.layer.animateSpring(from: NSValue(cgPoint: initialRect.center), to: NSValue(cgPoint: self.avatarListWrapperNode.position), keyPath: "position", duration: springDuration, initialVelocity: 0.0, damping: springDamping, completion: { [weak self] _ in
            if let strongSelf = self {
                strongSelf.avatarListNode.currentItemNode?.addSubnode(sourceNode.videoContainerNode)
            }
        })

        radiusTransition.updateCornerRadius(node: self.avatarListContainerNode, cornerRadius: 0.0)

        self.avatarListWrapperNode.contentNode.clipsToBounds = true

        self.avatarListNode.frame = CGRect(x: targetRect.width / 2.0, y: targetRect.width / 2.0, width: targetRect.width, height: targetRect.width)
        self.avatarListNode.controlsClippingNode.frame = CGRect(x: -targetRect.width / 2.0, y: -targetRect.width / 2.0, width: targetRect.width, height: targetRect.width)
        self.avatarListNode.controlsClippingOffsetNode.frame = CGRect(origin: CGPoint(x: targetRect.width / 2.0, y: targetRect.width / 2.0), size: CGSize())
        self.avatarListNode.stripContainerNode.frame = CGRect(x: 0.0, y: 13.0, width: targetRect.width, height: 2.0)

        self.avatarListNode.update(size: targetSize, peer: self.peer, customNode: self.customNode, additionalEntry: self.additionalEntry, isExpanded: true, transition: .immediate)

        // Grow the background from the tile rect to the area below the
        // gallery (overlapping it by 2*radius so the seam stays covered).
        let backgroundTargetRect = CGRect(x: 0.0, y: targetSize.height - backgroundCornerRadius * 2.0, width: targetRect.width, height: targetRect.height - targetSize.height + backgroundCornerRadius * 2.0)
        let initialBackgroundPosition = self.backgroundImageNode.position
        self.backgroundImageNode.layer.position = backgroundTargetRect.center
        let initialBackgroundBounds = self.backgroundImageNode.bounds
        self.backgroundImageNode.layer.bounds = CGRect(origin: CGPoint(), size: backgroundTargetRect.size)
        self.backgroundImageNode.layer.animateSpring(from: NSValue(cgPoint: initialBackgroundPosition), to: NSValue(cgPoint: self.backgroundImageNode.position), keyPath: "position", duration: springDuration, delay: 0.0, initialVelocity: 0.0, damping: springDamping)
        self.backgroundImageNode.layer.animateSpring(from: NSValue(cgRect: initialBackgroundBounds), to: NSValue(cgRect: self.backgroundImageNode.bounds), keyPath: "bounds", duration: springDuration, initialVelocity: 0.0, damping: springDamping)
    } else if let sourceNode = sourceNode as? VoiceChatFullscreenParticipantItemNode {
        // TODO(review): fullscreen-item transition is not implemented yet —
        // this branch intentionally does nothing.
    }
    self.appeared = true
}
// Animates this expanded peer-profile node out, morphing back into the given
// target node before removing itself from the hierarchy.
// - Parameters:
//   - targetNode: Node to collapse back into. Only VoiceChatTileItemNode is
//     handled here; the VoiceChatFullscreenParticipantItemNode branch is empty.
//   - targetRect: Destination frame for the collapse, in this node's space.
//   - transition: Transition used for the frame/fade updates that accompany
//     the fixed-duration scale/position animations below.
func animateOut(to targetNode: ASDisplayNode, targetRect: CGRect, transition: ContainedViewLayoutTransition) {
// Corner radius collapses on its own short ease-in-out transition,
// independent of the caller-supplied transition.
let radiusTransition = ContainedViewLayoutTransition.animated(duration: 0.2, curve: .easeInOut)
let springDuration: Double = 0.3
let springDamping: CGFloat = 1000.0
if let targetNode = targetNode as? VoiceChatTileItemNode {
let initialSize = self.bounds
self.updateInfo(size: targetRect.size, animate: true)
transition.updateCornerRadius(node: self.backgroundImageNode, cornerRadius: backgroundCornerRadius)
// Scale factor that shrinks the avatar list down to the tile's width.
let targetScale = targetRect.width / avatarListContainerNode.frame.width
// Temporarily reparent the tile's video under this node so it stays
// visible (below the avatar list, under the fade) during the collapse.
self.insertSubnode(targetNode.videoContainerNode, belowSubnode: self.avatarListWrapperNode)
self.insertSubnode(self.videoFadeNode, aboveSubnode: targetNode.videoContainerNode)
// Fade + shrink + move the avatar list toward the tile; on completion,
// hand the video container back to the tile and remove self.
self.avatarListWrapperNode.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2, removeOnCompletion: false)
self.avatarListWrapperNode.layer.animate(from: 1.0 as NSNumber, to: targetScale as NSNumber, keyPath: "transform.scale", timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue, duration: 0.2, removeOnCompletion: false)
self.avatarListWrapperNode.layer.animate(from: NSValue(cgPoint: self.avatarListWrapperNode.position), to: NSValue(cgPoint: targetRect.center), keyPath: "position", timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue, duration: 0.2, removeOnCompletion: false, completion: { [weak self, weak targetNode] _ in
if let targetNode = targetNode {
// Restore the video container to its normal position inside the tile.
targetNode.contentNode.insertSubnode(targetNode.videoContainerNode, aboveSubnode: targetNode.backgroundNode)
}
self?.removeFromSupernode()
})
radiusTransition.updateCornerRadius(node: self.avatarListContainerNode, cornerRadius: backgroundCornerRadius)
// Cross-fade a snapshot of the tile's info (title/icon) sliding into place.
// NOTE(review): snapshot y uses initialSize.width — presumably because the
// expanded header is square (height == width); confirm against animateIn.
if let snapshotView = targetNode.infoNode.view.snapshotView(afterScreenUpdates: false) {
self.view.insertSubview(snapshotView, aboveSubview: targetNode.videoContainerNode.view)
let snapshotFrame = snapshotView.frame
snapshotView.frame = CGRect(origin: CGPoint(x: 0.0, y: initialSize.width - snapshotView.frame.size.height), size: snapshotView.frame.size)
transition.updateFrame(view: snapshotView, frame: snapshotFrame)
snapshotView.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2)
// Re-show the bottom fade over the video as the tile chrome returns.
transition.updateFrame(node: self.videoFadeNode, frame: CGRect(origin: CGPoint(x: 0.0, y: targetRect.height - self.videoFadeNode.frame.size.height), size: CGSize(width: targetRect.width, height: self.videoFadeNode.frame.height)))
self.videoFadeNode.alpha = 1.0
self.videoFadeNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2)
}
// Resize the live video back to the tile's dimensions.
if let videoNode = targetNode.videoNode {
videoNode.updateLayout(size: targetRect.size, isLandscape: true, transition: transition)
transition.updateFrame(node: videoNode, frame: targetRect)
transition.updateFrame(node: targetNode.videoContainerNode, frame: targetRect)
}
// Spring the background image from its expanded frame to the tile frame.
// Animating position and bounds separately (rather than frame) keeps the
// spring on the layer's model values set just above.
let backgroundTargetRect = targetRect
let initialBackgroundPosition = self.backgroundImageNode.position
self.backgroundImageNode.layer.position = backgroundTargetRect.center
let initialBackgroundBounds = self.backgroundImageNode.bounds
self.backgroundImageNode.layer.bounds = CGRect(origin: CGPoint(), size: backgroundTargetRect.size)
self.backgroundImageNode.layer.animateSpring(from: NSValue(cgPoint: initialBackgroundPosition), to: NSValue(cgPoint: self.backgroundImageNode.position), keyPath: "position", duration: springDuration, delay: 0.0, initialVelocity: 0.0, damping: springDamping)
self.backgroundImageNode.layer.animateSpring(from: NSValue(cgRect: initialBackgroundBounds), to: NSValue(cgRect: self.backgroundImageNode.bounds), keyPath: "bounds", duration: springDuration, initialVelocity: 0.0, damping: springDamping)
// Hide the avatar page strip and the info block immediately (with fades);
// the tile's own snapshot replaces them visually.
self.avatarListNode.stripContainerNode.alpha = 0.0
self.avatarListNode.stripContainerNode.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2)
self.infoNode.alpha = 0.0
self.infoNode.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2)
} else if let targetNode = targetNode as? VoiceChatFullscreenParticipantItemNode {
// TODO(review): fullscreen-participant collapse is not implemented here —
// confirm whether this branch is handled elsewhere or intentionally empty.
}
}
}

View File

@ -48,12 +48,12 @@ final class VoiceChatTileGridNode: ASDisplayNode {
var wasAdded = false
if let current = self.itemNodes[item.id] {
itemNode = current
current.update(size: itemSize, item: item, transition: transition)
current.update(size: itemSize, availableWidth: size.width, item: item, transition: transition)
} else {
wasAdded = true
let addedItemNode = VoiceChatTileItemNode(context: self.context)
itemNode = addedItemNode
addedItemNode.update(size: itemSize, item: item, transition: .immediate)
addedItemNode.update(size: itemSize, availableWidth: size.width, item: item, transition: .immediate)
self.itemNodes[self.items[i].id] = addedItemNode
self.addSubnode(addedItemNode)
}
@ -197,15 +197,12 @@ final class VoiceChatTilesGridItemNode: ListViewItemNode {
strongSelf.tileGridNode = tileGridNode
}
let transition: ContainedViewLayoutTransition = currentItem == nil ? .immediate : .animated(duration: 0.3, curve: .spring)
let transition: ContainedViewLayoutTransition = currentItem == nil ? .immediate : .animated(duration: 0.4, curve: .spring)
let tileGridSize = tileGridNode.update(size: CGSize(width: params.width - params.leftInset - params.rightInset, height: CGFloat.greatestFiniteMagnitude), items: item.tiles, transition: transition)
if currentItem == nil {
let transition: ContainedViewLayoutTransition = .animated(duration: 0.3, curve: .easeInOut)
tileGridNode.frame = CGRect(x: params.leftInset, y: 0.0, width: tileGridSize.width, height: 0.0)
// transition.updateFrame(node: tileGridNode, frame: CGRect(origin: CGPoint(x: params.leftInset, y: 0.0), size: tileGridSize))
strongSelf.backgroundNode.frame = tileGridNode.frame
// transition.updateFrame(node: strongSelf.backgroundNode, frame: CGRect(origin: CGPoint(x: params.leftInset, y: 0.0), size: tileGridSize))
} else {
transition.updateFrame(node: tileGridNode, frame: CGRect(origin: CGPoint(x: params.leftInset, y: 0.0), size: tileGridSize))
transition.updateFrame(node: strongSelf.backgroundNode, frame: CGRect(origin: CGPoint(x: params.leftInset, y: 0.0), size: tileGridSize))

View File

@ -35,11 +35,13 @@ final class VoiceChatTileItem: Equatable {
let peer: Peer
let videoEndpointId: String
let icon: Icon
let strings: PresentationStrings
let nameDisplayOrder: PresentationPersonNameOrder
let icon: Icon
let text: VoiceChatParticipantItem.ParticipantText
let speaking: Bool
let action: () -> Void
let contextAction: ((ASDisplayNode, ContextGesture?) -> Void)?
let getVideo: () -> GroupVideoNode?
let getAudioLevel: (() -> Signal<Float, NoError>)?
@ -47,14 +49,16 @@ final class VoiceChatTileItem: Equatable {
return self.videoEndpointId
}
init(peer: Peer, videoEndpointId: String, strings: PresentationStrings, nameDisplayOrder: PresentationPersonNameOrder, speaking: Bool, icon: Icon, action: @escaping () -> Void, getVideo: @escaping () -> GroupVideoNode?, getAudioLevel: (() -> Signal<Float, NoError>)?) {
// Creates an immutable description of one participant's video tile.
// - Parameters:
//   - peer: The participant shown in the tile.
//   - videoEndpointId: Identifier of the participant's video stream; also
//     serves as the item's stable identity (see `id` above).
//   - strings / nameDisplayOrder: Localization inputs for rendering the name.
//   - speaking: Whether the participant is currently speaking.
//   - icon: Trailing status icon for the tile.
//   - text: Secondary status text (VoiceChatParticipantItem.ParticipantText).
//   - action: Invoked when the tile is tapped.
//   - contextAction: Optional; invoked to present a context menu for the tile,
//     receiving the source node and the triggering gesture.
//   - getVideo: Lazily resolves the tile's GroupVideoNode (may return nil).
//   - getAudioLevel: Optional signal of the participant's live audio level.
init(peer: Peer, videoEndpointId: String, strings: PresentationStrings, nameDisplayOrder: PresentationPersonNameOrder, speaking: Bool, icon: Icon, text: VoiceChatParticipantItem.ParticipantText, action: @escaping () -> Void, contextAction: ((ASDisplayNode, ContextGesture?) -> Void)?, getVideo: @escaping () -> GroupVideoNode?, getAudioLevel: (() -> Signal<Float, NoError>)?) {
self.peer = peer
self.videoEndpointId = videoEndpointId
self.strings = strings
self.nameDisplayOrder = nameDisplayOrder
self.icon = icon
self.text = text
self.speaking = speaking
self.action = action
self.contextAction = contextAction
self.getVideo = getVideo
self.getAudioLevel = getAudioLevel
}
@ -93,16 +97,26 @@ final class VoiceChatTileItemNode: ASDisplayNode {
let contextSourceNode: ContextExtractedContentContainingNode
private let containerNode: ContextControllerSourceNode
private let backgroundNode: ASDisplayNode
let contentNode: ASDisplayNode
let backgroundNode: ASDisplayNode
var videoContainerNode: ASDisplayNode
var videoNode: GroupVideoNode?
private let fadeNode: ASImageNode
let infoNode: ASDisplayNode
let fadeNode: ASImageNode
private let titleNode: ImmediateTextNode
private let iconNode: ASImageNode
private var animationNode: VoiceChatMicrophoneNode?
private var highlightNode: ASImageNode
private let statusNode: VoiceChatParticipantStatusNode
private var validLayout: CGSize?
private var profileNode: VoiceChatPeerProfileNode?
private var extractedRect: CGRect?
private var nonExtractedRect: CGRect?
private var extractedVerticalOffset: CGFloat?
private var validLayout: (CGSize, CGFloat)?
var item: VoiceChatTileItem?
private var isExtracted = false
private let audioLevelDisposable = MetaDisposable()
@ -112,9 +126,18 @@ final class VoiceChatTileItemNode: ASDisplayNode {
self.contextSourceNode = ContextExtractedContentContainingNode()
self.containerNode = ContextControllerSourceNode()
self.contentNode = ASDisplayNode()
self.contentNode.clipsToBounds = true
self.contentNode.cornerRadius = 11.0
self.backgroundNode = ASDisplayNode()
self.backgroundNode.backgroundColor = panelBackgroundColor
self.videoContainerNode = ASDisplayNode()
self.videoContainerNode.clipsToBounds = true
self.infoNode = ASDisplayNode()
self.fadeNode = ASImageNode()
self.fadeNode.displaysAsynchronously = false
self.fadeNode.displayWithoutProcessing = true
@ -122,6 +145,7 @@ final class VoiceChatTileItemNode: ASDisplayNode {
self.fadeNode.image = fadeImage
self.titleNode = ImmediateTextNode()
self.statusNode = VoiceChatParticipantStatusNode()
self.iconNode = ASImageNode()
self.iconNode.displaysAsynchronously = false
@ -136,18 +160,38 @@ final class VoiceChatTileItemNode: ASDisplayNode {
self.clipsToBounds = true
self.contextSourceNode.contentNode.clipsToBounds = true
self.contextSourceNode.contentNode.cornerRadius = 11.0
self.containerNode.addSubnode(self.contextSourceNode)
self.containerNode.targetNodeForActivationProgress = self.contextSourceNode.contentNode
self.addSubnode(self.containerNode)
self.contextSourceNode.contentNode.addSubnode(self.backgroundNode)
self.contextSourceNode.contentNode.addSubnode(self.fadeNode)
self.contextSourceNode.contentNode.addSubnode(self.titleNode)
self.contextSourceNode.contentNode.addSubnode(self.iconNode)
self.contextSourceNode.contentNode.addSubnode(self.highlightNode)
self.contextSourceNode.contentNode.addSubnode(self.contentNode)
self.contentNode.addSubnode(self.backgroundNode)
self.contentNode.addSubnode(self.videoContainerNode)
self.contentNode.addSubnode(self.fadeNode)
self.contentNode.addSubnode(self.infoNode)
self.infoNode.addSubnode(self.titleNode)
self.infoNode.addSubnode(self.iconNode)
self.contentNode.addSubnode(self.highlightNode)
self.containerNode.shouldBegin = { [weak self] location in
guard let _ = self else {
return false
}
return true
}
self.containerNode.activated = { [weak self] gesture, _ in
guard let strongSelf = self, let item = strongSelf.item, let contextAction = item.contextAction else {
gesture.cancel()
return
}
contextAction(strongSelf.contextSourceNode, gesture)
}
self.contextSourceNode.willUpdateIsExtractedToContextPreview = { [weak self] isExtracted, transition in
guard let strongSelf = self, let _ = strongSelf.item else {
return
}
strongSelf.updateIsExtracted(isExtracted, transition: transition)
}
}
deinit {
@ -164,10 +208,44 @@ final class VoiceChatTileItemNode: ASDisplayNode {
self.item?.action()
}
func update(size: CGSize, item: VoiceChatTileItem, transition: ContainedViewLayoutTransition) {
guard self.validLayout != size || self.item != item else {
// Shows or hides the expanded peer profile when the context-menu preview
// extracts (long-press) or collapses this tile.
// Requires both cached layout rects and a current item; no-ops if the
// extraction state is unchanged.
private func updateIsExtracted(_ isExtracted: Bool, transition: ContainedViewLayoutTransition) {
guard self.isExtracted != isExtracted, let extractedRect = self.extractedRect, let nonExtractedRect = self.nonExtractedRect, let item = self.item else {
return
}
self.isExtracted = isExtracted
if isExtracted {
// Build the profile overlay, donating our video container as its
// custom content so the live video carries into the expanded view.
let profileNode = VoiceChatPeerProfileNode(context: self.context, size: extractedRect.size, peer: item.peer, text: item.text, customNode: self.videoContainerNode, additionalEntry: .single(nil), requestDismiss: { [weak self] in
self?.contextSourceNode.requestDismiss?()
})
profileNode.frame = CGRect(origin: CGPoint(), size: extractedRect.size)
self.profileNode = profileNode
self.contextSourceNode.contentNode.addSubnode(profileNode)
profileNode.animateIn(from: self, targetRect: extractedRect, transition: transition)
// Route touches inside the avatar header to the avatar list so the
// gallery remains swipeable while extracted.
self.contextSourceNode.contentNode.customHitTest = { [weak self] point in
if let strongSelf = self, let profileNode = strongSelf.profileNode {
if profileNode.avatarListWrapperNode.frame.contains(point) {
return profileNode.avatarListNode.view
}
}
return nil
}
} else if let profileNode = self.profileNode {
// Collapse: the profile node animates back into this tile and removes
// itself (see VoiceChatPeerProfileNode.animateOut).
self.profileNode = nil
profileNode.animateOut(to: self, targetRect: nonExtractedRect, transition: transition)
self.contextSourceNode.contentNode.customHitTest = nil
}
}
func update(size: CGSize, availableWidth: CGFloat, item: VoiceChatTileItem, transition: ContainedViewLayoutTransition) {
guard self.validLayout?.0 != size || self.validLayout?.1 != availableWidth || self.item != item else {
return
}
self.validLayout = (size, availableWidth)
var itemTransition = transition
if self.item != item {
@ -206,7 +284,7 @@ final class VoiceChatTileItemNode: ASDisplayNode {
if let videoNode = item.getVideo() {
itemTransition = .immediate
self.videoNode = videoNode
self.contextSourceNode.contentNode.insertSubnode(videoNode, at: 1)
self.videoContainerNode.addSubnode(videoNode)
}
}
@ -248,7 +326,7 @@ final class VoiceChatTileItemNode: ASDisplayNode {
} else {
animationNode = VoiceChatMicrophoneNode()
self.animationNode = animationNode
self.contextSourceNode.contentNode.addSubnode(animationNode)
self.infoNode.addSubnode(animationNode)
}
animationNode.alpha = 1.0
animationNode.update(state: VoiceChatMicrophoneNode.State(muted: muted, filled: true, color: UIColor.white), animated: true)
@ -259,34 +337,56 @@ final class VoiceChatTileItemNode: ASDisplayNode {
}
let bounds = CGRect(origin: CGPoint(), size: size)
self.contentNode.frame = bounds
self.containerNode.frame = bounds
self.contextSourceNode.frame = bounds
self.contextSourceNode.contentNode.frame = bounds
let extractedWidth = availableWidth
let makeStatusLayout = self.statusNode.asyncLayout()
let (statusLayout, _) = makeStatusLayout(CGSize(width: availableWidth - 30.0, height: CGFloat.greatestFiniteMagnitude), item.text, true)
let extractedRect = CGRect(x: 0.0, y: 0.0, width: extractedWidth, height: extractedWidth + statusLayout.height + 39.0)
let nonExtractedRect = bounds
self.extractedRect = extractedRect
self.nonExtractedRect = nonExtractedRect
self.contextSourceNode.contentRect = extractedRect
if self.videoContainerNode.supernode === self.contentNode {
if let videoNode = self.videoNode {
transition.updateFrame(node: videoNode, frame: bounds)
videoNode.updateLayout(size: size, isLandscape: true, transition: itemTransition)
}
transition.updateFrame(node: self.videoContainerNode, frame: bounds)
}
transition.updateFrame(node: self.backgroundNode, frame: bounds)
transition.updateFrame(node: self.highlightNode, frame: bounds)
transition.updateFrame(node: self.infoNode, frame: bounds)
transition.updateFrame(node: self.fadeNode, frame: CGRect(x: 0.0, y: size.height - fadeHeight, width: size.width, height: fadeHeight))
let titleSize = self.titleNode.updateLayout(CGSize(width: size.width - 50.0, height: size.height))
self.titleNode.frame = CGRect(origin: CGPoint(x: 11.0, y: size.height - titleSize.height - 8.0), size: titleSize)
self.titleNode.frame = CGRect(origin: CGPoint(x: 30.0, y: size.height - titleSize.height - 8.0), size: titleSize)
if let animationNode = self.animationNode {
let animationSize = CGSize(width: 36.0, height: 36.0)
animationNode.bounds = CGRect(origin: CGPoint(), size: animationSize)
animationNode.transform = CATransform3DMakeScale(0.66667, 0.66667, 1.0)
transition.updatePosition(node: animationNode, position: CGPoint(x: size.width - 19.0, y: size.height - 15.0))
transition.updatePosition(node: animationNode, position: CGPoint(x: 16.0, y: size.height - 15.0))
}
}
func animateTransitionIn(from sourceNode: ASDisplayNode, containerNode: ASDisplayNode, animate: Bool = true) {
func animateTransitionIn(from sourceNode: ASDisplayNode, containerNode: ASDisplayNode, transition: ContainedViewLayoutTransition, animate: Bool = true) {
guard let _ = self.item else {
return
}
var duration: Double = 0.2
var timingFunction: String = CAMediaTimingFunctionName.easeInEaseOut.rawValue
if case let .animated(transitionDuration, curve) = transition {
duration = transitionDuration
timingFunction = curve.timingFunction
}
if let sourceNode = sourceNode as? VoiceChatFullscreenParticipantItemNode, let _ = sourceNode.item {
let initialAnimate = animate
@ -301,7 +401,7 @@ final class VoiceChatTileItemNode: ASDisplayNode {
sourceNode.videoNode = nil
videoNode.alpha = 1.0
self.videoNode = videoNode
self.contextSourceNode.contentNode.insertSubnode(videoNode, at: 1)
self.videoContainerNode.addSubnode(videoNode)
if animate {
// self.videoContainerNode.layer.animateScale(from: sourceNode.bounds.width / videoSize.width, to: tileSize.width / videoSize.width, duration: 0.2, timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue)
@ -322,15 +422,14 @@ final class VoiceChatTileItemNode: ASDisplayNode {
self.contextSourceNode.position = targetContainerPosition
containerNode.addSubnode(self.contextSourceNode)
self.contextSourceNode.layer.animateScale(from: 0.467, to: 1.0, duration: 0.2, timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue)
self.contextSourceNode.layer.animatePosition(from: startContainerPosition, to: targetContainerPosition, duration: 0.2, timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue, completion: { [weak self] _ in
self.contextSourceNode.layer.animateScale(from: 0.467, to: 1.0, duration: duration, timingFunction: timingFunction)
self.contextSourceNode.layer.animatePosition(from: startContainerPosition, to: targetContainerPosition, duration: duration, timingFunction: timingFunction, completion: { [weak self] _ in
if let strongSelf = self {
strongSelf.contextSourceNode.position = initialPosition
strongSelf.containerNode.addSubnode(strongSelf.contextSourceNode)
}
})
let transition = ContainedViewLayoutTransition.animated(duration: 0.2, curve: .easeInOut)
self.videoNode?.updateLayout(size: self.bounds.size, isLandscape: true, transition: transition)
self.videoNode?.frame = self.bounds
} else if !initialAnimate {

View File

@ -0,0 +1,12 @@
{
"images" : [
{
"filename" : "pin.pdf",
"idiom" : "universal"
}
],
"info" : {
"author" : "xcode",
"version" : 1
}
}

Binary file not shown.

View File

@ -0,0 +1,12 @@
{
"images" : [
{
"filename" : "unpin.pdf",
"idiom" : "universal"
}
],
"info" : {
"author" : "xcode",
"version" : 1
}
}

Binary file not shown.