Merge commit '71818a565ecf90a0fd1a24c1a70bfab0862040fe'

commit f4931a77f6
Author: Ali
Date: 2021-05-19 21:52:41 +04:00
22 changed files with 2102 additions and 825 deletions

View File

@@ -107,8 +107,8 @@ public final class VoiceBlobView: UIView, TGModernConversationInputMicButtonDeco
         guard !isAnimating else { return }
         isAnimating = true
-        mediumBlob.layer.animateScale(from: 0.5, to: 1, duration: 0.15, removeOnCompletion: false)
-        bigBlob.layer.animateScale(from: 0.5, to: 1, duration: 0.15, removeOnCompletion: false)
+        mediumBlob.layer.animateScale(from: 0.75, to: 1, duration: 0.35, removeOnCompletion: false)
+        bigBlob.layer.animateScale(from: 0.75, to: 1, duration: 0.35, removeOnCompletion: false)
         updateBlobsState()
@@ -123,8 +123,8 @@ public final class VoiceBlobView: UIView, TGModernConversationInputMicButtonDeco
         guard isAnimating else { return }
         isAnimating = false
-        mediumBlob.layer.animateScale(from: 1.0, to: 0.5, duration: duration, removeOnCompletion: false)
-        bigBlob.layer.animateScale(from: 1.0, to: 0.5, duration: duration, removeOnCompletion: false)
+        mediumBlob.layer.animateScale(from: 1.0, to: 0.75, duration: duration, removeOnCompletion: false)
+        bigBlob.layer.animateScale(from: 1.0, to: 0.75, duration: duration, removeOnCompletion: false)
         updateBlobsState()
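Note: the two hunks above soften the recording blob's enter/exit animation: the blobs now scale in from 0.75 over 0.35 s instead of from 0.5 over 0.15 s, and scale back down only to 0.75 when stopping. The animateScale helper belongs to the project's Display framework; as a rough illustration only, a minimal Core Animation sketch of the same kind of scale-in (function name and timing curve are assumptions, not the project's API):

    import QuartzCore

    // Scale a layer in from 75% to full size over 0.35 s and keep the final value,
    // roughly what animateScale(from: 0.75, to: 1, duration: 0.35, removeOnCompletion: false) does.
    func animateScaleIn(_ layer: CALayer) {
        let animation = CABasicAnimation(keyPath: "transform.scale")
        animation.fromValue = 0.75
        animation.toValue = 1.0
        animation.duration = 0.35
        animation.timingFunction = CAMediaTimingFunction(name: .easeOut)
        animation.isRemovedOnCompletion = false
        animation.fillMode = .forwards
        layer.add(animation, forKey: "scaleIn")
    }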

View File

@@ -1390,6 +1390,10 @@ private final class ContextControllerNode: ViewControllerTracingNode, UIScrollVi
             } else {
                 overflowOffset = min(0.0, originalContentFrame.minY - contentTopInset)
                 contentContainerFrame = originalContentFrame.offsetBy(dx: -contentParentNode.contentRect.minX, dy: -overflowOffset - contentParentNode.contentRect.minY)
+                if contentContainerFrame.maxX > layout.size.width {
+                    contentContainerFrame = CGRect(origin: CGPoint(x: floor((layout.size.width - contentContainerFrame.width) / 2.0), y: contentContainerFrame.minY), size: contentContainerFrame.size)
+                }
             }
             if case let .extracted(source) = self.source, source.centerVertically {
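Note: the added branch keeps the extracted content container on screen: if the computed frame would run past the layout width, it is re-centered horizontally while keeping its size and vertical position. A standalone sketch of that clamping step (pure CoreGraphics, hypothetical function name):

    import CoreGraphics

    // Re-center a frame horizontally inside a container when it would overflow its width,
    // mirroring the check added in the hunk above.
    func clampedHorizontally(_ frame: CGRect, containerWidth: CGFloat) -> CGRect {
        guard frame.maxX > containerWidth else { return frame }
        let x = floor((containerWidth - frame.width) / 2.0)
        return CGRect(origin: CGPoint(x: x, y: frame.minY), size: frame.size)
    }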

View File

@@ -185,7 +185,10 @@
 - (void)cleanup
 {
-    [_diskCache cleanup];
+    TGModernCache *diskCache = _diskCache;
+    TGDispatchAfter(10.0, dispatch_get_main_queue(), ^{
+        [diskCache cleanup];
+    });
     [[NSFileManager defaultManager] removeItemAtPath:_fullSizeResultsUrl.path error:nil];
     [[NSFileManager defaultManager] removeItemAtPath:_paintingImagesUrl.path error:nil];
@@ -991,7 +994,7 @@
 + (NSUInteger)diskMemoryLimit
 {
-    return 64 * 1024 * 1024;
+    return 512 * 1024 * 1024;
 }
 + (NSUInteger)imageSoftMemoryLimit
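Note: two changes here: -cleanup now defers disposing of the disk cache by ten seconds on the main queue, capturing the cache in a local variable so the deferred block does not reference the owner, and the disk cache limit grows from 64 MB to 512 MB. The original is Objective-C using TGDispatchAfter; a Swift sketch of the same deferral pattern with hypothetical stand-in types:

    import Foundation

    final class MediaResultsStore {
        private let diskCache = DiskCache()   // hypothetical stand-in for TGModernCache

        func cleanup() {
            // Capture the cache locally and clean it up later, so the (possibly slow)
            // disk work does not run while the store itself is being torn down.
            let cache = self.diskCache
            DispatchQueue.main.asyncAfter(deadline: .now() + 10.0) {
                cache.cleanup()
            }
        }
    }

    final class DiskCache {
        func cleanup() { /* remove cached files from disk */ }
    }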

View File

@@ -146,7 +146,6 @@
 {
     _context = context;
     _actionHandle = [[ASHandle alloc] initWithDelegate:self releaseOnMainThread:true];
-    _standaloneEditingContext = [[TGMediaEditingContext alloc] init];
     self.automaticallyManageScrollViewInsets = false;
     self.autoManageStatusBarBackground = false;
@@ -2182,10 +2181,14 @@
 - (TGMediaEditingContext *)editingContext
 {
-    if (_editingContext)
+    if (_editingContext) {
         return _editingContext;
-    else
+    } else {
+        if (_standaloneEditingContext == nil) {
+            _standaloneEditingContext = [[TGMediaEditingContext alloc] init];
+        }
         return _standaloneEditingContext;
+    }
 }
 - (void)doneButtonLongPressed:(UIButton *)sender
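Note: the standalone editing context is no longer created eagerly in the initializer; editingContext now returns an injected context when available and otherwise creates the private standalone instance on first access. A Swift sketch of that lazy fallback (the original is Objective-C; the names here are stand-ins):

    // Prefer an externally provided context; otherwise create a private one lazily and reuse it.
    final class MediaPicker {
        var providedEditingContext: EditingContext?
        private var standaloneEditingContext: EditingContext?

        var editingContext: EditingContext {
            if let context = providedEditingContext {
                return context
            }
            if let context = standaloneEditingContext {
                return context
            }
            let context = EditingContext()
            standaloneEditingContext = context
            return context
        }
    }

    final class EditingContext {}   // hypothetical stand-in for TGMediaEditingContext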

View File

@@ -40,12 +40,15 @@ final class GroupVideoNode: ASDisplayNode {
             self.backdropVideoViewContainer.addSubview(backdropVideoView.view)
             self.view.addSubview(self.backdropVideoViewContainer)
+            let effect: UIVisualEffect
             if #available(iOS 13.0, *) {
-                let backdropEffectView = UIVisualEffectView(effect: UIBlurEffect(style: .systemThinMaterialDark))
-                self.view.addSubview(backdropEffectView)
-                self.backdropEffectView = backdropEffectView
+                effect = UIBlurEffect(style: .systemThinMaterialDark)
             } else {
+                effect = UIBlurEffect(style: .dark)
             }
+            let backdropEffectView = UIVisualEffectView(effect: effect)
+            self.view.addSubview(backdropEffectView)
+            self.backdropEffectView = backdropEffectView
         }
         self.videoViewContainer.addSubview(self.videoView.view)
@@ -131,10 +134,15 @@ final class GroupVideoNode: ASDisplayNode {
         }
     }
+    var aspectRatio: CGFloat {
+        return self.videoView.getAspect()
+    }
     func updateLayout(size: CGSize, isLandscape: Bool, transition: ContainedViewLayoutTransition) {
         self.validLayout = (size, isLandscape)
-        transition.updateFrameAsPositionAndBounds(layer: self.videoViewContainer.layer, frame: CGRect(origin: CGPoint(), size: size))
-        transition.updateFrameAsPositionAndBounds(layer: self.backdropVideoViewContainer.layer, frame: CGRect(origin: CGPoint(), size: size))
+        let bounds = CGRect(origin: CGPoint(), size: size)
+        transition.updateFrameAsPositionAndBounds(layer: self.videoViewContainer.layer, frame: bounds)
+        transition.updateFrameAsPositionAndBounds(layer: self.backdropVideoViewContainer.layer, frame: bounds)
         let orientation = self.videoView.getOrientation()
         var aspect = self.videoView.getAspect()
@@ -194,9 +202,6 @@ final class GroupVideoNode: ASDisplayNode {
         let transformScale: CGFloat = rotatedVideoFrame.width / videoSize.width
         transition.updateTransformScale(layer: self.videoViewContainer.layer, scale: transformScale)
-        let transition: ContainedViewLayoutTransition = .immediate
-        transition.updateTransformRotation(view: self.videoView.view, angle: angle)
         if let backdropVideoView = self.backdropVideoView {
             rotatedVideoSize = filledSize
             var rotatedVideoFrame = CGRect(origin: CGPoint(x: floor((size.width - rotatedVideoSize.width) / 2.0), y: floor((size.height - rotatedVideoSize.height) / 2.0)), size: rotatedVideoSize)
@@ -217,11 +222,16 @@
         }
         if let backdropEffectView = self.backdropEffectView {
-            transition.updateFrame(view: backdropEffectView, frame: self.bounds)
+            let maxSide = max(bounds.width, bounds.height)
+            let squareBounds = CGRect(x: (bounds.width - maxSide) / 2.0, y: (bounds.width - maxSide) / 2.0, width: maxSide, height: maxSide)
+            transition.updateFrame(view: backdropEffectView, frame: squareBounds)
         }
+        let transition: ContainedViewLayoutTransition = .immediate
+        transition.updateTransformRotation(view: self.videoView.view, angle: angle)
         if let effectView = self.effectView {
-            transition.updateFrame(view: effectView, frame: self.bounds)
+            transition.updateFrame(view: effectView, frame: bounds)
         }
         // TODO: properly fix the issue
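Note: the backdrop blur view is now created unconditionally: only the UIBlurEffect style depends on the OS version, a computed aspectRatio is exposed, and the effect view is laid out as a square sized to the longer side of the node's bounds. A small sketch of the availability-based effect selection (UIKit only; the helper name is an assumption):

    import UIKit

    // Pick the blur style by availability, then build the effect view once,
    // as the restructured hunk above does.
    func makeBackdropEffectView() -> UIVisualEffectView {
        let effect: UIVisualEffect
        if #available(iOS 13.0, *) {
            effect = UIBlurEffect(style: .systemThinMaterialDark)
        } else {
            effect = UIBlurEffect(style: .dark)
        }
        return UIVisualEffectView(effect: effect)
    }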

View File

@@ -657,7 +657,7 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
         self.temporaryJoinTimestamp = Int32(CFAbsoluteTimeGetCurrent() + NSTimeIntervalSince1970)
-        self.isVideoEnabled = accountContext.sharedContext.immediateExperimentalUISettings.demoVideoChats
+        self.isVideoEnabled = true
         self.hasVideo = false
         self.hasScreencast = false

View File

@@ -61,6 +61,7 @@ final class VoiceChatActionButton: HighlightTrackingButtonNode {
     private let containerNode: ASDisplayNode
     private let backgroundNode: VoiceChatActionButtonBackgroundNode
     private let iconNode: VoiceChatActionButtonIconNode
+    private let labelContainerNode: ASDisplayNode
     let titleLabel: ImmediateTextNode
     private let subtitleLabel: ImmediateTextNode
     private let buttonTitleLabel: ImmediateTextNode
@@ -138,6 +139,7 @@ final class VoiceChatActionButton: HighlightTrackingButtonNode {
         self.backgroundNode = VoiceChatActionButtonBackgroundNode()
         self.iconNode = VoiceChatActionButtonIconNode(isColored: false)
+        self.labelContainerNode = ASDisplayNode()
         self.titleLabel = ImmediateTextNode()
         self.subtitleLabel = ImmediateTextNode()
         self.buttonTitleLabel = ImmediateTextNode()
@@ -147,9 +149,10 @@ final class VoiceChatActionButton: HighlightTrackingButtonNode {
         super.init()
         self.addSubnode(self.bottomNode)
-        self.addSubnode(self.titleLabel)
-        self.addSubnode(self.subtitleLabel)
+        self.labelContainerNode.addSubnode(self.titleLabel)
+        self.labelContainerNode.addSubnode(self.subtitleLabel)
+        self.addSubnode(self.labelContainerNode)
         self.addSubnode(self.containerNode)
         self.containerNode.addSubnode(self.backgroundNode)
         self.containerNode.addSubnode(self.iconNode)
@@ -242,6 +245,8 @@ final class VoiceChatActionButton: HighlightTrackingButtonNode {
         let subtitleSize = self.subtitleLabel.updateLayout(CGSize(width: size.width, height: .greatestFiniteMagnitude))
         let totalHeight = titleSize.height + subtitleSize.height + 1.0
+        self.labelContainerNode.frame = CGRect(origin: CGPoint(), size: size)
         self.titleLabel.frame = CGRect(origin: CGPoint(x: floor((size.width - titleSize.width) / 2.0), y: floor((size.height - totalHeight) / 2.0) + 84.0), size: titleSize)
         self.subtitleLabel.frame = CGRect(origin: CGPoint(x: floor((size.width - subtitleSize.width) / 2.0), y: self.titleLabel.frame.maxY + 1.0), size: subtitleSize)
@@ -272,17 +277,19 @@ final class VoiceChatActionButton: HighlightTrackingButtonNode {
             transition.updateAlpha(node: self.subtitleLabel, alpha: 0.0)
             transition.updateAlpha(layer: self.backgroundNode.maskProgressLayer, alpha: 0.0)
         } else {
-            let transition: ContainedViewLayoutTransition = animated ? .animated(duration: 0.2, curve: .easeInOut) : .immediate
+            let transition: ContainedViewLayoutTransition = animated ? .animated(duration: 0.4, curve: .spring) : .immediate
             if small {
-                transition.updateTransformScale(node: self.backgroundNode, scale: self.pressing ? smallScale * 0.9 : smallScale, delay: 0.05)
-                transition.updateTransformScale(node: self.iconNode, scale: self.pressing ? smallIconScale * 0.9 : smallIconScale, delay: 0.05)
+                transition.updateTransformScale(node: self.backgroundNode, scale: self.pressing ? smallScale * 0.9 : smallScale, delay: 0.0)
+                transition.updateTransformScale(node: self.iconNode, scale: self.pressing ? smallIconScale * 0.9 : smallIconScale, delay: 0.0)
                 transition.updateAlpha(node: self.titleLabel, alpha: 0.0)
                 transition.updateAlpha(node: self.subtitleLabel, alpha: 0.0)
+                transition.updateSublayerTransformOffset(layer: self.labelContainerNode.layer, offset: CGPoint(x: 0.0, y: -50.0))
             } else {
-                transition.updateTransformScale(node: self.backgroundNode, scale: 1.0, delay: 0.05)
-                transition.updateTransformScale(node: self.iconNode, scale: self.pressing ? 0.9 : 1.0, delay: 0.05)
+                transition.updateTransformScale(node: self.backgroundNode, scale: 1.0, delay: 0.0)
+                transition.updateTransformScale(node: self.iconNode, scale: self.pressing ? 0.9 : 1.0, delay: 0.0)
                 transition.updateAlpha(node: self.titleLabel, alpha: 1.0, delay: 0.05)
                 transition.updateAlpha(node: self.subtitleLabel, alpha: 1.0, delay: 0.05)
+                transition.updateSublayerTransformOffset(layer: self.labelContainerNode.layer, offset: CGPoint())
             }
             transition.updateAlpha(layer: self.backgroundNode.maskProgressLayer, alpha: 1.0)
         }
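Note: the title and subtitle are moved into a shared labelContainerNode, so the compact state can slide both labels up by 50 pt with a single sublayer-transform offset on the container; the state change also becomes a 0.4 s spring with no delay. updateSublayerTransformOffset is a Display-framework helper; a plain CALayer sketch of offsetting a container's sublayers as one unit (function name and timing are assumptions):

    import UIKit

    // Shift every sublayer of a container at once via sublayerTransform.
    // Grouping the labels under one container node is what makes this possible.
    func setSublayerOffset(_ container: CALayer, offsetY: CGFloat, animated: Bool) {
        let transform = CATransform3DMakeTranslation(0.0, offsetY, 0.0)
        if animated {
            let animation = CABasicAnimation(keyPath: "sublayerTransform")
            animation.fromValue = NSValue(caTransform3D: container.sublayerTransform)
            animation.toValue = NSValue(caTransform3D: transform)
            animation.duration = 0.4
            animation.timingFunction = CAMediaTimingFunction(name: .easeOut)
            container.add(animation, forKey: "sublayerTransform")
        }
        container.sublayerTransform = transform
    }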

View File

@@ -23,12 +23,12 @@ final class VoiceChatCameraPreviewController: ViewController {
     private var animatedIn = false
     private let cameraNode: GroupVideoNode
-    private let shareCamera: (ASDisplayNode) -> Void
+    private let shareCamera: (ASDisplayNode, Bool) -> Void
     private let switchCamera: () -> Void
     private var presentationDataDisposable: Disposable?
-    init(context: AccountContext, cameraNode: GroupVideoNode, shareCamera: @escaping (ASDisplayNode) -> Void, switchCamera: @escaping () -> Void) {
+    init(context: AccountContext, cameraNode: GroupVideoNode, shareCamera: @escaping (ASDisplayNode, Bool) -> Void, switchCamera: @escaping () -> Void) {
         self.context = context
         self.cameraNode = cameraNode
         self.shareCamera = shareCamera
@@ -60,9 +60,9 @@ final class VoiceChatCameraPreviewController: ViewController {
     override public func loadDisplayNode() {
         self.displayNode = VoiceChatCameraPreviewControllerNode(controller: self, context: self.context, cameraNode: self.cameraNode)
-        self.controllerNode.shareCamera = { [weak self] in
+        self.controllerNode.shareCamera = { [weak self] unmuted in
             if let strongSelf = self {
-                strongSelf.shareCamera(strongSelf.cameraNode)
+                strongSelf.shareCamera(strongSelf.cameraNode, unmuted)
                 strongSelf.dismiss()
             }
         }
@@ -121,6 +121,10 @@ private class VoiceChatCameraPreviewControllerNode: ViewControllerTracingNode, U
     private var broadcastPickerView: UIView?
     private let cancelButton: SolidRoundedButtonNode
+    private let microphoneButton: HighlightTrackingButtonNode
+    private let microphoneEffectView: UIVisualEffectView
+    private let microphoneIconNode: VoiceChatMicrophoneNode
     private let switchCameraButton: HighlightTrackingButtonNode
     private let switchCameraEffectView: UIVisualEffectView
     private let switchCameraIconNode: ASImageNode
@@ -129,7 +133,7 @@ private class VoiceChatCameraPreviewControllerNode: ViewControllerTracingNode, U
     private var applicationStateDisposable: Disposable?
-    var shareCamera: (() -> Void)?
+    var shareCamera: ((Bool) -> Void)?
     var switchCamera: (() -> Void)?
     var dismiss: (() -> Void)?
     var cancel: (() -> Void)?
@@ -196,6 +200,16 @@ private class VoiceChatCameraPreviewControllerNode: ViewControllerTracingNode, U
         self.previewContainerNode.cornerRadius = 11.0
         self.previewContainerNode.backgroundColor = .black
+        self.microphoneButton = HighlightTrackingButtonNode()
+        self.microphoneButton.isSelected = true
+        self.microphoneEffectView = UIVisualEffectView(effect: UIBlurEffect(style: .dark))
+        self.microphoneEffectView.clipsToBounds = true
+        self.microphoneEffectView.layer.cornerRadius = 24.0
+        self.microphoneEffectView.isUserInteractionEnabled = false
+        self.microphoneIconNode = VoiceChatMicrophoneNode()
+        self.microphoneIconNode.update(state: .init(muted: false, filled: true, color: .white), animated: false)
         self.switchCameraButton = HighlightTrackingButtonNode()
         self.switchCameraEffectView = UIVisualEffectView(effect: UIBlurEffect(style: .dark))
         self.switchCameraEffectView.clipsToBounds = true
@@ -234,13 +248,16 @@ private class VoiceChatCameraPreviewControllerNode: ViewControllerTracingNode, U
         self.contentContainerNode.addSubnode(self.previewContainerNode)
         self.previewContainerNode.addSubnode(self.cameraNode)
+        self.previewContainerNode.addSubnode(self.microphoneButton)
+        self.microphoneButton.view.addSubview(self.microphoneEffectView)
+        self.microphoneButton.addSubnode(self.microphoneIconNode)
         self.previewContainerNode.addSubnode(self.switchCameraButton)
         self.switchCameraButton.view.addSubview(self.switchCameraEffectView)
         self.switchCameraButton.addSubnode(self.switchCameraIconNode)
         self.cameraButton.pressed = { [weak self] in
             if let strongSelf = self {
-                strongSelf.shareCamera?()
+                strongSelf.shareCamera?(strongSelf.microphoneButton.isSelected)
             }
         }
         self.cancelButton.pressed = { [weak self] in
@@ -249,6 +266,19 @@ private class VoiceChatCameraPreviewControllerNode: ViewControllerTracingNode, U
             }
         }
+        self.microphoneButton.addTarget(self, action: #selector(self.microphonePressed), forControlEvents: .touchUpInside)
+        self.microphoneButton.highligthedChanged = { [weak self] highlighted in
+            if let strongSelf = self {
+                if highlighted {
+                    let transition: ContainedViewLayoutTransition = .animated(duration: 0.3, curve: .spring)
+                    transition.updateSublayerTransformScale(node: strongSelf.microphoneButton, scale: 0.9)
+                } else {
+                    let transition: ContainedViewLayoutTransition = .animated(duration: 0.5, curve: .spring)
+                    transition.updateSublayerTransformScale(node: strongSelf.microphoneButton, scale: 1.0)
+                }
+            }
+        }
         self.switchCameraButton.addTarget(self, action: #selector(self.switchCameraPressed), forControlEvents: .touchUpInside)
         self.switchCameraButton.highligthedChanged = { [weak self] highlighted in
             if let strongSelf = self {
@@ -263,6 +293,11 @@ private class VoiceChatCameraPreviewControllerNode: ViewControllerTracingNode, U
             }
         }
+    @objc private func microphonePressed() {
+        self.microphoneButton.isSelected = !self.microphoneButton.isSelected
+        self.microphoneIconNode.update(state: .init(muted: !self.microphoneButton.isSelected, filled: true, color: .white), animated: true)
+    }
     @objc private func switchCameraPressed() {
         self.switchCamera?()
@@ -368,8 +403,10 @@ private class VoiceChatCameraPreviewControllerNode: ViewControllerTracingNode, U
         let cleanInsets = layout.insets(options: [.statusBar])
         insets.top = max(10.0, insets.top)
-        let buttonOffset: CGFloat = 120.0
+        var buttonOffset: CGFloat = 60.0
+        if let _ = self.broadcastPickerView {
+            buttonOffset *= 2.0
+        }
         let bottomInset: CGFloat = 10.0 + cleanInsets.bottom
         let titleHeight: CGFloat = 54.0
         var contentHeight = titleHeight + bottomInset + 52.0 + 17.0
@@ -403,6 +440,12 @@ private class VoiceChatCameraPreviewControllerNode: ViewControllerTracingNode, U
         self.cameraNode.frame = CGRect(origin: CGPoint(), size: previewSize)
         self.cameraNode.updateLayout(size: previewSize, isLandscape: false, transition: .immediate)
+        let microphoneFrame = CGRect(x: 16.0, y: previewSize.height - 48.0 - 16.0, width: 48.0, height: 48.0)
+        transition.updateFrame(node: self.microphoneButton, frame: microphoneFrame)
+        transition.updateFrame(view: self.microphoneEffectView, frame: CGRect(origin: CGPoint(), size: microphoneFrame.size))
+        transition.updateFrameAsPositionAndBounds(node: self.microphoneIconNode, frame: CGRect(origin: CGPoint(x: 1.0, y: 0.0), size: microphoneFrame.size).insetBy(dx: 6.0, dy: 6.0))
+        self.microphoneIconNode.transform = CATransform3DMakeScale(1.2, 1.2, 1.0)
         let switchCameraFrame = CGRect(x: previewSize.width - 48.0 - 16.0, y: previewSize.height - 48.0 - 16.0, width: 48.0, height: 48.0)
         transition.updateFrame(node: self.switchCameraButton, frame: switchCameraFrame)
         transition.updateFrame(view: self.switchCameraEffectView, frame: CGRect(origin: CGPoint(), size: switchCameraFrame.size))
@@ -416,6 +459,8 @@ private class VoiceChatCameraPreviewControllerNode: ViewControllerTracingNode, U
         transition.updateFrame(node: self.screenButton, frame: CGRect(x: buttonInset, y: contentHeight - cameraButtonHeight - 8.0 - screenButtonHeight - insets.bottom - 16.0, width: contentFrame.width, height: screenButtonHeight))
         if let broadcastPickerView = self.broadcastPickerView {
             broadcastPickerView.frame = CGRect(x: buttonInset, y: contentHeight - cameraButtonHeight - 8.0 - screenButtonHeight - insets.bottom - 16.0, width: contentFrame.width + 1000.0, height: screenButtonHeight)
+        } else {
+            self.screenButton.isHidden = true
         }
         let cancelButtonHeight = self.cancelButton.updateLayout(width: contentFrame.width - buttonInset * 2.0, transition: transition)
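Note: the camera preview sheet gains a microphone toggle: a selected-by-default button over the preview whose state ("unmuted") is forwarded when sharing, so shareCamera changes from (ASDisplayNode) -> Void to (ASDisplayNode, Bool) -> Void; the screen-share button is also hidden when no broadcast picker is available. A minimal UIKit sketch of wiring a toggle's state into such a callback (hypothetical names, not the project's node types):

    import UIKit

    final class CameraPreviewControls: NSObject {
        // Mirrors shareCamera: ((Bool) -> Void)? -- the Bool is the current "unmuted" state.
        var shareCamera: ((Bool) -> Void)?
        let microphoneButton = UIButton(type: .custom)
        let shareButton = UIButton(type: .custom)

        override init() {
            super.init()
            microphoneButton.isSelected = true   // start unmuted
            microphoneButton.addTarget(self, action: #selector(toggleMicrophone), for: .touchUpInside)
            shareButton.addTarget(self, action: #selector(share), for: .touchUpInside)
        }

        @objc private func toggleMicrophone() {
            microphoneButton.isSelected.toggle()
        }

        @objc private func share() {
            shareCamera?(microphoneButton.isSelected)
        }
    }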

File diff suppressed because it is too large

View File

@@ -72,6 +72,7 @@ final class VoiceChatFullscreenParticipantItem: ListViewItem {
     let context: AccountContext
     let peer: Peer
     let icon: Icon
+    let text: VoiceChatParticipantItem.ParticipantText
     let color: Color
     let isLandscape: Bool
     let active: Bool
@@ -83,12 +84,13 @@
     public let selectable: Bool = true
-    public init(presentationData: ItemListPresentationData, nameDisplayOrder: PresentationPersonNameOrder, context: AccountContext, peer: Peer, icon: Icon, color: Color, isLandscape: Bool, active: Bool, getAudioLevel: (() -> Signal<Float, NoError>)?, getVideo: @escaping () -> GroupVideoNode?, action: ((ASDisplayNode?) -> Void)?, contextAction: ((ASDisplayNode, ContextGesture?) -> Void)? = nil, getUpdatingAvatar: @escaping () -> Signal<(TelegramMediaImageRepresentation, Float)?, NoError>) {
+    public init(presentationData: ItemListPresentationData, nameDisplayOrder: PresentationPersonNameOrder, context: AccountContext, peer: Peer, icon: Icon, text: VoiceChatParticipantItem.ParticipantText, color: Color, isLandscape: Bool, active: Bool, getAudioLevel: (() -> Signal<Float, NoError>)?, getVideo: @escaping () -> GroupVideoNode?, action: ((ASDisplayNode?) -> Void)?, contextAction: ((ASDisplayNode, ContextGesture?) -> Void)? = nil, getUpdatingAvatar: @escaping () -> Signal<(TelegramMediaImageRepresentation, Float)?, NoError>) {
         self.presentationData = presentationData
         self.nameDisplayOrder = nameDisplayOrder
         self.context = context
         self.peer = peer
         self.icon = icon
+        self.text = text
         self.color = color
         self.isLandscape = isLandscape
         self.active = active
@@ -157,6 +159,7 @@ class VoiceChatFullscreenParticipantItemNode: ItemListRevealOptionsItemNode {
     let avatarNode: AvatarNode
     let contentWrapperNode: ASDisplayNode
     private let titleNode: TextNode
+    private let statusNode: VoiceChatParticipantStatusNode
     private var credibilityIconNode: ASImageNode?
     private let actionContainerNode: ASDisplayNode
@@ -174,6 +177,7 @@
     private var layoutParams: (VoiceChatFullscreenParticipantItem, ListViewItemLayoutParams, Bool, Bool)?
     private var isExtracted = false
     private var animatingExtraction = false
+    private var animatingSelection = false
     private var wavesColor: UIColor?
     let videoContainerNode: ASDisplayNode
@@ -183,14 +187,14 @@
     private var videoReadyDelayed = false
     private var videoReady = false
+    private var profileNode: VoiceChatPeerProfileNode?
     private var raiseHandTimer: SwiftSignalKit.Timer?
     var item: VoiceChatFullscreenParticipantItem? {
         return self.layoutParams?.0
     }
-    private var currentTitle: String?
     init() {
         self.contextSourceNode = ContextExtractedContentContainingNode()
         self.containerNode = ContextControllerSourceNode()
@@ -231,6 +235,8 @@
         self.titleNode.isUserInteractionEnabled = false
         self.titleNode.contentMode = .left
         self.titleNode.contentsScale = UIScreen.main.scale
+        self.statusNode = VoiceChatParticipantStatusNode()
         self.actionContainerNode = ASDisplayNode()
         self.actionButtonNode = HighlightableButtonNode()
@@ -256,7 +262,7 @@
         self.containerNode.targetNodeForActivationProgress = self.contextSourceNode.contentNode
         self.containerNode.shouldBegin = { [weak self] location in
-            guard let strongSelf = self else {
+            guard let _ = self else {
                 return false
             }
             return true
@@ -268,6 +274,12 @@
             }
             contextAction(strongSelf.contextSourceNode, gesture)
         }
+        self.contextSourceNode.willUpdateIsExtractedToContextPreview = { [weak self] isExtracted, transition in
+            guard let strongSelf = self, let _ = strongSelf.item else {
+                return
+            }
+            strongSelf.updateIsExtracted(isExtracted, transition: transition)
+        }
 //        self.contextSourceNode.willUpdateIsExtractedToContextPreview = { [weak self] isExtracted, transition in
 //            guard let strongSelf = self, let item = strongSelf.layoutParams?.0 else {
@@ -575,10 +587,16 @@
         self.layoutParams?.0.action?(self.contextSourceNode)
     }
-    func animateTransitionIn(from sourceNode: ASDisplayNode, containerNode: ASDisplayNode, animate: Bool = true) {
+    func animateTransitionIn(from sourceNode: ASDisplayNode, containerNode: ASDisplayNode, transition: ContainedViewLayoutTransition, animate: Bool = true) {
         guard let item = self.item else {
             return
         }
+        var duration: Double = 0.2
+        var timingFunction: String = CAMediaTimingFunctionName.easeInEaseOut.rawValue
+        if case let .animated(transitionDuration, curve) = transition {
+            duration = transitionDuration
+            timingFunction = curve.timingFunction
+        }
         let initialAnimate = animate
         if let sourceNode = sourceNode as? VoiceChatTileItemNode {
@@ -602,12 +620,11 @@
                 self.videoContainerNode.insertSubnode(videoNode, at: 0)
                 if animate {
-                    let transition = ContainedViewLayoutTransition.animated(duration: 0.2, curve: .easeInOut)
                     videoNode.updateLayout(size: videoSize, isLandscape: true, transition: transition)
                     let scale = sourceNode.bounds.width / videoSize.width
-                    self.videoContainerNode.layer.animateScale(from: sourceNode.bounds.width / videoSize.width, to: tileSize.width / videoSize.width, duration: 0.2, timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue)
-                    self.videoContainerNode.layer.animate(from: backgroundCornerRadius * (1.0 / scale) as NSNumber, to: videoCornerRadius as NSNumber, keyPath: "cornerRadius", timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue, duration: 0.2, removeOnCompletion: false, completion: { _ in
+                    self.videoContainerNode.layer.animateScale(from: sourceNode.bounds.width / videoSize.width, to: tileSize.width / videoSize.width, duration: duration, timingFunction: timingFunction)
+                    self.videoContainerNode.layer.animate(from: backgroundCornerRadius * (1.0 / scale) as NSNumber, to: videoCornerRadius as NSNumber, keyPath: "cornerRadius", timingFunction: timingFunction, duration: duration, removeOnCompletion: false, completion: { _ in
                     })
                     self.videoFadeNode.alpha = 1.0
@@ -625,7 +642,7 @@
                     self.contextSourceNode.position = targetContainerPosition
                     containerNode.addSubnode(self.contextSourceNode)
-                    self.contextSourceNode.layer.animatePosition(from: startContainerPosition, to: targetContainerPosition, duration: 0.2, timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue, completion: { [weak self] _ in
+                    self.contextSourceNode.layer.animatePosition(from: startContainerPosition, to: targetContainerPosition, duration: duration, timingFunction: timingFunction, completion: { [weak self] _ in
                         if let strongSelf = self {
                             strongSelf.contextSourceNode.position = initialPosition
                             strongSelf.containerNode.addSubnode(strongSelf.contextSourceNode)
@@ -634,16 +651,16 @@
                     if item.active {
                         self.borderImageNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2)
-                        self.borderImageNode.layer.animateScale(from: 0.001, to: 1.0, duration: 0.2, timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue)
+                        self.borderImageNode.layer.animateScale(from: 0.001, to: 1.0, duration: 0.2, timingFunction: timingFunction)
                     }
-                    self.backgroundImageNode.layer.animateScale(from: 0.001, to: 1.0, duration: 0.2, timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue)
-                    self.backgroundImageNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.15, timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue)
-                    self.contentWrapperNode.layer.animateScale(from: 0.001, to: 1.0, duration: 0.2, timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue)
-                    self.contentWrapperNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2, timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue)
+                    self.backgroundImageNode.layer.animateScale(from: 0.001, to: 1.0, duration: duration, timingFunction: timingFunction)
+                    self.backgroundImageNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: duration, timingFunction: timingFunction)
+                    self.contentWrapperNode.layer.animateScale(from: 0.001, to: 1.0, duration: duration, timingFunction: timingFunction)
+                    self.contentWrapperNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: duration, timingFunction: timingFunction)
                 } else if !initialAnimate {
-                    self.contextSourceNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2)
-                    self.contextSourceNode.layer.animateScale(from: 0.0, to: 1.0, duration: 0.2)
+                    self.contextSourceNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: duration, timingFunction: timingFunction)
+                    self.contextSourceNode.layer.animateScale(from: 0.0, to: 1.0, duration: duration, timingFunction: timingFunction)
                 }
             } else if let sourceNode = sourceNode as? VoiceChatParticipantItemNode, let _ = sourceNode.item {
                 var startContainerPosition = sourceNode.avatarNode.view.convert(sourceNode.avatarNode.bounds, to: containerNode.view).center
@@ -662,8 +679,7 @@
                 self.contextSourceNode.position = targetContainerPosition
                 containerNode.addSubnode(self.contextSourceNode)
-                let timingFunction = CAMediaTimingFunctionName.easeInEaseOut.rawValue
-                self.contextSourceNode.layer.animatePosition(from: startContainerPosition, to: targetContainerPosition, duration: 0.2, timingFunction: timingFunction, completion: { [weak self, weak sourceNode] _ in
+                self.contextSourceNode.layer.animatePosition(from: startContainerPosition, to: targetContainerPosition, duration: duration, timingFunction: timingFunction, completion: { [weak self, weak sourceNode] _ in
                     if let strongSelf = self {
                         sourceNode?.avatarNode.alpha = 1.0
                         strongSelf.contextSourceNode.position = initialPosition
@@ -676,24 +692,56 @@
                     self.borderImageNode.layer.animateScale(from: 0.001, to: 1.0, duration: 0.2, timingFunction: timingFunction)
                 }
-                self.avatarNode.layer.animateScale(from: 0.8, to: 1.0, duration: 0.2)
-                self.backgroundImageNode.layer.animateScale(from: 0.001, to: 1.0, duration: 0.2, timingFunction: timingFunction)
-                self.backgroundImageNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.15, timingFunction: timingFunction)
-                self.contentWrapperNode.layer.animateScale(from: 0.001, to: 1.0, duration: 0.2, timingFunction: timingFunction)
-                self.contentWrapperNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2, timingFunction: timingFunction)
+                self.avatarNode.layer.animateScale(from: 0.8, to: 1.0, duration: duration, timingFunction: timingFunction)
+                self.backgroundImageNode.layer.animateScale(from: 0.001, to: 1.0, duration: duration, timingFunction: timingFunction)
+                self.backgroundImageNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: duration, timingFunction: timingFunction)
+                self.contentWrapperNode.layer.animateScale(from: 0.001, to: 1.0, duration: duration, timingFunction: timingFunction)
+                self.contentWrapperNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: duration, timingFunction: timingFunction)
             }
         }
     }
+    private func updateIsExtracted(_ isExtracted: Bool, transition: ContainedViewLayoutTransition) {
+        guard self.isExtracted != isExtracted, let extractedRect = self.extractedRect, let nonExtractedRect = self.nonExtractedRect, let item = self.item else {
+            return
+        }
+        self.isExtracted = isExtracted
+        if isExtracted {
+            let profileNode = VoiceChatPeerProfileNode(context: item.context, size: extractedRect.size, peer: item.peer, text: item.text, customNode: self.videoContainerNode, additionalEntry: .single(nil), requestDismiss: { [weak self] in
+                self?.contextSourceNode.requestDismiss?()
+            })
+            profileNode.frame = CGRect(origin: CGPoint(), size: extractedRect.size)
+            self.profileNode = profileNode
+            self.contextSourceNode.contentNode.addSubnode(profileNode)
+            profileNode.animateIn(from: self, targetRect: extractedRect, transition: transition)
+            self.contextSourceNode.contentNode.customHitTest = { [weak self] point in
+                if let strongSelf = self, let profileNode = strongSelf.profileNode {
+                    if profileNode.avatarListWrapperNode.frame.contains(point) {
+                        return profileNode.avatarListNode.view
+                    }
+                }
+                return nil
+            }
+        } else if let profileNode = self.profileNode {
+            self.profileNode = nil
+            profileNode.animateOut(to: self, targetRect: nonExtractedRect, transition: transition)
+            self.contextSourceNode.contentNode.customHitTest = nil
+        }
+    }
     func asyncLayout() -> (_ item: VoiceChatFullscreenParticipantItem, _ params: ListViewItemLayoutParams, _ first: Bool, _ last: Bool) -> (ListViewItemNodeLayout, (Bool, Bool) -> Void) {
         let makeTitleLayout = TextNode.asyncLayout(self.titleNode)
+        let makeStatusLayout = self.statusNode.asyncLayout()
         let currentItem = self.layoutParams?.0
         let hasVideo = self.videoNode != nil
         return { item, params, first, last in
-            let titleFont = Font.semibold(12.0)
+            let titleFont = Font.semibold(13.0)
             var titleAttributedString: NSAttributedString?
             var titleColor = item.presentationData.theme.list.itemPrimaryTextColor
@@ -760,6 +808,9 @@
             let constrainedWidth = params.width - 24.0 - 10.0
             let (titleLayout, titleApply) = makeTitleLayout(TextNodeLayoutArguments(attributedString: titleAttributedString, backgroundColor: nil, maximumNumberOfLines: 1, truncationType: .end, constrainedSize: CGSize(width: constrainedWidth, height: CGFloat.greatestFiniteMagnitude), alignment: .natural, cutout: nil, insets: UIEdgeInsets()))
+            let availableWidth = params.availableHeight
+            let (statusLayout, _) = makeStatusLayout(CGSize(width: availableWidth - 30.0, height: CGFloat.greatestFiniteMagnitude), item.text, true)
             let contentSize = tileSize
             let insets = UIEdgeInsets(top: 0.0, left: 0.0, bottom: !last ? 6.0 : 0.0, right: 0.0)
@@ -769,7 +820,6 @@
                 if let strongSelf = self {
                     let hadItem = strongSelf.layoutParams?.0 != nil
                     strongSelf.layoutParams = (item, params, first, last)
-                    strongSelf.currentTitle = titleAttributedString?.string
                     strongSelf.wavesColor = wavesColor
                     let videoNode = item.getVideo()
@@ -794,28 +844,15 @@
                         animationSize = CGSize(width: 36.0, height: 36.0)
                         animationScale = 0.66667
-                        animationFrame = CGRect(x: layout.size.width - 29.0, y: 54.0, width: 24.0, height: 24.0)
+                        animationFrame = CGRect(x: layout.size.width - 29.0, y: 55.0, width: 24.0, height: 24.0)
                         titleFrame = CGRect(origin: CGPoint(x: 8.0, y: 63.0), size: titleLayout.size)
-                    var extractedRect = CGRect(origin: CGPoint(), size: layout.contentSize).insetBy(dx: 16.0 + params.leftInset, dy: 0.0)
-                    var extractedHeight = extractedRect.height
-                    var extractedVerticalOffset: CGFloat = 0.0
-                    if item.peer.smallProfileImage != nil || strongSelf.videoNode != nil {
-                        extractedVerticalOffset = extractedRect.width
-                        extractedHeight += extractedVerticalOffset
-                    }
-                    extractedRect.size.height = extractedHeight
-                    strongSelf.extractedVerticalOffset = extractedVerticalOffset
+                    let extractedWidth = availableWidth
+                    let extractedRect = CGRect(x: 0.0, y: 0.0, width: extractedWidth, height: extractedWidth + statusLayout.height + 39.0)
                     strongSelf.extractedRect = extractedRect
                     strongSelf.nonExtractedRect = nonExtractedRect
                     if strongSelf.isExtracted {
-                        var extractedRect = extractedRect
-                        if !extractedVerticalOffset.isZero {
-                            extractedRect = CGRect(x: extractedRect.minX, y: extractedRect.minY + extractedVerticalOffset, width: extractedRect.width, height: extractedRect.height - extractedVerticalOffset)
-                        }
                         strongSelf.backgroundImageNode.frame = extractedRect
                     } else {
                         strongSelf.backgroundImageNode.frame = nonExtractedRect
@@ -877,7 +914,7 @@
                     transition.updateFrameAsPositionAndBounds(node: strongSelf.avatarNode, frame: avatarFrame)
-                    let blobFrame = avatarFrame.insetBy(dx: -14.0, dy: -14.0)
+                    let blobFrame = avatarFrame.insetBy(dx: -18.0, dy: -18.0)
                     if let getAudioLevel = item.getAudioLevel {
                         if !strongSelf.didSetupAudioLevel || currentItem?.peer.id != item.peer.id {
                             strongSelf.audioLevelView?.frame = blobFrame
@@ -902,7 +939,7 @@
                                     playbackMaskLayer.frame = maskRect
                                     playbackMaskLayer.fillRule = .evenOdd
                                     let maskPath = UIBezierPath()
-                                    maskPath.append(UIBezierPath(roundedRect: maskRect.insetBy(dx: 14, dy: 14), cornerRadius: 22))
+                                    maskPath.append(UIBezierPath(roundedRect: maskRect.insetBy(dx: 18, dy: 18), cornerRadius: 22))
                                     maskPath.append(UIBezierPath(rect: maskRect))
                                     playbackMaskLayer.path = maskPath.cgPath
                                     audioLevelView.layer.mask = playbackMaskLayer
@@ -912,6 +949,10 @@
                                     strongSelf.audioLevelView = audioLevelView
                                     strongSelf.offsetContainerNode.view.insertSubview(audioLevelView, at: 0)
+                                    if let item = strongSelf.item, strongSelf.videoNode != nil || item.active {
+                                        audioLevelView.alpha = 0.0
+                                    }
                                 }
                                 let level = min(1.0, max(0.0, CGFloat(value)))
@@ -926,12 +967,13 @@
                                         audioLevelView.setColor(wavesColor, animated: true)
                                     }
                                 } else {
+                                    audioLevelView.stopAnimating(duration: 0.5)
                                     avatarScale = 1.0
                                 }
-                                let transition: ContainedViewLayoutTransition = .animated(duration: 0.15, curve: .easeInOut)
-                                transition.updateTransformScale(node: strongSelf.avatarNode, scale: strongSelf.isExtracted ? 1.0 : avatarScale, beginWithCurrentState: true)
+                                if !strongSelf.animatingSelection {
+                                    let transition: ContainedViewLayoutTransition = .animated(duration: 0.15, curve: .easeInOut)
+                                    transition.updateTransformScale(node: strongSelf.avatarNode, scale: strongSelf.isExtracted ? 1.0 : avatarScale, beginWithCurrentState: true)
+                                }
                             }
                         }))
                     }
@@ -1073,19 +1115,27 @@
                             strongSelf.videoContainerNode.layer.animateScale(from: videoContainerScale, to: 0.001, duration: 0.2)
                             strongSelf.avatarNode.layer.animateScale(from: 0.0, to: 1.0, duration: 0.2)
                             strongSelf.videoContainerNode.layer.animatePosition(from: CGPoint(), to: CGPoint(x: 0.0, y: -9.0), duration: 0.2, additive: true)
+                            strongSelf.audioLevelView?.layer.animateScale(from: 0.0, to: 1.0, duration: 0.2)
                         }
                         transition.updateAlpha(node: videoNode, alpha: 0.0)
                         transition.updateAlpha(node: strongSelf.videoFadeNode, alpha: 0.0)
                         transition.updateAlpha(node: strongSelf.avatarNode, alpha: 1.0)
+                        if let audioLevelView = strongSelf.audioLevelView {
+                            transition.updateAlpha(layer: audioLevelView.layer, alpha: 1.0)
+                        }
                     } else {
                         if !strongSelf.avatarNode.alpha.isZero {
                             strongSelf.videoContainerNode.layer.animateScale(from: 0.001, to: videoContainerScale, duration: 0.2)
                             strongSelf.avatarNode.layer.animateScale(from: 1.0, to: 0.001, duration: 0.2)
+                            strongSelf.audioLevelView?.layer.animateScale(from: 1.0, to: 0.001, duration: 0.2)
                             strongSelf.videoContainerNode.layer.animatePosition(from: CGPoint(x: 0.0, y: -9.0), to: CGPoint(), duration: 0.2, additive: true)
                         }
                         transition.updateAlpha(node: videoNode, alpha: 1.0)
                         transition.updateAlpha(node: strongSelf.videoFadeNode, alpha: 1.0)
                         transition.updateAlpha(node: strongSelf.avatarNode, alpha: 0.0)
+                        if let audioLevelView = strongSelf.audioLevelView {
+                            transition.updateAlpha(layer: audioLevelView.layer, alpha: 0.0)
+                        }
                     }
                 } else {
                     if item.active {
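Note: animateTransitionIn now receives the ContainedViewLayoutTransition that drives the surrounding layout and derives one shared duration and timing function from it, so every layer animation in the transition stays in sync instead of hard-coding 0.2 s ease-in-ease-out; the item also gains a participant status text and an extracted profile view. A self-contained sketch of deriving the shared parameters, using a stand-in transition enum rather than the real Display type:

    import QuartzCore

    // Stand-in for a Display-style transition value (assumption, not the real ContainedViewLayoutTransition).
    enum LayoutTransition {
        case immediate
        case animated(duration: Double, timingFunction: String)
    }

    // Resolve a single (duration, timingFunction) pair and reuse it for every
    // layer animation in the transition, as the updated animateTransitionIn does.
    func animationParameters(for transition: LayoutTransition) -> (duration: Double, timingFunction: String) {
        switch transition {
        case .immediate:
            return (0.2, CAMediaTimingFunctionName.easeInEaseOut.rawValue)
        case let .animated(duration, timingFunction):
            return (duration, timingFunction)
        }
    }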

View File

@@ -0,0 +1,595 @@
import Foundation
import UIKit
import Display
import AsyncDisplayKit
import SwiftSignalKit
import TelegramPresentationData
import TelegramUIPreferences
import TelegramStringFormatting
import TelegramVoip
import TelegramAudio
import AccountContext
import Postbox
import TelegramCore
import SyncCore
import AppBundle
import PresentationDataUtils
import AvatarNode
import AudioBlob
private let backArrowImage = NavigationBarTheme.generateBackArrowImage(color: .white)
final class VoiceChatMainStageNode: ASDisplayNode {
    private let context: AccountContext
    private let call: PresentationGroupCall
    private var currentPeer: (PeerId, String?)?
    private var currentPeerEntry: VoiceChatPeerEntry?
    private var currentVideoNode: GroupVideoNode?
    private var candidateVideoNode: GroupVideoNode?
    private let backgroundNode: ASDisplayNode
    private let topFadeNode: ASImageNode
    private let bottomFadeNode: ASImageNode
    private let headerNode: ASDisplayNode
    private let backButtonNode: HighlightableButtonNode
    private let backButtonArrowNode: ASImageNode
    private let pinButtonNode: HighlightTrackingButtonNode
    private let pinButtonIconNode: ASImageNode
    private let pinButtonTitleNode: ImmediateTextNode
    private var audioLevelView: VoiceBlobView?
    private let audioLevelDisposable = MetaDisposable()
    private let speakingPeerDisposable = MetaDisposable()
    private let speakingAudioLevelDisposable = MetaDisposable()
    private var avatarNode: AvatarNode
    private let titleNode: ImmediateTextNode
    private let microphoneNode: VoiceChatMicrophoneNode
    private let speakingContainerNode: ASDisplayNode
    private var speakingEffectView: UIVisualEffectView?
    private let speakingAvatarNode: AvatarNode
    private let speakingTitleNode: ImmediateTextNode
    private var speakingAudioLevelView: VoiceBlobView?
    private var validLayout: (CGSize, CGFloat, CGFloat, Bool)?
    var tapped: (() -> Void)?
    var back: (() -> Void)?
    var togglePin: (() -> Void)?
    var getAudioLevel: ((PeerId) -> Signal<Float, NoError>)?
    private let videoReadyDisposable = MetaDisposable()
    init(context: AccountContext, call: PresentationGroupCall) {
        self.context = context
        self.call = call
        self.backgroundNode = ASDisplayNode()
        self.backgroundNode.alpha = 0.0
        self.backgroundNode.backgroundColor = UIColor(rgb: 0x1c1c1e)
        self.topFadeNode = ASImageNode()
        self.topFadeNode.alpha = 0.0
        self.topFadeNode.displaysAsynchronously = false
        self.topFadeNode.displayWithoutProcessing = true
        self.topFadeNode.contentMode = .scaleToFill
        self.topFadeNode.image = generateImage(CGSize(width: 1.0, height: 50.0), rotatedContext: { size, context in
            let bounds = CGRect(origin: CGPoint(), size: size)
            context.clear(bounds)
            let colorsArray = [UIColor(rgb: 0x000000, alpha: 0.7).cgColor, UIColor(rgb: 0x000000, alpha: 0.0).cgColor] as CFArray
            var locations: [CGFloat] = [0.0, 1.0]
            let gradient = CGGradient(colorsSpace: deviceColorSpace, colors: colorsArray, locations: &locations)!
            context.drawLinearGradient(gradient, start: CGPoint(), end: CGPoint(x: 0.0, y: size.height), options: CGGradientDrawingOptions())
        })
        self.bottomFadeNode = ASImageNode()
        self.bottomFadeNode.alpha = 0.0
        self.bottomFadeNode.displaysAsynchronously = false
        self.bottomFadeNode.displayWithoutProcessing = true
        self.bottomFadeNode.contentMode = .scaleToFill
        self.bottomFadeNode.image = generateImage(CGSize(width: 1.0, height: 50.0), rotatedContext: { size, context in
            let bounds = CGRect(origin: CGPoint(), size: size)
            context.clear(bounds)
            let colorsArray = [UIColor(rgb: 0x000000, alpha: 0.0).cgColor, UIColor(rgb: 0x000000, alpha: 0.7).cgColor] as CFArray
            var locations: [CGFloat] = [0.0, 1.0]
            let gradient = CGGradient(colorsSpace: deviceColorSpace, colors: colorsArray, locations: &locations)!
            context.drawLinearGradient(gradient, start: CGPoint(), end: CGPoint(x: 0.0, y: size.height), options: CGGradientDrawingOptions())
        })
        self.headerNode = ASDisplayNode()
        self.headerNode.alpha = 0.0
        self.backButtonArrowNode = ASImageNode()
        self.backButtonArrowNode.displayWithoutProcessing = true
        self.backButtonArrowNode.displaysAsynchronously = false
        self.backButtonArrowNode.image = NavigationBarTheme.generateBackArrowImage(color: .white)
        self.backButtonNode = HighlightableButtonNode()
        self.pinButtonIconNode = ASImageNode()
        self.pinButtonIconNode.displayWithoutProcessing = true
        self.pinButtonIconNode.displaysAsynchronously = false
        self.pinButtonIconNode.image = generateTintedImage(image: UIImage(bundleImageName: "Call/Pin"), color: .white)
        self.pinButtonTitleNode = ImmediateTextNode()
        self.pinButtonTitleNode.isHidden = true
        self.pinButtonTitleNode.attributedText = NSAttributedString(string: "Unpin", font: Font.regular(17.0), textColor: .white)
        self.pinButtonNode = HighlightableButtonNode()
        self.avatarNode = AvatarNode(font: avatarPlaceholderFont(size: 42.0))
        self.avatarNode.isHidden = true
        self.titleNode = ImmediateTextNode()
        self.titleNode.alpha = 0.0
        self.titleNode.isUserInteractionEnabled = false
        self.microphoneNode = VoiceChatMicrophoneNode()
        self.microphoneNode.alpha = 0.0
        self.speakingContainerNode = ASDisplayNode()
        self.speakingContainerNode.cornerRadius = 19.0
        self.speakingAvatarNode = AvatarNode(font: avatarPlaceholderFont(size: 14.0))
        self.speakingTitleNode = ImmediateTextNode()
        super.init()
        self.clipsToBounds = true
        self.cornerRadius = 11.0
        self.addSubnode(self.backgroundNode)
        self.addSubnode(self.topFadeNode)
        self.addSubnode(self.bottomFadeNode)
        self.addSubnode(self.avatarNode)
        self.addSubnode(self.titleNode)
        self.addSubnode(self.microphoneNode)
        self.addSubnode(self.headerNode)
        self.headerNode.addSubnode(self.backButtonNode)
        self.headerNode.addSubnode(self.backButtonArrowNode)
        self.headerNode.addSubnode(self.pinButtonIconNode)
        self.headerNode.addSubnode(self.pinButtonTitleNode)
        self.headerNode.addSubnode(self.pinButtonNode)
        let presentationData = context.sharedContext.currentPresentationData.with { $0 }
        self.backButtonNode.setTitle(presentationData.strings.Common_Back, with: Font.regular(17.0), with: .white, for: [])
        self.backButtonNode.hitTestSlop = UIEdgeInsets(top: -8.0, left: -20.0, bottom: -8.0, right: -8.0)
        self.backButtonNode.highligthedChanged = { [weak self] highlighted in
            if let strongSelf = self {
                if highlighted {
                    strongSelf.backButtonNode.layer.removeAnimation(forKey: "opacity")
                    strongSelf.backButtonArrowNode.layer.removeAnimation(forKey: "opacity")
                    strongSelf.backButtonNode.alpha = 0.4
                    strongSelf.backButtonArrowNode.alpha = 0.4
                } else {
                    strongSelf.backButtonNode.alpha = 1.0
                    strongSelf.backButtonArrowNode.alpha = 1.0
                    strongSelf.backButtonNode.layer.animateAlpha(from: 0.4, to: 1.0, duration: 0.2)
                    strongSelf.backButtonArrowNode.layer.animateAlpha(from: 0.4, to: 1.0, duration: 0.2)
                }
            }
        }
        self.backButtonNode.addTarget(self, action: #selector(self.backPressed), forControlEvents: .touchUpInside)
        self.pinButtonNode.highligthedChanged = { [weak self] highlighted in
            if let strongSelf = self {
                if highlighted {
                    strongSelf.pinButtonTitleNode.layer.removeAnimation(forKey: "opacity")
                    strongSelf.pinButtonIconNode.layer.removeAnimation(forKey: "opacity")
                    strongSelf.pinButtonTitleNode.alpha = 0.4
                    strongSelf.pinButtonIconNode.alpha = 0.4
                } else {
                    strongSelf.pinButtonTitleNode.alpha = 1.0
                    strongSelf.pinButtonIconNode.alpha = 1.0
                    strongSelf.pinButtonTitleNode.layer.animateAlpha(from: 0.4, to: 1.0, duration: 0.2)
                    strongSelf.pinButtonIconNode.layer.animateAlpha(from: 0.4, to: 1.0, duration: 0.2)
                }
            }
        }
        self.pinButtonNode.addTarget(self, action: #selector(self.pinPressed), forControlEvents: .touchUpInside)
    }
deinit {
self.videoReadyDisposable.dispose()
self.audioLevelDisposable.dispose()
self.speakingPeerDisposable.dispose()
self.speakingAudioLevelDisposable.dispose()
}
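// Once the view is loaded, back the speaking indicator with a dark blur and install the tap recognizer.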
override func didLoad() {
super.didLoad()
let speakingEffectView = UIVisualEffectView(effect: UIBlurEffect(style: .dark))
self.speakingContainerNode.view.addSubview(speakingEffectView)
self.speakingEffectView = speakingEffectView
self.view.addGestureRecognizer(UITapGestureRecognizer(target: self, action: #selector(self.tap)))
}
@objc private func tap() {
self.tapped?()
}
@objc private func backPressed() {
self.back?()
}
@objc private func pinPressed() {
self.togglePin?()
}
var animating = false
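// Expands the main stage from a grid tile: fades in the background, fade gradients, title, microphone and header,
// then animates this node's frame from the tile's converted frame to its final frame.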
func animateTransitionIn(from sourceNode: ASDisplayNode, transition: ContainedViewLayoutTransition) {
guard let sourceNode = sourceNode as? VoiceChatTileItemNode, let _ = sourceNode.item, let (_, sideInset, bottomInset, _) = self.validLayout else {
return
}
let alphaTransition = ContainedViewLayoutTransition.animated(duration: 0.2, curve: .linear)
alphaTransition.updateAlpha(node: self.backgroundNode, alpha: 1.0)
alphaTransition.updateAlpha(node: self.topFadeNode, alpha: 1.0)
alphaTransition.updateAlpha(node: self.bottomFadeNode, alpha: 1.0)
alphaTransition.updateAlpha(node: self.titleNode, alpha: 1.0)
alphaTransition.updateAlpha(node: self.microphoneNode, alpha: 1.0)
alphaTransition.updateAlpha(node: self.headerNode, alpha: 1.0)
sourceNode.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.3)
self.animating = true
let targetFrame = self.frame
let startLocalFrame = sourceNode.view.convert(sourceNode.bounds, to: self.supernode?.view)
self.update(size: startLocalFrame.size, sideInset: sideInset, bottomInset: bottomInset, isLandscape: true, force: true, transition: .immediate)
self.frame = startLocalFrame
self.update(size: targetFrame.size, sideInset: sideInset, bottomInset: bottomInset, isLandscape: true, force: true, transition: transition)
transition.updateFrame(node: self, frame: targetFrame, completion: { [weak self] _ in
self?.animating = false
})
}
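// Collapses the main stage back into a grid tile: fades out the chrome and animates this node's frame to the
// tile's converted frame, restoring the original layout once the transition completes.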
func animateTransitionOut(to targetNode: ASDisplayNode?, transition: ContainedViewLayoutTransition, completion: @escaping () -> Void) {
guard let (_, sideInset, bottomInset, _) = self.validLayout else {
return
}
let alphaTransition = ContainedViewLayoutTransition.animated(duration: 0.2, curve: .linear)
alphaTransition.updateAlpha(node: self.backgroundNode, alpha: 0.0)
alphaTransition.updateAlpha(node: self.topFadeNode, alpha: 0.0)
// alphaTransition.updateAlpha(node: self.bottomFadeNode, alpha: 0.0)
alphaTransition.updateAlpha(node: self.titleNode, alpha: 0.0)
alphaTransition.updateAlpha(node: self.microphoneNode, alpha: 0.0)
alphaTransition.updateAlpha(node: self.headerNode, alpha: 0.0)
guard let targetNode = targetNode as? VoiceChatTileItemNode, let _ = targetNode.item else {
completion()
return
}
targetNode.fadeNode.isHidden = true
targetNode.isHidden = false
targetNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.1)
self.animating = true
let initialFrame = self.frame
let targetFrame = targetNode.view.convert(targetNode.bounds, to: self.supernode?.view)
self.update(size: targetFrame.size, sideInset: sideInset, bottomInset: bottomInset, isLandscape: true, force: true, transition: transition)
transition.updateFrame(node: self, frame: targetFrame, completion: { [weak self] _ in
if let strongSelf = self {
completion()
strongSelf.bottomFadeNode.alpha = 0.0
targetNode.fadeNode.isHidden = false
strongSelf.animating = false
strongSelf.frame = initialFrame
strongSelf.update(size: initialFrame.size, sideInset: sideInset, bottomInset: bottomInset, isLandscape: true, transition: .immediate)
}
})
}
private var speakingPeerId: PeerId?
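// Tracks the currently speaking peer for the floating indicator and drives its blob animation from that peer's audio level.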
func update(speakingPeerId: PeerId?) {
guard self.speakingPeerId != speakingPeerId else {
return
}
self.speakingPeerId = speakingPeerId
var wavesColor = UIColor(rgb: 0x34c759)
if let getAudioLevel = self.getAudioLevel, let peerId = speakingPeerId {
self.speakingAudioLevelView?.removeFromSuperview()
let blobFrame = self.speakingAvatarNode.frame.insetBy(dx: -14.0, dy: -14.0)
self.speakingAudioLevelDisposable.set((getAudioLevel(peerId)
|> deliverOnMainQueue).start(next: { [weak self] value in
guard let strongSelf = self else {
return
}
if strongSelf.speakingAudioLevelView == nil, value > 0.0 {
let audioLevelView = VoiceBlobView(
frame: blobFrame,
maxLevel: 1.5,
smallBlobRange: (0, 0),
mediumBlobRange: (0.69, 0.87),
bigBlobRange: (0.71, 1.0)
)
audioLevelView.isHidden = strongSelf.currentPeer?.1 != nil
audioLevelView.setColor(wavesColor)
audioLevelView.alpha = 1.0
strongSelf.speakingAudioLevelView = audioLevelView
strongSelf.speakingContainerNode.view.insertSubview(audioLevelView, belowSubview: strongSelf.speakingAvatarNode.view)
}
let level = min(1.5, max(0.0, CGFloat(value)))
if let audioLevelView = strongSelf.speakingAudioLevelView {
audioLevelView.updateLevel(CGFloat(value))
let avatarScale: CGFloat
if value > 0.02 {
audioLevelView.startAnimating()
avatarScale = 1.03 + level * 0.13
audioLevelView.setColor(wavesColor, animated: true)
if let silenceTimer = strongSelf.silenceTimer {
silenceTimer.invalidate()
strongSelf.silenceTimer = nil
}
} else {
avatarScale = 1.0
}
let transition: ContainedViewLayoutTransition = .animated(duration: 0.15, curve: .easeInOut)
transition.updateTransformScale(node: strongSelf.avatarNode, scale: avatarScale, beginWithCurrentState: true)
}
}))
} else {
self.speakingAudioLevelDisposable.set(nil)
if let audioLevelView = self.speakingAudioLevelView {
audioLevelView.removeFromSuperview()
self.speakingAudioLevelView = nil
}
}
}
private var silenceTimer: SwiftSignalKit.Timer?
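// Applies a participant entry: refreshes the avatar and title, resubscribes to the peer's audio level when the
// peer changes, and maps the mute state onto the microphone icon.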
func update(peerEntry: VoiceChatPeerEntry, pinned: Bool) {
let previousPeerEntry = self.currentPeerEntry
self.currentPeerEntry = peerEntry
if !arePeersEqual(previousPeerEntry?.peer, peerEntry.peer) {
let peer = peerEntry.peer
let presentationData = self.context.sharedContext.currentPresentationData.with { $0 }
if previousPeerEntry?.peer.id == peerEntry.peer.id {
self.avatarNode.setPeer(context: self.context, theme: presentationData.theme, peer: peer)
} else {
let previousAvatarNode = self.avatarNode
self.avatarNode = AvatarNode(font: avatarPlaceholderFont(size: 42.0))
self.avatarNode.setPeer(context: self.context, theme: presentationData.theme, peer: peer, synchronousLoad: true)
self.avatarNode.frame = previousAvatarNode.frame
previousAvatarNode.supernode?.insertSubnode(self.avatarNode, aboveSubnode: previousAvatarNode)
previousAvatarNode.removeFromSupernode()
}
self.titleNode.attributedText = NSAttributedString(string: peer.displayTitle(strings: presentationData.strings, displayOrder: presentationData.nameDisplayOrder), font: Font.semibold(15.0), textColor: .white)
if let (size, sideInset, bottomInset, isLandscape) = self.validLayout {
self.update(size: size, sideInset: sideInset, bottomInset: bottomInset, isLandscape: isLandscape, transition: .immediate)
}
}
self.pinButtonTitleNode.isHidden = !pinned
self.pinButtonIconNode.image = !pinned ? generateTintedImage(image: UIImage(bundleImageName: "Call/Pin"), color: .white) : generateTintedImage(image: UIImage(bundleImageName: "Call/Unpin"), color: .white)
var wavesColor = UIColor(rgb: 0x34c759)
if let getAudioLevel = self.getAudioLevel, previousPeerEntry?.peer.id != peerEntry.peer.id {
self.audioLevelView?.removeFromSuperview()
let blobFrame = self.avatarNode.frame.insetBy(dx: -60.0, dy: -60.0)
self.audioLevelDisposable.set((getAudioLevel(peerEntry.peer.id)
|> deliverOnMainQueue).start(next: { [weak self] value in
guard let strongSelf = self else {
return
}
if strongSelf.audioLevelView == nil, value > 0.0 {
let audioLevelView = VoiceBlobView(
frame: blobFrame,
maxLevel: 1.5,
smallBlobRange: (0, 0),
mediumBlobRange: (0.69, 0.87),
bigBlobRange: (0.71, 1.0)
)
audioLevelView.isHidden = strongSelf.currentPeer?.1 != nil
audioLevelView.setColor(wavesColor)
audioLevelView.alpha = 1.0
strongSelf.audioLevelView = audioLevelView
strongSelf.view.insertSubview(audioLevelView, belowSubview: strongSelf.avatarNode.view)
}
let level = min(1.5, max(0.0, CGFloat(value)))
if let audioLevelView = strongSelf.audioLevelView {
audioLevelView.updateLevel(CGFloat(value))
let avatarScale: CGFloat
if value > 0.02 {
audioLevelView.startAnimating()
avatarScale = 1.03 + level * 0.13
audioLevelView.setColor(wavesColor, animated: true)
if let silenceTimer = strongSelf.silenceTimer {
silenceTimer.invalidate()
strongSelf.silenceTimer = nil
}
} else {
avatarScale = 1.0
if strongSelf.silenceTimer == nil {
let silenceTimer = SwiftSignalKit.Timer(timeout: 1.0, repeat: false, completion: { [weak self] in
self?.audioLevelView?.stopAnimating(duration: 0.5)
self?.silenceTimer = nil
}, queue: Queue.mainQueue())
strongSelf.silenceTimer = silenceTimer
silenceTimer.start()
}
}
let transition: ContainedViewLayoutTransition = .animated(duration: 0.15, curve: .easeInOut)
transition.updateTransformScale(node: strongSelf.avatarNode, scale: avatarScale, beginWithCurrentState: true)
}
}))
}
var muted = false
var state = peerEntry.state
if let muteState = peerEntry.muteState, case .speaking = state, muteState.mutedByYou || !muteState.canUnmute {
state = .listening
}
switch state {
case .listening:
if let muteState = peerEntry.muteState, muteState.mutedByYou {
muted = true
} else {
muted = peerEntry.muteState != nil
}
case .speaking:
if let muteState = peerEntry.muteState, muteState.mutedByYou {
muted = true
} else {
muted = false
}
case .raisedHand, .invited:
muted = true
}
self.microphoneNode.update(state: VoiceChatMicrophoneNode.State(muted: muted, filled: true, color: .white), animated: true)
}
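// Switches the stage to a peer and optional video endpoint; when an endpoint is provided an incoming video view
// is requested, and with waitForFullSize the completion is deferred until the video reports that it is ready.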
func update(peer: (peer: PeerId, endpointId: String?)?, waitForFullSize: Bool, completion: (() -> Void)? = nil) {
let previousPeer = self.currentPeer
if previousPeer?.0 == peer?.0 && previousPeer?.1 == peer?.1 {
completion?()
return
}
self.currentPeer = peer
if let (_, endpointId) = peer {
if endpointId != previousPeer?.1 {
if let endpointId = endpointId {
self.avatarNode.isHidden = true
self.audioLevelView?.isHidden = true
self.call.makeIncomingVideoView(endpointId: endpointId, completion: { [weak self] videoView in
Queue.mainQueue().async {
guard let strongSelf = self, let videoView = videoView else {
return
}
let videoNode = GroupVideoNode(videoView: videoView, backdropVideoView: nil)
if let currentVideoNode = strongSelf.currentVideoNode {
strongSelf.currentVideoNode = nil
currentVideoNode.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2, removeOnCompletion: false, completion: { [weak currentVideoNode] _ in
currentVideoNode?.removeFromSupernode()
})
}
strongSelf.currentVideoNode = videoNode
strongSelf.insertSubnode(videoNode, aboveSubnode: strongSelf.backgroundNode)
if let (size, sideInset, bottomInset, isLandscape) = strongSelf.validLayout {
strongSelf.update(size: size, sideInset: sideInset, bottomInset: bottomInset, isLandscape: isLandscape, transition: .immediate)
}
if waitForFullSize {
strongSelf.videoReadyDisposable.set((videoNode.ready
|> filter { $0 }
|> take(1)
|> deliverOnMainQueue).start(next: { _ in
Queue.mainQueue().after(0.01) {
completion?()
}
}))
} else {
strongSelf.videoReadyDisposable.set(nil)
completion?()
}
}
})
} else {
self.avatarNode.isHidden = false
self.audioLevelView?.isHidden = false
if let currentVideoNode = self.currentVideoNode {
currentVideoNode.removeFromSupernode()
self.currentVideoNode = nil
}
}
} else {
self.audioLevelView?.isHidden = self.currentPeer?.1 != nil
completion?()
}
} else {
self.videoReadyDisposable.set(nil)
if let currentVideoNode = self.currentVideoNode {
currentVideoNode.removeFromSupernode()
self.currentVideoNode = nil
}
completion?()
}
}
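// Lays out the video, avatar, fade gradients, title, microphone and header for the given size and insets;
// skipped while a frame transition is animating unless force is set.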
func update(size: CGSize, sideInset: CGFloat, bottomInset: CGFloat, isLandscape: Bool, force: Bool = false, transition: ContainedViewLayoutTransition) {
self.validLayout = (size, sideInset, bottomInset, isLandscape)
if self.animating && !force {
return
}
let initialBottomInset = bottomInset
var bottomInset = bottomInset
if !sideInset.isZero {
bottomInset = 14.0
}
if let currentVideoNode = self.currentVideoNode {
transition.updateFrame(node: currentVideoNode, frame: CGRect(origin: CGPoint(), size: size))
currentVideoNode.updateLayout(size: size, isLandscape: isLandscape, transition: transition)
}
transition.updateFrame(node: self.backgroundNode, frame: CGRect(origin: CGPoint(), size: size))
let avatarSize = CGSize(width: 180.0, height: 180.0)
let avatarFrame = CGRect(origin: CGPoint(x: (size.width - avatarSize.width) / 2.0, y: (size.height - avatarSize.height) / 2.0), size: avatarSize)
transition.updateFrame(node: self.avatarNode, frame: avatarFrame)
if let audioLevelView = self.audioLevelView {
transition.updatePosition(layer: audioLevelView.layer, position: avatarFrame.center)
}
let animationSize = CGSize(width: 36.0, height: 36.0)
let titleSize = self.titleNode.updateLayout(size)
transition.updateFrame(node: self.titleNode, frame: CGRect(origin: CGPoint(x: sideInset + 12.0 + animationSize.width, y: size.height - bottomInset - titleSize.height - 16.0), size: titleSize))
transition.updateFrame(node: self.microphoneNode, frame: CGRect(origin: CGPoint(x: sideInset + 7.0, y: size.height - bottomInset - animationSize.height - 6.0), size: animationSize))
var fadeHeight: CGFloat = 50.0
if size.width < size.height {
fadeHeight = 140.0
}
transition.updateFrame(node: self.bottomFadeNode, frame: CGRect(x: 0.0, y: size.height - fadeHeight, width: size.width, height: fadeHeight))
transition.updateFrame(node: self.topFadeNode, frame: CGRect(x: 0.0, y: 0.0, width: size.width, height: 50.0))
let backSize = self.backButtonNode.measure(CGSize(width: 320.0, height: 100.0))
if let image = self.backButtonArrowNode.image {
transition.updateFrame(node: self.backButtonArrowNode, frame: CGRect(origin: CGPoint(x: sideInset + 9.0, y: 12.0), size: image.size))
}
transition.updateFrame(node: self.backButtonNode, frame: CGRect(origin: CGPoint(x: sideInset + 28.0, y: 13.0), size: backSize))
let unpinSize = self.pinButtonTitleNode.updateLayout(size)
if let image = self.pinButtonIconNode.image {
let offset: CGFloat = sideInset.isZero ? 0.0 : initialBottomInset + 8.0
transition.updateFrame(node: self.pinButtonIconNode, frame: CGRect(origin: CGPoint(x: size.width - image.size.width - offset, y: 0.0), size: image.size))
transition.updateFrame(node: self.pinButtonTitleNode, frame: CGRect(origin: CGPoint(x: size.width - image.size.width - unpinSize.width + 4.0 - offset, y: 14.0), size: unpinSize))
transition.updateFrame(node: self.pinButtonNode, frame: CGRect(x: size.width - image.size.width - unpinSize.width - offset, y: 0.0, width: unpinSize.width + image.size.width, height: 44.0))
}
transition.updateFrame(node: self.headerNode, frame: CGRect(origin: CGPoint(), size: CGSize(width: size.width, height: 64.0)))
}
}
@@ -169,13 +169,13 @@ public final class VoiceChatOverlayController: ViewController {
if reclaim {
self.dismissed = true
- let targetPosition = CGPoint(x: layout.size.width / 2.0, y: layout.size.height - layout.intrinsicInsets.bottom - 205.0 / 2.0 - 2.0)
+ let targetPosition = CGPoint(x: layout.size.width / 2.0, y: layout.size.height - layout.intrinsicInsets.bottom - bottomAreaHeight / 2.0 - 3.0)
if self.isSlidOffscreen {
self.isSlidOffscreen = false
self.isButtonHidden = true
actionButton.layer.sublayerTransform = CATransform3DIdentity
actionButton.update(snap: false, animated: false)
- actionButton.position = CGPoint(x: targetPosition.x, y: 205.0 / 2.0)
+ actionButton.position = CGPoint(x: targetPosition.x, y: bottomAreaHeight / 2.0)
leftButton.isHidden = false
rightButton.isHidden = false
@@ -191,7 +191,7 @@ public final class VoiceChatOverlayController: ViewController {
actionButton.layer.removeAllAnimations()
actionButton.layer.sublayerTransform = CATransform3DIdentity
actionButton.update(snap: false, animated: false)
- actionButton.position = CGPoint(x: targetPosition.x, y: 205.0 / 2.0)
+ actionButton.position = CGPoint(x: targetPosition.x, y: bottomAreaHeight / 2.0)
leftButton.isHidden = false
rightButton.isHidden = false
@@ -138,7 +138,7 @@ private let accentColor: UIColor = UIColor(rgb: 0x007aff)
private let constructiveColor: UIColor = UIColor(rgb: 0x34c759)
private let destructiveColor: UIColor = UIColor(rgb: 0xff3b30)
- private class VoiceChatParticipantStatusNode: ASDisplayNode {
+ class VoiceChatParticipantStatusNode: ASDisplayNode {
private var iconNodes: [ASImageNode]
private let textNode: TextNode
@@ -156,10 +156,10 @@ private class VoiceChatParticipantStatusNode: ASDisplayNode {
self.addSubnode(self.textNode)
}
- func asyncLayout() -> (_ size: CGSize, _ text: VoiceChatParticipantItem.ParticipantText, _ transparent: Bool) -> (CGSize, () -> Void) {
+ func asyncLayout() -> (_ size: CGSize, _ text: VoiceChatParticipantItem.ParticipantText, _ expanded: Bool) -> (CGSize, () -> Void) {
let makeTextLayout = TextNode.asyncLayout(self.textNode)
- return { size, text, transparent in
+ return { size, text, expanded in
let statusFont = Font.regular(14.0)
var attributedString: NSAttributedString?
@@ -184,9 +184,6 @@ private class VoiceChatParticipantStatusNode: ASDisplayNode {
case .destructive:
textColorValue = destructiveColor
}
- if transparent {
- textColorValue = UIColor(rgb: 0xffffff, alpha: 0.65)
- }
color = textColorValue
attributedString = NSAttributedString(string: text, font: statusFont, textColor: textColorValue)
default:
@@ -207,7 +204,7 @@ private class VoiceChatParticipantStatusNode: ASDisplayNode {
icons.append(image)
}
- let (textLayout, textApply) = makeTextLayout(TextNodeLayoutArguments(attributedString: attributedString, backgroundColor: nil, maximumNumberOfLines: 1, truncationType: .end, constrainedSize: CGSize(width: size.width - (iconSize.width + spacing) * CGFloat(icons.count), height: CGFloat.greatestFiniteMagnitude), alignment: .natural, cutout: nil, insets: UIEdgeInsets()))
+ let (textLayout, textApply) = makeTextLayout(TextNodeLayoutArguments(attributedString: attributedString, backgroundColor: nil, maximumNumberOfLines: expanded ? 4 : 1, truncationType: .end, constrainedSize: CGSize(width: size.width - (iconSize.width + spacing) * CGFloat(icons.count), height: CGFloat.greatestFiniteMagnitude), alignment: .natural, cutout: nil, insets: UIEdgeInsets()))
var contentSize = textLayout.size
contentSize.width += (iconSize.width + spacing) * CGFloat(icons.count)
@@ -388,7 +385,7 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode {
strongSelf.isExtracted = isExtracted
- let inset: CGFloat = 12.0
+ let inset: CGFloat = 0.0
if isExtracted {
strongSelf.contextSourceNode.contentNode.customHitTest = { [weak self] point in
if let strongSelf = self {
@@ -492,8 +489,6 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode {
strongSelf.avatarNode.isHidden = true
avatarListWrapperNode.contentNode.addSubnode(transitionNode)
strongSelf.avatarTransitionNode = transitionNode
let avatarListContainerNode = ASDisplayNode()
@@ -503,8 +498,7 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode {
avatarListContainerNode.cornerRadius = targetRect.width / 2.0
avatarListWrapperNode.layer.animateSpring(from: initialScale as NSNumber, to: 1.0 as NSNumber, keyPath: "transform.scale", duration: springDuration, initialVelocity: 0.0, damping: springDamping)
- avatarListWrapperNode.layer.animateSpring(from: NSValue(cgPoint: avatarInitialRect.center), to: NSValue(cgPoint: avatarListWrapperNode.position), keyPath: "position", duration: springDuration, initialVelocity: 0.0, damping: springDamping, completion: { [weak self] _ in
- })
+ avatarListWrapperNode.layer.animateSpring(from: NSValue(cgPoint: avatarInitialRect.center), to: NSValue(cgPoint: avatarListWrapperNode.position), keyPath: "position", duration: springDuration, initialVelocity: 0.0, damping: springDamping)
radiusTransition.updateCornerRadius(node: avatarListContainerNode, cornerRadius: 0.0)
@@ -576,10 +570,10 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode {
alphaTransition.updateAlpha(node: strongSelf.actionContainerNode, alpha: isExtracted ? 0.0 : 1.0, delay: isExtracted ? 0.0 : 0.1)
let offsetInitialSublayerTransform = strongSelf.offsetContainerNode.layer.sublayerTransform
- strongSelf.offsetContainerNode.layer.sublayerTransform = CATransform3DMakeTranslation(isExtracted ? -33 : 0.0, isExtracted ? extractedVerticalOffset : 0.0, 0.0)
+ strongSelf.offsetContainerNode.layer.sublayerTransform = CATransform3DMakeTranslation(isExtracted ? -43 : 0.0, isExtracted ? extractedVerticalOffset : 0.0, 0.0)
let actionInitialSublayerTransform = strongSelf.actionContainerNode.layer.sublayerTransform
- strongSelf.actionContainerNode.layer.sublayerTransform = CATransform3DMakeTranslation(isExtracted ? 21.0 : 0.0, 0.0, 0.0)
+ strongSelf.actionContainerNode.layer.sublayerTransform = CATransform3DMakeTranslation(isExtracted ? 43.0 : 0.0, 0.0, 0.0)
let initialBackgroundPosition = strongSelf.backgroundImageNode.position
strongSelf.backgroundImageNode.layer.position = rect.center
@@ -636,7 +630,7 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode {
transition.updateAlpha(node: strongSelf.actionContainerNode, alpha: isExtracted ? 0.0 : 1.0)
transition.updateSublayerTransformOffset(layer: strongSelf.offsetContainerNode.layer, offset: CGPoint(x: isExtracted ? inset : 0.0, y: isExtracted ? extractedVerticalOffset : 0.0))
- transition.updateSublayerTransformOffset(layer: strongSelf.actionContainerNode.layer, offset: CGPoint(x: isExtracted ? -24.0 : 0.0, y: 0.0))
+ transition.updateSublayerTransformOffset(layer: strongSelf.actionContainerNode.layer, offset: CGPoint(x: isExtracted ? -inset * 2.0 : 0.0, y: 0.0))
transition.updateAlpha(node: strongSelf.backgroundImageNode, alpha: isExtracted ? 1.0 : 0.0, completion: { _ in
if !isExtracted {
@@ -659,11 +653,17 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode {
self.layoutParams?.0.action?(self.contextSourceNode)
}
- func animateTransitionIn(from sourceNode: ASDisplayNode, containerNode: ASDisplayNode) {
+ func animateTransitionIn(from sourceNode: ASDisplayNode, containerNode: ASDisplayNode, transition: ContainedViewLayoutTransition) {
guard let _ = self.item, let sourceNode = sourceNode as? VoiceChatFullscreenParticipantItemNode, let _ = sourceNode.item else {
return
}
+ var duration: Double = 0.2
+ var timingFunction: String = CAMediaTimingFunctionName.easeInEaseOut.rawValue
+ if case let .animated(transitionDuration, curve) = transition {
+ duration = transitionDuration + 0.08
+ timingFunction = curve.timingFunction
+ }
let startContainerAvatarPosition = sourceNode.avatarNode.view.convert(sourceNode.avatarNode.bounds, to: containerNode.view).center
var animate = true
if startContainerAvatarPosition.x < -tileSize.width || startContainerAvatarPosition.x > containerNode.frame.width + tileSize.width {
@@ -673,13 +673,13 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode {
sourceNode.avatarNode.alpha = 0.0
let initialAvatarPosition = self.avatarNode.position
- let targetContainerAvatarPosition = self.avatarNode.view.convert(self.avatarNode.bounds, to: containerNode.view).center
+ let initialBackgroundPosition = sourceNode.backgroundImageNode.position
+ let initialContentPosition = sourceNode.contentWrapperNode.position
let startContainerBackgroundPosition = sourceNode.backgroundImageNode.view.convert(sourceNode.backgroundImageNode.bounds, to: containerNode.view).center
let startContainerContentPosition = sourceNode.contentWrapperNode.view.convert(sourceNode.contentWrapperNode.bounds, to: containerNode.view).center
- let initialBackgroundPosition = sourceNode.backgroundImageNode.position
- let initialContentPosition = sourceNode.contentWrapperNode.position
+ let targetContainerAvatarPosition = self.avatarNode.view.convert(self.avatarNode.bounds, to: containerNode.view).center
sourceNode.backgroundImageNode.position = targetContainerAvatarPosition
sourceNode.contentWrapperNode.position = targetContainerAvatarPosition
@@ -687,10 +687,13 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode {
containerNode.addSubnode(sourceNode.contentWrapperNode)
sourceNode.borderImageNode.alpha = 0.0
- let timingFunction = CAMediaTimingFunctionName.easeInEaseOut.rawValue
- sourceNode.backgroundImageNode.layer.animatePosition(from: startContainerBackgroundPosition, to: targetContainerAvatarPosition, duration: 0.2, timingFunction: timingFunction, completion: { [weak sourceNode] _ in
+ sourceNode.backgroundImageNode.layer.animatePosition(from: startContainerBackgroundPosition, to: targetContainerAvatarPosition, duration: duration, timingFunction: timingFunction, completion: { [weak sourceNode] _ in
if let sourceNode = sourceNode {
+ Queue.mainQueue().after(0.1, {
+ sourceNode.backgroundImageNode.layer.removeAllAnimations()
+ sourceNode.contentWrapperNode.layer.removeAllAnimations()
+ })
sourceNode.backgroundImageNode.alpha = 1.0
sourceNode.borderImageNode.alpha = 1.0
sourceNode.backgroundImageNode.position = initialBackgroundPosition
@@ -698,7 +701,7 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode {
}
})
- sourceNode.contentWrapperNode.layer.animatePosition(from: startContainerContentPosition, to: targetContainerAvatarPosition, duration: 0.2, timingFunction: timingFunction, completion: { [weak sourceNode] _ in
+ sourceNode.contentWrapperNode.layer.animatePosition(from: startContainerContentPosition, to: targetContainerAvatarPosition, duration: duration, timingFunction: timingFunction, completion: { [weak sourceNode] _ in
if let sourceNode = sourceNode {
sourceNode.avatarNode.alpha = 1.0
sourceNode.contentWrapperNode.position = initialContentPosition
@@ -709,19 +712,18 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode {
self.avatarNode.position = targetContainerAvatarPosition
containerNode.addSubnode(self.avatarNode)
- self.avatarNode.layer.animateScale(from: 1.25, to: 1.0, duration: 0.2, timingFunction: timingFunction)
- self.avatarNode.layer.animatePosition(from: startContainerAvatarPosition, to: targetContainerAvatarPosition, duration: 0.2, timingFunction: timingFunction, completion: { [weak self] _ in
+ self.avatarNode.layer.animateScale(from: 1.25, to: 1.0, duration: duration, timingFunction: timingFunction)
+ self.avatarNode.layer.animatePosition(from: startContainerAvatarPosition, to: targetContainerAvatarPosition, duration: duration, timingFunction: timingFunction, completion: { [weak self] _ in
if let strongSelf = self {
strongSelf.avatarNode.position = initialAvatarPosition
strongSelf.offsetContainerNode.addSubnode(strongSelf.avatarNode)
}
})
- sourceNode.backgroundImageNode.layer.animateScale(from: 1.0, to: 0.001, duration: 0.25, timingFunction: timingFunction)
+ sourceNode.backgroundImageNode.layer.animateScale(from: 1.0, to: 0.001, duration: duration, timingFunction: timingFunction)
- sourceNode.backgroundImageNode.layer.animateAlpha(from: sourceNode.backgroundImageNode.alpha, to: 0.0, duration: 0.35, timingFunction: timingFunction)
+ sourceNode.backgroundImageNode.layer.animateAlpha(from: sourceNode.backgroundImageNode.alpha, to: 0.0, duration: duration, timingFunction: timingFunction, removeOnCompletion: false)
- sourceNode.contentWrapperNode.layer.animateScale(from: 1.0, to: 0.001, duration: 0.25, timingFunction: timingFunction)
+ sourceNode.contentWrapperNode.layer.animateScale(from: 1.0, to: 0.001, duration: duration, timingFunction: timingFunction)
- sourceNode.contentWrapperNode.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.25, timingFunction: timingFunction)
+ sourceNode.contentWrapperNode.layer.animateAlpha(from: 1.0, to: 0.0, duration: duration, timingFunction: timingFunction, removeOnCompletion: false)
}
}
@@ -819,7 +821,7 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode {
let (titleLayout, titleApply) = makeTitleLayout(TextNodeLayoutArguments(attributedString: titleAttributedString, backgroundColor: nil, maximumNumberOfLines: 1, truncationType: .end, constrainedSize: CGSize(width: constrainedWidth, height: CGFloat.greatestFiniteMagnitude), alignment: .natural, cutout: nil, insets: UIEdgeInsets()))
let (statusLayout, statusApply) = makeStatusLayout(CGSize(width: params.width - leftInset - 8.0 - rightInset - 30.0, height: CGFloat.greatestFiniteMagnitude), item.text, false)
- let (expandedStatusLayout, expandedStatusApply) = makeExpandedStatusLayout(CGSize(width: params.width - leftInset - 8.0 - rightInset - expandedRightInset, height: CGFloat.greatestFiniteMagnitude), item.expandedText ?? item.text, false)
+ let (expandedStatusLayout, expandedStatusApply) = makeExpandedStatusLayout(CGSize(width: params.width - leftInset - 8.0 - rightInset - expandedRightInset, height: CGFloat.greatestFiniteMagnitude), item.expandedText ?? item.text, true)
let titleSpacing: CGFloat = statusLayout.height == 0.0 ? 0.0 : 1.0
@@ -861,7 +863,7 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode {
animationFrame = CGRect(x: params.width - animationSize.width - 6.0 - params.rightInset, y: floor((layout.contentSize.height - animationSize.height) / 2.0) + 1.0, width: animationSize.width, height: animationSize.height)
titleFrame = CGRect(origin: CGPoint(x: leftInset, y: verticalInset + verticalOffset), size: titleLayout.size)
- var extractedRect = CGRect(origin: CGPoint(), size: layout.contentSize).insetBy(dx: 16.0 + params.leftInset, dy: 0.0)
+ var extractedRect = CGRect(origin: CGPoint(), size: layout.contentSize).insetBy(dx: params.leftInset, dy: 0.0)
var extractedHeight = extractedRect.height + expandedStatusLayout.height - statusLayout.height
var extractedVerticalOffset: CGFloat = 0.0
if item.peer.smallProfileImage != nil {
@@ -996,7 +998,7 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode {
return
}
- if false, strongSelf.audioLevelView == nil, value > 0.0 {
+ if strongSelf.audioLevelView == nil, value > 0.0 {
let audioLevelView = VoiceBlobView(
frame: blobFrame,
maxLevel: 1.5,
@@ -1,8 +1,469 @@
- //
- // VoiceChatPeerProfileNode.swift
- // _idx_TelegramCallsUI_5BDA0798_ios_min9.0
- //
- // Created by Ilya Laktyushin on 11.05.2021.
- //
import Foundation
import UIKit
import Display
import AsyncDisplayKit
import SwiftSignalKit
import Postbox
import TelegramCore
import SyncCore
import TelegramPresentationData
import TelegramUIPreferences
import PresentationDataUtils
import AvatarNode
import TelegramStringFormatting
import ContextUI
import AccountContext
import LegacyComponents
import PeerInfoAvatarListNode
private let backgroundCornerRadius: CGFloat = 14.0
final class VoiceChatPeerProfileNode: ASDisplayNode {
private let context: AccountContext
private let size: CGSize
private var peer: Peer
private var text: VoiceChatParticipantItem.ParticipantText
private let customNode: ASDisplayNode?
private let additionalEntry: Signal<(TelegramMediaImageRepresentation, Float)?, NoError>
private let backgroundImageNode: ASImageNode
private let avatarListContainerNode: ASDisplayNode
let avatarListWrapperNode: PinchSourceContainerNode
let avatarListNode: PeerInfoAvatarListContainerNode
private var videoFadeNode: ASImageNode
private let infoNode: ASDisplayNode
private let titleNode: ImmediateTextNode
private let statusNode: VoiceChatParticipantStatusNode
private var videoNode: GroupVideoNode?
private var appeared = false
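// Builds the expanded profile card: a stretchable rounded background, a pinch-zoomable avatar gallery and a title/status info block.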
init(context: AccountContext, size: CGSize, peer: Peer, text: VoiceChatParticipantItem.ParticipantText, customNode: ASDisplayNode? = nil, additionalEntry: Signal<(TelegramMediaImageRepresentation, Float)?, NoError>, requestDismiss: (() -> Void)?) {
self.context = context
self.size = size
self.peer = peer
self.text = text
self.customNode = customNode
self.additionalEntry = additionalEntry
self.backgroundImageNode = ASImageNode()
self.backgroundImageNode.clipsToBounds = true
self.backgroundImageNode.displaysAsynchronously = false
self.backgroundImageNode.displayWithoutProcessing = true
self.videoFadeNode = ASImageNode()
self.videoFadeNode.displaysAsynchronously = false
self.videoFadeNode.contentMode = .scaleToFill
self.avatarListContainerNode = ASDisplayNode()
self.avatarListContainerNode.clipsToBounds = true
self.avatarListWrapperNode = PinchSourceContainerNode()
self.avatarListWrapperNode.clipsToBounds = true
self.avatarListWrapperNode.cornerRadius = backgroundCornerRadius
self.avatarListNode = PeerInfoAvatarListContainerNode(context: context)
self.avatarListNode.backgroundColor = .clear
self.avatarListNode.peer = peer
self.avatarListNode.firstFullSizeOnly = true
self.avatarListNode.offsetLocation = true
self.avatarListNode.customCenterTapAction = {
requestDismiss?()
}
self.infoNode = ASDisplayNode()
self.titleNode = ImmediateTextNode()
self.titleNode.isUserInteractionEnabled = false
self.titleNode.contentMode = .left
self.titleNode.contentsScale = UIScreen.main.scale
self.statusNode = VoiceChatParticipantStatusNode()
self.statusNode.isUserInteractionEnabled = false
super.init()
self.clipsToBounds = true
self.addSubnode(self.backgroundImageNode)
self.addSubnode(self.infoNode)
self.addSubnode(self.videoFadeNode)
self.addSubnode(self.avatarListWrapperNode)
self.infoNode.addSubnode(self.titleNode)
self.infoNode.addSubnode(self.statusNode)
self.avatarListContainerNode.addSubnode(self.avatarListNode)
self.avatarListContainerNode.addSubnode(self.avatarListNode.controlsClippingOffsetNode)
self.avatarListWrapperNode.contentNode.addSubnode(self.avatarListContainerNode)
self.avatarListWrapperNode.activate = { [weak self] sourceNode in
guard let strongSelf = self else {
return
}
strongSelf.avatarListNode.controlsContainerNode.alpha = 0.0
let pinchController = PinchController(sourceNode: sourceNode, getContentAreaInScreenSpace: {
return UIScreen.main.bounds
})
context.sharedContext.mainWindow?.presentInGlobalOverlay(pinchController)
}
self.avatarListWrapperNode.deactivated = { [weak self] in
guard let strongSelf = self else {
return
}
strongSelf.avatarListWrapperNode.contentNode.layer.animate(from: 0.0 as NSNumber, to: backgroundCornerRadius as NSNumber, keyPath: "cornerRadius", timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue, duration: 0.3, completion: { _ in
})
}
self.avatarListWrapperNode.animatedOut = { [weak self] in
guard let strongSelf = self else {
return
}
strongSelf.avatarListNode.controlsContainerNode.alpha = 1.0
strongSelf.avatarListNode.controlsContainerNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.25)
}
self.updateInfo(size: size, animate: false)
}
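// Lays out the peer's title and status text and pins the info block to the bottom edge, optionally animating the change with a spring.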
func updateInfo(size: CGSize, animate: Bool) {
let presentationData = self.context.sharedContext.currentPresentationData.with { $0 }
let titleFont = Font.regular(17.0)
let titleColor = UIColor.white
var titleAttributedString: NSAttributedString?
if let user = self.peer as? TelegramUser {
if let firstName = user.firstName, let lastName = user.lastName, !firstName.isEmpty, !lastName.isEmpty {
let string = NSMutableAttributedString()
switch presentationData.nameDisplayOrder {
case .firstLast:
string.append(NSAttributedString(string: firstName, font: titleFont, textColor: titleColor))
string.append(NSAttributedString(string: " ", font: titleFont, textColor: titleColor))
string.append(NSAttributedString(string: lastName, font: titleFont, textColor: titleColor))
case .lastFirst:
string.append(NSAttributedString(string: lastName, font: titleFont, textColor: titleColor))
string.append(NSAttributedString(string: " ", font: titleFont, textColor: titleColor))
string.append(NSAttributedString(string: firstName, font: titleFont, textColor: titleColor))
}
titleAttributedString = string
} else if let firstName = user.firstName, !firstName.isEmpty {
titleAttributedString = NSAttributedString(string: firstName, font: titleFont, textColor: titleColor)
} else if let lastName = user.lastName, !lastName.isEmpty {
titleAttributedString = NSAttributedString(string: lastName, font: titleFont, textColor: titleColor)
} else {
titleAttributedString = NSAttributedString(string: presentationData.strings.User_DeletedAccount, font: titleFont, textColor: titleColor)
}
} else if let group = peer as? TelegramGroup {
titleAttributedString = NSAttributedString(string: group.title, font: titleFont, textColor: titleColor)
} else if let channel = peer as? TelegramChannel {
titleAttributedString = NSAttributedString(string: channel.title, font: titleFont, textColor: titleColor)
}
self.titleNode.attributedText = titleAttributedString
let titleSize = self.titleNode.updateLayout(CGSize(width: self.size.width - 24.0, height: size.height))
let makeStatusLayout = self.statusNode.asyncLayout()
let (statusLayout, statusApply) = makeStatusLayout(CGSize(width: self.size.width - 24.0, height: CGFloat.greatestFiniteMagnitude), self.text, true)
let _ = statusApply()
self.titleNode.frame = CGRect(origin: CGPoint(x: 14.0, y: 0.0), size: titleSize)
self.statusNode.frame = CGRect(origin: CGPoint(x: 14.0, y: titleSize.height + 3.0), size: statusLayout)
let totalHeight = titleSize.height + statusLayout.height + 3.0 + 8.0
let infoFrame = CGRect(x: 0.0, y: size.height - totalHeight, width: self.size.width, height: totalHeight)
if animate {
let springDuration: Double = !self.appeared ? 0.42 : 0.3
let springDamping: CGFloat = !self.appeared ? 104.0 : 1000.0
let initialInfoPosition = self.infoNode.position
self.infoNode.layer.position = infoFrame.center
let initialInfoBounds = self.infoNode.bounds
self.infoNode.layer.bounds = CGRect(origin: CGPoint(), size: infoFrame.size)
self.infoNode.layer.animateSpring(from: NSValue(cgPoint: initialInfoPosition), to: NSValue(cgPoint: self.infoNode.position), keyPath: "position", duration: springDuration, delay: 0.0, initialVelocity: 0.0, damping: springDamping)
self.infoNode.layer.animateSpring(from: NSValue(cgRect: initialInfoBounds), to: NSValue(cgRect: self.infoNode.bounds), keyPath: "bounds", duration: springDuration, initialVelocity: 0.0, damping: springDamping)
} else {
self.infoNode.frame = infoFrame
}
}
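// Expands the profile from a tile or fullscreen participant item, morphing the source frame into the avatar gallery
// and sliding the rounded background into place with spring animations.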
func animateIn(from sourceNode: ASDisplayNode, targetRect: CGRect, transition: ContainedViewLayoutTransition) {
let radiusTransition = ContainedViewLayoutTransition.animated(duration: 0.2, curve: .easeInOut)
let springDuration: Double = 0.42
let springDamping: CGFloat = 104.0
if let sourceNode = sourceNode as? VoiceChatTileItemNode {
let sourceRect = sourceNode.bounds
self.backgroundImageNode.frame = sourceNode.bounds
self.updateInfo(size: sourceNode.bounds.size, animate: false)
self.updateInfo(size: targetRect.size, animate: true)
self.backgroundImageNode.image = generateImage(CGSize(width: backgroundCornerRadius * 2.0, height: backgroundCornerRadius * 2.0), rotatedContext: { (size, context) in
let bounds = CGRect(origin: CGPoint(), size: size)
context.clear(bounds)
context.setFillColor(UIColor(rgb: 0x1c1c1e).cgColor)
context.fillEllipse(in: bounds)
context.fill(CGRect(x: 0.0, y: 0.0, width: size.width, height: size.height / 2.0))
})?.stretchableImage(withLeftCapWidth: Int(backgroundCornerRadius), topCapHeight: Int(backgroundCornerRadius))
self.backgroundImageNode.cornerRadius = backgroundCornerRadius
transition.updateCornerRadius(node: self.backgroundImageNode, cornerRadius: 0.0)
let initialRect = sourceNode.frame
let initialScale: CGFloat = sourceRect.width / targetRect.width
let targetSize = CGSize(width: targetRect.size.width, height: targetRect.size.width)
self.avatarListWrapperNode.update(size: targetSize, transition: .immediate)
self.avatarListWrapperNode.frame = CGRect(x: targetRect.minX, y: targetRect.minY, width: targetRect.width, height: targetRect.width + backgroundCornerRadius)
self.avatarListContainerNode.frame = CGRect(origin: CGPoint(), size: targetSize)
self.avatarListContainerNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2)
self.avatarListContainerNode.cornerRadius = targetRect.width / 2.0
if let videoNode = sourceNode.videoNode {
videoNode.updateLayout(size: targetSize, isLandscape: true, transition: transition)
transition.updateFrame(node: videoNode, frame: CGRect(origin: CGPoint(), size: targetSize))
transition.updateFrame(node: sourceNode.videoContainerNode, frame: CGRect(origin: CGPoint(), size: CGSize(width: targetSize.width, height: targetSize.height + backgroundCornerRadius)))
sourceNode.videoContainerNode.cornerRadius = backgroundCornerRadius
}
self.insertSubnode(sourceNode.videoContainerNode, belowSubnode: self.avatarListWrapperNode)
if let snapshotView = sourceNode.infoNode.view.snapshotView(afterScreenUpdates: false) {
self.videoFadeNode.image = sourceNode.fadeNode.image
self.videoFadeNode.frame = CGRect(x: 0.0, y: sourceRect.height - sourceNode.fadeNode.frame.height, width: sourceRect.width, height: sourceNode.fadeNode.frame.height)
self.insertSubnode(self.videoFadeNode, aboveSubnode: sourceNode.videoContainerNode)
self.view.insertSubview(snapshotView, aboveSubview: sourceNode.videoContainerNode.view)
snapshotView.frame = sourceRect
transition.updateFrame(view: snapshotView, frame: CGRect(origin: CGPoint(x: 0.0, y: targetSize.height - snapshotView.frame.size.height), size: snapshotView.frame.size))
snapshotView.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2, removeOnCompletion: false, completion: { _ in
snapshotView.removeFromSuperview()
})
transition.updateFrame(node: self.videoFadeNode, frame: CGRect(origin: CGPoint(x: 0.0, y: targetSize.height - self.videoFadeNode.frame.size.height), size: CGSize(width: targetSize.width, height: self.videoFadeNode.frame.height)))
self.videoFadeNode.alpha = 0.0
self.videoFadeNode.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2)
}
self.avatarListWrapperNode.layer.animateSpring(from: initialScale as NSNumber, to: 1.0 as NSNumber, keyPath: "transform.scale", duration: springDuration, initialVelocity: 0.0, damping: springDamping)
self.avatarListWrapperNode.layer.animateSpring(from: NSValue(cgPoint: initialRect.center), to: NSValue(cgPoint: self.avatarListWrapperNode.position), keyPath: "position", duration: springDuration, initialVelocity: 0.0, damping: springDamping, completion: { [weak self] _ in
if let strongSelf = self {
strongSelf.avatarListNode.currentItemNode?.addSubnode(sourceNode.videoContainerNode)
}
})
radiusTransition.updateCornerRadius(node: self.avatarListContainerNode, cornerRadius: 0.0)
self.avatarListWrapperNode.contentNode.clipsToBounds = true
self.avatarListNode.frame = CGRect(x: targetRect.width / 2.0, y: targetRect.width / 2.0, width: targetRect.width, height: targetRect.width)
self.avatarListNode.controlsClippingNode.frame = CGRect(x: -targetRect.width / 2.0, y: -targetRect.width / 2.0, width: targetRect.width, height: targetRect.width)
self.avatarListNode.controlsClippingOffsetNode.frame = CGRect(origin: CGPoint(x: targetRect.width / 2.0, y: targetRect.width / 2.0), size: CGSize())
self.avatarListNode.stripContainerNode.frame = CGRect(x: 0.0, y: 13.0, width: targetRect.width, height: 2.0)
self.avatarListNode.update(size: targetSize, peer: self.peer, customNode: self.customNode, additionalEntry: self.additionalEntry, isExpanded: true, transition: .immediate)
let backgroundTargetRect = CGRect(x: 0.0, y: targetSize.height - backgroundCornerRadius * 2.0, width: targetRect.width, height: targetRect.height - targetSize.height + backgroundCornerRadius * 2.0)
let initialBackgroundPosition = self.backgroundImageNode.position
self.backgroundImageNode.layer.position = backgroundTargetRect.center
let initialBackgroundBounds = self.backgroundImageNode.bounds
self.backgroundImageNode.layer.bounds = CGRect(origin: CGPoint(), size: backgroundTargetRect.size)
self.backgroundImageNode.layer.animateSpring(from: NSValue(cgPoint: initialBackgroundPosition), to: NSValue(cgPoint: self.backgroundImageNode.position), keyPath: "position", duration: springDuration, delay: 0.0, initialVelocity: 0.0, damping: springDamping)
self.backgroundImageNode.layer.animateSpring(from: NSValue(cgRect: initialBackgroundBounds), to: NSValue(cgRect: self.backgroundImageNode.bounds), keyPath: "bounds", duration: springDuration, initialVelocity: 0.0, damping: springDamping)
} else if let sourceNode = sourceNode as? VoiceChatFullscreenParticipantItemNode {
let sourceRect = sourceNode.bounds
self.backgroundImageNode.frame = sourceNode.bounds
self.updateInfo(size: sourceNode.bounds.size, animate: false)
self.updateInfo(size: targetRect.size, animate: true)
self.backgroundImageNode.image = generateImage(CGSize(width: backgroundCornerRadius * 2.0, height: backgroundCornerRadius * 2.0), rotatedContext: { (size, context) in
let bounds = CGRect(origin: CGPoint(), size: size)
context.clear(bounds)
context.setFillColor(UIColor(rgb: 0x1c1c1e).cgColor)
context.fillEllipse(in: bounds)
context.fill(CGRect(x: 0.0, y: 0.0, width: size.width, height: size.height / 2.0))
})?.stretchableImage(withLeftCapWidth: Int(backgroundCornerRadius), topCapHeight: Int(backgroundCornerRadius))
self.backgroundImageNode.cornerRadius = backgroundCornerRadius
transition.updateCornerRadius(node: self.backgroundImageNode, cornerRadius: 0.0)
let initialRect = sourceNode.frame
let initialScale: CGFloat = sourceRect.width / targetRect.width
let targetSize = CGSize(width: targetRect.size.width, height: targetRect.size.width)
self.avatarListWrapperNode.update(size: targetSize, transition: .immediate)
self.avatarListWrapperNode.frame = CGRect(x: targetRect.minX, y: targetRect.minY, width: targetRect.width, height: targetRect.width + backgroundCornerRadius)
self.avatarListContainerNode.frame = CGRect(origin: CGPoint(), size: targetSize)
self.avatarListContainerNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2)
self.avatarListContainerNode.cornerRadius = targetRect.width / 2.0
if false, let videoNode = sourceNode.videoNode {
videoNode.updateLayout(size: targetSize, isLandscape: true, transition: transition)
transition.updateFrame(node: videoNode, frame: CGRect(origin: CGPoint(), size: targetSize))
transition.updateFrame(node: sourceNode.videoContainerNode, frame: CGRect(origin: CGPoint(), size: CGSize(width: targetSize.width, height: targetSize.height + backgroundCornerRadius)))
sourceNode.videoContainerNode.cornerRadius = backgroundCornerRadius
}
// self.insertSubnode(sourceNode.videoContainerNode, belowSubnode: self.avatarListWrapperNode)
// if let snapshotView = sourceNode.infoNode.view.snapshotView(afterScreenUpdates: false) {
// self.videoFadeNode.image = sourceNode.fadeNode.image
// self.videoFadeNode.frame = CGRect(x: 0.0, y: sourceRect.height - sourceNode.fadeNode.frame.height, width: sourceRect.width, height: sourceNode.fadeNode.frame.height)
//
// self.insertSubnode(self.videoFadeNode, aboveSubnode: sourceNode.videoContainerNode)
// self.view.insertSubview(snapshotView, aboveSubview: sourceNode.videoContainerNode.view)
// snapshotView.frame = sourceRect
// transition.updateFrame(view: snapshotView, frame: CGRect(origin: CGPoint(x: 0.0, y: targetSize.height - snapshotView.frame.size.height), size: snapshotView.frame.size))
// snapshotView.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2, removeOnCompletion: false, completion: { _ in
// snapshotView.removeFromSuperview()
// })
// transition.updateFrame(node: self.videoFadeNode, frame: CGRect(origin: CGPoint(x: 0.0, y: targetSize.height - self.videoFadeNode.frame.size.height), size: CGSize(width: targetSize.width, height: self.videoFadeNode.frame.height)))
// self.videoFadeNode.alpha = 0.0
// self.videoFadeNode.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2)
// }
self.avatarListWrapperNode.layer.animateSpring(from: initialScale as NSNumber, to: 1.0 as NSNumber, keyPath: "transform.scale", duration: springDuration, initialVelocity: 0.0, damping: springDamping)
self.avatarListWrapperNode.layer.animateSpring(from: NSValue(cgPoint: initialRect.center), to: NSValue(cgPoint: self.avatarListWrapperNode.position), keyPath: "position", duration: springDuration, initialVelocity: 0.0, damping: springDamping, completion: { [weak self] _ in
if let strongSelf = self {
// strongSelf.avatarListNode.currentItemNode?.addSubnode(sourceNode.videoContainerNode)
}
})
radiusTransition.updateCornerRadius(node: self.avatarListContainerNode, cornerRadius: 0.0)
self.avatarListWrapperNode.contentNode.clipsToBounds = true
self.avatarListNode.frame = CGRect(x: targetRect.width / 2.0, y: targetRect.width / 2.0, width: targetRect.width, height: targetRect.width)
self.avatarListNode.controlsClippingNode.frame = CGRect(x: -targetRect.width / 2.0, y: -targetRect.width / 2.0, width: targetRect.width, height: targetRect.width)
self.avatarListNode.controlsClippingOffsetNode.frame = CGRect(origin: CGPoint(x: targetRect.width / 2.0, y: targetRect.width / 2.0), size: CGSize())
self.avatarListNode.stripContainerNode.frame = CGRect(x: 0.0, y: 13.0, width: targetRect.width, height: 2.0)
self.avatarListNode.update(size: targetSize, peer: self.peer, customNode: nil, additionalEntry: self.additionalEntry, isExpanded: true, transition: .immediate)
let backgroundTargetRect = CGRect(x: 0.0, y: targetSize.height - backgroundCornerRadius * 2.0, width: targetRect.width, height: targetRect.height - targetSize.height + backgroundCornerRadius * 2.0)
let initialBackgroundPosition = self.backgroundImageNode.position
self.backgroundImageNode.layer.position = backgroundTargetRect.center
let initialBackgroundBounds = self.backgroundImageNode.bounds
self.backgroundImageNode.layer.bounds = CGRect(origin: CGPoint(), size: backgroundTargetRect.size)
self.backgroundImageNode.layer.animateSpring(from: NSValue(cgPoint: initialBackgroundPosition), to: NSValue(cgPoint: self.backgroundImageNode.position), keyPath: "position", duration: springDuration, delay: 0.0, initialVelocity: 0.0, damping: springDamping)
self.backgroundImageNode.layer.animateSpring(from: NSValue(cgRect: initialBackgroundBounds), to: NSValue(cgRect: self.backgroundImageNode.bounds), keyPath: "bounds", duration: springDuration, initialVelocity: 0.0, damping: springDamping)
}
self.appeared = true
}
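// Collapses the profile back into the target tile, scaling the avatar gallery down to the tile frame and handing
// the video and info views back to the tile.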
func animateOut(to targetNode: ASDisplayNode, targetRect: CGRect, transition: ContainedViewLayoutTransition) {
let radiusTransition = ContainedViewLayoutTransition.animated(duration: 0.2, curve: .easeInOut)
let springDuration: Double = 0.3
let springDamping: CGFloat = 1000.0
if let targetNode = targetNode as? VoiceChatTileItemNode {
let initialSize = self.bounds
self.updateInfo(size: targetRect.size, animate: true)
transition.updateCornerRadius(node: self.backgroundImageNode, cornerRadius: backgroundCornerRadius)
let targetScale = targetRect.width / avatarListContainerNode.frame.width
self.insertSubnode(targetNode.videoContainerNode, belowSubnode: self.avatarListWrapperNode)
self.insertSubnode(self.videoFadeNode, aboveSubnode: targetNode.videoContainerNode)
self.avatarListWrapperNode.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2, removeOnCompletion: false)
self.avatarListWrapperNode.layer.animate(from: 1.0 as NSNumber, to: targetScale as NSNumber, keyPath: "transform.scale", timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue, duration: 0.2, removeOnCompletion: false)
self.avatarListWrapperNode.layer.animate(from: NSValue(cgPoint: self.avatarListWrapperNode.position), to: NSValue(cgPoint: targetRect.center), keyPath: "position", timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue, duration: 0.2, removeOnCompletion: false, completion: { [weak self, weak targetNode] _ in
if let targetNode = targetNode {
targetNode.contentNode.insertSubnode(targetNode.videoContainerNode, aboveSubnode: targetNode.backgroundNode)
}
self?.removeFromSupernode()
})
radiusTransition.updateCornerRadius(node: self.avatarListContainerNode, cornerRadius: backgroundCornerRadius)
if let snapshotView = targetNode.infoNode.view.snapshotView(afterScreenUpdates: false) {
self.view.insertSubview(snapshotView, aboveSubview: targetNode.videoContainerNode.view)
let snapshotFrame = snapshotView.frame
snapshotView.frame = CGRect(origin: CGPoint(x: 0.0, y: initialSize.width - snapshotView.frame.size.height), size: snapshotView.frame.size)
transition.updateFrame(view: snapshotView, frame: snapshotFrame)
snapshotView.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2)
transition.updateFrame(node: self.videoFadeNode, frame: CGRect(origin: CGPoint(x: 0.0, y: targetRect.height - self.videoFadeNode.frame.size.height), size: CGSize(width: targetRect.width, height: self.videoFadeNode.frame.height)))
self.videoFadeNode.alpha = 1.0
self.videoFadeNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2)
}
if let videoNode = targetNode.videoNode {
videoNode.updateLayout(size: targetRect.size, isLandscape: true, transition: transition)
transition.updateFrame(node: videoNode, frame: targetRect)
transition.updateFrame(node: targetNode.videoContainerNode, frame: targetRect)
}
let backgroundTargetRect = targetRect
let initialBackgroundPosition = self.backgroundImageNode.position
self.backgroundImageNode.layer.position = backgroundTargetRect.center
let initialBackgroundBounds = self.backgroundImageNode.bounds
self.backgroundImageNode.layer.bounds = CGRect(origin: CGPoint(), size: backgroundTargetRect.size)
self.backgroundImageNode.layer.animateSpring(from: NSValue(cgPoint: initialBackgroundPosition), to: NSValue(cgPoint: self.backgroundImageNode.position), keyPath: "position", duration: springDuration, delay: 0.0, initialVelocity: 0.0, damping: springDamping)
self.backgroundImageNode.layer.animateSpring(from: NSValue(cgRect: initialBackgroundBounds), to: NSValue(cgRect: self.backgroundImageNode.bounds), keyPath: "bounds", duration: springDuration, initialVelocity: 0.0, damping: springDamping)
self.avatarListNode.stripContainerNode.alpha = 0.0
self.avatarListNode.stripContainerNode.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2)
self.infoNode.alpha = 0.0
self.infoNode.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2)
} else if let targetNode = targetNode as? VoiceChatFullscreenParticipantItemNode {
let initialSize = self.bounds
self.updateInfo(size: targetRect.size, animate: true)
transition.updateCornerRadius(node: self.backgroundImageNode, cornerRadius: backgroundCornerRadius)
let targetScale = targetRect.width / avatarListContainerNode.frame.width
self.insertSubnode(targetNode.videoContainerNode, belowSubnode: self.avatarListWrapperNode)
self.insertSubnode(self.videoFadeNode, aboveSubnode: targetNode.videoContainerNode)
self.avatarListWrapperNode.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2, removeOnCompletion: false)
self.avatarListWrapperNode.layer.animate(from: 1.0 as NSNumber, to: targetScale as NSNumber, keyPath: "transform.scale", timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue, duration: 0.2, removeOnCompletion: false)
self.avatarListWrapperNode.layer.animate(from: NSValue(cgPoint: self.avatarListWrapperNode.position), to: NSValue(cgPoint: targetRect.center), keyPath: "position", timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue, duration: 0.2, removeOnCompletion: false, completion: { [weak self, weak targetNode] _ in
if let targetNode = targetNode {
targetNode.offsetContainerNode.insertSubnode(targetNode.videoContainerNode, at: 0)
}
self?.removeFromSupernode()
})
radiusTransition.updateCornerRadius(node: self.avatarListContainerNode, cornerRadius: backgroundCornerRadius)
// if let snapshotView = targetNode.infoNode.view.snapshotView(afterScreenUpdates: false) {
// self.view.insertSubview(snapshotView, aboveSubview: targetNode.videoContainerNode.view)
// let snapshotFrame = snapshotView.frame
// snapshotView.frame = CGRect(origin: CGPoint(x: 0.0, y: initialSize.width - snapshotView.frame.size.height), size: snapshotView.frame.size)
// transition.updateFrame(view: snapshotView, frame: snapshotFrame)
// snapshotView.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2)
// transition.updateFrame(node: self.videoFadeNode, frame: CGRect(origin: CGPoint(x: 0.0, y: targetRect.height - self.videoFadeNode.frame.size.height), size: CGSize(width: targetRect.width, height: self.videoFadeNode.frame.height)))
// self.videoFadeNode.alpha = 1.0
// self.videoFadeNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2)
// }
if false, let videoNode = targetNode.videoNode {
videoNode.updateLayout(size: targetRect.size, isLandscape: true, transition: transition)
transition.updateFrame(node: videoNode, frame: targetRect)
transition.updateFrame(node: targetNode.videoContainerNode, frame: targetRect)
}
let backgroundTargetRect = targetRect
let initialBackgroundPosition = self.backgroundImageNode.position
self.backgroundImageNode.layer.position = backgroundTargetRect.center
let initialBackgroundBounds = self.backgroundImageNode.bounds
self.backgroundImageNode.layer.bounds = CGRect(origin: CGPoint(), size: backgroundTargetRect.size)
self.backgroundImageNode.layer.animateSpring(from: NSValue(cgPoint: initialBackgroundPosition), to: NSValue(cgPoint: self.backgroundImageNode.position), keyPath: "position", duration: springDuration, delay: 0.0, initialVelocity: 0.0, damping: springDamping)
self.backgroundImageNode.layer.animateSpring(from: NSValue(cgRect: initialBackgroundBounds), to: NSValue(cgRect: self.backgroundImageNode.bounds), keyPath: "bounds", duration: springDuration, initialVelocity: 0.0, damping: springDamping)
self.avatarListNode.stripContainerNode.alpha = 0.0
self.avatarListNode.stripContainerNode.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2)
self.infoNode.alpha = 0.0
self.infoNode.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2)
}
}
}
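
The expand and collapse transitions above follow a common Core Animation pattern: the layer's final position and bounds are assigned first, and spring animations are then added from the previous values, so the presentation layer catches up to a model that is already in its end state. A minimal, self-contained sketch of that pattern using plain CASpringAnimation (the helper name and damping value are illustrative assumptions, not part of the code above):

import UIKit

// Moves `layer` to `targetFrame` with a spring: the model values are set to the
// target first, and the animation runs from the old values to the new ones.
func springMove(_ layer: CALayer, to targetFrame: CGRect, damping: CGFloat = 30.0) {
    let initialPosition = layer.position
    let initialBounds = layer.bounds

    // Commit the final layout to the model layer up front.
    layer.position = CGPoint(x: targetFrame.midX, y: targetFrame.midY)
    layer.bounds = CGRect(origin: .zero, size: targetFrame.size)

    for (keyPath, fromValue, toValue) in [
        ("position", NSValue(cgPoint: initialPosition), NSValue(cgPoint: layer.position)),
        ("bounds", NSValue(cgRect: initialBounds), NSValue(cgRect: layer.bounds))
    ] {
        let animation = CASpringAnimation(keyPath: keyPath)
        animation.fromValue = fromValue
        animation.toValue = toValue
        animation.damping = damping
        animation.duration = animation.settlingDuration
        layer.add(animation, forKey: keyPath)
    }
}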

View File

@@ -48,12 +48,12 @@ final class VoiceChatTileGridNode: ASDisplayNode {
                var wasAdded = false
                if let current = self.itemNodes[item.id] {
                    itemNode = current
-                   current.update(size: itemSize, item: item, transition: transition)
+                   current.update(size: itemSize, availableWidth: size.width, item: item, transition: transition)
                } else {
                    wasAdded = true
                    let addedItemNode = VoiceChatTileItemNode(context: self.context)
                    itemNode = addedItemNode
-                   addedItemNode.update(size: itemSize, item: item, transition: .immediate)
+                   addedItemNode.update(size: itemSize, availableWidth: size.width, item: item, transition: .immediate)
                    self.itemNodes[self.items[i].id] = addedItemNode
                    self.addSubnode(addedItemNode)
                }
@@ -154,12 +154,12 @@ final class VoiceChatTilesGridItemNode: ListViewItemNode {
        self.cornersNode = ASImageNode()
        self.cornersNode.displaysAsynchronously = false
-       self.cornersNode.image = decorationCornersImage(top: true, bottom: false, dark: false)

        super.init(layerBacked: false, dynamicBounce: false)

+       self.clipsToBounds = true

        self.addSubnode(self.backgroundNode)
+       self.addSubnode(self.cornersNode)
    }

    override func animateFrameTransition(_ progress: CGFloat, _ currentValue: CGFloat) {
@@ -174,6 +174,10 @@ final class VoiceChatTilesGridItemNode: ListViewItemNode {
        var backgroundFrame = self.backgroundNode.frame
        backgroundFrame.size.height = currentValue
        self.backgroundNode.frame = backgroundFrame
+
+       var cornersFrame = self.cornersNode.frame
+       cornersFrame.origin.y = currentValue
+       self.cornersNode.frame = cornersFrame
    }

    func asyncLayout() -> (_ item: VoiceChatTilesGridItem, _ params: ListViewItemLayoutParams) -> (ListViewItemNodeLayout, () -> Void) {
@@ -191,21 +195,19 @@ final class VoiceChatTilesGridItemNode: ListViewItemNode {
                    tileGridNode = current
                } else {
                    strongSelf.backgroundNode.backgroundColor = item.getIsExpanded() ? fullscreenBackgroundColor : panelBackgroundColor
+                   strongSelf.cornersNode.image = decorationCornersImage(top: true, bottom: false, dark: item.getIsExpanded())
                    tileGridNode = VoiceChatTileGridNode(context: item.context)
                    strongSelf.addSubnode(tileGridNode)
                    strongSelf.tileGridNode = tileGridNode
                }

-               let transition: ContainedViewLayoutTransition = currentItem == nil ? .immediate : .animated(duration: 0.3, curve: .spring)
+               let transition: ContainedViewLayoutTransition = currentItem == nil ? .immediate : .animated(duration: 0.4, curve: .spring)
                let tileGridSize = tileGridNode.update(size: CGSize(width: params.width - params.leftInset - params.rightInset, height: CGFloat.greatestFiniteMagnitude), items: item.tiles, transition: transition)
                if currentItem == nil {
-                   let transition: ContainedViewLayoutTransition = .animated(duration: 0.3, curve: .easeInOut)
                    tileGridNode.frame = CGRect(x: params.leftInset, y: 0.0, width: tileGridSize.width, height: 0.0)
-                   // transition.updateFrame(node: tileGridNode, frame: CGRect(origin: CGPoint(x: params.leftInset, y: 0.0), size: tileGridSize))
                    strongSelf.backgroundNode.frame = tileGridNode.frame
-                   // transition.updateFrame(node: strongSelf.backgroundNode, frame: CGRect(origin: CGPoint(x: params.leftInset, y: 0.0), size: tileGridSize))
+                   strongSelf.cornersNode.frame = CGRect(x: 14.0, y: 0.0, width: tileGridSize.width, height: 50.0)
                } else {
                    transition.updateFrame(node: tileGridNode, frame: CGRect(origin: CGPoint(x: params.leftInset, y: 0.0), size: tileGridSize))
                    transition.updateFrame(node: strongSelf.backgroundNode, frame: CGRect(origin: CGPoint(x: params.leftInset, y: 0.0), size: tileGridSize))

View File

@@ -35,11 +35,13 @@ final class VoiceChatTileItem: Equatable {
    let peer: Peer
    let videoEndpointId: String
-   let icon: Icon
    let strings: PresentationStrings
    let nameDisplayOrder: PresentationPersonNameOrder
+   let icon: Icon
+   let text: VoiceChatParticipantItem.ParticipantText
    let speaking: Bool
    let action: () -> Void
+   let contextAction: ((ASDisplayNode, ContextGesture?) -> Void)?
    let getVideo: () -> GroupVideoNode?
    let getAudioLevel: (() -> Signal<Float, NoError>)?
@@ -47,14 +49,16 @@ final class VoiceChatTileItem: Equatable {
        return self.videoEndpointId
    }

-   init(peer: Peer, videoEndpointId: String, strings: PresentationStrings, nameDisplayOrder: PresentationPersonNameOrder, speaking: Bool, icon: Icon, action: @escaping () -> Void, getVideo: @escaping () -> GroupVideoNode?, getAudioLevel: (() -> Signal<Float, NoError>)?) {
+   init(peer: Peer, videoEndpointId: String, strings: PresentationStrings, nameDisplayOrder: PresentationPersonNameOrder, speaking: Bool, icon: Icon, text: VoiceChatParticipantItem.ParticipantText, action: @escaping () -> Void, contextAction: ((ASDisplayNode, ContextGesture?) -> Void)?, getVideo: @escaping () -> GroupVideoNode?, getAudioLevel: (() -> Signal<Float, NoError>)?) {
        self.peer = peer
        self.videoEndpointId = videoEndpointId
        self.strings = strings
        self.nameDisplayOrder = nameDisplayOrder
        self.icon = icon
+       self.text = text
        self.speaking = speaking
        self.action = action
+       self.contextAction = contextAction
        self.getVideo = getVideo
        self.getAudioLevel = getAudioLevel
    }
@@ -93,16 +97,25 @@ final class VoiceChatTileItemNode: ASDisplayNode {
    let contextSourceNode: ContextExtractedContentContainingNode
    private let containerNode: ContextControllerSourceNode
-   private let backgroundNode: ASDisplayNode
+   let contentNode: ASDisplayNode
+   let backgroundNode: ASDisplayNode
+   var videoContainerNode: ASDisplayNode
    var videoNode: GroupVideoNode?
-   private let fadeNode: ASImageNode
+   let infoNode: ASDisplayNode
+   let fadeNode: ASImageNode
    private let titleNode: ImmediateTextNode
    private let iconNode: ASImageNode
    private var animationNode: VoiceChatMicrophoneNode?
    private var highlightNode: ASImageNode
+   private let statusNode: VoiceChatParticipantStatusNode

-   private var validLayout: CGSize?
+   private var profileNode: VoiceChatPeerProfileNode?
+   private var extractedRect: CGRect?
+   private var nonExtractedRect: CGRect?
+   private var validLayout: (CGSize, CGFloat)?

    var item: VoiceChatTileItem?
+   private var isExtracted = false

    private let audioLevelDisposable = MetaDisposable()
@@ -111,10 +124,19 @@ final class VoiceChatTileItemNode: ASDisplayNode {
        self.contextSourceNode = ContextExtractedContentContainingNode()
        self.containerNode = ContextControllerSourceNode()

+       self.contentNode = ASDisplayNode()
+       self.contentNode.clipsToBounds = true
+       self.contentNode.cornerRadius = 11.0

        self.backgroundNode = ASDisplayNode()
        self.backgroundNode.backgroundColor = panelBackgroundColor

+       self.videoContainerNode = ASDisplayNode()
+       self.videoContainerNode.clipsToBounds = true

+       self.infoNode = ASDisplayNode()

        self.fadeNode = ASImageNode()
        self.fadeNode.displaysAsynchronously = false
        self.fadeNode.displayWithoutProcessing = true
@@ -122,6 +144,7 @@ final class VoiceChatTileItemNode: ASDisplayNode {
        self.fadeNode.image = fadeImage

        self.titleNode = ImmediateTextNode()
+       self.statusNode = VoiceChatParticipantStatusNode()

        self.iconNode = ASImageNode()
        self.iconNode.displaysAsynchronously = false
@@ -136,18 +159,38 @@ final class VoiceChatTileItemNode: ASDisplayNode {
        self.clipsToBounds = true

-       self.contextSourceNode.contentNode.clipsToBounds = true
-       self.contextSourceNode.contentNode.cornerRadius = 11.0

        self.containerNode.addSubnode(self.contextSourceNode)
        self.containerNode.targetNodeForActivationProgress = self.contextSourceNode.contentNode
        self.addSubnode(self.containerNode)

-       self.contextSourceNode.contentNode.addSubnode(self.backgroundNode)
-       self.contextSourceNode.contentNode.addSubnode(self.fadeNode)
-       self.contextSourceNode.contentNode.addSubnode(self.titleNode)
-       self.contextSourceNode.contentNode.addSubnode(self.iconNode)
-       self.contextSourceNode.contentNode.addSubnode(self.highlightNode)
+       self.contextSourceNode.contentNode.addSubnode(self.contentNode)
+       self.contentNode.addSubnode(self.backgroundNode)
+       self.contentNode.addSubnode(self.videoContainerNode)
+       self.contentNode.addSubnode(self.fadeNode)
+       self.contentNode.addSubnode(self.infoNode)
+       self.infoNode.addSubnode(self.titleNode)
+       self.infoNode.addSubnode(self.iconNode)
+       self.contentNode.addSubnode(self.highlightNode)

+       self.containerNode.shouldBegin = { [weak self] location in
+           guard let _ = self else {
+               return false
+           }
+           return true
+       }
+       self.containerNode.activated = { [weak self] gesture, _ in
+           guard let strongSelf = self, let item = strongSelf.item, let contextAction = item.contextAction else {
+               gesture.cancel()
+               return
+           }
+           contextAction(strongSelf.contextSourceNode, gesture)
+       }
+       self.contextSourceNode.willUpdateIsExtractedToContextPreview = { [weak self] isExtracted, transition in
+           guard let strongSelf = self, let _ = strongSelf.item else {
+               return
+           }
+           strongSelf.updateIsExtracted(isExtracted, transition: transition)
+       }
    }

    deinit {
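
The shouldBegin/activated closures added above forward a "context" gesture to the item's optional contextAction and cancel the gesture when the item has no action. A simplified, self-contained sketch of that wiring, using a plain UIView and UILongPressGestureRecognizer in place of AsyncDisplayKit's ContextControllerSourceNode (TileItem and TileView are illustrative names, not types from this change):

import UIKit

// A pared-down item: the view only forwards the gesture, the item decides
// whether a context action exists at all.
struct TileItem {
    let title: String
    let contextAction: ((UIView, UIGestureRecognizer) -> Void)?
}

final class TileView: UIView {
    var item: TileItem?

    override init(frame: CGRect) {
        super.init(frame: frame)
        let recognizer = UILongPressGestureRecognizer(target: self, action: #selector(handleLongPress(_:)))
        addGestureRecognizer(recognizer)
    }

    required init?(coder: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }

    @objc private func handleLongPress(_ recognizer: UILongPressGestureRecognizer) {
        guard recognizer.state == .began else { return }
        // Mirrors the `activated` closure: bail out when the item has no context action.
        guard let item = self.item, let contextAction = item.contextAction else {
            recognizer.isEnabled = false
            recognizer.isEnabled = true // cancels the in-flight gesture
            return
        }
        contextAction(self, recognizer)
    }
}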
@@ -164,10 +207,44 @@ final class VoiceChatTileItemNode: ASDisplayNode {
        self.item?.action()
    }

-   func update(size: CGSize, item: VoiceChatTileItem, transition: ContainedViewLayoutTransition) {
-       guard self.validLayout != size || self.item != item else {
+   private func updateIsExtracted(_ isExtracted: Bool, transition: ContainedViewLayoutTransition) {
+       guard self.isExtracted != isExtracted, let extractedRect = self.extractedRect, let nonExtractedRect = self.nonExtractedRect, let item = self.item else {
            return
        }
+       self.isExtracted = isExtracted
+
+       if isExtracted {
+           let profileNode = VoiceChatPeerProfileNode(context: self.context, size: extractedRect.size, peer: item.peer, text: item.text, customNode: self.videoContainerNode, additionalEntry: .single(nil), requestDismiss: { [weak self] in
+               self?.contextSourceNode.requestDismiss?()
+           })
+           profileNode.frame = CGRect(origin: CGPoint(), size: extractedRect.size)
+           self.profileNode = profileNode
+           self.contextSourceNode.contentNode.addSubnode(profileNode)
+
+           profileNode.animateIn(from: self, targetRect: extractedRect, transition: transition)
+
+           self.contextSourceNode.contentNode.customHitTest = { [weak self] point in
+               if let strongSelf = self, let profileNode = strongSelf.profileNode {
+                   if profileNode.avatarListWrapperNode.frame.contains(point) {
+                       return profileNode.avatarListNode.view
+                   }
+               }
+               return nil
+           }
+       } else if let profileNode = self.profileNode {
+           self.profileNode = nil
+           profileNode.animateOut(to: self, targetRect: nonExtractedRect, transition: transition)
+
+           self.contextSourceNode.contentNode.customHitTest = nil
+       }
+   }
+
+   func update(size: CGSize, availableWidth: CGFloat, item: VoiceChatTileItem, transition: ContainedViewLayoutTransition) {
+       guard self.validLayout?.0 != size || self.validLayout?.1 != availableWidth || self.item != item else {
+           return
+       }
+
+       self.validLayout = (size, availableWidth)
+
        var itemTransition = transition
        if self.item != item {
@@ -206,7 +283,7 @@ final class VoiceChatTileItemNode: ASDisplayNode {
            if let videoNode = item.getVideo() {
                itemTransition = .immediate
                self.videoNode = videoNode
-               self.contextSourceNode.contentNode.insertSubnode(videoNode, at: 1)
+               self.videoContainerNode.addSubnode(videoNode)
            }
        }
@@ -248,7 +325,7 @@ final class VoiceChatTileItemNode: ASDisplayNode {
                } else {
                    animationNode = VoiceChatMicrophoneNode()
                    self.animationNode = animationNode
-                   self.contextSourceNode.contentNode.addSubnode(animationNode)
+                   self.infoNode.addSubnode(animationNode)
                }
                animationNode.alpha = 1.0
                animationNode.update(state: VoiceChatMicrophoneNode.State(muted: muted, filled: true, color: UIColor.white), animated: true)
@@ -259,34 +336,56 @@ final class VoiceChatTileItemNode: ASDisplayNode {
        }

        let bounds = CGRect(origin: CGPoint(), size: size)
+       self.contentNode.frame = bounds
        self.containerNode.frame = bounds
        self.contextSourceNode.frame = bounds
        self.contextSourceNode.contentNode.frame = bounds

-       if let videoNode = self.videoNode {
-           transition.updateFrame(node: videoNode, frame: bounds)
-           videoNode.updateLayout(size: size, isLandscape: true, transition: itemTransition)
+       let extractedWidth = availableWidth
+       let makeStatusLayout = self.statusNode.asyncLayout()
+       let (statusLayout, _) = makeStatusLayout(CGSize(width: availableWidth - 30.0, height: CGFloat.greatestFiniteMagnitude), item.text, true)
+       let extractedRect = CGRect(x: 0.0, y: 0.0, width: extractedWidth, height: extractedWidth + statusLayout.height + 39.0)
+       let nonExtractedRect = bounds
+       self.extractedRect = extractedRect
+       self.nonExtractedRect = nonExtractedRect
+       self.contextSourceNode.contentRect = extractedRect
+
+       if self.videoContainerNode.supernode === self.contentNode {
+           if let videoNode = self.videoNode {
+               transition.updateFrame(node: videoNode, frame: bounds)
+               videoNode.updateLayout(size: size, isLandscape: true, transition: itemTransition)
+           }
+           transition.updateFrame(node: self.videoContainerNode, frame: bounds)
        }

        transition.updateFrame(node: self.backgroundNode, frame: bounds)
        transition.updateFrame(node: self.highlightNode, frame: bounds)
+       transition.updateFrame(node: self.infoNode, frame: bounds)
        transition.updateFrame(node: self.fadeNode, frame: CGRect(x: 0.0, y: size.height - fadeHeight, width: size.width, height: fadeHeight))

        let titleSize = self.titleNode.updateLayout(CGSize(width: size.width - 50.0, height: size.height))
-       self.titleNode.frame = CGRect(origin: CGPoint(x: 11.0, y: size.height - titleSize.height - 8.0), size: titleSize)
+       self.titleNode.frame = CGRect(origin: CGPoint(x: 30.0, y: size.height - titleSize.height - 8.0), size: titleSize)

        if let animationNode = self.animationNode {
            let animationSize = CGSize(width: 36.0, height: 36.0)
            animationNode.bounds = CGRect(origin: CGPoint(), size: animationSize)
            animationNode.transform = CATransform3DMakeScale(0.66667, 0.66667, 1.0)
-           transition.updatePosition(node: animationNode, position: CGPoint(x: size.width - 19.0, y: size.height - 15.0))
+           transition.updatePosition(node: animationNode, position: CGPoint(x: 16.0, y: size.height - 15.0))
        }
    }

-   func animateTransitionIn(from sourceNode: ASDisplayNode, containerNode: ASDisplayNode, animate: Bool = true) {
+   func animateTransitionIn(from sourceNode: ASDisplayNode, containerNode: ASDisplayNode, transition: ContainedViewLayoutTransition, animate: Bool = true) {
        guard let _ = self.item else {
            return
        }

+       var duration: Double = 0.2
+       var timingFunction: String = CAMediaTimingFunctionName.easeInEaseOut.rawValue
+       if case let .animated(transitionDuration, curve) = transition {
+           duration = transitionDuration + 0.05
+           timingFunction = curve.timingFunction
+       }
+
        if let sourceNode = sourceNode as? VoiceChatFullscreenParticipantItemNode, let _ = sourceNode.item {
            let initialAnimate = animate
@@ -301,7 +400,7 @@ final class VoiceChatTileItemNode: ASDisplayNode {
                    sourceNode.videoNode = nil
                    videoNode.alpha = 1.0
                    self.videoNode = videoNode
-                   self.contextSourceNode.contentNode.insertSubnode(videoNode, at: 1)
+                   self.videoContainerNode.addSubnode(videoNode)

                    if animate {
                        // self.videoContainerNode.layer.animateScale(from: sourceNode.bounds.width / videoSize.width, to: tileSize.width / videoSize.width, duration: 0.2, timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue)
@@ -311,7 +410,7 @@ final class VoiceChatTileItemNode: ASDisplayNode {
                    }

                    sourceNode.isHidden = true
-                   Queue.mainQueue().after(0.25) {
+                   Queue.mainQueue().after(0.4) {
                        sourceNode.isHidden = false
                    }
@@ -322,15 +421,14 @@ final class VoiceChatTileItemNode: ASDisplayNode {
                self.contextSourceNode.position = targetContainerPosition
                containerNode.addSubnode(self.contextSourceNode)

-               self.contextSourceNode.layer.animateScale(from: 0.467, to: 1.0, duration: 0.2, timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue)
-               self.contextSourceNode.layer.animatePosition(from: startContainerPosition, to: targetContainerPosition, duration: 0.2, timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue, completion: { [weak self] _ in
+               self.contextSourceNode.layer.animateScale(from: 0.467, to: 1.0, duration: duration, timingFunction: timingFunction)
+               self.contextSourceNode.layer.animatePosition(from: startContainerPosition, to: targetContainerPosition, duration: duration, timingFunction: timingFunction, completion: { [weak self] _ in
                    if let strongSelf = self {
                        strongSelf.contextSourceNode.position = initialPosition
                        strongSelf.containerNode.addSubnode(strongSelf.contextSourceNode)
                    }
                })

-               let transition = ContainedViewLayoutTransition.animated(duration: 0.2, curve: .easeInOut)
                self.videoNode?.updateLayout(size: self.bounds.size, isLandscape: true, transition: transition)
                self.videoNode?.frame = self.bounds
            } else if !initialAnimate {
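
animateTransitionIn now derives its animation duration and timing function from the transition it is handed instead of hard-coding 0.2s and ease-in-ease-out. A compact sketch of that idea, with a minimal Transition enum standing in for ContainedViewLayoutTransition (the enum and helper names are simplified assumptions, not part of the code above):

import QuartzCore

// Simplified stand-in for a layout transition: either immediate or animated
// with a duration and a named timing curve.
enum Transition {
    case immediate
    case animated(duration: Double, curve: CAMediaTimingFunctionName)
}

// Resolves the duration and timing function for a transition-driven animation,
// falling back to defaults when the transition is .immediate.
func animationParameters(for transition: Transition) -> (duration: Double, timingFunction: CAMediaTimingFunctionName) {
    var duration: Double = 0.2
    var timingFunction: CAMediaTimingFunctionName = .easeInEaseOut
    if case let .animated(transitionDuration, curve) = transition {
        duration = transitionDuration
        timingFunction = curve
    }
    return (duration, timingFunction)
}

// Example: a scale-in animation driven by the resolved parameters.
func makeScaleInAnimation(for transition: Transition) -> CABasicAnimation {
    let (duration, timing) = animationParameters(for: transition)
    let animation = CABasicAnimation(keyPath: "transform.scale")
    animation.fromValue = 0.467
    animation.toValue = 1.0
    animation.duration = duration
    animation.timingFunction = CAMediaTimingFunction(name: timing)
    return animation
}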

View File

@@ -0,0 +1,12 @@
{
"images" : [
{
"filename" : "ic_sharescreen.pdf",
"idiom" : "universal"
}
],
"info" : {
"author" : "xcode",
"version" : 1
}
}

View File

@@ -0,0 +1,12 @@
{
"images" : [
{
"filename" : "pin.pdf",
"idiom" : "universal"
}
],
"info" : {
"author" : "xcode",
"version" : 1
}
}

Binary file not shown.

View File

@@ -0,0 +1,12 @@
{
"images" : [
{
"filename" : "unpin.pdf",
"idiom" : "universal"
}
],
"info" : {
"author" : "xcode",
"version" : 1
}
}

Binary file not shown.