Video Chat Improvements

Ilya Laktyushin 2021-04-27 20:20:24 +04:00
parent 91b81eadff
commit 3fd2b28b45
4 changed files with 151 additions and 77 deletions

View File

@@ -358,6 +358,9 @@ public final class PeerInfoAvatarListItemNode: ASDisplayNode {
             representations = []
             videoRepresentations = []
             immediateThumbnailData = nil
+            if !synchronous {
+                self.addSubnode(node)
+            }
         case let .topImage(topRepresentations, videoRepresentationsValue, immediateThumbnail):
             representations = topRepresentations
             videoRepresentations = videoRepresentationsValue
@@ -953,7 +956,7 @@ public final class PeerInfoAvatarListContainerNode: ASDisplayNode {
     }
     
     private var additionalEntryProgress: Signal<Float?, NoError>? = nil
-    public func update(size: CGSize, peer: Peer?, additionalEntry: Signal<(TelegramMediaImageRepresentation, Float)?, NoError> = .single(nil), isExpanded: Bool, transition: ContainedViewLayoutTransition) {
+    public func update(size: CGSize, peer: Peer?, customNode: ASDisplayNode? = nil, additionalEntry: Signal<(TelegramMediaImageRepresentation, Float)?, NoError> = .single(nil), isExpanded: Bool, transition: ContainedViewLayoutTransition) {
         self.validLayout = size
         let previousExpanded = self.isExpanded
         self.isExpanded = isExpanded
@@ -1009,6 +1012,9 @@ public final class PeerInfoAvatarListContainerNode: ASDisplayNode {
         }
         var items: [PeerInfoAvatarListItem] = []
+        if let customNode = customNode {
+            items.append(.custom(customNode))
+        }
         for entry in entries {
             items.append(PeerInfoAvatarListItem(entry: entry))
         }
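
A minimal usage sketch of the new customNode parameter (illustrative only; avatarListNode, peer, and targetSize stand in for whatever the caller already has, and the node is surfaced as the first gallery item via the `.custom` item case used above):

    // Sketch only, not part of the commit.
    let videoContainerNode = ASDisplayNode()          // illustrative node to show first
    avatarListNode.update(
        size: targetSize,                             // illustrative size
        peer: peer,
        customNode: videoContainerNode,               // new optional parameter, defaults to nil
        additionalEntry: .single(nil),
        isExpanded: true,
        transition: .immediate
    )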

View File

@@ -176,7 +176,8 @@ final class GroupVideoNode: ASDisplayNode {
         rotatedVideoFrame.size.width = ceil(rotatedVideoFrame.size.width)
         rotatedVideoFrame.size.height = ceil(rotatedVideoFrame.size.height)
         
-        var videoSize = CGSize(width: 1203, height: 677)
+        var videoSize = rotatedVideoFrame.size
+        // CGSize(width: 1203, height: 677)
         transition.updatePosition(layer: self.videoView.view.layer, position: rotatedVideoFrame.center)
         transition.updateBounds(layer: self.videoView.view.layer, bounds: CGRect(origin: CGPoint(), size: videoSize))
@@ -591,6 +592,12 @@ public final class VoiceChatController: ViewController {
                 state = .listening
             }
             
+            let textIcon: VoiceChatParticipantItem.ParticipantText.Icon?
+            if peerEntry.volume != nil {
+                textIcon = .volume
+            } else {
+                textIcon = nil
+            }
             let yourText: String
             if (peerEntry.about?.isEmpty ?? true) && peer.smallProfileImage == nil {
                 yourText = presentationData.strings.VoiceChat_TapToAddPhotoOrBio
@@ -604,13 +611,13 @@
             switch state {
             case .listening:
                 if peerEntry.isMyPeer {
-                    text = .text(yourText, .accent)
+                    text = .text(yourText, textIcon, .accent)
                 } else if let muteState = peerEntry.muteState, muteState.mutedByYou {
-                    text = .text(presentationData.strings.VoiceChat_StatusMutedForYou, .destructive)
+                    text = .text(presentationData.strings.VoiceChat_StatusMutedForYou, textIcon, .destructive)
                 } else if let about = peerEntry.about, !about.isEmpty {
-                    text = .text(about, .generic)
+                    text = .text(about, textIcon, .generic)
                 } else {
-                    text = .text(presentationData.strings.VoiceChat_StatusListening, .generic)
+                    text = .text(presentationData.strings.VoiceChat_StatusListening, textIcon, .generic)
                 }
                 let microphoneColor: UIColor
                 if let muteState = peerEntry.muteState, !muteState.canUnmute || muteState.mutedByYou {
@@ -621,33 +628,33 @@
                 icon = .microphone(peerEntry.muteState != nil, microphoneColor)
             case .speaking:
                 if let muteState = peerEntry.muteState, muteState.mutedByYou {
-                    text = .text(presentationData.strings.VoiceChat_StatusMutedForYou, .destructive)
+                    text = .text(presentationData.strings.VoiceChat_StatusMutedForYou, textIcon, .destructive)
                     icon = .microphone(true, UIColor(rgb: 0xff3b30))
                 } else {
                     let volumeValue = peerEntry.volume.flatMap { $0 / 100 }
                     if let volume = volumeValue, volume != 100 {
-                        text = .text(presentationData.strings.VoiceChat_StatusSpeakingVolume("\(volume)%").0, .constructive)
+                        text = .text(presentationData.strings.VoiceChat_StatusSpeakingVolume("\(volume)%").0, textIcon, .constructive)
                     } else {
-                        text = .text(presentationData.strings.VoiceChat_StatusSpeaking, .constructive)
+                        text = .text(presentationData.strings.VoiceChat_StatusSpeaking, textIcon, .constructive)
                     }
                     icon = .microphone(false, UIColor(rgb: 0x34c759))
                 }
             case .invited:
-                text = .text(presentationData.strings.VoiceChat_StatusInvited, .generic)
+                text = .text(presentationData.strings.VoiceChat_StatusInvited, textIcon, .generic)
                 icon = .invite(true)
             case .raisedHand:
                 if peerEntry.isMyPeer && !peerEntry.displayRaisedHandStatus {
-                    text = .text(yourText, .accent)
+                    text = .text(yourText, textIcon, .accent)
                 } else if let about = peerEntry.about, !about.isEmpty && !peerEntry.displayRaisedHandStatus {
-                    text = .text(about, .generic)
+                    text = .text(about, textIcon, .generic)
                 } else {
-                    text = .text(presentationData.strings.VoiceChat_StatusWantsToSpeak, .accent)
+                    text = .text(presentationData.strings.VoiceChat_StatusWantsToSpeak, textIcon, .accent)
                 }
                 icon = .wantsToSpeak
             }
             if let about = peerEntry.about, !about.isEmpty {
-                expandedText = .text(about, .generic)
+                expandedText = .text(about, textIcon, .generic)
             }
             let revealOptions: [VoiceChatParticipantItem.RevealOption] = []
@@ -2158,6 +2165,7 @@ public final class VoiceChatController: ViewController {
                     completion()
                 }
             } else if case .fullscreen = strongSelf.displayMode {
+                strongSelf.animatingExpansion = true
                 strongSelf.updateIsFullscreen(strongSelf.isFullscreen, force: true)
                 
                 if let (layout, navigationHeight) = strongSelf.validLayout {
@@ -3178,16 +3186,13 @@ public final class VoiceChatController: ViewController {
         }
         
         @objc private func cameraPressed() {
-            let controller = voiceChatCameraPreviewController(sharedContext: self.context.sharedContext, account: self.context.account, forceTheme: self.darkTheme, title: self.presentationData.strings.VoiceChat_VideoPreviewTitle, text: self.presentationData.strings.VoiceChat_VideoPreviewDescription, apply: {
-            })
-            self.controller?.present(controller, in: .window(.root))
-            return
             if self.call.isVideo {
                 self.call.disableVideo()
             } else {
-                self.call.requestVideo()
+                let controller = voiceChatCameraPreviewController(sharedContext: self.context.sharedContext, account: self.context.account, forceTheme: self.darkTheme, title: self.presentationData.strings.VoiceChat_VideoPreviewTitle, text: self.presentationData.strings.VoiceChat_VideoPreviewDescription, apply: { [weak self] in
+                    self?.call.requestVideo()
+                })
+                self.controller?.present(controller, in: .window(.root))
             }
         }
@@ -3331,7 +3336,7 @@ public final class VoiceChatController: ViewController {
             let itemNode = self.mainParticipantNode
             item.updateNode(async: { $0() }, node: {
                 return itemNode
-            }, params: ListViewItemLayoutParams(width: mainParticipantNodeWidth, leftInset: 0.0, rightInset: 0.0, availableHeight: self.bounds.height), previousItem: nil, nextItem: nil, animation: .None, completion: { (layout, apply) in
+            }, params: ListViewItemLayoutParams(width: mainParticipantNodeWidth, leftInset: 0.0, rightInset: 0.0, availableHeight: self.bounds.height), previousItem: nil, nextItem: nil, animation: .System(duration: 0.2), completion: { (layout, apply) in
                 itemNode.contentSize = layout.contentSize
                 itemNode.insets = layout.insets
                 itemNode.isUserInteractionEnabled = false
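
Taken together, the VoiceChatController changes above thread an optional status icon through every `.text(...)` call. A condensed sketch of the pattern (names follow the diff; the Icon type is declared in VoiceChatParticipantItem in the next file):

    // Sketch of the pattern above, not additional commit code.
    let textIcon: VoiceChatParticipantItem.ParticipantText.Icon? =
        peerEntry.volume != nil ? .volume : nil
    let text: VoiceChatParticipantItem.ParticipantText =
        .text(presentationData.strings.VoiceChat_StatusListening, textIcon, .generic)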

View File

@@ -26,6 +26,11 @@ final class VoiceChatParticipantItem: ListViewItem {
    }
    
    enum ParticipantText {
+        public enum Icon {
+            case volume
+            case video
+        }
+        
        public enum TextColor {
            case generic
            case accent
@@ -34,7 +39,7 @@ final class VoiceChatParticipantItem: ListViewItem {
        }
        
        case presence
-        case text(String, TextColor)
+        case text(String, Icon?, TextColor)
        case none
    }
@@ -190,6 +195,27 @@ private let fadeImage = generateImage(CGSize(width: 1.0, height: 30.0), rotatedC
    context.drawLinearGradient(gradient, start: CGPoint(), end: CGPoint(x: 0.0, y: size.height), options: CGGradientDrawingOptions())
})

+private class VoiceChatParticipantStatusNode: ASDisplayNode {
+    private let iconNode: ASImageNode
+    private let textNode: TextNode
+    
+    override init() {
+        self.iconNode = ASImageNode()
+        self.iconNode.displaysAsynchronously = false
+        
+        self.textNode = TextNode()
+        
+        super.init()
+        
+        self.addSubnode(self.iconNode)
+        self.addSubnode(self.textNode)
+    }
+    
+    func update() {
+    }
+}
+
class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode {
    private let topStripeNode: ASDisplayNode
    private let bottomStripeNode: ASDisplayNode
@@ -236,10 +262,11 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode {
    private var peerPresenceManager: PeerPresenceStatusManager?
    private var layoutParams: (VoiceChatParticipantItem, ListViewItemLayoutParams, Bool, Bool)?
    private var isExtracted = false
+    private var animatingExtraction = false
    private var wavesColor: UIColor?
    
    private let videoContainerNode: ASDisplayNode
-    private let fadeNode: ASImageNode
+    private let videoFadeNode: ASImageNode
    private var videoNode: GroupVideoNode?
    private let videoReadyDisposable = MetaDisposable()
    private var videoReadyDelayed = false
@@ -293,12 +320,12 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode {
        self.videoContainerNode = ASDisplayNode()
        self.videoContainerNode.clipsToBounds = true
        
-        self.fadeNode = ASImageNode()
-        self.fadeNode.displaysAsynchronously = false
-        self.fadeNode.displayWithoutProcessing = true
-        self.fadeNode.contentMode = .scaleToFill
-        self.fadeNode.image = fadeImage
-        self.videoContainerNode.addSubnode(fadeNode)
+        self.videoFadeNode = ASImageNode()
+        self.videoFadeNode.displaysAsynchronously = false
+        self.videoFadeNode.displayWithoutProcessing = true
+        self.videoFadeNode.contentMode = .scaleToFill
+        self.videoFadeNode.image = fadeImage
+        self.videoContainerNode.addSubnode(videoFadeNode)
        
        self.titleNode = TextNode()
        self.titleNode.isUserInteractionEnabled = false
@@ -446,9 +473,7 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode {
                let avatarListWrapperNode = PinchSourceContainerNode()
                avatarListWrapperNode.clipsToBounds = true
                avatarListWrapperNode.cornerRadius = backgroundCornerRadius
                avatarListWrapperNode.activate = { [weak self] sourceNode in
                    guard let strongSelf = self else {
                        return
@@ -486,9 +511,13 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode {
                    radiusTransition.updateCornerRadius(node: transitionNode, cornerRadius: 0.0)
                    strongSelf.avatarNode.isHidden = true
-                    strongSelf.videoContainerNode.isHidden = true
                    avatarListWrapperNode.contentNode.addSubnode(transitionNode)
+                    strongSelf.videoContainerNode.position = CGPoint(x: avatarListWrapperNode.frame.width / 2.0, y: avatarListWrapperNode.frame.height / 2.0)
+                    strongSelf.videoContainerNode.cornerRadius = tileSize.width / 2.0
+                    strongSelf.videoContainerNode.transform = CATransform3DMakeScale(avatarListWrapperNode.frame.width / tileSize.width * 1.05, avatarListWrapperNode.frame.height / tileSize.width * 1.05, 1.0)
+                    avatarListWrapperNode.contentNode.addSubnode(strongSelf.videoContainerNode)
                    
                    strongSelf.avatarTransitionNode = transitionNode
                    
                    let avatarListContainerNode = ASDisplayNode()
@@ -498,8 +527,14 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode {
                    avatarListContainerNode.cornerRadius = targetRect.width / 2.0
                    
                    avatarListWrapperNode.layer.animateSpring(from: initialScale as NSNumber, to: 1.0 as NSNumber, keyPath: "transform.scale", duration: springDuration, initialVelocity: 0.0, damping: springDamping)
-                    avatarListWrapperNode.layer.animateSpring(from: NSValue(cgPoint: avatarInitialRect.center), to: NSValue(cgPoint: avatarListWrapperNode.position), keyPath: "position", duration: springDuration, initialVelocity: 0.0, damping: springDamping)
+                    avatarListWrapperNode.layer.animateSpring(from: NSValue(cgPoint: avatarInitialRect.center), to: NSValue(cgPoint: avatarListWrapperNode.position), keyPath: "position", duration: springDuration, initialVelocity: 0.0, damping: springDamping, completion: { [weak self] _ in
+                        if let strongSelf = self, let avatarListNode = strongSelf.avatarListNode {
+                            avatarListNode.currentItemNode?.addSubnode(strongSelf.videoContainerNode)
+                        }
+                    })
                    
                    radiusTransition.updateCornerRadius(node: avatarListContainerNode, cornerRadius: 0.0)
+                    radiusTransition.updateCornerRadius(node: strongSelf.videoContainerNode, cornerRadius: 0.0)
                    
                    let avatarListNode = PeerInfoAvatarListContainerNode(context: item.context)
                    avatarListWrapperNode.contentNode.clipsToBounds = true
@@ -519,7 +554,7 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode {
                    avatarListContainerNode.addSubnode(avatarListNode.controlsClippingOffsetNode)
                    avatarListWrapperNode.contentNode.addSubnode(avatarListContainerNode)
                    
-                    avatarListNode.update(size: targetRect.size, peer: item.peer, additionalEntry: item.getUpdatingAvatar(), isExpanded: true, transition: .immediate)
+                    avatarListNode.update(size: targetRect.size, peer: item.peer, customNode: strongSelf.videoContainerNode, additionalEntry: item.getUpdatingAvatar(), isExpanded: true, transition: .immediate)
                    strongSelf.offsetContainerNode.supernode?.addSubnode(avatarListWrapperNode)
                    
                    strongSelf.audioLevelView?.alpha = 0.0
@@ -529,6 +564,8 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode {
                    strongSelf.avatarListNode = avatarListNode
                }
            } else if let transitionNode = strongSelf.avatarTransitionNode, let avatarListWrapperNode = strongSelf.avatarListWrapperNode, let avatarListContainerNode = strongSelf.avatarListContainerNode {
+                strongSelf.animatingExtraction = true
+                
                transition.updateCornerRadius(node: strongSelf.backgroundImageNode, cornerRadius: backgroundCornerRadius)
                
                var avatarInitialRect = CGRect(origin: strongSelf.avatarNode.frame.origin, size: strongSelf.avatarNode.frame.size)
@@ -544,7 +581,8 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode {
                    avatarListContainerNode?.removeFromSupernode()
                })
                
-                strongSelf.videoContainerNode.isHidden = false
+                avatarListWrapperNode.contentNode.insertSubnode(strongSelf.videoContainerNode, aboveSubnode: transitionNode)
+                
                avatarListWrapperNode.layer.animate(from: 1.0 as NSNumber, to: targetScale as NSNumber, keyPath: "transform.scale", timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue, duration: 0.2, removeOnCompletion: false)
                avatarListWrapperNode.layer.animate(from: NSValue(cgPoint: avatarListWrapperNode.position), to: NSValue(cgPoint: avatarInitialRect.center), keyPath: "position", timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue, duration: 0.2, removeOnCompletion: false, completion: { [weak transitionNode, weak self] _ in
                    transitionNode?.removeFromSupernode()
@@ -552,10 +590,30 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode {
                    self?.audioLevelView?.alpha = 1.0
                    self?.audioLevelView?.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2)
+                    
+                    if let strongSelf = self {
+                        strongSelf.animatingExtraction = false
+                        
+                        strongSelf.offsetContainerNode.insertSubnode(strongSelf.videoContainerNode, belowSubnode: strongSelf.contentWrapperNode)
+                        switch item.style {
+                        case .list:
+                            strongSelf.videoFadeNode.alpha = 0.0
+                            strongSelf.videoContainerNode.position = strongSelf.avatarNode.position
+                            strongSelf.videoContainerNode.cornerRadius = tileSize.width / 2.0
+                            strongSelf.videoContainerNode.transform = CATransform3DMakeScale(avatarSize / tileSize.width, avatarSize / tileSize.width, 1.0)
+                        case .tile:
+                            strongSelf.videoFadeNode.alpha = 1.0
+                            strongSelf.videoContainerNode.position = CGPoint(x: tileSize.width / 2.0, y: tileSize.height / 2.0)
+                            strongSelf.videoContainerNode.cornerRadius = backgroundCornerRadius
+                            strongSelf.videoContainerNode.transform = CATransform3DMakeScale(1.0, 1.0, 1.0)
+                        }
+                    }
                })
                
                radiusTransition.updateCornerRadius(node: avatarListContainerNode, cornerRadius: avatarListContainerNode.frame.width / 2.0)
                radiusTransition.updateCornerRadius(node: transitionNode, cornerRadius: avatarListContainerNode.frame.width / 2.0)
+                radiusTransition.updateCornerRadius(node: strongSelf.videoContainerNode, cornerRadius: tileSize.width / 2.0)
            }
            
            let alphaTransition = ContainedViewLayoutTransition.animated(duration: 0.2, curve: .easeInOut)
@@ -707,7 +765,7 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode {
                }
            })
            
-            self.fadeNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2, timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue)
+            self.videoFadeNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2, timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue)
            
            if item.pinned {
                self.borderImageNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.3)
@@ -740,7 +798,7 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode {
        if animate {
            sourceNode.avatarNode.alpha = 0.0
-            sourceNode.fadeNode.alpha = 0.0
+            sourceNode.videoFadeNode.alpha = 0.0
            
            let initialAvatarPosition = self.avatarNode.position
            let targetContainerAvatarPosition = self.avatarNode.view.convert(self.avatarNode.bounds, to: containerNode.view).center
@@ -775,7 +833,7 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode {
            sourceNode.contentWrapperNode.layer.animatePosition(from: startContainerContentPosition, to: targetContainerAvatarPosition, duration: 0.3, timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue, completion: { [weak sourceNode] _ in
                if let sourceNode = sourceNode {
                    sourceNode.avatarNode.alpha = 1.0
-                    sourceNode.fadeNode.alpha = 1.0
+                    sourceNode.videoFadeNode.alpha = 1.0
                    sourceNode.contentWrapperNode.position = initialContentPosition
                    sourceNode.offsetContainerNode.insertSubnode(sourceNode.contentWrapperNode, aboveSubnode: sourceNode.videoContainerNode)
                }
@@ -807,7 +865,7 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode {
            self.videoContainerNode.layer.animate(from: backgroundCornerRadius as NSNumber, to: (tileSize.width / 2.0) as NSNumber, keyPath: "cornerRadius", timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue, duration: 0.2, removeOnCompletion: false, completion: { _ in
            })
-            self.fadeNode.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2, timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue)
+            self.videoFadeNode.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2, timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue)
            
            sourceNode.backgroundImageNode.layer.animateScale(from: 1.0, to: 0.001, duration: 0.35, timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue)
            sourceNode.backgroundImageNode.layer.animateAlpha(from: sourceNode.backgroundImageNode.alpha, to: 0.0, duration: 0.35, timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue)
@@ -1068,7 +1126,16 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode {
                    strongSelf.currentTitle = titleAttributedString?.string
                    strongSelf.wavesColor = wavesColor
                    
+                    let videoSize = tileSize
+                    let videoNode = !item.transparent ? item.getVideo() : nil
+                    if let current = strongSelf.videoNode, current !== videoNode {
+                        current.removeFromSupernode()
+                        strongSelf.videoReadyDisposable.set(nil)
+                    }
+                    
+                    let videoNodeUpdated = strongSelf.videoNode !== videoNode
+                    strongSelf.videoNode = videoNode
+                    
                    let nonExtractedRect: CGRect
                    let avatarFrame: CGRect
                    let titleFrame: CGRect
@@ -1102,7 +1169,7 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode {
                    var extractedRect = CGRect(origin: CGPoint(), size: layout.contentSize).insetBy(dx: 16.0 + params.leftInset, dy: 0.0)
                    var extractedHeight = extractedRect.height + expandedStatusLayout.size.height - statusLayout.size.height
                    var extractedVerticalOffset: CGFloat = 0.0
-                    if item.peer.smallProfileImage != nil {
+                    if item.peer.smallProfileImage != nil || strongSelf.videoNode != nil {
                        extractedVerticalOffset = extractedRect.width
                        extractedHeight += extractedVerticalOffset
                    }
@@ -1422,46 +1489,38 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode {
                        node.layer.animateScale(from: 0.001, to: 1.0, duration: 0.2)
                    }
                    
-                    let videoSize = tileSize
-                    let videoNode = !item.transparent ? item.getVideo() : nil
-                    if let current = strongSelf.videoNode, current !== videoNode {
-                        current.removeFromSupernode()
-                        strongSelf.videoReadyDisposable.set(nil)
-                    }
-                    
-                    let videoNodeUpdated = strongSelf.videoNode !== videoNode
-                    strongSelf.videoNode = videoNode
-                    
-                    strongSelf.fadeNode.frame = CGRect(x: 0.0, y: tileSize.height - 30.0, width: tileSize.width, height: 30.0)
-                    strongSelf.videoContainerNode.bounds = CGRect(origin: CGPoint(), size: tileSize)
-                    switch item.style {
-                    case .list:
-                        strongSelf.fadeNode.alpha = 0.0
-                        strongSelf.videoContainerNode.position = strongSelf.avatarNode.position
-                        strongSelf.videoContainerNode.cornerRadius = tileSize.width / 2.0
-                        strongSelf.videoContainerNode.transform = CATransform3DMakeScale(avatarSize / tileSize.width, avatarSize / tileSize.width, 1.0)
-                    case .tile:
-                        strongSelf.fadeNode.alpha = 1.0
-                        strongSelf.videoContainerNode.position = CGPoint(x: tileSize.width / 2.0, y: tileSize.height / 2.0)
-                        strongSelf.videoContainerNode.cornerRadius = backgroundCornerRadius
-                        strongSelf.videoContainerNode.transform = CATransform3DMakeScale(1.0, 1.0, 1.0)
-                    }
+                    if !strongSelf.isExtracted && !strongSelf.animatingExtraction {
+                        strongSelf.videoFadeNode.frame = CGRect(x: 0.0, y: tileSize.height - 30.0, width: tileSize.width, height: 30.0)
+                        strongSelf.videoContainerNode.bounds = CGRect(origin: CGPoint(), size: tileSize)
+                        switch item.style {
+                        case .list:
+                            strongSelf.videoFadeNode.alpha = 0.0
+                            strongSelf.videoContainerNode.position = strongSelf.avatarNode.position
+                            strongSelf.videoContainerNode.cornerRadius = tileSize.width / 2.0
+                            strongSelf.videoContainerNode.transform = CATransform3DMakeScale(avatarSize / tileSize.width, avatarSize / tileSize.width, 1.0)
+                        case .tile:
+                            strongSelf.videoFadeNode.alpha = 1.0
+                            strongSelf.videoContainerNode.position = CGPoint(x: tileSize.width / 2.0, y: tileSize.height / 2.0)
+                            strongSelf.videoContainerNode.cornerRadius = backgroundCornerRadius
+                            strongSelf.videoContainerNode.transform = CATransform3DMakeScale(1.0, 1.0, 1.0)
+                        }
+                    }
                    
                    strongSelf.borderImageNode.isHidden = !item.pinned || item.style == .list
                    
                    if let videoNode = videoNode {
-                        if case .tile = item.style {
+                        if case .tile = item.style, !strongSelf.isExtracted && !strongSelf.animatingExtraction {
                            if currentItem != nil {
                                let transition = ContainedViewLayoutTransition.animated(duration: 0.2, curve: .easeInOut)
                                if item.pinned {
                                    transition.updateAlpha(node: videoNode, alpha: 0.0)
-                                    transition.updateAlpha(node: strongSelf.fadeNode, alpha: 0.0)
+                                    transition.updateAlpha(node: strongSelf.videoFadeNode, alpha: 0.0)
                                    strongSelf.videoContainerNode.layer.animateScale(from: 1.0, to: 0.001, duration: 0.2)
                                    transition.updateAlpha(node: strongSelf.avatarNode, alpha: 1.0)
                                    strongSelf.avatarNode.layer.animateScale(from: 0.0, to: 1.0, duration: 0.2)
                                } else {
                                    transition.updateAlpha(node: videoNode, alpha: 1.0)
-                                    transition.updateAlpha(node: strongSelf.fadeNode, alpha: 1.0)
+                                    transition.updateAlpha(node: strongSelf.videoFadeNode, alpha: 1.0)
                                    strongSelf.videoContainerNode.layer.animateScale(from: 0.001, to: 1.0, duration: 0.2)
                                    transition.updateAlpha(node: strongSelf.avatarNode, alpha: 0.0)
                                    strongSelf.avatarNode.layer.animateScale(from: 1.0, to: 0.001, duration: 0.2)
@@ -1478,14 +1537,16 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode {
                        }
                        
                        videoNode.updateLayout(size: videoSize, isLandscape: false, transition: .immediate)
-                        if videoNode.supernode !== strongSelf.videoContainerNode {
-                            videoNode.clipsToBounds = true
-                            strongSelf.videoContainerNode.addSubnode(videoNode)
+                        if !strongSelf.isExtracted && !strongSelf.animatingExtraction {
+                            if videoNode.supernode !== strongSelf.videoContainerNode {
+                                videoNode.clipsToBounds = true
+                                strongSelf.videoContainerNode.addSubnode(videoNode)
+                            }
+                            videoNode.position = CGPoint(x: videoSize.width / 2.0, y: videoSize.height / 2.0)
+                            videoNode.bounds = CGRect(origin: CGPoint(), size: videoSize)
                        }
-                        videoNode.position = CGPoint(x: videoSize.width / 2.0, y: videoSize.height / 2.0)
-                        videoNode.bounds = CGRect(origin: CGPoint(), size: videoSize)
                        
                        if videoNodeUpdated {
                            strongSelf.videoReadyDelayed = false
                            strongSelf.videoReadyDisposable.set((videoNode.ready
@@ -1547,7 +1608,8 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode {
                    strongSelf.raiseHandNode?.frame = CGRect(origin: CGPoint(), size: animationSize).insetBy(dx: -6.0, dy: -6.0).offsetBy(dx: -2.0, dy: 0.0)
                    
                    strongSelf.actionButtonNode.transform = CATransform3DMakeScale(animationScale, animationScale, 1.0)
-                    strongSelf.actionButtonNode.frame = animationFrame
+                    // strongSelf.actionButtonNode.frame = animationFrame
+                    transition.updateFrame(node: strongSelf.actionButtonNode, frame: animationFrame)
                    
                    if let presence = item.presence as? TelegramUserPresence {
                        strongSelf.peerPresenceManager?.reset(presence: presence)
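
The recurring `!strongSelf.isExtracted && !strongSelf.animatingExtraction` guard is the behavioral core of this file's changes: while a participant cell is extracted into the expanded avatar gallery (or animating there), the regular layout pass leaves videoContainerNode alone, because the transition code above has re-parented and transformed it. Schematically:

    // Schematic restatement of the guard used above; not additional commit code.
    if !strongSelf.isExtracted && !strongSelf.animatingExtraction {
        // normal in-cell layout: position, corner radius and transform of
        // videoContainerNode follow item.style (.list or .tile)
    } else {
        // the avatar-gallery transition currently owns videoContainerNode;
        // it is handed back in the collapse animation's completion block
    }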

View File

@@ -265,6 +265,7 @@ class ChatMessageAnimatedStickerItemNode: ChatMessageItemView {
                if strongSelf.enableSynchronousImageApply {
                    strongSelf.removePlaceholder(animated: false)
                } else {
+                    strongSelf.imageNode.alpha = 0.0
                    strongSelf.animationNode?.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2)
                    strongSelf.removePlaceholder(animated: true)
                }