Voice Chat improvements

This commit is contained in:
Ilya Laktyushin 2021-03-09 18:46:14 +04:00
parent f61f828893
commit cf8fd56e06
16 changed files with 4535 additions and 4150 deletions

View File

@ -6188,6 +6188,7 @@ Sorry for the inconvenience.";
"Contacts.VoiceOver.AddContact" = "Add Contact";
"VoiceChat.SelectAccount" = "Select Account";
"VoiceChat.DisplayAs" = "Display Me As...";
"VoiceChat.DisplayAsInfo" = "Choose whether you want to be displayed as your personal account or as your channel.";
"VoiceChat.DisplayAsSuccess" = "Members of this voice chat will now see your as **%@**.";
@ -6253,3 +6254,6 @@ Sorry for the inconvenience.";
"VoiceChat.YouCanNowSpeak" = "You can now speak";
"VoiceChat.YouCanNowSpeakIn" = "You can now speak in **%@**";
"VoiceChat.MutedByAdmin" = "Muted by Admin";
"VoiceChat.MutedByAdminHelp" = "Tap if you want to speak";

View File

@ -180,6 +180,7 @@ public struct PresentationGroupCallState: Equatable {
public var defaultParticipantMuteState: DefaultParticipantMuteState?
public var recordingStartTimestamp: Int32?
public var title: String?
public var raisedHand: Bool
public init(
myPeerId: PeerId,
@ -189,7 +190,8 @@ public struct PresentationGroupCallState: Equatable {
muteState: GroupCallParticipantsContext.Participant.MuteState?,
defaultParticipantMuteState: DefaultParticipantMuteState?,
recordingStartTimestamp: Int32?,
title: String?
title: String?,
raisedHand: Bool
) {
self.myPeerId = myPeerId
self.networkState = networkState
@ -199,6 +201,7 @@ public struct PresentationGroupCallState: Equatable {
self.defaultParticipantMuteState = defaultParticipantMuteState
self.recordingStartTimestamp = recordingStartTimestamp
self.title = title
self.raisedHand = raisedHand
}
}

View File

@ -16,9 +16,13 @@ public final class AnimationNode : ASDisplayNode {
private var colorCallbacks: [LOTColorValueCallback] = []
public var played = false
public var didPlay = false
public var completion: (() -> Void)?
public var isPlaying: Bool {
return self.animationView()?.isAnimationPlaying ?? false
}
public init(animation: String? = nil, colors: [String: UIColor]? = nil, scale: CGFloat = 1.0) {
self.scale = scale
@ -77,6 +81,15 @@ public final class AnimationNode : ASDisplayNode {
/// Loads a Lottie composition from a bundled JSON file and installs it on the
/// underlying animation view, resetting playback tracking so it can play again.
/// - Parameter name: Resource name (without extension) of a JSON file in the app bundle.
public func setAnimation(name: String) {
// Silently does nothing if the resource is missing or fails to parse.
if let url = getAppBundle().url(forResource: name, withExtension: "json"), let composition = LOTComposition(filePath: url.path) {
self.didPlay = false
self.animationView()?.sceneModel = composition
}
}
/// Installs a Lottie composition parsed from in-memory JSON data, resetting
/// playback tracking so the new animation can play from the start.
/// - Parameter data: Raw Lottie JSON; invalid or non-dictionary JSON is ignored.
public func setAnimation(data: Data) {
if let json = try? JSONSerialization.jsonObject(with: data, options: []) as? [AnyHashable: Any] {
let composition = LOTComposition(json: json)
self.didPlay = false
self.animationView()?.sceneModel = composition
}
}
@ -90,8 +103,8 @@ public final class AnimationNode : ASDisplayNode {
}
public func play() {
if let animationView = animationView(), !animationView.isAnimationPlaying, !self.played {
self.played = true
if let animationView = animationView(), !animationView.isAnimationPlaying && !self.didPlay {
self.didPlay = true
animationView.play { [weak self] _ in
self?.completion?()
}
@ -106,8 +119,8 @@ public final class AnimationNode : ASDisplayNode {
}
public func reset() {
if self.played, let animationView = animationView() {
self.played = false
if self.didPlay, let animationView = animationView() {
self.didPlay = false
animationView.stop()
}
}
@ -120,3 +133,64 @@ public final class AnimationNode : ASDisplayNode {
}
}
}
// Matches Lottie JSON solid-color keyframe values of the form "k":[r,g,b,a].
private let colorKeyRegex = try? NSRegularExpression(pattern: "\"k\":\\[[\\d\\.]+\\,[\\d\\.]+\\,[\\d\\.]+\\,[\\d\\.]+\\]")

/// Rewrites fully opaque color values inside Lottie animation JSON.
///
/// - Parameters:
///   - data: UTF-8 encoded Lottie JSON.
///   - colors: Pairs of (source, replacement) colors; any opaque color value in
///     the JSON matching a source color within a small tolerance is replaced by
///     the paired replacement.
/// - Returns: The transformed JSON data, or the original `data` unchanged if it
///   is not valid UTF-8 (or re-encoding fails).
public func transformedWithColors(data: Data, colors: [(UIColor, UIColor)]) -> Data {
    guard var string = String(data: data, encoding: .utf8) else {
        return data
    }
    let sourceColors: [UIColor] = colors.map { $0.0 }
    let replacementColors: [UIColor] = colors.map { $0.1 }

    // Serializes a color back into the Lottie "k" keyframe syntax (alpha forced to 1).
    func colorToString(_ color: UIColor) -> String {
        var r: CGFloat = 0.0
        var g: CGFloat = 0.0
        var b: CGFloat = 0.0
        if color.getRed(&r, green: &g, blue: &b, alpha: nil) {
            return "\"k\":[\(r),\(g),\(b),1]"
        }
        return ""
    }

    // Approximate comparison: color components in the JSON are decimal fractions.
    func match(_ a: Double, _ b: Double, eps: Double) -> Bool {
        return abs(a - b) < eps
    }

    var replacements: [(NSTextCheckingResult, String)] = []
    if let colorKeyRegex = colorKeyRegex {
        let results = colorKeyRegex.matches(in: string, range: NSRange(string.startIndex..., in: string))
        // Walk matches back-to-front so that the later in-place replacements do
        // not invalidate the ranges of the matches that precede them.
        for result in results.reversed() {
            if let range = Range(result.range, in: string) {
                let substring = String(string[range])
                // BUGFIX: the offset must be taken from `substring`'s own
                // startIndex — indices of one String may not be applied to
                // another (the original used `string.startIndex` here).
                let color = substring[substring.index(substring.startIndex, offsetBy: "\"k\":[".count) ..< substring.index(before: substring.endIndex)]
                let components = color.split(separator: ",")
                if components.count == 4, let r = Double(components[0]), let g = Double(components[1]), let b = Double(components[2]), let a = Double(components[3]) {
                    // Only recolor fully opaque values.
                    if match(a, 1.0, eps: 0.01) {
                        for i in 0 ..< sourceColors.count {
                            let color = sourceColors[i]
                            var cr: CGFloat = 0.0
                            var cg: CGFloat = 0.0
                            var cb: CGFloat = 0.0
                            if color.getRed(&cr, green: &cg, blue: &cb, alpha: nil) {
                                if match(r, Double(cr), eps: 0.01) && match(g, Double(cg), eps: 0.01) && match(b, Double(cb), eps: 0.01) {
                                    replacements.append((result, colorToString(replacementColors[i])))
                                }
                            }
                        }
                    }
                }
            }
        }
    }
    // Replacements were collected back-to-front, so re-resolving each NSRange
    // against the mutated string stays valid: every edit lands after the next.
    for (result, text) in replacements {
        if let range = Range(result.range, in: string) {
            string = string.replacingCharacters(in: range, with: text)
        }
    }
    return string.data(using: .utf8) ?? data
}

View File

@ -14,65 +14,8 @@ import TelegramCore
import Markdown
import DeviceAccess
private let colorKeyRegex = try? NSRegularExpression(pattern: "\"k\":\\[[\\d\\.]+\\,[\\d\\.]+\\,[\\d\\.]+\\,[\\d\\.]+\\]")
/// Recolors the auth-transfer splash animation to match the given theme.
///
/// Maps the animation's baked-in palette (dark gray, white, blue, near-black)
/// onto the corresponding theme colors and delegates the actual JSON rewrite
/// to `transformedWithColors`.
///
/// The previous body carried a full inline duplicate of that algorithm followed
/// by an unreachable trailing `return` — the dead duplicate is removed here;
/// the color pairs are identical to the ones the inline version used.
private func transformedWithTheme(data: Data, theme: PresentationTheme) -> Data {
    return transformedWithColors(data: data, colors: [
        (UIColor(rgb: 0x333333), theme.list.itemPrimaryTextColor.mixedWith(.white, alpha: 0.2)),
        (UIColor(rgb: 0xFFFFFF), theme.list.plainBackgroundColor),
        (UIColor(rgb: 0x50A7EA), theme.list.itemAccentColor),
        (UIColor(rgb: 0x212121), theme.list.plainBackgroundColor)
    ])
}
public final class AuthDataTransferSplashScreen: ViewController {

View File

@ -406,7 +406,7 @@ open class TelegramBaseController: ViewController, KeyShortcutResponder {
strongSelf.joinGroupCall(
peerId: groupCallPanelData.peerId,
joinAsPeerId: nil,
info: groupCallPanelData.info
activeCall: CachedChannelData.ActiveCall(id: groupCallPanelData.info.id, accessHash: groupCallPanelData.info.accessHash, title: groupCallPanelData.info.title)
)
})
if let navigationBar = self.navigationBar {
@ -852,7 +852,7 @@ open class TelegramBaseController: ViewController, KeyShortcutResponder {
})]
}
open func joinGroupCall(peerId: PeerId, joinAsPeerId: PeerId?, info: GroupCallInfo) {
self.context.joinGroupCall(peerId: peerId, joinAsPeerId: joinAsPeerId, activeCall: CachedChannelData.ActiveCall(id: info.id, accessHash: info.accessHash, title: info.title))
open func joinGroupCall(peerId: PeerId, joinAsPeerId: PeerId?, activeCall: CachedChannelData.ActiveCall) {
self.context.joinGroupCall(peerId: peerId, joinAsPeerId: joinAsPeerId, activeCall: activeCall)
}
}

View File

@ -15,6 +15,7 @@ import DeviceAccess
import UniversalMediaPlayer
import AccountContext
import DeviceProximity
import UndoUI
private extension GroupCallParticipantsContext.Participant {
var allSsrcs: Set<UInt32> {
@ -203,7 +204,8 @@ private extension PresentationGroupCallState {
muteState: GroupCallParticipantsContext.Participant.MuteState(canUnmute: true, mutedByYou: false),
defaultParticipantMuteState: nil,
recordingStartTimestamp: nil,
title: nil
title: nil,
raisedHand: false
)
}
}
@ -1246,6 +1248,16 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
}
if participant.peer.id == strongSelf.joinAsPeerId {
if !(strongSelf.stateValue.muteState?.canUnmute ?? false) {
strongSelf.stateValue.raisedHand = participant.raiseHandRating != nil
}
if let muteState = participant.muteState, muteState.canUnmute && strongSelf.stateValue.raisedHand {
strongSelf.stateValue.raisedHand = false
let presentationData = strongSelf.accountContext.sharedContext.currentPresentationData.with { $0 }
strongSelf.accountContext.sharedContext.mainWindow?.present(UndoOverlayController(presentationData: presentationData, content: .voiceChatCanSpeak(text: presentationData.strings.VoiceChat_YouCanNowSpeak), elevatedLayout: false, animateInAsReplacement: false, action: { _ in return true }), on: .root, blockInteraction: false, completion: {})
}
if let muteState = participant.muteState {
if muteState.canUnmute {
switch strongSelf.isMutedValue {
@ -1572,7 +1584,6 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
if participant.raiseHandRating != nil {
return
}
break
}
}

View File

@ -14,7 +14,7 @@ public final class VoiceChatAccountHeaderActionSheetItem: ActionSheetItem {
let title: String
let text: String
public init(context: AccountContext, title: String, text: String) {
public init(title: String, text: String) {
self.title = title
self.text = text
}
@ -40,16 +40,30 @@ private final class VoiceChatAccountHeaderActionSheetItemNode: ActionSheetItemNo
init(theme: ActionSheetControllerTheme, title: String, text: String) {
self.theme = theme
let titleFont = Font.medium(floor(theme.baseFontSize * 13.0 / 17.0))
let titleFont = Font.bold(floor(theme.baseFontSize))
let textFont = Font.regular(floor(theme.baseFontSize * 13.0 / 17.0))
self.iconBackgroundNode = ASImageNode()
self.iconBackgroundNode.displaysAsynchronously = false
self.iconBackgroundNode.displayWithoutProcessing = true
self.iconBackgroundNode.image = generateImage(CGSize(width: 72.0, height: 72.0), contextGenerator: { size, context in
let bounds = CGRect(origin: CGPoint(), size: size)
context.clear(bounds)
context.addPath(UIBezierPath(ovalIn: bounds).cgPath)
context.clip()
var locations: [CGFloat] = [0.0, 1.0]
let colorsArray: NSArray = [UIColor(rgb: 0x2a9ef1).cgColor, UIColor(rgb: 0x72d5fd).cgColor]
let colorSpace = CGColorSpaceCreateDeviceRGB()
let gradient = CGGradient(colorsSpace: colorSpace, colors: colorsArray, locations: &locations)!
context.drawLinearGradient(gradient, start: CGPoint(), end: CGPoint(x: 0.0, y: size.height), options: CGGradientDrawingOptions())
})
self.iconNode = ASImageNode()
self.iconNode.displaysAsynchronously = false
self.iconNode.displayWithoutProcessing = true
self.iconNode.image = generateTintedImage(image: UIImage(bundleImageName: "Call/Accounts"), color: UIColor.white)
self.titleNode = ImmediateTextNode()
self.titleNode.displaysAsynchronously = false
@ -66,10 +80,8 @@ private final class VoiceChatAccountHeaderActionSheetItemNode: ActionSheetItemNo
self.accessibilityArea = AccessibilityAreaNode()
super.init(theme: theme)
self.hasSeparator = false
self.addSubnode(self.backgroundNode)
self.addSubnode(self.iconBackgroundNode)
self.addSubnode(self.iconNode)
self.addSubnode(self.titleNode)
self.addSubnode(self.textNode)
@ -83,12 +95,13 @@ private final class VoiceChatAccountHeaderActionSheetItemNode: ActionSheetItemNo
}
public override func updateLayout(constrainedSize: CGSize, transition: ContainedViewLayoutTransition) -> CGSize {
let titleSize = self.titleNode.updateLayout(CGSize(width: constrainedSize.width - 120.0, height: .greatestFiniteMagnitude))
let textSize = self.textNode.updateLayout(CGSize(width: constrainedSize.width - 120.0, height: .greatestFiniteMagnitude))
let titleSize = self.titleNode.updateLayout(CGSize(width: constrainedSize.width - 80.0, height: .greatestFiniteMagnitude))
let textSize = self.textNode.updateLayout(CGSize(width: constrainedSize.width - 80.0, height: .greatestFiniteMagnitude))
let topInset: CGFloat = 26.0
let textSpacing: CGFloat = 17.0
let bottomInset: CGFloat = 15.0
let topInset: CGFloat = 20.0
let titleSpacing: CGFloat = 10.0
let textSpacing: CGFloat = 6.0
let bottomInset: CGFloat = 14.0
let iconSize = CGSize(width: 72.0, height: 72.0)
let iconFrame = CGRect(origin: CGPoint(x: floor((constrainedSize.width - iconSize.width) / 2.0), y: topInset), size: iconSize)
@ -97,11 +110,11 @@ private final class VoiceChatAccountHeaderActionSheetItemNode: ActionSheetItemNo
self.iconNode.frame = CGRect(origin: CGPoint(x: iconFrame.minX + floorToScreenPixels((iconSize.width - image.size.width) / 2.0), y: iconFrame.minY + floorToScreenPixels((iconSize.height - image.size.height) / 2.0)), size: image.size)
}
self.titleNode.frame = CGRect(origin: CGPoint(x: floor((constrainedSize.width - titleSize.width) / 2.0), y: topInset + iconSize.height + textSpacing), size: titleSize)
self.titleNode.frame = CGRect(origin: CGPoint(x: floor((constrainedSize.width - titleSize.width) / 2.0), y: topInset + iconSize.height + titleSpacing), size: titleSize)
self.textNode.frame = CGRect(origin: CGPoint(x: floor((constrainedSize.width - textSize.width) / 2.0), y: topInset + iconSize.height + textSpacing + titleSize.height), size: textSize)
self.textNode.frame = CGRect(origin: CGPoint(x: floor((constrainedSize.width - textSize.width) / 2.0), y: topInset + iconSize.height + titleSpacing + titleSize.height + textSpacing), size: textSize)
let size = CGSize(width: constrainedSize.width, height: topInset + iconSize.height + textSpacing + titleSize.height + textSize.height + bottomInset)
let size = CGSize(width: constrainedSize.width, height: topInset + iconSize.height + titleSpacing + titleSize.height + textSpacing + textSize.height + bottomInset)
self.accessibilityArea.frame = CGRect(origin: CGPoint(), size: size)
self.updateInternalLayout(size, constrainedSize: constrainedSize)

View File

@ -4,6 +4,8 @@ import AsyncDisplayKit
import Display
import SwiftSignalKit
import LegacyComponents
import AnimationUI
import AppBundle
private let titleFont = Font.regular(15.0)
private let subtitleFont = Font.regular(13.0)
@ -15,8 +17,8 @@ private let blue = UIColor(rgb: 0x0078ff)
private let lightBlue = UIColor(rgb: 0x59c7f8)
private let green = UIColor(rgb: 0x33c659)
private let activeBlue = UIColor(rgb: 0x00a0b9)
private let purple = UIColor(rgb: 0x6b81f0)
private let pink = UIColor(rgb: 0xd75a76)
private let purple = UIColor(rgb: 0x3252ef)
private let pink = UIColor(rgb: 0xef436c)
private let areaSize = CGSize(width: 300.0, height: 300.0)
private let blobSize = CGSize(width: 190.0, height: 190.0)
@ -45,6 +47,7 @@ final class VoiceChatActionButton: HighlightTrackingButtonNode {
private let containerNode: ASDisplayNode
private let backgroundNode: VoiceChatActionButtonBackgroundNode
private let iconNode: VoiceChatMicrophoneNode
private let raiseHandNode: VoiceChatRaiseHandNode
private let titleLabel: ImmediateTextNode
private let subtitleLabel: ImmediateTextNode
@ -83,6 +86,7 @@ final class VoiceChatActionButton: HighlightTrackingButtonNode {
if self.pressing {
let transition: ContainedViewLayoutTransition = .animated(duration: 0.25, curve: .spring)
transition.updateTransformScale(node: self.iconNode, scale: snap ? 0.5 : 0.9)
transition.updateTransformScale(node: self.raiseHandNode, scale: snap ? 0.5 : 0.9)
switch state {
case let .active(state):
@ -98,6 +102,7 @@ final class VoiceChatActionButton: HighlightTrackingButtonNode {
} else {
let transition: ContainedViewLayoutTransition = .animated(duration: 0.25, curve: .spring)
transition.updateTransformScale(node: self.iconNode, scale: snap ? 0.5 : 1.0)
transition.updateTransformScale(node: self.raiseHandNode, scale: snap ? 0.5 : 1.0)
self.wasActiveWhenPressed = false
}
}
@ -108,6 +113,7 @@ final class VoiceChatActionButton: HighlightTrackingButtonNode {
self.containerNode = ASDisplayNode()
self.backgroundNode = VoiceChatActionButtonBackgroundNode()
self.iconNode = VoiceChatMicrophoneNode()
self.raiseHandNode = VoiceChatRaiseHandNode(color: nil)
self.titleLabel = ImmediateTextNode()
self.subtitleLabel = ImmediateTextNode()
@ -121,18 +127,21 @@ final class VoiceChatActionButton: HighlightTrackingButtonNode {
self.addSubnode(self.containerNode)
self.containerNode.addSubnode(self.backgroundNode)
self.containerNode.addSubnode(self.iconNode)
self.containerNode.addSubnode(self.raiseHandNode)
self.highligthedChanged = { [weak self] pressing in
if let strongSelf = self {
guard let (_, _, _, _, _, _, _, snap) = strongSelf.currentParams, !strongSelf.isDisabled else {
guard let (_, _, _, _, _, _, _, snap) = strongSelf.currentParams else {
return
}
if pressing {
let transition: ContainedViewLayoutTransition = .animated(duration: 0.25, curve: .spring)
transition.updateTransformScale(node: strongSelf.iconNode, scale: snap ? 0.5 : 0.9)
transition.updateTransformScale(node: strongSelf.raiseHandNode, scale: snap ? 0.5 : 0.9)
} else if !strongSelf.pressing {
let transition: ContainedViewLayoutTransition = .animated(duration: 0.25, curve: .spring)
transition.updateTransformScale(node: strongSelf.iconNode, scale: snap ? 0.5 : 1.0)
transition.updateTransformScale(node: strongSelf.raiseHandNode, scale: snap ? 0.5 : 1.0)
}
}
}
@ -229,6 +238,10 @@ final class VoiceChatActionButton: HighlightTrackingButtonNode {
let iconSize = CGSize(width: 68.0, height: 68.0)
self.iconNode.bounds = CGRect(origin: CGPoint(), size: iconSize)
self.iconNode.position = CGPoint(x: size.width / 2.0, y: size.height / 2.0)
let raiseHandSize = CGSize(width: 68.0, height: 68.0)
self.raiseHandNode.bounds = CGRect(origin: CGPoint(), size: raiseHandSize)
self.raiseHandNode.position = CGPoint(x: size.width / 2.0, y: size.height / 2.0)
}
private func applyIconParams() {
@ -237,22 +250,25 @@ final class VoiceChatActionButton: HighlightTrackingButtonNode {
}
var iconMuted = true
var iconColor: UIColor = UIColor(rgb: 0xffffff)
var speakIcon = false
let iconColor: UIColor = UIColor(rgb: 0xffffff)
switch state {
case let .active(state):
switch state {
case .on:
iconMuted = false
case .muted:
break
case .cantSpeak:
if !snap {
iconColor = UIColor(rgb: 0xff3b30)
}
speakIcon = true
default:
break
}
case .connecting:
break
}
let transition = ContainedViewLayoutTransition.animated(duration: 0.2, curve: .easeInOut)
transition.updateAlpha(node: self.raiseHandNode, alpha: speakIcon ? 1.0 : 0.0)
transition.updateAlpha(node: self.iconNode, alpha: speakIcon ? 0.0 : 1.0)
self.iconNode.update(state: VoiceChatMicrophoneNode.State(muted: iconMuted, filled: true, color: iconColor), animated: true)
}
@ -318,6 +334,10 @@ final class VoiceChatActionButton: HighlightTrackingButtonNode {
}
return result
}
func playAnimation() {
self.raiseHandNode.playRandomAnimation()
}
}
extension UIBezierPath {
@ -683,7 +703,7 @@ private final class VoiceChatActionButtonBackgroundNode: ASDisplayNode {
case .muted:
targetColors = [pink.cgColor, purple.cgColor, purple.cgColor]
targetScale = 0.85
outerColor = UIColor(rgb: 0x1d588d)
outerColor = UIColor(rgb: 0x3b3474)
}
self.updatedOuterColor?(outerColor)
@ -1356,3 +1376,50 @@ final class BlobView: UIView {
CATransaction.commit()
}
}
/// Displays the animated "raise hand" Lottie gesture shown on the voice chat
/// action button and in participant rows, optionally tinted to a custom color.
final class VoiceChatRaiseHandNode: ASDisplayNode {
    private let animationNode: AnimationNode
    private let color: UIColor?
    // Tracks whether the intro animation ("anim_hand1") has been shown yet.
    private var hasPlayedOnce = false

    /// - Parameter color: Optional tint; when set, white in the animation JSON
    ///   is rewritten to this color before loading.
    init(color: UIColor?) {
        self.color = color
        var tintedNode: AnimationNode?
        if let color = color, let url = getAppBundle().url(forResource: "anim_hand1", withExtension: "json"), let data = try? Data(contentsOf: url) {
            tintedNode = AnimationNode(animationData: transformedWithColors(data: data, colors: [(UIColor(rgb: 0xffffff), color)]))
        }
        // Fall back to the untinted bundled animation when no color is given
        // or the resource could not be loaded.
        self.animationNode = tintedNode ?? AnimationNode(animation: "anim_hand1", colors: nil, scale: 0.5)

        super.init()

        self.addSubnode(self.animationNode)
    }

    /// Plays a hand animation: the intro variant on first invocation, then a
    /// random variant on each subsequent call. If an animation is still in
    /// flight, this call is deferred until it completes.
    func playRandomAnimation() {
        if !self.hasPlayedOnce {
            self.hasPlayedOnce = true
            self.animationNode.play()
            return
        }

        if self.animationNode.isPlaying {
            // Queue ourselves to run again once the current pass finishes.
            self.animationNode.completion = { [weak self] in
                self?.playRandomAnimation()
            }
            return
        }

        self.animationNode.completion = nil
        let variants = ["anim_hand1", "anim_hand2", "anim_hand3", "anim_hand4"]
        if let animationName = variants.randomElement() {
            if let color = self.color, let url = getAppBundle().url(forResource: animationName, withExtension: "json"), let data = try? Data(contentsOf: url) {
                self.animationNode.setAnimation(data: transformedWithColors(data: data, colors: [(UIColor(rgb: 0xffffff), color)]))
            } else {
                self.animationNode.setAnimation(name: animationName)
            }
            self.animationNode.play()
        }
    }

    override func layout() {
        super.layout()
        self.animationNode.frame = self.bounds
    }
}

View File

@ -384,6 +384,7 @@ public final class VoiceChatController: ViewController {
var revealed: Bool?
var canManageCall: Bool
var volume: Int32?
var raisedHand: Bool
var stableId: PeerId {
return self.peer.id
@ -423,6 +424,9 @@ public final class VoiceChatController: ViewController {
if lhs.volume != rhs.volume {
return false
}
if lhs.raisedHand != rhs.raisedHand {
return false
}
return true
}
@ -1102,7 +1106,7 @@ public final class VoiceChatController: ViewController {
} else {
if let muteState = muteState, !muteState.canUnmute {
items.append(.action(ContextMenuActionItem(text: strongSelf.presentationData.strings.VoiceChat_UnmutePeer, icon: { theme in
return generateTintedImage(image: UIImage(bundleImageName: "Call/Context Menu/Unmute"), color: theme.actionSheet.primaryTextColor)
return generateTintedImage(image: UIImage(bundleImageName: entry.raisedHand ? "Call/Context Menu/AllowToSpeak" : "Call/Context Menu/Unmute"), color: theme.actionSheet.primaryTextColor)
}, action: { _, f in
guard let strongSelf = self else {
return
@ -1559,9 +1563,9 @@ public final class VoiceChatController: ViewController {
}
mainItemsImpl = {
return displayAsPeersPromise.get()
return combineLatest(displayAsPeersPromise.get(), context.account.postbox.loadedPeerWithId(call.peerId))
|> take(1)
|> map { peers -> [ContextMenuItem] in
|> map { peers, chatPeer -> [ContextMenuItem] in
let presentationData = strongSelf.presentationData
var items: [ContextMenuItem] = []
@ -1590,11 +1594,21 @@ public final class VoiceChatController: ViewController {
self?.controller?.present(controller, in: .window(.root))
})))
items.append(.action(ContextMenuActionItem(text: strongSelf.presentationData.strings.VoiceChat_EditPermissions, icon: { theme -> UIImage? in
return generateTintedImage(image: UIImage(bundleImageName: "Chat/Context Menu/Restrict"), color: theme.actionSheet.primaryTextColor)
}, action: { c, _ in
c.setItems(permissionItems())
})))
var hasPermissions = true
if let chatPeer = chatPeer as? TelegramChannel {
if case .broadcast = chatPeer.info {
hasPermissions = false
} else if chatPeer.flags.contains(.isGigagroup) {
hasPermissions = false
}
}
if hasPermissions {
items.append(.action(ContextMenuActionItem(text: strongSelf.presentationData.strings.VoiceChat_EditPermissions, icon: { theme -> UIImage? in
return generateTintedImage(image: UIImage(bundleImageName: "Chat/Context Menu/Restrict"), color: theme.actionSheet.primaryTextColor)
}, action: { c, _ in
c.setItems(permissionItems())
})))
}
items.append(.action(ContextMenuActionItem(text: strongSelf.presentationData.strings.VoiceChat_Share, icon: { theme in
return generateTintedImage(image: UIImage(bundleImageName: "Chat/Context Menu/Link"), color: theme.actionSheet.primaryTextColor)
@ -1957,21 +1971,25 @@ public final class VoiceChatController: ViewController {
}
if let muteState = callState.muteState {
if !muteState.canUnmute {
if case .ended = gestureRecognizer.state {
self.hapticFeedback.error()
self.actionButton.layer.addShakeAnimation()
self.call.raiseHand()
switch gestureRecognizer.state {
case .began:
self.actionButton.pressing = true
self.hapticFeedback.impact(.light)
case .ended, .cancelled:
self.actionButton.pressing = false
self.call.raiseHand()
self.actionButton.playAnimation()
default:
break
}
return
}
}
switch gestureRecognizer.state {
case .began:
self.actionButton.pressing = true
self.hapticFeedback.impact(.light)
self.startPressTimer()
self.actionButton.pressing = true
if let (layout, navigationHeight) = self.validLayout {
self.containerLayoutUpdated(layout, navigationHeight: navigationHeight, transition: .animated(duration: 0.3, curve: .spring))
}
@ -2440,8 +2458,13 @@ public final class VoiceChatController: ViewController {
} else {
actionButtonState = .active(state: .cantSpeak)
actionButtonTitle = self.presentationData.strings.VoiceChat_Muted
actionButtonSubtitle = self.presentationData.strings.VoiceChat_MutedHelp
if callState.raisedHand {
actionButtonTitle = self.presentationData.strings.VoiceChat_AskedToSpeak
actionButtonSubtitle = self.presentationData.strings.VoiceChat_AskedToSpeakHelp
} else {
actionButtonTitle = self.presentationData.strings.VoiceChat_MutedByAdmin
actionButtonSubtitle = self.presentationData.strings.VoiceChat_MutedByAdminHelp
}
}
} else {
actionButtonState = .active(state: .on)
@ -2757,7 +2780,8 @@ public final class VoiceChatController: ViewController {
state: memberState,
muteState: memberMuteState,
canManageCall: self.callState?.canManageCall ?? false,
volume: member.volume
volume: member.volume,
raisedHand: member.raiseHandRating != nil
)))
index += 1
}
@ -2778,7 +2802,8 @@ public final class VoiceChatController: ViewController {
state: .invited,
muteState: nil,
canManageCall: false,
volume: nil
volume: nil,
raisedHand: false
)))
index += 1
}

View File

@ -165,6 +165,7 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode {
private let actionContainerNode: ASDisplayNode
private var animationNode: VoiceChatMicrophoneNode?
private var iconNode: ASImageNode?
private var raiseHandNode: VoiceChatRaiseHandNode?
private var actionButtonNode: HighlightableButtonNode
private var audioLevelView: VoiceBlobView?
@ -179,6 +180,8 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode {
private var videoNode: GroupVideoNode?
private var raiseHandTimer: SwiftSignalKit.Timer?
var item: VoiceChatParticipantItem? {
return self.layoutParams?.0
}
@ -291,6 +294,7 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode {
deinit {
self.audioLevelDisposable.dispose()
self.raiseHandTimer?.invalidate()
}
override func selected() {
@ -310,9 +314,7 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode {
if currentItem?.presentationData.theme !== item.presentationData.theme {
updatedTheme = item.presentationData.theme
}
let statusFontSize: CGFloat = floor(item.presentationData.fontSize.itemListBaseFontSize * 14.0 / 17.0)
let titleFont = Font.regular(17.0)
let statusFont = Font.regular(14.0)
@ -662,6 +664,11 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode {
strongSelf.highlightedBackgroundNode.frame = CGRect(origin: CGPoint(x: 0.0, y: -UIScreenPixel), size: CGSize(width: params.width, height: layout.contentSize.height + UIScreenPixel + UIScreenPixel))
var hadMicrophoneNode = false
var hadRaiseHandNode = false
var hadIconNode = false
var nodeToAnimateIn: ASDisplayNode?
if case let .microphone(muted, color) = item.icon {
let animationNode: VoiceChatMicrophoneNode
if let current = strongSelf.animationNode {
@ -670,14 +677,13 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode {
animationNode = VoiceChatMicrophoneNode()
strongSelf.animationNode = animationNode
strongSelf.actionButtonNode.addSubnode(animationNode)
if let _ = strongSelf.iconNode {
animationNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2)
animationNode.layer.animateScale(from: 0.001, to: 1.0, duration: 0.2)
}
nodeToAnimateIn = animationNode
}
animationNode.update(state: VoiceChatMicrophoneNode.State(muted: muted, filled: false, color: color), animated: true)
strongSelf.actionButtonNode.isUserInteractionEnabled = item.contextAction != nil
} else if let animationNode = strongSelf.animationNode {
hadMicrophoneNode = true
strongSelf.animationNode = nil
animationNode.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2, removeOnCompletion: false)
animationNode.layer.animateScale(from: 1.0, to: 0.001, duration: 0.2, removeOnCompletion: false, completion: { [weak animationNode] _ in
@ -685,6 +691,37 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode {
})
}
if case .wantsToSpeak = item.icon {
let raiseHandNode: VoiceChatRaiseHandNode
if let current = strongSelf.raiseHandNode {
raiseHandNode = current
} else {
raiseHandNode = VoiceChatRaiseHandNode(color: item.presentationData.theme.list.itemAccentColor)
raiseHandNode.contentMode = .center
strongSelf.raiseHandNode = raiseHandNode
strongSelf.actionButtonNode.addSubnode(raiseHandNode)
nodeToAnimateIn = raiseHandNode
raiseHandNode.playRandomAnimation()
strongSelf.raiseHandTimer = SwiftSignalKit.Timer(timeout: Double.random(in: 8.0 ... 10.5), repeat: true, completion: {
strongSelf.raiseHandNode?.playRandomAnimation()
}, queue: Queue.mainQueue())
strongSelf.raiseHandTimer?.start()
}
} else if let raiseHandNode = strongSelf.raiseHandNode {
hadRaiseHandNode = true
strongSelf.raiseHandNode = nil
if let raiseHandTimer = strongSelf.raiseHandTimer {
strongSelf.raiseHandTimer = nil
raiseHandTimer.invalidate()
}
raiseHandNode.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2, removeOnCompletion: false)
raiseHandNode.layer.animateScale(from: 1.0, to: 0.001, duration: 0.2, removeOnCompletion: false, completion: { [weak raiseHandNode] _ in
raiseHandNode?.removeFromSupernode()
})
}
if case let .invite(invited) = item.icon {
let iconNode: ASImageNode
if let current = strongSelf.iconNode {
@ -695,10 +732,7 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode {
strongSelf.iconNode = iconNode
strongSelf.actionButtonNode.addSubnode(iconNode)
if let _ = strongSelf.animationNode {
iconNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2)
iconNode.layer.animateScale(from: 0.001, to: 1.0, duration: 0.2)
}
nodeToAnimateIn = iconNode
}
if invited {
@ -708,6 +742,7 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode {
}
strongSelf.actionButtonNode.isUserInteractionEnabled = false
} else if let iconNode = strongSelf.iconNode {
hadIconNode = true
strongSelf.iconNode = nil
iconNode.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2, removeOnCompletion: false)
iconNode.layer.animateScale(from: 1.0, to: 0.001, duration: 0.2, removeOnCompletion: false, completion: { [weak iconNode] _ in
@ -715,6 +750,11 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode {
})
}
if let node = nodeToAnimateIn, hadMicrophoneNode || hadRaiseHandNode || hadIconNode {
node.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2)
node.layer.animateScale(from: 0.001, to: 1.0, duration: 0.2)
}
let videoSize = CGSize(width: avatarSize, height: avatarSize)
let videoNode = item.getVideo()
@ -724,7 +764,6 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode {
let actionOffset: CGFloat = 0.0
strongSelf.videoNode = videoNode
if let videoNode = videoNode {
videoNode.updateLayout(size: videoSize, transition: .immediate)
if videoNode.supernode !== strongSelf.avatarNode {
videoNode.clipsToBounds = true
@ -738,6 +777,7 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode {
let animationSize = CGSize(width: 36.0, height: 36.0)
strongSelf.iconNode?.frame = CGRect(origin: CGPoint(), size: animationSize)
strongSelf.animationNode?.frame = CGRect(origin: CGPoint(), size: animationSize)
strongSelf.raiseHandNode?.frame = CGRect(origin: CGPoint(), size: CGSize(width: 32.0, height: 32.0))
strongSelf.actionButtonNode.frame = CGRect(x: params.width - animationSize.width - 6.0 - params.rightInset + actionOffset, y: floor((layout.contentSize.height - animationSize.height) / 2.0) + 1.0, width: animationSize.width, height: animationSize.height)

View File

@ -0,0 +1,160 @@
import Foundation
import UIKit
import AsyncDisplayKit
import Display
import TelegramCore
import SyncCore
import Postbox
import TelegramPresentationData
import AvatarNode
import AccountContext
/// An action sheet entry representing a peer (the user's personal account or a
/// channel they manage) that can be selected as the "display as" identity when
/// joining a voice chat. Renders a title, a subtitle and the peer's avatar.
public class VoiceChatPeerActionSheetItem: ActionSheetItem {
    public let context: AccountContext
    public let peer: Peer
    public let title: String
    public let subtitle: String
    public let action: () -> Void

    public init(context: AccountContext, peer: Peer, title: String, subtitle: String, action: @escaping () -> Void) {
        self.context = context
        self.peer = peer
        self.title = title
        self.subtitle = subtitle
        self.action = action
    }

    /// Creates the display node for this item and seeds it with the item's content.
    public func node(theme: ActionSheetControllerTheme) -> ActionSheetItemNode {
        let itemNode = VoiceChatPeerActionSheetItemNode(theme: theme)
        itemNode.setItem(self)
        return itemNode
    }

    /// Pushes this item's current content into an already-created node.
    public func updateNode(_ node: ActionSheetItemNode) {
        guard let itemNode = node as? VoiceChatPeerActionSheetItemNode else {
            assertionFailure()
            return
        }
        itemNode.setItem(self)
        itemNode.requestLayoutUpdate()
    }
}
// Placeholder font used when the avatar has no image and shows initials instead.
private let avatarFont = avatarPlaceholderFont(size: 15.0)

/// Node backing `VoiceChatPeerActionSheetItem`: a full-width tappable row with
/// a leading title/subtitle pair and a trailing avatar, plus an accessibility
/// area covering the whole row.
public class VoiceChatPeerActionSheetItemNode: ActionSheetItemNode {
    private let theme: ActionSheetControllerTheme
    private let defaultFont: UIFont

    private var item: VoiceChatPeerActionSheetItem?

    private let button: HighlightTrackingButton
    private let avatarNode: AvatarNode
    private let titleNode: ImmediateTextNode
    private let subtitleNode: ImmediateTextNode
    private let accessibilityArea: AccessibilityAreaNode

    override public init(theme: ActionSheetControllerTheme) {
        self.theme = theme
        self.defaultFont = Font.regular(floor(theme.baseFontSize * 20.0 / 17.0))

        // The button handles touches; the text/avatar nodes stay inert so the
        // single accessibility area below can represent the whole row.
        self.button = HighlightTrackingButton()
        self.button.isAccessibilityElement = false

        self.avatarNode = AvatarNode(font: avatarFont)
        self.avatarNode.isLayerBacked = !smartInvertColorsEnabled()
        self.avatarNode.isAccessibilityElement = false

        self.titleNode = ImmediateTextNode()
        self.titleNode.isUserInteractionEnabled = false
        self.titleNode.displaysAsynchronously = false
        self.titleNode.maximumNumberOfLines = 1
        self.titleNode.isAccessibilityElement = false

        self.subtitleNode = ImmediateTextNode()
        self.subtitleNode.isUserInteractionEnabled = false
        self.subtitleNode.displaysAsynchronously = false
        self.subtitleNode.maximumNumberOfLines = 1
        self.subtitleNode.isAccessibilityElement = false

        self.accessibilityArea = AccessibilityAreaNode()

        super.init(theme: theme)

        self.view.addSubview(self.button)
        self.addSubnode(self.avatarNode)
        self.addSubnode(self.titleNode)
        self.addSubnode(self.subtitleNode)
        self.addSubnode(self.accessibilityArea)

        // Highlight feedback: instant on press, fade out on release.
        self.button.highligthedChanged = { [weak self] highlighted in
            guard let strongSelf = self else {
                return
            }
            if highlighted {
                strongSelf.backgroundNode.backgroundColor = strongSelf.theme.itemHighlightedBackgroundColor
            } else {
                UIView.animate(withDuration: 0.3, animations: {
                    strongSelf.backgroundNode.backgroundColor = strongSelf.theme.itemBackgroundColor
                })
            }
        }

        self.button.addTarget(self, action: #selector(self.buttonPressed), for: .touchUpInside)

        self.accessibilityArea.activate = { [weak self] in
            self?.buttonPressed()
            return true
        }
    }

    /// Applies the item's title, subtitle, avatar and accessibility info to this node.
    func setItem(_ item: VoiceChatPeerActionSheetItem) {
        self.item = item

        let titleFont = Font.regular(floor(self.theme.baseFontSize))
        self.titleNode.attributedText = NSAttributedString(string: item.title, font: titleFont, textColor: self.theme.primaryTextColor)

        let subtitleFont = Font.regular(floor(self.theme.baseFontSize * 13.0 / 17.0))
        self.subtitleNode.attributedText = NSAttributedString(string: item.subtitle, font: subtitleFont, textColor: self.theme.secondaryTextColor)

        let presentationTheme = item.context.sharedContext.currentPresentationData.with { $0 }.theme
        self.avatarNode.setPeer(context: item.context, theme: presentationTheme, peer: item.peer)

        self.accessibilityArea.accessibilityTraits = [.button]
        self.accessibilityArea.accessibilityLabel = item.title
        self.accessibilityArea.accessibilityValue = item.subtitle
    }

    public override func updateLayout(constrainedSize: CGSize, transition: ContainedViewLayoutTransition) -> CGSize {
        // Rows have a fixed 57pt height; only the width is driven by the sheet.
        let size = CGSize(width: constrainedSize.width, height: 57.0)
        self.button.frame = CGRect(origin: CGPoint(), size: size)

        let avatarInset: CGFloat = 42.0
        let avatarSize: CGFloat = 36.0
        // Avatar is pinned to the trailing edge, vertically centered.
        self.avatarNode.frame = CGRect(origin: CGPoint(x: size.width - avatarSize - 14.0, y: floor((size.height - avatarSize) / 2.0)), size: CGSize(width: avatarSize, height: avatarSize))

        // Title and subtitle share the same measuring constraints.
        let textConstrainedSize = CGSize(width: max(1.0, size.width - avatarInset - 16.0 - 16.0 - 30.0), height: size.height)
        let titleSize = self.titleNode.updateLayout(textConstrainedSize)
        self.titleNode.frame = CGRect(origin: CGPoint(x: 16.0, y: 9.0), size: titleSize)
        let subtitleSize = self.subtitleNode.updateLayout(textConstrainedSize)
        self.subtitleNode.frame = CGRect(origin: CGPoint(x: 16.0, y: 32.0), size: subtitleSize)

        self.accessibilityArea.frame = CGRect(origin: CGPoint(), size: size)

        self.updateInternalLayout(size, constrainedSize: constrainedSize)
        return size
    }

    @objc private func buttonPressed() {
        // Forward the tap to the item's action, if an item has been set.
        self.item?.action()
    }
}

View File

@ -274,7 +274,6 @@ public func getGroupCallParticipants(account: Account, callId: Int64, accessHash
loop: for participant in participants {
switch participant {
case let .groupCallParticipant(flags, apiPeerId, date, activeDate, source, volume, about, raiseHandRating):
let peerId: PeerId
switch apiPeerId {
case let .peerUser(userId):
@ -1454,11 +1453,13 @@ public final class GroupCallParticipantsContext {
}
let disposable = MetaDisposable()
self.stateValue.overlayState.pendingMuteStateChanges[peerId] = OverlayState.MuteStateChange(
state: muteState,
volume: volume,
disposable: disposable
)
if raiseHand == nil {
self.stateValue.overlayState.pendingMuteStateChanges[peerId] = OverlayState.MuteStateChange(
state: muteState,
volume: volume,
disposable: disposable
)
}
let account = self.account
let id = self.id
@ -1528,15 +1529,7 @@ public final class GroupCallParticipantsContext {
}
/// Signals that the local participant raises their hand to request speaking
/// permission in the voice chat.
///
/// The block as captured fused two implementations (a direct
/// `phone.editGroupCallParticipant` request AND a delegating call), which would
/// issue the raise-hand update twice through two separate code paths. Keep only
/// the delegation: `updateMuteState(raiseHand: true)` routes through the single
/// shared code path for participant edits (including pending-state bookkeeping,
/// which is intentionally skipped for raise-hand changes there).
public func raiseHand() {
    self.updateMuteState(peerId: self.myPeerId, muteState: nil, volume: nil, raiseHand: true)
}
public func updateShouldBeRecording(_ shouldBeRecording: Bool, title: String?) {

View File

@ -534,7 +534,7 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
}
case .groupPhoneCall, .inviteToGroupPhoneCall:
if let activeCall = strongSelf.presentationInterfaceState.activeGroupCallInfo?.activeCall {
strongSelf.context.joinGroupCall(peerId: message.id.peerId, joinAsPeerId: nil, activeCall: CachedChannelData.ActiveCall(id: activeCall.id, accessHash: activeCall.accessHash, title: activeCall.title))
strongSelf.joinGroupCall(peerId: message.id.peerId, joinAsPeerId: nil, activeCall: CachedChannelData.ActiveCall(id: activeCall.id, accessHash: activeCall.accessHash, title: activeCall.title))
} else {
var canManageGroupCalls = false
if let channel = strongSelf.presentationInterfaceState.renderedPeer?.chatMainPeer as? TelegramChannel {
@ -568,7 +568,7 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
guard let strongSelf = self else {
return
}
strongSelf.context.joinGroupCall(peerId: message.id.peerId, joinAsPeerId: nil, activeCall: CachedChannelData.ActiveCall(id: info.id, accessHash: info.accessHash, title: info.title))
strongSelf.joinGroupCall(peerId: message.id.peerId, joinAsPeerId: nil, activeCall: CachedChannelData.ActiveCall(id: info.id, accessHash: info.accessHash, title: info.title))
}, error: { [weak self] error in
dismissStatus?()
@ -6403,7 +6403,7 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
guard let strongSelf = self, let peer = strongSelf.presentationInterfaceState.renderedPeer?.peer else {
return
}
strongSelf.context.joinGroupCall(peerId: peer.id, joinAsPeerId: nil, activeCall: activeCall)
strongSelf.joinGroupCall(peerId: peer.id, joinAsPeerId: nil, activeCall: activeCall)
}, presentInviteMembers: { [weak self] in
guard let strongSelf = self, let peer = strongSelf.presentationInterfaceState.renderedPeer?.peer else {
return
@ -11851,9 +11851,58 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
return inputShortcuts + otherShortcuts
}
public override func joinGroupCall(peerId: PeerId, joinAsPeerId: PeerId?, info: GroupCallInfo) {
let _ = self.presentVoiceMessageDiscardAlert(action: {
super.joinGroupCall(peerId: peerId, joinAsPeerId: joinAsPeerId, info: info)
/// Joins a group call after letting the user pick which peer to be displayed
/// as (their personal account, or one of the channels they manage). If a voice
/// message is being recorded, a discard confirmation is presented first.
///
/// - Parameters:
///   - peerId: The chat whose group call is being joined.
///   - joinAsPeerId: Ignored here; the chosen peer from the selection sheet is
///     forwarded to the superclass implementation instead.
///   - activeCall: The call to join.
public override func joinGroupCall(peerId: PeerId, joinAsPeerId: PeerId?, activeCall: CachedChannelData.ActiveCall) {
    let context = self.context
    let presentationData = self.presentationData
    // Never reassigned after initialization, so declare as a constant
    // (the original `var` triggers a "never mutated" compiler warning).
    let proceed: (PeerId) -> Void = { joinAsPeerId in
        super.joinGroupCall(peerId: peerId, joinAsPeerId: joinAsPeerId, activeCall: activeCall)
    }
    let _ = self.presentVoiceMessageDiscardAlert(action: { [weak self] in
        // The user's own account is always offered as the first option.
        let currentAccountPeer = context.account.postbox.loadedPeerWithId(context.account.peerId)
        |> map { peer in
            return [FoundPeer(peer: peer, subscribers: nil)]
        }
        let _ = (combineLatest(currentAccountPeer, cachedGroupCallDisplayAsAvailablePeers(account: context.account))
        |> map { currentAccountPeer, availablePeers -> [FoundPeer] in
            var result = currentAccountPeer
            result.append(contentsOf: availablePeers)
            return result
        }
        |> take(1)
        |> deliverOnMainQueue).start(next: { [weak self] peers in
            guard let strongSelf = self else {
                return
            }
            let controller = ActionSheetController(presentationData: presentationData)
            let dismissAction: () -> Void = { [weak controller] in
                controller?.dismissAnimated()
            }
            var items: [ActionSheetItem] = []
            items.append(VoiceChatAccountHeaderActionSheetItem(title: presentationData.strings.VoiceChat_SelectAccount, text: presentationData.strings.VoiceChat_DisplayAsInfo))
            for peer in peers {
                var subtitle: String?
                if peer.peer.id.namespace == Namespaces.Peer.CloudUser {
                    subtitle = presentationData.strings.VoiceChat_PersonalAccount
                } else if let subscribers = peer.subscribers {
                    // NOTE(review): assumes non-user peers here carry a subscriber
                    // count (channels); peers without one get an empty subtitle.
                    subtitle = presentationData.strings.Conversation_StatusSubscribers(subscribers)
                }
                items.append(VoiceChatPeerActionSheetItem(context: context, peer: peer.peer, title: peer.peer.displayTitle(strings: presentationData.strings, displayOrder: presentationData.nameDisplayOrder), subtitle: subtitle ?? "", action: {
                    dismissAction()
                    proceed(peer.peer.id)
                }))
            }
            controller.setItemGroups([
                ActionSheetItemGroup(items: items),
                ActionSheetItemGroup(items: [ActionSheetButtonItem(title: presentationData.strings.Common_Cancel, action: { dismissAction() })])
            ])
            strongSelf.present(controller, in: .window(.root))
        })
    })
}

View File

@ -1084,7 +1084,7 @@ class ChatTextInputPanelNode: ChatInputPanelNode, ASEditableTextNodeDelegate {
var animateDotAppearing = false
let audioRecordingDotNode: AnimationNode
if let currentAudioRecordingDotNode = self.audioRecordingDotNode, !currentAudioRecordingDotNode.played {
if let currentAudioRecordingDotNode = self.audioRecordingDotNode, !currentAudioRecordingDotNode.didPlay {
audioRecordingDotNode = currentAudioRecordingDotNode
} else {
self.audioRecordingDotNode?.removeFromSupernode()