diff --git a/submodules/AccountContext/Sources/PresentationCallManager.swift b/submodules/AccountContext/Sources/PresentationCallManager.swift
index 77db807fc1..0d6c60886f 100644
--- a/submodules/AccountContext/Sources/PresentationCallManager.swift
+++ b/submodules/AccountContext/Sources/PresentationCallManager.swift
@@ -196,13 +196,16 @@ public struct PresentationGroupCallSummaryState: Equatable {
 public struct PresentationGroupCallMemberState: Equatable {
     public var ssrc: UInt32
     public var muteState: GroupCallParticipantsContext.Participant.MuteState?
+    public var speaking: Bool
     
     public init(
         ssrc: UInt32,
-        muteState: GroupCallParticipantsContext.Participant.MuteState?
+        muteState: GroupCallParticipantsContext.Participant.MuteState?,
+        speaking: Bool
     ) {
         self.ssrc = ssrc
         self.muteState = muteState
+        self.speaking = speaking
     }
 }
diff --git a/submodules/AudioBlob/Sources/BlobView.swift b/submodules/AudioBlob/Sources/BlobView.swift
index 8d753361e2..1269b97cf5 100644
--- a/submodules/AudioBlob/Sources/BlobView.swift
+++ b/submodules/AudioBlob/Sources/BlobView.swift
@@ -112,11 +112,15 @@ public final class VoiceBlobView: UIView, TGModernConversationInputMicButtonDeco
     public func stopAnimating() {
+        self.stopAnimating(duration: 0.15)
+    }
+    
+    public func stopAnimating(duration: Double) {
         guard isAnimating else { return }
         isAnimating = false
         
-        mediumBlob.layer.animateScale(from: 1.0, to: 0.5, duration: 0.15, removeOnCompletion: false)
-        bigBlob.layer.animateScale(from: 1.0, to: 0.5, duration: 0.15, removeOnCompletion: false)
+        mediumBlob.layer.animateScale(from: 1.0, to: 0.5, duration: duration, removeOnCompletion: false)
+        bigBlob.layer.animateScale(from: 1.0, to: 0.5, duration: duration, removeOnCompletion: false)
         
         updateBlobsState()
diff --git a/submodules/TelegramCallsUI/Sources/PresentationGroupCall.swift b/submodules/TelegramCallsUI/Sources/PresentationGroupCall.swift
index 955a13a609..805ff576cc 100644
--- a/submodules/TelegramCallsUI/Sources/PresentationGroupCall.swift
+++ b/submodules/TelegramCallsUI/Sources/PresentationGroupCall.swift
@@ -64,6 +64,69 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
         }
     }
     
+    private class SpeakingParticipantsContext {
+        private let speakingLevelThreshold: Float = 0.15
+        private let cutoffTimeout: Int32 = 1
+        private let silentTimeout: Int32 = 2
+        
+        struct Participant {
+            let timestamp: Int32
+            let level: Float
+        }
+        
+        private var participants: [PeerId: Participant] = [:]
+        private let speakingParticipantsPromise = ValuePromise<Set<PeerId>>()
+        private var speakingParticipants = Set<PeerId>() {
+            didSet {
+                self.speakingParticipantsPromise.set(self.speakingParticipants)
+            }
+        }
+        
+        init() {
+            
+        }
+        
+        func update(levels: [(PeerId, Float)]) {
+            let timestamp = Int32(CFAbsoluteTimeGetCurrent())
+            let currentParticipants: [PeerId: Participant] = self.participants
+            
+            var validSpeakers: [PeerId: Participant] = [:]
+            var silentParticipants = Set<PeerId>()
+            var speakingParticipants = Set<PeerId>()
+            for (peerId, level) in levels {
+                if level > speakingLevelThreshold {
+                    validSpeakers[peerId] = Participant(timestamp: timestamp, level: level)
+                    speakingParticipants.insert(peerId)
+                } else {
+                    silentParticipants.insert(peerId)
+                }
+            }
+            
+            for (peerId, participant) in currentParticipants {
+                if let _ = validSpeakers[peerId] {
+                } else {
+                    let delta = timestamp - participant.timestamp
+                    if silentParticipants.contains(peerId) {
+                        if delta < silentTimeout {
+                            validSpeakers[peerId] = participant
+                            speakingParticipants.insert(peerId)
+                        }
+                    } else if delta < cutoffTimeout {
+                        validSpeakers[peerId] = participant
+                        speakingParticipants.insert(peerId)
+                    }
+                }
+            }
+            
+            self.participants = validSpeakers
+            self.speakingParticipants = speakingParticipants
+        }
+        
+        func get() -> Signal<Set<PeerId>, NoError> {
+            return self.speakingParticipantsPromise.get()
+        }
+    }
+    
     public let account: Account
     public let accountContext: AccountContext
     private let audioSession: ManagedAudioSession
@@ -112,6 +175,9 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
     }
     
     private var audioLevelsDisposable = MetaDisposable()
+    
+    private let speakingParticipantsContext = SpeakingParticipantsContext()
+    
     private var participantsContextStateDisposable = MetaDisposable()
     private var participantsContext: GroupCallParticipantsContext?
@@ -445,6 +511,7 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
                 if !result.isEmpty {
                     strongSelf.audioLevelsPipe.putNext(result)
                 }
+                strongSelf.speakingParticipantsContext.update(levels: result)
             }))
             
             self.myAudioLevelDisposable.set((callContext.myAudioLevel
@@ -479,8 +546,8 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
                 state: initialState
             )
             self.participantsContext = participantsContext
-            self.participantsContextStateDisposable.set((participantsContext.state
-            |> deliverOnMainQueue).start(next: { [weak self] state in
+            self.participantsContextStateDisposable.set((combineLatest(participantsContext.state, self.speakingParticipantsContext.get())
+            |> deliverOnMainQueue).start(next: { [weak self] state, speakingParticipants in
                 guard let strongSelf = self else {
                     return
                 }
@@ -496,7 +563,8 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
                     
                     memberStates[participant.peer.id] = PresentationGroupCallMemberState(
                         ssrc: participant.ssrc,
-                        muteState: participant.muteState
+                        muteState: participant.muteState,
+                        speaking: speakingParticipants.contains(participant.peer.id)
                     )
                 }
                 strongSelf.membersValue = memberStates
diff --git a/submodules/TelegramCallsUI/Sources/VoiceChatController.swift b/submodules/TelegramCallsUI/Sources/VoiceChatController.swift
index 8c59c03708..beb7720bb7 100644
--- a/submodules/TelegramCallsUI/Sources/VoiceChatController.swift
+++ b/submodules/TelegramCallsUI/Sources/VoiceChatController.swift
@@ -94,30 +94,49 @@ public final class VoiceChatController: ViewController {
             let count: Int
         }
         
+        private struct State: Equatable {
+            var revealedPeerId: PeerId?
+        }
+        
         private final class Interaction {
             let updateIsMuted: (PeerId, Bool) -> Void
             let invitePeer: (Peer) -> Void
             let peerContextAction: (PeerEntry, ASDisplayNode, ContextGesture?) -> Void
+            let setPeerIdWithRevealedOptions: (PeerId?, PeerId?) -> Void
             
             private var audioLevels: [PeerId: ValuePipe<Float>] = [:]
             
             init(
                 updateIsMuted: @escaping (PeerId, Bool) -> Void,
                 invitePeer: @escaping (Peer) -> Void,
-                peerContextAction: @escaping (PeerEntry, ASDisplayNode, ContextGesture?) -> Void
+                peerContextAction: @escaping (PeerEntry, ASDisplayNode, ContextGesture?) -> Void,
+                setPeerIdWithRevealedOptions: @escaping (PeerId?, PeerId?) -> Void
             ) {
                 self.updateIsMuted = updateIsMuted
                 self.invitePeer = invitePeer
                 self.peerContextAction = peerContextAction
+                self.setPeerIdWithRevealedOptions = setPeerIdWithRevealedOptions
             }
             
             func getAudioLevel(_ peerId: PeerId) -> Signal<Float, NoError>? {
+                let signal: Signal<Float, NoError>
                 if let current = self.audioLevels[peerId] {
-                    return current.signal()
+                    signal = current.signal()
                 } else {
                     let value = ValuePipe<Float>()
                     self.audioLevels[peerId] = value
-                    return value.signal()
+                    signal = value.signal()
+                }
+                return signal
+                |> mapToSignal { value -> Signal<Float, NoError> in
+                    if value > 0.0 {
+                        return .single(value)
+                        |> then(.single(0.0) |> delay(1.0, queue: Queue.mainQueue()))
+                    } else {
+                        return .single(value)
+                    }
+                } |> mapToThrottled { next -> Signal<Float, NoError> in
+                    return .single(next) |> then(.complete() |> delay(0.1, queue: Queue.mainQueue()))
                 }
             }
@@ -143,6 +162,7 @@ public final class VoiceChatController: ViewController {
             var state: State
             var muteState: GroupCallParticipantsContext.Participant.MuteState?
             var invited: Bool
+            var revealed: Bool?
             
             var stableId: PeerId {
                 return self.peer.id
@@ -167,6 +187,9 @@ public final class VoiceChatController: ViewController {
                 if lhs.invited != rhs.invited {
                     return false
                 }
+                if lhs.revealed != rhs.revealed {
+                    return false
+                }
                 return true
             }
@@ -200,7 +223,11 @@ public final class VoiceChatController: ViewController {
                 icon = .microphone(false, UIColor(rgb: 0x34c759))
             }
             
-            return VoiceChatParticipantItem(presentationData: ItemListPresentationData(presentationData), dateTimeFormat: presentationData.dateTimeFormat, nameDisplayOrder: presentationData.nameDisplayOrder, context: context, peer: peer, presence: self.presence, text: text, icon: icon, enabled: true, audioLevel: interaction.getAudioLevel(peer.id), action: {
+            let revealOptions: [VoiceChatParticipantItem.RevealOption] = []
+            
+            return VoiceChatParticipantItem(presentationData: ItemListPresentationData(presentationData), dateTimeFormat: presentationData.dateTimeFormat, nameDisplayOrder: presentationData.nameDisplayOrder, context: context, peer: peer, presence: self.presence, text: text, icon: icon, enabled: true, audioLevel: interaction.getAudioLevel(peer.id), revealOptions: revealOptions, revealed: self.revealed, setPeerIdWithRevealedOptions: { peerId, fromPeerId in
+                interaction.setPeerIdWithRevealedOptions(peerId, fromPeerId)
+            }, action: {
                 interaction.invitePeer(peer)
             }, contextAction: { node, gesture in
                 interaction.peerContextAction(self, node, gesture)
@@ -292,6 +319,12 @@ public final class VoiceChatController: ViewController {
            
            super.init()
            
+           let statePromise = ValuePromise(State(), ignoreRepeated: true)
+           let stateValue = Atomic(value: State())
+           let updateState: ((State) -> State) -> Void = { f in
+               statePromise.set(stateValue.modify { f($0) })
+           }
+           
            let invitePeer: (Peer) -> Void = { [weak self] peer in
                guard let strongSelf = self else {
                    return
@@ -406,6 +439,12 @@ public final class VoiceChatController: ViewController {
                
                let contextController = ContextController(account: strongSelf.context.account, presentationData: strongSelf.presentationData.withUpdated(theme: strongSelf.darkTheme), source: .extracted(VoiceChatContextExtractedContentSource(controller: controller, sourceNode: sourceNode, keepInPlace: false)), items: .single(items), reactionItems: [], gesture: gesture)
                strongSelf.controller?.presentInGlobalOverlay(contextController)
+           }, setPeerIdWithRevealedOptions: { peerId, _ in
+               updateState { state in
+                   var updated = state
+                   updated.revealedPeerId = peerId
+                   return updated
+               }
            })
            
            self.contentContainer.addSubnode(self.listNode)
@@ -955,7 +994,7 @@ public final class VoiceChatController: ViewController {
                            memberState = .listening
                        }
                    } else if let state = memberStates[member.peer.id] {
-                       memberState = .listening
+                       memberState = state.speaking ? .speaking : .listening
                        memberMuteState = state.muteState
                    } else {
                        memberState = .inactive
diff --git a/submodules/TelegramCallsUI/Sources/VoiceChatParticipantItem.swift b/submodules/TelegramCallsUI/Sources/VoiceChatParticipantItem.swift
index 3f4715c540..f9cf0a9a6d 100644
--- a/submodules/TelegramCallsUI/Sources/VoiceChatParticipantItem.swift
+++ b/submodules/TelegramCallsUI/Sources/VoiceChatParticipantItem.swift
@@ -37,6 +37,25 @@ public final class VoiceChatParticipantItem: ListViewItem {
         case invite(Bool)
     }
     
+    public struct RevealOption {
+        public enum RevealOptionType {
+            case neutral
+            case warning
+            case destructive
+            case accent
+        }
+        
+        public var type: RevealOptionType
+        public var title: String
+        public var action: () -> Void
+        
+        public init(type: RevealOptionType, title: String, action: @escaping () -> Void) {
+            self.type = type
+            self.title = title
+            self.action = action
+        }
+    }
+    
     let presentationData: ItemListPresentationData
     let dateTimeFormat: PresentationDateTimeFormat
     let nameDisplayOrder: PresentationPersonNameOrder
@@ -47,10 +66,13 @@ public final class VoiceChatParticipantItem: ListViewItem {
     let icon: Icon
     let enabled: Bool
     let audioLevel: Signal<Float, NoError>?
+    let revealOptions: [RevealOption]
+    let revealed: Bool?
+    let setPeerIdWithRevealedOptions: (PeerId?, PeerId?) -> Void
     let action: (() -> Void)?
     let contextAction: ((ASDisplayNode, ContextGesture?) -> Void)?
     
-    public init(presentationData: ItemListPresentationData, dateTimeFormat: PresentationDateTimeFormat, nameDisplayOrder: PresentationPersonNameOrder, context: AccountContext, peer: Peer, presence: PeerPresence?, text: ParticipantText, icon: Icon, enabled: Bool, audioLevel: Signal<Float, NoError>?, action: (() -> Void)?, contextAction: ((ASDisplayNode, ContextGesture?) -> Void)? = nil) {
+    public init(presentationData: ItemListPresentationData, dateTimeFormat: PresentationDateTimeFormat, nameDisplayOrder: PresentationPersonNameOrder, context: AccountContext, peer: Peer, presence: PeerPresence?, text: ParticipantText, icon: Icon, enabled: Bool, audioLevel: Signal<Float, NoError>?, revealOptions: [RevealOption], revealed: Bool?, setPeerIdWithRevealedOptions: @escaping (PeerId?, PeerId?) -> Void, action: (() -> Void)?, contextAction: ((ASDisplayNode, ContextGesture?) -> Void)? = nil) {
         self.presentationData = presentationData
         self.dateTimeFormat = dateTimeFormat
         self.nameDisplayOrder = nameDisplayOrder
@@ -61,6 +83,9 @@ public final class VoiceChatParticipantItem: ListViewItem {
         self.icon = icon
         self.enabled = enabled
         self.audioLevel = audioLevel
+        self.revealOptions = revealOptions
+        self.revealed = revealed
+        self.setPeerIdWithRevealedOptions = setPeerIdWithRevealedOptions
         self.action = action
         self.contextAction = contextAction
     }
@@ -113,7 +138,7 @@ public final class VoiceChatParticipantItem: ListViewItem {
 
 private let avatarFont = avatarPlaceholderFont(size: floor(40.0 * 16.0 / 37.0))
 
-public class VoiceChatParticipantItemNode: ListViewItemNode {
+public class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode {
     private let backgroundNode: ASDisplayNode
     private let topStripeNode: ASDisplayNode
     private let bottomStripeNode: ASDisplayNode
@@ -139,6 +164,7 @@ public class VoiceChatParticipantItemNode: ListViewItemNode {
     
     private var audioLevelView: VoiceBlobView?
     private let audioLevelDisposable = MetaDisposable()
+    private var didSetupAudioLevel = false
     
     private var absoluteLocation: (CGRect, CGSize)?
@@ -170,6 +196,7 @@ public class VoiceChatParticipantItemNode: ListViewItemNode {
         
         self.avatarNode = AvatarNode(font: avatarFont)
         self.avatarNode.isLayerBacked = !smartInvertColorsEnabled()
+        self.avatarNode.frame = CGRect(origin: CGPoint(), size: CGSize(width: 40.0, height: 40.0))
         
         self.titleNode = TextNode()
         self.titleNode.isUserInteractionEnabled = false
@@ -386,6 +413,31 @@ public class VoiceChatParticipantItemNode: ListViewItemNode {
                 }
             }
             
+            let peerRevealOptions: [ItemListRevealOption]
+            var mappedOptions: [ItemListRevealOption] = []
+            var index: Int32 = 0
+            for option in item.revealOptions {
+                let color: UIColor
+                let textColor: UIColor
+                switch option.type {
+                    case .neutral:
+                        color = item.presentationData.theme.list.itemDisclosureActions.constructive.fillColor
+                        textColor = item.presentationData.theme.list.itemDisclosureActions.constructive.foregroundColor
+                    case .warning:
+                        color = item.presentationData.theme.list.itemDisclosureActions.warning.fillColor
+                        textColor = item.presentationData.theme.list.itemDisclosureActions.warning.foregroundColor
+                    case .destructive:
+                        color = item.presentationData.theme.list.itemDisclosureActions.destructive.fillColor
+                        textColor = item.presentationData.theme.list.itemDisclosureActions.destructive.foregroundColor
+                    case .accent:
+                        color = item.presentationData.theme.list.itemDisclosureActions.accent.fillColor
+                        textColor = item.presentationData.theme.list.itemDisclosureActions.accent.foregroundColor
+                }
+                mappedOptions.append(ItemListRevealOption(key: index, title: option.title, icon: .none, color: color, textColor: textColor))
+                index += 1
+            }
+            peerRevealOptions = mappedOptions
+            
             return (layout, { [weak self] synchronousLoad, animated in
                 if let strongSelf = self {
                     strongSelf.layoutParams = (item, params, first, last)
@@ -488,11 +540,12 @@ public class VoiceChatParticipantItemNode: ListViewItemNode {
                     transition.updateFrame(node: strongSelf.statusNode, frame: CGRect(origin: CGPoint(x: leftInset, y: strongSelf.titleNode.frame.maxY + titleSpacing), size: statusLayout.size))
                     
                     let avatarFrame = CGRect(origin: CGPoint(x: params.leftInset + 15.0, y: floorToScreenPixels((layout.contentSize.height - avatarSize) / 2.0)), size: CGSize(width: avatarSize, height: avatarSize))
-                    transition.updateFrame(node: strongSelf.avatarNode, frame: avatarFrame)
+                    transition.updateFrameAsPositionAndBounds(node: strongSelf.avatarNode, frame: avatarFrame)
                     
                     let blobFrame = avatarFrame.insetBy(dx: -12.0, dy: -12.0)
-                    if let audioLevel = item.audioLevel {
+                    if let audioLevel = item.audioLevel, !strongSelf.didSetupAudioLevel || currentItem?.peer.id != item.peer.id {
                         strongSelf.audioLevelView?.frame = blobFrame
+                        strongSelf.didSetupAudioLevel = true
                         strongSelf.audioLevelDisposable.set((audioLevel
                         |> deliverOnMainQueue).start(next: { value in
                             guard let strongSelf = self else {
@@ -523,12 +576,20 @@ public class VoiceChatParticipantItemNode: ListViewItemNode {
                                 strongSelf.containerNode.view.insertSubview(audioLevelView, at: 0)
                             }
                             
+                            let level = min(1.0, max(0.0, CGFloat(value)))
+                            let avatarScale: CGFloat
+                            
                             strongSelf.audioLevelView?.updateLevel(CGFloat(value) * 2.0)
                             if value > 0.0 {
                                 strongSelf.audioLevelView?.startAnimating()
+                                avatarScale = 1.03 + level * 0.1
                             } else {
-                                strongSelf.audioLevelView?.stopAnimating()
+                                strongSelf.audioLevelView?.stopAnimating(duration: 0.5)
+                                avatarScale = 1.0
                             }
+                            
+                            let transition: ContainedViewLayoutTransition = .animated(duration: 0.15, curve: .spring)
+                            transition.updateTransformScale(node: strongSelf.avatarNode, scale: avatarScale, beginWithCurrentState: true)
                         }))
                     } else if let audioLevelView = strongSelf.audioLevelView {
                         strongSelf.audioLevelView = nil
@@ -596,6 +657,9 @@ public class VoiceChatParticipantItemNode: ListViewItemNode {
                     }
                     
                     strongSelf.updateIsHighlighted(transition: transition)
+                    
+                    strongSelf.setRevealOptions((left: [], right: peerRevealOptions))
+                    strongSelf.setRevealOptionsOpened(item.revealed ?? false, animated: animated)
                 }
             })
         }
@@ -676,4 +740,47 @@ public class VoiceChatParticipantItemNode: ListViewItemNode {
             item.action?()
         }
     }
+    
+    override public func updateRevealOffset(offset: CGFloat, transition: ContainedViewLayoutTransition) {
+        super.updateRevealOffset(offset: offset, transition: transition)
+        
+        if let item = self.layoutParams?.0, let params = self.layoutParams?.1 {
+            var leftInset: CGFloat = 65.0 + params.leftInset
+            
+            var avatarFrame = self.avatarNode.frame
+            avatarFrame.origin.x = offset + leftInset - 50.0
+            transition.updateFrame(node: self.avatarNode, frame: avatarFrame)
+            
+            var titleFrame = self.titleNode.frame
+            titleFrame.origin.x = leftInset + offset
+            transition.updateFrame(node: self.titleNode, frame: titleFrame)
+            
+            var statusFrame = self.statusNode.frame
+            let previousStatusFrame = statusFrame
+            statusFrame.origin.x = leftInset + offset
+            self.statusNode.frame = statusFrame
+            transition.animatePositionAdditive(node: self.statusNode, offset: CGPoint(x: previousStatusFrame.minX - statusFrame.minX, y: 0))
+        }
+    }
+    
+    override public func revealOptionsInteractivelyOpened() {
+        if let item = self.layoutParams?.0 {
+            item.setPeerIdWithRevealedOptions(item.peer.id, nil)
+        }
+    }
+    
+    override public func revealOptionsInteractivelyClosed() {
+        if let item = self.layoutParams?.0 {
+            item.setPeerIdWithRevealedOptions(nil, item.peer.id)
+        }
+    }
+    
+    override public func revealOptionSelected(_ option: ItemListRevealOption, animated: Bool) {
+        if let item = self.layoutParams?.0 {
+            item.revealOptions[Int(option.key)].action()
+        }
+        
+        self.setRevealOptionsOpened(false, animated: true)
+        self.revealOptionsInteractivelyClosed()
+    }
 }
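
Note on the speaking-detection logic above: SpeakingParticipantsContext keeps a peer marked as speaking for a short grace period after its level drops, so the participant list does not flicker between words. Below is a minimal standalone sketch of just that threshold/timeout rule with the SwiftSignalKit ValuePromise plumbing removed; the names SpeakingTracker and Entry and the Int64 stand-in for PeerId are illustrative, not part of the PR.

// Mirrors the rules of SpeakingParticipantsContext.update(levels:):
// - a level above 0.15 marks the peer as speaking now;
// - a peer reported below the threshold keeps its speaking state for up to
//   silentTimeout seconds after its last loud sample;
// - a peer missing from the update entirely keeps it for up to cutoffTimeout seconds.
typealias PeerId = Int64

struct SpeakingTracker {
    struct Entry {
        let timestamp: Int32
        let level: Float
    }

    let speakingLevelThreshold: Float = 0.15
    let cutoffTimeout: Int32 = 1
    let silentTimeout: Int32 = 2

    var participants: [PeerId: Entry] = [:]
    var speakingParticipants = Set<PeerId>()

    mutating func update(levels: [(PeerId, Float)], timestamp: Int32) {
        var validSpeakers: [PeerId: Entry] = [:]
        var silent = Set<PeerId>()
        var speaking = Set<PeerId>()

        for (peerId, level) in levels {
            if level > speakingLevelThreshold {
                validSpeakers[peerId] = Entry(timestamp: timestamp, level: level)
                speaking.insert(peerId)
            } else {
                silent.insert(peerId)
            }
        }

        for (peerId, entry) in participants where validSpeakers[peerId] == nil {
            let delta = timestamp - entry.timestamp
            let timeout = silent.contains(peerId) ? silentTimeout : cutoffTimeout
            if delta < timeout {
                // Keep the previous entry (and its original timestamp), so the
                // grace period is measured from the last loud sample.
                validSpeakers[peerId] = entry
                speaking.insert(peerId)
            }
        }

        participants = validSpeakers
        speakingParticipants = speaking
    }
}

// Example: peer 1 stays "speaking" through one quiet update, then expires.
var tracker = SpeakingTracker()
tracker.update(levels: [(1, 0.4)], timestamp: 10)
print(tracker.speakingParticipants) // [1]
tracker.update(levels: [(1, 0.05)], timestamp: 11)
print(tracker.speakingParticipants) // still [1]: 11 - 10 < silentTimeout
tracker.update(levels: [(1, 0.05)], timestamp: 13)
print(tracker.speakingParticipants) // []: grace period elapsed

Passing the timestamp explicitly is only a convenience for testing the rule in isolation; the class in the diff reads CFAbsoluteTimeGetCurrent() inside update(levels:).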
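
Note on Interaction.getAudioLevel: the diff shapes each per-peer level stream in two ways — every non-zero sample is followed by a synthetic 0.0 one second later, so the avatar blob decays even if no further samples arrive, and the stream is rate-limited to roughly ten updates per second. The PR does this with SwiftSignalKit (mapToSignal / then / delay / mapToThrottled); the sketch below expresses the same intent with Combine purely for illustration. shapedAudioLevels is a hypothetical helper, and Combine's throttle(for:scheduler:latest:) is only an approximation of mapToThrottled.

import Combine
import Foundation

// Approximate Combine stand-in for the SwiftSignalKit pipeline in the diff.
func shapedAudioLevels(_ raw: AnyPublisher<Float, Never>) -> AnyPublisher<Float, Never> {
    raw
        .flatMap { value -> AnyPublisher<Float, Never> in
            if value > 0.0 {
                // Emit the level now, then force a decay to 0.0 after one second.
                return Just(value)
                    .append(
                        Just(Float(0.0))
                            .delay(for: .seconds(1), scheduler: DispatchQueue.main)
                    )
                    .eraseToAnyPublisher()
            } else {
                return Just(value).eraseToAnyPublisher()
            }
        }
        // Limit how often the list item has to update its blob and avatar scale.
        .throttle(for: .milliseconds(100), scheduler: DispatchQueue.main, latest: true)
        .eraseToAnyPublisher()
}

The design point carried over from the diff is that the smoothing lives in the Interaction, so every VoiceChatParticipantItemNode consuming the level signal gets the decay and rate limit for free rather than reimplementing them per item.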