Group call optimizations

Isaac 2024-06-14 23:03:36 +04:00
parent 93a4e355f7
commit 2f177e9a48
17 changed files with 250 additions and 118 deletions

View File

@@ -419,6 +419,8 @@ public protocol PresentationGroupCall: AnyObject {
     var memberEvents: Signal<PresentationGroupCallMemberEvent, NoError> { get }
     var reconnectedAsEvents: Signal<EnginePeer, NoError> { get }
     
+    var onMutedSpeechActivityDetected: ((Bool) -> Void)? { get set }
+    
     func toggleScheduledSubscription(_ subscribe: Bool)
     func schedule(timestamp: Int32)
     func startScheduled()
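
The new optional callback is how the UI learns that the user is talking while muted. A minimal consumer sketch (hypothetical call site; showMutedHint() is an assumed helper, and the main-queue hop mirrors how PresentationGroupCallImpl delivers the event further down):

    call.onMutedSpeechActivityDetected = { [weak self] isSpeaking in
        // The callback can fire from the audio pipeline; hop to the main queue before touching UI.
        Queue.mainQueue().async {
            guard let self, isSpeaking else { return }
            self.showMutedHint() // assumed UI helper, not part of the commit
        }
    }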

View File

@@ -10,9 +10,11 @@ swift_library(
         "-warnings-as-errors",
     ],
     deps = [
-        "//submodules/AsyncDisplayKit:AsyncDisplayKit",
-        "//submodules/Display:Display",
-        "//submodules/LegacyComponents:LegacyComponents",
+        "//submodules/AsyncDisplayKit",
+        "//submodules/Display",
+        "//submodules/LegacyComponents",
+        "//submodules/MetalEngine",
+        "//submodules/TelegramUI/Components/Calls/CallScreen",
     ],
     visibility = [
         "//visibility:public",

View File

@@ -3,6 +3,8 @@ import UIKit
 import AsyncDisplayKit
 import Display
 import LegacyComponents
+import CallScreen
+import MetalEngine
 
 public final class VoiceBlobNode: ASDisplayNode {
     public init(
@@ -36,9 +38,7 @@ public final class VoiceBlobNode: ASDisplayNode {
 }
 
 public final class VoiceBlobView: UIView, TGModernConversationInputMicButtonDecoration {
-    private let smallBlob: BlobNode
-    private let mediumBlob: BlobNode
-    private let bigBlob: BlobNode
+    private let blobsLayer: CallBlobsLayer
 
     private let maxLevel: CGFloat
@@ -65,7 +65,7 @@ public final class VoiceBlobView: UIView, TGModernConversationInputMicButtonDecoration {
     ) {
         self.maxLevel = maxLevel
 
-        self.smallBlob = BlobNode(
+        /*self.smallBlob = BlobNode(
             pointsCount: 8,
             minRandomness: 0.1,
             maxRandomness: 0.5,
@@ -97,7 +97,9 @@ public final class VoiceBlobView: UIView, TGModernConversationInputMicButtonDecoration {
             maxScale: bigBlobRange.max,
             scaleSpeed: 0.2,
             isCircle: false
-        )
+        )*/
+        self.blobsLayer = CallBlobsLayer(colors: [UIColor.white, UIColor.white.withAlphaComponent(0.3), UIColor.white.withAlphaComponent(0.15)])
 
         var updateInHierarchy: ((Bool) -> Void)?
         self.hierarchyTrackingNode = HierarchyTrackingNode({ value in
@@ -108,18 +110,21 @@ public final class VoiceBlobView: UIView, TGModernConversationInputMicButtonDecoration {
 
         self.addSubnode(self.hierarchyTrackingNode)
 
-        self.addSubnode(self.bigBlob)
+        /*self.addSubnode(self.bigBlob)
         self.addSubnode(self.mediumBlob)
-        self.addSubnode(self.smallBlob)
+        self.addSubnode(self.smallBlob)*/
+        self.layer.addSublayer(self.blobsLayer)
 
-        displayLinkAnimator = ConstantDisplayLinkAnimator() { [weak self] in
+        self.displayLinkAnimator = ConstantDisplayLinkAnimator() { [weak self] in
             guard let strongSelf = self else { return }
 
             strongSelf.presentationAudioLevel = strongSelf.presentationAudioLevel * 0.9 + strongSelf.audioLevel * 0.1
+            strongSelf.updateAudioLevel()
 
-            strongSelf.smallBlob.level = strongSelf.presentationAudioLevel
-            strongSelf.mediumBlob.level = strongSelf.presentationAudioLevel
-            strongSelf.bigBlob.level = strongSelf.presentationAudioLevel
+            /*strongSelf.smallBlob.level = strongSelf.presentationAudioLevel
+            strongSelf.mediumBlob.level = strongSelf.presentationAudioLevel
+            strongSelf.bigBlob.level = strongSelf.presentationAudioLevel*/
         }
 
         updateInHierarchy = { [weak self] value in
@@ -138,12 +143,20 @@ public final class VoiceBlobView: UIView, TGModernConversationInputMicButtonDecoration {
     }
 
     public func setColor(_ color: UIColor, animated: Bool) {
+        let transition: ContainedViewLayoutTransition
+        if animated {
+            transition = .animated(duration: 0.2, curve: .easeInOut)
+        } else {
+            transition = .immediate
+        }
+        transition.updateTintColor(layer: self.blobsLayer, color: color)
+        
         if let isManuallyInHierarchy = self.isManuallyInHierarchy, !isManuallyInHierarchy {
             return
         }
-        smallBlob.setColor(color, animated: animated)
-        mediumBlob.setColor(color.withAlphaComponent(0.3), animated: animated)
-        bigBlob.setColor(color.withAlphaComponent(0.15), animated: animated)
+        /*smallBlob.setColor(color, animated: animated)
+        mediumBlob.setColor(color.withAlphaComponent(0.3), animated: animated)
+        bigBlob.setColor(color.withAlphaComponent(0.15), animated: animated)*/
     }
 
     public func updateLevel(_ level: CGFloat) {
@@ -153,9 +166,9 @@ public final class VoiceBlobView: UIView, TGModernConversationInputMicButtonDecoration {
     public func updateLevel(_ level: CGFloat, immediately: Bool = false) {
         let normalizedLevel = min(1, max(level / maxLevel, 0))
 
-        smallBlob.updateSpeedLevel(to: normalizedLevel)
-        mediumBlob.updateSpeedLevel(to: normalizedLevel)
-        bigBlob.updateSpeedLevel(to: normalizedLevel)
+        /*smallBlob.updateSpeedLevel(to: normalizedLevel)
+        mediumBlob.updateSpeedLevel(to: normalizedLevel)
+        bigBlob.updateSpeedLevel(to: normalizedLevel)*/
 
         audioLevel = normalizedLevel
         if immediately {
@@ -163,6 +176,13 @@ public final class VoiceBlobView: UIView, TGModernConversationInputMicButtonDecoration {
         }
     }
 
+    private func updateAudioLevel() {
+        let additionalAvatarScale = CGFloat(max(0.0, min(self.presentationAudioLevel * 18.0, 5.0)) * 0.05)
+        let blobAmplificationFactor: CGFloat = 2.0
+        let blobScale = 1.0 + additionalAvatarScale * blobAmplificationFactor
+        self.blobsLayer.transform = CATransform3DMakeScale(blobScale, blobScale, 1.0)
+    }
+
     public func startAnimating() {
         self.startAnimating(immediately: false)
     }
@@ -171,13 +191,13 @@ public final class VoiceBlobView: UIView, TGModernConversationInputMicButtonDecoration {
         guard !isAnimating else { return }
         isAnimating = true
 
-        if !immediately {
+        /*if !immediately {
             mediumBlob.layer.animateScale(from: 0.75, to: 1, duration: 0.35, removeOnCompletion: false)
             bigBlob.layer.animateScale(from: 0.75, to: 1, duration: 0.35, removeOnCompletion: false)
         } else {
             mediumBlob.layer.removeAllAnimations()
             bigBlob.layer.removeAllAnimations()
-        }
+        }*/
 
         updateBlobsState()
@@ -192,8 +212,8 @@ public final class VoiceBlobView: UIView, TGModernConversationInputMicButtonDecoration {
         guard isAnimating else { return }
         isAnimating = false
 
-        mediumBlob.layer.animateScale(from: 1.0, to: 0.75, duration: duration, removeOnCompletion: false)
-        bigBlob.layer.animateScale(from: 1.0, to: 0.75, duration: duration, removeOnCompletion: false)
+        /*mediumBlob.layer.animateScale(from: 1.0, to: 0.75, duration: duration, removeOnCompletion: false)
+        bigBlob.layer.animateScale(from: 1.0, to: 0.75, duration: duration, removeOnCompletion: false)*/
 
         updateBlobsState()
@@ -201,7 +221,7 @@ public final class VoiceBlobView: UIView, TGModernConversationInputMicButtonDecoration {
     }
 
     private func updateBlobsState() {
-        if self.isAnimating {
+        /*if self.isAnimating {
             if self.smallBlob.frame.size != .zero {
                 smallBlob.startAnimating()
                 mediumBlob.startAnimating()
@@ -211,15 +231,19 @@ public final class VoiceBlobView: UIView, TGModernConversationInputMicButtonDecoration {
             smallBlob.stopAnimating()
             mediumBlob.stopAnimating()
             bigBlob.stopAnimating()
-        }
+        }*/
     }
 
     override public func layoutSubviews() {
         super.layoutSubviews()
 
-        self.smallBlob.frame = bounds
-        self.mediumBlob.frame = bounds
-        self.bigBlob.frame = bounds
+        /*self.smallBlob.frame = bounds
+        self.mediumBlob.frame = bounds
+        self.bigBlob.frame = bounds*/
+        
+        let blobsFrame = bounds.insetBy(dx: floor(bounds.width * 0.12), dy: floor(bounds.height * 0.12))
+        self.blobsLayer.position = blobsFrame.center
+        self.blobsLayer.bounds = CGRect(origin: CGPoint(), size: blobsFrame.size)
 
         self.updateBlobsState()
     }
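
For reference, the new updateAudioLevel() maps the smoothed audio level onto a layer scale in the range [1.0, 1.5]; because of the 18x gain, the clamp saturates once the level passes roughly 0.28. A standalone restatement of that mapping (illustrative only, not part of the commit):

    import CoreGraphics

    // Mirrors the math in updateAudioLevel() above.
    func blobScale(for presentationAudioLevel: CGFloat) -> CGFloat {
        let additionalScale = max(0.0, min(presentationAudioLevel * 18.0, 5.0)) * 0.05
        return 1.0 + additionalScale * 2.0
    }

    // blobScale(for: 0.0)  == 1.0   (idle)
    // blobScale(for: 0.1)  == 1.18
    // blobScale(for: 0.28) == 1.5   (clamp reached: 0.28 * 18 ≈ 5.04)
    // blobScale(for: 1.0)  == 1.5   (hard cap)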

View File

@@ -1265,7 +1265,15 @@ public func canSendMessagesToChat(_ state: ChatPresentationInterfaceState) -> Bool {
             return false
         }
     } else if case .customChatContents = state.chatLocation {
-        return true
+        if case let .customChatContents(contents) = state.subject {
+            if case .hashTagSearch = contents.kind {
+                return false
+            } else {
+                return true
+            }
+        } else {
+            return true
+        }
     } else {
         return false
     }
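
The nested branches reduce to a single early return: hashtag-search contents are the only custom chat contents that block sending. A condensed equivalent (sketch, not the committed form):

    if case let .customChatContents(contents) = state.subject, case .hashTagSearch = contents.kind {
        return false
    }
    return true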

View File

@@ -87,7 +87,7 @@ public func createChartController(_ data: String, type: ChartType, rate: Double
         controller = StackedBarsChartController(chartsCollection: collection)
         controller.isZoomable = false
     case .currency:
-        controller = StackedBarsChartController(chartsCollection: collection, isCrypto: true, rate: rate)
+        controller = StackedBarsChartController(chartsCollection: collection, currency: .ton, rate: rate)
         controller.isZoomable = false
     case .step:
         controller = StepBarsChartController(chartsCollection: collection)

View File

@@ -853,6 +853,8 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
     public let isStream: Bool
 
+    public var onMutedSpeechActivityDetected: ((Bool) -> Void)?
+
     init(
         accountContext: AccountContext,
         audioSession: ManagedAudioSession,
@@ -1674,8 +1676,14 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
                     strongSelf.requestCall(movingFromBroadcastToRtc: false)
                 }
             }
-        }, outgoingAudioBitrateKbit: outgoingAudioBitrateKbit, videoContentType: self.isVideoEnabled ? .generic : .none, enableNoiseSuppression: false, disableAudioInput: self.isStream, preferX264: self.accountContext.sharedContext.immediateExperimentalUISettings.preferredVideoCodec == "H264", logPath: allocateCallLogPath(account: self.account)
-        ))
+        }, outgoingAudioBitrateKbit: outgoingAudioBitrateKbit, videoContentType: self.isVideoEnabled ? .generic : .none, enableNoiseSuppression: false, disableAudioInput: self.isStream, preferX264: self.accountContext.sharedContext.immediateExperimentalUISettings.preferredVideoCodec == "H264", logPath: allocateCallLogPath(account: self.account), onMutedSpeechActivityDetected: { [weak self] value in
+            Queue.mainQueue().async {
+                guard let strongSelf = self else {
+                    return
+                }
+                strongSelf.onMutedSpeechActivityDetected?(value)
+            }
+        }))
         }
 
         self.genericCallContext = genericCallContext
@@ -2967,7 +2975,7 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
         self.hasScreencast = true
 
-        let screencastCallContext = OngoingGroupCallContext(audioSessionActive: .single(true), video: self.screencastCapturer, requestMediaChannelDescriptions: { _, _ in EmptyDisposable }, rejoinNeeded: { }, outgoingAudioBitrateKbit: nil, videoContentType: .screencast, enableNoiseSuppression: false, disableAudioInput: true, preferX264: false, logPath: "")
+        let screencastCallContext = OngoingGroupCallContext(audioSessionActive: .single(true), video: self.screencastCapturer, requestMediaChannelDescriptions: { _, _ in EmptyDisposable }, rejoinNeeded: { }, outgoingAudioBitrateKbit: nil, videoContentType: .screencast, enableNoiseSuppression: false, disableAudioInput: true, preferX264: false, logPath: "", onMutedSpeechActivityDetected: { _ in })
         self.screencastCallContext = screencastCallContext
 
         self.screencastJoinDisposable.set((screencastCallContext.joinPayload

View File

@@ -2452,6 +2452,24 @@ public final class VoiceChatControllerImpl: ViewController, VoiceChatController {
                 }
             })
         }
+        
+        var lastTimestamp = 0.0
+        self.call.onMutedSpeechActivityDetected = { [weak self] value in
+            Queue.mainQueue().async {
+                guard let self, value else {
+                    return
+                }
+                let timestamp = CFAbsoluteTimeGetCurrent()
+                if lastTimestamp + 1000.0 < timestamp {
+                    lastTimestamp = timestamp
+                    
+                    //TODO:localize
+                    self.presentUndoOverlay(content: .info(title: nil, text: "Your microphone is muted.", timeout: nil, customUndoText: nil), action: { _ in
+                        return false
+                    })
+                }
+            }
+        }
     }
     
     deinit {
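
Note the throttle: CFAbsoluteTimeGetCurrent() returns seconds, so with the 1000.0 threshold the toast re-arms only after roughly 16 minutes. The same pattern in general form (illustrative helper, not from the commit):

    import Foundation

    // Time-based throttle: runs `action` at most once per `interval` seconds.
    final class Throttler {
        private var lastTimestamp: CFAbsoluteTime = 0.0
        private let interval: CFAbsoluteTime

        init(interval: CFAbsoluteTime) {
            self.interval = interval
        }

        func run(_ action: () -> Void) {
            let timestamp = CFAbsoluteTimeGetCurrent()
            if lastTimestamp + interval < timestamp {
                lastTimestamp = timestamp
                action()
            }
        }
    }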

View File

@@ -230,10 +230,10 @@ vertex BlobVertexOut callBlobVertex(
 }
 
 fragment half4 callBlobFragment(
-    BlobVertexOut in [[stage_in]]
+    BlobVertexOut in [[stage_in]],
+    const device float4 &color [[ buffer(0) ]]
 ) {
-    half alpha = 0.35;
-    return half4(1.0 * alpha, 1.0 * alpha, 1.0 * alpha, alpha);
+    return half4(color.r * color.a, color.g * color.a, color.b * color.a, color.a);
 }
 
 kernel void videoBiPlanarToRGBA(
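
The rewritten fragment shader reads a per-blob color from a uniform buffer and premultiplies RGB by alpha, which is exactly what the old hard-coded version produced for white at 35% opacity. A Swift-side restatement of that equivalence (illustrative):

    import simd

    // Premultiply a straight-alpha RGBA color, as callBlobFragment now does on the GPU.
    func premultiply(_ c: SIMD4<Float>) -> SIMD4<Float> {
        return SIMD4<Float>(c.x * c.w, c.y * c.w, c.z * c.w, c.w)
    }

    // The old shader output was the premultiplied form of white at alpha 0.35:
    assert(premultiply(SIMD4<Float>(1.0, 1.0, 1.0, 0.35)) == SIMD4<Float>(0.35, 0.35, 0.35, 0.35))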

View File

@@ -7,10 +7,13 @@ public final class CallBlobsLayer: MetalEngineSubjectLayer, MetalEngineSubject {
     public var internalData: MetalEngineSubjectInternalData?
 
     private struct Blob {
+        var color: SIMD4<Float>
         var points: [Float]
         var nextPoints: [Float]
 
-        init(count: Int) {
+        init(count: Int, color: SIMD4<Float>) {
+            self.color = color
             self.points = (0 ..< count).map { _ in
                 Float.random(in: 0.0 ... 1.0)
             }
@@ -71,7 +74,7 @@ public final class CallBlobsLayer: MetalEngineSubjectLayer, MetalEngineSubject {
     private var displayLinkSubscription: SharedDisplayLinkDriver.Link?
 
-    override public init() {
+    public init(colors: [UIColor] = [UIColor(white: 1.0, alpha: 0.35), UIColor(white: 1.0, alpha: 0.35)]) {
         super.init()
 
         self.didEnterHierarchy = { [weak self] in
@@ -100,8 +103,14 @@ public final class CallBlobsLayer: MetalEngineSubjectLayer, MetalEngineSubject {
         }
 
         self.isOpaque = false
-        self.blobs = (0 ..< 2).map { _ in
-            Blob(count: 8)
+        self.blobs = colors.reversed().map { color in
+            var r: CGFloat = 0.0
+            var g: CGFloat = 0.0
+            var b: CGFloat = 0.0
+            var a: CGFloat = 0.0
+            color.getRed(&r, green: &g, blue: &b, alpha: &a)
+            return Blob(count: 8, color: SIMD4<Float>(Float(r), Float(g), Float(b), Float(a)))
         }
     }
@@ -137,6 +146,9 @@ public final class CallBlobsLayer: MetalEngineSubjectLayer, MetalEngineSubject {
             encoder.setVertexBytes(&points, length: MemoryLayout<Float>.size * points.count, index: 1)
             encoder.setVertexBytes(&count, length: MemoryLayout<Float>.size, index: 2)
 
+            var color = blobs[i].color
+            encoder.setFragmentBytes(&color, length: MemoryLayout<Float>.size * 4, index: 0)
+
             encoder.drawPrimitives(type: .triangle, vertexStart: 0, vertexCount: 3 * 8 * points.count)
         }
     })
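
Two details worth noting: colors.reversed() makes the first color in the array draw last, i.e. on top (so VoiceBlobView's [white, 30%, 15%] stacks the solid blob innermost), and the fragment uniform length MemoryLayout<Float>.size * 4 is the same 16 bytes as MemoryLayout<SIMD4<Float>>.size. The color conversion in isolation (sketch):

    import UIKit
    import simd

    // Convert UIColors to straight-alpha float vectors, back-to-front, as the initializer above does.
    func blobColors(from colors: [UIColor]) -> [SIMD4<Float>] {
        return colors.reversed().map { color in
            var r: CGFloat = 0.0, g: CGFloat = 0.0, b: CGFloat = 0.0, a: CGFloat = 0.0
            color.getRed(&r, green: &g, blue: &b, alpha: &a)
            return SIMD4<Float>(Float(r), Float(g), Float(b), Float(a))
        }
    }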

View File

@@ -1747,7 +1747,16 @@ extension ChatControllerImpl {
                 return
             }
             if let messageId = messageId {
-                if canSendMessagesToChat(strongSelf.presentationInterfaceState) {
+                let intrinsicCanSendMessagesHere = canSendMessagesToChat(strongSelf.presentationInterfaceState)
+                var canSendMessagesHere = intrinsicCanSendMessagesHere
+                if case .standard(.embedded) = strongSelf.presentationInterfaceState.mode {
+                    canSendMessagesHere = false
+                }
+                if case .inline = strongSelf.presentationInterfaceState.mode {
+                    canSendMessagesHere = false
+                }
+                if canSendMessagesHere {
                     let _ = strongSelf.presentVoiceMessageDiscardAlert(action: {
                         if let message = strongSelf.chatDisplayNode.historyNode.messageInCurrentHistoryView(messageId) {
                             strongSelf.updateChatPresentationInterfaceState(animated: true, interactive: true, { $0.updatedInterfaceState({
@@ -1771,11 +1780,18 @@ extension ChatControllerImpl {
                         messageId: messageId,
                         quote: nil
                     )
+                    
                     completion(.immediate, {
                         guard let self else {
                             return
                         }
-                        moveReplyMessageToAnotherChat(selfController: self, replySubject: replySubject)
+                        if intrinsicCanSendMessagesHere {
+                            if let peerId = self.chatLocation.peerId {
+                                moveReplyToChat(selfController: self, peerId: peerId, threadId: self.chatLocation.threadId, replySubject: replySubject, completion: {})
+                            }
+                        } else {
+                            moveReplyMessageToAnotherChat(selfController: self, replySubject: replySubject)
+                        }
                     })
                 }
             } else {

View File

@@ -617,73 +617,8 @@ func moveReplyMessageToAnotherChat(selfController: ChatControllerImpl, replySubject: ChatInterfaceState.ReplyMessageSubject) {
                 selfController.searchResultsController = nil
                 strongController.dismiss()
             } else {
-                if let navigationController = selfController.navigationController as? NavigationController {
-                    for controller in navigationController.viewControllers {
-                        if let maybeChat = controller as? ChatControllerImpl {
-                            if case .peer(peerId) = maybeChat.chatLocation {
-                                var isChatPinnedMessages = false
-                                if case .pinnedMessages = maybeChat.presentationInterfaceState.subject {
-                                    isChatPinnedMessages = true
-                                }
-                                if !isChatPinnedMessages {
-                                    maybeChat.updateChatPresentationInterfaceState(animated: false, interactive: true, { $0.updatedInterfaceState({ $0.withUpdatedReplyMessageSubject(replySubject).withoutSelectionState() }) })
-                                    selfController.dismiss()
-                                    strongController.dismiss()
-                                    return
-                                }
-                            }
-                        }
-                    }
-                }
-                
-                let _ = (ChatInterfaceState.update(engine: selfController.context.engine, peerId: peerId, threadId: threadId, { currentState in
-                    return currentState.withUpdatedReplyMessageSubject(replySubject)
-                })
-                |> deliverOnMainQueue).startStandalone(completed: { [weak selfController] in
-                    guard let selfController else {
-                        return
-                    }
-                    let proceed: (ChatController) -> Void = { [weak selfController] chatController in
-                        guard let selfController else {
-                            return
-                        }
-                        selfController.updateChatPresentationInterfaceState(animated: false, interactive: true, { $0.updatedInterfaceState({ $0.withUpdatedReplyMessageSubject(nil).withUpdatedSendMessageEffect(nil).withoutSelectionState() }) })
-                        
-                        let navigationController: NavigationController?
-                        if let parentController = selfController.parentController {
-                            navigationController = (parentController.navigationController as? NavigationController)
-                        } else {
-                            navigationController = selfController.effectiveNavigationController
-                        }
-                        
-                        if let navigationController = navigationController {
-                            var viewControllers = navigationController.viewControllers
-                            if threadId != nil {
-                                viewControllers.insert(chatController, at: viewControllers.count - 2)
-                            } else {
-                                viewControllers.insert(chatController, at: viewControllers.count - 1)
-                            }
-                            navigationController.setViewControllers(viewControllers, animated: false)
-                            
-                            selfController.controllerNavigationDisposable.set((chatController.ready.get()
-                            |> SwiftSignalKit.filter { $0 }
-                            |> take(1)
-                            |> deliverOnMainQueue).startStrict(next: { [weak navigationController] _ in
-                                viewControllers.removeAll(where: { $0 is PeerSelectionController })
-                                navigationController?.setViewControllers(viewControllers, animated: true)
-                            }))
-                        }
-                    }
-                    
-                    if let threadId = threadId {
-                        let _ = (selfController.context.sharedContext.chatControllerForForumThread(context: selfController.context, peerId: peerId, threadId: threadId)
-                        |> deliverOnMainQueue).startStandalone(next: { chatController in
-                            proceed(chatController)
-                        })
-                    } else {
-                        let chatController = ChatControllerImpl(context: selfController.context, chatLocation: .peer(id: peerId))
-                        chatController.activateInput(type: .text)
-                        proceed(chatController)
-                    }
+                moveReplyToChat(selfController: selfController, peerId: peerId, threadId: threadId, replySubject: replySubject, completion: { [weak strongController] in
+                    strongController?.dismiss()
                 })
             }
         }
@@ -692,6 +627,86 @@ func moveReplyMessageToAnotherChat(selfController: ChatControllerImpl, replySubject: ChatInterfaceState.ReplyMessageSubject) {
     })
 }
 
+func moveReplyToChat(selfController: ChatControllerImpl, peerId: EnginePeer.Id, threadId: Int64?, replySubject: ChatInterfaceState.ReplyMessageSubject, completion: @escaping () -> Void) {
+    if let navigationController = selfController.effectiveNavigationController {
+        for controller in navigationController.viewControllers {
+            if let maybeChat = controller as? ChatControllerImpl {
+                if case .peer(peerId) = maybeChat.chatLocation {
+                    var isChatPinnedMessages = false
+                    if case .pinnedMessages = maybeChat.presentationInterfaceState.subject {
+                        isChatPinnedMessages = true
+                    }
+                    if !isChatPinnedMessages {
+                        maybeChat.updateChatPresentationInterfaceState(animated: false, interactive: true, { $0.updatedInterfaceState({ $0.withUpdatedReplyMessageSubject(replySubject).withoutSelectionState() }) })
+                        
+                        var viewControllers = navigationController.viewControllers
+                        if let index = viewControllers.firstIndex(where: { $0 === maybeChat }), index != viewControllers.count - 1 {
+                            viewControllers.removeSubrange((index + 1) ..< viewControllers.count)
+                            navigationController.setViewControllers(viewControllers, animated: true)
+                        } else {
+                            selfController.dismiss()
+                        }
+                        
+                        completion()
+                        return
+                    }
+                }
+            }
+        }
+    }
+    
+    let _ = (ChatInterfaceState.update(engine: selfController.context.engine, peerId: peerId, threadId: threadId, { currentState in
+        return currentState.withUpdatedReplyMessageSubject(replySubject)
+    })
+    |> deliverOnMainQueue).startStandalone(completed: { [weak selfController] in
+        guard let selfController else {
+            return
+        }
+        let proceed: (ChatController) -> Void = { [weak selfController] chatController in
+            guard let selfController else {
+                return
+            }
+            selfController.updateChatPresentationInterfaceState(animated: false, interactive: true, { $0.updatedInterfaceState({ $0.withUpdatedReplyMessageSubject(nil).withUpdatedSendMessageEffect(nil).withoutSelectionState() }) })
+            
+            let navigationController: NavigationController?
+            if let parentController = selfController.parentController {
+                navigationController = (parentController.navigationController as? NavigationController)
+            } else {
+                navigationController = selfController.effectiveNavigationController
+            }
+            
+            if let navigationController = navigationController {
+                var viewControllers = navigationController.viewControllers
+                if threadId != nil {
+                    viewControllers.insert(chatController, at: viewControllers.count - 2)
+                } else {
+                    viewControllers.insert(chatController, at: viewControllers.count - 1)
+                }
+                navigationController.setViewControllers(viewControllers, animated: false)
+                
+                selfController.controllerNavigationDisposable.set((chatController.ready.get()
+                |> SwiftSignalKit.filter { $0 }
+                |> take(1)
+                |> timeout(0.2, queue: .mainQueue(), alternate: .single(true))
+                |> deliverOnMainQueue).startStrict(next: { [weak navigationController] _ in
+                    viewControllers.removeAll(where: { $0 is PeerSelectionController })
+                    navigationController?.setViewControllers(viewControllers, animated: true)
+                }))
+            }
+        }
+        
+        if let threadId = threadId {
+            let _ = (selfController.context.sharedContext.chatControllerForForumThread(context: selfController.context, peerId: peerId, threadId: threadId)
+            |> deliverOnMainQueue).startStandalone(next: { chatController in
+                proceed(chatController)
+            })
+        } else {
+            let chatController = ChatControllerImpl(context: selfController.context, chatLocation: .peer(id: peerId))
+            chatController.activateInput(type: .text)
+            proceed(chatController)
+        }
+    })
+}
+
 private func chatLinkOptions(selfController: ChatControllerImpl, sourceNode: ASDisplayNode, getContextController: @escaping () -> ContextController?, replySelectionState: Promise<ChatControllerSubject.MessageOptionsInfo.SelectionState>) -> ContextController.Source? {
     guard let peerId = selfController.chatLocation.peerId else {
         return nil
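
One behavioral addition over the removed copy of this logic: the ready signal now falls through after 0.2 seconds via timeout(0.2, queue: .mainQueue(), alternate: .single(true)), so the PeerSelectionController placeholders are cleaned out of the stack even if the target chat never reports ready. The operator chain in isolation (SwiftSignalKit; illustrative):

    // Wait for the chat to become ready, but give up after 0.2s and proceed anyway.
    let readyOnce: Signal<Bool, NoError> = chatController.ready.get()
    |> SwiftSignalKit.filter { $0 }
    |> take(1)
    |> timeout(0.2, queue: .mainQueue(), alternate: .single(true))
    |> deliverOnMainQueue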

View File

@@ -282,7 +282,7 @@ private func canViewReadStats(message: Message, participantCount: Int?, isMessageRead
 
 func canReplyInChat(_ chatPresentationInterfaceState: ChatPresentationInterfaceState, accountPeerId: PeerId) -> Bool {
     if case let .customChatContents(contents) = chatPresentationInterfaceState.subject, case .hashTagSearch = contents.kind {
-        return false
+        return true
     }
     if case .customChatContents = chatPresentationInterfaceState.chatLocation {
         return true
@@ -303,7 +303,7 @@ func canReplyInChat(_ chatPresentationInterfaceState: ChatPresentationInterfaceState, accountPeerId: PeerId) -> Bool {
     }
     switch chatPresentationInterfaceState.mode {
     case .inline:
-        return false
+        return true
     case .standard(.embedded):
         return false
     default:
@@ -905,15 +905,17 @@ func contextMenuForChatPresentationInterfaceState(chatPresentationInterfaceState
         messageActions.editTags = Set()
     }
 
-    return (MessageContextMenuData(
+    let data = MessageContextMenuData(
         starStatus: stickerSaveStatus,
-        canReply: canReply && !isEmbeddedMode,
+        canReply: canReply,
         canPin: canPin && !isEmbeddedMode,
         canEdit: canEdit && !isEmbeddedMode,
         canSelect: canSelect && !isEmbeddedMode,
         resourceStatus: resourceStatus,
         messageActions: messageActions
-    ), updatingMessageMedia, infoSummaryData, appConfig, isMessageRead, messageViewsPrivacyTips, availableReactions, translationSettings, loggingSettings, notificationSoundList, accountPeer)
+    )
+    
+    return (data, updatingMessageMedia, infoSummaryData, appConfig, isMessageRead, messageViewsPrivacyTips, availableReactions, translationSettings, loggingSettings, notificationSoundList, accountPeer)
 }
 
 return dataSignal

View File

@@ -323,7 +323,7 @@ final class ChatTextInputActionButtonsNode: ASDisplayNode, ChatSendMessageAction
     }
 
     func makeCustomContents() -> UIView? {
-        if self.sendButtonHasApplyIcon {
+        if self.sendButtonHasApplyIcon || self.effectBadgeView != nil {
             let result = UIView()
             result.frame = self.bounds
             if let copyView = self.sendContainerNode.view.snapshotView(afterScreenUpdates: false) {

View File

@@ -463,7 +463,7 @@ public final class OngoingGroupCallContext {
 
     private let audioSessionActiveDisposable = MetaDisposable()
 
-    init(queue: Queue, inputDeviceId: String, outputDeviceId: String, audioSessionActive: Signal<Bool, NoError>, video: OngoingCallVideoCapturer?, requestMediaChannelDescriptions: @escaping (Set<UInt32>, @escaping ([MediaChannelDescription]) -> Void) -> Disposable, rejoinNeeded: @escaping () -> Void, outgoingAudioBitrateKbit: Int32?, videoContentType: VideoContentType, enableNoiseSuppression: Bool, disableAudioInput: Bool, preferX264: Bool, logPath: String) {
+    init(queue: Queue, inputDeviceId: String, outputDeviceId: String, audioSessionActive: Signal<Bool, NoError>, video: OngoingCallVideoCapturer?, requestMediaChannelDescriptions: @escaping (Set<UInt32>, @escaping ([MediaChannelDescription]) -> Void) -> Disposable, rejoinNeeded: @escaping () -> Void, outgoingAudioBitrateKbit: Int32?, videoContentType: VideoContentType, enableNoiseSuppression: Bool, disableAudioInput: Bool, preferX264: Bool, logPath: String, onMutedSpeechActivityDetected: @escaping (Bool) -> Void) {
         self.queue = queue
 
         #if os(iOS)
@@ -576,6 +576,9 @@ public final class OngoingGroupCallContext {
                 disableAudioInput: disableAudioInput,
                 preferX264: preferX264,
                 logPath: logPath,
+                onMutedSpeechActivityDetected: { value in
+                    onMutedSpeechActivityDetected(value)
+                },
                 audioDevice: audioDevice
             )
             #else
@@ -669,7 +672,8 @@ public final class OngoingGroupCallContext {
                 enableNoiseSuppression: enableNoiseSuppression,
                 disableAudioInput: disableAudioInput,
                 preferX264: preferX264,
-                logPath: logPath
+                logPath: logPath,
+                onMutedSpeechActivityDetected: { _ in }
             )
             #endif
@@ -1109,10 +1113,10 @@ public final class OngoingGroupCallContext {
         }
     }
 
-    public init(inputDeviceId: String = "", outputDeviceId: String = "", audioSessionActive: Signal<Bool, NoError>, video: OngoingCallVideoCapturer?, requestMediaChannelDescriptions: @escaping (Set<UInt32>, @escaping ([MediaChannelDescription]) -> Void) -> Disposable, rejoinNeeded: @escaping () -> Void, outgoingAudioBitrateKbit: Int32?, videoContentType: VideoContentType, enableNoiseSuppression: Bool, disableAudioInput: Bool, preferX264: Bool, logPath: String) {
+    public init(inputDeviceId: String = "", outputDeviceId: String = "", audioSessionActive: Signal<Bool, NoError>, video: OngoingCallVideoCapturer?, requestMediaChannelDescriptions: @escaping (Set<UInt32>, @escaping ([MediaChannelDescription]) -> Void) -> Disposable, rejoinNeeded: @escaping () -> Void, outgoingAudioBitrateKbit: Int32?, videoContentType: VideoContentType, enableNoiseSuppression: Bool, disableAudioInput: Bool, preferX264: Bool, logPath: String, onMutedSpeechActivityDetected: @escaping (Bool) -> Void) {
         let queue = self.queue
         self.impl = QueueLocalObject(queue: queue, generate: {
-            return Impl(queue: queue, inputDeviceId: inputDeviceId, outputDeviceId: outputDeviceId, audioSessionActive: audioSessionActive, video: video, requestMediaChannelDescriptions: requestMediaChannelDescriptions, rejoinNeeded: rejoinNeeded, outgoingAudioBitrateKbit: outgoingAudioBitrateKbit, videoContentType: videoContentType, enableNoiseSuppression: enableNoiseSuppression, disableAudioInput: disableAudioInput, preferX264: preferX264, logPath: logPath)
+            return Impl(queue: queue, inputDeviceId: inputDeviceId, outputDeviceId: outputDeviceId, audioSessionActive: audioSessionActive, video: video, requestMediaChannelDescriptions: requestMediaChannelDescriptions, rejoinNeeded: rejoinNeeded, outgoingAudioBitrateKbit: outgoingAudioBitrateKbit, videoContentType: videoContentType, enableNoiseSuppression: enableNoiseSuppression, disableAudioInput: disableAudioInput, preferX264: preferX264, logPath: logPath, onMutedSpeechActivityDetected: onMutedSpeechActivityDetected)
         })
     }
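
Since the new closure parameter has no default, every caller must now pass it. A call-site sketch using the same neutral arguments the screencast context passes above (illustrative):

    let context = OngoingGroupCallContext(
        audioSessionActive: .single(true),
        video: nil,
        requestMediaChannelDescriptions: { _, _ in EmptyDisposable },
        rejoinNeeded: {},
        outgoingAudioBitrateKbit: nil,
        videoContentType: .none,
        enableNoiseSuppression: false,
        disableAudioInput: false,
        preferX264: false,
        logPath: "",
        onMutedSpeechActivityDetected: { value in
            // Delivered from the call context's queue; hop to the main queue for UI work.
            print("muted speech activity: \(value)")
        }
    )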

View File

@@ -414,6 +414,7 @@ typedef NS_ENUM(int32_t, OngoingGroupCallRequestedVideoQuality) {
     disableAudioInput:(bool)disableAudioInput
     preferX264:(bool)preferX264
     logPath:(NSString * _Nonnull)logPath
+    onMutedSpeechActivityDetected:(void (^ _Nullable)(bool))onMutedSpeechActivityDetected
     audioDevice:(SharedCallAudioDevice * _Nullable)audioDevice;
 
 - (void)stop;

View File

@@ -1669,6 +1669,8 @@ private:
     rtc::Thread *_currentAudioDeviceModuleThread;
 
     SharedCallAudioDevice * _audioDevice;
+
+    void (^_onMutedSpeechActivityDetected)(bool);
 }
 
 @end
@@ -1691,6 +1693,7 @@ private:
     disableAudioInput:(bool)disableAudioInput
     preferX264:(bool)preferX264
     logPath:(NSString * _Nonnull)logPath
+    onMutedSpeechActivityDetected:(void (^ _Nullable)(bool))onMutedSpeechActivityDetected
     audioDevice:(SharedCallAudioDevice * _Nullable)audioDevice {
     self = [super init];
     if (self != nil) {
@@ -1703,6 +1706,8 @@ audioDevice:(SharedCallAudioDevice * _Nullable)audioDevice {
         _networkStateUpdated = [networkStateUpdated copy];
         _videoCapturer = videoCapturer;
 
+        _onMutedSpeechActivityDetected = [onMutedSpeechActivityDetected copy];
+
         _audioDevice = audioDevice;
         std::shared_ptr<tgcalls::ThreadLocalObject<tgcalls::SharedAudioDeviceModule>> audioDeviceModule;
         if (_audioDevice) {
@@ -1917,12 +1922,19 @@ audioDevice:(SharedCallAudioDevice * _Nullable)audioDevice {
                 return std::make_shared<RequestMediaChannelDescriptionTaskImpl>(task);
             },
             .minOutgoingVideoBitrateKbit = minOutgoingVideoBitrateKbit,
-            .createAudioDeviceModule = [weakSelf, queue, disableAudioInput, audioDeviceModule](webrtc::TaskQueueFactory *taskQueueFactory) -> rtc::scoped_refptr<webrtc::AudioDeviceModule> {
+            .createAudioDeviceModule = [weakSelf, queue, disableAudioInput, audioDeviceModule, onMutedSpeechActivityDetected = _onMutedSpeechActivityDetected](webrtc::TaskQueueFactory *taskQueueFactory) -> rtc::scoped_refptr<webrtc::AudioDeviceModule> {
                 if (audioDeviceModule) {
                     return audioDeviceModule->getSyncAssumingSameThread()->audioDeviceModule();
                 } else {
                     rtc::Thread *audioDeviceModuleThread = rtc::Thread::Current();
                     auto resultModule = rtc::make_ref_counted<webrtc::tgcalls_ios_adm::AudioDeviceModuleIOS>(false, disableAudioInput, disableAudioInput ? 2 : 1);
+                    if (resultModule) {
+                        resultModule->mutedSpeechDetectionChanged = ^(bool value) {
+                            if (onMutedSpeechActivityDetected) {
+                                onMutedSpeechActivityDetected(value);
+                            }
+                        };
+                    }
                     [queue dispatch:^{
                         __strong GroupCallThreadLocalContext *strongSelf = weakSelf;
                         if (strongSelf) {
@@ -1932,6 +1944,14 @@ audioDevice:(SharedCallAudioDevice * _Nullable)audioDevice {
                     }];
                     return resultModule;
                 }
+            },
+            .onMutedSpeechActivityDetected = [weakSelf, queue](bool value) {
+                [queue dispatch:^{
+                    __strong GroupCallThreadLocalContext *strongSelf = weakSelf;
+                    if (strongSelf && strongSelf->_onMutedSpeechActivityDetected) {
+                        strongSelf->_onMutedSpeechActivityDetected(value);
+                    }
+                }];
             }
         }));
     }

@@ -1 +1 @@
-Subproject commit 8721352f452128adec41c254b8407a4cb18cbbeb
+Subproject commit 8344f8ca2a043d0812743260c31a086a82190489