[WIP] Conference calls

This commit is contained in:
Isaac 2024-12-14 01:16:30 +08:00
parent d7ca478f24
commit abdfc238f8
11 changed files with 582 additions and 159 deletions

View File

@@ -49,6 +49,9 @@ public final class PresentationCallImpl: PresentationCall {
     private var callContextState: OngoingCallContextState?
     private var ongoingContext: OngoingCallContext?
     private var ongoingContextStateDisposable: Disposable?
+    private var ongoingContextIsFailedDisposable: Disposable?
+    private var ongoingContextIsDroppedDisposable: Disposable?
+    private var didDropCall = false
     private var sharedAudioDevice: OngoingCallContext.AudioDevice?
     private var requestedVideoAspect: Float?
     private var reception: Int32?
@@ -136,6 +139,14 @@ public final class PresentationCallImpl: PresentationCall {
     private var localVideoEndpointId: String?
     private var remoteVideoEndpointId: String?

+    private var conferenceSignalingDataDisposable: Disposable?
+
+    private var conferenceIsConnected: Bool = false
+    private var notifyConferenceIsConnectedTimer: Foundation.Timer?
+    private var remoteConferenceIsConnectedTimestamp: Double?
+    private let remoteConferenceIsConnected = ValuePromise<Bool>(false, ignoreRepeated: true)
+    private var remoteConferenceIsConnectedTimer: Foundation.Timer?
+
     init(
         context: AccountContext,
         audioSession: ManagedAudioSession,
@@ -296,7 +307,7 @@ public final class PresentationCallImpl: PresentationCall {
         if let data = context.currentAppConfiguration.with({ $0 }).data, let _ = data["ios_killswitch_disable_call_device"] {
             self.sharedAudioDevice = nil
         } else {
-            self.sharedAudioDevice = OngoingCallContext.AudioDevice.create(enableSystemMute: context.sharedContext.immediateExperimentalUISettings.experimentalCallMute)
+            self.sharedAudioDevice = OngoingCallContext.AudioDevice.create(enableSystemMute: false)
         }

         self.audioSessionActiveDisposable = (self.audioSessionActive.get()
@@ -315,6 +326,18 @@ public final class PresentationCallImpl: PresentationCall {
             self.proximityManagerIndex = DeviceProximityManager.shared().add { _ in
             }
         }
+
+        if self.isExpectedToBeConference {
+            self.conferenceSignalingDataDisposable = self.context.account.callSessionManager.beginReceivingCallSignalingData(internalId: self.internalId, { [weak self] dataList in
+                Queue.mainQueue().async {
+                    guard let self else {
+                        return
+                    }
+                    self.processConferenceSignalingData(dataList: dataList)
+                }
+            })
+        }
     }

     deinit {
@@ -330,6 +353,12 @@ public final class PresentationCallImpl: PresentationCall {
         self.screencastAudioDataDisposable.dispose()
         self.screencastStateDisposable.dispose()
         self.conferenceCallDisposable?.dispose()
+        self.ongoingContextStateDisposable?.dispose()
+        self.ongoingContextIsFailedDisposable?.dispose()
+        self.ongoingContextIsDroppedDisposable?.dispose()
+        self.notifyConferenceIsConnectedTimer?.invalidate()
+        self.conferenceSignalingDataDisposable?.dispose()
+        self.remoteConferenceIsConnectedTimer?.invalidate()

         if let dropCallKitCallTimer = self.dropCallKitCallTimer {
             dropCallKitCallTimer.invalidate()
@@ -559,16 +588,14 @@ public final class PresentationCallImpl: PresentationCall {
         switch sessionState.state {
         case .requesting:
             if let _ = audioSessionControl {
-                if self.isExpectedToBeConference {
-                } else {
-                    self.audioSessionShouldBeActive.set(true)
-                }
+                self.audioSessionShouldBeActive.set(true)
             }
         case let .active(id, key, keyVisualHash, connections, maxLayer, version, customParameters, allowsP2P, conferenceCall):
             if let conferenceCall, self.conferenceCallDisposable == nil {
                 presentationState = PresentationCallState(state: .connecting(nil), videoState: mappedVideoState, remoteVideoState: mappedRemoteVideoState, remoteAudioState: mappedRemoteAudioState, remoteBatteryLevel: mappedRemoteBatteryLevel)

                 self.conferenceCallDisposable = (self.context.engine.calls.getCurrentGroupCall(callId: conferenceCall.id, accessHash: conferenceCall.accessHash)
+                |> delay(sessionState.isOutgoing ? 0.0 : 2.0, queue: .mainQueue())
                 |> deliverOnMainQueue).startStrict(next: { [weak self] result in
                     guard let self, let result else {
                         return
@@ -593,11 +620,14 @@ public final class PresentationCallImpl: PresentationCall {
                     invite: nil,
                     joinAsPeerId: nil,
                     isStream: false,
-                    encryptionKey: key
+                    encryptionKey: key,
+                    conferenceFromCallId: id,
+                    isConference: true,
+                    sharedAudioDevice: self.sharedAudioDevice
                 )
                 self.conferenceCall = conferenceCall
-                conferenceCall.setIsMuted(action: self.isMutedValue ? .muted(isPushToTalkActive: false) : .unmuted)
+                conferenceCall.setIsMuted(action: .muted(isPushToTalkActive: !self.isMutedValue))

                 let accountPeerId = conferenceCall.account.peerId
                 let videoEndpoints: Signal<(local: String?, remote: PresentationGroupCallRequestedVideo?), NoError> = conferenceCall.members
@@ -624,24 +654,27 @@ public final class PresentationCallImpl: PresentationCall {
                     return lhs == rhs
                 })

+                let remoteIsConnectedAggregated = combineLatest(queue: .mainQueue(),
+                    self.remoteConferenceIsConnected.get(),
+                    conferenceCall.hasActiveIncomingData
+                )
+                |> map { remoteConferenceIsConnected, hasActiveIncomingData -> Bool in
+                    return remoteConferenceIsConnected || hasActiveIncomingData
+                }
+                |> distinctUntilChanged
+
                 var startTimestamp: Double?
                 self.ongoingContextStateDisposable = (combineLatest(queue: .mainQueue(),
                     conferenceCall.state,
-                    videoEndpoints
+                    videoEndpoints,
+                    conferenceCall.signalBars,
+                    conferenceCall.isFailed,
+                    remoteIsConnectedAggregated
                 )
-                |> deliverOnMainQueue).startStrict(next: { [weak self] callState, videoEndpoints in
+                |> deliverOnMainQueue).startStrict(next: { [weak self] callState, videoEndpoints, signalBars, isFailed, remoteIsConnectedAggregated in
                     guard let self else {
                         return
                     }
-                    let mappedState: PresentationCallState.State
-                    switch callState.networkState {
-                    case .connecting:
-                        mappedState = .connecting(nil)
-                    case .connected:
-                        let timestamp = startTimestamp ?? CFAbsoluteTimeGetCurrent()
-                        startTimestamp = timestamp
-                        mappedState = .active(timestamp, nil, keyVisualHash)
-                    }

                     var mappedLocalVideoState: PresentationCallState.VideoState = .inactive
                     var mappedRemoteVideoState: PresentationCallState.RemoteVideoState = .inactive
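Note: the remoteIsConnectedAggregated gate above treats the peer as reachable if either the remote side explicitly reports being connected over signaling (remoteConferenceIsConnected) or media is actually arriving (hasActiveIncomingData). A minimal standalone sketch of the same aggregation, assuming SwiftSignalKit and two stand-in inputs in place of the real signals:

    import SwiftSignalKit

    // Stand-ins for the two real inputs combined in the diff.
    let remoteSaysConnected = ValuePromise<Bool>(false, ignoreRepeated: true)
    let seesIncomingMedia = ValuePromise<Bool>(false, ignoreRepeated: true)

    // Either hint alone is enough to consider the peer reachable.
    let isPeerReachable: Signal<Bool, NoError> = combineLatest(queue: .mainQueue(),
        remoteSaysConnected.get(),
        seesIncomingMedia.get()
    )
    |> map { remote, media -> Bool in
        return remote || media
    }
    |> distinctUntilChanged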
@@ -664,13 +697,65 @@ public final class PresentationCallImpl: PresentationCall {
                         conferenceCall.setRequestedVideoList(items: requestedVideo)
                     }

-                    self.statePromise.set(PresentationCallState(
-                        state: mappedState,
-                        videoState: mappedLocalVideoState,
-                        remoteVideoState: mappedRemoteVideoState,
-                        remoteAudioState: .active,
-                        remoteBatteryLevel: .normal
-                    ))
+                    var isConnected = false
+                    let mappedState: PresentationCallState.State
+                    if isFailed {
+                        mappedState = .terminating(.error(.disconnected))
+                    } else {
+                        switch callState.networkState {
+                        case .connecting:
+                            mappedState = .connecting(keyVisualHash)
+                        case .connected:
+                            isConnected = true
+                            if remoteIsConnectedAggregated {
+                                let timestamp = startTimestamp ?? CFAbsoluteTimeGetCurrent()
+                                startTimestamp = timestamp
+                                mappedState = .active(timestamp, signalBars, keyVisualHash)
+                            } else {
+                                mappedState = .connecting(keyVisualHash)
+                            }
+                        }
+                    }
+                    self.updateConferenceIsConnected(isConnected: isConnected)
+
+                    if !self.didDropCall && !self.droppedCall {
+                        let presentationState = PresentationCallState(
+                            state: mappedState,
+                            videoState: mappedLocalVideoState,
+                            remoteVideoState: mappedRemoteVideoState,
+                            remoteAudioState: .active,
+                            remoteBatteryLevel: .normal
+                        )
+                        self.statePromise.set(presentationState)
+                        self.updateTone(presentationState, callContextState: nil, previous: nil)
+                    }
+                })
+
+                self.ongoingContextIsFailedDisposable = (conferenceCall.isFailed
+                |> filter { $0 }
+                |> take(1)
+                |> deliverOnMainQueue).startStrict(next: { [weak self] _ in
+                    guard let self else {
+                        return
+                    }
+                    if !self.didDropCall {
+                        self.didDropCall = true
+                        self.callSessionManager.drop(internalId: self.internalId, reason: .disconnect, debugLog: .single(nil))
+                    }
+                })
+
+                self.ongoingContextIsDroppedDisposable = (conferenceCall.canBeRemoved
+                |> filter { $0 }
+                |> take(1)
+                |> deliverOnMainQueue).startStrict(next: { [weak self] _ in
+                    guard let self else {
+                        return
+                    }
+                    if !self.didDropCall {
+                        self.didDropCall = true
+                        self.callSessionManager.drop(internalId: self.internalId, reason: .disconnect, debugLog: .single(nil))
+                    }
                 })

                 var audioLevelId: UInt32?
@@ -707,12 +792,13 @@ public final class PresentationCallImpl: PresentationCall {
                     self.createConferenceIfPossible()
                 }

+                self.audioSessionShouldBeActive.set(true)
+
                 if self.isExpectedToBeConference {
                     if sessionState.isOutgoing {
                         self.callKitIntegration?.reportOutgoingCallConnected(uuid: sessionState.id, at: Date())
                     }
                 } else {
-                    self.audioSessionShouldBeActive.set(true)
                     if let _ = audioSessionControl, !wasActive || previousControl == nil {
                         let logName = "\(id.id)_\(id.accessHash)"
@ -776,10 +862,7 @@ public final class PresentationCallImpl: PresentationCall {
} }
} }
case let .terminated(_, _, options): case let .terminated(_, _, options):
if self.isExpectedToBeConference { self.audioSessionShouldBeActive.set(true)
} else {
self.audioSessionShouldBeActive.set(true)
}
if wasActive { if wasActive {
let debugLogValue = Promise<String?>() let debugLogValue = Promise<String?>()
self.ongoingContext?.stop(sendDebugLogs: options.contains(.sendDebugLogs), debugLogValue: debugLogValue) self.ongoingContext?.stop(sendDebugLogs: options.contains(.sendDebugLogs), debugLogValue: debugLogValue)
@@ -933,6 +1016,88 @@ public final class PresentationCallImpl: PresentationCall {
         }
     }

+    private func updateConferenceIsConnected(isConnected: Bool) {
+        if self.conferenceIsConnected != isConnected {
+            self.conferenceIsConnected = isConnected
+            self.sendConferenceIsConnectedState()
+        }
+        if self.notifyConferenceIsConnectedTimer == nil {
+            self.notifyConferenceIsConnectedTimer = Foundation.Timer.scheduledTimer(withTimeInterval: 1.0, repeats: true, block: { [weak self] _ in
+                guard let self else {
+                    return
+                }
+                self.sendConferenceIsConnectedState()
+            })
+        }
+    }
+
+    private func sendConferenceIsConnectedState() {
+        self.sendConferenceSignalingMessage(dict: ["_$": "s", "c": self.conferenceIsConnected])
+    }
+
+    private func processConferenceSignalingData(dataList: [Data]) {
+        for data in dataList {
+            if let dict = try? JSONSerialization.jsonObject(with: data) as? [String: Any] {
+                self.processConferenceSignalingMessage(dict: dict)
+            }
+        }
+    }
+
+    private func processConferenceSignalingMessage(dict: [String: Any]) {
+        if let type = dict["_$"] as? String {
+            switch type {
+            case "s":
+                let isConnected = dict["c"] as? Bool ?? false
+                self.remoteConferenceIsConnected.set(isConnected)
+                if isConnected {
+                    self.remoteConferenceIsConnectedTimestamp = CFAbsoluteTimeGetCurrent()
+                }
+                if self.remoteConferenceIsConnectedTimer == nil && isConnected {
+                    self.remoteConferenceIsConnectedTimer = Foundation.Timer.scheduledTimer(withTimeInterval: 0.1, repeats: true, block: { [weak self] _ in
+                        guard let self else {
+                            return
+                        }
+                        let timestamp = CFAbsoluteTimeGetCurrent()
+                        if let remoteConferenceIsConnectedTimestamp = self.remoteConferenceIsConnectedTimestamp {
+                            if remoteConferenceIsConnectedTimestamp + 4.0 < timestamp {
+                                self.remoteConferenceIsConnected.set(false)
+                            }
+                            if remoteConferenceIsConnectedTimestamp + 10.0 < timestamp {
+                                if !self.didDropCall {
+                                    self.didDropCall = true
+                                    let presentationState = PresentationCallState(
+                                        state: .terminating(.error(.disconnected)),
+                                        videoState: .inactive,
+                                        remoteVideoState: .inactive,
+                                        remoteAudioState: .active,
+                                        remoteBatteryLevel: .normal
+                                    )
+                                    self.statePromise.set(presentationState)
+                                    self.updateTone(presentationState, callContextState: nil, previous: nil)
+                                    self.callSessionManager.drop(internalId: self.internalId, reason: .disconnect, debugLog: .single(nil))
+                                }
+                            }
+                        }
+                    })
+                }
+            default:
+                break
+            }
+        }
+    }
+
+    private func sendConferenceSignalingMessage(dict: [String: Any]) {
+        if let data = try? JSONSerialization.data(withJSONObject: dict) {
+            self.context.account.callSessionManager.sendSignalingData(internalId: self.internalId, data: data)
+        }
+    }
+
     private func updateIsAudioSessionActive(_ value: Bool) {
         if self.isAudioSessionActive != value {
             self.isAudioSessionActive = value
@@ -1010,7 +1175,7 @@ public final class PresentationCallImpl: PresentationCall {
         self.isMutedValue = value
         self.isMutedPromise.set(self.isMutedValue)
         self.ongoingContext?.setIsMuted(self.isMutedValue)
-        self.conferenceCall?.setIsMuted(action: self.isMutedValue ? .muted(isPushToTalkActive: false) : .unmuted)
+        self.conferenceCall?.setIsMuted(action: .muted(isPushToTalkActive: !self.isMutedValue))
     }

     public func requestVideo() {
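Note: the conference signaling messages introduced in this file are plain JSON dictionaries sent over the existing call signaling channel, keyed by a "_$" type tag; the only type so far is "s" (connection state) carrying a boolean "c". The sender repeats it every second as a heartbeat, and the receiver marks the remote as disconnected after 4 seconds of silence and drops the call after 10. A small sketch of the wire format, using Foundation's JSONSerialization as the code above does:

    import Foundation

    // Encode a heartbeat the way sendConferenceIsConnectedState does.
    let message: [String: Any] = ["_$": "s", "c": true]
    let payload = try? JSONSerialization.data(withJSONObject: message)

    // Decode on the receiving side, mirroring processConferenceSignalingMessage.
    if let payload,
       let dict = try? JSONSerialization.jsonObject(with: payload) as? [String: Any],
       dict["_$"] as? String == "s" {
        let isConnected = dict["c"] as? Bool ?? false
        print("remote connected:", isConnected)
    }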

View File

@@ -703,7 +703,10 @@ public final class PresentationCallManagerImpl: PresentationCallManager {
             invite: nil,
             joinAsPeerId: nil,
             isStream: false,
-            encryptionKey: nil
+            encryptionKey: nil,
+            conferenceFromCallId: nil,
+            isConference: false,
+            sharedAudioDevice: nil
         )
         call.schedule(timestamp: timestamp)
@@ -743,7 +746,10 @@ public final class PresentationCallManagerImpl: PresentationCallManager {
                     invite: nil,
                     joinAsPeerId: nil,
                     isStream: false,
-                    encryptionKey: nil
+                    encryptionKey: nil,
+                    conferenceFromCallId: nil,
+                    isConference: false,
+                    sharedAudioDevice: nil
                 )
                 strongSelf.updateCurrentGroupCall(call)
                 strongSelf.currentGroupCallPromise.set(.single(call))
@@ -924,7 +930,10 @@ public final class PresentationCallManagerImpl: PresentationCallManager {
                     invite: invite,
                     joinAsPeerId: joinAsPeerId,
                     isStream: initialCall.isStream ?? false,
-                    encryptionKey: nil
+                    encryptionKey: nil,
+                    conferenceFromCallId: nil,
+                    isConference: false,
+                    sharedAudioDevice: nil
                 )
                 strongSelf.updateCurrentGroupCall(call)
                 strongSelf.currentGroupCallPromise.set(.single(call))

View File

@@ -321,10 +321,10 @@ private extension CurrentImpl {
         }
     }

-    func stop() {
+    func stop(account: Account, reportCallId: CallId?) {
         switch self {
         case let .call(callContext):
-            callContext.stop()
+            callContext.stop(account: account, reportCallId: reportCallId)
         case .mediaStream, .externalMediaStream:
             break
         }
@@ -712,6 +712,30 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
     }
     private var myAudioLevelDisposable = MetaDisposable()

+    private var hasActiveIncomingDataValue: Bool = false {
+        didSet {
+            if self.hasActiveIncomingDataValue != oldValue {
+                self.hasActiveIncomingDataPromise.set(self.hasActiveIncomingDataValue)
+            }
+        }
+    }
+    private let hasActiveIncomingDataPromise = ValuePromise<Bool>(false)
+    var hasActiveIncomingData: Signal<Bool, NoError> {
+        return self.hasActiveIncomingDataPromise.get()
+    }
+    private var hasActiveIncomingDataDisposable: Disposable?
+    private var hasActiveIncomingDataTimer: Foundation.Timer?
+
+    private let isFailedPromise = ValuePromise<Bool>(false)
+    var isFailed: Signal<Bool, NoError> {
+        return self.isFailedPromise.get()
+    }
+
+    private let signalBarsPromise = Promise<Int32>(0)
+    var signalBars: Signal<Int32, NoError> {
+        return self.signalBarsPromise.get()
+    }
+
     private var audioSessionControl: ManagedAudioSessionControl?
     private var audioSessionDisposable: Disposable?
     private let audioSessionShouldBeActive = ValuePromise<Bool>(false, ignoreRepeated: true)
@@ -842,6 +866,10 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
     public let isStream: Bool
     private let encryptionKey: Data?
+    private let sharedAudioDevice: OngoingCallContext.AudioDevice?
+
+    private let conferenceFromCallId: CallId?
+    private let isConference: Bool

     public var onMutedSpeechActivityDetected: ((Bool) -> Void)?

@@ -857,7 +885,10 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
         invite: String?,
         joinAsPeerId: EnginePeer.Id?,
         isStream: Bool,
-        encryptionKey: Data?
+        encryptionKey: Data?,
+        conferenceFromCallId: CallId?,
+        isConference: Bool,
+        sharedAudioDevice: OngoingCallContext.AudioDevice?
     ) {
         self.account = accountContext.account
         self.accountContext = accountContext
@@ -883,105 +914,110 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
         self.hasVideo = false
         self.hasScreencast = false
         self.isStream = isStream
+        self.conferenceFromCallId = conferenceFromCallId
+        self.isConference = isConference
         self.encryptionKey = encryptionKey
+        self.sharedAudioDevice = sharedAudioDevice

-        var didReceiveAudioOutputs = false
-
-        if !audioSession.getIsHeadsetPluggedIn() {
-            self.currentSelectedAudioOutputValue = .speaker
-            self.audioOutputStatePromise.set(.single(([], .speaker)))
-        }
-
-        self.audioSessionDisposable = audioSession.push(audioSessionType: self.isStream ? .play(mixWithOthers: false) : .voiceCall, activateImmediately: true, manualActivate: { [weak self] control in
-            Queue.mainQueue().async {
-                if let strongSelf = self {
-                    strongSelf.updateSessionState(internalState: strongSelf.internalState, audioSessionControl: control)
-                }
-            }
-        }, deactivate: { [weak self] _ in
-            return Signal { subscriber in
-                Queue.mainQueue().async {
-                    if let strongSelf = self {
-                        strongSelf.updateIsAudioSessionActive(false)
-                        strongSelf.updateSessionState(internalState: strongSelf.internalState, audioSessionControl: nil)
-
-                        if strongSelf.isStream {
-                            let _ = strongSelf.leave(terminateIfPossible: false)
-                        }
-                    }
-                    subscriber.putCompletion()
-                }
-                return EmptyDisposable
-            }
-        }, availableOutputsChanged: { [weak self] availableOutputs, currentOutput in
-            Queue.mainQueue().async {
-                guard let strongSelf = self else {
-                    return
-                }
-                strongSelf.audioOutputStateValue = (availableOutputs, currentOutput)
-
-                var signal: Signal<([AudioSessionOutput], AudioSessionOutput?), NoError> = .single((availableOutputs, currentOutput))
-                if !didReceiveAudioOutputs {
-                    didReceiveAudioOutputs = true
-                    if currentOutput == .speaker {
-                        signal = .single((availableOutputs, .speaker))
-                        |> then(
-                            signal
-                            |> delay(1.0, queue: Queue.mainQueue())
-                        )
-                    }
-                }
-                strongSelf.audioOutputStatePromise.set(signal)
-            }
-        })
-
-        self.audioSessionShouldBeActiveDisposable = (self.audioSessionShouldBeActive.get()
-        |> deliverOnMainQueue).start(next: { [weak self] value in
-            guard let strongSelf = self else {
-                return
-            }
-            if value {
-                if let audioSessionControl = strongSelf.audioSessionControl {
-                    if !strongSelf.isStream, let callKitIntegration = strongSelf.callKitIntegration {
-                        _ = callKitIntegration.audioSessionActive
-                        |> filter { $0 }
-                        |> timeout(2.0, queue: Queue.mainQueue(), alternate: Signal { subscriber in
-                            subscriber.putNext(true)
-                            subscriber.putCompletion()
-                            return EmptyDisposable
-                        })
-                    } else {
-                        audioSessionControl.activate({ _ in
-                            Queue.mainQueue().async {
-                                guard let strongSelf = self else {
-                                    return
-                                }
-                                strongSelf.audioSessionActive.set(.single(true))
-                            }
-                        })
-                    }
-                } else {
-                    strongSelf.audioSessionActive.set(.single(false))
-                }
-            } else {
-                strongSelf.audioSessionActive.set(.single(false))
-            }
-        })
-
-        self.audioSessionActiveDisposable = (self.audioSessionActive.get()
-        |> deliverOnMainQueue).start(next: { [weak self] value in
-            if let strongSelf = self {
-                strongSelf.updateIsAudioSessionActive(value)
-            }
-        })
-
-        self.audioOutputStateDisposable = (self.audioOutputStatePromise.get()
-        |> deliverOnMainQueue).start(next: { [weak self] availableOutputs, currentOutput in
-            guard let strongSelf = self else {
-                return
-            }
-            strongSelf.updateAudioOutputs(availableOutputs: availableOutputs, currentOutput: currentOutput)
-        })
+        if self.sharedAudioDevice == nil {
+            var didReceiveAudioOutputs = false
+
+            if !audioSession.getIsHeadsetPluggedIn() {
+                self.currentSelectedAudioOutputValue = .speaker
+                self.audioOutputStatePromise.set(.single(([], .speaker)))
+            }
+
+            self.audioSessionDisposable = audioSession.push(audioSessionType: self.isStream ? .play(mixWithOthers: false) : .voiceCall, activateImmediately: true, manualActivate: { [weak self] control in
+                Queue.mainQueue().async {
+                    if let strongSelf = self {
+                        strongSelf.updateSessionState(internalState: strongSelf.internalState, audioSessionControl: control)
+                    }
+                }
+            }, deactivate: { [weak self] _ in
+                return Signal { subscriber in
+                    Queue.mainQueue().async {
+                        if let strongSelf = self {
+                            strongSelf.updateIsAudioSessionActive(false)
+                            strongSelf.updateSessionState(internalState: strongSelf.internalState, audioSessionControl: nil)
+
+                            if strongSelf.isStream {
+                                let _ = strongSelf.leave(terminateIfPossible: false)
+                            }
+                        }
+                        subscriber.putCompletion()
+                    }
+                    return EmptyDisposable
+                }
+            }, availableOutputsChanged: { [weak self] availableOutputs, currentOutput in
+                Queue.mainQueue().async {
+                    guard let strongSelf = self else {
+                        return
+                    }
+                    strongSelf.audioOutputStateValue = (availableOutputs, currentOutput)
+
+                    var signal: Signal<([AudioSessionOutput], AudioSessionOutput?), NoError> = .single((availableOutputs, currentOutput))
+                    if !didReceiveAudioOutputs {
+                        didReceiveAudioOutputs = true
+                        if currentOutput == .speaker {
+                            signal = .single((availableOutputs, .speaker))
+                            |> then(
+                                signal
+                                |> delay(1.0, queue: Queue.mainQueue())
+                            )
+                        }
+                    }
+                    strongSelf.audioOutputStatePromise.set(signal)
+                }
+            })
+
+            self.audioSessionShouldBeActiveDisposable = (self.audioSessionShouldBeActive.get()
+            |> deliverOnMainQueue).start(next: { [weak self] value in
+                guard let strongSelf = self else {
+                    return
+                }
+                if value {
+                    if let audioSessionControl = strongSelf.audioSessionControl {
+                        if !strongSelf.isStream, let callKitIntegration = strongSelf.callKitIntegration {
+                            _ = callKitIntegration.audioSessionActive
+                            |> filter { $0 }
+                            |> timeout(2.0, queue: Queue.mainQueue(), alternate: Signal { subscriber in
+                                subscriber.putNext(true)
+                                subscriber.putCompletion()
+                                return EmptyDisposable
+                            })
+                        } else {
+                            audioSessionControl.activate({ _ in
+                                Queue.mainQueue().async {
+                                    guard let strongSelf = self else {
+                                        return
+                                    }
+                                    strongSelf.audioSessionActive.set(.single(true))
+                                }
+                            })
+                        }
+                    } else {
+                        strongSelf.audioSessionActive.set(.single(false))
+                    }
+                } else {
+                    strongSelf.audioSessionActive.set(.single(false))
+                }
+            })
+
+            self.audioSessionActiveDisposable = (self.audioSessionActive.get()
+            |> deliverOnMainQueue).start(next: { [weak self] value in
+                if let strongSelf = self {
+                    strongSelf.updateIsAudioSessionActive(value)
+                }
+            })
+
+            self.audioOutputStateDisposable = (self.audioOutputStatePromise.get()
+            |> deliverOnMainQueue).start(next: { [weak self] availableOutputs, currentOutput in
+                guard let strongSelf = self else {
+                    return
+                }
+                strongSelf.updateAudioOutputs(availableOutputs: availableOutputs, currentOutput: currentOutput)
+            })
+        }

         self.groupCallParticipantUpdatesDisposable = (self.account.stateManager.groupCallParticipantUpdates
         |> deliverOnMainQueue).start(next: { [weak self] updates in
@@ -1173,6 +1209,8 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
         self.participantsContextStateDisposable.dispose()
         self.myAudioLevelDisposable.dispose()
         self.memberEventsPipeDisposable.dispose()
+        self.hasActiveIncomingDataDisposable?.dispose()
+        self.hasActiveIncomingDataTimer?.invalidate()
         self.myAudioLevelTimer?.invalidate()
         self.typingDisposable.dispose()
@@ -1709,14 +1747,52 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
                     }
                     strongSelf.onMutedSpeechActivityDetected?(value)
                 }
-            }, encryptionKey: encryptionKey))
+            }, encryptionKey: encryptionKey, isConference: self.isConference, sharedAudioDevice: self.sharedAudioDevice))
         }

         self.genericCallContext = genericCallContext
         self.stateVersionValue += 1

+        let isEffectivelyMuted: Bool
+        switch self.isMutedValue {
+        case let .muted(isPushToTalkActive):
+            isEffectivelyMuted = !isPushToTalkActive
+        case .unmuted:
+            isEffectivelyMuted = false
+        }
+        genericCallContext.setIsMuted(isEffectivelyMuted)
+
         genericCallContext.setRequestedVideoChannels(self.suspendVideoChannelRequests ? [] : self.requestedVideoChannels)
         self.connectPendingVideoSubscribers()

+        if case let .call(callContext) = genericCallContext {
+            var lastTimestamp: Double?
+            self.hasActiveIncomingDataDisposable?.dispose()
+            self.hasActiveIncomingDataDisposable = (callContext.ssrcActivities
+            |> filter { !$0.isEmpty }
+            |> deliverOnMainQueue).startStrict(next: { [weak self] _ in
+                guard let self else {
+                    return
+                }
+                lastTimestamp = CFAbsoluteTimeGetCurrent()
+                self.hasActiveIncomingDataValue = true
+            })
+
+            self.hasActiveIncomingDataTimer?.invalidate()
+            self.hasActiveIncomingDataTimer = Foundation.Timer.scheduledTimer(withTimeInterval: 0.1, repeats: true, block: { [weak self] _ in
+                guard let self else {
+                    return
+                }
+                let timestamp = CFAbsoluteTimeGetCurrent()
+                if let lastTimestamp {
+                    if lastTimestamp + 1.0 < timestamp {
+                        self.hasActiveIncomingDataValue = false
+                    }
+                }
+            })
+
+            self.signalBarsPromise.set(callContext.signalBars)
+        }
+
         self.joinDisposable.set((genericCallContext.joinPayload
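Note: hasActiveIncomingData is derived with a watchdog rather than an explicit "stopped" event: every non-empty ssrcActivities emission refreshes a timestamp, and a 0.1-second repeating timer clears the flag once nothing has arrived for 1 second. The same pattern in isolation, as a minimal sketch using only Foundation:

    import Foundation

    final class ActivityWatchdog {
        private var lastActivity: Double?
        private var timer: Foundation.Timer?
        private(set) var isActive = false

        // Call whenever incoming activity (e.g. an ssrc update) is observed.
        func noteActivity() {
            self.lastActivity = CFAbsoluteTimeGetCurrent()
            self.isActive = true
        }

        // Poll at 0.1 s; declare inactivity after 1 s of silence, as above.
        func start() {
            self.timer = Foundation.Timer.scheduledTimer(withTimeInterval: 0.1, repeats: true, block: { [weak self] _ in
                guard let self else {
                    return
                }
                if let lastActivity = self.lastActivity, lastActivity + 1.0 < CFAbsoluteTimeGetCurrent() {
                    self.isActive = false
                }
            })
        }

        deinit {
            self.timer?.invalidate()
        }
    }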
@@ -2570,10 +2646,10 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
         }
         self.markedAsCanBeRemoved = true

-        self.genericCallContext?.stop()
+        self.genericCallContext?.stop(account: self.account, reportCallId: self.conferenceFromCallId)

         //self.screencastIpcContext = nil
-        self.screencastCallContext?.stop()
+        self.screencastCallContext?.stop(account: self.account, reportCallId: nil)

         self._canBeRemoved.set(.single(true))
@@ -3024,7 +3100,7 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
         self.hasScreencast = true

-        let screencastCallContext = OngoingGroupCallContext(audioSessionActive: .single(true), video: self.screencastCapturer, requestMediaChannelDescriptions: { _, _ in EmptyDisposable }, rejoinNeeded: { }, outgoingAudioBitrateKbit: nil, videoContentType: .screencast, enableNoiseSuppression: false, disableAudioInput: true, enableSystemMute: false, preferX264: false, logPath: "", onMutedSpeechActivityDetected: { _ in }, encryptionKey: nil)
+        let screencastCallContext = OngoingGroupCallContext(audioSessionActive: .single(true), video: self.screencastCapturer, requestMediaChannelDescriptions: { _, _ in EmptyDisposable }, rejoinNeeded: { }, outgoingAudioBitrateKbit: nil, videoContentType: .screencast, enableNoiseSuppression: false, disableAudioInput: true, enableSystemMute: false, preferX264: false, logPath: "", onMutedSpeechActivityDetected: { _ in }, encryptionKey: nil, isConference: self.isConference, sharedAudioDevice: nil)
         self.screencastCallContext = screencastCallContext

         self.screencastJoinDisposable.set((screencastCallContext.joinPayload
@@ -3059,7 +3135,7 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
         self.hasScreencast = false
         if let screencastCallContext = self.screencastCallContext {
             self.screencastCallContext = nil
-            screencastCallContext.stop()
+            screencastCallContext.stop(account: self.account, reportCallId: nil)

             let maybeCallInfo: GroupCallInfo? = self.internalState.callInfo
@@ -3133,6 +3209,9 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
     }

     public func setCurrentAudioOutput(_ output: AudioSessionOutput) {
+        if self.sharedAudioDevice != nil {
+            return
+        }
         guard self.currentSelectedAudioOutputValue != output else {
             return
         }

View File

@@ -789,12 +789,18 @@ private final class CallSessionManagerContext {
                 return
             }

+            var idAndAccessHash: (id: Int64, accessHash: Int64)?
             switch context.state {
             case let .active(id, accessHash, _, _, _, _, _, _, _, _, _, conferenceCall):
                 if conferenceCall != nil {
                     return
                 }
+                idAndAccessHash = (id, accessHash)
+            default:
+                break
+            }
+
+            if let (id, accessHash) = idAndAccessHash {
                 context.createConferenceCallDisposable = (createConferenceCall(postbox: self.postbox, network: self.network, accountPeerId: self.accountPeerId, callId: CallId(id: id, accessHash: accessHash))
                 |> deliverOn(self.queue)).startStrict(next: { [weak self] result in
                     guard let self else {
@@ -813,8 +819,6 @@ private final class CallSessionManagerContext {
                         }
                     }
                 })
-            default:
-                break
             }
         }
     }

View File

@@ -447,7 +447,7 @@ public final class OngoingGroupCallContext {
         let queue: Queue
         let context: GroupCallThreadLocalContext
         #if os(iOS)
-        let audioDevice: SharedCallAudioDevice?
+        let audioDevice: OngoingCallContext.AudioDevice?
         #endif

         let sessionId = UInt32.random(in: 0 ..< UInt32(Int32.max))
@@ -456,6 +456,8 @@ public final class OngoingGroupCallContext {
         let isMuted = ValuePromise<Bool>(true, ignoreRepeated: true)
         let isNoiseSuppressionEnabled = ValuePromise<Bool>(true, ignoreRepeated: true)
         let audioLevels = ValuePipe<[(AudioLevelKey, Float, Bool)]>()
+        let ssrcActivities = ValuePipe<[UInt32]>()
+        let signalBars = ValuePromise<Int32>(0)

         private var currentRequestedVideoChannels: [VideoChannel] = []
@@ -463,6 +465,9 @@ public final class OngoingGroupCallContext {
         private let audioSessionActiveDisposable = MetaDisposable()

+        private let logPath: String
+        private let tempStatsLogFile: EngineTempBox.File
+
         init(
             queue: Queue,
             inputDeviceId: String,
@@ -479,16 +484,28 @@ public final class OngoingGroupCallContext {
             preferX264: Bool,
             logPath: String,
             onMutedSpeechActivityDetected: @escaping (Bool) -> Void,
-            encryptionKey: Data?
+            encryptionKey: Data?,
+            isConference: Bool,
+            sharedAudioDevice: OngoingCallContext.AudioDevice?
         ) {
             self.queue = queue

+            self.logPath = logPath
+            self.tempStatsLogFile = EngineTempBox.shared.tempFile(fileName: "CallStats.json")
+            let tempStatsLogPath = self.tempStatsLogFile.path
+
             #if os(iOS)
-            self.audioDevice = nil
+            if sharedAudioDevice == nil {
+                self.audioDevice = OngoingCallContext.AudioDevice.create(enableSystemMute: false)
+            } else {
+                self.audioDevice = sharedAudioDevice
+            }
             let audioDevice = self.audioDevice
             #endif

             var networkStateUpdatedImpl: ((GroupCallNetworkState) -> Void)?
             var audioLevelsUpdatedImpl: (([NSNumber]) -> Void)?
+            var activityUpdatedImpl: (([UInt32]) -> Void)?

             let _videoContentType: OngoingGroupCallVideoContentType
             switch videoContentType {
@@ -510,6 +527,9 @@ public final class OngoingGroupCallContext {
                 audioLevelsUpdated: { levels in
                     audioLevelsUpdatedImpl?(levels)
                 },
+                activityUpdated: { ssrcs in
+                    activityUpdatedImpl?(ssrcs.map { $0.uint32Value })
+                },
                 inputDeviceId: inputDeviceId,
                 outputDeviceId: outputDeviceId,
                 videoCapturer: video?.impl,
@@ -594,11 +614,13 @@ public final class OngoingGroupCallContext {
                 enableSystemMute: enableSystemMute,
                 preferX264: preferX264,
                 logPath: logPath,
+                statsLogPath: tempStatsLogPath,
                 onMutedSpeechActivityDetected: { value in
                     onMutedSpeechActivityDetected(value)
                 },
-                audioDevice: audioDevice,
-                encryptionKey: encryptionKey
+                audioDevice: audioDevice?.impl,
+                encryptionKey: encryptionKey,
+                isConference: isConference
             )
             #else
             self.context = GroupCallThreadLocalContext(
@@ -609,6 +631,9 @@ public final class OngoingGroupCallContext {
                 audioLevelsUpdated: { levels in
                     audioLevelsUpdatedImpl?(levels)
                 },
+                activityUpdated: { ssrcs in
+                    activityUpdatedImpl?(ssrcs.map { $0.uint32Value })
+                },
                 inputDeviceId: inputDeviceId,
                 outputDeviceId: outputDeviceId,
                 videoCapturer: video?.impl,
@@ -692,7 +717,9 @@ public final class OngoingGroupCallContext {
                 disableAudioInput: disableAudioInput,
                 preferX264: preferX264,
                 logPath: logPath,
-                encryptionKey: encryptionKey
+                statsLogPath: tempStatsLogPath,
+                encryptionKey: encryptionKey,
+                isConference: isConference
             )
             #endif
@@ -732,6 +759,20 @@ public final class OngoingGroupCallContext {
                 }
             }

+            let ssrcActivities = self.ssrcActivities
+            activityUpdatedImpl = { ssrcs in
+                queue.async {
+                    ssrcActivities.putNext(ssrcs)
+                }
+            }
+
+            let signalBars = self.signalBars
+            self.context.signalBarsChanged = { value in
+                queue.async {
+                    signalBars.set(value)
+                }
+            }
+
             self.context.emitJoinPayload({ [weak self] payload, ssrc in
                 queue.async {
                     guard let strongSelf = self else {
@@ -741,16 +782,18 @@ public final class OngoingGroupCallContext {
                 }
             })

-            self.audioSessionActiveDisposable.set((audioSessionActive
-            |> deliverOn(queue)).start(next: { [weak self] isActive in
-                guard let `self` = self else {
-                    return
-                }
-                // self.audioDevice?.setManualAudioSessionIsActive(isActive)
-                #if os(iOS)
-                self.context.setManualAudioSessionIsActive(isActive)
-                #endif
-            }))
+            if sharedAudioDevice == nil {
+                self.audioSessionActiveDisposable.set((audioSessionActive
+                |> deliverOn(queue)).start(next: { [weak self] isActive in
+                    guard let `self` = self else {
+                        return
+                    }
+                    // self.audioDevice?.setManualAudioSessionIsActive(isActive)
+                    #if os(iOS)
+                    self.context.setManualAudioSessionIsActive(isActive)
+                    #endif
+                }))
+            }
         }

         deinit {
@@ -826,8 +869,40 @@ public final class OngoingGroupCallContext {
             }
         }

-        func stop() {
-            self.context.stop()
+        func stop(account: Account, reportCallId: CallId?) {
+            let logPath = self.logPath
+            var statsLogPath = ""
+            if !logPath.isEmpty {
+                statsLogPath = logPath + ".json"
+            }
+            let tempStatsLogPath = self.tempStatsLogFile.path
+
+            let queue = self.queue
+            self.context.stop({
+                queue.async {
+                    if !statsLogPath.isEmpty {
+                        let logsPath = callLogsPath(account: account)
+                        let _ = try? FileManager.default.createDirectory(atPath: logsPath, withIntermediateDirectories: true, attributes: nil)
+                        let _ = try? FileManager.default.moveItem(atPath: tempStatsLogPath, toPath: statsLogPath)
+                    }
+
+                    if let callId = reportCallId, !statsLogPath.isEmpty, let data = try? Data(contentsOf: URL(fileURLWithPath: statsLogPath)), let dataString = String(data: data, encoding: .utf8) {
+                        let engine = TelegramEngine(account: account)
+                        let _ = engine.calls.saveCallDebugLog(callId: callId, log: dataString).start(next: { result in
+                            switch result {
+                            case .sendFullLog:
+                                if !logPath.isEmpty {
+                                    let _ = engine.calls.saveCompleteCallDebugLog(callId: callId, logPath: logPath).start()
+                                }
+                            case .done:
+                                break
+                            }
+                        })
+                    }
+                }
+            })
         }

         func setConnectionMode(_ connectionMode: ConnectionMode, keepBroadcastConnectedIfWasEnabled: Bool, isUnifiedBroadcast: Bool) {
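Note: stop(account:reportCallId:) now outlives the context teardown: once tgcalls signals completion, the temporary CallStats.json is moved next to the regular call log, and when the group call was spawned from a one-to-one call (reportCallId is set) the stats are uploaded via saveCallDebugLog, escalating to the complete log on .sendFullLog. A reduced sketch of just the file hand-off step, with hypothetical paths standing in for the ones the real code derives from logPath and callLogsPath:

    import Foundation

    // Move the temp stats file into place, then read it back for upload.
    func persistStatsLog(tempPath: String, finalPath: String) -> String? {
        let directory = (finalPath as NSString).deletingLastPathComponent
        let _ = try? FileManager.default.createDirectory(atPath: directory, withIntermediateDirectories: true, attributes: nil)
        let _ = try? FileManager.default.moveItem(atPath: tempPath, toPath: finalPath)
        guard let data = try? Data(contentsOf: URL(fileURLWithPath: finalPath)) else {
            return nil
        }
        return String(data: data, encoding: .utf8)
    }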
@@ -988,6 +1063,30 @@ public final class OngoingGroupCallContext {
         }
     }

+    public var ssrcActivities: Signal<[UInt32], NoError> {
+        return Signal { subscriber in
+            let disposable = MetaDisposable()
+            self.impl.with { impl in
+                disposable.set(impl.ssrcActivities.signal().start(next: { value in
+                    subscriber.putNext(value)
+                }))
+            }
+            return disposable
+        }
+    }
+
+    public var signalBars: Signal<Int32, NoError> {
+        return Signal { subscriber in
+            let disposable = MetaDisposable()
+            self.impl.with { impl in
+                disposable.set(impl.signalBars.get().start(next: { value in
+                    subscriber.putNext(value)
+                }))
+            }
+            return disposable
+        }
+    }
+
     public var isMuted: Signal<Bool, NoError> {
         return Signal { subscriber in
             let disposable = MetaDisposable()
@@ -1012,10 +1111,10 @@ public final class OngoingGroupCallContext {
         }
     }

-    public init(inputDeviceId: String = "", outputDeviceId: String = "", audioSessionActive: Signal<Bool, NoError>, video: OngoingCallVideoCapturer?, requestMediaChannelDescriptions: @escaping (Set<UInt32>, @escaping ([MediaChannelDescription]) -> Void) -> Disposable, rejoinNeeded: @escaping () -> Void, outgoingAudioBitrateKbit: Int32?, videoContentType: VideoContentType, enableNoiseSuppression: Bool, disableAudioInput: Bool, enableSystemMute: Bool, preferX264: Bool, logPath: String, onMutedSpeechActivityDetected: @escaping (Bool) -> Void, encryptionKey: Data?) {
+    public init(inputDeviceId: String = "", outputDeviceId: String = "", audioSessionActive: Signal<Bool, NoError>, video: OngoingCallVideoCapturer?, requestMediaChannelDescriptions: @escaping (Set<UInt32>, @escaping ([MediaChannelDescription]) -> Void) -> Disposable, rejoinNeeded: @escaping () -> Void, outgoingAudioBitrateKbit: Int32?, videoContentType: VideoContentType, enableNoiseSuppression: Bool, disableAudioInput: Bool, enableSystemMute: Bool, preferX264: Bool, logPath: String, onMutedSpeechActivityDetected: @escaping (Bool) -> Void, encryptionKey: Data?, isConference: Bool, sharedAudioDevice: OngoingCallContext.AudioDevice?) {
         let queue = self.queue
         self.impl = QueueLocalObject(queue: queue, generate: {
-            return Impl(queue: queue, inputDeviceId: inputDeviceId, outputDeviceId: outputDeviceId, audioSessionActive: audioSessionActive, video: video, requestMediaChannelDescriptions: requestMediaChannelDescriptions, rejoinNeeded: rejoinNeeded, outgoingAudioBitrateKbit: outgoingAudioBitrateKbit, videoContentType: videoContentType, enableNoiseSuppression: enableNoiseSuppression, disableAudioInput: disableAudioInput, enableSystemMute: enableSystemMute, preferX264: preferX264, logPath: logPath, onMutedSpeechActivityDetected: onMutedSpeechActivityDetected, encryptionKey: encryptionKey)
+            return Impl(queue: queue, inputDeviceId: inputDeviceId, outputDeviceId: outputDeviceId, audioSessionActive: audioSessionActive, video: video, requestMediaChannelDescriptions: requestMediaChannelDescriptions, rejoinNeeded: rejoinNeeded, outgoingAudioBitrateKbit: outgoingAudioBitrateKbit, videoContentType: videoContentType, enableNoiseSuppression: enableNoiseSuppression, disableAudioInput: disableAudioInput, enableSystemMute: enableSystemMute, preferX264: preferX264, logPath: logPath, onMutedSpeechActivityDetected: onMutedSpeechActivityDetected, encryptionKey: encryptionKey, isConference: isConference, sharedAudioDevice: sharedAudioDevice)
         })
     }
@@ -1103,9 +1202,9 @@ public final class OngoingGroupCallContext {
         }
     }

-    public func stop() {
+    public func stop(account: Account, reportCallId: CallId?) {
         self.impl.with { impl in
-            impl.stop()
+            impl.stop(account: account, reportCallId: reportCallId)
         }
     }

View File

@@ -794,6 +794,11 @@ public final class OngoingCallContext {
         return self.audioLevelPromise.get()
     }

+    private let signalingDataPipe = ValuePipe<[Data]>()
+    public var signalingData: Signal<[Data], NoError> {
+        return self.signalingDataPipe.signal()
+    }
+
     private let audioSessionDisposable = MetaDisposable()
     private let audioSessionActiveDisposable = MetaDisposable()
     private var networkTypeDisposable: Disposable?
@@ -1122,7 +1127,13 @@ public final class OngoingCallContext {
             strongSelf.signalingDataDisposable = callSessionManager.beginReceivingCallSignalingData(internalId: internalId, { [weak self] dataList in
                 queue.async {
-                    self?.withContext { context in
+                    guard let self else {
+                        return
+                    }
+
+                    self.signalingDataPipe.putNext(dataList)
+
+                    self.withContext { context in
                         if let context = context as? OngoingCallThreadLocalContextWebrtc {
                             for data in dataList {
                                 context.addSignaling(data)
@@ -1301,6 +1312,21 @@ public final class OngoingCallContext {
             context.addExternalAudioData(data: data)
         }
     }
+
+    public func sendSignalingData(data: Data) {
+        self.queue.async { [weak self] in
+            guard let strongSelf = self else {
+                return
+            }
+            if let signalingConnectionManager = strongSelf.signalingConnectionManager {
+                signalingConnectionManager.with { impl in
+                    impl.send(payloadData: data)
+                }
+            }
+
+            strongSelf.callSessionManager.sendSignalingData(internalId: strongSelf.internalId, data: data)
+        }
+    }
 }

 private protocol CallSignalingConnection: AnyObject {

View File

@@ -398,9 +398,12 @@ typedef NS_ENUM(int32_t, OngoingGroupCallRequestedVideoQuality) {
 @interface GroupCallThreadLocalContext : NSObject

+@property (nonatomic, copy) void (^ _Nullable signalBarsChanged)(int32_t);
+
 - (instancetype _Nonnull)initWithQueue:(id<OngoingCallThreadLocalContextQueueWebrtc> _Nonnull)queue
 networkStateUpdated:(void (^ _Nonnull)(GroupCallNetworkState))networkStateUpdated
 audioLevelsUpdated:(void (^ _Nonnull)(NSArray<NSNumber *> * _Nonnull))audioLevelsUpdated
+activityUpdated:(void (^ _Nonnull)(NSArray<NSNumber *> * _Nonnull))activityUpdated
 inputDeviceId:(NSString * _Nonnull)inputDeviceId
 outputDeviceId:(NSString * _Nonnull)outputDeviceId
 videoCapturer:(OngoingCallThreadLocalContextVideoCapturer * _Nullable)videoCapturer
@@ -415,11 +418,13 @@ typedef NS_ENUM(int32_t, OngoingGroupCallRequestedVideoQuality) {
 enableSystemMute:(bool)enableSystemMute
 preferX264:(bool)preferX264
 logPath:(NSString * _Nonnull)logPath
+statsLogPath:(NSString * _Nonnull)statsLogPath
 onMutedSpeechActivityDetected:(void (^ _Nullable)(bool))onMutedSpeechActivityDetected
 audioDevice:(SharedCallAudioDevice * _Nullable)audioDevice
-encryptionKey:(NSData * _Nullable)encryptionKey;
+encryptionKey:(NSData * _Nullable)encryptionKey
+isConference:(bool)isConference;

-- (void)stop;
+- (void)stop:(void (^ _Nullable)())completion;
 - (void)setManualAudioSessionIsActive:(bool)isAudioSessionActive;
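Note: -stop becoming -stop: makes teardown asynchronous; Swift callers see it as stop(_ completion: (() -> Void)?), which is what lets the Swift-side Impl.stop above defer the stats-log hand-off until tgcalls has finished writing. A minimal sketch of wrapping the new completion-based call in a Signal, assuming only the interface declared above:

    import SwiftSignalKit

    // Hypothetical wrapper: completes once the context has fully stopped.
    func stopSignal(context: GroupCallThreadLocalContext) -> Signal<Never, NoError> {
        return Signal { subscriber in
            context.stop({
                subscriber.putCompletion()
            })
            return EmptyDisposable
        }
    }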

View File

@@ -1671,6 +1671,8 @@ private:
     SharedCallAudioDevice * _audioDevice;

     void (^_onMutedSpeechActivityDetected)(bool);
+
+    int32_t _signalBars;
 }

 @end
@@ -1680,6 +1682,7 @@
 - (instancetype _Nonnull)initWithQueue:(id<OngoingCallThreadLocalContextQueueWebrtc> _Nonnull)queue
 networkStateUpdated:(void (^ _Nonnull)(GroupCallNetworkState))networkStateUpdated
 audioLevelsUpdated:(void (^ _Nonnull)(NSArray<NSNumber *> * _Nonnull))audioLevelsUpdated
+activityUpdated:(void (^ _Nonnull)(NSArray<NSNumber *> * _Nonnull))activityUpdated
 inputDeviceId:(NSString * _Nonnull)inputDeviceId
 outputDeviceId:(NSString * _Nonnull)outputDeviceId
 videoCapturer:(OngoingCallThreadLocalContextVideoCapturer * _Nullable)videoCapturer
@@ -1694,9 +1697,11 @@
 enableSystemMute:(bool)enableSystemMute
 preferX264:(bool)preferX264
 logPath:(NSString * _Nonnull)logPath
+statsLogPath:(NSString * _Nonnull)statsLogPath
 onMutedSpeechActivityDetected:(void (^ _Nullable)(bool))onMutedSpeechActivityDetected
 audioDevice:(SharedCallAudioDevice * _Nullable)audioDevice
-encryptionKey:(NSData * _Nullable)encryptionKey {
+encryptionKey:(NSData * _Nullable)encryptionKey
+isConference:(bool)isConference {
     self = [super init];
     if (self != nil) {
         _queue = queue;
@@ -1762,6 +1767,8 @@ encryptionKey:(NSData * _Nullable)encryptionKey {
         config.need_log = true;
         config.logPath.data = std::string(logPath.length == 0 ? "" : logPath.UTF8String);

+        std::string statsLogPathValue(statsLogPath.length == 0 ? "" : statsLogPath.UTF8String);
+
         std::optional<tgcalls::EncryptionKey> mappedEncryptionKey;
         if (encryptionKey) {
             auto encryptionKeyValue = std::make_shared<std::array<uint8_t, 256>>();
@@ -1774,6 +1781,7 @@ encryptionKey:(NSData * _Nullable)encryptionKey {
         _instance.reset(new tgcalls::GroupInstanceCustomImpl((tgcalls::GroupInstanceDescriptor){
             .threads = tgcalls::StaticThreads::getThreads(),
             .config = config,
+            .statsLogPath = statsLogPathValue,
             .networkStateUpdated = [weakSelf, queue, networkStateUpdated](tgcalls::GroupNetworkState networkState) {
                 [queue dispatch:^{
                     __strong GroupCallThreadLocalContext *strongSelf = weakSelf;
@@ -1786,6 +1794,17 @@ encryptionKey:(NSData * _Nullable)encryptionKey {
                     networkStateUpdated(mappedState);
                 }];
             },
+            .signalBarsUpdated = [weakSelf, queue](int value) {
+                [queue dispatch:^{
+                    __strong GroupCallThreadLocalContext *strongSelf = weakSelf;
+                    if (strongSelf) {
+                        strongSelf->_signalBars = value;
+                        if (strongSelf->_signalBarsChanged) {
+                            strongSelf->_signalBarsChanged(value);
+                        }
+                    }
+                }];
+            },
             .audioLevelsUpdated = [audioLevelsUpdated](tgcalls::GroupLevelsUpdate const &levels) {
                 NSMutableArray *result = [[NSMutableArray alloc] init];
                 for (auto &it : levels.updates) {
@@ -1799,6 +1818,13 @@ encryptionKey:(NSData * _Nullable)encryptionKey {
                 }
                 audioLevelsUpdated(result);
             },
+            .ssrcActivityUpdated = [activityUpdated](tgcalls::GroupActivitiesUpdate const &update) {
+                NSMutableArray *result = [[NSMutableArray alloc] init];
+                for (auto &it : update.updates) {
+                    [result addObject:@(it.ssrc)];
+                }
+                activityUpdated(result);
+            },
             .initialInputDeviceId = inputDeviceId.UTF8String,
             .initialOutputDeviceId = outputDeviceId.UTF8String,
             .videoCapture = [_videoCapturer getInterface],
@@ -1968,7 +1994,8 @@ encryptionKey:(NSData * _Nullable)encryptionKey {
                     }
                 }];
             },
-            .encryptionKey = mappedEncryptionKey
+            .encryptionKey = mappedEncryptionKey,
+            .isConference = isConference
         }));
     }
     return self;
@@ -1984,7 +2011,7 @@ encryptionKey:(NSData * _Nullable)encryptionKey {
     }
 }

-- (void)stop {
+- (void)stop:(void (^ _Nullable)())completion {
     if (_currentAudioDeviceModuleThread) {
         auto currentAudioDeviceModule = _currentAudioDeviceModule;
         _currentAudioDeviceModule = nullptr;
@@ -1994,8 +2021,17 @@ encryptionKey:(NSData * _Nullable)encryptionKey {
     }

     if (_instance) {
-        _instance->stop();
+        void (^capturedCompletion)() = [completion copy];
+        _instance->stop([capturedCompletion] {
+            if (capturedCompletion) {
+                capturedCompletion();
+            }
+        });
         _instance.reset();
+    } else {
+        if (completion) {
+            completion();
+        }
     }
 }

@@ -1 +1 @@
-Subproject commit 518e1ed9dff6b897fc3cd07394edc9e2987e0fdb
+Subproject commit 965c46f32425cb270e88ab0aab7c3593b5be574e

@@ -1 +1 @@
-Subproject commit cff7487b9c9a856678d645879d363e55812f3039
+Subproject commit 77d3d1fe2ff2f364e8edee58179a7b7b95239b01

View File

@@ -1,5 +1,5 @@
 {
-    "app": "11.5.2",
+    "app": "11.5.3",
     "xcode": "16.0",
     "bazel": "7.3.1:981f82a470bad1349322b6f51c9c6ffa0aa291dab1014fac411543c12e661dff",
     "macos": "15.0"