diff --git a/submodules/TelegramAudio/Sources/ManagedAudioSession.swift b/submodules/TelegramAudio/Sources/ManagedAudioSession.swift
index 2b3a8c16a3..6269ce926a 100644
--- a/submodules/TelegramAudio/Sources/ManagedAudioSession.swift
+++ b/submodules/TelegramAudio/Sources/ManagedAudioSession.swift
@@ -179,6 +179,8 @@ public class ManagedAudioSessionControl {
 }
 
 public final class ManagedAudioSession {
+    public private(set) static var shared: ManagedAudioSession?
+    
     private var nextId: Int32 = 0
     private let queue: Queue
     private let hasLoudspeaker: Bool
@@ -256,6 +258,8 @@ public final class ManagedAudioSession {
             self.isHeadsetPluggedInValue = self.isHeadsetPluggedIn()
             self.updateCurrentAudioRouteInfo()
         }
+        
+        ManagedAudioSession.shared = self
     }
     
     deinit {
@@ -784,6 +788,61 @@ public final class ManagedAudioSession {
         }
     }
     
+    public func applyVoiceChatOutputModeInCurrentAudioSession(outputMode: AudioSessionOutputMode) {
+        managedAudioSessionLog("applyVoiceChatOutputModeInCurrentAudioSession \(outputMode)")
+        
+        do {
+            var resetToBuiltin = false
+            switch outputMode {
+            case .system:
+                resetToBuiltin = true
+            case let .custom(output):
+                switch output {
+                case .builtin:
+                    resetToBuiltin = true
+                case .speaker:
+                    if let routes = AVAudioSession.sharedInstance().availableInputs {
+                        for route in routes {
+                            if route.portType == .builtInMic {
+                                let _ = try? AVAudioSession.sharedInstance().setPreferredInput(route)
+                                break
+                            }
+                        }
+                    }
+                    try AVAudioSession.sharedInstance().overrideOutputAudioPort(.speaker)
+                case .headphones:
+                    break
+                case let .port(port):
+                    try AVAudioSession.sharedInstance().overrideOutputAudioPort(.none)
+                    if let routes = AVAudioSession.sharedInstance().availableInputs {
+                        for route in routes {
+                            if route.uid == port.uid {
+                                let _ = try? AVAudioSession.sharedInstance().setPreferredInput(route)
+                                break
+                            }
+                        }
+                    }
+                }
+            case .speakerIfNoHeadphones:
+                try AVAudioSession.sharedInstance().overrideOutputAudioPort(.none)
+            }
+            
+            if resetToBuiltin {
+                try AVAudioSession.sharedInstance().overrideOutputAudioPort(.none)
+                if let routes = AVAudioSession.sharedInstance().availableInputs {
+                    for route in routes {
+                        if route.portType == .builtInMic {
+                            let _ = try? AVAudioSession.sharedInstance().setPreferredInput(route)
+                            break
+                        }
+                    }
+                }
+            }
+        } catch let e {
+            managedAudioSessionLog("applyVoiceChatOutputModeInCurrentAudioSession error: \(e)")
+        }
+    }
+    
     private func setupOutputMode(_ outputMode: AudioSessionOutputMode, type: ManagedAudioSessionType) throws {
         managedAudioSessionLog("ManagedAudioSession setup \(outputMode) for \(type)")
         var resetToBuiltin = false
@@ -902,7 +961,7 @@ public final class ManagedAudioSession {
     public func callKitActivatedAudioSession() {
         self.queue.async {
-            managedAudioSessionLog("ManagedAudioSession callKitDeactivatedAudioSession")
+            managedAudioSessionLog("ManagedAudioSession callKitActivatedAudioSession")
            self.callKitAudioSessionIsActive = true
            self.updateHolders()
        }
    }
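
The routing logic added above reduces to a small AVFoundation pattern: pin the preferred input to the built-in mic, then override the output port. A minimal sketch of that pattern follows, assuming an audio session that is already configured and active (for example, one CallKit has activated); routeOutput(toSpeaker:) is an illustrative name, not part of this change:

import AVFoundation

// Sketch only: mirrors the override/reset pattern used by
// applyVoiceChatOutputModeInCurrentAudioSession above.
func routeOutput(toSpeaker: Bool) {
    let session = AVAudioSession.sharedInstance()
    do {
        if toSpeaker {
            // Keep capturing from the built-in mic while forcing speaker output.
            if let inputs = session.availableInputs {
                for input in inputs where input.portType == .builtInMic {
                    let _ = try? session.setPreferredInput(input)
                    break
                }
            }
            try session.overrideOutputAudioPort(.speaker)
        } else {
            // .none clears any speaker override and returns to the default route.
            try session.overrideOutputAudioPort(.none)
        }
    } catch {
        print("audio route override failed: \(error)")
    }
}
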
diff --git a/submodules/TelegramCallsUI/Sources/CallKitIntegration.swift b/submodules/TelegramCallsUI/Sources/CallKitIntegration.swift
index 3629693c2b..dc78dd3ef6 100644
--- a/submodules/TelegramCallsUI/Sources/CallKitIntegration.swift
+++ b/submodules/TelegramCallsUI/Sources/CallKitIntegration.swift
@@ -8,6 +8,8 @@ import TelegramCore
 import SwiftSignalKit
 import AppBundle
 import AccountContext
+import TelegramAudio
+import TelegramVoip
 
 private let sharedProviderDelegate: AnyObject? = {
     if #available(iOSApplicationExtension 10.0, iOS 10.0, *) {
@@ -107,6 +109,10 @@
             }
         }
     }
+    
+    public func applyVoiceChatOutputMode(outputMode: AudioSessionOutputMode) {
+        (sharedProviderDelegate as? CallKitProviderDelegate)?.applyVoiceChatOutputMode(outputMode: outputMode)
+    }
 }
 
 @available(iOSApplicationExtension 10.0, iOS 10.0, *)
@@ -125,6 +131,9 @@ class CallKitProviderDelegate: NSObject, CXProviderDelegate {
     private var setCallMuted: ((UUID, Bool) -> Void)?
     private var audioSessionActivationChanged: ((Bool) -> Void)?
     
+    private var isAudioSessionActive: Bool = false
+    private var pendingVoiceChatOutputMode: AudioSessionOutputMode?
+    
     private let disposableSet = DisposableSet()
     
     fileprivate var audioSessionActivePromise: ValuePromise<Bool>?
@@ -161,25 +170,32 @@ class CallKitProviderDelegate: NSObject, CXProviderDelegate {
     }
     
     private func requestTransaction(_ transaction: CXTransaction, completion: ((Bool) -> Void)? = nil) {
+        Logger.shared.log("CallKitIntegration", "requestTransaction \(transaction)")
         self.callController.request(transaction) { error in
             if let error = error {
-                print("Error requesting transaction: \(error)")
+                Logger.shared.log("CallKitIntegration", "error in requestTransaction \(transaction): \(error)")
             }
             completion?(error == nil)
         }
     }
     
     func endCall(uuid: UUID) {
+        Logger.shared.log("CallKitIntegration", "endCall \(uuid)")
+        
         let endCallAction = CXEndCallAction(call: uuid)
         let transaction = CXTransaction(action: endCallAction)
         self.requestTransaction(transaction)
     }
     
     func dropCall(uuid: UUID) {
+        Logger.shared.log("CallKitIntegration", "report call ended \(uuid)")
+        
         self.provider.reportCall(with: uuid, endedAt: nil, reason: CXCallEndedReason.remoteEnded)
     }
     
     func answerCall(uuid: UUID) {
+        Logger.shared.log("CallKitIntegration", "answer call \(uuid)")
+        
         let answerCallAction = CXAnswerCallAction(call: uuid)
         let transaction = CXTransaction(action: answerCallAction)
         self.requestTransaction(transaction)
@@ -203,6 +219,8 @@
         startCallAction.isVideo = isVideo
         let transaction = CXTransaction(action: startCallAction)
         
+        Logger.shared.log("CallKitIntegration", "initiate call \(uuid)")
+        
         self.requestTransaction(transaction, completion: { _ in
             let update = CXCallUpdate()
             update.remoteHandle = handle
@@ -238,23 +256,40 @@
         update.supportsDTMF = false
         update.hasVideo = isVideo
         
+        Logger.shared.log("CallKitIntegration", "report incoming call \(uuid)")
+        
+        OngoingCallContext.setupAudioSession()
+        
+        /*do {
+            try AVAudioSession.sharedInstance().setMode(.voiceChat)
+        } catch let e {
+            print("AVAudioSession.sharedInstance().setMode(.voiceChat) error \(e)")
+        }*/
+        
         self.provider.reportNewIncomingCall(with: uuid, update: update, completion: { error in
             completion?(error as NSError?)
         })
     }
     
     func reportOutgoingCallConnecting(uuid: UUID, at date: Date) {
+        Logger.shared.log("CallKitIntegration", "report outgoing call connecting \(uuid)")
+        
         self.provider.reportOutgoingCall(with: uuid, startedConnectingAt: date)
     }
     
     func reportOutgoingCallConnected(uuid: UUID, at date: Date) {
+        Logger.shared.log("CallKitIntegration", "report call connected \(uuid)")
+        
         self.provider.reportOutgoingCall(with: uuid, connectedAt: date)
     }
     
     func providerDidReset(_ provider: CXProvider) {
+        Logger.shared.log("CallKitIntegration", "providerDidReset")
     }
     
     func provider(_ provider: CXProvider, perform action: CXStartCallAction) {
+        Logger.shared.log("CallKitIntegration", "provider perform start call action \(action)")
+        
         guard let startCall = self.startCall, let (uuid, context) = self.currentStartCallAccount, uuid == action.callUUID else {
             action.fail()
             return
@@ -281,6 +316,8 @@
     }
     
     func provider(_ provider: CXProvider, perform action: CXAnswerCallAction) {
+        Logger.shared.log("CallKitIntegration", "provider perform answer call action \(action)")
+        
         guard let answerCall = self.answerCall else {
             action.fail()
             return
@@ -290,6 +327,8 @@
     }
     
     func provider(_ provider: CXProvider, perform action: CXEndCallAction) {
+        Logger.shared.log("CallKitIntegration", "provider perform end call action \(action)")
+        
         guard let endCall = self.endCall else {
             action.fail()
             return
@@ -312,6 +351,8 @@
     }
     
     func provider(_ provider: CXProvider, perform action: CXSetMutedCallAction) {
+        Logger.shared.log("CallKitIntegration", "provider perform mute call action \(action)")
+        
         guard let setCallMuted = self.setCallMuted else {
             action.fail()
             return
@@ -321,13 +362,29 @@
     }
     
     func provider(_ provider: CXProvider, didActivate audioSession: AVAudioSession) {
+        Logger.shared.log("CallKitIntegration", "provider didActivate audio session")
+        self.isAudioSessionActive = true
         self.audioSessionActivationChanged?(true)
         self.audioSessionActivePromise?.set(true)
+        
+        if let outputMode = self.pendingVoiceChatOutputMode {
+            self.pendingVoiceChatOutputMode = nil
+            ManagedAudioSession.shared?.applyVoiceChatOutputModeInCurrentAudioSession(outputMode: outputMode)
+        }
     }
     
     func provider(_ provider: CXProvider, didDeactivate audioSession: AVAudioSession) {
+        Logger.shared.log("CallKitIntegration", "provider didDeactivate audio session")
+        self.isAudioSessionActive = false
         self.audioSessionActivationChanged?(false)
         self.audioSessionActivePromise?.set(false)
     }
+    
+    func applyVoiceChatOutputMode(outputMode: AudioSessionOutputMode) {
+        if self.isAudioSessionActive {
+            ManagedAudioSession.shared?.applyVoiceChatOutputModeInCurrentAudioSession(outputMode: outputMode)
+        } else {
+            self.pendingVoiceChatOutputMode = outputMode
+        }
+    }
 }
-
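
CallKitProviderDelegate above only touches the audio route once CXProvider reports the session active; requests that arrive earlier are stashed in pendingVoiceChatOutputMode and replayed from didActivate. The same defer-and-replay pattern in isolation — a sketch with hypothetical names, assuming (as in the delegate) that all calls arrive on a single queue:

// Sketch only: generic version of the isAudioSessionActive /
// pendingVoiceChatOutputMode pair used by CallKitProviderDelegate above.
final class DeferredOutputMode<Mode> {
    private var isActive = false
    private var pending: Mode?
    private let apply: (Mode) -> Void
    
    init(apply: @escaping (Mode) -> Void) {
        self.apply = apply
    }
    
    // Counterpart of applyVoiceChatOutputMode(outputMode:): apply now if the
    // session is live, otherwise remember the most recent request.
    func request(_ mode: Mode) {
        if self.isActive {
            self.apply(mode)
        } else {
            self.pending = mode
        }
    }
    
    // Counterpart of provider(_:didActivate:) / provider(_:didDeactivate:).
    func setSessionActive(_ active: Bool) {
        self.isActive = active
        if active, let mode = self.pending {
            self.pending = nil
            self.apply(mode)
        }
    }
}
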
diff --git a/submodules/TelegramCallsUI/Sources/PresentationCall.swift b/submodules/TelegramCallsUI/Sources/PresentationCall.swift
index e877e21ebb..e4084d3cf1 100644
--- a/submodules/TelegramCallsUI/Sources/PresentationCall.swift
+++ b/submodules/TelegramCallsUI/Sources/PresentationCall.swift
@@ -413,6 +413,9 @@ public final class PresentationCallImpl: PresentationCall {
                 return
             }
             strongSelf.audioOutputStateValue = (availableOutputs, currentOutput)
+            if let currentOutput = currentOutput {
+                strongSelf.currentAudioOutputValue = currentOutput
+            }
             
             var signal: Signal<([AudioSessionOutput], AudioSessionOutput?), NoError> = .single((availableOutputs, currentOutput))
             if !didReceiveAudioOutputs {
@@ -437,7 +440,7 @@
                 let audioSessionActive: Signal<Bool, NoError>
                 if let callKitIntegration = strongSelf.callKitIntegration {
                     audioSessionActive = callKitIntegration.audioSessionActive
-                    |> filter { $0 }
+                    /*|> filter { $0 }
                     |> timeout(2.0, queue: Queue.mainQueue(), alternate: Signal { subscriber in
                         if let strongSelf = self, let _ = strongSelf.audioSessionControl {
                             //audioSessionControl.activate({ _ in })
@@ -445,7 +448,7 @@
                         subscriber.putNext(true)
                         subscriber.putCompletion()
                         return EmptyDisposable
-                    })
+                    })*/
                 } else {
                     audioSessionControl.activate({ _ in })
                     audioSessionActive = .single(true)
@@ -534,8 +537,12 @@
         }
         
         if let audioSessionControl = audioSessionControl, previous == nil || previousControl == nil {
-            audioSessionControl.setOutputMode(.custom(self.currentAudioOutputValue))
-            audioSessionControl.setup(synchronous: true)
+            if let callKitIntegration = self.callKitIntegration {
+                callKitIntegration.applyVoiceChatOutputMode(outputMode: .custom(self.currentAudioOutputValue))
+            } else {
+                audioSessionControl.setOutputMode(.custom(self.currentAudioOutputValue))
+                audioSessionControl.setup(synchronous: true)
+            }
         }
         
         let mappedVideoState: PresentationCallState.VideoState
@@ -876,6 +883,10 @@
     }
     
     public func answer() {
+        self.answer(fromCallKitAction: false)
+    }
+    
+    func answer(fromCallKitAction: Bool) {
         let (presentationData, present, openSettings) = self.getDeviceAccessData()
         
         DeviceAccess.authorizeAccess(to: .microphone(.voiceCall), presentationData: presentationData, present: { c, a in
@@ -898,14 +909,18 @@
                     }
                     if value {
                         strongSelf.callSessionManager.accept(internalId: strongSelf.internalId)
-                        strongSelf.callKitIntegration?.answerCall(uuid: strongSelf.internalId)
+                        if !fromCallKitAction {
+                            strongSelf.callKitIntegration?.answerCall(uuid: strongSelf.internalId)
+                        }
                     } else {
                         let _ = strongSelf.hangUp().start()
                     }
                 })
             } else {
                 strongSelf.callSessionManager.accept(internalId: strongSelf.internalId)
-                strongSelf.callKitIntegration?.answerCall(uuid: strongSelf.internalId)
+                if !fromCallKitAction {
+                    strongSelf.callKitIntegration?.answerCall(uuid: strongSelf.internalId)
+                }
             }
         } else {
             let _ = strongSelf.hangUp().start()
@@ -1031,7 +1046,11 @@
         ))
         
         if let audioSessionControl = self.audioSessionControl {
-            audioSessionControl.setOutputMode(.custom(output))
+            if let callKitIntegration = self.callKitIntegration {
+                callKitIntegration.applyVoiceChatOutputMode(outputMode: .custom(self.currentAudioOutputValue))
+            } else {
+                audioSessionControl.setOutputMode(.custom(output))
+            }
         }
     }
diff --git a/submodules/TelegramCallsUI/Sources/PresentationCallManager.swift b/submodules/TelegramCallsUI/Sources/PresentationCallManager.swift
index 99f57fb5fe..0337e2688e 100644
--- a/submodules/TelegramCallsUI/Sources/PresentationCallManager.swift
+++ b/submodules/TelegramCallsUI/Sources/PresentationCallManager.swift
@@ -240,7 +240,7 @@ public final class PresentationCallManagerImpl: PresentationCallManager {
         
         answerCallImpl = { [weak self] uuid in
             if let strongSelf = self {
-                strongSelf.currentCall?.answer()
+                strongSelf.currentCall?.answer(fromCallKitAction: true)
             }
         }
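
The answer(fromCallKitAction:) split above exists because an answer that originates from CallKit's own CXAnswerCallAction must not request a second answer transaction for the same call — the provider is already performing one. A sketch of the guard, with hypothetical protocol stubs standing in for the real call-session and integration types:

import Foundation

// Sketch only: CallSessionAccepting / CallKitAnswering are illustrative stubs.
protocol CallSessionAccepting {
    var uuid: UUID { get }
    func accept()
}

protocol CallKitAnswering {
    func answerCall(uuid: UUID)
}

func answer(session: CallSessionAccepting, callKit: CallKitAnswering?, fromCallKitAction: Bool) {
    session.accept()
    if !fromCallKitAction {
        // In-app answer: route through CallKit so the system UI stays in sync.
        callKit?.answerCall(uuid: session.uuid)
    }
    // CallKit-originated answer: the CXAnswerCallAction is already in flight
    // and is fulfilled by the provider delegate, so no transaction is requested.
}
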
diff --git a/submodules/TelegramCallsUI/Sources/PresentationGroupCall.swift b/submodules/TelegramCallsUI/Sources/PresentationGroupCall.swift
index 369a610dff..51ef7b5d83 100644
--- a/submodules/TelegramCallsUI/Sources/PresentationGroupCall.swift
+++ b/submodules/TelegramCallsUI/Sources/PresentationGroupCall.swift
@@ -1648,7 +1648,7 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
                 outgoingAudioBitrateKbit = Int32(value)
             }
             
-            genericCallContext = .call(OngoingGroupCallContext(video: self.videoCapturer, requestMediaChannelDescriptions: { [weak self] ssrcs, completion in
+            genericCallContext = .call(OngoingGroupCallContext(audioSessionActive: self.audioSessionActive.get(), video: self.videoCapturer, requestMediaChannelDescriptions: { [weak self] ssrcs, completion in
                 let disposable = MetaDisposable()
                 Queue.mainQueue().async {
                     guard let strongSelf = self else {
@@ -2966,7 +2966,7 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
         
         self.hasScreencast = true
         
-        let screencastCallContext = OngoingGroupCallContext(video: self.screencastCapturer, requestMediaChannelDescriptions: { _, _ in EmptyDisposable }, rejoinNeeded: { }, outgoingAudioBitrateKbit: nil, videoContentType: .screencast, enableNoiseSuppression: false, disableAudioInput: true, preferX264: false, logPath: "")
+        let screencastCallContext = OngoingGroupCallContext(audioSessionActive: .single(true), video: self.screencastCapturer, requestMediaChannelDescriptions: { _, _ in EmptyDisposable }, rejoinNeeded: { }, outgoingAudioBitrateKbit: nil, videoContentType: .screencast, enableNoiseSuppression: false, disableAudioInput: true, preferX264: false, logPath: "")
         self.screencastCallContext = screencastCallContext
         
         self.screencastJoinDisposable.set((screencastCallContext.joinPayload
diff --git a/submodules/TelegramVoip/Sources/GroupCallContext.swift b/submodules/TelegramVoip/Sources/GroupCallContext.swift
index cc912d35a3..ddec28e0e7 100644
--- a/submodules/TelegramVoip/Sources/GroupCallContext.swift
+++ b/submodules/TelegramVoip/Sources/GroupCallContext.swift
@@ -416,7 +416,9 @@ public final class OngoingGroupCallContext {
         
         private let broadcastPartsSource = Atomic<BroadcastPartSource?>(value: nil)
         
-        init(queue: Queue, inputDeviceId: String, outputDeviceId: String, video: OngoingCallVideoCapturer?, requestMediaChannelDescriptions: @escaping (Set<UInt32>, @escaping ([MediaChannelDescription]) -> Void) -> Disposable, rejoinNeeded: @escaping () -> Void, outgoingAudioBitrateKbit: Int32?, videoContentType: VideoContentType, enableNoiseSuppression: Bool, disableAudioInput: Bool, preferX264: Bool, logPath: String) {
+        private let audioSessionActiveDisposable = MetaDisposable()
+        
+        init(queue: Queue, inputDeviceId: String, outputDeviceId: String, audioSessionActive: Signal<Bool, NoError>, video: OngoingCallVideoCapturer?, requestMediaChannelDescriptions: @escaping (Set<UInt32>, @escaping ([MediaChannelDescription]) -> Void) -> Disposable, rejoinNeeded: @escaping () -> Void, outgoingAudioBitrateKbit: Int32?, videoContentType: VideoContentType, enableNoiseSuppression: Bool, disableAudioInput: Bool, preferX264: Bool, logPath: String) {
             self.queue = queue
             
             var networkStateUpdatedImpl: ((GroupCallNetworkState) -> Void)?
@@ -571,9 +573,20 @@
                     strongSelf.joinPayload.set(.single((payload, ssrc)))
                 }
             })
+            
+            self.audioSessionActiveDisposable.set((audioSessionActive
+            |> deliverOn(queue)).start(next: { [weak self] isActive in
+                guard let `self` = self else {
+                    return
+                }
+                #if os(iOS)
+                self.context.setManualAudioSessionIsActive(isActive)
+                #endif
+            }))
         }
         
         deinit {
+            self.audioSessionActiveDisposable.dispose()
         }
         
         func setJoinResponse(payload: String) {
@@ -936,10 +949,10 @@
         }
     }
    
-    public init(inputDeviceId: String = "", outputDeviceId: String = "", video: OngoingCallVideoCapturer?, requestMediaChannelDescriptions: @escaping (Set<UInt32>, @escaping ([MediaChannelDescription]) -> Void) -> Disposable, rejoinNeeded: @escaping () -> Void, outgoingAudioBitrateKbit: Int32?, videoContentType: VideoContentType, enableNoiseSuppression: Bool, disableAudioInput: Bool, preferX264: Bool, logPath: String) {
+    public init(inputDeviceId: String = "", outputDeviceId: String = "", audioSessionActive: Signal<Bool, NoError>, video: OngoingCallVideoCapturer?, requestMediaChannelDescriptions: @escaping (Set<UInt32>, @escaping ([MediaChannelDescription]) -> Void) -> Disposable, rejoinNeeded: @escaping () -> Void, outgoingAudioBitrateKbit: Int32?, videoContentType: VideoContentType, enableNoiseSuppression: Bool, disableAudioInput: Bool, preferX264: Bool, logPath: String) {
         let queue = self.queue
         self.impl = QueueLocalObject(queue: queue, generate: {
-            return Impl(queue: queue, inputDeviceId: inputDeviceId, outputDeviceId: outputDeviceId, video: video, requestMediaChannelDescriptions: requestMediaChannelDescriptions, rejoinNeeded: rejoinNeeded, outgoingAudioBitrateKbit: outgoingAudioBitrateKbit, videoContentType: videoContentType, enableNoiseSuppression: enableNoiseSuppression, disableAudioInput: disableAudioInput, preferX264: preferX264, logPath: logPath)
+            return Impl(queue: queue, inputDeviceId: inputDeviceId, outputDeviceId: outputDeviceId, audioSessionActive: audioSessionActive, video: video, requestMediaChannelDescriptions: requestMediaChannelDescriptions, rejoinNeeded: rejoinNeeded, outgoingAudioBitrateKbit: outgoingAudioBitrateKbit, videoContentType: videoContentType, enableNoiseSuppression: enableNoiseSuppression, disableAudioInput: disableAudioInput, preferX264: preferX264, logPath: logPath)
         })
     }
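
Both group-call changes above follow one pattern: the context subscribes to an audioSessionActive signal on its own queue and forwards changes into the native instance for the lifetime of the context. The pattern in isolation — a sketch assuming SwiftSignalKit's Signal, Queue, and MetaDisposable; AudioActivityForwarder is an illustrative wrapper, not part of this change:

import SwiftSignalKit

// Sketch only: mirrors the audioSessionActiveDisposable wiring in Impl above.
final class AudioActivityForwarder {
    private let disposable = MetaDisposable()
    
    init(audioSessionActive: Signal<Bool, NoError>, queue: Queue, forward: @escaping (Bool) -> Void) {
        self.disposable.set((audioSessionActive
        |> deliverOn(queue)).start(next: { isActive in
            // Delivered on the context's queue, like setManualAudioSessionIsActive.
            forward(isActive)
        }))
    }
    
    deinit {
        // Matches Impl.deinit above: stop observing when the context goes away.
        self.disposable.dispose()
    }
}

The screencast context passes .single(true), so its native context always treats the session as active.
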
diff --git a/submodules/TelegramVoip/Sources/OngoingCallContext.swift b/submodules/TelegramVoip/Sources/OngoingCallContext.swift
index 4a92c3708f..a4bdf14d58 100644
--- a/submodules/TelegramVoip/Sources/OngoingCallContext.swift
+++ b/submodules/TelegramVoip/Sources/OngoingCallContext.swift
@@ -326,6 +326,7 @@ private protocol OngoingCallThreadLocalContextProtocol: AnyObject {
     func nativeVersion() -> String
     func nativeGetDerivedState() -> Data
     func addExternalAudioData(data: Data)
+    func nativeSetIsAudioSessionActive(isActive: Bool)
 }
 
 private final class OngoingCallThreadLocalContextHolder {
@@ -381,6 +382,9 @@ extension OngoingCallThreadLocalContext: OngoingCallThreadLocalContextProtocol {
     
     func addExternalAudioData(data: Data) {
     }
+    
+    func nativeSetIsAudioSessionActive(isActive: Bool) {
+    }
 }
 
 public final class OngoingCallVideoCapturer {
@@ -573,6 +577,12 @@ extension OngoingCallThreadLocalContextWebrtc: OngoingCallThreadLocalContextProt
     func addExternalAudioData(data: Data) {
         self.addExternalAudioData(data)
     }
+    
+    func nativeSetIsAudioSessionActive(isActive: Bool) {
+        #if os(iOS)
+        self.setManualAudioSessionIsActive(isActive)
+        #endif
+    }
 }
 
 private extension OngoingCallContextState.State {
@@ -696,6 +706,10 @@
         }
     }
     
+    public static func setupAudioSession() {
+        OngoingCallThreadLocalContextWebrtc.setupAudioSession()
+    }
+    
     public let callId: CallId
     public let internalId: CallSessionInternalId
     
@@ -726,13 +740,13 @@
     }
     
     private let audioSessionDisposable = MetaDisposable()
+    private let audioSessionActiveDisposable = MetaDisposable()
     private var networkTypeDisposable: Disposable?
     
     public static var maxLayer: Int32 {
         return OngoingCallThreadLocalContext.maxLayer()
     }
     
-    private let tempLogFile: EngineTempBoxFile
     private let tempStatsLogFile: EngineTempBoxFile
     
     private var signalingConnectionManager: QueueLocalObject<CallSignalingConnectionManager>?
@@ -765,8 +779,8 @@
         self.callSessionManager = callSessionManager
         self.logPath = logName.isEmpty ? "" : callLogsPath(account: self.account) + "/" + logName + ".log"
         let logPath = self.logPath
-        self.tempLogFile = EngineTempBox.shared.tempFile(fileName: "CallLog.txt")
-        let tempLogPath = self.tempLogFile.path
+        
+        let _ = try? FileManager.default.createDirectory(atPath: callLogsPath(account: account), withIntermediateDirectories: true, attributes: nil)
         
         self.tempStatsLogFile = EngineTempBox.shared.tempFile(fileName: "CallStats.json")
         let tempStatsLogPath = self.tempStatsLogFile.path
@@ -871,7 +885,7 @@
                 }
             }
             
-            let context = OngoingCallThreadLocalContextWebrtc(version: version, queue: OngoingCallThreadLocalContextQueueImpl(queue: queue), proxy: voipProxyServer, networkType: ongoingNetworkTypeForTypeWebrtc(initialNetworkType), dataSaving: ongoingDataSavingForTypeWebrtc(dataSaving), derivedState: Data(), key: key, isOutgoing: isOutgoing, connections: filteredConnections, maxLayer: maxLayer, allowP2P: allowP2P, allowTCP: enableTCP, enableStunMarking: enableStunMarking, logPath: tempLogPath, statsLogPath: tempStatsLogPath, sendSignalingData: { [weak callSessionManager] data in
+            let context = OngoingCallThreadLocalContextWebrtc(version: version, queue: OngoingCallThreadLocalContextQueueImpl(queue: queue), proxy: voipProxyServer, networkType: ongoingNetworkTypeForTypeWebrtc(initialNetworkType), dataSaving: ongoingDataSavingForTypeWebrtc(dataSaving), derivedState: Data(), key: key, isOutgoing: isOutgoing, connections: filteredConnections, maxLayer: maxLayer, allowP2P: allowP2P, allowTCP: enableTCP, enableStunMarking: enableStunMarking, logPath: logPath, statsLogPath: tempStatsLogPath, sendSignalingData: { [weak callSessionManager] data in
                 queue.async {
                     guard let strongSelf = self else {
                         return
@@ -886,7 +900,7 @@
                         callSessionManager.sendSignalingData(internalId: internalId, data: data)
                     }
                 }
-            }, videoCapturer: video?.impl, preferredVideoCodec: preferredVideoCodec, audioInputDeviceId: "")
+            }, videoCapturer: video?.impl, preferredVideoCodec: preferredVideoCodec, audioInputDeviceId: "", useManualAudioSessionControl: true)
             
             strongSelf.contextRef = Unmanaged.passRetained(OngoingCallThreadLocalContextHolder(context))
             context.stateChanged = { [weak callSessionManager] state, videoState, remoteVideoState, remoteAudioState, remoteBatteryLevel, _ in
@@ -950,6 +964,16 @@
                 self?.audioLevelPromise.set(.single(level))
             }
             
+            strongSelf.audioSessionActiveDisposable.set((audioSessionActive
+            |> deliverOn(queue)).start(next: { isActive in
+                guard let strongSelf = self else {
+                    return
+                }
+                strongSelf.withContext { context in
+                    context.nativeSetIsAudioSessionActive(isActive: isActive)
+                }
+            }))
+            
             strongSelf.networkTypeDisposable = (updatedNetworkType
             |> deliverOn(queue)).start(next: { networkType in
                 self?.withContext { context in
@@ -1010,6 +1034,7 @@
         }
         
         self.audioSessionDisposable.dispose()
+        self.audioSessionActiveDisposable.dispose()
         self.networkTypeDisposable?.dispose()
     }
     
@@ -1048,7 +1073,6 @@
         if !logPath.isEmpty {
             statsLogPath = logPath + ".json"
         }
-        let tempLogPath = self.tempLogFile.path
         let tempStatsLogPath = self.tempStatsLogFile.path
         
         self.withContextThenDeallocate { context in
@@ -1062,12 +1086,6 @@
                     outgoing: bytesSentWifi))
                 updateAccountNetworkUsageStats(account: self.account, category: .call, delta: delta)
                 
-                if !logPath.isEmpty {
-                    let logsPath = callLogsPath(account: account)
-                    let _ = try? FileManager.default.createDirectory(atPath: logsPath, withIntermediateDirectories: true, attributes: nil)
-                    let _ = try? FileManager.default.moveItem(atPath: tempLogPath, toPath: logPath)
-                }
-                
                 if !statsLogPath.isEmpty {
                     let logsPath = callLogsPath(account: account)
                     let _ = try? FileManager.default.createDirectory(atPath: logsPath, withIntermediateDirectories: true, attributes: nil)
@@ -1256,6 +1274,8 @@ private final class CallSignalingConnectionImpl: CallSignalingConnection {
     }
     
     func start() {
+        OngoingCallThreadLocalContextWebrtc.logMessage("CallSignaling: Connecting...")
+        
         self.connection.start(queue: self.queue.queue)
         self.receivePacketHeader()
     }
@@ -1487,4 +1507,3 @@ private final class CallSignalingConnectionManager {
         }
     }
 }
-
diff --git a/submodules/TgVoipWebrtc/PublicHeaders/TgVoipWebrtc/OngoingCallThreadLocalContext.h b/submodules/TgVoipWebrtc/PublicHeaders/TgVoipWebrtc/OngoingCallThreadLocalContext.h
index d75206844c..6325fd73a7 100644
--- a/submodules/TgVoipWebrtc/PublicHeaders/TgVoipWebrtc/OngoingCallThreadLocalContext.h
+++ b/submodules/TgVoipWebrtc/PublicHeaders/TgVoipWebrtc/OngoingCallThreadLocalContext.h
@@ -202,11 +202,30 @@ typedef NS_ENUM(int32_t, OngoingCallDataSavingWebrtc) {
 + (int32_t)maxLayer;
 + (NSArray<NSString *> * _Nonnull)versionsWithIncludeReference:(bool)includeReference;
 
++ (void)setupAudioSession;
+
 @property (nonatomic, copy) void (^ _Nullable stateChanged)(OngoingCallStateWebrtc, OngoingCallVideoStateWebrtc, OngoingCallRemoteVideoStateWebrtc, OngoingCallRemoteAudioStateWebrtc, OngoingCallRemoteBatteryLevelWebrtc, float);
 @property (nonatomic, copy) void (^ _Nullable signalBarsChanged)(int32_t);
 @property (nonatomic, copy) void (^ _Nullable audioLevelUpdated)(float);
 
-- (instancetype _Nonnull)initWithVersion:(NSString * _Nonnull)version queue:(id<OngoingCallThreadLocalContextQueueWebrtc> _Nonnull)queue proxy:(VoipProxyServerWebrtc * _Nullable)proxy networkType:(OngoingCallNetworkTypeWebrtc)networkType dataSaving:(OngoingCallDataSavingWebrtc)dataSaving derivedState:(NSData * _Nonnull)derivedState key:(NSData * _Nonnull)key isOutgoing:(bool)isOutgoing connections:(NSArray<OngoingCallConnectionDescriptionWebrtc *> * _Nonnull)connections maxLayer:(int32_t)maxLayer allowP2P:(BOOL)allowP2P allowTCP:(BOOL)allowTCP enableStunMarking:(BOOL)enableStunMarking logPath:(NSString * _Nonnull)logPath statsLogPath:(NSString * _Nonnull)statsLogPath sendSignalingData:(void (^ _Nonnull)(NSData * _Nonnull))sendSignalingData videoCapturer:(OngoingCallThreadLocalContextVideoCapturer * _Nullable)videoCapturer preferredVideoCodec:(NSString * _Nullable)preferredVideoCodec audioInputDeviceId: (NSString * _Nonnull)audioInputDeviceId;
+- (instancetype _Nonnull)initWithVersion:(NSString * _Nonnull)version queue:(id<OngoingCallThreadLocalContextQueueWebrtc> _Nonnull)queue
+    proxy:(VoipProxyServerWebrtc * _Nullable)proxy
+    networkType:(OngoingCallNetworkTypeWebrtc)networkType dataSaving:(OngoingCallDataSavingWebrtc)dataSaving
+    derivedState:(NSData * _Nonnull)derivedState
+    key:(NSData * _Nonnull)key
+    isOutgoing:(bool)isOutgoing
+    connections:(NSArray<OngoingCallConnectionDescriptionWebrtc *> * _Nonnull)connections maxLayer:(int32_t)maxLayer
+    allowP2P:(BOOL)allowP2P
+    allowTCP:(BOOL)allowTCP
+    enableStunMarking:(BOOL)enableStunMarking
+    logPath:(NSString * _Nonnull)logPath
+    statsLogPath:(NSString * _Nonnull)statsLogPath
+    sendSignalingData:(void (^ _Nonnull)(NSData * _Nonnull))sendSignalingData videoCapturer:(OngoingCallThreadLocalContextVideoCapturer * _Nullable)videoCapturer
+    preferredVideoCodec:(NSString * _Nullable)preferredVideoCodec
+    audioInputDeviceId:(NSString * _Nonnull)audioInputDeviceId
+    useManualAudioSessionControl:(bool)useManualAudioSessionControl;
+
+- (void)setManualAudioSessionIsActive:(bool)isAudioSessionActive;
 
 - (void)beginTermination;
 - (void)stop:(void (^_Nullable)(NSString * _Nullable debugLog, int64_t bytesSentWifi, int64_t bytesReceivedWifi, int64_t bytesSentMobile, int64_t bytesReceivedMobile))completion;
@@ -360,6 +379,8 @@ typedef NS_ENUM(int32_t, OngoingGroupCallRequestedVideoQuality) {
 
 - (void)stop;
 
+- (void)setManualAudioSessionIsActive:(bool)isAudioSessionActive;
+
 - (void)setConnectionMode:(OngoingCallConnectionMode)connectionMode keepBroadcastConnectedIfWasEnabled:(bool)keepBroadcastConnectedIfWasEnabled isUnifiedBroadcast:(bool)isUnifiedBroadcast;
 
 - (void)emitJoinPayload:(void (^ _Nonnull)(NSString * _Nonnull, uint32_t))completion;
diff --git a/submodules/TgVoipWebrtc/Sources/OngoingCallThreadLocalContext.mm b/submodules/TgVoipWebrtc/Sources/OngoingCallThreadLocalContext.mm
index 1c0cbe2633..0ca3837f05 100644
--- a/submodules/TgVoipWebrtc/Sources/OngoingCallThreadLocalContext.mm
+++ b/submodules/TgVoipWebrtc/Sources/OngoingCallThreadLocalContext.mm
@@ -28,6 +28,9 @@
 
 #include "platform/darwin/iOS/tgcalls_audio_device_module_ios.h"
 
+#include "platform/darwin/iOS/RTCAudioSession.h"
+#include "platform/darwin/iOS/RTCAudioSessionConfiguration.h"
+
 #endif
 
 #import "group/GroupInstanceImpl.h"
@@ -626,7 +629,7 @@ tgcalls::VideoCaptureInterfaceObject *GetVideoCaptureAssumingSameThread(tgcalls:
         }
         
         std::shared_ptr<tgcalls::VideoCaptureInterface> interface = strongSelf->_interface;
-        if (false && requestClone) {
+        /*if (false && requestClone) {
             VideoSampleBufferView *remoteRenderer = [[VideoSampleBufferView alloc] initWithFrame:CGRectZero];
             remoteRenderer.videoContentMode = UIViewContentModeScaleAspectFill;
             
@@ -643,7 +646,7 @@ tgcalls::VideoCaptureInterfaceObject *GetVideoCaptureAssumingSameThread(tgcalls:
             }
             
             completion(remoteRenderer, cloneRenderer);
-        } else if ([VideoMetalView isSupported]) {
+        } else */if ([VideoMetalView isSupported]) {
             VideoMetalView *remoteRenderer = [[VideoMetalView alloc] initWithFrame:CGRectZero];
             remoteRenderer.videoContentMode = UIViewContentModeScaleAspectFill;
@@ -705,6 +708,8 @@ tgcalls::VideoCaptureInterfaceObject *GetVideoCaptureAssumingSameThread(tgcalls:
     id<OngoingCallThreadLocalContextQueueWebrtc> _queue;
     int32_t _contextId;
     
+    bool _useManualAudioSessionControl;
+    
     OngoingCallNetworkTypeWebrtc _networkType;
     NSTimeInterval _callReceiveTimeout;
     NSTimeInterval _callRingTimeout;
@@ -803,6 +808,18 @@ static void (*InternalVoipLoggingFunction)(NSString *) = NULL;
     }
 }
 
++ (void)setupAudioSession {
+    RTCAudioSessionConfiguration *sharedConfiguration = [RTCAudioSessionConfiguration webRTCConfiguration];
+    sharedConfiguration.mode = AVAudioSessionModeVoiceChat;
+    sharedConfiguration.categoryOptions |= AVAudioSessionCategoryOptionMixWithOthers;
+    sharedConfiguration.outputNumberOfChannels = 1;
+    [RTCAudioSessionConfiguration setWebRTCConfiguration:sharedConfiguration];
+    
+    [[RTCAudioSession sharedInstance] lockForConfiguration];
+    [[RTCAudioSession sharedInstance] setConfiguration:sharedConfiguration active:false error:nil disableRecording:false];
+    [[RTCAudioSession sharedInstance] unlockForConfiguration];
+}
+
 + (int32_t)maxLayer {
     return 92;
 }
@@ -843,7 +860,22 @@ static void (*InternalVoipLoggingFunction)(NSString *) = NULL;
     }
 }
 
-- (instancetype _Nonnull)initWithVersion:(NSString * _Nonnull)version queue:(id<OngoingCallThreadLocalContextQueueWebrtc> _Nonnull)queue proxy:(VoipProxyServerWebrtc * _Nullable)proxy networkType:(OngoingCallNetworkTypeWebrtc)networkType dataSaving:(OngoingCallDataSavingWebrtc)dataSaving derivedState:(NSData * _Nonnull)derivedState key:(NSData * _Nonnull)key isOutgoing:(bool)isOutgoing connections:(NSArray<OngoingCallConnectionDescriptionWebrtc *> * _Nonnull)connections maxLayer:(int32_t)maxLayer allowP2P:(BOOL)allowP2P allowTCP:(BOOL)allowTCP enableStunMarking:(BOOL)enableStunMarking logPath:(NSString * _Nonnull)logPath statsLogPath:(NSString * _Nonnull)statsLogPath sendSignalingData:(void (^)(NSData * _Nonnull))sendSignalingData videoCapturer:(OngoingCallThreadLocalContextVideoCapturer * _Nullable)videoCapturer preferredVideoCodec:(NSString * _Nullable)preferredVideoCodec audioInputDeviceId: (NSString * _Nonnull)audioInputDeviceId {
+- (instancetype _Nonnull)initWithVersion:(NSString * _Nonnull)version queue:(id<OngoingCallThreadLocalContextQueueWebrtc> _Nonnull)queue
+    proxy:(VoipProxyServerWebrtc * _Nullable)proxy
+    networkType:(OngoingCallNetworkTypeWebrtc)networkType dataSaving:(OngoingCallDataSavingWebrtc)dataSaving
+    derivedState:(NSData * _Nonnull)derivedState
+    key:(NSData * _Nonnull)key
+    isOutgoing:(bool)isOutgoing
+    connections:(NSArray<OngoingCallConnectionDescriptionWebrtc *> * _Nonnull)connections maxLayer:(int32_t)maxLayer
+    allowP2P:(BOOL)allowP2P
+    allowTCP:(BOOL)allowTCP
+    enableStunMarking:(BOOL)enableStunMarking
+    logPath:(NSString * _Nonnull)logPath
+    statsLogPath:(NSString * _Nonnull)statsLogPath
+    sendSignalingData:(void (^ _Nonnull)(NSData * _Nonnull))sendSignalingData videoCapturer:(OngoingCallThreadLocalContextVideoCapturer * _Nullable)videoCapturer
+    preferredVideoCodec:(NSString * _Nullable)preferredVideoCodec
+    audioInputDeviceId:(NSString * _Nonnull)audioInputDeviceId
+    useManualAudioSessionControl:(bool)useManualAudioSessionControl {
     self = [super init];
     if (self != nil) {
         _version = version;
@@ -852,6 +884,25 @@ static void (*InternalVoipLoggingFunction)(NSString *) = NULL;
         
         assert([[OngoingCallThreadLocalContextWebrtc versionsWithIncludeReference:true] containsObject:version]);
         
+        _useManualAudioSessionControl = useManualAudioSessionControl;
+        [RTCAudioSession sharedInstance].useManualAudio = true;
+        
+#ifdef WEBRTC_IOS
+        RTCAudioSessionConfiguration *sharedConfiguration = [RTCAudioSessionConfiguration webRTCConfiguration];
+        if (useManualAudioSessionControl) {
+            sharedConfiguration.mode = AVAudioSessionModeVoiceChat;
+        } else {
+            sharedConfiguration.mode = AVAudioSessionModeVoiceChat;
+        }
+        sharedConfiguration.categoryOptions |= AVAudioSessionCategoryOptionMixWithOthers;
+        sharedConfiguration.outputNumberOfChannels = 1;
+        [RTCAudioSessionConfiguration setWebRTCConfiguration:sharedConfiguration];
+        
+        /*[RTCAudioSession sharedInstance].useManualAudio = true;
+        [[RTCAudioSession sharedInstance] audioSessionDidActivate:[AVAudioSession sharedInstance]];
+        [RTCAudioSession sharedInstance].isAudioEnabled = true;*/
+#endif
+        
         _callReceiveTimeout = 20.0;
         _callRingTimeout = 90.0;
         _callConnectTimeout = 30.0;
@@ -1094,6 +1145,17 @@ static void (*InternalVoipLoggingFunction)(NSString *) = NULL;
 - (void)beginTermination {
 }
 
+- (void)setManualAudioSessionIsActive:(bool)isAudioSessionActive {
+    if (_useManualAudioSessionControl) {
+        if (isAudioSessionActive) {
+            [[RTCAudioSession sharedInstance] audioSessionDidActivate:[AVAudioSession sharedInstance]];
+        } else {
+            [[RTCAudioSession sharedInstance] audioSessionDidDeactivate:[AVAudioSession sharedInstance]];
+        }
+        [RTCAudioSession sharedInstance].isAudioEnabled = isAudioSessionActive;
+    }
+}
+
 + (void)stopWithTerminationResult:(OngoingCallThreadLocalContextWebrtcTerminationResult *)terminationResult completion:(void (^)(NSString *, int64_t, int64_t, int64_t, int64_t))completion {
     if (completion) {
         if (terminationResult) {
@@ -1429,6 +1491,22 @@ private:
         }
     }
     
+#ifdef WEBRTC_IOS
+    RTCAudioSessionConfiguration *sharedConfiguration = [RTCAudioSessionConfiguration webRTCConfiguration];
+    sharedConfiguration.mode = AVAudioSessionModeVoiceChat;
+    sharedConfiguration.categoryOptions |= AVAudioSessionCategoryOptionMixWithOthers;
+    if (disableAudioInput) {
+        sharedConfiguration.outputNumberOfChannels = 2;
+    } else {
+        sharedConfiguration.outputNumberOfChannels = 1;
+    }
+    [RTCAudioSessionConfiguration setWebRTCConfiguration:sharedConfiguration];
+    
+    /*[RTCAudioSession sharedInstance].useManualAudio = true;
+    [[RTCAudioSession sharedInstance] audioSessionDidActivate:[AVAudioSession sharedInstance]];
+    [RTCAudioSession sharedInstance].isAudioEnabled = true;*/
+#endif
+    
     std::vector<tgcalls::VideoCodecName> videoCodecPreferences;
     
     int minOutgoingVideoBitrateKbit = 500;
@@ -1612,6 +1690,15 @@ private:
     }
 }
 
+- (void)setManualAudioSessionIsActive:(bool)isAudioSessionActive {
+    if (isAudioSessionActive) {
+        [[RTCAudioSession sharedInstance] audioSessionDidActivate:[AVAudioSession sharedInstance]];
+    } else {
+        [[RTCAudioSession sharedInstance] audioSessionDidDeactivate:[AVAudioSession sharedInstance]];
+    }
+    [RTCAudioSession sharedInstance].isAudioEnabled = isAudioSessionActive;
+}
+
 - (void)setConnectionMode:(OngoingCallConnectionMode)connectionMode keepBroadcastConnectedIfWasEnabled:(bool)keepBroadcastConnectedIfWasEnabled isUnifiedBroadcast:(bool)isUnifiedBroadcast {
     if (_instance) {
         tgcalls::GroupConnectionMode mappedConnectionMode;
diff --git a/submodules/TgVoipWebrtc/tgcalls b/submodules/TgVoipWebrtc/tgcalls
index 53bb1711ae..6cb21fc91b 160000
--- a/submodules/TgVoipWebrtc/tgcalls
+++ b/submodules/TgVoipWebrtc/tgcalls
@@ -1 +1 @@
-Subproject commit 53bb1711ae0b3810d34edb1c81982b18d70c5506
+Subproject commit 6cb21fc91be59356d02059e639df87a3b544bfb3
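
The RTCAudioSession changes above are the two halves of WebRTC's manual-audio contract: useManualAudio stops WebRTC from activating AVAudioSession on its own, while audioSessionDidActivate/audioSessionDidDeactivate plus isAudioEnabled tell it when the session that CallKit activated is usable. The same handshake rendered in Swift — a sketch that assumes the WebRTC Objective-C classes are visible from Swift (the module name below is an assumption; this repo vendors the same API):

import AVFoundation
import WebRTC // assumption: module exposing RTCAudioSession

// Sketch only: Swift equivalent of setManualAudioSessionIsActive above.
func setManualAudioSessionActive(_ active: Bool) {
    let rtcSession = RTCAudioSession.sharedInstance()
    if active {
        // Hand WebRTC the session that CallKit (or ManagedAudioSession) activated.
        rtcSession.audioSessionDidActivate(AVAudioSession.sharedInstance())
    } else {
        rtcSession.audioSessionDidDeactivate(AVAudioSession.sharedInstance())
    }
    // With useManualAudio = true, WebRTC starts and stops its audio unit off
    // this flag instead of managing the session itself.
    rtcSession.isAudioEnabled = active
}
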