Revert "Roll back tgcalls"
This reverts commit f93b032a6a699c57212bbbe94cead2e09a037287.
This commit is contained in:
parent
792baf4071
commit
d391651586
@@ -179,6 +179,8 @@ public class ManagedAudioSessionControl {
     }
 }
 
 public final class ManagedAudioSession {
+    public private(set) static var shared: ManagedAudioSession?
+
     private var nextId: Int32 = 0
     private let queue: Queue
     private let hasLoudspeaker: Bool
@@ -256,6 +258,8 @@ public final class ManagedAudioSession {
             self.isHeadsetPluggedInValue = self.isHeadsetPluggedIn()
             self.updateCurrentAudioRouteInfo()
         }
+
+        ManagedAudioSession.shared = self
     }
 
     deinit {
@@ -784,6 +788,61 @@ public final class ManagedAudioSession {
         }
     }
 
+    public func applyVoiceChatOutputModeInCurrentAudioSession(outputMode: AudioSessionOutputMode) {
+        managedAudioSessionLog("applyVoiceChatOutputModeInCurrentAudioSession \(outputMode)")
+
+        do {
+            var resetToBuiltin = false
+            switch outputMode {
+            case .system:
+                resetToBuiltin = true
+            case let .custom(output):
+                switch output {
+                case .builtin:
+                    resetToBuiltin = true
+                case .speaker:
+                    if let routes = AVAudioSession.sharedInstance().availableInputs {
+                        for route in routes {
+                            if route.portType == .builtInMic {
+                                let _ = try? AVAudioSession.sharedInstance().setPreferredInput(route)
+                                break
+                            }
+                        }
+                    }
+                    try AVAudioSession.sharedInstance().overrideOutputAudioPort(.speaker)
+                case .headphones:
+                    break
+                case let .port(port):
+                    try AVAudioSession.sharedInstance().overrideOutputAudioPort(.none)
+                    if let routes = AVAudioSession.sharedInstance().availableInputs {
+                        for route in routes {
+                            if route.uid == port.uid {
+                                let _ = try? AVAudioSession.sharedInstance().setPreferredInput(route)
+                                break
+                            }
+                        }
+                    }
+                }
+            case .speakerIfNoHeadphones:
+                try AVAudioSession.sharedInstance().overrideOutputAudioPort(.none)
+            }
+
+            if resetToBuiltin {
+                try AVAudioSession.sharedInstance().overrideOutputAudioPort(.none)
+                if let routes = AVAudioSession.sharedInstance().availableInputs {
+                    for route in routes {
+                        if route.portType == .builtInMic {
+                            let _ = try? AVAudioSession.sharedInstance().setPreferredInput(route)
+                            break
+                        }
+                    }
+                }
+            }
+        } catch let e {
+            managedAudioSessionLog("applyVoiceChatOutputModeInCurrentAudioSession error: \(e)")
+        }
+    }
+
     private func setupOutputMode(_ outputMode: AudioSessionOutputMode, type: ManagedAudioSessionType) throws {
         managedAudioSessionLog("ManagedAudioSession setup \(outputMode) for \(type)")
         var resetToBuiltin = false
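For reference, the routing core of the new applyVoiceChatOutputModeInCurrentAudioSession boils down to two AVAudioSession calls: setPreferredInput(_:) to pin the built-in mic, and overrideOutputAudioPort(_:) to force or release the loudspeaker. A minimal standalone sketch of that core (the helper name forceSpeakerRoute is illustrative, not part of this commit):

import AVFoundation

// Minimal sketch: route output to the built-in speaker while keeping
// the built-in microphone as input. Mirrors the calls used above.
func forceSpeakerRoute() throws {
    let session = AVAudioSession.sharedInstance()
    // Pin the built-in microphone as the preferred input, if present.
    if let inputs = session.availableInputs {
        for input in inputs where input.portType == .builtInMic {
            let _ = try? session.setPreferredInput(input)
            break
        }
    }
    // Force the loudspeaker; pass .none to undo the override.
    try session.overrideOutputAudioPort(.speaker)
}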
@@ -8,6 +8,7 @@ import TelegramCore
 import SwiftSignalKit
 import AppBundle
 import AccountContext
+import TelegramAudio
 
 private let sharedProviderDelegate: AnyObject? = {
     if #available(iOSApplicationExtension 10.0, iOS 10.0, *) {
@@ -107,6 +108,10 @@ public final class CallKitIntegration {
             }
         }
     }
+
+    public func applyVoiceChatOutputMode(outputMode: AudioSessionOutputMode) {
+        (sharedProviderDelegate as? CallKitProviderDelegate)?.applyVoiceChatOutputMode(outputMode: outputMode)
+    }
 }
 
 @available(iOSApplicationExtension 10.0, iOS 10.0, *)
@@ -125,6 +130,9 @@ class CallKitProviderDelegate: NSObject, CXProviderDelegate {
     private var setCallMuted: ((UUID, Bool) -> Void)?
     private var audioSessionActivationChanged: ((Bool) -> Void)?
 
+    private var isAudioSessionActive: Bool = false
+    private var pendingVoiceChatOutputMode: AudioSessionOutputMode?
+
     private let disposableSet = DisposableSet()
 
     fileprivate var audioSessionActivePromise: ValuePromise<Bool>?
@@ -163,7 +171,7 @@ class CallKitProviderDelegate: NSObject, CXProviderDelegate {
     private func requestTransaction(_ transaction: CXTransaction, completion: ((Bool) -> Void)? = nil) {
         self.callController.request(transaction) { error in
             if let error = error {
-                print("Error requesting transaction: \(error)")
+                print("Error requesting transaction \(transaction): \(error)")
             }
             completion?(error == nil)
         }
@@ -238,6 +246,12 @@ class CallKitProviderDelegate: NSObject, CXProviderDelegate {
         update.supportsDTMF = false
         update.hasVideo = isVideo
 
+        do {
+            try AVAudioSession.sharedInstance().setMode(.voiceChat)
+        } catch let e {
+            print("AVAudioSession.sharedInstance().setMode(.voiceChat) error \(e)")
+        }
+
         self.provider.reportNewIncomingCall(with: uuid, update: update, completion: { error in
             completion?(error as NSError?)
         })
@@ -321,13 +335,28 @@ class CallKitProviderDelegate: NSObject, CXProviderDelegate {
     }
 
     func provider(_ provider: CXProvider, didActivate audioSession: AVAudioSession) {
+        print("provider didActivate default? \(audioSession === AVAudioSession.sharedInstance())")
+        self.isAudioSessionActive = true
         self.audioSessionActivationChanged?(true)
         self.audioSessionActivePromise?.set(true)
+
+        if let outputMode = self.pendingVoiceChatOutputMode {
+            self.pendingVoiceChatOutputMode = nil
+            ManagedAudioSession.shared?.applyVoiceChatOutputModeInCurrentAudioSession(outputMode: outputMode)
+        }
     }
 
     func provider(_ provider: CXProvider, didDeactivate audioSession: AVAudioSession) {
+        self.isAudioSessionActive = false
         self.audioSessionActivationChanged?(false)
         self.audioSessionActivePromise?.set(false)
     }
-}
 
+    func applyVoiceChatOutputMode(outputMode: AudioSessionOutputMode) {
+        if self.isAudioSessionActive {
+            ManagedAudioSession.shared?.applyVoiceChatOutputModeInCurrentAudioSession(outputMode: outputMode)
+        } else {
+            self.pendingVoiceChatOutputMode = outputMode
+        }
+    }
+}
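The delegate changes above implement a defer-until-active handoff: output-mode requests that arrive before CallKit activates the audio session are parked in pendingVoiceChatOutputMode and replayed from provider(_:didActivate:). The same pattern in isolation, as a hedged sketch (OutputModeGate and its members are hypothetical names, not part of this commit):

// Sketch of the defer-until-active pattern used by CallKitProviderDelegate:
// requests that arrive before the session is active are parked and replayed
// once CallKit activates it.
final class OutputModeGate<Mode> {
    private var isActive = false
    private var pending: Mode?
    private let apply: (Mode) -> Void

    init(apply: @escaping (Mode) -> Void) {
        self.apply = apply
    }

    // Called for every routing request (e.g. the user taps the speaker button).
    func request(_ mode: Mode) {
        if self.isActive {
            self.apply(mode)
        } else {
            self.pending = mode
        }
    }

    // Called from provider(_:didActivate:) / provider(_:didDeactivate:).
    func sessionActiveChanged(_ active: Bool) {
        self.isActive = active
        if active, let mode = self.pending {
            self.pending = nil
            self.apply(mode)
        }
    }
}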
@@ -413,6 +413,9 @@ public final class PresentationCallImpl: PresentationCall {
                 return
             }
             strongSelf.audioOutputStateValue = (availableOutputs, currentOutput)
+            if let currentOutput = currentOutput {
+                strongSelf.currentAudioOutputValue = currentOutput
+            }
 
             var signal: Signal<([AudioSessionOutput], AudioSessionOutput?), NoError> = .single((availableOutputs, currentOutput))
             if !didReceiveAudioOutputs {
@@ -437,7 +440,7 @@ public final class PresentationCallImpl: PresentationCall {
                 let audioSessionActive: Signal<Bool, NoError>
                 if let callKitIntegration = strongSelf.callKitIntegration {
                     audioSessionActive = callKitIntegration.audioSessionActive
-                    |> filter { $0 }
+                    /*|> filter { $0 }
                     |> timeout(2.0, queue: Queue.mainQueue(), alternate: Signal { subscriber in
                         if let strongSelf = self, let _ = strongSelf.audioSessionControl {
                             //audioSessionControl.activate({ _ in })
@@ -445,7 +448,7 @@ public final class PresentationCallImpl: PresentationCall {
                             subscriber.putNext(true)
                             subscriber.putCompletion()
                             return EmptyDisposable
-                    })
+                    })*/
                 } else {
                     audioSessionControl.activate({ _ in })
                     audioSessionActive = .single(true)
@@ -534,8 +537,12 @@ public final class PresentationCallImpl: PresentationCall {
         }
 
         if let audioSessionControl = audioSessionControl, previous == nil || previousControl == nil {
-            audioSessionControl.setOutputMode(.custom(self.currentAudioOutputValue))
-            audioSessionControl.setup(synchronous: true)
+            if let callKitIntegration = self.callKitIntegration {
+                callKitIntegration.applyVoiceChatOutputMode(outputMode: .custom(self.currentAudioOutputValue))
+            } else {
+                audioSessionControl.setOutputMode(.custom(self.currentAudioOutputValue))
+                audioSessionControl.setup(synchronous: true)
+            }
         }
 
         let mappedVideoState: PresentationCallState.VideoState
@@ -1031,7 +1038,11 @@ public final class PresentationCallImpl: PresentationCall {
             ))
 
             if let audioSessionControl = self.audioSessionControl {
-                audioSessionControl.setOutputMode(.custom(output))
+                if let callKitIntegration = self.callKitIntegration {
+                    callKitIntegration.applyVoiceChatOutputMode(outputMode: .custom(self.currentAudioOutputValue))
+                } else {
+                    audioSessionControl.setOutputMode(.custom(output))
+                }
             }
         }
 
@@ -1648,7 +1648,7 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
                 outgoingAudioBitrateKbit = Int32(value)
             }
 
-            genericCallContext = .call(OngoingGroupCallContext(video: self.videoCapturer, requestMediaChannelDescriptions: { [weak self] ssrcs, completion in
+            genericCallContext = .call(OngoingGroupCallContext(audioSessionActive: self.audioSessionActive.get(), video: self.videoCapturer, requestMediaChannelDescriptions: { [weak self] ssrcs, completion in
                 let disposable = MetaDisposable()
                 Queue.mainQueue().async {
                     guard let strongSelf = self else {
@@ -2966,7 +2966,7 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
 
         self.hasScreencast = true
 
-        let screencastCallContext = OngoingGroupCallContext(video: self.screencastCapturer, requestMediaChannelDescriptions: { _, _ in EmptyDisposable }, rejoinNeeded: { }, outgoingAudioBitrateKbit: nil, videoContentType: .screencast, enableNoiseSuppression: false, disableAudioInput: true, preferX264: false, logPath: "")
+        let screencastCallContext = OngoingGroupCallContext(audioSessionActive: .single(true), video: self.screencastCapturer, requestMediaChannelDescriptions: { _, _ in EmptyDisposable }, rejoinNeeded: { }, outgoingAudioBitrateKbit: nil, videoContentType: .screencast, enableNoiseSuppression: false, disableAudioInput: true, preferX264: false, logPath: "")
         self.screencastCallContext = screencastCallContext
 
         self.screencastJoinDisposable.set((screencastCallContext.joinPayload
@@ -416,7 +416,9 @@ public final class OngoingGroupCallContext {
 
         private let broadcastPartsSource = Atomic<BroadcastPartSource?>(value: nil)
 
-        init(queue: Queue, inputDeviceId: String, outputDeviceId: String, video: OngoingCallVideoCapturer?, requestMediaChannelDescriptions: @escaping (Set<UInt32>, @escaping ([MediaChannelDescription]) -> Void) -> Disposable, rejoinNeeded: @escaping () -> Void, outgoingAudioBitrateKbit: Int32?, videoContentType: VideoContentType, enableNoiseSuppression: Bool, disableAudioInput: Bool, preferX264: Bool, logPath: String) {
+        private let audioSessionActiveDisposable = MetaDisposable()
+
+        init(queue: Queue, inputDeviceId: String, outputDeviceId: String, audioSessionActive: Signal<Bool, NoError>, video: OngoingCallVideoCapturer?, requestMediaChannelDescriptions: @escaping (Set<UInt32>, @escaping ([MediaChannelDescription]) -> Void) -> Disposable, rejoinNeeded: @escaping () -> Void, outgoingAudioBitrateKbit: Int32?, videoContentType: VideoContentType, enableNoiseSuppression: Bool, disableAudioInput: Bool, preferX264: Bool, logPath: String) {
             self.queue = queue
 
             var networkStateUpdatedImpl: ((GroupCallNetworkState) -> Void)?
@@ -571,9 +573,20 @@ public final class OngoingGroupCallContext {
                     strongSelf.joinPayload.set(.single((payload, ssrc)))
                 }
             })
+
+            self.audioSessionActiveDisposable.set((audioSessionActive
+            |> deliverOn(queue)).start(next: { [weak self] isActive in
+                guard let `self` = self else {
+                    return
+                }
+                #if os(iOS)
+                self.context.setManualAudioSessionIsActive(isActive)
+                #endif
+            }))
         }
 
         deinit {
+            self.audioSessionActiveDisposable.dispose()
         }
 
         func setJoinResponse(payload: String) {
@@ -936,10 +949,10 @@ public final class OngoingGroupCallContext {
         }
     }
 
-    public init(inputDeviceId: String = "", outputDeviceId: String = "", video: OngoingCallVideoCapturer?, requestMediaChannelDescriptions: @escaping (Set<UInt32>, @escaping ([MediaChannelDescription]) -> Void) -> Disposable, rejoinNeeded: @escaping () -> Void, outgoingAudioBitrateKbit: Int32?, videoContentType: VideoContentType, enableNoiseSuppression: Bool, disableAudioInput: Bool, preferX264: Bool, logPath: String) {
+    public init(inputDeviceId: String = "", outputDeviceId: String = "", audioSessionActive: Signal<Bool, NoError>, video: OngoingCallVideoCapturer?, requestMediaChannelDescriptions: @escaping (Set<UInt32>, @escaping ([MediaChannelDescription]) -> Void) -> Disposable, rejoinNeeded: @escaping () -> Void, outgoingAudioBitrateKbit: Int32?, videoContentType: VideoContentType, enableNoiseSuppression: Bool, disableAudioInput: Bool, preferX264: Bool, logPath: String) {
         let queue = self.queue
         self.impl = QueueLocalObject(queue: queue, generate: {
-            return Impl(queue: queue, inputDeviceId: inputDeviceId, outputDeviceId: outputDeviceId, video: video, requestMediaChannelDescriptions: requestMediaChannelDescriptions, rejoinNeeded: rejoinNeeded, outgoingAudioBitrateKbit: outgoingAudioBitrateKbit, videoContentType: videoContentType, enableNoiseSuppression: enableNoiseSuppression, disableAudioInput: disableAudioInput, preferX264: preferX264, logPath: logPath)
+            return Impl(queue: queue, inputDeviceId: inputDeviceId, outputDeviceId: outputDeviceId, audioSessionActive: audioSessionActive, video: video, requestMediaChannelDescriptions: requestMediaChannelDescriptions, rejoinNeeded: rejoinNeeded, outgoingAudioBitrateKbit: outgoingAudioBitrateKbit, videoContentType: videoContentType, enableNoiseSuppression: enableNoiseSuppression, disableAudioInput: disableAudioInput, preferX264: preferX264, logPath: logPath)
         })
     }
 
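The Impl above just forwards an injected Signal<Bool, NoError> into the native context's manual audio-session switch and tears the subscription down in deinit. A reduced sketch of that wiring, using the same SwiftSignalKit primitives that appear in the hunk (AudioSessionBridge and NativeContext are hypothetical names):

import SwiftSignalKit

// Hypothetical stand-in for the native call context used above.
protocol NativeContext: AnyObject {
    func setManualAudioSessionIsActive(_ isActive: Bool)
}

// Reduced sketch of the Impl wiring: subscribe on the context's queue,
// forward activation flips to the native side, dispose with the owner.
final class AudioSessionBridge {
    private let disposable = MetaDisposable()

    init(queue: Queue, audioSessionActive: Signal<Bool, NoError>, context: NativeContext) {
        self.disposable.set((audioSessionActive
        |> deliverOn(queue)).start(next: { [weak context] isActive in
            context?.setManualAudioSessionIsActive(isActive)
        }))
    }

    deinit {
        self.disposable.dispose()
    }
}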
@@ -326,6 +326,7 @@ private protocol OngoingCallThreadLocalContextProtocol: AnyObject {
     func nativeVersion() -> String
     func nativeGetDerivedState() -> Data
     func addExternalAudioData(data: Data)
+    func nativeSetIsAudioSessionActive(isActive: Bool)
 }
 
 private final class OngoingCallThreadLocalContextHolder {
@@ -381,6 +382,9 @@ extension OngoingCallThreadLocalContext: OngoingCallThreadLocalContextProtocol {
 
     func addExternalAudioData(data: Data) {
     }
+
+    func nativeSetIsAudioSessionActive(isActive: Bool) {
+    }
 }
 
 public final class OngoingCallVideoCapturer {
@@ -573,6 +577,12 @@ extension OngoingCallThreadLocalContextWebrtc: OngoingCallThreadLocalContextProt
     func addExternalAudioData(data: Data) {
         self.addExternalAudioData(data)
     }
+
+    func nativeSetIsAudioSessionActive(isActive: Bool) {
+        #if os(iOS)
+        self.setManualAudioSessionIsActive(isActive)
+        #endif
+    }
 }
 
 private extension OngoingCallContextState.State {
@@ -726,13 +736,13 @@ public final class OngoingCallContext {
     }
 
     private let audioSessionDisposable = MetaDisposable()
+    private let audioSessionActiveDisposable = MetaDisposable()
     private var networkTypeDisposable: Disposable?
 
     public static var maxLayer: Int32 {
         return OngoingCallThreadLocalContext.maxLayer()
     }
 
-    private let tempLogFile: EngineTempBoxFile
     private let tempStatsLogFile: EngineTempBoxFile
 
     private var signalingConnectionManager: QueueLocalObject<CallSignalingConnectionManager>?
@@ -765,8 +775,8 @@ public final class OngoingCallContext {
         self.callSessionManager = callSessionManager
         self.logPath = logName.isEmpty ? "" : callLogsPath(account: self.account) + "/" + logName + ".log"
         let logPath = self.logPath
-        self.tempLogFile = EngineTempBox.shared.tempFile(fileName: "CallLog.txt")
-        let tempLogPath = self.tempLogFile.path
+        let _ = try? FileManager.default.createDirectory(atPath: callLogsPath(account: account), withIntermediateDirectories: true, attributes: nil)
 
         self.tempStatsLogFile = EngineTempBox.shared.tempFile(fileName: "CallStats.json")
         let tempStatsLogPath = self.tempStatsLogFile.path
@@ -871,7 +881,7 @@ public final class OngoingCallContext {
             }
         }
 
-        let context = OngoingCallThreadLocalContextWebrtc(version: version, queue: OngoingCallThreadLocalContextQueueImpl(queue: queue), proxy: voipProxyServer, networkType: ongoingNetworkTypeForTypeWebrtc(initialNetworkType), dataSaving: ongoingDataSavingForTypeWebrtc(dataSaving), derivedState: Data(), key: key, isOutgoing: isOutgoing, connections: filteredConnections, maxLayer: maxLayer, allowP2P: allowP2P, allowTCP: enableTCP, enableStunMarking: enableStunMarking, logPath: tempLogPath, statsLogPath: tempStatsLogPath, sendSignalingData: { [weak callSessionManager] data in
+        let context = OngoingCallThreadLocalContextWebrtc(version: version, queue: OngoingCallThreadLocalContextQueueImpl(queue: queue), proxy: voipProxyServer, networkType: ongoingNetworkTypeForTypeWebrtc(initialNetworkType), dataSaving: ongoingDataSavingForTypeWebrtc(dataSaving), derivedState: Data(), key: key, isOutgoing: isOutgoing, connections: filteredConnections, maxLayer: maxLayer, allowP2P: allowP2P, allowTCP: enableTCP, enableStunMarking: enableStunMarking, logPath: logPath, statsLogPath: tempStatsLogPath, sendSignalingData: { [weak callSessionManager] data in
             queue.async {
                 guard let strongSelf = self else {
                     return
@@ -886,7 +896,7 @@ public final class OngoingCallContext {
                     callSessionManager.sendSignalingData(internalId: internalId, data: data)
                 }
             }
-        }, videoCapturer: video?.impl, preferredVideoCodec: preferredVideoCodec, audioInputDeviceId: "")
+        }, videoCapturer: video?.impl, preferredVideoCodec: preferredVideoCodec, audioInputDeviceId: "", useManualAudioSessionControl: true)
 
         strongSelf.contextRef = Unmanaged.passRetained(OngoingCallThreadLocalContextHolder(context))
         context.stateChanged = { [weak callSessionManager] state, videoState, remoteVideoState, remoteAudioState, remoteBatteryLevel, _ in
@@ -950,6 +960,16 @@ public final class OngoingCallContext {
             self?.audioLevelPromise.set(.single(level))
         }
 
+        strongSelf.audioSessionActiveDisposable.set((audioSessionActive
+        |> deliverOn(queue)).start(next: { isActive in
+            guard let strongSelf = self else {
+                return
+            }
+            strongSelf.withContext { context in
+                context.nativeSetIsAudioSessionActive(isActive: isActive)
+            }
+        }))
+
         strongSelf.networkTypeDisposable = (updatedNetworkType
         |> deliverOn(queue)).start(next: { networkType in
             self?.withContext { context in
@@ -1010,6 +1030,7 @@ public final class OngoingCallContext {
         }
 
         self.audioSessionDisposable.dispose()
+        self.audioSessionActiveDisposable.dispose()
         self.networkTypeDisposable?.dispose()
     }
 
@@ -1048,7 +1069,6 @@ public final class OngoingCallContext {
         if !logPath.isEmpty {
             statsLogPath = logPath + ".json"
         }
-        let tempLogPath = self.tempLogFile.path
         let tempStatsLogPath = self.tempStatsLogFile.path
 
         self.withContextThenDeallocate { context in
@@ -1062,12 +1082,6 @@ public final class OngoingCallContext {
                 outgoing: bytesSentWifi))
             updateAccountNetworkUsageStats(account: self.account, category: .call, delta: delta)
 
-            if !logPath.isEmpty {
-                let logsPath = callLogsPath(account: account)
-                let _ = try? FileManager.default.createDirectory(atPath: logsPath, withIntermediateDirectories: true, attributes: nil)
-                let _ = try? FileManager.default.moveItem(atPath: tempLogPath, toPath: logPath)
-            }
-
             if !statsLogPath.isEmpty {
                 let logsPath = callLogsPath(account: account)
                 let _ = try? FileManager.default.createDirectory(atPath: logsPath, withIntermediateDirectories: true, attributes: nil)
@@ -1256,6 +1270,8 @@ private final class CallSignalingConnectionImpl: CallSignalingConnection {
     }
 
     func start() {
+        OngoingCallThreadLocalContextWebrtc.logMessage("CallSignaling: Connecting...")
+
         self.connection.start(queue: self.queue.queue)
         self.receivePacketHeader()
     }
@@ -1399,48 +1415,91 @@ private final class CallSignalingConnectionImpl: CallSignalingConnection {
     }
 }
 
 private final class CallSignalingConnectionManager {
+    private final class ConnectionContext {
+        let connection: CallSignalingConnection
+        let host: String
+        let port: UInt16
+
+        init(connection: CallSignalingConnection, host: String, port: UInt16) {
+            self.connection = connection
+            self.host = host
+            self.port = port
+        }
+    }
+
     private let queue: Queue
+    private let peerTag: Data
+    private let dataReceived: (Data) -> Void
+
+    private var isRunning: Bool = false
+
     private var nextConnectionId: Int = 0
-    private var connections: [Int: CallSignalingConnection] = [:]
+    private var connections: [Int: ConnectionContext] = [:]
 
     init(queue: Queue, peerTag: Data, servers: [OngoingCallConnectionDescriptionWebrtc], dataReceived: @escaping (Data) -> Void) {
         self.queue = queue
+        self.peerTag = peerTag
+        self.dataReceived = dataReceived
+
         for server in servers {
             if server.hasTcp {
-                let id = self.nextConnectionId
-                self.nextConnectionId += 1
-                if #available(iOS 12.0, *) {
-                    let connection = CallSignalingConnectionImpl(queue: queue, host: server.ip, port: UInt16(server.port), peerTag: peerTag, dataReceived: { data in
-                        dataReceived(data)
-                    }, isClosed: { [weak self] in
-                        guard let strongSelf = self else {
-                            return
-                        }
-                        let _ = strongSelf
-                    })
-                    connections[id] = connection
-                }
+                self.spawnConnection(host: server.ip, port: UInt16(server.port))
             }
         }
     }
 
     func start() {
+        if self.isRunning {
+            return
+        }
+        self.isRunning = true
+
         for (_, connection) in self.connections {
-            connection.start()
+            connection.connection.start()
         }
     }
 
     func stop() {
+        if !self.isRunning {
+            return
+        }
+        self.isRunning = false
+
         for (_, connection) in self.connections {
-            connection.stop()
+            connection.connection.stop()
        }
    }
 
     func send(payloadData: Data) {
         for (_, connection) in self.connections {
-            connection.send(payloadData: payloadData)
+            connection.connection.send(payloadData: payloadData)
+        }
+    }
+
+    private func spawnConnection(host: String, port: UInt16) {
+        let id = self.nextConnectionId
+        self.nextConnectionId += 1
+        if #available(iOS 12.0, *) {
+            let dataReceived = self.dataReceived
+            let connection = CallSignalingConnectionImpl(queue: queue, host: host, port: port, peerTag: self.peerTag, dataReceived: { data in
+                dataReceived(data)
+            }, isClosed: { [weak self] in
+                guard let `self` = self else {
+                    return
+                }
+                self.handleConnectionFailed(id: id)
+            })
+            self.connections[id] = ConnectionContext(connection: connection, host: host, port: port)
+            if self.isRunning {
+                connection.start()
+            }
+        }
+    }
+
+    private func handleConnectionFailed(id: Int) {
+        if let connection = self.connections.removeValue(forKey: id) {
+            connection.connection.stop()
+            self.spawnConnection(host: connection.host, port: connection.port)
         }
     }
 }
@@ -206,7 +206,24 @@ typedef NS_ENUM(int32_t, OngoingCallDataSavingWebrtc) {
 @property (nonatomic, copy) void (^ _Nullable signalBarsChanged)(int32_t);
 @property (nonatomic, copy) void (^ _Nullable audioLevelUpdated)(float);
 
-- (instancetype _Nonnull)initWithVersion:(NSString * _Nonnull)version queue:(id<OngoingCallThreadLocalContextQueueWebrtc> _Nonnull)queue proxy:(VoipProxyServerWebrtc * _Nullable)proxy networkType:(OngoingCallNetworkTypeWebrtc)networkType dataSaving:(OngoingCallDataSavingWebrtc)dataSaving derivedState:(NSData * _Nonnull)derivedState key:(NSData * _Nonnull)key isOutgoing:(bool)isOutgoing connections:(NSArray<OngoingCallConnectionDescriptionWebrtc *> * _Nonnull)connections maxLayer:(int32_t)maxLayer allowP2P:(BOOL)allowP2P allowTCP:(BOOL)allowTCP enableStunMarking:(BOOL)enableStunMarking logPath:(NSString * _Nonnull)logPath statsLogPath:(NSString * _Nonnull)statsLogPath sendSignalingData:(void (^ _Nonnull)(NSData * _Nonnull))sendSignalingData videoCapturer:(OngoingCallThreadLocalContextVideoCapturer * _Nullable)videoCapturer preferredVideoCodec:(NSString * _Nullable)preferredVideoCodec audioInputDeviceId: (NSString * _Nonnull)audioInputDeviceId;
+- (instancetype _Nonnull)initWithVersion:(NSString * _Nonnull)version queue:(id<OngoingCallThreadLocalContextQueueWebrtc> _Nonnull)queue
+    proxy:(VoipProxyServerWebrtc * _Nullable)proxy
+    networkType:(OngoingCallNetworkTypeWebrtc)networkType dataSaving:(OngoingCallDataSavingWebrtc)dataSaving
+    derivedState:(NSData * _Nonnull)derivedState
+    key:(NSData * _Nonnull)key
+    isOutgoing:(bool)isOutgoing
+    connections:(NSArray<OngoingCallConnectionDescriptionWebrtc *> * _Nonnull)connections maxLayer:(int32_t)maxLayer
+    allowP2P:(BOOL)allowP2P
+    allowTCP:(BOOL)allowTCP
+    enableStunMarking:(BOOL)enableStunMarking
+    logPath:(NSString * _Nonnull)logPath
+    statsLogPath:(NSString * _Nonnull)statsLogPath
+    sendSignalingData:(void (^ _Nonnull)(NSData * _Nonnull))sendSignalingData videoCapturer:(OngoingCallThreadLocalContextVideoCapturer * _Nullable)videoCapturer
+    preferredVideoCodec:(NSString * _Nullable)preferredVideoCodec
+    audioInputDeviceId:(NSString * _Nonnull)audioInputDeviceId
+    useManualAudioSessionControl:(bool)useManualAudioSessionControl;
+
+- (void)setManualAudioSessionIsActive:(bool)isAudioSessionActive;
 
 - (void)beginTermination;
 - (void)stop:(void (^_Nullable)(NSString * _Nullable debugLog, int64_t bytesSentWifi, int64_t bytesReceivedWifi, int64_t bytesSentMobile, int64_t bytesReceivedMobile))completion;
@@ -360,6 +377,8 @@ typedef NS_ENUM(int32_t, OngoingGroupCallRequestedVideoQuality) {
 
 - (void)stop;
 
+- (void)setManualAudioSessionIsActive:(bool)isAudioSessionActive;
+
 - (void)setConnectionMode:(OngoingCallConnectionMode)connectionMode keepBroadcastConnectedIfWasEnabled:(bool)keepBroadcastConnectedIfWasEnabled isUnifiedBroadcast:(bool)isUnifiedBroadcast;
 
 - (void)emitJoinPayload:(void (^ _Nonnull)(NSString * _Nonnull, uint32_t))completion;
@@ -28,6 +28,9 @@
 
 #include "platform/darwin/iOS/tgcalls_audio_device_module_ios.h"
 
+#include "platform/darwin/iOS/RTCAudioSession.h"
+#include "platform/darwin/iOS/RTCAudioSessionConfiguration.h"
+
 #endif
 
 #import "group/GroupInstanceImpl.h"
@@ -626,7 +629,7 @@ tgcalls::VideoCaptureInterfaceObject *GetVideoCaptureAssumingSameThread(tgcalls:
         }
         std::shared_ptr<tgcalls::VideoCaptureInterface> interface = strongSelf->_interface;
 
-        if (false && requestClone) {
+        /*if (false && requestClone) {
             VideoSampleBufferView *remoteRenderer = [[VideoSampleBufferView alloc] initWithFrame:CGRectZero];
             remoteRenderer.videoContentMode = UIViewContentModeScaleAspectFill;
 
@@ -643,7 +646,7 @@ tgcalls::VideoCaptureInterfaceObject *GetVideoCaptureAssumingSameThread(tgcalls:
             }
 
             completion(remoteRenderer, cloneRenderer);
-        } else if ([VideoMetalView isSupported]) {
+        } else */if ([VideoMetalView isSupported]) {
             VideoMetalView *remoteRenderer = [[VideoMetalView alloc] initWithFrame:CGRectZero];
             remoteRenderer.videoContentMode = UIViewContentModeScaleAspectFill;
 
@@ -705,6 +708,8 @@ tgcalls::VideoCaptureInterfaceObject *GetVideoCaptureAssumingSameThread(tgcalls:
     id<OngoingCallThreadLocalContextQueueWebrtc> _queue;
     int32_t _contextId;
 
+    bool _useManualAudioSessionControl;
+
     OngoingCallNetworkTypeWebrtc _networkType;
     NSTimeInterval _callReceiveTimeout;
     NSTimeInterval _callRingTimeout;
@@ -843,7 +848,22 @@ static void (*InternalVoipLoggingFunction)(NSString *) = NULL;
     }
 }
 
-- (instancetype _Nonnull)initWithVersion:(NSString * _Nonnull)version queue:(id<OngoingCallThreadLocalContextQueueWebrtc> _Nonnull)queue proxy:(VoipProxyServerWebrtc * _Nullable)proxy networkType:(OngoingCallNetworkTypeWebrtc)networkType dataSaving:(OngoingCallDataSavingWebrtc)dataSaving derivedState:(NSData * _Nonnull)derivedState key:(NSData * _Nonnull)key isOutgoing:(bool)isOutgoing connections:(NSArray<OngoingCallConnectionDescriptionWebrtc *> * _Nonnull)connections maxLayer:(int32_t)maxLayer allowP2P:(BOOL)allowP2P allowTCP:(BOOL)allowTCP enableStunMarking:(BOOL)enableStunMarking logPath:(NSString * _Nonnull)logPath statsLogPath:(NSString * _Nonnull)statsLogPath sendSignalingData:(void (^)(NSData * _Nonnull))sendSignalingData videoCapturer:(OngoingCallThreadLocalContextVideoCapturer * _Nullable)videoCapturer preferredVideoCodec:(NSString * _Nullable)preferredVideoCodec audioInputDeviceId: (NSString * _Nonnull)audioInputDeviceId {
+- (instancetype _Nonnull)initWithVersion:(NSString * _Nonnull)version queue:(id<OngoingCallThreadLocalContextQueueWebrtc> _Nonnull)queue
+    proxy:(VoipProxyServerWebrtc * _Nullable)proxy
+    networkType:(OngoingCallNetworkTypeWebrtc)networkType dataSaving:(OngoingCallDataSavingWebrtc)dataSaving
+    derivedState:(NSData * _Nonnull)derivedState
+    key:(NSData * _Nonnull)key
+    isOutgoing:(bool)isOutgoing
+    connections:(NSArray<OngoingCallConnectionDescriptionWebrtc *> * _Nonnull)connections maxLayer:(int32_t)maxLayer
+    allowP2P:(BOOL)allowP2P
+    allowTCP:(BOOL)allowTCP
+    enableStunMarking:(BOOL)enableStunMarking
+    logPath:(NSString * _Nonnull)logPath
+    statsLogPath:(NSString * _Nonnull)statsLogPath
+    sendSignalingData:(void (^ _Nonnull)(NSData * _Nonnull))sendSignalingData videoCapturer:(OngoingCallThreadLocalContextVideoCapturer * _Nullable)videoCapturer
+    preferredVideoCodec:(NSString * _Nullable)preferredVideoCodec
+    audioInputDeviceId:(NSString * _Nonnull)audioInputDeviceId
+    useManualAudioSessionControl:(bool)useManualAudioSessionControl {
     self = [super init];
     if (self != nil) {
         _version = version;
@@ -852,6 +872,25 @@ static void (*InternalVoipLoggingFunction)(NSString *) = NULL;
 
         assert([[OngoingCallThreadLocalContextWebrtc versionsWithIncludeReference:true] containsObject:version]);
 
+        _useManualAudioSessionControl = useManualAudioSessionControl;
+        [RTCAudioSession sharedInstance].useManualAudio = true;
+
+#ifdef WEBRTC_IOS
+        RTCAudioSessionConfiguration *sharedConfiguration = [RTCAudioSessionConfiguration webRTCConfiguration];
+        if (useManualAudioSessionControl) {
+            sharedConfiguration.mode = AVAudioSessionModeVoiceChat;
+        } else {
+            sharedConfiguration.mode = AVAudioSessionModeVoiceChat;
+        }
+        sharedConfiguration.categoryOptions |= AVAudioSessionCategoryOptionMixWithOthers;
+        sharedConfiguration.outputNumberOfChannels = 1;
+        [RTCAudioSessionConfiguration setWebRTCConfiguration:sharedConfiguration];
+
+        /*[RTCAudioSession sharedInstance].useManualAudio = true;
+        [[RTCAudioSession sharedInstance] audioSessionDidActivate:[AVAudioSession sharedInstance]];
+        [RTCAudioSession sharedInstance].isAudioEnabled = true;*/
+#endif
+
         _callReceiveTimeout = 20.0;
         _callRingTimeout = 90.0;
         _callConnectTimeout = 30.0;
@@ -1094,6 +1133,17 @@ static void (*InternalVoipLoggingFunction)(NSString *) = NULL;
 - (void)beginTermination {
 }
 
+- (void)setManualAudioSessionIsActive:(bool)isAudioSessionActive {
+    if (_useManualAudioSessionControl) {
+        if (isAudioSessionActive) {
+            [[RTCAudioSession sharedInstance] audioSessionDidActivate:[AVAudioSession sharedInstance]];
+        } else {
+            [[RTCAudioSession sharedInstance] audioSessionDidDeactivate:[AVAudioSession sharedInstance]];
+        }
+        [RTCAudioSession sharedInstance].isAudioEnabled = isAudioSessionActive;
+    }
+}
+
 + (void)stopWithTerminationResult:(OngoingCallThreadLocalContextWebrtcTerminationResult *)terminationResult completion:(void (^)(NSString *, int64_t, int64_t, int64_t, int64_t))completion {
     if (completion) {
         if (terminationResult) {
@@ -1429,6 +1479,22 @@ private:
         }
     }
 
+#ifdef WEBRTC_IOS
+    RTCAudioSessionConfiguration *sharedConfiguration = [RTCAudioSessionConfiguration webRTCConfiguration];
+    sharedConfiguration.mode = AVAudioSessionModeVoiceChat;
+    sharedConfiguration.categoryOptions |= AVAudioSessionCategoryOptionMixWithOthers;
+    if (disableAudioInput) {
+        sharedConfiguration.outputNumberOfChannels = 2;
+    } else {
+        sharedConfiguration.outputNumberOfChannels = 1;
+    }
+    [RTCAudioSessionConfiguration setWebRTCConfiguration:sharedConfiguration];
+
+    /*[RTCAudioSession sharedInstance].useManualAudio = true;
+    [[RTCAudioSession sharedInstance] audioSessionDidActivate:[AVAudioSession sharedInstance]];
+    [RTCAudioSession sharedInstance].isAudioEnabled = true;*/
+#endif
+
     std::vector<tgcalls::VideoCodecName> videoCodecPreferences;
 
     int minOutgoingVideoBitrateKbit = 500;
@@ -1612,6 +1678,15 @@ private:
     }
 }
 
+- (void)setManualAudioSessionIsActive:(bool)isAudioSessionActive {
+    if (isAudioSessionActive) {
+        [[RTCAudioSession sharedInstance] audioSessionDidActivate:[AVAudioSession sharedInstance]];
+    } else {
+        [[RTCAudioSession sharedInstance] audioSessionDidDeactivate:[AVAudioSession sharedInstance]];
+    }
+    [RTCAudioSession sharedInstance].isAudioEnabled = isAudioSessionActive;
+}
+
 - (void)setConnectionMode:(OngoingCallConnectionMode)connectionMode keepBroadcastConnectedIfWasEnabled:(bool)keepBroadcastConnectedIfWasEnabled isUnifiedBroadcast:(bool)isUnifiedBroadcast {
     if (_instance) {
         tgcalls::GroupConnectionMode mappedConnectionMode;
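Taken together, these .mm changes move WebRTC to manual audio-session control: useManualAudio stops WebRTC from activating AVAudioSession on its own, and the app relays CallKit's activate/deactivate callbacks along with the isAudioEnabled toggle. A hedged Swift sketch of the same handshake against the stock WebRTC ObjC surface (the WebRTC module import and exact Swift bridging are assumptions; tgcalls bundles its own copies of these classes):

import AVFoundation
import WebRTC  // assumption: stock WebRTC.framework Swift bridging

// Sketch: manual audio-session handshake. WebRTC stops touching the
// AVAudioSession itself; the app relays CallKit's callbacks instead.
func enableManualAudio() {
    RTCAudioSession.sharedInstance().useManualAudio = true
}

// Call from provider(_:didActivate:) / provider(_:didDeactivate:).
func audioSessionActiveChanged(_ isActive: Bool) {
    let rtcSession = RTCAudioSession.sharedInstance()
    if isActive {
        rtcSession.audioSessionDidActivate(AVAudioSession.sharedInstance())
    } else {
        rtcSession.audioSessionDidDeactivate(AVAudioSession.sharedInstance())
    }
    rtcSession.isAudioEnabled = isActive
}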
@@ -1 +1 @@
-Subproject commit 53bb1711ae0b3810d34edb1c81982b18d70c5506
+Subproject commit 0aa4b1277fd018e56bf194d72b5405e397c6918b