Update tgcalls + webrtc + audio session state management

Author: Ali (2022-11-22 19:49:53 +04:00)
parent b62aeeb6cc
commit f6c56c2241
5 changed files with 49 additions and 34 deletions


@@ -472,7 +472,11 @@ public final class ManagedAudioSession {
         strongSelf.queue.async {
             for holder in strongSelf.holders {
                 if holder.id == id && holder.active {
-                    strongSelf.activate()
+                    if strongSelf.currentTypeAndOutputMode?.0 != holder.audioSessionType || strongSelf.currentTypeAndOutputMode?.1 != holder.outputMode {
+                        strongSelf.setup(type: holder.audioSessionType, outputMode: holder.outputMode, activateNow: true)
+                    } else {
+                        strongSelf.activate()
+                    }
                     completion.f(AudioSessionActivationState(isHeadsetConnected: strongSelf.isHeadsetPluggedInValue))
                     break
                 }
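
With this change, re-activating a session holder re-runs the full category/mode setup whenever the holder's session type or output mode no longer matches the currently applied pair, instead of unconditionally taking the cheaper activate() path. A minimal sketch of that decision, with a hypothetical helper name and generic stand-ins for the session types:

    // Hypothetical helper mirroring the branch above: a full
    // setup(type:outputMode:activateNow:) is warranted when either component
    // differs from the currently applied pair, or when nothing is applied yet.
    func needsFullSetup<T: Equatable, M: Equatable>(
        current: (type: T, outputMode: M)?,
        requested: (type: T, outputMode: M)
    ) -> Bool {
        return current?.type != requested.type || current?.outputMode != requested.outputMode
    }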
@@ -737,16 +741,16 @@ public final class ManagedAudioSession {
                 break
             case .playWithPossiblePortOverride:
                 if case .playAndRecord = nativeCategory {
-                    if #available(iOSApplicationExtension 10.0, iOS 10.0, *) {
-                        options.insert(.allowBluetoothA2DP)
-                    } else {
-                        options.insert(.allowBluetooth)
-                    }
+                    options.insert(.allowBluetoothA2DP)
                 }
-            case .record, .recordWithOthers, .voiceCall, .videoCall:
+            case .voiceCall, .videoCall:
                 options.insert(.allowBluetooth)
+                options.insert(.allowBluetoothA2DP)
+                options.insert(.mixWithOthers)
+            case .record, .recordWithOthers:
+                options.insert(.allowBluetooth)
             }
-            managedAudioSessionLog("ManagedAudioSession setting active true")
+            managedAudioSessionLog("ManagedAudioSession setting category and options")
             let mode: AVAudioSession.Mode
             switch type {
             case .voiceCall:
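
The reworked switch drops the iOS 10 availability check (presumably because the deployment target is now past iOS 10, where .allowBluetoothA2DP was introduced) and splits the old combined case: voice and video calls get HFP Bluetooth, A2DP, and mixWithOthers, while plain record sessions keep only HFP Bluetooth. A standalone sketch of the option set built for the call cases, assuming only AVFoundation:

    import AVFoundation

    // Sketch: the category options the new .voiceCall / .videoCall case assembles.
    var options: AVAudioSession.CategoryOptions = []
    options.insert(.allowBluetooth)     // classic Bluetooth (HFP) headsets
    options.insert(.allowBluetoothA2DP) // high-quality A2DP output
    options.insert(.mixWithOthers)      // do not interrupt other apps' audio
    try? AVAudioSession.sharedInstance().setCategory(.playAndRecord, options: options)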
@@ -761,12 +765,18 @@ public final class ManagedAudioSession {
             default:
                 mode = .default
             }
-            if #available(iOSApplicationExtension 11.0, iOS 11.0, *) {
+            try AVAudioSession.sharedInstance().setCategory(nativeCategory, options: options)
+            try AVAudioSession.sharedInstance().setMode(mode)
+            if AVAudioSession.sharedInstance().categoryOptions != options {
+                managedAudioSessionLog("ManagedAudioSession resetting options")
+                try AVAudioSession.sharedInstance().setCategory(nativeCategory, options: options)
+            }
+            /*if #available(iOSApplicationExtension 11.0, iOS 11.0, *) {
                 try AVAudioSession.sharedInstance().setCategory(nativeCategory, mode: mode, policy: .default, options: options)
             } else {
                 AVAudioSession.sharedInstance().perform(NSSelectorFromString("setCategory:error:"), with: nativeCategory)
                 try AVAudioSession.sharedInstance().setMode(mode)
-            }
+            }*/
         } catch let error {
             managedAudioSessionLog("ManagedAudioSession setup error \(error)")
         }
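
Instead of the combined setCategory(_:mode:policy:options:) call (now kept only as a comment), category and mode are applied separately, and the options are verified afterwards: if the session no longer reports the requested categoryOptions once the mode is set, they are applied a second time, since a mode change can adjust the effective options. The same pattern as a standalone sketch:

    import AVFoundation

    // Sketch of the set-then-verify pattern above: apply category and mode
    // separately, then re-apply the options if the mode change dropped any.
    func applyCategory(_ category: AVAudioSession.Category,
                       mode: AVAudioSession.Mode,
                       options: AVAudioSession.CategoryOptions) throws {
        let session = AVAudioSession.sharedInstance()
        try session.setCategory(category, options: options)
        try session.setMode(mode)
        if session.categoryOptions != options {
            // Corresponds to the "resetting options" log branch above.
            try session.setCategory(category, options: options)
        }
    }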
@@ -844,6 +854,8 @@ public final class ManagedAudioSession {
     }
     
     private func setupOutputMode(_ outputMode: AudioSessionOutputMode, type: ManagedAudioSessionType) throws {
+        var outputMode = outputMode
+        outputMode = .custom(.builtin)
         managedAudioSessionLog("ManagedAudioSession setup \(outputMode) for \(type)")
         var resetToBuiltin = false
         switch outputMode {
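
Note that setupOutputMode now shadows its parameter and immediately overwrites it with .custom(.builtin), so the switch below only ever sees the built-in route regardless of the mode the caller requested.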
@@ -858,6 +870,7 @@ public final class ManagedAudioSession {
             if let routes = AVAudioSession.sharedInstance().availableInputs {
                 for route in routes {
                     if route.portType == .builtInMic {
+                        let _ = try? AVAudioSession.sharedInstance().setInputDataSource(route.selectedDataSource)
                         let _ = try? AVAudioSession.sharedInstance().setPreferredInput(route)
                         break
                     }
@@ -914,7 +927,8 @@ public final class ManagedAudioSession {
                     if route.portType == .builtInMic {
                         if case .record = updatedType, self.isHeadsetPluggedInValue {
                         } else {
-                            let _ = try? AVAudioSession.sharedInstance().setPreferredInput(route)
+                            //let _ = try? AVAudioSession.sharedInstance().setPreferredInput(route)
+                            let _ = try? AVAudioSession.sharedInstance().setInputDataSource(nil)
                         }
                         break
                     }
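
Where the route update previously pinned the preferred input to the built-in microphone port, it now clears the input data-source override instead. setPreferredInput(_:) selects an entire input port, while setInputDataSource(_:) picks a data source within the already-active port (or, passed nil, returns that choice to the system). A standalone sketch contrasting the two calls:

    import AVFoundation

    let session = AVAudioSession.sharedInstance()

    // Previous behavior: force the input *port* to the built-in microphone.
    if let builtInMic = session.availableInputs?.first(where: { $0.portType == .builtInMic }) {
        try? session.setPreferredInput(builtInMic)
    }

    // New behavior: drop any data-source override and let the system choose
    // the data source (e.g. front/back/bottom mic) on the current input port.
    try? session.setInputDataSource(nil)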
@@ -945,7 +959,7 @@ public final class ManagedAudioSession {
             managedAudioSessionLog("\(CFAbsoluteTimeGetCurrent()) AudioSession updateCurrentAudioRouteInfo: \((CFAbsoluteTimeGetCurrent() - startTime) * 1000.0) ms")
             if case .voiceCall = type {
-                try AVAudioSession.sharedInstance().setPreferredIOBufferDuration(0.005)
+                //try AVAudioSession.sharedInstance().setPreferredIOBufferDuration(0.005)
             }
         } catch let error {
             managedAudioSessionLog("ManagedAudioSession activate error \(error)")


@@ -536,8 +536,6 @@ public final class PresentationCallImpl: PresentationCall {
                 }
             }
-            #if DEBUG
-            #else
             if let audioSessionControl = audioSessionControl, previous == nil || previousControl == nil {
                 if let callKitIntegration = self.callKitIntegration {
                     callKitIntegration.applyVoiceChatOutputMode(outputMode: .custom(self.currentAudioOutputValue))
@@ -546,7 +544,6 @@ public final class PresentationCallImpl: PresentationCall {
                     audioSessionControl.setup(synchronous: true)
                 }
             }
-            #endif
             
             let mappedVideoState: PresentationCallState.VideoState
             let mappedRemoteVideoState: PresentationCallState.RemoteVideoState
@@ -869,13 +866,13 @@ public final class PresentationCallImpl: PresentationCall {
             }
             if tone != self.toneRenderer?.tone {
                 if let tone = tone {
-                    #if DEBUG
-                    let _ = tone
-                    #else
-                    let toneRenderer = PresentationCallToneRenderer(tone: tone)
-                    self.toneRenderer = toneRenderer
-                    toneRenderer.setAudioSessionActive(self.isAudioSessionActive)
-                    #endif
+                    if "".isEmpty {
+                        let _ = tone
+                    } else {
+                        let toneRenderer = PresentationCallToneRenderer(tone: tone)
+                        self.toneRenderer = toneRenderer
+                        toneRenderer.setAudioSessionActive(self.isAudioSessionActive)
+                    }
                 } else {
                     self.toneRenderer = nil
                 }
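
The compile-time #if DEBUG split is replaced by a runtime if "".isEmpty condition. Since "".isEmpty is always true, the first branch always wins and the PresentationCallToneRenderer path is now disabled in every build configuration, not just DEBUG; unlike a literal true, however, the expression is not constant-folded during type checking, so the dormant branch keeps compiling, presumably without dead-code warnings. In miniature:

    // "".isEmpty is always true at runtime, but the compiler does not treat
    // it as a constant, so both branches stay type-checked and compiled.
    if "".isEmpty {
        // always taken: tone playback stays off
    } else {
        // never reached, but still built
    }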
@@ -1052,8 +1049,6 @@ public final class PresentationCallImpl: PresentationCall {
                 |> delay(1.0, queue: Queue.mainQueue())
             ))
-            #if DEBUG
-            #else
            if let audioSessionControl = self.audioSessionControl {
                if let callKitIntegration = self.callKitIntegration {
                    callKitIntegration.applyVoiceChatOutputMode(outputMode: .custom(self.currentAudioOutputValue))
@@ -1061,7 +1056,6 @@ public final class PresentationCallImpl: PresentationCall {
                    audioSessionControl.setOutputMode(.custom(output))
                }
            }
-           #endif
        }
        
        public func debugInfo() -> Signal<(String, String), NoError> {


@@ -1575,17 +1575,20 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
        self.internalStatePromise.set(.single(internalState))
        
        if let audioSessionControl = audioSessionControl, previousControl == nil {
-           if self.isStream {
-               audioSessionControl.setOutputMode(.system)
-           } else {
-               switch self.currentSelectedAudioOutputValue {
-               case .speaker:
-                   audioSessionControl.setOutputMode(.custom(self.currentSelectedAudioOutputValue))
-               default:
-                   break
-               }
-           }
-           audioSessionControl.setup(synchronous: false)
+           if "".isEmpty {
+           } else {
+               if self.isStream {
+                   audioSessionControl.setOutputMode(.system)
+               } else {
+                   switch self.currentSelectedAudioOutputValue {
+                   case .speaker:
+                       audioSessionControl.setOutputMode(.custom(self.currentSelectedAudioOutputValue))
+                   default:
+                       break
+                   }
+               }
+               audioSessionControl.setup(synchronous: false)
+           }
        }
        
        self.audioSessionShouldBeActive.set(true)
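
The group call path gets the same treatment: the always-true "".isEmpty guard skips the initial output-mode selection and the setup(synchronous: false) call entirely, and in the next hunk beginTone returns before any group-call tone is played.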
@@ -2475,6 +2478,9 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
        }
        
        private func beginTone(tone: PresentationCallTone) {
+           if "".isEmpty {
+               return
+           }
            if self.isStream {
                switch tone {
                case .groupJoined, .groupLeft:

@@ -1 +1 @@
-Subproject commit 6cb21fc91be59356d02059e639df87a3b544bfb3
+Subproject commit 97d616abe1dae6214b11eae19b3ec25cb88d98ce


@@ -2954,6 +2954,7 @@ webrtc_sources = [
     "api/video_codecs/video_encoder_factory_template_libvpx_vp9_adapter.h",
     "p2p/base/ice_agent_interface.h",
     "api/video_codecs/video_encoder_factory_template_open_h264_adapter.h",
+    "api/video_codecs/simulcast_stream.cc",
 ]
 
 ios_objc_sources = [