Update tgcalls + webrtc + audio session state management

Ali, 2022-11-22 19:49:53 +04:00
commit f6c56c2241, parent b62aeeb6cc
5 changed files with 49 additions and 34 deletions


@@ -472,7 +472,11 @@ public final class ManagedAudioSession {
 strongSelf.queue.async {
     for holder in strongSelf.holders {
         if holder.id == id && holder.active {
+            if strongSelf.currentTypeAndOutputMode?.0 != holder.audioSessionType || strongSelf.currentTypeAndOutputMode?.1 != holder.outputMode {
+                strongSelf.setup(type: holder.audioSessionType, outputMode: holder.outputMode, activateNow: true)
+            } else {
                 strongSelf.activate()
+            }
             completion.f(AudioSessionActivationState(isHeadsetConnected: strongSelf.isHeadsetPluggedInValue))
             break
         }
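Note on the hunk above: when a holder re-activates its audio session, the manager now compares the requested (type, output mode) pair against what the session is currently configured for and re-runs setup when they differ, instead of unconditionally calling activate(). A minimal, self-contained sketch of that decision follows; SessionController, AudioSessionType and OutputMode are hypothetical stand-ins for the real ManagedAudioSession types.

    // Sketch of the activate-or-reconfigure decision; all names are illustrative.
    enum AudioSessionType { case play, voiceCall, videoCall, record }
    enum OutputMode: Equatable { case system, builtin, speaker }

    final class SessionController {
        // The (type, outputMode) the session was last configured with, if any.
        private var currentTypeAndOutputMode: (AudioSessionType, OutputMode)?

        func activate(type: AudioSessionType, outputMode: OutputMode) {
            if currentTypeAndOutputMode?.0 != type || currentTypeAndOutputMode?.1 != outputMode {
                // Configuration drifted (or was never applied): reconfigure and activate in one step.
                setup(type: type, outputMode: outputMode, activateNow: true)
            } else {
                // Same configuration as before: just (re)activate the session.
                activate()
            }
        }

        private func setup(type: AudioSessionType, outputMode: OutputMode, activateNow: Bool) {
            currentTypeAndOutputMode = (type, outputMode)
            if activateNow { activate() }
        }

        private func activate() {
            // The real code would call AVAudioSession.sharedInstance().setActive(true) here.
        }
    }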
@@ -737,16 +741,16 @@ public final class ManagedAudioSession {
             break
         case .playWithPossiblePortOverride:
             if case .playAndRecord = nativeCategory {
-                if #available(iOSApplicationExtension 10.0, iOS 10.0, *) {
-                    options.insert(.allowBluetoothA2DP)
-                } else {
-                    options.insert(.allowBluetooth)
-                }
+                options.insert(.allowBluetoothA2DP)
             }
-        case .record, .recordWithOthers, .voiceCall, .videoCall:
+        case .voiceCall, .videoCall:
+            options.insert(.allowBluetooth)
+            options.insert(.allowBluetoothA2DP)
+            options.insert(.mixWithOthers)
+        case .record, .recordWithOthers:
             options.insert(.allowBluetooth)
         }
-        managedAudioSessionLog("ManagedAudioSession setting active true")
+        managedAudioSessionLog("ManagedAudioSession setting category and options")
         let mode: AVAudioSession.Mode
         switch type {
         case .voiceCall:
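Note on the hunk above: voice and video calls get their own options branch (.allowBluetooth, .allowBluetoothA2DP and .mixWithOthers) instead of sharing the record path, and the old iOS 10 availability check around .allowBluetoothA2DP is dropped. A sketch of the resulting mapping using AVFoundation directly; SessionType and categoryOptions(for:nativeCategory:) are illustrative names, not the repo's API, and only the cases visible in the hunk are covered.

    import AVFoundation

    enum SessionType { case playWithPossiblePortOverride, voiceCall, videoCall, record }

    func categoryOptions(for type: SessionType, nativeCategory: AVAudioSession.Category) -> AVAudioSession.CategoryOptions {
        var options: AVAudioSession.CategoryOptions = []
        switch type {
        case .playWithPossiblePortOverride:
            // A2DP output only makes sense when the category also allows recording.
            if nativeCategory == .playAndRecord {
                options.insert(.allowBluetoothA2DP)
            }
        case .voiceCall, .videoCall:
            // Calls allow both HFP and A2DP Bluetooth routes and mix with other audio.
            options.insert(.allowBluetooth)
            options.insert(.allowBluetoothA2DP)
            options.insert(.mixWithOthers)
        case .record:
            options.insert(.allowBluetooth)
        }
        return options
    }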
@@ -761,12 +765,18 @@ public final class ManagedAudioSession {
         default:
             mode = .default
         }
-        if #available(iOSApplicationExtension 11.0, iOS 11.0, *) {
+        try AVAudioSession.sharedInstance().setCategory(nativeCategory, options: options)
+        try AVAudioSession.sharedInstance().setMode(mode)
+        if AVAudioSession.sharedInstance().categoryOptions != options {
+            managedAudioSessionLog("ManagedAudioSession resetting options")
+            try AVAudioSession.sharedInstance().setCategory(nativeCategory, options: options)
+        }
+        /*if #available(iOSApplicationExtension 11.0, iOS 11.0, *) {
             try AVAudioSession.sharedInstance().setCategory(nativeCategory, mode: mode, policy: .default, options: options)
         } else {
             AVAudioSession.sharedInstance().perform(NSSelectorFromString("setCategory:error:"), with: nativeCategory)
             try AVAudioSession.sharedInstance().setMode(mode)
-        }
+        }*/
     } catch let error {
         managedAudioSessionLog("ManagedAudioSession setup error \(error)")
     }
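Note on the hunk above: the combined setCategory(_:mode:policy:options:) call is left commented out, and the session is instead configured with setCategory(_:options:) followed by setMode(_:); if the session then reports different options than requested, the category is applied again. A minimal sketch of that apply-then-verify sequence; applyCategory(_:mode:options:) is an illustrative helper, not the repo's API.

    import AVFoundation

    func applyCategory(_ category: AVAudioSession.Category,
                       mode: AVAudioSession.Mode,
                       options: AVAudioSession.CategoryOptions) {
        let session = AVAudioSession.sharedInstance()
        do {
            try session.setCategory(category, options: options)
            try session.setMode(mode)
            // Mirror the "resetting options" branch above: if the session ended up
            // with different options than requested, apply the category again.
            if session.categoryOptions != options {
                try session.setCategory(category, options: options)
            }
        } catch {
            print("audio session setup error: \(error)")
        }
    }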
@@ -844,6 +854,8 @@ public final class ManagedAudioSession {
     }
     private func setupOutputMode(_ outputMode: AudioSessionOutputMode, type: ManagedAudioSessionType) throws {
+        var outputMode = outputMode
+        outputMode = .custom(.builtin)
         managedAudioSessionLog("ManagedAudioSession setup \(outputMode) for \(type)")
         var resetToBuiltin = false
         switch outputMode {
@ -858,6 +870,7 @@ public final class ManagedAudioSession {
if let routes = AVAudioSession.sharedInstance().availableInputs { if let routes = AVAudioSession.sharedInstance().availableInputs {
for route in routes { for route in routes {
if route.portType == .builtInMic { if route.portType == .builtInMic {
let _ = try? AVAudioSession.sharedInstance().setInputDataSource(route.selectedDataSource)
let _ = try? AVAudioSession.sharedInstance().setPreferredInput(route) let _ = try? AVAudioSession.sharedInstance().setPreferredInput(route)
break break
} }
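Note on this hunk and the one below it: when preferring the built-in microphone, the code now also pins the port's currently selected data source via setInputDataSource(_:); the later hunk does the inverse, commenting out setPreferredInput and passing nil to setInputDataSource. A small sketch of the selection step; preferBuiltInMicrophone() is an illustrative helper, not the repo's API.

    import AVFoundation

    func preferBuiltInMicrophone() {
        let session = AVAudioSession.sharedInstance()
        guard let inputs = session.availableInputs else { return }
        for route in inputs where route.portType == .builtInMic {
            // Keep whichever data source (e.g. bottom/front mic) is currently selected.
            let _ = try? session.setInputDataSource(route.selectedDataSource)
            let _ = try? session.setPreferredInput(route)
            break
        }
    }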
@@ -914,7 +927,8 @@ public final class ManagedAudioSession {
                 if route.portType == .builtInMic {
                     if case .record = updatedType, self.isHeadsetPluggedInValue {
                     } else {
-                        let _ = try? AVAudioSession.sharedInstance().setPreferredInput(route)
+                        //let _ = try? AVAudioSession.sharedInstance().setPreferredInput(route)
+                        let _ = try? AVAudioSession.sharedInstance().setInputDataSource(nil)
                     }
                     break
                 }
@@ -945,7 +959,7 @@ public final class ManagedAudioSession {
             managedAudioSessionLog("\(CFAbsoluteTimeGetCurrent()) AudioSession updateCurrentAudioRouteInfo: \((CFAbsoluteTimeGetCurrent() - startTime) * 1000.0) ms")
             if case .voiceCall = type {
-                try AVAudioSession.sharedInstance().setPreferredIOBufferDuration(0.005)
+                //try AVAudioSession.sharedInstance().setPreferredIOBufferDuration(0.005)
             }
         } catch let error {
             managedAudioSessionLog("ManagedAudioSession activate error \(error)")


@@ -536,8 +536,6 @@ public final class PresentationCallImpl: PresentationCall {
             }
         }
-        #if DEBUG
-        #else
         if let audioSessionControl = audioSessionControl, previous == nil || previousControl == nil {
             if let callKitIntegration = self.callKitIntegration {
                 callKitIntegration.applyVoiceChatOutputMode(outputMode: .custom(self.currentAudioOutputValue))
@@ -546,7 +544,6 @@ public final class PresentationCallImpl: PresentationCall {
                 audioSessionControl.setup(synchronous: true)
             }
         }
-        #endif
         let mappedVideoState: PresentationCallState.VideoState
         let mappedRemoteVideoState: PresentationCallState.RemoteVideoState
@@ -869,13 +866,13 @@ public final class PresentationCallImpl: PresentationCall {
         }
         if tone != self.toneRenderer?.tone {
             if let tone = tone {
-                #if DEBUG
+                if "".isEmpty {
                 let _ = tone
-                #else
+                } else {
                 let toneRenderer = PresentationCallToneRenderer(tone: tone)
                 self.toneRenderer = toneRenderer
                 toneRenderer.setAudioSessionActive(self.isAudioSessionActive)
-                #endif
+                }
             } else {
                 self.toneRenderer = nil
             }
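Note on the hunk above (and on the PresentationGroupCallImpl hunks further down): the compile-time #if DEBUG / #else / #endif guards are replaced with an if "".isEmpty { ... } else { ... } check, which always takes the first branch at runtime but keeps both branches compiling, so the gate can be flipped in one place without touching build settings. A tiny sketch of the pattern; playTone(_:) is an illustrative function, not the repo's API.

    // "".isEmpty is always true, so the early return fires at runtime, but,
    // unlike code removed behind #if DEBUG, the disabled path is still
    // parsed and type-checked.
    func playTone(_ start: () -> Void) {
        if "".isEmpty {
            // Tone playback is switched off for now.
            return
        }
        start()
    }

    playTone {
        print("starting tone renderer")  // never reached while the gate is closed
    }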
@@ -1052,8 +1049,6 @@ public final class PresentationCallImpl: PresentationCall {
             |> delay(1.0, queue: Queue.mainQueue())
         ))
-        #if DEBUG
-        #else
         if let audioSessionControl = self.audioSessionControl {
             if let callKitIntegration = self.callKitIntegration {
                 callKitIntegration.applyVoiceChatOutputMode(outputMode: .custom(self.currentAudioOutputValue))
@@ -1061,7 +1056,6 @@ public final class PresentationCallImpl: PresentationCall {
                 audioSessionControl.setOutputMode(.custom(output))
             }
         }
-        #endif
     }
     public func debugInfo() -> Signal<(String, String), NoError> {


@@ -1575,6 +1575,8 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
         self.internalStatePromise.set(.single(internalState))
         if let audioSessionControl = audioSessionControl, previousControl == nil {
+            if "".isEmpty {
+            } else {
             if self.isStream {
                 audioSessionControl.setOutputMode(.system)
             } else {
@@ -1587,6 +1589,7 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
             }
             audioSessionControl.setup(synchronous: false)
         }
+        }
         self.audioSessionShouldBeActive.set(true)
@@ -2475,6 +2478,9 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
     }
     private func beginTone(tone: PresentationCallTone) {
+        if "".isEmpty {
+            return
+        }
         if self.isStream {
             switch tone {
             case .groupJoined, .groupLeft:

@@ -1 +1 @@
-Subproject commit 6cb21fc91be59356d02059e639df87a3b544bfb3
+Subproject commit 97d616abe1dae6214b11eae19b3ec25cb88d98ce


@@ -2954,6 +2954,7 @@ webrtc_sources = [
     "api/video_codecs/video_encoder_factory_template_libvpx_vp9_adapter.h",
     "p2p/base/ice_agent_interface.h",
     "api/video_codecs/video_encoder_factory_template_open_h264_adapter.h",
+    "api/video_codecs/simulcast_stream.cc",
 ]
 ios_objc_sources = [