Reorder audio playback initialization, add play(mixWithOthers:) mode

This commit is contained in:
Ali
2023-07-03 18:46:18 +02:00
parent bc23589a5f
commit e316f0521b
12 changed files with 134 additions and 100 deletions

View File

@@ -238,6 +238,7 @@ private final class AudioPlayerRendererContext {
var audioSessionControl: ManagedAudioSessionControl?
let playAndRecord: Bool
let ambient: Bool
let mixWithOthers: Bool
var forceAudioToSpeaker: Bool {
didSet {
if self.forceAudioToSpeaker != oldValue {
@@ -251,7 +252,7 @@ private final class AudioPlayerRendererContext {
}
}
init(controlTimebase: CMTimebase, audioSession: MediaPlayerAudioSessionControl, forAudioVideoMessage: Bool, playAndRecord: Bool, useVoiceProcessingMode: Bool, ambient: Bool, forceAudioToSpeaker: Bool, baseRate: Double, audioLevelPipe: ValuePipe<Float>, updatedRate: @escaping () -> Void, audioPaused: @escaping () -> Void) {
init(controlTimebase: CMTimebase, audioSession: MediaPlayerAudioSessionControl, forAudioVideoMessage: Bool, playAndRecord: Bool, useVoiceProcessingMode: Bool, ambient: Bool, mixWithOthers: Bool, forceAudioToSpeaker: Bool, baseRate: Double, audioLevelPipe: ValuePipe<Float>, updatedRate: @escaping () -> Void, audioPaused: @escaping () -> Void) {
assert(audioPlayerRendererQueue.isCurrent())
self.audioSession = audioSession
@@ -267,6 +268,7 @@ private final class AudioPlayerRendererContext {
self.playAndRecord = playAndRecord
self.useVoiceProcessingMode = useVoiceProcessingMode
self.ambient = ambient
self.mixWithOthers = mixWithOthers
self.audioStreamDescription = audioRendererNativeStreamDescription()
@@ -370,7 +372,7 @@ private final class AudioPlayerRendererContext {
if self.paused {
self.paused = false
self.startAudioUnit()
self.acquireAudioSession()
}
}
@@ -384,6 +386,69 @@ private final class AudioPlayerRendererContext {
}
}
/// Requests the shared audio session for playback, extracted so it can run on
/// resume (`play` path above) instead of only inside `startAudioUnit`.
/// Session type selection: `.ambient` wins outright; otherwise play-and-record
/// content uses `.playWithPossiblePortOverride`, and plain playback uses
/// `.play(mixWithOthers:)` with the new `mixWithOthers` flag from this commit.
/// All callbacks re-hop onto `audioPlayerRendererQueue` before touching state.
private func acquireAudioSession() {
switch self.audioSession {
case let .manager(manager):
// `once: self.ambient` — ambient sessions are pushed only once; presumably the
// manager drops them after first deactivation (TODO confirm against ManagedAudioSession).
self.audioSessionDisposable.set(manager.push(audioSessionType: self.ambient ? .ambient : (self.playAndRecord ? .playWithPossiblePortOverride : .play(mixWithOthers: self.mixWithOthers)), outputMode: self.forceAudioToSpeaker ? .speakerIfNoHeadphones : .system, once: self.ambient, manualActivate: { [weak self] control in
audioPlayerRendererQueue.async {
if let strongSelf = self {
strongSelf.audioSessionControl = control
// Only activate if playback wasn't paused while waiting for the session.
if !strongSelf.paused {
control.setup()
control.setOutputMode(strongSelf.forceAudioToSpeaker ? .speakerIfNoHeadphones : .system)
control.activate({ _ in
audioPlayerRendererQueue.async {
// Re-check `paused`: state may have changed between activation
// request and this completion callback.
if let strongSelf = self, !strongSelf.paused {
strongSelf.audioSessionAcquired()
}
}
})
}
}
}
}, deactivate: { [weak self] temporary in
// Returns a Signal so the manager can await teardown completion.
return Signal { subscriber in
audioPlayerRendererQueue.async {
if let strongSelf = self {
strongSelf.audioSessionControl = nil
// A temporary deactivation (e.g. a brief interruption) keeps the
// renderer alive; a permanent one pauses and stops it.
if !temporary {
strongSelf.audioPaused()
strongSelf.stop()
}
subscriber.putCompletion()
}
}
// Deactivation work is not cancellable once dispatched.
return EmptyDisposable
}
}, headsetConnectionStatusChanged: { [weak self] value in
audioPlayerRendererQueue.async {
// Headset unplugged (`value == false`): pause rather than blast the speaker.
if let strongSelf = self, !value {
strongSelf.audioPaused()
}
}
}))
case let .custom(request):
// Custom session control: the host supplies activate/deactivate; we mirror
// the manager path's paused-check and teardown behavior.
self.audioSessionDisposable.set(request(MediaPlayerAudioSessionCustomControl(activate: { [weak self] in
audioPlayerRendererQueue.async {
if let strongSelf = self {
if !strongSelf.paused {
strongSelf.audioSessionAcquired()
}
}
}
}, deactivate: { [weak self] in
audioPlayerRendererQueue.async {
if let strongSelf = self {
strongSelf.audioSessionControl = nil
strongSelf.audioPaused()
strongSelf.stop()
}
}
})))
}
}
private func startAudioUnit() {
assert(audioPlayerRendererQueue.isCurrent())
@@ -538,72 +603,13 @@ private final class AudioPlayerRendererContext {
self.equalizerAudioUnit = equalizerAudioUnit
self.outputAudioUnit = outputAudioUnit
}
switch self.audioSession {
case let .manager(manager):
self.audioSessionDisposable.set(manager.push(audioSessionType: self.ambient ? .ambient : (self.playAndRecord ? .playWithPossiblePortOverride : .play), outputMode: self.forceAudioToSpeaker ? .speakerIfNoHeadphones : .system, once: self.ambient, manualActivate: { [weak self] control in
audioPlayerRendererQueue.async {
if let strongSelf = self {
strongSelf.audioSessionControl = control
if !strongSelf.paused {
control.setup()
control.setOutputMode(strongSelf.forceAudioToSpeaker ? .speakerIfNoHeadphones : .system)
control.activate({ _ in
audioPlayerRendererQueue.async {
if let strongSelf = self, !strongSelf.paused {
strongSelf.audioSessionAcquired()
}
}
})
}
}
}
}, deactivate: { [weak self] temporary in
return Signal { subscriber in
audioPlayerRendererQueue.async {
if let strongSelf = self {
strongSelf.audioSessionControl = nil
if !temporary {
strongSelf.audioPaused()
strongSelf.stop()
}
subscriber.putCompletion()
}
}
return EmptyDisposable
}
}, headsetConnectionStatusChanged: { [weak self] value in
audioPlayerRendererQueue.async {
if let strongSelf = self, !value {
strongSelf.audioPaused()
}
}
}))
case let .custom(request):
self.audioSessionDisposable.set(request(MediaPlayerAudioSessionCustomControl(activate: { [weak self] in
audioPlayerRendererQueue.async {
if let strongSelf = self {
if !strongSelf.paused {
strongSelf.audioSessionAcquired()
}
}
}
}, deactivate: { [weak self] in
audioPlayerRendererQueue.async {
if let strongSelf = self {
strongSelf.audioSessionControl = nil
strongSelf.audioPaused()
strongSelf.stop()
}
}
})))
}
}
private func audioSessionAcquired() {
assert(audioPlayerRendererQueue.isCurrent())
self.startAudioUnit()
if let audioGraph = self.audioGraph {
let startTime = CFAbsoluteTimeGetCurrent()
@@ -821,7 +827,7 @@ public final class MediaPlayerAudioRenderer {
private let audioClock: CMClock
public let audioTimebase: CMTimebase
public init(audioSession: MediaPlayerAudioSessionControl, forAudioVideoMessage: Bool = false, playAndRecord: Bool, useVoiceProcessingMode: Bool = false, ambient: Bool, forceAudioToSpeaker: Bool, baseRate: Double, audioLevelPipe: ValuePipe<Float>, updatedRate: @escaping () -> Void, audioPaused: @escaping () -> Void) {
public init(audioSession: MediaPlayerAudioSessionControl, forAudioVideoMessage: Bool = false, playAndRecord: Bool, useVoiceProcessingMode: Bool = false, ambient: Bool, mixWithOthers: Bool, forceAudioToSpeaker: Bool, baseRate: Double, audioLevelPipe: ValuePipe<Float>, updatedRate: @escaping () -> Void, audioPaused: @escaping () -> Void) {
var audioClock: CMClock?
CMAudioClockCreate(allocator: nil, clockOut: &audioClock)
if audioClock == nil {
@@ -834,7 +840,7 @@ public final class MediaPlayerAudioRenderer {
self.audioTimebase = audioTimebase!
audioPlayerRendererQueue.async {
let context = AudioPlayerRendererContext(controlTimebase: audioTimebase!, audioSession: audioSession, forAudioVideoMessage: forAudioVideoMessage, playAndRecord: playAndRecord, useVoiceProcessingMode: useVoiceProcessingMode, ambient: ambient, forceAudioToSpeaker: forceAudioToSpeaker, baseRate: baseRate, audioLevelPipe: audioLevelPipe, updatedRate: updatedRate, audioPaused: audioPaused)
let context = AudioPlayerRendererContext(controlTimebase: audioTimebase!, audioSession: audioSession, forAudioVideoMessage: forAudioVideoMessage, playAndRecord: playAndRecord, useVoiceProcessingMode: useVoiceProcessingMode, ambient: ambient, mixWithOthers: mixWithOthers, forceAudioToSpeaker: forceAudioToSpeaker, baseRate: baseRate, audioLevelPipe: audioLevelPipe, updatedRate: updatedRate, audioPaused: audioPaused)
self.contextRef = Unmanaged.passRetained(context)
}
}