Fix tones and update tgcalls

This commit is contained in:
Ali 2022-12-02 18:29:21 +04:00
parent 0871e98409
commit 6edaa572be
4 changed files with 24 additions and 230 deletions

View File

@@ -15,180 +15,6 @@ import AccountContext
import DeviceProximity
import PhoneNumberFormat
// Renders an in-call tone (ring, busy, connecting, …) by lazily decoding the tone's
// PCM data and serving fixed-size frames to a MediaPlayerAudioRenderer on demand.
// NOTE(review): this class appears inside a deletion hunk of the commit ("Fix tones and
// update tgcalls") — it is being removed in favor of feeding tones to the call context
// via OngoingCallContext.Tone. Comments below describe the pre-removal behavior.
final class PresentationCallToneRenderer {
// Queue to which all mutable state access is confined (the main queue, see init).
let queue: Queue
let tone: PresentationCallTone
private let toneRenderer: MediaPlayerAudioRenderer
// Custom audio-session control handed back by the renderer; kept so that
// setAudioSessionActive(_:) can activate/deactivate the session later.
private var toneRendererAudioSession: MediaPlayerAudioSessionCustomControl?
private var toneRendererAudioSessionActivated = false
private let audioLevelPipe = ValuePipe<Float>()
// `completed` is invoked at most once, when the tone finishes looping or when
// decoding/buffer creation fails. It may be called from a background queue.
init(tone: PresentationCallTone, completed: (() -> Void)? = nil) {
let queue = Queue.mainQueue()
self.queue = queue
self.tone = tone
// Indirection so the renderer (created next) can call back into `self`
// before initialization of all stored properties completes.
var controlImpl: ((MediaPlayerAudioSessionCustomControl) -> Disposable)?
self.toneRenderer = MediaPlayerAudioRenderer(audioSession: .custom({ control in
return controlImpl?(control) ?? EmptyDisposable
}), playAndRecord: false, useVoiceProcessingMode: true, ambient: false, forceAudioToSpeaker: false, baseRate: 1.0, audioLevelPipe: self.audioLevelPipe, updatedRate: {}, audioPaused: {})
controlImpl = { [weak self] control in
queue.async {
if let strongSelf = self {
// Stash the control; if activation was requested before the
// control arrived, activate immediately.
strongSelf.toneRendererAudioSession = control
if strongSelf.toneRendererAudioSessionActivated {
control.activate()
}
}
}
// Intentionally empty disposable: session teardown is driven by
// setAudioSessionActive(false), not by disposing this.
return ActionDisposable {
}
}
// Shared state for the frame-pulling closure below, which runs on a
// global concurrent queue; Atomic wrappers provide the synchronization.
let toneDataOffset = Atomic<Int>(value: 0)
let toneData = Atomic<Data?>(value: nil)
// Guards against invoking `completed` more than once.
let reportedCompletion = Atomic<Bool>(value: false)
self.toneRenderer.beginRequestingFrames(queue: DispatchQueue.global(), takeFrame: {
// Decode the tone's PCM data on first use and cache it.
var data = toneData.with { $0 }
if data == nil {
data = presentationCallToneData(tone)
if data != nil {
let _ = toneData.swap(data)
}
}
guard let toneData = data else {
// No decodable data for this tone: report completion once and stop.
if !reportedCompletion.swap(true) {
completed?()
}
return .finished
}
// Total byte budget across all loops; nil means loop forever.
let toneDataMaxOffset: Int?
if let loopCount = tone.loopCount {
toneDataMaxOffset = (data?.count ?? 0) * loopCount
} else {
toneDataMaxOffset = nil
}
// Frame size in BYTES. With 2 bytes per sample (see sampleCount below)
// and a 44100 timescale this is 22050 samples ≈ 0.5 s per frame —
// presumably 16-bit mono 44.1 kHz PCM; TODO confirm against
// presentationCallToneData's output format.
let frameSize = 44100
// Atomically claim this frame's byte range [takeOffset, takeOffset+frameSize).
var takeOffset: Int?
let _ = toneDataOffset.modify { current in
takeOffset = current
return current + frameSize
}
if let takeOffset = takeOffset {
if let toneDataMaxOffset = toneDataMaxOffset, takeOffset >= toneDataMaxOffset {
// All loops consumed. Delay the completion callback by 1 s,
// presumably to let the tail of the tone drain — TODO confirm.
if !reportedCompletion.swap(true) {
Queue.mainQueue().after(1.0, {
completed?()
})
}
return .finished
}
var blockBuffer: CMBlockBuffer?
// Raw buffer for one frame; ownership is handed to the CMBlockBuffer
// below (default allocator frees it — NOTE(review): verify that
// blockAllocator nil implies kCFAllocatorDefault ownership here).
let bytes = malloc(frameSize)!
toneData.withUnsafeBytes { dataBuffer -> Void in
guard let dataBytes = dataBuffer.baseAddress?.assumingMemoryBound(to: UInt8.self) else {
return
}
// Copy up to frameSize bytes, wrapping around the tone data to
// implement looping; stop early at the loop-count byte limit.
var takenCount = 0
while takenCount < frameSize {
let dataOffset = (takeOffset + takenCount) % toneData.count
let dataCount = min(frameSize - takenCount, toneData.count - dataOffset)
//print("take from \(dataOffset) count: \(dataCount)")
memcpy(bytes.advanced(by: takenCount), dataBytes.advanced(by: dataOffset), dataCount)
takenCount += dataCount
if let toneDataMaxOffset = toneDataMaxOffset, takeOffset + takenCount >= toneDataMaxOffset {
break
}
}
// Pad a short final frame with silence.
if takenCount < frameSize {
//print("fill with zeros from \(takenCount) count: \(frameSize - takenCount)")
memset(bytes.advanced(by: takenCount), 0, frameSize - takenCount)
}
}
/*if let toneDataMaxOffset = toneDataMaxOffset, takeOffset + frameSize > toneDataMaxOffset {
let validCount = max(0, toneDataMaxOffset - takeOffset)
memset(bytes.advanced(by: validCount), 0, frameSize - validCount)
print("clear from \(validCount) count: \(frameSize - validCount)")
}*/
// Wrap the malloc'd bytes in a CMBlockBuffer without copying.
let status = CMBlockBufferCreateWithMemoryBlock(allocator: nil, memoryBlock: bytes, blockLength: frameSize, blockAllocator: nil, customBlockSource: nil, offsetToData: 0, dataLength: frameSize, flags: 0, blockBufferOut: &blockBuffer)
if status != noErr {
if !reportedCompletion.swap(true) {
completed?()
}
return .finished
}
// 2 bytes per sample; pts advances by byte-offset/2 samples at 44100 Hz.
let sampleCount = frameSize / 2
let pts = CMTime(value: Int64(takeOffset / 2), timescale: 44100)
var timingInfo = CMSampleTimingInfo(duration: CMTime(value: Int64(sampleCount), timescale: 44100), presentationTimeStamp: pts, decodeTimeStamp: pts)
var sampleBuffer: CMSampleBuffer?
var sampleSize = frameSize
guard CMSampleBufferCreate(allocator: nil, dataBuffer: blockBuffer, dataReady: true, makeDataReadyCallback: nil, refcon: nil, formatDescription: nil, sampleCount: 1, sampleTimingEntryCount: 1, sampleTimingArray: &timingInfo, sampleSizeEntryCount: 1, sampleSizeArray: &sampleSize, sampleBufferOut: &sampleBuffer) == noErr else {
if !reportedCompletion.swap(true) {
completed?()
}
return .finished
}
if let sampleBuffer = sampleBuffer {
// Already-decoded PCM: hand it to the renderer as a decoded audio frame.
return .frame(MediaTrackFrame(type: .audio, sampleBuffer: sampleBuffer, resetDecoder: false, decoded: true))
} else {
if !reportedCompletion.swap(true) {
completed?()
}
return .finished
}
} else {
// Unreachable in practice (modify always sets takeOffset), but kept
// as a defensive completion path.
if !reportedCompletion.swap(true) {
completed?()
}
return .finished
}
})
// Start pulling frames immediately at normal rate.
self.toneRenderer.start()
self.toneRenderer.setRate(1.0)
}
deinit {
// Must be deallocated on the owning (main) queue.
assert(self.queue.isCurrent())
self.toneRenderer.stop()
}
// Activates/deactivates the custom audio session and pauses/resumes playback
// accordingly. Safe to call before the session control has been delivered —
// the pending state is applied when it arrives (see controlImpl in init).
func setAudioSessionActive(_ value: Bool) {
if self.toneRendererAudioSessionActivated != value {
self.toneRendererAudioSessionActivated = value
if let control = self.toneRendererAudioSession {
if value {
self.toneRenderer.setRate(1.0)
control.activate()
} else {
self.toneRenderer.setRate(0.0)
control.deactivate()
}
}
}
}
}
public final class PresentationCallImpl: PresentationCall {
public let context: AccountContext
private let audioSession: ManagedAudioSession
@@ -283,7 +109,6 @@ public final class PresentationCallImpl: PresentationCall {
private var audioSessionActiveDisposable: Disposable?
private var isAudioSessionActive = false
private var toneRenderer: PresentationCallToneRenderer?
private var currentTone: PresentationCallTone?
private var droppedCall = false
@@ -465,9 +290,11 @@ public final class PresentationCallImpl: PresentationCall {
}
})
#if DEBUG
if let data = context.currentAppConfiguration.with({ $0 }).data, let _ = data["ios_killswitch_disable_call_device"] {
self.sharedAudioDevice = nil
} else {
self.sharedAudioDevice = OngoingCallContext.AudioDevice.create()
#endif
}
self.audioSessionActiveDisposable = (self.audioSessionActive.get()
|> deliverOnMainQueue).start(next: { [weak self] value in
@@ -873,26 +700,14 @@ public final class PresentationCallImpl: PresentationCall {
if tone != self.currentTone {
self.currentTone = tone
self.sharedAudioDevice?.setTone(tone: tone.flatMap(presentationCallToneData).flatMap { data in
return OngoingCallContext.Tone(samples: data, sampleRate: 44100, loopCount: tone?.loopCount ?? 1000000)
return OngoingCallContext.Tone(samples: data, sampleRate: 48000, loopCount: tone?.loopCount ?? 1000000)
})
/*if let tone = tone {
if "".isEmpty {
let _ = tone
} else {
let toneRenderer = PresentationCallToneRenderer(tone: tone)
self.toneRenderer = toneRenderer
toneRenderer.setAudioSessionActive(self.isAudioSessionActive)
}
} else {
self.toneRenderer = nil
}*/
}
}
// Records the audio-session active state and forwards it to the shared call
// audio device (forwarded unconditionally, even when the value is unchanged).
// NOTE(review): this span is diff context with +/- markers stripped — the
// toneRenderer line below is most likely the line being REMOVED by this
// commit (the legacy tone path superseded by sharedAudioDevice); verify
// against the actual patch before relying on it.
private func updateIsAudioSessionActive(_ value: Bool) {
if self.isAudioSessionActive != value {
self.isAudioSessionActive = value
self.toneRenderer?.setAudioSessionActive(value)
}
self.sharedAudioDevice?.setIsAudioSessionActive(value)
}

View File

@@ -832,7 +832,6 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
private var didStartConnectingOnce: Bool = false
private var didConnectOnce: Bool = false
private var toneRenderer: PresentationCallToneRenderer?
private var videoCapturer: OngoingCallVideoCapturer?
private var useFrontCamera: Bool = true
@@ -1850,7 +1849,7 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
if isConnecting {
strongSelf.beginTone(tone: .groupConnecting)
} else {
strongSelf.toneRenderer = nil
strongSelf.beginTone(tone: nil)
}
}
@@ -2479,12 +2478,11 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
// Group-call variant: records the audio-session active state.
// NOTE(review): diff context with +/- markers stripped — the toneRenderer
// line below is most likely the line REMOVED by this commit (the commit
// drops PresentationGroupCallImpl's toneRenderer property entirely, see the
// hunk at former line 832); confirm against the actual patch.
private func updateIsAudioSessionActive(_ value: Bool) {
if self.isAudioSessionActive != value {
self.isAudioSessionActive = value
self.toneRenderer?.setAudioSessionActive(value)
}
}
private func beginTone(tone: PresentationCallTone) {
if self.isStream {
private func beginTone(tone: PresentationCallTone?) {
if self.isStream, let tone {
switch tone {
case .groupJoined, .groupLeft:
return
@@ -2492,40 +2490,15 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
break
}
}
if let toneData = presentationCallToneData(tone) {
if let tone, let toneData = presentationCallToneData(tone) {
self.genericCallContext?.setTone(tone: OngoingGroupCallContext.Tone(
samples: toneData,
sampleRate: 44100,
sampleRate: 48000,
loopCount: tone.loopCount ?? 100000
))
} else {
self.genericCallContext?.setTone(tone: nil)
}
/*if "".isEmpty {
return
}
if self.isStream {
switch tone {
case .groupJoined, .groupLeft:
return
default:
break
}
}
var completed: (() -> Void)?
let toneRenderer = PresentationCallToneRenderer(tone: tone, completed: {
completed?()
})
completed = { [weak self, weak toneRenderer] in
Queue.mainQueue().async {
guard let strongSelf = self, let toneRenderer = toneRenderer, toneRenderer === strongSelf.toneRenderer else {
return
}
strongSelf.toneRenderer = nil
}
}
self.toneRenderer = toneRenderer
toneRenderer.setAudioSessionActive(self.isAudioSessionActive)*/
}
public func playTone(_ tone: PresentationGroupCallTone) {

View File

@@ -434,11 +434,12 @@ public final class OngoingGroupCallContext {
init(queue: Queue, inputDeviceId: String, outputDeviceId: String, audioSessionActive: Signal<Bool, NoError>, video: OngoingCallVideoCapturer?, requestMediaChannelDescriptions: @escaping (Set<UInt32>, @escaping ([MediaChannelDescription]) -> Void) -> Disposable, rejoinNeeded: @escaping () -> Void, outgoingAudioBitrateKbit: Int32?, videoContentType: VideoContentType, enableNoiseSuppression: Bool, disableAudioInput: Bool, preferX264: Bool, logPath: String) {
self.queue = queue
#if DEBUG
self.audioDevice = nil
/*#if DEBUG
self.audioDevice = SharedCallAudioDevice(disableRecording: disableAudioInput)
#else
self.audioDevice = nil
#endif
#endif*/
var networkStateUpdatedImpl: ((GroupCallNetworkState) -> Void)?
var audioLevelsUpdatedImpl: (([NSNumber]) -> Void)?
@@ -907,9 +908,14 @@ public final class OngoingGroupCallContext {
}
func setTone(tone: Tone?) {
self.audioDevice?.setTone(tone.flatMap { tone in
let mappedTone = tone.flatMap { tone in
CallAudioTone(samples: tone.samples, sampleRate: tone.sampleRate, loopCount: tone.loopCount)
})
}
if let audioDevice = self.audioDevice {
audioDevice.setTone(mappedTone)
} else {
self.context.setTone(mappedTone)
}
}
}

@@ -1 +1 @@
Subproject commit cddd3b0666e4b2a0135e18c8591b6b5e025490f4
Subproject commit e7032ab6f7b305cbd1914e2d422646c2fd132b49