From b9dba1efc80a784169b1dfa716e8724e31e2d429 Mon Sep 17 00:00:00 2001
From: Ali <>
Date: Tue, 29 Nov 2022 00:10:40 +0400
Subject: [PATCH] Temp

---
 .../Sources/PresentationGroupCall.swift       | 21 ++++-
 .../Sources/GroupCallContext.swift            | 24 +++++
 .../OngoingCallThreadLocalContext.h           | 12 +++
 .../Sources/OngoingCallThreadLocalContext.mm  | 92 ++++++++++++++++++-
 4 files changed, 144 insertions(+), 5 deletions(-)

diff --git a/submodules/TelegramCallsUI/Sources/PresentationGroupCall.swift b/submodules/TelegramCallsUI/Sources/PresentationGroupCall.swift
index dc13c99796..f8a5e4294d 100644
--- a/submodules/TelegramCallsUI/Sources/PresentationGroupCall.swift
+++ b/submodules/TelegramCallsUI/Sources/PresentationGroupCall.swift
@@ -433,6 +433,15 @@ private extension CurrentImpl {
             break
         }
     }
+    
+    func setTone(tone: OngoingGroupCallContext.Tone?) {
+        switch self {
+        case let .call(callContext):
+            callContext.setTone(tone: tone)
+        case .mediaStream:
+            break
+        }
+    }
 }
 
 public func groupCallLogsPath(account: Account) -> String {
@@ -2475,7 +2484,15 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
     }
     
     private func beginTone(tone: PresentationCallTone) {
-        if "".isEmpty {
+        if let toneData = presentationCallToneData(tone) {
+            self.genericCallContext?.setTone(tone: OngoingGroupCallContext.Tone(
+                samples: toneData,
+                sampleRate: 44100,
+                loopCount: 1000
+            ))
+        }
+        
+        /*if "".isEmpty {
             return
         }
         if self.isStream {
@@ -2500,7 +2517,7 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
         }
         
         self.toneRenderer = toneRenderer
-        toneRenderer.setAudioSessionActive(self.isAudioSessionActive)
+        toneRenderer.setAudioSessionActive(self.isAudioSessionActive)*/
     }
     
     public func playTone(_ tone: PresentationGroupCallTone) {
diff --git a/submodules/TelegramVoip/Sources/GroupCallContext.swift b/submodules/TelegramVoip/Sources/GroupCallContext.swift
index ddec28e0e7..61ceeb8ebc 100644
--- a/submodules/TelegramVoip/Sources/GroupCallContext.swift
+++ b/submodules/TelegramVoip/Sources/GroupCallContext.swift
@@ -400,6 +400,18 @@ public final class OngoingGroupCallContext {
         public var incomingVideoStats: [String: IncomingVideoStats]
     }
     
+    public final class Tone {
+        public let samples: Data
+        public let sampleRate: Int
+        public let loopCount: Int
+        
+        public init(samples: Data, sampleRate: Int, loopCount: Int) {
+            self.samples = samples
+            self.sampleRate = sampleRate
+            self.loopCount = loopCount
+        }
+    }
+    
     private final class Impl {
         let queue: Queue
         let context: GroupCallThreadLocalContext
@@ -884,6 +896,12 @@
                 completion(Stats(incomingVideoStats: incomingVideoStats))
             })
         }
+        
+        func setTone(tone: Tone?) {
+            self.context.setTone(tone.flatMap { tone in
+                CallAudioTone(samples: tone.samples, sampleRate: tone.sampleRate, loopCount: tone.loopCount)
+            })
+        }
     }
     
     private let queue = Queue()
@@ -1075,4 +1093,10 @@
             impl.getStats(completion: completion)
         }
     }
+    
+    public func setTone(tone: Tone?) {
+        self.impl.with { impl in
+            impl.setTone(tone: tone)
+        }
+    }
 }
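Note on the Swift surface above: the following is a rough usage sketch, not part of the patch. The Tone initializer, setTone(tone:), and the 44100 Hz / loopCount 1000 values are taken from the diff; the sine-wave helper and the wrapper function are hypothetical, and the 16-bit little-endian mono sample layout is an assumption based on how the Objective-C conversion further down copies the sample bytes.

import Foundation

// Hypothetical helper (not in the patch): synthesize a short sine beep as
// 16-bit little-endian PCM, the layout the native asTone conversion below
// appears to expect.
func makeBeepToneData(frequency: Double = 440.0, sampleRate: Int = 44100, durationMs: Int = 200) -> Data {
    let sampleCount = sampleRate * durationMs / 1000
    var samples = [Int16](repeating: 0, count: sampleCount)
    for i in 0..<sampleCount {
        let phase = 2.0 * Double.pi * frequency * Double(i) / Double(sampleRate)
        samples[i] = Int16(sin(phase) * Double(Int16.max) * 0.5)
    }
    return samples.withUnsafeBufferPointer { Data(buffer: $0) }
}

// Mirrors what beginTone(tone:) now does: hand the PCM to the active call
// context and let the audio device module loop it; passing nil presumably
// clears the tone again.
func playConnectingBeep(on context: OngoingGroupCallContext) {
    let tone = OngoingGroupCallContext.Tone(
        samples: makeBeepToneData(),
        sampleRate: 44100,
        loopCount: 1000
    )
    context.setTone(tone: tone)
}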
diff --git a/submodules/TgVoipWebrtc/PublicHeaders/TgVoipWebrtc/OngoingCallThreadLocalContext.h b/submodules/TgVoipWebrtc/PublicHeaders/TgVoipWebrtc/OngoingCallThreadLocalContext.h
index b965c91dbd..f33dd060b9 100644
--- a/submodules/TgVoipWebrtc/PublicHeaders/TgVoipWebrtc/OngoingCallThreadLocalContext.h
+++ b/submodules/TgVoipWebrtc/PublicHeaders/TgVoipWebrtc/OngoingCallThreadLocalContext.h
@@ -203,6 +203,16 @@ typedef NS_ENUM(int32_t, OngoingCallDataSavingWebrtc) {
 
 @end
 
+@interface CallAudioTone : NSObject
+
+@property (nonatomic, strong, readonly) NSData * _Nonnull samples;
+@property (nonatomic, readonly) NSInteger sampleRate;
+@property (nonatomic, readonly) NSInteger loopCount;
+
+- (instancetype _Nonnull)initWithSamples:(NSData * _Nonnull)samples sampleRate:(NSInteger)sampleRate loopCount:(NSInteger)loopCount;
+
+@end
+
 @interface OngoingCallThreadLocalContextWebrtc : NSObject
 
 + (void)logMessage:(NSString * _Nonnull)string;
@@ -391,6 +401,8 @@ typedef NS_ENUM(int32_t, OngoingGroupCallRequestedVideoQuality) {
 
 - (void)setManualAudioSessionIsActive:(bool)isAudioSessionActive;
 
+- (void)setTone:(CallAudioTone * _Nullable)tone;
+
 - (void)setConnectionMode:(OngoingCallConnectionMode)connectionMode keepBroadcastConnectedIfWasEnabled:(bool)keepBroadcastConnectedIfWasEnabled isUnifiedBroadcast:(bool)isUnifiedBroadcast;
 
 - (void)emitJoinPayload:(void (^ _Nonnull)(NSString * _Nonnull, uint32_t))completion;
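For orientation, the header object above is the bridging type that GroupCallContext.swift constructs; from Swift the initializer imports as CallAudioTone(samples:sampleRate:loopCount:). The helper below is illustrative only: it assumes the samples are 16-bit mono PCM, which matches how the implementation in the .mm diff below reads the NSData, and derives the wall-clock length of the looped tone from that assumption.

import Foundation

// Illustrative only (not in the patch): wrap raw PCM in CallAudioTone and
// estimate how long the looped tone plays, assuming 16-bit mono samples.
func makeTone(pcm: Data, sampleRate: Int = 44100, loopCount: Int = 1000) -> (tone: CallAudioTone, seconds: Double) {
    let tone = CallAudioTone(samples: pcm, sampleRate: sampleRate, loopCount: loopCount)
    let sampleCount = pcm.count / MemoryLayout<Int16>.size
    let seconds = Double(sampleCount) / Double(sampleRate) * Double(loopCount)
    return (tone, seconds)
}

For example, 200 ms of samples at 44100 Hz with loopCount 1000 keeps playing for roughly 200 seconds unless it is replaced or cleared earlier.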
diff --git a/submodules/TgVoipWebrtc/Sources/OngoingCallThreadLocalContext.mm b/submodules/TgVoipWebrtc/Sources/OngoingCallThreadLocalContext.mm
index af82bcbf6d..f6bb810310 100644
--- a/submodules/TgVoipWebrtc/Sources/OngoingCallThreadLocalContext.mm
+++ b/submodules/TgVoipWebrtc/Sources/OngoingCallThreadLocalContext.mm
@@ -792,6 +792,28 @@ tgcalls::VideoCaptureInterfaceObject *GetVideoCaptureAssumingSameThread(tgcalls:
 
 @end
 
+@implementation CallAudioTone
+
+- (instancetype _Nonnull)initWithSamples:(NSData * _Nonnull)samples sampleRate:(NSInteger)sampleRate loopCount:(NSInteger)loopCount {
+    self = [super init];
+    if (self != nil) {
+        _samples = samples;
+        _sampleRate = sampleRate;
+        _loopCount = loopCount;
+    }
+    return self;
+}
+
+- (std::shared_ptr<tgcalls::CallAudioTone>)asTone {
+    std::vector<int16_t> data;
+    data.resize(_samples.length / 2);
+    memcpy(data.data(), _samples.bytes, _samples.length);
+    
+    return std::make_shared<tgcalls::CallAudioTone>(std::move(data), (int)_sampleRate, (int)_loopCount);
+}
+
+@end
+
 @interface OngoingCallThreadLocalContextWebrtc () {
     NSString *_version;
     id _queue;
@@ -800,6 +822,9 @@
     bool _useManualAudioSessionControl;
     SharedCallAudioDevice *_audioDevice;
     
+    rtc::scoped_refptr<webrtc::tgcalls_ios_adm::AudioDeviceModuleIOS> _currentAudioDeviceModule;
+    rtc::Thread *_currentAudioDeviceModuleThread;
+    
     OngoingCallNetworkTypeWebrtc _networkType;
     NSTimeInterval _callReceiveTimeout;
     NSTimeInterval _callRingTimeout;
@@ -1213,11 +1238,20 @@ static void (*InternalVoipLoggingFunction)(NSString *) = NULL;
             }
         }];
     },
-    .createAudioDeviceModule = [audioDeviceModule](webrtc::TaskQueueFactory *taskQueueFactory) -> rtc::scoped_refptr<webrtc::AudioDeviceModule> {
+    .createAudioDeviceModule = [weakSelf, queue, audioDeviceModule](webrtc::TaskQueueFactory *taskQueueFactory) -> rtc::scoped_refptr<webrtc::AudioDeviceModule> {
         if (audioDeviceModule) {
             return audioDeviceModule->getSyncAssumingSameThread()->audioDeviceModule();
         } else {
-            return rtc::make_ref_counted<webrtc::tgcalls_ios_adm::AudioDeviceModuleIOS>(false, false, 1);
+            rtc::Thread *audioDeviceModuleThread = rtc::Thread::Current();
+            auto resultModule = rtc::make_ref_counted<webrtc::tgcalls_ios_adm::AudioDeviceModuleIOS>(false, false, 1);
+            [queue dispatch:^{
+                __strong OngoingCallThreadLocalContextWebrtc *strongSelf = weakSelf;
+                if (strongSelf) {
+                    strongSelf->_currentAudioDeviceModuleThread = audioDeviceModuleThread;
+                    strongSelf->_currentAudioDeviceModule = resultModule;
+                }
+            }];
+            return resultModule;
         }
     }
 });
@@ -1232,6 +1266,14 @@
         InternalVoipLoggingFunction(@"OngoingCallThreadLocalContext: dealloc");
     }
     
+    if (_currentAudioDeviceModuleThread) {
+        auto currentAudioDeviceModule = _currentAudioDeviceModule;
+        _currentAudioDeviceModule = nullptr;
+        _currentAudioDeviceModuleThread->PostTask([currentAudioDeviceModule]() {
+        });
+        _currentAudioDeviceModuleThread = nullptr;
+    }
+    
     if (_tgVoip != NULL) {
         [self stop:nil];
     }
@@ -1537,6 +1579,9 @@ private:
     
     int _nextSinkId;
     NSMutableDictionary *_sinks;
+    
+    rtc::scoped_refptr<webrtc::tgcalls_ios_adm::AudioDeviceModuleIOS> _currentAudioDeviceModule;
+    rtc::Thread *_currentAudioDeviceModuleThread;
 }
 
 @end
@@ -1777,19 +1822,60 @@ private:
             return std::make_shared<BroadcastPartTaskImpl>(task);
         },
-        .minOutgoingVideoBitrateKbit = minOutgoingVideoBitrateKbit
+        .minOutgoingVideoBitrateKbit = minOutgoingVideoBitrateKbit,
+        .createAudioDeviceModule = [weakSelf, queue, disableAudioInput](webrtc::TaskQueueFactory *taskQueueFactory) -> rtc::scoped_refptr<webrtc::AudioDeviceModule> {
+            rtc::Thread *audioDeviceModuleThread = rtc::Thread::Current();
+            auto resultModule = rtc::make_ref_counted<webrtc::tgcalls_ios_adm::AudioDeviceModuleIOS>(false, disableAudioInput, disableAudioInput ? 2 : 1);
+            [queue dispatch:^{
+                __strong GroupCallThreadLocalContext *strongSelf = weakSelf;
+                if (strongSelf) {
+                    strongSelf->_currentAudioDeviceModuleThread = audioDeviceModuleThread;
+                    strongSelf->_currentAudioDeviceModule = resultModule;
+                }
+            }];
+            return resultModule;
+        }
         }));
     }
     
     return self;
 }
 
+- (void)dealloc {
+    if (_currentAudioDeviceModuleThread) {
+        auto currentAudioDeviceModule = _currentAudioDeviceModule;
+        _currentAudioDeviceModule = nullptr;
+        _currentAudioDeviceModuleThread->PostTask([currentAudioDeviceModule]() {
+        });
+        _currentAudioDeviceModuleThread = nullptr;
+    }
+}
+
 - (void)stop {
+    if (_currentAudioDeviceModuleThread) {
+        auto currentAudioDeviceModule = _currentAudioDeviceModule;
+        _currentAudioDeviceModule = nullptr;
+        _currentAudioDeviceModuleThread->PostTask([currentAudioDeviceModule]() {
+        });
+        _currentAudioDeviceModuleThread = nullptr;
+    }
+    
     if (_instance) {
         _instance->stop();
         _instance.reset();
     }
 }
 
+- (void)setTone:(CallAudioTone * _Nullable)tone {
+    if (_currentAudioDeviceModuleThread) {
+        auto currentAudioDeviceModule = _currentAudioDeviceModule;
+        if (currentAudioDeviceModule) {
+            _currentAudioDeviceModuleThread->PostTask([currentAudioDeviceModule, tone]() {
+                currentAudioDeviceModule->setTone([tone asTone]);
+            });
+        }
+    }
+}
+
 - (void)setManualAudioSessionIsActive:(bool)isAudioSessionActive {
     if (isAudioSessionActive) {
         [[RTCAudioSession sharedInstance] audioSessionDidActivate:[AVAudioSession sharedInstance]];
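One design point worth noting in the .mm changes above: the audio device module is created on a WebRTC-owned thread, so each context records that thread and only ever touches the module from it. setTone: posts the converted tone over with PostTask, and dealloc/stop hand the last strong reference to an empty task so the module is released on the thread that created it. A loose Swift analogy of that ownership pattern, with DispatchQueue standing in for rtc::Thread and a placeholder module type (none of these names exist in the codebase), might look like this:

import Foundation

// Placeholder for the native audio device module created in createAudioDeviceModule.
final class FakeAudioDeviceModule {
    func setTone(_ samples: Data?) { /* feed the tone to the playout path */ }
}

final class ToneOwner {
    // Stands in for the rtc::Thread the module was created on.
    private var moduleQueue: DispatchQueue?
    private var module: FakeAudioDeviceModule?

    // Mirrors createAudioDeviceModule: remember the creating "thread" together
    // with the module so later calls can be routed back to it.
    func adoptModule(_ module: FakeAudioDeviceModule, createdOn queue: DispatchQueue) {
        self.moduleQueue = queue
        self.module = module
    }

    // Mirrors the -setTone: method: hop to the owning queue before touching the module.
    func setTone(_ samples: Data?) {
        guard let queue = moduleQueue, let module = module else { return }
        queue.async { module.setTone(samples) }
    }

    // Mirrors the dealloc/stop pattern: clear our references, but let a closure
    // scheduled on the owning queue hold the last strong reference, so the
    // module is destroyed on the thread that created it.
    func shutdown() {
        guard let queue = moduleQueue, let module = module else { return }
        self.module = nil
        self.moduleQueue = nil
        queue.async { _ = module }
    }
}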