Ali 2022-11-29 00:10:40 +04:00
parent 113bd95260
commit b9dba1efc8
4 changed files with 144 additions and 5 deletions

View File

@@ -433,6 +433,15 @@ private extension CurrentImpl {
            break
        }
    }

    func setTone(tone: OngoingGroupCallContext.Tone?) {
        switch self {
        case let .call(callContext):
            callContext.setTone(tone: tone)
        case .mediaStream:
            break
        }
    }
}
public func groupCallLogsPath(account: Account) -> String {
@@ -2475,7 +2484,15 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
    }

    private func beginTone(tone: PresentationCallTone) {
        if "".isEmpty { // Always-true guard: temporarily forces the new ADM-based tone path.
            if let toneData = presentationCallToneData(tone) {
                self.genericCallContext?.setTone(tone: OngoingGroupCallContext.Tone(
                    samples: toneData,
                    sampleRate: 44100,
                    loopCount: 1000
                ))
            }
        /*if "".isEmpty {
            return
        }
        if self.isStream {
@@ -2500,7 +2517,7 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
        }

        self.toneRenderer = toneRenderer
        toneRenderer.setAudioSessionActive(self.isAudioSessionActive)
        toneRenderer.setAudioSessionActive(self.isAudioSessionActive)*/
    }

    public func playTone(_ tone: PresentationGroupCallTone) {
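
The hunk above hard-codes a 44100 Hz sample rate and a loopCount of 1000, passing the bytes from presentationCallToneData straight through. Judging by the NSData-to-int16_t copy in CallAudioTone further down, the buffer is interpreted as native-endian 16-bit PCM. A minimal sketch of producing such a buffer by hand, assuming that layout (makeTonePCM is a hypothetical helper, not part of this commit):

    import Foundation

    // Hypothetical helper: synthesizes `duration` seconds of a sine tone as
    // 16-bit PCM in native byte order, the layout CallAudioTone appears to expect.
    func makeTonePCM(frequency: Double, duration: Double, sampleRate: Int = 44100) -> Data {
        let frameCount = Int(Double(sampleRate) * duration)
        var samples = [Int16](repeating: 0, count: frameCount)
        for i in 0..<frameCount {
            let phase = 2.0 * Double.pi * frequency * Double(i) / Double(sampleRate)
            samples[i] = Int16(sin(phase) * 0.5 * Double(Int16.max))
        }
        return samples.withUnsafeBufferPointer { Data(buffer: $0) }
    }

    // Feeding it to the new API, mirroring the beginTone(tone:) change above:
    // genericCallContext?.setTone(tone: OngoingGroupCallContext.Tone(
    //     samples: makeTonePCM(frequency: 440.0, duration: 0.5),
    //     sampleRate: 44100,
    //     loopCount: 1000
    // ))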

View File

@@ -400,6 +400,18 @@ public final class OngoingGroupCallContext {
        public var incomingVideoStats: [String: IncomingVideoStats]
    }

    public final class Tone {
        public let samples: Data
        public let sampleRate: Int
        public let loopCount: Int

        public init(samples: Data, sampleRate: Int, loopCount: Int) {
            self.samples = samples
            self.sampleRate = sampleRate
            self.loopCount = loopCount
        }
    }

    private final class Impl {
        let queue: Queue
        let context: GroupCallThreadLocalContext
@@ -884,6 +896,12 @@ public final class OngoingGroupCallContext {
                completion(Stats(incomingVideoStats: incomingVideoStats))
            })
        }

        func setTone(tone: Tone?) {
            self.context.setTone(tone.flatMap { tone in
                CallAudioTone(samples: tone.samples, sampleRate: tone.sampleRate, loopCount: tone.loopCount)
            })
        }
    }

    private let queue = Queue()
@@ -1075,4 +1093,10 @@ public final class OngoingGroupCallContext {
            impl.getStats(completion: completion)
        }
    }

    public func setTone(tone: Tone?) {
        self.impl.with { impl in
            impl.setTone(tone: tone)
        }
    }
}
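
The public setTone forwards into the queue-confined Impl through self.impl.with, like the rest of OngoingGroupCallContext's API surface. A rough sketch of that confinement pattern, with DispatchQueue standing in for the project's Queue/QueueLocalObject types (names here are illustrative, not the project's):

    import Foundation

    // Sketch of the pattern behind `self.impl.with { ... }`: the wrapped object
    // is created and used only on its owning serial queue.
    final class QueueLocalBox<T> {
        private let queue: DispatchQueue
        private var value: T!

        init(queue: DispatchQueue, generate: @escaping () -> T) {
            self.queue = queue
            queue.async {
                self.value = generate() // built on the owning queue
            }
        }

        func with(_ f: @escaping (T) -> Void) {
            queue.async {
                f(self.value) // every access hops to the owning queue
            }
        }
    }

    // Usage, shaped like the setTone forwarding above:
    // let impl = QueueLocalBox(queue: queue) { Impl(queue: queue) }
    // impl.with { impl in impl.setTone(tone: tone) }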

View File

@@ -203,6 +203,16 @@ typedef NS_ENUM(int32_t, OngoingCallDataSavingWebrtc) {
@end

@interface CallAudioTone : NSObject

@property (nonatomic, strong, readonly) NSData * _Nonnull samples;
@property (nonatomic, readonly) NSInteger sampleRate;
@property (nonatomic, readonly) NSInteger loopCount;

- (instancetype _Nonnull)initWithSamples:(NSData * _Nonnull)samples sampleRate:(NSInteger)sampleRate loopCount:(NSInteger)loopCount;

@end

@interface OngoingCallThreadLocalContextWebrtc : NSObject

+ (void)logMessage:(NSString * _Nonnull)string;
@@ -391,6 +401,8 @@ typedef NS_ENUM(int32_t, OngoingGroupCallRequestedVideoQuality) {
- (void)setManualAudioSessionIsActive:(bool)isAudioSessionActive;
- (void)setTone:(CallAudioTone * _Nullable)tone;
- (void)setConnectionMode:(OngoingCallConnectionMode)connectionMode keepBroadcastConnectedIfWasEnabled:(bool)keepBroadcastConnectedIfWasEnabled isUnifiedBroadcast:(bool)isUnifiedBroadcast;
- (void)emitJoinPayload:(void (^ _Nonnull)(NSString * _Nonnull, uint32_t))completion;

View File

@@ -792,6 +792,28 @@ tgcalls::VideoCaptureInterfaceObject *GetVideoCaptureAssumingSameThread(tgcalls:
@end

@implementation CallAudioTone

- (instancetype _Nonnull)initWithSamples:(NSData * _Nonnull)samples sampleRate:(NSInteger)sampleRate loopCount:(NSInteger)loopCount {
    self = [super init];
    if (self != nil) {
        _samples = samples;
        _sampleRate = sampleRate;
        _loopCount = loopCount;
    }
    return self;
}

- (std::shared_ptr<tgcalls::CallAudioTone>)asTone {
    // Reinterpret the raw sample bytes as 16-bit PCM for tgcalls.
    std::vector<int16_t> data;
    data.resize(_samples.length / 2);
    memcpy(data.data(), _samples.bytes, _samples.length);
    return std::make_shared<tgcalls::CallAudioTone>(std::move(data), (int)_sampleRate, (int)_loopCount);
}

@end

@interface OngoingCallThreadLocalContextWebrtc () {
    NSString *_version;
    id<OngoingCallThreadLocalContextQueueWebrtc> _queue;
@@ -800,6 +822,9 @@ tgcalls::VideoCaptureInterfaceObject *GetVideoCaptureAssumingSameThread(tgcalls:
    bool _useManualAudioSessionControl;
    SharedCallAudioDevice *_audioDevice;

    rtc::scoped_refptr<webrtc::tgcalls_ios_adm::AudioDeviceModuleIOS> _currentAudioDeviceModule;
    rtc::Thread *_currentAudioDeviceModuleThread;

    OngoingCallNetworkTypeWebrtc _networkType;
    NSTimeInterval _callReceiveTimeout;
    NSTimeInterval _callRingTimeout;
@@ -1213,11 +1238,20 @@ static void (*InternalVoipLoggingFunction)(NSString *) = NULL;
                }
            }];
        },
        .createAudioDeviceModule = [audioDeviceModule](webrtc::TaskQueueFactory *taskQueueFactory) -> rtc::scoped_refptr<webrtc::AudioDeviceModule> {
        .createAudioDeviceModule = [weakSelf, queue, audioDeviceModule](webrtc::TaskQueueFactory *taskQueueFactory) -> rtc::scoped_refptr<webrtc::AudioDeviceModule> {
            if (audioDeviceModule) {
                return audioDeviceModule->getSyncAssumingSameThread()->audioDeviceModule();
            } else {
                // Remember the thread the ADM is created on, so later work
                // (and the final release) can be posted back to it.
                rtc::Thread *audioDeviceModuleThread = rtc::Thread::Current();
                auto resultModule = rtc::make_ref_counted<webrtc::tgcalls_ios_adm::AudioDeviceModuleIOS>(false, false, 1);
                [queue dispatch:^{
                    __strong OngoingCallThreadLocalContextWebrtc *strongSelf = weakSelf;
                    if (strongSelf) {
                        strongSelf->_currentAudioDeviceModuleThread = audioDeviceModuleThread;
                        strongSelf->_currentAudioDeviceModule = resultModule;
                    }
                }];
                return resultModule;
            }
        }
    });
@@ -1232,6 +1266,14 @@ static void (*InternalVoipLoggingFunction)(NSString *) = NULL;
        InternalVoipLoggingFunction(@"OngoingCallThreadLocalContext: dealloc");
    }

    if (_currentAudioDeviceModuleThread) {
        auto currentAudioDeviceModule = _currentAudioDeviceModule;
        _currentAudioDeviceModule = nullptr;
        // Empty task captures the module so its final release happens on the ADM thread.
        _currentAudioDeviceModuleThread->PostTask([currentAudioDeviceModule]() {
        });
        _currentAudioDeviceModuleThread = nullptr;
    }

    if (_tgVoip != NULL) {
        [self stop:nil];
    }
@@ -1537,6 +1579,9 @@ private:
    int _nextSinkId;
    NSMutableDictionary<NSNumber *, GroupCallVideoSink *> *_sinks;

    rtc::scoped_refptr<webrtc::tgcalls_ios_adm::AudioDeviceModuleIOS> _currentAudioDeviceModule;
    rtc::Thread *_currentAudioDeviceModuleThread;
}

@end
@@ -1777,19 +1822,60 @@ private:
                return std::make_shared<RequestMediaChannelDescriptionTaskImpl>(task);
            },
            .minOutgoingVideoBitrateKbit = minOutgoingVideoBitrateKbit
            .minOutgoingVideoBitrateKbit = minOutgoingVideoBitrateKbit,
            .createAudioDeviceModule = [weakSelf, queue, disableAudioInput](webrtc::TaskQueueFactory *taskQueueFactory) -> rtc::scoped_refptr<webrtc::AudioDeviceModule> {
                rtc::Thread *audioDeviceModuleThread = rtc::Thread::Current();
                auto resultModule = rtc::make_ref_counted<webrtc::tgcalls_ios_adm::AudioDeviceModuleIOS>(false, disableAudioInput, disableAudioInput ? 2 : 1);
                [queue dispatch:^{
                    __strong GroupCallThreadLocalContext *strongSelf = weakSelf;
                    if (strongSelf) {
                        strongSelf->_currentAudioDeviceModuleThread = audioDeviceModuleThread;
                        strongSelf->_currentAudioDeviceModule = resultModule;
                    }
                }];
                return resultModule;
            }
        }));
    }

    return self;
}

- (void)dealloc {
    if (_currentAudioDeviceModuleThread) {
        auto currentAudioDeviceModule = _currentAudioDeviceModule;
        _currentAudioDeviceModule = nullptr;
        // Empty task captures the module so the last strong reference is dropped on the ADM thread.
        _currentAudioDeviceModuleThread->PostTask([currentAudioDeviceModule]() {
        });
        _currentAudioDeviceModuleThread = nullptr;
    }
}

- (void)stop {
    if (_currentAudioDeviceModuleThread) {
        auto currentAudioDeviceModule = _currentAudioDeviceModule;
        _currentAudioDeviceModule = nullptr;
        _currentAudioDeviceModuleThread->PostTask([currentAudioDeviceModule]() {
        });
        _currentAudioDeviceModuleThread = nullptr;
    }

    if (_instance) {
        _instance->stop();
        _instance.reset();
    }
}

- (void)setTone:(CallAudioTone * _Nullable)tone {
    if (_currentAudioDeviceModuleThread) {
        auto currentAudioDeviceModule = _currentAudioDeviceModule;
        if (currentAudioDeviceModule) {
            // Hop to the ADM's owning thread before touching it.
            _currentAudioDeviceModuleThread->PostTask([currentAudioDeviceModule, tone]() {
                currentAudioDeviceModule->setTone([tone asTone]);
            });
        }
    }
}
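
Note that dealloc and stop release _currentAudioDeviceModule by posting an empty task that captures it, so the final release, and therefore any thread-affine teardown, runs on the thread the ADM was created on; setTone uses the same PostTask hop before touching the module. A rough Swift analogy of this release-on-owner-thread idiom (illustrative names; the real code posts to an rtc::Thread):

    import Foundation

    final class OwnedResource {
        deinit {
            // Runs on whichever thread drops the last strong reference,
            // so the owner must steer that drop onto its own queue.
            print("released on \(Thread.current)")
        }
    }

    final class Holder {
        private let ownerQueue = DispatchQueue(label: "adm.owner")
        private var resource: OwnedResource? = OwnedResource()

        func shutdown() {
            // Move the last strong reference into a task on the owner queue,
            // so deinit runs there rather than on the caller's thread.
            if let resource = self.resource {
                self.resource = nil
                ownerQueue.async {
                    _ = resource // empty task: exists only to release here
                }
            }
        }
    }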
- (void)setManualAudioSessionIsActive:(bool)isAudioSessionActive {
    if (isAudioSessionActive) {
        [[RTCAudioSession sharedInstance] audioSessionDidActivate:[AVAudioSession sharedInstance]];