[WIP] Call tones

Ali 2022-12-02 14:45:54 +04:00
parent 2345a1b8d2
commit 430dd4defd
7 changed files with 121 additions and 43 deletions

View File

@@ -110,6 +110,9 @@ public final class GroupCallNavigationAccessoryPanel: ASDisplayNode {
     private let joinButtonTitleNode: ImmediateTextNode
     private let joinButtonBackgroundNode: ASImageNode
+    private var previewImageNode: ASImageNode?
+    private var previewImage: UIImage?
     private var audioLevelView: VoiceBlobView?
     private let micButton: HighlightTrackingButtonNode
@@ -536,8 +539,18 @@ public final class GroupCallNavigationAccessoryPanel: ASDisplayNode {
             guard let self, let data else {
                 return
             }
-            let _ = self
-            let _ = data
+            var image: UIImage?
+            for i in 0 ..< 100 {
+                image = UIImage(data: data.subdata(in: i ..< data.count))
+                if image != nil {
+                    break
+                }
+            }
+            self.previewImage = image
+            if let (size, leftInset, rightInset) = self.validLayout {
+                self.updateLayout(size: size, leftInset: leftInset, rightInset: rightInset, transition: .animated(duration: 0.2, curve: .easeInOut))
+            }
         })
     }
 }
@@ -668,6 +681,26 @@ public final class GroupCallNavigationAccessoryPanel: ASDisplayNode {
         staticTransition.updateFrame(node: self.joinButtonBackgroundNode, frame: CGRect(origin: CGPoint(), size: joinButtonFrame.size))
         staticTransition.updateFrame(node: self.joinButtonTitleNode, frame: CGRect(origin: CGPoint(x: floorToScreenPixels((joinButtonFrame.width - joinButtonTitleSize.width) / 2.0), y: floorToScreenPixels((joinButtonFrame.height - joinButtonTitleSize.height) / 2.0)), size: joinButtonTitleSize))
+        if let previewImage = self.previewImage {
+            let previewImageNode: ASImageNode
+            if let current = self.previewImageNode {
+                previewImageNode = current
+            } else {
+                previewImageNode = ASImageNode()
+                previewImageNode.clipsToBounds = true
+                previewImageNode.cornerRadius = 8.0
+                previewImageNode.contentMode = .scaleAspectFill
+                self.previewImageNode = previewImageNode
+                self.addSubnode(previewImageNode)
+            }
+            previewImageNode.image = previewImage
+            let previewSize = CGSize(width: 40.0, height: 40.0)
+            previewImageNode.frame = CGRect(origin: CGPoint(x: joinButtonFrame.minX - previewSize.width - 8.0, y: joinButtonFrame.minY + floor((joinButtonFrame.height - previewSize.height) / 2.0)), size: previewSize)
+        } else if let previewImageNode = self.previewImageNode {
+            self.previewImageNode = nil
+            previewImageNode.removeFromSupernode()
+        }
         let micButtonSize = CGSize(width: 36.0, height: 36.0)
         let micButtonFrame = CGRect(origin: CGPoint(x: size.width - rightInset - 7.0 - micButtonSize.width, y: floor((panelHeight - micButtonSize.height) / 2.0)), size: micButtonSize)
         staticTransition.updateFrame(node: self.micButton, frame: micButtonFrame)
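
Note on the decode loop in the second hunk above: it is a brute-force scan that drops up to 100 leading bytes until UIImage accepts the payload, which tolerates a short, unknown-length prefix in front of the image data. A self-contained sketch of the same idea (the helper name is ours, not part of the commit):

    import UIKit

    // Try successive byte offsets so a small non-image prefix does not
    // prevent decoding; returns nil if nothing decodes.
    func decodeImageSkippingPrefix(_ data: Data, maxOffset: Int = 100) -> UIImage? {
        for offset in 0 ..< min(maxOffset, data.count) {
            if let image = UIImage(data: data.subdata(in: offset ..< data.count)) {
                return image
            }
        }
        return nil
    }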

View File

@@ -4,12 +4,12 @@ import AVFoundation
 private func loadToneData(name: String, addSilenceDuration: Double = 0.0) -> Data? {
     let outputSettings: [String: Any] = [
         AVFormatIDKey: kAudioFormatLinearPCM as NSNumber,
-        AVSampleRateKey: 44100.0 as NSNumber,
+        AVSampleRateKey: 48000.0 as NSNumber,
         AVLinearPCMBitDepthKey: 16 as NSNumber,
         AVLinearPCMIsNonInterleaved: false as NSNumber,
         AVLinearPCMIsFloatKey: false as NSNumber,
         AVLinearPCMIsBigEndianKey: false as NSNumber,
-        AVNumberOfChannelsKey: 2 as NSNumber
+        AVNumberOfChannelsKey: 1 as NSNumber
     ]
     let nsName: NSString = name as NSString
@@ -63,9 +63,9 @@ private func loadToneData(name: String, addSilenceDuration: Double = 0.0) -> Data? {
     }
     if !addSilenceDuration.isZero {
-        let sampleRate = 44100
+        let sampleRate = 48000
         let numberOfSamples = Int(Double(sampleRate) * addSilenceDuration)
-        let numberOfChannels = 2
+        let numberOfChannels = 1
         let numberOfBytes = numberOfSamples * 2 * numberOfChannels
         data.append(Data(count: numberOfBytes))
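
The tone loader now decodes to 16-bit mono PCM at 48 kHz instead of 44.1 kHz stereo, and the silence-padding math follows directly: bytes = samples × 2 bytes per sample × channels. A quick arithmetic check under the new settings (the duration value is illustrative):

    // 16-bit linear PCM: 2 bytes per sample per channel.
    let sampleRate = 48000
    let numberOfChannels = 1
    let addSilenceDuration = 0.25 // seconds, illustrative

    let numberOfSamples = Int(Double(sampleRate) * addSilenceDuration) // 12000
    let numberOfBytes = numberOfSamples * 2 * numberOfChannels         // 24000 zeroed bytes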

View File

@@ -2484,11 +2484,19 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
     }
     private func beginTone(tone: PresentationCallTone) {
+        if self.isStream {
+            switch tone {
+            case .groupJoined, .groupLeft:
+                return
+            default:
+                break
+            }
+        }
         if let toneData = presentationCallToneData(tone) {
             self.genericCallContext?.setTone(tone: OngoingGroupCallContext.Tone(
                 samples: toneData,
                 sampleRate: 44100,
-                loopCount: 1000
+                loopCount: tone.loopCount ?? 100000
             ))
         }
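
beginTone now suppresses the joined/left chimes for livestreams and lets each tone pick its own repeat count, with 100000 standing in for "loop until a new tone is set". The hunk assumes PresentationCallTone exposes an optional loopCount; one plausible mapping (ours, purely illustrative, not from the commit) would be:

    extension PresentationCallTone {
        // Hypothetical: one-shot chimes play once; nil defers to the
        // caller's near-infinite fallback (100000 in the hunk above).
        var loopCount: Int? {
            switch self {
            case .groupJoined, .groupLeft:
                return 1
            default:
                return nil
            }
        }
    }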

View File

@@ -415,6 +415,7 @@ public final class OngoingGroupCallContext {
     private final class Impl {
         let queue: Queue
         let context: GroupCallThreadLocalContext
+        let audioDevice: SharedCallAudioDevice?
         let sessionId = UInt32.random(in: 0 ..< UInt32(Int32.max))
@@ -433,6 +434,12 @@ public final class OngoingGroupCallContext {
         init(queue: Queue, inputDeviceId: String, outputDeviceId: String, audioSessionActive: Signal<Bool, NoError>, video: OngoingCallVideoCapturer?, requestMediaChannelDescriptions: @escaping (Set<UInt32>, @escaping ([MediaChannelDescription]) -> Void) -> Disposable, rejoinNeeded: @escaping () -> Void, outgoingAudioBitrateKbit: Int32?, videoContentType: VideoContentType, enableNoiseSuppression: Bool, disableAudioInput: Bool, preferX264: Bool, logPath: String) {
             self.queue = queue
+            #if DEBUG
+            self.audioDevice = SharedCallAudioDevice(disableRecording: disableAudioInput)
+            #else
+            self.audioDevice = nil
+            #endif
             var networkStateUpdatedImpl: ((GroupCallNetworkState) -> Void)?
             var audioLevelsUpdatedImpl: (([NSNumber]) -> Void)?
@@ -538,7 +545,8 @@ public final class OngoingGroupCallContext {
                 enableNoiseSuppression: enableNoiseSuppression,
                 disableAudioInput: disableAudioInput,
                 preferX264: preferX264,
-                logPath: logPath
+                logPath: logPath,
+                audioDevice: self.audioDevice
             )
             let queue = self.queue
@@ -592,6 +600,7 @@ public final class OngoingGroupCallContext {
                     return
                 }
                 #if os(iOS)
+                self.audioDevice?.setManualAudioSessionIsActive(isActive)
                 self.context.setManualAudioSessionIsActive(isActive)
                 #endif
             }))
@@ -898,7 +907,7 @@ public final class OngoingGroupCallContext {
         }
         func setTone(tone: Tone?) {
-            self.context.setTone(tone.flatMap { tone in
+            self.audioDevice?.setTone(tone.flatMap { tone in
                 CallAudioTone(samples: tone.samples, sampleRate: tone.sampleRate, loopCount: tone.loopCount)
             })
         }
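
On the Swift side, Impl now owns the optional SharedCallAudioDevice (DEBUG-only while the feature is WIP), hands it down to GroupCallThreadLocalContext, mirrors audio-session activation into it, and routes setTone through the device instead of the context. Condensed into a sketch (the wrapper type is hypothetical; the SharedCallAudioDevice calls are the ones used in the hunks above):

    // Hypothetical wrapper showing the ownership/routing pattern.
    final class CallAudioRouting {
        private let audioDevice: SharedCallAudioDevice?

        init(disableAudioInput: Bool) {
            #if DEBUG
            self.audioDevice = SharedCallAudioDevice(disableRecording: disableAudioInput)
            #else
            self.audioDevice = nil // release builds keep the old per-context path
            #endif
        }

        func setAudioSessionActive(_ isActive: Bool) {
            // Mirrors the diff: the shared device is told about session
            // state alongside the existing context call.
            audioDevice?.setManualAudioSessionIsActive(isActive)
        }
    }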

View File

@@ -722,7 +722,7 @@ public final class OngoingCallContext {
         let impl: SharedCallAudioDevice
         public static func create() -> AudioDevice? {
-            return AudioDevice(impl: SharedCallAudioDevice())
+            return AudioDevice(impl: SharedCallAudioDevice(disableRecording: false))
         }
         private init(impl: SharedCallAudioDevice) {

View File

@@ -23,7 +23,7 @@
 @interface SharedCallAudioDevice : NSObject
-- (instancetype _Nonnull)init;
+- (instancetype _Nonnull)initWithDisableRecording:(bool)disableRecording;
 + (void)setupAudioSession;
@@ -397,7 +397,8 @@ typedef NS_ENUM(int32_t, OngoingGroupCallRequestedVideoQuality) {
     enableNoiseSuppression:(bool)enableNoiseSuppression
     disableAudioInput:(bool)disableAudioInput
     preferX264:(bool)preferX264
-    logPath:(NSString * _Nonnull)logPath;
+    logPath:(NSString * _Nonnull)logPath
+    audioDevice:(SharedCallAudioDevice * _Nullable)audioDevice;
 - (void)stop;

View File

@@ -73,35 +73,31 @@ public:
 public:
     virtual rtc::scoped_refptr<webrtc::tgcalls_ios_adm::AudioDeviceModuleIOS> audioDeviceModule() = 0;
+    virtual void start() = 0;
 };
 }
 class SharedAudioDeviceModuleImpl: public tgcalls::SharedAudioDeviceModule {
 public:
-    SharedAudioDeviceModuleImpl() {
-        if (tgcalls::StaticThreads::getThreads()->getWorkerThread()->IsCurrent()) {
-            _audioDeviceModule = rtc::make_ref_counted<webrtc::tgcalls_ios_adm::AudioDeviceModuleIOS>(false, false, 1);
-            _audioDeviceModule->Init();
-            if (!_audioDeviceModule->Playing()) {
-                _audioDeviceModule->InitPlayout();
-            }
-        } else {
-            tgcalls::StaticThreads::getThreads()->getWorkerThread()->BlockingCall([&]() {
-                _audioDeviceModule = rtc::make_ref_counted<webrtc::tgcalls_ios_adm::AudioDeviceModuleIOS>(false, false, 1);
-                _audioDeviceModule->Init();
-                if (!_audioDeviceModule->Playing()) {
-                    _audioDeviceModule->InitPlayout();
-                }
-            });
-        }
+    SharedAudioDeviceModuleImpl(bool disableAudioInput) {
+        RTC_DCHECK(tgcalls::StaticThreads::getThreads()->getWorkerThread()->IsCurrent());
+        _audioDeviceModule = rtc::make_ref_counted<webrtc::tgcalls_ios_adm::AudioDeviceModuleIOS>(false, disableAudioInput, disableAudioInput ? 2 : 1);
     }
     virtual ~SharedAudioDeviceModuleImpl() override {
         if (tgcalls::StaticThreads::getThreads()->getWorkerThread()->IsCurrent()) {
+            if (_audioDeviceModule->Playing()) {
+                _audioDeviceModule->StopPlayout();
+                _audioDeviceModule->StopRecording();
+            }
             _audioDeviceModule = nullptr;
         } else {
             tgcalls::StaticThreads::getThreads()->getWorkerThread()->BlockingCall([&]() {
+                if (_audioDeviceModule->Playing()) {
+                    _audioDeviceModule->StopPlayout();
+                    _audioDeviceModule->StopRecording();
+                }
                 _audioDeviceModule = nullptr;
             });
         }
@@ -112,6 +108,18 @@ public:
         return _audioDeviceModule;
     }
+    virtual void start() override {
+        RTC_DCHECK(tgcalls::StaticThreads::getThreads()->getWorkerThread()->IsCurrent());
+        _audioDeviceModule->Init();
+        if (!_audioDeviceModule->Playing()) {
+            _audioDeviceModule->InitPlayout();
+            //_audioDeviceModule->InitRecording();
+            _audioDeviceModule->InternalStartPlayout();
+            //_audioDeviceModule->InternalStartRecording();
+        }
+    }
 private:
     rtc::scoped_refptr<webrtc::tgcalls_ios_adm::AudioDeviceModuleIOS> _audioDeviceModule;
 };
@@ -120,11 +128,11 @@ private:
     std::shared_ptr<tgcalls::ThreadLocalObject<tgcalls::SharedAudioDeviceModule>> _audioDeviceModule;
 }
-- (instancetype _Nonnull)init {
+- (instancetype _Nonnull)initWithDisableRecording:(bool)disableRecording {
     self = [super init];
     if (self != nil) {
-        _audioDeviceModule.reset(new tgcalls::ThreadLocalObject<tgcalls::SharedAudioDeviceModule>(tgcalls::StaticThreads::getThreads()->getWorkerThread(), []() mutable {
-            return (tgcalls::SharedAudioDeviceModule *)(new SharedAudioDeviceModuleImpl());
+        _audioDeviceModule.reset(new tgcalls::ThreadLocalObject<tgcalls::SharedAudioDeviceModule>(tgcalls::StaticThreads::getThreads()->getWorkerThread(), [disableRecording]() mutable {
+            return (tgcalls::SharedAudioDeviceModule *)(new SharedAudioDeviceModuleImpl(disableRecording));
         }));
     }
     return self;
@@ -164,6 +172,12 @@ private:
         [[RTCAudioSession sharedInstance] audioSessionDidDeactivate:[AVAudioSession sharedInstance]];
     }
     [RTCAudioSession sharedInstance].isAudioEnabled = isAudioSessionActive;
+    if (isAudioSessionActive) {
+        _audioDeviceModule->perform([](tgcalls::SharedAudioDeviceModule *audioDeviceModule) {
+            audioDeviceModule->start();
+        });
+    }
 }
 @end
@@ -1596,6 +1610,8 @@ private:
     rtc::scoped_refptr<webrtc::tgcalls_ios_adm::AudioDeviceModuleIOS> _currentAudioDeviceModule;
     rtc::Thread *_currentAudioDeviceModuleThread;
+    SharedCallAudioDevice * _audioDevice;
 }
 @end
@@ -1617,7 +1633,8 @@ private:
                     enableNoiseSuppression:(bool)enableNoiseSuppression
                     disableAudioInput:(bool)disableAudioInput
                     preferX264:(bool)preferX264
-                    logPath:(NSString * _Nonnull)logPath {
+                    logPath:(NSString * _Nonnull)logPath
+                    audioDevice:(SharedCallAudioDevice * _Nullable)audioDevice {
     self = [super init];
     if (self != nil) {
         _queue = queue;
@@ -1629,6 +1646,12 @@ private:
         _networkStateUpdated = [networkStateUpdated copy];
         _videoCapturer = videoCapturer;
+        _audioDevice = audioDevice;
+        std::shared_ptr<tgcalls::ThreadLocalObject<tgcalls::SharedAudioDeviceModule>> audioDeviceModule;
+        if (_audioDevice) {
+            audioDeviceModule = [_audioDevice getAudioDeviceModule];
+        }
         tgcalls::VideoContentType _videoContentType;
         switch (videoContentType) {
             case OngoingGroupCallVideoContentTypeGeneric: {
@@ -1837,17 +1860,21 @@ private:
                 return std::make_shared<RequestMediaChannelDescriptionTaskImpl>(task);
             },
             .minOutgoingVideoBitrateKbit = minOutgoingVideoBitrateKbit,
-            .createAudioDeviceModule = [weakSelf, queue, disableAudioInput](webrtc::TaskQueueFactory *taskQueueFactory) -> rtc::scoped_refptr<webrtc::AudioDeviceModule> {
-                rtc::Thread *audioDeviceModuleThread = rtc::Thread::Current();
-                auto resultModule = rtc::make_ref_counted<webrtc::tgcalls_ios_adm::AudioDeviceModuleIOS>(false, disableAudioInput, disableAudioInput ? 2 : 1);
-                [queue dispatch:^{
-                    __strong GroupCallThreadLocalContext *strongSelf = weakSelf;
-                    if (strongSelf) {
-                        strongSelf->_currentAudioDeviceModuleThread = audioDeviceModuleThread;
-                        strongSelf->_currentAudioDeviceModule = resultModule;
-                    }
-                }];
-                return resultModule;
+            .createAudioDeviceModule = [weakSelf, queue, disableAudioInput, audioDeviceModule](webrtc::TaskQueueFactory *taskQueueFactory) -> rtc::scoped_refptr<webrtc::AudioDeviceModule> {
+                if (audioDeviceModule) {
+                    return audioDeviceModule->getSyncAssumingSameThread()->audioDeviceModule();
+                } else {
+                    rtc::Thread *audioDeviceModuleThread = rtc::Thread::Current();
+                    auto resultModule = rtc::make_ref_counted<webrtc::tgcalls_ios_adm::AudioDeviceModuleIOS>(false, disableAudioInput, disableAudioInput ? 2 : 1);
+                    [queue dispatch:^{
+                        __strong GroupCallThreadLocalContext *strongSelf = weakSelf;
+                        if (strongSelf) {
+                            strongSelf->_currentAudioDeviceModuleThread = audioDeviceModuleThread;
+                            strongSelf->_currentAudioDeviceModule = resultModule;
+                        }
+                    }];
+                    return resultModule;
+                }
             }
         }));
     }
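
Taken together, the .mm changes split the shared device's lifecycle: the constructor only allocates the module (asserting it is already on the worker thread instead of hopping to it), start() lazily initializes and starts playout once the audio session becomes active, and createAudioDeviceModule reuses the injected shared module rather than building a fresh one per call. A rough Swift restatement of that activation-gated, shared-instance pattern (all names here are illustrative, not from the commit):

    // Illustrative: allocate eagerly, start playout only on session
    // activation, and hand every consumer the same instance.
    final class SharedPlayout {
        private var isPlaying = false

        func start() {
            // Idempotent, like the `if (!Playing()) { InitPlayout(); ... }` guard.
            guard !isPlaying else { return }
            isPlaying = true
        }
    }

    final class SessionCoordinator {
        private let shared = SharedPlayout()

        func setSessionActive(_ isActive: Bool) {
            if isActive { shared.start() }
        }

        // createAudioDeviceModule analogue: reuse the injected shared
        // module when present; otherwise build a per-call instance.
        func makeModule(injected: SharedPlayout?) -> SharedPlayout {
            return injected ?? SharedPlayout()
        }
    }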