diff --git a/Telegram/BUILD b/Telegram/BUILD
index bfd557feb6..45d52d72a5 100644
--- a/Telegram/BUILD
+++ b/Telegram/BUILD
@@ -1122,7 +1122,7 @@ ios_extension(
         ":VersionInfoPlist",
         ":AppNameInfoPlist",
     ],
-    minimum_os_version = "9.0",
+    minimum_os_version = "10.0",
     provisioning_profile = "//build-input/data/provisioning-profiles:NotificationContent.mobileprovision",
     deps = [":NotificationContentExtensionLib"],
     frameworks = [
@@ -1263,7 +1263,7 @@ ios_extension(
         ":VersionInfoPlist",
         ":AppNameInfoPlist",
     ],
-    minimum_os_version = "9.0",
+    minimum_os_version = "10.0",
     provisioning_profile = "//build-input/data/provisioning-profiles:Intents.mobileprovision",
     deps = [":IntentsExtensionLib"],
     frameworks = [
@@ -1519,7 +1519,6 @@ ios_application(
         ":MtProtoKitFramework",
         ":SwiftSignalKitFramework",
         ":PostboxFramework",
-        #":TelegramApiFramework",
         ":SyncCoreFramework",
         ":TelegramCoreFramework",
         ":AsyncDisplayKitFramework",
diff --git a/Telegram/Telegram-iOS/Resources/voip_group_connecting.mp3 b/Telegram/Telegram-iOS/Resources/voip_group_connecting.mp3
index b1ace8c7f1..4e0f60b4f1 100644
Binary files a/Telegram/Telegram-iOS/Resources/voip_group_connecting.mp3 and b/Telegram/Telegram-iOS/Resources/voip_group_connecting.mp3 differ
diff --git a/Telegram/Telegram-iOS/Resources/voip_group_joined.mp3 b/Telegram/Telegram-iOS/Resources/voip_group_joined.mp3
index 314b1b16fe..527959c9da 100644
Binary files a/Telegram/Telegram-iOS/Resources/voip_group_joined.mp3 and b/Telegram/Telegram-iOS/Resources/voip_group_joined.mp3 differ
diff --git a/Telegram/Telegram-iOS/Resources/voip_group_left.mp3 b/Telegram/Telegram-iOS/Resources/voip_group_left.mp3
index a8d79f0aa1..c033909587 100644
Binary files a/Telegram/Telegram-iOS/Resources/voip_group_left.mp3 and b/Telegram/Telegram-iOS/Resources/voip_group_left.mp3 differ
diff --git a/submodules/MediaPlayer/Sources/MediaPlayerAudioRenderer.swift b/submodules/MediaPlayer/Sources/MediaPlayerAudioRenderer.swift
index e531344f15..0af3ee7a13 100644
--- a/submodules/MediaPlayer/Sources/MediaPlayerAudioRenderer.swift
+++ b/submodules/MediaPlayer/Sources/MediaPlayerAudioRenderer.swift
@@ -218,6 +218,7 @@ private final class AudioPlayerRendererContext {
     let lowWaterSizeInSeconds: Int = 2

     let audioSession: MediaPlayerAudioSessionControl
+    let useVoiceProcessingMode: Bool
     let controlTimebase: CMTimebase
     let updatedRate: () -> Void
     let audioPaused: () -> Void
@@ -250,7 +251,7 @@ private final class AudioPlayerRendererContext {
         }
     }

-    init(controlTimebase: CMTimebase, audioSession: MediaPlayerAudioSessionControl, playAndRecord: Bool, ambient: Bool, forceAudioToSpeaker: Bool, baseRate: Double, audioLevelPipe: ValuePipe<Float>, updatedRate: @escaping () -> Void, audioPaused: @escaping () -> Void) {
+    init(controlTimebase: CMTimebase, audioSession: MediaPlayerAudioSessionControl, playAndRecord: Bool, useVoiceProcessingMode: Bool, ambient: Bool, forceAudioToSpeaker: Bool, baseRate: Double, audioLevelPipe: ValuePipe<Float>, updatedRate: @escaping () -> Void, audioPaused: @escaping () -> Void) {
         assert(audioPlayerRendererQueue.isCurrent())

         self.audioSession = audioSession
@@ -263,6 +264,7 @@ private final class AudioPlayerRendererContext {
         self.audioPaused = audioPaused

         self.playAndRecord = playAndRecord
+        self.useVoiceProcessingMode = useVoiceProcessingMode
         self.ambient = ambient

         self.audioStreamDescription = audioRendererNativeStreamDescription()
@@ -407,7 +409,11 @@ private final class AudioPlayerRendererContext {
             var outputNode: AUNode = 0
             var outputDesc = AudioComponentDescription()
             outputDesc.componentType = kAudioUnitType_Output
-            outputDesc.componentSubType = kAudioUnitSubType_RemoteIO
+            if self.useVoiceProcessingMode {
+                outputDesc.componentSubType = kAudioUnitSubType_VoiceProcessingIO
+            } else {
+                outputDesc.componentSubType = kAudioUnitSubType_RemoteIO
+            }
             outputDesc.componentFlags = 0
             outputDesc.componentFlagsMask = 0
             outputDesc.componentManufacturer = kAudioUnitManufacturer_Apple
@@ -753,7 +759,7 @@ public final class MediaPlayerAudioRenderer {
     private let audioClock: CMClock
     public let audioTimebase: CMTimebase

-    public init(audioSession: MediaPlayerAudioSessionControl, playAndRecord: Bool, ambient: Bool, forceAudioToSpeaker: Bool, baseRate: Double, audioLevelPipe: ValuePipe<Float>, updatedRate: @escaping () -> Void, audioPaused: @escaping () -> Void) {
+    public init(audioSession: MediaPlayerAudioSessionControl, playAndRecord: Bool, useVoiceProcessingMode: Bool = false, ambient: Bool, forceAudioToSpeaker: Bool, baseRate: Double, audioLevelPipe: ValuePipe<Float>, updatedRate: @escaping () -> Void, audioPaused: @escaping () -> Void) {
         var audioClock: CMClock?
         CMAudioClockCreate(allocator: nil, clockOut: &audioClock)
         if audioClock == nil {
@@ -766,7 +772,7 @@ public final class MediaPlayerAudioRenderer {
         self.audioTimebase = audioTimebase!

         audioPlayerRendererQueue.async {
-            let context = AudioPlayerRendererContext(controlTimebase: audioTimebase!, audioSession: audioSession, playAndRecord: playAndRecord, ambient: ambient, forceAudioToSpeaker: forceAudioToSpeaker, baseRate: baseRate, audioLevelPipe: audioLevelPipe, updatedRate: updatedRate, audioPaused: audioPaused)
+            let context = AudioPlayerRendererContext(controlTimebase: audioTimebase!, audioSession: audioSession, playAndRecord: playAndRecord, useVoiceProcessingMode: useVoiceProcessingMode, ambient: ambient, forceAudioToSpeaker: forceAudioToSpeaker, baseRate: baseRate, audioLevelPipe: audioLevelPipe, updatedRate: updatedRate, audioPaused: audioPaused)
            self.contextRef = Unmanaged.passRetained(context)
         }
     }
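Note: the renderer can now be built around Apple's voice-processing I/O unit instead of the plain remote I/O unit. `kAudioUnitSubType_VoiceProcessingIO` routes output through the system's echo-cancellation and gain-control path, which matters when a tone has to play while a call is capturing the microphone. A minimal sketch of the subtype selection, written as a standalone helper rather than the patch's actual code:

```swift
import AudioToolbox

// Sketch only: choosing the output unit subtype. VoiceProcessingIO adds
// echo cancellation and automatic gain control; RemoteIO is the plain path.
func outputDescription(useVoiceProcessingMode: Bool) -> AudioComponentDescription {
    var desc = AudioComponentDescription()
    desc.componentType = kAudioUnitType_Output
    desc.componentSubType = useVoiceProcessingMode
        ? kAudioUnitSubType_VoiceProcessingIO
        : kAudioUnitSubType_RemoteIO
    desc.componentFlags = 0
    desc.componentFlagsMask = 0
    desc.componentManufacturer = kAudioUnitManufacturer_Apple
    return desc
}
```

Because `useVoiceProcessingMode` defaults to `false` in the public initializer, existing call sites keep the RemoteIO behavior; only `PresentationCallToneRenderer` (further below) opts in.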
diff --git a/submodules/SettingsUI/Sources/Data and Storage/DataAndStorageSettingsController.swift b/submodules/SettingsUI/Sources/Data and Storage/DataAndStorageSettingsController.swift
index 1e886d0e80..d446034172 100644
--- a/submodules/SettingsUI/Sources/Data and Storage/DataAndStorageSettingsController.swift
+++ b/submodules/SettingsUI/Sources/Data and Storage/DataAndStorageSettingsController.swift
@@ -505,9 +505,9 @@ private func dataAndStorageControllerEntries(state: DataAndStorageControllerStat
     if #available(iOSApplicationExtension 13.2, iOS 13.2, *) {
         entries.append(.shareSheet(presentationData.theme, presentationData.strings.ChatSettings_IntentsSettings))
     }
-    if #available(iOSApplicationExtension 14.0, iOS 14.0, *) {
+    /*if #available(iOSApplicationExtension 14.0, iOS 14.0, *) {
         entries.append(.widgetSettings(presentationData.strings.ChatSettings_WidgetSettings))
-    }
+    }*/
     entries.append(.saveIncomingPhotos(presentationData.theme, presentationData.strings.Settings_SaveIncomingPhotos))
     entries.append(.saveEditedPhotos(presentationData.theme, presentationData.strings.Settings_SaveEditedPhotos, data.generatedMediaStoreSettings.storeEditedPhotos))
     entries.append(.openLinksIn(presentationData.theme, presentationData.strings.ChatSettings_OpenLinksIn, defaultWebBrowser))
diff --git a/submodules/ShareItems/Impl/BUILD b/submodules/ShareItems/Impl/BUILD
index 7d9d422737..95a6289dab 100644
--- a/submodules/ShareItems/Impl/BUILD
+++ b/submodules/ShareItems/Impl/BUILD
@@ -24,6 +24,8 @@ objc_library(
         "MobileCoreServices",
         "AddressBook",
         "AVFoundation",
+    ],
+    weak_sdk_frameworks = [
         "PassKit",
     ],
     visibility = [
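Note: `PassKit` moves from `sdk_frameworks` to `weak_sdk_frameworks` here and in the Stripe module below, so the binaries link against it weakly and still load on systems where the framework is unavailable. Weakly linked classes resolve to `nil` at runtime and should be probed before use; a sketch under that assumption, with a hypothetical helper name not taken from the patch:

```swift
import Foundation
import PassKit

// Sketch only (hypothetical helper, not part of this patch): with PassKit
// weak-linked, probe for the class before touching any of its API.
func applePayIsAvailable() -> Bool {
    guard NSClassFromString("PKPaymentAuthorizationViewController") != nil else {
        return false
    }
    return PKPaymentAuthorizationViewController.canMakePayments()
}
```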
"AddressBook", "AVFoundation", + ], + weak_sdk_frameworks = [ "PassKit", ], visibility = [ diff --git a/submodules/Stripe/BUILD b/submodules/Stripe/BUILD index 9320c426ea..e4177bfd58 100644 --- a/submodules/Stripe/BUILD +++ b/submodules/Stripe/BUILD @@ -19,6 +19,9 @@ objc_library( sdk_frameworks = [ "Foundation", "UIKit", + "AddressBook", + ], + weak_sdk_frameworks = [ "PassKit", ], visibility = [ diff --git a/submodules/TelegramCallsUI/Sources/PresentationCall.swift b/submodules/TelegramCallsUI/Sources/PresentationCall.swift index 0f3df58856..ccb3c49aaf 100644 --- a/submodules/TelegramCallsUI/Sources/PresentationCall.swift +++ b/submodules/TelegramCallsUI/Sources/PresentationCall.swift @@ -34,7 +34,7 @@ final class PresentationCallToneRenderer { self.toneRenderer = MediaPlayerAudioRenderer(audioSession: .custom({ control in return controlImpl?(control) ?? EmptyDisposable - }), playAndRecord: false, ambient: false, forceAudioToSpeaker: false, baseRate: 1.0, audioLevelPipe: self.audioLevelPipe, updatedRate: {}, audioPaused: {}) + }), playAndRecord: false, useVoiceProcessingMode: true, ambient: false, forceAudioToSpeaker: false, baseRate: 1.0, audioLevelPipe: self.audioLevelPipe, updatedRate: {}, audioPaused: {}) controlImpl = { [weak self] control in queue.async { diff --git a/submodules/TelegramCallsUI/Sources/PresentationCallToneData.swift b/submodules/TelegramCallsUI/Sources/PresentationCallToneData.swift index 32197dfe60..b8efbdfd14 100644 --- a/submodules/TelegramCallsUI/Sources/PresentationCallToneData.swift +++ b/submodules/TelegramCallsUI/Sources/PresentationCallToneData.swift @@ -1,7 +1,7 @@ import Foundation import AVFoundation -private func loadToneData(name: String) -> Data? { +private func loadToneData(name: String, addSilenceDuration: Double = 0.0) -> Data? { let outputSettings: [String: Any] = [ AVFormatIDKey: kAudioFormatLinearPCM as NSNumber, AVSampleRateKey: 44100.0 as NSNumber, @@ -62,6 +62,15 @@ private func loadToneData(name: String) -> Data? { } } + if !addSilenceDuration.isZero { + let sampleRate = 44100 + let numberOfSamples = Int(Double(sampleRate) * addSilenceDuration) + let numberOfChannels = 2 + let numberOfBytes = numberOfSamples * 2 * numberOfChannels + + data.append(Data(count: numberOfBytes)) + } + return data } @@ -110,6 +119,6 @@ func presentationCallToneData(_ tone: PresentationCallTone) -> Data? { case .groupLeft: return loadToneData(name: "voip_group_left.mp3") case .groupConnecting: - return loadToneData(name: "voip_group_connecting.mp3") + return loadToneData(name: "voip_group_connecting.mp3", addSilenceDuration: 2.0) } } diff --git a/submodules/TelegramCallsUI/Sources/PresentationGroupCall.swift b/submodules/TelegramCallsUI/Sources/PresentationGroupCall.swift index 1d805f9554..bec9ac61ee 100644 --- a/submodules/TelegramCallsUI/Sources/PresentationGroupCall.swift +++ b/submodules/TelegramCallsUI/Sources/PresentationGroupCall.swift @@ -444,6 +444,7 @@ public final class PresentationGroupCallImpl: PresentationGroupCall { private var removedChannelMembersDisposable: Disposable? + private var didStartConnectingOnce: Bool = false private var didConnectOnce: Bool = false private var toneRenderer: PresentationCallToneRenderer? 
diff --git a/submodules/TelegramCallsUI/Sources/PresentationGroupCall.swift b/submodules/TelegramCallsUI/Sources/PresentationGroupCall.swift
index 1d805f9554..bec9ac61ee 100644
--- a/submodules/TelegramCallsUI/Sources/PresentationGroupCall.swift
+++ b/submodules/TelegramCallsUI/Sources/PresentationGroupCall.swift
@@ -444,6 +444,7 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {

     private var removedChannelMembersDisposable: Disposable?

+    private var didStartConnectingOnce: Bool = false
     private var didConnectOnce: Bool = false
     private var toneRenderer: PresentationCallToneRenderer?

@@ -847,7 +848,7 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
                     }
                 }

-                if wasConnecting != isConnecting && strongSelf.didConnectOnce {
+                if (wasConnecting != isConnecting && strongSelf.didConnectOnce) { //|| !strongSelf.didStartConnectingOnce {
                     if isConnecting {
                         let toneRenderer = PresentationCallToneRenderer(tone: .groupConnecting)
                         strongSelf.toneRenderer = toneRenderer
@@ -857,6 +858,10 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
                     }
                 }

+                if isConnecting {
+                    strongSelf.didStartConnectingOnce = true
+                }
+
                 if case .connected = state {
                     if !strongSelf.didConnectOnce {
                         strongSelf.didConnectOnce = true
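Note: with this change the group-connecting tone is only started on reconnects: the gate requires `didConnectOnce`, which is set the first time the call reaches `.connected`. `didStartConnectingOnce` is recorded on every connecting transition, but the alternative gate that would also cover the very first connection attempt (`|| !strongSelf.didStartConnectingOnce`) is left commented out. The gating, isolated into a sketch with hypothetical names:

```swift
// Sketch only (hypothetical names): the connecting tone plays when the
// connecting flag flips and the call has already connected at least once.
struct ConnectingToneGate {
    private(set) var didStartConnectingOnce = false
    private(set) var didConnectOnce = false

    // Returns true when the connecting tone should start playing.
    mutating func update(wasConnecting: Bool, isConnecting: Bool, isConnected: Bool) -> Bool {
        let playTone = wasConnecting != isConnecting && didConnectOnce && isConnecting
        if isConnecting {
            didStartConnectingOnce = true
        }
        if isConnected {
            didConnectOnce = true
        }
        return playTone
    }
}
```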