Commit be4f069f1b
Merge branch 'master' of gitlab.com:peter-iakovlev/telegram-ios
@@ -1122,7 +1122,7 @@ ios_extension(
         ":VersionInfoPlist",
         ":AppNameInfoPlist",
     ],
-    minimum_os_version = "9.0",
+    minimum_os_version = "10.0",
     provisioning_profile = "//build-input/data/provisioning-profiles:NotificationContent.mobileprovision",
     deps = [":NotificationContentExtensionLib"],
     frameworks = [
@@ -1263,7 +1263,7 @@ ios_extension(
         ":VersionInfoPlist",
         ":AppNameInfoPlist",
     ],
-    minimum_os_version = "9.0",
+    minimum_os_version = "10.0",
     provisioning_profile = "//build-input/data/provisioning-profiles:Intents.mobileprovision",
     deps = [":IntentsExtensionLib"],
     frameworks = [
@@ -1519,7 +1519,6 @@ ios_application(
         ":MtProtoKitFramework",
         ":SwiftSignalKitFramework",
         ":PostboxFramework",
-        #":TelegramApiFramework",
         ":SyncCoreFramework",
         ":TelegramCoreFramework",
         ":AsyncDisplayKitFramework",
(3 binary files changed; contents not shown)
@@ -218,6 +218,7 @@ private final class AudioPlayerRendererContext {
     let lowWaterSizeInSeconds: Int = 2
     
     let audioSession: MediaPlayerAudioSessionControl
+    let useVoiceProcessingMode: Bool
     let controlTimebase: CMTimebase
     let updatedRate: () -> Void
     let audioPaused: () -> Void
@@ -250,7 +251,7 @@ private final class AudioPlayerRendererContext {
         }
     }
     
-    init(controlTimebase: CMTimebase, audioSession: MediaPlayerAudioSessionControl, playAndRecord: Bool, ambient: Bool, forceAudioToSpeaker: Bool, baseRate: Double, audioLevelPipe: ValuePipe<Float>, updatedRate: @escaping () -> Void, audioPaused: @escaping () -> Void) {
+    init(controlTimebase: CMTimebase, audioSession: MediaPlayerAudioSessionControl, playAndRecord: Bool, useVoiceProcessingMode: Bool, ambient: Bool, forceAudioToSpeaker: Bool, baseRate: Double, audioLevelPipe: ValuePipe<Float>, updatedRate: @escaping () -> Void, audioPaused: @escaping () -> Void) {
         assert(audioPlayerRendererQueue.isCurrent())
         
         self.audioSession = audioSession
@@ -263,6 +264,7 @@ private final class AudioPlayerRendererContext {
         self.audioPaused = audioPaused
         
         self.playAndRecord = playAndRecord
+        self.useVoiceProcessingMode = useVoiceProcessingMode
         self.ambient = ambient
         
         self.audioStreamDescription = audioRendererNativeStreamDescription()
@@ -407,7 +409,11 @@ private final class AudioPlayerRendererContext {
         var outputNode: AUNode = 0
         var outputDesc = AudioComponentDescription()
         outputDesc.componentType = kAudioUnitType_Output
-        outputDesc.componentSubType = kAudioUnitSubType_RemoteIO
+        if self.useVoiceProcessingMode {
+            outputDesc.componentSubType = kAudioUnitSubType_VoiceProcessingIO
+        } else {
+            outputDesc.componentSubType = kAudioUnitSubType_RemoteIO
+        }
         outputDesc.componentFlags = 0
         outputDesc.componentFlagsMask = 0
         outputDesc.componentManufacturer = kAudioUnitManufacturer_Apple
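Note: kAudioUnitSubType_VoiceProcessingIO selects Apple's voice-processing I/O unit (echo cancellation, automatic gain control) in place of the plain kAudioUnitSubType_RemoteIO hardware I/O unit. A minimal standalone sketch of the same component lookup, outside the renderer's AUGraph (the function name is illustrative, not from this commit):

import AudioToolbox

func makeOutputUnit(useVoiceProcessingMode: Bool) -> AudioUnit? {
    var desc = AudioComponentDescription()
    desc.componentType = kAudioUnitType_Output
    // Voice processing adds echo cancellation/AGC; RemoteIO is the plain I/O unit.
    desc.componentSubType = useVoiceProcessingMode ? kAudioUnitSubType_VoiceProcessingIO : kAudioUnitSubType_RemoteIO
    desc.componentManufacturer = kAudioUnitManufacturer_Apple
    guard let component = AudioComponentFindNext(nil, &desc) else { return nil }
    var unit: AudioUnit? = nil
    guard AudioComponentInstanceNew(component, &unit) == noErr else { return nil }
    return unit
}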
@@ -753,7 +759,7 @@ public final class MediaPlayerAudioRenderer {
     private let audioClock: CMClock
     public let audioTimebase: CMTimebase
     
-    public init(audioSession: MediaPlayerAudioSessionControl, playAndRecord: Bool, ambient: Bool, forceAudioToSpeaker: Bool, baseRate: Double, audioLevelPipe: ValuePipe<Float>, updatedRate: @escaping () -> Void, audioPaused: @escaping () -> Void) {
+    public init(audioSession: MediaPlayerAudioSessionControl, playAndRecord: Bool, useVoiceProcessingMode: Bool = false, ambient: Bool, forceAudioToSpeaker: Bool, baseRate: Double, audioLevelPipe: ValuePipe<Float>, updatedRate: @escaping () -> Void, audioPaused: @escaping () -> Void) {
         var audioClock: CMClock?
         CMAudioClockCreate(allocator: nil, clockOut: &audioClock)
         if audioClock == nil {
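Because the new parameter defaults to false, every existing MediaPlayerAudioRenderer call site keeps compiling unchanged; only callers that opt in (the call tone renderer further down) pass true. A minimal sketch of that source-compatibility pattern (toy type, not the real initializer):

struct Renderer {
    let useVoiceProcessingMode: Bool
    init(playAndRecord: Bool, useVoiceProcessingMode: Bool = false, ambient: Bool) {
        self.useVoiceProcessingMode = useVoiceProcessingMode
    }
}

let legacy = Renderer(playAndRecord: false, ambient: false)                                // pre-change call site, still valid
let optedIn = Renderer(playAndRecord: false, useVoiceProcessingMode: true, ambient: false) // new call site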
@@ -766,7 +772,7 @@ public final class MediaPlayerAudioRenderer {
         self.audioTimebase = audioTimebase!
         
         audioPlayerRendererQueue.async {
-            let context = AudioPlayerRendererContext(controlTimebase: audioTimebase!, audioSession: audioSession, playAndRecord: playAndRecord, ambient: ambient, forceAudioToSpeaker: forceAudioToSpeaker, baseRate: baseRate, audioLevelPipe: audioLevelPipe, updatedRate: updatedRate, audioPaused: audioPaused)
+            let context = AudioPlayerRendererContext(controlTimebase: audioTimebase!, audioSession: audioSession, playAndRecord: playAndRecord, useVoiceProcessingMode: useVoiceProcessingMode, ambient: ambient, forceAudioToSpeaker: forceAudioToSpeaker, baseRate: baseRate, audioLevelPipe: audioLevelPipe, updatedRate: updatedRate, audioPaused: audioPaused)
             self.contextRef = Unmanaged.passRetained(context)
         }
     }
@@ -505,9 +505,9 @@ private func dataAndStorageControllerEntries(state: DataAndStorageControllerStat
     if #available(iOSApplicationExtension 13.2, iOS 13.2, *) {
         entries.append(.shareSheet(presentationData.theme, presentationData.strings.ChatSettings_IntentsSettings))
     }
-    if #available(iOSApplicationExtension 14.0, iOS 14.0, *) {
+    /*if #available(iOSApplicationExtension 14.0, iOS 14.0, *) {
         entries.append(.widgetSettings(presentationData.strings.ChatSettings_WidgetSettings))
-    }
+    }*/
     entries.append(.saveIncomingPhotos(presentationData.theme, presentationData.strings.Settings_SaveIncomingPhotos))
     entries.append(.saveEditedPhotos(presentationData.theme, presentationData.strings.Settings_SaveEditedPhotos, data.generatedMediaStoreSettings.storeEditedPhotos))
     entries.append(.openLinksIn(presentationData.theme, presentationData.strings.ChatSettings_OpenLinksIn, defaultWebBrowser))
@@ -24,6 +24,8 @@ objc_library(
         "MobileCoreServices",
         "AddressBook",
         "AVFoundation",
+    ],
+    weak_sdk_frameworks = [
         "PassKit",
     ],
     visibility = [
submodules/Stripe/BUILD (vendored, 3 changes)
@@ -19,6 +19,9 @@ objc_library(
    sdk_frameworks = [
        "Foundation",
        "UIKit",
+        "AddressBook",
+    ],
+    weak_sdk_frameworks = [
        "PassKit",
    ],
    visibility = [
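In Bazel's Apple rules, frameworks listed in weak_sdk_frameworks are linked weakly (the -weak_framework style), so the binary still loads where the framework or its symbols are absent, and any PassKit use must be guarded at runtime. A hedged Swift-side sketch of such a guard (illustrative only, not code from this commit):

import PassKit

func canPresentAddPasses() -> Bool {
    // With PassKit weakly linked, gate usage behind a runtime check
    // instead of assuming the framework's symbols are present.
    return PKAddPassesViewController.canAddPasses()
}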
@@ -34,7 +34,7 @@ final class PresentationCallToneRenderer {
         
         self.toneRenderer = MediaPlayerAudioRenderer(audioSession: .custom({ control in
             return controlImpl?(control) ?? EmptyDisposable
-        }), playAndRecord: false, ambient: false, forceAudioToSpeaker: false, baseRate: 1.0, audioLevelPipe: self.audioLevelPipe, updatedRate: {}, audioPaused: {})
+        }), playAndRecord: false, useVoiceProcessingMode: true, ambient: false, forceAudioToSpeaker: false, baseRate: 1.0, audioLevelPipe: self.audioLevelPipe, updatedRate: {}, audioPaused: {})
         
         controlImpl = { [weak self] control in
             queue.async {
@@ -1,7 +1,7 @@
 import Foundation
 import AVFoundation
 
-private func loadToneData(name: String) -> Data? {
+private func loadToneData(name: String, addSilenceDuration: Double = 0.0) -> Data? {
     let outputSettings: [String: Any] = [
         AVFormatIDKey: kAudioFormatLinearPCM as NSNumber,
         AVSampleRateKey: 44100.0 as NSNumber,
@@ -62,6 +62,15 @@ private func loadToneData(name: String) -> Data? {
         }
     }
     
+    if !addSilenceDuration.isZero {
+        let sampleRate = 44100
+        let numberOfSamples = Int(Double(sampleRate) * addSilenceDuration)
+        let numberOfChannels = 2
+        let numberOfBytes = numberOfSamples * 2 * numberOfChannels
+        
+        data.append(Data(count: numberOfBytes))
+    }
+    
     return data
 }
 
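Given the LPCM output settings above (44.1 kHz, 16-bit, stereo), each sample frame is 2 bytes × 2 channels = 4 bytes, so the 2-second pause appended for the group-connecting tone below adds 44100 × 2 × 2 × 2 = 352,800 bytes; Data(count:) zero-fills, and zero amplitude decodes as silence in signed linear PCM. A self-contained sketch of the same arithmetic (the helper name is illustrative):

import Foundation

func silencePadding(duration: Double, sampleRate: Int = 44100, channels: Int = 2, bytesPerSample: Int = 2) -> Data {
    let frames = Int(Double(sampleRate) * duration)
    // Data(count:) is zero-filled; zeros decode as silence in signed linear PCM.
    return Data(count: frames * channels * bytesPerSample)
}

assert(silencePadding(duration: 2.0).count == 352_800)   // 44100 * 2 * 2 * 2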
@@ -110,6 +119,6 @@ func presentationCallToneData(_ tone: PresentationCallTone) -> Data? {
         case .groupLeft:
             return loadToneData(name: "voip_group_left.mp3")
         case .groupConnecting:
-            return loadToneData(name: "voip_group_connecting.mp3")
+            return loadToneData(name: "voip_group_connecting.mp3", addSilenceDuration: 2.0)
     }
 }
@@ -444,6 +444,7 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
     
     private var removedChannelMembersDisposable: Disposable?
     
+    private var didStartConnectingOnce: Bool = false
     private var didConnectOnce: Bool = false
     private var toneRenderer: PresentationCallToneRenderer?
     
@@ -847,7 +848,7 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
                }
            }
            
-            if wasConnecting != isConnecting && strongSelf.didConnectOnce {
+            if (wasConnecting != isConnecting && strongSelf.didConnectOnce) { //|| !strongSelf.didStartConnectingOnce {
                if isConnecting {
                    let toneRenderer = PresentationCallToneRenderer(tone: .groupConnecting)
                    strongSelf.toneRenderer = toneRenderer
@@ -857,6 +858,10 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
                }
            }
            
+            if isConnecting {
+                strongSelf.didStartConnectingOnce = true
+            }
+            
            if case .connected = state {
                if !strongSelf.didConnectOnce {
                    strongSelf.didConnectOnce = true
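didStartConnectingOnce latches the first time a connecting state is observed; the commented-out `|| !strongSelf.didStartConnectingOnce` alternative would also have played the tone on the very first connection attempt, not only on reconnects after didConnectOnce. A toy model of the gating as committed (type and method names are illustrative, not from the source):

struct ConnectingToneGate {
    var didConnectOnce = false
    var didStartConnectingOnce = false

    // Returns true when the "connecting" tone should start: only on a
    // transition into connecting after at least one successful connect.
    mutating func shouldPlayTone(wasConnecting: Bool, isConnecting: Bool) -> Bool {
        let play = wasConnecting != isConnecting && didConnectOnce && isConnecting
        if isConnecting {
            didStartConnectingOnce = true
        }
        return play
    }
}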