Version 12.1.1

Kylmakalle
2024-07-02 19:58:37 +03:00
parent 68a75ff7fc
commit 37a40f3d4a
974 changed files with 53359 additions and 2716 deletions

ChatMessageInteractiveFileNode.swift

@@ -1,3 +1,4 @@
+import SGSimpleSettings
 import Foundation
 import UIKit
 import AsyncDisplayKit
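
The added import pulls in Swiftgram's settings store; it backs the backend check introduced in the @@ -420 hunk below. A minimal sketch of that usage, assuming only the accessor visible in this diff (the helper name is hypothetical):

import SGSimpleSettings

// Hypothetical helper: `transcriptionBackend` stores the backend
// enum's rawValue as a string, as compared in the @@ -420 hunk.
func prefersAppleTranscription() -> Bool {
    return SGSimpleSettings.shared.transcriptionBackend == SGSimpleSettings.TranscriptionBackend.apple.rawValue
}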
@@ -353,7 +354,7 @@ public final class ChatMessageInteractiveFileNode: ASDisplayNode {
 return
 }
-if !context.isPremium, case .inProgress = self.audioTranscriptionState {
+if /*!context.isPremium,*/ case .inProgress = self.audioTranscriptionState {
 return
 }
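
Commenting out `!context.isPremium` leaves a single early-return guard: a transcription that is already running. A standalone sketch of the resulting gate (the enum cases and helper name are assumptions for illustration):

enum AudioTranscriptionState { case collapsed, expanded, inProgress }

// After the Swiftgram change, only an in-flight transcription blocks a
// new request; the user's premium status is no longer consulted.
func canRequestTranscription(_ state: AudioTranscriptionState) -> Bool {
    if case .inProgress = state {
        return false
    }
    return true
}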
@@ -361,7 +362,8 @@ public final class ChatMessageInteractiveFileNode: ASDisplayNode {
 let premiumConfiguration = PremiumConfiguration.with(appConfiguration: arguments.context.currentAppConfiguration.with { $0 })
 let transcriptionText = self.forcedAudioTranscriptionText ?? transcribedText(message: message)
-if transcriptionText == nil && !arguments.associatedData.alwaysDisplayTranscribeButton.providedByGroupBoost {
+// MARK: Swiftgram
+if transcriptionText == nil && false {
 if premiumConfiguration.audioTransciptionTrialCount > 0 {
 if !arguments.associatedData.isPremium {
 if self.presentAudioTranscriptionTooltip(finished: false) {
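
Appending `&& false` short-circuits the condition, so the premium-trial tooltip flow nested under it is never entered, while the original expression stays recognizable for merges with upstream Telegram. The pattern in isolation:

// `&& false` makes the branch statically dead; the compiler may warn
// about unreachable code, but the upstream shape survives.
let transcriptionText: String? = nil
if transcriptionText == nil && false {
    // upstream trial-count and tooltip logic would run here
}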
@@ -420,7 +422,7 @@ public final class ChatMessageInteractiveFileNode: ASDisplayNode {
 self.audioTranscriptionState = .inProgress
 self.requestUpdateLayout(true)
-if context.sharedContext.immediateExperimentalUISettings.localTranscription {
+if context.sharedContext.immediateExperimentalUISettings.localTranscription || !arguments.associatedData.isPremium || SGSimpleSettings.shared.transcriptionBackend == SGSimpleSettings.TranscriptionBackend.apple.rawValue {
 let appLocale = presentationData.strings.baseLanguageCode
 let signal: Signal<LocallyTranscribedAudio?, NoError> = context.engine.data.get(TelegramEngine.EngineData.Item.Messages.Message(id: message.id))
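
The widened condition selects the local transcription path in three cases: the experimental UI flag, a non-premium account, or an explicit Apple-backend preference. A sketch with the three inputs lifted into parameters (the helper name is hypothetical):

import SGSimpleSettings

// Mirrors the widened condition: any one of the three inputs routes the
// request to on-device transcription instead of Telegram's premium API.
func shouldTranscribeLocally(experimentalLocal: Bool, isPremium: Bool, backendRawValue: String) -> Bool {
    return experimentalLocal
        || !isPremium
        || backendRawValue == SGSimpleSettings.TranscriptionBackend.apple.rawValue
}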
@@ -452,7 +454,8 @@ public final class ChatMessageInteractiveFileNode: ASDisplayNode {
 guard let result = result else {
 return .single(nil)
 }
-return transcribeAudio(path: result, appLocale: appLocale)
+return transcribeAudio(path: result, appLocale: arguments.controllerInteraction.sgGetChatPredictedLang() ?? appLocale)
 }
 self.transcribeDisposable = (signal
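
sgGetChatPredictedLang() is a Swiftgram hook on the controller interaction that returns the chat's predicted language, if one is known; nil-coalescing falls back to the app's base language code. The fallback, sketched with plain strings:

// Prefer the chat's predicted language; fall back to the app locale.
func transcriptionLocale(predicted: String?, appLocale: String) -> String {
    return predicted ?? appLocale
}

// transcriptionLocale(predicted: "de", appLocale: "en") == "de"
// transcriptionLocale(predicted: nil,  appLocale: "en") == "en"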
@@ -772,7 +775,8 @@ public final class ChatMessageInteractiveFileNode: ASDisplayNode {
 displayTranscribe = false
 } else if arguments.message.id.peerId.namespace != Namespaces.Peer.SecretChat && !isViewOnceMessage && !arguments.presentationData.isPreview {
 let premiumConfiguration = PremiumConfiguration.with(appConfiguration: arguments.context.currentAppConfiguration.with { $0 })
-if arguments.associatedData.isPremium {
+// MARK: Swiftgram
+if arguments.associatedData.isPremium || true {
 displayTranscribe = true
 } else if premiumConfiguration.audioTransciptionTrialCount > 0 {
 if arguments.incoming {
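
Mirroring the `&& false` trick above, `|| true` forces the premium branch, making the transcribe button visible regardless of subscription status. In isolation (helper name assumed):

// `|| true` always evaluates to true, so the premium branch is taken
// for every message and the trial branches below become unreachable.
func shouldDisplayTranscribe(isPremium: Bool) -> Bool {
    return isPremium || true
}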
@@ -803,7 +807,7 @@ public final class ChatMessageInteractiveFileNode: ASDisplayNode {
 }
 let currentTime = Int32(Date().timeIntervalSince1970)
-if transcribedText == nil, let cooldownUntilTime = arguments.associatedData.audioTranscriptionTrial.cooldownUntilTime, cooldownUntilTime > currentTime {
+if transcribedText == nil, let cooldownUntilTime = arguments.associatedData.audioTranscriptionTrial.cooldownUntilTime, cooldownUntilTime > currentTime, { return false }() /* MARK: Swiftgram */ {
 updatedAudioTranscriptionState = .locked
 }
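
The appended clause is an immediately-invoked closure that always returns false, so the trial cooldown lock (`.locked`) can never be applied; writing it as a closure call lets the veto slot into the existing comma-separated `if` conditions without restructuring them. A sketch reusing the same inline form (the wrapper function is hypothetical):

// The immediately-invoked closure evaluates to false, vetoing the whole
// comma-separated condition list.
func isTranscriptionLocked(cooldownUntilTime: Int32?, now: Int32) -> Bool {
    if let cooldown = cooldownUntilTime, cooldown > now, { return false }() {
        return true // unreachable: Swiftgram never applies .locked
    }
    return false
}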