Version 12.1.1

Kylmakalle
2024-07-02 19:58:37 +03:00
parent 1c3b749ede
commit edbcf47190
977 changed files with 53378 additions and 2720 deletions

ChatMessageInteractiveFileNode.swift

@@ -1,3 +1,4 @@
+import SGSimpleSettings
 import Foundation
 import UIKit
 import AsyncDisplayKit
@@ -351,7 +352,7 @@ public final class ChatMessageInteractiveFileNode: ASDisplayNode {
                 return
             }
-            if !context.isPremium, case .inProgress = self.audioTranscriptionState {
+            if /*!context.isPremium,*/ case .inProgress = self.audioTranscriptionState {
                 return
             }
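
Note: with the premium check commented out, this in-progress guard now applies to every user, so a tap while a transcription is already running is simply ignored. A minimal sketch of the guard, assuming a simplified stand-in for the real state enum:

// Simplified stand-in for the real transcription state type.
enum AudioTranscriptionState {
    case collapsed
    case inProgress
    case expanded
}

func handleTranscribeTap(state: AudioTranscriptionState) {
    // With the premium condition commented out, this early return
    // now applies to every user, premium or not.
    if case .inProgress = state {
        return
    }
    // ... start or toggle transcription here ...
}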
@@ -359,7 +360,8 @@ public final class ChatMessageInteractiveFileNode: ASDisplayNode {
             let premiumConfiguration = PremiumConfiguration.with(appConfiguration: arguments.context.currentAppConfiguration.with { $0 })
             let transcriptionText = self.forcedAudioTranscriptionText ?? transcribedText(message: message)
-            if transcriptionText == nil && !arguments.associatedData.alwaysDisplayTranscribeButton.providedByGroupBoost {
+            // MARK: Swiftgram
+            if transcriptionText == nil && false {
                 if premiumConfiguration.audioTransciptionTrialCount > 0 {
                     if !arguments.associatedData.isPremium {
                         if self.presentAudioTranscriptionTooltip(finished: false) {
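
Note: transcriptionText == nil && false short-circuits to false, so the premium trial and tooltip branch below is now unreachable. Keeping the upstream expression behind && false (and || true in a later hunk) instead of deleting it is a merge-friendly way to force a branch while staying close to upstream Telegram. A sketch of the idiom:

// Branch-forcing idiom used throughout this commit: "&& false" makes a
// branch unreachable, "|| true" makes it unconditional, and the original
// upstream condition stays in place for easier future merges.
let transcriptionText: String? = nil
if transcriptionText == nil && false {
    // Upstream premium-trial flow: never entered in Swiftgram.
}

let isPremium = false
if isPremium || true {
    // Always entered: every user may transcribe.
}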
@@ -418,7 +420,7 @@ public final class ChatMessageInteractiveFileNode: ASDisplayNode {
             self.audioTranscriptionState = .inProgress
             self.requestUpdateLayout(true)
-            if context.sharedContext.immediateExperimentalUISettings.localTranscription {
+            if context.sharedContext.immediateExperimentalUISettings.localTranscription || !arguments.associatedData.isPremium || SGSimpleSettings.shared.transcriptionBackend == SGSimpleSettings.TranscriptionBackend.apple.rawValue {
                 let appLocale = presentationData.strings.baseLanguageCode
                 let signal: Signal<LocallyTranscribedAudio?, NoError> = context.engine.data.get(TelegramEngine.EngineData.Item.Messages.Message(id: message.id))
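
The widened condition routes transcription through the local, on-device path not only when the experimental localTranscription flag is set, but also for non-premium users and whenever the Swiftgram transcriptionBackend setting selects the Apple backend. The body of transcribeAudio is not part of this diff; a hypothetical sketch of an on-device implementation using Apple's Speech framework, which the apple backend name suggests:

import Foundation
import Speech

// Hypothetical sketch only; the real transcribeAudio(path:appLocale:) is not
// shown in this diff. Assumes speech-recognition permission is already granted.
func transcribeAudioLocally(path: String, appLocale: String,
                            completion: @escaping (String?) -> Void) {
    guard let recognizer = SFSpeechRecognizer(locale: Locale(identifier: appLocale)),
          recognizer.isAvailable else {
        completion(nil)
        return
    }
    let request = SFSpeechURLRecognitionRequest(url: URL(fileURLWithPath: path))
    request.requiresOnDeviceRecognition = true // keep the audio on the device
    // A production implementation would retain the task so it can be cancelled.
    _ = recognizer.recognitionTask(with: request) { result, error in
        guard let result = result, error == nil else {
            completion(nil)
            return
        }
        if result.isFinal {
            completion(result.bestTranscription.formattedString)
        }
    }
}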
@@ -450,7 +452,8 @@ public final class ChatMessageInteractiveFileNode: ASDisplayNode {
                 guard let result = result else {
                     return .single(nil)
                 }
-                return transcribeAudio(path: result, appLocale: appLocale)
+                return transcribeAudio(path: result, appLocale: arguments.controllerInteraction.sgGetChatPredictedLang() ?? appLocale)
             }
             self.transcribeDisposable = (signal
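
Here the transcription locale prefers the chat's predicted language and falls back to the app UI language via nil-coalescing. sgGetChatPredictedLang() itself is not shown in this diff; a hypothetical sketch of how such a per-chat prediction could be built on the NaturalLanguage framework (all names illustrative):

import Foundation
import NaturalLanguage

// Hypothetical per-chat language predictor; the actual implementation behind
// sgGetChatPredictedLang() is not part of this diff.
final class ChatLanguagePredictor {
    private var cache: [Int64: String] = [:] // peerId -> ISO 639-1 code

    func record(messageText: String, peerId: Int64) {
        let recognizer = NLLanguageRecognizer()
        recognizer.processString(messageText)
        if let language = recognizer.dominantLanguage {
            cache[peerId] = language.rawValue // e.g. "en", "ru"
        }
    }

    func predictedLanguage(forPeerId peerId: Int64) -> String? {
        return cache[peerId]
    }
}

// Usage mirroring the diff: prefer the chat's detected language,
// otherwise fall back to the app locale.
let predictor = ChatLanguagePredictor()
predictor.record(messageText: "Bonjour, comment ça va ?", peerId: 1)
let appLocale = "en"
let locale = predictor.predictedLanguage(forPeerId: 1) ?? appLocale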
@@ -770,7 +773,8 @@ public final class ChatMessageInteractiveFileNode: ASDisplayNode {
             displayTranscribe = false
         } else if arguments.message.id.peerId.namespace != Namespaces.Peer.SecretChat && !isViewOnceMessage && !arguments.presentationData.isPreview {
             let premiumConfiguration = PremiumConfiguration.with(appConfiguration: arguments.context.currentAppConfiguration.with { $0 })
-            if arguments.associatedData.isPremium {
+            // MARK: Swiftgram
+            if arguments.associatedData.isPremium || true {
                 displayTranscribe = true
             } else if premiumConfiguration.audioTransciptionTrialCount > 0 {
                 if arguments.incoming {
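
Net effect of the || true: outside secret chats, view-once messages, and previews, the transcribe button is shown to every user, not just premium subscribers; the trial-count branches below become dead code. A condensed sketch of the resulting display logic:

// Condensed sketch of the effective logic after this change;
// the flags are stand-ins for the checks in the surrounding code.
let isSecretChat = false
let isViewOnceMessage = false
let isPreview = false
let displayTranscribe = !(isSecretChat || isViewOnceMessage || isPreview)
// Premium status no longer matters here.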
@@ -801,7 +805,7 @@ public final class ChatMessageInteractiveFileNode: ASDisplayNode {
             }
             let currentTime = Int32(Date().timeIntervalSince1970)
-            if transcribedText == nil, let cooldownUntilTime = arguments.associatedData.audioTranscriptionTrial.cooldownUntilTime, cooldownUntilTime > currentTime {
+            if transcribedText == nil, let cooldownUntilTime = arguments.associatedData.audioTranscriptionTrial.cooldownUntilTime, cooldownUntilTime > currentTime, { return false }() /* MARK: Swiftgram */ {
                 updatedAudioTranscriptionState = .locked
             }
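
Finally, the appended clause is an immediately-invoked closure that always returns false, so the whole condition list fails and the .locked state is never applied: the transcription trial cooldown is effectively disabled while the upstream checks are left intact. A minimal sketch of the idiom:

// An immediately-invoked closure in a condition list: the closure runs on
// the spot and its Bool result joins the other conditions. Returning false
// makes the branch unreachable without deleting the upstream checks.
let cooldownActive = true
if cooldownActive, { return false }() {
    // Never reached: the cooldown lock is disabled.
}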