Various fixes

Ilya Laktyushin 2023-02-06 00:55:17 +04:00
parent b92a46ee58
commit bf94ea75e9
39 changed files with 202 additions and 141 deletions

View File

@@ -8886,3 +8886,6 @@ Sorry for the inconvenience.";
 "Translation.Language.ta" = "Tamil";
 "Translation.Language.tr" = "Turkish";
 "Translation.Language.uz" = "Uzbek";
+"Settings.RaiseToListen" = "Raise to Listen";
+"Settings.RaiseToListenInfo" = "Raise to Listen allows you to quickly listen and reply to incoming audio messages by raising the phone to your ear.";

View File

@@ -954,7 +954,8 @@ public final class AuthorizationSequenceController: NavigationController, MFMail
 }
 }
-let signal = TGMediaVideoConverter.convert(avatarAsset, adjustments: avatarAdjustments, watcher: nil, entityRenderer: entityRenderer)!
+let tempFile = EngineTempBox.shared.tempFile(fileName: "video.mp4")
+let signal = TGMediaVideoConverter.convert(avatarAsset, adjustments: avatarAdjustments, path: tempFile.path, watcher: nil, entityRenderer: entityRenderer)!
 let signalDisposable = signal.start(next: { next in
 if let result = next as? TGMediaVideoConversionResult {
@@ -964,6 +965,8 @@ public final class AuthorizationSequenceController: NavigationController, MFMail
 let resource = LocalFileMediaResource(fileId: Int64.random(in: Int64.min ... Int64.max))
 account.postbox.mediaBox.storeResourceData(resource.id, data: data, synchronous: true)
 subscriber.putNext(resource)
+EngineTempBox.shared.dispose(tempFile)
 }
 }
 subscriber.putCompletion()
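Several call sites in this commit follow the same temp-file lifecycle: allocate a temp file, pass its path to TGMediaVideoConverter instead of letting the converter pick a random temporary URL, and dispose of the file once the converted data has been copied into the media box. A minimal sketch of that flow, assembled from the identifiers visible in the surrounding hunks (reading the result via Data(contentsOf: result.fileURL) is an assumption, since that step is outside the hunk):

    let tempFile = EngineTempBox.shared.tempFile(fileName: "video.mp4")
    let signal = TGMediaVideoConverter.convert(avatarAsset, adjustments: avatarAdjustments, path: tempFile.path, watcher: nil, entityRenderer: entityRenderer)!
    let signalDisposable = signal.start(next: { next in
        if let result = next as? TGMediaVideoConversionResult, let data = try? Data(contentsOf: result.fileURL) {
            // Copy the converted video into the account's media box, then release the temp file.
            let resource = LocalFileMediaResource(fileId: Int64.random(in: Int64.min ... Int64.max))
            account.postbox.mediaBox.storeResourceData(resource.id, data: data, synchronous: true)
            subscriber.putNext(resource)
            EngineTempBox.shared.dispose(tempFile)
        }
        subscriber.putCompletion()
    })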

View File

@@ -126,6 +126,8 @@ public final class ChatListSearchContainerNode: SearchDisplayControllerContentNo
 private var selectedFilterPromise = Promise<ChatListSearchFilterEntry?>()
 private var transitionFraction: CGFloat = 0.0
+private var appearanceTimestamp: Double?
 private weak var copyProtectionTooltipController: TooltipController?
 private lazy var hapticFeedback = { HapticFeedback() }()
@@ -306,6 +308,10 @@ public final class ChatListSearchContainerNode: SearchDisplayControllerContentNo
 return
 }
+if let appearanceTimestamp = strongSelf.appearanceTimestamp, CACurrentMediaTime() - appearanceTimestamp < 0.5 {
+return
+}
 var key: ChatListSearchPaneKey?
 var date = strongSelf.currentSearchOptions.date
 var peer = strongSelf.currentSearchOptions.peer
@@ -667,6 +673,7 @@ public final class ChatListSearchContainerNode: SearchDisplayControllerContentNo
 if isFirstTime {
 self.filterContainerNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2)
+self.appearanceTimestamp = CACurrentMediaTime()
 }
 var bottomIntrinsicInset = layout.intrinsicInsets.bottom
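The new appearanceTimestamp acts as a tap debounce: the timestamp is recorded when the filter bar first animates in, and filter selections arriving within half a second of that moment are ignored. The same gate in isolation, as a small self-contained sketch (the helper type is hypothetical, not part of the codebase):

    import QuartzCore

    final class AppearanceTapGate {
        private var appearanceTimestamp: Double?

        // Record the moment the UI becomes visible.
        func didAppear() {
            self.appearanceTimestamp = CACurrentMediaTime()
        }

        // Ignore taps that arrive within 0.5s of appearance.
        func shouldHandleTap() -> Bool {
            if let appearanceTimestamp = self.appearanceTimestamp, CACurrentMediaTime() - appearanceTimestamp < 0.5 {
                return false
            }
            return true
        }
    }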

View File

@@ -1848,7 +1848,7 @@ class ChatListItemNode: ItemListRevealOptionsItemNode {
 entities = translation.entities
 }
-messageString = stringWithAppliedEntities(trimToLineCount(messageText, lineCount: authorAttributedString == nil ? 2 : 1), entities: entities, baseColor: theme.messageTextColor, linkColor: theme.messageTextColor, baseFont: textFont, linkFont: textFont, boldFont: textFont, italicFont: italicTextFont, boldItalicFont: textFont, fixedFont: textFont, blockQuoteFont: textFont, underlineLinks: false, message: message._asMessage())
+messageString = foldLineBreaks(stringWithAppliedEntities(messageText, entities: entities, baseColor: theme.messageTextColor, linkColor: theme.messageTextColor, baseFont: textFont, linkFont: textFont, boldFont: textFont, italicFont: italicTextFont, boldItalicFont: textFont, fixedFont: textFont, blockQuoteFont: textFont, underlineLinks: false, message: message._asMessage()))
 } else if spoilers != nil || customEmojiRanges != nil {
 let mutableString = NSMutableAttributedString(string: messageText, font: textFont, textColor: theme.messageTextColor)
 if let spoilers = spoilers {

View File

@@ -25,7 +25,6 @@ swift_library(
 "//submodules/GZip:GZip",
 "//third-party/ZipArchive:ZipArchive",
 "//submodules/InAppPurchaseManager:InAppPurchaseManager",
-"//submodules/Utils/DarwinDirStat",
 ],
 visibility = [
 "//visibility:public",

View File

@@ -16,7 +16,6 @@ import AppBundle
 import ZipArchive
 import WebKit
 import InAppPurchaseManager
-import DarwinDirStat
 @objc private final class DebugControllerMailComposeDelegate: NSObject, MFMailComposeViewControllerDelegate {
 public func mailComposeController(_ controller: MFMailComposeViewController, didFinishWith result: MFMailComposeResult, error: Error?) {
@@ -948,15 +947,10 @@ private enum DebugControllerEntry: ItemListNodeEntry {
 case .resetTranslationStates:
 return ItemListActionItem(presentationData: presentationData, title: "Reset Translation States", kind: .generic, alignment: .natural, sectionId: self.section, style: .blocks, action: {
 if let context = arguments.context {
-let size = statForDirectory(path: NSTemporaryDirectory())
-let controller = textAlertController(context: context, title: nil, text: "temp dir size \(size)", actions: [TextAlertAction(type: .genericAction, title: "OK", action: {})])
-arguments.presentController(controller, nil)
+let _ = context.engine.itemCache.clear(collectionIds: [
+ApplicationSpecificItemCacheCollectionId.translationState
+]).start()
 }
-// if let context = arguments.context {
-// let _ = context.engine.itemCache.clear(collectionIds: [
-// ApplicationSpecificItemCacheCollectionId.translationState
-// ]).start()
-// }
 })
 case .crash:
 return ItemListActionItem(presentationData: presentationData, title: "Crash", kind: .generic, alignment: .natural, sectionId: self.section, style: .blocks, action: {
@@ -1545,30 +1539,3 @@ public func triggerDebugSendLogsUI(context: AccountContext, additionalInfo: Stri
 pushController(controller)
 })
 }
-private func statForDirectory(path: String) -> Int64 {
-    if #available(macOS 10.13, *) {
-        var s = darwin_dirstat()
-        var result = dirstat_np(path, 1, &s, MemoryLayout<darwin_dirstat>.size)
-        if result != -1 {
-            return Int64(s.total_size)
-        } else {
-            result = dirstat_np(path, 0, &s, MemoryLayout<darwin_dirstat>.size)
-            if result != -1 {
-                return Int64(s.total_size)
-            } else {
-                return 0
-            }
-        }
-    } else {
-        let fileManager = FileManager.default
-        let folderURL = URL(fileURLWithPath: path)
-        var folderSize: Int64 = 0
-        if let files = try? fileManager.contentsOfDirectory(at: folderURL, includingPropertiesForKeys: nil, options: []) {
-            for file in files {
-                folderSize += (fileSize(file.path) ?? 0)
-            }
-        }
-        return folderSize
-    }
-}

View File

@@ -83,7 +83,7 @@ public final class InAppPurchaseManager: NSObject {
 }
 public func pricePerMonth(_ monthsCount: Int) -> String {
-let price = self.skProduct.price.dividing(by: NSDecimalNumber(value: monthsCount)).prettyPrice().round(2)
+let price = self.skProduct.price.dividing(by: NSDecimalNumber(value: monthsCount)).round(2)
 return self.numberFormatter.string(from: price) ?? ""
 }
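round(2) and the removed prettyPrice() are project extensions on NSDecimalNumber; the fix divides first and only rounds to two fractional digits before handing the value to the formatter. Assuming round(_:) is a thin wrapper over NSDecimalNumberHandler, the computation is roughly:

    import Foundation

    let price = NSDecimalNumber(string: "59.99")   // example yearly price
    let monthsCount = 12

    let behavior = NSDecimalNumberHandler(
        roundingMode: .plain,
        scale: 2,                                  // round(2): two fractional digits
        raiseOnExactness: false,
        raiseOnOverflow: false,
        raiseOnDivideByZero: false,
        raiseOnUnderflow: false
    )
    let perMonth = price
        .dividing(by: NSDecimalNumber(value: monthsCount))
        .rounding(accordingToBehavior: behavior)   // 59.99 / 12 = 4.999... -> 5.00

    let formatter = NumberFormatter()
    formatter.numberStyle = .currency
    print(formatter.string(from: perMonth) ?? "")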

View File

@@ -16,11 +16,11 @@
 @interface TGMediaVideoConverter : NSObject
-+ (SSignal *)convertAVAsset:(AVAsset *)avAsset adjustments:(TGMediaVideoEditAdjustments *)adjustments watcher:(TGMediaVideoFileWatcher *)watcher entityRenderer:(id<TGPhotoPaintEntityRenderer>)entityRenderer;
++ (SSignal *)convertAVAsset:(AVAsset *)avAsset adjustments:(TGMediaVideoEditAdjustments *)adjustments path:(NSString *)path watcher:(TGMediaVideoFileWatcher *)watcher entityRenderer:(id<TGPhotoPaintEntityRenderer>)entityRenderer;
-+ (SSignal *)convertAVAsset:(AVAsset *)avAsset adjustments:(TGMediaVideoEditAdjustments *)adjustments watcher:(TGMediaVideoFileWatcher *)watcher inhibitAudio:(bool)inhibitAudio entityRenderer:(id<TGPhotoPaintEntityRenderer>)entityRenderer;
++ (SSignal *)convertAVAsset:(AVAsset *)avAsset adjustments:(TGMediaVideoEditAdjustments *)adjustments path:(NSString *)path watcher:(TGMediaVideoFileWatcher *)watcher inhibitAudio:(bool)inhibitAudio entityRenderer:(id<TGPhotoPaintEntityRenderer>)entityRenderer;
 + (SSignal *)hashForAVAsset:(AVAsset *)avAsset adjustments:(TGMediaVideoEditAdjustments *)adjustments;
-+ (SSignal *)renderUIImage:(UIImage *)image duration:(NSTimeInterval)duration adjustments:(TGMediaVideoEditAdjustments *)adjustments watcher:(TGMediaVideoFileWatcher *)watcher entityRenderer:(id<TGPhotoPaintEntityRenderer>)entityRenderer;
++ (SSignal *)renderUIImage:(UIImage *)image duration:(NSTimeInterval)duration adjustments:(TGMediaVideoEditAdjustments *)adjustments path:(NSString *)path watcher:(TGMediaVideoFileWatcher *)watcher entityRenderer:(id<TGPhotoPaintEntityRenderer>)entityRenderer;
 + (NSUInteger)estimatedSizeForPreset:(TGMediaVideoConversionPreset)preset duration:(NSTimeInterval)duration hasAudio:(bool)hasAudio;
 + (TGMediaVideoConversionPreset)bestAvailablePresetForDimensions:(CGSize)dimensions;

View File

@@ -1475,6 +1475,8 @@
 }
 }
+bool spoiler = [editingContext spoilerForItem:asset];
 if ([asset isKindOfClass:[UIImage class]]) {
 if (intent == TGMediaAssetsControllerSendFileIntent)
 {
@@ -1513,6 +1515,10 @@
 if (groupedId != nil && !hasAnyTimers)
 dict[@"groupedId"] = groupedId;
+if (spoiler) {
+dict[@"spoiler"] = @true;
+}
 id generatedItem = descriptionGenerator(dict, caption, nil, nil);
 return generatedItem;
 }];
@@ -1560,6 +1566,10 @@
 if (groupedId != nil && !hasAnyTimers)
 dict[@"groupedId"] = groupedId;
+if (spoiler) {
+dict[@"spoiler"] = @true;
+}
 id generatedItem = descriptionGenerator(dict, caption, nil, nil);
 return generatedItem;
 }] catch:^SSignal *(__unused id error)
@@ -1636,6 +1646,10 @@
 else if (groupedId != nil && !hasAnyTimers)
 dict[@"groupedId"] = groupedId;
+if (spoiler) {
+dict[@"spoiler"] = @true;
+}
 id generatedItem = descriptionGenerator(dict, caption, nil, nil);
 return generatedItem;
 }];

View File

@@ -101,12 +101,12 @@
 @implementation TGMediaVideoConverter
-+ (SSignal *)convertAVAsset:(AVAsset *)avAsset adjustments:(TGMediaVideoEditAdjustments *)adjustments watcher:(TGMediaVideoFileWatcher *)watcher entityRenderer:(id<TGPhotoPaintEntityRenderer>)entityRenderer
++ (SSignal *)convertAVAsset:(AVAsset *)avAsset adjustments:(TGMediaVideoEditAdjustments *)adjustments path:(NSString *)path watcher:(TGMediaVideoFileWatcher *)watcher entityRenderer:(id<TGPhotoPaintEntityRenderer>)entityRenderer
 {
-return [self convertAVAsset:avAsset adjustments:adjustments watcher:watcher inhibitAudio:false entityRenderer:entityRenderer];
+return [self convertAVAsset:avAsset adjustments:adjustments path:path watcher:watcher inhibitAudio:false entityRenderer:entityRenderer];
 }
-+ (SSignal *)convertAVAsset:(AVAsset *)avAsset adjustments:(TGMediaVideoEditAdjustments *)adjustments watcher:(TGMediaVideoFileWatcher *)watcher inhibitAudio:(bool)inhibitAudio entityRenderer:(id<TGPhotoPaintEntityRenderer>)entityRenderer
++ (SSignal *)convertAVAsset:(AVAsset *)avAsset adjustments:(TGMediaVideoEditAdjustments *)adjustments path:(NSString *)path watcher:(TGMediaVideoFileWatcher *)watcher inhibitAudio:(bool)inhibitAudio entityRenderer:(id<TGPhotoPaintEntityRenderer>)entityRenderer
 {
 if ([avAsset isKindOfClass:[NSURL class]]) {
 avAsset = [[AVURLAsset alloc] initWithURL:(NSURL *)avAsset options:nil];
@@ -116,7 +116,7 @@
 return [[SSignal alloc] initWithGenerator:^id<SDisposable>(SSubscriber *subscriber)
 {
 SAtomic *context = [[SAtomic alloc] initWithValue:[TGMediaVideoConversionContext contextWithQueue:queue subscriber:subscriber]];
-NSURL *outputUrl = [self _randomTemporaryURL];
+NSURL *outputUrl = [NSURL fileURLWithPath:path];
 NSArray *requiredKeys = @[ @"tracks", @"duration" ];
 [avAsset loadValuesAsynchronouslyForKeys:requiredKeys completionHandler:^
@@ -222,14 +222,14 @@
 }];
 }
-+ (SSignal *)renderUIImage:(UIImage *)image duration:(NSTimeInterval)duration adjustments:(TGMediaVideoEditAdjustments *)adjustments watcher:(TGMediaVideoFileWatcher *)watcher entityRenderer:(id<TGPhotoPaintEntityRenderer>)entityRenderer
++ (SSignal *)renderUIImage:(UIImage *)image duration:(NSTimeInterval)duration adjustments:(TGMediaVideoEditAdjustments *)adjustments path:(NSString *)path watcher:(TGMediaVideoFileWatcher *)watcher entityRenderer:(id<TGPhotoPaintEntityRenderer>)entityRenderer
 {
 SQueue *queue = [[SQueue alloc] init];
 return [[SSignal alloc] initWithGenerator:^id<SDisposable>(SSubscriber *subscriber)
 {
 SAtomic *context = [[SAtomic alloc] initWithValue:[TGMediaVideoConversionContext contextWithQueue:queue subscriber:subscriber]];
-NSURL *outputUrl = [self _randomTemporaryURL];
+NSURL *outputUrl = [NSURL fileURLWithPath:path];
 NSString *path = TGComponentsPathForResource(@"blank", @"mp4");
 AVAsset *avAsset = [[AVURLAsset alloc] initWithURL:[NSURL fileURLWithPath:path] options:nil];
@@ -895,11 +895,6 @@
 return CGSizeMake(renderWidth, renderHeight);
 }
-+ (NSURL *)_randomTemporaryURL
-{
-    return [NSURL fileURLWithPath:[NSTemporaryDirectory() stringByAppendingPathComponent:[[NSString alloc] initWithFormat:@"%x.mp4", (int)arc4random()]]];
-}
 + (NSUInteger)estimatedSizeForPreset:(TGMediaVideoConversionPreset)preset duration:(NSTimeInterval)duration hasAudio:(bool)hasAudio
 {
 NSInteger bitrate = [TGMediaVideoConversionPresetSettings _videoBitrateKbpsForPreset:preset];

View File

@@ -648,6 +648,11 @@ const NSTimeInterval TGPhotoQualityPreviewDuration = 15.0f;
 [(TGPhotoEditorController *)self.parentViewController setInfoString:fileSize];
 }
++ (NSString *)_randomTemporaryPath
+{
+    return [NSTemporaryDirectory() stringByAppendingPathComponent:[[NSString alloc] initWithFormat:@"%x.mp4", (int)arc4random()]];
+}
 - (void)generateVideoPreview
 {
 if (self.preset == _currentPreset)
@@ -665,6 +670,8 @@ const NSTimeInterval TGPhotoQualityPreviewDuration = 15.0f;
 TGVideoEditAdjustments *adjustments = [self.photoEditor exportAdjustments];
 adjustments = [adjustments editAdjustmentsWithPreset:self.preset maxDuration:TGPhotoQualityPreviewDuration];
+NSString *path = [TGPhotoQualityController _randomTemporaryPath];
 __block NSTimeInterval delay = 0.0;
 __weak TGPhotoQualityController *weakSelf = self;
 SSignal *convertSignal = [[assetSignal onNext:^(AVAsset *next) {
@@ -680,7 +687,7 @@ const NSTimeInterval TGPhotoQualityPreviewDuration = 15.0f;
 {
 return [[[[[SSignal single:avAsset] delay:delay onQueue:[SQueue concurrentDefaultQueue]] mapToSignal:^SSignal *(AVAsset *avAsset)
 {
-return [TGMediaVideoConverter convertAVAsset:avAsset adjustments:adjustments watcher:nil inhibitAudio:true entityRenderer:nil];
+return [TGMediaVideoConverter convertAVAsset:avAsset adjustments:adjustments path:path watcher:nil inhibitAudio:true entityRenderer:nil];
 }] onError:^(__unused id error) {
 delay = 1.0;
 }] retryIf:^bool(__unused id error)

View File

@@ -751,7 +751,7 @@ public final class MediaPickerScreen: ViewController, AttachmentContainable {
 case .assets:
 signals = TGMediaAssetsController.resultSignals(for: controller.interaction?.selectionState, editingContext: controller.interaction?.editingState, intent: asFile ? TGMediaAssetsControllerSendFileIntent : TGMediaAssetsControllerSendMediaIntent, currentItem: nil, storeAssets: true, convertToJpeg: convertToJpeg, descriptionGenerator: legacyAssetPickerItemGenerator(), saveEditedPhotos: controller.saveEditedPhotos)
 case .media:
 signals = TGMediaAssetsController.pasteboardResultSignals(for: controller.interaction?.selectionState, editingContext: controller.interaction?.editingState, intent: asFile ? TGMediaAssetsControllerSendFileIntent : TGMediaAssetsControllerSendMediaIntent, currentItem: nil, descriptionGenerator: legacyAssetPickerItemGenerator())
 }
 guard let signals = signals else {
 return

View File

@@ -1067,6 +1067,8 @@ public class SearchBarNode: ASDisplayNode, UITextFieldDelegate {
 node.isHidden = true
+self.textField.isUserInteractionEnabled = false
 if !self.clearButton.isHidden {
 let xOffset = targetTextBackgroundFrame.width - self.textBackgroundNode.frame.width
 if !xOffset.isZero {
@@ -1118,10 +1120,11 @@ public class SearchBarNode: ASDisplayNode, UITextFieldDelegate {
 var backgroundCompleted = false
 var separatorCompleted = false
 var textBackgroundCompleted = false
-let intermediateCompletion: () -> Void = { [weak node] in
+let intermediateCompletion: () -> Void = { [weak node, weak self] in
 if backgroundCompleted && separatorCompleted && textBackgroundCompleted {
 completion()
 node?.isHidden = false
+self?.textField.isUserInteractionEnabled = true
 }
 }

View File

@@ -30,6 +30,7 @@ private final class DataAndStorageControllerArguments {
 let openSaveIncoming: (AutomaticSaveIncomingPeerType) -> Void
 let toggleSaveEditedPhotos: (Bool) -> Void
 let togglePauseMusicOnRecording: (Bool) -> Void
+let toggleRaiseToListen: (Bool) -> Void
 let toggleAutoplayGifs: (Bool) -> Void
 let toggleAutoplayVideos: (Bool) -> Void
 let toggleDownloadInBackground: (Bool) -> Void
@@ -37,7 +38,7 @@ private final class DataAndStorageControllerArguments {
 let openIntents: () -> Void
 let toggleEnableSensitiveContent: (Bool) -> Void
-init(openStorageUsage: @escaping () -> Void, openNetworkUsage: @escaping () -> Void, openProxy: @escaping () -> Void, openAutomaticDownloadConnectionType: @escaping (AutomaticDownloadConnectionType) -> Void, resetAutomaticDownload: @escaping () -> Void, toggleVoiceUseLessData: @escaping (Bool) -> Void, openSaveIncoming: @escaping (AutomaticSaveIncomingPeerType) -> Void, toggleSaveEditedPhotos: @escaping (Bool) -> Void, togglePauseMusicOnRecording: @escaping (Bool) -> Void, toggleAutoplayGifs: @escaping (Bool) -> Void, toggleAutoplayVideos: @escaping (Bool) -> Void, toggleDownloadInBackground: @escaping (Bool) -> Void, openBrowserSelection: @escaping () -> Void, openIntents: @escaping () -> Void, toggleEnableSensitiveContent: @escaping (Bool) -> Void) {
+init(openStorageUsage: @escaping () -> Void, openNetworkUsage: @escaping () -> Void, openProxy: @escaping () -> Void, openAutomaticDownloadConnectionType: @escaping (AutomaticDownloadConnectionType) -> Void, resetAutomaticDownload: @escaping () -> Void, toggleVoiceUseLessData: @escaping (Bool) -> Void, openSaveIncoming: @escaping (AutomaticSaveIncomingPeerType) -> Void, toggleSaveEditedPhotos: @escaping (Bool) -> Void, togglePauseMusicOnRecording: @escaping (Bool) -> Void, toggleRaiseToListen: @escaping (Bool) -> Void, toggleAutoplayGifs: @escaping (Bool) -> Void, toggleAutoplayVideos: @escaping (Bool) -> Void, toggleDownloadInBackground: @escaping (Bool) -> Void, openBrowserSelection: @escaping () -> Void, openIntents: @escaping () -> Void, toggleEnableSensitiveContent: @escaping (Bool) -> Void) {
 self.openStorageUsage = openStorageUsage
 self.openNetworkUsage = openNetworkUsage
 self.openProxy = openProxy
@@ -47,6 +48,7 @@ private final class DataAndStorageControllerArguments {
 self.openSaveIncoming = openSaveIncoming
 self.toggleSaveEditedPhotos = toggleSaveEditedPhotos
 self.togglePauseMusicOnRecording = togglePauseMusicOnRecording
+self.toggleRaiseToListen = toggleRaiseToListen
 self.toggleAutoplayGifs = toggleAutoplayGifs
 self.toggleAutoplayVideos = toggleAutoplayVideos
 self.toggleDownloadInBackground = toggleDownloadInBackground
@@ -74,6 +76,8 @@ public enum DataAndStorageEntryTag: ItemListItemTag, Equatable {
 case autoplayVideos
 case saveEditedPhotos
 case downloadInBackground
+case pauseMusicOnRecording
+case raiseToListen
 case autoSave(AutomaticSaveIncomingPeerType)
 public func isEqual(to other: ItemListItemTag) -> Bool {
@@ -108,9 +112,11 @@ private enum DataAndStorageEntry: ItemListNodeEntry {
 case otherHeader(PresentationTheme, String)
 case shareSheet(PresentationTheme, String)
 case saveEditedPhotos(PresentationTheme, String, Bool)
-case pauseMusicOnRecording(PresentationTheme, String, Bool)
 case openLinksIn(PresentationTheme, String, String)
+case pauseMusicOnRecording(PresentationTheme, String, Bool)
+case raiseToListen(PresentationTheme, String, Bool)
+case raiseToListenInfo(PresentationTheme, String)
 case connectionHeader(PresentationTheme, String)
 case connectionProxy(PresentationTheme, String, String)
 case enableSensitiveContent(String, Bool)
@@ -129,7 +135,7 @@ private enum DataAndStorageEntry: ItemListNodeEntry {
 return DataAndStorageSection.voiceCalls.rawValue
 case .autoplayHeader, .autoplayGifs, .autoplayVideos:
 return DataAndStorageSection.autoPlay.rawValue
-case .otherHeader, .shareSheet, .saveEditedPhotos, .pauseMusicOnRecording, .openLinksIn:
+case .otherHeader, .shareSheet, .saveEditedPhotos, .openLinksIn, .pauseMusicOnRecording, .raiseToListen, .raiseToListenInfo:
 return DataAndStorageSection.other.rawValue
 case .connectionHeader, .connectionProxy:
 return DataAndStorageSection.connection.rawValue
@@ -152,14 +158,12 @@ private enum DataAndStorageEntry: ItemListNodeEntry {
 return 4
 case .automaticDownloadReset:
 return 5
 case .autoSaveHeader:
 return 6
 case let .autoSaveItem(index, _, _, _, _):
 return 7 + Int32(index)
 case .autoSaveInfo:
 return 20
 case .downloadInBackground:
 return 21
 case .downloadInBackgroundInfo:
@@ -180,16 +184,20 @@ private enum DataAndStorageEntry: ItemListNodeEntry {
 return 29
 case .saveEditedPhotos:
 return 31
-case .pauseMusicOnRecording:
-return 32
 case .openLinksIn:
+return 32
+case .pauseMusicOnRecording:
 return 33
-case .connectionHeader:
+case .raiseToListen:
 return 34
-case .connectionProxy:
+case .raiseToListenInfo:
 return 35
-case .enableSensitiveContent:
+case .connectionHeader:
 return 36
+case .connectionProxy:
+return 37
+case .enableSensitiveContent:
+return 38
 }
 }
@@ -297,14 +305,26 @@ private enum DataAndStorageEntry: ItemListNodeEntry {
 } else {
 return false
 }
+case let .openLinksIn(lhsTheme, lhsText, lhsValue):
+if case let .openLinksIn(rhsTheme, rhsText, rhsValue) = rhs, lhsTheme === rhsTheme, lhsText == rhsText, lhsValue == rhsValue {
+return true
+} else {
+return false
+}
 case let .pauseMusicOnRecording(lhsTheme, lhsText, lhsValue):
 if case let .pauseMusicOnRecording(rhsTheme, rhsText, rhsValue) = rhs, lhsTheme === rhsTheme, lhsText == rhsText, lhsValue == rhsValue {
 return true
 } else {
 return false
 }
-case let .openLinksIn(lhsTheme, lhsText, lhsValue):
-if case let .openLinksIn(rhsTheme, rhsText, rhsValue) = rhs, lhsTheme === rhsTheme, lhsText == rhsText, lhsValue == rhsValue {
+case let .raiseToListen(lhsTheme, lhsText, lhsValue):
+if case let .raiseToListen(rhsTheme, rhsText, rhsValue) = rhs, lhsTheme === rhsTheme, lhsText == rhsText, lhsValue == rhsValue {
+return true
+} else {
+return false
+}
+case let .raiseToListenInfo(lhsTheme, lhsText):
+if case let .raiseToListenInfo(rhsTheme, rhsText) = rhs, lhsTheme === rhsTheme, lhsText == rhsText {
 return true
 } else {
 return false
@@ -420,14 +440,20 @@ private enum DataAndStorageEntry: ItemListNodeEntry {
 return ItemListSwitchItem(presentationData: presentationData, title: text, value: value, sectionId: self.section, style: .blocks, updated: { value in
 arguments.toggleSaveEditedPhotos(value)
 }, tag: DataAndStorageEntryTag.saveEditedPhotos)
-case let .pauseMusicOnRecording(_, text, value):
-return ItemListSwitchItem(presentationData: presentationData, title: text, value: value, sectionId: self.section, style: .blocks, updated: { value in
-arguments.togglePauseMusicOnRecording(value)
-}, tag: DataAndStorageEntryTag.saveEditedPhotos)
 case let .openLinksIn(_, text, value):
 return ItemListDisclosureItem(presentationData: presentationData, title: text, label: value, sectionId: self.section, style: .blocks, action: {
 arguments.openBrowserSelection()
 })
+case let .pauseMusicOnRecording(_, text, value):
+return ItemListSwitchItem(presentationData: presentationData, title: text, value: value, sectionId: self.section, style: .blocks, updated: { value in
+arguments.togglePauseMusicOnRecording(value)
+}, tag: DataAndStorageEntryTag.pauseMusicOnRecording)
+case let .raiseToListen(_, text, value):
+return ItemListSwitchItem(presentationData: presentationData, title: text, value: value, sectionId: self.section, style: .blocks, updated: { value in
+arguments.toggleRaiseToListen(value)
+}, tag: DataAndStorageEntryTag.raiseToListen)
+case let .raiseToListenInfo(_, text):
+return ItemListTextItem(presentationData: presentationData, text: .markdown(text), sectionId: self.section)
 case let .downloadInBackground(_, text, value):
 return ItemListSwitchItem(presentationData: presentationData, title: text, value: value, sectionId: self.section, style: .blocks, updated: { value in
 arguments.toggleDownloadInBackground(value)
@@ -646,8 +672,10 @@ private func dataAndStorageControllerEntries(state: DataAndStorageControllerStat
 entries.append(.shareSheet(presentationData.theme, presentationData.strings.ChatSettings_IntentsSettings))
 }
 entries.append(.saveEditedPhotos(presentationData.theme, presentationData.strings.Settings_SaveEditedPhotos, data.generatedMediaStoreSettings.storeEditedPhotos))
-entries.append(.pauseMusicOnRecording(presentationData.theme, presentationData.strings.Settings_PauseMusicOnRecording, data.mediaInputSettings.pauseMusicOnRecording))
 entries.append(.openLinksIn(presentationData.theme, presentationData.strings.ChatSettings_OpenLinksIn, defaultWebBrowser))
+entries.append(.pauseMusicOnRecording(presentationData.theme, presentationData.strings.Settings_PauseMusicOnRecording, data.mediaInputSettings.pauseMusicOnRecording))
+entries.append(.raiseToListen(presentationData.theme, presentationData.strings.Settings_RaiseToListen, data.mediaInputSettings.enableRaiseToSpeak))
+entries.append(.raiseToListenInfo(presentationData.theme, presentationData.strings.Settings_RaiseToListenInfo))
 let proxyValue: String
 if let proxySettings = data.proxySettings, let activeServer = proxySettings.activeServer, proxySettings.enabled {
@@ -878,6 +906,10 @@ public func dataAndStorageController(context: AccountContext, focusOnItemTag: Da
 let _ = updateMediaInputSettingsInteractively(accountManager: context.sharedContext.accountManager, { current in
 return current.withUpdatedPauseMusicOnRecording(value)
 }).start()
+}, toggleRaiseToListen: { value in
+let _ = updateMediaInputSettingsInteractively(accountManager: context.sharedContext.accountManager, {
+$0.withUpdatedEnableRaiseToSpeak(value)
+}).start()
 }, toggleAutoplayGifs: { value in
 let _ = updateMediaDownloadSettingsInteractively(accountManager: context.sharedContext.accountManager, { settings in
 var settings = settings

View File

@@ -6296,6 +6296,8 @@ public final class VoiceChatControllerImpl: ViewController, VoiceChatController
 return nil
 }
 }
+let tempFile = EngineTempBox.shared.tempFile(fileName: "video.mp4")
 let uploadInterface = LegacyLiveUploadInterface(context: context)
 let signal: SSignal
 if let url = asset as? URL, url.absoluteString.hasSuffix(".jpg"), let data = try? Data(contentsOf: url, options: [.mappedRead]), let image = UIImage(data: data), let entityRenderer = entityRenderer {
@@ -6311,14 +6313,14 @@ public final class VoiceChatControllerImpl: ViewController, VoiceChatController
 })
 signal = durationSignal.map(toSignal: { duration -> SSignal in
 if let duration = duration as? Double {
-return TGMediaVideoConverter.renderUIImage(image, duration: duration, adjustments: adjustments, watcher: nil, entityRenderer: entityRenderer)!
+return TGMediaVideoConverter.renderUIImage(image, duration: duration, adjustments: adjustments, path: tempFile.path, watcher: nil, entityRenderer: entityRenderer)!
 } else {
 return SSignal.single(nil)
 }
 })
 } else if let asset = asset as? AVAsset {
-signal = TGMediaVideoConverter.convert(asset, adjustments: adjustments, watcher: uploadInterface, entityRenderer: entityRenderer)!
+signal = TGMediaVideoConverter.convert(asset, adjustments: adjustments, path: tempFile.path, watcher: uploadInterface, entityRenderer: entityRenderer)!
 } else {
 signal = SSignal.complete()
 }
@@ -6344,6 +6346,8 @@ public final class VoiceChatControllerImpl: ViewController, VoiceChatController
 }
 account.postbox.mediaBox.storeResourceData(resource.id, data: data, synchronous: true)
 subscriber.putNext(resource)
+EngineTempBox.shared.dispose(tempFile)
 }
 }
 subscriber.putCompletion()

View File

@@ -418,7 +418,7 @@ public func foldLineBreaks(_ text: NSAttributedString) -> NSAttributedString {
 while true {
 if let range = remainingString.string.range(of: "\n") {
 let mappedRange = NSRange(range, in: remainingString.string)
-lines.append(remainingString.attributedSubstring(from: NSRange(location: 0, length: mappedRange.upperBound)))
+lines.append(remainingString.attributedSubstring(from: NSRange(location: 0, length: mappedRange.upperBound - 1)))
 remainingString.replaceCharacters(in: NSRange(location: 0, length: mappedRange.upperBound), with: "")
 } else {
 if lines.isEmpty {
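The one-character change above makes each folded line exclude its trailing newline (the newline is still consumed from the remaining string). A self-contained illustration of the loop's before/after behavior, simplified to plain strings:

    import Foundation

    let text = NSMutableAttributedString(string: "first\nsecond\nthird")
    var lines: [NSAttributedString] = []
    while true {
        if let range = text.string.range(of: "\n") {
            let mappedRange = NSRange(range, in: text.string)
            // upperBound - 1 keeps the line's text but drops the "\n" itself.
            lines.append(text.attributedSubstring(from: NSRange(location: 0, length: mappedRange.upperBound - 1)))
            text.replaceCharacters(in: NSRange(location: 0, length: mappedRange.upperBound), with: "")
        } else {
            lines.append(NSAttributedString(attributedString: text))
            break
        }
    }
    print(lines.map { $0.string })   // ["first", "second", "third"] — previously "first\n", "second\n", "third"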

View File

@@ -77,7 +77,7 @@ final class BackgroundColorComponent: Component {
 let itemSize = CGSize(width: 30.0, height: 30.0)
 let sideInset: CGFloat = 12.0
 let height: CGFloat = 50.0
-let delta = (availableSize.width - sideInset * 2.0 - CGFloat(values.count) * itemSize.width) / CGFloat(values.count - 1)
+let delta = floorToScreenPixels((availableSize.width - sideInset * 2.0 - CGFloat(values.count) * itemSize.width) / CGFloat(values.count - 1))
 for i in 0 ..< values.count {
 let view: ComponentView<Empty>
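floorToScreenPixels snaps the computed spacing to the device's pixel grid so item x-positions do not land on fractional pixels. A minimal stand-in for such a helper (the project's own implementation may differ in detail):

    import UIKit

    func floorToScreenPixels(_ value: CGFloat, scale: CGFloat = UIScreen.main.scale) -> CGFloat {
        // Snap down to the nearest physical pixel (1/scale points).
        return floor(value * scale) / scale
    }

    // Example: spacing between 30pt swatches in a 320pt-wide row with 12pt side insets.
    let availableWidth: CGFloat = 320.0
    let count = 7
    let delta = floorToScreenPixels((availableWidth - 12.0 * 2.0 - CGFloat(count) * 30.0) / CGFloat(count - 1))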

View File

@@ -1136,7 +1136,7 @@ class ChatMessageAnimatedStickerItemNode: ChatMessageItemView {
 }
 }
-let dateText = stringForMessageTimestampStatus(accountPeerId: item.context.account.peerId, message: item.message, dateTimeFormat: item.presentationData.dateTimeFormat, nameDisplayOrder: item.presentationData.nameDisplayOrder, strings: item.presentationData.strings, format: .regular)
+let dateText = stringForMessageTimestampStatus(accountPeerId: item.context.account.peerId, message: item.message, dateTimeFormat: item.presentationData.dateTimeFormat, nameDisplayOrder: item.presentationData.nameDisplayOrder, strings: item.presentationData.strings, format: .regular, associatedData: item.associatedData)
 var isReplyThread = false
 if case .replyThread = item.chatLocation {

View File

@@ -391,7 +391,7 @@ final class ChatMessageAttachedContentNode: ASDisplayNode {
 }
 }
-let dateText = stringForMessageTimestampStatus(accountPeerId: context.account.peerId, message: message, dateTimeFormat: presentationData.dateTimeFormat, nameDisplayOrder: presentationData.nameDisplayOrder, strings: presentationData.strings)
+let dateText = stringForMessageTimestampStatus(accountPeerId: context.account.peerId, message: message, dateTimeFormat: presentationData.dateTimeFormat, nameDisplayOrder: presentationData.nameDisplayOrder, strings: presentationData.strings, associatedData: associatedData)
 var webpageGalleryMediaCount: Int?
 for media in message.media {

View File

@@ -1800,7 +1800,7 @@ class ChatMessageBubbleItemNode: ChatMessageItemView, ChatMessagePreviewItemNode
 } else {
 dateFormat = .regular
 }
-let dateText = stringForMessageTimestampStatus(accountPeerId: item.context.account.peerId, message: message, dateTimeFormat: item.presentationData.dateTimeFormat, nameDisplayOrder: item.presentationData.nameDisplayOrder, strings: item.presentationData.strings, format: dateFormat)
+let dateText = stringForMessageTimestampStatus(accountPeerId: item.context.account.peerId, message: message, dateTimeFormat: item.presentationData.dateTimeFormat, nameDisplayOrder: item.presentationData.nameDisplayOrder, strings: item.presentationData.strings, format: dateFormat, associatedData: item.associatedData)
 let statusType: ChatMessageDateAndStatusType
 if incoming {

View File

@@ -167,7 +167,7 @@ class ChatMessageCallBubbleContentNode: ChatMessageBubbleContentNode {
 }
 }
-let dateText = stringForMessageTimestampStatus(accountPeerId: item.context.account.peerId, message: item.message, dateTimeFormat: item.presentationData.dateTimeFormat, nameDisplayOrder: item.presentationData.nameDisplayOrder, strings: item.presentationData.strings)
+let dateText = stringForMessageTimestampStatus(accountPeerId: item.context.account.peerId, message: item.message, dateTimeFormat: item.presentationData.dateTimeFormat, nameDisplayOrder: item.presentationData.nameDisplayOrder, strings: item.presentationData.strings, associatedData: item.associatedData)
 let statusText: String
 if let callDuration = callDuration, callDuration > 1 {

View File

@@ -188,7 +188,7 @@ class ChatMessageContactBubbleContentNode: ChatMessageBubbleContentNode {
 }
 }
-let dateText = stringForMessageTimestampStatus(accountPeerId: item.context.account.peerId, message: item.message, dateTimeFormat: item.presentationData.dateTimeFormat, nameDisplayOrder: item.presentationData.nameDisplayOrder, strings: item.presentationData.strings)
+let dateText = stringForMessageTimestampStatus(accountPeerId: item.context.account.peerId, message: item.message, dateTimeFormat: item.presentationData.dateTimeFormat, nameDisplayOrder: item.presentationData.nameDisplayOrder, strings: item.presentationData.strings, associatedData: item.associatedData)
 let statusType: ChatMessageDateAndStatusType?
 switch position {

View File

@@ -815,7 +815,7 @@ final class ChatMessageInteractiveFileNode: ASDisplayNode {
 edited = true
 }
-let dateText = stringForMessageTimestampStatus(accountPeerId: arguments.context.account.peerId, message: arguments.message, dateTimeFormat: arguments.presentationData.dateTimeFormat, nameDisplayOrder: arguments.presentationData.nameDisplayOrder, strings: arguments.presentationData.strings)
+let dateText = stringForMessageTimestampStatus(accountPeerId: arguments.context.account.peerId, message: arguments.message, dateTimeFormat: arguments.presentationData.dateTimeFormat, nameDisplayOrder: arguments.presentationData.nameDisplayOrder, strings: arguments.presentationData.strings, associatedData: arguments.associatedData)
 let displayReactionsInline = shouldDisplayInlineDateReactions(message: arguments.message, isPremium: arguments.associatedData.isPremium, forceInline: arguments.associatedData.forceInlineReactions)
 var reactionSettings: ChatMessageDateAndStatusNode.TrailingReactionSettings?

View File

@@ -487,7 +487,7 @@ class ChatMessageInteractiveInstantVideoNode: ASDisplayNode {
 }
 }
-let dateText = stringForMessageTimestampStatus(accountPeerId: item.context.account.peerId, message: item.message, dateTimeFormat: item.presentationData.dateTimeFormat, nameDisplayOrder: item.presentationData.nameDisplayOrder, strings: item.presentationData.strings, format: .regular)
+let dateText = stringForMessageTimestampStatus(accountPeerId: item.context.account.peerId, message: item.message, dateTimeFormat: item.presentationData.dateTimeFormat, nameDisplayOrder: item.presentationData.nameDisplayOrder, strings: item.presentationData.strings, format: .regular, associatedData: item.associatedData)
 let maxDateAndStatusWidth: CGFloat
 if case .bubble = statusDisplayType {

View File

@@ -205,7 +205,7 @@ class ChatMessageMapBubbleContentNode: ChatMessageBubbleContentNode {
 }
 }
-let dateText = stringForMessageTimestampStatus(accountPeerId: item.context.account.peerId, message: item.message, dateTimeFormat: item.presentationData.dateTimeFormat, nameDisplayOrder: item.presentationData.nameDisplayOrder, strings: item.presentationData.strings)
+let dateText = stringForMessageTimestampStatus(accountPeerId: item.context.account.peerId, message: item.message, dateTimeFormat: item.presentationData.dateTimeFormat, nameDisplayOrder: item.presentationData.nameDisplayOrder, strings: item.presentationData.strings, associatedData: item.associatedData)
 let statusType: ChatMessageDateAndStatusType?
 switch position {

View File

@@ -237,8 +237,8 @@ class ChatMessageMediaBubbleContentNode: ChatMessageBubbleContentNode {
 }
 }
 }
-let dateText = stringForMessageTimestampStatus(accountPeerId: item.context.account.peerId, message: item.message, dateTimeFormat: item.presentationData.dateTimeFormat, nameDisplayOrder: item.presentationData.nameDisplayOrder, strings: item.presentationData.strings)
+let dateText = stringForMessageTimestampStatus(accountPeerId: item.context.account.peerId, message: item.message, dateTimeFormat: item.presentationData.dateTimeFormat, nameDisplayOrder: item.presentationData.nameDisplayOrder, strings: item.presentationData.strings, associatedData: item.associatedData)
 let statusType: ChatMessageDateAndStatusType?
 switch preparePosition {

View File

@@ -1069,7 +1069,7 @@ class ChatMessagePollBubbleContentNode: ChatMessageBubbleContentNode {
 }
 }
-let dateText = stringForMessageTimestampStatus(accountPeerId: item.context.account.peerId, message: item.message, dateTimeFormat: item.presentationData.dateTimeFormat, nameDisplayOrder: item.presentationData.nameDisplayOrder, strings: item.presentationData.strings)
+let dateText = stringForMessageTimestampStatus(accountPeerId: item.context.account.peerId, message: item.message, dateTimeFormat: item.presentationData.dateTimeFormat, nameDisplayOrder: item.presentationData.nameDisplayOrder, strings: item.presentationData.strings, associatedData: item.associatedData)
 let statusType: ChatMessageDateAndStatusType?
 switch position {

View File

@@ -71,7 +71,7 @@ class ChatMessageRestrictedBubbleContentNode: ChatMessageBubbleContentNode {
 }
 }
-let dateText = stringForMessageTimestampStatus(accountPeerId: item.context.account.peerId, message: item.message, dateTimeFormat: item.presentationData.dateTimeFormat, nameDisplayOrder: item.presentationData.nameDisplayOrder, strings: item.presentationData.strings)
+let dateText = stringForMessageTimestampStatus(accountPeerId: item.context.account.peerId, message: item.message, dateTimeFormat: item.presentationData.dateTimeFormat, nameDisplayOrder: item.presentationData.nameDisplayOrder, strings: item.presentationData.strings, associatedData: item.associatedData)
 let statusType: ChatMessageDateAndStatusType?
 switch position {

View File

@@ -561,7 +561,7 @@ class ChatMessageStickerItemNode: ChatMessageItemView {
 }
 }
-let dateText = stringForMessageTimestampStatus(accountPeerId: item.context.account.peerId, message: item.message, dateTimeFormat: item.presentationData.dateTimeFormat, nameDisplayOrder: item.presentationData.nameDisplayOrder, strings: item.presentationData.strings, format: .regular)
+let dateText = stringForMessageTimestampStatus(accountPeerId: item.context.account.peerId, message: item.message, dateTimeFormat: item.presentationData.dateTimeFormat, nameDisplayOrder: item.presentationData.nameDisplayOrder, strings: item.presentationData.strings, format: .regular, associatedData: item.associatedData)
 var isReplyThread = false
 if case .replyThread = item.chatLocation {

View File

@@ -181,7 +181,7 @@ class ChatMessageTextBubbleContentNode: ChatMessageBubbleContentNode {
 } else {
 dateFormat = .regular
 }
-let dateText = stringForMessageTimestampStatus(accountPeerId: item.context.account.peerId, message: item.message, dateTimeFormat: item.presentationData.dateTimeFormat, nameDisplayOrder: item.presentationData.nameDisplayOrder, strings: item.presentationData.strings, format: dateFormat)
+let dateText = stringForMessageTimestampStatus(accountPeerId: item.context.account.peerId, message: item.message, dateTimeFormat: item.presentationData.dateTimeFormat, nameDisplayOrder: item.presentationData.nameDisplayOrder, strings: item.presentationData.strings, format: dateFormat, associatedData: item.associatedData)
 let statusType: ChatMessageDateAndStatusType?
 var displayStatus = false

View File

@@ -226,8 +226,8 @@ final class ChatTranslationPanelNode: ASDisplayNode {
 }
 }
-topLanguages.append(contentsOf: popularTranslationLanguages)
+topLanguages.append("")
 var languages: [(String, String)] = []
 let languageLocale = Locale(identifier: langCode)
@@ -322,6 +322,8 @@ private final class TranslationContextReferenceContentSource: ContextReferenceCo
 }
 }
+private let separatorHeight: CGFloat = 7.0
 private final class TranslationLanguagesContextMenuContent: ContextControllerItemsContent {
 private final class BackButtonNode: HighlightTrackingButtonNode {
 let highlightBackgroundNode: ASDisplayNode
@@ -445,7 +447,7 @@ private final class TranslationLanguagesContextMenuContent: ContextControllerIte
 self.addSubnode(self.titleLabelNode)
 self.highligthedChanged = { [weak self] highlighted in
-guard let strongSelf = self else {
+guard let strongSelf = self, let language = strongSelf.language, !language.isEmpty else {
 return
 }
 if highlighted {
@@ -461,6 +463,9 @@ private final class TranslationLanguagesContextMenuContent: ContextControllerIte
 }
 @objc private func pressed() {
+guard let language = self.language, !language.isEmpty else {
+return
+}
 self.action()
 }
@@ -476,7 +481,6 @@ private final class TranslationLanguagesContextMenuContent: ContextControllerIte
 }
 self.highlightBackgroundNode.backgroundColor = presentationData.theme.contextMenu.itemHighlightedBackgroundColor
-self.separatorNode.backgroundColor = presentationData.theme.contextMenu.itemSeparatorColor
 self.highlightBackgroundNode.frame = CGRect(origin: CGPoint(), size: size)
@@ -487,8 +491,15 @@ private final class TranslationLanguagesContextMenuContent: ContextControllerIte
 let titleFrame = CGRect(origin: CGPoint(x: sideInset, y: floor((size.height - titleSize.height) / 2.0)), size: titleSize)
 self.titleLabelNode.frame = titleFrame
-self.separatorNode.frame = CGRect(origin: CGPoint(x: 0.0, y: size.height), size: CGSize(width: size.width, height: UIScreenPixel))
-self.separatorNode.isHidden = isLast
+if language == "" {
+self.separatorNode.backgroundColor = presentationData.theme.contextMenu.sectionSeparatorColor
+self.separatorNode.frame = CGRect(origin: CGPoint(x: 0.0, y: 0.0), size: CGSize(width: size.width, height: separatorHeight))
+self.separatorNode.isHidden = false
+} else {
+self.separatorNode.backgroundColor = presentationData.theme.contextMenu.itemSeparatorColor
+self.separatorNode.frame = CGRect(origin: CGPoint(x: 0.0, y: size.height), size: CGSize(width: size.width, height: UIScreenPixel))
+self.separatorNode.isHidden = isLast
+}
 }
 }
@@ -537,26 +548,6 @@ private final class TranslationLanguagesContextMenuContent: ContextControllerIte
 self.scrollNode.view.delegate = self
 self.clipsToBounds = true
-// self.stateDisposable = (self.listContext.state
-// |> deliverOnMainQueue).start(next: { [weak self] state in
-// guard let strongSelf = self else {
-// return
-// }
-// let updatedState = ItemsState(listState: state, readStats: strongSelf.state.readStats)
-// var animateIn = false
-// if strongSelf.state.item(at: 0) == nil && updatedState.item(at: 0) != nil {
-// animateIn = true
-// }
-// strongSelf.state = updatedState
-// strongSelf.animateIn = true
-// strongSelf.requestUpdate(strongSelf, animateIn ? .animated(duration: 0.2, curve: .easeInOut) : .immediate)
-// if animateIn {
-// for (_, itemNode) in strongSelf.itemNodes {
-// itemNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2)
-// }
-// }
-// })
 }
 func scrollViewDidScroll(_ scrollView: UIScrollView) {
@ -591,11 +582,23 @@ private final class TranslationLanguagesContextMenuContent: ContextControllerIte
let minVisibleIndex = max(0, Int(floor(visibleBounds.minY / itemHeight))) let minVisibleIndex = max(0, Int(floor(visibleBounds.minY / itemHeight)))
let maxVisibleIndex = Int(ceil(visibleBounds.maxY / itemHeight)) let maxVisibleIndex = Int(ceil(visibleBounds.maxY / itemHeight))
var separatorIndex = 0
for i in 0 ..< self.languages.count {
if self.languages[i].0.isEmpty {
separatorIndex = i
break
}
}
if minVisibleIndex <= maxVisibleIndex { if minVisibleIndex <= maxVisibleIndex {
for index in minVisibleIndex ... maxVisibleIndex { for index in minVisibleIndex ... maxVisibleIndex {
let itemFrame = CGRect(origin: CGPoint(x: 0.0, y: CGFloat(index) * itemHeight), size: CGSize(width: size.width, height: itemHeight)) let height = self.languages[index].0.isEmpty ? separatorHeight : itemHeight
var itemFrame = CGRect(origin: CGPoint(x: 0.0, y: CGFloat(index) * itemHeight), size: CGSize(width: size.width, height: height))
if index > separatorIndex {
itemFrame.origin.y += separatorHeight - itemHeight
}
if index < self.languages.count { if index < self.languages.count {
let (languageCode, displayTitle) = self.languages[index] let (languageCode, displayTitle) = self.languages[index]
validIds.insert(index) validIds.insert(index)
@ -612,7 +615,7 @@ private final class TranslationLanguagesContextMenuContent: ContextControllerIte
self.scrollNode.addSubnode(itemNode) self.scrollNode.addSubnode(itemNode)
} }
itemNode.update(size: itemFrame.size, presentationData: presentationData, language: languageCode, displayTitle: displayTitle, isLast: index == self.languages.count - 1, syncronousLoad: syncronousLoad) itemNode.update(size: itemFrame.size, presentationData: presentationData, language: languageCode, displayTitle: displayTitle, isLast: index == self.languages.count - 1 || index == separatorIndex - 1, syncronousLoad: syncronousLoad)
itemNode.frame = itemFrame itemNode.frame = itemFrame
} }
} }
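Aside (reviewer note, not part of the commit): the hunk above lets the flat language list contain one thin section separator row, marked by an empty language code, and shifts every row after it up by the height difference. The frame math reduces to the small helper below; the function name is hypothetical, and the two parameters stand in for the itemHeight and separatorHeight constants used above.

    import CoreGraphics

    // Illustrative only: vertical origin of row `index` in a list where the single row at
    // `separatorIndex` is `separatorHeight` tall and every other row is `itemHeight` tall.
    func rowOriginY(index: Int, separatorIndex: Int, itemHeight: CGFloat, separatorHeight: CGFloat) -> CGFloat {
        var y = CGFloat(index) * itemHeight
        if index > separatorIndex {
            // Rows below the separator move up, because the separator is shorter than a regular row.
            y += separatorHeight - itemHeight
        }
        return y
    }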
@@ -745,7 +748,7 @@ private final class TranslationLanguagesContextMenuContent: ContextControllerIte
topContentHeight += backButtonFrame.height topContentHeight += backButtonFrame.height
} }
if let separatorNode = self.separatorNode { if let separatorNode = self.separatorNode {
let separatorFrame = CGRect(origin: CGPoint(x: 0.0, y: topContentHeight), size: CGSize(width: constrainedSize.width, height: 7.0)) let separatorFrame = CGRect(origin: CGPoint(x: 0.0, y: topContentHeight), size: CGSize(width: constrainedSize.width, height: separatorHeight))
separatorNode.backgroundColor = self.presentationData.theme.contextMenu.sectionSeparatorColor separatorNode.backgroundColor = self.presentationData.theme.contextMenu.sectionSeparatorColor
transition.updateFrame(node: separatorNode, frame: separatorFrame) transition.updateFrame(node: separatorNode, frame: separatorFrame)
topContentHeight += separatorFrame.height topContentHeight += separatorFrame.height
@@ -534,6 +534,8 @@ public func createChannelController(context: AccountContext, mode: CreateChannel
return nil return nil
} }
} }
let tempFile = EngineTempBox.shared.tempFile(fileName: "video.mp4")
let uploadInterface = LegacyLiveUploadInterface(context: context) let uploadInterface = LegacyLiveUploadInterface(context: context)
let signal: SSignal let signal: SSignal
if let url = asset as? URL, url.absoluteString.hasSuffix(".jpg"), let data = try? Data(contentsOf: url, options: [.mappedRead]), let image = UIImage(data: data), let entityRenderer = entityRenderer { if let url = asset as? URL, url.absoluteString.hasSuffix(".jpg"), let data = try? Data(contentsOf: url, options: [.mappedRead]), let image = UIImage(data: data), let entityRenderer = entityRenderer {
@@ -549,14 +551,14 @@ public func createChannelController(context: AccountContext, mode: CreateChannel
}) })
signal = durationSignal.map(toSignal: { duration -> SSignal in signal = durationSignal.map(toSignal: { duration -> SSignal in
if let duration = duration as? Double { if let duration = duration as? Double {
return TGMediaVideoConverter.renderUIImage(image, duration: duration, adjustments: adjustments, watcher: nil, entityRenderer: entityRenderer)! return TGMediaVideoConverter.renderUIImage(image, duration: duration, adjustments: adjustments, path: tempFile.path, watcher: nil, entityRenderer: entityRenderer)!
} else { } else {
return SSignal.single(nil) return SSignal.single(nil)
} }
}) })
} else if let asset = asset as? AVAsset { } else if let asset = asset as? AVAsset {
signal = TGMediaVideoConverter.convert(asset, adjustments: adjustments, watcher: uploadInterface, entityRenderer: entityRenderer)! signal = TGMediaVideoConverter.convert(asset, adjustments: adjustments, path: tempFile.path, watcher: uploadInterface, entityRenderer: entityRenderer)!
} else { } else {
signal = SSignal.complete() signal = SSignal.complete()
} }
@@ -582,6 +584,8 @@ public func createChannelController(context: AccountContext, mode: CreateChannel
} }
context.account.postbox.mediaBox.storeResourceData(resource.id, data: data, synchronous: true) context.account.postbox.mediaBox.storeResourceData(resource.id, data: data, synchronous: true)
subscriber.putNext(resource) subscriber.putNext(resource)
EngineTempBox.shared.dispose(tempFile)
} }
} }
subscriber.putCompletion() subscriber.putCompletion()
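Aside (reviewer note, not part of the commit): this hunk, like the other converter call sites touched below, now hands TGMediaVideoConverter an explicit output path obtained from EngineTempBox and disposes the temp file only after the converted data has been stored. A minimal sketch of that lifecycle, using the calls visible in this diff; the wrapper function, its parameters, and the persist closure are hypothetical, and the imports are assumed to match the surrounding files (AVFoundation, LegacyComponents, SwiftSignalKit, TelegramCore).

    // Sketch of the temp-file lifecycle adopted in this commit.
    func convertWithTempFile(_ asset: AVAsset,
                             adjustments: TGVideoEditAdjustments?,
                             entityRenderer: LegacyPaintEntityRenderer?,
                             persist: @escaping (Data) -> Void) {
        let tempFile = EngineTempBox.shared.tempFile(fileName: "video.mp4")
        let signal = TGMediaVideoConverter.convert(asset, adjustments: adjustments, path: tempFile.path, watcher: nil, entityRenderer: entityRenderer)!
        let _ = signal.start(next: { next in
            if let result = next as? TGMediaVideoConversionResult,
               let data = try? Data(contentsOf: result.fileURL, options: [.mappedIfSafe]) {
                persist(data)                          // e.g. mediaBox.storeResourceData(...)
                EngineTempBox.shared.dispose(tempFile) // delete the temporary .mp4 only after the data is persisted
            }
        })
    }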
@@ -850,7 +850,6 @@ public func createGroupControllerImpl(context: AccountContext, peerIds: [PeerId]
} }
let signal = Signal<TelegramMediaResource?, UploadPeerPhotoError> { subscriber in let signal = Signal<TelegramMediaResource?, UploadPeerPhotoError> { subscriber in
let entityRenderer: LegacyPaintEntityRenderer? = adjustments.flatMap { adjustments in let entityRenderer: LegacyPaintEntityRenderer? = adjustments.flatMap { adjustments in
if let paintingData = adjustments.paintingData, paintingData.hasAnimation { if let paintingData = adjustments.paintingData, paintingData.hasAnimation {
return LegacyPaintEntityRenderer(account: context.account, adjustments: adjustments) return LegacyPaintEntityRenderer(account: context.account, adjustments: adjustments)
@@ -858,6 +857,8 @@ public func createGroupControllerImpl(context: AccountContext, peerIds: [PeerId]
return nil return nil
} }
} }
let tempFile = EngineTempBox.shared.tempFile(fileName: "video.mp4")
let uploadInterface = LegacyLiveUploadInterface(context: context) let uploadInterface = LegacyLiveUploadInterface(context: context)
let signal: SSignal let signal: SSignal
if let url = asset as? URL, url.absoluteString.hasSuffix(".jpg"), let data = try? Data(contentsOf: url, options: [.mappedRead]), let image = UIImage(data: data), let entityRenderer = entityRenderer { if let url = asset as? URL, url.absoluteString.hasSuffix(".jpg"), let data = try? Data(contentsOf: url, options: [.mappedRead]), let image = UIImage(data: data), let entityRenderer = entityRenderer {
@@ -873,14 +874,14 @@ public func createGroupControllerImpl(context: AccountContext, peerIds: [PeerId]
}) })
signal = durationSignal.map(toSignal: { duration -> SSignal in signal = durationSignal.map(toSignal: { duration -> SSignal in
if let duration = duration as? Double { if let duration = duration as? Double {
return TGMediaVideoConverter.renderUIImage(image, duration: duration, adjustments: adjustments, watcher: nil, entityRenderer: entityRenderer)! return TGMediaVideoConverter.renderUIImage(image, duration: duration, adjustments: adjustments, path: tempFile.path, watcher: nil, entityRenderer: entityRenderer)!
} else { } else {
return SSignal.single(nil) return SSignal.single(nil)
} }
}) })
} else if let asset = asset as? AVAsset { } else if let asset = asset as? AVAsset {
signal = TGMediaVideoConverter.convert(asset, adjustments: adjustments, watcher: uploadInterface, entityRenderer: entityRenderer)! signal = TGMediaVideoConverter.convert(asset, adjustments: adjustments, path: tempFile.path, watcher: uploadInterface, entityRenderer: entityRenderer)!
} else { } else {
signal = SSignal.complete() signal = SSignal.complete()
} }
@@ -906,6 +907,8 @@ public func createGroupControllerImpl(context: AccountContext, peerIds: [PeerId]
} }
context.account.postbox.mediaBox.storeResourceData(resource.id, data: data, synchronous: true) context.account.postbox.mediaBox.storeResourceData(resource.id, data: data, synchronous: true)
subscriber.putNext(resource) subscriber.putNext(resource)
EngineTempBox.shared.dispose(tempFile)
} }
} }
subscriber.putCompletion() subscriber.putCompletion()
@@ -265,7 +265,8 @@ public func fetchVideoLibraryMediaResource(account: Account, resource: VideoLibr
return nil return nil
} }
} }
let signal = TGMediaVideoConverter.convert(avAsset, adjustments: adjustments, watcher: VideoConversionWatcher(update: { path, size in let tempFile = EngineTempBox.shared.tempFile(fileName: "video.mp4")
let signal = TGMediaVideoConverter.convert(avAsset, adjustments: adjustments, path: tempFile.path, watcher: VideoConversionWatcher(update: { path, size in
var value = stat() var value = stat()
if stat(path, &value) == 0 { if stat(path, &value) == 0 {
if let data = try? Data(contentsOf: URL(fileURLWithPath: path), options: [.mappedRead]) { if let data = try? Data(contentsOf: URL(fileURLWithPath: path), options: [.mappedRead]) {
@@ -298,6 +299,8 @@ public func fetchVideoLibraryMediaResource(account: Account, resource: VideoLibr
subscriber.putError(.generic) subscriber.putError(.generic)
} }
subscriber.putCompletion() subscriber.putCompletion()
EngineTempBox.shared.dispose(tempFile)
} }
}, error: { _ in }, error: { _ in
subscriber.putError(.generic) subscriber.putError(.generic)
@@ -342,6 +345,7 @@ func fetchLocalFileVideoMediaResource(account: Account, resource: LocalFileVideo
adjustments = TGVideoEditAdjustments(dictionary: dict) adjustments = TGVideoEditAdjustments(dictionary: dict)
} }
} }
let tempFile = EngineTempBox.shared.tempFile(fileName: "video.mp4")
let updatedSize = Atomic<Int64>(value: 0) let updatedSize = Atomic<Int64>(value: 0)
let entityRenderer: LegacyPaintEntityRenderer? = adjustments.flatMap { adjustments in let entityRenderer: LegacyPaintEntityRenderer? = adjustments.flatMap { adjustments in
if let paintingData = adjustments.paintingData, paintingData.hasAnimation { if let paintingData = adjustments.paintingData, paintingData.hasAnimation {
@@ -366,7 +370,7 @@ func fetchLocalFileVideoMediaResource(account: Account, resource: LocalFileVideo
signal = durationSignal.map(toSignal: { duration -> SSignal in signal = durationSignal.map(toSignal: { duration -> SSignal in
if let duration = duration as? Double { if let duration = duration as? Double {
return TGMediaVideoConverter.renderUIImage(image, duration: duration, adjustments: adjustments, watcher: VideoConversionWatcher(update: { path, size in return TGMediaVideoConverter.renderUIImage(image, duration: duration, adjustments: adjustments, path: tempFile.path, watcher: VideoConversionWatcher(update: { path, size in
var value = stat() var value = stat()
if stat(path, &value) == 0 { if stat(path, &value) == 0 {
if let data = try? Data(contentsOf: URL(fileURLWithPath: path), options: [.mappedRead]) { if let data = try? Data(contentsOf: URL(fileURLWithPath: path), options: [.mappedRead]) {
@@ -388,7 +392,7 @@ func fetchLocalFileVideoMediaResource(account: Account, resource: LocalFileVideo
signal = SSignal.single(nil) signal = SSignal.single(nil)
} }
} else { } else {
signal = TGMediaVideoConverter.convert(avAsset, adjustments: adjustments, watcher: VideoConversionWatcher(update: { path, size in signal = TGMediaVideoConverter.convert(avAsset, adjustments: adjustments, path: tempFile.path, watcher: VideoConversionWatcher(update: { path, size in
var value = stat() var value = stat()
if stat(path, &value) == 0 { if stat(path, &value) == 0 {
if let data = try? Data(contentsOf: URL(fileURLWithPath: path), options: [.mappedRead]) { if let data = try? Data(contentsOf: URL(fileURLWithPath: path), options: [.mappedRead]) {
@@ -430,6 +434,8 @@ func fetchLocalFileVideoMediaResource(account: Account, resource: LocalFileVideo
subscriber.putNext(.dataPart(resourceOffset: range!.lowerBound, data: data, range: range!, complete: false)) subscriber.putNext(.dataPart(resourceOffset: range!.lowerBound, data: data, range: range!, complete: false))
subscriber.putNext(.replaceHeader(data: data, range: 0 ..< 1024)) subscriber.putNext(.replaceHeader(data: data, range: 0 ..< 1024))
subscriber.putNext(.dataPart(resourceOffset: 0, data: Data(), range: 0 ..< 0, complete: true)) subscriber.putNext(.dataPart(resourceOffset: 0, data: Data(), range: 0 ..< 0, complete: true))
EngineTempBox.shared.dispose(tempFile)
} }
} }
subscriber.putCompletion() subscriber.putCompletion()
@@ -5,6 +5,7 @@ import Display
import SwiftSignalKit import SwiftSignalKit
import UniversalMediaPlayer import UniversalMediaPlayer
import LegacyComponents import LegacyComponents
import UIKitRuntimeUtils
private final class InstantVideoRadialStatusNodeParameters: NSObject { private final class InstantVideoRadialStatusNodeParameters: NSObject {
let color: UIColor let color: UIColor
@@ -124,6 +125,8 @@ final class InstantVideoRadialStatusNode: ASDisplayNode, UIGestureRecognizerDele
strongSelf.statusValue = status strongSelf.statusValue = status
} }
}) })
self.view.disablesInteractiveTransitionGestureRecognizer = true
} }
deinit { deinit {
@@ -2510,10 +2510,10 @@ final class PeerInfoHeaderNode: ASDisplayNode {
func updateAvatarIsHidden(entry: AvatarGalleryEntry?) { func updateAvatarIsHidden(entry: AvatarGalleryEntry?) {
if let entry = entry { if let entry = entry {
self.avatarListNode.avatarContainerNode.containerNode.isHidden = entry == self.avatarListNode.listContainerNode.galleryEntries.first self.avatarListNode.avatarContainerNode.containerNode.isHidden = entry == self.avatarListNode.listContainerNode.galleryEntries.first
self.editingContentNode.isHidden = entry == self.avatarListNode.listContainerNode.galleryEntries.first self.editingContentNode.avatarNode.isHidden = entry == self.avatarListNode.listContainerNode.galleryEntries.first
} else { } else {
self.avatarListNode.avatarContainerNode.containerNode.isHidden = false self.avatarListNode.avatarContainerNode.containerNode.isHidden = false
self.editingContentNode.isHidden = false self.editingContentNode.avatarNode.isHidden = false
} }
self.avatarListNode.listContainerNode.updateEntryIsHidden(entry: entry) self.avatarListNode.listContainerNode.updateEntryIsHidden(entry: entry)
} }
@@ -7058,6 +7058,8 @@ final class PeerInfoScreenNode: ViewControllerTracingNode, UIScrollViewDelegate
return nil return nil
} }
} }
let tempFile = EngineTempBox.shared.tempFile(fileName: "video.mp4")
let uploadInterface = LegacyLiveUploadInterface(context: context) let uploadInterface = LegacyLiveUploadInterface(context: context)
let signal: SSignal let signal: SSignal
if let url = asset as? URL, url.absoluteString.hasSuffix(".jpg"), let data = try? Data(contentsOf: url, options: [.mappedRead]), let image = UIImage(data: data), let entityRenderer = entityRenderer { if let url = asset as? URL, url.absoluteString.hasSuffix(".jpg"), let data = try? Data(contentsOf: url, options: [.mappedRead]), let image = UIImage(data: data), let entityRenderer = entityRenderer {
@@ -7073,14 +7075,13 @@ final class PeerInfoScreenNode: ViewControllerTracingNode, UIScrollViewDelegate
}) })
signal = durationSignal.map(toSignal: { duration -> SSignal in signal = durationSignal.map(toSignal: { duration -> SSignal in
if let duration = duration as? Double { if let duration = duration as? Double {
return TGMediaVideoConverter.renderUIImage(image, duration: duration, adjustments: adjustments, watcher: nil, entityRenderer: entityRenderer)! return TGMediaVideoConverter.renderUIImage(image, duration: duration, adjustments: adjustments, path: tempFile.path, watcher: nil, entityRenderer: entityRenderer)!
} else { } else {
return SSignal.single(nil) return SSignal.single(nil)
} }
}) })
} else if let asset = asset as? AVAsset { } else if let asset = asset as? AVAsset {
signal = TGMediaVideoConverter.convert(asset, adjustments: adjustments, watcher: uploadInterface, entityRenderer: entityRenderer)! signal = TGMediaVideoConverter.convert(asset, adjustments: adjustments, path: tempFile.path, watcher: uploadInterface, entityRenderer: entityRenderer)!
} else { } else {
signal = SSignal.complete() signal = SSignal.complete()
} }
@@ -7106,6 +7107,8 @@ final class PeerInfoScreenNode: ViewControllerTracingNode, UIScrollViewDelegate
} }
account.postbox.mediaBox.storeResourceData(resource.id, data: data, synchronous: true) account.postbox.mediaBox.storeResourceData(resource.id, data: data, synchronous: true)
subscriber.putNext(resource) subscriber.putNext(resource)
EngineTempBox.shared.dispose(tempFile)
} }
} }
subscriber.putCompletion() subscriber.putCompletion()
@@ -5,6 +5,7 @@ import TelegramPresentationData
import TelegramUIPreferences import TelegramUIPreferences
import TelegramStringFormatting import TelegramStringFormatting
import LocalizedPeerData import LocalizedPeerData
import AccountContext
enum MessageTimestampStatusFormat { enum MessageTimestampStatusFormat {
case regular case regular
@@ -28,7 +29,7 @@ private func dateStringForDay(strings: PresentationStrings, dateTimeFormat: Pres
} }
} }
func stringForMessageTimestampStatus(accountPeerId: PeerId, message: Message, dateTimeFormat: PresentationDateTimeFormat, nameDisplayOrder: PresentationPersonNameOrder, strings: PresentationStrings, format: MessageTimestampStatusFormat = .regular) -> String { func stringForMessageTimestampStatus(accountPeerId: PeerId, message: Message, dateTimeFormat: PresentationDateTimeFormat, nameDisplayOrder: PresentationPersonNameOrder, strings: PresentationStrings, format: MessageTimestampStatusFormat = .regular, associatedData: ChatMessageItemAssociatedData) -> String {
if let adAttribute = message.adAttribute { if let adAttribute = message.adAttribute {
switch adAttribute.messageType { switch adAttribute.messageType {
case .sponsored: case .sponsored:
@@ -86,6 +87,10 @@ func stringForMessageTimestampStatus(accountPeerId: PeerId, message: Message, da
} }
} }
if let subject = associatedData.subject, case .forwardedMessages = subject {
authorTitle = nil
}
if case .regular = format { if case .regular = format {
if let authorTitle = authorTitle, !authorTitle.isEmpty { if let authorTitle = authorTitle, !authorTitle.isEmpty {
dateText = "\(authorTitle), \(dateText)" dateText = "\(authorTitle), \(dateText)"
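Aside (reviewer note, not part of the commit): stringForMessageTimestampStatus now takes the item's associated data so that, inside the forwarded-messages preview, only the date is shown and the author prefix is dropped. A hypothetical call site, just to show where the new argument comes from; the context, presentationData, and item variables are placeholders, not code from this diff.

    let statusText = stringForMessageTimestampStatus(
        accountPeerId: context.account.peerId,
        message: message,
        dateTimeFormat: presentationData.dateTimeFormat,
        nameDisplayOrder: presentationData.nameDisplayOrder,
        strings: presentationData.strings,
        associatedData: item.associatedData // when the subject is .forwardedMessages the author title is suppressed
    )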
@@ -8,7 +8,7 @@ public struct MediaInputSettings: Codable, Equatable {
public let pauseMusicOnRecording: Bool public let pauseMusicOnRecording: Bool
public static var defaultSettings: MediaInputSettings { public static var defaultSettings: MediaInputSettings {
return MediaInputSettings(enableRaiseToSpeak: true, pauseMusicOnRecording: false) return MediaInputSettings(enableRaiseToSpeak: true, pauseMusicOnRecording: true)
} }
public init(enableRaiseToSpeak: Bool, pauseMusicOnRecording: Bool) { public init(enableRaiseToSpeak: Bool, pauseMusicOnRecording: Bool) {
@@ -20,14 +20,14 @@ public struct MediaInputSettings: Codable, Equatable {
let container = try decoder.container(keyedBy: StringCodingKey.self) let container = try decoder.container(keyedBy: StringCodingKey.self)
self.enableRaiseToSpeak = (try container.decode(Int32.self, forKey: "enableRaiseToSpeak")) != 0 self.enableRaiseToSpeak = (try container.decode(Int32.self, forKey: "enableRaiseToSpeak")) != 0
self.pauseMusicOnRecording = (try container.decodeIfPresent(Int32.self, forKey: "pauseMusicOnRecording") ?? 0) != 0 self.pauseMusicOnRecording = (try container.decodeIfPresent(Int32.self, forKey: "pauseMusicOnRecording_v2") ?? 0) != 0
} }
public func encode(to encoder: Encoder) throws { public func encode(to encoder: Encoder) throws {
var container = encoder.container(keyedBy: StringCodingKey.self) var container = encoder.container(keyedBy: StringCodingKey.self)
try container.encode((self.enableRaiseToSpeak ? 1 : 0) as Int32, forKey: "enableRaiseToSpeak") try container.encode((self.enableRaiseToSpeak ? 1 : 0) as Int32, forKey: "enableRaiseToSpeak")
try container.encode((self.pauseMusicOnRecording ? 1 : 0) as Int32, forKey: "pauseMusicOnRecording") try container.encode((self.pauseMusicOnRecording ? 1 : 0) as Int32, forKey: "pauseMusicOnRecording_v2")
} }
public static func ==(lhs: MediaInputSettings, rhs: MediaInputSettings) -> Bool { public static func ==(lhs: MediaInputSettings, rhs: MediaInputSettings) -> Bool {
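Aside (reviewer note, not part of the commit): renaming the coding key to "pauseMusicOnRecording_v2" means values stored under the old "pauseMusicOnRecording" key are no longer read; on decode, the decodeIfPresent fallback applies instead of the previously saved flag. A simplified, self-contained sketch of that behaviour, with the struct trimmed to the one renamed field and StringCodingKey replaced by a plain CodingKeys enum.

    import Foundation

    // Stand-in for MediaInputSettings, reduced to the renamed field.
    struct Settings: Codable {
        var pauseMusicOnRecording: Bool

        enum CodingKeys: String, CodingKey {
            case pauseMusicOnRecording = "pauseMusicOnRecording_v2" // old payloads used "pauseMusicOnRecording"
        }

        init(from decoder: Decoder) throws {
            let container = try decoder.container(keyedBy: CodingKeys.self)
            // Old payloads have no "pauseMusicOnRecording_v2" entry, so this falls back to 0 (false),
            // discarding whatever was stored under the old key.
            self.pauseMusicOnRecording = (try container.decodeIfPresent(Int32.self, forKey: .pauseMusicOnRecording) ?? 0) != 0
        }

        func encode(to encoder: Encoder) throws {
            var container = encoder.container(keyedBy: CodingKeys.self)
            try container.encode((self.pauseMusicOnRecording ? 1 : 0) as Int32, forKey: .pauseMusicOnRecording)
        }
    }

    // Data written before the rename still decodes, but the stored value is ignored:
    let oldJSON = Data(#"{"pauseMusicOnRecording": 1}"#.utf8)
    let decoded = try? JSONDecoder().decode(Settings.self, from: oldJSON)
    print(decoded?.pauseMusicOnRecording ?? false) // prints false: the old key is no longer consulted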