From 95b6455f401f2c8612e94f88fdf5b0c0347ff3d5 Mon Sep 17 00:00:00 2001 From: Ilya Laktyushin Date: Thu, 18 Oct 2018 20:30:44 +0300 Subject: [PATCH] Added Watch reply preset settings --- .../MenuIcons/Watch.imageset/Contents.json | 22 ++ .../Watch.imageset/SettingsWatchIcon@2x.png | Bin 0 -> 925 bytes .../Watch.imageset/SettingsWatchIcon@3x.png | Bin 0 -> 760 bytes TelegramUI.xcodeproj/project.pbxproj | 48 ++++ .../ChatMessageInteractiveMediaNode.swift | 4 +- TelegramUI/DeclareEncodables.swift | 1 + TelegramUI/FetchMediaUtils.swift | 6 +- TelegramUI/LegacyBridgeAudio.swift | 26 +++ TelegramUI/PhotoResources.swift | 2 +- TelegramUI/PreferencesKeys.swift | 2 + TelegramUI/SettingsController.swift | 50 ++++- TelegramUI/StickerResources.swift | 6 +- TelegramUI/StorageUsageController.swift | 2 +- TelegramUI/TGBridgeAudioDecoder.h | 8 + TelegramUI/TGBridgeAudioDecoder.mm | 200 +++++++++++++++++ TelegramUI/TGBridgeAudioEncoder.h | 11 + TelegramUI/TGBridgeAudioEncoder.m | 211 ++++++++++++++++++ TelegramUI/TelegramApplicationContext.swift | 2 + TelegramUI/TelegramUIPrivate/module.modulemap | 2 + TelegramUI/TransformImageArguments.swift | 4 +- TelegramUI/WatchManager.swift | 36 +++ TelegramUI/WatchPresetSettings.swift | 68 ++++++ TelegramUI/WatchSettingsController.swift | 148 ++++++++++++ 23 files changed, 838 insertions(+), 21 deletions(-) create mode 100644 Images.xcassets/Settings/MenuIcons/Watch.imageset/Contents.json create mode 100644 Images.xcassets/Settings/MenuIcons/Watch.imageset/SettingsWatchIcon@2x.png create mode 100644 Images.xcassets/Settings/MenuIcons/Watch.imageset/SettingsWatchIcon@3x.png create mode 100644 TelegramUI/LegacyBridgeAudio.swift create mode 100644 TelegramUI/TGBridgeAudioDecoder.h create mode 100644 TelegramUI/TGBridgeAudioDecoder.mm create mode 100644 TelegramUI/TGBridgeAudioEncoder.h create mode 100644 TelegramUI/TGBridgeAudioEncoder.m create mode 100644 TelegramUI/WatchManager.swift create mode 100644 TelegramUI/WatchPresetSettings.swift create mode 100644 TelegramUI/WatchSettingsController.swift diff --git a/Images.xcassets/Settings/MenuIcons/Watch.imageset/Contents.json b/Images.xcassets/Settings/MenuIcons/Watch.imageset/Contents.json new file mode 100644 index 0000000000..a37ce181a7 --- /dev/null +++ b/Images.xcassets/Settings/MenuIcons/Watch.imageset/Contents.json @@ -0,0 +1,22 @@ +{ + "images" : [ + { + "idiom" : "universal", + "scale" : "1x" + }, + { + "idiom" : "universal", + "filename" : "SettingsWatchIcon@2x.png", + "scale" : "2x" + }, + { + "idiom" : "universal", + "filename" : "SettingsWatchIcon@3x.png", + "scale" : "3x" + } + ], + "info" : { + "version" : 1, + "author" : "xcode" + } +} \ No newline at end of file diff --git a/Images.xcassets/Settings/MenuIcons/Watch.imageset/SettingsWatchIcon@2x.png b/Images.xcassets/Settings/MenuIcons/Watch.imageset/SettingsWatchIcon@2x.png new file mode 100644 index 0000000000000000000000000000000000000000..ced6620bf78f53684d43d073639e30158daf4e93 GIT binary patch literal 925 zcmV;O17iG%P)e3Fsa9*&X*8w^t=2Tc{$FRdu#cS@jgh=;7qd;PHEy%#Pct_$ zM>D?*4i5fXkSep;{09s6g#qp00Rdtc15VfJg8q3Bq`?sKFES?v2s|I?bmmugLb4Kl z&N6AmFDSqfMD)vznG_)vVpf@-ZWxndxhe_N&dT^d5fl>-zM)K;`otwfoLv=b^Zx?Y zNo7MGmvD%{vZ?3!5R)l{Dl2QKr>Bnw1_m#=zkiS#8k#9OI-Ym=jsvuU*8}3?lj-pA zi0+fi<)Vs;YMvQ&3z%&=1*hHJJ$k6zZa4lg++dN>bsm9pa|=kS`uYaAf?9Y4?(Xg- zsoL4uhg96_;H^!VXcNJzw`tzS`5sef#|H#fKFRM<45oMp$tap%Iq;-!szWMmv~ zQ2D3w`}+qNXOg_Vy$vwAr}z9p;jXS8^0bzhS1>gE3lzqU7&7@-;kdN7w;#te{(FFp zTeOyzc9N_4`2}P#Fiz@mu`5?KHFexh>RlWH%PnCXlX3&K@d!LJGD=cqu~>Ls8ydHH 
z1THVHBB>f5pX3Q@n&uD~6N==!n7q6~uAoi$z}HzF8=K$-DT2oEd_YD2zj}T`9&jfWE^g%C;#(K-j*gR%kd=;(laG&-kB^j(kCgxa|BjB6zrVkakCl>=q>qo2j*gU$j+Bm$ zl#h>;kB^j(kCTp$la7v)|NsB@_xP2Sn)>?s{r&x!nV!_t*6{H0@$vDSo1mPWpwQ6L z?d|Tsz`~!Oqqw-c=;-N~n4G@8!IF}f`1tv%s;-)vo}r$U99w9tIg^mPCL z0p&?VK~#8N?b*qWqc9MF;ld^mlMoWpnKF=lUwYr)|H(#HYer1sPW2e-#($0U9jr*$ zRZuSSTK*_hgN_LBrv;)jP~pgLc@fz2+6h)EX?qs6FR^;Ef2MY0fpsL}Zkg>#tSi|o zvA?jsZvMhuiFGFb97T-v7JspQQL`)hV3JsO(y$)Zp9cVK2Dc3mB@Av7frlJ<&_cTW zeP8IWiv4?zbS>~{_Ab-qvOC)KAtl*|Tm5gh52TwyNV+fjycg09K~e1e_`)6EQ?7y` zxhqYza&h0r2FQ+Eyli{Sj5~gFa9<7k{DJm9 zo(=r!;I6O#|98e+JGg~$C*Qgg;}#CCHX%Cx-nflhu5lZ;aT}Na`^Nu0=D$Di-*@=$ zXPD2(`y|u-m-)WV|GwaV53#-90>WKb-39sH6i76)urnkQZ14F2)BHhE?!+=LnNY4` zoA0Eg`{^)`8gb3fxaNIa^GU9GD%1RzYhKMXU+3BfFzqjx_bt%7Vb~8b?6X+*Z*2QQ q{{3coADf11_@DdOp$gOUee7?wbC$gdyO72J0000 Signal { +public func freeMediaFileInteractiveFetched(account: Account, fileReference: FileMediaReference) -> Signal { return fetchedMediaResource(postbox: account.postbox, reference: fileReference.resourceReference(fileReference.media.resource)) } @@ -21,7 +21,7 @@ private func fetchCategoryForFile(_ file: TelegramMediaFile) -> FetchManagerCate } } -func messageMediaFileInteractiveFetched(account: Account, message: Message, file: TelegramMediaFile, userInitiated: Bool) -> Signal { +public func messageMediaFileInteractiveFetched(account: Account, message: Message, file: TelegramMediaFile, userInitiated: Bool) -> Signal { return account.telegramApplicationContext.fetchManager.interactivelyFetched(category: fetchCategoryForFile(file), location: .chat(message.id.peerId), locationKey: .messageId(message.id), resourceReference: AnyMediaReference.message(message: MessageReference(message), media: file).resourceReference(file.resource), statsCategory: statsCategoryForFileWithAttributes(file.attributes), elevatedPriority: false, userInitiated: userInitiated) } @@ -29,7 +29,7 @@ func messageMediaFileCancelInteractiveFetch(account: Account, messageId: Message account.telegramApplicationContext.fetchManager.cancelInteractiveFetches(category: fetchCategoryForFile(file), location: .chat(messageId.peerId), locationKey: .messageId(messageId), resource: file.resource) } -func messageMediaImageInteractiveFetched(account: Account, message: Message, image: TelegramMediaImage, resource: MediaResource) -> Signal { +public func messageMediaImageInteractiveFetched(account: Account, message: Message, image: TelegramMediaImage, resource: MediaResource) -> Signal { return account.telegramApplicationContext.fetchManager.interactivelyFetched(category: .image, location: .chat(message.id.peerId), locationKey: .messageId(message.id), resourceReference: AnyMediaReference.message(message: MessageReference(message), media: image).resourceReference(resource), statsCategory: .image, elevatedPriority: false, userInitiated: true) } diff --git a/TelegramUI/LegacyBridgeAudio.swift b/TelegramUI/LegacyBridgeAudio.swift new file mode 100644 index 0000000000..3a0d9b2744 --- /dev/null +++ b/TelegramUI/LegacyBridgeAudio.swift @@ -0,0 +1,26 @@ +import Foundation +import SwiftSignalKit + +import TelegramUIPrivateModule + +public func legacyDecodeOpusAudio(path: String, outputPath: String) -> Signal { + return Signal { subscriber in + let decoder = TGBridgeAudioDecoder(url: URL(fileURLWithPath: path), outputUrl: URL(fileURLWithPath: outputPath)) + decoder?.start(completion: { + subscriber.putNext(outputPath) + subscriber.putCompletion() + }) + return EmptyDisposable + } +} + +public func legacyEncodeOpusAudio(path: String) -> Signal<(Data?, Int32), 
NoError> { + return Signal { subscriber in + let encoder = TGBridgeAudioEncoder(url: URL(fileURLWithPath: path)) + encoder?.start(completion: { (dataItem, duration) in + subscriber.putNext((dataItem?.data(), duration)) + subscriber.putCompletion() + }) + return EmptyDisposable + } +} diff --git a/TelegramUI/PhotoResources.swift b/TelegramUI/PhotoResources.swift index 3726792e93..e212a787ee 100644 --- a/TelegramUI/PhotoResources.swift +++ b/TelegramUI/PhotoResources.swift @@ -663,7 +663,7 @@ public func chatMessagePhotoInternal(photoData: Signal<(Data?, Data?, Bool), NoE } } -private func chatMessagePhotoThumbnailDatas(account: Account, photoReference: ImageMediaReference) -> Signal<(Data?, Data?, Bool), NoError> { +private func chatMessagePhotoThumbnailDatas(account: Account, photoReference: ImageMediaReference, onlyFullSize: Bool = false) -> Signal<(Data?, Data?, Bool), NoError> { let fullRepresentationSize: CGSize = CGSize(width: 1280.0, height: 1280.0) if let smallestRepresentation = smallestImageRepresentation(photoReference.media.representations), let largestRepresentation = photoReference.media.representationForDisplayAtSize(fullRepresentationSize) { diff --git a/TelegramUI/PreferencesKeys.swift b/TelegramUI/PreferencesKeys.swift index b8a1997b5c..e0b37fd774 100644 --- a/TelegramUI/PreferencesKeys.swift +++ b/TelegramUI/PreferencesKeys.swift @@ -17,6 +17,7 @@ private enum ApplicationSpecificPreferencesKeyValues: Int32 { case experimentalUISettings = 11 case contactSynchronizationSettings = 12 case stickerSettings = 13 + case watchPresetSettings = 14 } public struct ApplicationSpecificPreferencesKeys { @@ -34,4 +35,5 @@ public struct ApplicationSpecificPreferencesKeys { public static let experimentalUISettings = applicationSpecificPreferencesKey(ApplicationSpecificPreferencesKeyValues.experimentalUISettings.rawValue) public static let contactSynchronizationSettings = applicationSpecificPreferencesKey(ApplicationSpecificPreferencesKeyValues.contactSynchronizationSettings.rawValue) public static let stickerSettings = applicationSpecificPreferencesKey(ApplicationSpecificPreferencesKeyValues.stickerSettings.rawValue) + public static let watchPresetSettings = applicationSpecificPreferencesKey(ApplicationSpecificPreferencesKeyValues.watchPresetSettings.rawValue) } diff --git a/TelegramUI/SettingsController.swift b/TelegramUI/SettingsController.swift index 1558804242..4cc17f524a 100644 --- a/TelegramUI/SettingsController.swift +++ b/TelegramUI/SettingsController.swift @@ -17,7 +17,8 @@ private final class SettingsItemIcons { static let appearance = UIImage(bundleImageName: "Settings/MenuIcons/Appearance")?.precomposed() static let language = UIImage(bundleImageName: "Settings/MenuIcons/Language")?.precomposed() - static let secureId = UIImage(bundleImageName: "Settings/MenuIcons/Passport")?.precomposed() + static let passport = UIImage(bundleImageName: "Settings/MenuIcons/Passport")?.precomposed() + static let watch = UIImage(bundleImageName: "Settings/MenuIcons/Watch")?.precomposed() static let support = UIImage(bundleImageName: "Settings/MenuIcons/Support")?.precomposed() static let faq = UIImage(bundleImageName: "Settings/MenuIcons/Faq")?.precomposed() @@ -42,6 +43,7 @@ private struct SettingsItemArguments { let presentController: (ViewController) -> Void let openLanguage: () -> Void let openPassport: () -> Void + let openWatch: () -> Void let openSupport: () -> Void let openFaq: () -> Void let openEditing: () -> Void @@ -54,7 +56,7 @@ private enum SettingsSection: Int32 { case proxy case 
media case generalSettings - case passport + case advanced case help } @@ -75,6 +77,7 @@ private enum SettingsEntry: ItemListNodeEntry { case themes(PresentationTheme, UIImage?, String) case language(PresentationTheme, UIImage?, String, String) case passport(PresentationTheme, UIImage?, String, String) + case watch(PresentationTheme, UIImage?, String, String) case askAQuestion(PresentationTheme, UIImage?, String) case faq(PresentationTheme, UIImage?, String) @@ -89,8 +92,8 @@ private enum SettingsEntry: ItemListNodeEntry { return SettingsSection.media.rawValue case .notificationsAndSounds, .privacyAndSecurity, .dataAndStorage, .themes, .language: return SettingsSection.generalSettings.rawValue - case .passport: - return SettingsSection.passport.rawValue + case .passport, .watch : + return SettingsSection.advanced.rawValue case .askAQuestion, .faq: return SettingsSection.help.rawValue } @@ -124,10 +127,12 @@ private enum SettingsEntry: ItemListNodeEntry { return 11 case .passport: return 12 - case .askAQuestion: + case .watch: return 13 - case .faq: + case .askAQuestion: return 14 + case .faq: + return 15 } } @@ -240,6 +245,12 @@ private enum SettingsEntry: ItemListNodeEntry { } else { return false } + case let .watch(lhsTheme, lhsImage, lhsText, lhsValue): + if case let .watch(rhsTheme, rhsImage, rhsText, rhsValue) = rhs, lhsTheme === rhsTheme, lhsImage === rhsImage, lhsText == rhsText, lhsValue == rhsValue { + return true + } else { + return false + } case let .askAQuestion(lhsTheme, lhsImage, lhsText): if case let .askAQuestion(rhsTheme, rhsImage, rhsText) = rhs, lhsTheme === rhsTheme, lhsImage === rhsImage, lhsText == rhsText { return true @@ -320,6 +331,10 @@ private enum SettingsEntry: ItemListNodeEntry { return ItemListDisclosureItem(theme: theme, icon: image, title: text, label: value, sectionId: ItemListSectionId(self.section), style: .blocks, action: { arguments.openPassport() }) + case let .watch(theme, image, text, value): + return ItemListDisclosureItem(theme: theme, icon: image, title: text, label: value, sectionId: ItemListSectionId(self.section), style: .blocks, action: { + arguments.openWatch() + }) case let .askAQuestion(theme, image, text): return ItemListDisclosureItem(theme: theme, icon: image, title: text, label: "", sectionId: ItemListSectionId(self.section), style: .blocks, action: { arguments.openSupport() @@ -351,7 +366,7 @@ private struct SettingsState: Equatable { } } -private func settingsEntries(presentationData: PresentationData, state: SettingsState, view: PeerView, proxySettings: ProxySettings, unreadTrendingStickerPacks: Int, archivedPacks: [ArchivedStickerPackItem]?, hasPassport: Bool) -> [SettingsEntry] { +private func settingsEntries(presentationData: PresentationData, state: SettingsState, view: PeerView, proxySettings: ProxySettings, unreadTrendingStickerPacks: Int, archivedPacks: [ArchivedStickerPackItem]?, hasPassport: Bool, hasWatchApp: Bool) -> [SettingsEntry] { var entries: [SettingsEntry] = [] if let peer = peerViewMainPeer(view) as? 
TelegramUser { @@ -390,7 +405,10 @@ private func settingsEntries(presentationData: PresentationData, state: Settings entries.append(.language(presentationData.theme, SettingsItemIcons.language, presentationData.strings.Settings_AppLanguage, presentationData.strings.Localization_LanguageName)) if hasPassport { - entries.append(.passport(presentationData.theme, SettingsItemIcons.secureId, presentationData.strings.Settings_Passport, "")) + entries.append(.passport(presentationData.theme, SettingsItemIcons.passport, presentationData.strings.Settings_Passport, "")) + } + if hasWatchApp { + entries.append(.watch(presentationData.theme, SettingsItemIcons.watch, presentationData.strings.Settings_AppleWatch, "")) } entries.append(.askAQuestion(presentationData.theme, SettingsItemIcons.support, presentationData.strings.Settings_Support)) @@ -516,6 +534,9 @@ public func settingsController(account: Account, accountManager: AccountManager) }, openPassport: { let controller = SecureIdAuthController(account: account, mode: .list) presentControllerImpl?(controller, nil) + }, openWatch: { + let controller = watchSettingsController(account: account) + pushControllerImpl?(controller) }, openSupport: { let supportPeer = Promise() supportPeer.set(supportPeerId(account: account)) @@ -654,8 +675,13 @@ public func settingsController(account: Account, accountManager: AccountManager) } updatePassport() - let signal = combineLatest(account.telegramApplicationContext.presentationData, statePromise.get(), peerView, account.postbox.preferencesView(keys: [PreferencesKeys.proxySettings]), combineLatest(account.viewTracker.featuredStickerPacks(), archivedPacks.get()), hasPassport.get()) - |> map { presentationData, state, view, preferences, featuredAndArchived, hasPassport -> (ItemListControllerState, (ItemListNodeState, SettingsEntry.ItemGenerationArguments)) in + let hasWatchApp = Promise(false) + if let context = account.applicationContext as? TelegramApplicationContext, let watchManager = context.watchManager { + hasWatchApp.set(watchManager.watchAppInstalled) + } + + let signal = combineLatest(account.telegramApplicationContext.presentationData, statePromise.get(), peerView, account.postbox.preferencesView(keys: [PreferencesKeys.proxySettings]), combineLatest(account.viewTracker.featuredStickerPacks(), archivedPacks.get()), combineLatest(hasPassport.get(), hasWatchApp.get())) + |> map { presentationData, state, view, preferences, featuredAndArchived, hasPassportAndWatch -> (ItemListControllerState, (ItemListNodeState, SettingsEntry.ItemGenerationArguments)) in let proxySettings: ProxySettings if let value = preferences.values[PreferencesKeys.proxySettings] as? 
ProxySettings { proxySettings = value @@ -679,7 +705,9 @@ public func settingsController(account: Account, accountManager: AccountManager) } } - let listState = ItemListNodeState(entries: settingsEntries(presentationData: presentationData, state: state, view: view, proxySettings: proxySettings, unreadTrendingStickerPacks: unreadTrendingStickerPacks, archivedPacks: featuredAndArchived.1, hasPassport: hasPassport), style: .blocks) + let (hasPassport, hasWatchApp) = hasPassportAndWatch + + let listState = ItemListNodeState(entries: settingsEntries(presentationData: presentationData, state: state, view: view, proxySettings: proxySettings, unreadTrendingStickerPacks: unreadTrendingStickerPacks, archivedPacks: featuredAndArchived.1, hasPassport: hasPassport, hasWatchApp: hasWatchApp), style: .blocks) return (controllerState, (listState, arguments)) } |> afterDisposed { diff --git a/TelegramUI/StickerResources.swift b/TelegramUI/StickerResources.swift index edec29d278..5e86b6505c 100644 --- a/TelegramUI/StickerResources.swift +++ b/TelegramUI/StickerResources.swift @@ -147,7 +147,7 @@ public func chatMessageSticker(account: Account, file: TelegramMediaFile, small: return signal |> map { (thumbnailData, fullSizeData, fullSizeComplete) in return { arguments in - let context = DrawingContext(size: arguments.drawingSize, clear: true) + let context = DrawingContext(size: arguments.drawingSize, clear: arguments.emptyColor == nil) let drawingRect = arguments.drawingRect let fittedSize = arguments.imageSize @@ -179,6 +179,10 @@ public func chatMessageSticker(account: Account, file: TelegramMediaFile, small: context.withFlippedContext { c in c.setBlendMode(.copy) + if let color = arguments.emptyColor { + c.fill(drawingRect) + } + if let blurredThumbnailImage = blurredThumbnailImage { c.interpolationQuality = .low c.draw(blurredThumbnailImage.cgImage!, in: fittedRect) diff --git a/TelegramUI/StorageUsageController.swift b/TelegramUI/StorageUsageController.swift index c9c0174f76..5142210d44 100644 --- a/TelegramUI/StorageUsageController.swift +++ b/TelegramUI/StorageUsageController.swift @@ -523,7 +523,7 @@ func storageUsageController(account: Account) -> ViewController { if !items.isEmpty { items.append(ActionSheetButtonItem(title: presentationData.strings.Cache_Clear("\(dataSizeString(totalSize))").0, action: { if let statsPromise = statsPromise { - var clearCategories = sizeIndex.keys.filter({ sizeIndex[$0]!.0 }) + let clearCategories = sizeIndex.keys.filter({ sizeIndex[$0]!.0 }) //var clearSize: Int64 = 0 var clearMediaIds = Set() diff --git a/TelegramUI/TGBridgeAudioDecoder.h b/TelegramUI/TGBridgeAudioDecoder.h new file mode 100644 index 0000000000..9332bf611b --- /dev/null +++ b/TelegramUI/TGBridgeAudioDecoder.h @@ -0,0 +1,8 @@ +#import + +@interface TGBridgeAudioDecoder : NSObject + +- (instancetype)initWithURL:(NSURL *)url outputUrl:(NSURL *)outputURL; +- (void)startWithCompletion:(void (^)(void))completion; + +@end diff --git a/TelegramUI/TGBridgeAudioDecoder.mm b/TelegramUI/TGBridgeAudioDecoder.mm new file mode 100644 index 0000000000..a65f3d3aa7 --- /dev/null +++ b/TelegramUI/TGBridgeAudioDecoder.mm @@ -0,0 +1,200 @@ +#import "TGBridgeAudioDecoder.h" + +#import +#import + +#import + +#import "opusfile.h" +#import "opusenc.h" + +const NSInteger TGBridgeAudioDecoderInputSampleRate = 48000; +const NSInteger TGBridgeAudioDecoderResultSampleRate = 24000; +const NSUInteger TGBridgeAudioDecoderBufferSize = 32768; + +#define checkResult(result,operation) 
(_checkResultLite((result),(operation),__FILE__,__LINE__)) + +struct TGAudioBuffer +{ + NSUInteger capacity; + uint8_t *data; + NSUInteger size; + int64_t pcmOffset; +}; + +inline TGAudioBuffer *TGAudioBufferWithCapacity(NSUInteger capacity) +{ + TGAudioBuffer *audioBuffer = (TGAudioBuffer *)malloc(sizeof(TGAudioBuffer)); + audioBuffer->capacity = capacity; + audioBuffer->data = (uint8_t *)malloc(capacity); + audioBuffer->size = 0; + audioBuffer->pcmOffset = 0; + return audioBuffer; +} + +inline void TGAudioBufferDispose(TGAudioBuffer *audioBuffer) +{ + if (audioBuffer != NULL) + { + free(audioBuffer->data); + free(audioBuffer); + } +} + +static inline bool _checkResultLite(OSStatus result, const char *operation, const char* file, int line) +{ + if ( result != noErr ) + { + NSLog(@"%s:%d: %s result %d %08X %4.4s\n", file, line, operation, (int)result, (int)result, (char*)&result); + return NO; + } + return YES; +} + +@interface TGBridgeAudioDecoder () +{ + NSURL *_url; + NSURL *_resultURL; + + OggOpusFile *_opusFile; + + bool _finished; + bool _cancelled; +} +@end + +@implementation TGBridgeAudioDecoder + +- (instancetype)initWithURL:(NSURL *)url outputUrl:(NSURL *)outputUrl +{ + self = [super init]; + if (self != nil) + { + _url = url; + + int64_t randomId = 0; + arc4random_buf(&randomId, 8); + _resultURL = outputUrl; + } + return self; +} + +- (void)startWithCompletion:(void (^)(void))completion +{ + [[TGBridgeAudioDecoder processingQueue] dispatch:^ + { + int error = OPUS_OK; + _opusFile = op_open_file(_url.path.UTF8String, &error); + if (_opusFile == NULL || error != OPUS_OK) + { + return; + } + + AudioStreamBasicDescription sourceFormat; + sourceFormat.mSampleRate = TGBridgeAudioDecoderInputSampleRate; + sourceFormat.mFormatID = kAudioFormatLinearPCM; + sourceFormat.mFormatFlags = kAudioFormatFlagIsSignedInteger | kAudioFormatFlagIsPacked; + sourceFormat.mFramesPerPacket = 1; + sourceFormat.mChannelsPerFrame = 1; + sourceFormat.mBitsPerChannel = 16; + sourceFormat.mBytesPerPacket = 2; + sourceFormat.mBytesPerFrame = 2; + + AudioStreamBasicDescription destFormat; + memset(&destFormat, 0, sizeof(destFormat)); + destFormat.mChannelsPerFrame = sourceFormat.mChannelsPerFrame; + destFormat.mFormatID = kAudioFormatMPEG4AAC; + destFormat.mSampleRate = TGBridgeAudioDecoderResultSampleRate; + UInt32 size = sizeof(destFormat); + if (!checkResult(AudioFormatGetProperty(kAudioFormatProperty_FormatInfo, 0, NULL, &size, &destFormat), + "AudioFormatGetProperty(kAudioFormatProperty_FormatInfo)")) + { + return; + } + + ExtAudioFileRef destinationFile; + if (!checkResult(ExtAudioFileCreateWithURL((__bridge CFURLRef)_resultURL, kAudioFileM4AType, &destFormat, NULL, kAudioFileFlags_EraseFile, &destinationFile), "ExtAudioFileCreateWithURL")) + { + return; + } + + if (!checkResult(ExtAudioFileSetProperty(destinationFile, kExtAudioFileProperty_ClientDataFormat, size, &sourceFormat), + "ExtAudioFileSetProperty(destinationFile, kExtAudioFileProperty_ClientDataFormat")) + { + return; + } + + bool canResumeAfterInterruption = false; + AudioConverterRef converter; + size = sizeof(converter); + if (checkResult(ExtAudioFileGetProperty(destinationFile, kExtAudioFileProperty_AudioConverter, &size, &converter), + "ExtAudioFileGetProperty(kExtAudioFileProperty_AudioConverter;)")) + { + UInt32 canResume = 0; + size = sizeof(canResume); + if (AudioConverterGetProperty(converter, kAudioConverterPropertyCanResumeFromInterruption, &size, &canResume) == noErr) + canResumeAfterInterruption = canResume; + } + + uint8_t 
srcBuffer[TGBridgeAudioDecoderBufferSize]; + while (!_cancelled) + { + AudioBufferList bufferList; + bufferList.mNumberBuffers = 1; + bufferList.mBuffers[0].mNumberChannels = sourceFormat.mChannelsPerFrame; + bufferList.mBuffers[0].mDataByteSize = TGBridgeAudioDecoderBufferSize; + bufferList.mBuffers[0].mData = srcBuffer; + + uint32_t writtenOutputBytes = 0; + while (writtenOutputBytes < TGBridgeAudioDecoderBufferSize) + { + int32_t readSamples = op_read(_opusFile, (opus_int16 *)(srcBuffer + writtenOutputBytes), (TGBridgeAudioDecoderBufferSize - writtenOutputBytes) / sourceFormat.mBytesPerFrame, NULL); + + if (readSamples > 0) + writtenOutputBytes += readSamples * sourceFormat.mBytesPerFrame; + else + break; + } + bufferList.mBuffers[0].mDataByteSize = writtenOutputBytes; + int32_t nFrames = writtenOutputBytes / sourceFormat.mBytesPerFrame; + + if (nFrames == 0) + break; + + OSStatus status = ExtAudioFileWrite(destinationFile, nFrames, &bufferList); + if (status == kExtAudioFileError_CodecUnavailableInputConsumed) + { + //TGLog(@"1"); + } + else if (status == kExtAudioFileError_CodecUnavailableInputNotConsumed) + { + //TGLog(@"2"); + } + else if (!checkResult(status, "ExtAudioFileWrite")) + { + //TGLog(@"3"); + } + } + + ExtAudioFileDispose(destinationFile); + + if (completion != nil) + completion(); + }]; +} + ++ (SQueue *)processingQueue +{ + static SQueue *queue = nil; + static dispatch_once_t onceToken; + dispatch_once(&onceToken, ^ + { + static const char *queueSpecific = "org.telegram.opusAudioDecoderQueue"; + dispatch_queue_t dispatchQueue = dispatch_queue_create("org.telegram.opusAudioDecoderQueue", DISPATCH_QUEUE_SERIAL); + dispatch_queue_set_specific(dispatchQueue, queueSpecific, (void *)queueSpecific, NULL); + queue = [SQueue wrapConcurrentNativeQueue:dispatchQueue]; + }); + return queue; +} + +@end diff --git a/TelegramUI/TGBridgeAudioEncoder.h b/TelegramUI/TGBridgeAudioEncoder.h new file mode 100644 index 0000000000..ae30698b1a --- /dev/null +++ b/TelegramUI/TGBridgeAudioEncoder.h @@ -0,0 +1,11 @@ +#import + +@class TGDataItem; +@class TGLiveUploadActorData; + +@interface TGBridgeAudioEncoder : NSObject + +- (instancetype)initWithURL:(NSURL *)url; +- (void)startWithCompletion:(void (^)(TGDataItem *, int32_t))completion; + +@end diff --git a/TelegramUI/TGBridgeAudioEncoder.m b/TelegramUI/TGBridgeAudioEncoder.m new file mode 100644 index 0000000000..d5d75b8b0b --- /dev/null +++ b/TelegramUI/TGBridgeAudioEncoder.m @@ -0,0 +1,211 @@ +#import "TGBridgeAudioEncoder.h" +#import + +#import + +#import "opus.h" +#import "opusenc.h" + +#import "TGDataItem.h" + +const NSInteger TGBridgeAudioEncoderSampleRate = 16000; + +@interface TGBridgeAudioEncoder () +{ + AVAssetReader *_assetReader; + AVAssetReaderOutput *_readerOutput; + + NSMutableData *_audioBuffer; + TGDataItem *_tempFileItem; + TGOggOpusWriter *_oggWriter; +} +@end + +@implementation TGBridgeAudioEncoder + +- (instancetype)initWithURL:(NSURL *)url +{ + self = [super init]; + if (self != nil) + { + AVURLAsset *asset = [[AVURLAsset alloc] initWithURL:url options:nil]; + if (asset == nil || asset.tracks.count == 0) + { + //TGLog(@"Asset create fail"); + return nil; + } + + NSError *error; + _assetReader = [[AVAssetReader alloc] initWithAsset:asset error:&error]; + + NSDictionary *outputSettings = @ + { + AVFormatIDKey: @(kAudioFormatLinearPCM), + AVSampleRateKey: @(TGBridgeAudioEncoderSampleRate), + AVNumberOfChannelsKey: @1, + AVLinearPCMBitDepthKey: @16, + AVLinearPCMIsFloatKey: @false, + AVLinearPCMIsBigEndianKey: @false, + 
AVLinearPCMIsNonInterleaved: @false + }; + + _readerOutput = [AVAssetReaderAudioMixOutput assetReaderAudioMixOutputWithAudioTracks:asset.tracks audioSettings:outputSettings]; + + [_assetReader addOutput:_readerOutput]; + + _tempFileItem = [[TGDataItem alloc] init]; + } + return self; +} + +- (void)dealloc +{ + [self cleanup]; +} + +- (void)cleanup +{ + _oggWriter = nil; +} + ++ (SQueue *)processingQueue +{ + static SQueue *queue = nil; + static dispatch_once_t onceToken; + dispatch_once(&onceToken, ^ + { + static const char *queueSpecific = "org.telegram.opusAudioEncoderQueue"; + dispatch_queue_t dispatchQueue = dispatch_queue_create("org.telegram.opusAudioEncoderQueue", DISPATCH_QUEUE_SERIAL); + dispatch_queue_set_specific(dispatchQueue, queueSpecific, (void *)queueSpecific, NULL); + queue = [SQueue wrapConcurrentNativeQueue:dispatchQueue]; + }); + return queue; +} + +- (void)startWithCompletion:(void (^)(TGDataItem *, int32_t))completion +{ + CFAbsoluteTime startTime = CFAbsoluteTimeGetCurrent(); + + [[TGBridgeAudioEncoder processingQueue] dispatch:^ + { + _oggWriter = [[TGOggOpusWriter alloc] init]; + if (![_oggWriter beginWithDataItem:_tempFileItem]) + { + //TGLog(@"[TGBridgeAudioEncoder#%x error initializing ogg opus writer]", self); + [self cleanup]; + return; + } + + [_assetReader startReading]; + + while (_assetReader.status != AVAssetReaderStatusCompleted) + { + if (_assetReader.status == AVAssetReaderStatusReading) + { + CMSampleBufferRef nextBuffer = [_readerOutput copyNextSampleBuffer]; + if (nextBuffer) + { + AudioBufferList abl; + CMBlockBufferRef blockBuffer; + CMSampleBufferGetAudioBufferListWithRetainedBlockBuffer(nextBuffer, NULL, &abl, sizeof(abl), NULL, NULL, kCMSampleBufferFlag_AudioBufferList_Assure16ByteAlignment, &blockBuffer); + + [[TGBridgeAudioEncoder processingQueue] dispatch:^ + { + [self _processBuffer:&abl.mBuffers[0]]; + + CFRelease(nextBuffer); + CFRelease(blockBuffer); + }]; + } + else + { + break; + } + } + } + + TGDataItem *dataItemResult = nil; + NSTimeInterval durationResult = 0.0; + + NSUInteger totalBytes = 0; + + if (_assetReader.status == AVAssetReaderStatusCompleted) + { + if (_oggWriter != nil && [_oggWriter writeFrame:NULL frameByteCount:0]) + { + dataItemResult = _tempFileItem; + durationResult = [_oggWriter encodedDuration]; + totalBytes = [_oggWriter encodedBytes]; + } + + [self cleanup]; + } + + //TGLog(@"[TGBridgeAudioEncoder#%x convert time: %f ms]", self, (CFAbsoluteTimeGetCurrent() - startTime) * 1000.0); + + if (completion != nil) + completion(dataItemResult, (int32_t)durationResult); + }]; +} + +- (void)_processBuffer:(AudioBuffer const *)buffer +{ + @autoreleasepool + { + if (_oggWriter == nil) + return; + + static const int millisecondsPerPacket = 60; + static const int encoderPacketSizeInBytes = TGBridgeAudioEncoderSampleRate / 1000 * millisecondsPerPacket * 2; + + unsigned char currentEncoderPacket[encoderPacketSizeInBytes]; + + int bufferOffset = 0; + + while (true) + { + int currentEncoderPacketSize = 0; + + while (currentEncoderPacketSize < encoderPacketSizeInBytes) + { + if (_audioBuffer.length != 0) + { + int takenBytes = MIN((int)_audioBuffer.length, encoderPacketSizeInBytes - currentEncoderPacketSize); + if (takenBytes != 0) + { + memcpy(currentEncoderPacket + currentEncoderPacketSize, _audioBuffer.bytes, takenBytes); + [_audioBuffer replaceBytesInRange:NSMakeRange(0, takenBytes) withBytes:NULL length:0]; + currentEncoderPacketSize += takenBytes; + } + } + else if (bufferOffset < (int)buffer->mDataByteSize) + { + int 
takenBytes = MIN((int)buffer->mDataByteSize - bufferOffset, encoderPacketSizeInBytes - currentEncoderPacketSize); + if (takenBytes != 0) + { + memcpy(currentEncoderPacket + currentEncoderPacketSize, ((const char *)buffer->mData) + bufferOffset, takenBytes); + bufferOffset += takenBytes; + currentEncoderPacketSize += takenBytes; + } + } + else + break; + } + + if (currentEncoderPacketSize < encoderPacketSizeInBytes) + { + if (_audioBuffer == nil) + _audioBuffer = [[NSMutableData alloc] initWithCapacity:encoderPacketSizeInBytes]; + [_audioBuffer appendBytes:currentEncoderPacket length:currentEncoderPacketSize]; + + break; + } + else + { + [_oggWriter writeFrame:currentEncoderPacket frameByteCount:(NSUInteger)currentEncoderPacketSize]; + } + } + } +} + +@end diff --git a/TelegramUI/TelegramApplicationContext.swift b/TelegramUI/TelegramApplicationContext.swift index 014f1044ff..506781c87a 100644 --- a/TelegramUI/TelegramApplicationContext.swift +++ b/TelegramUI/TelegramApplicationContext.swift @@ -99,6 +99,8 @@ public final class TelegramApplicationContext { } private var hasOngoingCallDisposable: Disposable? + public var watchManager: WatchManager? + private var immediateExperimentalUISettingsValue = Atomic(value: ExperimentalUISettings.defaultSettings) public var immediateExperimentalUISettings: ExperimentalUISettings { return self.immediateExperimentalUISettingsValue.with { $0 } diff --git a/TelegramUI/TelegramUIPrivate/module.modulemap b/TelegramUI/TelegramUIPrivate/module.modulemap index 3d0eee950b..4f3fe4bea8 100644 --- a/TelegramUI/TelegramUIPrivate/module.modulemap +++ b/TelegramUI/TelegramUIPrivate/module.modulemap @@ -28,4 +28,6 @@ module TelegramUIPrivateModule { header "../TGEmojiSuggestions.h" header "../TGChannelIntroController.h" header "../EDSunriseSet.h" + header "../TGBridgeAudioDecoder.h" + header "../TGBridgeAudioEncoder.h" } diff --git a/TelegramUI/TransformImageArguments.swift b/TelegramUI/TransformImageArguments.swift index 08061757b6..039fae22a7 100644 --- a/TelegramUI/TransformImageArguments.swift +++ b/TelegramUI/TransformImageArguments.swift @@ -13,9 +13,9 @@ public struct TransformImageArguments: Equatable { public let boundingSize: CGSize public let intrinsicInsets: UIEdgeInsets public let resizeMode: TransformImageResizeMode - public let emptyColor: UIColor + public let emptyColor: UIColor? - public init(corners: ImageCorners, imageSize: CGSize, boundingSize: CGSize, intrinsicInsets: UIEdgeInsets, resizeMode: TransformImageResizeMode = .fill(.black), emptyColor: UIColor = .white) { + public init(corners: ImageCorners, imageSize: CGSize, boundingSize: CGSize, intrinsicInsets: UIEdgeInsets, resizeMode: TransformImageResizeMode = .fill(.black), emptyColor: UIColor? 
= nil) { self.corners = corners self.imageSize = imageSize self.boundingSize = boundingSize diff --git a/TelegramUI/WatchManager.swift b/TelegramUI/WatchManager.swift new file mode 100644 index 0000000000..4267029d30 --- /dev/null +++ b/TelegramUI/WatchManager.swift @@ -0,0 +1,36 @@ +import Foundation +import SwiftSignalKit +import Postbox +import TelegramCore + +public final class WatchManagerArguments { + public let appInstalled: Signal + public let navigateToMessageRequested: Signal + public let runningRequests: Signal + + public init(appInstalled: Signal, navigateToMessageRequested: Signal, runningRequests: Signal) { + self.appInstalled = appInstalled + self.navigateToMessageRequested = navigateToMessageRequested + self.runningRequests = runningRequests + } +} + +public final class WatchManager { + private let arguments: WatchManagerArguments? + + public init(arguments: WatchManagerArguments?) { + self.arguments = arguments + } + + public var watchAppInstalled: Signal { + return self.arguments?.appInstalled ?? .single(false) + } + + public var navigateToMessageRequested: Signal { + return self.arguments?.navigateToMessageRequested ?? .never() + } + + public var runningRequests: Signal { + return self.arguments?.runningRequests ?? .single(false) + } +} diff --git a/TelegramUI/WatchPresetSettings.swift b/TelegramUI/WatchPresetSettings.swift new file mode 100644 index 0000000000..2674bbc2d4 --- /dev/null +++ b/TelegramUI/WatchPresetSettings.swift @@ -0,0 +1,68 @@ +import Foundation +import Postbox +import SwiftSignalKit + +public struct WatchPresetSettings: PreferencesEntry, Equatable { + public var customPresets: [String : String] + + public static var defaultSettings: WatchPresetSettings { + return WatchPresetSettings(presets: [:]) + } + + public init(presets: [String : String]) { + self.customPresets = presets + } + + public init(decoder: PostboxDecoder) { + let keys = decoder.decodeStringArrayForKey("presetKeys") + let values = decoder.decodeStringArrayForKey("presetValues") + if keys.count == values.count { + var presets: [String : String] = [:] + for i in 0 ..< keys.count { + presets[keys[i]] = values[i] + } + self.customPresets = presets + } else { + self.customPresets = [:] + } + } + + public func encode(_ encoder: PostboxEncoder) { + let keys = self.customPresets.keys.sorted() + let values = keys.reduce([String]()) { (values, index) -> [String] in + var values = values + if let value = self.customPresets[index] { + values.append(value) + } + return values + } + encoder.encodeStringArray(keys, forKey: "presetKeys") + encoder.encodeStringArray(values, forKey: "presetValues") + } + + public func isEqual(to: PreferencesEntry) -> Bool { + if let to = to as? WatchPresetSettings { + return self == to + } else { + return false + } + } + + public static func ==(lhs: WatchPresetSettings, rhs: WatchPresetSettings) -> Bool { + return lhs.customPresets == rhs.customPresets + } +} + +func updateWatchPresetSettingsInteractively(postbox: Postbox, _ f: @escaping (WatchPresetSettings) -> WatchPresetSettings) -> Signal { + return postbox.transaction { transaction -> Void in + transaction.updatePreferencesEntry(key: ApplicationSpecificPreferencesKeys.watchPresetSettings, { entry in + let currentSettings: WatchPresetSettings + if let entry = entry as? 
WatchPresetSettings { + currentSettings = entry + } else { + currentSettings = WatchPresetSettings.defaultSettings + } + return f(currentSettings) + }) + } +} diff --git a/TelegramUI/WatchSettingsController.swift b/TelegramUI/WatchSettingsController.swift new file mode 100644 index 0000000000..1321a40234 --- /dev/null +++ b/TelegramUI/WatchSettingsController.swift @@ -0,0 +1,148 @@ +import Foundation +import Display +import SwiftSignalKit +import Postbox +import TelegramCore + +private final class WatchSettingsControllerArguments { + let updatePreset: (String, String) -> Void + + init(updatePreset: @escaping (String, String) -> Void) { + self.updatePreset = updatePreset + } +} + +private enum WatchSettingsSection: Int32 { + case replyPresets +} + +private enum WatchSettingsControllerEntry: ItemListNodeEntry { + case replyPresetsHeader(PresentationTheme, String) + case replyPreset(PresentationTheme, String, String, String, Int32) + case replyPresetsInfo(PresentationTheme, String) + + var section: ItemListSectionId { + switch self { + case .replyPresetsHeader, .replyPreset, .replyPresetsInfo: + return WatchSettingsSection.replyPresets.rawValue + } + } + + var stableId: Int32 { + switch self { + case .replyPresetsHeader: + return 0 + case let .replyPreset(_, _, _, _, index): + return 1 + index + case .replyPresetsInfo: + return 100 + } + } + + static func ==(lhs: WatchSettingsControllerEntry, rhs: WatchSettingsControllerEntry) -> Bool { + switch lhs { + case let .replyPresetsHeader(lhsTheme, lhsText): + if case let .replyPresetsHeader(rhsTheme, rhsText) = rhs, lhsTheme === rhsTheme, lhsText == rhsText { + return true + } else { + return false + } + + case let .replyPreset(lhsTheme, lhsIdentifier, lhsPlaceholder, lhsValue, lhsIndex): + if case let .replyPreset(rhsTheme, rhsIdentifier, rhsPlaceholder, rhsValue, rhsIndex) = rhs, lhsTheme === rhsTheme, lhsIdentifier == rhsIdentifier, lhsPlaceholder == rhsPlaceholder, lhsValue == rhsValue, lhsIndex == rhsIndex { + return true + } else { + return false + } + + case let .replyPresetsInfo(lhsTheme, lhsText): + if case let .replyPresetsInfo(rhsTheme, rhsText) = rhs, lhsTheme === rhsTheme, lhsText == rhsText { + return true + } else { + return false + } + } + } + + static func <(lhs: WatchSettingsControllerEntry, rhs: WatchSettingsControllerEntry) -> Bool { + return lhs.stableId < rhs.stableId + } + + func item(_ arguments: WatchSettingsControllerArguments) -> ListViewItem { + switch self { + case let .replyPresetsHeader(theme, text): + return ItemListSectionHeaderItem(theme: theme, text: text, sectionId: self.section) + case let .replyPreset(theme, identifier, placeholder, value, _): + return ItemListSingleLineInputItem(theme: theme, title: NSAttributedString(string: ""), text: value, placeholder: placeholder, type: .regular(capitalization: true, autocorrection: true), spacing: 0.0, sectionId: self.section, textUpdated: { updatedText in + arguments.updatePreset(identifier, updatedText.trimmingCharacters(in: .whitespacesAndNewlines)) + }, action: {}) + case let .replyPresetsInfo(theme, text): + return ItemListTextItem(theme: theme, text: .plain(text), sectionId: self.section) + } + } +} + +private func watchSettingsControllerEntries(presentationData: PresentationData, customPresets: [String : String]) -> [WatchSettingsControllerEntry] { + var entries: [WatchSettingsControllerEntry] = [] + + let defaultSuggestions : [(Int32, String, String)] = [ + (0, "OK", presentationData.strings.Watch_Suggestion_OK), + (1, "Thanks", 
presentationData.strings.Watch_Suggestion_Thanks), + (2, "WhatsUp", presentationData.strings.Watch_Suggestion_WhatsUp), + (3, "TalkLater", presentationData.strings.Watch_Suggestion_TalkLater), + (4, "CantTalk", presentationData.strings.Watch_Suggestion_CantTalk), + (5, "HoldOn", presentationData.strings.Watch_Suggestion_HoldOn), + (6, "BRB", presentationData.strings.Watch_Suggestion_BRB), + (7, "OnMyWay", presentationData.strings.Watch_Suggestion_OnMyWay) + ] + + entries.append(.replyPresetsHeader(presentationData.theme, presentationData.strings.AppleWatch_ReplyPresets)) + for (index, identifier, placeholder) in defaultSuggestions { + entries.append(.replyPreset(presentationData.theme, identifier, placeholder, customPresets[identifier] ?? "", index)) + } + entries.append(.replyPresetsInfo(presentationData.theme, presentationData.strings.AppleWatch_ReplyPresetsHelp)) + + return entries +} + +public func watchSettingsController(account: Account) -> ViewController { + var pushControllerImpl: ((ViewController) -> Void)? + var presentControllerImpl: ((ViewController) -> Void)? + + let updateDisposable = MetaDisposable() + let arguments = WatchSettingsControllerArguments(updatePreset: { identifier, text in + updateDisposable.set((.complete() |> delay(1.0, queue: Queue.mainQueue()) |> then(updateWatchPresetSettingsInteractively(postbox: account.postbox, { current in + var updatedPresets = current.customPresets + if !text.isEmpty { + updatedPresets[identifier] = text + } else { + updatedPresets.removeValue(forKey: identifier) + } + return WatchPresetSettings(presets: updatedPresets) + }))).start()) + }) + + let watchPresetSettingsKey = ApplicationSpecificPreferencesKeys.watchPresetSettings + let preferences = account.postbox.preferencesView(keys: [watchPresetSettingsKey]) + + let signal = combineLatest(account.telegramApplicationContext.presentationData, preferences) + |> deliverOnMainQueue + |> map { presentationData, preferences -> (ItemListControllerState, (ItemListNodeState, WatchSettingsControllerEntry.ItemGenerationArguments)) in + let settings = (preferences.values[watchPresetSettingsKey] as? WatchPresetSettings) ?? WatchPresetSettings.defaultSettings + + let controllerState = ItemListControllerState(theme: presentationData.theme, title: .text(presentationData.strings.AppleWatch_Title), leftNavigationButton: nil, rightNavigationButton: nil, backNavigationButton: ItemListBackButton(title: presentationData.strings.Common_Back)) + let listState = ItemListNodeState(entries: watchSettingsControllerEntries(presentationData: presentationData, customPresets: settings.customPresets), style: .blocks, animateChanges: false) + + return (controllerState, (listState, arguments)) + } + + let controller = ItemListController(account: account, state: signal) + pushControllerImpl = { [weak controller] c in + (controller?.navigationController as? NavigationController)?.pushViewController(c) + } + presentControllerImpl = { [weak controller] c in + controller?.present(c, in: .window(.root)) + } + return controller +} +
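
Usage notes (illustrative sketches, not part of the diff):

The LegacyBridgeAudio.swift helpers wrap the new Objective-C TGBridgeAudioDecoder/TGBridgeAudioEncoder in SwiftSignalKit signals. A minimal calling sketch, assuming the stripped generic parameters are Signal<String, NoError> for decoding and Signal<(Data?, Int32), NoError> for encoding (matching the completion blocks), with inputPath/outputPath/recordingPath as placeholder paths:

    import SwiftSignalKit

    // Decode an incoming Opus voice note into an m4a file (the decoder writes AAC via ExtAudioFile).
    let decodeDisposable = legacyDecodeOpusAudio(path: inputPath, outputPath: outputPath)
        .start(next: { decodedPath in
            print("decoded voice note written to \(decodedPath)")
        })

    // Encode a recorded reply back to Ogg/Opus; the signal delivers the data item's bytes and the duration.
    let encodeDisposable = legacyEncodeOpusAudio(path: recordingPath)
        .start(next: { dataAndDuration in
            let (data, duration) = dataAndDuration
            print("encoded \(data?.count ?? 0) bytes, duration \(duration)s")
        })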
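
WatchManager is a thin holder for signals supplied by the host application; settingsController only appends the new Apple Watch entry once watchAppInstalled emits true, and that entry pushes watchSettingsController(account:). A wiring sketch under stated assumptions (the signals' element types are not visible in this patch and are taken to be Bool / message identifier / Bool; installWatchManager and the constant placeholder signals are hypothetical, standing in for the real WatchKit bridge):

    import SwiftSignalKit
    import TelegramCore

    func installWatchManager(account: Account, appInstalled: Signal<Bool, NoError>) {
        // Placeholder signals; a real bridge would forward its session state and
        // notification taps here instead of constants.
        let arguments = WatchManagerArguments(
            appInstalled: appInstalled,
            navigateToMessageRequested: .never(),
            runningRequests: .single(false)
        )
        account.telegramApplicationContext.watchManager = WatchManager(arguments: arguments)
    }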
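
The reply presets themselves live in WatchPresetSettings under the new watchPresetSettings preferences key, keyed by the eight suggestion identifiers ("OK", "Thanks", "WhatsUp", "TalkLater", "CantTalk", "HoldOn", "BRB", "OnMyWay") and encoded as parallel presetKeys/presetValues string arrays. A hedged sketch of updating them outside the settings screen with the transaction helper added above (the replacement text is an example only; removing a key falls back to the localized default, mirroring what the settings controller does for empty input):

    import SwiftSignalKit

    let presetDisposable = updateWatchPresetSettingsInteractively(postbox: account.postbox, { current in
        var presets = current.customPresets
        presets["OK"] = "Sounds good"        // override the default "OK" suggestion
        presets.removeValue(forKey: "BRB")   // revert "BRB" to the localized default
        return WatchPresetSettings(presets: presets)
    }).start()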