Added Watch reply preset settings

This commit is contained in:
Ilya Laktyushin 2018-10-18 20:30:44 +03:00
parent 0d41a372c9
commit 95b6455f40
23 changed files with 838 additions and 21 deletions

View File

@ -0,0 +1,22 @@
{
"images" : [
{
"idiom" : "universal",
"scale" : "1x"
},
{
"idiom" : "universal",
"filename" : "SettingsWatchIcon@2x.png",
"scale" : "2x"
},
{
"idiom" : "universal",
"filename" : "SettingsWatchIcon@3x.png",
"scale" : "3x"
}
],
"info" : {
"version" : 1,
"author" : "xcode"
}
}

Binary file not shown.

After

Width:  |  Height:  |  Size: 925 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 760 B

View File

@ -23,6 +23,12 @@
0941A9A4210B0E2E00EBE194 /* OpenInAppIconResources.swift in Sources */ = {isa = PBXBuildFile; fileRef = 0941A9A3210B0E2E00EBE194 /* OpenInAppIconResources.swift */; };
0941A9A6210B822D00EBE194 /* OpenInOptions.swift in Sources */ = {isa = PBXBuildFile; fileRef = 0941A9A5210B822D00EBE194 /* OpenInOptions.swift */; };
0952D1752176DEB500194860 /* NotificationMuteSettingsController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 0952D1742176DEB500194860 /* NotificationMuteSettingsController.swift */; };
0952D1772177FB5400194860 /* WatchPresetSettings.swift in Sources */ = {isa = PBXBuildFile; fileRef = 0952D1762177FB5400194860 /* WatchPresetSettings.swift */; };
096C98BA21787A5C00C211FF /* LegacyBridgeAudio.swift in Sources */ = {isa = PBXBuildFile; fileRef = 096C98B921787A5C00C211FF /* LegacyBridgeAudio.swift */; };
096C98BF21787C6700C211FF /* TGBridgeAudioEncoder.m in Sources */ = {isa = PBXBuildFile; fileRef = 096C98BB21787C6600C211FF /* TGBridgeAudioEncoder.m */; };
096C98C021787C6700C211FF /* TGBridgeAudioEncoder.h in Headers */ = {isa = PBXBuildFile; fileRef = 096C98BC21787C6600C211FF /* TGBridgeAudioEncoder.h */; };
096C98C121787C6700C211FF /* TGBridgeAudioDecoder.h in Headers */ = {isa = PBXBuildFile; fileRef = 096C98BD21787C6700C211FF /* TGBridgeAudioDecoder.h */; };
096C98C221787C6700C211FF /* TGBridgeAudioDecoder.mm in Sources */ = {isa = PBXBuildFile; fileRef = 096C98BE21787C6700C211FF /* TGBridgeAudioDecoder.mm */; };
09797873210633CD0077D77F /* InstantPageSettingsButtonItemNode.swift in Sources */ = {isa = PBXBuildFile; fileRef = 09797872210633CD0077D77F /* InstantPageSettingsButtonItemNode.swift */; };
0979787C210642CB0077D77F /* WebEmbedPlayerNode.swift in Sources */ = {isa = PBXBuildFile; fileRef = 0979787B210642CB0077D77F /* WebEmbedPlayerNode.swift */; };
0979787E210646C00077D77F /* YoutubeEmbedImplementation.swift in Sources */ = {isa = PBXBuildFile; fileRef = 0979787D210646C00077D77F /* YoutubeEmbedImplementation.swift */; };
@ -40,6 +46,8 @@
09AE3823214C110900850BFD /* LegacySecureIdScanController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 09AE3822214C110800850BFD /* LegacySecureIdScanController.swift */; };
09C3466D2167D63A00B76780 /* Accessibility.swift in Sources */ = {isa = PBXBuildFile; fileRef = 09C3466C2167D63A00B76780 /* Accessibility.swift */; };
09C500242142BA6400EF253E /* ItemListWebsiteItem.swift in Sources */ = {isa = PBXBuildFile; fileRef = 09C500232142BA6400EF253E /* ItemListWebsiteItem.swift */; };
09D304152173C0E900C00567 /* WatchManager.swift in Sources */ = {isa = PBXBuildFile; fileRef = 09D304142173C0E900C00567 /* WatchManager.swift */; };
09D304182173C15700C00567 /* WatchSettingsController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 09D304172173C15700C00567 /* WatchSettingsController.swift */; };
09FE756D2153F5F900A3120F /* CallRouteActionSheetItem.swift in Sources */ = {isa = PBXBuildFile; fileRef = 09FE756C2153F5F900A3120F /* CallRouteActionSheetItem.swift */; };
D007019C2029E8F2006B9E34 /* LegqacyICloudFileController.swift in Sources */ = {isa = PBXBuildFile; fileRef = D007019B2029E8F2006B9E34 /* LegqacyICloudFileController.swift */; };
D007019E2029EFDD006B9E34 /* ICloudResources.swift in Sources */ = {isa = PBXBuildFile; fileRef = D007019D2029EFDD006B9E34 /* ICloudResources.swift */; };
@ -1044,6 +1052,12 @@
0941A9A3210B0E2E00EBE194 /* OpenInAppIconResources.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = OpenInAppIconResources.swift; sourceTree = "<group>"; };
0941A9A5210B822D00EBE194 /* OpenInOptions.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = OpenInOptions.swift; sourceTree = "<group>"; };
0952D1742176DEB500194860 /* NotificationMuteSettingsController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = NotificationMuteSettingsController.swift; sourceTree = "<group>"; };
0952D1762177FB5400194860 /* WatchPresetSettings.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = WatchPresetSettings.swift; sourceTree = "<group>"; };
096C98B921787A5C00C211FF /* LegacyBridgeAudio.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = LegacyBridgeAudio.swift; sourceTree = "<group>"; };
096C98BB21787C6600C211FF /* TGBridgeAudioEncoder.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = TGBridgeAudioEncoder.m; sourceTree = "<group>"; };
096C98BC21787C6600C211FF /* TGBridgeAudioEncoder.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = TGBridgeAudioEncoder.h; sourceTree = "<group>"; };
096C98BD21787C6700C211FF /* TGBridgeAudioDecoder.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = TGBridgeAudioDecoder.h; sourceTree = "<group>"; };
096C98BE21787C6700C211FF /* TGBridgeAudioDecoder.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; path = TGBridgeAudioDecoder.mm; sourceTree = "<group>"; };
09797872210633CD0077D77F /* InstantPageSettingsButtonItemNode.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = InstantPageSettingsButtonItemNode.swift; sourceTree = "<group>"; };
0979787B210642CB0077D77F /* WebEmbedPlayerNode.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = WebEmbedPlayerNode.swift; sourceTree = "<group>"; };
0979787D210646C00077D77F /* YoutubeEmbedImplementation.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = YoutubeEmbedImplementation.swift; sourceTree = "<group>"; };
@ -1065,6 +1079,8 @@
09AE3822214C110800850BFD /* LegacySecureIdScanController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = LegacySecureIdScanController.swift; sourceTree = "<group>"; };
09C3466C2167D63A00B76780 /* Accessibility.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Accessibility.swift; sourceTree = "<group>"; };
09C500232142BA6400EF253E /* ItemListWebsiteItem.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ItemListWebsiteItem.swift; sourceTree = "<group>"; };
09D304142173C0E900C00567 /* WatchManager.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = WatchManager.swift; sourceTree = "<group>"; };
09D304172173C15700C00567 /* WatchSettingsController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = WatchSettingsController.swift; sourceTree = "<group>"; };
09FE756C2153F5F900A3120F /* CallRouteActionSheetItem.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = CallRouteActionSheetItem.swift; sourceTree = "<group>"; };
D00219051DDD1C9E00BE708A /* ImageContainingNode.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = ImageContainingNode.swift; sourceTree = "<group>"; };
D002A0D01E9B99F500A81812 /* SoftwareVideoSource.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = SoftwareVideoSource.swift; sourceTree = "<group>"; };
@ -2213,6 +2229,18 @@
name = "Open In";
sourceTree = "<group>";
};
0965C7152178738A007C94D0 /* Bridge Audio */ = {
isa = PBXGroup;
children = (
096C98BD21787C6700C211FF /* TGBridgeAudioDecoder.h */,
096C98BE21787C6700C211FF /* TGBridgeAudioDecoder.mm */,
096C98BC21787C6600C211FF /* TGBridgeAudioEncoder.h */,
096C98BB21787C6600C211FF /* TGBridgeAudioEncoder.m */,
096C98B921787A5C00C211FF /* LegacyBridgeAudio.swift */,
);
name = "Bridge Audio";
sourceTree = "<group>";
};
0979787F21065EAA0077D77F /* Web Embed */ = {
isa = PBXGroup;
children = (
@ -2245,6 +2273,14 @@
name = "Web Embed";
sourceTree = "<group>";
};
09D304162173C13500C00567 /* Watch */ = {
isa = PBXGroup;
children = (
09D304172173C15700C00567 /* WatchSettingsController.swift */,
);
name = Watch;
sourceTree = "<group>";
};
D00C7CDA1E3776CA0080C3D5 /* Secret Preview */ = {
isa = PBXGroup;
children = (
@ -2884,6 +2920,7 @@
D07551891DDA4C7C0073E051 /* Legacy Components */ = {
isa = PBXGroup;
children = (
0965C7152178738A007C94D0 /* Bridge Audio */,
D04BB2C61E48797500650E93 /* RMIntro */,
D067B4AE211C916D00796039 /* Channel Intro */,
D075518A1DDA4D7D0073E051 /* LegacyController.swift */,
@ -2999,6 +3036,7 @@
D048B33A203C777500038D05 /* RenderedTotalUnreadCount.swift */,
D06ECFCA20B8448E00C576C2 /* ContactSynchronizationSettings.swift */,
D08A10BA211DF7A80077488B /* StickerSettings.swift */,
0952D1762177FB5400194860 /* WatchPresetSettings.swift */,
);
name = Settings;
sourceTree = "<group>";
@ -4229,6 +4267,7 @@
D0C9323A1E0B4AD40074F044 /* Data and Storage */,
D0FA0AC31E7742EE005BB9B7 /* Stickers */,
D05BFB4F1EA96EC100909D38 /* Themes */,
09D304162173C13500C00567 /* Watch */,
D0AF7C441ED84BB000CD8E0F /* Language Selection */,
D0CB27D020C17A6D001ACF93 /* Terms of Service */,
D01B279A1E39386C0022A4C0 /* SettingsController.swift */,
@ -4284,6 +4323,7 @@
D0383ED5207D19BC00C45548 /* Emoji */,
D0B69C3A20EBD8B3003632C7 /* Device Access */,
D01C7EFE1EF9D434008305F1 /* Device Contacts */,
09D304142173C0E900C00567 /* WatchManager.swift */,
D0B844551DAC3AEE005F29E1 /* PresenceStrings.swift */,
D08775081E3E59DE00A97350 /* PeerNotificationSoundStrings.swift */,
D0F69E931D6B8C9B0046BCD6 /* ProgressiveImage.swift */,
@ -4465,6 +4505,7 @@
buildActionMask = 2147483647;
files = (
D0E9BA221F05577700F079A4 /* STPCard.h in Headers */,
096C98C021787C6700C211FF /* TGBridgeAudioEncoder.h in Headers */,
D0E9BA591F055A2200F079A4 /* STPWeakStrongMacros.h in Headers */,
D0E9BADE1F0574D800F079A4 /* STPBackendAPIAdapter.h in Headers */,
D0E9BAD11F0573C000F079A4 /* STPToken.h in Headers */,
@ -4495,6 +4536,7 @@
D06F31E22135829B001A0F12 /* EDSunriseSet.h in Headers */,
D0E9BA531F0559DA00F079A4 /* STPImageLibrary+Private.h in Headers */,
D0E9BA601F055A4300F079A4 /* STPDelegateProxy.h in Headers */,
096C98C121787C6700C211FF /* TGBridgeAudioDecoder.h in Headers */,
D0E9BADF1F0574D800F079A4 /* STPDispatchFunctions.h in Headers */,
D0E9BACB1F05738600F079A4 /* STPAPIPostRequest.h in Headers */,
D0E9BA561F055A0B00F079A4 /* STPFormTextField.h in Headers */,
@ -4717,6 +4759,7 @@
D0208ADC1FA346A4001F0D5F /* RaiseToListen.swift in Sources */,
D0EB41F91F30E5B700838FE6 /* LegacyPeerAvatarPlaceholderDataSource.swift in Sources */,
D0EC6CBB1EB9F58800EBF1C3 /* texture_helper.m in Sources */,
09D304182173C15700C00567 /* WatchSettingsController.swift in Sources */,
D0EC6CBC1EB9F58800EBF1C3 /* LegacyController.swift in Sources */,
D0EC6CBD1EB9F58800EBF1C3 /* LegacyControllerNode.swift in Sources */,
D079FCE91F06A76C0038FADE /* Notices.swift in Sources */,
@ -4841,6 +4884,7 @@
D0ACCB1A1EC5E0C20079D8BF /* CallControllerKeyPreviewNode.swift in Sources */,
D0E9BA611F055A4300F079A4 /* STPDelegateProxy.m in Sources */,
D0EC6CF91EB9F58800EBF1C3 /* MediaManager.swift in Sources */,
096C98BF21787C6700C211FF /* TGBridgeAudioEncoder.m in Sources */,
D01776B81F1D6FB30044446D /* RadialProgressContentNode.swift in Sources */,
D0EC6CFA1EB9F58800EBF1C3 /* ManagedAudioSession.swift in Sources */,
D0EB5ADF1F798033004E89B6 /* PeerMediaCollectionEmptyNode.swift in Sources */,
@ -4977,6 +5021,7 @@
D0EC6D3F1EB9F58800EBF1C3 /* MediaNavigationAccessoryPanel.swift in Sources */,
D0E9BA3B1F0558E800F079A4 /* NSString+Stripe.m in Sources */,
D0CE8CE51F6F354400AA2DB0 /* ChatTextInputAccessoryItem.swift in Sources */,
096C98BA21787A5C00C211FF /* LegacyBridgeAudio.swift in Sources */,
D0EC6D401EB9F58800EBF1C3 /* MediaNavigationAccessoryContainerNode.swift in Sources */,
D0E266FD1F66706500BFC79F /* ChatBubbleVideoDecoration.swift in Sources */,
D0EC6D411EB9F58800EBF1C3 /* MediaNavigationAccessoryHeaderNode.swift in Sources */,
@ -5002,6 +5047,7 @@
D0208ADA1FA34017001F0D5F /* DeviceProximityManager.m in Sources */,
D04281FC200E61BC009DDE36 /* ChatRecentActionsInteraction.swift in Sources */,
D0EC6D561EB9F58800EBF1C3 /* ChatHistoryNode.swift in Sources */,
096C98C221787C6700C211FF /* TGBridgeAudioDecoder.mm in Sources */,
D0EC6D571EB9F58800EBF1C3 /* ChatHistoryListNode.swift in Sources */,
D0EC6D581EB9F58800EBF1C3 /* ChatHistoryGridNode.swift in Sources */,
D0B2F76E2052B59F00D3BFB9 /* InviteContactsController.swift in Sources */,
@ -5131,6 +5177,7 @@
D0147BA7206E8B4F00E40378 /* SecureIdAuthAcceptNode.swift in Sources */,
D0E8174E2011FC3800B82BBB /* ChatMessageEventLogPreviousDescriptionContentNode.swift in Sources */,
D0EC6D981EB9F58900EBF1C3 /* ChatMessageItemView.swift in Sources */,
09D304152173C0E900C00567 /* WatchManager.swift in Sources */,
D039FB1921711B5D00BD1BAD /* PlatformVideoContent.swift in Sources */,
D0CAD8FD20AE467D00ACD96E /* PeerChannelMemberCategoriesContextsManager.swift in Sources */,
D073D2DB1FB61DA9009E1DA2 /* CallListSettings.swift in Sources */,
@ -5360,6 +5407,7 @@
D0EC6E171EB9F58900EBF1C3 /* InstantPageTextStyleStack.swift in Sources */,
D0EC6E181EB9F58900EBF1C3 /* InstantPageTextItem.swift in Sources */,
D01C06B51FBB7720001561AB /* ChatMediaInputSettingsItem.swift in Sources */,
0952D1772177FB5400194860 /* WatchPresetSettings.swift in Sources */,
D091C7A61F8ECEA300D7DE13 /* SettingsThemeWallpaperNode.swift in Sources */,
D0EC6E191EB9F58900EBF1C3 /* InstantPageAnchorItem.swift in Sources */,
D05677531F4CA0D0001B723E /* InstantPagePeerReferenceNode.swift in Sources */,

View File

@ -311,8 +311,8 @@ final class ChatMessageInteractiveMediaNode: ASDisplayNode {
if let strongSelf = self {
if file.isAnimated {
strongSelf.fetchDisposable.set(fetchedMediaResource(postbox: account.postbox, reference: AnyMediaReference.message(message: MessageReference(message), media: file).resourceReference(file.resource), statsCategory: statsCategoryForFileWithAttributes(file.attributes)).start())
} else {
strongSelf.fetchDisposable.set(messageMediaFileInteractiveFetched(account: account, message: message, file: file, userInitiated: manual).start())
} else {
strongSelf.fetchDisposable.set(messageMediaFileInteractiveFetched(account: account, message: message, file: file, userInitiated: manual).start())
}
}
}, cancel: {

View File

@ -26,6 +26,7 @@ private var telegramUIDeclaredEncodables: Void = {
declareEncodable(CachedChannelAdminIds.self, f: { CachedChannelAdminIds(decoder: $0) })
declareEncodable(StickerSettings.self, f: { StickerSettings(decoder: $0) })
declareEncodable(InstantPagePresentationSettings.self, f: { InstantPagePresentationSettings(decoder: $0) })
declareEncodable(WatchPresetSettings.self, f: { WatchPresetSettings(decoder: $0) })
return
}()

View File

@ -3,7 +3,7 @@ import TelegramCore
import Postbox
import SwiftSignalKit
func freeMediaFileInteractiveFetched(account: Account, fileReference: FileMediaReference) -> Signal<FetchResourceSourceType, NoError> {
public func freeMediaFileInteractiveFetched(account: Account, fileReference: FileMediaReference) -> Signal<FetchResourceSourceType, NoError> {
return fetchedMediaResource(postbox: account.postbox, reference: fileReference.resourceReference(fileReference.media.resource))
}
@ -21,7 +21,7 @@ private func fetchCategoryForFile(_ file: TelegramMediaFile) -> FetchManagerCate
}
}
func messageMediaFileInteractiveFetched(account: Account, message: Message, file: TelegramMediaFile, userInitiated: Bool) -> Signal<Void, NoError> {
public func messageMediaFileInteractiveFetched(account: Account, message: Message, file: TelegramMediaFile, userInitiated: Bool) -> Signal<Void, NoError> {
return account.telegramApplicationContext.fetchManager.interactivelyFetched(category: fetchCategoryForFile(file), location: .chat(message.id.peerId), locationKey: .messageId(message.id), resourceReference: AnyMediaReference.message(message: MessageReference(message), media: file).resourceReference(file.resource), statsCategory: statsCategoryForFileWithAttributes(file.attributes), elevatedPriority: false, userInitiated: userInitiated)
}
@ -29,7 +29,7 @@ func messageMediaFileCancelInteractiveFetch(account: Account, messageId: Message
account.telegramApplicationContext.fetchManager.cancelInteractiveFetches(category: fetchCategoryForFile(file), location: .chat(messageId.peerId), locationKey: .messageId(messageId), resource: file.resource)
}
func messageMediaImageInteractiveFetched(account: Account, message: Message, image: TelegramMediaImage, resource: MediaResource) -> Signal<Void, NoError> {
public func messageMediaImageInteractiveFetched(account: Account, message: Message, image: TelegramMediaImage, resource: MediaResource) -> Signal<Void, NoError> {
return account.telegramApplicationContext.fetchManager.interactivelyFetched(category: .image, location: .chat(message.id.peerId), locationKey: .messageId(message.id), resourceReference: AnyMediaReference.message(message: MessageReference(message), media: image).resourceReference(resource), statsCategory: .image, elevatedPriority: false, userInitiated: true)
}

View File

@ -0,0 +1,26 @@
import Foundation
import SwiftSignalKit
import TelegramUIPrivateModule
/// Decodes an Opus audio file into a playable file at `outputPath`.
///
/// - Parameters:
///   - path: Path of the source Opus file.
///   - outputPath: Path the decoded audio is written to; emitted as the signal's value.
/// - Returns: A signal that emits `outputPath` once decoding finishes, then completes.
///   If the decoder cannot be constructed, the signal completes without a value
///   (previously it would never terminate, leaking the subscriber).
public func legacyDecodeOpusAudio(path: String, outputPath: String) -> Signal<String, NoError> {
    return Signal { subscriber in
        guard let decoder = TGBridgeAudioDecoder(url: URL(fileURLWithPath: path), outputUrl: URL(fileURLWithPath: outputPath)) else {
            // Decoder init failed: terminate the signal instead of hanging forever.
            subscriber.putCompletion()
            return EmptyDisposable
        }
        decoder.start(completion: {
            subscriber.putNext(outputPath)
            subscriber.putCompletion()
        })
        return EmptyDisposable
    }
}
/// Encodes the audio file at `path` to Opus and returns the encoded payload.
///
/// - Parameter path: Path of the source audio file.
/// - Returns: A signal that emits `(data, durationInSeconds)` when encoding finishes,
///   then completes. `data` may be nil if the encoder produced no output item.
///   If the encoder cannot be constructed, the signal completes without a value
///   (previously it would never terminate, leaking the subscriber).
public func legacyEncodeOpusAudio(path: String) -> Signal<(Data?, Int32), NoError> {
    return Signal { subscriber in
        guard let encoder = TGBridgeAudioEncoder(url: URL(fileURLWithPath: path)) else {
            // Encoder init failed: terminate the signal instead of hanging forever.
            subscriber.putCompletion()
            return EmptyDisposable
        }
        encoder.start(completion: { dataItem, duration in
            subscriber.putNext((dataItem?.data(), duration))
            subscriber.putCompletion()
        })
        return EmptyDisposable
    }
}

View File

@ -663,7 +663,7 @@ public func chatMessagePhotoInternal(photoData: Signal<(Data?, Data?, Bool), NoE
}
}
private func chatMessagePhotoThumbnailDatas(account: Account, photoReference: ImageMediaReference) -> Signal<(Data?, Data?, Bool), NoError> {
private func chatMessagePhotoThumbnailDatas(account: Account, photoReference: ImageMediaReference, onlyFullSize: Bool = false) -> Signal<(Data?, Data?, Bool), NoError> {
let fullRepresentationSize: CGSize = CGSize(width: 1280.0, height: 1280.0)
if let smallestRepresentation = smallestImageRepresentation(photoReference.media.representations), let largestRepresentation = photoReference.media.representationForDisplayAtSize(fullRepresentationSize) {

View File

@ -17,6 +17,7 @@ private enum ApplicationSpecificPreferencesKeyValues: Int32 {
case experimentalUISettings = 11
case contactSynchronizationSettings = 12
case stickerSettings = 13
case watchPresetSettings = 14
}
public struct ApplicationSpecificPreferencesKeys {
@ -34,4 +35,5 @@ public struct ApplicationSpecificPreferencesKeys {
public static let experimentalUISettings = applicationSpecificPreferencesKey(ApplicationSpecificPreferencesKeyValues.experimentalUISettings.rawValue)
public static let contactSynchronizationSettings = applicationSpecificPreferencesKey(ApplicationSpecificPreferencesKeyValues.contactSynchronizationSettings.rawValue)
public static let stickerSettings = applicationSpecificPreferencesKey(ApplicationSpecificPreferencesKeyValues.stickerSettings.rawValue)
public static let watchPresetSettings = applicationSpecificPreferencesKey(ApplicationSpecificPreferencesKeyValues.watchPresetSettings.rawValue)
}

View File

@ -17,7 +17,8 @@ private final class SettingsItemIcons {
static let appearance = UIImage(bundleImageName: "Settings/MenuIcons/Appearance")?.precomposed()
static let language = UIImage(bundleImageName: "Settings/MenuIcons/Language")?.precomposed()
static let secureId = UIImage(bundleImageName: "Settings/MenuIcons/Passport")?.precomposed()
static let passport = UIImage(bundleImageName: "Settings/MenuIcons/Passport")?.precomposed()
static let watch = UIImage(bundleImageName: "Settings/MenuIcons/Watch")?.precomposed()
static let support = UIImage(bundleImageName: "Settings/MenuIcons/Support")?.precomposed()
static let faq = UIImage(bundleImageName: "Settings/MenuIcons/Faq")?.precomposed()
@ -42,6 +43,7 @@ private struct SettingsItemArguments {
let presentController: (ViewController) -> Void
let openLanguage: () -> Void
let openPassport: () -> Void
let openWatch: () -> Void
let openSupport: () -> Void
let openFaq: () -> Void
let openEditing: () -> Void
@ -54,7 +56,7 @@ private enum SettingsSection: Int32 {
case proxy
case media
case generalSettings
case passport
case advanced
case help
}
@ -75,6 +77,7 @@ private enum SettingsEntry: ItemListNodeEntry {
case themes(PresentationTheme, UIImage?, String)
case language(PresentationTheme, UIImage?, String, String)
case passport(PresentationTheme, UIImage?, String, String)
case watch(PresentationTheme, UIImage?, String, String)
case askAQuestion(PresentationTheme, UIImage?, String)
case faq(PresentationTheme, UIImage?, String)
@ -89,8 +92,8 @@ private enum SettingsEntry: ItemListNodeEntry {
return SettingsSection.media.rawValue
case .notificationsAndSounds, .privacyAndSecurity, .dataAndStorage, .themes, .language:
return SettingsSection.generalSettings.rawValue
case .passport:
return SettingsSection.passport.rawValue
case .passport, .watch :
return SettingsSection.advanced.rawValue
case .askAQuestion, .faq:
return SettingsSection.help.rawValue
}
@ -124,10 +127,12 @@ private enum SettingsEntry: ItemListNodeEntry {
return 11
case .passport:
return 12
case .askAQuestion:
case .watch:
return 13
case .faq:
case .askAQuestion:
return 14
case .faq:
return 15
}
}
@ -240,6 +245,12 @@ private enum SettingsEntry: ItemListNodeEntry {
} else {
return false
}
case let .watch(lhsTheme, lhsImage, lhsText, lhsValue):
if case let .watch(rhsTheme, rhsImage, rhsText, rhsValue) = rhs, lhsTheme === rhsTheme, lhsImage === rhsImage, lhsText == rhsText, lhsValue == rhsValue {
return true
} else {
return false
}
case let .askAQuestion(lhsTheme, lhsImage, lhsText):
if case let .askAQuestion(rhsTheme, rhsImage, rhsText) = rhs, lhsTheme === rhsTheme, lhsImage === rhsImage, lhsText == rhsText {
return true
@ -320,6 +331,10 @@ private enum SettingsEntry: ItemListNodeEntry {
return ItemListDisclosureItem(theme: theme, icon: image, title: text, label: value, sectionId: ItemListSectionId(self.section), style: .blocks, action: {
arguments.openPassport()
})
case let .watch(theme, image, text, value):
return ItemListDisclosureItem(theme: theme, icon: image, title: text, label: value, sectionId: ItemListSectionId(self.section), style: .blocks, action: {
arguments.openWatch()
})
case let .askAQuestion(theme, image, text):
return ItemListDisclosureItem(theme: theme, icon: image, title: text, label: "", sectionId: ItemListSectionId(self.section), style: .blocks, action: {
arguments.openSupport()
@ -351,7 +366,7 @@ private struct SettingsState: Equatable {
}
}
private func settingsEntries(presentationData: PresentationData, state: SettingsState, view: PeerView, proxySettings: ProxySettings, unreadTrendingStickerPacks: Int, archivedPacks: [ArchivedStickerPackItem]?, hasPassport: Bool) -> [SettingsEntry] {
private func settingsEntries(presentationData: PresentationData, state: SettingsState, view: PeerView, proxySettings: ProxySettings, unreadTrendingStickerPacks: Int, archivedPacks: [ArchivedStickerPackItem]?, hasPassport: Bool, hasWatchApp: Bool) -> [SettingsEntry] {
var entries: [SettingsEntry] = []
if let peer = peerViewMainPeer(view) as? TelegramUser {
@ -390,7 +405,10 @@ private func settingsEntries(presentationData: PresentationData, state: Settings
entries.append(.language(presentationData.theme, SettingsItemIcons.language, presentationData.strings.Settings_AppLanguage, presentationData.strings.Localization_LanguageName))
if hasPassport {
entries.append(.passport(presentationData.theme, SettingsItemIcons.secureId, presentationData.strings.Settings_Passport, ""))
entries.append(.passport(presentationData.theme, SettingsItemIcons.passport, presentationData.strings.Settings_Passport, ""))
}
if hasWatchApp {
entries.append(.watch(presentationData.theme, SettingsItemIcons.watch, presentationData.strings.Settings_AppleWatch, ""))
}
entries.append(.askAQuestion(presentationData.theme, SettingsItemIcons.support, presentationData.strings.Settings_Support))
@ -516,6 +534,9 @@ public func settingsController(account: Account, accountManager: AccountManager)
}, openPassport: {
let controller = SecureIdAuthController(account: account, mode: .list)
presentControllerImpl?(controller, nil)
}, openWatch: {
let controller = watchSettingsController(account: account)
pushControllerImpl?(controller)
}, openSupport: {
let supportPeer = Promise<PeerId?>()
supportPeer.set(supportPeerId(account: account))
@ -654,8 +675,13 @@ public func settingsController(account: Account, accountManager: AccountManager)
}
updatePassport()
let signal = combineLatest(account.telegramApplicationContext.presentationData, statePromise.get(), peerView, account.postbox.preferencesView(keys: [PreferencesKeys.proxySettings]), combineLatest(account.viewTracker.featuredStickerPacks(), archivedPacks.get()), hasPassport.get())
|> map { presentationData, state, view, preferences, featuredAndArchived, hasPassport -> (ItemListControllerState, (ItemListNodeState<SettingsEntry>, SettingsEntry.ItemGenerationArguments)) in
let hasWatchApp = Promise<Bool>(false)
if let context = account.applicationContext as? TelegramApplicationContext, let watchManager = context.watchManager {
hasWatchApp.set(watchManager.watchAppInstalled)
}
let signal = combineLatest(account.telegramApplicationContext.presentationData, statePromise.get(), peerView, account.postbox.preferencesView(keys: [PreferencesKeys.proxySettings]), combineLatest(account.viewTracker.featuredStickerPacks(), archivedPacks.get()), combineLatest(hasPassport.get(), hasWatchApp.get()))
|> map { presentationData, state, view, preferences, featuredAndArchived, hasPassportAndWatch -> (ItemListControllerState, (ItemListNodeState<SettingsEntry>, SettingsEntry.ItemGenerationArguments)) in
let proxySettings: ProxySettings
if let value = preferences.values[PreferencesKeys.proxySettings] as? ProxySettings {
proxySettings = value
@ -679,7 +705,9 @@ public func settingsController(account: Account, accountManager: AccountManager)
}
}
let listState = ItemListNodeState(entries: settingsEntries(presentationData: presentationData, state: state, view: view, proxySettings: proxySettings, unreadTrendingStickerPacks: unreadTrendingStickerPacks, archivedPacks: featuredAndArchived.1, hasPassport: hasPassport), style: .blocks)
let (hasPassport, hasWatchApp) = hasPassportAndWatch
let listState = ItemListNodeState(entries: settingsEntries(presentationData: presentationData, state: state, view: view, proxySettings: proxySettings, unreadTrendingStickerPacks: unreadTrendingStickerPacks, archivedPacks: featuredAndArchived.1, hasPassport: hasPassport, hasWatchApp: hasWatchApp), style: .blocks)
return (controllerState, (listState, arguments))
} |> afterDisposed {

View File

@ -147,7 +147,7 @@ public func chatMessageSticker(account: Account, file: TelegramMediaFile, small:
return signal |> map { (thumbnailData, fullSizeData, fullSizeComplete) in
return { arguments in
let context = DrawingContext(size: arguments.drawingSize, clear: true)
let context = DrawingContext(size: arguments.drawingSize, clear: arguments.emptyColor == nil)
let drawingRect = arguments.drawingRect
let fittedSize = arguments.imageSize
@ -179,6 +179,10 @@ public func chatMessageSticker(account: Account, file: TelegramMediaFile, small:
context.withFlippedContext { c in
c.setBlendMode(.copy)
if let color = arguments.emptyColor {
c.fill(drawingRect)
}
if let blurredThumbnailImage = blurredThumbnailImage {
c.interpolationQuality = .low
c.draw(blurredThumbnailImage.cgImage!, in: fittedRect)

View File

@ -523,7 +523,7 @@ func storageUsageController(account: Account) -> ViewController {
if !items.isEmpty {
items.append(ActionSheetButtonItem(title: presentationData.strings.Cache_Clear("\(dataSizeString(totalSize))").0, action: {
if let statsPromise = statsPromise {
var clearCategories = sizeIndex.keys.filter({ sizeIndex[$0]!.0 })
let clearCategories = sizeIndex.keys.filter({ sizeIndex[$0]!.0 })
//var clearSize: Int64 = 0
var clearMediaIds = Set<MediaId>()

View File

@ -0,0 +1,8 @@
#import <Foundation/Foundation.h>

// Decodes an Opus audio file into an AAC (.m4a) file.
// See TGBridgeAudioDecoder.mm for the Core Audio conversion pipeline.
@interface TGBridgeAudioDecoder : NSObject

// url: the source Opus file; outputURL: where the decoded .m4a is written.
- (instancetype)initWithURL:(NSURL *)url outputUrl:(NSURL *)outputURL;

// Starts decoding asynchronously on a shared serial queue; `completion` is
// invoked when the conversion loop finishes.
- (void)startWithCompletion:(void (^)(void))completion;

@end

View File

@ -0,0 +1,200 @@
#import "TGBridgeAudioDecoder.h"
#import <AudioToolbox/AudioToolbox.h>
#import <AVFoundation/AVFoundation.h>
#import <SSignalKit/SSignalKit.h>
#import "opusfile.h"
#import "opusenc.h"
// Sample rate of the PCM stream produced by libopusfile (op_read output).
const NSInteger TGBridgeAudioDecoderInputSampleRate = 48000;
// Sample rate requested for the AAC output file.
const NSInteger TGBridgeAudioDecoderResultSampleRate = 24000;
// Size in bytes of the intermediate PCM read buffer used by the decode loop.
const NSUInteger TGBridgeAudioDecoderBufferSize = 32768;

// Wraps an audio-API call: logs the failing call site and returns NO on error.
#define checkResult(result,operation) (_checkResultLite((result),(operation),__FILE__,__LINE__))

// Fixed-capacity PCM byte buffer with an associated PCM offset.
// NOTE(review): not referenced anywhere in the code visible in this file —
// possibly leftover from an earlier revision; confirm before removing.
struct TGAudioBuffer
{
NSUInteger capacity; // allocated size of `data` in bytes
uint8_t *data;       // heap-allocated payload (see TGAudioBufferWithCapacity)
NSUInteger size;     // bytes currently in use
int64_t pcmOffset;   // PCM position associated with this buffer
};
// Allocates a TGAudioBuffer with the given byte capacity; size/pcmOffset start at 0.
// NOTE(review): malloc results are unchecked — an allocation failure would crash
// on the first write; confirm callers only request modest capacities.
inline TGAudioBuffer *TGAudioBufferWithCapacity(NSUInteger capacity)
{
TGAudioBuffer *audioBuffer = (TGAudioBuffer *)malloc(sizeof(TGAudioBuffer));
audioBuffer->capacity = capacity;
audioBuffer->data = (uint8_t *)malloc(capacity);
audioBuffer->size = 0;
audioBuffer->pcmOffset = 0;
return audioBuffer;
}

// Frees a buffer created by TGAudioBufferWithCapacity; safe to call with NULL.
inline void TGAudioBufferDispose(TGAudioBuffer *audioBuffer)
{
if (audioBuffer != NULL)
{
free(audioBuffer->data);
free(audioBuffer);
}
}
// Returns YES when `result` is noErr; otherwise logs the failing operation with
// its call site and the status rendered as decimal, hex and four-char code,
// then returns NO.
static inline bool _checkResultLite(OSStatus result, const char *operation, const char* file, int line)
{
    if (result == noErr)
        return YES;

    NSLog(@"%s:%d: %s result %d %08X %4.4s\n", file, line, operation, (int)result, (int)result, (char*)&result);
    return NO;
}
// Private state for the decoder.
@interface TGBridgeAudioDecoder ()
{
NSURL *_url;          // source Opus file
NSURL *_resultURL;    // destination .m4a file
OggOpusFile *_opusFile; // opened in startWithCompletion:; NOTE(review): no op_free in the visible code
bool _finished;       // NOTE(review): never written in the code visible in this file
bool _cancelled;      // polled by the decode loop; no visible setter in this view
}
@end
@implementation TGBridgeAudioDecoder
// Designated initializer: records the source Opus URL and the output URL the
// decoded .m4a will be written to. No I/O happens until startWithCompletion:.
// (Removed dead locals: `randomId`/`arc4random_buf` were leftovers from a
// removed temp-file-name code path and had no effect.)
- (instancetype)initWithURL:(NSURL *)url outputUrl:(NSURL *)outputUrl
{
    self = [super init];
    if (self != nil)
    {
        _url = url;
        _resultURL = outputUrl;
    }
    return self;
}
// Decodes the Opus file at _url into an AAC (.m4a) file at _resultURL using
// ExtAudioFile, running asynchronously on the shared processing queue.
// `completion` is invoked when the conversion loop ends (normally or via
// _cancelled). NOTE(review): the early-return failure paths below neither
// invoke `completion` nor release already-created resources (_opusFile,
// destinationFile) — confirm callers tolerate a completion that never fires.
- (void)startWithCompletion:(void (^)(void))completion
{
[[TGBridgeAudioDecoder processingQueue] dispatch:^
{
// Open the Opus source; bail out if libopusfile rejects it.
int error = OPUS_OK;
_opusFile = op_open_file(_url.path.UTF8String, &error);
if (_opusFile == NULL || error != OPUS_OK)
{
return;
}

// Client (input) format: the PCM op_read produces — 16-bit signed,
// packed, mono, 48 kHz, one frame per packet (2 bytes/frame).
AudioStreamBasicDescription sourceFormat;
sourceFormat.mSampleRate = TGBridgeAudioDecoderInputSampleRate;
sourceFormat.mFormatID = kAudioFormatLinearPCM;
sourceFormat.mFormatFlags = kAudioFormatFlagIsSignedInteger | kAudioFormatFlagIsPacked;
sourceFormat.mFramesPerPacket = 1;
sourceFormat.mChannelsPerFrame = 1;
sourceFormat.mBitsPerChannel = 16;
sourceFormat.mBytesPerPacket = 2;
sourceFormat.mBytesPerFrame = 2;

// Destination format: AAC at the reduced output sample rate; the
// remaining fields are filled in by AudioFormatGetProperty below.
AudioStreamBasicDescription destFormat;
memset(&destFormat, 0, sizeof(destFormat));
destFormat.mChannelsPerFrame = sourceFormat.mChannelsPerFrame;
destFormat.mFormatID = kAudioFormatMPEG4AAC;
destFormat.mSampleRate = TGBridgeAudioDecoderResultSampleRate;

UInt32 size = sizeof(destFormat);
if (!checkResult(AudioFormatGetProperty(kAudioFormatProperty_FormatInfo, 0, NULL, &size, &destFormat),
"AudioFormatGetProperty(kAudioFormatProperty_FormatInfo)"))
{
return;
}

// Create the output .m4a, erasing any existing file at _resultURL.
ExtAudioFileRef destinationFile;
if (!checkResult(ExtAudioFileCreateWithURL((__bridge CFURLRef)_resultURL, kAudioFileM4AType, &destFormat, NULL, kAudioFileFlags_EraseFile, &destinationFile), "ExtAudioFileCreateWithURL"))
{
return;
}

// Tell ExtAudioFile what format we will hand it; it converts to AAC itself.
if (!checkResult(ExtAudioFileSetProperty(destinationFile, kExtAudioFileProperty_ClientDataFormat, size, &sourceFormat),
"ExtAudioFileSetProperty(destinationFile, kExtAudioFileProperty_ClientDataFormat"))
{
return;
}

// Query whether the underlying codec can resume after an audio-session
// interruption. NOTE(review): the flag is computed but never read in the
// visible code.
bool canResumeAfterInterruption = false;
AudioConverterRef converter;
size = sizeof(converter);
if (checkResult(ExtAudioFileGetProperty(destinationFile, kExtAudioFileProperty_AudioConverter, &size, &converter),
"ExtAudioFileGetProperty(kExtAudioFileProperty_AudioConverter;)"))
{
UInt32 canResume = 0;
size = sizeof(canResume);
if (AudioConverterGetProperty(converter, kAudioConverterPropertyCanResumeFromInterruption, &size, &canResume) == noErr)
canResumeAfterInterruption = canResume;
}

// Main decode loop: fill srcBuffer with PCM from op_read, then write it
// to the ExtAudioFile until the Opus stream is exhausted or cancelled.
uint8_t srcBuffer[TGBridgeAudioDecoderBufferSize];
while (!_cancelled)
{
AudioBufferList bufferList;
bufferList.mNumberBuffers = 1;
bufferList.mBuffers[0].mNumberChannels = sourceFormat.mChannelsPerFrame;
bufferList.mBuffers[0].mDataByteSize = TGBridgeAudioDecoderBufferSize;
bufferList.mBuffers[0].mData = srcBuffer;

// Pack as many decoded samples into the buffer as will fit; op_read
// may return fewer frames than requested, so loop until full or EOF.
uint32_t writtenOutputBytes = 0;
while (writtenOutputBytes < TGBridgeAudioDecoderBufferSize)
{
int32_t readSamples = op_read(_opusFile, (opus_int16 *)(srcBuffer + writtenOutputBytes), (TGBridgeAudioDecoderBufferSize - writtenOutputBytes) / sourceFormat.mBytesPerFrame, NULL);
if (readSamples > 0)
writtenOutputBytes += readSamples * sourceFormat.mBytesPerFrame;
else
break;
}
bufferList.mBuffers[0].mDataByteSize = writtenOutputBytes;

// End of stream: no frames produced this iteration.
int32_t nFrames = writtenOutputBytes / sourceFormat.mBytesPerFrame;
if (nFrames == 0)
break;

// Write (and transparently encode) the PCM chunk. The two codec-
// unavailable statuses are interruption cases; currently ignored.
OSStatus status = ExtAudioFileWrite(destinationFile, nFrames, &bufferList);
if (status == kExtAudioFileError_CodecUnavailableInputConsumed)
{
//TGLog(@"1");
}
else if (status == kExtAudioFileError_CodecUnavailableInputNotConsumed)
{
//TGLog(@"2");
}
else if (!checkResult(status, "ExtAudioFileWrite"))
{
//TGLog(@"3");
}
}

// Finalize the output file and notify the caller.
ExtAudioFileDispose(destinationFile);

if (completion != nil)
completion();
}];
}
+ (SQueue *)processingQueue
{
static SQueue *queue = nil;
static dispatch_once_t onceToken;
dispatch_once(&onceToken, ^
{
static const char *queueSpecific = "org.telegram.opusAudioDecoderQueue";
dispatch_queue_t dispatchQueue = dispatch_queue_create("org.telegram.opusAudioDecoderQueue", DISPATCH_QUEUE_SERIAL);
dispatch_queue_set_specific(dispatchQueue, queueSpecific, (void *)queueSpecific, NULL);
queue = [SQueue wrapConcurrentNativeQueue:dispatchQueue];
});
return queue;
}
@end

View File

@ -0,0 +1,11 @@
#import <Foundation/Foundation.h>

@class TGDataItem;
// NOTE(review): TGLiveUploadActorData appears unused by this header and its
// implementation — confirm before removing.
@class TGLiveUploadActorData;

// Re-encodes the audio track of the media file at `url` into Ogg Opus,
// buffered into a TGDataItem.
@interface TGBridgeAudioEncoder : NSObject

- (instancetype)initWithURL:(NSURL *)url;

// Starts encoding on a background queue; `completion` receives the encoded
// data item (may be nil on failure) and the encoded duration in whole seconds.
- (void)startWithCompletion:(void (^)(TGDataItem *, int32_t))completion;

@end

View File

@ -0,0 +1,211 @@
#import "TGBridgeAudioEncoder.h"
#import <AVFoundation/AVFoundation.h>
#import <SSignalKit/SSignalKit.h>
#import "opus.h"
#import "opusenc.h"
#import "TGDataItem.h"
// PCM sample rate (Hz) requested from the asset reader and fed to the Opus encoder.
const NSInteger TGBridgeAudioEncoderSampleRate = 16000;
// Private state for TGBridgeAudioEncoder (media file -> Ogg Opus transcoder).
@interface TGBridgeAudioEncoder ()
{
    AVAssetReader *_assetReader; // reads/decodes the source asset to PCM
    AVAssetReaderOutput *_readerOutput; // mixed-audio PCM output of the reader
    
    NSMutableData *_audioBuffer; // carries leftover PCM bytes between sample buffers
    
    TGDataItem *_tempFileItem; // destination buffer for the encoded Opus stream
    TGOggOpusWriter *_oggWriter; // Ogg Opus muxer/encoder; nil after cleanup
}
@end
@implementation TGBridgeAudioEncoder

- (instancetype)initWithURL:(NSURL *)url
{
    self = [super init];
    if (self != nil)
    {
        AVURLAsset *asset = [[AVURLAsset alloc] initWithURL:url options:nil];
        if (asset == nil || asset.tracks.count == 0)
        {
            //TGLog(@"Asset create fail");
            return nil;
        }
        
        NSError *error = nil;
        _assetReader = [[AVAssetReader alloc] initWithAsset:asset error:&error];
        if (_assetReader == nil)
        {
            // Previously unchecked: with a nil reader, -startWithCompletion:'s
            // status-polling loop would spin forever (messaging nil yields
            // status 0, which never equals AVAssetReaderStatusCompleted).
            return nil;
        }
        
        // Decode to 16 kHz, 16-bit, mono, interleaved PCM — the layout
        // _processBuffer: and the Opus encoder expect.
        NSDictionary *outputSettings = @
        {
            AVFormatIDKey: @(kAudioFormatLinearPCM),
            AVSampleRateKey: @(TGBridgeAudioEncoderSampleRate),
            AVNumberOfChannelsKey: @1,
            AVLinearPCMBitDepthKey: @16,
            AVLinearPCMIsFloatKey: @false,
            AVLinearPCMIsBigEndianKey: @false,
            AVLinearPCMIsNonInterleaved: @false
        };
        
        _readerOutput = [AVAssetReaderAudioMixOutput assetReaderAudioMixOutputWithAudioTracks:asset.tracks audioSettings:outputSettings];
        [_assetReader addOutput:_readerOutput];
        
        _tempFileItem = [[TGDataItem alloc] init];
    }
    return self;
}

- (void)dealloc
{
    [self cleanup];
}

// Releases the writer; safe to call multiple times.
- (void)cleanup
{
    _oggWriter = nil;
}

// Shared background queue for all encoder instances.
+ (SQueue *)processingQueue
{
    static SQueue *queue = nil;
    static dispatch_once_t onceToken;
    dispatch_once(&onceToken, ^
    {
        // The queue-specific tag lets SQueue detect dispatch onto the queue it
        // is already running on.
        static const char *queueSpecific = "org.telegram.opusAudioEncoderQueue";
        dispatch_queue_t dispatchQueue = dispatch_queue_create("org.telegram.opusAudioEncoderQueue", DISPATCH_QUEUE_SERIAL);
        dispatch_queue_set_specific(dispatchQueue, queueSpecific, (void *)queueSpecific, NULL);
        queue = [SQueue wrapConcurrentNativeQueue:dispatchQueue];
    });
    return queue;
}

// Reads the whole asset, feeding PCM through _processBuffer: into the Ogg Opus
// writer. `completion` is always invoked; `dataItem` is nil on failure.
- (void)startWithCompletion:(void (^)(TGDataItem *, int32_t))completion
{
    CFAbsoluteTime startTime = CFAbsoluteTimeGetCurrent();
    [[TGBridgeAudioEncoder processingQueue] dispatch:^
    {
        _oggWriter = [[TGOggOpusWriter alloc] init];
        if (![_oggWriter beginWithDataItem:_tempFileItem])
        {
            //TGLog(@"[TGBridgeAudioEncoder#%x error initializing ogg opus writer]", self);
            [self cleanup];
            // Previously missing: report failure instead of leaving the caller waiting.
            if (completion != nil)
                completion(nil, 0);
            return;
        }
        
        [_assetReader startReading];
        
        while (_assetReader.status != AVAssetReaderStatusCompleted)
        {
            if (_assetReader.status == AVAssetReaderStatusReading)
            {
                CMSampleBufferRef nextBuffer = [_readerOutput copyNextSampleBuffer];
                if (nextBuffer)
                {
                    AudioBufferList abl;
                    CMBlockBufferRef blockBuffer;
                    CMSampleBufferGetAudioBufferListWithRetainedBlockBuffer(nextBuffer, NULL, &abl, sizeof(abl), NULL, NULL, kCMSampleBufferFlag_AudioBufferList_Assure16ByteAlignment, &blockBuffer);
                    
                    // NOTE(review): this relies on -[SQueue dispatch:] executing the
                    // block synchronously when already running on the queue (which is
                    // what the queue-specific tag in +processingQueue enables);
                    // otherwise buffers would only be encoded after the writer is
                    // finalized below — confirm SQueue semantics.
                    [[TGBridgeAudioEncoder processingQueue] dispatch:^
                    {
                        [self _processBuffer:&abl.mBuffers[0]];
                        
                        CFRelease(nextBuffer);
                        CFRelease(blockBuffer);
                    }];
                }
                else
                {
                    break;
                }
            }
        }
        
        TGDataItem *dataItemResult = nil;
        NSTimeInterval durationResult = 0.0;
        
        NSUInteger totalBytes = 0;
        
        if (_assetReader.status == AVAssetReaderStatusCompleted)
        {
            // Flush the final (possibly partial) frame before reading the totals.
            if (_oggWriter != nil && [_oggWriter writeFrame:NULL frameByteCount:0])
            {
                dataItemResult = _tempFileItem;
                durationResult = [_oggWriter encodedDuration];
                // NOTE(review): totalBytes is only read by the commented-out log below.
                totalBytes = [_oggWriter encodedBytes];
            }
            
            [self cleanup];
        }
        
        //TGLog(@"[TGBridgeAudioEncoder#%x convert time: %f ms]", self, (CFAbsoluteTimeGetCurrent() - startTime) * 1000.0);
        
        if (completion != nil)
            completion(dataItemResult, (int32_t)durationResult);
    }];
}

// Accumulates incoming PCM into fixed-size packets (60 ms at 16 kHz mono,
// 16-bit) and hands each complete packet to the Ogg Opus writer; any trailing
// partial packet is stashed in _audioBuffer for the next call.
- (void)_processBuffer:(AudioBuffer const *)buffer
{
    @autoreleasepool
    {
        if (_oggWriter == nil)
            return;
        
        static const int millisecondsPerPacket = 60;
        static const int encoderPacketSizeInBytes = TGBridgeAudioEncoderSampleRate / 1000 * millisecondsPerPacket * 2;
        
        unsigned char currentEncoderPacket[encoderPacketSizeInBytes];
        
        int bufferOffset = 0;
        
        while (true)
        {
            int currentEncoderPacketSize = 0;
            
            while (currentEncoderPacketSize < encoderPacketSizeInBytes)
            {
                if (_audioBuffer.length != 0)
                {
                    // Drain leftover bytes from the previous call first.
                    int takenBytes = MIN((int)_audioBuffer.length, encoderPacketSizeInBytes - currentEncoderPacketSize);
                    if (takenBytes != 0)
                    {
                        memcpy(currentEncoderPacket + currentEncoderPacketSize, _audioBuffer.bytes, takenBytes);
                        [_audioBuffer replaceBytesInRange:NSMakeRange(0, takenBytes) withBytes:NULL length:0];
                        currentEncoderPacketSize += takenBytes;
                    }
                }
                else if (bufferOffset < (int)buffer->mDataByteSize)
                {
                    int takenBytes = MIN((int)buffer->mDataByteSize - bufferOffset, encoderPacketSizeInBytes - currentEncoderPacketSize);
                    if (takenBytes != 0)
                    {
                        memcpy(currentEncoderPacket + currentEncoderPacketSize, ((const char *)buffer->mData) + bufferOffset, takenBytes);
                        bufferOffset += takenBytes;
                        currentEncoderPacketSize += takenBytes;
                    }
                }
                else
                    break;
            }
            
            if (currentEncoderPacketSize < encoderPacketSizeInBytes)
            {
                // Not enough data for a full packet: save the remainder and stop.
                if (_audioBuffer == nil)
                    _audioBuffer = [[NSMutableData alloc] initWithCapacity:encoderPacketSizeInBytes];
                [_audioBuffer appendBytes:currentEncoderPacket length:currentEncoderPacketSize];
                break;
            }
            else
            {
                [_oggWriter writeFrame:currentEncoderPacket frameByteCount:(NSUInteger)currentEncoderPacketSize];
            }
        }
    }
}

@end

View File

@ -99,6 +99,8 @@ public final class TelegramApplicationContext {
}
private var hasOngoingCallDisposable: Disposable?
public var watchManager: WatchManager?
private var immediateExperimentalUISettingsValue = Atomic<ExperimentalUISettings>(value: ExperimentalUISettings.defaultSettings)
public var immediateExperimentalUISettings: ExperimentalUISettings {
return self.immediateExperimentalUISettingsValue.with { $0 }

View File

@ -28,4 +28,6 @@ module TelegramUIPrivateModule {
header "../TGEmojiSuggestions.h"
header "../TGChannelIntroController.h"
header "../EDSunriseSet.h"
header "../TGBridgeAudioDecoder.h"
header "../TGBridgeAudioEncoder.h"
}

View File

@ -13,9 +13,9 @@ public struct TransformImageArguments: Equatable {
public let boundingSize: CGSize
public let intrinsicInsets: UIEdgeInsets
public let resizeMode: TransformImageResizeMode
public let emptyColor: UIColor
public let emptyColor: UIColor?
public init(corners: ImageCorners, imageSize: CGSize, boundingSize: CGSize, intrinsicInsets: UIEdgeInsets, resizeMode: TransformImageResizeMode = .fill(.black), emptyColor: UIColor = .white) {
public init(corners: ImageCorners, imageSize: CGSize, boundingSize: CGSize, intrinsicInsets: UIEdgeInsets, resizeMode: TransformImageResizeMode = .fill(.black), emptyColor: UIColor? = nil) {
self.corners = corners
self.imageSize = imageSize
self.boundingSize = boundingSize

View File

@ -0,0 +1,36 @@
import Foundation
import SwiftSignalKit
import Postbox
import TelegramCore
/// Bundle of externally supplied signals that back a `WatchManager`.
public final class WatchManagerArguments {
    /// Whether the companion Watch app is currently installed.
    public let appInstalled: Signal<Bool, NoError>
    /// Fires when the Watch asks the phone to open a specific message.
    public let navigateToMessageRequested: Signal<MessageId, NoError>
    /// Whether the Watch bridge currently has requests in flight.
    public let runningRequests: Signal<Bool, NoError>
    
    public init(appInstalled: Signal<Bool, NoError>, navigateToMessageRequested: Signal<MessageId, NoError>, runningRequests: Signal<Bool, NoError>) {
        self.appInstalled = appInstalled
        self.navigateToMessageRequested = navigateToMessageRequested
        self.runningRequests = runningRequests
    }
}
/// Facade over optional Watch bridge arguments; yields inert defaults when the
/// bridge is unavailable (no arguments supplied).
public final class WatchManager {
    private let arguments: WatchManagerArguments?
    
    public init(arguments: WatchManagerArguments?) {
        self.arguments = arguments
    }
    
    public var watchAppInstalled: Signal<Bool, NoError> {
        guard let arguments = self.arguments else {
            return .single(false)
        }
        return arguments.appInstalled
    }
    
    public var navigateToMessageRequested: Signal<MessageId, NoError> {
        guard let arguments = self.arguments else {
            return .never()
        }
        return arguments.navigateToMessageRequested
    }
    
    public var runningRequests: Signal<Bool, NoError> {
        guard let arguments = self.arguments else {
            return .single(false)
        }
        return arguments.runningRequests
    }
}

View File

@ -0,0 +1,68 @@
import Foundation
import Postbox
import SwiftSignalKit
/// User-customized quick-reply presets for the Apple Watch app, persisted in
/// the account's preferences.
public struct WatchPresetSettings: PreferencesEntry, Equatable {
    /// Maps a built-in suggestion identifier (e.g. "OK", "BRB") to the user's
    /// replacement text; identifiers absent here use the localized default.
    public var customPresets: [String : String]
    
    public static var defaultSettings: WatchPresetSettings {
        return WatchPresetSettings(presets: [:])
    }
    
    public init(presets: [String : String]) {
        self.customPresets = presets
    }
    
    public init(decoder: PostboxDecoder) {
        // Presets are stored as two parallel string arrays; a length mismatch
        // indicates corrupt data, in which case fall back to no custom presets.
        let keys = decoder.decodeStringArrayForKey("presetKeys")
        let values = decoder.decodeStringArrayForKey("presetValues")
        if keys.count == values.count {
            var presets: [String : String] = [:]
            for (key, value) in zip(keys, values) {
                presets[key] = value
            }
            self.customPresets = presets
        } else {
            self.customPresets = [:]
        }
    }
    
    public func encode(_ encoder: PostboxEncoder) {
        // Sort keys so equal settings always produce identical encodings.
        let keys = self.customPresets.keys.sorted()
        // compactMap keeps `values` aligned with `keys` (replaces a quadratic
        // reduce that copied the accumulator array on every step).
        let values = keys.compactMap { self.customPresets[$0] }
        encoder.encodeStringArray(keys, forKey: "presetKeys")
        encoder.encodeStringArray(values, forKey: "presetValues")
    }
    
    public func isEqual(to: PreferencesEntry) -> Bool {
        if let to = to as? WatchPresetSettings {
            return self == to
        } else {
            return false
        }
    }
    
    public static func ==(lhs: WatchPresetSettings, rhs: WatchPresetSettings) -> Bool {
        return lhs.customPresets == rhs.customPresets
    }
}
/// Atomically transforms the stored `WatchPresetSettings` with `f`, seeding the
/// transform with the defaults when no entry exists yet.
func updateWatchPresetSettingsInteractively(postbox: Postbox, _ f: @escaping (WatchPresetSettings) -> WatchPresetSettings) -> Signal<Void, NoError> {
    return postbox.transaction { transaction -> Void in
        transaction.updatePreferencesEntry(key: ApplicationSpecificPreferencesKeys.watchPresetSettings, { entry in
            let currentSettings = (entry as? WatchPresetSettings) ?? WatchPresetSettings.defaultSettings
            return f(currentSettings)
        })
    }
}

View File

@ -0,0 +1,148 @@
import Foundation
import Display
import SwiftSignalKit
import Postbox
import TelegramCore
/// Callbacks exposed to the Watch settings list items.
private final class WatchSettingsControllerArguments {
    /// Invoked with (preset identifier, new text) whenever a preset field changes.
    let updatePreset: (String, String) -> Void
    
    init(updatePreset: @escaping (String, String) -> Void) {
        self.updatePreset = updatePreset
    }
}
// List sections of the Watch settings screen; currently only the reply presets.
private enum WatchSettingsSection: Int32 {
    case replyPresets
}
/// Entries of the Watch settings list: a section header, one editable row per
/// default suggestion, and a trailing info footer.
private enum WatchSettingsControllerEntry: ItemListNodeEntry {
    case replyPresetsHeader(PresentationTheme, String)
    case replyPreset(PresentationTheme, String, String, String, Int32)
    case replyPresetsInfo(PresentationTheme, String)
    
    var section: ItemListSectionId {
        // Every entry lives in the single reply-presets section.
        return WatchSettingsSection.replyPresets.rawValue
    }
    
    var stableId: Int32 {
        switch self {
            case .replyPresetsHeader:
                return 0
            case let .replyPreset(_, _, _, _, index):
                // Rows are ordered by their suggestion index, after the header.
                return 1 + index
            case .replyPresetsInfo:
                return 100
        }
    }
    
    static func ==(lhs: WatchSettingsControllerEntry, rhs: WatchSettingsControllerEntry) -> Bool {
        switch (lhs, rhs) {
            case let (.replyPresetsHeader(lhsTheme, lhsText), .replyPresetsHeader(rhsTheme, rhsText)):
                return lhsTheme === rhsTheme && lhsText == rhsText
            case let (.replyPreset(lhsTheme, lhsIdentifier, lhsPlaceholder, lhsValue, lhsIndex), .replyPreset(rhsTheme, rhsIdentifier, rhsPlaceholder, rhsValue, rhsIndex)):
                return lhsTheme === rhsTheme && lhsIdentifier == rhsIdentifier && lhsPlaceholder == rhsPlaceholder && lhsValue == rhsValue && lhsIndex == rhsIndex
            case let (.replyPresetsInfo(lhsTheme, lhsText), .replyPresetsInfo(rhsTheme, rhsText)):
                return lhsTheme === rhsTheme && lhsText == rhsText
            default:
                return false
        }
    }
    
    static func <(lhs: WatchSettingsControllerEntry, rhs: WatchSettingsControllerEntry) -> Bool {
        return lhs.stableId < rhs.stableId
    }
    
    func item(_ arguments: WatchSettingsControllerArguments) -> ListViewItem {
        switch self {
            case let .replyPresetsHeader(theme, text):
                return ItemListSectionHeaderItem(theme: theme, text: text, sectionId: self.section)
            case let .replyPreset(theme, identifier, placeholder, value, _):
                return ItemListSingleLineInputItem(theme: theme, title: NSAttributedString(string: ""), text: value, placeholder: placeholder, type: .regular(capitalization: true, autocorrection: true), spacing: 0.0, sectionId: self.section, textUpdated: { updatedText in
                    // Persist the trimmed text for this preset identifier.
                    arguments.updatePreset(identifier, updatedText.trimmingCharacters(in: .whitespacesAndNewlines))
                }, action: {})
            case let .replyPresetsInfo(theme, text):
                return ItemListTextItem(theme: theme, text: .plain(text), sectionId: self.section)
        }
    }
}
/// Produces the full entry list: header, one editable row per built-in
/// suggestion (pre-filled with any custom override), and the footer text.
private func watchSettingsControllerEntries(presentationData: PresentationData, customPresets: [String : String]) -> [WatchSettingsControllerEntry] {
    let theme = presentationData.theme
    let strings = presentationData.strings
    
    // (stable index, stored identifier, localized placeholder/default text)
    let defaultSuggestions: [(Int32, String, String)] = [
        (0, "OK", strings.Watch_Suggestion_OK),
        (1, "Thanks", strings.Watch_Suggestion_Thanks),
        (2, "WhatsUp", strings.Watch_Suggestion_WhatsUp),
        (3, "TalkLater", strings.Watch_Suggestion_TalkLater),
        (4, "CantTalk", strings.Watch_Suggestion_CantTalk),
        (5, "HoldOn", strings.Watch_Suggestion_HoldOn),
        (6, "BRB", strings.Watch_Suggestion_BRB),
        (7, "OnMyWay", strings.Watch_Suggestion_OnMyWay)
    ]
    
    var entries: [WatchSettingsControllerEntry] = [.replyPresetsHeader(theme, strings.AppleWatch_ReplyPresets)]
    entries.append(contentsOf: defaultSuggestions.map { suggestion in
        WatchSettingsControllerEntry.replyPreset(theme, suggestion.1, suggestion.2, customPresets[suggestion.1] ?? "", suggestion.0)
    })
    entries.append(.replyPresetsInfo(theme, strings.AppleWatch_ReplyPresetsHelp))
    
    return entries
}
/// Builds the Apple Watch settings screen (customizable quick-reply presets).
public func watchSettingsController(account: Account) -> ViewController {
    // NOTE(review): updateDisposable is never explicitly disposed; the last
    // pending write is kept alive until the closure chain is released — confirm
    // this matches the controller lifecycle used elsewhere.
    let updateDisposable = MetaDisposable()
    let arguments = WatchSettingsControllerArguments(updatePreset: { identifier, text in
        // Debounce typing: wait one second before persisting; setting the
        // MetaDisposable cancels any pending, not-yet-applied update.
        updateDisposable.set((.complete() |> delay(1.0, queue: Queue.mainQueue()) |> then(updateWatchPresetSettingsInteractively(postbox: account.postbox, { current in
            var updatedPresets = current.customPresets
            if !text.isEmpty {
                updatedPresets[identifier] = text
            } else {
                // Clearing the field restores the localized default suggestion.
                updatedPresets.removeValue(forKey: identifier)
            }
            return WatchPresetSettings(presets: updatedPresets)
        }))).start())
    })
    
    let watchPresetSettingsKey = ApplicationSpecificPreferencesKeys.watchPresetSettings
    let preferences = account.postbox.preferencesView(keys: [watchPresetSettingsKey])
    
    let signal = combineLatest(account.telegramApplicationContext.presentationData, preferences)
    |> deliverOnMainQueue
    |> map { presentationData, preferences -> (ItemListControllerState, (ItemListNodeState<WatchSettingsControllerEntry>, WatchSettingsControllerEntry.ItemGenerationArguments)) in
        let settings = (preferences.values[watchPresetSettingsKey] as? WatchPresetSettings) ?? WatchPresetSettings.defaultSettings
        
        let controllerState = ItemListControllerState(theme: presentationData.theme, title: .text(presentationData.strings.AppleWatch_Title), leftNavigationButton: nil, rightNavigationButton: nil, backNavigationButton: ItemListBackButton(title: presentationData.strings.Common_Back))
        let listState = ItemListNodeState(entries: watchSettingsControllerEntries(presentationData: presentationData, customPresets: settings.customPresets), style: .blocks, animateChanges: false)
        
        return (controllerState, (listState, arguments))
    }
    
    // Removed: pushControllerImpl/presentControllerImpl locals that were
    // declared and assigned but never invoked (dead code).
    return ItemListController(account: account, state: signal)
}