diff --git a/Telegram/Telegram-iOS/en.lproj/Localizable.strings b/Telegram/Telegram-iOS/en.lproj/Localizable.strings index 1e0dd5321b..c341d0e8aa 100644 --- a/Telegram/Telegram-iOS/en.lproj/Localizable.strings +++ b/Telegram/Telegram-iOS/en.lproj/Localizable.strings @@ -10887,3 +10887,9 @@ Sorry for the inconvenience."; "Conversation.ContactAddContact" = "ADD"; "Conversation.ContactMessage" = "MESSAGE"; + +"Chat.PlayOnceVideoMessageTooltip" = "This video message can only be played once."; +"Chat.PlayOnceVideoMessageYourTooltip" = "This message will disappear once **%@** plays it once."; + +"Chat.TapToPlayVideoMessageOnceTooltip" = "Tap to set this message to **Play Once**"; +"Chat.PlayVideoMessageOnceTooltip" = "The recipient will be able to play it only once."; diff --git a/submodules/AccountContext/Sources/AccountContext.swift b/submodules/AccountContext/Sources/AccountContext.swift index 4b930cc473..eb7203647a 100644 --- a/submodules/AccountContext/Sources/AccountContext.swift +++ b/submodules/AccountContext/Sources/AccountContext.swift @@ -898,7 +898,7 @@ public protocol SharedAccountContext: AnyObject { selectedMessages: Signal?, NoError>, mode: ChatHistoryListMode ) -> ChatHistoryListNode - func makeChatMessagePreviewItem(context: AccountContext, messages: [Message], theme: PresentationTheme, strings: PresentationStrings, wallpaper: TelegramWallpaper, fontSize: PresentationFontSize, chatBubbleCorners: PresentationChatBubbleCorners, dateTimeFormat: PresentationDateTimeFormat, nameOrder: PresentationPersonNameOrder, forcedResourceStatus: FileMediaResourceStatus?, tapMessage: ((Message) -> Void)?, clickThroughMessage: (() -> Void)?, backgroundNode: ASDisplayNode?, availableReactions: AvailableReactions?, accountPeer: Peer?, isCentered: Bool, isPreview: Bool) -> ListViewItem + func makeChatMessagePreviewItem(context: AccountContext, messages: [Message], theme: PresentationTheme, strings: PresentationStrings, wallpaper: TelegramWallpaper, fontSize: 
PresentationFontSize, chatBubbleCorners: PresentationChatBubbleCorners, dateTimeFormat: PresentationDateTimeFormat, nameOrder: PresentationPersonNameOrder, forcedResourceStatus: FileMediaResourceStatus?, tapMessage: ((Message) -> Void)?, clickThroughMessage: (() -> Void)?, backgroundNode: ASDisplayNode?, availableReactions: AvailableReactions?, accountPeer: Peer?, isCentered: Bool, isPreview: Bool, isStandalone: Bool) -> ListViewItem func makeChatMessageDateHeaderItem(context: AccountContext, timestamp: Int32, theme: PresentationTheme, strings: PresentationStrings, wallpaper: TelegramWallpaper, fontSize: PresentationFontSize, chatBubbleCorners: PresentationChatBubbleCorners, dateTimeFormat: PresentationDateTimeFormat, nameOrder: PresentationPersonNameOrder) -> ListViewItemHeader func makeChatMessageAvatarHeaderItem(context: AccountContext, timestamp: Int32, peer: Peer, message: Message, theme: PresentationTheme, strings: PresentationStrings, wallpaper: TelegramWallpaper, fontSize: PresentationFontSize, chatBubbleCorners: PresentationChatBubbleCorners, dateTimeFormat: PresentationDateTimeFormat, nameOrder: PresentationPersonNameOrder) -> ListViewItemHeader func makePeerSharedMediaController(context: AccountContext, peerId: PeerId) -> ViewController? 
@@ -945,6 +945,7 @@ public protocol SharedAccountContext: AnyObject { func makePremiumDemoController(context: AccountContext, subject: PremiumDemoSubject, action: @escaping () -> Void) -> ViewController func makePremiumLimitController(context: AccountContext, subject: PremiumLimitSubject, count: Int32, forceDark: Bool, cancel: @escaping () -> Void, action: @escaping () -> Bool) -> ViewController func makePremiumGiftController(context: AccountContext, source: PremiumGiftSource) -> ViewController + func makePremiumPrivacyControllerController(context: AccountContext, subject: PremiumPrivacySubject, peerId: EnginePeer.Id) -> ViewController func makeStickerPackScreen(context: AccountContext, updatedPresentationData: (initial: PresentationData, signal: Signal)?, mainStickerPack: StickerPackReference, stickerPacks: [StickerPackReference], loadedStickerPacks: [LoadedStickerPack], parentNavigationController: NavigationController?, sendSticker: ((FileMediaReference, UIView, CGRect) -> Bool)?) -> ViewController @@ -969,6 +970,8 @@ public protocol SharedAccountContext: AnyObject { var enablePreloads: Promise { get } var hasPreloadBlockingContent: Promise { get } + var deviceContactPhoneNumbers: Promise> { get } + var hasGroupCallOnScreen: Signal { get } var currentGroupCallController: ViewController? { get } @@ -976,86 +979,6 @@ public protocol SharedAccountContext: AnyObject { func beginNewAuth(testingEnvironment: Bool) } -public enum PremiumIntroSource { - case settings - case stickers - case reactions - case ads - case upload - case groupsAndChannels - case pinnedChats - case publicLinks - case savedGifs - case savedStickers - case folders - case chatsPerFolder - case accounts - case appIcons - case about - case deeplink(String?) - case profile(PeerId) - case emojiStatus(PeerId, Int64, TelegramMediaFile?, LoadedStickerPack?) 
- case voiceToText - case fasterDownload - case translation - case stories - case storiesDownload - case storiesStealthMode - case storiesPermanentViews - case storiesFormatting - case storiesExpirationDurations - case storiesSuggestedReactions - case channelBoost(EnginePeer.Id) - case nameColor - case similarChannels - case wallpapers - case presence -} - -public enum PremiumGiftSource: Equatable { - case profile - case attachMenu - case settings - case chatList - case channelBoost - case deeplink(String?) -} - -public enum PremiumDemoSubject { - case doubleLimits - case moreUpload - case fasterDownload - case voiceToText - case noAds - case uniqueReactions - case premiumStickers - case advancedChatManagement - case profileBadge - case animatedUserpics - case appIcons - case animatedEmoji - case emojiStatus - case translation - case stories - case colors - case wallpapers -} - -public enum PremiumLimitSubject { - case folders - case chatsPerFolder - case pins - case files - case accounts - case linksPerSharedFolder - case membershipInSharedFolders - case channels - case expiringStories - case storiesWeekly - case storiesMonthly - case storiesChannelBoost(peer: EnginePeer, isCurrent: Bool, level: Int32, currentLevelBoosts: Int32, nextLevelBoosts: Int32?, link: String?, myBoostCount: Int32, canBoostAgain: Bool) -} - public protocol ComposeController: ViewController { } @@ -1114,102 +1037,6 @@ public protocol AccountContext: AnyObject { func requestCall(peerId: PeerId, isVideo: Bool, completion: @escaping () -> Void) } -public struct PremiumConfiguration { - public static var defaultValue: PremiumConfiguration { - return PremiumConfiguration( - isPremiumDisabled: false, - showPremiumGiftInAttachMenu: false, - showPremiumGiftInTextField: false, - giveawayGiftsPurchaseAvailable: false, - boostsPerGiftCount: 3, - audioTransciptionTrialMaxDuration: 300, - audioTransciptionTrialCount: 2, - minChannelNameColorLevel: 1, - minChannelNameIconLevel: 4, - 
minChannelProfileColorLevel: 5, - minChannelProfileIconLevel: 7, - minChannelEmojiStatusLevel: 8, - minChannelWallpaperLevel: 9, - minChannelCustomWallpaperLevel: 10 - ) - } - - public let isPremiumDisabled: Bool - public let showPremiumGiftInAttachMenu: Bool - public let showPremiumGiftInTextField: Bool - public let giveawayGiftsPurchaseAvailable: Bool - public let boostsPerGiftCount: Int32 - public let audioTransciptionTrialMaxDuration: Int32 - public let audioTransciptionTrialCount: Int32 - public let minChannelNameColorLevel: Int32 - public let minChannelNameIconLevel: Int32 - public let minChannelProfileColorLevel: Int32 - public let minChannelProfileIconLevel: Int32 - public let minChannelEmojiStatusLevel: Int32 - public let minChannelWallpaperLevel: Int32 - public let minChannelCustomWallpaperLevel: Int32 - - fileprivate init( - isPremiumDisabled: Bool, - showPremiumGiftInAttachMenu: Bool, - showPremiumGiftInTextField: Bool, - giveawayGiftsPurchaseAvailable: Bool, - boostsPerGiftCount: Int32, - audioTransciptionTrialMaxDuration: Int32, - audioTransciptionTrialCount: Int32, - minChannelNameColorLevel: Int32, - minChannelNameIconLevel: Int32, - minChannelProfileColorLevel: Int32, - minChannelProfileIconLevel: Int32, - minChannelEmojiStatusLevel: Int32, - minChannelWallpaperLevel: Int32, - minChannelCustomWallpaperLevel: Int32 - - ) { - self.isPremiumDisabled = isPremiumDisabled - self.showPremiumGiftInAttachMenu = showPremiumGiftInAttachMenu - self.showPremiumGiftInTextField = showPremiumGiftInTextField - self.giveawayGiftsPurchaseAvailable = giveawayGiftsPurchaseAvailable - self.boostsPerGiftCount = boostsPerGiftCount - self.audioTransciptionTrialMaxDuration = audioTransciptionTrialMaxDuration - self.audioTransciptionTrialCount = audioTransciptionTrialCount - self.minChannelNameColorLevel = minChannelNameColorLevel - self.minChannelNameIconLevel = minChannelNameIconLevel - self.minChannelProfileColorLevel = minChannelProfileColorLevel - 
self.minChannelProfileIconLevel = minChannelProfileIconLevel - self.minChannelEmojiStatusLevel = minChannelEmojiStatusLevel - self.minChannelWallpaperLevel = minChannelWallpaperLevel - self.minChannelCustomWallpaperLevel = minChannelCustomWallpaperLevel - } - - public static func with(appConfiguration: AppConfiguration) -> PremiumConfiguration { - let defaultValue = self.defaultValue - if let data = appConfiguration.data { - func get(_ value: Any?) -> Int32? { - return (value as? Double).flatMap(Int32.init) - } - return PremiumConfiguration( - isPremiumDisabled: data["premium_purchase_blocked"] as? Bool ?? defaultValue.isPremiumDisabled, - showPremiumGiftInAttachMenu: data["premium_gift_attach_menu_icon"] as? Bool ?? defaultValue.showPremiumGiftInAttachMenu, - showPremiumGiftInTextField: data["premium_gift_text_field_icon"] as? Bool ?? defaultValue.showPremiumGiftInTextField, - giveawayGiftsPurchaseAvailable: data["giveaway_gifts_purchase_available"] as? Bool ?? defaultValue.giveawayGiftsPurchaseAvailable, - boostsPerGiftCount: get(data["boosts_per_sent_gift"]) ?? defaultValue.boostsPerGiftCount, - audioTransciptionTrialMaxDuration: get(data["transcribe_audio_trial_duration_max"]) ?? defaultValue.audioTransciptionTrialMaxDuration, - audioTransciptionTrialCount: get(data["transcribe_audio_trial_weekly_number"]) ?? defaultValue.audioTransciptionTrialCount, - minChannelNameColorLevel: get(data["channel_color_level_min"]) ?? defaultValue.minChannelNameColorLevel, - minChannelNameIconLevel: get(data["channel_bg_icon_level_min"]) ?? defaultValue.minChannelNameIconLevel, - minChannelProfileColorLevel: get(data["channel_profile_color_level_min"]) ?? defaultValue.minChannelProfileColorLevel, - minChannelProfileIconLevel: get(data["channel_profile_bg_icon_level_min"]) ?? defaultValue.minChannelProfileIconLevel, - minChannelEmojiStatusLevel: get(data["channel_emoji_status_level_min"]) ?? 
defaultValue.minChannelEmojiStatusLevel, - minChannelWallpaperLevel: get(data["channel_wallpaper_level_min"]) ?? defaultValue.minChannelWallpaperLevel, - minChannelCustomWallpaperLevel: get(data["channel_custom_wallpaper_level_min"]) ?? defaultValue.minChannelCustomWallpaperLevel - ) - } else { - return defaultValue - } - } -} - public struct AntiSpamBotConfiguration { public static var defaultValue: AntiSpamBotConfiguration { return AntiSpamBotConfiguration(antiSpamBotId: nil, minimumGroupParticipants: 100) @@ -1316,289 +1143,3 @@ public struct StickersSearchConfiguration { } } } - -private extension PeerNameColors.Colors { - init?(colors: EngineAvailableColorOptions.MultiColorPack) { - if colors.colors.isEmpty { - return nil - } - self.main = UIColor(rgb: colors.colors[0]) - if colors.colors.count > 1 { - self.secondary = UIColor(rgb: colors.colors[1]) - } else { - self.secondary = nil - } - if colors.colors.count > 2 { - self.tertiary = UIColor(rgb: colors.colors[2]) - } else { - self.tertiary = nil - } - } -} - -public class PeerNameColors: Equatable { - public enum Subject { - case background - case palette - case stories - } - - public struct Colors: Equatable { - public let main: UIColor - public let secondary: UIColor? - public let tertiary: UIColor? - - public init(main: UIColor, secondary: UIColor?, tertiary: UIColor?) 
{ - self.main = main - self.secondary = secondary - self.tertiary = tertiary - } - - public init(main: UIColor) { - self.main = main - self.secondary = nil - self.tertiary = nil - } - - public init?(colors: [UIColor]) { - guard let first = colors.first else { - return nil - } - self.main = first - if colors.count == 3 { - self.secondary = colors[1] - self.tertiary = colors[2] - } else if colors.count == 2, let second = colors.last { - self.secondary = second - self.tertiary = nil - } else { - self.secondary = nil - self.tertiary = nil - } - } - } - - public static var defaultSingleColors: [Int32: Colors] { - return [ - 0: Colors(main: UIColor(rgb: 0xcc5049)), - 1: Colors(main: UIColor(rgb: 0xd67722)), - 2: Colors(main: UIColor(rgb: 0x955cdb)), - 3: Colors(main: UIColor(rgb: 0x40a920)), - 4: Colors(main: UIColor(rgb: 0x309eba)), - 5: Colors(main: UIColor(rgb: 0x368ad1)), - 6: Colors(main: UIColor(rgb: 0xc7508b)) - ] - } - - public static var defaultValue: PeerNameColors { - return PeerNameColors( - colors: defaultSingleColors, - darkColors: [:], - displayOrder: [5, 3, 1, 0, 2, 4, 6], - profileColors: [:], - profileDarkColors: [:], - profilePaletteColors: [:], - profilePaletteDarkColors: [:], - profileStoryColors: [:], - profileStoryDarkColors: [:], - profileDisplayOrder: [], - nameColorsChannelMinRequiredBoostLevel: [:] - ) - } - - public let colors: [Int32: Colors] - public let darkColors: [Int32: Colors] - public let displayOrder: [Int32] - - public let profileColors: [Int32: Colors] - public let profileDarkColors: [Int32: Colors] - public let profilePaletteColors: [Int32: Colors] - public let profilePaletteDarkColors: [Int32: Colors] - public let profileStoryColors: [Int32: Colors] - public let profileStoryDarkColors: [Int32: Colors] - public let profileDisplayOrder: [Int32] - - public let nameColorsChannelMinRequiredBoostLevel: [Int32: Int32] - - public func get(_ color: PeerNameColor, dark: Bool = false) -> Colors { - if dark, let colors = 
self.darkColors[color.rawValue] { - return colors - } else if let colors = self.colors[color.rawValue] { - return colors - } else { - return PeerNameColors.defaultSingleColors[5]! - } - } - - public func getProfile(_ color: PeerNameColor, dark: Bool = false, subject: Subject = .background) -> Colors { - switch subject { - case .background: - if dark, let colors = self.profileDarkColors[color.rawValue] { - return colors - } else if let colors = self.profileColors[color.rawValue] { - return colors - } else { - return Colors(main: UIColor(rgb: 0xcc5049)) - } - case .palette: - if dark, let colors = self.profilePaletteDarkColors[color.rawValue] { - return colors - } else if let colors = self.profilePaletteColors[color.rawValue] { - return colors - } else { - return self.getProfile(color, dark: dark, subject: .background) - } - case .stories: - if dark, let colors = self.profileStoryDarkColors[color.rawValue] { - return colors - } else if let colors = self.profileStoryColors[color.rawValue] { - return colors - } else { - return self.getProfile(color, dark: dark, subject: .background) - } - } - } - - fileprivate init( - colors: [Int32: Colors], - darkColors: [Int32: Colors], - displayOrder: [Int32], - profileColors: [Int32: Colors], - profileDarkColors: [Int32: Colors], - profilePaletteColors: [Int32: Colors], - profilePaletteDarkColors: [Int32: Colors], - profileStoryColors: [Int32: Colors], - profileStoryDarkColors: [Int32: Colors], - profileDisplayOrder: [Int32], - nameColorsChannelMinRequiredBoostLevel: [Int32: Int32] - ) { - self.colors = colors - self.darkColors = darkColors - self.displayOrder = displayOrder - self.profileColors = profileColors - self.profileDarkColors = profileDarkColors - self.profilePaletteColors = profilePaletteColors - self.profilePaletteDarkColors = profilePaletteDarkColors - self.profileStoryColors = profileStoryColors - self.profileStoryDarkColors = profileStoryDarkColors - self.profileDisplayOrder = profileDisplayOrder - 
self.nameColorsChannelMinRequiredBoostLevel = nameColorsChannelMinRequiredBoostLevel - } - - public static func with(availableReplyColors: EngineAvailableColorOptions, availableProfileColors: EngineAvailableColorOptions) -> PeerNameColors { - var colors: [Int32: Colors] = [:] - var darkColors: [Int32: Colors] = [:] - var displayOrder: [Int32] = [] - var profileColors: [Int32: Colors] = [:] - var profileDarkColors: [Int32: Colors] = [:] - var profilePaletteColors: [Int32: Colors] = [:] - var profilePaletteDarkColors: [Int32: Colors] = [:] - var profileStoryColors: [Int32: Colors] = [:] - var profileStoryDarkColors: [Int32: Colors] = [:] - var profileDisplayOrder: [Int32] = [] - - var nameColorsChannelMinRequiredBoostLevel: [Int32: Int32] = [:] - - if !availableReplyColors.options.isEmpty { - for option in availableReplyColors.options { - if let requiredChannelMinBoostLevel = option.value.requiredChannelMinBoostLevel { - nameColorsChannelMinRequiredBoostLevel[option.key] = requiredChannelMinBoostLevel - } - - if let parsedLight = PeerNameColors.Colors(colors: option.value.light.background) { - colors[option.key] = parsedLight - } - if let parsedDark = (option.value.dark?.background).flatMap(PeerNameColors.Colors.init(colors:)) { - darkColors[option.key] = parsedDark - } - - for option in availableReplyColors.options { - if !displayOrder.contains(option.key) { - displayOrder.append(option.key) - } - } - } - } else { - let defaultValue = PeerNameColors.defaultValue - colors = defaultValue.colors - darkColors = defaultValue.darkColors - displayOrder = defaultValue.displayOrder - } - - if !availableProfileColors.options.isEmpty { - for option in availableProfileColors.options { - if let parsedLight = PeerNameColors.Colors(colors: option.value.light.background) { - profileColors[option.key] = parsedLight - } - if let parsedDark = (option.value.dark?.background).flatMap(PeerNameColors.Colors.init(colors:)) { - profileDarkColors[option.key] = parsedDark - } - if let 
parsedPaletteLight = PeerNameColors.Colors(colors: option.value.light.palette) { - profilePaletteColors[option.key] = parsedPaletteLight - } - if let parsedPaletteDark = (option.value.dark?.palette).flatMap(PeerNameColors.Colors.init(colors:)) { - profilePaletteDarkColors[option.key] = parsedPaletteDark - } - if let parsedStoryLight = (option.value.light.stories).flatMap(PeerNameColors.Colors.init(colors:)) { - profileStoryColors[option.key] = parsedStoryLight - } - if let parsedStoryDark = (option.value.dark?.stories).flatMap(PeerNameColors.Colors.init(colors:)) { - profileStoryDarkColors[option.key] = parsedStoryDark - } - for option in availableProfileColors.options { - if !profileDisplayOrder.contains(option.key) { - profileDisplayOrder.append(option.key) - } - } - } - } - - return PeerNameColors( - colors: colors, - darkColors: darkColors, - displayOrder: displayOrder, - profileColors: profileColors, - profileDarkColors: profileDarkColors, - profilePaletteColors: profilePaletteColors, - profilePaletteDarkColors: profilePaletteDarkColors, - profileStoryColors: profileStoryColors, - profileStoryDarkColors: profileStoryDarkColors, - profileDisplayOrder: profileDisplayOrder, - nameColorsChannelMinRequiredBoostLevel: nameColorsChannelMinRequiredBoostLevel - ) - } - - public static func == (lhs: PeerNameColors, rhs: PeerNameColors) -> Bool { - if lhs.colors != rhs.colors { - return false - } - if lhs.darkColors != rhs.darkColors { - return false - } - if lhs.displayOrder != rhs.displayOrder { - return false - } - if lhs.profileColors != rhs.profileColors { - return false - } - if lhs.profileDarkColors != rhs.profileDarkColors { - return false - } - if lhs.profilePaletteColors != rhs.profilePaletteColors { - return false - } - if lhs.profilePaletteDarkColors != rhs.profilePaletteDarkColors { - return false - } - if lhs.profileStoryColors != rhs.profileStoryColors { - return false - } - if lhs.profileStoryDarkColors != rhs.profileStoryDarkColors { - return false - } - 
if lhs.profileDisplayOrder != rhs.profileDisplayOrder { - return false - } - return true - } -} diff --git a/submodules/AccountContext/Sources/ChatController.swift b/submodules/AccountContext/Sources/ChatController.swift index 8605c86494..a9c229ab10 100644 --- a/submodules/AccountContext/Sources/ChatController.swift +++ b/submodules/AccountContext/Sources/ChatController.swift @@ -53,6 +53,8 @@ public final class ChatMessageItemAssociatedData: Equatable { public let recommendedChannels: RecommendedChannels? public let audioTranscriptionTrial: AudioTranscription.TrialState public let chatThemes: [TelegramTheme] + public let deviceContactsNumbers: Set + public let isStandalone: Bool public init( automaticDownloadPeerType: MediaAutoDownloadPeerType, @@ -79,7 +81,9 @@ public final class ChatMessageItemAssociatedData: Equatable { maxReadStoryId: Int32? = nil, recommendedChannels: RecommendedChannels? = nil, audioTranscriptionTrial: AudioTranscription.TrialState = .defaultValue, - chatThemes: [TelegramTheme] = [] + chatThemes: [TelegramTheme] = [], + deviceContactsNumbers: Set = Set(), + isStandalone: Bool = false ) { self.automaticDownloadPeerType = automaticDownloadPeerType self.automaticDownloadPeerId = automaticDownloadPeerId @@ -106,6 +110,8 @@ public final class ChatMessageItemAssociatedData: Equatable { self.recommendedChannels = recommendedChannels self.audioTranscriptionTrial = audioTranscriptionTrial self.chatThemes = chatThemes + self.deviceContactsNumbers = deviceContactsNumbers + self.isStandalone = isStandalone } public static func == (lhs: ChatMessageItemAssociatedData, rhs: ChatMessageItemAssociatedData) -> Bool { @@ -181,6 +187,12 @@ public final class ChatMessageItemAssociatedData: Equatable { if lhs.chatThemes != rhs.chatThemes { return false } + if lhs.deviceContactsNumbers != rhs.deviceContactsNumbers { + return false + } + if lhs.isStandalone != rhs.isStandalone { + return false + } return true } } diff --git 
a/submodules/AccountContext/Sources/PeerNameColors.swift b/submodules/AccountContext/Sources/PeerNameColors.swift new file mode 100644 index 0000000000..e8bce68a49 --- /dev/null +++ b/submodules/AccountContext/Sources/PeerNameColors.swift @@ -0,0 +1,289 @@ +import Foundation +import UIKit +import TelegramCore + +private extension PeerNameColors.Colors { + init?(colors: EngineAvailableColorOptions.MultiColorPack) { + if colors.colors.isEmpty { + return nil + } + self.main = UIColor(rgb: colors.colors[0]) + if colors.colors.count > 1 { + self.secondary = UIColor(rgb: colors.colors[1]) + } else { + self.secondary = nil + } + if colors.colors.count > 2 { + self.tertiary = UIColor(rgb: colors.colors[2]) + } else { + self.tertiary = nil + } + } +} + +public class PeerNameColors: Equatable { + public enum Subject { + case background + case palette + case stories + } + + public struct Colors: Equatable { + public let main: UIColor + public let secondary: UIColor? + public let tertiary: UIColor? + + public init(main: UIColor, secondary: UIColor?, tertiary: UIColor?) 
{ + self.main = main + self.secondary = secondary + self.tertiary = tertiary + } + + public init(main: UIColor) { + self.main = main + self.secondary = nil + self.tertiary = nil + } + + public init?(colors: [UIColor]) { + guard let first = colors.first else { + return nil + } + self.main = first + if colors.count == 3 { + self.secondary = colors[1] + self.tertiary = colors[2] + } else if colors.count == 2, let second = colors.last { + self.secondary = second + self.tertiary = nil + } else { + self.secondary = nil + self.tertiary = nil + } + } + } + + public static var defaultSingleColors: [Int32: Colors] { + return [ + 0: Colors(main: UIColor(rgb: 0xcc5049)), + 1: Colors(main: UIColor(rgb: 0xd67722)), + 2: Colors(main: UIColor(rgb: 0x955cdb)), + 3: Colors(main: UIColor(rgb: 0x40a920)), + 4: Colors(main: UIColor(rgb: 0x309eba)), + 5: Colors(main: UIColor(rgb: 0x368ad1)), + 6: Colors(main: UIColor(rgb: 0xc7508b)) + ] + } + + public static var defaultValue: PeerNameColors { + return PeerNameColors( + colors: defaultSingleColors, + darkColors: [:], + displayOrder: [5, 3, 1, 0, 2, 4, 6], + profileColors: [:], + profileDarkColors: [:], + profilePaletteColors: [:], + profilePaletteDarkColors: [:], + profileStoryColors: [:], + profileStoryDarkColors: [:], + profileDisplayOrder: [], + nameColorsChannelMinRequiredBoostLevel: [:] + ) + } + + public let colors: [Int32: Colors] + public let darkColors: [Int32: Colors] + public let displayOrder: [Int32] + + public let profileColors: [Int32: Colors] + public let profileDarkColors: [Int32: Colors] + public let profilePaletteColors: [Int32: Colors] + public let profilePaletteDarkColors: [Int32: Colors] + public let profileStoryColors: [Int32: Colors] + public let profileStoryDarkColors: [Int32: Colors] + public let profileDisplayOrder: [Int32] + + public let nameColorsChannelMinRequiredBoostLevel: [Int32: Int32] + + public func get(_ color: PeerNameColor, dark: Bool = false) -> Colors { + if dark, let colors = 
self.darkColors[color.rawValue] { + return colors + } else if let colors = self.colors[color.rawValue] { + return colors + } else { + return PeerNameColors.defaultSingleColors[5]! + } + } + + public func getProfile(_ color: PeerNameColor, dark: Bool = false, subject: Subject = .background) -> Colors { + switch subject { + case .background: + if dark, let colors = self.profileDarkColors[color.rawValue] { + return colors + } else if let colors = self.profileColors[color.rawValue] { + return colors + } else { + return Colors(main: UIColor(rgb: 0xcc5049)) + } + case .palette: + if dark, let colors = self.profilePaletteDarkColors[color.rawValue] { + return colors + } else if let colors = self.profilePaletteColors[color.rawValue] { + return colors + } else { + return self.getProfile(color, dark: dark, subject: .background) + } + case .stories: + if dark, let colors = self.profileStoryDarkColors[color.rawValue] { + return colors + } else if let colors = self.profileStoryColors[color.rawValue] { + return colors + } else { + return self.getProfile(color, dark: dark, subject: .background) + } + } + } + + fileprivate init( + colors: [Int32: Colors], + darkColors: [Int32: Colors], + displayOrder: [Int32], + profileColors: [Int32: Colors], + profileDarkColors: [Int32: Colors], + profilePaletteColors: [Int32: Colors], + profilePaletteDarkColors: [Int32: Colors], + profileStoryColors: [Int32: Colors], + profileStoryDarkColors: [Int32: Colors], + profileDisplayOrder: [Int32], + nameColorsChannelMinRequiredBoostLevel: [Int32: Int32] + ) { + self.colors = colors + self.darkColors = darkColors + self.displayOrder = displayOrder + self.profileColors = profileColors + self.profileDarkColors = profileDarkColors + self.profilePaletteColors = profilePaletteColors + self.profilePaletteDarkColors = profilePaletteDarkColors + self.profileStoryColors = profileStoryColors + self.profileStoryDarkColors = profileStoryDarkColors + self.profileDisplayOrder = profileDisplayOrder + 
self.nameColorsChannelMinRequiredBoostLevel = nameColorsChannelMinRequiredBoostLevel + } + + public static func with(availableReplyColors: EngineAvailableColorOptions, availableProfileColors: EngineAvailableColorOptions) -> PeerNameColors { + var colors: [Int32: Colors] = [:] + var darkColors: [Int32: Colors] = [:] + var displayOrder: [Int32] = [] + var profileColors: [Int32: Colors] = [:] + var profileDarkColors: [Int32: Colors] = [:] + var profilePaletteColors: [Int32: Colors] = [:] + var profilePaletteDarkColors: [Int32: Colors] = [:] + var profileStoryColors: [Int32: Colors] = [:] + var profileStoryDarkColors: [Int32: Colors] = [:] + var profileDisplayOrder: [Int32] = [] + + var nameColorsChannelMinRequiredBoostLevel: [Int32: Int32] = [:] + + if !availableReplyColors.options.isEmpty { + for option in availableReplyColors.options { + if let requiredChannelMinBoostLevel = option.value.requiredChannelMinBoostLevel { + nameColorsChannelMinRequiredBoostLevel[option.key] = requiredChannelMinBoostLevel + } + + if let parsedLight = PeerNameColors.Colors(colors: option.value.light.background) { + colors[option.key] = parsedLight + } + if let parsedDark = (option.value.dark?.background).flatMap(PeerNameColors.Colors.init(colors:)) { + darkColors[option.key] = parsedDark + } + + for option in availableReplyColors.options { + if !displayOrder.contains(option.key) { + displayOrder.append(option.key) + } + } + } + } else { + let defaultValue = PeerNameColors.defaultValue + colors = defaultValue.colors + darkColors = defaultValue.darkColors + displayOrder = defaultValue.displayOrder + } + + if !availableProfileColors.options.isEmpty { + for option in availableProfileColors.options { + if let parsedLight = PeerNameColors.Colors(colors: option.value.light.background) { + profileColors[option.key] = parsedLight + } + if let parsedDark = (option.value.dark?.background).flatMap(PeerNameColors.Colors.init(colors:)) { + profileDarkColors[option.key] = parsedDark + } + if let 
parsedPaletteLight = PeerNameColors.Colors(colors: option.value.light.palette) { + profilePaletteColors[option.key] = parsedPaletteLight + } + if let parsedPaletteDark = (option.value.dark?.palette).flatMap(PeerNameColors.Colors.init(colors:)) { + profilePaletteDarkColors[option.key] = parsedPaletteDark + } + if let parsedStoryLight = (option.value.light.stories).flatMap(PeerNameColors.Colors.init(colors:)) { + profileStoryColors[option.key] = parsedStoryLight + } + if let parsedStoryDark = (option.value.dark?.stories).flatMap(PeerNameColors.Colors.init(colors:)) { + profileStoryDarkColors[option.key] = parsedStoryDark + } + for option in availableProfileColors.options { + if !profileDisplayOrder.contains(option.key) { + profileDisplayOrder.append(option.key) + } + } + } + } + + return PeerNameColors( + colors: colors, + darkColors: darkColors, + displayOrder: displayOrder, + profileColors: profileColors, + profileDarkColors: profileDarkColors, + profilePaletteColors: profilePaletteColors, + profilePaletteDarkColors: profilePaletteDarkColors, + profileStoryColors: profileStoryColors, + profileStoryDarkColors: profileStoryDarkColors, + profileDisplayOrder: profileDisplayOrder, + nameColorsChannelMinRequiredBoostLevel: nameColorsChannelMinRequiredBoostLevel + ) + } + + public static func == (lhs: PeerNameColors, rhs: PeerNameColors) -> Bool { + if lhs.colors != rhs.colors { + return false + } + if lhs.darkColors != rhs.darkColors { + return false + } + if lhs.displayOrder != rhs.displayOrder { + return false + } + if lhs.profileColors != rhs.profileColors { + return false + } + if lhs.profileDarkColors != rhs.profileDarkColors { + return false + } + if lhs.profilePaletteColors != rhs.profilePaletteColors { + return false + } + if lhs.profilePaletteDarkColors != rhs.profilePaletteDarkColors { + return false + } + if lhs.profileStoryColors != rhs.profileStoryColors { + return false + } + if lhs.profileStoryDarkColors != rhs.profileStoryDarkColors { + return false + } + 
if lhs.profileDisplayOrder != rhs.profileDisplayOrder { + return false + } + return true + } +} diff --git a/submodules/AccountContext/Sources/Premium.swift b/submodules/AccountContext/Sources/Premium.swift new file mode 100644 index 0000000000..f4dbb4f557 --- /dev/null +++ b/submodules/AccountContext/Sources/Premium.swift @@ -0,0 +1,184 @@ +import Foundation +import TelegramCore + +public enum PremiumIntroSource { + case settings + case stickers + case reactions + case ads + case upload + case groupsAndChannels + case pinnedChats + case publicLinks + case savedGifs + case savedStickers + case folders + case chatsPerFolder + case accounts + case appIcons + case about + case deeplink(String?) + case profile(EnginePeer.Id) + case emojiStatus(EnginePeer.Id, Int64, TelegramMediaFile?, LoadedStickerPack?) + case voiceToText + case fasterDownload + case translation + case stories + case storiesDownload + case storiesStealthMode + case storiesPermanentViews + case storiesFormatting + case storiesExpirationDurations + case storiesSuggestedReactions + case channelBoost(EnginePeer.Id) + case nameColor + case similarChannels + case wallpapers + case presence + case readTime +} + +public enum PremiumGiftSource: Equatable { + case profile + case attachMenu + case settings + case chatList + case channelBoost + case deeplink(String?) 
+} + +public enum PremiumDemoSubject { + case doubleLimits + case moreUpload + case fasterDownload + case voiceToText + case noAds + case uniqueReactions + case premiumStickers + case advancedChatManagement + case profileBadge + case animatedUserpics + case appIcons + case animatedEmoji + case emojiStatus + case translation + case stories + case colors + case wallpapers +} + +public enum PremiumLimitSubject { + case folders + case chatsPerFolder + case pins + case files + case accounts + case linksPerSharedFolder + case membershipInSharedFolders + case channels + case expiringStories + case storiesWeekly + case storiesMonthly + case storiesChannelBoost(peer: EnginePeer, isCurrent: Bool, level: Int32, currentLevelBoosts: Int32, nextLevelBoosts: Int32?, link: String?, myBoostCount: Int32, canBoostAgain: Bool) +} + +public enum PremiumPrivacySubject { + case presence + case readTime +} + +public struct PremiumConfiguration { + public static var defaultValue: PremiumConfiguration { + return PremiumConfiguration( + isPremiumDisabled: false, + showPremiumGiftInAttachMenu: false, + showPremiumGiftInTextField: false, + giveawayGiftsPurchaseAvailable: false, + boostsPerGiftCount: 3, + audioTransciptionTrialMaxDuration: 300, + audioTransciptionTrialCount: 2, + minChannelNameColorLevel: 1, + minChannelNameIconLevel: 4, + minChannelProfileColorLevel: 5, + minChannelProfileIconLevel: 7, + minChannelEmojiStatusLevel: 8, + minChannelWallpaperLevel: 9, + minChannelCustomWallpaperLevel: 10 + ) + } + + public let isPremiumDisabled: Bool + public let showPremiumGiftInAttachMenu: Bool + public let showPremiumGiftInTextField: Bool + public let giveawayGiftsPurchaseAvailable: Bool + public let boostsPerGiftCount: Int32 + public let audioTransciptionTrialMaxDuration: Int32 + public let audioTransciptionTrialCount: Int32 + public let minChannelNameColorLevel: Int32 + public let minChannelNameIconLevel: Int32 + public let minChannelProfileColorLevel: Int32 + public let 
minChannelProfileIconLevel: Int32 + public let minChannelEmojiStatusLevel: Int32 + public let minChannelWallpaperLevel: Int32 + public let minChannelCustomWallpaperLevel: Int32 + + fileprivate init( + isPremiumDisabled: Bool, + showPremiumGiftInAttachMenu: Bool, + showPremiumGiftInTextField: Bool, + giveawayGiftsPurchaseAvailable: Bool, + boostsPerGiftCount: Int32, + audioTransciptionTrialMaxDuration: Int32, + audioTransciptionTrialCount: Int32, + minChannelNameColorLevel: Int32, + minChannelNameIconLevel: Int32, + minChannelProfileColorLevel: Int32, + minChannelProfileIconLevel: Int32, + minChannelEmojiStatusLevel: Int32, + minChannelWallpaperLevel: Int32, + minChannelCustomWallpaperLevel: Int32 + + ) { + self.isPremiumDisabled = isPremiumDisabled + self.showPremiumGiftInAttachMenu = showPremiumGiftInAttachMenu + self.showPremiumGiftInTextField = showPremiumGiftInTextField + self.giveawayGiftsPurchaseAvailable = giveawayGiftsPurchaseAvailable + self.boostsPerGiftCount = boostsPerGiftCount + self.audioTransciptionTrialMaxDuration = audioTransciptionTrialMaxDuration + self.audioTransciptionTrialCount = audioTransciptionTrialCount + self.minChannelNameColorLevel = minChannelNameColorLevel + self.minChannelNameIconLevel = minChannelNameIconLevel + self.minChannelProfileColorLevel = minChannelProfileColorLevel + self.minChannelProfileIconLevel = minChannelProfileIconLevel + self.minChannelEmojiStatusLevel = minChannelEmojiStatusLevel + self.minChannelWallpaperLevel = minChannelWallpaperLevel + self.minChannelCustomWallpaperLevel = minChannelCustomWallpaperLevel + } + + public static func with(appConfiguration: AppConfiguration) -> PremiumConfiguration { + let defaultValue = self.defaultValue + if let data = appConfiguration.data { + func get(_ value: Any?) -> Int32? { + return (value as? Double).flatMap(Int32.init) + } + return PremiumConfiguration( + isPremiumDisabled: data["premium_purchase_blocked"] as? Bool ?? 
defaultValue.isPremiumDisabled, + showPremiumGiftInAttachMenu: data["premium_gift_attach_menu_icon"] as? Bool ?? defaultValue.showPremiumGiftInAttachMenu, + showPremiumGiftInTextField: data["premium_gift_text_field_icon"] as? Bool ?? defaultValue.showPremiumGiftInTextField, + giveawayGiftsPurchaseAvailable: data["giveaway_gifts_purchase_available"] as? Bool ?? defaultValue.giveawayGiftsPurchaseAvailable, + boostsPerGiftCount: get(data["boosts_per_sent_gift"]) ?? defaultValue.boostsPerGiftCount, + audioTransciptionTrialMaxDuration: get(data["transcribe_audio_trial_duration_max"]) ?? defaultValue.audioTransciptionTrialMaxDuration, + audioTransciptionTrialCount: get(data["transcribe_audio_trial_weekly_number"]) ?? defaultValue.audioTransciptionTrialCount, + minChannelNameColorLevel: get(data["channel_color_level_min"]) ?? defaultValue.minChannelNameColorLevel, + minChannelNameIconLevel: get(data["channel_bg_icon_level_min"]) ?? defaultValue.minChannelNameIconLevel, + minChannelProfileColorLevel: get(data["channel_profile_color_level_min"]) ?? defaultValue.minChannelProfileColorLevel, + minChannelProfileIconLevel: get(data["channel_profile_bg_icon_level_min"]) ?? defaultValue.minChannelProfileIconLevel, + minChannelEmojiStatusLevel: get(data["channel_emoji_status_level_min"]) ?? defaultValue.minChannelEmojiStatusLevel, + minChannelWallpaperLevel: get(data["channel_wallpaper_level_min"]) ?? defaultValue.minChannelWallpaperLevel, + minChannelCustomWallpaperLevel: get(data["channel_custom_wallpaper_level_min"]) ?? 
defaultValue.minChannelCustomWallpaperLevel + ) + } else { + return defaultValue + } + } +} diff --git a/submodules/AnimationUI/Sources/AnimationNode.swift b/submodules/AnimationUI/Sources/AnimationNode.swift index 99e485f8ce..8fc72d44c1 100644 --- a/submodules/AnimationUI/Sources/AnimationNode.swift +++ b/submodules/AnimationUI/Sources/AnimationNode.swift @@ -206,7 +206,7 @@ public final class AnimationNode: ASDisplayNode { } public func preferredSize() -> CGSize? { - if let animationView = animationView(), let animation = animationView.animation { + if let animationView = self.animationView(), let animation = animationView.animation { return CGSize(width: animation.size.width * self.scale, height: animation.size.height * self.scale) } else { return nil diff --git a/submodules/Camera/Sources/Camera.swift b/submodules/Camera/Sources/Camera.swift index 08f7aa34bf..6875719fa1 100644 --- a/submodules/Camera/Sources/Camera.swift +++ b/submodules/Camera/Sources/Camera.swift @@ -50,11 +50,11 @@ final class CameraDeviceContext { let input = CameraInput() let output: CameraOutput - init(session: CameraSession, exclusive: Bool, additional: Bool, ciContext: CIContext) { + init(session: CameraSession, exclusive: Bool, additional: Bool, ciContext: CIContext, use32BGRA: Bool = false) { self.session = session self.exclusive = exclusive self.additional = additional - self.output = CameraOutput(exclusive: exclusive, ciContext: ciContext) + self.output = CameraOutput(exclusive: exclusive, ciContext: ciContext, use32BGRA: use32BGRA) } func configure(position: Camera.Position, previewView: CameraSimplePreviewView?, audio: Bool, photo: Bool, metadata: Bool, preferWide: Bool = false, preferLowerFramerate: Bool = false) { @@ -288,7 +288,6 @@ private final class CameraContext { private var micLevelPeak: Int16 = 0 private var micLevelPeakCount = 0 - private var isDualCameraEnabled: Bool? 
public func setDualCameraEnabled(_ enabled: Bool, change: Bool = true) { guard enabled != self.isDualCameraEnabled else { @@ -303,10 +302,10 @@ private final class CameraContext { if enabled { self.configure { self.mainDeviceContext?.invalidate() - self.mainDeviceContext = CameraDeviceContext(session: self.session, exclusive: false, additional: false, ciContext: self.ciContext) + self.mainDeviceContext = CameraDeviceContext(session: self.session, exclusive: false, additional: false, ciContext: self.ciContext, use32BGRA: self.initialConfiguration.isRoundVideo) self.mainDeviceContext?.configure(position: .back, previewView: self.simplePreviewView, audio: self.initialConfiguration.audio, photo: self.initialConfiguration.photo, metadata: self.initialConfiguration.metadata) - self.additionalDeviceContext = CameraDeviceContext(session: self.session, exclusive: false, additional: true, ciContext: self.ciContext) + self.additionalDeviceContext = CameraDeviceContext(session: self.session, exclusive: false, additional: true, ciContext: self.ciContext, use32BGRA: self.initialConfiguration.isRoundVideo) self.additionalDeviceContext?.configure(position: .front, previewView: self.secondaryPreviewView, audio: false, photo: true, metadata: false) } self.mainDeviceContext?.output.processSampleBuffer = { [weak self] sampleBuffer, pixelBuffer, connection in @@ -343,7 +342,7 @@ private final class CameraContext { self.additionalDeviceContext?.invalidate() self.additionalDeviceContext = nil - self.mainDeviceContext = CameraDeviceContext(session: self.session, exclusive: true, additional: false, ciContext: self.ciContext) + self.mainDeviceContext = CameraDeviceContext(session: self.session, exclusive: true, additional: false, ciContext: self.ciContext, use32BGRA: false) self.mainDeviceContext?.configure(position: self.positionValue, previewView: self.simplePreviewView, audio: self.initialConfiguration.audio, photo: self.initialConfiguration.photo, metadata: 
self.initialConfiguration.metadata, preferWide: self.initialConfiguration.preferWide, preferLowerFramerate: self.initialConfiguration.preferLowerFramerate) } self.mainDeviceContext?.output.processSampleBuffer = { [weak self] sampleBuffer, pixelBuffer, connection in @@ -476,6 +475,10 @@ private final class CameraContext { self.mainDeviceContext?.device.setZoomDelta(zoomDelta) } + func rampZoom(_ zoomLevel: CGFloat, rate: CGFloat) { + self.mainDeviceContext?.device.rampZoom(zoomLevel, rate: rate) + } + func takePhoto() -> Signal { guard let mainDeviceContext = self.mainDeviceContext else { return .complete() @@ -502,22 +505,26 @@ private final class CameraContext { } } - public func startRecording() -> Signal { + public func startRecording() -> Signal { guard let mainDeviceContext = self.mainDeviceContext else { return .complete() } mainDeviceContext.device.setTorchMode(self._flashMode) let orientation = self.simplePreviewView?.videoPreviewLayer.connection?.videoOrientation ?? .portrait - if let additionalDeviceContext = self.additionalDeviceContext { - return combineLatest( - mainDeviceContext.output.startRecording(isDualCamera: true, position: self.positionValue, orientation: orientation), - additionalDeviceContext.output.startRecording(isDualCamera: true, orientation: .portrait) - ) |> map { value, _ in - return value - } + if self.initialConfiguration.isRoundVideo { + return mainDeviceContext.output.startRecording(mode: .roundVideo, orientation: .portrait, additionalOutput: self.additionalDeviceContext?.output) } else { - return mainDeviceContext.output.startRecording(isDualCamera: false, orientation: orientation) + if let additionalDeviceContext = self.additionalDeviceContext { + return combineLatest( + mainDeviceContext.output.startRecording(mode: .dualCamera, position: self.positionValue, orientation: orientation), + additionalDeviceContext.output.startRecording(mode: .dualCamera, orientation: .portrait) + ) |> map { value, _ in + return value + } + } else { + 
return mainDeviceContext.output.startRecording(mode: .default, orientation: orientation) + } } } @@ -525,41 +532,12 @@ private final class CameraContext { guard let mainDeviceContext = self.mainDeviceContext else { return .complete() } - if let additionalDeviceContext = self.additionalDeviceContext { - return combineLatest( - mainDeviceContext.output.stopRecording(), - additionalDeviceContext.output.stopRecording() - ) |> mapToSignal { main, additional in - if case let .finished(mainResult, _, duration, positionChangeTimestamps, _) = main, case let .finished(additionalResult, _, _, _, _) = additional { - var additionalThumbnailImage = additionalResult.thumbnail - if let cgImage = additionalResult.thumbnail.cgImage { - additionalThumbnailImage = UIImage(cgImage: cgImage, scale: 1.0, orientation: .leftMirrored) - } - - return .single( - .finished( - main: mainResult, - additional: VideoCaptureResult.Result(path: additionalResult.path, thumbnail: additionalThumbnailImage, isMirrored: true, dimensions: additionalResult.dimensions), - duration: duration, - positionChangeTimestamps: positionChangeTimestamps, - captureTimestamp: CACurrentMediaTime() - ) - ) - } else { - return .complete() - } - } - } else { - let isMirrored = self.positionValue == .front + if self.initialConfiguration.isRoundVideo { return mainDeviceContext.output.stopRecording() |> map { result -> VideoCaptureResult in if case let .finished(mainResult, _, duration, positionChangeTimestamps, captureTimestamp) = result { - var thumbnailImage = mainResult.thumbnail - if isMirrored, let cgImage = thumbnailImage.cgImage { - thumbnailImage = UIImage(cgImage: cgImage, scale: 1.0, orientation: .leftMirrored) - } return .finished( - main: VideoCaptureResult.Result(path: mainResult.path, thumbnail: thumbnailImage, isMirrored: isMirrored, dimensions: mainResult.dimensions), + main: mainResult, additional: nil, duration: duration, positionChangeTimestamps: positionChangeTimestamps, @@ -569,6 +547,52 @@ private final 
class CameraContext { return result } } + } else { + if let additionalDeviceContext = self.additionalDeviceContext { + return combineLatest( + mainDeviceContext.output.stopRecording(), + additionalDeviceContext.output.stopRecording() + ) |> mapToSignal { main, additional in + if case let .finished(mainResult, _, duration, positionChangeTimestamps, _) = main, case let .finished(additionalResult, _, _, _, _) = additional { + var additionalThumbnailImage = additionalResult.thumbnail + if let cgImage = additionalResult.thumbnail.cgImage { + additionalThumbnailImage = UIImage(cgImage: cgImage, scale: 1.0, orientation: .leftMirrored) + } + + return .single( + .finished( + main: mainResult, + additional: VideoCaptureResult.Result(path: additionalResult.path, thumbnail: additionalThumbnailImage, isMirrored: true, dimensions: additionalResult.dimensions), + duration: duration, + positionChangeTimestamps: positionChangeTimestamps, + captureTimestamp: CACurrentMediaTime() + ) + ) + } else { + return .complete() + } + } + } else { + let isMirrored = self.positionValue == .front + return mainDeviceContext.output.stopRecording() + |> map { result -> VideoCaptureResult in + if case let .finished(mainResult, _, duration, positionChangeTimestamps, captureTimestamp) = result { + var thumbnailImage = mainResult.thumbnail + if isMirrored, let cgImage = thumbnailImage.cgImage { + thumbnailImage = UIImage(cgImage: cgImage, scale: 1.0, orientation: .leftMirrored) + } + return .finished( + main: VideoCaptureResult.Result(path: mainResult.path, thumbnail: thumbnailImage, isMirrored: isMirrored, dimensions: mainResult.dimensions), + additional: nil, + duration: duration, + positionChangeTimestamps: positionChangeTimestamps, + captureTimestamp: captureTimestamp + ) + } else { + return result + } + } + } } } @@ -580,6 +604,10 @@ private final class CameraContext { return self.audioLevelPipe.signal() } + var transitionImage: Signal { + return 
.single(self.mainDeviceContext?.output.transitionImage) + } + @objc private func sessionInterruptionEnded(notification: NSNotification) { } @@ -619,8 +647,9 @@ public final class Camera { let preferWide: Bool let preferLowerFramerate: Bool let reportAudioLevel: Bool + let isRoundVideo: Bool - public init(preset: Preset, position: Position, isDualEnabled: Bool = false, audio: Bool, photo: Bool, metadata: Bool, preferWide: Bool = false, preferLowerFramerate: Bool = false, reportAudioLevel: Bool = false) { + public init(preset: Preset, position: Position, isDualEnabled: Bool = false, audio: Bool, photo: Bool, metadata: Bool, preferWide: Bool = false, preferLowerFramerate: Bool = false, reportAudioLevel: Bool = false, isRoundVideo: Bool = false) { self.preset = preset self.position = position self.isDualEnabled = isDualEnabled @@ -630,6 +659,7 @@ public final class Camera { self.preferWide = preferWide self.preferLowerFramerate = preferLowerFramerate self.reportAudioLevel = reportAudioLevel + self.isRoundVideo = isRoundVideo } } @@ -749,7 +779,7 @@ public final class Camera { } } - public func startRecording() -> Signal { + public func startRecording() -> Signal { return Signal { subscriber in let disposable = MetaDisposable() self.queue.async { @@ -822,6 +852,14 @@ public final class Camera { } } + public func rampZoom(_ zoomLevel: CGFloat, rate: CGFloat) { + self.queue.async { + if let context = self.contextRef?.takeUnretainedValue() { + context.rampZoom(zoomLevel, rate: rate) + } + } + } + public func setTorchActive(_ active: Bool) { self.queue.async { if let context = self.contextRef?.takeUnretainedValue() { @@ -935,6 +973,20 @@ public final class Camera { } } + public var transitionImage: Signal { + return Signal { subscriber in + let disposable = MetaDisposable() + self.queue.async { + if let context = self.contextRef?.takeUnretainedValue() { + disposable.set(context.transitionImage.start(next: { codes in + subscriber.putNext(codes) + })) + } + } + return 
disposable + } + } + public enum ModeChange: Equatable { case none case position @@ -972,3 +1024,8 @@ public final class CameraHolder { self.previewView = previewView } } + +public struct CameraRecordingData { + public let duration: Double + public let filePath: String +} diff --git a/submodules/Camera/Sources/CameraDevice.swift b/submodules/Camera/Sources/CameraDevice.swift index 691f9c87bf..139d429b50 100644 --- a/submodules/Camera/Sources/CameraDevice.swift +++ b/submodules/Camera/Sources/CameraDevice.swift @@ -313,6 +313,15 @@ final class CameraDevice { } } + func rampZoom(_ zoomLevel: CGFloat, rate: CGFloat) { + guard let device = self.videoDevice else { + return + } + self.transaction(device) { device in + device.ramp(toVideoZoomFactor: zoomLevel, withRate: Float(rate)) + } + } + func resetZoom(neutral: Bool = true) { guard let device = self.videoDevice else { return diff --git a/submodules/Camera/Sources/CameraOutput.swift b/submodules/Camera/Sources/CameraOutput.swift index 4635a36c51..6e016ac6c0 100644 --- a/submodules/Camera/Sources/CameraOutput.swift +++ b/submodules/Camera/Sources/CameraOutput.swift @@ -80,6 +80,7 @@ public struct CameraCode: Equatable { final class CameraOutput: NSObject { let exclusive: Bool let ciContext: CIContext + let isVideoMessage: Bool let photoOutput = AVCapturePhotoOutput() let videoOutput = AVCaptureVideoDataOutput() @@ -89,6 +90,8 @@ final class CameraOutput: NSObject { private var photoConnection: AVCaptureConnection? private var videoConnection: AVCaptureConnection? private var previewConnection: AVCaptureConnection? + + private var roundVideoFilter: CameraRoundVideoFilter? private let queue = DispatchQueue(label: "") private let metadataQueue = DispatchQueue(label: "") @@ -99,10 +102,11 @@ final class CameraOutput: NSObject { var processSampleBuffer: ((CMSampleBuffer, CVImageBuffer, AVCaptureConnection) -> Void)? var processAudioBuffer: ((CMSampleBuffer) -> Void)? var processCodes: (([CameraCode]) -> Void)? 
- - init(exclusive: Bool, ciContext: CIContext) { + + init(exclusive: Bool, ciContext: CIContext, use32BGRA: Bool = false) { self.exclusive = exclusive self.ciContext = ciContext + self.isVideoMessage = use32BGRA super.init() @@ -111,7 +115,7 @@ final class CameraOutput: NSObject { } self.videoOutput.alwaysDiscardsLateVideoFrames = false - self.videoOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey: kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange] as [String : Any] + self.videoOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey: use32BGRA ? kCVPixelFormatType_32BGRA : kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange] as [String : Any] } deinit { @@ -224,6 +228,7 @@ final class CameraOutput: NSObject { if let videoDataOutputConnection = self.videoOutput.connection(with: .video) { if videoDataOutputConnection.isVideoStabilizationSupported { videoDataOutputConnection.preferredVideoStabilizationMode = .standard +// videoDataOutputConnection.preferredVideoStabilizationMode = self.isVideoMessage ? .cinematic : .standard } } } @@ -282,69 +287,97 @@ final class CameraOutput: NSObject { return self.videoRecorder != nil } + enum RecorderMode { + case `default` + case roundVideo + case dualCamera + } + + private var currentMode: RecorderMode = .default private var recordingCompletionPipe = ValuePipe() - func startRecording(isDualCamera: Bool, position: Camera.Position? = nil, orientation: AVCaptureVideoOrientation) -> Signal { + func startRecording(mode: RecorderMode, position: Camera.Position? = nil, orientation: AVCaptureVideoOrientation, additionalOutput: CameraOutput? 
= nil) -> Signal { guard self.videoRecorder == nil else { return .complete() } + self.currentMode = mode + let codecType: AVVideoCodecType - if hasHEVCHardwareEncoder { - codecType = .hevc - } else { + if case .roundVideo = mode { codecType = .h264 + } else { + if hasHEVCHardwareEncoder { + codecType = .hevc + } else { + codecType = .h264 + } } - guard let videoSettings = self.videoOutput.recommendedVideoSettings(forVideoCodecType: codecType, assetWriterOutputFileType: .mp4) else { + guard var videoSettings = self.videoOutput.recommendedVideoSettings(forVideoCodecType: codecType, assetWriterOutputFileType: .mp4) else { return .complete() } - let audioSettings = self.audioOutput.recommendedAudioSettingsForAssetWriter(writingTo: .mp4) ?? [:] var dimensions: CGSize = CGSize(width: 1080, height: 1920) - if orientation == .landscapeLeft { - dimensions = CGSize(width: 1920, height: 1080) - } else if orientation == .landscapeRight { + if orientation == .landscapeLeft || orientation == .landscapeRight { dimensions = CGSize(width: 1920, height: 1080) } + var orientation = orientation + if case .roundVideo = mode { + videoSettings[AVVideoWidthKey] = 400 + videoSettings[AVVideoHeightKey] = 400 + dimensions = CGSize(width: 400, height: 400) + orientation = .landscapeRight + } + + let audioSettings = self.audioOutput.recommendedAudioSettingsForAssetWriter(writingTo: .mp4) ?? 
[:] let outputFileName = NSUUID().uuidString let outputFilePath = NSTemporaryDirectory() + outputFileName + ".mp4" let outputFileURL = URL(fileURLWithPath: outputFilePath) - let videoRecorder = VideoRecorder(configuration: VideoRecorder.Configuration(videoSettings: videoSettings, audioSettings: audioSettings), ciContext: self.ciContext, orientation: orientation, fileUrl: outputFileURL, completion: { [weak self] result in - guard let self else { - return - } - if case let .success(transitionImage, duration, positionChangeTimestamps) = result { - self.recordingCompletionPipe.putNext( - .finished( - main: VideoCaptureResult.Result( - path: outputFilePath, - thumbnail: transitionImage ?? UIImage(), - isMirrored: false, - dimensions: dimensions - ), - additional: nil, - duration: duration, - positionChangeTimestamps: positionChangeTimestamps.map { ($0 == .front, $1) }, - captureTimestamp: CACurrentMediaTime() + let videoRecorder = VideoRecorder( + configuration: VideoRecorder.Configuration(videoSettings: videoSettings, audioSettings: audioSettings), + ciContext: self.ciContext, + orientation: orientation, + fileUrl: outputFileURL, + completion: { [weak self] result in + guard let self else { + return + } + if case let .success(transitionImage, duration, positionChangeTimestamps) = result { + self.recordingCompletionPipe.putNext( + .finished( + main: VideoCaptureResult.Result( + path: outputFilePath, + thumbnail: transitionImage ?? 
UIImage(), + isMirrored: false, + dimensions: dimensions + ), + additional: nil, + duration: duration, + positionChangeTimestamps: positionChangeTimestamps.map { ($0 == .front, $1) }, + captureTimestamp: CACurrentMediaTime() + ) ) - ) - } else { - self.recordingCompletionPipe.putNext(.failed) + } else { + self.recordingCompletionPipe.putNext(.failed) + } } - }) + ) videoRecorder?.start() self.videoRecorder = videoRecorder - if isDualCamera, let position { + if case .dualCamera = mode, let position { videoRecorder?.markPositionChange(position: position, time: .zero) + } else if case .roundVideo = mode { + additionalOutput?.masterOutput = self } return Signal { subscriber in - let timer = SwiftSignalKit.Timer(timeout: 0.1, repeat: true, completion: { [weak videoRecorder] in - subscriber.putNext(videoRecorder?.duration ?? 0.0) + let timer = SwiftSignalKit.Timer(timeout: 0.02, repeat: true, completion: { [weak videoRecorder] in + let recordingData = CameraRecordingData(duration: videoRecorder?.duration ?? 0.0, filePath: outputFilePath) + subscriber.putNext(recordingData) }, queue: Queue.mainQueue()) timer.start() @@ -367,7 +400,90 @@ final class CameraOutput: NSObject { } } + var transitionImage: UIImage? { + return self.videoRecorder?.transitionImage + } + + private weak var masterOutput: CameraOutput? 
+ func processVideoRecording(_ sampleBuffer: CMSampleBuffer, fromAdditionalOutput: Bool) { + if let videoRecorder = self.videoRecorder, videoRecorder.isRecording { + if case .roundVideo = self.currentMode { + if let processedSampleBuffer = self.processRoundVideoSampleBuffer(sampleBuffer, mirror: fromAdditionalOutput) { + if case .front = self.currentPosition { + if fromAdditionalOutput { + videoRecorder.appendSampleBuffer(processedSampleBuffer) + } + } else { + if !fromAdditionalOutput { + videoRecorder.appendSampleBuffer(processedSampleBuffer) + } + } + } else { + videoRecorder.appendSampleBuffer(sampleBuffer) + } + } else { + videoRecorder.appendSampleBuffer(sampleBuffer) + } + } + } + + private func processRoundVideoSampleBuffer(_ sampleBuffer: CMSampleBuffer, mirror: Bool) -> CMSampleBuffer? { + guard let videoPixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer), let formatDescription = CMSampleBufferGetFormatDescription(sampleBuffer) else { + return nil + } + let mediaSubType = CMFormatDescriptionGetMediaSubType(formatDescription) + let extensions = CMFormatDescriptionGetExtensions(formatDescription) as! [String: Any] + + var updatedExtensions = extensions + updatedExtensions["CVBytesPerRow"] = 400 * 4 + + var newFormatDescription: CMFormatDescription? 
+ var status = CMVideoFormatDescriptionCreate(allocator: nil, codecType: mediaSubType, width: 400, height: 400, extensions: updatedExtensions as CFDictionary, formatDescriptionOut: &newFormatDescription) + guard status == noErr, let newFormatDescription else { + return nil + } + + let filter: CameraRoundVideoFilter + if let current = self.roundVideoFilter { + filter = current + } else { + filter = CameraRoundVideoFilter(ciContext: self.ciContext) + self.roundVideoFilter = filter + } + if !filter.isPrepared { + filter.prepare(with: newFormatDescription, outputRetainedBufferCountHint: 3) + } + guard let newPixelBuffer = filter.render(pixelBuffer: videoPixelBuffer, mirror: mirror) else { + return nil + } + + var sampleTimingInfo: CMSampleTimingInfo = .invalid + CMSampleBufferGetSampleTimingInfo(sampleBuffer, at: 0, timingInfoOut: &sampleTimingInfo) + + var newSampleBuffer: CMSampleBuffer? + status = CMSampleBufferCreateForImageBuffer( + allocator: kCFAllocatorDefault, + imageBuffer: newPixelBuffer, + dataReady: true, + makeDataReadyCallback: nil, + refcon: nil, + formatDescription: newFormatDescription, + sampleTiming: &sampleTimingInfo, + sampleBufferOut: &newSampleBuffer + ) + + if status == noErr, let newSampleBuffer { + return newSampleBuffer + } + return nil + } + + private var currentPosition: Camera.Position = .front + private var lastSwitchTimestamp: Double = 0.0 + func markPositionChange(position: Camera.Position) { + self.currentPosition = position + if let videoRecorder = self.videoRecorder { videoRecorder.markPositionChange(position: position) } @@ -386,8 +502,10 @@ extension CameraOutput: AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureA // self.processAudioBuffer?(sampleBuffer) } - if let videoRecorder = self.videoRecorder, videoRecorder.isRecording { - videoRecorder.appendSampleBuffer(sampleBuffer) + if let masterOutput = self.masterOutput { + masterOutput.processVideoRecording(sampleBuffer, fromAdditionalOutput: true) + } else { + 
self.processVideoRecording(sampleBuffer, fromAdditionalOutput: false) } } diff --git a/submodules/Camera/Sources/CameraRoundVideoFilter.swift b/submodules/Camera/Sources/CameraRoundVideoFilter.swift new file mode 100644 index 0000000000..801f853291 --- /dev/null +++ b/submodules/Camera/Sources/CameraRoundVideoFilter.swift @@ -0,0 +1,188 @@ +import Foundation +import UIKit +import CoreImage +import CoreMedia +import CoreVideo +import Metal +import Display + +func allocateOutputBufferPool(with inputFormatDescription: CMFormatDescription, outputRetainedBufferCountHint: Int) -> ( + outputBufferPool: CVPixelBufferPool?, + outputColorSpace: CGColorSpace?, + outputFormatDescription: CMFormatDescription?) { + let inputMediaSubType = CMFormatDescriptionGetMediaSubType(inputFormatDescription) + if inputMediaSubType != kCVPixelFormatType_32BGRA { + return (nil, nil, nil) + } + + let inputDimensions = CMVideoFormatDescriptionGetDimensions(inputFormatDescription) + var pixelBufferAttributes: [String: Any] = [ + kCVPixelBufferPixelFormatTypeKey as String: UInt(inputMediaSubType), + kCVPixelBufferWidthKey as String: Int(inputDimensions.width), + kCVPixelBufferHeightKey as String: Int(inputDimensions.height), + kCVPixelBufferIOSurfacePropertiesKey as String: [:] as NSDictionary + ] + + var cgColorSpace = CGColorSpaceCreateDeviceRGB() + if let inputFormatDescriptionExtension = CMFormatDescriptionGetExtensions(inputFormatDescription) as Dictionary? 
{ + let colorPrimaries = inputFormatDescriptionExtension[kCVImageBufferColorPrimariesKey] + + if let colorPrimaries = colorPrimaries { + var colorSpaceProperties: [String: AnyObject] = [kCVImageBufferColorPrimariesKey as String: colorPrimaries] + + if let yCbCrMatrix = inputFormatDescriptionExtension[kCVImageBufferYCbCrMatrixKey] { + colorSpaceProperties[kCVImageBufferYCbCrMatrixKey as String] = yCbCrMatrix + } + + if let transferFunction = inputFormatDescriptionExtension[kCVImageBufferTransferFunctionKey] { + colorSpaceProperties[kCVImageBufferTransferFunctionKey as String] = transferFunction + } + + pixelBufferAttributes[kCVBufferPropagatedAttachmentsKey as String] = colorSpaceProperties + } + + if let cvColorspace = inputFormatDescriptionExtension[kCVImageBufferCGColorSpaceKey] { + cgColorSpace = cvColorspace as! CGColorSpace + } else if (colorPrimaries as? String) == (kCVImageBufferColorPrimaries_P3_D65 as String) { + cgColorSpace = CGColorSpace(name: CGColorSpace.displayP3)! + } + } + + let poolAttributes = [kCVPixelBufferPoolMinimumBufferCountKey as String: outputRetainedBufferCountHint] + var cvPixelBufferPool: CVPixelBufferPool? + CVPixelBufferPoolCreate(kCFAllocatorDefault, poolAttributes as NSDictionary?, pixelBufferAttributes as NSDictionary?, &cvPixelBufferPool) + guard let pixelBufferPool = cvPixelBufferPool else { + return (nil, nil, nil) + } + + preallocateBuffers(pool: pixelBufferPool, allocationThreshold: outputRetainedBufferCountHint) + + var pixelBuffer: CVPixelBuffer? + var outputFormatDescription: CMFormatDescription? 
+ let auxAttributes = [kCVPixelBufferPoolAllocationThresholdKey as String: outputRetainedBufferCountHint] as NSDictionary + CVPixelBufferPoolCreatePixelBufferWithAuxAttributes(kCFAllocatorDefault, pixelBufferPool, auxAttributes, &pixelBuffer) + if let pixelBuffer = pixelBuffer { + CMVideoFormatDescriptionCreateForImageBuffer(allocator: kCFAllocatorDefault, + imageBuffer: pixelBuffer, + formatDescriptionOut: &outputFormatDescription) + } + pixelBuffer = nil + + return (pixelBufferPool, cgColorSpace, outputFormatDescription) +} + +private func preallocateBuffers(pool: CVPixelBufferPool, allocationThreshold: Int) { + var pixelBuffers = [CVPixelBuffer]() + var error: CVReturn = kCVReturnSuccess + let auxAttributes = [kCVPixelBufferPoolAllocationThresholdKey as String: allocationThreshold] as NSDictionary + var pixelBuffer: CVPixelBuffer? + while error == kCVReturnSuccess { + error = CVPixelBufferPoolCreatePixelBufferWithAuxAttributes(kCFAllocatorDefault, pool, auxAttributes, &pixelBuffer) + if let pixelBuffer = pixelBuffer { + pixelBuffers.append(pixelBuffer) + } + pixelBuffer = nil + } + pixelBuffers.removeAll() +} + +class CameraRoundVideoFilter { + private let ciContext: CIContext + + private var resizeFilter: CIFilter? + private var compositeFilter: CIFilter? + + private var outputColorSpace: CGColorSpace? + private var outputPixelBufferPool: CVPixelBufferPool? + private(set) var outputFormatDescription: CMFormatDescription? + private(set) var inputFormatDescription: CMFormatDescription? 
+ + private(set) var isPrepared = false + + let semaphore = DispatchSemaphore(value: 1) + + init(ciContext: CIContext) { + self.ciContext = ciContext + } + + func prepare(with formatDescription: CMFormatDescription, outputRetainedBufferCountHint: Int) { + self.reset() + + (self.outputPixelBufferPool, self.outputColorSpace, self.outputFormatDescription) = allocateOutputBufferPool(with: formatDescription, outputRetainedBufferCountHint: outputRetainedBufferCountHint) + if self.outputPixelBufferPool == nil { + return + } + self.inputFormatDescription = formatDescription + + let diameter: CGFloat = 400.0 + let circleImage = generateImage(CGSize(width: diameter, height: diameter), opaque: false, scale: 1.0, rotatedContext: { size, context in + let bounds = CGRect(origin: .zero, size: size) + context.clear(bounds) + context.setFillColor(UIColor.white.cgColor) + context.fill(bounds) + context.setBlendMode(.clear) + context.fillEllipse(in: bounds) + })! + + self.resizeFilter = CIFilter(name: "CILanczosScaleTransform") + + self.compositeFilter = CIFilter(name: "CISourceOverCompositing") + self.compositeFilter?.setValue(CIImage(image: circleImage), forKey: kCIInputImageKey) + + self.isPrepared = true + } + + func reset() { + self.resizeFilter = nil + self.compositeFilter = nil + self.outputColorSpace = nil + self.outputPixelBufferPool = nil + self.outputFormatDescription = nil + self.inputFormatDescription = nil + self.isPrepared = false + } + + func render(pixelBuffer: CVPixelBuffer, mirror: Bool) -> CVPixelBuffer? { + self.semaphore.wait() + + guard let resizeFilter = self.resizeFilter, let compositeFilter = self.compositeFilter, self.isPrepared else { + return nil + } + + var sourceImage = CIImage(cvImageBuffer: pixelBuffer) + sourceImage = sourceImage.oriented(mirror ? 
.leftMirrored : .right) + let scale = 400.0 / min(sourceImage.extent.width, sourceImage.extent.height) + + resizeFilter.setValue(sourceImage, forKey: kCIInputImageKey) + resizeFilter.setValue(scale, forKey: kCIInputScaleKey) + + if let resizedImage = resizeFilter.outputImage { + sourceImage = resizedImage + } else { + sourceImage = sourceImage.transformed(by: CGAffineTransformMakeScale(scale, scale), highQualityDownsample: true) + } + + sourceImage = sourceImage.transformed(by: CGAffineTransformMakeTranslation(0.0, -(sourceImage.extent.height - sourceImage.extent.width) / 2.0)) + + sourceImage = sourceImage.cropped(to: CGRect(x: 0.0, y: 0.0, width: sourceImage.extent.width, height: sourceImage.extent.width)) + + compositeFilter.setValue(sourceImage, forKey: kCIInputBackgroundImageKey) + + let finalImage = compositeFilter.outputImage + guard let finalImage else { + return nil + } + + var pbuf: CVPixelBuffer? + CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, outputPixelBufferPool!, &pbuf) + guard let outputPixelBuffer = pbuf else { + return nil + } + + self.ciContext.render(finalImage, to: outputPixelBuffer, bounds: CGRect(origin: .zero, size: CGSize(width: 400, height: 400)), colorSpace: outputColorSpace) + + self.semaphore.signal() + + return outputPixelBuffer + } +} diff --git a/submodules/Camera/Sources/VideoRecorder.swift b/submodules/Camera/Sources/VideoRecorder.swift index 78dcff37a8..7f9bcb05f3 100644 --- a/submodules/Camera/Sources/VideoRecorder.swift +++ b/submodules/Camera/Sources/VideoRecorder.swift @@ -35,7 +35,7 @@ private final class VideoRecorderImpl { private var audioInput: AVAssetWriterInput? private let ciContext: CIContext - private var transitionImage: UIImage? + fileprivate var transitionImage: UIImage? private var savedTransitionImage = false private var pendingAudioSampleBuffers: [CMSampleBuffer] = [] @@ -519,7 +519,7 @@ public final class VideoRecorder { func markPositionChange(position: Camera.Position, time: CMTime? 
= nil) { self.impl.markPositionChange(position: position, time: time) } - + func appendSampleBuffer(_ sampleBuffer: CMSampleBuffer) { guard let formatDescriptor = CMSampleBufferGetFormatDescription(sampleBuffer) else { return @@ -533,4 +533,8 @@ public final class VideoRecorder { } } } + + var transitionImage: UIImage? { + return self.impl.transitionImage + } } diff --git a/submodules/ChatPresentationInterfaceState/Sources/ChatPresentationInterfaceState.swift b/submodules/ChatPresentationInterfaceState/Sources/ChatPresentationInterfaceState.swift index 2ad9cc52e2..aac6940a51 100644 --- a/submodules/ChatPresentationInterfaceState/Sources/ChatPresentationInterfaceState.swift +++ b/submodules/ChatPresentationInterfaceState/Sources/ChatPresentationInterfaceState.swift @@ -1,5 +1,6 @@ import Foundation import UIKit +import SwiftSignalKit import Postbox import TelegramCore import TelegramPresentationData @@ -146,34 +147,87 @@ public struct ChatSearchData: Equatable { } } -public final class ChatRecordedMediaPreview: Equatable { - public let resource: TelegramMediaResource - public let fileSize: Int32 - public let duration: Int32 - public let waveform: AudioWaveform - - public init(resource: TelegramMediaResource, duration: Int32, fileSize: Int32, waveform: AudioWaveform) { - self.resource = resource - self.duration = duration - self.fileSize = fileSize - self.waveform = waveform +public enum ChatRecordedMediaPreview: Equatable { + public class Audio: Equatable { + public let resource: TelegramMediaResource + public let fileSize: Int32 + public let duration: Int32 + public let waveform: AudioWaveform + + public init( + resource: TelegramMediaResource, + fileSize: Int32, + duration: Int32, + waveform: AudioWaveform + ) { + self.resource = resource + self.fileSize = fileSize + self.duration = duration + self.waveform = waveform + } + + public static func ==(lhs: Audio, rhs: Audio) -> Bool { + if !lhs.resource.isEqual(to: rhs.resource) { + return false + } + if lhs.duration 
!= rhs.duration { + return false + } + if lhs.fileSize != rhs.fileSize { + return false + } + if lhs.waveform != rhs.waveform { + return false + } + return true + } } - public static func ==(lhs: ChatRecordedMediaPreview, rhs: ChatRecordedMediaPreview) -> Bool { - if !lhs.resource.isEqual(to: rhs.resource) { - return false + public class Video: Equatable { + public class Control { + public let updateTrimRange: (Double, Double, Bool, Bool) -> Void + + public init(updateTrimRange: @escaping (Double, Double, Bool, Bool) -> Void) { + self.updateTrimRange = updateTrimRange + } } - if lhs.duration != rhs.duration { - return false + + public let duration: Int32 + public let frames: [UIImage] + public let framesUpdateTimestamp: Double + public let trimRange: Range? + public let control: Control + + public init( + duration: Int32, + frames: [UIImage], + framesUpdateTimestamp: Double, + trimRange: Range?, + control: Control + ) { + self.duration = duration + self.frames = frames + self.framesUpdateTimestamp = framesUpdateTimestamp + self.trimRange = trimRange + self.control = control } - if lhs.fileSize != rhs.fileSize { - return false + + public static func ==(lhs: Video, rhs: Video) -> Bool { + if lhs.duration != rhs.duration { + return false + } + if lhs.framesUpdateTimestamp != rhs.framesUpdateTimestamp { + return false + } + if lhs.trimRange != rhs.trimRange { + return false + } + return true } - if lhs.waveform != rhs.waveform { - return false - } - return true } + + case audio(Audio) + case video(Video) } public struct ChatContactStatus: Equatable { diff --git a/submodules/ContextUI/Sources/ContextController.swift b/submodules/ContextUI/Sources/ContextController.swift index c922edd917..91a047afe8 100644 --- a/submodules/ContextUI/Sources/ContextController.swift +++ b/submodules/ContextUI/Sources/ContextController.swift @@ -2142,6 +2142,7 @@ public enum ContextActionsHorizontalAlignment { } public protocol ContextExtractedContentSource: AnyObject { + var 
initialAppearanceOffset: CGPoint { get } var centerVertically: Bool { get } var keepInPlace: Bool { get } var ignoreContentTouches: Bool { get } @@ -2155,6 +2156,10 @@ public protocol ContextExtractedContentSource: AnyObject { } public extension ContextExtractedContentSource { + var initialAppearanceOffset: CGPoint { + return .zero + } + var centerVertically: Bool { return false } diff --git a/submodules/ContextUI/Sources/ContextControllerExtractedPresentationNode.swift b/submodules/ContextUI/Sources/ContextControllerExtractedPresentationNode.swift index 12f32c991f..75d68e4d2b 100644 --- a/submodules/ContextUI/Sources/ContextControllerExtractedPresentationNode.swift +++ b/submodules/ContextUI/Sources/ContextControllerExtractedPresentationNode.swift @@ -1136,8 +1136,11 @@ final class ContextControllerExtractedPresentationNode: ASDisplayNode, ContextCo let contentHeight = contentNode.containingItem.view.bounds.size.height if case let .extracted(extracted) = self.source, extracted.centerVertically { if actionsSize.height.isZero { + var initialContentRect = contentRect + initialContentRect.origin.y += extracted.initialAppearanceOffset.y + let fixedContentY = floorToScreenPixels((layout.size.height - contentHeight) / 2.0) - animationInContentYDistance = fixedContentY - contentRect.minY + animationInContentYDistance = fixedContentY - initialContentRect.minY } else if contentX + contentWidth > layout.size.width / 2.0, actionsSize.height > 0.0 { let fixedContentX = layout.size.width - (contentX + contentWidth) animationInContentXDistance = fixedContentX - contentX diff --git a/submodules/Display/Source/GenerateImage.swift b/submodules/Display/Source/GenerateImage.swift index 610628a96e..5618a7ea7d 100644 --- a/submodules/Display/Source/GenerateImage.swift +++ b/submodules/Display/Source/GenerateImage.swift @@ -398,18 +398,22 @@ public func generateGradientImage(size: CGSize, scale: CGFloat = 0.0, colors: [U return image } -public func 
generateGradientFilledCircleImage(diameter: CGFloat, colors: NSArray) -> UIImage? { +public func generateGradientFilledCircleImage(diameter: CGFloat, colors: NSArray, direction: GradientImageDirection = .vertical) -> UIImage? { return generateImage(CGSize(width: diameter, height: diameter), contextGenerator: { size, context in let bounds = CGRect(origin: CGPoint(), size: size) context.clear(bounds) context.addEllipse(in: bounds) context.clip() - var locations: [CGFloat] = [0.0, 1.0] + var locations: [CGFloat] = [] + for i in 0 ..< colors.count { + let t = CGFloat(i) / CGFloat(colors.count - 1) + locations.append(t) + } let colorSpace = DeviceGraphicsContextSettings.shared.colorSpace let gradient = CGGradient(colorsSpace: colorSpace, colors: colors, locations: &locations)! - context.drawLinearGradient(gradient, start: CGPoint(), end: CGPoint(x: 0.0, y: bounds.size.height), options: CGGradientDrawingOptions()) + context.drawLinearGradient(gradient, start: CGPoint(), end: direction == .horizontal ? 
CGPoint(x: size.width, y: 0.0) : CGPoint(x: 0.0, y: size.height), options: CGGradientDrawingOptions()) }) } diff --git a/submodules/DrawingUI/Sources/DrawingVideoRecorder.swift b/submodules/DrawingUI/Sources/DrawingVideoRecorder.swift index cc117612a5..5bd68600bd 100644 --- a/submodules/DrawingUI/Sources/DrawingVideoRecorder.swift +++ b/submodules/DrawingUI/Sources/DrawingVideoRecorder.swift @@ -147,12 +147,12 @@ public final class EntityVideoRecorder { self.start = CACurrentMediaTime() self.recordingDisposable.set((self.camera.startRecording() - |> deliverOnMainQueue).startStrict(next: { [weak self] duration in + |> deliverOnMainQueue).startStrict(next: { [weak self] recordingData in guard let self else { return } - self.durationPromise.set(duration) - if duration >= self.maxDuration { + self.durationPromise.set(recordingData.duration) + if recordingData.duration >= self.maxDuration { let onAutomaticStop = self.onAutomaticStop self.stopRecording(save: true, completion: { onAutomaticStop() diff --git a/submodules/LegacyComponents/PublicHeaders/LegacyComponents/TGModernConversationInputMicButton.h b/submodules/LegacyComponents/PublicHeaders/LegacyComponents/TGModernConversationInputMicButton.h index 94e5eff7d9..ed78e3cbef 100644 --- a/submodules/LegacyComponents/PublicHeaders/LegacyComponents/TGModernConversationInputMicButton.h +++ b/submodules/LegacyComponents/PublicHeaders/LegacyComponents/TGModernConversationInputMicButton.h @@ -87,8 +87,6 @@ - (void)_commitLocked; -- (void)lockImmediately; - - (void)setHidesPanelOnLock; - (UIView *)createLockPanelView; diff --git a/submodules/LegacyComponents/Sources/TGModernConversationInputMicButton.m b/submodules/LegacyComponents/Sources/TGModernConversationInputMicButton.m index 9e570cdd40..8f6ea3a6d1 100644 --- a/submodules/LegacyComponents/Sources/TGModernConversationInputMicButton.m +++ b/submodules/LegacyComponents/Sources/TGModernConversationInputMicButton.m @@ -146,6 +146,8 @@ static const CGFloat outerCircleMinScale 
= innerCircleRadius / outerCircleRadius BOOL _xFeedbackOccured; BOOL _yFeedbackOccured; + + bool _skipCancelUpdate; } @end @@ -507,7 +509,9 @@ static const CGFloat outerCircleMinScale = innerCircleRadius / outerCircleRadius [self displayLink].paused = false; if (_locked) { + _skipCancelUpdate = true; [self animateLock]; + _skipCancelUpdate = false; } } @@ -598,33 +602,6 @@ static const CGFloat outerCircleMinScale = innerCircleRadius / outerCircleRadius return iconImage; } -- (void)lockImmediately { - _lockView.lockness = 1.0; - [_lock updateLockness:1.0]; - - UIImage *icon = TGComponentsImageNamed(@"RecordSendIcon"); - [self setIcon:TGTintedImage(icon, _pallete != nil && !_hidesPanelOnLock ? _pallete.iconColor : [UIColor whiteColor])]; - - _currentScale = 1; - _cancelTargetTranslation = 0; - - id delegate = _delegate; - if ([delegate respondsToSelector:@selector(micButtonInteractionUpdateCancelTranslation:)]) - [delegate micButtonInteractionUpdateCancelTranslation:-_cancelTargetTranslation]; - - _lockPanelView.frame = CGRectMake(_lockPanelView.frame.origin.x, 40.0f, _lockPanelView.frame.size.width, 72.0f - 32.0f); - _lockView.transform = CGAffineTransformMakeTranslation(0.0f, -11.0f); - _lock.transform = CGAffineTransformMakeTranslation(0.0f, -16.0f); - _lockArrowView.transform = CGAffineTransformMakeTranslation(0.0f, -39.0f); - _lockArrowView.alpha = 0.0f; - - _stopButton.userInteractionEnabled = true; - [UIView animateWithDuration:0.25 delay:0.56 options:kNilOptions animations:^ - { - _stopButton.alpha = 1.0f; - } completion:nil]; -} - - (void)animateLock { if (!_animatedIn) { return; @@ -644,7 +621,7 @@ static const CGFloat outerCircleMinScale = innerCircleRadius / outerCircleRadius _currentScale = 1; _cancelTargetTranslation = 0; id delegate = _delegate; - if ([delegate respondsToSelector:@selector(micButtonInteractionUpdateCancelTranslation:)]) + if ([delegate respondsToSelector:@selector(micButtonInteractionUpdateCancelTranslation:)] && !_skipCancelUpdate) 
[delegate micButtonInteractionUpdateCancelTranslation:-_cancelTargetTranslation]; _innerIconView.transform = CGAffineTransformMakeScale(0.3f, 0.3f); diff --git a/submodules/LocalMediaResources/Sources/MediaResources.swift b/submodules/LocalMediaResources/Sources/MediaResources.swift index a6f87f3f19..779fbdc2c1 100644 --- a/submodules/LocalMediaResources/Sources/MediaResources.swift +++ b/submodules/LocalMediaResources/Sources/MediaResources.swift @@ -9,7 +9,7 @@ public final class VideoMediaResourceAdjustments: PostboxCoding, Equatable { public let digest: MemoryBuffer public let isStory: Bool - public init(data: MemoryBuffer, digest: MemoryBuffer, isStory: Bool) { + public init(data: MemoryBuffer, digest: MemoryBuffer, isStory: Bool = false) { self.data = data self.digest = digest self.isStory = isStory @@ -161,7 +161,7 @@ public final class LocalFileVideoMediaResource: TelegramMediaResource { } public let randomId: Int64 - public let path: String + public let paths: [String] public let adjustments: VideoMediaResourceAdjustments? public var headerSize: Int32 { @@ -170,19 +170,30 @@ public final class LocalFileVideoMediaResource: TelegramMediaResource { public init(randomId: Int64, path: String, adjustments: VideoMediaResourceAdjustments?) { self.randomId = randomId - self.path = path + self.paths = [path] + self.adjustments = adjustments + } + + public init(randomId: Int64, paths: [String], adjustments: VideoMediaResourceAdjustments?) { + self.randomId = randomId + self.paths = paths self.adjustments = adjustments } public required init(decoder: PostboxDecoder) { self.randomId = decoder.decodeInt64ForKey("i", orElse: 0) - self.path = decoder.decodeStringForKey("p", orElse: "") + let paths = decoder.decodeStringArrayForKey("ps") + if !paths.isEmpty { + self.paths = paths + } else { + self.paths = [decoder.decodeStringForKey("p", orElse: "")] + } self.adjustments = decoder.decodeObjectForKey("a", decoder: { VideoMediaResourceAdjustments(decoder: $0) }) as? 
VideoMediaResourceAdjustments } public func encode(_ encoder: PostboxEncoder) { encoder.encodeInt64(self.randomId, forKey: "i") - encoder.encodeString(self.path, forKey: "p") + encoder.encodeStringArray(self.paths, forKey: "ps") if let adjustments = self.adjustments { encoder.encodeObject(adjustments, forKey: "a") } else { @@ -196,7 +207,7 @@ public final class LocalFileVideoMediaResource: TelegramMediaResource { public func isEqual(to: MediaResource) -> Bool { if let to = to as? LocalFileVideoMediaResource { - return self.randomId == to.randomId && self.path == to.path && self.adjustments == to.adjustments + return self.randomId == to.randomId && self.paths == to.paths && self.adjustments == to.adjustments } else { return false } diff --git a/submodules/MediaPickerUI/Sources/MediaPickerSelectedListNode.swift b/submodules/MediaPickerUI/Sources/MediaPickerSelectedListNode.swift index 33cc6d4df6..9d05f7ea39 100644 --- a/submodules/MediaPickerUI/Sources/MediaPickerSelectedListNode.swift +++ b/submodules/MediaPickerUI/Sources/MediaPickerSelectedListNode.swift @@ -859,10 +859,10 @@ final class MediaPickerSelectedListNode: ASDisplayNode, UIScrollViewDelegate, UI let previewText = groupLayouts.count > 1 ? 
presentationData.strings.Attachment_MessagesPreview : presentationData.strings.Attachment_MessagePreview let previewMessage = Message(stableId: 0, stableVersion: 0, id: MessageId(peerId: peerId, namespace: 0, id: 0), globallyUniqueId: nil, groupingKey: nil, groupInfo: nil, threadId: nil, timestamp: 0, flags: [], tags: [], globalTags: [], localTags: [], customTags: [], forwardInfo: nil, author: peers[peerId], text: "", attributes: [], media: [TelegramMediaAction(action: .customText(text: previewText, entities: [], additionalAttributes: nil))], peers: peers, associatedMessages: SimpleDictionary(), associatedMessageIds: [], associatedMedia: [:], associatedThreadInfo: nil, associatedStories: [:]) - let previewItem = self.context.sharedContext.makeChatMessagePreviewItem(context: context, messages: [previewMessage], theme: theme, strings: presentationData.strings, wallpaper: wallpaper, fontSize: presentationData.chatFontSize, chatBubbleCorners: bubbleCorners, dateTimeFormat: presentationData.dateTimeFormat, nameOrder: presentationData.nameDisplayOrder, forcedResourceStatus: nil, tapMessage: nil, clickThroughMessage: nil, backgroundNode: self.wallpaperBackgroundNode, availableReactions: nil, accountPeer: nil, isCentered: true, isPreview: true) + let previewItem = self.context.sharedContext.makeChatMessagePreviewItem(context: context, messages: [previewMessage], theme: theme, strings: presentationData.strings, wallpaper: wallpaper, fontSize: presentationData.chatFontSize, chatBubbleCorners: bubbleCorners, dateTimeFormat: presentationData.dateTimeFormat, nameOrder: presentationData.nameDisplayOrder, forcedResourceStatus: nil, tapMessage: nil, clickThroughMessage: nil, backgroundNode: self.wallpaperBackgroundNode, availableReactions: nil, accountPeer: nil, isCentered: true, isPreview: true, isStandalone: false) let dragMessage = Message(stableId: 0, stableVersion: 0, id: MessageId(peerId: peerId, namespace: 0, id: 0), globallyUniqueId: nil, groupingKey: nil, groupInfo: nil, 
threadId: nil, timestamp: 0, flags: [], tags: [], globalTags: [], localTags: [], customTags: [], forwardInfo: nil, author: peers[peerId], text: "", attributes: [], media: [TelegramMediaAction(action: .customText(text: presentationData.strings.Attachment_DragToReorder, entities: [], additionalAttributes: nil))], peers: peers, associatedMessages: SimpleDictionary(), associatedMessageIds: [], associatedMedia: [:], associatedThreadInfo: nil, associatedStories: [:]) - let dragItem = self.context.sharedContext.makeChatMessagePreviewItem(context: context, messages: [dragMessage], theme: theme, strings: presentationData.strings, wallpaper: wallpaper, fontSize: presentationData.chatFontSize, chatBubbleCorners: bubbleCorners, dateTimeFormat: presentationData.dateTimeFormat, nameOrder: presentationData.nameDisplayOrder, forcedResourceStatus: nil, tapMessage: nil, clickThroughMessage: nil, backgroundNode: self.wallpaperBackgroundNode, availableReactions: nil, accountPeer: nil, isCentered: true, isPreview: true) + let dragItem = self.context.sharedContext.makeChatMessagePreviewItem(context: context, messages: [dragMessage], theme: theme, strings: presentationData.strings, wallpaper: wallpaper, fontSize: presentationData.chatFontSize, chatBubbleCorners: bubbleCorners, dateTimeFormat: presentationData.dateTimeFormat, nameOrder: presentationData.nameDisplayOrder, forcedResourceStatus: nil, tapMessage: nil, clickThroughMessage: nil, backgroundNode: self.wallpaperBackgroundNode, availableReactions: nil, accountPeer: nil, isCentered: true, isPreview: true, isStandalone: false) let headerItems: [ListViewItem] = [previewItem, dragItem] diff --git a/submodules/PhotoResources/Sources/PhotoResources.swift b/submodules/PhotoResources/Sources/PhotoResources.swift index 450d9bb9ad..38378ab4f5 100644 --- a/submodules/PhotoResources/Sources/PhotoResources.swift +++ b/submodules/PhotoResources/Sources/PhotoResources.swift @@ -2154,7 +2154,7 @@ public func chatSecretMessageVideo(account: 
Account, userLocation: MediaResource thumbnailContext2.withFlippedContext { c in c.interpolationQuality = .none if let image = thumbnailContext.generateImage()?.cgImage { - c.draw(image, in: CGRect(origin: CGPoint(), size: thumbnailContext2Size)) + c.draw(image, in: CGRect(origin: CGPoint(), size: thumbnailContext2Size).insetBy(dx: -4.0, dy: -4.0)) } } imageFastBlur(Int32(thumbnailContext2Size.width), Int32(thumbnailContext2Size.height), Int32(thumbnailContext2.bytesPerRow), thumbnailContext2.bytes) @@ -2185,7 +2185,7 @@ public func chatSecretMessageVideo(account: Account, userLocation: MediaResource } } - addCorners(context, arguments: arguments) +// addCorners(context, arguments: arguments) return context } diff --git a/submodules/PremiumUI/BUILD b/submodules/PremiumUI/BUILD index 83a13b2ba8..87af158f19 100644 --- a/submodules/PremiumUI/BUILD +++ b/submodules/PremiumUI/BUILD @@ -111,6 +111,7 @@ swift_library( "//submodules/InvisibleInkDustNode", "//submodules/AlertUI", "//submodules/TelegramUI/Components/Chat/MergedAvatarsNode", + "//submodules/TelegramUI/Components/LottieComponent", ], visibility = [ "//visibility:public", diff --git a/submodules/PremiumUI/Sources/PremiumBoostLevelsScreen.swift b/submodules/PremiumUI/Sources/PremiumBoostLevelsScreen.swift index 8425a7586a..ef521840b1 100644 --- a/submodules/PremiumUI/Sources/PremiumBoostLevelsScreen.swift +++ b/submodules/PremiumUI/Sources/PremiumBoostLevelsScreen.swift @@ -361,8 +361,6 @@ private final class LevelSectionComponent: CombinedComponent { } } - - private final class LimitSheetContent: CombinedComponent { typealias EnvironmentType = (Empty, ScrollChildEnvironment) @@ -821,8 +819,6 @@ private final class LimitSheetContent: CombinedComponent { } } - - private final class BoostLevelsContainerComponent: CombinedComponent { let context: AccountContext let theme: PresentationTheme diff --git a/submodules/PremiumUI/Sources/PremiumIntroScreen.swift b/submodules/PremiumUI/Sources/PremiumIntroScreen.swift 
index f384ab27f1..28dec4dc42 100644 --- a/submodules/PremiumUI/Sources/PremiumIntroScreen.swift +++ b/submodules/PremiumUI/Sources/PremiumIntroScreen.swift @@ -259,6 +259,12 @@ public enum PremiumSource: Equatable { } else { return false } + case .readTime: + if case .readTime = rhs { + return true + } else { + return false + } } } @@ -300,6 +306,7 @@ public enum PremiumSource: Equatable { case similarChannels case wallpapers case presence + case readTime var identifier: String? { switch self { @@ -381,6 +388,8 @@ public enum PremiumSource: Equatable { return "wallpapers" case .presence: return "presence" + case .readTime: + return "read_time" } } } diff --git a/submodules/PremiumUI/Sources/PremiumPrivacyScreen.swift b/submodules/PremiumUI/Sources/PremiumPrivacyScreen.swift new file mode 100644 index 0000000000..f8afc9b5b3 --- /dev/null +++ b/submodules/PremiumUI/Sources/PremiumPrivacyScreen.swift @@ -0,0 +1,517 @@ +import Foundation +import UIKit +import Display +import ComponentFlow +import Markdown +import TextFormat +import TelegramPresentationData +import ViewControllerComponent +import SheetComponent +import BundleIconComponent +import BalancedTextComponent +import MultilineTextComponent +import SolidRoundedButtonComponent +import LottieComponent +import AccountContext + +private final class SheetContent: CombinedComponent { + typealias EnvironmentType = ViewControllerComponentContainer.Environment + + let context: AccountContext + let subject: PremiumPrivacyScreen.Subject + + let action: () -> Void + let openPremiumIntro: () -> Void + let dismiss: () -> Void + + init(context: AccountContext, + subject: PremiumPrivacyScreen.Subject, + action: @escaping () -> Void, + openPremiumIntro: @escaping () -> Void, + dismiss: @escaping () -> Void + ) { + self.context = context + self.subject = subject + self.action = action + self.openPremiumIntro = openPremiumIntro + self.dismiss = dismiss + } + + static func ==(lhs: SheetContent, rhs: SheetContent) -> Bool { + if 
lhs.context !== rhs.context { + return false + } + if lhs.subject != rhs.subject { + return false + } + return true + } + + final class State: ComponentState { + var cachedCloseImage: (UIImage, PresentationTheme)? + var cachedIconImage: UIImage? + } + + func makeState() -> State { + return State() + } + + static var body: Body { + let closeButton = Child(Button.self) + + let iconBackground = Child(Image.self) + let icon = Child(LottieComponent.self) + + let title = Child(BalancedTextComponent.self) + let text = Child(BalancedTextComponent.self) + let actionButton = Child(SolidRoundedButtonComponent.self) + + let orLeftLine = Child(Rectangle.self) + let orRightLine = Child(Rectangle.self) + let orText = Child(MultilineTextComponent.self) + + let premiumTitle = Child(BalancedTextComponent.self) + let premiumText = Child(BalancedTextComponent.self) + let premiumButton = Child(SolidRoundedButtonComponent.self) + + return { context in + let environment = context.environment[EnvironmentType.self] + let component = context.component + let state = context.state + + let theme = environment.theme + let strings = environment.strings + + let sideInset: CGFloat = 16.0 + environment.safeInsets.left + let textSideInset: CGFloat = 32.0 + environment.safeInsets.left + + let titleFont = Font.semibold(20.0) + let textFont = Font.regular(15.0) + let boldTextFont = Font.semibold(15.0) + let textColor = theme.actionSheet.primaryTextColor + let secondaryTextColor = theme.actionSheet.secondaryTextColor + let linkColor = theme.actionSheet.controlAccentColor + let markdownAttributes = MarkdownAttributes(body: MarkdownAttributeSet(font: textFont, textColor: textColor), bold: MarkdownAttributeSet(font: boldTextFont, textColor: textColor), link: MarkdownAttributeSet(font: textFont, textColor: linkColor), linkAttribute: { contents in + return (TelegramTextAttributes.URL, contents) + }) + + let iconName: String + let titleString: String + let textString: String + let buttonTitle: String + let 
premiumString: String + + let premiumTitleString = "Upgrade to Premium" + let premiumButtonTitle = "Subscribe to Telegram Premium" + + let peerName = "Name" + switch component.subject { + case .presence: + iconName = "PremiumPrivacyPresence" + titleString = "Show Your Last Seen" + textString = "To see **\(peerName)'s** Last Seen time, either start showing your own Last Seen Time..." + buttonTitle = "Show My Last Seen to Everyone" + premiumString = "Subscription will let you see **\(peerName)'s** Last Seen status without showing yours." + case .readTime: + iconName = "PremiumPrivacyRead" + titleString = "Show Your Read Date" + textString = "To see when **\(peerName)** read the message, either start showing your own read time:" + buttonTitle = "Show My Read Time" + premiumString = "Subscription will let you see **\(peerName)'s** read time without showing yours." + } + + let spacing: CGFloat = 8.0 + var contentSize = CGSize(width: context.availableSize.width, height: 32.0) + + let closeImage: UIImage + if let (image, theme) = state.cachedCloseImage, theme === environment.theme { + closeImage = image + } else { + closeImage = generateCloseButtonImage(backgroundColor: UIColor(rgb: 0x808084, alpha: 0.1), foregroundColor: theme.actionSheet.inputClearButtonColor)! 
+ state.cachedCloseImage = (closeImage, theme) + } + + let closeButton = closeButton.update( + component: Button( + content: AnyComponent(Image(image: closeImage)), + action: { [weak component] in + component?.dismiss() + } + ), + availableSize: CGSize(width: 30.0, height: 30.0), + transition: .immediate + ) + context.add(closeButton + .position(CGPoint(x: context.availableSize.width - environment.safeInsets.left - closeButton.size.width, y: 28.0)) + ) + + let iconSize = CGSize(width: 90.0, height: 90.0) + let gradientImage: UIImage + if let current = state.cachedIconImage { + gradientImage = current + } else { + gradientImage = generateFilledCircleImage(diameter: iconSize.width, color: theme.actionSheet.controlAccentColor)! + context.state.cachedIconImage = gradientImage + } + + let iconBackground = iconBackground.update( + component: Image(image: gradientImage), + availableSize: iconSize, + transition: .immediate + ) + context.add(iconBackground + .position(CGPoint(x: context.availableSize.width / 2.0, y: contentSize.height + iconBackground.size.height / 2.0)) + ) + +// let icon = icon.update( +// component: BundleIconComponent(name: iconName, tintColor: .white), +// availableSize: CGSize(width: 70.0, height: 70.0), +// transition: .immediate +// ) + + let icon = icon.update( + component: LottieComponent( + content: LottieComponent.AppBundleContent(name: iconName) + ), + availableSize: CGSize(width: 70, height: 70), + transition: .immediate + ) + + context.add(icon + .position(CGPoint(x: context.availableSize.width / 2.0, y: contentSize.height + iconBackground.size.height / 2.0)) + ) + contentSize.height += iconSize.height + contentSize.height += spacing + 5.0 + + let title = title.update( + component: BalancedTextComponent( + text: .plain(NSAttributedString(string: titleString, font: titleFont, textColor: textColor)), + horizontalAlignment: .center, + maximumNumberOfLines: 0, + lineSpacing: 0.1 + ), + availableSize: CGSize(width: context.availableSize.width - 
textSideInset * 2.0, height: context.availableSize.height), + transition: .immediate + ) + context.add(title + .position(CGPoint(x: context.availableSize.width / 2.0, y: contentSize.height + title.size.height / 2.0)) + ) + contentSize.height += title.size.height + contentSize.height += spacing + + let text = text.update( + component: BalancedTextComponent( + text: .markdown(text: textString, attributes: markdownAttributes), + horizontalAlignment: .center, + maximumNumberOfLines: 0, + lineSpacing: 0.2 + ), + availableSize: CGSize(width: context.availableSize.width - textSideInset * 2.0, height: context.availableSize.height), + transition: .immediate + ) + context.add(text + .position(CGPoint(x: context.availableSize.width / 2.0, y: contentSize.height + text.size.height / 2.0)) + ) + contentSize.height += text.size.height + contentSize.height += spacing + 5.0 + + let actionButton = actionButton.update( + component: SolidRoundedButtonComponent( + title: buttonTitle, + theme: SolidRoundedButtonComponent.Theme( + backgroundColor: theme.list.itemCheckColors.fillColor, + backgroundColors: [], + foregroundColor: theme.list.itemCheckColors.foregroundColor + ), + font: .bold, + fontSize: 17.0, + height: 50.0, + cornerRadius: 10.0, + gloss: false, + iconName: nil, + animationName: nil, + iconPosition: .left, + action: { + component.action() + component.dismiss() + } + ), + availableSize: CGSize(width: context.availableSize.width - sideInset * 2.0, height: 50.0), + transition: context.transition + ) + context.add(actionButton + .position(CGPoint(x: context.availableSize.width / 2.0, y: contentSize.height + actionButton.size.height / 2.0)) + ) + contentSize.height += actionButton.size.height + contentSize.height += 22.0 + + let orText = orText.update( + component: MultilineTextComponent(text: .plain(NSAttributedString(string: strings.ChannelBoost_Or, font: Font.regular(15.0), textColor: secondaryTextColor, paragraphAlignment: .center))), + availableSize: CGSize(width: 
context.availableSize.width - sideInset * 2.0, height: context.availableSize.height), + transition: .immediate + ) + context.add(orText + .position(CGPoint(x: context.availableSize.width / 2.0, y: contentSize.height + orText.size.height / 2.0)) + ) + + let orLeftLine = orLeftLine.update( + component: Rectangle(color: theme.list.itemBlocksSeparatorColor.withAlphaComponent(0.3)), + availableSize: CGSize(width: 90.0, height: 1.0 - UIScreenPixel), + transition: .immediate + ) + context.add(orLeftLine + .position(CGPoint(x: context.availableSize.width / 2.0 - orText.size.width / 2.0 - 11.0 - 45.0, y: contentSize.height + orText.size.height / 2.0)) + ) + + let orRightLine = orRightLine.update( + component: Rectangle(color: theme.list.itemBlocksSeparatorColor.withAlphaComponent(0.3)), + availableSize: CGSize(width: 90.0, height: 1.0 - UIScreenPixel), + transition: .immediate + ) + context.add(orRightLine + .position(CGPoint(x: context.availableSize.width / 2.0 + orText.size.width / 2.0 + 11.0 + 45.0, y: contentSize.height + orText.size.height / 2.0)) + ) + contentSize.height += orText.size.height + contentSize.height += 18.0 + + let premiumTitle = premiumTitle.update( + component: BalancedTextComponent( + text: .plain(NSAttributedString(string: premiumTitleString, font: titleFont, textColor: textColor)), + horizontalAlignment: .center, + maximumNumberOfLines: 0, + lineSpacing: 0.1 + ), + availableSize: CGSize(width: context.availableSize.width - textSideInset * 2.0, height: context.availableSize.height), + transition: .immediate + ) + context.add(premiumTitle + .position(CGPoint(x: context.availableSize.width / 2.0, y: contentSize.height + premiumTitle.size.height / 2.0)) + ) + contentSize.height += premiumTitle.size.height + contentSize.height += spacing + + let premiumText = premiumText.update( + component: BalancedTextComponent( + text: .markdown(text: premiumString, attributes: markdownAttributes), + horizontalAlignment: .center, + maximumNumberOfLines: 0, + 
lineSpacing: 0.2 + ), + availableSize: CGSize(width: context.availableSize.width - textSideInset * 2.0, height: context.availableSize.height), + transition: .immediate + ) + context.add(premiumText + .position(CGPoint(x: context.availableSize.width / 2.0, y: contentSize.height + premiumText.size.height / 2.0)) + ) + contentSize.height += premiumText.size.height + contentSize.height += spacing + 5.0 + + let premiumButton = premiumButton.update( + component: SolidRoundedButtonComponent( + title: premiumButtonTitle, + theme: SolidRoundedButtonComponent.Theme( + backgroundColor: .black, + backgroundColors: [ + UIColor(rgb: 0x0077ff), + UIColor(rgb: 0x6b93ff), + UIColor(rgb: 0x8878ff), + UIColor(rgb: 0xe46ace) + ], + foregroundColor: .white + ), + font: .bold, + fontSize: 17.0, + height: 50.0, + cornerRadius: 10.0, + gloss: false, + iconName: nil, + animationName: nil, + iconPosition: .left, + action: { + component.openPremiumIntro() + component.dismiss() + } + ), + availableSize: CGSize(width: context.availableSize.width - sideInset * 2.0, height: 50.0), + transition: context.transition + ) + context.add(premiumButton + .position(CGPoint(x: context.availableSize.width / 2.0, y: contentSize.height + premiumButton.size.height / 2.0)) + ) + contentSize.height += premiumButton.size.height + contentSize.height += 14.0 + + contentSize.height += environment.safeInsets.bottom + + return contentSize + } + } +} + +private final class SheetContainerComponent: CombinedComponent { + typealias EnvironmentType = ViewControllerComponentContainer.Environment + + let context: AccountContext + let subject: PremiumPrivacyScreen.Subject + let action: () -> Void + let openPremiumIntro: () -> Void + + init( + context: AccountContext, + subject: PremiumPrivacyScreen.Subject, + action: @escaping () -> Void, + openPremiumIntro: @escaping () -> Void + ) { + self.context = context + self.subject = subject + self.action = action + self.openPremiumIntro = openPremiumIntro + } + + static func 
==(lhs: SheetContainerComponent, rhs: SheetContainerComponent) -> Bool { + if lhs.context !== rhs.context { + return false + } + if lhs.subject != rhs.subject { + return false + } + return true + } + + static var body: Body { + let sheet = Child(SheetComponent.self) + let animateOut = StoredActionSlot(Action.self) + + let sheetExternalState = SheetComponent.ExternalState() + + return { context in + let environment = context.environment[EnvironmentType.self] + + let controller = environment.controller + + let sheet = sheet.update( + component: SheetComponent( + content: AnyComponent(SheetContent( + context: context.component.context, + subject: context.component.subject, + action: context.component.action, + openPremiumIntro: context.component.openPremiumIntro, + dismiss: { + animateOut.invoke(Action { _ in + if let controller = controller() { + controller.dismiss(completion: nil) + } + }) + } + )), + backgroundColor: .color(environment.theme.actionSheet.opaqueItemBackgroundColor), + followContentSizeChanges: true, + externalState: sheetExternalState, + animateOut: animateOut + ), + environment: { + environment + SheetComponentEnvironment( + isDisplaying: environment.value.isVisible, + isCentered: environment.metrics.widthClass == .regular, + hasInputHeight: !environment.inputHeight.isZero, + regularMetricsSize: CGSize(width: 430.0, height: 900.0), + dismiss: { animated in + if animated { + animateOut.invoke(Action { _ in + if let controller = controller() { + controller.dismiss(completion: nil) + } + }) + } else { + if let controller = controller() { + controller.dismiss(completion: nil) + } + } + } + ) + }, + availableSize: context.availableSize, + transition: context.transition + ) + + context.add(sheet + .position(CGPoint(x: context.availableSize.width / 2.0, y: context.availableSize.height / 2.0)) + ) + + if let controller = controller(), !controller.automaticallyControlPresentationContextLayout { + let layout = ContainerViewLayout( + size: 
context.availableSize, + metrics: environment.metrics, + deviceMetrics: environment.deviceMetrics, + intrinsicInsets: UIEdgeInsets(top: 0.0, left: 0.0, bottom: max(environment.safeInsets.bottom, sheetExternalState.contentHeight), right: 0.0), + safeInsets: UIEdgeInsets(top: 0.0, left: environment.safeInsets.left, bottom: 0.0, right: environment.safeInsets.right), + additionalInsets: .zero, + statusBarHeight: environment.statusBarHeight, + inputHeight: nil, + inputHeightIsInteractivellyChanging: false, + inVoiceOver: false + ) + controller.presentationContext.containerLayoutUpdated(layout, transition: context.transition.containedViewLayoutTransition) + } + + return context.availableSize + } + } +} + + +public class PremiumPrivacyScreen: ViewControllerComponentContainer { + public enum Subject: Equatable { + case presence + case readTime + } + + private let context: AccountContext + private let subject: PremiumPrivacyScreen.Subject + private var action: (() -> Void)? + private var openPremiumIntro: (() -> Void)? + + public init( + context: AccountContext, + subject: PremiumPrivacyScreen.Subject, + action: @escaping () -> Void, + openPremiumIntro: @escaping () -> Void + ) { + self.context = context + self.subject = subject + self.action = action + self.openPremiumIntro = openPremiumIntro + + super.init( + context: context, + component: SheetContainerComponent( + context: context, + subject: subject, + action: action, + openPremiumIntro: openPremiumIntro + ), + navigationBarAppearance: .none, + statusBarStyle: .ignore, + theme: .default + ) + + self.navigationPresentation = .flatModal + } + + required public init(coder aDecoder: NSCoder) { + fatalError("init(coder:) has not been implemented") + } + + public override func viewDidLoad() { + super.viewDidLoad() + + self.view.disablesInteractiveModalDismiss = true + } + + public func dismissAnimated() { + if let view = self.node.hostView.findTaggedView(tag: SheetComponent.View.Tag()) as? 
SheetComponent.View { + view.dismissAnimated() + } + } +} diff --git a/submodules/SSignalKit/SwiftSignalKit/Source/Signal_Combine.swift b/submodules/SSignalKit/SwiftSignalKit/Source/Signal_Combine.swift index e2a6db088d..27cab69423 100644 --- a/submodules/SSignalKit/SwiftSignalKit/Source/Signal_Combine.swift +++ b/submodules/SSignalKit/SwiftSignalKit/Source/Signal_Combine.swift @@ -208,6 +208,14 @@ public func combineLatest(queue: Queue? = nil, _ s1: Signal, _ s2: Signal, _ s3: Signal, _ s4: Signal, _ s5: Signal, _ s6: Signal, _ s7: Signal, _ s8: Signal, _ s9: Signal, _ s10: Signal, _ s11: Signal, _ s12: Signal, _ s13: Signal, _ s14: Signal, _ s15: Signal, _ s16: Signal, _ s17: Signal, _ s18: Signal, _ s19: Signal, _ s20: Signal, _ s21: Signal) -> Signal<(T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21), E> { + return combineLatestAny([signalOfAny(s1), signalOfAny(s2), signalOfAny(s3), signalOfAny(s4), signalOfAny(s5), signalOfAny(s6), signalOfAny(s7), signalOfAny(s8), signalOfAny(s9), signalOfAny(s10), signalOfAny(s11), signalOfAny(s12), signalOfAny(s13), signalOfAny(s14), signalOfAny(s15), signalOfAny(s16), signalOfAny(s17), signalOfAny(s18), signalOfAny(s19), signalOfAny(s20), signalOfAny(s21)], combine: { values in + return (values[0] as! T1, values[1] as! T2, values[2] as! T3, values[3] as! T4, values[4] as! T5, values[5] as! T6, values[6] as! T7, values[7] as! T8, values[8] as! T9, values[9] as! T10, values[10] as! T11, values[11] as! T12, values[12] as! T13, values[13] as! T14, values[14] as! T15, values[15] as! T16, values[16] as! T17, values[17] as! T18, values[18] as! T19, values[19] as! T20, values[20] as! T21) + }, initialValues: [:], queue: queue) +} + + + public func combineLatest(queue: Queue? 
= nil, _ signals: [Signal]) -> Signal<[T], E> { if signals.count == 0 { return single([T](), E.self) diff --git a/submodules/ScreenCaptureDetection/Sources/ScreenCaptureDetection.swift b/submodules/ScreenCaptureDetection/Sources/ScreenCaptureDetection.swift index f0d89f73f6..167c304955 100644 --- a/submodules/ScreenCaptureDetection/Sources/ScreenCaptureDetection.swift +++ b/submodules/ScreenCaptureDetection/Sources/ScreenCaptureDetection.swift @@ -96,6 +96,10 @@ public final class ScreenCaptureDetectionManager { guard let strongSelf = self else { return } + var value = value +#if DEBUG + value = false +#endif strongSelf.isRecordingActive = value if value { if strongSelf.screenRecordingCheckTimer == nil { diff --git a/submodules/SettingsUI/Sources/BubbleSettings/BubbleSettingsController.swift b/submodules/SettingsUI/Sources/BubbleSettings/BubbleSettingsController.swift index 948bb21b79..6c2a35621b 100644 --- a/submodules/SettingsUI/Sources/BubbleSettings/BubbleSettingsController.swift +++ b/submodules/SettingsUI/Sources/BubbleSettings/BubbleSettingsController.swift @@ -170,20 +170,20 @@ private final class BubbleSettingsControllerNode: ASDisplayNode, UIScrollViewDel messages[replyMessageId] = Message(stableId: 3, stableVersion: 0, id: replyMessageId, globallyUniqueId: nil, groupingKey: nil, groupInfo: nil, threadId: nil, timestamp: 66000, flags: [], tags: [], globalTags: [], localTags: [], customTags: [], forwardInfo: nil, author: peers[otherPeerId], text: self.presentationData.strings.Appearance_ThemePreview_Chat_1_Text, attributes: [], media: [], peers: peers, associatedMessages: SimpleDictionary(), associatedMessageIds: [], associatedMedia: [:], associatedThreadInfo: nil, associatedStories: [:]) let message1 = Message(stableId: 4, stableVersion: 0, id: MessageId(peerId: otherPeerId, namespace: 0, id: 4), globallyUniqueId: nil, groupingKey: nil, groupInfo: nil, threadId: nil, timestamp: 66003, flags: [], tags: [], globalTags: [], localTags: [], customTags: [], 
forwardInfo: nil, author: peers[otherPeerId], text: self.presentationData.strings.Appearance_ThemePreview_Chat_3_Text, attributes: [], media: [], peers: peers, associatedMessages: messages, associatedMessageIds: [], associatedMedia: [:], associatedThreadInfo: nil, associatedStories: [:]) - items.append(self.context.sharedContext.makeChatMessagePreviewItem(context: self.context, messages: [message1], theme: self.presentationData.theme, strings: self.presentationData.strings, wallpaper: self.presentationData.chatWallpaper, fontSize: self.presentationData.chatFontSize, chatBubbleCorners: self.presentationData.chatBubbleCorners, dateTimeFormat: self.presentationData.dateTimeFormat, nameOrder: self.presentationData.nameDisplayOrder, forcedResourceStatus: nil, tapMessage: nil, clickThroughMessage: nil, backgroundNode: self.chatBackgroundNode, availableReactions: nil, accountPeer: nil, isCentered: false, isPreview: true)) + items.append(self.context.sharedContext.makeChatMessagePreviewItem(context: self.context, messages: [message1], theme: self.presentationData.theme, strings: self.presentationData.strings, wallpaper: self.presentationData.chatWallpaper, fontSize: self.presentationData.chatFontSize, chatBubbleCorners: self.presentationData.chatBubbleCorners, dateTimeFormat: self.presentationData.dateTimeFormat, nameOrder: self.presentationData.nameDisplayOrder, forcedResourceStatus: nil, tapMessage: nil, clickThroughMessage: nil, backgroundNode: self.chatBackgroundNode, availableReactions: nil, accountPeer: nil, isCentered: false, isPreview: true, isStandalone: false)) let message2 = Message(stableId: 3, stableVersion: 0, id: MessageId(peerId: peerId, namespace: 0, id: 3), globallyUniqueId: nil, groupingKey: nil, groupInfo: nil, threadId: nil, timestamp: 66002, flags: [.Incoming], tags: [], globalTags: [], localTags: [], customTags: [], forwardInfo: nil, author: peers[peerId], text: self.presentationData.strings.Appearance_ThemePreview_Chat_2_Text, attributes: 
[ReplyMessageAttribute(messageId: replyMessageId, threadMessageId: nil, quote: nil, isQuote: false)], media: [], peers: peers, associatedMessages: messages, associatedMessageIds: [], associatedMedia: [:], associatedThreadInfo: nil, associatedStories: [:]) - items.append(self.context.sharedContext.makeChatMessagePreviewItem(context: self.context, messages: [message2], theme: self.presentationData.theme, strings: self.presentationData.strings, wallpaper: self.presentationData.chatWallpaper, fontSize: self.presentationData.chatFontSize, chatBubbleCorners: self.presentationData.chatBubbleCorners, dateTimeFormat: self.presentationData.dateTimeFormat, nameOrder: self.presentationData.nameDisplayOrder, forcedResourceStatus: nil, tapMessage: nil, clickThroughMessage: nil, backgroundNode: self.chatBackgroundNode, availableReactions: nil, accountPeer: nil, isCentered: false, isPreview: true)) + items.append(self.context.sharedContext.makeChatMessagePreviewItem(context: self.context, messages: [message2], theme: self.presentationData.theme, strings: self.presentationData.strings, wallpaper: self.presentationData.chatWallpaper, fontSize: self.presentationData.chatFontSize, chatBubbleCorners: self.presentationData.chatBubbleCorners, dateTimeFormat: self.presentationData.dateTimeFormat, nameOrder: self.presentationData.nameDisplayOrder, forcedResourceStatus: nil, tapMessage: nil, clickThroughMessage: nil, backgroundNode: self.chatBackgroundNode, availableReactions: nil, accountPeer: nil, isCentered: false, isPreview: true, isStandalone: false)) let waveformBase64 = "DAAOAAkACQAGAAwADwAMABAADQAPABsAGAALAA0AGAAfABoAHgATABgAGQAYABQADAAVABEAHwANAA0ACQAWABkACQAOAAwACQAfAAAAGQAVAAAAEwATAAAACAAfAAAAHAAAABwAHwAAABcAGQAAABQADgAAABQAHwAAAB8AHwAAAAwADwAAAB8AEwAAABoAFwAAAB8AFAAAAAAAHwAAAAAAHgAAAAAAHwAAAAAAHwAAAAAAHwAAAAAAHwAAAAAAHwAAAAAAAAA=" let voiceAttributes: [TelegramMediaFileAttribute] = [.Audio(isVoice: true, duration: 23, title: nil, performer: nil, waveform: Data(base64Encoded: 
waveformBase64)!)] let voiceMedia = TelegramMediaFile(fileId: MediaId(namespace: 0, id: 0), partialReference: nil, resource: LocalFileMediaResource(fileId: 0), previewRepresentations: [], videoThumbnails: [], immediateThumbnailData: nil, mimeType: "audio/ogg", size: 0, attributes: voiceAttributes) let message3 = Message(stableId: 1, stableVersion: 0, id: MessageId(peerId: peerId, namespace: 0, id: 1), globallyUniqueId: nil, groupingKey: nil, groupInfo: nil, threadId: nil, timestamp: 66001, flags: [.Incoming], tags: [], globalTags: [], localTags: [], customTags: [], forwardInfo: nil, author: peers[peerId], text: "", attributes: [], media: [voiceMedia], peers: peers, associatedMessages: messages, associatedMessageIds: [], associatedMedia: [:], associatedThreadInfo: nil, associatedStories: [:]) - items.append(self.context.sharedContext.makeChatMessagePreviewItem(context: self.context, messages: [message3], theme: self.presentationData.theme, strings: self.presentationData.strings, wallpaper: self.presentationData.chatWallpaper, fontSize: self.presentationData.chatFontSize, chatBubbleCorners: self.presentationData.chatBubbleCorners, dateTimeFormat: self.presentationData.dateTimeFormat, nameOrder: self.presentationData.nameDisplayOrder, forcedResourceStatus: FileMediaResourceStatus(mediaStatus: .playbackStatus(.paused), fetchStatus: .Local), tapMessage: nil, clickThroughMessage: nil, backgroundNode: self.chatBackgroundNode, availableReactions: nil, accountPeer: nil, isCentered: false, isPreview: true)) + items.append(self.context.sharedContext.makeChatMessagePreviewItem(context: self.context, messages: [message3], theme: self.presentationData.theme, strings: self.presentationData.strings, wallpaper: self.presentationData.chatWallpaper, fontSize: self.presentationData.chatFontSize, chatBubbleCorners: self.presentationData.chatBubbleCorners, dateTimeFormat: self.presentationData.dateTimeFormat, nameOrder: self.presentationData.nameDisplayOrder, forcedResourceStatus: 
FileMediaResourceStatus(mediaStatus: .playbackStatus(.paused), fetchStatus: .Local), tapMessage: nil, clickThroughMessage: nil, backgroundNode: self.chatBackgroundNode, availableReactions: nil, accountPeer: nil, isCentered: false, isPreview: true, isStandalone: false)) let message4 = Message(stableId: 2, stableVersion: 0, id: MessageId(peerId: otherPeerId, namespace: 0, id: 2), globallyUniqueId: nil, groupingKey: nil, groupInfo: nil, threadId: nil, timestamp: 66001, flags: [], tags: [], globalTags: [], localTags: [], customTags: [], forwardInfo: nil, author: peers[otherPeerId], text: self.presentationData.strings.Appearance_ThemePreview_Chat_1_Text, attributes: [], media: [], peers: peers, associatedMessages: messages, associatedMessageIds: [], associatedMedia: [:], associatedThreadInfo: nil, associatedStories: [:]) - items.append(self.context.sharedContext.makeChatMessagePreviewItem(context: self.context, messages: [message4], theme: self.presentationData.theme, strings: self.presentationData.strings, wallpaper: self.presentationData.chatWallpaper, fontSize: self.presentationData.chatFontSize, chatBubbleCorners: self.presentationData.chatBubbleCorners, dateTimeFormat: self.presentationData.dateTimeFormat, nameOrder: self.presentationData.nameDisplayOrder, forcedResourceStatus: nil, tapMessage: nil, clickThroughMessage: nil, backgroundNode: self.chatBackgroundNode, availableReactions: nil, accountPeer: nil, isCentered: false, isPreview: true)) + items.append(self.context.sharedContext.makeChatMessagePreviewItem(context: self.context, messages: [message4], theme: self.presentationData.theme, strings: self.presentationData.strings, wallpaper: self.presentationData.chatWallpaper, fontSize: self.presentationData.chatFontSize, chatBubbleCorners: self.presentationData.chatBubbleCorners, dateTimeFormat: self.presentationData.dateTimeFormat, nameOrder: self.presentationData.nameDisplayOrder, forcedResourceStatus: nil, tapMessage: nil, clickThroughMessage: nil, 
backgroundNode: self.chatBackgroundNode, availableReactions: nil, accountPeer: nil, isCentered: false, isPreview: true, isStandalone: false)) let width: CGFloat if case .regular = layout.metrics.widthClass { diff --git a/submodules/SettingsUI/Sources/Privacy and Security/ForwardPrivacyChatPreviewItem.swift b/submodules/SettingsUI/Sources/Privacy and Security/ForwardPrivacyChatPreviewItem.swift index ba5ee38b67..36b4e07238 100644 --- a/submodules/SettingsUI/Sources/Privacy and Security/ForwardPrivacyChatPreviewItem.swift +++ b/submodules/SettingsUI/Sources/Privacy and Security/ForwardPrivacyChatPreviewItem.swift @@ -149,7 +149,7 @@ class ForwardPrivacyChatPreviewItemNode: ListViewItemNode { let forwardInfo = MessageForwardInfo(author: item.linkEnabled ? peers[peerId] : nil, source: nil, sourceMessageId: nil, date: 0, authorSignature: item.linkEnabled ? nil : item.peerName, psaType: nil, flags: []) - let messageItem = item.context.sharedContext.makeChatMessagePreviewItem(context: item.context, messages: [Message(stableId: 1, stableVersion: 0, id: MessageId(peerId: peerId, namespace: 0, id: 1), globallyUniqueId: nil, groupingKey: nil, groupInfo: nil, threadId: nil, timestamp: 66000, flags: [.Incoming], tags: [], globalTags: [], localTags: [], customTags: [], forwardInfo: forwardInfo, author: nil, text: item.strings.Privacy_Forwards_PreviewMessageText, attributes: [], media: [], peers: peers, associatedMessages: messages, associatedMessageIds: [], associatedMedia: [:], associatedThreadInfo: nil, associatedStories: [:])], theme: item.theme, strings: item.strings, wallpaper: item.wallpaper, fontSize: item.fontSize, chatBubbleCorners: item.chatBubbleCorners, dateTimeFormat: item.dateTimeFormat, nameOrder: item.nameDisplayOrder, forcedResourceStatus: nil, tapMessage: nil, clickThroughMessage: nil, backgroundNode: currentBackgroundNode, availableReactions: nil, accountPeer: nil, isCentered: false, isPreview: true) + let messageItem = 
item.context.sharedContext.makeChatMessagePreviewItem(context: item.context, messages: [Message(stableId: 1, stableVersion: 0, id: MessageId(peerId: peerId, namespace: 0, id: 1), globallyUniqueId: nil, groupingKey: nil, groupInfo: nil, threadId: nil, timestamp: 66000, flags: [.Incoming], tags: [], globalTags: [], localTags: [], customTags: [], forwardInfo: forwardInfo, author: nil, text: item.strings.Privacy_Forwards_PreviewMessageText, attributes: [], media: [], peers: peers, associatedMessages: messages, associatedMessageIds: [], associatedMedia: [:], associatedThreadInfo: nil, associatedStories: [:])], theme: item.theme, strings: item.strings, wallpaper: item.wallpaper, fontSize: item.fontSize, chatBubbleCorners: item.chatBubbleCorners, dateTimeFormat: item.dateTimeFormat, nameOrder: item.nameDisplayOrder, forcedResourceStatus: nil, tapMessage: nil, clickThroughMessage: nil, backgroundNode: currentBackgroundNode, availableReactions: nil, accountPeer: nil, isCentered: false, isPreview: true, isStandalone: false) var node: ListViewItemNode? 
if let current = currentNode { diff --git a/submodules/SettingsUI/Sources/Privacy and Security/SelectivePrivacySettingsController.swift b/submodules/SettingsUI/Sources/Privacy and Security/SelectivePrivacySettingsController.swift index 3fc31c3295..a5c6273bee 100644 --- a/submodules/SettingsUI/Sources/Privacy and Security/SelectivePrivacySettingsController.swift +++ b/submodules/SettingsUI/Sources/Privacy and Security/SelectivePrivacySettingsController.swift @@ -849,11 +849,11 @@ private func selectivePrivacySettingsControllerEntries(presentationData: Present if case .presence = kind, let peer { //TODO:localize entries.append(.hideReadTime(presentationData.theme, "Hide Read Time", state.hideReadTimeEnabled == true)) - entries.append(.hideReadTimeInfo(presentationData.theme, "Do not show the time when you read a message to people you hid your last seen from.")) + entries.append(.hideReadTimeInfo(presentationData.theme, "Do not show the time when you read a message to people you hid your last seen from. 
If you turn this on, their read time will also be hidden from you.\nThis does not affect group chats.")) if !peer.isPremium { entries.append(.subscribeToPremium(presentationData.theme, "Subscribe to Telegram Premium")) - entries.append(.subscribeToPremiumInfo(presentationData.theme, "If you subscribe to Telegram Premium, you will still see other users' last seen and read time even if you hid yours from them (unless they specifically restricted it).")) + entries.append(.subscribeToPremiumInfo(presentationData.theme, "If you subscribe to Telegram Premium, you will still see other users' last seen and read time even if you hid yours from them (unless they specifically restricted it).")) } } diff --git a/submodules/SettingsUI/Sources/Text Size/TextSizeSelectionController.swift b/submodules/SettingsUI/Sources/Text Size/TextSizeSelectionController.swift index 574a9daa2b..f9e693c813 100644 --- a/submodules/SettingsUI/Sources/Text Size/TextSizeSelectionController.swift +++ b/submodules/SettingsUI/Sources/Text Size/TextSizeSelectionController.swift @@ -437,20 +437,20 @@ private final class TextSizeSelectionControllerNode: ASDisplayNode, UIScrollView messages[replyMessageId] = Message(stableId: 3, stableVersion: 0, id: replyMessageId, globallyUniqueId: nil, groupingKey: nil, groupInfo: nil, threadId: nil, timestamp: 66000, flags: [], tags: [], globalTags: [], localTags: [], customTags: [], forwardInfo: nil, author: peers[otherPeerId], text: self.presentationData.strings.Appearance_ThemePreview_Chat_1_Text, attributes: [], media: [], peers: peers, associatedMessages: SimpleDictionary(), associatedMessageIds: [], associatedMedia: [:], associatedThreadInfo: nil, associatedStories: [:]) let message1 = Message(stableId: 4, stableVersion: 0, id: MessageId(peerId: otherPeerId, namespace: 0, id: 4), globallyUniqueId: nil, groupingKey: nil, groupInfo: nil, threadId: nil, timestamp: 66003, flags: [], tags: [], globalTags: [], localTags: [], customTags: [], forwardInfo: nil, author: 
peers[otherPeerId], text: self.presentationData.strings.Appearance_ThemePreview_Chat_3_Text, attributes: [], media: [], peers: peers, associatedMessages: messages, associatedMessageIds: [], associatedMedia: [:], associatedThreadInfo: nil, associatedStories: [:]) - items.append(self.context.sharedContext.makeChatMessagePreviewItem(context: self.context, messages: [message1], theme: self.presentationData.theme, strings: self.presentationData.strings, wallpaper: self.presentationData.chatWallpaper, fontSize: self.presentationData.chatFontSize, chatBubbleCorners: self.presentationData.chatBubbleCorners, dateTimeFormat: self.presentationData.dateTimeFormat, nameOrder: self.presentationData.nameDisplayOrder, forcedResourceStatus: nil, tapMessage: nil, clickThroughMessage: nil, backgroundNode: self.chatBackgroundNode, availableReactions: nil, accountPeer: nil, isCentered: false, isPreview: true)) + items.append(self.context.sharedContext.makeChatMessagePreviewItem(context: self.context, messages: [message1], theme: self.presentationData.theme, strings: self.presentationData.strings, wallpaper: self.presentationData.chatWallpaper, fontSize: self.presentationData.chatFontSize, chatBubbleCorners: self.presentationData.chatBubbleCorners, dateTimeFormat: self.presentationData.dateTimeFormat, nameOrder: self.presentationData.nameDisplayOrder, forcedResourceStatus: nil, tapMessage: nil, clickThroughMessage: nil, backgroundNode: self.chatBackgroundNode, availableReactions: nil, accountPeer: nil, isCentered: false, isPreview: true, isStandalone: false)) let message2 = Message(stableId: 3, stableVersion: 0, id: MessageId(peerId: peerId, namespace: 0, id: 3), globallyUniqueId: nil, groupingKey: nil, groupInfo: nil, threadId: nil, timestamp: 66002, flags: [.Incoming], tags: [], globalTags: [], localTags: [], customTags: [], forwardInfo: nil, author: peers[peerId], text: self.presentationData.strings.Appearance_ThemePreview_Chat_2_Text, attributes: [ReplyMessageAttribute(messageId: 
replyMessageId, threadMessageId: nil, quote: nil, isQuote: false)], media: [], peers: peers, associatedMessages: messages, associatedMessageIds: [], associatedMedia: [:], associatedThreadInfo: nil, associatedStories: [:]) - items.append(self.context.sharedContext.makeChatMessagePreviewItem(context: self.context, messages: [message2], theme: self.presentationData.theme, strings: self.presentationData.strings, wallpaper: self.presentationData.chatWallpaper, fontSize: self.presentationData.chatFontSize, chatBubbleCorners: self.presentationData.chatBubbleCorners, dateTimeFormat: self.presentationData.dateTimeFormat, nameOrder: self.presentationData.nameDisplayOrder, forcedResourceStatus: nil, tapMessage: nil, clickThroughMessage: nil, backgroundNode: self.chatBackgroundNode, availableReactions: nil, accountPeer: nil, isCentered: false, isPreview: true)) + items.append(self.context.sharedContext.makeChatMessagePreviewItem(context: self.context, messages: [message2], theme: self.presentationData.theme, strings: self.presentationData.strings, wallpaper: self.presentationData.chatWallpaper, fontSize: self.presentationData.chatFontSize, chatBubbleCorners: self.presentationData.chatBubbleCorners, dateTimeFormat: self.presentationData.dateTimeFormat, nameOrder: self.presentationData.nameDisplayOrder, forcedResourceStatus: nil, tapMessage: nil, clickThroughMessage: nil, backgroundNode: self.chatBackgroundNode, availableReactions: nil, accountPeer: nil, isCentered: false, isPreview: true, isStandalone: false)) let waveformBase64 = "DAAOAAkACQAGAAwADwAMABAADQAPABsAGAALAA0AGAAfABoAHgATABgAGQAYABQADAAVABEAHwANAA0ACQAWABkACQAOAAwACQAfAAAAGQAVAAAAEwATAAAACAAfAAAAHAAAABwAHwAAABcAGQAAABQADgAAABQAHwAAAB8AHwAAAAwADwAAAB8AEwAAABoAFwAAAB8AFAAAAAAAHwAAAAAAHgAAAAAAHwAAAAAAHwAAAAAAHwAAAAAAHwAAAAAAHwAAAAAAAAA=" let voiceAttributes: [TelegramMediaFileAttribute] = [.Audio(isVoice: true, duration: 23, title: nil, performer: nil, waveform: Data(base64Encoded: waveformBase64)!)] let voiceMedia = 
TelegramMediaFile(fileId: MediaId(namespace: 0, id: 0), partialReference: nil, resource: LocalFileMediaResource(fileId: 0), previewRepresentations: [], videoThumbnails: [], immediateThumbnailData: nil, mimeType: "audio/ogg", size: 0, attributes: voiceAttributes) let message3 = Message(stableId: 1, stableVersion: 0, id: MessageId(peerId: peerId, namespace: 0, id: 1), globallyUniqueId: nil, groupingKey: nil, groupInfo: nil, threadId: nil, timestamp: 66001, flags: [.Incoming], tags: [], globalTags: [], localTags: [], customTags: [], forwardInfo: nil, author: peers[peerId], text: "", attributes: [], media: [voiceMedia], peers: peers, associatedMessages: messages, associatedMessageIds: [], associatedMedia: [:], associatedThreadInfo: nil, associatedStories: [:]) - items.append(self.context.sharedContext.makeChatMessagePreviewItem(context: self.context, messages: [message3], theme: self.presentationData.theme, strings: self.presentationData.strings, wallpaper: self.presentationData.chatWallpaper, fontSize: self.presentationData.chatFontSize, chatBubbleCorners: self.presentationData.chatBubbleCorners, dateTimeFormat: self.presentationData.dateTimeFormat, nameOrder: self.presentationData.nameDisplayOrder, forcedResourceStatus: FileMediaResourceStatus(mediaStatus: .playbackStatus(.paused), fetchStatus: .Local), tapMessage: nil, clickThroughMessage: nil, backgroundNode: self.chatBackgroundNode, availableReactions: nil, accountPeer: nil, isCentered: false, isPreview: true)) + items.append(self.context.sharedContext.makeChatMessagePreviewItem(context: self.context, messages: [message3], theme: self.presentationData.theme, strings: self.presentationData.strings, wallpaper: self.presentationData.chatWallpaper, fontSize: self.presentationData.chatFontSize, chatBubbleCorners: self.presentationData.chatBubbleCorners, dateTimeFormat: self.presentationData.dateTimeFormat, nameOrder: self.presentationData.nameDisplayOrder, forcedResourceStatus: FileMediaResourceStatus(mediaStatus: 
.playbackStatus(.paused), fetchStatus: .Local), tapMessage: nil, clickThroughMessage: nil, backgroundNode: self.chatBackgroundNode, availableReactions: nil, accountPeer: nil, isCentered: false, isPreview: true, isStandalone: false)) let message4 = Message(stableId: 2, stableVersion: 0, id: MessageId(peerId: otherPeerId, namespace: 0, id: 2), globallyUniqueId: nil, groupingKey: nil, groupInfo: nil, threadId: nil, timestamp: 66001, flags: [], tags: [], globalTags: [], localTags: [], customTags: [], forwardInfo: nil, author: peers[otherPeerId], text: self.presentationData.strings.Appearance_ThemePreview_Chat_1_Text, attributes: [], media: [], peers: peers, associatedMessages: messages, associatedMessageIds: [], associatedMedia: [:], associatedThreadInfo: nil, associatedStories: [:]) - items.append(self.context.sharedContext.makeChatMessagePreviewItem(context: self.context, messages: [message4], theme: self.presentationData.theme, strings: self.presentationData.strings, wallpaper: self.presentationData.chatWallpaper, fontSize: self.presentationData.chatFontSize, chatBubbleCorners: self.presentationData.chatBubbleCorners, dateTimeFormat: self.presentationData.dateTimeFormat, nameOrder: self.presentationData.nameDisplayOrder, forcedResourceStatus: nil, tapMessage: nil, clickThroughMessage: nil, backgroundNode: self.chatBackgroundNode, availableReactions: nil, accountPeer: nil, isCentered: false, isPreview: true)) + items.append(self.context.sharedContext.makeChatMessagePreviewItem(context: self.context, messages: [message4], theme: self.presentationData.theme, strings: self.presentationData.strings, wallpaper: self.presentationData.chatWallpaper, fontSize: self.presentationData.chatFontSize, chatBubbleCorners: self.presentationData.chatBubbleCorners, dateTimeFormat: self.presentationData.dateTimeFormat, nameOrder: self.presentationData.nameDisplayOrder, forcedResourceStatus: nil, tapMessage: nil, clickThroughMessage: nil, backgroundNode: self.chatBackgroundNode, 
availableReactions: nil, accountPeer: nil, isCentered: false, isPreview: true, isStandalone: false)) let width: CGFloat if case .regular = layout.metrics.widthClass { diff --git a/submodules/SettingsUI/Sources/Themes/ThemePreviewControllerNode.swift b/submodules/SettingsUI/Sources/Themes/ThemePreviewControllerNode.swift index 5f83dd3aec..29a2264c0d 100644 --- a/submodules/SettingsUI/Sources/Themes/ThemePreviewControllerNode.swift +++ b/submodules/SettingsUI/Sources/Themes/ThemePreviewControllerNode.swift @@ -621,7 +621,7 @@ final class ThemePreviewControllerNode: ASDisplayNode, UIScrollViewDelegate { sampleMessages.append(message8) items = sampleMessages.reversed().map { message in - self.context.sharedContext.makeChatMessagePreviewItem(context: self.context, messages: [message], theme: self.previewTheme, strings: self.presentationData.strings, wallpaper: self.wallpaper, fontSize: self.presentationData.chatFontSize, chatBubbleCorners: self.presentationData.chatBubbleCorners, dateTimeFormat: self.presentationData.dateTimeFormat, nameOrder: self.presentationData.nameDisplayOrder, forcedResourceStatus: !message.media.isEmpty ? FileMediaResourceStatus(mediaStatus: .playbackStatus(.paused), fetchStatus: .Local) : nil, tapMessage: nil, clickThroughMessage: nil, backgroundNode: self.wallpaperNode, availableReactions: nil, accountPeer: nil, isCentered: false, isPreview: true) + self.context.sharedContext.makeChatMessagePreviewItem(context: self.context, messages: [message], theme: self.previewTheme, strings: self.presentationData.strings, wallpaper: self.wallpaper, fontSize: self.presentationData.chatFontSize, chatBubbleCorners: self.presentationData.chatBubbleCorners, dateTimeFormat: self.presentationData.dateTimeFormat, nameOrder: self.presentationData.nameDisplayOrder, forcedResourceStatus: !message.media.isEmpty ? 
FileMediaResourceStatus(mediaStatus: .playbackStatus(.paused), fetchStatus: .Local) : nil, tapMessage: nil, clickThroughMessage: nil, backgroundNode: self.wallpaperNode, availableReactions: nil, accountPeer: nil, isCentered: false, isPreview: true, isStandalone: false) } let width: CGFloat diff --git a/submodules/SettingsUI/Sources/Themes/ThemeSettingsChatPreviewItem.swift b/submodules/SettingsUI/Sources/Themes/ThemeSettingsChatPreviewItem.swift index b26e8f3cc4..3cfc9025b2 100644 --- a/submodules/SettingsUI/Sources/Themes/ThemeSettingsChatPreviewItem.swift +++ b/submodules/SettingsUI/Sources/Themes/ThemeSettingsChatPreviewItem.swift @@ -168,7 +168,7 @@ class ThemeSettingsChatPreviewItemNode: ListViewItemNode { } let message = Message(stableId: 1, stableVersion: 0, id: MessageId(peerId: messageItem.outgoing ? otherPeerId : peerId, namespace: 0, id: 1), globallyUniqueId: nil, groupingKey: nil, groupInfo: nil, threadId: nil, timestamp: 66000, flags: messageItem.outgoing ? [] : [.Incoming], tags: [], globalTags: [], localTags: [], customTags: [], forwardInfo: nil, author: messageItem.outgoing ? TelegramUser(id: otherPeerId, accessHash: nil, firstName: "", lastName: "", username: nil, phone: nil, photo: [], botInfo: nil, restrictionInfo: nil, flags: [], emojiStatus: nil, usernames: [], storiesHidden: nil, nameColor: nil, backgroundEmojiId: nil, profileColor: nil, profileBackgroundEmojiId: nil) : nil, text: messageItem.text, attributes: messageItem.reply != nil ? 
[ReplyMessageAttribute(messageId: replyMessageId, threadMessageId: nil, quote: nil, isQuote: false)] : [], media: [], peers: peers, associatedMessages: messages, associatedMessageIds: [], associatedMedia: [:], associatedThreadInfo: nil, associatedStories: [:]) - items.append(item.context.sharedContext.makeChatMessagePreviewItem(context: item.context, messages: [message], theme: item.componentTheme, strings: item.strings, wallpaper: item.wallpaper, fontSize: item.fontSize, chatBubbleCorners: item.chatBubbleCorners, dateTimeFormat: item.dateTimeFormat, nameOrder: item.nameDisplayOrder, forcedResourceStatus: nil, tapMessage: nil, clickThroughMessage: nil, backgroundNode: currentBackgroundNode, availableReactions: nil, accountPeer: nil, isCentered: false, isPreview: true)) + items.append(item.context.sharedContext.makeChatMessagePreviewItem(context: item.context, messages: [message], theme: item.componentTheme, strings: item.strings, wallpaper: item.wallpaper, fontSize: item.fontSize, chatBubbleCorners: item.chatBubbleCorners, dateTimeFormat: item.dateTimeFormat, nameOrder: item.nameDisplayOrder, forcedResourceStatus: nil, tapMessage: nil, clickThroughMessage: nil, backgroundNode: currentBackgroundNode, availableReactions: nil, accountPeer: nil, isCentered: false, isPreview: true, isStandalone: false)) } var nodes: [ListViewItemNode] = [] diff --git a/submodules/TelegramCore/Sources/State/PremiumRequiredToContact.swift b/submodules/TelegramCore/Sources/State/PremiumRequiredToContact.swift index a3f6546aa0..e2d26348bf 100644 --- a/submodules/TelegramCore/Sources/State/PremiumRequiredToContact.swift +++ b/submodules/TelegramCore/Sources/State/PremiumRequiredToContact.swift @@ -3,40 +3,54 @@ import Postbox import TelegramApi internal func _internal_updateIsPremiumRequiredToContact(account: Account, peerIds: [EnginePeer.Id]) -> Signal<[EnginePeer.Id], NoError> { - return account.postbox.transaction { transaction -> [Api.InputUser] in + return account.postbox.transaction { 
transaction -> ([Api.InputUser], [PeerId]) in var inputUsers: [Api.InputUser] = [] + var premiumRequired:[EnginePeer.Id] = [] for id in peerIds { if let peer = transaction.getPeer(id), let inputUser = apiInputUser(peer) { - inputUsers.append(inputUser) - } - } - return inputUsers - } |> mapToSignal { inputUsers -> Signal<[EnginePeer.Id], NoError> in - return account.network.request(Api.functions.users.getIsPremiumRequiredToContact(id: inputUsers)) - |> retryRequest - |> mapToSignal { result in - return account.postbox.transaction { transaction in - var requiredPeerIds: [EnginePeer.Id] = [] - for (i, req) in result.enumerated() { - let peerId = peerIds[i] - let required = req == .boolTrue - transaction.updatePeerCachedData(peerIds: Set([peerId]), update: { _, cachedData in - let data = cachedData as? CachedUserData ?? CachedUserData() - var flags = data.flags - if required { - flags.insert(.premiumRequired) - } else { - flags.remove(.premiumRequired) - } - return data.withUpdatedFlags(flags) - }) - if required { - requiredPeerIds.append(peerId) + if let cachedData = transaction.getPeerCachedData(peerId: id) as? CachedUserData { + if cachedData.flags.contains(.premiumRequired) { + premiumRequired.append(id) } + } else { + inputUsers.append(inputUser) } - - return requiredPeerIds } } + return (inputUsers, premiumRequired) + } |> mapToSignal { inputUsers, premiumRequired -> Signal<[EnginePeer.Id], NoError> in + + if !inputUsers.isEmpty { + return account.network.request(Api.functions.users.getIsPremiumRequiredToContact(id: inputUsers)) + |> retryRequest + |> mapToSignal { result in + return account.postbox.transaction { transaction in + var requiredPeerIds: [EnginePeer.Id] = [] + for (i, req) in result.enumerated() { + let peerId = peerIds[i] + let required = req == .boolTrue + transaction.updatePeerCachedData(peerIds: Set([peerId]), update: { _, cachedData in + let data = cachedData as? CachedUserData ?? 
CachedUserData() + var flags = data.flags + if required { + flags.insert(.premiumRequired) + } else { + flags.remove(.premiumRequired) + } + return data.withUpdatedFlags(flags) + }) + if required { + requiredPeerIds.append(peerId) + } + } + let result = requiredPeerIds + premiumRequired + return result + } + } + } else { + return .single(premiumRequired) + } + + } } diff --git a/submodules/TelegramNotices/Sources/Notices.swift b/submodules/TelegramNotices/Sources/Notices.swift index f450a51749..95dfbdca6b 100644 --- a/submodules/TelegramNotices/Sources/Notices.swift +++ b/submodules/TelegramNotices/Sources/Notices.swift @@ -192,6 +192,9 @@ private enum ApplicationSpecificGlobalNotice: Int32 { case voiceMessagesPlayOnceSuggestion = 58 case incomingVoiceMessagePlayOnceTip = 59 case outgoingVoiceMessagePlayOnceTip = 60 + case videoMessagesPlayOnceSuggestion = 61 + case incomingVideoMessagePlayOnceTip = 62 + case outgoingVideoMessagePlayOnceTip = 63 var key: ValueBoxKey { let v = ValueBoxKey(length: 4) @@ -489,6 +492,18 @@ private struct ApplicationSpecificNoticeKeys { static func outgoingVoiceMessagePlayOnceTip() -> NoticeEntryKey { return NoticeEntryKey(namespace: noticeNamespace(namespace: globalNamespace), key: ApplicationSpecificGlobalNotice.outgoingVoiceMessagePlayOnceTip.key) } + + static func videoMessagesPlayOnceSuggestion() -> NoticeEntryKey { + return NoticeEntryKey(namespace: noticeNamespace(namespace: globalNamespace), key: ApplicationSpecificGlobalNotice.videoMessagesPlayOnceSuggestion.key) + } + + static func incomingVideoMessagePlayOnceTip() -> NoticeEntryKey { + return NoticeEntryKey(namespace: noticeNamespace(namespace: globalNamespace), key: ApplicationSpecificGlobalNotice.incomingVideoMessagePlayOnceTip.key) + } + + static func outgoingVideoMessagePlayOnceTip() -> NoticeEntryKey { + return NoticeEntryKey(namespace: noticeNamespace(namespace: globalNamespace), key: ApplicationSpecificGlobalNotice.outgoingVideoMessagePlayOnceTip.key) + } } public 
struct ApplicationSpecificNotice { @@ -1984,4 +1999,85 @@ public struct ApplicationSpecificNotice { return Int(previousValue) } } + + public static func getVideoMessagesPlayOnceSuggestion(accountManager: AccountManager) -> Signal { + return accountManager.transaction { transaction -> Int32 in + if let value = transaction.getNotice(ApplicationSpecificNoticeKeys.videoMessagesPlayOnceSuggestion())?.get(ApplicationSpecificCounterNotice.self) { + return value.value + } else { + return 0 + } + } + } + + public static func incrementVideoMessagesPlayOnceSuggestion(accountManager: AccountManager, count: Int = 1) -> Signal { + return accountManager.transaction { transaction -> Int in + var currentValue: Int32 = 0 + if let value = transaction.getNotice(ApplicationSpecificNoticeKeys.videoMessagesPlayOnceSuggestion())?.get(ApplicationSpecificCounterNotice.self) { + currentValue = value.value + } + let previousValue = currentValue + currentValue += Int32(count) + + if let entry = CodableEntry(ApplicationSpecificCounterNotice(value: currentValue)) { + transaction.setNotice(ApplicationSpecificNoticeKeys.videoMessagesPlayOnceSuggestion(), entry) + } + + return Int(previousValue) + } + } + + public static func getIncomingVideoMessagePlayOnceTip(accountManager: AccountManager) -> Signal { + return accountManager.transaction { transaction -> Int32 in + if let value = transaction.getNotice(ApplicationSpecificNoticeKeys.incomingVideoMessagePlayOnceTip())?.get(ApplicationSpecificCounterNotice.self) { + return value.value + } else { + return 0 + } + } + } + + public static func incrementIncomingVideoMessagePlayOnceTip(accountManager: AccountManager, count: Int = 1) -> Signal { + return accountManager.transaction { transaction -> Int in + var currentValue: Int32 = 0 + if let value = transaction.getNotice(ApplicationSpecificNoticeKeys.incomingVideoMessagePlayOnceTip())?.get(ApplicationSpecificCounterNotice.self) { + currentValue = value.value + } + let previousValue = currentValue + 
currentValue += Int32(count) + + if let entry = CodableEntry(ApplicationSpecificCounterNotice(value: currentValue)) { + transaction.setNotice(ApplicationSpecificNoticeKeys.incomingVideoMessagePlayOnceTip(), entry) + } + + return Int(previousValue) + } + } + + public static func getOutgoingVideoMessagePlayOnceTip(accountManager: AccountManager) -> Signal { + return accountManager.transaction { transaction -> Int32 in + if let value = transaction.getNotice(ApplicationSpecificNoticeKeys.outgoingVideoMessagePlayOnceTip())?.get(ApplicationSpecificCounterNotice.self) { + return value.value + } else { + return 0 + } + } + } + + public static func incrementOutgoingVideoMessagePlayOnceTip(accountManager: AccountManager, count: Int = 1) -> Signal { + return accountManager.transaction { transaction -> Int in + var currentValue: Int32 = 0 + if let value = transaction.getNotice(ApplicationSpecificNoticeKeys.outgoingVideoMessagePlayOnceTip())?.get(ApplicationSpecificCounterNotice.self) { + currentValue = value.value + } + let previousValue = currentValue + currentValue += Int32(count) + + if let entry = CodableEntry(ApplicationSpecificCounterNotice(value: currentValue)) { + transaction.setNotice(ApplicationSpecificNoticeKeys.outgoingVideoMessagePlayOnceTip(), entry) + } + + return Int(previousValue) + } + } } diff --git a/submodules/TelegramUI/BUILD b/submodules/TelegramUI/BUILD index 0e0ee8712c..40a76fbdc8 100644 --- a/submodules/TelegramUI/BUILD +++ b/submodules/TelegramUI/BUILD @@ -423,6 +423,8 @@ swift_library( "//submodules/Components/MultilineTextComponent", "//submodules/TelegramUI/Components/PlainButtonComponent", "//submodules/Components/BalancedTextComponent", + "//submodules/TelegramUI/Components/VideoMessageCameraScreen", + "//submodules/TelegramUI/Components/MediaScrubberComponent", ] + select({ "@build_bazel_rules_apple//apple:ios_arm64": appcenter_targets, "//build-system:ios_sim_arm64": [], diff --git 
a/submodules/TelegramUI/Components/CameraScreen/Sources/CameraScreen.swift b/submodules/TelegramUI/Components/CameraScreen/Sources/CameraScreen.swift index 83d72e2721..0e7de448e1 100644 --- a/submodules/TelegramUI/Components/CameraScreen/Sources/CameraScreen.swift +++ b/submodules/TelegramUI/Components/CameraScreen/Sources/CameraScreen.swift @@ -651,10 +651,10 @@ private final class CameraScreenComponent: CombinedComponent { let startRecording = { self.resultDisposable.set((camera.startRecording() - |> deliverOnMainQueue).start(next: { [weak self] duration in + |> deliverOnMainQueue).start(next: { [weak self] recordingData in if let self, let controller = self.getController() { - controller.updateCameraState({ $0.updatedDuration(duration) }, transition: .easeInOut(duration: 0.1)) - if duration > 59.0 { + controller.updateCameraState({ $0.updatedDuration(recordingData.duration) }, transition: .easeInOut(duration: 0.1)) + if recordingData.duration > 59.0 { self.stopVideoRecording() } } @@ -1607,7 +1607,6 @@ public class CameraScreen: ViewController { if case .pendingImage = value { Queue.mainQueue().async { self.mainPreviewView.isEnabled = false - self.additionalPreviewView.isEnabled = false } } else { diff --git a/submodules/TelegramUI/Components/Chat/ChatInstantVideoMessageDurationNode/BUILD b/submodules/TelegramUI/Components/Chat/ChatInstantVideoMessageDurationNode/BUILD index d3250e2fad..ef0bf22fb1 100644 --- a/submodules/TelegramUI/Components/Chat/ChatInstantVideoMessageDurationNode/BUILD +++ b/submodules/TelegramUI/Components/Chat/ChatInstantVideoMessageDurationNode/BUILD @@ -14,6 +14,7 @@ swift_library( "//submodules/SSignalKit/SwiftSignalKit", "//submodules/Display", "//submodules/MediaPlayer:UniversalMediaPlayer", + "//submodules/AnimatedCountLabelNode", ], visibility = [ "//visibility:public", diff --git a/submodules/TelegramUI/Components/Chat/ChatInstantVideoMessageDurationNode/Sources/ChatInstantVideoMessageDurationNode.swift 
b/submodules/TelegramUI/Components/Chat/ChatInstantVideoMessageDurationNode/Sources/ChatInstantVideoMessageDurationNode.swift index e75e529677..b33ad62baa 100644 --- a/submodules/TelegramUI/Components/Chat/ChatInstantVideoMessageDurationNode/Sources/ChatInstantVideoMessageDurationNode.swift +++ b/submodules/TelegramUI/Components/Chat/ChatInstantVideoMessageDurationNode/Sources/ChatInstantVideoMessageDurationNode.swift @@ -4,6 +4,7 @@ import AsyncDisplayKit import SwiftSignalKit import Display import UniversalMediaPlayer +import AnimatedCountLabelNode private let textFont = Font.with(size: 11.0, design: .regular, weight: .regular, traits: [.monospacedNumbers]) diff --git a/submodules/TelegramUI/Components/Chat/ChatMessageBubbleItemNode/Sources/ChatMessageBubbleItemNode.swift b/submodules/TelegramUI/Components/Chat/ChatMessageBubbleItemNode/Sources/ChatMessageBubbleItemNode.swift index e13352efde..e58dd0a6ea 100644 --- a/submodules/TelegramUI/Components/Chat/ChatMessageBubbleItemNode/Sources/ChatMessageBubbleItemNode.swift +++ b/submodules/TelegramUI/Components/Chat/ChatMessageBubbleItemNode/Sources/ChatMessageBubbleItemNode.swift @@ -2913,13 +2913,8 @@ public class ChatMessageBubbleItemNode: ChatMessageItemView, ChatMessagePreviewI } var legacyTransition: ContainedViewLayoutTransition = .immediate - var useDisplayLinkAnimations = false if case let .System(duration, _) = animation { legacyTransition = .animated(duration: duration, curve: .spring) - - if let subject = item.associatedData.subject, case .messageOptions = subject, !"".isEmpty { - useDisplayLinkAnimations = true - } } var forceBackgroundSide = false @@ -3230,9 +3225,7 @@ public class ChatMessageBubbleItemNode: ChatMessageItemView, ChatMessagePreviewI strongSelf.credibilityHighlightNode = nil } } - - let beginAt = applyInfo.timestamp ?? 
CACurrentMediaTime() - + let timingFunction = kCAMediaTimingFunctionSpring if let forwardInfoNode = forwardInfoSizeApply.1(bubbleContentWidth) { strongSelf.forwardInfoNode = forwardInfoNode @@ -3255,15 +3248,8 @@ public class ChatMessageBubbleItemNode: ChatMessageItemView, ChatMessagePreviewI let forwardInfoFrame = CGRect(origin: CGPoint(x: contentOrigin.x + layoutConstants.text.bubbleInsets.left, y: layoutConstants.bubble.contentInsets.top + forwardInfoOriginY), size: CGSize(width: bubbleContentWidth, height: forwardInfoSizeApply.0.height)) if case let .System(duration, _) = animation { if animateFrame { - if useDisplayLinkAnimations { - let animation = ListViewAnimation(from: previousForwardInfoNodeFrame, to: forwardInfoFrame, duration: duration * UIView.animationDurationFactor(), curve: strongSelf.preferredAnimationCurve, beginAt: beginAt, update: { _, frame in - forwardInfoNode.frame = frame - }) - strongSelf.setAnimationForKey("forwardFrame", animation: animation) - } else { - forwardInfoNode.frame = forwardInfoFrame - forwardInfoNode.layer.animateFrame(from: previousForwardInfoNodeFrame, to: forwardInfoFrame, duration: duration, timingFunction: timingFunction) - } + forwardInfoNode.frame = forwardInfoFrame + forwardInfoNode.layer.animateFrame(from: previousForwardInfoNodeFrame, to: forwardInfoFrame, duration: duration, timingFunction: timingFunction) } else { forwardInfoNode.frame = forwardInfoFrame } @@ -3637,7 +3623,6 @@ public class ChatMessageBubbleItemNode: ChatMessageItemView, ChatMessagePreviewI } let contentNodeFrame = relativeFrame.offsetBy(dx: effectiveContentOriginX, dy: effectiveContentOriginY) - let previousContentNodeFrame = contentNode.frame if case let .System(duration, _) = animation { var animateFrame = false @@ -3653,58 +3638,51 @@ public class ChatMessageBubbleItemNode: ChatMessageItemView, ChatMessagePreviewI } if animateFrame { - if useDisplayLinkAnimations { - let animation = ListViewAnimation(from: previousContentNodeFrame, to: 
contentNodeFrame, duration: duration * UIView.animationDurationFactor(), curve: strongSelf.preferredAnimationCurve, beginAt: beginAt, update: { _, frame in - contentNode.frame = frame + var useExpensiveSnapshot = false + if case .messageOptions = item.associatedData.subject { + useExpensiveSnapshot = true + } + + if let animateTextAndWebpagePositionSwap, let contentNode = contentNode as? ChatMessageTextBubbleContentNode, let snapshotView = useExpensiveSnapshot ? contentNode.view.snapshotView(afterScreenUpdates: false) : contentNode.layer.snapshotContentTreeAsView() { + let clippingView = UIView() + clippingView.clipsToBounds = true + clippingView.frame = contentNode.frame + + clippingView.addSubview(snapshotView) + snapshotView.frame = CGRect(origin: CGPoint(), size: contentNode.bounds.size) + + contentNode.view.superview?.insertSubview(clippingView, belowSubview: contentNode.view) + + animation.animator.updateAlpha(layer: clippingView.layer, alpha: 0.0, completion: { [weak clippingView] _ in + clippingView?.removeFromSuperview() }) - strongSelf.setAnimationForKey("contentNode\(contentNodeIndex)Frame", animation: animation) - } else { - var useExpensiveSnapshot = false - if case .messageOptions = item.associatedData.subject { - useExpensiveSnapshot = true + + let positionOffset: CGFloat = animateTextAndWebpagePositionSwap ? 
-1.0 : 1.0 + + animation.animator.updatePosition(layer: snapshotView.layer, position: CGPoint(x: snapshotView.center.x, y: snapshotView.center.y + positionOffset * contentNode.frame.height), completion: nil) + + contentNode.frame = contentNodeFrame + + if let statusNode = contentNode.statusNode, let contentSuperview = contentNode.view.superview, statusNode.view.isDescendant(of: contentSuperview), let bottomStatusNodeAnimationSourcePosition { + let localSourcePosition = statusNode.view.convert(bottomStatusNodeAnimationSourcePosition, from: contentSuperview) + let offset = CGPoint(x: statusNode.bounds.width - localSourcePosition.x, y: statusNode.bounds.height - localSourcePosition.y) + animation.animator.animatePosition(layer: statusNode.layer, from: statusNode.layer.position.offsetBy(dx: -offset.x, dy: -offset.y), to: statusNode.layer.position, completion: nil) } - if let animateTextAndWebpagePositionSwap, let contentNode = contentNode as? ChatMessageTextBubbleContentNode, let snapshotView = useExpensiveSnapshot ? contentNode.view.snapshotView(afterScreenUpdates: false) : contentNode.layer.snapshotContentTreeAsView() { - let clippingView = UIView() - clippingView.clipsToBounds = true - clippingView.frame = contentNode.frame - - clippingView.addSubview(snapshotView) - snapshotView.frame = CGRect(origin: CGPoint(), size: contentNode.bounds.size) - - contentNode.view.superview?.insertSubview(clippingView, belowSubview: contentNode.view) - - animation.animator.updateAlpha(layer: clippingView.layer, alpha: 0.0, completion: { [weak clippingView] _ in - clippingView?.removeFromSuperview() - }) - - let positionOffset: CGFloat = animateTextAndWebpagePositionSwap ? 
-1.0 : 1.0 - - animation.animator.updatePosition(layer: snapshotView.layer, position: CGPoint(x: snapshotView.center.x, y: snapshotView.center.y + positionOffset * contentNode.frame.height), completion: nil) - - contentNode.frame = contentNodeFrame - - if let statusNode = contentNode.statusNode, let contentSuperview = contentNode.view.superview, statusNode.view.isDescendant(of: contentSuperview), let bottomStatusNodeAnimationSourcePosition { - let localSourcePosition = statusNode.view.convert(bottomStatusNodeAnimationSourcePosition, from: contentSuperview) - let offset = CGPoint(x: statusNode.bounds.width - localSourcePosition.x, y: statusNode.bounds.height - localSourcePosition.y) - animation.animator.animatePosition(layer: statusNode.layer, from: statusNode.layer.position.offsetBy(dx: -offset.x, dy: -offset.y), to: statusNode.layer.position, completion: nil) - } - - contentNode.animateClippingTransition(offset: positionOffset * contentNodeFrame.height, animation: animation) - - contentNode.alpha = 0.0 - animation.animator.updateAlpha(layer: contentNode.layer, alpha: 1.0, completion: nil) - } else if animateTextAndWebpagePositionSwap != nil, let contentNode = contentNode as? 
ChatMessageWebpageBubbleContentNode { - if let statusNode = contentNode.contentNode.statusNode, let contentSuperview = contentNode.view.superview, statusNode.view.isDescendant(of: contentSuperview), let bottomStatusNodeAnimationSourcePosition { - let localSourcePosition = statusNode.view.convert(bottomStatusNodeAnimationSourcePosition, from: contentSuperview) - let offset = CGPoint(x: statusNode.bounds.width - localSourcePosition.x, y: statusNode.bounds.height - localSourcePosition.y) - animation.animator.animatePosition(layer: statusNode.layer, from: statusNode.layer.position.offsetBy(dx: -offset.x, dy: -offset.y), to: statusNode.layer.position, completion: nil) - } - - animation.animator.updateFrame(layer: contentNode.layer, frame: contentNodeFrame, completion: nil) - } else { - animation.animator.updateFrame(layer: contentNode.layer, frame: contentNodeFrame, completion: nil) + contentNode.animateClippingTransition(offset: positionOffset * contentNodeFrame.height, animation: animation) + + contentNode.alpha = 0.0 + animation.animator.updateAlpha(layer: contentNode.layer, alpha: 1.0, completion: nil) + } else if animateTextAndWebpagePositionSwap != nil, let contentNode = contentNode as? 
ChatMessageWebpageBubbleContentNode { + if let statusNode = contentNode.contentNode.statusNode, let contentSuperview = contentNode.view.superview, statusNode.view.isDescendant(of: contentSuperview), let bottomStatusNodeAnimationSourcePosition { + let localSourcePosition = statusNode.view.convert(bottomStatusNodeAnimationSourcePosition, from: contentSuperview) + let offset = CGPoint(x: statusNode.bounds.width - localSourcePosition.x, y: statusNode.bounds.height - localSourcePosition.y) + animation.animator.animatePosition(layer: statusNode.layer, from: statusNode.layer.position.offsetBy(dx: -offset.x, dy: -offset.y), to: statusNode.layer.position, completion: nil) } + + animation.animator.updateFrame(layer: contentNode.layer, frame: contentNodeFrame, completion: nil) + } else { + animation.animator.updateFrame(layer: contentNode.layer, frame: contentNodeFrame, completion: nil) } } else if animateAlpha { contentNode.frame = contentNodeFrame @@ -3857,48 +3835,23 @@ public class ChatMessageBubbleItemNode: ChatMessageItemView, ChatMessagePreviewI isCurrentlyPlayingMedia = true } - if case let .System(duration, _) = animation/*, !strongSelf.mainContextSourceNode.isExtractedToContextPreview*/ { + if case .System = animation/*, !strongSelf.mainContextSourceNode.isExtractedToContextPreview*/ { if !strongSelf.backgroundNode.frame.equalTo(backgroundFrame) { - if useDisplayLinkAnimations { - strongSelf.clippingNode.clipsToBounds = shouldClipOnTransitions - let backgroundAnimation = ListViewAnimation(from: strongSelf.backgroundNode.frame, to: backgroundFrame, duration: duration * UIView.animationDurationFactor(), curve: strongSelf.preferredAnimationCurve, beginAt: beginAt, update: { [weak strongSelf] _, frame in - if let strongSelf = strongSelf { - strongSelf.backgroundNode.frame = frame - if let backgroundHighlightNode = strongSelf.backgroundHighlightNode { - backgroundHighlightNode.frame = frame - backgroundHighlightNode.updateLayout(size: frame.size, transition: .immediate) 
- } - strongSelf.clippingNode.position = CGPoint(x: frame.midX, y: frame.midY) - strongSelf.clippingNode.bounds = CGRect(origin: CGPoint(x: frame.minX, y: frame.minY), size: frame.size) - - strongSelf.backgroundNode.updateLayout(size: frame.size, transition: .immediate) - strongSelf.backgroundWallpaperNode.updateFrame(frame, transition: .immediate) - strongSelf.shadowNode.updateLayout(backgroundFrame: frame, transition: .immediate) - } - }, completed: { [weak strongSelf] _ in - guard let strongSelf else { - return - } - strongSelf.clippingNode.clipsToBounds = false - }) - strongSelf.setAnimationForKey("backgroundNodeFrame", animation: backgroundAnimation) - } else { - animation.animator.updateFrame(layer: strongSelf.backgroundNode.layer, frame: backgroundFrame, completion: nil) - if let backgroundHighlightNode = strongSelf.backgroundHighlightNode { - animation.animator.updateFrame(layer: backgroundHighlightNode.layer, frame: backgroundFrame, completion: nil) - backgroundHighlightNode.updateLayout(size: backgroundFrame.size, transition: animation) - } - animation.animator.updatePosition(layer: strongSelf.clippingNode.layer, position: backgroundFrame.center, completion: nil) - strongSelf.clippingNode.clipsToBounds = shouldClipOnTransitions - animation.animator.updateBounds(layer: strongSelf.clippingNode.layer, bounds: CGRect(origin: CGPoint(x: backgroundFrame.minX, y: backgroundFrame.minY), size: backgroundFrame.size), completion: { [weak strongSelf] _ in - strongSelf?.clippingNode.clipsToBounds = false - }) - - strongSelf.backgroundNode.updateLayout(size: backgroundFrame.size, transition: animation) - animation.animator.updateFrame(layer: strongSelf.backgroundWallpaperNode.layer, frame: backgroundFrame, completion: nil) - strongSelf.shadowNode.updateLayout(backgroundFrame: backgroundFrame, animator: animation.animator) - strongSelf.backgroundWallpaperNode.updateFrame(backgroundFrame, animator: animation.animator) + animation.animator.updateFrame(layer: 
strongSelf.backgroundNode.layer, frame: backgroundFrame, completion: nil) + if let backgroundHighlightNode = strongSelf.backgroundHighlightNode { + animation.animator.updateFrame(layer: backgroundHighlightNode.layer, frame: backgroundFrame, completion: nil) + backgroundHighlightNode.updateLayout(size: backgroundFrame.size, transition: animation) } + animation.animator.updatePosition(layer: strongSelf.clippingNode.layer, position: backgroundFrame.center, completion: nil) + strongSelf.clippingNode.clipsToBounds = shouldClipOnTransitions + animation.animator.updateBounds(layer: strongSelf.clippingNode.layer, bounds: CGRect(origin: CGPoint(x: backgroundFrame.minX, y: backgroundFrame.minY), size: backgroundFrame.size), completion: { [weak strongSelf] _ in + strongSelf?.clippingNode.clipsToBounds = false + }) + + strongSelf.backgroundNode.updateLayout(size: backgroundFrame.size, transition: animation) + animation.animator.updateFrame(layer: strongSelf.backgroundWallpaperNode.layer, frame: backgroundFrame, completion: nil) + strongSelf.shadowNode.updateLayout(backgroundFrame: backgroundFrame, animator: animation.animator) + strongSelf.backgroundWallpaperNode.updateFrame(backgroundFrame, animator: animation.animator) if let _ = strongSelf.backgroundNode.type { if !strongSelf.mainContextSourceNode.isExtractedToContextPreview { diff --git a/submodules/TelegramUI/Components/Chat/ChatMessageContactBubbleContentNode/Sources/ChatMessageContactBubbleContentNode.swift b/submodules/TelegramUI/Components/Chat/ChatMessageContactBubbleContentNode/Sources/ChatMessageContactBubbleContentNode.swift index 6af722fc3b..58649d5743 100644 --- a/submodules/TelegramUI/Components/Chat/ChatMessageContactBubbleContentNode/Sources/ChatMessageContactBubbleContentNode.swift +++ b/submodules/TelegramUI/Components/Chat/ChatMessageContactBubbleContentNode/Sources/ChatMessageContactBubbleContentNode.swift @@ -148,6 +148,9 @@ public class ChatMessageContactBubbleContentNode: ChatMessageBubbleContentNode { 
var textString: NSAttributedString? var updatedContactInfo: String? + var canMessage = false + var canAdd = false + var displayName: String = "" if let selectedContact = selectedContact { if !selectedContact.firstName.isEmpty && !selectedContact.lastName.isEmpty { @@ -161,6 +164,10 @@ public class ChatMessageContactBubbleContentNode: ChatMessageBubbleContentNode { displayName = item.presentationData.strings.Message_Contact } + if selectedContact.peerId != nil { + canMessage = true + } + let info: String if let previousContact = previousContact, previousContact.isEqual(to: selectedContact), let contactInfo = previousContactInfo { info = contactInfo @@ -199,6 +206,8 @@ public class ChatMessageContactBubbleContentNode: ChatMessageBubbleContentNode { } } + canAdd = !item.associatedData.deviceContactsNumbers.contains(selectedContact.phoneNumber) + updatedContactInfo = info titleString = NSAttributedString(string: displayName, font: titleFont, textColor: mainColor) @@ -305,10 +314,10 @@ public class ChatMessageContactBubbleContentNode: ChatMessageBubbleContentNode { if let statusSuggestedWidthAndContinue = statusSuggestedWidthAndContinue { maxContentWidth = max(maxContentWidth, statusSuggestedWidthAndContinue.0) } - maxContentWidth = max(maxContentWidth, avatarSize.width + 7.0 + titleLayout.size.width) - maxContentWidth = max(maxContentWidth, avatarSize.width + 7.0 + textLayout.size.width) + maxContentWidth = max(maxContentWidth, 7.0 + avatarSize.width + 7.0 + titleLayout.size.width + 7.0) + maxContentWidth = max(maxContentWidth, 7.0 + avatarSize.width + 7.0 + textLayout.size.width + 7.0) maxContentWidth = max(maxContentWidth, maxButtonWidth * 2.0) - maxContentWidth = max(maxContentWidth, 240.0) + maxContentWidth = max(maxContentWidth, 220.0) let contentWidth = maxContentWidth + layoutConstants.text.bubbleInsets.right * 2.0 @@ -316,7 +325,19 @@ public class ChatMessageContactBubbleContentNode: ChatMessageBubbleContentNode { let baseAvatarFrame = CGRect(origin: CGPoint(x: 
layoutConstants.text.bubbleInsets.right, y: layoutConstants.text.bubbleInsets.top), size: avatarSize) let lineWidth: CGFloat = 3.0 - let buttonWidth = floor((boundingWidth - layoutConstants.text.bubbleInsets.right * 2.0 - lineWidth) / 2.0) + + var buttonCount = 0 + if canMessage { + buttonCount += 1 + } + if canAdd { + buttonCount += 1 + } + var buttonWidth = floor((boundingWidth - layoutConstants.text.bubbleInsets.right * 2.0 - lineWidth)) + if buttonCount > 1 { + buttonWidth /= CGFloat(buttonCount) + } + let (messageButtonSize, messageButtonApply) = messageContinueLayout(buttonWidth, 33.0) let (addButtonSize, addButtonApply) = addContinueLayout(buttonWidth, 33.0) @@ -329,7 +350,7 @@ public class ChatMessageContactBubbleContentNode: ChatMessageBubbleContentNode { layoutSize.height += statusSizeAndApply.0.height - 4.0 } let messageButtonFrame = CGRect(origin: CGPoint(x: layoutConstants.text.bubbleInsets.right + lineWidth, y: layoutSize.height - 24.0 - messageButtonSize.height), size: messageButtonSize) - let addButtonFrame = CGRect(origin: CGPoint(x: layoutConstants.text.bubbleInsets.right + lineWidth + buttonWidth, y: layoutSize.height - 24.0 - addButtonSize.height), size: addButtonSize) + let addButtonFrame = CGRect(origin: CGPoint(x: layoutConstants.text.bubbleInsets.right + lineWidth + (canMessage ? 
buttonWidth : 0.0), y: layoutSize.height - 24.0 - addButtonSize.height), size: addButtonSize) let avatarFrame = baseAvatarFrame.offsetBy(dx: 9.0, dy: 14.0) var customLetters: [String] = [] @@ -362,12 +383,14 @@ public class ChatMessageContactBubbleContentNode: ChatMessageBubbleContentNode { strongSelf.titleNode.frame = CGRect(origin: CGPoint(x: avatarFrame.maxX + 7.0, y: avatarFrame.minY + 1.0), size: titleLayout.size) strongSelf.textNode.frame = CGRect(origin: CGPoint(x: avatarFrame.maxX + 7.0, y: avatarFrame.minY + 20.0), size: textLayout.size) - strongSelf.addButtonNode.frame = addButtonFrame + strongSelf.addButtonNode.frame = addButtonFrame + strongSelf.addButtonNode.isHidden = !canAdd strongSelf.messageButtonNode.frame = messageButtonFrame + strongSelf.messageButtonNode.isHidden = !canMessage let backgroundInsets = layoutConstants.text.bubbleInsets - let backgroundFrame = CGRect(origin: CGPoint(x: backgroundInsets.left, y: backgroundInsets.top + 5.0), size: CGSize(width: contentWidth - layoutConstants.text.bubbleInsets.right * 2.0, height: 94.0)) + let backgroundFrame = CGRect(origin: CGPoint(x: backgroundInsets.left, y: backgroundInsets.top + 5.0), size: CGSize(width: contentWidth - layoutConstants.text.bubbleInsets.right * 2.0, height: layoutSize.height - 34.0)) if let statusSizeAndApply = statusSizeAndApply { strongSelf.dateAndStatusNode.frame = CGRect(origin: CGPoint(x: layoutConstants.text.bubbleInsets.left, y: backgroundFrame.maxY + 3.0), size: statusSizeAndApply.0) @@ -479,7 +502,7 @@ public class ChatMessageContactBubbleContentNode: ChatMessageBubbleContentNode { return ChatMessageBubbleContentTapAction(content: .ignore) } if self.addButtonNode.frame.contains(point) { - return ChatMessageBubbleContentTapAction(content: .openMessage) + return ChatMessageBubbleContentTapAction(content: .ignore) } if self.dateAndStatusNode.supernode != nil, let _ = self.dateAndStatusNode.hitTest(self.view.convert(point, to: self.dateAndStatusNode.view), with: nil) { 
return ChatMessageBubbleContentTapAction(content: .ignore) @@ -490,7 +513,17 @@ public class ChatMessageContactBubbleContentNode: ChatMessageBubbleContentNode { @objc private func contactTap(_ recognizer: UITapGestureRecognizer) { if case .ended = recognizer.state { if let item = self.item { - let _ = item.controllerInteraction.openMessage(item.message, OpenMessageParams(mode: .default)) + var selectedContact: TelegramMediaContact? + for media in item.message.media { + if let media = media as? TelegramMediaContact { + selectedContact = media + } + } + if let peerId = selectedContact?.peerId, let peer = item.message.peers[peerId] { + item.controllerInteraction.openPeer(EnginePeer(peer), .info(nil), nil, .default) + } else { + let _ = item.controllerInteraction.openMessage(item.message, OpenMessageParams(mode: .default)) + } } } } diff --git a/submodules/TelegramUI/Components/Chat/ChatMessageInstantVideoBubbleContentNode/Sources/ChatMessageInstantVideoBubbleContentNode.swift b/submodules/TelegramUI/Components/Chat/ChatMessageInstantVideoBubbleContentNode/Sources/ChatMessageInstantVideoBubbleContentNode.swift index 8806ca16f8..71bcca0c73 100644 --- a/submodules/TelegramUI/Components/Chat/ChatMessageInstantVideoBubbleContentNode/Sources/ChatMessageInstantVideoBubbleContentNode.swift +++ b/submodules/TelegramUI/Components/Chat/ChatMessageInstantVideoBubbleContentNode/Sources/ChatMessageInstantVideoBubbleContentNode.swift @@ -192,6 +192,9 @@ public class ChatMessageInstantVideoBubbleContentNode: ChatMessageBubbleContentN } } + let isViewOnceMessage = item.message.minAutoremoveOrClearTimeout == viewOnceTimeout + let forceIsPlaying = isViewOnceMessage && didSetupFileNode + var incoming = item.message.effectivelyIncoming(item.context.account.peerId) if let subject = item.associatedData.subject, case let .messageOptions(_, _, info) = subject, case .forward = info { incoming = false @@ -200,19 +203,19 @@ public class ChatMessageInstantVideoBubbleContentNode: 
ChatMessageBubbleContentN let statusType: ChatMessageDateAndStatusType? switch preparePosition { case .linear(_, .None), .linear(_, .Neighbour(true, _, _)): - if incoming { - statusType = .BubbleIncoming + if incoming { + statusType = .BubbleIncoming + } else { + if item.message.flags.contains(.Failed) { + statusType = .BubbleOutgoing(.Failed) + } else if (item.message.flags.isSending && !item.message.isSentOrAcknowledged) || item.attributes.updatingMedia != nil { + statusType = .BubbleOutgoing(.Sending) } else { - if item.message.flags.contains(.Failed) { - statusType = .BubbleOutgoing(.Failed) - } else if (item.message.flags.isSending && !item.message.isSentOrAcknowledged) || item.attributes.updatingMedia != nil { - statusType = .BubbleOutgoing(.Sending) - } else { - statusType = .BubbleOutgoing(.Sent(read: item.read)) - } + statusType = .BubbleOutgoing(.Sent(read: item.read)) } - default: - statusType = nil + } + default: + statusType = nil } let automaticDownload = shouldDownloadMediaAutomatically(settings: item.controllerInteraction.automaticMediaDownloadSettings, peerType: item.associatedData.automaticDownloadPeerType, networkType: item.associatedData.automaticDownloadNetworkType, authorPeerId: item.message.author?.id, contactsPeerIds: item.associatedData.contactsPeerIds, media: selectedFile!) 
@@ -256,7 +259,7 @@ public class ChatMessageInstantVideoBubbleContentNode: ChatMessageBubbleContentN let normalDisplaySize = layoutConstants.instantVideo.dimensions var displaySize = normalDisplaySize let maximumDisplaySize = CGSize(width: min(404, constrainedSize.width - 2.0), height: min(404, constrainedSize.width - 2.0)) - if item.associatedData.currentlyPlayingMessageId == item.message.index { + if (item.associatedData.currentlyPlayingMessageId == item.message.index || forceIsPlaying) && (!isViewOnceMessage || item.associatedData.isStandalone) { isPlaying = true if !isExpanded { displaySize = maximumDisplaySize diff --git a/submodules/TelegramUI/Components/Chat/ChatMessageInteractiveFileNode/BUILD b/submodules/TelegramUI/Components/Chat/ChatMessageInteractiveFileNode/BUILD index 874b586334..ac61001594 100644 --- a/submodules/TelegramUI/Components/Chat/ChatMessageInteractiveFileNode/BUILD +++ b/submodules/TelegramUI/Components/Chat/ChatMessageInteractiveFileNode/BUILD @@ -20,7 +20,6 @@ swift_library( "//submodules/AccountContext", "//submodules/PhotoResources", "//submodules/TelegramStringFormatting", - "//submodules/RadialStatusNode", "//submodules/SemanticStatusNode", "//submodules/FileMediaResourceStatus", "//submodules/CheckNode", diff --git a/submodules/TelegramUI/Components/Chat/ChatMessageInteractiveFileNode/Sources/ChatMessageInteractiveFileNode.swift b/submodules/TelegramUI/Components/Chat/ChatMessageInteractiveFileNode/Sources/ChatMessageInteractiveFileNode.swift index 06910acb2c..52a13d531a 100644 --- a/submodules/TelegramUI/Components/Chat/ChatMessageInteractiveFileNode/Sources/ChatMessageInteractiveFileNode.swift +++ b/submodules/TelegramUI/Components/Chat/ChatMessageInteractiveFileNode/Sources/ChatMessageInteractiveFileNode.swift @@ -10,7 +10,6 @@ import TelegramPresentationData import AccountContext import PhotoResources import TelegramStringFormatting -import RadialStatusNode import SemanticStatusNode import FileMediaResourceStatus import 
CheckNode diff --git a/submodules/TelegramUI/Components/Chat/ChatMessageInteractiveInstantVideoNode/BUILD b/submodules/TelegramUI/Components/Chat/ChatMessageInteractiveInstantVideoNode/BUILD index 2e541b8c8d..63595c29a2 100644 --- a/submodules/TelegramUI/Components/Chat/ChatMessageInteractiveInstantVideoNode/BUILD +++ b/submodules/TelegramUI/Components/Chat/ChatMessageInteractiveInstantVideoNode/BUILD @@ -20,6 +20,7 @@ swift_library( "//submodules/TelegramPresentationData", "//submodules/AccountContext", "//submodules/RadialStatusNode", + "//submodules/SemanticStatusNode", "//submodules/PhotoResources", "//submodules/TelegramUniversalVideoContent", "//submodules/FileMediaResourceStatus", diff --git a/submodules/TelegramUI/Components/Chat/ChatMessageInteractiveInstantVideoNode/Sources/ChatMessageInteractiveInstantVideoNode.swift b/submodules/TelegramUI/Components/Chat/ChatMessageInteractiveInstantVideoNode/Sources/ChatMessageInteractiveInstantVideoNode.swift index 5a7fb3719d..76821ce801 100644 --- a/submodules/TelegramUI/Components/Chat/ChatMessageInteractiveInstantVideoNode/Sources/ChatMessageInteractiveInstantVideoNode.swift +++ b/submodules/TelegramUI/Components/Chat/ChatMessageInteractiveInstantVideoNode/Sources/ChatMessageInteractiveInstantVideoNode.swift @@ -8,7 +8,7 @@ import TelegramCore import UniversalMediaPlayer import TelegramPresentationData import AccountContext -import RadialStatusNode +import SemanticStatusNode import PhotoResources import TelegramUniversalVideoContent import FileMediaResourceStatus @@ -91,8 +91,10 @@ public class ChatMessageInteractiveInstantVideoNode: ASDisplayNode { public var audioTranscriptionButton: ComponentHostView? private var dustNode: MediaDustNode? - private var statusNode: RadialStatusNode? - private var disappearingStatusNode: RadialStatusNode? + private var statusNode: SemanticStatusNode? + private var disappearingStatusNode: SemanticStatusNode? + private var streamingStatusNode: SemanticStatusNode? 
+ private var playbackStatusNode: InstantVideoRadialStatusNode? public private(set) var videoFrame: CGRect? private var imageScale: CGFloat = 1.0 @@ -129,8 +131,12 @@ public class ChatMessageInteractiveInstantVideoNode: ASDisplayNode { private let playerStatusDisposable = MetaDisposable() private let fetchedThumbnailDisposable = MetaDisposable() + private var viewOnceIconImage: UIImage? + private var shouldAcquireVideoContext: Bool { - if self.visibility && self.trackingIsInHierarchy && !self.canAttachContent { + if let item = self.item, item.associatedData.isStandalone { + return true + } else if self.visibility && self.trackingIsInHierarchy && !self.canAttachContent { return true } else { return false @@ -172,6 +178,7 @@ public class ChatMessageInteractiveInstantVideoNode: ASDisplayNode { self.secretVideoPlaceholderBackground.displaysAsynchronously = false self.secretVideoPlaceholderBackground.displayWithoutProcessing = true self.secretVideoPlaceholder = TransformImageNode() + self.secretVideoPlaceholder.clipsToBounds = true self.infoBackgroundNode = ASImageNode() self.infoBackgroundNode.isLayerBacked = true @@ -620,12 +627,16 @@ public class ChatMessageInteractiveInstantVideoNode: ASDisplayNode { } let effectiveAudioTranscriptionState = updatedAudioTranscriptionState ?? 
audioTranscriptionState + + let principalGraphics = PresentationResourcesChat.principalGraphics(theme: item.presentationData.theme.theme, wallpaper: item.presentationData.theme.wallpaper, bubbleCorners: item.presentationData.chatBubbleCorners) + let viewOnceIconImage = principalGraphics.radialIndicatorViewOnceIcon return (result, { [weak self] layoutData, animation in if let strongSelf = self { strongSelf.item = item strongSelf.videoFrame = displayVideoFrame strongSelf.appliedForwardInfo = (forwardSource, forwardAuthorSignature) + strongSelf.viewOnceIconImage = viewOnceIconImage strongSelf.automaticDownload = automaticDownload @@ -756,7 +767,7 @@ public class ChatMessageInteractiveInstantVideoNode: ASDisplayNode { } } } - }), content: NativeVideoContent(id: .message(item.message.stableId, telegramFile.fileId), userLocation: .peer(item.message.id.peerId), fileReference: .message(message: MessageReference(item.message), media: telegramFile), streamVideo: streamVideo ? .conservative : .none, enableSound: false, fetchAutomatically: false, isAudioVideoMessage: true, captureProtected: item.message.isCopyProtected(), storeAfterDownload: nil), priority: .embedded, autoplay: item.context.sharedContext.energyUsageSettings.autoplayVideo) + }), content: NativeVideoContent(id: .message(item.message.stableId, telegramFile.fileId), userLocation: .peer(item.message.id.peerId), fileReference: .message(message: MessageReference(item.message), media: telegramFile), streamVideo: streamVideo ? .conservative : .none, enableSound: false, fetchAutomatically: false, isAudioVideoMessage: true, captureProtected: item.message.isCopyProtected(), storeAfterDownload: nil), priority: item.associatedData.isStandalone ? 
.overlay : .embedded, autoplay: item.context.sharedContext.energyUsageSettings.autoplayVideo && !isViewOnceMessage) if let previousVideoNode = previousVideoNode { videoNode.bounds = previousVideoNode.bounds videoNode.position = previousVideoNode.position @@ -953,6 +964,7 @@ public class ChatMessageInteractiveInstantVideoNode: ASDisplayNode { let placeholderFrame = videoFrame.insetBy(dx: 2.0, dy: 2.0) strongSelf.secretVideoPlaceholder.bounds = CGRect(origin: CGPoint(), size: placeholderFrame.size) + animation.animator.updateCornerRadius(layer: strongSelf.secretVideoPlaceholder.layer, cornerRadius: placeholderFrame.size.width / 2.0, completion: nil) animation.animator.updateScale(layer: strongSelf.secretVideoPlaceholder.layer, scale: imageScale, completion: nil) animation.animator.updatePosition(layer: strongSelf.secretVideoPlaceholder.layer, position: displayVideoFrame.center, completion: nil) @@ -1151,24 +1163,23 @@ public class ChatMessageInteractiveInstantVideoNode: ASDisplayNode { guard let item = self.item, let status = self.status, let videoFrame = self.videoFrame else { return } - let messageTheme = item.presentationData.theme.theme.chat.message - + let isViewOnceMessage = item.message.minAutoremoveOrClearTimeout == viewOnceTimeout let isSecretMedia = item.message.containsSecretMedia - var secretBeginTimeAndTimeout: (Double, Double)? - if isSecretMedia { - if let attribute = item.message.autoclearAttribute { - if let countdownBeginTime = attribute.countdownBeginTime { - secretBeginTimeAndTimeout = (Double(countdownBeginTime), Double(attribute.timeout)) - } - } else if let attribute = item.message.autoremoveAttribute { - if let countdownBeginTime = attribute.countdownBeginTime { - secretBeginTimeAndTimeout = (Double(countdownBeginTime), Double(attribute.timeout)) - } - } - } +// var secretBeginTimeAndTimeout: (Double, Double)? 
+// if isSecretMedia { +// if let attribute = item.message.autoclearAttribute { +// if let countdownBeginTime = attribute.countdownBeginTime { +// secretBeginTimeAndTimeout = (Double(countdownBeginTime), Double(attribute.timeout)) +// } +// } else if let attribute = item.message.autoremoveAttribute { +// if let countdownBeginTime = attribute.countdownBeginTime { +// secretBeginTimeAndTimeout = (Double(countdownBeginTime), Double(attribute.timeout)) +// } +// } +// } var selectedMedia: TelegramMediaFile? for media in item.message.media { @@ -1234,13 +1245,19 @@ public class ChatMessageInteractiveInstantVideoNode: ASDisplayNode { } else if isBuffering ?? false { progressRequired = true } - if item.presentationData.isPreview { + if item.associatedData.isStandalone { + progressRequired = false + } else if item.presentationData.isPreview { progressRequired = true } if progressRequired { if self.statusNode == nil { - let statusNode = RadialStatusNode(backgroundNodeColor: item.presentationData.theme.theme.chat.message.mediaOverlayControlColors.fillColor, isPreview: item.presentationData.isPreview) + let statusNode = SemanticStatusNode( + backgroundNodeColor: item.presentationData.theme.theme.chat.message.mediaOverlayControlColors.fillColor, + foregroundNodeColor: item.presentationData.theme.theme.chat.message.mediaOverlayControlColors.foregroundColor, + overlayForegroundNodeColor: item.presentationData.theme.theme.chat.message.mediaOverlayControlColors.foregroundColor + ) statusNode.displaysAsynchronously = !item.presentationData.isPreview self.isUserInteractionEnabled = false self.statusNode = statusNode @@ -1249,7 +1266,7 @@ public class ChatMessageInteractiveInstantVideoNode: ASDisplayNode { } else { if let statusNode = self.statusNode { self.disappearingStatusNode = statusNode - statusNode.transitionToState(.none, completion: { [weak statusNode, weak self] in + statusNode.transitionToState(.none, animated: true, synchronous: item.presentationData.isPreview, 
completion: { [weak statusNode, weak self] in statusNode?.removeFromSupernode() if self?.disappearingStatusNode === statusNode { self?.disappearingStatusNode = nil @@ -1259,7 +1276,7 @@ public class ChatMessageInteractiveInstantVideoNode: ASDisplayNode { } } - let statusFrame = CGRect(origin: CGPoint(x: videoFrame.origin.x + floorToScreenPixels((videoFrame.size.width - 50.0) / 2.0), y: videoFrame.origin.y + floorToScreenPixels((videoFrame.size.height - 50.0) / 2.0)), size: CGSize(width: 50.0, height: 50.0)) + let statusFrame = CGRect(origin: CGPoint(x: videoFrame.origin.x + floorToScreenPixels((videoFrame.size.width - 54.0) / 2.0), y: videoFrame.origin.y + floorToScreenPixels((videoFrame.size.height - 54.0) / 2.0)), size: CGSize(width: 54.0, height: 54.0)) if let animator = animator { if let statusNode = self.statusNode { animator.updateFrame(layer: statusNode.layer, frame: statusFrame, completion: nil) @@ -1272,7 +1289,9 @@ public class ChatMessageInteractiveInstantVideoNode: ASDisplayNode { self.disappearingStatusNode?.frame = statusFrame } - var state: RadialStatusNodeState + var state: SemanticStatusNodeState + var streamingState: SemanticStatusNodeState = .none + switch status.mediaStatus { case var .fetchStatus(fetchStatus): if item.message.forwardInfo != nil { @@ -1283,28 +1302,31 @@ public class ChatMessageInteractiveInstantVideoNode: ASDisplayNode { case let .Fetching(_, progress): if let isBuffering = isBuffering { if isBuffering { - state = .progress(color: messageTheme.mediaOverlayControlColors.foregroundColor, lineWidth: nil, value: nil, cancelEnabled: true, animateRotation: true) + state = .progress(value: nil, cancelEnabled: true, appearance: nil) } else { state = .none } } else { let adjustedProgress = max(progress, 0.027) - state = .progress(color: messageTheme.mediaOverlayControlColors.foregroundColor, lineWidth: nil, value: CGFloat(adjustedProgress), cancelEnabled: true, animateRotation: true) + state = .progress(value: CGFloat(adjustedProgress), 
cancelEnabled: true, appearance: nil) } case .Local: if isViewOnceMessage { - state = .play(messageTheme.mediaOverlayControlColors.foregroundColor) + state = .play } else if isSecretMedia { - if let (beginTime, timeout) = secretBeginTimeAndTimeout { - state = .secretTimeout(color: messageTheme.mediaOverlayControlColors.foregroundColor, icon: .flame, beginTime: beginTime, timeout: timeout, sparks: true) - } else { - state = .staticTimeout - } + //TODO: + state = .play +// if let (beginTime, timeout) = secretBeginTimeAndTimeout { +// state = .secretTimeout(position: , duration: , generationTimestamp: , appearance: nil) +// state = .secretTimeout(color: messageTheme.mediaOverlayControlColors.foregroundColor, icon: .flame, beginTime: beginTime, timeout: timeout, sparks: true) +// } else { +// state = .staticTimeout +// } } else { state = .none } case .Remote, .Paused: - state = .download(messageTheme.mediaOverlayControlColors.foregroundColor) + state = .download } default: var isLocal = false @@ -1312,13 +1334,18 @@ public class ChatMessageInteractiveInstantVideoNode: ASDisplayNode { isLocal = true } if (isBuffering ?? 
false) && !isLocal { - state = .progress(color: messageTheme.mediaOverlayControlColors.foregroundColor, lineWidth: nil, value: nil, cancelEnabled: true, animateRotation: true) + state = .progress(value: nil, cancelEnabled: true, appearance: nil) } else { state = .none } } + + if isViewOnceMessage && progressRequired, let viewOnceIconImage = self.viewOnceIconImage, state == .play { + streamingState = .customIcon(viewOnceIconImage) + } + if item.presentationData.isPreview { - state = .play(messageTheme.mediaOverlayControlColors.foregroundColor) + state = .play } if let statusNode = self.statusNode { if state == .none { @@ -1331,12 +1358,63 @@ public class ChatMessageInteractiveInstantVideoNode: ASDisplayNode { }) } - if case .playbackStatus = status.mediaStatus { + let streamingProgressDiameter: CGFloat = 20.0 + let streamingCacheStatusFrame = CGRect(origin: statusFrame.origin.offsetBy(dx: 37.0, dy: 37.0), size: CGSize(width: streamingProgressDiameter, height: streamingProgressDiameter)) + if streamingState != .none && self.streamingStatusNode == nil { + let streamingStatusNode = SemanticStatusNode( + backgroundNodeColor: item.presentationData.theme.theme.chat.message.mediaOverlayControlColors.fillColor, + foregroundNodeColor: item.presentationData.theme.theme.chat.message.mediaOverlayControlColors.foregroundColor, + overlayForegroundNodeColor: item.presentationData.theme.theme.chat.message.mediaOverlayControlColors.foregroundColor + ) + self.streamingStatusNode = streamingStatusNode + streamingStatusNode.frame = streamingCacheStatusFrame + self.addSubnode(streamingStatusNode) + + if isViewOnceMessage { + streamingStatusNode.layer.animateScale(from: 0.1, to: 1.0, duration: 0.2, timingFunction: CAMediaTimingFunctionName.linear.rawValue) + streamingStatusNode.layer.animateAlpha(from: 0.1, to: 1.0, duration: 0.2, timingFunction: CAMediaTimingFunctionName.linear.rawValue) + } + } else if let streamingStatusNode = self.streamingStatusNode { + 
streamingStatusNode.backgroundNodeColor = item.presentationData.theme.theme.chat.message.mediaOverlayControlColors.fillColor + } + + if let streamingStatusNode = self.streamingStatusNode { + if let animator = animator { + animator.updateFrame(layer: streamingStatusNode.layer, frame: streamingCacheStatusFrame, completion: nil) + } else { + streamingStatusNode.frame = streamingCacheStatusFrame + } + if streamingState == .none { + self.streamingStatusNode = nil + if isViewOnceMessage { + streamingStatusNode.layer.animateScale(from: 1.0, to: 0.1, duration: 0.2, timingFunction: CAMediaTimingFunctionName.linear.rawValue, removeOnCompletion: false) + } + streamingStatusNode.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2, timingFunction: CAMediaTimingFunctionName.linear.rawValue, removeOnCompletion: false, completion: { [weak streamingStatusNode] _ in + if streamingState == .none { + streamingStatusNode?.removeFromSupernode() + } + }) + } else { + streamingStatusNode.transitionToState(streamingState) + } + } + + if let statusNode = self.statusNode, streamingState != .none { + let cutoutFrame = streamingCacheStatusFrame.offsetBy(dx: -statusFrame.minX, dy: -statusFrame.minY).insetBy(dx: -2.0 + UIScreenPixel, dy: -2.0 + UIScreenPixel) + statusNode.setCutout(cutoutFrame, animated: true) + } + + if case .playbackStatus = status.mediaStatus, !isViewOnceMessage || item.associatedData.isStandalone { let playbackStatusNode: InstantVideoRadialStatusNode if let current = self.playbackStatusNode { playbackStatusNode = current } else { playbackStatusNode = InstantVideoRadialStatusNode(color: UIColor(white: 1.0, alpha: 0.6), hasSeek: !isViewOnceMessage, sparks: isViewOnceMessage) + playbackStatusNode.alpha = 0.0 + Queue.mainQueue().after(0.15) { + playbackStatusNode.alpha = 1.0 + playbackStatusNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2) + } playbackStatusNode.isUserInteractionEnabled = !isViewOnceMessage playbackStatusNode.seekTo = { [weak self] position, play in 
guard let strongSelf = self else { @@ -1355,7 +1433,16 @@ public class ChatMessageInteractiveInstantVideoNode: ASDisplayNode { self.playbackStatusNode = playbackStatusNode } - playbackStatusNode.frame = videoFrame.insetBy(dx: 1.5, dy: 1.5) + let playbackStatusFrame = videoFrame.insetBy(dx: 1.5, dy: 1.5) + if playbackStatusNode.bounds.width > 0.0 && playbackStatusNode.bounds.width != playbackStatusFrame.width, let animator { + animator.animateScale(layer: playbackStatusNode.layer, from: playbackStatusNode.bounds.width / playbackStatusFrame.width, to: 1.0, completion: nil) + } + playbackStatusNode.bounds = playbackStatusFrame + if let animator { + animator.updatePosition(layer: playbackStatusNode.layer, position: playbackStatusFrame.center, completion: nil) + } else { + playbackStatusNode.position = playbackStatusFrame.center + } let status = messageFileMediaPlaybackStatus(context: item.context, file: file, message: EngineMessage(item.message), isRecentActions: item.associatedData.isRecentActions, isGlobalSearch: false, isDownloadList: false) playbackStatusNode.status = status diff --git a/submodules/TelegramUI/Components/ChatTextInputMediaRecordingButton/Sources/ChatTextInputMediaRecordingButton.swift b/submodules/TelegramUI/Components/ChatTextInputMediaRecordingButton/Sources/ChatTextInputMediaRecordingButton.swift index 83337ed8d5..64e22d4b71 100644 --- a/submodules/TelegramUI/Components/ChatTextInputMediaRecordingButton/Sources/ChatTextInputMediaRecordingButton.swift +++ b/submodules/TelegramUI/Components/ChatTextInputMediaRecordingButton/Sources/ChatTextInputMediaRecordingButton.swift @@ -179,6 +179,7 @@ public final class ChatTextInputMediaRecordingButton: TGModernConversationInputM private let context: AccountContext private var theme: PresentationTheme private let useDarkTheme: Bool + private let pause: Bool private let strings: PresentationStrings public var mode: ChatTextInputMediaRecordingButtonMode = .audio @@ -322,17 +323,18 @@ public final class 
ChatTextInputMediaRecordingButton: TGModernConversationInputM if let current = self.micLockValue { return current } else { - let lockView = LockView(frame: CGRect(origin: CGPoint(), size: CGSize(width: 40.0, height: 60.0)), theme: self.theme, useDarkTheme: self.useDarkTheme, strings: self.strings) + let lockView = LockView(frame: CGRect(origin: CGPoint(), size: CGSize(width: 40.0, height: 60.0)), theme: self.theme, useDarkTheme: self.useDarkTheme, pause: self.pause, strings: self.strings) lockView.addTarget(self, action: #selector(handleStopTap), for: .touchUpInside) self.micLockValue = lockView return lockView } } - public init(context: AccountContext, theme: PresentationTheme, useDarkTheme: Bool = false, strings: PresentationStrings, presentController: @escaping (ViewController) -> Void) { + public init(context: AccountContext, theme: PresentationTheme, useDarkTheme: Bool = false, pause: Bool = false, strings: PresentationStrings, presentController: @escaping (ViewController) -> Void) { self.context = context self.theme = theme self.useDarkTheme = useDarkTheme + self.pause = pause self.strings = strings self.animationView = ComponentView() self.presentController = presentController diff --git a/submodules/TelegramUI/Components/ChatTextInputMediaRecordingButton/Sources/LockView.swift b/submodules/TelegramUI/Components/ChatTextInputMediaRecordingButton/Sources/LockView.swift index 06a736e3f5..d2731acbe1 100644 --- a/submodules/TelegramUI/Components/ChatTextInputMediaRecordingButton/Sources/LockView.swift +++ b/submodules/TelegramUI/Components/ChatTextInputMediaRecordingButton/Sources/LockView.swift @@ -6,30 +6,33 @@ import TelegramPresentationData final class LockView: UIButton, TGModernConversationInputMicButtonLock { private let useDarkTheme: Bool + private let pause: Bool - private let idleView: AnimationView = { - guard let url = getAppBundle().url(forResource: "LockWait", withExtension: "json"), let animation = Animation.filepath(url.path) - else { return 
AnimationView() } - - let view = AnimationView(animation: animation, configuration: LottieConfiguration(renderingEngine: .mainThread, decodingStrategy: .codable)) - view.loopMode = .autoReverse - view.backgroundColor = .clear - view.isOpaque = false - return view - }() + private let idleView: AnimationView + private let lockingView: AnimationView - private let lockingView: AnimationView = { - guard let url = getAppBundle().url(forResource: "Lock", withExtension: "json"), let animation = Animation.filepath(url.path) - else { return AnimationView() } - - let view = AnimationView(animation: animation, configuration: LottieConfiguration(renderingEngine: .mainThread, decodingStrategy: .codable)) - view.backgroundColor = .clear - view.isOpaque = false - return view - }() - - init(frame: CGRect, theme: PresentationTheme, useDarkTheme: Bool = false, strings: PresentationStrings) { + init(frame: CGRect, theme: PresentationTheme, useDarkTheme: Bool = false, pause: Bool = false, strings: PresentationStrings) { self.useDarkTheme = useDarkTheme + self.pause = pause + + if let url = getAppBundle().url(forResource: "LockWait", withExtension: "json"), let animation = Animation.filepath(url.path) { + let view = AnimationView(animation: animation, configuration: LottieConfiguration(renderingEngine: .mainThread, decodingStrategy: .codable)) + view.loopMode = .autoReverse + view.backgroundColor = .clear + view.isOpaque = false + self.idleView = view + } else { + self.idleView = AnimationView() + } + + if let url = getAppBundle().url(forResource: self.pause ? 
"LockPause" : "Lock", withExtension: "json"), let animation = Animation.filepath(url.path) { + let view = AnimationView(animation: animation, configuration: LottieConfiguration(renderingEngine: .mainThread, decodingStrategy: .codable)) + view.backgroundColor = .clear + view.isOpaque = false + self.lockingView = view + } else { + self.lockingView = AnimationView() + } super.init(frame: frame) @@ -62,25 +65,33 @@ final class LockView: UIButton, TGModernConversationInputMicButtonLock { } func updateTheme(_ theme: PresentationTheme) { - [ - "Rectangle.Заливка 1": theme.chat.inputPanel.panelBackgroundColor, - "Rectangle.Rectangle.Обводка 1": theme.chat.inputPanel.panelControlAccentColor, - "Rectangle 2.Rectangle.Обводка 1": theme.chat.inputPanel.panelControlAccentColor, - "Path.Path.Обводка 1": theme.chat.inputPanel.panelControlAccentColor, - "Path 4.Path 4.Обводка 1": theme.chat.inputPanel.panelControlAccentColor - ].forEach { key, value in - idleView.setValueProvider(ColorValueProvider(value.lottieColorValue), keypath: AnimationKeypath(keypath: "\(key).Color")) +// [ +// "Rectangle.Заливка 1": theme.chat.inputPanel.panelBackgroundColor, +// "Rectangle.Rectangle.Обводка 1": theme.chat.inputPanel.panelControlAccentColor, +// "Rectangle 2.Rectangle.Обводка 1": theme.chat.inputPanel.panelControlAccentColor, +// "Path.Path.Обводка 1": theme.chat.inputPanel.panelControlAccentColor, +// "Path 4.Path 4.Обводка 1": theme.chat.inputPanel.panelControlAccentColor +// ].forEach { key, value in +// idleView.setValueProvider(ColorValueProvider(value.lottieColorValue), keypath: AnimationKeypath(keypath: "\(key).Color")) +// } +// + for keypath in idleView.allKeypaths(predicate: { $0.keys.last == "Color" }) { + idleView.setValueProvider(ColorValueProvider(theme.chat.inputPanel.panelControlAccentColor.lottieColorValue), keypath: AnimationKeypath(keypath: keypath)) } - [ - "Path.Path.Обводка 1": theme.chat.inputPanel.panelControlAccentColor, - "Path.Path.Заливка 1": 
theme.chat.inputPanel.panelBackgroundColor.withAlphaComponent(1.0), - "Rectangle.Rectangle.Обводка 1": theme.chat.inputPanel.panelControlAccentColor, - "Rectangle.Заливка 1": theme.chat.inputPanel.panelControlAccentColor, - "Path 4.Path 4.Обводка 1": theme.chat.inputPanel.panelControlAccentColor - ].forEach { key, value in - lockingView.setValueProvider(ColorValueProvider(value.lottieColorValue), keypath: AnimationKeypath(keypath: "\(key).Color")) + for keypath in lockingView.allKeypaths(predicate: { $0.keys.last == "Color" }) { + lockingView.setValueProvider(ColorValueProvider(theme.chat.inputPanel.panelControlAccentColor.lottieColorValue), keypath: AnimationKeypath(keypath: keypath)) } +// +// [ +// "Path.Path.Обводка 1": theme.chat.inputPanel.panelControlAccentColor, +// "Path.Path.Заливка 1": theme.chat.inputPanel.panelBackgroundColor.withAlphaComponent(1.0), +// "Rectangle.Rectangle.Обводка 1": theme.chat.inputPanel.panelControlAccentColor, +// "Rectangle.Заливка 1": theme.chat.inputPanel.panelControlAccentColor, +// "Path 4.Path 4.Обводка 1": theme.chat.inputPanel.panelControlAccentColor +// ].forEach { key, value in +// lockingView.setValueProvider(ColorValueProvider(value.lottieColorValue), keypath: AnimationKeypath(keypath: "\(key).Color")) +// } } override func hitTest(_ point: CGPoint, with event: UIEvent?) -> UIView? { diff --git a/submodules/TelegramUI/Components/MediaEditor/Sources/DrawingMessageRenderer.swift b/submodules/TelegramUI/Components/MediaEditor/Sources/DrawingMessageRenderer.swift index cd0ba03359..9a37d1bb59 100644 --- a/submodules/TelegramUI/Components/MediaEditor/Sources/DrawingMessageRenderer.swift +++ b/submodules/TelegramUI/Components/MediaEditor/Sources/DrawingMessageRenderer.swift @@ -187,7 +187,7 @@ public final class DrawingMessageRenderer { let avatarHeaderItem = self.context.sharedContext.makeChatMessageAvatarHeaderItem(context: self.context, timestamp: self.messages.first?.timestamp ?? 
0, peer: self.messages.first!.peers[self.messages.first!.author!.id]!, message: self.messages.first!, theme: theme, strings: presentationData.strings, wallpaper: presentationData.chatWallpaper, fontSize: presentationData.chatFontSize, chatBubbleCorners: presentationData.chatBubbleCorners, dateTimeFormat: presentationData.dateTimeFormat, nameOrder: presentationData.nameDisplayOrder) - let items: [ListViewItem] = [self.context.sharedContext.makeChatMessagePreviewItem(context: self.context, messages: self.messages, theme: theme, strings: presentationData.strings, wallpaper: presentationData.theme.chat.defaultWallpaper, fontSize: presentationData.chatFontSize, chatBubbleCorners: presentationData.chatBubbleCorners, dateTimeFormat: presentationData.dateTimeFormat, nameOrder: presentationData.nameDisplayOrder, forcedResourceStatus: nil, tapMessage: nil, clickThroughMessage: nil, backgroundNode: nil, availableReactions: nil, accountPeer: nil, isCentered: false, isPreview: true)] + let items: [ListViewItem] = [self.context.sharedContext.makeChatMessagePreviewItem(context: self.context, messages: self.messages, theme: theme, strings: presentationData.strings, wallpaper: presentationData.theme.chat.defaultWallpaper, fontSize: presentationData.chatFontSize, chatBubbleCorners: presentationData.chatBubbleCorners, dateTimeFormat: presentationData.dateTimeFormat, nameOrder: presentationData.nameDisplayOrder, forcedResourceStatus: nil, tapMessage: nil, clickThroughMessage: nil, backgroundNode: nil, availableReactions: nil, accountPeer: nil, isCentered: false, isPreview: true, isStandalone: false)] let inset: CGFloat = 16.0 let leftInset: CGFloat = 37.0 diff --git a/submodules/TelegramUI/Components/MediaEditor/Sources/MediaEditor.swift b/submodules/TelegramUI/Components/MediaEditor/Sources/MediaEditor.swift index 899204a09e..6dba11a9ea 100644 --- a/submodules/TelegramUI/Components/MediaEditor/Sources/MediaEditor.swift +++ 
b/submodules/TelegramUI/Components/MediaEditor/Sources/MediaEditor.swift @@ -1736,7 +1736,7 @@ public final class MediaEditor { } } -private func videoFrames(asset: AVAsset, count: Int, mirror: Bool = false) -> Signal<([UIImage], Double), NoError> { +public func videoFrames(asset: AVAsset?, count: Int, initialPlaceholder: UIImage? = nil, initialTimestamp: Double? = nil, mirror: Bool = false) -> Signal<([UIImage], Double), NoError> { func blurredImage(_ image: UIImage) -> UIImage? { guard let image = image.cgImage else { return nil @@ -1769,55 +1769,82 @@ private func videoFrames(asset: AVAsset, count: Int, mirror: Bool = false) -> Si guard count > 0 else { return .complete() } - let scale = UIScreen.main.scale - let imageGenerator = AVAssetImageGenerator(asset: asset) - imageGenerator.maximumSize = CGSize(width: 48.0 * scale, height: 36.0 * scale) - imageGenerator.appliesPreferredTrackTransform = true - imageGenerator.requestedTimeToleranceBefore = .zero - imageGenerator.requestedTimeToleranceAfter = .zero - + + var firstFrame: UIImage - if let cgImage = try? imageGenerator.copyCGImage(at: .zero, actualTime: nil) { - firstFrame = UIImage(cgImage: cgImage) - if let blurred = blurredImage(firstFrame) { + + var imageGenerator: AVAssetImageGenerator? + if let asset { + let scale = UIScreen.main.scale + + imageGenerator = AVAssetImageGenerator(asset: asset) + imageGenerator?.maximumSize = CGSize(width: 48.0 * scale, height: 36.0 * scale) + imageGenerator?.appliesPreferredTrackTransform = true + imageGenerator?.requestedTimeToleranceBefore = .zero + imageGenerator?.requestedTimeToleranceAfter = .zero + } + + if var initialPlaceholder { + initialPlaceholder = generateScaledImage(image: initialPlaceholder, size: initialPlaceholder.size.aspectFitted(CGSize(width: 144.0, height: 144.0)), scale: 1.0)! 
+ if let blurred = blurredImage(initialPlaceholder) { firstFrame = blurred + } else { + firstFrame = initialPlaceholder + } + } else if let imageGenerator { + if let cgImage = try? imageGenerator.copyCGImage(at: .zero, actualTime: nil) { + firstFrame = UIImage(cgImage: cgImage) + if let blurred = blurredImage(firstFrame) { + firstFrame = blurred + } + } else { + firstFrame = generateSingleColorImage(size: CGSize(width: 24.0, height: 36.0), color: .black)! } } else { firstFrame = generateSingleColorImage(size: CGSize(width: 24.0, height: 36.0), color: .black)! } - return Signal { subscriber in - subscriber.putNext((Array(repeating: firstFrame, count: count), CACurrentMediaTime())) - - var timestamps: [NSValue] = [] - let duration = asset.duration.seconds - let interval = duration / Double(count) - for i in 0 ..< count { - timestamps.append(NSValue(time: CMTime(seconds: Double(i) * interval, preferredTimescale: CMTimeScale(1000)))) - } - - var updatedFrames: [UIImage] = [] - imageGenerator.generateCGImagesAsynchronously(forTimes: timestamps) { _, image, _, _, _ in - if let image { - updatedFrames.append(UIImage(cgImage: image, scale: 1.0, orientation: mirror ? .upMirrored : .up)) - if updatedFrames.count == count { - subscriber.putNext((updatedFrames, CACurrentMediaTime())) - subscriber.putCompletion() - } else { - var tempFrames = updatedFrames - for _ in 0 ..< count - updatedFrames.count { - tempFrames.append(firstFrame) + + if let asset { + return Signal { subscriber in + subscriber.putNext((Array(repeating: firstFrame, count: count), initialTimestamp ?? 
CACurrentMediaTime())) + + var timestamps: [NSValue] = [] + let duration = asset.duration.seconds + let interval = duration / Double(count) + for i in 0 ..< count { + timestamps.append(NSValue(time: CMTime(seconds: Double(i) * interval, preferredTimescale: CMTimeScale(1000)))) + } + + var updatedFrames: [UIImage] = [] + imageGenerator?.generateCGImagesAsynchronously(forTimes: timestamps) { _, image, _, _, _ in + if let image { + updatedFrames.append(UIImage(cgImage: image, scale: 1.0, orientation: mirror ? .upMirrored : .up)) + if updatedFrames.count == count { + subscriber.putNext((updatedFrames, CACurrentMediaTime())) + subscriber.putCompletion() + } else { + var tempFrames = updatedFrames + for _ in 0 ..< count - updatedFrames.count { + tempFrames.append(firstFrame) + } + subscriber.putNext((tempFrames, CACurrentMediaTime())) + } + } else { + if let previous = updatedFrames.last { + updatedFrames.append(previous) } - subscriber.putNext((tempFrames, CACurrentMediaTime())) - } - } else { - if let previous = updatedFrames.last { - updatedFrames.append(previous) } } + + return ActionDisposable { + imageGenerator?.cancelAllCGImageGeneration() + } } - - return ActionDisposable { - imageGenerator.cancelAllCGImageGeneration() + } else { + var frames: [UIImage] = [] + for _ in 0 ..< count { + frames.append(firstFrame) } + return .single((frames, CACurrentMediaTime())) } } diff --git a/submodules/TelegramUI/Components/MediaEditor/Sources/MediaEditorValues.swift b/submodules/TelegramUI/Components/MediaEditor/Sources/MediaEditorValues.swift index 9d8676a5be..cbd042f975 100644 --- a/submodules/TelegramUI/Components/MediaEditor/Sources/MediaEditorValues.swift +++ b/submodules/TelegramUI/Components/MediaEditor/Sources/MediaEditorValues.swift @@ -136,7 +136,7 @@ public enum MediaQualityPreset: Int32 { case .compressedVeryHigh: return 1920.0 case .videoMessage: - return 384.0 + return 400.0 case .profileLow: return 720.0 case .profile, .profileHigh, .profileVeryHigh: diff --git 
a/submodules/TelegramUI/Components/MediaEditorScreen/BUILD b/submodules/TelegramUI/Components/MediaEditorScreen/BUILD index 144d6234e5..e0bf2ad253 100644 --- a/submodules/TelegramUI/Components/MediaEditorScreen/BUILD +++ b/submodules/TelegramUI/Components/MediaEditorScreen/BUILD @@ -49,6 +49,7 @@ swift_library( "//submodules/TelegramUI/Components/VolumeSliderContextItem", "//submodules/TelegramUI/Components/Stories/ForwardInfoPanelComponent", "//submodules/TelegramUI/Components/ContextReferenceButtonComponent", + "//submodules/TelegramUI/Components/MediaScrubberComponent", ], visibility = [ "//visibility:public", diff --git a/submodules/TelegramUI/Components/MediaEditorScreen/Sources/MediaEditorScreen.swift b/submodules/TelegramUI/Components/MediaEditorScreen/Sources/MediaEditorScreen.swift index c8ed2ae305..1d06352aa7 100644 --- a/submodules/TelegramUI/Components/MediaEditorScreen/Sources/MediaEditorScreen.swift +++ b/submodules/TelegramUI/Components/MediaEditorScreen/Sources/MediaEditorScreen.swift @@ -39,6 +39,7 @@ import VolumeSliderContextItem import TelegramStringFormatting import ForwardInfoPanelComponent import ContextReferenceButtonComponent +import MediaScrubberComponent private let playbackButtonTag = GenericComponentViewTag() private let muteButtonTag = GenericComponentViewTag() @@ -1359,6 +1360,8 @@ final class MediaEditorScreenComponent: Component { transition: scrubberTransition, component: AnyComponent(MediaScrubberComponent( context: component.context, + style: .editor, + theme: environment.theme, generationTimestamp: playerState.generationTimestamp, position: playerState.position, minDuration: minDuration, @@ -6081,3 +6084,23 @@ private func setupButtonShadow(_ view: UIView, radius: CGFloat = 2.0) { view.layer.shadowColor = UIColor.black.cgColor view.layer.shadowOpacity = 0.35 } + +extension MediaScrubberComponent.Track { + public init(_ track: MediaEditorPlayerState.Track) { + let content: MediaScrubberComponent.Track.Content + switch 
track.content { + case let .video(frames, framesUpdateTimestamp): + content = .video(frames: frames, framesUpdateTimestamp: framesUpdateTimestamp) + case let .audio(artist, title, samples, peak): + content = .audio(artist: artist, title: title, samples: samples, peak: peak) + } + self.init( + id: track.id, + content: content, + duration: track.duration, + trimRange: track.trimRange, + offset: track.offset, + isMain: track.isMain + ) + } +} diff --git a/submodules/TelegramUI/Components/MediaScrubberComponent/BUILD b/submodules/TelegramUI/Components/MediaScrubberComponent/BUILD new file mode 100644 index 0000000000..06766ec3c3 --- /dev/null +++ b/submodules/TelegramUI/Components/MediaScrubberComponent/BUILD @@ -0,0 +1,25 @@ +load("@build_bazel_rules_swift//swift:swift.bzl", "swift_library") + +swift_library( + name = "MediaScrubberComponent", + module_name = "MediaScrubberComponent", + srcs = glob([ + "Sources/**/*.swift", + ]), + copts = [ + "-warnings-as-errors", + ], + deps = [ + "//submodules/Display", + "//submodules/ComponentFlow", + "//submodules/TelegramPresentationData", + "//submodules/AccountContext", + "//submodules/Components/ComponentDisplayAdapters", + "//submodules/Components/MultilineTextComponent", + "//submodules/TelegramUI/Components/MediaEditor", + "//submodules/TelegramUI/Components/AudioWaveformComponent", + ], + visibility = [ + "//visibility:public", + ], +) diff --git a/submodules/TelegramUI/Components/MediaEditorScreen/Sources/MediaScrubberComponent.swift b/submodules/TelegramUI/Components/MediaScrubberComponent/Sources/MediaScrubberComponent.swift similarity index 87% rename from submodules/TelegramUI/Components/MediaEditorScreen/Sources/MediaScrubberComponent.swift rename to submodules/TelegramUI/Components/MediaScrubberComponent/Sources/MediaScrubberComponent.swift index 9c4132866e..891bfa7edf 100644 --- a/submodules/TelegramUI/Components/MediaEditorScreen/Sources/MediaScrubberComponent.swift +++ 
b/submodules/TelegramUI/Components/MediaScrubberComponent/Sources/MediaScrubberComponent.swift @@ -4,7 +4,6 @@ import Display import AsyncDisplayKit import ComponentFlow import SwiftSignalKit -import ViewControllerComponent import ComponentDisplayAdapters import TelegramPresentationData import AccountContext @@ -17,17 +16,16 @@ private let trackHeight: CGFloat = 39.0 private let collapsedTrackHeight: CGFloat = 26.0 private let trackSpacing: CGFloat = 4.0 private let borderHeight: CGFloat = 1.0 + UIScreenPixel -private let frameWidth: CGFloat = 24.0 -final class MediaScrubberComponent: Component { - typealias EnvironmentType = Empty +public final class MediaScrubberComponent: Component { + public typealias EnvironmentType = Empty - struct Track: Equatable { - enum Content: Equatable { + public struct Track: Equatable { + public enum Content: Equatable { case video(frames: [UIImage], framesUpdateTimestamp: Double) case audio(artist: String?, title: String?, samples: Data?, peak: Int32) - static func ==(lhs: Content, rhs: Content) -> Bool { + public static func ==(lhs: Content, rhs: Content) -> Bool { switch lhs { case let .video(_, framesUpdateTimestamp): if case .video(_, framesUpdateTimestamp) = rhs { @@ -45,29 +43,39 @@ final class MediaScrubberComponent: Component { } } - let id: Int32 - let content: Content - let duration: Double - let trimRange: Range? - let offset: Double? 
- let isMain: Bool - - init(_ track: MediaEditorPlayerState.Track) { - self.id = track.id - switch track.content { - case let .video(frames, framesUpdateTimestamp): - self.content = .video(frames: frames, framesUpdateTimestamp: framesUpdateTimestamp) - case let .audio(artist, title, samples, peak): - self.content = .audio(artist: artist, title: title, samples: samples, peak: peak) - } - self.duration = track.duration - self.trimRange = track.trimRange - self.offset = track.offset - self.isMain = track.isMain + public let id: Int32 + public let content: Content + public let duration: Double + public let trimRange: Range? + public let offset: Double? + public let isMain: Bool + + public init( + id: Int32, + content: Content, + duration: Double, + trimRange: Range?, + offset: Double?, + isMain: Bool + ) { + self.id = id + self.content = content + self.duration = duration + self.trimRange = trimRange + self.offset = offset + self.isMain = isMain } } + public enum Style { + case editor + case videoMessage + } + let context: AccountContext + let style: Style + let theme: PresentationTheme + let generationTimestamp: Double let position: Double @@ -77,13 +85,15 @@ final class MediaScrubberComponent: Component { let tracks: [Track] - let positionUpdated: (Double, Bool) -> Void + let positionUpdated: (Double, Bool) -> Void let trackTrimUpdated: (Int32, Double, Double, Bool, Bool) -> Void let trackOffsetUpdated: (Int32, Double, Bool) -> Void let trackLongPressed: (Int32, UIView) -> Void - init( + public init( context: AccountContext, + style: Style, + theme: PresentationTheme, generationTimestamp: Double, position: Double, minDuration: Double, @@ -96,6 +106,8 @@ final class MediaScrubberComponent: Component { trackLongPressed: @escaping (Int32, UIView) -> Void ) { self.context = context + self.style = style + self.theme = theme self.generationTimestamp = generationTimestamp self.position = position self.minDuration = minDuration @@ -108,10 +120,13 @@ final class 
MediaScrubberComponent: Component { self.trackLongPressed = trackLongPressed } - static func ==(lhs: MediaScrubberComponent, rhs: MediaScrubberComponent) -> Bool { + public static func ==(lhs: MediaScrubberComponent, rhs: MediaScrubberComponent) -> Bool { if lhs.context !== rhs.context { return false } + if lhs.theme !== rhs.theme { + return false + } if lhs.generationTimestamp != rhs.generationTimestamp { return false } @@ -133,7 +148,7 @@ final class MediaScrubberComponent: Component { return true } - final class View: UIView, UIGestureRecognizerDelegate { + public final class View: UIView, UIGestureRecognizerDelegate { private var trackViews: [Int32: TrackView] = [:] private let trimView: TrimView private let ghostTrimView: TrimView @@ -260,7 +275,7 @@ final class MediaScrubberComponent: Component { guard let component = self.component, let firstTrack = component.tracks.first else { return 0.0 } - return firstTrack.trimRange?.upperBound ?? min(firstTrack.duration, storyMaxVideoDuration) + return firstTrack.trimRange?.upperBound ?? min(firstTrack.duration, component.maxDuration) } private var mainAudioTrackOffset: Double? 
{ @@ -364,11 +379,18 @@ final class MediaScrubberComponent: Component { self.cursorView.frame = cursorFrame(size: scrubberSize, height: self.effectiveCursorHeight, position: updatedPosition, duration: self.trimDuration) } - func update(component: MediaScrubberComponent, availableSize: CGSize, state: EmptyComponentState, environment: Environment, transition: Transition) -> CGSize { + public func update(component: MediaScrubberComponent, availableSize: CGSize, state: EmptyComponentState, environment: Environment, transition: Transition) -> CGSize { let isFirstTime = self.component == nil self.component = component self.state = state + switch component.style { + case .editor: + self.cursorView.isHidden = false + case .videoMessage: + self.cursorView.isHidden = true + } + var totalHeight: CGFloat = 0.0 var trackLayout: [Int32: (CGRect, Transition, Bool)] = [:] @@ -427,6 +449,7 @@ final class MediaScrubberComponent: Component { let trackSize = trackView.update( context: component.context, + style: component.style, track: track, isSelected: id == self.selectedTrackId, availableSize: availableSize, @@ -495,12 +518,21 @@ final class MediaScrubberComponent: Component { } } - let scrubberSize = CGSize(width: availableSize.width, height: trackHeight) + let fullTrackHeight: CGFloat + switch component.style { + case .editor: + fullTrackHeight = trackHeight + case .videoMessage: + fullTrackHeight = 33.0 + } + let scrubberSize = CGSize(width: availableSize.width, height: fullTrackHeight) self.trimView.isHollow = self.selectedTrackId != lowestVideoId || self.isAudioOnly let (leftHandleFrame, rightHandleFrame) = self.trimView.update( + style: component.style, + theme: component.theme, visualInsets: trimViewVisualInsets, - scrubberSize: CGSize(width: trackViewWidth, height: trackHeight), + scrubberSize: CGSize(width: trackViewWidth, height: fullTrackHeight), duration: mainTrimDuration, startPosition: startPosition, endPosition: endPosition, @@ -511,6 +543,8 @@ final class 
MediaScrubberComponent: Component { ) let (ghostLeftHandleFrame, ghostRightHandleFrame) = self.ghostTrimView.update( + style: component.style, + theme: component.theme, visualInsets: .zero, scrubberSize: CGSize(width: scrubberSize.width, height: collapsedTrackHeight), duration: self.duration, @@ -591,7 +625,7 @@ final class MediaScrubberComponent: Component { return CGSize(width: availableSize.width, height: totalHeight) } - override func point(inside point: CGPoint, with event: UIEvent?) -> Bool { + public override func point(inside point: CGPoint, with event: UIEvent?) -> Bool { let hitTestSlop = UIEdgeInsets(top: -8.0, left: -9.0, bottom: -8.0, right: -9.0) return self.bounds.inset(by: hitTestSlop).contains(point) } @@ -683,11 +717,9 @@ private class TrackView: UIView, UIScrollViewDelegate, UIGestureRecognizerDelega self.videoTransparentFramesContainer.alpha = 0.5 self.videoTransparentFramesContainer.clipsToBounds = true - self.videoTransparentFramesContainer.layer.cornerRadius = 9.0 self.videoTransparentFramesContainer.isUserInteractionEnabled = false self.videoOpaqueFramesContainer.clipsToBounds = true - self.videoOpaqueFramesContainer.layer.cornerRadius = 9.0 self.videoOpaqueFramesContainer.isUserInteractionEnabled = false self.addSubview(self.clippingView) @@ -760,6 +792,7 @@ private class TrackView: UIView, UIScrollViewDelegate, UIGestureRecognizerDelega func update( context: AccountContext, + style: MediaScrubberComponent.Style, track: MediaScrubberComponent.Track, isSelected: Bool, availableSize: CGSize, @@ -769,7 +802,20 @@ private class TrackView: UIView, UIScrollViewDelegate, UIGestureRecognizerDelega let previousParams = self.params self.params = (track, isSelected, duration) - let scrubberSize = CGSize(width: availableSize.width, height: isSelected ? 
trackHeight : collapsedTrackHeight) + let fullTrackHeight: CGFloat + let framesCornerRadius: CGFloat + switch style { + case .editor: + fullTrackHeight = trackHeight + framesCornerRadius = 9.0 + case .videoMessage: + fullTrackHeight = 33.0 + framesCornerRadius = fullTrackHeight / 2.0 + } + self.videoTransparentFramesContainer.layer.cornerRadius = framesCornerRadius + self.videoOpaqueFramesContainer.layer.cornerRadius = framesCornerRadius + + let scrubberSize = CGSize(width: availableSize.width, height: isSelected ? fullTrackHeight : collapsedTrackHeight) var screenSpanDuration = duration if track.isAudio && track.isMain { @@ -891,11 +937,18 @@ private class TrackView: UIView, UIScrollViewDelegate, UIGestureRecognizerDelega transparentFrameLayer = VideoFrameLayer() transparentFrameLayer.masksToBounds = true transparentFrameLayer.contentsGravity = .resizeAspectFill + if case .videoMessage = style { + transparentFrameLayer.contentsRect = CGRect(origin: .zero, size: CGSize(width: 1.0, height: 1.0)).insetBy(dx: 0.15, dy: 0.15) + } self.videoTransparentFramesContainer.layer.addSublayer(transparentFrameLayer) self.videoTransparentFrameLayers.append(transparentFrameLayer) + opaqueFrameLayer = VideoFrameLayer() opaqueFrameLayer.masksToBounds = true opaqueFrameLayer.contentsGravity = .resizeAspectFill + if case .videoMessage = style { + opaqueFrameLayer.contentsRect = CGRect(origin: .zero, size: CGSize(width: 1.0, height: 1.0)).insetBy(dx: 0.15, dy: 0.15) + } self.videoOpaqueFramesContainer.layer.addSublayer(opaqueFrameLayer) self.videoOpaqueFrameLayers.append(opaqueFrameLayer) } else { @@ -927,7 +980,7 @@ private class TrackView: UIView, UIScrollViewDelegate, UIGestureRecognizerDelega if let image = frames.first, image.size.height > 0.0 { frameAspectRatio = max(0.66, image.size.width / image.size.height) } - let frameSize = CGSize(width: trackHeight * frameAspectRatio, height: trackHeight) + let frameSize = CGSize(width: fullTrackHeight * frameAspectRatio, height: 
fullTrackHeight) var frameOffset: CGFloat = 0.0 for i in 0 ..< frames.count { if i < self.videoTransparentFrameLayers.count { @@ -1052,7 +1105,7 @@ private class TrackView: UIView, UIScrollViewDelegate, UIGestureRecognizerDelega ) ), environment: {}, - containerSize: CGSize(width: containerFrame.width, height: trackHeight) + containerSize: CGSize(width: containerFrame.width, height: fullTrackHeight) ) if let view = self.audioWaveform.view as? AudioWaveformComponent.View { if view.superview == nil { @@ -1090,54 +1143,29 @@ private class TrimView: UIView { override init(frame: CGRect) { super.init(frame: .zero) - let height = trackHeight - let handleImage = generateImage(CGSize(width: handleWidth, height: height), rotatedContext: { size, context in - context.clear(CGRect(origin: .zero, size: size)) - context.setFillColor(UIColor.white.cgColor) - - let path = UIBezierPath(roundedRect: CGRect(origin: .zero, size: CGSize(width: size.width * 2.0, height: size.height)), cornerRadius: 9.0) - context.addPath(path.cgPath) - context.fillPath() - - context.setBlendMode(.clear) - let innerPath = UIBezierPath(roundedRect: CGRect(origin: CGPoint(x: handleWidth - 3.0, y: borderHeight), size: CGSize(width: handleWidth, height: size.height - borderHeight * 2.0)), cornerRadius: 2.0) - context.addPath(innerPath.cgPath) - context.fillPath() - })?.withRenderingMode(.alwaysTemplate).resizableImage(withCapInsets: UIEdgeInsets(top: 10.0, left: 0.0, bottom: 10.0, right: 0.0)) - self.zoneView.image = UIImage() self.zoneView.isUserInteractionEnabled = true self.zoneView.hitTestSlop = UIEdgeInsets(top: -8.0, left: 0.0, bottom: -8.0, right: 0.0) - self.leftHandleView.image = handleImage self.leftHandleView.isUserInteractionEnabled = true self.leftHandleView.tintColor = .white self.leftHandleView.contentMode = .scaleToFill self.leftHandleView.hitTestSlop = UIEdgeInsets(top: -8.0, left: -9.0, bottom: -8.0, right: -9.0) - self.rightHandleView.image = handleImage self.rightHandleView.transform = 
CGAffineTransform(scaleX: -1.0, y: 1.0) self.rightHandleView.isUserInteractionEnabled = true self.rightHandleView.tintColor = .white self.rightHandleView.contentMode = .scaleToFill self.rightHandleView.hitTestSlop = UIEdgeInsets(top: -8.0, left: -9.0, bottom: -8.0, right: -9.0) - self.borderView.image = generateImage(CGSize(width: 1.0, height: height), rotatedContext: { size, context in - context.clear(CGRect(origin: .zero, size: size)) - context.setFillColor(UIColor.white.cgColor) - context.fill(CGRect(origin: .zero, size: CGSize(width: size.width, height: borderHeight))) - context.fill(CGRect(origin: CGPoint(x: 0.0, y: size.height - borderHeight), size: CGSize(width: size.width, height: height))) - })?.withRenderingMode(.alwaysTemplate).resizableImage(withCapInsets: UIEdgeInsets(top: 10.0, left: 0.0, bottom: 10.0, right: 0.0)) self.borderView.tintColor = .white self.borderView.isUserInteractionEnabled = false self.leftCapsuleView.clipsToBounds = true self.leftCapsuleView.layer.cornerRadius = 1.0 - self.leftCapsuleView.backgroundColor = UIColor(rgb: 0x343436) self.rightCapsuleView.clipsToBounds = true self.rightCapsuleView.layer.cornerRadius = 1.0 - self.rightCapsuleView.backgroundColor = UIColor(rgb: 0x343436) self.addSubview(self.zoneView) self.addSubview(self.leftHandleView) @@ -1279,6 +1307,8 @@ private class TrimView: UIView { )? func update( + style: MediaScrubberComponent.Style, + theme: PresentationTheme, visualInsets: UIEdgeInsets, scrubberSize: CGSize, duration: Double, @@ -1288,34 +1318,101 @@ private class TrimView: UIView { minDuration: Double, maxDuration: Double, transition: Transition - ) -> (leftHandleFrame: CGRect, rightHandleFrame: CGRect) - { + ) -> (leftHandleFrame: CGRect, rightHandleFrame: CGRect) { + let isFirstTime = self.params == nil self.params = (scrubberSize, duration, startPosition, endPosition, position, minDuration, maxDuration) - let trimColor = self.isPanningTrimHandle ? 
UIColor(rgb: 0xf8d74a) : .white + let effectiveHandleWidth: CGFloat + let fullTrackHeight: CGFloat + let capsuleOffset: CGFloat + let color: UIColor + let highlightColor: UIColor + + switch style { + case .editor: + effectiveHandleWidth = handleWidth + fullTrackHeight = trackHeight + capsuleOffset = 5.0 - UIScreenPixel + color = .white + highlightColor = UIColor(rgb: 0xf8d74a) + + if isFirstTime { + self.borderView.image = generateImage(CGSize(width: 1.0, height: fullTrackHeight), rotatedContext: { size, context in + context.clear(CGRect(origin: .zero, size: size)) + context.setFillColor(UIColor.white.cgColor) + context.fill(CGRect(origin: .zero, size: CGSize(width: size.width, height: borderHeight))) + context.fill(CGRect(origin: CGPoint(x: 0.0, y: size.height - borderHeight), size: CGSize(width: size.width, height: fullTrackHeight))) + })?.withRenderingMode(.alwaysTemplate).resizableImage(withCapInsets: UIEdgeInsets(top: 10.0, left: 0.0, bottom: 10.0, right: 0.0)) + + let handleImage = generateImage(CGSize(width: handleWidth, height: fullTrackHeight), rotatedContext: { size, context in + context.clear(CGRect(origin: .zero, size: size)) + context.setFillColor(UIColor.white.cgColor) + + let path = UIBezierPath(roundedRect: CGRect(origin: .zero, size: CGSize(width: size.width * 2.0, height: size.height)), cornerRadius: 9.0) + context.addPath(path.cgPath) + context.fillPath() + + context.setBlendMode(.clear) + let innerPath = UIBezierPath(roundedRect: CGRect(origin: CGPoint(x: handleWidth - 3.0, y: borderHeight), size: CGSize(width: handleWidth, height: size.height - borderHeight * 2.0)), cornerRadius: 2.0) + context.addPath(innerPath.cgPath) + context.fillPath() + })?.withRenderingMode(.alwaysTemplate).resizableImage(withCapInsets: UIEdgeInsets(top: 10.0, left: 0.0, bottom: 10.0, right: 0.0)) + + self.leftHandleView.image = handleImage + self.rightHandleView.image = handleImage + + self.leftCapsuleView.backgroundColor = UIColor(rgb: 0x343436) + 
self.rightCapsuleView.backgroundColor = UIColor(rgb: 0x343436) + } + case .videoMessage: + effectiveHandleWidth = 16.0 + fullTrackHeight = 33.0 + capsuleOffset = 8.0 + color = theme.chat.inputPanel.panelControlAccentColor + highlightColor = theme.chat.inputPanel.panelControlAccentColor + + if isFirstTime { + let handleImage = generateImage(CGSize(width: effectiveHandleWidth, height: fullTrackHeight), rotatedContext: { size, context in + context.clear(CGRect(origin: .zero, size: size)) + context.setFillColor(UIColor.white.cgColor) + + let path = UIBezierPath(roundedRect: CGRect(origin: .zero, size: CGSize(width: size.width * 2.0, height: size.height)), cornerRadius: 16.5) + context.addPath(path.cgPath) + context.fillPath() + })?.withRenderingMode(.alwaysTemplate) + + self.leftHandleView.image = handleImage + self.rightHandleView.image = handleImage + + self.leftCapsuleView.backgroundColor = .white + self.rightCapsuleView.backgroundColor = .white + } + } + + let trimColor = self.isPanningTrimHandle ? highlightColor : color transition.setTintColor(view: self.leftHandleView, color: trimColor) transition.setTintColor(view: self.rightHandleView, color: trimColor) transition.setTintColor(view: self.borderView, color: trimColor) let totalWidth = scrubberSize.width - let totalRange = totalWidth - handleWidth + let totalRange = totalWidth - effectiveHandleWidth let leftHandlePositionFraction = duration > 0.0 ? 
startPosition / duration : 0.0 - let leftHandlePosition = floorToScreenPixels(handleWidth / 2.0 + totalRange * leftHandlePositionFraction) + let leftHandlePosition = floorToScreenPixels(effectiveHandleWidth / 2.0 + totalRange * leftHandlePositionFraction) - var leftHandleFrame = CGRect(origin: CGPoint(x: leftHandlePosition - handleWidth / 2.0, y: 0.0), size: CGSize(width: handleWidth, height: scrubberSize.height)) + var leftHandleFrame = CGRect(origin: CGPoint(x: leftHandlePosition - effectiveHandleWidth / 2.0, y: 0.0), size: CGSize(width: effectiveHandleWidth, height: scrubberSize.height)) leftHandleFrame.origin.x = max(leftHandleFrame.origin.x, visualInsets.left) transition.setFrame(view: self.leftHandleView, frame: leftHandleFrame) let rightHandlePositionFraction = duration > 0.0 ? endPosition / duration : 1.0 - let rightHandlePosition = floorToScreenPixels(handleWidth / 2.0 + totalRange * rightHandlePositionFraction) + let rightHandlePosition = floorToScreenPixels(effectiveHandleWidth / 2.0 + totalRange * rightHandlePositionFraction) - var rightHandleFrame = CGRect(origin: CGPoint(x: max(leftHandleFrame.maxX, rightHandlePosition - handleWidth / 2.0), y: 0.0), size: CGSize(width: handleWidth, height: scrubberSize.height)) - rightHandleFrame.origin.x = min(rightHandleFrame.origin.x, totalWidth - visualInsets.right - handleWidth) + var rightHandleFrame = CGRect(origin: CGPoint(x: max(leftHandleFrame.maxX, rightHandlePosition - effectiveHandleWidth / 2.0), y: 0.0), size: CGSize(width: effectiveHandleWidth, height: scrubberSize.height)) + rightHandleFrame.origin.x = min(rightHandleFrame.origin.x, totalWidth - visualInsets.right - effectiveHandleWidth) transition.setFrame(view: self.rightHandleView, frame: rightHandleFrame) let capsuleSize = CGSize(width: 2.0, height: 11.0) - transition.setFrame(view: self.leftCapsuleView, frame: CGRect(origin: CGPoint(x: 5.0 - UIScreenPixel, y: floorToScreenPixels((leftHandleFrame.height - capsuleSize.height) / 2.0)), size: 
capsuleSize)) - transition.setFrame(view: self.rightCapsuleView, frame: CGRect(origin: CGPoint(x: 5.0 - UIScreenPixel, y: floorToScreenPixels((leftHandleFrame.height - capsuleSize.height) / 2.0)), size: capsuleSize)) + transition.setFrame(view: self.leftCapsuleView, frame: CGRect(origin: CGPoint(x: capsuleOffset, y: floorToScreenPixels((leftHandleFrame.height - capsuleSize.height) / 2.0)), size: capsuleSize)) + transition.setFrame(view: self.rightCapsuleView, frame: CGRect(origin: CGPoint(x: capsuleOffset, y: floorToScreenPixels((leftHandleFrame.height - capsuleSize.height) / 2.0)), size: capsuleSize)) let zoneFrame = CGRect(x: leftHandleFrame.maxX, y: 0.0, width: rightHandleFrame.minX - leftHandleFrame.maxX, height: scrubberSize.height) transition.setFrame(view: self.zoneView, frame: zoneFrame) @@ -1345,7 +1442,7 @@ private class VideoFrameLayer: SimpleShapeLayer { override func layoutSublayers() { super.layoutSublayers() - + if self.stripeLayer.superlayer == nil { self.stripeLayer.backgroundColor = UIColor(rgb: 0x000000, alpha: 0.3).cgColor self.addSublayer(self.stripeLayer) diff --git a/submodules/TelegramUI/Components/MessageInputActionButtonComponent/Sources/MessageInputActionButtonComponent.swift b/submodules/TelegramUI/Components/MessageInputActionButtonComponent/Sources/MessageInputActionButtonComponent.swift index 962f2aca1c..874c3d3d1e 100644 --- a/submodules/TelegramUI/Components/MessageInputActionButtonComponent/Sources/MessageInputActionButtonComponent.swift +++ b/submodules/TelegramUI/Components/MessageInputActionButtonComponent/Sources/MessageInputActionButtonComponent.swift @@ -333,6 +333,7 @@ public final class MessageInputActionButtonComponent: Component { context: component.context, theme: defaultDarkPresentationTheme, useDarkTheme: true, + pause: false, strings: component.strings, presentController: component.presentController ) diff --git a/submodules/TelegramUI/Components/MessageInputPanelComponent/Sources/MediaPreviewPanelComponent.swift 
b/submodules/TelegramUI/Components/MessageInputPanelComponent/Sources/MediaPreviewPanelComponent.swift index 69f49b4767..f31ebd0b05 100644 --- a/submodules/TelegramUI/Components/MessageInputPanelComponent/Sources/MediaPreviewPanelComponent.swift +++ b/submodules/TelegramUI/Components/MessageInputPanelComponent/Sources/MediaPreviewPanelComponent.swift @@ -104,7 +104,7 @@ public final class MediaPreviewPanelComponent: Component { if lhs.strings !== rhs.strings { return false } - if lhs.mediaPreview !== rhs.mediaPreview { + if lhs.mediaPreview != rhs.mediaPreview { return false } if lhs.insets != rhs.insets { @@ -199,7 +199,7 @@ public final class MediaPreviewPanelComponent: Component { } @objc private func playPauseButtonPressed() { - guard let component = self.component else { + guard let component = self.component, case let .audio(audio) = component.mediaPreview else { return } @@ -212,7 +212,7 @@ public final class MediaPreviewPanelComponent: Component { postbox: component.context.account.postbox, userLocation: .other, userContentType: .audio, - resourceReference: .standalone(resource: component.mediaPreview.resource), + resourceReference: .standalone(resource: audio.resource), streamable: .none, video: false, preferSoftwareDecoding: false, @@ -231,8 +231,8 @@ public final class MediaPreviewPanelComponent: Component { } func update(component: MediaPreviewPanelComponent, availableSize: CGSize, state: EmptyComponentState, environment: Environment, transition: Transition) -> CGSize { - if self.component == nil { - self.timerTextValue = textForDuration(seconds: component.mediaPreview.duration) + if self.component == nil, case let .audio(audio) = component.mediaPreview { + self.timerTextValue = textForDuration(seconds: audio.duration) } self.component = component @@ -263,69 +263,71 @@ public final class MediaPreviewPanelComponent: Component { let waveformFrame = CGRect(origin: CGPoint(x: component.insets.left + 47.0, y: component.insets.top + 
floor((availableSize.height - component.insets.top - component.insets.bottom - 24.0) * 0.5)), size: CGSize(width: availableSize.width - component.insets.right - 47.0 - (component.insets.left + 47.0), height: 24.0)) - let _ = self.waveform.update( - transition: transition, - component: AnyComponent(AudioWaveformComponent( - backgroundColor: UIColor.white.withAlphaComponent(0.1), - foregroundColor: UIColor.white.withAlphaComponent(1.0), - shimmerColor: nil, - style: .middle, - samples: component.mediaPreview.waveform.samples, - peak: component.mediaPreview.waveform.peak, - status: self.mediaPlayerStatus.get() |> map { value -> MediaPlayerStatus in - if let value { - return value - } else { - return MediaPlayerStatus( - generationTimestamp: 0.0, - duration: 0.0, - dimensions: CGSize(), - timestamp: 0.0, - baseRate: 1.0, - seekId: 0, - status: .paused, - soundEnabled: true - ) + if case let .audio(audio) = component.mediaPreview { + let _ = self.waveform.update( + transition: transition, + component: AnyComponent(AudioWaveformComponent( + backgroundColor: UIColor.white.withAlphaComponent(0.1), + foregroundColor: UIColor.white.withAlphaComponent(1.0), + shimmerColor: nil, + style: .middle, + samples: audio.waveform.samples, + peak: audio.waveform.peak, + status: self.mediaPlayerStatus.get() |> map { value -> MediaPlayerStatus in + if let value { + return value + } else { + return MediaPlayerStatus( + generationTimestamp: 0.0, + duration: 0.0, + dimensions: CGSize(), + timestamp: 0.0, + baseRate: 1.0, + seekId: 0, + status: .paused, + soundEnabled: true + ) + } + }, + isViewOnceMessage: false, + seek: { [weak self] timestamp in + guard let self, let mediaPlayer = self.mediaPlayer else { + return + } + mediaPlayer.seek(timestamp: timestamp) + }, + updateIsSeeking: { [weak self] isSeeking in + guard let self, let mediaPlayer = self.mediaPlayer else { + return + } + if isSeeking { + mediaPlayer.pause() + } else { + mediaPlayer.play() + } } - }, - isViewOnceMessage: false, - 
seek: { [weak self] timestamp in - guard let self, let mediaPlayer = self.mediaPlayer else { - return - } - mediaPlayer.seek(timestamp: timestamp) - }, - updateIsSeeking: { [weak self] isSeeking in - guard let self, let mediaPlayer = self.mediaPlayer else { - return - } - if isSeeking { - mediaPlayer.pause() - } else { - mediaPlayer.play() - } - } - )), - environment: {}, - containerSize: waveformFrame.size - ) - let _ = self.vibrancyWaveform.update( - transition: transition, - component: AnyComponent(AudioWaveformComponent( - backgroundColor: .white, - foregroundColor: .white, - shimmerColor: nil, - style: .middle, - samples: component.mediaPreview.waveform.samples, - peak: component.mediaPreview.waveform.peak, - status: .complete(), - isViewOnceMessage: false, - seek: nil, - updateIsSeeking: nil - )), - environment: {}, - containerSize: waveformFrame.size - ) + )), + environment: {}, + containerSize: waveformFrame.size + ) + let _ = self.vibrancyWaveform.update( + transition: transition, + component: AnyComponent(AudioWaveformComponent( + backgroundColor: .white, + foregroundColor: .white, + shimmerColor: nil, + style: .middle, + samples: audio.waveform.samples, + peak: audio.waveform.peak, + status: .complete(), + isViewOnceMessage: false, + seek: nil, + updateIsSeeking: nil + )), + environment: {}, + containerSize: waveformFrame.size + ) + } if let waveformView = self.waveform.view as? 
AudioWaveformComponent.View { if waveformView.superview == nil { diff --git a/submodules/TelegramUI/Components/MessageInputPanelComponent/Sources/MessageInputPanelComponent.swift b/submodules/TelegramUI/Components/MessageInputPanelComponent/Sources/MessageInputPanelComponent.swift index 35ce217451..91476b1fee 100644 --- a/submodules/TelegramUI/Components/MessageInputPanelComponent/Sources/MessageInputPanelComponent.swift +++ b/submodules/TelegramUI/Components/MessageInputPanelComponent/Sources/MessageInputPanelComponent.swift @@ -351,7 +351,7 @@ public final class MessageInputPanelComponent: Component { if lhs.wasRecordingDismissed != rhs.wasRecordingDismissed { return false } - if lhs.recordedAudioPreview !== rhs.recordedAudioPreview { + if lhs.recordedAudioPreview != rhs.recordedAudioPreview { return false } if lhs.hasRecordedVideoPreview != rhs.hasRecordedVideoPreview { diff --git a/submodules/TelegramUI/Components/PeerInfo/PeerInfoScreen/Sources/PeerInfoScreen.swift b/submodules/TelegramUI/Components/PeerInfo/PeerInfoScreen/Sources/PeerInfoScreen.swift index 55e3528daf..63260ff3c6 100644 --- a/submodules/TelegramUI/Components/PeerInfo/PeerInfoScreen/Sources/PeerInfoScreen.swift +++ b/submodules/TelegramUI/Components/PeerInfo/PeerInfoScreen/Sources/PeerInfoScreen.swift @@ -3940,14 +3940,7 @@ final class PeerInfoScreenNode: ViewControllerTracingNode, PeerInfoScreenNodePro guard let self else { return } - var replaceImpl: ((ViewController) -> Void)? 
- let controller = PremiumDemoScreen(context: context, subject: .emojiStatus, action: { - let controller = PremiumIntroScreen(context: context, source: .settings) - replaceImpl?(controller) - }) - replaceImpl = { [weak controller] c in - controller?.replace(with: c) - } + let controller = self.context.sharedContext.makePremiumPrivacyControllerController(context: self.context, subject: .presence, peerId: self.peerId) self.controller?.push(controller) } diff --git a/submodules/TelegramUI/Components/PlainButtonComponent/Sources/PlainButtonComponent.swift b/submodules/TelegramUI/Components/PlainButtonComponent/Sources/PlainButtonComponent.swift index 1152c2cefd..bb386fa85d 100644 --- a/submodules/TelegramUI/Components/PlainButtonComponent/Sources/PlainButtonComponent.swift +++ b/submodules/TelegramUI/Components/PlainButtonComponent/Sources/PlainButtonComponent.swift @@ -16,6 +16,8 @@ public final class PlainButtonComponent: Component { public let contentInsets: UIEdgeInsets public let action: () -> Void public let isEnabled: Bool + public let animateAlpha: Bool + public let tag: AnyObject? public init( content: AnyComponent, @@ -23,7 +25,9 @@ public final class PlainButtonComponent: Component { minSize: CGSize? = nil, contentInsets: UIEdgeInsets = UIEdgeInsets(), action: @escaping () -> Void, - isEnabled: Bool = true + isEnabled: Bool = true, + animateAlpha: Bool = true, + tag : AnyObject? 
= nil ) { self.content = content self.effectAlignment = effectAlignment @@ -31,8 +35,10 @@ public final class PlainButtonComponent: Component { self.contentInsets = contentInsets self.action = action self.isEnabled = isEnabled + self.animateAlpha = animateAlpha + self.tag = tag } - + public static func ==(lhs: PlainButtonComponent, rhs: PlainButtonComponent) -> Bool { if lhs.content != rhs.content { return false @@ -49,10 +55,26 @@ public final class PlainButtonComponent: Component { if lhs.isEnabled != rhs.isEnabled { return false } + if lhs.animateAlpha != rhs.animateAlpha { + return false + } + if lhs.tag !== rhs.tag { + return false + } return true } - public final class View: HighlightTrackingButton { + public final class View: HighlightTrackingButton, ComponentTaggedView { + public func matches(tag: Any) -> Bool { + if let component = self.component, let componentTag = component.tag { + let tag = tag as AnyObject + if componentTag === tag { + return true + } + } + return false + } + private var component: PlainButtonComponent? private weak var componentState: EmptyComponentState? @@ -73,18 +95,25 @@ public final class PlainButtonComponent: Component { self.highligthedChanged = { [weak self] highlighted in if let self, self.bounds.width > 0.0 { + let animateAlpha = self.component?.animateAlpha ?? 
true + let topScale: CGFloat = (self.bounds.width - 8.0) / self.bounds.width let maxScale: CGFloat = (self.bounds.width + 2.0) / self.bounds.width if highlighted { self.contentContainer.layer.removeAnimation(forKey: "opacity") self.contentContainer.layer.removeAnimation(forKey: "sublayerTransform") - self.contentContainer.alpha = 0.7 + + if animateAlpha { + self.contentContainer.alpha = 0.7 + } let transition = Transition(animation: .curve(duration: 0.2, curve: .easeInOut)) transition.setScale(layer: self.contentContainer.layer, scale: topScale) } else { - self.contentContainer.alpha = 1.0 - self.contentContainer.layer.animateAlpha(from: 0.7, to: 1.0, duration: 0.2) + if animateAlpha { + self.contentContainer.alpha = 1.0 + self.contentContainer.layer.animateAlpha(from: 0.7, to: 1.0, duration: 0.2) + } let transition = Transition(animation: .none) transition.setScale(layer: self.contentContainer.layer, scale: 1.0) diff --git a/submodules/TelegramUI/Components/Resources/FetchVideoMediaResource/Sources/FetchVideoMediaResource.swift b/submodules/TelegramUI/Components/Resources/FetchVideoMediaResource/Sources/FetchVideoMediaResource.swift index 22d8e67ed5..23ade99f15 100644 --- a/submodules/TelegramUI/Components/Resources/FetchVideoMediaResource/Sources/FetchVideoMediaResource.swift +++ b/submodules/TelegramUI/Components/Resources/FetchVideoMediaResource/Sources/FetchVideoMediaResource.swift @@ -481,17 +481,46 @@ public func fetchLocalFileVideoMediaResource(postbox: Postbox, resource: LocalFi let signal = Signal { subscriber in subscriber.putNext(.reset) - var filteredPath = resource.path - if filteredPath.hasPrefix("file://") { - filteredPath = String(filteredPath[filteredPath.index(filteredPath.startIndex, offsetBy: "file://".count)]) + let filteredPaths = resource.paths.map { path in + if path.hasPrefix("file://") { + return path.replacingOccurrences(of: "file://", with: "") + } else { + return path + } } + let filteredPath = filteredPaths.first ?? 
"" let defaultPreset = TGMediaVideoConversionPreset(rawValue: UInt32(UserDefaults.standard.integer(forKey: "TG_preferredVideoPreset_v0"))) let qualityPreset = MediaQualityPreset(preset: defaultPreset) let isImage = filteredPath.contains(".jpg") var isStory = false - let avAsset = AVURLAsset(url: URL(fileURLWithPath: filteredPath)) + let avAsset: AVAsset? + + if isImage { + avAsset = nil + } else if filteredPaths.count > 1 { + let composition = AVMutableComposition() + var currentTime = CMTime.zero + + for path in filteredPaths { + let asset = AVURLAsset(url: URL(fileURLWithPath: path)) + let duration = asset.duration + do { + try composition.insertTimeRange( + CMTimeRangeMake(start: .zero, duration: duration), + of: asset, + at: currentTime + ) + currentTime = CMTimeAdd(currentTime, duration) + } catch { + } + } + avAsset = composition + } else { + avAsset = AVURLAsset(url: URL(fileURLWithPath: filteredPath)) + } + var adjustments: TGVideoEditAdjustments? var mediaEditorValues: MediaEditorValues? if let videoAdjustments = resource.adjustments { @@ -500,26 +529,34 @@ public func fetchLocalFileVideoMediaResource(postbox: Postbox, resource: LocalFi if let values = try? JSONDecoder().decode(MediaEditorValues.self, from: videoAdjustments.data.makeData()) { mediaEditorValues = values } - } else if let dict = legacy_unarchiveDeprecated(data: videoAdjustments.data.makeData()) as? [AnyHashable : Any], let legacyAdjustments = TGVideoEditAdjustments(dictionary: dict) { - if alwaysUseModernPipeline && !isImage { - mediaEditorValues = MediaEditorValues(legacyAdjustments: legacyAdjustments, defaultPreset: qualityPreset) - } else { - adjustments = legacyAdjustments + } else { + if let values = try? JSONDecoder().decode(MediaEditorValues.self, from: videoAdjustments.data.makeData()) { + mediaEditorValues = values + } else if let dict = legacy_unarchiveDeprecated(data: videoAdjustments.data.makeData()) as? 
[AnyHashable : Any], let legacyAdjustments = TGVideoEditAdjustments(dictionary: dict) { + if alwaysUseModernPipeline && !isImage { + mediaEditorValues = MediaEditorValues(legacyAdjustments: legacyAdjustments, defaultPreset: qualityPreset) + } else { + adjustments = legacyAdjustments + } } } } let tempFile = EngineTempBox.shared.tempFile(fileName: "video.mp4") let updatedSize = Atomic(value: 0) if let mediaEditorValues { - let duration: Double = avAsset.duration.seconds - let configuration = recommendedVideoExportConfiguration(values: mediaEditorValues, duration: duration, frameRate: 30.0) + let duration: Double let subject: MediaEditorVideoExport.Subject if isImage, let data = try? Data(contentsOf: URL(fileURLWithPath: filteredPath), options: [.mappedRead]), let image = UIImage(data: data) { + duration = 5.0 subject = .image(image: image) - } else { + } else if let avAsset { + duration = avAsset.duration.seconds subject = .video(asset: avAsset, isStory: isStory) + } else { + return EmptyDisposable } + let configuration = recommendedVideoExportConfiguration(values: mediaEditorValues, duration: duration, frameRate: 30.0) let videoExport = MediaEditorVideoExport(postbox: postbox, subject: subject, configuration: configuration, outputPath: tempFile.path) videoExport.start() diff --git a/submodules/TelegramUI/Components/Settings/PeerNameColorScreen/Sources/PeerNameColorChatPreviewItem.swift b/submodules/TelegramUI/Components/Settings/PeerNameColorScreen/Sources/PeerNameColorChatPreviewItem.swift index c6cd207bae..4053e2464c 100644 --- a/submodules/TelegramUI/Components/Settings/PeerNameColorScreen/Sources/PeerNameColorChatPreviewItem.swift +++ b/submodules/TelegramUI/Components/Settings/PeerNameColorScreen/Sources/PeerNameColorChatPreviewItem.swift @@ -236,7 +236,7 @@ final class PeerNameColorChatPreviewItemNode: ListViewItemNode { } let message = Message(stableId: 1, stableVersion: 0, id: MessageId(peerId: peerId, namespace: 0, id: 1), globallyUniqueId: nil, 
groupingKey: nil, groupInfo: nil, threadId: nil, timestamp: 66000, flags: messageItem.outgoing ? [] : [.Incoming], tags: [], globalTags: [], localTags: [], customTags: [], forwardInfo: nil, author: peers[authorPeerId], text: messageItem.text, attributes: messageItem.reply != nil ? [ReplyMessageAttribute(messageId: replyMessageId, threadMessageId: nil, quote: nil, isQuote: false)] : [], media: media, peers: peers, associatedMessages: messages, associatedMessageIds: [], associatedMedia: [:], associatedThreadInfo: nil, associatedStories: [:]) - items.append(item.context.sharedContext.makeChatMessagePreviewItem(context: item.context, messages: [message], theme: item.componentTheme, strings: item.strings, wallpaper: item.wallpaper, fontSize: item.fontSize, chatBubbleCorners: item.chatBubbleCorners, dateTimeFormat: item.dateTimeFormat, nameOrder: item.nameDisplayOrder, forcedResourceStatus: nil, tapMessage: nil, clickThroughMessage: nil, backgroundNode: currentBackgroundNode, availableReactions: nil, accountPeer: nil, isCentered: false, isPreview: true)) + items.append(item.context.sharedContext.makeChatMessagePreviewItem(context: item.context, messages: [message], theme: item.componentTheme, strings: item.strings, wallpaper: item.wallpaper, fontSize: item.fontSize, chatBubbleCorners: item.chatBubbleCorners, dateTimeFormat: item.dateTimeFormat, nameOrder: item.nameDisplayOrder, forcedResourceStatus: nil, tapMessage: nil, clickThroughMessage: nil, backgroundNode: currentBackgroundNode, availableReactions: nil, accountPeer: nil, isCentered: false, isPreview: true, isStandalone: false)) } var nodes: [ListViewItemNode] = [] diff --git a/submodules/TelegramUI/Components/Settings/QuickReactionSetupController/Sources/ReactionChatPreviewItem.swift b/submodules/TelegramUI/Components/Settings/QuickReactionSetupController/Sources/ReactionChatPreviewItem.swift index 2e7cca269a..706ec770ff 100644 --- 
a/submodules/TelegramUI/Components/Settings/QuickReactionSetupController/Sources/ReactionChatPreviewItem.swift +++ b/submodules/TelegramUI/Components/Settings/QuickReactionSetupController/Sources/ReactionChatPreviewItem.swift @@ -292,7 +292,7 @@ class ReactionChatPreviewItemNode: ListViewItemNode { attributes.append(ReactionsMessageAttribute(canViewList: false, isTags: false, reactions: [MessageReaction(value: reaction, count: 1, chosenOrder: 0)], recentPeers: recentPeers)) } - let messageItem = item.context.sharedContext.makeChatMessagePreviewItem(context: item.context, messages: [Message(stableId: 1, stableVersion: 0, id: MessageId(peerId: chatPeerId, namespace: 0, id: 1), globallyUniqueId: nil, groupingKey: nil, groupInfo: nil, threadId: nil, timestamp: 66000, flags: [.Incoming], tags: [], globalTags: [], localTags: [], customTags: [], forwardInfo: nil, author: peers[userPeerId], text: messageText, attributes: attributes, media: [], peers: peers, associatedMessages: messages, associatedMessageIds: [], associatedMedia: [:], associatedThreadInfo: nil, associatedStories: [:])], theme: item.theme, strings: item.strings, wallpaper: item.wallpaper, fontSize: item.fontSize, chatBubbleCorners: item.chatBubbleCorners, dateTimeFormat: item.dateTimeFormat, nameOrder: item.nameDisplayOrder, forcedResourceStatus: nil, tapMessage: nil, clickThroughMessage: nil, backgroundNode: currentBackgroundNode, availableReactions: item.availableReactions, accountPeer: item.accountPeer, isCentered: true, isPreview: true) + let messageItem = item.context.sharedContext.makeChatMessagePreviewItem(context: item.context, messages: [Message(stableId: 1, stableVersion: 0, id: MessageId(peerId: chatPeerId, namespace: 0, id: 1), globallyUniqueId: nil, groupingKey: nil, groupInfo: nil, threadId: nil, timestamp: 66000, flags: [.Incoming], tags: [], globalTags: [], localTags: [], customTags: [], forwardInfo: nil, author: peers[userPeerId], text: messageText, attributes: attributes, media: [], peers: 
peers, associatedMessages: messages, associatedMessageIds: [], associatedMedia: [:], associatedThreadInfo: nil, associatedStories: [:])], theme: item.theme, strings: item.strings, wallpaper: item.wallpaper, fontSize: item.fontSize, chatBubbleCorners: item.chatBubbleCorners, dateTimeFormat: item.dateTimeFormat, nameOrder: item.nameDisplayOrder, forcedResourceStatus: nil, tapMessage: nil, clickThroughMessage: nil, backgroundNode: currentBackgroundNode, availableReactions: item.availableReactions, accountPeer: item.accountPeer, isCentered: true, isPreview: true, isStandalone: false) var node: ListViewItemNode? if let current = currentNode { diff --git a/submodules/TelegramUI/Components/Settings/ThemeAccentColorScreen/Sources/ThemeAccentColorControllerNode.swift b/submodules/TelegramUI/Components/Settings/ThemeAccentColorScreen/Sources/ThemeAccentColorControllerNode.swift index 40ab1301ea..78a4cc1605 100644 --- a/submodules/TelegramUI/Components/Settings/ThemeAccentColorScreen/Sources/ThemeAccentColorControllerNode.swift +++ b/submodules/TelegramUI/Components/Settings/ThemeAccentColorScreen/Sources/ThemeAccentColorControllerNode.swift @@ -1089,7 +1089,7 @@ final class ThemeAccentColorControllerNode: ASDisplayNode, UIScrollViewDelegate return state }, animated: true) }, clickThroughMessage: { - }, backgroundNode: self.backgroundNode, availableReactions: nil, accountPeer: nil, isCentered: false, isPreview: true) + }, backgroundNode: self.backgroundNode, availableReactions: nil, accountPeer: nil, isCentered: false, isPreview: true, isStandalone: false) return item } diff --git a/submodules/TelegramUI/Components/Settings/WallpaperGalleryScreen/Sources/WallpaperGalleryItem.swift b/submodules/TelegramUI/Components/Settings/WallpaperGalleryScreen/Sources/WallpaperGalleryItem.swift index 5b1f2a4ac7..5835d255e3 100644 --- a/submodules/TelegramUI/Components/Settings/WallpaperGalleryScreen/Sources/WallpaperGalleryItem.swift +++ 
b/submodules/TelegramUI/Components/Settings/WallpaperGalleryScreen/Sources/WallpaperGalleryItem.swift @@ -1622,19 +1622,19 @@ final class WallpaperGalleryItemNode: GalleryItemNode { if !bottomMessageText.isEmpty { let message1 = Message(stableId: 2, stableVersion: 0, id: MessageId(peerId: peerId, namespace: 0, id: 2), globallyUniqueId: nil, groupingKey: nil, groupInfo: nil, threadId: nil, timestamp: 66001, flags: [], tags: [], globalTags: [], localTags: [], customTags: [], forwardInfo: nil, author: peers[otherPeerId], text: bottomMessageText, attributes: [], media: [], peers: peers, associatedMessages: messages, associatedMessageIds: [], associatedMedia: [:], associatedThreadInfo: nil, associatedStories: [:]) - items.append(self.context.sharedContext.makeChatMessagePreviewItem(context: self.context, messages: [message1], theme: theme, strings: self.presentationData.strings, wallpaper: currentWallpaper, fontSize: self.presentationData.chatFontSize, chatBubbleCorners: self.presentationData.chatBubbleCorners, dateTimeFormat: self.presentationData.dateTimeFormat, nameOrder: self.presentationData.nameDisplayOrder, forcedResourceStatus: nil, tapMessage: nil, clickThroughMessage: nil, backgroundNode: self.nativeNode, availableReactions: nil, accountPeer: nil, isCentered: false, isPreview: true)) + items.append(self.context.sharedContext.makeChatMessagePreviewItem(context: self.context, messages: [message1], theme: theme, strings: self.presentationData.strings, wallpaper: currentWallpaper, fontSize: self.presentationData.chatFontSize, chatBubbleCorners: self.presentationData.chatBubbleCorners, dateTimeFormat: self.presentationData.dateTimeFormat, nameOrder: self.presentationData.nameDisplayOrder, forcedResourceStatus: nil, tapMessage: nil, clickThroughMessage: nil, backgroundNode: self.nativeNode, availableReactions: nil, accountPeer: nil, isCentered: false, isPreview: true, isStandalone: false)) } let message2 = Message(stableId: 1, stableVersion: 0, id: MessageId(peerId: 
peerId, namespace: 0, id: 1), globallyUniqueId: nil, groupingKey: nil, groupInfo: nil, threadId: nil, timestamp: 66000, flags: [.Incoming], tags: [], globalTags: [], localTags: [], customTags: [], forwardInfo: nil, author: peers[peerId], text: topMessageText, attributes: messageAttributes, media: [], peers: peers, associatedMessages: messages, associatedMessageIds: [], associatedMedia: [:], associatedThreadInfo: nil, associatedStories: [:]) - items.append(self.context.sharedContext.makeChatMessagePreviewItem(context: self.context, messages: [message2], theme: theme, strings: self.presentationData.strings, wallpaper: currentWallpaper, fontSize: self.presentationData.chatFontSize, chatBubbleCorners: self.presentationData.chatBubbleCorners, dateTimeFormat: self.presentationData.dateTimeFormat, nameOrder: self.presentationData.nameDisplayOrder, forcedResourceStatus: nil, tapMessage: nil, clickThroughMessage: nil, backgroundNode: self.nativeNode, availableReactions: nil, accountPeer: nil, isCentered: false, isPreview: true)) + items.append(self.context.sharedContext.makeChatMessagePreviewItem(context: self.context, messages: [message2], theme: theme, strings: self.presentationData.strings, wallpaper: currentWallpaper, fontSize: self.presentationData.chatFontSize, chatBubbleCorners: self.presentationData.chatBubbleCorners, dateTimeFormat: self.presentationData.dateTimeFormat, nameOrder: self.presentationData.nameDisplayOrder, forcedResourceStatus: nil, tapMessage: nil, clickThroughMessage: nil, backgroundNode: self.nativeNode, availableReactions: nil, accountPeer: nil, isCentered: false, isPreview: true, isStandalone: false)) if let serviceMessageText { let attributedText = convertMarkdownToAttributes(NSAttributedString(string: serviceMessageText)) let entities = generateChatInputTextEntities(attributedText) let message3 = Message(stableId: 0, stableVersion: 0, id: MessageId(peerId: peerId, namespace: 0, id: 0), globallyUniqueId: nil, groupingKey: nil, groupInfo: nil, 
threadId: nil, timestamp: 66002, flags: [.Incoming], tags: [], globalTags: [], localTags: [], customTags: [], forwardInfo: nil, author: peers[peerId], text: "", attributes: [], media: [TelegramMediaAction(action: .customText(text: attributedText.string, entities: entities, additionalAttributes: nil))], peers: peers, associatedMessages: messages, associatedMessageIds: [], associatedMedia: [:], associatedThreadInfo: nil, associatedStories: [:]) - items.append(self.context.sharedContext.makeChatMessagePreviewItem(context: self.context, messages: [message3], theme: theme, strings: self.presentationData.strings, wallpaper: currentWallpaper, fontSize: self.presentationData.chatFontSize, chatBubbleCorners: self.presentationData.chatBubbleCorners, dateTimeFormat: self.presentationData.dateTimeFormat, nameOrder: self.presentationData.nameDisplayOrder, forcedResourceStatus: nil, tapMessage: nil, clickThroughMessage: nil, backgroundNode: self.nativeNode, availableReactions: nil, accountPeer: nil, isCentered: false, isPreview: true)) + items.append(self.context.sharedContext.makeChatMessagePreviewItem(context: self.context, messages: [message3], theme: theme, strings: self.presentationData.strings, wallpaper: currentWallpaper, fontSize: self.presentationData.chatFontSize, chatBubbleCorners: self.presentationData.chatBubbleCorners, dateTimeFormat: self.presentationData.dateTimeFormat, nameOrder: self.presentationData.nameDisplayOrder, forcedResourceStatus: nil, tapMessage: nil, clickThroughMessage: nil, backgroundNode: self.nativeNode, availableReactions: nil, accountPeer: nil, isCentered: false, isPreview: true, isStandalone: false)) } let params = ListViewItemLayoutParams(width: layout.size.width, leftInset: layout.safeInsets.left, rightInset: layout.safeInsets.right, availableHeight: layout.size.height) diff --git a/submodules/TelegramUI/Components/Stories/StoryContainerScreen/Sources/StoryItemSetContainerViewSendMessage.swift 
b/submodules/TelegramUI/Components/Stories/StoryContainerScreen/Sources/StoryItemSetContainerViewSendMessage.swift index 5f746112aa..006f7b232a 100644 --- a/submodules/TelegramUI/Components/Stories/StoryContainerScreen/Sources/StoryItemSetContainerViewSendMessage.swift +++ b/submodules/TelegramUI/Components/Stories/StoryContainerScreen/Sources/StoryItemSetContainerViewSendMessage.swift @@ -571,12 +571,12 @@ final class StoryItemSetContainerSendMessage { let controller = component.controller() as? StoryContainerScreen - if let recordedAudioPreview = self.recordedAudioPreview { + if let recordedAudioPreview = self.recordedAudioPreview, case let .audio(audio) = recordedAudioPreview { self.recordedAudioPreview = nil - let waveformBuffer = recordedAudioPreview.waveform.makeBitstream() + let waveformBuffer = audio.waveform.makeBitstream() - let messages: [EnqueueMessage] = [.message(text: "", attributes: [], inlineStickers: [:], mediaReference: .standalone(media: TelegramMediaFile(fileId: EngineMedia.Id(namespace: Namespaces.Media.LocalFile, id: Int64.random(in: Int64.min ... Int64.max)), partialReference: nil, resource: recordedAudioPreview.resource, previewRepresentations: [], videoThumbnails: [], immediateThumbnailData: nil, mimeType: "audio/ogg", size: Int64(recordedAudioPreview.fileSize), attributes: [.Audio(isVoice: true, duration: Int(recordedAudioPreview.duration), title: nil, performer: nil, waveform: waveformBuffer)])), threadId: nil, replyToMessageId: nil, replyToStoryId: focusedStoryId, localGroupingKey: nil, correlationId: nil, bubbleUpEmojiOrStickersets: [])] + let messages: [EnqueueMessage] = [.message(text: "", attributes: [], inlineStickers: [:], mediaReference: .standalone(media: TelegramMediaFile(fileId: EngineMedia.Id(namespace: Namespaces.Media.LocalFile, id: Int64.random(in: Int64.min ... 
Int64.max)), partialReference: nil, resource: audio.resource, previewRepresentations: [], videoThumbnails: [], immediateThumbnailData: nil, mimeType: "audio/ogg", size: Int64(audio.fileSize), attributes: [.Audio(isVoice: true, duration: Int(audio.duration), title: nil, performer: nil, waveform: waveformBuffer)])), threadId: nil, replyToMessageId: nil, replyToStoryId: focusedStoryId, localGroupingKey: nil, correlationId: nil, bubbleUpEmojiOrStickersets: [])] let _ = enqueueMessages(account: component.context.account, peerId: peerId, messages: messages).start() @@ -939,7 +939,7 @@ final class StoryItemSetContainerSendMessage { let resource = LocalFileMediaResource(fileId: Int64.random(in: Int64.min ... Int64.max), size: Int64(data.compressedData.count)) component.context.account.postbox.mediaBox.storeResourceData(resource.id, data: data.compressedData) - self.recordedAudioPreview = ChatRecordedMediaPreview(resource: resource, duration: Int32(data.duration), fileSize: Int32(data.compressedData.count), waveform: AudioWaveform(bitstream: waveform, bitsPerSample: 5)) + self.recordedAudioPreview = .audio(ChatRecordedMediaPreview.Audio(resource: resource, fileSize: Int32(data.compressedData.count), duration: Int32(data.duration), waveform: AudioWaveform(bitstream: waveform, bitsPerSample: 5))) view.state?.updated(transition: Transition(animation: .curve(duration: 0.3, curve: .spring))) } }) diff --git a/submodules/TelegramUI/Components/VideoMessageCameraScreen/BUILD b/submodules/TelegramUI/Components/VideoMessageCameraScreen/BUILD new file mode 100644 index 0000000000..c872752a69 --- /dev/null +++ b/submodules/TelegramUI/Components/VideoMessageCameraScreen/BUILD @@ -0,0 +1,45 @@ +load("@build_bazel_rules_swift//swift:swift.bzl", "swift_library") + +swift_library( + name = "VideoMessageCameraScreen", + module_name = "VideoMessageCameraScreen", + srcs = glob([ + "Sources/**/*.swift", + ]), + copts = [ + "-warnings-as-errors", + ], + deps = [ + "//submodules/AsyncDisplayKit", 
+ "//submodules/Display", + "//submodules/Postbox", + "//submodules/TelegramCore", + "//submodules/SSignalKit/SwiftSignalKit", + "//submodules/ComponentFlow", + "//submodules/Components/ViewControllerComponent", + "//submodules/Components/ComponentDisplayAdapters", + "//submodules/TelegramPresentationData", + "//submodules/AccountContext", + "//submodules/AppBundle", + "//submodules/TelegramStringFormatting", + "//submodules/PresentationDataUtils", + "//submodules/MediaResources", + "//submodules/LocalMediaResources", + "//submodules/ImageCompression", + "//submodules/Camera", + "//submodules/Components/MultilineTextComponent", + "//submodules/Components/BlurredBackgroundComponent", + "//submodules/Components/BundleIconComponent:BundleIconComponent", + "//submodules/TelegramUI/Components/ButtonComponent", + "//submodules/TelegramUI/Components/PlainButtonComponent", + "//submodules/TelegramUI/Components/CameraButtonComponent", + "//submodules/TooltipUI", + "//submodules/TelegramNotices", + "//submodules/DeviceAccess", + "//submodules/TelegramUI/Components/MediaEditor", + "//submodules/LegacyMediaPickerUI", + ], + visibility = [ + "//visibility:public", + ], +) diff --git a/submodules/TelegramUI/Components/VideoMessageCameraScreen/Sources/RecordingProgressView.swift b/submodules/TelegramUI/Components/VideoMessageCameraScreen/Sources/RecordingProgressView.swift new file mode 100644 index 0000000000..14dd8120fa --- /dev/null +++ b/submodules/TelegramUI/Components/VideoMessageCameraScreen/Sources/RecordingProgressView.swift @@ -0,0 +1,59 @@ +import Foundation +import UIKit +import Display + +private extension SimpleShapeLayer { + func animateStrokeStart(from: CGFloat, to: CGFloat, duration: Double, delay: Double = 0.0, timingFunction: String = CAMediaTimingFunctionName.easeInEaseOut.rawValue, removeOnCompletion: Bool = true, completion: ((Bool) -> ())? 
= nil) { + self.animate(from: NSNumber(value: Float(from)), to: NSNumber(value: Float(to)), keyPath: "strokeStart", timingFunction: timingFunction, duration: duration, delay: delay, removeOnCompletion: removeOnCompletion, completion: completion) + } + + func animateStrokeEnd(from: CGFloat, to: CGFloat, duration: Double, delay: Double = 0.0, timingFunction: String = CAMediaTimingFunctionName.easeInEaseOut.rawValue, removeOnCompletion: Bool = true, completion: ((Bool) -> ())? = nil) { + self.animate(from: NSNumber(value: Float(from)), to: NSNumber(value: Float(to)), keyPath: "strokeEnd", timingFunction: timingFunction, duration: duration, delay: delay, removeOnCompletion: removeOnCompletion, completion: completion) + } +} + +final class RecordingProgressView: UIView { + let shapeLayer = SimpleShapeLayer() + + var value: CGFloat = 0.0 { + didSet { + if abs(self.shapeLayer.strokeEnd - self.value) >= 0.01 { + if abs(oldValue - self.value) < 0.1 { + let previousStrokeEnd = self.shapeLayer.strokeEnd + self.shapeLayer.strokeEnd = self.value + self.shapeLayer.animateStrokeEnd(from: previousStrokeEnd, to: self.shapeLayer.strokeEnd, duration: abs(previousStrokeEnd - self.value) * 60.0, timingFunction: CAMediaTimingFunctionName.linear.rawValue) + } else { + self.shapeLayer.strokeEnd = self.value + self.shapeLayer.removeAllAnimations() + } + } + } + } + + override init(frame: CGRect) { + super.init(frame: frame) + + self.shapeLayer.fillColor = UIColor.clear.cgColor + self.shapeLayer.strokeColor = UIColor(white: 1.0, alpha: 0.6).cgColor + self.shapeLayer.lineWidth = 4.0 + self.shapeLayer.lineCap = .round + self.shapeLayer.transform = CATransform3DMakeRotation(-.pi / 2.0, 0.0, 0.0, 1.0) + self.shapeLayer.strokeEnd = 0.0 + + self.layer.addSublayer(self.shapeLayer) + } + + required public init?(coder: NSCoder) { + fatalError("init(coder:) has not been implemented") + } + + override func layoutSubviews() { + super.layoutSubviews() + + if self.shapeLayer.frame != self.bounds { + 
self.shapeLayer.frame = self.bounds + + self.shapeLayer.path = CGPath(ellipseIn: self.bounds.insetBy(dx: self.shapeLayer.lineWidth, dy: self.shapeLayer.lineWidth), transform: nil) + } + } +} diff --git a/submodules/TelegramUI/Components/VideoMessageCameraScreen/Sources/ResultPreviewView.swift b/submodules/TelegramUI/Components/VideoMessageCameraScreen/Sources/ResultPreviewView.swift new file mode 100644 index 0000000000..5e96dc1eb1 --- /dev/null +++ b/submodules/TelegramUI/Components/VideoMessageCameraScreen/Sources/ResultPreviewView.swift @@ -0,0 +1,128 @@ +import Foundation +import UIKit +import AVFoundation + +final class ResultPreviewView: UIView { + let composition: AVComposition + + let player: AVPlayer + let playerLayer: AVPlayerLayer + + var didPlayToEndTimeObserver: NSObjectProtocol? + + var trimRange: Range? { + didSet { + if let trimRange = self.trimRange { + self.player.currentItem?.forwardPlaybackEndTime = CMTime(seconds: trimRange.upperBound, preferredTimescale: CMTimeScale(1000)) + } else { + self.player.currentItem?.forwardPlaybackEndTime = .invalid + } + } + } + + var onLoop: () -> Void = {} + var isMuted = true { + didSet { + self.player.isMuted = self.isMuted + } + } + + init(composition: AVComposition) { + self.composition = composition + + self.player = AVPlayer(playerItem: AVPlayerItem(asset: composition)) + self.player.isMuted = true + + self.playerLayer = AVPlayerLayer(player: self.player) + + super.init(frame: .zero) + + self.layer.addSublayer(self.playerLayer) + + self.didPlayToEndTimeObserver = NotificationCenter.default.addObserver(forName: NSNotification.Name.AVPlayerItemDidPlayToEndTime, object: self.player.currentItem, queue: nil, using: { [weak self] notification in + guard let self else { + return + } + var start: Double = 0.0 + if let trimRange = self.trimRange { + start = trimRange.lowerBound + } + self.player.pause() + self.seek(to: start, andPlay: true) + + self.onLoop() + }) + + self.player.play() + } + + required public 
init?(coder: NSCoder) { + fatalError("init(coder:) has not been implemented") + } + + deinit { + if let didPlayToEndTimeObserver = self.didPlayToEndTimeObserver { + NotificationCenter.default.removeObserver(didPlayToEndTimeObserver) + } + } + + func updateTrimRange(start: Double, end: Double, updatedEnd: Bool, apply: Bool) { + if !apply { + self.player.pause() + } else { + self.trimRange = start.. CameraState { + return CameraState(position: position, recording: self.recording, duration: self.duration, isDualCameraEnabled: self.isDualCameraEnabled, isViewOnceEnabled: self.isViewOnceEnabled) + } + + func updatedRecording(_ recording: Recording) -> CameraState { + return CameraState(position: self.position, recording: recording, duration: self.duration, isDualCameraEnabled: self.isDualCameraEnabled, isViewOnceEnabled: self.isViewOnceEnabled) + } + + func updatedDuration(_ duration: Double) -> CameraState { + return CameraState(position: self.position, recording: self.recording, duration: duration, isDualCameraEnabled: self.isDualCameraEnabled, isViewOnceEnabled: self.isViewOnceEnabled) + } + + func updatedIsViewOnceEnabled(_ isViewOnceEnabled: Bool) -> CameraState { + return CameraState(position: self.position, recording: self.recording, duration: self.duration, isDualCameraEnabled: self.isDualCameraEnabled, isViewOnceEnabled: isViewOnceEnabled) + } +} + +struct PreviewState: Equatable { + let composition: AVComposition + let trimRange: Range? + let isMuted: Bool +} + +enum CameraScreenTransition { + case animateIn + case animateOut + case finishedAnimateIn +} + +private let viewOnceButtonTag = GenericComponentViewTag() + +private final class CameraScreenComponent: CombinedComponent { + typealias EnvironmentType = ViewControllerComponentContainer.Environment + + let context: AccountContext + let cameraState: CameraState + let isPreviewing: Bool + let isMuted: Bool + let getController: () -> VideoMessageCameraScreen? 
+ let present: (ViewController) -> Void + let push: (ViewController) -> Void + let startRecording: ActionSlot + let stopRecording: ActionSlot + let completion: ActionSlot + + init( + context: AccountContext, + cameraState: CameraState, + isPreviewing: Bool, + isMuted: Bool, + getController: @escaping () -> VideoMessageCameraScreen?, + present: @escaping (ViewController) -> Void, + push: @escaping (ViewController) -> Void, + startRecording: ActionSlot, + stopRecording: ActionSlot, + completion: ActionSlot + ) { + self.context = context + self.cameraState = cameraState + self.isPreviewing = isPreviewing + self.isMuted = isMuted + self.getController = getController + self.present = present + self.push = push + self.startRecording = startRecording + self.stopRecording = stopRecording + self.completion = completion + } + + static func ==(lhs: CameraScreenComponent, rhs: CameraScreenComponent) -> Bool { + if lhs.context !== rhs.context { + return false + } + if lhs.cameraState != rhs.cameraState { + return false + } + if lhs.isPreviewing != rhs.isPreviewing { + return false + } + if lhs.isMuted != rhs.isMuted { + return false + } + return true + } + + final class State: ComponentState { + enum ImageKey: Hashable { + case flip + case buttonBackground + } + private var cachedImages: [ImageKey: UIImage] = [:] + func image(_ key: ImageKey, theme: PresentationTheme) -> UIImage { + if let image = self.cachedImages[key] { + return image + } else { + var image: UIImage + switch key { + case .flip: + image = UIImage(bundleImageName: "Camera/VideoMessageFlip")!.withRenderingMode(.alwaysTemplate) + case .buttonBackground: + let innerSize = CGSize(width: 40.0, height: 40.0) + image = generateFilledCircleImage(diameter: innerSize.width, color: theme.rootController.navigationBar.opaqueBackgroundColor, strokeColor: theme.chat.inputPanel.panelSeparatorColor, strokeWidth: 0.5, backgroundColor: nil)! 
+ } + cachedImages[key] = image + return image + } + } + + private let context: AccountContext + private let present: (ViewController) -> Void + private let startRecording: ActionSlot + private let stopRecording: ActionSlot + private let completion: ActionSlot + private let getController: () -> VideoMessageCameraScreen? + + private var resultDisposable = MetaDisposable() + + var cameraState: CameraState? + + private let hapticFeedback = HapticFeedback() + + init( + context: AccountContext, + present: @escaping (ViewController) -> Void, + startRecording: ActionSlot, + stopRecording: ActionSlot, + completion: ActionSlot, + getController: @escaping () -> VideoMessageCameraScreen? = { + return nil + } + ) { + self.context = context + self.present = present + self.startRecording = startRecording + self.stopRecording = stopRecording + self.completion = completion + self.getController = getController + + super.init() + + self.startRecording.connect({ [weak self] _ in + if let self, let controller = getController() { + self.startVideoRecording(pressing: !controller.scheduledLock) + controller.scheduledLock = false + } + }) + self.stopRecording.connect({ [weak self] _ in + self?.stopVideoRecording() + }) + } + + deinit { + self.resultDisposable.dispose() + } + + func toggleViewOnce() { + guard let controller = self.getController() else { + return + } + controller.updateCameraState({ $0.updatedIsViewOnceEnabled(!$0.isViewOnceEnabled) }, transition: .easeInOut(duration: 0.2)) + } + + private var lastFlipTimestamp: Double? 
+ func togglePosition() { + guard let controller = self.getController(), let camera = controller.camera else { + return + } + let currentTimestamp = CACurrentMediaTime() + if let lastFlipTimestamp = self.lastFlipTimestamp, currentTimestamp - lastFlipTimestamp < 1.0 { + return + } + self.lastFlipTimestamp = currentTimestamp + + camera.togglePosition() + + self.hapticFeedback.impact(.veryLight) + } + + func startVideoRecording(pressing: Bool) { + guard let controller = self.getController(), let camera = controller.camera else { + return + } + guard case .none = controller.cameraState.recording else { + return + } + + let initialDuration = controller.node.previewState?.composition.duration.seconds ?? 0.0 + controller.updatePreviewState({ _ in return nil}, transition: .spring(duration: 0.4)) + + controller.node.dismissAllTooltips() + controller.updateCameraState({ $0.updatedRecording(pressing ? .holding : .handsFree).updatedDuration(initialDuration) }, transition: .spring(duration: 0.4)) + + let isFirstRecording = initialDuration.isZero + controller.node.resumeCameraCapture() + + controller.node.withReadyCamera(isFirstTime: !controller.node.cameraIsActive) { + self.resultDisposable.set((camera.startRecording() + |> deliverOnMainQueue).start(next: { [weak self] recordingData in + let duration = initialDuration + recordingData.duration + if let self, let controller = self.getController() { + controller.updateCameraState({ $0.updatedDuration(duration) }, transition: .easeInOut(duration: 0.1)) + if recordingData.duration > 59.0 { + self.stopVideoRecording() + } + if isFirstRecording { + controller.node.setupLiveUpload(filePath: recordingData.filePath) + } + } + })) + } + + if initialDuration > 0.0 { + controller.onResume() + } + } + + func stopVideoRecording() { + guard let controller = self.getController(), let camera = controller.camera else { + return + } + + self.resultDisposable.set((camera.stopRecording() + |> deliverOnMainQueue).start(next: { [weak self] result in + 
if let self, let controller = self.getController(), case let .finished(mainResult, _, duration, _, _) = result { + self.completion.invoke( + .video(VideoMessageCameraScreen.CaptureResult.Video( + videoPath: mainResult.path, + dimensions: PixelDimensions(mainResult.dimensions), + duration: duration, + thumbnail: mainResult.thumbnail + )) + ) + controller.updateCameraState({ $0.updatedRecording(.none) }, transition: .spring(duration: 0.4)) + } + })) + } + + func lockVideoRecording() { + guard let controller = self.getController() else { + return + } + controller.updateCameraState({ $0.updatedRecording(.handsFree) }, transition: .spring(duration: 0.4)) + } + + func updateZoom(fraction: CGFloat) { + guard let camera = self.getController()?.camera else { + return + } + camera.setZoomLevel(fraction) + } + } + + func makeState() -> State { + return State(context: self.context, present: self.present, startRecording: self.startRecording, stopRecording: self.stopRecording, completion: self.completion, getController: self.getController) + } + + static var body: Body { + let flipButton = Child(CameraButton.self) + + let viewOnceButton = Child(PlainButtonComponent.self) + let recordMoreButton = Child(PlainButtonComponent.self) + + return { context in + let environment = context.environment[ViewControllerComponentContainer.Environment.self].value + let component = context.component + let state = context.state + let availableSize = context.availableSize + + state.cameraState = component.cameraState + + var viewOnceOffset: CGFloat = 102.0 + + var showViewOnce = false + var showRecordMore = false + if component.isPreviewing { + showViewOnce = true + showRecordMore = true + + viewOnceOffset = 67.0 + } else if case .handsFree = component.cameraState.recording { + showViewOnce = true + } + + if !component.isPreviewing { + let flipButton = flipButton.update( + component: CameraButton( + content: AnyComponentWithIdentity( + id: "flip", + component: AnyComponent( + Image( + image: 
state.image(.flip, theme: environment.theme), + tintColor: environment.theme.list.itemAccentColor, + size: CGSize(width: 30.0, height: 30.0) + ) + ) + ), + minSize: CGSize(width: 44.0, height: 44.0), + action: { [weak state] in + if let state { + state.togglePosition() + } + } + ), + availableSize: availableSize, + transition: context.transition + ) + context.add(flipButton + .position(CGPoint(x: flipButton.size.width / 2.0 + 8.0, y: availableSize.height - flipButton.size.height / 2.0 - 8.0)) + .appear(.default(scale: true, alpha: true)) + .disappear(.default(scale: true, alpha: true)) + ) + } + + if showViewOnce { + let viewOnceButton = viewOnceButton.update( + component: PlainButtonComponent( + content: AnyComponent( + ZStack([ + AnyComponentWithIdentity( + id: "background", + component: AnyComponent( + Image( + image: state.image(.buttonBackground, theme: environment.theme), + size: CGSize(width: 40.0, height: 40.0) + ) + ) + ), + AnyComponentWithIdentity( + id: "icon", + component: AnyComponent( + BundleIconComponent( + name: component.cameraState.isViewOnceEnabled ? 
"Media Gallery/ViewOnceEnabled" : "Media Gallery/ViewOnce", + tintColor: environment.theme.list.itemAccentColor + ) + ) + ) + ]) + ), + effectAlignment: .center, + action: { [weak state] in + if let state { + state.toggleViewOnce() + } + }, + animateAlpha: false, + tag: viewOnceButtonTag + ), + availableSize: availableSize, + transition: context.transition + ) + context.add(viewOnceButton + .position(CGPoint(x: availableSize.width - viewOnceButton.size.width / 2.0 - 2.0 - UIScreenPixel, y: availableSize.height - viewOnceButton.size.height / 2.0 - 8.0 - viewOnceOffset)) + .appear(.default(scale: true, alpha: true)) + .disappear(.default(scale: true, alpha: true)) + ) + } + + if showRecordMore { + let recordMoreButton = recordMoreButton.update( + component: PlainButtonComponent( + content: AnyComponent( + ZStack([ + AnyComponentWithIdentity( + id: "background", + component: AnyComponent( + Image( + image: state.image(.buttonBackground, theme: environment.theme), + size: CGSize(width: 40.0, height: 40.0) + ) + ) + ), + AnyComponentWithIdentity( + id: "icon", + component: AnyComponent( + BundleIconComponent( + name: "Chat/Input/Text/IconVideo", + tintColor: environment.theme.list.itemAccentColor + ) + ) + ) + ]) + ), + effectAlignment: .center, + action: { [weak state] in + state?.startVideoRecording(pressing: false) + } + ), + availableSize: availableSize, + transition: context.transition + ) + context.add(recordMoreButton + .position(CGPoint(x: availableSize.width - recordMoreButton.size.width / 2.0 - 2.0 - UIScreenPixel, y: availableSize.height - recordMoreButton.size.height / 2.0 - 22.0)) + .appear(.default(scale: true, alpha: true)) + .disappear(.default(scale: true, alpha: true)) + ) + } + +// var isVideoRecording = false +// if case .video = component.cameraState.mode { +// isVideoRecording = true +// } else if component.cameraState.recording != .none { +// isVideoRecording = true +// } + + + return availableSize + } + } +} + +public class 
VideoMessageCameraScreen: ViewController { + public enum CaptureResult { + public struct Video { + public let videoPath: String + public let dimensions: PixelDimensions + public let duration: Double + public let thumbnail: UIImage + } + + case video(Video) + } + + fileprivate final class Node: ViewControllerTracingNode, UIGestureRecognizerDelegate { + private weak var controller: VideoMessageCameraScreen? + private let context: AccountContext + fileprivate var camera: Camera? + private let updateState: ActionSlot + + fileprivate var liveUploadInterface: LegacyLiveUploadInterface? + private var currentLiveUploadPath: String? + fileprivate var currentLiveUploadData: LegacyLiveUploadInterfaceResult? + + fileprivate let backgroundView: UIVisualEffectView + fileprivate let containerView: UIView + fileprivate let componentHost: ComponentView + fileprivate let previewContainerView: UIView + + fileprivate var mainPreviewView: CameraSimplePreviewView + fileprivate var additionalPreviewView: CameraSimplePreviewView + private var progressView: RecordingProgressView + + private var resultPreviewView: ResultPreviewView? + + private var cameraStateDisposable: Disposable? + private var changingPositionDisposable: Disposable? + + private let idleTimerExtensionDisposable = MetaDisposable() + + fileprivate var cameraIsActive = true { + didSet { + if self.cameraIsActive { + self.idleTimerExtensionDisposable.set(self.context.sharedContext.applicationBindings.pushIdleTimerExtension()) + } else { + self.idleTimerExtensionDisposable.set(nil) + } + } + } + + private var presentationData: PresentationData + private var validLayout: ContainerViewLayout? 
+ + fileprivate var didAppear: () -> Void = {} + + fileprivate let startRecording = ActionSlot() + fileprivate let stopRecording = ActionSlot() + private let completion = ActionSlot() + + var cameraState: CameraState { + didSet { + if self.cameraState.isViewOnceEnabled != oldValue.isViewOnceEnabled { + if self.cameraState.isViewOnceEnabled { + let presentationData = self.context.sharedContext.currentPresentationData.with { $0 } + self.displayViewOnceTooltip(text: presentationData.strings.Chat_PlayVideoMessageOnceTooltip, hasIcon: false) + } else { + self.dismissAllTooltips() + } + } + } + } + var previewState: PreviewState? { + didSet { + self.previewStatePromise.set(.single(self.previewState)) + self.resultPreviewView?.isMuted = self.previewState?.isMuted ?? true + } + } + var previewStatePromise = Promise() + + var transitioningToPreview = false + + init(controller: VideoMessageCameraScreen) { + self.controller = controller + self.context = controller.context + self.updateState = ActionSlot() + + self.presentationData = controller.updatedPresentationData?.initial ?? self.context.sharedContext.currentPresentationData.with { $0 } + + self.backgroundView = UIVisualEffectView(effect: UIBlurEffect(style: self.presentationData.theme.overallDarkAppearance ? 
.dark : .light)) + + self.containerView = UIView() + self.containerView.clipsToBounds = true + + self.componentHost = ComponentView() + + self.previewContainerView = UIView() + self.previewContainerView.clipsToBounds = true + + let isDualCameraEnabled = Camera.isDualCameraSupported + let isFrontPosition = "".isEmpty + + self.mainPreviewView = CameraSimplePreviewView(frame: .zero, main: true) + self.additionalPreviewView = CameraSimplePreviewView(frame: .zero, main: false) + + self.progressView = RecordingProgressView(frame: .zero) + + if isDualCameraEnabled { + self.mainPreviewView.resetPlaceholder(front: false) + self.additionalPreviewView.resetPlaceholder(front: true) + } else { + self.mainPreviewView.resetPlaceholder(front: isFrontPosition) + } + + self.cameraState = CameraState( + position: isFrontPosition ? .front : .back, + recording: .none, + duration: 0.0, + isDualCameraEnabled: isDualCameraEnabled, + isViewOnceEnabled: false + ) + + self.previewState = nil + + super.init() + + self.backgroundColor = .clear + + self.view.addSubview(self.backgroundView) + self.view.addSubview(self.containerView) + + self.containerView.addSubview(self.previewContainerView) + + self.previewContainerView.addSubview(self.mainPreviewView) + self.previewContainerView.addSubview(self.additionalPreviewView) + self.previewContainerView.addSubview(self.progressView) + + self.completion.connect { [weak self] result in + if let self { + self.addCaptureResult(result) + } + } + + self.mainPreviewView.removePlaceholder(delay: 0.0) + self.withReadyCamera(isFirstTime: true, { + self.additionalPreviewView.removePlaceholder(delay: 0.35) + self.startRecording.invoke(Void()) + }) + + self.idleTimerExtensionDisposable.set(self.context.sharedContext.applicationBindings.pushIdleTimerExtension()) + + self.setupCamera() + } + + deinit { + self.cameraStateDisposable?.dispose() + self.changingPositionDisposable?.dispose() + self.idleTimerExtensionDisposable.dispose() + } + + func 
withReadyCamera(isFirstTime: Bool = false, _ f: @escaping () -> Void) { + if #available(iOS 13.0, *) { + let _ = (self.additionalPreviewView.isPreviewing + |> filter { $0 } + |> take(1)).startStandalone(next: { _ in + f() + }) + } else { + Queue.mainQueue().after(0.35) { + f() + } + } + } + + func setupLiveUpload(filePath: String) { + guard let controller = self.controller, controller.allowLiveUpload, self.liveUploadInterface == nil else { + return + } + let liveUploadInterface = LegacyLiveUploadInterface(context: self.context) + Queue.mainQueue().after(1.5, { + liveUploadInterface.setup(withFileURL: URL(fileURLWithPath: filePath)) + }) + self.liveUploadInterface = liveUploadInterface + } + + override func didLoad() { + super.didLoad() + + self.view.disablesInteractiveModalDismiss = true + self.view.disablesInteractiveKeyboardGestureRecognizer = true + + let pinchGestureRecognizer = UIPinchGestureRecognizer(target: self, action: #selector(self.handlePinch(_:))) + self.view.addGestureRecognizer(pinchGestureRecognizer) + } + + private func setupCamera() { + guard self.camera == nil else { + return + } + + let camera = Camera( + configuration: Camera.Configuration( + preset: .hd1920x1080, + position: self.cameraState.position, + isDualEnabled: self.cameraState.isDualCameraEnabled, + audio: true, + photo: true, + metadata: false, + isRoundVideo: true + ), + previewView: self.mainPreviewView, + secondaryPreviewView: self.additionalPreviewView + ) + + self.cameraStateDisposable = (camera.position + |> deliverOnMainQueue).start(next: { [weak self] position in + guard let self else { + return + } + self.cameraState = self.cameraState.updatedPosition(position) + self.requestUpdateLayout(transition: .easeInOut(duration: 0.2)) + }) + + self.changingPositionDisposable = (camera.modeChange + |> deliverOnMainQueue).start(next: { [weak self] modeChange in + if let self { + let _ = self + } + }) + + camera.focus(at: CGPoint(x: 0.5, y: 0.5), autoFocus: true) + camera.startCapture() 
+ + self.camera = camera + } + + @objc private func handlePinch(_ gestureRecognizer: UIPinchGestureRecognizer) { + guard let camera = self.camera else { + return + } + switch gestureRecognizer.state { + case .changed: + let scale = gestureRecognizer.scale + camera.setZoomDelta(scale) + gestureRecognizer.scale = 1.0 + case .ended, .cancelled: + camera.rampZoom(1.0, rate: 8.0) + default: + break + } + } + + private var animatingIn = false + func animateIn() { + self.animatingIn = true + + self.backgroundView.alpha = 0.0 + UIView.animate(withDuration: 0.4, animations: { + self.backgroundView.alpha = 1.0 + }) + + let targetPosition = self.previewContainerView.center + self.previewContainerView.center = CGPoint(x: targetPosition.x, y: self.frame.height + self.previewContainerView.frame.height / 2.0) + + UIView.animate(withDuration: 0.5, delay: 0.0, usingSpringWithDamping: 0.8, initialSpringVelocity: 0.2, animations: { + self.previewContainerView.center = targetPosition + }, completion: { _ in + self.animatingIn = false + }) + + if let view = self.componentHost.view { + view.layer.animateAlpha(from: 0.1, to: 1.0, duration: 0.25) + } + } + + func animateOut(completion: @escaping () -> Void) { + self.camera?.stopCapture(invalidate: true) + + UIView.animate(withDuration: 0.25, animations: { + self.backgroundView.alpha = 0.0 + }, completion: { _ in + completion() + }) + + self.componentHost.view?.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.15, removeOnCompletion: false) + self.previewContainerView.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.25, removeOnCompletion: false) + } + + func pauseCameraCapture() { + self.mainPreviewView.isEnabled = false + self.additionalPreviewView.isEnabled = false + self.camera?.stopCapture() + + self.cameraIsActive = false + self.requestUpdateLayout(transition: .immediate) + } + + func resumeCameraCapture() { + if !self.mainPreviewView.isEnabled { + self.mainPreviewView.isEnabled = true + self.additionalPreviewView.isEnabled = true 
+ self.camera?.startCapture() + + self.cameraIsActive = true + self.requestUpdateLayout(transition: .immediate) + } + } + + fileprivate var results: [VideoMessageCameraScreen.CaptureResult] = [] + fileprivate var resultsPipe = ValuePipe() + + func addCaptureResult(_ result: VideoMessageCameraScreen.CaptureResult) { + guard let controller = self.controller else { + return + } + + if self.results.isEmpty { + if let liveUploadData = self.liveUploadInterface?.fileUpdated(true) as? LegacyLiveUploadInterfaceResult { + self.currentLiveUploadData = liveUploadData + } + } else { + self.currentLiveUploadData = nil + } + + self.pauseCameraCapture() + + self.results.append(result) + self.resultsPipe.putNext(result) + + self.transitioningToPreview = false + + let composition = composition(with: self.results) + controller.updatePreviewState({ _ in + return PreviewState(composition: composition, trimRange: nil, isMuted: true) + }, transition: .spring(duration: 0.4)) + } + + private func debugSaveResult(path: String) { + guard let data = try? Data(contentsOf: URL(fileURLWithPath: path), options: .mappedIfSafe) else { + return + } + let id = Int64.random(in: Int64.min ... 
Int64.max) + let fileResource = LocalFileReferenceMediaResource(localFilePath: path, randomId: id) + + let file = TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.LocalFile, id: id), partialReference: nil, resource: fileResource, previewRepresentations: [], videoThumbnails: [], immediateThumbnailData: nil, mimeType: "video/mp4", size: Int64(data.count), attributes: [.FileName(fileName: "video.mp4")]) + let message: EnqueueMessage = .message(text: "", attributes: [], inlineStickers: [:], mediaReference: .standalone(media: file), threadId: nil, replyToMessageId: nil, replyToStoryId: nil, localGroupingKey: nil, correlationId: nil, bubbleUpEmojiOrStickersets: []) + + let _ = enqueueMessages(account: self.context.engine.account, peerId: self.context.engine.account.peerId, messages: [message]).start() + } + + override func hitTest(_ point: CGPoint, with event: UIEvent?) -> UIView? { + let result = super.hitTest(point, with: event) + if let controller = self.controller, point.y > self.frame.height - controller.inputPanelFrame.height - 34.0 { + return nil + } + return result + } + + fileprivate func maybePresentViewOnceTooltip() { + let presentationData = self.context.sharedContext.currentPresentationData.with { $0 } + let _ = (ApplicationSpecificNotice.getVideoMessagesPlayOnceSuggestion(accountManager: context.sharedContext.accountManager) + |> deliverOnMainQueue).startStandalone(next: { [weak self] counter in + guard let self else { + return + } + if counter >= 3 { + return + } + + Queue.mainQueue().after(0.3) { + self.displayViewOnceTooltip(text: presentationData.strings.Chat_TapToPlayVideoMessageOnceTooltip, hasIcon: true) + } + + let _ = ApplicationSpecificNotice.incrementVideoMessagesPlayOnceSuggestion(accountManager: context.sharedContext.accountManager).startStandalone() + }) + } + + private func displayViewOnceTooltip(text: String, hasIcon: Bool) { + guard let controller = self.controller, let sourceView = self.componentHost.findTaggedView(tag: 
viewOnceButtonTag) else { + return + } + + self.dismissAllTooltips() + + let absoluteFrame = sourceView.convert(sourceView.bounds, to: self.view) + let location = CGRect(origin: CGPoint(x: absoluteFrame.midX - 20.0, y: absoluteFrame.midY), size: CGSize()) + + let tooltipController = TooltipScreen( + account: context.account, + sharedContext: context.sharedContext, + text: .markdown(text: text), + balancedTextLayout: true, + constrainWidth: 240.0, + style: .customBlur(UIColor(rgb: 0x18181a), 0.0), + arrowStyle: .small, + icon: hasIcon ? .animation(name: "anim_autoremove_on", delay: 0.1, tintColor: nil) : nil, + location: .point(location, .right), + displayDuration: .default, + inset: 8.0, + cornerRadius: 8.0, + shouldDismissOnTouch: { _, _ in + return .ignore + } + ) + controller.present(tooltipController, in: .window(.root)) + } + + fileprivate func dismissAllTooltips() { + guard let controller = self.controller else { + return + } + controller.window?.forEachController({ controller in + if let controller = controller as? TooltipScreen { + controller.dismiss() + } + }) + controller.forEachController({ controller in + if let controller = controller as? TooltipScreen { + controller.dismiss() + } + return true + }) + } + + func updateTrimRange(start: Double, end: Double, updatedEnd: Bool, apply: Bool) { + guard let controller = self.controller else { + return + } + self.resultPreviewView?.updateTrimRange(start: start, end: end, updatedEnd: updatedEnd, apply: apply) + controller.updatePreviewState({ state in + if let state { + return PreviewState(composition: state.composition, trimRange: start..)? + private let inputPanelFrame: CGRect + fileprivate var allowLiveUpload: Bool + + fileprivate let completion: (EnqueueMessage?) -> Void + + private var audioSessionDisposable: Disposable? + + private let hapticFeedback = HapticFeedback() + + private var validLayout: ContainerViewLayout? + + fileprivate var camera: Camera? 
{ + return self.node.camera + } + + fileprivate var cameraState: CameraState { + return self.node.cameraState + } + + fileprivate func updateCameraState(_ f: (CameraState) -> CameraState, transition: Transition) { + self.node.cameraState = f(self.node.cameraState) + self.node.requestUpdateLayout(transition: transition) + self.durationValue.set(self.cameraState.duration) + } + + fileprivate func updatePreviewState(_ f: (PreviewState?) -> PreviewState?, transition: Transition) { + self.node.previewState = f(self.node.previewState) + self.node.requestUpdateLayout(transition: transition) + } + + public final class RecordingStatus { + public let micLevel: Signal + public let duration: Signal + + public init(micLevel: Signal, duration: Signal) { + self.micLevel = micLevel + self.duration = duration + } + } + + private let micLevelValue = ValuePromise(0.0) + private let durationValue = ValuePromise(0.0) + public let recordingStatus: RecordingStatus + + public var onDismiss: (Bool) -> Void = { _ in + } + + public var onStop: () -> Void = { + } + + public var onResume: () -> Void = { + } + + public struct RecordedVideoData { + public let duration: Double + public let frames: [UIImage] + public let framesUpdateTimestamp: Double + public let trimRange: Range? 
+ } + + private var currentResults: Signal<[VideoMessageCameraScreen.CaptureResult], NoError> { + var results: Signal<[VideoMessageCameraScreen.CaptureResult], NoError> = .single(self.node.results) + if self.waitingForNextResult { + results = results + |> mapToSignal { initial in + return self.node.resultsPipe.signal() + |> take(1) + |> map { next in + var updatedResults = initial + updatedResults.append(next) + return updatedResults + } + } + } + self.waitingForNextResult = false + return results + } + + public func takenRecordedData() -> Signal { + let previewState = self.node.previewStatePromise.get() + let count = 12 + + let initialPlaceholder: Signal + if let firstResult = self.node.results.first { + if case let .video(video) = firstResult { + initialPlaceholder = .single(video.thumbnail) + } else { + initialPlaceholder = .single(nil) + } + } else { + initialPlaceholder = self.camera?.transitionImage ?? .single(nil) + } + + let immediateResult: Signal = initialPlaceholder + |> take(1) + |> mapToSignal { initialPlaceholder in + return videoFrames(asset: nil, count: count, initialPlaceholder: initialPlaceholder) + |> map { framesAndUpdateTimestamp in + return RecordedVideoData( + duration: 1.0, + frames: framesAndUpdateTimestamp.0, + framesUpdateTimestamp: framesAndUpdateTimestamp.1, + trimRange: nil + ) + } + } + + return immediateResult + |> mapToSignal { immediateResult in + return .single(immediateResult) + |> then( + self.currentResults + |> take(1) + |> mapToSignal { results in + var totalDuration: Double = 0.0 + for result in results { + if case let .video(video) = result { + totalDuration += video.duration + } + } + let composition = composition(with: results) + return combineLatest( + queue: Queue.mainQueue(), + videoFrames(asset: composition, count: count, initialTimestamp: immediateResult?.framesUpdateTimestamp), + previewState + ) + |> map { framesAndUpdateTimestamp, previewState in + return RecordedVideoData( + duration: totalDuration, + frames: 
framesAndUpdateTimestamp.0, + framesUpdateTimestamp: framesAndUpdateTimestamp.1, + trimRange: previewState?.trimRange + ) + } + } + ) + } + } + + public init( + context: AccountContext, + updatedPresentationData: (initial: PresentationData, signal: Signal)?, + inputPanelFrame: CGRect, + allowLiveUpload: Bool, + completion: @escaping (EnqueueMessage?) -> Void + ) { + self.context = context + self.updatedPresentationData = updatedPresentationData + self.inputPanelFrame = inputPanelFrame + self.allowLiveUpload = allowLiveUpload + self.completion = completion + + self.recordingStatus = RecordingStatus(micLevel: self.micLevelValue.get(), duration: self.durationValue.get()) + + super.init(navigationBarPresentationData: nil) + + self.statusBar.statusBarStyle = .Ignore + self.supportedOrientations = ViewControllerSupportedOrientations(regularSize: .all, compactSize: .portrait) + + self.navigationPresentation = .flatModal + + self.requestAudioSession() + } + + required public init(coder: NSCoder) { + preconditionFailure() + } + + deinit { + self.audioSessionDisposable?.dispose() + if #available(iOS 13.0, *) { + try? 
AVAudioSession.sharedInstance().setAllowHapticsAndSystemSoundsDuringRecording(false) + } + } + + override public func loadDisplayNode() { + self.displayNode = Node(controller: self) + + super.displayNodeDidLoad() + } + + public func sendVideoRecording() { + if case .none = self.cameraState.recording, self.node.results.isEmpty { + self.completion(nil) + return + } + + if case .none = self.cameraState.recording { + } else { + self.waitingForNextResult = true + self.node.stopRecording.invoke(Void()) + } + + let _ = (self.currentResults + |> take(1) + |> deliverOnMainQueue).startStandalone(next: { [weak self] results in + guard let self, let firstResult = results.first, case let .video(video) = firstResult else { + return + } + + var videoPaths: [String] = [] + var duration: Double = 0.0 + + var hasAdjustments = results.count > 1 + for result in results { + if case let .video(video) = result { + videoPaths.append(video.videoPath) + duration += video.duration + } + } + + let finalDuration: Double + if let trimRange = self.node.previewState?.trimRange { + finalDuration = trimRange.upperBound - trimRange.lowerBound + if finalDuration != duration { + hasAdjustments = true + } + } else { + finalDuration = duration + } + + let values = MediaEditorValues(peerId: self.context.account.peerId, originalDimensions: PixelDimensions(width: 400, height: 400), cropOffset: .zero, cropRect: CGRect(origin: .zero, size: CGSize(width: 400.0, height: 400.0)), cropScale: 1.0, cropRotation: 0.0, cropMirroring: false, cropOrientation: nil, gradientColors: nil, videoTrimRange: self.node.previewState?.trimRange, videoIsMuted: false, videoIsFullHd: false, videoIsMirrored: false, videoVolume: nil, additionalVideoPath: nil, additionalVideoIsDual: false, additionalVideoPosition: nil, additionalVideoScale: nil, additionalVideoRotation: nil, additionalVideoPositionChanges: [], additionalVideoTrimRange: nil, additionalVideoOffset: nil, additionalVideoVolume: nil, nightTheme: false, drawing: nil, 
entities: [], toolValues: [:], audioTrack: nil, audioTrackTrimRange: nil, audioTrackOffset: nil, audioTrackVolume: nil, audioTrackSamples: nil, qualityPreset: .videoMessage) + + var resourceAdjustments: VideoMediaResourceAdjustments? = nil + if let valuesData = try? JSONEncoder().encode(values) { + let data = MemoryBuffer(data: valuesData) + let digest = MemoryBuffer(data: data.md5Digest()) + resourceAdjustments = VideoMediaResourceAdjustments(data: data, digest: digest, isStory: false) + } + + let resource: TelegramMediaResource + let liveUploadData: LegacyLiveUploadInterfaceResult? + if let current = self.node.currentLiveUploadData { + liveUploadData = current + } else { + liveUploadData = self.node.liveUploadInterface?.fileUpdated(true) as? LegacyLiveUploadInterfaceResult + } + if !hasAdjustments, let liveUploadData, let data = try? Data(contentsOf: URL(fileURLWithPath: video.videoPath)) { + resource = LocalFileMediaResource(fileId: liveUploadData.id) + self.context.account.postbox.mediaBox.storeResourceData(resource.id, data: data, synchronous: true) + } else { + resource = LocalFileVideoMediaResource(randomId: Int64.random(in: Int64.min ... Int64.max), paths: videoPaths, adjustments: resourceAdjustments) + } + + var previewRepresentations: [TelegramMediaImageRepresentation] = [] + + let thumbnailResource = LocalFileMediaResource(fileId: Int64.random(in: Int64.min ... 
Int64.max)) + let thumbnailSize = video.dimensions.cgSize.aspectFitted(CGSize(width: 320.0, height: 320.0)) + let thumbnailImage = scaleImageToPixelSize(image: video.thumbnail, size: thumbnailSize) + if let thumbnailData = thumbnailImage?.jpegData(compressionQuality: 0.4) { + self.context.account.postbox.mediaBox.storeResourceData(thumbnailResource.id, data: thumbnailData) + previewRepresentations.append(TelegramMediaImageRepresentation(dimensions: PixelDimensions(thumbnailSize), resource: thumbnailResource, progressiveSizes: [], immediateThumbnailData: nil, hasVideo: false, isPersonal: false)) + } + + let tempFile = TempBox.shared.tempFile(fileName: "file") + defer { + TempBox.shared.dispose(tempFile) + } + if let data = compressImageToJPEG(video.thumbnail, quality: 0.7, tempFilePath: tempFile.path) { + context.account.postbox.mediaBox.storeCachedResourceRepresentation(resource, representation: CachedVideoFirstFrameRepresentation(), data: data) + } + + let media = TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.LocalFile, id: Int64.random(in: Int64.min ... 
Int64.max)), partialReference: nil, resource: resource, previewRepresentations: previewRepresentations, videoThumbnails: [], immediateThumbnailData: nil, mimeType: "video/mp4", size: nil, attributes: [.FileName(fileName: "video.mp4"), .Video(duration: finalDuration, size: video.dimensions, flags: [.instantRoundVideo], preloadSize: nil)]) + + + var attributes: [MessageAttribute] = [] + if self.cameraState.isViewOnceEnabled { + attributes.append(AutoremoveTimeoutMessageAttribute(timeout: viewOnceTimeout, countdownBeginTime: nil)) + } + + self.completion(.message( + text: "", + attributes: attributes, + inlineStickers: [:], + mediaReference: .standalone(media: media), + threadId: nil, + replyToMessageId: nil, + replyToStoryId: nil, + localGroupingKey: nil, + correlationId: nil, + bubbleUpEmojiOrStickersets: [] + )) + }) + } + + private var waitingForNextResult = false + public func stopVideoRecording() -> Bool { + self.waitingForNextResult = true + self.node.transitioningToPreview = true + self.node.requestUpdateLayout(transition: .spring(duration: 0.4)) + + self.node.stopRecording.invoke(Void()) + + return true + } + + fileprivate var scheduledLock = false + public func lockVideoRecording() { + if case .none = self.cameraState.recording { + self.scheduledLock = true + } else { + self.updateCameraState({ $0.updatedRecording(.handsFree) }, transition: .spring(duration: 0.4)) + } + + self.node.maybePresentViewOnceTooltip() + } + + public func discardVideo() { + self.requestDismiss(animated: true) + } + + public func extractVideoSnapshot() -> UIView? 
{ + if let snapshotView = self.node.previewContainerView.snapshotView(afterScreenUpdates: false) { + snapshotView.frame = self.node.previewContainerView.convert(self.node.previewContainerView.bounds, to: nil) + return snapshotView + } + return nil + } + + public func hideVideoSnapshot() { + self.node.previewContainerView.alpha = 0.02 + } + + public func updateTrimRange(start: Double, end: Double, updatedEnd: Bool, apply: Bool) { + self.node.updateTrimRange(start: start, end: end, updatedEnd: updatedEnd, apply: apply) + } + + private func requestAudioSession() { + self.audioSessionDisposable = self.context.sharedContext.mediaManager.audioSession.push(audioSessionType: .recordWithOthers, activate: { _ in + if #available(iOS 13.0, *) { + try? AVAudioSession.sharedInstance().setAllowHapticsAndSystemSoundsDuringRecording(true) + } + }, deactivate: { _ in + return .single(Void()) + }) + } + + private var isDismissed = false + fileprivate func requestDismiss(animated: Bool) { + guard !self.isDismissed else { + return + } + + self.node.dismissAllTooltips() + + self.node.camera?.stopCapture(invalidate: true) + self.isDismissed = true + if animated { + self.node.animateOut(completion: { + self.dismiss(animated: false) + }) + } else { + self.dismiss(animated: false) + } + } + + override public func containerLayoutUpdated(_ layout: ContainerViewLayout, transition: ContainedViewLayoutTransition) { + self.validLayout = layout + + super.containerLayoutUpdated(layout, transition: transition) + + if !self.isDismissed { + (self.displayNode as! 
Node).containerLayoutUpdated(layout: layout, transition: Transition(transition)) + } + } +} + +private func composition(with results: [VideoMessageCameraScreen.CaptureResult]) -> AVComposition { + let composition = AVMutableComposition() + var currentTime = CMTime.zero + + for result in results { + guard case let .video(video) = result else { + continue + } + let asset = AVAsset(url: URL(fileURLWithPath: video.videoPath)) + let duration = asset.duration + do { + try composition.insertTimeRange( + CMTimeRangeMake(start: .zero, duration: duration), + of: asset, + at: currentTime + ) + currentTime = CMTimeAdd(currentTime, duration) + } catch { + } + } + return composition +} diff --git a/submodules/TelegramUI/Images.xcassets/Camera/VideoMessageFlip.imageset/Contents.json b/submodules/TelegramUI/Images.xcassets/Camera/VideoMessageFlip.imageset/Contents.json new file mode 100644 index 0000000000..c55a51057a --- /dev/null +++ b/submodules/TelegramUI/Images.xcassets/Camera/VideoMessageFlip.imageset/Contents.json @@ -0,0 +1,12 @@ +{ + "images" : [ + { + "filename" : "switchcamera_30.pdf", + "idiom" : "universal" + } + ], + "info" : { + "author" : "xcode", + "version" : 1 + } +} diff --git a/submodules/TelegramUI/Images.xcassets/Camera/VideoMessageFlip.imageset/switchcamera_30.pdf b/submodules/TelegramUI/Images.xcassets/Camera/VideoMessageFlip.imageset/switchcamera_30.pdf new file mode 100644 index 0000000000..aa86094918 --- /dev/null +++ b/submodules/TelegramUI/Images.xcassets/Camera/VideoMessageFlip.imageset/switchcamera_30.pdf @@ -0,0 +1,188 @@ +%PDF-1.7 + +1 0 obj + << >> +endobj + +2 0 obj + << /Length 3 0 R >> +stream +/DeviceRGB CS +/DeviceRGB cs +q +1.000000 0.000000 -0.000000 1.000000 3.084961 5.584656 cm +0.000000 0.000000 0.000000 scn +9.490515 20.080341 m +9.422689 20.080362 l +8.999260 20.080563 8.679590 20.080715 8.371326 20.006708 c +8.099400 19.941423 7.839443 19.833746 7.600999 19.687628 c +7.330693 19.521984 7.104758 19.295835 6.805490 18.996283 c 
+6.757546 18.948309 l +5.959852 18.150616 l +5.772345 17.963108 5.716480 17.908821 5.660614 17.864992 c +5.468593 17.714350 5.238950 17.619228 4.996650 17.589970 c +4.926156 17.581457 4.848266 17.580341 4.583089 17.580341 c +4.484859 17.580357 l +3.725908 17.580544 3.223788 17.580666 2.792615 17.474993 c +1.466152 17.149897 0.430476 16.114222 0.105380 14.787757 c +-0.000294 14.356585 -0.000171 13.854465 0.000015 13.095512 c +0.000031 12.997284 l +0.000031 5.465342 l +0.000031 5.436520 l +0.000025 4.620880 0.000020 3.968216 0.043112 3.440796 c +0.087345 2.899416 0.180274 2.431705 0.399492 2.001467 c +0.750868 1.311853 1.311542 0.751179 2.001156 0.399803 c +2.431395 0.180586 2.899104 0.087656 3.440485 0.043423 c +3.967894 0.000332 4.620543 0.000336 5.436161 0.000341 c +5.436225 0.000341 l +5.465031 0.000341 l +18.365032 0.000341 l +18.393837 0.000341 l +18.393900 0.000341 l +19.209520 0.000336 19.862169 0.000332 20.389578 0.043423 c +20.930958 0.087656 21.398668 0.180586 21.828907 0.399803 c +22.518520 0.751179 23.079195 1.311853 23.430571 2.001467 c +23.649788 2.431705 23.742718 2.899416 23.786951 3.440796 c +23.830042 3.968204 23.830038 4.620852 23.830032 5.436470 c +23.830032 5.436536 l +23.830032 5.465342 l +23.830032 12.997283 l +23.830048 13.095503 l +23.830235 13.854461 23.830357 14.356583 23.724682 14.787757 c +23.399588 16.114222 22.363911 17.149897 21.037447 17.474993 c +20.606276 17.580666 20.104155 17.580544 19.345201 17.580357 c +19.246973 17.580341 l +18.981796 17.580341 18.903906 17.581457 18.833412 17.589970 c +18.591112 17.619228 18.361469 17.714350 18.169449 17.864992 c +18.113583 17.908821 18.057718 17.963108 17.870209 18.150616 c +17.072515 18.948311 l +17.024576 18.996279 l +16.725307 19.295834 16.499371 19.521982 16.229063 19.687628 c +15.990620 19.833746 15.730661 19.941423 15.458736 20.006708 c +15.150473 20.080715 14.830803 20.080563 14.407373 20.080362 c +14.339548 20.080341 l +9.490515 20.080341 l +h +8.681808 18.713455 m +8.817650 
18.746067 8.969681 18.750341 9.490515 18.750341 c +14.339548 18.750341 l +14.860382 18.750341 15.012413 18.746067 15.148252 18.713455 c +15.284472 18.680752 15.414694 18.626812 15.534140 18.553616 c +15.653254 18.480623 15.763777 18.376143 16.132063 18.007858 c +16.929756 17.210163 l +16.954220 17.185692 l +16.954237 17.185675 l +17.106804 17.033035 17.221756 16.918030 17.348524 16.818577 c +17.731848 16.517857 18.190273 16.327971 18.673967 16.269562 c +18.833941 16.250244 18.996555 16.250284 19.212389 16.250336 c +19.246973 16.250341 l +20.139725 16.250341 20.466656 16.245523 20.720854 16.183224 c +21.565954 15.976102 22.225792 15.316265 22.432913 14.471165 c +22.495213 14.216966 22.500031 13.890034 22.500031 12.997283 c +22.500031 5.465342 l +22.500031 4.614289 22.499514 4.015995 22.461367 3.549101 c +22.423855 3.089968 22.353294 2.816771 22.245531 2.605274 c +22.021667 2.165916 21.664457 1.808706 21.225100 1.584843 c +21.013603 1.477079 20.740406 1.406519 20.281273 1.369007 c +19.814379 1.330860 19.216084 1.330343 18.365032 1.330343 c +5.465031 1.330343 l +4.613979 1.330343 4.015684 1.330860 3.548789 1.369007 c +3.089657 1.406519 2.816460 1.477079 2.604963 1.584843 c +2.165605 1.808706 1.808395 2.165916 1.584531 2.605274 c +1.476768 2.816771 1.406208 3.089968 1.368695 3.549101 c +1.330548 4.015995 1.330031 4.614290 1.330031 5.465342 c +1.330031 12.997284 l +1.330031 13.890034 1.334849 14.216966 1.397150 14.471165 c +1.604271 15.316265 2.264108 15.976102 3.109208 16.183224 c +3.363407 16.245523 3.690338 16.250341 4.583089 16.250341 c +4.617674 16.250336 l +4.617689 16.250336 l +4.833515 16.250284 4.996125 16.250244 5.156096 16.269562 c +5.639788 16.327971 6.098214 16.517857 6.481537 16.818577 c +6.608315 16.918037 6.723272 17.033049 6.875851 17.185703 c +6.900304 17.210163 l +7.697999 18.007858 l +8.066284 18.376143 8.176808 18.480623 8.295923 18.553616 c +8.415368 18.626812 8.545589 18.680752 8.681808 18.713455 c +h +8.819138 12.449797 m +9.606685 13.253181 
10.702194 13.750379 11.915030 13.750379 c +13.964883 13.750379 15.683014 12.327185 16.134258 10.415339 c +14.848875 10.415339 l +14.636918 10.415339 14.521129 10.168130 14.656816 10.005297 c +16.622980 7.645809 l +16.722927 7.525869 16.907139 7.525866 17.007090 7.645802 c +18.973408 10.005290 l +19.109104 10.168120 18.993317 10.415339 18.781355 10.415339 c +17.491955 10.415339 l +17.019377 13.067384 14.702655 15.080379 11.915030 15.080379 c +10.330412 15.080379 8.896755 14.428887 7.869370 13.380838 c +7.612269 13.118567 7.616461 12.697534 7.878733 12.440434 c +8.141004 12.183333 8.562037 12.187525 8.819138 12.449797 c +h +6.338119 8.415344 m +5.048842 8.415344 l +4.836884 8.415344 4.721095 8.662557 4.856786 8.825389 c +6.822981 11.184870 l +6.922931 11.304811 7.107148 11.304810 7.207097 11.184867 c +9.173254 8.825387 l +9.308942 8.662554 9.193151 8.415344 8.981194 8.415344 c +7.695821 8.415344 l +8.147092 6.503536 9.865205 5.080379 11.915030 5.080379 c +13.096758 5.080379 14.166923 5.552347 14.949532 6.319569 c +15.211796 6.576675 15.632830 6.572495 15.889937 6.310231 c +16.147045 6.047967 16.142864 5.626933 15.880600 5.369825 c +14.859452 4.368757 13.458792 3.750378 11.915030 3.750378 c +9.127432 3.750378 6.810725 5.763336 6.338119 8.415344 c +h +f* +n +Q + +endstream +endobj + +3 0 obj + 5480 +endobj + +4 0 obj + << /Annots [] + /Type /Page + /MediaBox [ 0.000000 0.000000 30.000000 30.000000 ] + /Resources 1 0 R + /Contents 2 0 R + /Parent 5 0 R + >> +endobj + +5 0 obj + << /Kids [ 4 0 R ] + /Count 1 + /Type /Pages + >> +endobj + +6 0 obj + << /Pages 5 0 R + /Type /Catalog + >> +endobj + +xref +0 7 +0000000000 65535 f +0000000010 00000 n +0000000034 00000 n +0000005570 00000 n +0000005593 00000 n +0000005766 00000 n +0000005840 00000 n +trailer +<< /ID [ (some) (id) ] + /Root 6 0 R + /Size 7 +>> +startxref +5899 +%%EOF \ No newline at end of file diff --git a/submodules/TelegramUI/Images.xcassets/Chat/Message/ExternalLink.imageset/Contents.json 
b/submodules/TelegramUI/Images.xcassets/Chat/Message/ExternalLink.imageset/Contents.json new file mode 100644 index 0000000000..740a85579e --- /dev/null +++ b/submodules/TelegramUI/Images.xcassets/Chat/Message/ExternalLink.imageset/Contents.json @@ -0,0 +1,12 @@ +{ + "images" : [ + { + "filename" : "addlink_16.pdf", + "idiom" : "universal" + } + ], + "info" : { + "author" : "xcode", + "version" : 1 + } +} diff --git a/submodules/TelegramUI/Images.xcassets/Chat/Message/ExternalLink.imageset/addlink_16.pdf b/submodules/TelegramUI/Images.xcassets/Chat/Message/ExternalLink.imageset/addlink_16.pdf new file mode 100644 index 0000000000..0eb367086f --- /dev/null +++ b/submodules/TelegramUI/Images.xcassets/Chat/Message/ExternalLink.imageset/addlink_16.pdf @@ -0,0 +1,236 @@ +%PDF-1.7 + +1 0 obj + << >> +endobj + +2 0 obj + << /Length 3 0 R >> +stream +/DeviceRGB CS +/DeviceRGB cs +q +1.000000 0.000000 -0.000000 1.000000 8.500000 7.040154 cm +0.000000 0.000000 0.000000 scn +-0.470226 1.930072 m +-0.729925 1.670374 -0.729925 1.249319 -0.470226 0.989621 c +-0.210527 0.729922 0.210527 0.729922 0.470226 0.989621 c +-0.470226 1.930072 l +h +6.470226 6.989621 m +6.729925 7.249319 6.729925 7.670374 6.470226 7.930072 c +6.210527 8.189772 5.789473 8.189772 5.529774 7.930072 c +6.470226 6.989621 l +h +0.470226 0.989621 m +6.470226 6.989621 l +5.529774 7.930072 l +-0.470226 1.930072 l +0.470226 0.989621 l +h +f +n +Q +q +1.000000 0.000000 -0.000000 1.000000 9.500000 8.170006 cm +0.000000 0.000000 0.000000 scn +0.000000 6.994994 m +-0.367269 6.994994 -0.665000 6.697264 -0.665000 6.329994 c +-0.665000 5.962725 -0.367269 5.664994 0.000000 5.664994 c +0.000000 6.994994 l +h +5.000000 6.329994 m +5.665000 6.329994 l +5.665000 6.697264 5.367270 6.994994 5.000000 6.994994 c +5.000000 6.329994 l +h +4.335000 1.329994 m +4.335000 0.962725 4.632730 0.664994 5.000000 0.664994 c +5.367270 0.664994 5.665000 0.962725 5.665000 1.329994 c +4.335000 1.329994 l +h +0.000000 5.664994 m +5.000000 5.664994 
l +5.000000 6.994994 l +0.000000 6.994994 l +0.000000 5.664994 l +h +4.335000 6.329994 m +4.335000 1.329994 l +5.665000 1.329994 l +5.665000 6.329994 l +4.335000 6.329994 l +h +f +n +Q +q +1.000000 0.000000 -0.000000 1.000000 3.000000 1.668968 cm +0.000000 0.000000 0.000000 scn +4.000000 10.666032 m +4.367270 10.666032 4.665000 10.963762 4.665000 11.331032 c +4.665000 11.698301 4.367270 11.996032 4.000000 11.996032 c +4.000000 10.666032 l +h +10.665000 5.331032 m +10.665000 5.698301 10.367270 5.996032 10.000000 5.996032 c +9.632730 5.996032 9.335000 5.698301 9.335000 5.331032 c +10.665000 5.331032 l +h +8.907981 1.549019 m +9.209885 0.956499 l +8.907981 1.549019 l +h +9.782013 2.423051 m +10.374533 2.121147 l +9.782013 2.423051 l +h +1.092019 11.113045 m +0.790115 11.705564 l +1.092019 11.113045 l +h +0.217987 10.239013 m +-0.374532 10.540916 l +0.217987 10.239013 l +h +4.000000 11.996032 m +3.200000 11.996032 l +3.200000 10.666032 l +4.000000 10.666032 l +4.000000 11.996032 l +h +-0.665000 8.131032 m +-0.665000 4.531032 l +0.665000 4.531032 l +0.665000 8.131032 l +-0.665000 8.131032 l +h +3.200000 0.666032 m +6.800000 0.666032 l +6.800000 1.996032 l +3.200000 1.996032 l +3.200000 0.666032 l +h +10.665000 4.531032 m +10.665000 5.331032 l +9.335000 5.331032 l +9.335000 4.531032 l +10.665000 4.531032 l +h +6.800000 0.666032 m +7.349080 0.666032 7.800883 0.665515 8.167748 0.695489 c +8.542377 0.726097 8.886601 0.791779 9.209885 0.956499 c +8.606077 2.141538 l +8.501536 2.088272 8.351824 2.044960 8.059443 2.021071 c +7.759301 1.996549 7.371026 1.996032 6.800000 1.996032 c +6.800000 0.666032 l +h +9.335000 4.531032 m +9.335000 3.960006 9.334483 3.571731 9.309960 3.271588 c +9.286072 2.979208 9.242760 2.829495 9.189494 2.724955 c +10.374533 2.121147 l +10.539253 2.444430 10.604935 2.788655 10.635543 3.163283 c +10.665517 3.530149 10.665000 3.981952 10.665000 4.531032 c +9.335000 4.531032 l +h +9.209885 0.956499 m +9.711337 1.212002 10.119030 1.619695 10.374533 2.121147 c 
+9.189494 2.724955 l +9.061502 2.473758 8.857274 2.269529 8.606077 2.141538 c +9.209885 0.956499 l +h +-0.665000 4.531032 m +-0.665000 3.981952 -0.665517 3.530149 -0.635543 3.163283 c +-0.604935 2.788655 -0.539253 2.444430 -0.374532 2.121147 c +0.810506 2.724955 l +0.757240 2.829495 0.713928 2.979208 0.690040 3.271588 c +0.665517 3.571731 0.665000 3.960006 0.665000 4.531032 c +-0.665000 4.531032 l +h +3.200000 1.996032 m +2.628974 1.996032 2.240699 1.996549 1.940556 2.021071 c +1.648176 2.044960 1.498463 2.088272 1.393923 2.141538 c +0.790115 0.956499 l +1.113398 0.791779 1.457623 0.726097 1.832252 0.695489 c +2.199117 0.665515 2.650921 0.666032 3.200000 0.666032 c +3.200000 1.996032 l +h +-0.374532 2.121147 m +-0.119030 1.619695 0.288663 1.212002 0.790115 0.956499 c +1.393923 2.141538 l +1.142726 2.269529 0.938497 2.473758 0.810506 2.724955 c +-0.374532 2.121147 l +h +3.200000 11.996032 m +2.650921 11.996032 2.199117 11.996549 1.832252 11.966575 c +1.457623 11.935966 1.113398 11.870285 0.790115 11.705564 c +1.393923 10.520525 l +1.498463 10.573792 1.648176 10.617104 1.940556 10.640992 c +2.240699 10.665515 2.628974 10.666032 3.200000 10.666032 c +3.200000 11.996032 l +h +0.665000 8.131032 m +0.665000 8.702057 0.665517 9.090332 0.690040 9.390476 c +0.713928 9.682856 0.757240 9.832568 0.810506 9.937109 c +-0.374532 10.540916 l +-0.539253 10.217633 -0.604935 9.873408 -0.635543 9.498780 c +-0.665517 9.131915 -0.665000 8.680111 -0.665000 8.131032 c +0.665000 8.131032 l +h +0.790115 11.705564 m +0.288663 11.450062 -0.119030 11.042369 -0.374532 10.540916 c +0.810506 9.937109 l +0.938497 10.188306 1.142726 10.392534 1.393923 10.520525 c +0.790115 11.705564 l +h +f +n +Q + +endstream +endobj + +3 0 obj + 4659 +endobj + +4 0 obj + << /Annots [] + /Type /Page + /MediaBox [ 0.000000 0.000000 16.000000 16.000000 ] + /Resources 1 0 R + /Contents 2 0 R + /Parent 5 0 R + >> +endobj + +5 0 obj + << /Kids [ 4 0 R ] + /Count 1 + /Type /Pages + >> +endobj + +6 0 obj + << /Pages 5 0 R 
+ /Type /Catalog + >> +endobj + +xref +0 7 +0000000000 65535 f +0000000010 00000 n +0000000034 00000 n +0000004749 00000 n +0000004772 00000 n +0000004945 00000 n +0000005019 00000 n +trailer +<< /ID [ (some) (id) ] + /Root 6 0 R + /Size 7 +>> +startxref +5078 +%%EOF \ No newline at end of file diff --git a/submodules/TelegramUI/Images.xcassets/Premium/PrivacyPresence.imageset/Contents.json b/submodules/TelegramUI/Images.xcassets/Premium/PrivacyPresence.imageset/Contents.json new file mode 100644 index 0000000000..d4072b508f --- /dev/null +++ b/submodules/TelegramUI/Images.xcassets/Premium/PrivacyPresence.imageset/Contents.json @@ -0,0 +1,12 @@ +{ + "images" : [ + { + "filename" : "LastSeen.pdf", + "idiom" : "universal" + } + ], + "info" : { + "author" : "xcode", + "version" : 1 + } +} diff --git a/submodules/TelegramUI/Images.xcassets/Premium/PrivacyPresence.imageset/LastSeen.pdf b/submodules/TelegramUI/Images.xcassets/Premium/PrivacyPresence.imageset/LastSeen.pdf new file mode 100644 index 0000000000..77e71ede93 --- /dev/null +++ b/submodules/TelegramUI/Images.xcassets/Premium/PrivacyPresence.imageset/LastSeen.pdf @@ -0,0 +1,236 @@ +%PDF-1.7 + +1 0 obj + << /Type /XObject + /Length 2 0 R + /Group << /Type /Group + /S /Transparency + >> + /Subtype /Form + /Resources << >> + /BBox [ 0.000000 0.000000 90.000000 90.000000 ] + >> +stream +/DeviceRGB CS +/DeviceRGB cs +q +-0.573576 0.819152 -0.819152 -0.573576 116.549232 55.611801 cm +0.000000 0.000000 0.000000 scn +11.201888 42.536640 m +10.293921 43.394623 8.862335 43.354103 8.004352 42.446136 c +7.146368 41.538170 7.186888 40.106583 8.094854 39.248600 c +11.201888 42.536640 l +h +0.104302 67.332283 m +2.359326 67.155991 l +2.359326 67.155991 l +0.104302 67.332283 l +h +-2.261905 57.367592 m +-2.261905 56.118378 -1.249215 55.105686 0.000000 55.105686 c +1.249215 55.105686 2.261905 56.118378 2.261905 57.367592 c +-2.261905 57.367592 l +h +8.094854 39.248600 m +14.413891 33.277420 22.963470 29.611862 32.367256 
29.611862 c +32.367256 34.135670 l +24.160690 34.135670 16.711905 37.329945 11.201888 42.536640 c +8.094854 39.248600 l +h +32.367256 29.611862 m +51.816689 29.611862 67.607117 45.286694 67.607117 64.651543 c +63.083305 64.651543 l +63.083305 47.811054 49.344265 34.135670 32.367256 34.135670 c +32.367256 29.611862 l +h +67.607117 64.651543 m +67.607117 84.016388 51.816689 99.691223 32.367256 99.691223 c +32.367256 95.167419 l +49.344265 95.167419 63.083305 81.492027 63.083305 64.651543 c +67.607117 64.651543 l +h +32.367256 99.691223 m +13.838978 99.691223 -0.744848 85.491989 -2.150723 67.508575 c +2.359326 67.155991 l +3.587800 82.870171 16.285238 95.167419 32.367256 95.167419 c +32.367256 99.691223 l +h +-2.150723 67.508575 m +-2.228836 66.509377 -2.261905 58.196545 -2.261905 57.367592 c +2.261905 57.367592 l +2.261905 57.810497 2.270653 60.074085 2.288070 62.339462 c +2.296773 63.471542 2.307606 64.599052 2.320517 65.496704 c +2.326979 65.946014 2.333896 66.332962 2.341205 66.632019 c +2.344865 66.781738 2.348513 66.904846 2.352073 67.000267 c +2.353848 67.047859 2.355492 67.085579 2.356944 67.114319 c +2.358485 67.144806 2.359401 67.156944 2.359326 67.155991 c +-2.150723 67.508575 l +h +f +n +Q +q +-0.573576 0.819152 -0.819152 -0.573576 88.913589 26.117556 cm +0.000000 0.000000 0.000000 scn +9.281114 9.781655 m +16.382307 17.498228 l +16.941557 18.105942 16.902267 19.051954 16.294554 19.611202 c +16.008730 19.874233 15.631773 20.015724 15.243468 20.005730 c +0.971281 19.638401 l +0.420877 19.624233 -0.013829 19.166559 0.000337 18.616156 c +0.006554 18.374573 0.100313 18.143475 0.264171 17.965847 c +7.814779 9.780768 l +8.188101 9.376076 8.818807 9.350644 9.223498 9.723966 c +9.243483 9.742400 9.262703 9.761649 9.281114 9.781655 c +h +f* +n +Q +q +0.997564 0.069756 -0.069756 0.997564 27.183931 22.657665 cm +0.000000 0.000000 0.000000 scn +1.126237 13.390795 m +2.459934 13.920820 9.052311 11.488453 9.899677 10.654730 c +10.747043 9.821009 8.165665 2.188360 
3.458126 3.582010 c +-1.249412 4.975660 -0.207460 12.860769 1.126237 13.390795 c +h +11.387338 14.554115 m +10.053641 14.024090 3.339256 16.886160 2.491890 17.719881 c +2.073576 18.131460 2.036059 22.762421 3.879909 27.093227 c +5.771091 31.535206 11.220125 34.641033 13.845622 33.894619 c +22.144812 31.535206 12.721035 15.084141 11.387338 14.554115 c +h +f* +n +Q +q +1.000000 0.000000 -0.000000 1.000000 44.495422 29.013916 cm +0.000000 0.000000 0.000000 scn +3.598538 14.091810 m +5.121493 13.880739 10.383765 10.189638 10.959850 8.824841 c +11.535934 7.460043 4.756379 0.930214 1.484518 3.851572 c +0.240279 4.962521 -0.215860 6.791552 0.093658 8.620932 c +0.598051 11.602104 2.654739 14.222616 3.598538 14.091810 c +h +5.710229 17.638847 m +7.863484 29.044964 15.632494 32.943184 18.918007 31.831261 c +20.597502 31.262865 22.614096 27.443319 21.877079 23.943005 c +20.639967 18.067585 14.851414 12.199171 13.275784 12.765230 c +10.189638 13.873955 5.710229 17.084599 5.710229 17.638847 c +h +f* +n +Q + +endstream +endobj + +2 0 obj + 3525 +endobj + +3 0 obj + << /Type /XObject + /Length 4 0 R + /Group << /Type /Group + /S /Transparency + >> + /Subtype /Form + /Resources << >> + /BBox [ 0.000000 0.000000 90.000000 90.000000 ] + >> +stream +/DeviceRGB CS +/DeviceRGB cs +q +1.000000 0.000000 -0.000000 1.000000 5.000000 5.000000 cm +0.000000 0.000000 0.000000 scn +0.000000 80.000000 m +80.000000 80.000000 l +80.000000 0.000000 l +0.000000 0.000000 l +0.000000 80.000000 l +h +f +n +Q + +endstream +endobj + +4 0 obj + 232 +endobj + +5 0 obj + << /XObject << /X1 1 0 R >> + /ExtGState << /E1 << /SMask << /Type /Mask + /G 3 0 R + /S /Alpha + >> + /Type /ExtGState + >> >> + >> +endobj + +6 0 obj + << /Length 7 0 R >> +stream +/DeviceRGB CS +/DeviceRGB cs +q +/E1 gs +/X1 Do +Q + +endstream +endobj + +7 0 obj + 46 +endobj + +8 0 obj + << /Annots [] + /Type /Page + /MediaBox [ 0.000000 0.000000 90.000000 90.000000 ] + /Resources 5 0 R + /Contents 6 0 R + /Parent 9 0 R + >> +endobj + +9 
0 obj + << /Kids [ 8 0 R ] + /Count 1 + /Type /Pages + >> +endobj + +10 0 obj + << /Pages 9 0 R + /Type /Catalog + >> +endobj + +xref +0 11 +0000000000 65535 f +0000000010 00000 n +0000003783 00000 n +0000003806 00000 n +0000004286 00000 n +0000004308 00000 n +0000004606 00000 n +0000004708 00000 n +0000004729 00000 n +0000004902 00000 n +0000004976 00000 n +trailer +<< /ID [ (some) (id) ] + /Root 10 0 R + /Size 11 +>> +startxref +5036 +%%EOF \ No newline at end of file diff --git a/submodules/TelegramUI/Images.xcassets/Premium/PrivacyReadTime.imageset/Contents.json b/submodules/TelegramUI/Images.xcassets/Premium/PrivacyReadTime.imageset/Contents.json new file mode 100644 index 0000000000..5898847ff8 --- /dev/null +++ b/submodules/TelegramUI/Images.xcassets/Premium/PrivacyReadTime.imageset/Contents.json @@ -0,0 +1,12 @@ +{ + "images" : [ + { + "filename" : "ReadTime.pdf", + "idiom" : "universal" + } + ], + "info" : { + "author" : "xcode", + "version" : 1 + } +} diff --git a/submodules/TelegramUI/Images.xcassets/Premium/PrivacyReadTime.imageset/ReadTime.pdf b/submodules/TelegramUI/Images.xcassets/Premium/PrivacyReadTime.imageset/ReadTime.pdf new file mode 100644 index 0000000000..25f40eeac6 --- /dev/null +++ b/submodules/TelegramUI/Images.xcassets/Premium/PrivacyReadTime.imageset/ReadTime.pdf @@ -0,0 +1,277 @@ +%PDF-1.7 + +1 0 obj + << /Type /XObject + /Length 2 0 R + /Group << /Type /Group + /S /Transparency + >> + /Subtype /Form + /Resources << >> + /BBox [ 0.000000 0.000000 90.000000 90.000000 ] + >> +stream +/DeviceRGB CS +/DeviceRGB cs +q +0.707107 0.707107 -0.707107 0.707107 67.034660 -18.373894 cm +0.000000 0.000000 0.000000 scn +10.453167 41.350018 m +9.512874 42.238548 8.030319 42.196587 7.141789 41.256294 c +6.253258 40.316002 6.295220 38.833447 7.235513 37.944916 c +10.453167 41.350018 l +h +0.095610 63.883827 m +-2.239701 64.066391 l +0.095610 63.883827 l +h +-2.342436 54.749527 m +-2.342436 53.455833 -1.293692 52.407089 0.000000 52.407089 c +1.293692 
52.407089 2.342436 53.455833 2.342436 54.749527 c +-2.342436 54.749527 l +h +7.235513 37.944916 m +13.076076 32.425873 20.978649 29.037750 29.669981 29.037750 c +29.669981 33.722622 l +22.218493 33.722622 15.455904 36.622681 10.453167 41.350018 c +7.235513 37.944916 l +h +29.669981 29.037750 m +47.645660 29.037750 62.242207 43.525253 62.242207 61.426483 c +57.557335 61.426483 l +57.557335 46.139488 45.085209 33.722622 29.669981 33.722622 c +29.669981 29.037750 l +h +62.242207 61.426483 m +62.242207 79.327713 47.645660 93.815216 29.669981 93.815216 c +29.669981 89.130341 l +45.085209 89.130341 57.557335 76.713478 57.557335 61.426483 c +62.242207 61.426483 l +h +29.669981 93.815216 m +12.540254 93.815216 -0.940433 80.686134 -2.239701 64.066391 c +2.430921 63.701260 l +3.546472 77.970970 15.073609 89.130341 29.669981 89.130341 c +29.669981 93.815216 l +h +-2.239701 64.066391 m +-2.312374 63.136772 -2.342436 55.500645 -2.342436 54.749527 c +2.342436 54.749527 l +2.342436 55.154526 2.350449 57.228703 2.366412 59.305004 c +2.374389 60.342529 2.384313 61.375381 2.396135 62.197342 c +2.402054 62.608826 2.408379 62.962509 2.415047 63.235344 c +2.418387 63.371964 2.421699 63.483559 2.424903 63.569450 c +2.426500 63.612274 2.427957 63.645615 2.429212 63.670444 c +2.430558 63.697090 2.431254 63.705528 2.430921 63.701260 c +-2.239701 64.066391 l +h +f +n +Q +q +0.707107 0.707107 -0.707107 0.707107 35.092972 1.912642 cm +0.000000 0.000000 0.000000 scn +9.400195 9.052296 m +16.491901 18.012711 l +17.065374 18.737297 16.942873 19.789585 16.218287 20.363058 c +15.921938 20.597603 15.554913 20.724905 15.176980 20.724241 c +1.113485 20.699522 l +0.497442 20.698439 -0.001081 20.198158 0.000002 19.582117 c +0.000426 19.340685 0.079173 19.105904 0.224414 18.913044 c +7.634510 9.073509 l +8.005109 8.581406 8.704469 8.482906 9.196571 8.853506 c +9.272655 8.910804 9.341086 8.977612 9.400195 9.052296 c +h +f* +n +Q +q +1.000000 0.000000 -0.000000 1.000000 37.394165 30.785156 cm +0.000000 
0.000000 0.000000 scn +25.237368 22.854145 m +26.030727 23.803909 25.903934 25.216991 24.954170 26.010351 c +24.004406 26.803711 22.591324 26.676918 21.797964 25.727154 c +25.237368 22.854145 l +h +7.466430 5.075024 m +9.186129 3.638515 l +9.186131 3.638519 l +7.466430 5.075024 l +h +7.322968 5.062153 m +5.886413 3.342493 l +5.886472 3.342443 l +7.322968 5.062153 l +h +7.306202 5.079990 m +5.500865 3.752708 l +5.500870 3.752703 l +7.306202 5.079990 l +h +1.805337 16.344992 m +1.072299 17.342052 -0.330222 17.556084 -1.327282 16.823048 c +-2.324342 16.090010 -2.538375 14.687489 -1.805337 13.690428 c +1.805337 16.344992 l +h +21.797964 25.727154 m +5.746728 6.511530 l +9.186131 3.638519 l +25.237368 22.854145 l +21.797964 25.727154 l +h +5.746732 6.511532 m +6.504047 7.418142 7.852895 7.539131 8.759465 6.781860 c +5.886472 3.342443 l +6.879384 2.513050 8.356690 2.645563 9.186129 3.638515 c +5.746732 6.511532 l +h +8.759524 6.781811 m +8.891400 6.671646 9.009609 6.545914 9.111534 6.407280 c +5.500870 3.752703 l +5.612496 3.600872 5.741967 3.463160 5.886413 3.342493 c +8.759524 6.781811 l +h +9.111539 6.407272 m +1.805337 16.344992 l +-1.805337 13.690428 l +5.500865 3.752708 l +9.111539 6.407272 l +h +f +n +Q +q +1.000000 0.000000 -0.000000 1.000000 29.174744 29.887207 cm +0.000000 0.000000 0.000000 scn +5.315279 3.728403 m +6.056771 2.737616 7.461063 2.535522 8.451851 3.277015 c +9.442638 4.018508 9.644732 5.422799 8.903239 6.413588 c +5.315279 3.728403 l +h +1.793980 15.913027 m +1.052487 16.903814 -0.351804 17.105907 -1.342592 16.364414 c +-2.333380 15.622922 -2.535474 14.218631 -1.793980 13.227842 c +1.793980 15.913027 l +h +8.903239 6.413588 m +1.793980 15.913027 l +-1.793980 13.227842 l +5.315279 3.728403 l +8.903239 6.413588 l +h +f +n +Q + +endstream +endobj + +2 0 obj + 4078 +endobj + +3 0 obj + << /Type /XObject + /Length 4 0 R + /Group << /Type /Group + /S /Transparency + >> + /Subtype /Form + /Resources << >> + /BBox [ 0.000000 0.000000 90.000000 90.000000 ] 
+ >> +stream +/DeviceRGB CS +/DeviceRGB cs +q +1.000000 0.000000 -0.000000 1.000000 5.000000 5.000000 cm +0.000000 0.000000 0.000000 scn +0.000000 80.000000 m +80.000000 80.000000 l +80.000000 0.000000 l +0.000000 0.000000 l +0.000000 80.000000 l +h +f +n +Q + +endstream +endobj + +4 0 obj + 232 +endobj + +5 0 obj + << /XObject << /X1 1 0 R >> + /ExtGState << /E1 << /SMask << /Type /Mask + /G 3 0 R + /S /Alpha + >> + /Type /ExtGState + >> >> + >> +endobj + +6 0 obj + << /Length 7 0 R >> +stream +/DeviceRGB CS +/DeviceRGB cs +q +/E1 gs +/X1 Do +Q + +endstream +endobj + +7 0 obj + 46 +endobj + +8 0 obj + << /Annots [] + /Type /Page + /MediaBox [ 0.000000 0.000000 90.000000 90.000000 ] + /Resources 5 0 R + /Contents 6 0 R + /Parent 9 0 R + >> +endobj + +9 0 obj + << /Kids [ 8 0 R ] + /Count 1 + /Type /Pages + >> +endobj + +10 0 obj + << /Pages 9 0 R + /Type /Catalog + >> +endobj + +xref +0 11 +0000000000 65535 f +0000000010 00000 n +0000004336 00000 n +0000004359 00000 n +0000004839 00000 n +0000004861 00000 n +0000005159 00000 n +0000005261 00000 n +0000005282 00000 n +0000005455 00000 n +0000005529 00000 n +trailer +<< /ID [ (some) (id) ] + /Root 10 0 R + /Size 11 +>> +startxref +5589 +%%EOF \ No newline at end of file diff --git a/submodules/TelegramUI/Resources/Animations/LockPause.json b/submodules/TelegramUI/Resources/Animations/LockPause.json new file mode 100644 index 0000000000..3997067fef --- /dev/null +++ b/submodules/TelegramUI/Resources/Animations/LockPause.json @@ -0,0 +1 @@ +{"v":"5.12.1","fr":60,"ip":0,"op":60,"w":240,"h":360,"nm":"nLock3","ddd":0,"assets":[],"layers":[{"ddd":0,"ind":1,"ty":4,"nm":"Rectangle 
3","sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":1,"k":[{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":26,"s":[0]},{"t":46,"s":[90]}],"ix":10},"p":{"s":true,"x":{"a":1,"k":[{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":13,"s":[120]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":26,"s":[120]},{"t":46,"s":[120]}],"ix":3},"y":{"a":1,"k":[{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":13,"s":[150]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":26,"s":[200]},{"t":46,"s":[180]}],"ix":4}},"a":{"a":0,"k":[0,0,0],"ix":1,"l":2},"s":{"a":0,"k":[100,100,100],"ix":6,"l":2}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":1,"k":[{"i":{"x":0.833,"y":1},"o":{"x":0.333,"y":0},"t":26,"s":[{"i":[[-1.66,0],[0,0],[0,-1.66],[0,0],[1.66,0],[0,0],[0,1.66],[0,0]],"o":[[0,0],[1.66,0],[0,0],[0,1.66],[0,0],[-1.66,0],[0,0],[0,-1.66]],"v":[[-5,-7],[5,-7],[8,-4],[8,4],[5,7],[-5,7],[-8,4],[-8,-4]],"c":true}]},{"t":46,"s":[{"i":[[-1.66,0],[0,0],[0,-1.66],[0,0],[1.66,0],[0,0],[0,1.66],[0,0]],"o":[[0,0],[1.66,0],[0,0],[0,1.66],[0,0],[-1.66,0],[0,0],[0,-1.66]],"v":[[-6.25,-5.75],[6.25,-5.75],[9.25,-2.75],[9.25,2.75],[6.25,5.75],[-6.25,5.75],[-9.25,2.75],[-9.25,-2.75]],"c":true}]}],"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"tm","s":{"a":1,"k":[{"i":{"x":[0.4],"y":[1]},"o":{"x":[0.6],"y":[0]},"t":26,"s":[0]},{"t":46,"s":[42]}],"ix":1},"e":{"a":1,"k":[{"i":{"x":[0.4],"y":[1]},"o":{"x":[0.6],"y":[0]},"t":26,"s":[100]},{"t":46,"s":[63]}],"ix":2},"o":{"a":0,"k":212,"ix":3},"m":1,"ix":2,"nm":"Trim Paths 1","mn":"ADBE Vector Filter - Trim","hd":false},{"ty":"st","c":{"a":0,"k":[0,0.494117647409,0.898039221764,1],"ix":3},"o":{"a":0,"k":100,"ix":4},"w":{"a":1,"k":[{"i":{"x":[0.4],"y":[1]},"o":{"x":[0.6],"y":[0]},"t":26,"s":[1.33]},{"t":46,"s":[6]}],"ix":5},"lc":2,"lj":1,"ml":4,"bm":0,"nm":"Обводка 1","mn":"ADBE Vector Graphic - 
Stroke","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[600,600],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Rectangle","np":3,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector Group","hd":false}],"ip":26,"op":60,"st":13,"ct":1,"bm":0},{"ddd":0,"ind":2,"ty":4,"nm":"Rectangle 2","sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":1,"k":[{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":26,"s":[0]},{"t":46,"s":[90]}],"ix":10},"p":{"s":true,"x":{"a":1,"k":[{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":13,"s":[120]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":26,"s":[120]},{"t":46,"s":[120]}],"ix":3},"y":{"a":1,"k":[{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":13,"s":[150]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":26,"s":[200]},{"t":46,"s":[180]}],"ix":4}},"a":{"a":0,"k":[0,0,0],"ix":1,"l":2},"s":{"a":0,"k":[100,100,100],"ix":6,"l":2}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":1,"k":[{"i":{"x":0.833,"y":1},"o":{"x":0.333,"y":0},"t":26,"s":[{"i":[[-1.66,0],[0,0],[0,-1.66],[0,0],[1.66,0],[0,0],[0,1.66],[0,0]],"o":[[0,0],[1.66,0],[0,0],[0,1.66],[0,0],[-1.66,0],[0,0],[0,-1.66]],"v":[[-5,-7],[5,-7],[8,-4],[8,4],[5,7],[-5,7],[-8,4],[-8,-4]],"c":true}]},{"t":46,"s":[{"i":[[-1.66,0],[0,0],[0,-1.66],[0,0],[1.66,0],[0,0],[0,1.66],[0,0]],"o":[[0,0],[1.66,0],[0,0],[0,1.66],[0,0],[-1.66,0],[0,0],[0,-1.66]],"v":[[-6.25,-5.75],[6.25,-5.75],[9.25,-2.75],[9.25,2.75],[6.25,5.75],[-6.25,5.75],[-9.25,2.75],[-9.25,-2.75]],"c":true}]}],"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"tm","s":{"a":1,"k":[{"i":{"x":[0.4],"y":[1]},"o":{"x":[0.6],"y":[0]},"t":26,"s":[0]},{"t":46,"s":[42]}],"ix":1},"e":{"a":1,"k":[{"i":{"x":[0.4],"y":[1]},"o":{"x":[0.6],"y":[0]},"t":26,"s":[100]},{"t":46,"s":[63]}],"ix":2},"o":{"a":0,"k":32,"ix":3},"m":1,"ix":2,"nm":"Trim 
Paths 1","mn":"ADBE Vector Filter - Trim","hd":false},{"ty":"st","c":{"a":0,"k":[0,0.494117647409,0.898039221764,1],"ix":3},"o":{"a":0,"k":100,"ix":4},"w":{"a":1,"k":[{"i":{"x":[0.4],"y":[1]},"o":{"x":[0.6],"y":[0]},"t":26,"s":[1.33]},{"t":46,"s":[6]}],"ix":5},"lc":2,"lj":1,"ml":4,"bm":0,"nm":"Обводка 1","mn":"ADBE Vector Graphic - Stroke","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[600,600],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Rectangle","np":3,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector Group","hd":false}],"ip":26,"op":60,"st":13,"ct":1,"bm":0},{"ddd":0,"ind":3,"ty":4,"nm":"Rectangle","sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":1,"k":[{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":26,"s":[0]},{"t":46,"s":[90]}],"ix":10},"p":{"s":true,"x":{"a":1,"k":[{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":0,"s":[120]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":26,"s":[120]},{"t":46,"s":[120]}],"ix":3},"y":{"a":1,"k":[{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":0,"s":[150]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":26,"s":[200]},{"t":46,"s":[180]}],"ix":4}},"a":{"a":0,"k":[0,0,0],"ix":1,"l":2},"s":{"a":0,"k":[100,100,100],"ix":6,"l":2}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":1,"k":[{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":26,"s":[{"i":[[-1.66,0],[0,0],[0,-1.66],[0,0],[1.66,0],[0,0],[0,1.66],[0,0]],"o":[[0,0],[1.66,0],[0,0],[0,1.66],[0,0],[-1.66,0],[0,0],[0,-1.66]],"v":[[-5,-7],[5,-7],[8,-4],[8,4],[5,7],[-5,7],[-8,4],[-8,-4]],"c":true}]},{"t":46,"s":[{"i":[[-1.66,0],[0,0],[0,-1.66],[0,0],[1.66,0],[0,0],[0,1.66],[0,0]],"o":[[0,0],[1.66,0],[0,0],[0,1.66],[0,0],[-1.66,0],[0,0],[0,-1.66]],"v":[[-5,-8],[5,-8],[8,-5],[8,5],[5,8],[-5,8],[-8,5],[-8,-5]],"c":true}]}],"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - 
Group","hd":false},{"ty":"st","c":{"a":0,"k":[0,0.494117647409,0.898039221764,1],"ix":3},"o":{"a":0,"k":100,"ix":4},"w":{"a":0,"k":1.33,"ix":5},"lc":1,"lj":1,"ml":4,"bm":0,"nm":"Обводка 1","mn":"ADBE Vector Graphic - Stroke","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[600,600],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Rectangle","np":2,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector Group","hd":false},{"ty":"fl","c":{"a":0,"k":[0,0.494117647409,0.898039221764,1],"ix":4},"o":{"a":0,"k":0,"ix":5},"r":1,"bm":0,"nm":"Заливка 1","mn":"ADBE Vector Graphic - Fill","hd":false}],"ip":0,"op":26,"st":0,"ct":1,"bm":0},{"ddd":0,"ind":4,"ty":4,"nm":"Path","parent":3,"sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":1,"k":[{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":0,"s":[10]},{"t":10,"s":[0]}],"ix":10},"p":{"a":1,"k":[{"i":{"x":0.899,"y":0.642},"o":{"x":0.865,"y":0},"t":0,"s":[30,-32,0],"to":[0.011,8.859,0],"ti":[0,0,0]},{"i":{"x":0.619,"y":0.469},"o":{"x":0.298,"y":0.682},"t":25,"s":[30.07,26.297,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.515,"y":1},"o":{"x":0.218,"y":0.434},"t":26,"s":[30.079,29.902,0],"to":[0,0,0],"ti":[-0.001,-0.956,0]},{"t":30,"s":[30.088,39.998,0]}],"ix":2,"l":2},"a":{"a":0,"k":[30,36,0],"ix":1,"l":2},"s":{"a":0,"k":[100,100,100],"ix":6,"l":2}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":0,"k":{"i":[[0,0],[0,0],[-2.76,0],[0,-2.76],[0,0]],"o":[[0,0],[0,-2.76],[2.76,0],[0,0],[0,0]],"v":[[-5,2],[-5,-1],[0,-6],[5,-1],[5,6]],"c":false},"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - 
Group","hd":false},{"ty":"tm","s":{"a":1,"k":[{"i":{"x":[0.833],"y":[0.833]},"o":{"x":[0.167],"y":[0.167]},"t":22,"s":[14]},{"i":{"x":[0.833],"y":[0.833]},"o":{"x":[0.167],"y":[0.167]},"t":25,"s":[33]},{"t":26,"s":[41]}],"ix":1},"e":{"a":1,"k":[{"i":{"x":[0.4],"y":[1]},"o":{"x":[0.167],"y":[0]},"t":0,"s":[93]},{"i":{"x":[0.686],"y":[0.698]},"o":{"x":[0.353],"y":[0]},"t":6,"s":[93]},{"i":{"x":[0.697],"y":[0.578]},"o":{"x":[0.365],"y":[0.403]},"t":9,"s":[90.956]},{"i":{"x":[0.741],"y":[0.419]},"o":{"x":[0.419],"y":[0.214]},"t":13,"s":[88.581]},{"i":{"x":[0.713],"y":[0.507]},"o":{"x":[0.387],"y":[0.398]},"t":20,"s":[77.219]},{"i":{"x":[0.697],"y":[0.921]},"o":{"x":[0.37],"y":[0.27]},"t":23,"s":[66.609]},{"i":{"x":[0.833],"y":[0.833]},"o":{"x":[0.167],"y":[0.167]},"t":25,"s":[50]},{"t":26,"s":[43]}],"ix":2},"o":{"a":0,"k":0,"ix":3},"m":1,"ix":2,"nm":"Trim Paths 1","mn":"ADBE Vector Filter - Trim","hd":false},{"ty":"st","c":{"a":0,"k":[0,0.494117647409,0.898039221764,1],"ix":3},"o":{"a":0,"k":100,"ix":4},"w":{"a":0,"k":1.33,"ix":5},"lc":2,"lj":1,"ml":4,"bm":0,"nm":"Обводка 1","mn":"ADBE Vector Graphic - Stroke","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[600,600],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Path","np":3,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector Group","hd":false}],"ip":0,"op":26,"st":0,"ct":1,"bm":0},{"ddd":0,"ind":5,"ty":4,"nm":"Path 
4","parent":3,"sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":0,"k":0,"ix":10},"p":{"a":1,"k":[{"i":{"x":0.498,"y":1},"o":{"x":0.921,"y":0},"t":0,"s":[0,132,0],"to":[-0.018,-13.122,0],"ti":[0.001,0.644,0]},{"t":32,"s":[-0.62,8.043,0]}],"ix":2,"l":2},"a":{"a":0,"k":[0,0,0],"ix":1,"l":2},"s":{"a":0,"k":[100,100,100],"ix":6,"l":2}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":1,"k":[{"i":{"x":0.667,"y":1},"o":{"x":1,"y":0},"t":0,"s":[{"i":[[0,0],[0,0],[0,0]],"o":[[0,0],[0,0],[0,0]],"v":[[-8,3],[0,-3],[8,3]],"c":false}]},{"t":24,"s":[{"i":[[0,0],[0,0],[0,0]],"o":[[0,0],[0,0],[0,0]],"v":[[-0.753,3],[0,-3],[0.753,3]],"c":false}]}],"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"tm","s":{"a":0,"k":0,"ix":1},"e":{"a":1,"k":[{"i":{"x":[0.833],"y":[0.833]},"o":{"x":[0.167],"y":[0.167]},"t":21,"s":[100]},{"i":{"x":[0.833],"y":[0.833]},"o":{"x":[0.167],"y":[0.167]},"t":22,"s":[43]},{"i":{"x":[0.833],"y":[0.833]},"o":{"x":[0.167],"y":[0.167]},"t":23,"s":[20]},{"i":{"x":[0.833],"y":[0.833]},"o":{"x":[0.167],"y":[0.167]},"t":24,"s":[6]},{"t":25,"s":[0]}],"ix":2},"o":{"a":0,"k":0,"ix":3},"m":1,"ix":2,"nm":"Trim Paths 1","mn":"ADBE Vector Filter - Trim","hd":false},{"ty":"st","c":{"a":0,"k":[0,0.494117647409,0.898039221764,1],"ix":3},"o":{"a":0,"k":100,"ix":4},"w":{"a":0,"k":1.33,"ix":5},"lc":2,"lj":2,"bm":0,"nm":"Обводка 1","mn":"ADBE Vector Graphic - Stroke","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[600,600],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Path 5","np":3,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector 
Group","hd":false},{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":1,"k":[{"i":{"x":0.667,"y":1},"o":{"x":1,"y":0},"t":0,"s":[{"i":[[0,0],[0,0],[0,0]],"o":[[0,0],[0,0],[0,0]],"v":[[-8,3],[0,-3],[8,3]],"c":false}]},{"t":24,"s":[{"i":[[0,0],[0,0],[0,0]],"o":[[0,0],[0,0],[0,0]],"v":[[-0.753,3],[0,-3],[0.753,3]],"c":false}]}],"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"tm","s":{"a":1,"k":[{"i":{"x":[0.833],"y":[0.833]},"o":{"x":[0.167],"y":[0.167]},"t":21,"s":[0]},{"i":{"x":[0.833],"y":[0.833]},"o":{"x":[0.167],"y":[0.167]},"t":22,"s":[60]},{"i":{"x":[0.833],"y":[0.833]},"o":{"x":[0.167],"y":[0.167]},"t":23,"s":[80]},{"i":{"x":[0.833],"y":[0.833]},"o":{"x":[0.167],"y":[0.167]},"t":24,"s":[94]},{"t":25,"s":[100]}],"ix":1},"e":{"a":0,"k":100,"ix":2},"o":{"a":0,"k":0,"ix":3},"m":1,"ix":2,"nm":"Trim Paths 1","mn":"ADBE Vector Filter - Trim","hd":false},{"ty":"st","c":{"a":0,"k":[0,0.494117647409,0.898039221764,1],"ix":3},"o":{"a":0,"k":100,"ix":4},"w":{"a":0,"k":1.33,"ix":5},"lc":2,"lj":2,"bm":0,"nm":"Обводка 1","mn":"ADBE Vector Graphic - Stroke","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[600,600],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Path 4","np":3,"cix":2,"bm":0,"ix":2,"mn":"ADBE Vector Group","hd":false}],"ip":0,"op":25,"st":0,"ct":1,"bm":0}],"markers":[],"props":{}} \ No newline at end of file diff --git a/submodules/TelegramUI/Resources/Animations/PremiumPrivacyPresence.tgs b/submodules/TelegramUI/Resources/Animations/PremiumPrivacyPresence.tgs new file mode 100644 index 0000000000..9e8554d61a Binary files /dev/null and b/submodules/TelegramUI/Resources/Animations/PremiumPrivacyPresence.tgs differ diff --git a/submodules/TelegramUI/Resources/Animations/PremiumPrivacyRead.tgs b/submodules/TelegramUI/Resources/Animations/PremiumPrivacyRead.tgs new file mode 100644 index 
0000000000..91c207c6cb Binary files /dev/null and b/submodules/TelegramUI/Resources/Animations/PremiumPrivacyRead.tgs differ diff --git a/submodules/TelegramUI/Sources/ApplicationContext.swift b/submodules/TelegramUI/Sources/ApplicationContext.swift index 5a1a4b9666..52f0de5dd3 100644 --- a/submodules/TelegramUI/Sources/ApplicationContext.swift +++ b/submodules/TelegramUI/Sources/ApplicationContext.swift @@ -29,6 +29,7 @@ import AuthorizationUI import ChatListUI import StoryContainerScreen import ChatMessageNotificationItem +import PhoneNumberFormat final class UnauthorizedApplicationContext { let sharedContext: SharedAccountContextImpl @@ -725,7 +726,7 @@ final class AuthorizedApplicationContext { }) let importableContacts = self.context.sharedContext.contactDataManager?.importable() ?? .single([:]) - self.context.account.importableContacts.set(self.context.account.postbox.preferencesView(keys: [PreferencesKeys.contactsSettings]) + let optionalImportableContacts = self.context.account.postbox.preferencesView(keys: [PreferencesKeys.contactsSettings]) |> mapToSignal { preferences -> Signal<[DeviceContactNormalizedPhoneNumber: ImportableDeviceContactData], NoError> in let settings: ContactsSettings = preferences.values[PreferencesKeys.contactsSettings]?.get(ContactsSettings.self) ?? 
.defaultSettings if settings.synchronizeContacts { @@ -733,6 +734,11 @@ final class AuthorizedApplicationContext { } else { return .single([:]) } + } + self.context.account.importableContacts.set(optionalImportableContacts) + self.context.sharedContext.deviceContactPhoneNumbers.set(optionalImportableContacts + |> map { contacts in + return Set(contacts.keys.map { cleanPhoneNumber($0.rawValue) }) }) let previousTheme = Atomic(value: nil) diff --git a/submodules/TelegramUI/Sources/ChatController.swift b/submodules/TelegramUI/Sources/ChatController.swift index 828c796674..4d0a16d6bc 100644 --- a/submodules/TelegramUI/Sources/ChatController.swift +++ b/submodules/TelegramUI/Sources/ChatController.swift @@ -120,6 +120,7 @@ import PeerInfoScreen import MediaEditorScreen import WallpaperGalleryScreen import WallpaperGridScreen +import VideoMessageCameraScreen public enum ChatControllerPeekActions { case standard @@ -347,10 +348,12 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G var audioRecorderDisposable: Disposable? var audioRecorderStatusDisposable: Disposable? - var videoRecorderValue: InstantVideoController? - var videoRecorder = Promise() + var videoRecorderValue: VideoMessageCameraScreen? + var videoRecorder = Promise() var videoRecorderDisposable: Disposable? + var recorderDataDisposable = MetaDisposable() + var buttonKeyboardMessageDisposable: Disposable? var cachedDataDisposable: Disposable? var chatUnreadCountDisposable: Disposable? 
@@ -6171,7 +6174,8 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G $0.updatedInputTextPanelState { panelState in if let videoRecorder = videoRecorder { if panelState.mediaRecordingState == nil { - return panelState.withUpdatedMediaRecordingState(.video(status: .recording(videoRecorder.audioStatus), isLocked: strongSelf.lockMediaRecordingRequestId == strongSelf.beginMediaRecordingRequestId)) + let recordingStatus = videoRecorder.recordingStatus + return panelState.withUpdatedMediaRecordingState(.video(status: .recording(InstantVideoControllerRecordingStatus(micLevel: recordingStatus.micLevel, duration: recordingStatus.duration)), isLocked: strongSelf.lockMediaRecordingRequestId == strongSelf.beginMediaRecordingRequestId)) } } else { return panelState.withUpdatedMediaRecordingState(nil) @@ -6201,13 +6205,13 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G strongSelf.present(videoRecorder, in: .window(.root)) if strongSelf.lockMediaRecordingRequestId == strongSelf.beginMediaRecordingRequestId { - videoRecorder.lockVideo() + videoRecorder.lockVideoRecording() } } strongSelf.updateDownButtonVisibility() if let previousVideoRecorderValue = previousVideoRecorderValue { - previousVideoRecorderValue.dismissVideo() + previousVideoRecorderValue.discardVideo() } } } @@ -6649,6 +6653,7 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G self.stickerSettingsDisposable?.dispose() self.searchQuerySuggestionState?.1.dispose() self.preloadSavedMessagesChatsDisposable?.dispose() + self.recorderDataDisposable.dispose() } deallocate() } @@ -12873,12 +12878,7 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G strongSelf.push(controller) if justInstalled { - let content: UndoOverlayContent -// if bot.flags.contains(.showInSettings) { - content = .succeed(text: 
strongSelf.presentationData.strings.WebApp_ShortcutsSettingsAdded(botPeer.compactDisplayTitle).string, timeout: 5.0, customUndoText: nil) -// } else { -// content = .succeed(text: strongSelf.presentationData.strings.WebApp_ShortcutsAdded(bot.shortName).string, timeout: 5.0) -// } + let content: UndoOverlayContent = .succeed(text: strongSelf.presentationData.strings.WebApp_ShortcutsSettingsAdded(botPeer.compactDisplayTitle).string, timeout: 5.0, customUndoText: nil) controller.present(UndoOverlayController(presentationData: strongSelf.presentationData, content: content, elevatedLayout: false, position: .top, action: { _ in return false }), in: .current) } }, error: { [weak self] error in @@ -15376,68 +15376,66 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G isScheduledMessages = true } - self.videoRecorder.set(.single(legacyInstantVideoController(theme: self.presentationData.theme, forStory: false, panelFrame: self.view.convert(currentInputPanelFrame, to: nil), context: self.context, peerId: peerId, slowmodeState: !isScheduledMessages ? 
self.presentationInterfaceState.slowmodeState : nil, hasSchedule: !isScheduledMessages && peerId.namespace != Namespaces.Peer.SecretChat, send: { [weak self] videoController, message in - if let strongSelf = self { - guard let message = message else { - strongSelf.videoRecorder.set(.single(nil)) + let _ = isScheduledMessages + + let controller = VideoMessageCameraScreen( + context: self.context, + updatedPresentationData: self.updatedPresentationData, + inputPanelFrame: currentInputPanelFrame, + allowLiveUpload: peerId.namespace != Namespaces.Peer.SecretChat, + completion: { [weak self] message in + guard let self, let videoController = self.videoRecorderValue else { return } - - let replyMessageSubject = strongSelf.presentationInterfaceState.interfaceState.replyMessageSubject - let correlationId = Int64.random(in: 0 ..< Int64.max) - let updatedMessage = message + guard var message else { + self.recorderFeedback?.error() + self.recorderFeedback = nil + self.videoRecorder.set(.single(nil)) + return + } + + let replyMessageSubject = self.presentationInterfaceState.interfaceState.replyMessageSubject + let correlationId = Int64.random(in: 0 ..< Int64.max) + message = message .withUpdatedReplyToMessageId(replyMessageSubject?.subjectModel) .withUpdatedCorrelationId(correlationId) -// .withUpdatedAttributes({ attributes in -// var attributes = attributes -//#if DEBUG -// attributes.append(AutoremoveTimeoutMessageAttribute(timeout: viewOnceTimeout, countdownBeginTime: nil)) -//#endif -// return attributes -// }) - + var usedCorrelationId = false - - if strongSelf.chatDisplayNode.shouldAnimateMessageTransition, let extractedView = videoController.extractVideoSnapshot() { + + if self.chatDisplayNode.shouldAnimateMessageTransition, let extractedView = videoController.extractVideoSnapshot() { usedCorrelationId = true - strongSelf.chatDisplayNode.messageTransitionNode.add(correlationId: correlationId, source: .videoMessage(ChatMessageTransitionNodeImpl.Source.VideoMessage(view: 
extractedView)), initiated: { [weak videoController] in + self.chatDisplayNode.messageTransitionNode.add(correlationId: correlationId, source: .videoMessage(ChatMessageTransitionNodeImpl.Source.VideoMessage(view: extractedView)), initiated: { [weak videoController, weak self] in videoController?.hideVideoSnapshot() - guard let strongSelf = self else { + guard let self else { return } - strongSelf.videoRecorder.set(.single(nil)) + self.videoRecorder.set(.single(nil)) }) } else { - strongSelf.videoRecorder.set(.single(nil)) + self.videoRecorder.set(.single(nil)) } - - strongSelf.chatDisplayNode.setupSendActionOnViewUpdate({ - if let strongSelf = self { - strongSelf.chatDisplayNode.collapseInput() + + self.chatDisplayNode.setupSendActionOnViewUpdate({ [weak self] in + if let self { + self.chatDisplayNode.collapseInput() - strongSelf.updateChatPresentationInterfaceState(animated: true, interactive: false, { - $0.updatedInterfaceState { $0.withUpdatedReplyMessageSubject(nil) } + self.updateChatPresentationInterfaceState(animated: true, interactive: false, { + $0.updatedRecordedMediaPreview(nil).updatedInterfaceState { $0.withUpdatedReplyMessageSubject(nil) } }) } }, usedCorrelationId ? 
correlationId : nil) - - strongSelf.sendMessages([updatedMessage]) + + self.sendMessages([message]) } - }, displaySlowmodeTooltip: { [weak self] view, rect in - self?.interfaceInteraction?.displaySlowmodeTooltip(view, rect) - }, presentSchedulePicker: { [weak self] done in - if let strongSelf = self { - strongSelf.presentScheduleTimePicker(completion: { [weak self] time in - if let strongSelf = self { - done(time) - if strongSelf.presentationInterfaceState.subject != .scheduledMessages && time != scheduleWhenOnlineTimestamp { - strongSelf.openScheduledMessages() - } - } - }) + ) + controller.onResume = { [weak self] in + guard let self else { + return } - }))) + self.resumeMediaRecorder() + } + self.videoRecorder.set(.single(controller)) } } } @@ -15463,6 +15461,7 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G switch updatedAction { case .dismiss: + self.recorderDataDisposable.set(nil) self.chatDisplayNode.updateRecordedMediaDeleted(true) self.audioRecorder.set(.single(nil)) case .preview, .pause: @@ -15474,8 +15473,8 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G return panelState.withUpdatedMediaRecordingState(.waitingForPreview) } }) - let _ = (audioRecorderValue.takenRecordedData() - |> deliverOnMainQueue).startStandalone(next: { [weak self] data in + self.recorderDataDisposable.set((audioRecorderValue.takenRecordedData() + |> deliverOnMainQueue).startStrict(next: { [weak self] data in if let strongSelf = self, let data = data { if data.duration < 0.5 { strongSelf.recorderFeedback?.error() @@ -15485,25 +15484,27 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G return panelState.withUpdatedMediaRecordingState(nil) } }) + strongSelf.recorderDataDisposable.set(nil) } else if let waveform = data.waveform { let resource = LocalFileMediaResource(fileId: Int64.random(in: Int64.min ... 
Int64.max), size: Int64(data.compressedData.count)) strongSelf.context.account.postbox.mediaBox.storeResourceData(resource.id, data: data.compressedData) strongSelf.updateChatPresentationInterfaceState(animated: true, interactive: true, { - $0.updatedRecordedMediaPreview(ChatRecordedMediaPreview(resource: resource, duration: Int32(data.duration), fileSize: Int32(data.compressedData.count), waveform: AudioWaveform(bitstream: waveform, bitsPerSample: 5))).updatedInputTextPanelState { panelState in + $0.updatedRecordedMediaPreview(.audio(ChatRecordedMediaPreview.Audio(resource: resource, fileSize: Int32(data.compressedData.count), duration: Int32(data.duration), waveform: AudioWaveform(bitstream: waveform, bitsPerSample: 5)))).updatedInputTextPanelState { panelState in return panelState.withUpdatedMediaRecordingState(nil) } }) strongSelf.recorderFeedback = nil strongSelf.updateDownButtonVisibility() + strongSelf.recorderDataDisposable.set(nil) } } - }) + })) case let .send(viewOnce): self.chatDisplayNode.updateRecordedMediaDeleted(false) - let _ = (audioRecorderValue.takenRecordedData() - |> deliverOnMainQueue).startStandalone(next: { [weak self] data in + self.recorderDataDisposable.set((audioRecorderValue.takenRecordedData() + |> deliverOnMainQueue).startStrict(next: { [weak self] data in if let strongSelf = self, let data = data { if data.duration < 0.5 { strongSelf.recorderFeedback?.error() @@ -15551,25 +15552,72 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G strongSelf.recorderFeedback?.tap() strongSelf.recorderFeedback = nil + strongSelf.recorderDataDisposable.set(nil) } } - }) + })) } } else if let videoRecorderValue = self.videoRecorderValue { if case .send = updatedAction { self.chatDisplayNode.updateRecordedMediaDeleted(false) - videoRecorderValue.completeVideo() + videoRecorderValue.sendVideoRecording() + self.recorderDataDisposable.set(nil) } else { if case .dismiss = updatedAction { 
self.chatDisplayNode.updateRecordedMediaDeleted(true) + self.recorderDataDisposable.set(nil) } - if case .preview = updatedAction, videoRecorderValue.stopVideo() { - self.updateChatPresentationInterfaceState(animated: true, interactive: true, { - $0.updatedInputTextPanelState { panelState in - return panelState.withUpdatedMediaRecordingState(.video(status: .editing, isLocked: false)) - } - }) - } else { + + switch updatedAction { + case .preview, .pause: + if videoRecorderValue.stopVideoRecording() { + self.recorderDataDisposable.set((videoRecorderValue.takenRecordedData() + |> deliverOnMainQueue).startStrict(next: { [weak self] data in + if let strongSelf = self, let data = data { + if data.duration < 0.5 { + strongSelf.recorderFeedback?.error() + strongSelf.recorderFeedback = nil + strongSelf.updateChatPresentationInterfaceState(animated: true, interactive: true, { + $0.updatedInputTextPanelState { panelState in + return panelState.withUpdatedMediaRecordingState(nil) + } + }) + strongSelf.recorderDataDisposable.set(nil) + strongSelf.videoRecorder.set(.single(nil)) + } else { + strongSelf.updateChatPresentationInterfaceState(animated: true, interactive: true, { + $0.updatedRecordedMediaPreview(.video( + ChatRecordedMediaPreview.Video( + duration: Int32(data.duration), + frames: data.frames, + framesUpdateTimestamp: data.framesUpdateTimestamp, + trimRange: data.trimRange, + control: ChatRecordedMediaPreview.Video.Control( + updateTrimRange: { [weak self] start, end, updatedEnd, apply in + if let self, let videoRecorderValue = self.videoRecorderValue { + videoRecorderValue.updateTrimRange(start: start, end: end, updatedEnd: updatedEnd, apply: apply) + } + } + ) + ) + )).updatedInputTextPanelState { panelState in + return panelState.withUpdatedMediaRecordingState(nil) + } + }) + strongSelf.recorderFeedback = nil + strongSelf.updateDownButtonVisibility() + } + } + })) + +// self.updateChatPresentationInterfaceState(animated: true, interactive: true, { +// 
$0.updatedInputTextPanelState { panelState in +// return panelState.withUpdatedMediaRecordingState(.video(status: .editing, isLocked: false)) +// } +// }) + } + default: + self.recorderDataDisposable.set(nil) self.videoRecorder.set(.single(nil)) } } @@ -15584,13 +15632,9 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G audioRecorderValue.stop() self.audioRecorder.set(.single(nil)) } - } else if let videoRecorderValue = self.videoRecorderValue { - if videoRecorderValue.stopVideo() { - self.updateChatPresentationInterfaceState(animated: true, interactive: true, { - $0.updatedInputTextPanelState { panelState in - return panelState.withUpdatedMediaRecordingState(.video(status: .editing, isLocked: false)) - } - }) + } else if let _ = self.videoRecorderValue { + if let _ = self.presentationInterfaceState.inputTextPanelState.mediaRecordingState { + self.dismissMediaRecorder(pause ? .pause : .preview) } else { self.videoRecorder.set(.single(nil)) } @@ -15606,6 +15650,13 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G return panelState.withUpdatedMediaRecordingState(.audio(recorder: audioRecorderValue, isLocked: true)) }.updatedRecordedMediaPreview(nil) }) + } else if let videoRecorderValue = self.videoRecorderValue { + self.updateChatPresentationInterfaceState(animated: true, interactive: true, { + $0.updatedInputTextPanelState { panelState in + let recordingStatus = videoRecorderValue.recordingStatus + return panelState.withUpdatedMediaRecordingState(.video(status: .recording(InstantVideoControllerRecordingStatus(micLevel: recordingStatus.micLevel, duration: recordingStatus.duration)), isLocked: true)) + }.updatedRecordedMediaPreview(nil) + }) } } @@ -15618,10 +15669,16 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G }) } - self.videoRecorderValue?.lockVideo() + self.videoRecorderValue?.lockVideoRecording() } func deleteMediaRecording() { + if let _ = 
self.audioRecorderValue { + self.audioRecorder.set(.single(nil)) + } else if let _ = self.videoRecorderValue { + self.videoRecorder.set(.single(nil)) + } + self.chatDisplayNode.updateRecordedMediaDeleted(true) self.updateChatPresentationInterfaceState(animated: true, interactive: true, { $0.updatedRecordedMediaPreview(nil) @@ -15632,7 +15689,12 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G func sendMediaRecording(silentPosting: Bool? = nil, scheduleTime: Int32? = nil, viewOnce: Bool = false) { self.chatDisplayNode.updateRecordedMediaDeleted(false) - if let recordedMediaPreview = self.presentationInterfaceState.recordedMediaPreview { + guard let recordedMediaPreview = self.presentationInterfaceState.recordedMediaPreview else { + return + } + + switch recordedMediaPreview { + case let .audio(audio): var isScheduledMessages = false if case .scheduledMessages = self.presentationInterfaceState.subject { isScheduledMessages = true @@ -15645,7 +15707,7 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G return } - let waveformBuffer = recordedMediaPreview.waveform.makeBitstream() + let waveformBuffer = audio.waveform.makeBitstream() self.chatDisplayNode.setupSendActionOnViewUpdate({ [weak self] in if let strongSelf = self { @@ -15664,7 +15726,7 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G attributes.append(AutoremoveTimeoutMessageAttribute(timeout: viewOnceTimeout, countdownBeginTime: nil)) } - let messages: [EnqueueMessage] = [.message(text: "", attributes: attributes, inlineStickers: [:], mediaReference: .standalone(media: TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.LocalFile, id: Int64.random(in: Int64.min ... 
Int64.max)), partialReference: nil, resource: recordedMediaPreview.resource, previewRepresentations: [], videoThumbnails: [], immediateThumbnailData: nil, mimeType: "audio/ogg", size: Int64(recordedMediaPreview.fileSize), attributes: [.Audio(isVoice: true, duration: Int(recordedMediaPreview.duration), title: nil, performer: nil, waveform: waveformBuffer)])), threadId: self.chatLocation.threadId, replyToMessageId: self.presentationInterfaceState.interfaceState.replyMessageSubject?.subjectModel, replyToStoryId: nil, localGroupingKey: nil, correlationId: nil, bubbleUpEmojiOrStickersets: [])] + let messages: [EnqueueMessage] = [.message(text: "", attributes: attributes, inlineStickers: [:], mediaReference: .standalone(media: TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.LocalFile, id: Int64.random(in: Int64.min ... Int64.max)), partialReference: nil, resource: audio.resource, previewRepresentations: [], videoThumbnails: [], immediateThumbnailData: nil, mimeType: "audio/ogg", size: Int64(audio.fileSize), attributes: [.Audio(isVoice: true, duration: Int(audio.duration), title: nil, performer: nil, waveform: waveformBuffer)])), threadId: self.chatLocation.threadId, replyToMessageId: self.presentationInterfaceState.interfaceState.replyMessageSubject?.subjectModel, replyToStoryId: nil, localGroupingKey: nil, correlationId: nil, bubbleUpEmojiOrStickersets: [])] let transformedMessages: [EnqueueMessage] if let silentPosting = silentPosting { @@ -15687,6 +15749,8 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G }) donateSendMessageIntent(account: self.context.account, sharedContext: self.context.sharedContext, intentContext: .chat, peerIds: [peerId]) + case .video: + self.videoRecorderValue?.sendVideoRecording() } } diff --git a/submodules/TelegramUI/Sources/ChatHistoryListNode.swift b/submodules/TelegramUI/Sources/ChatHistoryListNode.swift index ccd7802233..1162b06b54 100644 --- 
a/submodules/TelegramUI/Sources/ChatHistoryListNode.swift +++ b/submodules/TelegramUI/Sources/ChatHistoryListNode.swift @@ -331,7 +331,8 @@ private func extractAssociatedData( maxReadStoryId: Int32?, recommendedChannels: RecommendedChannels?, audioTranscriptionTrial: AudioTranscription.TrialState, - chatThemes: [TelegramTheme] + chatThemes: [TelegramTheme], + deviceContactsNumbers: Set ) -> ChatMessageItemAssociatedData { var automaticDownloadPeerId: EnginePeer.Id? var automaticMediaDownloadPeerType: MediaAutoDownloadPeerType = .channel @@ -386,7 +387,7 @@ private func extractAssociatedData( automaticDownloadPeerId = message.peerId } - return ChatMessageItemAssociatedData(automaticDownloadPeerType: automaticMediaDownloadPeerType, automaticDownloadPeerId: automaticDownloadPeerId, automaticDownloadNetworkType: automaticDownloadNetworkType, isRecentActions: false, subject: subject, contactsPeerIds: contactsPeerIds, channelDiscussionGroup: channelDiscussionGroup, animatedEmojiStickers: animatedEmojiStickers, additionalAnimatedEmojiStickers: additionalAnimatedEmojiStickers, currentlyPlayingMessageId: currentlyPlayingMessageId, isCopyProtectionEnabled: isCopyProtectionEnabled, availableReactions: availableReactions, defaultReaction: defaultReaction, isPremium: isPremium, accountPeer: accountPeer, alwaysDisplayTranscribeButton: alwaysDisplayTranscribeButton, topicAuthorId: topicAuthorId, hasBots: hasBots, translateToLanguage: translateToLanguage, maxReadStoryId: maxReadStoryId, recommendedChannels: recommendedChannels, audioTranscriptionTrial: audioTranscriptionTrial, chatThemes: chatThemes) + return ChatMessageItemAssociatedData(automaticDownloadPeerType: automaticMediaDownloadPeerType, automaticDownloadPeerId: automaticDownloadPeerId, automaticDownloadNetworkType: automaticDownloadNetworkType, isRecentActions: false, subject: subject, contactsPeerIds: contactsPeerIds, channelDiscussionGroup: channelDiscussionGroup, animatedEmojiStickers: animatedEmojiStickers, 
additionalAnimatedEmojiStickers: additionalAnimatedEmojiStickers, currentlyPlayingMessageId: currentlyPlayingMessageId, isCopyProtectionEnabled: isCopyProtectionEnabled, availableReactions: availableReactions, defaultReaction: defaultReaction, isPremium: isPremium, accountPeer: accountPeer, alwaysDisplayTranscribeButton: alwaysDisplayTranscribeButton, topicAuthorId: topicAuthorId, hasBots: hasBots, translateToLanguage: translateToLanguage, maxReadStoryId: maxReadStoryId, recommendedChannels: recommendedChannels, audioTranscriptionTrial: audioTranscriptionTrial, chatThemes: chatThemes, deviceContactsNumbers: deviceContactsNumbers) } private extension ChatHistoryLocationInput { @@ -1413,6 +1414,9 @@ public final class ChatHistoryListNodeImpl: ListView, ChatHistoryNode, ChatHisto let chatThemes = self.context.engine.themes.getChatThemes(accountManager: self.context.sharedContext.accountManager) + let deviceContactsNumbers = self.context.sharedContext.deviceContactPhoneNumbers.get() + |> distinctUntilChanged + let messageViewQueue = Queue.mainQueue() let historyViewTransitionDisposable = combineLatest(queue: messageViewQueue, historyViewUpdate, @@ -1434,8 +1438,9 @@ public final class ChatHistoryListNodeImpl: ListView, ChatHistoryNode, ChatHisto maxReadStoryId, recommendedChannels, audioTranscriptionTrial, - chatThemes - ).startStrict(next: { [weak self] update, chatPresentationData, selectedMessages, updatingMedia, networkType, animatedEmojiStickers, additionalAnimatedEmojiStickers, customChannelDiscussionReadState, customThreadOutgoingReadState, availableReactions, defaultReaction, accountPeer, suggestAudioTranscription, promises, topicAuthorId, translationState, maxReadStoryId, recommendedChannels, audioTranscriptionTrial, chatThemes in + chatThemes, + deviceContactsNumbers + ).startStrict(next: { [weak self] update, chatPresentationData, selectedMessages, updatingMedia, networkType, animatedEmojiStickers, additionalAnimatedEmojiStickers, 
customChannelDiscussionReadState, customThreadOutgoingReadState, availableReactions, defaultReaction, accountPeer, suggestAudioTranscription, promises, topicAuthorId, translationState, maxReadStoryId, recommendedChannels, audioTranscriptionTrial, chatThemes, deviceContactsNumbers in let (historyAppearsCleared, pendingUnpinnedAllMessages, pendingRemovedMessages, currentlyPlayingMessageIdAndType, scrollToMessageId, chatHasBots, allAdMessages) = promises func applyHole() { @@ -1594,7 +1599,7 @@ public final class ChatHistoryListNodeImpl: ListView, ChatHistoryNode, ChatHisto translateToLanguage = languageCode } - let associatedData = extractAssociatedData(chatLocation: chatLocation, view: view, automaticDownloadNetworkType: networkType, animatedEmojiStickers: animatedEmojiStickers, additionalAnimatedEmojiStickers: additionalAnimatedEmojiStickers, subject: subject, currentlyPlayingMessageId: currentlyPlayingMessageIdAndType?.0, isCopyProtectionEnabled: isCopyProtectionEnabled, availableReactions: availableReactions, defaultReaction: defaultReaction, isPremium: isPremium, alwaysDisplayTranscribeButton: alwaysDisplayTranscribeButton, accountPeer: accountPeer, topicAuthorId: topicAuthorId, hasBots: chatHasBots, translateToLanguage: translateToLanguage, maxReadStoryId: maxReadStoryId, recommendedChannels: recommendedChannels, audioTranscriptionTrial: audioTranscriptionTrial, chatThemes: chatThemes) + let associatedData = extractAssociatedData(chatLocation: chatLocation, view: view, automaticDownloadNetworkType: networkType, animatedEmojiStickers: animatedEmojiStickers, additionalAnimatedEmojiStickers: additionalAnimatedEmojiStickers, subject: subject, currentlyPlayingMessageId: currentlyPlayingMessageIdAndType?.0, isCopyProtectionEnabled: isCopyProtectionEnabled, availableReactions: availableReactions, defaultReaction: defaultReaction, isPremium: isPremium, alwaysDisplayTranscribeButton: alwaysDisplayTranscribeButton, accountPeer: accountPeer, topicAuthorId: topicAuthorId, 
hasBots: chatHasBots, translateToLanguage: translateToLanguage, maxReadStoryId: maxReadStoryId, recommendedChannels: recommendedChannels, audioTranscriptionTrial: audioTranscriptionTrial, chatThemes: chatThemes, deviceContactsNumbers: deviceContactsNumbers) let filteredEntries = chatHistoryEntriesForView( location: chatLocation, @@ -4135,8 +4140,12 @@ public final class ChatHistoryListNodeImpl: ListView, ChatHistoryNode, ChatHisto func voicePlaylistItemChanged(_ previousItem: SharedMediaPlaylistItem?, _ currentItem: SharedMediaPlaylistItem?) -> Void { if let currentItemId = currentItem?.id as? PeerMessagesMediaPlaylistItemId { - let isVideo = currentItem?.playbackData?.type == .instantVideo - self.currentlyPlayingMessageIdPromise.set(.single((currentItemId.messageIndex, isVideo))) + if let source = currentItem?.playbackData?.source, case let .telegramFile(_, _, isViewOnce) = source, isViewOnce { + self.currentlyPlayingMessageIdPromise.set(.single(nil)) + } else { + let isVideo = currentItem?.playbackData?.type == .instantVideo + self.currentlyPlayingMessageIdPromise.set(.single((currentItemId.messageIndex, isVideo))) + } } else { self.currentlyPlayingMessageIdPromise.set(.single(nil)) } diff --git a/submodules/TelegramUI/Sources/ChatInterfaceStateContextMenus.swift b/submodules/TelegramUI/Sources/ChatInterfaceStateContextMenus.swift index 8308bbe67d..c500599d19 100644 --- a/submodules/TelegramUI/Sources/ChatInterfaceStateContextMenus.swift +++ b/submodules/TelegramUI/Sources/ChatInterfaceStateContextMenus.swift @@ -1765,14 +1765,7 @@ func contextMenuForChatPresentationInterfaceState(chatPresentationInterfaceState if message.id.peerId.namespace == Namespaces.Peer.CloudUser { if let stats, stats.peers.isEmpty { c.dismiss(completion: { - var replaceImpl: ((ViewController) -> Void)? 
- let controller = PremiumDemoScreen(context: context, subject: .emojiStatus, action: { - let controller = PremiumIntroScreen(context: context, source: .settings) - replaceImpl?(controller) - }) - replaceImpl = { [weak controller] c in - controller?.replace(with: c) - } + let controller = context.sharedContext.makePremiumPrivacyControllerController(context: context, subject: .readTime, peerId: peer.id) controllerInteraction.navigationController()?.pushViewController(controller) }) } diff --git a/submodules/TelegramUI/Sources/ChatMessageContextControllerContentSource.swift b/submodules/TelegramUI/Sources/ChatMessageContextControllerContentSource.swift index 61fa9d1913..1e338807f7 100644 --- a/submodules/TelegramUI/Sources/ChatMessageContextControllerContentSource.swift +++ b/submodules/TelegramUI/Sources/ChatMessageContextControllerContentSource.swift @@ -110,6 +110,8 @@ final class ChatViewOnceMessageContextExtractedContentSource: ContextExtractedCo let blurBackground: Bool = true let centerVertically: Bool = true + var initialAppearanceOffset: CGPoint = .zero + private let context: AccountContext private let presentationData: PresentationData private weak var chatNode: ChatControllerNode? @@ -169,6 +171,9 @@ final class ChatViewOnceMessageContextExtractedContentSource: ContextExtractedCo self.idleTimerExtensionDisposable.set(self.context.sharedContext.applicationBindings.pushIdleTimerExtension()) + let isIncoming = self.message.effectivelyIncoming(self.context.account.peerId) + let isVideo = (self.message.media.first(where: { $0 is TelegramMediaFile }) as? TelegramMediaFile)?.isInstantVideo ?? false + var result: ContextControllerTakeViewInfo? var sourceNode: ContextExtractedContentContainingNode? 
var sourceRect: CGRect = .zero @@ -181,19 +186,19 @@ final class ChatViewOnceMessageContextExtractedContentSource: ContextExtractedCo } if item.content.contains(where: { $0.0.stableId == self.message.stableId }), let contentNode = itemNode.getMessageContextSourceNode(stableId: self.message.stableId) { sourceNode = contentNode - sourceRect = itemNode.frame + sourceRect = contentNode.view.convert(contentNode.bounds, to: chatNode.view) + if !isVideo { + sourceRect.origin.y -= 2.0 + UIScreenPixel + } } } - let isIncoming = self.message.effectivelyIncoming(self.context.account.peerId) - let isVideo = (self.message.media.first(where: { $0 is TelegramMediaFile }) as? TelegramMediaFile)?.isInstantVideo ?? false - var tooltipSourceRect: CGRect = .zero if let sourceNode { var bubbleWidth: CGFloat = 0.0 - if (isIncoming || "".isEmpty) && !isVideo { + if (isIncoming || "".isEmpty) { let messageItem = self.context.sharedContext.makeChatMessagePreviewItem( context: self.context, messages: [self.message], @@ -211,10 +216,12 @@ final class ChatViewOnceMessageContextExtractedContentSource: ContextExtractedCo availableReactions: nil, accountPeer: nil, isCentered: false, - isPreview: false + isPreview: false, + isStandalone: true ) - let params = ListViewItemLayoutParams(width: chatNode.historyNode.frame.width, leftInset: validLayout.safeInsets.left, rightInset: validLayout.safeInsets.right, availableHeight: chatNode.historyNode.frame.height, isStandalone: false) + let width = chatNode.historyNode.frame.width + let params = ListViewItemLayoutParams(width: width, leftInset: validLayout.safeInsets.left, rightInset: validLayout.safeInsets.right, availableHeight: chatNode.historyNode.frame.height, isStandalone: false) var node: ListViewItemNode? 
messageItem.nodeConfiguredForParams(async: { $0() }, params: params, synchronousLoads: false, previousItem: nil, nextItem: nil, completion: { messageNode, apply in @@ -223,11 +230,19 @@ final class ChatViewOnceMessageContextExtractedContentSource: ContextExtractedCo }) if let messageNode = node as? ChatMessageItemView, let copyContentNode = messageNode.getMessageContextSourceNode(stableId: self.message.stableId) { - messageNode.frame.origin.y = chatNode.frame.height - sourceRect.origin.y - sourceRect.size.height + self.initialAppearanceOffset = CGPoint(x: 0.0, y: width - 20.0 - copyContentNode.frame.height) + + messageNode.frame.origin.y = sourceRect.origin.y chatNode.addSubnode(messageNode) result = ContextControllerTakeViewInfo(containingItem: .node(copyContentNode), contentAreaInScreenSpace: chatNode.convert(chatNode.frameForVisibleArea(), to: nil)) bubbleWidth = copyContentNode.contentNode.subnodes?.first?.frame.width ?? messageNode.frame.width + + if isVideo { + messageItem.updateNode(async: { $0() }, node: { return messageNode }, params: params, previousItem: nil, nextItem: nil, animation: .System(duration: 0.4, transition: ControlledTransition(duration: 0.4, curve: .spring, interactive: false)), completion: { (layout, apply) in + apply(ListViewItemApply(isOnScreen: true)) + }) + } } self.messageNodeCopy = node as? 
ChatMessageItemView diff --git a/submodules/TelegramUI/Sources/ChatRecordingPreviewInputPanelNode.swift b/submodules/TelegramUI/Sources/ChatRecordingPreviewInputPanelNode.swift index c06841f2a1..e81e3ba61c 100644 --- a/submodules/TelegramUI/Sources/ChatRecordingPreviewInputPanelNode.swift +++ b/submodules/TelegramUI/Sources/ChatRecordingPreviewInputPanelNode.swift @@ -17,6 +17,8 @@ import AudioWaveformNode import ChatInputPanelNode import TooltipUI import TelegramNotices +import ComponentFlow +import MediaScrubberComponent extension AudioWaveformNode: CustomMediaPlayerScrubbingForegroundNode { } @@ -67,13 +69,15 @@ final class ChatRecordingPreviewInputPanelNode: ChatInputPanelNode { private let waveformButton: ASButtonNode let waveformBackgroundNode: ASImageNode - private var viewOnce = false + let scrubber = ComponentView() + + var viewOnce = false let viewOnceButton: ChatRecordingViewOnceButtonNode let recordMoreButton: ChatRecordingViewOnceButtonNode private let waveformNode: AudioWaveformNode private let waveformForegroundNode: AudioWaveformNode - let waveformScubberNode: MediaPlayerScrubbingNode + let waveformScrubberNode: MediaPlayerScrubbingNode private var presentationInterfaceState: ChatPresentationInterfaceState? 
@@ -129,7 +133,7 @@ final class ChatRecordingPreviewInputPanelNode: ChatInputPanelNode { self.waveformForegroundNode = AudioWaveformNode() self.waveformForegroundNode.isLayerBacked = true - self.waveformScubberNode = MediaPlayerScrubbingNode(content: .custom(backgroundNode: self.waveformNode, foregroundContentNode: self.waveformForegroundNode)) + self.waveformScrubberNode = MediaPlayerScrubbingNode(content: .custom(backgroundNode: self.waveformNode, foregroundContentNode: self.waveformForegroundNode)) self.durationLabel = MediaPlayerTimeTextNode(textColor: theme.chat.inputPanel.actionControlForegroundColor) self.durationLabel.alignment = .right @@ -156,7 +160,7 @@ final class ChatRecordingPreviewInputPanelNode: ChatInputPanelNode { self.deleteButton.addSubnode(self.binNode) self.addSubnode(self.waveformBackgroundNode) self.addSubnode(self.sendButton) - self.addSubnode(self.waveformScubberNode) + self.addSubnode(self.waveformScrubberNode) self.addSubnode(self.playButton) self.addSubnode(self.durationLabel) self.addSubnode(self.waveformButton) @@ -202,6 +206,8 @@ final class ChatRecordingPreviewInputPanelNode: ChatInputPanelNode { viewForOverlayContent.addSubnode(self.viewOnceButton) viewForOverlayContent.addSubnode(self.recordMoreButton) } + + self.view.disablesInteractiveTransitionGestureRecognizer = true } private func maybePresentViewOnceTooltip() { @@ -242,35 +248,98 @@ final class ChatRecordingPreviewInputPanelNode: ChatInputPanelNode { } self.presentationInterfaceState = interfaceState - - if let recordedMediaPreview = interfaceState.recordedMediaPreview, updateWaveform { - self.waveformNode.setup(color: interfaceState.theme.chat.inputPanel.actionControlForegroundColor.withAlphaComponent(0.5), gravity: .center, waveform: recordedMediaPreview.waveform) - self.waveformForegroundNode.setup(color: interfaceState.theme.chat.inputPanel.actionControlForegroundColor, gravity: .center, waveform: recordedMediaPreview.waveform) - - if self.mediaPlayer != nil { - 
self.mediaPlayer?.pause() - } - if let context = self.context { - let mediaManager = context.sharedContext.mediaManager - let mediaPlayer = MediaPlayer(audioSessionManager: mediaManager.audioSession, postbox: context.account.postbox, userLocation: .other, userContentType: .audio, resourceReference: .standalone(resource: recordedMediaPreview.resource), streamable: .none, video: false, preferSoftwareDecoding: false, enableSound: true, fetchAutomatically: true) - mediaPlayer.actionAtEnd = .action { [weak mediaPlayer] in - mediaPlayer?.seek(timestamp: 0.0) + + if let recordedMediaPreview = interfaceState.recordedMediaPreview, let context = self.context { + switch recordedMediaPreview { + case let .audio(audio): + self.waveformButton.isHidden = false + self.waveformBackgroundNode.isHidden = false + self.waveformForegroundNode.isHidden = false + self.waveformScrubberNode.isHidden = false + self.playButton.isHidden = false + self.durationLabel.isHidden = false + + if let view = self.scrubber.view, view.superview != nil { + view.removeFromSuperview() } - self.mediaPlayer = mediaPlayer - self.durationLabel.defaultDuration = Double(recordedMediaPreview.duration) - self.durationLabel.status = mediaPlayer.status - self.waveformScubberNode.status = mediaPlayer.status - self.statusDisposable.set((mediaPlayer.status + + if updateWaveform { + self.waveformNode.setup(color: interfaceState.theme.chat.inputPanel.actionControlForegroundColor.withAlphaComponent(0.5), gravity: .center, waveform: audio.waveform) + self.waveformForegroundNode.setup(color: interfaceState.theme.chat.inputPanel.actionControlForegroundColor, gravity: .center, waveform: audio.waveform) + if self.mediaPlayer != nil { + self.mediaPlayer?.pause() + } + let mediaManager = context.sharedContext.mediaManager + let mediaPlayer = MediaPlayer(audioSessionManager: mediaManager.audioSession, postbox: context.account.postbox, userLocation: .other, userContentType: .audio, resourceReference: .standalone(resource: 
audio.resource), streamable: .none, video: false, preferSoftwareDecoding: false, enableSound: true, fetchAutomatically: true) + mediaPlayer.actionAtEnd = .action { [weak mediaPlayer] in + mediaPlayer?.seek(timestamp: 0.0) + } + self.mediaPlayer = mediaPlayer + self.durationLabel.defaultDuration = Double(audio.duration) + self.durationLabel.status = mediaPlayer.status + self.waveformScrubberNode.status = mediaPlayer.status + self.statusDisposable.set((mediaPlayer.status |> deliverOnMainQueue).startStrict(next: { [weak self] status in - if let strongSelf = self { - switch status.status { + if let strongSelf = self { + switch status.status { case .playing, .buffering(_, true, _, _): strongSelf.playPauseIconNode.enqueueState(.pause, animated: true) default: strongSelf.playPauseIconNode.enqueueState(.play, animated: true) + } } + })) + } + case let .video(video): + self.waveformButton.isHidden = true + self.waveformBackgroundNode.isHidden = true + self.waveformForegroundNode.isHidden = true + self.waveformScrubberNode.isHidden = true + self.playButton.isHidden = true + self.durationLabel.isHidden = true + + let scrubberSize = self.scrubber.update( + transition: .immediate, + component: AnyComponent( + MediaScrubberComponent( + context: context, + style: .videoMessage, + theme: interfaceState.theme, + generationTimestamp: 0, + position: 0, + minDuration: 1.0, + maxDuration: 60.0, + isPlaying: false, + tracks: [ + MediaScrubberComponent.Track( + id: 0, + content: .video(frames: video.frames, framesUpdateTimestamp: video.framesUpdateTimestamp), + duration: Double(video.duration), + trimRange: video.trimRange, + offset: nil, + isMain: true + ) + ], + positionUpdated: { _, _ in }, + trackTrimUpdated: { _, start, end, updatedEnd, apply in + video.control.updateTrimRange(start, end, updatedEnd, apply) + }, + trackOffsetUpdated: { _, _, _ in }, + trackLongPressed: { _, _ in } + ) + ), + environment: {}, + forceUpdate: false, + containerSize: CGSize(width: width - leftInset - 
rightInset - 45.0 * 2.0, height: 33.0) + ) + + if let view = self.scrubber.view { + if view.superview == nil { + self.view.addSubview(view) } - })) + + view.frame = CGRect(origin: CGPoint(x: leftInset + 45.0, y: 7.0 - UIScreenPixel), size: scrubberSize) + } } } } @@ -285,12 +354,17 @@ final class ChatRecordingPreviewInputPanelNode: ChatInputPanelNode { transition.updateFrame(node: self.sendButton, frame: CGRect(origin: CGPoint(x: width - rightInset - 43.0 - UIScreenPixel, y: 2 - UIScreenPixel), size: CGSize(width: 44.0, height: 44))) self.binNode.frame = self.deleteButton.bounds + var viewOnceOffset: CGFloat = 0.0 + if interfaceState.interfaceState.replyMessageSubject != nil { + viewOnceOffset = -35.0 + } + let viewOnceSize = self.viewOnceButton.update(theme: interfaceState.theme) - let viewOnceButtonFrame = CGRect(origin: CGPoint(x: width - rightInset - 44.0 - UIScreenPixel, y: -64.0 - 53.0), size: viewOnceSize) + let viewOnceButtonFrame = CGRect(origin: CGPoint(x: width - rightInset - 44.0 - UIScreenPixel, y: -64.0 - 53.0 + viewOnceOffset), size: viewOnceSize) transition.updateFrame(node: self.viewOnceButton, frame: viewOnceButtonFrame) let recordMoreSize = self.recordMoreButton.update(theme: interfaceState.theme) - let recordMoreButtonFrame = CGRect(origin: CGPoint(x: width - rightInset - 44.0 - UIScreenPixel, y: -64.0), size: recordMoreSize) + let recordMoreButtonFrame = CGRect(origin: CGPoint(x: width - rightInset - 44.0 - UIScreenPixel, y: -64.0 + viewOnceOffset), size: recordMoreSize) transition.updateFrame(node: self.recordMoreButton, frame: recordMoreButtonFrame) var isScheduledMessages = false @@ -327,7 +401,7 @@ final class ChatRecordingPreviewInputPanelNode: ChatInputPanelNode { let waveformBackgroundFrame = CGRect(origin: CGPoint(x: leftInset + 45.0, y: 7.0 - UIScreenPixel), size: CGSize(width: width - leftInset - rightInset - 90.0, height: 33.0)) transition.updateFrame(node: self.waveformBackgroundNode, frame: waveformBackgroundFrame) 
transition.updateFrame(node: self.waveformButton, frame: CGRect(origin: CGPoint(x: leftInset + 45.0, y: 0.0), size: CGSize(width: width - leftInset - rightInset - 90.0, height: panelHeight))) - transition.updateFrame(node: self.waveformScubberNode, frame: CGRect(origin: CGPoint(x: leftInset + 45.0 + 35.0, y: 7.0 + floor((33.0 - 13.0) / 2.0)), size: CGSize(width: width - leftInset - rightInset - 90.0 - 45.0 - 40.0, height: 13.0))) + transition.updateFrame(node: self.waveformScrubberNode, frame: CGRect(origin: CGPoint(x: leftInset + 45.0 + 35.0, y: 7.0 + floor((33.0 - 13.0) / 2.0)), size: CGSize(width: width - leftInset - rightInset - 90.0 - 45.0 - 40.0, height: 13.0))) transition.updateFrame(node: self.durationLabel, frame: CGRect(origin: CGPoint(x: width - rightInset - 90.0 - 4.0, y: 15.0), size: CGSize(width: 35.0, height: 20.0))) prevInputPanelNode?.frame = CGRect(origin: .zero, size: CGSize(width: width, height: panelHeight)) @@ -369,6 +443,11 @@ final class ChatRecordingPreviewInputPanelNode: ChatInputPanelNode { prevTextInputPanelNode.actionButtons.micButton.animateOut(true) + if let view = self.scrubber.view { + view.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.15) + view.layer.animatePosition(from: CGPoint(x: 0.0, y: 64.0), to: .zero, duration: 0.3, timingFunction: kCAMediaTimingFunctionSpring, additive: true) + } + self.deleteButton.layer.animateScale(from: 0.3, to: 1.0, duration: 0.15) self.deleteButton.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.15) @@ -377,8 +456,8 @@ final class ChatRecordingPreviewInputPanelNode: ChatInputPanelNode { self.durationLabel.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.3, delay: 0.1) - self.waveformScubberNode.layer.animateScaleY(from: 0.1, to: 1.0, duration: 0.3, delay: 0.1) - self.waveformScubberNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2, delay: 0.1) + self.waveformScrubberNode.layer.animateScaleY(from: 0.1, to: 1.0, duration: 0.3, delay: 0.1) + 
self.waveformScrubberNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2, delay: 0.1) self.waveformBackgroundNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.15) self.waveformBackgroundNode.layer.animateFrame( @@ -405,6 +484,7 @@ final class ChatRecordingPreviewInputPanelNode: ChatInputPanelNode { } @objc func deletePressed() { + self.viewOnce = false self.tooltipController?.dismiss() self.mediaPlayer?.pause() @@ -412,6 +492,7 @@ final class ChatRecordingPreviewInputPanelNode: ChatInputPanelNode { } @objc func sendPressed() { + self.viewOnce = false self.tooltipController?.dismiss() self.interfaceInteraction?.sendRecordedMedia(false, self.viewOnce) diff --git a/submodules/TelegramUI/Sources/ChatTextInputActionButtonsNode.swift b/submodules/TelegramUI/Sources/ChatTextInputActionButtonsNode.swift index 466642e10b..e748ee429d 100644 --- a/submodules/TelegramUI/Sources/ChatTextInputActionButtonsNode.swift +++ b/submodules/TelegramUI/Sources/ChatTextInputActionButtonsNode.swift @@ -47,7 +47,7 @@ final class ChatTextInputActionButtonsNode: ASDisplayNode { let strings = presentationInterfaceState.strings self.strings = strings - self.micButton = ChatTextInputMediaRecordingButton(context: context, theme: theme, strings: strings, presentController: presentController) + self.micButton = ChatTextInputMediaRecordingButton(context: context, theme: theme, pause: true, strings: strings, presentController: presentController) self.sendContainerNode = ASDisplayNode() self.sendContainerNode.layer.allowsGroupOpacity = true diff --git a/submodules/TelegramUI/Sources/ChatTextInputPanelNode.swift b/submodules/TelegramUI/Sources/ChatTextInputPanelNode.swift index 40c20b121e..2cd30571ff 100644 --- a/submodules/TelegramUI/Sources/ChatTextInputPanelNode.swift +++ b/submodules/TelegramUI/Sources/ChatTextInputPanelNode.swift @@ -591,6 +591,7 @@ class ChatTextInputPanelNode: ChatInputPanelNode, ASEditableTextNodeDelegate, Ch private var extendedSearchLayout = false var 
isMediaDeleted: Bool = false + private var recordingPaused = false private let inputMenu: TextInputMenu @@ -2054,13 +2055,14 @@ class ChatTextInputPanelNode: ChatInputPanelNode, ASEditableTextNodeDelegate, Ch self.actionButtons.micButton.audioRecorder = recorder audioRecordingTimeNode.audioRecorder = recorder case let .video(status, _): + let hadVideoRecorder = self.actionButtons.micButton.videoRecordingStatus != nil + if !hadVideoRecorder, isLocked { + self.actionButtons.micButton.lock() + } switch status { case let .recording(recordingStatus): audioRecordingTimeNode.videoRecordingStatus = recordingStatus self.actionButtons.micButton.videoRecordingStatus = recordingStatus - if isLocked { - audioRecordingCancelIndicator.layer.animateAlpha(from: audioRecordingCancelIndicator.alpha, to: 0, duration: 0.15, delay: 0, removeOnCompletion: false) - } case .editing: audioRecordingTimeNode.videoRecordingStatus = nil self.actionButtons.micButton.videoRecordingStatus = nil @@ -2093,7 +2095,6 @@ class ChatTextInputPanelNode: ChatInputPanelNode, ASEditableTextNodeDelegate, Ch size: audioRecordingCancelIndicator.bounds.size) audioRecordingCancelIndicator.frame = audioRecordingCancelIndicatorFrame if self.actionButtons.micButton.cancelTranslation > cancelTransformThreshold { - //let progress = 1 - (self.actionButtons.micButton.cancelTranslation - cancelTransformThreshold) / 80 let progress: CGFloat = max(0.0, min(1.0, (audioRecordingCancelIndicatorFrame.minX - 100.0) / 10.0)) audioRecordingCancelIndicator.alpha = progress } else { @@ -2144,6 +2145,8 @@ class ChatTextInputPanelNode: ChatInputPanelNode, ASEditableTextNodeDelegate, Ch audioRecordingTimeNode.layer.animateAlpha(from: 0, to: 1, duration: 0.5, timingFunction: kCAMediaTimingFunctionSpring) } + let dotFrame = CGRect(origin: CGPoint(x: leftInset + 2.0 - UIScreenPixel, y: audioRecordingTimeNode.frame.midY - 20), size: CGSize(width: 40.0, height: 40)) + var animateDotAppearing = false let audioRecordingDotNode: AnimationNode 
if let currentAudioRecordingDotNode = self.audioRecordingDotNode, !currentAudioRecordingDotNode.didPlay { @@ -2151,35 +2154,64 @@ class ChatTextInputPanelNode: ChatInputPanelNode, ASEditableTextNodeDelegate, Ch } else { self.audioRecordingDotNode?.removeFromSupernode() audioRecordingDotNode = AnimationNode(animation: "BinRed") + self.audioRecordingDotNode = audioRecordingDotNode self.audioRecordingDotNodeDismissed = false self.clippingNode.insertSubnode(audioRecordingDotNode, belowSubnode: self.menuButton) + audioRecordingDotNode.frame = dotFrame + self.animatingBinNode?.removeFromSupernode() self.animatingBinNode = nil } + var resumingRecording = false animateDotAppearing = transition.isAnimated && !hideInfo - if let mediaRecordingState = mediaRecordingState, case .waitingForPreview = mediaRecordingState { - animateDotAppearing = false + if let mediaRecordingState = mediaRecordingState { + if case .waitingForPreview = mediaRecordingState { + self.recordingPaused = true + animateDotAppearing = false + } else { + if self.recordingPaused { + self.recordingPaused = false + resumingRecording = true + + if (audioRecordingDotNode.layer.animationKeys() ?? []).isEmpty { + animateDotAppearing = true + } + } + } } + + audioRecordingDotNode.bounds = CGRect(origin: .zero, size: dotFrame.size) + audioRecordingDotNode.position = dotFrame.center - audioRecordingDotNode.frame = CGRect(origin: CGPoint(x: leftInset + 2.0 - UIScreenPixel, y: audioRecordingTimeNode.frame.midY - 20), size: CGSize(width: 40.0, height: 40)) if animateDotAppearing { - audioRecordingDotNode.layer.animateScale(from: 0.3, to: 1, duration: 0.15, delay: 0, removeOnCompletion: false) - audioRecordingTimeNode.started = { [weak audioRecordingDotNode] in - if let audioRecordingDotNode = audioRecordingDotNode, audioRecordingDotNode.layer.animation(forKey: "recording") == nil { - audioRecordingDotNode.layer.animateAlpha(from: CGFloat(audioRecordingDotNode.layer.presentation()?.opacity ?? 
0), to: 1, duration: 0.15, delay: 0, completion: { [weak audioRecordingDotNode] finished in - if finished { - let animation = CAKeyframeAnimation(keyPath: "opacity") - animation.values = [1.0 as NSNumber, 1.0 as NSNumber, 0.0 as NSNumber] - animation.keyTimes = [0.0 as NSNumber, 0.4546 as NSNumber, 0.9091 as NSNumber, 1 as NSNumber] - animation.duration = 0.5 - animation.autoreverses = true - animation.repeatCount = Float.infinity - - audioRecordingDotNode?.layer.add(animation, forKey: "recording") - } - }) + Queue.mainQueue().justDispatch { + audioRecordingDotNode.layer.animateScale(from: 0.3, to: 1, duration: 0.15, delay: 0, removeOnCompletion: false) + + let animateDot = { [weak audioRecordingDotNode] in + if let audioRecordingDotNode, audioRecordingDotNode.layer.animation(forKey: "recording") == nil { + audioRecordingDotNode.layer.animateAlpha(from: CGFloat(audioRecordingDotNode.layer.presentation()?.opacity ?? 0), to: 1, duration: 0.15, delay: 0, completion: { [weak audioRecordingDotNode] finished in + if finished { + let animation = CAKeyframeAnimation(keyPath: "opacity") + animation.values = [1.0 as NSNumber, 1.0 as NSNumber, 0.0 as NSNumber] + animation.keyTimes = [0.0 as NSNumber, 0.4546 as NSNumber, 0.9091 as NSNumber, 1 as NSNumber] + animation.duration = 0.5 + animation.autoreverses = true + animation.repeatCount = Float.infinity + + audioRecordingDotNode?.layer.add(animation, forKey: "recording") + } + }) + } + } + + if resumingRecording { + animateDot() + } else { + audioRecordingTimeNode.started = { + animateDot() + } } } self.attachmentButton.layer.animateAlpha(from: CGFloat(self.attachmentButton.layer.presentation()?.opacity ?? 
1), to: 0, duration: 0.15, delay: 0, removeOnCompletion: false) @@ -2546,72 +2578,111 @@ class ChatTextInputPanelNode: ChatInputPanelNode, ASEditableTextNodeDelegate, Ch self.updateActionButtons(hasText: inputHasText, hideMicButton: hideMicButton, animated: transition.isAnimated) + var viewOnceIsVisible = false + if let recordingState = interfaceState.inputTextPanelState.mediaRecordingState { + if case let .audio(_, isLocked) = recordingState { + viewOnceIsVisible = isLocked + } else if case let .video(_, isLocked) = recordingState { + viewOnceIsVisible = isLocked + } + } + if let prevInputPanelNode = self.prevInputPanelNode { prevInputPanelNode.frame = CGRect(origin: .zero, size: prevInputPanelNode.frame.size) } if let prevPreviewInputPanelNode = self.prevInputPanelNode as? ChatRecordingPreviewInputPanelNode { self.prevInputPanelNode = nil - if prevPreviewInputPanelNode.viewOnceButton.alpha > 0.0 { - if let snapshotView = prevPreviewInputPanelNode.viewOnceButton.view.snapshotContentTree() { - snapshotView.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2, removeOnCompletion: false, completion: { _ in - snapshotView.removeFromSuperview() - }) - snapshotView.layer.animateScale(from: 1.0, to: 0.0, duration: 0.2, removeOnCompletion: false) - self.viewForOverlayContent?.addSubview(snapshotView) + if !prevPreviewInputPanelNode.viewOnceButton.isHidden { + self.viewOnce = prevPreviewInputPanelNode.viewOnce + self.viewOnceButton.update(isSelected: prevPreviewInputPanelNode.viewOnce, animated: false) + self.viewOnceButton.layer.animatePosition(from: prevPreviewInputPanelNode.viewOnceButton.position, to: self.viewOnceButton.position, duration: 0.3, timingFunction: kCAMediaTimingFunctionSpring, completion: { _ in + }) + } + + let animateOutPreviewButton: (ASDisplayNode) -> Void = { button in + if button.alpha > 0.0 { + if let snapshotView = button.view.snapshotContentTree() { + snapshotView.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2, removeOnCompletion: false, 
completion: { _ in + snapshotView.removeFromSuperview() + }) + snapshotView.layer.animateScale(from: 1.0, to: 0.0, duration: 0.2, removeOnCompletion: false) + self.viewForOverlayContent?.addSubview(snapshotView) + } } } + animateOutPreviewButton(prevPreviewInputPanelNode.viewOnceButton) + animateOutPreviewButton(prevPreviewInputPanelNode.recordMoreButton) + prevPreviewInputPanelNode.gestureRecognizer?.isEnabled = false prevPreviewInputPanelNode.isUserInteractionEnabled = false if self.isMediaDeleted { - func animatePosition(for previewSubnode: ASDisplayNode) { - previewSubnode.layer.animatePosition( - from: previewSubnode.position, - to: CGPoint(x: leftMenuInset.isZero ? previewSubnode.position.x - 20 : leftMenuInset + previewSubnode.frame.width / 2.0, y: previewSubnode.position.y), + func animatePosition(for previewLayer: CALayer) { + previewLayer.animatePosition( + from: previewLayer.position, + to: CGPoint(x: leftMenuInset.isZero ? previewLayer.position.x - 20 : leftMenuInset + previewLayer.frame.width / 2.0, y: previewLayer.position.y), duration: 0.15 ) } - animatePosition(for: prevPreviewInputPanelNode.waveformBackgroundNode) - animatePosition(for: prevPreviewInputPanelNode.waveformScubberNode) - animatePosition(for: prevPreviewInputPanelNode.durationLabel) - animatePosition(for: prevPreviewInputPanelNode.playButton) + animatePosition(for: prevPreviewInputPanelNode.waveformBackgroundNode.layer) + animatePosition(for: prevPreviewInputPanelNode.waveformScrubberNode.layer) + animatePosition(for: prevPreviewInputPanelNode.durationLabel.layer) + animatePosition(for: prevPreviewInputPanelNode.playButton.layer) + if let view = prevPreviewInputPanelNode.scrubber.view { + animatePosition(for: view.layer) + } } - func animateAlpha(for previewSubnode: ASDisplayNode) { - previewSubnode.layer.animateAlpha( + func animateAlpha(for previewLayer: CALayer) { + previewLayer.animateAlpha( from: 1.0, to: 0.0, duration: 0.15, removeOnCompletion: false ) } - animateAlpha(for: 
prevPreviewInputPanelNode.waveformBackgroundNode) - animateAlpha(for: prevPreviewInputPanelNode.waveformScubberNode) - animateAlpha(for: prevPreviewInputPanelNode.durationLabel) - animateAlpha(for: prevPreviewInputPanelNode.playButton) - + animateAlpha(for: prevPreviewInputPanelNode.waveformBackgroundNode.layer) + animateAlpha(for: prevPreviewInputPanelNode.waveformScrubberNode.layer) + animateAlpha(for: prevPreviewInputPanelNode.durationLabel.layer) + animateAlpha(for: prevPreviewInputPanelNode.playButton.layer) + if let view = prevPreviewInputPanelNode.scrubber.view { + animateAlpha(for: view.layer) + } + let binNode = prevPreviewInputPanelNode.binNode self.animatingBinNode = binNode let dismissBin = { [weak self, weak prevPreviewInputPanelNode, weak binNode] in if binNode?.supernode != nil { - prevPreviewInputPanelNode?.deleteButton.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.15, delay: 0, removeOnCompletion: false) { [weak prevPreviewInputPanelNode] _ in + prevPreviewInputPanelNode?.deleteButton.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.15, delay: 0.0, removeOnCompletion: false) { [weak prevPreviewInputPanelNode] _ in if prevPreviewInputPanelNode?.supernode === self { prevPreviewInputPanelNode?.removeFromSupernode() } } - prevPreviewInputPanelNode?.deleteButton.layer.animateScale(from: 1.0, to: 0.3, duration: 0.15, delay: 0, removeOnCompletion: false) + prevPreviewInputPanelNode?.deleteButton.layer.animateScale(from: 1.0, to: 0.3, duration: 0.15, delay: 0.0, removeOnCompletion: false) - self?.attachmentButton.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.15, delay: 0, removeOnCompletion: false) - self?.attachmentButton.layer.animateScale(from: 0.3, to: 1.0, duration: 0.15, delay: 0, removeOnCompletion: false) + if isRecording { + self?.attachmentButton.layer.animateAlpha(from: 0.0, to: 0, duration: 0.01, delay: 0.0, removeOnCompletion: false) + self?.attachmentButton.layer.animateScale(from: 1, to: 0.3, duration: 0.01, delay: 0.0, 
removeOnCompletion: false) + } else { + self?.attachmentButton.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.15, delay: 0.0, removeOnCompletion: false) + self?.attachmentButton.layer.animateScale(from: 0.3, to: 1.0, duration: 0.15, delay: 0.0, removeOnCompletion: false) + } } else if prevPreviewInputPanelNode?.supernode === self { prevPreviewInputPanelNode?.removeFromSupernode() } } if self.isMediaDeleted { + Queue.mainQueue().after(0.5, { + self.isMediaDeleted = false + }) + } + + if self.isMediaDeleted && !isRecording { + self.attachmentButton.layer.animateAlpha(from: 0.0, to: 0, duration: 0.01, delay: 0.0, removeOnCompletion: false) binNode.completion = dismissBin binNode.play() } else { @@ -2638,16 +2709,18 @@ class ChatTextInputPanelNode: ChatInputPanelNode, ASEditableTextNodeDelegate, Ch } } + var clippingDelta: CGFloat = 0.0 + if case let .media(_, _, focused) = interfaceState.inputMode, focused { + clippingDelta = -panelHeight + } + transition.updateFrame(node: self.clippingNode, frame: CGRect(origin: CGPoint(), size: CGSize(width: width, height: panelHeight))) + transition.updateSublayerTransformOffset(layer: self.clippingNode.layer, offset: CGPoint(x: 0.0, y: clippingDelta)) let viewOnceSize = self.viewOnceButton.update(theme: interfaceState.theme) let viewOnceButtonFrame = CGRect(origin: CGPoint(x: width - rightInset - 44.0 - UIScreenPixel, y: -152.0), size: viewOnceSize) self.viewOnceButton.bounds = CGRect(origin: .zero, size: viewOnceButtonFrame.size) transition.updatePosition(node: self.viewOnceButton, position: viewOnceButtonFrame.center) - var viewOnceIsVisible = false - if let recordingState = interfaceState.inputTextPanelState.mediaRecordingState, case let .audio(_, isLocked) = recordingState, isLocked { - viewOnceIsVisible = true - } if self.viewOnceButton.alpha.isZero && viewOnceIsVisible { self.viewOnceButton.update(isSelected: self.viewOnce, animated: false) } @@ -2659,13 +2732,6 @@ class ChatTextInputPanelNode: ChatInputPanelNode, 
ASEditableTextNodeDelegate, Ch self.viewOnceButton.isHidden = true } - var clippingDelta: CGFloat = 0.0 - if case let .media(_, _, focused) = interfaceState.inputMode, focused { - clippingDelta = -panelHeight - } - transition.updateFrame(node: self.clippingNode, frame: CGRect(origin: CGPoint(), size: CGSize(width: width, height: panelHeight))) - transition.updateSublayerTransformOffset(layer: self.clippingNode.layer, offset: CGPoint(x: 0.0, y: clippingDelta)) - return panelHeight } diff --git a/submodules/TelegramUI/Sources/SharedAccountContext.swift b/submodules/TelegramUI/Sources/SharedAccountContext.swift index 170af3c44b..d114ac531c 100644 --- a/submodules/TelegramUI/Sources/SharedAccountContext.swift +++ b/submodules/TelegramUI/Sources/SharedAccountContext.swift @@ -166,6 +166,7 @@ public final class SharedAccountContextImpl: SharedAccountContext { public let enablePreloads = Promise() public let hasPreloadBlockingContent = Promise(false) + public let deviceContactPhoneNumbers = Promise>(Set()) private var accountUserInterfaceInUseContexts: [AccountRecordId: AccountUserInterfaceInUseContext] = [:] @@ -1675,7 +1676,7 @@ public final class SharedAccountContextImpl: SharedAccountContext { return presentAddMembersImpl(context: context, updatedPresentationData: updatedPresentationData, parentController: parentController, groupPeer: groupPeer, selectAddMemberDisposable: selectAddMemberDisposable, addMemberDisposable: addMemberDisposable) } - public func makeChatMessagePreviewItem(context: AccountContext, messages: [Message], theme: PresentationTheme, strings: PresentationStrings, wallpaper: TelegramWallpaper, fontSize: PresentationFontSize, chatBubbleCorners: PresentationChatBubbleCorners, dateTimeFormat: PresentationDateTimeFormat, nameOrder: PresentationPersonNameOrder, forcedResourceStatus: FileMediaResourceStatus?, tapMessage: ((Message) -> Void)?, clickThroughMessage: (() -> Void)? 
= nil, backgroundNode: ASDisplayNode?, availableReactions: AvailableReactions?, accountPeer: Peer?, isCentered: Bool, isPreview: Bool) -> ListViewItem { + public func makeChatMessagePreviewItem(context: AccountContext, messages: [Message], theme: PresentationTheme, strings: PresentationStrings, wallpaper: TelegramWallpaper, fontSize: PresentationFontSize, chatBubbleCorners: PresentationChatBubbleCorners, dateTimeFormat: PresentationDateTimeFormat, nameOrder: PresentationPersonNameOrder, forcedResourceStatus: FileMediaResourceStatus?, tapMessage: ((Message) -> Void)?, clickThroughMessage: (() -> Void)? = nil, backgroundNode: ASDisplayNode?, availableReactions: AvailableReactions?, accountPeer: Peer?, isCentered: Bool, isPreview: Bool, isStandalone: Bool) -> ListViewItem { let controllerInteraction: ChatControllerInteraction controllerInteraction = ChatControllerInteraction(openMessage: { _, _ in @@ -1770,7 +1771,7 @@ public final class SharedAccountContextImpl: SharedAccountContext { chatLocation = .peer(id: messages.first!.id.peerId) } - return ChatMessageItemImpl(presentationData: ChatPresentationData(theme: ChatPresentationThemeData(theme: theme, wallpaper: wallpaper), fontSize: fontSize, strings: strings, dateTimeFormat: dateTimeFormat, nameDisplayOrder: nameOrder, disableAnimations: false, largeEmoji: false, chatBubbleCorners: chatBubbleCorners, animatedEmojiScale: 1.0, isPreview: isPreview), context: context, chatLocation: chatLocation, associatedData: ChatMessageItemAssociatedData(automaticDownloadPeerType: .contact, automaticDownloadPeerId: nil, automaticDownloadNetworkType: .cellular, isRecentActions: false, subject: nil, contactsPeerIds: Set(), animatedEmojiStickers: [:], forcedResourceStatus: forcedResourceStatus, availableReactions: availableReactions, defaultReaction: nil, isPremium: false, accountPeer: accountPeer.flatMap(EnginePeer.init), forceInlineReactions: true), controllerInteraction: controllerInteraction, content: content, disableDate: true, 
additionalContent: nil) + return ChatMessageItemImpl(presentationData: ChatPresentationData(theme: ChatPresentationThemeData(theme: theme, wallpaper: wallpaper), fontSize: fontSize, strings: strings, dateTimeFormat: dateTimeFormat, nameDisplayOrder: nameOrder, disableAnimations: false, largeEmoji: false, chatBubbleCorners: chatBubbleCorners, animatedEmojiScale: 1.0, isPreview: isPreview), context: context, chatLocation: chatLocation, associatedData: ChatMessageItemAssociatedData(automaticDownloadPeerType: .contact, automaticDownloadPeerId: nil, automaticDownloadNetworkType: .cellular, isRecentActions: false, subject: nil, contactsPeerIds: Set(), animatedEmojiStickers: [:], forcedResourceStatus: forcedResourceStatus, availableReactions: availableReactions, defaultReaction: nil, isPremium: false, accountPeer: accountPeer.flatMap(EnginePeer.init), forceInlineReactions: true, isStandalone: isStandalone), controllerInteraction: controllerInteraction, content: content, disableDate: true, additionalContent: nil) } public func makeChatMessageDateHeaderItem(context: AccountContext, timestamp: Int32, theme: PresentationTheme, strings: PresentationStrings, wallpaper: TelegramWallpaper, fontSize: PresentationFontSize, chatBubbleCorners: PresentationChatBubbleCorners, dateTimeFormat: PresentationDateTimeFormat, nameOrder: PresentationPersonNameOrder) -> ListViewItemHeader { @@ -1947,6 +1948,8 @@ public final class SharedAccountContextImpl: SharedAccountContext { mappedSource = .wallpapers case .presence: mappedSource = .presence + case .readTime: + mappedSource = .readTime } let controller = PremiumIntroScreen(context: context, modal: modal, source: mappedSource, forceDark: forceDark) controller.wasDismissed = dismissed @@ -2094,6 +2097,101 @@ public final class SharedAccountContextImpl: SharedAccountContext { return controller } + public func makePremiumPrivacyControllerController(context: AccountContext, subject: PremiumPrivacySubject, peerId: EnginePeer.Id) -> ViewController 
{ + let mappedSubject: PremiumPrivacyScreen.Subject + let introSource: PremiumIntroSource + + switch subject { + case .presence: + mappedSubject = .presence + introSource = .presence + case .readTime: + mappedSubject = .readTime + introSource = .presence + } + + var actionImpl: (() -> Void)? + var openPremiumIntroImpl: (() -> Void)? + + let controller = PremiumPrivacyScreen( + context: context, + subject: mappedSubject, + action: { + actionImpl?() + }, openPremiumIntro: { + openPremiumIntroImpl?() + } + ) + actionImpl = { [weak controller] in + guard let parentController = controller, let navigationController = parentController.navigationController as? NavigationController else { + return + } + + let currentPrivacy = Promise() + currentPrivacy.set(context.engine.privacy.requestAccountPrivacySettings()) + + let presentationData = context.sharedContext.currentPresentationData.with { $0 } + let tooltipText: String + + switch subject { + case .presence: + //TODO:localize + tooltipText = "Your last seen time is now visible." + + let _ = (currentPrivacy.get() + |> take(1) + |> mapToSignal { current in + let presence = current.presence + var disabledFor: [PeerId: SelectivePrivacyPeer] = [:] + switch presence { + case let .enableEveryone(disabledForValue), let .enableContacts(_, disabledForValue): + disabledFor = disabledForValue + default: + break + } + disabledFor.removeValue(forKey: peerId) + + return context.engine.privacy.updateSelectiveAccountPrivacySettings(type: .presence, settings: .enableEveryone(disableFor: disabledFor)) + } + |> deliverOnMainQueue).startStandalone(completed: { [weak navigationController] in + let _ = context.engine.peers.fetchAndUpdateCachedPeerData(peerId: peerId).startStandalone() + + if let parentController = navigationController?.viewControllers.last as? 
ViewController { + parentController.present(UndoOverlayController(presentationData: presentationData, content: .info(title: nil, text: tooltipText, timeout: 4.0, customUndoText: nil), elevatedLayout: false, action: { _ in + return true + }), in: .window(.root)) + } + }) + case .readTime: + tooltipText = "Your read times are now visible." + + let _ = (currentPrivacy.get() + |> take(1) + |> mapToSignal { current in + var settings = current.globalSettings + settings.hideReadTime = false + return context.engine.privacy.updateGlobalPrivacySettings(settings: settings) + } + |> deliverOnMainQueue).startStandalone(completed: { [weak navigationController] in + if let parentController = navigationController?.viewControllers.last as? ViewController { + parentController.present(UndoOverlayController(presentationData: presentationData, content: .info(title: nil, text: tooltipText, timeout: 4.0, customUndoText: nil), elevatedLayout: false, action: { _ in + return true + }), in: .window(.root)) + } + }) + } + } + openPremiumIntroImpl = { [weak controller] in + guard let parentController = controller else { + return + } + let controller = context.sharedContext.makePremiumIntroController(context: context, source: introSource, forceDark: false, dismissed: nil) + parentController.push(controller) + } + + return controller + } + public func makeStickerPackScreen(context: AccountContext, updatedPresentationData: (initial: PresentationData, signal: Signal)?, mainStickerPack: StickerPackReference, stickerPacks: [StickerPackReference], loadedStickerPacks: [LoadedStickerPack], parentNavigationController: NavigationController?, sendSticker: ((FileMediaReference, UIView, CGRect) -> Bool)?) -> ViewController { return StickerPackScreen(context: context, updatedPresentationData: updatedPresentationData, mainStickerPack: mainStickerPack, stickerPacks: stickerPacks, loadedStickerPacks: loadedStickerPacks, parentNavigationController: parentNavigationController, sendSticker: sendSticker) }