diff --git a/Telegram/NotificationService/Sources/NotificationService.swift b/Telegram/NotificationService/Sources/NotificationService.swift
index 51d6de208c..3ffcd997c6 100644
--- a/Telegram/NotificationService/Sources/NotificationService.swift
+++ b/Telegram/NotificationService/Sources/NotificationService.swift
@@ -1707,7 +1707,7 @@ private final class NotificationServiceHandler {
                 } else if let file = media as? TelegramMediaFile {
                     resource = file.resource
                     for attribute in file.attributes {
-                        if case let .Video(_, _, _, preloadSize) = attribute {
+                        if case let .Video(_, _, _, preloadSize, _) = attribute {
                             fetchSize = preloadSize.flatMap(Int64.init)
                         }
                     }
diff --git a/Telegram/Telegram-iOS/en.lproj/Localizable.strings b/Telegram/Telegram-iOS/en.lproj/Localizable.strings
index 757d16d92f..214cb097ca 100644
--- a/Telegram/Telegram-iOS/en.lproj/Localizable.strings
+++ b/Telegram/Telegram-iOS/en.lproj/Localizable.strings
@@ -12537,3 +12537,48 @@ Sorry for the inconvenience.";
 "Conversation.StatusBotSubscribers_any" = "%d users";
 
 "Story.Editor.Add" = "Add";
+
+"WebBrowser.LinkForwardTooltip.Chat.One" = "Link forwarded to **%@**";
+"WebBrowser.LinkForwardTooltip.TwoChats.One" = "Link forwarded to **%@** and **%@**";
+"WebBrowser.LinkForwardTooltip.ManyChats.One" = "Link forwarded to **%@** and %@ others";
+"WebBrowser.LinkForwardTooltip.SavedMessages.One" = "Link forwarded to **Saved Messages**";
+
+"Stars.Intro.StarsSent_1" = "%@ Star sent.";
+"Stars.Intro.StarsSent_any" = "%@ Stars sent.";
+"Stars.Intro.StarsSent.ViewChat" = "View Chat";
+
+"Stars.Gift.Received.Title" = "Received Gift";
+"Stars.Gift.Received.Text" = "Use Stars to unlock content and services on Telegram. [See Examples >]()";
+
+"Stars.Gift.Sent.Title" = "Sent Gift";
+"Stars.Gift.Sent.Text" = "With Stars, %@ will be able to unlock content and services on Telegram. [See Examples >]()";
+
+"WebBrowser.Reload" = "Reload";
+"WebBrowser.Share" = "Share";
+"WebBrowser.AddBookmark" = "Add Bookmark";
+
+"WebBrowser.LinkAddedToBookmarks" = "Link added to [Bookmarks]() and **Saved Messages**.";
+
+"WebBrowser.AddressBar.RecentlyVisited" = "RECENTLY VISITED";
+"WebBrowser.AddressBar.RecentlyVisited.Clear" = "Clear";
+
+"WebBrowser.AddressBar.Bookmarks" = "BOOKMARKS";
+
+"WebBrowser.OpenLinksIn.Title" = "OPEN LINKS IN";
+"WebBrowser.AutoLogin" = "Auto-Login via Telegram";
+"WebBrowser.AutoLogin.Info" = "Use your Telegram account to automatically log in to websites opened in the in-app browser.";
+
+"WebBrowser.ClearCookies" = "Clear Cookies";
+"WebBrowser.ClearCookies.Info" = "Delete all cookies in the Telegram in-app browser. This action will sign you out of most websites.";
+"WebBrowser.ClearCookies.Succeed" = "Cookies cleared.";
+
+"WebBrowser.Exceptions.Title" = "NEVER OPEN IN THE IN-APP BROWSER";
+"WebBrowser.Exceptions.AddException" = "Add Website";
+"WebBrowser.Exceptions.Clear" = "Clear List";
+"WebBrowser.Exceptions.Info" = "These websites will be always opened in your default browser.";
+
+"WebBrowser.Exceptions.Create.Title" = "Add Website";
+"WebBrowser.Exceptions.Create.Text" = "Enter a domain that you don't want to be opened in the in-app browser.";
+"WebBrowser.Exceptions.Create.Placeholder" = "Enter URL";
+
+"WebBrowser.Done" = "Done";
diff --git a/submodules/AccountContext/Sources/AccountContext.swift b/submodules/AccountContext/Sources/AccountContext.swift
index d6b12ebca9..3de3ef7802 100644
--- a/submodules/AccountContext/Sources/AccountContext.swift
+++ b/submodules/AccountContext/Sources/AccountContext.swift
@@ -614,102 +614,6 @@ public enum ContactListActionItemIcon : Equatable {
     }
 }
 
-public struct ContactListAdditionalOption: Equatable {
-    public let title: String
-    public let icon: ContactListActionItemIcon
-    public let action: () -> Void
-    public let clearHighlightAutomatically: Bool
-
-    public init(title: String, icon: ContactListActionItemIcon, action: @escaping () -> Void, clearHighlightAutomatically: Bool = false) {
-        self.title = title
-        self.icon = icon
-        self.action = action
-        self.clearHighlightAutomatically = clearHighlightAutomatically
-    }
-
-    public static func ==(lhs: ContactListAdditionalOption, rhs: ContactListAdditionalOption) -> Bool {
-        return lhs.title == rhs.title && lhs.icon == rhs.icon
-    }
-}
-
-public enum ContactListPeerId: Hashable {
-    case peer(PeerId)
-    case deviceContact(DeviceContactStableId)
-}
-
-public enum ContactListAction: Equatable {
-    case generic
-    case voiceCall
-    case videoCall
-    case more
-}
-
-public enum ContactListPeer: Equatable {
-    case peer(peer: Peer, isGlobal: Bool, participantCount: Int32?)
-    case deviceContact(DeviceContactStableId, DeviceContactBasicData)
-
-    public var id: ContactListPeerId {
-        switch self {
-        case let .peer(peer, _, _):
-            return .peer(peer.id)
-        case let .deviceContact(id, _):
-            return .deviceContact(id)
-        }
-    }
-
-    public var indexName: PeerIndexNameRepresentation {
-        switch self {
-        case let .peer(peer, _, _):
-            return peer.indexName
-        case let .deviceContact(_, contact):
-            return .personName(first: contact.firstName, last: contact.lastName, addressNames: [], phoneNumber: "")
-        }
-    }
-
-    public static func ==(lhs: ContactListPeer, rhs: ContactListPeer) -> Bool {
-        switch lhs {
-        case let .peer(lhsPeer, lhsIsGlobal, lhsParticipantCount):
-            if case let .peer(rhsPeer, rhsIsGlobal, rhsParticipantCount) = rhs, lhsPeer.isEqual(rhsPeer), lhsIsGlobal == rhsIsGlobal, lhsParticipantCount == rhsParticipantCount {
-                return true
-            } else {
-                return false
-            }
-        case let .deviceContact(id, contact):
-            if case .deviceContact(id, contact) = rhs {
-                return true
-            } else {
-                return false
-            }
-        }
-    }
-}
-
-public final class ContactSelectionControllerParams {
-    public let context: AccountContext
-    public let updatedPresentationData: (initial: PresentationData, signal: Signal<PresentationData, NoError>)?
-    public let autoDismiss: Bool
-    public let title: (PresentationStrings) -> String
-    public let options: [ContactListAdditionalOption]
-    public let displayDeviceContacts: Bool
-    public let displayCallIcons: Bool
-    public let multipleSelection: Bool
-    public let requirePhoneNumbers: Bool
-    public let confirmation: (ContactListPeer) -> Signal<Bool, NoError>
-
-    public init(context: AccountContext, updatedPresentationData: (initial: PresentationData, signal: Signal<PresentationData, NoError>)? = nil, autoDismiss: Bool = true, title: @escaping (PresentationStrings) -> String, options: [ContactListAdditionalOption] = [], displayDeviceContacts: Bool = false, displayCallIcons: Bool = false, multipleSelection: Bool = false, requirePhoneNumbers: Bool = false, confirmation: @escaping (ContactListPeer) -> Signal<Bool, NoError> = { _ in .single(true) }) {
-        self.context = context
-        self.updatedPresentationData = updatedPresentationData
-        self.autoDismiss = autoDismiss
-        self.title = title
-        self.options = options
-        self.displayDeviceContacts = displayDeviceContacts
-        self.displayCallIcons = displayCallIcons
-        self.multipleSelection = multipleSelection
-        self.requirePhoneNumbers = requirePhoneNumbers
-        self.confirmation = confirmation
-    }
-}
-
 public enum ChatListSearchFilter: Equatable {
     case chats
     case topics
diff --git a/submodules/AccountContext/Sources/ContactMultiselectionController.swift b/submodules/AccountContext/Sources/ContactMultiselectionController.swift
index 1bd063097a..88192e710e 100644
--- a/submodules/AccountContext/Sources/ContactMultiselectionController.swift
+++ b/submodules/AccountContext/Sources/ContactMultiselectionController.swift
@@ -85,6 +85,7 @@ public enum ContactMultiselectionControllerMode {
 public enum ContactListFilter {
     case excludeWithoutPhoneNumbers
     case excludeSelf
+    case excludeBots
     case exclude([EnginePeer.Id])
     case disable([EnginePeer.Id])
 }
diff --git a/submodules/AccountContext/Sources/ContactSelectionController.swift b/submodules/AccountContext/Sources/ContactSelectionController.swift
index 19d4c5c60a..c16d295605 100644
--- a/submodules/AccountContext/Sources/ContactSelectionController.swift
+++ b/submodules/AccountContext/Sources/ContactSelectionController.swift
@@ -1,6 +1,9 @@
 import Foundation
 import Display
 import SwiftSignalKit
+import Postbox
+import TelegramCore
+import TelegramPresentationData
 
 public protocol ContactSelectionController: ViewController {
     var result: Signal<([ContactListPeer], ContactListAction, Bool, Int32?, NSAttributedString?, ChatSendMessageActionSheetController.SendParameters?)?, NoError> { get }
@@ -10,3 +13,106 @@ public protocol ContactSelectionController: ViewController {
 
     func dismissSearch()
 }
+
+public enum ContactSelectionControllerMode {
+    case generic
+    case starsGifting(birthdays: [EnginePeer.Id: TelegramBirthday]?, hasActions: Bool)
+}
+
+public struct ContactListAdditionalOption: Equatable {
+    public let title: String
+    public let icon: ContactListActionItemIcon
+    public let action: () -> Void
+    public let clearHighlightAutomatically: Bool
+
+    public init(title: String, icon: ContactListActionItemIcon, action: @escaping () -> Void, clearHighlightAutomatically: Bool = false) {
+        self.title = title
+        self.icon = icon
+        self.action = action
+        self.clearHighlightAutomatically = clearHighlightAutomatically
+    }
+
+    public static func ==(lhs: ContactListAdditionalOption, rhs: ContactListAdditionalOption) -> Bool {
+        return lhs.title == rhs.title && lhs.icon == rhs.icon
+    }
+}
+
+public enum ContactListPeerId: Hashable {
+    case peer(PeerId)
+    case deviceContact(DeviceContactStableId)
+}
+
+public enum ContactListAction: Equatable {
+    case generic
+    case voiceCall
+    case videoCall
+    case more
+}
+
+public enum ContactListPeer: Equatable {
+    case peer(peer: Peer, isGlobal: Bool, participantCount: Int32?)
+    case deviceContact(DeviceContactStableId, DeviceContactBasicData)
+
+    public var id: ContactListPeerId {
+        switch self {
+        case let .peer(peer, _, _):
+            return .peer(peer.id)
+        case let .deviceContact(id, _):
+            return .deviceContact(id)
+        }
+    }
+
+    public var indexName: PeerIndexNameRepresentation {
+        switch self {
+        case let .peer(peer, _, _):
+            return peer.indexName
+        case let .deviceContact(_, contact):
+            return .personName(first: contact.firstName, last: contact.lastName, addressNames: [], phoneNumber: "")
+        }
+    }
+
+    public static func ==(lhs: ContactListPeer, rhs: ContactListPeer) -> Bool {
+        switch lhs {
+        case let .peer(lhsPeer, lhsIsGlobal, lhsParticipantCount):
+            if case let .peer(rhsPeer, rhsIsGlobal, rhsParticipantCount) = rhs, lhsPeer.isEqual(rhsPeer), lhsIsGlobal == rhsIsGlobal, lhsParticipantCount == rhsParticipantCount {
+                return true
+            } else {
+                return false
+            }
+        case let .deviceContact(id, contact):
+            if case .deviceContact(id, contact) = rhs {
+                return true
+            } else {
+                return false
+            }
+        }
+    }
+}
+
+public final class ContactSelectionControllerParams {
+    public let context: AccountContext
+    public let updatedPresentationData: (initial: PresentationData, signal: Signal<PresentationData, NoError>)?
+    public let mode: ContactSelectionControllerMode
+    public let autoDismiss: Bool
+    public let title: (PresentationStrings) -> String
+    public let options: Signal<[ContactListAdditionalOption], NoError>
+    public let displayDeviceContacts: Bool
+    public let displayCallIcons: Bool
+    public let multipleSelection: Bool
+    public let requirePhoneNumbers: Bool
+    public let confirmation: (ContactListPeer) -> Signal<Bool, NoError>
+
+    public init(context: AccountContext, updatedPresentationData: (initial: PresentationData, signal: Signal<PresentationData, NoError>)? = nil, mode: ContactSelectionControllerMode = .generic, autoDismiss: Bool = true, title: @escaping (PresentationStrings) -> String, options: Signal<[ContactListAdditionalOption], NoError> = .single([]), displayDeviceContacts: Bool = false, displayCallIcons: Bool = false, multipleSelection: Bool = false, requirePhoneNumbers: Bool = false, confirmation: @escaping (ContactListPeer) -> Signal<Bool, NoError> = { _ in .single(true) }) {
+        self.context = context
+        self.updatedPresentationData = updatedPresentationData
+        self.mode = mode
+        self.autoDismiss = autoDismiss
+        self.title = title
+        self.options = options
+        self.displayDeviceContacts = displayDeviceContacts
+        self.displayCallIcons = displayCallIcons
+        self.multipleSelection = multipleSelection
+        self.requirePhoneNumbers = requirePhoneNumbers
+        self.confirmation = confirmation
+    }
+}
diff --git a/submodules/AccountContext/Sources/IsMediaStreamable.swift b/submodules/AccountContext/Sources/IsMediaStreamable.swift
index a562f0a32c..650911e79a 100644
--- a/submodules/AccountContext/Sources/IsMediaStreamable.swift
+++ b/submodules/AccountContext/Sources/IsMediaStreamable.swift
@@ -18,7 +18,7 @@ public func isMediaStreamable(message: Message, media: TelegramMediaFile) -> Boo
         return false
     }
     for attribute in media.attributes {
-        if case let .Video(_, _, flags, _) = attribute {
+        if case let .Video(_, _, flags, _, _) = attribute {
            if flags.contains(.supportsStreaming) {
                return true
            }
@@ -41,7 +41,7 @@ public func isMediaStreamable(media: TelegramMediaFile) -> Bool {
         return false
     }
     for attribute in media.attributes {
-        if case let .Video(_, _, flags, _) = attribute {
+        if case let .Video(_, _, flags, _, _) = attribute {
            if flags.contains(.supportsStreaming) {
                return true
            }
diff --git a/submodules/AttachmentUI/Sources/AttachmentController.swift b/submodules/AttachmentUI/Sources/AttachmentController.swift
index 978d2f936d..68179ddca0 100644
--- a/submodules/AttachmentUI/Sources/AttachmentController.swift
+++ b/submodules/AttachmentUI/Sources/AttachmentController.swift
@@ -1183,6 +1183,8 @@ public class AttachmentController: ViewController, MinimizableController {
         self.blocksBackgroundWhenInOverlay = true
         self.acceptsFocusWhenInOverlay = true
         
+        self.navigationItem.backBarButtonItem = UIBarButtonItem(title: self.context.sharedContext.currentPresentationData.with { $0 }.strings.Common_Back, style: .plain, target: nil, action: nil)
+        
         self.scrollToTop = { [weak self] in
             if let strongSelf = self {
                 strongSelf.node.scrollToTop()
diff --git a/submodules/AvatarVideoNode/Sources/AvatarVideoNode.swift b/submodules/AvatarVideoNode/Sources/AvatarVideoNode.swift
index 634438cafa..15fc596bea 100644
--- a/submodules/AvatarVideoNode/Sources/AvatarVideoNode.swift
+++ b/submodules/AvatarVideoNode/Sources/AvatarVideoNode.swift
@@ -206,7 +206,7 @@ public final class AvatarVideoNode: ASDisplayNode {
         self.backgroundNode.image = nil
         
         let videoId = photo.id?.id ??
peer.id.id._internalGetInt64Value() - let videoFileReference = FileMediaReference.avatarList(peer: peerReference, media: TelegramMediaFile(fileId: EngineMedia.Id(namespace: Namespaces.Media.LocalFile, id: 0), partialReference: nil, resource: video.resource, previewRepresentations: photo.representations, videoThumbnails: [], immediateThumbnailData: photo.immediateThumbnailData, mimeType: "video/mp4", size: nil, attributes: [.Animated, .Video(duration: 0, size: video.dimensions, flags: [], preloadSize: nil)])) + let videoFileReference = FileMediaReference.avatarList(peer: peerReference, media: TelegramMediaFile(fileId: EngineMedia.Id(namespace: Namespaces.Media.LocalFile, id: 0), partialReference: nil, resource: video.resource, previewRepresentations: photo.representations, videoThumbnails: [], immediateThumbnailData: photo.immediateThumbnailData, mimeType: "video/mp4", size: nil, attributes: [.Animated, .Video(duration: 0, size: video.dimensions, flags: [], preloadSize: nil, coverTime: nil)])) let videoContent = NativeVideoContent(id: .profileVideo(videoId, nil), userLocation: .other, fileReference: videoFileReference, streamVideo: isMediaStreamable(resource: video.resource) ? .conservative : .none, loopVideo: true, enableSound: false, fetchAutomatically: true, onlyFullSizeThumbnail: false, useLargeThumbnail: true, autoFetchFullSizeThumbnail: true, startTimestamp: video.startTimestamp, continuePlayingWithoutSoundOnLostAudioSession: false, placeholderColor: .clear, captureProtected: false, storeAfterDownload: nil) if videoContent.id != self.videoContent?.id { self.videoNode?.removeFromSupernode() diff --git a/submodules/BrowserUI/Sources/BrowserAddressBarComponent.swift b/submodules/BrowserUI/Sources/BrowserAddressBarComponent.swift new file mode 100644 index 0000000000..51618066fd --- /dev/null +++ b/submodules/BrowserUI/Sources/BrowserAddressBarComponent.swift @@ -0,0 +1,362 @@ +import Foundation +import UIKit +import AsyncDisplayKit +import Display +import ComponentFlow +import TelegramPresentationData +import AccountContext +import BundleIconComponent + +final class AddressBarContentComponent: Component { + let theme: PresentationTheme + let strings: PresentationStrings + let url: String + let performAction: ActionSlot + + init( + theme: PresentationTheme, + strings: PresentationStrings, + url: String, + performAction: ActionSlot + ) { + self.theme = theme + self.strings = strings + self.url = url + self.performAction = performAction + } + + static func ==(lhs: AddressBarContentComponent, rhs: AddressBarContentComponent) -> Bool { + if lhs.theme !== rhs.theme { + return false + } + if lhs.strings !== rhs.strings { + return false + } + if lhs.url != rhs.url { + return false + } + return true + } + + final class View: UIView, UITextFieldDelegate { + private final class TextField: UITextField { + override func textRect(forBounds bounds: CGRect) -> CGRect { + return bounds.integral + } + } + + private struct Params: Equatable { + var theme: PresentationTheme + var strings: PresentationStrings + var size: CGSize + + static func ==(lhs: Params, rhs: Params) -> Bool { + if lhs.theme !== rhs.theme { + return false + } + if lhs.strings !== rhs.strings { + return false + } + if lhs.size != rhs.size { + return false + } + return true + } + } + + private let activated: (Bool) -> Void = { _ in } + private let deactivated: (Bool) -> Void = { _ in } + private let updateQuery: (String?) 
-> Void = { _ in } + + private let backgroundLayer: SimpleLayer + + private let iconView: UIImageView + + private let clearIconView: UIImageView + private let clearIconButton: HighlightTrackingButton + + private let cancelButtonTitle: ComponentView + private let cancelButton: HighlightTrackingButton + + private var placeholderContent = ComponentView() + + private var textFrame: CGRect? + private var textField: TextField? + + private var tapRecognizer: UITapGestureRecognizer? + + private var params: Params? + private var component: AddressBarContentComponent? + + public var wantsDisplayBelowKeyboard: Bool { + return self.textField != nil + } + + init() { + self.backgroundLayer = SimpleLayer() + + self.iconView = UIImageView() + + self.clearIconView = UIImageView() + self.clearIconButton = HighlightableButton() + self.clearIconView.isHidden = true + self.clearIconButton.isHidden = true + + self.cancelButtonTitle = ComponentView() + self.cancelButton = HighlightTrackingButton() + + super.init(frame: CGRect()) + + self.layer.addSublayer(self.backgroundLayer) + + self.addSubview(self.iconView) + self.addSubview(self.clearIconView) + self.addSubview(self.clearIconButton) + + self.addSubview(self.cancelButton) + self.clipsToBounds = true + + let tapRecognizer = UITapGestureRecognizer(target: self, action: #selector(self.tapGesture(_:))) + self.tapRecognizer = tapRecognizer + self.addGestureRecognizer(tapRecognizer) + + self.cancelButton.highligthedChanged = { [weak self] highlighted in + if let strongSelf = self { + if highlighted { + if let cancelButtonTitleView = strongSelf.cancelButtonTitle.view { + cancelButtonTitleView.layer.removeAnimation(forKey: "opacity") + cancelButtonTitleView.alpha = 0.4 + } + } else { + if let cancelButtonTitleView = strongSelf.cancelButtonTitle.view { + cancelButtonTitleView.alpha = 1.0 + cancelButtonTitleView.layer.animateAlpha(from: 0.4, to: 1.0, duration: 0.2) + } + } + } + } + self.cancelButton.addTarget(self, action: #selector(self.cancelPressed), for: .touchUpInside) + + self.clearIconButton.highligthedChanged = { [weak self] highlighted in + if let strongSelf = self { + if highlighted { + strongSelf.clearIconView.layer.removeAnimation(forKey: "opacity") + strongSelf.clearIconView.alpha = 0.4 + } else { + strongSelf.clearIconView.alpha = 1.0 + strongSelf.clearIconView.layer.animateAlpha(from: 0.4, to: 1.0, duration: 0.2) + } + } + } + self.clearIconButton.addTarget(self, action: #selector(self.clearPressed), for: .touchUpInside) + } + + required public init?(coder: NSCoder) { + fatalError("init(coder:) has not been implemented") + } + + @objc private func tapGesture(_ recognizer: UITapGestureRecognizer) { + if case .ended = recognizer.state { + self.activateTextInput() + } + } + + private func activateTextInput() { + if self.textField == nil, let textFrame = self.textFrame { + let backgroundFrame = self.backgroundLayer.frame + let textFieldFrame = CGRect(origin: CGPoint(x: textFrame.minX, y: backgroundFrame.minY), size: CGSize(width: backgroundFrame.maxX - textFrame.minX, height: backgroundFrame.height)) + + let textField = TextField(frame: textFieldFrame) + textField.autocorrectionType = .no + textField.returnKeyType = .search + self.textField = textField + self.insertSubview(textField, belowSubview: self.clearIconView) + textField.delegate = self + textField.addTarget(self, action: #selector(self.textFieldChanged(_:)), for: .editingChanged) + } + + guard !(self.textField?.isFirstResponder ?? 
false) else { + return + } + + self.activated(true) + + self.textField?.becomeFirstResponder() + } + + @objc private func cancelPressed() { + self.updateQuery(nil) + + self.clearIconView.isHidden = true + self.clearIconButton.isHidden = true + + let textField = self.textField + self.textField = nil + + self.deactivated(textField?.isFirstResponder ?? false) + + self.component?.performAction.invoke(.updateSearchActive(false)) + + if let textField { + textField.resignFirstResponder() + textField.removeFromSuperview() + } + } + + @objc private func clearPressed() { + self.updateQuery(nil) + self.textField?.text = "" + + self.clearIconView.isHidden = true + self.clearIconButton.isHidden = true + } + + func deactivate() { + if let text = self.textField?.text, !text.isEmpty { + self.textField?.endEditing(true) + } else { + self.cancelPressed() + } + } + + public func textFieldDidBeginEditing(_ textField: UITextField) { + } + + public func textFieldDidEndEditing(_ textField: UITextField) { + } + + public func textFieldShouldReturn(_ textField: UITextField) -> Bool { + textField.endEditing(true) + return false + } + + @objc private func textFieldChanged(_ textField: UITextField) { + let text = textField.text ?? "" + + self.clearIconView.isHidden = text.isEmpty + self.clearIconButton.isHidden = text.isEmpty + self.placeholderContent.view?.isHidden = !text.isEmpty + + self.updateQuery(text) + + self.component?.performAction.invoke(.updateSearchQuery(text)) + + if let params = self.params { + self.update(theme: params.theme, strings: params.strings, size: params.size, transition: .immediate) + } + } + + func update(component: AddressBarContentComponent, availableSize: CGSize, transition: ComponentTransition) -> CGSize { + self.component = component + + self.update(theme: component.theme, strings: component.strings, size: availableSize, transition: transition) + + return availableSize + } + + public func update(theme: PresentationTheme, strings: PresentationStrings, size: CGSize, transition: ComponentTransition) { + let params = Params( + theme: theme, + strings: strings, + size: size + ) + + if self.params == params { + return + } + + let isActiveWithText = true + + if self.params?.theme !== theme { + self.iconView.image = generateTintedImage(image: UIImage(bundleImageName: "Components/Search Bar/Loupe"), color: .white)?.withRenderingMode(.alwaysTemplate) + self.iconView.tintColor = theme.rootController.navigationSearchBar.inputIconColor + self.clearIconView.image = generateTintedImage(image: UIImage(bundleImageName: "Components/Search Bar/Clear"), color: .white)?.withRenderingMode(.alwaysTemplate) + self.clearIconView.tintColor = theme.rootController.navigationSearchBar.inputClearButtonColor + } + + self.params = params + + let sideInset: CGFloat = 10.0 + let inputHeight: CGFloat = 36.0 + let topInset: CGFloat = (size.height - inputHeight) / 2.0 + + let sideTextInset: CGFloat = sideInset + 4.0 + 17.0 + + self.backgroundLayer.backgroundColor = theme.rootController.navigationSearchBar.inputFillColor.cgColor + self.backgroundLayer.cornerRadius = 10.5 + + let cancelTextSize = self.cancelButtonTitle.update( + transition: .immediate, + component: AnyComponent(Text( + text: strings.Common_Cancel, + font: Font.regular(17.0), + color: theme.rootController.navigationBar.accentTextColor + )), + environment: {}, + containerSize: CGSize(width: size.width - 32.0, height: 100.0) + ) + + let cancelButtonSpacing: CGFloat = 8.0 + + var backgroundFrame = CGRect(origin: CGPoint(x: sideInset, y: topInset), size: 
CGSize(width: size.width - sideInset * 2.0, height: inputHeight)) + if isActiveWithText { + backgroundFrame.size.width -= cancelTextSize.width + cancelButtonSpacing + } + transition.setFrame(layer: self.backgroundLayer, frame: backgroundFrame) + + transition.setFrame(view: self.cancelButton, frame: CGRect(origin: CGPoint(x: backgroundFrame.maxX, y: 0.0), size: CGSize(width: cancelButtonSpacing + cancelTextSize.width, height: size.height))) + + let textX: CGFloat = backgroundFrame.minX + sideTextInset + let textFrame = CGRect(origin: CGPoint(x: textX, y: backgroundFrame.minY), size: CGSize(width: backgroundFrame.maxX - textX, height: backgroundFrame.height)) + self.textFrame = textFrame + + if let image = self.iconView.image { + let iconFrame = CGRect(origin: CGPoint(x: backgroundFrame.minX + 5.0, y: backgroundFrame.minY + floor((backgroundFrame.height - image.size.height) / 2.0)), size: image.size) + transition.setFrame(view: self.iconView, frame: iconFrame) + } + + let placeholderSize = self.placeholderContent.update( + transition: transition, + component: AnyComponent( + Text(text: strings.Common_Search, font: Font.regular(17.0), color: theme.rootController.navigationSearchBar.inputPlaceholderTextColor) + ), + environment: {}, + containerSize: size + ) + if let placeholderContentView = self.placeholderContent.view { + if placeholderContentView.superview == nil { + self.addSubview(placeholderContentView) + } + let placeholderContentFrame = CGRect(origin: CGPoint(x: textFrame.minX, y: backgroundFrame.midY - placeholderSize.height / 2.0), size: placeholderSize) + transition.setFrame(view: placeholderContentView, frame: placeholderContentFrame) + } + + if let image = self.clearIconView.image { + let iconFrame = CGRect(origin: CGPoint(x: backgroundFrame.maxX - image.size.width - 4.0, y: backgroundFrame.minY + floor((backgroundFrame.height - image.size.height) / 2.0)), size: image.size) + transition.setFrame(view: self.clearIconView, frame: iconFrame) + transition.setFrame(view: self.clearIconButton, frame: iconFrame.insetBy(dx: -8.0, dy: -10.0)) + } + + if let cancelButtonTitleComponentView = self.cancelButtonTitle.view { + if cancelButtonTitleComponentView.superview == nil { + self.addSubview(cancelButtonTitleComponentView) + cancelButtonTitleComponentView.isUserInteractionEnabled = false + } + transition.setFrame(view: cancelButtonTitleComponentView, frame: CGRect(origin: CGPoint(x: backgroundFrame.maxX + cancelButtonSpacing, y: floor((size.height - cancelTextSize.height) / 2.0)), size: cancelTextSize)) + } + + if let textField = self.textField { + textField.textColor = theme.rootController.navigationSearchBar.inputTextColor + transition.setFrame(view: textField, frame: CGRect(origin: CGPoint(x: backgroundFrame.minX + sideTextInset, y: backgroundFrame.minY - UIScreenPixel), size: CGSize(width: backgroundFrame.width - sideTextInset - 32.0, height: backgroundFrame.height))) + } + } + } + + func makeView() -> View { + return View() + } + + func update(view: View, availableSize: CGSize, state: EmptyComponentState, environment: Environment, transition: ComponentTransition) -> CGSize { + return view.update(component: self, availableSize: availableSize, transition: transition) + } +} diff --git a/submodules/BrowserUI/Sources/BrowserScreen.swift b/submodules/BrowserUI/Sources/BrowserScreen.swift index 78637c1ba6..f555d1a821 100644 --- a/submodules/BrowserUI/Sources/BrowserScreen.swift +++ b/submodules/BrowserUI/Sources/BrowserScreen.swift @@ -109,7 +109,7 @@ private final class 
BrowserScreenComponent: CombinedComponent { component: AnyComponent( Button( content: AnyComponent( - MultilineTextComponent(text: .plain(NSAttributedString(string: environment.strings.Common_Close, font: Font.regular(17.0), textColor: environment.theme.rootController.navigationBar.primaryTextColor, paragraphAlignment: .center)), horizontalAlignment: .left, maximumNumberOfLines: 1) + MultilineTextComponent(text: .plain(NSAttributedString(string: environment.strings.WebBrowser_Done, font: Font.regular(17.0), textColor: environment.theme.rootController.navigationBar.accentTextColor, paragraphAlignment: .center)), horizontalAlignment: .left, maximumNumberOfLines: 1) ), action: { performAction.invoke(.close) @@ -119,7 +119,6 @@ private final class BrowserScreenComponent: CombinedComponent { ) ] - let isLoading = (context.component.contentState?.estimatedProgress ?? 1.0) < 1.0 navigationRightItems = [ AnyComponentWithIdentity( id: "settings", @@ -130,7 +129,7 @@ private final class BrowserScreenComponent: CombinedComponent { content: LottieComponent.AppBundleContent( name: "anim_moredots" ), - color: environment.theme.rootController.navigationBar.primaryTextColor, + color: environment.theme.rootController.navigationBar.accentTextColor, size: CGSize(width: 30.0, height: 30.0) ) ), @@ -142,27 +141,6 @@ private final class BrowserScreenComponent: CombinedComponent { ) ) ] - if case .webPage = context.component.contentState?.contentType { - navigationRightItems.insert( - AnyComponentWithIdentity( - id: isLoading ? "stop" : "reload", - component: AnyComponent( - ReferenceButtonComponent( - content: AnyComponent( - BundleIconComponent( - name: isLoading ? "Instant View/CloseIcon" : "Chat/Context Menu/Reload", - tintColor: environment.theme.rootController.navigationBar.primaryTextColor - ) - ), - action: { - performAction.invoke(isLoading ? .stop : .reload) - } - ) - ) - ), - at: 0 - ) - } } let collapseFraction = context.component.presentationState.isSearching ? 0.0 : context.component.panelCollapseFraction @@ -211,6 +189,7 @@ private final class BrowserScreenComponent: CombinedComponent { id: "navigation", component: AnyComponent( NavigationToolbarContentComponent( + accentColor: environment.theme.rootController.navigationBar.accentTextColor, textColor: environment.theme.rootController.navigationBar.primaryTextColor, canGoBack: context.component.contentState?.canGoBack ?? false, canGoForward: context.component.contentState?.canGoForward ?? 
false, @@ -281,6 +260,8 @@ public class BrowserScreen: ViewController, MinimizableController { case increaseFontSize case resetFontSize case updateFontIsSerif(Bool) + case addBookmark + case openBookmarks } fileprivate final class Node: ViewControllerTracingNode { @@ -340,6 +321,62 @@ public class BrowserScreen: ViewController, MinimizableController { case .share: let presentationData = self.presentationData let shareController = ShareController(context: self.context, subject: .url(url)) + shareController.completed = { [weak self] peerIds in + guard let strongSelf = self else { + return + } + let _ = (strongSelf.context.engine.data.get( + EngineDataList( + peerIds.map(TelegramEngine.EngineData.Item.Peer.Peer.init) + ) + ) + |> deliverOnMainQueue).startStandalone(next: { [weak self] peerList in + guard let strongSelf = self else { + return + } + + let peers = peerList.compactMap { $0 } + let presentationData = strongSelf.context.sharedContext.currentPresentationData.with { $0 } + + let text: String + var savedMessages = false + if peerIds.count == 1, let peerId = peerIds.first, peerId == strongSelf.context.account.peerId { + text = presentationData.strings.WebBrowser_LinkForwardTooltip_SavedMessages_One + savedMessages = true + } else { + if peers.count == 1, let peer = peers.first { + let peerName = peer.id == strongSelf.context.account.peerId ? presentationData.strings.DialogList_SavedMessages : peer.displayTitle(strings: presentationData.strings, displayOrder: presentationData.nameDisplayOrder) + text = presentationData.strings.WebBrowser_LinkForwardTooltip_Chat_One(peerName).string + } else if peers.count == 2, let firstPeer = peers.first, let secondPeer = peers.last { + let firstPeerName = firstPeer.id == strongSelf.context.account.peerId ? presentationData.strings.DialogList_SavedMessages : firstPeer.displayTitle(strings: presentationData.strings, displayOrder: presentationData.nameDisplayOrder) + let secondPeerName = secondPeer.id == strongSelf.context.account.peerId ? presentationData.strings.DialogList_SavedMessages : secondPeer.displayTitle(strings: presentationData.strings, displayOrder: presentationData.nameDisplayOrder) + text = presentationData.strings.WebBrowser_LinkForwardTooltip_TwoChats_One(firstPeerName, secondPeerName).string + } else if let peer = peers.first { + let peerName = peer.displayTitle(strings: presentationData.strings, displayOrder: presentationData.nameDisplayOrder) + text = presentationData.strings.WebBrowser_LinkForwardTooltip_ManyChats_One(peerName, "\(peers.count - 1)").string + } else { + text = "" + } + } + + strongSelf.controller?.present(UndoOverlayController(presentationData: presentationData, content: .forward(savedMessages: savedMessages, text: text), elevatedLayout: false, animateInAsReplacement: true, action: { [weak self] action in + if savedMessages, let self, action == .info { + let _ = (self.context.engine.data.get(TelegramEngine.EngineData.Item.Peer.Peer(id: self.context.account.peerId)) + |> deliverOnMainQueue).start(next: { [weak self] peer in + guard let self, let peer else { + return + } + guard let navigationController = self.controller?.navigationController as? 
NavigationController else { + return + } + self.minimize() + self.context.sharedContext.navigateToChatController(NavigateToChatControllerParams(navigationController: navigationController, context: self.context, chatLocation: .peer(peer), forceOpenChat: true)) + }) + } + return false + }), in: .current) + }) + } shareController.actionCompleted = { [weak self] in self?.controller?.present(UndoOverlayController(presentationData: presentationData, content: .linkCopied(text: presentationData.strings.Conversation_LinkCopied), elevatedLayout: false, animateInAsReplacement: false, action: { _ in return false }), in: .window(.root)) } @@ -446,6 +483,12 @@ public class BrowserScreen: ViewController, MinimizableController { return updatedState }) content.updateFontState(self.presentationState.fontState) + case .addBookmark: + if let content = self.content.last { + self.addBookmark(content.currentState.url) + } + case .openBookmarks: + break } } @@ -553,6 +596,43 @@ public class BrowserScreen: ViewController, MinimizableController { self.context.sharedContext.navigateToChatController(NavigateToChatControllerParams(navigationController: navigationController, context: self.context, chatLocation: .peer(peer), animated: true)) } + func addBookmark(_ url: String) { + let _ = enqueueMessages( + account: self.context.account, + peerId: self.context.account.peerId, + messages: [.message( + text: url, + attributes: [], + inlineStickers: [:], + mediaReference: nil, + threadId: nil, + replyToMessageId: nil, + replyToStoryId: nil, + localGroupingKey: nil, + correlationId: nil, + bubbleUpEmojiOrStickersets: [] + )] + ).start() + + let presentationData = self.context.sharedContext.currentPresentationData.with { $0 } + self.controller?.present(UndoOverlayController(presentationData: presentationData, content: .forward(savedMessages: true, text: presentationData.strings.WebBrowser_LinkAddedToBookmarks), elevatedLayout: false, animateInAsReplacement: true, action: { [weak self] action in + if let self, action == .info { + let _ = (self.context.engine.data.get(TelegramEngine.EngineData.Item.Peer.Peer(id: self.context.account.peerId)) + |> deliverOnMainQueue).start(next: { [weak self] peer in + guard let self, let peer else { + return + } + guard let navigationController = self.controller?.navigationController as? NavigationController else { + return + } + self.minimize() + self.context.sharedContext.navigateToChatController(NavigateToChatControllerParams(navigationController: navigationController, context: self.context, chatLocation: .peer(peer), forceOpenChat: true)) + }) + } + return false + }), in: .current) + } + private func setupContentStateUpdates() { for content in self.content { content.onScrollingUpdate = { _ in } @@ -712,16 +792,29 @@ public class BrowserScreen: ViewController, MinimizableController { .action(ContextMenuActionItem(text: self.presentationData.strings.InstantPage_FontSanFrancisco, icon: forceIsSerif ? emptyIcon : checkIcon, action: { (controller, action) in performAction.invoke(.updateFontIsSerif(false)) action(.default) - })), .action(ContextMenuActionItem(text: self.presentationData.strings.InstantPage_FontNewYork, textFont: .custom(font: Font.with(size: 17.0, design: .serif, traits: []), height: nil, verticalOffset: nil), icon: forceIsSerif ? 
checkIcon : emptyIcon, action: { (controller, action) in + })), + .action(ContextMenuActionItem(text: self.presentationData.strings.InstantPage_FontNewYork, textFont: .custom(font: Font.with(size: 17.0, design: .serif, traits: []), height: nil, verticalOffset: nil), icon: forceIsSerif ? checkIcon : emptyIcon, action: { (controller, action) in performAction.invoke(.updateFontIsSerif(true)) action(.default) })), .separator, + .action(ContextMenuActionItem(text: self.presentationData.strings.WebBrowser_Reload, icon: { theme in return generateTintedImage(image: UIImage(bundleImageName: "Instant View/Settings/Reload"), color: theme.contextMenu.primaryColor) }, action: { (controller, action) in + performAction.invoke(.reload) + action(.default) + })), .action(ContextMenuActionItem(text: self.presentationData.strings.InstantPage_Search, icon: { theme in return generateTintedImage(image: UIImage(bundleImageName: "Instant View/Settings/Search"), color: theme.contextMenu.primaryColor) }, action: { (controller, action) in performAction.invoke(.updateSearchActive(true)) action(.default) })), - .action(ContextMenuActionItem(text: self.presentationData.strings.InstantPage_OpenInBrowser(openInTitle).string, icon: { theme in return generateTintedImage(image: UIImage(bundleImageName: "Instant View/Settings/Browser"), color: theme.contextMenu.primaryColor) }, action: { [weak self] (controller, action) in + .action(ContextMenuActionItem(text: self.presentationData.strings.WebBrowser_Share, icon: { theme in return generateTintedImage(image: UIImage(bundleImageName: "Chat/Context Menu/Share"), color: theme.contextMenu.primaryColor) }, action: { (controller, action) in + performAction.invoke(.share) + action(.default) + })), + .action(ContextMenuActionItem(text: self.presentationData.strings.WebBrowser_AddBookmark, icon: { theme in return generateTintedImage(image: UIImage(bundleImageName: "Chat/Context Menu/Fave"), color: theme.contextMenu.primaryColor) }, action: { (controller, action) in + performAction.invoke(.addBookmark) + action(.default) + })), + .action(ContextMenuActionItem(text: self.presentationData.strings.InstantPage_OpenInBrowser(openInTitle).string, icon: { theme in return generateTintedImage(image: UIImage(bundleImageName: "Chat/Context Menu/Browser"), color: theme.contextMenu.primaryColor) }, action: { [weak self] (controller, action) in if let self { self.context.sharedContext.applicationBindings.openUrl(openInUrl) } @@ -1018,7 +1111,7 @@ public class BrowserScreen: ViewController, MinimizableController { } return nil } - + public var minimizedProgress: Float? 
{ if let contentState = self.node.contentState { return Float(contentState.readingProgress) @@ -1091,7 +1184,7 @@ private final class BrowserContentComponent: Component { let collapsedHeight: CGFloat = 24.0 let topInset: CGFloat = component.insets.top + component.navigationBarHeight * (1.0 - component.scrollingPanelOffsetFraction) + collapsedHeight * component.scrollingPanelOffsetFraction - let bottomInset = 49.0 + component.insets.bottom + let bottomInset = (49.0 + component.insets.bottom) * (1.0 - component.scrollingPanelOffsetFraction) component.content.updateLayout(size: availableSize, insets: UIEdgeInsets(top: topInset, left: component.insets.left, bottom: bottomInset, right: component.insets.right), transition: transition) transition.setFrame(view: component.content, frame: CGRect(origin: .zero, size: availableSize)) diff --git a/submodules/BrowserUI/Sources/BrowserSearchBarComponent.swift b/submodules/BrowserUI/Sources/BrowserSearchBarComponent.swift index c49fa13540..d588b744f2 100644 --- a/submodules/BrowserUI/Sources/BrowserSearchBarComponent.swift +++ b/submodules/BrowserUI/Sources/BrowserSearchBarComponent.swift @@ -33,7 +33,7 @@ final class SearchBarContentComponent: Component { } final class View: UIView, UITextFieldDelegate { - private final class EmojiSearchTextField: UITextField { + private final class SearchTextField: UITextField { override func textRect(forBounds bounds: CGRect) -> CGRect { return bounds.integral } @@ -75,7 +75,7 @@ final class SearchBarContentComponent: Component { private var placeholderContent = ComponentView() private var textFrame: CGRect? - private var textField: EmojiSearchTextField? + private var textField: SearchTextField? private var tapRecognizer: UITapGestureRecognizer? @@ -160,7 +160,7 @@ final class SearchBarContentComponent: Component { let backgroundFrame = self.backgroundLayer.frame let textFieldFrame = CGRect(origin: CGPoint(x: textFrame.minX, y: backgroundFrame.minY), size: CGSize(width: backgroundFrame.maxX - textFrame.minX, height: backgroundFrame.height)) - let textField = EmojiSearchTextField(frame: textFieldFrame) + let textField = SearchTextField(frame: textFieldFrame) textField.autocorrectionType = .no textField.returnKeyType = .search self.textField = textField @@ -285,7 +285,7 @@ final class SearchBarContentComponent: Component { component: AnyComponent(Text( text: strings.Common_Cancel, font: Font.regular(17.0), - color: theme.rootController.navigationBar.primaryTextColor + color: theme.rootController.navigationBar.accentTextColor )), environment: {}, containerSize: CGSize(width: size.width - 32.0, height: 100.0) diff --git a/submodules/BrowserUI/Sources/BrowserToolbarComponent.swift b/submodules/BrowserUI/Sources/BrowserToolbarComponent.swift index 7a752140ae..2c35387e73 100644 --- a/submodules/BrowserUI/Sources/BrowserToolbarComponent.swift +++ b/submodules/BrowserUI/Sources/BrowserToolbarComponent.swift @@ -120,6 +120,7 @@ final class BrowserToolbarComponent: CombinedComponent { } final class NavigationToolbarContentComponent: CombinedComponent { + let accentColor: UIColor let textColor: UIColor let canGoBack: Bool let canGoForward: Bool @@ -127,12 +128,14 @@ final class NavigationToolbarContentComponent: CombinedComponent { let performHoldAction: (UIView, ContextGesture?, BrowserScreen.Action) -> Void init( + accentColor: UIColor, textColor: UIColor, canGoBack: Bool, canGoForward: Bool, performAction: ActionSlot, performHoldAction: @escaping (UIView, ContextGesture?, BrowserScreen.Action) -> Void ) { + self.accentColor = 
accentColor self.textColor = textColor self.canGoBack = canGoBack self.canGoForward = canGoForward @@ -141,6 +144,9 @@ final class NavigationToolbarContentComponent: CombinedComponent { } static func ==(lhs: NavigationToolbarContentComponent, rhs: NavigationToolbarContentComponent) -> Bool { + if lhs.accentColor != rhs.accentColor { + return false + } if lhs.textColor != rhs.textColor { return false } @@ -157,6 +163,7 @@ final class NavigationToolbarContentComponent: CombinedComponent { let back = Child(ContextReferenceButtonComponent.self) let forward = Child(ContextReferenceButtonComponent.self) let share = Child(Button.self) + let bookmark = Child(Button.self) let openIn = Child(Button.self) return { context in @@ -166,7 +173,7 @@ final class NavigationToolbarContentComponent: CombinedComponent { let sideInset: CGFloat = 5.0 let buttonSize = CGSize(width: 50.0, height: availableSize.height) - let spacing = (availableSize.width - buttonSize.width * 4.0 - sideInset * 2.0) / 3.0 + let spacing = (availableSize.width - buttonSize.width * 5.0 - sideInset * 2.0) / 4.0 let canGoBack = context.component.canGoBack let back = back.update( @@ -174,7 +181,7 @@ final class NavigationToolbarContentComponent: CombinedComponent { content: AnyComponent( BundleIconComponent( name: "Instant View/Back", - tintColor: canGoBack ? context.component.textColor : context.component.textColor.withAlphaComponent(0.4) + tintColor: canGoBack ? context.component.accentColor : context.component.accentColor.withAlphaComponent(0.4) ) ), minSize: buttonSize, @@ -202,7 +209,7 @@ final class NavigationToolbarContentComponent: CombinedComponent { content: AnyComponent( BundleIconComponent( name: "Instant View/Forward", - tintColor: canGoForward ? context.component.textColor : context.component.textColor.withAlphaComponent(0.4) + tintColor: canGoForward ? 
context.component.accentColor : context.component.accentColor.withAlphaComponent(0.4) ) ), minSize: buttonSize, @@ -229,7 +236,7 @@ final class NavigationToolbarContentComponent: CombinedComponent { content: AnyComponent( BundleIconComponent( name: "Chat List/NavigationShare", - tintColor: context.component.textColor + tintColor: context.component.accentColor ) ), action: { @@ -243,23 +250,42 @@ final class NavigationToolbarContentComponent: CombinedComponent { .position(CGPoint(x: sideInset + back.size.width + spacing + forward.size.width + spacing + share.size.width / 2.0, y: availableSize.height / 2.0)) ) + let bookmark = bookmark.update( + component: Button( + content: AnyComponent( + BundleIconComponent( + name: "Instant View/Bookmark", + tintColor: context.component.accentColor + ) + ), + action: { + performAction.invoke(.openBookmarks) + } + ).minSize(buttonSize), + availableSize: buttonSize, + transition: .easeInOut(duration: 0.2) + ) + context.add(bookmark + .position(CGPoint(x: sideInset + back.size.width + spacing + forward.size.width + spacing + share.size.width + spacing + bookmark.size.width / 2.0, y: availableSize.height / 2.0)) + ) + let openIn = openIn.update( component: Button( content: AnyComponent( BundleIconComponent( - name: "Instant View/Minimize", - tintColor: context.component.textColor + name: "Instant View/Browser", + tintColor: context.component.accentColor ) ), action: { - performAction.invoke(.minimize) + performAction.invoke(.openIn) } ).minSize(buttonSize), availableSize: buttonSize, transition: .easeInOut(duration: 0.2) ) context.add(openIn - .position(CGPoint(x: sideInset + back.size.width + spacing + forward.size.width + spacing + share.size.width + spacing + openIn.size.width / 2.0, y: availableSize.height / 2.0)) + .position(CGPoint(x: sideInset + back.size.width + spacing + forward.size.width + spacing + share.size.width + spacing + bookmark.size.width + spacing + openIn.size.width / 2.0, y: availableSize.height / 2.0)) ) return availableSize diff --git a/submodules/BrowserUI/Sources/BrowserWebContent.swift b/submodules/BrowserUI/Sources/BrowserWebContent.swift index 1a313e2e87..1da55837da 100644 --- a/submodules/BrowserUI/Sources/BrowserWebContent.swift +++ b/submodules/BrowserUI/Sources/BrowserWebContent.swift @@ -152,11 +152,21 @@ final class BrowserWebContent: UIView, BrowserContent, WKNavigationDelegate, WKU let configuration = WKWebViewConfiguration() +// let bundle = Bundle.main +// let bundleVersion = bundle.infoDictionary?["CFBundleShortVersionString"] ?? "" +// var proxyServerHost = "magic.org" if let data = context.currentAppConfiguration.with({ $0 }).data, let hostValue = data["ton_proxy_address"] as? 
String { proxyServerHost = hostValue } configuration.setURLSchemeHandler(TonSchemeHandler(proxyServerHost: proxyServerHost), forURLScheme: "tonsite") + configuration.allowsInlineMediaPlayback = true +// configuration.applicationNameForUserAgent = "Telegram-iOS/\(bundleVersion)" + if #available(iOSApplicationExtension 10.0, iOS 10.0, *) { + configuration.mediaTypesRequiringUserActionForPlayback = [] + } else { + configuration.mediaPlaybackRequiresUserAction = false + } self.webView = WKWebView(frame: CGRect(), configuration: configuration) self.webView.allowsLinkPreview = true @@ -182,6 +192,8 @@ final class BrowserWebContent: UIView, BrowserContent, WKNavigationDelegate, WKU self.webView.allowsBackForwardNavigationGestures = true self.webView.scrollView.delegate = self + self.webView.scrollView.clipsToBounds = false +// self.webView.translatesAutoresizingMaskIntoConstraints = false self.webView.navigationDelegate = self self.webView.uiDelegate = self self.webView.addObserver(self, forKeyPath: #keyPath(WKWebView.title), options: [], context: nil) @@ -190,6 +202,7 @@ final class BrowserWebContent: UIView, BrowserContent, WKNavigationDelegate, WKU self.webView.addObserver(self, forKeyPath: #keyPath(WKWebView.canGoBack), options: [], context: nil) self.webView.addObserver(self, forKeyPath: #keyPath(WKWebView.canGoForward), options: [], context: nil) if #available(iOS 15.0, *) { + self.backgroundColor = presentationData.theme.list.plainBackgroundColor self.webView.underPageBackgroundColor = presentationData.theme.list.plainBackgroundColor } if #available(iOS 16.4, *) { @@ -215,6 +228,7 @@ final class BrowserWebContent: UIView, BrowserContent, WKNavigationDelegate, WKU func updatePresentationData(_ presentationData: PresentationData) { self.presentationData = presentationData if #available(iOS 15.0, *) { + self.backgroundColor = presentationData.theme.list.plainBackgroundColor self.webView.underPageBackgroundColor = presentationData.theme.list.plainBackgroundColor } if let (size, insets) = self.validLayout { @@ -387,17 +401,22 @@ final class BrowserWebContent: UIView, BrowserContent, WKNavigationDelegate, WKU private var validLayout: (CGSize, UIEdgeInsets)? 
func updateLayout(size: CGSize, insets: UIEdgeInsets, transition: ComponentTransition) { self.validLayout = (size, insets) - - var scrollInsets = insets - scrollInsets.left = 0.0 - scrollInsets.right = 0.0 - scrollInsets.top = 0.0 - if self.webView.scrollView.contentInset != insets { - self.webView.scrollView.contentInset = scrollInsets - self.webView.scrollView.scrollIndicatorInsets = scrollInsets - } + self.previousScrollingOffset = ScrollingOffsetState(value: self.webView.scrollView.contentOffset.y, isDraggingOrDecelerating: self.webView.scrollView.isDragging || self.webView.scrollView.isDecelerating) - transition.setFrame(view: self.webView, frame: CGRect(origin: CGPoint(x: insets.left, y: insets.top), size: CGSize(width: size.width - insets.left - insets.right, height: size.height - insets.top))) + + let webViewFrame = CGRect(origin: CGPoint(x: insets.left, y: insets.top), size: CGSize(width: size.width - insets.left - insets.right, height: size.height - insets.top - insets.bottom)) + var refresh = false + if self.webView.frame.width > 0 && webViewFrame.width != self.webView.frame.width { + refresh = true + } + transition.setFrame(view: self.webView, frame: webViewFrame) + + if refresh { + self.webView.reloadInputViews() + } + + self.webView.scrollView.scrollIndicatorInsets = UIEdgeInsets(top: 0.0, left: -insets.left, bottom: 0.0, right: -insets.right) + self.webView.scrollView.horizontalScrollIndicatorInsets = UIEdgeInsets(top: 0.0, left: -insets.left, bottom: 0.0, right: -insets.right) if let error = self.currentError { let errorSize = self.errorView.update( @@ -737,7 +756,7 @@ final class BrowserWebContent: UIView, BrowserContent, WKNavigationDelegate, WKU result.insert(Favicon(url: url.absoluteString, dimensions: nil)) } - var largestIcon = result.first(where: { $0.url.lowercased().contains(".svg") }) + var largestIcon: Favicon? // = result.first(where: { $0.url.lowercased().contains(".svg") }) if largestIcon == nil { largestIcon = result.first for icon in result { diff --git a/submodules/Camera/Sources/Camera.swift b/submodules/Camera/Sources/Camera.swift index d1f95d0bc9..922e9af872 100644 --- a/submodules/Camera/Sources/Camera.swift +++ b/submodules/Camera/Sources/Camera.swift @@ -559,7 +559,11 @@ private final class CameraContext { guard let mainDeviceContext = self.mainDeviceContext else { return .complete() } - mainDeviceContext.device.setTorchMode(self._flashMode) + if self.initialConfiguration.isRoundVideo && self.positionValue == .front { + + } else { + mainDeviceContext.device.setTorchMode(self._flashMode) + } let orientation = self.simplePreviewView?.videoPreviewLayer.connection?.videoOrientation ?? 
.portrait if self.initialConfiguration.isRoundVideo { diff --git a/submodules/Camera/Sources/VideoRecorder.swift b/submodules/Camera/Sources/VideoRecorder.swift index e785db1834..09dd80a039 100644 --- a/submodules/Camera/Sources/VideoRecorder.swift +++ b/submodules/Camera/Sources/VideoRecorder.swift @@ -204,9 +204,9 @@ private final class VideoRecorderImpl { if let videoInput = self.videoInput { let time = CACurrentMediaTime() - if let previousPresentationTime = self.previousPresentationTime, let previousAppendTime = self.previousAppendTime { - print("appending \(presentationTime.seconds) (\(presentationTime.seconds - previousPresentationTime) ) on \(time) (\(time - previousAppendTime)") - } +// if let previousPresentationTime = self.previousPresentationTime, let previousAppendTime = self.previousAppendTime { +// print("appending \(presentationTime.seconds) (\(presentationTime.seconds - previousPresentationTime) ) on \(time) (\(time - previousAppendTime)") +// } self.previousPresentationTime = presentationTime.seconds self.previousAppendTime = time diff --git a/submodules/ChatImportUI/Sources/ChatImportActivityScreen.swift b/submodules/ChatImportUI/Sources/ChatImportActivityScreen.swift index 0442a760a1..68e3aa5abd 100644 --- a/submodules/ChatImportUI/Sources/ChatImportActivityScreen.swift +++ b/submodules/ChatImportUI/Sources/ChatImportActivityScreen.swift @@ -460,7 +460,7 @@ public final class ChatImportActivityScreen: ViewController { if let path = getAppBundle().path(forResource: "BlankVideo", ofType: "m4v"), let size = fileSize(path) { let decoration = ChatBubbleVideoDecoration(corners: ImageCorners(), nativeSize: CGSize(width: 100.0, height: 100.0), contentMode: .aspectFit, backgroundColor: .black) - let dummyFile = TelegramMediaFile(fileId: EngineMedia.Id(namespace: 0, id: 1), partialReference: nil, resource: LocalFileReferenceMediaResource(localFilePath: path, randomId: 12345), previewRepresentations: [], videoThumbnails: [], immediateThumbnailData: nil, mimeType: "video/mp4", size: size, attributes: [.Video(duration: 1, size: PixelDimensions(width: 100, height: 100), flags: [], preloadSize: nil)]) + let dummyFile = TelegramMediaFile(fileId: EngineMedia.Id(namespace: 0, id: 1), partialReference: nil, resource: LocalFileReferenceMediaResource(localFilePath: path, randomId: 12345), previewRepresentations: [], videoThumbnails: [], immediateThumbnailData: nil, mimeType: "video/mp4", size: size, attributes: [.Video(duration: 1, size: PixelDimensions(width: 100, height: 100), flags: [], preloadSize: nil, coverTime: nil)]) let videoContent = NativeVideoContent(id: .message(1, EngineMedia.Id(namespace: 0, id: 1)), userLocation: .other, fileReference: .standalone(media: dummyFile), streamVideo: .none, loopVideo: true, enableSound: false, fetchAutomatically: true, onlyFullSizeThumbnail: false, continuePlayingWithoutSoundOnLostAudioSession: false, placeholderColor: .black, storeAfterDownload: nil) diff --git a/submodules/ChatListUI/Sources/Node/ChatListItem.swift b/submodules/ChatListUI/Sources/Node/ChatListItem.swift index 232dc37a49..87b6d3462a 100644 --- a/submodules/ChatListUI/Sources/Node/ChatListItem.swift +++ b/submodules/ChatListUI/Sources/Node/ChatListItem.swift @@ -2576,7 +2576,7 @@ public class ChatListItemNode: ItemListRevealOptionsItemNode { case let .preview(dimensions, immediateThumbnailData, videoDuration): if let immediateThumbnailData { if let videoDuration { - let thumbnailMedia = TelegramMediaFile(fileId: MediaId(namespace: 0, id: index), partialReference: nil, resource: 
EmptyMediaResource(), previewRepresentations: [], videoThumbnails: [], immediateThumbnailData: immediateThumbnailData, mimeType: "video/mp4", size: nil, attributes: [.Video(duration: Double(videoDuration), size: dimensions ?? PixelDimensions(width: 1, height: 1), flags: [], preloadSize: nil)]) + let thumbnailMedia = TelegramMediaFile(fileId: MediaId(namespace: 0, id: index), partialReference: nil, resource: EmptyMediaResource(), previewRepresentations: [], videoThumbnails: [], immediateThumbnailData: immediateThumbnailData, mimeType: "video/mp4", size: nil, attributes: [.Video(duration: Double(videoDuration), size: dimensions ?? PixelDimensions(width: 1, height: 1), flags: [], preloadSize: nil, coverTime: nil)]) contentImageSpecs.append(ContentImageSpec(message: message, media: .file(thumbnailMedia), size: fitSize)) } else { let thumbnailMedia = TelegramMediaImage(imageId: MediaId(namespace: 0, id: index), representations: [], immediateThumbnailData: immediateThumbnailData, reference: nil, partialReference: nil, flags: []) diff --git a/submodules/ChatListUI/Sources/Node/ChatListItemStrings.swift b/submodules/ChatListUI/Sources/Node/ChatListItemStrings.swift index 5d0f029f29..4dc4de1c29 100644 --- a/submodules/ChatListUI/Sources/Node/ChatListItemStrings.swift +++ b/submodules/ChatListUI/Sources/Node/ChatListItemStrings.swift @@ -246,7 +246,7 @@ public func chatListItemStrings(strings: PresentationStrings, nameDisplayOrder: processed = true break inner } - case let .Video(_, _, flags, _): + case let .Video(_, _, flags, _, _): if flags.contains(.instantRoundVideo) { messageText = strings.Message_VideoMessage processed = true diff --git a/submodules/ContactListUI/Sources/ContactListNode.swift b/submodules/ContactListUI/Sources/ContactListNode.swift index 231e9a3bf8..9ba3d82ee1 100644 --- a/submodules/ContactListUI/Sources/ContactListNode.swift +++ b/submodules/ContactListUI/Sources/ContactListNode.swift @@ -1499,6 +1499,8 @@ public final class ContactListNode: ASDisplayNode { disabledPeerIds = disabledPeerIds.union(peerIds) case .excludeWithoutPhoneNumbers: requirePhoneNumbers = true + case .excludeBots: + break } } @@ -1786,6 +1788,8 @@ public final class ContactListNode: ASDisplayNode { disabledPeerIds = disabledPeerIds.union(peerIds) case .excludeWithoutPhoneNumbers: requirePhoneNumbers = true + case .excludeBots: + break } } diff --git a/submodules/ContactListUI/Sources/ContactsSearchContainerNode.swift b/submodules/ContactListUI/Sources/ContactsSearchContainerNode.swift index acda9b4528..9056670c02 100644 --- a/submodules/ContactListUI/Sources/ContactsSearchContainerNode.swift +++ b/submodules/ContactListUI/Sources/ContactsSearchContainerNode.swift @@ -394,6 +394,7 @@ public final class ContactsSearchContainerNode: SearchDisplayControllerContentNo var existingPeerIds = Set() var disabledPeerIds = Set() var requirePhoneNumbers = false + var excludeBots = false for filter in filters { switch filter { case .excludeSelf: @@ -404,6 +405,8 @@ public final class ContactsSearchContainerNode: SearchDisplayControllerContentNo disabledPeerIds = disabledPeerIds.union(peerIds) case .excludeWithoutPhoneNumbers: requirePhoneNumbers = true + case .excludeBots: + excludeBots = true } } var existingNormalizedPhoneNumbers = Set() @@ -413,10 +416,17 @@ public final class ContactsSearchContainerNode: SearchDisplayControllerContentNo continue } - if case let .user(user) = peer, requirePhoneNumbers { - let phone = user.phone ?? 
"" - if phone.isEmpty { - continue + if case let .user(user) = peer { + if requirePhoneNumbers { + let phone = user.phone ?? "" + if phone.isEmpty { + continue + } + } + if excludeBots { + if user.botInfo != nil { + continue + } } } @@ -442,11 +452,18 @@ public final class ContactsSearchContainerNode: SearchDisplayControllerContentNo continue } - if let user = peer.peer as? TelegramUser, requirePhoneNumbers { - let phone = user.phone ?? "" - if phone.isEmpty { - continue + if let user = peer.peer as? TelegramUser { + if requirePhoneNumbers { + let phone = user.phone ?? "" + if phone.isEmpty { + continue + } } + if excludeBots { + if user.botInfo != nil { + continue + } + } } if !existingPeerIds.contains(peer.peer.id) { diff --git a/submodules/Display/Source/Navigation/NavigationModalContainer.swift b/submodules/Display/Source/Navigation/NavigationModalContainer.swift index b7c486c8d5..91dc19258f 100644 --- a/submodules/Display/Source/Navigation/NavigationModalContainer.swift +++ b/submodules/Display/Source/Navigation/NavigationModalContainer.swift @@ -282,7 +282,7 @@ final class NavigationModalContainer: ASDisplayNode, ASScrollViewDelegate, ASGes let transition: ContainedViewLayoutTransition let dismissProgress: CGFloat if (velocity.y < -0.5 || progress >= 0.5) && self.checkInteractiveDismissWithControllers() { - if let controller = self.container.controllers.last as? MinimizableController, self.isDraggingHeader || "".isEmpty { + if let controller = self.container.controllers.last as? MinimizableController { dismissProgress = 0.0 targetOffset = 0.0 transition = .immediate diff --git a/submodules/DrawingUI/Sources/DrawingScreen.swift b/submodules/DrawingUI/Sources/DrawingScreen.swift index 43c4f710c8..dd4c749639 100644 --- a/submodules/DrawingUI/Sources/DrawingScreen.swift +++ b/submodules/DrawingUI/Sources/DrawingScreen.swift @@ -3089,6 +3089,7 @@ public final class DrawingToolsInteraction { var isAdditional = false var isMessage = false var isLink = false + var isWeather = false if let entity = entityView.entity as? DrawingStickerEntity { if case let .dualVideoReference(isAdditionalValue) = entity.content { isVideo = true @@ -3098,6 +3099,8 @@ public final class DrawingToolsInteraction { } } else if entityView.entity is DrawingLinkEntity { isLink = true + } else if entityView.entity is DrawingWeatherEntity { + isWeather = true } guard (!isVideo || isAdditional) && (!isMessage || !isTopmost) else { @@ -3143,7 +3146,7 @@ public final class DrawingToolsInteraction { } })) } - if !isVideo && !isMessage && !isLink { + if !isVideo && !isMessage && !isLink && !isWeather { if let stickerEntity = entityView.entity as? DrawingStickerEntity, case let .file(_, type) = stickerEntity.content, case .reaction = type { } else { diff --git a/submodules/GalleryUI/Sources/ChatItemGalleryFooterContentNode.swift b/submodules/GalleryUI/Sources/ChatItemGalleryFooterContentNode.swift index 6beea637a0..c4bbe07918 100644 --- a/submodules/GalleryUI/Sources/ChatItemGalleryFooterContentNode.swift +++ b/submodules/GalleryUI/Sources/ChatItemGalleryFooterContentNode.swift @@ -836,7 +836,7 @@ final class ChatItemGalleryFooterContentNode: GalleryFooterContentNode, ASScroll } else if let media = media as? 
TelegramMediaFile, !media.isAnimated { for attribute in media.attributes { switch attribute { - case let .Video(_, dimensions, _, _): + case let .Video(_, dimensions, _, _, _): isVideo = true if dimensions.height > 0 { if CGFloat(dimensions.width) / CGFloat(dimensions.height) > 1.33 { diff --git a/submodules/GalleryUI/Sources/Items/UniversalVideoGalleryItem.swift b/submodules/GalleryUI/Sources/Items/UniversalVideoGalleryItem.swift index a73a4b3923..7c82a528e0 100644 --- a/submodules/GalleryUI/Sources/Items/UniversalVideoGalleryItem.swift +++ b/submodules/GalleryUI/Sources/Items/UniversalVideoGalleryItem.swift @@ -1235,7 +1235,7 @@ final class UniversalVideoGalleryItemNode: ZoomableContentGalleryItemNode { } if let file = file { for attribute in file.attributes { - if case let .Video(duration, _, _, _) = attribute, duration >= 30 { + if case let .Video(duration, _, _, _, _) = attribute, duration >= 30 { hintSeekable = true break } diff --git a/submodules/InstantPageUI/Sources/InstantPageMediaPlaylist.swift b/submodules/InstantPageUI/Sources/InstantPageMediaPlaylist.swift index c45782b8e4..c8fcfe9691 100644 --- a/submodules/InstantPageUI/Sources/InstantPageMediaPlaylist.swift +++ b/submodules/InstantPageUI/Sources/InstantPageMediaPlaylist.swift @@ -53,7 +53,7 @@ final class InstantPageMediaPlaylistItem: SharedMediaPlaylistItem { } else { return SharedMediaPlaybackData(type: .music, source: .telegramFile(reference: .webPage(webPage: WebpageReference(self.webPage), media: file), isCopyProtected: false, isViewOnce: false)) } - case let .Video(_, _, flags, _): + case let .Video(_, _, flags, _, _): if flags.contains(.instantRoundVideo) { return SharedMediaPlaybackData(type: .instantVideo, source: .telegramFile(reference: .webPage(webPage: WebpageReference(self.webPage), media: file), isCopyProtected: false, isViewOnce: false)) } else { @@ -99,7 +99,7 @@ final class InstantPageMediaPlaylistItem: SharedMediaPlaylistItem { return SharedMediaPlaybackDisplayData.music(title: updatedTitle, performer: updatedPerformer, albumArt: albumArt, long: false, caption: nil) } - case let .Video(_, _, flags, _): + case let .Video(_, _, flags, _, _): if flags.contains(.instantRoundVideo) { return SharedMediaPlaybackDisplayData.instantVideo(author: nil, peer: nil, timestamp: 0) } else { diff --git a/submodules/LegacyMediaPickerUI/Sources/LegacyMediaPickers.swift b/submodules/LegacyMediaPickerUI/Sources/LegacyMediaPickers.swift index 5ae4ac8850..7425988c60 100644 --- a/submodules/LegacyMediaPickerUI/Sources/LegacyMediaPickers.swift +++ b/submodules/LegacyMediaPickerUI/Sources/LegacyMediaPickers.swift @@ -294,7 +294,7 @@ public func legacyEnqueueGifMessage(account: Account, data: Data, correlationId: let finalDimensions = TGMediaVideoConverter.dimensions(for: dimensions, adjustments: nil, preset: TGMediaVideoConversionPresetAnimation) var fileAttributes: [TelegramMediaFileAttribute] = [] - fileAttributes.append(.Video(duration: 0.0, size: PixelDimensions(finalDimensions), flags: [.supportsStreaming], preloadSize: nil)) + fileAttributes.append(.Video(duration: 0.0, size: PixelDimensions(finalDimensions), flags: [.supportsStreaming], preloadSize: nil, coverTime: nil)) fileAttributes.append(.FileName(fileName: fileName)) fileAttributes.append(.Animated) @@ -336,7 +336,7 @@ public func legacyEnqueueVideoMessage(account: Account, data: Data, correlationI let finalDimensions = TGMediaVideoConverter.dimensions(for: dimensions, adjustments: nil, preset: TGMediaVideoConversionPresetAnimation) var fileAttributes: 
[TelegramMediaFileAttribute] = [] - fileAttributes.append(.Video(duration: 0.0, size: PixelDimensions(finalDimensions), flags: [.supportsStreaming], preloadSize: nil)) + fileAttributes.append(.Video(duration: 0.0, size: PixelDimensions(finalDimensions), flags: [.supportsStreaming], preloadSize: nil, coverTime: nil)) fileAttributes.append(.FileName(fileName: fileName)) fileAttributes.append(.Animated) @@ -857,7 +857,7 @@ public func legacyAssetPickerEnqueueMessages(context: AccountContext, account: A fileAttributes.append(.Animated) } if !asFile { - fileAttributes.append(.Video(duration: finalDuration, size: PixelDimensions(finalDimensions), flags: [.supportsStreaming], preloadSize: nil)) + fileAttributes.append(.Video(duration: finalDuration, size: PixelDimensions(finalDimensions), flags: [.supportsStreaming], preloadSize: nil, coverTime: nil)) if let adjustments = adjustments { if adjustments.sendAsGif { fileAttributes.append(.Animated) diff --git a/submodules/LocationUI/Sources/LocationViewController.swift b/submodules/LocationUI/Sources/LocationViewController.swift index 35fb7bb4d4..173b93567c 100644 --- a/submodules/LocationUI/Sources/LocationViewController.swift +++ b/submodules/LocationUI/Sources/LocationViewController.swift @@ -80,7 +80,6 @@ public final class LocationViewController: ViewController { private let isStoryLocation: Bool private let locationManager = LocationManager() - private var permissionDisposable: Disposable? private var interaction: LocationViewInteraction? diff --git a/submodules/MediaPickerUI/Sources/MediaPickerScreen.swift b/submodules/MediaPickerUI/Sources/MediaPickerScreen.swift index ae44cd0bf1..d56dcdd359 100644 --- a/submodules/MediaPickerUI/Sources/MediaPickerScreen.swift +++ b/submodules/MediaPickerUI/Sources/MediaPickerScreen.swift @@ -1821,6 +1821,8 @@ public final class MediaPickerScreen: ViewController, AttachmentContainable { self.moreButtonNode.iconNode.enqueueState(.more, animated: false) self.selectedButtonNode = SelectedButtonNode(theme: self.presentationData.theme) + self.selectedButtonNode.alpha = 0.0 + self.selectedButtonNode.transform = CATransform3DMakeScale(0.01, 0.01, 1.0) super.init(navigationBarPresentationData: NavigationBarPresentationData(presentationData: presentationData)) @@ -2239,6 +2241,9 @@ public final class MediaPickerScreen: ViewController, AttachmentContainable { fileprivate var selectionCount: Int32 = 0 fileprivate func updateSelectionState(count: Int32) { self.selectionCount = count + guard let layout = self.validLayout else { + return + } let transition = ContainedViewLayoutTransition.animated(duration: 0.25, curve: .easeInOut) var moreIsVisible = false @@ -2262,15 +2267,22 @@ public final class MediaPickerScreen: ViewController, AttachmentContainable { self.titleView.updateTitle(title: title, isEnabled: isEnabled, animated: true) self.cancelButtonNode.setState(isEnabled ? 
.cancel : .back, animated: true) + let selectedSize = self.selectedButtonNode.update(count: count) + + var safeInset: CGFloat = 0.0 + if layout.safeInsets.right > 0.0 { + safeInset += layout.safeInsets.right + 16.0 + } + let navigationHeight = navigationLayout(layout: layout).navigationFrame.height + self.selectedButtonNode.frame = CGRect(origin: CGPoint(x: self.view.bounds.width - 54.0 - selectedSize.width - safeInset, y: floorToScreenPixels((navigationHeight - selectedSize.height) / 2.0) + 1.0), size: selectedSize) + let isSelectionButtonVisible = count > 0 && self.controllerNode.currentDisplayMode == .all transition.updateAlpha(node: self.selectedButtonNode, alpha: isSelectionButtonVisible ? 1.0 : 0.0) transition.updateTransformScale(node: self.selectedButtonNode, scale: isSelectionButtonVisible ? 1.0 : 0.01) - let selectedSize = self.selectedButtonNode.update(count: count) if self.selectedButtonNode.supernode == nil { self.navigationBar?.addSubnode(self.selectedButtonNode) } - self.selectedButtonNode.frame = CGRect(origin: CGPoint(x: self.view.bounds.width - 54.0 - selectedSize.width, y: 18.0 + UIScreenPixel), size: selectedSize) self.titleView.segmentsHidden = true moreIsVisible = count > 0 @@ -2656,9 +2668,16 @@ public final class MediaPickerScreen: ViewController, AttachmentContainable { override public func containerLayoutUpdated(_ layout: ContainerViewLayout, transition: ContainedViewLayoutTransition) { super.containerLayoutUpdated(layout, transition: transition) - + self.validLayout = layout self.controllerNode.containerLayoutUpdated(layout, navigationBarHeight: navigationLayout(layout: layout).navigationFrame.maxY, transition: transition) + + var safeInset: CGFloat = 0.0 + if layout.safeInsets.right > 0.0 { + safeInset += layout.safeInsets.right + 16.0 + } + let navigationHeight = navigationLayout(layout: layout).navigationFrame.height + self.selectedButtonNode.frame = CGRect(origin: CGPoint(x: self.view.bounds.width - 54.0 - self.selectedButtonNode.frame.width - safeInset, y: floorToScreenPixels((navigationHeight - self.selectedButtonNode.frame.height) / 2.0) + 1.0), size: self.selectedButtonNode.frame.size) } public var mediaPickerContext: AttachmentMediaPickerContext? { diff --git a/submodules/PeerAvatarGalleryUI/Sources/PeerAvatarImageGalleryItem.swift b/submodules/PeerAvatarGalleryUI/Sources/PeerAvatarImageGalleryItem.swift index 6a74a5da18..37b5bc76e6 100644 --- a/submodules/PeerAvatarGalleryUI/Sources/PeerAvatarImageGalleryItem.swift +++ b/submodules/PeerAvatarGalleryUI/Sources/PeerAvatarImageGalleryItem.swift @@ -187,7 +187,7 @@ final class PeerAvatarImageGalleryItemNode: ZoomableContentGalleryItemNode { let subject: ShareControllerSubject var actionCompletionText: String? 
if let video = entry.videoRepresentations.last, let peerReference = PeerReference(peer._asPeer()) { - let videoFileReference = FileMediaReference.avatarList(peer: peerReference, media: TelegramMediaFile(fileId: EngineMedia.Id(namespace: Namespaces.Media.LocalFile, id: 0), partialReference: nil, resource: video.representation.resource, previewRepresentations: [], videoThumbnails: [], immediateThumbnailData: nil, mimeType: "video/mp4", size: nil, attributes: [.Animated, .Video(duration: 0, size: video.representation.dimensions, flags: [], preloadSize: nil)])) + let videoFileReference = FileMediaReference.avatarList(peer: peerReference, media: TelegramMediaFile(fileId: EngineMedia.Id(namespace: Namespaces.Media.LocalFile, id: 0), partialReference: nil, resource: video.representation.resource, previewRepresentations: [], videoThumbnails: [], immediateThumbnailData: nil, mimeType: "video/mp4", size: nil, attributes: [.Animated, .Video(duration: 0, size: video.representation.dimensions, flags: [], preloadSize: nil, coverTime: nil)])) subject = .media(videoFileReference.abstract) actionCompletionText = strongSelf.presentationData.strings.Gallery_VideoSaved } else { @@ -279,7 +279,7 @@ final class PeerAvatarImageGalleryItemNode: ZoomableContentGalleryItemNode { if let video = entry.videoRepresentations.last, let peerReference = PeerReference(self.peer._asPeer()) { if video != previousVideoRepresentations?.last { let mediaManager = self.context.sharedContext.mediaManager - let videoFileReference = FileMediaReference.avatarList(peer: peerReference, media: TelegramMediaFile(fileId: EngineMedia.Id(namespace: Namespaces.Media.LocalFile, id: 0), partialReference: nil, resource: video.representation.resource, previewRepresentations: representations.map { $0.representation }, videoThumbnails: [], immediateThumbnailData: entry.immediateThumbnailData, mimeType: "video/mp4", size: nil, attributes: [.Animated, .Video(duration: 0, size: video.representation.dimensions, flags: [], preloadSize: nil)])) + let videoFileReference = FileMediaReference.avatarList(peer: peerReference, media: TelegramMediaFile(fileId: EngineMedia.Id(namespace: Namespaces.Media.LocalFile, id: 0), partialReference: nil, resource: video.representation.resource, previewRepresentations: representations.map { $0.representation }, videoThumbnails: [], immediateThumbnailData: entry.immediateThumbnailData, mimeType: "video/mp4", size: nil, attributes: [.Animated, .Video(duration: 0, size: video.representation.dimensions, flags: [], preloadSize: nil, coverTime: nil)])) let videoContent = NativeVideoContent(id: .profileVideo(id, category), userLocation: .other, fileReference: videoFileReference, streamVideo: isMediaStreamable(resource: video.representation.resource) ? 
.conservative : .none, loopVideo: true, enableSound: false, fetchAutomatically: true, onlyFullSizeThumbnail: true, useLargeThumbnail: true, continuePlayingWithoutSoundOnLostAudioSession: false, placeholderColor: .clear, storeAfterDownload: nil) let videoNode = UniversalVideoNode(postbox: self.context.account.postbox, audioSession: mediaManager.audioSession, manager: mediaManager.universalVideoManager, decoration: GalleryVideoDecoration(), content: videoContent, priority: .overlay) videoNode.isUserInteractionEnabled = false diff --git a/submodules/PeerInfoAvatarListNode/Sources/PeerInfoAvatarListNode.swift b/submodules/PeerInfoAvatarListNode/Sources/PeerInfoAvatarListNode.swift index 3a5f259483..a3ba818d23 100644 --- a/submodules/PeerInfoAvatarListNode/Sources/PeerInfoAvatarListNode.swift +++ b/submodules/PeerInfoAvatarListNode/Sources/PeerInfoAvatarListNode.swift @@ -515,7 +515,7 @@ public final class PeerInfoAvatarListItemNode: ASDisplayNode { self.isReady.set(.single(true)) } } else if let video = videoRepresentations.last, let peerReference = PeerReference(self.peer._asPeer()) { - let videoFileReference = FileMediaReference.avatarList(peer: peerReference, media: TelegramMediaFile(fileId: EngineMedia.Id(namespace: Namespaces.Media.LocalFile, id: 0), partialReference: nil, resource: video.representation.resource, previewRepresentations: representations.map { $0.representation }, videoThumbnails: [], immediateThumbnailData: immediateThumbnailData, mimeType: "video/mp4", size: nil, attributes: [.Animated, .Video(duration: 0, size: video.representation.dimensions, flags: [], preloadSize: nil)])) + let videoFileReference = FileMediaReference.avatarList(peer: peerReference, media: TelegramMediaFile(fileId: EngineMedia.Id(namespace: Namespaces.Media.LocalFile, id: 0), partialReference: nil, resource: video.representation.resource, previewRepresentations: representations.map { $0.representation }, videoThumbnails: [], immediateThumbnailData: immediateThumbnailData, mimeType: "video/mp4", size: nil, attributes: [.Animated, .Video(duration: 0, size: video.representation.dimensions, flags: [], preloadSize: nil, coverTime: nil)])) let videoContent = NativeVideoContent(id: .profileVideo(id, nil), userLocation: .other, fileReference: videoFileReference, streamVideo: isMediaStreamable(resource: video.representation.resource) ? 
.conservative : .none, loopVideo: true, enableSound: false, fetchAutomatically: true, onlyFullSizeThumbnail: fullSizeOnly, useLargeThumbnail: true, autoFetchFullSizeThumbnail: true, startTimestamp: video.representation.startTimestamp, continuePlayingWithoutSoundOnLostAudioSession: false, placeholderColor: .clear, storeAfterDownload: nil) if videoContent.id != self.videoContent?.id { diff --git a/submodules/PremiumUI/Sources/PremiumIntroScreen.swift b/submodules/PremiumUI/Sources/PremiumIntroScreen.swift index 3e5086d9aa..6240ee08ca 100644 --- a/submodules/PremiumUI/Sources/PremiumIntroScreen.swift +++ b/submodules/PremiumUI/Sources/PremiumIntroScreen.swift @@ -2720,7 +2720,7 @@ private final class PremiumIntroScreenContentComponent: CombinedComponent { if url.hasPrefix("https://apps.apple.com/account/subscriptions") { controller.context.sharedContext.applicationBindings.openSubscriptions() } else if url.hasPrefix("https://") || url.hasPrefix("tg://") { - controller.context.sharedContext.openExternalUrl(context: controller.context, urlContext: .generic, url: url, forceExternal: !url.hasPrefix("tg://") && !url.contains("?start="), presentationData: controller.context.sharedContext.currentPresentationData.with({$0}), navigationController: nil, dismissInput: {}) + controller.context.sharedContext.openExternalUrl(context: controller.context, urlContext: .generic, url: url, forceExternal: false, presentationData: controller.context.sharedContext.currentPresentationData.with({$0}), navigationController: navigationController, dismissInput: {}) } else { let context = controller.context let signal: Signal? diff --git a/submodules/SettingsUI/Sources/Data and Storage/DataAndStorageSettingsController.swift b/submodules/SettingsUI/Sources/Data and Storage/DataAndStorageSettingsController.swift index d3a2363d15..adf77392b4 100644 --- a/submodules/SettingsUI/Sources/Data and Storage/DataAndStorageSettingsController.swift +++ b/submodules/SettingsUI/Sources/Data and Storage/DataAndStorageSettingsController.swift @@ -100,9 +100,9 @@ private enum DataAndStorageEntry: ItemListNodeEntry { case useLessVoiceData(PresentationTheme, String, Bool) case useLessVoiceDataInfo(PresentationTheme, String) case otherHeader(PresentationTheme, String) + case openLinksIn(PresentationTheme, String, String) case shareSheet(PresentationTheme, String) case saveEditedPhotos(PresentationTheme, String, Bool) - case openLinksIn(PresentationTheme, String, String) case pauseMusicOnRecording(PresentationTheme, String, Bool) case raiseToListen(PresentationTheme, String, Bool) case raiseToListenInfo(PresentationTheme, String) @@ -123,7 +123,7 @@ private enum DataAndStorageEntry: ItemListNodeEntry { return DataAndStorageSection.backgroundDownload.rawValue case .useLessVoiceData, .useLessVoiceDataInfo: return DataAndStorageSection.voiceCalls.rawValue - case .otherHeader, .shareSheet, .saveEditedPhotos, .openLinksIn, .pauseMusicOnRecording, .raiseToListen, .raiseToListenInfo: + case .otherHeader, .openLinksIn, .shareSheet, .saveEditedPhotos, .pauseMusicOnRecording, .raiseToListen, .raiseToListenInfo: return DataAndStorageSection.other.rawValue case .connectionHeader, .connectionProxy: return DataAndStorageSection.connection.rawValue @@ -162,11 +162,11 @@ private enum DataAndStorageEntry: ItemListNodeEntry { return 24 case .otherHeader: return 29 - case .shareSheet: - return 30 - case .saveEditedPhotos: - return 31 case .openLinksIn: + return 30 + case .shareSheet: + return 31 + case .saveEditedPhotos: return 32 case .pauseMusicOnRecording: 
return 33 @@ -257,6 +257,12 @@ private enum DataAndStorageEntry: ItemListNodeEntry { } else { return false } + case let .openLinksIn(lhsTheme, lhsText, lhsValue): + if case let .openLinksIn(rhsTheme, rhsText, rhsValue) = rhs, lhsTheme === rhsTheme, lhsText == rhsText, lhsValue == rhsValue { + return true + } else { + return false + } case let .shareSheet(lhsTheme, lhsText): if case let .shareSheet(rhsTheme, rhsText) = rhs, lhsTheme === rhsTheme, lhsText == rhsText { return true @@ -269,12 +275,6 @@ private enum DataAndStorageEntry: ItemListNodeEntry { } else { return false } - case let .openLinksIn(lhsTheme, lhsText, lhsValue): - if case let .openLinksIn(rhsTheme, rhsText, rhsValue) = rhs, lhsTheme === rhsTheme, lhsText == rhsText, lhsValue == rhsValue { - return true - } else { - return false - } case let .pauseMusicOnRecording(lhsTheme, lhsText, lhsValue): if case let .pauseMusicOnRecording(rhsTheme, rhsText, rhsValue) = rhs, lhsTheme === rhsTheme, lhsText == rhsText, lhsValue == rhsValue { return true @@ -386,6 +386,10 @@ private enum DataAndStorageEntry: ItemListNodeEntry { return ItemListTextItem(presentationData: presentationData, text: .plain(text), sectionId: self.section) case let .otherHeader(_, text): return ItemListSectionHeaderItem(presentationData: presentationData, text: text, sectionId: self.section) + case let .openLinksIn(_, text, value): + return ItemListDisclosureItem(presentationData: presentationData, title: text, label: value, sectionId: self.section, style: .blocks, action: { + arguments.openBrowserSelection() + }) case let .shareSheet(_, text): return ItemListDisclosureItem(presentationData: presentationData, title: text, label: "", sectionId: self.section, style: .blocks, action: { arguments.openIntents() @@ -394,10 +398,6 @@ private enum DataAndStorageEntry: ItemListNodeEntry { return ItemListSwitchItem(presentationData: presentationData, title: text, value: value, sectionId: self.section, style: .blocks, updated: { value in arguments.toggleSaveEditedPhotos(value) }, tag: DataAndStorageEntryTag.saveEditedPhotos) - case let .openLinksIn(_, text, value): - return ItemListDisclosureItem(presentationData: presentationData, title: text, label: value, sectionId: self.section, style: .blocks, action: { - arguments.openBrowserSelection() - }) case let .pauseMusicOnRecording(_, text, value): return ItemListSwitchItem(presentationData: presentationData, title: text, value: value, sectionId: self.section, style: .blocks, updated: { value in arguments.togglePauseMusicOnRecording(value) @@ -618,11 +618,11 @@ private func dataAndStorageControllerEntries(state: DataAndStorageControllerStat entries.append(.useLessVoiceDataInfo(presentationData.theme, presentationData.strings.CallSettings_UseLessDataLongDescription)) entries.append(.otherHeader(presentationData.theme, presentationData.strings.ChatSettings_Other)) + entries.append(.openLinksIn(presentationData.theme, presentationData.strings.ChatSettings_OpenLinksIn, defaultWebBrowser)) if #available(iOSApplicationExtension 13.2, iOS 13.2, *) { entries.append(.shareSheet(presentationData.theme, presentationData.strings.ChatSettings_IntentsSettings)) } entries.append(.saveEditedPhotos(presentationData.theme, presentationData.strings.Settings_SaveEditedPhotos, data.generatedMediaStoreSettings.storeEditedPhotos)) - entries.append(.openLinksIn(presentationData.theme, presentationData.strings.ChatSettings_OpenLinksIn, defaultWebBrowser)) entries.append(.pauseMusicOnRecording(presentationData.theme, 
presentationData.strings.Settings_PauseMusicOnRecording, data.mediaInputSettings.pauseMusicOnRecording)) entries.append(.raiseToListen(presentationData.theme, presentationData.strings.Settings_RaiseToListen, data.mediaInputSettings.enableRaiseToSpeak)) entries.append(.raiseToListenInfo(presentationData.theme, presentationData.strings.Settings_RaiseToListenInfo)) diff --git a/submodules/SettingsUI/Sources/Data and Storage/WebBrowserDomainController.swift b/submodules/SettingsUI/Sources/Data and Storage/WebBrowserDomainController.swift new file mode 100644 index 0000000000..2d439c746a --- /dev/null +++ b/submodules/SettingsUI/Sources/Data and Storage/WebBrowserDomainController.swift @@ -0,0 +1,448 @@ +import Foundation +import UIKit +import SwiftSignalKit +import AsyncDisplayKit +import Display +import TelegramCore +import TelegramPresentationData +import AccountContext +import UrlEscaping + +private final class WebBrowserDomainInputFieldNode: ASDisplayNode, ASEditableTextNodeDelegate { + private var theme: PresentationTheme + private let backgroundNode: ASImageNode + fileprivate let textInputNode: EditableTextNode + private let placeholderNode: ASTextNode + + var updateHeight: (() -> Void)? + var complete: (() -> Void)? + var textChanged: ((String) -> Void)? + + private let backgroundInsets = UIEdgeInsets(top: 8.0, left: 16.0, bottom: 15.0, right: 16.0) + private let inputInsets = UIEdgeInsets(top: 5.0, left: 12.0, bottom: 5.0, right: 12.0) + + var text: String { + get { + return self.textInputNode.attributedText?.string ?? "" + } + set { + self.textInputNode.attributedText = NSAttributedString(string: newValue, font: Font.regular(17.0), textColor: self.theme.actionSheet.inputTextColor) + self.placeholderNode.isHidden = !newValue.isEmpty + } + } + + var placeholder: String = "" { + didSet { + self.placeholderNode.attributedText = NSAttributedString(string: self.placeholder, font: Font.regular(17.0), textColor: self.theme.actionSheet.inputPlaceholderColor) + } + } + + init(theme: PresentationTheme, placeholder: String) { + self.theme = theme + + self.backgroundNode = ASImageNode() + self.backgroundNode.isLayerBacked = true + self.backgroundNode.displaysAsynchronously = false + self.backgroundNode.displayWithoutProcessing = true + self.backgroundNode.image = generateStretchableFilledCircleImage(diameter: 12.0, color: theme.actionSheet.inputHollowBackgroundColor, strokeColor: theme.actionSheet.inputBorderColor, strokeWidth: 1.0) + + self.textInputNode = EditableTextNode() + self.textInputNode.typingAttributes = [NSAttributedString.Key.font.rawValue: Font.regular(17.0), NSAttributedString.Key.foregroundColor.rawValue: theme.actionSheet.inputTextColor] + self.textInputNode.clipsToBounds = true + self.textInputNode.hitTestSlop = UIEdgeInsets(top: -5.0, left: -5.0, bottom: -5.0, right: -5.0) + self.textInputNode.textContainerInset = UIEdgeInsets(top: self.inputInsets.top, left: 0.0, bottom: self.inputInsets.bottom, right: 0.0) + self.textInputNode.keyboardAppearance = theme.rootController.keyboardColor.keyboardAppearance + self.textInputNode.keyboardType = .URL + self.textInputNode.autocapitalizationType = .none + self.textInputNode.returnKeyType = .done + self.textInputNode.autocorrectionType = .no + self.textInputNode.tintColor = theme.actionSheet.controlAccentColor + + self.placeholderNode = ASTextNode() + self.placeholderNode.isUserInteractionEnabled = false + self.placeholderNode.displaysAsynchronously = false + self.placeholderNode.attributedText = NSAttributedString(string: placeholder, 
font: Font.regular(17.0), textColor: self.theme.actionSheet.inputPlaceholderColor) + + super.init() + + self.textInputNode.delegate = self + + self.addSubnode(self.backgroundNode) + self.addSubnode(self.textInputNode) + self.addSubnode(self.placeholderNode) + } + + func updateTheme(_ theme: PresentationTheme) { + self.theme = theme + + self.backgroundNode.image = generateStretchableFilledCircleImage(diameter: 12.0, color: self.theme.actionSheet.inputHollowBackgroundColor, strokeColor: self.theme.actionSheet.inputBorderColor, strokeWidth: 1.0) + self.textInputNode.keyboardAppearance = self.theme.rootController.keyboardColor.keyboardAppearance + self.placeholderNode.attributedText = NSAttributedString(string: self.placeholderNode.attributedText?.string ?? "", font: Font.regular(17.0), textColor: self.theme.actionSheet.inputPlaceholderColor) + self.textInputNode.tintColor = self.theme.actionSheet.controlAccentColor + } + + func updateLayout(width: CGFloat, transition: ContainedViewLayoutTransition) -> CGFloat { + let backgroundInsets = self.backgroundInsets + let inputInsets = self.inputInsets + + let textFieldHeight = self.calculateTextFieldMetrics(width: width) + let panelHeight = textFieldHeight + backgroundInsets.top + backgroundInsets.bottom + + let backgroundFrame = CGRect(origin: CGPoint(x: backgroundInsets.left, y: backgroundInsets.top), size: CGSize(width: width - backgroundInsets.left - backgroundInsets.right, height: panelHeight - backgroundInsets.top - backgroundInsets.bottom)) + transition.updateFrame(node: self.backgroundNode, frame: backgroundFrame) + + let placeholderSize = self.placeholderNode.measure(backgroundFrame.size) + transition.updateFrame(node: self.placeholderNode, frame: CGRect(origin: CGPoint(x: backgroundFrame.minX + inputInsets.left, y: backgroundFrame.minY + floor((backgroundFrame.size.height - placeholderSize.height) / 2.0)), size: placeholderSize)) + + transition.updateFrame(node: self.textInputNode, frame: CGRect(origin: CGPoint(x: backgroundFrame.minX + inputInsets.left, y: backgroundFrame.minY), size: CGSize(width: backgroundFrame.size.width - inputInsets.left - inputInsets.right, height: backgroundFrame.size.height))) + + return panelHeight + } + + func activateInput() { + self.textInputNode.becomeFirstResponder() + } + + func deactivateInput() { + self.textInputNode.resignFirstResponder() + } + + @objc func editableTextNodeDidUpdateText(_ editableTextNode: ASEditableTextNode) { + self.updateTextNodeText(animated: true) + self.textChanged?(editableTextNode.textView.text) + self.placeholderNode.isHidden = !(editableTextNode.textView.text ?? "").isEmpty + } + + private let domainRegex = try? NSRegularExpression(pattern: "^(https?://)?([a-zA-Z0-9-]+\\.?)*([a-zA-Z]*)?(:)?(/)?$", options: []) + private let pathRegex = try? 
NSRegularExpression(pattern: "^(https?://)?([a-zA-Z0-9-]+\\.)+[a-zA-Z]{2,6}/", options: []) + + func editableTextNode(_ editableTextNode: ASEditableTextNode, shouldChangeTextIn range: NSRange, replacementText text: String) -> Bool { + if text == "\n" { + self.complete?() + return false + } + + if let domainRegex = self.domainRegex, let pathRegex = self.pathRegex { + let updatedText = (editableTextNode.textView.text as NSString).replacingCharacters(in: range, with: text) + let domainMatches = domainRegex.matches(in: updatedText, options: [], range: NSRange(location: 0, length: updatedText.utf16.count)) + let pathMatches = pathRegex.matches(in: updatedText, options: [], range: NSRange(location: 0, length: updatedText.utf16.count)) + + if domainMatches.count > 0, pathMatches.count == 0 { + return true + } else { + return false + } + } + + return true + } + + private func calculateTextFieldMetrics(width: CGFloat) -> CGFloat { + let backgroundInsets = self.backgroundInsets + let inputInsets = self.inputInsets + + let unboundTextFieldHeight = max(33.0, ceil(self.textInputNode.measure(CGSize(width: width - backgroundInsets.left - backgroundInsets.right - inputInsets.left - inputInsets.right, height: CGFloat.greatestFiniteMagnitude)).height)) + + return min(61.0, max(33.0, unboundTextFieldHeight)) + } + + private func updateTextNodeText(animated: Bool) { + let backgroundInsets = self.backgroundInsets + + let textFieldHeight = self.calculateTextFieldMetrics(width: self.bounds.size.width) + + let panelHeight = textFieldHeight + backgroundInsets.top + backgroundInsets.bottom + if !self.bounds.size.height.isEqual(to: panelHeight) { + self.updateHeight?() + } + } + + @objc func clearPressed() { + self.textInputNode.attributedText = nil + self.deactivateInput() + } +} + +private final class WebBrowserDomainAlertContentNode: AlertContentNode { + private let strings: PresentationStrings + + private let titleNode: ASTextNode + private let textNode: ASTextNode + let inputFieldNode: WebBrowserDomainInputFieldNode + + private let actionNodesSeparator: ASDisplayNode + private let actionNodes: [TextAlertContentActionNode] + private let actionVerticalSeparators: [ASDisplayNode] + + private let disposable = MetaDisposable() + + private var validLayout: CGSize? + + private let hapticFeedback = HapticFeedback() + + var complete: (() -> Void)? 
{ + didSet { + self.inputFieldNode.complete = self.complete + } + } + + override var dismissOnOutsideTap: Bool { + return self.isUserInteractionEnabled + } + + init(theme: AlertControllerTheme, ptheme: PresentationTheme, strings: PresentationStrings, actions: [TextAlertAction]) { + self.strings = strings + + self.titleNode = ASTextNode() + self.titleNode.maximumNumberOfLines = 2 + self.textNode = ASTextNode() + self.textNode.maximumNumberOfLines = 2 + + self.inputFieldNode = WebBrowserDomainInputFieldNode(theme: ptheme, placeholder: strings.WebBrowser_Exceptions_Create_Placeholder) + self.inputFieldNode.text = "" + + self.actionNodesSeparator = ASDisplayNode() + self.actionNodesSeparator.isLayerBacked = true + + self.actionNodes = actions.map { action -> TextAlertContentActionNode in + return TextAlertContentActionNode(theme: theme, action: action) + } + + var actionVerticalSeparators: [ASDisplayNode] = [] + if actions.count > 1 { + for _ in 0 ..< actions.count - 1 { + let separatorNode = ASDisplayNode() + separatorNode.isLayerBacked = true + actionVerticalSeparators.append(separatorNode) + } + } + self.actionVerticalSeparators = actionVerticalSeparators + + super.init() + + self.addSubnode(self.titleNode) + self.addSubnode(self.textNode) + + self.addSubnode(self.inputFieldNode) + + self.addSubnode(self.actionNodesSeparator) + + for actionNode in self.actionNodes { + self.addSubnode(actionNode) + } + self.actionNodes.last?.actionEnabled = false + + for separatorNode in self.actionVerticalSeparators { + self.addSubnode(separatorNode) + } + + self.inputFieldNode.updateHeight = { [weak self] in + if let strongSelf = self { + if let _ = strongSelf.validLayout { + strongSelf.requestLayout?(.animated(duration: 0.15, curve: .spring)) + } + } + } + + self.inputFieldNode.textChanged = { [weak self] text in + if let strongSelf = self, let lastNode = strongSelf.actionNodes.last { + lastNode.actionEnabled = !text.isEmpty + } + } + + self.updateTheme(theme) + } + + deinit { + self.disposable.dispose() + } + + var link: String { + return self.inputFieldNode.text + } + + override func updateTheme(_ theme: AlertControllerTheme) { + self.titleNode.attributedText = NSAttributedString(string: self.strings.WebBrowser_Exceptions_Create_Title, font: Font.bold(17.0), textColor: theme.primaryColor, paragraphAlignment: .center) + self.textNode.attributedText = NSAttributedString(string: self.strings.WebBrowser_Exceptions_Create_Text, font: Font.regular(13.0), textColor: theme.primaryColor, paragraphAlignment: .center) + + self.actionNodesSeparator.backgroundColor = theme.separatorColor + for actionNode in self.actionNodes { + actionNode.updateTheme(theme) + } + for separatorNode in self.actionVerticalSeparators { + separatorNode.backgroundColor = theme.separatorColor + } + + if let size = self.validLayout { + _ = self.updateLayout(size: size, transition: .immediate) + } + } + + override func updateLayout(size: CGSize, transition: ContainedViewLayoutTransition) -> CGSize { + var size = size + size.width = min(size.width, 270.0) + let measureSize = CGSize(width: size.width - 16.0 * 2.0, height: CGFloat.greatestFiniteMagnitude) + + let hadValidLayout = self.validLayout != nil + + self.validLayout = size + + var origin: CGPoint = CGPoint(x: 0.0, y: 20.0) + let spacing: CGFloat = 5.0 + + let titleSize = self.titleNode.measure(measureSize) + transition.updateFrame(node: self.titleNode, frame: CGRect(origin: CGPoint(x: floorToScreenPixels((size.width - titleSize.width) / 2.0), y: origin.y), size: titleSize)) + origin.y 
+= titleSize.height + 4.0 + + let textSize = self.textNode.measure(measureSize) + transition.updateFrame(node: self.textNode, frame: CGRect(origin: CGPoint(x: floorToScreenPixels((size.width - textSize.width) / 2.0), y: origin.y), size: textSize)) + origin.y += textSize.height + 6.0 + spacing + + let actionButtonHeight: CGFloat = 44.0 + var minActionsWidth: CGFloat = 0.0 + let maxActionWidth: CGFloat = floor(size.width / CGFloat(self.actionNodes.count)) + let actionTitleInsets: CGFloat = 8.0 + + var effectiveActionLayout = TextAlertContentActionLayout.horizontal + for actionNode in self.actionNodes { + let actionTitleSize = actionNode.titleNode.updateLayout(CGSize(width: maxActionWidth, height: actionButtonHeight)) + if case .horizontal = effectiveActionLayout, actionTitleSize.height > actionButtonHeight * 0.6667 { + effectiveActionLayout = .vertical + } + switch effectiveActionLayout { + case .horizontal: + minActionsWidth += actionTitleSize.width + actionTitleInsets + case .vertical: + minActionsWidth = max(minActionsWidth, actionTitleSize.width + actionTitleInsets) + } + } + + let insets = UIEdgeInsets(top: 18.0, left: 18.0, bottom: 9.0, right: 18.0) + + var contentWidth = max(titleSize.width, minActionsWidth) + contentWidth = max(contentWidth, 234.0) + + var actionsHeight: CGFloat = 0.0 + switch effectiveActionLayout { + case .horizontal: + actionsHeight = actionButtonHeight + case .vertical: + actionsHeight = actionButtonHeight * CGFloat(self.actionNodes.count) + } + + let resultWidth = contentWidth + insets.left + insets.right + + let inputFieldWidth = resultWidth + let inputFieldHeight = self.inputFieldNode.updateLayout(width: inputFieldWidth, transition: transition) + let inputHeight = inputFieldHeight + transition.updateFrame(node: self.inputFieldNode, frame: CGRect(x: 0.0, y: origin.y, width: resultWidth, height: inputFieldHeight)) + transition.updateAlpha(node: self.inputFieldNode, alpha: inputHeight > 0.0 ? 
1.0 : 0.0) + + let resultSize = CGSize(width: resultWidth, height: titleSize.height + textSize.height + spacing + inputHeight + actionsHeight + insets.top + insets.bottom) + + transition.updateFrame(node: self.actionNodesSeparator, frame: CGRect(origin: CGPoint(x: 0.0, y: resultSize.height - actionsHeight - UIScreenPixel), size: CGSize(width: resultSize.width, height: UIScreenPixel))) + + var actionOffset: CGFloat = 0.0 + let actionWidth: CGFloat = floor(resultSize.width / CGFloat(self.actionNodes.count)) + var separatorIndex = -1 + var nodeIndex = 0 + for actionNode in self.actionNodes { + if separatorIndex >= 0 { + let separatorNode = self.actionVerticalSeparators[separatorIndex] + switch effectiveActionLayout { + case .horizontal: + transition.updateFrame(node: separatorNode, frame: CGRect(origin: CGPoint(x: actionOffset - UIScreenPixel, y: resultSize.height - actionsHeight), size: CGSize(width: UIScreenPixel, height: actionsHeight - UIScreenPixel))) + case .vertical: + transition.updateFrame(node: separatorNode, frame: CGRect(origin: CGPoint(x: 0.0, y: resultSize.height - actionsHeight + actionOffset - UIScreenPixel), size: CGSize(width: resultSize.width, height: UIScreenPixel))) + } + } + separatorIndex += 1 + + let currentActionWidth: CGFloat + switch effectiveActionLayout { + case .horizontal: + if nodeIndex == self.actionNodes.count - 1 { + currentActionWidth = resultSize.width - actionOffset + } else { + currentActionWidth = actionWidth + } + case .vertical: + currentActionWidth = resultSize.width + } + + let actionNodeFrame: CGRect + switch effectiveActionLayout { + case .horizontal: + actionNodeFrame = CGRect(origin: CGPoint(x: actionOffset, y: resultSize.height - actionsHeight), size: CGSize(width: currentActionWidth, height: actionButtonHeight)) + actionOffset += currentActionWidth + case .vertical: + actionNodeFrame = CGRect(origin: CGPoint(x: 0.0, y: resultSize.height - actionsHeight + actionOffset), size: CGSize(width: currentActionWidth, height: actionButtonHeight)) + actionOffset += actionButtonHeight + } + + transition.updateFrame(node: actionNode, frame: actionNodeFrame) + + nodeIndex += 1 + } + + if !hadValidLayout { + self.inputFieldNode.activateInput() + } + + return resultSize + } + + func animateError() { + self.inputFieldNode.layer.addShakeAnimation() + self.hapticFeedback.error() + } +} + +public func webBrowserDomainController(context: AccountContext, updatedPresentationData: (initial: PresentationData, signal: Signal)? = nil, apply: @escaping (String?) -> Void) -> AlertController { + let presentationData = updatedPresentationData?.initial ?? context.sharedContext.currentPresentationData.with { $0 } + + var dismissImpl: ((Bool) -> Void)? + var applyImpl: (() -> Void)? 
+ + let actions: [TextAlertAction] = [TextAlertAction(type: .genericAction, title: presentationData.strings.Common_Cancel, action: { + dismissImpl?(true) + apply(nil) + }), TextAlertAction(type: .defaultAction, title: presentationData.strings.Common_Done, action: { + applyImpl?() + })] + + let contentNode = WebBrowserDomainAlertContentNode(theme: AlertControllerTheme(presentationData: presentationData), ptheme: presentationData.theme, strings: presentationData.strings, actions: actions) + contentNode.complete = { + applyImpl?() + } + applyImpl = { [weak contentNode] in + guard let contentNode = contentNode else { + return + } + let updatedLink = explicitUrl(contentNode.link) + if !updatedLink.isEmpty && isValidUrl(updatedLink, validSchemes: ["http": true, "https": true]) { + dismissImpl?(true) + apply(updatedLink) + } else { + contentNode.animateError() + } + } + + let controller = AlertController(theme: AlertControllerTheme(presentationData: presentationData), contentNode: contentNode) + let presentationDataDisposable = (updatedPresentationData?.signal ?? context.sharedContext.presentationData).start(next: { [weak controller, weak contentNode] presentationData in + controller?.theme = AlertControllerTheme(presentationData: presentationData) + contentNode?.inputFieldNode.updateTheme(presentationData.theme) + }) + controller.dismissed = { _ in + presentationDataDisposable.dispose() + } + dismissImpl = { [weak controller] animated in + contentNode.inputFieldNode.deactivateInput() + if animated { + controller?.dismissAnimated() + } else { + controller?.dismiss() + } + } + return controller +} diff --git a/submodules/SettingsUI/Sources/Data and Storage/WebBrowserDomainExceptionItem.swift b/submodules/SettingsUI/Sources/Data and Storage/WebBrowserDomainExceptionItem.swift new file mode 100644 index 0000000000..572e65ed9f --- /dev/null +++ b/submodules/SettingsUI/Sources/Data and Storage/WebBrowserDomainExceptionItem.swift @@ -0,0 +1,280 @@ +import Foundation +import UIKit +import Display +import AsyncDisplayKit +import SwiftSignalKit +import TelegramPresentationData +import TelegramCore +import AccountContext +import ItemListUI + +public class WebBrowserDomainExceptionItem: ListViewItem, ItemListItem { + let presentationData: ItemListPresentationData + let context: AccountContext? + let title: String + let label: String + public let sectionId: ItemListSectionId + let style: ItemListStyle + + public init( + presentationData: ItemListPresentationData, + context: AccountContext? = nil, + title: String, + label: String, + sectionId: ItemListSectionId, + style: ItemListStyle + ) { + self.presentationData = presentationData + self.context = context + self.title = title + self.label = label + self.sectionId = sectionId + self.style = style + } + + public func nodeConfiguredForParams(async: @escaping (@escaping () -> Void) -> Void, params: ListViewItemLayoutParams, synchronousLoads: Bool, previousItem: ListViewItem?, nextItem: ListViewItem?, completion: @escaping (ListViewItemNode, @escaping () -> (Signal?, (ListViewItemApply) -> Void)) -> Void) { + async { + let node = WebBrowserDomainExceptionItemNode() + let (layout, apply) = node.asyncLayout()(self, params, itemListNeighbors(item: self, topItem: previousItem as? ItemListItem, bottomItem: nextItem as? 
ItemListItem)) + + node.contentSize = layout.contentSize + node.insets = layout.insets + + Queue.mainQueue().async { + completion(node, { + return (nil, { _ in apply() }) + }) + } + } + } + + public func updateNode(async: @escaping (@escaping () -> Void) -> Void, node: @escaping () -> ListViewItemNode, params: ListViewItemLayoutParams, previousItem: ListViewItem?, nextItem: ListViewItem?, animation: ListViewItemUpdateAnimation, completion: @escaping (ListViewItemNodeLayout, @escaping (ListViewItemApply) -> Void) -> Void) { + Queue.mainQueue().async { + if let nodeValue = node() as? WebBrowserDomainExceptionItemNode { + let makeLayout = nodeValue.asyncLayout() + + async { + let (layout, apply) = makeLayout(self, params, itemListNeighbors(item: self, topItem: previousItem as? ItemListItem, bottomItem: nextItem as? ItemListItem)) + Queue.mainQueue().async { + completion(layout, { _ in + apply() + }) + } + } + } + } + } + + public var selectable: Bool = false + + public func selected(listView: ListView){ + } +} + +public class WebBrowserDomainExceptionItemNode: ListViewItemNode, ItemListItemNode { + private let backgroundNode: ASDisplayNode + private let topStripeNode: ASDisplayNode + private let bottomStripeNode: ASDisplayNode + private let maskNode: ASImageNode + + let iconNode: ASImageNode + let titleNode: TextNode + let labelNode: TextNode + + private let activateArea: AccessibilityAreaNode + + private var item: WebBrowserDomainExceptionItem? + + override public var canBeSelected: Bool { + return false + } + + public var tag: ItemListItemTag? = nil + + public init() { + self.backgroundNode = ASDisplayNode() + self.backgroundNode.isLayerBacked = true + self.backgroundNode.backgroundColor = .white + + self.maskNode = ASImageNode() + self.maskNode.isUserInteractionEnabled = false + + self.topStripeNode = ASDisplayNode() + self.topStripeNode.isLayerBacked = true + + self.bottomStripeNode = ASDisplayNode() + self.bottomStripeNode.isLayerBacked = true + + self.iconNode = ASImageNode() + self.iconNode.isLayerBacked = true + self.iconNode.displaysAsynchronously = false + + self.titleNode = TextNode() + self.titleNode.isUserInteractionEnabled = false + + self.labelNode = TextNode() + self.labelNode.isUserInteractionEnabled = false + + self.activateArea = AccessibilityAreaNode() + + super.init(layerBacked: false, dynamicBounce: false) + + self.addSubnode(self.titleNode) + self.addSubnode(self.labelNode) + + self.addSubnode(self.activateArea) + } + + public func asyncLayout() -> (_ item: WebBrowserDomainExceptionItem, _ params: ListViewItemLayoutParams, _ insets: ItemListNeighbors) -> (ListViewItemNodeLayout, () -> Void) { + let makeTitleLayout = TextNode.asyncLayout(self.titleNode) + let makeLabelLayout = TextNode.asyncLayout(self.labelNode) + + let currentItem = self.item + + return { item, params, neighbors in + var updatedTheme: PresentationTheme? 
+ if currentItem?.presentationData.theme !== item.presentationData.theme { + updatedTheme = item.presentationData.theme + } + + let contentSize: CGSize + let insets: UIEdgeInsets + let separatorHeight = UIScreenPixel + let itemBackgroundColor: UIColor + let itemSeparatorColor: UIColor + + let leftInset = 16.0 + params.leftInset + 43.0 + + let titleColor: UIColor = item.presentationData.theme.list.itemPrimaryTextColor + let labelColor: UIColor = item.presentationData.theme.list.itemAccentColor + + let titleFont = Font.medium(item.presentationData.fontSize.itemListBaseFontSize) + let labelFont = Font.regular(floor(item.presentationData.fontSize.itemListBaseFontSize * 15.0 / 17.0)) + + let maxTitleWidth: CGFloat = params.width - params.rightInset - 20.0 - leftInset + + let (titleLayout, titleApply) = makeTitleLayout(TextNodeLayoutArguments(attributedString: NSAttributedString(string: item.title, font: titleFont, textColor: titleColor), backgroundColor: nil, maximumNumberOfLines: 1, truncationType: .end, constrainedSize: CGSize(width: maxTitleWidth, height: CGFloat.greatestFiniteMagnitude), alignment: .natural, cutout: nil, insets: UIEdgeInsets())) + + let (labelLayout, labelApply) = makeLabelLayout(TextNodeLayoutArguments(attributedString: NSAttributedString(string: item.label, font: labelFont, textColor: labelColor), backgroundColor: nil, maximumNumberOfLines: 1, truncationType: .end, constrainedSize: CGSize(width: maxTitleWidth, height: CGFloat.greatestFiniteMagnitude), alignment: .natural, cutout: nil, insets: UIEdgeInsets())) + + let verticalInset: CGFloat = 11.0 + let titleSpacing: CGFloat = 1.0 + + let height: CGFloat = verticalInset * 2.0 + titleLayout.size.height + titleSpacing + labelLayout.size.height + + switch item.style { + case .plain: + itemBackgroundColor = item.presentationData.theme.list.plainBackgroundColor + itemSeparatorColor = item.presentationData.theme.list.itemPlainSeparatorColor + contentSize = CGSize(width: params.width, height: height) + insets = itemListNeighborsPlainInsets(neighbors) + case .blocks: + itemBackgroundColor = item.presentationData.theme.list.itemBlocksBackgroundColor + itemSeparatorColor = item.presentationData.theme.list.itemBlocksSeparatorColor + contentSize = CGSize(width: params.width, height: height) + insets = itemListNeighborsGroupedInsets(neighbors, params) + } + + let layout = ListViewItemNodeLayout(contentSize: contentSize, insets: insets) + + return (ListViewItemNodeLayout(contentSize: contentSize, insets: insets), { [weak self] in + if let strongSelf = self { + strongSelf.item = item + + strongSelf.activateArea.frame = CGRect(origin: CGPoint(x: params.leftInset, y: 0.0), size: CGSize(width: params.width - params.leftInset - params.rightInset, height: layout.contentSize.height)) + strongSelf.activateArea.accessibilityLabel = item.title + strongSelf.activateArea.accessibilityValue = item.label + + if let _ = updatedTheme { + strongSelf.topStripeNode.backgroundColor = itemSeparatorColor + strongSelf.bottomStripeNode.backgroundColor = itemSeparatorColor + strongSelf.backgroundNode.backgroundColor = itemBackgroundColor + } + + let _ = titleApply() + let _ = labelApply() + + switch item.style { + case .plain: + if strongSelf.backgroundNode.supernode != nil { + strongSelf.backgroundNode.removeFromSupernode() + } + if strongSelf.topStripeNode.supernode != nil { + strongSelf.topStripeNode.removeFromSupernode() + } + if strongSelf.bottomStripeNode.supernode == nil { + strongSelf.insertSubnode(strongSelf.bottomStripeNode, at: 0) + } + if 
strongSelf.maskNode.supernode != nil { + strongSelf.maskNode.removeFromSupernode() + } + strongSelf.bottomStripeNode.frame = CGRect(origin: CGPoint(x: leftInset, y: contentSize.height - separatorHeight), size: CGSize(width: params.width - leftInset, height: separatorHeight)) + case .blocks: + if strongSelf.backgroundNode.supernode == nil { + strongSelf.insertSubnode(strongSelf.backgroundNode, at: 0) + } + if strongSelf.topStripeNode.supernode == nil { + strongSelf.insertSubnode(strongSelf.topStripeNode, at: 1) + } + if strongSelf.bottomStripeNode.supernode == nil { + strongSelf.insertSubnode(strongSelf.bottomStripeNode, at: 2) + } + if strongSelf.maskNode.supernode == nil { + strongSelf.insertSubnode(strongSelf.maskNode, at: 3) + } + + let hasCorners = itemListHasRoundedBlockLayout(params) + var hasTopCorners = false + var hasBottomCorners = false + switch neighbors.top { + case .sameSection(false): + strongSelf.topStripeNode.isHidden = true + default: + hasTopCorners = true + strongSelf.topStripeNode.isHidden = hasCorners + } + let bottomStripeInset: CGFloat + switch neighbors.bottom { + case .sameSection(false): + bottomStripeInset = leftInset + strongSelf.bottomStripeNode.isHidden = false + default: + bottomStripeInset = 0.0 + hasBottomCorners = true + strongSelf.bottomStripeNode.isHidden = hasCorners + } + + strongSelf.maskNode.image = hasCorners ? PresentationResourcesItemList.cornersImage(item.presentationData.theme, top: hasTopCorners, bottom: hasBottomCorners) : nil + + strongSelf.backgroundNode.frame = CGRect(origin: CGPoint(x: 0.0, y: -min(insets.top, separatorHeight)), size: CGSize(width: params.width, height: contentSize.height + min(insets.top, separatorHeight) + min(insets.bottom, separatorHeight))) + strongSelf.maskNode.frame = strongSelf.backgroundNode.frame.insetBy(dx: params.leftInset, dy: 0.0) + strongSelf.topStripeNode.frame = CGRect(origin: CGPoint(x: 0.0, y: -min(insets.top, separatorHeight)), size: CGSize(width: params.width, height: separatorHeight)) + strongSelf.bottomStripeNode.frame = CGRect(origin: CGPoint(x: bottomStripeInset, y: contentSize.height - separatorHeight), size: CGSize(width: params.width - bottomStripeInset, height: separatorHeight)) + } + + var centralContentHeight: CGFloat = titleLayout.size.height + centralContentHeight += titleSpacing + centralContentHeight += labelLayout.size.height + + let titleFrame = CGRect(origin: CGPoint(x: leftInset, y: floor((height - centralContentHeight) / 2.0)), size: titleLayout.size) + strongSelf.titleNode.frame = titleFrame + + let labelFrame = CGRect(origin: CGPoint(x: leftInset, y: titleFrame.maxY + titleSpacing), size: labelLayout.size) + strongSelf.labelNode.frame = labelFrame + } + }) + } + } + + override public func animateInsertion(_ currentTimestamp: Double, duration: Double, options: ListViewItemAnimationOptions) { + self.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.4) + } + + override public func animateAdded(_ currentTimestamp: Double, duration: Double) { + self.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2) + } + + override public func animateRemoved(_ currentTimestamp: Double, duration: Double) { + self.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.15, removeOnCompletion: false) + } +} diff --git a/submodules/SettingsUI/Sources/Data and Storage/WebBrowserSettingsController.swift b/submodules/SettingsUI/Sources/Data and Storage/WebBrowserSettingsController.swift index e5555124ab..eda596b35c 100644 --- a/submodules/SettingsUI/Sources/Data and Storage/WebBrowserSettingsController.swift 
+++ b/submodules/SettingsUI/Sources/Data and Storage/WebBrowserSettingsController.swift @@ -9,29 +9,60 @@ import TelegramUIPreferences import ItemListUI import AccountContext import OpenInExternalAppUI +import ItemListPeerActionItem +import UndoUI +import WebKit +import LinkPresentation private final class WebBrowserSettingsControllerArguments { let context: AccountContext let updateDefaultBrowser: (String?) -> Void + let clearCookies: () -> Void + let addException: () -> Void + let clearExceptions: () -> Void - init(context: AccountContext, updateDefaultBrowser: @escaping (String?) -> Void) { + init( + context: AccountContext, + updateDefaultBrowser: @escaping (String?) -> Void, + clearCookies: @escaping () -> Void, + addException: @escaping () -> Void, + clearExceptions: @escaping () -> Void + ) { self.context = context self.updateDefaultBrowser = updateDefaultBrowser + self.clearCookies = clearCookies + self.addException = addException + self.clearExceptions = clearExceptions } } private enum WebBrowserSettingsSection: Int32 { case browsers + case clearCookies + case exceptions } private enum WebBrowserSettingsControllerEntry: ItemListNodeEntry { case browserHeader(PresentationTheme, String) case browser(PresentationTheme, String, OpenInApplication?, String?, Bool, Int32) + case clearCookies(PresentationTheme, String) + case clearCookiesInfo(PresentationTheme, String) + + case exceptionsHeader(PresentationTheme, String) + case exceptionsAdd(PresentationTheme, String) + case exception(Int32, PresentationTheme, WebBrowserException) + case exceptionsClear(PresentationTheme, String) + case exceptionsInfo(PresentationTheme, String) + var section: ItemListSectionId { switch self { case .browserHeader, .browser: return WebBrowserSettingsSection.browsers.rawValue + case .clearCookies, .clearCookiesInfo: + return WebBrowserSettingsSection.clearCookies.rawValue + case .exceptionsHeader, .exceptionsAdd, .exception, .exceptionsClear, .exceptionsInfo: + return WebBrowserSettingsSection.exceptions.rawValue } } @@ -41,6 +72,20 @@ private enum WebBrowserSettingsControllerEntry: ItemListNodeEntry { return 0 case let .browser(_, _, _, _, _, index): return 1 + index + case .clearCookies: + return 102 + case .clearCookiesInfo: + return 103 + case .exceptionsHeader: + return 104 + case .exceptionsAdd: + return 105 + case let .exception(index, _, _): + return 106 + index + case .exceptionsClear: + return 1000 + case .exceptionsInfo: + return 1001 } } @@ -58,6 +103,48 @@ private enum WebBrowserSettingsControllerEntry: ItemListNodeEntry { } else { return false } + case let .clearCookies(lhsTheme, lhsText): + if case let .clearCookies(rhsTheme, rhsText) = rhs, lhsTheme === rhsTheme, lhsText == rhsText { + return true + } else { + return false + } + case let .clearCookiesInfo(lhsTheme, lhsText): + if case let .clearCookiesInfo(rhsTheme, rhsText) = rhs, lhsTheme === rhsTheme, lhsText == rhsText { + return true + } else { + return false + } + case let .exceptionsHeader(lhsTheme, lhsText): + if case let .exceptionsHeader(rhsTheme, rhsText) = rhs, lhsTheme === rhsTheme, lhsText == rhsText { + return true + } else { + return false + } + case let .exception(lhsIndex, lhsTheme, lhsException): + if case let .exception(rhsIndex, rhsTheme, rhsException) = rhs, lhsIndex == rhsIndex, lhsTheme === rhsTheme, lhsException == rhsException { + return true + } else { + return false + } + case let .exceptionsAdd(lhsTheme, lhsText): + if case let .exceptionsAdd(rhsTheme, rhsText) = rhs, lhsTheme === rhsTheme, lhsText == rhsText { + 
return true + } else { + return false + } + case let .exceptionsClear(lhsTheme, lhsText): + if case let .exceptionsClear(rhsTheme, rhsText) = rhs, lhsTheme === rhsTheme, lhsText == rhsText { + return true + } else { + return false + } + case let .exceptionsInfo(lhsTheme, lhsText): + if case let .exceptionsInfo(rhsTheme, rhsText) = rhs, lhsTheme === rhsTheme, lhsText == rhsText { + return true + } else { + return false + } } } @@ -74,44 +161,194 @@ private enum WebBrowserSettingsControllerEntry: ItemListNodeEntry { return WebBrowserItem(context: arguments.context, presentationData: presentationData, title: title, application: application, checked: selected, sectionId: self.section) { arguments.updateDefaultBrowser(identifier) } + case let .clearCookies(_, text): + return ItemListPeerActionItem(presentationData: presentationData, icon: PresentationResourcesItemList.accentDeleteIconImage(presentationData.theme), title: text, sectionId: self.section, height: .generic, color: .accent, editing: false, action: { + arguments.clearCookies() + }) + case let .clearCookiesInfo(_, text): + return ItemListTextItem(presentationData: presentationData, text: .plain(text), sectionId: self.section) + case let .exceptionsHeader(_, text): + return ItemListSectionHeaderItem(presentationData: presentationData, text: text, sectionId: self.section) + case let .exception(_, _, exception): + return WebBrowserDomainExceptionItem(presentationData: presentationData, context: arguments.context, title: exception.title, label: exception.domain, sectionId: self.section, style: .blocks) + case let .exceptionsAdd(_, text): + return ItemListPeerActionItem(presentationData: presentationData, icon: PresentationResourcesItemList.plusIconImage(presentationData.theme), title: text, sectionId: self.section, height: .generic, color: .accent, editing: false, action: { + arguments.addException() + }) + case let .exceptionsClear(_, text): + return ItemListPeerActionItem(presentationData: presentationData, icon: PresentationResourcesItemList.deleteIconImage(presentationData.theme), title: text, sectionId: self.section, height: .generic, color: .destructive, editing: false, action: { + arguments.clearExceptions() + }) + case let .exceptionsInfo(_, text): + return ItemListTextItem(presentationData: presentationData, text: .plain(text), sectionId: self.section) } } } -private func webBrowserSettingsControllerEntries(context: AccountContext, presentationData: PresentationData, selectedBrowser: String?) 
-> [WebBrowserSettingsControllerEntry] { +private func webBrowserSettingsControllerEntries(context: AccountContext, presentationData: PresentationData, settings: WebBrowserSettings) -> [WebBrowserSettingsControllerEntry] { var entries: [WebBrowserSettingsControllerEntry] = [] let options = availableOpenInOptions(context: context, item: .url(url: "http://telegram.org")) - entries.append(.browserHeader(presentationData.theme, presentationData.strings.WebBrowser_DefaultBrowser)) - entries.append(.browser(presentationData.theme, presentationData.strings.WebBrowser_Telegram, nil, nil, selectedBrowser == nil, 0)) - entries.append(.browser(presentationData.theme, presentationData.strings.WebBrowser_InAppSafari, .safari, "inApp", selectedBrowser == "inApp", 1)) + entries.append(.browserHeader(presentationData.theme, presentationData.strings.WebBrowser_OpenLinksIn_Title)) + entries.append(.browser(presentationData.theme, presentationData.strings.WebBrowser_Telegram, nil, nil, settings.defaultWebBrowser == nil, 0)) - var index: Int32 = 2 + var index: Int32 = 1 for option in options { - entries.append(.browser(presentationData.theme, option.title, option.application, option.identifier, option.identifier == selectedBrowser, index)) + entries.append(.browser(presentationData.theme, option.title, option.application, option.identifier, option.identifier == settings.defaultWebBrowser, index)) index += 1 } + if settings.defaultWebBrowser == nil { + entries.append(.clearCookies(presentationData.theme, presentationData.strings.WebBrowser_ClearCookies)) + entries.append(.clearCookiesInfo(presentationData.theme, presentationData.strings.WebBrowser_ClearCookies_Info)) + + entries.append(.exceptionsHeader(presentationData.theme, presentationData.strings.WebBrowser_Exceptions_Title)) + entries.append(.exceptionsAdd(presentationData.theme, presentationData.strings.WebBrowser_Exceptions_AddException)) + + var exceptionIndex: Int32 = 0 + for exception in settings.exceptions { + entries.append(.exception(exceptionIndex, presentationData.theme, exception)) + exceptionIndex += 1 + } + + if !settings.exceptions.isEmpty { + entries.append(.exceptionsClear(presentationData.theme, presentationData.strings.WebBrowser_Exceptions_Clear)) + } + + entries.append(.exceptionsInfo(presentationData.theme, presentationData.strings.WebBrowser_Exceptions_Info)) + } + return entries } public func webBrowserSettingsController(context: AccountContext) -> ViewController { - let arguments = WebBrowserSettingsControllerArguments(context: context, updateDefaultBrowser: { identifier in - let _ = updateWebBrowserSettingsInteractively(accountManager: context.sharedContext.accountManager, { $0.withUpdatedDefaultWebBrowser(identifier) }).start() - }) + var clearCookiesImpl: (() -> Void)? + var addExceptionImpl: (() -> Void)? + var clearExceptionsImpl: (() -> Void)? 
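// Editor's illustrative sketch (not part of the patch) of the two system APIs the
// implementations wired up below rely on: WKWebsiteDataStore for the clear-cookies
// action and LPMetadataProvider for resolving a site title when adding an exception.
// The function names here are hypothetical; only the framework calls are real.
import Foundation
import WebKit
import LinkPresentation

// Removes all in-app browser site data (cookies, local storage, caches),
// mirroring what the clearCookies action below does.
func clearInAppBrowserData(completion: @escaping () -> Void) {
    WKWebsiteDataStore.default().removeData(
        ofTypes: WKWebsiteDataStore.allWebsiteDataTypes(),
        modifiedSince: Date(timeIntervalSince1970: 0),
        completionHandler: completion
    )
}

// Fetches a human-readable title for an exception domain, similar to the
// LPMetadataProvider usage in fetchDomainExceptionInfo below (iOS 13+).
func fetchSiteTitle(for url: URL, completion: @escaping (String?) -> Void) {
    let provider = LPMetadataProvider()
    provider.shouldFetchSubresources = true
    provider.startFetchingMetadata(for: url) { metadata, _ in
        completion(metadata?.title)
    }
}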
- let signal = combineLatest(context.sharedContext.presentationData, context.sharedContext.accountManager.sharedData(keys: [ApplicationSpecificSharedDataKeys.webBrowserSettings])) + let arguments = WebBrowserSettingsControllerArguments( + context: context, + updateDefaultBrowser: { identifier in + let _ = updateWebBrowserSettingsInteractively(accountManager: context.sharedContext.accountManager, { + $0.withUpdatedDefaultWebBrowser(identifier) + }).start() + }, + clearCookies: { + clearCookiesImpl?() + }, + addException: { + addExceptionImpl?() + }, + clearExceptions: { + clearExceptionsImpl?() + } + ) + + let previousSettings = Atomic(value: nil) + + let signal = combineLatest( + context.sharedContext.presentationData, + context.sharedContext.accountManager.sharedData(keys: [ApplicationSpecificSharedDataKeys.webBrowserSettings]) + ) |> deliverOnMainQueue |> map { presentationData, sharedData -> (ItemListControllerState, (ItemListNodeState, Any)) in let settings = sharedData.entries[ApplicationSpecificSharedDataKeys.webBrowserSettings]?.get(WebBrowserSettings.self) ?? WebBrowserSettings.defaultSettings + let previousSettings = previousSettings.swap(settings) + + var animateChanges = false + if let previousSettings { + if previousSettings.defaultWebBrowser != settings.defaultWebBrowser { + animateChanges = true + } + } let controllerState = ItemListControllerState(presentationData: ItemListPresentationData(presentationData), title: .text(presentationData.strings.WebBrowser_Title), leftNavigationButton: nil, rightNavigationButton: nil, backNavigationButton: ItemListBackButton(title: presentationData.strings.Common_Back)) - let listState = ItemListNodeState(presentationData: ItemListPresentationData(presentationData), entries: webBrowserSettingsControllerEntries(context: context, presentationData: presentationData, selectedBrowser: settings.defaultWebBrowser), style: .blocks, animateChanges: false) + let listState = ItemListNodeState(presentationData: ItemListPresentationData(presentationData), entries: webBrowserSettingsControllerEntries(context: context, presentationData: presentationData, settings: settings), style: .blocks, animateChanges: animateChanges) return (controllerState, (listState, arguments)) } let controller = ItemListController(context: context, state: signal) + + clearCookiesImpl = { [weak controller] in + WKWebsiteDataStore.default().removeData(ofTypes: WKWebsiteDataStore.allWebsiteDataTypes(), modifiedSince: Date(timeIntervalSince1970: 0), completionHandler:{}) + + let presentationData = context.sharedContext.currentPresentationData.with { $0 } + controller?.present(UndoOverlayController( + presentationData: presentationData, + content: .info( + title: nil, + text: presentationData.strings.WebBrowser_ClearCookies_Succeed, + timeout: nil, + customUndoText: nil + ), + elevatedLayout: false, + position: .bottom, + action: { _ in return false }), in: .current + ) + } + + addExceptionImpl = { [weak controller] in + let linkController = webBrowserDomainController(context: context, apply: { url in + if let url { + let _ = fetchDomainExceptionInfo(url: url).startStandalone(next: { newException in + let _ = updateWebBrowserSettingsInteractively(accountManager: context.sharedContext.accountManager, { currentSettings in + var currentExceptions = currentSettings.exceptions + for exception in currentExceptions { + if exception.domain == newException.domain { + return currentSettings + } + } + currentExceptions.append(newException) + return 
currentSettings.withUpdatedExceptions(currentExceptions) + }).start() + }) + } + }) + controller?.present(linkController, in: .window(.root)) + } + + clearExceptionsImpl = { + let _ = updateWebBrowserSettingsInteractively(accountManager: context.sharedContext.accountManager, { currentSettings in + return currentSettings.withUpdatedExceptions([]) + }).start() + } + return controller } + +private func cleanDomain(url: String) -> (domain: String, fullUrl: String) { + if let parsedUrl = URL(string: url) { + let host: String? + let scheme = parsedUrl.scheme ?? "https" + if #available(iOS 16.0, *) { + host = parsedUrl.host(percentEncoded: true)?.lowercased() + } else { + host = parsedUrl.host?.lowercased() + } + return (host ?? url, "\(scheme)://\(host ?? "")") + } else { + return (url, url) + } +} + +private func fetchDomainExceptionInfo(url: String) -> Signal { + let (domain, domainUrl) = cleanDomain(url: url) + if #available(iOS 13.0, *), let url = URL(string: domainUrl) { + return Signal { subscriber in + let metadataProvider = LPMetadataProvider() + metadataProvider.shouldFetchSubresources = true + metadataProvider.startFetchingMetadata(for: url, completionHandler: { metadata, _ in + let title = metadata?.value(forKey: "_siteName") as? String ?? metadata?.title + subscriber.putNext(WebBrowserException(domain: domain, title: title ?? domain)) + subscriber.putCompletion() + }) + return ActionDisposable { + metadataProvider.cancel() + } + } + } else { + return .single(WebBrowserException(domain: domain, title: domain)) + } +} diff --git a/submodules/ShareController/Sources/ShareLoadingContainerNode.swift b/submodules/ShareController/Sources/ShareLoadingContainerNode.swift index 1d6c98dede..292cc01841 100644 --- a/submodules/ShareController/Sources/ShareLoadingContainerNode.swift +++ b/submodules/ShareController/Sources/ShareLoadingContainerNode.swift @@ -279,7 +279,7 @@ public final class ShareProlongedLoadingContainerNode: ASDisplayNode, ShareConte if let postbox, let mediaManager = environment.mediaManager, let path = getAppBundle().path(forResource: "BlankVideo", ofType: "m4v"), let size = fileSize(path) { let decoration = ChatBubbleVideoDecoration(corners: ImageCorners(), nativeSize: CGSize(width: 100.0, height: 100.0), contentMode: .aspectFit, backgroundColor: .black) - let dummyFile = TelegramMediaFile(fileId: EngineMedia.Id(namespace: 0, id: 1), partialReference: nil, resource: LocalFileReferenceMediaResource(localFilePath: path, randomId: 12345), previewRepresentations: [], videoThumbnails: [], immediateThumbnailData: nil, mimeType: "video/mp4", size: size, attributes: [.Video(duration: 1, size: PixelDimensions(width: 100, height: 100), flags: [], preloadSize: nil)]) + let dummyFile = TelegramMediaFile(fileId: EngineMedia.Id(namespace: 0, id: 1), partialReference: nil, resource: LocalFileReferenceMediaResource(localFilePath: path, randomId: 12345), previewRepresentations: [], videoThumbnails: [], immediateThumbnailData: nil, mimeType: "video/mp4", size: size, attributes: [.Video(duration: 1, size: PixelDimensions(width: 100, height: 100), flags: [], preloadSize: nil, coverTime: nil)]) let videoContent = NativeVideoContent(id: .message(1, EngineMedia.Id(namespace: 0, id: 1)), userLocation: .other, fileReference: .standalone(media: dummyFile), streamVideo: .none, loopVideo: true, enableSound: false, fetchAutomatically: true, onlyFullSizeThumbnail: false, continuePlayingWithoutSoundOnLostAudioSession: false, placeholderColor: .black, storeAfterDownload: nil) diff --git 
a/submodules/ShareItems/Sources/ShareItems.swift b/submodules/ShareItems/Sources/ShareItems.swift index 3ff8e54ca6..71ae6be423 100644 --- a/submodules/ShareItems/Sources/ShareItems.swift +++ b/submodules/ShareItems/Sources/ShareItems.swift @@ -144,7 +144,7 @@ private func preparedShareItem(postbox: Postbox, network: Network, to peerId: Pe let estimatedSize = TGMediaVideoConverter.estimatedSize(for: preset, duration: finalDuration, hasAudio: true) let resource = LocalFileVideoMediaResource(randomId: Int64.random(in: Int64.min ... Int64.max), path: asset.url.path, adjustments: resourceAdjustments) - return standaloneUploadedFile(postbox: postbox, network: network, peerId: peerId, text: "", source: .resource(.standalone(resource: resource)), mimeType: "video/mp4", attributes: [.Video(duration: finalDuration, size: PixelDimensions(width: Int32(finalDimensions.width), height: Int32(finalDimensions.height)), flags: flags, preloadSize: nil)], hintFileIsLarge: estimatedSize > 10 * 1024 * 1024) + return standaloneUploadedFile(postbox: postbox, network: network, peerId: peerId, text: "", source: .resource(.standalone(resource: resource)), mimeType: "video/mp4", attributes: [.Video(duration: finalDuration, size: PixelDimensions(width: Int32(finalDimensions.width), height: Int32(finalDimensions.height)), flags: flags, preloadSize: nil, coverTime: nil)], hintFileIsLarge: estimatedSize > 10 * 1024 * 1024) |> mapError { _ -> PreparedShareItemError in return .generic } @@ -210,7 +210,7 @@ private func preparedShareItem(postbox: Postbox, network: Network, to peerId: Pe let mimeType: String if converted { mimeType = "video/mp4" - attributes = [.Video(duration: duration, size: PixelDimensions(width: Int32(dimensions.width), height: Int32(dimensions.height)), flags: [.supportsStreaming], preloadSize: nil), .Animated, .FileName(fileName: "animation.mp4")] + attributes = [.Video(duration: duration, size: PixelDimensions(width: Int32(dimensions.width), height: Int32(dimensions.height)), flags: [.supportsStreaming], preloadSize: nil, coverTime: nil), .Animated, .FileName(fileName: "animation.mp4")] } else { mimeType = "animation/gif" attributes = [.ImageSize(size: PixelDimensions(width: Int32(dimensions.width), height: Int32(dimensions.height))), .Animated, .FileName(fileName: fileName ?? "animation.gif")] diff --git a/submodules/StatisticsUI/Sources/ChannelStatsController.swift b/submodules/StatisticsUI/Sources/ChannelStatsController.swift index 2b6a0aba20..a294b33a05 100644 --- a/submodules/StatisticsUI/Sources/ChannelStatsController.swift +++ b/submodules/StatisticsUI/Sources/ChannelStatsController.swift @@ -1677,7 +1677,7 @@ private func monetizationEntries( } } - if isCreator { + if isCreator && canViewRevenue { var switchOffAdds: Bool? 
= nil if let boostData, boostData.level >= premiumConfiguration.minChannelRestrictAdsLevel { switchOffAdds = adsRestricted diff --git a/submodules/TelegramCore/Sources/ApiUtils/StoreMessage_Telegram.swift b/submodules/TelegramCore/Sources/ApiUtils/StoreMessage_Telegram.swift index c40ed11874..241b8f352b 100644 --- a/submodules/TelegramCore/Sources/ApiUtils/StoreMessage_Telegram.swift +++ b/submodules/TelegramCore/Sources/ApiUtils/StoreMessage_Telegram.swift @@ -50,7 +50,7 @@ public func tagsForStoreMessage(incoming: Bool, attributes: [MessageAttribute], var isAnimated = false inner: for attribute in file.attributes { switch attribute { - case let .Video(_, _, flags, _): + case let .Video(_, _, flags, _, _): if flags.contains(.instantRoundVideo) { refinedTag = .voiceOrInstantVideo } else { diff --git a/submodules/TelegramCore/Sources/ApiUtils/TelegramMediaFile.swift b/submodules/TelegramCore/Sources/ApiUtils/TelegramMediaFile.swift index 2d3c10d386..611db7cedc 100644 --- a/submodules/TelegramCore/Sources/ApiUtils/TelegramMediaFile.swift +++ b/submodules/TelegramCore/Sources/ApiUtils/TelegramMediaFile.swift @@ -6,7 +6,7 @@ import TelegramApi func dimensionsForFileAttributes(_ attributes: [TelegramMediaFileAttribute]) -> PixelDimensions? { for attribute in attributes { switch attribute { - case let .Video(_, size, _, _): + case let .Video(_, size, _, _, _): return size case let .ImageSize(size): return size @@ -20,7 +20,7 @@ func dimensionsForFileAttributes(_ attributes: [TelegramMediaFileAttribute]) -> func durationForFileAttributes(_ attributes: [TelegramMediaFileAttribute]) -> Double? { for attribute in attributes { switch attribute { - case let .Video(duration, _, _, _): + case let .Video(duration, _, _, _, _): return duration case let .Audio(_, duration, _, _, _): return Double(duration) @@ -99,7 +99,7 @@ func telegramMediaFileAttributesFromApiAttributes(_ attributes: [Api.DocumentAtt result.append(.ImageSize(size: PixelDimensions(width: w, height: h))) case .documentAttributeAnimated: result.append(.Animated) - case let .documentAttributeVideo(flags, duration, w, h, preloadSize, _): + case let .documentAttributeVideo(flags, duration, w, h, preloadSize, videoStart): var videoFlags = TelegramMediaVideoFlags() if (flags & (1 << 0)) != 0 { videoFlags.insert(.instantRoundVideo) @@ -110,7 +110,7 @@ func telegramMediaFileAttributesFromApiAttributes(_ attributes: [Api.DocumentAtt if (flags & (1 << 3)) != 0 { videoFlags.insert(.isSilent) } - result.append(.Video(duration: Double(duration), size: PixelDimensions(width: w, height: h), flags: videoFlags, preloadSize: preloadSize)) + result.append(.Video(duration: Double(duration), size: PixelDimensions(width: w, height: h), flags: videoFlags, preloadSize: preloadSize, coverTime: videoStart)) case let .documentAttributeAudio(flags, duration, title, performer, waveform): let isVoice = (flags & (1 << 10)) != 0 let waveformBuffer: Data? 
= waveform?.makeData() diff --git a/submodules/TelegramCore/Sources/PendingMessages/PendingMessageUploadedContent.swift b/submodules/TelegramCore/Sources/PendingMessages/PendingMessageUploadedContent.swift index 0ec84b8642..6d3872c492 100644 --- a/submodules/TelegramCore/Sources/PendingMessages/PendingMessageUploadedContent.swift +++ b/submodules/TelegramCore/Sources/PendingMessages/PendingMessageUploadedContent.swift @@ -701,7 +701,7 @@ func inputDocumentAttributesFromFileAttributes(_ fileAttributes: [TelegramMediaF attributes.append(.documentAttributeSticker(flags: flags, alt: displayText, stickerset: stickerSet, maskCoords: inputMaskCoords)) case .HasLinkedStickers: attributes.append(.documentAttributeHasStickers) - case let .Video(duration, size, videoFlags, preloadSize): + case let .Video(duration, size, videoFlags, preloadSize, coverTime): var flags: Int32 = 0 if videoFlags.contains(.instantRoundVideo) { flags |= (1 << 0) @@ -715,8 +715,10 @@ func inputDocumentAttributesFromFileAttributes(_ fileAttributes: [TelegramMediaF if videoFlags.contains(.isSilent) { flags |= (1 << 3) } - - attributes.append(.documentAttributeVideo(flags: flags, duration: duration, w: Int32(size.width), h: Int32(size.height), preloadPrefixSize: preloadSize, videoStartTs: nil)) + if let coverTime = coverTime, coverTime > 0.0 { + flags |= (1 << 4) + } + attributes.append(.documentAttributeVideo(flags: flags, duration: duration, w: Int32(size.width), h: Int32(size.height), preloadPrefixSize: preloadSize, videoStartTs: coverTime)) case let .Audio(isVoice, duration, title, performer, waveform): var flags: Int32 = 0 if isVoice { @@ -786,7 +788,7 @@ public func statsCategoryForFileWithAttributes(_ attributes: [TelegramMediaFileA } else { return .audio } - case let .Video(_, _, flags, _): + case let .Video(_, _, flags, _, _): if flags.contains(TelegramMediaVideoFlags.instantRoundVideo) { return .voiceMessages } else { diff --git a/submodules/TelegramCore/Sources/State/ManagedSecretChatOutgoingOperations.swift b/submodules/TelegramCore/Sources/State/ManagedSecretChatOutgoingOperations.swift index 20b9c662dd..88374c2a7e 100644 --- a/submodules/TelegramCore/Sources/State/ManagedSecretChatOutgoingOperations.swift +++ b/submodules/TelegramCore/Sources/State/ManagedSecretChatOutgoingOperations.swift @@ -553,7 +553,7 @@ private func decryptedAttributes46(_ attributes: [TelegramMediaFileAttribute], t result.append(.documentAttributeSticker(alt: displayText, stickerset: stickerSet)) case let .ImageSize(size): result.append(.documentAttributeImageSize(w: Int32(size.width), h: Int32(size.height))) - case let .Video(duration, size, _, _): + case let .Video(duration, size, _, _, _): result.append(.documentAttributeVideo(duration: Int32(duration), w: Int32(size.width), h: Int32(size.height))) case let .Audio(isVoice, duration, title, performer, waveform): var flags: Int32 = 0 @@ -612,7 +612,7 @@ private func decryptedAttributes73(_ attributes: [TelegramMediaFileAttribute], t result.append(.documentAttributeSticker(alt: displayText, stickerset: stickerSet)) case let .ImageSize(size): result.append(.documentAttributeImageSize(w: Int32(size.width), h: Int32(size.height))) - case let .Video(duration, size, videoFlags, _): + case let .Video(duration, size, videoFlags, _, _): var flags: Int32 = 0 if videoFlags.contains(.instantRoundVideo) { flags |= 1 << 0 @@ -675,7 +675,7 @@ private func decryptedAttributes101(_ attributes: [TelegramMediaFileAttribute], result.append(.documentAttributeSticker(alt: displayText, stickerset: stickerSet)) 
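// Editor's sketch (an assumption drawn only from the bits visible in the hunk above,
// not an authoritative flag definition): how the new optional cover time is folded
// into the documentAttributeVideo flags. Bit 0 marks a round video, bit 3 a silent
// one, and bit 4 signals that videoStartTs (the cover time) is present.
func packVideoAttributeFlags(isRoundVideo: Bool, isSilent: Bool, coverTime: Double?) -> Int32 {
    var flags: Int32 = 0
    if isRoundVideo {
        flags |= 1 << 0
    }
    if isSilent {
        flags |= 1 << 3
    }
    if let coverTime = coverTime, coverTime > 0.0 {
        flags |= 1 << 4
    }
    return flags
}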
case let .ImageSize(size): result.append(.documentAttributeImageSize(w: Int32(size.width), h: Int32(size.height))) - case let .Video(duration, size, videoFlags, _): + case let .Video(duration, size, videoFlags, _, _): var flags: Int32 = 0 if videoFlags.contains(.instantRoundVideo) { flags |= 1 << 0 @@ -738,7 +738,7 @@ private func decryptedAttributes144(_ attributes: [TelegramMediaFileAttribute], result.append(.documentAttributeSticker(alt: displayText, stickerset: stickerSet)) case let .ImageSize(size): result.append(.documentAttributeImageSize(w: Int32(size.width), h: Int32(size.height))) - case let .Video(duration, size, videoFlags, _): + case let .Video(duration, size, videoFlags, _, _): var flags: Int32 = 0 if videoFlags.contains(.instantRoundVideo) { flags |= 1 << 0 diff --git a/submodules/TelegramCore/Sources/State/ProcessSecretChatIncomingDecryptedOperations.swift b/submodules/TelegramCore/Sources/State/ProcessSecretChatIncomingDecryptedOperations.swift index 24085d5110..ddf283a43d 100644 --- a/submodules/TelegramCore/Sources/State/ProcessSecretChatIncomingDecryptedOperations.swift +++ b/submodules/TelegramCore/Sources/State/ProcessSecretChatIncomingDecryptedOperations.swift @@ -610,7 +610,7 @@ extension TelegramMediaFileAttribute { } self = .Sticker(displayText: alt, packReference: packReference, maskData: nil) case let .documentAttributeVideo(duration, w, h): - self = .Video(duration: Double(duration), size: PixelDimensions(width: w, height: h), flags: [], preloadSize: nil) + self = .Video(duration: Double(duration), size: PixelDimensions(width: w, height: h), flags: [], preloadSize: nil, coverTime: nil) } } } @@ -642,7 +642,7 @@ extension TelegramMediaFileAttribute { if (flags & (1 << 0)) != 0 { videoFlags.insert(.instantRoundVideo) } - self = .Video(duration: Double(duration), size: PixelDimensions(width: w, height: h), flags: videoFlags, preloadSize: nil) + self = .Video(duration: Double(duration), size: PixelDimensions(width: w, height: h), flags: videoFlags, preloadSize: nil, coverTime: nil) } } } @@ -674,7 +674,7 @@ extension TelegramMediaFileAttribute { if (flags & (1 << 0)) != 0 { videoFlags.insert(.instantRoundVideo) } - self = .Video(duration: Double(duration), size: PixelDimensions(width: w, height: h), flags: videoFlags, preloadSize: nil) + self = .Video(duration: Double(duration), size: PixelDimensions(width: w, height: h), flags: videoFlags, preloadSize: nil, coverTime: nil) } } } @@ -706,7 +706,7 @@ extension TelegramMediaFileAttribute { if (flags & (1 << 0)) != 0 { videoFlags.insert(.instantRoundVideo) } - self = .Video(duration: Double(duration), size: PixelDimensions(width: w, height: h), flags: videoFlags, preloadSize: nil) + self = .Video(duration: Double(duration), size: PixelDimensions(width: w, height: h), flags: videoFlags, preloadSize: nil, coverTime: nil) } } } @@ -821,7 +821,7 @@ private func parseMessage(peerId: PeerId, authorId: PeerId, tagLocalIndex: Int32 text = caption } if let file = file { - let parsedAttributes: [TelegramMediaFileAttribute] = [.Video(duration: Double(duration), size: PixelDimensions(width: w, height: h), flags: [], preloadSize: nil), .FileName(fileName: "video.mov")] + let parsedAttributes: [TelegramMediaFileAttribute] = [.Video(duration: Double(duration), size: PixelDimensions(width: w, height: h), flags: [], preloadSize: nil, coverTime: nil), .FileName(fileName: "video.mov")] var previewRepresentations: [TelegramMediaImageRepresentation] = [] if thumb.size != 0 { let resource = LocalFileMediaResource(fileId: Int64.random(in: 
Int64.min ... Int64.max)) @@ -1021,7 +1021,7 @@ private func parseMessage(peerId: PeerId, authorId: PeerId, tagLocalIndex: Int32 loop: for attr in parsedAttributes { switch attr { - case let .Video(_, _, flags, _): + case let .Video(_, _, flags, _, _): if flags.contains(.instantRoundVideo) { attributes.append(ConsumableContentMessageAttribute(consumed: false)) } @@ -1040,7 +1040,7 @@ private func parseMessage(peerId: PeerId, authorId: PeerId, tagLocalIndex: Int32 text = caption } if let file = file { - let parsedAttributes: [TelegramMediaFileAttribute] = [.Video(duration: Double(duration), size: PixelDimensions(width: w, height: h), flags: [], preloadSize: nil), .FileName(fileName: "video.mov")] + let parsedAttributes: [TelegramMediaFileAttribute] = [.Video(duration: Double(duration), size: PixelDimensions(width: w, height: h), flags: [], preloadSize: nil, coverTime: nil), .FileName(fileName: "video.mov")] var previewRepresentations: [TelegramMediaImageRepresentation] = [] if thumb.size != 0 { let resource = LocalFileMediaResource(fileId: Int64.random(in: Int64.min ... Int64.max)) @@ -1300,7 +1300,7 @@ private func parseMessage(peerId: PeerId, authorId: PeerId, tagLocalIndex: Int32 loop: for attr in parsedAttributes { switch attr { - case let .Video(_, _, flags, _): + case let .Video(_, _, flags, _, _): if flags.contains(.instantRoundVideo) { attributes.append(ConsumableContentMessageAttribute(consumed: false)) } @@ -1319,7 +1319,7 @@ private func parseMessage(peerId: PeerId, authorId: PeerId, tagLocalIndex: Int32 text = caption } if let file = file { - let parsedAttributes: [TelegramMediaFileAttribute] = [.Video(duration: Double(duration), size: PixelDimensions(width: w, height: h), flags: [], preloadSize: nil), .FileName(fileName: "video.mov")] + let parsedAttributes: [TelegramMediaFileAttribute] = [.Video(duration: Double(duration), size: PixelDimensions(width: w, height: h), flags: [], preloadSize: nil, coverTime: nil), .FileName(fileName: "video.mov")] var previewRepresentations: [TelegramMediaImageRepresentation] = [] if thumb.size != 0 { let resource = LocalFileMediaResource(fileId: Int64.random(in: Int64.min ... Int64.max)) @@ -1501,7 +1501,7 @@ private func parseMessage(peerId: PeerId, authorId: PeerId, tagLocalIndex: Int32 loop: for attr in parsedAttributes { switch attr { - case let .Video(_, _, flags, _): + case let .Video(_, _, flags, _, _): if flags.contains(.instantRoundVideo) { attributes.append(ConsumableContentMessageAttribute(consumed: false)) } @@ -1520,7 +1520,7 @@ private func parseMessage(peerId: PeerId, authorId: PeerId, tagLocalIndex: Int32 text = caption } if let file = file { - let parsedAttributes: [TelegramMediaFileAttribute] = [.Video(duration: Double(duration), size: PixelDimensions(width: w, height: h), flags: [], preloadSize: nil), .FileName(fileName: "video.mov")] + let parsedAttributes: [TelegramMediaFileAttribute] = [.Video(duration: Double(duration), size: PixelDimensions(width: w, height: h), flags: [], preloadSize: nil, coverTime: nil), .FileName(fileName: "video.mov")] var previewRepresentations: [TelegramMediaImageRepresentation] = [] if thumb.size != 0 { let resource = LocalFileMediaResource(fileId: Int64.random(in: Int64.min ... 
Int64.max)) diff --git a/submodules/TelegramCore/Sources/SyncCore/SyncCore_TelegramMediaFile.swift b/submodules/TelegramCore/Sources/SyncCore/SyncCore_TelegramMediaFile.swift index 1ae8b35fd7..aaddf2debb 100644 --- a/submodules/TelegramCore/Sources/SyncCore/SyncCore_TelegramMediaFile.swift +++ b/submodules/TelegramCore/Sources/SyncCore/SyncCore_TelegramMediaFile.swift @@ -235,7 +235,7 @@ public enum TelegramMediaFileAttribute: PostboxCoding, Equatable { case Sticker(displayText: String, packReference: StickerPackReference?, maskData: StickerMaskCoords?) case ImageSize(size: PixelDimensions) case Animated - case Video(duration: Double, size: PixelDimensions, flags: TelegramMediaVideoFlags, preloadSize: Int32?) + case Video(duration: Double, size: PixelDimensions, flags: TelegramMediaVideoFlags, preloadSize: Int32?, coverTime: Double?) case Audio(isVoice: Bool, duration: Int, title: String?, performer: String?, waveform: Data?) case HasLinkedStickers case hintFileIsLarge @@ -262,7 +262,7 @@ public enum TelegramMediaFileAttribute: PostboxCoding, Equatable { duration = Double(decoder.decodeInt32ForKey("du", orElse: 0)) } - self = .Video(duration: duration, size: PixelDimensions(width: decoder.decodeInt32ForKey("w", orElse: 0), height: decoder.decodeInt32ForKey("h", orElse: 0)), flags: TelegramMediaVideoFlags(rawValue: decoder.decodeInt32ForKey("f", orElse: 0)), preloadSize: decoder.decodeOptionalInt32ForKey("prs")) + self = .Video(duration: duration, size: PixelDimensions(width: decoder.decodeInt32ForKey("w", orElse: 0), height: decoder.decodeInt32ForKey("h", orElse: 0)), flags: TelegramMediaVideoFlags(rawValue: decoder.decodeInt32ForKey("f", orElse: 0)), preloadSize: decoder.decodeOptionalInt32ForKey("prs"), coverTime: decoder.decodeOptionalDoubleForKey("ct")) case typeAudio: let waveformBuffer = decoder.decodeBytesForKeyNoCopy("wf") var waveform: Data? @@ -309,7 +309,7 @@ public enum TelegramMediaFileAttribute: PostboxCoding, Equatable { encoder.encodeInt32(Int32(size.height), forKey: "h") case .Animated: encoder.encodeInt32(typeAnimated, forKey: "t") - case let .Video(duration, size, flags, preloadSize): + case let .Video(duration, size, flags, preloadSize, coverTime): encoder.encodeInt32(typeVideo, forKey: "t") encoder.encodeDouble(duration, forKey: "dur") encoder.encodeInt32(Int32(size.width), forKey: "w") @@ -320,6 +320,11 @@ public enum TelegramMediaFileAttribute: PostboxCoding, Equatable { } else { encoder.encodeNil(forKey: "prs") } + if let coverTime = coverTime { + encoder.encodeDouble(coverTime, forKey: "ct") + } else { + encoder.encodeNil(forKey: "ct") + } case let .Audio(isVoice, duration, title, performer, waveform): encoder.encodeInt32(typeAudio, forKey: "t") encoder.encodeInt32(isVoice ? 1 : 0, forKey: "iv") @@ -592,7 +597,7 @@ public final class TelegramMediaFile: Media, Equatable, Codable { public var isInstantVideo: Bool { for attribute in self.attributes { - if case .Video(_, _, let flags, _) = attribute { + if case .Video(_, _, let flags, _, _) = attribute { return flags.contains(.instantRoundVideo) } } @@ -601,7 +606,7 @@ public final class TelegramMediaFile: Media, Equatable, Codable { public var preloadSize: Int32? 
{ for attribute in self.attributes { - if case .Video(_, _, _, let preloadSize) = attribute { + if case .Video(_, _, _, let preloadSize, _) = attribute { return preloadSize } } diff --git a/submodules/TelegramCore/Sources/TelegramEngine/Messages/OutgoingMessageWithChatContextResult.swift b/submodules/TelegramCore/Sources/TelegramEngine/Messages/OutgoingMessageWithChatContextResult.swift index de9bc76cf8..019afadf9e 100644 --- a/submodules/TelegramCore/Sources/TelegramEngine/Messages/OutgoingMessageWithChatContextResult.swift +++ b/submodules/TelegramCore/Sources/TelegramEngine/Messages/OutgoingMessageWithChatContextResult.swift @@ -118,7 +118,7 @@ func _internal_outgoingMessageWithChatContextResult(to peerId: PeerId, threadId: if let dimensions = externalReference.content?.dimensions { fileAttributes.append(.ImageSize(size: dimensions)) if externalReference.type == "gif" { - fileAttributes.append(.Video(duration: externalReference.content?.duration ?? 0.0, size: dimensions, flags: [], preloadSize: nil)) + fileAttributes.append(.Video(duration: externalReference.content?.duration ?? 0.0, size: dimensions, flags: [], preloadSize: nil, coverTime: nil)) } } diff --git a/submodules/TelegramCore/Sources/TelegramEngine/Messages/Stories.swift b/submodules/TelegramCore/Sources/TelegramEngine/Messages/Stories.swift index ea8d77df3a..4c0b7329cb 100644 --- a/submodules/TelegramCore/Sources/TelegramEngine/Messages/Stories.swift +++ b/submodules/TelegramCore/Sources/TelegramEngine/Messages/Stories.swift @@ -5,12 +5,12 @@ import TelegramApi public enum EngineStoryInputMedia { case image(dimensions: PixelDimensions, data: Data, stickers: [TelegramMediaFile]) - case video(dimensions: PixelDimensions, duration: Double, resource: TelegramMediaResource, firstFrameFile: TempBoxFile?, stickers: [TelegramMediaFile]) + case video(dimensions: PixelDimensions, duration: Double, resource: TelegramMediaResource, firstFrameFile: TempBoxFile?, stickers: [TelegramMediaFile], coverTime: Double?) 
case existing(media: Media) var embeddedStickers: [TelegramMediaFile] { switch self { - case let .image(_, _, stickers), let .video(_, _, _, _, stickers): + case let .image(_, _, stickers), let .video(_, _, _, _, stickers, _): return stickers case .existing: return [] @@ -849,7 +849,7 @@ private func prepareUploadStoryContent(account: Account, media: EngineStoryInput flags: [] ) return imageMedia - case let .video(dimensions, duration, resource, firstFrameFile, _): + case let .video(dimensions, duration, resource, firstFrameFile, _, coverTime): var previewRepresentations: [TelegramMediaImageRepresentation] = [] if let firstFrameFile = firstFrameFile { account.postbox.mediaBox.storeCachedResourceRepresentation(resource.id.stringRepresentation, representationId: "first-frame", keepDuration: .general, tempFile: firstFrameFile) @@ -871,7 +871,7 @@ private func prepareUploadStoryContent(account: Account, media: EngineStoryInput mimeType: "video/mp4", size: nil, attributes: [ - TelegramMediaFileAttribute.Video(duration: duration, size: dimensions, flags: .supportsStreaming, preloadSize: nil) + TelegramMediaFileAttribute.Video(duration: duration, size: dimensions, flags: .supportsStreaming, preloadSize: nil, coverTime: coverTime) ] ) diff --git a/submodules/TelegramCore/Sources/TelegramEngine/Stickers/ImportStickers.swift b/submodules/TelegramCore/Sources/TelegramEngine/Stickers/ImportStickers.swift index ac6ed07c33..6ba7171906 100644 --- a/submodules/TelegramCore/Sources/TelegramEngine/Stickers/ImportStickers.swift +++ b/submodules/TelegramCore/Sources/TelegramEngine/Stickers/ImportStickers.swift @@ -144,7 +144,7 @@ public extension ImportSticker { fileAttributes.append(.FileName(fileName: "sticker.webm")) fileAttributes.append(.Animated) fileAttributes.append(.Sticker(displayText: "", packReference: nil, maskData: nil)) - fileAttributes.append(.Video(duration: self.duration ?? 3.0, size: self.dimensions, flags: [], preloadSize: nil)) + fileAttributes.append(.Video(duration: self.duration ?? 3.0, size: self.dimensions, flags: [], preloadSize: nil, coverTime: nil)) } else if self.mimeType == "application/x-tgsticker" { fileAttributes.append(.FileName(fileName: "sticker.tgs")) fileAttributes.append(.Animated) diff --git a/submodules/TelegramPresentationData/Sources/Resources/PresentationResourceKey.swift b/submodules/TelegramPresentationData/Sources/Resources/PresentationResourceKey.swift index 60e2e1dc3c..373ef8fe9e 100644 --- a/submodules/TelegramPresentationData/Sources/Resources/PresentationResourceKey.swift +++ b/submodules/TelegramPresentationData/Sources/Resources/PresentationResourceKey.swift @@ -45,6 +45,7 @@ public enum PresentationResourceKey: Int32 { case itemListSecondaryCheckIcon case itemListPlusIcon case itemListRoundPlusIcon + case itemListAccentDeleteIcon case itemListDeleteIcon case itemListDeleteIndicatorIcon case itemListReorderIndicatorIcon diff --git a/submodules/TelegramPresentationData/Sources/Resources/PresentationResourcesItemList.swift b/submodules/TelegramPresentationData/Sources/Resources/PresentationResourcesItemList.swift index 81e5b19064..081ee55eea 100644 --- a/submodules/TelegramPresentationData/Sources/Resources/PresentationResourcesItemList.swift +++ b/submodules/TelegramPresentationData/Sources/Resources/PresentationResourcesItemList.swift @@ -69,6 +69,12 @@ public struct PresentationResourcesItemList { }) } + public static func accentDeleteIconImage(_ theme: PresentationTheme) -> UIImage? 
{ + return theme.image(PresentationResourceKey.itemListAccentDeleteIcon.rawValue, { theme in + return generateTintedImage(image: UIImage(bundleImageName: "Chat/Input/Accessory Panels/MessageSelectionTrash"), color: theme.list.itemAccentColor) + }) + } + public static func deleteIconImage(_ theme: PresentationTheme) -> UIImage? { return theme.image(PresentationResourceKey.itemListDeleteIcon.rawValue, { theme in return generateTintedImage(image: UIImage(bundleImageName: "Chat/Input/Accessory Panels/MessageSelectionTrash"), color: theme.list.itemDestructiveColor) diff --git a/submodules/TelegramStringFormatting/Sources/MessageContentKind.swift b/submodules/TelegramStringFormatting/Sources/MessageContentKind.swift index 9cba0d586f..a548f9aac1 100644 --- a/submodules/TelegramStringFormatting/Sources/MessageContentKind.swift +++ b/submodules/TelegramStringFormatting/Sources/MessageContentKind.swift @@ -330,7 +330,7 @@ public func mediaContentKind(_ media: EngineMedia, message: EngineMessage? = nil return .file(performer) } } - case let .Video(_, _, flags, _): + case let .Video(_, _, flags, _, _): if file.isAnimated { result = .animation } else { diff --git a/submodules/TelegramStringFormatting/Sources/ServiceMessageStrings.swift b/submodules/TelegramStringFormatting/Sources/ServiceMessageStrings.swift index 2deee3a50a..1816f40ef8 100644 --- a/submodules/TelegramStringFormatting/Sources/ServiceMessageStrings.swift +++ b/submodules/TelegramStringFormatting/Sources/ServiceMessageStrings.swift @@ -235,7 +235,7 @@ public func universalServiceMessageString(presentationData: (PresentationTheme, } else { for attribute in file.attributes { switch attribute { - case let .Video(_, _, flags, _): + case let .Video(_, _, flags, _, _): if flags.contains(.instantRoundVideo) { type = .round } else { diff --git a/submodules/TelegramUI/Components/Chat/ChatContextResultPeekContent/Sources/ChatContextResultPeekContent.swift b/submodules/TelegramUI/Components/Chat/ChatContextResultPeekContent/Sources/ChatContextResultPeekContent.swift index d32cd4adf8..67891754b9 100644 --- a/submodules/TelegramUI/Components/Chat/ChatContextResultPeekContent/Sources/ChatContextResultPeekContent.swift +++ b/submodules/TelegramUI/Components/Chat/ChatContextResultPeekContent/Sources/ChatContextResultPeekContent.swift @@ -172,7 +172,7 @@ private final class ChatContextResultPeekNode: ASDisplayNode, PeekControllerCont imageDimensions = externalReference.content?.dimensions?.cgSize if let content = externalReference.content, externalReference.type == "gif", let thumbnailResource = imageResource , let dimensions = content.dimensions { - videoFileReference = .standalone(media: TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.LocalFile, id: 0), partialReference: nil, resource: content.resource, previewRepresentations: [TelegramMediaImageRepresentation(dimensions: dimensions, resource: thumbnailResource, progressiveSizes: [], immediateThumbnailData: nil, hasVideo: false, isPersonal: false)], videoThumbnails: [], immediateThumbnailData: nil, mimeType: "video/mp4", size: nil, attributes: [.Animated, .Video(duration: 0, size: dimensions, flags: [], preloadSize: nil)])) + videoFileReference = .standalone(media: TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.LocalFile, id: 0), partialReference: nil, resource: content.resource, previewRepresentations: [TelegramMediaImageRepresentation(dimensions: dimensions, resource: thumbnailResource, progressiveSizes: [], immediateThumbnailData: nil, hasVideo: false, isPersonal: 
false)], videoThumbnails: [], immediateThumbnailData: nil, mimeType: "video/mp4", size: nil, attributes: [.Animated, .Video(duration: 0, size: dimensions, flags: [], preloadSize: nil, coverTime: nil)])) imageResource = nil } case let .internalReference(internalReference): diff --git a/submodules/TelegramUI/Components/Chat/ChatMessageActionBubbleContentNode/Sources/ChatMessageActionBubbleContentNode.swift b/submodules/TelegramUI/Components/Chat/ChatMessageActionBubbleContentNode/Sources/ChatMessageActionBubbleContentNode.swift index d0066f268f..feaacffdb1 100644 --- a/submodules/TelegramUI/Components/Chat/ChatMessageActionBubbleContentNode/Sources/ChatMessageActionBubbleContentNode.swift +++ b/submodules/TelegramUI/Components/Chat/ChatMessageActionBubbleContentNode/Sources/ChatMessageActionBubbleContentNode.swift @@ -272,7 +272,7 @@ public class ChatMessageActionBubbleContentNode: ChatMessageBubbleContentNode { strongSelf.mediaBackgroundNode.image = backgroundImage if let image = image, let video = image.videoRepresentations.last, let id = image.id?.id { - let videoFileReference = FileMediaReference.message(message: MessageReference(item.message), media: TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.LocalFile, id: 0), partialReference: nil, resource: video.resource, previewRepresentations: image.representations, videoThumbnails: [], immediateThumbnailData: image.immediateThumbnailData, mimeType: "video/mp4", size: nil, attributes: [.Animated, .Video(duration: 0, size: video.dimensions, flags: [], preloadSize: nil)])) + let videoFileReference = FileMediaReference.message(message: MessageReference(item.message), media: TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.LocalFile, id: 0), partialReference: nil, resource: video.resource, previewRepresentations: image.representations, videoThumbnails: [], immediateThumbnailData: image.immediateThumbnailData, mimeType: "video/mp4", size: nil, attributes: [.Animated, .Video(duration: 0, size: video.dimensions, flags: [], preloadSize: nil, coverTime: nil)])) let videoContent = NativeVideoContent(id: .profileVideo(id, "action"), userLocation: .peer(item.message.id.peerId), fileReference: videoFileReference, streamVideo: isMediaStreamable(resource: video.resource) ? .conservative : .none, loopVideo: true, enableSound: false, fetchAutomatically: true, onlyFullSizeThumbnail: false, useLargeThumbnail: true, autoFetchFullSizeThumbnail: true, continuePlayingWithoutSoundOnLostAudioSession: false, placeholderColor: .clear, storeAfterDownload: nil) if videoContent.id != strongSelf.videoContent?.id { let mediaManager = item.context.sharedContext.mediaManager diff --git a/submodules/TelegramUI/Components/Chat/ChatMessageInteractiveFileNode/Sources/ChatMessageInteractiveFileNode.swift b/submodules/TelegramUI/Components/Chat/ChatMessageInteractiveFileNode/Sources/ChatMessageInteractiveFileNode.swift index 208c51937c..6ae1859216 100644 --- a/submodules/TelegramUI/Components/Chat/ChatMessageInteractiveFileNode/Sources/ChatMessageInteractiveFileNode.swift +++ b/submodules/TelegramUI/Components/Chat/ChatMessageInteractiveFileNode/Sources/ChatMessageInteractiveFileNode.swift @@ -650,7 +650,7 @@ public final class ChatMessageInteractiveFileNode: ASDisplayNode { let messageTheme = arguments.incoming ? 
arguments.presentationData.theme.theme.chat.message.incoming : arguments.presentationData.theme.theme.chat.message.outgoing let isInstantVideo = arguments.file.isInstantVideo for attribute in arguments.file.attributes { - if case let .Video(videoDuration, _, flags, _) = attribute, flags.contains(.instantRoundVideo) { + if case let .Video(videoDuration, _, flags, _, _) = attribute, flags.contains(.instantRoundVideo) { isAudio = true isVoice = true @@ -1558,7 +1558,7 @@ public final class ChatMessageInteractiveFileNode: ASDisplayNode { var isVoice = false var audioDuration: Int32? for attribute in file.attributes { - if case let .Video(duration, _, flags, _) = attribute, flags.contains(.instantRoundVideo) { + if case let .Video(duration, _, flags, _, _) = attribute, flags.contains(.instantRoundVideo) { isAudio = true isVoice = true audioDuration = Int32(duration) diff --git a/submodules/TelegramUI/Components/Chat/ChatMessageItemImpl/Sources/ChatMessageItemImpl.swift b/submodules/TelegramUI/Components/Chat/ChatMessageItemImpl/Sources/ChatMessageItemImpl.swift index 4deb81d110..d54a68b500 100644 --- a/submodules/TelegramUI/Components/Chat/ChatMessageItemImpl/Sources/ChatMessageItemImpl.swift +++ b/submodules/TelegramUI/Components/Chat/ChatMessageItemImpl/Sources/ChatMessageItemImpl.swift @@ -27,7 +27,7 @@ private func mediaMergeableStyle(_ media: Media) -> ChatMessageMerge { switch attribute { case .Sticker: return .semanticallyMerged - case let .Video(_, _, flags, _): + case let .Video(_, _, flags, _, _): if flags.contains(.instantRoundVideo) { return .none } @@ -423,7 +423,7 @@ public final class ChatMessageItemImpl: ChatMessageItem, CustomStringConvertible viewClassName = ChatMessageStickerItemNode.self } break loop - case let .Video(_, _, flags, _): + case let .Video(_, _, flags, _, _): if flags.contains(.instantRoundVideo) { viewClassName = ChatMessageBubbleItemNode.self break loop diff --git a/submodules/TelegramUI/Components/Chat/ChatMessageItemView/Sources/ChatMessageItemView.swift b/submodules/TelegramUI/Components/Chat/ChatMessageItemView/Sources/ChatMessageItemView.swift index a473d69ea6..9424d985ab 100644 --- a/submodules/TelegramUI/Components/Chat/ChatMessageItemView/Sources/ChatMessageItemView.swift +++ b/submodules/TelegramUI/Components/Chat/ChatMessageItemView/Sources/ChatMessageItemView.swift @@ -203,7 +203,7 @@ public final class ChatMessageAccessibilityData { text = item.presentationData.strings.VoiceOver_Chat_MusicTitle(title, performer).string text.append(item.presentationData.strings.VoiceOver_Chat_Duration(durationString).string) } - case let .Video(duration, _, flags, _): + case let .Video(duration, _, flags, _, _): isSpecialFile = true if isSelected == nil { hint = item.presentationData.strings.VoiceOver_Chat_PlayHint diff --git a/submodules/TelegramUI/Components/Chat/ChatMessageProfilePhotoSuggestionContentNode/Sources/ChatMessageProfilePhotoSuggestionContentNode.swift b/submodules/TelegramUI/Components/Chat/ChatMessageProfilePhotoSuggestionContentNode/Sources/ChatMessageProfilePhotoSuggestionContentNode.swift index d3187ba59b..7f8236c6ee 100644 --- a/submodules/TelegramUI/Components/Chat/ChatMessageProfilePhotoSuggestionContentNode/Sources/ChatMessageProfilePhotoSuggestionContentNode.swift +++ b/submodules/TelegramUI/Components/Chat/ChatMessageProfilePhotoSuggestionContentNode/Sources/ChatMessageProfilePhotoSuggestionContentNode.swift @@ -218,7 +218,7 @@ public class ChatMessageProfilePhotoSuggestionContentNode: ChatMessageBubbleCont } if let photo = photo, let video 
= photo.videoRepresentations.last, let id = photo.id?.id { - let videoFileReference = FileMediaReference.message(message: MessageReference(item.message), media: TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.LocalFile, id: 0), partialReference: nil, resource: video.resource, previewRepresentations: photo.representations, videoThumbnails: [], immediateThumbnailData: photo.immediateThumbnailData, mimeType: "video/mp4", size: nil, attributes: [.Animated, .Video(duration: 0, size: video.dimensions, flags: [], preloadSize: nil)])) + let videoFileReference = FileMediaReference.message(message: MessageReference(item.message), media: TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.LocalFile, id: 0), partialReference: nil, resource: video.resource, previewRepresentations: photo.representations, videoThumbnails: [], immediateThumbnailData: photo.immediateThumbnailData, mimeType: "video/mp4", size: nil, attributes: [.Animated, .Video(duration: 0, size: video.dimensions, flags: [], preloadSize: nil, coverTime: nil)])) let videoContent = NativeVideoContent(id: .profileVideo(id, "action"), userLocation: .peer(item.message.id.peerId), fileReference: videoFileReference, streamVideo: isMediaStreamable(resource: video.resource) ? .conservative : .none, loopVideo: true, enableSound: false, fetchAutomatically: true, onlyFullSizeThumbnail: false, useLargeThumbnail: true, autoFetchFullSizeThumbnail: true, continuePlayingWithoutSoundOnLostAudioSession: false, placeholderColor: .clear, storeAfterDownload: nil) if videoContent.id != strongSelf.videoContent?.id { let mediaManager = item.context.sharedContext.mediaManager diff --git a/submodules/TelegramUI/Components/EntityKeyboardGifContent/Sources/GifContext.swift b/submodules/TelegramUI/Components/EntityKeyboardGifContent/Sources/GifContext.swift index 15c955e0b3..e133ecbacb 100644 --- a/submodules/TelegramUI/Components/EntityKeyboardGifContent/Sources/GifContext.swift +++ b/submodules/TelegramUI/Components/EntityKeyboardGifContent/Sources/GifContext.swift @@ -126,7 +126,7 @@ public func paneGifSearchForQuery(context: AccountContext, query: String, offset )) } } - let file = TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.LocalFile, id: uniqueId ?? 0), partialReference: nil, resource: resource, previewRepresentations: previews, videoThumbnails: videoThumbnails, immediateThumbnailData: nil, mimeType: "video/mp4", size: nil, attributes: [.Animated, .Video(duration: 0, size: dimensions, flags: [], preloadSize: nil)]) + let file = TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.LocalFile, id: uniqueId ?? 
0), partialReference: nil, resource: resource, previewRepresentations: previews, videoThumbnails: videoThumbnails, immediateThumbnailData: nil, mimeType: "video/mp4", size: nil, attributes: [.Animated, .Video(duration: 0, size: dimensions, flags: [], preloadSize: nil, coverTime: nil)]) references.append(MultiplexedVideoNodeFile(file: FileMediaReference.standalone(media: file), contextResult: (collection, result))) } case let .internalReference(internalReference): diff --git a/submodules/TelegramUI/Components/LegacyInstantVideoController/Sources/LegacyInstantVideoController.swift b/submodules/TelegramUI/Components/LegacyInstantVideoController/Sources/LegacyInstantVideoController.swift index 69dd016f21..0dc69ee662 100644 --- a/submodules/TelegramUI/Components/LegacyInstantVideoController/Sources/LegacyInstantVideoController.swift +++ b/submodules/TelegramUI/Components/LegacyInstantVideoController/Sources/LegacyInstantVideoController.swift @@ -231,7 +231,7 @@ public func legacyInstantVideoController(theme: PresentationTheme, forStory: Boo } } - let media = TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.LocalFile, id: Int64.random(in: Int64.min ... Int64.max)), partialReference: nil, resource: resource, previewRepresentations: previewRepresentations, videoThumbnails: [], immediateThumbnailData: nil, mimeType: "video/mp4", size: nil, attributes: [.FileName(fileName: "video.mp4"), .Video(duration: finalDuration, size: PixelDimensions(finalDimensions), flags: [.instantRoundVideo], preloadSize: nil)]) + let media = TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.LocalFile, id: Int64.random(in: Int64.min ... Int64.max)), partialReference: nil, resource: resource, previewRepresentations: previewRepresentations, videoThumbnails: [], immediateThumbnailData: nil, mimeType: "video/mp4", size: nil, attributes: [.FileName(fileName: "video.mp4"), .Video(duration: finalDuration, size: PixelDimensions(finalDimensions), flags: [.instantRoundVideo], preloadSize: nil, coverTime: nil)]) var message: EnqueueMessage = .message(text: "", attributes: [], inlineStickers: [:], mediaReference: .standalone(media: media), threadId: nil, replyToMessageId: nil, replyToStoryId: nil, localGroupingKey: nil, correlationId: nil, bubbleUpEmojiOrStickersets: []) let scheduleTime: Int32? = scheduleTimestamp > 0 ? scheduleTimestamp : nil diff --git a/submodules/TelegramUI/Components/MediaEditor/Sources/MediaEditor.swift b/submodules/TelegramUI/Components/MediaEditor/Sources/MediaEditor.swift index 8cbf706e03..0f4bc63cf2 100644 --- a/submodules/TelegramUI/Components/MediaEditor/Sources/MediaEditor.swift +++ b/submodules/TelegramUI/Components/MediaEditor/Sources/MediaEditor.swift @@ -476,6 +476,7 @@ public final class MediaEditor { audioTrackOffset: nil, audioTrackVolume: nil, audioTrackSamples: nil, + coverImageTimestamp: nil, qualityPreset: nil ) } @@ -1733,6 +1734,12 @@ public final class MediaEditor { } } + public func setCoverImageTimestamp(_ coverImageTimestamp: Double?) 
{ + self.updateValues(mode: .skipRendering) { values in + return values.withUpdatedCoverImageTimestamp(coverImageTimestamp) + } + } + public func setDrawingAndEntities(data: Data?, image: UIImage?, entities: [CodableDrawingEntity]) { self.updateValues(mode: .skipRendering) { values in return values.withUpdatedDrawingAndEntities(drawing: image, entities: entities) diff --git a/submodules/TelegramUI/Components/MediaEditor/Sources/MediaEditorValues.swift b/submodules/TelegramUI/Components/MediaEditor/Sources/MediaEditorValues.swift index deec4efc3d..828e7500d8 100644 --- a/submodules/TelegramUI/Components/MediaEditor/Sources/MediaEditorValues.swift +++ b/submodules/TelegramUI/Components/MediaEditor/Sources/MediaEditorValues.swift @@ -324,6 +324,9 @@ public final class MediaEditorValues: Codable, Equatable { if lhs.audioTrackSamples != rhs.audioTrackSamples { return false } + if lhs.coverImageTimestamp != rhs.coverImageTimestamp { + return false + } if lhs.nightTheme != rhs.nightTheme { return false } @@ -394,6 +397,7 @@ public final class MediaEditorValues: Codable, Equatable { case audioTrackTrimRange case audioTrackOffset case audioTrackVolume + case coverImageTimestamp case qualityPreset } @@ -438,6 +442,8 @@ public final class MediaEditorValues: Codable, Equatable { public let audioTrackVolume: CGFloat? public let audioTrackSamples: MediaAudioTrackSamples? + public let coverImageTimestamp: Double? + public let qualityPreset: MediaQualityPreset? var isStory: Bool { @@ -486,6 +492,7 @@ public final class MediaEditorValues: Codable, Equatable { audioTrackOffset: Double?, audioTrackVolume: CGFloat?, audioTrackSamples: MediaAudioTrackSamples?, + coverImageTimestamp: Double?, qualityPreset: MediaQualityPreset? ) { self.peerId = peerId @@ -521,6 +528,7 @@ public final class MediaEditorValues: Codable, Equatable { self.audioTrackOffset = audioTrackOffset self.audioTrackVolume = audioTrackVolume self.audioTrackSamples = audioTrackSamples + self.coverImageTimestamp = coverImageTimestamp self.qualityPreset = qualityPreset } @@ -591,6 +599,8 @@ public final class MediaEditorValues: Codable, Equatable { self.audioTrackSamples = nil + self.coverImageTimestamp = try container.decodeIfPresent(Double.self, forKey: .coverImageTimestamp) + self.qualityPreset = (try container.decodeIfPresent(Int32.self, forKey: .qualityPreset)).flatMap { MediaQualityPreset(rawValue: $0) } } @@ -652,109 +662,115 @@ public final class MediaEditorValues: Codable, Equatable { try container.encodeIfPresent(self.audioTrackOffset, forKey: .audioTrackOffset) try container.encodeIfPresent(self.audioTrackVolume, forKey: .audioTrackVolume) + try container.encodeIfPresent(self.coverImageTimestamp, forKey: .coverImageTimestamp) + try container.encodeIfPresent(self.qualityPreset?.rawValue, forKey: .qualityPreset) } public func makeCopy() -> MediaEditorValues { - return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: 
self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, qualityPreset: self.qualityPreset) + return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset) } func withUpdatedCrop(offset: CGPoint, scale: CGFloat, rotation: CGFloat, mirroring: Bool) -> MediaEditorValues { - return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: offset, cropRect: self.cropRect, cropScale: scale, cropRotation: rotation, cropMirroring: mirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, qualityPreset: self.qualityPreset) + return MediaEditorValues(peerId: 
self.peerId, originalDimensions: self.originalDimensions, cropOffset: offset, cropRect: self.cropRect, cropScale: scale, cropRotation: rotation, cropMirroring: mirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset) } public func withUpdatedCropRect(cropRect: CGRect, rotation: CGFloat, mirroring: Bool) -> MediaEditorValues { - return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: .zero, cropRect: cropRect, cropScale: 1.0, cropRotation: rotation, cropMirroring: mirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, qualityPreset: self.qualityPreset) + return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: .zero, cropRect: cropRect, cropScale: 1.0, cropRotation: rotation, cropMirroring: mirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: 
self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset) } func withUpdatedGradientColors(gradientColors: [UIColor]) -> MediaEditorValues { - return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, qualityPreset: self.qualityPreset) + return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset) } func withUpdatedVideoIsMuted(_ videoIsMuted: Bool) -> MediaEditorValues { - return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, 
cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, qualityPreset: self.qualityPreset) + return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset) } func withUpdatedVideoIsFullHd(_ videoIsFullHd: Bool) -> MediaEditorValues { - return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, 
additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, qualityPreset: self.qualityPreset) + return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset) } func withUpdatedVideoIsMirrored(_ videoIsMirrored: Bool) -> MediaEditorValues { - return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, qualityPreset: self.qualityPreset) + return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: 
self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset) } func withUpdatedVideoVolume(_ videoVolume: CGFloat?) -> MediaEditorValues { - return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, qualityPreset: self.qualityPreset) + return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, 
additionalVideoVolume: self.additionalVideoVolume, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset) } func withUpdatedAdditionalVideo(path: String?, isDual: Bool, positionChanges: [VideoPositionChange]) -> MediaEditorValues { - return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: path, additionalVideoIsDual: isDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: positionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, qualityPreset: self.qualityPreset) + return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: path, additionalVideoIsDual: isDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: positionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset) } func withUpdatedAdditionalVideo(position: CGPoint, scale: CGFloat, rotation: CGFloat) -> MediaEditorValues { - return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: 
self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: position, additionalVideoScale: scale, additionalVideoRotation: rotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, qualityPreset: self.qualityPreset) + return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: position, additionalVideoScale: scale, additionalVideoRotation: rotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset) } func withUpdatedAdditionalVideoTrimRange(_ additionalVideoTrimRange: Range<Double>?)
-> MediaEditorValues { - return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, qualityPreset: self.qualityPreset) + return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset) } func withUpdatedAdditionalVideoOffset(_ additionalVideoOffset: Double?) 
-> MediaEditorValues { - return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, qualityPreset: self.qualityPreset) + return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset) } func withUpdatedAdditionalVideoVolume(_ additionalVideoVolume: CGFloat?) 
-> MediaEditorValues { - return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: additionalVideoVolume, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, qualityPreset: self.qualityPreset) + return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: additionalVideoVolume, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset) } func withUpdatedVideoTrimRange(_ videoTrimRange: Range<Double>) -> MediaEditorValues { - return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation:
self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, qualityPreset: self.qualityPreset) + return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset) } func withUpdatedDrawingAndEntities(drawing: UIImage?, entities: [CodableDrawingEntity]) -> MediaEditorValues { - return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, nightTheme: self.nightTheme, drawing: drawing, maskDrawing: self.maskDrawing, entities: entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, qualityPreset: self.qualityPreset) + return MediaEditorValues(peerId: self.peerId, originalDimensions: 
self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, nightTheme: self.nightTheme, drawing: drawing, maskDrawing: self.maskDrawing, entities: entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset) } public func withUpdatedMaskDrawing(maskDrawing: UIImage?) -> MediaEditorValues { - return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, qualityPreset: self.qualityPreset) + return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: 
self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset) } func withUpdatedToolValues(_ toolValues: [EditorToolKey: Any]) -> MediaEditorValues { - return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, qualityPreset: self.qualityPreset) + return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset) } func withUpdatedAudioTrack(_ audioTrack: MediaAudioTrack?) 
-> MediaEditorValues { - return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, qualityPreset: self.qualityPreset) + return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset) } func withUpdatedAudioTrackTrimRange(_ audioTrackTrimRange: Range<Double>?)
-> MediaEditorValues { - return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, qualityPreset: self.qualityPreset) + return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset) } func withUpdatedAudioTrackOffset(_ audioTrackOffset: Double?) 
-> MediaEditorValues { - return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, qualityPreset: self.qualityPreset) + return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset) } func withUpdatedAudioTrackVolume(_ audioTrackVolume: CGFloat?) 
-> MediaEditorValues { - return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: audioTrackVolume, audioTrackSamples: self.audioTrackSamples, qualityPreset: self.qualityPreset) + return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: audioTrackVolume, audioTrackSamples: self.audioTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset) } func withUpdatedAudioTrackSamples(_ audioTrackSamples: MediaAudioTrackSamples?) 
-> MediaEditorValues { - return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: audioTrackSamples, qualityPreset: self.qualityPreset) + return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: audioTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset) } func withUpdatedNightTheme(_ nightTheme: Bool) -> MediaEditorValues { - return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: 
self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, nightTheme: nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, qualityPreset: self.qualityPreset) + return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, nightTheme: nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset) } public func withUpdatedEntities(_ entities: [CodableDrawingEntity]) -> MediaEditorValues { - return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, qualityPreset: self.qualityPreset) + return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: 
self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset) + } + + public func withUpdatedCoverImageTimestamp(_ coverImageTimestamp: Double?) -> MediaEditorValues { + return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, coverImageTimestamp: coverImageTimestamp, qualityPreset: self.qualityPreset) } public func withUpdatedQualityPreset(_ qualityPreset: MediaQualityPreset?) 
-> MediaEditorValues { - return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, qualityPreset: qualityPreset) + return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: qualityPreset) } public var resultDimensions: PixelDimensions { diff --git a/submodules/TelegramUI/Components/MediaEditorScreen/Sources/EditStories.swift b/submodules/TelegramUI/Components/MediaEditorScreen/Sources/EditStories.swift index c45a76b7c1..fb7682e02f 100644 --- a/submodules/TelegramUI/Components/MediaEditorScreen/Sources/EditStories.swift +++ b/submodules/TelegramUI/Components/MediaEditorScreen/Sources/EditStories.swift @@ -216,7 +216,7 @@ public extension MediaEditorScreen { } } - update((context.engine.messages.editStory(peerId: peer.id, id: storyItem.id, media: .video(dimensions: dimensions, duration: duration, resource: resource, firstFrameFile: firstFrameFile, stickers: result.stickers), mediaAreas: result.mediaAreas, text: updatedText, entities: updatedEntities, privacy: nil) + 
update((context.engine.messages.editStory(peerId: peer.id, id: storyItem.id, media: .video(dimensions: dimensions, duration: duration, resource: resource, firstFrameFile: firstFrameFile, stickers: result.stickers, coverTime: nil), mediaAreas: result.mediaAreas, text: updatedText, entities: updatedEntities, privacy: nil) |> deliverOnMainQueue).startStrict(next: { result in switch result { case let .progress(progress): diff --git a/submodules/TelegramUI/Components/MediaEditorScreen/Sources/MediaCoverScreen.swift b/submodules/TelegramUI/Components/MediaEditorScreen/Sources/MediaCoverScreen.swift new file mode 100644 index 0000000000..ac4ccd2601 --- /dev/null +++ b/submodules/TelegramUI/Components/MediaEditorScreen/Sources/MediaCoverScreen.swift @@ -0,0 +1,602 @@ +import Foundation +import UIKit +import Display +import AsyncDisplayKit +import ComponentFlow +import SwiftSignalKit +import ViewControllerComponent +import ComponentDisplayAdapters +import TelegramPresentationData +import AccountContext +import TelegramCore +import MultilineTextComponent +import MediaEditor +import MediaScrubberComponent +import ButtonComponent + +private final class MediaCoverScreenComponent: Component { + typealias EnvironmentType = ViewControllerComponentContainer.Environment + + let context: AccountContext + let mediaEditor: MediaEditor + + init( + context: AccountContext, + mediaEditor: MediaEditor + ) { + self.context = context + self.mediaEditor = mediaEditor + } + + static func ==(lhs: MediaCoverScreenComponent, rhs: MediaCoverScreenComponent) -> Bool { + if lhs.context !== rhs.context { + return false + } + return true + } + + final class State: ComponentState { + enum ImageKey: Hashable { + case done + } + private var cachedImages: [ImageKey: UIImage] = [:] + func image(_ key: ImageKey) -> UIImage { + if let image = self.cachedImages[key] { + return image + } else { + var image: UIImage + switch key { + case .done: + image = generateTintedImage(image: UIImage(bundleImageName: "Media Editor/Done"), color: .white)! + } + cachedImages[key] = image + return image + } + } + + var playerStateDisposable: Disposable? + var playerState: MediaEditorPlayerState? + + init(mediaEditor: MediaEditor) { + super.init() + + self.playerStateDisposable = (mediaEditor.playerState(framesCount: 16) + |> deliverOnMainQueue).start(next: { [weak self] playerState in + if let self { + if self.playerState != playerState { + self.playerState = playerState + self.updated() + } + } + }) + } + + deinit { + self.playerStateDisposable?.dispose() + } + } + + func makeState() -> State { + return State(mediaEditor: self.mediaEditor) + } + + public final class View: UIView { + private let buttonsContainerView = UIView() + private let buttonsBackgroundView = UIImageView() + private let previewContainerView = UIView() + private let cancelButton = ComponentView() + private let label = ComponentView() + private let doneButton = ComponentView() + private let scrubber = ComponentView() + + private let fadeView = UIView() + + private var component: MediaCoverScreenComponent? + private weak var state: State? + private var environment: ViewControllerComponentContainer.Environment? 
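Aside for orientation (illustrative, not part of the patch): the State object above subscribes to mediaEditor.playerState(framesCount: 16) and calls updated() only when the delivered player state actually differs from the cached one, which is what drives the cover scrubber to re-render. A minimal, signal-free sketch of that compare-before-update idea in plain Swift; the type and method names below are stand-ins, not the real SwiftSignalKit/ComponentFlow API:

final class CoverPickerStateSketch {
    // Latest observed playback position, compared before triggering a re-render.
    private(set) var position: Double = 0.0
    // Stands in for ComponentState.updated(), i.e. "re-run the component update pass".
    var updated: () -> Void = {}

    // Called by whatever delivers player updates (stands in for the playerState signal).
    func receive(position newPosition: Double) {
        guard self.position != newPosition else { return }
        self.position = newPosition
        self.updated()
    }
}

// Usage sketch:
// let state = CoverPickerStateSketch()
// state.updated = { /* re-render the cover scrubber */ }
// state.receive(position: 1.25)   // triggers updated()
// state.receive(position: 1.25)   // no change, no re-render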
+ + override init(frame: CGRect) { + self.buttonsContainerView.clipsToBounds = true + + self.fadeView.alpha = 0.0 + self.fadeView.backgroundColor = UIColor(rgb: 0x000000, alpha: 0.7) + + self.buttonsBackgroundView.image = generateImage(CGSize(width: 22.0, height: 22.0), rotatedContext: { size, context in + context.setFillColor(UIColor.black.cgColor) + context.fill(CGRect(origin: .zero, size: size)) + + context.setBlendMode(.clear) + context.setFillColor(UIColor.clear.cgColor) + context.addPath(CGPath(roundedRect: CGRect(x: 0.0, y: -11.0, width: size.width, height: 22.0), cornerWidth: 11.0, cornerHeight: 11.0, transform: nil)) + context.fillPath() + })?.stretchableImage(withLeftCapWidth: 11, topCapHeight: 11) + + super.init(frame: frame) + + self.backgroundColor = .clear + + self.addSubview(self.buttonsContainerView) + self.buttonsContainerView.addSubview(self.buttonsBackgroundView) + + self.addSubview(self.fadeView) + self.addSubview(self.previewContainerView) + } + + required init?(coder: NSCoder) { + fatalError("init(coder:) has not been implemented") + } + + func animateInFromEditor() { + self.buttonsBackgroundView.layer.animatePosition(from: CGPoint(x: 0.0, y: 44.0), to: .zero, duration: 0.2, additive: true) + + self.label.view?.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2) + + if let view = self.doneButton.view { + view.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2) + view.layer.animateScale(from: 0.1, to: 1.0, duration: 0.2) + } + } + + private var animatingOut = false + func animateOutToEditor(completion: @escaping () -> Void) { + self.animatingOut = true + + self.fadeView.layer.animateAlpha(from: self.fadeView.alpha, to: 0.0, duration: 0.2, removeOnCompletion: false) + self.buttonsBackgroundView.layer.animatePosition(from: .zero, to: CGPoint(x: 0.0, y: 44.0), duration: 0.2, removeOnCompletion: false, additive: true) + + self.label.view?.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2, removeOnCompletion: false) + + if let view = self.scrubber.view { + view.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2, removeOnCompletion: false, completion: { _ in + completion() + }) + view.layer.animateScale(from: 1.0, to: 0.1, duration: 0.2) + } + + if let view = self.cancelButton.view { + view.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2, removeOnCompletion: false) + view.layer.animateScale(from: 1.0, to: 0.1, duration: 0.2) + } + + if let view = self.doneButton.view { + view.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2, removeOnCompletion: false) + view.layer.animateScale(from: 1.0, to: 0.1, duration: 0.2) + } + + self.state?.updated() + } + +// override func hitTest(_ point: CGPoint, with event: UIEvent?) -> UIView? { +// let result = super.hitTest(point, with: event) +// if let controller = self.environment?.controller() as? MediaCoverScreen, [.erase, .restore].contains(controller.mode), result == self.previewContainerView { +// return nil +// } +// return result +// } + + func update(component: MediaCoverScreenComponent, availableSize: CGSize, state: State, environment: Environment, transition: ComponentTransition) -> CGSize { + let environment = environment[ViewControllerComponentContainer.Environment.self].value + self.environment = environment + + guard let controller = environment.controller() as? 
MediaCoverScreen else { + return .zero + } + +// let isFirstTime = self.component == nil + self.component = component + self.state = state + + let isTablet: Bool + if case .regular = environment.metrics.widthClass { + isTablet = true + } else { + isTablet = false + } + + let buttonSideInset: CGFloat = 16.0 + var controlsBottomInset: CGFloat = 0.0 + let previewSize: CGSize + var topInset: CGFloat = environment.statusBarHeight + 5.0 + if isTablet { + let previewHeight = availableSize.height - topInset - 75.0 + previewSize = CGSize(width: floorToScreenPixels(previewHeight / 1.77778), height: previewHeight) + } else { + previewSize = CGSize(width: availableSize.width, height: floorToScreenPixels(availableSize.width * 1.77778)) + if availableSize.height < previewSize.height + 30.0 { + topInset = 0.0 + controlsBottomInset = -75.0 + } + } + + let previewContainerFrame = CGRect(origin: CGPoint(x: floorToScreenPixels((availableSize.width - previewSize.width) / 2.0), y: environment.safeInsets.top), size: CGSize(width: previewSize.width, height: availableSize.height - environment.safeInsets.top - environment.safeInsets.bottom + controlsBottomInset)) + let buttonsContainerFrame = CGRect(origin: CGPoint(x: 0.0, y: availableSize.height - environment.safeInsets.bottom + controlsBottomInset - 31.0), size: CGSize(width: availableSize.width, height: environment.safeInsets.bottom - controlsBottomInset)) + + let cancelButtonSize = self.cancelButton.update( + transition: transition, + component: AnyComponent(Button( + content: AnyComponent( + MultilineTextComponent(text: .plain(NSAttributedString(string: "Cancel", font: Font.regular(17.0), textColor: .white))) + ), + action: { [weak controller] in + controller?.requestDismiss(animated: true) + } + )), + environment: {}, + containerSize: CGSize(width: 120.0, height: 44.0) + ) + let cancelButtonFrame = CGRect( + origin: CGPoint(x: 16.0, y: 80.0), + size: cancelButtonSize + ) + if let cancelButtonView = self.cancelButton.view { + if cancelButtonView.superview == nil { + self.addSubview(cancelButtonView) + setupButtonShadow(cancelButtonView) + } + transition.setFrame(view: cancelButtonView, frame: cancelButtonFrame) + } + + let doneButtonSize = self.doneButton.update( + transition: transition, + component: AnyComponent( + ButtonComponent( + background: ButtonComponent.Background( + color: environment.theme.list.itemCheckColors.fillColor, + foreground: environment.theme.list.itemCheckColors.foregroundColor, + pressedColor: environment.theme.list.itemCheckColors.fillColor.withMultipliedAlpha(0.9) + ), + content: AnyComponentWithIdentity( + id: AnyHashable(0), + component: AnyComponent(ButtonTextContentComponent( + text: "Save Cover", + badge: 0, + textColor: environment.theme.list.itemCheckColors.foregroundColor, + badgeBackground: .clear, + badgeForeground: .clear + )) + ), + isEnabled: true, + displaysProgress: false, + action: { [weak controller, weak self] in + if let playerState = self?.state?.playerState, let mediaEditor = self?.component?.mediaEditor, let image = mediaEditor.resultImage { + mediaEditor.setCoverImageTimestamp(playerState.position) + controller?.completed(playerState.position, image) + } + controller?.requestDismiss(animated: true) + } + ) + ), + environment: {}, + containerSize: CGSize(width: availableSize.width - buttonSideInset * 2.0, height: 50.0) + ) + let doneButtonFrame = CGRect( + origin: CGPoint(x: floor((availableSize.width - doneButtonSize.width) / 2.0), y: availableSize.height - 99.0), + size: doneButtonSize + ) + if let 
doneButtonView = self.doneButton.view { + if doneButtonView.superview == nil { + self.addSubview(doneButtonView) + } + transition.setFrame(view: doneButtonView, frame: doneButtonFrame) + } + + let labelSize = self.label.update( + transition: transition, + component: AnyComponent(Text(text: "Story Cover", font: Font.semibold(17.0), color: UIColor(rgb: 0xffffff))), + environment: {}, + containerSize: CGSize(width: availableSize.width - 88.0, height: 44.0) + ) + let labelFrame = CGRect( + origin: CGPoint(x: floorToScreenPixels((availableSize.width - labelSize.width) / 2.0), y: 80.0), + size: labelSize + ) + if let labelView = self.label.view { + if labelView.superview == nil { + self.addSubview(labelView) + setupButtonShadow(labelView) + } + if labelView.bounds.width > 0.0 && labelFrame.width != labelView.bounds.width { + if let snapshotView = labelView.snapshotView(afterScreenUpdates: false) { + snapshotView.center = labelView.center + self.buttonsContainerView.addSubview(snapshotView) + + labelView.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.25) + snapshotView.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.25, removeOnCompletion: false, completion: { _ in + snapshotView.removeFromSuperview() + }) + } + } + labelView.bounds = CGRect(origin: .zero, size: labelFrame.size) + transition.setPosition(view: labelView, position: labelFrame.center) + } + + transition.setFrame(view: self.buttonsContainerView, frame: buttonsContainerFrame) + transition.setFrame(view: self.buttonsBackgroundView, frame: CGRect(origin: .zero, size: buttonsContainerFrame.size)) + + transition.setFrame(view: self.previewContainerView, frame: previewContainerFrame) + + if let playerState = state.playerState { + let visibleTracks = playerState.tracks.filter { $0.id == 0 }.map { MediaScrubberComponent.Track($0) } + + let mediaEditor = component.mediaEditor + let scrubberInset: CGFloat = buttonSideInset + let scrubberSize = self.scrubber.update( + transition: transition, + component: AnyComponent(MediaScrubberComponent( + context: component.context, + style: .cover, + theme: environment.theme, + generationTimestamp: playerState.generationTimestamp, + position: playerState.position, + minDuration: 1.0, + maxDuration: storyMaxVideoDuration, + isPlaying: playerState.isPlaying, + tracks: visibleTracks, + portalView: controller.portalView, + positionUpdated: { [weak mediaEditor] position, apply in + if let mediaEditor { + mediaEditor.seek(position, andPlay: false) + } + }, + coverPositionUpdated: { [weak mediaEditor] position, tap, commit in + if let mediaEditor { + if tap { + mediaEditor.setOnNextDisplay { + commit() + } + mediaEditor.seek(position, andPlay: false) + } else { + mediaEditor.seek(position, andPlay: false) + commit() + } + } + }, + trackTrimUpdated: { _, _, _, _, _ in + }, + trackOffsetUpdated: { _, _, _ in + }, + trackLongPressed: { _, _ in + } + )), + environment: {}, + containerSize: CGSize(width: previewSize.width - scrubberInset * 2.0, height: availableSize.height) + ) + + let scrubberFrame = CGRect(origin: CGPoint(x: floorToScreenPixels((availableSize.width - scrubberSize.width) / 2.0), y: availableSize.height - environment.safeInsets.bottom - scrubberSize.height + controlsBottomInset + 3.0 - 40.0), size: scrubberSize) + if let scrubberView = self.scrubber.view { + var animateIn = false + if scrubberView.superview == nil { + animateIn = true + self.addSubview(scrubberView) + } + if animateIn { + scrubberView.frame = scrubberFrame + } else { + transition.setFrame(view: scrubberView, frame: 
scrubberFrame) + } + if animateIn { + scrubberView.layer.animatePosition(from: CGPoint(x: 0.0, y: 44.0), to: .zero, duration: 0.3, timingFunction: kCAMediaTimingFunctionSpring, additive: true) + scrubberView.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2) + scrubberView.layer.animateScale(from: 0.6, to: 1.0, duration: 0.2) + } + } + } + + return availableSize + } + } + + func makeView() -> View { + return View() + } + + public func update(view: View, availableSize: CGSize, state: State, environment: Environment, transition: ComponentTransition) -> CGSize { + return view.update(component: self, availableSize: availableSize, state: state, environment: environment, transition: transition) + } +} + +final class MediaCoverScreen: ViewController { + fileprivate final class Node: ViewControllerTracingNode, ASGestureRecognizerDelegate { + private weak var controller: MediaCoverScreen? + private let context: AccountContext + + fileprivate let componentHost: ComponentView + + private var presentationData: PresentationData + private var validLayout: ContainerViewLayout? + + init(controller: MediaCoverScreen) { + self.controller = controller + self.context = controller.context + + self.presentationData = self.context.sharedContext.currentPresentationData.with { $0 } + + self.componentHost = ComponentView() + + super.init() + + self.backgroundColor = .clear + } + + override func didLoad() { + super.didLoad() + + self.view.disablesInteractiveModalDismiss = true + self.view.disablesInteractiveKeyboardGestureRecognizer = true + } + + @objc func gestureRecognizer(_ gestureRecognizer: UIGestureRecognizer, shouldRecognizeSimultaneouslyWith otherGestureRecognizer: UIGestureRecognizer) -> Bool { + return true + } + + func animateInFromEditor() { + if let view = self.componentHost.view as? MediaCoverScreenComponent.View { + view.animateInFromEditor() + } + } + + func animateOutToEditor(completion: @escaping () -> Void) { + if let mediaEditor = self.controller?.mediaEditor { + mediaEditor.play() + } + if let view = self.componentHost.view as? MediaCoverScreenComponent.View { + view.animateOutToEditor(completion: completion) + } + } + + override func hitTest(_ point: CGPoint, with event: UIEvent?) -> UIView? { + let result = super.hitTest(point, with: event) + if result === self.view { + return nil + } + return result + } + + func requestLayout(transition: ComponentTransition) { + if let layout = self.validLayout { + self.containerLayoutUpdated(layout: layout, forceUpdate: true, transition: transition) + } + } + + func containerLayoutUpdated(layout: ContainerViewLayout, forceUpdate: Bool = false, animateOut: Bool = false, transition: ComponentTransition) { + guard let controller = self.controller else { + return + } + let isFirstTime = self.validLayout == nil + self.validLayout = layout + + let isTablet = layout.metrics.isTablet + + let previewSize: CGSize + let topInset: CGFloat = (layout.statusBarHeight ?? 0.0) + 5.0 + if isTablet { + let previewHeight = layout.size.height - topInset - 75.0 + previewSize = CGSize(width: floorToScreenPixels(previewHeight / 1.77778), height: previewHeight) + } else { + previewSize = CGSize(width: layout.size.width, height: floorToScreenPixels(layout.size.width * 1.77778)) + } + let bottomInset = layout.size.height - previewSize.height - topInset + + let environment = ViewControllerComponentContainer.Environment( + statusBarHeight: layout.statusBarHeight ?? 
0.0, + navigationHeight: 0.0, + safeInsets: UIEdgeInsets( + top: topInset, + left: layout.safeInsets.left, + bottom: bottomInset, + right: layout.safeInsets.right + ), + additionalInsets: layout.additionalInsets, + inputHeight: layout.inputHeight ?? 0.0, + metrics: layout.metrics, + deviceMetrics: layout.deviceMetrics, + orientation: nil, + isVisible: true, + theme: self.presentationData.theme, + strings: self.presentationData.strings, + dateTimeFormat: self.presentationData.dateTimeFormat, + controller: { [weak self] in + return self?.controller + } + ) + + let componentSize = self.componentHost.update( + transition: transition, + component: AnyComponent( + MediaCoverScreenComponent( + context: self.context, + mediaEditor: controller.mediaEditor + ) + ), + environment: { + environment + }, + forceUpdate: forceUpdate || animateOut, + containerSize: layout.size + ) + if let componentView = self.componentHost.view { + if componentView.superview == nil { + self.view.insertSubview(componentView, at: 3) + componentView.clipsToBounds = true + } + let componentFrame = CGRect(origin: .zero, size: componentSize) + transition.setFrame(view: componentView, frame: CGRect(origin: componentFrame.origin, size: CGSize(width: componentFrame.width, height: componentFrame.height))) + } + + if isFirstTime { + self.animateInFromEditor() + } + } + } + + fileprivate var node: Node { + return self.displayNode as! Node + } + + fileprivate let context: AccountContext + fileprivate let mediaEditor: MediaEditor + fileprivate let previewView: MediaEditorPreviewView + fileprivate let portalView: PortalView + + var completed: (Double, UIImage) -> Void = { _, _ in } + var dismissed: () -> Void = {} + + private var initialValues: MediaEditorValues + + init( + context: AccountContext, + mediaEditor: MediaEditor, + previewView: MediaEditorPreviewView, + portalView: PortalView + ) { + self.context = context + self.mediaEditor = mediaEditor + self.previewView = previewView + self.portalView = portalView + self.initialValues = mediaEditor.values.makeCopy() + + super.init(navigationBarPresentationData: nil) + self.navigationPresentation = .flatModal + + self.supportedOrientations = ViewControllerSupportedOrientations(regularSize: .all, compactSize: .portrait) + + self.statusBar.statusBarStyle = .White + + if let coverImageTimestamp = mediaEditor.values.coverImageTimestamp { + mediaEditor.seek(coverImageTimestamp, andPlay: false) + } else { + mediaEditor.seek(0.0, andPlay: false) + } + } + + required init(coder aDecoder: NSCoder) { + fatalError("init(coder:) has not been implemented") + } + + override func loadDisplayNode() { + self.displayNode = Node(controller: self) + + super.displayNodeDidLoad() + } + + func requestDismiss(animated: Bool) { + self.dismissed() + + self.node.animateOutToEditor(completion: { + self.dismiss() + }) + } + + override func containerLayoutUpdated(_ layout: ContainerViewLayout, transition: ContainedViewLayoutTransition) { + super.containerLayoutUpdated(layout, transition: transition) + + (self.displayNode as! 
Node).containerLayoutUpdated(layout: layout, transition: ComponentTransition(transition)) + } +} + +private func setupButtonShadow(_ view: UIView, radius: CGFloat = 2.0) { + view.layer.shadowOffset = CGSize(width: 0.0, height: 0.0) + view.layer.shadowRadius = radius + view.layer.shadowColor = UIColor.black.cgColor + view.layer.shadowOpacity = 0.35 +} diff --git a/submodules/TelegramUI/Components/MediaEditorScreen/Sources/MediaEditorScreen.swift b/submodules/TelegramUI/Components/MediaEditorScreen/Sources/MediaEditorScreen.swift index 18a9f0f686..b5358e21c9 100644 --- a/submodules/TelegramUI/Components/MediaEditorScreen/Sources/MediaEditorScreen.swift +++ b/submodules/TelegramUI/Components/MediaEditorScreen/Sources/MediaEditorScreen.swift @@ -48,6 +48,7 @@ import StickerPackEditTitleController import StickerPickerScreen import UIKitRuntimeUtils import ImageObjectSeparation +import DeviceAccess private let playbackButtonTag = GenericComponentViewTag() private let muteButtonTag = GenericComponentViewTag() @@ -76,6 +77,7 @@ final class MediaEditorScreenComponent: Component { case cutout case cutoutErase case cutoutRestore + case cover } let context: AccountContext @@ -827,8 +829,7 @@ final class MediaEditorScreenComponent: Component { doneButtonTitle = nil doneButtonIcon = generateTintedImage(image: UIImage(bundleImageName: "Media Editor/Apply"), color: .white)! case .botPreview: - //TODO:localize - doneButtonTitle = environment.strings.Story_Editor_Add + doneButtonTitle = environment.strings.Story_Editor_Add.uppercased() doneButtonIcon = nil } @@ -841,31 +842,7 @@ final class MediaEditorScreenComponent: Component { title: doneButtonTitle)), effectAlignment: .center, action: { [weak controller] in - guard let controller else { - return - } - switch controller.mode { - case .storyEditor: - guard !controller.node.recording.isActive else { - return - } - guard controller.checkCaptionLimit() else { - return - } - if controller.isEditingStory { - controller.requestStoryCompletion(animated: true) - } else { - if controller.checkIfCompletionIsAllowed() { - controller.openPrivacySettings(completion: { [weak controller] in - controller?.requestStoryCompletion(animated: true) - }) - } - } - case .stickerEditor: - controller.requestStickerCompletion(animated: true) - case .botPreview: - controller.requestStoryCompletion(animated: true) - } + controller?.node.requestCompletion() } )), environment: {}, @@ -1222,6 +1199,7 @@ final class MediaEditorScreenComponent: Component { let displayTopButtons = !(self.inputPanelExternalState.isEditing || isEditingTextEntity || component.isDisplayingTool != nil) + var inputPanelSize: CGSize = .zero if case .storyEditor = controller.mode { let nextInputMode: MessageInputPanelComponent.InputMode switch self.currentInputMode { @@ -1241,7 +1219,7 @@ final class MediaEditorScreenComponent: Component { } self.inputPanel.parentState = state - let inputPanelSize = self.inputPanel.update( + inputPanelSize = self.inputPanel.update( transition: transition, component: AnyComponent(MessageInputPanelComponent( externalState: self.inputPanelExternalState, @@ -1453,192 +1431,7 @@ final class MediaEditorScreenComponent: Component { transition.setFrame(view: inputPanelView, frame: inputPanelFrame) transition.setAlpha(view: inputPanelView, alpha: isEditingTextEntity || component.isDisplayingTool != nil || component.isDismissing || component.isInteractingWithEntities ? 
0.0 : 1.0) } - - if let playerState = state.playerState { - let scrubberInset: CGFloat = 9.0 - - let minDuration: Double - let maxDuration: Double - if playerState.isAudioOnly { - minDuration = 5.0 - maxDuration = 15.0 - } else { - minDuration = 1.0 - maxDuration = storyMaxVideoDuration - } - - let previousTrackCount = self.currentVisibleTracks?.count - let visibleTracks = playerState.tracks.filter { $0.visibleInTimeline }.map { MediaScrubberComponent.Track($0) } - self.currentVisibleTracks = visibleTracks - - var scrubberTransition = transition - if let previousTrackCount, previousTrackCount != visibleTracks.count { - scrubberTransition = .easeInOut(duration: 0.2) - } - - let isAudioOnly = playerState.isAudioOnly - let hasMainVideoTrack = playerState.tracks.contains(where: { $0.id == 0 }) - - let scrubber: ComponentView - if let current = self.scrubber { - scrubber = current - } else { - scrubber = ComponentView() - self.scrubber = scrubber - } - - let scrubberSize = scrubber.update( - transition: scrubberTransition, - component: AnyComponent(MediaScrubberComponent( - context: component.context, - style: .editor, - theme: environment.theme, - generationTimestamp: playerState.generationTimestamp, - position: playerState.position, - minDuration: minDuration, - maxDuration: maxDuration, - isPlaying: playerState.isPlaying, - tracks: visibleTracks, - positionUpdated: { [weak mediaEditor] position, apply in - if let mediaEditor { - mediaEditor.seek(position, andPlay: apply) - } - }, - trackTrimUpdated: { [weak mediaEditor] trackId, start, end, updatedEnd, apply in - guard let mediaEditor else { - return - } - let trimRange = start..= upperBound { - start = lowerBound - } else if start < lowerBound { - start = lowerBound - } - } - - mediaEditor.seek(start, andPlay: true) - mediaEditor.play() - } else { - mediaEditor.stop() - } - } - } else if trackId == 1 { - mediaEditor.setAdditionalVideoOffset(offset, apply: apply) - } - }, - trackLongPressed: { [weak controller] trackId, sourceView in - guard let controller else { - return - } - controller.node.presentTrackOptions(trackId: trackId, sourceView: sourceView) - } - )), - environment: {}, - containerSize: CGSize(width: previewSize.width - scrubberInset * 2.0, height: availableSize.height) - ) - - let scrubberFrame = CGRect(origin: CGPoint(x: floorToScreenPixels((availableSize.width - scrubberSize.width) / 2.0), y: availableSize.height - environment.safeInsets.bottom - scrubberSize.height + controlsBottomInset - inputPanelSize.height + 3.0), size: scrubberSize) - if let scrubberView = scrubber.view { - var animateIn = false - if scrubberView.superview == nil { - animateIn = true - if let inputPanelBackgroundView = self.inputPanelBackground.view, inputPanelBackgroundView.superview != nil { - self.insertSubview(scrubberView, belowSubview: inputPanelBackgroundView) - } else { - self.addSubview(scrubberView) - } - } - if animateIn { - scrubberView.frame = scrubberFrame - } else { - scrubberTransition.setFrame(view: scrubberView, frame: scrubberFrame) - } - if !self.animatingButtons && !(!hasMainVideoTrack && animateIn) { - transition.setAlpha(view: scrubberView, alpha: component.isDisplayingTool != nil || component.isDismissing || component.isInteractingWithEntities || isEditingCaption || isRecordingAdditionalVideo || isEditingTextEntity ? 
0.0 : 1.0) - } else if animateIn { - scrubberView.layer.animatePosition(from: CGPoint(x: 0.0, y: 44.0), to: .zero, duration: 0.3, timingFunction: kCAMediaTimingFunctionSpring, additive: true) - scrubberView.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2) - scrubberView.layer.animateScale(from: 0.6, to: 1.0, duration: 0.2) - } - } - } else { - if let scrubber = self.scrubber { - self.scrubber = nil - if let scrubberView = scrubber.view { - scrubberView.layer.animatePosition(from: .zero, to: CGPoint(x: 0.0, y: 44.0), duration: 0.3, timingFunction: kCAMediaTimingFunctionSpring, removeOnCompletion: false, additive: true) - scrubberView.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2, removeOnCompletion: false, completion: { _ in - scrubberView.removeFromSuperview() - }) - scrubberView.layer.animateScale(from: 1.0, to: 0.6, duration: 0.2, removeOnCompletion: false) - } - } - } - + let saveContentComponent: AnyComponentWithIdentity if component.hasAppeared { saveContentComponent = AnyComponentWithIdentity( @@ -1995,8 +1788,199 @@ final class MediaEditorScreenComponent: Component { transition.setScale(view: switchCameraButtonView, scale: isRecordingAdditionalVideo ? 1.0 : 0.01) transition.setAlpha(view: switchCameraButtonView, alpha: isRecordingAdditionalVideo ? 1.0 : 0.0) } - + } else { + inputPanelSize = CGSize(width: 0.0, height: 12.0) } + + if case .stickerEditor = controller.mode { + + } else { + if let playerState = state.playerState { + let scrubberInset: CGFloat = 9.0 + + let minDuration: Double + let maxDuration: Double + if playerState.isAudioOnly { + minDuration = 5.0 + maxDuration = 15.0 + } else { + minDuration = 1.0 + maxDuration = storyMaxVideoDuration + } + + let previousTrackCount = self.currentVisibleTracks?.count + let visibleTracks = playerState.tracks.filter { $0.visibleInTimeline }.map { MediaScrubberComponent.Track($0) } + self.currentVisibleTracks = visibleTracks + + var scrubberTransition = transition + if let previousTrackCount, previousTrackCount != visibleTracks.count { + scrubberTransition = .easeInOut(duration: 0.2) + } + + let isAudioOnly = playerState.isAudioOnly + let hasMainVideoTrack = playerState.tracks.contains(where: { $0.id == 0 }) + + let scrubber: ComponentView + if let current = self.scrubber { + scrubber = current + } else { + scrubber = ComponentView() + self.scrubber = scrubber + } + + let scrubberSize = scrubber.update( + transition: scrubberTransition, + component: AnyComponent(MediaScrubberComponent( + context: component.context, + style: .editor, + theme: environment.theme, + generationTimestamp: playerState.generationTimestamp, + position: playerState.position, + minDuration: minDuration, + maxDuration: maxDuration, + isPlaying: playerState.isPlaying, + tracks: visibleTracks, + positionUpdated: { [weak mediaEditor] position, apply in + if let mediaEditor { + mediaEditor.seek(position, andPlay: apply) + } + }, + trackTrimUpdated: { [weak mediaEditor] trackId, start, end, updatedEnd, apply in + guard let mediaEditor else { + return + } + let trimRange = start..= upperBound { + start = lowerBound + } else if start < lowerBound { + start = lowerBound + } + } + + mediaEditor.seek(start, andPlay: true) + mediaEditor.play() + } else { + mediaEditor.stop() + } + } + } else if trackId == 1 { + mediaEditor.setAdditionalVideoOffset(offset, apply: apply) + } + }, + trackLongPressed: { [weak controller] trackId, sourceView in + guard let controller else { + return + } + controller.node.presentTrackOptions(trackId: trackId, sourceView: sourceView) + 
} + )), + environment: {}, + containerSize: CGSize(width: previewSize.width - scrubberInset * 2.0, height: availableSize.height) + ) + + let scrubberFrame = CGRect(origin: CGPoint(x: floorToScreenPixels((availableSize.width - scrubberSize.width) / 2.0), y: availableSize.height - environment.safeInsets.bottom - scrubberSize.height + controlsBottomInset - inputPanelSize.height + 3.0), size: scrubberSize) + if let scrubberView = scrubber.view { + var animateIn = false + if scrubberView.superview == nil { + animateIn = true + if let inputPanelBackgroundView = self.inputPanelBackground.view, inputPanelBackgroundView.superview != nil { + self.insertSubview(scrubberView, belowSubview: inputPanelBackgroundView) + } else { + self.addSubview(scrubberView) + } + } + if animateIn { + scrubberView.frame = scrubberFrame + } else { + scrubberTransition.setFrame(view: scrubberView, frame: scrubberFrame) + } + if !self.animatingButtons && !(!hasMainVideoTrack && animateIn) { + transition.setAlpha(view: scrubberView, alpha: component.isDisplayingTool != nil || component.isDismissing || component.isInteractingWithEntities || isEditingCaption || isRecordingAdditionalVideo || isEditingTextEntity ? 0.0 : 1.0) + } else if animateIn { + scrubberView.layer.animatePosition(from: CGPoint(x: 0.0, y: 44.0), to: .zero, duration: 0.3, timingFunction: kCAMediaTimingFunctionSpring, additive: true) + scrubberView.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2) + scrubberView.layer.animateScale(from: 0.6, to: 1.0, duration: 0.2) + } + } + } else { + if let scrubber = self.scrubber { + self.scrubber = nil + if let scrubberView = scrubber.view { + scrubberView.layer.animatePosition(from: .zero, to: CGPoint(x: 0.0, y: 44.0), duration: 0.3, timingFunction: kCAMediaTimingFunctionSpring, removeOnCompletion: false, additive: true) + scrubberView.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2, removeOnCompletion: false, completion: { _ in + scrubberView.removeFromSuperview() + }) + scrubberView.layer.animateScale(from: 1.0, to: 0.6, duration: 0.2, removeOnCompletion: false) + } + } + } + } + if case .stickerEditor = controller.mode { var stickerButtonsHidden = buttonsAreHidden if let displayingTool = component.isDisplayingTool, [.cutoutErase, .cutoutRestore].contains(displayingTool) { @@ -2558,6 +2542,7 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate fileprivate var drawingScreen: DrawingScreen? fileprivate var stickerScreen: StickerPickerScreen? fileprivate weak var cutoutScreen: MediaCutoutScreen? + fileprivate weak var coverScreen: MediaCoverScreen? private var defaultToEmoji = false private var previousDrawingData: Data? @@ -2569,6 +2554,8 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate var recording: MediaEditorScreen.Recording + private let locationManager = LocationManager() + private var presentationData: PresentationData private var validLayout: ContainerViewLayout? @@ -2901,6 +2888,12 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate if isFromCamera && mediaDimensions.width > mediaDimensions.height { mediaEntity.scale = storyDimensions.height / fittedSize.height } + + if case .botPreview = controller.mode { + if fittedSize.width / fittedSize.height < storyDimensions.width / storyDimensions.height { + mediaEntity.scale = storyDimensions.height / fittedSize.height + } + } let initialValues: MediaEditorValues? 
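Aside (illustrative, not part of the patch): the MediaEditorValues.withUpdated… methods at the top of this section rebuild the value object argument by argument, which is why the new coverImageTimestamp has to be threaded through every existing copy method and also gets its own withUpdatedCoverImageTimestamp. A cut-down sketch of that copy-with-updated-field pattern, using a simplified stand-in type rather than the real MediaEditorValues:

struct EditorValuesSketch {
    let videoTrimRange: Range<Double>?
    let audioTrackVolume: Double?
    let coverImageTimestamp: Double?   // the field this diff introduces

    // Every withUpdated… method copies each existing field and replaces exactly one,
    // so adding a field means touching every copy method on the real type.
    func withUpdatedCoverImageTimestamp(_ coverImageTimestamp: Double?) -> EditorValuesSketch {
        return EditorValuesSketch(
            videoTrimRange: self.videoTrimRange,
            audioTrackVolume: self.audioTrackVolume,
            coverImageTimestamp: coverImageTimestamp
        )
    }
}

// Usage sketch:
// let values = EditorValuesSketch(videoTrimRange: nil, audioTrackVolume: 1.0, coverImageTimestamp: nil)
// let withCover = values.withUpdatedCoverImageTimestamp(2.5)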
if case let .draft(draft, _) = subject { @@ -4594,7 +4587,37 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate self.mediaEditor?.play() } - func addWeather(_ weather: StickerPickerScreen.Weather.LoadedWeather) { + func requestWeather() { + + } + + func presentLocationAccessAlert() { + DeviceAccess.authorizeAccess(to: .location(.send), locationManager: self.locationManager, presentationData: self.presentationData, present: { [weak self] c, a in + self?.controller?.present(c, in: .window(.root), with: a) + }, openSettings: { [weak self] in + self?.context.sharedContext.applicationBindings.openSettings() + }, { [weak self] authorized in + guard let self, authorized else { + return + } + let weatherPromise = Promise() + weatherPromise.set(getWeather(context: self.context)) + self.weatherPromise = weatherPromise + + let _ = (weatherPromise.get() + |> deliverOnMainQueue).start(next: { [weak self] result in + if let self, case let .loaded(weather) = result { + self.addWeather(weather) + } + }) + }) + } + + func addWeather(_ weather: StickerPickerScreen.Weather.LoadedWeather?) { + guard let weather else { + + return + } let maxWeatherCount = 3 var currentWeatherCount = 0 self.entitiesView.eachView { entityView in @@ -4619,6 +4642,74 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate ) } + func requestCompletion(playHaptic: Bool = true) { + guard let controller = self.controller else { + return + } + switch controller.mode { + case .storyEditor: + guard !controller.node.recording.isActive else { + return + } + guard controller.checkCaptionLimit() else { + return + } + if controller.isEditingStory { + controller.requestStoryCompletion(animated: true) + } else { + if controller.checkIfCompletionIsAllowed() { + controller.hapticFeedback.impact(.light) + controller.openPrivacySettings(completion: { [weak controller] in + controller?.requestStoryCompletion(animated: true) + }) + } + } + case .stickerEditor: + controller.requestStickerCompletion(animated: true) + case .botPreview: + controller.requestStoryCompletion(animated: true) + } + } + + func openCoverSelection() { + guard let mediaEditor = self.mediaEditor else { + return + } + + guard let portalView = PortalView(matchPosition: false) else { + return + } + portalView.view.layer.rasterizationScale = UIScreenScale + self.previewContentContainerView.addPortal(view: portalView) + + let scale = 48.0 / self.previewContentContainerView.frame.height + portalView.view.transform = CGAffineTransformMakeScale(scale, scale) + + if self.entitiesView.hasSelection { + self.entitiesView.selectEntity(nil) + } + let coverController = MediaCoverScreen( + context: self.context, + mediaEditor: mediaEditor, + previewView: self.previewView, + portalView: portalView + ) + coverController.dismissed = { [weak self] in + if let self { + self.animateInFromTool() + self.requestCompletion(playHaptic: false) + } + } + coverController.completed = { [weak self] position, image in + if let self { + self.controller?.currentCoverImage = image + } + } + self.controller?.present(coverController, in: .window(.root)) + self.coverScreen = coverController + self.animateOutToTool(tool: .cover) + } + func updateModalTransitionFactor(_ value: CGFloat, transition: ContainedViewLayoutTransition) { guard let layout = self.validLayout, case .compact = layout.metrics.widthClass else { return @@ -4890,9 +4981,16 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate if let self { if let weatherPromise = 
self.weatherPromise { let _ = (weatherPromise.get() - |> take(1)).start(next: { [weak self] weather in - if let self, case let .loaded(loaded) = weather { - self.addWeather(loaded) + |> take(1)).start(next: { [weak self] result in + if let self { + switch result { + case let .loaded(weather): + self.addWeather(weather) + case .notDetermined, .notAllowed: + self.presentLocationAccessAlert() + default: + break + } } }) } @@ -5082,6 +5180,8 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate } self.controller?.present(controller, in: .window(.root)) self.animateOutToTool(tool: .tools) + case .cover: + self.openCoverSelection() } } }, @@ -5657,16 +5757,25 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate return self.isEditingStory || self.forwardSource != nil } + private var currentCoverImage: UIImage? func openPrivacySettings(_ privacy: MediaEditorResultPrivacy? = nil, completion: @escaping () -> Void = {}) { - self.node.mediaEditor?.maybePauseVideo() - - self.hapticFeedback.impact(.light) - + guard let mediaEditor = self.node.mediaEditor else { + return + } + mediaEditor.maybePauseVideo() + let privacy = privacy ?? self.state.privacy let text = self.getCaption().string let mentions = generateTextEntities(text, enabledTypes: [.mention], currentEntities: []).map { (text as NSString).substring(with: NSRange(location: $0.range.lowerBound + 1, length: $0.range.upperBound - $0.range.lowerBound - 1)) } + let coverImage: UIImage? + if mediaEditor.sourceIsVideo { + coverImage = self.currentCoverImage ?? mediaEditor.resultImage + } else { + coverImage = nil + } + let stateContext = ShareWithPeersScreen.StateContext( context: self.context, subject: .stories(editing: false), @@ -5684,6 +5793,8 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate let initialPrivacy = privacy.privacy let timeout = privacy.timeout + var editCoverImpl: (() -> Void)? + let controller = ShareWithPeersScreen( context: self.context, initialPrivacy: initialPrivacy, @@ -5692,6 +5803,7 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate pin: privacy.pin, timeout: privacy.timeout, mentions: mentions, + coverImage: coverImage, stateContext: stateContext, completion: { [weak self] sendAsPeerId, privacy, allowScreenshots, pin, _, completed in guard let self else { @@ -5741,6 +5853,9 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate pin: pin ), completion: completion) }) + }, + editCover: { + editCoverImpl?() } ) controller.customModalStyleOverlayTransitionFactorUpdated = { [weak self, weak controller] transition in @@ -5753,6 +5868,17 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate self.node.mediaEditor?.play() } self.push(controller) + + editCoverImpl = { [weak self, weak controller] in + if let self { + Queue.mainQueue().after(0.25, { + self.node.openCoverSelection() + }) + } + if let controller { + controller.dismiss() + } + } }) } @@ -8079,7 +8205,7 @@ private func stickerFile(resource: TelegramMediaResource, thumbnailResource: Tel fileAttributes.append(.FileName(fileName: isVideo ? "sticker.webm" : "sticker.webp")) fileAttributes.append(.Sticker(displayText: "", packReference: nil, maskData: nil)) if isVideo { - fileAttributes.append(.Video(duration: duration ?? 3.0, size: dimensions, flags: [], preloadSize: nil)) + fileAttributes.append(.Video(duration: duration ?? 
3.0, size: dimensions, flags: [], preloadSize: nil, coverTime: nil)) } else { fileAttributes.append(.ImageSize(size: dimensions)) } diff --git a/submodules/TelegramUI/Components/MediaEditorScreen/Sources/Weather.swift b/submodules/TelegramUI/Components/MediaEditorScreen/Sources/Weather.swift index 8236b696c2..26b43419a5 100644 --- a/submodules/TelegramUI/Components/MediaEditorScreen/Sources/Weather.swift +++ b/submodules/TelegramUI/Components/MediaEditorScreen/Sources/Weather.swift @@ -5,6 +5,7 @@ import TelegramCore import StickerPickerScreen import AccountContext import DeviceLocationManager +import DeviceAccess struct StoryWeather { let emoji: String @@ -50,33 +51,44 @@ func getWeather(context: AccountContext) -> Signal then( - currentLocationManagerCoordinate(manager: locationManager, timeout: 5.0) - |> mapToSignal { location in - if let location { - return getWeatherData(context: context, location: location) - |> mapToSignal { weather in - if let weather { - let effectiveEmoji = emojiFor(for: weather.emoji.strippedEmoji, date: Date(), location: location) - if let match = context.animatedEmojiStickersValue[effectiveEmoji]?.first { - return .single(.loaded(StickerPickerScreen.Weather.LoadedWeather( - emoji: effectiveEmoji, - emojiFile: match.file, - temperature: weather.temperature - ))) - } else { - return .single(.none) + + return DeviceAccess.authorizationStatus(subject: .location(.send)) + |> mapToSignal { status in + switch status { + case .notDetermined: + return .single(.notDetermined) + case .denied, .restricted, .unreachable: + return .single(.notAllowed) + case .allowed: + return .single(.fetching) + |> then( + currentLocationManagerCoordinate(manager: locationManager, timeout: 5.0) + |> mapToSignal { location in + if let location { + return getWeatherData(context: context, location: location) + |> mapToSignal { weather in + if let weather { + let effectiveEmoji = emojiFor(for: weather.emoji.strippedEmoji, date: Date(), location: location) + if let match = context.animatedEmojiStickersValue[effectiveEmoji]?.first { + return .single(.loaded(StickerPickerScreen.Weather.LoadedWeather( + emoji: effectiveEmoji, + emojiFile: match.file, + temperature: weather.temperature + ))) + } else { + return .single(.none) + } + } else { + return .single(.none) + } } } else { return .single(.none) } } - } else { - return .single(.none) - } + ) } - ) + } } private struct WeatherBotConfiguration { diff --git a/submodules/TelegramUI/Components/MediaScrubberComponent/Sources/MediaScrubberComponent.swift b/submodules/TelegramUI/Components/MediaScrubberComponent/Sources/MediaScrubberComponent.swift index c08a62d34e..aa47178efe 100644 --- a/submodules/TelegramUI/Components/MediaScrubberComponent/Sources/MediaScrubberComponent.swift +++ b/submodules/TelegramUI/Components/MediaScrubberComponent/Sources/MediaScrubberComponent.swift @@ -70,6 +70,7 @@ public final class MediaScrubberComponent: Component { public enum Style { case editor case videoMessage + case cover } let context: AccountContext @@ -84,8 +85,10 @@ public final class MediaScrubberComponent: Component { let isPlaying: Bool let tracks: [Track] + let portalView: PortalView? 
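Aside on the Weather.swift hunk just above (illustrative, not part of the patch): getWeather now checks DeviceAccess.authorizationStatus before touching the location manager, so the editor can tell "never asked" apart from "denied" and only presents the location access alert in the former case. A simplified, signal-free sketch of that branching; the enums below are stand-ins, not the real StickerPickerScreen.Weather type, and the intermediate .fetching state and the 5-second location timeout are omitted:

enum LocationAccessSketch { case notDetermined, denied, allowed }

enum WeatherResultSketch {
    case notDetermined                          // prompt via DeviceAccess.authorizeAccess, then retry
    case notAllowed                             // denied/restricted/unreachable: no prompt, no sticker
    case loaded(emoji: String, temperature: Double)
    case unavailable                            // allowed, but no location fix or no weather data
}

func resolveWeather(access: LocationAccessSketch,
                    fetch: () -> (emoji: String, temperature: Double)?) -> WeatherResultSketch {
    switch access {
    case .notDetermined:
        return .notDetermined
    case .denied:
        return .notAllowed
    case .allowed:
        guard let weather = fetch() else { return .unavailable }
        return .loaded(emoji: weather.emoji, temperature: weather.temperature)
    }
}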
let positionUpdated: (Double, Bool) -> Void + let coverPositionUpdated: (Double, Bool, @escaping () -> Void) -> Void let trackTrimUpdated: (Int32, Double, Double, Bool, Bool) -> Void let trackOffsetUpdated: (Int32, Double, Bool) -> Void let trackLongPressed: (Int32, UIView) -> Void @@ -100,7 +103,9 @@ public final class MediaScrubberComponent: Component { maxDuration: Double, isPlaying: Bool, tracks: [Track], + portalView: PortalView? = nil, positionUpdated: @escaping (Double, Bool) -> Void, + coverPositionUpdated: @escaping (Double, Bool, @escaping () -> Void) -> Void = { _, _, _ in }, trackTrimUpdated: @escaping (Int32, Double, Double, Bool, Bool) -> Void, trackOffsetUpdated: @escaping (Int32, Double, Bool) -> Void, trackLongPressed: @escaping (Int32, UIView) -> Void @@ -114,7 +119,9 @@ public final class MediaScrubberComponent: Component { self.maxDuration = maxDuration self.isPlaying = isPlaying self.tracks = tracks + self.portalView = portalView self.positionUpdated = positionUpdated + self.coverPositionUpdated = coverPositionUpdated self.trackTrimUpdated = trackTrimUpdated self.trackOffsetUpdated = trackOffsetUpdated self.trackLongPressed = trackLongPressed @@ -152,6 +159,7 @@ public final class MediaScrubberComponent: Component { private var trackViews: [Int32: TrackView] = [:] private let trimView: TrimView private let ghostTrimView: TrimView + private let cursorContentView: UIView private let cursorView: HandleView private var cursorDisplayLink: SharedDisplayLinkDriver.Link? @@ -159,6 +167,7 @@ public final class MediaScrubberComponent: Component { private var selectedTrackId: Int32 = 0 private var isPanningCursor = false + private var ignoreCursorPositionUpdate = false private var scrubberSize: CGSize? @@ -169,6 +178,7 @@ public final class MediaScrubberComponent: Component { self.trimView = TrimView(frame: .zero) self.ghostTrimView = TrimView(frame: .zero) self.ghostTrimView.isHollow = true + self.cursorContentView = UIView() self.cursorView = HandleView() super.init(frame: frame) @@ -178,6 +188,10 @@ public final class MediaScrubberComponent: Component { self.disablesInteractiveModalDismiss = true self.disablesInteractiveKeyboardGestureRecognizer = true + self.cursorContentView.isUserInteractionEnabled = false + self.cursorContentView.clipsToBounds = true + self.cursorContentView.layer.cornerRadius = 10.0 + let positionImage = generateImage(CGSize(width: handleWidth, height: 50.0), rotatedContext: { size, context in context.clear(CGRect(origin: .zero, size: size)) context.setFillColor(UIColor.white.cgColor) @@ -187,13 +201,13 @@ public final class MediaScrubberComponent: Component { context.addPath(path.cgPath) context.fillPath() })?.stretchableImage(withLeftCapWidth: Int(handleWidth / 2.0), topCapHeight: 25) - self.cursorView.image = positionImage self.cursorView.isUserInteractionEnabled = true self.cursorView.hitTestSlop = UIEdgeInsets(top: -8.0, left: -9.0, bottom: -8.0, right: -9.0) self.addSubview(self.ghostTrimView) self.addSubview(self.trimView) + self.addSubview(self.cursorContentView) self.addSubview(self.cursorView) self.cursorView.addGestureRecognizer(UIPanGestureRecognizer(target: self, action: #selector(self.handleCursorPan(_:)))) @@ -317,10 +331,18 @@ public final class MediaScrubberComponent: Component { switch gestureRecognizer.state { case .began, .changed: self.isPanningCursor = true - component.positionUpdated(position, false) + if case .cover = component.style { + component.coverPositionUpdated(position, false, {}) + } else { + 
component.positionUpdated(position, false) + } case .ended, .cancelled: self.isPanningCursor = false - component.positionUpdated(position, true) + if case .cover = component.style { + component.coverPositionUpdated(position, false, {}) + } else { + component.positionUpdated(position, true) + } default: break } @@ -328,10 +350,23 @@ public final class MediaScrubberComponent: Component { } private func cursorFrame(size: CGSize, height: CGFloat, position: Double, duration : Double) -> CGRect { + var cursorWidth = handleWidth + var cursorMargin = handleWidth + var height = height + var isCover = false + var y: CGFloat = -5.0 - UIScreenPixel + if let component = self.component, case .cover = component.style { + cursorWidth = 30.0 + 12.0 + cursorMargin = handleWidth + height = 50.0 + isCover = true + y += 1.0 + } + let cursorPadding: CGFloat = 8.0 let cursorPositionFraction = duration > 0.0 ? position / duration : 0.0 - let cursorPosition = floorToScreenPixels(handleWidth - 1.0 + (size.width - handleWidth * 2.0 + 2.0) * cursorPositionFraction) - var cursorFrame = CGRect(origin: CGPoint(x: cursorPosition - handleWidth / 2.0, y: -5.0 - UIScreenPixel), size: CGSize(width: handleWidth, height: height)) + let cursorPosition = floorToScreenPixels(cursorMargin - 1.0 + (size.width - handleWidth * 2.0 + 2.0) * cursorPositionFraction) + var cursorFrame = CGRect(origin: CGPoint(x: cursorPosition - cursorWidth / 2.0, y: y), size: CGSize(width: cursorWidth, height: height)) var leftEdge = self.ghostTrimView.leftHandleView.frame.maxX var rightEdge = self.ghostTrimView.rightHandleView.frame.minX @@ -339,9 +374,13 @@ public final class MediaScrubberComponent: Component { leftEdge = self.trimView.leftHandleView.frame.maxX rightEdge = self.trimView.rightHandleView.frame.minX } + if isCover { + leftEdge = 0.0 + rightEdge = size.width + } cursorFrame.origin.x = max(leftEdge - cursorPadding, cursorFrame.origin.x) - cursorFrame.origin.x = min(rightEdge - handleWidth + cursorPadding, cursorFrame.origin.x) + cursorFrame.origin.x = min(rightEdge - cursorWidth + cursorPadding, cursorFrame.origin.x) return cursorFrame } @@ -377,6 +416,7 @@ public final class MediaScrubberComponent: Component { updatedPosition = max(self.startPosition, min(self.endPosition, position + advance)) } self.cursorView.frame = cursorFrame(size: scrubberSize, height: self.effectiveCursorHeight, position: updatedPosition, duration: self.trimDuration) + self.cursorContentView.frame = self.cursorView.frame.insetBy(dx: 6.0, dy: 2.0).offsetBy(dx: -1.0 - UIScreenPixel, dy: 0.0) } public func update(component: MediaScrubberComponent, availableSize: CGSize, state: EmptyComponentState, environment: Environment, transition: ComponentTransition) -> CGSize { @@ -384,11 +424,36 @@ public final class MediaScrubberComponent: Component { self.component = component self.state = state + if let portalView = component.portalView, portalView.view.superview == nil { + portalView.view.frame = CGRect(x: 0.0, y: 0.0, width: 30.0, height: 48.0) + portalView.view.clipsToBounds = true + self.cursorContentView.addSubview(portalView.view) + } + switch component.style { case .editor: self.cursorView.isHidden = false case .videoMessage: self.cursorView.isHidden = true + case .cover: + self.cursorView.isHidden = false + self.trimView.isHidden = true + self.ghostTrimView.isHidden = true + + if isFirstTime { + let positionImage = generateImage(CGSize(width: 30.0 + 12.0, height: 50.0), rotatedContext: { size, context in + context.clear(CGRect(origin: .zero, size: size)) + 
context.setStrokeColor(UIColor.white.cgColor) + let lineWidth = 2.0 - UIScreenPixel + context.setLineWidth(lineWidth) + context.setShadow(offset: .zero, blur: 2.0, color: UIColor(rgb: 0x000000, alpha: 0.55).cgColor) + + let path = UIBezierPath(roundedRect: CGRect(origin: CGPoint(x: 6.0 - lineWidth / 2.0, y: 2.0 - lineWidth / 2.0), size: CGSize(width: 30.0 - lineWidth, height: 48.0 - lineWidth)), cornerRadius: 9.0) + context.addPath(path.cgPath) + context.strokePath() + }) + self.cursorView.image = positionImage + } } var totalHeight: CGFloat = 0.0 @@ -419,6 +484,23 @@ public final class MediaScrubberComponent: Component { } else { trackTransition = .immediate trackView = TrackView() + trackView.onTap = { [weak self] fraction in + guard let self, let component = self.component else { + return + } + var position = max(self.startPosition, min(self.endPosition, self.trimDuration * fraction)) + if let offset = self.mainAudioTrackOffset { + position += offset + } + self.ignoreCursorPositionUpdate = true + component.coverPositionUpdated(position, true, { [weak self] in + guard let self else { + return + } + self.ignoreCursorPositionUpdate = false + self.state?.updated(transition: .immediate) + }) + } trackView.onSelection = { [weak self] id in guard let self else { return @@ -520,7 +602,7 @@ public final class MediaScrubberComponent: Component { let fullTrackHeight: CGFloat switch component.style { - case .editor: + case .editor, .cover: fullTrackHeight = trackHeight case .videoMessage: fullTrackHeight = 33.0 @@ -606,11 +688,15 @@ public final class MediaScrubberComponent: Component { self.cursorPositionAnimation = nil self.cursorDisplayLink?.isPaused = true - var cursorPosition = component.position - if let offset = self.mainAudioTrackOffset { - cursorPosition -= offset + if !self.ignoreCursorPositionUpdate { + var cursorPosition = component.position + if let offset = self.mainAudioTrackOffset { + cursorPosition -= offset + } + let cursorFrame = cursorFrame(size: scrubberSize, height: self.effectiveCursorHeight, position: cursorPosition, duration: trimDuration) + transition.setFrame(view: self.cursorView, frame: cursorFrame) + transition.setFrame(view: self.cursorContentView, frame: cursorFrame.insetBy(dx: 6.0, dy: 2.0).offsetBy(dx: -1.0 - UIScreenPixel, dy: 0.0)) } - transition.setFrame(view: self.cursorView, frame: cursorFrame(size: scrubberSize, height: self.effectiveCursorHeight, position: cursorPosition, duration: trimDuration)) } else { if let (_, _, end, ended) = self.cursorPositionAnimation { if ended, component.position >= self.startPosition && component.position < end - 1.0 { @@ -663,6 +749,7 @@ private class TrackView: UIView, UIScrollViewDelegate, UIGestureRecognizerDelega fileprivate var videoOpaqueFrameLayers: [VideoFrameLayer] = [] var onSelection: (Int32) -> Void = { _ in } + var onTap: (CGFloat) -> Void = { _ in } var offsetUpdated: (Double, Bool) -> Void = { _, _ in } var updated: (ComponentTransition) -> Void = { _ in } @@ -739,10 +826,15 @@ private class TrackView: UIView, UIScrollViewDelegate, UIGestureRecognizerDelega } @objc private func handleTap(_ gestureRecognizer: UITapGestureRecognizer) { - guard let (track, _, _, _) = self.params else { + guard let params = self.params else { return } - self.onSelection(track.id) + if case .cover = params.style { + let location = gestureRecognizer.location(in: self) + self.onTap(location.x / self.frame.width) + } else { + self.onSelection(params.track.id) + } } private func updateTrackOffset(done: Bool) { @@ -786,6 +878,7 @@ private 
class TrackView: UIView, UIScrollViewDelegate, UIGestureRecognizerDelega } private var params: ( + style: MediaScrubberComponent.Style, track: MediaScrubberComponent.Track, isSelected: Bool, availableSize: CGSize, @@ -834,12 +927,12 @@ private class TrackView: UIView, UIScrollViewDelegate, UIGestureRecognizerDelega transition: ComponentTransition ) -> CGSize { let previousParams = self.params - self.params = (track, isSelected, availableSize, duration) + self.params = (style, track, isSelected, availableSize, duration) let fullTrackHeight: CGFloat let framesCornerRadius: CGFloat switch style { - case .editor: + case .editor, .cover: fullTrackHeight = trackHeight framesCornerRadius = 9.0 case .videoMessage: @@ -1362,7 +1455,7 @@ private class TrimView: UIView { let highlightColor: UIColor switch style { - case .editor: + case .editor, .cover: effectiveHandleWidth = handleWidth fullTrackHeight = trackHeight capsuleOffset = 5.0 - UIScreenPixel diff --git a/submodules/TelegramUI/Components/MinimizedContainer/Sources/MinimizedContainer.swift b/submodules/TelegramUI/Components/MinimizedContainer/Sources/MinimizedContainer.swift index b6d1b4b977..2bcb788957 100644 --- a/submodules/TelegramUI/Components/MinimizedContainer/Sources/MinimizedContainer.swift +++ b/submodules/TelegramUI/Components/MinimizedContainer/Sources/MinimizedContainer.swift @@ -649,7 +649,7 @@ public class MinimizedContainerImpl: ASDisplayNode, MinimizedContainer, ASScroll scrollView.isScrollEnabled = false scrollView.panGestureRecognizer.isEnabled = false scrollView.panGestureRecognizer.isEnabled = true - scrollView.contentOffset = contentOffset + scrollView.setContentOffset(contentOffset, animated: false) self.currentTransition = .collapse self.requestUpdate(transition: .animated(duration: 0.4, curve: .customSpring(damping: 180.0, initialVelocity: initialVelocity))) } diff --git a/submodules/TelegramUI/Components/PeerInfo/PeerInfoScreen/Sources/PeerInfoAvatarTransformContainerNode.swift b/submodules/TelegramUI/Components/PeerInfo/PeerInfoScreen/Sources/PeerInfoAvatarTransformContainerNode.swift index 1d61323f75..8922ed2a8d 100644 --- a/submodules/TelegramUI/Components/PeerInfo/PeerInfoScreen/Sources/PeerInfoAvatarTransformContainerNode.swift +++ b/submodules/TelegramUI/Components/PeerInfo/PeerInfoScreen/Sources/PeerInfoAvatarTransformContainerNode.swift @@ -327,7 +327,7 @@ final class PeerInfoAvatarTransformContainerNode: ASDisplayNode { markupNode.update(markup: markup, size: CGSize(width: 320.0, height: 320.0)) markupNode.updateVisibility(true) } else if threadInfo == nil, let video = videoRepresentations.last, let peerReference = PeerReference(peer) { - let videoFileReference = FileMediaReference.avatarList(peer: peerReference, media: TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.LocalFile, id: 0), partialReference: nil, resource: video.representation.resource, previewRepresentations: representations.map { $0.representation }, videoThumbnails: [], immediateThumbnailData: immediateThumbnailData, mimeType: "video/mp4", size: nil, attributes: [.Animated, .Video(duration: 0, size: video.representation.dimensions, flags: [], preloadSize: nil)])) + let videoFileReference = FileMediaReference.avatarList(peer: peerReference, media: TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.LocalFile, id: 0), partialReference: nil, resource: video.representation.resource, previewRepresentations: representations.map { $0.representation }, videoThumbnails: [], immediateThumbnailData: immediateThumbnailData, 
mimeType: "video/mp4", size: nil, attributes: [.Animated, .Video(duration: 0, size: video.representation.dimensions, flags: [], preloadSize: nil, coverTime: nil)])) let videoContent = NativeVideoContent(id: .profileVideo(videoId, nil), userLocation: .other, fileReference: videoFileReference, streamVideo: isMediaStreamable(resource: video.representation.resource) ? .conservative : .none, loopVideo: true, enableSound: false, fetchAutomatically: true, onlyFullSizeThumbnail: false, useLargeThumbnail: true, autoFetchFullSizeThumbnail: true, startTimestamp: video.representation.startTimestamp, continuePlayingWithoutSoundOnLostAudioSession: false, placeholderColor: .clear, captureProtected: peer.isCopyProtectionEnabled, storeAfterDownload: nil) if videoContent.id != self.videoContent?.id { self.videoNode?.removeFromSupernode() diff --git a/submodules/TelegramUI/Components/PeerInfo/PeerInfoScreen/Sources/PeerInfoEditingAvatarNode.swift b/submodules/TelegramUI/Components/PeerInfo/PeerInfoScreen/Sources/PeerInfoEditingAvatarNode.swift index 909f535749..a2e196d1e4 100644 --- a/submodules/TelegramUI/Components/PeerInfo/PeerInfoScreen/Sources/PeerInfoEditingAvatarNode.swift +++ b/submodules/TelegramUI/Components/PeerInfo/PeerInfoScreen/Sources/PeerInfoEditingAvatarNode.swift @@ -162,7 +162,7 @@ final class PeerInfoEditingAvatarNode: ASDisplayNode { markupNode.removeFromSupernode() } - let videoFileReference = FileMediaReference.avatarList(peer: peerReference, media: TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.LocalFile, id: 0), partialReference: nil, resource: video.representation.resource, previewRepresentations: representations.map { $0.representation }, videoThumbnails: [], immediateThumbnailData: immediateThumbnailData, mimeType: "video/mp4", size: nil, attributes: [.Animated, .Video(duration: 0, size: video.representation.dimensions, flags: [], preloadSize: nil)])) + let videoFileReference = FileMediaReference.avatarList(peer: peerReference, media: TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.LocalFile, id: 0), partialReference: nil, resource: video.representation.resource, previewRepresentations: representations.map { $0.representation }, videoThumbnails: [], immediateThumbnailData: immediateThumbnailData, mimeType: "video/mp4", size: nil, attributes: [.Animated, .Video(duration: 0, size: video.representation.dimensions, flags: [], preloadSize: nil, coverTime: nil)])) let videoContent = NativeVideoContent(id: .profileVideo(videoId, nil), userLocation: .other, fileReference: videoFileReference, streamVideo: isMediaStreamable(resource: video.representation.resource) ? 
.conservative : .none, loopVideo: true, enableSound: false, fetchAutomatically: true, onlyFullSizeThumbnail: false, useLargeThumbnail: true, autoFetchFullSizeThumbnail: true, startTimestamp: video.representation.startTimestamp, continuePlayingWithoutSoundOnLostAudioSession: false, placeholderColor: .clear, captureProtected: peer.isCopyProtectionEnabled, storeAfterDownload: nil) if videoContent.id != self.videoContent?.id { self.videoNode?.removeFromSupernode() diff --git a/submodules/TelegramUI/Components/Resources/FetchVideoMediaResource/Sources/FetchVideoMediaResource.swift b/submodules/TelegramUI/Components/Resources/FetchVideoMediaResource/Sources/FetchVideoMediaResource.swift index fd926aaa0b..357e5a00ad 100644 --- a/submodules/TelegramUI/Components/Resources/FetchVideoMediaResource/Sources/FetchVideoMediaResource.swift +++ b/submodules/TelegramUI/Components/Resources/FetchVideoMediaResource/Sources/FetchVideoMediaResource.swift @@ -910,6 +910,7 @@ private extension MediaEditorValues { audioTrackOffset: nil, audioTrackVolume: nil, audioTrackSamples: nil, + coverImageTimestamp: nil, qualityPreset: qualityPreset ) } @@ -1053,6 +1054,7 @@ private extension MediaEditorValues { audioTrackOffset: nil, audioTrackVolume: nil, audioTrackSamples: nil, + coverImageTimestamp: nil, qualityPreset: qualityPreset ) } diff --git a/submodules/TelegramUI/Components/ShareWithPeersScreen/Sources/CoverListItemComponent.swift b/submodules/TelegramUI/Components/ShareWithPeersScreen/Sources/CoverListItemComponent.swift new file mode 100644 index 0000000000..364a948ddc --- /dev/null +++ b/submodules/TelegramUI/Components/ShareWithPeersScreen/Sources/CoverListItemComponent.swift @@ -0,0 +1,144 @@ +import Foundation +import UIKit +import Display +import ComponentFlow +import MultilineTextComponent +import TelegramPresentationData +import SwitchComponent + +final class CoverListItemComponent: Component { + let theme: PresentationTheme + let title: String + let image: UIImage? + let hasNext: Bool + let action: () -> Void + + init( + theme: PresentationTheme, + title: String, + image: UIImage?, + hasNext: Bool, + action: @escaping () -> Void + ) { + self.theme = theme + self.title = title + self.image = image + self.hasNext = hasNext + self.action = action + } + + static func ==(lhs: CoverListItemComponent, rhs: CoverListItemComponent) -> Bool { + if lhs.theme !== rhs.theme { + return false + } + if lhs.title != rhs.title { + return false + } + if lhs.image !== rhs.image { + return false + } + if lhs.hasNext != rhs.hasNext { + return false + } + return true + } + + final class View: UIView { + private let containerButton: HighlightTrackingButton + + private let title = ComponentView() + private let icon = ComponentView() + private let separatorLayer: SimpleLayer + + private var component: CoverListItemComponent? + private weak var state: EmptyComponentState? 
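// A framework-free sketch of the row that CoverListItemComponent lays out with ComponentFlow
// further down in this file: a 44pt row with the title on the left, a rounded 30pt cover
// thumbnail on the right, and a hairline separator shown only when another row follows.
// CoverRowView is an illustrative stand-in, not part of the patch.
import UIKit

final class CoverRowView: UIView {
    private let titleLabel = UILabel()
    private let thumbnailView = UIImageView()
    private let separator = UIView()

    var hasNext: Bool = false {
        didSet { separator.isHidden = !hasNext }
    }

    override init(frame: CGRect) {
        super.init(frame: frame)
        titleLabel.font = .systemFont(ofSize: 17.0)
        thumbnailView.contentMode = .scaleAspectFill
        thumbnailView.clipsToBounds = true
        thumbnailView.layer.cornerRadius = 5.0
        separator.backgroundColor = .separator
        separator.isHidden = true
        addSubview(titleLabel)
        addSubview(thumbnailView)
        addSubview(separator)
    }

    required init?(coder: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }

    func configure(title: String, coverImage: UIImage?) {
        titleLabel.text = title
        thumbnailView.image = coverImage
        setNeedsLayout()
    }

    override func layoutSubviews() {
        super.layoutSubviews()
        let rowHeight: CGFloat = 44.0
        let inset: CGFloat = 16.0
        let thumbnailSize = CGSize(width: 30.0, height: 30.0)

        titleLabel.sizeToFit()
        titleLabel.frame.origin = CGPoint(x: inset, y: (rowHeight - titleLabel.bounds.height) / 2.0)

        thumbnailView.frame = CGRect(
            origin: CGPoint(x: bounds.width - inset - thumbnailSize.width, y: (rowHeight - thumbnailSize.height) / 2.0),
            size: thumbnailSize
        )

        // The separator is inset to match the title and sits just below the row content.
        let hairline: CGFloat = 1.0 / UIScreen.main.scale
        separator.frame = CGRect(x: inset, y: rowHeight - hairline, width: bounds.width - inset, height: hairline)
    }
}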
+ + override init(frame: CGRect) { + self.separatorLayer = SimpleLayer() + + self.containerButton = HighlightTrackingButton() + + super.init(frame: frame) + + self.layer.addSublayer(self.separatorLayer) + self.addSubview(self.containerButton) + + self.containerButton.addTarget(self, action: #selector(self.pressed), for: .touchUpInside) + } + + required init?(coder: NSCoder) { + fatalError("init(coder:) has not been implemented") + } + + @objc private func pressed() { + guard let component = self.component else { + return + } + component.action() + } + + func update(component: CoverListItemComponent, availableSize: CGSize, state: EmptyComponentState, environment: Environment, transition: ComponentTransition) -> CGSize { + let themeUpdated = self.component?.theme !== component.theme + + self.component = component + self.state = state + + let height: CGFloat = 44.0 + let verticalInset: CGFloat = 0.0 + let leftInset: CGFloat = 16.0 + let rightInset: CGFloat = 16.0 + + let iconSize = self.icon.update( + transition: .immediate, + component: AnyComponent(Image(image: component.image, contentMode: .scaleAspectFill)), + environment: {}, + containerSize: CGSize(width: 30.0, height: 30.0) + ) + + let titleSize = self.title.update( + transition: .immediate, + component: AnyComponent(MultilineTextComponent( + text: .plain(NSAttributedString(string: component.title, font: Font.regular(17.0), textColor: component.theme.list.itemPrimaryTextColor)) + )), + environment: {}, + containerSize: CGSize(width: availableSize.width - leftInset - rightInset, height: 100.0) + ) + + let titleFrame = CGRect(origin: CGPoint(x: leftInset, y: floorToScreenPixels((height - titleSize.height) / 2.0)), size: titleSize) + if let titleView = self.title.view { + if titleView.superview == nil { + titleView.isUserInteractionEnabled = false + self.containerButton.addSubview(titleView) + } + titleView.frame = titleFrame + } + if let iconView = self.icon.view { + if iconView.superview == nil { + iconView.clipsToBounds = true + iconView.layer.cornerRadius = 5.0 + self.containerButton.addSubview(iconView) + } + transition.setFrame(view: iconView, frame: CGRect(origin: CGPoint(x: availableSize.width - rightInset - iconSize.width, y: floorToScreenPixels((height - iconSize.height) / 2.0)), size: iconSize)) + } + + if themeUpdated { + self.separatorLayer.backgroundColor = component.theme.list.itemPlainSeparatorColor.cgColor + } + transition.setFrame(layer: self.separatorLayer, frame: CGRect(origin: CGPoint(x: leftInset, y: height), size: CGSize(width: availableSize.width - leftInset, height: UIScreenPixel))) + self.separatorLayer.isHidden = !component.hasNext + + let containerFrame = CGRect(origin: CGPoint(x: 0.0, y: verticalInset), size: CGSize(width: availableSize.width, height: height - verticalInset * 2.0)) + transition.setFrame(view: self.containerButton, frame: containerFrame) + + return CGSize(width: availableSize.width, height: height) + } + } + + func makeView() -> View { + return View(frame: CGRect()) + } + + func update(view: View, availableSize: CGSize, state: EmptyComponentState, environment: Environment, transition: ComponentTransition) -> CGSize { + return view.update(component: self, availableSize: availableSize, state: state, environment: environment, transition: transition) + } +} diff --git a/submodules/TelegramUI/Components/ShareWithPeersScreen/Sources/ShareWithPeersScreen.swift b/submodules/TelegramUI/Components/ShareWithPeersScreen/Sources/ShareWithPeersScreen.swift index 130b156460..b16bd59352 100644 --- 
a/submodules/TelegramUI/Components/ShareWithPeersScreen/Sources/ShareWithPeersScreen.swift +++ b/submodules/TelegramUI/Components/ShareWithPeersScreen/Sources/ShareWithPeersScreen.swift @@ -38,9 +38,11 @@ final class ShareWithPeersScreenComponent: Component { let mentions: [String] let categoryItems: [CategoryItem] let optionItems: [OptionItem] + let coverItem: CoverItem? let completion: (EnginePeer.Id?, EngineStoryPrivacy, Bool, Bool, [EnginePeer], Bool) -> Void let editCategory: (EngineStoryPrivacy, Bool, Bool) -> Void let editBlockedPeers: (EngineStoryPrivacy, Bool, Bool) -> Void + let editCover: () -> Void let peerCompletion: (EnginePeer.Id) -> Void init( @@ -54,9 +56,11 @@ final class ShareWithPeersScreenComponent: Component { mentions: [String], categoryItems: [CategoryItem], optionItems: [OptionItem], + coverItem: CoverItem?, completion: @escaping (EnginePeer.Id?, EngineStoryPrivacy, Bool, Bool, [EnginePeer], Bool) -> Void, editCategory: @escaping (EngineStoryPrivacy, Bool, Bool) -> Void, editBlockedPeers: @escaping (EngineStoryPrivacy, Bool, Bool) -> Void, + editCover: @escaping () -> Void, peerCompletion: @escaping (EnginePeer.Id) -> Void ) { self.context = context @@ -69,9 +73,11 @@ final class ShareWithPeersScreenComponent: Component { self.mentions = mentions self.categoryItems = categoryItems self.optionItems = optionItems + self.coverItem = coverItem self.completion = completion self.editCategory = editCategory self.editBlockedPeers = editBlockedPeers + self.editCover = editCover self.peerCompletion = peerCompletion } @@ -106,6 +112,9 @@ final class ShareWithPeersScreenComponent: Component { if lhs.optionItems != rhs.optionItems { return false } + if lhs.coverItem != rhs.coverItem { + return false + } return true } @@ -258,6 +267,33 @@ final class ShareWithPeersScreenComponent: Component { return false } } + + enum CoverId: Int, Hashable { + case choose = 0 + } + + final class CoverItem: Equatable { + let id: CoverId + let title: String + let image: UIImage? + + init( + id: CoverId, + title: String, + image: UIImage? + ) { + self.id = id + self.title = title + self.image = image + } + + static func ==(lhs: CoverItem, rhs: CoverItem) -> Bool { + if lhs === rhs { + return true + } + return false + } + } final class View: UIView, UIScrollViewDelegate { private let dimView: UIView @@ -1607,6 +1643,90 @@ final class ShareWithPeersScreenComponent: Component { footerText = isSendAsGroup ? 
environment.strings.Story_Privacy_KeepOnGroupPageInfo(footerValue).string : environment.strings.Story_Privacy_KeepOnChannelPageInfo(footerValue).string } + let footerSize = sectionFooter.update( + transition: sectionFooterTransition, + component: AnyComponent(MultilineTextComponent( + text: .plain(NSAttributedString(string: footerText, font: Font.regular(13.0), textColor: environment.theme.list.freeTextColor)), + maximumNumberOfLines: 0, + lineSpacing: 0.2 + )), + environment: {}, + containerSize: CGSize(width: itemLayout.containerSize.width - 16.0 * 2.0, height: itemLayout.contentHeight) + ) + let footerFrame = CGRect(origin: CGPoint(x: itemLayout.sideInset + 16.0, y: sectionOffset + section.totalHeight + 7.0), size: footerSize) + if let footerView = sectionFooter.view { + if footerView.superview == nil { + self.itemContainerView.addSubview(footerView) + } + sectionFooterTransition.setFrame(view: footerView, frame: footerFrame) + } + sectionOffset += footerSize.height + } else if section.id == 4 && section.itemCount > 0 { + if let item = component.coverItem { + let itemFrame = CGRect(origin: CGPoint(x: itemLayout.sideInset, y: sectionOffset + section.insets.top + CGFloat(0) * section.itemHeight), size: CGSize(width: itemLayout.containerSize.width, height: section.itemHeight)) + if !visibleBounds.intersects(itemFrame) { + continue + } + + let itemId = AnyHashable(item.id) + validIds.append(itemId) + + var itemTransition = transition + let visibleItem: ComponentView + if let current = self.visibleItems[itemId] { + visibleItem = current + } else { + visibleItem = ComponentView() + if !transition.animation.isImmediate { + itemTransition = .immediate + } + self.visibleItems[itemId] = visibleItem + } + + let _ = visibleItem.update( + transition: itemTransition, + component: AnyComponent(CoverListItemComponent( + theme: environment.theme, + title: item.title, + image: item.image, + hasNext: false, + action: { + component.editCover() + } + )), + environment: {}, + containerSize: itemFrame.size + ) + if let itemView = visibleItem.view { + if itemView.superview == nil { + if let minSectionHeader { + self.itemContainerView.insertSubview(itemView, belowSubview: minSectionHeader) + } else { + self.itemContainerView.addSubview(itemView) + } + } + itemTransition.setFrame(view: itemView, frame: itemFrame) + } + } + + let sectionFooter: ComponentView + var sectionFooterTransition = transition + if let current = self.visibleSectionFooters[section.id] { + sectionFooter = current + } else { + if !transition.animation.isImmediate { + sectionFooterTransition = .immediate + } + sectionFooter = ComponentView() + self.visibleSectionFooters[section.id] = sectionFooter + } + + var footerText = "Choose a frame from the story to show in your Profile." + + if let sendAsPeerId = self.sendAsPeerId, sendAsPeerId.isGroupOrChannel == true { + footerText = isSendAsGroup ? "Choose a frame from the story to show in group profile.": "Choose a frame from the story to show in channel profile." + } + let footerSize = sectionFooter.update( transition: sectionFooterTransition, component: AnyComponent(MultilineTextComponent( @@ -1625,7 +1745,6 @@ final class ShareWithPeersScreenComponent: Component { sectionFooterTransition.setFrame(view: footerView, frame: footerFrame) } } - sectionOffset += section.totalHeight } @@ -1873,6 +1992,8 @@ final class ShareWithPeersScreenComponent: Component { } private var currentHasChannels: Bool? + private var currentHasCover: Bool? 
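// The hasCover flag stored above is derived from the privacy sheet's state: the
// "Choose Story Cover" row is shown only when the story is kept on the profile page (the
// pin option) and a cover frame is available, and a spring transition is requested whenever
// that visibility flips after the first layout. A condensed sketch of that rule; Option,
// Transition and CoverRowVisibility are illustrative stand-ins for the component's own types.
enum Option: Hashable {
    case pin
}

enum Transition: Equatable {
    case immediate
    case spring(duration: Double)
}

struct CoverRowVisibility {
    private var currentHasCover: Bool?

    mutating func update(selectedOptions: Set<Option>, hasCoverImage: Bool) -> (showsCover: Bool, transition: Transition) {
        let hasCover = selectedOptions.contains(.pin) && hasCoverImage
        // Animate only when the row actually appears or disappears; the first pass is immediate.
        let transition: Transition
        if let currentHasCover = self.currentHasCover, currentHasCover != hasCover {
            transition = .spring(duration: 0.4)
        } else {
            transition = .immediate
        }
        self.currentHasCover = hasCover
        return (hasCover, transition)
    }
}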
+ func update(component: ShareWithPeersScreenComponent, availableSize: CGSize, state: EmptyComponentState, environment: Environment, transition: ComponentTransition) -> CGSize { guard !self.isDismissed else { return availableSize @@ -1886,6 +2007,7 @@ final class ShareWithPeersScreenComponent: Component { var hasCategories = false var hasChannels = false + var hasCover = false if case .stories = component.stateContext.subject { if let peerId = self.sendAsPeerId, peerId.isGroupOrChannel { } else { @@ -1899,6 +2021,14 @@ final class ShareWithPeersScreenComponent: Component { contentTransition = .spring(duration: 0.4) } self.currentHasChannels = hasChannels + + if self.selectedOptions.contains(.pin) && component.coverItem != nil { + hasCover = true + } + if let currentHasCover = self.currentHasCover, currentHasCover != hasCover { + contentTransition = .spring(duration: 0.4) + } + self.currentHasCover = hasCover } else if case .members = component.stateContext.subject { self.dismissPanGesture?.isEnabled = false } else if case .channels = component.stateContext.subject { @@ -2306,6 +2436,15 @@ final class ShareWithPeersScreenComponent: Component { itemHeight: optionItemSize.height, itemCount: component.optionItems.count )) + + if hasCover { + sections.append(ItemLayout.Section( + id: 4, + insets: UIEdgeInsets(top: 28.0, left: 0.0, bottom: 0.0, right: 0.0), + itemHeight: optionItemSize.height, + itemCount: 1 + )) + } } else { sections.append(ItemLayout.Section( id: 1, @@ -2479,7 +2618,11 @@ final class ShareWithPeersScreenComponent: Component { inset += 10.0 + environment.safeInsets.bottom + 50.0 + footersTotalHeight } else { if !hasCategories { - inset = 314.0 + if self.selectedOptions.contains(.pin) { + inset = 422.0 + } else { + inset = 314.0 + } inset += 10.0 + environment.safeInsets.bottom + 50.0 + footersTotalHeight } else { if hasChannels { @@ -2489,8 +2632,12 @@ final class ShareWithPeersScreenComponent: Component { inset = 1000.0 } } else { - inset = 464.0 - inset += 10.0 + environment.safeInsets.bottom + 50.0 + footersTotalHeight + if self.selectedOptions.contains(.pin) { + inset = 1000.0 + } else { + inset = 464.0 + inset += 10.0 + environment.safeInsets.bottom + 50.0 + footersTotalHeight + } } } } @@ -2849,10 +2996,12 @@ public class ShareWithPeersScreen: ViewControllerComponentContainer { pin: Bool = false, timeout: Int = 0, mentions: [String] = [], + coverImage: UIImage? = nil, stateContext: StateContext, completion: @escaping (EnginePeer.Id?, EngineStoryPrivacy, Bool, Bool, [EnginePeer], Bool) -> Void, editCategory: @escaping (EngineStoryPrivacy, Bool, Bool) -> Void = { _, _, _ in }, editBlockedPeers: @escaping (EngineStoryPrivacy, Bool, Bool) -> Void = { _, _, _ in }, + editCover: @escaping () -> Void = { }, peerCompletion: @escaping (EnginePeer.Id) -> Void = { _ in } ) { self.context = context @@ -2861,6 +3010,7 @@ public class ShareWithPeersScreen: ViewControllerComponentContainer { var categoryItems: [ShareWithPeersScreenComponent.CategoryItem] = [] var optionItems: [ShareWithPeersScreenComponent.OptionItem] = [] + var coverItem: ShareWithPeersScreenComponent.CoverItem? if case let .stories(editing) = stateContext.subject { var everyoneSubtitle = presentationData.strings.Story_Privacy_ExcludePeople if (stateContext.stateValue?.savedSelectedPeers[.everyone]?.count ?? 
0) > 0 { @@ -2994,6 +3144,10 @@ public class ShareWithPeersScreen: ViewControllerComponentContainer { title: presentationData.strings.Story_Privacy_KeepOnMyPage )) } + + if !editing || pin, coverImage != nil { + coverItem = ShareWithPeersScreenComponent.CoverItem(id: .choose, title: "Choose Story Cover", image: coverImage) + } } var theme: ViewControllerComponentContainer.Theme = .dark @@ -3013,9 +3167,11 @@ public class ShareWithPeersScreen: ViewControllerComponentContainer { mentions: mentions, categoryItems: categoryItems, optionItems: optionItems, + coverItem: coverItem, completion: completion, editCategory: editCategory, editBlockedPeers: editBlockedPeers, + editCover: editCover, peerCompletion: peerCompletion ), navigationBarAppearance: .none, theme: theme) diff --git a/submodules/TelegramUI/Components/Stars/StarsTransactionScreen/Sources/StarsTransactionScreen.swift b/submodules/TelegramUI/Components/Stars/StarsTransactionScreen/Sources/StarsTransactionScreen.swift index 5a667fe7fb..21f05af42e 100644 --- a/submodules/TelegramUI/Components/Stars/StarsTransactionScreen/Sources/StarsTransactionScreen.swift +++ b/submodules/TelegramUI/Components/Stars/StarsTransactionScreen/Sources/StarsTransactionScreen.swift @@ -200,8 +200,8 @@ private final class StarsTransactionSheetContent: CombinedComponent { switch subject { case let .transaction(transaction, parentPeer): if transaction.flags.contains(.isGift) { - titleText = "Received Gift" - descriptionText = "Use Stars to unlock content and services on Telegram. [See Examples >]()" + titleText = strings.Stars_Gift_Received_Title + descriptionText = strings.Stars_Gift_Received_Text count = transaction.count countOnTop = true transactionId = transaction.id @@ -218,7 +218,6 @@ private final class StarsTransactionSheetContent: CombinedComponent { photo = nil isRefund = false isGift = true - delayedCloseOnOpenPeer = false } else { switch transaction.peer { case let .peer(peer): @@ -320,9 +319,9 @@ private final class StarsTransactionSheetContent: CombinedComponent { delayedCloseOnOpenPeer = false case let .gift(message): let incoming = message.flags.contains(.Incoming) - titleText = incoming ? "Received Gift" : "Sent Gift" + titleText = incoming ? strings.Stars_Gift_Received_Title : strings.Stars_Gift_Sent_Title let peerName = state.peerMap[message.id.peerId]?.compactDisplayTitle ?? "" - descriptionText = incoming ? "Use Stars to unlock content and services on Telegram. [See Examples >]()" : "With Stars, \(peerName) will be able to unlock content and services on Telegram. [See Examples >]()" + descriptionText = incoming ? strings.Stars_Gift_Received_Text : strings.Stars_Gift_Sent_Text(peerName).string if let action = message.media.first(where: { $0 is TelegramMediaAction }) as? 
TelegramMediaAction, case let .giftStars(_, _, countValue, _, _, _) = action.action { count = countValue if !incoming { @@ -346,7 +345,6 @@ private final class StarsTransactionSheetContent: CombinedComponent { photo = nil isRefund = false isGift = true - delayedCloseOnOpenPeer = false } if let spaceRegex { let nsRange = NSRange(descriptionText.startIndex..., in: descriptionText) @@ -484,10 +482,7 @@ private final class StarsTransactionSheetContent: CombinedComponent { ) ), action: { - if toPeer.id.namespace == Namespaces.Peer.CloudUser && toPeer.id.id._internalGetInt64Value() == 777000 { - let presentationData = component.context.sharedContext.currentPresentationData.with { $0 } - component.context.sharedContext.openExternalUrl(context: component.context, urlContext: .generic, url: strings.Stars_Transaction_FragmentUnknown_URL, forceExternal: true, presentationData: presentationData, navigationController: nil, dismissInput: {}) - } else if delayedCloseOnOpenPeer { + if delayedCloseOnOpenPeer { component.openPeer(toPeer) Queue.mainQueue().after(1.0, { component.cancel(false) @@ -607,8 +602,11 @@ private final class StarsTransactionSheetContent: CombinedComponent { } }, tapAction: { attributes, _ in - let presentationData = component.context.sharedContext.currentPresentationData.with { $0 } - component.context.sharedContext.openExternalUrl(context: component.context, urlContext: .generic, url: strings.Stars_Transaction_Terms_URL, forceExternal: true, presentationData: presentationData, navigationController: nil, dismissInput: {}) + if let controller = controller() as? StarsTransactionScreen, let navigationController = controller.navigationController as? NavigationController { + let presentationData = component.context.sharedContext.currentPresentationData.with { $0 } + component.context.sharedContext.openExternalUrl(context: component.context, urlContext: .generic, url: strings.Stars_Transaction_Terms_URL, forceExternal: false, presentationData: presentationData, navigationController: navigationController, dismissInput: {}) + component.cancel(true) + } } ), availableSize: CGSize(width: context.availableSize.width - textSideInset * 2.0, height: context.availableSize.height), diff --git a/submodules/TelegramUI/Components/Stars/StarsTransactionsScreen/Sources/StarsStatisticsScreen.swift b/submodules/TelegramUI/Components/Stars/StarsTransactionsScreen/Sources/StarsStatisticsScreen.swift index d8ea1cab30..ee6ff8b47f 100644 --- a/submodules/TelegramUI/Components/Stars/StarsTransactionsScreen/Sources/StarsStatisticsScreen.swift +++ b/submodules/TelegramUI/Components/Stars/StarsTransactionsScreen/Sources/StarsStatisticsScreen.swift @@ -466,8 +466,10 @@ final class StarsStatisticsScreenComponent: Component { return nil } }, - tapAction: { attributes, _ in - component.context.sharedContext.openExternalUrl(context: component.context, urlContext: .generic, url: strings.Stars_BotRevenue_Withdraw_Info_URL, forceExternal: true, presentationData: presentationData, navigationController: nil, dismissInput: {}) + tapAction: { [weak self] attributes, _ in + if let controller = self?.controller?() as? StarsStatisticsScreen, let navigationController = controller.navigationController as? 
NavigationController { + component.context.sharedContext.openExternalUrl(context: component.context, urlContext: .generic, url: strings.Stars_BotRevenue_Withdraw_Info_URL, forceExternal: false, presentationData: presentationData, navigationController: navigationController, dismissInput: {}) + } } )), items: [AnyComponentWithIdentity(id: 0, component: AnyComponent( diff --git a/submodules/TelegramUI/Components/Stars/StarsTransactionsScreen/Sources/StarsTransactionsScreen.swift b/submodules/TelegramUI/Components/Stars/StarsTransactionsScreen/Sources/StarsTransactionsScreen.swift index 638f2d3f57..04df414332 100644 --- a/submodules/TelegramUI/Components/Stars/StarsTransactionsScreen/Sources/StarsTransactionsScreen.swift +++ b/submodules/TelegramUI/Components/Stars/StarsTransactionsScreen/Sources/StarsTransactionsScreen.swift @@ -857,8 +857,13 @@ public final class StarsTransactionsScreen: ViewControllerComponentContainer { return } + if let navigationController = self.navigationController as? NavigationController { + var controllers = navigationController.viewControllers + controllers = controllers.filter { !($0 is ContactSelectionController) } + navigationController.setViewControllers(controllers, animated: true) + } + Queue.mainQueue().after(2.0) { - //TODO:localize let presentationData = context.sharedContext.currentPresentationData.with { $0 } let resultController = UndoOverlayController( presentationData: presentationData, @@ -867,8 +872,8 @@ public final class StarsTransactionsScreen: ViewControllerComponentContainer { scale: 0.066, colors: [:], title: nil, - text: "\(stars) Stars sent.", - customUndoText: "View Chat", + text: presentationData.strings.Stars_Intro_StarsSent(Int32(stars)), + customUndoText: presentationData.strings.Stars_Intro_StarsSent_ViewChat, timeout: nil ), elevatedLayout: false, diff --git a/submodules/TelegramUI/Components/Stars/StarsWithdrawalScreen/Sources/StarsWithdrawalScreen.swift b/submodules/TelegramUI/Components/Stars/StarsWithdrawalScreen/Sources/StarsWithdrawalScreen.swift index 87f7758a33..0270432457 100644 --- a/submodules/TelegramUI/Components/Stars/StarsWithdrawalScreen/Sources/StarsWithdrawalScreen.swift +++ b/submodules/TelegramUI/Components/Stars/StarsWithdrawalScreen/Sources/StarsWithdrawalScreen.swift @@ -66,6 +66,8 @@ private final class SheetContent: CombinedComponent { let component = context.component let state = context.state + let controller = environment.controller + let theme = environment.theme.withModalBlocksBackground() let strings = environment.strings let presentationData = component.context.sharedContext.currentPresentationData.with { $0 } @@ -229,7 +231,9 @@ private final class SheetContent: CombinedComponent { } }, tapAction: { attributes, _ in - component.context.sharedContext.openExternalUrl(context: component.context, urlContext: .generic, url: strings.Stars_PaidContent_AmountInfo_URL, forceExternal: true, presentationData: presentationData, navigationController: nil, dismissInput: {}) + if let controller = controller() as? StarsWithdrawScreen, let navigationController = controller.navigationController as? 
NavigationController { + component.context.sharedContext.openExternalUrl(context: component.context, urlContext: .generic, url: strings.Stars_PaidContent_AmountInfo_URL, forceExternal: false, presentationData: presentationData, navigationController: navigationController, dismissInput: {}) + } } )) case let .reaction(starsToTop): @@ -307,7 +311,6 @@ private final class SheetContent: CombinedComponent { buttonAttributedString.addAttribute(.baselineOffset, value: 1.0, range: NSRange(range, in: buttonAttributedString.string)) } - let controller = environment.controller let button = button.update( component: ButtonComponent( background: ButtonComponent.Background( diff --git a/submodules/TelegramUI/Components/StickerPickerScreen/Sources/StickerPickerScreen.swift b/submodules/TelegramUI/Components/StickerPickerScreen/Sources/StickerPickerScreen.swift index 783701b2b9..e9a4cdb727 100644 --- a/submodules/TelegramUI/Components/StickerPickerScreen/Sources/StickerPickerScreen.swift +++ b/submodules/TelegramUI/Components/StickerPickerScreen/Sources/StickerPickerScreen.swift @@ -2065,6 +2065,8 @@ public class StickerPickerScreen: ViewController { } case none + case notDetermined + case notAllowed case fetching case loaded(StickerPickerScreen.Weather.LoadedWeather) } @@ -2722,66 +2724,64 @@ final class StoryStickersContentView: UIView, EmojiCustomContentView { } else { maxHorizontalItems = 3 - + + let weatherButtonContent: AnyComponent switch self.weather { + case .notAllowed, .notDetermined: + weatherButtonContent = AnyComponent( + InteractiveStickerButtonContent( + context: self.context, + theme: theme, + title: stringForTemperature(24), + iconName: "☀️", + iconFile: self.context.animatedEmojiStickersValue["☀️"]?.first?.file, + useOpaqueTheme: useOpaqueTheme, + tintContainerView: self.tintContainerView + ) + ) case let .loaded(weather): - items.append( - AnyComponentWithIdentity( - id: "weather", - component: AnyComponent( - CameraButton( - content: AnyComponentWithIdentity( - id: "weather", - component: AnyComponent( - InteractiveStickerButtonContent( - context: self.context, - theme: theme, - title: stringForTemperature(weather.temperature), - iconName: weather.emoji, - iconFile: weather.emojiFile, - useOpaqueTheme: useOpaqueTheme, - tintContainerView: self.tintContainerView - ) - ) - ), - action: { [weak self] in - if let self { - self.weatherAction() - } - }) - ) + weatherButtonContent = AnyComponent( + InteractiveStickerButtonContent( + context: self.context, + theme: theme, + title: stringForTemperature(weather.temperature), + iconName: weather.emoji, + iconFile: weather.emojiFile, + useOpaqueTheme: useOpaqueTheme, + tintContainerView: self.tintContainerView ) ) case .fetching: - items.append( - AnyComponentWithIdentity( - id: "weather", - component: AnyComponent( - CameraButton( - content: AnyComponentWithIdentity( - id: "weather", - component: AnyComponent( - InteractiveStickerButtonContent( - context: self.context, - theme: theme, - title: nil, - iconName: nil, - useOpaqueTheme: useOpaqueTheme, - tintContainerView: self.tintContainerView - ) - ) - ), - action: { [weak self] in - if let self { - self.weatherAction() - } - }) - ) + weatherButtonContent = AnyComponent( + InteractiveStickerButtonContent( + context: self.context, + theme: theme, + title: nil, + iconName: nil, + useOpaqueTheme: useOpaqueTheme, + tintContainerView: self.tintContainerView ) ) default: fatalError() } + items.append( + AnyComponentWithIdentity( + id: "weather", + component: AnyComponent( + CameraButton( + content: 
AnyComponentWithIdentity( + id: "weather", + component: weatherButtonContent + ), + action: { [weak self] in + if let self { + self.weatherAction() + } + }) + ) + ) + ) } items.append( diff --git a/submodules/TelegramUI/Components/Stories/StoryContainerScreen/Sources/StoryChatContent.swift b/submodules/TelegramUI/Components/Stories/StoryContainerScreen/Sources/StoryChatContent.swift index e5f5770769..a0f38bbe68 100644 --- a/submodules/TelegramUI/Components/Stories/StoryContainerScreen/Sources/StoryChatContent.swift +++ b/submodules/TelegramUI/Components/Stories/StoryContainerScreen/Sources/StoryChatContent.swift @@ -1820,7 +1820,7 @@ public func preloadStoryMedia(context: AccountContext, info: StoryPreloadInfo) - case let .file(file): var fetchRange: (Range, MediaBoxFetchPriority)? for attribute in file.attributes { - if case let .Video(_, _, _, preloadSize) = attribute { + if case let .Video(_, _, _, preloadSize, _) = attribute { if let preloadSize { fetchRange = (0 ..< Int64(preloadSize), .default) } @@ -2045,7 +2045,7 @@ public func waitUntilStoryMediaPreloaded(context: AccountContext, peerId: Engine case let .file(file): var fetchRange: (Range, MediaBoxFetchPriority)? for attribute in file.attributes { - if case let .Video(_, _, _, preloadSize) = attribute { + if case let .Video(_, _, _, preloadSize, _) = attribute { if let preloadSize { fetchRange = (0 ..< Int64(preloadSize), .default) } diff --git a/submodules/TelegramUI/Components/Stories/StoryContainerScreen/Sources/StoryContainerScreen.swift b/submodules/TelegramUI/Components/Stories/StoryContainerScreen/Sources/StoryContainerScreen.swift index 8ed7f10218..6e44780115 100644 --- a/submodules/TelegramUI/Components/Stories/StoryContainerScreen/Sources/StoryContainerScreen.swift +++ b/submodules/TelegramUI/Components/Stories/StoryContainerScreen/Sources/StoryContainerScreen.swift @@ -1136,7 +1136,7 @@ private final class StoryContainerScreenComponent: Component { var isSilentVideo = false if case let .file(file) = slice.item.storyItem.media { for attribute in file.attributes { - if case let .Video(_, _, flags, _) = attribute { + if case let .Video(_, _, flags, _, _) = attribute { if flags.contains(.isSilent) { isSilentVideo = true } diff --git a/submodules/TelegramUI/Components/Stories/StoryContainerScreen/Sources/StoryItemSetContainerComponent.swift b/submodules/TelegramUI/Components/Stories/StoryContainerScreen/Sources/StoryItemSetContainerComponent.swift index f7060b88da..f04aca6cca 100644 --- a/submodules/TelegramUI/Components/Stories/StoryContainerScreen/Sources/StoryItemSetContainerComponent.swift +++ b/submodules/TelegramUI/Components/Stories/StoryContainerScreen/Sources/StoryItemSetContainerComponent.swift @@ -3801,7 +3801,7 @@ public final class StoryItemSetContainerComponent: Component { isVideo = true soundAlpha = 1.0 for attribute in file.attributes { - if case let .Video(_, _, flags, _) = attribute { + if case let .Video(_, _, flags, _, _) = attribute { if flags.contains(.isSilent) { isSilentVideo = true soundAlpha = 0.5 @@ -3834,7 +3834,7 @@ public final class StoryItemSetContainerComponent: Component { var isSilentVideo = false if case let .file(file) = component.slice.item.storyItem.media { for attribute in file.attributes { - if case let .Video(_, _, flags, _) = attribute { + if case let .Video(_, _, flags, _, _) = attribute { if flags.contains(.isSilent) { isSilentVideo = true } diff --git a/submodules/TelegramUI/Components/VideoMessageCameraScreen/Sources/VideoMessageCameraScreen.swift 
b/submodules/TelegramUI/Components/VideoMessageCameraScreen/Sources/VideoMessageCameraScreen.swift index 4c67fe5171..02de03e48c 100644 --- a/submodules/TelegramUI/Components/VideoMessageCameraScreen/Sources/VideoMessageCameraScreen.swift +++ b/submodules/TelegramUI/Components/VideoMessageCameraScreen/Sources/VideoMessageCameraScreen.swift @@ -81,7 +81,11 @@ struct CameraState: Equatable { } func updatedRecording(_ recording: Recording) -> CameraState { - return CameraState(position: self.position, flashMode: self.flashMode, flashModeDidChange: self.flashModeDidChange, flashTint: self.flashTint, flashTintSize: self.flashTintSize, recording: recording, duration: self.duration, isDualCameraEnabled: self.isDualCameraEnabled, isViewOnceEnabled: self.isViewOnceEnabled) + var flashModeDidChange = self.flashModeDidChange + if case .none = self.recording { + flashModeDidChange = false + } + return CameraState(position: self.position, flashMode: self.flashMode, flashModeDidChange: flashModeDidChange, flashTint: self.flashTint, flashTintSize: self.flashTintSize, recording: recording, duration: self.duration, isDualCameraEnabled: self.isDualCameraEnabled, isViewOnceEnabled: self.isViewOnceEnabled) } func updatedDuration(_ duration: Double) -> CameraState { @@ -121,6 +125,7 @@ private final class VideoMessageCameraScreenComponent: CombinedComponent { let push: (ViewController) -> Void let startRecording: ActionSlot let stopRecording: ActionSlot + let cancelRecording: ActionSlot let completion: ActionSlot init( @@ -135,6 +140,7 @@ private final class VideoMessageCameraScreenComponent: CombinedComponent { push: @escaping (ViewController) -> Void, startRecording: ActionSlot, stopRecording: ActionSlot, + cancelRecording: ActionSlot, completion: ActionSlot ) { self.context = context @@ -148,6 +154,7 @@ private final class VideoMessageCameraScreenComponent: CombinedComponent { self.push = push self.startRecording = startRecording self.stopRecording = stopRecording + self.cancelRecording = cancelRecording self.completion = completion } @@ -216,6 +223,7 @@ private final class VideoMessageCameraScreenComponent: CombinedComponent { private let present: (ViewController) -> Void private let startRecording: ActionSlot private let stopRecording: ActionSlot + private let cancelRecording: ActionSlot private let completion: ActionSlot private let getController: () -> VideoMessageCameraScreen? @@ -234,6 +242,7 @@ private final class VideoMessageCameraScreenComponent: CombinedComponent { present: @escaping (ViewController) -> Void, startRecording: ActionSlot, stopRecording: ActionSlot, + cancelRecording: ActionSlot, completion: ActionSlot, getController: @escaping () -> VideoMessageCameraScreen? 
= { return nil @@ -243,6 +252,7 @@ private final class VideoMessageCameraScreenComponent: CombinedComponent { self.present = present self.startRecording = startRecording self.stopRecording = stopRecording + self.cancelRecording = cancelRecording self.completion = completion self.getController = getController @@ -260,6 +270,10 @@ private final class VideoMessageCameraScreenComponent: CombinedComponent { self.stopRecording.connect({ [weak self] _ in self?.stopVideoRecording() }) + + self.cancelRecording.connect({ [weak self] _ in + self?.cancelVideoRecording() + }) } deinit { @@ -284,19 +298,28 @@ private final class VideoMessageCameraScreenComponent: CombinedComponent { } self.lastFlipTimestamp = currentTimestamp + let isFrontCamera = controller.cameraState.position == .back camera.togglePosition() - + self.hapticFeedback.impact(.veryLight) + + self.updateScreenBrightness(isFrontCamera: isFrontCamera) + + if isFrontCamera { + camera.setTorchActive(false) + } else { + camera.setTorchActive(controller.cameraState.flashMode == .on) + } } func toggleFlashMode() { guard let controller = self.getController(), let camera = controller.camera else { return } - var flashOn = false + var isFlashOn = false switch controller.cameraState.flashMode { case .off: - flashOn = true + isFlashOn = true camera.setFlashMode(.on) case .on: camera.setFlashMode(.off) @@ -305,22 +328,29 @@ private final class VideoMessageCameraScreenComponent: CombinedComponent { } self.hapticFeedback.impact(.light) - self.updateScreenBrightness(flashOn: flashOn) + self.updateScreenBrightness(isFlashOn: isFlashOn) + + if controller.cameraState.position == .back { + if isFlashOn { + camera.setTorchActive(true) + } else { + camera.setTorchActive(false) + } + } } private var initialBrightness: CGFloat? private var brightnessArguments: (Double, Double, CGFloat, CGFloat)? private var brightnessAnimator: ConstantDisplayLinkAnimator? - func updateScreenBrightness(flashOn: Bool?) { + func updateScreenBrightness(isFrontCamera: Bool? = nil, isFlashOn: Bool? = nil) { guard let controller = self.getController() else { return } - let isFrontCamera = controller.cameraState.position == .front - let isVideo = true - let isFlashOn = flashOn ?? (controller.cameraState.flashMode == .on) + let isFrontCamera = isFrontCamera ?? (controller.cameraState.position == .front) + let isFlashOn = isFlashOn ?? (controller.cameraState.flashMode == .on) - if isFrontCamera && isVideo && isFlashOn { + if isFrontCamera && isFlashOn { if self.initialBrightness == nil { self.initialBrightness = UIScreen.main.brightness self.brightnessArguments = (CACurrentMediaTime(), 0.2, UIScreen.main.brightness, 1.0) @@ -382,11 +412,11 @@ private final class VideoMessageCameraScreenComponent: CombinedComponent { let isFirstRecording = initialDuration.isZero controller.node.resumeCameraCapture() - controller.updatePreviewState({ _ in return nil}, transition: .spring(duration: 0.4)) - controller.node.dismissAllTooltips() controller.updateCameraState({ $0.updatedRecording(pressing ? 
.holding : .handsFree).updatedDuration(initialDuration) }, transition: .spring(duration: 0.4)) + controller.updatePreviewState({ _ in return nil }, transition: .spring(duration: 0.4)) + controller.node.withReadyCamera(isFirstTime: !controller.node.cameraIsActive) { Queue.mainQueue().after(0.15) { self.resultDisposable.set((camera.startRecording() @@ -412,6 +442,10 @@ private final class VideoMessageCameraScreenComponent: CombinedComponent { if initialDuration > 0.0 { controller.onResume() } + + if controller.cameraState.position == .front && controller.cameraState.flashMode == .on { + self.updateScreenBrightness() + } } func stopVideoRecording() { @@ -439,7 +473,7 @@ private final class VideoMessageCameraScreenComponent: CombinedComponent { } })) - if case .front = controller.cameraState.position, let initialBrightness = self.initialBrightness { + if let initialBrightness = self.initialBrightness { self.initialBrightness = nil self.brightnessArguments = (CACurrentMediaTime(), 0.2, UIScreen.main.brightness, initialBrightness) self.animateBrightnessChange() @@ -453,6 +487,14 @@ private final class VideoMessageCameraScreenComponent: CombinedComponent { controller.updateCameraState({ $0.updatedRecording(.handsFree) }, transition: .spring(duration: 0.4)) } + func cancelVideoRecording() { + if let initialBrightness = self.initialBrightness { + self.initialBrightness = nil + self.brightnessArguments = (CACurrentMediaTime(), 0.2, UIScreen.main.brightness, initialBrightness) + self.animateBrightnessChange() + } + } + func updateZoom(fraction: CGFloat) { guard let camera = self.getController()?.camera else { return @@ -462,7 +504,7 @@ private final class VideoMessageCameraScreenComponent: CombinedComponent { } func makeState() -> State { - return State(context: self.context, present: self.present, startRecording: self.startRecording, stopRecording: self.stopRecording, completion: self.completion, getController: self.getController) + return State(context: self.context, present: self.present, startRecording: self.startRecording, stopRecording: self.stopRecording, cancelRecording: self.cancelRecording, completion: self.completion, getController: self.getController) } static var body: Body { @@ -517,7 +559,7 @@ private final class VideoMessageCameraScreenComponent: CombinedComponent { } if !component.isPreviewing { - if case .on = component.cameraState.flashMode { + if case .on = component.cameraState.flashMode, case .front = component.cameraState.position { let frontFlash = frontFlash.update( component: Image(image: state.image(.flashImage, theme: environment.theme), tintColor: component.cameraState.flashTint.color), availableSize: availableSize, @@ -611,7 +653,9 @@ private final class VideoMessageCameraScreenComponent: CombinedComponent { action: { [weak state] in if let state { state.toggleFlashMode() - flashAction.invoke(Void()) + Queue.mainQueue().justDispatch { + flashAction.invoke(Void()) + } } } ), @@ -801,6 +845,7 @@ public class VideoMessageCameraScreen: ViewController { fileprivate let startRecording = ActionSlot() fileprivate let stopRecording = ActionSlot() + fileprivate let cancelRecording = ActionSlot() private let completion = ActionSlot() var cameraState: CameraState { @@ -1432,6 +1477,7 @@ public class VideoMessageCameraScreen: ViewController { }, startRecording: self.startRecording, stopRecording: self.stopRecording, + cancelRecording: self.cancelRecording, completion: self.completion ) ), @@ -1793,7 +1839,7 @@ public class VideoMessageCameraScreen: ViewController { guard let self 
else { return }
- let values = MediaEditorValues(peerId: self.context.account.peerId, originalDimensions: dimensions, cropOffset: .zero, cropRect: CGRect(origin: .zero, size: dimensions.cgSize), cropScale: 1.0, cropRotation: 0.0, cropMirroring: false, cropOrientation: nil, gradientColors: nil, videoTrimRange: self.node.previewState?.trimRange, videoIsMuted: false, videoIsFullHd: false, videoIsMirrored: false, videoVolume: nil, additionalVideoPath: nil, additionalVideoIsDual: false, additionalVideoPosition: nil, additionalVideoScale: nil, additionalVideoRotation: nil, additionalVideoPositionChanges: [], additionalVideoTrimRange: nil, additionalVideoOffset: nil, additionalVideoVolume: nil, nightTheme: false, drawing: nil, maskDrawing: nil, entities: [], toolValues: [:], audioTrack: nil, audioTrackTrimRange: nil, audioTrackOffset: nil, audioTrackVolume: nil, audioTrackSamples: nil, qualityPreset: .videoMessage)
+ let values = MediaEditorValues(peerId: self.context.account.peerId, originalDimensions: dimensions, cropOffset: .zero, cropRect: CGRect(origin: .zero, size: dimensions.cgSize), cropScale: 1.0, cropRotation: 0.0, cropMirroring: false, cropOrientation: nil, gradientColors: nil, videoTrimRange: self.node.previewState?.trimRange, videoIsMuted: false, videoIsFullHd: false, videoIsMirrored: false, videoVolume: nil, additionalVideoPath: nil, additionalVideoIsDual: false, additionalVideoPosition: nil, additionalVideoScale: nil, additionalVideoRotation: nil, additionalVideoPositionChanges: [], additionalVideoTrimRange: nil, additionalVideoOffset: nil, additionalVideoVolume: nil, nightTheme: false, drawing: nil, maskDrawing: nil, entities: [], toolValues: [:], audioTrack: nil, audioTrackTrimRange: nil, audioTrackOffset: nil, audioTrackVolume: nil, audioTrackSamples: nil, coverImageTimestamp: nil, qualityPreset: .videoMessage)
var resourceAdjustments: VideoMediaResourceAdjustments? = nil
if let valuesData = try? JSONEncoder().encode(values) {
@@ -1833,7 +1879,7 @@ public class VideoMessageCameraScreen: ViewController {
context.account.postbox.mediaBox.storeCachedResourceRepresentation(resource, representation: CachedVideoFirstFrameRepresentation(), data: data)
}
- let media = TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.LocalFile, id: Int64.random(in: Int64.min ... Int64.max)), partialReference: nil, resource: resource, previewRepresentations: previewRepresentations, videoThumbnails: [], immediateThumbnailData: nil, mimeType: "video/mp4", size: nil, attributes: [.FileName(fileName: "video.mp4"), .Video(duration: finalDuration, size: video.dimensions, flags: [.instantRoundVideo], preloadSize: nil)])
+ let media = TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.LocalFile, id: Int64.random(in: Int64.min ... Int64.max)), partialReference: nil, resource: resource, previewRepresentations: previewRepresentations, videoThumbnails: [], immediateThumbnailData: nil, mimeType: "video/mp4", size: nil, attributes: [.FileName(fileName: "video.mp4"), .Video(duration: finalDuration, size: video.dimensions, flags: [.instantRoundVideo], preloadSize: nil, coverTime: nil)])
var attributes: [MessageAttribute] = []
if self.cameraState.isViewOnceEnabled {
@@ -1894,6 +1940,8 @@ public class VideoMessageCameraScreen: ViewController {
}
public func discardVideo() {
+ self.node.cancelRecording.invoke(Void())
+
self.requestDismiss(animated: true)
}
diff --git a/submodules/TelegramUI/Images.xcassets/Instant View/Bookmark.imageset/Bookmark.pdf b/submodules/TelegramUI/Images.xcassets/Instant View/Bookmark.imageset/Bookmark.pdf
new file mode 100644
index 0000000000..d413418c5d
Binary files /dev/null and b/submodules/TelegramUI/Images.xcassets/Instant View/Bookmark.imageset/Bookmark.pdf differ
diff --git a/submodules/TelegramUI/Images.xcassets/Instant View/Settings/Browser.imageset/Contents.json b/submodules/TelegramUI/Images.xcassets/Instant View/Bookmark.imageset/Contents.json
similarity index 75%
rename from submodules/TelegramUI/Images.xcassets/Instant View/Settings/Browser.imageset/Contents.json
rename to submodules/TelegramUI/Images.xcassets/Instant View/Bookmark.imageset/Contents.json
index 0fff16d67e..09b651c240 100644
--- a/submodules/TelegramUI/Images.xcassets/Instant View/Settings/Browser.imageset/Contents.json
+++ b/submodules/TelegramUI/Images.xcassets/Instant View/Bookmark.imageset/Contents.json
@@ -1,7 +1,7 @@
{
"images" : [
{
- "filename" : "ic_lt_safari.pdf",
+ "filename" : "Bookmark.pdf",
"idiom" : "universal"
}
],
diff --git a/submodules/TelegramUI/Images.xcassets/Instant View/Browser.imageset/Browser.pdf b/submodules/TelegramUI/Images.xcassets/Instant View/Browser.imageset/Browser.pdf
new file mode 100644
index 0000000000..81f7ac3171
Binary files /dev/null and b/submodules/TelegramUI/Images.xcassets/Instant View/Browser.imageset/Browser.pdf differ
diff --git a/submodules/TelegramUI/Images.xcassets/Instant View/Browser.imageset/Contents.json b/submodules/TelegramUI/Images.xcassets/Instant View/Browser.imageset/Contents.json
new file mode 100644
index 0000000000..c45b00a1de
--- /dev/null
+++ b/submodules/TelegramUI/Images.xcassets/Instant View/Browser.imageset/Contents.json
@@ -0,0 +1,12 @@
+{
+ "images" : [
+ {
+ "filename" : "Browser.pdf",
+ "idiom" : "universal"
+ }
+ ],
+ "info" : {
+ "author" : "xcode",
+ "version" : 1
+ }
+}
diff --git a/submodules/TelegramUI/Images.xcassets/Instant View/CloseIcon.imageset/Close.pdf b/submodules/TelegramUI/Images.xcassets/Instant View/CloseIcon.imageset/Close.pdf
new file mode 100644
index 0000000000..dc18df012d
--- /dev/null
+++ b/submodules/TelegramUI/Images.xcassets/Instant View/CloseIcon.imageset/Close.pdf
@@ -0,0 +1,83 @@
+%PDF-1.7
+
+1 0 obj
+ << >>
+endobj
+
+2 0 obj
+ << /Length 3 0 R >>
+stream
+/DeviceRGB CS
+/DeviceRGB cs
+q
+1.000000 0.000000 -0.000000 1.000000 6.169983 6.370150 cm
+1.000000 1.000000 1.000000 scn
+0.359774 1.100077 m
+0.100075 0.840378 0.100075 0.419323 0.359774 0.159624 c
+0.619473 -0.100075 1.040527 -0.100075 1.300226 0.159624 c
+8.830000 7.689398 l
+16.359774 0.159624 l
+16.619473 -0.100075 17.040527 -0.100075 17.300226 0.159624 c
+17.559925 0.419323 17.559925 0.840378 17.300226 1.100077 c
+9.770452 8.629850 l
+17.300226 16.159624 l
+17.559925 16.419323 17.559925 16.840378 17.300226 17.100077 c
+17.040527 17.359776 16.619473 17.359776
16.359774 17.100077 c +8.830000 9.570303 l +1.300226 17.100077 l +1.040527 17.359776 0.619473 17.359776 0.359774 17.100077 c +0.100075 16.840378 0.100075 16.419323 0.359774 16.159624 c +7.889547 8.629850 l +0.359774 1.100077 l +h +f* +n +Q + +endstream +endobj + +3 0 obj + 788 +endobj + +4 0 obj + << /Annots [] + /Type /Page + /MediaBox [ 0.000000 0.000000 30.000000 30.000000 ] + /Resources 1 0 R + /Contents 2 0 R + /Parent 5 0 R + >> +endobj + +5 0 obj + << /Kids [ 4 0 R ] + /Count 1 + /Type /Pages + >> +endobj + +6 0 obj + << /Pages 5 0 R + /Type /Catalog + >> +endobj + +xref +0 7 +0000000000 65535 f +0000000010 00000 n +0000000034 00000 n +0000000878 00000 n +0000000900 00000 n +0000001073 00000 n +0000001147 00000 n +trailer +<< /ID [ (some) (id) ] + /Root 6 0 R + /Size 7 +>> +startxref +1206 +%%EOF \ No newline at end of file diff --git a/submodules/TelegramUI/Images.xcassets/Instant View/CloseIcon.imageset/Contents.json b/submodules/TelegramUI/Images.xcassets/Instant View/CloseIcon.imageset/Contents.json index 2553d7c645..2e21e0ab34 100644 --- a/submodules/TelegramUI/Images.xcassets/Instant View/CloseIcon.imageset/Contents.json +++ b/submodules/TelegramUI/Images.xcassets/Instant View/CloseIcon.imageset/Contents.json @@ -1,7 +1,7 @@ { "images" : [ { - "filename" : "cross.pdf", + "filename" : "Close.pdf", "idiom" : "universal" } ], diff --git a/submodules/TelegramUI/Images.xcassets/Instant View/CloseIcon.imageset/cross.pdf b/submodules/TelegramUI/Images.xcassets/Instant View/CloseIcon.imageset/cross.pdf deleted file mode 100644 index e16f590147..0000000000 --- a/submodules/TelegramUI/Images.xcassets/Instant View/CloseIcon.imageset/cross.pdf +++ /dev/null @@ -1,83 +0,0 @@ -%PDF-1.7 - -1 0 obj - << >> -endobj - -2 0 obj - << /Length 3 0 R >> -stream -/DeviceRGB CS -/DeviceRGB cs -q -1.000000 0.000000 -0.000000 1.000000 5.100098 4.640198 cm -1.000000 1.000000 1.000000 scn -0.970226 14.230053 m -0.710527 14.489752 0.289473 14.489752 0.029774 14.230053 c --0.229925 13.970354 -0.229925 13.549299 0.029774 13.289600 c -5.959549 7.359825 l -0.029774 1.430050 l --0.229925 1.170351 -0.229925 0.749296 0.029774 0.489597 c -0.289473 0.229898 0.710527 0.229898 0.970226 0.489597 c -6.900002 6.419373 l -12.829774 0.489600 l -13.089473 0.229901 13.510528 0.229901 13.770226 0.489600 c -14.029925 0.749299 14.029925 1.170354 13.770226 1.430053 c -7.840454 7.359825 l -13.770226 13.289598 l -14.029925 13.549296 14.029925 13.970351 13.770226 14.230050 c -13.510528 14.489749 13.089473 14.489749 12.829774 14.230050 c -6.900002 8.300278 l -0.970226 14.230053 l -h -f* -n -Q - -endstream -endobj - -3 0 obj - 789 -endobj - -4 0 obj - << /Annots [] - /Type /Page - /MediaBox [ 0.000000 0.000000 24.000000 24.000000 ] - /Resources 1 0 R - /Contents 2 0 R - /Parent 5 0 R - >> -endobj - -5 0 obj - << /Kids [ 4 0 R ] - /Count 1 - /Type /Pages - >> -endobj - -6 0 obj - << /Pages 5 0 R - /Type /Catalog - >> -endobj - -xref -0 7 -0000000000 65535 f -0000000010 00000 n -0000000034 00000 n -0000000879 00000 n -0000000901 00000 n -0000001074 00000 n -0000001148 00000 n -trailer -<< /ID [ (some) (id) ] - /Root 6 0 R - /Size 7 ->> -startxref -1207 -%%EOF \ No newline at end of file diff --git a/submodules/TelegramUI/Images.xcassets/Instant View/Settings/Browser.imageset/ic_lt_safari.pdf b/submodules/TelegramUI/Images.xcassets/Instant View/Settings/Browser.imageset/ic_lt_safari.pdf deleted file mode 100644 index ba73353a6d..0000000000 Binary files a/submodules/TelegramUI/Images.xcassets/Instant 
View/Settings/Browser.imageset/ic_lt_safari.pdf and /dev/null differ diff --git a/submodules/TelegramUI/Images.xcassets/Instant View/Settings/Reload.imageset/Contents.json b/submodules/TelegramUI/Images.xcassets/Instant View/Settings/Reload.imageset/Contents.json new file mode 100644 index 0000000000..99c3b2272f --- /dev/null +++ b/submodules/TelegramUI/Images.xcassets/Instant View/Settings/Reload.imageset/Contents.json @@ -0,0 +1,12 @@ +{ + "images" : [ + { + "filename" : "scheduled.pdf", + "idiom" : "universal" + } + ], + "info" : { + "author" : "xcode", + "version" : 1 + } +} diff --git a/submodules/TelegramUI/Images.xcassets/Instant View/Settings/Reload.imageset/scheduled.pdf b/submodules/TelegramUI/Images.xcassets/Instant View/Settings/Reload.imageset/scheduled.pdf new file mode 100644 index 0000000000..be50974bfb --- /dev/null +++ b/submodules/TelegramUI/Images.xcassets/Instant View/Settings/Reload.imageset/scheduled.pdf @@ -0,0 +1,86 @@ +%PDF-1.7 + +1 0 obj + << >> +endobj + +2 0 obj + << /Length 3 0 R >> +stream +/DeviceRGB CS +/DeviceRGB cs +q +1.000000 0.000000 -0.000000 1.000000 3.334991 3.334991 cm +0.000000 0.000000 0.000000 scn +8.665000 17.330002 m +8.297730 17.330002 8.000000 17.032270 8.000000 16.665001 c +8.000000 16.297733 8.297730 16.000002 8.665000 16.000002 c +12.716009 16.000002 16.000000 12.716011 16.000000 8.665002 c +16.000000 4.613993 12.716009 1.330002 8.665000 1.330002 c +4.613991 1.330002 1.330000 4.613993 1.330000 8.665002 c +1.330000 11.380140 2.805339 13.751918 5.000000 15.020325 c +5.000000 13.165002 l +5.000000 12.797732 5.297730 12.500002 5.665000 12.500002 c +6.032269 12.500002 6.330000 12.797732 6.330000 13.165002 c +6.330000 16.665001 l +6.330000 17.032270 6.032269 17.330002 5.665000 17.330002 c +2.165000 17.330002 l +1.797731 17.330002 1.500000 17.032270 1.500000 16.665001 c +1.500000 16.297733 1.797731 16.000002 2.165000 16.000002 c +4.050117 16.000002 l +1.617408 14.466265 0.000000 11.755083 0.000000 8.665002 c +0.000000 3.879455 3.879452 0.000000 8.665000 0.000000 c +13.450547 0.000000 17.330002 3.879455 17.330002 8.665002 c +17.330002 13.450549 13.450547 17.330002 8.665000 17.330002 c +h +f* +n +Q + +endstream +endobj + +3 0 obj + 1114 +endobj + +4 0 obj + << /Annots [] + /Type /Page + /MediaBox [ 0.000000 0.000000 24.000000 24.000000 ] + /Resources 1 0 R + /Contents 2 0 R + /Parent 5 0 R + >> +endobj + +5 0 obj + << /Kids [ 4 0 R ] + /Count 1 + /Type /Pages + >> +endobj + +6 0 obj + << /Pages 5 0 R + /Type /Catalog + >> +endobj + +xref +0 7 +0000000000 65535 f +0000000010 00000 n +0000000034 00000 n +0000001204 00000 n +0000001227 00000 n +0000001400 00000 n +0000001474 00000 n +trailer +<< /ID [ (some) (id) ] + /Root 6 0 R + /Size 7 +>> +startxref +1533 +%%EOF \ No newline at end of file diff --git a/submodules/TelegramUI/Sources/Chat/ChatControllerPaste.swift b/submodules/TelegramUI/Sources/Chat/ChatControllerPaste.swift index 88a9c0d8aa..8eacbdcc2a 100644 --- a/submodules/TelegramUI/Sources/Chat/ChatControllerPaste.swift +++ b/submodules/TelegramUI/Sources/Chat/ChatControllerPaste.swift @@ -186,6 +186,7 @@ extension ChatControllerImpl { audioTrackOffset: nil, audioTrackVolume: nil, audioTrackSamples: nil, + coverImageTimestamp: nil, qualityPreset: nil ) @@ -210,7 +211,7 @@ extension ChatControllerImpl { var fileAttributes: [TelegramMediaFileAttribute] = [] fileAttributes.append(.FileName(fileName: "sticker.webm")) fileAttributes.append(.Sticker(displayText: "", packReference: nil, maskData: nil)) - fileAttributes.append(.Video(duration: 
animatedImage.duration, size: PixelDimensions(width: 512, height: 512), flags: [], preloadSize: nil)) + fileAttributes.append(.Video(duration: animatedImage.duration, size: PixelDimensions(width: 512, height: 512), flags: [], preloadSize: nil, coverTime: nil)) let previewRepresentations: [TelegramMediaImageRepresentation] = [] // if let thumbnailResource { diff --git a/submodules/TelegramUI/Sources/ChatController.swift b/submodules/TelegramUI/Sources/ChatController.swift index c3410e9c36..60350b684e 100644 --- a/submodules/TelegramUI/Sources/ChatController.swift +++ b/submodules/TelegramUI/Sources/ChatController.swift @@ -7129,7 +7129,7 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G self.didAppear = true self.chatDisplayNode.historyNode.experimentalSnapScrollToItem = false - self.chatDisplayNode.historyNode.canReadHistory.set(combineLatest(context.sharedContext.applicationBindings.applicationInForeground, self.canReadHistory.get()) |> map { a, b in + self.chatDisplayNode.historyNode.canReadHistory.set(combineLatest(self.context.sharedContext.applicationBindings.applicationInForeground, self.canReadHistory.get()) |> map { a, b in return a && b }) diff --git a/submodules/TelegramUI/Sources/ChatInterfaceStateContextMenus.swift b/submodules/TelegramUI/Sources/ChatInterfaceStateContextMenus.swift index fd103a83cb..b31c40bc8c 100644 --- a/submodules/TelegramUI/Sources/ChatInterfaceStateContextMenus.swift +++ b/submodules/TelegramUI/Sources/ChatInterfaceStateContextMenus.swift @@ -397,7 +397,7 @@ func messageMediaEditingOptions(message: Message) -> MessageMediaEditingOptions return [] case .Animated: break - case let .Video(_, _, flags, _): + case let .Video(_, _, flags, _, _): if flags.contains(.instantRoundVideo) { return [] } else { diff --git a/submodules/TelegramUI/Sources/ContactSelectionController.swift b/submodules/TelegramUI/Sources/ContactSelectionController.swift index 33c8ec4ec9..cabf9baa3e 100644 --- a/submodules/TelegramUI/Sources/ContactSelectionController.swift +++ b/submodules/TelegramUI/Sources/ContactSelectionController.swift @@ -17,6 +17,7 @@ import ChatSendMessageActionUI class ContactSelectionControllerImpl: ViewController, ContactSelectionController, PresentableController, AttachmentContainable { private let context: AccountContext + private let mode: ContactSelectionControllerMode private let autoDismiss: Bool fileprivate var contactsNode: ContactSelectionControllerNode { @@ -35,7 +36,7 @@ class ContactSelectionControllerImpl: ViewController, ContactSelectionController private let index: PeerNameIndex = .lastNameFirst private let titleProducer: (PresentationStrings) -> String - private let options: [ContactListAdditionalOption] + private let options: Signal<[ContactListAdditionalOption], NoError> private let displayDeviceContacts: Bool private let displayCallIcons: Bool private let multipleSelection: Bool @@ -94,6 +95,7 @@ class ContactSelectionControllerImpl: ViewController, ContactSelectionController init(_ params: ContactSelectionControllerParams) { self.context = params.context + self.mode = params.mode self.autoDismiss = params.autoDismiss self.titleProducer = params.title self.options = params.options @@ -207,7 +209,7 @@ class ContactSelectionControllerImpl: ViewController, ContactSelectionController } override func loadDisplayNode() { - self.displayNode = ContactSelectionControllerNode(context: self.context, presentationData: self.presentationData, options: self.options, displayDeviceContacts: self.displayDeviceContacts, 
displayCallIcons: self.displayCallIcons, multipleSelection: self.multipleSelection, requirePhoneNumbers: self.requirePhoneNumbers) + self.displayNode = ContactSelectionControllerNode(context: self.context, mode: self.mode, presentationData: self.presentationData, options: self.options, displayDeviceContacts: self.displayDeviceContacts, displayCallIcons: self.displayCallIcons, multipleSelection: self.multipleSelection, requirePhoneNumbers: self.requirePhoneNumbers) self._ready.set(self.contactsNode.contactListNode.ready) self.contactsNode.navigationBar = self.navigationBar diff --git a/submodules/TelegramUI/Sources/ContactSelectionControllerNode.swift b/submodules/TelegramUI/Sources/ContactSelectionControllerNode.swift index cface4f8c9..685113732e 100644 --- a/submodules/TelegramUI/Sources/ContactSelectionControllerNode.swift +++ b/submodules/TelegramUI/Sources/ContactSelectionControllerNode.swift @@ -55,7 +55,7 @@ final class ContactSelectionControllerNode: ASDisplayNode { var searchContainerNode: ContactsSearchContainerNode? - init(context: AccountContext, presentationData: PresentationData, options: [ContactListAdditionalOption], displayDeviceContacts: Bool, displayCallIcons: Bool, multipleSelection: Bool, requirePhoneNumbers: Bool) { + init(context: AccountContext, mode: ContactSelectionControllerMode, presentationData: PresentationData, options: Signal<[ContactListAdditionalOption], NoError>, displayDeviceContacts: Bool, displayCallIcons: Bool, multipleSelection: Bool, requirePhoneNumbers: Bool) { self.context = context self.presentationData = presentationData self.displayDeviceContacts = displayDeviceContacts @@ -65,10 +65,55 @@ final class ContactSelectionControllerNode: ASDisplayNode { if requirePhoneNumbers { filters.append(.excludeWithoutPhoneNumbers) } + if case .starsGifting = mode { + filters.append(.excludeBots) + } self.filters = filters + let displayTopPeers: ContactListPresentation.TopPeers + if case let .starsGifting(birthdays, hasActions) = mode { + if let birthdays { + let today = Calendar(identifier: .gregorian).component(.day, from: Date()) + var sections: [(String, [EnginePeer.Id], Bool)] = [] + var todayPeers: [EnginePeer.Id] = [] + var yesterdayPeers: [EnginePeer.Id] = [] + var tomorrowPeers: [EnginePeer.Id] = [] + + for (peerId, birthday) in birthdays { + if birthday.day == today { + todayPeers.append(peerId) + } else if birthday.day == today - 1 || birthday.day > today + 5 { + yesterdayPeers.append(peerId) + } else if birthday.day == today + 1 || birthday.day < today + 5 { + tomorrowPeers.append(peerId) + } + } + + if !todayPeers.isEmpty { + sections.append((presentationData.strings.Premium_Gift_ContactSelection_BirthdayToday, todayPeers, hasActions)) + } + if !yesterdayPeers.isEmpty { + sections.append((presentationData.strings.Premium_Gift_ContactSelection_BirthdayYesterday, yesterdayPeers, hasActions)) + } + if !tomorrowPeers.isEmpty { + sections.append((presentationData.strings.Premium_Gift_ContactSelection_BirthdayTomorrow, tomorrowPeers, hasActions)) + } + + displayTopPeers = .custom(sections) + } else { + displayTopPeers = .recent + } + } else { + displayTopPeers = .none + } + + let presentation: Signal = options + |> map { options in + return .natural(options: options, includeChatList: false, topPeers: displayTopPeers) + } + var contextActionImpl: ((EnginePeer, ASDisplayNode, ContextGesture?, CGPoint?) -> Void)? 
- self.contactListNode = ContactListNode(context: context, updatedPresentationData: (presentationData, self.presentationDataPromise.get()), presentation: .single(.natural(options: options, includeChatList: false, topPeers: .none)), filters: filters, onlyWriteable: false, isGroupInvitation: false, displayCallIcons: displayCallIcons, contextAction: multipleSelection ? { peer, node, gesture, _, _ in + self.contactListNode = ContactListNode(context: context, updatedPresentationData: (presentationData, self.presentationDataPromise.get()), presentation: presentation, filters: filters, onlyWriteable: false, isGroupInvitation: false, displayCallIcons: displayCallIcons, contextAction: multipleSelection ? { peer, node, gesture, _, _ in contextActionImpl?(peer, node, gesture, nil) } : nil, multipleSelection: multipleSelection) @@ -262,7 +307,7 @@ final class ContactSelectionControllerNode: ASDisplayNode { } else { categories.insert(.global) } - self.searchDisplayController = SearchDisplayController(presentationData: self.presentationData, contentNode: ContactsSearchContainerNode(context: self.context, updatedPresentationData: (self.presentationData, self.presentationDataPromise.get()), onlyWriteable: false, categories: categories, addContact: nil, openPeer: { [weak self] peer in + self.searchDisplayController = SearchDisplayController(presentationData: self.presentationData, contentNode: ContactsSearchContainerNode(context: self.context, updatedPresentationData: (self.presentationData, self.presentationDataPromise.get()), onlyWriteable: false, categories: categories, filters: self.filters, addContact: nil, openPeer: { [weak self] peer in if let strongSelf = self { var updated = false strongSelf.contactListNode.updateSelectionState { state -> ContactListNodeGroupSelectionState? 
in diff --git a/submodules/TelegramUI/Sources/HorizontalListContextResultsChatInputPanelItem.swift b/submodules/TelegramUI/Sources/HorizontalListContextResultsChatInputPanelItem.swift index 6d7b4d1150..d41b9923b6 100644 --- a/submodules/TelegramUI/Sources/HorizontalListContextResultsChatInputPanelItem.swift +++ b/submodules/TelegramUI/Sources/HorizontalListContextResultsChatInputPanelItem.swift @@ -260,7 +260,7 @@ final class HorizontalListContextResultsChatInputPanelItemNode: ListViewItemNode } imageDimensions = externalReference.content?.dimensions?.cgSize if externalReference.type == "gif", let thumbnailResource = externalReference.thumbnail?.resource, let content = externalReference.content, let dimensions = content.dimensions { - videoFile = TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.LocalFile, id: 0), partialReference: nil, resource: thumbnailResource, previewRepresentations: [], videoThumbnails: [], immediateThumbnailData: nil, mimeType: "video/mp4", size: nil, attributes: [.Animated, .Video(duration: 0, size: dimensions, flags: [], preloadSize: nil)]) + videoFile = TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.LocalFile, id: 0), partialReference: nil, resource: thumbnailResource, previewRepresentations: [], videoThumbnails: [], immediateThumbnailData: nil, mimeType: "video/mp4", size: nil, attributes: [.Animated, .Video(duration: 0, size: dimensions, flags: [], preloadSize: nil, coverTime: nil)]) imageResource = nil } diff --git a/submodules/TelegramUI/Sources/OpenUrl.swift b/submodules/TelegramUI/Sources/OpenUrl.swift index 0ac8c5a3e9..e232ff65cc 100644 --- a/submodules/TelegramUI/Sources/OpenUrl.swift +++ b/submodules/TelegramUI/Sources/OpenUrl.swift @@ -1033,7 +1033,7 @@ func openExternalUrlImpl(context: AccountContext, urlContext: OpenURLContext, ur } if accessChallengeData.data.isLockable { if passcodeSettings.autolockTimeout != nil && settings.defaultWebBrowser == nil { - settings = WebBrowserSettings(defaultWebBrowser: "safari") + settings = WebBrowserSettings(defaultWebBrowser: "safari", exceptions: []) } } return settings diff --git a/submodules/TelegramUI/Sources/PeerMessagesMediaPlaylist.swift b/submodules/TelegramUI/Sources/PeerMessagesMediaPlaylist.swift index a09f5b61de..9b28b54b49 100644 --- a/submodules/TelegramUI/Sources/PeerMessagesMediaPlaylist.swift +++ b/submodules/TelegramUI/Sources/PeerMessagesMediaPlaylist.swift @@ -72,7 +72,7 @@ final class MessageMediaPlaylistItem: SharedMediaPlaylistItem { } else { return SharedMediaPlaybackData(type: .music, source: source) } - case let .Video(_, _, flags, _): + case let .Video(_, _, flags, _, _): if flags.contains(.instantRoundVideo) { return SharedMediaPlaybackData(type: .instantVideo, source: source) } else { @@ -129,7 +129,7 @@ final class MessageMediaPlaylistItem: SharedMediaPlaylistItem { displayData = SharedMediaPlaybackDisplayData.music(title: updatedTitle, performer: updatedPerformer, albumArt: albumArt, long: CGFloat(duration) > 10.0 * 60.0, caption: caption) } return displayData - case let .Video(_, _, flags, _): + case let .Video(_, _, flags, _, _): if flags.contains(.instantRoundVideo) { return SharedMediaPlaybackDisplayData.instantVideo(author: self.message.effectiveAuthor.flatMap(EnginePeer.init), peer: self.message.peers[self.message.id.peerId].flatMap(EnginePeer.init), timestamp: self.message.timestamp) } else { diff --git a/submodules/TelegramUI/Sources/SharedAccountContext.swift b/submodules/TelegramUI/Sources/SharedAccountContext.swift index 7069a9b6f8..f41f50d543 
100644 --- a/submodules/TelegramUI/Sources/SharedAccountContext.swift +++ b/submodules/TelegramUI/Sources/SharedAccountContext.swift @@ -2193,6 +2193,7 @@ public final class SharedAccountContextImpl: SharedAccountContext { var reachedLimitImpl: ((Int32) -> Void)? var presentBirthdayPickerImpl: (() -> Void)? let mode: ContactMultiselectionControllerMode + var starsMode: ContactSelectionControllerMode = .generic var currentBirthdays: [EnginePeer.Id: TelegramBirthday]? if case let .chatList(birthdays) = source, let birthdays, !birthdays.isEmpty { mode = .premiumGifting(birthdays: birthdays, selectToday: true, hasActions: true) @@ -2202,6 +2203,7 @@ public final class SharedAccountContextImpl: SharedAccountContext { currentBirthdays = birthdays } else if case let .stars(birthdays) = source { mode = .premiumGifting(birthdays: birthdays, selectToday: false, hasActions: false) + starsMode = .starsGifting(birthdays: birthdays, hasActions: false) currentBirthdays = birthdays } else { mode = .premiumGifting(birthdays: nil, selectToday: false, hasActions: true) @@ -2238,7 +2240,10 @@ public final class SharedAccountContextImpl: SharedAccountContext { options.set(context.engine.payments.starsGiftOptions(peerId: nil)) let contactsController = context.sharedContext.makeContactSelectionController(ContactSelectionControllerParams( context: context, - title: { strings in return strings.Stars_Purchase_GiftStars } + mode: starsMode, + autoDismiss: false, + title: { strings in return strings.Stars_Purchase_GiftStars }, + options: contactOptions )) let _ = (contactsController.result |> deliverOnMainQueue).start(next: { result in diff --git a/submodules/TelegramUI/Sources/TelegramRootController.swift b/submodules/TelegramUI/Sources/TelegramRootController.swift index 5fffa2db5f..e0ab310335 100644 --- a/submodules/TelegramUI/Sources/TelegramRootController.swift +++ b/submodules/TelegramUI/Sources/TelegramRootController.swift @@ -674,7 +674,7 @@ public final class TelegramRootController: NavigationController, TelegramRootCon return nil } } - media = .video(dimensions: dimensions, duration: duration, resource: resource, firstFrameFile: firstFrameFile, stickers: result.stickers) + media = .video(dimensions: dimensions, duration: duration, resource: resource, firstFrameFile: firstFrameFile, stickers: result.stickers, coverTime: values.coverImageTimestamp) } default: break diff --git a/submodules/TelegramUIPreferences/Sources/WebBrowserSettings.swift b/submodules/TelegramUIPreferences/Sources/WebBrowserSettings.swift index 0739d93e81..9c45fb6edd 100644 --- a/submodules/TelegramUIPreferences/Sources/WebBrowserSettings.swift +++ b/submodules/TelegramUIPreferences/Sources/WebBrowserSettings.swift @@ -3,35 +3,73 @@ import Postbox import TelegramCore import SwiftSignalKit -public struct WebBrowserSettings: Codable, Equatable { - public let defaultWebBrowser: String? +public struct WebBrowserException: Codable, Equatable { + public let domain: String + public let title: String - public static var defaultSettings: WebBrowserSettings { - return WebBrowserSettings(defaultWebBrowser: nil) + public init(domain: String, title: String) { + self.domain = domain + self.title = title } - public init(defaultWebBrowser: String?) 
{ + public init(from decoder: Decoder) throws { + let container = try decoder.container(keyedBy: StringCodingKey.self) + + self.domain = try container.decode(String.self, forKey: "domain") + self.title = try container.decode(String.self, forKey: "title") + } + + public func encode(to encoder: Encoder) throws { + var container = encoder.container(keyedBy: StringCodingKey.self) + + try container.encode(self.domain, forKey: "domain") + try container.encode(self.title, forKey: "title") + } +} + +public struct WebBrowserSettings: Codable, Equatable { + public let defaultWebBrowser: String? + public let exceptions: [WebBrowserException] + + public static var defaultSettings: WebBrowserSettings { + return WebBrowserSettings(defaultWebBrowser: nil, exceptions: []) + } + + public init(defaultWebBrowser: String?, exceptions: [WebBrowserException]) { self.defaultWebBrowser = defaultWebBrowser + self.exceptions = exceptions } public init(from decoder: Decoder) throws { let container = try decoder.container(keyedBy: StringCodingKey.self) self.defaultWebBrowser = try? container.decodeIfPresent(String.self, forKey: "defaultWebBrowser") + self.exceptions = (try? container.decodeIfPresent([WebBrowserException].self, forKey: "exceptions")) ?? [] } public func encode(to encoder: Encoder) throws { var container = encoder.container(keyedBy: StringCodingKey.self) try container.encodeIfPresent(self.defaultWebBrowser, forKey: "defaultWebBrowser") + try container.encode(self.exceptions, forKey: "exceptions") } public static func ==(lhs: WebBrowserSettings, rhs: WebBrowserSettings) -> Bool { - return lhs.defaultWebBrowser == rhs.defaultWebBrowser + if lhs.defaultWebBrowser != rhs.defaultWebBrowser { + return false + } + if lhs.exceptions != rhs.exceptions { + return false + } + return true } public func withUpdatedDefaultWebBrowser(_ defaultWebBrowser: String?) 
-> WebBrowserSettings { - return WebBrowserSettings(defaultWebBrowser: defaultWebBrowser) + return WebBrowserSettings(defaultWebBrowser: defaultWebBrowser, exceptions: self.exceptions) + } + + public func withUpdatedExceptions(_ exceptions: [WebBrowserException]) -> WebBrowserSettings { + return WebBrowserSettings(defaultWebBrowser: self.defaultWebBrowser, exceptions: exceptions) } } diff --git a/submodules/UndoUI/Sources/UndoOverlayControllerNode.swift b/submodules/UndoUI/Sources/UndoOverlayControllerNode.swift index 0d68fcff21..59b63dd909 100644 --- a/submodules/UndoUI/Sources/UndoOverlayControllerNode.swift +++ b/submodules/UndoUI/Sources/UndoOverlayControllerNode.swift @@ -713,12 +713,14 @@ final class UndoOverlayControllerNode: ViewControllerTracingNode { let body = MarkdownAttributeSet(font: Font.regular(14.0), textColor: .white) let bold: MarkdownAttributeSet + var link = body if savedMessages { bold = MarkdownAttributeSet(font: Font.semibold(14.0), textColor: presentationData.theme.list.itemAccentColor.withMultiplied(hue: 0.933, saturation: 0.61, brightness: 1.0), additionalAttributes: ["URL": ""]) + link = MarkdownAttributeSet(font: Font.semibold(14.0), textColor: .white) } else { bold = MarkdownAttributeSet(font: Font.semibold(14.0), textColor: .white) } - let attributedText = parseMarkdownIntoAttributedString(text, attributes: MarkdownAttributes(body: body, bold: bold, link: body, linkAttribute: { _ in return nil }), textAlignment: .natural) + let attributedText = parseMarkdownIntoAttributedString(text, attributes: MarkdownAttributes(body: body, bold: bold, link: link, linkAttribute: { _ in return nil }), textAlignment: .natural) self.textNode.attributedText = attributedText self.textNode.maximumNumberOfLines = 2 diff --git a/submodules/WatchBridge/Sources/WatchBridge.swift b/submodules/WatchBridge/Sources/WatchBridge.swift index 4cceb2c348..c7c75e42e1 100644 --- a/submodules/WatchBridge/Sources/WatchBridge.swift +++ b/submodules/WatchBridge/Sources/WatchBridge.swift @@ -172,7 +172,7 @@ func makeBridgeMedia(message: Message, strings: PresentationStrings, chatPeer: P for attribute in file.attributes { switch attribute { - case let .Video(duration, size, flags, _): + case let .Video(duration, size, flags, _, _): bridgeVideo.duration = Int32(duration) bridgeVideo.dimensions = size.cgSize bridgeVideo.round = flags.contains(.instantRoundVideo) diff --git a/submodules/WebSearchUI/Sources/WebSearchGalleryController.swift b/submodules/WebSearchUI/Sources/WebSearchGalleryController.swift index 2dd6561a20..f6b4715142 100644 --- a/submodules/WebSearchUI/Sources/WebSearchGalleryController.swift +++ b/submodules/WebSearchUI/Sources/WebSearchGalleryController.swift @@ -37,7 +37,7 @@ struct WebSearchGalleryEntry: Equatable { switch self.result { case let .externalReference(externalReference): if let content = externalReference.content, externalReference.type == "gif", let thumbnailResource = externalReference.thumbnail?.resource, let dimensions = content.dimensions { - let fileReference = FileMediaReference.standalone(media: TelegramMediaFile(fileId: EngineMedia.Id(namespace: Namespaces.Media.LocalFile, id: 0), partialReference: nil, resource: content.resource, previewRepresentations: [TelegramMediaImageRepresentation(dimensions: dimensions, resource: thumbnailResource, progressiveSizes: [], immediateThumbnailData: nil, hasVideo: false, isPersonal: false)], videoThumbnails: [], immediateThumbnailData: nil, mimeType: "video/mp4", size: nil, attributes: [.Animated, .Video(duration: 0, size: 
dimensions, flags: [], preloadSize: nil)])) + let fileReference = FileMediaReference.standalone(media: TelegramMediaFile(fileId: EngineMedia.Id(namespace: Namespaces.Media.LocalFile, id: 0), partialReference: nil, resource: content.resource, previewRepresentations: [TelegramMediaImageRepresentation(dimensions: dimensions, resource: thumbnailResource, progressiveSizes: [], immediateThumbnailData: nil, hasVideo: false, isPersonal: false)], videoThumbnails: [], immediateThumbnailData: nil, mimeType: "video/mp4", size: nil, attributes: [.Animated, .Video(duration: 0, size: dimensions, flags: [], preloadSize: nil, coverTime: nil)])) return WebSearchVideoGalleryItem(context: context, presentationData: presentationData, index: self.index, result: self.result, content: NativeVideoContent(id: .contextResult(self.result.queryId, self.result.id), userLocation: .other, fileReference: fileReference, loopVideo: true, enableSound: false, fetchAutomatically: true, storeAfterDownload: nil), controllerInteraction: controllerInteraction) } case let .internalReference(internalReference): diff --git a/submodules/WebUI/Sources/WebAppController.swift b/submodules/WebUI/Sources/WebAppController.swift index 49ae1cf2de..08f7b3bec2 100644 --- a/submodules/WebUI/Sources/WebAppController.swift +++ b/submodules/WebUI/Sources/WebAppController.swift @@ -1796,6 +1796,8 @@ public final class WebAppController: ViewController, AttachmentContainable { self.navigationItem.rightBarButtonItem?.action = #selector(self.moreButtonPressed) self.navigationItem.rightBarButtonItem?.target = self + self.navigationItem.backBarButtonItem = UIBarButtonItem(title: self.presentationData.strings.Common_Back, style: .plain, target: nil, action: nil) + let titleView = WebAppTitleView(context: self.context, theme: self.presentationData.theme) titleView.title = WebAppTitle(title: params.botName, counter: self.presentationData.strings.WebApp_Miniapp, isVerified: params.botVerified) self.navigationItem.titleView = titleView diff --git a/submodules/WidgetItemsUtils/Sources/WidgetItemsUtils.swift b/submodules/WidgetItemsUtils/Sources/WidgetItemsUtils.swift index a17977a4d4..e350f2ec7c 100644 --- a/submodules/WidgetItemsUtils/Sources/WidgetItemsUtils.swift +++ b/submodules/WidgetItemsUtils/Sources/WidgetItemsUtils.swift @@ -22,7 +22,7 @@ public extension WidgetDataPeer.Message { switch attribute { case let .Sticker(altText, _, _): content = .sticker(WidgetDataPeer.Message.Content.Sticker(altText: altText)) - case let .Video(duration, _, flags, _): + case let .Video(duration, _, flags, _, _): if flags.contains(.instantRoundVideo) { content = .videoMessage(WidgetDataPeer.Message.Content.VideoMessage(duration: Int32(duration))) } else {