diff --git a/Telegram/Telegram-iOS/Resources/Transcribe.tgs b/Telegram/Telegram-iOS/Resources/Transcribe.tgs new file mode 100644 index 0000000000..341e57cd00 Binary files /dev/null and b/Telegram/Telegram-iOS/Resources/Transcribe.tgs differ diff --git a/Telegram/Telegram-iOS/en.lproj/Localizable.strings b/Telegram/Telegram-iOS/en.lproj/Localizable.strings index 43e6f45ebe..2af4cacb6d 100644 --- a/Telegram/Telegram-iOS/en.lproj/Localizable.strings +++ b/Telegram/Telegram-iOS/en.lproj/Localizable.strings @@ -10465,8 +10465,12 @@ Sorry for the inconvenience."; "Channel.Info.Stats" = "Statistics and Boosts"; -"Conversation.FreeTranscriptionLimitTooltip_1" = "You have **%@** free voice transcription left this month."; -"Conversation.FreeTranscriptionLimitTooltip_any" = "You have **%@** free voice transcriptions left this month."; +"Conversation.FreeTranscriptionLimitTooltip_1" = "You have **%@** free voice transcription left this week."; +"Conversation.FreeTranscriptionLimitTooltip_any" = "You have **%@** free voice transcriptions left this week."; + +"Conversation.FreeTranscriptionCooldownTooltip_1" = "You have used all your **%@** free transcription this week."; +"Conversation.FreeTranscriptionCooldownTooltip_any" = "You have used all your **%@** free transcriptions this week."; +"Conversation.FreeTranscriptionWaitOrSubscribe" = "Wait until **%@** to use it again or subscribe to [Telegram Premium]() now."; "Notification.GiveawayResults_1" = "**%@** winner of the giveaway was randomly selected by Telegram and received their gift link in a private message."; "Notification.GiveawayResults_any" = "**%@** winners of the giveaway were randomly selected by Telegram and received their gift links in private messages."; @@ -10483,6 +10487,8 @@ Sorry for the inconvenience."; "Chat.Giveaway.DeleteConfirmation.Text" = "Deleting this message won't cancel the giveaway - the winners will still be selected on **%@**.\n\nOnce deleted, the Giveaway Announcement cannot be recovered."; 
"Chat.SimilarChannels" = "Similar Channels"; +"Chat.SimilarChannels.Join" = "Join"; +"Chat.SimilarChannels.JoinedChannel" = "You joined channel **%@**."; "Wallpaper.ApplyForMe" = "Apply for Me"; "Wallpaper.ApplyForBoth" = "Apply for Me and %@"; @@ -10514,3 +10520,19 @@ Sorry for the inconvenience."; "Share.RepostStory" = "Repost\nStory"; "PeerInfo.PaneRecommended" = "Similar Channels"; + +"Story.ViewList.ContextSortReposts" = "Reposts First"; + +"ShortTime.JustNow" = "now"; +"ShortTime.MinutesAgo_1" = "%@m"; +"ShortTime.MinutesAgo_any" = "%@m"; +"ShortTime.HoursAgo_1" = "%@h"; +"ShortTime.HoursAgo_any" = "%@h"; +"ShortTime.AtDate" = "%@"; +"ShortTime.AtPreciseDate" = "%@ at %@"; + +"Stats.Message.Reactions" = "Reactions"; +"Stats.ReactionsPerPost" = "Reactions Per Post"; +"Stats.ViewsPerStory" = "Views Per Story"; +"Stats.SharesPerStory" = "Shares Per Story"; +"Stats.ReactionsPerStory" = "Reactions Per Story"; diff --git a/submodules/AccountContext/Sources/AccountContext.swift b/submodules/AccountContext/Sources/AccountContext.swift index 1acf9a8dff..1e4e27a0bb 100644 --- a/submodules/AccountContext/Sources/AccountContext.swift +++ b/submodules/AccountContext/Sources/AccountContext.swift @@ -802,9 +802,26 @@ public struct StoryCameraTransitionInCoordinator { } } +public class MediaEditorTransitionOutExternalState { + public var storyTarget: Stories.PendingTarget? + public var isPeerArchived: Bool + public var transitionOut: ((Stories.PendingTarget?, Bool) -> StoryCameraTransitionOut?)? + + public init(storyTarget: Stories.PendingTarget?, isPeerArchived: Bool, transitionOut: ((Stories.PendingTarget?, Bool) -> StoryCameraTransitionOut?)?) 
{ + self.storyTarget = storyTarget + self.isPeerArchived = isPeerArchived + self.transitionOut = transitionOut + } +} + +public protocol MediaEditorScreenResult { + +} + public protocol TelegramRootControllerInterface: NavigationController { @discardableResult func openStoryCamera(customTarget: EnginePeer.Id?, transitionIn: StoryCameraTransitionIn?, transitionedIn: @escaping () -> Void, transitionOut: @escaping (Stories.PendingTarget?, Bool) -> StoryCameraTransitionOut?) -> StoryCameraTransitionInCoordinator? + func proceedWithStoryUpload(target: Stories.PendingTarget, result: MediaEditorScreenResult, existingMedia: EngineMedia?, forwardInfo: Stories.PendingForwardInfo?, externalState: MediaEditorTransitionOutExternalState, commit: @escaping (@escaping () -> Void) -> Void) func getContactsController() -> ViewController? func getChatsController() -> ViewController? @@ -935,7 +952,7 @@ public protocol SharedAccountContext: AnyObject { func makeChannelStatsController(context: AccountContext, updatedPresentationData: (initial: PresentationData, signal: Signal)?, peerId: EnginePeer.Id, boosts: Bool, boostStatus: ChannelBoostStatus?) -> ViewController func makeMessagesStatsController(context: AccountContext, updatedPresentationData: (initial: PresentationData, signal: Signal)?, messageId: EngineMessage.Id) -> ViewController - func makeStoryStatsController(context: AccountContext, updatedPresentationData: (initial: PresentationData, signal: Signal)?, peerId: EnginePeer.Id, storyId: Int32) -> ViewController + func makeStoryStatsController(context: AccountContext, updatedPresentationData: (initial: PresentationData, signal: Signal)?, peerId: EnginePeer.Id, storyId: Int32, storyItem: EngineStoryItem?) -> ViewController func makeDebugSettingsController(context: AccountContext?) -> ViewController? 
@@ -1079,7 +1096,7 @@ public protocol AccountContext: AnyObject { public struct PremiumConfiguration { public static var defaultValue: PremiumConfiguration { - return PremiumConfiguration(isPremiumDisabled: false, showPremiumGiftInAttachMenu: false, showPremiumGiftInTextField: false, giveawayGiftsPurchaseAvailable: false, boostsPerGiftCount: 3, minChannelNameColorLevel: 5) + return PremiumConfiguration(isPremiumDisabled: false, showPremiumGiftInAttachMenu: false, showPremiumGiftInTextField: false, giveawayGiftsPurchaseAvailable: false, boostsPerGiftCount: 3, minChannelNameColorLevel: 5, audioTransciptionTrialMaxDuration: 300, audioTransciptionTrialCount: 2) } public let isPremiumDisabled: Bool @@ -1088,14 +1105,18 @@ public struct PremiumConfiguration { public let giveawayGiftsPurchaseAvailable: Bool public let boostsPerGiftCount: Int32 public let minChannelNameColorLevel: Int32 + public let audioTransciptionTrialMaxDuration: Int32 + public let audioTransciptionTrialCount: Int32 - fileprivate init(isPremiumDisabled: Bool, showPremiumGiftInAttachMenu: Bool, showPremiumGiftInTextField: Bool, giveawayGiftsPurchaseAvailable: Bool, boostsPerGiftCount: Int32, minChannelNameColorLevel: Int32) { + fileprivate init(isPremiumDisabled: Bool, showPremiumGiftInAttachMenu: Bool, showPremiumGiftInTextField: Bool, giveawayGiftsPurchaseAvailable: Bool, boostsPerGiftCount: Int32, minChannelNameColorLevel: Int32, audioTransciptionTrialMaxDuration: Int32, audioTransciptionTrialCount: Int32) { self.isPremiumDisabled = isPremiumDisabled self.showPremiumGiftInAttachMenu = showPremiumGiftInAttachMenu self.showPremiumGiftInTextField = showPremiumGiftInTextField self.giveawayGiftsPurchaseAvailable = giveawayGiftsPurchaseAvailable self.boostsPerGiftCount = boostsPerGiftCount self.minChannelNameColorLevel = minChannelNameColorLevel + self.audioTransciptionTrialMaxDuration = audioTransciptionTrialMaxDuration + self.audioTransciptionTrialCount = audioTransciptionTrialCount } public static func 
with(appConfiguration: AppConfiguration) -> PremiumConfiguration { @@ -1106,7 +1127,9 @@ public struct PremiumConfiguration { showPremiumGiftInTextField: data["premium_gift_text_field_icon"] as? Bool ?? false, giveawayGiftsPurchaseAvailable: data["giveaway_gifts_purchase_available"] as? Bool ?? false, boostsPerGiftCount: Int32(data["boosts_per_sent_gift"] as? Double ?? 3), - minChannelNameColorLevel: Int32(data["channel_color_level_min"] as? Double ?? 5) + minChannelNameColorLevel: Int32(data["channel_color_level_min"] as? Double ?? 5), + audioTransciptionTrialMaxDuration: Int32(data["transcribe_audio_trial_duration_max"] as? Double ?? 300), + audioTransciptionTrialCount: Int32(data["transcribe_audio_trial_weekly_number"] as? Double ?? 2) ) } else { return .defaultValue diff --git a/submodules/AccountContext/Sources/ChatController.swift b/submodules/AccountContext/Sources/ChatController.swift index 93521d658d..11981560db 100644 --- a/submodules/AccountContext/Sources/ChatController.swift +++ b/submodules/AccountContext/Sources/ChatController.swift @@ -51,6 +51,7 @@ public final class ChatMessageItemAssociatedData: Equatable { public let translateToLanguage: String? public let maxReadStoryId: Int32? public let recommendedChannels: RecommendedChannels? + public let audioTranscriptionTrial: AudioTranscription.TrialState public init( automaticDownloadPeerType: MediaAutoDownloadPeerType, @@ -75,7 +76,8 @@ public final class ChatMessageItemAssociatedData: Equatable { hasBots: Bool = false, translateToLanguage: String? = nil, maxReadStoryId: Int32? = nil, - recommendedChannels: RecommendedChannels? = nil + recommendedChannels: RecommendedChannels? 
= nil, + audioTranscriptionTrial: AudioTranscription.TrialState = .defaultValue ) { self.automaticDownloadPeerType = automaticDownloadPeerType self.automaticDownloadPeerId = automaticDownloadPeerId @@ -100,6 +102,7 @@ public final class ChatMessageItemAssociatedData: Equatable { self.translateToLanguage = translateToLanguage self.maxReadStoryId = maxReadStoryId self.recommendedChannels = recommendedChannels + self.audioTranscriptionTrial = audioTranscriptionTrial } public static func == (lhs: ChatMessageItemAssociatedData, rhs: ChatMessageItemAssociatedData) -> Bool { @@ -169,6 +172,9 @@ public final class ChatMessageItemAssociatedData: Equatable { if lhs.recommendedChannels != rhs.recommendedChannels { return false } + if lhs.audioTranscriptionTrial != rhs.audioTranscriptionTrial { + return false + } return true } } diff --git a/submodules/AvatarNode/Sources/AvatarBadgeView.swift b/submodules/AvatarNode/Sources/AvatarBadgeView.swift index d226f743be..1354f9382b 100644 --- a/submodules/AvatarNode/Sources/AvatarBadgeView.swift +++ b/submodules/AvatarNode/Sources/AvatarBadgeView.swift @@ -245,12 +245,9 @@ public final class AvatarBadgeView: UIImageView { var alpha: CGFloat = 0 backgroundColor.getHue(&hue, saturation: &saturation, brightness: &brightness, alpha: &alpha) - if brightness > 0.5 { - brightness = max(brightness - 0.2, 0) - saturation = min(saturation + 0.2, 1) - } else { - brightness = min(brightness + 0.3, 1) - saturation = max(saturation - 0.2, 0) + if brightness > 0.7 { + brightness = brightness * 0.9 + saturation = min(saturation + 0.1, 1) } return UIColor(hue: hue, saturation: saturation, brightness: brightness, alpha: alpha) diff --git a/submodules/AvatarNode/Sources/AvatarNode.swift b/submodules/AvatarNode/Sources/AvatarNode.swift index 7222c3238e..abcc8a9e55 100644 --- a/submodules/AvatarNode/Sources/AvatarNode.swift +++ b/submodules/AvatarNode/Sources/AvatarNode.swift @@ -18,6 +18,7 @@ import DirectMediaImageCache private let deletedIcon = 
UIImage(bundleImageName: "Avatar/DeletedIcon")?.precomposed() private let phoneIcon = generateTintedImage(image: UIImage(bundleImageName: "Avatar/PhoneIcon"), color: .white) public let savedMessagesIcon = generateTintedImage(image: UIImage(bundleImageName: "Avatar/SavedMessagesIcon"), color: .white) +public let repostStoryIcon = generateTintedImage(image: UIImage(bundleImageName: "Avatar/RepostStoryIcon"), color: .white) private let archivedChatsIcon = UIImage(bundleImageName: "Avatar/ArchiveAvatarIcon")?.precomposed() private let repliesIcon = generateTintedImage(image: UIImage(bundleImageName: "Avatar/RepliesMessagesIcon"), color: .white) @@ -87,6 +88,8 @@ private func calculateColors(context: AccountContext?, explicitColorIndex: Int?, colors = AvatarNode.grayscaleColors } else if case .savedMessagesIcon = icon { colors = AvatarNode.savedMessagesColors + } else if case .repostIcon = icon { + colors = AvatarNode.repostColors } else if case .repliesIcon = icon { colors = AvatarNode.savedMessagesColors } else if case .editAvatarIcon = icon, let theme { @@ -173,6 +176,7 @@ private enum AvatarNodeIcon: Equatable { case editAvatarIcon case deletedIcon case phoneIcon + case repostIcon } public enum AvatarNodeImageOverride: Equatable { @@ -184,6 +188,7 @@ public enum AvatarNodeImageOverride: Equatable { case editAvatarIcon(forceNone: Bool) case deletedIcon case phoneIcon + case repostIcon } public enum AvatarNodeColorOverride { @@ -254,6 +259,10 @@ public final class AvatarNode: ASDisplayNode { UIColor(rgb: 0x2a9ef1), UIColor(rgb: 0x72d5fd) ] + static let repostColors: [UIColor] = [ + UIColor(rgb: 0x34C76F), UIColor(rgb: 0x3DA1FD) + ] + public final class ContentNode: ASDisplayNode { private struct Params: Equatable { let peerId: EnginePeer.Id? 
@@ -457,6 +466,9 @@ public final class AvatarNode: ASDisplayNode { case .savedMessagesIcon: representation = nil icon = .savedMessagesIcon + case .repostIcon: + representation = nil + icon = .repostIcon case .repliesIcon: representation = nil icon = .repliesIcon @@ -621,6 +633,9 @@ public final class AvatarNode: ASDisplayNode { case .savedMessagesIcon: representation = nil icon = .savedMessagesIcon + case .repostIcon: + representation = nil + icon = .repostIcon case .repliesIcon: representation = nil icon = .repliesIcon @@ -781,7 +796,11 @@ public final class AvatarNode: ASDisplayNode { let colorsArray: NSArray = colors.map(\.cgColor) as NSArray var iconColor = UIColor.white + var diagonal = false if let parameters = parameters as? AvatarNodeParameters, parameters.icon != .none { + if case .repostIcon = parameters.icon { + diagonal = true + } if case let .archivedChatsIcon(hiddenByDefault) = parameters.icon, let theme = parameters.theme { if hiddenByDefault { iconColor = theme.chatList.unpinnedArchiveAvatarColor.foregroundColor @@ -796,7 +815,11 @@ public final class AvatarNode: ASDisplayNode { let colorSpace = CGColorSpaceCreateDeviceRGB() let gradient = CGGradient(colorsSpace: colorSpace, colors: colorsArray, locations: &locations)! 
- context.drawLinearGradient(gradient, start: CGPoint(), end: CGPoint(x: 0.0, y: bounds.size.height), options: CGGradientDrawingOptions()) + if diagonal { + context.drawLinearGradient(gradient, start: CGPoint(x: 0.0, y: bounds.size.height), end: CGPoint(x: bounds.size.width, y: 0.0), options: CGGradientDrawingOptions()) + } else { + context.drawLinearGradient(gradient, start: CGPoint(), end: CGPoint(x: 0.0, y: bounds.size.height), options: CGGradientDrawingOptions()) + } context.setBlendMode(.normal) @@ -828,6 +851,15 @@ public final class AvatarNode: ASDisplayNode { if let savedMessagesIcon = savedMessagesIcon { context.draw(savedMessagesIcon.cgImage!, in: CGRect(origin: CGPoint(x: floor((bounds.size.width - savedMessagesIcon.size.width) / 2.0), y: floor((bounds.size.height - savedMessagesIcon.size.height) / 2.0)), size: savedMessagesIcon.size)) } + } else if case .repostIcon = parameters.icon { + let factor = bounds.size.width / 60.0 + context.translateBy(x: bounds.size.width / 2.0, y: bounds.size.height / 2.0) + context.scaleBy(x: factor, y: -factor) + context.translateBy(x: -bounds.size.width / 2.0, y: -bounds.size.height / 2.0) + + if let repostStoryIcon = repostStoryIcon { + context.draw(repostStoryIcon.cgImage!, in: CGRect(origin: CGPoint(x: floor((bounds.size.width - repostStoryIcon.size.width) / 2.0), y: floor((bounds.size.height - repostStoryIcon.size.height) / 2.0)), size: repostStoryIcon.size)) + } } else if case .repliesIcon = parameters.icon { let factor = bounds.size.width / 60.0 context.translateBy(x: bounds.size.width / 2.0, y: bounds.size.height / 2.0) diff --git a/submodules/Camera/Sources/Camera.swift b/submodules/Camera/Sources/Camera.swift index f0bf380219..5ba6baab29 100644 --- a/submodules/Camera/Sources/Camera.swift +++ b/submodules/Camera/Sources/Camera.swift @@ -57,7 +57,7 @@ final class CameraDeviceContext { self.output = CameraOutput(exclusive: exclusive) } - func configure(position: Camera.Position, previewView: 
CameraSimplePreviewView?, audio: Bool, photo: Bool, metadata: Bool) { + func configure(position: Camera.Position, previewView: CameraSimplePreviewView?, audio: Bool, photo: Bool, metadata: Bool, preferWide: Bool = false) { guard let session = self.session else { return } @@ -65,7 +65,7 @@ final class CameraDeviceContext { self.previewView = previewView self.device.configure(for: session, position: position, dual: !exclusive || additional) - self.device.configureDeviceFormat(maxDimensions: self.preferredMaxDimensions, maxFramerate: self.preferredMaxFrameRate) + self.device.configureDeviceFormat(maxDimensions: self.maxDimensions(additional: self.additional, preferWide: preferWide), maxFramerate: self.preferredMaxFrameRate) self.input.configure(for: session, device: self.device, audio: audio) self.output.configure(for: session, device: self.device, input: self.input, previewView: previewView, audio: audio, photo: photo, metadata: metadata) @@ -82,8 +82,8 @@ final class CameraDeviceContext { self.input.invalidate(for: session) } - private var preferredMaxDimensions: CMVideoDimensions { - if self.additional { + private func maxDimensions(additional: Bool, preferWide: Bool) -> CMVideoDimensions { + if additional || preferWide { return CMVideoDimensions(width: 1920, height: 1440) } else { return CMVideoDimensions(width: 1920, height: 1080) @@ -336,7 +336,7 @@ private final class CameraContext { self.additionalDeviceContext = nil self.mainDeviceContext = CameraDeviceContext(session: self.session, exclusive: true, additional: false) - self.mainDeviceContext?.configure(position: self.positionValue, previewView: self.simplePreviewView, audio: self.initialConfiguration.audio, photo: self.initialConfiguration.photo, metadata: self.initialConfiguration.metadata) + self.mainDeviceContext?.configure(position: self.positionValue, previewView: self.simplePreviewView, audio: self.initialConfiguration.audio, photo: self.initialConfiguration.photo, metadata: 
self.initialConfiguration.metadata, preferWide: self.initialConfiguration.preferWide) } self.mainDeviceContext?.output.processSampleBuffer = { [weak self] sampleBuffer, pixelBuffer, connection in guard let self, let mainDeviceContext = self.mainDeviceContext else { @@ -481,25 +481,40 @@ private final class CameraContext { additionalDeviceContext.output.stopRecording() ) |> mapToSignal { main, additional in if case let .finished(mainResult, _, duration, positionChangeTimestamps, _) = main, case let .finished(additionalResult, _, _, _, _) = additional { - var additionalTransitionImage = additionalResult.1 - if let cgImage = additionalResult.1.cgImage { - additionalTransitionImage = UIImage(cgImage: cgImage, scale: 1.0, orientation: .leftMirrored) + var additionalThumbnailImage = additionalResult.thumbnail + if let cgImage = additionalResult.thumbnail.cgImage { + additionalThumbnailImage = UIImage(cgImage: cgImage, scale: 1.0, orientation: .leftMirrored) } - return .single(.finished(mainResult, (additionalResult.0, additionalTransitionImage, true, additionalResult.3), duration, positionChangeTimestamps, CACurrentMediaTime())) + + return .single( + .finished( + main: mainResult, + additional: VideoCaptureResult.Result(path: additionalResult.path, thumbnail: additionalThumbnailImage, isMirrored: true, dimensions: additionalResult.dimensions), + duration: duration, + positionChangeTimestamps: positionChangeTimestamps, + captureTimestamp: CACurrentMediaTime() + ) + ) } else { return .complete() } } } else { - let mirror = self.positionValue == .front + let isMirrored = self.positionValue == .front return mainDeviceContext.output.stopRecording() |> map { result -> VideoCaptureResult in - if case let .finished(mainResult, _, duration, positionChangeTimestamps, time) = result { - var transitionImage = mainResult.1 - if mirror, let cgImage = transitionImage.cgImage { - transitionImage = UIImage(cgImage: cgImage, scale: 1.0, orientation: .leftMirrored) + if case let 
.finished(mainResult, _, duration, positionChangeTimestamps, captureTimestamp) = result { + var thumbnailImage = mainResult.thumbnail + if isMirrored, let cgImage = thumbnailImage.cgImage { + thumbnailImage = UIImage(cgImage: cgImage, scale: 1.0, orientation: .leftMirrored) } - return .finished((mainResult.0, transitionImage, mirror, mainResult.3), nil, duration, positionChangeTimestamps, time) + return .finished( + main: VideoCaptureResult.Result(path: mainResult.path, thumbnail: thumbnailImage, isMirrored: isMirrored, dimensions: mainResult.dimensions), + additional: nil, + duration: duration, + positionChangeTimestamps: positionChangeTimestamps, + captureTimestamp: captureTimestamp + ) } else { return result } @@ -548,8 +563,9 @@ public final class Camera { let photo: Bool let metadata: Bool let preferredFps: Double + let preferWide: Bool - public init(preset: Preset, position: Position, isDualEnabled: Bool = false, audio: Bool, photo: Bool, metadata: Bool, preferredFps: Double) { + public init(preset: Preset, position: Position, isDualEnabled: Bool = false, audio: Bool, photo: Bool, metadata: Bool, preferredFps: Double, preferWide: Bool = false) { self.preset = preset self.position = position self.isDualEnabled = isDualEnabled @@ -557,6 +573,7 @@ public final class Camera { self.photo = photo self.metadata = metadata self.preferredFps = preferredFps + self.preferWide = preferWide } } diff --git a/submodules/Camera/Sources/CameraOutput.swift b/submodules/Camera/Sources/CameraOutput.swift index e312b335f7..80daa60410 100644 --- a/submodules/Camera/Sources/CameraOutput.swift +++ b/submodules/Camera/Sources/CameraOutput.swift @@ -8,7 +8,14 @@ import VideoToolbox import TelegramCore public enum VideoCaptureResult: Equatable { - case finished((String, UIImage, Bool, CGSize), (String, UIImage, Bool, CGSize)?, Double, [(Bool, Double)], Double) + public struct Result { + public let path: String + public let thumbnail: UIImage + public let isMirrored: Bool + public let 
dimensions: CGSize + } + + case finished(main: Result, additional: Result?, duration: Double, positionChangeTimestamps: [(Bool, Double)], captureTimestamp: Double) case failed public static func == (lhs: VideoCaptureResult, rhs: VideoCaptureResult) -> Bool { @@ -19,8 +26,8 @@ public enum VideoCaptureResult: Equatable { } else { return false } - case let .finished(_, _, lhsDuration, lhsChangeTimestamps, lhsTime): - if case let .finished(_, _, rhsDuration, rhsChangeTimestamps, rhsTime) = rhs, lhsDuration == rhsDuration, lhsTime == rhsTime { + case let .finished(_, _, lhsDuration, lhsChangeTimestamps, lhsTimestamp): + if case let .finished(_, _, rhsDuration, rhsChangeTimestamps, rhsTimestamp) = rhs, lhsDuration == rhsDuration, lhsTimestamp == rhsTimestamp { if lhsChangeTimestamps.count != rhsChangeTimestamps.count { return false } @@ -302,10 +309,26 @@ final class CameraOutput: NSObject { let outputFileURL = URL(fileURLWithPath: outputFilePath) let videoRecorder = VideoRecorder(configuration: VideoRecorder.Configuration(videoSettings: videoSettings, audioSettings: audioSettings), orientation: orientation, fileUrl: outputFileURL, completion: { [weak self] result in + guard let self else { + return + } if case let .success(transitionImage, duration, positionChangeTimestamps) = result { - self?.recordingCompletionPipe.putNext(.finished((outputFilePath, transitionImage ?? UIImage(), false, dimensions), nil, duration, positionChangeTimestamps.map { ($0 == .front, $1) }, CACurrentMediaTime())) + self.recordingCompletionPipe.putNext( + .finished( + main: VideoCaptureResult.Result( + path: outputFilePath, + thumbnail: transitionImage ?? 
UIImage(), + isMirrored: false, + dimensions: dimensions + ), + additional: nil, + duration: duration, + positionChangeTimestamps: positionChangeTimestamps.map { ($0 == .front, $1) }, + captureTimestamp: CACurrentMediaTime() + ) + ) } else { - self?.recordingCompletionPipe.putNext(.failed) + self.recordingCompletionPipe.putNext(.failed) } }) diff --git a/submodules/DebugSettingsUI/Sources/DebugController.swift b/submodules/DebugSettingsUI/Sources/DebugController.swift index 09da501fea..06986f7ac3 100644 --- a/submodules/DebugSettingsUI/Sources/DebugController.swift +++ b/submodules/DebugSettingsUI/Sources/DebugController.swift @@ -47,6 +47,7 @@ private enum DebugControllerSection: Int32 { case sticker case logs case logging + case web case experiments case translation case videoExperiments @@ -86,6 +87,7 @@ private enum DebugControllerEntry: ItemListNodeEntry { case resetCacheIndex case reindexCache case resetBiometricsData(PresentationTheme) + case webViewInspection(Bool) case resetWebViewCache(PresentationTheme) case optimizeDatabase(PresentationTheme) case photoPreview(PresentationTheme, Bool) @@ -121,9 +123,11 @@ private enum DebugControllerEntry: ItemListNodeEntry { return DebugControllerSection.logs.rawValue case .logToFile, .logToConsole, .redactSensitiveData: return DebugControllerSection.logging.rawValue + case .webViewInspection, .resetWebViewCache: + return DebugControllerSection.web.rawValue case .keepChatNavigationStack, .skipReadHistory, .unidirectionalSwipeToReply, .dustEffect, .callUIV2, .crashOnSlowQueries, .crashOnMemoryPressure: return DebugControllerSection.experiments.rawValue - case .clearTips, .resetNotifications, .crash, .resetData, .resetDatabase, .resetDatabaseAndCache, .resetHoles, .reindexUnread, .resetCacheIndex, .reindexCache, .resetBiometricsData, .resetWebViewCache, .optimizeDatabase, .photoPreview, .knockoutWallpaper, .storiesExperiment, .storiesJpegExperiment, .playlistPlayback, .enableQuickReactionSwitch, .voiceConference, 
.experimentalCompatibility, .enableDebugDataDisplay, .acceleratedStickers, .inlineForums, .localTranscription, .enableReactionOverrides, .restorePurchases: + case .clearTips, .resetNotifications, .crash, .resetData, .resetDatabase, .resetDatabaseAndCache, .resetHoles, .reindexUnread, .resetCacheIndex, .reindexCache, .resetBiometricsData, .optimizeDatabase, .photoPreview, .knockoutWallpaper, .storiesExperiment, .storiesJpegExperiment, .playlistPlayback, .enableQuickReactionSwitch, .voiceConference, .experimentalCompatibility, .enableDebugDataDisplay, .acceleratedStickers, .inlineForums, .localTranscription, .enableReactionOverrides, .restorePurchases: return DebugControllerSection.experiments.rawValue case .logTranslationRecognition, .resetTranslationStates: return DebugControllerSection.translation.rawValue @@ -164,43 +168,45 @@ private enum DebugControllerEntry: ItemListNodeEntry { return 11 case .redactSensitiveData: return 12 - case .keepChatNavigationStack: - return 14 - case .skipReadHistory: - return 15 - case .unidirectionalSwipeToReply: - return 16 - case .dustEffect: - return 17 - case .callUIV2: - return 18 - case .crashOnSlowQueries: - return 19 - case .crashOnMemoryPressure: - return 20 - case .clearTips: - return 21 - case .resetNotifications: - return 22 - case .crash: - return 23 - case .resetData: - return 24 - case .resetDatabase: - return 25 - case .resetDatabaseAndCache: - return 26 - case .resetHoles: - return 27 - case .reindexUnread: - return 28 - case .resetCacheIndex: - return 29 - case .reindexCache: - return 30 - case .resetBiometricsData: - return 31 + case .webViewInspection: + return 13 case .resetWebViewCache: + return 14 + case .keepChatNavigationStack: + return 15 + case .skipReadHistory: + return 16 + case .unidirectionalSwipeToReply: + return 17 + case .dustEffect: + return 18 + case .callUIV2: + return 19 + case .crashOnSlowQueries: + return 20 + case .crashOnMemoryPressure: + return 21 + case .clearTips: + return 22 + case 
.resetNotifications: + return 23 + case .crash: + return 24 + case .resetData: + return 25 + case .resetDatabase: + return 26 + case .resetDatabaseAndCache: + return 27 + case .resetHoles: + return 28 + case .reindexUnread: + return 29 + case .resetCacheIndex: + return 30 + case .reindexCache: + return 31 + case .resetBiometricsData: return 32 case .optimizeDatabase: return 33 @@ -1163,6 +1169,14 @@ private enum DebugControllerEntry: ItemListNodeEntry { return settings.withUpdatedBiometricsDomainState(nil).withUpdatedShareBiometricsDomainState(nil) }).start() }) + case let .webViewInspection(value): + return ItemListSwitchItem(presentationData: presentationData, title: "Allow Web View Inspection", value: value, sectionId: self.section, style: .blocks, updated: { value in + let _ = updateExperimentalUISettingsInteractively(accountManager: arguments.sharedContext.accountManager, { settings in + var settings = settings + settings.allowWebViewInspection = value + return settings + }).start() + }) case .resetWebViewCache: return ItemListActionItem(presentationData: presentationData, title: "Clear Web View Cache", kind: .destructive, alignment: .natural, sectionId: self.section, style: .blocks, action: { WKWebsiteDataStore.default().removeData(ofTypes: [WKWebsiteDataTypeDiskCache, WKWebsiteDataTypeMemoryCache], modifiedSince: Date(timeIntervalSince1970: 0), completionHandler:{ }) @@ -1404,6 +1418,9 @@ private func debugControllerEntries(sharedContext: SharedAccountContext, present entries.append(.redactSensitiveData(presentationData.theme, loggingSettings.redactSensitiveData)) if isMainApp { + entries.append(.webViewInspection(experimentalSettings.allowWebViewInspection)) + entries.append(.resetWebViewCache(presentationData.theme)) + entries.append(.keepChatNavigationStack(presentationData.theme, experimentalSettings.keepChatNavigationStack)) #if DEBUG entries.append(.skipReadHistory(presentationData.theme, experimentalSettings.skipReadHistory)) @@ -1427,7 +1444,6 @@ 
private func debugControllerEntries(sharedContext: SharedAccountContext, present entries.append(.reindexUnread(presentationData.theme)) entries.append(.resetCacheIndex) entries.append(.reindexCache) - entries.append(.resetWebViewCache(presentationData.theme)) } entries.append(.optimizeDatabase(presentationData.theme)) if isMainApp { diff --git a/submodules/Display/Source/ContextControllerSourceNode.swift b/submodules/Display/Source/ContextControllerSourceNode.swift index b01479bfe5..4dc6d55fdc 100644 --- a/submodules/Display/Source/ContextControllerSourceNode.swift +++ b/submodules/Display/Source/ContextControllerSourceNode.swift @@ -153,129 +153,6 @@ open class ContextControllerSourceNode: ContextReferenceContentNode { } } -/*open class ContextControllerSourceNode: ASDisplayNode { - private var viewImpl: ContextControllerSourceView { - return self.view as! ContextControllerSourceView - } - - public var contextGesture: ContextGesture? { - if self.isNodeLoaded { - return self.viewImpl.contextGesture - } else { - return nil - } - } - - public var isGestureEnabled: Bool = true { - didSet { - if self.isNodeLoaded { - self.viewImpl.isGestureEnabled = self.isGestureEnabled - } - } - } - - public var beginDelay: Double = 0.12 { - didSet { - if self.isNodeLoaded { - self.viewImpl.beginDelay = self.beginDelay - } - } - } - - public var animateScale: Bool = true { - didSet { - if self.isNodeLoaded { - self.viewImpl.animateScale = self.animateScale - } - } - } - - public var activated: ((ContextGesture, CGPoint) -> Void)? { - didSet { - if self.isNodeLoaded { - self.viewImpl.activated = self.activated - } - } - } - - public var shouldBegin: ((CGPoint) -> Bool)? { - didSet { - if self.isNodeLoaded { - self.viewImpl.shouldBegin = self.shouldBegin - } - } - } - - public var customActivationProgress: ((CGFloat, ContextGestureTransition) -> Void)? 
{ - didSet { - if self.isNodeLoaded { - self.viewImpl.customActivationProgress = self.customActivationProgress - } - } - } - - public weak var additionalActivationProgressLayer: CALayer? { - didSet { - if self.isNodeLoaded { - self.viewImpl.additionalActivationProgressLayer = self.additionalActivationProgressLayer - } - } - } - - public var targetNodeForActivationProgress: ASDisplayNode? { - didSet { - if self.isNodeLoaded { - self.viewImpl.targetNodeForActivationProgress = self.targetNodeForActivationProgress - } - } - } - - public var targetViewForActivationProgress: UIView? { - didSet { - if self.isNodeLoaded { - self.viewImpl.targetViewForActivationProgress = self.targetViewForActivationProgress - } - } - } - - public var targetNodeForActivationProgressContentRect: CGRect? { - didSet { - if self.isNodeLoaded { - self.viewImpl.targetNodeForActivationProgressContentRect = self.targetNodeForActivationProgressContentRect - } - } - } - - override public init() { - super.init() - - self.setViewBlock({ - return ContextControllerSourceView(frame: CGRect()) - }) - } - - override open func didLoad() { - super.didLoad() - - self.viewImpl.isGestureEnabled = self.isGestureEnabled - self.viewImpl.beginDelay = self.beginDelay - self.viewImpl.animateScale = self.animateScale - self.viewImpl.activated = self.activated - self.viewImpl.shouldBegin = self.shouldBegin - self.viewImpl.customActivationProgress = self.customActivationProgress - self.viewImpl.additionalActivationProgressLayer = self.additionalActivationProgressLayer - self.viewImpl.targetNodeForActivationProgress = self.targetNodeForActivationProgress - self.viewImpl.targetViewForActivationProgress = self.targetViewForActivationProgress - self.viewImpl.targetNodeForActivationProgressContentRect = self.targetNodeForActivationProgressContentRect - } - - public func cancelGesture() { - if self.isNodeLoaded { - self.viewImpl.cancelGesture() - } - } -}*/ - open class ContextControllerSourceView: UIView { public private(set) 
var contextGesture: ContextGesture? diff --git a/submodules/DrawingUI/BUILD b/submodules/DrawingUI/BUILD index 700800721d..988ae6f3fd 100644 --- a/submodules/DrawingUI/BUILD +++ b/submodules/DrawingUI/BUILD @@ -104,6 +104,7 @@ swift_library( "//submodules/TelegramUI/Components/CameraButtonComponent", "//submodules/ReactionSelectionNode", "//submodules/TelegramUI/Components/EntityKeyboard", + "//submodules/Camera", ], visibility = [ "//visibility:public", diff --git a/submodules/DrawingUI/Sources/DrawingBubbleEntity.swift b/submodules/DrawingUI/Sources/DrawingBubbleEntityView.swift similarity index 100% rename from submodules/DrawingUI/Sources/DrawingBubbleEntity.swift rename to submodules/DrawingUI/Sources/DrawingBubbleEntityView.swift diff --git a/submodules/DrawingUI/Sources/DrawingEntitiesView.swift b/submodules/DrawingUI/Sources/DrawingEntitiesView.swift index 5c5d889f37..c2f464aa5c 100644 --- a/submodules/DrawingUI/Sources/DrawingEntitiesView.swift +++ b/submodules/DrawingUI/Sources/DrawingEntitiesView.swift @@ -15,7 +15,11 @@ private func makeEntityView(context: AccountContext, entity: DrawingEntity) -> D } else if let entity = entity as? DrawingSimpleShapeEntity { return DrawingSimpleShapeEntityView(context: context, entity: entity) } else if let entity = entity as? DrawingStickerEntity { - return DrawingStickerEntityView(context: context, entity: entity) + if case let .file(_, type) = entity.content, case .reaction = type { + return DrawingReactionEntityView(context: context, entity: entity) + } else { + return DrawingStickerEntityView(context: context, entity: entity) + } } else if let entity = entity as? DrawingTextEntity { return DrawingTextEntityView(context: context, entity: entity) } else if let entity = entity as? 
DrawingVectorEntity { diff --git a/submodules/DrawingUI/Sources/DrawingLocationEntity.swift b/submodules/DrawingUI/Sources/DrawingLocationEntityView.swift similarity index 100% rename from submodules/DrawingUI/Sources/DrawingLocationEntity.swift rename to submodules/DrawingUI/Sources/DrawingLocationEntityView.swift diff --git a/submodules/DrawingUI/Sources/DrawingMediaEntity.swift b/submodules/DrawingUI/Sources/DrawingMediaEntityView.swift similarity index 100% rename from submodules/DrawingUI/Sources/DrawingMediaEntity.swift rename to submodules/DrawingUI/Sources/DrawingMediaEntityView.swift diff --git a/submodules/DrawingUI/Sources/DrawingReactionView.swift b/submodules/DrawingUI/Sources/DrawingReactionView.swift new file mode 100644 index 0000000000..375f941e81 --- /dev/null +++ b/submodules/DrawingUI/Sources/DrawingReactionView.swift @@ -0,0 +1,347 @@ +import Foundation +import UIKit +import AsyncDisplayKit +import AVFoundation +import Display +import SwiftSignalKit +import TelegramCore +import AnimatedStickerNode +import TelegramAnimatedStickerNode +import StickerResources +import AccountContext +import MediaEditor +import TelegramPresentationData +import ReactionSelectionNode +import UndoUI +import EntityKeyboard +import ComponentFlow + +public class DrawingReactionEntityView: DrawingStickerEntityView { + private var backgroundView: UIImageView + private var outlineView: UIImageView + + override init(context: AccountContext, entity: DrawingStickerEntity) { + let backgroundView = UIImageView(image: UIImage(bundleImageName: "Stories/ReactionShadow")) + backgroundView.layer.zPosition = -1000.0 + + let outlineView = UIImageView(image: UIImage(bundleImageName: "Stories/ReactionOutline")) + outlineView.tintColor = .white + backgroundView.addSubview(outlineView) + + self.backgroundView = backgroundView + self.outlineView = outlineView + + super.init(context: context, entity: entity) + + self.insertSubview(backgroundView, at: 0) + + self.setup() + } + + required 
init?(coder: NSCoder) { + fatalError("init(coder:) has not been implemented") + } + + override var isReaction: Bool { + return true + } + + override func animateInsertion() { + super.animateInsertion() + + Queue.mainQueue().after(0.2) { + let _ = self.selectedTapAction() + } + } + + override func onSelection() { + self.presentReactionSelection() + } + + override func onDeselection() { + let _ = self.dismissReactionSelection() + } + + public override func update(animated: Bool) { + super.update(animated: animated) + + if case let .file(_, type) = self.stickerEntity.content, case let .reaction(_, style) = type { + switch style { + case .white: + self.outlineView.tintColor = .white + case .black: + self.outlineView.tintColor = UIColor(rgb: 0x000000, alpha: 0.5) + } + } + } + + override func updateMirroring(animated: Bool) { + let staticTransform = CATransform3DMakeScale(self.stickerEntity.mirrored ? -1.0 : 1.0, 1.0, 1.0) + if animated { + let isCurrentlyMirrored = ((self.backgroundView.layer.value(forKeyPath: "transform.scale.y") as? NSNumber)?.floatValue ?? 
1.0) < 0.0 + var animationSourceTransform = CATransform3DIdentity + var animationTargetTransform = CATransform3DIdentity + if isCurrentlyMirrored { + animationSourceTransform = CATransform3DRotate(animationSourceTransform, .pi, 0.0, 1.0, 0.0) + animationSourceTransform.m34 = -1.0 / self.imageNode.frame.width + } + if self.stickerEntity.mirrored { + animationTargetTransform = CATransform3DRotate(animationTargetTransform, .pi, 0.0, 1.0, 0.0) + animationTargetTransform.m34 = -1.0 / self.imageNode.frame.width + } + self.backgroundView.layer.transform = animationSourceTransform + + let values = [1.0, 0.01, 1.0] + let keyTimes = [0.0, 0.5, 1.0] + self.animationNode?.layer.animateKeyframes(values: values as [NSNumber], keyTimes: keyTimes as [NSNumber], duration: 0.25, keyPath: "transform.scale.x", timingFunction: CAMediaTimingFunctionName.linear.rawValue) + + UIView.animate(withDuration: 0.25, animations: { + self.backgroundView.layer.transform = animationTargetTransform + }, completion: { finished in + self.backgroundView.layer.transform = staticTransform + }) + } else { + CATransaction.begin() + CATransaction.setDisableActions(true) + self.backgroundView.layer.transform = staticTransform + CATransaction.commit() + } + } + + private weak var reactionContextNode: ReactionContextNode? 
+ fileprivate func presentReactionSelection() { + guard let containerView = self.containerView, let superview = containerView.superview?.superview?.superview?.superview, self.reactionContextNode == nil else { + return + } + + let availableSize = superview.frame.size + let reactionItems = containerView.getAvailableReactions() + + let insets = UIEdgeInsets(top: 64.0, left: 0.0, bottom: 64.0, right: 0.0) + + let layout: (ContainedViewLayoutTransition) -> Void = { [weak self, weak superview] transition in + guard let self, let superview, let reactionContextNode = self.reactionContextNode else { + return + } + let anchorRect = self.convert(self.bounds, to: superview).offsetBy(dx: 0.0, dy: -20.0) + reactionContextNode.updateLayout(size: availableSize, insets: insets, anchorRect: anchorRect, centerAligned: true, isCoveredByInput: false, isAnimatingOut: false, transition: transition) + } + + let reactionContextNodeTransition: Transition = .immediate + let reactionContextNode: ReactionContextNode + reactionContextNode = ReactionContextNode( + context: self.context, + animationCache: self.context.animationCache, + presentationData: self.context.sharedContext.currentPresentationData.with({ $0 }).withUpdated(theme: defaultDarkPresentationTheme), + items: reactionItems.map(ReactionContextItem.reaction), + selectedItems: Set(), + title: nil, + getEmojiContent: { [weak self] animationCache, animationRenderer in + guard let self else { + preconditionFailure() + } + + let mappedReactionItems: [EmojiComponentReactionItem] = reactionItems.map { reaction -> EmojiComponentReactionItem in + return EmojiComponentReactionItem(reaction: reaction.reaction.rawValue, file: reaction.stillAnimation) + } + + return EmojiPagerContentComponent.emojiInputData( + context: self.context, + animationCache: animationCache, + animationRenderer: animationRenderer, + isStandalone: false, + subject: .reaction, + hasTrending: false, + topReactionItems: mappedReactionItems, + areUnicodeEmojiEnabled: false, + 
areCustomEmojiEnabled: true, + chatPeerId: self.context.account.peerId, + selectedItems: Set(), + premiumIfSavedMessages: false + ) + }, + isExpandedUpdated: { transition in + layout(transition) + }, + requestLayout: { transition in + layout(transition) + }, + requestUpdateOverlayWantsToBeBelowKeyboard: { transition in + layout(transition) + } + ) + reactionContextNode.displayTail = true + reactionContextNode.forceTailToRight = true + reactionContextNode.forceDark = true + self.reactionContextNode = reactionContextNode + + reactionContextNode.reactionSelected = { [weak self] updateReaction, _ in + guard let self else { + return + } + + let continueWithAnimationFile: (TelegramMediaFile) -> Void = { [weak self] animation in + guard let self else { + return + } + + if case let .file(_, type) = self.stickerEntity.content, case let .reaction(_, style) = type { + self.stickerEntity.content = .file(animation, .reaction(updateReaction.reaction, style)) + } + + var nodeToTransitionOut: ASDisplayNode? + if let animationNode = self.animationNode { + nodeToTransitionOut = animationNode + } else if !self.imageNode.isHidden { + nodeToTransitionOut = self.imageNode + } + + if let nodeToTransitionOut, let snapshot = nodeToTransitionOut.view.snapshotView(afterScreenUpdates: false) { + snapshot.frame = nodeToTransitionOut.frame + snapshot.layer.transform = nodeToTransitionOut.transform + snapshot.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2, removeOnCompletion: false, completion: { _ in + snapshot.removeFromSuperview() + }) + snapshot.layer.animateScale(from: 1.0, to: 0.1, duration: 0.2) + self.addSubview(snapshot) + } + + self.animationNode?.removeFromSupernode() + self.animationNode = nil + self.didSetUpAnimationNode = false + self.isPlaying = false + self.currentSize = nil + + self.setup() + self.applyVisibility() + self.setNeedsLayout() + + let nodeToTransitionIn: ASDisplayNode? 
+ if let animationNode = self.animationNode { + nodeToTransitionIn = animationNode + } else { + nodeToTransitionIn = self.imageNode + } + + if let nodeToTransitionIn { + nodeToTransitionIn.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2) + nodeToTransitionIn.layer.animateScale(from: 0.1, to: 1.0, duration: 0.2) + } + + let _ = self.dismissReactionSelection() + } + + switch updateReaction { + case .builtin: + let _ = (self.context.engine.stickers.availableReactions() + |> take(1) + |> deliverOnMainQueue).start(next: { availableReactions in + guard let availableReactions else { + return + } + var animation: TelegramMediaFile? + for reaction in availableReactions.reactions { + if reaction.value == updateReaction.reaction { + animation = reaction.selectAnimation + break + } + } + if let animation { + continueWithAnimationFile(animation) + } + }) + case let .custom(fileId, file): + if let file { + continueWithAnimationFile(file) + } else { + let _ = (self.context.engine.stickers.resolveInlineStickers(fileIds: [fileId]) + |> deliverOnMainQueue).start(next: { files in + if let itemFile = files[fileId] { + continueWithAnimationFile(itemFile) + } + }) + } + } + } + + reactionContextNode.premiumReactionsSelected = { [weak self] file in + guard let self else { + return + } + + if let file { + let context = self.context + let presentationData = context.sharedContext.currentPresentationData.with { $0 } + + let controller = UndoOverlayController(presentationData: presentationData, content: .sticker(context: context, file: file, loop: true, title: nil, text: presentationData.strings.Story_Editor_TooltipPremiumReaction, undoText: nil, customAction: nil), elevatedLayout: true, animateInAsReplacement: false, blurred: true, action: { [weak self] action in + if case .info = action, let self { + let controller = context.sharedContext.makePremiumIntroController(context: context, source: .storiesExpirationDurations, forceDark: true, dismissed: nil) + 
self.containerView?.push(controller) + } + return false + }) + self.containerView?.present(controller) + } else { + let controller = self.context.sharedContext.makePremiumIntroController(context: self.context, source: .storiesExpirationDurations, forceDark: true, dismissed: nil) + self.containerView?.push(controller) + } + } + + let anchorRect = self.convert(self.bounds, to: superview).offsetBy(dx: 0.0, dy: -20.0) + reactionContextNodeTransition.setFrame(view: reactionContextNode.view, frame: CGRect(origin: CGPoint(), size: availableSize)) + reactionContextNode.updateLayout(size: availableSize, insets: insets, anchorRect: anchorRect, centerAligned: true, isCoveredByInput: false, isAnimatingOut: false, transition: reactionContextNodeTransition.containedViewLayoutTransition) + + superview.addSubnode(reactionContextNode) + reactionContextNode.animateIn(from: anchorRect) + } + + fileprivate func dismissReactionSelection() -> Bool { + if let reactionContextNode = self.reactionContextNode { + reactionContextNode.animateOut(to: nil, animatingOutToReaction: false) + self.reactionContextNode = nil + + Queue.mainQueue().after(0.35) { + reactionContextNode.view.removeFromSuperview() + } + + return false + } else { + return true + } + } + + override func selectedTapAction() -> Bool { + if case let .file(file, type) = self.stickerEntity.content, case let .reaction(reaction, style) = type { + guard self.reactionContextNode == nil else { + let values = [self.entity.scale, self.entity.scale * 0.93, self.entity.scale] + let keyTimes = [0.0, 0.33, 1.0] + self.layer.animateKeyframes(values: values as [NSNumber], keyTimes: keyTimes as [NSNumber], duration: 0.3, keyPath: "transform.scale") + + let updatedStyle: DrawingStickerEntity.Content.FileType.ReactionStyle + switch style { + case .white: + updatedStyle = .black + case .black: + updatedStyle = .white + } + self.stickerEntity.content = .file(file, .reaction(reaction, updatedStyle)) + + self.update(animated: false) + + return true + 
} + + self.presentReactionSelection() + + return true + } else { + return super.selectedTapAction() + } + } + + override func innerLayoutSubview(boundingSize: CGSize) -> CGSize { + self.backgroundView.frame = CGRect(origin: .zero, size: boundingSize).insetBy(dx: -5.0, dy: -5.0) + self.outlineView.frame = backgroundView.bounds + return CGSize(width: floor(boundingSize.width * 0.63), height: floor(boundingSize.width * 0.63)) + } +} diff --git a/submodules/DrawingUI/Sources/DrawingSimpleShapeEntity.swift b/submodules/DrawingUI/Sources/DrawingSimpleShapeEntityView.swift similarity index 100% rename from submodules/DrawingUI/Sources/DrawingSimpleShapeEntity.swift rename to submodules/DrawingUI/Sources/DrawingSimpleShapeEntityView.swift diff --git a/submodules/DrawingUI/Sources/DrawingStickerEntity.swift b/submodules/DrawingUI/Sources/DrawingStickerEntityView.swift similarity index 70% rename from submodules/DrawingUI/Sources/DrawingStickerEntity.swift rename to submodules/DrawingUI/Sources/DrawingStickerEntityView.swift index 0aafe33323..020eeda64c 100644 --- a/submodules/DrawingUI/Sources/DrawingStickerEntity.swift +++ b/submodules/DrawingUI/Sources/DrawingStickerEntityView.swift @@ -13,54 +13,36 @@ import MediaEditor import UniversalMediaPlayer import TelegramPresentationData import TelegramUniversalVideoContent -import ReactionSelectionNode -import UndoUI -import EntityKeyboard -import ComponentFlow -public final class DrawingStickerEntityView: DrawingEntityView { - private var stickerEntity: DrawingStickerEntity { +public class DrawingStickerEntityView: DrawingEntityView { + var stickerEntity: DrawingStickerEntity { return self.entity as! DrawingStickerEntity } - var started: ((Double) -> Void)? + let imageNode: TransformImageNode + var animationNode: DefaultAnimatedStickerNodeImpl? + var videoNode: UniversalVideoNode? + var cameraPreviewView: UIView? 
- public var updated: () -> Void = {} + let progressDisposable = MetaDisposable() + let progressLayer = CAShapeLayer() - private var currentSize: CGSize? - - private var backgroundView: UIImageView? - private var outlineView: UIImageView? - - private let imageNode: TransformImageNode - private var animationNode: DefaultAnimatedStickerNodeImpl? - private var videoNode: UniversalVideoNode? - - private var didSetUpAnimationNode = false + var didSetUpAnimationNode = false private let stickerFetchedDisposable = MetaDisposable() private let cachedDisposable = MetaDisposable() private var isVisible = true - private var isPlaying = false + var isPlaying = false + var started: ((Double) -> Void)? + + var currentSize: CGSize? + public var updated: () -> Void = {} init(context: AccountContext, entity: DrawingStickerEntity) { self.imageNode = TransformImageNode() super.init(context: context, entity: entity) - if case .file(_, .reaction) = entity.content { - let backgroundView = UIImageView(image: UIImage(bundleImageName: "Stories/ReactionShadow")) - backgroundView.layer.zPosition = -1000.0 - - let outlineView = UIImageView(image: UIImage(bundleImageName: "Stories/ReactionOutline")) - outlineView.tintColor = .white - backgroundView.addSubview(outlineView) - - self.addSubview(backgroundView) - self.backgroundView = backgroundView - self.outlineView = outlineView - } - self.addSubview(self.imageNode.view) self.setup() @@ -73,6 +55,7 @@ public final class DrawingStickerEntityView: DrawingEntityView { deinit { self.stickerFetchedDisposable.dispose() self.cachedDisposable.dispose() + self.progressDisposable.dispose() } private var file: TelegramMediaFile? { @@ -103,7 +86,6 @@ public final class DrawingStickerEntityView: DrawingEntityView { return image } - private var video: TelegramMediaFile? 
{ if case let .video(file) = self.stickerEntity.content { return file @@ -143,7 +125,7 @@ public final class DrawingStickerEntityView: DrawingEntityView { self.animationNode?.dynamicColor = color } - private func setup() { + func setup() { if let file = self.file { if let dimensions = file.dimensions { if file.isAnimatedSticker || file.isVideoSticker || file.mimeType == "video/webm" { @@ -308,7 +290,7 @@ public final class DrawingStickerEntityView: DrawingEntityView { self.applyVisibility() } - private func applyVisibility() { + func applyVisibility() { let isPlaying = self.isVisible if self.isPlaying != isPlaying { self.isPlaying = isPlaying @@ -331,6 +313,41 @@ public final class DrawingStickerEntityView: DrawingEntityView { } } + public func setupCameraPreviewView(_ cameraPreviewView: UIView, progress: Signal) { + self.addSubview(cameraPreviewView) + self.cameraPreviewView = cameraPreviewView + + self.progressLayer.transform = CATransform3DMakeRotation(-.pi / 2.0, 0.0, 0.0, 1.0) + self.progressLayer.fillColor = UIColor.clear.cgColor + self.progressLayer.strokeColor = UIColor(rgb: 0xffffff, alpha: 0.5).cgColor + self.progressLayer.lineWidth = 3.0 + self.progressLayer.lineCap = .round + self.progressLayer.strokeEnd = 0.0 + self.layer.addSublayer(self.progressLayer) + + self.setNeedsLayout() + + self.progressDisposable.set((progress + |> deliverOnMainQueue).startStrict(next: { [weak self] progress in + if let self { + self.progressLayer.strokeEnd = CGFloat(progress) + } + })) + } + + public func invalidateCameraPreviewView() { + guard let cameraPreviewView = self.cameraPreviewView else { + return + } + Queue.mainQueue().after(0.3, { + self.cameraPreviewView = nil + cameraPreviewView.removeFromSuperview() + }) + self.progressLayer.removeFromSuperlayer() + self.progressLayer.path = nil + self.progressDisposable.set(nil) + } + private var didApplyVisibility = false public override func layoutSubviews() { super.layoutSubviews() @@ -341,14 +358,8 @@ public final class 
DrawingStickerEntityView: DrawingEntityView { self.currentSize = size let sideSize: CGFloat = max(size.width, size.height) - var boundingSize = CGSize(width: sideSize, height: sideSize) - - if let backgroundView = self.backgroundView, let outlineView = self.outlineView { - backgroundView.frame = CGRect(origin: .zero, size: boundingSize).insetBy(dx: -5.0, dy: -5.0) - outlineView.frame = backgroundView.bounds - boundingSize = CGSize(width: floor(sideSize * 0.63), height: floor(sideSize * 0.63)) - } - + let boundingSize = self.innerLayoutSubview(boundingSize: CGSize(width: sideSize, height: sideSize)) + let imageSize = self.dimensions.aspectFitted(boundingSize) let imageFrame = CGRect(origin: CGPoint(x: floor((size.width - imageSize.width) / 2.0), y: (size.height - imageSize.height) / 2.0), size: imageSize) @@ -373,271 +384,32 @@ public final class DrawingStickerEntityView: DrawingEntityView { videoNode.updateLayout(size: imageSize, transition: .immediate) } + if let cameraPreviewView = self.cameraPreviewView { + cameraPreviewView.layer.cornerRadius = imageSize.width / 2.0 + cameraPreviewView.frame = CGRect(origin: CGPoint(x: floor((size.width - imageSize.width) * 0.5), y: floor((size.height - imageSize.height) * 0.5)), size: imageSize) + self.progressLayer.frame = cameraPreviewView.frame + + if self.progressLayer.path == nil { + self.progressLayer.path = CGPath(ellipseIn: cameraPreviewView.frame.insetBy(dx: 6.0, dy: 6.0), transform: nil) + } + } + self.update(animated: false) } } - private var isReaction: Bool { - if case let .file(_, type) = self.stickerEntity.content, case .reaction = type { - return true - } else { - return false - } - } - - override func animateInsertion() { - super.animateInsertion() - - if self.isReaction { - Queue.mainQueue().after(0.2) { - let _ = self.selectedTapAction() - } - } - } - - override func onSelection() { - if self.isReaction { - self.presentReactionSelection() - } + var isReaction: Bool { + return false } func onDeselection() { - 
if self.isReaction { - let _ = self.dismissReactionSelection() - } + } - private weak var reactionContextNode: ReactionContextNode? - fileprivate func presentReactionSelection() { - guard let containerView = self.containerView, let superview = containerView.superview?.superview?.superview?.superview, self.reactionContextNode == nil else { - return - } - - let availableSize = superview.frame.size - let reactionItems = containerView.getAvailableReactions() - - let insets = UIEdgeInsets(top: 64.0, left: 0.0, bottom: 64.0, right: 0.0) - - let layout: (ContainedViewLayoutTransition) -> Void = { [weak self, weak superview] transition in - guard let self, let superview, let reactionContextNode = self.reactionContextNode else { - return - } - let anchorRect = self.convert(self.bounds, to: superview).offsetBy(dx: 0.0, dy: -20.0) - reactionContextNode.updateLayout(size: availableSize, insets: insets, anchorRect: anchorRect, centerAligned: true, isCoveredByInput: false, isAnimatingOut: false, transition: transition) - } - - let reactionContextNodeTransition: Transition = .immediate - let reactionContextNode: ReactionContextNode - reactionContextNode = ReactionContextNode( - context: self.context, - animationCache: self.context.animationCache, - presentationData: self.context.sharedContext.currentPresentationData.with({ $0 }).withUpdated(theme: defaultDarkPresentationTheme), - items: reactionItems.map(ReactionContextItem.reaction), - selectedItems: Set(), - title: nil, - getEmojiContent: { [weak self] animationCache, animationRenderer in - guard let self else { - preconditionFailure() - } - - let mappedReactionItems: [EmojiComponentReactionItem] = reactionItems.map { reaction -> EmojiComponentReactionItem in - return EmojiComponentReactionItem(reaction: reaction.reaction.rawValue, file: reaction.stillAnimation) - } - - return EmojiPagerContentComponent.emojiInputData( - context: self.context, - animationCache: animationCache, - animationRenderer: animationRenderer, - 
isStandalone: false, - subject: .reaction, - hasTrending: false, - topReactionItems: mappedReactionItems, - areUnicodeEmojiEnabled: false, - areCustomEmojiEnabled: true, - chatPeerId: self.context.account.peerId, - selectedItems: Set(), - premiumIfSavedMessages: false - ) - }, - isExpandedUpdated: { transition in - layout(transition) - }, - requestLayout: { transition in - layout(transition) - }, - requestUpdateOverlayWantsToBeBelowKeyboard: { transition in - layout(transition) - } - ) - reactionContextNode.displayTail = true - reactionContextNode.forceTailToRight = true - reactionContextNode.forceDark = true - self.reactionContextNode = reactionContextNode - - reactionContextNode.reactionSelected = { [weak self] updateReaction, _ in - guard let self else { - return - } - - let continueWithAnimationFile: (TelegramMediaFile) -> Void = { [weak self] animation in - guard let self else { - return - } - - if case let .file(_, type) = self.stickerEntity.content, case let .reaction(_, style) = type { - self.stickerEntity.content = .file(animation, .reaction(updateReaction.reaction, style)) - } - - var nodeToTransitionOut: ASDisplayNode? 
- if let animationNode = self.animationNode { - nodeToTransitionOut = animationNode - } else if !self.imageNode.isHidden { - nodeToTransitionOut = self.imageNode - } - - if let nodeToTransitionOut, let snapshot = nodeToTransitionOut.view.snapshotView(afterScreenUpdates: false) { - snapshot.frame = nodeToTransitionOut.frame - snapshot.layer.transform = nodeToTransitionOut.transform - snapshot.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2, removeOnCompletion: false, completion: { _ in - snapshot.removeFromSuperview() - }) - snapshot.layer.animateScale(from: 1.0, to: 0.1, duration: 0.2) - self.addSubview(snapshot) - } - - self.animationNode?.removeFromSupernode() - self.animationNode = nil - self.didSetUpAnimationNode = false - self.isPlaying = false - self.currentSize = nil - - self.setup() - self.applyVisibility() - self.setNeedsLayout() - - let nodeToTransitionIn: ASDisplayNode? - if let animationNode = self.animationNode { - nodeToTransitionIn = animationNode - } else { - nodeToTransitionIn = self.imageNode - } - - if let nodeToTransitionIn { - nodeToTransitionIn.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2) - nodeToTransitionIn.layer.animateScale(from: 0.1, to: 1.0, duration: 0.2) - } - - let _ = self.dismissReactionSelection() - } - - switch updateReaction { - case .builtin: - let _ = (self.context.engine.stickers.availableReactions() - |> take(1) - |> deliverOnMainQueue).start(next: { availableReactions in - guard let availableReactions else { - return - } - var animation: TelegramMediaFile? 
- for reaction in availableReactions.reactions { - if reaction.value == updateReaction.reaction { - animation = reaction.selectAnimation - break - } - } - if let animation { - continueWithAnimationFile(animation) - } - }) - case let .custom(fileId, file): - if let file { - continueWithAnimationFile(file) - } else { - let _ = (self.context.engine.stickers.resolveInlineStickers(fileIds: [fileId]) - |> deliverOnMainQueue).start(next: { files in - if let itemFile = files[fileId] { - continueWithAnimationFile(itemFile) - } - }) - } - } - } - - reactionContextNode.premiumReactionsSelected = { [weak self] file in - guard let self else { - return - } - - if let file { - let context = self.context - let presentationData = context.sharedContext.currentPresentationData.with { $0 } - - let controller = UndoOverlayController(presentationData: presentationData, content: .sticker(context: context, file: file, loop: true, title: nil, text: presentationData.strings.Story_Editor_TooltipPremiumReaction, undoText: nil, customAction: nil), elevatedLayout: true, animateInAsReplacement: false, blurred: true, action: { [weak self] action in - if case .info = action, let self { - let controller = context.sharedContext.makePremiumIntroController(context: context, source: .storiesExpirationDurations, forceDark: true, dismissed: nil) - self.containerView?.push(controller) - } - return false - }) - self.containerView?.present(controller) - } else { - let controller = self.context.sharedContext.makePremiumIntroController(context: self.context, source: .storiesExpirationDurations, forceDark: true, dismissed: nil) - self.containerView?.push(controller) - } - } - - let anchorRect = self.convert(self.bounds, to: superview).offsetBy(dx: 0.0, dy: -20.0) - reactionContextNodeTransition.setFrame(view: reactionContextNode.view, frame: CGRect(origin: CGPoint(), size: availableSize)) - reactionContextNode.updateLayout(size: availableSize, insets: insets, anchorRect: anchorRect, centerAligned: true, 
isCoveredByInput: false, isAnimatingOut: false, transition: reactionContextNodeTransition.containedViewLayoutTransition) - - superview.addSubnode(reactionContextNode) - reactionContextNode.animateIn(from: anchorRect) + func innerLayoutSubview(boundingSize: CGSize) -> CGSize { + return boundingSize } - fileprivate func dismissReactionSelection() -> Bool { - if let reactionContextNode = self.reactionContextNode { - reactionContextNode.animateOut(to: nil, animatingOutToReaction: false) - self.reactionContextNode = nil - - Queue.mainQueue().after(0.35) { - reactionContextNode.view.removeFromSuperview() - } - - return false - } else { - return true - } - } - - override func selectedTapAction() -> Bool { - if case let .file(file, type) = self.stickerEntity.content, case let .reaction(reaction, style) = type { - guard self.reactionContextNode == nil else { - let values = [self.entity.scale, self.entity.scale * 0.93, self.entity.scale] - let keyTimes = [0.0, 0.33, 1.0] - self.layer.animateKeyframes(values: values as [NSNumber], keyTimes: keyTimes as [NSNumber], duration: 0.3, keyPath: "transform.scale") - - let updatedStyle: DrawingStickerEntity.Content.FileType.ReactionStyle - switch style { - case .white: - updatedStyle = .black - case .black: - updatedStyle = .white - } - self.stickerEntity.content = .file(file, .reaction(reaction, updatedStyle)) - - self.update(animated: false) - - return true - } - - self.presentReactionSelection() - - return true - } else { - return super.selectedTapAction() - } - } - public override func update(animated: Bool) { self.center = self.stickerEntity.position @@ -646,24 +418,19 @@ public final class DrawingStickerEntityView: DrawingEntityView { self.bounds = CGRect(origin: .zero, size: self.dimensions.aspectFitted(size)) self.transform = CGAffineTransformScale(CGAffineTransformMakeRotation(self.stickerEntity.rotation), self.stickerEntity.scale, self.stickerEntity.scale) - if case let .file(_, type) = self.stickerEntity.content, case let 
.reaction(_, style) = type { - switch style { - case .white: - self.outlineView?.tintColor = .white - case .black: - self.outlineView?.tintColor = UIColor(rgb: 0x000000, alpha: 0.5) - } - } self.updateAnimationColor() - let isReaction = self.isReaction - let staticTransform = CATransform3DMakeScale(self.stickerEntity.mirrored ? -1.0 : 1.0, 1.0, 1.0) + self.updateMirroring(animated: animated) + self.updated() + + super.update(animated: animated) + } + + func updateMirroring(animated: Bool) { + let staticTransform = CATransform3DMakeScale(self.stickerEntity.mirrored ? -1.0 : 1.0, 1.0, 1.0) if animated { - var isCurrentlyMirrored = ((self.imageNode.layer.value(forKeyPath: "transform.scale.y") as? NSNumber)?.floatValue ?? 1.0) < 0.0 - if isReaction { - isCurrentlyMirrored = ((self.backgroundView?.layer.value(forKeyPath: "transform.scale.y") as? NSNumber)?.floatValue ?? 1.0) < 0.0 - } + let isCurrentlyMirrored = ((self.imageNode.layer.value(forKeyPath: "transform.scale.y") as? NSNumber)?.floatValue ?? 
1.0) < 0.0 var animationSourceTransform = CATransform3DIdentity var animationTargetTransform = CATransform3DIdentity if isCurrentlyMirrored { @@ -674,50 +441,28 @@ public final class DrawingStickerEntityView: DrawingEntityView { animationTargetTransform = CATransform3DRotate(animationTargetTransform, .pi, 0.0, 1.0, 0.0) animationTargetTransform.m34 = -1.0 / self.imageNode.frame.width } - if isReaction { - self.backgroundView?.layer.transform = animationSourceTransform - - let values = [1.0, 0.01, 1.0] - let keyTimes = [0.0, 0.5, 1.0] - self.animationNode?.layer.animateKeyframes(values: values as [NSNumber], keyTimes: keyTimes as [NSNumber], duration: 0.25, keyPath: "transform.scale.x", timingFunction: CAMediaTimingFunctionName.linear.rawValue) - } else { - self.imageNode.transform = animationSourceTransform - self.animationNode?.transform = animationSourceTransform - self.videoNode?.transform = animationSourceTransform - } + + self.imageNode.transform = animationSourceTransform + self.animationNode?.transform = animationSourceTransform + self.videoNode?.transform = animationSourceTransform + UIView.animate(withDuration: 0.25, animations: { - if isReaction { - self.backgroundView?.layer.transform = animationTargetTransform - } else { - self.imageNode.transform = animationTargetTransform - self.animationNode?.transform = animationTargetTransform - self.videoNode?.transform = animationTargetTransform - } + self.imageNode.transform = animationTargetTransform + self.animationNode?.transform = animationTargetTransform + self.videoNode?.transform = animationTargetTransform }, completion: { finished in - if isReaction { - self.backgroundView?.layer.transform = staticTransform - } else { - self.imageNode.transform = staticTransform - self.animationNode?.transform = staticTransform - self.videoNode?.transform = staticTransform - } + self.imageNode.transform = staticTransform + self.animationNode?.transform = staticTransform + self.videoNode?.transform = staticTransform }) } 
else { CATransaction.begin() CATransaction.setDisableActions(true) - if isReaction { - self.backgroundView?.layer.transform = staticTransform - } else { - self.imageNode.transform = staticTransform - self.animationNode?.transform = staticTransform - self.videoNode?.transform = staticTransform - } + self.imageNode.transform = staticTransform + self.animationNode?.transform = staticTransform + self.videoNode?.transform = staticTransform CATransaction.commit() } - - self.updated() - - super.update(animated: animated) } override func updateSelectionView() { @@ -838,7 +583,7 @@ final class DrawingStickerEntititySelectionView: DrawingEntitySelectionView { self.snapTool.maybeSkipFromStart(entityView: entityView, position: entity.position) - let _ = entityView.dismissReactionSelection() + entityView.onDeselection() if let sublayers = self.layer.sublayers { for layer in sublayers { @@ -927,7 +672,7 @@ final class DrawingStickerEntititySelectionView: DrawingEntitySelectionView { switch gestureRecognizer.state { case .began, .changed: - let _ = entityView.dismissReactionSelection() + entityView.onDeselection() if case .began = gestureRecognizer.state { entityView.onInteractionUpdated(true) @@ -961,7 +706,7 @@ final class DrawingStickerEntititySelectionView: DrawingEntitySelectionView { switch gestureRecognizer.state { case .began: - let _ = entityView.dismissReactionSelection() + entityView.onDeselection() self.snapTool.maybeSkipFromStart(entityView: entityView, rotation: entity.rotation) entityView.onInteractionUpdated(true) diff --git a/submodules/DrawingUI/Sources/DrawingTextEntity.swift b/submodules/DrawingUI/Sources/DrawingTextEntityView.swift similarity index 96% rename from submodules/DrawingUI/Sources/DrawingTextEntity.swift rename to submodules/DrawingUI/Sources/DrawingTextEntityView.swift index 7edc82a173..02be4d4ebc 100644 --- a/submodules/DrawingUI/Sources/DrawingTextEntity.swift +++ b/submodules/DrawingUI/Sources/DrawingTextEntityView.swift @@ -80,10 +80,6 @@ 
public final class DrawingTextEntityView: DrawingEntityView, UITextViewDelegate fatalError("init(coder:) has not been implemented") } - deinit { - self.displayLink?.invalidate() - } - override func animateInsertion() { } @@ -516,50 +512,6 @@ public final class DrawingTextEntityView: DrawingEntityView, UITextViewDelegate self.updateTextAnimations() } } - - private var previousDisplayLinkTime: Double? - - private var displayLinkStart: Double? - private var displayLink: SharedDisplayLinkDriver.Link? - - private var pendingImage: (Double, UIImage)? - private var cachedFrames: [DrawingTextEntity.AnimationFrame] = [] - - private func setupRecorder(delta: Double, duration: Double) { - self.cachedFrames.removeAll() - - self.displayLink?.invalidate() - self.displayLink = nil - - self.previousDisplayLinkTime = nil - let displayLinkStart = CACurrentMediaTime() - self.displayLinkStart = displayLinkStart - - self.displayLink = SharedDisplayLinkDriver.shared.add { [weak self] _ in - if let strongSelf = self { - let currentTime = CACurrentMediaTime() - if let previousDisplayLinkTime = strongSelf.previousDisplayLinkTime, currentTime < previousDisplayLinkTime + delta { - return - } - if currentTime >= displayLinkStart + duration { - strongSelf.displayLink?.invalidate() - strongSelf.displayLink = nil - } - if let (timestamp, image) = strongSelf.pendingImage, let previousDisplayLinkTime = strongSelf.previousDisplayLinkTime { - strongSelf.cachedFrames.append(DrawingTextEntity.AnimationFrame(timestamp: timestamp - displayLinkStart, duration: currentTime - previousDisplayLinkTime, image: image)) - } - if let image = strongSelf.getPresentationRenderImage() { - strongSelf.pendingImage = (currentTime, image) - } - if strongSelf.previousDisplayLinkTime == nil { - strongSelf.previousDisplayLinkTime = displayLinkStart - } else { - strongSelf.previousDisplayLinkTime = currentTime - } - } - } - self.displayLink?.isPaused = false - } func updateTextAnimations() { for layer in 
self.textView.characterLayers { @@ -587,7 +539,6 @@ public final class DrawingTextEntityView: DrawingEntityView, UITextViewDelegate layer.add(animation, forKey: "opacity") offset += delta } - self.setupRecorder(delta: delta, duration: duration) case .wiggle: for layer in self.textView.characterLayers { let animation = CABasicAnimation(keyPath: "transform.rotation.z") @@ -599,7 +550,6 @@ public final class DrawingTextEntityView: DrawingEntityView, UITextViewDelegate animation.repeatCount = .infinity layer.add(animation, forKey: "transform.rotation.z") } - self.setupRecorder(delta: 0.033, duration: 1.2) case .zoomIn: let animation = CABasicAnimation(keyPath: "transform.scale") animation.fromValue = 0.001 as NSNumber @@ -766,7 +716,7 @@ public final class DrawingTextEntityView: DrawingEntityView, UITextViewDelegate } func getRenderAnimationFrames() -> [DrawingTextEntity.AnimationFrame]? { - return self.cachedFrames + return nil } } diff --git a/submodules/DrawingUI/Sources/DrawingVectorEntity.swift b/submodules/DrawingUI/Sources/DrawingVectorEntityView.swift similarity index 100% rename from submodules/DrawingUI/Sources/DrawingVectorEntity.swift rename to submodules/DrawingUI/Sources/DrawingVectorEntityView.swift diff --git a/submodules/DrawingUI/Sources/StickerPickerScreen.swift b/submodules/DrawingUI/Sources/StickerPickerScreen.swift index a6990daffe..52f4a47a76 100644 --- a/submodules/DrawingUI/Sources/StickerPickerScreen.swift +++ b/submodules/DrawingUI/Sources/StickerPickerScreen.swift @@ -538,6 +538,9 @@ public class StickerPickerScreen: ViewController { self.storyStickersContentView?.reactionAction = { [weak self] in self?.controller?.addReaction() } + self.storyStickersContentView?.cameraAction = { [weak self] in + self?.controller?.addCamera() + } let gifItems: Signal if controller.hasGifs { @@ -1966,6 +1969,7 @@ public class StickerPickerScreen: ViewController { public var presentLocationPicker: () -> Void = { } public var presentAudioPicker: () -> Void = { 
} public var addReaction: () -> Void = { } + public var addCamera: () -> Void = { } public init(context: AccountContext, inputData: Signal, defaultToEmoji: Bool = false, hasGifs: Bool = false) { self.context = context @@ -2194,14 +2198,7 @@ private final class InteractiveReactionButtonContent: Component { } func update(component: InteractiveReactionButtonContent, availableSize: CGSize, state: EmptyComponentState, environment: Environment, transition: Transition) -> CGSize { -// if component.useOpaqueTheme { -// self.backgroundLayer.backgroundColor = component.theme.chat.inputMediaPanel.panelContentControlOpaqueSelectionColor.cgColor -// self.tintBackgroundLayer.backgroundColor = UIColor.white.cgColor -// } else { -// self.backgroundLayer.backgroundColor = component.theme.chat.inputMediaPanel.panelContentControlVibrantSelectionColor.cgColor -// self.tintBackgroundLayer.backgroundColor = UIColor(white: 1.0, alpha: 0.2).cgColor -// } - + let bounds = CGRect(origin: .zero, size: CGSize(width: 54.0, height: 54.0)) let iconSize = self.icon.update( transition: .immediate, component: AnyComponent(BundleIconComponent( @@ -2217,10 +2214,89 @@ private final class InteractiveReactionButtonContent: Component { if view.superview == nil { self.addSubview(view) } - transition.setFrame(view: view, frame: CGRect(origin: .zero, size: iconSize)) + transition.setFrame(view: view, frame: CGRect(origin: CGPoint(x: 2.0, y: 0.0), size: iconSize)) } - return iconSize + return bounds.size + } + } + + public func makeView() -> View { + return View(frame: CGRect()) + } + + public func update(view: View, availableSize: CGSize, state: EmptyComponentState, environment: Environment, transition: Transition) -> CGSize { + view.update(component: self, availableSize: availableSize, state: state, environment: environment, transition: transition) + } +} + +private final class RoundVideoButtonContent: Component { + let theme: PresentationTheme + + public init( + theme: PresentationTheme + ) { + 
self.theme = theme + } + + public static func ==(lhs: RoundVideoButtonContent, rhs: RoundVideoButtonContent) -> Bool { + if lhs.theme !== rhs.theme { + return false + } + return true + } + + final class View: UIView { + override public static var layerClass: AnyClass { + return PassthroughLayer.self + } + + private let backgroundLayer = SimpleLayer() + private var icon: ComponentView + + private var component: RoundVideoButtonContent? + + override init(frame: CGRect) { + self.icon = ComponentView() + + super.init(frame: frame) + + self.isExclusiveTouch = true + + self.layer.addSublayer(self.backgroundLayer) + } + + required init?(coder: NSCoder) { + fatalError("init(coder:) has not been implemented") + } + + func update(component: RoundVideoButtonContent, availableSize: CGSize, state: EmptyComponentState, environment: Environment, transition: Transition) -> CGSize { + self.backgroundLayer.backgroundColor = UIColor(rgb: 0xffffff, alpha: 0.11).cgColor + + let bounds = CGRect(origin: .zero, size: CGSize(width: 54.0, height: 54.0)) + let backgroundSize = CGSize(width: 50.0, height: 50.0) + self.backgroundLayer.frame = backgroundSize.centered(in: bounds) + self.backgroundLayer.cornerRadius = backgroundSize.width / 2.0 + + let iconSize = self.icon.update( + transition: .immediate, + component: AnyComponent(BundleIconComponent( + name: "Chat List/Tabs/IconCamera", + tintColor: nil, + maxSize: CGSize(width: 30.0, height: 30.0) + )), + environment: {}, + containerSize: availableSize + ) + + if let view = self.icon.view { + if view.superview == nil { + self.addSubview(view) + } + transition.setFrame(view: view, frame: iconSize.centered(in: bounds)) + } + + return bounds.size + } + } + @@ -2352,7 +2428,8 @@ final class StoryStickersContentView: UIView, EmojiCustomContentView { var locationAction: () -> Void = {} var audioAction: () -> Void = {} var reactionAction: () -> Void = {} - + var cameraAction: () -> Void = {} + func update(theme: PresentationTheme, strings: 
PresentationStrings, useOpaqueTheme: Bool, availableSize: CGSize, transition: Transition) -> CGSize { let padding: CGFloat = 22.0 let size = self.container.update( diff --git a/submodules/DrawingUI/Sources/VideoRecorder.swift b/submodules/DrawingUI/Sources/VideoRecorder.swift new file mode 100644 index 0000000000..e5bf31adbf --- /dev/null +++ b/submodules/DrawingUI/Sources/VideoRecorder.swift @@ -0,0 +1,173 @@ +import Foundation +import UIKit +import SwiftSignalKit +import Camera +import MediaEditor + +public final class EntityVideoRecorder { + private weak var mediaEditor: MediaEditor? + private weak var entitiesView: DrawingEntitiesView? + + private let maxDuration: Double + + private let camera: Camera + private let previewView: CameraSimplePreviewView + private let entity: DrawingStickerEntity + + private var recordingDisposable = MetaDisposable() + private let durationPromise = ValuePromise() + private let micLevelPromise = Promise() + + public var duration: Signal { + return self.durationPromise.get() + } + + public var micLevel: Signal { + return self.micLevelPromise.get() + } + + public var onAutomaticStop: () -> Void = {} + + public init(mediaEditor: MediaEditor, entitiesView: DrawingEntitiesView) { + self.mediaEditor = mediaEditor + self.entitiesView = entitiesView + + self.maxDuration = mediaEditor.duration ?? 
60.0 + self.previewView = CameraSimplePreviewView(frame: .zero, main: true) + + self.entity = DrawingStickerEntity(content: .dualVideoReference) + + self.camera = Camera( + configuration: Camera.Configuration( + preset: .hd1920x1080, + position: .front, + isDualEnabled: false, + audio: true, + photo: false, + metadata: false, + preferredFps: 60.0, + preferWide: true + ), + previewView: self.previewView, + secondaryPreviewView: nil + ) + self.camera.startCapture() + + let action = { [weak self] in + self?.previewView.removePlaceholder(delay: 0.15) + Queue.mainQueue().after(0.1) { + self?.startRecording() + self?.mediaEditor?.play() + } + } + if #available(iOS 13.0, *) { + let _ = (self.previewView.isPreviewing + |> filter { $0 } + |> take(1) + |> deliverOnMainQueue).startStandalone(next: { _ in + action() + }) + } else { + Queue.mainQueue().after(0.35) { + action() + } + } + + self.micLevelPromise.set(.single(0.0)) + + self.mediaEditor?.stop() + self.mediaEditor?.seek(0.0, andPlay: false) + } + + deinit { + self.recordingDisposable.dispose() + } + + public func setup( + referenceDrawingSize: CGSize, + scale: CGFloat, + position: CGPoint + ) { + self.entity.referenceDrawingSize = referenceDrawingSize + self.entity.scale = scale + self.entity.position = position + self.entitiesView?.add(self.entity) + + if let entityView = self.entitiesView?.getView(for: self.entity.uuid) as? 
DrawingStickerEntityView { + let maxDuration = self.maxDuration + entityView.setupCameraPreviewView( + self.previewView, + progress: self.durationPromise.get() |> map { + Float(max(0.0, min(1.0, $0 / maxDuration))) + } + ) + self.previewView.resetPlaceholder(front: true) + entityView.animateInsertion() + } + } + + var start: Double = 0.0 + private func startRecording() { + self.start = CACurrentMediaTime() + self.recordingDisposable.set((self.camera.startRecording() + |> deliverOnMainQueue).startStrict(next: { [weak self] duration in + guard let self else { + return + } + self.durationPromise.set(duration) + if duration >= self.maxDuration { + let onAutomaticStop = self.onAutomaticStop + self.stopRecording(save: true, completion: { + onAutomaticStop() + }) + } + })) + } + + public func stopRecording(save: Bool, completion: @escaping () -> Void = {}) { + var save = save + var remove = false + let duration = CACurrentMediaTime() - self.start + if duration < 0.2 { + save = false + remove = true + } + self.recordingDisposable.set((self.camera.stopRecording() + |> deliverOnMainQueue).startStrict(next: { [weak self] result in + guard let self, let mediaEditor = self.mediaEditor, let entitiesView = self.entitiesView, case let .finished(mainResult, _, duration, _, _) = result else { + return + } + if save { + mediaEditor.setAdditionalVideo(mainResult.path, positionChanges: []) + mediaEditor.setAdditionalVideoTrimRange(0.. Signal<[Int64: TelegramMediaFile], NoError>, theme: PresentationTheme, strings: PresentationStrings, peer: EngineRenderedPeer, customTitle: String? = nil, iconId: Int64? = nil, iconColor: Int32? 
= nil, online: Bool = false, numberOfLines: Int = 2, synchronousLoad: Bool) { let isFirstTime = self.peer == nil self.peer = peer diff --git a/submodules/SettingsUI/Sources/BubbleSettings/BubbleSettingsController.swift b/submodules/SettingsUI/Sources/BubbleSettings/BubbleSettingsController.swift index b549d3d41e..0937be6078 100644 --- a/submodules/SettingsUI/Sources/BubbleSettings/BubbleSettingsController.swift +++ b/submodules/SettingsUI/Sources/BubbleSettings/BubbleSettingsController.swift @@ -69,7 +69,7 @@ private final class BubbleSettingsControllerNode: ASDisplayNode, UIScrollViewDel self.messagesContainerNode.clipsToBounds = true self.messagesContainerNode.transform = CATransform3DMakeScale(1.0, -1.0, 1.0) - self.chatBackgroundNode.update(wallpaper: self.presentationData.chatWallpaper) + self.chatBackgroundNode.update(wallpaper: self.presentationData.chatWallpaper, animated: false) self.chatBackgroundNode.updateBubbleTheme(bubbleTheme: self.presentationData.theme, bubbleCorners: self.presentationData.chatBubbleCorners) self.toolbarNode = BubbleSettingsToolbarNode(presentationThemeSettings: self.presentationThemeSettings, presentationData: self.presentationData) diff --git a/submodules/SettingsUI/Sources/Privacy and Security/ForwardPrivacyChatPreviewItem.swift b/submodules/SettingsUI/Sources/Privacy and Security/ForwardPrivacyChatPreviewItem.swift index 22ab048ed3..6df7eed982 100644 --- a/submodules/SettingsUI/Sources/Privacy and Security/ForwardPrivacyChatPreviewItem.swift +++ b/submodules/SettingsUI/Sources/Privacy and Security/ForwardPrivacyChatPreviewItem.swift @@ -133,7 +133,7 @@ class ForwardPrivacyChatPreviewItemNode: ListViewItemNode { return { item, params, neighbors in if currentBackgroundNode == nil { currentBackgroundNode = createWallpaperBackgroundNode(context: item.context, forChatDisplay: false) - currentBackgroundNode?.update(wallpaper: item.wallpaper) + currentBackgroundNode?.update(wallpaper: item.wallpaper, animated: false) 
currentBackgroundNode?.updateBubbleTheme(bubbleTheme: item.theme, bubbleCorners: item.chatBubbleCorners) } @@ -190,7 +190,7 @@ class ForwardPrivacyChatPreviewItemNode: ListViewItemNode { strongSelf.item = item if let currentBackgroundNode { - currentBackgroundNode.update(wallpaper: item.wallpaper) + currentBackgroundNode.update(wallpaper: item.wallpaper, animated: false) currentBackgroundNode.updateBubbleTheme(bubbleTheme: item.theme, bubbleCorners: item.chatBubbleCorners) } @@ -268,7 +268,7 @@ class ForwardPrivacyChatPreviewItemNode: ListViewItemNode { if let backgroundNode = strongSelf.backgroundNode { backgroundNode.frame = backgroundFrame.insetBy(dx: 0.0, dy: -100.0) - backgroundNode.update(wallpaper: item.wallpaper) + backgroundNode.update(wallpaper: item.wallpaper, animated: false) backgroundNode.updateBubbleTheme(bubbleTheme: item.theme, bubbleCorners: item.chatBubbleCorners) backgroundNode.updateLayout(size: backgroundNode.bounds.size, displayMode: displayMode, transition: .immediate) } diff --git a/submodules/SettingsUI/Sources/Reactions/ReactionChatPreviewItem.swift b/submodules/SettingsUI/Sources/Reactions/ReactionChatPreviewItem.swift index 33ffc6a2fc..07bf9a401b 100644 --- a/submodules/SettingsUI/Sources/Reactions/ReactionChatPreviewItem.swift +++ b/submodules/SettingsUI/Sources/Reactions/ReactionChatPreviewItem.swift @@ -261,7 +261,7 @@ class ReactionChatPreviewItemNode: ListViewItemNode { return { item, params, neighbors in if currentBackgroundNode == nil { currentBackgroundNode = createWallpaperBackgroundNode(context: item.context, forChatDisplay: false) - currentBackgroundNode?.update(wallpaper: item.wallpaper) + currentBackgroundNode?.update(wallpaper: item.wallpaper, animated: false) currentBackgroundNode?.updateBubbleTheme(bubbleTheme: item.theme, bubbleCorners: item.chatBubbleCorners) } @@ -331,7 +331,7 @@ class ReactionChatPreviewItemNode: ListViewItemNode { strongSelf.item = item if let currentBackgroundNode { - 
currentBackgroundNode.update(wallpaper: item.wallpaper) + currentBackgroundNode.update(wallpaper: item.wallpaper, animated: false) currentBackgroundNode.updateBubbleTheme(bubbleTheme: item.theme, bubbleCorners: item.chatBubbleCorners) } @@ -414,7 +414,7 @@ class ReactionChatPreviewItemNode: ListViewItemNode { if let backgroundNode = strongSelf.backgroundNode { backgroundNode.frame = backgroundFrame.insetBy(dx: 0.0, dy: -100.0) - backgroundNode.update(wallpaper: item.wallpaper) + backgroundNode.update(wallpaper: item.wallpaper, animated: false) backgroundNode.updateBubbleTheme(bubbleTheme: item.theme, bubbleCorners: item.chatBubbleCorners) backgroundNode.updateLayout(size: backgroundNode.bounds.size, displayMode: displayMode, transition: .immediate) } diff --git a/submodules/SettingsUI/Sources/Text Size/TextSizeSelectionController.swift b/submodules/SettingsUI/Sources/Text Size/TextSizeSelectionController.swift index ca37f1a9d8..420e8476b3 100644 --- a/submodules/SettingsUI/Sources/Text Size/TextSizeSelectionController.swift +++ b/submodules/SettingsUI/Sources/Text Size/TextSizeSelectionController.swift @@ -89,7 +89,7 @@ private final class TextSizeSelectionControllerNode: ASDisplayNode, UIScrollView self.messagesContainerNode.clipsToBounds = true self.messagesContainerNode.transform = CATransform3DMakeScale(1.0, -1.0, 1.0) - self.chatBackgroundNode.update(wallpaper: self.presentationData.chatWallpaper) + self.chatBackgroundNode.update(wallpaper: self.presentationData.chatWallpaper, animated: false) self.chatBackgroundNode.updateBubbleTheme(bubbleTheme: self.presentationData.theme, bubbleCorners: self.presentationData.chatBubbleCorners) self.toolbarNode = TextSelectionToolbarNode(presentationThemeSettings: self.presentationThemeSettings, presentationData: self.presentationData) diff --git a/submodules/SettingsUI/Sources/Themes/ThemeAccentColorControllerNode.swift b/submodules/SettingsUI/Sources/Themes/ThemeAccentColorControllerNode.swift index e472c5a796..a3457b60a4 
100644 --- a/submodules/SettingsUI/Sources/Themes/ThemeAccentColorControllerNode.swift +++ b/submodules/SettingsUI/Sources/Themes/ThemeAccentColorControllerNode.swift @@ -454,7 +454,7 @@ final class ThemeAccentColorControllerNode: ASDisplayNode, UIScrollViewDelegate } } - self.backgroundNode.update(wallpaper: self.wallpaper) + self.backgroundNode.update(wallpaper: self.wallpaper, animated: false) self.backgroundNode.updateBubbleTheme(bubbleTheme: self.theme, bubbleCorners: self.presentationData.chatBubbleCorners) self.stateDisposable = (self.statePromise.get() @@ -551,7 +551,7 @@ final class ThemeAccentColorControllerNode: ASDisplayNode, UIScrollViewDelegate strongSelf.serviceBackgroundColor = serviceBackgroundColor strongSelf.serviceBackgroundColorPromise.set(.single(serviceBackgroundColor)) - strongSelf.backgroundNode.update(wallpaper: wallpaper) + strongSelf.backgroundNode.update(wallpaper: wallpaper, animated: false) strongSelf.backgroundNode.updateBubbleTheme(bubbleTheme: strongSelf.theme, bubbleCorners: strongSelf.presentationData.chatBubbleCorners) strongSelf.ready.set(.single(true)) diff --git a/submodules/SettingsUI/Sources/Themes/ThemePreviewControllerNode.swift b/submodules/SettingsUI/Sources/Themes/ThemePreviewControllerNode.swift index 9f57e20b78..8fb8111409 100644 --- a/submodules/SettingsUI/Sources/Themes/ThemePreviewControllerNode.swift +++ b/submodules/SettingsUI/Sources/Themes/ThemePreviewControllerNode.swift @@ -119,7 +119,7 @@ final class ThemePreviewControllerNode: ASDisplayNode, UIScrollViewDelegate { self.instantChatBackgroundNode.displaysAsynchronously = false self.ready.set(.single(true)) - self.instantChatBackgroundNode.update(wallpaper: wallpaper) + self.instantChatBackgroundNode.update(wallpaper: wallpaper, animated: false) self.instantChatBackgroundNode.view.contentMode = .scaleAspectFill @@ -207,7 +207,7 @@ final class ThemePreviewControllerNode: ASDisplayNode, UIScrollViewDelegate { 
self.chatContainerNode.insertSubnode(self.wallpaperNode, belowSubnode: self.messagesContainerNode) } - self.wallpaperNode.update(wallpaper: self.wallpaper) + self.wallpaperNode.update(wallpaper: self.wallpaper, animated: false) self.wallpaperNode.updateBubbleTheme(bubbleTheme: self.previewTheme, bubbleCorners: self.presentationData.chatBubbleCorners) self.remoteChatBackgroundNode.imageUpdated = { [weak self] image in diff --git a/submodules/SettingsUI/Sources/Themes/ThemeSettingsChatPreviewItem.swift b/submodules/SettingsUI/Sources/Themes/ThemeSettingsChatPreviewItem.swift index fc90663f4d..38271436a2 100644 --- a/submodules/SettingsUI/Sources/Themes/ThemeSettingsChatPreviewItem.swift +++ b/submodules/SettingsUI/Sources/Themes/ThemeSettingsChatPreviewItem.swift @@ -148,7 +148,7 @@ class ThemeSettingsChatPreviewItemNode: ListViewItemNode { if currentBackgroundNode == nil { currentBackgroundNode = createWallpaperBackgroundNode(context: item.context, forChatDisplay: false) } - currentBackgroundNode?.update(wallpaper: item.wallpaper) + currentBackgroundNode?.update(wallpaper: item.wallpaper, animated: false) currentBackgroundNode?.updateBubbleTheme(bubbleTheme: item.componentTheme, bubbleCorners: item.chatBubbleCorners) let insets: UIEdgeInsets diff --git a/submodules/SettingsUI/Sources/Themes/WallpaperGalleryItem.swift b/submodules/SettingsUI/Sources/Themes/WallpaperGalleryItem.swift index c6783d8d30..8985eb422a 100644 --- a/submodules/SettingsUI/Sources/Themes/WallpaperGalleryItem.swift +++ b/submodules/SettingsUI/Sources/Themes/WallpaperGalleryItem.swift @@ -636,7 +636,7 @@ final class WallpaperGalleryItemNode: GalleryItemNode { switch entry { case let .wallpaper(wallpaper, _): Queue.mainQueue().justDispatch { - self.nativeNode.update(wallpaper: wallpaper) + self.nativeNode.update(wallpaper: wallpaper, animated: false) } if case let .file(file) = wallpaper, file.isPattern { @@ -651,7 +651,7 @@ final class WallpaperGalleryItemNode: GalleryItemNode { isColor = true } 
else if case let .gradient(gradient) = wallpaper { self.nativeNode.isHidden = false - self.nativeNode.update(wallpaper: wallpaper) + self.nativeNode.update(wallpaper: wallpaper, animated: false) self.patternButtonNode.isSelected = false if gradient.colors.count >= 3 { @@ -662,7 +662,7 @@ final class WallpaperGalleryItemNode: GalleryItemNode { isColor = true } else if case .color = wallpaper { self.nativeNode.isHidden = false - self.nativeNode.update(wallpaper: wallpaper) + self.nativeNode.update(wallpaper: wallpaper, animated: false) self.patternButtonNode.isSelected = false isColor = true } else { @@ -989,7 +989,7 @@ final class WallpaperGalleryItemNode: GalleryItemNode { strongSelf.context.sharedContext.accountManager.mediaBox.storeResourceData(resource.id, data: data, synchronous: true) let wallpaper: TelegramWallpaper = .image([TelegramMediaImageRepresentation(dimensions: PixelDimensions(image.size), resource: resource, progressiveSizes: [], immediateThumbnailData: nil, hasVideo: false, isPersonal: false)], WallpaperSettings()) - strongSelf.nativeNode.update(wallpaper: wallpaper) + strongSelf.nativeNode.update(wallpaper: wallpaper, animated: false) } } } diff --git a/submodules/ShareController/Sources/ShareController.swift b/submodules/ShareController/Sources/ShareController.swift index 2a618082ba..8a0fa81ad0 100644 --- a/submodules/ShareController/Sources/ShareController.swift +++ b/submodules/ShareController/Sources/ShareController.swift @@ -465,6 +465,8 @@ public final class ShareController: ViewController { } public var openShareAsImage: (([Message]) -> Void)? + + public var shareStory: (() -> Void)? public var debugAction: (() -> Void)? 
@@ -700,7 +702,7 @@ public final class ShareController: ViewController { return } strongSelf.present(standardTextAlertController(theme: AlertControllerTheme(presentationData: strongSelf.presentationData), title: title, text: text, actions: [TextAlertAction(type: .defaultAction, title: strongSelf.presentationData.strings.Common_OK, action: {})]), in: .window(.root)) - }, externalShare: self.externalShare, immediateExternalShare: self.immediateExternalShare, immediatePeerId: self.immediatePeerId, fromForeignApp: self.fromForeignApp, forceTheme: self.forceTheme, fromPublicChannel: fromPublicChannel, segmentedValues: self.segmentedValues) + }, externalShare: self.externalShare, immediateExternalShare: self.immediateExternalShare, immediatePeerId: self.immediatePeerId, fromForeignApp: self.fromForeignApp, forceTheme: self.forceTheme, fromPublicChannel: fromPublicChannel, segmentedValues: self.segmentedValues, shareStory: self.shareStory) self.controllerNode.completed = self.completed self.controllerNode.present = { [weak self] c in self?.presentInGlobalOverlay(c) @@ -2471,134 +2473,6 @@ public final class ShareController: ViewController { } } - -final class MessageStoryRenderer { - private let context: AccountContext - private let presentationData: PresentationData - private let messages: [Message] - - let containerNode: ASDisplayNode - private let instantChatBackgroundNode: WallpaperBackgroundNode - private let messagesContainerNode: ASDisplayNode - private var dateHeaderNode: ListViewItemHeaderNode? - private var messageNodes: [ListViewItemNode]? 
- private let addressNode: ImmediateTextNode - - init(context: AccountContext, messages: [Message]) { - self.context = context - self.presentationData = context.sharedContext.currentPresentationData.with { $0 } - self.messages = messages - - self.containerNode = ASDisplayNode() - - self.instantChatBackgroundNode = createWallpaperBackgroundNode(context: context, forChatDisplay: false) - self.instantChatBackgroundNode.displaysAsynchronously = false - - self.messagesContainerNode = ASDisplayNode() - self.messagesContainerNode.clipsToBounds = true - self.messagesContainerNode.transform = CATransform3DMakeScale(1.0, -1.0, 1.0) - - let message = messages.first! - let addressName = message.peers[message.id.peerId]?.addressName ?? "" - - self.addressNode = ImmediateTextNode() - self.addressNode.displaysAsynchronously = false - self.addressNode.attributedText = NSAttributedString(string: "t.me/\(addressName)/\(message.id.id)", font: Font.medium(14.0), textColor: UIColor(rgb: 0xffffff)) - self.addressNode.textShadowColor = UIColor(rgb: 0x929292, alpha: 0.8) - - self.containerNode.addSubnode(self.instantChatBackgroundNode) - self.containerNode.addSubnode(self.messagesContainerNode) - self.containerNode.addSubnode(self.addressNode) - } - - func update(layout: ContainerViewLayout, completion: @escaping (UIImage?) 
-> Void) { - self.updateMessagesLayout(layout: layout) - - Queue.mainQueue().after(0.01) { - UIGraphicsBeginImageContextWithOptions(layout.size, false, 3.0) - self.containerNode.view.drawHierarchy(in: CGRect(origin: CGPoint(), size: layout.size), afterScreenUpdates: true) - let img = UIGraphicsGetImageFromCurrentImageContext() - UIGraphicsEndImageContext() - completion(img) - } - } - - private func updateMessagesLayout(layout: ContainerViewLayout) { - let size = layout.size - self.containerNode.frame = CGRect(origin: CGPoint(), size: layout.size) - self.instantChatBackgroundNode.frame = CGRect(origin: CGPoint(), size: layout.size) - self.instantChatBackgroundNode.updateLayout(size: size, displayMode: .aspectFill, transition: .immediate) - self.messagesContainerNode.frame = CGRect(origin: CGPoint(), size: layout.size) - - let addressLayout = self.addressNode.updateLayout(size) - - let theme = self.presentationData.theme.withUpdated(preview: true) - let headerItem = self.context.sharedContext.makeChatMessageDateHeaderItem(context: self.context, timestamp: self.messages.first?.timestamp ?? 
0, theme: theme, strings: self.presentationData.strings, wallpaper: self.presentationData.chatWallpaper, fontSize: self.presentationData.chatFontSize, chatBubbleCorners: self.presentationData.chatBubbleCorners, dateTimeFormat: self.presentationData.dateTimeFormat, nameOrder: self.presentationData.nameDisplayOrder) - - let items: [ListViewItem] = [self.context.sharedContext.makeChatMessagePreviewItem(context: self.context, messages: self.messages, theme: theme, strings: self.presentationData.strings, wallpaper: self.presentationData.theme.chat.defaultWallpaper, fontSize: self.presentationData.chatFontSize, chatBubbleCorners: self.presentationData.chatBubbleCorners, dateTimeFormat: self.presentationData.dateTimeFormat, nameOrder: self.presentationData.nameDisplayOrder, forcedResourceStatus: nil, tapMessage: nil, clickThroughMessage: nil, backgroundNode: nil, availableReactions: nil, isCentered: false)] - - let inset: CGFloat = 16.0 - let width = layout.size.width - inset * 2.0 - let params = ListViewItemLayoutParams(width: width, leftInset: layout.safeInsets.left, rightInset: layout.safeInsets.right, availableHeight: layout.size.height) - if let messageNodes = self.messageNodes { - for i in 0 ..< items.count { - let itemNode = messageNodes[i] - items[i].updateNode(async: { $0() }, node: { - return itemNode - }, params: params, previousItem: i == 0 ? nil : items[i - 1], nextItem: i == (items.count - 1) ? nil : items[i + 1], animation: .None, completion: { (layout, apply) in - let nodeFrame = CGRect(origin: CGPoint(x: 0.0, y: floor((size.height - layout.size.height) / 2.0)), size: CGSize(width: width, height: layout.size.height)) - - itemNode.contentSize = layout.contentSize - itemNode.insets = layout.insets - itemNode.frame = nodeFrame - itemNode.isUserInteractionEnabled = false - - apply(ListViewItemApply(isOnScreen: true)) - }) - } - } else { - var messageNodes: [ListViewItemNode] = [] - for i in 0 ..< items.count { - var itemNode: ListViewItemNode? 
- items[i].nodeConfiguredForParams(async: { $0() }, params: params, synchronousLoads: true, previousItem: i == 0 ? nil : items[i - 1], nextItem: i == (items.count - 1) ? nil : items[i + 1], completion: { node, apply in - itemNode = node - apply().1(ListViewItemApply(isOnScreen: true)) - }) - itemNode!.subnodeTransform = CATransform3DMakeScale(-1.0, 1.0, 1.0) - itemNode!.isUserInteractionEnabled = false - messageNodes.append(itemNode!) - self.messagesContainerNode.addSubnode(itemNode!) - } - self.messageNodes = messageNodes - } - - var bottomOffset: CGFloat = 0.0 - if let messageNodes = self.messageNodes { - for itemNode in messageNodes { - itemNode.frame = CGRect(origin: CGPoint(x: inset, y: floor((size.height - itemNode.frame.height) / 2.0)), size: itemNode.frame.size) - bottomOffset += itemNode.frame.maxY - itemNode.updateFrame(itemNode.frame, within: layout.size) - } - } - - self.addressNode.frame = CGRect(origin: CGPoint(x: inset + 16.0, y: bottomOffset + 3.0), size: CGSize(width: addressLayout.width, height: addressLayout.height + 3.0)) - - let dateHeaderNode: ListViewItemHeaderNode - if let currentDateHeaderNode = self.dateHeaderNode { - dateHeaderNode = currentDateHeaderNode - headerItem.updateNode(dateHeaderNode, previous: nil, next: headerItem) - } else { - dateHeaderNode = headerItem.node(synchronousLoad: true) - dateHeaderNode.subnodeTransform = CATransform3DMakeScale(-1.0, 1.0, 1.0) - self.messagesContainerNode.addSubnode(dateHeaderNode) - self.dateHeaderNode = dateHeaderNode - } - - dateHeaderNode.frame = CGRect(origin: CGPoint(x: 0.0, y: bottomOffset), size: CGSize(width: layout.size.width, height: headerItem.height)) - dateHeaderNode.updateLayout(size: self.containerNode.frame.size, leftInset: layout.safeInsets.left, rightInset: layout.safeInsets.right) - } -} - public class ShareToInstagramActivity: UIActivity { private let context: AccountContext private var activityItems = [Any]() diff --git 
a/submodules/ShareController/Sources/ShareControllerNode.swift b/submodules/ShareController/Sources/ShareControllerNode.swift index 88aee95e8e..647108975e 100644 --- a/submodules/ShareController/Sources/ShareControllerNode.swift +++ b/submodules/ShareController/Sources/ShareControllerNode.swift @@ -87,7 +87,7 @@ final class ShareControllerNode: ViewControllerTracingNode, UIScrollViewDelegate private let showNames = ValuePromise(true) - init(environment: ShareControllerEnvironment, presentationData: PresentationData, presetText: String?, defaultAction: ShareControllerAction?, requestLayout: @escaping (ContainedViewLayoutTransition) -> Void, presentError: @escaping (String?, String) -> Void, externalShare: Bool, immediateExternalShare: Bool, immediatePeerId: PeerId?, fromForeignApp: Bool, forceTheme: PresentationTheme?, fromPublicChannel: Bool, segmentedValues: [ShareControllerSegmentedValue]?) { + init(environment: ShareControllerEnvironment, presentationData: PresentationData, presetText: String?, defaultAction: ShareControllerAction?, requestLayout: @escaping (ContainedViewLayoutTransition) -> Void, presentError: @escaping (String?, String) -> Void, externalShare: Bool, immediateExternalShare: Bool, immediatePeerId: PeerId?, fromForeignApp: Bool, forceTheme: PresentationTheme?, fromPublicChannel: Bool, segmentedValues: [ShareControllerSegmentedValue]?, shareStory: (() -> Void)?) 
{ self.environment = environment self.presentationData = presentationData self.forceTheme = forceTheme @@ -318,6 +318,13 @@ final class ShareControllerNode: ViewControllerTracingNode, UIScrollViewDelegate strongSelf.closePeerTopics(peer.peerId, selected: true) }) } + }, shareStory: shareStory.flatMap { shareStory in + return { [weak self] in + self?.animateOut(shared: false, completion: { [weak self] in + self?.dismiss?(false) + }) + shareStory() + } }) self.backgroundColor = nil diff --git a/submodules/ShareController/Sources/ShareControllerPeerGridItem.swift b/submodules/ShareController/Sources/ShareControllerPeerGridItem.swift index 6b0a824fb7..c48874744d 100644 --- a/submodules/ShareController/Sources/ShareControllerPeerGridItem.swift +++ b/submodules/ShareController/Sources/ShareControllerPeerGridItem.swift @@ -20,10 +20,12 @@ final class ShareControllerInteraction { let togglePeer: (EngineRenderedPeer, Bool) -> Void let selectTopic: (EngineRenderedPeer, Int64, MessageHistoryThreadData) -> Void - - init(togglePeer: @escaping (EngineRenderedPeer, Bool) -> Void, selectTopic: @escaping (EngineRenderedPeer, Int64, MessageHistoryThreadData) -> Void) { + let shareStory: (() -> Void)? + + init(togglePeer: @escaping (EngineRenderedPeer, Bool) -> Void, selectTopic: @escaping (EngineRenderedPeer, Int64, MessageHistoryThreadData) -> Void, shareStory: (() -> Void)?) { self.togglePeer = togglePeer self.selectTopic = selectTopic + self.shareStory = shareStory } } @@ -91,28 +93,35 @@ final class ShareControllerGridSectionNode: ASDisplayNode { } final class ShareControllerPeerGridItem: GridItem { + enum ShareItem: Equatable { + case peer(peer: EngineRenderedPeer, presence: EnginePeer.Presence?, topicId: Int64?, threadData: MessageHistoryThreadData?) + case story + + var peerId: EnginePeer.Id? 
{ + if case let .peer(peer, _, _, _) = self { + return peer.peerId + } else { + return nil + } + } + } + let environment: ShareControllerEnvironment let context: ShareControllerAccountContext let theme: PresentationTheme let strings: PresentationStrings - let peer: EngineRenderedPeer? - let presence: EnginePeer.Presence? - let topicId: Int64? - let threadData: MessageHistoryThreadData? + let item: ShareItem? let controllerInteraction: ShareControllerInteraction let search: Bool let section: GridSection? - init(environment: ShareControllerEnvironment, context: ShareControllerAccountContext, theme: PresentationTheme, strings: PresentationStrings, peer: EngineRenderedPeer?, presence: EnginePeer.Presence?, topicId: Int64?, threadData: MessageHistoryThreadData?, controllerInteraction: ShareControllerInteraction, sectionTitle: String? = nil, search: Bool = false) { + init(environment: ShareControllerEnvironment, context: ShareControllerAccountContext, theme: PresentationTheme, strings: PresentationStrings, item: ShareItem?, controllerInteraction: ShareControllerInteraction, sectionTitle: String? 
= nil, search: Bool = false) { self.environment = environment self.context = context self.theme = theme self.strings = strings - self.peer = peer - self.presence = presence - self.topicId = topicId - self.threadData = threadData + self.item = item self.controllerInteraction = controllerInteraction self.search = search @@ -126,7 +135,7 @@ final class ShareControllerPeerGridItem: GridItem { func node(layout: GridNodeLayout, synchronousLoad: Bool) -> GridItemNode { let node = ShareControllerPeerGridItemNode() node.controllerInteraction = self.controllerInteraction - node.setup(environment: self.environment, context: self.context, theme: self.theme, strings: self.strings, peer: self.peer, presence: self.presence, topicId: self.topicId, threadData: self.threadData, search: self.search, synchronousLoad: synchronousLoad, force: false) + node.setup(environment: self.environment, context: self.context, theme: self.theme, strings: self.strings, item: self.item, search: self.search, synchronousLoad: synchronousLoad, force: false) return node } @@ -136,12 +145,12 @@ final class ShareControllerPeerGridItem: GridItem { return } node.controllerInteraction = self.controllerInteraction - node.setup(environment: self.environment, context: self.context, theme: self.theme, strings: self.strings, peer: self.peer, presence: self.presence, topicId: self.topicId, threadData: self.threadData, search: self.search, synchronousLoad: false, force: false) + node.setup(environment: self.environment, context: self.context, theme: self.theme, strings: self.strings, item: self.item, search: self.search, synchronousLoad: false, force: false) } } final class ShareControllerPeerGridItemNode: GridItemNode { - private var currentState: (ShareControllerEnvironment, ShareControllerAccountContext, PresentationTheme, PresentationStrings, EngineRenderedPeer?, Bool, EnginePeer.Presence?, Int64?, MessageHistoryThreadData?)? 
+ private var currentState: (environment: ShareControllerEnvironment, accountContext: ShareControllerAccountContext, theme: PresentationTheme, strings: PresentationStrings, item: ShareControllerPeerGridItem.ShareItem?, search: Bool)? private let peerNode: SelectablePeerNode private var presenceManager: PeerPresenceStatusManager? @@ -151,7 +160,11 @@ final class ShareControllerPeerGridItemNode: GridItemNode { private var absoluteLocation: (CGRect, CGSize)? var peerId: EnginePeer.Id? { - return self.currentState?.4?.peerId + if let item = self.currentState?.item, case let .peer(peer, _, _, _) = item { + return peer.peerId + } else { + return nil + } } override init() { @@ -161,9 +174,11 @@ final class ShareControllerPeerGridItemNode: GridItemNode { self.peerNode.toggleSelection = { [weak self] in if let strongSelf = self { - if let (_, _, _, _, maybePeer, search, _, _, _) = strongSelf.currentState, let peer = maybePeer { - if let _ = peer.peers[peer.peerId] { + if let (_, _, _, _, maybeItem, search) = strongSelf.currentState, let item = maybeItem { + if case let .peer(peer, _, _, _) = item, let _ = peer.peers[peer.peerId] { strongSelf.controllerInteraction?.togglePeer(peer, search) + } else if case .story = item { + strongSelf.controllerInteraction?.shareStory?() } } } @@ -173,7 +188,7 @@ final class ShareControllerPeerGridItemNode: GridItemNode { guard let strongSelf = self, let currentState = strongSelf.currentState else { return } - strongSelf.setup(environment: currentState.0, context: currentState.1, theme: currentState.2, strings: currentState.3, peer: currentState.4, presence: currentState.6, topicId: currentState.7, threadData: currentState.8, search: currentState.5, synchronousLoad: false, force: true) + strongSelf.setup(environment: currentState.0, context: currentState.1, theme: currentState.2, strings: currentState.3, item: currentState.4, search: currentState.5, synchronousLoad: false, force: true) }) } @@ -185,21 +200,27 @@ final class 
ShareControllerPeerGridItemNode: GridItemNode { } } - func setup(environment: ShareControllerEnvironment, context: ShareControllerAccountContext, theme: PresentationTheme, strings: PresentationStrings, peer: EngineRenderedPeer?, presence: EnginePeer.Presence?, topicId: Int64?, threadData: MessageHistoryThreadData?, search: Bool, synchronousLoad: Bool, force: Bool) { - if force || self.currentState == nil || self.currentState!.1 !== context || self.currentState!.3 !== theme || self.currentState!.4 != peer || self.currentState!.6 != presence || self.currentState!.7 != topicId { + func setup(environment: ShareControllerEnvironment, context: ShareControllerAccountContext, theme: PresentationTheme, strings: PresentationStrings, item: ShareControllerPeerGridItem.ShareItem?, search: Bool, synchronousLoad: Bool, force: Bool) { + if force || self.currentState == nil || self.currentState!.1 !== context || self.currentState!.3 !== theme || self.currentState!.item != item { let itemTheme = SelectablePeerNodeTheme(textColor: theme.actionSheet.primaryTextColor, secretTextColor: theme.chatList.secretTitleColor, selectedTextColor: theme.actionSheet.controlAccentColor, checkBackgroundColor: theme.actionSheet.opaqueItemBackgroundColor, checkFillColor: theme.actionSheet.controlAccentColor, checkColor: theme.actionSheet.checkContentColor, avatarPlaceholderColor: theme.list.mediaPlaceholderColor) + var effectivePresence: EnginePeer.Presence? 
let timestamp = Int32(CFAbsoluteTimeGetCurrent() + NSTimeIntervalSince1970) - var online = false - if case let .user(peer) = peer?.peer, let presence = presence, !isServicePeer(peer) && !peer.flags.contains(.isSupport) && peer.id != context.accountPeerId { - let relativeStatus = relativeUserPresenceStatus(presence, relativeTo: timestamp) - if case .online = relativeStatus { - online = true - } - } - self.peerNode.theme = itemTheme - if let peer = peer { + if let item, case let .peer(renderedPeer, presence, _, threadData) = item, let peer = renderedPeer.peer { + effectivePresence = presence + var isOnline = false + var isSupport = false + if case let .user(user) = peer, user.flags.contains(.isSupport) { + isSupport = true + } + if let presence, !peer.isService && !isSupport && peer.id != context.accountPeerId { + let relativeStatus = relativeUserPresenceStatus(presence, relativeTo: timestamp) + if case .online = relativeStatus { + isOnline = true + } + } + let resolveInlineStickers = context.resolveInlineStickers self.peerNode.setup( accountPeerId: context.accountPeerId, @@ -214,17 +235,26 @@ final class ShareControllerPeerGridItemNode: GridItemNode { }, theme: theme, strings: strings, - peer: peer, + peer: renderedPeer, customTitle: threadData?.info.title, iconId: threadData?.info.icon, iconColor: threadData?.info.iconColor ?? 
0, - online: online, + online: isOnline, synchronousLoad: synchronousLoad ) if let shimmerNode = self.placeholderNode { self.placeholderNode = nil shimmerNode.removeFromSupernode() } + } else if let item, case .story = item { + self.peerNode.setupStoryRepost( + accountPeerId: context.accountPeerId, + postbox: context.stateManager.postbox, + network: context.stateManager.network, + theme: theme, + strings: strings, + synchronousLoad: synchronousLoad + ) } else { let shimmerNode: ShimmerEffectNode if let current = self.placeholderNode { @@ -252,10 +282,10 @@ final class ShareControllerPeerGridItemNode: GridItemNode { shimmerNode.update(backgroundColor: theme.list.itemBlocksBackgroundColor, foregroundColor: theme.list.mediaPlaceholderColor, shimmeringColor: theme.list.itemBlocksBackgroundColor.withAlphaComponent(0.4), shapes: shapes, horizontal: true, size: self.bounds.size) } - self.currentState = (environment, context, theme, strings, peer, search, presence, topicId, threadData) + self.currentState = (environment, context, theme, strings, item, search) self.setNeedsLayout() - if let presence = presence { - self.presenceManager?.reset(presence: presence) + if let effectivePresence { + self.presenceManager?.reset(presence: effectivePresence) } } self.updateSelection(animated: false) @@ -263,8 +293,10 @@ final class ShareControllerPeerGridItemNode: GridItemNode { func updateSelection(animated: Bool) { var selected = false - if let controllerInteraction = self.controllerInteraction, let (_, _, _, _, maybePeer, _, _, _, _) = self.currentState, let peer = maybePeer { - selected = controllerInteraction.selectedPeerIds.contains(peer.peerId) + if let controllerInteraction = self.controllerInteraction, let (_, _, _, _, maybeItem, _) = self.currentState, let item = maybeItem { + if case let .peer(peer, _, _, _) = item { + selected = controllerInteraction.selectedPeerIds.contains(peer.peerId) + } } self.peerNode.updateSelection(selected: selected, animated: animated) @@ -277,7 
+309,7 @@ final class ShareControllerPeerGridItemNode: GridItemNode { self.peerNode.frame = bounds self.placeholderNode?.frame = bounds - if let (_, _, theme, _, _, _, _, _, _) = self.currentState, let shimmerNode = self.placeholderNode { + if let theme = self.currentState?.theme, let shimmerNode = self.placeholderNode { var shapes: [ShimmerEffectNode.Shape] = [] let titleLineWidth: CGFloat = 56.0 diff --git a/submodules/ShareController/Sources/SharePeersContainerNode.swift b/submodules/ShareController/Sources/SharePeersContainerNode.swift index 87b80936eb..10c4b1d931 100644 --- a/submodules/ShareController/Sources/SharePeersContainerNode.swift +++ b/submodules/ShareController/Sources/SharePeersContainerNode.swift @@ -37,31 +37,24 @@ extension CGPoint { private struct SharePeerEntry: Comparable, Identifiable { let index: Int32 - let peer: EngineRenderedPeer - let presence: EnginePeer.Presence? - let threadId: Int64? - let threadData: MessageHistoryThreadData? + let item: ShareControllerPeerGridItem.ShareItem let theme: PresentationTheme let strings: PresentationStrings var stableId: Int64 { - return self.peer.peerId.toInt64() + switch self.item { + case let .peer(peer, _, _, _): + return peer.peerId.toInt64() + case .story: + return 0 + } } static func ==(lhs: SharePeerEntry, rhs: SharePeerEntry) -> Bool { if lhs.index != rhs.index { return false } - if lhs.peer != rhs.peer { - return false - } - if lhs.presence != rhs.presence { - return false - } - if lhs.threadId != rhs.threadId { - return false - } - if lhs.threadData != rhs.threadData { + if lhs.item != rhs.item { return false } if lhs.theme !== rhs.theme { @@ -76,7 +69,7 @@ private struct SharePeerEntry: Comparable, Identifiable { } func item(environment: ShareControllerEnvironment, context: ShareControllerAccountContext, interfaceInteraction: ShareControllerInteraction) -> GridItem { - return ShareControllerPeerGridItem(environment: environment, context: context, theme: self.theme, strings: self.strings, 
peer: self.peer, presence: self.presence, topicId: self.threadId, threadData: self.threadData, controllerInteraction: interfaceInteraction, search: false) + return ShareControllerPeerGridItem(environment: environment, context: context, theme: self.theme, strings: self.strings, item: self.item, controllerInteraction: interfaceInteraction, search: false) } } @@ -169,19 +162,26 @@ final class SharePeersContainerNode: ASDisplayNode, ShareContentContainerNode { self.peersValue.set(.single(peers)) + let canShareStory = controllerInteraction.shareStory != nil + let items: Signal<[SharePeerEntry], NoError> = combineLatest(self.peersValue.get(), self.foundPeers.get(), self.tick.get(), self.themePromise.get()) |> map { [weak controllerInteraction] initialPeers, foundPeers, _, theme -> [SharePeerEntry] in var entries: [SharePeerEntry] = [] var index: Int32 = 0 + if canShareStory { + entries.append(SharePeerEntry(index: index, item: .story, theme: theme, strings: strings)) + index += 1 + } + var existingPeerIds: Set = Set() - entries.append(SharePeerEntry(index: index, peer: EngineRenderedPeer(peer: accountPeer), presence: nil, threadId: nil, threadData: nil, theme: theme, strings: strings)) + entries.append(SharePeerEntry(index: index, item: .peer(peer: EngineRenderedPeer(peer: accountPeer), presence: nil, topicId: nil, threadData: nil), theme: theme, strings: strings)) existingPeerIds.insert(accountPeer.id) index += 1 for peer in foundPeers.reversed() { if !existingPeerIds.contains(peer.peerId) { - entries.append(SharePeerEntry(index: index, peer: peer, presence: nil, threadId: nil, threadData: nil, theme: theme, strings: strings)) + entries.append(SharePeerEntry(index: index, item: .peer(peer: peer, presence: nil, topicId: nil, threadData: nil), theme: theme, strings: strings)) existingPeerIds.insert(peer.peerId) index += 1 } @@ -190,7 +190,7 @@ final class SharePeersContainerNode: ASDisplayNode, ShareContentContainerNode { for (peer, presence) in initialPeers { if 
!existingPeerIds.contains(peer.peerId) { let thread = controllerInteraction?.selectedTopics[peer.peerId] - entries.append(SharePeerEntry(index: index, peer: peer, presence: presence, threadId: thread?.0, threadData: thread?.1, theme: theme, strings: strings)) + entries.append(SharePeerEntry(index: index, item: .peer(peer: peer, presence: presence, topicId: thread?.0, threadData: thread?.1), theme: theme, strings: strings)) existingPeerIds.insert(peer.peerId) index += 1 } @@ -568,7 +568,7 @@ final class SharePeersContainerNode: ASDisplayNode, ShareContentContainerNode { var scrollToItem: GridNodeScrollToItem? if let ensurePeerVisibleOnLayout = self.ensurePeerVisibleOnLayout { self.ensurePeerVisibleOnLayout = nil - if let index = self.entries.firstIndex(where: { $0.peer.peerId == ensurePeerVisibleOnLayout }) { + if let index = self.entries.firstIndex(where: { $0.item.peerId == ensurePeerVisibleOnLayout }) { scrollToItem = GridNodeScrollToItem(index: index, position: .visible, transition: transition, directionHint: .up, adjustForSection: false) } } diff --git a/submodules/ShareController/Sources/ShareSearchContainerNode.swift b/submodules/ShareController/Sources/ShareSearchContainerNode.swift index c612976c22..4db2fa5471 100644 --- a/submodules/ShareController/Sources/ShareSearchContainerNode.swift +++ b/submodules/ShareController/Sources/ShareSearchContainerNode.swift @@ -85,15 +85,15 @@ private enum ShareSearchRecentEntry: Comparable, Identifiable { func item(environment: ShareControllerEnvironment, context: ShareControllerAccountContext, interfaceInteraction: ShareControllerInteraction) -> GridItem { switch self { - case let .topPeers(theme, strings): - return ShareControllerRecentPeersGridItem(environment: environment, context: context, theme: theme, strings: strings, controllerInteraction: interfaceInteraction) - case let .peer(_, theme, peer, associatedPeer, presence, strings): - var peers: [EnginePeer.Id: EnginePeer] = [peer.id: peer] - if let associatedPeer = 
associatedPeer { - peers[associatedPeer.id] = associatedPeer - } - let peer = EngineRenderedPeer(peerId: peer.id, peers: peers, associatedMedia: [:]) - return ShareControllerPeerGridItem(environment: environment, context: context, theme: theme, strings: strings, peer: peer, presence: presence, topicId: nil, threadData: nil, controllerInteraction: interfaceInteraction, sectionTitle: strings.DialogList_SearchSectionRecent, search: true) + case let .topPeers(theme, strings): + return ShareControllerRecentPeersGridItem(environment: environment, context: context, theme: theme, strings: strings, controllerInteraction: interfaceInteraction) + case let .peer(_, theme, peer, associatedPeer, presence, strings): + var peers: [EnginePeer.Id: EnginePeer] = [peer.id: peer] + if let associatedPeer = associatedPeer { + peers[associatedPeer.id] = associatedPeer + } + let peer = EngineRenderedPeer(peerId: peer.id, peers: peers, associatedMedia: [:]) + return ShareControllerPeerGridItem(environment: environment, context: context, theme: theme, strings: strings, item: .peer(peer: peer, presence: presence, topicId: nil, threadData: nil), controllerInteraction: interfaceInteraction, sectionTitle: strings.DialogList_SearchSectionRecent, search: true) } } } @@ -131,7 +131,9 @@ private struct ShareSearchPeerEntry: Comparable, Identifiable { } func item(environment: ShareControllerEnvironment, context: ShareControllerAccountContext, interfaceInteraction: ShareControllerInteraction) -> GridItem { - return ShareControllerPeerGridItem(environment: environment, context: context, theme: self.theme, strings: self.strings, peer: self.peer, presence: self.presence, topicId: nil, threadData: nil, controllerInteraction: interfaceInteraction, search: true) +// let item: ShareControllerPeerGridItem.ShareItem +// item = self.peer.flatMap { .peer(peer: $0, presence: self.presence, topicId: nil, threadData: nil) } + return ShareControllerPeerGridItem(environment: environment, context: context, theme: 
self.theme, strings: self.strings, item: self.peer.flatMap({ .peer(peer: $0, presence: self.presence, topicId: nil, threadData: nil) }), controllerInteraction: interfaceInteraction, search: true) } } diff --git a/submodules/StatisticsUI/BUILD b/submodules/StatisticsUI/BUILD index 05622529bd..4246307631 100644 --- a/submodules/StatisticsUI/BUILD +++ b/submodules/StatisticsUI/BUILD @@ -37,6 +37,7 @@ swift_library( "//submodules/InviteLinksUI:InviteLinksUI", "//submodules/ShareController:ShareController", "//submodules/TelegramUI/Components/Stories/AvatarStoryIndicatorComponent", + "//submodules/TelegramUI/Components/Stories/StoryContainerScreen", ], visibility = [ "//visibility:public", diff --git a/submodules/StatisticsUI/Sources/ChannelStatsController.swift b/submodules/StatisticsUI/Sources/ChannelStatsController.swift index 0503566617..ab8484d752 100644 --- a/submodules/StatisticsUI/Sources/ChannelStatsController.swift +++ b/submodules/StatisticsUI/Sources/ChannelStatsController.swift @@ -21,6 +21,7 @@ import UndoUI import ShareController import ItemListPeerActionItem import PremiumUI +import StoryContainerScreen private let initialBoostersDisplayedLimit: Int32 = 5 @@ -28,6 +29,7 @@ private final class ChannelStatsControllerArguments { let context: AccountContext let loadDetailedGraph: (StatsGraph, Int64) -> Signal let openPostStats: (EnginePeer, StatsPostItem) -> Void + let openStory: (EngineStoryItem, UIView) -> Void let contextAction: (MessageId, ASDisplayNode, ContextGesture?) -> Void let copyBoostLink: (String) -> Void let shareBoostLink: (String) -> Void @@ -37,10 +39,11 @@ private final class ChannelStatsControllerArguments { let createPrepaidGiveaway: (PrepaidGiveaway) -> Void let updateGiftsSelected: (Bool) -> Void - init(context: AccountContext, loadDetailedGraph: @escaping (StatsGraph, Int64) -> Signal, openPostStats: @escaping (EnginePeer, StatsPostItem) -> Void, contextAction: @escaping (MessageId, ASDisplayNode, ContextGesture?) 
-> Void, copyBoostLink: @escaping (String) -> Void, shareBoostLink: @escaping (String) -> Void, openBoost: @escaping (ChannelBoostersContext.State.Boost) -> Void, expandBoosters: @escaping () -> Void, openGifts: @escaping () -> Void, createPrepaidGiveaway: @escaping (PrepaidGiveaway) -> Void, updateGiftsSelected: @escaping (Bool) -> Void) { + init(context: AccountContext, loadDetailedGraph: @escaping (StatsGraph, Int64) -> Signal, openPostStats: @escaping (EnginePeer, StatsPostItem) -> Void, openStory: @escaping (EngineStoryItem, UIView) -> Void, contextAction: @escaping (MessageId, ASDisplayNode, ContextGesture?) -> Void, copyBoostLink: @escaping (String) -> Void, shareBoostLink: @escaping (String) -> Void, openBoost: @escaping (ChannelBoostersContext.State.Boost) -> Void, expandBoosters: @escaping () -> Void, openGifts: @escaping () -> Void, createPrepaidGiveaway: @escaping (PrepaidGiveaway) -> Void, updateGiftsSelected: @escaping (Bool) -> Void) { self.context = context self.loadDetailedGraph = loadDetailedGraph self.openPostStats = openPostStats + self.openStory = openStory self.contextAction = contextAction self.copyBoostLink = copyBoostLink self.shareBoostLink = shareBoostLink @@ -652,6 +655,10 @@ private enum StatsEntry: ItemListNodeEntry { case let .post(_, _, _, _, peer, post, interactions): return StatsMessageItem(context: arguments.context, presentationData: presentationData, peer: peer, item: post, views: interactions.views, reactions: interactions.reactions, forwards: interactions.forwards, sectionId: self.section, style: .blocks, action: { arguments.openPostStats(EnginePeer(peer), post) + }, openStory: { sourceView in + if case let .story(story) = post { + arguments.openStory(story, sourceView) + } }, contextAction: !post.isStory ? 
{ node, gesture in if case let .message(message) = post { arguments.contextAction(message.id, node, gesture) @@ -1048,6 +1055,7 @@ public func channelStatsController(context: AccountContext, updatedPresentationD let premiumConfiguration = PremiumConfiguration.with(appConfiguration: context.currentAppConfiguration.with { $0 }) var openPostStatsImpl: ((EnginePeer, StatsPostItem) -> Void)? + var openStoryImpl: ((EngineStoryItem, UIView) -> Void)? var contextActionImpl: ((MessageId, ASDisplayNode, ContextGesture?) -> Void)? let actionsDisposable = DisposableSet() @@ -1097,6 +1105,8 @@ public func channelStatsController(context: AccountContext, updatedPresentationD return statsContext.loadDetailedGraph(graph, x: x) }, openPostStats: { peer, item in openPostStatsImpl?(peer, item) + }, openStory: { story, sourceView in + openStoryImpl?(story, sourceView) }, contextAction: { messageId, node, gesture in contextActionImpl?(messageId, node, gesture) }, copyBoostLink: { link in @@ -1292,10 +1302,70 @@ public func channelStatsController(context: AccountContext, updatedPresentationD case let .message(message): subject = .message(id: message.id) case let .story(story): - subject = .story(peerId: peerId, id: story.id) + subject = .story(peerId: peerId, id: story.id, item: story) } controller?.push(messageStatsController(context: context, subject: subject)) } + openStoryImpl = { [weak controller] story, sourceView in + let storyContent = SingleStoryContentContextImpl(context: context, storyId: StoryId(peerId: peerId, id: story.id), storyItem: story, readGlobally: false) + let _ = (storyContent.state + |> take(1) + |> deliverOnMainQueue).startStandalone(next: { [weak controller, weak sourceView] _ in + guard let controller, let sourceView else { + return + } + let transitionIn = StoryContainerScreen.TransitionIn( + sourceView: sourceView, + sourceRect: sourceView.bounds, + sourceCornerRadius: sourceView.bounds.width * 0.5, + sourceIsAvatar: false + ) + + let storyContainerScreen = 
StoryContainerScreen( + context: context, + content: storyContent, + transitionIn: transitionIn, + transitionOut: { [weak sourceView] peerId, storyIdValue in + if let sourceView { + let destinationView = sourceView + return StoryContainerScreen.TransitionOut( + destinationView: destinationView, + transitionView: StoryContainerScreen.TransitionView( + makeView: { [weak destinationView] in + let parentView = UIView() + if let copyView = destinationView?.snapshotContentTree(unhide: true) { + parentView.addSubview(copyView) + } + return parentView + }, + updateView: { copyView, state, transition in + guard let view = copyView.subviews.first else { + return + } + let size = state.sourceSize.interpolate(to: state.destinationSize, amount: state.progress) + transition.setPosition(view: view, position: CGPoint(x: size.width * 0.5, y: size.height * 0.5)) + transition.setScale(view: view, scale: size.width / state.destinationSize.width) + }, + insertCloneTransitionView: nil + ), + destinationRect: destinationView.bounds, + destinationCornerRadius: destinationView.bounds.width * 0.5, + destinationIsAvatar: false, + completed: { [weak sourceView] in + guard let sourceView else { + return + } + sourceView.isHidden = false + } + ) + } else { + return nil + } + } + ) + controller.push(storyContainerScreen) + }) + } contextActionImpl = { [weak controller] messageId, sourceNode, gesture in guard let controller = controller, let sourceNode = sourceNode as? 
ContextExtractedContentContainingNode else { return diff --git a/submodules/StatisticsUI/Sources/MessageStatsController.swift b/submodules/StatisticsUI/Sources/MessageStatsController.swift index 4f432bf52e..3d57776f4c 100644 --- a/submodules/StatisticsUI/Sources/MessageStatsController.swift +++ b/submodules/StatisticsUI/Sources/MessageStatsController.swift @@ -153,7 +153,7 @@ private enum StatsEntry: ItemListNodeEntry { let .publicForwardsTitle(_, text): return ItemListSectionHeaderItem(presentationData: presentationData, text: text, sectionId: self.section) case let .overview(_, stats, publicShares): - return MessageStatsOverviewItem(presentationData: presentationData, stats: stats, publicShares: publicShares, sectionId: self.section, style: .blocks) + return StatsOverviewItem(presentationData: presentationData, stats: stats as! Stats, publicShares: publicShares, sectionId: self.section, style: .blocks) case let .interactionsGraph(_, _, _, graph, type), let .reactionsGraph(_, _, _, graph, type): return StatsGraphItem(presentationData: presentationData, graph: graph, type: type, getDetailsData: { date, completion in let _ = arguments.loadDetailedGraph(graph, Int64(date.timeIntervalSince1970) * 1000).start(next: { graph in @@ -221,7 +221,7 @@ private func messageStatsControllerEntries(data: PostStats?, messages: SearchMes public enum StatsSubject { case message(id: EngineMessage.Id) - case story(peerId: EnginePeer.Id, id: Int32) + case story(peerId: EnginePeer.Id, id: Int32, item: EngineStoryItem?) } protocol PostStats { @@ -252,7 +252,7 @@ public func messageStatsController(context: AccountContext, updatedPresentationD var loadDetailedGraphImpl: ((StatsGraph, Int64) -> Signal)? 
switch subject { case let .message(id): - let statsContext = MessageStatsContext(postbox: context.account.postbox, network: context.account.network, messageId: id) + let statsContext = MessageStatsContext(account: context.account, messageId: id) loadDetailedGraphImpl = { [weak statsContext] graph, x in return statsContext?.loadDetailedGraph(graph, x: x) ?? .single(nil) } @@ -262,8 +262,8 @@ public func messageStatsController(context: AccountContext, updatedPresentationD } dataPromise.set(.single(nil) |> then(dataSignal)) anyStatsContext = statsContext - case let .story(peerId, id): - let statsContext = StoryStatsContext(postbox: context.account.postbox, network: context.account.network, peerId: peerId, storyId: id) + case let .story(peerId, id, _): + let statsContext = StoryStatsContext(account: context.account, peerId: peerId, storyId: id) loadDetailedGraphImpl = { [weak statsContext] graph, x in return statsContext?.loadDetailedGraph(graph, x: x) ?? .single(nil) } @@ -303,16 +303,15 @@ public func messageStatsController(context: AccountContext, updatedPresentationD } let iconNodePromise = Promise() - if case let .story(peerId, id) = subject { + if case let .story(peerId, id, storyItem) = subject, let storyItem { let _ = id iconNodePromise.set( context.engine.data.get(TelegramEngine.EngineData.Item.Peer.Peer(id: peerId)) |> deliverOnMainQueue |> map { peer -> ASDisplayNode? 
in - if let _ = peer?._asPeer() { -// let presentationData = context.sharedContext.currentPresentationData.with { $0 } -// return StoryIconNode(context: context, theme: presentationData.theme, peer: peer, storyItem: storyItem) - return nil + if let peer = peer?._asPeer() { + let presentationData = context.sharedContext.currentPresentationData.with { $0 } + return StoryIconNode(context: context, theme: presentationData.theme, peer: peer, storyItem: storyItem) } else { return nil } diff --git a/submodules/StatisticsUI/Sources/MessageStatsOverviewItem.swift b/submodules/StatisticsUI/Sources/MessageStatsOverviewItem.swift index efbe9c5ef2..16460e3501 100644 --- a/submodules/StatisticsUI/Sources/MessageStatsOverviewItem.swift +++ b/submodules/StatisticsUI/Sources/MessageStatsOverviewItem.swift @@ -8,17 +8,19 @@ import TelegramPresentationData import ItemListUI import PresentationDataUtils -class MessageStatsOverviewItem: ListViewItem, ItemListItem { +final class MessageStatsOverviewItem: ListViewItem, ItemListItem { let presentationData: ItemListPresentationData let stats: PostStats let publicShares: Int32? + let reactions: Int32 let sectionId: ItemListSectionId let style: ItemListStyle - init(presentationData: ItemListPresentationData, stats: PostStats, publicShares: Int32?, sectionId: ItemListSectionId, style: ItemListStyle) { + init(presentationData: ItemListPresentationData, stats: PostStats, publicShares: Int32?, reactions: Int32, sectionId: ItemListSectionId, style: ItemListStyle) { self.presentationData = presentationData self.stats = stats self.publicShares = publicShares + self.reactions = reactions self.sectionId = sectionId self.style = style } @@ -161,9 +163,19 @@ class MessageStatsOverviewItemNode: ListViewItemNode { let rightTitleLabelLayoutAndApply: ((Display.TextNodeLayout, () -> Display.TextNode))? let centerTitleLabelLayoutAndApply: ((Display.TextNodeLayout, () -> Display.TextNode))? 
+ let centerTitle: String + let centerValue: String + if let _ = item.stats as? StoryStats { + centerTitle = "Reactions" + centerValue = compactNumericCountString(Int(item.reactions)) + } else { + centerTitle = item.presentationData.strings.Stats_Message_PublicShares + centerValue = item.publicShares.flatMap { compactNumericCountString(Int($0)) } ?? "–" + } + leftValueLabelLayoutAndApply = makeLeftValueLabelLayout(TextNodeLayoutArguments(attributedString: NSAttributedString(string: compactNumericCountString(item.stats.views), font: valueFont, textColor: item.presentationData.theme.list.itemPrimaryTextColor), backgroundColor: nil, maximumNumberOfLines: 1, truncationType: .end, constrainedSize: CGSize(width: params.width, height: CGFloat.greatestFiniteMagnitude), alignment: .natural, cutout: nil, insets: UIEdgeInsets())) - centerValueLabelLayoutAndApply = makeCenterValueLabelLayout(TextNodeLayoutArguments(attributedString: NSAttributedString(string: item.publicShares.flatMap { compactNumericCountString(Int($0)) } ?? 
"–", font: valueFont, textColor: item.presentationData.theme.list.itemPrimaryTextColor), backgroundColor: nil, maximumNumberOfLines: 1, truncationType: .end, constrainedSize: CGSize(width: params.width, height: CGFloat.greatestFiniteMagnitude), alignment: .natural, cutout: nil, insets: UIEdgeInsets())) + centerValueLabelLayoutAndApply = makeCenterValueLabelLayout(TextNodeLayoutArguments(attributedString: NSAttributedString(string: centerValue, font: valueFont, textColor: item.presentationData.theme.list.itemPrimaryTextColor), backgroundColor: nil, maximumNumberOfLines: 1, truncationType: .end, constrainedSize: CGSize(width: params.width, height: CGFloat.greatestFiniteMagnitude), alignment: .natural, cutout: nil, insets: UIEdgeInsets())) rightValueLabelLayoutAndApply = makeRightValueLabelLayout(TextNodeLayoutArguments(attributedString: NSAttributedString(string: item.publicShares.flatMap { "≈\( compactNumericCountString(max(0, item.stats.forwards - Int($0))))" } ?? "–", font: valueFont, textColor: item.presentationData.theme.list.itemPrimaryTextColor), backgroundColor: nil, maximumNumberOfLines: 1, truncationType: .end, constrainedSize: CGSize(width: params.width, height: CGFloat.greatestFiniteMagnitude), alignment: .natural, cutout: nil, insets: UIEdgeInsets())) @@ -173,12 +185,7 @@ class MessageStatsOverviewItemNode: ListViewItemNode { leftTitleLabelLayoutAndApply = makeLeftTitleLabelLayout(TextNodeLayoutArguments(attributedString: NSAttributedString(string: item.presentationData.strings.Stats_Message_Views, font: titleFont, textColor: item.presentationData.theme.list.sectionHeaderTextColor), backgroundColor: nil, maximumNumberOfLines: 2, truncationType: .end, constrainedSize: CGSize(width: min(maxItemWidth, remainingWidth), height: CGFloat.greatestFiniteMagnitude), alignment: .natural, cutout: nil, insets: UIEdgeInsets())) remainingWidth -= leftTitleLabelLayoutAndApply!.0.size.width - 4.0 - let centerTitle: String - if let _ = item.stats as? 
StoryStats { - centerTitle = "Reactions" - } else { - centerTitle = item.presentationData.strings.Stats_Message_PublicShares - } + centerTitleLabelLayoutAndApply = makeCenterTitleLabelLayout(TextNodeLayoutArguments(attributedString: NSAttributedString(string: centerTitle, font: titleFont, textColor: item.presentationData.theme.list.sectionHeaderTextColor), backgroundColor: nil, maximumNumberOfLines: 2, truncationType: .end, constrainedSize: CGSize(width: min(maxItemWidth, remainingWidth), height: CGFloat.greatestFiniteMagnitude), alignment: .natural, cutout: nil, insets: UIEdgeInsets())) remainingWidth -= centerTitleLabelLayoutAndApply!.0.size.width - 4.0 diff --git a/submodules/StatisticsUI/Sources/StatsMessageItem.swift b/submodules/StatisticsUI/Sources/StatsMessageItem.swift index 9fd3dd08a8..e8c3b21f10 100644 --- a/submodules/StatisticsUI/Sources/StatsMessageItem.swift +++ b/submodules/StatisticsUI/Sources/StatsMessageItem.swift @@ -25,9 +25,10 @@ public class StatsMessageItem: ListViewItem, ItemListItem { public let sectionId: ItemListSectionId let style: ItemListStyle let action: (() -> Void)? + let openStory: (UIView) -> Void let contextAction: ((ASDisplayNode, ContextGesture?) -> Void)? - init(context: AccountContext, presentationData: ItemListPresentationData, peer: Peer, item: StatsPostItem, views: Int32, reactions: Int32, forwards: Int32, sectionId: ItemListSectionId, style: ItemListStyle, action: (() -> Void)?, contextAction: ((ASDisplayNode, ContextGesture?) -> Void)?) { + init(context: AccountContext, presentationData: ItemListPresentationData, peer: Peer, item: StatsPostItem, views: Int32, reactions: Int32, forwards: Int32, sectionId: ItemListSectionId, style: ItemListStyle, action: (() -> Void)?, openStory: @escaping (UIView) -> Void, contextAction: ((ASDisplayNode, ContextGesture?) -> Void)?) 
{ self.context = context self.presentationData = presentationData self.peer = peer @@ -38,6 +39,7 @@ public class StatsMessageItem: ListViewItem, ItemListItem { self.sectionId = sectionId self.style = style self.action = action + self.openStory = openStory self.contextAction = contextAction } @@ -103,6 +105,8 @@ final class StatsMessageItemNode: ListViewItemNode, ItemListItemNode { let contentImageNode: TransformImageNode var storyIndicator: ComponentView? + var storyButton: HighlightTrackingButton? + let titleNode: TextNode let labelNode: TextNode let viewsNode: TextNode @@ -146,7 +150,7 @@ final class StatsMessageItemNode: ListViewItemNode, ItemListItemNode { self.extractedBackgroundImageNode.alpha = 0.0 self.contentImageNode = TransformImageNode() - self.contentImageNode.isLayerBacked = true + self.contentImageNode.isLayerBacked = false self.offsetContainerNode = ASDisplayNode() self.countersContainerNode = ASDisplayNode() @@ -234,6 +238,18 @@ final class StatsMessageItemNode: ListViewItemNode, ItemListItemNode { } } + override func hitTest(_ point: CGPoint, with event: UIEvent?) -> UIView? 
{ + let result = super.hitTest(point, with: event) + return result + } + + @objc private func storyPressed() { + guard let item = self.item else { + return + } + item.openStory(self.contentImageNode.view) + } + public func asyncLayout() -> (_ item: StatsMessageItem, _ params: ListViewItemLayoutParams, _ insets: ItemListNeighbors) -> (ListViewItemNodeLayout, () -> Void) { let makeTitleLayout = TextNode.asyncLayout(self.titleNode) let makeLabelLayout = TextNode.asyncLayout(self.labelNode) @@ -603,17 +619,34 @@ final class StatsMessageItemNode: ListViewItemNode, ItemListItemNode { environment: {}, containerSize: indicatorSize ) + let storyIndicatorFrame = CGRect(origin: CGPoint(x: contentImageFrame.midX - indicatorSize.width / 2.0, y: contentImageFrame.midY - indicatorSize.height / 2.0), size: indicatorSize) if let storyIndicatorView = storyIndicator.view { if storyIndicatorView.superview == nil { strongSelf.offsetContainerNode.view.addSubview(storyIndicatorView) } - indicatorTransition.setFrame(view: storyIndicatorView, frame: CGRect(origin: CGPoint(x: contentImageFrame.midX - indicatorSize.width / 2.0, y: contentImageFrame.midY - indicatorSize.height / 2.0), size: indicatorSize)) + indicatorTransition.setFrame(view: storyIndicatorView, frame: storyIndicatorFrame) } + + let storyButton: HighlightTrackingButton + if let current = strongSelf.storyButton { + storyButton = current + } else { + storyButton = HighlightTrackingButton() + storyButton.addTarget(strongSelf, action: #selector(strongSelf.storyPressed), for: .touchUpInside) + strongSelf.view.addSubview(storyButton) + strongSelf.storyButton = storyButton + } + storyButton.frame = storyIndicatorFrame } else if let storyIndicator = strongSelf.storyIndicator { if let storyIndicatorView = storyIndicator.view { storyIndicatorView.removeFromSuperview() } strongSelf.storyIndicator = nil + + if let storyButton = strongSelf.storyButton { + storyButton.removeFromSuperview() + strongSelf.storyButton = nil + } } } }) @@ 
-623,6 +656,11 @@ final class StatsMessageItemNode: ListViewItemNode, ItemListItemNode { override public func setHighlighted(_ highlighted: Bool, at point: CGPoint, animated: Bool) { super.setHighlighted(highlighted, at: point, animated: animated) + var highlighted = highlighted + if let avatarButton = self.storyButton, avatarButton.bounds.contains(self.view.convert(point, to: storyButton)) { + highlighted = false + } + if highlighted { self.highlightedBackgroundNode.alpha = 1.0 if self.highlightedBackgroundNode.supernode == nil { @@ -670,4 +708,3 @@ final class StatsMessageItemNode: ListViewItemNode, ItemListItemNode { self.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.15, removeOnCompletion: false) } } - diff --git a/submodules/StatisticsUI/Sources/StatsOverviewItem.swift b/submodules/StatisticsUI/Sources/StatsOverviewItem.swift index 5021d83779..e0ccfa73cf 100644 --- a/submodules/StatisticsUI/Sources/StatsOverviewItem.swift +++ b/submodules/StatisticsUI/Sources/StatsOverviewItem.swift @@ -8,31 +8,41 @@ import TelegramPresentationData import ItemListUI import PresentationDataUtils -protocol PeerStats { +protocol Stats { } -extension ChannelStats: PeerStats { +extension ChannelStats: Stats { } -extension GroupStats: PeerStats { +extension GroupStats: Stats { } -extension ChannelBoostStatus: PeerStats { +extension ChannelBoostStatus: Stats { + +} + +extension MessageStats: Stats { + +} + +extension StoryStats: Stats { } class StatsOverviewItem: ListViewItem, ItemListItem { let presentationData: ItemListPresentationData - let stats: PeerStats + let stats: Stats + let publicShares: Int32? let sectionId: ItemListSectionId let style: ItemListStyle - init(presentationData: ItemListPresentationData, stats: PeerStats, sectionId: ItemListSectionId, style: ItemListStyle) { + init(presentationData: ItemListPresentationData, stats: Stats, publicShares: Int32? 
= nil, sectionId: ItemListSectionId, style: ItemListStyle) { self.presentationData = presentationData self.stats = stats + self.publicShares = publicShares self.sectionId = sectionId self.style = style } @@ -73,26 +83,133 @@ class StatsOverviewItem: ListViewItem, ItemListItem { var selectable: Bool = false } +private final class ValueItemNode: ASDisplayNode { + enum DeltaColor { + case generic + case positive + case negative + } + + private let valueNode: TextNode + private let titleNode: TextNode + private let deltaNode: TextNode + + var currentBackgroundColor: UIColor? + var pressed: (() -> Void)? + + override init() { + self.valueNode = TextNode() + self.titleNode = TextNode() + self.deltaNode = TextNode() + + super.init() + + self.isUserInteractionEnabled = false + + self.addSubnode(self.valueNode) + self.addSubnode(self.titleNode) + self.addSubnode(self.deltaNode) + } + + static func asyncLayout(_ current: ValueItemNode?) -> (_ width: CGFloat, _ presentationData: ItemListPresentationData, _ value: String, _ title: String, _ delta: (String, DeltaColor)?) 
-> (CGSize, () -> ValueItemNode) { + + let maybeMakeValueLayout = (current?.valueNode).flatMap(TextNode.asyncLayout) + let maybeMakeTitleLayout = (current?.titleNode).flatMap(TextNode.asyncLayout) + let maybeMakeDeltaLayout = (current?.deltaNode).flatMap(TextNode.asyncLayout) + + return { width, presentationData, value, title, delta in + let targetNode: ValueItemNode + if let current = current { + targetNode = current + } else { + targetNode = ValueItemNode() + } + + let makeValueLayout: (TextNodeLayoutArguments) -> (TextNodeLayout, () -> TextNode) + if let maybeMakeValueLayout { + makeValueLayout = maybeMakeValueLayout + } else { + makeValueLayout = TextNode.asyncLayout(targetNode.valueNode) + } + + let makeTitleLayout: (TextNodeLayoutArguments) -> (TextNodeLayout, () -> TextNode) + if let maybeMakeTitleLayout { + makeTitleLayout = maybeMakeTitleLayout + } else { + makeTitleLayout = TextNode.asyncLayout(targetNode.titleNode) + } + + let makeDeltaLayout: (TextNodeLayoutArguments) -> (TextNodeLayout, () -> TextNode) + if let maybeMakeDeltaLayout { + makeDeltaLayout = maybeMakeDeltaLayout + } else { + makeDeltaLayout = TextNode.asyncLayout(targetNode.deltaNode) + } + + let valueFont = Font.semibold(presentationData.fontSize.itemListBaseFontSize) + let titleFont = Font.regular(presentationData.fontSize.itemListBaseHeaderFontSize) + let deltaFont = Font.regular(presentationData.fontSize.itemListBaseHeaderFontSize) + + let valueColor = presentationData.theme.list.itemPrimaryTextColor + let titleColor = presentationData.theme.list.sectionHeaderTextColor + + let deltaColor: UIColor + if let (_, color) = delta { + switch color { + case .generic: + deltaColor = titleColor + case .positive: + deltaColor = presentationData.theme.list.freeTextSuccessColor + case .negative: + deltaColor = presentationData.theme.list.freeTextErrorColor + } + } else { + deltaColor = presentationData.theme.list.freeTextErrorColor + } + + let constrainedSize = CGSize(width: width, height: 
CGFloat.greatestFiniteMagnitude) + let (valueLayout, valueApply) = makeValueLayout(TextNodeLayoutArguments(attributedString: NSAttributedString(string: value, font: valueFont, textColor: valueColor), backgroundColor: nil, maximumNumberOfLines: 1, truncationType: .end, constrainedSize: constrainedSize, alignment: .natural, cutout: nil, insets: UIEdgeInsets())) + + let (titleLayout, titleApply) = makeTitleLayout(TextNodeLayoutArguments(attributedString: NSAttributedString(string: title, font: titleFont, textColor: titleColor), backgroundColor: nil, maximumNumberOfLines: 1, truncationType: .end, constrainedSize: constrainedSize, alignment: .natural, cutout: nil, insets: UIEdgeInsets())) + + let (deltaLayout, deltaApply) = makeDeltaLayout(TextNodeLayoutArguments(attributedString: NSAttributedString(string: delta?.0 ?? "", font: deltaFont, textColor: deltaColor), backgroundColor: nil, maximumNumberOfLines: 1, truncationType: .end, constrainedSize: constrainedSize, alignment: .natural, cutout: nil, insets: UIEdgeInsets())) + + let horizontalSpacing: CGFloat = 4.0 + let size = CGSize(width: valueLayout.size.width + horizontalSpacing + deltaLayout.size.width, height: valueLayout.size.height + titleLayout.size.height) + return (size, { + let _ = valueApply() + let _ = titleApply() + let _ = deltaApply() + + let valueFrame = CGRect(origin: .zero, size: valueLayout.size) + let titleFrame = CGRect(origin: CGPoint(x: 0.0, y: valueFrame.maxY), size: titleLayout.size) + let deltaFrame = CGRect(origin: CGPoint(x: valueFrame.maxX + horizontalSpacing, y: valueFrame.maxY - deltaLayout.size.height - 2.0), size: deltaLayout.size) + + targetNode.valueNode.frame = valueFrame + targetNode.titleNode.frame = titleFrame + targetNode.deltaNode.frame = deltaFrame + + return targetNode + }) + } + } +} + + class StatsOverviewItemNode: ListViewItemNode { private let backgroundNode: ASDisplayNode private let topStripeNode: ASDisplayNode private let bottomStripeNode: ASDisplayNode private let 
maskNode: ASImageNode - private let topLeftValueLabel: ImmediateTextNode - private let bottomLeftValueLabel: ImmediateTextNode - private let bottomRightValueLabel: ImmediateTextNode - private let topRightValueLabel: ImmediateTextNode - - private let topLeftTitleLabel: ImmediateTextNode - private let bottomLeftTitleLabel: ImmediateTextNode - private let bottomRightTitleLabel: ImmediateTextNode - private let topRightTitleLabel: ImmediateTextNode - - private let topLeftDeltaLabel: ImmediateTextNode - private let bottomLeftDeltaLabel: ImmediateTextNode - private let bottomRightDeltaLabel: ImmediateTextNode - private let topRightDeltaLabel: ImmediateTextNode + private let topLeftItem: ValueItemNode + private let topRightItem: ValueItemNode + private let middle1LeftItem: ValueItemNode + private let middle1RightItem: ValueItemNode + private let middle2LeftItem: ValueItemNode + private let middle2RightItem: ValueItemNode + private let bottomLeftItem: ValueItemNode + private let bottomRightItem: ValueItemNode private var item: StatsOverviewItem? 
@@ -109,56 +226,38 @@ class StatsOverviewItemNode: ListViewItemNode { self.maskNode = ASImageNode() - self.topLeftValueLabel = ImmediateTextNode() - self.bottomLeftValueLabel = ImmediateTextNode() - self.bottomRightValueLabel = ImmediateTextNode() - self.topRightValueLabel = ImmediateTextNode() - - self.topLeftTitleLabel = ImmediateTextNode() - self.bottomLeftTitleLabel = ImmediateTextNode() - self.bottomRightTitleLabel = ImmediateTextNode() - self.topRightTitleLabel = ImmediateTextNode() - - self.topLeftDeltaLabel = ImmediateTextNode() - self.bottomLeftDeltaLabel = ImmediateTextNode() - self.bottomRightDeltaLabel = ImmediateTextNode() - self.topRightDeltaLabel = ImmediateTextNode() + self.topLeftItem = ValueItemNode() + self.topRightItem = ValueItemNode() + self.middle1LeftItem = ValueItemNode() + self.middle1RightItem = ValueItemNode() + self.middle2LeftItem = ValueItemNode() + self.middle2RightItem = ValueItemNode() + self.bottomLeftItem = ValueItemNode() + self.bottomRightItem = ValueItemNode() super.init(layerBacked: false, dynamicBounce: false) self.clipsToBounds = true - self.addSubnode(self.topLeftValueLabel) - self.addSubnode(self.bottomLeftValueLabel) - self.addSubnode(self.bottomRightValueLabel) - self.addSubnode(self.topRightValueLabel) - - self.addSubnode(self.topLeftTitleLabel) - self.addSubnode(self.bottomLeftTitleLabel) - self.addSubnode(self.bottomRightTitleLabel) - self.addSubnode(self.topRightTitleLabel) - - self.addSubnode(self.topLeftDeltaLabel) - self.addSubnode(self.bottomLeftDeltaLabel) - self.addSubnode(self.bottomRightDeltaLabel) - self.addSubnode(self.topRightDeltaLabel) + self.addSubnode(self.topLeftItem) + self.addSubnode(self.topRightItem) + self.addSubnode(self.middle1LeftItem) + self.addSubnode(self.middle1RightItem) + self.addSubnode(self.middle2LeftItem) + self.addSubnode(self.middle2RightItem) + self.addSubnode(self.bottomLeftItem) + self.addSubnode(self.bottomRightItem) } func asyncLayout() -> (_ item: StatsOverviewItem, _ 
params: ListViewItemLayoutParams, _ insets: ItemListNeighbors) -> (ListViewItemNodeLayout, () -> Void) { - let makeTopLeftValueLabelLayout = TextNode.asyncLayout(self.topLeftValueLabel) - let makeTopRightValueLabelLayout = TextNode.asyncLayout(self.topRightValueLabel) - let makeBottomLeftValueLabelLayout = TextNode.asyncLayout(self.bottomLeftValueLabel) - let makeBottomRightValueLabelLayout = TextNode.asyncLayout(self.bottomRightValueLabel) - - let makeTopLeftTitleLabelLayout = TextNode.asyncLayout(self.topLeftTitleLabel) - let makeTopRightTitleLabelLayout = TextNode.asyncLayout(self.topRightTitleLabel) - let makeBottomLeftTitleLabelLayout = TextNode.asyncLayout(self.bottomLeftTitleLabel) - let makeBottomRightTitleLabelLayout = TextNode.asyncLayout(self.bottomRightTitleLabel) - - let makeTopLeftDeltaLabelLayout = TextNode.asyncLayout(self.topLeftDeltaLabel) - let makeTopRightDeltaLabelLayout = TextNode.asyncLayout(self.topRightDeltaLabel) - let makeBottomLeftDeltaLabelLayout = TextNode.asyncLayout(self.bottomLeftDeltaLabel) - let makeBottomRightDeltaLabelLayout = TextNode.asyncLayout(self.bottomRightDeltaLabel) + let makeTopLeftItemLayout = ValueItemNode.asyncLayout(self.topLeftItem) + let makeTopRightItemLayout = ValueItemNode.asyncLayout(self.topRightItem) + let makeMiddle1LeftItemLayout = ValueItemNode.asyncLayout(self.middle1LeftItem) + let makeMiddle1RightItemLayout = ValueItemNode.asyncLayout(self.middle1RightItem) + let makeMiddle2LeftItemLayout = ValueItemNode.asyncLayout(self.middle2LeftItem) + let makeMiddle2RightItemLayout = ValueItemNode.asyncLayout(self.middle2RightItem) + let makeBottomLeftItemLayout = ValueItemNode.asyncLayout(self.bottomLeftItem) + let makeBottomRightItemLayout = ValueItemNode.asyncLayout(self.bottomRightItem) let currentItem = self.item @@ -176,7 +275,6 @@ class StatsOverviewItemNode: ListViewItemNode { var height: CGFloat = topInset * 2.0 let leftInset = params.leftInset - let rightInset: CGFloat = params.rightInset var 
updatedTheme: PresentationTheme? if currentItem?.presentationData.theme !== item.presentationData.theme { @@ -194,28 +292,18 @@ class StatsOverviewItemNode: ListViewItemNode { insets = itemListNeighborsGroupedInsets(neighbors, params) } - let valueFont = Font.semibold(item.presentationData.fontSize.itemListBaseFontSize) - let titleFont = Font.regular(item.presentationData.fontSize.itemListBaseHeaderFontSize) - let deltaFont = Font.regular(item.presentationData.fontSize.itemListBaseHeaderFontSize) + let twoColumnLayout = "".isEmpty - let topLeftValueLabelLayoutAndApply: ((Display.TextNodeLayout, () -> Display.TextNode))? - let topRightValueLabelLayoutAndApply: ((Display.TextNodeLayout, () -> Display.TextNode))? - let bottomLeftValueLabelLayoutAndApply: ((Display.TextNodeLayout, () -> Display.TextNode))? - let bottomRightValueLabelLayoutAndApply: ((Display.TextNodeLayout, () -> Display.TextNode))? - - let topLeftTitleLabelLayoutAndApply: ((Display.TextNodeLayout, () -> Display.TextNode))? - let topRightTitleLabelLayoutAndApply: ((Display.TextNodeLayout, () -> Display.TextNode))? - let bottomLeftTitleLabelLayoutAndApply: ((Display.TextNodeLayout, () -> Display.TextNode))? - let bottomRightTitleLabelLayoutAndApply: ((Display.TextNodeLayout, () -> Display.TextNode))? + var topLeftItemLayoutAndApply: (CGSize, () -> ValueItemNode)? + var topRightItemLayoutAndApply: (CGSize, () -> ValueItemNode)? + var middle1LeftItemLayoutAndApply: (CGSize, () -> ValueItemNode)? + var middle1RightItemLayoutAndApply: (CGSize, () -> ValueItemNode)? + var middle2LeftItemLayoutAndApply: (CGSize, () -> ValueItemNode)? + var middle2RightItemLayoutAndApply: (CGSize, () -> ValueItemNode)? + var bottomLeftItemLayoutAndApply: (CGSize, () -> ValueItemNode)? + var bottomRightItemLayoutAndApply: (CGSize, () -> ValueItemNode)? - let topLeftDeltaLabelLayoutAndApply: ((Display.TextNodeLayout, () -> Display.TextNode))? 
- let topRightDeltaLabelLayoutAndApply: ((Display.TextNodeLayout, () -> Display.TextNode))? - let bottomLeftDeltaLabelLayoutAndApply: ((Display.TextNodeLayout, () -> Display.TextNode))? - let bottomRightDeltaLabelLayoutAndApply: ((Display.TextNodeLayout, () -> Display.TextNode))? - - var twoColumnLayout = true - - func deltaText(_ value: StatsValue) -> (String, Bool, Bool) { + func deltaText(_ value: StatsValue) -> (text: String, positive: Bool, hasValue: Bool) { let deltaValue = value.current - value.previous let deltaCompact = compactNumericCountString(abs(Int(deltaValue))) let delta = deltaValue > 0 ? "+\(deltaCompact)" : "-\(deltaCompact)" @@ -227,12 +315,103 @@ class StatsOverviewItemNode: ListViewItemNode { return (abs(deltaPercentage) > 0.0 ? String(format: "%@ (%.02f%%)", delta, deltaPercentage * 100.0) : "", deltaValue > 0.0, abs(deltaValue) > 0.0) } - if let stats = item.stats as? ChannelBoostStatus { - topLeftValueLabelLayoutAndApply = makeTopLeftValueLabelLayout(TextNodeLayoutArguments(attributedString: NSAttributedString(string: "\(stats.level)", font: valueFont, textColor: item.presentationData.theme.list.itemPrimaryTextColor), backgroundColor: nil, maximumNumberOfLines: 1, truncationType: .end, constrainedSize: CGSize(width: params.width, height: CGFloat.greatestFiniteMagnitude), alignment: .natural, cutout: nil, insets: UIEdgeInsets())) - - topRightValueLabelLayoutAndApply = makeTopRightValueLabelLayout(TextNodeLayoutArguments(attributedString: NSAttributedString(string: "≈\(Int(stats.premiumAudience?.value ?? 0))", font: valueFont, textColor: item.presentationData.theme.list.itemPrimaryTextColor), backgroundColor: nil, maximumNumberOfLines: 1, truncationType: .end, constrainedSize: CGSize(width: params.width, height: CGFloat.greatestFiniteMagnitude), alignment: .natural, cutout: nil, insets: UIEdgeInsets())) + if let stats = item.stats as? 
MessageStats { + topLeftItemLayoutAndApply = makeTopLeftItemLayout( + params.width, + item.presentationData, + compactNumericCountString(stats.views), + item.presentationData.strings.Stats_Message_Views, + nil + ) - bottomLeftValueLabelLayoutAndApply = makeBottomLeftValueLabelLayout(TextNodeLayoutArguments(attributedString: NSAttributedString(string: "\(stats.boosts)", font: valueFont, textColor: item.presentationData.theme.list.itemPrimaryTextColor), backgroundColor: nil, maximumNumberOfLines: 1, truncationType: .end, constrainedSize: CGSize(width: params.width, height: CGFloat.greatestFiniteMagnitude), alignment: .natural, cutout: nil, insets: UIEdgeInsets())) + topRightItemLayoutAndApply = makeTopRightItemLayout( + params.width, + item.presentationData, + item.publicShares.flatMap { compactNumericCountString(Int($0)) } ?? "–", + item.presentationData.strings.Stats_Message_PublicShares, + nil + ) + + middle1LeftItemLayoutAndApply = makeMiddle1LeftItemLayout( + params.width, + item.presentationData, + compactNumericCountString(stats.reactions), + item.presentationData.strings.Stats_Message_Reactions, + nil + ) + + middle1RightItemLayoutAndApply = makeMiddle1RightItemLayout( + params.width, + item.presentationData, + item.publicShares.flatMap { "≈\( compactNumericCountString(max(0, stats.forwards - Int($0))))" } ?? "–", + item.presentationData.strings.Stats_Message_PrivateShares, + nil + ) + + height += topRightItemLayoutAndApply!.0.height * 2.0 + verticalSpacing + } else if let stats = item.stats as? StoryStats { + topLeftItemLayoutAndApply = makeTopLeftItemLayout( + params.width, + item.presentationData, + compactNumericCountString(stats.views), + item.presentationData.strings.Stats_Message_Views, + nil + ) + + topRightItemLayoutAndApply = makeTopRightItemLayout( + params.width, + item.presentationData, + item.publicShares.flatMap { compactNumericCountString(Int($0)) } ?? 
"–", + item.presentationData.strings.Stats_Message_PublicShares, + nil + ) + + middle1LeftItemLayoutAndApply = makeMiddle1LeftItemLayout( + params.width, + item.presentationData, + compactNumericCountString(stats.reactions), + item.presentationData.strings.Stats_Message_Reactions, + nil + ) + + middle1RightItemLayoutAndApply = makeMiddle1RightItemLayout( + params.width, + item.presentationData, + compactNumericCountString(stats.forwards), + item.presentationData.strings.Stats_Message_PrivateShares, + nil + ) + + height += topRightItemLayoutAndApply!.0.height * 2.0 + verticalSpacing + } else if let stats = item.stats as? ChannelBoostStatus { + topLeftItemLayoutAndApply = makeTopLeftItemLayout( + params.width, + item.presentationData, + "\(stats.level)", + item.presentationData.strings.Stats_Boosts_Level, + nil + ) + + var premiumSubscribers: Double = 0.0 + if let premiumAudience = stats.premiumAudience, premiumAudience.total > 0 { + premiumSubscribers = premiumAudience.value / premiumAudience.total + } + + topRightItemLayoutAndApply = makeTopRightItemLayout( + params.width, + item.presentationData, + "≈\(Int(stats.premiumAudience?.value ?? 
0))", + item.presentationData.strings.Stats_Boosts_PremiumSubscribers, + (String(format: "%.02f%%", premiumSubscribers * 100.0), .generic) + ) + + middle1LeftItemLayoutAndApply = makeMiddle1LeftItemLayout( + params.width, + item.presentationData, + "\(stats.boosts)", + item.presentationData.strings.Stats_Boosts_ExistingBoosts, + nil + ) let boostsLeft: Int32 if let nextLevelBoosts = stats.nextLevelBoosts { @@ -240,162 +419,202 @@ class StatsOverviewItemNode: ListViewItemNode { } else { boostsLeft = 0 } + middle1RightItemLayoutAndApply = makeMiddle1RightItemLayout( + params.width, + item.presentationData, + "\(boostsLeft)", + item.presentationData.strings.Stats_Boosts_BoostsToLevelUp, + nil + ) - bottomRightValueLabelLayoutAndApply = makeBottomRightValueLabelLayout(TextNodeLayoutArguments(attributedString: NSAttributedString(string: "\(boostsLeft)", font: valueFont, textColor: item.presentationData.theme.list.itemPrimaryTextColor), backgroundColor: nil, maximumNumberOfLines: 1, truncationType: .end, constrainedSize: CGSize(width: params.width, height: CGFloat.greatestFiniteMagnitude), alignment: .natural, cutout: nil, insets: UIEdgeInsets())) - - topLeftTitleLabelLayoutAndApply = makeTopLeftTitleLabelLayout(TextNodeLayoutArguments(attributedString: NSAttributedString(string: item.presentationData.strings.Stats_Boosts_Level, font: titleFont, textColor: item.presentationData.theme.list.sectionHeaderTextColor), backgroundColor: nil, maximumNumberOfLines: 1, truncationType: .end, constrainedSize: CGSize(width: params.width, height: CGFloat.greatestFiniteMagnitude), alignment: .natural, cutout: nil, insets: UIEdgeInsets())) - - topRightTitleLabelLayoutAndApply = makeTopRightTitleLabelLayout(TextNodeLayoutArguments(attributedString: NSAttributedString(string: item.presentationData.strings.Stats_Boosts_PremiumSubscribers, font: titleFont, textColor: item.presentationData.theme.list.sectionHeaderTextColor), backgroundColor: nil, maximumNumberOfLines: 1, truncationType: 
.end, constrainedSize: CGSize(width: params.width, height: CGFloat.greatestFiniteMagnitude), alignment: .natural, cutout: nil, insets: UIEdgeInsets())) - - bottomLeftTitleLabelLayoutAndApply = makeBottomLeftTitleLabelLayout(TextNodeLayoutArguments(attributedString: NSAttributedString(string: item.presentationData.strings.Stats_Boosts_ExistingBoosts, font: titleFont, textColor: item.presentationData.theme.list.sectionHeaderTextColor), backgroundColor: nil, maximumNumberOfLines: 1, truncationType: .end, constrainedSize: CGSize(width: params.width, height: CGFloat.greatestFiniteMagnitude), alignment: .natural, cutout: nil, insets: UIEdgeInsets())) - - bottomRightTitleLabelLayoutAndApply = makeBottomRightTitleLabelLayout(TextNodeLayoutArguments(attributedString: NSAttributedString(string: item.presentationData.strings.Stats_Boosts_BoostsToLevelUp, font: titleFont, textColor: item.presentationData.theme.list.sectionHeaderTextColor), backgroundColor: nil, maximumNumberOfLines: 1, truncationType: .end, constrainedSize: CGSize(width: params.width, height: CGFloat.greatestFiniteMagnitude), alignment: .natural, cutout: nil, insets: UIEdgeInsets())) - - - topLeftDeltaLabelLayoutAndApply = nil - - var premiumSubscribers: Double = 0.0 - if let premiumAudience = stats.premiumAudience, premiumAudience.total > 0 { - premiumSubscribers = premiumAudience.value / premiumAudience.total + if twoColumnLayout { + height += topRightItemLayoutAndApply!.0.height * 2.0 + verticalSpacing + } else { + height += topLeftItemLayoutAndApply!.0.height * 4.0 + verticalSpacing * 3.0 } - - topRightDeltaLabelLayoutAndApply = makeTopRightDeltaLabelLayout(TextNodeLayoutArguments(attributedString: NSAttributedString(string: String(format: "%.02f%%", premiumSubscribers * 100.0), font: deltaFont, textColor: item.presentationData.theme.list.freeTextColor), backgroundColor: nil, maximumNumberOfLines: 1, truncationType: .end, constrainedSize: CGSize(width: params.width, height: 
CGFloat.greatestFiniteMagnitude), alignment: .natural, cutout: nil, insets: UIEdgeInsets())) - - bottomLeftDeltaLabelLayoutAndApply = nil - bottomRightDeltaLabelLayoutAndApply = nil - - height += topRightValueLabelLayoutAndApply!.0.size.height + topRightTitleLabelLayoutAndApply!.0.size.height - - height += verticalSpacing - height += bottomRightValueLabelLayoutAndApply!.0.size.height + bottomRightTitleLabelLayoutAndApply!.0.size.height } else if let stats = item.stats as? ChannelStats { let viewsPerPostDelta = deltaText(stats.viewsPerPost) let sharesPerPostDelta = deltaText(stats.sharesPerPost) + let reactionsPerPostDelta = deltaText(stats.reactionsPerPost) + + let viewsPerStoryDelta = deltaText(stats.viewsPerStory) + let sharesPerStoryDelta = deltaText(stats.sharesPerStory) + let reactionsPerStoryDelta = deltaText(stats.reactionsPerStory) - let displayBottomRow = stats.sharesPerPost.current > 0 || viewsPerPostDelta.2 || stats.viewsPerPost.current > 0 || sharesPerPostDelta.2 + let followersDelta = deltaText(stats.followers) + topLeftItemLayoutAndApply = makeTopLeftItemLayout( + params.width, + item.presentationData, + compactNumericCountString(Int(stats.followers.current)), + item.presentationData.strings.Stats_Followers, + (followersDelta.text, followersDelta.positive ? 
.positive : .negative) + ) - topLeftValueLabelLayoutAndApply = makeTopLeftValueLabelLayout(TextNodeLayoutArguments(attributedString: NSAttributedString(string: compactNumericCountString(Int(stats.followers.current)), font: valueFont, textColor: item.presentationData.theme.list.itemPrimaryTextColor), backgroundColor: nil, maximumNumberOfLines: 1, truncationType: .end, constrainedSize: CGSize(width: params.width, height: CGFloat.greatestFiniteMagnitude), alignment: .natural, cutout: nil, insets: UIEdgeInsets())) - var enabledNotifications: Double = 0.0 if stats.enabledNotifications.total > 0 { enabledNotifications = stats.enabledNotifications.value / stats.enabledNotifications.total } + topRightItemLayoutAndApply = makeTopRightItemLayout( + params.width, + item.presentationData, + String(format: "%.02f%%", enabledNotifications * 100.0), + item.presentationData.strings.Stats_EnabledNotifications, + nil + ) - topRightValueLabelLayoutAndApply = makeTopRightValueLabelLayout(TextNodeLayoutArguments(attributedString: NSAttributedString(string: String(format: "%.02f%%", enabledNotifications * 100.0), font: valueFont, textColor: item.presentationData.theme.list.itemPrimaryTextColor), backgroundColor: nil, maximumNumberOfLines: 1, truncationType: .end, constrainedSize: CGSize(width: params.width, height: CGFloat.greatestFiniteMagnitude), alignment: .natural, cutout: nil, insets: UIEdgeInsets())) + let hasMessages = stats.viewsPerPost.current > 0 + let hasStories = stats.viewsPerStory.current > 0 || viewsPerStoryDelta.hasValue - bottomLeftValueLabelLayoutAndApply = makeBottomLeftValueLabelLayout(TextNodeLayoutArguments(attributedString: NSAttributedString(string: displayBottomRow ? 
compactNumericCountString(Int(stats.viewsPerPost.current)) : "", font: valueFont, textColor: item.presentationData.theme.list.itemPrimaryTextColor), backgroundColor: nil, maximumNumberOfLines: 1, truncationType: .end, constrainedSize: CGSize(width: params.width, height: CGFloat.greatestFiniteMagnitude), alignment: .natural, cutout: nil, insets: UIEdgeInsets())) - - bottomRightValueLabelLayoutAndApply = makeBottomRightValueLabelLayout(TextNodeLayoutArguments(attributedString: NSAttributedString(string: displayBottomRow ? compactNumericCountString(Int(stats.sharesPerPost.current)) : "", font: valueFont, textColor: item.presentationData.theme.list.itemPrimaryTextColor), backgroundColor: nil, maximumNumberOfLines: 1, truncationType: .end, constrainedSize: CGSize(width: params.width, height: CGFloat.greatestFiniteMagnitude), alignment: .natural, cutout: nil, insets: UIEdgeInsets())) - - topLeftTitleLabelLayoutAndApply = makeTopLeftTitleLabelLayout(TextNodeLayoutArguments(attributedString: NSAttributedString(string: item.presentationData.strings.Stats_Followers, font: titleFont, textColor: item.presentationData.theme.list.sectionHeaderTextColor), backgroundColor: nil, maximumNumberOfLines: 1, truncationType: .end, constrainedSize: CGSize(width: params.width, height: CGFloat.greatestFiniteMagnitude), alignment: .natural, cutout: nil, insets: UIEdgeInsets())) - - topRightTitleLabelLayoutAndApply = makeTopRightTitleLabelLayout(TextNodeLayoutArguments(attributedString: NSAttributedString(string: item.presentationData.strings.Stats_EnabledNotifications, font: titleFont, textColor: item.presentationData.theme.list.sectionHeaderTextColor), backgroundColor: nil, maximumNumberOfLines: 1, truncationType: .end, constrainedSize: CGSize(width: params.width, height: CGFloat.greatestFiniteMagnitude), alignment: .natural, cutout: nil, insets: UIEdgeInsets())) - - bottomLeftTitleLabelLayoutAndApply = makeBottomLeftTitleLabelLayout(TextNodeLayoutArguments(attributedString: 
NSAttributedString(string: displayBottomRow ? item.presentationData.strings.Stats_ViewsPerPost : "", font: titleFont, textColor: item.presentationData.theme.list.sectionHeaderTextColor), backgroundColor: nil, maximumNumberOfLines: 1, truncationType: .end, constrainedSize: CGSize(width: params.width, height: CGFloat.greatestFiniteMagnitude), alignment: .natural, cutout: nil, insets: UIEdgeInsets())) - - bottomRightTitleLabelLayoutAndApply = makeBottomRightTitleLabelLayout(TextNodeLayoutArguments(attributedString: NSAttributedString(string: displayBottomRow ? item.presentationData.strings.Stats_SharesPerPost : "", font: titleFont, textColor: item.presentationData.theme.list.sectionHeaderTextColor), backgroundColor: nil, maximumNumberOfLines: 1, truncationType: .end, constrainedSize: CGSize(width: params.width, height: CGFloat.greatestFiniteMagnitude), alignment: .natural, cutout: nil, insets: UIEdgeInsets())) - - let followersDelta = deltaText(stats.followers) - topLeftDeltaLabelLayoutAndApply = makeTopLeftDeltaLabelLayout(TextNodeLayoutArguments(attributedString: NSAttributedString(string: followersDelta.0, font: deltaFont, textColor: followersDelta.1 ? item.presentationData.theme.list.freeTextSuccessColor : item.presentationData.theme.list.freeTextErrorColor), backgroundColor: nil, maximumNumberOfLines: 1, truncationType: .end, constrainedSize: CGSize(width: params.width, height: CGFloat.greatestFiniteMagnitude), alignment: .natural, cutout: nil, insets: UIEdgeInsets())) - - topRightDeltaLabelLayoutAndApply = nil - - bottomLeftDeltaLabelLayoutAndApply = makeBottomLeftDeltaLabelLayout(TextNodeLayoutArguments(attributedString: NSAttributedString(string: viewsPerPostDelta.0, font: deltaFont, textColor: viewsPerPostDelta.1 ? 
item.presentationData.theme.list.freeTextSuccessColor : item.presentationData.theme.list.freeTextErrorColor), backgroundColor: nil, maximumNumberOfLines: 1, truncationType: .end, constrainedSize: CGSize(width: params.width, height: CGFloat.greatestFiniteMagnitude), alignment: .natural, cutout: nil, insets: UIEdgeInsets())) - - bottomRightDeltaLabelLayoutAndApply = makeBottomRightDeltaLabelLayout(TextNodeLayoutArguments(attributedString: NSAttributedString(string: sharesPerPostDelta.0, font: deltaFont, textColor: sharesPerPostDelta.1 ? item.presentationData.theme.list.freeTextSuccessColor : item.presentationData.theme.list.freeTextErrorColor), backgroundColor: nil, maximumNumberOfLines: 1, truncationType: .end, constrainedSize: CGSize(width: params.width, height: CGFloat.greatestFiniteMagnitude), alignment: .natural, cutout: nil, insets: UIEdgeInsets())) - - - height += topRightValueLabelLayoutAndApply!.0.size.height + topRightTitleLabelLayoutAndApply!.0.size.height - - if max(topLeftValueLabelLayoutAndApply!.0.size.width + topLeftDeltaLabelLayoutAndApply!.0.size.width + horizontalSpacing + topRightValueLabelLayoutAndApply!.0.size.width, bottomLeftValueLabelLayoutAndApply!.0.size.width + bottomLeftDeltaLabelLayoutAndApply!.0.size.width + horizontalSpacing + bottomRightValueLabelLayoutAndApply!.0.size.width + bottomRightDeltaLabelLayoutAndApply!.0.size.width) > params.width - leftInset - rightInset { - twoColumnLayout = false + var items: [Int: (String, String, (String, ValueItemNode.DeltaColor)?)] = [:] + if hasMessages { + items[0] = ( + compactNumericCountString(Int(stats.viewsPerPost.current)), + item.presentationData.strings.Stats_ViewsPerPost, + (viewsPerPostDelta.text, viewsPerPostDelta.positive ? .positive : .negative) + ) + } + if hasMessages { + let index = hasStories ? 
2 : 1 + items[index] = ( + compactNumericCountString(Int(stats.sharesPerPost.current)), + item.presentationData.strings.Stats_SharesPerPost, + (sharesPerPostDelta.text, sharesPerPostDelta.positive ? .positive : .negative) + ) + } + if stats.reactionsPerPost.current > 0 || reactionsPerStoryDelta.hasValue { + let index = hasStories ? 4 : 2 + items[index] = ( + compactNumericCountString(Int(stats.reactionsPerPost.current)), + item.presentationData.strings.Stats_ReactionsPerPost, + (reactionsPerPostDelta.text, reactionsPerPostDelta.positive ? .positive : .negative) + ) + } + if hasStories { + items[1] = ( + compactNumericCountString(Int(stats.viewsPerStory.current)), + item.presentationData.strings.Stats_ViewsPerStory, + (viewsPerStoryDelta.text, viewsPerStoryDelta.positive ? .positive : .negative) + ) + items[3] = ( + compactNumericCountString(Int(stats.sharesPerStory.current)), + item.presentationData.strings.Stats_SharesPerStory, + (sharesPerStoryDelta.text, sharesPerStoryDelta.positive ? .positive : .negative) + ) + items[5] = ( + compactNumericCountString(Int(stats.reactionsPerStory.current)), + item.presentationData.strings.Stats_ReactionsPerStory, + (reactionsPerStoryDelta.text, reactionsPerStoryDelta.positive ? 
.positive : .negative) + ) } + if let (value, title, delta) = items[0] { + middle1LeftItemLayoutAndApply = makeMiddle1LeftItemLayout( + params.width, + item.presentationData, + value, + title, + delta + ) + } + if let (value, title, delta) = items[1] { + middle1RightItemLayoutAndApply = makeMiddle1RightItemLayout( + params.width, + item.presentationData, + value, + title, + delta + ) + } + if let (value, title, delta) = items[2] { + middle2LeftItemLayoutAndApply = makeMiddle2LeftItemLayout( + params.width, + item.presentationData, + value, + title, + delta + ) + } + if let (value, title, delta) = items[3] { + middle2RightItemLayoutAndApply = makeMiddle2RightItemLayout( + params.width, + item.presentationData, + value, + title, + delta + ) + } + if let (value, title, delta) = items[4] { + bottomLeftItemLayoutAndApply = makeBottomLeftItemLayout( + params.width, + item.presentationData, + value, + title, + delta + ) + } + if let (value, title, delta) = items[5] { + bottomRightItemLayoutAndApply = makeBottomRightItemLayout( + params.width, + item.presentationData, + value, + title, + delta + ) + } + + let valuesCount = CGFloat(2 + items.count) if twoColumnLayout { - if displayBottomRow { - height += verticalSpacing - height += bottomRightValueLabelLayoutAndApply!.0.size.height + bottomRightTitleLabelLayoutAndApply!.0.size.height - } + let rowsCount = ceil(valuesCount / 2.0) + height += topLeftItemLayoutAndApply!.0.height * rowsCount + (verticalSpacing * (rowsCount - 1.0)) } else { - height += verticalSpacing - height += topRightValueLabelLayoutAndApply!.0.size.height + topRightTitleLabelLayoutAndApply!.0.size.height - if !stats.viewsPerPost.current.isZero || viewsPerPostDelta.2 { - height += verticalSpacing - height += bottomLeftValueLabelLayoutAndApply!.0.size.height + bottomLeftTitleLabelLayoutAndApply!.0.size.height - } - if !stats.sharesPerPost.current.isZero || sharesPerPostDelta.2 { - height += verticalSpacing - height += 
bottomRightValueLabelLayoutAndApply!.0.size.height + bottomRightTitleLabelLayoutAndApply!.0.size.height - } + height += topLeftItemLayoutAndApply!.0.height * valuesCount + (verticalSpacing * (valuesCount - 1.0)) } } else if let stats = item.stats as? GroupStats { let viewersDelta = deltaText(stats.viewers) let postersDelta = deltaText(stats.posters) let displayBottomRow = stats.viewers.current > 0 || viewersDelta.2 || stats.posters.current > 0 || postersDelta.2 - - topLeftValueLabelLayoutAndApply = makeTopLeftValueLabelLayout(TextNodeLayoutArguments(attributedString: NSAttributedString(string: compactNumericCountString(Int(stats.members.current)), font: valueFont, textColor: item.presentationData.theme.list.itemPrimaryTextColor), backgroundColor: nil, maximumNumberOfLines: 1, truncationType: .end, constrainedSize: CGSize(width: params.width, height: CGFloat.greatestFiniteMagnitude), alignment: .natural, cutout: nil, insets: UIEdgeInsets())) - - topRightValueLabelLayoutAndApply = makeTopRightValueLabelLayout(TextNodeLayoutArguments(attributedString: NSAttributedString(string: compactNumericCountString(Int(stats.messages.current)), font: valueFont, textColor: item.presentationData.theme.list.itemPrimaryTextColor), backgroundColor: nil, maximumNumberOfLines: 1, truncationType: .end, constrainedSize: CGSize(width: params.width, height: CGFloat.greatestFiniteMagnitude), alignment: .natural, cutout: nil, insets: UIEdgeInsets())) - - bottomLeftValueLabelLayoutAndApply = makeBottomLeftValueLabelLayout(TextNodeLayoutArguments(attributedString: NSAttributedString(string: displayBottomRow ? 
compactNumericCountString(Int(stats.viewers.current)) : "", font: valueFont, textColor: item.presentationData.theme.list.itemPrimaryTextColor), backgroundColor: nil, maximumNumberOfLines: 1, truncationType: .end, constrainedSize: CGSize(width: params.width, height: CGFloat.greatestFiniteMagnitude), alignment: .natural, cutout: nil, insets: UIEdgeInsets())) - - bottomRightValueLabelLayoutAndApply = makeBottomRightValueLabelLayout(TextNodeLayoutArguments(attributedString: NSAttributedString(string: displayBottomRow ? compactNumericCountString(Int(stats.posters.current)) : "", font: valueFont, textColor: item.presentationData.theme.list.itemPrimaryTextColor), backgroundColor: nil, maximumNumberOfLines: 1, truncationType: .end, constrainedSize: CGSize(width: params.width, height: CGFloat.greatestFiniteMagnitude), alignment: .natural, cutout: nil, insets: UIEdgeInsets())) - - topLeftTitleLabelLayoutAndApply = makeTopLeftTitleLabelLayout(TextNodeLayoutArguments(attributedString: NSAttributedString(string: item.presentationData.strings.Stats_GroupMembers, font: titleFont, textColor: item.presentationData.theme.list.sectionHeaderTextColor), backgroundColor: nil, maximumNumberOfLines: 1, truncationType: .end, constrainedSize: CGSize(width: params.width, height: CGFloat.greatestFiniteMagnitude), alignment: .natural, cutout: nil, insets: UIEdgeInsets())) - - topRightTitleLabelLayoutAndApply = makeTopRightTitleLabelLayout(TextNodeLayoutArguments(attributedString: NSAttributedString(string: item.presentationData.strings.Stats_GroupMessages, font: titleFont, textColor: item.presentationData.theme.list.sectionHeaderTextColor), backgroundColor: nil, maximumNumberOfLines: 1, truncationType: .end, constrainedSize: CGSize(width: params.width, height: CGFloat.greatestFiniteMagnitude), alignment: .natural, cutout: nil, insets: UIEdgeInsets())) - - bottomLeftTitleLabelLayoutAndApply = makeBottomLeftTitleLabelLayout(TextNodeLayoutArguments(attributedString: NSAttributedString(string: 
displayBottomRow ? item.presentationData.strings.Stats_GroupViewers : "", font: titleFont, textColor: item.presentationData.theme.list.sectionHeaderTextColor), backgroundColor: nil, maximumNumberOfLines: 1, truncationType: .end, constrainedSize: CGSize(width: params.width, height: CGFloat.greatestFiniteMagnitude), alignment: .natural, cutout: nil, insets: UIEdgeInsets())) - - bottomRightTitleLabelLayoutAndApply = makeBottomRightTitleLabelLayout(TextNodeLayoutArguments(attributedString: NSAttributedString(string: displayBottomRow ? item.presentationData.strings.Stats_GroupPosters : "", font: titleFont, textColor: item.presentationData.theme.list.sectionHeaderTextColor), backgroundColor: nil, maximumNumberOfLines: 1, truncationType: .end, constrainedSize: CGSize(width: params.width, height: CGFloat.greatestFiniteMagnitude), alignment: .natural, cutout: nil, insets: UIEdgeInsets())) - + let membersDelta = deltaText(stats.members) - topLeftDeltaLabelLayoutAndApply = makeTopLeftDeltaLabelLayout(TextNodeLayoutArguments(attributedString: NSAttributedString(string: membersDelta.0, font: deltaFont, textColor: membersDelta.1 ? item.presentationData.theme.list.freeTextSuccessColor : item.presentationData.theme.list.freeTextErrorColor), backgroundColor: nil, maximumNumberOfLines: 1, truncationType: .end, constrainedSize: CGSize(width: params.width, height: CGFloat.greatestFiniteMagnitude), alignment: .natural, cutout: nil, insets: UIEdgeInsets())) + topLeftItemLayoutAndApply = makeTopLeftItemLayout( + params.width, + item.presentationData, + compactNumericCountString(Int(stats.members.current)), + item.presentationData.strings.Stats_GroupMembers, + (membersDelta.text, membersDelta.positive ? .positive : .negative) + ) let messagesDelta = deltaText(stats.messages) - topRightDeltaLabelLayoutAndApply = makeTopRightDeltaLabelLayout(TextNodeLayoutArguments(attributedString: NSAttributedString(string: messagesDelta.0, font: deltaFont, textColor: messagesDelta.1 ? 
item.presentationData.theme.list.freeTextSuccessColor : item.presentationData.theme.list.freeTextErrorColor), backgroundColor: nil, maximumNumberOfLines: 1, truncationType: .end, constrainedSize: CGSize(width: params.width, height: CGFloat.greatestFiniteMagnitude), alignment: .natural, cutout: nil, insets: UIEdgeInsets())) + topRightItemLayoutAndApply = makeTopRightItemLayout( + params.width, + item.presentationData, + compactNumericCountString(Int(stats.messages.current)), + item.presentationData.strings.Stats_GroupMessages, + (messagesDelta.text, messagesDelta.positive ? .positive : .negative) + ) - bottomLeftDeltaLabelLayoutAndApply = makeBottomLeftDeltaLabelLayout(TextNodeLayoutArguments(attributedString: NSAttributedString(string: viewersDelta.0, font: deltaFont, textColor: viewersDelta.1 ? item.presentationData.theme.list.freeTextSuccessColor : item.presentationData.theme.list.freeTextErrorColor), backgroundColor: nil, maximumNumberOfLines: 1, truncationType: .end, constrainedSize: CGSize(width: params.width, height: CGFloat.greatestFiniteMagnitude), alignment: .natural, cutout: nil, insets: UIEdgeInsets())) - - bottomRightDeltaLabelLayoutAndApply = makeBottomRightDeltaLabelLayout(TextNodeLayoutArguments(attributedString: NSAttributedString(string: postersDelta.0, font: deltaFont, textColor: postersDelta.1 ? 
item.presentationData.theme.list.freeTextSuccessColor : item.presentationData.theme.list.freeTextErrorColor), backgroundColor: nil, maximumNumberOfLines: 1, truncationType: .end, constrainedSize: CGSize(width: params.width, height: CGFloat.greatestFiniteMagnitude), alignment: .natural, cutout: nil, insets: UIEdgeInsets())) - - - height += topRightValueLabelLayoutAndApply!.0.size.height + topRightTitleLabelLayoutAndApply!.0.size.height - - if max(topLeftValueLabelLayoutAndApply!.0.size.width + topLeftDeltaLabelLayoutAndApply!.0.size.width + horizontalSpacing + topRightValueLabelLayoutAndApply!.0.size.width, bottomLeftValueLabelLayoutAndApply!.0.size.width + bottomLeftDeltaLabelLayoutAndApply!.0.size.width + horizontalSpacing + bottomRightValueLabelLayoutAndApply!.0.size.width + bottomRightDeltaLabelLayoutAndApply!.0.size.width) > params.width - leftInset - rightInset { - twoColumnLayout = false + if displayBottomRow { + middle1LeftItemLayoutAndApply = makeMiddle1LeftItemLayout( + params.width, + item.presentationData, + compactNumericCountString(Int(stats.viewers.current)), + item.presentationData.strings.Stats_GroupViewers, + (viewersDelta.text, viewersDelta.positive ? .positive : .negative) + ) + + middle1RightItemLayoutAndApply = makeMiddle1RightItemLayout( + params.width, + item.presentationData, + compactNumericCountString(Int(stats.posters.current)), + item.presentationData.strings.Stats_GroupPosters, + (postersDelta.text, postersDelta.positive ? 
.positive : .negative) + ) } - if twoColumnLayout { - if !stats.viewers.current.isZero || viewersDelta.2 || !stats.posters.current.isZero || postersDelta.2 { - height += verticalSpacing - height += bottomRightValueLabelLayoutAndApply!.0.size.height + bottomRightTitleLabelLayoutAndApply!.0.size.height - } + if twoColumnLayout || !displayBottomRow { + height += topRightItemLayoutAndApply!.0.height * 2.0 + verticalSpacing } else { - height += verticalSpacing - height += topRightValueLabelLayoutAndApply!.0.size.height + topRightTitleLabelLayoutAndApply!.0.size.height - if !stats.viewers.current.isZero || viewersDelta.2 { - height += verticalSpacing - height += bottomLeftValueLabelLayoutAndApply!.0.size.height + bottomLeftTitleLabelLayoutAndApply!.0.size.height - } - if !stats.posters.current.isZero || postersDelta.2 { - height += verticalSpacing - height += bottomRightValueLabelLayoutAndApply!.0.size.height + bottomRightTitleLabelLayoutAndApply!.0.size.height - } + height += topLeftItemLayoutAndApply!.0.height * 4.0 + verticalSpacing * 3.0 } - } else { - topLeftValueLabelLayoutAndApply = nil - topRightValueLabelLayoutAndApply = nil - bottomLeftValueLabelLayoutAndApply = nil - bottomRightValueLabelLayoutAndApply = nil - topLeftTitleLabelLayoutAndApply = nil - topRightTitleLabelLayoutAndApply = nil - bottomLeftTitleLabelLayoutAndApply = nil - bottomRightTitleLabelLayoutAndApply = nil - topLeftDeltaLabelLayoutAndApply = nil - topRightDeltaLabelLayoutAndApply = nil - bottomLeftDeltaLabelLayoutAndApply = nil - bottomRightDeltaLabelLayoutAndApply = nil } let contentSize = CGSize(width: params.width, height: height) @@ -405,18 +624,14 @@ class StatsOverviewItemNode: ListViewItemNode { if let strongSelf = self { strongSelf.item = item - let _ = topLeftValueLabelLayoutAndApply?.1() - let _ = topRightValueLabelLayoutAndApply?.1() - let _ = bottomLeftValueLabelLayoutAndApply?.1() - let _ = bottomRightValueLabelLayoutAndApply?.1() - let _ = topLeftTitleLabelLayoutAndApply?.1() - 
let _ = topRightTitleLabelLayoutAndApply?.1() - let _ = bottomLeftTitleLabelLayoutAndApply?.1() - let _ = bottomRightTitleLabelLayoutAndApply?.1() - let _ = topLeftDeltaLabelLayoutAndApply?.1() - let _ = topRightDeltaLabelLayoutAndApply?.1() - let _ = bottomLeftDeltaLabelLayoutAndApply?.1() - let _ = bottomRightDeltaLabelLayoutAndApply?.1() + let _ = topLeftItemLayoutAndApply?.1() + let _ = topRightItemLayoutAndApply?.1() + let _ = middle1LeftItemLayoutAndApply?.1() + let _ = middle1RightItemLayoutAndApply?.1() + let _ = middle2LeftItemLayoutAndApply?.1() + let _ = middle2RightItemLayoutAndApply?.1() + let _ = bottomLeftItemLayoutAndApply?.1() + let _ = bottomRightItemLayoutAndApply?.1() if let _ = updatedTheme { strongSelf.topStripeNode.backgroundColor = itemSeparatorColor @@ -482,47 +697,63 @@ class StatsOverviewItemNode: ListViewItemNode { strongSelf.bottomStripeNode.frame = CGRect(origin: CGPoint(x: bottomStripeInset, y: contentSize.height - separatorHeight), size: CGSize(width: params.width - bottomStripeInset, height: separatorHeight)) } - var secondColumnX = sideInset + leftInset + let firstColumnX = sideInset + leftInset + var secondColumnX = firstColumnX - if let topLeftValueLabelLayout = topLeftValueLabelLayoutAndApply?.0, let topLeftTitleLabelLayout = topLeftTitleLabelLayoutAndApply?.0 { - strongSelf.topLeftValueLabel.frame = CGRect(origin: CGPoint(x: sideInset + leftInset, y: topInset), size: topLeftValueLabelLayout.size) - strongSelf.topLeftTitleLabel.frame = CGRect(origin: CGPoint(x: sideInset + leftInset, y: strongSelf.topLeftValueLabel.frame.maxY), size: topLeftTitleLabelLayout.size) - - if twoColumnLayout { - let topWidth = topLeftValueLabelLayout.size.width + (topLeftDeltaLabelLayoutAndApply?.0.size.width ?? 0) - let bottomWidth = (bottomLeftValueLabelLayoutAndApply?.0.size.width ?? 0.0) + (bottomLeftDeltaLabelLayoutAndApply?.0.size.width ?? 
0.0) - secondColumnX = max(layout.size.width / 2.0, sideInset + leftInset + max(topWidth, bottomWidth) + horizontalSpacing) + if twoColumnLayout { + var maxLeftWidth: CGFloat = 0.0 + if let topLeftItemLayout = topLeftItemLayoutAndApply?.0 { + maxLeftWidth = max(maxLeftWidth, topLeftItemLayout.width) } - } - if let topLeftDeltaLabelLayout = topLeftDeltaLabelLayoutAndApply?.0 { - strongSelf.topLeftDeltaLabel.frame = CGRect(origin: CGPoint(x: strongSelf.topLeftValueLabel.frame.maxX + horizontalSpacing, y: strongSelf.topLeftValueLabel.frame.maxY - topLeftDeltaLabelLayout.size.height - 2.0), size: topLeftDeltaLabelLayout.size) + if let middle1LeftItemLayout = middle1LeftItemLayoutAndApply?.0 { + maxLeftWidth = max(maxLeftWidth, middle1LeftItemLayout.width) + } + if let middle2LeftItemLayout = middle2LeftItemLayoutAndApply?.0 { + maxLeftWidth = max(maxLeftWidth, middle2LeftItemLayout.width) + } + if let bottomLeftItemLayout = bottomLeftItemLayoutAndApply?.0 { + maxLeftWidth = max(maxLeftWidth, bottomLeftItemLayout.width) + } + secondColumnX = max(layout.size.width / 2.0, firstColumnX + maxLeftWidth + horizontalSpacing) } - if let topRightValueLabelLayout = topRightValueLabelLayoutAndApply?.0, let topRightTitleLabelLayout = topRightTitleLabelLayoutAndApply?.0 { - let topRightY = twoColumnLayout ? 
topInset : strongSelf.topLeftTitleLabel.frame.maxY + verticalSpacing - strongSelf.topRightValueLabel.frame = CGRect(origin: CGPoint(x: secondColumnX, y: topRightY), size: topRightValueLabelLayout.size) - strongSelf.topRightTitleLabel.frame = CGRect(origin: CGPoint(x: secondColumnX, y: strongSelf.topRightValueLabel.frame.maxY), size: topRightTitleLabelLayout.size) - } - if let topRightDeltaLabelLayout = topRightDeltaLabelLayoutAndApply?.0 { - strongSelf.topRightDeltaLabel.frame = CGRect(origin: CGPoint(x: strongSelf.topRightValueLabel.frame.maxX + horizontalSpacing, y: strongSelf.topRightValueLabel.frame.maxY - topRightDeltaLabelLayout.size.height - 2.0), size: topRightDeltaLabelLayout.size) + if let topLeftItemLayout = topLeftItemLayoutAndApply?.0 { + strongSelf.topLeftItem.frame = CGRect(origin: CGPoint(x: firstColumnX, y: topInset), size: topLeftItemLayout) } - if let bottomLeftValueLabelLayout = bottomLeftValueLabelLayoutAndApply?.0, let bottomLeftTitleLabelLayout = bottomLeftTitleLabelLayoutAndApply?.0 { - let bottomLeftY = twoColumnLayout ? strongSelf.topLeftTitleLabel.frame.maxY + verticalSpacing : strongSelf.topRightTitleLabel.frame.maxY + verticalSpacing - strongSelf.bottomLeftValueLabel.frame = CGRect(origin: CGPoint(x: sideInset + leftInset, y: bottomLeftY), size: bottomLeftValueLabelLayout.size) - strongSelf.bottomLeftTitleLabel.frame = CGRect(origin: CGPoint(x: sideInset + leftInset, y: strongSelf.bottomLeftValueLabel.frame.maxY), size: bottomLeftTitleLabelLayout.size) - } - if let bottomLeftDeltaLabelLayout = bottomLeftDeltaLabelLayoutAndApply?.0 { - strongSelf.bottomLeftDeltaLabel.frame = CGRect(origin: CGPoint(x: strongSelf.bottomLeftValueLabel.frame.maxX + horizontalSpacing, y: strongSelf.bottomLeftValueLabel.frame.maxY - bottomLeftDeltaLabelLayout.size.height - 2.0), size: bottomLeftDeltaLabelLayout.size) + if let topRightItemLayout = topRightItemLayoutAndApply?.0 { + let originY = twoColumnLayout ? 
topInset : strongSelf.topLeftItem.frame.maxY + verticalSpacing + strongSelf.topRightItem.frame = CGRect(origin: CGPoint(x: secondColumnX, y: originY), size: topRightItemLayout) } - if let bottomRightValueLabelLayout = bottomRightValueLabelLayoutAndApply?.0, let bottomRightTitleLabelLayout = bottomRightTitleLabelLayoutAndApply?.0 { - let bottomRightY = twoColumnLayout ? strongSelf.topRightTitleLabel.frame.maxY + verticalSpacing : strongSelf.bottomLeftTitleLabel.frame.maxY + verticalSpacing - strongSelf.bottomRightValueLabel.frame = CGRect(origin: CGPoint(x: secondColumnX, y: bottomRightY), size: bottomRightValueLabelLayout.size) - strongSelf.bottomRightTitleLabel.frame = CGRect(origin: CGPoint(x: secondColumnX, y: strongSelf.bottomRightValueLabel.frame.maxY), size: bottomRightTitleLabelLayout.size) + if let middle1LeftItemLayout = middle1LeftItemLayoutAndApply?.0 { + let originY = (twoColumnLayout ? strongSelf.topLeftItem.frame.maxY : strongSelf.topRightItem.frame.maxY) + verticalSpacing + strongSelf.middle1LeftItem.frame = CGRect(origin: CGPoint(x: firstColumnX, y: originY), size: middle1LeftItemLayout) } - if let bottomRightDeltaLabelLayout = bottomRightDeltaLabelLayoutAndApply?.0 { - strongSelf.bottomRightDeltaLabel.frame = CGRect(origin: CGPoint(x: strongSelf.bottomRightValueLabel.frame.maxX + horizontalSpacing, y: strongSelf.bottomRightValueLabel.frame.maxY - bottomRightDeltaLabelLayout.size.height - 2.0), size: bottomRightDeltaLabelLayout.size) + + if let middle1RightItemLayout = middle1RightItemLayoutAndApply?.0 { + let originY = (twoColumnLayout ? strongSelf.topRightItem.frame.maxY : strongSelf.middle1LeftItem.frame.maxY) + verticalSpacing + strongSelf.middle1RightItem.frame = CGRect(origin: CGPoint(x: secondColumnX, y: originY), size: middle1RightItemLayout) + } + + if let middle2LeftItemLayout = middle2LeftItemLayoutAndApply?.0 { + let originY = (twoColumnLayout ? 
strongSelf.middle1LeftItem.frame.maxY : strongSelf.middle1RightItem.frame.maxY) + verticalSpacing + strongSelf.middle2LeftItem.frame = CGRect(origin: CGPoint(x: firstColumnX, y: originY), size: middle2LeftItemLayout) + } + + if let middle2RightItemLayout = middle2RightItemLayoutAndApply?.0 { + let originY = (twoColumnLayout ? strongSelf.middle1RightItem.frame.maxY : strongSelf.middle2LeftItem.frame.maxY) + verticalSpacing + strongSelf.middle2RightItem.frame = CGRect(origin: CGPoint(x: secondColumnX, y: originY), size: middle2RightItemLayout) + } + + if let bottomLeftItemLayout = bottomLeftItemLayoutAndApply?.0 { + let originY = (twoColumnLayout ? strongSelf.middle2LeftItem.frame.maxY : strongSelf.middle2RightItem.frame.maxY) + verticalSpacing + strongSelf.bottomLeftItem.frame = CGRect(origin: CGPoint(x: firstColumnX, y: originY), size: bottomLeftItemLayout) + } + + if let bottomRightItemLayout = bottomRightItemLayoutAndApply?.0 { + let originY = (twoColumnLayout ? strongSelf.middle2RightItem.frame.maxY : strongSelf.bottomLeftItem.frame.maxY) + verticalSpacing + strongSelf.bottomRightItem.frame = CGRect(origin: CGPoint(x: secondColumnX, y: originY), size: bottomRightItemLayout) } } }) diff --git a/submodules/TelegramApi/Sources/Api0.swift b/submodules/TelegramApi/Sources/Api0.swift index 45226cac35..b4db37fe5b 100644 --- a/submodules/TelegramApi/Sources/Api0.swift +++ b/submodules/TelegramApi/Sources/Api0.swift @@ -1160,7 +1160,7 @@ fileprivate let parsers: [Int32 : (BufferReader) -> Any?] 
= { dict[946083368] = { return Api.messages.StickerSetInstallResult.parse_stickerSetInstallResultSuccess($0) } dict[816245886] = { return Api.messages.Stickers.parse_stickers($0) } dict[-244016606] = { return Api.messages.Stickers.parse_stickersNotModified($0) } - dict[-1821037486] = { return Api.messages.TranscribedAudio.parse_transcribedAudio($0) } + dict[-809903785] = { return Api.messages.TranscribedAudio.parse_transcribedAudio($0) } dict[870003448] = { return Api.messages.TranslatedText.parse_translateResult($0) } dict[1218005070] = { return Api.messages.VotesList.parse_votesList($0) } dict[-44166467] = { return Api.messages.WebPage.parse_webPage($0) } diff --git a/submodules/TelegramApi/Sources/Api29.swift b/submodules/TelegramApi/Sources/Api29.swift index 15ed48860f..9778a419da 100644 --- a/submodules/TelegramApi/Sources/Api29.swift +++ b/submodules/TelegramApi/Sources/Api29.swift @@ -274,25 +274,27 @@ public extension Api.messages { } public extension Api.messages { enum TranscribedAudio: TypeConstructorDescription { - case transcribedAudio(flags: Int32, transcriptionId: Int64, text: String) + case transcribedAudio(flags: Int32, transcriptionId: Int64, text: String, trialRemainsNum: Int32?, trialRemainsUntilDate: Int32?) 
public func serialize(_ buffer: Buffer, _ boxed: Swift.Bool) { switch self { - case .transcribedAudio(let flags, let transcriptionId, let text): + case .transcribedAudio(let flags, let transcriptionId, let text, let trialRemainsNum, let trialRemainsUntilDate): if boxed { - buffer.appendInt32(-1821037486) + buffer.appendInt32(-809903785) } serializeInt32(flags, buffer: buffer, boxed: false) serializeInt64(transcriptionId, buffer: buffer, boxed: false) serializeString(text, buffer: buffer, boxed: false) + if Int(flags) & Int(1 << 1) != 0 {serializeInt32(trialRemainsNum!, buffer: buffer, boxed: false)} + if Int(flags) & Int(1 << 1) != 0 {serializeInt32(trialRemainsUntilDate!, buffer: buffer, boxed: false)} break } } public func descriptionFields() -> (String, [(String, Any)]) { switch self { - case .transcribedAudio(let flags, let transcriptionId, let text): - return ("transcribedAudio", [("flags", flags as Any), ("transcriptionId", transcriptionId as Any), ("text", text as Any)]) + case .transcribedAudio(let flags, let transcriptionId, let text, let trialRemainsNum, let trialRemainsUntilDate): + return ("transcribedAudio", [("flags", flags as Any), ("transcriptionId", transcriptionId as Any), ("text", text as Any), ("trialRemainsNum", trialRemainsNum as Any), ("trialRemainsUntilDate", trialRemainsUntilDate as Any)]) } } @@ -303,11 +305,17 @@ public extension Api.messages { _2 = reader.readInt64() var _3: String? _3 = parseString(reader) + var _4: Int32? + if Int(_1!) & Int(1 << 1) != 0 {_4 = reader.readInt32() } + var _5: Int32? + if Int(_1!) & Int(1 << 1) != 0 {_5 = reader.readInt32() } let _c1 = _1 != nil let _c2 = _2 != nil let _c3 = _3 != nil - if _c1 && _c2 && _c3 { - return Api.messages.TranscribedAudio.transcribedAudio(flags: _1!, transcriptionId: _2!, text: _3!) + let _c4 = (Int(_1!) & Int(1 << 1) == 0) || _4 != nil + let _c5 = (Int(_1!) 
& Int(1 << 1) == 0) || _5 != nil + if _c1 && _c2 && _c3 && _c4 && _c5 { + return Api.messages.TranscribedAudio.transcribedAudio(flags: _1!, transcriptionId: _2!, text: _3!, trialRemainsNum: _4, trialRemainsUntilDate: _5) } else { return nil diff --git a/submodules/TelegramCore/Sources/PendingMessages/PendingPeerMediaUploadManager.swift b/submodules/TelegramCore/Sources/PendingMessages/PendingPeerMediaUploadManager.swift index 535b6770fc..c0f8c1c109 100644 --- a/submodules/TelegramCore/Sources/PendingMessages/PendingPeerMediaUploadManager.swift +++ b/submodules/TelegramCore/Sources/PendingMessages/PendingPeerMediaUploadManager.swift @@ -85,7 +85,7 @@ private func uploadPeerMedia(postbox: Postbox, network: Network, stateManager: A } } else { - return _internal_setChatWallpaper(postbox: postbox, network: network, stateManager: stateManager, peerId: peerId, wallpaper: wallpaper, forBoth: forBoth, applyUpdates: false) + return _internal_setChatWallpaper(postbox: postbox, network: network, stateManager: stateManager, peerId: peerId, wallpaper: wallpaper, forBoth: forBoth, applyUpdates: false) |> mapError { error -> PeerMediaUploadingItem.Error in switch error { case .generic: diff --git a/submodules/TelegramCore/Sources/State/AccountStateManagementUtils.swift b/submodules/TelegramCore/Sources/State/AccountStateManagementUtils.swift index 7334d848d9..8c845fe6b7 100644 --- a/submodules/TelegramCore/Sources/State/AccountStateManagementUtils.swift +++ b/submodules/TelegramCore/Sources/State/AccountStateManagementUtils.swift @@ -4652,7 +4652,8 @@ func replayFinalState( isForwardingDisabled: item.isForwardingDisabled, isEdited: item.isEdited, isMy: item.isMy, - myReaction: updatedReaction + myReaction: updatedReaction, + forwardInfo: item.forwardInfo )) if let entry = CodableEntry(updatedItem) { updatedPeerEntries[index] = StoryItemsTableEntry(value: entry, id: item.id, expirationTimestamp: item.expirationTimestamp, isCloseFriends: item.isCloseFriends) @@ -4683,7 +4684,8 @@ 
func replayFinalState( isForwardingDisabled: item.isForwardingDisabled, isEdited: item.isEdited, isMy: item.isMy, - myReaction: MessageReaction.Reaction(apiReaction: reaction) + myReaction: MessageReaction.Reaction(apiReaction: reaction), + forwardInfo: item.forwardInfo )) if let entry = CodableEntry(updatedItem) { transaction.setStory(id: StoryId(peerId: peerId, id: id), value: entry) diff --git a/submodules/TelegramCore/Sources/State/ManagedAppConfigurationUpdates.swift b/submodules/TelegramCore/Sources/State/ManagedAppConfigurationUpdates.swift index 8b2ee5aa23..e62bc84263 100644 --- a/submodules/TelegramCore/Sources/State/ManagedAppConfigurationUpdates.swift +++ b/submodules/TelegramCore/Sources/State/ManagedAppConfigurationUpdates.swift @@ -33,6 +33,12 @@ func updateAppConfigurationOnce(postbox: Postbox, network: Network) -> Signal MessageStats { - return MessageStats(views: self.views, forwards: self.forwards, interactionsGraph: interactionsGraph, interactionsGraphDelta: self.interactionsGraphDelta, reactionsGraph: self.reactionsGraph) + return MessageStats(views: self.views, forwards: self.forwards, reactions: self.reactions, interactionsGraph: interactionsGraph, interactionsGraphDelta: self.interactionsGraphDelta, reactionsGraph: self.reactionsGraph) } } @@ -78,11 +83,16 @@ private func requestMessageStats(postbox: Postbox, network: Network, messageId: var views: Int = 0 var forwards: Int = 0 + var reactions: Int = 0 for attribute in message.attributes { if let viewsAttribute = attribute as? ViewCountMessageAttribute { views = viewsAttribute.count } else if let forwardsAttribute = attribute as? ForwardCountMessageAttribute { forwards = forwardsAttribute.count + } else if let reactionsAttribute = attribute as? 
ReactionsMessageAttribute { + reactions = Int(reactionsAttribute.reactions.reduce(0, { partialResult, reaction in + return partialResult + reaction.count + })) } } @@ -110,6 +120,7 @@ private func requestMessageStats(postbox: Postbox, network: Network, messageId: return .single(MessageStats( views: views, forwards: forwards, + reactions: reactions, interactionsGraph: interactionsGraph, interactionsGraphDelta: interactionsGraphDelta, reactionsGraph: reactionsGraph @@ -196,9 +207,9 @@ public final class MessageStatsContext { } } - public init(postbox: Postbox, network: Network, messageId: MessageId) { + public init(account: Account, messageId: MessageId) { self.impl = QueueLocalObject(queue: Queue.mainQueue(), generate: { - return MessageStatsContextImpl(postbox: postbox, network: network, messageId: messageId) + return MessageStatsContextImpl(postbox: account.postbox, network: account.network, messageId: messageId) }) } diff --git a/submodules/TelegramCore/Sources/Statistics/StoryStatistics.swift b/submodules/TelegramCore/Sources/Statistics/StoryStatistics.swift index 6d3e5ede90..ddbb62dbc5 100644 --- a/submodules/TelegramCore/Sources/Statistics/StoryStatistics.swift +++ b/submodules/TelegramCore/Sources/Statistics/StoryStatistics.swift @@ -7,13 +7,15 @@ import MtProtoKit public struct StoryStats: Equatable { public let views: Int public let forwards: Int + public let reactions: Int public let interactionsGraph: StatsGraph public let interactionsGraphDelta: Int64 public let reactionsGraph: StatsGraph - init(views: Int, forwards: Int, interactionsGraph: StatsGraph, interactionsGraphDelta: Int64, reactionsGraph: StatsGraph) { + init(views: Int, forwards: Int, reactions: Int, interactionsGraph: StatsGraph, interactionsGraphDelta: Int64, reactionsGraph: StatsGraph) { self.views = views self.forwards = forwards + self.reactions = reactions self.interactionsGraph = interactionsGraph self.interactionsGraphDelta = interactionsGraphDelta self.reactionsGraph = reactionsGraph 
@@ -26,6 +28,9 @@ public struct StoryStats: Equatable { if lhs.forwards != rhs.forwards { return false } + if lhs.reactions != rhs.reactions { + return false + } if lhs.interactionsGraph != rhs.interactionsGraph { return false } @@ -39,7 +44,7 @@ public struct StoryStats: Equatable { } public func withUpdatedInteractionsGraph(_ interactionsGraph: StatsGraph) -> StoryStats { - return StoryStats(views: self.views, forwards: self.forwards, interactionsGraph: interactionsGraph, interactionsGraphDelta: self.interactionsGraphDelta, reactionsGraph: self.reactionsGraph) + return StoryStats(views: self.views, forwards: self.forwards, reactions: self.reactions, interactionsGraph: interactionsGraph, interactionsGraphDelta: self.interactionsGraphDelta, reactionsGraph: self.reactionsGraph) } } @@ -47,79 +52,90 @@ public struct StoryStatsContextState: Equatable { public var stats: StoryStats? } -private func requestStoryStats(postbox: Postbox, network: Network, peerId: EnginePeer.Id, storyId: Int32, dark: Bool = false) -> Signal { - return postbox.transaction { transaction -> (Int32, Peer, Stories.Item)? in - if let peer = transaction.getPeer(peerId), let storedItem = transaction.getStory(id: StoryId(peerId: peerId, id: storyId))?.get(Stories.StoredItem.self), case let .item(story) = storedItem, let cachedData = transaction.getPeerCachedData(peerId: peerId) as? CachedChannelData { - return (cachedData.statsDatacenterId, peer, story) +private func requestStoryStats(accountPeerId: PeerId, postbox: Postbox, network: Network, peerId: EnginePeer.Id, storyId: Int32, dark: Bool = false) -> Signal { + return postbox.transaction { transaction -> (Int32, Peer)? in + if let peer = transaction.getPeer(peerId), let cachedData = transaction.getPeerCachedData(peerId: peerId) as? 
CachedChannelData { + return (cachedData.statsDatacenterId, peer) } else { return nil } - } |> mapToSignal { data -> Signal in - guard let (statsDatacenterId, peer, story) = data, let inputPeer = apiInputPeer(peer) else { + } + |> mapToSignal { data -> Signal in + guard let (statsDatacenterId, peer) = data, let peerReference = PeerReference(peer) else { return .never() } - - var flags: Int32 = 0 - if dark { - flags |= (1 << 1) - } - - let request = Api.functions.stats.getStoryStats(flags: flags, peer: inputPeer, id: storyId) - let signal: Signal - if network.datacenterId != statsDatacenterId { - signal = network.download(datacenterId: Int(statsDatacenterId), isMedia: false, tag: nil) - |> castError(MTRpcError.self) - |> mapToSignal { worker in - return worker.request(request) + return _internal_getStoriesById(accountPeerId: accountPeerId, postbox: postbox, network: network, peer: peerReference, ids: [storyId]) + |> mapToSignal { stories -> Signal in + guard let storyItem = stories.first, case let .item(story) = storyItem, let inputPeer = apiInputPeer(peer) else { + return .never() } - } else { - signal = network.request(request) - } - - var views: Int = 0 - var forwards: Int = 0 - if let storyViews = story.views { - views = storyViews.seenCount - forwards = storyViews.forwardCount - } - - return signal - |> mapToSignal { result -> Signal in - if case let .storyStats(apiInteractionsGraph, apiReactionsGraph) = result { - let interactionsGraph = StatsGraph(apiStatsGraph: apiInteractionsGraph) - var interactionsGraphDelta: Int64 = 86400 - if case let .Loaded(_, data) = interactionsGraph { - if let start = data.range(of: "[\"x\",") { - let substring = data.suffix(from: start.upperBound) - if let end = substring.range(of: "],") { - let valuesString = substring.prefix(through: substring.index(before: end.lowerBound)) - let values = valuesString.components(separatedBy: ",").compactMap { Int64($0) } - if values.count > 1 { - let first = values[0] - let second = values[1] - 
let delta = abs(second - first) / 1000 - interactionsGraphDelta = delta + + var flags: Int32 = 0 + if dark { + flags |= (1 << 1) + } + + let request = Api.functions.stats.getStoryStats(flags: flags, peer: inputPeer, id: storyId) + let signal: Signal + if network.datacenterId != statsDatacenterId { + signal = network.download(datacenterId: Int(statsDatacenterId), isMedia: false, tag: nil) + |> castError(MTRpcError.self) + |> mapToSignal { worker in + return worker.request(request) + } + } else { + signal = network.request(request) + } + + var views: Int = 0 + var forwards: Int = 0 + var reactions: Int = 0 + if let storyViews = story.views { + views = storyViews.seenCount + forwards = storyViews.forwardCount + reactions = storyViews.reactedCount + } + + return signal + |> mapToSignal { result -> Signal in + if case let .storyStats(apiInteractionsGraph, apiReactionsGraph) = result { + let interactionsGraph = StatsGraph(apiStatsGraph: apiInteractionsGraph) + var interactionsGraphDelta: Int64 = 86400 + if case let .Loaded(_, data) = interactionsGraph { + if let start = data.range(of: "[\"x\",") { + let substring = data.suffix(from: start.upperBound) + if let end = substring.range(of: "],") { + let valuesString = substring.prefix(through: substring.index(before: end.lowerBound)) + let values = valuesString.components(separatedBy: ",").compactMap { Int64($0) } + if values.count > 1 { + let first = values[0] + let second = values[1] + let delta = abs(second - first) / 1000 + interactionsGraphDelta = delta + } } } } + let reactionsGraph = StatsGraph(apiStatsGraph: apiReactionsGraph) + return .single(StoryStats( + views: views, + forwards: forwards, + reactions: reactions, + interactionsGraph: interactionsGraph, + interactionsGraphDelta: interactionsGraphDelta, + reactionsGraph: reactionsGraph + )) + } else { + return .single(nil) } - let reactionsGraph = StatsGraph(apiStatsGraph: apiReactionsGraph) - return .single(StoryStats( - views: views, - forwards: forwards, - 
interactionsGraph: interactionsGraph, - interactionsGraphDelta: interactionsGraphDelta, - reactionsGraph: reactionsGraph - )) - } else { - return .single(nil) } + |> retryRequest } - |> retryRequest } } private final class StoryStatsContextImpl { + private let accountPeerId: EnginePeer.Id private let postbox: Postbox private let network: Network private let peerId: EnginePeer.Id @@ -140,9 +156,10 @@ private final class StoryStatsContextImpl { private let disposable = MetaDisposable() private let disposables = DisposableDict() - init(postbox: Postbox, network: Network, peerId: EnginePeer.Id, storyId: Int32) { + init(accountPeerId: EnginePeer.Id, postbox: Postbox, network: Network, peerId: EnginePeer.Id, storyId: Int32) { assert(Queue.mainQueue().isCurrent()) + self.accountPeerId = accountPeerId self.postbox = postbox self.network = network self.peerId = peerId @@ -162,7 +179,7 @@ private final class StoryStatsContextImpl { private func load() { assert(Queue.mainQueue().isCurrent()) - self.disposable.set((requestStoryStats(postbox: self.postbox, network: self.network, peerId: self.peerId, storyId: self.storyId) + self.disposable.set((requestStoryStats(accountPeerId: self.accountPeerId, postbox: self.postbox, network: self.network, peerId: self.peerId, storyId: self.storyId) |> deliverOnMainQueue).start(next: { [weak self] stats in if let strongSelf = self { strongSelf._state = StoryStatsContextState(stats: stats) @@ -195,9 +212,9 @@ public final class StoryStatsContext { } } - public init(postbox: Postbox, network: Network, peerId: EnginePeer.Id, storyId: Int32) { + public init(account: Account, peerId: EnginePeer.Id, storyId: Int32) { self.impl = QueueLocalObject(queue: Queue.mainQueue(), generate: { - return StoryStatsContextImpl(postbox: postbox, network: network, peerId: peerId, storyId: storyId) + return StoryStatsContextImpl(accountPeerId: account.peerId, postbox: account.postbox, network: account.network, peerId: peerId, storyId: storyId) }) } diff --git 
a/submodules/TelegramCore/Sources/SyncCore/SyncCore_Namespaces.swift b/submodules/TelegramCore/Sources/SyncCore/SyncCore_Namespaces.swift index 7e578ea714..813868b578 100644 --- a/submodules/TelegramCore/Sources/SyncCore/SyncCore_Namespaces.swift +++ b/submodules/TelegramCore/Sources/SyncCore/SyncCore_Namespaces.swift @@ -269,6 +269,7 @@ private enum PreferencesKeyValues: Int32 { case chatListFilterUpdates = 30 case globalPrivacySettings = 31 case storiesConfiguration = 32 + case audioTranscriptionTrialState = 33 } public func applicationSpecificPreferencesKey(_ value: Int32) -> ValueBoxKey { @@ -433,6 +434,12 @@ public struct PreferencesKeys { key.setInt32(0, value: PreferencesKeyValues.storiesConfiguration.rawValue) return key }() + + public static let audioTranscriptionTrialState: ValueBoxKey = { + let key = ValueBoxKey(length: 4) + key.setInt32(0, value: PreferencesKeyValues.audioTranscriptionTrialState.rawValue) + return key + }() } private enum SharedDataKeyValues: Int32 { diff --git a/submodules/TelegramCore/Sources/TelegramEngine/Data/ConfigurationData.swift b/submodules/TelegramCore/Sources/TelegramEngine/Data/ConfigurationData.swift index 48a83d7b9a..e83ddeea47 100644 --- a/submodules/TelegramCore/Sources/TelegramEngine/Data/ConfigurationData.swift +++ b/submodules/TelegramCore/Sources/TelegramEngine/Data/ConfigurationData.swift @@ -502,5 +502,26 @@ public extension TelegramEngine.EngineData.Item { return value } } + + public struct AudioTranscriptionTrial: TelegramEngineDataItem, PostboxViewDataItem { + public typealias Result = AudioTranscription.TrialState + + public init() { + } + + var key: PostboxViewKey { + return .preferences(keys: Set([PreferencesKeys.audioTranscriptionTrialState])) + } + + func extract(view: PostboxView) -> Result { + guard let view = view as? 
PreferencesView else { + preconditionFailure() + } + guard let value = view.values[PreferencesKeys.audioTranscriptionTrialState]?.get(AudioTranscription.TrialState.self) else { + return AudioTranscription.TrialState.defaultValue + } + return value + } + } } } diff --git a/submodules/TelegramCore/Sources/TelegramEngine/Messages/EngineStoryViewListContext.swift b/submodules/TelegramCore/Sources/TelegramEngine/Messages/EngineStoryViewListContext.swift index c8fd5fee1c..635c340d9e 100644 --- a/submodules/TelegramCore/Sources/TelegramEngine/Messages/EngineStoryViewListContext.swift +++ b/submodules/TelegramCore/Sources/TelegramEngine/Messages/EngineStoryViewListContext.swift @@ -14,6 +14,7 @@ public final class EngineStoryViewListContext { } public enum SortMode { + case repostsFirst case reactionsFirst case recentFirst } @@ -176,6 +177,16 @@ public final class EngineStoryViewListContext { } } switch sortMode { + case .repostsFirst: + items.sort(by: { lhs, rhs in + if (lhs.reaction == nil) != (rhs.reaction == nil) { + return lhs.reaction != nil + } + if lhs.timestamp != rhs.timestamp { + return lhs.timestamp > rhs.timestamp + } + return lhs.peer.id < rhs.peer.id + }) case .reactionsFirst: items.sort(by: { lhs, rhs in if (lhs.reaction == nil) != (rhs.reaction == nil) { @@ -287,7 +298,7 @@ public final class EngineStoryViewListContext { switch sortMode { case .reactionsFirst: flags |= (1 << 2) - case .recentFirst: + case .recentFirst, .repostsFirst: break } if searchQuery != nil { @@ -376,7 +387,8 @@ public final class EngineStoryViewListContext { isForwardingDisabled: item.isForwardingDisabled, isEdited: item.isEdited, isMy: item.isMy, - myReaction: item.myReaction + myReaction: item.myReaction, + forwardInfo: item.forwardInfo )) if let entry = CodableEntry(updatedItem) { transaction.setStory(id: StoryId(peerId: account.peerId, id: storyId), value: entry) @@ -413,7 +425,8 @@ public final class EngineStoryViewListContext { isForwardingDisabled: item.isForwardingDisabled, 
isEdited: item.isEdited, isMy: item.isMy, - myReaction: item.myReaction + myReaction: item.myReaction, + forwardInfo: item.forwardInfo )) if let entry = CodableEntry(updatedItem) { currentItems[i] = StoryItemsTableEntry(value: entry, id: updatedItem.id, expirationTimestamp: updatedItem.expirationTimestamp, isCloseFriends: updatedItem.isCloseFriends) diff --git a/submodules/TelegramCore/Sources/TelegramEngine/Messages/PendingStoryManager.swift b/submodules/TelegramCore/Sources/TelegramEngine/Messages/PendingStoryManager.swift index 80534326c8..c57264f9ab 100644 --- a/submodules/TelegramCore/Sources/TelegramEngine/Messages/PendingStoryManager.swift +++ b/submodules/TelegramCore/Sources/TelegramEngine/Messages/PendingStoryManager.swift @@ -39,6 +39,40 @@ public extension Stories { } } + struct PendingForwardInfo: Codable, Equatable { + private enum CodingKeys: String, CodingKey { + case peerId = "peerId" + case storyId = "storyId" + case isForwardingDisabled = "isForwardingDisabled" + } + + public let peerId: EnginePeer.Id + public let storyId: Int32 + public let isForwardingDisabled: Bool + + public init(peerId: EnginePeer.Id, storyId: Int32, isForwardingDisabled: Bool) { + self.peerId = peerId + self.storyId = storyId + self.isForwardingDisabled = isForwardingDisabled + } + + public init(from decoder: Decoder) throws { + let container = try decoder.container(keyedBy: CodingKeys.self) + + self.peerId = EnginePeer.Id(try container.decode(Int64.self, forKey: .peerId)) + self.storyId = try container.decode(Int32.self, forKey: .storyId) + self.isForwardingDisabled = try container.decode(Bool.self, forKey: .isForwardingDisabled) + } + + public func encode(to encoder: Encoder) throws { + var container = encoder.container(keyedBy: CodingKeys.self) + + try container.encode(self.peerId.toInt64(), forKey: .peerId) + try container.encode(self.storyId, forKey: .storyId) + try container.encode(self.isForwardingDisabled, forKey: .isForwardingDisabled) + } + } + final class 
PendingItem: Equatable, Codable { private enum CodingKeys: CodingKey { case target @@ -54,6 +88,7 @@ public extension Stories { case isForwardingDisabled case period case randomId + case forwardInfo } public let target: PendingTarget @@ -69,6 +104,7 @@ public extension Stories { public let isForwardingDisabled: Bool public let period: Int32 public let randomId: Int64 + public let forwardInfo: PendingForwardInfo? public init( target: PendingTarget, @@ -83,7 +119,8 @@ public extension Stories { privacy: EngineStoryPrivacy, isForwardingDisabled: Bool, period: Int32, - randomId: Int64 + randomId: Int64, + forwardInfo: PendingForwardInfo? ) { self.target = target self.stableId = stableId @@ -98,6 +135,7 @@ public extension Stories { self.isForwardingDisabled = isForwardingDisabled self.period = period self.randomId = randomId + self.forwardInfo = forwardInfo } public init(from decoder: Decoder) throws { @@ -123,6 +161,8 @@ public extension Stories { self.isForwardingDisabled = try container.decodeIfPresent(Bool.self, forKey: .isForwardingDisabled) ?? 
false self.period = try container.decode(Int32.self, forKey: .period) self.randomId = try container.decode(Int64.self, forKey: .randomId) + + self.forwardInfo = try container.decodeIfPresent(PendingForwardInfo.self, forKey: .forwardInfo) } public func encode(to encoder: Encoder) throws { @@ -150,6 +190,7 @@ public extension Stories { try container.encode(self.isForwardingDisabled, forKey: .isForwardingDisabled) try container.encode(self.period, forKey: .period) try container.encode(self.randomId, forKey: .randomId) + try container.encodeIfPresent(self.forwardInfo, forKey: .forwardInfo) } public static func ==(lhs: PendingItem, rhs: PendingItem) -> Bool { @@ -186,6 +227,9 @@ public extension Stories { if lhs.randomId != rhs.randomId { return false } + if lhs.forwardInfo != rhs.forwardInfo { + return false + } return true } } @@ -359,8 +403,9 @@ final class PendingStoryManager { case let .peer(peerId): toPeerId = peerId } + let stableId = firstItem.stableId - pendingItemContext.disposable = (_internal_uploadStoryImpl(postbox: self.postbox, network: self.network, accountPeerId: self.accountPeerId, stateManager: self.stateManager, messageMediaPreuploadManager: self.messageMediaPreuploadManager, revalidationContext: self.revalidationContext, auxiliaryMethods: self.auxiliaryMethods, toPeerId: toPeerId, stableId: stableId, media: firstItem.media, mediaAreas: firstItem.mediaAreas, text: firstItem.text, entities: firstItem.entities, embeddedStickers: firstItem.embeddedStickers, pin: firstItem.pin, privacy: firstItem.privacy, isForwardingDisabled: firstItem.isForwardingDisabled, period: Int(firstItem.period), randomId: firstItem.randomId) + pendingItemContext.disposable = (_internal_uploadStoryImpl(postbox: self.postbox, network: self.network, accountPeerId: self.accountPeerId, stateManager: self.stateManager, messageMediaPreuploadManager: self.messageMediaPreuploadManager, revalidationContext: self.revalidationContext, auxiliaryMethods: self.auxiliaryMethods, toPeerId: 
toPeerId, stableId: stableId, media: firstItem.media, mediaAreas: firstItem.mediaAreas, text: firstItem.text, entities: firstItem.entities, embeddedStickers: firstItem.embeddedStickers, pin: firstItem.pin, privacy: firstItem.privacy, isForwardingDisabled: firstItem.isForwardingDisabled, period: Int(firstItem.period), randomId: firstItem.randomId, forwardInfo: firstItem.forwardInfo) |> deliverOn(self.queue)).start(next: { [weak self] event in guard let `self` = self else { return diff --git a/submodules/TelegramCore/Sources/TelegramEngine/Messages/Stories.swift b/submodules/TelegramCore/Sources/TelegramEngine/Messages/Stories.swift index 4173a6fe1e..27b4569911 100644 --- a/submodules/TelegramCore/Sources/TelegramEngine/Messages/Stories.swift +++ b/submodules/TelegramCore/Sources/TelegramEngine/Messages/Stories.swift @@ -6,11 +6,14 @@ import TelegramApi public enum EngineStoryInputMedia { case image(dimensions: PixelDimensions, data: Data, stickers: [TelegramMediaFile]) case video(dimensions: PixelDimensions, duration: Double, resource: TelegramMediaResource, firstFrameFile: TempBoxFile?, stickers: [TelegramMediaFile]) + case existing(media: Media) var embeddedStickers: [TelegramMediaFile] { switch self { case let .image(_, _, stickers), let .video(_, _, _, _, stickers): return stickers + case .existing: + return [] } } } @@ -36,6 +39,34 @@ public extension EngineStoryPrivacy { } } +public extension EngineStoryItem.ForwardInfo { + init?(_ forwardInfo: Stories.Item.ForwardInfo, transaction: Transaction) { + switch forwardInfo { + case let .known(peerId, storyId): + if let peer = transaction.getPeer(peerId) { + self = .known(peer: EnginePeer(peer), storyId: storyId) + } else { + return nil + } + case let .unknown(name): + self = .unknown(name: name) + } + } + + init?(_ forwardInfo: Stories.Item.ForwardInfo, peers: [PeerId: Peer]) { + switch forwardInfo { + case let .known(peerId, storyId): + if let peer = peers[peerId] { + self = .known(peer: EnginePeer(peer), storyId: 
storyId) + } else { + return nil + } + case let .unknown(name): + self = .unknown(name: name) + } + } +} + public enum Stories { public final class Item: Codable, Equatable { public struct Views: Codable, Equatable { @@ -162,6 +193,49 @@ public enum Stories { } } + public enum ForwardInfo: Codable, Equatable { + public enum DecodingError: Error { + case generic + } + + private enum CodingKeys: CodingKey { + case discriminator + case authorPeerId + case storyId + case authorName + } + + case known(peerId: EnginePeer.Id, storyId: Int32) + case unknown(name: String) + + public init(from decoder: Decoder) throws { + let container = try decoder.container(keyedBy: CodingKeys.self) + + switch try container.decode(Int32.self, forKey: .discriminator) { + case 0: + self = .known(peerId: EnginePeer.Id(try container.decode(Int64.self, forKey: .authorPeerId)), storyId: try container.decode(Int32.self, forKey: .storyId)) + case 1: + self = .unknown(name: try container.decode(String.self, forKey: .authorName)) + default: + throw DecodingError.generic + } + } + + public func encode(to encoder: Encoder) throws { + var container = encoder.container(keyedBy: CodingKeys.self) + + switch self { + case let .known(peerId, storyId): + try container.encode(0 as Int32, forKey: .discriminator) + try container.encode(peerId.toInt64(), forKey: .authorPeerId) + try container.encode(storyId, forKey: .storyId) + case let .unknown(name): + try container.encode(1 as Int32, forKey: .discriminator) + try container.encode(name, forKey: .authorName) + } + } + } + private enum CodingKeys: String, CodingKey { case id case timestamp @@ -182,6 +256,7 @@ public enum Stories { case isEdited case isMy case myReaction + case forwardInfo } public let id: Int32 @@ -203,6 +278,7 @@ public enum Stories { public let isEdited: Bool public let isMy: Bool public let myReaction: MessageReaction.Reaction? + public let forwardInfo: ForwardInfo? 
public init( id: Int32, @@ -223,7 +299,8 @@ public enum Stories { isForwardingDisabled: Bool, isEdited: Bool, isMy: Bool, - myReaction: MessageReaction.Reaction? + myReaction: MessageReaction.Reaction?, + forwardInfo: ForwardInfo? ) { self.id = id self.timestamp = timestamp @@ -244,6 +321,7 @@ public enum Stories { self.isEdited = isEdited self.isMy = isMy self.myReaction = myReaction + self.forwardInfo = forwardInfo } public init(from decoder: Decoder) throws { @@ -274,6 +352,7 @@ public enum Stories { self.isEdited = try container.decodeIfPresent(Bool.self, forKey: .isEdited) ?? false self.isMy = try container.decodeIfPresent(Bool.self, forKey: .isMy) ?? false self.myReaction = try container.decodeIfPresent(MessageReaction.Reaction.self, forKey: .myReaction) + self.forwardInfo = try container.decodeIfPresent(ForwardInfo.self, forKey: .forwardInfo) } public func encode(to encoder: Encoder) throws { @@ -305,6 +384,7 @@ public enum Stories { try container.encode(self.isEdited, forKey: .isEdited) try container.encode(self.isMy, forKey: .isMy) try container.encodeIfPresent(self.myReaction, forKey: .myReaction) + try container.encodeIfPresent(self.forwardInfo, forKey: .forwardInfo) } public static func ==(lhs: Item, rhs: Item) -> Bool { @@ -369,7 +449,9 @@ public enum Stories { if lhs.myReaction != rhs.myReaction { return false } - + if lhs.forwardInfo != rhs.forwardInfo { + return false + } return true } } @@ -752,6 +834,8 @@ private func prepareUploadStoryContent(account: Account, media: EngineStoryInput ) return fileMedia + case let .existing(media): + return media } } @@ -845,7 +929,7 @@ private func apiInputPrivacyRules(privacy: EngineStoryPrivacy, transaction: Tran return privacyRules } -func _internal_uploadStory(account: Account, target: Stories.PendingTarget, media: EngineStoryInputMedia, mediaAreas: [MediaArea], text: String, entities: [MessageTextEntity], pin: Bool, privacy: EngineStoryPrivacy, isForwardingDisabled: Bool, period: Int, randomId: Int64) -> 
Signal { +func _internal_uploadStory(account: Account, target: Stories.PendingTarget, media: EngineStoryInputMedia, mediaAreas: [MediaArea], text: String, entities: [MessageTextEntity], pin: Bool, privacy: EngineStoryPrivacy, isForwardingDisabled: Bool, period: Int, randomId: Int64, forwardInfo: Stories.PendingForwardInfo?) -> Signal { let inputMedia = prepareUploadStoryContent(account: account, media: media) return (account.postbox.transaction { transaction in @@ -872,7 +956,8 @@ func _internal_uploadStory(account: Account, target: Stories.PendingTarget, medi privacy: privacy, isForwardingDisabled: isForwardingDisabled, period: Int32(period), - randomId: randomId + randomId: randomId, + forwardInfo: forwardInfo )) transaction.setLocalStoryState(state: CodableEntry(currentState)) return stableId @@ -917,7 +1002,28 @@ private func _internal_putPendingStoryIdMapping(peerId: PeerId, stableId: Int32, } } -func _internal_uploadStoryImpl(postbox: Postbox, network: Network, accountPeerId: PeerId, stateManager: AccountStateManager, messageMediaPreuploadManager: MessageMediaPreuploadManager, revalidationContext: MediaReferenceRevalidationContext, auxiliaryMethods: AccountAuxiliaryMethods, toPeerId: PeerId, stableId: Int32, media: Media, mediaAreas: [MediaArea], text: String, entities: [MessageTextEntity], embeddedStickers: [TelegramMediaFile], pin: Bool, privacy: EngineStoryPrivacy, isForwardingDisabled: Bool, period: Int, randomId: Int64) -> Signal { +func _internal_uploadStoryImpl( + postbox: Postbox, + network: Network, + accountPeerId: PeerId, + stateManager: AccountStateManager, + messageMediaPreuploadManager: MessageMediaPreuploadManager, + revalidationContext: MediaReferenceRevalidationContext, + auxiliaryMethods: AccountAuxiliaryMethods, + toPeerId: PeerId, + stableId: Int32, + media: Media, + mediaAreas: [MediaArea], + text: String, + entities: [MessageTextEntity], + embeddedStickers: [TelegramMediaFile], + pin: Bool, + privacy: EngineStoryPrivacy, + 
isForwardingDisabled: Bool, + period: Int, + randomId: Int64, + forwardInfo: Stories.PendingForwardInfo? +) -> Signal { return postbox.transaction { transaction -> Api.InputPeer? in return transaction.getPeer(toPeerId).flatMap(apiInputPeer) } @@ -975,6 +1081,14 @@ func _internal_uploadStoryImpl(postbox: Postbox, network: Network, accountPeerId flags |= 1 << 5 } + var fwdFromId: Api.InputPeer? + var fwdFromStory: Int32? + if let forwardInfo = forwardInfo, let inputPeer = transaction.getPeer(forwardInfo.peerId).flatMap({ apiInputPeer($0) }) { + flags |= 1 << 6 + fwdFromId = inputPeer + fwdFromStory = forwardInfo.storyId + } + return network.request(Api.functions.stories.sendStory( flags: flags, peer: inputPeer, @@ -985,8 +1099,8 @@ func _internal_uploadStoryImpl(postbox: Postbox, network: Network, accountPeerId privacyRules: privacyRules, randomId: randomId, period: Int32(period), - fwdFromId: nil, - fwdFromStory: nil + fwdFromId: fwdFromId, + fwdFromStory: fwdFromStory )) |> map(Optional.init) |> `catch` { _ -> Signal in @@ -1034,7 +1148,8 @@ func _internal_uploadStoryImpl(postbox: Postbox, network: Network, accountPeerId isForwardingDisabled: item.isForwardingDisabled, isEdited: item.isEdited, isMy: item.isMy, - myReaction: item.myReaction + myReaction: item.myReaction, + forwardInfo: item.forwardInfo ) if let entry = CodableEntry(Stories.StoredItem.item(updatedItem)) { items.append(StoryItemsTableEntry(value: entry, id: item.id, expirationTimestamp: updatedItem.expirationTimestamp, isCloseFriends: updatedItem.isCloseFriends)) @@ -1214,7 +1329,8 @@ func _internal_editStoryPrivacy(account: Account, id: Int32, privacy: EngineStor isForwardingDisabled: item.isForwardingDisabled, isEdited: item.isEdited, isMy: item.isMy, - myReaction: item.myReaction + myReaction: item.myReaction, + forwardInfo: item.forwardInfo ) if let entry = CodableEntry(Stories.StoredItem.item(updatedItem)) { transaction.setStory(id: storyId, value: entry) @@ -1243,7 +1359,8 @@ func 
_internal_editStoryPrivacy(account: Account, id: Int32, privacy: EngineStor isForwardingDisabled: item.isForwardingDisabled, isEdited: item.isEdited, isMy: item.isMy, - myReaction: item.myReaction + myReaction: item.myReaction, + forwardInfo: item.forwardInfo ) if let entry = CodableEntry(Stories.StoredItem.item(updatedItem)) { items[index] = StoryItemsTableEntry(value: entry, id: item.id, expirationTimestamp: updatedItem.expirationTimestamp, isCloseFriends: updatedItem.isCloseFriends) @@ -1435,7 +1552,8 @@ func _internal_updateStoriesArePinned(account: Account, peerId: PeerId, ids: [In isForwardingDisabled: item.isForwardingDisabled, isEdited: item.isEdited, isMy: item.isMy, - myReaction: item.myReaction + myReaction: item.myReaction, + forwardInfo: item.forwardInfo ) if let entry = CodableEntry(Stories.StoredItem.item(updatedItem)) { items[index] = StoryItemsTableEntry(value: entry, id: item.id, expirationTimestamp: updatedItem.expirationTimestamp, isCloseFriends: updatedItem.isCloseFriends) @@ -1463,7 +1581,8 @@ func _internal_updateStoriesArePinned(account: Account, peerId: PeerId, ids: [In isForwardingDisabled: item.isForwardingDisabled, isEdited: item.isEdited, isMy: item.isMy, - myReaction: item.myReaction + myReaction: item.myReaction, + forwardInfo: item.forwardInfo ) updatedItems.append(updatedItem) } @@ -1538,11 +1657,26 @@ extension Stories.Item.Views { } } +extension Stories.Item.ForwardInfo { + init?(apiForwardInfo: Api.StoryFwdHeader) { + switch apiForwardInfo { + case let .storyFwdHeader(_, from, fromName, storyId): + if let from = from, let storyId = storyId { + self = .known(peerId: from.peerId, storyId: storyId) + return + } else if let fromName = fromName { + self = .unknown(name: fromName) + return + } + } + return nil + } +} + extension Stories.StoredItem { init?(apiStoryItem: Api.StoryItem, existingItem: Stories.Item? 
= nil, peerId: PeerId, transaction: Transaction) { switch apiStoryItem { - case let .storyItem(flags, id, date, fwdFrom, expireDate, caption, entities, media, mediaAreas, privacy, views, sentReaction): - let _ = fwdFrom + case let .storyItem(flags, id, date, forwardFrom, expireDate, caption, entities, media, mediaAreas, privacy, views, sentReaction): let (parsedMedia, _, _, _, _) = textMediaAndExpirationTimerFromApiMedia(media, peerId) if let parsedMedia = parsedMedia { var parsedPrivacy: Stories.Item.Privacy? @@ -1613,6 +1747,13 @@ extension Stories.StoredItem { mergedIsMy = (flags & (1 << 16)) != 0 } + var mergedForwardInfo: Stories.Item.ForwardInfo? + if isMin, let existingItem = existingItem { + mergedForwardInfo = existingItem.forwardInfo + } else { + mergedForwardInfo = forwardFrom.flatMap(Stories.Item.ForwardInfo.init(apiForwardInfo:)) + } + let item = Stories.Item( id: id, timestamp: date, @@ -1632,7 +1773,8 @@ extension Stories.StoredItem { isForwardingDisabled: isForwardingDisabled, isEdited: isEdited, isMy: mergedIsMy, - myReaction: mergedMyReaction + myReaction: mergedMyReaction, + forwardInfo: mergedForwardInfo ) self = .item(item) } else { @@ -2104,7 +2246,8 @@ func _internal_setStoryReaction(account: Account, peerId: EnginePeer.Id, id: Int isForwardingDisabled: item.isForwardingDisabled, isEdited: item.isEdited, isMy: item.isMy, - myReaction: reaction + myReaction: reaction, + forwardInfo: item.forwardInfo )) updatedItemValue = updatedItem if let entry = CodableEntry(updatedItem) { @@ -2135,7 +2278,8 @@ func _internal_setStoryReaction(account: Account, peerId: EnginePeer.Id, id: Int isForwardingDisabled: item.isForwardingDisabled, isEdited: item.isEdited, isMy: item.isMy, - myReaction: reaction + myReaction: reaction, + forwardInfo: item.forwardInfo )) updatedItemValue = updatedItem if let entry = CodableEntry(updatedItem) { diff --git a/submodules/TelegramCore/Sources/TelegramEngine/Messages/StoryListContext.swift 
b/submodules/TelegramCore/Sources/TelegramEngine/Messages/StoryListContext.swift index 2eff63d186..0fe6b59e1f 100644 --- a/submodules/TelegramCore/Sources/TelegramEngine/Messages/StoryListContext.swift +++ b/submodules/TelegramCore/Sources/TelegramEngine/Messages/StoryListContext.swift @@ -51,6 +51,11 @@ public final class EngineStoryItem: Equatable { } } + public enum ForwardInfo: Equatable { + case known(peer: EnginePeer, storyId: Int32) + case unknown(name: String) + } + public let id: Int32 public let timestamp: Int32 public let expirationTimestamp: Int32 @@ -71,8 +76,9 @@ public final class EngineStoryItem: Equatable { public let isEdited: Bool public let isMy: Bool public let myReaction: MessageReaction.Reaction? + public let forwardInfo: ForwardInfo? - public init(id: Int32, timestamp: Int32, expirationTimestamp: Int32, media: EngineMedia, mediaAreas: [MediaArea], text: String, entities: [MessageTextEntity], views: Views?, privacy: EngineStoryPrivacy?, isPinned: Bool, isExpired: Bool, isPublic: Bool, isPending: Bool, isCloseFriends: Bool, isContacts: Bool, isSelectedContacts: Bool, isForwardingDisabled: Bool, isEdited: Bool, isMy: Bool, myReaction: MessageReaction.Reaction?) { + public init(id: Int32, timestamp: Int32, expirationTimestamp: Int32, media: EngineMedia, mediaAreas: [MediaArea], text: String, entities: [MessageTextEntity], views: Views?, privacy: EngineStoryPrivacy?, isPinned: Bool, isExpired: Bool, isPublic: Bool, isPending: Bool, isCloseFriends: Bool, isContacts: Bool, isSelectedContacts: Bool, isForwardingDisabled: Bool, isEdited: Bool, isMy: Bool, myReaction: MessageReaction.Reaction?, forwardInfo: ForwardInfo?) 
{ self.id = id self.timestamp = timestamp self.expirationTimestamp = expirationTimestamp @@ -93,6 +99,7 @@ public final class EngineStoryItem: Equatable { self.isEdited = isEdited self.isMy = isMy self.myReaction = myReaction + self.forwardInfo = forwardInfo } public static func ==(lhs: EngineStoryItem, rhs: EngineStoryItem) -> Bool { @@ -156,11 +163,25 @@ public final class EngineStoryItem: Equatable { if lhs.myReaction != rhs.myReaction { return false } + if lhs.forwardInfo != rhs.forwardInfo { + return false + } return true } } -extension EngineStoryItem { +extension EngineStoryItem.ForwardInfo { + var storedForwardInfo: Stories.Item.ForwardInfo { + switch self { + case let .known(peer, storyId): + return .known(peerId: peer.id, storyId: storyId) + case let .unknown(name): + return .unknown(name: name) + } + } +} + +public extension EngineStoryItem { func asStoryItem() -> Stories.Item { return Stories.Item( id: self.id, @@ -195,7 +216,8 @@ extension EngineStoryItem { isForwardingDisabled: self.isForwardingDisabled, isEdited: self.isEdited, isMy: self.isMy, - myReaction: self.myReaction + myReaction: self.myReaction, + forwardInfo: self.forwardInfo?.storedForwardInfo ) } } @@ -570,7 +592,8 @@ public final class PeerStoryListContext { isForwardingDisabled: item.isForwardingDisabled, isEdited: item.isEdited, isMy: item.isMy, - myReaction: item.myReaction + myReaction: item.myReaction, + forwardInfo: item.forwardInfo.flatMap { EngineStoryItem.ForwardInfo($0, transaction: transaction) } ) items.append(mappedItem) @@ -713,7 +736,8 @@ public final class PeerStoryListContext { isForwardingDisabled: item.isForwardingDisabled, isEdited: item.isEdited, isMy: item.isMy, - myReaction: item.myReaction + myReaction: item.myReaction, + forwardInfo: item.forwardInfo.flatMap { EngineStoryItem.ForwardInfo($0, transaction: transaction) } ) storyItems.append(mappedItem) } @@ -802,6 +826,11 @@ public final class PeerStoryListContext { } } } + if let forwardInfo = item.forwardInfo, 
case let .known(peerId, _) = forwardInfo { + if let peer = transaction.getPeer(peerId) { + peers[peer.id] = peer + } + } } } default: @@ -868,7 +897,8 @@ public final class PeerStoryListContext { isForwardingDisabled: item.isForwardingDisabled, isEdited: item.isEdited, isMy: item.isMy, - myReaction: item.myReaction + myReaction: item.myReaction, + forwardInfo: item.forwardInfo.flatMap { EngineStoryItem.ForwardInfo($0, peers: peers) } ) finalUpdatedState = updatedState } @@ -914,7 +944,8 @@ public final class PeerStoryListContext { isForwardingDisabled: item.isForwardingDisabled, isEdited: item.isEdited, isMy: item.isMy, - myReaction: item.myReaction + myReaction: item.myReaction, + forwardInfo: item.forwardInfo.flatMap { EngineStoryItem.ForwardInfo($0, peers: peers) } ) finalUpdatedState = updatedState } else { @@ -962,7 +993,8 @@ public final class PeerStoryListContext { isForwardingDisabled: item.isForwardingDisabled, isEdited: item.isEdited, isMy: item.isMy, - myReaction: item.myReaction + myReaction: item.myReaction, + forwardInfo: item.forwardInfo.flatMap { EngineStoryItem.ForwardInfo($0, peers: peers) } )) updatedState.items.sort(by: { lhs, rhs in return lhs.timestamp > rhs.timestamp @@ -1006,7 +1038,8 @@ public final class PeerStoryListContext { isForwardingDisabled: item.isForwardingDisabled, isEdited: item.isEdited, isMy: item.isMy, - myReaction: item.myReaction + myReaction: item.myReaction, + forwardInfo: item.forwardInfo.flatMap { EngineStoryItem.ForwardInfo($0, peers: peers) } )) updatedState.items.sort(by: { lhs, rhs in return lhs.timestamp > rhs.timestamp @@ -1174,7 +1207,8 @@ public final class PeerExpiringStoryListContext { isForwardingDisabled: item.isForwardingDisabled, isEdited: item.isEdited, isMy: item.isMy, - myReaction: item.myReaction + myReaction: item.myReaction, + forwardInfo: item.forwardInfo.flatMap { EngineStoryItem.ForwardInfo($0, transaction: transaction) } ) items.append(.item(mappedItem)) } diff --git 
a/submodules/TelegramCore/Sources/TelegramEngine/Messages/TelegramEngineMessages.swift b/submodules/TelegramCore/Sources/TelegramEngine/Messages/TelegramEngineMessages.swift index 4b01661341..693c9369c9 100644 --- a/submodules/TelegramCore/Sources/TelegramEngine/Messages/TelegramEngineMessages.swift +++ b/submodules/TelegramCore/Sources/TelegramEngine/Messages/TelegramEngineMessages.swift @@ -476,7 +476,7 @@ public extension TelegramEngine { public func transcribeAudio(messageId: MessageId) -> Signal { return _internal_transcribeAudio(postbox: self.account.postbox, network: self.account.network, messageId: messageId) } - + public func storeLocallyTranscribedAudio(messageId: MessageId, text: String, isFinal: Bool, error: AudioTranscriptionMessageAttribute.TranscriptionError?) -> Signal { return self.account.postbox.transaction { transaction -> Void in transaction.updateMessage(messageId, update: { currentMessage in @@ -1178,7 +1178,8 @@ public extension TelegramEngine { isForwardingDisabled: item.isForwardingDisabled, isEdited: item.isEdited, isMy: item.isMy, - myReaction: item.myReaction + myReaction: item.myReaction, + forwardInfo: item.forwardInfo )) if let entry = CodableEntry(updatedItem) { currentItems[i] = StoryItemsTableEntry(value: entry, id: updatedItem.id, expirationTimestamp: updatedItem.expirationTimestamp, isCloseFriends: updatedItem.isCloseFriends) @@ -1192,8 +1193,8 @@ public extension TelegramEngine { } } - public func uploadStory(target: Stories.PendingTarget, media: EngineStoryInputMedia, mediaAreas: [MediaArea], text: String, entities: [MessageTextEntity], pin: Bool, privacy: EngineStoryPrivacy, isForwardingDisabled: Bool, period: Int, randomId: Int64) -> Signal { - return _internal_uploadStory(account: self.account, target: target, media: media, mediaAreas: mediaAreas, text: text, entities: entities, pin: pin, privacy: privacy, isForwardingDisabled: isForwardingDisabled, period: period, randomId: randomId) + public func uploadStory(target: 
Stories.PendingTarget, media: EngineStoryInputMedia, mediaAreas: [MediaArea], text: String, entities: [MessageTextEntity], pin: Bool, privacy: EngineStoryPrivacy, isForwardingDisabled: Bool, period: Int, randomId: Int64, forwardInfo: Stories.PendingForwardInfo?) -> Signal { + return _internal_uploadStory(account: self.account, target: target, media: media, mediaAreas: mediaAreas, text: text, entities: entities, pin: pin, privacy: privacy, isForwardingDisabled: isForwardingDisabled, period: period, randomId: randomId, forwardInfo: forwardInfo) } public func allStoriesUploadEvents() -> Signal<(Int32, Int32), NoError> { diff --git a/submodules/TelegramCore/Sources/TelegramEngine/Messages/Transcription.swift b/submodules/TelegramCore/Sources/TelegramEngine/Messages/Transcription.swift new file mode 100644 index 0000000000..50356fb4c5 --- /dev/null +++ b/submodules/TelegramCore/Sources/TelegramEngine/Messages/Transcription.swift @@ -0,0 +1,153 @@ +import Foundation +import Postbox +import SwiftSignalKit +import TelegramApi +import MtProtoKit + +public enum EngineAudioTranscriptionResult { + case success + case error +} + +func _internal_transcribeAudio(postbox: Postbox, network: Network, messageId: MessageId) -> Signal { + return postbox.transaction { transaction -> Api.InputPeer? 
in + return transaction.getPeer(messageId.peerId).flatMap(apiInputPeer) + } + |> mapToSignal { inputPeer -> Signal in + guard let inputPeer = inputPeer else { + return .single(.error) + } + return network.request(Api.functions.messages.transcribeAudio(peer: inputPeer, msgId: messageId.id)) + |> map { result -> Result in + return .success(result) + } + |> `catch` { error -> Signal, NoError> in + let mappedError: AudioTranscriptionMessageAttribute.TranscriptionError + if error.errorDescription == "MSG_VOICE_TOO_LONG" { + mappedError = .tooLong + } else { + mappedError = .generic + } + return .single(.failure(mappedError)) + } + |> mapToSignal { result -> Signal in + return postbox.transaction { transaction -> EngineAudioTranscriptionResult in + let updatedAttribute: AudioTranscriptionMessageAttribute + switch result { + case let .success(transcribedAudio): + switch transcribedAudio { + case let .transcribedAudio(flags, transcriptionId, text, trialRemainingCount, trialUntilDate): + let isPending = (flags & (1 << 0)) != 0 + + _internal_updateAudioTranscriptionTrialState(transaction: transaction) { current in + var updated = current + if let trialRemainingCount = trialRemainingCount, trialRemainingCount > 0 { + updated = updated.withUpdatedRemainingCount(trialRemainingCount) + } else if let trialUntilDate = trialUntilDate { + updated = updated.withUpdatedCooldownUntilTime(trialUntilDate) + } else { + updated = updated.withUpdatedCooldownUntilTime(nil) + } + return updated + } + updatedAttribute = AudioTranscriptionMessageAttribute(id: transcriptionId, text: text, isPending: isPending, didRate: false, error: nil) + } + case let .failure(error): + updatedAttribute = AudioTranscriptionMessageAttribute(id: 0, text: "", isPending: false, didRate: false, error: error) + } + + transaction.updateMessage(messageId, update: { currentMessage in + let storeForwardInfo = currentMessage.forwardInfo.flatMap(StoreMessageForwardInfo.init) + var attributes = 
currentMessage.attributes.filter { !($0 is AudioTranscriptionMessageAttribute) } + + attributes.append(updatedAttribute) + + return .update(StoreMessage(id: currentMessage.id, globallyUniqueId: currentMessage.globallyUniqueId, groupingKey: currentMessage.groupingKey, threadId: currentMessage.threadId, timestamp: currentMessage.timestamp, flags: StoreMessageFlags(currentMessage.flags), tags: currentMessage.tags, globalTags: currentMessage.globalTags, localTags: currentMessage.localTags, forwardInfo: storeForwardInfo, authorId: currentMessage.author?.id, text: currentMessage.text, attributes: attributes, media: currentMessage.media)) + }) + + if updatedAttribute.error == nil { + return .success + } else { + return .error + } + } + } + } +} + +func _internal_rateAudioTranscription(postbox: Postbox, network: Network, messageId: MessageId, id: Int64, isGood: Bool) -> Signal { + return postbox.transaction { transaction -> Api.InputPeer? in + transaction.updateMessage(messageId, update: { currentMessage in + var storeForwardInfo: StoreMessageForwardInfo? + if let forwardInfo = currentMessage.forwardInfo { + storeForwardInfo = StoreMessageForwardInfo(authorId: forwardInfo.author?.id, sourceId: forwardInfo.source?.id, sourceMessageId: forwardInfo.sourceMessageId, date: forwardInfo.date, authorSignature: forwardInfo.authorSignature, psaType: forwardInfo.psaType, flags: forwardInfo.flags) + } + var attributes = currentMessage.attributes + for i in 0 ..< attributes.count { + if let attribute = attributes[i] as? 
AudioTranscriptionMessageAttribute { + attributes[i] = attribute.withDidRate() + } + } + return .update(StoreMessage( + id: currentMessage.id, + globallyUniqueId: currentMessage.globallyUniqueId, + groupingKey: currentMessage.groupingKey, + threadId: currentMessage.threadId, + timestamp: currentMessage.timestamp, + flags: StoreMessageFlags(currentMessage.flags), + tags: currentMessage.tags, + globalTags: currentMessage.globalTags, + localTags: currentMessage.localTags, + forwardInfo: storeForwardInfo, + authorId: currentMessage.author?.id, + text: currentMessage.text, + attributes: attributes, + media: currentMessage.media + )) + }) + + return transaction.getPeer(messageId.peerId).flatMap(apiInputPeer) + } + |> mapToSignal { inputPeer -> Signal in + guard let inputPeer = inputPeer else { + return .complete() + } + return network.request(Api.functions.messages.rateTranscribedAudio(peer: inputPeer, msgId: messageId.id, transcriptionId: id, good: isGood ? .boolTrue : .boolFalse)) + |> `catch` { _ -> Signal in + return .single(.boolFalse) + } + |> ignoreValues + } +} + +public enum AudioTranscription { + public struct TrialState: Equatable, Codable { + public let cooldownUntilTime: Int32? + public let remainingCount: Int32 + + func withUpdatedCooldownUntilTime(_ time: Int32?) -> AudioTranscription.TrialState { + return AudioTranscription.TrialState(cooldownUntilTime: time, remainingCount: time != nil ? 0 : max(1, self.remainingCount)) + } + + func withUpdatedRemainingCount(_ remainingCount: Int32) -> AudioTranscription.TrialState { + return AudioTranscription.TrialState(remainingCount: remainingCount) + } + + public init(cooldownUntilTime: Int32? 
= nil, remainingCount: Int32) { + self.cooldownUntilTime = cooldownUntilTime + self.remainingCount = remainingCount + } + + public static var defaultValue: AudioTranscription.TrialState { + return AudioTranscription.TrialState( + cooldownUntilTime: nil, + remainingCount: 1 + ) + } + } +} + +func _internal_updateAudioTranscriptionTrialState(transaction: Transaction, _ f: (AudioTranscription.TrialState) -> AudioTranscription.TrialState) { + let current = transaction.getPreferencesEntry(key: PreferencesKeys.audioTranscriptionTrialState)?.get(AudioTranscription.TrialState.self) ?? .defaultValue + transaction.setPreferencesEntry(key: PreferencesKeys.audioTranscriptionTrialState, value: PreferencesEntry(f(current))) +} diff --git a/submodules/TelegramCore/Sources/TelegramEngine/Messages/Translate.swift b/submodules/TelegramCore/Sources/TelegramEngine/Messages/Translate.swift index f1ffae6953..562d549a7c 100644 --- a/submodules/TelegramCore/Sources/TelegramEngine/Messages/Translate.swift +++ b/submodules/TelegramCore/Sources/TelegramEngine/Messages/Translate.swift @@ -155,108 +155,3 @@ func _internal_togglePeerMessagesTranslationHidden(account: Account, peerId: Eng |> ignoreValues } } - -public enum EngineAudioTranscriptionResult { - case success - case error -} - -func _internal_transcribeAudio(postbox: Postbox, network: Network, messageId: MessageId) -> Signal { - return postbox.transaction { transaction -> Api.InputPeer? 
in - return transaction.getPeer(messageId.peerId).flatMap(apiInputPeer) - } - |> mapToSignal { inputPeer -> Signal in - guard let inputPeer = inputPeer else { - return .single(.error) - } - return network.request(Api.functions.messages.transcribeAudio(peer: inputPeer, msgId: messageId.id)) - |> map { result -> Result in - return .success(result) - } - |> `catch` { error -> Signal, NoError> in - let mappedError: AudioTranscriptionMessageAttribute.TranscriptionError - if error.errorDescription == "MSG_VOICE_TOO_LONG" { - mappedError = .tooLong - } else { - mappedError = .generic - } - return .single(.failure(mappedError)) - } - |> mapToSignal { result -> Signal in - return postbox.transaction { transaction -> EngineAudioTranscriptionResult in - let updatedAttribute: AudioTranscriptionMessageAttribute - switch result { - case let .success(transcribedAudio): - switch transcribedAudio { - case let .transcribedAudio(flags, transcriptionId, text): - let isPending = (flags & (1 << 0)) != 0 - - updatedAttribute = AudioTranscriptionMessageAttribute(id: transcriptionId, text: text, isPending: isPending, didRate: false, error: nil) - } - case let .failure(error): - updatedAttribute = AudioTranscriptionMessageAttribute(id: 0, text: "", isPending: false, didRate: false, error: error) - } - - transaction.updateMessage(messageId, update: { currentMessage in - let storeForwardInfo = currentMessage.forwardInfo.flatMap(StoreMessageForwardInfo.init) - var attributes = currentMessage.attributes.filter { !($0 is AudioTranscriptionMessageAttribute) } - - attributes.append(updatedAttribute) - - return .update(StoreMessage(id: currentMessage.id, globallyUniqueId: currentMessage.globallyUniqueId, groupingKey: currentMessage.groupingKey, threadId: currentMessage.threadId, timestamp: currentMessage.timestamp, flags: StoreMessageFlags(currentMessage.flags), tags: currentMessage.tags, globalTags: currentMessage.globalTags, localTags: currentMessage.localTags, forwardInfo: storeForwardInfo, 
authorId: currentMessage.author?.id, text: currentMessage.text, attributes: attributes, media: currentMessage.media)) - }) - - if updatedAttribute.error == nil { - return .success - } else { - return .error - } - } - } - } -} - -func _internal_rateAudioTranscription(postbox: Postbox, network: Network, messageId: MessageId, id: Int64, isGood: Bool) -> Signal { - return postbox.transaction { transaction -> Api.InputPeer? in - transaction.updateMessage(messageId, update: { currentMessage in - var storeForwardInfo: StoreMessageForwardInfo? - if let forwardInfo = currentMessage.forwardInfo { - storeForwardInfo = StoreMessageForwardInfo(authorId: forwardInfo.author?.id, sourceId: forwardInfo.source?.id, sourceMessageId: forwardInfo.sourceMessageId, date: forwardInfo.date, authorSignature: forwardInfo.authorSignature, psaType: forwardInfo.psaType, flags: forwardInfo.flags) - } - var attributes = currentMessage.attributes - for i in 0 ..< attributes.count { - if let attribute = attributes[i] as? AudioTranscriptionMessageAttribute { - attributes[i] = attribute.withDidRate() - } - } - return .update(StoreMessage( - id: currentMessage.id, - globallyUniqueId: currentMessage.globallyUniqueId, - groupingKey: currentMessage.groupingKey, - threadId: currentMessage.threadId, - timestamp: currentMessage.timestamp, - flags: StoreMessageFlags(currentMessage.flags), - tags: currentMessage.tags, - globalTags: currentMessage.globalTags, - localTags: currentMessage.localTags, - forwardInfo: storeForwardInfo, - authorId: currentMessage.author?.id, - text: currentMessage.text, - attributes: attributes, - media: currentMessage.media - )) - }) - - return transaction.getPeer(messageId.peerId).flatMap(apiInputPeer) - } - |> mapToSignal { inputPeer -> Signal in - guard let inputPeer = inputPeer else { - return .complete() - } - return network.request(Api.functions.messages.rateTranscribedAudio(peer: inputPeer, msgId: messageId.id, transcriptionId: id, good: isGood ? 
.boolTrue : .boolFalse)) - |> `catch` { _ -> Signal in - return .single(.boolFalse) - } - |> ignoreValues - } -} diff --git a/submodules/TelegramCore/Sources/TelegramEngine/Peers/ChannelRecommendation.swift b/submodules/TelegramCore/Sources/TelegramEngine/Peers/ChannelRecommendation.swift index f7c1f7eb39..0b50ca4d87 100644 --- a/submodules/TelegramCore/Sources/TelegramEngine/Peers/ChannelRecommendation.swift +++ b/submodules/TelegramCore/Sources/TelegramEngine/Peers/ChannelRecommendation.swift @@ -104,17 +104,23 @@ func _internal_recommendedChannels(account: Account, peerId: EnginePeer.Id) -> S let key = PostboxViewKey.cachedItem(entryId(peerId: peerId)) return account.postbox.combinedView(keys: [key]) |> mapToSignal { views -> Signal in - guard let cachedChannels = (views.views[key] as? CachedItemView)?.value?.get(CachedRecommendedChannels.self) else { + guard let cachedChannels = (views.views[key] as? CachedItemView)?.value?.get(CachedRecommendedChannels.self), !cachedChannels.peerIds.isEmpty else { return .single(nil) } - return account.postbox.transaction { transaction -> RecommendedChannels? in - var channels: [RecommendedChannels.Channel] = [] - for peerId in cachedChannels.peerIds { - if let peer = transaction.getPeer(peerId), let cachedData = transaction.getPeerCachedData(peerId: peerId) as? CachedChannelData { - channels.append(RecommendedChannels.Channel(peer: EnginePeer(peer), subscribers: cachedData.participantsSummary.memberCount ?? 0)) + return account.postbox.multiplePeersView(cachedChannels.peerIds) + |> mapToSignal { view in + return account.postbox.transaction { transaction -> RecommendedChannels? in + var channels: [RecommendedChannels.Channel] = [] + for peerId in cachedChannels.peerIds { + if let peer = view.peers[peerId] as? TelegramChannel, let cachedData = transaction.getPeerCachedData(peerId: peerId) as? 
CachedChannelData { + if case .member = peer.participationStatus { + } else { + channels.append(RecommendedChannels.Channel(peer: EnginePeer(peer), subscribers: cachedData.participantsSummary.memberCount ?? 0)) + } + } } + return RecommendedChannels(channels: channels, isHidden: cachedChannels.isHidden) } - return RecommendedChannels(channels: channels, isHidden: cachedChannels.isHidden) } } } diff --git a/submodules/TelegramCore/Sources/TelegramEngine/Themes/ChatThemes.swift b/submodules/TelegramCore/Sources/TelegramEngine/Themes/ChatThemes.swift index 6b943dff85..be6cbd94fb 100644 --- a/submodules/TelegramCore/Sources/TelegramEngine/Themes/ChatThemes.swift +++ b/submodules/TelegramCore/Sources/TelegramEngine/Themes/ChatThemes.swift @@ -186,7 +186,7 @@ func _internal_revertChatWallpaper(account: Account, peerId: EnginePeer.Id) -> S return account.network.request(Api.functions.messages.setChatWallPaper(flags: flags, peer: inputPeer, wallpaper: nil, settings: nil, id: nil), automaticFloodWait: false) |> map(Optional.init) |> `catch` { error -> Signal in - if error.description == "WALLPAPER_NOT_FOUND" { + if error.errorDescription == "WALLPAPER_NOT_FOUND" { return .single(nil) } return .fail(.generic) @@ -215,7 +215,7 @@ public enum SetExistingChatWallpaperError { case generic } -func _internal_setExistingChatWallpaper(account: Account, messageId: MessageId, settings: WallpaperSettings?) -> Signal { +func _internal_setExistingChatWallpaper(account: Account, messageId: MessageId, settings: WallpaperSettings?, forBoth: Bool) -> Signal { return account.postbox.transaction { transaction -> Peer? in if let peer = transaction.getPeer(messageId.peerId), let message = transaction.getMessage(messageId) { if let action = message.media.first(where: { $0 is TelegramMediaAction }) as? 
TelegramMediaAction, case let .setChatWallpaper(wallpaper, _) = action.action { @@ -248,6 +248,9 @@ func _internal_setExistingChatWallpaper(account: Account, messageId: MessageId, flags |= 1 << 2 inputSettings = apiWallpaperSettings(settings) } + if forBoth { + flags |= 1 << 3 + } return account.network.request(Api.functions.messages.setChatWallPaper(flags: flags, peer: inputPeer, wallpaper: nil, settings: inputSettings, id: messageId.id), automaticFloodWait: false) |> `catch` { _ -> Signal in return .fail(.generic) diff --git a/submodules/TelegramCore/Sources/TelegramEngine/Themes/TelegramEngineThemes.swift b/submodules/TelegramCore/Sources/TelegramEngine/Themes/TelegramEngineThemes.swift index 1a9ed756c1..8e8ea3b097 100644 --- a/submodules/TelegramCore/Sources/TelegramEngine/Themes/TelegramEngineThemes.swift +++ b/submodules/TelegramCore/Sources/TelegramEngine/Themes/TelegramEngineThemes.swift @@ -22,8 +22,8 @@ public extension TelegramEngine { |> ignoreValues } - public func setExistingChatWallpaper(messageId: MessageId, settings: WallpaperSettings?) 
-> Signal { - return _internal_setExistingChatWallpaper(account: self.account, messageId: messageId, settings: settings) + public func setExistingChatWallpaper(messageId: MessageId, settings: WallpaperSettings?, forBoth: Bool) -> Signal { + return _internal_setExistingChatWallpaper(account: self.account, messageId: messageId, settings: settings, forBoth: forBoth) } public func revertChatWallpaper(peerId: EnginePeer.Id) -> Signal { diff --git a/submodules/TelegramStringFormatting/Sources/PresenceStrings.swift b/submodules/TelegramStringFormatting/Sources/PresenceStrings.swift index c563c5da65..d790dddb53 100644 --- a/submodules/TelegramStringFormatting/Sources/PresenceStrings.swift +++ b/submodules/TelegramStringFormatting/Sources/PresenceStrings.swift @@ -431,13 +431,13 @@ public func stringForRelativeActivityTimestamp(strings: PresentationStrings, dat } } -public func stringForStoryActivityTimestamp(strings: PresentationStrings, dateTimeFormat: PresentationDateTimeFormat, preciseTime: Bool = false, relativeTimestamp: Int32, relativeTo timestamp: Int32) -> String { +public func stringForStoryActivityTimestamp(strings: PresentationStrings, dateTimeFormat: PresentationDateTimeFormat, preciseTime: Bool = false, relativeTimestamp: Int32, relativeTo timestamp: Int32, short: Bool = false) -> String { let difference = timestamp - relativeTimestamp if difference < 60 { - return strings.Time_JustNow + return short ? strings.ShortTime_JustNow : strings.Time_JustNow } else if difference < 60 * 60 { let minutes = difference / 60 - return strings.Time_MinutesAgo(minutes) + return short ? 
strings.ShortTime_MinutesAgo(minutes) : strings.Time_MinutesAgo(minutes) } else { var t: time_t = time_t(relativeTimestamp) var timeinfo: tm = tm() @@ -456,7 +456,7 @@ public func stringForStoryActivityTimestamp(strings: PresentationStrings, dateTi let day: RelativeTimestampFormatDay if dayDifference == 0 { let minutes = difference / (60 * 60) - return strings.Time_HoursAgo(minutes) + return short ? strings.ShortTime_HoursAgo(minutes) : strings.Time_HoursAgo(minutes) } else { day = .yesterday } diff --git a/submodules/TelegramUI/Components/AudioTranscriptionButtonComponent/BUILD b/submodules/TelegramUI/Components/AudioTranscriptionButtonComponent/BUILD index 142864139d..b181aca9e7 100644 --- a/submodules/TelegramUI/Components/AudioTranscriptionButtonComponent/BUILD +++ b/submodules/TelegramUI/Components/AudioTranscriptionButtonComponent/BUILD @@ -15,6 +15,7 @@ swift_library( "//submodules/Display:Display", "//submodules/TelegramPresentationData:TelegramPresentationData", "//submodules/Components/LottieAnimationComponent:LottieAnimationComponent", + "//submodules/Components/BundleIconComponent:BundleIconComponent", ], visibility = [ "//visibility:public", diff --git a/submodules/TelegramUI/Components/AudioTranscriptionButtonComponent/Sources/AudioTranscriptionButtonComponent.swift b/submodules/TelegramUI/Components/AudioTranscriptionButtonComponent/Sources/AudioTranscriptionButtonComponent.swift index ad955e5fb7..b614222d27 100644 --- a/submodules/TelegramUI/Components/AudioTranscriptionButtonComponent/Sources/AudioTranscriptionButtonComponent.swift +++ b/submodules/TelegramUI/Components/AudioTranscriptionButtonComponent/Sources/AudioTranscriptionButtonComponent.swift @@ -5,6 +5,7 @@ import AppBundle import Display import TelegramPresentationData import LottieAnimationComponent +import BundleIconComponent public final class AudioTranscriptionButtonComponent: Component { public enum Theme: Equatable { @@ -33,6 +34,7 @@ public final class 
AudioTranscriptionButtonComponent: Component { case inProgress case expanded case collapsed + case locked } public let theme: AudioTranscriptionButtonComponent.Theme @@ -64,25 +66,21 @@ public final class AudioTranscriptionButtonComponent: Component { private let blurredBackgroundNode: NavigationBackgroundNode private let backgroundLayer: SimpleLayer - private let animationView: ComponentHostView + private var iconView: ComponentView? + private var animationView: ComponentView? private var progressAnimationView: ComponentHostView? override init(frame: CGRect) { self.blurredBackgroundNode = NavigationBackgroundNode(color: .clear) self.backgroundLayer = SimpleLayer() - self.animationView = ComponentHostView() super.init(frame: frame) self.backgroundLayer.masksToBounds = true self.backgroundLayer.cornerRadius = 10.0 self.layer.addSublayer(self.backgroundLayer) - - self.animationView.isUserInteractionEnabled = false - - self.addSubview(self.animationView) - + self.addTarget(self, action: #selector(self.pressed), for: .touchUpInside) } @@ -115,58 +113,120 @@ public final class AudioTranscriptionButtonComponent: Component { } if self.component?.transcriptionState != component.transcriptionState { - switch component.transcriptionState { - case .inProgress: - if self.progressAnimationView == nil { - let progressAnimationView = ComponentHostView() - self.progressAnimationView = progressAnimationView - self.addSubview(progressAnimationView) - } - default: - if let progressAnimationView = self.progressAnimationView { - self.progressAnimationView = nil - if case .none = transition.animation { - progressAnimationView.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.15, removeOnCompletion: false, completion: { [weak progressAnimationView] _ in - progressAnimationView?.removeFromSuperview() + if case .locked = component.transcriptionState { + if let animationView = self.animationView { + self.animationView = nil + if let view = animationView.view { + 
view.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2, removeOnCompletion: false, completion: { _ in + view.removeFromSuperview() }) - } else { - progressAnimationView.removeFromSuperview() } } + + let iconView: ComponentView + if let current = self.iconView { + iconView = current + } else { + iconView = ComponentView() + self.iconView = iconView + } + + let iconSize = iconView.update( + transition: transition, + component: AnyComponent(BundleIconComponent( + name: "Chat/Message/TranscriptionLocked", + tintColor: foregroundColor + )), + environment: {}, + containerSize: CGSize(width: 30.0, height: 30.0) + ) + + if let view = iconView.view { + if view.superview == nil { + view.isUserInteractionEnabled = false + self.addSubview(view) + } + view.frame = CGRect(origin: CGPoint(x: floor((size.width - iconSize.width) / 2.0), y: floor((size.width - iconSize.height) / 2.0)), size: iconSize) + } + } else { + if let iconView = self.iconView { + self.iconView = nil + if let view = iconView.view { + view.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2, removeOnCompletion: false, completion: { _ in + view.removeFromSuperview() + }) + } + } + + let animationView: ComponentView + if let current = self.animationView { + animationView = current + } else { + animationView = ComponentView() + self.animationView = animationView + } + + switch component.transcriptionState { + case .inProgress: + if self.progressAnimationView == nil { + let progressAnimationView = ComponentHostView() + self.progressAnimationView = progressAnimationView + self.addSubview(progressAnimationView) + } + default: + if let progressAnimationView = self.progressAnimationView { + self.progressAnimationView = nil + if case .none = transition.animation { + progressAnimationView.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.15, removeOnCompletion: false, completion: { [weak progressAnimationView] _ in + progressAnimationView?.removeFromSuperview() + }) + } else { + 
progressAnimationView.removeFromSuperview() + } + } + } + + let animationName: String + switch component.transcriptionState { + case .inProgress: + animationName = "voiceToText" + case .collapsed: + animationName = "voiceToText" + case .expanded: + animationName = "textToVoice" + case .locked: + animationName = "voiceToText" + } + let animationSize = animationView.update( + transition: transition, + component: AnyComponent(LottieAnimationComponent( + animation: LottieAnimationComponent.AnimationItem( + name: animationName, + mode: .animateTransitionFromPrevious + ), + colors: [ + "icon.Group 3.Stroke 1": foregroundColor, + "icon.Group 1.Stroke 1": foregroundColor, + "icon.Group 4.Stroke 1": foregroundColor, + "icon.Group 2.Stroke 1": foregroundColor, + "Artboard Copy 2 Outlines.Group 5.Stroke 1": foregroundColor, + "Artboard Copy 2 Outlines.Group 1.Stroke 1": foregroundColor, + "Artboard Copy 2 Outlines.Group 4.Stroke 1": foregroundColor, + "Artboard Copy Outlines.Group 1.Stroke 1": foregroundColor, + ], + size: CGSize(width: 30.0, height: 30.0) + )), + environment: {}, + containerSize: CGSize(width: 30.0, height: 30.0) + ) + if let view = animationView.view { + if view.superview == nil { + view.isUserInteractionEnabled = false + self.addSubview(view) + } + view.frame = CGRect(origin: CGPoint(x: floor((size.width - animationSize.width) / 2.0), y: floor((size.width - animationSize.height) / 2.0)), size: animationSize) + } } - - let animationName: String - switch component.transcriptionState { - case .inProgress: - animationName = "voiceToText" - case .collapsed: - animationName = "voiceToText" - case .expanded: - animationName = "textToVoice" - } - let animationSize = self.animationView.update( - transition: transition, - component: AnyComponent(LottieAnimationComponent( - animation: LottieAnimationComponent.AnimationItem( - name: animationName, - mode: .animateTransitionFromPrevious - ), - colors: [ - "icon.Group 3.Stroke 1": foregroundColor, - "icon.Group 1.Stroke 
1": foregroundColor, - "icon.Group 4.Stroke 1": foregroundColor, - "icon.Group 2.Stroke 1": foregroundColor, - "Artboard Copy 2 Outlines.Group 5.Stroke 1": foregroundColor, - "Artboard Copy 2 Outlines.Group 1.Stroke 1": foregroundColor, - "Artboard Copy 2 Outlines.Group 4.Stroke 1": foregroundColor, - "Artboard Copy Outlines.Group 1.Stroke 1": foregroundColor, - ], - size: CGSize(width: 30.0, height: 30.0) - )), - environment: {}, - containerSize: CGSize(width: 30.0, height: 30.0) - ) - self.animationView.frame = CGRect(origin: CGPoint(x: floor((size.width - animationSize.width) / 2.0), y: floor((size.width - animationSize.height) / 2.0)), size: animationSize) } self.backgroundLayer.backgroundColor = backgroundColor.cgColor diff --git a/submodules/TelegramUI/Components/CameraScreen/Sources/CameraScreen.swift b/submodules/TelegramUI/Components/CameraScreen/Sources/CameraScreen.swift index a81d01a6fd..19f885ffe0 100644 --- a/submodules/TelegramUI/Components/CameraScreen/Sources/CameraScreen.swift +++ b/submodules/TelegramUI/Components/CameraScreen/Sources/CameraScreen.swift @@ -674,7 +674,19 @@ private final class CameraScreenComponent: CombinedComponent { self.resultDisposable.set((camera.stopRecording() |> deliverOnMainQueue).start(next: { [weak self] result in if let self, case let .finished(mainResult, additionalResult, duration, positionChangeTimestamps, _) = result { - self.completion.invoke(.single(.video(CameraScreen.Result.Video(videoPath: mainResult.0, coverImage: mainResult.1, mirror: mainResult.2, additionalVideoPath: additionalResult?.0, additionalCoverImage: additionalResult?.1, dimensions: PixelDimensions(mainResult.3), duration: duration, positionChangeTimestamps: positionChangeTimestamps, additionalVideoPosition: .topRight)))) + self.completion.invoke(.single( + .video(CameraScreen.Result.Video( + videoPath: mainResult.path, + coverImage: mainResult.thumbnail, + mirror: mainResult.isMirrored, + additionalVideoPath: additionalResult?.path, + 
additionalCoverImage: additionalResult?.thumbnail, + dimensions: PixelDimensions(mainResult.dimensions), + duration: duration, + positionChangeTimestamps: positionChangeTimestamps, + additionalVideoPosition: .topRight + )) + )) } })) self.isTransitioning = true diff --git a/submodules/TelegramUI/Components/Chat/ChatMessageInteractiveFileNode/Sources/ChatMessageInteractiveFileNode.swift b/submodules/TelegramUI/Components/Chat/ChatMessageInteractiveFileNode/Sources/ChatMessageInteractiveFileNode.swift index bf42b0daea..70c3438d94 100644 --- a/submodules/TelegramUI/Components/Chat/ChatMessageInteractiveFileNode/Sources/ChatMessageInteractiveFileNode.swift +++ b/submodules/TelegramUI/Components/Chat/ChatMessageInteractiveFileNode/Sources/ChatMessageInteractiveFileNode.swift @@ -32,6 +32,7 @@ import ChatControllerInteraction import ChatMessageDateAndStatusNode import ChatHistoryEntry import ChatMessageItemCommon +import TelegramStringFormatting private struct FetchControls { let fetch: (Bool) -> Void @@ -334,34 +335,48 @@ public final class ChatMessageInteractiveFileNode: ASDisplayNode { } private func transcribe() { - guard let arguments = self.arguments, let context = self.context, let message = self.message, let presentationData = self.presentationData else { + guard let arguments = self.arguments, let context = self.context, let message = self.message else { return } - guard arguments.associatedData.isPremium else { - if self.hapticFeedback == nil { - self.hapticFeedback = HapticFeedback() - } - self.hapticFeedback?.impact(.medium) - - let presentationData = context.sharedContext.currentPresentationData.with { $0 } - let tipController = UndoOverlayController(presentationData: presentationData, content: .universal(animation: "anim_voiceToText", scale: 0.065, colors: [:], title: nil, text: presentationData.strings.Message_AudioTranscription_SubscribeToPremium, customUndoText: presentationData.strings.Message_AudioTranscription_SubscribeToPremiumAction, timeout: nil), 
elevatedLayout: false, position: .top, animateInAsReplacement: false, action: { action in - if case .undo = action { - var replaceImpl: ((ViewController) -> Void)? - let controller = context.sharedContext.makePremiumDemoController(context: context, subject: .voiceToText, action: { - let controller = context.sharedContext.makePremiumIntroController(context: context, source: .settings, forceDark: false, dismissed: nil) - replaceImpl?(controller) - }) - replaceImpl = { [weak controller] c in - controller?.replace(with: c) + let presentationData = context.sharedContext.currentPresentationData.with { $0 } + + let premiumConfiguration = PremiumConfiguration.with(appConfiguration: arguments.context.currentAppConfiguration.with { $0 }) + + let transcriptionText = self.forcedAudioTranscriptionText ?? transcribedText(message: message) + if transcriptionText == nil { + if premiumConfiguration.audioTransciptionTrialCount > 0 { + if !arguments.associatedData.isPremium { + if self.presentAudioTranscriptionTooltip(finished: false) { + return } - arguments.controllerInteraction.navigationController()?.pushViewController(controller, animated: true) - - let _ = ApplicationSpecificNotice.incrementAudioTranscriptionSuggestion(accountManager: context.sharedContext.accountManager).startStandalone() } - return false }) - arguments.controllerInteraction.presentControllerInCurrent(tipController, nil) - return + } else { + guard arguments.associatedData.isPremium else { + if self.hapticFeedback == nil { + self.hapticFeedback = HapticFeedback() + } + self.hapticFeedback?.impact(.medium) + + let tipController = UndoOverlayController(presentationData: presentationData, content: .universal(animation: "anim_voiceToText", scale: 0.065, colors: [:], title: nil, text: presentationData.strings.Message_AudioTranscription_SubscribeToPremium, customUndoText: presentationData.strings.Message_AudioTranscription_SubscribeToPremiumAction, timeout: nil), elevatedLayout: false, position: .top, 
animateInAsReplacement: false, action: { action in + if case .undo = action { + var replaceImpl: ((ViewController) -> Void)? + let controller = context.sharedContext.makePremiumDemoController(context: context, subject: .voiceToText, action: { + let controller = context.sharedContext.makePremiumIntroController(context: context, source: .settings, forceDark: false, dismissed: nil) + replaceImpl?(controller) + }) + replaceImpl = { [weak controller] c in + controller?.replace(with: c) + } + arguments.controllerInteraction.navigationController()?.pushViewController(controller, animated: true) + + let _ = ApplicationSpecificNotice.incrementAudioTranscriptionSuggestion(accountManager: context.sharedContext.accountManager).startStandalone() + } + return false }) + arguments.controllerInteraction.presentControllerInCurrent(tipController, nil) + return + } + } } var shouldBeginTranscription = false @@ -450,6 +465,12 @@ public final class ChatMessageInteractiveFileNode: ASDisplayNode { } strongSelf.transcribeDisposable?.dispose() strongSelf.transcribeDisposable = nil + + if let arguments = strongSelf.arguments, !arguments.associatedData.isPremium { + Queue.mainQueue().after(0.1, { + let _ = strongSelf.presentAudioTranscriptionTooltip(finished: true) + }) + } }) } } @@ -471,6 +492,58 @@ public final class ChatMessageInteractiveFileNode: ASDisplayNode { } } + private func presentAudioTranscriptionTooltip(finished: Bool) -> Bool { + guard let arguments = self.arguments, !arguments.associatedData.isPremium else { + return false + } + + let presentationData = arguments.context.sharedContext.currentPresentationData.with { $0 } + var text: String? 
+ var timeout: Double = 5.0 + + let currentTime = Int32(Date().timeIntervalSince1970) + if let cooldownUntilTime = arguments.associatedData.audioTranscriptionTrial.cooldownUntilTime, cooldownUntilTime > currentTime { + let premiumConfiguration = PremiumConfiguration.with(appConfiguration: arguments.context.currentAppConfiguration.with { $0 }) + + let time = stringForMediumDate(timestamp: cooldownUntilTime, strings: arguments.presentationData.strings, dateTimeFormat: arguments.presentationData.dateTimeFormat) + let usedString = arguments.presentationData.strings.Conversation_FreeTranscriptionCooldownTooltip(premiumConfiguration.audioTransciptionTrialCount) + let waitString = arguments.presentationData.strings.Conversation_FreeTranscriptionWaitOrSubscribe(time).string + let fullString = "\(usedString) \(waitString)" + text = fullString + + if self.hapticFeedback == nil { + self.hapticFeedback = HapticFeedback() + } + self.hapticFeedback?.impact(.medium) + timeout = 7.0 + } else if finished { + let remainingCount = arguments.associatedData.audioTranscriptionTrial.remainingCount + text = arguments.presentationData.strings.Conversation_FreeTranscriptionLimitTooltip(remainingCount) + } + + guard let text else { + return false + } + let context = arguments.context + let tipController = UndoOverlayController(presentationData: presentationData, content: .universal(animation: "Transcribe", scale: 0.06, colors: [:], title: nil, text: text, customUndoText: nil, timeout: timeout), elevatedLayout: false, position: .top, animateInAsReplacement: false, action: { action in + if case .info = action { + var replaceImpl: ((ViewController) -> Void)? 
+ let controller = context.sharedContext.makePremiumDemoController(context: context, subject: .voiceToText, action: { + let controller = context.sharedContext.makePremiumIntroController(context: context, source: .settings, forceDark: false, dismissed: nil) + replaceImpl?(controller) + }) + replaceImpl = { [weak controller] c in + controller?.replace(with: c) + } + arguments.controllerInteraction.navigationController()?.pushViewController(controller, animated: true) + return true + } + return false + }) + arguments.controllerInteraction.presentControllerInCurrent(tipController, nil) + return true + } + public func asyncLayout() -> (Arguments) -> (CGFloat, (CGSize) -> (CGFloat, (CGFloat) -> (CGSize, (Bool, ListViewItemUpdateAnimation, ListViewItemApply?) -> Void))) { let currentFile = self.file @@ -670,23 +743,24 @@ public final class ChatMessageInteractiveFileNode: ASDisplayNode { var textString: NSAttributedString? var updatedAudioTranscriptionState: AudioTranscriptionButtonComponent.TranscriptionState? 
- let displayTranscribe: Bool + var displayTranscribe = false if arguments.message.id.peerId.namespace != Namespaces.Peer.SecretChat { + let premiumConfiguration = PremiumConfiguration.with(appConfiguration: arguments.context.currentAppConfiguration.with { $0 }) if arguments.associatedData.isPremium { displayTranscribe = true + } else if premiumConfiguration.audioTransciptionTrialCount > 0 { + if arguments.incoming { + if audioDuration < premiumConfiguration.audioTransciptionTrialMaxDuration { + displayTranscribe = true + } + } } else if arguments.associatedData.alwaysDisplayTranscribeButton.canBeDisplayed { if audioDuration >= 60 { displayTranscribe = true } else if arguments.incoming && isConsumed == false && arguments.associatedData.alwaysDisplayTranscribeButton.displayForNotConsumed { displayTranscribe = true - } else { - displayTranscribe = false } - } else { - displayTranscribe = false } - } else { - displayTranscribe = false } let transcribedText = forcedAudioTranscriptionText ?? transcribedText(message: arguments.message) @@ -700,6 +774,11 @@ public final class ChatMessageInteractiveFileNode: ASDisplayNode { break } + let currentTime = Int32(Date().timeIntervalSince1970) + if transcribedText == nil, let cooldownUntilTime = arguments.associatedData.audioTranscriptionTrial.cooldownUntilTime, cooldownUntilTime > currentTime { + updatedAudioTranscriptionState = .locked + } + let effectiveAudioTranscriptionState = updatedAudioTranscriptionState ?? 
audioTranscriptionState var displayTrailingAnimatedDots = false diff --git a/submodules/TelegramUI/Components/Chat/ChatMessageInteractiveInstantVideoNode/Sources/ChatMessageInteractiveInstantVideoNode.swift b/submodules/TelegramUI/Components/Chat/ChatMessageInteractiveInstantVideoNode/Sources/ChatMessageInteractiveInstantVideoNode.swift index 66f240407d..210ea2bc00 100644 --- a/submodules/TelegramUI/Components/Chat/ChatMessageInteractiveInstantVideoNode/Sources/ChatMessageInteractiveInstantVideoNode.swift +++ b/submodules/TelegramUI/Components/Chat/ChatMessageInteractiveInstantVideoNode/Sources/ChatMessageInteractiveInstantVideoNode.swift @@ -28,6 +28,7 @@ import InstantVideoRadialStatusNode import ChatInstantVideoMessageDurationNode import ChatControllerInteraction import WallpaperBackgroundNode +import TelegramStringFormatting public struct ChatMessageInstantVideoItemLayoutResult { public let contentSize: CGSize @@ -590,6 +591,11 @@ public class ChatMessageInteractiveInstantVideoNode: ASDisplayNode { updatedTranscriptionText = transcribedText } + let currentTime = Int32(Date().timeIntervalSince1970) + if transcribedText == nil, let cooldownUntilTime = item.associatedData.audioTranscriptionTrial.cooldownUntilTime, cooldownUntilTime > currentTime { + updatedAudioTranscriptionState = .locked + } + let effectiveAudioTranscriptionState = updatedAudioTranscriptionState ?? 
audioTranscriptionState return (result, { [weak self] layoutData, animation in @@ -775,21 +781,22 @@ public class ChatMessageInteractiveInstantVideoNode: ASDisplayNode { })) } - var displayTranscribe: Bool + var displayTranscribe = false if item.message.id.peerId.namespace != Namespaces.Peer.SecretChat && statusDisplayType == .free { + let premiumConfiguration = PremiumConfiguration.with(appConfiguration: item.context.currentAppConfiguration.with { $0 }) if item.associatedData.isPremium { displayTranscribe = true + } else if premiumConfiguration.audioTransciptionTrialCount > 0 { + if incoming { + displayTranscribe = true + } } else if item.associatedData.alwaysDisplayTranscribeButton.canBeDisplayed { if incoming && notConsumed && item.associatedData.alwaysDisplayTranscribeButton.displayForNotConsumed { displayTranscribe = true } else { displayTranscribe = false } - } else { - displayTranscribe = false } - } else { - displayTranscribe = false } if displayTranscribe, let durationBlurColor = durationBlurColor { @@ -1614,32 +1621,47 @@ public class ChatMessageInteractiveInstantVideoNode: ASDisplayNode { guard let item = self.item, item.message.id.namespace == Namespaces.Message.Cloud else { return } + + let presentationData = item.context.sharedContext.currentPresentationData.with { $0 } - guard item.associatedData.isPremium else { - if self.hapticFeedback == nil { - self.hapticFeedback = HapticFeedback() - } - self.hapticFeedback?.impact(.medium) - - let presentationData = item.context.sharedContext.currentPresentationData.with { $0 } - let tipController = UndoOverlayController(presentationData: presentationData, content: .universal(animation: "anim_voiceToText", scale: 0.065, colors: [:], title: nil, text: presentationData.strings.Message_AudioTranscription_SubscribeToPremium, customUndoText: presentationData.strings.Message_AudioTranscription_SubscribeToPremiumAction, timeout: nil), elevatedLayout: false, position: .top, animateInAsReplacement: false, action: { 
action in - if case .undo = action { - let context = item.context - var replaceImpl: ((ViewController) -> Void)? - let controller = context.sharedContext.makePremiumDemoController(context: context, subject: .voiceToText, action: { - let controller = context.sharedContext.makePremiumIntroController(context: context, source: .settings, forceDark: false, dismissed: nil) - replaceImpl?(controller) - }) - replaceImpl = { [weak controller] c in - controller?.replace(with: c) + let premiumConfiguration = PremiumConfiguration.with(appConfiguration: item.context.currentAppConfiguration.with { $0 }) + + let transcriptionText = transcribedText(message: item.message) + if transcriptionText == nil { + if premiumConfiguration.audioTransciptionTrialCount > 0 { + if !item.associatedData.isPremium { + if self.presentAudioTranscriptionTooltip(finished: false) { + return } - item.controllerInteraction.navigationController()?.pushViewController(controller, animated: true) - - let _ = ApplicationSpecificNotice.incrementAudioTranscriptionSuggestion(accountManager: item.context.sharedContext.accountManager).startStandalone() } - return false }) - item.controllerInteraction.presentControllerInCurrent(tipController, nil) - return + } else { + guard item.associatedData.isPremium else { + if self.hapticFeedback == nil { + self.hapticFeedback = HapticFeedback() + } + self.hapticFeedback?.impact(.medium) + + + let tipController = UndoOverlayController(presentationData: presentationData, content: .universal(animation: "anim_voiceToText", scale: 0.065, colors: [:], title: nil, text: presentationData.strings.Message_AudioTranscription_SubscribeToPremium, customUndoText: presentationData.strings.Message_AudioTranscription_SubscribeToPremiumAction, timeout: nil), elevatedLayout: false, position: .top, animateInAsReplacement: false, action: { action in + if case .undo = action { + let context = item.context + var replaceImpl: ((ViewController) -> Void)? 
+ let controller = context.sharedContext.makePremiumDemoController(context: context, subject: .voiceToText, action: { + let controller = context.sharedContext.makePremiumIntroController(context: context, source: .settings, forceDark: false, dismissed: nil) + replaceImpl?(controller) + }) + replaceImpl = { [weak controller] c in + controller?.replace(with: c) + } + item.controllerInteraction.navigationController()?.pushViewController(controller, animated: true) + + let _ = ApplicationSpecificNotice.incrementAudioTranscriptionSuggestion(accountManager: item.context.sharedContext.accountManager).startStandalone() + } + return false }) + item.controllerInteraction.presentControllerInCurrent(tipController, nil) + return + } + } } var shouldBeginTranscription = false @@ -1673,6 +1695,12 @@ public class ChatMessageInteractiveInstantVideoNode: ASDisplayNode { } strongSelf.transcribeDisposable?.dispose() strongSelf.transcribeDisposable = nil + + if let item = strongSelf.item, !item.associatedData.isPremium { + Queue.mainQueue().after(0.1, { + let _ = strongSelf.presentAudioTranscriptionTooltip(finished: true) + }) + } }) } } @@ -1694,6 +1722,58 @@ public class ChatMessageInteractiveInstantVideoNode: ASDisplayNode { self.updateTranscriptionExpanded?(self.audioTranscriptionState) } + private func presentAudioTranscriptionTooltip(finished: Bool) -> Bool { + guard let item = self.item, !item.associatedData.isPremium else { + return false + } + + let presentationData = item.context.sharedContext.currentPresentationData.with { $0 } + var text: String? 
+ var timeout: Double = 5.0 + + let currentTime = Int32(Date().timeIntervalSince1970) + if let cooldownUntilTime = item.associatedData.audioTranscriptionTrial.cooldownUntilTime, cooldownUntilTime > currentTime { + let premiumConfiguration = PremiumConfiguration.with(appConfiguration: item.context.currentAppConfiguration.with { $0 }) + + let time = stringForMediumDate(timestamp: cooldownUntilTime, strings: presentationData.strings, dateTimeFormat: presentationData.dateTimeFormat) + let usedString = presentationData.strings.Conversation_FreeTranscriptionCooldownTooltip(premiumConfiguration.audioTransciptionTrialCount) + let waitString = presentationData.strings.Conversation_FreeTranscriptionWaitOrSubscribe(time).string + let fullString = "\(usedString) \(waitString)" + text = fullString + + if self.hapticFeedback == nil { + self.hapticFeedback = HapticFeedback() + } + self.hapticFeedback?.impact(.medium) + timeout = 7.0 + } else if finished { + let remainingCount = item.associatedData.audioTranscriptionTrial.remainingCount + text = presentationData.strings.Conversation_FreeTranscriptionLimitTooltip(remainingCount) + } + + guard let text else { + return false + } + let context = item.context + let tipController = UndoOverlayController(presentationData: presentationData, content: .universal(animation: "Transcribe", scale: 0.06, colors: [:], title: nil, text: text, customUndoText: nil, timeout: timeout), elevatedLayout: false, position: .top, animateInAsReplacement: false, action: { action in + if case .info = action { + var replaceImpl: ((ViewController) -> Void)? 
+ let controller = context.sharedContext.makePremiumDemoController(context: context, subject: .voiceToText, action: { + let controller = context.sharedContext.makePremiumIntroController(context: context, source: .settings, forceDark: false, dismissed: nil) + replaceImpl?(controller) + }) + replaceImpl = { [weak controller] c in + controller?.replace(with: c) + } + item.controllerInteraction.navigationController()?.pushViewController(controller, animated: true) + return true + } + return false + }) + item.controllerInteraction.presentControllerInCurrent(tipController, nil) + return true + } + public final class AnimateFileNodeDescription { public let node: ASDisplayNode public let textClippingNode: ASDisplayNode diff --git a/submodules/TelegramUI/Components/Chat/ChatMessageJoinedChannelBubbleContentNode/BUILD b/submodules/TelegramUI/Components/Chat/ChatMessageJoinedChannelBubbleContentNode/BUILD index 8b096329df..82e4e8e915 100644 --- a/submodules/TelegramUI/Components/Chat/ChatMessageJoinedChannelBubbleContentNode/BUILD +++ b/submodules/TelegramUI/Components/Chat/ChatMessageJoinedChannelBubbleContentNode/BUILD @@ -33,6 +33,7 @@ swift_library( "//submodules/Components/MultilineTextComponent", "//submodules/Components/BundleIconComponent", "//submodules/ChatMessageBackground", + "//submodules/ContextUI", ], visibility = [ "//visibility:public", diff --git a/submodules/TelegramUI/Components/Chat/ChatMessageJoinedChannelBubbleContentNode/Sources/ChatMessageJoinedChannelBubbleContentNode.swift b/submodules/TelegramUI/Components/Chat/ChatMessageJoinedChannelBubbleContentNode/Sources/ChatMessageJoinedChannelBubbleContentNode.swift index 0b146cc0a1..94bbddc725 100644 --- a/submodules/TelegramUI/Components/Chat/ChatMessageJoinedChannelBubbleContentNode/Sources/ChatMessageJoinedChannelBubbleContentNode.swift +++ b/submodules/TelegramUI/Components/Chat/ChatMessageJoinedChannelBubbleContentNode/Sources/ChatMessageJoinedChannelBubbleContentNode.swift @@ -24,6 +24,7 @@ import 
AvatarNode import MultilineTextComponent import BundleIconComponent import ChatMessageBackground +import ContextUI private func attributedServiceMessageString(theme: ChatPresentationThemeData, strings: PresentationStrings, nameDisplayOrder: PresentationPersonNameOrder, dateTimeFormat: PresentationDateTimeFormat, message: EngineMessage, accountPeerId: EnginePeer.Id) -> NSAttributedString? { return universalServiceMessageString(presentationData: (theme.theme, theme.wallpaper), strings: strings, nameDisplayOrder: nameDisplayOrder, dateTimeFormat: dateTimeFormat, message: message, accountPeerId: accountPeerId, forChatList: false, forForumOverview: false) @@ -332,6 +333,9 @@ public class ChatMessageJoinedChannelBubbleContentNode: ChatMessageBubbleContent addAppLogEvent(postbox: item.context.account.postbox, type: "channels.open_recommended_channel", data: json) } item.controllerInteraction.openPeer(peer, .chat(textInputState: nil, subject: nil, peekData: nil), nil, .default) + }, + contextAction: { peer, sourceView, gesture in + item.controllerInteraction.openRecommendedChannelContextMenu(peer, sourceView, gesture) } ) ), @@ -509,19 +513,22 @@ private final class ChannelItemComponent: Component { let peer: EnginePeer let subtitle: String let action: (EnginePeer) -> Void + let contextAction: (EnginePeer, UIView, ContextGesture?) -> Void init( context: AccountContext, theme: PresentationTheme, peer: EnginePeer, subtitle: String, - action: @escaping (EnginePeer) -> Void + action: @escaping (EnginePeer) -> Void, + contextAction: @escaping (EnginePeer, UIView, ContextGesture?) 
-> Void ) { self.context = context self.theme = theme self.peer = peer self.subtitle = subtitle self.action = action + self.contextAction = contextAction } static func ==(lhs: ChannelItemComponent, rhs: ChannelItemComponent) -> Bool { @@ -541,6 +548,7 @@ private final class ChannelItemComponent: Component { } final class View: UIView { + private let contextContainer: ContextControllerSourceView private let containerButton: HighlightTrackingButton private let title = ComponentView() @@ -553,6 +561,8 @@ private final class ChannelItemComponent: Component { private weak var state: EmptyComponentState? override init(frame: CGRect) { + self.contextContainer = ContextControllerSourceView() + self.avatarNode = AvatarNode(font: avatarPlaceholderFont(size: 26.0)) self.avatarNode.isUserInteractionEnabled = false @@ -562,13 +572,21 @@ private final class ChannelItemComponent: Component { super.init(frame: frame) - self.addSubview(self.containerButton) - self.addSubnode(self.avatarNode) + self.addSubview(self.contextContainer) + + self.contextContainer.addSubview(self.containerButton) + self.contextContainer.addSubnode(self.avatarNode) self.avatarNode.view.addSubview(self.avatarBadge) self.avatarNode.badgeView = self.avatarBadge self.containerButton.addTarget(self, action: #selector(self.pressed), for: .touchUpInside) + + self.contextContainer.activated = { [weak self] gesture, point in + if let self, let component = self.component { + component.contextAction(component.peer, self.contextContainer, gesture) + } + } } required init?(coder: NSCoder) { @@ -628,21 +646,21 @@ private final class ChannelItemComponent: Component { if let titleView = self.title.view { if titleView.superview == nil { titleView.isUserInteractionEnabled = false - self.addSubview(titleView) + self.contextContainer.addSubview(titleView) } titleView.frame = titleFrame } if let subtitleView = self.subtitle.view { if subtitleView.superview == nil { subtitleView.isUserInteractionEnabled = false - 
self.addSubview(subtitleView) + self.contextContainer.addSubview(subtitleView) } subtitleView.frame = subtitleFrame } if let subtitleIconView = self.subtitleIcon.view { if subtitleIconView.superview == nil { subtitleIconView.isUserInteractionEnabled = false - self.addSubview(subtitleIconView) + self.contextContainer.addSubview(subtitleIconView) } subtitleIconView.frame = subtitleIconFrame } @@ -654,7 +672,9 @@ private final class ChannelItemComponent: Component { let avatarBadgeFrame = CGRect(origin: CGPoint(x: floorToScreenPixels((avatarFrame.width - avatarBadgeSize.width) / 2.0), y: avatarFrame.height - avatarBadgeSize.height + 2.0), size: avatarBadgeSize).insetBy(dx: -strokeWidth, dy: -strokeWidth) self.avatarBadge.frame = avatarBadgeFrame - self.containerButton.frame = CGRect(origin: .zero, size: itemSize) + let bounds = CGRect(origin: .zero, size: itemSize) + self.contextContainer.frame = bounds + self.containerButton.frame = bounds return itemSize } @@ -676,17 +696,20 @@ final class ChannelListPanelComponent: Component { let theme: PresentationTheme let peers: RecommendedChannels let action: (EnginePeer) -> Void + let contextAction: (EnginePeer, UIView, ContextGesture?) -> Void init( context: AccountContext, theme: PresentationTheme, peers: RecommendedChannels, - action: @escaping (EnginePeer) -> Void + action: @escaping (EnginePeer) -> Void, + contextAction: @escaping (EnginePeer, UIView, ContextGesture?) 
-> Void ) { self.context = context self.theme = theme self.peers = peers self.action = action + self.contextAction = contextAction } static func ==(lhs: ChannelListPanelComponent, rhs: ChannelListPanelComponent) -> Bool { @@ -832,7 +855,8 @@ final class ChannelListPanelComponent: Component { theme: component.theme, peer: item.peer, subtitle: subtitle, - action: component.action + action: component.action, + contextAction: component.contextAction )), environment: {}, containerSize: CGSize(width: itemLayout.itemWidth, height: itemLayout.containerHeight) diff --git a/submodules/TelegramUI/Components/Chat/ChatQrCodeScreen/Sources/ChatQrCodeScreen.swift b/submodules/TelegramUI/Components/Chat/ChatQrCodeScreen/Sources/ChatQrCodeScreen.swift index 7afd3e5423..deba00c493 100644 --- a/submodules/TelegramUI/Components/Chat/ChatQrCodeScreen/Sources/ChatQrCodeScreen.swift +++ b/submodules/TelegramUI/Components/Chat/ChatQrCodeScreen/Sources/ChatQrCodeScreen.swift @@ -1850,7 +1850,7 @@ private class QrContentNode: ASDisplayNode, ContentNode { func update(theme: PresentationTheme, wallpaper: TelegramWallpaper, isDarkAppearance: Bool, selectedEmoticon: String?) { self.currentParams = (theme, wallpaper, isDarkAppearance, selectedEmoticon) - self.wallpaperBackgroundNode.update(wallpaper: wallpaper) + self.wallpaperBackgroundNode.update(wallpaper: wallpaper, animated: false) self.codeForegroundDimNode.alpha = isDarkAppearance ? 0.5 : 0.3 @@ -2185,7 +2185,7 @@ private class MessageContentNode: ASDisplayNode, ContentNode { func update(theme: PresentationTheme, wallpaper: TelegramWallpaper, isDarkAppearance: Bool, selectedEmoticon: String?) { self.currentParams = (theme, wallpaper, isDarkAppearance, selectedEmoticon) - self.wallpaperBackgroundNode.update(wallpaper: wallpaper) + self.wallpaperBackgroundNode.update(wallpaper: wallpaper, animated: false) self.linkBackgroundDimNode.alpha = isDarkAppearance ? 
0.6 : 0.2 diff --git a/submodules/TelegramUI/Components/Chat/ChatRecentActionsController/Sources/ChatRecentActionsControllerNode.swift b/submodules/TelegramUI/Components/Chat/ChatRecentActionsController/Sources/ChatRecentActionsControllerNode.swift index ea1badefea..a04614a737 100644 --- a/submodules/TelegramUI/Components/Chat/ChatRecentActionsController/Sources/ChatRecentActionsControllerNode.swift +++ b/submodules/TelegramUI/Components/Chat/ChatRecentActionsController/Sources/ChatRecentActionsControllerNode.swift @@ -570,6 +570,7 @@ final class ChatRecentActionsControllerNode: ViewControllerTracingNode { }, openNoAdsDemo: { }, displayGiveawayParticipationStatus: { _ in }, openPremiumStatusInfo: { _, _, _, _ in + }, openRecommendedChannelContextMenu: { _, _, _ in }, requestMessageUpdate: { _, _ in }, cancelInteractiveKeyboardGestures: { }, dismissTextInput: { @@ -663,7 +664,7 @@ final class ChatRecentActionsControllerNode: ViewControllerTracingNode { self.chatPresentationData = ChatPresentationData(theme: ChatPresentationThemeData(theme: presentationData.theme, wallpaper: presentationData.chatWallpaper), fontSize: presentationData.chatFontSize, strings: presentationData.strings, dateTimeFormat: presentationData.dateTimeFormat, nameDisplayOrder: presentationData.nameDisplayOrder, disableAnimations: true, largeEmoji: presentationData.largeEmoji, chatBubbleCorners: presentationData.chatBubbleCorners) self.chatPresentationDataPromise.set(.single(self.chatPresentationData)) - self.backgroundNode.update(wallpaper: presentationData.chatWallpaper) + self.backgroundNode.update(wallpaper: presentationData.chatWallpaper, animated: false) self.backgroundNode.updateBubbleTheme(bubbleTheme: presentationData.theme, bubbleCorners: presentationData.chatBubbleCorners) self.panelBackgroundNode.updateColor(color: presentationData.theme.chat.inputPanel.panelBackgroundColor, transition: .immediate) diff --git 
a/submodules/TelegramUI/Components/ChatControllerInteraction/Sources/ChatControllerInteraction.swift b/submodules/TelegramUI/Components/ChatControllerInteraction/Sources/ChatControllerInteraction.swift index df30358951..894cb5ad0f 100644 --- a/submodules/TelegramUI/Components/ChatControllerInteraction/Sources/ChatControllerInteraction.swift +++ b/submodules/TelegramUI/Components/ChatControllerInteraction/Sources/ChatControllerInteraction.swift @@ -230,6 +230,7 @@ public final class ChatControllerInteraction: ChatControllerInteractionProtocol public let openNoAdsDemo: () -> Void public let displayGiveawayParticipationStatus: (EngineMessage.Id) -> Void public let openPremiumStatusInfo: (EnginePeer.Id, UIView, Int64?, PeerNameColor) -> Void + public let openRecommendedChannelContextMenu: (EnginePeer, UIView, ContextGesture?) -> Void public let requestMessageUpdate: (MessageId, Bool) -> Void public let cancelInteractiveKeyboardGestures: () -> Void @@ -349,6 +350,7 @@ public final class ChatControllerInteraction: ChatControllerInteractionProtocol openNoAdsDemo: @escaping () -> Void, displayGiveawayParticipationStatus: @escaping (EngineMessage.Id) -> Void, openPremiumStatusInfo: @escaping (EnginePeer.Id, UIView, Int64?, PeerNameColor) -> Void, + openRecommendedChannelContextMenu: @escaping (EnginePeer, UIView, ContextGesture?) 
-> Void, requestMessageUpdate: @escaping (MessageId, Bool) -> Void, cancelInteractiveKeyboardGestures: @escaping () -> Void, dismissTextInput: @escaping () -> Void, @@ -450,6 +452,7 @@ public final class ChatControllerInteraction: ChatControllerInteractionProtocol self.openNoAdsDemo = openNoAdsDemo self.displayGiveawayParticipationStatus = displayGiveawayParticipationStatus self.openPremiumStatusInfo = openPremiumStatusInfo + self.openRecommendedChannelContextMenu = openRecommendedChannelContextMenu self.requestMessageUpdate = requestMessageUpdate self.cancelInteractiveKeyboardGestures = cancelInteractiveKeyboardGestures self.dismissTextInput = dismissTextInput diff --git a/submodules/TelegramUI/Components/LegacyMessageInputPanel/Sources/LegacyMessageInputPanel.swift b/submodules/TelegramUI/Components/LegacyMessageInputPanel/Sources/LegacyMessageInputPanel.swift index 3ea69b63a5..1cd12e7d6a 100644 --- a/submodules/TelegramUI/Components/LegacyMessageInputPanel/Sources/LegacyMessageInputPanel.swift +++ b/submodules/TelegramUI/Components/LegacyMessageInputPanel/Sources/LegacyMessageInputPanel.swift @@ -249,6 +249,7 @@ public class LegacyMessageInputPanelNode: ASDisplayNode, TGCaptionPanelView { audioRecorder: nil, videoRecordingStatus: nil, isRecordingLocked: false, + hasRecordedVideo: false, recordedAudioPreview: nil, hasRecordedVideoPreview: false, wasRecordingDismissed: false, @@ -261,6 +262,7 @@ public class LegacyMessageInputPanelNode: ASDisplayNode, TGCaptionPanelView { customInputView: self.inputView, forceIsEditing: false, disabledPlaceholder: nil, + header: nil, isChannel: false, storyItem: nil, chatLocation: self.chatLocation diff --git a/submodules/TelegramUI/Components/MediaEditor/Sources/ImageTextureSource.swift b/submodules/TelegramUI/Components/MediaEditor/Sources/ImageTextureSource.swift index 75ae7912bf..577371339e 100644 --- a/submodules/TelegramUI/Components/MediaEditor/Sources/ImageTextureSource.swift +++ 
b/submodules/TelegramUI/Components/MediaEditor/Sources/ImageTextureSource.swift @@ -5,42 +5,8 @@ import MetalKit import Display import Accelerate -func loadTexture(image: UIImage, device: MTLDevice) -> MTLTexture? { - func dataForImage(_ image: UIImage) -> UnsafeMutablePointer { - let imageRef = image.cgImage - let width = Int(image.size.width) - let height = Int(image.size.height) - let colorSpace = CGColorSpaceCreateDeviceRGB() - - let rawData = UnsafeMutablePointer.allocate(capacity: width * height * 4) - let bytePerPixel = 4 - let bytesPerRow = bytePerPixel * Int(width) - let bitsPerComponent = 8 - let bitmapInfo = CGBitmapInfo.byteOrder32Little.rawValue + CGImageAlphaInfo.premultipliedFirst.rawValue - let context = CGContext.init(data: rawData, width: width, height: height, bitsPerComponent: bitsPerComponent, bytesPerRow: bytesPerRow, space: colorSpace, bitmapInfo: bitmapInfo) - context?.draw(imageRef!, in: CGRect(x: 0, y: 0, width: width, height: height)) - - return rawData - } - - let width = Int(image.size.width * image.scale) - let height = Int(image.size.height * image.scale) - let bytePerPixel = 4 - let bytesPerRow = bytePerPixel * width - - var texture : MTLTexture? - let region = MTLRegionMake2D(0, 0, Int(width), Int(height)) - let textureDescriptor = MTLTextureDescriptor.texture2DDescriptor(pixelFormat: .bgra8Unorm, width: width, height: height, mipmapped: false) - texture = device.makeTexture(descriptor: textureDescriptor) - - let data = dataForImage(image) - texture?.replace(region: region, mipmapLevel: 0, withBytes: data, bytesPerRow: bytesPerRow) - - return texture -} - final class ImageTextureSource: TextureSource { - weak var output: TextureConsumer? + weak var output: MediaEditorRenderer? var texture: MTLTexture? 
@@ -50,10 +16,10 @@ final class ImageTextureSource: TextureSource { } } - func connect(to consumer: TextureConsumer) { + func connect(to consumer: MediaEditorRenderer) { self.output = consumer if let texture = self.texture { - self.output?.consumeTexture(texture, render: false) + self.output?.consume(main: .texture(texture, .zero), additional: nil, render: false) } } @@ -61,36 +27,3 @@ final class ImageTextureSource: TextureSource { self.texture = nil } } - -func pixelBufferToMTLTexture(pixelBuffer: CVPixelBuffer, textureCache: CVMetalTextureCache) -> MTLTexture? { - let width = CVPixelBufferGetWidth(pixelBuffer) - let height = CVPixelBufferGetHeight(pixelBuffer) - - let format: MTLPixelFormat = .r8Unorm - var textureRef : CVMetalTexture? - let status = CVMetalTextureCacheCreateTextureFromImage(nil, textureCache, pixelBuffer, nil, format, width, height, 0, &textureRef) - if status == kCVReturnSuccess { - return CVMetalTextureGetTexture(textureRef!) - } - - return nil -} - -func getTextureImage(device: MTLDevice, texture: MTLTexture, mirror: Bool = false) -> UIImage? 
{ - let colorSpace = CGColorSpaceCreateDeviceRGB() - let context = CIContext(mtlDevice: device, options: [:]) - guard var ciImage = CIImage(mtlTexture: texture, options: [.colorSpace: colorSpace]) else { - return nil - } - let transform: CGAffineTransform - if mirror { - transform = CGAffineTransform(-1.0, 0.0, 0.0, -1.0, ciImage.extent.width, ciImage.extent.height) - } else { - transform = CGAffineTransform(1.0, 0.0, 0.0, -1.0, 0.0, ciImage.extent.height) - } - ciImage = ciImage.transformed(by: transform) - guard let cgImage = context.createCGImage(ciImage, from: CGRect(origin: .zero, size: CGSize(width: ciImage.extent.width, height: ciImage.extent.height))) else { - return nil - } - return UIImage(cgImage: cgImage) -} diff --git a/submodules/TelegramUI/Components/MediaEditor/Sources/MediaEditor.swift b/submodules/TelegramUI/Components/MediaEditor/Sources/MediaEditor.swift index 18146fd1f5..ab8242880a 100644 --- a/submodules/TelegramUI/Components/MediaEditor/Sources/MediaEditor.swift +++ b/submodules/TelegramUI/Components/MediaEditor/Sources/MediaEditor.swift @@ -12,19 +12,77 @@ import FastBlur import AccountContext public struct MediaEditorPlayerState { + public struct Track: Equatable { + public enum Content: Equatable { + case video(frames: [UIImage], framesUpdateTimestamp: Double) + case audio(artist: String?, title: String?, samples: Data?, peak: Int32) + + public static func ==(lhs: Content, rhs: Content) -> Bool { + switch lhs { + case let .video(_, framesUpdateTimestamp): + if case .video(_, framesUpdateTimestamp) = rhs { + return true + } else { + return false + } + case let .audio(lhsArtist, lhsTitle, lhsSamples, lhsPeak): + if case let .audio(rhsArtist, rhsTitle, rhsSamples, rhsPeak) = rhs { + return lhsArtist == rhsArtist && lhsTitle == rhsTitle && lhsSamples == rhsSamples && lhsPeak == rhsPeak + } else { + return false + } + } + } + } + + public let id: Int32 + public let content: Content + public let duration: Double + public let trimRange: Range? 
+ public let offset: Double? + public let isMain: Bool + public let visibleInTimeline: Bool + } + public let generationTimestamp: Double - public let duration: Double - public let timeRange: Range? + public let tracks: [Track] public let position: Double public let isPlaying: Bool - public let frames: [UIImage] - public let framesCount: Int - public let framesUpdateTimestamp: Double - public let hasAudio: Bool - public let isAudioPlayerOnly: Bool + + public var isAudioOnly: Bool { + var hasVideoTrack = false + var hasAudioTrack = false + for track in tracks { + switch track.content { + case .video: + hasVideoTrack = true + case .audio: + hasAudioTrack = true + } + } + return !hasVideoTrack && hasAudioTrack + } + + public var hasAudio: Bool { + return true + } } public final class MediaEditor { + public struct GradientColors { + public let top: UIColor + public let bottom: UIColor + + public init(top: UIColor, bottom: UIColor) { + self.top = top + self.bottom = bottom + } + + public var array: [UIColor] { + return [self.top, self.bottom] + } + } + public enum Subject { case image(UIImage, PixelDimensions) case video(String, UIImage?, Bool, String?, PixelDimensions, Double) @@ -47,13 +105,14 @@ public final class MediaEditor { private let subject: Subject private let clock = CMClockGetHostTimeClock() + private var player: AVPlayer? private var additionalPlayer: AVPlayer? private var audioPlayer: AVPlayer? + private var volumeFadeIn: SwiftSignalKit.Timer? private var timeObserver: Any? private weak var timeObserverPlayer: AVPlayer? - private var didPlayToEndTimeObserver: NSObjectProtocol? private weak var previewView: MediaEditorPreviewView? @@ -76,15 +135,15 @@ public final class MediaEditor { private var textureSourceDisposable: Disposable? 
- private let gradientColorsPromise = Promise<(UIColor, UIColor)?>() - public var gradientColors: Signal<(UIColor, UIColor)?, NoError> { - return self.gradientColorsPromise.get() - } - private var gradientColorsValue: (UIColor, UIColor)? { + private let gradientColorsPromise = Promise() + private var gradientColorsValue: GradientColors? { didSet { self.gradientColorsPromise.set(.single(self.gradientColorsValue)) } } + public var gradientColors: Signal { + return self.gradientColorsPromise.get() + } private let histogramPromise = Promise() public var histogram: Signal { @@ -127,17 +186,40 @@ public final class MediaEditor { } private let playerPromise = Promise() - private var playerPlaybackState: (Double, Double, Bool, Bool, Bool) = (0.0, 0.0, false, false, false) { + private let additionalPlayerPromise = Promise(nil) + + private struct PlaybackState: Equatable { + let duration: Double + let position: Double + let isPlaying: Bool + let hasAudio: Bool + + init() { + self.duration = 0.0 + self.position = 0.0 + self.isPlaying = false + self.hasAudio = false + } + + init(duration: Double, position: Double, isPlaying: Bool, hasAudio: Bool) { + self.duration = duration + self.position = position + self.isPlaying = isPlaying + self.hasAudio = hasAudio + } + } + + private var playerPlaybackState: PlaybackState = PlaybackState() { didSet { self.playerPlaybackStatePromise.set(.single(self.playerPlaybackState)) } } - private let playerPlaybackStatePromise = Promise<(Double, Double, Bool, Bool, Bool)>((0.0, 0.0, false, false, false)) + private let playerPlaybackStatePromise = Promise(PlaybackState()) public var position: Signal { return self.playerPlaybackStatePromise.get() - |> map { _, position, _, _, _ -> Double in - return position + |> map { state -> Double in + return state.position } } @@ -146,7 +228,7 @@ public final class MediaEditor { if let trimRange = self.values.videoTrimRange { return trimRange.upperBound - trimRange.lowerBound } else { - return min(60.0, 
self.playerPlaybackState.0) + return min(60.0, self.playerPlaybackState.duration) } } else { return nil @@ -155,7 +237,7 @@ public final class MediaEditor { public var originalDuration: Double? { if let _ = self.player { - return min(60.0, self.playerPlaybackState.0) + return min(60.0, self.playerPlaybackState.duration) } else { return nil } @@ -164,42 +246,117 @@ public final class MediaEditor { public var onFirstDisplay: () -> Void = {} public func playerState(framesCount: Int) -> Signal { + let additionalFramesAndUpdateTimestamp = self.additionalPlayerPromise.get() + |> mapToSignal { player -> Signal<([UIImage], Double)?, NoError> in + if let player, let asset = player.currentItem?.asset { + return videoFrames(asset: asset, count: framesCount) + |> map(Optional.init) + } else { + return .single(nil) + } + } + + func artistAndTitleForTrack(_ audioTrack: MediaAudioTrack) -> (artist: String?, title: String?) { + let artist = audioTrack.artist + var title = audioTrack.title + if artist == nil && title == nil { + if let underscoreIndex = audioTrack.path.firstIndex(of: "_"), let dotIndex = audioTrack.path.lastIndex(of: ".") { + title = String(audioTrack.path[audioTrack.path.index(after: underscoreIndex).. 
mapToSignal { [weak self] player in if let self, player != nil { if player === self.player, let asset = player?.currentItem?.asset { - return combineLatest(self.valuesPromise.get(), self.playerPlaybackStatePromise.get(), self.videoFrames(asset: asset, count: framesCount)) - |> map { values, durationAndPosition, framesAndUpdateTimestamp in - let (duration, position, isPlaying, hasAudio, isAudioPlayerOnly) = durationAndPosition - let (frames, framesUpdateTimestamp) = framesAndUpdateTimestamp + return combineLatest(self.valuesPromise.get(), self.playerPlaybackStatePromise.get(), videoFrames(asset: asset, count: framesCount), additionalFramesAndUpdateTimestamp) + |> map { values, playbackState, framesAndUpdateTimestamp, additionalFramesAndUpdateTimestamp in + var tracks: [MediaEditorPlayerState.Track] = [] + tracks.append(MediaEditorPlayerState.Track( + id: 0, + content: .video( + frames: framesAndUpdateTimestamp.0, + framesUpdateTimestamp: framesAndUpdateTimestamp.1 + ), + duration: playbackState.duration, + trimRange: values.videoTrimRange, + offset: nil, + isMain: true, + visibleInTimeline: true + )) + + if let additionalFramesAndUpdateTimestamp { + tracks.append(MediaEditorPlayerState.Track( + id: 1, + content: .video( + frames: additionalFramesAndUpdateTimestamp.0, + framesUpdateTimestamp: additionalFramesAndUpdateTimestamp.1 + ), + duration: playbackState.duration, + trimRange: values.additionalVideoTrimRange, + offset: values.additionalVideoOffset, + isMain: false, + visibleInTimeline: true + )) + } + + if let audioTrack = values.audioTrack { + let (artist, title) = artistAndTitleForTrack(audioTrack) + tracks.append(MediaEditorPlayerState.Track( + id: 2, + content: .audio( + artist: artist, + title: title, + samples: values.audioTrackSamples?.samples, + peak: values.audioTrackSamples?.peak ?? 
0 + ), + duration: audioTrack.duration, + trimRange: values.audioTrackTrimRange, + offset: values.audioTrackOffset, + isMain: false, + visibleInTimeline: true + )) + } + return MediaEditorPlayerState( generationTimestamp: CACurrentMediaTime(), - duration: duration, - timeRange: values.videoTrimRange, - position: position, - isPlaying: isPlaying, - frames: frames, - framesCount: framesCount, - framesUpdateTimestamp: framesUpdateTimestamp, - hasAudio: hasAudio, - isAudioPlayerOnly: isAudioPlayerOnly + tracks: tracks, + position: playbackState.position, + isPlaying: playbackState.isPlaying ) } } else if player === self.audioPlayer { return combineLatest(self.valuesPromise.get(), self.playerPlaybackStatePromise.get()) - |> map { values, durationAndPosition in - let (duration, position, isPlaying, _, _) = durationAndPosition + |> map { values, playbackState in + var tracks: [MediaEditorPlayerState.Track] = [] + + if let audioTrack = values.audioTrack { + let (artist, title) = artistAndTitleForTrack(audioTrack) + tracks.append(MediaEditorPlayerState.Track( + id: 0, + content: .audio( + artist: artist, + title: title, + samples: values.audioTrackSamples?.samples, + peak: values.audioTrackSamples?.peak ?? 0 + ), + duration: audioTrack.duration, + trimRange: values.audioTrackTrimRange, + offset: values.audioTrackOffset, + isMain: true, + visibleInTimeline: true + )) + } + return MediaEditorPlayerState( generationTimestamp: CACurrentMediaTime(), - duration: duration, - timeRange: values.audioTrackTrimRange, - position: position, - isPlaying: isPlaying, - frames: [], - framesCount: 0, - framesUpdateTimestamp: 0, - hasAudio: false, - isAudioPlayerOnly: true + tracks: tracks, + position: playbackState.position, + isPlaying: playbackState.isPlaying ) } } else { @@ -211,88 +368,6 @@ public final class MediaEditor { } } - public func videoFrames(asset: AVAsset, count: Int) -> Signal<([UIImage], Double), NoError> { - func blurredImage(_ image: UIImage) -> UIImage? 
{ - guard let image = image.cgImage else { - return nil - } - - let thumbnailSize = CGSize(width: image.width, height: image.height) - let thumbnailContextSize = thumbnailSize.aspectFilled(CGSize(width: 20.0, height: 20.0)) - if let thumbnailContext = DrawingContext(size: thumbnailContextSize, scale: 1.0) { - thumbnailContext.withFlippedContext { c in - c.interpolationQuality = .none - c.draw(image, in: CGRect(origin: CGPoint(), size: thumbnailContextSize)) - } - imageFastBlur(Int32(thumbnailContextSize.width), Int32(thumbnailContextSize.height), Int32(thumbnailContext.bytesPerRow), thumbnailContext.bytes) - - let thumbnailContext2Size = thumbnailSize.aspectFitted(CGSize(width: 100.0, height: 100.0)) - if let thumbnailContext2 = DrawingContext(size: thumbnailContext2Size, scale: 1.0) { - thumbnailContext2.withFlippedContext { c in - c.interpolationQuality = .none - if let image = thumbnailContext.generateImage()?.cgImage { - c.draw(image, in: CGRect(origin: CGPoint(), size: thumbnailContext2Size)) - } - } - imageFastBlur(Int32(thumbnailContext2Size.width), Int32(thumbnailContext2Size.height), Int32(thumbnailContext2.bytesPerRow), thumbnailContext2.bytes) - return thumbnailContext2.generateImage() - } - } - return nil - } - - guard count > 0 else { - return .complete() - } - let scale = UIScreen.main.scale - let imageGenerator = AVAssetImageGenerator(asset: asset) - imageGenerator.maximumSize = CGSize(width: 48.0 * scale, height: 36.0 * scale) - imageGenerator.appliesPreferredTrackTransform = true - imageGenerator.requestedTimeToleranceBefore = .zero - imageGenerator.requestedTimeToleranceAfter = .zero - - var firstFrame: UIImage - if let cgImage = try? imageGenerator.copyCGImage(at: .zero, actualTime: nil) { - firstFrame = UIImage(cgImage: cgImage) - if let blurred = blurredImage(firstFrame) { - firstFrame = blurred - } - } else { - firstFrame = generateSingleColorImage(size: CGSize(width: 24.0, height: 36.0), color: .black)! 
- } - return Signal { subscriber in - subscriber.putNext((Array(repeating: firstFrame, count: count), CACurrentMediaTime())) - - var timestamps: [NSValue] = [] - let duration = asset.duration.seconds - let interval = duration / Double(count) - for i in 0 ..< count { - timestamps.append(NSValue(time: CMTime(seconds: Double(i) * interval, preferredTimescale: CMTimeScale(1000)))) - } - - var updatedFrames: [UIImage] = [] - imageGenerator.generateCGImagesAsynchronously(forTimes: timestamps) { _, image, _, _, _ in - if let image { - updatedFrames.append(UIImage(cgImage: image)) - if updatedFrames.count == count { - subscriber.putNext((updatedFrames, CACurrentMediaTime())) - subscriber.putCompletion() - } else { - var tempFrames = updatedFrames - for _ in 0 ..< count - updatedFrames.count { - tempFrames.append(firstFrame) - } - subscriber.putNext((tempFrames, CACurrentMediaTime())) - } - } - } - - return ActionDisposable { - imageGenerator.cancelAllCGImageGeneration() - } - } - } - public init(context: AccountContext, subject: Subject, values: MediaEditorValues? 
= nil, hasHistogram: Bool = false) { self.context = context self.subject = subject @@ -319,6 +394,9 @@ public final class MediaEditor { additionalVideoScale: nil, additionalVideoRotation: nil, additionalVideoPositionChanges: [], + additionalVideoTrimRange: nil, + additionalVideoOffset: nil, + additionalVideoVolume: nil, drawing: nil, entities: [], toolValues: [:], @@ -344,33 +422,17 @@ public final class MediaEditor { } if case let .asset(asset) = subject { - self.playerPlaybackState = (asset.duration, 0.0, false, false, false) + self.playerPlaybackState = PlaybackState(duration: asset.duration, position: 0.0, isPlaying: false, hasAudio: asset.mediaType == .video) self.playerPlaybackStatePromise.set(.single(self.playerPlaybackState)) } else if case let .video(_, _, _, _, _, duration) = subject { - self.playerPlaybackState = (duration, 0.0, false, true, false) + self.playerPlaybackState = PlaybackState(duration: duration, position: 0.0, isPlaying: false, hasAudio: true) self.playerPlaybackStatePromise.set(.single(self.playerPlaybackState)) } } deinit { self.textureSourceDisposable?.dispose() - self.destroyTimeObservers() - } - - private func destroyTimeObservers() { - if let timeObserver = self.timeObserver { - self.timeObserverPlayer?.removeTimeObserver(timeObserver) - - self.timeObserver = nil - self.timeObserverPlayer = nil - } - if let didPlayToEndTimeObserver = self.didPlayToEndTimeObserver { - NotificationCenter.default.removeObserver(didPlayToEndTimeObserver) - self.didPlayToEndTimeObserver = nil - } - - self.audioDelayTimer?.invalidate() - self.audioDelayTimer = nil + self.invalidateTimeObservers() } public func replaceSource(_ image: UIImage, additionalImage: UIImage?, time: CMTime) { @@ -378,10 +440,9 @@ public final class MediaEditor { return } let additionalTexture = additionalImage.flatMap { loadTexture(image: $0, device: device) } - self.renderer.consumeTexture(texture, additionalTexture: additionalTexture, time: time, render: true) + 
self.renderer.consume(main: .texture(texture, time), additional: additionalTexture.flatMap { .texture($0, time) }, render: true, displayEnabled: false) } - private var volumeFade: SwiftSignalKit.Timer? private func setupSource() { guard let renderTarget = self.previewView else { return @@ -393,11 +454,11 @@ public final class MediaEditor { let context = self.context let clock = self.clock - let textureSource: Signal<(TextureSource, UIImage?, AVPlayer?, AVPlayer?, UIColor, UIColor), NoError> + let textureSource: Signal<(UIImage?, AVPlayer?, AVPlayer?, GradientColors), NoError> switch subject { case let .image(image, _): let colors = mediaEditorGetGradientColors(from: image) - textureSource = .single((ImageTextureSource(image: image, renderTarget: renderTarget), image, nil, nil, colors.0, colors.1)) + textureSource = .single((image, nil, nil, colors)) case let .draft(draft): if draft.isVideo { textureSource = Signal { subscriber in @@ -414,8 +475,8 @@ public final class MediaEditor { player.automaticallyWaitsToMinimizeStalling = false if let gradientColors = draft.values.gradientColors { - let colors = (gradientColors.first!, gradientColors.last!) - subscriber.putNext((VideoTextureSource(player: player, additionalPlayer: nil, mirror: false, renderTarget: renderTarget), nil, player, nil, colors.0, colors.1)) + let colors = GradientColors(top: gradientColors.first!, bottom: gradientColors.last!) 
+ subscriber.putNext((nil, player, nil, colors)) subscriber.putCompletion() return EmptyDisposable @@ -424,12 +485,8 @@ public final class MediaEditor { imageGenerator.appliesPreferredTrackTransform = true imageGenerator.maximumSize = CGSize(width: 72, height: 128) imageGenerator.generateCGImagesAsynchronously(forTimes: [NSValue(time: CMTime(seconds: 0, preferredTimescale: CMTimeScale(30.0)))]) { _, image, _, _, _ in - if let image { - let colors = mediaEditorGetGradientColors(from: UIImage(cgImage: image)) - subscriber.putNext((VideoTextureSource(player: player, additionalPlayer: nil, mirror: false, renderTarget: renderTarget), nil, player, nil, colors.0, colors.1)) - } else { - subscriber.putNext((VideoTextureSource(player: player, additionalPlayer: nil, mirror: false, renderTarget: renderTarget), nil, player, nil, .black, .black)) - } + let colors: GradientColors = image.flatMap({ mediaEditorGetGradientColors(from: UIImage(cgImage: $0)) }) ?? GradientColors(top: .black, bottom: .black) + subscriber.putNext((nil, player, nil, colors)) subscriber.putCompletion() } return ActionDisposable { @@ -441,15 +498,16 @@ public final class MediaEditor { guard let image = UIImage(contentsOfFile: draft.fullPath(engine: context.engine)) else { return } - let colors: (UIColor, UIColor) + let colors: GradientColors if let gradientColors = draft.values.gradientColors { - colors = (gradientColors.first!, gradientColors.last!) + colors = GradientColors(top: gradientColors.first!, bottom: gradientColors.last!) 
} else { colors = mediaEditorGetGradientColors(from: image) } - textureSource = .single((ImageTextureSource(image: image, renderTarget: renderTarget), image, nil, nil, colors.0, colors.1)) + textureSource = .single((image, nil, nil, colors)) } case let .video(path, transitionImage, mirror, additionalPath, _, _): + let _ = mirror textureSource = Signal { subscriber in let asset = AVURLAsset(url: URL(fileURLWithPath: path)) let player = AVPlayer(playerItem: AVPlayerItem(asset: asset)) @@ -474,7 +532,8 @@ public final class MediaEditor { if let transitionImage { let colors = mediaEditorGetGradientColors(from: transitionImage) - subscriber.putNext((VideoTextureSource(player: player, additionalPlayer: additionalPlayer, mirror: mirror, renderTarget: renderTarget), nil, player, additionalPlayer, colors.0, colors.1)) + //TODO pass mirror + subscriber.putNext((nil, player, additionalPlayer, colors)) subscriber.putCompletion() return EmptyDisposable @@ -483,12 +542,9 @@ public final class MediaEditor { imageGenerator.appliesPreferredTrackTransform = true imageGenerator.maximumSize = CGSize(width: 72, height: 128) imageGenerator.generateCGImagesAsynchronously(forTimes: [NSValue(time: CMTime(seconds: 0, preferredTimescale: CMTimeScale(30.0)))]) { _, image, _, _, _ in - if let image { - let colors = mediaEditorGetGradientColors(from: UIImage(cgImage: image)) - subscriber.putNext((VideoTextureSource(player: player, additionalPlayer: additionalPlayer, mirror: mirror, renderTarget: renderTarget), nil, player, additionalPlayer, colors.0, colors.1)) - } else { - subscriber.putNext((VideoTextureSource(player: player, additionalPlayer: additionalPlayer, mirror: mirror, renderTarget: renderTarget), nil, player, additionalPlayer, .black, .black)) - } + let colors: GradientColors = image.flatMap({ mediaEditorGetGradientColors(from: UIImage(cgImage: $0)) }) ?? 
GradientColors(top: .black, bottom: .black) + //TODO pass mirror + subscriber.putNext((nil, player, additionalPlayer, colors)) subscriber.putCompletion() } return ActionDisposable { @@ -515,7 +571,15 @@ public final class MediaEditor { let playerItem = AVPlayerItem(asset: asset) let player = AVPlayer(playerItem: playerItem) player.automaticallyWaitsToMinimizeStalling = false - subscriber.putNext((VideoTextureSource(player: player, additionalPlayer: nil, mirror: false, renderTarget: renderTarget), nil, player, nil, colors.0, colors.1)) + + #if targetEnvironment(simulator) + let additionalPlayerItem = AVPlayerItem(asset: asset) + let additionalPlayer = AVPlayer(playerItem: additionalPlayerItem) + additionalPlayer.automaticallyWaitsToMinimizeStalling = false + subscriber.putNext((nil, player, additionalPlayer, colors)) + #else + subscriber.putNext((nil, player, nil, colors)) + #endif subscriber.putCompletion() } }) @@ -541,7 +605,7 @@ public final class MediaEditor { } if !degraded { let colors = mediaEditorGetGradientColors(from: image) - subscriber.putNext((ImageTextureSource(image: image, renderTarget: renderTarget), image, nil, nil, colors.0, colors.1)) + subscriber.putNext((image, nil, nil, colors)) subscriber.putCompletion() } } @@ -556,17 +620,35 @@ public final class MediaEditor { self.textureSourceDisposable = (textureSource |> deliverOnMainQueue).start(next: { [weak self] sourceAndColors in if let self { - let (source, image, player, additionalPlayer, topColor, bottomColor) = sourceAndColors + let (image, player, additionalPlayer, colors) = sourceAndColors self.renderer.onNextRender = { [weak self] in self?.onFirstDisplay() } - self.renderer.textureSource = source + + let textureSource = UniversalTextureSource(renderTarget: renderTarget) + self.player = player - self.additionalPlayer = additionalPlayer - self.playerPromise.set(.single(player)) - self.gradientColorsValue = (topColor, bottomColor) - self.setGradientColors([topColor, bottomColor]) + + 
self.additionalPlayer = additionalPlayer + self.additionalPlayerPromise.set(.single(additionalPlayer)) + + if let image { + textureSource.setMainInput(.image(image)) + } + if let player, let playerItem = player.currentItem { + textureSource.setMainInput(.video(playerItem)) + } + if let additionalPlayer, let playerItem = additionalPlayer.currentItem { + if self.values.additionalVideoPath == nil { + self.values = self.values.withUpdatedAdditionalVideo(path: "", positionChanges: []) + } + textureSource.setAdditionalInput(.video(playerItem)) + } + self.renderer.textureSource = textureSource + + self.gradientColorsValue = colors + self.setGradientColors(colors.array) if player == nil { self.updateRenderChain() @@ -582,8 +664,8 @@ public final class MediaEditor { if let player { player.isMuted = self.values.videoIsMuted if let trimRange = self.values.videoTrimRange { - self.player?.currentItem?.forwardPlaybackEndTime = CMTime(seconds: trimRange.upperBound, preferredTimescale: CMTimeScale(1000)) - self.additionalPlayer?.currentItem?.forwardPlaybackEndTime = CMTime(seconds: trimRange.upperBound, preferredTimescale: CMTimeScale(1000)) + player.currentItem?.forwardPlaybackEndTime = CMTime(seconds: trimRange.upperBound, preferredTimescale: CMTimeScale(1000)) + additionalPlayer?.currentItem?.forwardPlaybackEndTime = CMTime(seconds: trimRange.upperBound, preferredTimescale: CMTimeScale(1000)) } if let initialSeekPosition = self.initialSeekPosition { @@ -600,7 +682,7 @@ public final class MediaEditor { additionalPlayer?.playImmediately(atRate: 1.0) self.audioPlayer?.playImmediately(atRate: 1.0) self.onPlaybackAction(.play) - self.volumeFade = self.player?.fadeVolume(from: 0.0, to: 1.0, duration: 0.4) + self.volumeFadeIn = player.fadeVolume(from: 0.0, to: 1.0, duration: 0.4) } if let audioPlayer = self.audioPlayer, audioPlayer.status != .readyToPlay { Queue.mainQueue().after(0.1) { @@ -630,12 +712,8 @@ public final class MediaEditor { private func setupTimeObservers() { var 
observedPlayer = self.player - var isAudioPlayerOnly = false if observedPlayer == nil { observedPlayer = self.audioPlayer - if observedPlayer != nil { - isAudioPlayerOnly = true - } } guard let observedPlayer else { return @@ -654,7 +732,7 @@ public final class MediaEditor { if time.seconds > 20000 { } else { - self.playerPlaybackState = (duration, time.seconds, observedPlayer.rate > 0.0, hasAudio, isAudioPlayerOnly) + self.playerPlaybackState = PlaybackState(duration: duration, position: time.seconds, isPlaying: observedPlayer.rate > 0.0, hasAudio: hasAudio) } } } @@ -670,11 +748,11 @@ public final class MediaEditor { } let targetTime = CMTime(seconds: start, preferredTimescale: CMTimeScale(1000)) self.player?.seek(to: targetTime) - self.additionalPlayer?.seek(to: targetTime) +// self.additionalPlayer?.seek(to: targetTime) self.onPlaybackAction(.seek(start)) self.player?.play() - self.additionalPlayer?.play() +// self.additionalPlayer?.play() if self.sourceIsVideo { let audioTime = self.audioTime(for: targetTime) @@ -689,6 +767,20 @@ public final class MediaEditor { self.audioPlayer?.seek(to: audioTime) self.audioPlayer?.play() } + + + let videoTime = self.videoTime(for: targetTime) + if let videoDelay = self.videoDelay(for: targetTime) { + self.additionalPlayer?.pause() + self.videoDelayTimer = SwiftSignalKit.Timer(timeout: videoDelay, repeat: false, completion: { [weak self] in + self?.additionalPlayer?.seek(to: videoTime, toleranceBefore: .zero, toleranceAfter: .zero) + self?.additionalPlayer?.play() + }, queue: Queue.mainQueue()) + self.videoDelayTimer?.start() + } else { + self.additionalPlayer?.seek(to: videoTime) + self.additionalPlayer?.play() + } } else { self.audioPlayer?.seek(to: targetTime) self.audioPlayer?.play() @@ -702,8 +794,23 @@ public final class MediaEditor { } } - private func setupDidPlayToEndObserver() { - + private func invalidateTimeObservers() { + if let timeObserver = self.timeObserver { + 
self.timeObserverPlayer?.removeTimeObserver(timeObserver) + + self.timeObserver = nil + self.timeObserverPlayer = nil + } + if let didPlayToEndTimeObserver = self.didPlayToEndTimeObserver { + NotificationCenter.default.removeObserver(didPlayToEndTimeObserver) + self.didPlayToEndTimeObserver = nil + } + + self.videoDelayTimer?.invalidate() + self.videoDelayTimer = nil + + self.audioDelayTimer?.invalidate() + self.audioDelayTimer = nil } public func attachPreviewView(_ previewView: MediaEditorPreviewView) { @@ -814,7 +921,7 @@ public final class MediaEditor { } if play { self.player?.play() - self.additionalPlayer?.play() +// self.additionalPlayer?.play() if self.sourceIsVideo { let audioTime = self.audioTime(for: targetPosition) @@ -828,9 +935,24 @@ public final class MediaEditor { self.audioPlayer?.seek(to: audioTime, toleranceBefore: .zero, toleranceAfter: .zero) self.audioPlayer?.play() } + + let videoTime = self.videoTime(for: targetPosition) + if let videoDelay = self.videoDelay(for: targetPosition) { + self.videoDelayTimer = SwiftSignalKit.Timer(timeout: videoDelay, repeat: false, completion: { [weak self] in + self?.additionalPlayer?.seek(to: videoTime, toleranceBefore: .zero, toleranceAfter: .zero) + self?.additionalPlayer?.play() + }, queue: Queue.mainQueue()) + self.videoDelayTimer?.start() + } else { + self.additionalPlayer?.seek(to: videoTime, toleranceBefore: .zero, toleranceAfter: .zero) + self.additionalPlayer?.play() + } } else { self.audioPlayer?.seek(to: targetPosition, toleranceBefore: .zero, toleranceAfter: .zero) self.audioPlayer?.play() + + self.additionalPlayer?.seek(to: targetPosition, toleranceBefore: .zero, toleranceAfter: .zero) + self.additionalPlayer?.play() } self.onPlaybackAction(.play) @@ -852,9 +974,13 @@ public final class MediaEditor { completion() } }) - - self.additionalPlayer?.seek(to: targetPosition, toleranceBefore: .zero, toleranceAfter: .zero) + if let _ = self.videoDelay(for: targetPosition) { + + } else { + 
self.additionalPlayer?.seek(to: self.videoTime(for: targetPosition), toleranceBefore: .zero, toleranceAfter: .zero) + } + if let _ = self.audioDelay(for: targetPosition) { } else { @@ -899,6 +1025,49 @@ public final class MediaEditor { } } + + + /// + + private var videoDelayTimer: SwiftSignalKit.Timer? + private func videoDelay(for time: CMTime) -> Double? { + var time = time + if time == .invalid { + time = .zero + } + let mainStart = self.values.videoTrimRange?.lowerBound ?? 0.0 + var trackStart = self.values.additionalVideoTrimRange?.lowerBound ?? 0.0 + if let offset = self.values.additionalVideoOffset, offset < 0.0 { + trackStart -= offset + } + if trackStart - mainStart > 0.0 { + let delay = trackStart - time.seconds + if delay > 0 { + return delay + } + } + return nil + } + + private func videoTime(for time: CMTime) -> CMTime { + var time = time + if time == .invalid { + time = .zero + } + let seconds = time.seconds + + let offset = self.values.additionalVideoOffset ?? 0.0 + let trackOffset = max(0.0, offset) + let trackStart = self.values.additionalVideoTrimRange?.lowerBound ?? 0.0 + if seconds < trackStart - min(0.0, offset) { + return CMTime(seconds: trackOffset + trackStart, preferredTimescale: CMTimeScale(1000.0)) + } else { + return CMTime(seconds: trackOffset + seconds + min(0.0, offset), preferredTimescale: CMTimeScale(1000.0)) + } + } + + /// + public var isPlaying: Bool { let effectivePlayer = self.player ?? self.audioPlayer return (effectivePlayer?.rate ?? 0.0) > 0.0 @@ -941,33 +1110,54 @@ public final class MediaEditor { } } else { let itemTime = self.player?.currentItem?.currentTime() ?? 
.invalid + let videoTime = self.videoTime(for: itemTime) let audioTime = self.audioTime(for: itemTime) self.player?.setRate(rate, time: itemTime, atHostTime: futureTime) - self.additionalPlayer?.setRate(rate, time: itemTime, atHostTime: futureTime) +// self.additionalPlayer?.setRate(rate, time: itemTime, atHostTime: futureTime) - if let audioPlayer = self.audioPlayer { - if rate > 0.0, let audioDelay = self.audioDelay(for: itemTime) { - self.audioDelayTimer = SwiftSignalKit.Timer(timeout: audioDelay, repeat: false, completion: { [weak self] in - self?.audioPlayer?.seek(to: audioTime, toleranceBefore: .zero, toleranceAfter: .zero) - self?.audioPlayer?.play() - }, queue: Queue.mainQueue()) - self.audioDelayTimer?.start() - } else { - if audioPlayer.status == .readyToPlay { - audioPlayer.setRate(rate, time: audioTime, atHostTime: futureTime) - if rate > 0.0 { -// audioPlayer.seek(to: audioTime, toleranceBefore: .zero, toleranceAfter: .zero) - audioPlayer.play() - } + + if let additionalPlayer = self.additionalPlayer { + if rate > 0.0 { + if let videoDelay = self.videoDelay(for: itemTime) { + self.videoDelayTimer = SwiftSignalKit.Timer(timeout: videoDelay, repeat: false, completion: { [weak self] in + self?.additionalPlayer?.seek(to: videoTime, toleranceBefore: .zero, toleranceAfter: .zero) + self?.additionalPlayer?.play() + }, queue: Queue.mainQueue()) + self.videoDelayTimer?.start() } else { - audioPlayer.seek(to: audioTime, toleranceBefore: .zero, toleranceAfter: .zero) - if rate > 0.0 { - audioPlayer.play() + if additionalPlayer.status == .readyToPlay { + additionalPlayer.setRate(rate, time: videoTime, atHostTime: futureTime) + additionalPlayer.play() } else { - audioPlayer.pause() + additionalPlayer.seek(to: videoTime, toleranceBefore: .zero, toleranceAfter: .zero) + additionalPlayer.play() } } + } else { + additionalPlayer.pause() + } + } + + if let audioPlayer = self.audioPlayer { + if rate > 0.0 { + if let audioDelay = self.audioDelay(for: itemTime) { + 
self.audioDelayTimer = SwiftSignalKit.Timer(timeout: audioDelay, repeat: false, completion: { [weak self] in + self?.audioPlayer?.seek(to: audioTime, toleranceBefore: .zero, toleranceAfter: .zero) + self?.audioPlayer?.play() + }, queue: Queue.mainQueue()) + self.audioDelayTimer?.start() + } else { + if audioPlayer.status == .readyToPlay { + audioPlayer.setRate(rate, time: audioTime, atHostTime: futureTime) + audioPlayer.play() + } else { + audioPlayer.seek(to: audioTime, toleranceBefore: .zero, toleranceAfter: .zero) + audioPlayer.play() + } + } + } else { + audioPlayer.pause() } } } @@ -977,6 +1167,9 @@ public final class MediaEditor { } else { self.onPlaybackAction(.pause) + self.videoDelayTimer?.invalidate() + self.videoDelayTimer = nil + self.audioDelayTimer?.invalidate() self.audioDelayTimer = nil } @@ -991,6 +1184,9 @@ public final class MediaEditor { self.audioDelayTimer?.invalidate() self.audioDelayTimer = nil + + self.videoDelayTimer?.invalidate() + self.videoDelayTimer = nil } private func updateVideoTimePosition() { @@ -1022,7 +1218,10 @@ public final class MediaEditor { } }) - self.additionalPlayer?.seek(to: targetPosition, toleranceBefore: .zero, toleranceAfter: .zero) + if let _ = self.videoDelay(for: targetPosition) { + } else { + self.additionalPlayer?.seek(to: self.videoTime(for: targetPosition), toleranceBefore: .zero, toleranceAfter: .zero) + } if let _ = self.audioDelay(for: targetPosition) { } else { @@ -1039,11 +1238,32 @@ public final class MediaEditor { if apply { self.player?.currentItem?.forwardPlaybackEndTime = CMTime(seconds: trimRange.upperBound, preferredTimescale: CMTimeScale(1000)) - self.additionalPlayer?.currentItem?.forwardPlaybackEndTime = CMTime(seconds: trimRange.upperBound, preferredTimescale: CMTimeScale(1000)) +// self.additionalPlayer?.currentItem?.forwardPlaybackEndTime = CMTime(seconds: trimRange.upperBound, preferredTimescale: CMTimeScale(1000)) } } - public func setAdditionalVideo(_ path: String, positionChanges: 
[VideoPositionChange]) { + public func setAdditionalVideo(_ path: String?, positionChanges: [VideoPositionChange]) { + if self.values.additionalVideoPath == nil, let path { + let asset = AVURLAsset(url: URL(fileURLWithPath: path)) + let playerItem = AVPlayerItem(asset: asset) + let player = AVPlayer(playerItem: playerItem) + if #available(iOS 15.0, *) { + player.sourceClock = clock + } else { + player.masterClock = clock + } + player.automaticallyWaitsToMinimizeStalling = false + self.additionalPlayer = player + self.additionalPlayerPromise.set(.single(player)) + + (self.renderer.textureSource as? UniversalTextureSource)?.setAdditionalInput(.video(playerItem)) + } else if path == nil { + self.additionalPlayer?.pause() + self.additionalPlayer = nil + self.additionalPlayerPromise.set(.single(nil)) + (self.renderer.textureSource as? UniversalTextureSource)?.setAdditionalInput(nil) + } + self.updateValues(mode: .skipRendering) { values in return values.withUpdatedAdditionalVideo(path: path, positionChanges: positionChanges) } @@ -1055,6 +1275,26 @@ public final class MediaEditor { } } + public func setAdditionalVideoTrimRange(_ trimRange: Range, apply: Bool) { + self.updateValues(mode: .generic) { values in + return values.withUpdatedAdditionalVideoTrimRange(trimRange) + } + + if apply { + self.updateAdditionalVideoPlaybackRange() + } + } + + public func setAdditionalVideoOffset(_ offset: Double?, apply: Bool) { + self.updateValues(mode: .generic) { values in + return values.withUpdatedAdditionalVideoOffset(offset) + } + + if apply { + self.updateAdditionalVideoPlaybackRange() + } + } + public func setDrawingAndEntities(data: Data?, image: UIImage?, entities: [CodableDrawingEntity]) { self.updateValues(mode: .skipRendering) { values in return values.withUpdatedDrawingAndEntities(drawing: image, entities: entities) @@ -1069,7 +1309,12 @@ public final class MediaEditor { public func setAudioTrack(_ audioTrack: MediaAudioTrack?, trimRange: Range? = nil, offset: Double? 
= nil) { self.updateValues(mode: .skipRendering) { values in - return values.withUpdatedAudioTrack(audioTrack).withUpdatedAudioTrackSamples(nil).withUpdatedAudioTrackTrimRange(trimRange).withUpdatedAudioTrackVolume(nil).withUpdatedAudioTrackOffset(offset) + return values + .withUpdatedAudioTrack(audioTrack) + .withUpdatedAudioTrackSamples(nil) + .withUpdatedAudioTrackTrimRange(trimRange) + .withUpdatedAudioTrackVolume(nil) + .withUpdatedAudioTrackOffset(offset) } if let audioPlayer = self.audioPlayer { @@ -1079,13 +1324,10 @@ public final class MediaEditor { self.audioDelayTimer?.invalidate() self.audioDelayTimer = nil } else { - self.destroyTimeObservers() - } - self.audioPlayer = nil - - if !self.sourceIsVideo { + self.invalidateTimeObservers() self.playerPromise.set(.single(nil)) } + self.audioPlayer = nil } self.setupAudioPlayback() @@ -1094,12 +1336,11 @@ public final class MediaEditor { private func setupAudioPlayback() { if let audioTrack = self.values.audioTrack { - let path = fullDraftPath(peerId: self.context.account.peerId, path: audioTrack.path) - let audioAsset = AVURLAsset(url: URL(fileURLWithPath: path)) - let playerItem = AVPlayerItem(asset: audioAsset) - let player = AVPlayer(playerItem: playerItem) - player.automaticallyWaitsToMinimizeStalling = false - self.audioPlayer = player + let audioPath = fullDraftPath(peerId: self.context.account.peerId, path: audioTrack.path) + let audioAsset = AVURLAsset(url: URL(fileURLWithPath: audioPath)) + let audioPlayer = AVPlayer(playerItem: AVPlayerItem(asset: audioAsset)) + audioPlayer.automaticallyWaitsToMinimizeStalling = false + self.audioPlayer = audioPlayer self.maybeGenerateAudioSamples(asset: audioAsset) if let volume = self.values.audioTrackVolume { @@ -1109,7 +1350,7 @@ public final class MediaEditor { self.setupTimeObservers() if !self.sourceIsVideo { - self.playerPromise.set(.single(player)) + self.playerPromise.set(.single(audioPlayer)) } } } @@ -1134,6 +1375,15 @@ public final class MediaEditor { } 
} + private func updateAdditionalVideoPlaybackRange() { + if let upperBound = self.values.additionalVideoTrimRange?.upperBound { + let offset = max(0.0, self.values.additionalVideoOffset ?? 0.0) + self.additionalPlayer?.currentItem?.forwardPlaybackEndTime = CMTime(seconds: offset + upperBound, preferredTimescale: CMTimeScale(1000)) + } else { + self.additionalPlayer?.currentItem?.forwardPlaybackEndTime = .invalid + } + } + private func updateAudioPlaybackRange() { if let upperBound = self.values.audioTrackTrimRange?.upperBound { let offset = max(0.0, self.values.audioTrackOffset ?? 0.0) @@ -1154,7 +1404,7 @@ public final class MediaEditor { private var previousUpdateTime: Double? private var scheduledUpdate = false private func updateRenderChain() { - self.renderer.renderPassedEnabled = !self.previewUnedited + self.renderer.skipEditingPasses = self.previewUnedited self.renderChain.update(values: self.values) self.renderer.videoFinishPass.update(values: self.values) @@ -1269,3 +1519,85 @@ public final class MediaEditor { } } } + +private func videoFrames(asset: AVAsset, count: Int) -> Signal<([UIImage], Double), NoError> { + func blurredImage(_ image: UIImage) -> UIImage? 
{ + guard let image = image.cgImage else { + return nil + } + + let thumbnailSize = CGSize(width: image.width, height: image.height) + let thumbnailContextSize = thumbnailSize.aspectFilled(CGSize(width: 20.0, height: 20.0)) + if let thumbnailContext = DrawingContext(size: thumbnailContextSize, scale: 1.0) { + thumbnailContext.withFlippedContext { c in + c.interpolationQuality = .none + c.draw(image, in: CGRect(origin: CGPoint(), size: thumbnailContextSize)) + } + imageFastBlur(Int32(thumbnailContextSize.width), Int32(thumbnailContextSize.height), Int32(thumbnailContext.bytesPerRow), thumbnailContext.bytes) + + let thumbnailContext2Size = thumbnailSize.aspectFitted(CGSize(width: 100.0, height: 100.0)) + if let thumbnailContext2 = DrawingContext(size: thumbnailContext2Size, scale: 1.0) { + thumbnailContext2.withFlippedContext { c in + c.interpolationQuality = .none + if let image = thumbnailContext.generateImage()?.cgImage { + c.draw(image, in: CGRect(origin: CGPoint(), size: thumbnailContext2Size)) + } + } + imageFastBlur(Int32(thumbnailContext2Size.width), Int32(thumbnailContext2Size.height), Int32(thumbnailContext2.bytesPerRow), thumbnailContext2.bytes) + return thumbnailContext2.generateImage() + } + } + return nil + } + + guard count > 0 else { + return .complete() + } + let scale = UIScreen.main.scale + let imageGenerator = AVAssetImageGenerator(asset: asset) + imageGenerator.maximumSize = CGSize(width: 48.0 * scale, height: 36.0 * scale) + imageGenerator.appliesPreferredTrackTransform = true + imageGenerator.requestedTimeToleranceBefore = .zero + imageGenerator.requestedTimeToleranceAfter = .zero + + var firstFrame: UIImage + if let cgImage = try? imageGenerator.copyCGImage(at: .zero, actualTime: nil) { + firstFrame = UIImage(cgImage: cgImage) + if let blurred = blurredImage(firstFrame) { + firstFrame = blurred + } + } else { + firstFrame = generateSingleColorImage(size: CGSize(width: 24.0, height: 36.0), color: .black)! 
+ } + return Signal { subscriber in + subscriber.putNext((Array(repeating: firstFrame, count: count), CACurrentMediaTime())) + + var timestamps: [NSValue] = [] + let duration = asset.duration.seconds + let interval = duration / Double(count) + for i in 0 ..< count { + timestamps.append(NSValue(time: CMTime(seconds: Double(i) * interval, preferredTimescale: CMTimeScale(1000)))) + } + + var updatedFrames: [UIImage] = [] + imageGenerator.generateCGImagesAsynchronously(forTimes: timestamps) { _, image, _, _, _ in + if let image { + updatedFrames.append(UIImage(cgImage: image)) + if updatedFrames.count == count { + subscriber.putNext((updatedFrames, CACurrentMediaTime())) + subscriber.putCompletion() + } else { + var tempFrames = updatedFrames + for _ in 0 ..< count - updatedFrames.count { + tempFrames.append(firstFrame) + } + subscriber.putNext((tempFrames, CACurrentMediaTime())) + } + } + } + + return ActionDisposable { + imageGenerator.cancelAllCGImageGeneration() + } + } +} diff --git a/submodules/TelegramUI/Components/MediaEditor/Sources/MediaEditorComposer.swift b/submodules/TelegramUI/Components/MediaEditor/Sources/MediaEditorComposer.swift index 75fd7e83c8..6bb0d6ed99 100644 --- a/submodules/TelegramUI/Components/MediaEditor/Sources/MediaEditorComposer.swift +++ b/submodules/TelegramUI/Components/MediaEditor/Sources/MediaEditorComposer.swift @@ -26,14 +26,17 @@ public func mediaEditorGenerateGradientImage(size: CGSize, colors: [UIColor]) -> return image } -public func mediaEditorGetGradientColors(from image: UIImage) -> (UIColor, UIColor) { +public func mediaEditorGetGradientColors(from image: UIImage) -> MediaEditor.GradientColors { let context = DrawingContext(size: CGSize(width: 5.0, height: 5.0), scale: 1.0, clear: false)! 
context.withFlippedContext({ context in if let cgImage = image.cgImage { context.draw(cgImage, in: CGRect(x: 0.0, y: 0.0, width: 5.0, height: 5.0)) } }) - return (context.colorAt(CGPoint(x: 2.0, y: 0.0)), context.colorAt(CGPoint(x: 2.0, y: 4.0))) + return MediaEditor.GradientColors( + top: context.colorAt(CGPoint(x: 2.0, y: 0.0)), + bottom: context.colorAt(CGPoint(x: 2.0, y: 4.0)) + ) } final class MediaEditorComposer { @@ -110,9 +113,9 @@ final class MediaEditorComposer { if let additionalSampleBuffer, let additionalImageBuffer = CMSampleBufferGetImageBuffer(additionalSampleBuffer) { additionalPixelBuffer = VideoPixelBuffer(pixelBuffer: additionalImageBuffer, rotation: additionalTextureRotation, timestamp: time) } - self.renderer.consumeVideoPixelBuffer(pixelBuffer: mainPixelBuffer, additionalPixelBuffer: additionalPixelBuffer, render: true) + self.renderer.consume(main: .videoBuffer(mainPixelBuffer), additional: additionalPixelBuffer.flatMap { .videoBuffer($0) }, render: true) - if let finalTexture = self.renderer.finalTexture, var ciImage = CIImage(mtlTexture: finalTexture, options: [.colorSpace: self.colorSpace]) { + if let finalTexture = self.renderer.resultTexture, var ciImage = CIImage(mtlTexture: finalTexture, options: [.colorSpace: self.colorSpace]) { ciImage = ciImage.transformed(by: CGAffineTransformMakeScale(1.0, -1.0).translatedBy(x: 0.0, y: -ciImage.extent.height)) var pixelBuffer: CVPixelBuffer? 
@@ -144,9 +147,9 @@ final class MediaEditorComposer { } if self.filteredImage == nil, let device = self.device { if let texture = loadTexture(image: inputImage, device: device) { - self.renderer.consumeTexture(texture, render: true) + self.renderer.consume(main: .texture(texture, .zero), additional: nil, render: true) - if let finalTexture = self.renderer.finalTexture, var ciImage = CIImage(mtlTexture: finalTexture, options: [.colorSpace: self.colorSpace]) { + if let finalTexture = self.renderer.resultTexture, var ciImage = CIImage(mtlTexture: finalTexture, options: [.colorSpace: self.colorSpace]) { ciImage = ciImage.transformed(by: CGAffineTransformMakeScale(1.0, -1.0).translatedBy(x: 0.0, y: -ciImage.extent.height)) self.filteredImage = ciImage } diff --git a/submodules/TelegramUI/Components/MediaEditor/Sources/MediaEditorComposerEntity.swift b/submodules/TelegramUI/Components/MediaEditor/Sources/MediaEditorComposerEntity.swift index a6d865c56a..397c299d5a 100644 --- a/submodules/TelegramUI/Components/MediaEditor/Sources/MediaEditorComposerEntity.swift +++ b/submodules/TelegramUI/Components/MediaEditor/Sources/MediaEditorComposerEntity.swift @@ -65,7 +65,6 @@ func composerEntitiesForDrawingEntity(postbox: Postbox, textScale: CGFloat, enti if let entity = entity as? 
DrawingStickerEntity { if case let .file(_, type) = entity.content, case .reaction = type { return [] -// return [MediaEditorComposerStaticEntity(image: image, position: entity.position, scale: entity.scale, rotation: entity.rotation, baseSize: entity.baseSize, mirrored: false)] } else { let content: MediaEditorComposerStickerEntity.Content switch entity.content { diff --git a/submodules/TelegramUI/Components/MediaEditor/Sources/MediaEditorRenderer.swift b/submodules/TelegramUI/Components/MediaEditor/Sources/MediaEditorRenderer.swift index c4f6f3b848..16d00d4a01 100644 --- a/submodules/TelegramUI/Components/MediaEditor/Sources/MediaEditorRenderer.swift +++ b/submodules/TelegramUI/Components/MediaEditor/Sources/MediaEditorRenderer.swift @@ -21,11 +21,6 @@ final class VideoPixelBuffer { } } -protocol TextureConsumer: AnyObject { - func consumeTexture(_ texture: MTLTexture, render: Bool) - func consumeVideoPixelBuffer(pixelBuffer: VideoPixelBuffer, additionalPixelBuffer: VideoPixelBuffer?, render: Bool) -} - final class RenderingContext { let device: MTLDevice let commandBuffer: MTLCommandBuffer @@ -45,7 +40,7 @@ protocol RenderPass: AnyObject { } protocol TextureSource { - func connect(to: TextureConsumer) + func connect(to renderer: MediaEditorRenderer) func invalidate() } @@ -60,7 +55,7 @@ protocol RenderTarget: AnyObject { func redraw() } -final class MediaEditorRenderer: TextureConsumer { +final class MediaEditorRenderer { var textureSource: TextureSource? 
{ didSet { self.textureSource?.connect(to: self) @@ -70,9 +65,9 @@ final class MediaEditorRenderer: TextureConsumer { private var semaphore = DispatchSemaphore(value: 3) private var renderPasses: [RenderPass] = [] - private let videoInputPass = VideoInputPass() + private let mainVideoInputPass = VideoInputPass() private let additionalVideoInputPass = VideoInputPass() - let videoFinishPass = VideoInputScalePass() + let videoFinishPass = VideoFinishPass() private let outputRenderPass = OutputRenderPass() private weak var renderTarget: RenderTarget? { @@ -86,16 +81,33 @@ final class MediaEditorRenderer: TextureConsumer { private var commandQueue: MTLCommandQueue? private var textureCache: CVMetalTextureCache? - private var currentTexture: MTLTexture? - private var currentAdditionalTexture: MTLTexture? - private var currentTime: CMTime = .zero + enum Input { + case texture(MTLTexture, CMTime) + case videoBuffer(VideoPixelBuffer) + + var timestamp: CMTime { + switch self { + case let .texture(_, timestamp): + return timestamp + case let .videoBuffer(videoBuffer): + return videoBuffer.timestamp + } + } + } - private var currentPixelBuffer: VideoPixelBuffer? - private var currentAdditionalPixelBuffer: VideoPixelBuffer? + private var currentMainInput: Input? + private var currentAdditionalInput: Input? + +// private var currentTexture: MTLTexture? +// private var currentAdditionalTexture: MTLTexture? +// private var currentTime: CMTime = .zero +// +// private var currentPixelBuffer: VideoPixelBuffer? +// private var currentAdditionalPixelBuffer: VideoPixelBuffer? public var onNextRender: (() -> Void)? - var finalTexture: MTLTexture? + var resultTexture: MTLTexture? 
public init() { @@ -142,7 +154,7 @@ final class MediaEditorRenderer: TextureConsumer { self.commandQueue = device.makeCommandQueue() self.commandQueue?.label = "Media Editor Command Queue" - self.videoInputPass.setup(device: device, library: library) + self.mainVideoInputPass.setup(device: device, library: library) self.additionalVideoInputPass.setup(device: device, library: library) self.videoFinishPass.setup(device: device, library: library) self.renderPasses.forEach { $0.setup(device: device, library: library) } @@ -171,15 +183,48 @@ final class MediaEditorRenderer: TextureConsumer { self.commandQueue = device.makeCommandQueue() self.commandQueue?.label = "Media Editor Command Queue" - self.videoInputPass.setup(device: device, library: library) + self.mainVideoInputPass.setup(device: device, library: library) self.additionalVideoInputPass.setup(device: device, library: library) self.videoFinishPass.setup(device: device, library: library) self.renderPasses.forEach { $0.setup(device: device, library: library) } } public var displayEnabled = true - var renderPassedEnabled = true + var skipEditingPasses = true var needsDisplay = false + + private func combinedTextureFromCurrentInputs(device: MTLDevice, commandBuffer: MTLCommandBuffer, textureCache: CVMetalTextureCache) -> MTLTexture? { + var mainTexture: MTLTexture? + var additionalTexture: MTLTexture? + + func textureFromInput(_ input: MediaEditorRenderer.Input, videoInputPass: VideoInputPass) -> MTLTexture? 
{ + switch input { + case let .texture(texture, _): + return texture + case let .videoBuffer(videoBuffer): + return videoInputPass.processPixelBuffer(videoBuffer, textureCache: textureCache, device: device, commandBuffer: commandBuffer) + } + } + + guard let mainInput = self.currentMainInput else { + return nil + } + + mainTexture = textureFromInput(mainInput, videoInputPass: self.mainVideoInputPass) + if let additionalInput = self.currentAdditionalInput { + additionalTexture = textureFromInput(additionalInput, videoInputPass: self.additionalVideoInputPass) + } + + if let mainTexture, let additionalTexture { + if let result = self.videoFinishPass.process(input: mainTexture, secondInput: additionalTexture, timestamp: mainInput.timestamp, device: device, commandBuffer: commandBuffer) { + return result + } else { + return mainTexture + } + } else { + return mainTexture + } + } func renderFrame() { let device: MTLDevice? @@ -192,52 +237,22 @@ final class MediaEditorRenderer: TextureConsumer { } guard let device = device, let commandQueue = self.commandQueue, - let textureCache = self.textureCache else { + let textureCache = self.textureCache, + let commandBuffer = commandQueue.makeCommandBuffer(), + var texture = self.combinedTextureFromCurrentInputs(device: device, commandBuffer: commandBuffer, textureCache: textureCache) + else { self.didRenderFrame() return } - guard let commandBuffer = commandQueue.makeCommandBuffer() else { - self.didRenderFrame() - return - } - - var texture: MTLTexture - if let currentAdditionalTexture = self.currentAdditionalTexture, let currentTexture = self.currentTexture { - self.videoFinishPass.mainTextureRotation = .rotate0Degrees - self.videoFinishPass.additionalTextureRotation = .rotate0DegreesMirrored - if let result = self.videoFinishPass.process(input: currentTexture, secondInput: currentAdditionalTexture, timestamp: self.currentTime, device: device, commandBuffer: commandBuffer) { - texture = result - } else { - texture = 
currentTexture - } - } else if let currentTexture = self.currentTexture { - texture = currentTexture - } else if let currentPixelBuffer = self.currentPixelBuffer, let currentAdditionalPixelBuffer = self.currentAdditionalPixelBuffer, let videoTexture = self.videoInputPass.processPixelBuffer(currentPixelBuffer, textureCache: textureCache, device: device, commandBuffer: commandBuffer), let additionalVideoTexture = self.additionalVideoInputPass.processPixelBuffer(currentAdditionalPixelBuffer, textureCache: textureCache, device: device, commandBuffer: commandBuffer) { - if let result = self.videoFinishPass.process(input: videoTexture, secondInput: additionalVideoTexture, timestamp: currentPixelBuffer.timestamp, device: device, commandBuffer: commandBuffer) { - texture = result - } else { - texture = videoTexture - } - } else if let currentPixelBuffer = self.currentPixelBuffer, let videoTexture = self.videoInputPass.processPixelBuffer(currentPixelBuffer, textureCache: textureCache, device: device, commandBuffer: commandBuffer) { - if let result = self.videoFinishPass.process(input: videoTexture, secondInput: nil, timestamp: currentPixelBuffer.timestamp, device: device, commandBuffer: commandBuffer) { - texture = result - } else { - texture = videoTexture - } - } else { - self.didRenderFrame() - return - } - - if self.renderPassedEnabled { + if !self.skipEditingPasses { for renderPass in self.renderPasses { if let nextTexture = renderPass.process(input: texture, device: device, commandBuffer: commandBuffer) { texture = nextTexture } } } - self.finalTexture = texture + self.resultTexture = texture if self.renderTarget == nil { commandBuffer.addCompletedHandler { [weak self] _ in @@ -265,7 +280,7 @@ final class MediaEditorRenderer: TextureConsumer { let device = renderTarget.mtlDevice, let commandQueue = self.commandQueue, let commandBuffer = commandQueue.makeCommandBuffer(), - let texture = self.finalTexture + let texture = self.resultTexture else { self.needsDisplay = 
false self.didRenderFrame() @@ -299,50 +314,70 @@ final class MediaEditorRenderer: TextureConsumer { self.semaphore.signal() } - func consumeTexture(_ texture: MTLTexture, render: Bool) { - if render { - self.willRenderFrame() - } - - self.currentTexture = texture - if render { - self.renderFrame() - } - } - - func consumeTexture(_ texture: MTLTexture, additionalTexture: MTLTexture?, time: CMTime, render: Bool) { - self.displayEnabled = false + func consume( + main: MediaEditorRenderer.Input, + additional: MediaEditorRenderer.Input?, + render: Bool, + displayEnabled: Bool = true + ) { + self.displayEnabled = displayEnabled if render { self.willRenderFrame() } - self.currentTexture = texture - self.currentAdditionalTexture = additionalTexture - self.currentTime = time + self.currentMainInput = main + self.currentAdditionalInput = additional + if render { self.renderFrame() } } - var previousPresentationTimestamp: CMTime? - func consumeVideoPixelBuffer(pixelBuffer: VideoPixelBuffer, additionalPixelBuffer: VideoPixelBuffer?, render: Bool) { - self.willRenderFrame() - - self.currentPixelBuffer = pixelBuffer - if additionalPixelBuffer == nil && self.currentAdditionalPixelBuffer != nil { - } else { - self.currentAdditionalPixelBuffer = additionalPixelBuffer - } - if render { - if self.previousPresentationTimestamp == pixelBuffer.timestamp { - self.didRenderFrame() - } else { - self.renderFrame() - } - } - self.previousPresentationTimestamp = pixelBuffer.timestamp - } +// func consumeTexture(_ texture: MTLTexture, render: Bool) { +// if render { +// self.willRenderFrame() +// } +// +// self.currentTexture = texture +// if render { +// self.renderFrame() +// } +// } +// +// func consumeTexture(_ texture: MTLTexture, additionalTexture: MTLTexture?, time: CMTime, render: Bool) { +// self.displayEnabled = false +// +// if render { +// self.willRenderFrame() +// } +// +// self.currentTexture = texture +// self.currentAdditionalTexture = additionalTexture +// self.currentTime = 
time +// if render { +// self.renderFrame() +// } +// } +// +// var previousPresentationTimestamp: CMTime? +// func consumeVideoPixelBuffer(pixelBuffer: VideoPixelBuffer, additionalPixelBuffer: VideoPixelBuffer?, render: Bool) { +// self.willRenderFrame() +// +// self.currentPixelBuffer = pixelBuffer +// if additionalPixelBuffer == nil && self.currentAdditionalPixelBuffer != nil { +// } else { +// self.currentAdditionalPixelBuffer = additionalPixelBuffer +// } +// if render { +// if self.previousPresentationTimestamp == pixelBuffer.timestamp { +// self.didRenderFrame() +// } else { +// self.renderFrame() +// } +// } +// self.previousPresentationTimestamp = pixelBuffer.timestamp +// } func renderTargetDidChange(_ target: RenderTarget?) { self.renderTarget = target @@ -354,7 +389,7 @@ final class MediaEditorRenderer: TextureConsumer { } func finalRenderedImage(mirror: Bool = false) -> UIImage? { - if let finalTexture = self.finalTexture, let device = self.renderTarget?.mtlDevice { + if let finalTexture = self.resultTexture, let device = self.renderTarget?.mtlDevice { return getTextureImage(device: device, texture: finalTexture, mirror: mirror) } else { return nil diff --git a/submodules/TelegramUI/Components/MediaEditor/Sources/MediaEditorUtils.swift b/submodules/TelegramUI/Components/MediaEditor/Sources/MediaEditorUtils.swift index a93192fc58..33ef99a390 100644 --- a/submodules/TelegramUI/Components/MediaEditor/Sources/MediaEditorUtils.swift +++ b/submodules/TelegramUI/Components/MediaEditor/Sources/MediaEditorUtils.swift @@ -1,4 +1,5 @@ import Foundation +import UIKit import AVFoundation import SwiftSignalKit @@ -39,3 +40,92 @@ extension AVPlayer { return timer } } + +func textureRotatonForAVAsset(_ asset: AVAsset, mirror: Bool = false) -> TextureRotation { + for track in asset.tracks { + if track.mediaType == .video { + let t = track.preferredTransform + if t.a == -1.0 && t.d == -1.0 { + return .rotate180Degrees + } else if t.a == 1.0 && t.d == 1.0 { + return 
.rotate0Degrees + } else if t.b == -1.0 && t.c == 1.0 { + return .rotate270Degrees + } else if t.a == -1.0 && t.d == 1.0 { + return .rotate270Degrees + } else if t.a == 1.0 && t.d == -1.0 { + return .rotate180Degrees + } else { + return mirror ? .rotate90DegreesMirrored : .rotate90Degrees + } + } + } + return .rotate0Degrees +} + +func loadTexture(image: UIImage, device: MTLDevice) -> MTLTexture? { + func dataForImage(_ image: UIImage) -> UnsafeMutablePointer { + let imageRef = image.cgImage + let width = Int(image.size.width) + let height = Int(image.size.height) + let colorSpace = CGColorSpaceCreateDeviceRGB() + + let rawData = UnsafeMutablePointer.allocate(capacity: width * height * 4) + let bytePerPixel = 4 + let bytesPerRow = bytePerPixel * Int(width) + let bitsPerComponent = 8 + let bitmapInfo = CGBitmapInfo.byteOrder32Little.rawValue + CGImageAlphaInfo.premultipliedFirst.rawValue + let context = CGContext.init(data: rawData, width: width, height: height, bitsPerComponent: bitsPerComponent, bytesPerRow: bytesPerRow, space: colorSpace, bitmapInfo: bitmapInfo) + context?.draw(imageRef!, in: CGRect(x: 0, y: 0, width: width, height: height)) + + return rawData + } + + let width = Int(image.size.width * image.scale) + let height = Int(image.size.height * image.scale) + let bytePerPixel = 4 + let bytesPerRow = bytePerPixel * width + + var texture : MTLTexture? + let region = MTLRegionMake2D(0, 0, Int(width), Int(height)) + let textureDescriptor = MTLTextureDescriptor.texture2DDescriptor(pixelFormat: .bgra8Unorm, width: width, height: height, mipmapped: false) + texture = device.makeTexture(descriptor: textureDescriptor) + + let data = dataForImage(image) + texture?.replace(region: region, mipmapLevel: 0, withBytes: data, bytesPerRow: bytesPerRow) + + return texture +} + +func pixelBufferToMTLTexture(pixelBuffer: CVPixelBuffer, textureCache: CVMetalTextureCache) -> MTLTexture? 
{ + let width = CVPixelBufferGetWidth(pixelBuffer) + let height = CVPixelBufferGetHeight(pixelBuffer) + + let format: MTLPixelFormat = .r8Unorm + var textureRef : CVMetalTexture? + let status = CVMetalTextureCacheCreateTextureFromImage(nil, textureCache, pixelBuffer, nil, format, width, height, 0, &textureRef) + if status == kCVReturnSuccess { + return CVMetalTextureGetTexture(textureRef!) + } + + return nil +} + +func getTextureImage(device: MTLDevice, texture: MTLTexture, mirror: Bool = false) -> UIImage? { + let colorSpace = CGColorSpaceCreateDeviceRGB() + let context = CIContext(mtlDevice: device, options: [:]) + guard var ciImage = CIImage(mtlTexture: texture, options: [.colorSpace: colorSpace]) else { + return nil + } + let transform: CGAffineTransform + if mirror { + transform = CGAffineTransform(-1.0, 0.0, 0.0, -1.0, ciImage.extent.width, ciImage.extent.height) + } else { + transform = CGAffineTransform(1.0, 0.0, 0.0, -1.0, 0.0, ciImage.extent.height) + } + ciImage = ciImage.transformed(by: transform) + guard let cgImage = context.createCGImage(ciImage, from: CGRect(origin: .zero, size: CGSize(width: ciImage.extent.width, height: ciImage.extent.height))) else { + return nil + } + return UIImage(cgImage: cgImage) +} diff --git a/submodules/TelegramUI/Components/MediaEditor/Sources/MediaEditorValues.swift b/submodules/TelegramUI/Components/MediaEditor/Sources/MediaEditorValues.swift index c5fab71d32..10099f36ff 100644 --- a/submodules/TelegramUI/Components/MediaEditor/Sources/MediaEditorValues.swift +++ b/submodules/TelegramUI/Components/MediaEditor/Sources/MediaEditorValues.swift @@ -279,6 +279,15 @@ public final class MediaEditorValues: Codable, Equatable { if lhs.additionalVideoPositionChanges != rhs.additionalVideoPositionChanges { return false } + if lhs.additionalVideoTrimRange != rhs.additionalVideoTrimRange { + return false + } + if lhs.additionalVideoOffset != rhs.additionalVideoOffset { + return false + } + if lhs.additionalVideoVolume != 
rhs.additionalVideoVolume { + return false + } if lhs.drawing !== rhs.drawing { return false } @@ -352,6 +361,10 @@ public final class MediaEditorValues: Codable, Equatable { case additionalVideoScale case additionalVideoRotation case additionalVideoPositionChanges + case additionalVideoTrimRange + case additionalVideoOffset + case additionalVideoVolume + case drawing case entities case toolValues @@ -385,6 +398,10 @@ public final class MediaEditorValues: Codable, Equatable { public let additionalVideoRotation: CGFloat? public let additionalVideoPositionChanges: [VideoPositionChange] + public let additionalVideoTrimRange: Range? + public let additionalVideoOffset: Double? + public let additionalVideoVolume: CGFloat? + public let drawing: UIImage? public let entities: [CodableDrawingEntity] public let toolValues: [EditorToolKey: Any] @@ -420,6 +437,9 @@ public final class MediaEditorValues: Codable, Equatable { additionalVideoScale: CGFloat?, additionalVideoRotation: CGFloat?, additionalVideoPositionChanges: [VideoPositionChange], + additionalVideoTrimRange: Range?, + additionalVideoOffset: Double?, + additionalVideoVolume: CGFloat?, drawing: UIImage?, entities: [CodableDrawingEntity], toolValues: [EditorToolKey: Any], @@ -448,6 +468,9 @@ public final class MediaEditorValues: Codable, Equatable { self.additionalVideoScale = additionalVideoScale self.additionalVideoRotation = additionalVideoRotation self.additionalVideoPositionChanges = additionalVideoPositionChanges + self.additionalVideoTrimRange = additionalVideoTrimRange + self.additionalVideoOffset = additionalVideoOffset + self.additionalVideoVolume = additionalVideoVolume self.drawing = drawing self.entities = entities self.toolValues = toolValues @@ -491,6 +514,9 @@ public final class MediaEditorValues: Codable, Equatable { self.additionalVideoScale = try container.decodeIfPresent(CGFloat.self, forKey: .additionalVideoScale) self.additionalVideoRotation = try container.decodeIfPresent(CGFloat.self, forKey: 
.additionalVideoRotation) self.additionalVideoPositionChanges = try container.decodeIfPresent([VideoPositionChange].self, forKey: .additionalVideoPositionChanges) ?? [] + self.additionalVideoTrimRange = try container.decodeIfPresent(Range.self, forKey: .additionalVideoTrimRange) + self.additionalVideoOffset = try container.decodeIfPresent(Double.self, forKey: .additionalVideoOffset) + self.additionalVideoVolume = try container.decodeIfPresent(CGFloat.self, forKey: .additionalVideoVolume) if let drawingData = try container.decodeIfPresent(Data.self, forKey: .drawing), let image = UIImage(data: drawingData) { self.drawing = image @@ -571,68 +597,81 @@ public final class MediaEditorValues: Codable, Equatable { } public func makeCopy() -> MediaEditorValues { - return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: self.additionalVideoPath, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, drawing: self.drawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, qualityPreset: self.qualityPreset) + return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, 
cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: self.additionalVideoPath, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, drawing: self.drawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, qualityPreset: self.qualityPreset) } func withUpdatedCrop(offset: CGPoint, scale: CGFloat, rotation: CGFloat, mirroring: Bool) -> MediaEditorValues { - return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: offset, cropRect: self.cropRect, cropScale: scale, cropRotation: rotation, cropMirroring: mirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: self.additionalVideoPath, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, drawing: self.drawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: 
self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, qualityPreset: self.qualityPreset) + return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: offset, cropRect: self.cropRect, cropScale: scale, cropRotation: rotation, cropMirroring: mirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: self.additionalVideoPath, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, drawing: self.drawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, qualityPreset: self.qualityPreset) } func withUpdatedGradientColors(gradientColors: [UIColor]) -> MediaEditorValues { - return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: self.additionalVideoPath, additionalVideoPosition: 
self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, drawing: self.drawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, qualityPreset: self.qualityPreset) + return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: self.additionalVideoPath, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, drawing: self.drawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, qualityPreset: self.qualityPreset) } func withUpdatedVideoIsMuted(_ videoIsMuted: Bool) -> MediaEditorValues { - return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: 
self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: self.additionalVideoPath, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, drawing: self.drawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, qualityPreset: self.qualityPreset) + return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: self.additionalVideoPath, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, drawing: self.drawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, 
audioTrackSamples: self.audioTrackSamples, qualityPreset: self.qualityPreset) } func withUpdatedVideoIsFullHd(_ videoIsFullHd: Bool) -> MediaEditorValues { - return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: self.additionalVideoPath, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, drawing: self.drawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, qualityPreset: self.qualityPreset) + return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: self.additionalVideoPath, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, 
additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, drawing: self.drawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, qualityPreset: self.qualityPreset) } func withUpdatedVideoIsMirrored(_ videoIsMirrored: Bool) -> MediaEditorValues { - return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: videoIsMirrored, additionalVideoPath: self.additionalVideoPath, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, drawing: self.drawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, qualityPreset: self.qualityPreset) + return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: videoIsMirrored, 
additionalVideoPath: self.additionalVideoPath, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, drawing: self.drawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, qualityPreset: self.qualityPreset) } - func withUpdatedAdditionalVideo(path: String, positionChanges: [VideoPositionChange]) -> MediaEditorValues { - return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: path, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: positionChanges, drawing: self.drawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, qualityPreset: self.qualityPreset) + func withUpdatedAdditionalVideo(path: String?, positionChanges: [VideoPositionChange]) -> MediaEditorValues { + return 
MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: path, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: positionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, drawing: self.drawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, qualityPreset: self.qualityPreset) } func withUpdatedAdditionalVideo(position: CGPoint, scale: CGFloat, rotation: CGFloat) -> MediaEditorValues { - return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: self.additionalVideoPath, additionalVideoPosition: position, additionalVideoScale: scale, additionalVideoRotation: rotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, drawing: self.drawing, entities: self.entities, toolValues: 
self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, qualityPreset: self.qualityPreset) + return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: self.additionalVideoPath, additionalVideoPosition: position, additionalVideoScale: scale, additionalVideoRotation: rotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, drawing: self.drawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, qualityPreset: self.qualityPreset) + } + + func withUpdatedAdditionalVideoTrimRange(_ additionalVideoTrimRange: Range) -> MediaEditorValues { + return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: 
self.additionalVideoPath, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, drawing: self.drawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, qualityPreset: self.qualityPreset) + } + + + func withUpdatedAdditionalVideoOffset(_ additionalVideoOffset: Double?) -> MediaEditorValues { + return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: self.additionalVideoPath, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, drawing: self.drawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, 
qualityPreset: self.qualityPreset) + } + + func withUpdatedAdditionalVideoVolume(_ additionalVideoVolume: CGFloat?) -> MediaEditorValues { + return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: self.additionalVideoPath, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: additionalVideoVolume, drawing: self.drawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, qualityPreset: self.qualityPreset) } func withUpdatedVideoTrimRange(_ videoTrimRange: Range) -> MediaEditorValues { - return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: self.additionalVideoPath, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: 
self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, drawing: self.drawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, qualityPreset: self.qualityPreset) + return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: self.additionalVideoPath, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, drawing: self.drawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, qualityPreset: self.qualityPreset) } func withUpdatedDrawingAndEntities(drawing: UIImage?, entities: [CodableDrawingEntity]) -> MediaEditorValues { - return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, 
cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: self.additionalVideoPath, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, drawing: drawing, entities: entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, qualityPreset: self.qualityPreset) + return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: self.additionalVideoPath, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, drawing: drawing, entities: entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, 
qualityPreset: self.qualityPreset) } func withUpdatedToolValues(_ toolValues: [EditorToolKey: Any]) -> MediaEditorValues { - return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: self.additionalVideoPath, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, drawing: self.drawing, entities: self.entities, toolValues: toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, qualityPreset: self.qualityPreset) + return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: self.additionalVideoPath, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: 
self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, drawing: self.drawing, entities: self.entities, toolValues: toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, qualityPreset: self.qualityPreset) } func withUpdatedAudioTrack(_ audioTrack: MediaAudioTrack?) -> MediaEditorValues { - return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: self.additionalVideoPath, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, drawing: self.drawing, entities: self.entities, toolValues: self.toolValues, audioTrack: audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, qualityPreset: self.qualityPreset) + return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: 
self.additionalVideoPath, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, drawing: self.drawing, entities: self.entities, toolValues: self.toolValues, audioTrack: audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, qualityPreset: self.qualityPreset) } func withUpdatedAudioTrackTrimRange(_ audioTrackTrimRange: Range?) -> MediaEditorValues { - return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: self.additionalVideoPath, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, drawing: self.drawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, qualityPreset: self.qualityPreset) + return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, 
cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: self.additionalVideoPath, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, drawing: self.drawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, qualityPreset: self.qualityPreset) } func withUpdatedAudioTrackOffset(_ audioTrackOffset: Double?) 
-> MediaEditorValues { - return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: self.additionalVideoPath, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, drawing: self.drawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, qualityPreset: self.qualityPreset) + return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: self.additionalVideoPath, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, drawing: self.drawing, entities: 
self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, qualityPreset: self.qualityPreset) } func withUpdatedAudioTrackVolume(_ audioTrackVolume: CGFloat?) -> MediaEditorValues { - return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: self.additionalVideoPath, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, drawing: self.drawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: audioTrackVolume, audioTrackSamples: self.audioTrackSamples, qualityPreset: self.qualityPreset) + return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: self.additionalVideoPath, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: 
self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, drawing: self.drawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: audioTrackVolume, audioTrackSamples: self.audioTrackSamples, qualityPreset: self.qualityPreset) } func withUpdatedAudioTrackSamples(_ audioTrackSamples: MediaAudioTrackSamples?) -> MediaEditorValues { - return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: self.additionalVideoPath, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, drawing: self.drawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: audioTrackSamples, qualityPreset: self.qualityPreset) + return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: 
self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: self.additionalVideoPath, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, drawing: self.drawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: audioTrackSamples, qualityPreset: self.qualityPreset) } public var resultDimensions: PixelDimensions { diff --git a/submodules/TelegramUI/Components/MediaEditor/Sources/MediaEditorVideoExport.swift b/submodules/TelegramUI/Components/MediaEditor/Sources/MediaEditorVideoExport.swift index 91c821b902..a0fbc5baba 100644 --- a/submodules/TelegramUI/Components/MediaEditor/Sources/MediaEditorVideoExport.swift +++ b/submodules/TelegramUI/Components/MediaEditor/Sources/MediaEditorVideoExport.swift @@ -232,6 +232,23 @@ public final class MediaEditorVideoExport { } } + var additionalVideoTimeRange: CMTimeRange? 
{ + if let videoTrimRange = self.values.additionalVideoTrimRange { + return CMTimeRange(start: CMTime(seconds: videoTrimRange.lowerBound, preferredTimescale: CMTimeScale(NSEC_PER_SEC)), end: CMTime(seconds: videoTrimRange.upperBound, preferredTimescale: CMTimeScale(NSEC_PER_SEC))) + } else { + return nil + } + } + + var additionalVideoStartTime: CMTime { + if let range = self.values.additionalVideoTrimRange { + let offset = -min(0.0, self.values.additionalVideoOffset ?? 0.0) + return CMTime(seconds: offset + range.lowerBound, preferredTimescale: CMTimeScale(NSEC_PER_SEC)) + } else { + return .zero + } + } + var audioTimeRange: CMTimeRange? { if let audioTrack = self.values.audioTrack { let offset = max(0.0, self.values.audioTrackOffset ?? 0.0) @@ -475,11 +492,19 @@ public final class MediaEditorVideoExport { } if let timeRange = self.configuration.timeRange { reader.timeRange = timeRange - self.additionalReader?.timeRange = timeRange + if let additionalTimeRange = self.configuration.additionalVideoTimeRange { + self.additionalReader?.timeRange = additionalTimeRange + } else { + self.additionalReader?.timeRange = timeRange + } } else if asset.duration.seconds > 60.0 && isStory { let trimmedRange = CMTimeRange(start: CMTime(seconds: 0.0, preferredTimescale: CMTimeScale(NSEC_PER_SEC)), end: CMTime(seconds: 60.0, preferredTimescale: CMTimeScale(NSEC_PER_SEC))) reader.timeRange = trimmedRange - self.additionalReader?.timeRange = trimmedRange + if let additionalTimeRange = self.configuration.additionalVideoTimeRange { + self.additionalReader?.timeRange = additionalTimeRange + } else { + self.additionalReader?.timeRange = trimmedRange + } } self.writer = MediaEditorVideoAVAssetWriter() @@ -802,7 +827,13 @@ public final class MediaEditorVideoExport { self.statusValue = .progress(Float(progress)) } - let additionalSampleBuffer = self.additionalVideoOutput?.copyNextSampleBuffer() + var additionalSampleBuffer: CMSampleBuffer? 
+ if let additionalVideoOutput = self.additionalVideoOutput { + if timestamp < self.configuration.additionalVideoStartTime { + } else { + additionalSampleBuffer = additionalVideoOutput.copyNextSampleBuffer() + } + } if let composer = self.composer { composer.processSampleBuffer(sampleBuffer: sampleBuffer, textureRotation: self.textureRotation, additionalSampleBuffer: additionalSampleBuffer, additionalTextureRotation: self.additionalTextureRotation, pool: writer.pixelBufferPool, completion: { pixelBuffer in diff --git a/submodules/TelegramUI/Components/MediaEditor/Sources/UniversalTextureSource.swift b/submodules/TelegramUI/Components/MediaEditor/Sources/UniversalTextureSource.swift new file mode 100644 index 0000000000..c577835caf --- /dev/null +++ b/submodules/TelegramUI/Components/MediaEditor/Sources/UniversalTextureSource.swift @@ -0,0 +1,235 @@ +import Foundation +import AVFoundation +import Metal +import MetalKit + +final class UniversalTextureSource: TextureSource { + enum Input { + case image(UIImage) + case video(AVPlayerItem) + + fileprivate func createContext(renderTarget: RenderTarget, queue: DispatchQueue, additional: Bool) -> InputContext { + switch self { + case .image: + return ImageInputContext(input: self, renderTarget: renderTarget, queue: queue) + case .video: + return VideoInputContext(input: self, renderTarget: renderTarget, queue: queue, additional: additional) + } + } + } + + private weak var renderTarget: RenderTarget? + private var displayLink: CADisplayLink? + private let queue: DispatchQueue + + private var mainInputContext: InputContext? + private var additionalInputContext: InputContext? + + weak var output: MediaEditorRenderer? 
+ + init(renderTarget: RenderTarget) { + self.renderTarget = renderTarget + + self.queue = DispatchQueue( + label: "UniversalTextureSource Queue", + qos: .userInteractive, + attributes: [], + autoreleaseFrequency: .workItem, + target: nil + ) + } + + func setMainInput(_ input: Input) { + guard let renderTarget = self.renderTarget else { + return + } + self.mainInputContext = input.createContext(renderTarget: renderTarget, queue: self.queue, additional: false) + self.update(forced: true) + } + + func setAdditionalInput(_ input: Input?) { + guard let renderTarget = self.renderTarget else { + return + } + if let input { + self.additionalInputContext = input.createContext(renderTarget: renderTarget, queue: self.queue, additional: true) + } else { + self.additionalInputContext = nil + } + self.update(forced: true) + } + + private var previousAdditionalOutput: MediaEditorRenderer.Input? + private func update(forced: Bool) { + let time = CACurrentMediaTime() + + let needsDisplayLink = (self.mainInputContext?.needsDisplayLink ?? false) || (self.additionalInputContext?.needsDisplayLink ?? 
false) + if needsDisplayLink { + if self.displayLink == nil { + let displayLink = CADisplayLink(target: DisplayLinkTarget({ [weak self] in + self?.update(forced: false) + }), selector: #selector(DisplayLinkTarget.handleDisplayLinkUpdate(sender:))) + displayLink.preferredFramesPerSecond = 60 + displayLink.add(to: .main, forMode: .common) + self.displayLink = displayLink + } + } else { + if let displayLink = self.displayLink { + self.displayLink = nil + displayLink.invalidate() + } + } + + let main = self.mainInputContext?.output(time: time) + var additional = self.additionalInputContext?.output(time: time) + if let additional { + self.previousAdditionalOutput = additional + } else if self.additionalInputContext != nil { + additional = self.previousAdditionalOutput + } + + guard let main else { + return + } + + self.output?.consume(main: main, additional: additional, render: true) + } + + func connect(to consumer: MediaEditorRenderer) { + self.output = consumer + self.update(forced: true) + } + + func invalidate() { + self.mainInputContext?.invalidate() + self.additionalInputContext?.invalidate() + } + + private class DisplayLinkTarget { + private let update: () -> Void + init(_ update: @escaping () -> Void) { + self.update = update + } + @objc func handleDisplayLinkUpdate(sender: CADisplayLink) { + self.update() + } + } +// +// private func setupDisplayLink(frameRate: Int) { +// self.displayLink?.invalidate() +// self.displayLink = nil +// +// if self.playerItemOutput != nil { + +// } +// } +} + +private protocol InputContext { + typealias Input = UniversalTextureSource.Input + typealias Output = MediaEditorRenderer.Input + + var input: Input { get } + func output(time: Double) -> Output? + + var needsDisplayLink: Bool { get } + + func invalidate() +} + +private class ImageInputContext: InputContext { + fileprivate var input: Input + private var texture: MTLTexture? 
+ + init(input: Input, renderTarget: RenderTarget, queue: DispatchQueue) { + guard case let .image(image) = input else { + fatalError() + } + self.input = input + if let device = renderTarget.mtlDevice { + self.texture = loadTexture(image: image, device: device) + } + } + + func output(time: Double) -> Output? { + return self.texture.flatMap { .texture($0, .zero) } + } + + func invalidate() { + self.texture = nil + } + + var needsDisplayLink: Bool { + return false + } +} + +private class VideoInputContext: NSObject, InputContext, AVPlayerItemOutputPullDelegate { + fileprivate var input: Input + private var videoOutput: AVPlayerItemVideoOutput? + private var textureRotation: TextureRotation = .rotate0Degrees + + var playerItem: AVPlayerItem { + guard case let .video(playerItem) = self.input else { + fatalError() + } + return playerItem + } + + init(input: Input, renderTarget: RenderTarget, queue: DispatchQueue, additional: Bool) { + guard case .video = input else { + fatalError() + } + self.input = input + super.init() + + //TODO: mirror if self.additionalPlayer == nil && self.mirror + self.textureRotation = textureRotatonForAVAsset(self.playerItem.asset, mirror: additional) + + let colorProperties: [String: Any] = [ + AVVideoColorPrimariesKey: AVVideoColorPrimaries_ITU_R_709_2, + AVVideoTransferFunctionKey: AVVideoTransferFunction_ITU_R_709_2, + AVVideoYCbCrMatrixKey: AVVideoYCbCrMatrix_ITU_R_709_2 + ] + + let outputSettings: [String: Any] = [ + kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_420YpCbCr8BiPlanarFullRange, + kCVPixelBufferMetalCompatibilityKey as String: true, + AVVideoColorPropertiesKey: colorProperties + ] + + let videoOutput = AVPlayerItemVideoOutput(outputSettings: outputSettings) + videoOutput.suppressesPlayerRendering = true + videoOutput.setDelegate(self, queue: queue) + self.playerItem.add(videoOutput) + self.videoOutput = videoOutput + } + + func output(time: Double) -> Output? 
{ + guard let videoOutput = self.videoOutput else { + return nil + } + let requestTime = videoOutput.itemTime(forHostTime: time) + if requestTime < .zero { + return nil + } + var presentationTime: CMTime = .zero + var videoPixelBuffer: VideoPixelBuffer? + if let pixelBuffer = videoOutput.copyPixelBuffer(forItemTime: requestTime, itemTimeForDisplay: &presentationTime) { + videoPixelBuffer = VideoPixelBuffer(pixelBuffer: pixelBuffer, rotation: self.textureRotation, timestamp: presentationTime) + } + return videoPixelBuffer.flatMap { .videoBuffer($0) } + } + + func invalidate() { + if let videoOutput = self.videoOutput { + self.videoOutput = nil + self.playerItem.remove(videoOutput) + videoOutput.setDelegate(nil, queue: nil) + } + } + + var needsDisplayLink: Bool { + return true + } +} diff --git a/submodules/TelegramUI/Components/MediaEditor/Sources/VideoFinishPass.swift b/submodules/TelegramUI/Components/MediaEditor/Sources/VideoFinishPass.swift new file mode 100644 index 0000000000..302857a8da --- /dev/null +++ b/submodules/TelegramUI/Components/MediaEditor/Sources/VideoFinishPass.swift @@ -0,0 +1,502 @@ +import Foundation +import AVFoundation +import Metal +import MetalKit + +private func verticesData( + textureRotation: TextureRotation, + containerSize: CGSize, + position: CGPoint, + size: CGSize, + rotation: CGFloat, + z: Float = 0.0 +) -> [VertexData] { + let topLeft: simd_float2 + let topRight: simd_float2 + let bottomLeft: simd_float2 + let bottomRight: simd_float2 + + switch textureRotation { + case .rotate0Degrees: + topLeft = simd_float2(0.0, 1.0) + topRight = simd_float2(1.0, 1.0) + bottomLeft = simd_float2(0.0, 0.0) + bottomRight = simd_float2(1.0, 0.0) + case .rotate0DegreesMirrored: + topLeft = simd_float2(1.0, 1.0) + topRight = simd_float2(0.0, 1.0) + bottomLeft = simd_float2(1.0, 0.0) + bottomRight = simd_float2(0.0, 0.0) + case .rotate180Degrees: + topLeft = simd_float2(1.0, 0.0) + topRight = simd_float2(0.0, 0.0) + bottomLeft = simd_float2(1.0, 
1.0) + bottomRight = simd_float2(0.0, 1.0) + case .rotate90Degrees: + topLeft = simd_float2(1.0, 1.0) + topRight = simd_float2(1.0, 0.0) + bottomLeft = simd_float2(0.0, 1.0) + bottomRight = simd_float2(0.0, 0.0) + case .rotate90DegreesMirrored: + topLeft = simd_float2(1.0, 0.0) + topRight = simd_float2(1.0, 1.0) + bottomLeft = simd_float2(0.0, 0.0) + bottomRight = simd_float2(0.0, 1.0) + case .rotate270Degrees: + topLeft = simd_float2(0.0, 0.0) + topRight = simd_float2(0.0, 1.0) + bottomLeft = simd_float2(1.0, 0.0) + bottomRight = simd_float2(1.0, 1.0) + } + + let angle = Float(.pi - rotation) + let cosAngle = cos(angle) + let sinAngle = sin(angle) + + let centerX = Float(position.x) + let centerY = Float(position.y) + + let halfWidth = Float(size.width / 2.0) + let halfHeight = Float(size.height / 2.0) + + return [ + VertexData( + pos: simd_float4( + x: (centerX + (halfWidth * cosAngle) - (halfHeight * sinAngle)) / Float(containerSize.width) * 2.0, + y: (centerY + (halfWidth * sinAngle) + (halfHeight * cosAngle)) / Float(containerSize.height) * 2.0, + z: z, + w: 1 + ), + texCoord: topLeft, + localPos: simd_float2(0.0, 0.0) + ), + VertexData( + pos: simd_float4( + x: (centerX - (halfWidth * cosAngle) - (halfHeight * sinAngle)) / Float(containerSize.width) * 2.0, + y: (centerY - (halfWidth * sinAngle) + (halfHeight * cosAngle)) / Float(containerSize.height) * 2.0, + z: z, + w: 1 + ), + texCoord: topRight, + localPos: simd_float2(1.0, 0.0) + ), + VertexData( + pos: simd_float4( + x: (centerX + (halfWidth * cosAngle) + (halfHeight * sinAngle)) / Float(containerSize.width) * 2.0, + y: (centerY + (halfWidth * sinAngle) - (halfHeight * cosAngle)) / Float(containerSize.height) * 2.0, + z: z, + w: 1 + ), + texCoord: bottomLeft, + localPos: simd_float2(0.0, 1.0) + ), + VertexData( + pos: simd_float4( + x: (centerX - (halfWidth * cosAngle) + (halfHeight * sinAngle)) / Float(containerSize.width) * 2.0, + y: (centerY - (halfWidth * sinAngle) - (halfHeight * cosAngle)) / 
Float(containerSize.height) * 2.0, + z: z, + w: 1 + ), + texCoord: bottomRight, + localPos: simd_float2(1.0, 1.0) + ) + ] +} + +private func lookupSpringValue(_ t: CGFloat) -> CGFloat { + let table: [(CGFloat, CGFloat)] = [ + (0.0, 0.0), + (0.0625, 0.1123005598783493), + (0.125, 0.31598418951034546), + (0.1875, 0.5103585720062256), + (0.25, 0.6650152802467346), + (0.3125, 0.777747631072998), + (0.375, 0.8557760119438171), + (0.4375, 0.9079672694206238), + (0.5, 0.942038357257843), + (0.5625, 0.9638798832893372), + (0.625, 0.9776856303215027), + (0.6875, 0.9863143563270569), + (0.75, 0.991658091545105), + (0.8125, 0.9949421286582947), + (0.875, 0.9969474077224731), + (0.9375, 0.9981651306152344), + (1.0, 1.0) + ] + + for i in 0 ..< table.count - 2 { + let lhs = table[i] + let rhs = table[i + 1] + + if t >= lhs.0 && t <= rhs.0 { + let fraction = (t - lhs.0) / (rhs.0 - lhs.0) + let value = lhs.1 + fraction * (rhs.1 - lhs.1) + return value + } + } + return 1.0 +} + +final class VideoFinishPass: RenderPass { + private var cachedTexture: MTLTexture? + + var mainPipelineState: MTLRenderPipelineState? + var mainVerticesBuffer: MTLBuffer? + var mainTextureRotation: TextureRotation = .rotate0Degrees + + var additionalVerticesBuffer: MTLBuffer? 
+ var additionalTextureRotation: TextureRotation = .rotate0Degrees + + var pixelFormat: MTLPixelFormat { + return .bgra8Unorm + } + + func setup(device: MTLDevice, library: MTLLibrary) { + let descriptor = MTLRenderPipelineDescriptor() + descriptor.vertexFunction = library.makeFunction(name: "defaultVertexShader") + descriptor.fragmentFunction = library.makeFunction(name: "dualFragmentShader") + descriptor.colorAttachments[0].pixelFormat = self.pixelFormat + descriptor.colorAttachments[0].isBlendingEnabled = true + descriptor.colorAttachments[0].rgbBlendOperation = .add + descriptor.colorAttachments[0].alphaBlendOperation = .add + descriptor.colorAttachments[0].sourceRGBBlendFactor = .sourceAlpha + descriptor.colorAttachments[0].sourceAlphaBlendFactor = .sourceAlpha + descriptor.colorAttachments[0].destinationRGBBlendFactor = .oneMinusSourceAlpha + descriptor.colorAttachments[0].destinationAlphaBlendFactor = .oneMinusSourceAlpha + + do { + self.mainPipelineState = try device.makeRenderPipelineState(descriptor: descriptor) + } catch { + print(error.localizedDescription) + } + } + + func encodeVideo( + using encoder: MTLRenderCommandEncoder, + containerSize: CGSize, + texture: MTLTexture, + textureRotation: TextureRotation, + position: VideoPosition, + roundness: Float, + alpha: Float, + zPosition: Float, + device: MTLDevice + ) { + encoder.setFragmentTexture(texture, index: 0) + + let center = CGPoint( + x: position.position.x - containerSize.width / 2.0, + y: containerSize.height - position.position.y - containerSize.height / 2.0 + ) + + let size = CGSize( + width: position.size.width * position.scale, + height: position.size.height * position.scale + ) + + let vertices = verticesData(textureRotation: textureRotation, containerSize: containerSize, position: center, size: size, rotation: position.rotation, z: zPosition) + let buffer = device.makeBuffer( + bytes: vertices, + length: MemoryLayout.stride * vertices.count, + options: []) + 
encoder.setVertexBuffer(buffer, offset: 0, index: 0) + + var resolution = simd_uint2(UInt32(size.width), UInt32(size.height)) + encoder.setFragmentBytes(&resolution, length: MemoryLayout.size * 2, index: 0) + + var roundness = roundness + encoder.setFragmentBytes(&roundness, length: MemoryLayout.size, index: 1) + + var alpha = alpha + encoder.setFragmentBytes(&alpha, length: MemoryLayout.size, index: 2) + + encoder.drawPrimitives(type: .triangleStrip, vertexStart: 0, vertexCount: 4) + } + + func update(values: MediaEditorValues) { + if let position = values.additionalVideoPosition, let scale = values.additionalVideoScale, let rotation = values.additionalVideoRotation { + self.additionalPosition = VideoFinishPass.VideoPosition(position: position, size: CGSize(width: 1080.0 / 4.0, height: 1440.0 / 4.0), scale: scale, rotation: rotation) + } + if !values.additionalVideoPositionChanges.isEmpty { + self.videoPositionChanges = values.additionalVideoPositionChanges + } + if let additionalVideoTrimRange = values.additionalVideoTrimRange { + self.additionalVideoRange = additionalVideoTrimRange + } + if let additionalVideoOffset = values.additionalVideoOffset { + self.additionalVideoOffset = additionalVideoOffset + } + } + + private var mainPosition = VideoPosition( + position: CGPoint(x: 1080 / 2.0, y: 1920.0 / 2.0), + size: CGSize(width: 1080.0, height: 1920.0), + scale: 1.0, + rotation: 0.0 + ) + + private var additionalPosition = VideoPosition( + position: CGPoint(x: 1080 / 2.0, y: 1920.0 / 2.0), + size: CGSize(width: 1440.0, height: 1920.0), + scale: 0.5, + rotation: 0.0 + ) + + private var transitionDuration = 0.5 + private var apperanceDuration = 0.2 + private var videoPositionChanges: [VideoPositionChange] = [] + private var additionalVideoRange: Range? + private var additionalVideoOffset: Double? 
+ + enum VideoType { + case main + case additional + case transition + } + + struct VideoPosition { + let position: CGPoint + let size: CGSize + let scale: CGFloat + let rotation: CGFloat + + func mixed(with other: VideoPosition, fraction: CGFloat) -> VideoPosition { + let position = CGPoint( + x: self.position.x + (other.position.x - self.position.x) * fraction, + y: self.position.y + (other.position.y - self.position.y) * fraction + ) + let size = CGSize( + width: self.size.width + (other.size.width - self.size.width) * fraction, + height: self.size.height + (other.size.height - self.size.height) * fraction + ) + let scale = self.scale + (other.scale - self.scale) * fraction + let rotation = self.rotation + (other.rotation - self.rotation) * fraction + + return VideoPosition( + position: position, + size: size, + scale: scale, + rotation: rotation + ) + } + } + + struct VideoState { + let texture: MTLTexture + let textureRotation: TextureRotation + let position: VideoPosition + let roundness: Float + let alpha: Float + } + + func transitionState(for time: CMTime, mainInput: MTLTexture, additionalInput: MTLTexture?) -> (VideoState, VideoState?, VideoState?) { + let timestamp = time.seconds + + var backgroundTexture = mainInput + var backgroundTextureRotation = self.mainTextureRotation + + var foregroundTexture = additionalInput + var foregroundTextureRotation = self.additionalTextureRotation + + var mainPosition = self.mainPosition + var additionalPosition = self.additionalPosition + var disappearingPosition = self.mainPosition + + var transitionFraction = 1.0 + if let additionalInput { + var previousChange: VideoPositionChange? 
+ for change in self.videoPositionChanges { + if timestamp >= change.timestamp { + previousChange = change + } + if timestamp < change.timestamp { + break + } + } + + if let previousChange { + if previousChange.additional { + backgroundTexture = additionalInput + backgroundTextureRotation = self.additionalTextureRotation + + mainPosition = VideoPosition(position: mainPosition.position, size: CGSize(width: 1440.0, height: 1920.0), scale: mainPosition.scale, rotation: mainPosition.rotation) + additionalPosition = VideoPosition(position: additionalPosition.position, size: CGSize(width: 1080.0 / 4.0, height: 1920.0 / 4.0), scale: additionalPosition.scale, rotation: additionalPosition.rotation) + + foregroundTexture = mainInput + foregroundTextureRotation = self.mainTextureRotation + } else { + disappearingPosition = VideoPosition(position: mainPosition.position, size: CGSize(width: 1440.0, height: 1920.0), scale: mainPosition.scale, rotation: mainPosition.rotation) + } + if previousChange.timestamp > 0.0 && timestamp < previousChange.timestamp + transitionDuration { + transitionFraction = (timestamp - previousChange.timestamp) / transitionDuration + } + } + } + + var backgroundVideoState = VideoState(texture: backgroundTexture, textureRotation: backgroundTextureRotation, position: mainPosition, roundness: 0.0, alpha: 1.0) + var foregroundVideoState: VideoState? + var disappearingVideoState: VideoState? 
+ + if let foregroundTexture { + var foregroundPosition = additionalPosition + var foregroundAlpha: Float = 1.0 + if transitionFraction < 1.0 { + let springFraction = lookupSpringValue(transitionFraction) + + let appearingPosition = VideoPosition(position: additionalPosition.position, size: additionalPosition.size, scale: 0.01, rotation: self.additionalPosition.rotation) + let backgroundInitialPosition = VideoPosition(position: additionalPosition.position, size: CGSize(width: mainPosition.size.width / 4.0, height: mainPosition.size.height / 4.0), scale: additionalPosition.scale, rotation: additionalPosition.rotation) + + foregroundPosition = appearingPosition.mixed(with: additionalPosition, fraction: springFraction) + + disappearingVideoState = VideoState(texture: foregroundTexture, textureRotation: foregroundTextureRotation, position: disappearingPosition, roundness: 0.0, alpha: 1.0) + backgroundVideoState = VideoState(texture: backgroundTexture, textureRotation: backgroundTextureRotation, position: backgroundInitialPosition.mixed(with: mainPosition, fraction: springFraction), roundness: Float(1.0 - springFraction), alpha: 1.0) + + foregroundAlpha = min(1.0, max(0.0, Float(transitionFraction) * 2.5)) + } + + var isVisible = true + + var trimRangeLowerBound: Double? + var trimRangeUpperBound: Double? 
+ if let additionalVideoRange = self.additionalVideoRange { + if let additionalVideoOffset = self.additionalVideoOffset { + trimRangeLowerBound = additionalVideoRange.lowerBound - additionalVideoOffset + trimRangeUpperBound = additionalVideoRange.upperBound - additionalVideoOffset + } else { + trimRangeLowerBound = additionalVideoRange.lowerBound + trimRangeUpperBound = additionalVideoRange.upperBound + } + } else if let additionalVideoOffset = self.additionalVideoOffset { + trimRangeLowerBound = -additionalVideoOffset + } + + if trimRangeLowerBound != nil || trimRangeUpperBound != nil { + let disappearingPosition = VideoPosition(position: foregroundPosition.position, size: foregroundPosition.size, scale: 0.01, rotation: foregroundPosition.rotation) + if let trimRangeLowerBound, trimRangeLowerBound > 0.0, timestamp < trimRangeLowerBound + apperanceDuration { + let visibilityFraction = max(0.0, min(1.0, (timestamp - trimRangeLowerBound) / apperanceDuration)) + if visibilityFraction.isZero { + isVisible = false + } + foregroundAlpha = Float(visibilityFraction) + foregroundPosition = disappearingPosition.mixed(with: foregroundPosition, fraction: visibilityFraction) + } else if let trimRangeUpperBound, timestamp > trimRangeUpperBound - apperanceDuration { + let visibilityFraction = 1.0 - max(0.0, min(1.0, (timestamp - trimRangeUpperBound) / apperanceDuration)) + if visibilityFraction.isZero { + isVisible = false + } + foregroundAlpha = Float(visibilityFraction) + foregroundPosition = disappearingPosition.mixed(with: foregroundPosition, fraction: visibilityFraction) + } + } + + if isVisible { + foregroundVideoState = VideoState(texture: foregroundTexture, textureRotation: foregroundTextureRotation, position: foregroundPosition, roundness: 1.0, alpha: foregroundAlpha) + } + } + + return (backgroundVideoState, foregroundVideoState, disappearingVideoState) + } + + func process(input: MTLTexture, secondInput: MTLTexture?, timestamp: CMTime, device: MTLDevice, commandBuffer: 
MTLCommandBuffer) -> MTLTexture? { + guard max(input.width, input.height) > 1920 || secondInput != nil else { + return input + } + + let scaledSize = CGSize(width: input.width, height: input.height).fitted(CGSize(width: 1920.0, height: 1920.0)) + let width: Int + let height: Int + + if secondInput != nil { + width = 1080 + height = 1920 + } else { + width = Int(scaledSize.width) + height = Int(scaledSize.height) + } + self.mainPosition = VideoPosition(position: CGPoint(x: width / 2, y: height / 2), size: CGSize(width: width, height: height), scale: 1.0, rotation: 0.0) + + let containerSize = CGSize(width: width, height: height) + + if self.cachedTexture == nil || self.cachedTexture?.width != width || self.cachedTexture?.height != height { + let textureDescriptor = MTLTextureDescriptor() + textureDescriptor.textureType = .type2D + textureDescriptor.width = width + textureDescriptor.height = height + textureDescriptor.pixelFormat = input.pixelFormat + textureDescriptor.storageMode = .private + textureDescriptor.usage = [.shaderRead, .shaderWrite, .renderTarget] + guard let texture = device.makeTexture(descriptor: textureDescriptor) else { + return input + } + self.cachedTexture = texture + texture.label = "scaledVideoTexture" + } + + let renderPassDescriptor = MTLRenderPassDescriptor() + renderPassDescriptor.colorAttachments[0].texture = self.cachedTexture! + renderPassDescriptor.colorAttachments[0].loadAction = .dontCare + renderPassDescriptor.colorAttachments[0].storeAction = .store + renderPassDescriptor.colorAttachments[0].clearColor = MTLClearColor(red: 0.0, green: 0.0, blue: 0.0, alpha: 1.0) + guard let renderCommandEncoder = commandBuffer.makeRenderCommandEncoder(descriptor: renderPassDescriptor) else { + return input + } + + renderCommandEncoder.setViewport(MTLViewport( + originX: 0, originY: 0, + width: Double(width), height: Double(height), + znear: -1.0, zfar: 1.0) + ) + + renderCommandEncoder.setRenderPipelineState(self.mainPipelineState!) 
+ + let (mainVideoState, additionalVideoState, transitionVideoState) = self.transitionState(for: timestamp, mainInput: input, additionalInput: secondInput) + + if let transitionVideoState { + self.encodeVideo( + using: renderCommandEncoder, + containerSize: containerSize, + texture: transitionVideoState.texture, + textureRotation: transitionVideoState.textureRotation, + position: transitionVideoState.position, + roundness: transitionVideoState.roundness, + alpha: transitionVideoState.alpha, + zPosition: 0.75, + device: device + ) + } + + self.encodeVideo( + using: renderCommandEncoder, + containerSize: containerSize, + texture: mainVideoState.texture, + textureRotation: mainVideoState.textureRotation, + position: mainVideoState.position, + roundness: mainVideoState.roundness, + alpha: mainVideoState.alpha, + zPosition: 0.0, + device: device + ) + + if let additionalVideoState { + self.encodeVideo( + using: renderCommandEncoder, + containerSize: containerSize, + texture: additionalVideoState.texture, + textureRotation: additionalVideoState.textureRotation, + position: additionalVideoState.position, + roundness: additionalVideoState.roundness, + alpha: additionalVideoState.alpha, + zPosition: 0.5, + device: device + ) + } + + renderCommandEncoder.endEncoding() + + return self.cachedTexture! + } + + func process(input: MTLTexture, device: MTLDevice, commandBuffer: MTLCommandBuffer) -> MTLTexture? { + return nil + } +} diff --git a/submodules/TelegramUI/Components/MediaEditor/Sources/VideoInputPass.swift b/submodules/TelegramUI/Components/MediaEditor/Sources/VideoInputPass.swift new file mode 100644 index 0000000000..98f284a011 --- /dev/null +++ b/submodules/TelegramUI/Components/MediaEditor/Sources/VideoInputPass.swift @@ -0,0 +1,86 @@ +import Foundation +import AVFoundation +import Metal +import MetalKit + +final class VideoInputPass: DefaultRenderPass { + private var cachedTexture: MTLTexture? 
+ + override var fragmentShaderFunctionName: String { + return "bt709ToRGBFragmentShader" + } + + override func setup(device: MTLDevice, library: MTLLibrary) { + super.setup(device: device, library: library) + } + + func processPixelBuffer(_ pixelBuffer: VideoPixelBuffer, textureCache: CVMetalTextureCache, device: MTLDevice, commandBuffer: MTLCommandBuffer) -> MTLTexture? { + func textureFromPixelBuffer(_ pixelBuffer: CVPixelBuffer, pixelFormat: MTLPixelFormat, width: Int, height: Int, plane: Int) -> MTLTexture? { + var textureRef : CVMetalTexture? + let status = CVMetalTextureCacheCreateTextureFromImage(nil, textureCache, pixelBuffer, nil, pixelFormat, width, height, plane, &textureRef) + if status == kCVReturnSuccess, let textureRef { + return CVMetalTextureGetTexture(textureRef) + } + return nil + } + + let width = CVPixelBufferGetWidth(pixelBuffer.pixelBuffer) + let height = CVPixelBufferGetHeight(pixelBuffer.pixelBuffer) + guard let inputYTexture = textureFromPixelBuffer(pixelBuffer.pixelBuffer, pixelFormat: .r8Unorm, width: width, height: height, plane: 0), + let inputCbCrTexture = textureFromPixelBuffer(pixelBuffer.pixelBuffer, pixelFormat: .rg8Unorm, width: width >> 1, height: height >> 1, plane: 1) else { + return nil + } + return self.process(yTexture: inputYTexture, cbcrTexture: inputCbCrTexture, width: width, height: height, rotation: pixelBuffer.rotation, device: device, commandBuffer: commandBuffer) + } + + func process(yTexture: MTLTexture, cbcrTexture: MTLTexture, width: Int, height: Int, rotation: TextureRotation, device: MTLDevice, commandBuffer: MTLCommandBuffer) -> MTLTexture? 
{ + self.setupVerticesBuffer(device: device, rotation: rotation) + + func textureDimensionsForRotation(width: Int, height: Int, rotation: TextureRotation) -> (width: Int, height: Int) { + switch rotation { + case .rotate90Degrees, .rotate270Degrees, .rotate90DegreesMirrored: + return (height, width) + default: + return (width, height) + } + } + + let (outputWidth, outputHeight) = textureDimensionsForRotation(width: width, height: height, rotation: rotation) + if self.cachedTexture == nil { + let textureDescriptor = MTLTextureDescriptor() + textureDescriptor.textureType = .type2D + textureDescriptor.width = outputWidth + textureDescriptor.height = outputHeight + textureDescriptor.pixelFormat = self.pixelFormat + textureDescriptor.storageMode = .private + textureDescriptor.usage = [.shaderRead, .shaderWrite, .renderTarget] + if let texture = device.makeTexture(descriptor: textureDescriptor) { + self.cachedTexture = texture + } + } + + let renderPassDescriptor = MTLRenderPassDescriptor() + renderPassDescriptor.colorAttachments[0].texture = self.cachedTexture! 
+ renderPassDescriptor.colorAttachments[0].loadAction = .dontCare + renderPassDescriptor.colorAttachments[0].storeAction = .store + renderPassDescriptor.colorAttachments[0].clearColor = MTLClearColor(red: 0.0, green: 0.0, blue: 0.0, alpha: 0.0) + guard let renderCommandEncoder = commandBuffer.makeRenderCommandEncoder(descriptor: renderPassDescriptor) else { + return nil + } + + renderCommandEncoder.setViewport(MTLViewport( + originX: 0, originY: 0, + width: Double(outputWidth), height: Double(outputHeight), + znear: -1.0, zfar: 1.0) + ) + + renderCommandEncoder.setFragmentTexture(yTexture, index: 0) + renderCommandEncoder.setFragmentTexture(cbcrTexture, index: 1) + + self.encodeDefaultCommands(using: renderCommandEncoder) + + renderCommandEncoder.endEncoding() + + return self.cachedTexture + } +} diff --git a/submodules/TelegramUI/Components/MediaEditor/Sources/VideoTextureSource.swift b/submodules/TelegramUI/Components/MediaEditor/Sources/VideoTextureSource.swift index 41599f10f3..9aa3fdd2ef 100644 --- a/submodules/TelegramUI/Components/MediaEditor/Sources/VideoTextureSource.swift +++ b/submodules/TelegramUI/Components/MediaEditor/Sources/VideoTextureSource.swift @@ -3,50 +3,27 @@ import AVFoundation import Metal import MetalKit -func textureRotatonForAVAsset(_ asset: AVAsset, mirror: Bool = false) -> TextureRotation { - for track in asset.tracks { - if track.mediaType == .video { - let t = track.preferredTransform - if t.a == -1.0 && t.d == -1.0 { - return .rotate180Degrees - } else if t.a == 1.0 && t.d == 1.0 { - return .rotate0Degrees - } else if t.b == -1.0 && t.c == 1.0 { - return .rotate270Degrees - } else if t.a == -1.0 && t.d == 1.0 { - return .rotate270Degrees - } else if t.a == 1.0 && t.d == -1.0 { - return .rotate180Degrees - } else { - return mirror ? 
.rotate90DegreesMirrored : .rotate90Degrees - } - } - } - return .rotate0Degrees -} - final class VideoTextureSource: NSObject, TextureSource, AVPlayerItemOutputPullDelegate { - private weak var player: AVPlayer? - private weak var additionalPlayer: AVPlayer? - private weak var playerItem: AVPlayerItem? - private weak var additionalPlayerItem: AVPlayerItem? + private let device: MTLDevice? + private var displayLink: CADisplayLink? private let mirror: Bool + private weak var player: AVPlayer? + private weak var playerItem: AVPlayerItem? private var playerItemOutput: AVPlayerItemVideoOutput? - private var additionalPlayerItemOutput: AVPlayerItemVideoOutput? - - private var displayLink: CADisplayLink? - - private let device: MTLDevice? private var textureRotation: TextureRotation = .rotate0Degrees - private var additionalTextureRotation: TextureRotation = .rotate0Degrees - - private var forceUpdate: Bool = false - weak var output: TextureConsumer? + private weak var additionalPlayer: AVPlayer? + private weak var additionalPlayerItem: AVPlayerItem? + private var additionalPlayerItemOutput: AVPlayerItemVideoOutput? + private var additionalTextureRotation: TextureRotation = .rotate0Degrees + + weak var output: MediaEditorRenderer? var queue: DispatchQueue! 
var started: Bool = false + + private var forceUpdate: Bool = false init(player: AVPlayer, additionalPlayer: AVPlayer?, mirror: Bool, renderTarget: RenderTarget) { self.player = player @@ -59,23 +36,14 @@ final class VideoTextureSource: NSObject, TextureSource, AVPlayerItemOutputPullD qos: .userInteractive, attributes: [], autoreleaseFrequency: .workItem, - target: nil) + target: nil + ) super.init() self.playerItem = player.currentItem self.additionalPlayerItem = additionalPlayer?.currentItem - self.handleReadyToPlay() - } - func invalidate() { - self.playerItemOutput?.setDelegate(nil, queue: nil) - self.playerItemOutput = nil - self.displayLink?.invalidate() - self.displayLink = nil - } - - private func handleReadyToPlay() { guard let playerItem = self.playerItem else { return } @@ -91,11 +59,49 @@ final class VideoTextureSource: NSObject, TextureSource, AVPlayerItemOutputPullD break } } - self.textureRotation = textureRotatonForAVAsset(playerItem.asset, mirror: additionalPlayer == nil && mirror) + if !hasVideoTrack { return } + self.textureRotation = textureRotatonForAVAsset(playerItem.asset, mirror: self.additionalPlayer == nil && self.mirror) + self.playerItemOutput = self.setupPlayerVideoOutput(playerItem: playerItem) + if let additionalPlayerItem = self.additionalPlayerItem { + self.additionalTextureRotation = textureRotatonForAVAsset(additionalPlayerItem.asset, mirror: true) + self.additionalPlayerItemOutput = self.setupPlayerVideoOutput(playerItem: additionalPlayerItem) + } + + self.setupDisplayLink(frameRate: min(60, frameRate)) + } + + func invalidate() { + self.playerItemOutput?.setDelegate(nil, queue: nil) + self.playerItemOutput = nil + self.additionalPlayerItemOutput?.setDelegate(nil, queue: nil) + self.additionalPlayerItemOutput = nil + self.displayLink?.invalidate() + self.displayLink = nil + } + + func setAdditionalPlayer(_ additionalPlayer: AVPlayer?) 
{ + self.additionalPlayer = additionalPlayer + self.additionalPlayerItem = additionalPlayer?.currentItem + + if let additionalPlayerItem = self.additionalPlayerItem { + self.additionalTextureRotation = textureRotatonForAVAsset(additionalPlayerItem.asset, mirror: true) + self.additionalPlayerItemOutput = self.setupPlayerVideoOutput(playerItem: additionalPlayerItem) + } else if let additionalPlayerItemOutput = self.additionalPlayerItemOutput { + self.additionalPlayerItemOutput = nil + additionalPlayerItemOutput.setDelegate(nil, queue: nil) + + if let additionalPlayerItem = self.additionalPlayerItem { + self.additionalPlayerItem = nil + additionalPlayerItem.remove(additionalPlayerItemOutput) + } + } + } + + private func setupPlayerVideoOutput(playerItem: AVPlayerItem) -> AVPlayerItemVideoOutput { let colorProperties: [String: Any] = [ AVVideoColorPrimariesKey: AVVideoColorPrimaries_ITU_R_709_2, AVVideoTransferFunctionKey: AVVideoTransferFunction_ITU_R_709_2, @@ -112,19 +118,7 @@ final class VideoTextureSource: NSObject, TextureSource, AVPlayerItemOutputPullD output.suppressesPlayerRendering = true output.setDelegate(self, queue: self.queue) playerItem.add(output) - self.playerItemOutput = output - - if let additionalPlayerItem = self.additionalPlayerItem { - self.additionalTextureRotation = textureRotatonForAVAsset(additionalPlayerItem.asset, mirror: true) - - let output = AVPlayerItemVideoOutput(outputSettings: outputSettings) - output.suppressesPlayerRendering = true - output.setDelegate(self, queue: self.queue) - additionalPlayerItem.add(output) - self.additionalPlayerItemOutput = output - } - - self.setupDisplayLink(frameRate: min(60, frameRate)) + return output } private class DisplayLinkTarget { @@ -193,7 +187,7 @@ final class VideoTextureSource: NSObject, TextureSource, AVPlayerItemOutputPullD } if let mainPixelBuffer { - self.output?.consumeVideoPixelBuffer(pixelBuffer: mainPixelBuffer, additionalPixelBuffer: additionalPixelBuffer, render: true) + 
self.output?.consume(main: .videoBuffer(mainPixelBuffer), additional: additionalPixelBuffer.flatMap { .videoBuffer($0) }, render: true) } } @@ -209,7 +203,7 @@ final class VideoTextureSource: NSObject, TextureSource, AVPlayerItemOutputPullD } } - func connect(to consumer: TextureConsumer) { + func connect(to consumer: MediaEditorRenderer) { self.output = consumer } @@ -217,536 +211,3 @@ final class VideoTextureSource: NSObject, TextureSource, AVPlayerItemOutputPullD self.displayLink?.isPaused = false } } - -final class VideoInputPass: DefaultRenderPass { - private var cachedTexture: MTLTexture? - - override var fragmentShaderFunctionName: String { - return "bt709ToRGBFragmentShader" - } - - override func setup(device: MTLDevice, library: MTLLibrary) { - super.setup(device: device, library: library) - } - - func processPixelBuffer(_ pixelBuffer: VideoPixelBuffer, textureCache: CVMetalTextureCache, device: MTLDevice, commandBuffer: MTLCommandBuffer) -> MTLTexture? { - func textureFromPixelBuffer(_ pixelBuffer: CVPixelBuffer, pixelFormat: MTLPixelFormat, width: Int, height: Int, plane: Int) -> MTLTexture? { - var textureRef : CVMetalTexture? 
- let status = CVMetalTextureCacheCreateTextureFromImage(nil, textureCache, pixelBuffer, nil, pixelFormat, width, height, plane, &textureRef) - if status == kCVReturnSuccess, let textureRef { - return CVMetalTextureGetTexture(textureRef) - } - return nil - } - - let width = CVPixelBufferGetWidth(pixelBuffer.pixelBuffer) - let height = CVPixelBufferGetHeight(pixelBuffer.pixelBuffer) - guard let inputYTexture = textureFromPixelBuffer(pixelBuffer.pixelBuffer, pixelFormat: .r8Unorm, width: width, height: height, plane: 0), - let inputCbCrTexture = textureFromPixelBuffer(pixelBuffer.pixelBuffer, pixelFormat: .rg8Unorm, width: width >> 1, height: height >> 1, plane: 1) else { - return nil - } - return self.process(yTexture: inputYTexture, cbcrTexture: inputCbCrTexture, width: width, height: height, rotation: pixelBuffer.rotation, device: device, commandBuffer: commandBuffer) - } - - func process(yTexture: MTLTexture, cbcrTexture: MTLTexture, width: Int, height: Int, rotation: TextureRotation, device: MTLDevice, commandBuffer: MTLCommandBuffer) -> MTLTexture? 
{ - self.setupVerticesBuffer(device: device, rotation: rotation) - - func textureDimensionsForRotation(width: Int, height: Int, rotation: TextureRotation) -> (width: Int, height: Int) { - switch rotation { - case .rotate90Degrees, .rotate270Degrees, .rotate90DegreesMirrored: - return (height, width) - default: - return (width, height) - } - } - - let (outputWidth, outputHeight) = textureDimensionsForRotation(width: width, height: height, rotation: rotation) - if self.cachedTexture == nil { - let textureDescriptor = MTLTextureDescriptor() - textureDescriptor.textureType = .type2D - textureDescriptor.width = outputWidth - textureDescriptor.height = outputHeight - textureDescriptor.pixelFormat = self.pixelFormat - textureDescriptor.storageMode = .private - textureDescriptor.usage = [.shaderRead, .shaderWrite, .renderTarget] - if let texture = device.makeTexture(descriptor: textureDescriptor) { - self.cachedTexture = texture - } - } - - let renderPassDescriptor = MTLRenderPassDescriptor() - renderPassDescriptor.colorAttachments[0].texture = self.cachedTexture! 
- renderPassDescriptor.colorAttachments[0].loadAction = .dontCare - renderPassDescriptor.colorAttachments[0].storeAction = .store - renderPassDescriptor.colorAttachments[0].clearColor = MTLClearColor(red: 0.0, green: 0.0, blue: 0.0, alpha: 0.0) - guard let renderCommandEncoder = commandBuffer.makeRenderCommandEncoder(descriptor: renderPassDescriptor) else { - return nil - } - - renderCommandEncoder.setViewport(MTLViewport( - originX: 0, originY: 0, - width: Double(outputWidth), height: Double(outputHeight), - znear: -1.0, zfar: 1.0) - ) - - renderCommandEncoder.setFragmentTexture(yTexture, index: 0) - renderCommandEncoder.setFragmentTexture(cbcrTexture, index: 1) - - self.encodeDefaultCommands(using: renderCommandEncoder) - - renderCommandEncoder.endEncoding() - - return self.cachedTexture - } -} - -private func verticesData( - textureRotation: TextureRotation, - containerSize: CGSize, - position: CGPoint, - size: CGSize, - rotation: CGFloat, - z: Float = 0.0 -) -> [VertexData] { - let topLeft: simd_float2 - let topRight: simd_float2 - let bottomLeft: simd_float2 - let bottomRight: simd_float2 - - switch textureRotation { - case .rotate0Degrees: - topLeft = simd_float2(0.0, 1.0) - topRight = simd_float2(1.0, 1.0) - bottomLeft = simd_float2(0.0, 0.0) - bottomRight = simd_float2(1.0, 0.0) - case .rotate0DegreesMirrored: - topLeft = simd_float2(1.0, 1.0) - topRight = simd_float2(0.0, 1.0) - bottomLeft = simd_float2(1.0, 0.0) - bottomRight = simd_float2(0.0, 0.0) - case .rotate180Degrees: - topLeft = simd_float2(1.0, 0.0) - topRight = simd_float2(0.0, 0.0) - bottomLeft = simd_float2(1.0, 1.0) - bottomRight = simd_float2(0.0, 1.0) - case .rotate90Degrees: - topLeft = simd_float2(1.0, 1.0) - topRight = simd_float2(1.0, 0.0) - bottomLeft = simd_float2(0.0, 1.0) - bottomRight = simd_float2(0.0, 0.0) - case .rotate90DegreesMirrored: - topLeft = simd_float2(1.0, 0.0) - topRight = simd_float2(1.0, 1.0) - bottomLeft = simd_float2(0.0, 0.0) - bottomRight = simd_float2(0.0, 1.0) 
- case .rotate270Degrees: - topLeft = simd_float2(0.0, 0.0) - topRight = simd_float2(0.0, 1.0) - bottomLeft = simd_float2(1.0, 0.0) - bottomRight = simd_float2(1.0, 1.0) - } - - let angle = Float(.pi - rotation) - let cosAngle = cos(angle) - let sinAngle = sin(angle) - - let centerX = Float(position.x) - let centerY = Float(position.y) - - let halfWidth = Float(size.width / 2.0) - let halfHeight = Float(size.height / 2.0) - - return [ - VertexData( - pos: simd_float4( - x: (centerX + (halfWidth * cosAngle) - (halfHeight * sinAngle)) / Float(containerSize.width) * 2.0, - y: (centerY + (halfWidth * sinAngle) + (halfHeight * cosAngle)) / Float(containerSize.height) * 2.0, - z: z, - w: 1 - ), - texCoord: topLeft, - localPos: simd_float2(0.0, 0.0) - ), - VertexData( - pos: simd_float4( - x: (centerX - (halfWidth * cosAngle) - (halfHeight * sinAngle)) / Float(containerSize.width) * 2.0, - y: (centerY - (halfWidth * sinAngle) + (halfHeight * cosAngle)) / Float(containerSize.height) * 2.0, - z: z, - w: 1 - ), - texCoord: topRight, - localPos: simd_float2(1.0, 0.0) - ), - VertexData( - pos: simd_float4( - x: (centerX + (halfWidth * cosAngle) + (halfHeight * sinAngle)) / Float(containerSize.width) * 2.0, - y: (centerY + (halfWidth * sinAngle) - (halfHeight * cosAngle)) / Float(containerSize.height) * 2.0, - z: z, - w: 1 - ), - texCoord: bottomLeft, - localPos: simd_float2(0.0, 1.0) - ), - VertexData( - pos: simd_float4( - x: (centerX - (halfWidth * cosAngle) + (halfHeight * sinAngle)) / Float(containerSize.width) * 2.0, - y: (centerY - (halfWidth * sinAngle) - (halfHeight * cosAngle)) / Float(containerSize.height) * 2.0, - z: z, - w: 1 - ), - texCoord: bottomRight, - localPos: simd_float2(1.0, 1.0) - ) - ] -} - -private func lookupSpringValue(_ t: CGFloat) -> CGFloat { - let table: [(CGFloat, CGFloat)] = [ - (0.0, 0.0), - (0.0625, 0.1123005598783493), - (0.125, 0.31598418951034546), - (0.1875, 0.5103585720062256), - (0.25, 0.6650152802467346), - (0.3125, 0.777747631072998), 
- (0.375, 0.8557760119438171), - (0.4375, 0.9079672694206238), - (0.5, 0.942038357257843), - (0.5625, 0.9638798832893372), - (0.625, 0.9776856303215027), - (0.6875, 0.9863143563270569), - (0.75, 0.991658091545105), - (0.8125, 0.9949421286582947), - (0.875, 0.9969474077224731), - (0.9375, 0.9981651306152344), - (1.0, 1.0) - ] - - for i in 0 ..< table.count - 2 { - let lhs = table[i] - let rhs = table[i + 1] - - if t >= lhs.0 && t <= rhs.0 { - let fraction = (t - lhs.0) / (rhs.0 - lhs.0) - let value = lhs.1 + fraction * (rhs.1 - lhs.1) - return value - } - } - return 1.0 -} - -final class VideoInputScalePass: RenderPass { - private var cachedTexture: MTLTexture? - - var mainPipelineState: MTLRenderPipelineState? - var mainVerticesBuffer: MTLBuffer? - var mainTextureRotation: TextureRotation = .rotate0Degrees - - var additionalVerticesBuffer: MTLBuffer? - var additionalTextureRotation: TextureRotation = .rotate0Degrees - - var pixelFormat: MTLPixelFormat { - return .bgra8Unorm - } - - func setup(device: MTLDevice, library: MTLLibrary) { - let descriptor = MTLRenderPipelineDescriptor() - descriptor.vertexFunction = library.makeFunction(name: "defaultVertexShader") - descriptor.fragmentFunction = library.makeFunction(name: "dualFragmentShader") - descriptor.colorAttachments[0].pixelFormat = self.pixelFormat - descriptor.colorAttachments[0].isBlendingEnabled = true - descriptor.colorAttachments[0].rgbBlendOperation = .add - descriptor.colorAttachments[0].alphaBlendOperation = .add - descriptor.colorAttachments[0].sourceRGBBlendFactor = .sourceAlpha - descriptor.colorAttachments[0].sourceAlphaBlendFactor = .sourceAlpha - descriptor.colorAttachments[0].destinationRGBBlendFactor = .oneMinusSourceAlpha - descriptor.colorAttachments[0].destinationAlphaBlendFactor = .oneMinusSourceAlpha - - do { - self.mainPipelineState = try device.makeRenderPipelineState(descriptor: descriptor) - } catch { - print(error.localizedDescription) - } - } - - func encodeVideo( - using encoder: 
MTLRenderCommandEncoder, - containerSize: CGSize, - texture: MTLTexture, - textureRotation: TextureRotation, - position: VideoPosition, - roundness: Float, - alpha: Float, - zPosition: Float, - device: MTLDevice - ) { - encoder.setFragmentTexture(texture, index: 0) - - let center = CGPoint( - x: position.position.x - containerSize.width / 2.0, - y: containerSize.height - position.position.y - containerSize.height / 2.0 - ) - - let size = CGSize( - width: position.size.width * position.scale, - height: position.size.height * position.scale - ) - - let vertices = verticesData(textureRotation: textureRotation, containerSize: containerSize, position: center, size: size, rotation: position.rotation, z: zPosition) - let buffer = device.makeBuffer( - bytes: vertices, - length: MemoryLayout.stride * vertices.count, - options: []) - encoder.setVertexBuffer(buffer, offset: 0, index: 0) - - var resolution = simd_uint2(UInt32(size.width), UInt32(size.height)) - encoder.setFragmentBytes(&resolution, length: MemoryLayout.size * 2, index: 0) - - var roundness = roundness - encoder.setFragmentBytes(&roundness, length: MemoryLayout.size, index: 1) - - var alpha = alpha - encoder.setFragmentBytes(&alpha, length: MemoryLayout.size, index: 2) - - encoder.drawPrimitives(type: .triangleStrip, vertexStart: 0, vertexCount: 4) - } - - func update(values: MediaEditorValues) { - if let position = values.additionalVideoPosition, let scale = values.additionalVideoScale, let rotation = values.additionalVideoRotation { - self.additionalPosition = VideoInputScalePass.VideoPosition(position: position, size: CGSize(width: 1080.0 / 4.0, height: 1440.0 / 4.0), scale: scale, rotation: rotation) - } - if !values.additionalVideoPositionChanges.isEmpty { - self.videoPositionChanges = values.additionalVideoPositionChanges - } - } - - private var mainPosition = VideoPosition( - position: CGPoint(x: 1080 / 2.0, y: 1920.0 / 2.0), - size: CGSize(width: 1080.0, height: 1920.0), - scale: 1.0, - rotation: 0.0 - 
) - - private var additionalPosition = VideoPosition( - position: CGPoint(x: 1080 / 2.0, y: 1920.0 / 2.0), - size: CGSize(width: 1440.0, height: 1920.0), - scale: 0.5, - rotation: 0.0 - ) - - private var transitionDuration = 0.5 - private var videoPositionChanges: [VideoPositionChange] = [] - - enum VideoType { - case main - case additional - case transition - } - - struct VideoPosition { - let position: CGPoint - let size: CGSize - let scale: CGFloat - let rotation: CGFloat - - func mixed(with other: VideoPosition, fraction: CGFloat) -> VideoPosition { - let position = CGPoint( - x: self.position.x + (other.position.x - self.position.x) * fraction, - y: self.position.y + (other.position.y - self.position.y) * fraction - ) - let size = CGSize( - width: self.size.width + (other.size.width - self.size.width) * fraction, - height: self.size.height + (other.size.height - self.size.height) * fraction - ) - let scale = self.scale + (other.scale - self.scale) * fraction - let rotation = self.rotation + (other.rotation - self.rotation) * fraction - - return VideoPosition( - position: position, - size: size, - scale: scale, - rotation: rotation - ) - } - } - - struct VideoState { - let texture: MTLTexture - let textureRotation: TextureRotation - let position: VideoPosition - let roundness: Float - let alpha: Float - } - - func transitionState(for time: CMTime, mainInput: MTLTexture, additionalInput: MTLTexture?) -> (VideoState, VideoState?, VideoState?) { - let timestamp = time.seconds - - var backgroundTexture = mainInput - var backgroundTextureRotation = self.mainTextureRotation - - var foregroundTexture = additionalInput - var foregroundTextureRotation = self.additionalTextureRotation - - var mainPosition = self.mainPosition - var additionalPosition = self.additionalPosition - var disappearingPosition = self.mainPosition - - var transitionFraction = 1.0 - if let additionalInput { - var previousChange: VideoPositionChange? 
- for change in self.videoPositionChanges { - if timestamp >= change.timestamp { - previousChange = change - } - if timestamp < change.timestamp { - break - } - } - - if let previousChange { - if previousChange.additional { - backgroundTexture = additionalInput - backgroundTextureRotation = self.additionalTextureRotation - - mainPosition = VideoPosition(position: mainPosition.position, size: CGSize(width: 1440.0, height: 1920.0), scale: mainPosition.scale, rotation: mainPosition.rotation) - additionalPosition = VideoPosition(position: additionalPosition.position, size: CGSize(width: 1080.0 / 4.0, height: 1920.0 / 4.0), scale: additionalPosition.scale, rotation: additionalPosition.rotation) - - foregroundTexture = mainInput - foregroundTextureRotation = self.mainTextureRotation - } else { - disappearingPosition = VideoPosition(position: mainPosition.position, size: CGSize(width: 1440.0, height: 1920.0), scale: mainPosition.scale, rotation: mainPosition.rotation) - } - if previousChange.timestamp > 0.0 && timestamp < previousChange.timestamp + transitionDuration { - transitionFraction = (timestamp - previousChange.timestamp) / transitionDuration - } - } - } - - var backgroundVideoState = VideoState(texture: backgroundTexture, textureRotation: backgroundTextureRotation, position: mainPosition, roundness: 0.0, alpha: 1.0) - var foregroundVideoState: VideoState? - var disappearingVideoState: VideoState? 
- - if let foregroundTexture { - var foregroundPosition = additionalPosition - var foregroundAlpha: Float = 1.0 - if transitionFraction < 1.0 { - let springFraction = lookupSpringValue(transitionFraction) - - let appearingPosition = VideoPosition(position: additionalPosition.position, size: additionalPosition.size, scale: 0.01, rotation: self.additionalPosition.rotation) - let backgroundInitialPosition = VideoPosition(position: additionalPosition.position, size: CGSize(width: mainPosition.size.width / 4.0, height: mainPosition.size.height / 4.0), scale: additionalPosition.scale, rotation: additionalPosition.rotation) - - foregroundPosition = appearingPosition.mixed(with: additionalPosition, fraction: springFraction) - - disappearingVideoState = VideoState(texture: foregroundTexture, textureRotation: foregroundTextureRotation, position: disappearingPosition, roundness: 0.0, alpha: 1.0) - backgroundVideoState = VideoState(texture: backgroundTexture, textureRotation: backgroundTextureRotation, position: backgroundInitialPosition.mixed(with: mainPosition, fraction: springFraction), roundness: Float(1.0 - springFraction), alpha: 1.0) - - foregroundAlpha = min(1.0, max(0.0, Float(transitionFraction) * 2.5)) - } - foregroundVideoState = VideoState(texture: foregroundTexture, textureRotation: foregroundTextureRotation, position: foregroundPosition, roundness: 1.0, alpha: foregroundAlpha) - } - - return (backgroundVideoState, foregroundVideoState, disappearingVideoState) - } - - func process(input: MTLTexture, secondInput: MTLTexture?, timestamp: CMTime, device: MTLDevice, commandBuffer: MTLCommandBuffer) -> MTLTexture? 
{ - guard max(input.width, input.height) > 1920 || secondInput != nil else { - return input - } - - let scaledSize = CGSize(width: input.width, height: input.height).fitted(CGSize(width: 1920.0, height: 1920.0)) - let width: Int - let height: Int - - if secondInput != nil { - width = 1080 - height = 1920 - } else { - width = Int(scaledSize.width) - height = Int(scaledSize.height) - } - self.mainPosition = VideoPosition(position: CGPoint(x: width / 2, y: height / 2), size: CGSize(width: width, height: height), scale: 1.0, rotation: 0.0) - - let containerSize = CGSize(width: width, height: height) - - if self.cachedTexture == nil || self.cachedTexture?.width != width || self.cachedTexture?.height != height { - let textureDescriptor = MTLTextureDescriptor() - textureDescriptor.textureType = .type2D - textureDescriptor.width = width - textureDescriptor.height = height - textureDescriptor.pixelFormat = input.pixelFormat - textureDescriptor.storageMode = .private - textureDescriptor.usage = [.shaderRead, .shaderWrite, .renderTarget] - guard let texture = device.makeTexture(descriptor: textureDescriptor) else { - return input - } - self.cachedTexture = texture - texture.label = "scaledVideoTexture" - } - - let renderPassDescriptor = MTLRenderPassDescriptor() - renderPassDescriptor.colorAttachments[0].texture = self.cachedTexture! - renderPassDescriptor.colorAttachments[0].loadAction = .dontCare - renderPassDescriptor.colorAttachments[0].storeAction = .store - renderPassDescriptor.colorAttachments[0].clearColor = MTLClearColor(red: 0.0, green: 0.0, blue: 0.0, alpha: 1.0) - guard let renderCommandEncoder = commandBuffer.makeRenderCommandEncoder(descriptor: renderPassDescriptor) else { - return input - } - - renderCommandEncoder.setViewport(MTLViewport( - originX: 0, originY: 0, - width: Double(width), height: Double(height), - znear: -1.0, zfar: 1.0) - ) - - renderCommandEncoder.setRenderPipelineState(self.mainPipelineState!) 
- - let (mainVideoState, additionalVideoState, transitionVideoState) = self.transitionState(for: timestamp, mainInput: input, additionalInput: secondInput) - - if let transitionVideoState { - self.encodeVideo( - using: renderCommandEncoder, - containerSize: containerSize, - texture: transitionVideoState.texture, - textureRotation: transitionVideoState.textureRotation, - position: transitionVideoState.position, - roundness: transitionVideoState.roundness, - alpha: transitionVideoState.alpha, - zPosition: 0.75, - device: device - ) - } - - self.encodeVideo( - using: renderCommandEncoder, - containerSize: containerSize, - texture: mainVideoState.texture, - textureRotation: mainVideoState.textureRotation, - position: mainVideoState.position, - roundness: mainVideoState.roundness, - alpha: mainVideoState.alpha, - zPosition: 0.0, - device: device - ) - - if let additionalVideoState { - self.encodeVideo( - using: renderCommandEncoder, - containerSize: containerSize, - texture: additionalVideoState.texture, - textureRotation: additionalVideoState.textureRotation, - position: additionalVideoState.position, - roundness: additionalVideoState.roundness, - alpha: additionalVideoState.alpha, - zPosition: 0.5, - device: device - ) - } - - renderCommandEncoder.endEncoding() - - return self.cachedTexture! - } - - func process(input: MTLTexture, device: MTLDevice, commandBuffer: MTLCommandBuffer) -> MTLTexture? 
{ - return nil - } -} diff --git a/submodules/TelegramUI/Components/MediaEditorScreen/BUILD b/submodules/TelegramUI/Components/MediaEditorScreen/BUILD index e9c288f13f..1753e68c67 100644 --- a/submodules/TelegramUI/Components/MediaEditorScreen/BUILD +++ b/submodules/TelegramUI/Components/MediaEditorScreen/BUILD @@ -47,6 +47,7 @@ swift_library( "//submodules/TelegramUI/Components/AudioWaveformComponent", "//submodules/ReactionSelectionNode", "//submodules/TelegramUI/Components/VolumeSliderContextItem", + "//submodules/TelegramUI/Components/Stories/ForwardInfoPanelComponent" ], visibility = [ "//visibility:public", diff --git a/submodules/TelegramUI/Components/MediaEditorScreen/Sources/FlipButtonContentComponent.swift b/submodules/TelegramUI/Components/MediaEditorScreen/Sources/FlipButtonContentComponent.swift new file mode 100644 index 0000000000..ea71fcc02d --- /dev/null +++ b/submodules/TelegramUI/Components/MediaEditorScreen/Sources/FlipButtonContentComponent.swift @@ -0,0 +1,75 @@ +import Foundation +import UIKit +import Display +import ComponentFlow + +final class FlipButtonContentComponent: Component { + init() { + + } + + static func ==(lhs: FlipButtonContentComponent, rhs: FlipButtonContentComponent) -> Bool { + return lhs === rhs + } + + final class View: UIView { + private var component: FlipButtonContentComponent? 
+ + private let backgroundView: BlurredBackgroundView + private let icon = SimpleLayer() + + init() { + self.backgroundView = BlurredBackgroundView(color: UIColor(white: 0.0, alpha: 0.5), enableBlur: true) + + super.init(frame: CGRect()) + + self.addSubview(self.backgroundView) + self.layer.addSublayer(self.icon) + + self.icon.contents = UIImage(bundleImageName: "Camera/FlipIcon")?.withRenderingMode(.alwaysTemplate).cgImage + } + + required init?(coder aDecoder: NSCoder) { + preconditionFailure() + } + + func playAnimation() { + let animation = CASpringAnimation(keyPath: "transform.rotation.z") + animation.fromValue = 0.0 as NSNumber + animation.toValue = CGFloat.pi as NSNumber + animation.mass = 5.0 + animation.stiffness = 900.0 + animation.damping = 90.0 + animation.duration = animation.settlingDuration + if #available(iOS 15.0, *) { + let maxFps = Float(UIScreen.main.maximumFramesPerSecond) + animation.preferredFrameRateRange = CAFrameRateRange(minimum: 30.0, maximum: maxFps, preferred: maxFps) + } + self.icon.add(animation, forKey: "transform.rotation.z") + } + + func update(component: FlipButtonContentComponent, availableSize: CGSize, transition: Transition) -> CGSize { + self.component = component + + let size = CGSize(width: 48.0, height: 48.0) + let backgroundFrame = CGRect(x: 4.0, y: 4.0, width: 40.0, height: 40.0) + + self.icon.layerTintColor = UIColor.white.cgColor + self.icon.position = CGPoint(x: size.width / 2.0, y: size.height / 2.0) + self.icon.bounds = CGRect(origin: .zero, size: size) + + self.backgroundView.frame = backgroundFrame + self.backgroundView.update(size: backgroundFrame.size, cornerRadius: backgroundFrame.width / 2.0, transition: .immediate) + + return size + } + } + + func makeView() -> View { + return View() + } + + func update(view: View, availableSize: CGSize, state: EmptyComponentState, environment: Environment, transition: Transition) -> CGSize { + return view.update(component: self, availableSize: availableSize, transition: 
transition) + } +} diff --git a/submodules/TelegramUI/Components/MediaEditorScreen/Sources/MediaEditorDrafts.swift b/submodules/TelegramUI/Components/MediaEditorScreen/Sources/MediaEditorDrafts.swift new file mode 100644 index 0000000000..3c6dec501e --- /dev/null +++ b/submodules/TelegramUI/Components/MediaEditorScreen/Sources/MediaEditorDrafts.swift @@ -0,0 +1,135 @@ +import Foundation +import UIKit +import Display +import CoreLocation +import Photos +import TelegramCore +import AccountContext +import MediaEditor +import DrawingUI + +extension MediaEditorScreen { + func isEligibleForDraft() -> Bool { + if self.isEditingStory { + return false + } + guard let mediaEditor = self.node.mediaEditor else { + return false + } + let entities = self.node.entitiesView.entities.filter { !($0 is DrawingMediaEntity) } + let codableEntities = DrawingEntitiesView.encodeEntities(entities, entitiesView: self.node.entitiesView) + mediaEditor.setDrawingAndEntities(data: nil, image: mediaEditor.values.drawing, entities: codableEntities) + + let caption = self.getCaption() + + if let subject = self.node.subject, case .asset = subject, self.node.mediaEditor?.values.hasChanges == false && caption.string.isEmpty { + return false + } + return true + } + + func saveDraft(id: Int64?) { + guard let subject = self.node.subject, let mediaEditor = self.node.mediaEditor else { + return + } + try? FileManager.default.createDirectory(atPath: draftPath(engine: self.context.engine), withIntermediateDirectories: true) + + let values = mediaEditor.values + let privacy = self.state.privacy + let caption = self.getCaption() + let duration = mediaEditor.duration ?? 0.0 + + let currentTimestamp = Int32(CFAbsoluteTimeGetCurrent() + kCFAbsoluteTimeIntervalSince1970) + var timestamp: Int32 + var location: CLLocationCoordinate2D? + let expiresOn: Int32 + if case let .draft(draft, _) = subject { + timestamp = draft.timestamp + location = draft.location + if let _ = id { + expiresOn = draft.expiresOn ?? 
currentTimestamp + 3600 * 24 * 7 + } else { + expiresOn = currentTimestamp + 3600 * 24 * 7 + } + } else { + timestamp = currentTimestamp + if case let .asset(asset) = subject { + location = asset.location?.coordinate + } + if let _ = id { + expiresOn = currentTimestamp + Int32(self.state.privacy.timeout) + } else { + expiresOn = currentTimestamp + 3600 * 24 * 7 + } + } + + if let resultImage = mediaEditor.resultImage { + mediaEditor.seek(0.0, andPlay: false) + makeEditorImageComposition(context: self.node.ciContext, postbox: self.context.account.postbox, inputImage: resultImage, dimensions: storyDimensions, values: values, time: .zero, textScale: 2.0, completion: { resultImage in + guard let resultImage else { + return + } + let fittedSize = resultImage.size.aspectFitted(CGSize(width: 128.0, height: 128.0)) + + let context = self.context + let saveImageDraft: (UIImage, PixelDimensions) -> Void = { image, dimensions in + if let thumbnailImage = generateScaledImage(image: resultImage, size: fittedSize) { + let path = "\(Int64.random(in: .min ... .max)).jpg" + if let data = image.jpegData(compressionQuality: 0.87) { + let draft = MediaEditorDraft(path: path, isVideo: false, thumbnail: thumbnailImage, dimensions: dimensions, duration: nil, values: values, caption: caption, privacy: privacy, timestamp: timestamp, location: location, expiresOn: expiresOn) + try? data.write(to: URL(fileURLWithPath: draft.fullPath(engine: context.engine))) + if let id { + saveStorySource(engine: context.engine, item: draft, peerId: context.account.peerId, id: id) + } else { + addStoryDraft(engine: context.engine, item: draft) + } + } + } + } + + let saveVideoDraft: (String, PixelDimensions, Double) -> Void = { videoPath, dimensions, duration in + if let thumbnailImage = generateScaledImage(image: resultImage, size: fittedSize) { + let path = "\(Int64.random(in: .min ... 
.max)).mp4" + let draft = MediaEditorDraft(path: path, isVideo: true, thumbnail: thumbnailImage, dimensions: dimensions, duration: duration, values: values, caption: caption, privacy: privacy, timestamp: timestamp, location: location, expiresOn: expiresOn) + try? FileManager.default.copyItem(atPath: videoPath, toPath: draft.fullPath(engine: context.engine)) + if let id { + saveStorySource(engine: context.engine, item: draft, peerId: context.account.peerId, id: id) + } else { + addStoryDraft(engine: context.engine, item: draft) + } + } + } + + switch subject { + case let .image(image, dimensions, _, _): + saveImageDraft(image, dimensions) + case let .video(path, _, _, _, _, dimensions, _, _, _): + saveVideoDraft(path, dimensions, duration) + case let .asset(asset): + if asset.mediaType == .video { + PHImageManager.default().requestAVAsset(forVideo: asset, options: nil) { avAsset, _, _ in + if let urlAsset = avAsset as? AVURLAsset { + saveVideoDraft(urlAsset.url.relativePath, PixelDimensions(width: Int32(asset.pixelWidth), height: Int32(asset.pixelHeight)), duration) + } + } + } else { + let options = PHImageRequestOptions() + options.deliveryMode = .highQualityFormat + PHImageManager.default().requestImage(for: asset, targetSize: PHImageManagerMaximumSize, contentMode: .default, options: options) { image, _ in + if let image { + saveImageDraft(image, PixelDimensions(image.size)) + } + } + } + case let .draft(draft, _): + if draft.isVideo { + saveVideoDraft(draft.fullPath(engine: context.engine), draft.dimensions, draft.duration ?? 
0.0) + } else if let image = UIImage(contentsOfFile: draft.fullPath(engine: context.engine)) { + saveImageDraft(image, draft.dimensions) + } + removeStoryDraft(engine: self.context.engine, path: draft.path, delete: false) + } + }) + } + } +} diff --git a/submodules/TelegramUI/Components/MediaEditorScreen/Sources/MediaEditorRecording.swift b/submodules/TelegramUI/Components/MediaEditorScreen/Sources/MediaEditorRecording.swift new file mode 100644 index 0000000000..efc92716b9 --- /dev/null +++ b/submodules/TelegramUI/Components/MediaEditorScreen/Sources/MediaEditorRecording.swift @@ -0,0 +1,101 @@ +import Foundation +import UIKit +import Display +import MediaEditor +import DrawingUI +import ChatPresentationInterfaceState +import PresentationDataUtils +import TelegramPresentationData + +extension MediaEditorScreen { + final class Recording { + private weak var controller: MediaEditorScreen? + + private var recorder: EntityVideoRecorder? + + init(controller: MediaEditorScreen) { + self.controller = controller + } + + func setMediaRecordingActive(_ isActive: Bool, finished: Bool) { + guard let controller, let mediaEditor = controller.node.mediaEditor else { + return + } + let entitiesView = controller.node.entitiesView + if mediaEditor.values.additionalVideoPath != nil { + let presentationData = controller.context.sharedContext.currentPresentationData.with { $0 } + let alertController = textAlertController( + context: controller.context, + forceTheme: defaultDarkColorPresentationTheme, + title: nil, + text: "Are you sure you want to delete video message?", + actions: [ + TextAlertAction(type: .genericAction, title: presentationData.strings.Common_Cancel, action: { + }), + TextAlertAction(type: .destructiveAction, title: presentationData.strings.Common_Delete, action: { [weak mediaEditor, weak entitiesView] in + mediaEditor?.setAdditionalVideo(nil, positionChanges: []) + if let entityView = entitiesView?.getView(where: { entityView in + if let entity = entityView.entity 
as? DrawingStickerEntity, entity.content == .dualVideoReference { + return true + } else { + return false + } + }) { + entitiesView?.remove(uuid: entityView.entity.uuid, animated: false) + } + }) + ] + ) + controller.present(alertController, in: .window(.root)) + return + } + + if isActive { + guard self.recorder == nil else { + return + } + let recorder = EntityVideoRecorder(mediaEditor: mediaEditor, entitiesView: controller.node.entitiesView) + recorder.setup( + referenceDrawingSize: storyDimensions, + scale: 1.625, + position: PIPPosition.topRight.getPosition(storyDimensions) + ) + recorder.onAutomaticStop = { [weak self] in + if let self { + self.recorder = nil + self.controller?.node.requestLayout(forceUpdate: true, transition: .easeInOut(duration: 0.2)) + } + } + self.recorder = recorder + controller.node.requestLayout(forceUpdate: true, transition: .easeInOut(duration: 0.2)) + } else if let recorder = self.recorder { + recorder.stopRecording(save: finished, completion: { [weak self] in + guard let self else { + return + } + self.recorder = nil + self.controller?.node.requestLayout(forceUpdate: true, transition: .easeInOut(duration: 0.2)) + }) + + controller.node.requestLayout(forceUpdate: true, transition: .easeInOut(duration: 0.2)) + } + } + + func togglePosition() { + if let recorder = self.recorder { + recorder.togglePosition() + } + } + + var status: InstantVideoControllerRecordingStatus? 
{ + if let recorder = self.recorder { + return InstantVideoControllerRecordingStatus( + micLevel: recorder.micLevel, + duration: recorder.duration + ) + } else { + return nil + } + } + } +} diff --git a/submodules/TelegramUI/Components/MediaEditorScreen/Sources/MediaEditorScreen.swift b/submodules/TelegramUI/Components/MediaEditorScreen/Sources/MediaEditorScreen.swift index 20f197313f..916fe83ac4 100644 --- a/submodules/TelegramUI/Components/MediaEditorScreen/Sources/MediaEditorScreen.swift +++ b/submodules/TelegramUI/Components/MediaEditorScreen/Sources/MediaEditorScreen.swift @@ -37,12 +37,7 @@ import LegacyMediaPickerUI import ReactionSelectionNode import VolumeSliderContextItem import TelegramStringFormatting - -enum DrawingScreenType { - case drawing - case text - case sticker -} +import ForwardInfoPanelComponent private let playbackButtonTag = GenericComponentViewTag() private let muteButtonTag = GenericComponentViewTag() @@ -58,6 +53,12 @@ final class MediaEditorScreenComponent: Component { } } + enum DrawingScreenType { + case drawing + case text + case sticker + } + let context: AccountContext let externalState: ExternalState let isDisplayingTool: Bool @@ -260,6 +261,8 @@ final class MediaEditorScreenComponent: Component { private let muteButton = ComponentView() private let saveButton = ComponentView() + private let switchCameraButton = ComponentView() + private let textCancelButton = ComponentView() private let textDoneButton = ComponentView() private let textSize = ComponentView() @@ -276,13 +279,15 @@ final class MediaEditorScreenComponent: Component { private var inputMediaNodeStateContext = ChatEntityKeyboardInputNode.StateContext() private var inputMediaInteraction: ChatEntityKeyboardInputNode.Interaction? private var inputMediaNode: ChatEntityKeyboardInputNode? - - private var appliedAudioData: VideoScrubberComponent.AudioData? + + private var videoRecorder: EntityVideoRecorder? private var component: MediaEditorScreenComponent? 
private weak var state: State? private var environment: ViewControllerComponentContainer.Environment? + private var currentVisibleTracks: [MediaScrubberComponent.Track]? + override init(frame: CGRect) { super.init(frame: frame) @@ -572,29 +577,24 @@ final class MediaEditorScreenComponent: Component { if let view = self.cancelButton.view { view.alpha = 0.0 } - let buttons = [ self.drawButton, self.textButton, self.stickerButton, self.toolsButton ] - for button in buttons { if let view = button.view { view.layer.animatePosition(from: .zero, to: CGPoint(x: 0.0, y: -44.0), duration: 0.3, timingFunction: kCAMediaTimingFunctionSpring, additive: true) view.layer.animateScale(from: 1.0, to: 0.1, duration: 0.2) } } - if let view = self.doneButton.view { transition.setScale(view: view, scale: 0.1) } - if let view = self.inputPanel.view { view.layer.animateScale(from: 1.0, to: 0.1, duration: 0.2) } - if let view = self.scrubber.view { view.layer.animateScale(from: 1.0, to: 0.1, duration: 0.2) } @@ -604,33 +604,27 @@ final class MediaEditorScreenComponent: Component { if let view = self.cancelButton.view { view.alpha = 1.0 } - if let buttonView = self.cancelButton.view as? Button.View, let view = buttonView.content as? 
LottieAnimationComponent.View { view.playOnce() } - let buttons = [ self.drawButton, self.textButton, self.stickerButton, self.toolsButton ] - for button in buttons { if let view = button.view { view.layer.animatePosition(from: CGPoint(x: 0.0, y: -44.0), to: .zero, duration: 0.3, timingFunction: kCAMediaTimingFunctionSpring, additive: true) view.layer.animateScale(from: 0.1, to: 1.0, duration: 0.2) } } - if let view = self.doneButton.view { transition.setScale(view: view, scale: 1.0) } - if let view = self.inputPanel.view { view.layer.animateScale(from: 0.0, to: 1.0, duration: 0.2) } - if let view = self.scrubber.view { view.layer.animateScale(from: 0.0, to: 1.0, duration: 0.2) } @@ -653,6 +647,9 @@ final class MediaEditorScreenComponent: Component { return availableSize } let environment = environment[ViewControllerComponentContainer.Environment.self].value + guard let controller = environment.controller() as? MediaEditorScreen else { + return availableSize + } self.environment = environment var transition = transition @@ -661,17 +658,16 @@ final class MediaEditorScreenComponent: Component { transition = transition.withUserData(nextTransitionUserData) } - var isEditingStory = false - if let controller = environment.controller() as? 
MediaEditorScreen { - isEditingStory = controller.isEditingStory - if self.component == nil { - if let initialCaption = controller.initialCaption { - self.inputPanelExternalState.initialText = initialCaption - } else if case let .draft(draft, _) = controller.node.subject { - self.inputPanelExternalState.initialText = draft.caption - } + let isEditingStory = controller.isEditingStory + if self.component == nil { + if let initialCaption = controller.initialCaption { + self.inputPanelExternalState.initialText = initialCaption + } else if case let .draft(draft, _) = controller.node.subject { + self.inputPanelExternalState.initialText = draft.caption } } + + let isRecordingAdditionalVideo = controller.node.recording.status != nil self.component = component self.state = state @@ -701,6 +697,10 @@ final class MediaEditorScreenComponent: Component { } } + let topButtonsAlpha: CGFloat = isRecordingAdditionalVideo ? 0.3 : 1.0 + let bottomButtonsAlpha: CGFloat = isRecordingAdditionalVideo ? 0.3 : 1.0 + let buttonsAreHidden = component.isDisplayingTool || component.isDismissing || component.isInteractingWithEntities + let cancelButtonSize = self.cancelButton.update( transition: transition, component: AnyComponent(Button( @@ -735,14 +735,10 @@ final class MediaEditorScreenComponent: Component { } transition.setPosition(view: cancelButtonView, position: cancelButtonFrame.center) transition.setBounds(view: cancelButtonView, bounds: CGRect(origin: .zero, size: cancelButtonFrame.size)) - transition.setAlpha(view: cancelButtonView, alpha: component.isDisplayingTool || component.isDismissing || component.isInteractingWithEntities ? 0.0 : 1.0) - } - - var doneButtonTitle = environment.strings.Story_Editor_Next - if let controller = environment.controller() as? MediaEditorScreen, controller.isEditingStory { - doneButtonTitle = environment.strings.Story_Editor_Done + transition.setAlpha(view: cancelButtonView, alpha: buttonsAreHidden ? 
0.0 : bottomButtonsAlpha) } + let doneButtonTitle = isEditingStory ? environment.strings.Story_Editor_Done : environment.strings.Story_Editor_Next let doneButtonSize = self.doneButton.update( transition: transition, component: AnyComponent(PlainButtonComponent( @@ -782,7 +778,7 @@ final class MediaEditorScreenComponent: Component { } transition.setPosition(view: doneButtonView, position: doneButtonFrame.center) transition.setBounds(view: doneButtonView, bounds: CGRect(origin: .zero, size: doneButtonFrame.size)) - transition.setAlpha(view: doneButtonView, alpha: component.isDisplayingTool || component.isDismissing || component.isInteractingWithEntities ? 0.0 : 1.0) + transition.setAlpha(view: doneButtonView, alpha: buttonsAreHidden ? 0.0 : bottomButtonsAlpha) } let buttonsAvailableWidth: CGFloat @@ -820,7 +816,7 @@ final class MediaEditorScreenComponent: Component { transition.setPosition(view: drawButtonView, position: drawButtonFrame.center) transition.setBounds(view: drawButtonView, bounds: CGRect(origin: .zero, size: drawButtonFrame.size)) if !self.animatingButtons { - transition.setAlpha(view: drawButtonView, alpha: component.isDisplayingTool || component.isDismissing || component.isInteractingWithEntities ? 0.0 : 1.0) + transition.setAlpha(view: drawButtonView, alpha: buttonsAreHidden ? 0.0 : bottomButtonsAlpha) } } @@ -849,7 +845,7 @@ final class MediaEditorScreenComponent: Component { transition.setPosition(view: textButtonView, position: textButtonFrame.center) transition.setBounds(view: textButtonView, bounds: CGRect(origin: .zero, size: textButtonFrame.size)) if !self.animatingButtons { - transition.setAlpha(view: textButtonView, alpha: component.isDisplayingTool || component.isDismissing || component.isInteractingWithEntities ? 0.0 : 1.0) + transition.setAlpha(view: textButtonView, alpha: buttonsAreHidden ? 
0.0 : bottomButtonsAlpha) } } @@ -878,7 +874,7 @@ final class MediaEditorScreenComponent: Component { transition.setPosition(view: stickerButtonView, position: stickerButtonFrame.center) transition.setBounds(view: stickerButtonView, bounds: CGRect(origin: .zero, size: stickerButtonFrame.size)) if !self.animatingButtons { - transition.setAlpha(view: stickerButtonView, alpha: component.isDisplayingTool || component.isDismissing || component.isInteractingWithEntities ? 0.0 : 1.0) + transition.setAlpha(view: stickerButtonView, alpha: buttonsAreHidden ? 0.0 : bottomButtonsAlpha) } } @@ -907,42 +903,12 @@ final class MediaEditorScreenComponent: Component { transition.setPosition(view: toolsButtonView, position: toolsButtonFrame.center) transition.setBounds(view: toolsButtonView, bounds: CGRect(origin: .zero, size: toolsButtonFrame.size)) if !self.animatingButtons { - transition.setAlpha(view: toolsButtonView, alpha: component.isDisplayingTool || component.isDismissing || component.isInteractingWithEntities ? 0.0 : 1.0) + transition.setAlpha(view: toolsButtonView, alpha: buttonsAreHidden ? 0.0 : bottomButtonsAlpha) } } - var mediaEditor: MediaEditor? - if let controller = environment.controller() as? MediaEditorScreen { - mediaEditor = controller.node.mediaEditor - } - let previousAudioData = self.appliedAudioData - var audioData: VideoScrubberComponent.AudioData? - if let audioTrack = mediaEditor?.values.audioTrack { - let artist = audioTrack.artist - var title = audioTrack.title - if artist == nil && title == nil { - if let underscoreIndex = audioTrack.path.firstIndex(of: "_"), let dotIndex = audioTrack.path.lastIndex(of: ".") { - title = String(audioTrack.path[audioTrack.path.index(after: underscoreIndex)..? 
+ if let (forwardAuthor, forwardStory) = controller.forwardSource, !forwardStory.text.isEmpty { + let authorName = forwardAuthor.displayTitle(strings: environment.strings, displayOrder: .firstLast) + header = AnyComponent( + ForwardInfoPanelComponent( + authorName: authorName, + text: forwardStory.text, + isChannel: forwardAuthor.id.isGroupOrChannel, + isVibrant: true + ) + ) + } + let nextInputMode: MessageInputPanelComponent.InputMode switch self.currentInputMode { case .text: @@ -1070,6 +1049,16 @@ final class MediaEditorScreenComponent: Component { nextInputMode = .emoji } + var canRecordVideo = true + if let subject = controller.node.subject { + if case let .video(_, _, _, additionalPath, _, _, _, _, _) = subject, additionalPath != nil { + canRecordVideo = false + } + } + if "".isEmpty { + canRecordVideo = false + } + self.inputPanel.parentState = state let inputPanelSize = self.inputPanel.update( transition: transition, @@ -1086,14 +1075,14 @@ final class MediaEditorScreenComponent: Component { resetInputContents: nil, nextInputMode: { _ in return nextInputMode }, areVoiceMessagesAvailable: false, - presentController: { [weak self] c in - guard let self, let _ = self.component, let environment = self.environment, let controller = environment.controller() as? MediaEditorScreen else { + presentController: { [weak controller] c in + guard let controller else { return } controller.present(c, in: .window(.root)) }, - presentInGlobalOverlay: {[weak self] c in - guard let self, let _ = self.component, let environment = self.environment, let controller = environment.controller() as? 
MediaEditorScreen else { + presentInGlobalOverlay: { [weak controller] c in + guard let controller else { return } controller.presentInGlobalOverlay(c) @@ -1106,9 +1095,21 @@ final class MediaEditorScreenComponent: Component { }, sendMessageOptionsAction: nil, sendStickerAction: { _ in }, - setMediaRecordingActive: nil, - lockMediaRecording: nil, - stopAndPreviewMediaRecording: nil, + setMediaRecordingActive: canRecordVideo ? { [weak controller] isActive, _, finished in + guard let controller else { + return + } + controller.node.recording.setMediaRecordingActive(isActive, finished: finished) + } : nil, + lockMediaRecording: { + + }, + stopAndPreviewMediaRecording: { [weak controller] in + guard let controller else { + return + } + controller.node.recording.setMediaRecordingActive(false, finished: true) + }, discardMediaRecordingPreview: nil, attachmentAction: nil, myReaction: nil, @@ -1131,8 +1132,8 @@ final class MediaEditorScreenComponent: Component { } } }, - timeoutAction: isEditingStory ? nil : { [weak self] view, gesture in - guard let self, let controller = self.environment?.controller() as? MediaEditorScreen else { + timeoutAction: isEditingStory ? nil : { [weak controller] view, gesture in + guard let controller else { return } let context = controller.context @@ -1150,20 +1151,20 @@ final class MediaEditorScreenComponent: Component { forwardAction: nil, moreAction: nil, presentVoiceMessagesUnavailableTooltip: nil, - presentTextLengthLimitTooltip: { [weak self] in - guard let self, let controller = self.environment?.controller() as? MediaEditorScreen else { + presentTextLengthLimitTooltip: { [weak controller] in + guard let controller else { return } - controller.presentCaptionLimitPremiumSuggestion(isPremium: self.state?.isPremium ?? false) + controller.presentCaptionLimitPremiumSuggestion(isPremium: controller.context.isPremium) }, - presentTextFormattingTooltip: { [weak self] in - guard let self, let controller = self.environment?.controller() as? 
MediaEditorScreen else { + presentTextFormattingTooltip: { [weak controller] in + guard let controller else { return } controller.presentCaptionEntitiesPremiumSuggestion() }, - paste: { [weak self] data in - guard let self, let environment = self.environment, let controller = environment.controller() as? MediaEditorScreen else { + paste: { [weak self, weak controller] data in + guard let self, let controller else { return } switch data { @@ -1191,8 +1192,9 @@ final class MediaEditorScreenComponent: Component { } }, audioRecorder: nil, - videoRecordingStatus: nil, + videoRecordingStatus: controller.node.recording.status, isRecordingLocked: false, + hasRecordedVideo: mediaEditor?.values.additionalVideoPath != nil, recordedAudioPreview: nil, hasRecordedVideoPreview: false, wasRecordingDismissed: false, @@ -1205,6 +1207,7 @@ final class MediaEditorScreenComponent: Component { customInputView: nil, forceIsEditing: self.currentInputMode == .emoji, disabledPlaceholder: nil, + header: header, isChannel: false, storyItem: nil, chatLocation: nil @@ -1213,13 +1216,9 @@ final class MediaEditorScreenComponent: Component { containerSize: CGSize(width: inputPanelAvailableWidth, height: inputPanelAvailableHeight) ) - if self.inputPanelExternalState.isEditing { - if let controller = self.environment?.controller() as? MediaEditorScreen { - if controller.node.entitiesView.hasSelection { - Queue.mainQueue().justDispatch { - controller.node.entitiesView.selectEntity(nil) - } - } + if self.inputPanelExternalState.isEditing && controller.node.entitiesView.hasSelection { + Queue.mainQueue().justDispatch { + controller.node.entitiesView.selectEntity(nil) } } @@ -1243,9 +1242,7 @@ final class MediaEditorScreenComponent: Component { self.isEditingCaption = isEditingCaption if isEditingCaption { - if let controller = environment.controller() as? 
MediaEditorScreen { - controller.dismissAllTooltips() - } + controller.dismissAllTooltips() mediaEditor?.stop() } else { mediaEditor?.play() @@ -1291,16 +1288,12 @@ final class MediaEditorScreenComponent: Component { transition.setAlpha(view: inputPanelView, alpha: isEditingTextEntity || component.isDisplayingTool || component.isDismissing || component.isInteractingWithEntities ? 0.0 : 1.0) } - var bottomControlsTransition = transition if let playerState = state.playerState { let scrubberInset: CGFloat = 9.0 - if (audioData == nil) != (previousAudioData == nil) { - bottomControlsTransition = .easeInOut(duration: 0.25) - } let minDuration: Double let maxDuration: Double - if let mediaEditor, !mediaEditor.sourceIsVideo { + if playerState.isAudioOnly { minDuration = 5.0 maxDuration = 15.0 } else { @@ -1308,72 +1301,86 @@ final class MediaEditorScreenComponent: Component { maxDuration = storyMaxVideoDuration } - let isAudioOnly = mediaEditor?.sourceIsVideo == false + let previousTrackCount = self.currentVisibleTracks?.count + let visibleTracks = playerState.tracks.filter { $0.visibleInTimeline }.map { MediaScrubberComponent.Track($0) } + self.currentVisibleTracks = visibleTracks + + var scrubberTransition = transition + if let previousTrackCount, previousTrackCount != visibleTracks.count { + scrubberTransition = .easeInOut(duration: 0.2) + } + + let isAudioOnly = playerState.isAudioOnly let scrubberSize = self.scrubber.update( - transition: transition, - component: AnyComponent(VideoScrubberComponent( + transition: scrubberTransition, + component: AnyComponent(MediaScrubberComponent( context: component.context, generationTimestamp: playerState.generationTimestamp, - audioOnly: isAudioOnly, - duration: playerState.duration, - startPosition: playerState.timeRange?.lowerBound ?? 0.0, - endPosition: playerState.timeRange?.upperBound ?? 
min(playerState.duration, storyMaxVideoDuration), position: playerState.position, minDuration: minDuration, maxDuration: maxDuration, isPlaying: playerState.isPlaying, - frames: playerState.frames, - framesUpdateTimestamp: playerState.framesUpdateTimestamp, - audioData: audioData, - videoTrimUpdated: { [weak mediaEditor] start, end, updatedEnd, done in + tracks: visibleTracks, + positionUpdated: { [weak mediaEditor] position, apply in if let mediaEditor { - mediaEditor.setVideoTrimRange(start.. 0 { + controller.node.presentTrackOptions(trackId: trackId, sourceView: sourceView) } } )), @@ -1420,10 +1432,10 @@ final class MediaEditorScreenComponent: Component { if animateIn { scrubberView.frame = scrubberFrame } else { - bottomControlsTransition.setFrame(view: scrubberView, frame: scrubberFrame) + scrubberTransition.setFrame(view: scrubberView, frame: scrubberFrame) } - if !self.animatingButtons && !(isAudioOnly && animateIn) { - transition.setAlpha(view: scrubberView, alpha: component.isDisplayingTool || component.isDismissing || component.isInteractingWithEntities || isEditingCaption ? 0.0 : 1.0) + if !self.animatingButtons && !(playerState.isAudioOnly && animateIn) { + transition.setAlpha(view: scrubberView, alpha: component.isDisplayingTool || component.isDismissing || component.isInteractingWithEntities || isEditingCaption || isRecordingAdditionalVideo || isEditingTextEntity ? 
0.0 : 1.0) } else if animateIn { scrubberView.layer.animatePosition(from: CGPoint(x: 0.0, y: 44.0), to: .zero, duration: 0.3, timingFunction: kCAMediaTimingFunctionSpring, additive: true) scrubberView.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2) @@ -1495,14 +1507,11 @@ final class MediaEditorScreenComponent: Component { ) if let saveButtonView = self.saveButton.view { if saveButtonView.superview == nil { - saveButtonView.layer.shadowOffset = CGSize(width: 0.0, height: 0.0) - saveButtonView.layer.shadowRadius = 2.0 - saveButtonView.layer.shadowColor = UIColor.black.cgColor - saveButtonView.layer.shadowOpacity = 0.35 + setupButtonShadow(saveButtonView) self.addSubview(saveButtonView) } - let saveButtonAlpha = component.isSavingAvailable ? 1.0 : 0.3 + let saveButtonAlpha = component.isSavingAvailable ? topButtonsAlpha : 0.3 saveButtonView.isUserInteractionEnabled = component.isSavingAvailable transition.setPosition(view: saveButtonView, position: saveButtonFrame.center) @@ -1569,10 +1578,7 @@ final class MediaEditorScreenComponent: Component { ) if let muteButtonView = self.muteButton.view { if muteButtonView.superview == nil { - muteButtonView.layer.shadowOffset = CGSize(width: 0.0, height: 0.0) - muteButtonView.layer.shadowRadius = 2.0 - muteButtonView.layer.shadowColor = UIColor.black.cgColor - muteButtonView.layer.shadowOpacity = 0.35 + setupButtonShadow(muteButtonView) self.addSubview(muteButtonView) muteButtonView.layer.animateAlpha(from: 0.0, to: muteButtonView.alpha, duration: self.animatingButtons ? 0.1 : 0.2) @@ -1581,7 +1587,7 @@ final class MediaEditorScreenComponent: Component { transition.setPosition(view: muteButtonView, position: muteButtonFrame.center) transition.setBounds(view: muteButtonView, bounds: CGRect(origin: .zero, size: muteButtonFrame.size)) transition.setScale(view: muteButtonView, scale: displayTopButtons ? 
1.0 : 0.01) - transition.setAlpha(view: muteButtonView, alpha: displayTopButtons && !component.isDismissing && !component.isInteractingWithEntities ? 1.0 : 0.0) + transition.setAlpha(view: muteButtonView, alpha: displayTopButtons && !component.isDismissing && !component.isInteractingWithEntities ? topButtonsAlpha : 0.0) } topButtonOffsetX += 50.0 @@ -1603,7 +1609,7 @@ final class MediaEditorScreenComponent: Component { LottieAnimationComponent( animation: LottieAnimationComponent.AnimationItem( name: "anim_storyplayback", - mode: state.playbackDidChange ? .animating(loop: false) : .still(position: .end), // : .still(position: .begin), + mode: state.playbackDidChange ? .animating(loop: false) : .still(position: .end), range: playerState.isPlaying ? (0.5, 1.0) : (0.0, 0.5) ), colors: ["__allcolors__": .white], @@ -1643,10 +1649,7 @@ final class MediaEditorScreenComponent: Component { ) if let playbackButtonView = self.playbackButton.view { if playbackButtonView.superview == nil { - playbackButtonView.layer.shadowOffset = CGSize(width: 0.0, height: 0.0) - playbackButtonView.layer.shadowRadius = 2.0 - playbackButtonView.layer.shadowColor = UIColor.black.cgColor - playbackButtonView.layer.shadowOpacity = 0.35 + setupButtonShadow(playbackButtonView) self.addSubview(playbackButtonView) playbackButtonView.layer.animateAlpha(from: 0.0, to: playbackButtonView.alpha, duration: self.animatingButtons ? 0.1 : 0.2) @@ -1655,7 +1658,7 @@ final class MediaEditorScreenComponent: Component { transition.setPosition(view: playbackButtonView, position: playbackButtonFrame.center) transition.setBounds(view: playbackButtonView, bounds: CGRect(origin: .zero, size: playbackButtonFrame.size)) transition.setScale(view: playbackButtonView, scale: displayTopButtons ? 1.0 : 0.01) - transition.setAlpha(view: playbackButtonView, alpha: displayTopButtons && !component.isDismissing && !component.isInteractingWithEntities ? 
1.0 : 0.0) + transition.setAlpha(view: playbackButtonView, alpha: displayTopButtons && !component.isDismissing && !component.isInteractingWithEntities ? topButtonsAlpha : 0.0) } } else { if let playbackButtonView = self.playbackButton.view, playbackButtonView.superview != nil { @@ -1666,14 +1669,43 @@ final class MediaEditorScreenComponent: Component { } } + let switchCameraButtonSize = self.switchCameraButton.update( + transition: transition, + component: AnyComponent(Button( + content: AnyComponent( + FlipButtonContentComponent() + ), + action: { [weak self] in + if let self, let environment = self.environment, let controller = environment.controller() as? MediaEditorScreen { + controller.node.recording.togglePosition() + } + } + )), + environment: {}, + containerSize: CGSize(width: 48.0, height: 48.0) + ) + let switchCameraButtonFrame = CGRect( + origin: CGPoint(x: 12.0, y: max(environment.statusBarHeight + 10.0, inputPanelFrame.minY - switchCameraButtonSize.height - 22.0)), + size: switchCameraButtonSize + ) + if let switchCameraButtonView = self.switchCameraButton.view { + if switchCameraButtonView.superview == nil { + self.addSubview(switchCameraButtonView) + } + transition.setPosition(view: switchCameraButtonView, position: switchCameraButtonFrame.center) + transition.setBounds(view: switchCameraButtonView, bounds: CGRect(origin: .zero, size: switchCameraButtonFrame.size)) + transition.setScale(view: switchCameraButtonView, scale: isRecordingAdditionalVideo ? 1.0 : 0.01) + transition.setAlpha(view: switchCameraButtonView, alpha: isRecordingAdditionalVideo ? 1.0 : 0.0) + } + let textCancelButtonSize = self.textCancelButton.update( transition: transition, component: AnyComponent(Button( content: AnyComponent( Text(text: environment.strings.Common_Cancel, font: Font.regular(17.0), color: .white) ), - action: { - if let controller = environment.controller() as? 
MediaEditorScreen { + action: { [weak self] in + if let self, let environment = self.environment, let controller = environment.controller() as? MediaEditorScreen { controller.node.interaction?.endTextEditing(reset: true) } } @@ -1701,8 +1733,8 @@ final class MediaEditorScreenComponent: Component { content: AnyComponent( Text(text: environment.strings.Common_Done, font: Font.regular(17.0), color: .white) ), - action: { - if let controller = environment.controller() as? MediaEditorScreen { + action: { [weak self] in + if let self, let environment = self.environment, let controller = environment.controller() as? MediaEditorScreen { controller.node.interaction?.endTextEditing(reset: false) } } @@ -1729,12 +1761,13 @@ final class MediaEditorScreenComponent: Component { component: AnyComponent(TextSizeSliderComponent( value: sizeValue ?? 0.5, tag: nil, - updated: { [weak state] size in - if let controller = environment.controller() as? MediaEditorScreen { + updated: { [weak self] size in + if let self, let environment = self.environment, let controller = environment.controller() as? 
MediaEditorScreen { controller.node.interaction?.updateEntitySize(size) - state?.updated() + self.state?.updated() } - }, released: { + }, + released: { } )), environment: {}, @@ -1770,8 +1803,8 @@ final class MediaEditorScreenComponent: Component { } } -private let storyDimensions = CGSize(width: 1080.0, height: 1920.0) -private let storyMaxVideoDuration: Double = 60.0 +let storyDimensions = CGSize(width: 1080.0, height: 1920.0) +let storyMaxVideoDuration: Double = 60.0 public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate { public enum TransitionIn { @@ -1829,13 +1862,13 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate } } - fileprivate final class Node: ViewControllerTracingNode, UIGestureRecognizerDelegate { + final class Node: ViewControllerTracingNode, UIGestureRecognizerDelegate { private weak var controller: MediaEditorScreen? private let context: AccountContext fileprivate var interaction: DrawingToolsInteraction? private let initializationTimestamp = CACurrentMediaTime() - fileprivate var subject: MediaEditorScreen.Subject? + var subject: MediaEditorScreen.Subject? private var subjectDisposable: Disposable? private var appInForegroundDisposable: Disposable? private var wasPlaying = false @@ -1854,14 +1887,14 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate private var gradientColorsDisposable: Disposable? fileprivate let entitiesContainerView: UIView - fileprivate let entitiesView: DrawingEntitiesView + let entitiesView: DrawingEntitiesView fileprivate let selectionContainerView: DrawingSelectionContainerView fileprivate let drawingView: DrawingView fileprivate let previewView: MediaEditorPreviewView - fileprivate var mediaEditor: MediaEditor? + var mediaEditor: MediaEditor? 
fileprivate var mediaEditorPromise = Promise() - fileprivate let ciContext = CIContext(options: [.workingColorSpace : NSNull()]) + let ciContext = CIContext(options: [.workingColorSpace : NSNull()]) private let stickerPickerInputData = Promise() @@ -1884,6 +1917,9 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate private var playbackPositionDisposable: Disposable? + + var recording: MediaEditorScreen.Recording + private var presentationData: PresentationData private var validLayout: ContainerViewLayout? @@ -1929,6 +1965,8 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate self.selectionContainerView = DrawingSelectionContainerView(frame: .zero) self.entitiesView.selectionContainerView = self.selectionContainerView + self.recording = MediaEditorScreen.Recording(controller: controller) + super.init() self.backgroundColor = .clear @@ -2061,7 +2099,7 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate let isSavingAvailable: Bool switch subject { case .image, .video: - isSavingAvailable = !controller.isEditingStory + isSavingAvailable = !controller.isEmbeddedEditor isFromCamera = true case .draft: isSavingAvailable = true @@ -2183,12 +2221,11 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate self.gradientColorsDisposable = mediaEditor.gradientColors.start(next: { [weak self] colors in if let self, let colors { - let (topColor, bottomColor) = colors - let gradientImage = generateGradientImage(size: CGSize(width: 5.0, height: 640.0), colors: [topColor, bottomColor], locations: [0.0, 1.0]) + let gradientImage = generateGradientImage(size: CGSize(width: 5.0, height: 640.0), colors: colors.array, locations: [0.0, 1.0]) Queue.mainQueue().async { self.gradientView.image = gradientImage - if self.controller?.isEditingStory == true { + if self.controller?.isEmbeddedEditor == true { } else { self.previewContainerView.alpha = 1.0 @@ -2209,7 +2246,7 @@ 
public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate self.mediaEditor = mediaEditor self.mediaEditorPromise.set(.single(mediaEditor)) - if controller.isEditingStory == true { + if controller.isEmbeddedEditor == true { mediaEditor.onFirstDisplay = { [weak self] in if let self { if subject.isPhoto { @@ -2410,7 +2447,7 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate } if gestureRecognizer === self.dismissPanGestureRecognizer { let location = gestureRecognizer.location(in: self.entitiesView) - if self.controller?.isEditingStory == true || self.isDisplayingTool || self.entitiesView.hasSelection || self.entitiesView.getView(at: location) != nil { + if self.controller?.isEmbeddedEditor == true || self.isDisplayingTool || self.entitiesView.hasSelection || self.entitiesView.getView(at: location) != nil { return false } return true @@ -2834,7 +2871,7 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate completion() }) } else { - if controller.isEditingStory { + if controller.isEmbeddedEditor { if let view = self.componentHost.view as? MediaEditorScreenComponent.View { view.animateOut(to: .gallery) } @@ -3296,8 +3333,11 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate }), in: .window(.root)) } - func presentAudioOptions(sourceView: UIView) { + func presentTrackOptions(trackId: Int32, sourceView: UIView) { let value = self.mediaEditor?.values.audioTrackVolume ?? 1.0 + + let actionTitle: String = trackId == 0 || trackId == 2 ? 
self.presentationData.strings.MediaEditor_RemoveAudio : self.presentationData.strings.MediaEditor_RemoveVideo + let items: [ContextMenuItem] = [ .custom(VolumeSliderContextItem(minValue: 0.0, value: value, valueChanged: { [weak self] value, _ in if let self { @@ -3306,16 +3346,29 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate }), false), .action( ContextMenuActionItem( - text: self.presentationData.strings.MediaEditor_RemoveAudio, + text: actionTitle, icon: { theme in return generateTintedImage(image: UIImage(bundleImageName: "Chat/Context Menu/Delete"), color: theme.contextMenu.primaryColor)}, action: { [weak self] f in f.dismissWithResult(.default) if let self { if let mediaEditor = self.mediaEditor { - mediaEditor.setAudioTrack(nil) - - if !mediaEditor.sourceIsVideo && !mediaEditor.isPlaying { - mediaEditor.play() + if trackId == 1 { + mediaEditor.setAdditionalVideo(nil, positionChanges: []) + if let entityView = self.entitiesView.getView(where: { entityView in + if let entity = entityView.entity as? DrawingStickerEntity, entity.content == .dualVideoReference { + return true + } else { + return false + } + }) { + self.entitiesView.remove(uuid: entityView.entity.uuid, animated: false) + } + } else { + mediaEditor.setAudioTrack(nil) + + if !mediaEditor.sourceIsVideo && !mediaEditor.isPlaying { + mediaEditor.play() + } } } self.requestUpdate(transition: .easeInOut(duration: 0.25)) @@ -3557,7 +3610,6 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate self.hasAnyChanges = true self.controller?.isSavingAvailable = true self.controller?.requestLayout(transition: .immediate) - return case .drawing: self.previousDrawingData = self.drawingView.drawingData self.previousDrawingEntities = self.entitiesView.entities @@ -3761,7 +3813,7 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate } } - fileprivate var node: Node { + var node: Node { return self.displayNode as! 
Node } @@ -3848,7 +3900,7 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate } } - public enum Result { + public enum MediaResult { public enum VideoResult { case imageFile(path: String) case videoFile(path: String) @@ -3858,10 +3910,20 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate case video(video: VideoResult, coverImage: UIImage?, values: MediaEditorValues, duration: Double, dimensions: PixelDimensions) } - fileprivate let context: AccountContext - fileprivate let subject: Signal - fileprivate let isEditingStory: Bool + public struct Result { + public let media: MediaResult? + public let mediaAreas: [MediaArea] + public let caption: NSAttributedString + public let options: MediaEditorResultPrivacy + public let stickers: [TelegramMediaFile] + public let randomId: Int64 + } + + let context: AccountContext + let subject: Signal + let isEditingStory: Bool fileprivate let customTarget: EnginePeer.Id? + fileprivate let forwardSource: (EnginePeer, EngineStoryItem)? fileprivate let initialCaption: NSAttributedString? fileprivate let initialPrivacy: EngineStoryPrivacy? @@ -3872,7 +3934,7 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate fileprivate let transitionOut: (Bool, Bool?) -> TransitionOut? 
public var cancelled: (Bool) -> Void = { _ in } - public var completion: (Int64, MediaEditorScreen.Result?, [MediaArea], NSAttributedString, MediaEditorResultPrivacy, [TelegramMediaFile], @escaping (@escaping () -> Void) -> Void) -> Void = { _, _, _, _, _, _, _ in } + public var completion: (MediaEditorScreen.Result, @escaping (@escaping () -> Void) -> Void) -> Void = { _, _ in } public var dismissed: () -> Void = { } public var willDismiss: () -> Void = { } @@ -3881,24 +3943,26 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate private let storiesBlockedPeers: BlockedPeersContext private let hapticFeedback = HapticFeedback() - + public init( context: AccountContext, subject: Signal, customTarget: EnginePeer.Id? = nil, - isEditing: Bool, + isEditing: Bool = false, + forwardSource: (EnginePeer, EngineStoryItem)? = nil, initialCaption: NSAttributedString? = nil, initialPrivacy: EngineStoryPrivacy? = nil, initialMediaAreas: [MediaArea]? = nil, initialVideoPosition: Double? = nil, transitionIn: TransitionIn?, transitionOut: @escaping (Bool, Bool?) 
-> TransitionOut?, - completion: @escaping (Int64, MediaEditorScreen.Result?, [MediaArea], NSAttributedString, MediaEditorResultPrivacy, [TelegramMediaFile], @escaping (@escaping () -> Void) -> Void) -> Void + completion: @escaping (MediaEditorScreen.Result, @escaping (@escaping () -> Void) -> Void) -> Void ) { self.context = context self.subject = subject self.customTarget = customTarget self.isEditingStory = isEditing + self.forwardSource = forwardSource self.initialCaption = initialCaption self.initialPrivacy = initialPrivacy self.initialMediaAreas = initialMediaAreas @@ -3973,6 +4037,10 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate self.closeFriends.set(self.context.engine.data.get(TelegramEngine.EngineData.Item.Contacts.CloseFriends())) } } + + fileprivate var isEmbeddedEditor: Bool { + return self.isEditingStory || self.forwardSource != nil + } func openPrivacySettings(_ privacy: MediaEditorResultPrivacy? = nil, completion: @escaping () -> Void = {}) { self.node.mediaEditor?.stop() @@ -4337,25 +4405,6 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate self.present(controller, in: .current) } - func isEligibleForDraft() -> Bool { - if self.isEditingStory { - return false - } - guard let mediaEditor = self.node.mediaEditor else { - return false - } - let entities = self.node.entitiesView.entities.filter { !($0 is DrawingMediaEntity) } - let codableEntities = DrawingEntitiesView.encodeEntities(entities, entitiesView: self.node.entitiesView) - mediaEditor.setDrawingAndEntities(data: nil, image: mediaEditor.values.drawing, entities: codableEntities) - - let caption = self.getCaption() - - if let subject = self.node.subject, case .asset = subject, self.node.mediaEditor?.values.hasChanges == false && caption.string.isEmpty { - return false - } - return true - } - func maybePresentDiscardAlert() { self.hapticFeedback.impact(.light) if !self.isEligibleForDraft() { @@ -4373,10 +4422,9 @@ public final 
class MediaEditorScreen: ViewController, UIDropInteractionDelegate title = presentationData.strings.Story_Editor_DraftDiscardMedia save = presentationData.strings.Story_Editor_DraftKeepMedia } - let theme = defaultDarkPresentationTheme let controller = textAlertController( context: self.context, - forceTheme: theme, + forceTheme: defaultDarkPresentationTheme, title: title, text: presentationData.strings.Story_Editor_DraftDiscaedText, actions: [ @@ -4429,115 +4477,10 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate }) } - private func getCaption() -> NSAttributedString { + func getCaption() -> NSAttributedString { return (self.node.componentHost.view as? MediaEditorScreenComponent.View)?.getInputText() ?? NSAttributedString() } - private func saveDraft(id: Int64?) { - guard let subject = self.node.subject, let mediaEditor = self.node.mediaEditor else { - return - } - try? FileManager.default.createDirectory(atPath: draftPath(engine: self.context.engine), withIntermediateDirectories: true) - - let values = mediaEditor.values - let privacy = self.state.privacy - let caption = self.getCaption() - let duration = mediaEditor.duration ?? 0.0 - - let currentTimestamp = Int32(CFAbsoluteTimeGetCurrent() + kCFAbsoluteTimeIntervalSince1970) - var timestamp: Int32 - var location: CLLocationCoordinate2D? - let expiresOn: Int32 - if case let .draft(draft, _) = subject { - timestamp = draft.timestamp - location = draft.location - if let _ = id { - expiresOn = draft.expiresOn ?? 
currentTimestamp + 3600 * 24 * 7 - } else { - expiresOn = currentTimestamp + 3600 * 24 * 7 - } - } else { - timestamp = currentTimestamp - if case let .asset(asset) = subject { - location = asset.location?.coordinate - } - if let _ = id { - expiresOn = currentTimestamp + Int32(self.state.privacy.timeout) - } else { - expiresOn = currentTimestamp + 3600 * 24 * 7 - } - } - - if let resultImage = mediaEditor.resultImage { - mediaEditor.seek(0.0, andPlay: false) - makeEditorImageComposition(context: self.node.ciContext, postbox: self.context.account.postbox, inputImage: resultImage, dimensions: storyDimensions, values: values, time: .zero, textScale: 2.0, completion: { resultImage in - guard let resultImage else { - return - } - let fittedSize = resultImage.size.aspectFitted(CGSize(width: 128.0, height: 128.0)) - - let context = self.context - let saveImageDraft: (UIImage, PixelDimensions) -> Void = { image, dimensions in - if let thumbnailImage = generateScaledImage(image: resultImage, size: fittedSize) { - let path = "\(Int64.random(in: .min ... .max)).jpg" - if let data = image.jpegData(compressionQuality: 0.87) { - let draft = MediaEditorDraft(path: path, isVideo: false, thumbnail: thumbnailImage, dimensions: dimensions, duration: nil, values: values, caption: caption, privacy: privacy, timestamp: timestamp, location: location, expiresOn: expiresOn) - try? data.write(to: URL(fileURLWithPath: draft.fullPath(engine: context.engine))) - if let id { - saveStorySource(engine: context.engine, item: draft, peerId: context.account.peerId, id: id) - } else { - addStoryDraft(engine: context.engine, item: draft) - } - } - } - } - - let saveVideoDraft: (String, PixelDimensions, Double) -> Void = { videoPath, dimensions, duration in - if let thumbnailImage = generateScaledImage(image: resultImage, size: fittedSize) { - let path = "\(Int64.random(in: .min ... 
.max)).mp4" - let draft = MediaEditorDraft(path: path, isVideo: true, thumbnail: thumbnailImage, dimensions: dimensions, duration: duration, values: values, caption: caption, privacy: privacy, timestamp: timestamp, location: location, expiresOn: expiresOn) - try? FileManager.default.copyItem(atPath: videoPath, toPath: draft.fullPath(engine: context.engine)) - if let id { - saveStorySource(engine: context.engine, item: draft, peerId: context.account.peerId, id: id) - } else { - addStoryDraft(engine: context.engine, item: draft) - } - } - } - - switch subject { - case let .image(image, dimensions, _, _): - saveImageDraft(image, dimensions) - case let .video(path, _, _, _, _, dimensions, _, _, _): - saveVideoDraft(path, dimensions, duration) - case let .asset(asset): - if asset.mediaType == .video { - PHImageManager.default().requestAVAsset(forVideo: asset, options: nil) { avAsset, _, _ in - if let urlAsset = avAsset as? AVURLAsset { - saveVideoDraft(urlAsset.url.relativePath, PixelDimensions(width: Int32(asset.pixelWidth), height: Int32(asset.pixelHeight)), duration) - } - } - } else { - let options = PHImageRequestOptions() - options.deliveryMode = .highQualityFormat - PHImageManager.default().requestImage(for: asset, targetSize: PHImageManagerMaximumSize, contentMode: .default, options: options) { image, _ in - if let image { - saveImageDraft(image, PixelDimensions(image.size)) - } - } - } - case let .draft(draft, _): - if draft.isVideo { - saveVideoDraft(draft.fullPath(engine: context.engine), draft.dimensions, draft.duration ?? 
0.0) - } else if let image = UIImage(contentsOfFile: draft.fullPath(engine: context.engine)) { - saveImageDraft(image, draft.dimensions) - } - removeStoryDraft(engine: self.context.engine, path: draft.path, delete: false) - } - }) - } - } - fileprivate func checkCaptionLimit() -> Bool { let caption = self.getCaption() if caption.length > self.context.userLimits.maxStoryCaptionLength { @@ -4631,8 +4574,8 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate } } - if self.isEditingStory && !(self.node.hasAnyChanges || hasEntityChanges) { - self.completion(randomId, nil, [], caption, self.state.privacy, stickers, { [weak self] finished in + if self.isEmbeddedEditor && !(self.node.hasAnyChanges || hasEntityChanges) { + self.completion(MediaEditorScreen.Result(media: nil, mediaAreas: [], caption: caption, options: self.state.privacy, stickers: stickers, randomId: randomId), { [weak self] finished in self?.node.animateOut(finished: true, saveDraft: false, completion: { [weak self] in self?.dismiss() Queue.mainQueue().justDispatch { @@ -4661,7 +4604,7 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate var firstFrame: Signal<(UIImage?, UIImage?), NoError> let firstFrameTime = CMTime(seconds: mediaEditor.values.videoTrimRange?.lowerBound ?? 
0.0, preferredTimescale: CMTimeScale(60)) - let videoResult: Result.VideoResult + let videoResult: MediaResult.VideoResult var videoIsMirrored = false let duration: Double switch subject { @@ -4807,7 +4750,7 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate makeEditorImageComposition(context: self.node.ciContext, postbox: self.context.account.postbox, inputImage: inputImage, dimensions: storyDimensions, values: mediaEditor.values, time: firstFrameTime, textScale: 2.0, completion: { [weak self] coverImage in if let self { Logger.shared.log("MediaEditor", "Completed with video \(videoResult)") - self.completion(randomId, .video(video: videoResult, coverImage: coverImage, values: mediaEditor.values, duration: duration, dimensions: mediaEditor.values.resultDimensions), mediaAreas, caption, self.state.privacy, stickers, { [weak self] finished in + self.completion(MediaEditorScreen.Result(media: .video(video: videoResult, coverImage: coverImage, values: mediaEditor.values, duration: duration, dimensions: mediaEditor.values.resultDimensions), mediaAreas: mediaAreas, caption: caption, options: self.state.privacy, stickers: stickers, randomId: randomId), { [weak self] finished in self?.node.animateOut(finished: true, saveDraft: false, completion: { [weak self] in self?.dismiss() Queue.mainQueue().justDispatch { @@ -4830,7 +4773,7 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate makeEditorImageComposition(context: self.node.ciContext, postbox: self.context.account.postbox, inputImage: image, dimensions: storyDimensions, values: mediaEditor.values, time: .zero, textScale: 2.0, completion: { [weak self] resultImage in if let self, let resultImage { Logger.shared.log("MediaEditor", "Completed with image \(resultImage)") - self.completion(randomId, .image(image: resultImage, dimensions: PixelDimensions(resultImage.size)), mediaAreas, caption, self.state.privacy, stickers, { [weak self] finished in + 
self.completion(MediaEditorScreen.Result(media: .image(image: resultImage, dimensions: PixelDimensions(resultImage.size)), mediaAreas: mediaAreas, caption: caption, options: self.state.privacy, stickers: stickers, randomId: randomId), { [weak self] finished in self?.node.animateOut(finished: true, saveDraft: false, completion: { [weak self] in self?.dismiss() Queue.mainQueue().justDispatch { @@ -5257,10 +5200,7 @@ private final class ToolValueComponent: Component { ) if let titleView = self.title.view { if titleView.superview == nil { - titleView.layer.shadowOffset = CGSize(width: 0.0, height: 0.0) - titleView.layer.shadowRadius = 3.0 - titleView.layer.shadowColor = UIColor.black.cgColor - titleView.layer.shadowOpacity = 0.35 + setupButtonShadow(titleView, radius: 3.0) self.addSubview(titleView) } transition.setPosition(view: titleView, position: titleFrame.center) @@ -5283,10 +5223,7 @@ private final class ToolValueComponent: Component { ) if let valueView = self.value.view { if valueView.superview == nil { - valueView.layer.shadowOffset = CGSize(width: 0.0, height: 0.0) - valueView.layer.shadowRadius = 3.0 - valueView.layer.shadowColor = UIColor.black.cgColor - valueView.layer.shadowOpacity = 0.35 + setupButtonShadow(valueView, radius: 3.0) self.addSubview(valueView) } transition.setPosition(view: valueView, position: valueFrame.center) @@ -5553,3 +5490,10 @@ private final class ReferenceContentSource: ContextReferenceContentSource { return ContextControllerReferenceViewInfo(referenceView: self.sourceView, contentAreaInScreenSpace: self.contentArea, customPosition: self.customPosition, actionsPosition: .top) } } + +private func setupButtonShadow(_ view: UIView, radius: CGFloat = 2.0) { + view.layer.shadowOffset = CGSize(width: 0.0, height: 0.0) + view.layer.shadowRadius = radius + view.layer.shadowColor = UIColor.black.cgColor + view.layer.shadowOpacity = 0.35 +} diff --git a/submodules/TelegramUI/Components/MediaEditorScreen/Sources/MediaScrubberComponent.swift 
b/submodules/TelegramUI/Components/MediaEditorScreen/Sources/MediaScrubberComponent.swift new file mode 100644 index 0000000000..7c3c61c547 --- /dev/null +++ b/submodules/TelegramUI/Components/MediaEditorScreen/Sources/MediaScrubberComponent.swift @@ -0,0 +1,1361 @@ +import Foundation +import UIKit +import Display +import AsyncDisplayKit +import ComponentFlow +import SwiftSignalKit +import ViewControllerComponent +import ComponentDisplayAdapters +import TelegramPresentationData +import AccountContext +import AudioWaveformComponent +import MultilineTextComponent +import MediaEditor + +private let handleWidth: CGFloat = 14.0 +private let trackHeight: CGFloat = 39.0 +private let collapsedTrackHeight: CGFloat = 26.0 +private let trackSpacing: CGFloat = 4.0 +private let borderHeight: CGFloat = 1.0 + UIScreenPixel +private let frameWidth: CGFloat = 24.0 + +final class MediaScrubberComponent: Component { + typealias EnvironmentType = Empty + + struct Track: Equatable { + enum Content: Equatable { + case video(frames: [UIImage], framesUpdateTimestamp: Double) + case audio(artist: String?, title: String?, samples: Data?, peak: Int32) + + static func ==(lhs: Content, rhs: Content) -> Bool { + switch lhs { + case let .video(_, framesUpdateTimestamp): + if case .video(_, framesUpdateTimestamp) = rhs { + return true + } else { + return false + } + case let .audio(lhsArtist, lhsTitle, lhsSamples, lhsPeak): + if case let .audio(rhsArtist, rhsTitle, rhsSamples, rhsPeak) = rhs { + return lhsArtist == rhsArtist && lhsTitle == rhsTitle && lhsSamples == rhsSamples && lhsPeak == rhsPeak + } else { + return false + } + } + } + } + + let id: Int32 + let content: Content + let duration: Double + let trimRange: Range? + let offset: Double? 
+ let isMain: Bool + + init(_ track: MediaEditorPlayerState.Track) { + self.id = track.id + switch track.content { + case let .video(frames, framesUpdateTimestamp): + self.content = .video(frames: frames, framesUpdateTimestamp: framesUpdateTimestamp) + case let .audio(artist, title, samples, peak): + self.content = .audio(artist: artist, title: title, samples: samples, peak: peak) + } + self.duration = track.duration + self.trimRange = track.trimRange + self.offset = track.offset + self.isMain = track.isMain + } + } + + let context: AccountContext + let generationTimestamp: Double + + let position: Double + let minDuration: Double + let maxDuration: Double + let isPlaying: Bool + + let tracks: [Track] + + let positionUpdated: (Double, Bool) -> Void + let trackTrimUpdated: (Int32, Double, Double, Bool, Bool) -> Void + let trackOffsetUpdated: (Int32, Double, Bool) -> Void + let trackLongPressed: (Int32, UIView) -> Void + + init( + context: AccountContext, + generationTimestamp: Double, + position: Double, + minDuration: Double, + maxDuration: Double, + isPlaying: Bool, + tracks: [Track], + positionUpdated: @escaping (Double, Bool) -> Void, + trackTrimUpdated: @escaping (Int32, Double, Double, Bool, Bool) -> Void, + trackOffsetUpdated: @escaping (Int32, Double, Bool) -> Void, + trackLongPressed: @escaping (Int32, UIView) -> Void + ) { + self.context = context + self.generationTimestamp = generationTimestamp + self.position = position + self.minDuration = minDuration + self.maxDuration = maxDuration + self.isPlaying = isPlaying + self.tracks = tracks + self.positionUpdated = positionUpdated + self.trackTrimUpdated = trackTrimUpdated + self.trackOffsetUpdated = trackOffsetUpdated + self.trackLongPressed = trackLongPressed + } + + static func ==(lhs: MediaScrubberComponent, rhs: MediaScrubberComponent) -> Bool { + if lhs.context !== rhs.context { + return false + } + if lhs.generationTimestamp != rhs.generationTimestamp { + return false + } + if lhs.position != 
rhs.position { + return false + } + if lhs.minDuration != rhs.minDuration { + return false + } + if lhs.maxDuration != rhs.maxDuration { + return false + } + if lhs.isPlaying != rhs.isPlaying { + return false + } + if lhs.tracks != rhs.tracks { + return false + } + return true + } + + final class View: UIView, UIGestureRecognizerDelegate { + private var trackViews: [Int32: TrackView] = [:] + private let trimView: TrimView + private let ghostTrimView: TrimView + private let cursorView: HandleView + + private var cursorDisplayLink: SharedDisplayLinkDriver.Link? + private var cursorPositionAnimation: (start: Double, from: Double, to: Double, ended: Bool)? + + private var selectedTrackId: Int32 = 0 + private var isPanningCursor = false + + private var scrubberSize: CGSize? + + private var component: MediaScrubberComponent? + private weak var state: EmptyComponentState? + + override init(frame: CGRect) { + self.trimView = TrimView(frame: .zero) + self.ghostTrimView = TrimView(frame: .zero) + self.ghostTrimView.isHollow = true + self.cursorView = HandleView() + + super.init(frame: frame) + + self.clipsToBounds = false + + self.disablesInteractiveModalDismiss = true + self.disablesInteractiveKeyboardGestureRecognizer = true + + let positionImage = generateImage(CGSize(width: handleWidth, height: 50.0), rotatedContext: { size, context in + context.clear(CGRect(origin: .zero, size: size)) + context.setFillColor(UIColor.white.cgColor) + context.setShadow(offset: .zero, blur: 2.0, color: UIColor(rgb: 0x000000, alpha: 0.55).cgColor) + + let path = UIBezierPath(roundedRect: CGRect(origin: CGPoint(x: 6.0, y: 4.0), size: CGSize(width: 2.0, height: 42.0)), cornerRadius: 1.0) + context.addPath(path.cgPath) + context.fillPath() + })?.stretchableImage(withLeftCapWidth: Int(handleWidth / 2.0), topCapHeight: 25) + + self.cursorView.image = positionImage + self.cursorView.isUserInteractionEnabled = true + self.cursorView.hitTestSlop = UIEdgeInsets(top: -8.0, left: -9.0, bottom: -8.0, 
right: -9.0)

self.addSubview(self.ghostTrimView)
self.addSubview(self.trimView)
self.addSubview(self.cursorView)

self.cursorView.addGestureRecognizer(UIPanGestureRecognizer(target: self, action: #selector(self.handleCursorPan(_:))))

// Display link drives the playback cursor; kept paused until playback starts.
self.cursorDisplayLink = SharedDisplayLinkDriver.shared.add { [weak self] _ in
    self?.updateCursorPosition()
}
self.cursorDisplayLink?.isPaused = true

self.trimView.updated = { [weak self] transition in
    self?.state?.updated(transition: transition)
}
// The visible trim view edits whichever track is currently selected…
self.trimView.trimUpdated = { [weak self] startValue, endValue, updatedEnd, done in
    if let self, let component = self.component {
        component.trackTrimUpdated(self.selectedTrackId, startValue, endValue, updatedEnd, done)
    }
}
// …while the ghost trim view always forwards edits to the main track (id 0).
self.ghostTrimView.trimUpdated = { [weak self] startValue, endValue, updatedEnd, done in
    if let self, let component = self.component {
        component.trackTrimUpdated(0, startValue, endValue, updatedEnd, done)
    }
}
}

required init?(coder: NSCoder) {
    fatalError("init(coder:) has not been implemented")
}

deinit {
    // Stop display-link callbacks once the view is gone.
    self.cursorDisplayLink?.invalidate()
}

// True when the component has at least one audio track and no video tracks.
private var isAudioOnly: Bool {
    guard let component = self.component else {
        return false
    }
    var hasVideoTracks = false
    var hasAudioTracks = false
    for track in component.tracks {
        switch track.content {
        case .video:
            hasVideoTracks = true
        case .audio:
            hasAudioTracks = true
        }
    }
    return !hasVideoTracks && hasAudioTracks
}

// Trimmable duration, taken from the first track; audio-only content is capped at 30 seconds.
private var trimDuration: Double {
    guard let component = self.component, var duration = component.tracks.first?.duration else {
        return 0.0
    }
    if self.isAudioOnly {
        duration = min(30.0, duration)
    }
    return duration
}

// Full duration of the first track, clamped to be non-negative.
private var duration: Double {
    guard let component = self.component, let firstTrack = component.tracks.first else {
        return 0.0
    }
    return max(0.0, firstTrack.duration)
}

private var startPosition: Double {
    guard let component = self.component, let firstTrack =
component.tracks.first else { + return 0.0 + } + return max(0.0, firstTrack.trimRange?.lowerBound ?? 0.0) + } + + private var endPosition: Double { + guard let component = self.component, let firstTrack = component.tracks.first else { + return 0.0 + } + return firstTrack.trimRange?.upperBound ?? min(firstTrack.duration, storyMaxVideoDuration) + } + + private var mainAudioTrackOffset: Double? { + guard self.isAudioOnly, let component = self.component, let firstTrack = component.tracks.first else { + return nil + } + return firstTrack.offset + } + + @objc private func handleCursorPan(_ gestureRecognizer: UIPanGestureRecognizer) { + guard let component = self.component else { + return + } + + let location = gestureRecognizer.location(in: self) + let start = handleWidth + let end = self.frame.width - handleWidth + let length = end - start + let fraction = (location.x - start) / length + + var position = max(self.startPosition, min(self.endPosition, self.trimDuration * fraction)) + if let offset = self.mainAudioTrackOffset { + position += offset + } + let transition: Transition = .immediate + switch gestureRecognizer.state { + case .began, .changed: + self.isPanningCursor = true + component.positionUpdated(position, false) + case .ended, .cancelled: + self.isPanningCursor = false + component.positionUpdated(position, true) + default: + break + } + self.state?.updated(transition: transition) + } + + private func cursorFrame(size: CGSize, height: CGFloat, position: Double, duration : Double) -> CGRect { + let cursorPadding: CGFloat = 8.0 + let cursorPositionFraction = duration > 0.0 ? 
position / duration : 0.0 + let cursorPosition = floorToScreenPixels(handleWidth - 1.0 + (size.width - handleWidth * 2.0 + 2.0) * cursorPositionFraction) + var cursorFrame = CGRect(origin: CGPoint(x: cursorPosition - handleWidth / 2.0, y: -5.0 - UIScreenPixel), size: CGSize(width: handleWidth, height: height)) + + var leftEdge = self.ghostTrimView.leftHandleView.frame.maxX + var rightEdge = self.ghostTrimView.rightHandleView.frame.minX + if self.isAudioOnly { + leftEdge = self.trimView.leftHandleView.frame.maxX + rightEdge = self.trimView.rightHandleView.frame.minX + } + + cursorFrame.origin.x = max(leftEdge - cursorPadding, cursorFrame.origin.x) + cursorFrame.origin.x = min(rightEdge - handleWidth + cursorPadding, cursorFrame.origin.x) + return cursorFrame + } + + private var effectiveCursorHeight: CGFloat { + let additionalTracksCount = max(0, (self.component?.tracks.count ?? 1) - 1) + return 50.0 + CGFloat(additionalTracksCount) * 30.0 + } + + private func updateCursorPosition() { + guard let component = self.component, let scrubberSize = self.scrubberSize else { + return + } + let timestamp = CACurrentMediaTime() + + let updatedPosition: Double + if let (start, from, to, _) = self.cursorPositionAnimation { + var from = from + if let offset = self.mainAudioTrackOffset { + from -= offset + } + let duration = to - from + let fraction = duration > 0.0 ? (timestamp - start) / duration : 0.0 + updatedPosition = max(self.startPosition, min(self.endPosition, from + (to - from) * fraction)) + if fraction >= 1.0 { + self.cursorPositionAnimation = (start, from, to, true) + } + } else { + var position = component.position + if let offset = self.mainAudioTrackOffset { + position -= offset + } + let advance = component.isPlaying ? 
timestamp - component.generationTimestamp : 0.0 + updatedPosition = max(self.startPosition, min(self.endPosition, position + advance)) + } + self.cursorView.frame = cursorFrame(size: scrubberSize, height: self.effectiveCursorHeight, position: updatedPosition, duration: self.trimDuration) + } + + func update(component: MediaScrubberComponent, availableSize: CGSize, state: EmptyComponentState, environment: Environment, transition: Transition) -> CGSize { + let isFirstTime = self.component == nil + self.component = component + self.state = state + + var totalHeight: CGFloat = 0.0 + var trackLayout: [Int32: (CGRect, Transition, Bool)] = [:] + + if !component.tracks.contains(where: { $0.id == self.selectedTrackId }) { + self.selectedTrackId = 0 + } + + var validIds = Set() + for track in component.tracks { + let id = track.id + validIds.insert(id) + + var trackTransition = transition + let trackView: TrackView + var animateTrackIn = false + if let current = self.trackViews[id] { + trackView = current + } else { + trackTransition = .immediate + trackView = TrackView() + trackView.onSelection = { [weak self] id in + guard let self else { + return + } + self.selectedTrackId = id + self.state?.updated(transition: .easeInOut(duration: 0.2)) + } + trackView.onLongPress = { [weak self] id, sourceView in + guard let self, let component = self.component else { + return + } + component.trackLongPressed(id, sourceView) + } + trackView.offsetUpdated = { [weak self] offset, apply in + guard let self, let component = self.component else { + return + } + component.trackOffsetUpdated(id, offset, apply) + } + trackView.updated = { [weak self] transition in + guard let self else { + return + } + self.state?.updated(transition: transition) + } + self.trackViews[id] = trackView + + self.insertSubview(trackView, at: 0) + + if !isFirstTime { + animateTrackIn = true + } + } + + let trackSize = trackView.update( + context: component.context, + track: track, + isSelected: id == 
self.selectedTrackId, + availableSize: availableSize, + duration: self.duration, + transition: trackTransition + ) + trackLayout[id] = (CGRect(origin: CGPoint(x: 0.0, y: totalHeight), size: trackSize), trackTransition, animateTrackIn) + + totalHeight += trackSize.height + totalHeight += trackSpacing + } + totalHeight -= trackSpacing + + for track in component.tracks { + guard let trackView = self.trackViews[track.id], let (trackFrame, trackTransition, animateTrackIn) = trackLayout[track.id] else { + continue + } + trackTransition.setFrame(view: trackView, frame: CGRect(origin: CGPoint(x: 0.0, y: totalHeight - trackFrame.maxY), size: trackFrame.size)) + if animateTrackIn { + trackView.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2) + trackView.layer.animatePosition(from: CGPoint(x: 0.0, y: trackFrame.height + trackSpacing), to: .zero, duration: 0.35, timingFunction: kCAMediaTimingFunctionSpring, additive: true) + } + } + + var removeIds: [Int32] = [] + for (id, trackView) in self.trackViews { + if !validIds.contains(id) { + removeIds.append(id) + trackView.layer.animatePosition(from: .zero, to: CGPoint(x: 0.0, y: trackView.frame.height), duration: 0.35, timingFunction: kCAMediaTimingFunctionSpring, removeOnCompletion: false, additive: true) + transition.setAlpha(view: trackView, alpha: 0.0, completion: { [weak trackView] _ in + trackView?.removeFromSuperview() + }) + } + } + for id in removeIds { + self.trackViews.removeValue(forKey: id) + } + + var startPosition = self.startPosition + var endPosition = self.endPosition + var trimViewOffset: CGFloat = 0.0 + var trimViewVisualInsets: UIEdgeInsets = .zero + if let track = component.tracks.first(where: { $0.id == self.selectedTrackId }), track.id != 0 { + if let trimRange = track.trimRange { + startPosition = trimRange.lowerBound + endPosition = trimRange.upperBound + } + if let trackView = self.trackViews[track.id] { + if trackView.scrollView.contentOffset.x < 0.0 { + trimViewOffset = 
-trackView.scrollView.contentOffset.x + trimViewVisualInsets.right = trimViewOffset + } else if trackView.scrollView.contentSize.width > trackView.scrollView.frame.width, trackView.scrollView.contentOffset.x > trackView.scrollView.contentSize.width - trackView.scrollView.frame.width { + let delta = trackView.scrollView.contentOffset.x - (trackView.scrollView.contentSize.width - trackView.scrollView.frame.width) + trimViewOffset = -delta + trimViewVisualInsets.left = delta + } + } + } + + let scrubberSize = CGSize(width: availableSize.width, height: trackHeight) + self.trimView.isHollow = self.selectedTrackId != 0 || self.isAudioOnly + let (leftHandleFrame, rightHandleFrame) = self.trimView.update( + totalWidth: scrubberSize.width, + visualInsets: trimViewVisualInsets, + scrubberSize: scrubberSize, + duration: trimDuration, + startPosition: startPosition, + endPosition: endPosition, + position: component.position, + minDuration: component.minDuration, + maxDuration: component.maxDuration, + transition: transition + ) + + let (ghostLeftHandleFrame, ghostRightHandleFrame) = self.ghostTrimView.update( + totalWidth: scrubberSize.width, + visualInsets: .zero, + scrubberSize: CGSize(width: scrubberSize.width, height: collapsedTrackHeight), + duration: self.duration, + startPosition: self.startPosition, + endPosition: self.endPosition, + position: component.position, + minDuration: component.minDuration, + maxDuration: component.maxDuration, + transition: transition + ) + + let _ = leftHandleFrame + let _ = rightHandleFrame + let _ = ghostLeftHandleFrame + let _ = ghostRightHandleFrame + + let scrubberBounds = CGRect(origin: .zero, size: scrubberSize) + var selectedTrackFrame = scrubberBounds + var mainTrackFrame = scrubberBounds + if let (trackFrame, _, _) = trackLayout[0] { + mainTrackFrame = CGRect(origin: CGPoint(x: trackFrame.minX, y: totalHeight - trackFrame.maxY), size: trackFrame.size) + } + if let (trackFrame, _, _) = trackLayout[self.selectedTrackId] { + 
selectedTrackFrame = CGRect(origin: CGPoint(x: trackFrame.minX, y: totalHeight - trackFrame.maxY), size: trackFrame.size) + } else { + selectedTrackFrame = mainTrackFrame + } + + let trimViewFrame = CGRect(origin: CGPoint(x: trimViewOffset, y: selectedTrackFrame.minY), size: scrubberSize) + transition.setFrame(view: self.trimView, frame: trimViewFrame) + + let ghostTrimViewFrame = CGRect(origin: CGPoint(x: 0.0, y: totalHeight - collapsedTrackHeight), size: CGSize(width: availableSize.width, height: collapsedTrackHeight)) + transition.setFrame(view: self.ghostTrimView, frame: ghostTrimViewFrame) + transition.setAlpha(view: self.ghostTrimView, alpha: self.selectedTrackId != 0 ? 0.75 : 0.0) + +// var containerLeftEdge = leftHandleFrame.maxX +// var containerRightEdge = rightHandleFrame.minX +// if self.isAudioSelected && component.duration > 0.0 { +// containerLeftEdge = ghostLeftHandleFrame.maxX +// containerRightEdge = ghostRightHandleFrame.minX +// } + + let isDraggingTracks = self.trackViews.values.contains(where: { $0.isDragging }) + let isCursorHidden = isDraggingTracks || self.trimView.isPanningTrimHandle || self.ghostTrimView.isPanningTrimHandle + var cursorTransition = transition + if isCursorHidden { + cursorTransition = .immediate + } + cursorTransition.setAlpha(view: self.cursorView, alpha: isCursorHidden ? 0.0 : 1.0, delay: self.cursorView.alpha.isZero && !isCursorHidden ? 
0.25 : 0.0) + + self.scrubberSize = scrubberSize + if self.isPanningCursor || !component.isPlaying { + self.cursorPositionAnimation = nil + self.cursorDisplayLink?.isPaused = true + + var cursorPosition = component.position + if let offset = self.mainAudioTrackOffset { + cursorPosition -= offset + } + transition.setFrame(view: self.cursorView, frame: cursorFrame(size: scrubberSize, height: self.effectiveCursorHeight, position: cursorPosition, duration: trimDuration)) + } else { + if let (_, _, end, ended) = self.cursorPositionAnimation { + if ended, component.position >= self.startPosition && component.position < end - 1.0 { + self.cursorPositionAnimation = (CACurrentMediaTime(), component.position, self.endPosition, false) + } + } else { + self.cursorPositionAnimation = (CACurrentMediaTime(), component.position, self.endPosition, false) + } + self.cursorDisplayLink?.isPaused = false + self.updateCursorPosition() + } + + return CGSize(width: availableSize.width, height: totalHeight) + } + + override func point(inside point: CGPoint, with event: UIEvent?) 
-> Bool { + let hitTestSlop = UIEdgeInsets(top: -8.0, left: -9.0, bottom: -8.0, right: -9.0) + return self.bounds.inset(by: hitTestSlop).contains(point) + } + } + + public func makeView() -> View { + return View(frame: CGRect()) + } + + public func update(view: View, availableSize: CGSize, state: State, environment: Environment, transition: Transition) -> CGSize { + return view.update(component: self, availableSize: availableSize, state: state, environment: environment, transition: transition) + } +} + + +private class TrackView: UIView, UIScrollViewDelegate, UIGestureRecognizerDelegate { + fileprivate let clippingView: UIView + fileprivate let scrollView: UIScrollView + fileprivate let containerView: UIView + fileprivate let backgroundView: BlurredBackgroundView + fileprivate let vibrancyView: UIVisualEffectView + fileprivate let vibrancyContainer: UIView + + fileprivate let audioContentContainerView: UIView + fileprivate let audioWaveform = ComponentView() + fileprivate let waveformCloneLayer = AudioWaveformComponent.View.CloneLayer() + fileprivate let audioContentMaskView: UIImageView + fileprivate let audioIconView: UIImageView + fileprivate let audioTitle = ComponentView() + + fileprivate let videoTransparentFramesContainer = UIView() + fileprivate var videoTransparentFrameLayers: [VideoFrameLayer] = [] + fileprivate let videoOpaqueFramesContainer = UIView() + fileprivate var videoOpaqueFrameLayers: [VideoFrameLayer] = [] + + var onSelection: (Int32) -> Void = { _ in } + var onLongPress: (Int32, UIView) -> Void = { _, _ in } + var offsetUpdated: (Double, Bool) -> Void = { _, _ in } + var updated: (Transition) -> Void = { _ in } + + private(set) var isDragging = false + private var ignoreScrollUpdates = false + + override init(frame: CGRect) { + self.scrollView = UIScrollView() + if #available(iOSApplicationExtension 11.0, iOS 11.0, *) { + self.scrollView.contentInsetAdjustmentBehavior = .never + } + if #available(iOS 13.0, *) { + 
self.scrollView.automaticallyAdjustsScrollIndicatorInsets = false + } + self.scrollView.bounces = false + self.scrollView.decelerationRate = .fast + self.scrollView.clipsToBounds = false + self.scrollView.showsHorizontalScrollIndicator = false + self.scrollView.showsVerticalScrollIndicator = false + + self.clippingView = UIView() + self.clippingView.clipsToBounds = true + + self.containerView = UIView() + self.containerView.clipsToBounds = true + self.containerView.layer.cornerRadius = 9.0 + self.containerView.isUserInteractionEnabled = false + + self.backgroundView = BlurredBackgroundView(color: UIColor(white: 0.0, alpha: 0.5), enableBlur: true) + + let style: UIBlurEffect.Style = .dark + let blurEffect = UIBlurEffect(style: style) + let vibrancyEffect = UIVibrancyEffect(blurEffect: blurEffect) + let vibrancyEffectView = UIVisualEffectView(effect: vibrancyEffect) + self.vibrancyView = vibrancyEffectView + + self.vibrancyContainer = UIView() + self.vibrancyView.contentView.addSubview(self.vibrancyContainer) + + self.audioContentContainerView = UIView() + self.audioContentContainerView.clipsToBounds = true + + self.audioContentMaskView = UIImageView() + self.audioContentContainerView.mask = self.audioContentMaskView + + self.audioIconView = UIImageView(image: UIImage(bundleImageName: "Media Editor/SmallAudio")) + + self.waveformCloneLayer.opacity = 0.3 + + super.init(frame: .zero) + + self.scrollView.delegate = self + + self.videoTransparentFramesContainer.alpha = 0.5 + self.videoTransparentFramesContainer.clipsToBounds = true + self.videoTransparentFramesContainer.layer.cornerRadius = 9.0 + self.videoTransparentFramesContainer.isUserInteractionEnabled = false + + self.videoOpaqueFramesContainer.clipsToBounds = true + self.videoOpaqueFramesContainer.layer.cornerRadius = 9.0 + self.videoOpaqueFramesContainer.isUserInteractionEnabled = false + + self.addSubview(self.clippingView) + self.clippingView.addSubview(self.scrollView) + 
self.scrollView.addSubview(self.containerView) + self.backgroundView.addSubview(self.vibrancyView) + + let longPressGesture = UILongPressGestureRecognizer(target: self, action: #selector(self.longPressed(_:))) + longPressGesture.delegate = self + self.addGestureRecognizer(longPressGesture) + + let tapGesture = UITapGestureRecognizer(target: self, action: #selector(self.handleTap(_:))) + self.addGestureRecognizer(tapGesture) + + self.audioContentMaskView.image = audioContentMaskImage + } + + required init?(coder: NSCoder) { + fatalError("init(coder:) has not been implemented") + } + + @objc private func longPressed(_ gestureRecognizer: UILongPressGestureRecognizer) { + guard let (track, _, _) = self.params, case .began = gestureRecognizer.state else { + return + } + self.onLongPress(track.id, self.clippingView) + } + + @objc private func handleTap(_ gestureRecognizer: UITapGestureRecognizer) { + guard let (track, _, _) = self.params else { + return + } + self.onSelection(track.id) + } + + private func updateTrackOffset(done: Bool) { + guard self.scrollView.contentSize.width > 0.0, let duration = self.params?.track.duration else { + return + } + let totalWidth = self.scrollView.contentSize.width + let offset = self.scrollView.contentOffset.x * duration / totalWidth + self.offsetUpdated(offset, done) + } + + func scrollViewWillBeginDragging(_ scrollView: UIScrollView) { + self.isDragging = true + self.updated(.easeInOut(duration: 0.25)) + } + + func scrollViewDidScroll(_ scrollView: UIScrollView) { + guard !self.ignoreScrollUpdates else { + return + } + self.updateTrackOffset(done: false) + } + + func scrollViewDidEndDragging(_ scrollView: UIScrollView, willDecelerate decelerate: Bool) { + if !decelerate { + self.updateTrackOffset(done: true) + self.isDragging = false + self.updated(.easeInOut(duration: 0.25)) + } + } + + func scrollViewDidEndDecelerating(_ scrollView: UIScrollView) { + self.updateTrackOffset(done: true) + self.isDragging = false + 
self.updated(.easeInOut(duration: 0.25)) + } + + override func gestureRecognizerShouldBegin(_ gestureRecognizer: UIGestureRecognizer) -> Bool { + let location = gestureRecognizer.location(in: self.containerView) + return self.containerView.bounds.contains(location) + } + + private var params: ( + track: MediaScrubberComponent.Track, + isSelected: Bool, + duration: Double + )? + + func update( + context: AccountContext, + track: MediaScrubberComponent.Track, + isSelected: Bool, + availableSize: CGSize, + duration: Double, + transition: Transition + ) -> CGSize { + let previousParams = self.params + self.params = (track, isSelected, duration) + + let scrubberSize = CGSize(width: availableSize.width, height: isSelected ? trackHeight : collapsedTrackHeight) + + var trimDuration = duration + if track.isAudio && track.isMain { + trimDuration = min(30.0, track.duration) + } + + let minimalAudioWidth = handleWidth * 2.0 + var audioTotalWidth = scrubberSize.width + if track.isAudio, trimDuration > 0.0 { + let audioFraction = track.duration / trimDuration + if audioFraction < 1.0 - .ulpOfOne || audioFraction > 1.0 + .ulpOfOne { + audioTotalWidth = max(minimalAudioWidth, ceil(availableSize.width * audioFraction)) + } + } + + var clipOrigin: CGFloat = -9.0 + var clipWidth = availableSize.width + 18.0 + + var deselectedClipWidth: CGFloat = 0.0 + var deselectedClipOrigin: CGFloat = 0.0 + + if !track.isMain, duration > 0.0 { + let trackDuration: Double + if let trimRange = track.trimRange { + trackDuration = trimRange.upperBound - trimRange.lowerBound + } else { + trackDuration = duration + } + + let fraction = trackDuration / duration + deselectedClipWidth = max(minimalAudioWidth, availableSize.width * fraction) + deselectedClipOrigin = (track.trimRange?.lowerBound ?? 
0.0) / duration * availableSize.width

// Compensate for rubber-banding at either end of the scroll view so the
// deselected clip stays visually pinned to the timeline.
if self.scrollView.contentOffset.x < 0.0 {
    deselectedClipOrigin -= self.scrollView.contentOffset.x
    if self.scrollView.contentSize.width > self.scrollView.frame.width {
        deselectedClipWidth += self.scrollView.contentOffset.x
    }
} else if self.scrollView.contentSize.width > self.scrollView.frame.width, self.scrollView.contentOffset.x > self.scrollView.contentSize.width - self.scrollView.frame.width {
    let delta = self.scrollView.contentOffset.x - (self.scrollView.contentSize.width - self.scrollView.frame.width)
    deselectedClipWidth -= delta
}
}

if !isSelected && (track.isAudio || !track.isMain) {
    clipOrigin = deselectedClipOrigin
    clipWidth = deselectedClipWidth
}

let clippingFrame = CGRect(origin: CGPoint(x: clipOrigin, y: 0.0), size: CGSize(width: clipWidth, height: scrubberSize.height))
let clippingBounds = CGRect(origin: CGPoint(x: clipOrigin, y: 0.0), size: CGSize(width: clipWidth, height: scrubberSize.height))
transition.setFrame(view: self.clippingView, frame: clippingFrame)
transition.setBounds(view: self.clippingView, bounds: clippingBounds)

// Only a selected, non-main (or audio) track may be scrolled to change its offset.
self.scrollView.isUserInteractionEnabled = isSelected && (track.isAudio || !track.isMain)

// Suppress offsetUpdated callbacks while we programmatically mutate the scroll view.
self.ignoreScrollUpdates = true

let scrollFrame = CGRect(origin: .zero, size: CGSize(width: availableSize.width, height: scrubberSize.height))
transition.setFrame(view: self.scrollView, frame: scrollFrame)

// NOTE(review): `!"".isEmpty` is always false, so `audioChanged` can never force
// the branch below — this looks like a leftover placeholder for a real
// "audio content changed" check; confirm intent and simplify.
let audioChanged = !"".isEmpty

let contentSize = CGSize(width: audioTotalWidth, height: collapsedTrackHeight)
if self.scrollView.contentSize != contentSize || audioChanged {
    self.scrollView.contentSize = contentSize
    if !track.isMain {
        // Insets let the track content be dragged past either edge — presumably to
        // adjust its offset against the timeline; verify against callers.
        let leftInset = scrubberSize.width - handleWidth * 2.5
        let rightInset: CGFloat
        if self.scrollView.contentSize.width > self.scrollView.frame.width {
            rightInset = scrubberSize.width - handleWidth * 2.5
        } else {
            rightInset = self.scrollView.frame.width - self.scrollView.contentSize.width
} + self.scrollView.contentInset = UIEdgeInsets(top: 0.0, left: leftInset, bottom: 0.0, right: rightInset) + } + + if let offset = track.offset, track.duration > 0.0 { + let contentOffset = offset * audioTotalWidth / duration + self.scrollView.contentOffset = CGPoint(x: contentOffset, y: 0.0) + } else { + self.scrollView.contentOffset = .zero + } + } + + self.ignoreScrollUpdates = false + + transition.setCornerRadius(layer: self.clippingView.layer, cornerRadius: isSelected ? 0.0 : 9.0) + + let audioContainerFrame = CGRect(origin: .zero, size: CGSize(width: audioTotalWidth, height: scrubberSize.height)) + transition.setFrame(view: self.containerView, frame: audioContainerFrame) + + transition.setFrame(view: self.backgroundView, frame: CGRect(origin: .zero, size: audioContainerFrame.size)) + self.backgroundView.update(size: audioContainerFrame.size, transition: transition.containedViewLayoutTransition) + transition.setFrame(view: self.vibrancyView, frame: CGRect(origin: .zero, size: audioContainerFrame.size)) + transition.setFrame(view: self.vibrancyContainer, frame: CGRect(origin: .zero, size: audioContainerFrame.size)) + + let containerFrame = CGRect(origin: .zero, size: CGSize(width: clipWidth, height: audioContainerFrame.height)) + let contentContainerOrigin = deselectedClipOrigin + self.scrollView.contentOffset.x + transition.setFrame(view: self.audioContentContainerView, frame: containerFrame.offsetBy(dx: contentContainerOrigin, dy: 0.0)) + transition.setFrame(view: self.audioContentMaskView, frame: CGRect(origin: .zero, size: containerFrame.size)) + + switch track.content { + case let .video(frames, framesUpdateTimestamp): + if self.videoTransparentFramesContainer.superview == nil { + self.containerView.addSubview(self.videoTransparentFramesContainer) + self.containerView.addSubview(self.videoOpaqueFramesContainer) + } + var previousFramesUpdateTimestamp: Double? 
+ if let previousParams, case let .video(_, previousFramesUpdateTimestampValue) = previousParams.track.content { + previousFramesUpdateTimestamp = previousFramesUpdateTimestampValue + } + + if framesUpdateTimestamp != previousFramesUpdateTimestamp { + for i in 0 ..< frames.count { + let transparentFrameLayer: VideoFrameLayer + let opaqueFrameLayer: VideoFrameLayer + if i >= self.videoTransparentFrameLayers.count { + transparentFrameLayer = VideoFrameLayer() + transparentFrameLayer.masksToBounds = true + transparentFrameLayer.contentsGravity = .resizeAspectFill + self.videoTransparentFramesContainer.layer.addSublayer(transparentFrameLayer) + self.videoTransparentFrameLayers.append(transparentFrameLayer) + opaqueFrameLayer = VideoFrameLayer() + opaqueFrameLayer.masksToBounds = true + opaqueFrameLayer.contentsGravity = .resizeAspectFill + self.videoOpaqueFramesContainer.layer.addSublayer(opaqueFrameLayer) + self.videoOpaqueFrameLayers.append(opaqueFrameLayer) + } else { + transparentFrameLayer = self.videoTransparentFrameLayers[i] + opaqueFrameLayer = self.videoOpaqueFrameLayers[i] + } + transparentFrameLayer.contents = frames[i].cgImage + if let contents = opaqueFrameLayer.contents, (contents as! CGImage) !== frames[i].cgImage, opaqueFrameLayer.animation(forKey: "contents") == nil { + opaqueFrameLayer.contents = frames[i].cgImage + opaqueFrameLayer.animate(from: contents as AnyObject, to: frames[i].cgImage! 
as AnyObject, keyPath: "contents", timingFunction: CAMediaTimingFunctionName.linear.rawValue, duration: 0.2) + } else { + opaqueFrameLayer.contents = frames[i].cgImage + } + } + } + + let containerLeftEdge: CGFloat = 0.0 + let containerRightEdge: CGFloat = availableSize.width + + transition.setFrame(view: self.videoTransparentFramesContainer, frame: CGRect(origin: CGPoint(x: 0.0, y: 0.0), size: CGSize(width: scrubberSize.width, height: scrubberSize.height))) + transition.setFrame(view: self.videoOpaqueFramesContainer, frame: CGRect(origin: CGPoint(x: containerLeftEdge, y: 0.0), size: CGSize(width: containerRightEdge - containerLeftEdge, height: scrubberSize.height))) + transition.setBounds(view: self.videoOpaqueFramesContainer, bounds: CGRect(origin: CGPoint(x: containerLeftEdge, y: 0.0), size: CGSize(width: containerRightEdge - containerLeftEdge, height: scrubberSize.height))) + + var frameAspectRatio = 0.66 + if let image = frames.first, image.size.height > 0.0 { + frameAspectRatio = max(0.66, image.size.width / image.size.height) + } + let frameSize = CGSize(width: trackHeight * frameAspectRatio, height: trackHeight) + var frameOffset: CGFloat = 0.0 + for i in 0 ..< frames.count { + if i < self.videoTransparentFrameLayers.count { + let transparentFrameLayer = self.videoTransparentFrameLayers[i] + let opaqueFrameLayer = self.videoOpaqueFrameLayers[i] + let frame = CGRect(origin: CGPoint(x: frameOffset, y: floorToScreenPixels((scrubberSize.height - frameSize.height) / 2.0)), size: frameSize) + + transparentFrameLayer.bounds = CGRect(origin: .zero, size: frame.size) + opaqueFrameLayer.bounds = CGRect(origin: .zero, size: frame.size) + + transition.setPosition(layer: transparentFrameLayer, position: frame.center) + transition.setPosition(layer: opaqueFrameLayer, position: frame.center) + } + frameOffset += frameSize.width + } + case let .audio(artist, title, samples, peak): + var components: [String] = [] + var trackTitle = "" + if let artist { + 
components.append(artist)
}
if let title {
    components.append(title)
}
if components.isEmpty {
    // Fall back to a generic localized "Audio" label when neither artist nor title is known.
    let strings = context.sharedContext.currentPresentationData.with { $0 }.strings
    components.append(strings.MediaEditor_Audio)
}
trackTitle = components.joined(separator: " • ")

// NOTE(review): this is the interior of a larger track-layout method; the
// surrounding signature/state is defined above this chunk.
// Compact "icon + title" label shown on the collapsed (deselected) audio track.
let audioTitle = NSAttributedString(string: trackTitle, font: Font.semibold(13.0), textColor: .white)
let audioTitleSize: CGSize
if !trackTitle.isEmpty {
    audioTitleSize = self.audioTitle.update(
        transition: transition,
        component: AnyComponent(
            MultilineTextComponent(
                text: .plain(audioTitle)
            )
        ),
        environment: {},
        containerSize: availableSize
    )
} else {
    // Keep the last measured size while the title is empty so layout does not jump.
    if let audioTitleView = self.audioTitle.view {
        audioTitleSize = audioTitleView.bounds.size
    } else {
        audioTitleSize = .zero
    }
}

let spacing: CGFloat = 4.0
let iconSize = CGSize(width: 14.0, height: 14.0)
let contentTotalWidth = iconSize.width + audioTitleSize.width + spacing

let audioContentTransition = transition
// Icon fades out while the track is selected (waveform takes over).
transition.setAlpha(view: self.audioIconView, alpha: isSelected ? 0.0 : 1.0)

// Center icon+title horizontally within the collapsed clip, clamped to an 8pt left margin.
let audioIconFrame = CGRect(origin: CGPoint(x: max(8.0, floorToScreenPixels((deselectedClipWidth - contentTotalWidth) / 2.0)), y: floorToScreenPixels((scrubberSize.height - iconSize.height) / 2.0)), size: iconSize)
audioContentTransition.setBounds(view: self.audioIconView, bounds: CGRect(origin: .zero, size: audioIconFrame.size))
audioContentTransition.setPosition(view: self.audioIconView, position: audioIconFrame.center)

let trackTitleIsVisible = !isSelected && !track.isMain && !trackTitle.isEmpty
if let view = self.audioTitle.view {
    if view.superview == nil {
        // First layout: build the view hierarchy lazily.
        view.alpha = 0.0
        view.isUserInteractionEnabled = false
        self.containerView.addSubview(self.backgroundView)
        self.containerView.addSubview(self.audioContentContainerView)
        self.audioContentContainerView.addSubview(self.audioIconView)
        self.audioContentContainerView.addSubview(view)
    }
    transition.setAlpha(view: view, alpha: trackTitleIsVisible ? 1.0 : 0.0)
    
    let audioTitleFrame = CGRect(origin: CGPoint(x: audioIconFrame.maxX + spacing, y: floorToScreenPixels((scrubberSize.height - audioTitleSize.height) / 2.0)), size: audioTitleSize)
    view.bounds = CGRect(origin: .zero, size: audioTitleFrame.size)
    audioContentTransition.setPosition(view: view, position: audioTitleFrame.center)
}
transition.setAlpha(view: self.audioIconView, alpha: trackTitleIsVisible ? 1.0 : 0.0)

var previousSamples: Data?
if let previousParams, case let .audio(_ , _, previousSamplesValue, _) = previousParams.track.content {
    previousSamples = previousSamplesValue
}

let samples = samples ?? Data()
// When waveform samples arrive for the first time, cross-fade from a snapshot of
// the placeholder to the real waveform (both the vibrancy copy and the clone layer).
if let view = self.audioWaveform.view, previousSamples == nil && !samples.isEmpty, let vibrancySnapshotView = view.snapshotContentTree(), let snapshotView = self.waveformCloneLayer.snapshotContentTreeAsView() {
    vibrancySnapshotView.frame = view.frame
    snapshotView.alpha = 0.3
    snapshotView.frame = view.frame
    self.vibrancyContainer.addSubview(vibrancySnapshotView)
    self.containerView.addSubview(snapshotView)
    
    vibrancySnapshotView.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2, removeOnCompletion: false, completion: { _ in
        vibrancySnapshotView.removeFromSuperview()
    })
    
    snapshotView.layer.animateAlpha(from: 0.3, to: 0.0, duration: 0.2, removeOnCompletion: false, completion: { _ in
        // Fix: remove the snapshot overlay itself. Previously this removed
        // `vibrancySnapshotView` a second time (already handled by the completion
        // above), leaving `snapshotView` attached to `containerView` forever.
        snapshotView.removeFromSuperview()
    })
    
    view.layer.animateScaleY(from: 0.01, to: 1.0, duration: 0.2)
    view.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2)
    
    self.waveformCloneLayer.animateScaleY(from: 0.01, to: 1.0, duration: 0.2)
    self.waveformCloneLayer.animateAlpha(from: 0.0, to: 0.3, duration: 0.2)
}
let audioWaveformSize = self.audioWaveform.update(
    transition: transition,
    component: AnyComponent(
        AudioWaveformComponent(
            backgroundColor: .clear,
            foregroundColor: UIColor(rgb: 0xffffff, alpha: 0.3),
            shimmerColor: nil,
            style: .middle,
            samples: samples,
            peak: peak,
            status: .complete(),
            seek: nil,
            updateIsSeeking: nil
        )
    ),
    environment: {},
    containerSize: CGSize(width: audioContainerFrame.width, height: trackHeight)
)
if let view = self.audioWaveform.view as? AudioWaveformComponent.View {
    if view.superview == nil {
        view.cloneLayer = self.waveformCloneLayer
        self.vibrancyContainer.addSubview(view)
        self.containerView.layer.addSublayer(self.waveformCloneLayer)
    }
    let audioWaveformFrame = CGRect(origin: CGPoint(x: 0.0, y: isSelected || track.isMain ? 
0.0 : 6.0), size: audioWaveformSize)
    transition.setFrame(view: view, frame: audioWaveformFrame)
    transition.setFrame(layer: self.waveformCloneLayer, frame: audioWaveformFrame)
}
}

return scrubberSize
}
}


/// Draggable trim selection: a left and right handle, a "zone" between them that
/// moves the whole selection, and a top/bottom border drawn across the selection.
/// Positions are reported in media-time seconds via `trimUpdated`.
private class TrimView: UIView {
    fileprivate let leftHandleView = HandleView()
    fileprivate let rightHandleView = HandleView()
    private let borderView = UIImageView()
    // Invisible strip between the handles; dragging it moves the whole selection.
    private let zoneView = HandleView()
    
    private let leftCapsuleView = UIView()
    private let rightCapsuleView = UIView()
    
    // True while any pan gesture is in progress; used by update() to tint the chrome.
    fileprivate var isPanningTrimHandle = false
    
    // When true, only the handles themselves respond to touches (see point(inside:with:)).
    var isHollow = false
    
    // (startPosition, endPosition, fromRightHandle, isFinal) — right-handle pans pass
    // true for the third argument; the fourth is true when the gesture ends/cancels.
    var trimUpdated: (Double, Double, Bool, Bool) -> Void = { _, _, _, _ in }
    var updated: (Transition) -> Void = { _ in }
    
    override init(frame: CGRect) {
        super.init(frame: .zero)
        
        let height = trackHeight
        // Handle artwork: a rounded capsule with a transparent inner cut-out,
        // template-rendered so tintColor drives its color; stretchable vertically.
        let handleImage = generateImage(CGSize(width: handleWidth, height: height), rotatedContext: { size, context in
            context.clear(CGRect(origin: .zero, size: size))
            context.setFillColor(UIColor.white.cgColor)
            
            let path = UIBezierPath(roundedRect: CGRect(origin: .zero, size: CGSize(width: size.width * 2.0, height: size.height)), cornerRadius: 9.0)
            context.addPath(path.cgPath)
            context.fillPath()
            
            // Punch out the interior so only the handle outline remains.
            context.setBlendMode(.clear)
            let innerPath = UIBezierPath(roundedRect: CGRect(origin: CGPoint(x: handleWidth - 3.0, y: borderHeight), size: CGSize(width: handleWidth, height: size.height - borderHeight * 2.0)), cornerRadius: 2.0)
            context.addPath(innerPath.cgPath)
            context.fillPath()
        })?.withRenderingMode(.alwaysTemplate).resizableImage(withCapInsets: UIEdgeInsets(top: 10.0, left: 0.0, bottom: 10.0, right: 0.0))
        
        // Empty image: the zone is purely a touch target.
        self.zoneView.image = UIImage()
        self.zoneView.isUserInteractionEnabled = true
        self.zoneView.hitTestSlop = UIEdgeInsets(top: -8.0, left: 0.0, bottom: -8.0, right: 0.0)
        
        self.leftHandleView.image = handleImage
        self.leftHandleView.isUserInteractionEnabled = true
        self.leftHandleView.tintColor = .white
        self.leftHandleView.contentMode = .scaleToFill
        self.leftHandleView.hitTestSlop = UIEdgeInsets(top: -8.0, left: -9.0, bottom: -8.0, right: -9.0)
        
        self.rightHandleView.image = handleImage
        // Right handle is the horizontally mirrored left handle.
        self.rightHandleView.transform = CGAffineTransform(scaleX: -1.0, y: 1.0)
        self.rightHandleView.isUserInteractionEnabled = true
        self.rightHandleView.tintColor = .white
        self.rightHandleView.contentMode = .scaleToFill
        self.rightHandleView.hitTestSlop = UIEdgeInsets(top: -8.0, left: -9.0, bottom: -8.0, right: -9.0)
        
        // 1px-wide stretchable strip with opaque top and bottom border bands.
        self.borderView.image = generateImage(CGSize(width: 1.0, height: height), rotatedContext: { size, context in
            context.clear(CGRect(origin: .zero, size: size))
            context.setFillColor(UIColor.white.cgColor)
            context.fill(CGRect(origin: .zero, size: CGSize(width: size.width, height: borderHeight)))
            context.fill(CGRect(origin: CGPoint(x: 0.0, y: size.height - borderHeight), size: CGSize(width: size.width, height: height)))
        })?.withRenderingMode(.alwaysTemplate).resizableImage(withCapInsets: UIEdgeInsets(top: 10.0, left: 0.0, bottom: 10.0, right: 0.0))
        self.borderView.tintColor = .white
        self.borderView.isUserInteractionEnabled = false
        
        // Small dark grip capsules centered inside each handle.
        self.leftCapsuleView.clipsToBounds = true
        self.leftCapsuleView.layer.cornerRadius = 1.0
        self.leftCapsuleView.backgroundColor = UIColor(rgb: 0x343436)
        
        self.rightCapsuleView.clipsToBounds = true
        self.rightCapsuleView.layer.cornerRadius = 1.0
        self.rightCapsuleView.backgroundColor = UIColor(rgb: 0x343436)
        
        self.addSubview(self.zoneView)
        self.addSubview(self.leftHandleView)
        self.leftHandleView.addSubview(self.leftCapsuleView)
        
        self.addSubview(self.rightHandleView)
        self.rightHandleView.addSubview(self.rightCapsuleView)
        self.addSubview(self.borderView)
        
        self.zoneView.addGestureRecognizer(UIPanGestureRecognizer(target: self, action: #selector(self.handleZoneHandlePan(_:))))
        self.leftHandleView.addGestureRecognizer(UIPanGestureRecognizer(target: self, action: #selector(self.handleLeftHandlePan(_:))))
        self.rightHandleView.addGestureRecognizer(UIPanGestureRecognizer(target: self, action: #selector(self.handleRightHandlePan(_:))))
    }
    
    required init?(coder: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }
    
    /// Drags the whole selection: keeps the duration fixed and shifts start/end
    /// by the pan translation, clamped to [0, duration - selection length].
    @objc private func handleZoneHandlePan(_ gestureRecognizer: UIPanGestureRecognizer) {
        guard let params = self.params else {
            return
        }
        let translation = gestureRecognizer.translation(in: self)
        
        // Usable track width between the handle centers.
        let start = handleWidth / 2.0
        let end = self.frame.width - handleWidth / 2.0
        let length = end - start
        
        let delta = translation.x / length
        
        let duration = params.endPosition - params.startPosition
        let startValue = max(0.0, min(params.duration - duration, params.startPosition + delta * params.duration))
        let endValue = startValue + duration
        
        var transition: Transition = .immediate
        switch gestureRecognizer.state {
        case .began, .changed:
            self.isPanningTrimHandle = true
            self.trimUpdated(startValue, endValue, false, false)
            if case .began = gestureRecognizer.state {
                transition = .easeInOut(duration: 0.25)
            }
        case .ended, .cancelled:
            self.isPanningTrimHandle = false
            self.trimUpdated(startValue, endValue, false, true)
            transition = .easeInOut(duration: 0.25)
        default:
            break
        }
        
        // Incremental translation: reset so the next callback delivers only the delta.
        gestureRecognizer.setTranslation(.zero, in: self)
        self.updated(transition)
    }
    
    /// Drags the left (start) handle. Enforces minDuration against the fixed end,
    /// and pulls the end in if the selection would exceed maxDuration.
    @objc private func handleLeftHandlePan(_ gestureRecognizer: UIPanGestureRecognizer) {
        guard let params = self.params else {
            return
        }
        let location = gestureRecognizer.location(in: self)
        let start = handleWidth / 2.0
        let end = self.frame.width - handleWidth / 2.0
        let length = end - start
        let fraction = (location.x - start) / length
        
        var startValue = max(0.0, params.duration * fraction)
        if startValue > params.endPosition - params.minDuration {
            startValue = max(0.0, params.endPosition - params.minDuration)
        }
        var endValue = params.endPosition
        if endValue - startValue > params.maxDuration {
            let delta = (endValue - startValue) - params.maxDuration
            endValue -= delta
        }
        
        var transition: Transition = .immediate
        switch gestureRecognizer.state {
        case .began, .changed:
            self.isPanningTrimHandle = true
            self.trimUpdated(startValue, endValue, false, false)
            if case .began = gestureRecognizer.state {
                transition = .easeInOut(duration: 0.25)
            }
        case .ended, .cancelled:
            self.isPanningTrimHandle = false
            self.trimUpdated(startValue, endValue, false, true)
            transition = .easeInOut(duration: 0.25)
        default:
            break
        }
        self.updated(transition)
    }
    
    /// Drags the right (end) handle — mirror of handleLeftHandlePan; reports
    /// `true` for the fromRightHandle flag.
    @objc private func handleRightHandlePan(_ gestureRecognizer: UIPanGestureRecognizer) {
        guard let params = self.params else {
            return
        }
        let location = gestureRecognizer.location(in: self)
        let start = handleWidth / 2.0
        let end = self.frame.width - handleWidth / 2.0
        let length = end - start
        let fraction = (location.x - start) / length
        
        var endValue = min(params.duration, params.duration * fraction)
        if endValue < params.startPosition + params.minDuration {
            endValue = min(params.duration, params.startPosition + params.minDuration)
        }
        var startValue = params.startPosition
        if endValue - startValue > params.maxDuration {
            let delta = (endValue - startValue) - params.maxDuration
            startValue += delta
        }
        
        var transition: Transition = .immediate
        switch gestureRecognizer.state {
        case .began, .changed:
            self.isPanningTrimHandle = true
            self.trimUpdated(startValue, endValue, true, false)
            if case .began = gestureRecognizer.state {
                transition = .easeInOut(duration: 0.25)
            }
        case .ended, .cancelled:
            self.isPanningTrimHandle = false
            self.trimUpdated(startValue, endValue, true, true)
            transition = .easeInOut(duration: 0.25)
        default:
            break
        }
        self.updated(transition)
    }
    
    // Last layout parameters (all in seconds except implied widths); cached by
    // update() and read by the pan handlers. nil until the first update().
    var params: (
        duration: Double,
        startPosition: Double,
        endPosition: Double,
        position: Double,
        minDuration: Double,
        maxDuration: Double
    )?
    
    /// Lays out the handles, zone, capsules and border for the given trim state
    /// and caches `params` for the gesture handlers.
    /// - Returns: the resulting left and right handle frames (used by callers to
    ///   position dependent views).
    func update(
        totalWidth: CGFloat,
        visualInsets: UIEdgeInsets,
        scrubberSize: CGSize,
        duration: Double,
        startPosition: Double,
        endPosition: Double,
        position: Double,
        minDuration: Double,
        maxDuration: Double,
        transition: Transition
    ) -> (leftHandleFrame: CGRect, rightHandleFrame: CGRect)
    {
        self.params = (duration, startPosition, endPosition, position, minDuration, maxDuration)
        
        // Highlight the chrome in yellow while a pan gesture is active.
        let trimColor = self.isPanningTrimHandle ? UIColor(rgb: 0xf8d74a) : .white
        transition.setTintColor(view: self.leftHandleView, color: trimColor)
        transition.setTintColor(view: self.rightHandleView, color: trimColor)
        transition.setTintColor(view: self.borderView, color: trimColor)
        
        let totalRange = totalWidth - handleWidth
        // Guard against division by zero for zero-length media.
        let leftHandlePositionFraction = duration > 0.0 ? startPosition / duration : 0.0
        let leftHandlePosition = floorToScreenPixels(handleWidth / 2.0 + totalRange * leftHandlePositionFraction)
        
        var leftHandleFrame = CGRect(origin: CGPoint(x: leftHandlePosition - handleWidth / 2.0, y: 0.0), size: CGSize(width: handleWidth, height: scrubberSize.height))
        leftHandleFrame.origin.x = max(leftHandleFrame.origin.x, visualInsets.left)
        transition.setFrame(view: self.leftHandleView, frame: leftHandleFrame)
        
        let rightHandlePositionFraction = duration > 0.0 ? endPosition / duration : 1.0
        let rightHandlePosition = floorToScreenPixels(handleWidth / 2.0 + totalRange * rightHandlePositionFraction)
        
        // Never let the right handle cross the left one; clamp to the right inset.
        var rightHandleFrame = CGRect(origin: CGPoint(x: max(leftHandleFrame.maxX, rightHandlePosition - handleWidth / 2.0), y: 0.0), size: CGSize(width: handleWidth, height: scrubberSize.height))
        rightHandleFrame.origin.x = min(rightHandleFrame.origin.x, totalWidth - visualInsets.right - handleWidth)
        transition.setFrame(view: self.rightHandleView, frame: rightHandleFrame)
        
        // Grip capsules, vertically centered within each handle.
        let capsuleSize = CGSize(width: 2.0, height: 11.0)
        transition.setFrame(view: self.leftCapsuleView, frame: CGRect(origin: CGPoint(x: 5.0 - UIScreenPixel, y: floorToScreenPixels((leftHandleFrame.height - capsuleSize.height) / 2.0)), size: capsuleSize))
        transition.setFrame(view: self.rightCapsuleView, frame: CGRect(origin: CGPoint(x: 5.0 - UIScreenPixel, y: floorToScreenPixels((leftHandleFrame.height - capsuleSize.height) / 2.0)), size: capsuleSize))
        
        // Zone and border both span the gap between the handles.
        let zoneFrame = CGRect(x: leftHandleFrame.maxX, y: 0.0, width: rightHandleFrame.minX - leftHandleFrame.maxX, height: scrubberSize.height)
        transition.setFrame(view: self.zoneView, frame: zoneFrame)
        
        let borderFrame = CGRect(origin: CGPoint(x: leftHandleFrame.maxX, y: 0.0), size: CGSize(width: rightHandleFrame.minX - leftHandleFrame.maxX, height: scrubberSize.height))
        transition.setFrame(view: self.borderView, frame: borderFrame)
        
        return (leftHandleFrame, rightHandleFrame)
    }
    
    override func point(inside point: CGPoint, with event: UIEvent?) 
-> Bool {
        // Expand both handle hit areas by the same slop used on the views themselves.
        let leftHandleFrame = self.leftHandleView.frame.insetBy(dx: -8.0, dy: -9.0)
        let rightHandleFrame = self.rightHandleView.frame.insetBy(dx: -8.0, dy: -9.0)
        // Union of the two expanded handle frames. NOTE(review): the height term uses
        // rightHandleFrame for both minY and maxY — equivalent to using leftHandleFrame
        // as long as both handles share y-origin and height, which update() guarantees.
        let areaFrame = CGRect(x: leftHandleFrame.minX, y: leftHandleFrame.minY, width: rightHandleFrame.maxX - leftHandleFrame.minX, height: rightHandleFrame.maxY - rightHandleFrame.minY)
        
        if self.isHollow {
            // Only the handles are interactive; the middle passes touches through.
            return leftHandleFrame.contains(point) || rightHandleFrame.contains(point)
        } else {
            return areaFrame.contains(point)
        }
    }
}


/// Shape layer for a single video thumbnail frame with a hairline separator
/// stripe pinned to its trailing edge.
private class VideoFrameLayer: SimpleShapeLayer {
    private let stripeLayer = SimpleShapeLayer()
    
    override func layoutSublayers() {
        super.layoutSublayers()
        
        // Lazily attach the stripe on first layout.
        if self.stripeLayer.superlayer == nil {
            self.stripeLayer.backgroundColor = UIColor(rgb: 0x000000, alpha: 0.3).cgColor
            self.addSublayer(self.stripeLayer)
        }
        self.stripeLayer.frame = CGRect(x: self.bounds.width - UIScreenPixel, y: 0.0, width: UIScreenPixel, height: self.bounds.height)
    }
}

/// Image view whose hit area can be enlarged beyond its bounds via `hitTestSlop`
/// (negative insets expand the touch target).
private final class HandleView: UIImageView {
    var hitTestSlop = UIEdgeInsets()
    
    override func point(inside point: CGPoint, with event: UIEvent?) -> Bool {
        return self.bounds.inset(by: self.hitTestSlop).contains(point)
    }
}


// Horizontal alpha mask: fully opaque up to 75% of the width, fading to
// transparent by 95%; left-cap-stretchable so only the fade region scales.
private let audioContentMaskImage = generateImage(CGSize(width: 100.0, height: 50.0), rotatedContext: { size, context in
    context.clear(CGRect(origin: .zero, size: size))
    
    var locations: [CGFloat] = [0.0, 0.75, 0.95, 1.0]
    let colors: [CGColor] = [UIColor.white.cgColor, UIColor.white.cgColor, UIColor.white.withAlphaComponent(0.0).cgColor, UIColor.white.withAlphaComponent(0.0).cgColor]
    
    let colorSpace = CGColorSpaceCreateDeviceRGB()
    let gradient = CGGradient(colorsSpace: colorSpace, colors: colors as CFArray, locations: &locations)!
    
    context.drawLinearGradient(gradient, start: CGPoint(x: 0.0, y: 0.0), end: CGPoint(x: size.width, y: 0.0), options: CGGradientDrawingOptions())
})?.stretchableImage(withLeftCapWidth: 40, topCapHeight: 0)


private extension MediaScrubberComponent.Track {
    /// Whether this track's content is the audio case.
    var isAudio: Bool {
        if case .audio = self.content {
            return true
        } else {
            return false
        }
    }
}
diff --git a/submodules/TelegramUI/Components/MediaEditorScreen/Sources/MediaToolsScreen.swift b/submodules/TelegramUI/Components/MediaEditorScreen/Sources/MediaToolsScreen.swift
index c1345d6bec..e5013f5429 100644
--- a/submodules/TelegramUI/Components/MediaEditorScreen/Sources/MediaToolsScreen.swift
+++ b/submodules/TelegramUI/Components/MediaEditorScreen/Sources/MediaToolsScreen.swift
@@ -946,8 +946,6 @@ private final class MediaToolsScreenComponent: Component {
     }
 }
 
-private let storyDimensions = CGSize(width: 1080.0, height: 1920.0)
-
 public final class MediaToolsScreen: ViewController {
     fileprivate final class Node: ViewControllerTracingNode, UIGestureRecognizerDelegate {
         private weak var controller: MediaToolsScreen? 
diff --git a/submodules/TelegramUI/Components/MediaEditorScreen/Sources/StoryPreviewComponent.swift b/submodules/TelegramUI/Components/MediaEditorScreen/Sources/StoryPreviewComponent.swift index 266eeee692..176a2af516 100644 --- a/submodules/TelegramUI/Components/MediaEditorScreen/Sources/StoryPreviewComponent.swift +++ b/submodules/TelegramUI/Components/MediaEditorScreen/Sources/StoryPreviewComponent.swift @@ -281,6 +281,7 @@ final class StoryPreviewComponent: Component { audioRecorder: nil, videoRecordingStatus: nil, isRecordingLocked: false, + hasRecordedVideo: false, recordedAudioPreview: nil, hasRecordedVideoPreview: false, wasRecordingDismissed: false, @@ -293,6 +294,7 @@ final class StoryPreviewComponent: Component { customInputView: nil, forceIsEditing: false, disabledPlaceholder: nil, + header: nil, isChannel: false, storyItem: nil, chatLocation: nil diff --git a/submodules/TelegramUI/Components/MessageInputActionButtonComponent/Sources/MessageInputActionButtonComponent.swift b/submodules/TelegramUI/Components/MessageInputActionButtonComponent/Sources/MessageInputActionButtonComponent.swift index e368bdfdc3..e6f088122b 100644 --- a/submodules/TelegramUI/Components/MessageInputActionButtonComponent/Sources/MessageInputActionButtonComponent.swift +++ b/submodules/TelegramUI/Components/MessageInputActionButtonComponent/Sources/MessageInputActionButtonComponent.swift @@ -23,6 +23,10 @@ private extension MessageInputActionButtonComponent.Mode { return "Chat/Input/Text/IconForwardSend" case .like: return "Stories/InputLikeOff" + case .removeVideoInput: + return "Media Editor/RemoveRecordedVideo" + case .repost: + return "Stories/InputRepost" default: return nil } @@ -36,12 +40,14 @@ public final class MessageInputActionButtonComponent: Component { case apply case voiceInput case videoInput + case removeVideoInput case unavailableVoiceInput case delete case attach case forward case more case like(reaction: MessageReaction.Reaction?, file: TelegramMediaFile?, 
animationFileId: Int64?) + case repost } public enum Action { @@ -333,7 +339,7 @@ public final class MessageInputActionButtonComponent: Component { switch component.mode { case .none: break - case .send, .apply, .attach, .delete, .forward: + case .send, .apply, .attach, .delete, .forward, .removeVideoInput, .repost: sendAlpha = 1.0 case let .like(reaction, _, _): if reaction != nil { @@ -548,9 +554,9 @@ public final class MessageInputActionButtonComponent: Component { if previousComponent?.mode != component.mode { switch component.mode { - case .none, .send, .apply, .voiceInput, .attach, .delete, .forward, .unavailableVoiceInput, .more, .like: + case .none, .send, .apply, .voiceInput, .attach, .delete, .forward, .unavailableVoiceInput, .more, .like, .repost: micButton.updateMode(mode: .audio, animated: !transition.animation.isImmediate) - case .videoInput: + case .videoInput, .removeVideoInput: micButton.updateMode(mode: .video, animated: !transition.animation.isImmediate) } } diff --git a/submodules/TelegramUI/Components/MessageInputPanelComponent/BUILD b/submodules/TelegramUI/Components/MessageInputPanelComponent/BUILD index 225d0f982e..dec402da52 100644 --- a/submodules/TelegramUI/Components/MessageInputPanelComponent/BUILD +++ b/submodules/TelegramUI/Components/MessageInputPanelComponent/BUILD @@ -39,6 +39,7 @@ swift_library( "//submodules/SearchPeerMembers", "//submodules/ContextUI", "//submodules/TelegramUI/Components/ContextReferenceButtonComponent", + "//submodules/TelegramUI/Components/Stories/ForwardInfoPanelComponent", ], visibility = [ "//visibility:public", diff --git a/submodules/TelegramUI/Components/MessageInputPanelComponent/Sources/MessageInputPanelComponent.swift b/submodules/TelegramUI/Components/MessageInputPanelComponent/Sources/MessageInputPanelComponent.swift index 0edc572a29..3cf84f4c46 100644 --- a/submodules/TelegramUI/Components/MessageInputPanelComponent/Sources/MessageInputPanelComponent.swift +++ 
b/submodules/TelegramUI/Components/MessageInputPanelComponent/Sources/MessageInputPanelComponent.swift @@ -19,6 +19,7 @@ import AnimatedTextComponent import AnimatedCountLabelNode import MessageInputActionButtonComponent import ContextReferenceButtonComponent +import ForwardInfoPanelComponent private var sharedIsReduceTransparencyEnabled = UIAccessibility.isReduceTransparencyEnabled @@ -135,6 +136,7 @@ public final class MessageInputPanelComponent: Component { public let audioRecorder: ManagedAudioRecorder? public let videoRecordingStatus: InstantVideoControllerRecordingStatus? public let isRecordingLocked: Bool + public let hasRecordedVideo: Bool public let recordedAudioPreview: ChatRecordedMediaPreview? public let hasRecordedVideoPreview: Bool public let wasRecordingDismissed: Bool @@ -147,6 +149,7 @@ public final class MessageInputPanelComponent: Component { public let customInputView: UIView? public let forceIsEditing: Bool public let disabledPlaceholder: String? + public let header: AnyComponent? public let isChannel: Bool public let storyItem: EngineStoryItem? public let chatLocation: ChatLocation? @@ -188,6 +191,7 @@ public final class MessageInputPanelComponent: Component { audioRecorder: ManagedAudioRecorder?, videoRecordingStatus: InstantVideoControllerRecordingStatus?, isRecordingLocked: Bool, + hasRecordedVideo: Bool, recordedAudioPreview: ChatRecordedMediaPreview?, hasRecordedVideoPreview: Bool, wasRecordingDismissed: Bool, @@ -200,6 +204,7 @@ public final class MessageInputPanelComponent: Component { customInputView: UIView?, forceIsEditing: Bool, disabledPlaceholder: String?, + header: AnyComponent?, isChannel: Bool, storyItem: EngineStoryItem?, chatLocation: ChatLocation? 
@@ -240,6 +245,7 @@ public final class MessageInputPanelComponent: Component { self.audioRecorder = audioRecorder self.videoRecordingStatus = videoRecordingStatus self.isRecordingLocked = isRecordingLocked + self.hasRecordedVideo = hasRecordedVideo self.wasRecordingDismissed = wasRecordingDismissed self.recordedAudioPreview = recordedAudioPreview self.hasRecordedVideoPreview = hasRecordedVideoPreview @@ -252,6 +258,7 @@ public final class MessageInputPanelComponent: Component { self.customInputView = customInputView self.forceIsEditing = forceIsEditing self.disabledPlaceholder = disabledPlaceholder + self.header = header self.isChannel = isChannel self.storyItem = storyItem self.chatLocation = chatLocation @@ -300,6 +307,9 @@ public final class MessageInputPanelComponent: Component { if lhs.isRecordingLocked != rhs.isRecordingLocked { return false } + if lhs.hasRecordedVideo != rhs.hasRecordedVideo { + return false + } if lhs.wasRecordingDismissed != rhs.wasRecordingDismissed { return false } @@ -342,6 +352,9 @@ public final class MessageInputPanelComponent: Component { if lhs.disabledPlaceholder != rhs.disabledPlaceholder { return false } + if lhs.header != rhs.header { + return false + } if (lhs.attachmentAction == nil) != (rhs.attachmentAction == nil) { return false } @@ -380,6 +393,7 @@ public final class MessageInputPanelComponent: Component { private let vibrancyPlaceholder = ComponentView() private let counter = ComponentView() + private var header: ComponentView? private var disabledPlaceholder: ComponentView? 
private var textClippingView = UIView() @@ -436,12 +450,7 @@ public final class MessageInputPanelComponent: Component { override init(frame: CGRect) { self.fieldBackgroundView = BlurredBackgroundView(color: UIColor(white: 0.0, alpha: 0.5), enableBlur: true) - let style: UIBlurEffect.Style = .dark - let blurEffect = UIBlurEffect(style: style) - let vibrancyEffect = UIVibrancyEffect(blurEffect: blurEffect) - let vibrancyEffectView = UIVisualEffectView(effect: vibrancyEffect) - vibrancyEffectView.alpha = 0.0 - self.vibrancyEffectView = vibrancyEffectView + self.vibrancyEffectView = UIVisualEffectView(effect: UIVibrancyEffect(blurEffect: UIBlurEffect(style: .dark))) self.mediaRecordingVibrancyContainer = UIView() self.vibrancyEffectView.contentView.addSubview(self.mediaRecordingVibrancyContainer) @@ -786,11 +795,67 @@ public final class MessageInputPanelComponent: Component { insets.right = insets.left } - let fieldFrame = CGRect(origin: CGPoint(x: insets.left, y: insets.top), size: CGSize(width: availableSize.width - insets.left - insets.right, height: textFieldSize.height)) + var headerHeight: CGFloat = 0.0 + if let headerComponent = component.header, !hasMediaRecording && !hasMediaEditing { + let headerInset: CGFloat = 10.0 + let header: ComponentView + var headerTransition = transition + if let current = self.header { + header = current + } else { + headerTransition = .immediate + header = ComponentView() + self.header = header + } + let headerSize = header.update( + transition: .immediate, + component: headerComponent, + environment: {}, + containerSize: CGSize(width: availableSize.width - insets.left - insets.right - headerInset * 2.0, height: 100.0) + ) + let headerFrame = CGRect(origin: CGPoint(x: insets.left + headerInset, y: insets.top + headerInset), size: headerSize) + if let headerView = header.view { + if let headerView = headerView as? 
ForwardInfoPanelComponent.View { + if headerView.superview == nil { + self.addSubview(headerView) + self.vibrancyEffectView.contentView.addSubview(headerView.backgroundView) + + headerView.backgroundView.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.4) + } + headerView.backgroundView.frame = headerFrame.offsetBy(dx: -9.0, dy: -14.0) + } else { + if headerView.superview == nil { + self.addSubview(headerView) + } + } + headerTransition.setPosition(view: headerView, position: headerFrame.center) + headerView.bounds = CGRect(origin: CGPoint(), size: headerFrame.size) + } + headerHeight = headerFrame.height + headerInset + } else { + if let header = self.header { + self.header = nil + if let headerView = header.view as? ForwardInfoPanelComponent.View { + headerView.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2, removeOnCompletion: false, completion: { _ in + headerView.removeFromSuperview() + }) + headerView.backgroundView.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2, removeOnCompletion: false, completion: { _ in + headerView.backgroundView.removeFromSuperview() + }) + } else { + header.view?.removeFromSuperview() + } + } + } + + var fieldFrame = CGRect(origin: CGPoint(x: insets.left, y: insets.top), size: CGSize(width: availableSize.width - insets.left - insets.right, height: textFieldSize.height)) + if hasMediaRecording || hasMediaEditing { + fieldFrame.size.height = baseFieldHeight + } var fieldBackgroundFrame: CGRect if hasMediaRecording { - fieldBackgroundFrame = CGRect(origin: CGPoint(x: mediaInsets.left, y: insets.top), size: CGSize(width: availableSize.width - mediaInsets.left - mediaInsets.right, height: textFieldSize.height)) + fieldBackgroundFrame = CGRect(origin: CGPoint(x: mediaInsets.left, y: insets.top), size: CGSize(width: availableSize.width - mediaInsets.left - mediaInsets.right, height: fieldFrame.height)) } else if isEditing || component.style == .editor || component.style == .media { fieldBackgroundFrame = fieldFrame } else { @@ 
-802,17 +867,17 @@ public final class MessageInputPanelComponent: Component { fieldBackgroundFrame = CGRect(origin: CGPoint(x: mediaInsets.left, y: insets.top), size: CGSize(width: availableSize.width - mediaInsets.left - 50.0, height: textFieldSize.height)) } } + + let rawFieldBackgroundFrame = fieldBackgroundFrame + fieldBackgroundFrame.size.height += headerHeight transition.setFrame(view: self.vibrancyEffectView, frame: CGRect(origin: CGPoint(), size: fieldBackgroundFrame.size)) self.vibrancyEffectView.isHidden = component.style == .media - if isEditing { - self.vibrancyEffectView.alpha = 1.0 - } transition.setFrame(view: self.fieldBackgroundView, frame: fieldBackgroundFrame) - self.fieldBackgroundView.update(size: fieldBackgroundFrame.size, cornerRadius: baseFieldHeight * 0.5, transition: transition.containedViewLayoutTransition) + self.fieldBackgroundView.update(size: fieldBackgroundFrame.size, cornerRadius: headerHeight > 0.0 ? 18.0 : baseFieldHeight * 0.5, transition: transition.containedViewLayoutTransition) - var textClippingFrame = fieldBackgroundFrame + var textClippingFrame = rawFieldBackgroundFrame.offsetBy(dx: 0.0, dy: headerHeight) if component.style == .media, !isEditing { textClippingFrame.size.height -= 10.0 } @@ -828,13 +893,15 @@ public final class MessageInputPanelComponent: Component { if isEditing || component.style == .story { placeholderOriginX = 16.0 } else { - placeholderOriginX = floorToScreenPixels((availableSize.width - placeholderSize.width) / 2.0) + placeholderOriginX = floorToScreenPixels(fieldBackgroundFrame.minX + (fieldBackgroundFrame.width - placeholderSize.width) / 2.0) } - let placeholderFrame = CGRect(origin: CGPoint(x: placeholderOriginX, y: floor((fieldBackgroundFrame.height - placeholderSize.height) * 0.5)), size: placeholderSize) + let placeholderFrame = CGRect(origin: CGPoint(x: placeholderOriginX, y: headerHeight + floor((rawFieldBackgroundFrame.height - placeholderSize.height) * 0.5)), size: placeholderSize) if let 
placeholderView = self.placeholder.view, let vibrancyPlaceholderView = self.vibrancyPlaceholder.view { if vibrancyPlaceholderView.superview == nil { vibrancyPlaceholderView.layer.anchorPoint = CGPoint() self.vibrancyEffectView.contentView.addSubview(vibrancyPlaceholderView) + + vibrancyPlaceholderView.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.4) } transition.setPosition(view: vibrancyPlaceholderView, position: placeholderFrame.origin) vibrancyPlaceholderView.bounds = CGRect(origin: CGPoint(), size: placeholderFrame.size) @@ -854,7 +921,10 @@ public final class MessageInputPanelComponent: Component { let fieldAlpha = sharedIsReduceTransparencyEnabled ? 0.09 : 1.0 transition.setAlpha(view: self.fieldBackgroundView, alpha: (component.disabledPlaceholder != nil || component.isChannel) ? 0.0 : fieldAlpha) - let size = CGSize(width: availableSize.width, height: textFieldSize.height + insets.top + insets.bottom) + var size = CGSize(width: availableSize.width, height: textFieldSize.height + insets.top + insets.bottom + headerHeight) + if hasMediaRecording || hasMediaEditing { + size.height = baseFieldHeight + insets.top + insets.bottom + } var rightButtonsOffsetX: CGFloat = 0.0 if component.isChannel, let storyItem = component.storyItem { @@ -1157,7 +1227,11 @@ public final class MessageInputPanelComponent: Component { let inputActionButtonMode: MessageInputActionButtonComponent.Mode if case .editor = component.style { - inputActionButtonMode = isEditing ? .apply : .none + if isEditing { + inputActionButtonMode = .apply + } else { + inputActionButtonMode = component.hasRecordedVideo ? .removeVideoInput : .videoInput + } } else if case .media = component.style { inputActionButtonMode = isEditing ? 
.apply : .none } else { @@ -1217,6 +1291,10 @@ public final class MessageInputPanelComponent: Component { } case .voiceInput, .videoInput: component.setMediaRecordingActive?(action == .down, mode == .videoInput, sendAction) + case .removeVideoInput: + if case .up = action { + component.setMediaRecordingActive?(true, true, false) + } case .forward: if case .up = action { component.forwardAction?() @@ -1463,7 +1541,7 @@ public final class MessageInputPanelComponent: Component { if stickerButtonView.superview == nil { self.addSubview(stickerButtonView) } - let stickerIconFrame = CGRect(origin: CGPoint(x: fieldIconNextX - stickerButtonSize.width, y: fieldFrame.maxY - 4.0 - stickerButtonSize.height), size: stickerButtonSize) + let stickerIconFrame = CGRect(origin: CGPoint(x: fieldIconNextX - stickerButtonSize.width, y: fieldBackgroundFrame.maxY - 4.0 - stickerButtonSize.height), size: stickerButtonSize) transition.setPosition(view: stickerButtonView, position: stickerIconFrame.center) transition.setBounds(view: stickerButtonView, bounds: CGRect(origin: CGPoint(), size: stickerIconFrame.size)) @@ -1508,12 +1586,13 @@ public final class MessageInputPanelComponent: Component { self.addSubview(timeoutButtonView) } let originX = fieldBackgroundFrame.maxX - 4.0 - let timeoutIconFrame = CGRect(origin: CGPoint(x: originX - timeoutButtonSize.width, y: fieldFrame.maxY - 4.0 - timeoutButtonSize.height), size: timeoutButtonSize) + let timeoutIconFrame = CGRect(origin: CGPoint(x: originX - timeoutButtonSize.width, y: fieldBackgroundFrame.maxY - 4.0 - timeoutButtonSize.height), size: timeoutButtonSize) transition.setPosition(view: timeoutButtonView, position: timeoutIconFrame.center) transition.setBounds(view: timeoutButtonView, bounds: CGRect(origin: CGPoint(), size: timeoutIconFrame.size)) - transition.setAlpha(view: timeoutButtonView, alpha: isEditing ? 0.0 : 1.0) - transition.setScale(view: timeoutButtonView, scale: isEditing ? 
0.1 : 1.0) + let visible = !hasMediaRecording && !hasMediaEditing && !isEditing + transition.setAlpha(view: timeoutButtonView, alpha: visible ? 1.0 : 0.0) + transition.setScale(view: timeoutButtonView, scale: visible ? 1.0 : 0.1) } } diff --git a/submodules/TelegramUI/Components/PeerInfo/PeerInfoScreen/Sources/Panes/PeerInfoGroupsInCommonPaneNode.swift b/submodules/TelegramUI/Components/PeerInfo/PeerInfoScreen/Sources/Panes/PeerInfoGroupsInCommonPaneNode.swift index 7033542b75..673fd6f5c0 100644 --- a/submodules/TelegramUI/Components/PeerInfo/PeerInfoScreen/Sources/Panes/PeerInfoGroupsInCommonPaneNode.swift +++ b/submodules/TelegramUI/Components/PeerInfo/PeerInfoScreen/Sources/Panes/PeerInfoGroupsInCommonPaneNode.swift @@ -62,7 +62,7 @@ final class PeerInfoGroupsInCommonPaneNode: ASDisplayNode, PeerInfoPaneNode { private let context: AccountContext private let peerId: PeerId private let chatControllerInteraction: ChatControllerInteraction - private let openPeerContextAction: (Peer, ASDisplayNode, ContextGesture?) -> Void + private let openPeerContextAction: (Bool, Peer, ASDisplayNode, ContextGesture?) -> Void private let groupsInCommonContext: GroupsInCommonContext weak var parentController: ViewController? @@ -99,7 +99,7 @@ final class PeerInfoGroupsInCommonPaneNode: ASDisplayNode, PeerInfoPaneNode { private var disposable: Disposable? - init(context: AccountContext, peerId: PeerId, chatControllerInteraction: ChatControllerInteraction, openPeerContextAction: @escaping (Peer, ASDisplayNode, ContextGesture?) -> Void, groupsInCommonContext: GroupsInCommonContext) { + init(context: AccountContext, peerId: PeerId, chatControllerInteraction: ChatControllerInteraction, openPeerContextAction: @escaping (Bool, Peer, ASDisplayNode, ContextGesture?) 
-> Void, groupsInCommonContext: GroupsInCommonContext) { self.context = context self.peerId = peerId self.chatControllerInteraction = chatControllerInteraction @@ -190,7 +190,7 @@ final class PeerInfoGroupsInCommonPaneNode: ASDisplayNode, PeerInfoPaneNode { let transaction = preparedTransition(from: self.currentEntries, to: entries, context: self.context, presentationData: presentationData, openPeer: { [weak self] peer in self?.chatControllerInteraction.openPeer(EnginePeer(peer), .default, nil, .default) }, openPeerContextAction: { [weak self] peer, node, gesture in - self?.openPeerContextAction(peer, node, gesture) + self?.openPeerContextAction(false, peer, node, gesture) }) self.currentEntries = entries self.enqueuedTransactions.append(transaction) diff --git a/submodules/TelegramUI/Components/PeerInfo/PeerInfoScreen/Sources/Panes/PeerInfoRecommendedChannelsPane.swift b/submodules/TelegramUI/Components/PeerInfo/PeerInfoScreen/Sources/Panes/PeerInfoRecommendedChannelsPane.swift index fa55df3663..5f24abe885 100644 --- a/submodules/TelegramUI/Components/PeerInfo/PeerInfoScreen/Sources/Panes/PeerInfoRecommendedChannelsPane.swift +++ b/submodules/TelegramUI/Components/PeerInfo/PeerInfoScreen/Sources/Panes/PeerInfoRecommendedChannelsPane.swift @@ -58,7 +58,7 @@ private enum RecommendedChannelsListEntry: Comparable, Identifiable { } } - func item(context: AccountContext, presentationData: PresentationData, action: @escaping (EnginePeer) -> Void) -> ListViewItem { + func item(context: AccountContext, presentationData: PresentationData, action: @escaping (EnginePeer) -> Void, openPeerContextAction: @escaping (Peer, ASDisplayNode, ContextGesture?) 
-> Void) -> ListViewItem { switch self { case let .peer(_, _, peer, subscribers): let subtitle = presentationData.strings.Conversation_StatusSubscribers(subscribers) @@ -66,17 +66,19 @@ private enum RecommendedChannelsListEntry: Comparable, Identifiable { action(peer) }, setPeerIdWithRevealedOptions: { _, _ in }, removePeer: { _ in - }, contextAction: nil, hasTopStripe: false, noInsets: true, noCorners: true, disableInteractiveTransitionIfNecessary: true) + }, contextAction: { node, gesture in + openPeerContextAction(peer._asPeer(), node, gesture) + }, hasTopStripe: false, noInsets: true, noCorners: true, disableInteractiveTransitionIfNecessary: true) } } } -private func preparedTransition(from fromEntries: [RecommendedChannelsListEntry], to toEntries: [RecommendedChannelsListEntry], context: AccountContext, presentationData: PresentationData, action: @escaping (EnginePeer) -> Void) -> RecommendedChannelsListTransaction { +private func preparedTransition(from fromEntries: [RecommendedChannelsListEntry], to toEntries: [RecommendedChannelsListEntry], context: AccountContext, presentationData: PresentationData, action: @escaping (EnginePeer) -> Void, openPeerContextAction: @escaping (Peer, ASDisplayNode, ContextGesture?) 
-> Void) -> RecommendedChannelsListTransaction { let (deleteIndices, indicesAndItems, updateIndices) = mergeListsStableWithUpdates(leftList: fromEntries, rightList: toEntries) let deletions = deleteIndices.map { ListViewDeleteItem(index: $0, directionHint: nil) } - let insertions = indicesAndItems.map { ListViewInsertItem(index: $0.0, previousIndex: $0.2, item: $0.1.item(context: context, presentationData: presentationData, action: action), directionHint: nil) } - let updates = updateIndices.map { ListViewUpdateItem(index: $0.0, previousIndex: $0.2, item: $0.1.item(context: context, presentationData: presentationData, action: action), directionHint: nil) } + let insertions = indicesAndItems.map { ListViewInsertItem(index: $0.0, previousIndex: $0.2, item: $0.1.item(context: context, presentationData: presentationData, action: action, openPeerContextAction: openPeerContextAction), directionHint: nil) } + let updates = updateIndices.map { ListViewUpdateItem(index: $0.0, previousIndex: $0.2, item: $0.1.item(context: context, presentationData: presentationData, action: action, openPeerContextAction: openPeerContextAction), directionHint: nil) } return RecommendedChannelsListTransaction(deletions: deletions, insertions: insertions, updates: updates, animated: toEntries.count < fromEntries.count) } @@ -84,6 +86,7 @@ private func preparedTransition(from fromEntries: [RecommendedChannelsListEntry] final class PeerInfoRecommendedChannelsPaneNode: ASDisplayNode, PeerInfoPaneNode { private let context: AccountContext private let chatControllerInteraction: ChatControllerInteraction + private let openPeerContextAction: (Bool, Peer, ASDisplayNode, ContextGesture?) -> Void weak var parentController: ViewController? @@ -113,9 +116,10 @@ final class PeerInfoRecommendedChannelsPaneNode: ASDisplayNode, PeerInfoPaneNode private var disposable: Disposable? 
- init(context: AccountContext, peerId: PeerId, chatControllerInteraction: ChatControllerInteraction) { + init(context: AccountContext, peerId: PeerId, chatControllerInteraction: ChatControllerInteraction, openPeerContextAction: @escaping (Bool, Peer, ASDisplayNode, ContextGesture?) -> Void) { self.context = context self.chatControllerInteraction = chatControllerInteraction + self.openPeerContextAction = openPeerContextAction let presentationData = context.sharedContext.currentPresentationData.with { $0 } self.listNode = ListView() @@ -194,6 +198,8 @@ final class PeerInfoRecommendedChannelsPaneNode: ASDisplayNode, PeerInfoPaneNode let transaction = preparedTransition(from: self.currentEntries, to: entries, context: self.context, presentationData: presentationData, action: { [weak self] peer in self?.chatControllerInteraction.openPeer(peer, .default, nil, .default) + }, openPeerContextAction: { [weak self] peer, node, gesture in + self?.openPeerContextAction(true, peer, node, gesture) }) self.currentEntries = entries self.enqueuedTransactions.append(transaction) diff --git a/submodules/TelegramUI/Components/PeerInfo/PeerInfoScreen/Sources/PeerInfoPaneContainerNode.swift b/submodules/TelegramUI/Components/PeerInfo/PeerInfoScreen/Sources/PeerInfoPaneContainerNode.swift index e27007b30a..e4a9976c56 100644 --- a/submodules/TelegramUI/Components/PeerInfo/PeerInfoScreen/Sources/PeerInfoPaneContainerNode.swift +++ b/submodules/TelegramUI/Components/PeerInfo/PeerInfoScreen/Sources/PeerInfoPaneContainerNode.swift @@ -352,7 +352,7 @@ private final class PeerInfoPendingPane { updatedPresentationData: (initial: PresentationData, signal: Signal)?, chatControllerInteraction: ChatControllerInteraction, data: PeerInfoScreenData, - openPeerContextAction: @escaping (Peer, ASDisplayNode, ContextGesture?) -> Void, + openPeerContextAction: @escaping (Bool, Peer, ASDisplayNode, ContextGesture?) 
-> Void, openAddMemberAction: @escaping () -> Void, requestPerformPeerMemberAction: @escaping (PeerInfoMember, PeerMembersListAction) -> Void, peerId: PeerId, @@ -417,7 +417,7 @@ private final class PeerInfoPendingPane { preconditionFailure() } case .recommended: - paneNode = PeerInfoRecommendedChannelsPaneNode(context: context, peerId: peerId, chatControllerInteraction: chatControllerInteraction) + paneNode = PeerInfoRecommendedChannelsPaneNode(context: context, peerId: peerId, chatControllerInteraction: chatControllerInteraction, openPeerContextAction: openPeerContextAction) } paneNode.parentController = parentController self.pane = PeerInfoPaneWrapper(key: key, node: paneNode) @@ -483,7 +483,7 @@ final class PeerInfoPaneContainerNode: ASDisplayNode, UIGestureRecognizerDelegat var selectionPanelNode: PeerInfoSelectionPanelNode? var chatControllerInteraction: ChatControllerInteraction? - var openPeerContextAction: ((Peer, ASDisplayNode, ContextGesture?) -> Void)? + var openPeerContextAction: ((Bool, Peer, ASDisplayNode, ContextGesture?) -> Void)? var openAddMemberAction: (() -> Void)? var requestPerformPeerMemberAction: ((PeerInfoMember, PeerMembersListAction) -> Void)? 
@@ -792,8 +792,8 @@ final class PeerInfoPaneContainerNode: ASDisplayNode, UIGestureRecognizerDelegat updatedPresentationData: self.updatedPresentationData, chatControllerInteraction: self.chatControllerInteraction!, data: data!, - openPeerContextAction: { [weak self] peer, node, gesture in - self?.openPeerContextAction?(peer, node, gesture) + openPeerContextAction: { [weak self] recommended, peer, node, gesture in + self?.openPeerContextAction?(recommended, peer, node, gesture) }, openAddMemberAction: { [weak self] in self?.openAddMemberAction?() diff --git a/submodules/TelegramUI/Components/PeerInfo/PeerInfoScreen/Sources/PeerInfoScreen.swift b/submodules/TelegramUI/Components/PeerInfo/PeerInfoScreen/Sources/PeerInfoScreen.swift index 15e608a768..3108706737 100644 --- a/submodules/TelegramUI/Components/PeerInfo/PeerInfoScreen/Sources/PeerInfoScreen.swift +++ b/submodules/TelegramUI/Components/PeerInfo/PeerInfoScreen/Sources/PeerInfoScreen.swift @@ -2187,6 +2187,7 @@ final class PeerInfoScreenNode: ViewControllerTracingNode, PeerInfoScreenNodePro private let addMemberDisposable = MetaDisposable() private let preloadHistoryDisposable = MetaDisposable() private var shareStatusDisposable: MetaDisposable? 
+ private let joinChannelDisposable = MetaDisposable() private let editAvatarDisposable = MetaDisposable() private let updateAvatarDisposable = MetaDisposable() @@ -2956,6 +2957,7 @@ final class PeerInfoScreenNode: ViewControllerTracingNode, PeerInfoScreenNodePro }, openNoAdsDemo: { }, displayGiveawayParticipationStatus: { _ in }, openPremiumStatusInfo: { _, _, _, _ in + }, openRecommendedChannelContextMenu: { _, _, _ in }, requestMessageUpdate: { _, _ in }, cancelInteractiveKeyboardGestures: { }, dismissTextInput: { @@ -2994,19 +2996,37 @@ final class PeerInfoScreenNode: ViewControllerTracingNode, PeerInfoScreenNodePro self.scrollNode.view.isScrollEnabled = !self.isMediaOnly self.paneContainerNode.chatControllerInteraction = self.chatInterfaceInteraction - self.paneContainerNode.openPeerContextAction = { [weak self] peer, node, gesture in + self.paneContainerNode.openPeerContextAction = { [weak self] recommended, peer, node, gesture in guard let strongSelf = self, let controller = strongSelf.controller else { return } let presentationData = strongSelf.presentationData let chatController = strongSelf.context.sharedContext.makeChatController(context: context, chatLocation: .peer(id: peer.id), subject: nil, botStart: nil, mode: .standard(previewing: true)) chatController.canReadHistory.set(false) - let items: [ContextMenuItem] = [ - .action(ContextMenuActionItem(text: presentationData.strings.Conversation_LinkDialogOpen, icon: { _ in nil }, action: { _, f in - f(.dismissWithoutContent) - self?.chatInterfaceInteraction.openPeer(EnginePeer(peer), .default, nil, .default) - })) - ] + let items: [ContextMenuItem] + if recommended { + items = [ + .action(ContextMenuActionItem(text: presentationData.strings.Conversation_LinkDialogOpen, icon: { theme in return generateTintedImage(image: UIImage(bundleImageName: "Chat/Context Menu/ImageEnlarge"), color: theme.actionSheet.primaryTextColor) }, action: { [weak self] _, f in + f(.dismissWithoutContent) + 
self?.chatInterfaceInteraction.openPeer(EnginePeer(peer), .default, nil, .default) + })), + .action(ContextMenuActionItem(text: presentationData.strings.Chat_SimilarChannels_Join, icon: { theme in return generateTintedImage(image: UIImage(bundleImageName: "Chat/Context Menu/Add"), color: theme.actionSheet.primaryTextColor) }, action: { [weak self] _, f in + f(.dismissWithoutContent) + + guard let self else { + return + } + self.joinChannel(peer: EnginePeer(peer)) + })) + ] + } else { + items = [ + .action(ContextMenuActionItem(text: presentationData.strings.Conversation_LinkDialogOpen, icon: { theme in return generateTintedImage(image: UIImage(bundleImageName: "Chat/Context Menu/ImageEnlarge"), color: theme.actionSheet.primaryTextColor) }, action: { _, f in + f(.dismissWithoutContent) + self?.chatInterfaceInteraction.openPeer(EnginePeer(peer), .default, nil, .default) + })) + ] + } let contextController = ContextController(presentationData: presentationData, source: .controller(ContextControllerContentSourceImpl(controller: chatController, sourceNode: node)), items: .single(ContextController.Items(content: .list(items))), gesture: gesture) controller.presentInGlobalOverlay(contextController) } @@ -4088,6 +4108,7 @@ final class PeerInfoScreenNode: ViewControllerTracingNode, PeerInfoScreenNodePro self.postingAvailabilityDisposable?.dispose() self.storyUploadProgressDisposable?.dispose() self.updateAvatarDisposable.dispose() + self.joinChannelDisposable.dispose() } override func didLoad() { @@ -10202,6 +10223,43 @@ final class PeerInfoScreenNode: ViewControllerTracingNode, PeerInfoScreenNodePro } return false } + + fileprivate func joinChannel(peer: EnginePeer) { + let presentationData = self.presentationData + self.joinChannelDisposable.set(( + self.context.peerChannelMemberCategoriesContextsManager.join(engine: self.context.engine, peerId: peer.id, hash: nil) + |> deliverOnMainQueue + |> afterCompleted { [weak self] in + Queue.mainQueue().async { + if let self { + 
self.controller?.present(UndoOverlayController(presentationData: presentationData, content: .succeed(text: presentationData.strings.Chat_SimilarChannels_JoinedChannel(peer.compactDisplayTitle).string, timeout: nil, customUndoText: nil), elevatedLayout: false, animateInAsReplacement: false, action: { _ in return false }), in: .window(.root)) + } + } + } + ).startStrict(error: { [weak self] error in + guard let self else { + return + } + let text: String + switch error { + case .inviteRequestSent: + self.controller?.present(UndoOverlayController(presentationData: presentationData, content: .inviteRequestSent(title: presentationData.strings.Group_RequestToJoinSent, text: presentationData.strings.Group_RequestToJoinSentDescriptionGroup), elevatedLayout: true, animateInAsReplacement: false, action: { _ in return false }), in: .window(.root)) + return + case .tooMuchJoined: + self.controller?.push(oldChannelsController(context: context, intent: .join, completed: { [weak self] value in + if value { + self?.joinChannel(peer: peer) + } + })) + return + case .tooMuchUsers: + text = self.presentationData.strings.Conversation_UsersTooMuchError + case .generic: + text = self.presentationData.strings.Channel_ErrorAccessDenied + } + self.controller?.present(textAlertController(context: context, title: nil, text: text, actions: [TextAlertAction(type: .defaultAction, title: presentationData.strings.Common_OK, action: {})]), in: .window(.root)) + })) + } } public final class PeerInfoScreenImpl: ViewController, PeerInfoScreen, KeyShortcutResponder { diff --git a/submodules/TelegramUI/Components/Resources/FetchVideoMediaResource/Sources/FetchVideoMediaResource.swift b/submodules/TelegramUI/Components/Resources/FetchVideoMediaResource/Sources/FetchVideoMediaResource.swift index c766b9e28b..1f4b1bb94f 100644 --- a/submodules/TelegramUI/Components/Resources/FetchVideoMediaResource/Sources/FetchVideoMediaResource.swift +++ 
b/submodules/TelegramUI/Components/Resources/FetchVideoMediaResource/Sources/FetchVideoMediaResource.swift @@ -862,6 +862,9 @@ private extension MediaEditorValues { additionalVideoScale: nil, additionalVideoRotation: nil, additionalVideoPositionChanges: [], + additionalVideoTrimRange: nil, + additionalVideoOffset: nil, + additionalVideoVolume: nil, drawing: nil, entities: [], toolValues: [:], @@ -998,6 +1001,9 @@ private extension MediaEditorValues { additionalVideoScale: nil, additionalVideoRotation: nil, additionalVideoPositionChanges: [], + additionalVideoTrimRange: nil, + additionalVideoOffset: nil, + additionalVideoVolume: nil, drawing: drawing, entities: entities, toolValues: toolValues, diff --git a/submodules/TelegramUI/Components/Settings/PeerNameColorScreen/Sources/PeerNameColorChatPreviewItem.swift b/submodules/TelegramUI/Components/Settings/PeerNameColorScreen/Sources/PeerNameColorChatPreviewItem.swift index 54e1c3e99a..e4aee2f4c3 100644 --- a/submodules/TelegramUI/Components/Settings/PeerNameColorScreen/Sources/PeerNameColorChatPreviewItem.swift +++ b/submodules/TelegramUI/Components/Settings/PeerNameColorScreen/Sources/PeerNameColorChatPreviewItem.swift @@ -168,7 +168,7 @@ final class PeerNameColorChatPreviewItemNode: ListViewItemNode { return { item, params, neighbors in if currentBackgroundNode == nil { currentBackgroundNode = createWallpaperBackgroundNode(context: item.context, forChatDisplay: false) - currentBackgroundNode?.update(wallpaper: item.wallpaper) + currentBackgroundNode?.update(wallpaper: item.wallpaper, animated: false) currentBackgroundNode?.updateBubbleTheme(bubbleTheme: item.componentTheme, bubbleCorners: item.chatBubbleCorners) } @@ -254,7 +254,7 @@ final class PeerNameColorChatPreviewItemNode: ListViewItemNode { strongSelf.item = item if let currentBackgroundNode { - currentBackgroundNode.update(wallpaper: item.wallpaper) + currentBackgroundNode.update(wallpaper: item.wallpaper, animated: false) 
currentBackgroundNode.updateBubbleTheme(bubbleTheme: item.theme, bubbleCorners: item.chatBubbleCorners) } diff --git a/submodules/TelegramUI/Components/Stories/ForwardInfoPanelComponent/BUILD b/submodules/TelegramUI/Components/Stories/ForwardInfoPanelComponent/BUILD new file mode 100644 index 0000000000..f17f609005 --- /dev/null +++ b/submodules/TelegramUI/Components/Stories/ForwardInfoPanelComponent/BUILD @@ -0,0 +1,22 @@ +load("@build_bazel_rules_swift//swift:swift.bzl", "swift_library") + +swift_library( + name = "ForwardInfoPanelComponent", + module_name = "ForwardInfoPanelComponent", + srcs = glob([ + "Sources/**/*.swift", + ]), + copts = [ + "-warnings-as-errors", + ], + deps = [ + "//submodules/Display", + "//submodules/ComponentFlow", + "//submodules/TelegramPresentationData", + "//submodules/Components/MultilineTextComponent", + "//submodules/TelegramUI/Components/Chat/MessageInlineBlockBackgroundView", + ], + visibility = [ + "//visibility:public", + ], +) diff --git a/submodules/TelegramUI/Components/Stories/ForwardInfoPanelComponent/Sources/ForwardInfoPanelComponent.swift b/submodules/TelegramUI/Components/Stories/ForwardInfoPanelComponent/Sources/ForwardInfoPanelComponent.swift new file mode 100644 index 0000000000..c4fadcd075 --- /dev/null +++ b/submodules/TelegramUI/Components/Stories/ForwardInfoPanelComponent/Sources/ForwardInfoPanelComponent.swift @@ -0,0 +1,174 @@ +import Foundation +import UIKit +import Display +import ComponentFlow +import MultilineTextComponent +import MessageInlineBlockBackgroundView + +public final class ForwardInfoPanelComponent: Component { + public let authorName: String + public let text: String + public let isChannel: Bool + public let isVibrant: Bool + + public init( + authorName: String, + text: String, + isChannel: Bool, + isVibrant: Bool + ) { + self.authorName = authorName + self.text = text + self.isChannel = isChannel + self.isVibrant = isVibrant + } + + public static func ==(lhs: ForwardInfoPanelComponent, rhs: 
ForwardInfoPanelComponent) -> Bool { + if lhs.authorName != rhs.authorName { + return false + } + if lhs.text != rhs.text { + return false + } + if lhs.isChannel != rhs.isChannel { + return false + } + if lhs.isVibrant != rhs.isVibrant { + return false + } + return true + } + + public final class View: UIView { + public let backgroundView: UIImageView +// private let blurBackgroundView: BlurredBackgroundView + private let blurBackgroundView: UIVisualEffectView + private let blockView: MessageInlineBlockBackgroundView + private var iconView: UIImageView? + private var title = ComponentView() + private var text = ComponentView() + + private var component: ForwardInfoPanelComponent? + private weak var state: EmptyComponentState? + + override init(frame: CGRect) { +// self.blurBackgroundView = BlurredBackgroundView(color: UIColor(rgb: 0x000000, alpha: 0.4)) + + if #available(iOS 13.0, *) { + self.blurBackgroundView = UIVisualEffectView(effect: UIBlurEffect(style: .systemUltraThinMaterialDark)) + } else { + self.blurBackgroundView = UIVisualEffectView(effect: UIBlurEffect(style: .dark)) + } + self.blurBackgroundView.clipsToBounds = true + self.blurBackgroundView.layer.cornerRadius = 4.0 + + self.backgroundView = UIImageView() + self.backgroundView.image = generateStretchableFilledCircleImage(radius: 4.0, color: UIColor(white: 1.0, alpha: 0.4)) + + self.blockView = MessageInlineBlockBackgroundView() + + super.init(frame: frame) + + self.addSubview(self.blockView) + } + + required init?(coder: NSCoder) { + fatalError("init(coder:) has not been implemented") + } + + func update(component: ForwardInfoPanelComponent, availableSize: CGSize, state: EmptyComponentState, environment: Environment, transition: Transition) -> CGSize { + self.component = component + self.state = state + + let size = CGSize(width: availableSize.width, height: 40.0) + + let lineColor: UIColor + if !component.isVibrant { +// self.blurBackgroundView.update(size: size, cornerRadius: 4.0, transition: 
.immediate) + self.blurBackgroundView.frame = CGRect(origin: .zero, size: size) + self.insertSubview(self.blurBackgroundView, at: 0) + + lineColor = UIColor.white + } else { + lineColor = UIColor(white: 1.0, alpha: 0.5) + } + + self.blockView.update(size: size, isTransparent: true, primaryColor: lineColor, secondaryColor: nil, thirdColor: nil, backgroundColor: nil, pattern: nil, animation: .None) + self.blockView.frame = CGRect(origin: .zero, size: size) + + + + var titleOffset: CGFloat = 0.0 + if component.isChannel { + let iconView: UIImageView + if let current = self.iconView { + iconView = current + } else { + iconView = UIImageView(image: UIImage(bundleImageName: "Chat/Input/Accessory Panels/PanelTextChannelIcon")?.withRenderingMode(.alwaysTemplate)) + iconView.alpha = 0.55 + iconView.tintColor = .white + self.addSubview(iconView) + } + if let image = iconView.image { + iconView.frame = CGRect(origin: CGPoint(x: 8.0 + UIScreenPixel, y: 6.0 - UIScreenPixel), size: image.size) + } + titleOffset += 13.0 + } else if let iconView = self.iconView { + self.iconView = nil + iconView.removeFromSuperview() + } + + let titleSize = self.title.update( + transition: .immediate, + component: AnyComponent(MultilineTextComponent( + text: .plain(NSAttributedString( + string: component.authorName, + font: Font.semibold(14.0), + textColor: .white + )), + maximumNumberOfLines: 1 + )), + environment: {}, + containerSize: CGSize(width: availableSize.width - titleOffset - 20.0, height: availableSize.height) + ) + let titleFrame = CGRect(origin: CGPoint(x: 9.0 + titleOffset, y: 3.0), size: titleSize) + if let view = self.title.view { + if view.superview == nil { + self.addSubview(view) + } + view.frame = titleFrame + } + + let textSize = self.text.update( + transition: .immediate, + component: AnyComponent(MultilineTextComponent( + text: .plain(NSAttributedString( + string: component.text, + font: Font.regular(14.0), + textColor: .white + )), + maximumNumberOfLines: 1 + )), + 
environment: {}, + containerSize: CGSize(width: availableSize.width - 20.0, height: availableSize.height) + ) + let textFrame = CGRect(origin: CGPoint(x: 9.0, y: 20.0), size: textSize) + if let view = self.text.view { + if view.superview == nil { + self.addSubview(view) + } + view.frame = textFrame + } + + return size + } + } + + public func makeView() -> View { + return View(frame: CGRect()) + } + + public func update(view: View, availableSize: CGSize, state: EmptyComponentState, environment: Environment, transition: Transition) -> CGSize { + return view.update(component: self, availableSize: availableSize, state: state, environment: environment, transition: transition) + } +} diff --git a/submodules/TelegramUI/Components/Stories/StoryContainerScreen/BUILD b/submodules/TelegramUI/Components/Stories/StoryContainerScreen/BUILD index b7c1985216..6ac68ffe0f 100644 --- a/submodules/TelegramUI/Components/Stories/StoryContainerScreen/BUILD +++ b/submodules/TelegramUI/Components/Stories/StoryContainerScreen/BUILD @@ -94,6 +94,7 @@ swift_library( "//submodules/Components/BalancedTextComponent", "//submodules/AnimatedCountLabelNode", "//submodules/StickerResources", + "//submodules/TelegramUI/Components/Stories/ForwardInfoPanelComponent" ], visibility = [ "//visibility:public", diff --git a/submodules/TelegramUI/Components/Stories/StoryContainerScreen/Sources/StoryAuthorInfoComponent.swift b/submodules/TelegramUI/Components/Stories/StoryContainerScreen/Sources/StoryAuthorInfoComponent.swift index f60d1f6655..aba5fa1306 100644 --- a/submodules/TelegramUI/Components/Stories/StoryContainerScreen/Sources/StoryAuthorInfoComponent.swift +++ b/submodules/TelegramUI/Components/Stories/StoryContainerScreen/Sources/StoryAuthorInfoComponent.swift @@ -7,6 +7,7 @@ import TelegramCore import TelegramStringFormatting import MultilineTextComponent import TelegramPresentationData +import AvatarNode final class StoryAuthorInfoComponent: Component { struct Counters: Equatable { @@ -17,14 
+18,16 @@ final class StoryAuthorInfoComponent: Component { let context: AccountContext let strings: PresentationStrings let peer: EnginePeer? + let forwardInfo: EngineStoryItem.ForwardInfo? let timestamp: Int32 let counters: Counters? let isEdited: Bool - init(context: AccountContext, strings: PresentationStrings, peer: EnginePeer?, timestamp: Int32, counters: Counters?, isEdited: Bool) { + init(context: AccountContext, strings: PresentationStrings, peer: EnginePeer?, forwardInfo: EngineStoryItem.ForwardInfo?, timestamp: Int32, counters: Counters?, isEdited: Bool) { self.context = context self.strings = strings self.peer = peer + self.forwardInfo = forwardInfo self.timestamp = timestamp self.counters = counters self.isEdited = isEdited @@ -40,6 +43,9 @@ final class StoryAuthorInfoComponent: Component { if lhs.peer != rhs.peer { return false } + if lhs.forwardInfo != rhs.forwardInfo { + return false + } if lhs.timestamp != rhs.timestamp { return false } @@ -54,6 +60,8 @@ final class StoryAuthorInfoComponent: Component { final class View: UIView { private let title = ComponentView() + private var repostIconView: UIImageView? + private var avatarNode: AvatarNode? private let subtitle = ComponentView() private var counterLabel: ComponentView? 
@@ -92,11 +100,30 @@ final class StoryAuthorInfoComponent: Component { } let timestamp = Int32(CFAbsoluteTimeGetCurrent() + NSTimeIntervalSince1970) - var subtitle = stringForStoryActivityTimestamp(strings: presentationData.strings, dateTimeFormat: presentationData.dateTimeFormat, preciseTime: true, relativeTimestamp: component.timestamp, relativeTo: timestamp) - if component.isEdited { - subtitle.append(" • ") - subtitle.append(component.strings.Story_HeaderEdited) + let titleColor = UIColor.white + let subtitleColor = UIColor(white: 1.0, alpha: 0.8) + let subtitle: NSAttributedString + if let forwardInfo = component.forwardInfo { + let authorName: String + switch forwardInfo { + case let .known(peer, _): + authorName = peer.displayTitle(strings: presentationData.strings, displayOrder: presentationData.nameDisplayOrder) + case let .unknown(name): + authorName = name + } + let timeString = stringForStoryActivityTimestamp(strings: presentationData.strings, dateTimeFormat: presentationData.dateTimeFormat, preciseTime: true, relativeTimestamp: component.timestamp, relativeTo: timestamp, short: true) + let combinedString = NSMutableAttributedString() + combinedString.append(NSAttributedString(string: authorName, font: Font.medium(11.0), textColor: titleColor)) + combinedString.append(NSAttributedString(string: " • \(timeString)", font: Font.regular(11.0), textColor: subtitleColor)) + subtitle = combinedString + } else { + var subtitleString = stringForStoryActivityTimestamp(strings: presentationData.strings, dateTimeFormat: presentationData.dateTimeFormat, preciseTime: true, relativeTimestamp: component.timestamp, relativeTo: timestamp) + if component.isEdited { + subtitleString.append(" • ") + subtitleString.append(component.strings.Story_HeaderEdited) + } + subtitle = NSAttributedString(string: subtitleString, font: Font.regular(11.0), textColor: subtitleColor) } let titleSize = self.title.update( @@ -112,7 +139,7 @@ final class StoryAuthorInfoComponent: Component { 
let subtitleSize = self.subtitle.update( transition: .immediate, component: AnyComponent(MultilineTextComponent( - text: .plain(NSAttributedString(string: subtitle, font: Font.regular(11.0), textColor: UIColor(white: 1.0, alpha: 0.8))), + text: .plain(subtitle), truncationType: .end, maximumNumberOfLines: 1 )), @@ -122,7 +149,52 @@ final class StoryAuthorInfoComponent: Component { let contentHeight: CGFloat = titleSize.height + spacing + subtitleSize.height let titleFrame = CGRect(origin: CGPoint(x: leftInset, y: 2.0 + floor((availableSize.height - contentHeight) * 0.5)), size: titleSize) - let subtitleFrame = CGRect(origin: CGPoint(x: leftInset, y: titleFrame.maxY + spacing + UIScreenPixel), size: subtitleSize) + + var subtitleOffset: CGFloat = 0.0 + if let _ = component.forwardInfo { + let iconView: UIImageView + if let current = self.repostIconView { + iconView = current + } else { + iconView = UIImageView(image: UIImage(bundleImageName: "Stories/HeaderRepost")?.withRenderingMode(.alwaysTemplate)) + iconView.tintColor = .white + self.addSubview(iconView) + self.repostIconView = iconView + } + + let iconSize = CGSize(width: 13.0, height: 13.0) + let iconFrame = CGRect(origin: CGPoint(x: leftInset + subtitleOffset - 2.0 + UIScreenPixel, y: titleFrame.minY + contentHeight - iconSize.height + 1.0), size: iconSize) + transition.setFrame(view: iconView, frame: iconFrame) + + subtitleOffset += iconSize.width + 1.0 + } else if let repostIconView = self.repostIconView { + self.repostIconView = nil + repostIconView.removeFromSuperview() + } + if let forwardInfo = component.forwardInfo, case let .known(peer, _) = forwardInfo { + let avatarNode: AvatarNode + if let current = self.avatarNode { + avatarNode = current + } else { + avatarNode = AvatarNode(font: avatarPlaceholderFont(size: 8.0)) + self.addSubview(avatarNode.view) + self.avatarNode = avatarNode + } + + let avatarSize = CGSize(width: 16.0, height: 16.0) + let theme = 
component.context.sharedContext.currentPresentationData.with { $0 }.theme + avatarNode.setPeer(context: component.context, theme: theme, peer: peer, synchronousLoad: true, displayDimensions: avatarSize) + + let avatarFrame = CGRect(origin: CGPoint(x: leftInset + subtitleOffset, y: titleFrame.minY + contentHeight - avatarSize.height + 3.0 - UIScreenPixel), size: avatarSize) + avatarNode.frame = avatarFrame + + subtitleOffset += avatarSize.width + 4.0 + } else if let avatarNode = self.avatarNode { + self.avatarNode = nil + avatarNode.view.removeFromSuperview() + } + + let subtitleFrame = CGRect(origin: CGPoint(x: leftInset + subtitleOffset, y: titleFrame.maxY + spacing + UIScreenPixel), size: subtitleSize) if let titleView = self.title.view { if titleView.superview == nil { diff --git a/submodules/TelegramUI/Components/Stories/StoryContainerScreen/Sources/StoryChatContent.swift b/submodules/TelegramUI/Components/Stories/StoryContainerScreen/Sources/StoryChatContent.swift index ab6f84d06f..9107995366 100644 --- a/submodules/TelegramUI/Components/Stories/StoryContainerScreen/Sources/StoryChatContent.swift +++ b/submodules/TelegramUI/Components/Stories/StoryContainerScreen/Sources/StoryChatContent.swift @@ -63,8 +63,8 @@ public final class StoryContentContextImpl: StoryContentContext { ), context.engine.data.subscribe(TelegramEngine.EngineData.Item.NotificationSettings.Global()) ) - |> mapToSignal { _, views, globalNotificationSettings -> Signal<(CombinedView, [PeerId: Peer], EngineGlobalNotificationSettings, [MediaId: TelegramMediaFile]), NoError> in - return context.account.postbox.transaction { transaction -> (CombinedView, [PeerId: Peer], EngineGlobalNotificationSettings, [MediaId: TelegramMediaFile]) in + |> mapToSignal { _, views, globalNotificationSettings -> Signal<(CombinedView, [PeerId: Peer], EngineGlobalNotificationSettings, [MediaId: TelegramMediaFile], [Int64: EngineStoryItem.ForwardInfo]), NoError> in + return context.account.postbox.transaction { 
transaction -> (CombinedView, [PeerId: Peer], EngineGlobalNotificationSettings, [MediaId: TelegramMediaFile], [Int64: EngineStoryItem.ForwardInfo]) in var peers: [PeerId: Peer] = [:] var allEntityFiles: [MediaId: TelegramMediaFile] = [:] @@ -78,6 +78,11 @@ public final class StoryContentContextImpl: StoryContentContext { } } } + if let forwardInfo = itemValue.forwardInfo, case let .known(peerId, _) = forwardInfo { + if let peer = transaction.getPeer(peerId) { + peers[peer.id] = peer + } + } for entity in itemValue.entities { if case let .CustomEmoji(_, fileId) = entity.type { let mediaId = MediaId(namespace: Namespaces.Media.CloudFile, id: fileId) @@ -104,10 +109,24 @@ public final class StoryContentContextImpl: StoryContentContext { } } - return (views, peers, globalNotificationSettings, allEntityFiles) + var pendingForwardsInfo: [Int64: EngineStoryItem.ForwardInfo] = [:] + if let stateView = views.views[PostboxViewKey.storiesState(key: .local)] as? StoryStatesView, let localState = stateView.value?.get(Stories.LocalState.self) { + for item in localState.items { + if let forwardInfo = item.forwardInfo, let peer = transaction.getPeer(forwardInfo.peerId) { + let peer = EnginePeer(peer) + if forwardInfo.isForwardingDisabled { + pendingForwardsInfo[item.randomId] = .unknown(name: peer.compactDisplayTitle) + } else { + pendingForwardsInfo[item.randomId] = .known(peer: peer, storyId: forwardInfo.storyId) + } + } + } + } + + return (views, peers, globalNotificationSettings, allEntityFiles, pendingForwardsInfo) } } - |> deliverOnMainQueue).startStrict(next: { [weak self] views, peers, globalNotificationSettings, allEntityFiles in + |> deliverOnMainQueue).startStrict(next: { [weak self] views, peers, globalNotificationSettings, allEntityFiles, pendingForwardsInfo in guard let self else { return } @@ -207,7 +226,8 @@ public final class StoryContentContextImpl: StoryContentContext { isForwardingDisabled: item.isForwardingDisabled, isEdited: item.isEdited, isMy: item.isMy, - 
myReaction: item.myReaction + myReaction: item.myReaction, + forwardInfo: item.forwardInfo.flatMap { EngineStoryItem.ForwardInfo($0, peers: peers) } ) } var totalCount = peerStoryItemsView.items.count @@ -241,7 +261,8 @@ public final class StoryContentContextImpl: StoryContentContext { isForwardingDisabled: false, isEdited: false, isMy: true, - myReaction: nil + myReaction: nil, + forwardInfo: pendingForwardsInfo[item.randomId] )) totalCount += 1 } @@ -1002,11 +1023,22 @@ public final class SingleStoryContentContextImpl: StoryContentContext { public init( context: AccountContext, storyId: StoryId, + storyItem: EngineStoryItem? = nil, readGlobally: Bool ) { self.context = context self.readGlobally = readGlobally + let item: Signal + if let storyItem { + item = .single(.item(storyItem.asStoryItem())) + } else { + item = context.account.postbox.combinedView(keys: [PostboxViewKey.story(id: storyId)]) + |> map { views -> Stories.StoredItem? in + return (views.views[PostboxViewKey.story(id: storyId)] as? StoryView)?.item?.get(Stories.StoredItem.self) + } + } + self.storyDisposable = (combineLatest(queue: .mainQueue(), context.engine.data.subscribe( TelegramEngine.EngineData.Item.Peer.Peer(id: storyId.peerId), @@ -1016,9 +1048,7 @@ public final class SingleStoryContentContextImpl: StoryContentContext { TelegramEngine.EngineData.Item.Peer.NotificationSettings(id: storyId.peerId), TelegramEngine.EngineData.Item.NotificationSettings.Global() ), - context.account.postbox.combinedView(keys: [PostboxViewKey.story(id: storyId)]) |> mapToSignal { views -> Signal<(Stories.StoredItem?, [PeerId: Peer], [MediaId: TelegramMediaFile]), NoError> in - let item = (views.views[PostboxViewKey.story(id: storyId)] as? 
StoryView)?.item?.get(Stories.StoredItem.self) - + item |> mapToSignal { item -> Signal<(Stories.StoredItem?, [PeerId: Peer], [MediaId: TelegramMediaFile]), NoError> in return context.account.postbox.transaction { transaction -> (Stories.StoredItem?, [PeerId: Peer], [MediaId: TelegramMediaFile]) in guard let item else { return (nil, [:], [:]) @@ -1033,6 +1063,11 @@ public final class SingleStoryContentContextImpl: StoryContentContext { } } } + if let forwardInfo = item.forwardInfo, case let .known(peerId, _) = forwardInfo { + if let peer = transaction.getPeer(peerId) { + peers[peer.id] = peer + } + } for entity in item.entities { if case let .CustomEmoji(_, fileId) = entity.type { let mediaId = MediaId(namespace: Namespaces.Media.CloudFile, id: fileId) @@ -1122,7 +1157,8 @@ public final class SingleStoryContentContextImpl: StoryContentContext { isForwardingDisabled: itemValue.isForwardingDisabled, isEdited: itemValue.isEdited, isMy: itemValue.isMy, - myReaction: itemValue.myReaction + myReaction: itemValue.myReaction, + forwardInfo: itemValue.forwardInfo.flatMap { EngineStoryItem.ForwardInfo($0, peers: peers) } ) let mainItem = StoryContentItem( diff --git a/submodules/TelegramUI/Components/Stories/StoryContainerScreen/Sources/StoryContentCaptionComponent.swift b/submodules/TelegramUI/Components/Stories/StoryContainerScreen/Sources/StoryContentCaptionComponent.swift index 0b9c613f3f..fe13b584ce 100644 --- a/submodules/TelegramUI/Components/Stories/StoryContainerScreen/Sources/StoryContentCaptionComponent.swift +++ b/submodules/TelegramUI/Components/Stories/StoryContainerScreen/Sources/StoryContentCaptionComponent.swift @@ -12,6 +12,7 @@ import UrlEscaping import TelegramPresentationData import TextSelectionNode import SwiftSignalKit +import ForwardInfoPanelComponent final class StoryContentCaptionComponent: Component { enum Action { @@ -57,6 +58,7 @@ final class StoryContentCaptionComponent: Component { let theme: PresentationTheme let text: String let author: 
EnginePeer + let forwardInfo: EngineStoryItem.ForwardInfo? let entities: [MessageTextEntity] let entityFiles: [EngineMedia.Id: TelegramMediaFile] let action: (Action) -> Void @@ -71,6 +73,7 @@ final class StoryContentCaptionComponent: Component { theme: PresentationTheme, text: String, author: EnginePeer, + forwardInfo: EngineStoryItem.ForwardInfo?, entities: [MessageTextEntity], entityFiles: [EngineMedia.Id: TelegramMediaFile], action: @escaping (Action) -> Void, @@ -83,6 +86,7 @@ final class StoryContentCaptionComponent: Component { self.strings = strings self.theme = theme self.author = author + self.forwardInfo = forwardInfo self.text = text self.entities = entities self.entityFiles = entityFiles @@ -108,6 +112,9 @@ final class StoryContentCaptionComponent: Component { if lhs.author != rhs.author { return false } + if lhs.forwardInfo != rhs.forwardInfo { + return false + } if lhs.text != rhs.text { return false } @@ -168,6 +175,8 @@ final class StoryContentCaptionComponent: Component { private let scrollBottomFullMaskView: UIView private let scrollTopMaskView: UIImageView + private var forwardInfoPanel: ComponentView? + private let shadowGradientView: UIImageView private var component: StoryContentCaptionComponent? 
@@ -643,6 +652,50 @@ final class StoryContentCaptionComponent: Component { let textOverflowHeight: CGFloat = expandedTextLayout.0.size.height - visibleTextHeight let scrollContentSize = CGSize(width: availableSize.width, height: availableSize.height + textOverflowHeight) + if let forwardInfo = component.forwardInfo { + let authorName: String + let isChannel: Bool + switch forwardInfo { + case let .known(peer, _): + authorName = peer.displayTitle(strings: component.strings, displayOrder: .firstLast) + isChannel = peer.id.isGroupOrChannel + case let .unknown(name): + authorName = name + isChannel = false + } + let forwardInfoPanel: ComponentView + if let current = self.forwardInfoPanel { + forwardInfoPanel = current + } else { + forwardInfoPanel = ComponentView() + self.forwardInfoPanel = forwardInfoPanel + } + + let forwardInfoPanelSize = forwardInfoPanel.update( + transition: .immediate, + component: AnyComponent( + ForwardInfoPanelComponent( + authorName: authorName, + text: "Story", + isChannel: isChannel, + isVibrant: false + ) + ), + environment: {}, + containerSize: CGSize(width: availableSize.width - sideInset * 2.0, height: availableSize.height) + ) + let forwardInfoPanelFrame = CGRect(origin: CGPoint(x: sideInset, y: availableSize.height - visibleTextHeight - verticalInset - forwardInfoPanelSize.height - 10.0), size: forwardInfoPanelSize) + if let view = forwardInfoPanel.view { + if view.superview == nil { + self.scrollView.addSubview(view) + } + view.frame = forwardInfoPanelFrame + } + } else if let forwardInfoPanel = self.forwardInfoPanel { + self.forwardInfoPanel = nil + forwardInfoPanel.view?.removeFromSuperview() + } + do { let collapsedTextNode = collapsedTextLayout.1(TextNodeWithEntities.Arguments( context: component.context, diff --git a/submodules/TelegramUI/Components/Stories/StoryContainerScreen/Sources/StoryItemSetContainerComponent.swift b/submodules/TelegramUI/Components/Stories/StoryContainerScreen/Sources/StoryItemSetContainerComponent.swift 
index eaced7f7d9..65cebc254a 100644 --- a/submodules/TelegramUI/Components/Stories/StoryContainerScreen/Sources/StoryItemSetContainerComponent.swift +++ b/submodules/TelegramUI/Components/Stories/StoryContainerScreen/Sources/StoryItemSetContainerComponent.swift @@ -1796,6 +1796,12 @@ public final class StoryItemSetContainerComponent: Component { return } self.sendMessageContext.performShareAction(view: self) + }, + repostAction: { [weak self] in + guard let self else { + return + } + self.openStoryEditing(repost: true) } )), environment: {}, @@ -2980,6 +2986,7 @@ public final class StoryItemSetContainerComponent: Component { audioRecorder: self.sendMessageContext.audioRecorderValue, videoRecordingStatus: !self.sendMessageContext.hasRecordedVideoPreview ? self.sendMessageContext.videoRecorderValue?.audioStatus : nil, isRecordingLocked: self.sendMessageContext.isMediaRecordingLocked, + hasRecordedVideo: false, recordedAudioPreview: self.sendMessageContext.recordedAudioPreview, hasRecordedVideoPreview: self.sendMessageContext.hasRecordedVideoPreview, wasRecordingDismissed: self.sendMessageContext.wasRecordingDismissed, @@ -2992,6 +2999,7 @@ public final class StoryItemSetContainerComponent: Component { customInputView: nil, forceIsEditing: self.sendMessageContext.currentInputMode == .media, disabledPlaceholder: disabledPlaceholder, + header: nil, isChannel: isChannel, storyItem: component.slice.item.storyItem, chatLocation: nil @@ -3936,6 +3944,7 @@ public final class StoryItemSetContainerComponent: Component { context: component.context, strings: component.strings, peer: component.slice.peer, + forwardInfo: component.slice.item.storyItem.forwardInfo, timestamp: component.slice.item.storyItem.timestamp, counters: counters, isEdited: component.slice.item.storyItem.isEdited @@ -3963,10 +3972,18 @@ public final class StoryItemSetContainerComponent: Component { guard let self, let component = self.component else { return } - if component.slice.peer.id == 
component.context.account.peerId { - self.navigateToMyStories() + if let forwardInfo = component.slice.item.storyItem.forwardInfo, case let .known(peer, _) = forwardInfo { + if peer.id == component.context.account.peerId { + self.navigateToMyStories() + } else { + self.navigateToPeer(peer: peer, chat: false) + } } else { - self.navigateToPeer(peer: component.slice.peer, chat: false) + if component.slice.peer.id == component.context.account.peerId { + self.navigateToMyStories() + } else { + self.navigateToPeer(peer: component.slice.peer, chat: false) + } } })), environment: {}, @@ -4061,7 +4078,7 @@ public final class StoryItemSetContainerComponent: Component { } } - if !isUnsupported, !component.slice.item.storyItem.text.isEmpty { + if !isUnsupported, !component.slice.item.storyItem.text.isEmpty || component.slice.item.storyItem.forwardInfo != nil { var captionItemTransition = transition let captionItem: CaptionItem if let current = self.captionItem { @@ -4090,6 +4107,7 @@ public final class StoryItemSetContainerComponent: Component { theme: component.theme, text: component.slice.item.storyItem.text, author: component.slice.peer, + forwardInfo: component.slice.item.storyItem.forwardInfo, entities: enableEntities ? component.slice.item.storyItem.entities : [], entityFiles: component.slice.item.entityFiles, action: { [weak self] action in @@ -5129,148 +5147,222 @@ public final class StoryItemSetContainerComponent: Component { } } + let initialCaption: NSAttributedString? + let initialPrivacy: EngineStoryPrivacy? 
+ let initialMediaAreas: [MediaArea] + if repost { + initialCaption = nil + initialPrivacy = nil + initialMediaAreas = [] + } else { + initialCaption = chatInputStateStringWithAppliedEntities(item.text, entities: item.entities) + initialPrivacy = item.privacy + initialMediaAreas = item.mediaAreas + } + + let externalState = MediaEditorTransitionOutExternalState( + storyTarget: nil, + isPeerArchived: false, + transitionOut: nil + ) + let updateDisposable = MetaDisposable() var updateProgressImpl: ((Float) -> Void)? let controller = MediaEditorScreen( context: context, subject: subject, - isEditing: true, - initialCaption: chatInputStateStringWithAppliedEntities(item.text, entities: item.entities), - initialPrivacy: item.privacy, - initialMediaAreas: item.mediaAreas, + isEditing: !repost, + forwardSource: repost ? (component.slice.peer, item) : nil, + initialCaption: initialCaption, + initialPrivacy: initialPrivacy, + initialMediaAreas: initialMediaAreas, initialVideoPosition: videoPlaybackPosition, transitionIn: nil, - transitionOut: { _, _ in return nil }, - completion: { [weak self] _, mediaResult, mediaAreas, caption, privacy, stickers, commit in + transitionOut: { finished, isNew in + if repost && finished { + if let transitionOut = externalState.transitionOut?(externalState.storyTarget, externalState.isPeerArchived), let destinationView = transitionOut.destinationView { + return MediaEditorScreen.TransitionOut( + destinationView: destinationView, + destinationRect: transitionOut.destinationRect, + destinationCornerRadius: transitionOut.destinationCornerRadius + ) + } else { + return nil + } + } else { + return nil + } + }, + completion: { [weak self] result, commit in guard let self else { return } - let entities = generateChatInputTextEntities(caption) - var updatedText: String? - var updatedEntities: [MessageTextEntity]? 
- if caption.string != item.text || entities != item.entities { - updatedText = caption.string - updatedEntities = entities - } + + let entities = generateChatInputTextEntities(result.caption) - if let mediaResult { - switch mediaResult { - case let .image(image, dimensions): - updateProgressImpl?(0.0) - - let tempFile = TempBox.shared.tempFile(fileName: "file") - defer { - TempBox.shared.dispose(tempFile) + if repost { + let target: Stories.PendingTarget + let targetPeerId: EnginePeer.Id + if let sendAsPeerId = result.options.sendAsPeerId { + target = .peer(sendAsPeerId) + targetPeerId = sendAsPeerId + } else { + target = .myStories + targetPeerId = context.account.peerId + } + externalState.storyTarget = target + + self.component?.controller()?.dismiss(animated: false) + + let _ = (context.engine.data.get(TelegramEngine.EngineData.Item.Peer.Peer(id: targetPeerId)) + |> deliverOnMainQueue).startStandalone(next: { peer in + guard let peer else { + return } - if let imageData = compressImageToJPEG(image, quality: 0.7, tempFilePath: tempFile.path) { - updateDisposable.set((context.engine.messages.editStory(peerId: peerId, id: id, media: .image(dimensions: dimensions, data: imageData, stickers: stickers), mediaAreas: mediaAreas, text: updatedText, entities: updatedEntities, privacy: nil) - |> deliverOnMainQueue).startStrict(next: { [weak self] result in - guard let self else { - return - } - switch result { - case let .progress(progress): - updateProgressImpl?(progress) - case .completed: - Queue.mainQueue().after(0.1) { - self.isEditingStory = false - self.rewindCurrentItem() - self.updateIsProgressPaused() - self.state?.updated(transition: .easeInOut(duration: 0.2)) - - HapticFeedback().success() - - commit({}) - } - } - })) - } - case let .video(content, firstFrameImage, values, duration, dimensions): - updateProgressImpl?(0.0) - if let valuesData = try? 
JSONEncoder().encode(values) { - let data = MemoryBuffer(data: valuesData) - let digest = MemoryBuffer(data: data.md5Digest()) - let adjustments = VideoMediaResourceAdjustments(data: data, digest: digest, isStory: true) - - let resource: TelegramMediaResource - switch content { - case let .imageFile(path): - resource = LocalFileVideoMediaResource(randomId: Int64.random(in: .min ... .max), path: path, adjustments: adjustments) - case let .videoFile(path): - resource = LocalFileVideoMediaResource(randomId: Int64.random(in: .min ... .max), path: path, adjustments: adjustments) - case let .asset(localIdentifier): - resource = VideoLibraryMediaResource(localIdentifier: localIdentifier, conversion: .compress(adjustments)) + if case let .user(user) = peer { + externalState.isPeerArchived = user.storiesHidden ?? false + } else if case let .channel(channel) = peer { + externalState.isPeerArchived = channel.storiesHidden ?? false + } + + let forwardInfo = Stories.PendingForwardInfo(peerId: component.slice.peer.id, storyId: item.id, isForwardingDisabled: item.isForwardingDisabled) + + if let rootController = context.sharedContext.mainWindow?.viewController as? TelegramRootControllerInterface { + var existingMedia: EngineMedia? + if let _ = result.media { + } else { + existingMedia = item.media } + rootController.proceedWithStoryUpload(target: target, result: result as! MediaEditorScreenResult, existingMedia: existingMedia, forwardInfo: forwardInfo, externalState: externalState, commit: commit) + } + }) + } else { + var updatedText: String? + var updatedEntities: [MessageTextEntity]? 
+ if result.caption.string != item.text || entities != item.entities { + updatedText = result.caption.string + updatedEntities = entities + } + + if let mediaResult = result.media { + switch mediaResult { + case let .image(image, dimensions): + updateProgressImpl?(0.0) let tempFile = TempBox.shared.tempFile(fileName: "file") defer { TempBox.shared.dispose(tempFile) } - let firstFrameImageData = firstFrameImage.flatMap { compressImageToJPEG($0, quality: 0.6, tempFilePath: tempFile.path) } - let firstFrameFile = firstFrameImageData.flatMap { data -> TempBoxFile? in - let file = TempBox.shared.tempFile(fileName: "image.jpg") - if let _ = try? data.write(to: URL(fileURLWithPath: file.path)) { - return file - } else { - return nil - } + if let imageData = compressImageToJPEG(image, quality: 0.7, tempFilePath: tempFile.path) { + updateDisposable.set((context.engine.messages.editStory(peerId: peerId, id: id, media: .image(dimensions: dimensions, data: imageData, stickers: result.stickers), mediaAreas: result.mediaAreas, text: updatedText, entities: updatedEntities, privacy: nil) + |> deliverOnMainQueue).startStrict(next: { [weak self] result in + guard let self else { + return + } + switch result { + case let .progress(progress): + updateProgressImpl?(progress) + case .completed: + Queue.mainQueue().after(0.1) { + self.isEditingStory = false + self.rewindCurrentItem() + self.updateIsProgressPaused() + self.state?.updated(transition: .easeInOut(duration: 0.2)) + + HapticFeedback().success() + + commit({}) + } + } + })) } + case let .video(content, firstFrameImage, values, duration, dimensions): + updateProgressImpl?(0.0) - updateDisposable.set((context.engine.messages.editStory(peerId: peerId, id: id, media: .video(dimensions: dimensions, duration: duration, resource: resource, firstFrameFile: firstFrameFile, stickers: stickers), mediaAreas: mediaAreas, text: updatedText, entities: updatedEntities, privacy: nil) - |> deliverOnMainQueue).startStrict(next: { [weak self] 
result in - guard let self else { - return + if let valuesData = try? JSONEncoder().encode(values) { + let data = MemoryBuffer(data: valuesData) + let digest = MemoryBuffer(data: data.md5Digest()) + let adjustments = VideoMediaResourceAdjustments(data: data, digest: digest, isStory: true) + + let resource: TelegramMediaResource + switch content { + case let .imageFile(path): + resource = LocalFileVideoMediaResource(randomId: Int64.random(in: .min ... .max), path: path, adjustments: adjustments) + case let .videoFile(path): + resource = LocalFileVideoMediaResource(randomId: Int64.random(in: .min ... .max), path: path, adjustments: adjustments) + case let .asset(localIdentifier): + resource = VideoLibraryMediaResource(localIdentifier: localIdentifier, conversion: .compress(adjustments)) } - switch result { - case let .progress(progress): - updateProgressImpl?(progress) - case .completed: - Queue.mainQueue().after(0.1) { + + let tempFile = TempBox.shared.tempFile(fileName: "file") + defer { + TempBox.shared.dispose(tempFile) + } + let firstFrameImageData = firstFrameImage.flatMap { compressImageToJPEG($0, quality: 0.6, tempFilePath: tempFile.path) } + let firstFrameFile = firstFrameImageData.flatMap { data -> TempBoxFile? in + let file = TempBox.shared.tempFile(fileName: "image.jpg") + if let _ = try? 
data.write(to: URL(fileURLWithPath: file.path)) { + return file + } else { + return nil + } + } + + updateDisposable.set((context.engine.messages.editStory(peerId: peerId, id: id, media: .video(dimensions: dimensions, duration: duration, resource: resource, firstFrameFile: firstFrameFile, stickers: result.stickers), mediaAreas: result.mediaAreas, text: updatedText, entities: updatedEntities, privacy: nil) + |> deliverOnMainQueue).startStrict(next: { [weak self] result in + guard let self else { + return + } + switch result { + case let .progress(progress): + updateProgressImpl?(progress) + case .completed: + Queue.mainQueue().after(0.1) { + self.isEditingStory = false + self.rewindCurrentItem() + self.updateIsProgressPaused() + self.state?.updated(transition: .easeInOut(duration: 0.2)) + + HapticFeedback().success() + + commit({}) + } + } + })) + } + } + } else if updatedText != nil { + let _ = (context.engine.messages.editStory(peerId: peerId, id: id, media: nil, mediaAreas: nil, text: updatedText, entities: updatedEntities, privacy: nil) + |> deliverOnMainQueue).startStandalone(next: { [weak self] result in + switch result { + case .completed: + Queue.mainQueue().after(0.1) { + if let self { self.isEditingStory = false self.rewindCurrentItem() self.updateIsProgressPaused() self.state?.updated(transition: .easeInOut(duration: 0.2)) HapticFeedback().success() - - commit({}) } + commit({}) } - })) - } - } - } else if updatedText != nil { - let _ = (context.engine.messages.editStory(peerId: peerId, id: id, media: nil, mediaAreas: nil, text: updatedText, entities: updatedEntities, privacy: nil) - |> deliverOnMainQueue).startStandalone(next: { [weak self] result in - switch result { - case .completed: - Queue.mainQueue().after(0.1) { - if let self { - self.isEditingStory = false - self.rewindCurrentItem() - self.updateIsProgressPaused() - self.state?.updated(transition: .easeInOut(duration: 0.2)) - - HapticFeedback().success() - } - commit({}) + default: + break } - 
default: - break - } - }) - } else { - self.isEditingStory = false - self.rewindCurrentItem() - self.updateIsProgressPaused() - self.state?.updated(transition: .easeInOut(duration: 0.2)) - - HapticFeedback().success() - - commit({}) + }) + } else { + self.isEditingStory = false + self.rewindCurrentItem() + self.updateIsProgressPaused() + self.state?.updated(transition: .easeInOut(duration: 0.2)) + + HapticFeedback().success() + + commit({}) + } } - } ) controller.willDismiss = { [weak self] in @@ -5969,7 +6061,8 @@ public final class StoryItemSetContainerComponent: Component { context: component.context, updatedPresentationData: (presentationData, .single(presentationData)), peerId: component.slice.peer.id, - storyId: component.slice.item.storyItem.id + storyId: component.slice.item.storyItem.id, + storyItem: component.slice.item.storyItem ) component.controller()?.push(statsController) }))) @@ -6356,6 +6449,27 @@ public final class StoryItemSetContainerComponent: Component { }))) } + if component.slice.additionalPeerData.canViewStats { + items.append(.action(ContextMenuActionItem(text: component.strings.Story_Context_ViewStats, icon: { theme in + return generateTintedImage(image: UIImage(bundleImageName: "Chat/Context Menu/Statistics"), color: theme.contextMenu.primaryColor) + }, action: { [weak self] _, a in + a(.default) + + guard let self, let component = self.component else { + return + } + let presentationData = component.context.sharedContext.currentPresentationData.with { $0 }.withUpdated(theme: defaultDarkColorPresentationTheme) + let statsController = component.context.sharedContext.makeStoryStatsController( + context: component.context, + updatedPresentationData: (presentationData, .single(presentationData)), + peerId: component.slice.peer.id, + storyId: component.slice.item.storyItem.id, + storyItem: component.slice.item.storyItem + ) + component.controller()?.push(statsController) + }))) + } + if !component.slice.item.storyItem.text.isEmpty { let 
(canTranslate, _) = canTranslateText(context: component.context, text: component.slice.item.storyItem.text, showTranslate: translationSettings.showTranslate, showTranslateIfTopical: false, ignoredLanguages: translationSettings.ignoredLanguages) if canTranslate { diff --git a/submodules/TelegramUI/Components/Stories/StoryContainerScreen/Sources/StoryItemSetContainerViewSendMessage.swift b/submodules/TelegramUI/Components/Stories/StoryContainerScreen/Sources/StoryItemSetContainerViewSendMessage.swift index 7481c9f222..2e7b084f9a 100644 --- a/submodules/TelegramUI/Components/Stories/StoryContainerScreen/Sources/StoryItemSetContainerViewSendMessage.swift +++ b/submodules/TelegramUI/Components/Stories/StoryContainerScreen/Sources/StoryItemSetContainerViewSendMessage.swift @@ -1041,7 +1041,12 @@ final class StoryItemSetContainerSendMessage { immediateExternalShare: false, forceTheme: defaultDarkColorPresentationTheme ) - + shareController.shareStory = { [weak view] in + guard let view else { + return + } + view.openStoryEditing(repost: true) + } shareController.completed = { [weak view] peerIds in guard let view, let component = view.component else { return @@ -1052,7 +1057,7 @@ final class StoryItemSetContainerSendMessage { peerIds.map(TelegramEngine.EngineData.Item.Peer.Peer.init) ) ) - |> deliverOnMainQueue).start(next: { [weak view] peerList in + |> deliverOnMainQueue).start(next: { [weak view] peerList in guard let view, let component = view.component else { return } diff --git a/submodules/TelegramUI/Components/Stories/StoryContainerScreen/Sources/StoryItemSetViewListComponent.swift b/submodules/TelegramUI/Components/Stories/StoryContainerScreen/Sources/StoryItemSetViewListComponent.swift index 12f9512f23..3c287b4fe1 100644 --- a/submodules/TelegramUI/Components/Stories/StoryContainerScreen/Sources/StoryItemSetViewListComponent.swift +++ b/submodules/TelegramUI/Components/Stories/StoryContainerScreen/Sources/StoryItemSetViewListComponent.swift @@ -231,8 +231,9 @@ 
final class StoryItemSetViewListComponent: Component { } private enum SortMode: Int { - case reactionsFirst = 0 - case recentFirst = 1 + case repostsFirst = 0 + case reactionsFirst = 1 + case recentFirst = 2 } private struct ContentConfigurationKey: Equatable { @@ -688,6 +689,8 @@ final class StoryItemSetViewListComponent: Component { } let mappedSortMode: EngineStoryViewListContext.SortMode switch self.configuration.sortMode { + case .repostsFirst: + mappedSortMode = .repostsFirst case .reactionsFirst: mappedSortMode = .reactionsFirst case .recentFirst: @@ -725,6 +728,8 @@ final class StoryItemSetViewListComponent: Component { } let mappedSortMode: EngineStoryViewListContext.SortMode switch self.configuration.sortMode { + case .repostsFirst: + mappedSortMode = .repostsFirst case .reactionsFirst: mappedSortMode = .reactionsFirst case .recentFirst: @@ -1288,6 +1293,24 @@ final class StoryItemSetViewListComponent: Component { let sortMode = self.sortMode + items.append(.action(ContextMenuActionItem(text: component.strings.Story_ViewList_ContextSortReposts, icon: { theme in + return generateTintedImage(image: UIImage(bundleImageName: "Stories/Context Menu/Repost"), color: theme.contextMenu.primaryColor) + }, additionalLeftIcon: { theme in + if sortMode != .repostsFirst { + return nil + } + return generateTintedImage(image: UIImage(bundleImageName: "Chat/Context Menu/Check"), color: theme.contextMenu.primaryColor) + }, action: { [weak self] _, a in + a(.default) + + guard let self else { + return + } + if self.sortMode != .repostsFirst { + self.sortMode = .repostsFirst + self.state?.updated(transition: .immediate) + } + }))) items.append(.action(ContextMenuActionItem(text: component.strings.Story_ViewList_ContextSortReactions, icon: { theme in return generateTintedImage(image: UIImage(bundleImageName: "Chat/Context Menu/Reactions"), color: theme.contextMenu.primaryColor) }, additionalLeftIcon: { theme in @@ -1525,6 +1548,15 @@ final class StoryItemSetViewListComponent: 
Component { containerSize: CGSize(width: 260.0, height: 100.0) ) + let orderSelectorIconName: String + switch self.sortMode { + case .repostsFirst: + orderSelectorIconName = "Stories/Context Menu/Repost" + case .reactionsFirst: + orderSelectorIconName = "Chat/Context Menu/Reactions" + case .recentFirst: + orderSelectorIconName = "Chat/Context Menu/Time" + } let orderSelectorSize = self.orderSelector.update( transition: transition, component: AnyComponent(OptionButtonComponent( @@ -1532,7 +1564,7 @@ final class StoryItemSetViewListComponent: Component { background: UIColor(rgb: 0xffffff, alpha: 0.09), foreground: .white ), - icon: self.sortMode == .recentFirst ? "Chat/Context Menu/Time" : "Chat/Context Menu/Reactions", + icon: orderSelectorIconName, action: { [weak self] in guard let self else { return diff --git a/submodules/TelegramUI/Components/Stories/StoryFooterPanelComponent/Sources/StoryFooterPanelComponent.swift b/submodules/TelegramUI/Components/Stories/StoryFooterPanelComponent/Sources/StoryFooterPanelComponent.swift index f178635b82..d39b5f6277 100644 --- a/submodules/TelegramUI/Components/Stories/StoryFooterPanelComponent/Sources/StoryFooterPanelComponent.swift +++ b/submodules/TelegramUI/Components/Stories/StoryFooterPanelComponent/Sources/StoryFooterPanelComponent.swift @@ -48,6 +48,7 @@ public final class StoryFooterPanelComponent: Component { public let moreAction: (UIView, ContextGesture?) -> Void public let likeAction: () -> Void public let forwardAction: () -> Void + public let repostAction: () -> Void public init( context: AccountContext, @@ -62,7 +63,8 @@ public final class StoryFooterPanelComponent: Component { deleteAction: @escaping () -> Void, moreAction: @escaping (UIView, ContextGesture?) 
-> Void, likeAction: @escaping () -> Void, - forwardAction: @escaping () -> Void + forwardAction: @escaping () -> Void, + repostAction: @escaping () -> Void ) { self.context = context self.theme = theme @@ -77,6 +79,7 @@ public final class StoryFooterPanelComponent: Component { self.moreAction = moreAction self.likeAction = likeAction self.forwardAction = forwardAction + self.repostAction = repostAction } public static func ==(lhs: StoryFooterPanelComponent, rhs: StoryFooterPanelComponent) -> Bool { @@ -116,10 +119,14 @@ public final class StoryFooterPanelComponent: Component { private var likeButton: ComponentView? private var likeStatsText: AnimatedCountLabelView? private var forwardButton: ComponentView? - + private var repostButton: ComponentView? + private var reactionStatsIcon: UIImageView? private var reactionStatsText: AnimatedCountLabelView? + private var repostStatsIcon: UIImageView? + private var repostStatsText: AnimatedCountLabelView? + private var statusButton: HighlightableButton? private var statusNode: SemanticStatusNode? private var uploadingText: ComponentView? 
@@ -174,18 +181,24 @@ public final class StoryFooterPanelComponent: Component { self.viewStatsLabelText.view?.alpha = 0.7 self.reactionStatsIcon?.alpha = 0.7 self.reactionStatsText?.alpha = 0.7 + self.repostStatsIcon?.alpha = 0.7 + self.repostStatsText?.alpha = 0.7 } else { self.avatarsView.alpha = 1.0 self.viewStatsCountText.alpha = 1.0 self.viewStatsLabelText.view?.alpha = 1.0 self.reactionStatsIcon?.alpha = 1.0 self.reactionStatsText?.alpha = 1.0 + self.repostStatsIcon?.alpha = 1.0 + self.repostStatsText?.alpha = 1.0 self.avatarsView.layer.animateAlpha(from: 0.7, to: 1.0, duration: 0.2) self.viewStatsCountText.layer.animateAlpha(from: 0.7, to: 1.0, duration: 0.2) self.viewStatsLabelText.view?.layer.animateAlpha(from: 0.7, to: 1.0, duration: 0.2) self.reactionStatsIcon?.layer.animateAlpha(from: 0.7, to: 1.0, duration: 0.2) self.reactionStatsText?.layer.animateAlpha(from: 0.7, to: 1.0, duration: 0.2) + self.repostStatsIcon?.layer.animateAlpha(from: 0.7, to: 1.0, duration: 0.2) + self.repostStatsText?.layer.animateAlpha(from: 0.7, to: 1.0, duration: 0.2) } } self.viewStatsButton.addTarget(self, action: #selector(self.viewStatsPressed), for: .touchUpInside) @@ -349,9 +362,11 @@ public final class StoryFooterPanelComponent: Component { var viewCount = 0 var reactionCount = 0 + var repostCount = 0 if let views = component.externalViews ?? 
component.storyItem.views, views.seenCount != 0 { viewCount = views.seenCount reactionCount = views.reactedCount + repostCount = 0 } if component.isChannel { @@ -414,6 +429,14 @@ public final class StoryFooterPanelComponent: Component { self.likeButton = likeButton } + let repostButton: ComponentView + if let current = self.repostButton { + repostButton = current + } else { + repostButton = ComponentView() + self.repostButton = repostButton + } + let forwardButton: ComponentView if let current = self.forwardButton { forwardButton = current @@ -477,6 +500,54 @@ public final class StoryFooterPanelComponent: Component { rightContentOffset -= likeButtonSize.width + 14.0 } + let repostButtonSize = repostButton.update( + transition: likeStatsTransition, + component: AnyComponent(MessageInputActionButtonComponent( + mode: .repost, + storyId: component.storyItem.id, + action: { [weak self] _, action, _ in + guard let self, let component = self.component else { + return + } + guard case .up = action else { + return + } + component.repostAction() + }, + longPressAction: nil, + switchMediaInputMode: { + }, + updateMediaCancelFraction: { _ in + }, + lockMediaRecording: { + }, + stopAndPreviewMediaRecording: { + }, + moreAction: { _, _ in }, + context: component.context, + theme: component.theme, + strings: component.strings, + presentController: { _ in }, + audioRecorder: nil, + videoRecordingStatus: nil + )), + environment: {}, + containerSize: CGSize(width: 33.0, height: 33.0) + ) + if let repostButtonView = repostButton.view { + if repostButtonView.superview == nil { + self.addSubview(repostButtonView) + } + var repostButtonFrame = CGRect(origin: CGPoint(x: rightContentOffset - repostButtonSize.width, y: floor((size.height - repostButtonSize.height) * 0.5)), size: repostButtonSize) + repostButtonFrame.origin.y += component.expandFraction * 45.0 + + likeStatsTransition.setPosition(view: repostButtonView, position: repostButtonFrame.center) + 
likeStatsTransition.setBounds(view: repostButtonView, bounds: CGRect(origin: CGPoint(), size: repostButtonFrame.size)) + likeStatsTransition.setAlpha(view: repostButtonView, alpha: 1.0 - component.expandFraction) + + rightContentOffset -= repostButtonSize.width + 14.0 + } + let forwardButtonSize = forwardButton.update( transition: likeStatsTransition, component: AnyComponent(MessageInputActionButtonComponent( @@ -529,6 +600,10 @@ public final class StoryFooterPanelComponent: Component { self.likeButton = nil likeButton.view?.removeFromSuperview() } + if let repostButton = self.repostButton { + self.repostButton = nil + repostButton.view?.removeFromSuperview() + } if let forwardButton = self.forwardButton { self.forwardButton = nil forwardButton.view?.removeFromSuperview() @@ -571,6 +646,9 @@ public final class StoryFooterPanelComponent: Component { var reactionsIconSize: CGSize? var reactionsTextSize: CGSize? + var repostsIconSize: CGSize? + var repostsTextSize: CGSize? + if reactionCount != 0 && !component.isChannel { var reactionsTransition = transition let reactionStatsIcon: UIImageView @@ -625,6 +703,60 @@ public final class StoryFooterPanelComponent: Component { } } + if repostCount != 0 && !component.isChannel { + var repostTransition = transition + let repostStatsIcon: UIImageView + if let current = self.repostStatsIcon { + repostStatsIcon = current + } else { + repostTransition = repostTransition.withAnimation(.none) + repostStatsIcon = UIImageView() + repostStatsIcon.image = UIImage(bundleImageName: "Stories/InputRepost")?.withRenderingMode(.alwaysTemplate) + + self.repostStatsIcon = repostStatsIcon + self.externalContainerView.addSubview(repostStatsIcon) + } + + transition.setTintColor(view: repostStatsIcon, color: UIColor(rgb: 0x34c759).mixedWith(.white, alpha: component.expandFraction)) + + let repostStatsText: AnimatedCountLabelView + if let current = self.repostStatsText { + repostStatsText = current + } else { + repostStatsText = 
AnimatedCountLabelView(frame: CGRect()) + repostStatsText.isUserInteractionEnabled = false + self.repostStatsText = repostStatsText + self.externalContainerView.addSubview(repostStatsText) + } + + let repostStatsLayout = repostStatsText.update( + size: CGSize(width: availableSize.width, height: size.height), + segments: [ + .number(repostCount, NSAttributedString(string: "\(repostCount)", font: Font.with(size: 15.0, traits: .monospacedNumbers), textColor: .white)) + ], + reducedLetterSpacing: true, + transition: (isFirstTime || repostTransition.animation.isImmediate) ? .immediate : ContainedViewLayoutTransition.animated(duration: 0.25, curve: .easeInOut) + ) + repostsTextSize = repostStatsLayout.size + + let imageSize = CGSize(width: 23.0, height: 23.0) + repostsIconSize = imageSize + } else { + if let repostStatsIcon = self.repostStatsIcon { + self.repostStatsIcon = nil + repostStatsIcon.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2, removeOnCompletion: false, completion: { [weak repostStatsIcon] _ in + repostStatsIcon?.removeFromSuperview() + }) + } + + if let repostStatsText = self.repostStatsText { + self.repostStatsText = nil + repostStatsText.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2, removeOnCompletion: false, completion: { [weak repostStatsText] _ in + repostStatsText?.removeFromSuperview() + }) + } + } + let viewsReactionsCollapsedSpacing: CGFloat = 6.0 let viewsReactionsExpandedSpacing: CGFloat = 8.0 let viewsReactionsSpacing = viewsReactionsCollapsedSpacing.interpolate(to: viewsReactionsExpandedSpacing, amount: component.expandFraction) @@ -668,6 +800,12 @@ public final class StoryFooterPanelComponent: Component { contentWidth += reactionsIconSpacing contentWidth += reactionsTextSize.width } + if let repostsIconSize, let repostsTextSize { + contentWidth += viewsReactionsSpacing + contentWidth += repostsIconSize.width + contentWidth += reactionsIconSpacing + contentWidth += repostsTextSize.width + } } let minContentX: CGFloat = 16.0 @@ 
-747,6 +885,17 @@ public final class StoryFooterPanelComponent: Component { contentX += reactionsTextSize.width } + if let repostStatsIcon = self.repostStatsIcon, let repostsIconSize, let repostStatsText = self.repostStatsText, let repostsTextSize { + contentX += viewsReactionsSpacing + + transition.setFrame(view: repostStatsIcon, frame: CGRect(origin: CGPoint(x: contentX, y: floor((size.height - repostsIconSize.height) * 0.5)), size: repostsIconSize)) + contentX += repostsIconSize.width + contentX += reactionsIconSpacing + + transition.setFrame(view: repostStatsText, frame: CGRect(origin: CGPoint(x: contentX, y: floor((size.height - repostsTextSize.height) * 0.5)), size: repostsTextSize)) + contentX += repostsTextSize.width + } + let statsButtonWidth = availableSize.width - 80.0 transition.setFrame(view: self.viewStatsButton, frame: CGRect(origin: CGPoint(x: 0.0, y: 0.0), size: CGSize(width: statsButtonWidth, height: baseHeight))) diff --git a/submodules/TelegramUI/Images.xcassets/Avatar/RepostStoryIcon.imageset/Contents.json b/submodules/TelegramUI/Images.xcassets/Avatar/RepostStoryIcon.imageset/Contents.json new file mode 100644 index 0000000000..2afe2d007b --- /dev/null +++ b/submodules/TelegramUI/Images.xcassets/Avatar/RepostStoryIcon.imageset/Contents.json @@ -0,0 +1,12 @@ +{ + "images" : [ + { + "filename" : "repost_40.pdf", + "idiom" : "universal" + } + ], + "info" : { + "author" : "xcode", + "version" : 1 + } +} diff --git a/submodules/TelegramUI/Images.xcassets/Avatar/RepostStoryIcon.imageset/repost_40.pdf b/submodules/TelegramUI/Images.xcassets/Avatar/RepostStoryIcon.imageset/repost_40.pdf new file mode 100644 index 0000000000..53721d0e24 --- /dev/null +++ b/submodules/TelegramUI/Images.xcassets/Avatar/RepostStoryIcon.imageset/repost_40.pdf @@ -0,0 +1,103 @@ +%PDF-1.7 + +1 0 obj + << >> +endobj + +2 0 obj + << /Length 3 0 R >> +stream +/DeviceRGB CS +/DeviceRGB cs +q +1.000000 0.000000 -0.000000 1.000000 8.000000 3.739746 cm +0.000000 0.000000 0.000000 
scn +11.057250 31.869730 m +11.577950 32.390430 12.422169 32.390430 12.942868 31.869730 c +18.276201 26.536396 l +18.796900 26.015697 18.796900 25.171478 18.276201 24.650778 c +12.942868 19.317444 l +12.422169 18.796745 11.577950 18.796745 11.057250 19.317444 c +10.536551 19.838144 10.536551 20.682364 11.057250 21.203064 c +14.114412 24.260223 l +6.666667 24.260223 l +4.457527 24.260223 2.666667 22.469364 2.666667 20.260227 c +2.666667 12.260225 l +2.666667 11.523846 2.069713 10.926891 1.333333 10.926891 c +0.596954 10.926891 0.000000 11.523846 0.000000 12.260225 c +0.000000 20.260227 l +0.000000 23.942127 2.984769 26.926891 6.666667 26.926891 c +14.114470 26.926891 l +11.057250 29.984112 l +10.536551 30.504810 10.536551 31.349030 11.057250 31.869730 c +h +12.942750 0.650732 m +12.422050 0.130032 11.577831 0.130032 11.057132 0.650732 c +5.723799 5.984066 l +5.203100 6.504765 5.203100 7.348986 5.723799 7.869684 c +11.057132 13.203016 l +11.577831 13.723717 12.422050 13.723717 12.942750 13.203016 c +13.463449 12.682318 13.463449 11.838099 12.942750 11.317398 c +9.885588 8.260237 l +17.333332 8.260237 l +19.542473 8.260237 21.333334 10.051096 21.333334 12.260233 c +21.333334 20.260235 l +21.333334 20.996616 21.930286 21.593571 22.666666 21.593571 c +23.403046 21.593571 24.000000 20.996616 24.000000 20.260235 c +24.000000 12.260233 l +24.000000 8.578335 21.015230 5.593571 17.333332 5.593571 c +9.885530 5.593571 l +12.942750 2.536350 l +13.463449 2.015652 13.463449 1.171431 12.942750 0.650732 c +h +f* +n +Q + +endstream +endobj + +3 0 obj + 1602 +endobj + +4 0 obj + << /Annots [] + /Type /Page + /MediaBox [ 0.000000 0.000000 40.000000 40.000000 ] + /Resources 1 0 R + /Contents 2 0 R + /Parent 5 0 R + >> +endobj + +5 0 obj + << /Kids [ 4 0 R ] + /Count 1 + /Type /Pages + >> +endobj + +6 0 obj + << /Pages 5 0 R + /Type /Catalog + >> +endobj + +xref +0 7 +0000000000 65535 f +0000000010 00000 n +0000000034 00000 n +0000001692 00000 n +0000001715 00000 n +0000001888 00000 n 
+0000001962 00000 n +trailer +<< /ID [ (some) (id) ] + /Root 6 0 R + /Size 7 +>> +startxref +2021 +%%EOF \ No newline at end of file diff --git a/submodules/TelegramUI/Images.xcassets/Media Editor/RemoveRecordedVideo.imageset/Contents.json b/submodules/TelegramUI/Images.xcassets/Media Editor/RemoveRecordedVideo.imageset/Contents.json new file mode 100644 index 0000000000..447a7e6314 --- /dev/null +++ b/submodules/TelegramUI/Images.xcassets/Media Editor/RemoveRecordedVideo.imageset/Contents.json @@ -0,0 +1,12 @@ +{ + "images" : [ + { + "filename" : "removevideomessage_30.pdf", + "idiom" : "universal" + } + ], + "info" : { + "author" : "xcode", + "version" : 1 + } +} diff --git a/submodules/TelegramUI/Images.xcassets/Media Editor/RemoveRecordedVideo.imageset/removevideomessage_30.pdf b/submodules/TelegramUI/Images.xcassets/Media Editor/RemoveRecordedVideo.imageset/removevideomessage_30.pdf new file mode 100644 index 0000000000..0ac8dba10c --- /dev/null +++ b/submodules/TelegramUI/Images.xcassets/Media Editor/RemoveRecordedVideo.imageset/removevideomessage_30.pdf @@ -0,0 +1,169 @@ +%PDF-1.7 + +1 0 obj + << >> +endobj + +2 0 obj + << /Length 3 0 R >> +stream +/DeviceRGB CS +/DeviceRGB cs +q +1.000000 0.000000 -0.000000 1.000000 3.169922 3.008057 cm +0.000000 0.000000 0.000000 scn +1.416977 23.578920 m +1.092842 23.903055 0.567315 23.903055 0.243180 23.578920 c +-0.080956 23.254786 -0.080956 22.729258 0.243180 22.405123 c +22.243179 0.405123 l +22.567314 0.080988 23.092842 0.080988 23.416977 0.405123 c +23.741112 0.729258 23.741112 1.254786 23.416977 1.578920 c +21.922409 3.073488 l +22.298174 3.538925 22.621704 4.048790 22.884586 4.594667 c +23.294510 5.445885 23.480398 6.387743 23.570843 7.549334 c +23.660007 8.694446 23.660004 10.120492 23.660000 11.956020 c +23.660000 11.956072 l +23.660000 11.956123 l +23.660000 11.956175 l +23.660000 11.991978 l +23.660000 12.027781 l +23.660000 12.027832 l +23.660000 12.027884 l +23.660000 12.027935 l +23.660004 13.863463 
23.660007 15.289509 23.570843 16.434622 c +23.480398 17.596212 23.294510 18.538071 22.884586 19.389288 c +22.114952 20.987450 20.825472 22.276928 19.227310 23.046564 c +18.376093 23.456488 17.434235 23.642376 16.272644 23.732821 c +15.127507 23.821983 13.701423 23.821981 11.865836 23.821978 c +11.865828 23.821978 l +11.830000 23.821978 l +11.794173 23.821978 l +11.794165 23.821978 l +9.958577 23.821981 8.532493 23.821983 7.387355 23.732821 c +6.225766 23.642376 5.283906 23.456488 4.432690 23.046564 c +3.886811 22.783682 3.376946 22.460152 2.911509 22.084387 c +1.416977 23.578920 l +h +4.093769 20.902128 m +8.336281 16.659616 l +9.309927 17.389559 10.519464 17.821978 11.830000 17.821978 c +15.049820 17.821978 17.660000 15.211798 17.660000 11.991978 c +17.660000 10.681442 17.227581 9.471904 16.497639 8.498258 c +20.740150 4.255747 l +20.990032 4.584133 21.207787 4.938667 21.388977 5.314915 c +21.672272 5.903181 21.832993 6.614014 21.915852 7.678194 c +21.999414 8.751389 22.000000 10.113052 22.000000 11.991978 c +22.000000 13.870903 21.999414 15.232567 21.915852 16.305759 c +21.832993 17.369942 21.672272 18.080774 21.388977 18.669041 c +20.782509 19.928385 19.766407 20.944487 18.507063 21.550955 c +17.918797 21.834249 17.207964 21.994970 16.143784 22.077829 c +15.070589 22.161392 13.708925 22.161978 11.830000 22.161978 c +9.951075 22.161978 8.589411 22.161392 7.516217 22.077829 c +6.452035 21.994970 5.741203 21.834249 5.152937 21.550955 c +4.776690 21.369764 4.422155 21.152010 4.093769 20.902128 c +h +15.306911 9.688987 m +15.744876 10.348874 16.000000 11.140632 16.000000 11.991978 c +16.000000 14.295005 14.133028 16.161978 11.830000 16.161978 c +10.978655 16.161978 10.186896 15.906854 9.527008 15.468888 c +15.306911 9.688987 l +h +0.775414 19.389288 m +0.905527 19.659470 1.050496 19.920828 1.209303 20.172344 c +2.421079 18.960567 l +2.368598 18.864941 2.318552 18.767738 2.271022 18.669041 c +1.987728 18.080774 1.827008 17.369942 1.744148 16.305759 c +1.660586 
15.232567 1.660000 13.870903 1.660000 11.991978 c +1.660000 10.113052 1.660586 8.751389 1.744148 7.678194 c +1.827008 6.614014 1.987728 5.903181 2.271022 5.314915 c +2.877490 4.055571 3.893593 3.039469 5.152937 2.433001 c +5.741203 2.149706 6.452035 1.988985 7.516217 1.906126 c +8.589411 1.822563 9.951075 1.821978 11.830000 1.821978 c +13.708925 1.821978 15.070589 1.822563 16.143784 1.906126 c +17.207964 1.988985 17.918797 2.149706 18.507063 2.433001 c +18.605759 2.480530 18.702961 2.530577 18.798590 2.583057 c +20.010366 1.371281 l +19.758850 1.212473 19.497492 1.067503 19.227310 0.937391 c +18.376093 0.527468 17.434235 0.341579 16.272644 0.251135 c +15.127531 0.161970 13.701486 0.161974 11.865957 0.161978 c +11.865906 0.161978 l +11.865854 0.161978 l +11.865803 0.161978 l +11.830000 0.161978 l +11.794197 0.161978 l +11.794146 0.161978 l +11.794094 0.161978 l +11.794043 0.161978 l +9.958514 0.161974 8.532467 0.161970 7.387355 0.251135 c +6.225766 0.341579 5.283906 0.527468 4.432690 0.937391 c +2.834527 1.707026 1.545049 2.996506 0.775414 4.594667 c +0.365490 5.445885 0.179602 6.387743 0.089157 7.549334 c +-0.000006 8.694470 -0.000004 10.120555 0.000000 11.956142 c +0.000000 11.956151 l +0.000000 11.991978 l +0.000000 12.027805 l +0.000000 12.027814 l +-0.000004 13.863400 -0.000006 15.289485 0.089157 16.434622 c +0.179602 17.596212 0.365490 18.538071 0.775414 19.389288 c +h +6.000000 11.991978 m +6.000000 12.973694 6.242650 13.898737 6.671245 14.710402 c +7.924623 13.457024 l +7.753559 13.001228 7.660000 12.507529 7.660000 11.991978 c +7.660000 9.688950 9.526973 7.821978 11.830000 7.821978 c +12.345551 7.821978 12.839252 7.915537 13.295047 8.086601 c +14.548425 6.833223 l +13.736759 6.404629 12.811716 6.161978 11.830000 6.161978 c +8.610180 6.161978 6.000000 8.772158 6.000000 11.991978 c +h +f* +n +Q + +endstream +endobj + +3 0 obj + 4531 +endobj + +4 0 obj + << /Annots [] + /Type /Page + /MediaBox [ 0.000000 0.000000 30.000000 30.000000 ] + /Resources 1 0 R + 
/Contents 2 0 R + /Parent 5 0 R + >> +endobj + +5 0 obj + << /Kids [ 4 0 R ] + /Count 1 + /Type /Pages + >> +endobj + +6 0 obj + << /Pages 5 0 R + /Type /Catalog + >> +endobj + +xref +0 7 +0000000000 65535 f +0000000010 00000 n +0000000034 00000 n +0000004621 00000 n +0000004644 00000 n +0000004817 00000 n +0000004891 00000 n +trailer +<< /ID [ (some) (id) ] + /Root 6 0 R + /Size 7 +>> +startxref +4950 +%%EOF \ No newline at end of file diff --git a/submodules/TelegramUI/Images.xcassets/Stories/Context Menu/Repost.imageset/Contents.json b/submodules/TelegramUI/Images.xcassets/Stories/Context Menu/Repost.imageset/Contents.json new file mode 100644 index 0000000000..70fb21c3d7 --- /dev/null +++ b/submodules/TelegramUI/Images.xcassets/Stories/Context Menu/Repost.imageset/Contents.json @@ -0,0 +1,12 @@ +{ + "images" : [ + { + "filename" : "repost_24.pdf", + "idiom" : "universal" + } + ], + "info" : { + "author" : "xcode", + "version" : 1 + } +} diff --git a/submodules/TelegramUI/Images.xcassets/Stories/Context Menu/Repost.imageset/repost_24.pdf b/submodules/TelegramUI/Images.xcassets/Stories/Context Menu/Repost.imageset/repost_24.pdf new file mode 100644 index 0000000000..bc73382494 --- /dev/null +++ b/submodules/TelegramUI/Images.xcassets/Stories/Context Menu/Repost.imageset/repost_24.pdf @@ -0,0 +1,184 @@ +%PDF-1.7 + +1 0 obj + << /Type /XObject + /Length 2 0 R + /Group << /Type /Group + /S /Transparency + >> + /Subtype /Form + /Resources << >> + /BBox [ 0.000000 0.000000 24.000000 24.000000 ] + >> +stream +/DeviceRGB CS +/DeviceRGB cs +q +1.000000 0.000000 -0.000000 1.000000 5.000000 2.514893 cm +0.000000 0.000000 0.000000 scn +6.450462 18.590553 m +6.754203 18.894295 7.246665 18.894295 7.550406 18.590553 c +10.661517 15.479443 l +10.965258 15.175701 10.965258 14.683239 10.661517 14.379498 c +7.550406 11.268387 l +7.246665 10.964646 6.754203 10.964646 6.450462 11.268387 c +6.146721 11.572128 6.146721 12.064590 6.450462 12.368332 c +8.233796 14.151666 l +3.888889 
14.151666 l +2.600225 14.151666 1.555556 13.106995 1.555556 11.818330 c +1.555556 7.151666 l +1.555556 6.722111 1.207333 6.373888 0.777778 6.373888 c +0.348223 6.373888 0.000000 6.722111 0.000000 7.151666 c +0.000000 11.818330 l +0.000000 13.966103 1.741114 15.707221 3.888889 15.707221 c +8.233850 15.707221 l +6.450462 17.490610 l +6.146721 17.794352 6.146721 18.286812 6.450462 18.590553 c +h +7.550406 0.379581 m +7.246665 0.075840 6.754203 0.075840 6.450462 0.379581 c +3.339351 3.490692 l +3.035610 3.794434 3.035610 4.286896 3.339351 4.590636 c +6.450462 7.701748 l +6.754203 8.005488 7.246665 8.005488 7.550406 7.701748 c +7.854147 7.398006 7.854147 6.905545 7.550406 6.601804 c +5.766991 4.818388 l +10.111979 4.818388 l +11.400642 4.818388 12.445312 5.863059 12.445312 7.151724 c +12.445312 11.818388 l +12.445312 12.247943 12.793535 12.596166 13.223090 12.596166 c +13.652645 12.596166 14.000868 12.247943 14.000868 11.818388 c +14.000868 7.151724 l +14.000868 5.003951 12.259754 3.262833 10.111979 3.262833 c +5.767099 3.262833 l +7.550406 1.479525 l +7.854147 1.175783 7.854147 0.683323 7.550406 0.379581 c +h +f* +n +Q + +endstream +endobj + +2 0 obj + 1553 +endobj + +3 0 obj + << /Type /XObject + /Length 4 0 R + /Group << /Type /Group + /S /Transparency + >> + /Subtype /Form + /Resources << >> + /BBox [ 0.000000 0.000000 24.000000 24.000000 ] + >> +stream +/DeviceRGB CS +/DeviceRGB cs +q +1.000000 0.000000 -0.000000 1.000000 0.000000 0.000000 cm +0.000000 0.000000 0.000000 scn +0.000000 12.800001 m +0.000000 16.720367 0.000000 18.680552 0.762954 20.177933 c +1.434068 21.495068 2.504932 22.565931 3.822066 23.237045 c +5.319448 24.000000 7.279633 24.000000 11.200000 24.000000 c +12.800001 24.000000 l +16.720367 24.000000 18.680552 24.000000 20.177933 23.237045 c +21.495068 22.565931 22.565931 21.495068 23.237045 20.177933 c +24.000000 18.680552 24.000000 16.720367 24.000000 12.800000 c +24.000000 11.199999 l +24.000000 7.279633 24.000000 5.319448 23.237045 3.822067 c 
+22.565931 2.504932 21.495068 1.434069 20.177933 0.762955 c +18.680552 0.000000 16.720367 0.000000 12.800000 0.000000 c +11.199999 0.000000 l +7.279632 0.000000 5.319448 0.000000 3.822066 0.762955 c +2.504932 1.434069 1.434068 2.504932 0.762954 3.822067 c +0.000000 5.319448 0.000000 7.279633 0.000000 11.200000 c +0.000000 12.800001 l +h +f +n +Q + +endstream +endobj + +4 0 obj + 944 +endobj + +5 0 obj + << /XObject << /X1 1 0 R >> + /ExtGState << /E1 << /SMask << /Type /Mask + /G 3 0 R + /S /Alpha + >> + /Type /ExtGState + >> >> + >> +endobj + +6 0 obj + << /Length 7 0 R >> +stream +/DeviceRGB CS +/DeviceRGB cs +q +/E1 gs +/X1 Do +Q + +endstream +endobj + +7 0 obj + 46 +endobj + +8 0 obj + << /Annots [] + /Type /Page + /MediaBox [ 0.000000 0.000000 24.000000 24.000000 ] + /Resources 5 0 R + /Contents 6 0 R + /Parent 9 0 R + >> +endobj + +9 0 obj + << /Kids [ 8 0 R ] + /Count 1 + /Type /Pages + >> +endobj + +10 0 obj + << /Pages 9 0 R + /Type /Catalog + >> +endobj + +xref +0 11 +0000000000 65535 f +0000000010 00000 n +0000001811 00000 n +0000001834 00000 n +0000003026 00000 n +0000003048 00000 n +0000003346 00000 n +0000003448 00000 n +0000003469 00000 n +0000003642 00000 n +0000003716 00000 n +trailer +<< /ID [ (some) (id) ] + /Root 10 0 R + /Size 11 +>> +startxref +3776 +%%EOF \ No newline at end of file diff --git a/submodules/TelegramUI/Images.xcassets/Stories/HeaderRepost.imageset/Contents.json b/submodules/TelegramUI/Images.xcassets/Stories/HeaderRepost.imageset/Contents.json new file mode 100644 index 0000000000..a2979707d6 --- /dev/null +++ b/submodules/TelegramUI/Images.xcassets/Stories/HeaderRepost.imageset/Contents.json @@ -0,0 +1,12 @@ +{ + "images" : [ + { + "filename" : "repost_16.pdf", + "idiom" : "universal" + } + ], + "info" : { + "author" : "xcode", + "version" : 1 + } +} diff --git a/submodules/TelegramUI/Images.xcassets/Stories/HeaderRepost.imageset/repost_16.pdf b/submodules/TelegramUI/Images.xcassets/Stories/HeaderRepost.imageset/repost_16.pdf 
new file mode 100644 index 0000000000..9475ed8afa --- /dev/null +++ b/submodules/TelegramUI/Images.xcassets/Stories/HeaderRepost.imageset/repost_16.pdf @@ -0,0 +1,103 @@ +%PDF-1.7 + +1 0 obj + << >> +endobj + +2 0 obj + << /Length 3 0 R >> +stream +/DeviceRGB CS +/DeviceRGB cs +q +1.000000 0.000000 -0.000000 1.000000 2.729980 0.869385 cm +0.000000 0.000000 0.000000 scn +4.842408 13.974898 m +5.076723 14.209213 5.456622 14.209213 5.690936 13.974898 c +8.024273 11.641562 l +8.258587 11.407248 8.258587 11.027349 8.024273 10.793035 c +5.690936 8.459699 l +5.456622 8.225384 5.076723 8.225384 4.842408 8.459699 c +4.608093 8.694014 4.608093 9.073912 4.842408 9.308227 c +6.151480 10.617298 l +2.600000 10.617298 l +1.826801 10.617298 1.200000 9.990498 1.200000 9.217299 c +1.200000 5.383959 l +1.200000 5.052588 0.931371 4.783958 0.600000 4.783958 c +0.268629 4.783958 0.000000 5.052588 0.000000 5.383959 c +0.000000 9.217299 l +0.000000 10.653240 1.164060 11.817299 2.600000 11.817299 c +6.151480 11.817299 l +4.842408 13.126370 l +4.608093 13.360685 4.608093 13.740584 4.842408 13.974898 c +h +5.690948 0.293005 m +5.456634 0.058690 5.076735 0.058690 4.842421 0.293005 c +2.509084 2.626341 l +2.274770 2.860656 2.274770 3.240555 2.509084 3.474869 c +4.842421 5.808205 l +5.076735 6.042520 5.456634 6.042520 5.690948 5.808205 c +5.925263 5.573891 5.925263 5.193992 5.690948 4.959677 c +4.381876 3.650605 l +7.933357 3.650605 l +8.706555 3.650605 9.333357 4.277406 9.333357 5.050604 c +9.333357 8.883945 l +9.333357 9.215316 9.601986 9.483945 9.933356 9.483945 c +10.264728 9.483945 10.533357 9.215316 10.533357 8.883945 c +10.533357 5.050604 l +10.533357 3.614663 9.369297 2.450604 7.933357 2.450604 c +4.381877 2.450604 l +5.690948 1.141533 l +5.925263 0.907218 5.925263 0.527319 5.690948 0.293005 c +h +f* +n +Q + +endstream +endobj + +3 0 obj + 1522 +endobj + +4 0 obj + << /Annots [] + /Type /Page + /MediaBox [ 0.000000 0.000000 16.000000 16.000000 ] + /Resources 1 0 R + /Contents 2 0 R + 
/Parent 5 0 R + >> +endobj + +5 0 obj + << /Kids [ 4 0 R ] + /Count 1 + /Type /Pages + >> +endobj + +6 0 obj + << /Pages 5 0 R + /Type /Catalog + >> +endobj + +xref +0 7 +0000000000 65535 f +0000000010 00000 n +0000000034 00000 n +0000001612 00000 n +0000001635 00000 n +0000001808 00000 n +0000001882 00000 n +trailer +<< /ID [ (some) (id) ] + /Root 6 0 R + /Size 7 +>> +startxref +1941 +%%EOF \ No newline at end of file diff --git a/submodules/TelegramUI/Images.xcassets/Stories/InputRepost.imageset/Contents.json b/submodules/TelegramUI/Images.xcassets/Stories/InputRepost.imageset/Contents.json new file mode 100644 index 0000000000..62d79d15bf --- /dev/null +++ b/submodules/TelegramUI/Images.xcassets/Stories/InputRepost.imageset/Contents.json @@ -0,0 +1,12 @@ +{ + "images" : [ + { + "filename" : "repost_30 (2).pdf", + "idiom" : "universal" + } + ], + "info" : { + "author" : "xcode", + "version" : 1 + } +} diff --git a/submodules/TelegramUI/Images.xcassets/Stories/InputRepost.imageset/repost_30 (2).pdf b/submodules/TelegramUI/Images.xcassets/Stories/InputRepost.imageset/repost_30 (2).pdf new file mode 100644 index 0000000000..622b949443 --- /dev/null +++ b/submodules/TelegramUI/Images.xcassets/Stories/InputRepost.imageset/repost_30 (2).pdf @@ -0,0 +1,103 @@ +%PDF-1.7 + +1 0 obj + << >> +endobj + +2 0 obj + << /Length 3 0 R >> +stream +/DeviceRGB CS +/DeviceRGB cs +q +1.000000 0.000000 -0.000000 1.000000 6.169922 3.007812 cm +0.000000 0.000000 0.000000 scn +8.243101 23.579042 m +8.567237 23.903177 9.092763 23.903177 9.416899 23.579042 c +13.416899 19.579042 l +13.741034 19.254908 13.741034 18.729380 13.416899 18.405245 c +9.416899 14.405245 l +9.092763 14.081110 8.567237 14.081110 8.243101 14.405245 c +7.918966 14.729381 7.918966 15.254907 8.243101 15.579042 c +10.826202 18.162144 l +4.830000 18.162144 l +3.079257 18.162144 1.660000 16.742886 1.660000 14.992144 c +1.660000 8.992144 l +1.660000 8.533748 1.288396 8.162144 0.830000 8.162144 c +0.371604 8.162144 0.000000 
8.533748 0.000000 8.992144 c +0.000000 14.992144 l +0.000000 17.659679 2.162465 19.822144 4.830000 19.822144 c +10.826202 19.822144 l +8.243101 22.405245 l +7.918966 22.729380 7.918966 23.254908 8.243101 23.579042 c +h +9.416899 0.405245 m +9.092763 0.081110 8.567237 0.081110 8.243101 0.405245 c +4.243101 4.405245 l +3.918966 4.729380 3.918966 5.254908 4.243101 5.579042 c +8.243101 9.579042 l +8.567237 9.903177 9.092763 9.903177 9.416899 9.579042 c +9.741034 9.254907 9.741034 8.729381 9.416899 8.405245 c +6.833797 5.822144 l +12.830000 5.822144 l +14.580743 5.822144 16.000000 7.241400 16.000000 8.992144 c +16.000000 14.992144 l +16.000000 15.450540 16.371603 15.822144 16.830000 15.822144 c +17.288397 15.822144 17.660000 15.450540 17.660000 14.992144 c +17.660000 8.992144 l +17.660000 6.324608 15.497535 4.162144 12.830000 4.162144 c +6.833797 4.162144 l +9.416899 1.579042 l +9.741034 1.254908 9.741034 0.729380 9.416899 0.405245 c +h +f* +n +Q + +endstream +endobj + +3 0 obj + 1555 +endobj + +4 0 obj + << /Annots [] + /Type /Page + /MediaBox [ 0.000000 0.000000 30.000000 30.000000 ] + /Resources 1 0 R + /Contents 2 0 R + /Parent 5 0 R + >> +endobj + +5 0 obj + << /Kids [ 4 0 R ] + /Count 1 + /Type /Pages + >> +endobj + +6 0 obj + << /Pages 5 0 R + /Type /Catalog + >> +endobj + +xref +0 7 +0000000000 65535 f +0000000010 00000 n +0000000034 00000 n +0000001645 00000 n +0000001668 00000 n +0000001841 00000 n +0000001915 00000 n +trailer +<< /ID [ (some) (id) ] + /Root 6 0 R + /Size 7 +>> +startxref +1974 +%%EOF \ No newline at end of file diff --git a/submodules/TelegramUI/Sources/AccountContext.swift b/submodules/TelegramUI/Sources/AccountContext.swift index 3fb0af2bbe..a5dd4d87ca 100644 --- a/submodules/TelegramUI/Sources/AccountContext.swift +++ b/submodules/TelegramUI/Sources/AccountContext.swift @@ -247,6 +247,9 @@ public final class AccountContextImpl: AccountContext { private var peerNameColorsConfigurationDisposable: Disposable? 
public private(set) var peerNameColors: PeerNameColors + private var audioTranscriptionTrialDisposable: Disposable? + public private(set) var audioTranscriptionTrial: AudioTranscription.TrialState + public private(set) var isPremium: Bool public let imageCache: AnyObject? @@ -261,6 +264,7 @@ public final class AccountContextImpl: AccountContext { self.userLimits = EngineConfiguration.UserLimits(UserLimitsConfiguration.defaultValue) self.peerNameColors = PeerNameColors.defaultValue + self.audioTranscriptionTrial = AudioTranscription.TrialState.defaultValue self.isPremium = false self.downloadedMediaStoreManager = DownloadedMediaStoreManagerImpl(postbox: account.postbox, accountManager: sharedContext.accountManager) @@ -414,6 +418,22 @@ public final class AccountContextImpl: AccountContext { } self.peerNameColors = PeerNameColors.with(appConfiguration: appConfiguration) }) + + self.audioTranscriptionTrialDisposable = (self.engine.data.subscribe(TelegramEngine.EngineData.Item.Peer.Peer(id: account.peerId)) + |> mapToSignal { peer -> Signal in + let isPremium = peer?.isPremium ?? false + if isPremium { + return .single(AudioTranscription.TrialState(cooldownUntilTime: nil, remainingCount: 1)) + } else { + return self.engine.data.subscribe(TelegramEngine.EngineData.Item.Configuration.AudioTranscriptionTrial()) + } + } + |> deliverOnMainQueue).startStrict(next: { [weak self] audioTranscriptionTrial in + guard let self = self else { + return + } + self.audioTranscriptionTrial = audioTranscriptionTrial + }) } deinit { @@ -703,11 +723,6 @@ private final class ChatLocationReplyContextHolderImpl: ChatLocationContextHolde } } -func getAppConfiguration(transaction: Transaction) -> AppConfiguration { - let appConfiguration: AppConfiguration = transaction.getPreferencesEntry(key: PreferencesKeys.appConfiguration)?.get(AppConfiguration.self) ?? 
AppConfiguration.defaultValue - return appConfiguration -} - func getAppConfiguration(postbox: Postbox) -> Signal { return postbox.preferencesView(keys: [PreferencesKeys.appConfiguration]) |> map { view -> AppConfiguration in diff --git a/submodules/TelegramUI/Sources/ChatController.swift b/submodules/TelegramUI/Sources/ChatController.swift index 259d8b1b3d..ba0eb07469 100644 --- a/submodules/TelegramUI/Sources/ChatController.swift +++ b/submodules/TelegramUI/Sources/ChatController.swift @@ -483,6 +483,7 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G let selectAddMemberDisposable = MetaDisposable() let addMemberDisposable = MetaDisposable() + let joinChannelDisposable = MetaDisposable() var shouldDisplayDownButton = false @@ -930,7 +931,7 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G } } let wallpaperPreviewController = WallpaperGalleryController(context: strongSelf.context, source: .wallpaper(wallpaper, options, [], intensity, nil, nil), mode: .peer(EnginePeer(peer), true)) - wallpaperPreviewController.apply = { [weak wallpaperPreviewController] entry, options, _, _, brightness, _ in + wallpaperPreviewController.apply = { [weak wallpaperPreviewController] entry, options, _, _, brightness, forBoth in var settings: WallpaperSettings? if case let .wallpaper(wallpaper, _) = entry { let baseSettings = wallpaper.settings @@ -942,7 +943,7 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G } settings = WallpaperSettings(blur: options.contains(.blur), motion: options.contains(.motion), colors: baseSettings?.colors ?? 
[], intensity: intensity, rotation: baseSettings?.rotation) } - let _ = (strongSelf.context.engine.themes.setExistingChatWallpaper(messageId: message.id, settings: settings) + let _ = (strongSelf.context.engine.themes.setExistingChatWallpaper(messageId: message.id, settings: settings, forBoth: forBoth) |> deliverOnMainQueue).startStandalone() Queue.mainQueue().after(0.1) { wallpaperPreviewController?.dismiss() @@ -4486,6 +4487,67 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G self.push(controller) }) + }, openRecommendedChannelContextMenu: { [weak self] peer, sourceView, gesture in + guard let self else { + return + } + + let chatController = self.context.sharedContext.makeChatController(context: self.context, chatLocation: .peer(id: peer.id), subject: nil, botStart: nil, mode: .standard(previewing: true)) + chatController.canReadHistory.set(false) + + var items: [ContextMenuItem] = [ + .action(ContextMenuActionItem(text: self.presentationData.strings.Conversation_LinkDialogOpen, icon: { theme in return generateTintedImage(image: UIImage(bundleImageName: "Chat/Context Menu/ImageEnlarge"), color: theme.actionSheet.primaryTextColor) }, action: { [weak self] _, f in + f(.dismissWithoutContent) + self?.openPeer(peer: peer, navigation: .chat(textInputState: nil, subject: nil, peekData: nil), fromMessage: nil) + })), + ] + items.append(.action(ContextMenuActionItem(text: self.presentationData.strings.Chat_SimilarChannels_Join, icon: { theme in return generateTintedImage(image: UIImage(bundleImageName: "Chat/Context Menu/Add"), color: theme.actionSheet.primaryTextColor) }, action: { [weak self] _, f in + f(.dismissWithoutContent) + + guard let self else { + return + } + let presentationData = self.presentationData + self.joinChannelDisposable.set(( + self.context.peerChannelMemberCategoriesContextsManager.join(engine: self.context.engine, peerId: peer.id, hash: nil) + |> deliverOnMainQueue + |> afterCompleted { [weak self] in + 
Queue.mainQueue().async { + if let self { + self.present(UndoOverlayController(presentationData: presentationData, content: .succeed(text: presentationData.strings.Chat_SimilarChannels_JoinedChannel(peer.compactDisplayTitle).string, timeout: nil, customUndoText: nil), elevatedLayout: false, animateInAsReplacement: false, action: { _ in return false }), in: .window(.root)) + } + } + } + ).startStrict(error: { [weak self] error in + guard let self else { + return + } + let text: String + switch error { + case .inviteRequestSent: + self.present(UndoOverlayController(presentationData: presentationData, content: .inviteRequestSent(title: presentationData.strings.Group_RequestToJoinSent, text: presentationData.strings.Group_RequestToJoinSentDescriptionGroup), elevatedLayout: true, animateInAsReplacement: false, action: { _ in return false }), in: .window(.root)) + return + case .tooMuchJoined: + self.push(oldChannelsController(context: context, intent: .join)) + return + case .tooMuchUsers: + text = self.presentationData.strings.Conversation_UsersTooMuchError + case .generic: + text = self.presentationData.strings.Channel_ErrorAccessDenied + } + self.present(textAlertController(context: context, title: nil, text: text, actions: [TextAlertAction(type: .defaultAction, title: presentationData.strings.Common_OK, action: {})]), in: .window(.root)) + })) + }))) + + self.chatDisplayNode.messageTransitionNode.dismissMessageReactionContexts() + + self.canReadHistory.set(false) + + let contextController = ContextController(presentationData: self.presentationData, source: .controller(ChatContextControllerContentSourceImpl(controller: chatController, sourceView: sourceView, passthroughTouches: false)), items: .single(ContextController.Items(content: .list(items))), gesture: gesture) + contextController.dismissed = { [weak self] in + self?.canReadHistory.set(true) + } + self.presentInGlobalOverlay(contextController) }, requestMessageUpdate: { [weak self] id, scroll in if let self { 
self.chatDisplayNode.historyNode.requestMessageUpdate(id, andScrollToItem: scroll) @@ -6470,6 +6532,7 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G self.peerSuggestionsDismissDisposable.dispose() self.selectAddMemberDisposable.dispose() self.addMemberDisposable.dispose() + self.joinChannelDisposable.dispose() self.nextChannelToReadDisposable?.dispose() self.inviteRequestsDisposable.dispose() self.sendAsPeersDisposable?.dispose() @@ -18577,6 +18640,7 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G final class ChatContextControllerContentSourceImpl: ContextControllerContentSource { let controller: ViewController weak var sourceNode: ASDisplayNode? + weak var sourceView: UIView? let sourceRect: CGRect? let navigationController: NavigationController? = nil @@ -18590,11 +18654,21 @@ final class ChatContextControllerContentSourceImpl: ContextControllerContentSour self.passthroughTouches = passthroughTouches } + init(controller: ViewController, sourceView: UIView?, sourceRect: CGRect? = nil, passthroughTouches: Bool) { + self.controller = controller + self.sourceView = sourceView + self.sourceRect = sourceRect + self.passthroughTouches = passthroughTouches + } + func transitionInfo() -> ContextControllerTakeControllerInfo? { + let sourceView = self.sourceView let sourceNode = self.sourceNode let sourceRect = self.sourceRect return ContextControllerTakeControllerInfo(contentAreaInScreenSpace: CGRect(origin: CGPoint(), size: CGSize(width: 10.0, height: 10.0)), sourceNode: { [weak sourceNode] in - if let sourceNode = sourceNode { + if let sourceView = sourceView { + return (sourceView, sourceRect ?? sourceView.bounds) + } else if let sourceNode = sourceNode { return (sourceNode.view, sourceRect ?? 
sourceNode.bounds) } else { return nil diff --git a/submodules/TelegramUI/Sources/ChatControllerNode.swift b/submodules/TelegramUI/Sources/ChatControllerNode.swift index 62610f663b..9cf1f30511 100644 --- a/submodules/TelegramUI/Sources/ChatControllerNode.swift +++ b/submodules/TelegramUI/Sources/ChatControllerNode.swift @@ -2502,7 +2502,7 @@ class ChatControllerNode: ASDisplayNode, UIScrollViewDelegate { let themeUpdated = presentationReadyUpdated || (self.chatPresentationInterfaceState.theme !== chatPresentationInterfaceState.theme) - self.backgroundNode.update(wallpaper: chatPresentationInterfaceState.chatWallpaper) + self.backgroundNode.update(wallpaper: chatPresentationInterfaceState.chatWallpaper, animated: true) self.historyNode.verticalScrollIndicatorColor = UIColor(white: 0.5, alpha: 0.8) self.loadingPlaceholderNode?.updatePresentationInterfaceState(chatPresentationInterfaceState) diff --git a/submodules/TelegramUI/Sources/ChatHistoryListNode.swift b/submodules/TelegramUI/Sources/ChatHistoryListNode.swift index cc59c041a8..b896c1912c 100644 --- a/submodules/TelegramUI/Sources/ChatHistoryListNode.swift +++ b/submodules/TelegramUI/Sources/ChatHistoryListNode.swift @@ -328,7 +328,8 @@ private func extractAssociatedData( hasBots: Bool, translateToLanguage: String?, maxReadStoryId: Int32?, - recommendedChannels: RecommendedChannels? + recommendedChannels: RecommendedChannels?, + audioTranscriptionTrial: AudioTranscription.TrialState ) -> ChatMessageItemAssociatedData { var automaticDownloadPeerId: EnginePeer.Id? 
var automaticMediaDownloadPeerType: MediaAutoDownloadPeerType = .channel @@ -383,7 +384,7 @@ private func extractAssociatedData( automaticDownloadPeerId = message.messageId.peerId } - return ChatMessageItemAssociatedData(automaticDownloadPeerType: automaticMediaDownloadPeerType, automaticDownloadPeerId: automaticDownloadPeerId, automaticDownloadNetworkType: automaticDownloadNetworkType, isRecentActions: false, subject: subject, contactsPeerIds: contactsPeerIds, channelDiscussionGroup: channelDiscussionGroup, animatedEmojiStickers: animatedEmojiStickers, additionalAnimatedEmojiStickers: additionalAnimatedEmojiStickers, currentlyPlayingMessageId: currentlyPlayingMessageId, isCopyProtectionEnabled: isCopyProtectionEnabled, availableReactions: availableReactions, defaultReaction: defaultReaction, isPremium: isPremium, accountPeer: accountPeer, alwaysDisplayTranscribeButton: alwaysDisplayTranscribeButton, topicAuthorId: topicAuthorId, hasBots: hasBots, translateToLanguage: translateToLanguage, maxReadStoryId: maxReadStoryId, recommendedChannels: recommendedChannels) + return ChatMessageItemAssociatedData(automaticDownloadPeerType: automaticMediaDownloadPeerType, automaticDownloadPeerId: automaticDownloadPeerId, automaticDownloadNetworkType: automaticDownloadNetworkType, isRecentActions: false, subject: subject, contactsPeerIds: contactsPeerIds, channelDiscussionGroup: channelDiscussionGroup, animatedEmojiStickers: animatedEmojiStickers, additionalAnimatedEmojiStickers: additionalAnimatedEmojiStickers, currentlyPlayingMessageId: currentlyPlayingMessageId, isCopyProtectionEnabled: isCopyProtectionEnabled, availableReactions: availableReactions, defaultReaction: defaultReaction, isPremium: isPremium, accountPeer: accountPeer, alwaysDisplayTranscribeButton: alwaysDisplayTranscribeButton, topicAuthorId: topicAuthorId, hasBots: hasBots, translateToLanguage: translateToLanguage, maxReadStoryId: maxReadStoryId, recommendedChannels: recommendedChannels, audioTranscriptionTrial: 
audioTranscriptionTrial) } private extension ChatHistoryLocationInput { @@ -649,6 +650,8 @@ public final class ChatHistoryListNodeImpl: ListView, ChatHistoryNode, ChatHisto private var adMessagesDisposable: Disposable? private var preloadAdPeerId: PeerId? private let preloadAdPeerDisposable = MetaDisposable() + private var didSetupRecommendedChannelsPreload = false + private let preloadRecommendedChannelsDisposable = MetaDisposable() private var seenAdIds: [Data] = [] private var pendingDynamicAdMessages: [Message] = [] private var pendingDynamicAdMessageInterval: Int? @@ -981,6 +984,7 @@ public final class ChatHistoryListNodeImpl: ListView, ChatHistoryNode, ChatHisto self.canReadHistoryDisposable?.dispose() self.loadedMessagesFromCachedDataDisposable?.dispose() self.preloadAdPeerDisposable.dispose() + self.preloadRecommendedChannelsDisposable.dispose() self.refreshDisplayedItemRangeTimer?.invalidate() self.genericReactionEffectDisposable?.dispose() self.adMessagesDisposable?.dispose() @@ -1315,6 +1319,8 @@ public final class ChatHistoryListNodeImpl: ListView, ChatHistoryNode, ChatHisto recommendedChannels = .single(nil) } + let audioTranscriptionTrial = self.context.engine.data.subscribe(TelegramEngine.EngineData.Item.Configuration.AudioTranscriptionTrial()) + let messageViewQueue = Queue.mainQueue() let historyViewTransitionDisposable = combineLatest(queue: messageViewQueue, historyViewUpdate, @@ -1334,8 +1340,9 @@ public final class ChatHistoryListNodeImpl: ListView, ChatHistoryNode, ChatHisto topicAuthorId, translationState, maxReadStoryId, - recommendedChannels - ).startStrict(next: { [weak self] update, chatPresentationData, selectedMessages, updatingMedia, networkType, animatedEmojiStickers, additionalAnimatedEmojiStickers, customChannelDiscussionReadState, customThreadOutgoingReadState, availableReactions, defaultReaction, accountPeer, suggestAudioTranscription, promises, topicAuthorId, translationState, maxReadStoryId, recommendedChannels in + 
recommendedChannels, + audioTranscriptionTrial + ).startStrict(next: { [weak self] update, chatPresentationData, selectedMessages, updatingMedia, networkType, animatedEmojiStickers, additionalAnimatedEmojiStickers, customChannelDiscussionReadState, customThreadOutgoingReadState, availableReactions, defaultReaction, accountPeer, suggestAudioTranscription, promises, topicAuthorId, translationState, maxReadStoryId, recommendedChannels, audioTranscriptionTrial in let (historyAppearsCleared, pendingUnpinnedAllMessages, pendingRemovedMessages, currentlyPlayingMessageIdAndType, scrollToMessageId, chatHasBots, allAdMessages) = promises func applyHole() { @@ -1491,7 +1498,7 @@ public final class ChatHistoryListNodeImpl: ListView, ChatHistoryNode, ChatHisto translateToLanguage = languageCode } - let associatedData = extractAssociatedData(chatLocation: chatLocation, view: view, automaticDownloadNetworkType: networkType, animatedEmojiStickers: animatedEmojiStickers, additionalAnimatedEmojiStickers: additionalAnimatedEmojiStickers, subject: subject, currentlyPlayingMessageId: currentlyPlayingMessageIdAndType?.0, isCopyProtectionEnabled: isCopyProtectionEnabled, availableReactions: availableReactions, defaultReaction: defaultReaction, isPremium: isPremium, alwaysDisplayTranscribeButton: alwaysDisplayTranscribeButton, accountPeer: accountPeer, topicAuthorId: topicAuthorId, hasBots: chatHasBots, translateToLanguage: translateToLanguage, maxReadStoryId: maxReadStoryId, recommendedChannels: recommendedChannels) + let associatedData = extractAssociatedData(chatLocation: chatLocation, view: view, automaticDownloadNetworkType: networkType, animatedEmojiStickers: animatedEmojiStickers, additionalAnimatedEmojiStickers: additionalAnimatedEmojiStickers, subject: subject, currentlyPlayingMessageId: currentlyPlayingMessageIdAndType?.0, isCopyProtectionEnabled: isCopyProtectionEnabled, availableReactions: availableReactions, defaultReaction: defaultReaction, isPremium: isPremium, 
alwaysDisplayTranscribeButton: alwaysDisplayTranscribeButton, accountPeer: accountPeer, topicAuthorId: topicAuthorId, hasBots: chatHasBots, translateToLanguage: translateToLanguage, maxReadStoryId: maxReadStoryId, recommendedChannels: recommendedChannels, audioTranscriptionTrial: audioTranscriptionTrial) let filteredEntries = chatHistoryEntriesForView( location: chatLocation, @@ -1617,6 +1624,23 @@ public final class ChatHistoryListNodeImpl: ListView, ChatHistoryNode, ChatHisto } isFirstTime = false } + + if let strongSelf = self { + if let recommendedChannels, !recommendedChannels.channels.isEmpty && !recommendedChannels.isHidden { + if !strongSelf.didSetupRecommendedChannelsPreload { + strongSelf.didSetupRecommendedChannelsPreload = true + let preloadDisposable = DisposableSet() + for channel in recommendedChannels.channels.prefix(5) { + preloadDisposable.add(strongSelf.context.account.viewTracker.polledChannel(peerId: channel.peer.id).startStrict()) + preloadDisposable.add(strongSelf.context.account.addAdditionalPreloadHistoryPeerId(peerId: channel.peer.id)) + } + strongSelf.preloadRecommendedChannelsDisposable.set(preloadDisposable) + } + } else { + strongSelf.didSetupRecommendedChannelsPreload = false + strongSelf.preloadRecommendedChannelsDisposable.set(nil) + } + } if let strongSelf = self, updatedScrollPosition == nil, case .InteractiveChanges = reason, case let .known(offset) = strongSelf.visibleContentOffset(), abs(offset) <= 0.9, let previous = previous { var fillsScreen = true diff --git a/submodules/TelegramUI/Sources/OverlayAudioPlayerControllerNode.swift b/submodules/TelegramUI/Sources/OverlayAudioPlayerControllerNode.swift index e3d1328ea2..61cddab88e 100644 --- a/submodules/TelegramUI/Sources/OverlayAudioPlayerControllerNode.swift +++ b/submodules/TelegramUI/Sources/OverlayAudioPlayerControllerNode.swift @@ -171,6 +171,7 @@ final class OverlayAudioPlayerControllerNode: ViewControllerTracingNode, UIGestu }, openNoAdsDemo: { }, 
displayGiveawayParticipationStatus: { _ in }, openPremiumStatusInfo: { _, _, _, _ in + }, openRecommendedChannelContextMenu: { _, _, _ in }, requestMessageUpdate: { _, _ in }, cancelInteractiveKeyboardGestures: { }, dismissTextInput: { diff --git a/submodules/TelegramUI/Sources/SharedAccountContext.swift b/submodules/TelegramUI/Sources/SharedAccountContext.swift index dfa749f4f5..431c0062e9 100644 --- a/submodules/TelegramUI/Sources/SharedAccountContext.swift +++ b/submodules/TelegramUI/Sources/SharedAccountContext.swift @@ -1589,6 +1589,7 @@ public final class SharedAccountContextImpl: SharedAccountContext { }, openNoAdsDemo: { }, displayGiveawayParticipationStatus: { _ in }, openPremiumStatusInfo: { _, _, _, _ in + }, openRecommendedChannelContextMenu: { _, _, _ in }, requestMessageUpdate: { _, _ in }, cancelInteractiveKeyboardGestures: { }, dismissTextInput: { @@ -1878,8 +1879,8 @@ public final class SharedAccountContextImpl: SharedAccountContext { return messageStatsController(context: context, updatedPresentationData: updatedPresentationData, subject: .message(id: messageId)) } - public func makeStoryStatsController(context: AccountContext, updatedPresentationData: (initial: PresentationData, signal: Signal)?, peerId: EnginePeer.Id, storyId: Int32) -> ViewController { - return messageStatsController(context: context, updatedPresentationData: updatedPresentationData, subject: .story(peerId: peerId, id: storyId)) + public func makeStoryStatsController(context: AccountContext, updatedPresentationData: (initial: PresentationData, signal: Signal)?, peerId: EnginePeer.Id, storyId: Int32, storyItem: EngineStoryItem?) 
-> ViewController { + return messageStatsController(context: context, updatedPresentationData: updatedPresentationData, subject: .story(peerId: peerId, id: storyId, item: storyItem)) } } diff --git a/submodules/TelegramUI/Sources/TelegramRootController.swift b/submodules/TelegramUI/Sources/TelegramRootController.swift index adc219877f..90d1bec42b 100644 --- a/submodules/TelegramUI/Sources/TelegramRootController.swift +++ b/submodules/TelegramUI/Sources/TelegramRootController.swift @@ -53,7 +53,7 @@ private class DetailsChatPlaceholderNode: ASDisplayNode, NavigationDetailsPlaceh self.presentationData = presentationData self.presentationInterfaceState = ChatPresentationInterfaceState(chatWallpaper: self.presentationData.chatWallpaper, theme: self.presentationData.theme, strings: self.presentationData.strings, dateTimeFormat: self.presentationData.dateTimeFormat, nameDisplayOrder: self.presentationData.nameDisplayOrder, limitsConfiguration: self.presentationInterfaceState.limitsConfiguration, fontSize: self.presentationData.chatFontSize, bubbleCorners: self.presentationData.chatBubbleCorners, accountPeerId: self.presentationInterfaceState.accountPeerId, mode: .standard(previewing: false), chatLocation: self.presentationInterfaceState.chatLocation, subject: nil, peerNearbyData: nil, greetingData: nil, pendingUnpinnedAllMessages: false, activeGroupCallInfo: nil, hasActiveGroupCall: false, importState: nil, threadData: nil, isGeneralThreadClosed: nil, replyMessage: nil, accountPeerColor: nil) - self.wallpaperBackgroundNode.update(wallpaper: presentationData.chatWallpaper) + self.wallpaperBackgroundNode.update(wallpaper: presentationData.chatWallpaper, animated: false) } func updateLayout(size: CGSize, needsTiling: Bool, transition: ContainedViewLayoutTransition) { @@ -158,7 +158,7 @@ public final class TelegramRootController: NavigationController, TelegramRootCon detailsPlaceholderNode = current } else { detailsPlaceholderNode = DetailsChatPlaceholderNode(context: 
self.context) - detailsPlaceholderNode.wallpaperBackgroundNode.update(wallpaper: self.presentationData.chatWallpaper) + detailsPlaceholderNode.wallpaperBackgroundNode.update(wallpaper: self.presentationData.chatWallpaper, animated: false) self.detailsPlaceholderNode = detailsPlaceholderNode } self.updateDetailsPlaceholderNode(detailsPlaceholderNode) @@ -275,9 +275,11 @@ public final class TelegramRootController: NavigationController, TelegramRootCon let context = self.context - var storyTarget: Stories.PendingTarget? - var isPeerArchived = false - var updatedTransitionOut: ((Stories.PendingTarget?, Bool) -> StoryCameraTransitionOut?)? + let externalState = MediaEditorTransitionOutExternalState( + storyTarget: nil, + isPeerArchived: false, + transitionOut: nil + ) var presentImpl: ((ViewController) -> Void)? var returnToCameraImpl: (() -> Void)? @@ -297,7 +299,7 @@ public final class TelegramRootController: NavigationController, TelegramRootCon } }, transitionOut: { finished in - if let transitionOut = (updatedTransitionOut ?? transitionOut)(finished ? storyTarget : nil, isPeerArchived), let destinationView = transitionOut.destinationView { + if let transitionOut = (externalState.transitionOut ?? transitionOut)(finished ? externalState.storyTarget : nil, externalState.isPeerArchived), let destinationView = transitionOut.destinationView { return CameraScreen.TransitionOut( destinationView: destinationView, destinationRect: transitionOut.destinationRect, @@ -353,10 +355,9 @@ public final class TelegramRootController: NavigationController, TelegramRootCon context: context, subject: subject, customTarget: customTarget, - isEditing: false, transitionIn: transitionIn, transitionOut: { finished, isNew in - if finished, let transitionOut = (updatedTransitionOut ?? transitionOut)(storyTarget, false), let destinationView = transitionOut.destinationView { + if finished, let transitionOut = (externalState.transitionOut ?? 
transitionOut)(externalState.storyTarget, false), let destinationView = transitionOut.destinationView { return MediaEditorScreen.TransitionOut( destinationView: destinationView, destinationRect: transitionOut.destinationRect, @@ -371,20 +372,20 @@ public final class TelegramRootController: NavigationController, TelegramRootCon } else { return nil } - }, completion: { [weak self] randomId, mediaResult, mediaAreas, caption, options, stickers, commit in - guard let self, let mediaResult else { + }, completion: { [weak self] result, commit in + guard let self else { dismissCameraImpl?() commit({}) return } - + let target: Stories.PendingTarget let targetPeerId: EnginePeer.Id if let customTarget { target = .peer(customTarget) targetPeerId = customTarget } else { - if let sendAsPeerId = options.sendAsPeerId { + if let sendAsPeerId = result.options.sendAsPeerId { target = .peer(sendAsPeerId) targetPeerId = sendAsPeerId } else { @@ -392,8 +393,8 @@ public final class TelegramRootController: NavigationController, TelegramRootCon targetPeerId = context.account.peerId } } - storyTarget = target - + externalState.storyTarget = target + let _ = (self.context.engine.data.get(TelegramEngine.EngineData.Item.Peer.Peer(id: targetPeerId)) |> deliverOnMainQueue).startStandalone(next: { [weak self] peer in guard let self, let peer else { @@ -401,120 +402,16 @@ public final class TelegramRootController: NavigationController, TelegramRootCon } if case let .user(user) = peer { - isPeerArchived = user.storiesHidden ?? false + externalState.isPeerArchived = user.storiesHidden ?? false } else if case let .channel(channel) = peer { - isPeerArchived = channel.storiesHidden ?? false + externalState.isPeerArchived = channel.storiesHidden ?? 
false } - let context = self.context - if let rootTabController = self.rootTabController { - if let index = rootTabController.controllers.firstIndex(where: { $0 is ChatListController}) { - rootTabController.selectedIndex = index - } - } - - let completionImpl: () -> Void = { [weak self] in - guard let self else { - return - } - - var chatListController: ChatListControllerImpl? - - if isPeerArchived { - var viewControllers = self.viewControllers - - let archiveController = ChatListControllerImpl(context: context, location: .chatList(groupId: .archive), controlsHistoryPreload: false, hideNetworkActivityStatus: false, previewing: false, enableDebugActions: false) - updatedTransitionOut = archiveController.storyCameraTransitionOut() - chatListController = archiveController - viewControllers.insert(archiveController, at: 1) - self.setViewControllers(viewControllers, animated: false) - } else { - chatListController = self.chatListController as? ChatListControllerImpl - } - - if let chatListController { - let _ = (chatListController.hasPendingStories - |> filter { $0 } - |> take(1) - |> timeout(isPeerArchived ? 0.5 : 0.25, queue: .mainQueue(), alternate: .single(true)) - |> deliverOnMainQueue).startStandalone(completed: { [weak chatListController] in - guard let chatListController else { - return - } - - chatListController.scrollToStories(peerId: targetPeerId) - Queue.mainQueue().justDispatch { - commit({}) - } - }) - } else { - Queue.mainQueue().justDispatch { - commit({}) - } - } - } - - if let _ = self.chatListController as? 
ChatListControllerImpl { - switch mediaResult { - case let .image(image, dimensions): - let tempFile = TempBox.shared.tempFile(fileName: "file") - defer { - TempBox.shared.dispose(tempFile) - } - if let imageData = compressImageToJPEG(image, quality: 0.7, tempFilePath: tempFile.path) { - let entities = generateChatInputTextEntities(caption) - Logger.shared.log("MediaEditor", "Calling uploadStory for image, randomId \(randomId)") - let _ = (context.engine.messages.uploadStory(target: target, media: .image(dimensions: dimensions, data: imageData, stickers: stickers), mediaAreas: mediaAreas, text: caption.string, entities: entities, pin: options.pin, privacy: options.privacy, isForwardingDisabled: options.isForwardingDisabled, period: options.timeout, randomId: randomId) - |> deliverOnMainQueue).startStandalone(next: { stableId in - moveStorySource(engine: context.engine, peerId: context.account.peerId, from: randomId, to: Int64(stableId)) - }) - - completionImpl() - } - case let .video(content, firstFrameImage, values, duration, dimensions): - let adjustments: VideoMediaResourceAdjustments - if let valuesData = try? JSONEncoder().encode(values) { - let data = MemoryBuffer(data: valuesData) - let digest = MemoryBuffer(data: data.md5Digest()) - adjustments = VideoMediaResourceAdjustments(data: data, digest: digest, isStory: true) - - let resource: TelegramMediaResource - switch content { - case let .imageFile(path): - resource = LocalFileVideoMediaResource(randomId: Int64.random(in: .min ... .max), path: path, adjustments: adjustments) - case let .videoFile(path): - resource = LocalFileVideoMediaResource(randomId: Int64.random(in: .min ... 
.max), path: path, adjustments: adjustments) - case let .asset(localIdentifier): - resource = VideoLibraryMediaResource(localIdentifier: localIdentifier, conversion: .compress(adjustments)) - } - let tempFile = TempBox.shared.tempFile(fileName: "file") - defer { - TempBox.shared.dispose(tempFile) - } - let imageData = firstFrameImage.flatMap { compressImageToJPEG($0, quality: 0.6, tempFilePath: tempFile.path) } - let firstFrameFile = imageData.flatMap { data -> TempBoxFile? in - let file = TempBox.shared.tempFile(fileName: "image.jpg") - if let _ = try? data.write(to: URL(fileURLWithPath: file.path)) { - return file - } else { - return nil - } - } - Logger.shared.log("MediaEditor", "Calling uploadStory for video, randomId \(randomId)") - let entities = generateChatInputTextEntities(caption) - let _ = (context.engine.messages.uploadStory(target: target, media: .video(dimensions: dimensions, duration: duration, resource: resource, firstFrameFile: firstFrameFile, stickers: stickers), mediaAreas: mediaAreas, text: caption.string, entities: entities, pin: options.pin, privacy: options.privacy, isForwardingDisabled: options.isForwardingDisabled, period: options.timeout, randomId: randomId) - |> deliverOnMainQueue).startStandalone(next: { stableId in - moveStorySource(engine: context.engine, peerId: context.account.peerId, from: randomId, to: Int64(stableId)) - }) - - completionImpl() - } - } - } + self.proceedWithStoryUpload(target: target, result: result, existingMedia: nil, forwardInfo: nil, externalState: externalState, commit: commit) dismissCameraImpl?() }) - } as (Int64, MediaEditorScreen.Result?, [MediaArea], NSAttributedString, MediaEditorResultPrivacy, [TelegramMediaFile], @escaping (@escaping () -> Void) -> Void) -> Void + } as (MediaEditorScreen.Result, @escaping (@escaping () -> Void) -> Void) -> Void ) controller.cancelled = { showDraftTooltip in if showDraftTooltip { @@ -569,6 +466,138 @@ public final class TelegramRootController: NavigationController, 
TelegramRootCon }) } + public func proceedWithStoryUpload(target: Stories.PendingTarget, result: MediaEditorScreenResult, existingMedia: EngineMedia?, forwardInfo: Stories.PendingForwardInfo?, externalState: MediaEditorTransitionOutExternalState, commit: @escaping (@escaping () -> Void) -> Void) { + guard let result = result as? MediaEditorScreen.Result else { + return + } + let context = self.context + let targetPeerId: EnginePeer.Id + switch target { + case let .peer(peerId): + targetPeerId = peerId + case .myStories: + targetPeerId = context.account.peerId + } + + if let rootTabController = self.rootTabController { + if let index = rootTabController.controllers.firstIndex(where: { $0 is ChatListController}) { + rootTabController.selectedIndex = index + } + } + + let completionImpl: () -> Void = { [weak self] in + guard let self else { + return + } + + var chatListController: ChatListControllerImpl? + + if externalState.isPeerArchived { + var viewControllers = self.viewControllers + + let archiveController = ChatListControllerImpl(context: context, location: .chatList(groupId: .archive), controlsHistoryPreload: false, hideNetworkActivityStatus: false, previewing: false, enableDebugActions: false) + externalState.transitionOut = archiveController.storyCameraTransitionOut() + chatListController = archiveController + viewControllers.insert(archiveController, at: 1) + self.setViewControllers(viewControllers, animated: false) + } else { + chatListController = self.chatListController as? ChatListControllerImpl + externalState.transitionOut = chatListController?.storyCameraTransitionOut() + } + + if let chatListController { + let _ = (chatListController.hasPendingStories + |> filter { $0 } + |> take(1) + |> timeout(externalState.isPeerArchived ? 
0.5 : 0.25, queue: .mainQueue(), alternate: .single(true)) + |> deliverOnMainQueue).startStandalone(completed: { [weak chatListController] in + guard let chatListController else { + return + } + + chatListController.scrollToStories(peerId: targetPeerId) + Queue.mainQueue().justDispatch { + commit({}) + } + }) + } else { + Queue.mainQueue().justDispatch { + commit({}) + } + } + } + + if let _ = self.chatListController as? ChatListControllerImpl { + var media: EngineStoryInputMedia? + + if let mediaResult = result.media { + switch mediaResult { + case let .image(image, dimensions): + let tempFile = TempBox.shared.tempFile(fileName: "file") + defer { + TempBox.shared.dispose(tempFile) + } + if let imageData = compressImageToJPEG(image, quality: 0.7, tempFilePath: tempFile.path) { + media = .image(dimensions: dimensions, data: imageData, stickers: result.stickers) + } + case let .video(content, firstFrameImage, values, duration, dimensions): + let adjustments: VideoMediaResourceAdjustments + if let valuesData = try? JSONEncoder().encode(values) { + let data = MemoryBuffer(data: valuesData) + let digest = MemoryBuffer(data: data.md5Digest()) + adjustments = VideoMediaResourceAdjustments(data: data, digest: digest, isStory: true) + + let resource: TelegramMediaResource + switch content { + case let .imageFile(path): + resource = LocalFileVideoMediaResource(randomId: Int64.random(in: .min ... .max), path: path, adjustments: adjustments) + case let .videoFile(path): + resource = LocalFileVideoMediaResource(randomId: Int64.random(in: .min ... 
.max), path: path, adjustments: adjustments) + case let .asset(localIdentifier): + resource = VideoLibraryMediaResource(localIdentifier: localIdentifier, conversion: .compress(adjustments)) + } + let tempFile = TempBox.shared.tempFile(fileName: "file") + defer { + TempBox.shared.dispose(tempFile) + } + let imageData = firstFrameImage.flatMap { compressImageToJPEG($0, quality: 0.6, tempFilePath: tempFile.path) } + let firstFrameFile = imageData.flatMap { data -> TempBoxFile? in + let file = TempBox.shared.tempFile(fileName: "image.jpg") + if let _ = try? data.write(to: URL(fileURLWithPath: file.path)) { + return file + } else { + return nil + } + } + media = .video(dimensions: dimensions, duration: duration, resource: resource, firstFrameFile: firstFrameFile, stickers: result.stickers) + } + } + } else if let existingMedia { + media = .existing(media: existingMedia._asMedia()) + } + + if let media { + let _ = (context.engine.messages.uploadStory( + target: target, + media: media, + mediaAreas: result.mediaAreas, + text: result.caption.string, + entities: generateChatInputTextEntities(result.caption), + pin: result.options.pin, + privacy: result.options.privacy, + isForwardingDisabled: result.options.isForwardingDisabled, + period: result.options.timeout, + randomId: result.randomId, + forwardInfo: forwardInfo + ) + |> deliverOnMainQueue).startStandalone(next: { stableId in + moveStorySource(engine: context.engine, peerId: context.account.peerId, from: result.randomId, to: Int64(stableId)) + }) + } + completionImpl() + } + } + public func openSettings() { guard let rootTabController = self.rootTabController else { return @@ -581,3 +610,7 @@ public final class TelegramRootController: NavigationController, TelegramRootCon } } } + +extension MediaEditorScreen.Result: MediaEditorScreenResult { + +} diff --git a/submodules/TelegramUIPreferences/Sources/ExperimentalUISettings.swift b/submodules/TelegramUIPreferences/Sources/ExperimentalUISettings.swift index 
92ff1f19cf..faf8f47474 100644 --- a/submodules/TelegramUIPreferences/Sources/ExperimentalUISettings.swift +++ b/submodules/TelegramUIPreferences/Sources/ExperimentalUISettings.swift @@ -56,6 +56,7 @@ public struct ExperimentalUISettings: Codable, Equatable { public var unidirectionalSwipeToReply: Bool public var dustEffect: Bool public var callUIV2: Bool + public var allowWebViewInspection: Bool public static var defaultSettings: ExperimentalUISettings { return ExperimentalUISettings( @@ -89,7 +90,8 @@ public struct ExperimentalUISettings: Codable, Equatable { crashOnMemoryPressure: false, unidirectionalSwipeToReply: false, dustEffect: false, - callUIV2: false + callUIV2: false, + allowWebViewInspection: false ) } @@ -124,7 +126,8 @@ public struct ExperimentalUISettings: Codable, Equatable { crashOnMemoryPressure: Bool, unidirectionalSwipeToReply: Bool, dustEffect: Bool, - callUIV2: Bool + callUIV2: Bool, + allowWebViewInspection: Bool ) { self.keepChatNavigationStack = keepChatNavigationStack self.skipReadHistory = skipReadHistory @@ -157,6 +160,7 @@ public struct ExperimentalUISettings: Codable, Equatable { self.unidirectionalSwipeToReply = unidirectionalSwipeToReply self.dustEffect = dustEffect self.callUIV2 = callUIV2 + self.allowWebViewInspection = allowWebViewInspection } public init(from decoder: Decoder) throws { @@ -193,6 +197,7 @@ public struct ExperimentalUISettings: Codable, Equatable { self.unidirectionalSwipeToReply = try container.decodeIfPresent(Bool.self, forKey: "unidirectionalSwipeToReply") ?? false self.dustEffect = try container.decodeIfPresent(Bool.self, forKey: "dustEffect") ?? false self.callUIV2 = try container.decodeIfPresent(Bool.self, forKey: "callUIV2") ?? false + self.allowWebViewInspection = try container.decodeIfPresent(Bool.self, forKey: "allowWebViewInspection") ?? 
false } public func encode(to encoder: Encoder) throws { @@ -229,6 +234,7 @@ public struct ExperimentalUISettings: Codable, Equatable { try container.encode(self.unidirectionalSwipeToReply, forKey: "unidirectionalSwipeToReply") try container.encode(self.dustEffect, forKey: "dustEffect") try container.encode(self.callUIV2, forKey: "callUIV2") + try container.encode(self.allowWebViewInspection, forKey: "allowWebViewInspection") } } diff --git a/submodules/WallpaperBackgroundNode/Sources/WallpaperBackgroundNode.swift b/submodules/WallpaperBackgroundNode/Sources/WallpaperBackgroundNode.swift index b729239afe..f4de1aa775 100644 --- a/submodules/WallpaperBackgroundNode/Sources/WallpaperBackgroundNode.swift +++ b/submodules/WallpaperBackgroundNode/Sources/WallpaperBackgroundNode.swift @@ -82,7 +82,7 @@ public protocol WallpaperBackgroundNode: ASDisplayNode { var isReady: Signal { get } var rotation: CGFloat { get set } - func update(wallpaper: TelegramWallpaper) + func update(wallpaper: TelegramWallpaper, animated: Bool) func _internalUpdateIsSettingUpWallpaper() func updateLayout(size: CGSize, displayMode: WallpaperDisplayMode, transition: ContainedViewLayoutTransition) func updateIsLooping(_ isLooping: Bool) @@ -920,12 +920,22 @@ final class WallpaperBackgroundNodeImpl: ASDisplayNode, WallpaperBackgroundNode self.dimLayer.opacity = dimAlpha } - func update(wallpaper: TelegramWallpaper) { + func update(wallpaper: TelegramWallpaper, animated: Bool) { if self.wallpaper == wallpaper { return } + let previousWallpaper = self.wallpaper self.wallpaper = wallpaper - + + if let _ = previousWallpaper, animated { + if let snapshotView = self.view.snapshotView(afterScreenUpdates: false) { + self.view.addSubview(snapshotView) + snapshotView.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.3, removeOnCompletion: false, completion: { _ in + snapshotView.removeFromSuperview() + }) + } + } + var gradientColors: [UInt32] = [] var gradientAngle: Int32 = 0 diff --git 
a/submodules/WebUI/Sources/WebAppWebView.swift b/submodules/WebUI/Sources/WebAppWebView.swift index 34fa385c29..b3262d1262 100644 --- a/submodules/WebUI/Sources/WebAppWebView.swift +++ b/submodules/WebUI/Sources/WebAppWebView.swift @@ -35,39 +35,85 @@ private class WebViewTouchGestureRecognizer: UITapGestureRecognizer { } } +private let eventProxySource = "var TelegramWebviewProxyProto = function() {}; " + + "TelegramWebviewProxyProto.prototype.postEvent = function(eventName, eventData) { " + + "window.webkit.messageHandlers.performAction.postMessage({'eventName': eventName, 'eventData': eventData}); " + + "}; " + +"var TelegramWebviewProxy = new TelegramWebviewProxyProto();" + +private let selectionSource = "var css = '*{-webkit-touch-callout:none;} :not(input):not(textarea):not([\"contenteditable\"=\"true\"]){-webkit-user-select:none;}';" + + " var head = document.head || document.getElementsByTagName('head')[0];" + + " var style = document.createElement('style'); style.type = 'text/css';" + + " style.appendChild(document.createTextNode(css)); head.appendChild(style);" + +private let videoSource = """ +function disableWebkitEnterFullscreen(videoElement) { + if (videoElement && videoElement.webkitEnterFullscreen) { + Object.defineProperty(videoElement, 'webkitEnterFullscreen', { + value: undefined + }); + } +} + +function disableFullscreenOnExistingVideos() { + document.querySelectorAll('video').forEach(disableWebkitEnterFullscreen); +} + +function handleMutations(mutations) { + mutations.forEach((mutation) => { + if (mutation.addedNodes && mutation.addedNodes.length > 0) { + mutation.addedNodes.forEach((newNode) => { + if (newNode.tagName === 'VIDEO') { + disableWebkitEnterFullscreen(newNode); + } + if (newNode.querySelectorAll) { + newNode.querySelectorAll('video').forEach(disableWebkitEnterFullscreen); + } + }); + } + }); +} + +disableFullscreenOnExistingVideos(); + +const observer = new MutationObserver(handleMutations); + +observer.observe(document.body, { + 
childList: true, + subtree: true +}); + +function disconnectObserver() { + observer.disconnect(); +} +""" + final class WebAppWebView: WKWebView { var handleScriptMessage: (WKScriptMessage) -> Void = { _ in } init() { let configuration = WKWebViewConfiguration() - let userController = WKUserContentController() - - let js = "var TelegramWebviewProxyProto = function() {}; " + - "TelegramWebviewProxyProto.prototype.postEvent = function(eventName, eventData) { " + - "window.webkit.messageHandlers.performAction.postMessage({'eventName': eventName, 'eventData': eventData}); " + - "}; " + - "var TelegramWebviewProxy = new TelegramWebviewProxyProto();" - + let contentController = WKUserContentController() + var handleScriptMessageImpl: ((WKScriptMessage) -> Void)? - let userScript = WKUserScript(source: js, injectionTime: .atDocumentStart, forMainFrameOnly: false) - userController.addUserScript(userScript) - userController.add(WeakGameScriptMessageHandler { message in + let eventProxyScript = WKUserScript(source: eventProxySource, injectionTime: .atDocumentStart, forMainFrameOnly: false) + contentController.addUserScript(eventProxyScript) + contentController.add(WeakGameScriptMessageHandler { message in handleScriptMessageImpl?(message) }, name: "performAction") - let selectionString = "var css = '*{-webkit-touch-callout:none;} :not(input):not(textarea):not([\"contenteditable\"=\"true\"]){-webkit-user-select:none;}';" - + " var head = document.head || document.getElementsByTagName('head')[0];" - + " var style = document.createElement('style'); style.type = 'text/css';" + - " style.appendChild(document.createTextNode(css)); head.appendChild(style);" - let selectionScript: WKUserScript = WKUserScript(source: selectionString, injectionTime: .atDocumentEnd, forMainFrameOnly: true) - userController.addUserScript(selectionScript) + let selectionScript = WKUserScript(source: selectionSource, injectionTime: .atDocumentEnd, forMainFrameOnly: true) + 
contentController.addUserScript(selectionScript) - configuration.userContentController = userController + let videoScript = WKUserScript(source: videoSource, injectionTime: .atDocumentStart, forMainFrameOnly: false) + contentController.addUserScript(videoScript) + + configuration.userContentController = contentController configuration.allowsInlineMediaPlayback = true configuration.allowsPictureInPictureMediaPlayback = false if #available(iOS 10.0, *) { - configuration.mediaTypesRequiringUserActionForPlayback = .all + configuration.mediaTypesRequiringUserActionForPlayback = .audio } else { configuration.mediaPlaybackRequiresUserAction = true }