[WIP] View-once audio messages

Ilya Laktyushin 2023-12-29 04:50:22 +04:00
parent 6aa462ba8b
commit f11c070862
62 changed files with 1351 additions and 131 deletions

View File

@ -10841,3 +10841,12 @@ Sorry for the inconvenience.";
"RequestPeer.ReachedMaximum_any" = "You can select up to %@ users.";
"ChatList.DeleteSavedPeerConfirmation" = "Are you sure you want to delete saved messages from %@?";
"Message.VoiceMessageExpired" = "Expired voice message";
"Message.VideoMessageExpired" = "Expired video message";
"Chat.PlayOnceVoiceMessageTooltip" = "This voice message can only be played once.";
"Chat.PlayOnceVoiceMessageYourTooltip" = "This message will disappear once **%@** plays it once.";
"Chat.TapToPlayVoiceMessageOnceTooltip" = "Tap to set this message to **Play Once**";
"Chat.PlayVoiceMessageOnceTooltip" = "The recipients will be able to listen to it only once.";

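The `%@` placeholder in the new tooltip strings is substituted with the recipient's display name at presentation time, and the `**…**` markers are rendered as bold by the tooltip UI. A minimal sketch of the substitution, using plain `String(format:)` rather than the project's generated string accessors (which this hunk does not show); the peer name is an example value:

```swift
// Illustration only; the project resolves strings through PresentationStrings accessors.
let format = "This message will disappear once **%@** plays it once."
let peerName = "Anna"
let tooltipText = String(format: format, peerName)
// tooltipText == "This message will disappear once **Anna** plays it once."
```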
View File

@ -896,7 +896,7 @@ public protocol SharedAccountContext: AnyObject {
selectedMessages: Signal<Set<MessageId>?, NoError>,
mode: ChatHistoryListMode
) -> ChatHistoryListNode
func makeChatMessagePreviewItem(context: AccountContext, messages: [Message], theme: PresentationTheme, strings: PresentationStrings, wallpaper: TelegramWallpaper, fontSize: PresentationFontSize, chatBubbleCorners: PresentationChatBubbleCorners, dateTimeFormat: PresentationDateTimeFormat, nameOrder: PresentationPersonNameOrder, forcedResourceStatus: FileMediaResourceStatus?, tapMessage: ((Message) -> Void)?, clickThroughMessage: (() -> Void)?, backgroundNode: ASDisplayNode?, availableReactions: AvailableReactions?, accountPeer: Peer?, isCentered: Bool) -> ListViewItem
func makeChatMessagePreviewItem(context: AccountContext, messages: [Message], theme: PresentationTheme, strings: PresentationStrings, wallpaper: TelegramWallpaper, fontSize: PresentationFontSize, chatBubbleCorners: PresentationChatBubbleCorners, dateTimeFormat: PresentationDateTimeFormat, nameOrder: PresentationPersonNameOrder, forcedResourceStatus: FileMediaResourceStatus?, tapMessage: ((Message) -> Void)?, clickThroughMessage: (() -> Void)?, backgroundNode: ASDisplayNode?, availableReactions: AvailableReactions?, accountPeer: Peer?, isCentered: Bool, isPreview: Bool) -> ListViewItem
func makeChatMessageDateHeaderItem(context: AccountContext, timestamp: Int32, theme: PresentationTheme, strings: PresentationStrings, wallpaper: TelegramWallpaper, fontSize: PresentationFontSize, chatBubbleCorners: PresentationChatBubbleCorners, dateTimeFormat: PresentationDateTimeFormat, nameOrder: PresentationPersonNameOrder) -> ListViewItemHeader
func makeChatMessageAvatarHeaderItem(context: AccountContext, timestamp: Int32, peer: Peer, message: Message, theme: PresentationTheme, strings: PresentationStrings, wallpaper: TelegramWallpaper, fontSize: PresentationFontSize, chatBubbleCorners: PresentationChatBubbleCorners, dateTimeFormat: PresentationDateTimeFormat, nameOrder: PresentationPersonNameOrder) -> ListViewItemHeader
func makePeerSharedMediaController(context: AccountContext, peerId: PeerId) -> ViewController?

View File

@ -12,18 +12,21 @@ public enum SharedMediaPlaybackDataType {
}
public enum SharedMediaPlaybackDataSource: Equatable {
case telegramFile(reference: FileMediaReference, isCopyProtected: Bool)
case telegramFile(reference: FileMediaReference, isCopyProtected: Bool, isViewOnce: Bool)
public static func ==(lhs: SharedMediaPlaybackDataSource, rhs: SharedMediaPlaybackDataSource) -> Bool {
switch lhs {
case let .telegramFile(lhsFileReference, lhsIsCopyProtected):
if case let .telegramFile(rhsFileReference, rhsIsCopyProtected) = rhs {
case let .telegramFile(lhsFileReference, lhsIsCopyProtected, lhsIsViewOnce):
if case let .telegramFile(rhsFileReference, rhsIsCopyProtected, rhsIsViewOnce) = rhs {
if !lhsFileReference.media.isEqual(to: rhsFileReference.media) {
return false
}
if lhsIsCopyProtected != rhsIsCopyProtected {
return false
}
if lhsIsViewOnce != rhsIsViewOnce {
return false
}
return true
} else {
return false

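`SharedMediaPlaybackDataSource.telegramFile` now carries an `isViewOnce` flag alongside `isCopyProtected`. Consumers can detect view-once playback by pattern-matching the extended case; a minimal sketch, mirroring the `TelegramBaseController` change later in this commit (`playbackData` is assumed to be an existing `SharedMediaPlaybackData` value):

```swift
// Sketch: decide whether persistent playback UI should be suppressed for view-once audio.
var isViewOnceMessage = false
if case let .telegramFile(_, _, isViewOnce) = playbackData.source, isViewOnce {
    isViewOnceMessage = true
}
```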
View File

@ -827,7 +827,7 @@ final class AttachmentPanel: ASDisplayNode, UIScrollViewDelegate {
}, stopMediaRecording: {
}, lockMediaRecording: {
}, deleteRecordedMedia: {
}, sendRecordedMedia: { _ in
}, sendRecordedMedia: { _, _ in
}, displayRestrictedInfo: { _, _ in
}, displayVideoUnmuteTip: { _ in
}, switchMediaRecordingMode: {

View File

@ -20,6 +20,7 @@ swift_library(
"//submodules/TelegramUIPreferences:TelegramUIPreferences",
"//submodules/TelegramPresentationData:TelegramPresentationData",
"//submodules/ChatContextQuery",
"//submodules/TooltipUI",
],
visibility = [
"//visibility:public",

View File

@ -7,6 +7,7 @@ import TelegramCore
import Display
import AccountContext
import ContextUI
import TooltipUI
public enum ChatLoadingMessageSubject {
case generic
@ -16,7 +17,7 @@ public enum ChatLoadingMessageSubject {
public enum ChatFinishMediaRecordingAction {
case dismiss
case preview
case send
case send(viewOnce: Bool)
}
public final class ChatPanelInterfaceInteractionStatuses {
@ -108,7 +109,7 @@ public final class ChatPanelInterfaceInteraction {
public let stopMediaRecording: () -> Void
public let lockMediaRecording: () -> Void
public let deleteRecordedMedia: () -> Void
public let sendRecordedMedia: (Bool) -> Void
public let sendRecordedMedia: (Bool, Bool) -> Void
public let displayRestrictedInfo: (ChatPanelRestrictionInfoSubject, ChatPanelRestrictionInfoDisplayType) -> Void
public let displayVideoUnmuteTip: (CGPoint?) -> Void
public let switchMediaRecordingMode: () -> Void
@ -214,7 +215,7 @@ public final class ChatPanelInterfaceInteraction {
stopMediaRecording: @escaping () -> Void,
lockMediaRecording: @escaping () -> Void,
deleteRecordedMedia: @escaping () -> Void,
sendRecordedMedia: @escaping (Bool) -> Void,
sendRecordedMedia: @escaping (Bool, Bool) -> Void,
displayRestrictedInfo: @escaping (ChatPanelRestrictionInfoSubject, ChatPanelRestrictionInfoDisplayType) -> Void,
displayVideoUnmuteTip: @escaping (CGPoint?) -> Void,
switchMediaRecordingMode: @escaping () -> Void,
@ -431,7 +432,7 @@ public final class ChatPanelInterfaceInteraction {
}, stopMediaRecording: {
}, lockMediaRecording: {
}, deleteRecordedMedia: {
}, sendRecordedMedia: { _ in
}, sendRecordedMedia: { _, _ in
}, displayRestrictedInfo: { _, _ in
}, displayVideoUnmuteTip: { _ in
}, switchMediaRecordingMode: {

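`ChatFinishMediaRecordingAction.send` now carries a `viewOnce` flag, and `sendRecordedMedia` takes a second `Bool` to forward it. A minimal sketch of how the two fit together; the meaning of the first `Bool` is not shown in this hunk and is assumed here to be the pre-existing "send silently" flag:

```swift
// Sketch, not the commit's actual call site.
func handle(_ action: ChatFinishMediaRecordingAction, interaction: ChatPanelInterfaceInteraction) {
    switch action {
    case .dismiss, .preview:
        break
    case let .send(viewOnce):
        // First argument assumed to be the existing "send silently" flag.
        interaction.sendRecordedMedia(false, viewOnce)
    }
}
```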
View File

@ -2215,13 +2215,15 @@ public final class ContextController: ViewController, StandalonePresentableContr
public let source: ContextContentSource
public let items: Signal<ContextController.Items, NoError>
public let closeActionTitle: String?
public let closeAction: (() -> Void)?
public init(id: AnyHashable, title: String, source: ContextContentSource, items: Signal<ContextController.Items, NoError>, closeActionTitle: String? = nil) {
public init(id: AnyHashable, title: String, source: ContextContentSource, items: Signal<ContextController.Items, NoError>, closeActionTitle: String? = nil, closeAction: (() -> Void)? = nil) {
self.id = id
self.title = title
self.source = source
self.items = items
self.closeActionTitle = closeActionTitle
self.closeAction = closeAction
}
}

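The source description for multi-source context controllers gains an optional `closeAction`, which (per the `ContextSourceContainer` hunks below) is invoked instead of the default dismissal when the close button is tapped. A minimal sketch of constructing such a source; the nested type's name is not visible in this hunk and `ContextController.Source` is assumed, as are `mySource` and `myItems`:

```swift
// Sketch: override the close button's behaviour for one source.
let recordingSource = ContextController.Source(
    id: AnyHashable("recording"),
    title: "Recording",
    source: mySource,   // assumed existing ContextContentSource
    items: myItems,     // assumed existing Signal<ContextController.Items, NoError>
    closeActionTitle: "Send",
    closeAction: {
        // Custom handling instead of dismiss(result: .dismissWithoutContent, completion: nil).
    }
)
```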
View File

@ -1121,13 +1121,12 @@ final class ContextControllerExtractedPresentationNode: ASDisplayNode, ContextCo
var animationInContentXDistance: CGFloat = 0.0
let contentX = contentParentGlobalFrame.minX + contentRect.minX - contentNode.containingItem.contentRect.minX
let contentY = contentParentGlobalFrame.minY + contentRect.minY - contentNode.containingItem.contentRect.minY
let contentWidth = contentNode.containingItem.view.bounds.size.width
let contentHeight = contentNode.containingItem.view.bounds.size.height
if case let .extracted(extracted) = self.source, extracted.centerVertically {
if actionsSize.height.isZero {
let fixedContentY = floorToScreenPixels((layout.size.height - contentHeight) / 2.0)
animationInContentYDistance = fixedContentY - contentY
animationInContentYDistance = fixedContentY - contentRect.minY
} else if contentX + contentWidth > layout.size.width / 2.0, actionsSize.height > 0.0 {
let fixedContentX = layout.size.width - (contentX + contentWidth)
animationInContentXDistance = fixedContentX - contentX

View File

@ -18,6 +18,7 @@ final class ContextSourceContainer: ASDisplayNode {
let title: String
let source: ContextContentSource
let closeActionTitle: String?
let closeAction: (() -> Void)?
private var _presentationNode: ContextControllerPresentationNode?
var presentationNode: ContextControllerPresentationNode {
@ -43,13 +44,15 @@ final class ContextSourceContainer: ASDisplayNode {
title: String,
source: ContextContentSource,
items: Signal<ContextController.Items, NoError>,
closeActionTitle: String? = nil
closeActionTitle: String? = nil,
closeAction: (() -> Void)? = nil
) {
self.controller = controller
self.id = id
self.title = title
self.source = source
self.closeActionTitle = closeActionTitle
self.closeAction = closeAction
self.ready.set(combineLatest(queue: .mainQueue(), self.contentReady.get(), self.actionsReady.get())
|> map { a, b -> Bool in
@ -385,7 +388,8 @@ final class ContextSourceContainer: ASDisplayNode {
title: source.title,
source: source.source,
items: source.items,
closeActionTitle: source.closeActionTitle
closeActionTitle: source.closeActionTitle,
closeAction: source.closeAction
)
self.sources.append(mappedSource)
self.addSubnode(mappedSource.presentationNode)
@ -472,17 +476,27 @@ final class ContextSourceContainer: ASDisplayNode {
}
func animateOut(result: ContextMenuActionResult, completion: @escaping () -> Void) {
self.backgroundNode.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2, removeOnCompletion: false)
let delayDismissal = self.activeSource?.closeAction != nil
let delay: Double = delayDismissal ? 0.2 : 0.0
let duration: Double = delayDismissal ? 0.35 : 0.2
self.backgroundNode.layer.animateAlpha(from: 1.0, to: 0.0, duration: duration, delay: delay, removeOnCompletion: false, completion: { _ in
if delayDismissal {
Queue.mainQueue().after(0.55) {
completion()
}
}
})
if let tabSelectorView = self.tabSelector?.view {
tabSelectorView.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2, removeOnCompletion: false)
tabSelectorView.layer.animateAlpha(from: 1.0, to: 0.0, duration: duration, delay: delay, removeOnCompletion: false)
}
if let closeButtonView = self.closeButton?.view {
closeButtonView.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2, removeOnCompletion: false)
closeButtonView.layer.animateAlpha(from: 1.0, to: 0.0, duration: duration, delay: delay, removeOnCompletion: false)
}
if let activeSource = self.activeSource {
activeSource.animateOut(result: result, completion: completion)
activeSource.animateOut(result: result, completion: delayDismissal ? {} : completion)
} else {
completion()
}
@ -670,11 +684,15 @@ final class ContextSourceContainer: ASDisplayNode {
)
),
effectAlignment: .center,
action: { [weak self] in
action: { [weak self, weak source] in
guard let self else {
return
}
self.controller?.dismiss(result: .dismissWithoutContent, completion: nil)
if let source, let closeAction = source.closeAction {
closeAction()
} else {
self.controller?.dismiss(result: .dismissWithoutContent, completion: nil)
}
})
),
environment: {},

View File

@ -47,8 +47,4 @@ open class ASImageNode: ASDisplayNode {
override public func calculateSizeThatFits(_ contrainedSize: CGSize) -> CGSize {
return self.image?.size ?? CGSize()
}
public var asdf: Int {
return 1234
}
}

View File

@ -49,13 +49,13 @@ final class InstantPageMediaPlaylistItem: SharedMediaPlaylistItem {
switch attribute {
case let .Audio(isVoice, _, _, _, _):
if isVoice {
return SharedMediaPlaybackData(type: .voice, source: .telegramFile(reference: .webPage(webPage: WebpageReference(self.webPage), media: file), isCopyProtected: false))
return SharedMediaPlaybackData(type: .voice, source: .telegramFile(reference: .webPage(webPage: WebpageReference(self.webPage), media: file), isCopyProtected: false, isViewOnce: false))
} else {
return SharedMediaPlaybackData(type: .music, source: .telegramFile(reference: .webPage(webPage: WebpageReference(self.webPage), media: file), isCopyProtected: false))
return SharedMediaPlaybackData(type: .music, source: .telegramFile(reference: .webPage(webPage: WebpageReference(self.webPage), media: file), isCopyProtected: false, isViewOnce: false))
}
case let .Video(_, _, flags, _):
if flags.contains(.instantRoundVideo) {
return SharedMediaPlaybackData(type: .instantVideo, source: .telegramFile(reference: .webPage(webPage: WebpageReference(self.webPage), media: file), isCopyProtected: false))
return SharedMediaPlaybackData(type: .instantVideo, source: .telegramFile(reference: .webPage(webPage: WebpageReference(self.webPage), media: file), isCopyProtected: false, isViewOnce: false))
} else {
return nil
}
@ -64,12 +64,12 @@ final class InstantPageMediaPlaylistItem: SharedMediaPlaylistItem {
}
}
if file.mimeType.hasPrefix("audio/") {
return SharedMediaPlaybackData(type: .music, source: .telegramFile(reference: .webPage(webPage: WebpageReference(self.webPage), media: file), isCopyProtected: false))
return SharedMediaPlaybackData(type: .music, source: .telegramFile(reference: .webPage(webPage: WebpageReference(self.webPage), media: file), isCopyProtected: false, isViewOnce: false))
}
if let fileName = file.fileName {
let ext = (fileName as NSString).pathExtension.lowercased()
if ext == "wav" || ext == "opus" {
return SharedMediaPlaybackData(type: .music, source: .telegramFile(reference: .webPage(webPage: WebpageReference(self.webPage), media: file), isCopyProtected: false))
return SharedMediaPlaybackData(type: .music, source: .telegramFile(reference: .webPage(webPage: WebpageReference(self.webPage), media: file), isCopyProtected: false, isViewOnce: false))
}
}
}

View File

@ -859,10 +859,10 @@ final class MediaPickerSelectedListNode: ASDisplayNode, UIScrollViewDelegate, UI
let previewText = groupLayouts.count > 1 ? presentationData.strings.Attachment_MessagesPreview : presentationData.strings.Attachment_MessagePreview
let previewMessage = Message(stableId: 0, stableVersion: 0, id: MessageId(peerId: peerId, namespace: 0, id: 0), globallyUniqueId: nil, groupingKey: nil, groupInfo: nil, threadId: nil, timestamp: 0, flags: [], tags: [], globalTags: [], localTags: [], forwardInfo: nil, author: peers[peerId], text: "", attributes: [], media: [TelegramMediaAction(action: .customText(text: previewText, entities: [], additionalAttributes: nil))], peers: peers, associatedMessages: SimpleDictionary(), associatedMessageIds: [], associatedMedia: [:], associatedThreadInfo: nil, associatedStories: [:])
let previewItem = self.context.sharedContext.makeChatMessagePreviewItem(context: context, messages: [previewMessage], theme: theme, strings: presentationData.strings, wallpaper: wallpaper, fontSize: presentationData.chatFontSize, chatBubbleCorners: bubbleCorners, dateTimeFormat: presentationData.dateTimeFormat, nameOrder: presentationData.nameDisplayOrder, forcedResourceStatus: nil, tapMessage: nil, clickThroughMessage: nil, backgroundNode: self.wallpaperBackgroundNode, availableReactions: nil, accountPeer: nil, isCentered: true)
let previewItem = self.context.sharedContext.makeChatMessagePreviewItem(context: context, messages: [previewMessage], theme: theme, strings: presentationData.strings, wallpaper: wallpaper, fontSize: presentationData.chatFontSize, chatBubbleCorners: bubbleCorners, dateTimeFormat: presentationData.dateTimeFormat, nameOrder: presentationData.nameDisplayOrder, forcedResourceStatus: nil, tapMessage: nil, clickThroughMessage: nil, backgroundNode: self.wallpaperBackgroundNode, availableReactions: nil, accountPeer: nil, isCentered: true, isPreview: true)
let dragMessage = Message(stableId: 0, stableVersion: 0, id: MessageId(peerId: peerId, namespace: 0, id: 0), globallyUniqueId: nil, groupingKey: nil, groupInfo: nil, threadId: nil, timestamp: 0, flags: [], tags: [], globalTags: [], localTags: [], forwardInfo: nil, author: peers[peerId], text: "", attributes: [], media: [TelegramMediaAction(action: .customText(text: presentationData.strings.Attachment_DragToReorder, entities: [], additionalAttributes: nil))], peers: peers, associatedMessages: SimpleDictionary(), associatedMessageIds: [], associatedMedia: [:], associatedThreadInfo: nil, associatedStories: [:])
let dragItem = self.context.sharedContext.makeChatMessagePreviewItem(context: context, messages: [dragMessage], theme: theme, strings: presentationData.strings, wallpaper: wallpaper, fontSize: presentationData.chatFontSize, chatBubbleCorners: bubbleCorners, dateTimeFormat: presentationData.dateTimeFormat, nameOrder: presentationData.nameDisplayOrder, forcedResourceStatus: nil, tapMessage: nil, clickThroughMessage: nil, backgroundNode: self.wallpaperBackgroundNode, availableReactions: nil, accountPeer: nil, isCentered: true)
let dragItem = self.context.sharedContext.makeChatMessagePreviewItem(context: context, messages: [dragMessage], theme: theme, strings: presentationData.strings, wallpaper: wallpaper, fontSize: presentationData.chatFontSize, chatBubbleCorners: bubbleCorners, dateTimeFormat: presentationData.dateTimeFormat, nameOrder: presentationData.nameDisplayOrder, forcedResourceStatus: nil, tapMessage: nil, clickThroughMessage: nil, backgroundNode: self.wallpaperBackgroundNode, availableReactions: nil, accountPeer: nil, isCentered: true, isPreview: true)
let headerItems: [ListViewItem] = [previewItem, dragItem]

View File

@ -2141,7 +2141,7 @@ public func chatSecretMessageVideo(account: Account, userLocation: MediaResource
if blurredImage == nil {
if let thumbnailData = thumbnailData, let imageSource = CGImageSourceCreateWithData(thumbnailData as CFData, nil), let image = CGImageSourceCreateImageAtIndex(imageSource, 0, nil) {
let thumbnailSize = CGSize(width: image.width, height: image.height)
let thumbnailContextSize = thumbnailSize.aspectFilled(CGSize(width: 20.0, height: 20.0))
let thumbnailContextSize = thumbnailSize.aspectFilled(CGSize(width: 40.0, height: 40.0))
if let thumbnailContext = DrawingContext(size: thumbnailContextSize, scale: 1.0) {
thumbnailContext.withFlippedContext { c in
c.interpolationQuality = .none

View File

@ -237,8 +237,11 @@ public func PremiumBoostScreen(
actions: [
TextAlertAction(type: .defaultAction, title: presentationData.strings.ChannelBoost_MoreBoosts_Gift, action: {
dismissImpl?()
let controller = context.sharedContext.makePremiumGiftController(context: context, source: .channelBoost)
pushController(controller)
Queue.mainQueue().after(0.4) {
let controller = context.sharedContext.makePremiumGiftController(context: context, source: .channelBoost)
pushController(controller)
}
}),
TextAlertAction(type: .genericAction, title: presentationData.strings.Common_Close, action: {})
],

View File

@ -1034,8 +1034,9 @@ private final class LimitSheetContent: CombinedComponent {
string = component.count >= premiumLimit ? strings.Premium_MaxPinsFinalText("\(premiumLimit)").string : strings.Premium_MaxPinsText("\(limit)", "\(premiumLimit)").string
defaultValue = component.count > limit ? "\(limit)" : ""
premiumValue = component.count >= premiumLimit ? "" : "\(premiumLimit)"
badgePosition = CGFloat(component.count) / CGFloat(premiumLimit)
badgePosition = max(0.15, min(0.85, CGFloat(component.count) / CGFloat(premiumLimit)))
badgeGraphPosition = badgePosition
buttonAnimationName = nil
if isPremiumDisabled {
badgeText = "\(limit)"

View File

@ -143,7 +143,6 @@ final class RadialStatusSecretTimeoutContentNode: RadialStatusContentNode {
return
}
let absoluteTimestamp = CFAbsoluteTimeGetCurrent() + NSTimeIntervalSince1970
let alphaProgress: CGFloat

View File

@ -68,7 +68,7 @@ final class SemanticStatusNodeSecretTimeoutContext: SemanticStatusNodeStateConte
context.translateBy(x: size.width / 2.0, y: size.height / 2.0)
context.scaleBy(x: 1.0, y: -1.0)
context.translateBy(x: -size.width / 2.0, y: -size.height / 2.0)
context.translateBy(x: 4.0, y: 7.0)
context.translateBy(x: 6.0, y: 8.0)
context.clip(to: iconRect, mask: iconImage.cgImage!)
context.fill(iconRect)
context.restoreGState()
@ -218,9 +218,9 @@ private struct ContentParticle {
private final class FireIconNode: ManagedAnimationNode {
init() {
super.init(size: CGSize(width: 36.0, height: 36.0))
super.init(size: CGSize(width: 32.0, height: 32.0))
self.trackTo(item: ManagedAnimationItem(source: .local("anim_autoremove_on"), frames: .range(startFrame: 0, endFrame: 80), duration: 2.5))
self.trackTo(item: ManagedAnimationItem(source: .local("anim_autoremove_on"), frames: .range(startFrame: 80, endFrame: 115), duration: 0.85, loop: true))
self.trackTo(item: ManagedAnimationItem(source: .local("anim_flame_1"), frames: .range(startFrame: 0, endFrame: 60), duration: 1.5))
self.trackTo(item: ManagedAnimationItem(source: .local("anim_flame_2"), frames: .range(startFrame: 0, endFrame: 120), duration: 2.0, loop: true))
}
}

View File

@ -170,20 +170,20 @@ private final class BubbleSettingsControllerNode: ASDisplayNode, UIScrollViewDel
messages[replyMessageId] = Message(stableId: 3, stableVersion: 0, id: replyMessageId, globallyUniqueId: nil, groupingKey: nil, groupInfo: nil, threadId: nil, timestamp: 66000, flags: [], tags: [], globalTags: [], localTags: [], forwardInfo: nil, author: peers[otherPeerId], text: self.presentationData.strings.Appearance_ThemePreview_Chat_1_Text, attributes: [], media: [], peers: peers, associatedMessages: SimpleDictionary(), associatedMessageIds: [], associatedMedia: [:], associatedThreadInfo: nil, associatedStories: [:])
let message1 = Message(stableId: 4, stableVersion: 0, id: MessageId(peerId: otherPeerId, namespace: 0, id: 4), globallyUniqueId: nil, groupingKey: nil, groupInfo: nil, threadId: nil, timestamp: 66003, flags: [], tags: [], globalTags: [], localTags: [], forwardInfo: nil, author: peers[otherPeerId], text: self.presentationData.strings.Appearance_ThemePreview_Chat_3_Text, attributes: [], media: [], peers: peers, associatedMessages: messages, associatedMessageIds: [], associatedMedia: [:], associatedThreadInfo: nil, associatedStories: [:])
items.append(self.context.sharedContext.makeChatMessagePreviewItem(context: self.context, messages: [message1], theme: self.presentationData.theme, strings: self.presentationData.strings, wallpaper: self.presentationData.chatWallpaper, fontSize: self.presentationData.chatFontSize, chatBubbleCorners: self.presentationData.chatBubbleCorners, dateTimeFormat: self.presentationData.dateTimeFormat, nameOrder: self.presentationData.nameDisplayOrder, forcedResourceStatus: nil, tapMessage: nil, clickThroughMessage: nil, backgroundNode: self.chatBackgroundNode, availableReactions: nil, accountPeer: nil, isCentered: false))
items.append(self.context.sharedContext.makeChatMessagePreviewItem(context: self.context, messages: [message1], theme: self.presentationData.theme, strings: self.presentationData.strings, wallpaper: self.presentationData.chatWallpaper, fontSize: self.presentationData.chatFontSize, chatBubbleCorners: self.presentationData.chatBubbleCorners, dateTimeFormat: self.presentationData.dateTimeFormat, nameOrder: self.presentationData.nameDisplayOrder, forcedResourceStatus: nil, tapMessage: nil, clickThroughMessage: nil, backgroundNode: self.chatBackgroundNode, availableReactions: nil, accountPeer: nil, isCentered: false, isPreview: true))
let message2 = Message(stableId: 3, stableVersion: 0, id: MessageId(peerId: peerId, namespace: 0, id: 3), globallyUniqueId: nil, groupingKey: nil, groupInfo: nil, threadId: nil, timestamp: 66002, flags: [.Incoming], tags: [], globalTags: [], localTags: [], forwardInfo: nil, author: peers[peerId], text: self.presentationData.strings.Appearance_ThemePreview_Chat_2_Text, attributes: [ReplyMessageAttribute(messageId: replyMessageId, threadMessageId: nil, quote: nil, isQuote: false)], media: [], peers: peers, associatedMessages: messages, associatedMessageIds: [], associatedMedia: [:], associatedThreadInfo: nil, associatedStories: [:])
items.append(self.context.sharedContext.makeChatMessagePreviewItem(context: self.context, messages: [message2], theme: self.presentationData.theme, strings: self.presentationData.strings, wallpaper: self.presentationData.chatWallpaper, fontSize: self.presentationData.chatFontSize, chatBubbleCorners: self.presentationData.chatBubbleCorners, dateTimeFormat: self.presentationData.dateTimeFormat, nameOrder: self.presentationData.nameDisplayOrder, forcedResourceStatus: nil, tapMessage: nil, clickThroughMessage: nil, backgroundNode: self.chatBackgroundNode, availableReactions: nil, accountPeer: nil, isCentered: false))
items.append(self.context.sharedContext.makeChatMessagePreviewItem(context: self.context, messages: [message2], theme: self.presentationData.theme, strings: self.presentationData.strings, wallpaper: self.presentationData.chatWallpaper, fontSize: self.presentationData.chatFontSize, chatBubbleCorners: self.presentationData.chatBubbleCorners, dateTimeFormat: self.presentationData.dateTimeFormat, nameOrder: self.presentationData.nameDisplayOrder, forcedResourceStatus: nil, tapMessage: nil, clickThroughMessage: nil, backgroundNode: self.chatBackgroundNode, availableReactions: nil, accountPeer: nil, isCentered: false, isPreview: true))
let waveformBase64 = "DAAOAAkACQAGAAwADwAMABAADQAPABsAGAALAA0AGAAfABoAHgATABgAGQAYABQADAAVABEAHwANAA0ACQAWABkACQAOAAwACQAfAAAAGQAVAAAAEwATAAAACAAfAAAAHAAAABwAHwAAABcAGQAAABQADgAAABQAHwAAAB8AHwAAAAwADwAAAB8AEwAAABoAFwAAAB8AFAAAAAAAHwAAAAAAHgAAAAAAHwAAAAAAHwAAAAAAHwAAAAAAHwAAAAAAHwAAAAAAAAA="
let voiceAttributes: [TelegramMediaFileAttribute] = [.Audio(isVoice: true, duration: 23, title: nil, performer: nil, waveform: Data(base64Encoded: waveformBase64)!)]
let voiceMedia = TelegramMediaFile(fileId: MediaId(namespace: 0, id: 0), partialReference: nil, resource: LocalFileMediaResource(fileId: 0), previewRepresentations: [], videoThumbnails: [], immediateThumbnailData: nil, mimeType: "audio/ogg", size: 0, attributes: voiceAttributes)
let message3 = Message(stableId: 1, stableVersion: 0, id: MessageId(peerId: peerId, namespace: 0, id: 1), globallyUniqueId: nil, groupingKey: nil, groupInfo: nil, threadId: nil, timestamp: 66001, flags: [.Incoming], tags: [], globalTags: [], localTags: [], forwardInfo: nil, author: peers[peerId], text: "", attributes: [], media: [voiceMedia], peers: peers, associatedMessages: messages, associatedMessageIds: [], associatedMedia: [:], associatedThreadInfo: nil, associatedStories: [:])
items.append(self.context.sharedContext.makeChatMessagePreviewItem(context: self.context, messages: [message3], theme: self.presentationData.theme, strings: self.presentationData.strings, wallpaper: self.presentationData.chatWallpaper, fontSize: self.presentationData.chatFontSize, chatBubbleCorners: self.presentationData.chatBubbleCorners, dateTimeFormat: self.presentationData.dateTimeFormat, nameOrder: self.presentationData.nameDisplayOrder, forcedResourceStatus: FileMediaResourceStatus(mediaStatus: .playbackStatus(.paused), fetchStatus: .Local), tapMessage: nil, clickThroughMessage: nil, backgroundNode: self.chatBackgroundNode, availableReactions: nil, accountPeer: nil, isCentered: false))
items.append(self.context.sharedContext.makeChatMessagePreviewItem(context: self.context, messages: [message3], theme: self.presentationData.theme, strings: self.presentationData.strings, wallpaper: self.presentationData.chatWallpaper, fontSize: self.presentationData.chatFontSize, chatBubbleCorners: self.presentationData.chatBubbleCorners, dateTimeFormat: self.presentationData.dateTimeFormat, nameOrder: self.presentationData.nameDisplayOrder, forcedResourceStatus: FileMediaResourceStatus(mediaStatus: .playbackStatus(.paused), fetchStatus: .Local), tapMessage: nil, clickThroughMessage: nil, backgroundNode: self.chatBackgroundNode, availableReactions: nil, accountPeer: nil, isCentered: false, isPreview: true))
let message4 = Message(stableId: 2, stableVersion: 0, id: MessageId(peerId: otherPeerId, namespace: 0, id: 2), globallyUniqueId: nil, groupingKey: nil, groupInfo: nil, threadId: nil, timestamp: 66001, flags: [], tags: [], globalTags: [], localTags: [], forwardInfo: nil, author: peers[otherPeerId], text: self.presentationData.strings.Appearance_ThemePreview_Chat_1_Text, attributes: [], media: [], peers: peers, associatedMessages: messages, associatedMessageIds: [], associatedMedia: [:], associatedThreadInfo: nil, associatedStories: [:])
items.append(self.context.sharedContext.makeChatMessagePreviewItem(context: self.context, messages: [message4], theme: self.presentationData.theme, strings: self.presentationData.strings, wallpaper: self.presentationData.chatWallpaper, fontSize: self.presentationData.chatFontSize, chatBubbleCorners: self.presentationData.chatBubbleCorners, dateTimeFormat: self.presentationData.dateTimeFormat, nameOrder: self.presentationData.nameDisplayOrder, forcedResourceStatus: nil, tapMessage: nil, clickThroughMessage: nil, backgroundNode: self.chatBackgroundNode, availableReactions: nil, accountPeer: nil, isCentered: false))
items.append(self.context.sharedContext.makeChatMessagePreviewItem(context: self.context, messages: [message4], theme: self.presentationData.theme, strings: self.presentationData.strings, wallpaper: self.presentationData.chatWallpaper, fontSize: self.presentationData.chatFontSize, chatBubbleCorners: self.presentationData.chatBubbleCorners, dateTimeFormat: self.presentationData.dateTimeFormat, nameOrder: self.presentationData.nameDisplayOrder, forcedResourceStatus: nil, tapMessage: nil, clickThroughMessage: nil, backgroundNode: self.chatBackgroundNode, availableReactions: nil, accountPeer: nil, isCentered: false, isPreview: true))
let width: CGFloat
if case .regular = layout.metrics.widthClass {

View File

@ -149,7 +149,7 @@ class ForwardPrivacyChatPreviewItemNode: ListViewItemNode {
let forwardInfo = MessageForwardInfo(author: item.linkEnabled ? peers[peerId] : nil, source: nil, sourceMessageId: nil, date: 0, authorSignature: item.linkEnabled ? nil : item.peerName, psaType: nil, flags: [])
let messageItem = item.context.sharedContext.makeChatMessagePreviewItem(context: item.context, messages: [Message(stableId: 1, stableVersion: 0, id: MessageId(peerId: peerId, namespace: 0, id: 1), globallyUniqueId: nil, groupingKey: nil, groupInfo: nil, threadId: nil, timestamp: 66000, flags: [.Incoming], tags: [], globalTags: [], localTags: [], forwardInfo: forwardInfo, author: nil, text: item.strings.Privacy_Forwards_PreviewMessageText, attributes: [], media: [], peers: peers, associatedMessages: messages, associatedMessageIds: [], associatedMedia: [:], associatedThreadInfo: nil, associatedStories: [:])], theme: item.theme, strings: item.strings, wallpaper: item.wallpaper, fontSize: item.fontSize, chatBubbleCorners: item.chatBubbleCorners, dateTimeFormat: item.dateTimeFormat, nameOrder: item.nameDisplayOrder, forcedResourceStatus: nil, tapMessage: nil, clickThroughMessage: nil, backgroundNode: currentBackgroundNode, availableReactions: nil, accountPeer: nil, isCentered: false)
let messageItem = item.context.sharedContext.makeChatMessagePreviewItem(context: item.context, messages: [Message(stableId: 1, stableVersion: 0, id: MessageId(peerId: peerId, namespace: 0, id: 1), globallyUniqueId: nil, groupingKey: nil, groupInfo: nil, threadId: nil, timestamp: 66000, flags: [.Incoming], tags: [], globalTags: [], localTags: [], forwardInfo: forwardInfo, author: nil, text: item.strings.Privacy_Forwards_PreviewMessageText, attributes: [], media: [], peers: peers, associatedMessages: messages, associatedMessageIds: [], associatedMedia: [:], associatedThreadInfo: nil, associatedStories: [:])], theme: item.theme, strings: item.strings, wallpaper: item.wallpaper, fontSize: item.fontSize, chatBubbleCorners: item.chatBubbleCorners, dateTimeFormat: item.dateTimeFormat, nameOrder: item.nameDisplayOrder, forcedResourceStatus: nil, tapMessage: nil, clickThroughMessage: nil, backgroundNode: currentBackgroundNode, availableReactions: nil, accountPeer: nil, isCentered: false, isPreview: true)
var node: ListViewItemNode?
if let current = currentNode {

View File

@ -435,20 +435,20 @@ private final class TextSizeSelectionControllerNode: ASDisplayNode, UIScrollView
messages[replyMessageId] = Message(stableId: 3, stableVersion: 0, id: replyMessageId, globallyUniqueId: nil, groupingKey: nil, groupInfo: nil, threadId: nil, timestamp: 66000, flags: [], tags: [], globalTags: [], localTags: [], forwardInfo: nil, author: peers[otherPeerId], text: self.presentationData.strings.Appearance_ThemePreview_Chat_1_Text, attributes: [], media: [], peers: peers, associatedMessages: SimpleDictionary(), associatedMessageIds: [], associatedMedia: [:], associatedThreadInfo: nil, associatedStories: [:])
let message1 = Message(stableId: 4, stableVersion: 0, id: MessageId(peerId: otherPeerId, namespace: 0, id: 4), globallyUniqueId: nil, groupingKey: nil, groupInfo: nil, threadId: nil, timestamp: 66003, flags: [], tags: [], globalTags: [], localTags: [], forwardInfo: nil, author: peers[otherPeerId], text: self.presentationData.strings.Appearance_ThemePreview_Chat_3_Text, attributes: [], media: [], peers: peers, associatedMessages: messages, associatedMessageIds: [], associatedMedia: [:], associatedThreadInfo: nil, associatedStories: [:])
items.append(self.context.sharedContext.makeChatMessagePreviewItem(context: self.context, messages: [message1], theme: self.presentationData.theme, strings: self.presentationData.strings, wallpaper: self.presentationData.chatWallpaper, fontSize: self.presentationData.chatFontSize, chatBubbleCorners: self.presentationData.chatBubbleCorners, dateTimeFormat: self.presentationData.dateTimeFormat, nameOrder: self.presentationData.nameDisplayOrder, forcedResourceStatus: nil, tapMessage: nil, clickThroughMessage: nil, backgroundNode: self.chatBackgroundNode, availableReactions: nil, accountPeer: nil, isCentered: false))
items.append(self.context.sharedContext.makeChatMessagePreviewItem(context: self.context, messages: [message1], theme: self.presentationData.theme, strings: self.presentationData.strings, wallpaper: self.presentationData.chatWallpaper, fontSize: self.presentationData.chatFontSize, chatBubbleCorners: self.presentationData.chatBubbleCorners, dateTimeFormat: self.presentationData.dateTimeFormat, nameOrder: self.presentationData.nameDisplayOrder, forcedResourceStatus: nil, tapMessage: nil, clickThroughMessage: nil, backgroundNode: self.chatBackgroundNode, availableReactions: nil, accountPeer: nil, isCentered: false, isPreview: true))
let message2 = Message(stableId: 3, stableVersion: 0, id: MessageId(peerId: peerId, namespace: 0, id: 3), globallyUniqueId: nil, groupingKey: nil, groupInfo: nil, threadId: nil, timestamp: 66002, flags: [.Incoming], tags: [], globalTags: [], localTags: [], forwardInfo: nil, author: peers[peerId], text: self.presentationData.strings.Appearance_ThemePreview_Chat_2_Text, attributes: [ReplyMessageAttribute(messageId: replyMessageId, threadMessageId: nil, quote: nil, isQuote: false)], media: [], peers: peers, associatedMessages: messages, associatedMessageIds: [], associatedMedia: [:], associatedThreadInfo: nil, associatedStories: [:])
items.append(self.context.sharedContext.makeChatMessagePreviewItem(context: self.context, messages: [message2], theme: self.presentationData.theme, strings: self.presentationData.strings, wallpaper: self.presentationData.chatWallpaper, fontSize: self.presentationData.chatFontSize, chatBubbleCorners: self.presentationData.chatBubbleCorners, dateTimeFormat: self.presentationData.dateTimeFormat, nameOrder: self.presentationData.nameDisplayOrder, forcedResourceStatus: nil, tapMessage: nil, clickThroughMessage: nil, backgroundNode: self.chatBackgroundNode, availableReactions: nil, accountPeer: nil, isCentered: false))
items.append(self.context.sharedContext.makeChatMessagePreviewItem(context: self.context, messages: [message2], theme: self.presentationData.theme, strings: self.presentationData.strings, wallpaper: self.presentationData.chatWallpaper, fontSize: self.presentationData.chatFontSize, chatBubbleCorners: self.presentationData.chatBubbleCorners, dateTimeFormat: self.presentationData.dateTimeFormat, nameOrder: self.presentationData.nameDisplayOrder, forcedResourceStatus: nil, tapMessage: nil, clickThroughMessage: nil, backgroundNode: self.chatBackgroundNode, availableReactions: nil, accountPeer: nil, isCentered: false, isPreview: true))
let waveformBase64 = "DAAOAAkACQAGAAwADwAMABAADQAPABsAGAALAA0AGAAfABoAHgATABgAGQAYABQADAAVABEAHwANAA0ACQAWABkACQAOAAwACQAfAAAAGQAVAAAAEwATAAAACAAfAAAAHAAAABwAHwAAABcAGQAAABQADgAAABQAHwAAAB8AHwAAAAwADwAAAB8AEwAAABoAFwAAAB8AFAAAAAAAHwAAAAAAHgAAAAAAHwAAAAAAHwAAAAAAHwAAAAAAHwAAAAAAHwAAAAAAAAA="
let voiceAttributes: [TelegramMediaFileAttribute] = [.Audio(isVoice: true, duration: 23, title: nil, performer: nil, waveform: Data(base64Encoded: waveformBase64)!)]
let voiceMedia = TelegramMediaFile(fileId: MediaId(namespace: 0, id: 0), partialReference: nil, resource: LocalFileMediaResource(fileId: 0), previewRepresentations: [], videoThumbnails: [], immediateThumbnailData: nil, mimeType: "audio/ogg", size: 0, attributes: voiceAttributes)
let message3 = Message(stableId: 1, stableVersion: 0, id: MessageId(peerId: peerId, namespace: 0, id: 1), globallyUniqueId: nil, groupingKey: nil, groupInfo: nil, threadId: nil, timestamp: 66001, flags: [.Incoming], tags: [], globalTags: [], localTags: [], forwardInfo: nil, author: peers[peerId], text: "", attributes: [], media: [voiceMedia], peers: peers, associatedMessages: messages, associatedMessageIds: [], associatedMedia: [:], associatedThreadInfo: nil, associatedStories: [:])
items.append(self.context.sharedContext.makeChatMessagePreviewItem(context: self.context, messages: [message3], theme: self.presentationData.theme, strings: self.presentationData.strings, wallpaper: self.presentationData.chatWallpaper, fontSize: self.presentationData.chatFontSize, chatBubbleCorners: self.presentationData.chatBubbleCorners, dateTimeFormat: self.presentationData.dateTimeFormat, nameOrder: self.presentationData.nameDisplayOrder, forcedResourceStatus: FileMediaResourceStatus(mediaStatus: .playbackStatus(.paused), fetchStatus: .Local), tapMessage: nil, clickThroughMessage: nil, backgroundNode: self.chatBackgroundNode, availableReactions: nil, accountPeer: nil, isCentered: false))
items.append(self.context.sharedContext.makeChatMessagePreviewItem(context: self.context, messages: [message3], theme: self.presentationData.theme, strings: self.presentationData.strings, wallpaper: self.presentationData.chatWallpaper, fontSize: self.presentationData.chatFontSize, chatBubbleCorners: self.presentationData.chatBubbleCorners, dateTimeFormat: self.presentationData.dateTimeFormat, nameOrder: self.presentationData.nameDisplayOrder, forcedResourceStatus: FileMediaResourceStatus(mediaStatus: .playbackStatus(.paused), fetchStatus: .Local), tapMessage: nil, clickThroughMessage: nil, backgroundNode: self.chatBackgroundNode, availableReactions: nil, accountPeer: nil, isCentered: false, isPreview: true))
let message4 = Message(stableId: 2, stableVersion: 0, id: MessageId(peerId: otherPeerId, namespace: 0, id: 2), globallyUniqueId: nil, groupingKey: nil, groupInfo: nil, threadId: nil, timestamp: 66001, flags: [], tags: [], globalTags: [], localTags: [], forwardInfo: nil, author: peers[otherPeerId], text: self.presentationData.strings.Appearance_ThemePreview_Chat_1_Text, attributes: [], media: [], peers: peers, associatedMessages: messages, associatedMessageIds: [], associatedMedia: [:], associatedThreadInfo: nil, associatedStories: [:])
items.append(self.context.sharedContext.makeChatMessagePreviewItem(context: self.context, messages: [message4], theme: self.presentationData.theme, strings: self.presentationData.strings, wallpaper: self.presentationData.chatWallpaper, fontSize: self.presentationData.chatFontSize, chatBubbleCorners: self.presentationData.chatBubbleCorners, dateTimeFormat: self.presentationData.dateTimeFormat, nameOrder: self.presentationData.nameDisplayOrder, forcedResourceStatus: nil, tapMessage: nil, clickThroughMessage: nil, backgroundNode: self.chatBackgroundNode, availableReactions: nil, accountPeer: nil, isCentered: false))
items.append(self.context.sharedContext.makeChatMessagePreviewItem(context: self.context, messages: [message4], theme: self.presentationData.theme, strings: self.presentationData.strings, wallpaper: self.presentationData.chatWallpaper, fontSize: self.presentationData.chatFontSize, chatBubbleCorners: self.presentationData.chatBubbleCorners, dateTimeFormat: self.presentationData.dateTimeFormat, nameOrder: self.presentationData.nameDisplayOrder, forcedResourceStatus: nil, tapMessage: nil, clickThroughMessage: nil, backgroundNode: self.chatBackgroundNode, availableReactions: nil, accountPeer: nil, isCentered: false, isPreview: true))
let width: CGFloat
if case .regular = layout.metrics.widthClass {

View File

@ -619,7 +619,7 @@ final class ThemePreviewControllerNode: ASDisplayNode, UIScrollViewDelegate {
sampleMessages.append(message8)
items = sampleMessages.reversed().map { message in
self.context.sharedContext.makeChatMessagePreviewItem(context: self.context, messages: [message], theme: self.previewTheme, strings: self.presentationData.strings, wallpaper: self.wallpaper, fontSize: self.presentationData.chatFontSize, chatBubbleCorners: self.presentationData.chatBubbleCorners, dateTimeFormat: self.presentationData.dateTimeFormat, nameOrder: self.presentationData.nameDisplayOrder, forcedResourceStatus: !message.media.isEmpty ? FileMediaResourceStatus(mediaStatus: .playbackStatus(.paused), fetchStatus: .Local) : nil, tapMessage: nil, clickThroughMessage: nil, backgroundNode: self.wallpaperNode, availableReactions: nil, accountPeer: nil, isCentered: false)
self.context.sharedContext.makeChatMessagePreviewItem(context: self.context, messages: [message], theme: self.previewTheme, strings: self.presentationData.strings, wallpaper: self.wallpaper, fontSize: self.presentationData.chatFontSize, chatBubbleCorners: self.presentationData.chatBubbleCorners, dateTimeFormat: self.presentationData.dateTimeFormat, nameOrder: self.presentationData.nameDisplayOrder, forcedResourceStatus: !message.media.isEmpty ? FileMediaResourceStatus(mediaStatus: .playbackStatus(.paused), fetchStatus: .Local) : nil, tapMessage: nil, clickThroughMessage: nil, backgroundNode: self.wallpaperNode, availableReactions: nil, accountPeer: nil, isCentered: false, isPreview: true)
}
let width: CGFloat

View File

@ -168,7 +168,7 @@ class ThemeSettingsChatPreviewItemNode: ListViewItemNode {
}
let message = Message(stableId: 1, stableVersion: 0, id: MessageId(peerId: messageItem.outgoing ? otherPeerId : peerId, namespace: 0, id: 1), globallyUniqueId: nil, groupingKey: nil, groupInfo: nil, threadId: nil, timestamp: 66000, flags: messageItem.outgoing ? [] : [.Incoming], tags: [], globalTags: [], localTags: [], forwardInfo: nil, author: messageItem.outgoing ? TelegramUser(id: otherPeerId, accessHash: nil, firstName: "", lastName: "", username: nil, phone: nil, photo: [], botInfo: nil, restrictionInfo: nil, flags: [], emojiStatus: nil, usernames: [], storiesHidden: nil, nameColor: nil, backgroundEmojiId: nil, profileColor: nil, profileBackgroundEmojiId: nil) : nil, text: messageItem.text, attributes: messageItem.reply != nil ? [ReplyMessageAttribute(messageId: replyMessageId, threadMessageId: nil, quote: nil, isQuote: false)] : [], media: [], peers: peers, associatedMessages: messages, associatedMessageIds: [], associatedMedia: [:], associatedThreadInfo: nil, associatedStories: [:])
items.append(item.context.sharedContext.makeChatMessagePreviewItem(context: item.context, messages: [message], theme: item.componentTheme, strings: item.strings, wallpaper: item.wallpaper, fontSize: item.fontSize, chatBubbleCorners: item.chatBubbleCorners, dateTimeFormat: item.dateTimeFormat, nameOrder: item.nameDisplayOrder, forcedResourceStatus: nil, tapMessage: nil, clickThroughMessage: nil, backgroundNode: currentBackgroundNode, availableReactions: nil, accountPeer: nil, isCentered: false))
items.append(item.context.sharedContext.makeChatMessagePreviewItem(context: item.context, messages: [message], theme: item.componentTheme, strings: item.strings, wallpaper: item.wallpaper, fontSize: item.fontSize, chatBubbleCorners: item.chatBubbleCorners, dateTimeFormat: item.dateTimeFormat, nameOrder: item.nameDisplayOrder, forcedResourceStatus: nil, tapMessage: nil, clickThroughMessage: nil, backgroundNode: currentBackgroundNode, availableReactions: nil, accountPeer: nil, isCentered: false, isPreview: true))
}
var nodes: [ListViewItemNode] = []

View File

@ -643,7 +643,12 @@ open class TelegramBaseController: ViewController, KeyShortcutResponder {
}
}
if let (item, previousItem, nextItem, order, type, _) = self.playlistStateAndType, !mediaAccessoryPanelHidden {
var isViewOnceMessage = false
if let (item, _, _, _, _, _) = self.playlistStateAndType, let source = item.playbackData?.source, case let .telegramFile(_, _, isViewOnce) = source, isViewOnce {
isViewOnceMessage = true
}
if let (item, previousItem, nextItem, order, type, _) = self.playlistStateAndType, !mediaAccessoryPanelHidden && !isViewOnceMessage {
let panelHeight = MediaNavigationAccessoryHeaderNode.minimizedHeight
let panelFrame = CGRect(origin: CGPoint(x: 0.0, y: panelStartY), size: CGSize(width: layout.size.width, height: panelHeight))
additionalHeight += panelHeight

View File

@ -349,7 +349,15 @@ func textMediaAndExpirationTimerFromApiMedia(_ media: Api.MessageMedia?, _ peerI
return (mediaFile, ttlSeconds, (flags & (1 << 3)) != 0, (flags & (1 << 4)) != 0, nil)
}
} else {
return (TelegramMediaExpiredContent(data: .file), nil, nil, nil, nil)
var data: TelegramMediaExpiredContentData
if (flags & (1 << 7)) != 0 {
data = .videoMessage
} else if (flags & (1 << 8)) != 0 {
data = .voiceMessage
} else {
data = .file
}
return (TelegramMediaExpiredContent(data: data), nil, nil, nil, nil)
}
case let .messageMediaWebPage(flags, webpage):
if let mediaWebpage = telegramMediaWebpageFromApiWebpage(webpage) {

View File

@ -145,6 +145,15 @@ public enum EnqueueMessage {
return nil
}
}
public var attributes: [MessageAttribute] {
switch self {
case let .message(_, attributes, _, _, _, _, _, _, _, _):
return attributes
case let .forward(_, _, _, attributes, _):
return attributes
}
}
}
private extension EnqueueMessage {

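The new `attributes` accessor exposes the attribute list for both `.message` and `.forward` without switching over the enum at each call site. A minimal sketch of how a caller might use it to detect view-once media, assuming `AutoremoveTimeoutMessageAttribute` is the attribute carrying the timeout and `viewOnceTimeout` is the sentinel value referenced elsewhere in this commit:

```swift
// Sketch: check whether an enqueued message is marked as view-once.
func isViewOnce(_ message: EnqueueMessage, viewOnceTimeout: Int32) -> Bool {
    for attribute in message.attributes {
        if let autoremove = attribute as? AutoremoveTimeoutMessageAttribute, autoremove.timeout == viewOnceTimeout {
            return true
        }
    }
    return false
}
```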
View File

@ -94,8 +94,14 @@ func managedAutoremoveMessageOperations(network: Network, postbox: Postbox, isRe
for i in 0 ..< updatedMedia.count {
if let _ = updatedMedia[i] as? TelegramMediaImage {
updatedMedia[i] = TelegramMediaExpiredContent(data: .image)
} else if let _ = updatedMedia[i] as? TelegramMediaFile {
updatedMedia[i] = TelegramMediaExpiredContent(data: .file)
} else if let file = updatedMedia[i] as? TelegramMediaFile {
if file.isInstantVideo {
updatedMedia[i] = TelegramMediaExpiredContent(data: .videoMessage)
} else if file.isVoice {
updatedMedia[i] = TelegramMediaExpiredContent(data: .voiceMessage)
} else {
updatedMedia[i] = TelegramMediaExpiredContent(data: .file)
}
}
}
var updatedAttributes = currentMessage.attributes

View File

@ -4,6 +4,8 @@ import Postbox
public enum TelegramMediaExpiredContentData: Int32 {
case image
case file
case voiceMessage
case videoMessage
}
public final class TelegramMediaExpiredContent: Media, Equatable {

View File

@ -198,8 +198,14 @@ func markMessageContentAsConsumedRemotely(transaction: Transaction, messageId: M
for i in 0 ..< updatedMedia.count {
if let _ = updatedMedia[i] as? TelegramMediaImage {
updatedMedia[i] = TelegramMediaExpiredContent(data: .image)
} else if let _ = updatedMedia[i] as? TelegramMediaFile {
updatedMedia[i] = TelegramMediaExpiredContent(data: .file)
} else if let file = updatedMedia[i] as? TelegramMediaFile {
if file.isInstantVideo {
updatedMedia[i] = TelegramMediaExpiredContent(data: .videoMessage)
} else if file.isVoice {
updatedMedia[i] = TelegramMediaExpiredContent(data: .voiceMessage)
} else {
updatedMedia[i] = TelegramMediaExpiredContent(data: .file)
}
}
}
}
@ -216,8 +222,14 @@ func markMessageContentAsConsumedRemotely(transaction: Transaction, messageId: M
if attribute.timeout == viewOnceTimeout || timestamp >= countdownBeginTime + attribute.timeout {
if let _ = updatedMedia[i] as? TelegramMediaImage {
updatedMedia[i] = TelegramMediaExpiredContent(data: .image)
} else if let _ = updatedMedia[i] as? TelegramMediaFile {
updatedMedia[i] = TelegramMediaExpiredContent(data: .file)
} else if let file = updatedMedia[i] as? TelegramMediaFile {
if file.isInstantVideo {
updatedMedia[i] = TelegramMediaExpiredContent(data: .videoMessage)
} else if file.isVoice {
updatedMedia[i] = TelegramMediaExpiredContent(data: .voiceMessage)
} else {
updatedMedia[i] = TelegramMediaExpiredContent(data: .file)
}
}
}
}

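The `TelegramMediaFile` → `TelegramMediaExpiredContentData` branching above now appears in three places (once in `managedAutoremoveMessageOperations`, twice in `markMessageContentAsConsumedRemotely`). A hypothetical helper, not part of this commit, that would factor it out:

```swift
// Hypothetical refactor sketch; the commit keeps the branches inline.
func expiredContentData(for file: TelegramMediaFile) -> TelegramMediaExpiredContentData {
    if file.isInstantVideo {
        return .videoMessage
    } else if file.isVoice {
        return .voiceMessage
    } else {
        return .file
    }
}
```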
View File

@ -189,6 +189,9 @@ private enum ApplicationSpecificGlobalNotice: Int32 {
case dismissedPremiumColorsBadge = 55
case multipleReactionsSuggestion = 56
case savedMessagesChatsSuggestion = 57
case voiceMessagesPlayOnceSuggestion = 58
case incomingVoiceMessagePlayOnceTip = 59
case outgoingVoiceMessagePlayOnceTip = 60
var key: ValueBoxKey {
let v = ValueBoxKey(length: 4)
@ -454,21 +457,38 @@ private struct ApplicationSpecificNoticeKeys {
static func dismissedPremiumAppIconsBadge() -> NoticeEntryKey {
return NoticeEntryKey(namespace: noticeNamespace(namespace: globalNamespace), key: ApplicationSpecificGlobalNotice.dismissedPremiumAppIconsBadge.key)
}
static func replyQuoteTextSelectionTip() -> NoticeEntryKey {
return NoticeEntryKey(namespace: noticeNamespace(namespace: globalNamespace), key: ApplicationSpecificGlobalNotice.replyQuoteTextSelectionTip.key)
}
static func dismissedPremiumWallpapersBadge() -> NoticeEntryKey {
return NoticeEntryKey(namespace: noticeNamespace(namespace: globalNamespace), key: ApplicationSpecificGlobalNotice.dismissedPremiumWallpapersBadge.key)
}
static func dismissedPremiumColorsBadge() -> NoticeEntryKey {
return NoticeEntryKey(namespace: noticeNamespace(namespace: globalNamespace), key: ApplicationSpecificGlobalNotice.dismissedPremiumColorsBadge.key)
}
static func multipleReactionsSuggestion() -> NoticeEntryKey {
return NoticeEntryKey(namespace: noticeNamespace(namespace: globalNamespace), key: ApplicationSpecificGlobalNotice.multipleReactionsSuggestion.key)
}
static func savedMessagesChatsSuggestion() -> NoticeEntryKey {
return NoticeEntryKey(namespace: noticeNamespace(namespace: globalNamespace), key: ApplicationSpecificGlobalNotice.savedMessagesChatsSuggestion.key)
}
static func voiceMessagesPlayOnceSuggestion() -> NoticeEntryKey {
return NoticeEntryKey(namespace: noticeNamespace(namespace: globalNamespace), key: ApplicationSpecificGlobalNotice.voiceMessagesPlayOnceSuggestion.key)
}
static func incomingVoiceMessagePlayOnceTip() -> NoticeEntryKey {
return NoticeEntryKey(namespace: noticeNamespace(namespace: globalNamespace), key: ApplicationSpecificGlobalNotice.incomingVoiceMessagePlayOnceTip.key)
}
static func outgoingVoiceMessagePlayOnceTip() -> NoticeEntryKey {
return NoticeEntryKey(namespace: noticeNamespace(namespace: globalNamespace), key: ApplicationSpecificGlobalNotice.outgoingVoiceMessagePlayOnceTip.key)
}
}
public struct ApplicationSpecificNotice {
@ -1883,4 +1903,85 @@ public struct ApplicationSpecificNotice {
return Int(previousValue)
}
}
public static func getVoiceMessagesPlayOnceSuggestion(accountManager: AccountManager<TelegramAccountManagerTypes>) -> Signal<Int32, NoError> {
return accountManager.transaction { transaction -> Int32 in
if let value = transaction.getNotice(ApplicationSpecificNoticeKeys.voiceMessagesPlayOnceSuggestion())?.get(ApplicationSpecificCounterNotice.self) {
return value.value
} else {
return 0
}
}
}
public static func incrementVoiceMessagesPlayOnceSuggestion(accountManager: AccountManager<TelegramAccountManagerTypes>, count: Int = 1) -> Signal<Int, NoError> {
return accountManager.transaction { transaction -> Int in
var currentValue: Int32 = 0
if let value = transaction.getNotice(ApplicationSpecificNoticeKeys.voiceMessagesPlayOnceSuggestion())?.get(ApplicationSpecificCounterNotice.self) {
currentValue = value.value
}
let previousValue = currentValue
currentValue += Int32(count)
if let entry = CodableEntry(ApplicationSpecificCounterNotice(value: currentValue)) {
transaction.setNotice(ApplicationSpecificNoticeKeys.voiceMessagesPlayOnceSuggestion(), entry)
}
return Int(previousValue)
}
}
public static func getIncomingVoiceMessagePlayOnceTip(accountManager: AccountManager<TelegramAccountManagerTypes>) -> Signal<Int32, NoError> {
return accountManager.transaction { transaction -> Int32 in
if let value = transaction.getNotice(ApplicationSpecificNoticeKeys.incomingVoiceMessagePlayOnceTip())?.get(ApplicationSpecificCounterNotice.self) {
return value.value
} else {
return 0
}
}
}
public static func incrementIncomingVoiceMessagePlayOnceTip(accountManager: AccountManager<TelegramAccountManagerTypes>, count: Int = 1) -> Signal<Int, NoError> {
return accountManager.transaction { transaction -> Int in
var currentValue: Int32 = 0
if let value = transaction.getNotice(ApplicationSpecificNoticeKeys.incomingVoiceMessagePlayOnceTip())?.get(ApplicationSpecificCounterNotice.self) {
currentValue = value.value
}
let previousValue = currentValue
currentValue += Int32(count)
if let entry = CodableEntry(ApplicationSpecificCounterNotice(value: currentValue)) {
transaction.setNotice(ApplicationSpecificNoticeKeys.incomingVoiceMessagePlayOnceTip(), entry)
}
return Int(previousValue)
}
}
public static func getOutgoingVoiceMessagePlayOnceTip(accountManager: AccountManager<TelegramAccountManagerTypes>) -> Signal<Int32, NoError> {
return accountManager.transaction { transaction -> Int32 in
if let value = transaction.getNotice(ApplicationSpecificNoticeKeys.outgoingVoiceMessagePlayOnceTip())?.get(ApplicationSpecificCounterNotice.self) {
return value.value
} else {
return 0
}
}
}
public static func incrementOutgoingVoiceMessagePlayOnceTip(accountManager: AccountManager<TelegramAccountManagerTypes>, count: Int = 1) -> Signal<Int, NoError> {
return accountManager.transaction { transaction -> Int in
var currentValue: Int32 = 0
if let value = transaction.getNotice(ApplicationSpecificNoticeKeys.outgoingVoiceMessagePlayOnceTip())?.get(ApplicationSpecificCounterNotice.self) {
currentValue = value.value
}
let previousValue = currentValue
currentValue += Int32(count)
if let entry = CodableEntry(ApplicationSpecificCounterNotice(value: currentValue)) {
transaction.setNotice(ApplicationSpecificNoticeKeys.outgoingVoiceMessagePlayOnceTip(), entry)
}
return Int(previousValue)
}
}
}

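The three new counters follow the existing notice pattern: read the value, decide whether a tip should still be shown, and increment it when the tip is displayed. A minimal usage sketch, assuming `accountManager` is available from the shared context and a display cap of 2 (the cap is an assumption, not shown in this hunk):

```swift
// Sketch: gate the incoming play-once tooltip on how often it has already been shown.
let _ = (ApplicationSpecificNotice.getIncomingVoiceMessagePlayOnceTip(accountManager: accountManager)
|> deliverOnMainQueue).start(next: { count in
    guard count < 2 else {
        return
    }
    // ...present the Chat.PlayOnceVoiceMessageTooltip here...
    let _ = ApplicationSpecificNotice.incrementIncomingVoiceMessagePlayOnceTip(accountManager: accountManager).start()
})
```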
View File

@ -714,7 +714,7 @@ public func makeDefaultDayPresentationTheme(extendingThemeReference: Presentatio
shareButtonFillColor: PresentationThemeVariableColor(withWallpaper: serviceBackgroundColor, withoutWallpaper: UIColor(rgb: 0x748391, alpha: 0.45)),
shareButtonStrokeColor: PresentationThemeVariableColor(withWallpaper: .clear, withoutWallpaper: .clear),
shareButtonForegroundColor: PresentationThemeVariableColor(withWallpaper: UIColor(rgb: 0xffffff), withoutWallpaper: UIColor(rgb: 0xffffff)),
mediaOverlayControlColors: PresentationThemeFillForeground(fillColor: UIColor(rgb: 0x000000, alpha: 0.6), foregroundColor: UIColor(rgb: 0xffffff)),
mediaOverlayControlColors: PresentationThemeFillForeground(fillColor: UIColor(rgb: 0x000000, alpha: 0.45), foregroundColor: UIColor(rgb: 0xffffff)),
selectionControlColors: PresentationThemeFillStrokeForeground(fillColor: defaultDayAccentColor, strokeColor: UIColor(rgb: 0xc7c7cc), foregroundColor: UIColor(rgb: 0xffffff)),
deliveryFailedColors: PresentationThemeFillForeground(fillColor: UIColor(rgb: 0xff3b30), foregroundColor: UIColor(rgb: 0xffffff)),
mediaHighlightOverlayColor: UIColor(white: 1.0, alpha: 0.6),
@ -857,7 +857,7 @@ public func makeDefaultDayPresentationTheme(extendingThemeReference: Presentatio
shareButtonFillColor: PresentationThemeVariableColor(withWallpaper: serviceBackgroundColor, withoutWallpaper: UIColor(rgb: 0xffffff, alpha: 0.8)),
shareButtonStrokeColor: PresentationThemeVariableColor(withWallpaper: .clear, withoutWallpaper: UIColor(rgb: 0xe5e5ea)),
shareButtonForegroundColor: PresentationThemeVariableColor(withWallpaper: UIColor(rgb: 0xffffff), withoutWallpaper: defaultDayAccentColor),
mediaOverlayControlColors: PresentationThemeFillForeground(fillColor: UIColor(rgb: 0x000000, alpha: 0.6), foregroundColor: UIColor(rgb: 0xffffff)),
mediaOverlayControlColors: PresentationThemeFillForeground(fillColor: UIColor(rgb: 0x000000, alpha: 0.45), foregroundColor: UIColor(rgb: 0xffffff)),
selectionControlColors: PresentationThemeFillStrokeForeground(fillColor: defaultDayAccentColor, strokeColor: UIColor(rgb: 0xc7c7cc), foregroundColor: UIColor(rgb: 0xffffff)),
deliveryFailedColors: PresentationThemeFillForeground(fillColor: UIColor(rgb: 0xff3b30), foregroundColor: UIColor(rgb: 0xffffff)),
mediaHighlightOverlayColor: UIColor(rgb: 0xffffff, alpha: 0.6),

View File

@ -278,7 +278,7 @@ public func mediaContentKind(_ media: EngineMedia, message: EngineMessage? = nil
switch expiredMedia.data {
case .image:
return .expiredImage
case .file:
case .file, .videoMessage, .voiceMessage:
return .expiredVideo
}
case .image:

View File

@ -965,6 +965,10 @@ public func universalServiceMessageString(presentationData: (PresentationTheme,
attributedString = NSAttributedString(string: strings.Message_ImageExpired, font: titleFont, textColor: primaryTextColor)
case .file:
attributedString = NSAttributedString(string: strings.Message_VideoExpired, font: titleFont, textColor: primaryTextColor)
case .videoMessage:
attributedString = NSAttributedString(string: strings.Message_VideoMessageExpired, font: titleFont, textColor: primaryTextColor)
case .voiceMessage:
attributedString = NSAttributedString(string: strings.Message_VoiceMessageExpired, font: titleFont, textColor: primaryTextColor)
}
} else if let _ = media as? TelegramMediaStory {
let compactPeerName = message.peers[message.id.peerId].flatMap(EnginePeer.init)?.compactDisplayTitle ?? ""

View File

@ -19,6 +19,7 @@ public final class AudioWaveformComponent: Component {
public let samples: Data
public let peak: Int32
public let status: Signal<MediaPlayerStatus, NoError>
public let isViewOnceMessage: Bool
public let seek: ((Double) -> Void)?
public let updateIsSeeking: ((Bool) -> Void)?
@ -30,6 +31,7 @@ public final class AudioWaveformComponent: Component {
samples: Data,
peak: Int32,
status: Signal<MediaPlayerStatus, NoError>,
isViewOnceMessage: Bool,
seek: ((Double) -> Void)?,
updateIsSeeking: ((Bool) -> Void)?
) {
@ -40,6 +42,7 @@ public final class AudioWaveformComponent: Component {
self.samples = samples
self.peak = peak
self.status = status
self.isViewOnceMessage = isViewOnceMessage
self.seek = seek
self.updateIsSeeking = updateIsSeeking
}
@ -63,6 +66,9 @@ public final class AudioWaveformComponent: Component {
if lhs.peak != rhs.peak {
return false
}
if lhs.isViewOnceMessage != rhs.isViewOnceMessage {
return false
}
return true
}
@ -204,6 +210,9 @@ public final class AudioWaveformComponent: Component {
private var statusDisposable: Disposable?
private var playbackStatusAnimator: ConstantDisplayLinkAnimator?
private var sparksView: SparksView?
private var progress: CGFloat = 0.0
private var revealProgress: CGFloat = 1.0
private var animator: DisplayLinkAnimator?
@ -391,6 +400,21 @@ public final class AudioWaveformComponent: Component {
})
}
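// For view-once messages, attach a SparksView overlay (outset slightly so particles are not clipped); remove it otherwise.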
if component.isViewOnceMessage {
let sparksView: SparksView
if let current = self.sparksView {
sparksView = current
} else {
sparksView = SparksView()
self.addSubview(sparksView)
self.sparksView = sparksView
}
sparksView.frame = CGRect(origin: .zero, size: size).insetBy(dx: -5.0, dy: -5.0)
} else if let sparksView = self.sparksView {
self.sparksView = nil
sparksView.removeFromSuperview()
}
return size
}
@ -408,12 +432,25 @@ public final class AudioWaveformComponent: Component {
if needsAnimation != (self.playbackStatusAnimator != nil) {
if needsAnimation {
self.playbackStatusAnimator = ConstantDisplayLinkAnimator(update: { [weak self] in
if let self, let component = self.component, let sparksView = self.sparksView {
sparksView.update(position: CGPoint(x: sparksView.bounds.width * self.progress, y: sparksView.bounds.height / 2.0), color: component.foregroundColor)
}
self?.setNeedsDisplay()
})
self.playbackStatusAnimator?.isPaused = false
if let sparksView = self.sparksView {
sparksView.alpha = 1.0
sparksView.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2)
}
} else {
self.playbackStatusAnimator?.invalidate()
self.playbackStatusAnimator = nil
if let sparksView = self.sparksView {
sparksView.alpha = 0.0
sparksView.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2)
}
}
}
}
@ -445,7 +482,7 @@ public final class AudioWaveformComponent: Component {
timestampAndDuration = nil
}
let playbackProgress: CGFloat
var playbackProgress: CGFloat
if let (timestamp, duration) = timestampAndDuration {
if let scrubbingTimestampValue = self.scrubbingTimestampValue {
var progress = CGFloat(scrubbingTimestampValue / duration)
@ -474,6 +511,10 @@ public final class AudioWaveformComponent: Component {
} else {
playbackProgress = 0.0
}
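// View-once playback renders a draining waveform: invert the progress so the filled portion shrinks as the message plays.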
if component.isViewOnceMessage {
playbackProgress = 1.0 - playbackProgress
}
self.progress = playbackProgress
let sampleWidth: CGFloat = 2.0
let halfSampleWidth: CGFloat = 1.0
@ -571,7 +612,11 @@ public final class AudioWaveformComponent: Component {
}
if component.backgroundColor.alpha > 0.0 {
context.setFillColor(component.backgroundColor.mixedWith(component.foregroundColor, alpha: colorMixFraction).cgColor)
var backgroundColor = component.backgroundColor
if component.isViewOnceMessage {
backgroundColor = component.foregroundColor.withMultipliedAlpha(0.0)
}
context.setFillColor(backgroundColor.mixedWith(component.foregroundColor, alpha: colorMixFraction).cgColor)
} else {
context.setFillColor(component.foregroundColor.cgColor)
}
@ -604,3 +649,99 @@ public final class AudioWaveformComponent: Component {
return view.update(component: self, availableSize: availableSize, transition: transition)
}
}
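// A single spark particle: spawn position, travel direction, speed, current alpha, and lifetime bookkeeping.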
private struct ContentParticle {
var position: CGPoint
var direction: CGPoint
var velocity: CGFloat
var alpha: CGFloat
var lifetime: Double
var beginTime: Double
init(position: CGPoint, direction: CGPoint, velocity: CGFloat, alpha: CGFloat, lifetime: Double, beginTime: Double) {
self.position = position
self.direction = direction
self.velocity = velocity
self.alpha = alpha
self.lifetime = lifetime
self.beginTime = beginTime
}
}
private class SparksView: UIView {
private var particles: [ContentParticle] = []
private var color: UIColor = .black
override init(frame: CGRect) {
super.init(frame: frame)
self.backgroundColor = nil
self.isOpaque = false
}
required public init?(coder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
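// Advances the spark simulation by one frame: fades and moves live particles, prunes expired ones, and emits two new sparks near the given position before redrawing.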
func update(position: CGPoint, color: UIColor) {
self.color = color
let v = CGPoint(x: 1.0, y: 0.0)
let c = CGPoint(x: position.x - 3.0, y: position.y - 5.5 + 13.0 * CGFloat(arc4random_uniform(100)) / 100.0 + 1.0)
let timestamp = CACurrentMediaTime()
let dt: CGFloat = 1.0 / 60.0
var removeIndices: [Int] = []
for i in 0 ..< self.particles.count {
let currentTime = timestamp - self.particles[i].beginTime
if currentTime > self.particles[i].lifetime {
removeIndices.append(i)
} else {
let input: CGFloat = CGFloat(currentTime / self.particles[i].lifetime)
let decelerated: CGFloat = (1.0 - (1.0 - input) * (1.0 - input))
self.particles[i].alpha = 1.0 - decelerated
var p = self.particles[i].position
let d = self.particles[i].direction
let v = self.particles[i].velocity
p = CGPoint(x: p.x + d.x * v * dt, y: p.y + d.y * v * dt)
self.particles[i].position = p
}
}
for i in removeIndices.reversed() {
self.particles.remove(at: i)
}
let newParticleCount = 2
for _ in 0 ..< newParticleCount {
let degrees: CGFloat = CGFloat(arc4random_uniform(100)) - 50.0
let angle: CGFloat = degrees * CGFloat.pi / 180.0
let direction = CGPoint(x: v.x * cos(angle) - v.y * sin(angle), y: v.x * sin(angle) + v.y * cos(angle))
let velocity = (80.0 + (CGFloat(arc4random()) / CGFloat(UINT32_MAX)) * 4.0) * 0.5
let lifetime = Double(0.65 + CGFloat(arc4random_uniform(100)) * 0.01)
let particle = ContentParticle(position: c, direction: direction, velocity: velocity, alpha: 1.0, lifetime: lifetime, beginTime: timestamp)
self.particles.append(particle)
}
self.setNeedsDisplay()
}
override public func draw(_ rect: CGRect) {
guard let context = UIGraphicsGetCurrentContext() else {
return
}
context.setFillColor(self.color.cgColor)
for particle in self.particles {
let size: CGFloat = 1.0
context.setAlpha(particle.alpha)
context.fillEllipse(in: CGRect(origin: CGPoint(x: particle.position.x - size / 2.0, y: particle.position.y - size / 2.0), size: CGSize(width: size, height: size)))
}
}
}

View File

@ -3849,7 +3849,7 @@ public class ChatMessageBubbleItemNode: ChatMessageItemView, ChatMessagePreviewI
}
var isCurrentlyPlayingMedia = false
if item.associatedData.currentlyPlayingMessageId == item.message.index {
if item.associatedData.currentlyPlayingMessageId == item.message.index, let file = item.message.media.first(where: { $0 is TelegramMediaFile }) as? TelegramMediaFile, file.isInstantVideo {
isCurrentlyPlayingMedia = true
}

View File

@ -739,7 +739,7 @@ public final class ChatMessageInteractiveFileNode: ASDisplayNode {
let (titleLayout, titleApply) = titleAsyncLayout(TextNodeLayoutArguments(attributedString: titleString, backgroundColor: nil, maximumNumberOfLines: hasThumbnail ? 2 : 1, truncationType: .middle, constrainedSize: textConstrainedSize, alignment: .natural, cutout: nil, insets: UIEdgeInsets()))
let (descriptionLayout, descriptionApply) = descriptionAsyncLayout(TextNodeLayoutArguments(attributedString: descriptionString, backgroundColor: nil, maximumNumberOfLines: 1, truncationType: .middle, constrainedSize: textConstrainedSize, alignment: .natural, cutout: nil, insets: UIEdgeInsets()))
let isViewOnceMessage = "".isEmpty || arguments.message.autoremoveAttribute?.timeout == viewOnceTimeout
let isViewOnceMessage = isVoice && arguments.message.minAutoremoveOrClearTimeout == viewOnceTimeout
let fileSizeString: String
if let _ = arguments.file.size {
@ -1300,6 +1300,7 @@ public final class ChatMessageInteractiveFileNode: ASDisplayNode {
samples: audioWaveform?.samples ?? Data(),
peak: audioWaveform?.peak ?? 0,
status: strongSelf.playbackStatus.get(),
isViewOnceMessage: isViewOnceMessage,
seek: { timestamp in
if let strongSelf = self, let context = strongSelf.context, let message = strongSelf.message, let type = peerMessageMediaPlayerType(EngineMessage(message)) {
context.sharedContext.mediaManager.playlistControl(.seek(timestamp), type: type)
@ -1559,7 +1560,7 @@ public final class ChatMessageInteractiveFileNode: ASDisplayNode {
}
}
}
let isViewOnceMessage = "".isEmpty || (isVoice && message.autoremoveAttribute?.timeout == viewOnceTimeout)
let isViewOnceMessage = isVoice && message.minAutoremoveOrClearTimeout == viewOnceTimeout
var state: SemanticStatusNodeState
var streamingState: SemanticStatusNodeState = .none
@ -1780,6 +1781,11 @@ public final class ChatMessageInteractiveFileNode: ASDisplayNode {
self.streamingStatusNode = streamingStatusNode
streamingStatusNode.frame = streamingCacheStatusFrame
self.addSubnode(streamingStatusNode)
if isViewOnceMessage {
streamingStatusNode.layer.animateScale(from: 0.1, to: 1.0, duration: 0.2)
streamingStatusNode.layer.animateAlpha(from: 0.1, to: 1.0, duration: 0.2)
}
} else if let streamingStatusNode = self.streamingStatusNode {
streamingStatusNode.backgroundNodeColor = backgroundNodeColor
}
@ -1798,10 +1804,9 @@ public final class ChatMessageInteractiveFileNode: ASDisplayNode {
}
})
switch state {
case .pause:
if showBlobs {
self.playbackAudioLevelNode?.startAnimating()
default:
} else {
self.playbackAudioLevelNode?.stopAnimating()
}
}
@ -1809,6 +1814,9 @@ public final class ChatMessageInteractiveFileNode: ASDisplayNode {
if let streamingStatusNode = self.streamingStatusNode {
if streamingState == .none {
self.streamingStatusNode = nil
if isViewOnceMessage {
streamingStatusNode.layer.animateScale(from: 1.0, to: 0.1, duration: 0.2, removeOnCompletion: false)
}
streamingStatusNode.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2, removeOnCompletion: false, completion: { [weak streamingStatusNode] _ in
if streamingState == .none {
streamingStatusNode?.removeFromSupernode()

View File

@ -30,6 +30,7 @@ swift_library(
"//submodules/TelegramNotices",
"//submodules/Markdown",
"//submodules/TextFormat",
"//submodules/InvisibleInkDustNode",
"//submodules/TelegramUI/Components/Chat/ChatMessageForwardInfoNode",
"//submodules/TelegramUI/Components/Chat/ChatMessageDateAndStatusNode",
"//submodules/TelegramUI/Components/Chat/ChatMessageItemCommon",

View File

@ -29,6 +29,7 @@ import ChatInstantVideoMessageDurationNode
import ChatControllerInteraction
import WallpaperBackgroundNode
import TelegramStringFormatting
import InvisibleInkDustNode
public struct ChatMessageInstantVideoItemLayoutResult {
public let contentSize: CGSize
@ -89,6 +90,7 @@ public class ChatMessageInteractiveInstantVideoNode: ASDisplayNode {
public var audioTranscriptionButton: ComponentHostView<Empty>?
private var dustNode: MediaDustNode?
private var statusNode: RadialStatusNode?
private var disappearingStatusNode: RadialStatusNode?
private var playbackStatusNode: InstantVideoRadialStatusNode?
@ -278,8 +280,13 @@ public class ChatMessageInteractiveInstantVideoNode: ASDisplayNode {
updatedMuteIconImage = PresentationResourcesChat.chatInstantMessageMuteIconImage(item.presentationData.theme.theme)
}
let isViewOnceMessage = item.message.minAutoremoveOrClearTimeout == viewOnceTimeout
let theme = item.presentationData.theme
let isSecretMedia = item.message.containsSecretMedia
var isSecretMedia = item.message.containsSecretMedia
if isViewOnceMessage {
isSecretMedia = true
}
if isSecretMedia {
secretVideoPlaceholderBackgroundImage = PresentationResourcesChat.chatInstantVideoBackgroundImage(theme.theme, wallpaper: !theme.wallpaper.isEmpty)
}
@ -805,9 +812,7 @@ public class ChatMessageInteractiveInstantVideoNode: ASDisplayNode {
}
}))
}
let isViewOnceMessage = "".isEmpty
var displayTranscribe = false
if item.message.id.peerId.namespace != Namespaces.Peer.SecretChat && statusDisplayType == .free && !isViewOnceMessage && !item.presentationData.isPreview {
let premiumConfiguration = PremiumConfiguration.with(appConfiguration: item.context.currentAppConfiguration.with { $0 })
@ -946,12 +951,15 @@ public class ChatMessageInteractiveInstantVideoNode: ASDisplayNode {
animation.animator.updateFrame(layer: strongSelf.secretVideoPlaceholderBackground.layer, frame: displayVideoFrame, completion: nil)
let placeholderFrame = videoFrame.insetBy(dx: 2.0, dy: 2.0)
strongSelf.secretVideoPlaceholder.bounds = CGRect(origin: CGPoint(), size: videoFrame.size)
strongSelf.secretVideoPlaceholder.bounds = CGRect(origin: CGPoint(), size: placeholderFrame.size)
animation.animator.updateScale(layer: strongSelf.secretVideoPlaceholder.layer, scale: imageScale, completion: nil)
animation.animator.updatePosition(layer: strongSelf.secretVideoPlaceholder.layer, position: displayVideoFrame.center, completion: nil)
let placeholderSide = floor(placeholderFrame.size.width / 2.0) * 2.0
let placeholderSize = CGSize(width: placeholderSide, height: placeholderSide)
let makeSecretPlaceholderLayout = strongSelf.secretVideoPlaceholder.asyncLayout()
let arguments = TransformImageArguments(corners: ImageCorners(radius: placeholderFrame.size.width / 2.0), imageSize: placeholderFrame.size, boundingSize: placeholderFrame.size, intrinsicInsets: UIEdgeInsets())
let arguments = TransformImageArguments(corners: ImageCorners(radius: placeholderSize.width / 2.0), imageSize: placeholderSize, boundingSize: placeholderSize, intrinsicInsets: UIEdgeInsets())
let applySecretPlaceholder = makeSecretPlaceholderLayout(arguments)
applySecretPlaceholder()
@ -1144,7 +1152,10 @@ public class ChatMessageInteractiveInstantVideoNode: ASDisplayNode {
}
let messageTheme = item.presentationData.theme.theme.chat.message
let isViewOnceMessage = item.message.minAutoremoveOrClearTimeout == viewOnceTimeout
let isSecretMedia = item.message.containsSecretMedia
var secretBeginTimeAndTimeout: (Double, Double)?
if isSecretMedia {
if let attribute = item.message.autoclearAttribute {
@ -1197,6 +1208,7 @@ public class ChatMessageInteractiveInstantVideoNode: ASDisplayNode {
self.infoBackgroundNode.layer.animateScale(from: 1.0, to: 0.4, duration: 0.15)
}
}
self.infoBackgroundNode.isHidden = isViewOnceMessage
var isBuffering: Bool?
if let message = self.item?.message, let media = self.media, isMediaStreamable(message: message, media: media) && (self.automaticDownload ?? false) {
@ -1279,7 +1291,9 @@ public class ChatMessageInteractiveInstantVideoNode: ASDisplayNode {
state = .progress(color: messageTheme.mediaOverlayControlColors.foregroundColor, lineWidth: nil, value: CGFloat(adjustedProgress), cancelEnabled: true, animateRotation: true)
}
case .Local:
if isSecretMedia {
if isViewOnceMessage {
state = .play(messageTheme.mediaOverlayControlColors.foregroundColor)
} else if isSecretMedia {
if let (beginTime, timeout) = secretBeginTimeAndTimeout {
state = .secretTimeout(color: messageTheme.mediaOverlayControlColors.foregroundColor, icon: .flame, beginTime: beginTime, timeout: timeout, sparks: true)
} else {
@ -1321,7 +1335,8 @@ public class ChatMessageInteractiveInstantVideoNode: ASDisplayNode {
if let current = self.playbackStatusNode {
playbackStatusNode = current
} else {
playbackStatusNode = InstantVideoRadialStatusNode(color: UIColor(white: 1.0, alpha: 0.6), hasSeek: true)
playbackStatusNode = InstantVideoRadialStatusNode(color: UIColor(white: 1.0, alpha: 0.6), hasSeek: !isViewOnceMessage, sparks: isViewOnceMessage)
playbackStatusNode.isUserInteractionEnabled = !isViewOnceMessage
playbackStatusNode.seekTo = { [weak self] position, play in
guard let strongSelf = self else {
return
@ -1349,6 +1364,11 @@ public class ChatMessageInteractiveInstantVideoNode: ASDisplayNode {
self.videoNode?.isHidden = false
self.secretVideoPlaceholderBackground.isHidden = true
self.secretVideoPlaceholder.isHidden = true
if let dustNode = self.dustNode {
self.dustNode = nil
dustNode.removeFromSupernode()
}
} else {
if let playbackStatusNode = self.playbackStatusNode {
self.playbackStatusNode = nil
@ -1356,9 +1376,24 @@ public class ChatMessageInteractiveInstantVideoNode: ASDisplayNode {
}
self.durationNode?.status = .single(nil)
self.videoNode?.isHidden = isSecretMedia
self.secretVideoPlaceholderBackground.isHidden = !isSecretMedia
self.secretVideoPlaceholder.isHidden = !isSecretMedia && !item.presentationData.isPreview
self.videoNode?.isHidden = isSecretMedia || isViewOnceMessage
self.secretVideoPlaceholderBackground.isHidden = !isSecretMedia && !isViewOnceMessage
self.secretVideoPlaceholder.isHidden = !isSecretMedia && !isViewOnceMessage && !item.presentationData.isPreview
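// A view-once round video stays hidden behind its placeholder and a dust (spoiler) overlay until it is opened.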
if isViewOnceMessage {
let dustNode: MediaDustNode
if let current = self.dustNode {
dustNode = current
} else {
dustNode = MediaDustNode(enableAnimations: item.controllerInteraction.enableFullTranslucency)
dustNode.clipsToBounds = true
self.insertSubnode(dustNode, belowSubnode: self.dateAndStatusNode)
self.dustNode = dustNode
}
dustNode.cornerRadius = videoFrame.width / 2.0
dustNode.frame = videoFrame
dustNode.update(size: videoFrame.size, color: .white, transition: .immediate)
}
}
}

View File

@ -103,7 +103,7 @@ public final class ChatRecentActionsController: TelegramBaseController {
}, stopMediaRecording: {
}, lockMediaRecording: {
}, deleteRecordedMedia: {
}, sendRecordedMedia: { _ in
}, sendRecordedMedia: { _, _ in
}, displayRestrictedInfo: { _, _ in
}, displayVideoUnmuteTip: { _ in
}, switchMediaRecordingMode: {

View File

@ -7,6 +7,24 @@ import UniversalMediaPlayer
import LegacyComponents
import UIKitRuntimeUtils
private struct ContentParticle {
var position: CGPoint
var direction: CGPoint
var velocity: CGFloat
var alpha: CGFloat
var lifetime: Double
var beginTime: Double
init(position: CGPoint, direction: CGPoint, velocity: CGFloat, alpha: CGFloat, lifetime: Double, beginTime: Double) {
self.position = position
self.direction = direction
self.velocity = velocity
self.alpha = alpha
self.lifetime = lifetime
self.beginTime = beginTime
}
}
private final class InstantVideoRadialStatusNodeParameters: NSObject {
let color: UIColor
let progress: CGFloat
@ -14,14 +32,18 @@ private final class InstantVideoRadialStatusNodeParameters: NSObject {
let playProgress: CGFloat
let blinkProgress: CGFloat
let hasSeek: Bool
let sparks: Bool
let particles: [ContentParticle]
init(color: UIColor, progress: CGFloat, dimProgress: CGFloat, playProgress: CGFloat, blinkProgress: CGFloat, hasSeek: Bool) {
init(color: UIColor, progress: CGFloat, dimProgress: CGFloat, playProgress: CGFloat, blinkProgress: CGFloat, hasSeek: Bool, sparks: Bool, particles: [ContentParticle]) {
self.color = color
self.progress = progress
self.dimProgress = dimProgress
self.playProgress = playProgress
self.blinkProgress = blinkProgress
self.hasSeek = hasSeek
self.sparks = sparks
self.particles = particles
}
}
@ -43,8 +65,12 @@ private extension CGPoint {
public final class InstantVideoRadialStatusNode: ASDisplayNode, UIGestureRecognizerDelegate {
private let color: UIColor
private let hasSeek: Bool
private let hapticFeedback = HapticFeedback()
private let sparks: Bool
private let hapticFeedback = HapticFeedback()
private var particles: [ContentParticle] = []
private var effectiveProgress: CGFloat = 0.0 {
didSet {
self.setNeedsDisplay()
@ -85,6 +111,8 @@ public final class InstantVideoRadialStatusNode: ASDisplayNode, UIGestureRecogni
}
}
private var animator: ConstantDisplayLinkAnimator?
private var statusDisposable: Disposable?
private var statusValuePromise = Promise<MediaPlayerStatus?>()
@ -111,9 +139,10 @@ public final class InstantVideoRadialStatusNode: ASDisplayNode, UIGestureRecogni
public var seekTo: ((Double, Bool) -> Void)?
public init(color: UIColor, hasSeek: Bool) {
public init(color: UIColor, hasSeek: Bool, sparks: Bool = false) {
self.color = color
self.hasSeek = hasSeek
self.sparks = sparks
super.init()
@ -127,6 +156,13 @@ public final class InstantVideoRadialStatusNode: ASDisplayNode, UIGestureRecogni
})
self.view.disablesInteractiveTransitionGestureRecognizer = true
if sparks {
self.animator = ConstantDisplayLinkAnimator(update: { [weak self] in
self?.updateSparks()
})
self.animator?.isPaused = false
}
}
deinit {
@ -165,6 +201,60 @@ public final class InstantVideoRadialStatusNode: ASDisplayNode, UIGestureRecogni
}
}
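// Display-link driven particle update for sparks mode: ages and moves existing particles, prunes expired ones, and emits new sparks directed away from the current progress angle; draw(_:) renders them as offsets from the arc tip.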
private func updateSparks() {
// let bounds = self.bounds
// let lineWidth: CGFloat = 4.0
// let center = CGPoint(x: bounds.midX, y: bounds.midY)
// let radius: CGFloat = (bounds.size.width - lineWidth - 4.0 * 2.0) * 0.5
let endAngle: CGFloat = -CGFloat.pi / 2.0 + 2.0 * CGFloat.pi * self.effectiveProgress
let v = CGPoint(x: sin(endAngle), y: -cos(endAngle))
// let c = CGPoint(x: -v.y * radius + center.x, y: v.x * radius + center.y)
let timestamp = CACurrentMediaTime()
let dt: CGFloat = 1.0 / 60.0
var removeIndices: [Int] = []
for i in 0 ..< self.particles.count {
let currentTime = timestamp - self.particles[i].beginTime
if currentTime > self.particles[i].lifetime {
removeIndices.append(i)
} else {
let input: CGFloat = CGFloat(currentTime / self.particles[i].lifetime)
let decelerated: CGFloat = (1.0 - (1.0 - input) * (1.0 - input))
self.particles[i].alpha = 1.0 - decelerated
var p = self.particles[i].position
let d = self.particles[i].direction
let v = self.particles[i].velocity
p = CGPoint(x: p.x + d.x * v * dt, y: p.y + d.y * v * dt)
self.particles[i].position = p
}
}
for i in removeIndices.reversed() {
self.particles.remove(at: i)
}
let newParticleCount = 1
for _ in 0 ..< newParticleCount {
let degrees: CGFloat = CGFloat(arc4random_uniform(140)) - 70.0
let angle: CGFloat = degrees * CGFloat.pi / 180.0
let direction = CGPoint(x: v.x * cos(angle) - v.y * sin(angle), y: v.x * sin(angle) + v.y * cos(angle))
let velocity = (25.0 + (CGFloat(arc4random()) / CGFloat(UINT32_MAX)) * 4.0) * 0.5
let lifetime = Double(0.25 + CGFloat(arc4random_uniform(100)) * 0.01)
let particle = ContentParticle(position: .zero, direction: direction, velocity: velocity, alpha: 0.8, lifetime: lifetime, beginTime: timestamp)
self.particles.append(particle)
}
self.setNeedsDisplay()
}
@objc private func tapGesture(_ gestureRecognizer: UITapGestureRecognizer) {
let center = CGPoint(x: self.bounds.width / 2.0, y: self.bounds.height / 2.0)
let location = gestureRecognizer.location(in: self.view)
@ -259,7 +349,7 @@ public final class InstantVideoRadialStatusNode: ASDisplayNode, UIGestureRecogni
}
override public func drawParameters(forAsyncLayer layer: _ASDisplayLayer) -> NSObjectProtocol? {
return InstantVideoRadialStatusNodeParameters(color: self.color, progress: self.effectiveProgress, dimProgress: self.effectiveDimProgress, playProgress: self.effectivePlayProgress, blinkProgress: self.effectiveBlinkProgress, hasSeek: self.hasSeek)
return InstantVideoRadialStatusNodeParameters(color: self.color, progress: self.effectiveProgress, dimProgress: self.effectiveDimProgress, playProgress: self.effectivePlayProgress, blinkProgress: self.effectiveBlinkProgress, hasSeek: self.hasSeek, sparks: self.sparks, particles: self.particles)
}
@objc public override class func draw(_ bounds: CGRect, withParameters parameters: Any?, isCancelled: () -> Bool, isRasterizing: Bool) {
@ -296,8 +386,15 @@ public final class InstantVideoRadialStatusNode: ASDisplayNode, UIGestureRecogni
context.setBlendMode(.normal)
var progress = parameters.progress
let startAngle = -CGFloat.pi / 2.0
let endAngle = CGFloat(progress) * 2.0 * CGFloat.pi + startAngle
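// In sparks (view-once) mode the roles are swapped: the end of the arc is pinned to the top and the start angle advances with progress.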
let startAngle: CGFloat
let endAngle: CGFloat
if parameters.sparks {
endAngle = -CGFloat.pi / 2.0
startAngle = CGFloat(progress) * 2.0 * CGFloat.pi + endAngle
} else {
startAngle = -CGFloat.pi / 2.0
endAngle = CGFloat(progress) * 2.0 * CGFloat.pi + startAngle
}
progress = min(1.0, progress)
@ -363,6 +460,16 @@ public final class InstantVideoRadialStatusNode: ASDisplayNode, UIGestureRecogni
context.setFillColor(UIColor.white.cgColor)
context.fillEllipse(in: handleFrame)
}
let v = CGPoint(x: sin(startAngle), y: -cos(startAngle))
let c = CGPoint(x: -v.y * pathDiameter * 0.5 + bounds.midX, y: v.x * pathDiameter * 0.5 + bounds.midY)
context.setFillColor(parameters.color.cgColor)
for particle in parameters.particles {
let size: CGFloat = 1.3
context.setAlpha(particle.alpha)
context.fillEllipse(in: CGRect(origin: CGPoint(x: c.x + particle.position.x - size / 2.0, y: c.y + particle.position.y - size / 2.0), size: CGSize(width: size, height: size)))
}
}
}

View File

@ -187,7 +187,7 @@ public final class DrawingMessageRenderer {
let avatarHeaderItem = self.context.sharedContext.makeChatMessageAvatarHeaderItem(context: self.context, timestamp: self.messages.first?.timestamp ?? 0, peer: self.messages.first!.peers[self.messages.first!.author!.id]!, message: self.messages.first!, theme: theme, strings: presentationData.strings, wallpaper: presentationData.chatWallpaper, fontSize: presentationData.chatFontSize, chatBubbleCorners: presentationData.chatBubbleCorners, dateTimeFormat: presentationData.dateTimeFormat, nameOrder: presentationData.nameDisplayOrder)
let items: [ListViewItem] = [self.context.sharedContext.makeChatMessagePreviewItem(context: self.context, messages: self.messages, theme: theme, strings: presentationData.strings, wallpaper: presentationData.theme.chat.defaultWallpaper, fontSize: presentationData.chatFontSize, chatBubbleCorners: presentationData.chatBubbleCorners, dateTimeFormat: presentationData.dateTimeFormat, nameOrder: presentationData.nameDisplayOrder, forcedResourceStatus: nil, tapMessage: nil, clickThroughMessage: nil, backgroundNode: nil, availableReactions: nil, accountPeer: nil, isCentered: false)]
let items: [ListViewItem] = [self.context.sharedContext.makeChatMessagePreviewItem(context: self.context, messages: self.messages, theme: theme, strings: presentationData.strings, wallpaper: presentationData.theme.chat.defaultWallpaper, fontSize: presentationData.chatFontSize, chatBubbleCorners: presentationData.chatBubbleCorners, dateTimeFormat: presentationData.dateTimeFormat, nameOrder: presentationData.nameDisplayOrder, forcedResourceStatus: nil, tapMessage: nil, clickThroughMessage: nil, backgroundNode: nil, availableReactions: nil, accountPeer: nil, isCentered: false, isPreview: true)]
let inset: CGFloat = 16.0
let leftInset: CGFloat = 37.0

View File

@ -2448,6 +2448,9 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
if let maybeFile = messages.first?.media.first(where: { $0 is TelegramMediaFile }) as? TelegramMediaFile, maybeFile.isVideo, let _ = self.context.account.postbox.mediaBox.completedResourcePath(maybeFile.resource, pathExtension: nil) {
messageFile = maybeFile
}
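// Note: "".isEmpty is always true, so messageFile is unconditionally reset to nil here, effectively disabling the video branch above (presumably a temporary WIP switch).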
if "".isEmpty {
messageFile = nil
}
let renderer = DrawingMessageRenderer(context: self.context, messages: messages)
renderer.render(completion: { result in

View File

@ -1046,6 +1046,7 @@ private class TrackView: UIView, UIScrollViewDelegate, UIGestureRecognizerDelega
samples: samples,
peak: peak,
status: .complete(),
isViewOnceMessage: false,
seek: nil,
updateIsSeeking: nil
)

View File

@ -288,6 +288,7 @@ public final class MediaPreviewPanelComponent: Component {
)
}
},
isViewOnceMessage: false,
seek: { [weak self] timestamp in
guard let self, let mediaPlayer = self.mediaPlayer else {
return
@ -318,6 +319,7 @@ public final class MediaPreviewPanelComponent: Component {
samples: component.mediaPreview.waveform.samples,
peak: component.mediaPreview.waveform.peak,
status: .complete(),
isViewOnceMessage: false,
seek: nil,
updateIsSeeking: nil
)),

View File

@ -348,7 +348,7 @@ final class PeerInfoSelectionPanelNode: ASDisplayNode {
}, stopMediaRecording: {
}, lockMediaRecording: {
}, deleteRecordedMedia: {
}, sendRecordedMedia: { _ in
}, sendRecordedMedia: { _, _ in
}, displayRestrictedInfo: { _, _ in
}, displayVideoUnmuteTip: { _ in
}, switchMediaRecordingMode: {

View File

@ -592,7 +592,7 @@ final class PeerSelectionControllerNode: ASDisplayNode {
}, stopMediaRecording: {
}, lockMediaRecording: {
}, deleteRecordedMedia: {
}, sendRecordedMedia: { _ in
}, sendRecordedMedia: { _, _ in
}, displayRestrictedInfo: { _, _ in
}, displayVideoUnmuteTip: { _ in
}, switchMediaRecordingMode: {

View File

@ -236,7 +236,7 @@ final class PeerNameColorChatPreviewItemNode: ListViewItemNode {
}
let message = Message(stableId: 1, stableVersion: 0, id: MessageId(peerId: peerId, namespace: 0, id: 1), globallyUniqueId: nil, groupingKey: nil, groupInfo: nil, threadId: nil, timestamp: 66000, flags: messageItem.outgoing ? [] : [.Incoming], tags: [], globalTags: [], localTags: [], forwardInfo: nil, author: peers[authorPeerId], text: messageItem.text, attributes: messageItem.reply != nil ? [ReplyMessageAttribute(messageId: replyMessageId, threadMessageId: nil, quote: nil, isQuote: false)] : [], media: media, peers: peers, associatedMessages: messages, associatedMessageIds: [], associatedMedia: [:], associatedThreadInfo: nil, associatedStories: [:])
items.append(item.context.sharedContext.makeChatMessagePreviewItem(context: item.context, messages: [message], theme: item.componentTheme, strings: item.strings, wallpaper: item.wallpaper, fontSize: item.fontSize, chatBubbleCorners: item.chatBubbleCorners, dateTimeFormat: item.dateTimeFormat, nameOrder: item.nameDisplayOrder, forcedResourceStatus: nil, tapMessage: nil, clickThroughMessage: nil, backgroundNode: currentBackgroundNode, availableReactions: nil, accountPeer: nil, isCentered: false))
items.append(item.context.sharedContext.makeChatMessagePreviewItem(context: item.context, messages: [message], theme: item.componentTheme, strings: item.strings, wallpaper: item.wallpaper, fontSize: item.fontSize, chatBubbleCorners: item.chatBubbleCorners, dateTimeFormat: item.dateTimeFormat, nameOrder: item.nameDisplayOrder, forcedResourceStatus: nil, tapMessage: nil, clickThroughMessage: nil, backgroundNode: currentBackgroundNode, availableReactions: nil, accountPeer: nil, isCentered: false, isPreview: true))
}
var nodes: [ListViewItemNode] = []

View File

@ -292,7 +292,7 @@ class ReactionChatPreviewItemNode: ListViewItemNode {
attributes.append(ReactionsMessageAttribute(canViewList: false, reactions: [MessageReaction(value: reaction, count: 1, chosenOrder: 0)], recentPeers: recentPeers))
}
let messageItem = item.context.sharedContext.makeChatMessagePreviewItem(context: item.context, messages: [Message(stableId: 1, stableVersion: 0, id: MessageId(peerId: chatPeerId, namespace: 0, id: 1), globallyUniqueId: nil, groupingKey: nil, groupInfo: nil, threadId: nil, timestamp: 66000, flags: [.Incoming], tags: [], globalTags: [], localTags: [], forwardInfo: nil, author: peers[userPeerId], text: messageText, attributes: attributes, media: [], peers: peers, associatedMessages: messages, associatedMessageIds: [], associatedMedia: [:], associatedThreadInfo: nil, associatedStories: [:])], theme: item.theme, strings: item.strings, wallpaper: item.wallpaper, fontSize: item.fontSize, chatBubbleCorners: item.chatBubbleCorners, dateTimeFormat: item.dateTimeFormat, nameOrder: item.nameDisplayOrder, forcedResourceStatus: nil, tapMessage: nil, clickThroughMessage: nil, backgroundNode: currentBackgroundNode, availableReactions: item.availableReactions, accountPeer: item.accountPeer, isCentered: true)
let messageItem = item.context.sharedContext.makeChatMessagePreviewItem(context: item.context, messages: [Message(stableId: 1, stableVersion: 0, id: MessageId(peerId: chatPeerId, namespace: 0, id: 1), globallyUniqueId: nil, groupingKey: nil, groupInfo: nil, threadId: nil, timestamp: 66000, flags: [.Incoming], tags: [], globalTags: [], localTags: [], forwardInfo: nil, author: peers[userPeerId], text: messageText, attributes: attributes, media: [], peers: peers, associatedMessages: messages, associatedMessageIds: [], associatedMedia: [:], associatedThreadInfo: nil, associatedStories: [:])], theme: item.theme, strings: item.strings, wallpaper: item.wallpaper, fontSize: item.fontSize, chatBubbleCorners: item.chatBubbleCorners, dateTimeFormat: item.dateTimeFormat, nameOrder: item.nameDisplayOrder, forcedResourceStatus: nil, tapMessage: nil, clickThroughMessage: nil, backgroundNode: currentBackgroundNode, availableReactions: item.availableReactions, accountPeer: item.accountPeer, isCentered: true, isPreview: true)
var node: ListViewItemNode?
if let current = currentNode {

View File

@ -1087,7 +1087,7 @@ final class ThemeAccentColorControllerNode: ASDisplayNode, UIScrollViewDelegate
return state
}, animated: true)
}, clickThroughMessage: {
}, backgroundNode: self.backgroundNode, availableReactions: nil, accountPeer: nil, isCentered: false)
}, backgroundNode: self.backgroundNode, availableReactions: nil, accountPeer: nil, isCentered: false, isPreview: true)
return item
}

View File

@ -1622,19 +1622,19 @@ final class WallpaperGalleryItemNode: GalleryItemNode {
if !bottomMessageText.isEmpty {
let message1 = Message(stableId: 2, stableVersion: 0, id: MessageId(peerId: peerId, namespace: 0, id: 2), globallyUniqueId: nil, groupingKey: nil, groupInfo: nil, threadId: nil, timestamp: 66001, flags: [], tags: [], globalTags: [], localTags: [], forwardInfo: nil, author: peers[otherPeerId], text: bottomMessageText, attributes: [], media: [], peers: peers, associatedMessages: messages, associatedMessageIds: [], associatedMedia: [:], associatedThreadInfo: nil, associatedStories: [:])
items.append(self.context.sharedContext.makeChatMessagePreviewItem(context: self.context, messages: [message1], theme: theme, strings: self.presentationData.strings, wallpaper: currentWallpaper, fontSize: self.presentationData.chatFontSize, chatBubbleCorners: self.presentationData.chatBubbleCorners, dateTimeFormat: self.presentationData.dateTimeFormat, nameOrder: self.presentationData.nameDisplayOrder, forcedResourceStatus: nil, tapMessage: nil, clickThroughMessage: nil, backgroundNode: self.nativeNode, availableReactions: nil, accountPeer: nil, isCentered: false))
items.append(self.context.sharedContext.makeChatMessagePreviewItem(context: self.context, messages: [message1], theme: theme, strings: self.presentationData.strings, wallpaper: currentWallpaper, fontSize: self.presentationData.chatFontSize, chatBubbleCorners: self.presentationData.chatBubbleCorners, dateTimeFormat: self.presentationData.dateTimeFormat, nameOrder: self.presentationData.nameDisplayOrder, forcedResourceStatus: nil, tapMessage: nil, clickThroughMessage: nil, backgroundNode: self.nativeNode, availableReactions: nil, accountPeer: nil, isCentered: false, isPreview: true))
}
let message2 = Message(stableId: 1, stableVersion: 0, id: MessageId(peerId: peerId, namespace: 0, id: 1), globallyUniqueId: nil, groupingKey: nil, groupInfo: nil, threadId: nil, timestamp: 66000, flags: [.Incoming], tags: [], globalTags: [], localTags: [], forwardInfo: nil, author: peers[peerId], text: topMessageText, attributes: messageAttributes, media: [], peers: peers, associatedMessages: messages, associatedMessageIds: [], associatedMedia: [:], associatedThreadInfo: nil, associatedStories: [:])
items.append(self.context.sharedContext.makeChatMessagePreviewItem(context: self.context, messages: [message2], theme: theme, strings: self.presentationData.strings, wallpaper: currentWallpaper, fontSize: self.presentationData.chatFontSize, chatBubbleCorners: self.presentationData.chatBubbleCorners, dateTimeFormat: self.presentationData.dateTimeFormat, nameOrder: self.presentationData.nameDisplayOrder, forcedResourceStatus: nil, tapMessage: nil, clickThroughMessage: nil, backgroundNode: self.nativeNode, availableReactions: nil, accountPeer: nil, isCentered: false))
items.append(self.context.sharedContext.makeChatMessagePreviewItem(context: self.context, messages: [message2], theme: theme, strings: self.presentationData.strings, wallpaper: currentWallpaper, fontSize: self.presentationData.chatFontSize, chatBubbleCorners: self.presentationData.chatBubbleCorners, dateTimeFormat: self.presentationData.dateTimeFormat, nameOrder: self.presentationData.nameDisplayOrder, forcedResourceStatus: nil, tapMessage: nil, clickThroughMessage: nil, backgroundNode: self.nativeNode, availableReactions: nil, accountPeer: nil, isCentered: false, isPreview: true))
if let serviceMessageText {
let attributedText = convertMarkdownToAttributes(NSAttributedString(string: serviceMessageText))
let entities = generateChatInputTextEntities(attributedText)
let message3 = Message(stableId: 0, stableVersion: 0, id: MessageId(peerId: peerId, namespace: 0, id: 0), globallyUniqueId: nil, groupingKey: nil, groupInfo: nil, threadId: nil, timestamp: 66002, flags: [.Incoming], tags: [], globalTags: [], localTags: [], forwardInfo: nil, author: peers[peerId], text: "", attributes: [], media: [TelegramMediaAction(action: .customText(text: attributedText.string, entities: entities, additionalAttributes: nil))], peers: peers, associatedMessages: messages, associatedMessageIds: [], associatedMedia: [:], associatedThreadInfo: nil, associatedStories: [:])
items.append(self.context.sharedContext.makeChatMessagePreviewItem(context: self.context, messages: [message3], theme: theme, strings: self.presentationData.strings, wallpaper: currentWallpaper, fontSize: self.presentationData.chatFontSize, chatBubbleCorners: self.presentationData.chatBubbleCorners, dateTimeFormat: self.presentationData.dateTimeFormat, nameOrder: self.presentationData.nameDisplayOrder, forcedResourceStatus: nil, tapMessage: nil, clickThroughMessage: nil, backgroundNode: self.nativeNode, availableReactions: nil, accountPeer: nil, isCentered: false))
items.append(self.context.sharedContext.makeChatMessagePreviewItem(context: self.context, messages: [message3], theme: theme, strings: self.presentationData.strings, wallpaper: currentWallpaper, fontSize: self.presentationData.chatFontSize, chatBubbleCorners: self.presentationData.chatBubbleCorners, dateTimeFormat: self.presentationData.dateTimeFormat, nameOrder: self.presentationData.nameDisplayOrder, forcedResourceStatus: nil, tapMessage: nil, clickThroughMessage: nil, backgroundNode: self.nativeNode, availableReactions: nil, accountPeer: nil, isCentered: false, isPreview: true))
}
let params = ListViewItemLayoutParams(width: layout.size.width, leftInset: layout.safeInsets.left, rightInset: layout.safeInsets.right, availableHeight: layout.size.height)

View File

@ -0,0 +1,85 @@
[New PDF asset: a single-page 30x30 pt vector drawing of a filled circle containing the digit "1", referenced as 1filled.pdf by the Contents.json below; raw PDF stream omitted.]

View File

@ -0,0 +1,12 @@
{
"images" : [
{
"filename" : "1filled.pdf",
"idiom" : "universal"
}
],
"info" : {
"author" : "xcode",
"version" : 1
}
}

View File

@ -736,10 +736,10 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
return false
}
// if (file.isVoice || file.isInstantVideo) && "".isEmpty {
// strongSelf.openViewOnceMediaMessage(message)
// return false
// }
if (file.isVoice || file.isInstantVideo) && message.minAutoremoveOrClearTimeout == viewOnceTimeout {
strongSelf.openViewOnceMediaMessage(message)
return false
}
}
}
if let invoice = media as? TelegramMediaInvoice, let extendedMedia = invoice.extendedMedia {
@ -9503,8 +9503,8 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
strongSelf.lockMediaRecorder()
}, deleteRecordedMedia: { [weak self] in
self?.deleteMediaRecording()
}, sendRecordedMedia: { [weak self] silentPosting in
self?.sendMediaRecording(silentPosting: silentPosting)
}, sendRecordedMedia: { [weak self] silentPosting, viewOnce in
self?.sendMediaRecording(silentPosting: silentPosting, viewOnce: viewOnce)
}, displayRestrictedInfo: { [weak self] subject, displayType in
guard let strongSelf = self else {
return
@ -15453,9 +15453,17 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
let replyMessageSubject = strongSelf.presentationInterfaceState.interfaceState.replyMessageSubject
let correlationId = Int64.random(in: 0 ..< Int64.max)
let updatedMessage = message
.withUpdatedReplyToMessageId(replyMessageSubject?.subjectModel)
.withUpdatedCorrelationId(correlationId)
// .withUpdatedAttributes({ attributes in
// var attributes = attributes
//#if DEBUG
// attributes.append(AutoremoveTimeoutMessageAttribute(timeout: viewOnceTimeout, countdownBeginTime: nil))
//#endif
// return attributes
// })
var usedCorrelationId = false
@ -15548,10 +15556,11 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
}
})
strongSelf.recorderFeedback = nil
strongSelf.updateDownButtonVisibility()
}
}
})
case .send:
case let .send(viewOnce):
self.chatDisplayNode.updateRecordedMediaDeleted(false)
let _ = (audioRecorderValue.takenRecordedData()
|> deliverOnMainQueue).startStandalone(next: { [weak self] data in
@ -15593,7 +15602,12 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
}
}, usedCorrelationId ? correlationId : nil)
strongSelf.sendMessages([.message(text: "", attributes: [], inlineStickers: [:], mediaReference: .standalone(media: TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.LocalFile, id: randomId), partialReference: nil, resource: resource, previewRepresentations: [], videoThumbnails: [], immediateThumbnailData: nil, mimeType: "audio/ogg", size: Int64(data.compressedData.count), attributes: [.Audio(isVoice: true, duration: Int(data.duration), title: nil, performer: nil, waveform: waveformBuffer)])), threadId: strongSelf.chatLocation.threadId, replyToMessageId: strongSelf.presentationInterfaceState.interfaceState.replyMessageSubject?.subjectModel, replyToStoryId: nil, localGroupingKey: nil, correlationId: correlationId, bubbleUpEmojiOrStickersets: [])])
var attributes: [MessageAttribute] = []
if viewOnce {
attributes.append(AutoremoveTimeoutMessageAttribute(timeout: viewOnceTimeout, countdownBeginTime: nil))
}
strongSelf.sendMessages([.message(text: "", attributes: attributes, inlineStickers: [:], mediaReference: .standalone(media: TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.LocalFile, id: randomId), partialReference: nil, resource: resource, previewRepresentations: [], videoThumbnails: [], immediateThumbnailData: nil, mimeType: "audio/ogg", size: Int64(data.compressedData.count), attributes: [.Audio(isVoice: true, duration: Int(data.duration), title: nil, performer: nil, waveform: waveformBuffer)])), threadId: strongSelf.chatLocation.threadId, replyToMessageId: strongSelf.presentationInterfaceState.interfaceState.replyMessageSubject?.subjectModel, replyToStoryId: nil, localGroupingKey: nil, correlationId: correlationId, bubbleUpEmojiOrStickersets: [])])
strongSelf.recorderFeedback?.tap()
strongSelf.recorderFeedback = nil
@ -15660,9 +15674,10 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
self.updateChatPresentationInterfaceState(animated: true, interactive: true, {
$0.updatedRecordedMediaPreview(nil)
})
self.updateDownButtonVisibility()
}
func sendMediaRecording(silentPosting: Bool? = nil, scheduleTime: Int32? = nil) {
func sendMediaRecording(silentPosting: Bool? = nil, scheduleTime: Int32? = nil, viewOnce: Bool = false) {
self.chatDisplayNode.updateRecordedMediaDeleted(false)
if let recordedMediaPreview = self.presentationInterfaceState.recordedMediaPreview {
@ -15687,10 +15702,17 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
strongSelf.updateChatPresentationInterfaceState(animated: true, interactive: false, {
$0.updatedRecordedMediaPreview(nil).updatedInterfaceState { $0.withUpdatedReplyMessageSubject(nil) }
})
strongSelf.updateDownButtonVisibility()
}
}, nil)
let messages: [EnqueueMessage] = [.message(text: "", attributes: [], inlineStickers: [:], mediaReference: .standalone(media: TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.LocalFile, id: Int64.random(in: Int64.min ... Int64.max)), partialReference: nil, resource: recordedMediaPreview.resource, previewRepresentations: [], videoThumbnails: [], immediateThumbnailData: nil, mimeType: "audio/ogg", size: Int64(recordedMediaPreview.fileSize), attributes: [.Audio(isVoice: true, duration: Int(recordedMediaPreview.duration), title: nil, performer: nil, waveform: waveformBuffer)])), threadId: self.chatLocation.threadId, replyToMessageId: self.presentationInterfaceState.interfaceState.replyMessageSubject?.subjectModel, replyToStoryId: nil, localGroupingKey: nil, correlationId: nil, bubbleUpEmojiOrStickersets: [])]
var attributes: [MessageAttribute] = []
if viewOnce {
attributes.append(AutoremoveTimeoutMessageAttribute(timeout: viewOnceTimeout, countdownBeginTime: nil))
}
let messages: [EnqueueMessage] = [.message(text: "", attributes: attributes, inlineStickers: [:], mediaReference: .standalone(media: TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.LocalFile, id: Int64.random(in: Int64.min ... Int64.max)), partialReference: nil, resource: recordedMediaPreview.resource, previewRepresentations: [], videoThumbnails: [], immediateThumbnailData: nil, mimeType: "audio/ogg", size: Int64(recordedMediaPreview.fileSize), attributes: [.Audio(isVoice: true, duration: Int(recordedMediaPreview.duration), title: nil, performer: nil, waveform: waveformBuffer)])), threadId: self.chatLocation.threadId, replyToMessageId: self.presentationInterfaceState.interfaceState.replyMessageSubject?.subjectModel, replyToStoryId: nil, localGroupingKey: nil, correlationId: nil, bubbleUpEmojiOrStickersets: [])]
let transformedMessages: [EnqueueMessage]
if let silentPosting = silentPosting {
@ -16008,7 +16030,7 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
}
func updateDownButtonVisibility() {
let recordingMediaMessage = self.audioRecorderValue != nil || self.videoRecorderValue != nil
let recordingMediaMessage = self.audioRecorderValue != nil || self.videoRecorderValue != nil || self.presentationInterfaceState.recordedMediaPreview != nil
self.chatDisplayNode.navigateButtons.displayDownButton = self.shouldDisplayDownButton && !recordingMediaMessage
}
@ -18923,16 +18945,32 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
}
func openViewOnceMediaMessage(_ message: Message) {
let source: ContextContentSource = .extracted(ChatMessageContextExtractedContentSource(chatNode: self.chatDisplayNode, engine: self.context.engine, message: message, selectAll: false, centerVertically: true))
let isIncoming = message.effectivelyIncoming(self.context.account.peerId)
var presentImpl: ((ViewController) -> Void)?
let configuration = ContextController.Configuration(
sources: [
ContextController.Source(
id: 0,
title: "",
source: source,
source: .extracted(ChatViewOnceMessageContextExtractedContentSource(
context: self.context,
presentationData: self.presentationData,
chatNode: self.chatDisplayNode,
backgroundNode: self.chatBackgroundNode,
engine: self.context.engine,
message: message,
present: { c in
presentImpl?(c)
}
)),
items: .single(ContextController.Items(content: .list([]))),
closeActionTitle: "Delete and Close"
closeActionTitle: isIncoming ? "Delete and Close" : "Close",
closeAction: { [weak self] in
if let self {
self.context.sharedContext.mediaManager.setPlaylist(nil, type: .voice, control: .playback(.pause))
}
}
)
], initialId: 0
)
@ -18947,7 +18985,11 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
self.currentContextController = contextController
self.presentInGlobalOverlay(contextController)
let _ = self.context.sharedContext.openChatMessage(OpenChatMessageParams(context: self.context, chatLocation: nil, chatLocationContextHolder: nil, message: message, standalone: false, reverseMessageGalleryOrder: false, navigationController: nil, dismissInput: { }, present: { _, _ in }, transitionNode: { _, _, _ in return nil }, addToTransitionSurface: { _ in }, openUrl: { _ in }, openPeer: { _, _ in }, callPeer: { _, _ in }, enqueueMessage: { _ in }, sendSticker: nil, sendEmoji: nil, setupTemporaryHiddenMedia: { _, _, _ in }, chatAvatarHiddenMedia: { _, _ in }, playlistLocation: nil))
presentImpl = { [weak contextController] c in
contextController?.present(c, in: .current)
}
let _ = self.context.sharedContext.openChatMessage(OpenChatMessageParams(context: self.context, chatLocation: nil, chatLocationContextHolder: nil, message: message, standalone: false, reverseMessageGalleryOrder: false, navigationController: nil, dismissInput: { }, present: { _, _ in }, transitionNode: { _, _, _ in return nil }, addToTransitionSurface: { _ in }, openUrl: { _ in }, openPeer: { _, _ in }, callPeer: { _, _ in }, enqueueMessage: { _ in }, sendSticker: nil, sendEmoji: nil, setupTemporaryHiddenMedia: { _, _, _ in }, chatAvatarHiddenMedia: { _, _ in }, playlistLocation: .singleMessage(message.id)))
}
func openStorySharing(messages: [Message]) {

View File

@ -140,7 +140,7 @@ class ChatControllerNode: ASDisplayNode, UIScrollViewDelegate {
private var validEmptyNodeLayout: (CGSize, UIEdgeInsets)?
var restrictedNode: ChatRecentActionsEmptyNode?
private var validLayout: (ContainerViewLayout, CGFloat)?
private(set) var validLayout: (ContainerViewLayout, CGFloat)?
private var visibleAreaInset = UIEdgeInsets()
private var searchNavigationNode: ChatSearchNavigationContentNode?
@ -1417,6 +1417,8 @@ class ChatControllerNode: ASDisplayNode, UIScrollViewDelegate {
inputPanelNode.removeFromSupernode()
inputPanelNode.prevInputPanelNode = prevInputPanelNode
inputPanelNode.addSubnode(prevInputPanelNode)
prevInputPanelNode.viewForOverlayContent?.removeFromSuperview()
} else {
dismissedInputPanelNode = self.inputPanelNode
}
@ -1426,10 +1428,9 @@ class ChatControllerNode: ASDisplayNode, UIScrollViewDelegate {
if inputPanelNode.supernode !== self {
immediatelyLayoutInputPanelAndAnimateAppearance = true
self.inputPanelClippingNode.insertSubnode(inputPanelNode, aboveSubnode: self.inputPanelBackgroundNode)
if let viewForOverlayContent = inputPanelNode.viewForOverlayContent {
self.inputPanelOverlayNode.view.addSubview(viewForOverlayContent)
}
}
if let viewForOverlayContent = inputPanelNode.viewForOverlayContent, viewForOverlayContent.superview == nil {
self.inputPanelOverlayNode.view.addSubview(viewForOverlayContent)
}
} else {
let inputPanelHeight = inputPanelNode.updateLayout(width: layout.size.width, leftInset: layout.safeInsets.left, rightInset: layout.safeInsets.right, bottomInset: layout.intrinsicInsets.bottom, additionalSideInsets: layout.additionalInsets, maxHeight: layout.size.height - insets.top - inputPanelBottomInset - 120.0, isSecondary: false, transition: transition, interfaceState: self.chatPresentationInterfaceState, metrics: layout.metrics, isMediaInputExpanded: self.inputPanelContainerNode.expansionFraction == 1.0)

View File

@ -1,11 +1,18 @@
import Foundation
import UIKit
import AsyncDisplayKit
import Display
import ContextUI
import Postbox
import TelegramCore
import SwiftSignalKit
import ChatMessageItemView
import AccountContext
import WallpaperBackgroundNode
import TelegramPresentationData
import DustEffect
import TooltipUI
import TelegramNotices
final class ChatMessageContextLocationContentSource: ContextLocationContentSource {
private let controller: ViewController
@ -97,6 +104,249 @@ final class ChatMessageContextExtractedContentSource: ContextExtractedContentSou
}
}
final class ChatViewOnceMessageContextExtractedContentSource: ContextExtractedContentSource {
let keepInPlace: Bool = false
let ignoreContentTouches: Bool = false
let blurBackground: Bool = true
let centerVertically: Bool = true
private let context: AccountContext
private let presentationData: PresentationData
private weak var chatNode: ChatControllerNode?
private weak var backgroundNode: WallpaperBackgroundNode?
private let engine: TelegramEngine
private let message: Message
private let present: (ViewController) -> Void
private var messageNodeCopy: ChatMessageItemView?
private weak var tooltipController: TooltipScreen?
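// Dismiss the context preview once playback has actually started and the global voice playlist is subsequently cleared.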
var shouldBeDismissed: Signal<Bool, NoError> {
return self.context.sharedContext.mediaManager.globalMediaPlayerState
|> filter { playlistStateAndType in
if let (_, state, _) = playlistStateAndType, case .state = state {
return true
} else {
return false
}
}
|> take(1)
|> map { _ in
return false
}
|> then(
self.context.sharedContext.mediaManager.globalMediaPlayerState
|> filter { playlistStateAndType in
return playlistStateAndType == nil
}
|> take(1)
|> map { _ in
return true
}
)
}
init(context: AccountContext, presentationData: PresentationData, chatNode: ChatControllerNode, backgroundNode: WallpaperBackgroundNode, engine: TelegramEngine, message: Message, present: @escaping (ViewController) -> Void) {
self.context = context
self.presentationData = presentationData
self.chatNode = chatNode
self.backgroundNode = backgroundNode
self.engine = engine
self.message = message
self.present = present
}
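// Provides the extracted content for the context controller: for voice messages a fresh copy of the message bubble is built via makeChatMessagePreviewItem and handed over; round videos reuse the original node in place.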
func takeView() -> ContextControllerTakeViewInfo? {
guard let chatNode = self.chatNode, let backgroundNode = self.backgroundNode, let validLayout = chatNode.validLayout?.0 else {
return nil
}
var result: ContextControllerTakeViewInfo?
var sourceNode: ContextExtractedContentContainingNode?
var sourceRect: CGRect = .zero
chatNode.historyNode.forEachItemNode { itemNode in
guard let itemNode = itemNode as? ChatMessageItemView else {
return
}
guard let item = itemNode.item else {
return
}
if item.content.contains(where: { $0.0.stableId == self.message.stableId }), let contentNode = itemNode.getMessageContextSourceNode(stableId: self.message.stableId) {
sourceNode = contentNode
sourceRect = itemNode.frame
}
}
let isIncoming = self.message.effectivelyIncoming(self.context.account.peerId)
let isVideo = (self.message.media.first(where: { $0 is TelegramMediaFile }) as? TelegramMediaFile)?.isInstantVideo ?? false
var tooltipSourceRect: CGRect = .zero
if let sourceNode {
var bubbleWidth: CGFloat = 0.0
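// "".isEmpty is always true, so the copy path below currently runs for every non-video (voice) message, incoming or outgoing.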
if (isIncoming || "".isEmpty) && !isVideo {
let messageItem = self.context.sharedContext.makeChatMessagePreviewItem(
context: self.context,
messages: [self.message],
theme: self.presentationData.theme,
strings: self.presentationData.strings,
wallpaper: self.presentationData.chatWallpaper,
fontSize: self.presentationData.chatFontSize,
chatBubbleCorners: self.presentationData.chatBubbleCorners,
dateTimeFormat: self.presentationData.dateTimeFormat,
nameOrder: self.presentationData.nameDisplayOrder,
forcedResourceStatus: nil,
tapMessage: nil,
clickThroughMessage: nil,
backgroundNode: backgroundNode,
availableReactions: nil,
accountPeer: nil,
isCentered: false,
isPreview: false
)
let params = ListViewItemLayoutParams(width: chatNode.historyNode.frame.width, leftInset: validLayout.safeInsets.left, rightInset: validLayout.safeInsets.right, availableHeight: chatNode.historyNode.frame.height, isStandalone: false)
var node: ListViewItemNode?
messageItem.nodeConfiguredForParams(async: { $0() }, params: params, synchronousLoads: false, previousItem: nil, nextItem: nil, completion: { messageNode, apply in
node = messageNode
apply().1(ListViewItemApply(isOnScreen: true))
})
if let messageNode = node as? ChatMessageItemView, let copyContentNode = messageNode.getMessageContextSourceNode(stableId: self.message.stableId) {
messageNode.frame.origin.y = chatNode.frame.height - sourceRect.origin.y - sourceRect.size.height
chatNode.addSubnode(messageNode)
result = ContextControllerTakeViewInfo(containingItem: .node(copyContentNode), contentAreaInScreenSpace: chatNode.convert(chatNode.frameForVisibleArea(), to: nil))
bubbleWidth = copyContentNode.contentNode.subnodes?.first?.frame.width ?? messageNode.frame.width
}
self.messageNodeCopy = node as? ChatMessageItemView
} else {
result = ContextControllerTakeViewInfo(containingItem: .node(sourceNode), contentAreaInScreenSpace: chatNode.convert(chatNode.frameForVisibleArea(), to: nil))
}
tooltipSourceRect = CGRect(x: isIncoming ? 22.0 : chatNode.frame.width - bubbleWidth + 10.0, y: floorToScreenPixels((chatNode.frame.height - 75.0) / 2.0) - 43.0, width: 44.0, height: 44.0)
}
if !isVideo {
let displayTooltip = { [weak self] in
guard let self else {
return
}
let absoluteFrame = tooltipSourceRect
let location = CGRect(origin: CGPoint(x: absoluteFrame.midX, y: absoluteFrame.maxY), size: CGSize())
let presentationData = self.context.sharedContext.currentPresentationData.with { $0 }
var tooltipText: String?
if isIncoming {
tooltipText = presentationData.strings.Chat_PlayOnceVoiceMessageTooltip
} else if let peer = self.message.peers[self.message.id.peerId] {
let peerName = EnginePeer(peer).compactDisplayTitle
tooltipText = presentationData.strings.Chat_PlayOnceVoiceMessageYourTooltip(peerName).string
}
if let tooltipText {
let tooltipController = TooltipScreen(
account: self.context.account,
sharedContext: self.context.sharedContext,
text: .markdown(text: tooltipText),
balancedTextLayout: true,
constrainWidth: 240.0,
style: .customBlur(UIColor(rgb: 0x18181a), 0.0),
arrowStyle: .small,
icon: nil,
location: .point(location, .bottom),
displayDuration: .custom(3.0),
inset: 8.0,
cornerRadius: 11.0,
shouldDismissOnTouch: { _, _ in
return .ignore
}
)
self.tooltipController = tooltipController
self.present(tooltipController)
}
}
if isIncoming {
let _ = (ApplicationSpecificNotice.getIncomingVoiceMessagePlayOnceTip(accountManager: self.context.sharedContext.accountManager)
|> deliverOnMainQueue).startStandalone(next: { [weak self] counter in
guard let self else {
return
}
if counter >= 2 {
return
}
Queue.mainQueue().after(0.3) {
displayTooltip()
}
let _ = ApplicationSpecificNotice.incrementIncomingVoiceMessagePlayOnceTip(accountManager: self.context.sharedContext.accountManager).startStandalone()
})
} else {
let _ = (ApplicationSpecificNotice.getOutgoingVoiceMessagePlayOnceTip(accountManager: self.context.sharedContext.accountManager)
|> deliverOnMainQueue).startStandalone(next: { [weak self] counter in
guard let self else {
return
}
if counter >= 2 {
return
}
Queue.mainQueue().after(0.3) {
displayTooltip()
}
let _ = ApplicationSpecificNotice.incrementOutgoingVoiceMessagePlayOnceTip(accountManager: self.context.sharedContext.accountManager).startStandalone()
})
}
}
return result
}
private var dustEffectLayer: DustEffectLayer?
func putBack() -> ContextControllerPutBackViewInfo? {
guard let chatNode = self.chatNode else {
return nil
}
if let tooltipController = self.tooltipController {
tooltipController.dismiss()
}
if let messageNodeCopy = self.messageNodeCopy, let sourceView = messageNodeCopy.supernode?.view, let contentNode = messageNodeCopy.getMessageContextSourceNode(stableId: nil)?.contentNode, let parentNode = contentNode.supernode?.supernode?.supernode {
let dustEffectLayer = DustEffectLayer()
dustEffectLayer.position = sourceView.bounds.center
dustEffectLayer.bounds = CGRect(origin: CGPoint(), size: sourceView.bounds.size)
dustEffectLayer.zPosition = 10.0
parentNode.layer.addSublayer(dustEffectLayer)
guard let (image, subFrame) = messageNodeCopy.makeContentSnapshot() else {
return nil
}
var itemFrame = subFrame //messageNodeCopy.layer.convert(subFrame, to: dustEffectLayer)
itemFrame.origin.y = floorToScreenPixels((sourceView.frame.height - subFrame.height) / 2.0)
dustEffectLayer.addItem(frame: itemFrame, image: image)
messageNodeCopy.removeFromSupernode()
contentNode.removeFromSupernode()
return nil
} else {
var result: ContextControllerPutBackViewInfo?
chatNode.historyNode.forEachItemNode { itemNode in
guard let itemNode = itemNode as? ChatMessageItemView else {
return
}
guard let item = itemNode.item else {
return
}
if item.content.contains(where: { $0.0.stableId == self.message.stableId }) {
result = ContextControllerPutBackViewInfo(contentAreaInScreenSpace: chatNode.convert(chatNode.frameForVisibleArea(), to: nil))
}
}
return result
}
}
}
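
A note on the shouldBeDismissed signal above: it first waits for global playback to report a state (the view-once message has started playing) and only then waits for the player to go idle, at which point the preview is torn down. A minimal sketch of the same two-phase pattern in isolation, using an illustrative ValuePromise in place of the media manager's state (none of these names are part of this commit):

import SwiftSignalKit

// Illustrative stand-in for globalMediaPlayerState: false while idle, true while playing.
let isPlaying = ValuePromise<Bool>(false)

// Emits false once playback starts, then true once it stops again.
let shouldDismiss: Signal<Bool, NoError> = isPlaying.get()
|> filter { $0 }
|> take(1)
|> map { _ in false }
|> then(
    isPlaying.get()
    |> filter { !$0 }
    |> take(1)
    |> map { _ in true }
)
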
final class ChatMessageReactionContextExtractedContentSource: ContextExtractedContentSource {
let keepInPlace: Bool = false
let ignoreContentTouches: Bool = true

View File

@@ -15,10 +15,48 @@ import ChatPresentationInterfaceState
import ChatSendButtonRadialStatusNode
import AudioWaveformNode
import ChatInputPanelNode
import TooltipUI
import TelegramNotices
extension AudioWaveformNode: CustomMediaPlayerScrubbingForegroundNode {
}
final class ChatRecordingPreviewViewForOverlayContent: UIView, ChatInputPanelViewForOverlayContent {
let ignoreHit: (UIView, CGPoint) -> Bool
init(ignoreHit: @escaping (UIView, CGPoint) -> Bool) {
self.ignoreHit = ignoreHit
super.init(frame: CGRect())
}
required init(coder: NSCoder) {
preconditionFailure()
}
func maybeDismissContent(point: CGPoint) {
for subview in self.subviews.reversed() {
if let _ = subview.hitTest(self.convert(point, to: subview), with: nil) {
return
}
}
}
override func hitTest(_ point: CGPoint, with event: UIEvent?) -> UIView? {
for subview in self.subviews.reversed() {
if let result = subview.hitTest(self.convert(point, to: subview), with: event) {
return result
}
}
// Neither branch claims the touch here, so unhandled points fall through to the content behind this overlay.
if event == nil || self.ignoreHit(self, point) {
return nil
}
return nil
}
}
final class ChatRecordingPreviewInputPanelNode: ChatInputPanelNode {
let deleteButton: HighlightableButtonNode
let binNode: AnimationNode
@@ -29,6 +67,9 @@ final class ChatRecordingPreviewInputPanelNode: ChatInputPanelNode {
private let waveformButton: ASButtonNode
let waveformBackgroundNode: ASImageNode
private var viewOnce = false
let viewOnceButton: ChatRecordingViewOnceButtonNode
private let waveformNode: AudioWaveformNode
private let waveformForegroundNode: AudioWaveformNode
let waveformScubberNode: MediaPlayerScrubbingNode
@@ -63,6 +104,8 @@ final class ChatRecordingPreviewInputPanelNode: ChatInputPanelNode {
self.sendButton.displaysAsynchronously = false
self.sendButton.setImage(PresentationResourcesChat.chatInputPanelSendButtonImage(theme), for: [])
self.viewOnceButton = ChatRecordingViewOnceButtonNode()
self.waveformBackgroundNode = ASImageNode()
self.waveformBackgroundNode.isLayerBacked = true
self.waveformBackgroundNode.displaysAsynchronously = false
@@ -92,6 +135,21 @@ final class ChatRecordingPreviewInputPanelNode: ChatInputPanelNode {
super.init()
self.viewForOverlayContent = ChatRecordingPreviewViewForOverlayContent(
ignoreHit: { [weak self] view, point in
guard let strongSelf = self else {
return false
}
if strongSelf.view.hitTest(view.convert(point, to: strongSelf.view), with: nil) != nil {
return true
}
if view.convert(point, to: strongSelf.view).y > strongSelf.view.bounds.maxY {
return true
}
return false
}
)
self.addSubnode(self.deleteButton)
self.deleteButton.addSubnode(self.binNode)
self.addSubnode(self.waveformBackgroundNode)
@@ -111,9 +169,10 @@ final class ChatRecordingPreviewInputPanelNode: ChatInputPanelNode {
}
}
}
self.deleteButton.addTarget(self, action: #selector(self.deletePressed), forControlEvents: [.touchUpInside])
self.sendButton.addTarget(self, action: #selector(self.sendPressed), forControlEvents: [.touchUpInside])
self.viewOnceButton.addTarget(self, action: #selector(self.viewOncePressed), forControlEvents: [.touchUpInside])
self.waveformButton.addTarget(self, action: #selector(self.waveformPressed), forControlEvents: .touchUpInside)
}
@@ -135,9 +194,38 @@ final class ChatRecordingPreviewInputPanelNode: ChatInputPanelNode {
}
strongSelf.interfaceInteraction?.displaySendMessageOptions(strongSelf.sendButton, gesture)
}
if let viewForOverlayContent = self.viewForOverlayContent {
viewForOverlayContent.addSubnode(self.viewOnceButton)
}
}
private func maybePresentViewOnceTooltip() {
guard let context = self.context else {
return
}
let _ = (ApplicationSpecificNotice.getVoiceMessagesPlayOnceSuggestion(accountManager: context.sharedContext.accountManager)
|> deliverOnMainQueue).startStandalone(next: { [weak self] counter in
guard let self, let interfaceState = self.presentationInterfaceState else {
return
}
if counter >= 3 {
return
}
Queue.mainQueue().after(0.3) {
self.displayViewOnceTooltip(text: interfaceState.strings.Chat_TapToPlayVoiceMessageOnceTooltip, hasIcon: true)
}
let _ = ApplicationSpecificNotice.incrementVoiceMessagesPlayOnceSuggestion(accountManager: context.sharedContext.accountManager).startStandalone()
})
}
override func updateLayout(width: CGFloat, leftInset: CGFloat, rightInset: CGFloat, bottomInset: CGFloat, additionalSideInsets: UIEdgeInsets, maxHeight: CGFloat, isSecondary: Bool, transition: ContainedViewLayoutTransition, interfaceState: ChatPresentationInterfaceState, metrics: LayoutMetrics, isMediaInputExpanded: Bool) -> CGFloat {
var isFirstTime = false
if self.presentationInterfaceState == nil {
isFirstTime = true
}
if self.presentationInterfaceState != interfaceState {
var updateWaveform = false
if self.presentationInterfaceState?.recordedMediaPreview != interfaceState.recordedMediaPreview {
@@ -148,8 +236,9 @@ final class ChatRecordingPreviewInputPanelNode: ChatInputPanelNode {
self.sendButton.accessibilityLabel = interfaceState.strings.VoiceOver_MessageContextSend
self.waveformButton.accessibilityLabel = interfaceState.strings.VoiceOver_Chat_RecordPreviewVoiceMessage
}
self.presentationInterfaceState = interfaceState
self.presentationInterfaceState = interfaceState
if let recordedMediaPreview = interfaceState.recordedMediaPreview, updateWaveform {
self.waveformNode.setup(color: interfaceState.theme.chat.inputPanel.actionControlForegroundColor.withAlphaComponent(0.5), gravity: .center, waveform: recordedMediaPreview.waveform)
self.waveformForegroundNode.setup(color: interfaceState.theme.chat.inputPanel.actionControlForegroundColor, gravity: .center, waveform: recordedMediaPreview.waveform)
@@ -182,12 +271,20 @@ final class ChatRecordingPreviewInputPanelNode: ChatInputPanelNode {
}
}
let panelHeight = defaultHeight(metrics: metrics)
if isFirstTime {
self.maybePresentViewOnceTooltip()
}
let panelHeight = defaultHeight(metrics: metrics)
transition.updateFrame(node: self.deleteButton, frame: CGRect(origin: CGPoint(x: leftInset + 2.0 - UIScreenPixel, y: 1), size: CGSize(width: 40.0, height: 40)))
transition.updateFrame(node: self.sendButton, frame: CGRect(origin: CGPoint(x: width - rightInset - 43.0 - UIScreenPixel, y: 2 - UIScreenPixel), size: CGSize(width: 44.0, height: 44)))
self.binNode.frame = self.deleteButton.bounds
let viewOnceSize = self.viewOnceButton.update(theme: interfaceState.theme)
let viewOnceButtonFrame = CGRect(origin: CGPoint(x: width - rightInset - 44.0 - UIScreenPixel, y: -64.0), size: viewOnceSize)
transition.updateFrame(node: self.viewOnceButton, frame: viewOnceButtonFrame)
var isScheduledMessages = false
if case .scheduledMessages = interfaceState.subject {
isScheduledMessages = true
@@ -229,6 +326,16 @@ final class ChatRecordingPreviewInputPanelNode: ChatInputPanelNode {
if let prevTextInputPanelNode = self.prevInputPanelNode as? ChatTextInputPanelNode {
self.prevInputPanelNode = nil
self.viewOnce = prevTextInputPanelNode.viewOnce
self.viewOnceButton.update(isSelected: self.viewOnce, animated: false)
prevTextInputPanelNode.viewOnceButton.isHidden = true
prevTextInputPanelNode.viewOnce = false
self.viewOnceButton.layer.animatePosition(from: prevTextInputPanelNode.viewOnceButton.position, to: self.viewOnceButton.position, duration: 0.3, timingFunction: kCAMediaTimingFunctionSpring, completion: { _ in
prevTextInputPanelNode.viewOnceButton.isHidden = false
prevTextInputPanelNode.viewOnceButton.update(isSelected: false, animated: false)
})
if let audioRecordingDotNode = prevTextInputPanelNode.audioRecordingDotNode {
let startAlpha = CGFloat(audioRecordingDotNode.layer.presentation()?.opacity ?? 1.0)
audioRecordingDotNode.layer.removeAllAnimations()
@@ -283,12 +390,63 @@ final class ChatRecordingPreviewInputPanelNode: ChatInputPanelNode {
}
@objc func deletePressed() {
self.tooltipController?.dismiss()
self.mediaPlayer?.pause()
self.interfaceInteraction?.deleteRecordedMedia()
}
@objc func sendPressed() {
self.interfaceInteraction?.sendRecordedMedia(false)
self.tooltipController?.dismiss()
self.interfaceInteraction?.sendRecordedMedia(false, self.viewOnce)
}
private weak var tooltipController: TooltipScreen?
@objc private func viewOncePressed() {
guard let context = self.context, let interfaceState = self.presentationInterfaceState else {
return
}
self.viewOnce = !self.viewOnce
self.viewOnceButton.update(isSelected: self.viewOnce, animated: true)
self.tooltipController?.dismiss()
if self.viewOnce {
self.displayViewOnceTooltip(text: interfaceState.strings.Chat_PlayVoiceMessageOnceTooltip, hasIcon: true)
let _ = ApplicationSpecificNotice.incrementVoiceMessagesPlayOnceSuggestion(accountManager: context.sharedContext.accountManager, count: 3).startStandalone()
}
}
private func displayViewOnceTooltip(text: String, hasIcon: Bool) {
guard let context = self.context, let parentController = self.interfaceInteraction?.chatController() else {
return
}
let absoluteFrame = self.viewOnceButton.view.convert(self.viewOnceButton.bounds, to: parentController.view)
let location = CGRect(origin: CGPoint(x: absoluteFrame.midX - 20.0, y: absoluteFrame.midY), size: CGSize())
let tooltipController = TooltipScreen(
account: context.account,
sharedContext: context.sharedContext,
text: .markdown(text: text),
balancedTextLayout: true,
constrainWidth: 240.0,
style: .customBlur(UIColor(rgb: 0x18181a), 0.0),
arrowStyle: .small,
icon: hasIcon ? .animation(name: "anim_autoremove_on", delay: 0.1, tintColor: nil) : nil,
location: .point(location, .right),
displayDuration: .default,
inset: 8.0,
cornerRadius: 8.0,
shouldDismissOnTouch: { _, _ in
return .ignore
}
)
self.tooltipController = tooltipController
parentController.present(tooltipController, in: .window(.root))
}
@objc func waveformPressed() {
@@ -353,3 +511,105 @@ private final class PlayPauseIconNode: ManagedAnimationNode {
}
}
}
final class ChatRecordingViewOnceButtonNode: HighlightTrackingButtonNode {
private let backgroundNode: NavigationBackgroundNode
private let borderNode: ASImageNode
private let iconNode: ASImageNode
private var theme: PresentationTheme?
override init(pointerStyle: PointerStyle? = nil) {
self.backgroundNode = NavigationBackgroundNode(color: .clear)
self.backgroundNode.isUserInteractionEnabled = false
self.borderNode = ASImageNode()
self.borderNode.isUserInteractionEnabled = false
self.iconNode = ASImageNode()
self.iconNode.isUserInteractionEnabled = false
super.init(pointerStyle: pointerStyle)
self.addSubnode(self.backgroundNode)
self.addSubnode(self.borderNode)
self.addSubnode(self.iconNode)
self.highligthedChanged = { [weak self] highlighted in
if let self, self.bounds.width > 0.0 {
let topScale: CGFloat = (self.bounds.width - 8.0) / self.bounds.width
let maxScale: CGFloat = (self.bounds.width + 2.0) / self.bounds.width
if highlighted {
self.layer.removeAnimation(forKey: "sublayerTransform")
let transition = ContainedViewLayoutTransition.animated(duration: 0.2, curve: .easeInOut)
transition.updateTransformScale(node: self, scale: topScale)
} else {
let transition = ContainedViewLayoutTransition.immediate
transition.updateTransformScale(node: self, scale: 1.0)
self.layer.animateScale(from: topScale, to: maxScale, duration: 0.13, timingFunction: CAMediaTimingFunctionName.easeOut.rawValue, removeOnCompletion: false, completion: { [weak self] _ in
guard let self else {
return
}
self.layer.animateScale(from: maxScale, to: 1.0, duration: 0.1, timingFunction: CAMediaTimingFunctionName.easeIn.rawValue)
})
}
}
}
}
private var innerIsSelected = false
func update(isSelected: Bool, animated: Bool = false) {
guard let theme = self.theme else {
return
}
let updated = self.iconNode.image == nil || self.innerIsSelected != isSelected
self.innerIsSelected = isSelected
if animated, updated && self.iconNode.image != nil, let snapshot = self.iconNode.view.snapshotContentTree() {
self.view.addSubview(snapshot)
snapshot.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2, removeOnCompletion: false, completion: { _ in
snapshot.removeFromSuperview()
})
self.iconNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2)
}
if updated {
self.iconNode.image = generateTintedImage(image: UIImage(bundleImageName: self.innerIsSelected ? "Media Gallery/ViewOnceOn" : "Media Gallery/ViewOnce"), color: theme.chat.inputPanel.panelControlAccentColor)
}
}
func update(theme: PresentationTheme) -> CGSize {
let size = CGSize(width: 44.0, height: 44.0)
let innerSize = CGSize(width: 40.0, height: 40.0)
if self.theme !== theme {
self.theme = theme
self.backgroundNode.updateColor(color: theme.chat.inputPanel.panelBackgroundColor, transition: .immediate)
self.borderNode.image = generateCircleImage(diameter: innerSize.width, lineWidth: 0.5, color: theme.chat.historyNavigation.strokeColor, backgroundColor: nil)
self.iconNode.image = generateTintedImage(image: UIImage(bundleImageName: self.innerIsSelected ? "Media Gallery/ViewOnceOn" : "Media Gallery/ViewOnce"), color: theme.chat.inputPanel.panelControlAccentColor)
}
let backgroundFrame = CGRect(origin: CGPoint(x: floorToScreenPixels(size.width / 2.0 - innerSize.width / 2.0), y: floorToScreenPixels(size.height / 2.0 - innerSize.height / 2.0)), size: innerSize)
self.backgroundNode.update(size: innerSize, cornerRadius: innerSize.width / 2.0, transition: .immediate, beginWithCurrentState: false)
self.backgroundNode.frame = backgroundFrame
if let borderImage = self.borderNode.image {
let borderFrame = CGRect(origin: CGPoint(x: floorToScreenPixels(size.width / 2.0 - borderImage.size.width / 2.0), y: floorToScreenPixels(size.height / 2.0 - borderImage.size.height / 2.0)), size: borderImage.size)
self.borderNode.frame = borderFrame
}
if let iconImage = self.iconNode.image {
let iconFrame = CGRect(origin: CGPoint(x: floorToScreenPixels(size.width / 2.0 - iconImage.size.width / 2.0), y: floorToScreenPixels(size.height / 2.0 - iconImage.size.height / 2.0)), size: iconImage.size)
self.iconNode.frame = iconFrame
}
return size
}
}
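
For orientation, a condensed sketch of how the input panels above drive this button; assume self is a panel node that already declares viewOnce and viewOnceButton, and the frame offset is illustrative rather than normative:

// Wiring, done once when the panel is set up.
self.viewOnceButton.addTarget(self, action: #selector(self.viewOncePressed), forControlEvents: [.touchUpInside])

// Layout: update(theme:) applies the theme and returns the button's 44x44 size.
let viewOnceSize = self.viewOnceButton.update(theme: interfaceState.theme)
self.viewOnceButton.frame = CGRect(origin: CGPoint(x: width - rightInset - 44.0, y: -64.0), size: viewOnceSize)

// Toggling: flip the flag and let the button cross-fade its icon.
self.viewOnce = !self.viewOnce
self.viewOnceButton.update(isSelected: self.viewOnce, animated: true)
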

View File

@@ -562,6 +562,9 @@ class ChatTextInputPanelNode: ChatInputPanelNode, ASEditableTextNodeDelegate, Ch
var audioRecordingCancelIndicator: ChatTextInputAudioRecordingCancelIndicator?
var animatingBinNode: AnimationNode?
var viewOnce = false
let viewOnceButton: ChatRecordingViewOnceButtonNode
private var accessoryItemButtons: [(ChatTextInputAccessoryItem, AccessoryItemIconButtonNode)] = []
private var validLayout: (CGFloat, CGFloat, CGFloat, CGFloat, UIEdgeInsets, CGFloat, LayoutMetrics, Bool, Bool)?
@@ -847,6 +850,8 @@ class ChatTextInputPanelNode: ChatInputPanelNode, ASEditableTextNodeDelegate, Ch
self.counterTextNode = ImmediateTextNode()
self.counterTextNode.textAlignment = .center
self.viewOnceButton = ChatRecordingViewOnceButtonNode()
super.init()
self.viewForOverlayContent = ChatTextViewForOverlayContent(
@@ -958,13 +963,15 @@ class ChatTextInputPanelNode: ChatInputPanelNode, ASEditableTextNodeDelegate, Ch
if let strongSelf = self, let interfaceState = strongSelf.presentationInterfaceState, let interfaceInteraction = strongSelf.interfaceInteraction {
if let _ = interfaceState.inputTextPanelState.mediaRecordingState {
if sendMedia {
interfaceInteraction.finishMediaRecording(.send)
interfaceInteraction.finishMediaRecording(.send(viewOnce: strongSelf.viewOnce))
} else {
interfaceInteraction.finishMediaRecording(.dismiss)
}
} else {
interfaceInteraction.finishMediaRecording(.dismiss)
}
strongSelf.viewOnce = false
strongSelf.tooltipController?.dismiss()
}
}
self.actionButtons.micButton.offsetRecordingControls = { [weak self] in
@@ -984,6 +991,8 @@ class ChatTextInputPanelNode: ChatInputPanelNode, ASEditableTextNodeDelegate, Ch
self.actionButtons.micButton.stopRecording = { [weak self] in
if let strongSelf = self, let interfaceInteraction = strongSelf.interfaceInteraction {
interfaceInteraction.stopMediaRecording()
strongSelf.tooltipController?.dismiss()
}
}
self.actionButtons.micButton.updateLocked = { [weak self] _ in
@@ -1071,6 +1080,8 @@ class ChatTextInputPanelNode: ChatInputPanelNode, ASEditableTextNodeDelegate, Ch
return EmojiTextAttachmentView(context: context, userLocation: .other, emoji: emoji, file: emoji.file, cache: presentationContext.animationCache, renderer: presentationContext.animationRenderer, placeholderColor: presentationInterfaceState.theme.chat.inputPanel.inputTextColor.withAlphaComponent(0.12), pointSize: CGSize(width: pointSize, height: pointSize))
}
}
self.viewOnceButton.addTarget(self, action: #selector(self.viewOncePressed), forControlEvents: [.touchUpInside])
}
required init?(coder aDecoder: NSCoder) {
@@ -1084,6 +1095,14 @@ class ChatTextInputPanelNode: ChatInputPanelNode, ASEditableTextNodeDelegate, Ch
self.currentEmojiSuggestion?.disposable.dispose()
}
override func didLoad() {
super.didLoad()
if let viewForOverlayContent = self.viewForOverlayContent {
viewForOverlayContent.addSubnode(self.viewOnceButton)
}
}
func loadTextInputNodeIfNeeded() {
if self.textInputNode == nil {
self.loadTextInputNode()
@@ -2001,7 +2020,9 @@ class ChatTextInputPanelNode: ChatInputPanelNode, ASEditableTextNodeDelegate, Ch
animateCancelSlideIn = transition.isAnimated && mediaRecordingState != nil
audioRecordingCancelIndicator = ChatTextInputAudioRecordingCancelIndicator(theme: interfaceState.theme, strings: interfaceState.strings, cancel: { [weak self] in
self?.viewOnce = false
self?.interfaceInteraction?.finishMediaRecording(.dismiss)
self?.tooltipController?.dismiss()
})
self.audioRecordingCancelIndicator = audioRecordingCancelIndicator
self.clippingNode.insertSubnode(audioRecordingCancelIndicator, at: 0)
@@ -2272,7 +2293,9 @@ class ChatTextInputPanelNode: ChatInputPanelNode, ASEditableTextNodeDelegate, Ch
mediaRecordingAccessibilityArea.accessibilityTraits = [.button, .startsMediaSession]
self.mediaRecordingAccessibilityArea = mediaRecordingAccessibilityArea
mediaRecordingAccessibilityArea.activate = { [weak self] in
self?.interfaceInteraction?.finishMediaRecording(.send)
if let self {
self.interfaceInteraction?.finishMediaRecording(.send(viewOnce: self.viewOnce))
}
return true
}
self.clippingNode.insertSubnode(mediaRecordingAccessibilityArea, aboveSubnode: self.actionButtons)
@@ -2510,6 +2533,16 @@ class ChatTextInputPanelNode: ChatInputPanelNode, ASEditableTextNodeDelegate, Ch
if let prevPreviewInputPanelNode = self.prevInputPanelNode as? ChatRecordingPreviewInputPanelNode {
self.prevInputPanelNode = nil
if prevPreviewInputPanelNode.viewOnceButton.alpha > 0.0 {
if let snapshotView = prevPreviewInputPanelNode.viewOnceButton.view.snapshotContentTree() {
snapshotView.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2, removeOnCompletion: false, completion: { _ in
snapshotView.removeFromSuperview()
})
snapshotView.layer.animateScale(from: 1.0, to: 0.0, duration: 0.2, removeOnCompletion: false)
self.viewForOverlayContent?.addSubview(snapshotView)
}
}
prevPreviewInputPanelNode.gestureRecognizer?.isEnabled = false
prevPreviewInputPanelNode.isUserInteractionEnabled = false
@@ -2586,6 +2619,22 @@ class ChatTextInputPanelNode: ChatInputPanelNode, ASEditableTextNodeDelegate, Ch
}
}
let viewOnceSize = self.viewOnceButton.update(theme: interfaceState.theme)
let viewOnceButtonFrame = CGRect(origin: CGPoint(x: width - rightInset - 44.0 - UIScreenPixel, y: -152.0), size: viewOnceSize)
self.viewOnceButton.bounds = CGRect(origin: .zero, size: viewOnceButtonFrame.size)
transition.updatePosition(node: self.viewOnceButton, position: viewOnceButtonFrame.center)
var viewOnceIsVisible = false
if let _ = interfaceState.renderedPeer?.peer as? TelegramUser, let recordingState = interfaceState.inputTextPanelState.mediaRecordingState, case let .audio(_, isLocked) = recordingState, isLocked {
viewOnceIsVisible = true
}
if self.viewOnceButton.alpha.isZero && viewOnceIsVisible {
self.viewOnceButton.update(isSelected: self.viewOnce, animated: false)
}
transition.updateAlpha(node: self.viewOnceButton, alpha: viewOnceIsVisible ? 1.0 : 0.0)
transition.updateTransformScale(node: self.viewOnceButton, scale: viewOnceIsVisible ? 1.0 : 0.01)
var clippingDelta: CGFloat = 0.0
if case let .media(_, _, focused) = interfaceState.inputMode, focused {
clippingDelta = -panelHeight
@@ -2596,6 +2645,50 @@ class ChatTextInputPanelNode: ChatInputPanelNode, ASEditableTextNodeDelegate, Ch
return panelHeight
}
@objc private func viewOncePressed() {
guard let interfaceState = self.presentationInterfaceState else {
return
}
self.viewOnce = !self.viewOnce
self.viewOnceButton.update(isSelected: self.viewOnce, animated: true)
self.tooltipController?.dismiss()
if self.viewOnce {
self.displayViewOnceTooltip(text: interfaceState.strings.Chat_PlayVoiceMessageOnceTooltip)
}
}
private func displayViewOnceTooltip(text: String) {
guard let context = self.context, let parentController = self.interfaceInteraction?.chatController() else {
return
}
let absoluteFrame = self.viewOnceButton.view.convert(self.viewOnceButton.bounds, to: parentController.view)
let location = CGRect(origin: CGPoint(x: absoluteFrame.midX - 20.0, y: absoluteFrame.midY), size: CGSize())
let tooltipController = TooltipScreen(
account: context.account,
sharedContext: context.sharedContext,
text: .plain(text: text),
balancedTextLayout: true,
constrainWidth: 240.0,
style: .customBlur(UIColor(rgb: 0x18181a), 0.0),
arrowStyle: .small,
icon: .animation(name: "anim_autoremove_on", delay: 0.1, tintColor: nil),
location: .point(location, .right),
displayDuration: .default,
inset: 8.0,
cornerRadius: 8.0,
shouldDismissOnTouch: { _, _ in
return .ignore
}
)
self.tooltipController = tooltipController
parentController.present(tooltipController, in: .window(.root))
}
override func canHandleTransition(from prevInputPanelNode: ChatInputPanelNode?) -> Bool {
return prevInputPanelNode is ChatRecordingPreviewInputPanelNode
}
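
The finishMediaRecording call sites above now pass .send(viewOnce:) instead of a bare .send. The enum itself is declared elsewhere, so the following is only a hedged reconstruction of the shape those call sites imply (the type name is invented and the real declaration may contain additional cases):

// Assumed shape, reconstructed from the call sites in this file.
enum RecordingFinishAction {
    case dismiss
    case send(viewOnce: Bool)
}

func finish(_ action: RecordingFinishAction) {
    switch action {
    case .dismiss:
        break // discard the recording
    case let .send(viewOnce):
        _ = viewOnce // send, marking the voice note as play-once when requested
    }
}
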

View File

@@ -461,7 +461,7 @@ final class OverlayPlayerControlsNode: ASDisplayNode {
var canShare = true
if let (_, valueOrLoading, _) = value, case let .state(value) = valueOrLoading, let source = value.item.playbackData?.source {
switch source {
case let .telegramFile(fileReference, isCopyProtected):
case let .telegramFile(fileReference, isCopyProtected, _):
canShare = !isCopyProtected
strongSelf.currentFileReference = fileReference
if let size = fileReference.media.size {

View File

@@ -63,7 +63,7 @@ final class MessageMediaPlaylistItem: SharedMediaPlaylistItem {
lazy var playbackData: SharedMediaPlaybackData? = {
if let file = extractFileMedia(self.message) {
let fileReference = FileMediaReference.message(message: MessageReference(self.message), media: file)
let source = SharedMediaPlaybackDataSource.telegramFile(reference: fileReference, isCopyProtected: self.message.isCopyProtected())
let source = SharedMediaPlaybackDataSource.telegramFile(reference: fileReference, isCopyProtected: self.message.isCopyProtected(), isViewOnce: self.message.minAutoremoveOrClearTimeout == viewOnceTimeout)
for attribute in file.attributes {
switch attribute {
case let .Audio(isVoice, _, _, _, _):
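
The new isViewOnce argument above is derived by comparing the message's minimum auto-remove timeout to the view-once sentinel. A minimal sketch of that check factored into a helper, assuming viewOnceTimeout is the existing sentinel constant used elsewhere in the codebase (the helper name is illustrative):

// Hypothetical convenience wrapper around the inline check used when building the playback source.
func isViewOnceMessage(_ message: Message) -> Bool {
    return message.minAutoremoveOrClearTimeout == viewOnceTimeout
}
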

View File

@@ -1603,7 +1603,7 @@ public final class SharedAccountContextImpl: SharedAccountContext {
return presentAddMembersImpl(context: context, updatedPresentationData: updatedPresentationData, parentController: parentController, groupPeer: groupPeer, selectAddMemberDisposable: selectAddMemberDisposable, addMemberDisposable: addMemberDisposable)
}
public func makeChatMessagePreviewItem(context: AccountContext, messages: [Message], theme: PresentationTheme, strings: PresentationStrings, wallpaper: TelegramWallpaper, fontSize: PresentationFontSize, chatBubbleCorners: PresentationChatBubbleCorners, dateTimeFormat: PresentationDateTimeFormat, nameOrder: PresentationPersonNameOrder, forcedResourceStatus: FileMediaResourceStatus?, tapMessage: ((Message) -> Void)?, clickThroughMessage: (() -> Void)? = nil, backgroundNode: ASDisplayNode?, availableReactions: AvailableReactions?, accountPeer: Peer?, isCentered: Bool) -> ListViewItem {
public func makeChatMessagePreviewItem(context: AccountContext, messages: [Message], theme: PresentationTheme, strings: PresentationStrings, wallpaper: TelegramWallpaper, fontSize: PresentationFontSize, chatBubbleCorners: PresentationChatBubbleCorners, dateTimeFormat: PresentationDateTimeFormat, nameOrder: PresentationPersonNameOrder, forcedResourceStatus: FileMediaResourceStatus?, tapMessage: ((Message) -> Void)?, clickThroughMessage: (() -> Void)? = nil, backgroundNode: ASDisplayNode?, availableReactions: AvailableReactions?, accountPeer: Peer?, isCentered: Bool, isPreview: Bool) -> ListViewItem {
let controllerInteraction: ChatControllerInteraction
controllerInteraction = ChatControllerInteraction(openMessage: { _, _ in
@@ -1698,7 +1698,7 @@ public final class SharedAccountContextImpl: SharedAccountContext {
chatLocation = .peer(id: messages.first!.id.peerId)
}
return ChatMessageItemImpl(presentationData: ChatPresentationData(theme: ChatPresentationThemeData(theme: theme, wallpaper: wallpaper), fontSize: fontSize, strings: strings, dateTimeFormat: dateTimeFormat, nameDisplayOrder: nameOrder, disableAnimations: false, largeEmoji: false, chatBubbleCorners: chatBubbleCorners, animatedEmojiScale: 1.0, isPreview: true), context: context, chatLocation: chatLocation, associatedData: ChatMessageItemAssociatedData(automaticDownloadPeerType: .contact, automaticDownloadPeerId: nil, automaticDownloadNetworkType: .cellular, isRecentActions: false, subject: nil, contactsPeerIds: Set(), animatedEmojiStickers: [:], forcedResourceStatus: forcedResourceStatus, availableReactions: availableReactions, defaultReaction: nil, isPremium: false, accountPeer: accountPeer.flatMap(EnginePeer.init), forceInlineReactions: true), controllerInteraction: controllerInteraction, content: content, disableDate: true, additionalContent: nil)
return ChatMessageItemImpl(presentationData: ChatPresentationData(theme: ChatPresentationThemeData(theme: theme, wallpaper: wallpaper), fontSize: fontSize, strings: strings, dateTimeFormat: dateTimeFormat, nameDisplayOrder: nameOrder, disableAnimations: false, largeEmoji: false, chatBubbleCorners: chatBubbleCorners, animatedEmojiScale: 1.0, isPreview: isPreview), context: context, chatLocation: chatLocation, associatedData: ChatMessageItemAssociatedData(automaticDownloadPeerType: .contact, automaticDownloadPeerId: nil, automaticDownloadNetworkType: .cellular, isRecentActions: false, subject: nil, contactsPeerIds: Set(), animatedEmojiStickers: [:], forcedResourceStatus: forcedResourceStatus, availableReactions: availableReactions, defaultReaction: nil, isPremium: false, accountPeer: accountPeer.flatMap(EnginePeer.init), forceInlineReactions: true), controllerInteraction: controllerInteraction, content: content, disableDate: true, additionalContent: nil)
}
public func makeChatMessageDateHeaderItem(context: AccountContext, timestamp: Int32, theme: PresentationTheme, strings: PresentationStrings, wallpaper: TelegramWallpaper, fontSize: PresentationFontSize, chatBubbleCorners: PresentationChatBubbleCorners, dateTimeFormat: PresentationDateTimeFormat, nameOrder: PresentationPersonNameOrder) -> ListViewItemHeader {

View File

@@ -229,13 +229,13 @@ final class SharedMediaPlayer {
switch playbackData.type {
case .voice, .music:
switch playbackData.source {
case let .telegramFile(fileReference, _):
case let .telegramFile(fileReference, _, _):
strongSelf.playbackItem = .audio(MediaPlayer(audioSessionManager: strongSelf.audioSession, postbox: strongSelf.account.postbox, userLocation: .other, userContentType: .audio, resourceReference: fileReference.resourceReference(fileReference.media.resource), streamable: playbackData.type == .music ? .conservative : .none, video: false, preferSoftwareDecoding: false, enableSound: true, baseRate: rateValue, fetchAutomatically: true, playAndRecord: controlPlaybackWithProximity, isAudioVideoMessage: playbackData.type == .voice))
}
case .instantVideo:
if let mediaManager = strongSelf.mediaManager, let item = item as? MessageMediaPlaylistItem {
switch playbackData.source {
case let .telegramFile(fileReference, _):
case let .telegramFile(fileReference, _, _):
let videoNode = OverlayInstantVideoNode(postbox: strongSelf.account.postbox, audioSession: strongSelf.audioSession, manager: mediaManager.universalVideoManager, content: NativeVideoContent(id: .message(item.message.stableId, fileReference.media.fileId), userLocation: .peer(item.message.id.peerId), fileReference: fileReference, enableSound: false, baseRate: rateValue, isAudioVideoMessage: true, captureProtected: item.message.isCopyProtected(), storeAfterDownload: nil), close: { [weak mediaManager] in
mediaManager?.setPlaylist(nil, type: .voice, control: .playback(.pause))
})
@@ -493,7 +493,7 @@ final class SharedMediaPlayer {
let fetchedCurrentSignal: Signal<Never, NoError>
let fetchedNextSignal: Signal<Never, NoError>
switch current {
case let .telegramFile(file, _):
case let .telegramFile(file, _, _):
fetchedCurrentSignal = self.account.postbox.mediaBox.resourceData(file.media.resource)
|> mapToSignal { data -> Signal<Void, NoError> in
if data.complete {
@@ -506,7 +506,7 @@ final class SharedMediaPlayer {
|> ignoreValues
}
switch next {
case let .telegramFile(file, _):
case let .telegramFile(file, _, _):
fetchedNextSignal = fetchedMediaResource(mediaBox: self.account.postbox.mediaBox, userLocation: .other, userContentType: .audio, reference: file.resourceReference(file.media.resource))
|> ignoreValues
|> `catch` { _ -> Signal<Never, NoError> in
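
With the extra isViewOnce field, every exhaustive match on SharedMediaPlaybackDataSource gains a third pattern position; the hunks above simply bind it to a wildcard. A hedged sketch of a consumer that actually consults both flags (the canPrefetch helper is illustrative and not part of this commit):

// Illustrative gate: skip sharing/prefetch work for protected or play-once items.
func canPrefetch(_ source: SharedMediaPlaybackDataSource) -> Bool {
    switch source {
    case let .telegramFile(_, isCopyProtected, isViewOnce):
        return !isCopyProtected && !isViewOnce
    }
}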