Merge commit '0cfbafa6c17fe24cd9e55148462e9e3fe5507d67'

This commit is contained in:
Ali 2022-01-19 21:32:30 +04:00
commit aae451feef
12 changed files with 527 additions and 61 deletions

View File

@@ -210,6 +210,7 @@ private final class ContextControllerNode: ViewControllerTracingNode, UIScrollVi
private var originalProjectedContentViewFrame: (CGRect, CGRect)?
private var contentAreaInScreenSpace: CGRect?
private var customPosition: CGPoint?
private let contentContainerNode: ContextContentContainerNode
private var actionsContainerNode: ContextActionsContainerNode
private var reactionContextNode: ReactionContextNode?
@@ -546,6 +547,7 @@ private final class ContextControllerNode: ViewControllerTracingNode, UIScrollVi
let referenceNode = transitionInfo.referenceNode
self.contentContainerNode.contentNode = .reference(node: referenceNode)
self.contentAreaInScreenSpace = transitionInfo.contentAreaInScreenSpace
self.customPosition = transitionInfo.customPosition
var projectedFrame = convertFrame(referenceNode.view.bounds, from: referenceNode.view, to: self.view)
projectedFrame.origin.x += transitionInfo.insets.left
projectedFrame.size.width -= transitionInfo.insets.left + transitionInfo.insets.right
@@ -1542,6 +1544,11 @@ private final class ContextControllerNode: ViewControllerTracingNode, UIScrollVi
}
contentHeight -= offsetDelta
}
if let customPosition = self.customPosition {
originalActionsFrame.origin.x = floor(originalContentFrame.center.x - originalActionsFrame.width / 2.0) + customPosition.x
originalActionsFrame.origin.y = floor(originalContentFrame.center.y - originalActionsFrame.height / 2.0) + customPosition.y
}
let scrollContentSize = CGSize(width: layout.size.width, height: contentHeight)
if self.scrollNode.view.contentSize != scrollContentSize {
@@ -2023,11 +2030,13 @@ public final class ContextControllerReferenceViewInfo {
public let referenceNode: ContextReferenceContentNode
public let contentAreaInScreenSpace: CGRect
public let insets: UIEdgeInsets
public let customPosition: CGPoint?
public init(referenceNode: ContextReferenceContentNode, contentAreaInScreenSpace: CGRect, insets: UIEdgeInsets = UIEdgeInsets(), customPosition: CGPoint? = nil) {
self.referenceNode = referenceNode
self.contentAreaInScreenSpace = contentAreaInScreenSpace
self.insets = insets
self.customPosition = customPosition
}
}
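For reference, a minimal sketch of how a ContextReferenceContentSource can supply the new customPosition, which the layout code above uses to re-center the actions container around the reference content. The type name below is hypothetical; the -116.0 offset mirrors the ShareControllerNode usage added later in this commit.

final class OffsetReferenceContentSource: ContextReferenceContentSource {
    private let sourceNode: ContextReferenceContentNode

    init(sourceNode: ContextReferenceContentNode) {
        self.sourceNode = sourceNode
    }

    func transitionInfo() -> ContextControllerReferenceViewInfo? {
        // Shift the context actions 116 points above the reference node's center.
        return ContextControllerReferenceViewInfo(referenceNode: self.sourceNode, contentAreaInScreenSpace: UIScreen.main.bounds, customPosition: CGPoint(x: 0.0, y: -116.0))
    }
}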

View File

@@ -2,8 +2,12 @@ import Foundation
import UIKit
import AsyncDisplayKit
import Display
import ContextUI
public final class ShareActionButtonNode: HighlightTrackingButtonNode {
private let referenceNode: ContextReferenceContentNode
private let containerNode: ContextControllerSourceNode
private let badgeLabel: TextNode
private var badgeText: NSAttributedString?
private let badgeBackground: ASImageNode
@@ -38,7 +42,14 @@ public final class ShareActionButtonNode: HighlightTrackingButtonNode {
}
}
var shouldBegin: (() -> Bool)?
var contextAction: ((ASDisplayNode, ContextGesture?) -> Void)?
public init(badgeBackgroundColor: UIColor, badgeTextColor: UIColor) {
self.referenceNode = ContextReferenceContentNode()
self.containerNode = ContextControllerSourceNode()
self.containerNode.animateScale = false
self.badgeBackgroundColor = badgeBackgroundColor
self.badgeTextColor = badgeTextColor
@@ -57,8 +68,27 @@ public final class ShareActionButtonNode: HighlightTrackingButtonNode {
super.init()
self.containerNode.addSubnode(self.referenceNode)
self.addSubnode(self.containerNode)
self.addSubnode(self.badgeBackground)
self.addSubnode(self.badgeLabel)
self.containerNode.shouldBegin = { [weak self] location in
guard let strongSelf = self, let _ = strongSelf.contextAction else {
return false
}
if let shouldBegin = strongSelf.shouldBegin {
return shouldBegin()
}
return true
}
self.containerNode.activated = { [weak self] gesture, _ in
guard let strongSelf = self else {
return
}
strongSelf.contextAction?(strongSelf.referenceNode, gesture)
}
}
override public func layout() {
@@ -74,5 +104,8 @@ public final class ShareActionButtonNode: HighlightTrackingButtonNode {
self.badgeBackground.frame = backgroundFrame
self.badgeLabel.frame = CGRect(origin: CGPoint(x: floorToScreenPixels(backgroundFrame.midX - badgeLayout.size.width / 2.0), y: backgroundFrame.minY + 3.0), size: badgeLayout.size)
}
self.containerNode.frame = self.bounds
self.referenceNode.frame = self.bounds
}
}
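A brief usage sketch, not part of the diff, of how an owner wires the new shouldBegin / contextAction hooks; ShareControllerNode does exactly this later in this commit. selectedPeers and presentSendOptionsMenu are placeholder names for illustration.

let actionButton = ShareActionButtonNode(badgeBackgroundColor: .red, badgeTextColor: .white)
actionButton.shouldBegin = {
    // Only allow the long-press context gesture when something is selected.
    return !selectedPeers.isEmpty
}
actionButton.contextAction = { referenceNode, gesture in
    // referenceNode is the button's embedded ContextReferenceContentNode;
    // anchor a ContextController on it and pass the gesture through.
    presentSendOptionsMenu(referenceNode, gesture)
}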

View File

@@ -516,7 +516,7 @@ public final class ShareController: ViewController {
self?.presentingViewController?.dismiss(animated: false, completion: nil)
})
}
self.controllerNode.share = { [weak self] text, peerIds, showNames, silently in
guard let strongSelf = self else {
return .complete()
}
@@ -528,6 +528,21 @@ public final class ShareController: ViewController {
subject = selectedValue.subject
}
func transformMessages(_ messages: [EnqueueMessage], showNames: Bool, silently: Bool) -> [EnqueueMessage] {
return messages.map { message in
return message.withUpdatedAttributes({ attributes in
var attributes = attributes
if !showNames {
attributes.append(ForwardOptionsMessageAttribute(hideNames: true, hideCaptions: false))
}
if silently {
attributes.append(NotificationInfoMessageAttribute(flags: .muted))
}
return attributes
})
}
}
switch subject {
case let .url(url):
for peerId in peerIds {
@@ -537,6 +552,7 @@ public final class ShareController: ViewController {
} else {
messages.append(.message(text: url, attributes: [], mediaReference: nil, replyToMessageId: nil, localGroupingKey: nil, correlationId: nil))
}
messages = transformMessages(messages, showNames: showNames, silently: silently)
shareSignals.append(enqueueMessages(account: strongSelf.currentAccount, peerId: peerId, messages: messages))
}
case let .text(string):
@@ -546,6 +562,7 @@ public final class ShareController: ViewController {
messages.append(.message(text: text, attributes: [], mediaReference: nil, replyToMessageId: nil, localGroupingKey: nil, correlationId: nil))
}
messages.append(.message(text: string, attributes: [], mediaReference: nil, replyToMessageId: nil, localGroupingKey: nil, correlationId: nil))
messages = transformMessages(messages, showNames: showNames, silently: silently)
shareSignals.append(enqueueMessages(account: strongSelf.currentAccount, peerId: peerId, messages: messages))
}
case let .quote(string, url):
@@ -558,12 +575,14 @@ public final class ShareController: ViewController {
attributedText.append(NSAttributedString(string: "\n\n\(url)"))
let entities = generateChatInputTextEntities(attributedText)
messages.append(.message(text: attributedText.string, attributes: [TextEntitiesMessageAttribute(entities: entities)], mediaReference: nil, replyToMessageId: nil, localGroupingKey: nil, correlationId: nil))
messages = transformMessages(messages, showNames: showNames, silently: silently)
shareSignals.append(enqueueMessages(account: strongSelf.currentAccount, peerId: peerId, messages: messages))
}
case let .image(representations):
for peerId in peerIds {
var messages: [EnqueueMessage] = []
messages.append(.message(text: text, attributes: [], mediaReference: .standalone(media: TelegramMediaImage(imageId: MediaId(namespace: Namespaces.Media.LocalImage, id: Int64.random(in: Int64.min ... Int64.max)), representations: representations.map({ $0.representation }), immediateThumbnailData: nil, reference: nil, partialReference: nil, flags: [])), replyToMessageId: nil, localGroupingKey: nil, correlationId: nil))
messages = transformMessages(messages, showNames: showNames, silently: silently)
shareSignals.append(enqueueMessages(account: strongSelf.currentAccount, peerId: peerId, messages: messages))
}
case let .media(mediaReference):
@@ -578,6 +597,7 @@ public final class ShareController: ViewController {
messages.append(.message(text: text, attributes: [], mediaReference: nil, replyToMessageId: nil, localGroupingKey: nil, correlationId: nil))
}
messages.append(.message(text: sendTextAsCaption ? text : "", attributes: [], mediaReference: mediaReference, replyToMessageId: nil, localGroupingKey: nil, correlationId: nil))
messages = transformMessages(messages, showNames: showNames, silently: silently)
shareSignals.append(enqueueMessages(account: strongSelf.currentAccount, peerId: peerId, messages: messages))
}
case let .mapMedia(media):
@@ -587,6 +607,7 @@ public final class ShareController: ViewController {
messages.append(.message(text: text, attributes: [], mediaReference: nil, replyToMessageId: nil, localGroupingKey: nil, correlationId: nil))
}
messages.append(.message(text: "", attributes: [], mediaReference: .standalone(media: media), replyToMessageId: nil, localGroupingKey: nil, correlationId: nil))
messages = transformMessages(messages, showNames: showNames, silently: silently)
shareSignals.append(enqueueMessages(account: strongSelf.currentAccount, peerId: peerId, messages: messages))
}
case let .messages(messages):
@@ -598,6 +619,7 @@ public final class ShareController: ViewController {
for message in messages {
messagesToEnqueue.append(.forward(source: message.id, grouping: .auto, attributes: [], correlationId: nil))
}
messagesToEnqueue = transformMessages(messagesToEnqueue, showNames: showNames, silently: silently)
shareSignals.append(enqueueMessages(account: strongSelf.currentAccount, peerId: peerId, messages: messagesToEnqueue))
}
case let .fromExternal(f):
@@ -672,7 +694,7 @@ public final class ShareController: ViewController {
subject = selectedValue.subject
}
var messageUrl: String?
var messagesToShare: [Message]?
switch subject {
case let .url(text):
collectableItems.append(CollectableExternalShareItem(url: explicitUrl(text), text: "", author: nil, timestamp: nil, mediaReference: nil))
@@ -689,7 +711,7 @@ public final class ShareController: ViewController {
let latLong = "\(media.latitude),\(media.longitude)"
collectableItems.append(CollectableExternalShareItem(url: "https://maps.apple.com/maps?ll=\(latLong)&q=\(latLong)&t=m", text: "", author: nil, timestamp: nil, mediaReference: nil))
case let .messages(messages):
messagesToShare = messages
for message in messages {
var url: String?
var selectedMedia: Media?
@@ -779,16 +801,16 @@ public final class ShareController: ViewController {
|> filter { $0 }
|> take(1)
|> deliverOnMainQueue).start(next: { [weak self] _ in
if asImage, let messages = messagesToShare {
self?.openShareAsImage?(messages)
} else {
let activityController = UIActivityViewController(activityItems: activityItems, applicationActivities: activities)
if let strongSelf = self, let window = strongSelf.view.window, let rootViewController = window.rootViewController {
activityController.popoverPresentationController?.sourceView = window
activityController.popoverPresentationController?.sourceRect = CGRect(origin: CGPoint(x: window.bounds.width / 2.0, y: window.bounds.size.height - 1.0), size: CGSize(width: 1.0, height: 1.0))
rootViewController.present(activityController, animated: true, completion: nil)
}
}
})
}
return .done
@@ -1140,8 +1162,13 @@ public class ShareToInstagramActivity: UIActivity {
}
public override func perform() {
if let url = self.activityItems.first as? URL, let data = try? Data(contentsOf: url, options: .mappedIfSafe) {
let pasteboardItems: [[String: Any]]
if url.path.hasSuffix(".mp4") {
pasteboardItems = [["com.instagram.sharedSticker.backgroundVideo": data]]
} else {
pasteboardItems = [["com.instagram.sharedSticker.backgroundImage": data]]
}
if #available(iOS 10.0, *) {
UIPasteboard.general.setItems(pasteboardItems, options: [.expirationDate: Date().addingTimeInterval(5 * 60)])
} else {

View File

@@ -59,7 +59,7 @@ final class ShareControllerNode: ViewControllerTracingNode, UIScrollViewDelegate
var dismiss: ((Bool) -> Void)?
var cancel: (() -> Void)?
var share: ((String, [PeerId], Bool, Bool) -> Signal<ShareState, NoError>)?
var shareExternal: ((Bool) -> Signal<ShareExternalState, NoError>)?
var switchToAnotherAccount: (() -> Void)?
var debugAction: (() -> Void)?
@@ -83,6 +83,8 @@ final class ShareControllerNode: ViewControllerTracingNode, UIScrollViewDelegate
private let presetText: String?
private let showNames = ValuePromise<Bool>(true)
init(sharedContext: SharedAccountContext, presentationData: PresentationData, presetText: String?, defaultAction: ShareControllerAction?, requestLayout: @escaping (ContainedViewLayoutTransition) -> Void, presentError: @escaping (String?, String) -> Void, externalShare: Bool, immediateExternalShare: Bool, immediatePeerId: PeerId?, fromForeignApp: Bool, forceTheme: PresentationTheme?, fromPublicChannel: Bool, segmentedValues: [ShareControllerSegmentedValue]?) {
self.sharedContext = sharedContext
self.presentationData = presentationData
@@ -179,6 +181,59 @@ final class ShareControllerNode: ViewControllerTracingNode, UIScrollViewDelegate
self.isHidden = true
self.actionButtonNode.shouldBegin = { [weak self] in
if let strongSelf = self {
return !strongSelf.controllerInteraction!.selectedPeers.isEmpty
} else {
return false
}
}
self.actionButtonNode.contextAction = { [weak self] node, gesture in
if let strongSelf = self, let context = strongSelf.context, let node = node as? ContextReferenceContentNode {
let presentationData = strongSelf.presentationData
let items: Signal<ContextController.Items, NoError> =
strongSelf.showNames.get()
|> map { showNamesValue in
return ContextController.Items(content: .list([
.action(ContextMenuActionItem(text: presentationData.strings.Conversation_ForwardOptions_ShowSendersName, icon: { theme in
if showNamesValue {
return generateTintedImage(image: UIImage(bundleImageName: "Chat/Context Menu/Check"), color: theme.contextMenu.primaryColor)
} else {
return nil
}
}, action: { _, _ in
self?.showNames.set(true)
})),
.action(ContextMenuActionItem(text: presentationData.strings.Conversation_ForwardOptions_HideSendersName, icon: { theme in
if !showNamesValue {
return generateTintedImage(image: UIImage(bundleImageName: "Chat/Context Menu/Check"), color: theme.contextMenu.primaryColor)
} else {
return nil
}
}, action: { _, _ in
self?.showNames.set(false)
})),
.separator,
.action(ContextMenuActionItem(text: presentationData.strings.Conversation_SendMessage_SendSilently, icon: { theme in return generateTintedImage(image: UIImage(bundleImageName: "Chat/Input/Menu/SilentIcon"), color: theme.contextMenu.primaryColor) }, action: { _, f in
f(.default)
if let strongSelf = self {
strongSelf.send(showNames: showNamesValue, silently: true)
}
})),
.action(ContextMenuActionItem(text: presentationData.strings.Conversation_ForwardOptions_SendMessage, icon: { theme in return generateTintedImage(image: UIImage(bundleImageName: "Chat/Context Menu/Resend"), color: theme.contextMenu.primaryColor) }, action: { _, f in
f(.default)
if let strongSelf = self {
strongSelf.send(showNames: showNamesValue, silently: false)
}
})),
]))
}
let contextController = ContextController(account: context.account, presentationData: presentationData, source: .reference(ShareContextReferenceContentSource(sourceNode: node, customPosition: CGPoint(x: 0.0, y: -116.0))), items: items, gesture: gesture)
contextController.immediateItemsTransitionAnimation = true
strongSelf.present?(contextController)
}
}
self.controllerInteraction = ShareControllerInteraction(togglePeer: { [weak self] peer, search in
if let strongSelf = self {
var added = false
@@ -542,11 +597,14 @@ final class ShareControllerNode: ViewControllerTracingNode, UIScrollViewDelegate
defaultAction.action()
}
} else {
let _ = (self.showNames.get()
|> take(1)).start(next: { [weak self] showNames in
self?.send(showNames: showNames)
})
}
}
func send(peerId: PeerId? = nil, showNames: Bool = true, silently: Bool = false) {
if !self.inputFieldNode.text.isEmpty {
for peer in self.controllerInteraction!.selectedPeers {
if let channel = peer.peer as? TelegramChannel, channel.isRestrictedBySlowmode {
@@ -579,7 +637,7 @@ final class ShareControllerNode: ViewControllerTracingNode, UIScrollViewDelegate
donateSendMessageIntent(account: context.account, sharedContext: self.sharedContext, intentContext: .share, peerIds: peerIds)
}
if let signal = self.share?(self.inputFieldNode.text, peerIds, showNames, silently) {
var wasDone = false
let timestamp = CACurrentMediaTime()
let doneImpl: (Bool) -> Void = { [weak self] shouldDelay in
@@ -809,23 +867,11 @@ final class ShareControllerNode: ViewControllerTracingNode, UIScrollViewDelegate
}))
}
if strongSelf.fromPublicChannel {
proceed(true)
} else {
proceed(false)
}
}
peersContentNode.openShare = { node, gesture in
openShare(false, node, gesture)
@@ -1010,12 +1056,14 @@ final class ShareControllerNode: ViewControllerTracingNode, UIScrollViewDelegate
private final class ShareContextReferenceContentSource: ContextReferenceContentSource {
private let sourceNode: ContextReferenceContentNode
private let customPosition: CGPoint?
init(sourceNode: ContextReferenceContentNode, customPosition: CGPoint?) {
self.sourceNode = sourceNode
self.customPosition = customPosition
}
func transitionInfo() -> ContextControllerReferenceViewInfo? {
return ContextControllerReferenceViewInfo(referenceNode: self.sourceNode, contentAreaInScreenSpace: UIScreen.main.bounds, customPosition: self.customPosition)
}
}
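A minimal sketch of the ValuePromise-driven state used above for the show-names option, assuming SwiftSignalKit's ValuePromise and Signal as in the diff; the print is a placeholder for the actual send path.

let showNames = ValuePromise<Bool>(true)

// Read the current value once before sending, as the send path above does.
let _ = (showNames.get()
|> take(1)).start(next: { value in
    print("sending with showNames = \(value)")
})

// Toggling a menu item just sets the promise; any menu built from showNames.get() sees the new value.
showNames.set(false)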

View File

@@ -80,6 +80,11 @@ func mediaBubbleCornerImage(incoming: Bool, radius: CGFloat, inset: CGFloat) ->
}
public func messageBubbleImage(maxCornerRadius: CGFloat, minCornerRadius: CGFloat, incoming: Bool, fillColor: UIColor, strokeColor: UIColor, neighbors: MessageBubbleImageNeighbors, theme: PresentationThemeChat, wallpaper: TelegramWallpaper, knockout knockoutValue: Bool, mask: Bool = false, extendedEdges: Bool = false, onlyOutline: Bool = false, onlyShadow: Bool = false, alwaysFillColor: Bool = false) -> UIImage {
let bubbleColors = incoming ? theme.message.incoming : theme.message.outgoing
return messageBubbleImage(maxCornerRadius: maxCornerRadius, minCornerRadius: minCornerRadius, incoming: incoming, fillColor: fillColor, strokeColor: strokeColor, neighbors: neighbors, shadow: bubbleColors.bubble.withWallpaper.shadow, wallpaper: wallpaper, knockout: knockoutValue, mask: mask, extendedEdges: extendedEdges, onlyOutline: onlyOutline, onlyShadow: onlyShadow, alwaysFillColor: alwaysFillColor)
}
public func messageBubbleImage(maxCornerRadius: CGFloat, minCornerRadius: CGFloat, incoming: Bool, fillColor: UIColor, strokeColor: UIColor, neighbors: MessageBubbleImageNeighbors, shadow: PresentationThemeBubbleShadow?, wallpaper: TelegramWallpaper, knockout knockoutValue: Bool, mask: Bool = false, extendedEdges: Bool = false, onlyOutline: Bool = false, onlyShadow: Bool = false, alwaysFillColor: Bool = false) -> UIImage {
let topLeftRadius: CGFloat
let topRightRadius: CGFloat
let bottomLeftRadius: CGFloat
@@ -299,9 +304,7 @@ public func messageBubbleImage(maxCornerRadius: CGFloat, minCornerRadius: CGFloa
if onlyShadow {
context.clear(CGRect(origin: CGPoint(), size: rawSize))
if let shadow = shadow {
context.translateBy(x: rawSize.width / 2.0, y: rawSize.height / 2.0)
context.scaleBy(x: incoming ? -1.0 : 1.0, y: -1.0)
context.translateBy(x: -rawSize.width / 2.0, y: -rawSize.height / 2.0)
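A hypothetical call site, not in the diff, for the new messageBubbleImage overload above that takes the bubble shadow explicitly instead of deriving it from the theme; all argument values here are placeholders for illustration.

let image = messageBubbleImage(maxCornerRadius: 17.0, minCornerRadius: 17.0, incoming: false, fillColor: .white, strokeColor: .clear, neighbors: .none, shadow: theme.message.outgoing.bubble.withWallpaper.shadow, wallpaper: wallpaper, knockout: false, onlyShadow: true)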

View File

@@ -1980,11 +1980,11 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
}, openMessageShareMenu: { [weak self] id in
if let strongSelf = self, let messages = strongSelf.chatDisplayNode.historyNode.messageGroupInCurrentHistoryView(id), let _ = messages.first {
let shareController = ShareController(context: strongSelf.context, subject: .messages(messages), updatedPresentationData: strongSelf.updatedPresentationData, shareAsLink: true)
shareController.openShareAsImage = { [weak self] messages in
if let strongSelf = self {
strongSelf.present(ChatQrCodeScreen(context: strongSelf.context, subject: .messages(messages)), in: .window(.root))
}
}
shareController.dismissed = { [weak self] shared in
if shared {
self?.commitPurposefulAction()

View File

@@ -503,7 +503,7 @@ final class ChatPinnedMessageTitlePanelNode: ChatTitleAccessoryPanelNode {
strongSelf.contentTextContainer.insertSubnode(dustNode, aboveSubnode: strongSelf.textNode)
}
dustNode.frame = textFrame.insetBy(dx: -3.0, dy: -3.0).offsetBy(dx: 0.0, dy: 3.0)
dustNode.update(size: dustNode.frame.size, color: theme.chat.inputPanel.secondaryTextColor, textColor: theme.chat.inputPanel.primaryTextColor, rects: textLayout.spoilers.map { $0.1.offsetBy(dx: 3.0, dy: 3.0).insetBy(dx: 1.0, dy: 1.0) }, wordRects: textLayout.spoilerWords.map { $0.1.offsetBy(dx: 3.0, dy: 3.0).insetBy(dx: 1.0, dy: 1.0) })
} else if let dustNode = strongSelf.dustNode {
dustNode.removeFromSupernode()
strongSelf.dustNode = nil

View File

@@ -1,4 +1,5 @@
import Foundation
import AVFoundation
import UIKit
import Display
import AsyncDisplayKit
@@ -27,6 +28,11 @@ import ShareController
import TelegramStringFormatting
import PhotoResources
import TextFormat
import UniversalMediaPlayer
import TelegramUniversalVideoContent
import GalleryUI
import SaveToCameraRoll
import SegmentedControlNode
private func closeButtonImage(theme: PresentationTheme) -> UIImage? {
return generateImage(CGSize(width: 30.0, height: 30.0), contextGenerator: { size, context in
@@ -710,6 +716,18 @@ private func iconColors(theme: PresentationTheme) -> [String: UIColor] {
private let defaultEmoticon = "🏠"
private func generateShadowImage() -> UIImage? {
return generateImage(CGSize(width: 40.0, height: 40.0), rotatedContext: { size, context in
context.clear(CGRect(origin: CGPoint(), size: size))
context.setShadow(offset: CGSize(width: 0.0, height: -0.5), blur: 10.0, color: UIColor(rgb: 0x000000, alpha: 0.4).cgColor)
context.setFillColor(UIColor(rgb: 0x000000, alpha: 0.4).cgColor)
let path = UIBezierPath(roundedRect: CGRect(origin: CGPoint(x: 0.0, y: 8.0), size: CGSize(width: 40.0, height: 40.0)), cornerRadius: 16.0)
context.addPath(path.cgPath)
context.fillPath()
})?.stretchableImage(withLeftCapWidth: 20, topCapHeight: 0)
}
private class ChatQrCodeScreenNode: ViewControllerTracingNode, UIScrollViewDelegate {
private let context: AccountContext
private var presentationData: PresentationData
@@ -720,10 +738,12 @@ private class ChatQrCodeScreenNode: ViewControllerTracingNode, UIScrollViewDeleg
private let wrappingScrollNode: ASScrollNode
private let contentContainerNode: ASDisplayNode
private let topContentContainerNode: SparseNode
private let shadowNode: ASImageNode
private let effectNode: ASDisplayNode
private let backgroundNode: ASDisplayNode
private let contentBackgroundNode: ASDisplayNode
private let titleNode: ASTextNode
private let segmentedNode: SegmentedControlNode
private let cancelButton: HighlightableButtonNode
private let switchThemeButton: HighlightTrackingButtonNode
private let animationContainerNode: ASDisplayNode
@@ -787,6 +807,10 @@ private class ChatQrCodeScreenNode: ViewControllerTracingNode, UIScrollViewDeleg
self.topContentContainerNode = SparseNode()
self.topContentContainerNode.isOpaque = false
self.shadowNode = ASImageNode()
self.shadowNode.contentMode = .scaleToFill
self.shadowNode.image = generateShadowImage()
self.backgroundNode = ASDisplayNode()
self.backgroundNode.clipsToBounds = true
self.backgroundNode.cornerRadius = 16.0
@@ -815,6 +839,10 @@ private class ChatQrCodeScreenNode: ViewControllerTracingNode, UIScrollViewDeleg
}
self.titleNode.attributedText = NSAttributedString(string: title, font: Font.semibold(16.0), textColor: textColor)
self.segmentedNode = SegmentedControlNode(theme: SegmentedControlTheme(theme: presentationData.theme), items: [SegmentedControlItem(title: "Video"), SegmentedControlItem(title: "Image")], selectedIndex: self.contentNode.hasVideo ? 0 : 1)
self.segmentedNode.isHidden = !self.contentNode.hasVideo
self.titleNode.isHidden = !self.segmentedNode.isHidden
self.cancelButton = HighlightableButtonNode()
self.cancelButton.setImage(closeButtonImage(theme: self.presentationData.theme), for: .normal)
@@ -845,6 +873,7 @@ private class ChatQrCodeScreenNode: ViewControllerTracingNode, UIScrollViewDeleg
self.wrappingScrollNode.addSubnode(self.contentNode)
self.wrappingScrollNode.addSubnode(self.shadowNode)
self.wrappingScrollNode.addSubnode(self.backgroundNode)
self.wrappingScrollNode.addSubnode(self.contentContainerNode)
self.wrappingScrollNode.addSubnode(self.topContentContainerNode)
@@ -852,6 +881,7 @@ private class ChatQrCodeScreenNode: ViewControllerTracingNode, UIScrollViewDeleg
self.backgroundNode.addSubnode(self.effectNode)
self.backgroundNode.addSubnode(self.contentBackgroundNode)
self.contentContainerNode.addSubnode(self.titleNode)
self.contentContainerNode.addSubnode(self.segmentedNode)
self.contentContainerNode.addSubnode(self.doneButton)
self.topContentContainerNode.addSubnode(self.animationContainerNode)
@@ -863,18 +893,58 @@ private class ChatQrCodeScreenNode: ViewControllerTracingNode, UIScrollViewDeleg
self.switchThemeButton.addTarget(self, action: #selector(self.switchThemePressed), forControlEvents: .touchUpInside)
self.cancelButton.addTarget(self, action: #selector(self.cancelButtonPressed), forControlEvents: .touchUpInside)
self.segmentedNode.selectedIndexChanged = { [weak self] index in
guard let strongSelf = self, let contentNode = strongSelf.contentNode as? MessageContentNode, let videoNode = contentNode.videoNode else {
return
}
if index == 0 {
videoNode.play()
} else {
videoNode.pause()
videoNode.seek(0)
}
}
let fileName = controller.subject.fileName
self.doneButton.pressed = { [weak self] in
guard let strongSelf = self else {
return
}
strongSelf.doneButton.isUserInteractionEnabled = false
if strongSelf.segmentedNode.selectedIndex == 0 {
strongSelf.contentNode.generateVideo { [weak self] url in
if let strongSelf = self {
let tempFilePath = NSTemporaryDirectory() + "\(fileName).mp4"
try? FileManager.default.removeItem(atPath: tempFilePath)
let tempFileUrl = URL(fileURLWithPath: tempFilePath)
try? FileManager.default.moveItem(at: url, to: tempFileUrl)
let activityController = UIActivityViewController(activityItems: [tempFileUrl], applicationActivities: [ShareToInstagramActivity(context: strongSelf.context)])
activityController.completionWithItemsHandler = { [weak self] _, finished, _, _ in
if let strongSelf = self {
if finished {
strongSelf.completion?(strongSelf.selectedEmoticon)
} else {
strongSelf.doneButton.isUserInteractionEnabled = true
}
}
}
if let window = strongSelf.view.window {
activityController.popoverPresentationController?.sourceView = window
activityController.popoverPresentationController?.sourceRect = CGRect(origin: CGPoint(x: window.bounds.width / 2.0, y: window.bounds.size.height - 1.0), size: CGSize(width: 1.0, height: 1.0))
}
context.sharedContext.applicationBindings.presentNativeController(activityController)
}
}
} else {
strongSelf.contentNode.generateImage { [weak self] image in
if let strongSelf = self, let image = image, let jpgData = image.jpegData(compressionQuality: 0.9) {
let tempFilePath = NSTemporaryDirectory() + "\(fileName).jpg"
try? FileManager.default.removeItem(atPath: tempFilePath)
let tempFileUrl = URL(fileURLWithPath: tempFilePath)
try? jpgData.write(to: tempFileUrl)
let activityController = UIActivityViewController(activityItems: [tempFileUrl], applicationActivities: [ShareToInstagramActivity(context: strongSelf.context)])
activityController.completionWithItemsHandler = { [weak self] _, finished, _, _ in
if let strongSelf = self {
@@ -1128,6 +1198,7 @@ private class ChatQrCodeScreenNode: ViewControllerTracingNode, UIScrollViewDeleg
self.presentationData = presentationData
self.titleNode.attributedText = NSAttributedString(string: self.titleNode.attributedText?.string ?? "", font: Font.semibold(16.0), textColor: self.presentationData.theme.actionSheet.primaryTextColor)
self.segmentedNode.updateTheme(SegmentedControlTheme(theme: self.presentationData.theme))
if previousTheme !== presentationData.theme, let (layout, navigationBarHeight) = self.containerLayout {
self.containerLayoutUpdated(layout, navigationBarHeight: navigationBarHeight, transition: .immediate)
@@ -1305,6 +1376,8 @@ private class ChatQrCodeScreenNode: ViewControllerTracingNode, UIScrollViewDeleg
backgroundFrame.origin.y = contentFrame.minY
}
let shadowFrame = CGRect(x: backgroundFrame.minX, y: backgroundFrame.minY - 8.0, width: backgroundFrame.width, height: 40.0)
transition.updateFrame(node: self.shadowNode, frame: shadowFrame)
transition.updateFrame(node: self.backgroundNode, frame: backgroundFrame)
transition.updateFrame(node: self.effectNode, frame: CGRect(origin: CGPoint(), size: backgroundFrame.size))
transition.updateFrame(node: self.contentBackgroundNode, frame: CGRect(origin: CGPoint(), size: backgroundFrame.size))
@@ -1313,6 +1386,9 @@ private class ChatQrCodeScreenNode: ViewControllerTracingNode, UIScrollViewDeleg
let titleSize = self.titleNode.measure(CGSize(width: width - 90.0, height: titleHeight))
let titleFrame = CGRect(origin: CGPoint(x: floor((contentFrame.width - titleSize.width) / 2.0), y: 19.0 + UIScreenPixel), size: titleSize)
transition.updateFrame(node: self.titleNode, frame: titleFrame)
let segmentedSize = self.segmentedNode.updateLayout(.sizeToFit(maximumWidth: width - 90.0, minimumWidth: 160.0, height: 32.0), transition: transition)
transition.updateFrame(node: self.segmentedNode, frame: CGRect(origin: CGPoint(x: floor((contentFrame.width - segmentedSize.width) / 2.0), y: 12.0), size: segmentedSize))
let switchThemeSize = CGSize(width: 44.0, height: 44.0)
let switchThemeFrame = CGRect(origin: CGPoint(x: 3.0, y: 6.0), size: switchThemeSize)
@@ -1351,7 +1427,10 @@ private protocol ContentNode: ASDisplayNode {
var wallpaperBackgroundNode: WallpaperBackgroundNode { get }
var isReady: Signal<Bool, NoError> { get }
var hasVideo: Bool { get }
func generateImage(completion: @escaping (UIImage?) -> Void)
func generateVideo(completion: @escaping (URL) -> Void)
func update(theme: PresentationTheme, wallpaper: TelegramWallpaper, isDarkAppearance: Bool, selectedEmoticon: String?)
func updateLayout(size: CGSize, topInset: CGFloat, bottomInset: CGFloat, transition: ContainedViewLayoutTransition)
}
@@ -1384,6 +1463,10 @@ private class QrContentNode: ASDisplayNode, ContentNode {
return self._ready.get()
}
var hasVideo: Bool {
return false
}
init(context: AccountContext, peer: Peer, isStatic: Bool = false) {
self.context = context
self.peer = peer
@@ -1535,6 +1618,9 @@ private class QrContentNode: ASDisplayNode, ContentNode {
}
})
}
func generateVideo(completion: @escaping(URL) -> Void) {
}
func update(theme: PresentationTheme, wallpaper: TelegramWallpaper, isDarkAppearance: Bool, selectedEmoticon: String?) {
self.currentParams = (theme, wallpaper, isDarkAppearance, selectedEmoticon)
@@ -1649,6 +1735,9 @@ private class QrContentNode: ASDisplayNode, ContentNode {
private class MessageContentNode: ASDisplayNode, ContentNode {
private let context: AccountContext
private let messages: [Message]
private let isStatic: Bool
private var mediaFrame: CGRect?
fileprivate let containerNode: ASDisplayNode
fileprivate let wallpaperBackgroundNode: WallpaperBackgroundNode
@@ -1659,7 +1748,11 @@ private class MessageContentNode: ASDisplayNode, ContentNode {
private let titleNode: ImmediateTextNode
private let dateNode: ImmediateTextNode
private let imageNode: TransformImageNode
fileprivate var videoNode: UniversalVideoNode?
private var videoSnapshotView: UIView?
private let textNode: ImmediateTextNode
private let badgeBackgroundNode: ASDisplayNode
private let badgeTextNode: ImmediateTextNode
private let linkBackgroundNode: ASDisplayNode
private var linkBackgroundContentNode: ASDisplayNode?
@@ -1670,14 +1763,32 @@ private class MessageContentNode: ASDisplayNode, ContentNode {
private var currentParams: (PresentationTheme, TelegramWallpaper, Bool, String?)?
private var validLayout: (CGSize, CGFloat, CGFloat)?
private let videoStatusDisposable = MetaDisposable()
private var videoStatus: MediaPlayerStatus?
private let _ready = Promise<Bool>()
var isReady: Signal<Bool, NoError> {
return self._ready.get()
}
var hasVideo: Bool {
if let message = self.messages.first, message.media.contains(where: { media in
if let file = media as? TelegramMediaFile, file.isVideo {
return true
} else {
return false
}
}) {
return true
} else {
return false
}
}
init(context: AccountContext, messages: [Message], isStatic: Bool = false) {
self.context = context
self.messages = messages
self.isStatic = isStatic
self.containerNode = ASDisplayNode()
@@ -1690,9 +1801,17 @@ private class MessageContentNode: ASDisplayNode, ContentNode {
self.titleNode = ImmediateTextNode()
self.dateNode = ImmediateTextNode()
self.textNode = ImmediateTextNode()
self.textNode.displaysAsynchronously = false
self.textNode.maximumNumberOfLines = 0
self.imageNode = TransformImageNode()
self.badgeBackgroundNode = ASDisplayNode()
self.badgeBackgroundNode.cornerRadius = 9.0
self.badgeBackgroundNode.backgroundColor = UIColor(rgb: 0x000000, alpha: 0.5)
self.badgeTextNode = ImmediateTextNode()
self.badgeTextNode.displaysAsynchronously = false
self.linkBackgroundNode = ASDisplayNode()
self.linkBackgroundNode.clipsToBounds = true
self.linkBackgroundNode.cornerRadius = 16.5
@@ -1706,6 +1825,7 @@ private class MessageContentNode: ASDisplayNode, ContentNode {
self.linkIconNode.image = UIImage(bundleImageName: "Share/QrPlaneIcon")
self.linkTextNode = ImmediateTextNode()
self.linkTextNode.displaysAsynchronously = false
self.linkTextNode.textAlignment = .center
super.init()
@@ -1723,6 +1843,9 @@ private class MessageContentNode: ASDisplayNode, ContentNode {
self.backgroundNode.addSubnode(self.textNode)
self.backgroundNode.addSubnode(self.imageNode)
self.backgroundNode.addSubnode(self.badgeBackgroundNode)
self.badgeBackgroundNode.addSubnode(self.badgeTextNode)
self.linkBackgroundNode.addSubnode(self.linkBackgroundDimNode)
self.linkBackgroundNode.addSubnode(self.linkIconNode)
self.linkBackgroundNode.addSubnode(self.linkTextNode)
@@ -1734,15 +1857,27 @@ private class MessageContentNode: ASDisplayNode, ContentNode {
}
}
deinit {
self.videoStatusDisposable.dispose()
}
func generateImage(completion: @escaping (UIImage?) -> Void) {
let size = CGSize(width: 390.0, height: 844.0)
self.generateImage(size: size, completion: { image, _ in
completion(image)
})
}
func generateImage(size: CGSize, completion: @escaping (UIImage?, CGRect?) -> Void) {
guard let (theme, wallpaper, isDarkAppearance, selectedEmoticon) = self.currentParams else {
return
}
let scale: CGFloat = 3.0
let copyNode = MessageContentNode(context: self.context, messages: self.messages, isStatic: true)
copyNode.videoSnapshotView = self.videoNode?.view.snapshotContentTree()
func prepare(view: UIView, scale: CGFloat) {
view.contentScaleFactor = scale
@@ -1758,30 +1893,50 @@ private class MessageContentNode: ASDisplayNode, ContentNode {
self.addSubnode(copyNode)
let mediaFrame = copyNode.mediaFrame
let _ = (copyNode.isReady
|> take(1)
|> deliverOnMainQueue).start(next: { [weak copyNode] _ in
Queue.mainQueue().after(0.1) {
let image: UIImage?
if #available(iOS 10.0, *) {
let format = UIGraphicsImageRendererFormat()
format.scale = scale
let renderer = UIGraphicsImageRenderer(size: size, format: format)
image = renderer.image { rendererContext in
copyNode?.containerNode.layer.render(in: rendererContext.cgContext)
}
} else {
UIGraphicsBeginImageContextWithOptions(size, true, scale)
copyNode?.containerNode.view.drawHierarchy(in: CGRect(origin: CGPoint(), size: size), afterScreenUpdates: true)
image = UIGraphicsGetImageFromCurrentImageContext()
UIGraphicsEndImageContext()
}
completion(image, mediaFrame)
copyNode?.removeFromSupernode()
}
})
}
func generateVideo(completion: @escaping(URL) -> Void) {
guard let message = self.messages.first, let media = message.media.first(where: { $0 is TelegramMediaFile }) as? TelegramMediaFile else {
return
}
let context = self.context
self.generateImage(size: CGSize(width: 475.0, height: 844.0), completion: { image, videoFrame in
guard let image = image, let videoFrame = videoFrame else {
return
}
renderVideo(context: context, backgroundImage: image, media: media, videoFrame: videoFrame, completion: { url in
if let url = url {
completion(url)
}
})
})
}
func update(theme: PresentationTheme, wallpaper: TelegramWallpaper, isDarkAppearance: Bool, selectedEmoticon: String?) {
self.currentParams = (theme, wallpaper, isDarkAppearance, selectedEmoticon)
@@ -1852,10 +2007,15 @@ private class MessageContentNode: ASDisplayNode, ContentNode {
self.avatarNode.frame = CGRect(x: contentInset, y: 14.0, width: 36.0, height: 36.0)
var mediaSize = CGSize()
var mediaFrame = CGRect()
var mediaDuration: Int32 = 0
if let message = self.messages.first { if let message = self.messages.first {
let mediaFitSize = CGSize(width: size.width - inset * 2.0 - 5.0, height: size.width - inset * 2.0)
for media in message.media { for media in message.media {
if let image = media as? TelegramMediaImage, let representation = largestRepresentationForPhoto(image) { if let image = media as? TelegramMediaImage, let representation = largestRepresentationForPhoto(image) {
mediaSize = representation.dimensions.cgSize.aspectFitted(CGSize(width: size.width - inset * 2.0 - 5.0, height: size.width - inset * 2.0)) mediaSize = representation.dimensions.cgSize.aspectFitted(mediaFitSize)
mediaFrame = CGRect(origin: CGPoint(x: 3.0, y: 63.0), size: mediaSize)
if !wasInitialized { if !wasInitialized {
self.imageNode.setSignal(chatMessagePhoto(postbox: self.context.account.postbox, photoReference: .message(message: MessageReference(message), media: image), synchronousLoad: true, highQuality: true)) self.imageNode.setSignal(chatMessagePhoto(postbox: self.context.account.postbox, photoReference: .message(message: MessageReference(message), media: image), synchronousLoad: true, highQuality: true))
let imageLayout = self.imageNode.asyncLayout() let imageLayout = self.imageNode.asyncLayout()
@ -1865,12 +2025,79 @@ private class MessageContentNode: ASDisplayNode, ContentNode {
imageApply() imageApply()
} }
self.imageNode.frame = CGRect(origin: CGPoint(x: 3.0, y: 60.0), size: mediaSize) self.imageNode.frame = mediaFrame
mediaSize.height += 10.0 mediaSize.height += 16.0
} else if let video = media as? TelegramMediaFile, video.isVideo, let dimensions = video.dimensions?.cgSize {
mediaSize = dimensions.aspectFitted(mediaFitSize)
mediaFrame = CGRect(origin: CGPoint(x: 3.0, y: 63.0), size: mediaSize)
mediaDuration = video.duration ?? 0
if !wasInitialized {
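// In static mode (used when producing the share snapshot) a snapshot of the live video view
// stands in for the player; otherwise an inline UniversalVideoNode is created and autoplays.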
if self.isStatic {
if let videoSnapshotView = self.videoSnapshotView {
self.backgroundNode.view.insertSubview(videoSnapshotView, belowSubview: self.badgeBackgroundNode.view)
videoSnapshotView.frame = mediaFrame
}
} else {
let videoContent = NativeVideoContent(id: .message(message.stableId, video.fileId), fileReference: .message(message: MessageReference(message), media: video), streamVideo: .conservative, loopVideo: true, enableSound: false, fetchAutomatically: false, onlyFullSizeThumbnail: self.isStatic, continuePlayingWithoutSoundOnLostAudioSession: true, placeholderColor: .clear, captureProtected: false)
let videoNode = UniversalVideoNode(postbox: self.context.account.postbox, audioSession: self.context.sharedContext.mediaManager.audioSession, manager: self.context.sharedContext.mediaManager.universalVideoManager, decoration: GalleryVideoDecoration(), content: videoContent, priority: .overlay, autoplay: !self.isStatic)
self.videoStatusDisposable.set((videoNode.status
|> deliverOnMainQueue).start(next: { [weak self] status in
if let strongSelf = self {
strongSelf.videoStatus = status
if let (size, topInset, bottomInset) = strongSelf.validLayout {
strongSelf.updateLayout(size: size, topInset: topInset, bottomInset: bottomInset, transition: .immediate)
}
}
}))
videoNode.canAttachContent = true
videoNode.isUserInteractionEnabled = false
self.backgroundNode.insertSubnode(videoNode, belowSubnode: self.badgeBackgroundNode)
self.videoNode = videoNode
}
}
self.videoNode?.updateLayout(size: mediaFitSize, transition: .immediate)
self.videoNode?.frame = mediaFrame
mediaSize.height += 16.0
} }
} }
} }
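// Duration badge: shown whenever video content is present. The displayed position/duration is
// taken from the live player status when one is available, otherwise from the file's metadata.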
if self.videoNode != nil || self.videoSnapshotView != nil {
let playerPosition: Int32
let playerDuration: Int32
if let status = self.videoStatus {
if !status.generationTimestamp.isZero, case .playing = status.status {
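// While playing, extrapolate the position: the last reported timestamp plus the wall-clock
// time elapsed since that report.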
playerPosition = Int32(status.timestamp + (CACurrentMediaTime() - status.generationTimestamp))
} else {
playerPosition = Int32(status.timestamp)
}
playerDuration = Int32(status.duration)
} else {
playerPosition = 0
playerDuration = mediaDuration
}
let durationString = stringForDuration(playerDuration, position: playerPosition)
let font = Font.with(size: 11.0, design: .regular, weight: .regular, traits: [.monospacedNumbers])
self.badgeTextNode.attributedText = NSAttributedString(string: durationString, font: font, textColor: .white)
let durationSize = self.badgeTextNode.updateLayout(CGSize(width: 100.0, height: 18.0))
let durationBackgroundSize = CGSize(width: durationSize.width + 7.0 * 2.0, height: 18.0)
self.badgeBackgroundNode.frame = CGRect(origin: mediaFrame.origin.offsetBy(dx: 6.0, dy: 6.0), size: durationBackgroundSize)
self.badgeTextNode.frame = CGRect(origin: CGPoint(x: 7.0, y: floorToScreenPixels((durationBackgroundSize.height - durationSize.height) / 2.0)), size: durationSize)
self.badgeBackgroundNode.isHidden = false
self.badgeTextNode.isHidden = false
} else {
self.badgeBackgroundNode.isHidden = true
self.badgeTextNode.isHidden = true
}
if let message = messages.first { if let message = messages.first {
let textFont = Font.regular(17.0) let textFont = Font.regular(17.0)
let boldFont = Font.bold(17.0) let boldFont = Font.bold(17.0)
@ -1910,10 +2137,12 @@ private class MessageContentNode: ASDisplayNode, ContentNode {
} }
let backgroundSize = CGSize(width: size.width - inset * 2.0, height: contentHeight) let backgroundSize = CGSize(width: size.width - inset * 2.0, height: contentHeight)
let backgroundFrame = CGRect(origin: CGPoint(x: inset, y: max(20.0, floorToScreenPixels((size.height - bottomInset - backgroundSize.height) / 2.0))), size: backgroundSize) let backgroundFrame = CGRect(origin: CGPoint(x: inset, y: max(topInset, floorToScreenPixels((size.height - topInset - bottomInset - backgroundSize.height) / 2.0))), size: backgroundSize)
self.backgroundNode.frame = backgroundFrame self.backgroundNode.frame = backgroundFrame
self.backgroundImageNode.frame = CGRect(x: -5.0, y: 0.0, width: backgroundSize.width + 5.0, height: backgroundSize.height) self.backgroundImageNode.frame = CGRect(x: -5.0, y: 0.0, width: backgroundSize.width + 5.0, height: backgroundSize.height)
self.mediaFrame = mediaFrame.offsetBy(dx: 0.0, dy: backgroundFrame.minY)
let link: String let link: String
if let message = self.messages.first, let chatPeer = message.peers[message.id.peerId] as? TelegramChannel, message.id.namespace == Namespaces.Message.Cloud, let addressName = chatPeer.addressName, !addressName.isEmpty { if let message = self.messages.first, let chatPeer = message.peers[message.id.peerId] as? TelegramChannel, message.id.namespace == Namespaces.Message.Cloud, let addressName = chatPeer.addressName, !addressName.isEmpty {
link = "t.me/\(addressName)/\(message.id.id)" link = "t.me/\(addressName)/\(message.id.id)"
@ -1937,3 +2166,91 @@ private class MessageContentNode: ASDisplayNode, ContentNode {
self.linkTextNode.frame = CGRect(origin: CGPoint(x: 37.0, y: floorToScreenPixels((linkBackgroundSize.height - linkSize.height) / 2.0)), size: linkSize) self.linkTextNode.frame = CGRect(origin: CGPoint(x: 37.0, y: floorToScreenPixels((linkBackgroundSize.height - linkSize.height) / 2.0)), size: linkSize)
} }
} }
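// renderVideo (below) composites the pre-rendered background snapshot with the original video
// file: the snapshot becomes a full-frame background layer, the video is scaled into the slot
// reserved for it, and the result is exported with AVAssetExportSession via
// AVVideoCompositionCoreAnimationTool.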
func renderVideo(context: AccountContext, backgroundImage: UIImage, media: TelegramMediaFile, videoFrame: CGRect, completion: @escaping (URL?) -> Void) {
let _ = (fetchMediaData(context: context, postbox: context.account.postbox, mediaReference: AnyMediaReference.standalone(media: media))
|> deliverOnMainQueue).start(next: { value, isImage in
guard case let .data(data) = value, data.complete else {
return
}
let asset = AVURLAsset(url: URL(fileURLWithPath: data.path))
let composition = AVMutableComposition()
guard let compositionTrack = composition.addMutableTrack(withMediaType: .video, preferredTrackID: kCMPersistentTrackID_Invalid), let assetTrack = asset.tracks(withMediaType: .video).first else {
completion(nil)
return
}
do {
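// Use at most 15 seconds of the source, shaving a tenth of a second off its end.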
let duration = CMTimeMinimum(CMTimeSubtract(asset.duration, CMTime(seconds: 0.1, preferredTimescale: CMTimeScale(30.0))), CMTime(seconds: 15.0, preferredTimescale: CMTimeScale(30.0)))
let timeRange = CMTimeRange(start: .zero, duration: duration)
try compositionTrack.insertTimeRange(timeRange, of: assetTrack, at: .zero)
} catch {
print(error)
completion(nil)
return
}
let outputSize = CGSize(width: 1080.0, height: 1920.0)
let backgroundLayer = CALayer()
backgroundLayer.frame = CGRect(origin: .zero, size: outputSize)
backgroundLayer.contents = backgroundImage.cgImage
let ratio = 1080.0 / 475.0
let offset = videoFrame.minY * ratio
let videoLayer = CALayer()
videoLayer.frame = CGRect(origin: CGPoint(x: 61.0, y: 1920.0 - offset - 960.0), size: CGSize(width: 960.0, height: 960.0))
let outputLayer = CALayer()
outputLayer.frame = CGRect(origin: .zero, size: outputSize)
outputLayer.addSublayer(backgroundLayer)
outputLayer.addSublayer(videoLayer)
func compositionLayerInstruction(for track: AVCompositionTrack, assetTrack: AVAssetTrack) -> AVMutableVideoCompositionLayerInstruction {
let instruction = AVMutableVideoCompositionLayerInstruction(assetTrack: track)
let fixTransform = CGAffineTransform(scaleX: outputSize.width / track.naturalSize.width, y: outputSize.height / track.naturalSize.height)
instruction.setTransform(fixTransform, at: .zero)
return instruction
}
let videoComposition = AVMutableVideoComposition()
videoComposition.renderSize = outputSize
videoComposition.frameDuration = CMTime(value: 1, timescale: 30)
videoComposition.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videoLayer, in: outputLayer)
let instruction = AVMutableVideoCompositionInstruction()
instruction.timeRange = CMTimeRange(start: .zero, duration: composition.duration)
videoComposition.instructions = [instruction]
let layerInstruction = compositionLayerInstruction(for: compositionTrack, assetTrack: assetTrack)
instruction.layerInstructions = [layerInstruction]
guard let export = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetHighestQuality) else {
print("Cannot create export session.")
completion(nil)
return
}
let videoName = UUID().uuidString
let exportURL = URL(fileURLWithPath: NSTemporaryDirectory()).appendingPathComponent(videoName).appendingPathExtension("mp4")
export.videoComposition = videoComposition
export.outputFileType = .mov
export.outputURL = exportURL
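// Note: the exported file is a QuickTime (.mov) container even though the temporary URL
// carries an .mp4 extension.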
export.exportAsynchronously {
DispatchQueue.main.async {
switch export.status {
case .completed:
completion(exportURL)
default:
print("Something went wrong during export.")
print(export.error ?? "unknown error")
completion(nil)
break
}
}
}
})
}
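// Illustrative sketch (not part of this change): the point-to-pixel mapping applied in
// renderVideo above, written out as a pure helper. The background snapshot is laid out on a
// 475 x 844 pt canvas while the exported frame is 1080 x 1920 px, and the video layer's frame
// must be expressed with a bottom-left origin because the layer tree handed to
// AVVideoCompositionCoreAnimationTool is rendered in video-composition space, not UIKit's
// flipped space. The helper name and the generalised scaling of x/width are illustrative; the
// code above hardcodes a 960 x 960 layer at x = 61.
func videoLayerFrame(forSnapshotFrame snapshotFrame: CGRect, snapshotWidth: CGFloat = 475.0, outputSize: CGSize = CGSize(width: 1080.0, height: 1920.0)) -> CGRect {
    let ratio = outputSize.width / snapshotWidth
    let scaled = CGRect(x: snapshotFrame.minX * ratio, y: snapshotFrame.minY * ratio, width: snapshotFrame.width * ratio, height: snapshotFrame.height * ratio)
    // Flip from a top-left origin (snapshot space) to the bottom-left origin used by the composition.
    return CGRect(x: scaled.minX, y: outputSize.height - scaled.minY - scaled.height, width: scaled.width, height: scaled.height)
}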

View File

@ -34,12 +34,13 @@ public final class NativeVideoContent: UniversalVideoContent {
let useLargeThumbnail: Bool let useLargeThumbnail: Bool
let autoFetchFullSizeThumbnail: Bool let autoFetchFullSizeThumbnail: Bool
let startTimestamp: Double? let startTimestamp: Double?
let endTimestamp: Double?
let continuePlayingWithoutSoundOnLostAudioSession: Bool let continuePlayingWithoutSoundOnLostAudioSession: Bool
let placeholderColor: UIColor let placeholderColor: UIColor
let tempFilePath: String? let tempFilePath: String?
let captureProtected: Bool let captureProtected: Bool
public init(id: NativeVideoContentId, fileReference: FileMediaReference, imageReference: ImageMediaReference? = nil, streamVideo: MediaPlayerStreaming = .none, loopVideo: Bool = false, enableSound: Bool = true, baseRate: Double = 1.0, fetchAutomatically: Bool = true, onlyFullSizeThumbnail: Bool = false, useLargeThumbnail: Bool = false, autoFetchFullSizeThumbnail: Bool = false, startTimestamp: Double? = nil, continuePlayingWithoutSoundOnLostAudioSession: Bool = false, placeholderColor: UIColor = .white, tempFilePath: String? = nil, captureProtected: Bool = false) { public init(id: NativeVideoContentId, fileReference: FileMediaReference, imageReference: ImageMediaReference? = nil, streamVideo: MediaPlayerStreaming = .none, loopVideo: Bool = false, enableSound: Bool = true, baseRate: Double = 1.0, fetchAutomatically: Bool = true, onlyFullSizeThumbnail: Bool = false, useLargeThumbnail: Bool = false, autoFetchFullSizeThumbnail: Bool = false, startTimestamp: Double? = nil, endTimestamp: Double? = nil, continuePlayingWithoutSoundOnLostAudioSession: Bool = false, placeholderColor: UIColor = .white, tempFilePath: String? = nil, captureProtected: Bool = false) {
self.id = id self.id = id
self.nativeId = id self.nativeId = id
self.fileReference = fileReference self.fileReference = fileReference
@ -67,6 +68,7 @@ public final class NativeVideoContent: UniversalVideoContent {
self.useLargeThumbnail = useLargeThumbnail self.useLargeThumbnail = useLargeThumbnail
self.autoFetchFullSizeThumbnail = autoFetchFullSizeThumbnail self.autoFetchFullSizeThumbnail = autoFetchFullSizeThumbnail
self.startTimestamp = startTimestamp self.startTimestamp = startTimestamp
self.endTimestamp = endTimestamp
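// Stored alongside startTimestamp and forwarded to NativeVideoContentNode in makeContentNode;
// presumably used to bound playback to a sub-range of the file.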
self.continuePlayingWithoutSoundOnLostAudioSession = continuePlayingWithoutSoundOnLostAudioSession self.continuePlayingWithoutSoundOnLostAudioSession = continuePlayingWithoutSoundOnLostAudioSession
self.placeholderColor = placeholderColor self.placeholderColor = placeholderColor
self.tempFilePath = tempFilePath self.tempFilePath = tempFilePath
@ -74,7 +76,7 @@ public final class NativeVideoContent: UniversalVideoContent {
} }
public func makeContentNode(postbox: Postbox, audioSession: ManagedAudioSession) -> UniversalVideoContentNode & ASDisplayNode { public func makeContentNode(postbox: Postbox, audioSession: ManagedAudioSession) -> UniversalVideoContentNode & ASDisplayNode {
return NativeVideoContentNode(postbox: postbox, audioSessionManager: audioSession, fileReference: self.fileReference, imageReference: self.imageReference, streamVideo: self.streamVideo, loopVideo: self.loopVideo, enableSound: self.enableSound, baseRate: self.baseRate, fetchAutomatically: self.fetchAutomatically, onlyFullSizeThumbnail: self.onlyFullSizeThumbnail, useLargeThumbnail: self.useLargeThumbnail, autoFetchFullSizeThumbnail: self.autoFetchFullSizeThumbnail, startTimestamp: self.startTimestamp, continuePlayingWithoutSoundOnLostAudioSession: self.continuePlayingWithoutSoundOnLostAudioSession, placeholderColor: self.placeholderColor, tempFilePath: self.tempFilePath, captureProtected: self.captureProtected) return NativeVideoContentNode(postbox: postbox, audioSessionManager: audioSession, fileReference: self.fileReference, imageReference: self.imageReference, streamVideo: self.streamVideo, loopVideo: self.loopVideo, enableSound: self.enableSound, baseRate: self.baseRate, fetchAutomatically: self.fetchAutomatically, onlyFullSizeThumbnail: self.onlyFullSizeThumbnail, useLargeThumbnail: self.useLargeThumbnail, autoFetchFullSizeThumbnail: self.autoFetchFullSizeThumbnail, startTimestamp: self.startTimestamp, endTimestamp: self.endTimestamp, continuePlayingWithoutSoundOnLostAudioSession: self.continuePlayingWithoutSoundOnLostAudioSession, placeholderColor: self.placeholderColor, tempFilePath: self.tempFilePath, captureProtected: self.captureProtected)
} }
public func isEqual(to other: UniversalVideoContent) -> Bool { public func isEqual(to other: UniversalVideoContent) -> Bool {
@ -148,7 +150,7 @@ private final class NativeVideoContentNode: ASDisplayNode, UniversalVideoContent
private var shouldPlay: Bool = false private var shouldPlay: Bool = false
init(postbox: Postbox, audioSessionManager: ManagedAudioSession, fileReference: FileMediaReference, imageReference: ImageMediaReference?, streamVideo: MediaPlayerStreaming, loopVideo: Bool, enableSound: Bool, baseRate: Double, fetchAutomatically: Bool, onlyFullSizeThumbnail: Bool, useLargeThumbnail: Bool, autoFetchFullSizeThumbnail: Bool, startTimestamp: Double?, continuePlayingWithoutSoundOnLostAudioSession: Bool = false, placeholderColor: UIColor, tempFilePath: String?, captureProtected: Bool) { init(postbox: Postbox, audioSessionManager: ManagedAudioSession, fileReference: FileMediaReference, imageReference: ImageMediaReference?, streamVideo: MediaPlayerStreaming, loopVideo: Bool, enableSound: Bool, baseRate: Double, fetchAutomatically: Bool, onlyFullSizeThumbnail: Bool, useLargeThumbnail: Bool, autoFetchFullSizeThumbnail: Bool, startTimestamp: Double?, endTimestamp: Double?, continuePlayingWithoutSoundOnLostAudioSession: Bool = false, placeholderColor: UIColor, tempFilePath: String?, captureProtected: Bool) {
self.postbox = postbox self.postbox = postbox
self.fileReference = fileReference self.fileReference = fileReference
self.placeholderColor = placeholderColor self.placeholderColor = placeholderColor

View File

@ -24,6 +24,7 @@ swift_library(
"//submodules/Svg:Svg", "//submodules/Svg:Svg",
"//submodules/GZip:GZip", "//submodules/GZip:GZip",
"//submodules/GradientBackground:GradientBackground", "//submodules/GradientBackground:GradientBackground",
"//submodules/TelegramPresentationData:TelegramPresentationData",
], ],
visibility = [ visibility = [
"//visibility:public", "//visibility:public",

View File

@ -1316,6 +1316,10 @@ private let qrIconImage: UIImage = {
})! })!
}() }()
private let messageImage: UIImage = {
return messageBubbleImage(maxCornerRadius: 16.0, minCornerRadius: 16.0, incoming: true, fillColor: .white, strokeColor: .clear, neighbors: .none, shadow: nil, wallpaper: .color(0x000000), knockout: false)
}()
public func themeIconImage(account: Account, accountManager: AccountManager<TelegramAccountManagerTypes>, theme: PresentationThemeReference, color: PresentationThemeAccentColor?, wallpaper: TelegramWallpaper? = nil, nightMode: Bool? = nil, emoticon: Bool = false, large: Bool = false, qr: Bool = false, message: Bool = false) -> Signal<(TransformImageArguments) -> DrawingContext?, NoError> { public func themeIconImage(account: Account, accountManager: AccountManager<TelegramAccountManagerTypes>, theme: PresentationThemeReference, color: PresentationThemeAccentColor?, wallpaper: TelegramWallpaper? = nil, nightMode: Bool? = nil, emoticon: Bool = false, large: Bool = false, qr: Bool = false, message: Bool = false) -> Signal<(TransformImageArguments) -> DrawingContext?, NoError> {
let colorsSignal: Signal<((UIColor, UIColor?, [UInt32]), [UIColor], [UIColor], UIImage?, Bool, Bool, CGFloat, Int32?), NoError> let colorsSignal: Signal<((UIColor, UIColor?, [UInt32]), [UIColor], [UIColor], UIImage?, Bool, Bool, CGFloat, Int32?), NoError>
@ -1569,7 +1573,13 @@ public func themeIconImage(account: Account, accountManager: AccountManager<Tele
} }
if message { if message {
if let image = messageImage.cgImage {
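// Flip around the canvas centre so the bubble CGImage is not drawn upside down in this context.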
c.translateBy(x: drawingRect.width / 2.0, y: drawingRect.height / 2.0)
c.scaleBy(x: 1.0, y: -1.0)
c.translateBy(x: -drawingRect.width / 2.0, y: -drawingRect.height / 2.0)
c.draw(image, in: CGRect(x: floor((drawingRect.width - 43.0) / 2.0) - 2.0, y: floor((drawingRect.height - 37.0) / 2.0), width: 43.0, height: 37.0))
}
} else if qr { } else if qr {
if let image = qrIconImage.cgImage { if let image = qrIconImage.cgImage {
c.draw(image, in: CGRect(x: floor((drawingRect.width - 36.0) / 2.0), y: floor((drawingRect.height - 36.0) / 2.0), width: 36.0, height: 36.0)) c.draw(image, in: CGRect(x: floor((drawingRect.width - 36.0) / 2.0), y: floor((drawingRect.height - 36.0) / 2.0), width: 36.0, height: 36.0))

View File

@ -689,6 +689,7 @@ webrtc_sources = [
"api/video_codecs/vp8_frame_config.cc", "api/video_codecs/vp8_frame_config.cc",
"api/video_codecs/vp8_temporal_layers.cc", "api/video_codecs/vp8_temporal_layers.cc",
"api/video_codecs/vp8_temporal_layers_factory.cc", "api/video_codecs/vp8_temporal_layers_factory.cc",
"api/video_track_source_constraints.h",
"api/candidate.h", "api/candidate.h",
"api/audio/audio_frame.h", "api/audio/audio_frame.h",
"api/ref_counted_base.h", "api/ref_counted_base.h",
@ -1497,6 +1498,7 @@ webrtc_sources = [
"logging/rtc_event_log/fake_rtc_event_log_factory.cc", "logging/rtc_event_log/fake_rtc_event_log_factory.cc",
"logging/rtc_event_log/ice_logger.cc", "logging/rtc_event_log/ice_logger.cc",
"logging/rtc_event_log/rtc_stream_config.cc", "logging/rtc_event_log/rtc_stream_config.cc",
"video/adaptation/bandwidth_quality_scaler_resource.h",
"video/adaptation/encode_usage_resource.cc", "video/adaptation/encode_usage_resource.cc",
"video/adaptation/overuse_frame_detector.cc", "video/adaptation/overuse_frame_detector.cc",
"video/adaptation/quality_scaler_resource.cc", "video/adaptation/quality_scaler_resource.cc",
@ -1725,6 +1727,7 @@ webrtc_sources = [
"common_audio/wav_file.h", "common_audio/wav_file.h",
"common_audio/wav_header.h", "common_audio/wav_header.h",
"common_audio/window_generator.h", "common_audio/window_generator.h",
"common_video/framerate_controller.h",
"common_video/frame_rate_estimator.h", "common_video/frame_rate_estimator.h",
"common_video/generic_frame_descriptor/generic_frame_info.h", "common_video/generic_frame_descriptor/generic_frame_info.h",
"common_video/h264/h264_bitstream_parser.h", "common_video/h264/h264_bitstream_parser.h",
@ -1877,6 +1880,9 @@ webrtc_sources = [
"modules/audio_coding/codecs/pcm16b/pcm16b_common.h", "modules/audio_coding/codecs/pcm16b/pcm16b_common.h",
"modules/audio_coding/codecs/red/audio_encoder_copy_red.h", "modules/audio_coding/codecs/red/audio_encoder_copy_red.h",
"modules/audio_coding/include/audio_coding_module.h", "modules/audio_coding/include/audio_coding_module.h",
"modules/audio_coding/neteq/relative_arrival_delay_tracker.h",
"modules/audio_coding/neteq/reorder_optimizer.h",
"modules/audio_coding/neteq/underrun_optimizer.h",
"modules/audio_coding/neteq/accelerate.h", "modules/audio_coding/neteq/accelerate.h",
"modules/audio_coding/neteq/audio_multi_vector.h", "modules/audio_coding/neteq/audio_multi_vector.h",
"modules/audio_coding/neteq/audio_vector.h", "modules/audio_coding/neteq/audio_vector.h",
@ -2198,6 +2204,8 @@ webrtc_sources = [
"modules/video_capture/device_info_impl.h", "modules/video_capture/device_info_impl.h",
"modules/video_capture/video_capture_factory.h", "modules/video_capture/video_capture_factory.h",
"modules/video_capture/video_capture_impl.h", "modules/video_capture/video_capture_impl.h",
"modules/video_coding/utility/vp9_constants.h",
"modules/video_coding/utility/bandwidth_quality_scaler.h",
"modules/video_coding/codec_timer.h", "modules/video_coding/codec_timer.h",
"modules/video_coding/codecs/av1/libaom_av1_decoder.h", "modules/video_coding/codecs/av1/libaom_av1_decoder.h",
"modules/video_coding/codecs/av1/libaom_av1_encoder.h", "modules/video_coding/codecs/av1/libaom_av1_encoder.h",
@ -2376,6 +2384,7 @@ webrtc_sources = [
"modules/video_coding/codecs/interface/common_constants.h", "modules/video_coding/codecs/interface/common_constants.h",
"modules/video_coding/include/video_coding.h", "modules/video_coding/include/video_coding.h",
"modules/video_coding/internal_defines.h", "modules/video_coding/internal_defines.h",
"api/wrapping_async_dns_resolver.h",
"api/array_view.h", "api/array_view.h",
"api/async_resolver_factory.h", "api/async_resolver_factory.h",
"api/audio_codecs/audio_decoder_factory.h", "api/audio_codecs/audio_decoder_factory.h",
@ -2413,6 +2422,7 @@ webrtc_sources = [
"audio/utility/channel_mixing_matrix.h", "audio/utility/channel_mixing_matrix.h",
"audio/voip/audio_egress.h", "audio/voip/audio_egress.h",
"p2p/base/ice_controller_factory_interface.h", "p2p/base/ice_controller_factory_interface.h",
"rtc_base/bitstream_reader.h",
"rtc_base/async_invoker_inl.h", "rtc_base/async_invoker_inl.h",
"rtc_base/buffer.h", "rtc_base/buffer.h",
"rtc_base/compile_assert_c.h", "rtc_base/compile_assert_c.h",
@ -2514,6 +2524,7 @@ webrtc_sources = [
"modules/audio_processing/aec3/fft_data.h", "modules/audio_processing/aec3/fft_data.h",
"modules/audio_processing/aec3/nearend_detector.h", "modules/audio_processing/aec3/nearend_detector.h",
"modules/audio_processing/aecm/aecm_defines.h", "modules/audio_processing/aecm/aecm_defines.h",
"modules/audio_processing/agc/analog_gain_stats_reporter.h",
"modules/audio_processing/agc/legacy/gain_control.h", "modules/audio_processing/agc/legacy/gain_control.h",
"modules/audio_processing/agc2/rnn_vad/ring_buffer.h", "modules/audio_processing/agc2/rnn_vad/ring_buffer.h",
"modules/audio_processing/transient/daubechies_8_wavelet_coeffs.h", "modules/audio_processing/transient/daubechies_8_wavelet_coeffs.h",
@ -2625,6 +2636,7 @@ webrtc_sources = [
"p2p/base/p2p_transport_channel_ice_field_trials.h", "p2p/base/p2p_transport_channel_ice_field_trials.h",
"rtc_base/swap_queue.h", "rtc_base/swap_queue.h",
"rtc_base/system/ignore_warnings.h", "rtc_base/system/ignore_warnings.h",
"logging/rtc_event_log/encoder/bit_writer.h",
"logging/rtc_event_log/encoder/rtc_event_log_encoder.h", "logging/rtc_event_log/encoder/rtc_event_log_encoder.h",
"logging/rtc_event_log/events/rtc_event_remote_estimate.h", "logging/rtc_event_log/events/rtc_event_remote_estimate.h",
"system_wrappers/include/clock.h", "system_wrappers/include/clock.h",
@ -2853,6 +2865,7 @@ webrtc_sources = [
"modules/video_coding/svc/scalability_structure_key_svc.h", "modules/video_coding/svc/scalability_structure_key_svc.h",
"rtc_base/async_resolver.h", "rtc_base/async_resolver.h",
"rtc_base/async_resolver.cc", "rtc_base/async_resolver.cc",
"rtc_base/experiments/bandwidth_quality_scaler_settings.h",
"rtc_base/experiments/encoder_info_settings.h", "rtc_base/experiments/encoder_info_settings.h",
"rtc_base/experiments/encoder_info_settings.cc", "rtc_base/experiments/encoder_info_settings.cc",
"rtc_base/system_time.h", "rtc_base/system_time.h",
@ -2866,6 +2879,8 @@ webrtc_sources = [
"api/async_dns_resolver.h", "api/async_dns_resolver.h",
"modules/video_coding/utility/qp_parser.h", "modules/video_coding/utility/qp_parser.h",
"modules/video_coding/utility/qp_parser.cc", "modules/video_coding/utility/qp_parser.cc",
"modules/audio_processing/agc2/adaptive_digital_gain_controller.h",
"modules/audio_processing/agc2/vad_wrapper.h",
"modules/audio_processing/agc2/saturation_protector_buffer.h", "modules/audio_processing/agc2/saturation_protector_buffer.h",
"modules/audio_processing/agc2/saturation_protector_buffer.cc", "modules/audio_processing/agc2/saturation_protector_buffer.cc",
"modules/audio_processing/capture_levels_adjuster/capture_levels_adjuster.h", "modules/audio_processing/capture_levels_adjuster/capture_levels_adjuster.h",
@ -2919,6 +2934,7 @@ webrtc_sources = [
"modules/audio_coding/neteq/relative_arrival_delay_tracker.cc", "modules/audio_coding/neteq/relative_arrival_delay_tracker.cc",
"modules/audio_processing/agc2/vad_wrapper.cc", "modules/audio_processing/agc2/vad_wrapper.cc",
"modules/audio_processing/agc2/adaptive_digital_gain_controller.cc", "modules/audio_processing/agc2/adaptive_digital_gain_controller.cc",
"modules/video_coding/utility/framerate_controller_deprecated.h",
"modules/video_coding/utility/framerate_controller_deprecated.cc", "modules/video_coding/utility/framerate_controller_deprecated.cc",
"video/adaptation/bandwidth_quality_scaler_resource.cc", "video/adaptation/bandwidth_quality_scaler_resource.cc",
"api/wrapping_async_dns_resolver.cc", "api/wrapping_async_dns_resolver.cc",