Media spoilers

Ilya Laktyushin 2022-12-10 15:20:23 +04:00
parent 0aa94cb180
commit 874925d36d
35 changed files with 1750 additions and 397 deletions

View File

@ -8479,3 +8479,9 @@ Sorry for the inconvenience.";
"Conversation.SuggestedVideoText" = "%@ suggests you to use this video for your Telegram account.";
"Conversation.SuggestedVideoTextYou" = "You suggested %@ to use this video for their Telegram account.";
"Conversation.SuggestedVideoView" = "View";
"Conversation.Messages_1" = "%@ message";
"Conversation.Messages_any" = "%@ messages";
"Notification.SuggestedProfilePhoto" = "Suggested Profile Photo";
"Notification.SuggestedProfileVideo" = "Suggested Profile Video";

View File

@ -47,6 +47,12 @@ public final class AnimationNode : ASDisplayNode {
self.colorCallbacks.append(colorCallback)
view.setValueDelegate(colorCallback, for: LOTKeypath(string: "\(key).Color"))*/
}
if let value = colors["__allcolors__"] {
for keypath in view.allKeypaths(predicate: { $0.keys.last == "Color" }) {
view.setValueProvider(ColorValueProvider(value.lottieColorValue), keypath: AnimationKeypath(keypath: keypath))
}
}
}
return view
@ -75,6 +81,12 @@ public final class AnimationNode : ASDisplayNode {
self.colorCallbacks.append(colorCallback)
view.setValueDelegate(colorCallback, for: LOTKeypath(string: "\(key).Color"))*/
}
if let value = colors["__allcolors__"] {
for keypath in view.allKeypaths(predicate: { $0.keys.last == "Color" }) {
view.setValueProvider(ColorValueProvider(value.lottieColorValue), keypath: AnimationKeypath(keypath: keypath))
}
}
}
return view
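
For context, the new "__allcolors__" key acts as a wildcard: instead of registering a value provider per layer keypath, callers can tint every "Color" keypath of a Lottie animation with a single UIColor. A minimal usage sketch, mirroring the call this commit adds for the context-menu icon; "anim_spoiler" is the animation name used later in this diff, while parentNode and the white tint are hypothetical:

// Recolor the whole "anim_spoiler" Lottie animation with one color.
// "__allcolors__" is the wildcard key handled by AnimationNode above.
let animationNode = AnimationNode(animation: "anim_spoiler", colors: ["__allcolors__": UIColor.white], scale: 1.0)
animationNode.loop()
parentNode.addSubnode(animationNode)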

View File

@ -573,6 +573,17 @@ private final class ChatListMediaPreviewNode: ASDisplayNode {
self.imageNode.setSignal(signal, attemptSynchronously: synchronousLoads)
}
}
} else if case let .action(action) = self.media, case let .suggestedProfilePhoto(image) = action.action, let image = image {
isRound = true
self.playIcon.isHidden = true
if let largest = largestImageRepresentation(image.representations) {
dimensions = largest.dimensions.cgSize
if !self.requestedImage {
self.requestedImage = true
let signal = mediaGridMessagePhoto(account: self.context.account, photoReference: .message(message: MessageReference(self.message._asMessage()), media: image), fullRepresentationSize: CGSize(width: 36.0, height: 36.0), synchronousLoad: synchronousLoads)
self.imageNode.setSignal(signal, attemptSynchronously: synchronousLoads)
}
}
} else if case let .file(file) = self.media {
if file.isInstantVideo {
isRound = true
@ -1294,9 +1305,25 @@ class ChatListItemNode: ItemListRevealOptionsItemNode {
guard let strongSelf = self else {
return
}
let cachedPeerData = peerView.cachedData
if let cachedPeerData = cachedPeerData as? CachedUserData, case let .known(maybePhoto) = cachedPeerData.photo {
if let photo = maybePhoto, let video = smallestVideoRepresentation(photo.videoRepresentations), let peerReference = PeerReference(peer._asPeer()) {
let cachedPeerData = peerView.cachedData as? CachedUserData
var personalPhoto: TelegramMediaImage?
var profilePhoto: TelegramMediaImage?
var isKnown = false
if let cachedPeerData = cachedPeerData {
if case let .known(maybePersonalPhoto) = cachedPeerData.personalPhoto {
personalPhoto = maybePersonalPhoto
isKnown = true
}
if case let .known(maybePhoto) = cachedPeerData.photo {
profilePhoto = maybePhoto
isKnown = true
}
}
if isKnown {
let photo = personalPhoto ?? profilePhoto
if let photo = photo, let video = smallestVideoRepresentation(photo.videoRepresentations), let peerReference = PeerReference(peer._asPeer()) {
let videoId = photo.id?.id ?? peer.id.id._internalGetInt64Value()
let videoFileReference = FileMediaReference.avatarList(peer: peerReference, media: TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.LocalFile, id: 0), partialReference: nil, resource: video.resource, previewRepresentations: photo.representations, videoThumbnails: [], immediateThumbnailData: photo.immediateThumbnailData, mimeType: "video/mp4", size: nil, attributes: [.Animated, .Video(duration: 0, size: video.dimensions, flags: [])]))
let videoContent = NativeVideoContent(id: .profileVideo(videoId, nil), fileReference: videoFileReference, streamVideo: isMediaStreamable(resource: video.resource) ? .conservative : .none, loopVideo: true, enableSound: false, fetchAutomatically: true, onlyFullSizeThumbnail: false, useLargeThumbnail: true, autoFetchFullSizeThumbnail: true, startTimestamp: video.startTimestamp, continuePlayingWithoutSoundOnLostAudioSession: false, placeholderColor: .clear, captureProtected: false)
@ -1936,6 +1963,9 @@ class ChatListItemNode: ItemListRevealOptionsItemNode {
}
break inner
}
} else if let action = media as? TelegramMediaAction, case let .suggestedProfilePhoto(image) = action.action, let _ = image {
let fitSize = contentImageSize
contentImageSpecs.append((message, .action(action), fitSize))
}
}
}
@ -3165,7 +3195,11 @@ class ChatListItemNode: ItemListRevealOptionsItemNode {
var mediaPreviewOffset = textNodeFrame.origin.offsetBy(dx: 1.0, dy: floor((measureLayout.size.height - contentImageSize.height) / 2.0))
var validMediaIds: [EngineMedia.Id] = []
for (message, media, mediaSize) in contentImageSpecs {
guard let mediaId = media.id else {
var mediaId = media.id
if mediaId == nil, case let .action(action) = media, case let .suggestedProfilePhoto(image) = action.action {
mediaId = image?.id
}
guard let mediaId = mediaId else {
continue
}
validMediaIds.append(mediaId)

View File

@ -24,6 +24,7 @@ swift_library(
"//submodules/TelegramUI/Components/TextNodeWithEntities:TextNodeWithEntities",
"//submodules/TelegramUI/Components/EntityKeyboard:EntityKeyboard",
"//submodules/UndoUI:UndoUI",
"//submodules/AnimationUI:AnimationUI",
],
visibility = [
"//visibility:public",

View File

@ -102,6 +102,7 @@ public final class ContextMenuActionItem {
public let badge: ContextMenuActionBadge?
public let icon: (PresentationTheme) -> UIImage?
public let iconSource: ContextMenuActionItemIconSource?
public let animationName: String?
public let textIcon: (PresentationTheme) -> UIImage?
public let textLinkAction: () -> Void
public let action: ((Action) -> Void)?
@ -116,6 +117,7 @@ public final class ContextMenuActionItem {
badge: ContextMenuActionBadge? = nil,
icon: @escaping (PresentationTheme) -> UIImage?,
iconSource: ContextMenuActionItemIconSource? = nil,
animationName: String? = nil,
textIcon: @escaping (PresentationTheme) -> UIImage? = { _ in return nil },
textLinkAction: @escaping () -> Void = {},
action: ((ContextControllerProtocol, @escaping (ContextMenuActionResult) -> Void) -> Void)?
@ -130,6 +132,7 @@ public final class ContextMenuActionItem {
badge: badge,
icon: icon,
iconSource: iconSource,
animationName: animationName,
textIcon: textIcon,
textLinkAction: textLinkAction,
action: action.flatMap { action in
@ -150,6 +153,7 @@ public final class ContextMenuActionItem {
badge: ContextMenuActionBadge? = nil,
icon: @escaping (PresentationTheme) -> UIImage?,
iconSource: ContextMenuActionItemIconSource? = nil,
animationName: String? = nil,
textIcon: @escaping (PresentationTheme) -> UIImage? = { _ in return nil },
textLinkAction: @escaping () -> Void = {},
action: ((Action) -> Void)?
@ -163,6 +167,7 @@ public final class ContextMenuActionItem {
self.badge = badge
self.icon = icon
self.iconSource = iconSource
self.animationName = animationName
self.textIcon = textIcon
self.textLinkAction = textLinkAction
self.action = action
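
A hedged usage sketch of the new optional animationName parameter, mirroring the call this commit adds in MediaPickerScreen; the text is the hardcoded label used there, and all other parameters rely on the existing initializer defaults:

let spoilerItem = ContextMenuActionItem(
    text: "Spoiler Effect",
    icon: { _ in return nil },            // no static icon; the Lottie animation is rendered instead
    animationName: "anim_spoiler",        // bundled animation, tinted via the "__allcolors__" key
    action: { _, f in
        f(.default)
    }
)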

View File

@ -12,6 +12,7 @@ import Markdown
import EntityKeyboard
import AnimationCache
import MultiAnimationRenderer
import AnimationUI
public protocol ContextControllerActionsStackItemNode: ASDisplayNode {
var wantsFullWidth: Bool { get }
@ -63,6 +64,7 @@ private final class ContextControllerActionsListActionItemNode: HighlightTrackin
private let titleLabelNode: ImmediateTextNode
private let subtitleNode: ImmediateTextNode
private let iconNode: ASImageNode
private var animationNode: AnimationNode?
private var iconDisposable: Disposable?
@ -94,7 +96,7 @@ private final class ContextControllerActionsListActionItemNode: HighlightTrackin
self.iconNode = ASImageNode()
self.iconNode.isAccessibilityElement = false
self.iconNode.isUserInteractionEnabled = false
super.init()
self.isAccessibilityElement = true
@ -275,6 +277,14 @@ private final class ContextControllerActionsListActionItemNode: HighlightTrackin
}
} else if let image = self.iconNode.image {
iconSize = image.size
} else if let animationName = self.item.animationName {
if self.animationNode == nil {
let animationNode = AnimationNode(animation: animationName, colors: ["__allcolors__": titleColor], scale: 1.0)
animationNode.loop()
self.addSubnode(animationNode)
self.animationNode = animationNode
}
iconSize = CGSize(width: 24.0, height: 24.0)
} else {
let iconImage = self.item.icon(presentationData.theme)
self.iconNode.image = iconImage
@ -323,6 +333,9 @@ private final class ContextControllerActionsListActionItemNode: HighlightTrackin
let iconWidth = max(standardIconWidth, iconSize.width)
let iconFrame = CGRect(origin: CGPoint(x: size.width - iconSideInset - iconWidth + floor((iconWidth - iconSize.width) / 2.0), y: floor((size.height - iconSize.height) / 2.0)), size: iconSize)
transition.updateFrame(node: self.iconNode, frame: iconFrame, beginWithCurrentState: true)
if let animationNode = self.animationNode {
transition.updateFrame(node: animationNode, frame: iconFrame, beginWithCurrentState: true)
}
}
})
}

View File

@ -284,7 +284,7 @@ class DrawingGesturePipeline {
}
}
var mode: Mode = .polyline {
var mode: Mode = .location {
didSet {
if [.location, .polyline].contains(self.mode) {
self.gestureRecognizer?.usePredictedTouches = false

View File

@ -279,7 +279,7 @@ struct DrawingState: Equatable {
return DrawingState(
selectedTool: .pen,
tools: [
.pen(DrawingToolState.BrushState(color: DrawingColor(rgb: 0xffffff), size: 0.3, mode: .round)),
.pen(DrawingToolState.BrushState(color: DrawingColor(rgb: 0xe22400), size: 0.25, mode: .round)),
.marker(DrawingToolState.BrushState(color: DrawingColor(rgb: 0xfee21b), size: 0.5, mode: .round)),
.neon(DrawingToolState.BrushState(color: DrawingColor(rgb: 0x34ffab), size: 0.5, mode: .round)),
.pencil(DrawingToolState.BrushState(color: DrawingColor(rgb: 0x2570f0), size: 0.5, mode: .round)),

View File

@ -71,7 +71,7 @@ public final class DrawingView: UIView, UIGestureRecognizerDelegate, TGPhotoDraw
var tool: Tool = .pen
var toolColor: DrawingColor = DrawingColor(color: .white)
var toolBrushSize: CGFloat = 0.35
var toolBrushSize: CGFloat = 0.25
var toolHasArrow: Bool = false
var stateUpdated: (NavigationState) -> Void = { _ in }
@ -577,7 +577,7 @@ public final class DrawingView: UIView, UIGestureRecognizerDelegate, TGPhotoDraw
func updateToolState(_ state: DrawingToolState) {
switch state {
case let .pen(brushState):
self.drawingGesturePipeline?.mode = .polyline
self.drawingGesturePipeline?.mode = .location
self.tool = .pen
self.toolColor = brushState.color
self.toolBrushSize = brushState.size

File diff suppressed because it is too large.

View File

@ -15,7 +15,7 @@ func createEmitterBehavior(type: String) -> NSObject {
return castedBehaviorWithType(behaviorClass, NSSelectorFromString(selector), type)
}
private func generateMaskImage(size originalSize: CGSize, position: CGPoint, inverse: Bool) -> UIImage? {
func generateMaskImage(size originalSize: CGSize, position: CGPoint, inverse: Bool) -> UIImage? {
var size = originalSize
var position = position
var scale: CGFloat = 1.0
@ -58,8 +58,7 @@ public class InvisibleInkDustNode: ASDisplayNode {
private let emitterMaskFillNode: ASDisplayNode
public var isRevealed = false
private var exploding = false
private var isExploding = false
public init(textNode: TextNode?) {
self.textNode = textNode
@ -158,8 +157,8 @@ public class InvisibleInkDustNode: ASDisplayNode {
transition.updateAlpha(node: self, alpha: 1.0)
transition.updateAlpha(node: textNode, alpha: 0.0)
if self.exploding {
self.exploding = false
if self.isExploding {
self.isExploding = false
self.emitterLayer?.setValue(false, forKeyPath: "emitterBehaviors.fingerAttractor.enabled")
}
}
@ -171,7 +170,7 @@ public class InvisibleInkDustNode: ASDisplayNode {
}
self.isRevealed = true
self.exploding = true
self.isExploding = true
let position = gestureRecognizer.location(in: self.view)
self.emitterLayer?.setValue(true, forKeyPath: "emitterBehaviors.fingerAttractor.enabled")
@ -227,7 +226,7 @@ public class InvisibleInkDustNode: ASDisplayNode {
}
Queue.mainQueue().after(0.8 * UIView.animationDurationFactor()) {
self.exploding = false
self.isExploding = false
self.emitterLayer?.setValue(false, forKeyPath: "emitterBehaviors.fingerAttractor.enabled")
self.textSpotNode.layer.removeAllAnimations()

View File

@ -13,15 +13,36 @@ public class MediaDustNode: ASDisplayNode {
private var emitterNode: ASDisplayNode
private var emitter: CAEmitterCell?
private var emitterLayer: CAEmitterLayer?
private let emitterMaskNode: ASDisplayNode
private let emitterSpotNode: ASImageNode
private let emitterMaskFillNode: ASDisplayNode
public var isRevealed = false
private var isExploding = false
public var revealed: () -> Void = {}
public override init() {
self.emitterNode = ASDisplayNode()
self.emitterNode.isUserInteractionEnabled = false
self.emitterNode.clipsToBounds = true
self.emitterMaskNode = ASDisplayNode()
self.emitterSpotNode = ASImageNode()
self.emitterSpotNode.contentMode = .scaleToFill
self.emitterSpotNode.isUserInteractionEnabled = false
self.emitterMaskFillNode = ASDisplayNode()
self.emitterMaskFillNode.backgroundColor = .white
self.emitterMaskFillNode.isUserInteractionEnabled = false
super.init()
self.addSubnode(self.emitterNode)
self.emitterMaskNode.addSubnode(self.emitterSpotNode)
self.emitterMaskNode.addSubnode(self.emitterMaskFillNode)
}
public override func didLoad() {
@ -51,8 +72,25 @@ public class MediaDustNode: ASDisplayNode {
scaleBehavior.setValue("scale", forKey: "keyPath")
scaleBehavior.setValue([0.0, 0.5], forKey: "values")
scaleBehavior.setValue([0.0, 0.05], forKey: "locations")
let behaviors = [alphaBehavior, scaleBehavior]
let randomAttractor0 = createEmitterBehavior(type: "simpleAttractor")
randomAttractor0.setValue("randomAttractor0", forKey: "name")
randomAttractor0.setValue(20, forKey: "falloff")
randomAttractor0.setValue(35, forKey: "radius")
randomAttractor0.setValue(5, forKey: "stiffness")
randomAttractor0.setValue(NSValue(cgPoint: .zero), forKey: "position")
let randomAttractor1 = createEmitterBehavior(type: "simpleAttractor")
randomAttractor1.setValue("randomAttractor1", forKey: "name")
randomAttractor1.setValue(20, forKey: "falloff")
randomAttractor1.setValue(35, forKey: "radius")
randomAttractor1.setValue(5, forKey: "stiffness")
randomAttractor1.setValue(NSValue(cgPoint: .zero), forKey: "position")
let fingerAttractor = createEmitterBehavior(type: "simpleAttractor")
fingerAttractor.setValue("fingerAttractor", forKey: "name")
let behaviors = [randomAttractor0, randomAttractor1, fingerAttractor, alphaBehavior, scaleBehavior]
let emitterLayer = CAEmitterLayer()
emitterLayer.masksToBounds = true
@ -62,14 +100,143 @@ public class MediaDustNode: ASDisplayNode {
emitterLayer.seed = arc4random()
emitterLayer.emitterShape = .rectangle
emitterLayer.setValue(behaviors, forKey: "emitterBehaviors")
emitterLayer.setValue(4.0, forKeyPath: "emitterBehaviors.fingerAttractor.stiffness")
emitterLayer.setValue(false, forKeyPath: "emitterBehaviors.fingerAttractor.enabled")
self.emitterLayer = emitterLayer
self.emitterNode.layer.addSublayer(emitterLayer)
self.updateEmitter()
self.setupRandomAnimations()
self.view.addGestureRecognizer(UITapGestureRecognizer(target: self, action: #selector(self.tap(_:))))
}
@objc private func tap(_ gestureRecognizer: UITapGestureRecognizer) {
guard !self.isRevealed else {
return
}
self.isRevealed = true
self.isExploding = true
let position = gestureRecognizer.location(in: self.view)
self.emitterLayer?.setValue(true, forKeyPath: "emitterBehaviors.fingerAttractor.enabled")
self.emitterLayer?.setValue(position, forKeyPath: "emitterBehaviors.fingerAttractor.position")
let maskSize = self.emitterNode.frame.size
Queue.concurrentDefaultQueue().async {
let emitterMaskImage = generateMaskImage(size: maskSize, position: position, inverse: true)
Queue.mainQueue().async {
self.emitterSpotNode.image = emitterMaskImage
}
}
Queue.mainQueue().after(0.1 * UIView.animationDurationFactor()) {
let xFactor = (position.x / self.emitterNode.frame.width - 0.5) * 2.0
let yFactor = (position.y / self.emitterNode.frame.height - 0.5) * 2.0
let maxFactor = max(abs(xFactor), abs(yFactor))
let scaleAddition = maxFactor * 4.0
let durationAddition = -maxFactor * 0.2
self.supernode?.view.mask = self.emitterMaskNode.view
self.emitterSpotNode.frame = CGRect(x: 0.0, y: 0.0, width: self.emitterMaskNode.frame.width * 3.0, height: self.emitterMaskNode.frame.height * 3.0)
self.emitterSpotNode.layer.anchorPoint = CGPoint(x: position.x / self.emitterMaskNode.frame.width, y: position.y / self.emitterMaskNode.frame.height)
self.emitterSpotNode.position = position
self.emitterSpotNode.layer.animateScale(from: 0.3333, to: 10.5 + scaleAddition, duration: 0.45 + durationAddition, removeOnCompletion: false, completion: { [weak self] _ in
self?.revealed()
self?.alpha = 0.0
self?.supernode?.view.mask = nil
})
self.emitterMaskFillNode.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2, removeOnCompletion: false)
}
Queue.mainQueue().after(0.8 * UIView.animationDurationFactor()) {
self.isExploding = false
self.emitterLayer?.setValue(false, forKeyPath: "emitterBehaviors.fingerAttractor.enabled")
self.emitterSpotNode.layer.removeAllAnimations()
self.emitterMaskFillNode.layer.removeAllAnimations()
}
}
private var didSetupAnimations = false
private func setupRandomAnimations() {
guard self.frame.width > 0.0, self.emitterLayer != nil, !self.didSetupAnimations else {
return
}
self.didSetupAnimations = true
let falloffAnimation1 = CABasicAnimation(keyPath: "emitterBehaviors.randomAttractor0.falloff")
falloffAnimation1.beginTime = 0.0
falloffAnimation1.fillMode = .both
falloffAnimation1.isRemovedOnCompletion = false
falloffAnimation1.autoreverses = true
falloffAnimation1.repeatCount = .infinity
falloffAnimation1.duration = 2.0
falloffAnimation1.fromValue = -20.0 as NSNumber
falloffAnimation1.toValue = 60.0 as NSNumber
falloffAnimation1.timingFunction = CAMediaTimingFunction(name: .easeInEaseOut)
self.emitterLayer?.add(falloffAnimation1, forKey: "emitterBehaviors.randomAttractor0.falloff")
let positionAnimation1 = CAKeyframeAnimation(keyPath: "emitterBehaviors.randomAttractor0.position")
positionAnimation1.beginTime = 0.0
positionAnimation1.fillMode = .both
positionAnimation1.isRemovedOnCompletion = false
positionAnimation1.autoreverses = true
positionAnimation1.repeatCount = .infinity
positionAnimation1.duration = 3.0
positionAnimation1.calculationMode = .discrete
let xInset1: CGFloat = self.frame.width * 0.2
let yInset1: CGFloat = self.frame.height * 0.2
var positionValues1: [CGPoint] = []
for _ in 0 ..< 35 {
positionValues1.append(CGPoint(x: CGFloat.random(in: xInset1 ..< self.frame.width - xInset1), y: CGFloat.random(in: yInset1 ..< self.frame.height - yInset1)))
}
positionAnimation1.values = positionValues1
self.emitterLayer?.add(positionAnimation1, forKey: "emitterBehaviors.randomAttractor0.position")
let falloffAnimation2 = CABasicAnimation(keyPath: "emitterBehaviors.randomAttractor1.falloff")
falloffAnimation2.beginTime = 0.0
falloffAnimation2.fillMode = .both
falloffAnimation2.isRemovedOnCompletion = false
falloffAnimation2.autoreverses = true
falloffAnimation2.repeatCount = .infinity
falloffAnimation2.duration = 2.0
falloffAnimation2.fromValue = -20.0 as NSNumber
falloffAnimation2.toValue = 60.0 as NSNumber
falloffAnimation2.timingFunction = CAMediaTimingFunction(name: .easeInEaseOut)
self.emitterLayer?.add(falloffAnimation2, forKey: "emitterBehaviors.randomAttractor1.falloff")
let positionAnimation2 = CAKeyframeAnimation(keyPath: "emitterBehaviors.randomAttractor1.position")
positionAnimation2.beginTime = 0.0
positionAnimation2.fillMode = .both
positionAnimation2.isRemovedOnCompletion = false
positionAnimation2.autoreverses = true
positionAnimation2.repeatCount = .infinity
positionAnimation2.duration = 3.0
positionAnimation2.calculationMode = .discrete
let xInset2: CGFloat = self.frame.width * 0.1
let yInset2: CGFloat = self.frame.height * 0.1
var positionValues2: [CGPoint] = []
for _ in 0 ..< 35 {
positionValues2.append(CGPoint(x: CGFloat.random(in: xInset2 ..< self.frame.width - xInset2), y: CGFloat.random(in: yInset2 ..< self.frame.height - yInset2)))
}
positionAnimation2.values = positionValues2
self.emitterLayer?.add(positionAnimation2, forKey: "emitterBehaviors.randomAttractor1.position")
}
private func updateEmitter() {
guard let (size, _) = self.currentParams else {
return
@ -79,19 +246,35 @@ public class MediaDustNode: ASDisplayNode {
self.emitterLayer?.emitterSize = size
self.emitterLayer?.emitterPosition = CGPoint(x: size.width / 2.0, y: size.height / 2.0)
let radius = max(size.width, size.height)
self.emitterLayer?.setValue(max(size.width, size.height), forKeyPath: "emitterBehaviors.fingerAttractor.radius")
self.emitterLayer?.setValue(radius * -0.5, forKeyPath: "emitterBehaviors.fingerAttractor.falloff")
let square = Float(size.width * size.height)
Queue.mainQueue().async {
self.emitter?.birthRate = min(100000.0, square * 0.016)
self.emitter?.birthRate = min(100000.0, square * 0.02)
}
}
public func update(size: CGSize, color: UIColor) {
self.currentParams = (size, color)
self.emitterNode.frame = CGRect(origin: CGPoint(), size: size)
self.emitterMaskNode.frame = self.emitterNode.bounds
self.emitterMaskFillNode.frame = self.emitterNode.bounds
if self.isNodeLoaded {
self.updateEmitter()
self.setupRandomAnimations()
}
}
public override func point(inside point: CGPoint, with event: UIEvent?) -> Bool {
if !self.isRevealed {
return super.point(inside: point, with: event)
} else {
return false
}
}
}
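
For reference, a minimal consumer of the reworked MediaDustNode (a sketch; contentNode stands in for whatever node the dust overlays and is not part of this commit):

let dustNode = MediaDustNode()
dustNode.revealed = {
    // Fired once the tap-to-reveal explosion and mask animation have finished.
    print("media spoiler revealed")
}
contentNode.addSubnode(dustNode)
dustNode.frame = contentNode.bounds
dustNode.update(size: contentNode.bounds.size, color: .white)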

View File

@ -245,7 +245,7 @@ public class ItemListSwitchItemNode: ListViewItemNode, ItemListItemNode {
insets.bottom = 0.0
}
let (titleLayout, titleApply) = makeTitleLayout(TextNodeLayoutArguments(attributedString: NSAttributedString(string: item.title, font: titleFont, textColor: item.presentationData.theme.list.itemPrimaryTextColor), backgroundColor: nil, maximumNumberOfLines: item.maximumNumberOfLines, truncationType: .end, constrainedSize: CGSize(width: params.width - params.leftInset - params.rightInset - 80.0, height: CGFloat.greatestFiniteMagnitude), alignment: .natural, cutout: nil, insets: UIEdgeInsets()))
let (titleLayout, titleApply) = makeTitleLayout(TextNodeLayoutArguments(attributedString: NSAttributedString(string: item.title, font: titleFont, textColor: item.presentationData.theme.list.itemPrimaryTextColor), backgroundColor: nil, maximumNumberOfLines: item.maximumNumberOfLines, truncationType: .end, constrainedSize: CGSize(width: params.width - leftInset - params.rightInset - 64.0, height: CGFloat.greatestFiniteMagnitude), alignment: .natural, cutout: nil, insets: UIEdgeInsets()))
contentSize.height = max(contentSize.height, titleLayout.size.height + 22.0)

View File

@ -86,6 +86,12 @@
- (void)setTimer:(NSNumber *)timer forItem:(NSObject<TGMediaEditableItem> *)item;
- (SSignal *)timersUpdatedSignal;
- (bool)spoilerForItem:(NSObject<TGMediaEditableItem> *)item;
- (SSignal *)spoilerSignalForItem:(NSObject<TGMediaEditableItem> *)item;
- (SSignal *)spoilerSignalForIdentifier:(NSString *)identifier;
- (void)setSpoiler:(bool)spoiler forItem:(NSObject<TGMediaEditableItem> *)item;
- (SSignal *)spoilersUpdatedSignal;
- (UIImage *)paintingImageForItem:(NSObject<TGMediaEditableItem> *)item;
- (UIImage *)stillPaintingImageForItem:(NSObject<TGMediaEditableItem> *)item;
- (bool)setPaintingData:(NSData *)data image:(UIImage *)image stillImage:(UIImage *)stillImage forItem:(NSObject<TGMediaEditableItem> *)item dataUrl:(NSURL **)dataOutUrl imageUrl:(NSURL **)imageOutUrl forVideo:(bool)video;

View File

@ -2,7 +2,7 @@
@interface TGPhotoVideoEditor : NSObject
+ (void)presentWithContext:(id<LegacyComponentsContext>)context parentController:(TGViewController *)parentController image:(UIImage *)image video:(NSURL *)video didFinishWithImage:(void (^)(UIImage *image))didFinishWithImage didFinishWithVideo:(void (^)(UIImage *image, NSURL *url, TGVideoEditAdjustments *adjustments))didFinishWithVideo dismissed:(void (^)(void))dismissed;
+ (void)presentWithContext:(id<LegacyComponentsContext>)context parentController:(TGViewController *)parentController image:(UIImage *)image video:(NSURL *)video stickersContext:(id<TGPhotoPaintStickersContext>)stickersContext didFinishWithImage:(void (^)(UIImage *image))didFinishWithImage didFinishWithVideo:(void (^)(UIImage *image, NSURL *url, TGVideoEditAdjustments *adjustments))didFinishWithVideo dismissed:(void (^)(void))dismissed;
+ (void)presentWithContext:(id<LegacyComponentsContext>)context controller:(TGViewController *)controller caption:(NSAttributedString *)caption withItem:(id<TGMediaEditableItem, TGMediaSelectableItem>)item paint:(bool)paint recipientName:(NSString *)recipientName stickersContext:(id<TGPhotoPaintStickersContext>)stickersContext snapshots:(NSArray *)snapshots immediate:(bool)immediate appeared:(void (^)(void))appeared completion:(void (^)(id<TGMediaEditableItem>, TGMediaEditingContext *))completion dismissed:(void (^)())dismissed;

View File

@ -957,6 +957,8 @@
}
}
bool spoiler = [editingContext spoilerForItem:item];
switch (asset.type)
{
case TGMediaAssetPhotoType:
@ -1029,6 +1031,10 @@
else if (groupedId != nil && !hasAnyTimers)
dict[@"groupedId"] = groupedId;
if (spoiler) {
dict[@"spoiler"] = @true;
}
id generatedItem = descriptionGenerator(dict, caption, nil, asset.identifier);
return generatedItem;
}];
@ -1105,6 +1111,10 @@
else if (groupedId != nil && !hasAnyTimers)
dict[@"groupedId"] = groupedId;
if (spoiler) {
dict[@"spoiler"] = @true;
}
id generatedItem = descriptionGenerator(dict, caption, nil, asset.identifier);
return generatedItem;
}];
@ -1188,6 +1198,10 @@
else if (groupedId != nil && !hasAnyTimers)
dict[@"groupedId"] = groupedId;
if (spoiler) {
dict[@"spoiler"] = @true;
}
id generatedItem = descriptionGenerator(dict, caption, nil, asset.identifier);
return generatedItem;
}] catch:^SSignal *(__unused id error)
@ -1228,6 +1242,10 @@
if (groupedId != nil)
dict[@"groupedId"] = groupedId;
if (spoiler) {
dict[@"spoiler"] = @true;
}
id generatedItem = descriptionGenerator(dict, caption, nil, asset.identifier);
return generatedItem;
}]];
@ -1297,6 +1315,10 @@
else if (groupedId != nil && !hasAnyTimers)
dict[@"groupedId"] = groupedId;
if (spoiler) {
dict[@"spoiler"] = @true;
}
id generatedItem = descriptionGenerator(dict, caption, nil, asset.identifier);
return generatedItem;
}]];
@ -1374,6 +1396,10 @@
if (timer != nil)
dict[@"timer"] = timer;
if (spoiler) {
dict[@"spoiler"] = @true;
}
id generatedItem = descriptionGenerator(dict, caption, nil, asset.identifier);
return generatedItem;
}]];
@ -1387,8 +1413,7 @@
break;
}
if (groupedId != nil && i == 10)
{
if (groupedId != nil && i == 10) {
i = 0;
groupedId = @([self generateGroupedId]);
}

View File

@ -54,6 +54,16 @@
@end
@interface TGMediaSpoilerUpdate : NSObject
@property (nonatomic, readonly, strong) id<TGMediaEditableItem> item;
@property (nonatomic, readonly) bool spoiler;
+ (instancetype)spoilerUpdateWithItem:(id<TGMediaEditableItem>)item spoiler:(bool)spoiler;
+ (instancetype)spoilerUpdate:(bool)spoiler;
@end
@interface TGModernCache (Private)
@ -69,6 +79,8 @@
NSMutableDictionary *_adjustments;
NSMutableDictionary *_timers;
NSNumber *_timer;
NSMutableDictionary *_spoilers;
SQueue *_queue;
@ -99,6 +111,7 @@
SPipe *_adjustmentsPipe;
SPipe *_captionPipe;
SPipe *_timerPipe;
SPipe *_spoilerPipe;
SPipe *_fullSizePipe;
SPipe *_cropPipe;
@ -119,6 +132,7 @@
_captions = [[NSMutableDictionary alloc] init];
_adjustments = [[NSMutableDictionary alloc] init];
_timers = [[NSMutableDictionary alloc] init];
_spoilers = [[NSMutableDictionary alloc] init];
_imageCache = [[TGMemoryImageCache alloc] initWithSoftMemoryLimit:[[self class] imageSoftMemoryLimit]
hardMemoryLimit:[[self class] imageHardMemoryLimit]];
@ -165,6 +179,7 @@
_adjustmentsPipe = [[SPipe alloc] init];
_captionPipe = [[SPipe alloc] init];
_timerPipe = [[SPipe alloc] init];
_spoilerPipe = [[SPipe alloc] init];
_fullSizePipe = [[SPipe alloc] init];
_cropPipe = [[SPipe alloc] init];
}
@ -596,6 +611,73 @@
#pragma mark -
- (bool)spoilerForItem:(NSObject<TGMediaEditableItem> *)item
{
NSString *itemId = [self _contextualIdForItemId:item.uniqueIdentifier];
if (itemId == nil)
return false;
return [self _spoilerForItemId:itemId];
}
- (bool)_spoilerForItemId:(NSString *)itemId
{
if (itemId == nil)
return false;
return [_spoilers[itemId] boolValue];
}
- (void)setSpoiler:(bool)spoiler forItem:(NSObject<TGMediaEditableItem> *)item
{
NSString *itemId = [self _contextualIdForItemId:item.uniqueIdentifier];
if (itemId == nil)
return;
if (spoiler)
_spoilers[itemId] = @true;
else
[_spoilers removeObjectForKey:itemId];
_spoilerPipe.sink([TGMediaSpoilerUpdate spoilerUpdateWithItem:item spoiler:spoiler]);
}
- (SSignal *)spoilerSignalForItem:(NSObject<TGMediaEditableItem> *)item
{
SSignal *updateSignal = [[_spoilerPipe.signalProducer() filter:^bool(TGMediaSpoilerUpdate *update)
{
return [update.item.uniqueIdentifier isEqualToString:item.uniqueIdentifier];
}] map:^NSNumber *(TGMediaSpoilerUpdate *update)
{
return @(update.spoiler);
}];
return [[SSignal single:@([self spoilerForItem:item])] then:updateSignal];
}
- (SSignal *)spoilerSignalForIdentifier:(NSString *)identifier
{
SSignal *updateSignal = [[_spoilerPipe.signalProducer() filter:^bool(TGMediaSpoilerUpdate *update)
{
return [update.item.uniqueIdentifier isEqualToString:identifier];
}] map:^NSNumber *(TGMediaSpoilerUpdate *update)
{
return @(update.spoiler);
}];
return [[SSignal single:@([self _spoilerForItemId:identifier])] then:updateSignal];
}
- (SSignal *)spoilersUpdatedSignal
{
return [_spoilerPipe.signalProducer() map:^id(__unused id value)
{
return @true;
}];
}
#pragma mark -
- (void)setImage:(UIImage *)image thumbnailImage:(UIImage *)thumbnailImage forItem:(id<TGMediaEditableItem>)item synchronous:(bool)synchronous
{
NSString *itemId = [self _contextualIdForItemId:item.uniqueIdentifier];
@ -1082,3 +1164,23 @@
}
@end
@implementation TGMediaSpoilerUpdate
+ (instancetype)spoilerUpdateWithItem:(id<TGMediaEditableItem>)item spoiler:(bool)spoiler
{
TGMediaSpoilerUpdate *update = [[TGMediaSpoilerUpdate alloc] init];
update->_item = item;
update->_spoiler = spoiler;
return update;
}
+ (instancetype)spoilerUpdate:(bool)spoiler
{
TGMediaSpoilerUpdate *update = [[TGMediaSpoilerUpdate alloc] init];
update->_spoiler = spoiler;
return update;
}
@end
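
As consumed from Swift later in this commit (MediaPickerGridItemNode and MediaPickerScreen), the new spoiler API bridges roughly as follows. This is a sketch: editingContext and item are assumed to be an existing TGMediaEditingContext and TGMediaEditableItem.

// Flag an item as a spoiler and read the flag back.
editingContext.setSpoiler(true, for: item)
let hasSpoiler = editingContext.spoiler(for: item)

// Observe changes for a specific identifier; values arrive as NSNumber-wrapped Bools over SSignal.
if let signal = editingContext.spoilerSignal(forIdentifier: item.uniqueIdentifier) {
    let _ = signal.start(next: { next in
        if let next = next as? Bool {
            print("spoiler changed:", next)
        }
    }, error: { _ in }, completed: nil)
}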

View File

@ -50,7 +50,7 @@
UIView *parentView = [self _parentView];
id<TGCaptionPanelView> inputPanel = nil;
if (_stickersContext) {
if (_stickersContext && _stickersContext.captionPanelView != nil) {
inputPanel = _stickersContext.captionPanelView();
}
_inputPanel = inputPanel;

View File

@ -12,7 +12,7 @@
@implementation TGPhotoVideoEditor
+ (void)presentWithContext:(id<LegacyComponentsContext>)context parentController:(TGViewController *)parentController image:(UIImage *)image video:(NSURL *)video didFinishWithImage:(void (^)(UIImage *image))didFinishWithImage didFinishWithVideo:(void (^)(UIImage *image, NSURL *url, TGVideoEditAdjustments *adjustments))didFinishWithVideo dismissed:(void (^)(void))dismissed
+ (void)presentWithContext:(id<LegacyComponentsContext>)context parentController:(TGViewController *)parentController image:(UIImage *)image video:(NSURL *)video stickersContext:(id<TGPhotoPaintStickersContext>)stickersContext didFinishWithImage:(void (^)(UIImage *image))didFinishWithImage didFinishWithVideo:(void (^)(UIImage *image, NSURL *url, TGVideoEditAdjustments *adjustments))didFinishWithVideo dismissed:(void (^)(void))dismissed
{
id<LegacyComponentsOverlayWindowManager> windowManager = [context makeOverlayWindowManager];
@ -35,19 +35,23 @@
void (^present)(UIImage *) = ^(UIImage *screenImage) {
TGPhotoEditorController *controller = [[TGPhotoEditorController alloc] initWithContext:[windowManager context] item:editableItem intent:TGPhotoEditorControllerAvatarIntent adjustments:nil caption:nil screenImage:screenImage availableTabs:[TGPhotoEditorController defaultTabsForAvatarIntent] selectedTab:TGPhotoEditorCropTab];
// controller.stickersContext = _stickersContext;
controller.stickersContext = stickersContext;
controller.skipInitialTransition = true;
controller.dontHideStatusBar = true;
controller.didFinishEditing = ^(__unused id<TGMediaEditAdjustments> adjustments, UIImage *resultImage, __unused UIImage *thumbnailImage, __unused bool hasChanges, void(^commit)(void))
{
if (didFinishWithImage != nil)
didFinishWithImage(resultImage);
commit();
};
controller.didFinishEditingVideo = ^(AVAsset *asset, id<TGMediaEditAdjustments> adjustments, UIImage *resultImage, UIImage *thumbnailImage, bool hasChanges, void(^commit)(void)) {
if (didFinishWithVideo != nil) {
if ([asset isKindOfClass:[AVURLAsset class]]) {
didFinishWithVideo(resultImage, [(AVURLAsset *)asset URL], adjustments);
}
commit();
}
};
controller.requestThumbnailImage = ^(id<TGMediaEditableItem> editableItem)

View File

@ -2,9 +2,13 @@ import Foundation
import UIKit
import Display
import SwiftSignalKit
import Postbox
import TelegramCore
import LegacyComponents
import TelegramPresentationData
import LegacyUI
import AccountContext
import SaveToCameraRoll
public func presentLegacyAvatarPicker(holder: Atomic<NSObject?>, signup: Bool, theme: PresentationTheme, present: (ViewController, Any?) -> Void, openCurrent: (() -> Void)?, completion: @escaping (UIImage) -> Void, videoCompletion: @escaping (UIImage, Any?, TGVideoEditAdjustments?) -> Void = { _, _, _ in}) {
let legacyController = LegacyController(presentation: .custom, theme: theme)
@ -47,3 +51,58 @@ public func presentLegacyAvatarPicker(holder: Atomic<NSObject?>, signup: Bool, t
}
}
}
public func legacyAvatarEditor(context: AccountContext, media: AnyMediaReference, present: @escaping (ViewController, Any?) -> Void, imageCompletion: @escaping (UIImage) -> Void, videoCompletion: @escaping (UIImage, URL, TGVideoEditAdjustments) -> Void) {
let _ = (fetchMediaData(context: context, postbox: context.account.postbox, mediaReference: media)
|> deliverOnMainQueue).start(next: { (value, isImage) in
guard case let .data(data) = value, data.complete else {
return
}
var image: UIImage?
var url: URL?
if let maybeImage = UIImage(contentsOfFile: data.path) {
image = maybeImage
} else if data.complete {
url = URL(fileURLWithPath: data.path)
}
if image == nil && url == nil {
return
}
let paintStickersContext = LegacyPaintStickersContext(context: context)
let presentationData = context.sharedContext.currentPresentationData.with { $0 }
let legacyController = LegacyController(presentation: .custom, theme: presentationData.theme, initialLayout: nil)
legacyController.blocksBackgroundWhenInOverlay = true
legacyController.acceptsFocusWhenInOverlay = true
legacyController.statusBar.statusBarStyle = .Ignore
legacyController.controllerLoaded = { [weak legacyController] in
legacyController?.view.disablesInteractiveTransitionGestureRecognizer = true
}
let emptyController = LegacyEmptyController(context: legacyController.context)!
emptyController.navigationBarShouldBeHidden = true
let navigationController = makeLegacyNavigationController(rootController: emptyController)
navigationController.setNavigationBarHidden(true, animated: false)
legacyController.bind(controller: navigationController)
legacyController.enableSizeClassSignal = true
present(legacyController, nil)
TGPhotoVideoEditor.present(with: legacyController.context, parentController: emptyController, image: image, video: url, stickersContext: paintStickersContext, didFinishWithImage: { image in
if let image = image {
imageCompletion(image)
}
}, didFinishWithVideo: { image, url, adjustments in
if let image = image, let url = url, let adjustments = adjustments {
videoCompletion(image, url, adjustments)
}
}, dismissed: { [weak legacyController] in
legacyController?.dismiss()
})
})
}

View File

@ -199,12 +199,14 @@ private enum LegacyAssetItem {
private final class LegacyAssetItemWrapper: NSObject {
let item: LegacyAssetItem
let timer: Int?
let spoiler: Bool?
let groupedId: Int64?
let uniqueId: String?
init(item: LegacyAssetItem, timer: Int?, groupedId: Int64?, uniqueId: String?) {
init(item: LegacyAssetItem, timer: Int?, spoiler: Bool?, groupedId: Int64?, uniqueId: String?) {
self.item = item
self.timer = timer
self.spoiler = spoiler
self.groupedId = groupedId
self.uniqueId = uniqueId
@ -232,10 +234,10 @@ public func legacyAssetPickerItemGenerator() -> ((Any?, NSAttributedString?, Str
let url: String? = (dict["url"] as? String) ?? (dict["url"] as? URL)?.path
if let url = url {
let dimensions = image.size
result["item" as NSString] = LegacyAssetItemWrapper(item: .video(data: .tempFile(path: url, dimensions: dimensions, duration: 4.0), thumbnail: thumbnail, adjustments: dict["adjustments"] as? TGVideoEditAdjustments, caption: caption, asFile: false, asAnimation: true, stickers: stickers), timer: (dict["timer"] as? NSNumber)?.intValue, groupedId: (dict["groupedId"] as? NSNumber)?.int64Value, uniqueId: uniqueId)
result["item" as NSString] = LegacyAssetItemWrapper(item: .video(data: .tempFile(path: url, dimensions: dimensions, duration: 4.0), thumbnail: thumbnail, adjustments: dict["adjustments"] as? TGVideoEditAdjustments, caption: caption, asFile: false, asAnimation: true, stickers: stickers), timer: (dict["timer"] as? NSNumber)?.intValue, spoiler: (dict["spoiler"] as? NSNumber)?.boolValue, groupedId: (dict["groupedId"] as? NSNumber)?.int64Value, uniqueId: uniqueId)
}
} else {
result["item" as NSString] = LegacyAssetItemWrapper(item: .image(data: .image(image), thumbnail: thumbnail, caption: caption, stickers: stickers), timer: (dict["timer"] as? NSNumber)?.intValue, groupedId: (dict["groupedId"] as? NSNumber)?.int64Value, uniqueId: uniqueId)
result["item" as NSString] = LegacyAssetItemWrapper(item: .image(data: .image(image), thumbnail: thumbnail, caption: caption, stickers: stickers), timer: (dict["timer"] as? NSNumber)?.intValue, spoiler: (dict["spoiler"] as? NSNumber)?.boolValue, groupedId: (dict["groupedId"] as? NSNumber)?.int64Value, uniqueId: uniqueId)
}
return result
} else if (dict["type"] as! NSString) == "cloudPhoto" {
@ -256,9 +258,9 @@ public func legacyAssetPickerItemGenerator() -> ((Any?, NSAttributedString?, Str
name = customName
}
result["item" as NSString] = LegacyAssetItemWrapper(item: .file(data: .asset(asset.backingAsset), thumbnail: thumbnail, mimeType: mimeType, name: name, caption: caption), timer: nil, groupedId: (dict["groupedId"] as? NSNumber)?.int64Value, uniqueId: uniqueId)
result["item" as NSString] = LegacyAssetItemWrapper(item: .file(data: .asset(asset.backingAsset), thumbnail: thumbnail, mimeType: mimeType, name: name, caption: caption), timer: nil, spoiler: nil, groupedId: (dict["groupedId"] as? NSNumber)?.int64Value, uniqueId: uniqueId)
} else {
result["item" as NSString] = LegacyAssetItemWrapper(item: .image(data: .asset(asset.backingAsset), thumbnail: thumbnail, caption: caption, stickers: []), timer: (dict["timer"] as? NSNumber)?.intValue, groupedId: (dict["groupedId"] as? NSNumber)?.int64Value, uniqueId: uniqueId)
result["item" as NSString] = LegacyAssetItemWrapper(item: .image(data: .asset(asset.backingAsset), thumbnail: thumbnail, caption: caption, stickers: []), timer: (dict["timer"] as? NSNumber)?.intValue, spoiler: (dict["spoiler"] as? NSNumber)?.boolValue, groupedId: (dict["groupedId"] as? NSNumber)?.int64Value, uniqueId: uniqueId)
}
return result
} else if (dict["type"] as! NSString) == "file" {
@ -279,12 +281,12 @@ public func legacyAssetPickerItemGenerator() -> ((Any?, NSAttributedString?, Str
let dimensions = (dict["dimensions"]! as AnyObject).cgSizeValue!
let duration = (dict["duration"]! as AnyObject).doubleValue!
result["item" as NSString] = LegacyAssetItemWrapper(item: .video(data: .tempFile(path: tempFileUrl.path, dimensions: dimensions, duration: duration), thumbnail: thumbnail, adjustments: nil, caption: caption, asFile: false, asAnimation: true, stickers: []), timer: (dict["timer"] as? NSNumber)?.intValue, groupedId: (dict["groupedId"] as? NSNumber)?.int64Value, uniqueId: uniqueId)
result["item" as NSString] = LegacyAssetItemWrapper(item: .video(data: .tempFile(path: tempFileUrl.path, dimensions: dimensions, duration: duration), thumbnail: thumbnail, adjustments: nil, caption: caption, asFile: false, asAnimation: true, stickers: []), timer: (dict["timer"] as? NSNumber)?.intValue, spoiler: (dict["spoiler"] as? NSNumber)?.boolValue, groupedId: (dict["groupedId"] as? NSNumber)?.int64Value, uniqueId: uniqueId)
return result
}
var result: [AnyHashable: Any] = [:]
result["item" as NSString] = LegacyAssetItemWrapper(item: .file(data: .tempFile(tempFileUrl.path), thumbnail: thumbnail, mimeType: mimeType, name: name, caption: caption), timer: (dict["timer"] as? NSNumber)?.intValue, groupedId: (dict["groupedId"] as? NSNumber)?.int64Value, uniqueId: uniqueId)
result["item" as NSString] = LegacyAssetItemWrapper(item: .file(data: .tempFile(tempFileUrl.path), thumbnail: thumbnail, mimeType: mimeType, name: name, caption: caption), timer: (dict["timer"] as? NSNumber)?.intValue, spoiler: (dict["spoiler"] as? NSNumber)?.boolValue, groupedId: (dict["groupedId"] as? NSNumber)?.int64Value, uniqueId: uniqueId)
return result
}
} else if (dict["type"] as! NSString) == "video" {
@ -296,13 +298,13 @@ public func legacyAssetPickerItemGenerator() -> ((Any?, NSAttributedString?, Str
if let asset = dict["asset"] as? TGMediaAsset {
var result: [AnyHashable: Any] = [:]
result["item" as NSString] = LegacyAssetItemWrapper(item: .video(data: .asset(asset), thumbnail: thumbnail, adjustments: dict["adjustments"] as? TGVideoEditAdjustments, caption: caption, asFile: asFile, asAnimation: false, stickers: stickers), timer: (dict["timer"] as? NSNumber)?.intValue, groupedId: (dict["groupedId"] as? NSNumber)?.int64Value, uniqueId: uniqueId)
result["item" as NSString] = LegacyAssetItemWrapper(item: .video(data: .asset(asset), thumbnail: thumbnail, adjustments: dict["adjustments"] as? TGVideoEditAdjustments, caption: caption, asFile: asFile, asAnimation: false, stickers: stickers), timer: (dict["timer"] as? NSNumber)?.intValue, spoiler: (dict["spoiler"] as? NSNumber)?.boolValue, groupedId: (dict["groupedId"] as? NSNumber)?.int64Value, uniqueId: uniqueId)
return result
} else if let url = (dict["url"] as? String) ?? (dict["url"] as? URL)?.absoluteString {
let dimensions = (dict["dimensions"]! as AnyObject).cgSizeValue!
let duration = (dict["duration"]! as AnyObject).doubleValue!
var result: [AnyHashable: Any] = [:]
result["item" as NSString] = LegacyAssetItemWrapper(item: .video(data: .tempFile(path: url, dimensions: dimensions, duration: duration), thumbnail: thumbnail, adjustments: dict["adjustments"] as? TGVideoEditAdjustments, caption: caption, asFile: asFile, asAnimation: false, stickers: stickers), timer: (dict["timer"] as? NSNumber)?.intValue, groupedId: (dict["groupedId"] as? NSNumber)?.int64Value, uniqueId: uniqueId)
result["item" as NSString] = LegacyAssetItemWrapper(item: .video(data: .tempFile(path: url, dimensions: dimensions, duration: duration), thumbnail: thumbnail, adjustments: dict["adjustments"] as? TGVideoEditAdjustments, caption: caption, asFile: asFile, asAnimation: false, stickers: stickers), timer: (dict["timer"] as? NSNumber)?.intValue, spoiler: (dict["spoiler"] as? NSNumber)?.boolValue, groupedId: (dict["groupedId"] as? NSNumber)?.int64Value, uniqueId: uniqueId)
return result
}
} else if (dict["type"] as! NSString) == "cameraVideo" {
@ -318,7 +320,7 @@ public func legacyAssetPickerItemGenerator() -> ((Any?, NSAttributedString?, Str
let dimensions = previewImage.pixelSize()
let duration = (dict["duration"]! as AnyObject).doubleValue!
var result: [AnyHashable: Any] = [:]
result["item" as NSString] = LegacyAssetItemWrapper(item: .video(data: .tempFile(path: url, dimensions: dimensions, duration: duration), thumbnail: thumbnail, adjustments: dict["adjustments"] as? TGVideoEditAdjustments, caption: caption, asFile: asFile, asAnimation: false, stickers: stickers), timer: (dict["timer"] as? NSNumber)?.intValue, groupedId: (dict["groupedId"] as? NSNumber)?.int64Value, uniqueId: uniqueId)
result["item" as NSString] = LegacyAssetItemWrapper(item: .video(data: .tempFile(path: url, dimensions: dimensions, duration: duration), thumbnail: thumbnail, adjustments: dict["adjustments"] as? TGVideoEditAdjustments, caption: caption, asFile: asFile, asAnimation: false, stickers: stickers), timer: (dict["timer"] as? NSNumber)?.intValue, spoiler: (dict["spoiler"] as? NSNumber)?.boolValue, groupedId: (dict["groupedId"] as? NSNumber)?.int64Value, uniqueId: uniqueId)
return result
}
}
@ -467,6 +469,9 @@ public func legacyAssetPickerEnqueueMessages(account: Account, signals: [Any]) -
if let timer = item.timer, timer > 0 && timer <= 60 {
attributes.append(AutoremoveTimeoutMessageAttribute(timeout: Int32(timer), countdownBeginTime: nil))
}
if let spoiler = item.spoiler, spoiler {
attributes.append(MediaSpoilerMessageAttribute())
}
let text = trimChatInputText(convertMarkdownToAttributes(caption ?? NSAttributedString()))
let entities = generateTextEntities(text.string, enabledTypes: .all, currentEntities: generateChatInputTextEntities(text))
@ -509,7 +514,10 @@ public func legacyAssetPickerEnqueueMessages(account: Account, signals: [Any]) -
if let timer = item.timer, timer > 0 && timer <= 60 {
attributes.append(AutoremoveTimeoutMessageAttribute(timeout: Int32(timer), countdownBeginTime: nil))
}
if let spoiler = item.spoiler, spoiler {
attributes.append(MediaSpoilerMessageAttribute())
}
let text = trimChatInputText(convertMarkdownToAttributes(caption ?? NSAttributedString()))
let entities = generateTextEntities(text.string, enabledTypes: .all, currentEntities: generateChatInputTextEntities(text))
if !entities.isEmpty {
@ -751,6 +759,9 @@ public func legacyAssetPickerEnqueueMessages(account: Account, signals: [Any]) -
if let timer = item.timer, timer > 0 && timer <= 60 {
attributes.append(AutoremoveTimeoutMessageAttribute(timeout: Int32(timer), countdownBeginTime: nil))
}
if let spoiler = item.spoiler, spoiler {
attributes.append(MediaSpoilerMessageAttribute())
}
let text = trimChatInputText(convertMarkdownToAttributes(caption ?? NSAttributedString()))
let entities = generateTextEntities(text.string, enabledTypes: .all, currentEntities: generateChatInputTextEntities(text))
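
Taken together, the spoiler flag travels from the legacy picker to the outgoing message roughly like this (a condensed sketch of the changes above, not new API; the attributes array stands in for the local one built inside legacyAssetPickerEnqueueMessages):

// 1. The legacy picker marks the generated item description:    dict[@"spoiler"] = @true;
// 2. legacyAssetPickerItemGenerator copies it into the wrapper: spoiler: (dict["spoiler"] as? NSNumber)?.boolValue
// 3. legacyAssetPickerEnqueueMessages converts it into a message attribute:
var attributes: [MessageAttribute] = []
if let spoiler = item.spoiler, spoiler {
    attributes.append(MediaSpoilerMessageAttribute())
}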

View File

@ -41,6 +41,7 @@ swift_library(
"//submodules/SparseItemGrid:SparseItemGrid",
"//submodules/UndoUI:UndoUI",
"//submodules/MoreButtonNode:MoreButtonNode",
"//submodules/InvisibleInkDustNode:InvisibleInkDustNode",
],
visibility = [
"//visibility:public",

View File

@ -12,6 +12,7 @@ import Photos
import CheckNode
import LegacyComponents
import PhotoResources
import InvisibleInkDustNode
enum MediaPickerGridItemContent: Equatable {
case asset(PHFetchResult<PHAsset>, Int)
@ -87,6 +88,9 @@ final class MediaPickerGridItemNode: GridItemNode {
private var interaction: MediaPickerInteraction?
private var theme: PresentationTheme?
private let spoilerDisposable = MetaDisposable()
private var spoilerNode: SpoilerOverlayNode?
private var currentIsPreviewing = false
var selected: (() -> Void)?
@ -113,6 +117,10 @@ final class MediaPickerGridItemNode: GridItemNode {
self.addSubnode(self.imageNode)
}
deinit {
self.spoilerDisposable.dispose()
}
var identifier: String {
return self.selectableItem?.uniqueIdentifier ?? ""
@ -170,17 +178,20 @@ final class MediaPickerGridItemNode: GridItemNode {
let wasHidden = self.isHidden
self.isHidden = self.interaction?.hiddenMediaId == self.identifier
if !self.isHidden && wasHidden {
self.animateFadeIn(animateCheckNode: true)
self.animateFadeIn(animateCheckNode: true, animateSpoilerNode: true)
}
}
func animateFadeIn(animateCheckNode: Bool) {
func animateFadeIn(animateCheckNode: Bool, animateSpoilerNode: Bool) {
if animateCheckNode {
self.checkNode?.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2)
}
self.gradientNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2)
self.typeIconNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2)
self.durationNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2)
if animateSpoilerNode {
self.spoilerNode?.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2)
}
}
override func didLoad() {
@ -298,6 +309,31 @@ final class MediaPickerGridItemNode: GridItemNode {
}
self.imageNode.setSignal(imageSignal)
let spoilerSignal = Signal<Bool, NoError> { subscriber in
if let signal = editingContext.spoilerSignal(forIdentifier: asset.localIdentifier) {
let disposable = signal.start(next: { next in
if let next = next as? Bool {
subscriber.putNext(next)
}
}, error: { _ in
}, completed: nil)!
return ActionDisposable {
disposable.dispose()
}
} else {
return EmptyDisposable
}
}
self.spoilerDisposable.set((spoilerSignal
|> deliverOnMainQueue).start(next: { [weak self] hasSpoiler in
guard let strongSelf = self else {
return
}
strongSelf.updateHasSpoiler(hasSpoiler)
}))
if asset.mediaType == .video {
if asset.mediaSubtypes.contains(.videoHighFrameRate) {
self.typeIconNode.image = UIImage(bundleImageName: "Media Editor/MediaSlomo")
@ -331,6 +367,25 @@ final class MediaPickerGridItemNode: GridItemNode {
self.updateHiddenMedia()
}
private func updateHasSpoiler(_ hasSpoiler: Bool) {
if hasSpoiler {
if self.spoilerNode == nil {
let spoilerNode = SpoilerOverlayNode()
self.insertSubnode(spoilerNode, aboveSubnode: self.imageNode)
self.spoilerNode = spoilerNode
spoilerNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2)
}
self.spoilerNode?.update(size: self.bounds.size, transition: .immediate)
self.spoilerNode?.frame = CGRect(origin: .zero, size: self.bounds.size)
} else if let spoilerNode = self.spoilerNode {
self.spoilerNode = nil
spoilerNode.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2, removeOnCompletion: false, completion: { [weak spoilerNode] _ in
spoilerNode?.removeFromSupernode()
})
}
}
override func layout() {
super.layout()
@ -345,6 +400,11 @@ final class MediaPickerGridItemNode: GridItemNode {
let checkSize = CGSize(width: 29.0, height: 29.0)
self.checkNode?.frame = CGRect(origin: CGPoint(x: self.bounds.width - checkSize.width - 3.0, y: 3.0), size: checkSize)
if let spoilerNode = self.spoilerNode, self.bounds.width > 0.0 {
spoilerNode.frame = self.bounds
spoilerNode.update(size: self.bounds.size, transition: .immediate)
}
}
func transitionView() -> UIView {
@ -361,3 +421,61 @@ final class MediaPickerGridItemNode: GridItemNode {
}
}
class SpoilerOverlayNode: ASDisplayNode {
private let blurNode: NavigationBackgroundNode
private let dustNode: MediaDustNode
private var maskView: UIView?
private var maskLayer: CAShapeLayer?
override init() {
self.blurNode = NavigationBackgroundNode(color: UIColor(rgb: 0x000000, alpha: 0.1), enableBlur: true)
self.dustNode = MediaDustNode()
super.init()
self.isUserInteractionEnabled = false
self.addSubnode(self.blurNode)
self.addSubnode(self.dustNode)
}
override func didLoad() {
super.didLoad()
let maskView = UIView()
self.maskView = maskView
// self.dustNode.view.mask = maskView
let maskLayer = CAShapeLayer()
maskLayer.fillRule = .evenOdd
maskLayer.fillColor = UIColor.white.cgColor
maskView.layer.addSublayer(maskLayer)
self.maskLayer = maskLayer
}
func update(size: CGSize, transition: ContainedViewLayoutTransition) {
self.blurNode.update(size: size, transition: transition)
self.blurNode.frame = CGRect(origin: .zero, size: size)
self.dustNode.frame = CGRect(origin: .zero, size: size)
self.dustNode.update(size: size, color: .white)
// var leftOffset: CGFloat = 0.0
// var rightOffset: CGFloat = 0.0
// let corners = corners ?? ImageCorners(radius: 16.0)
// if case .Tail = corners.bottomLeft {
// leftOffset = 4.0
// } else if case .Tail = corners.bottomRight {
// rightOffset = 4.0
// }
// let rect = CGRect(origin: CGPoint(x: leftOffset, y: 0.0), size: CGSize(width: size.width - leftOffset - rightOffset, height: size.height))
// let path = UIBezierPath(roundRect: rect, topLeftRadius: corners.topLeft.radius, topRightRadius: corners.topRight.radius, bottomLeftRadius: corners.bottomLeft.radius, bottomRightRadius: corners.bottomRight.radius)
// let buttonPath = UIBezierPath(roundedRect: self.buttonNode.frame, cornerRadius: 16.0)
// path.append(buttonPath)
// path.usesEvenOddFillRule = true
// self.maskLayer?.path = path.cgPath
}
}

View File

@ -600,7 +600,7 @@ public final class MediaPickerScreen: ViewController, AttachmentContainable {
}
if let node = node {
return (node.view, { [weak node] animateCheckNode in
node?.animateFadeIn(animateCheckNode: animateCheckNode)
node?.animateFadeIn(animateCheckNode: animateCheckNode, animateSpoilerNode: false)
})
} else {
return nil
@ -1516,21 +1516,33 @@ public final class MediaPickerScreen: ViewController, AttachmentContainable {
let strings = self.presentationData.strings
let selectionCount = self.selectionCount
var hasSpoilers = false
if let selectionContext = self.interaction?.selectionState, let editingContext = self.interaction?.editingState {
for case let item as TGMediaEditableItem in selectionContext.selectedItems() {
if editingContext.spoiler(for: item) {
hasSpoilers = true
break
}
}
}
let items: Signal<ContextController.Items, NoError> = self.groupedPromise.get()
|> deliverOnMainQueue
|> map { [weak self] grouped -> ContextController.Items in
var items: [ContextMenuItem] = []
items.append(.action(ContextMenuActionItem(text: selectionCount > 1 ? strings.Attachment_SendAsFiles : strings.Attachment_SendAsFile, icon: { theme in
return generateTintedImage(image: UIImage(bundleImageName: "Chat/Context Menu/File"), color: theme.contextMenu.primaryColor)
}, action: { [weak self] _, f in
f(.default)
self?.controllerNode.send(asFile: true, silently: false, scheduleTime: nil, animated: true, completion: {})
})))
if !hasSpoilers {
items.append(.action(ContextMenuActionItem(text: selectionCount > 1 ? strings.Attachment_SendAsFiles : strings.Attachment_SendAsFile, icon: { theme in
return generateTintedImage(image: UIImage(bundleImageName: "Chat/Context Menu/File"), color: theme.contextMenu.primaryColor)
}, action: { [weak self] _, f in
f(.default)
self?.controllerNode.send(asFile: true, silently: false, scheduleTime: nil, animated: true, completion: {})
})))
}
if selectionCount > 1 {
items.append(.separator)
if !items.isEmpty {
items.append(.separator)
}
items.append(.action(ContextMenuActionItem(text: strings.Attachment_Grouped, icon: { theme in
if !grouped {
return nil
@ -1552,6 +1564,21 @@ public final class MediaPickerScreen: ViewController, AttachmentContainable {
self?.groupedValue = false
})))
}
if !items.isEmpty {
items.append(.separator)
}
items.append(.action(ContextMenuActionItem(text: hasSpoilers ? "Disable Spoiler Effect" : "Spoiler Effect", icon: { _ in return nil }, animationName: "anim_spoiler", action: { [weak self] _, f in
f(.default)
guard let strongSelf = self else {
return
}
if let selectionContext = strongSelf.interaction?.selectionState, let editingContext = strongSelf.interaction?.editingState {
for case let item as TGMediaEditableItem in selectionContext.selectedItems() {
editingContext.setSpoiler(!hasSpoilers, for: item)
}
}
})))
return ContextController.Items(content: .list(items))
}

View File

@ -25,6 +25,9 @@ private class MediaPickerSelectedItemNode: ASDisplayNode {
private var adjustmentsDisposable: Disposable?
private let spoilerDisposable = MetaDisposable()
private var spoilerNode: SpoilerOverlayNode?
private var theme: PresentationTheme?
private var validLayout: CGSize?
@ -68,43 +71,71 @@ private class MediaPickerSelectedItemNode: ASDisplayNode {
self.addSubnode(self.imageNode)
if asset.isVideo, let editingState = interaction?.editingState {
func adjustmentsChangedSignal(editingState: TGMediaEditingContext) -> Signal<TGMediaEditAdjustments?, NoError> {
return Signal { subscriber in
let disposable = editingState.adjustmentsSignal(for: asset).start(next: { next in
if let next = next as? TGMediaEditAdjustments {
subscriber.putNext(next)
} else if next == nil {
subscriber.putNext(nil)
if let editingState = interaction?.editingState {
if asset.isVideo {
func adjustmentsChangedSignal(editingState: TGMediaEditingContext) -> Signal<TGMediaEditAdjustments?, NoError> {
return Signal { subscriber in
let disposable = editingState.adjustmentsSignal(for: asset).start(next: { next in
if let next = next as? TGMediaEditAdjustments {
subscriber.putNext(next)
} else if next == nil {
subscriber.putNext(nil)
}
}, error: nil, completed: {})
return ActionDisposable {
disposable?.dispose()
}
}, error: nil, completed: {})
return ActionDisposable {
disposable?.dispose()
}
}
self.adjustmentsDisposable = (adjustmentsChangedSignal(editingState: editingState)
|> deliverOnMainQueue).start(next: { [weak self] adjustments in
if let strongSelf = self {
let duration: Double
if let adjustments = adjustments as? TGVideoEditAdjustments, adjustments.trimApplied() {
duration = adjustments.trimEndValue - adjustments.trimStartValue
} else {
duration = asset.originalDuration ?? 0.0
}
strongSelf.videoDuration = duration
if let size = strongSelf.validLayout {
strongSelf.updateLayout(size: size, transition: .immediate)
}
}
})
}
let spoilerSignal = Signal<Bool, NoError> { subscriber in
if let signal = editingState.spoilerSignal(forIdentifier: asset.uniqueIdentifier) {
let disposable = signal.start(next: { next in
if let next = next as? Bool {
subscriber.putNext(next)
}
}, error: { _ in
}, completed: nil)!
return ActionDisposable {
disposable.dispose()
}
} else {
return EmptyDisposable
}
}
self.adjustmentsDisposable = (adjustmentsChangedSignal(editingState: editingState)
|> deliverOnMainQueue).start(next: { [weak self] adjustments in
if let strongSelf = self {
let duration: Double
if let adjustments = adjustments as? TGVideoEditAdjustments, adjustments.trimApplied() {
duration = adjustments.trimEndValue - adjustments.trimStartValue
} else {
duration = asset.originalDuration ?? 0.0
}
strongSelf.videoDuration = duration
if let size = strongSelf.validLayout {
strongSelf.updateLayout(size: size, transition: .immediate)
}
self.spoilerDisposable.set((spoilerSignal
|> deliverOnMainQueue).start(next: { [weak self] hasSpoiler in
guard let strongSelf = self else {
return
}
})
strongSelf.updateHasSpoiler(hasSpoiler)
}))
}
}
deinit {
self.adjustmentsDisposable?.dispose()
self.spoilerDisposable.dispose()
}
override func didLoad() {
@@ -120,6 +151,25 @@ private class MediaPickerSelectedItemNode: ASDisplayNode {
self.interaction?.openSelectedMedia(asset, self.imageNode.image)
}
private func updateHasSpoiler(_ hasSpoiler: Bool) {
if hasSpoiler {
if self.spoilerNode == nil {
let spoilerNode = SpoilerOverlayNode()
self.insertSubnode(spoilerNode, aboveSubnode: self.imageNode)
self.spoilerNode = spoilerNode
spoilerNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2)
}
self.spoilerNode?.update(size: self.bounds.size, transition: .immediate)
self.spoilerNode?.frame = CGRect(origin: .zero, size: self.bounds.size)
} else if let spoilerNode = self.spoilerNode {
self.spoilerNode = nil
spoilerNode.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2, removeOnCompletion: false, completion: { [weak spoilerNode] _ in
spoilerNode?.removeFromSupernode()
})
}
}
func setup(size: CGSize) {
let editingState = self.interaction?.editingState
let editedSignal = Signal<UIImage?, NoError> { subscriber in
@@ -229,6 +279,10 @@ private class MediaPickerSelectedItemNode: ASDisplayNode {
if let durationBackgroundNode = self.durationBackgroundNode, durationBackgroundNode.alpha > 0.0 {
durationBackgroundNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2)
}
if let spoilerNode = self.spoilerNode, spoilerNode.alpha > 0.0 {
spoilerNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2)
}
}
}
@@ -249,6 +303,11 @@ private class MediaPickerSelectedItemNode: ASDisplayNode {
transition.updateFrame(node: self.imageNode, frame: CGRect(origin: CGPoint(), size: size))
if let spoilerNode = self.spoilerNode {
transition.updateFrame(node: spoilerNode, frame: CGRect(origin: CGPoint(), size: size))
spoilerNode.update(size: size, transition: transition)
}
let checkSize = CGSize(width: 29.0, height: 29.0)
if let checkNode = self.checkNode {
transition.updateFrame(node: checkNode, frame: CGRect(origin: CGPoint(x: size.width - checkSize.width - 3.0, y: 3.0), size: checkSize))
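Both asynchronous inputs this node consumes (video trim adjustments and the per-asset spoiler flag) are bridged the same way: a legacy signal from TGMediaEditingContext is wrapped in a SwiftSignalKit Signal and stored in a disposable. A condensed sketch of that bridge, using the start(next:error:completed:) shape seen in the hunks above; the SSignal parameter type, the generic helper and its name are assumptions for illustration:

// Sketch: adapt an untyped legacy signal into a typed SwiftSignalKit Signal.
func bridgeLegacySignal<T>(_ legacySignal: SSignal, of type: T.Type) -> Signal<T?, NoError> {
    return Signal { subscriber in
        let disposable = legacySignal.start(next: { next in
            // Values arrive untyped; forward only those of the expected type (nil otherwise).
            subscriber.putNext(next as? T)
        }, error: { _ in }, completed: nil)
        return ActionDisposable {
            disposable?.dispose()
        }
    }
}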

View File

@@ -19,6 +19,8 @@ import AppBundle
import MusicAlbumArtResources
import Svg
import RangeSet
import Accelerate
private enum ResourceFileData {
case data(Data)
@@ -1146,6 +1148,8 @@ public func chatSecretPhoto(account: Account, photoReference: ImageMediaReferenc
}
}
adjustSaturationInContext(context: context, saturation: 1.7)
addCorners(context, arguments: arguments)
return context
@@ -1153,6 +1157,45 @@ public func chatSecretPhoto(account: Account, photoReference: ImageMediaReferenc
}
}
private func adjustSaturationInContext(context: DrawingContext, saturation: CGFloat) {
var buffer = vImage_Buffer()
buffer.data = context.bytes
buffer.width = UInt(context.size.width * context.scale)
buffer.height = UInt(context.size.height * context.scale)
buffer.rowBytes = context.bytesPerRow
let divisor: Int32 = 0x1000
let rwgt: CGFloat = 0.3086
let gwgt: CGFloat = 0.6094
let bwgt: CGFloat = 0.0820
let adjustSaturation = saturation
let a = (1.0 - adjustSaturation) * rwgt + adjustSaturation
let b = (1.0 - adjustSaturation) * rwgt
let c = (1.0 - adjustSaturation) * rwgt
let d = (1.0 - adjustSaturation) * gwgt
let e = (1.0 - adjustSaturation) * gwgt + adjustSaturation
let f = (1.0 - adjustSaturation) * gwgt
let g = (1.0 - adjustSaturation) * bwgt
let h = (1.0 - adjustSaturation) * bwgt
let i = (1.0 - adjustSaturation) * bwgt + adjustSaturation
let satMatrix: [CGFloat] = [
a, b, c, 0,
d, e, f, 0,
g, h, i, 0,
0, 0, 0, 1
]
var matrix: [Int16] = satMatrix.map { value in
return Int16(value * CGFloat(divisor))
}
vImageMatrixMultiply_ARGB8888(&buffer, &buffer, &matrix, divisor, nil, nil, vImage_Flags(kvImageDoNotTile))
}
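For reference, the three weights above (0.3086, 0.6094, 0.0820) appear to be the classic luminance coefficients used for saturation matrices in image processing, and the matrix assembled here is a fixed-point blend between a grayscale projection and the identity. Assuming vImage's usual row-vector convention, every color channel c comes out as

c' = (1 - s) * Y + s * c,   with Y = 0.3086*R + 0.6094*G + 0.0820*B

so s = 0 collapses the image to its luminance, s = 1 is a no-op, and the s = 1.7 passed in from chatSecretPhoto above oversaturates the blurred preview used for spoilered media. The 0x1000 divisor simply rescales the CGFloat coefficients into the Int16 fixed-point form that vImageMatrixMultiply_ARGB8888 expects.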
private func avatarGalleryThumbnailDatas(postbox: Postbox, representations: [ImageRepresentationWithReference], fullRepresentationSize: CGSize = CGSize(width: 1280.0, height: 1280.0), autoFetchFullSize: Bool = false, synchronousLoad: Bool) -> Signal<Tuple3<Data?, Data?, Bool>, NoError> {
if let smallestRepresentation = smallestImageRepresentation(representations.map({ $0.representation })), let largestRepresentation = imageRepresentationLargerThan(representations.map({ $0.representation }), size: PixelDimensions(width: Int32(fullRepresentationSize.width), height: Int32(fullRepresentationSize.height))), let smallestIndex = representations.firstIndex(where: { $0.representation == smallestRepresentation }), let largestIndex = representations.firstIndex(where: { $0.representation == largestRepresentation }) {
let maybeFullSize = postbox.mediaBox.resourceData(largestRepresentation.resource, attemptSynchronously: synchronousLoad)

View File

@@ -225,7 +225,11 @@ func _internal_updatePeerPhotoInternal(postbox: Postbox, network: Network, state
if let peer = transaction.getPeer(peer.id) {
updatePeers(transaction: transaction, peers: [peer], update: { (_, peer) -> Peer? in
if let peer = peer as? TelegramUser {
return peer.withUpdatedPhoto(representations)
if customPeerPhotoMode == .suggest {
return peer
} else {
return peer.withUpdatedPhoto(representations)
}
} else {
return peer
}

View File

@@ -825,8 +825,12 @@ public func universalServiceMessageString(presentationData: (PresentationTheme,
attributedString = addAttributesToStringWithRanges(strings.Notification_ForumTopicIconChanged(".")._tuple, body: bodyAttributes, argumentAttributes: [0: MarkdownAttributeSet(font: titleFont, textColor: primaryTextColor, additionalAttributes: [ChatTextInputAttributes.customEmoji.rawValue: ChatTextInputTextCustomEmojiAttribute(interactivelySelectedFromPackId: nil, fileId: maybeFileId, file: nil, topicInfo: maybeFileId == 0 ? (message.threadId ?? 0, EngineMessageHistoryThread.Info(title: title, icon: nil, iconColor: iconColor)) : nil)])])
}
}
case .suggestedProfilePhoto:
attributedString = nil
case let .suggestedProfilePhoto(image):
if (image?.videoRepresentations.isEmpty ?? true) {
attributedString = NSAttributedString(string: strings.Notification_SuggestedProfilePhoto, font: titleFont, textColor: primaryTextColor)
} else {
attributedString = NSAttributedString(string: strings.Notification_SuggestedProfileVideo, font: titleFont, textColor: primaryTextColor)
}
case .unknown:
attributedString = nil
}

View File

@@ -471,7 +471,7 @@ public final class ChatTitleView: UIView, NavigationBarTitleView {
switch titleContent {
case let .peer(peerView, customTitle, onlineMemberCount, isScheduledMessages, _, customMessageCount):
if let customMessageCount = customMessageCount, customMessageCount != 0 {
let string = NSAttributedString(string: self.strings.Conversation_ForwardOptions_Messages(Int32(customMessageCount)), font: subtitleFont, textColor: titleTheme.rootController.navigationBar.secondaryTextColor)
let string = NSAttributedString(string: self.strings.Conversation_Messages(Int32(customMessageCount)), font: subtitleFont, textColor: titleTheme.rootController.navigationBar.secondaryTextColor)
state = .info(string, .generic)
} else if let peer = peerViewMainPeer(peerView) {
let servicePeer = isServicePeer(peer)

File diff suppressed because one or more lines are too long

View File

@@ -820,6 +820,24 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
let controller = PremiumIntroScreen(context: strongSelf.context, source: .gift(from: fromPeerId, to: toPeerId, duration: duration))
strongSelf.push(controller)
return true
case let .suggestedProfilePhoto(image):
if let image = image {
legacyAvatarEditor(context: strongSelf.context, media: .message(message: MessageReference(message), media: image), present: { [weak self] c, a in
self?.present(c, in: .window(.root), with: a)
}, imageCompletion: { [weak self] image in
if let strongSelf = self {
if let rootController = strongSelf.effectiveNavigationController as? TelegramRootController, let settingsController = rootController.accountSettingsController as? PeerInfoScreenImpl {
settingsController.updateProfilePhoto(image)
}
}
}, videoCompletion: { [weak self] image, url, adjustments in
if let strongSelf = self {
if let rootController = strongSelf.effectiveNavigationController as? TelegramRootController, let settingsController = rootController.accountSettingsController as? PeerInfoScreenImpl {
settingsController.updateProfileVideo(image, asset: AVURLAsset(url: url), adjustments: adjustments)
}
}
})
}
default:
break
}

View File

@@ -110,6 +110,8 @@ private func contentNodeMessagesAndClassesForItem(_ item: ChatMessageItem) -> ([
result.append((message, ChatMessageCallBubbleContentNode.self, itemAttributes, BubbleItemAttributes(isAttachment: false, neighborType: .freeform, neighborSpacing: .default)))
} else if case .giftPremium = action.action {
result.append((message, ChatMessageGiftBubbleContentNode.self, itemAttributes, BubbleItemAttributes(isAttachment: false, neighborType: .freeform, neighborSpacing: .default)))
} else if case .suggestedProfilePhoto = action.action {
result.append((message, ChatMessageProfilePhotoSuggestionContentNode.self, itemAttributes, BubbleItemAttributes(isAttachment: false, neighborType: .freeform, neighborSpacing: .default)))
} else {
result.append((message, ChatMessageActionBubbleContentNode.self, itemAttributes, BubbleItemAttributes(isAttachment: false, neighborType: .freeform, neighborSpacing: .default)))
}

View File

@@ -174,6 +174,7 @@ extension UIBezierPath {
}
private class ExtendedMediaOverlayNode: ASDisplayNode {
private let blurredImageNode: TransformImageNode
private let dustNode: MediaDustNode
private let buttonNode: HighlightTrackingButtonNode
private let highlightedBackgroundNode: ASDisplayNode
@@ -184,6 +185,8 @@ private class ExtendedMediaOverlayNode: ASDisplayNode {
private var maskLayer: CAShapeLayer?
override init() {
self.blurredImageNode = TransformImageNode()
self.dustNode = MediaDustNode()
self.buttonNode = HighlightTrackingButtonNode()
@@ -202,10 +205,8 @@ private class ExtendedMediaOverlayNode: ASDisplayNode {
self.textNode = ImmediateTextNode()
super.init()
self.clipsToBounds = true
self.isUserInteractionEnabled = false
self.addSubnode(self.blurredImageNode)
self.addSubnode(self.dustNode)
self.addSubnode(self.buttonNode)
@@ -250,22 +251,51 @@ private class ExtendedMediaOverlayNode: ASDisplayNode {
self.maskLayer = maskLayer
}
func update(size: CGSize, text: String, corners: ImageCorners?) {
func update(size: CGSize, text: String, imageSignal: (Signal<(TransformImageArguments) -> DrawingContext?, NoError>, CGSize)?, imageFrame: CGRect, corners: ImageCorners?) {
let spacing: CGFloat = 2.0
let padding: CGFloat = 10.0
if let (imageSignal, drawingSize) = imageSignal {
self.blurredImageNode.setSignal(imageSignal)
let imageLayout = self.blurredImageNode.asyncLayout()
let arguments = TransformImageArguments(corners: corners ?? ImageCorners(), imageSize: drawingSize, boundingSize: imageFrame.size, intrinsicInsets: UIEdgeInsets(), resizeMode: .blurBackground, emptyColor: .clear, custom: nil)
let apply = imageLayout(arguments)
apply()
self.blurredImageNode.isHidden = false
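// Keep the overlay interactive only while the spoiler is still hidden; once the dust
// effect reports a reveal, drop the blurred cover and let touches pass through.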
self.isUserInteractionEnabled = !self.dustNode.isRevealed
self.dustNode.revealed = { [weak self] in
self?.blurredImageNode.removeFromSupernode()
self?.isUserInteractionEnabled = false
}
} else {
self.blurredImageNode.isHidden = true
self.isUserInteractionEnabled = false
}
self.blurredImageNode.frame = imageFrame
self.dustNode.frame = CGRect(origin: .zero, size: size)
self.dustNode.update(size: size, color: .white)
self.textNode.attributedText = NSAttributedString(string: text, font: Font.semibold(14.0), textColor: .white, paragraphAlignment: .center)
let textSize = self.textNode.updateLayout(size)
if let iconSize = self.iconNode.image?.size {
let contentSize = CGSize(width: iconSize.width + textSize.width + spacing + padding * 2.0, height: 32.0)
self.buttonNode.frame = CGRect(origin: CGPoint(x: floorToScreenPixels((size.width - contentSize.width) / 2.0), y: floorToScreenPixels((size.height - contentSize.height) / 2.0)), size: contentSize)
self.highlightedBackgroundNode.frame = CGRect(origin: .zero, size: contentSize)
self.iconNode.frame = CGRect(origin: CGPoint(x: self.buttonNode.frame.minX + padding, y: self.buttonNode.frame.minY + floorToScreenPixels((contentSize.height - iconSize.height) / 2.0) + 1.0 - UIScreenPixel), size: iconSize)
self.textNode.frame = CGRect(origin: CGPoint(x: self.iconNode.frame.maxX + spacing, y: self.buttonNode.frame.minY + floorToScreenPixels((contentSize.height - textSize.height) / 2.0)), size: textSize)
if text.isEmpty {
self.buttonNode.isHidden = true
self.textNode.isHidden = true
} else {
self.buttonNode.isHidden = false
self.textNode.isHidden = false
self.textNode.attributedText = NSAttributedString(string: text, font: Font.semibold(14.0), textColor: .white, paragraphAlignment: .center)
let textSize = self.textNode.updateLayout(size)
if let iconSize = self.iconNode.image?.size {
let contentSize = CGSize(width: iconSize.width + textSize.width + spacing + padding * 2.0, height: 32.0)
self.buttonNode.frame = CGRect(origin: CGPoint(x: floorToScreenPixels((size.width - contentSize.width) / 2.0), y: floorToScreenPixels((size.height - contentSize.height) / 2.0)), size: contentSize)
self.highlightedBackgroundNode.frame = CGRect(origin: .zero, size: contentSize)
self.iconNode.frame = CGRect(origin: CGPoint(x: self.buttonNode.frame.minX + padding, y: self.buttonNode.frame.minY + floorToScreenPixels((contentSize.height - iconSize.height) / 2.0) + 1.0 - UIScreenPixel), size: iconSize)
self.textNode.frame = CGRect(origin: CGPoint(x: self.iconNode.frame.maxX + spacing, y: self.buttonNode.frame.minY + floorToScreenPixels((contentSize.height - textSize.height) / 2.0)), size: textSize)
}
}
var leftOffset: CGFloat = 0.0
@@ -290,6 +320,7 @@ final class ChatMessageInteractiveMediaNode: ASDisplayNode, GalleryItemTransitio
private let imageNode: TransformImageNode
private var currentImageArguments: TransformImageArguments?
private var currentHighQualityImageSignal: (Signal<(TransformImageArguments) -> DrawingContext?, NoError>, CGSize)?
private var currentBlurredImageSignal: (Signal<(TransformImageArguments) -> DrawingContext?, NoError>, CGSize)?
private var highQualityImageNode: TransformImageNode?
private var videoNode: UniversalVideoNode?
@@ -855,6 +886,7 @@ final class ChatMessageInteractiveMediaNode: ASDisplayNode, GalleryItemTransitio
}
var updateImageSignal: ((Bool, Bool) -> Signal<(TransformImageArguments) -> DrawingContext?, NoError>)?
var updateBlurredImageSignal: ((Bool, Bool) -> Signal<(TransformImageArguments) -> DrawingContext?, NoError>)?
var updatedStatusSignal: Signal<(MediaResourceStatus, MediaResourceStatus?), NoError>?
var updatedFetchControls: FetchControls?
@@ -946,6 +978,9 @@ final class ChatMessageInteractiveMediaNode: ASDisplayNode, GalleryItemTransitio
updateImageSignal = { synchronousLoad, highQuality in
return chatMessagePhoto(postbox: context.account.postbox, photoReference: .message(message: MessageReference(message), media: image), synchronousLoad: synchronousLoad, highQuality: highQuality)
}
updateBlurredImageSignal = { synchronousLoad, _ in
return chatSecretPhoto(account: context.account, photoReference: .message(message: MessageReference(message), media: image))
}
}
updatedFetchControls = FetchControls(fetch: { manual in
@@ -1323,9 +1358,15 @@ final class ChatMessageInteractiveMediaNode: ASDisplayNode, GalleryItemTransitio
if let imageDimensions = imageDimensions {
strongSelf.currentHighQualityImageSignal = (updateImageSignal(false, true), imageDimensions)
if let updateBlurredImageSignal = updateBlurredImageSignal {
strongSelf.currentBlurredImageSignal = (updateBlurredImageSignal(false, true), imageDimensions)
}
}
}
if let _ = secretBeginTimeAndTimeout {
if updatedStatusSignal == nil, let fetchStatus = strongSelf.fetchStatus, case .Local = fetchStatus {
if let statusNode = strongSelf.statusNode, case .secretTimeout = statusNode.state {
@@ -1842,7 +1883,14 @@ final class ChatMessageInteractiveMediaNode: ASDisplayNode, GalleryItemTransitio
badgeNode.removeFromSupernode()
}
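// The spoiler cover applies in two cases: paid (extended) media that is still in its
// preview state, and ordinary media the sender explicitly marked with MediaSpoilerMessageAttribute.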
var displaySpoiler = false
if let invoice = invoice, let extendedMedia = invoice.extendedMedia, case .preview = extendedMedia {
displaySpoiler = true
} else if message.attributes.contains(where: { $0 is MediaSpoilerMessageAttribute }) {
displaySpoiler = true
}
if displaySpoiler {
if self.extendedMediaOverlayNode == nil {
let extendedMediaOverlayNode = ExtendedMediaOverlayNode()
self.extendedMediaOverlayNode = extendedMediaOverlayNode
@@ -1864,7 +1912,7 @@ final class ChatMessageInteractiveMediaNode: ASDisplayNode, GalleryItemTransitio
break
}
}
self.extendedMediaOverlayNode?.update(size: self.imageNode.frame.size, text: paymentText, corners: self.currentImageArguments?.corners)
self.extendedMediaOverlayNode?.update(size: self.imageNode.frame.size, text: paymentText, imageSignal: self.currentBlurredImageSignal, imageFrame: self.imageNode.view.convert(self.imageNode.bounds, to: self.extendedMediaOverlayNode?.view), corners: self.currentImageArguments?.corners)
} else if let extendedMediaOverlayNode = self.extendedMediaOverlayNode {
self.extendedMediaOverlayNode = nil
extendedMediaOverlayNode.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.3, removeOnCompletion: false, completion: { [weak extendedMediaOverlayNode] _ in

View File

@@ -13,18 +13,22 @@ import LocalizedPeerData
import TelegramStringFormatting
import WallpaperBackgroundNode
import ReactionSelectionNode
import PhotoResources
class ChatMessageProfilePhotoSuggestionContentNode: ChatMessageBubbleContentNode {
private var mediaBackgroundContent: WallpaperBubbleBackgroundNode?
private let mediaBackgroundNode: NavigationBackgroundNode
private let titleNode: TextNode
private let subtitleNode: TextNode
private let imageNode: TransformImageNode
private let buttonNode: HighlightTrackingButtonNode
private let buttonStarsNode: PremiumStarsNode
private let buttonTitleNode: TextNode
private var absoluteRect: (CGRect, CGSize)?
private let fetchDisposable = MetaDisposable()
required init() {
self.mediaBackgroundNode = NavigationBackgroundNode(color: .clear)
@@ -39,6 +43,8 @@ class ChatMessageProfilePhotoSuggestionContentNode: ChatMessageBubbleContentNode
self.subtitleNode.isUserInteractionEnabled = false
self.subtitleNode.displaysAsynchronously = false
self.imageNode = TransformImageNode()
self.buttonNode = HighlightTrackingButtonNode()
self.buttonNode.clipsToBounds = true
self.buttonNode.cornerRadius = 17.0
@@ -54,6 +60,7 @@ class ChatMessageProfilePhotoSuggestionContentNode: ChatMessageBubbleContentNode
self.addSubnode(self.mediaBackgroundNode)
self.addSubnode(self.titleNode)
self.addSubnode(self.subtitleNode)
self.addSubnode(self.imageNode)
self.addSubnode(self.buttonNode)
self.buttonNode.addSubnode(self.buttonStarsNode)
@@ -82,6 +89,10 @@ class ChatMessageProfilePhotoSuggestionContentNode: ChatMessageBubbleContentNode
fatalError("init(coder:) has not been implemented")
}
deinit {
self.fetchDisposable.dispose()
}
@objc private func buttonPressed() {
guard let item = self.item else {
return
@@ -91,6 +102,7 @@ class ChatMessageProfilePhotoSuggestionContentNode: ChatMessageBubbleContentNode
override func asyncLayoutContent() -> (_ item: ChatMessageBubbleContentItem, _ layoutConstants: ChatMessageItemLayoutConstants, _ preparePosition: ChatMessageBubblePreparePosition, _ messageSelection: Bool?, _ constrainedSize: CGSize, _ avatarInset: CGFloat) -> (ChatMessageBubbleContentProperties, unboundSize: CGSize?, maxWidth: CGFloat, layout: (CGSize, ChatMessageBubbleContentPosition) -> (CGFloat, (CGFloat) -> (CGSize, (ListViewItemUpdateAnimation, Bool, ListViewItemApply?) -> Void))) {
let makeTitleLayout = TextNode.asyncLayout(self.titleNode)
let makeImageLayout = self.imageNode.asyncLayout()
let makeSubtitleLayout = TextNode.asyncLayout(self.subtitleNode)
let makeButtonTitleLayout = TextNode.asyncLayout(self.buttonTitleNode)
@@ -98,24 +110,53 @@ class ChatMessageProfilePhotoSuggestionContentNode: ChatMessageBubbleContentNode
let contentProperties = ChatMessageBubbleContentProperties(hidesSimpleAuthorHeader: true, headerSpacing: 0.0, hidesBackground: .always, forceFullCorners: false, forceAlignment: .center)
return (contentProperties, nil, CGFloat.greatestFiniteMagnitude, { constrainedSize, position in
let giftSize = CGSize(width: 220.0, height: 240.0)
let width: CGFloat = 220.0
let imageSize = CGSize(width: 100.0, height: 100.0)
let primaryTextColor = serviceMessageColorComponents(theme: item.presentationData.theme.theme, wallpaper: item.presentationData.theme.wallpaper).primaryText
let (titleLayout, titleApply) = makeTitleLayout(TextNodeLayoutArguments(attributedString: NSAttributedString(string: item.presentationData.strings.Notification_PremiumGift_Title, font: Font.semibold(15.0), textColor: primaryTextColor, paragraphAlignment: .center), backgroundColor: nil, maximumNumberOfLines: 0, truncationType: .end, constrainedSize: CGSize(width: giftSize.width - 32.0, height: CGFloat.greatestFiniteMagnitude), alignment: .center, cutout: nil, insets: UIEdgeInsets()))
var photo: TelegramMediaImage?
if let media = item.message.media.first(where: { $0 is TelegramMediaAction }) as? TelegramMediaAction, case let .suggestedProfilePhoto(image) = media.action {
photo = image
}
let isVideo = !(photo?.videoRepresentations.isEmpty ?? true)
let fromYou = item.message.author?.id == item.context.account.peerId
let (subtitleLayout, subtitleApply) = makeSubtitleLayout(TextNodeLayoutArguments(attributedString: NSAttributedString(string: "Subtitle", font: Font.regular(13.0), textColor: primaryTextColor, paragraphAlignment: .center), backgroundColor: nil, maximumNumberOfLines: 0, truncationType: .end, constrainedSize: CGSize(width: giftSize.width - 32.0, height: CGFloat.greatestFiniteMagnitude), alignment: .center, cutout: nil, insets: UIEdgeInsets()))
let (titleLayout, titleApply) = makeTitleLayout(TextNodeLayoutArguments(attributedString: NSAttributedString(string: isVideo ? item.presentationData.strings.Conversation_SuggestedVideoTitle : item.presentationData.strings.Conversation_SuggestedPhotoTitle, font: Font.semibold(15.0), textColor: primaryTextColor, paragraphAlignment: .center), backgroundColor: nil, maximumNumberOfLines: 0, truncationType: .end, constrainedSize: CGSize(width: width - 32.0, height: CGFloat.greatestFiniteMagnitude), alignment: .center, cutout: nil, insets: UIEdgeInsets()))
let (buttonTitleLayout, buttonTitleApply) = makeButtonTitleLayout(TextNodeLayoutArguments(attributedString: NSAttributedString(string: item.presentationData.strings.Notification_PremiumGift_View, font: Font.semibold(15.0), textColor: primaryTextColor, paragraphAlignment: .center), backgroundColor: nil, maximumNumberOfLines: 0, truncationType: .end, constrainedSize: CGSize(width: giftSize.width - 32.0, height: CGFloat.greatestFiniteMagnitude), alignment: .center, cutout: nil, insets: UIEdgeInsets()))
let peerName = item.message.peers[item.message.id.peerId].flatMap { EnginePeer($0).compactDisplayTitle } ?? ""
let text: String
if fromYou {
text = isVideo ? item.presentationData.strings.Conversation_SuggestedVideoTextYou(peerName).string : item.presentationData.strings.Conversation_SuggestedPhotoTextYou(peerName).string
} else {
text = isVideo ? item.presentationData.strings.Conversation_SuggestedVideoText(peerName).string : item.presentationData.strings.Conversation_SuggestedPhotoText(peerName).string
}
let (subtitleLayout, subtitleApply) = makeSubtitleLayout(TextNodeLayoutArguments(attributedString: NSAttributedString(string: text, font: Font.regular(13.0), textColor: primaryTextColor, paragraphAlignment: .center), backgroundColor: nil, maximumNumberOfLines: 0, truncationType: .end, constrainedSize: CGSize(width: width - 32.0, height: CGFloat.greatestFiniteMagnitude), alignment: .center, cutout: nil, insets: UIEdgeInsets()))
let (buttonTitleLayout, buttonTitleApply) = makeButtonTitleLayout(TextNodeLayoutArguments(attributedString: NSAttributedString(string: isVideo ? item.presentationData.strings.Conversation_SuggestedVideoView : item.presentationData.strings.Conversation_SuggestedPhotoView, font: Font.semibold(15.0), textColor: primaryTextColor, paragraphAlignment: .center), backgroundColor: nil, maximumNumberOfLines: 0, truncationType: .end, constrainedSize: CGSize(width: width - 32.0, height: CGFloat.greatestFiniteMagnitude), alignment: .center, cutout: nil, insets: UIEdgeInsets()))
let backgroundSize = CGSize(width: giftSize.width, height: giftSize.height + 18.0)
let backgroundSize = CGSize(width: width, height: titleLayout.size.height + subtitleLayout.size.height + 182.0)
return (backgroundSize.width, { boundingWidth in
return (backgroundSize, { [weak self] animation, synchronousLoads, _ in
if let strongSelf = self {
strongSelf.item = item
let imageFrame = CGRect(origin: CGPoint(x: floorToScreenPixels((backgroundSize.width - giftSize.width) / 2.0), y: 16.0), size: giftSize)
if let photo = photo {
strongSelf.fetchDisposable.set(chatMessagePhotoInteractiveFetched(context: item.context, photoReference: .message(message: MessageReference(item.message), media: photo), displayAtSize: nil, storeToDownloadsPeerType: nil).start())
let updateImageSignal = chatMessagePhoto(postbox: item.context.account.postbox, photoReference: .message(message: MessageReference(item.message), media: photo), synchronousLoad: synchronousLoads)
strongSelf.imageNode.setSignal(updateImageSignal, attemptSynchronously: synchronousLoads)
let arguments = TransformImageArguments(corners: ImageCorners(radius: imageSize.width / 2.0), imageSize: imageSize, boundingSize: imageSize, intrinsicInsets: UIEdgeInsets())
let apply = makeImageLayout(arguments)
apply()
strongSelf.imageNode.frame = CGRect(origin: CGPoint(x: floorToScreenPixels((backgroundSize.width - imageSize.width) / 2.0), y: 13.0), size: imageSize)
}
let imageFrame = CGRect(origin: CGPoint(x: floorToScreenPixels((backgroundSize.width - width) / 2.0), y: 0.0), size: backgroundSize)
let mediaBackgroundFrame = imageFrame.insetBy(dx: -2.0, dy: -2.0)
strongSelf.mediaBackgroundNode.frame = mediaBackgroundFrame
@@ -127,10 +168,10 @@ class ChatMessageProfilePhotoSuggestionContentNode: ChatMessageBubbleContentNode
let _ = subtitleApply()
let _ = buttonTitleApply()
let titleFrame = CGRect(origin: CGPoint(x: mediaBackgroundFrame.minX + floorToScreenPixels((mediaBackgroundFrame.width - titleLayout.size.width) / 2.0) , y: mediaBackgroundFrame.minY + 151.0), size: titleLayout.size)
let titleFrame = CGRect(origin: CGPoint(x: mediaBackgroundFrame.minX + floorToScreenPixels((mediaBackgroundFrame.width - titleLayout.size.width) / 2.0) , y: mediaBackgroundFrame.minY + 127.0), size: titleLayout.size)
strongSelf.titleNode.frame = titleFrame
let subtitleFrame = CGRect(origin: CGPoint(x: mediaBackgroundFrame.minX + floorToScreenPixels((mediaBackgroundFrame.width - subtitleLayout.size.width) / 2.0) , y: titleFrame.maxY - 1.0), size: subtitleLayout.size)
let subtitleFrame = CGRect(origin: CGPoint(x: mediaBackgroundFrame.minX + floorToScreenPixels((mediaBackgroundFrame.width - subtitleLayout.size.width) / 2.0) , y: titleFrame.maxY + 2.0), size: subtitleLayout.size)
strongSelf.subtitleNode.frame = subtitleFrame
let buttonTitleFrame = CGRect(origin: CGPoint(x: mediaBackgroundFrame.minX + floorToScreenPixels((mediaBackgroundFrame.width - buttonTitleLayout.size.width) / 2.0), y: subtitleFrame.maxY + 18.0), size: buttonTitleLayout.size)

View File

@@ -6741,11 +6741,11 @@ final class PeerInfoScreenNode: ViewControllerTracingNode, UIScrollViewDelegate
}
}
private func updateProfilePhoto(_ image: UIImage, mode: AvatarEditingMode) {
fileprivate func updateProfilePhoto(_ image: UIImage, mode: AvatarEditingMode) {
guard let data = image.jpegData(compressionQuality: 0.6) else {
return
}
if self.headerNode.isAvatarExpanded {
self.headerNode.ignoreCollapse = true
self.headerNode.updateIsAvatarExpanded(false, transition: .immediate)
@@ -6757,7 +6757,11 @@ final class PeerInfoScreenNode: ViewControllerTracingNode, UIScrollViewDelegate
self.context.account.postbox.mediaBox.storeResourceData(resource.id, data: data)
let representation = TelegramMediaImageRepresentation(dimensions: PixelDimensions(width: 640, height: 640), resource: resource, progressiveSizes: [], immediateThumbnailData: nil, hasVideo: false, isPersonal: mode == .custom ? true : false)
self.state = self.state.withUpdatingAvatar(.image(representation))
if case .suggest = mode {
} else {
self.state = self.state.withUpdatingAvatar(.image(representation))
}
if let (layout, navigationHeight) = self.validLayout {
self.containerLayoutUpdated(layout: layout, navigationHeight: navigationHeight, transition: mode == .custom ? .animated(duration: 0.2, curve: .easeInOut) : .immediate, additive: false)
}
@@ -6809,7 +6813,7 @@ final class PeerInfoScreenNode: ViewControllerTracingNode, UIScrollViewDelegate
}))
}
private func updateProfileVideo(_ image: UIImage, asset: Any?, adjustments: TGVideoEditAdjustments?, mode: AvatarEditingMode) {
fileprivate func updateProfileVideo(_ image: UIImage, asset: Any?, adjustments: TGVideoEditAdjustments?, mode: AvatarEditingMode) {
guard let data = image.jpegData(compressionQuality: 0.6) else {
return
}
@@ -6825,7 +6829,11 @@ final class PeerInfoScreenNode: ViewControllerTracingNode, UIScrollViewDelegate
self.context.account.postbox.mediaBox.storeResourceData(photoResource.id, data: data)
let representation = TelegramMediaImageRepresentation(dimensions: PixelDimensions(width: 640, height: 640), resource: photoResource, progressiveSizes: [], immediateThumbnailData: nil, hasVideo: false, isPersonal: mode == .custom ? true : false)
self.state = self.state.withUpdatingAvatar(.image(representation))
if case .suggest = mode {
} else {
self.state = self.state.withUpdatingAvatar(.image(representation))
}
if let (layout, navigationHeight) = self.validLayout {
self.containerLayoutUpdated(layout: layout, navigationHeight: navigationHeight, transition: mode == .custom ? .animated(duration: 0.2, curve: .easeInOut) : .immediate, additive: false)
}
@@ -6964,7 +6972,7 @@ final class PeerInfoScreenNode: ViewControllerTracingNode, UIScrollViewDelegate
}))
}
private enum AvatarEditingMode {
fileprivate enum AvatarEditingMode {
case generic
case suggest
case custom
@@ -9317,6 +9325,20 @@ public final class PeerInfoScreenImpl: ViewController, PeerInfoScreen, KeyShortc
}
}
func updateProfilePhoto(_ image: UIImage) {
if !self.isNodeLoaded {
self.loadDisplayNode()
}
self.controllerNode.updateProfilePhoto(image, mode: .generic)
}
func updateProfileVideo(_ image: UIImage, asset: Any?, adjustments: TGVideoEditAdjustments?) {
if !self.isNodeLoaded {
self.loadDisplayNode()
}
self.controllerNode.updateProfileVideo(image, asset: asset, adjustments: adjustments, mode: .generic)
}
static func displayChatNavigationMenu(context: AccountContext, chatNavigationStack: [ChatNavigationStackItem], nextFolderId: Int32?, parentController: ViewController, backButtonView: UIView, navigationController: NavigationController, gesture: ContextGesture) {
let peerMap = EngineDataMap(
Set(chatNavigationStack.map(\.peerId)).map(TelegramEngine.EngineData.Item.Peer.Peer.init)