diff --git a/submodules/GalleryUI/BUILD b/submodules/GalleryUI/BUILD index d7df405df9..d98f10b4c9 100644 --- a/submodules/GalleryUI/BUILD +++ b/submodules/GalleryUI/BUILD @@ -34,6 +34,10 @@ swift_library( "//submodules/ContextUI:ContextUI", "//submodules/SaveToCameraRoll:SaveToCameraRoll", "//submodules/TelegramUIPreferences:TelegramUIPreferences", + "//submodules/ImageContentAnalysis:ImageContentAnalysis", + "//submodules/TextSelectionNode:TextSelectionNode", + "//submodules/Speak:Speak", + "//submodules/UndoUI:UndoUI", ], visibility = [ "//visibility:public", diff --git a/submodules/GalleryUI/Sources/GalleryController.swift b/submodules/GalleryUI/Sources/GalleryController.swift index b3df37dbef..296165a905 100644 --- a/submodules/GalleryUI/Sources/GalleryController.swift +++ b/submodules/GalleryUI/Sources/GalleryController.swift @@ -1122,6 +1122,7 @@ public class GalleryController: ViewController, StandalonePresentableController strongSelf.centralItemRightBarButtonItems.set(node.rightBarButtonItems()) strongSelf.centralItemNavigationStyle.set(node.navigationStyle()) strongSelf.centralItemFooterContentNode.set(node.footerContent()) + strongSelf.galleryNode.pager.pagingEnabledPromise.set(node.isPagingEnabled()) } switch strongSelf.source { @@ -1286,6 +1287,7 @@ public class GalleryController: ViewController, StandalonePresentableController self.centralItemRightBarButtonItems.set(centralItemNode.rightBarButtonItems()) self.centralItemNavigationStyle.set(centralItemNode.navigationStyle()) self.centralItemFooterContentNode.set(centralItemNode.footerContent()) + self.galleryNode.pager.pagingEnabledPromise.set(centralItemNode.isPagingEnabled()) if let (media, _) = mediaForMessage(message: message) { if let presentationArguments = self.presentationArguments as? GalleryControllerPresentationArguments, let transitionArguments = presentationArguments.transitionArguments(message.id, media) { @@ -1323,6 +1325,7 @@ public class GalleryController: ViewController, StandalonePresentableController self.centralItemRightBarButtonItems.set(centralItemNode.rightBarButtonItems()) self.centralItemNavigationStyle.set(centralItemNode.navigationStyle()) self.centralItemFooterContentNode.set(centralItemNode.footerContent()) + self.galleryNode.pager.pagingEnabledPromise.set(centralItemNode.isPagingEnabled()) if let _ = mediaForMessage(message: message) { centralItemNode.activateAsInitial() diff --git a/submodules/GalleryUI/Sources/GalleryItemNode.swift b/submodules/GalleryUI/Sources/GalleryItemNode.swift index c777929084..bfb4cc7c21 100644 --- a/submodules/GalleryUI/Sources/GalleryItemNode.swift +++ b/submodules/GalleryUI/Sources/GalleryItemNode.swift @@ -58,6 +58,10 @@ open class GalleryItemNode: ASDisplayNode { return .single(nil) } + open func isPagingEnabled() -> Signal<Bool, NoError> { + return .single(true) + } + open func footerContent() -> Signal<(GalleryFooterContentNode?, GalleryOverlayContentNode?), NoError> { return .single((nil, nil)) } diff --git a/submodules/GalleryUI/Sources/GalleryPagerNode.swift b/submodules/GalleryUI/Sources/GalleryPagerNode.swift index cbb6e27361..6c59f7cbfd 100644 --- a/submodules/GalleryUI/Sources/GalleryPagerNode.swift +++ b/submodules/GalleryUI/Sources/GalleryPagerNode.swift @@ -114,6 +114,10 @@ public final class GalleryPagerNode: ASDisplayNode, UIScrollViewDelegate, UIGest public var baseNavigationController: () -> NavigationController? = { return nil } public var galleryController: () -> ViewController?
= { return nil } + private var pagingEnabled = true + public var pagingEnabledPromise = Promise<Bool>(true) + private var pagingEnabledDisposable: Disposable? + public init(pageGap: CGFloat, disableTapNavigation: Bool) { self.pageGap = pageGap self.disableTapNavigation = disableTapNavigation @@ -146,6 +150,17 @@ public final class GalleryPagerNode: ASDisplayNode, UIScrollViewDelegate, UIGest self.addSubnode(self.leftFadeNode) self.addSubnode(self.rightFadeNode) + + self.pagingEnabledDisposable = (self.pagingEnabledPromise.get() + |> deliverOnMainQueue).start(next: { [weak self] pagingEnabled in + if let strongSelf = self { + strongSelf.pagingEnabled = pagingEnabled + } + }) + } + + deinit { + self.pagingEnabledDisposable?.dispose() } public override func didLoad() { @@ -155,7 +170,7 @@ public final class GalleryPagerNode: ASDisplayNode, UIScrollViewDelegate, UIGest recognizer.delegate = self self.tapRecognizer = recognizer recognizer.tapActionAtPoint = { [weak self] point in - guard let strongSelf = self else { + guard let strongSelf = self, strongSelf.pagingEnabled else { return .fail } @@ -186,7 +201,7 @@ public final class GalleryPagerNode: ASDisplayNode, UIScrollViewDelegate, UIGest return .keepWithSingleTap } recognizer.highlight = { [weak self] point in - guard let strongSelf = self else { + guard let strongSelf = self, strongSelf.pagingEnabled else { return } let size = strongSelf.bounds diff --git a/submodules/GalleryUI/Sources/Items/ChatImageGalleryItem.swift b/submodules/GalleryUI/Sources/Items/ChatImageGalleryItem.swift index a7e6afffe7..94954d6afc 100644 --- a/submodules/GalleryUI/Sources/Items/ChatImageGalleryItem.swift +++ b/submodules/GalleryUI/Sources/Items/ChatImageGalleryItem.swift @@ -13,6 +13,11 @@ import AppBundle import StickerPackPreviewUI import OverlayStatusController import PresentationDataUtils +import ImageContentAnalysis +import TextSelectionNode +import Speak +import ShareController +import UndoUI enum ChatMediaGalleryThumbnail: Equatable { case image(ImageMediaReference) @@ -188,6 +193,10 @@ final class ChatImageGalleryItemNode: ZoomableContentGalleryItemNode { private var message: Message? private let imageNode: TransformImageNode + private var recognizedContentNode: RecognizedContentContainer? + + private let recognitionOverlayContentNode: ImageRecognitionOverlayContentNode + private var tilingNode: TilingNode? fileprivate let _ready = Promise<Void>() fileprivate let _title = Promise<String>() @@ -203,8 +212,13 @@ final class ChatImageGalleryItemNode: ZoomableContentGalleryItemNode { private var fetchDisposable = MetaDisposable() private let statusDisposable = MetaDisposable() private let dataDisposable = MetaDisposable() + private let recognitionDisposable = MetaDisposable() private var status: MediaResourceStatus? + private var textCopiedTooltipController: UndoOverlayController? + + private let pagingEnabledPromise = ValuePromise<Bool>(true) + init(context: AccountContext, presentationData: PresentationData, performAction: @escaping (GalleryControllerInteractionTapAction) -> Void, openActionOptions: @escaping (GalleryControllerInteractionTapAction, Message) -> Void, present: @escaping (ViewController, Any?)
-> Void) { self.context = context @@ -214,6 +228,8 @@ final class ChatImageGalleryItemNode: ZoomableContentGalleryItemNode { self.footerContentNode.performAction = performAction self.footerContentNode.openActionOptions = openActionOptions + self.recognitionOverlayContentNode = ImageRecognitionOverlayContentNode(theme: presentationData.theme) + self.statusNodeContainer = HighlightableButtonNode() self.statusNode = RadialStatusNode(backgroundNodeColor: UIColor(white: 0.0, alpha: 0.5)) self.statusNode.frame = CGRect(origin: CGPoint(), size: CGSize(width: 50.0, height: 50.0)) @@ -237,12 +253,31 @@ final class ChatImageGalleryItemNode: ZoomableContentGalleryItemNode { self.titleContentView = GalleryTitleView(frame: CGRect()) self._titleView.set(.single(self.titleContentView)) + + self.recognitionOverlayContentNode.action = { [weak self] active in + if let strongSelf = self { + let transition = ContainedViewLayoutTransition.animated(duration: 0.2, curve: .easeInOut) + if let recognizedContentNode = strongSelf.recognizedContentNode { + strongSelf.imageNode.isUserInteractionEnabled = active + transition.updateAlpha(node: recognizedContentNode, alpha: active ? 1.0 : 0.0) + if !active { + recognizedContentNode.dismissSelection() + } + strongSelf.pagingEnabledPromise.set(!active) + } + } + } + } + + override func isPagingEnabled() -> Signal<Bool, NoError> { + return self.pagingEnabledPromise.get() } deinit { //self.fetchDisposable.dispose() self.statusDisposable.dispose() self.dataDisposable.dispose() + self.recognitionDisposable.dispose() } override func ready() -> Signal<Void, NoError> { @@ -277,6 +312,69 @@ final class ChatImageGalleryItemNode: ZoomableContentGalleryItemNode { switch quality { case .medium, .full: strongSelf.statusNodeContainer.isHidden = true + + Queue.concurrentDefaultQueue().async { + if let message = strongSelf.message, !message.isCopyProtected(), let image = generate(TransformImageArguments(corners: ImageCorners(), imageSize: displaySize, boundingSize: displaySize, intrinsicInsets: UIEdgeInsets()))?.generateImage() { + strongSelf.recognitionDisposable.set((recognizedContent(postbox: strongSelf.context.account.postbox, image: image, messageId: message.id) + |> deliverOnMainQueue).start(next: { [weak self] results in + if let strongSelf = self { + strongSelf.recognizedContentNode?.removeFromSupernode() + if !results.isEmpty { + let size = strongSelf.imageNode.bounds.size + let recognizedContentNode = RecognizedContentContainer(size: size, image: image, recognitions: results, presentationData: strongSelf.context.sharedContext.currentPresentationData.with { $0 }, present: { c, a in + strongSelf.galleryController()?.presentInGlobalOverlay(c, with: a) + }, performAction: { [weak self] string, action in + guard let strongSelf = self else { + return + } + switch action { + case .copy: + UIPasteboard.general.string = string + if let controller = strongSelf.baseNavigationController()?.topViewController as? ViewController { + let presentationData = strongSelf.context.sharedContext.currentPresentationData.with({ $0 }) + let tooltipController = UndoOverlayController(presentationData: presentationData, content: .copy(text: presentationData.strings.Conversation_TextCopied), elevatedLayout: true, animateInAsReplacement: false, action: { _ in return false }) + strongSelf.textCopiedTooltipController = tooltipController + controller.present(tooltipController, in: .window(.root)) + } + case .share: + if let controller = strongSelf.baseNavigationController()?.topViewController as?
ViewController { + let shareController = ShareController(context: strongSelf.context, subject: .text(string), externalShare: true, immediateExternalShare: false, updatedPresentationData: (strongSelf.context.sharedContext.currentPresentationData.with({ $0 }), strongSelf.context.sharedContext.presentationData)) + controller.present(shareController, in: .window(.root)) + } + case .lookup: + let controller = UIReferenceLibraryViewController(term: string) + if let window = strongSelf.baseNavigationController()?.view.window { + controller.popoverPresentationController?.sourceView = window + controller.popoverPresentationController?.sourceRect = CGRect(origin: CGPoint(x: window.bounds.width / 2.0, y: window.bounds.size.height - 1.0), size: CGSize(width: 1.0, height: 1.0)) + window.rootViewController?.present(controller, animated: true) + } + case .speak: + speakText(string) + } + }) + recognizedContentNode.barcodeAction = { [weak self] payload, rect in + guard let strongSelf = self, let message = strongSelf.message else { + return + } + strongSelf.footerContentNode.openActionOptions?(.url(url: payload, concealed: true), message) + } + recognizedContentNode.textAction = { _, _ in +// guard let strongSelf = self else { +// return +// } + } + recognizedContentNode.alpha = 0.0 + recognizedContentNode.frame = CGRect(origin: CGPoint(), size: size) + recognizedContentNode.update(size: strongSelf.imageNode.bounds.size, transition: .immediate) + strongSelf.imageNode.addSubnode(recognizedContentNode) + strongSelf.recognizedContentNode = recognizedContentNode + strongSelf.recognitionOverlayContentNode.transitionIn() + } + } + })) + } + } + case .none, .blurred: strongSelf.statusNodeContainer.isHidden = false } @@ -533,6 +631,8 @@ final class ChatImageGalleryItemNode: ZoomableContentGalleryItemNode { } override func animateOut(to node: (ASDisplayNode, CGRect, () -> (UIView?, UIView?)), addToTransitionSurface: (UIView) -> Void, completion: @escaping () -> Void) { + self.textCopiedTooltipController?.dismiss() + self.fetchDisposable.set(nil) let contentNode = self.tilingNode ?? self.imageNode @@ -629,7 +729,7 @@ final class ChatImageGalleryItemNode: ZoomableContentGalleryItemNode { } override func footerContent() -> Signal<(GalleryFooterContentNode?, GalleryOverlayContentNode?), NoError> { - return .single((self.footerContentNode, nil)) + return .single((self.footerContentNode, self.recognitionOverlayContentNode)) } @objc func statusPressed() { @@ -885,3 +985,206 @@ private final class TilingNode: ASDisplayNode { } } } + +extension UIBezierPath { + convenience init(rect: RecognizedContent.Rect, radius r: CGFloat) { + let left = CGFloat.pi + let up = CGFloat.pi * 1.5 + let down = CGFloat.pi * 0.5 + let right = CGFloat.pi * 0.0 + + self.init() + + addArc(withCenter: CGPoint(x: rect.topLeft.x + r, y: rect.topLeft.y + r), radius: r, startAngle: left, endAngle: up, clockwise: true) + addArc(withCenter: CGPoint(x: rect.topRight.x - r, y: rect.topRight.y + r), radius: r, startAngle: up, endAngle: right, clockwise: true) + addArc(withCenter: CGPoint(x: rect.bottomRight.x - r, y: rect.bottomRight.y - r), radius: r, startAngle: right, endAngle: down, clockwise: true) + addArc(withCenter: CGPoint(x: rect.bottomLeft.x + r, y: rect.bottomLeft.y - r), radius: r, startAngle: down, endAngle: left, clockwise: true) + close() + } +} + +private func generateMaskImage(size: CGSize, recognitions: [RecognizedContent]) -> UIImage? 
{ + return generateImage(size, opaque: false, rotatedContext: { size, c in + let bounds = CGRect(origin: CGPoint(), size: size) + c.clear(bounds) + + c.setFillColor(UIColor(rgb: 0x000000, alpha: 0.4).cgColor) + c.fill(bounds) + + c.setBlendMode(.clear) + for recognition in recognitions { + let mappedRect = recognition.rect.convertTo(size: size, insets: UIEdgeInsets(top: -4.0, left: -2.0, bottom: -4.0, right: -2.0)) + let path = UIBezierPath(rect: mappedRect, radius: 3.5) + c.addPath(path.cgPath) + c.fillPath() + } + }) +} + +private class RecognizedContentContainer: ASDisplayNode { + private let size: CGSize + private let recognitions: [RecognizedContent] + + private let maskNode: ASImageNode + private var selectionNode: RecognizedTextSelectionNode? + + var barcodeAction: ((String, CGRect) -> Void)? + var textAction: ((String, CGRect) -> Void)? + + init(size: CGSize, image: UIImage, recognitions: [RecognizedContent], presentationData: PresentationData, present: @escaping (ViewController, Any?) -> Void, performAction: @escaping (String, RecognizedTextSelectionAction) -> Void) { + self.size = size + self.recognitions = recognitions + + self.maskNode = ASImageNode() + self.maskNode.image = generateMaskImage(size: size, recognitions: recognitions) + + super.init() + + let selectionNode = RecognizedTextSelectionNode(size: size, theme: RecognizedTextSelectionTheme(selection: presentationData.theme.chat.message.incoming.textSelectionColor, knob: presentationData.theme.chat.message.incoming.textSelectionKnobColor, knobDiameter: 12.0), strings: presentationData.strings, recognitions: recognitions, updateIsActive: { _ in }, present: present, rootNode: self, performAction: { string, action in + performAction(string, action) + }) + self.selectionNode = selectionNode + + self.addSubnode(self.maskNode) + self.addSubnode(selectionNode.highlightAreaNode) + self.addSubnode(selectionNode) + } + + func dismissSelection() { + let _ = self.selectionNode?.dismissSelection() + } + + override func didLoad() { + super.didLoad() + + self.view.addGestureRecognizer(UITapGestureRecognizer(target: self, action: #selector(self.handleTap(_:)))) + } + + @objc private func handleTap(_ gestureRecognizer: UITapGestureRecognizer) { + let location = gestureRecognizer.location(in: self.view) + + for recognition in self.recognitions { + let mappedRect = recognition.rect.convertTo(size: self.bounds.size) + if mappedRect.boundingFrame.contains(location) { + if case let .qrCode(payload) = recognition.content { + self.barcodeAction?(payload, mappedRect.boundingFrame) + } + break + } + } + } + + func update(size: CGSize, transition: ContainedViewLayoutTransition) { + let bounds = CGRect(origin: CGPoint(), size: size) + transition.updateFrame(node: self.maskNode, frame: bounds) + if let selectionNode = self.selectionNode { + transition.updateFrame(node: selectionNode, frame: bounds) + selectionNode.highlightAreaNode.frame = bounds + } + } + + override func point(inside point: CGPoint, with event: UIEvent?) -> Bool { + for recognition in self.recognitions { + let mappedRect = recognition.rect.convertTo(size: self.bounds.size) + if mappedRect.boundingFrame.insetBy(dx: -20.0, dy: -20.0).contains(point) { + return true + } + } + + if (self.selectionNode?.dismissSelection() ?? 
false) { + return true + } + + return false + } +} + + +private class ImageRecognitionOverlayContentNode: GalleryOverlayContentNode { + private let backgroundNode: ASImageNode + private let selectedBackgroundNode: ASImageNode + private let iconNode: ASImageNode + private let buttonNode: HighlightTrackingButtonNode + + var action: ((Bool) -> Void)? + private var appeared = false + + init(theme: PresentationTheme) { + self.backgroundNode = ASImageNode() + self.backgroundNode.displaysAsynchronously = false + self.backgroundNode.image = generateFilledCircleImage(diameter: 32.0, color: UIColor(white: 0.0, alpha: 0.6)) + + self.selectedBackgroundNode = ASImageNode() + self.selectedBackgroundNode.displaysAsynchronously = false + self.selectedBackgroundNode.isHidden = true + self.selectedBackgroundNode.image = generateFilledCircleImage(diameter: 32.0, color: theme.list.itemAccentColor) + + self.buttonNode = HighlightTrackingButtonNode() + self.buttonNode.alpha = 0.0 + + self.iconNode = ASImageNode() + self.iconNode.displaysAsynchronously = false + self.iconNode.image = generateTintedImage(image: UIImage(bundleImageName: "Media Gallery/LiveTextIcon"), color: .white) + self.iconNode.contentMode = .center + + super.init() + + self.buttonNode.addTarget(self, action: #selector(self.buttonPressed), forControlEvents: .touchUpInside) + self.addSubnode(self.buttonNode) + self.buttonNode.addSubnode(self.backgroundNode) + self.buttonNode.addSubnode(self.selectedBackgroundNode) + self.buttonNode.addSubnode(self.iconNode) + } + + @objc private func buttonPressed() { + let newValue = !self.buttonNode.isSelected + self.action?(newValue) + self.buttonNode.isSelected = newValue + self.selectedBackgroundNode.isHidden = !newValue + } + + func transitionIn() { + guard self.buttonNode.alpha.isZero else { + return + } + self.appeared = true + self.buttonNode.alpha = 1.0 + self.buttonNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2) + } + + override func updateLayout(size: CGSize, metrics: LayoutMetrics, leftInset: CGFloat, rightInset: CGFloat, bottomInset: CGFloat, transition: ContainedViewLayoutTransition) { + let buttonSize = CGSize(width: 32.0, height: 32.0) + self.backgroundNode.frame = CGRect(origin: CGPoint(), size: buttonSize) + self.selectedBackgroundNode.frame = CGRect(origin: CGPoint(), size: buttonSize) + self.iconNode.frame = CGRect(origin: CGPoint(), size: buttonSize) + + transition.updateFrame(node: self.buttonNode, frame: CGRect(x: size.width - rightInset - buttonSize.width - 12.0, y: size.height - bottomInset - buttonSize.height - 12.0, width: buttonSize.width, height: buttonSize.height)) + } + + override func animateIn(previousContentNode: GalleryOverlayContentNode?, transition: ContainedViewLayoutTransition) { + guard self.appeared else { + return + } + self.buttonNode.alpha = 1.0 + if let previousContentNode = previousContentNode as? ImageRecognitionOverlayContentNode, previousContentNode.appeared { + + } else { + self.buttonNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2) + } + } + + override func animateOut(nextContentNode: GalleryOverlayContentNode?, transition: ContainedViewLayoutTransition, completion: @escaping () -> Void) { + let previousAlpha = self.buttonNode.alpha + self.buttonNode.alpha = 0.0 + self.buttonNode.layer.animateAlpha(from: previousAlpha, to: 0.0, duration: 0.2) + completion() + } + + override func point(inside point: CGPoint, with event: UIEvent?) 
-> Bool { + if self.buttonNode.alpha > 0.0 && self.buttonNode.frame.contains(point) { + return true + } else { + return false + } + } +} diff --git a/submodules/GalleryUI/Sources/RecognizedTextSelectionNode.swift b/submodules/GalleryUI/Sources/RecognizedTextSelectionNode.swift new file mode 100644 index 0000000000..178e669c62 --- /dev/null +++ b/submodules/GalleryUI/Sources/RecognizedTextSelectionNode.swift @@ -0,0 +1,539 @@ +import Foundation +import UIKit +import UIKit.UIGestureRecognizerSubclass +import AsyncDisplayKit +import Display +import TelegramPresentationData +import ImageContentAnalysis + +private func findScrollView(view: UIView?) -> UIScrollView? { + if let view = view { + if let view = view as? UIScrollView { + return view + } + return findScrollView(view: view.superview) + } else { + return nil + } +} + +private func cancelScrollViewGestures(view: UIView?) { + if let view = view { + if let gestureRecognizers = view.gestureRecognizers { + for recognizer in gestureRecognizers { + if let recognizer = recognizer as? UIPanGestureRecognizer { + switch recognizer.state { + case .began, .possible: + recognizer.state = .ended + default: + break + } + } + } + } + cancelScrollViewGestures(view: view.superview) + } +} + +private func generateKnobImage(color: UIColor, diameter: CGFloat, inverted: Bool = false) -> UIImage? { + let f: (CGSize, CGContext) -> Void = { size, context in + context.clear(CGRect(origin: CGPoint(), size: size)) + context.setFillColor(color.cgColor) + context.fill(CGRect(origin: CGPoint(x: (size.width - 2.0) / 2.0, y: size.width / 2.0), size: CGSize(width: 2.0, height: size.height - size.width / 2.0 - 1.0))) + context.fillEllipse(in: CGRect(origin: CGPoint(x: floor((size.width - diameter) / 2.0), y: floor((size.width - diameter) / 2.0)), size: CGSize(width: diameter, height: diameter))) + context.fillEllipse(in: CGRect(origin: CGPoint(x: (size.width - 2.0) / 2.0, y: size.width + 2.0), size: CGSize(width: 2.0, height: 2.0))) + } + let size = CGSize(width: 12.0, height: 12.0 + 2.0 + 2.0) + if inverted { + return generateImage(size, contextGenerator: f)?.stretchableImage(withLeftCapWidth: Int(size.width / 2.0), topCapHeight: Int(size.height) - (Int(size.width) + 1)) + } else { + return generateImage(size, rotatedContext: f)?.stretchableImage(withLeftCapWidth: Int(size.width / 2.0), topCapHeight: Int(size.width) + 1) + } +} + +private func generateSelectionsImage(size: CGSize, rects: [RecognizedContent.Rect], color: UIColor) -> UIImage? { + return generateImage(size, opaque: false, rotatedContext: { size, c in + let bounds = CGRect(origin: CGPoint(), size: size) + c.clear(bounds) + + c.setFillColor(color.cgColor) + for rect in rects { + let path = UIBezierPath(rect: rect, radius: 2.5) + c.addPath(path.cgPath) + c.fillPath() + } + }) +} + +public final class RecognizedTextSelectionTheme { + public let selection: UIColor + public let knob: UIColor + public let knobDiameter: CGFloat + + public init(selection: UIColor, knob: UIColor, knobDiameter: CGFloat = 12.0) { + self.selection = selection + self.knob = knob + self.knobDiameter = knobDiameter + } +} + +private enum Knob { + case left + case right +} + +private final class RecognizedTextSelectionGetureRecognizer: UIGestureRecognizer, UIGestureRecognizerDelegate { + private var longTapTimer: Timer? + private var movingKnob: (Knob, CGPoint, CGPoint)? + private var currentLocation: CGPoint? + + var beginSelection: ((CGPoint) -> Void)? + var knobAtPoint: ((CGPoint) -> (Knob, CGPoint)?)? 
+ var moveKnob: ((Knob, CGPoint) -> Void)? + var finishedMovingKnob: (() -> Void)? + var clearSelection: (() -> Void)? + + override init(target: Any?, action: Selector?) { + super.init(target: nil, action: nil) + + self.delegate = self + } + + override public func reset() { + super.reset() + + self.longTapTimer?.invalidate() + self.longTapTimer = nil + + self.movingKnob = nil + self.currentLocation = nil + } + + override func touchesBegan(_ touches: Set<UITouch>, with event: UIEvent) { + super.touchesBegan(touches, with: event) + + let currentLocation = touches.first?.location(in: self.view) + self.currentLocation = currentLocation + + if let currentLocation = currentLocation { + if let (knob, knobPosition) = self.knobAtPoint?(currentLocation) { + self.movingKnob = (knob, knobPosition, currentLocation) + cancelScrollViewGestures(view: self.view?.superview) + self.state = .began + } else if self.longTapTimer == nil { + final class TimerTarget: NSObject { + let f: () -> Void + + init(_ f: @escaping () -> Void) { + self.f = f + } + + @objc func event() { + self.f() + } + } + let longTapTimer = Timer(timeInterval: 0.3, target: TimerTarget({ [weak self] in + self?.longTapEvent() + }), selector: #selector(TimerTarget.event), userInfo: nil, repeats: false) + self.longTapTimer = longTapTimer + RunLoop.main.add(longTapTimer, forMode: .common) + } + } + } + + override func touchesMoved(_ touches: Set<UITouch>, with event: UIEvent) { + super.touchesMoved(touches, with: event) + + let currentLocation = touches.first?.location(in: self.view) + self.currentLocation = currentLocation + + if let (knob, initialKnobPosition, initialGesturePosition) = self.movingKnob, let currentLocation = currentLocation { + self.moveKnob?(knob, CGPoint(x: initialKnobPosition.x + currentLocation.x - initialGesturePosition.x, y: initialKnobPosition.y + currentLocation.y - initialGesturePosition.y)) + } + } + + override func touchesEnded(_ touches: Set<UITouch>, with event: UIEvent) { + super.touchesEnded(touches, with: event) + + if let longTapTimer = self.longTapTimer { + self.longTapTimer = nil + longTapTimer.invalidate() + self.clearSelection?() + } else { + if let _ = self.currentLocation, let _ = self.movingKnob { + self.finishedMovingKnob?() + } + } + self.state = .ended + } + + override func touchesCancelled(_ touches: Set<UITouch>, with event: UIEvent) { + super.touchesCancelled(touches, with: event) + + self.state = .cancelled + } + + private func longTapEvent() { + if let currentLocation = self.currentLocation { + self.beginSelection?(currentLocation) + self.state = .ended + } + } + + func gestureRecognizer(_ gestureRecognizer: UIGestureRecognizer, shouldReceive touch: UITouch) -> Bool { + return true + } + + @available(iOS 9.0, *) + func gestureRecognizer(_ gestureRecognizer: UIGestureRecognizer, shouldReceive press: UIPress) -> Bool { + return true + } +} + +public final class RecognizedTextSelectionNodeView: UIView { + var hitTestImpl: ((CGPoint, UIEvent?) -> UIView?)? + + override public func hitTest(_ point: CGPoint, with event: UIEvent?) -> UIView?
{ + return self.hitTestImpl?(point, event) + } +} + +public enum RecognizedTextSelectionAction { + case copy + case share + case lookup + case speak +} + +public final class RecognizedTextSelectionNode: ASDisplayNode { + private let size: CGSize + private let theme: RecognizedTextSelectionTheme + private let strings: PresentationStrings + private let recognitions: [(string: String, rect: RecognizedContent.Rect)] + private let updateIsActive: (Bool) -> Void + private let present: (ViewController, Any?) -> Void + private weak var rootNode: ASDisplayNode? + private let performAction: (String, RecognizedTextSelectionAction) -> Void + private var highlightOverlay: ASImageNode? + private let leftKnob: ASImageNode + private let rightKnob: ASImageNode + + private var selectedIndices: Set<Int>? + private var currentRects: [RecognizedContent.Rect]? + private var currentTopLeft: CGPoint? + private var currentBottomRight: CGPoint? + + public let highlightAreaNode: ASDisplayNode + + private var recognizer: RecognizedTextSelectionGetureRecognizer? + + public init(size: CGSize, theme: RecognizedTextSelectionTheme, strings: PresentationStrings, recognitions: [RecognizedContent], updateIsActive: @escaping (Bool) -> Void, present: @escaping (ViewController, Any?) -> Void, rootNode: ASDisplayNode, performAction: @escaping (String, RecognizedTextSelectionAction) -> Void) { + self.size = size + self.theme = theme + self.strings = strings + + let sortedRecognitions = recognitions.sorted(by: { lhs, rhs in + if abs(lhs.rect.leftMidPoint.y - rhs.rect.rightMidPoint.y) < min(lhs.rect.leftHeight, rhs.rect.leftHeight) / 2.0 { + return lhs.rect.leftMidPoint.x < rhs.rect.leftMidPoint.x + } else { + return lhs.rect.leftMidPoint.y > rhs.rect.leftMidPoint.y + } + }) + var textRecognitions: [(String, RecognizedContent.Rect)] = [] + for recognition in sortedRecognitions { + if case let .text(string, words) = recognition.content { + for word in words { + textRecognitions.append((String(string[word.0]), word.1)) + } + } + } + self.recognitions = textRecognitions + + self.updateIsActive = updateIsActive + self.present = present + self.rootNode = rootNode + self.performAction = performAction + self.leftKnob = ASImageNode() + self.leftKnob.isUserInteractionEnabled = false + self.leftKnob.image = generateKnobImage(color: theme.knob, diameter: theme.knobDiameter) + self.leftKnob.displaysAsynchronously = false + self.leftKnob.displayWithoutProcessing = true + self.leftKnob.alpha = 0.0 + self.rightKnob = ASImageNode() + self.rightKnob.isUserInteractionEnabled = false + self.rightKnob.image = generateKnobImage(color: theme.knob, diameter: theme.knobDiameter, inverted: true) + self.rightKnob.displaysAsynchronously = false + self.rightKnob.displayWithoutProcessing = true + self.rightKnob.alpha = 0.0 + + self.highlightAreaNode = ASDisplayNode() + + super.init() + + self.setViewBlock({ + return RecognizedTextSelectionNodeView() + }) + + self.addSubnode(self.leftKnob) + self.addSubnode(self.rightKnob) + } + + override public func didLoad() { + super.didLoad() + + (self.view as?
RecognizedTextSelectionNodeView)?.hitTestImpl = { [weak self] point, event in + return self?.hitTest(point, with: event) + } + + let recognizer = RecognizedTextSelectionGetureRecognizer(target: nil, action: nil) + recognizer.knobAtPoint = { [weak self] point in + return self?.knobAtPoint(point) + } + recognizer.moveKnob = { [weak self] knob, point in + guard let strongSelf = self, let _ = strongSelf.selectedIndices, let currentTopLeft = strongSelf.currentTopLeft, let currentBottomRight = strongSelf.currentBottomRight else { + return + } + + let topLeftPoint: CGPoint + let bottomRightPoint: CGPoint + switch knob { + case .left: + topLeftPoint = point + bottomRightPoint = currentBottomRight + case .right: + topLeftPoint = currentTopLeft + bottomRightPoint = point + } + + let selectionRect = CGRect(x: min(topLeftPoint.x, bottomRightPoint.x), y: min(topLeftPoint.y, bottomRightPoint.y), width: max(bottomRightPoint.x, topLeftPoint.x) - min(bottomRightPoint.x, topLeftPoint.x), height: max(bottomRightPoint.y, topLeftPoint.y) - min(bottomRightPoint.y, topLeftPoint.y)) + + var i = 0 + var selectedIndices: Set<Int>? + for recognition in strongSelf.recognitions { + let rect = recognition.rect.convertTo(size: strongSelf.size, insets: UIEdgeInsets(top: -4.0, left: -2.0, bottom: -4.0, right: -2.0)) + if selectionRect.intersects(rect.boundingFrame) { + if selectedIndices == nil { + selectedIndices = Set() + } + selectedIndices?.insert(i) + } + i += 1 + } + + strongSelf.selectedIndices = selectedIndices + strongSelf.updateSelection(range: selectedIndices, animateIn: false) + } + recognizer.finishedMovingKnob = { [weak self] in + guard let strongSelf = self else { + return + } + strongSelf.displayMenu() + } + recognizer.beginSelection = { [weak self] point in + guard let strongSelf = self else { + return + } + + let _ = strongSelf.dismissSelection() + + var i = 0 + var selectedIndices: Set<Int>? + var topLeft: CGPoint? + var bottomRight: CGPoint? + for recognition in strongSelf.recognitions { + let rect = recognition.rect.convertTo(size: strongSelf.size, insets: UIEdgeInsets(top: -4.0, left: -2.0, bottom: -4.0, right: -2.0)) + if rect.boundingFrame.contains(point) { + topLeft = rect.topLeft + bottomRight = rect.bottomRight + selectedIndices = Set([i]) + break + } + i += 1 + } + strongSelf.selectedIndices = selectedIndices + strongSelf.currentTopLeft = topLeft + strongSelf.currentBottomRight = bottomRight + strongSelf.updateSelection(range: selectedIndices, animateIn: true) + + strongSelf.displayMenu() + strongSelf.updateIsActive(true) + } + recognizer.clearSelection = { [weak self] in + let _ = self?.dismissSelection() + self?.updateIsActive(false) + } + self.recognizer = recognizer + self.view.addGestureRecognizer(recognizer) + } + + public func updateLayout() { + if let selectedIndices = self.selectedIndices { + self.updateSelection(range: selectedIndices, animateIn: false) + } + } + + private func updateSelection(range: Set<Int>?, animateIn: Bool) { + var rects: [RecognizedContent.Rect]? = nil + var startEdge: (position: CGPoint, height: CGFloat)? + var endEdge: (position: CGPoint, height: CGFloat)?
+ + if let range = range { + var i = 0 + rects = [] + for recognition in self.recognitions { + let rect = recognition.rect.convertTo(size: self.size) + if range.contains(i) { + if startEdge == nil { + startEdge = (rect.leftMidPoint, rect.leftHeight) + } + rects?.append(rect) + } + i += 1 + } + + if let rect = rects?.last { + endEdge = (rect.rightMidPoint, rect.rightHeight) + } + } + + self.currentRects = rects + + if let rects = rects, let startEdge = startEdge, let endEdge = endEdge, !rects.isEmpty { + let highlightOverlay: ASImageNode + if let current = self.highlightOverlay { + highlightOverlay = current + } else { + highlightOverlay = ASImageNode() + self.highlightOverlay = highlightOverlay + self.highlightAreaNode.addSubnode(highlightOverlay) + } + highlightOverlay.frame = self.bounds + highlightOverlay.image = generateSelectionsImage(size: self.size, rects: rects, color: self.theme.selection.withAlphaComponent(1.0)) + highlightOverlay.alpha = self.theme.selection.alpha + + if let image = self.leftKnob.image { + self.leftKnob.frame = CGRect(origin: CGPoint(x: floor(startEdge.position.x - image.size.width / 2.0), y: startEdge.position.y - floorToScreenPixels(startEdge.height / 2.0) - self.theme.knobDiameter), size: CGSize(width: image.size.width, height: self.theme.knobDiameter + startEdge.height + 2.0)) + self.rightKnob.frame = CGRect(origin: CGPoint(x: floor(endEdge.position.x + 1.0 - image.size.width / 2.0), y: endEdge.position.y - floorToScreenPixels(endEdge.height / 2.0)), size: CGSize(width: image.size.width, height: self.theme.knobDiameter + endEdge.height + 2.0)) + } + if self.leftKnob.alpha.isZero { + highlightOverlay.layer.animateAlpha(from: 0.0, to: highlightOverlay.alpha, duration: 0.3, timingFunction: CAMediaTimingFunctionName.easeOut.rawValue) + self.leftKnob.alpha = 1.0 + self.leftKnob.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.14, delay: 0.19) + self.rightKnob.alpha = 1.0 + self.rightKnob.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.14, delay: 0.19) + self.leftKnob.layer.animateSpring(from: 0.5 as NSNumber, to: 1.0 as NSNumber, keyPath: "transform.scale", duration: 0.2, delay: 0.25, initialVelocity: 0.0, damping: 80.0) + self.rightKnob.layer.animateSpring(from: 0.5 as NSNumber, to: 1.0 as NSNumber, keyPath: "transform.scale", duration: 0.2, delay: 0.25, initialVelocity: 0.0, damping: 80.0) + + if animateIn { + var result = CGRect() + for rect in rects { + if result.isEmpty { + result = rect.boundingFrame + } else { + result = result.union(rect.boundingFrame) + } + } + highlightOverlay.layer.animateScale(from: 2.0, to: 1.0, duration: 0.26) + let fromResult = CGRect(origin: CGPoint(x: result.minX - result.width / 2.0, y: result.minY - result.height / 2.0), size: CGSize(width: result.width * 2.0, height: result.height * 2.0)) + highlightOverlay.layer.animatePosition(from: CGPoint(x: (-fromResult.midX + highlightOverlay.bounds.midX) / 1.0, y: (-fromResult.midY + highlightOverlay.bounds.midY) / 1.0), to: CGPoint(), duration: 0.26, additive: true) + } + } + } else if let highlightOverlay = self.highlightOverlay { + self.highlightOverlay = nil + highlightOverlay.layer.animateAlpha(from: highlightOverlay.alpha, to: 0.0, duration: 0.18, removeOnCompletion: false, completion: { [weak highlightOverlay] _ in + highlightOverlay?.removeFromSupernode() + }) + self.leftKnob.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.18) + self.leftKnob.alpha = 0.0 + self.leftKnob.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.18) + self.rightKnob.alpha = 0.0 + 
self.rightKnob.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.18) + } + } + + private func knobAtPoint(_ point: CGPoint) -> (Knob, CGPoint)? { + if !self.leftKnob.alpha.isZero, self.leftKnob.frame.insetBy(dx: -4.0, dy: -8.0).contains(point) { + return (.left, self.leftKnob.frame.offsetBy(dx: 0.0, dy: self.leftKnob.frame.width / 2.0).center) + } + if !self.rightKnob.alpha.isZero, self.rightKnob.frame.insetBy(dx: -4.0, dy: -8.0).contains(point) { + return (.right, self.rightKnob.frame.offsetBy(dx: 0.0, dy: -self.rightKnob.frame.width / 2.0).center) + } + if !self.leftKnob.alpha.isZero, self.leftKnob.frame.insetBy(dx: -14.0, dy: -14.0).contains(point) { + return (.left, self.leftKnob.frame.offsetBy(dx: 0.0, dy: self.leftKnob.frame.width / 2.0).center) + } + if !self.rightKnob.alpha.isZero, self.rightKnob.frame.insetBy(dx: -14.0, dy: -14.0).contains(point) { + return (.right, self.rightKnob.frame.offsetBy(dx: 0.0, dy: -self.rightKnob.frame.width / 2.0).center) + } + return nil + } + + public func dismissSelection() -> Bool { + if let _ = self.selectedIndices { + self.selectedIndices = nil + self.updateSelection(range: nil, animateIn: false) + return true + } else { + return false + } + } + + private func displayMenu() { + guard let currentRects = self.currentRects, !currentRects.isEmpty, let selectedIndices = self.selectedIndices else { + return + } + + var completeRect = currentRects[0].boundingFrame + for i in 0 ..< currentRects.count { + completeRect = completeRect.union(currentRects[i].boundingFrame) + } + completeRect = completeRect.insetBy(dx: 0.0, dy: -12.0) + + var selectedText = "" + for i in 0 ..< self.recognitions.count { + if selectedIndices.contains(i) { + let (string, _) = self.recognitions[i] + if !selectedText.isEmpty { + selectedText += " " + } + selectedText.append(contentsOf: string.trimmingCharacters(in: .whitespacesAndNewlines)) + } + } + + var actions: [ContextMenuAction] = [] + actions.append(ContextMenuAction(content: .text(title: self.strings.Conversation_ContextMenuCopy, accessibilityLabel: self.strings.Conversation_ContextMenuCopy), action: { [weak self] in + self?.performAction(selectedText, .copy) + let _ = self?.dismissSelection() + })) + actions.append(ContextMenuAction(content: .text(title: self.strings.Conversation_ContextMenuLookUp, accessibilityLabel: self.strings.Conversation_ContextMenuLookUp), action: { [weak self] in + self?.performAction(selectedText, .lookup) + let _ = self?.dismissSelection() + })) + if isSpeakSelectionEnabled() { + actions.append(ContextMenuAction(content: .text(title: self.strings.Conversation_ContextMenuSpeak, accessibilityLabel: self.strings.Conversation_ContextMenuSpeak), action: { [weak self] in + self?.performAction(selectedText, .speak) + let _ = self?.dismissSelection() + })) + } + actions.append(ContextMenuAction(content: .text(title: self.strings.Conversation_ContextMenuShare, accessibilityLabel: self.strings.Conversation_ContextMenuShare), action: { [weak self] in + self?.performAction(selectedText, .share) + let _ = self?.dismissSelection() + })) + + self.present(ContextMenuController(actions: actions, catchTapsOutside: false, hasHapticFeedback: false), ContextMenuControllerPresentationArguments(sourceNodeAndRect: { [weak self] in + guard let strongSelf = self, let rootNode = strongSelf.rootNode else { + return nil + } + return (strongSelf, completeRect, rootNode, rootNode.bounds) + }, bounce: false)) + } + + override public func hitTest(_ point: CGPoint, with event: UIEvent?) -> UIView? 
{ + if self.knobAtPoint(point) != nil { + return self.view + } + if self.bounds.contains(point) { + return self.view + } + return nil + } +} diff --git a/submodules/ImageContentAnalysis/BUILD b/submodules/ImageContentAnalysis/BUILD new file mode 100644 index 0000000000..07d864765b --- /dev/null +++ b/submodules/ImageContentAnalysis/BUILD @@ -0,0 +1,22 @@ +load("@build_bazel_rules_swift//swift:swift.bzl", "swift_library") + +swift_library( + name = "ImageContentAnalysis", + module_name = "ImageContentAnalysis", + srcs = glob([ + "Sources/**/*.swift", + ]), + copts = [ + #"-warnings-as-errors", + ], + deps = [ + "//submodules/Display:Display", + "//submodules/SSignalKit/SwiftSignalKit:SwiftSignalKit", + "//submodules/Postbox:Postbox", + "//submodules/TelegramCore:TelegramCore", + "//submodules/TelegramUIPreferences:TelegramUIPreferences", + ], + visibility = [ + "//visibility:public", + ], +) diff --git a/submodules/ImageContentAnalysis/Sources/ImageContentAnalysis.swift b/submodules/ImageContentAnalysis/Sources/ImageContentAnalysis.swift new file mode 100644 index 0000000000..28961f2782 --- /dev/null +++ b/submodules/ImageContentAnalysis/Sources/ImageContentAnalysis.swift @@ -0,0 +1,343 @@ +import Foundation +import UIKit +import Vision +import SwiftSignalKit +import Postbox +import TelegramCore +import TelegramUIPreferences + +private final class CachedImageRecognizedContent: Codable { + public let results: [RecognizedContent] + + public init(results: [RecognizedContent]) { + self.results = results + } + + public init(from decoder: Decoder) throws { + let container = try decoder.container(keyedBy: StringCodingKey.self) + + self.results = try container.decode([RecognizedContent].self, forKey: "results") + } + + public func encode(to encoder: Encoder) throws { + var container = encoder.container(keyedBy: StringCodingKey.self) + + try container.encode(self.results, forKey: "results") + } +} + +private func cachedImageRecognizedContent(postbox: Postbox, messageId: MessageId) -> Signal<CachedImageRecognizedContent?, NoError> { + return postbox.transaction { transaction -> CachedImageRecognizedContent? in + let key = ValueBoxKey(length: 8) + key.setInt32(0, value: messageId.namespace) + key.setInt32(4, value: messageId.id) + if let entry = transaction.retrieveItemCacheEntry(id: ItemCacheEntryId(collectionId: ApplicationSpecificItemCacheCollectionId.cachedImageRecognizedContent, key: key))?.get(CachedImageRecognizedContent.self) { + return entry + } else { + return nil + } + } +} + +private let collectionSpec = ItemCacheCollectionSpec(lowWaterItemCount: 50, highWaterItemCount: 100) + +private func updateCachedImageRecognizedContent(postbox: Postbox, messageId: MessageId, content: CachedImageRecognizedContent?)
-> Signal<Void, NoError> { + return postbox.transaction { transaction -> Void in + let key = ValueBoxKey(length: 8) + key.setInt32(0, value: messageId.namespace) + key.setInt32(4, value: messageId.id) + let id = ItemCacheEntryId(collectionId: ApplicationSpecificItemCacheCollectionId.cachedImageRecognizedContent, key: key) + if let content = content, let entry = CodableEntry(content) { + transaction.putItemCacheEntry(id: id, entry: entry, collectionSpec: collectionSpec) + } else { + transaction.removeItemCacheEntry(id: id) + } + } +} + +extension CGPoint { + func distanceTo(_ a: CGPoint) -> CGFloat { + let xDist = a.x - x + let yDist = a.y - y + return CGFloat(sqrt((xDist * xDist) + (yDist * yDist))) + } + + func midPoint(_ other: CGPoint) -> CGPoint { + return CGPoint(x: (self.x + other.x) / 2.0, y: (self.y + other.y) / 2.0) + } +} + +public struct RecognizedContent: Codable { + public enum Content { + case text(text: String, words: [(Range<String.Index>, Rect)]) + case qrCode(payload: String) + } + + public struct Rect: Codable { + struct Point: Codable { + let x: Double + let y: Double + + init(cgPoint: CGPoint) { + self.x = cgPoint.x + self.y = cgPoint.y + } + + public init(from decoder: Decoder) throws { + let container = try decoder.container(keyedBy: StringCodingKey.self) + + self.x = try container.decode(Double.self, forKey: "x") + self.y = try container.decode(Double.self, forKey: "y") + } + + public func encode(to encoder: Encoder) throws { + var container = encoder.container(keyedBy: StringCodingKey.self) + + try container.encode(self.x, forKey: "x") + try container.encode(self.y, forKey: "y") + } + + var cgPoint: CGPoint { + return CGPoint(x: self.x, y: self.y) + } + } + + public let topLeft: CGPoint + public let topRight: CGPoint + public let bottomLeft: CGPoint + public let bottomRight: CGPoint + + public var boundingFrame: CGRect { + let top: CGFloat = min(topLeft.y, topRight.y) + let left: CGFloat = min(topLeft.x, bottomLeft.x) + let right: CGFloat = max(topRight.x, bottomRight.x) + let bottom: CGFloat = max(bottomLeft.y, bottomRight.y) + return CGRect(x: left, y: top, width: abs(right - left), height: abs(bottom - top)) + } + + public var leftMidPoint: CGPoint { + return self.topLeft.midPoint(self.bottomLeft) + } + + public var leftHeight: CGFloat { + return self.topLeft.distanceTo(self.bottomLeft) + } + + public var rightMidPoint: CGPoint { + return self.topRight.midPoint(self.bottomRight) + } + + public var rightHeight: CGFloat { + return self.topRight.distanceTo(self.bottomRight) + } + + public func convertTo(size: CGSize, insets: UIEdgeInsets = UIEdgeInsets()) -> Rect { + return Rect( + topLeft: CGPoint(x: self.topLeft.x * size.width + insets.left, y: size.height - self.topLeft.y * size.height + insets.top), + topRight: CGPoint(x: self.topRight.x * size.width - insets.right, y: size.height - self.topRight.y * size.height + insets.top), + bottomLeft: CGPoint(x: self.bottomLeft.x * size.width + insets.left, y: size.height - self.bottomLeft.y * size.height - insets.bottom), + bottomRight: CGPoint(x: self.bottomRight.x * size.width - insets.right, y: size.height - self.bottomRight.y * size.height - insets.bottom) + ) + } + + public init() { + self.topLeft = CGPoint() + self.topRight = CGPoint() + self.bottomLeft = CGPoint() + self.bottomRight = CGPoint() + } + + public init(topLeft: CGPoint, topRight: CGPoint, bottomLeft: CGPoint, bottomRight: CGPoint) { + self.topLeft = topLeft + self.topRight = topRight + self.bottomLeft = bottomLeft + self.bottomRight = bottomRight + } + + @available(iOS 11.0, *) +
public init(observation: VNRectangleObservation) { + self.topLeft = observation.topLeft + self.topRight = observation.topRight + self.bottomLeft = observation.bottomLeft + self.bottomRight = observation.bottomRight + } + + public init(from decoder: Decoder) throws { + let container = try decoder.container(keyedBy: StringCodingKey.self) + + self.topLeft = try container.decode(Point.self, forKey: "topLeft").cgPoint + self.topRight = try container.decode(Point.self, forKey: "topRight").cgPoint + self.bottomLeft = try container.decode(Point.self, forKey: "bottomLeft").cgPoint + self.bottomRight = try container.decode(Point.self, forKey: "bottomRight").cgPoint + } + + public func encode(to encoder: Encoder) throws { + var container = encoder.container(keyedBy: StringCodingKey.self) + + try container.encode(Point(cgPoint: self.topLeft), forKey: "topLeft") + try container.encode(Point(cgPoint: self.topRight), forKey: "topRight") + try container.encode(Point(cgPoint: self.bottomLeft), forKey: "bottomLeft") + try container.encode(Point(cgPoint: self.bottomRight), forKey: "bottomRight") + } + } + + public let rect: Rect + public let content: Content + + @available(iOS 11.0, *) + init?(observation: VNObservation) { + if let barcode = observation as? VNBarcodeObservation, case .qr = barcode.symbology, let payload = barcode.payloadStringValue { + self.content = .qrCode(payload: payload) + self.rect = Rect(observation: barcode) + } else if #available(iOS 13.0, *), let text = observation as? VNRecognizedTextObservation, let candidate = text.topCandidates(1).first, candidate.confidence >= 0.5 { + let string = candidate.string + var words: [(Range<String.Index>, Rect)] = [] + string.enumerateSubstrings(in: string.startIndex ..< string.endIndex, options: .byWords) { _, substringRange, _, _ in + if let rectangle = try?
candidate.boundingBox(for: substringRange) { + words.append((substringRange, Rect(observation: rectangle))) + } + } + self.content = .text(text: string, words: words) + self.rect = Rect(observation: text) + } else { + return nil + } + } + + struct WordRangeAndRect: Codable { + let start: Int32 + let end: Int32 + let rect: Rect + + init(text: String, range: Range<String.Index>, rect: Rect) { + self.start = Int32(text.distance(from: text.startIndex, to: range.lowerBound)) + self.end = Int32(text.distance(from: text.startIndex, to: range.upperBound)) + self.rect = rect + } + + public init(from decoder: Decoder) throws { + let container = try decoder.container(keyedBy: StringCodingKey.self) + + self.start = try container.decode(Int32.self, forKey: "start") + self.end = try container.decode(Int32.self, forKey: "end") + self.rect = try container.decode(Rect.self, forKey: "rect") + } + + public func encode(to encoder: Encoder) throws { + var container = encoder.container(keyedBy: StringCodingKey.self) + + try container.encode(self.start, forKey: "start") + try container.encode(self.end, forKey: "end") + try container.encode(self.rect, forKey: "rect") + } + + func toRangeWithRect(text: String) -> (Range<String.Index>, Rect) { + return (text.index(text.startIndex, offsetBy: Int(self.start)) ..< text.index(text.startIndex, offsetBy: Int(self.end)), self.rect) + } + } + + public init(from decoder: Decoder) throws { + let container = try decoder.container(keyedBy: StringCodingKey.self) + + let type = try container.decode(Int32.self, forKey: "t") + if type == 0 { + let text = try container.decode(String.self, forKey: "text") + let rangesWithRects = try container.decode([WordRangeAndRect].self, forKey: "words") + let words = rangesWithRects.map { $0.toRangeWithRect(text: text) } + self.content = .text(text: text, words: words) + self.rect = try container.decode(Rect.self, forKey: "rect") + } else if type == 1 { + let payload = try container.decode(String.self, forKey: "payload") + self.content = .qrCode(payload: payload) + self.rect = try container.decode(Rect.self, forKey: "rect") + } else { + assertionFailure() + self.content = .text(text: "", words: []) + self.rect = Rect() + } + } + + public func encode(to encoder: Encoder) throws { + var container = encoder.container(keyedBy: StringCodingKey.self) + + switch self.content { + case let .text(text, words): + try container.encode(Int32(0), forKey: "t") + try container.encode(text, forKey: "text") + + let rangesWithRects: [WordRangeAndRect] = words.map { WordRangeAndRect(text: text, range: $0.0, rect: $0.1) } + try container.encode(rangesWithRects, forKey: "words") + try container.encode(rect, forKey: "rect") + case let .qrCode(payload): + try container.encode(Int32(1), forKey: "t") + try container.encode(payload, forKey: "payload") + try container.encode(rect, forKey: "rect") + } + } +} + +private func recognizeContent(in image: UIImage) -> Signal<[RecognizedContent], NoError> { + if #available(iOS 11.0, *) { + guard let cgImage = image.cgImage else { + return .complete() + } + return Signal { subscriber in + var requests: [VNRequest] = [] + + let barcodeResult = Atomic<[RecognizedContent]?>(value: nil) + let textResult = Atomic<[RecognizedContent]?>(value: nil) + + let completion = { + let barcode = barcodeResult.with { $0 } + let text = textResult.with { $0 } + + if let barcode = barcode, let text = text { + subscriber.putNext(barcode + text) + subscriber.putCompletion() + } + } + + let barcodeRequest = VNDetectBarcodesRequest { request, error in + let mappedResults =
request.results?.compactMap { RecognizedContent(observation: $0) } ?? [] + let _ = barcodeResult.swap(mappedResults) + completion() + } + requests.append(barcodeRequest) + + if #available(iOS 13.0, *) { + let textRequest = VNRecognizeTextRequest { request, error in + let mappedResults = request.results?.compactMap { RecognizedContent(observation: $0) } ?? [] + let _ = textResult.swap(mappedResults) + completion() + } + textRequest.usesLanguageCorrection = true + requests.append(textRequest) + } else { + let _ = textResult.swap([]) + } + + let handler = VNImageRequestHandler(cgImage: cgImage, options: [:]) + try? handler.perform(requests) + + return ActionDisposable { + + } + } + } else { + return .single([]) + } +} + +public func recognizedContent(postbox: Postbox, image: UIImage, messageId: MessageId) -> Signal<[RecognizedContent], NoError> { + return cachedImageRecognizedContent(postbox: postbox, messageId: messageId) + |> mapToSignal { cachedContent -> Signal<[RecognizedContent], NoError> in + if let cachedContent = cachedContent { + return .single(cachedContent.results) + } else { + return recognizeContent(in: image) + |> beforeNext { results in + let _ = updateCachedImageRecognizedContent(postbox: postbox, messageId: messageId, content: CachedImageRecognizedContent(results: results)).start() + } + } + } +} diff --git a/submodules/SettingsUI/Sources/Privacy and Security/Recent Sessions/RecentSessionsController.swift b/submodules/SettingsUI/Sources/Privacy and Security/Recent Sessions/RecentSessionsController.swift index 63ea185624..340cfaed54 100644 --- a/submodules/SettingsUI/Sources/Privacy and Security/Recent Sessions/RecentSessionsController.swift +++ b/submodules/SettingsUI/Sources/Privacy and Security/Recent Sessions/RecentSessionsController.swift @@ -761,7 +761,7 @@ public func recentSessionsController(context: AccountContext, activeSessionsCont pushControllerImpl?(AuthTransferScanScreen(context: context, activeSessionsContext: activeSessionsContext)) }) }, openOtherAppsUrl: { - context.sharedContext.openExternalUrl(context: context, urlContext: .generic, url: "https://getdesktop.telegram.org", forceExternal: true, presentationData: context.sharedContext.currentPresentationData.with { $0 }, navigationController: nil, dismissInput: {}) + context.sharedContext.openExternalUrl(context: context, urlContext: .generic, url: "https://telegram.org/apps", forceExternal: true, presentationData: context.sharedContext.currentPresentationData.with { $0 }, navigationController: nil, dismissInput: {}) }, setupAuthorizationTTL: { let presentationData = context.sharedContext.currentPresentationData.with { $0 } let controller = ActionSheetController(presentationData: presentationData) diff --git a/submodules/SettingsUI/Sources/Privacy and Security/RecentSessionScreen.swift b/submodules/SettingsUI/Sources/Privacy and Security/RecentSessionScreen.swift index e1266b3e70..9139af6d9b 100644 --- a/submodules/SettingsUI/Sources/Privacy and Security/RecentSessionScreen.swift +++ b/submodules/SettingsUI/Sources/Privacy and Security/RecentSessionScreen.swift @@ -563,6 +563,8 @@ private class RecentSessionScreenNode: ViewControllerTracingNode, UIScrollViewDe } let previousTheme = self.presentationData.theme self.presentationData = presentationData + + self.contentBackgroundNode.backgroundColor = self.presentationData.theme.list.blocksBackgroundColor self.titleNode.attributedText = NSAttributedString(string: self.titleNode.attributedText?.string ?? 
"", font: Font.regular(30.0), textColor: self.presentationData.theme.list.itemPrimaryTextColor) diff --git a/submodules/TelegramPresentationData/Sources/PresentationThemeEssentialGraphics.swift b/submodules/TelegramPresentationData/Sources/PresentationThemeEssentialGraphics.swift index 8ae310243c..2f64a23ff3 100644 --- a/submodules/TelegramPresentationData/Sources/PresentationThemeEssentialGraphics.swift +++ b/submodules/TelegramPresentationData/Sources/PresentationThemeEssentialGraphics.swift @@ -532,6 +532,7 @@ public final class PrincipalThemeAdditionalGraphics { public let chatBubbleActionButtonIncomingPhoneIconImage: UIImage public let chatBubbleActionButtonIncomingLocationIconImage: UIImage public let chatBubbleActionButtonIncomingPaymentIconImage: UIImage + public let chatBubbleActionButtonIncomingProfileIconImage: UIImage public let chatBubbleActionButtonOutgoingMessageIconImage: UIImage public let chatBubbleActionButtonOutgoingLinkIconImage: UIImage @@ -539,6 +540,7 @@ public final class PrincipalThemeAdditionalGraphics { public let chatBubbleActionButtonOutgoingPhoneIconImage: UIImage public let chatBubbleActionButtonOutgoingLocationIconImage: UIImage public let chatBubbleActionButtonOutgoingPaymentIconImage: UIImage + public let chatBubbleActionButtonOutgoingProfileIconImage: UIImage public let chatEmptyItemLockIcon: UIImage public let emptyChatListCheckIcon: UIImage @@ -581,12 +583,14 @@ public final class PrincipalThemeAdditionalGraphics { self.chatBubbleActionButtonIncomingPhoneIconImage = generateTintedImage(image: UIImage(bundleImageName: "Chat/Message/BotPhone"), color: bubbleVariableColor(variableColor: theme.message.incoming.actionButtonsTextColor, wallpaper: wallpaper))! self.chatBubbleActionButtonIncomingLocationIconImage = generateTintedImage(image: UIImage(bundleImageName: "Chat/Message/BotLocation"), color: bubbleVariableColor(variableColor: theme.message.incoming.actionButtonsTextColor, wallpaper: wallpaper))! self.chatBubbleActionButtonIncomingPaymentIconImage = generateTintedImage(image: UIImage(bundleImageName: "Chat/Message/BotPayment"), color: bubbleVariableColor(variableColor: theme.message.incoming.actionButtonsTextColor, wallpaper: wallpaper))! + self.chatBubbleActionButtonIncomingProfileIconImage = generateTintedImage(image: UIImage(bundleImageName: "Chat/Message/BotProfile"), color: bubbleVariableColor(variableColor: theme.message.incoming.actionButtonsTextColor, wallpaper: wallpaper))! self.chatBubbleActionButtonOutgoingMessageIconImage = generateTintedImage(image: UIImage(bundleImageName: "Chat/Message/BotMessage"), color: bubbleVariableColor(variableColor: theme.message.outgoing.actionButtonsTextColor, wallpaper: wallpaper))! self.chatBubbleActionButtonOutgoingLinkIconImage = generateTintedImage(image: UIImage(bundleImageName: "Chat/Message/BotLink"), color: bubbleVariableColor(variableColor: theme.message.outgoing.actionButtonsTextColor, wallpaper: wallpaper))! self.chatBubbleActionButtonOutgoingShareIconImage = generateTintedImage(image: UIImage(bundleImageName: "Chat/Message/BotShare"), color: bubbleVariableColor(variableColor: theme.message.outgoing.actionButtonsTextColor, wallpaper: wallpaper))! self.chatBubbleActionButtonOutgoingPhoneIconImage = generateTintedImage(image: UIImage(bundleImageName: "Chat/Message/BotPhone"), color: bubbleVariableColor(variableColor: theme.message.outgoing.actionButtonsTextColor, wallpaper: wallpaper))! 
        self.chatBubbleActionButtonOutgoingLocationIconImage = generateTintedImage(image: UIImage(bundleImageName: "Chat/Message/BotLocation"), color: bubbleVariableColor(variableColor: theme.message.outgoing.actionButtonsTextColor, wallpaper: wallpaper))!
        self.chatBubbleActionButtonOutgoingPaymentIconImage = generateTintedImage(image: UIImage(bundleImageName: "Chat/Message/BotPayment"), color: bubbleVariableColor(variableColor: theme.message.outgoing.actionButtonsTextColor, wallpaper: wallpaper))!
+        self.chatBubbleActionButtonOutgoingProfileIconImage = generateTintedImage(image: UIImage(bundleImageName: "Chat/Message/BotProfile"), color: bubbleVariableColor(variableColor: theme.message.outgoing.actionButtonsTextColor, wallpaper: wallpaper))!
        
        self.chatEmptyItemLockIcon = generateImage(CGSize(width: 9.0, height: 13.0), rotatedContext: { size, context in
            context.clear(CGRect(origin: CGPoint(), size: size))
diff --git a/submodules/TelegramUI/Images.xcassets/Chat/Message/BotProfile.imageset/BotMessage@3x.png b/submodules/TelegramUI/Images.xcassets/Chat/Message/BotProfile.imageset/BotMessage@3x.png
new file mode 100644
index 0000000000..a896a845d6
Binary files /dev/null and b/submodules/TelegramUI/Images.xcassets/Chat/Message/BotProfile.imageset/BotMessage@3x.png differ
diff --git a/submodules/TelegramUI/Images.xcassets/Chat/Message/BotProfile.imageset/Contents.json b/submodules/TelegramUI/Images.xcassets/Chat/Message/BotProfile.imageset/Contents.json
new file mode 100644
index 0000000000..cd6ffbe035
--- /dev/null
+++ b/submodules/TelegramUI/Images.xcassets/Chat/Message/BotProfile.imageset/Contents.json
@@ -0,0 +1,21 @@
+{
+  "images" : [
+    {
+      "idiom" : "universal",
+      "scale" : "1x"
+    },
+    {
+      "idiom" : "universal",
+      "scale" : "2x"
+    },
+    {
+      "filename" : "BotMessage@3x.png",
+      "idiom" : "universal",
+      "scale" : "3x"
+    }
+  ],
+  "info" : {
+    "author" : "xcode",
+    "version" : 1
+  }
+}
diff --git a/submodules/TelegramUI/Sources/ChatMessageActionButtonsNode.swift b/submodules/TelegramUI/Sources/ChatMessageActionButtonsNode.swift
index c3697c6713..ee7a76074c 100644
--- a/submodules/TelegramUI/Sources/ChatMessageActionButtonsNode.swift
+++ b/submodules/TelegramUI/Sources/ChatMessageActionButtonsNode.swift
@@ -106,6 +106,8 @@ private final class ChatMessageActionButtonNode: ASDisplayNode {
                iconImage = incoming ? graphics.chatBubbleActionButtonIncomingShareIconImage : graphics.chatBubbleActionButtonOutgoingShareIconImage
            case .payment:
                iconImage = incoming ? graphics.chatBubbleActionButtonIncomingPaymentIconImage : graphics.chatBubbleActionButtonOutgoingPaymentIconImage
+            case .openUserProfile:
+                iconImage = incoming ? graphics.chatBubbleActionButtonIncomingProfileIconImage : graphics.chatBubbleActionButtonOutgoingProfileIconImage
            default:
                iconImage = nil
        }
diff --git a/submodules/TelegramUI/Sources/PeerSelectionTextInputPanelNode.swift b/submodules/TelegramUI/Sources/PeerSelectionTextInputPanelNode.swift
index 8552a32d3d..98cdb6225b 100644
--- a/submodules/TelegramUI/Sources/PeerSelectionTextInputPanelNode.swift
+++ b/submodules/TelegramUI/Sources/PeerSelectionTextInputPanelNode.swift
@@ -716,7 +716,13 @@ class PeerSelectionTextInputPanelNode: ChatInputPanelNode, TGCaptionPanelView, A
    }
    
    private func updateCounterTextNode(transition: ContainedViewLayoutTransition) {
-        if let textInputNode = self.textInputNode, let presentationInterfaceState = self.presentationInterfaceState, let editMessage = presentationInterfaceState.interfaceState.editMessage, let inputTextMaxLength = editMessage.inputTextMaxLength {
+        let inputTextMaxLength: Int?
+        if self.isCaption {
+            inputTextMaxLength = self.context?.currentLimitsConfiguration.with { $0 }.maxMediaCaptionLength
+        } else {
+            inputTextMaxLength = nil
+        }
+        if let textInputNode = self.textInputNode, let presentationInterfaceState = self.presentationInterfaceState, let inputTextMaxLength = inputTextMaxLength {
            let textCount = Int32(textInputNode.textView.text.count)
            let counterColor: UIColor = textCount > inputTextMaxLength ? presentationInterfaceState.theme.chat.inputPanel.panelControlDestructiveColor : presentationInterfaceState.theme.chat.inputPanel.panelControlColor
@@ -1028,7 +1034,13 @@ class PeerSelectionTextInputPanelNode: ChatInputPanelNode, TGCaptionPanelView, A
            sendPressed(effectiveInputText)
            return
        }
        
-        if let textInputNode = self.textInputNode, let presentationInterfaceState = self.presentationInterfaceState, let editMessage = presentationInterfaceState.interfaceState.editMessage, let inputTextMaxLength = editMessage.inputTextMaxLength {
+        let inputTextMaxLength: Int?
+        if self.isCaption {
+            inputTextMaxLength = self.context?.currentLimitsConfiguration.with { $0 }.maxMediaCaptionLength
+        } else {
+            inputTextMaxLength = nil
+        }
+        if let textInputNode = self.textInputNode, let presentationInterfaceState = self.presentationInterfaceState, let inputTextMaxLength = inputTextMaxLength {
            let textCount = Int32(textInputNode.textView.text.count)
            let remainingCount = inputTextMaxLength - textCount
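Aside (not part of the patch): with this change the caption panel takes its limit from maxMediaCaptionLength in the limits configuration rather than from editMessage.inputTextMaxLength. The arithmetic both hunks rely on reduces to the following; this is a simplified sketch, not the panel code itself.

    // Simplified sketch of the counter logic above: how many characters remain,
    // and whether the destructive counter color should be used.
    func captionCounterState(textCount: Int32, inputTextMaxLength: Int32) -> (remainingCount: Int32, isOverLimit: Bool) {
        let remainingCount = inputTextMaxLength - textCount
        return (remainingCount, textCount > inputTextMaxLength)
    }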
diff --git a/submodules/TelegramUIPreferences/Sources/PostboxKeys.swift b/submodules/TelegramUIPreferences/Sources/PostboxKeys.swift
index 8e1dba89fd..73e2effe84 100644
--- a/submodules/TelegramUIPreferences/Sources/PostboxKeys.swift
+++ b/submodules/TelegramUIPreferences/Sources/PostboxKeys.swift
@@ -65,6 +65,7 @@ private enum ApplicationSpecificItemCacheCollectionIdValues: Int8 {
    case mediaPlaybackStoredState = 3
    case cachedGeocodes = 4
    case visualMediaStoredState = 5
+    case cachedImageRecognizedContent = 6
}

public struct ApplicationSpecificItemCacheCollectionId {
@@ -74,6 +75,7 @@ public struct ApplicationSpecificItemCacheCollectionId {
    public static let mediaPlaybackStoredState = applicationSpecificItemCacheCollectionId(ApplicationSpecificItemCacheCollectionIdValues.mediaPlaybackStoredState.rawValue)
    public static let cachedGeocodes = applicationSpecificItemCacheCollectionId(ApplicationSpecificItemCacheCollectionIdValues.cachedGeocodes.rawValue)
    public static let visualMediaStoredState = applicationSpecificItemCacheCollectionId(ApplicationSpecificItemCacheCollectionIdValues.visualMediaStoredState.rawValue)
+    public static let cachedImageRecognizedContent = applicationSpecificItemCacheCollectionId(ApplicationSpecificItemCacheCollectionIdValues.cachedImageRecognizedContent.rawValue)
}

private enum ApplicationSpecificOrderedItemListCollectionIdValues: Int32 {
@@ -81,7 +83,6 @@
    case wallpaperSearchRecentQueries = 1
    case settingsSearchRecentItems = 2
    case localThemes = 3
-    case visualMediaStoredState = 4
}

public struct ApplicationSpecificOrderedItemListCollectionId {