commit a5899ef4e7
parent 7c7b91194d
Author: Ali
Date: 2020-02-19 18:22:42 +04:00
7 changed files with 133 additions and 69 deletions

View File

@@ -5344,3 +5344,5 @@ Any member of this group will be able to see messages in the channel.";
"PeerInfo.AddToContacts" = "Add to Contacts";
"PeerInfo.BioExpand" = "more";
"External.OpenIn" = "Open in %@";

View File

@@ -274,15 +274,7 @@ private final class NavigationButtonItemNode: ASTextNode {
}
if shouldChangeHighlight {
if let imageNode = self.imageNode {
let previousAlpha = self.imageRippleNode.alpha
self.imageRippleNode.alpha = highlighted ? 1.0 : 0.0
if !highlighted {
self.imageRippleNode.layer.animateAlpha(from: previousAlpha, to: self.imageRippleNode.alpha, duration: 0.25)
}
} else {
self.alpha = !self.isEnabled ? 1.0 : (highlighted ? 0.4 : 1.0)
}
self.alpha = !self.isEnabled ? 1.0 : (highlighted ? 0.4 : 1.0)
self.highlightChanged(highlighted)
}
}
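
Note: after this change the highlight handling reduces to a single alpha rule. A minimal standalone sketch of that rule (the helper name is an assumption, not part of the commit):

    func highlightAlpha(isEnabled: Bool, highlighted: Bool) -> CGFloat {
        // Disabled buttons keep full opacity; enabled buttons dim to 40% while pressed.
        if !isEnabled {
            return 1.0
        }
        return highlighted ? 0.4 : 1.0
    }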

View File

@@ -76,8 +76,9 @@ public final class GalleryPagerNode: ASDisplayNode, UIScrollViewDelegate, UIGest
private let leftFadeNode: ASImageNode
private let rightFadeNode: ASImageNode
private var highlightedSide: Bool?
private var pressGestureRecognizer: UILongPressGestureRecognizer?
private var tapRecognizer: TapLongTapOrDoubleTapGestureRecognizer?
public private(set) var items: [GalleryItem] = []
private var itemNodes: [GalleryItemNode] = []
@@ -139,46 +140,82 @@ public final class GalleryPagerNode: ASDisplayNode, UIScrollViewDelegate, UIGest
public override func didLoad() {
super.didLoad()
let gestureRecognizer = UILongPressGestureRecognizer(target: self, action: #selector(self.pressGesture(_:)))
gestureRecognizer.delegate = self
gestureRecognizer.minimumPressDuration = 0.01
self.view.addGestureRecognizer(gestureRecognizer)
self.pressGestureRecognizer = gestureRecognizer
let recognizer = TapLongTapOrDoubleTapGestureRecognizer(target: self, action: #selector(self.tapLongTapOrDoubleTapGesture(_:)))
recognizer.delegate = self
self.tapRecognizer = recognizer
recognizer.tapActionAtPoint = { _ in
return .keepWithSingleTap
}
recognizer.highlight = { [weak self] point in
guard let strongSelf = self else {
return
}
let size = strongSelf.bounds
var highlightedSide: Bool?
if let point = point {
if point.x < size.width * 1.0 / 5.0 {
if strongSelf.items.count > 1 {
highlightedSide = false
}
} else {
if strongSelf.items.count > 1 {
highlightedSide = true
}
}
}
if strongSelf.highlightedSide != highlightedSide {
strongSelf.highlightedSide = highlightedSide
let leftAlpha: CGFloat
let rightAlpha: CGFloat
if let highlightedSide = highlightedSide {
leftAlpha = highlightedSide ? 0.0 : 1.0
rightAlpha = highlightedSide ? 1.0 : 0.0
} else {
leftAlpha = 0.0
rightAlpha = 0.0
}
if strongSelf.leftFadeNode.alpha != leftAlpha {
strongSelf.leftFadeNode.alpha = leftAlpha
if leftAlpha.isZero {
strongSelf.leftFadeNode.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.16, timingFunction: kCAMediaTimingFunctionSpring)
} else {
strongSelf.leftFadeNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.08)
}
}
if strongSelf.rightFadeNode.alpha != rightAlpha {
strongSelf.rightFadeNode.alpha = rightAlpha
if rightAlpha.isZero {
strongSelf.rightFadeNode.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.16, timingFunction: kCAMediaTimingFunctionSpring)
} else {
strongSelf.rightFadeNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.08)
}
}
}
}
self.view.addGestureRecognizer(recognizer)
}
public func gestureRecognizer(_ gestureRecognizer: UIGestureRecognizer, shouldRecognizeSimultaneouslyWith otherGestureRecognizer: UIGestureRecognizer) -> Bool {
return true
}
@objc private func pressGesture(_ gestureRecognizer: UILongPressGestureRecognizer) {
let edgeWidth: CGFloat = 44.0
let location = gestureRecognizer.location(in: gestureRecognizer.view)
switch gestureRecognizer.state {
case .began:
let transition: ContainedViewLayoutTransition = .animated(duration: 0.07, curve: .easeInOut)
if location.x < edgeWidth && self.canGoToPreviousItem() {
transition.updateAlpha(node: self.leftFadeNode, alpha: 1.0)
} else if location.x > self.frame.width - edgeWidth && self.canGoToNextItem() {
transition.updateAlpha(node: self.rightFadeNode, alpha: 1.0)
@objc private func tapLongTapOrDoubleTapGesture(_ recognizer: TapLongTapOrDoubleTapGestureRecognizer) {
switch recognizer.state {
case .ended:
if let (gesture, location) = recognizer.lastRecognizedGestureAndLocation {
if case .tap = gesture {
let size = self.bounds.size
if location.x < size.width * 1.0 / 5.0 && self.canGoToPreviousItem() {
self.goToPreviousItem()
} else if self.canGoToNextItem() {
self.goToNextItem()
}
}
case .ended:
let transition: ContainedViewLayoutTransition = .animated(duration: 0.1, curve: .easeInOut)
if location.x < edgeWidth && self.canGoToPreviousItem() {
transition.updateAlpha(node: self.leftFadeNode, alpha: 0.0)
self.goToPreviousItem()
} else if location.x > self.frame.width - edgeWidth && self.canGoToNextItem() {
transition.updateAlpha(node: self.rightFadeNode, alpha: 0.0)
self.goToNextItem()
}
case .cancelled:
let transition: ContainedViewLayoutTransition = .animated(duration: 0.1, curve: .easeInOut)
if location.x < edgeWidth {
transition.updateAlpha(node: self.leftFadeNode, alpha: 0.0)
} else if location.x > self.frame.width - edgeWidth {
transition.updateAlpha(node: self.rightFadeNode, alpha: 0.0)
}
default:
break
}
default:
break
}
}
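
Note: the replacement recognizer above treats a tap in the leading fifth of the pager as "go to previous item" and any other tap as "go to next item". A minimal sketch of that zone logic using a plain UITapGestureRecognizer (the PagerTapNavigator type and its closures are assumptions for illustration, not Telegram's TapLongTapOrDoubleTapGestureRecognizer):

    import UIKit

    final class PagerTapNavigator {
        var canGoToPreviousItem: () -> Bool = { false }
        var canGoToNextItem: () -> Bool = { false }
        var goToPreviousItem: () -> Void = {}
        var goToNextItem: () -> Void = {}

        @objc func handleTap(_ recognizer: UITapGestureRecognizer) {
            guard recognizer.state == .ended, let view = recognizer.view else { return }
            let location = recognizer.location(in: view)
            // The left fifth of the view pages back; everything else pages forward.
            if location.x < view.bounds.width / 5.0, canGoToPreviousItem() {
                goToPreviousItem()
            } else if canGoToNextItem() {
                goToNextItem()
            }
        }
    }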

View File

@@ -231,7 +231,22 @@ final class ChatImageGalleryItemNode: ZoomableContentGalleryItemNode {
if let largestSize = largestRepresentationForPhoto(imageReference.media) {
let displaySize = largestSize.dimensions.cgSize.fitted(CGSize(width: 1280.0, height: 1280.0)).dividedByScreenScale().integralFloor
self.imageNode.asyncLayout()(TransformImageArguments(corners: ImageCorners(), imageSize: displaySize, boundingSize: displaySize, intrinsicInsets: UIEdgeInsets()))()
self.imageNode.setSignal(chatMessagePhoto(postbox: context.account.postbox, photoReference: imageReference), dispatchOnDisplayLink: false)
let signal: Signal<(TransformImageArguments) -> DrawingContext?, NoError> = chatMessagePhotoInternal(photoData: chatMessagePhotoDatas(postbox: self.context.account.postbox, photoReference: imageReference, tryAdditionalRepresentations: true, synchronousLoad: false), synchronousLoad: false)
|> map { [weak self] _, quality, generate -> (TransformImageArguments) -> DrawingContext? in
Queue.mainQueue().async {
guard let strongSelf = self else {
return
}
switch quality {
case .medium, .full:
strongSelf.statusNodeContainer.isHidden = true
case .none, .blurred:
strongSelf.statusNodeContainer.isHidden = false
}
}
return generate
}
self.imageNode.setSignal(signal)
self.zoomableContent = (largestSize.dimensions.cgSize, self.imageNode)
self.fetchDisposable.set(fetchedMediaResource(mediaBox: self.context.account.postbox.mediaBox, reference: imageReference.resourceReference(largestSize.resource)).start())
@@ -401,7 +416,7 @@ final class ChatImageGalleryItemNode: ZoomableContentGalleryItemNode {
copyView.layer.animate(from: NSValue(caTransform3D: CATransform3DIdentity), to: NSValue(caTransform3D: CATransform3DMakeScale(scale.width, scale.height, 1.0)), keyPath: "transform", timingFunction: kCAMediaTimingFunctionSpring, duration: 0.25, removeOnCompletion: false)
if let transformedSurfaceFrame = transformedSurfaceFrame, let transformedSurfaceFinalFrame = transformedSurfaceFinalFrame {
surfaceCopyView.layer.animatePosition(from: CGPoint(x: transformedSurfaceFrame.midX, y: transformedSurfaceFrame.midY), to: CGPoint(x: transformedCopyViewFinalFrame.midX, y: transformedCopyViewFinalFrame.midY), duration: positionDuration, timingFunction: kCAMediaTimingFunctionSpring, removeOnCompletion: false, completion: { [weak surfaceCopyView] _ in
surfaceCopyView.layer.animatePosition(from: CGPoint(x: transformedSurfaceFrame.midX, y: transformedSurfaceFrame.midY), to: CGPoint(x: transformedSurfaceFinalFrame.midX, y: transformedSurfaceFinalFrame.midY), duration: positionDuration, timingFunction: kCAMediaTimingFunctionSpring, removeOnCompletion: false, completion: { [weak surfaceCopyView] _ in
surfaceCopyView?.removeFromSuperview()
})
let scale = CGSize(width: transformedSurfaceFinalFrame.size.width / transformedSurfaceFrame.size.width, height: transformedSurfaceFinalFrame.size.height / transformedSurfaceFrame.size.height)
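
Note: the new signal above hides the gallery's progress indicator as soon as a usable image is available. A small sketch of that quality-to-visibility rule, pulled out as a standalone helper (the helper name is an assumption, not part of the commit):

    func shouldHideStatusNode(for quality: ChatMessagePhotoQuality) -> Bool {
        switch quality {
        case .medium, .full:
            // A real image is on screen, so the loading indicator can go away.
            return true
        case .none, .blurred:
            // Only a placeholder or blurred thumbnail is shown; keep the indicator visible.
            return false
        }
    }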

View File

@@ -30,16 +30,16 @@ public func largestRepresentationForPhoto(_ photo: TelegramMediaImage) -> Telegr
return photo.representationForDisplayAtSize(PixelDimensions(width: 1280, height: 1280))
}
public func chatMessagePhotoDatas(postbox: Postbox, photoReference: ImageMediaReference, fullRepresentationSize: CGSize = CGSize(width: 1280.0, height: 1280.0), autoFetchFullSize: Bool = false, tryAdditionalRepresentations: Bool = false, synchronousLoad: Bool = false) -> Signal<Tuple3<Data?, Data?, Bool>, NoError> {
public func chatMessagePhotoDatas(postbox: Postbox, photoReference: ImageMediaReference, fullRepresentationSize: CGSize = CGSize(width: 1280.0, height: 1280.0), autoFetchFullSize: Bool = false, tryAdditionalRepresentations: Bool = false, synchronousLoad: Bool = false) -> Signal<Tuple4<Data?, Data?, ChatMessagePhotoQuality, Bool>, NoError> {
if let smallestRepresentation = smallestImageRepresentation(photoReference.media.representations), let largestRepresentation = photoReference.media.representationForDisplayAtSize(PixelDimensions(width: Int32(fullRepresentationSize.width), height: Int32(fullRepresentationSize.height))) {
let maybeFullSize = postbox.mediaBox.resourceData(largestRepresentation.resource, option: .complete(waitUntilFetchStatus: false), attemptSynchronously: synchronousLoad)
let signal = maybeFullSize
|> take(1)
|> mapToSignal { maybeData -> Signal<Tuple3<Data?, Data?, Bool>, NoError> in
|> mapToSignal { maybeData -> Signal<Tuple4<Data?, Data?, ChatMessagePhotoQuality, Bool>, NoError> in
if maybeData.complete {
let loadedData: Data? = try? Data(contentsOf: URL(fileURLWithPath: maybeData.path), options: [])
return .single(Tuple(nil, loadedData, true))
return .single(Tuple(nil, loadedData, .full, true))
} else {
let decodedThumbnailData = photoReference.media.immediateThumbnailData.flatMap(decodeTinyThumbnail)
let fetchedThumbnail: Signal<FetchResourceSourceType, FetchResourceError>
@@ -50,13 +50,20 @@ public func chatMessagePhotoDatas(postbox: Postbox, photoReference: ImageMediaRe
}
let fetchedFullSize = fetchedMediaResource(mediaBox: postbox.mediaBox, reference: photoReference.resourceReference(largestRepresentation.resource), statsCategory: .image)
let anyThumbnail: [Signal<MediaResourceData, NoError>]
let anyThumbnail: [Signal<(MediaResourceData, ChatMessagePhotoQuality), NoError>]
if tryAdditionalRepresentations {
anyThumbnail = photoReference.media.representations.filter({ representation in
return representation != largestRepresentation
}).map({ representation -> Signal<MediaResourceData, NoError> in
}).map({ representation -> Signal<(MediaResourceData, ChatMessagePhotoQuality), NoError> in
return postbox.mediaBox.resourceData(representation.resource)
|> take(1)
|> map { data -> (MediaResourceData, ChatMessagePhotoQuality) in
if representation.dimensions.width > 200 || representation.dimensions.height > 200 {
return (data, .medium)
} else {
return (data, .blurred)
}
}
})
} else {
anyThumbnail = []
@@ -81,13 +88,16 @@ public func chatMessagePhotoDatas(postbox: Postbox, photoReference: ImageMediaRe
}
let thumbnail = combineLatest(anyThumbnail)
|> mapToSignal { thumbnails -> Signal<Data?, NoError> in
for thumbnail in thumbnails {
|> mapToSignal { thumbnails -> Signal<(Data, ChatMessagePhotoQuality)?, NoError> in
for (thumbnail, quality) in thumbnails {
if thumbnail.size != 0, let data = try? Data(contentsOf: URL(fileURLWithPath: thumbnail.path), options: []) {
return .single(data)
return .single((data, quality))
}
}
return mainThumbnail
|> map { data -> (Data, ChatMessagePhotoQuality)? in
return data.flatMap { ($0, .blurred) }
}
}
let fullSizeData: Signal<Tuple2<Data?, Bool>, NoError>
@@ -113,13 +123,13 @@ public func chatMessagePhotoDatas(postbox: Postbox, photoReference: ImageMediaRe
return thumbnail
|> mapToSignal { thumbnailData in
if let thumbnailData = thumbnailData {
if let (thumbnailData, thumbnailQuality) = thumbnailData {
return fullSizeData
|> map { value in
return Tuple(thumbnailData, value._0, value._1)
return Tuple(thumbnailData, value._0, value._1 ? .full : thumbnailQuality, value._1)
}
} else {
return .single(Tuple(thumbnailData, nil, false))
return .single(Tuple(nil, nil, .none, false))
}
}
}
@@ -408,7 +418,7 @@ public func rawMessagePhoto(postbox: Postbox, photoReference: ImageMediaReferenc
|> map { value -> UIImage? in
let thumbnailData = value._0
let fullSizeData = value._1
let fullSizeComplete = value._2
let fullSizeComplete = value._3
if let fullSizeData = fullSizeData {
if fullSizeComplete {
return UIImage(data: fullSizeData)?.precomposed()
@@ -423,20 +433,28 @@ public func rawMessagePhoto(postbox: Postbox, photoReference: ImageMediaReferenc
public func chatMessagePhoto(postbox: Postbox, photoReference: ImageMediaReference, synchronousLoad: Bool = false) -> Signal<(TransformImageArguments) -> DrawingContext?, NoError> {
return chatMessagePhotoInternal(photoData: chatMessagePhotoDatas(postbox: postbox, photoReference: photoReference, tryAdditionalRepresentations: true, synchronousLoad: synchronousLoad), synchronousLoad: synchronousLoad)
|> map { _, generate in
|> map { _, _, generate in
return generate
}
}
public func chatMessagePhotoInternal(photoData: Signal<Tuple3<Data?, Data?, Bool>, NoError>, synchronousLoad: Bool = false) -> Signal<(() -> CGSize?, (TransformImageArguments) -> DrawingContext?), NoError> {
public enum ChatMessagePhotoQuality {
case none
case blurred
case medium
case full
}
public func chatMessagePhotoInternal(photoData: Signal<Tuple4<Data?, Data?, ChatMessagePhotoQuality, Bool>, NoError>, synchronousLoad: Bool = false) -> Signal<(() -> CGSize?, ChatMessagePhotoQuality, (TransformImageArguments) -> DrawingContext?), NoError> {
return photoData
|> map { value in
let thumbnailData = value._0
let fullSizeData = value._1
let fullSizeComplete = value._2
let quality = value._2
let fullSizeComplete = value._3
return ({
return nil
}, { arguments in
}, quality, { arguments in
let drawingRect = arguments.drawingRect
var fittedSize = arguments.imageSize
if abs(fittedSize.width - arguments.boundingSize.width).isLessThanOrEqualTo(CGFloat(1.0)) {
@@ -856,7 +874,7 @@ public func chatSecretPhoto(account: Account, photoReference: ImageMediaReferenc
|> map { value in
let thumbnailData = value._0
let fullSizeData = value._1
let fullSizeComplete = value._2
let fullSizeComplete = value._3
return { arguments in
let context = DrawingContext(size: arguments.drawingSize, clear: true)
@@ -1117,7 +1135,7 @@ public func mediaGridMessagePhoto(account: Account, photoReference: ImageMediaRe
|> map { value in
let thumbnailData = value._0
let fullSizeData = value._1
let fullSizeComplete = value._2
let fullSizeComplete = value._3
return { arguments in
let context = DrawingContext(size: arguments.drawingSize, clear: true)
@@ -1278,7 +1296,7 @@ public func internalMediaGridMessageVideo(postbox: Postbox, videoReference: File
|> map { value -> Tuple3<Data?, Tuple2<Data, String>?, Bool> in
let thumbnailData = value._0
let fullSizeData = value._1
let fullSizeComplete = value._2
let fullSizeComplete = value._3
return Tuple(thumbnailData, fullSizeData.flatMap({ Tuple($0, "") }), fullSizeComplete)
}
} else {
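
Note: in the additional-representations path above, the quality of a thumbnail is inferred purely from the representation's pixel dimensions. A sketch of that cutoff as a standalone helper (the function name is an assumption; the 200px threshold is taken from the diff):

    func thumbnailQuality(width: Int32, height: Int32) -> ChatMessagePhotoQuality {
        // Representations larger than 200px on either side are treated as .medium quality;
        // anything smaller is only good enough for a blurred placeholder.
        if width > 200 || height > 200 {
            return .medium
        } else {
            return .blurred
        }
    }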

View File

@@ -207,14 +207,14 @@ public final class NotificationViewControllerImpl {
let mediaBoxPath = accountsPath + "/" + accountRecordIdPathName(AccountRecordId(rawValue: accountIdValue)) + "/postbox/media"
if let data = try? Data(contentsOf: URL(fileURLWithPath: mediaBoxPath + "/\(largestRepresentation.resource.id.uniqueId)"), options: .mappedRead) {
self.imageNode.setSignal(chatMessagePhotoInternal(photoData: .single(Tuple(nil, data, true)))
|> map { $0.1 })
self.imageNode.setSignal(chatMessagePhotoInternal(photoData: .single(Tuple(nil, data, .full, true)))
|> map { $0.2 })
return
}
if let data = try? Data(contentsOf: URL(fileURLWithPath: mediaBoxPath + "/\(thumbnailRepresentation.resource.id.uniqueId)"), options: .mappedRead) {
self.imageNode.setSignal(chatMessagePhotoInternal(photoData: .single(Tuple(data, nil, false)))
|> map { $0.1 })
self.imageNode.setSignal(chatMessagePhotoInternal(photoData: .single(Tuple(data, nil, .medium, false)))
|> map { $0.2 })
}
guard let sharedAccountContext = sharedAccountContext else {

View File

@@ -278,7 +278,7 @@ func legacyWebSearchItem(account: Account, result: ChatContextResult) -> LegacyW
|> mapToSignal { value -> Signal<UIImage, NoError> in
let thumbnailData = value._0
let fullSizeData = value._1
let fullSizeComplete = value._2
let fullSizeComplete = value._3
if fullSizeComplete, let data = fullSizeData, let image = UIImage(data: data) {
return .single(image)