diff --git a/submodules/LegacyComponents/Sources/TGMediaAssetsController.m b/submodules/LegacyComponents/Sources/TGMediaAssetsController.m index 93f4a69c92..1b933d7183 100644 --- a/submodules/LegacyComponents/Sources/TGMediaAssetsController.m +++ b/submodules/LegacyComponents/Sources/TGMediaAssetsController.m @@ -1411,7 +1411,7 @@ bool hasAnyTimers = false; if (editingContext != nil || grouping) { - for (UIImage *asset in selectedItems) + for (id asset in selectedItems) { if ([editingContext timerForItem:asset] != nil) { hasAnyTimers = true; @@ -1434,7 +1434,7 @@ if (grouping && selectedItems.count > 1) groupedId = @([self generateGroupedId]); - for (UIImage *asset in selectedItems) + for (id asset in selectedItems) { NSAttributedString *caption = [editingContext captionForItem:asset]; if (editingContext.isForcedCaption) { @@ -1445,97 +1445,173 @@ } } - if (intent == TGMediaAssetsControllerSendFileIntent) - { - NSString *tempFileName = TGTemporaryFileName(nil); - NSData *imageData = UIImageJPEGRepresentation(asset, 1.0); - [imageData writeToURL:[NSURL fileURLWithPath:tempFileName] atomically:true]; + if ([asset isKindOfClass:[UIImage class]]) { + if (intent == TGMediaAssetsControllerSendFileIntent) + { + NSString *tempFileName = TGTemporaryFileName(nil); + NSData *imageData = UIImageJPEGRepresentation((UIImage *)asset, 1.0); + [imageData writeToURL:[NSURL fileURLWithPath:tempFileName] atomically:true]; + + NSMutableDictionary *dict = [[NSMutableDictionary alloc] init]; + dict[@"type"] = @"file"; + dict[@"tempFileUrl"] = [NSURL fileURLWithPath:tempFileName]; + dict[@"fileName"] = [NSString stringWithFormat:@"IMG%03ld.jpg", i]; + dict[@"mimeType"] = TGMimeTypeForFileUTI(@"image/jpeg"); + dict[@"previewImage"] = asset; + + if (groupedId != nil) + dict[@"groupedId"] = groupedId; + + id generatedItem = descriptionGenerator(dict, caption, nil, nil); + [signals addObject:[SSignal single:generatedItem]]; + + i++; + num++; + } else { + id adjustments = [editingContext adjustmentsForItem:asset]; + NSNumber *timer = [editingContext timerForItem:asset]; + + SSignal *inlineSignal = [[SSignal single:asset] map:^id(UIImage *image) + { + NSMutableDictionary *dict = [[NSMutableDictionary alloc] init]; + dict[@"type"] = @"editedPhoto"; + dict[@"image"] = image; + + if (timer != nil) + dict[@"timer"] = timer; + + if (groupedId != nil && !hasAnyTimers) + dict[@"groupedId"] = groupedId; + + id generatedItem = descriptionGenerator(dict, caption, nil, nil); + return generatedItem; + }]; + + SSignal *assetSignal = inlineSignal; + SSignal *imageSignal = assetSignal; + if (editingContext != nil) + { + imageSignal = [[[[[editingContext imageSignalForItem:asset withUpdates:true] filter:^bool(id result) + { + return result == nil || ([result isKindOfClass:[UIImage class]] && !((UIImage *)result).degraded); + }] take:1] mapToSignal:^SSignal *(id result) + { + if (result == nil) + { + return [SSignal fail:nil]; + } + else if ([result isKindOfClass:[UIImage class]]) + { + UIImage *image = (UIImage *)result; + image.edited = true; + return [SSignal single:image]; + } + + return [SSignal complete]; + }] onCompletion:^ + { + __strong TGMediaEditingContext *strongEditingContext = editingContext; + [strongEditingContext description]; + }]; + } + + [signals addObject:[[imageSignal map:^NSDictionary *(UIImage *image) + { + NSMutableDictionary *dict = [[NSMutableDictionary alloc] init]; + dict[@"type"] = @"editedPhoto"; + dict[@"image"] = image; + + if (adjustments.paintingData.stickers.count > 0) + dict[@"stickers"] = 
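The loop above now iterates `id` items instead of `UIImage` values because a pasted selection may mix photos and `TGCameraCapturedVideo` items (the video branch follows below); each branch assembles a dictionary describing the item (type, payload, optional `timer` and `groupedId`) and wraps it in an `SSignal` for the send pipeline. A minimal sketch of that dispatch, using simplified, hypothetical types (`PastedItem` and `MediaDescriptor` are illustrative, not part of the codebase):

```swift
import UIKit
import AVFoundation

// Illustrative stand-ins for the dictionary-based descriptors built above.
enum PastedItem {
    case image(UIImage)
    case video(AVURLAsset)
}

struct MediaDescriptor {
    var type: String
    var payload: [String: Any]
    var groupedId: Int64?
    var timer: Int32?
}

func makeDescriptor(for item: PastedItem, groupedId: Int64?, timer: Int32?, hasAnyTimers: Bool) -> MediaDescriptor {
    // Grouping (albums) is dropped as soon as any selected item carries a self-destruct timer.
    let effectiveGroup = hasAnyTimers ? nil : groupedId
    switch item {
    case let .image(image):
        // Photos keep the "editedPhoto" path.
        return MediaDescriptor(type: "editedPhoto", payload: ["image": image], groupedId: effectiveGroup, timer: timer)
    case let .video(asset):
        // Videos take the "cameraVideo" path with the asset URL as payload.
        return MediaDescriptor(type: "cameraVideo", payload: ["url": asset.url], groupedId: effectiveGroup, timer: timer)
    }
}
```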
adjustments.paintingData.stickers; + + if (timer != nil) + dict[@"timer"] = timer; + + if (groupedId != nil && !hasAnyTimers) + dict[@"groupedId"] = groupedId; + + id generatedItem = descriptionGenerator(dict, caption, nil, nil); + return generatedItem; + }] catch:^SSignal *(__unused id error) + { + return inlineSignal; + }]]; + + i++; + num++; + } + } else if ([asset isKindOfClass:[TGCameraCapturedVideo class]]) { + TGCameraCapturedVideo *video = (TGCameraCapturedVideo *)asset; - NSMutableDictionary *dict = [[NSMutableDictionary alloc] init]; - dict[@"type"] = @"file"; - dict[@"tempFileUrl"] = [NSURL fileURLWithPath:tempFileName]; - dict[@"fileName"] = [NSString stringWithFormat:@"IMG%03ld.jpg", i]; - dict[@"mimeType"] = TGMimeTypeForFileUTI(@"image/jpeg"); - dict[@"previewImage"] = asset; - - if (groupedId != nil) - dict[@"groupedId"] = groupedId; - - id generatedItem = descriptionGenerator(dict, caption, nil, nil); - [signals addObject:[SSignal single:generatedItem]]; - - i++; - num++; - } else { - id adjustments = [editingContext adjustmentsForItem:asset]; + TGVideoEditAdjustments *adjustments = (TGVideoEditAdjustments *)[editingContext adjustmentsForItem:asset]; + NSAttributedString *caption = [editingContext captionForItem:asset]; NSNumber *timer = [editingContext timerForItem:asset]; - SSignal *inlineSignal = [[SSignal single:asset] map:^id(UIImage *image) - { - NSMutableDictionary *dict = [[NSMutableDictionary alloc] init]; - dict[@"type"] = @"editedPhoto"; - dict[@"image"] = image; + UIImage *(^cropVideoThumbnail)(UIImage *, CGSize, CGSize, bool) = ^UIImage *(UIImage *image, CGSize targetSize, CGSize sourceSize, bool resize) + { + if ([adjustments cropAppliedForAvatar:false] || adjustments.hasPainting || adjustments.toolsApplied) + { + CGRect scaledCropRect = CGRectMake(adjustments.cropRect.origin.x * image.size.width / adjustments.originalSize.width, adjustments.cropRect.origin.y * image.size.height / adjustments.originalSize.height, adjustments.cropRect.size.width * image.size.width / adjustments.originalSize.width, adjustments.cropRect.size.height * image.size.height / adjustments.originalSize.height); + UIImage *paintingImage = adjustments.paintingData.stillImage; + if (paintingImage == nil) { + paintingImage = adjustments.paintingData.image; + } + if (adjustments.toolsApplied) { + image = [PGPhotoEditor resultImageForImage:image adjustments:adjustments]; + } + return TGPhotoEditorCrop(image, paintingImage, adjustments.cropOrientation, 0, scaledCropRect, adjustments.cropMirrored, targetSize, sourceSize, resize); + } - if (timer != nil) - dict[@"timer"] = timer; - - if (groupedId != nil && !hasAnyTimers) - dict[@"groupedId"] = groupedId; - - id generatedItem = descriptionGenerator(dict, caption, nil, nil); - return generatedItem; + return image; + }; + + CGSize imageSize = TGFillSize(asset.originalSize, CGSizeMake(512, 512)); + SSignal *trimmedVideoThumbnailSignal = [[video avAsset] mapToSignal:^SSignal *(AVURLAsset *avAsset) { + return [[TGMediaAssetImageSignals videoThumbnailForAVAsset:avAsset size:imageSize timestamp:CMTimeMakeWithSeconds(adjustments.trimStartValue, NSEC_PER_SEC)] map:^UIImage *(UIImage *image) + { + return cropVideoThumbnail(image, TGScaleToFill(asset.originalSize, CGSizeMake(512, 512)), asset.originalSize, true); + }]; }]; - SSignal *assetSignal = inlineSignal; - SSignal *imageSignal = assetSignal; - if (editingContext != nil) + SSignal *(^inlineThumbnailSignal)(id) = ^SSignal *(id item) { - imageSignal = [[[[[editingContext imageSignalForItem:asset 
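The `cropVideoThumbnail` block above maps the stored crop rectangle, which is expressed in the video's original pixel space, into the thumbnail's coordinate space before cropping. The rescaling is a per-axis scale; a small standalone sketch with a worked example:

```swift
import CoreGraphics

// Scale a crop rect from the original video dimensions into the thumbnail's pixel space.
func scaledCropRect(cropRect: CGRect, originalSize: CGSize, thumbnailSize: CGSize) -> CGRect {
    let sx = thumbnailSize.width / originalSize.width
    let sy = thumbnailSize.height / originalSize.height
    return CGRect(x: cropRect.origin.x * sx,
                  y: cropRect.origin.y * sy,
                  width: cropRect.size.width * sx,
                  height: cropRect.size.height * sy)
}

// Example: a 1920x1080 video cropped to its right half, thumbnailed at 512x288.
// scaledCropRect(cropRect: CGRect(x: 960, y: 0, width: 960, height: 1080),
//                originalSize: CGSize(width: 1920, height: 1080),
//                thumbnailSize: CGSize(width: 512, height: 288))
// -> (256.0, 0.0, 256.0, 288.0)
```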
withUpdates:true] filter:^bool(id result) - { - return result == nil || ([result isKindOfClass:[UIImage class]] && !((UIImage *)result).degraded); - }] take:1] mapToSignal:^SSignal *(id result) - { - if (result == nil) - { - return [SSignal fail:nil]; - } - else if ([result isKindOfClass:[UIImage class]]) - { - UIImage *image = (UIImage *)result; - image.edited = true; - return [SSignal single:image]; - } - - return [SSignal complete]; - }] onCompletion:^ - { - __strong TGMediaEditingContext *strongEditingContext = editingContext; - [strongEditingContext description]; - }]; - } + return [item thumbnailImageSignal]; + }; - [signals addObject:[[imageSignal map:^NSDictionary *(UIImage *image) - { - NSMutableDictionary *dict = [[NSMutableDictionary alloc] init]; - dict[@"type"] = @"editedPhoto"; - dict[@"image"] = image; - - if (adjustments.paintingData.stickers.count > 0) - dict[@"stickers"] = adjustments.paintingData.stickers; - - if (timer != nil) - dict[@"timer"] = timer; - - if (groupedId != nil && !hasAnyTimers) - dict[@"groupedId"] = groupedId; - - id generatedItem = descriptionGenerator(dict, caption, nil, nil); - return generatedItem; - }] catch:^SSignal *(__unused id error) + SSignal *videoThumbnailSignal = [inlineThumbnailSignal(asset) map:^UIImage *(UIImage *image) { + return cropVideoThumbnail(image, image.size, image.size, false); + }]; + + SSignal *thumbnailSignal = adjustments.trimStartValue > FLT_EPSILON ? trimmedVideoThumbnailSignal : videoThumbnailSignal; + + TGMediaVideoConversionPreset preset = [TGMediaVideoConverter presetFromAdjustments:adjustments]; + CGSize dimensions = [TGMediaVideoConverter dimensionsFor:asset.originalSize adjustments:adjustments preset:preset]; + NSTimeInterval duration = adjustments.trimApplied ? (adjustments.trimEndValue - adjustments.trimStartValue) : video.videoDuration; + + [signals addObject:[thumbnailSignal mapToSignal:^id(UIImage *image) { - return inlineSignal; + return [video.avAsset map:^id(AVURLAsset *avAsset) { + NSMutableDictionary *dict = [[NSMutableDictionary alloc] init]; + dict[@"type"] = @"cameraVideo"; + dict[@"url"] = avAsset.URL; + dict[@"previewImage"] = image; + dict[@"adjustments"] = adjustments; + dict[@"dimensions"] = [NSValue valueWithCGSize:dimensions]; + dict[@"duration"] = @(duration); + + if (adjustments.paintingData.stickers.count > 0) + dict[@"stickers"] = adjustments.paintingData.stickers; + if (timer != nil) + dict[@"timer"] = timer; + else if (groupedId != nil && !hasAnyTimers) + dict[@"groupedId"] = groupedId; + + id generatedItem = descriptionGenerator(dict, caption, nil, nil); + return generatedItem; + }]; }]]; + i++; i++; num++; } diff --git a/submodules/MediaPasteboardUI/Sources/MediaPasteboardScreen.swift b/submodules/MediaPasteboardUI/Sources/MediaPasteboardScreen.swift index c9d7099d61..15d06381b6 100644 --- a/submodules/MediaPasteboardUI/Sources/MediaPasteboardScreen.swift +++ b/submodules/MediaPasteboardUI/Sources/MediaPasteboardScreen.swift @@ -13,13 +13,13 @@ public func mediaPasteboardScreen( context: AccountContext, updatedPresentationData: (initial: PresentationData, signal: Signal)? = nil, peer: EnginePeer, - subjects: [UIImage], + subjects: [MediaPickerScreen.Subject.Media], presentMediaPicker: @escaping (_ subject: MediaPickerScreen.Subject, _ saveEditedPhotos: Bool, _ bannedSendMedia: (Int32, Bool)?, _ present: @escaping (MediaPickerScreen, AttachmentMediaPickerContext?) -> Void) -> Void, getSourceRect: (() -> CGRect?)? 
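For trimmed videos, the code above takes the preview frame at `trimStartValue` and reports the trimmed length rather than the full clip duration. A plain-value sketch of that duration rule (the struct here is illustrative, not the app's `TGVideoEditAdjustments`):

```swift
import Foundation

// Illustrative trim state; the real adjustments object carries much more.
struct TrimAdjustments {
    var trimStartValue: TimeInterval
    var trimEndValue: TimeInterval
    var trimApplied: Bool
}

func effectiveDuration(of fullDuration: TimeInterval, adjustments: TrimAdjustments?) -> TimeInterval {
    guard let adjustments = adjustments, adjustments.trimApplied else { return fullDuration }
    return adjustments.trimEndValue - adjustments.trimStartValue
}

// effectiveDuration(of: 30.0,
//                   adjustments: TrimAdjustments(trimStartValue: 2.0, trimEndValue: 7.5, trimApplied: true))
// -> 5.5
```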
= nil ) -> ViewController { let controller = AttachmentController(context: context, updatedPresentationData: updatedPresentationData, chatLocation: .peer(id: peer.id), buttons: [.standalone], initialButton: .standalone) controller.requestController = { _, present in - presentMediaPicker(.media(subjects.map { .image($0) }), false, nil, { mediaPicker, mediaPickerContext in + presentMediaPicker(.media(subjects), false, nil, { mediaPicker, mediaPickerContext in present(mediaPicker, mediaPickerContext) }) } diff --git a/submodules/MediaPickerUI/Sources/LegacyMediaPickerGallery.swift b/submodules/MediaPickerUI/Sources/LegacyMediaPickerGallery.swift index dd12846ee1..0c8bc2bf43 100644 --- a/submodules/MediaPickerUI/Sources/LegacyMediaPickerGallery.swift +++ b/submodules/MediaPickerUI/Sources/LegacyMediaPickerGallery.swift @@ -293,7 +293,7 @@ func presentLegacyMediaPickerGallery(context: AccountContext, peer: EnginePeer?, model.interfaceView.setThumbnailSignalForItem { item in let imageSignal = SSignal(generator: { subscriber in var asset: PHAsset? - if let item = item as? TGCameraCapturedVideo { + if let item = item as? TGCameraCapturedVideo, item.originalAsset != nil { asset = item.originalAsset.backingAsset } else if let item = item as? TGMediaAsset { asset = item.backingAsset diff --git a/submodules/TelegramUI/Sources/ChatController.swift b/submodules/TelegramUI/Sources/ChatController.swift index cdb76f7a90..801e8e9092 100644 --- a/submodules/TelegramUI/Sources/ChatController.swift +++ b/submodules/TelegramUI/Sources/ChatController.swift @@ -6846,14 +6846,17 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G } self.chatDisplayNode.paste = { [weak self] data in switch data { - case let .images(images): - self?.displayPasteMenu(images) - case let .video(data): - self?.enqueueVideoData(data) - case let .gif(data): - self?.enqueueGifData(data) - case let .sticker(image, isMemoji): - self?.enqueueStickerImage(image, isMemoji: isMemoji) + case let .images(images): + self?.displayPasteMenu(images.map { .image($0) }) + case let .video(data): + let tempFilePath = NSTemporaryDirectory() + "\(Int64.random(in: 0...Int64.max)).mp4" + let url = NSURL(fileURLWithPath: tempFilePath) as URL + try? data.write(to: url) + self?.displayPasteMenu([.video(url)]) + case let .gif(data): + self?.enqueueGifData(data) + case let .sticker(image, isMemoji): + self?.enqueueStickerImage(image, isMemoji: isMemoji) } } self.chatDisplayNode.updateTypingActivity = { [weak self] value in @@ -13692,7 +13695,7 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G })) } - private func displayPasteMenu(_ images: [UIImage]) { + private func displayPasteMenu(_ subjects: [MediaPickerScreen.Subject.Media]) { let _ = (self.context.sharedContext.accountManager.transaction { transaction -> GeneratedMediaStoreSettings in let entry = transaction.getSharedData(ApplicationSpecificSharedDataKeys.generatedMediaStoreSettings)?.get(GeneratedMediaStoreSettings.self) return entry ?? 
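In the paste handler above, pasted video data is no longer enqueued directly; it is written to a uniquely named temporary `.mp4` and the resulting file URL is handed to the paste menu as a `.video` subject. A minimal sketch of that step (the diff uses `try?` and ignores write failures; this version surfaces the error instead):

```swift
import Foundation

// Write pasted video bytes to a unique temporary file and return its URL.
func temporaryURLForPastedVideo(_ data: Data) throws -> URL {
    let path = NSTemporaryDirectory() + "\(Int64.random(in: 0 ... Int64.max)).mp4"
    let url = URL(fileURLWithPath: path)
    try data.write(to: url)
    return url
}
```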
GeneratedMediaStoreSettings.defaultSettings @@ -13705,7 +13708,7 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G context: strongSelf.context, updatedPresentationData: strongSelf.updatedPresentationData, peer: EnginePeer(peer), - subjects: images, + subjects: subjects, presentMediaPicker: { [weak self] subject, saveEditedPhotos, bannedSendMedia, present in if let strongSelf = self { strongSelf.presentMediaPicker(subject: subject, saveEditedPhotos: saveEditedPhotos, bannedSendMedia: bannedSendMedia, present: present, updateMediaPickerContext: { _ in }, completion: { [weak self] signals, silentPosting, scheduleTime, getAnimatedTransitionSource, completion in @@ -16312,7 +16315,7 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G } } strongSelf.chatDisplayNode.updateDropInteraction(isActive: false) - strongSelf.displayPasteMenu(images) + strongSelf.displayPasteMenu(images.map { .image($0) }) } } diff --git a/submodules/TelegramUI/Sources/ChatMessageAnimatedStickerItemNode.swift b/submodules/TelegramUI/Sources/ChatMessageAnimatedStickerItemNode.swift index ded4ba96e5..5e2d784f45 100644 --- a/submodules/TelegramUI/Sources/ChatMessageAnimatedStickerItemNode.swift +++ b/submodules/TelegramUI/Sources/ChatMessageAnimatedStickerItemNode.swift @@ -2293,7 +2293,21 @@ class ChatMessageAnimatedStickerItemNode: ChatMessageItemView { self.item?.controllerInteraction.cancelInteractiveKeyboardGestures() case .changed: var translation = recognizer.translation(in: self.view) - translation.x = max(-80.0, min(0.0, translation.x)) + func rubberBandingOffset(offset: CGFloat, bandingStart: CGFloat) -> CGFloat { + let bandedOffset = offset - bandingStart + if offset < bandingStart { + return offset + } + let range: CGFloat = 100.0 + let coefficient: CGFloat = 0.4 + return bandingStart + (1.0 - (1.0 / ((bandedOffset * coefficient / range) + 1.0))) * range + } + + if translation.x < 0.0 { + translation.x = max(-180.0, min(0.0, -rubberBandingOffset(offset: abs(translation.x), bandingStart: swipeOffset))) + } else { + translation.x = 0.0 + } if let item = self.item, self.swipeToReplyNode == nil { let swipeToReplyNode = ChatMessageSwipeToReplyNode(fillColor: selectDateFillStaticColor(theme: item.presentationData.theme.theme, wallpaper: item.presentationData.theme.wallpaper), enableBlur: dateFillNeedsBlur(theme: item.presentationData.theme.theme, wallpaper: item.presentationData.theme.wallpaper), foregroundColor: bubbleVariableColor(variableColor: item.presentationData.theme.theme.chat.message.shareButtonForegroundColor, wallpaper: item.presentationData.theme.wallpaper), backgroundNode: item.controllerInteraction.presentationContext.backgroundNode, action: ChatMessageSwipeToReplyNode.Action(self.currentSwipeAction)) @@ -2309,7 +2323,8 @@ class ChatMessageAnimatedStickerItemNode: ChatMessageItemView { self.updateAttachedAvatarNodeOffset(offset: translation.x, transition: .immediate) if let swipeToReplyNode = self.swipeToReplyNode { - swipeToReplyNode.frame = CGRect(origin: CGPoint(x: bounds.size.width + offset, y: floor((self.contentSize.height - 33.0) / 2.0)), size: CGSize(width: 33.0, height: 33.0)) + swipeToReplyNode.bounds = CGRect(origin: .zero, size: CGSize(width: 33.0, height: 33.0)) + swipeToReplyNode.position = CGPoint(x: bounds.size.width + offset + 33.0 * 0.5, y: self.contentSize.height / 2.0) if let (rect, containerSize) = self.absoluteRect { let mappedRect = CGRect(origin: CGPoint(x: rect.minX + swipeToReplyNode.frame.minX, y: rect.minY + 
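The `rubberBandingOffset` helper above replaces the old hard clamp at -80: up to `bandingStart` the gesture tracks the finger 1:1, and beyond it the extra travel is compressed so the offset approaches `bandingStart + 100` asymptotically, which is why the outer clamp widens to -180 (assuming `swipeOffset` is about 80, the previous limit). A standalone copy with a few sample values:

```swift
import CoreGraphics

func rubberBandingOffset(offset: CGFloat, bandingStart: CGFloat) -> CGFloat {
    if offset < bandingStart {
        return offset
    }
    let bandedOffset = offset - bandingStart
    let range: CGFloat = 100.0
    let coefficient: CGFloat = 0.4
    return bandingStart + (1.0 - (1.0 / ((bandedOffset * coefficient / range) + 1.0))) * range
}

// rubberBandingOffset(offset:  80, bandingStart: 80) ->  80.0
// rubberBandingOffset(offset: 180, bandingStart: 80) -> ~108.6
// rubberBandingOffset(offset: 280, bandingStart: 80) -> ~124.4
// limit as offset grows without bound                ->  180.0
```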
swipeToReplyNode.frame.minY), size: swipeToReplyNode.frame.size) diff --git a/submodules/TelegramUI/Sources/ChatMessageBubbleItemNode.swift b/submodules/TelegramUI/Sources/ChatMessageBubbleItemNode.swift index 0390c00975..cf5f61d5b7 100644 --- a/submodules/TelegramUI/Sources/ChatMessageBubbleItemNode.swift +++ b/submodules/TelegramUI/Sources/ChatMessageBubbleItemNode.swift @@ -4100,7 +4100,21 @@ class ChatMessageBubbleItemNode: ChatMessageItemView, ChatMessagePreviewItemNode self.item?.controllerInteraction.cancelInteractiveKeyboardGestures() case .changed: var translation = recognizer.translation(in: self.view) - translation.x = max(-80.0, min(0.0, translation.x)) + func rubberBandingOffset(offset: CGFloat, bandingStart: CGFloat) -> CGFloat { + let bandedOffset = offset - bandingStart + if offset < bandingStart { + return offset + } + let range: CGFloat = 100.0 + let coefficient: CGFloat = 0.4 + return bandingStart + (1.0 - (1.0 / ((bandedOffset * coefficient / range) + 1.0))) * range + } + + if translation.x < 0.0 { + translation.x = max(-180.0, min(0.0, -rubberBandingOffset(offset: abs(translation.x), bandingStart: swipeOffset))) + } else { + translation.x = 0.0 + } if let item = self.item, self.swipeToReplyNode == nil { let swipeToReplyNode = ChatMessageSwipeToReplyNode(fillColor: selectDateFillStaticColor(theme: item.presentationData.theme.theme, wallpaper: item.presentationData.theme.wallpaper), enableBlur: dateFillNeedsBlur(theme: item.presentationData.theme.theme, wallpaper: item.presentationData.theme.wallpaper), foregroundColor: bubbleVariableColor(variableColor: item.presentationData.theme.theme.chat.message.shareButtonForegroundColor, wallpaper: item.presentationData.theme.wallpaper), backgroundNode: item.controllerInteraction.presentationContext.backgroundNode, action: ChatMessageSwipeToReplyNode.Action(self.currentSwipeAction)) @@ -4119,8 +4133,9 @@ class ChatMessageBubbleItemNode: ChatMessageItemView, ChatMessagePreviewItemNode self.updateAttachedAvatarNodeOffset(offset: translation.x, transition: .immediate) if let swipeToReplyNode = self.swipeToReplyNode { - swipeToReplyNode.frame = CGRect(origin: CGPoint(x: bounds.size.width + offset, y: floor((self.contentSize.height - 33.0) / 2.0)), size: CGSize(width: 33.0, height: 33.0)) - + swipeToReplyNode.bounds = CGRect(origin: .zero, size: CGSize(width: 33.0, height: 33.0)) + swipeToReplyNode.position = CGPoint(x: bounds.size.width + offset + 33.0 * 0.5, y: self.contentSize.height / 2.0) + if let (rect, containerSize) = self.absoluteRect { let mappedRect = CGRect(origin: CGPoint(x: rect.minX + swipeToReplyNode.frame.minX, y: rect.minY + swipeToReplyNode.frame.minY), size: swipeToReplyNode.frame.size) swipeToReplyNode.updateAbsoluteRect(mappedRect, within: containerSize) diff --git a/submodules/TelegramUI/Sources/ChatMessageInstantVideoItemNode.swift b/submodules/TelegramUI/Sources/ChatMessageInstantVideoItemNode.swift index 8b661ead05..13392f8999 100644 --- a/submodules/TelegramUI/Sources/ChatMessageInstantVideoItemNode.swift +++ b/submodules/TelegramUI/Sources/ChatMessageInstantVideoItemNode.swift @@ -1020,7 +1020,8 @@ class ChatMessageInstantVideoItemNode: ChatMessageItemView, UIGestureRecognizerD self.updateAttachedAvatarNodeOffset(offset: translation.x, transition: .immediate) if let swipeToReplyNode = self.swipeToReplyNode { - swipeToReplyNode.frame = CGRect(origin: CGPoint(x: bounds.size.width + offset, y: floor((self.contentSize.height - 33.0) / 2.0)), size: CGSize(width: 33.0, height: 33.0)) + swipeToReplyNode.bounds = 
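The indicator node above is now laid out via `bounds` and `position` instead of `frame`. One plausible reason (not stated in the diff) is that the node's layer is now scale-animated in `ChatMessageSwipeToReplyNode`, and `frame` is unreliable once a layer carries a non-identity transform, whereas `bounds` plus `position` stay well defined. The conversion is simply origin to center:

```swift
import CoreGraphics

// Converting the old frame-based layout to the new bounds/position form:
// the position is the old frame origin offset by half the fixed 33pt size.
let nodeSize = CGSize(width: 33.0, height: 33.0)
let oldOrigin = CGPoint(x: 260.0, y: 5.5) // example frame origin
let nodeBounds = CGRect(origin: .zero, size: nodeSize)
let nodePosition = CGPoint(x: oldOrigin.x + nodeSize.width * 0.5,
                           y: oldOrigin.y + nodeSize.height * 0.5)
```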
CGRect(origin: .zero, size: CGSize(width: 33.0, height: 33.0)) + swipeToReplyNode.position = CGPoint(x: bounds.size.width + offset + 33.0 * 0.5, y: self.contentSize.height / 2.0) if let (rect, containerSize) = self.absoluteRect { let mappedRect = CGRect(origin: CGPoint(x: rect.minX + swipeToReplyNode.frame.minX, y: rect.minY + swipeToReplyNode.frame.minY), size: swipeToReplyNode.frame.size) diff --git a/submodules/TelegramUI/Sources/ChatMessageStickerItemNode.swift b/submodules/TelegramUI/Sources/ChatMessageStickerItemNode.swift index 3f45d85c54..fcf1ab84cc 100644 --- a/submodules/TelegramUI/Sources/ChatMessageStickerItemNode.swift +++ b/submodules/TelegramUI/Sources/ChatMessageStickerItemNode.swift @@ -1257,7 +1257,21 @@ class ChatMessageStickerItemNode: ChatMessageItemView { self.item?.controllerInteraction.cancelInteractiveKeyboardGestures() case .changed: var translation = recognizer.translation(in: self.view) - translation.x = max(-80.0, min(0.0, translation.x)) + func rubberBandingOffset(offset: CGFloat, bandingStart: CGFloat) -> CGFloat { + let bandedOffset = offset - bandingStart + if offset < bandingStart { + return offset + } + let range: CGFloat = 100.0 + let coefficient: CGFloat = 0.4 + return bandingStart + (1.0 - (1.0 / ((bandedOffset * coefficient / range) + 1.0))) * range + } + + if translation.x < 0.0 { + translation.x = max(-180.0, min(0.0, -rubberBandingOffset(offset: abs(translation.x), bandingStart: swipeOffset))) + } else { + translation.x = 0.0 + } if let item = self.item, self.swipeToReplyNode == nil { let swipeToReplyNode = ChatMessageSwipeToReplyNode(fillColor: selectDateFillStaticColor(theme: item.presentationData.theme.theme, wallpaper: item.presentationData.theme.wallpaper), enableBlur: dateFillNeedsBlur(theme: item.presentationData.theme.theme, wallpaper: item.presentationData.theme.wallpaper), foregroundColor: bubbleVariableColor(variableColor: item.presentationData.theme.theme.chat.message.shareButtonForegroundColor, wallpaper: item.presentationData.theme.wallpaper), backgroundNode: item.controllerInteraction.presentationContext.backgroundNode, action: ChatMessageSwipeToReplyNode.Action(self.currentSwipeAction)) @@ -1273,7 +1287,8 @@ class ChatMessageStickerItemNode: ChatMessageItemView { self.updateAttachedAvatarNodeOffset(offset: translation.x, transition: .immediate) if let swipeToReplyNode = self.swipeToReplyNode { - swipeToReplyNode.frame = CGRect(origin: CGPoint(x: bounds.size.width + offset, y: floor((self.contentSize.height - 33.0) / 2.0)), size: CGSize(width: 33.0, height: 33.0)) + swipeToReplyNode.bounds = CGRect(origin: .zero, size: CGSize(width: 33.0, height: 33.0)) + swipeToReplyNode.position = CGPoint(x: bounds.size.width + offset + 33.0 * 0.5, y: self.contentSize.height / 2.0) if let (rect, containerSize) = self.absoluteRect { let mappedRect = CGRect(origin: CGPoint(x: rect.minX + swipeToReplyNode.frame.minX, y: rect.minY + swipeToReplyNode.frame.minY), size: swipeToReplyNode.frame.size) diff --git a/submodules/TelegramUI/Sources/ChatMessageSwipeToReplyNode.swift b/submodules/TelegramUI/Sources/ChatMessageSwipeToReplyNode.swift index 590e43752b..d6d0e23acb 100644 --- a/submodules/TelegramUI/Sources/ChatMessageSwipeToReplyNode.swift +++ b/submodules/TelegramUI/Sources/ChatMessageSwipeToReplyNode.swift @@ -179,18 +179,24 @@ final class ChatMessageSwipeToReplyNode: ASDisplayNode { return } + self.layer.animateScale(from: 0.0, to: 0.1, duration: 0.2, additive: true, completion: { [weak self] _ in + self?.layer.animateScale(from: 0.1, to: 0.0, 
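The hunk above starts the "wave" feedback in `ChatMessageSwipeToReplyNode`: the circular stroke path expands to radius 35 while the line width and alpha animate to zero, and the node gets a small additive scale pulse. A rough Core Animation sketch of that ring effect (illustrative only; the project drives it through its own layer-animation helpers):

```swift
import UIKit

// Expand a stroked ring while fading its width and opacity, reading as a
// ripple that grows and dissolves.
func playWave(on layer: CAShapeLayer, in bounds: CGRect) {
    let center = CGPoint(x: bounds.midX, y: bounds.midY)
    let startPath = UIBezierPath(arcCenter: center, radius: 16.0,
                                 startAngle: -.pi / 2.0, endAngle: 1.5 * .pi, clockwise: true).cgPath
    let endPath = UIBezierPath(arcCenter: center, radius: 35.0,
                               startAngle: -.pi / 2.0, endAngle: 1.5 * .pi, clockwise: true).cgPath

    // Final model values: the ring ends expanded, hairline-thin and invisible.
    layer.path = endPath
    layer.lineWidth = 0.0
    layer.opacity = 0.0

    let path = CABasicAnimation(keyPath: "path")
    path.fromValue = startPath
    path.toValue = endPath

    let width = CABasicAnimation(keyPath: "lineWidth")
    width.fromValue = 2.0
    width.toValue = 0.0

    let alpha = CABasicAnimation(keyPath: "opacity")
    alpha.fromValue = 1.0
    alpha.toValue = 0.0

    let group = CAAnimationGroup()
    group.animations = [path, width, alpha]
    group.duration = 0.3
    layer.add(group, forKey: "wave")
}
```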
duration: 0.15, additive: true) + }) + self.animatedWave = true var lineWidth = self.progressLayer.lineWidth - self.progressLayer.lineWidth = 1.0 - self.progressLayer.animate(from: lineWidth as NSNumber, to: 1.0 as NSNumber, keyPath: "lineWidth", timingFunction: CAMediaTimingFunctionName.linear.rawValue, duration: 0.2, completion: { [weak self] _ in - self?.progressLayer.animate(from: 1.0 as NSNumber, to: 0.0 as NSNumber, keyPath: "lineWidth", timingFunction: CAMediaTimingFunctionName.linear.rawValue, duration: 0.1, removeOnCompletion: false) + self.progressLayer.lineWidth = 0.0 + self.progressLayer.animate(from: lineWidth as NSNumber, to: 0.0 as NSNumber, keyPath: "lineWidth", timingFunction: CAMediaTimingFunctionName.linear.rawValue, duration: 0.3, completion: { _ in + }) var path = self.progressLayer.path var targetPath = UIBezierPath(arcCenter: CGPoint(x: self.progressLayer.frame.width / 2.0, y: self.progressLayer.frame.height / 2.0), radius: 35.0, startAngle: CGFloat(-0.5 * .pi), endAngle: CGFloat(-0.5 * .pi + 2.0 * .pi), clockwise: true).cgPath self.progressLayer.path = targetPath - self.progressLayer.animate(from: path, to: targetPath, keyPath: "path", timingFunction: kCAMediaTimingFunctionSpring, duration: 0.25) + self.progressLayer.animate(from: path, to: targetPath, keyPath: "path", timingFunction: kCAMediaTimingFunctionSpring, duration: 0.3) + + self.progressLayer.animateAlpha(from: 1.0, to: 0.0, duration: 0.3) self.fillLayer.isHidden = false self.fillLayer.path = UIBezierPath(ovalIn: CGRect(origin: .zero, size: size)).cgPath @@ -198,12 +204,12 @@ final class ChatMessageSwipeToReplyNode: ASDisplayNode { lineWidth = self.fillLayer.lineWidth self.fillLayer.lineWidth = 18.0 - self.fillLayer.animate(from: lineWidth as NSNumber, to: 18.0 as NSNumber, keyPath: "lineWidth", timingFunction: CAMediaTimingFunctionName.linear.rawValue, duration: 0.25) + self.fillLayer.animate(from: lineWidth as NSNumber, to: 18.0 as NSNumber, keyPath: "lineWidth", timingFunction: CAMediaTimingFunctionName.linear.rawValue, duration: 0.3) path = self.fillLayer.path targetPath = UIBezierPath(ovalIn: CGRect(origin: .zero, size: size).insetBy(dx: 9.0, dy: 9.0)).cgPath self.fillLayer.path = targetPath - self.fillLayer.animate(from: path, to: targetPath, keyPath: "path", timingFunction: CAMediaTimingFunctionName.linear.rawValue, duration: 0.25) + self.fillLayer.animate(from: path, to: targetPath, keyPath: "path", timingFunction: CAMediaTimingFunctionName.linear.rawValue, duration: 0.3) } func updateAbsoluteRect(_ rect: CGRect, within containerSize: CGSize) { diff --git a/submodules/TelegramUI/Sources/NotificationItemContainerNode.swift b/submodules/TelegramUI/Sources/NotificationItemContainerNode.swift index 2d3b2b3864..22e32ffe50 100644 --- a/submodules/TelegramUI/Sources/NotificationItemContainerNode.swift +++ b/submodules/TelegramUI/Sources/NotificationItemContainerNode.swift @@ -91,7 +91,9 @@ final class NotificationItemContainerNode: ASDisplayNode { var contentInsets = UIEdgeInsets(top: inset, left: inset + layout.safeInsets.left, bottom: inset, right: inset + layout.safeInsets.right) if let statusBarHeight = layout.statusBarHeight, statusBarHeight >= 39.0 { - if statusBarHeight >= 44.0 { + if layout.deviceMetrics.hasDynamicIsland { + contentInsets.top = statusBarHeight + } else if statusBarHeight >= 44.0 { contentInsets.top += 34.0 } else { contentInsets.top += 29.0
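The final hunk adjusts in-app notification insets for Dynamic Island devices: instead of adding the fixed 34pt/29pt notch offsets, the content is pushed to sit fully below the status bar. A compact restatement of that rule (the `hasDynamicIsland` flag here stands in for the real `DeviceMetrics` check):

```swift
import CoreGraphics

// Top inset for the in-app notification container, mirroring the hunk above.
func topContentInset(statusBarHeight: CGFloat, hasDynamicIsland: Bool, baseInset: CGFloat) -> CGFloat {
    guard statusBarHeight >= 39.0 else { return baseInset }
    if hasDynamicIsland {
        return statusBarHeight            // drop fully below the island
    } else if statusBarHeight >= 44.0 {
        return baseInset + 34.0           // notch devices
    } else {
        return baseInset + 29.0           // smaller tall status bars
    }
}
```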