Mirror of https://github.com/Swiftgram/Telegram-iOS.git (synced 2025-06-16 05:55:20 +00:00)

Commit ae8d1c37a5 (parent f0a33da460): Various fixes
@@ -1411,7 +1411,7 @@
     bool hasAnyTimers = false;
     if (editingContext != nil || grouping)
     {
-        for (UIImage *asset in selectedItems)
+        for (id<TGMediaEditableItem> asset in selectedItems)
         {
             if ([editingContext timerForItem:asset] != nil) {
                 hasAnyTimers = true;
@@ -1434,7 +1434,7 @@
     if (grouping && selectedItems.count > 1)
         groupedId = @([self generateGroupedId]);
 
-    for (UIImage *asset in selectedItems)
+    for (id<TGMediaEditableItem> asset in selectedItems)
     {
         NSAttributedString *caption = [editingContext captionForItem:asset];
         if (editingContext.isForcedCaption) {
@@ -1445,10 +1445,11 @@
            }
        }
 
+        if ([asset isKindOfClass:[UIImage class]]) {
            if (intent == TGMediaAssetsControllerSendFileIntent)
            {
                NSString *tempFileName = TGTemporaryFileName(nil);
-                NSData *imageData = UIImageJPEGRepresentation(asset, 1.0);
+                NSData *imageData = UIImageJPEGRepresentation((UIImage *)asset, 1.0);
                [imageData writeToURL:[NSURL fileURLWithPath:tempFileName] atomically:true];
 
                NSMutableDictionary *dict = [[NSMutableDictionary alloc] init];
@@ -1539,6 +1540,81 @@
                i++;
                num++;
            }
+        } else if ([asset isKindOfClass:[TGCameraCapturedVideo class]]) {
+            TGCameraCapturedVideo *video = (TGCameraCapturedVideo *)asset;
+
+            TGVideoEditAdjustments *adjustments = (TGVideoEditAdjustments *)[editingContext adjustmentsForItem:asset];
+            NSAttributedString *caption = [editingContext captionForItem:asset];
+            NSNumber *timer = [editingContext timerForItem:asset];
+
+            UIImage *(^cropVideoThumbnail)(UIImage *, CGSize, CGSize, bool) = ^UIImage *(UIImage *image, CGSize targetSize, CGSize sourceSize, bool resize)
+            {
+                if ([adjustments cropAppliedForAvatar:false] || adjustments.hasPainting || adjustments.toolsApplied)
+                {
+                    CGRect scaledCropRect = CGRectMake(adjustments.cropRect.origin.x * image.size.width / adjustments.originalSize.width, adjustments.cropRect.origin.y * image.size.height / adjustments.originalSize.height, adjustments.cropRect.size.width * image.size.width / adjustments.originalSize.width, adjustments.cropRect.size.height * image.size.height / adjustments.originalSize.height);
+                    UIImage *paintingImage = adjustments.paintingData.stillImage;
+                    if (paintingImage == nil) {
+                        paintingImage = adjustments.paintingData.image;
+                    }
+                    if (adjustments.toolsApplied) {
+                        image = [PGPhotoEditor resultImageForImage:image adjustments:adjustments];
+                    }
+                    return TGPhotoEditorCrop(image, paintingImage, adjustments.cropOrientation, 0, scaledCropRect, adjustments.cropMirrored, targetSize, sourceSize, resize);
+                }
+
+                return image;
+            };
+
+            CGSize imageSize = TGFillSize(asset.originalSize, CGSizeMake(512, 512));
+            SSignal *trimmedVideoThumbnailSignal = [[video avAsset] mapToSignal:^SSignal *(AVURLAsset *avAsset) {
+                return [[TGMediaAssetImageSignals videoThumbnailForAVAsset:avAsset size:imageSize timestamp:CMTimeMakeWithSeconds(adjustments.trimStartValue, NSEC_PER_SEC)] map:^UIImage *(UIImage *image)
+                {
+                    return cropVideoThumbnail(image, TGScaleToFill(asset.originalSize, CGSizeMake(512, 512)), asset.originalSize, true);
+                }];
+            }];
+
+            SSignal *(^inlineThumbnailSignal)(id<TGMediaEditableItem>) = ^SSignal *(id<TGMediaEditableItem> item)
+            {
+                return [item thumbnailImageSignal];
+            };
+
+            SSignal *videoThumbnailSignal = [inlineThumbnailSignal(asset) map:^UIImage *(UIImage *image) {
+                return cropVideoThumbnail(image, image.size, image.size, false);
+            }];
+
+            SSignal *thumbnailSignal = adjustments.trimStartValue > FLT_EPSILON ? trimmedVideoThumbnailSignal : videoThumbnailSignal;
+
+            TGMediaVideoConversionPreset preset = [TGMediaVideoConverter presetFromAdjustments:adjustments];
+            CGSize dimensions = [TGMediaVideoConverter dimensionsFor:asset.originalSize adjustments:adjustments preset:preset];
+            NSTimeInterval duration = adjustments.trimApplied ? (adjustments.trimEndValue - adjustments.trimStartValue) : video.videoDuration;
+
+            [signals addObject:[thumbnailSignal mapToSignal:^id(UIImage *image)
+            {
+                return [video.avAsset map:^id(AVURLAsset *avAsset) {
+                    NSMutableDictionary *dict = [[NSMutableDictionary alloc] init];
+                    dict[@"type"] = @"cameraVideo";
+                    dict[@"url"] = avAsset.URL;
+                    dict[@"previewImage"] = image;
+                    dict[@"adjustments"] = adjustments;
+                    dict[@"dimensions"] = [NSValue valueWithCGSize:dimensions];
+                    dict[@"duration"] = @(duration);
+
+                    if (adjustments.paintingData.stickers.count > 0)
+                        dict[@"stickers"] = adjustments.paintingData.stickers;
+                    if (timer != nil)
+                        dict[@"timer"] = timer;
+                    else if (groupedId != nil && !hasAnyTimers)
+                        dict[@"groupedId"] = groupedId;
+
+                    id generatedItem = descriptionGenerator(dict, caption, nil, nil);
+                    return generatedItem;
+                }];
+            }]];
+
+            i++;
+            i++;
+            num++;
+        }
 
        if (groupedId != nil && i == 10)
        {
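A note on the thumbnail math in this new branch: cropVideoThumbnail maps the user's crop rectangle from the video's original pixel space into the thumbnail's pixel space by scaling each component by image.size / adjustments.originalSize, and the trimmed-frame thumbnail is only requested when trimStartValue is non-zero. A minimal Swift sketch of just that rectangle mapping, with made-up sizes:

    import CoreGraphics

    // Map a crop rect from the original video's coordinate space into the
    // coordinate space of a smaller thumbnail image; the same arithmetic as the
    // scaledCropRect computation above, extracted for illustration.
    func scaleCropRect(_ cropRect: CGRect, from originalSize: CGSize, to imageSize: CGSize) -> CGRect {
        let sx = imageSize.width / originalSize.width
        let sy = imageSize.height / originalSize.height
        return CGRect(x: cropRect.origin.x * sx,
                      y: cropRect.origin.y * sy,
                      width: cropRect.size.width * sx,
                      height: cropRect.size.height * sy)
    }

    // Example: a 1920x1080 video cropped to its centre square, drawn into a 512x288 thumbnail.
    let scaled = scaleCropRect(CGRect(x: 420, y: 0, width: 1080, height: 1080),
                               from: CGSize(width: 1920, height: 1080),
                               to: CGSize(width: 512, height: 288))
    // scaled == (112, 0, 288, 288)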
@@ -13,13 +13,13 @@ public func mediaPasteboardScreen(
     context: AccountContext,
     updatedPresentationData: (initial: PresentationData, signal: Signal<PresentationData, NoError>)? = nil,
     peer: EnginePeer,
-    subjects: [UIImage],
+    subjects: [MediaPickerScreen.Subject.Media],
     presentMediaPicker: @escaping (_ subject: MediaPickerScreen.Subject, _ saveEditedPhotos: Bool, _ bannedSendMedia: (Int32, Bool)?, _ present: @escaping (MediaPickerScreen, AttachmentMediaPickerContext?) -> Void) -> Void,
     getSourceRect: (() -> CGRect?)? = nil
 ) -> ViewController {
     let controller = AttachmentController(context: context, updatedPresentationData: updatedPresentationData, chatLocation: .peer(id: peer.id), buttons: [.standalone], initialButton: .standalone)
     controller.requestController = { _, present in
-        presentMediaPicker(.media(subjects.map { .image($0) }), false, nil, { mediaPicker, mediaPickerContext in
+        presentMediaPicker(.media(subjects), false, nil, { mediaPicker, mediaPickerContext in
             present(mediaPicker, mediaPickerContext)
         })
     }
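mediaPasteboardScreen now takes ready-made MediaPickerScreen.Subject.Media values instead of raw UIImages, so the wrapping moves to the call sites; the ChatControllerImpl hunks below pass .image($0) for pasted pictures and .video(url) for pasted videos. A stand-in sketch of that shape (PastedMedia is illustrative, not the project's real enum):

    import UIKit

    // Illustrative stand-in for MediaPickerScreen.Subject.Media, mirroring only
    // the two cases this commit uses.
    enum PastedMedia {
        case image(UIImage)
        case video(URL)
    }

    // Callers that used to pass [UIImage] now wrap each value explicitly, which
    // is what the images.map { .image($0) } call sites below do.
    func pasteSubjects(images: [UIImage], videoURLs: [URL]) -> [PastedMedia] {
        return images.map { .image($0) } + videoURLs.map { .video($0) }
    }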
@@ -293,7 +293,7 @@ func presentLegacyMediaPickerGallery(context: AccountContext, peer: EnginePeer?,
     model.interfaceView.setThumbnailSignalForItem { item in
         let imageSignal = SSignal(generator: { subscriber in
             var asset: PHAsset?
-            if let item = item as? TGCameraCapturedVideo {
+            if let item = item as? TGCameraCapturedVideo, item.originalAsset != nil {
                 asset = item.originalAsset.backingAsset
             } else if let item = item as? TGMediaAsset {
                 asset = item.backingAsset
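The thumbnail lookup now requires item.originalAsset != nil before reading backingAsset: the property is presumably bridged from Objective-C as implicitly unwrapped, so touching it for a pasted camera video that has no library asset behind it would trap at runtime. A stand-in sketch of that guard (ResolvedItem is illustrative, not a project type):

    import Photos

    // Illustrative model of the two item kinds the closure distinguishes.
    enum ResolvedItem {
        case capturedVideo(originalAsset: PHAsset?)
        case libraryAsset(PHAsset)
    }

    // Resolve the PHAsset used for the thumbnail, returning nil instead of
    // trapping when a captured video has no backing library asset.
    func backingAsset(for item: ResolvedItem) -> PHAsset? {
        switch item {
        case let .capturedVideo(originalAsset):
            return originalAsset
        case let .libraryAsset(asset):
            return asset
        }
    }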
@@ -6847,9 +6847,12 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
         self.chatDisplayNode.paste = { [weak self] data in
             switch data {
             case let .images(images):
-                self?.displayPasteMenu(images)
+                self?.displayPasteMenu(images.map { .image($0) })
             case let .video(data):
-                self?.enqueueVideoData(data)
+                let tempFilePath = NSTemporaryDirectory() + "\(Int64.random(in: 0...Int64.max)).mp4"
+                let url = NSURL(fileURLWithPath: tempFilePath) as URL
+                try? data.write(to: url)
+                self?.displayPasteMenu([.video(url)])
             case let .gif(data):
                 self?.enqueueGifData(data)
             case let .sticker(image, isMemoji):
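Pasted video data is no longer enqueued directly; it is written to a uniquely named file in NSTemporaryDirectory() and the resulting file URL goes through the same paste menu as images. The write above uses try?, so a failed write would still present the menu with a URL that has nothing behind it; a self-contained sketch of the same step that surfaces the error instead:

    import Foundation

    // Persist pasted video bytes to a uniquely named temporary file so the rest
    // of the pipeline can work with a file URL. Names here are illustrative.
    func writePastedVideo(_ data: Data) -> URL? {
        let path = NSTemporaryDirectory() + "\(Int64.random(in: 0 ... Int64.max)).mp4"
        let url = URL(fileURLWithPath: path)
        do {
            try data.write(to: url)
            return url
        } catch {
            print("Failed to write pasted video: \(error)")
            return nil
        }
    }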
@@ -13692,7 +13695,7 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
         }))
     }
 
-    private func displayPasteMenu(_ images: [UIImage]) {
+    private func displayPasteMenu(_ subjects: [MediaPickerScreen.Subject.Media]) {
         let _ = (self.context.sharedContext.accountManager.transaction { transaction -> GeneratedMediaStoreSettings in
             let entry = transaction.getSharedData(ApplicationSpecificSharedDataKeys.generatedMediaStoreSettings)?.get(GeneratedMediaStoreSettings.self)
             return entry ?? GeneratedMediaStoreSettings.defaultSettings
@@ -13705,7 +13708,7 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
                 context: strongSelf.context,
                 updatedPresentationData: strongSelf.updatedPresentationData,
                 peer: EnginePeer(peer),
-                subjects: images,
+                subjects: subjects,
                 presentMediaPicker: { [weak self] subject, saveEditedPhotos, bannedSendMedia, present in
                     if let strongSelf = self {
                         strongSelf.presentMediaPicker(subject: subject, saveEditedPhotos: saveEditedPhotos, bannedSendMedia: bannedSendMedia, present: present, updateMediaPickerContext: { _ in }, completion: { [weak self] signals, silentPosting, scheduleTime, getAnimatedTransitionSource, completion in
@@ -16312,7 +16315,7 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
                 }
             }
             strongSelf.chatDisplayNode.updateDropInteraction(isActive: false)
-            strongSelf.displayPasteMenu(images)
+            strongSelf.displayPasteMenu(images.map { .image($0) })
         }
     }
 
@@ -2293,7 +2293,21 @@ class ChatMessageAnimatedStickerItemNode: ChatMessageItemView {
                 self.item?.controllerInteraction.cancelInteractiveKeyboardGestures()
             case .changed:
                 var translation = recognizer.translation(in: self.view)
-                translation.x = max(-80.0, min(0.0, translation.x))
+                func rubberBandingOffset(offset: CGFloat, bandingStart: CGFloat) -> CGFloat {
+                    let bandedOffset = offset - bandingStart
+                    if offset < bandingStart {
+                        return offset
+                    }
+                    let range: CGFloat = 100.0
+                    let coefficient: CGFloat = 0.4
+                    return bandingStart + (1.0 - (1.0 / ((bandedOffset * coefficient / range) + 1.0))) * range
+                }
+
+                if translation.x < 0.0 {
+                    translation.x = max(-180.0, min(0.0, -rubberBandingOffset(offset: abs(translation.x), bandingStart: swipeOffset)))
+                } else {
+                    translation.x = 0.0
+                }
 
                 if let item = self.item, self.swipeToReplyNode == nil {
                     let swipeToReplyNode = ChatMessageSwipeToReplyNode(fillColor: selectDateFillStaticColor(theme: item.presentationData.theme.theme, wallpaper: item.presentationData.theme.wallpaper), enableBlur: dateFillNeedsBlur(theme: item.presentationData.theme.theme, wallpaper: item.presentationData.theme.wallpaper), foregroundColor: bubbleVariableColor(variableColor: item.presentationData.theme.theme.chat.message.shareButtonForegroundColor, wallpaper: item.presentationData.theme.wallpaper), backgroundNode: item.controllerInteraction.presentationContext.backgroundNode, action: ChatMessageSwipeToReplyNode.Action(self.currentSwipeAction))
@@ -2309,7 +2323,8 @@ class ChatMessageAnimatedStickerItemNode: ChatMessageItemView {
                 self.updateAttachedAvatarNodeOffset(offset: translation.x, transition: .immediate)
 
                 if let swipeToReplyNode = self.swipeToReplyNode {
-                    swipeToReplyNode.frame = CGRect(origin: CGPoint(x: bounds.size.width + offset, y: floor((self.contentSize.height - 33.0) / 2.0)), size: CGSize(width: 33.0, height: 33.0))
+                    swipeToReplyNode.bounds = CGRect(origin: .zero, size: CGSize(width: 33.0, height: 33.0))
+                    swipeToReplyNode.position = CGPoint(x: bounds.size.width + offset + 33.0 * 0.5, y: self.contentSize.height / 2.0)
 
                     if let (rect, containerSize) = self.absoluteRect {
                         let mappedRect = CGRect(origin: CGPoint(x: rect.minX + swipeToReplyNode.frame.minX, y: rect.minY + swipeToReplyNode.frame.minY), size: swipeToReplyNode.frame.size)
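The rubberBandingOffset helper added here (and repeated verbatim in ChatMessageBubbleItemNode and ChatMessageStickerItemNode below) replaces the hard clamp at -80 with an elastic response: offsets below bandingStart pass through unchanged, while anything beyond is squashed to bandingStart + (1 - 1/(bandedOffset * 0.4 / 100 + 1)) * 100, so the swipe translation approaches bandingStart + 100 points but never reaches it, and the outer max(-180.0, ...) only caps the total travel. A standalone copy of the function with a few sample values, assuming a bandingStart of 45 (the real swipeOffset comes from code outside this diff):

    import CoreGraphics

    // The same elastic curve the diff adds inside the pan gesture handler.
    func rubberBandingOffset(offset: CGFloat, bandingStart: CGFloat) -> CGFloat {
        let bandedOffset = offset - bandingStart
        if offset < bandingStart {
            return offset
        }
        let range: CGFloat = 100.0
        let coefficient: CGFloat = 0.4
        return bandingStart + (1.0 - (1.0 / ((bandedOffset * coefficient / range) + 1.0))) * range
    }

    // With an assumed bandingStart of 45:
    //   offset   20 -> 20.0   (below the banding start, passes through)
    //   offset   80 -> ~57.3  (35 pts of extra drag become ~12.3)
    //   offset  300 -> ~95.5
    //   offset 1000 -> ~124.3 (approaching the 45 + 100 asymptote)
    for offset in [20.0, 80.0, 300.0, 1000.0] as [CGFloat] {
        print(offset, rubberBandingOffset(offset: offset, bandingStart: 45.0))
    }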
@@ -4100,7 +4100,21 @@ class ChatMessageBubbleItemNode: ChatMessageItemView, ChatMessagePreviewItemNode
                 self.item?.controllerInteraction.cancelInteractiveKeyboardGestures()
             case .changed:
                 var translation = recognizer.translation(in: self.view)
-                translation.x = max(-80.0, min(0.0, translation.x))
+                func rubberBandingOffset(offset: CGFloat, bandingStart: CGFloat) -> CGFloat {
+                    let bandedOffset = offset - bandingStart
+                    if offset < bandingStart {
+                        return offset
+                    }
+                    let range: CGFloat = 100.0
+                    let coefficient: CGFloat = 0.4
+                    return bandingStart + (1.0 - (1.0 / ((bandedOffset * coefficient / range) + 1.0))) * range
+                }
+
+                if translation.x < 0.0 {
+                    translation.x = max(-180.0, min(0.0, -rubberBandingOffset(offset: abs(translation.x), bandingStart: swipeOffset)))
+                } else {
+                    translation.x = 0.0
+                }
 
                 if let item = self.item, self.swipeToReplyNode == nil {
                     let swipeToReplyNode = ChatMessageSwipeToReplyNode(fillColor: selectDateFillStaticColor(theme: item.presentationData.theme.theme, wallpaper: item.presentationData.theme.wallpaper), enableBlur: dateFillNeedsBlur(theme: item.presentationData.theme.theme, wallpaper: item.presentationData.theme.wallpaper), foregroundColor: bubbleVariableColor(variableColor: item.presentationData.theme.theme.chat.message.shareButtonForegroundColor, wallpaper: item.presentationData.theme.wallpaper), backgroundNode: item.controllerInteraction.presentationContext.backgroundNode, action: ChatMessageSwipeToReplyNode.Action(self.currentSwipeAction))
@@ -4119,7 +4133,8 @@ class ChatMessageBubbleItemNode: ChatMessageItemView, ChatMessagePreviewItemNode
                 self.updateAttachedAvatarNodeOffset(offset: translation.x, transition: .immediate)
 
                 if let swipeToReplyNode = self.swipeToReplyNode {
-                    swipeToReplyNode.frame = CGRect(origin: CGPoint(x: bounds.size.width + offset, y: floor((self.contentSize.height - 33.0) / 2.0)), size: CGSize(width: 33.0, height: 33.0))
+                    swipeToReplyNode.bounds = CGRect(origin: .zero, size: CGSize(width: 33.0, height: 33.0))
+                    swipeToReplyNode.position = CGPoint(x: bounds.size.width + offset + 33.0 * 0.5, y: self.contentSize.height / 2.0)
 
                     if let (rect, containerSize) = self.absoluteRect {
                         let mappedRect = CGRect(origin: CGPoint(x: rect.minX + swipeToReplyNode.frame.minX, y: rect.minY + swipeToReplyNode.frame.minY), size: swipeToReplyNode.frame.size)
@@ -1020,7 +1020,8 @@ class ChatMessageInstantVideoItemNode: ChatMessageItemView, UIGestureRecognizerD
                 self.updateAttachedAvatarNodeOffset(offset: translation.x, transition: .immediate)
 
                 if let swipeToReplyNode = self.swipeToReplyNode {
-                    swipeToReplyNode.frame = CGRect(origin: CGPoint(x: bounds.size.width + offset, y: floor((self.contentSize.height - 33.0) / 2.0)), size: CGSize(width: 33.0, height: 33.0))
+                    swipeToReplyNode.bounds = CGRect(origin: .zero, size: CGSize(width: 33.0, height: 33.0))
+                    swipeToReplyNode.position = CGPoint(x: bounds.size.width + offset + 33.0 * 0.5, y: self.contentSize.height / 2.0)
 
                     if let (rect, containerSize) = self.absoluteRect {
                         let mappedRect = CGRect(origin: CGPoint(x: rect.minX + swipeToReplyNode.frame.minX, y: rect.minY + swipeToReplyNode.frame.minY), size: swipeToReplyNode.frame.size)
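Across the four item nodes the reply indicator is now laid out with separate bounds and position assignments instead of a single frame write, and it is centred on contentSize.height / 2.0 rather than a floored origin. For a layer with the default (0.5, 0.5) anchor point the two forms describe the same rect; setting position directly is the usual choice when the layer may also carry transforms or presentation-layer animations, since frame is undefined under a non-identity transform. A small sketch of the equivalence with made-up layout numbers:

    import UIKit

    let size = CGSize(width: 33.0, height: 33.0)
    let offset: CGFloat = -60.0
    let containerWidth: CGFloat = 320.0
    let contentHeight: CGFloat = 120.0

    // Old style: one frame assignment with a floored origin.
    let frame = CGRect(x: containerWidth + offset,
                       y: floor((contentHeight - size.height) / 2.0),
                       width: size.width, height: size.height)

    // New style: zero-origin bounds plus an explicit centre position.
    let bounds = CGRect(origin: .zero, size: size)
    let position = CGPoint(x: containerWidth + offset + size.width * 0.5,
                           y: contentHeight / 2.0)

    // The sizes are identical and the centres match, up to the half-point the
    // old flooring could introduce.
    assert(bounds.size == frame.size)
    assert(abs(frame.midX - position.x) < 0.001)
    assert(abs(frame.midY - position.y) <= 0.5)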
@@ -1257,7 +1257,21 @@ class ChatMessageStickerItemNode: ChatMessageItemView {
                 self.item?.controllerInteraction.cancelInteractiveKeyboardGestures()
             case .changed:
                 var translation = recognizer.translation(in: self.view)
-                translation.x = max(-80.0, min(0.0, translation.x))
+                func rubberBandingOffset(offset: CGFloat, bandingStart: CGFloat) -> CGFloat {
+                    let bandedOffset = offset - bandingStart
+                    if offset < bandingStart {
+                        return offset
+                    }
+                    let range: CGFloat = 100.0
+                    let coefficient: CGFloat = 0.4
+                    return bandingStart + (1.0 - (1.0 / ((bandedOffset * coefficient / range) + 1.0))) * range
+                }
+
+                if translation.x < 0.0 {
+                    translation.x = max(-180.0, min(0.0, -rubberBandingOffset(offset: abs(translation.x), bandingStart: swipeOffset)))
+                } else {
+                    translation.x = 0.0
+                }
 
                 if let item = self.item, self.swipeToReplyNode == nil {
                     let swipeToReplyNode = ChatMessageSwipeToReplyNode(fillColor: selectDateFillStaticColor(theme: item.presentationData.theme.theme, wallpaper: item.presentationData.theme.wallpaper), enableBlur: dateFillNeedsBlur(theme: item.presentationData.theme.theme, wallpaper: item.presentationData.theme.wallpaper), foregroundColor: bubbleVariableColor(variableColor: item.presentationData.theme.theme.chat.message.shareButtonForegroundColor, wallpaper: item.presentationData.theme.wallpaper), backgroundNode: item.controllerInteraction.presentationContext.backgroundNode, action: ChatMessageSwipeToReplyNode.Action(self.currentSwipeAction))
@@ -1273,7 +1287,8 @@ class ChatMessageStickerItemNode: ChatMessageItemView {
                 self.updateAttachedAvatarNodeOffset(offset: translation.x, transition: .immediate)
 
                 if let swipeToReplyNode = self.swipeToReplyNode {
-                    swipeToReplyNode.frame = CGRect(origin: CGPoint(x: bounds.size.width + offset, y: floor((self.contentSize.height - 33.0) / 2.0)), size: CGSize(width: 33.0, height: 33.0))
+                    swipeToReplyNode.bounds = CGRect(origin: .zero, size: CGSize(width: 33.0, height: 33.0))
+                    swipeToReplyNode.position = CGPoint(x: bounds.size.width + offset + 33.0 * 0.5, y: self.contentSize.height / 2.0)
 
                     if let (rect, containerSize) = self.absoluteRect {
                         let mappedRect = CGRect(origin: CGPoint(x: rect.minX + swipeToReplyNode.frame.minX, y: rect.minY + swipeToReplyNode.frame.minY), size: swipeToReplyNode.frame.size)
@@ -179,18 +179,24 @@ final class ChatMessageSwipeToReplyNode: ASDisplayNode {
             return
         }
 
+        self.layer.animateScale(from: 0.0, to: 0.1, duration: 0.2, additive: true, completion: { [weak self] _ in
+            self?.layer.animateScale(from: 0.1, to: 0.0, duration: 0.15, additive: true)
+        })
+
         self.animatedWave = true
 
         var lineWidth = self.progressLayer.lineWidth
-        self.progressLayer.lineWidth = 1.0
-        self.progressLayer.animate(from: lineWidth as NSNumber, to: 1.0 as NSNumber, keyPath: "lineWidth", timingFunction: CAMediaTimingFunctionName.linear.rawValue, duration: 0.2, completion: { [weak self] _ in
-            self?.progressLayer.animate(from: 1.0 as NSNumber, to: 0.0 as NSNumber, keyPath: "lineWidth", timingFunction: CAMediaTimingFunctionName.linear.rawValue, duration: 0.1, removeOnCompletion: false)
+        self.progressLayer.lineWidth = 0.0
+        self.progressLayer.animate(from: lineWidth as NSNumber, to: 0.0 as NSNumber, keyPath: "lineWidth", timingFunction: CAMediaTimingFunctionName.linear.rawValue, duration: 0.3, completion: { _ in
+
         })
 
         var path = self.progressLayer.path
         var targetPath = UIBezierPath(arcCenter: CGPoint(x: self.progressLayer.frame.width / 2.0, y: self.progressLayer.frame.height / 2.0), radius: 35.0, startAngle: CGFloat(-0.5 * .pi), endAngle: CGFloat(-0.5 * .pi + 2.0 * .pi), clockwise: true).cgPath
         self.progressLayer.path = targetPath
-        self.progressLayer.animate(from: path, to: targetPath, keyPath: "path", timingFunction: kCAMediaTimingFunctionSpring, duration: 0.25)
+        self.progressLayer.animate(from: path, to: targetPath, keyPath: "path", timingFunction: kCAMediaTimingFunctionSpring, duration: 0.3)
 
         self.progressLayer.animateAlpha(from: 1.0, to: 0.0, duration: 0.3)
 
         self.fillLayer.isHidden = false
         self.fillLayer.path = UIBezierPath(ovalIn: CGRect(origin: .zero, size: size)).cgPath
@@ -198,12 +204,12 @@ final class ChatMessageSwipeToReplyNode: ASDisplayNode {
 
         lineWidth = self.fillLayer.lineWidth
         self.fillLayer.lineWidth = 18.0
-        self.fillLayer.animate(from: lineWidth as NSNumber, to: 18.0 as NSNumber, keyPath: "lineWidth", timingFunction: CAMediaTimingFunctionName.linear.rawValue, duration: 0.25)
+        self.fillLayer.animate(from: lineWidth as NSNumber, to: 18.0 as NSNumber, keyPath: "lineWidth", timingFunction: CAMediaTimingFunctionName.linear.rawValue, duration: 0.3)
 
         path = self.fillLayer.path
         targetPath = UIBezierPath(ovalIn: CGRect(origin: .zero, size: size).insetBy(dx: 9.0, dy: 9.0)).cgPath
         self.fillLayer.path = targetPath
-        self.fillLayer.animate(from: path, to: targetPath, keyPath: "path", timingFunction: CAMediaTimingFunctionName.linear.rawValue, duration: 0.25)
+        self.fillLayer.animate(from: path, to: targetPath, keyPath: "path", timingFunction: CAMediaTimingFunctionName.linear.rawValue, duration: 0.3)
     }
 
     func updateAbsoluteRect(_ rect: CGRect, within containerSize: CGSize) {
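The activation animation now shrinks the progress ring's lineWidth straight to 0 over 0.3 s instead of pulsing it up to 1.0 and back, stretches the path and fill animations from 0.25 s to 0.3 s so the phases end together, and adds a small additive scale pop on the node's layer. The calls above go through the project's own animate(from:to:keyPath:) helpers; a rough equivalent of the ring part with stock Core Animation, for illustration only:

    import UIKit

    // Animate a CAShapeLayer's path and lineWidth with plain CABasicAnimation.
    func animateRing(_ layer: CAShapeLayer, to targetPath: CGPath, duration: CFTimeInterval = 0.3) {
        let pathAnimation = CABasicAnimation(keyPath: "path")
        pathAnimation.fromValue = layer.path
        pathAnimation.toValue = targetPath
        pathAnimation.duration = duration
        pathAnimation.timingFunction = CAMediaTimingFunction(name: .easeOut)

        let widthAnimation = CABasicAnimation(keyPath: "lineWidth")
        widthAnimation.fromValue = layer.lineWidth
        widthAnimation.toValue = 0.0
        widthAnimation.duration = duration

        // Set the model values first so the layer stays where the animation ends.
        layer.path = targetPath
        layer.lineWidth = 0.0

        layer.add(pathAnimation, forKey: "path")
        layer.add(widthAnimation, forKey: "lineWidth")
    }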
@@ -91,7 +91,9 @@ final class NotificationItemContainerNode: ASDisplayNode {
         var contentInsets = UIEdgeInsets(top: inset, left: inset + layout.safeInsets.left, bottom: inset, right: inset + layout.safeInsets.right)
 
         if let statusBarHeight = layout.statusBarHeight, statusBarHeight >= 39.0 {
-            if statusBarHeight >= 44.0 {
+            if layout.deviceMetrics.hasDynamicIsland {
+                contentInsets.top = statusBarHeight
+            } else if statusBarHeight >= 44.0 {
                 contentInsets.top += 34.0
             } else {
                 contentInsets.top += 29.0
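In-app notification insets now distinguish the Dynamic Island: on those devices the banner is pushed down by the full reported status-bar height, while notch devices keep the fixed 34 pt offset and other tall status bars keep 29 pt. A compact sketch of the same decision, with hasDynamicIsland standing in for layout.deviceMetrics.hasDynamicIsland:

    import CoreGraphics

    // Compute the top inset for an in-app notification banner.
    func topInset(baseInset: CGFloat, statusBarHeight: CGFloat?, hasDynamicIsland: Bool) -> CGFloat {
        guard let statusBarHeight, statusBarHeight >= 39.0 else {
            return baseInset
        }
        if hasDynamicIsland {
            // Dynamic Island: use the reported status-bar height directly.
            return statusBarHeight
        } else if statusBarHeight >= 44.0 {
            // Notch-style devices.
            return baseInset + 34.0
        } else {
            // Other tall status bars.
            return baseInset + 29.0
        }
    }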