diff --git a/submodules/StatisticsUI/Sources/StatsMessageItem.swift b/submodules/StatisticsUI/Sources/StatsMessageItem.swift
index e7de7bf930..0817e537bb 100644
--- a/submodules/StatisticsUI/Sources/StatsMessageItem.swift
+++ b/submodules/StatisticsUI/Sources/StatsMessageItem.swift
@@ -328,7 +328,11 @@ final class StatsMessageItemNode: ListViewItemNode, ItemListItemNode {
                 }
             }
             
-            text = foldLineBreaks(text)
+            if item.isPeer {
+                text = EnginePeer(item.peer).displayTitle(strings: item.presentationData.strings, displayOrder: item.presentationData.nameDisplayOrder)
+            } else {
+                text = foldLineBreaks(text)
+            }
             
             if let _ = contentImageMedia {
                 totalLeftInset += 46.0
@@ -542,7 +546,8 @@ final class StatsMessageItemNode: ListViewItemNode, ItemListItemNode {
                     let labelFrame = CGRect(origin: CGPoint(x: totalLeftInset, y: titleFrame.maxY + titleSpacing), size: labelLayout.size)
                     strongSelf.labelNode.frame = labelFrame
                     
-                    let viewsFrame = CGRect(origin: CGPoint(x: params.width - rightInset - viewsLayout.size.width, y: 13.0), size: viewsLayout.size)
+                    let viewsOriginY: CGFloat = forwardsLayout.size.width > 0.0 || reactionsLayout.size.width > 0.0 ? 13.0 : floorToScreenPixels((contentSize.height - viewsLayout.size.height) / 2.0)
+                    let viewsFrame = CGRect(origin: CGPoint(x: params.width - rightInset - viewsLayout.size.width, y: viewsOriginY), size: viewsLayout.size)
                     strongSelf.viewsNode.frame = viewsFrame
                     
                     let iconSpacing: CGFloat = 3.0 - UIScreenPixel
diff --git a/submodules/TelegramUI/Components/Chat/ChatMessageJoinedChannelBubbleContentNode/Sources/ChatMessageJoinedChannelBubbleContentNode.swift b/submodules/TelegramUI/Components/Chat/ChatMessageJoinedChannelBubbleContentNode/Sources/ChatMessageJoinedChannelBubbleContentNode.swift
index 9a5613a7ba..a8bb5342c6 100644
--- a/submodules/TelegramUI/Components/Chat/ChatMessageJoinedChannelBubbleContentNode/Sources/ChatMessageJoinedChannelBubbleContentNode.swift
+++ b/submodules/TelegramUI/Components/Chat/ChatMessageJoinedChannelBubbleContentNode/Sources/ChatMessageJoinedChannelBubbleContentNode.swift
@@ -357,6 +357,7 @@ public class ChatMessageJoinedChannelBubbleContentNode: ChatMessageBubbleContent
                             }
                         )
                         item.controllerInteraction.presentControllerInCurrent(controller, nil)
+                        HapticFeedback().impact(.light)
                     }
                 },
                 openMore: { [weak self] in
@@ -645,11 +646,29 @@ private final class ChannelItemComponent: Component {
             
             self.containerButton.addTarget(self, action: #selector(self.pressed), for: .touchUpInside)
             
+            self.contextContainer.animateScale = false
             self.contextContainer.activated = { [weak self] gesture, point in
                 if let self, let component = self.component, let peer = component.peers.first {
                     component.contextAction?(peer, self.contextContainer, gesture)
                 }
             }
+            
+            self.containerButton.highligthedChanged = { [weak self] highlighted in
+                if let self, self.bounds.width > 0.0 {
+                    let topScale: CGFloat = (self.bounds.width - 6.0) / self.bounds.width
+                    
+                    if highlighted {
+                        self.contextContainer.layer.removeAnimation(forKey: "sublayerTransform")
+                        let transition = Transition(animation: .curve(duration: 0.2, curve: .easeInOut))
+                        transition.setScale(layer: self.contextContainer.layer, scale: topScale)
+                    } else {
+                        let transition = Transition(animation: .none)
+                        transition.setScale(layer: self.contextContainer.layer, scale: 1.0)
+                        
+                        self.contextContainer.layer.animateScale(from: topScale, to: 1.0, duration: 0.2, timingFunction: CAMediaTimingFunctionName.easeOut.rawValue, removeOnCompletion: false)
+                    }
+                }
+            }
         }
         
         required init?(coder: NSCoder) {
diff --git a/submodules/TelegramUI/Components/MediaEditor/Sources/MediaEditor.swift b/submodules/TelegramUI/Components/MediaEditor/Sources/MediaEditor.swift
index 8ca98d2550..badc0010c4 100644
--- a/submodules/TelegramUI/Components/MediaEditor/Sources/MediaEditor.swift
+++ b/submodules/TelegramUI/Components/MediaEditor/Sources/MediaEditor.swift
@@ -510,7 +510,7 @@ public final class MediaEditor {
                 }
                 textureSource = .single((image, nil, nil, colors))
             }
-        case let .video(path, transitionImage, mirror, additionalPath, _, _):
+        case let .video(path, transitionImage, mirror, _, _, _):
             let _ = mirror
             textureSource = Signal { subscriber in
                 let asset = AVURLAsset(url: URL(fileURLWithPath: path))
@@ -522,22 +522,22 @@ public final class MediaEditor {
                 }
                 player.automaticallyWaitsToMinimizeStalling = false
                 
-                var additionalPlayer: AVPlayer?
-                if let additionalPath {
-                    let additionalAsset = AVURLAsset(url: URL(fileURLWithPath: additionalPath))
-                    additionalPlayer = AVPlayer(playerItem: AVPlayerItem(asset: additionalAsset))
-                    if #available(iOS 15.0, *) {
-                        additionalPlayer?.sourceClock = clock
-                    } else {
-                        additionalPlayer?.masterClock = clock
-                    }
-                    additionalPlayer?.automaticallyWaitsToMinimizeStalling = false
-                }
+//                var additionalPlayer: AVPlayer?
+//                if let additionalPath {
+//                    let additionalAsset = AVURLAsset(url: URL(fileURLWithPath: additionalPath))
+//                    additionalPlayer = AVPlayer(playerItem: AVPlayerItem(asset: additionalAsset))
+//                    if #available(iOS 15.0, *) {
+//                        additionalPlayer?.sourceClock = clock
+//                    } else {
+//                        additionalPlayer?.masterClock = clock
+//                    }
+//                    additionalPlayer?.automaticallyWaitsToMinimizeStalling = false
+//                }
                 
                 if let transitionImage {
                     let colors = mediaEditorGetGradientColors(from: transitionImage)
                     //TODO pass mirror
-                    subscriber.putNext((nil, player, additionalPlayer, colors))
+                    subscriber.putNext((nil, player, nil, colors))
                     subscriber.putCompletion()
                     
                     return EmptyDisposable
@@ -548,7 +548,7 @@ public final class MediaEditor {
                 imageGenerator.generateCGImagesAsynchronously(forTimes: [NSValue(time: CMTime(seconds: 0, preferredTimescale: CMTimeScale(30.0)))]) { _, image, _, _, _ in
                     let colors: GradientColors = image.flatMap({ mediaEditorGetGradientColors(from: UIImage(cgImage: $0)) }) ?? GradientColors(top: .black, bottom: .black)
                     //TODO pass mirror
-                    subscriber.putNext((nil, player, additionalPlayer, colors))
+                    subscriber.putNext((nil, player, nil, colors))
                     subscriber.putCompletion()
                 }
                 return ActionDisposable {
@@ -644,9 +644,6 @@ public final class MediaEditor {
                 textureSource.setMainInput(.video(playerItem))
             }
             if let additionalPlayer, let playerItem = additionalPlayer.currentItem {
-                if self.values.additionalVideoPath == nil {
-                    self.values = self.values.withUpdatedAdditionalVideo(path: "", positionChanges: [])
-                }
                 textureSource.setAdditionalInput(.video(playerItem))
             }
             self.renderer.textureSource = textureSource
@@ -1379,6 +1376,10 @@ public final class MediaEditor {
         
         self.additionalPlayer = player
         self.additionalPlayerPromise.set(.single(player))
         
+        if let volume = self.values.additionalVideoVolume {
+            self.additionalPlayer?.volume = Float(volume)
+        }
+        
         (self.renderer.textureSource as? UniversalTextureSource)?.setAdditionalInput(.video(playerItem))
     }
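Note on the press-feedback change in ChannelItemComponent: while containerButton reports a highlight through its highligthedChanged callback, the diff scales the contextContainer layer down by a fixed 6pt inset over 0.2s and eases it back to 1.0 on release. Below is a minimal, self-contained UIKit sketch of the same pattern under assumed names (PressScaleButton is hypothetical and not part of the diff); the 6pt inset and 0.2s ease-out simply mirror the values used above.

import UIKit

// Hypothetical standalone illustration of the press-scale feedback used in the diff.
final class PressScaleButton: UIButton {
    override var isHighlighted: Bool {
        didSet {
            guard bounds.width > 0.0 else { return }
            // Shrink so the view loses about 6pt of width, matching (bounds.width - 6.0) / bounds.width.
            let pressedScale = (bounds.width - 6.0) / bounds.width
            let targetTransform: CGAffineTransform = isHighlighted ? CGAffineTransform(scaleX: pressedScale, y: pressedScale) : .identity
            // Ease out over 0.2s; beginFromCurrentState keeps it interruptible if the highlight flips mid-animation.
            UIView.animate(withDuration: 0.2, delay: 0.0, options: [.beginFromCurrentState, .allowUserInteraction, .curveEaseOut], animations: {
                self.transform = targetTransform
            })
        }
    }
}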