diff --git a/submodules/TelegramUI/Components/AudioWaveformComponent/Sources/AudioWaveformComponent.swift b/submodules/TelegramUI/Components/AudioWaveformComponent/Sources/AudioWaveformComponent.swift
index 03ea1c9261..2eb94f5e3f 100644
--- a/submodules/TelegramUI/Components/AudioWaveformComponent/Sources/AudioWaveformComponent.swift
+++ b/submodules/TelegramUI/Components/AudioWaveformComponent/Sources/AudioWaveformComponent.swift
@@ -212,6 +212,7 @@ public final class AudioWaveformComponent: Component {
         private var sparksView: SparksView?
         
         private var progress: CGFloat = 0.0
+        private var lastHeight: CGFloat = 0.0
         private var revealProgress: CGFloat = 1.0
         private var animator: DisplayLinkAnimator?
         
@@ -409,7 +410,7 @@ public final class AudioWaveformComponent: Component {
                     self.addSubview(sparksView)
                     self.sparksView = sparksView
                 }
-                sparksView.frame = CGRect(origin: .zero, size: size).insetBy(dx: -5.0, dy: -5.0)
+                sparksView.frame = CGRect(origin: .zero, size: size).insetBy(dx: -10.0, dy: -15.0)
             } else if let sparksView = self.sparksView {
                 self.sparksView = nil
                 sparksView.removeFromSuperview()
             }
@@ -433,7 +434,7 @@ public final class AudioWaveformComponent: Component {
            if needsAnimation {
                self.playbackStatusAnimator = ConstantDisplayLinkAnimator(update: { [weak self] in
                    if let self, let component = self.component, let sparksView = self.sparksView {
-                        sparksView.update(position: CGPoint(x: sparksView.bounds.width * self.progress, y: sparksView.bounds.height / 2.0), color: component.foregroundColor)
+                        sparksView.update(position: CGPoint(x: 10.0 + (sparksView.bounds.width - 20.0) * self.progress, y: sparksView.bounds.height / 2.0 + 8.0), sampleHeight: self.lastHeight, color: component.foregroundColor)
                    }
                    self?.setNeedsDisplay()
                })
@@ -574,6 +575,7 @@
            
            let commonRevealFraction = listViewAnimationCurveSystem(self.revealProgress)
            
+            var lastHeight: CGFloat = 0.0
            for i in 0 ..< numSamples {
                let offset = CGFloat(i) * (sampleWidth + distance)
                let peakSample = adjustedSamples[i]
@@ -596,6 +598,7 @@
                let colorMixFraction: CGFloat
                if startFraction < playbackProgress {
                    colorMixFraction = max(0.0, min(1.0, (playbackProgress - startFraction) / (nextStartFraction - startFraction)))
+                    lastHeight = sampleHeight
                } else {
                    colorMixFraction = 0.0
                }
@@ -637,6 +640,8 @@
                    context.fill(adjustedRect)
                }
            }
+            
+            self.lastHeight = lastHeight
        }
    }
}
@@ -683,12 +688,18 @@ private class SparksView: UIView {
        fatalError("init(coder:) has not been implemented")
    }
    
-    func update(position: CGPoint, color: UIColor) {
+    private var presentationSampleHeight: CGFloat = 0.0
+    private var sampleHeight: CGFloat = 0.0
+    
+    func update(position: CGPoint, sampleHeight: CGFloat, color: UIColor) {
        self.color = color
+        
+        self.sampleHeight = sampleHeight
+        self.presentationSampleHeight = self.presentationSampleHeight * 0.9 + self.sampleHeight * 0.1
        
        let v = CGPoint(x: 1.0, y: 0.0)
-        let c = CGPoint(x: position.x - 3.0, y: position.y - 5.5 + 13.0 * CGFloat(arc4random_uniform(100)) / 100.0 + 1.0)
-        
+        let c = CGPoint(x: position.x - 4.0, y: position.y + 1.0 - self.presentationSampleHeight * CGFloat(arc4random_uniform(100)) / 100.0)
+        
        let timestamp = CACurrentMediaTime()
        let dt: CGFloat = 1.0 / 60.0
        
@@ -714,9 +725,9 @@
            self.particles.remove(at: i)
        }
        
-        let newParticleCount = 2
+        let newParticleCount = 3
        for _ in 0 ..< newParticleCount {
-            let degrees: CGFloat = CGFloat(arc4random_uniform(100)) - 50.0
+            let degrees: CGFloat = CGFloat(arc4random_uniform(100)) - 65.0
            let angle: CGFloat = degrees * CGFloat.pi / 180.0
            
            let direction = CGPoint(x: v.x * cos(angle) - v.y * sin(angle), y: v.x * sin(angle) + v.y * cos(angle))
@@ -739,7 +750,7 @@
        context.setFillColor(self.color.cgColor)
        
        for particle in self.particles {
-            let size: CGFloat = 1.0
+            let size: CGFloat = 1.4
            context.setAlpha(particle.alpha * 1.0)
            context.fillEllipse(in: CGRect(origin: CGPoint(x: particle.position.x - size / 2.0, y: particle.position.y - size / 2.0), size: CGSize(width: size, height: size)))
        }
diff --git a/submodules/TelegramUI/Components/Chat/ChatMessageInteractiveFileNode/Sources/ChatMessageInteractiveFileNode.swift b/submodules/TelegramUI/Components/Chat/ChatMessageInteractiveFileNode/Sources/ChatMessageInteractiveFileNode.swift
index 6b2f1bff0b..a851caebff 100644
--- a/submodules/TelegramUI/Components/Chat/ChatMessageInteractiveFileNode/Sources/ChatMessageInteractiveFileNode.swift
+++ b/submodules/TelegramUI/Components/Chat/ChatMessageInteractiveFileNode/Sources/ChatMessageInteractiveFileNode.swift
@@ -1656,6 +1656,9 @@ public final class ChatMessageInteractiveFileNode: ASDisplayNode {
            
            if isViewOnceMessage && playbackStatus == .playing {
                state = .secretTimeout(position: playbackState.position, duration: playbackState.duration, generationTimestamp: playbackState.generationTimestamp, appearance: .init(inset: 1.0 + UIScreenPixel, lineWidth: 2.0 - UIScreenPixel))
+                if incoming {
+                    self.consumableContentNode.isHidden = true
+                }
            } else {
                switch playbackStatus {
                case .playing:
@@ -1783,8 +1786,8 @@
                self.addSubnode(streamingStatusNode)
                
                if isViewOnceMessage {
-                    streamingStatusNode.layer.animateScale(from: 0.1, to: 1.0, duration: 0.2)
-                    streamingStatusNode.layer.animateAlpha(from: 0.1, to: 1.0, duration: 0.2)
+                    streamingStatusNode.layer.animateScale(from: 0.1, to: 1.0, duration: 0.2, timingFunction: CAMediaTimingFunctionName.linear.rawValue)
+                    streamingStatusNode.layer.animateAlpha(from: 0.1, to: 1.0, duration: 0.2, timingFunction: CAMediaTimingFunctionName.linear.rawValue)
                }
            } else if let streamingStatusNode = self.streamingStatusNode {
                streamingStatusNode.backgroundNodeColor = backgroundNodeColor
@@ -1815,9 +1818,9 @@
            if streamingState == .none {
                self.streamingStatusNode = nil
                if isViewOnceMessage {
-                    streamingStatusNode.layer.animateScale(from: 1.0, to: 0.1, duration: 0.2, removeOnCompletion: false)
+                    streamingStatusNode.layer.animateScale(from: 1.0, to: 0.1, duration: 0.2, timingFunction: CAMediaTimingFunctionName.linear.rawValue, removeOnCompletion: false)
                }
-                streamingStatusNode.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2, removeOnCompletion: false, completion: { [weak streamingStatusNode] _ in
+                streamingStatusNode.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2, timingFunction: CAMediaTimingFunctionName.linear.rawValue, removeOnCompletion: false, completion: { [weak streamingStatusNode] _ in
                    if streamingState == .none {
                        streamingStatusNode?.removeFromSupernode()
                    }
diff --git a/submodules/TelegramUI/Sources/Chat/ChatMessageActionOptions.swift b/submodules/TelegramUI/Sources/Chat/ChatMessageActionOptions.swift
index 1db5fab7f0..12bfa12d89 100644
--- a/submodules/TelegramUI/Sources/Chat/ChatMessageActionOptions.swift
+++ b/submodules/TelegramUI/Sources/Chat/ChatMessageActionOptions.swift
@@ -417,7 +417,9 @@ private func generateChatReplyOptionItems(selfController: ChatControllerImpl, ch
        if message.id.peerId.namespace == Namespaces.Peer.SecretChat {
            canReplyInAnotherChat = false
        }
-        
+        if message.minAutoremoveOrClearTimeout == viewOnceTimeout {
+            canReplyInAnotherChat = false
+        }
    }
    
    if canReplyInAnotherChat {
diff --git a/submodules/TelegramUI/Sources/ChatRecordingPreviewInputPanelNode.swift b/submodules/TelegramUI/Sources/ChatRecordingPreviewInputPanelNode.swift
index ffb98a1d50..f269156706 100644
--- a/submodules/TelegramUI/Sources/ChatRecordingPreviewInputPanelNode.swift
+++ b/submodules/TelegramUI/Sources/ChatRecordingPreviewInputPanelNode.swift
@@ -515,26 +515,21 @@ private final class PlayPauseIconNode: ManagedAnimationNode {
 
 
 final class ChatRecordingViewOnceButtonNode: HighlightTrackingButtonNode {
-    private let backgroundNode: NavigationBackgroundNode
-    private let borderNode: ASImageNode
+    private let backgroundNode: ASImageNode
     private let iconNode: ASImageNode
     
     private var theme: PresentationTheme?
     
     override init(pointerStyle: PointerStyle? = nil) {
-        self.backgroundNode = NavigationBackgroundNode(color: .clear)
+        self.backgroundNode = ASImageNode()
         self.backgroundNode.isUserInteractionEnabled = false
         
-        self.borderNode = ASImageNode()
-        self.borderNode.isUserInteractionEnabled = false
-        
         self.iconNode = ASImageNode()
         self.iconNode.isUserInteractionEnabled = false
         
         super.init(pointerStyle: pointerStyle)
         
         self.addSubnode(self.backgroundNode)
-        self.addSubnode(self.borderNode)
         self.addSubnode(self.iconNode)
         
         self.highligthedChanged = { [weak self] highlighted in
@@ -592,19 +587,13 @@ final class ChatRecordingViewOnceButtonNode: HighlightTrackingButtonNode {
         
         if self.theme !== theme {
             self.theme = theme
-            self.backgroundNode.updateColor(color: theme.chat.inputPanel.panelBackgroundColor, transition: .immediate)
-            
-            self.borderNode.image = generateCircleImage(diameter: innerSize.width, lineWidth: 0.5, color: theme.chat.historyNavigation.strokeColor, backgroundColor: nil)
+            self.backgroundNode.image = generateFilledCircleImage(diameter: innerSize.width, color: theme.rootController.navigationBar.opaqueBackgroundColor, strokeColor: theme.chat.inputPanel.panelSeparatorColor, strokeWidth: 0.5, backgroundColor: nil)
             self.iconNode.image = generateTintedImage(image: UIImage(bundleImageName: self.innerIsSelected ? "Media Gallery/ViewOnceEnabled" : "Media Gallery/ViewOnce"), color: theme.chat.inputPanel.panelControlAccentColor)
         }
-        
-        let backgroundFrame = CGRect(origin: CGPoint(x: floorToScreenPixels(size.width / 2.0 - innerSize.width / 2.0), y: floorToScreenPixels(size.height / 2.0 - innerSize.height / 2.0)), size: innerSize)
-        self.backgroundNode.update(size: innerSize, cornerRadius: innerSize.width / 2.0, transition: .immediate, beginWithCurrentState: false)
-        self.backgroundNode.frame = backgroundFrame
-        
-        if let borderImage = self.borderNode.image {
-            let borderFrame = CGRect(origin: CGPoint(x: floorToScreenPixels(size.width / 2.0 - borderImage.size.width / 2.0), y: floorToScreenPixels(size.height / 2.0 - borderImage.size.height / 2.0)), size: borderImage.size)
-            self.borderNode.frame = borderFrame
+        
+        if let backgroundImage = self.backgroundNode.image {
+            let backgroundFrame = CGRect(origin: CGPoint(x: floorToScreenPixels(size.width / 2.0 - backgroundImage.size.width / 2.0), y: floorToScreenPixels(size.height / 2.0 - backgroundImage.size.height / 2.0)), size: backgroundImage.size)
+            self.backgroundNode.frame = backgroundFrame
         }
         
         if let iconImage = self.iconNode.image {