From 4180b348f16d76f639d690a9ca828af393ff2088 Mon Sep 17 00:00:00 2001
From: Ilya Laktyushin
Date: Thu, 22 May 2025 06:12:04 +0200
Subject: [PATCH] Various improvements

---
 .../AudioWaveform/Sources/AudioWaveform.swift |  20 ++++
 ...tionSequencePhoneEntryControllerNode.swift |   9 ++
 .../Sources/MediaPlayerScrubbingNode.swift    |  10 +-
 .../Sources/MediaPlayerTimeTextNode.swift     |   5 +-
 .../Sources/AudioWaveformNode.swift           |  25 +++-
 .../Chat/ChatControllerMediaRecording.swift   |   9 +-
 .../ChatRecordingPreviewInputPanelNode.swift  | 111 ++++++++++++++++--
 7 files changed, 171 insertions(+), 18 deletions(-)

diff --git a/submodules/AudioWaveform/Sources/AudioWaveform.swift b/submodules/AudioWaveform/Sources/AudioWaveform.swift
index 2fd0059971..10c1165247 100644
--- a/submodules/AudioWaveform/Sources/AudioWaveform.swift
+++ b/submodules/AudioWaveform/Sources/AudioWaveform.swift
@@ -80,6 +80,26 @@ public final class AudioWaveform: Equatable {
         return result
     }
     
+    public func subwaveform(from start: Double, to end: Double) -> AudioWaveform {
+        let normalizedStart = max(0.0, min(1.0, start))
+        let normalizedEnd = max(normalizedStart, min(1.0, end))
+        
+        let numSamples = self.samples.count / 2
+        let startIndex = Int(Double(numSamples) * normalizedStart) * 2
+        let endIndex = Int(Double(numSamples) * normalizedEnd) * 2
+        
+        let rangeLength = endIndex - startIndex
+        let subData: Data
+        
+        if rangeLength > 0 {
+            subData = self.samples.subdata(in: startIndex..<endIndex)
+        } else {
+            subData = Data()
+        }
+        
+        return AudioWaveform(samples: subData, peak: self.peak)
+    }
+    
     public static func ==(lhs: AudioWaveform, rhs: AudioWaveform) -> Bool {
         return lhs.peak == rhs.peak && lhs.samples == rhs.samples
     }
diff --git a/submodules/AuthorizationUI/Sources/AuthorizationSequencePhoneEntryControllerNode.swift b/submodules/AuthorizationUI/Sources/AuthorizationSequencePhoneEntryControllerNode.swift
index e32f74f78a..6181df65d9 100644
--- a/submodules/AuthorizationUI/Sources/AuthorizationSequencePhoneEntryControllerNode.swift
+++ b/submodules/AuthorizationUI/Sources/AuthorizationSequencePhoneEntryControllerNode.swift
@@ -21,6 +21,7 @@ private final class PhoneAndCountryNode: ASDisplayNode {
     let theme: PresentationTheme
     
     let countryButton: ASButtonNode
+    let countryButtonArrow: ASImageNode
     let phoneBackground: ASImageNode
     let phoneInputNode: PhoneInputNode
     
@@ -90,6 +91,9 @@ private final class PhoneAndCountryNode: ASDisplayNode {
         self.countryButton.titleNode.truncationMode = .byTruncatingTail
         self.countryButton.setBackgroundImage(countryButtonHighlightedBackground, for: .highlighted)
         
+        self.countryButtonArrow = ASImageNode()
+        self.countryButtonArrow.image = generateTintedImage(image: UIImage(bundleImageName: "Item List/DisclosureArrow"), color: theme.list.disclosureArrowColor)
+        
         self.phoneBackground = ASImageNode()
         self.phoneBackground.image = phoneInputBackground
         self.phoneBackground.displaysAsynchronously = false
@@ -102,6 +106,7 @@ private final class PhoneAndCountryNode: ASDisplayNode {
         
         self.addSubnode(self.phoneBackground)
         self.addSubnode(self.countryButton)
+        self.countryButton.addSubnode(self.countryButtonArrow)
         self.addSubnode(self.phoneInputNode)
         
         self.phoneInputNode.countryCodeField.textField.keyboardAppearance = theme.rootController.keyboardColor.keyboardAppearance
@@ -241,6 +246,10 @@ private final class PhoneAndCountryNode: ASDisplayNode {
         self.countryButton.frame = CGRect(origin: CGPoint(), size: CGSize(width: size.width, height: 67.0))
         self.phoneBackground.frame = CGRect(origin: CGPoint(x: 0.0, y: size.height - 57.0), size: CGSize(width: size.width - inset, height: 57.0))
         
+        if let image = self.countryButtonArrow.image {
+            self.countryButtonArrow.frame = CGRect(origin: CGPoint(x: size.width - image.size.width - 24.0 - 3.0, y: 16.0 + UIScreenPixel), size: image.size)
+        }
+        
         let countryCodeFrame = CGRect(origin: CGPoint(x: 18.0, y: size.height - 58.0), size: CGSize(width: 71.0, height: 57.0))
         let numberFrame = CGRect(origin: CGPoint(x: 107.0, y: size.height - 58.0), size: CGSize(width: size.width - 96.0 - 8.0 - 24.0, height: 57.0))
         let placeholderFrame = numberFrame.offsetBy(dx: 0.0, dy: 17.0 - UIScreenPixel)
diff --git a/submodules/MediaPlayer/Sources/MediaPlayerScrubbingNode.swift b/submodules/MediaPlayer/Sources/MediaPlayerScrubbingNode.swift
index b15072d563..fc71c6c80f 100644
--- a/submodules/MediaPlayer/Sources/MediaPlayerScrubbingNode.swift
+++ b/submodules/MediaPlayer/Sources/MediaPlayerScrubbingNode.swift
@@ -674,6 +674,8 @@ public final class MediaPlayerScrubbingNode: ASDisplayNode {
                     if let statusValue = strongSelf.statusValue, Double(0.0).isLess(than: statusValue.duration) {
                         strongSelf.scrubbingBeginTimestamp = statusValue.timestamp
                         strongSelf.scrubbingTimestampValue = statusValue.timestamp
+                        strongSelf._scrubbingTimestamp.set(.single(strongSelf.scrubbingTimestampValue))
+                        strongSelf._scrubbingPosition.set(.single(strongSelf.scrubbingTimestampValue.flatMap { $0 / statusValue.duration }))
                         strongSelf.updateProgressAnimations()
                     }
                 }
@@ -682,6 +684,8 @@ public final class MediaPlayerScrubbingNode: ASDisplayNode {
                 if let strongSelf = self {
                     if let statusValue = strongSelf.statusValue, let scrubbingBeginTimestamp = strongSelf.scrubbingBeginTimestamp, Double(0.0).isLess(than: statusValue.duration) {
                         strongSelf.scrubbingTimestampValue = scrubbingBeginTimestamp + (statusValue.duration * Double(addedFraction)) * multiplier
+                        strongSelf._scrubbingTimestamp.set(.single(strongSelf.scrubbingTimestampValue))
+                        strongSelf._scrubbingPosition.set(.single(strongSelf.scrubbingTimestampValue.flatMap { $0 / statusValue.duration }))
                         strongSelf.updateProgressAnimations()
                     }
                 }
@@ -697,7 +701,11 @@ public final class MediaPlayerScrubbingNode: ASDisplayNode {
                 if let strongSelf = self {
                     strongSelf.scrubbingBeginTimestamp = nil
                     let scrubbingTimestampValue = strongSelf.scrubbingTimestampValue
-                    strongSelf.scrubbingTimestampValue = nil
+                    Queue.mainQueue().after(0.05, {
+                        strongSelf._scrubbingTimestamp.set(.single(nil))
+                        strongSelf._scrubbingPosition.set(.single(nil))
+                        strongSelf.scrubbingTimestampValue = nil
+                    })
                     if let scrubbingTimestampValue = scrubbingTimestampValue, apply {
                         strongSelf.seek?(scrubbingTimestampValue)
                     }
diff --git a/submodules/MediaPlayer/Sources/MediaPlayerTimeTextNode.swift b/submodules/MediaPlayer/Sources/MediaPlayerTimeTextNode.swift
index 151483a23d..210da5b91d 100644
--- a/submodules/MediaPlayer/Sources/MediaPlayerTimeTextNode.swift
+++ b/submodules/MediaPlayer/Sources/MediaPlayerTimeTextNode.swift
@@ -96,6 +96,7 @@ public final class MediaPlayerTimeTextNode: ASDisplayNode {
     }
     
     public var showDurationIfNotStarted = false
+    public var isScrubbing = false
     
     private var updateTimer: SwiftSignalKit.Timer?
     
@@ -175,13 +176,13 @@ public final class MediaPlayerTimeTextNode: ASDisplayNode {
             }
             
             if let statusValue = self.statusValue, Double(0.0).isLess(than: statusValue.duration) {
-                let timestamp = statusValue.timestamp - (self.trimRange?.lowerBound ?? 0.0)
+                let timestamp = max(0.0, statusValue.timestamp - (self.trimRange?.lowerBound ?? 0.0))
                 var duration = statusValue.duration
                 if let trimRange = self.trimRange {
                     duration = trimRange.upperBound - trimRange.lowerBound
                 }
                 
-                if self.showDurationIfNotStarted && timestamp < .ulpOfOne {
+                if self.showDurationIfNotStarted && (timestamp < .ulpOfOne || self.isScrubbing) {
                     let timestamp = Int32(duration)
                     self.state = MediaPlayerTimeTextNodeState(hours: timestamp / (60 * 60), minutes: timestamp % (60 * 60) / 60, seconds: timestamp % 60)
                 } else {
diff --git a/submodules/TelegramUI/Components/AudioWaveformNode/Sources/AudioWaveformNode.swift b/submodules/TelegramUI/Components/AudioWaveformNode/Sources/AudioWaveformNode.swift
index 06eb8e589c..965cafee55 100644
--- a/submodules/TelegramUI/Components/AudioWaveformNode/Sources/AudioWaveformNode.swift
+++ b/submodules/TelegramUI/Components/AudioWaveformNode/Sources/AudioWaveformNode.swift
@@ -9,12 +9,14 @@ private final class AudioWaveformNodeParameters: NSObject {
     let color: UIColor?
     let gravity: AudioWaveformNode.Gravity?
     let progress: CGFloat?
+    let trimRange: Range<CGFloat>?
    
-    init(waveform: AudioWaveform?, color: UIColor?, gravity: AudioWaveformNode.Gravity?, progress: CGFloat?) {
+    init(waveform: AudioWaveform?, color: UIColor?, gravity: AudioWaveformNode.Gravity?, progress: CGFloat?, trimRange: Range<CGFloat>?) {
         self.waveform = waveform
         self.color = color
         self.gravity = gravity
         self.progress = progress
+        self.trimRange = trimRange
         
         super.init()
     }
@@ -38,6 +40,14 @@ public final class AudioWaveformNode: ASDisplayNode {
         }
     }
     
+    public var trimRange: Range<CGFloat>? {
+        didSet {
+            if self.trimRange != oldValue {
+                self.setNeedsDisplay()
+            }
+        }
+    }
+    
     override public init() {
         super.init()
         
@@ -67,7 +77,7 @@ public final class AudioWaveformNode: ASDisplayNode {
     }
     
     override public func drawParameters(forAsyncLayer layer: _ASDisplayLayer) -> NSObjectProtocol? {
-        return AudioWaveformNodeParameters(waveform: self.waveform, color: self.color, gravity: self.gravity, progress: self.progress)
+        return AudioWaveformNodeParameters(waveform: self.waveform, color: self.color, gravity: self.gravity, progress: self.progress, trimRange: self.trimRange)
     }
     
     @objc override public class func draw(_ bounds: CGRect, withParameters parameters: Any?, isCancelled: () -> Bool, isRasterizing: Bool) {
@@ -164,8 +174,19 @@ public final class AudioWaveformNode: ASDisplayNode {
                 
                 let invScale = 1.0 / max(1.0, CGFloat(maxSample))
                 
+                var clipRange: Range<CGFloat>?
+                if let trimRange = parameters.trimRange {
+                    clipRange = trimRange.lowerBound * size.width ..< trimRange.upperBound * size.width
+                }
+                
                 for i in 0 ..< numSamples {
                     let offset = CGFloat(i) * (sampleWidth + distance)
+                    if let clipRange {
+                        if !clipRange.contains(offset) {
+                            continue
+                        }
+                    }
+                    
                     let peakSample = adjustedSamples[i]
                     
                     var sampleHeight = CGFloat(peakSample) * peakHeight * invScale
diff --git a/submodules/TelegramUI/Sources/Chat/ChatControllerMediaRecording.swift b/submodules/TelegramUI/Sources/Chat/ChatControllerMediaRecording.swift
index 4d4f149e0b..969563f98a 100644
--- a/submodules/TelegramUI/Sources/Chat/ChatControllerMediaRecording.swift
+++ b/submodules/TelegramUI/Sources/Chat/ChatControllerMediaRecording.swift
@@ -555,7 +555,7 @@ extension ChatControllerImpl {
                         return
                     }
                     
-                    let waveformBuffer = audio.waveform.makeBitstream()
+                    
                     self.chatDisplayNode.setupSendActionOnViewUpdate({ [weak self] in
                         if let strongSelf = self {
@@ -578,18 +578,21 @@ extension ChatControllerImpl {
                     }
                     
                     let resource: TelegramMediaResource
+                    var waveform = audio.waveform
                     var finalDuration: Int = Int(audio.duration)
                     if let trimRange = audio.trimRange, trimRange.lowerBound > 0.0 || trimRange.upperBound < Double(audio.duration) {
                         let randomId = Int64.random(in: Int64.min ... Int64.max)
                         let tempPath = NSTemporaryDirectory() + "\(Int64.random(in: 0 ..< .max)).ogg"
-                        resource = LocalFileAudioMediaResource(randomId: randomId, path: tempPath, trimRange: audio.trimRange)
+                        resource = LocalFileAudioMediaResource(randomId: randomId, path: tempPath, trimRange: trimRange)
                         self.context.account.postbox.mediaBox.moveResourceData(audio.resource.id, toTempPath: tempPath)
-                        
+                        waveform = waveform.subwaveform(from: trimRange.lowerBound / Double(audio.duration), to: trimRange.upperBound / Double(audio.duration))
                         finalDuration = Int(trimRange.upperBound - trimRange.lowerBound)
                     } else {
                         resource = audio.resource
                     }
                     
+                    let waveformBuffer = waveform.makeBitstream()
+                    
                     let messages: [EnqueueMessage] = [.message(text: "", attributes: attributes, inlineStickers: [:], mediaReference: .standalone(media: TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.LocalFile, id: Int64.random(in: Int64.min ... Int64.max)), partialReference: nil, resource: resource, previewRepresentations: [], videoThumbnails: [], immediateThumbnailData: nil, mimeType: "audio/ogg", size: Int64(audio.fileSize), attributes: [.Audio(isVoice: true, duration: finalDuration, title: nil, performer: nil, waveform: waveformBuffer)], alternativeRepresentations: [])), threadId: self.chatLocation.threadId, replyToMessageId: self.presentationInterfaceState.interfaceState.replyMessageSubject?.subjectModel, replyToStoryId: nil, localGroupingKey: nil, correlationId: nil, bubbleUpEmojiOrStickersets: [])]
                     
                     let transformedMessages: [EnqueueMessage]
diff --git a/submodules/TelegramUI/Sources/ChatRecordingPreviewInputPanelNode.swift b/submodules/TelegramUI/Sources/ChatRecordingPreviewInputPanelNode.swift
index 93e7b87d10..7b6b78c58e 100644
--- a/submodules/TelegramUI/Sources/ChatRecordingPreviewInputPanelNode.swift
+++ b/submodules/TelegramUI/Sources/ChatRecordingPreviewInputPanelNode.swift
@@ -157,7 +157,11 @@ final class ChatRecordingPreviewInputPanelNode: ChatInputPanelNode {
     
     private var mediaPlayer: MediaPlayer?
+    private var statusValue: MediaPlayerStatus?
     private let statusDisposable = MetaDisposable()
+    private var scrubbingDisposable: Disposable?
+    
+    private var positionTimer: SwiftSignalKit.Timer?
     
     private(set) var gestureRecognizer: ContextGesture?
@@ -242,7 +246,7 @@ final class ChatRecordingPreviewInputPanelNode: ChatInputPanelNode {
         self.addSubnode(self.waveformScrubberNode)
         //self.addSubnode(self.waveformButton)
         
-        //self.view.addSubview(self.trimView)
+        self.view.addSubview(self.trimView)
         self.addSubnode(self.playButtonNode)
         
         self.sendButton.highligthedChanged = { [weak self] highlighted in
@@ -266,9 +270,22 @@ final class ChatRecordingPreviewInputPanelNode: ChatInputPanelNode {
             guard let self else {
                 return
             }
+            var timestamp = timestamp
+            if let recordedMediaPreview = self.presentationInterfaceState?.interfaceState.mediaDraftState, case let .audio(audio) = recordedMediaPreview, let trimRange = audio.trimRange {
+                timestamp = max(trimRange.lowerBound, min(timestamp, trimRange.upperBound))
+            }
             self.mediaPlayer?.seek(timestamp: timestamp)
         }
         
+        self.scrubbingDisposable = (self.waveformScrubberNode.scrubbingPosition
+        |> deliverOnMainQueue).startStrict(next: { [weak self] value in
+            guard let self else {
+                return
+            }
+            let transition = ContainedViewLayoutTransition.animated(duration: 0.3, curve: .easeInOut)
+            transition.updateAlpha(node: self.playButtonNode, alpha: value != nil ? 0.0 : 1.0)
+        })
+        
         self.deleteButton.addTarget(self, action: #selector(self.deletePressed), forControlEvents: [.touchUpInside])
         self.sendButton.addTarget(self, action: #selector(self.sendPressed), forControlEvents: [.touchUpInside])
         self.viewOnceButton.addTarget(self, action: #selector(self.viewOncePressed), forControlEvents: [.touchUpInside])
@@ -280,6 +297,8 @@ final class ChatRecordingPreviewInputPanelNode: ChatInputPanelNode {
     deinit {
         self.mediaPlayer?.pause()
         self.statusDisposable.dispose()
+        self.scrubbingDisposable?.dispose()
+        self.positionTimer?.invalidate()
     }
     
     override func didLoad() {
@@ -309,6 +328,36 @@ final class ChatRecordingPreviewInputPanelNode: ChatInputPanelNode {
         self.view.disablesInteractiveTransitionGestureRecognizer = true
     }
     
+    private func ensureHasTimer() {
+        if self.positionTimer == nil {
+            let timer = SwiftSignalKit.Timer(timeout: 0.5, repeat: true, completion: { [weak self] in
+                self?.checkPosition()
+            }, queue: Queue.mainQueue())
+            self.positionTimer = timer
+            timer.start()
+        }
+    }
+    
+    func checkPosition() {
+        guard let statusValue = self.statusValue, let recordedMediaPreview = self.presentationInterfaceState?.interfaceState.mediaDraftState, case let .audio(audio) = recordedMediaPreview, let trimRange = audio.trimRange, let mediaPlayer = self.mediaPlayer else {
+            return
+        }
+        let timestampSeconds: Double
+        if !statusValue.generationTimestamp.isZero {
+            timestampSeconds = statusValue.timestamp + (CACurrentMediaTime() - statusValue.generationTimestamp)
+        } else {
+            timestampSeconds = statusValue.timestamp
+        }
+        if timestampSeconds >= trimRange.upperBound {
+            mediaPlayer.seek(timestamp: trimRange.lowerBound, play: false)
+        }
+    }
+    
+    private func stopTimer() {
+        self.positionTimer?.invalidate()
+        self.positionTimer = nil
+    }
+    
     private func maybePresentViewOnceTooltip() {
         guard let context = self.context else {
             return
@@ -421,28 +470,40 @@ final class ChatRecordingPreviewInputPanelNode: ChatInputPanelNode {
                 let mediaManager = context.sharedContext.mediaManager
                 let mediaPlayer = MediaPlayer(audioSessionManager: mediaManager.audioSession, postbox: context.account.postbox, userLocation: .other, userContentType: .audio, resourceReference: .standalone(resource: audio.resource), streamable: .none, video: false, preferSoftwareDecoding: false, enableSound: true, fetchAutomatically: true)
                 mediaPlayer.actionAtEnd = .action { [weak self] in
-                    guard let self, let interfaceState = self.presentationInterfaceState else {
+                    guard let self else {
                         return
                     }
-                    var timestamp: Double = 0.0
-                    if let recordedMediaPreview = interfaceState.interfaceState.mediaDraftState, case let .audio(audio) = recordedMediaPreview, let trimRange = audio.trimRange {
-                        timestamp = trimRange.lowerBound
+                    Queue.mainQueue().async {
+                        guard let interfaceState = self.presentationInterfaceState else {
+                            return
+                        }
+                        var timestamp: Double = 0.0
+                        if let recordedMediaPreview = interfaceState.interfaceState.mediaDraftState, case let .audio(audio) = recordedMediaPreview, let trimRange = audio.trimRange {
+                            timestamp = trimRange.lowerBound
+                        }
+                        self.mediaPlayer?.seek(timestamp: timestamp, play: false)
                     }
-                    self.mediaPlayer?.seek(timestamp: timestamp)
                 }
                 self.mediaPlayer = mediaPlayer
                 self.playButtonNode.durationLabel.defaultDuration = Double(audio.duration)
                 self.playButtonNode.durationLabel.status = mediaPlayer.status
                 self.playButtonNode.durationLabel.trimRange = audio.trimRange
                 self.waveformScrubberNode.status = mediaPlayer.status
+                
                 self.statusDisposable.set((mediaPlayer.status
                 |> deliverOnMainQueue).startStrict(next: { [weak self] status in
-                    if let strongSelf = self {
+                    if let self {
                         switch status.status {
                         case .playing, .buffering(_, true, _, _):
-                            strongSelf.playButtonNode.playPauseIconNode.enqueueState(.pause, animated: true)
+                            self.statusValue = status
+                            if let recordedMediaPreview = self.presentationInterfaceState?.interfaceState.mediaDraftState, case let .audio(audio) = recordedMediaPreview, let _ = audio.trimRange {
+                                self.ensureHasTimer()
+                            }
+                            self.playButtonNode.playPauseIconNode.enqueueState(.pause, animated: true)
                         default:
-                            strongSelf.playButtonNode.playPauseIconNode.enqueueState(.play, animated: true)
+                            self.statusValue = nil
+                            self.stopTimer()
+                            self.playButtonNode.playPauseIconNode.enqueueState(.play, animated: true)
                         }
                     }
                 }))
@@ -471,7 +532,18 @@ final class ChatRecordingPreviewInputPanelNode: ChatInputPanelNode {
                     } else {
                         self.mediaPlayer?.seek(timestamp: end - 1.0, play: true)
                     }
+                    self.playButtonNode.durationLabel.isScrubbing = false
+                    Queue.mainQueue().after(0.1) {
+                        self.waveformForegroundNode.alpha = 1.0
+                    }
+                } else {
+                    self.playButtonNode.durationLabel.isScrubbing = true
+                    self.waveformForegroundNode.alpha = 0.0
                 }
+                
+                let startFraction = start / Double(audio.duration)
+                let endFraction = end / Double(audio.duration)
+                self.waveformForegroundNode.trimRange = startFraction ..< endFraction
             }
         }
         self.trimView.frame = waveformBackgroundFrame
@@ -746,7 +818,26 @@ final class ChatRecordingPreviewInputPanelNode: ChatInputPanelNode {
     }
     
     @objc func waveformPressed() {
-        self.mediaPlayer?.togglePlayPause()
+        guard let mediaPlayer = self.mediaPlayer else {
+            return
+        }
+        if let recordedMediaPreview = self.presentationInterfaceState?.interfaceState.mediaDraftState, case let .audio(audio) = recordedMediaPreview, let trimRange = audio.trimRange {
+            let _ = (mediaPlayer.status
+            |> take(1)
+            |> deliverOnMainQueue).start(next: { [weak self] status in
+                guard let self else {
+                    return
+                }
+                
+                if case .playing = status.status {
+                    self.mediaPlayer?.pause()
+                } else if status.timestamp <= trimRange.lowerBound {
+                    self.mediaPlayer?.seek(timestamp: trimRange.lowerBound, play: true)
+                }
+            })
+        } else {
+            mediaPlayer.togglePlayPause()
+        }
     }
     
     override func minimalHeight(interfaceState: ChatPresentationInterfaceState, metrics: LayoutMetrics) -> CGFloat {