Merge commit '880fd391c42e7049e17cc87e9cf905f579a8842f'

This commit is contained in:
Isaac 2025-05-22 19:39:55 +08:00
commit e2660e38e7
7 changed files with 171 additions and 18 deletions

View File

@@ -80,6 +80,26 @@ public final class AudioWaveform: Equatable {
return result
}
/// Extracts the portion of this waveform between two normalized positions.
///
/// - Parameters:
///   - start: Fractional start position; clamped to 0.0...1.0.
///   - end: Fractional end position; clamped to `start`...1.0.
/// - Returns: A new waveform containing only the samples in the requested
///   range, preserving the original peak value.
public func subwaveform(from start: Double, to end: Double) -> AudioWaveform {
    // Clamp both bounds so that 0.0 <= lower <= upper <= 1.0.
    let lowerFraction = min(max(0.0, start), 1.0)
    let upperFraction = min(max(lowerFraction, end), 1.0)
    // Samples occupy two bytes each; convert fractions to byte offsets
    // aligned to sample boundaries.
    let sampleCount = self.samples.count / 2
    let lowerByteOffset = Int(Double(sampleCount) * lowerFraction) * 2
    let upperByteOffset = Int(Double(sampleCount) * upperFraction) * 2
    // An empty range yields an empty payload rather than an invalid slice.
    let slice: Data = upperByteOffset > lowerByteOffset
        ? self.samples.subdata(in: lowerByteOffset ..< upperByteOffset)
        : Data()
    return AudioWaveform(samples: slice, peak: self.peak)
}
/// Two waveforms are equal when both their peak value and raw sample data match.
public static func ==(lhs: AudioWaveform, rhs: AudioWaveform) -> Bool {
    guard lhs.peak == rhs.peak else {
        return false
    }
    return lhs.samples == rhs.samples
}

View File

@@ -21,6 +21,7 @@ private final class PhoneAndCountryNode: ASDisplayNode {
let theme: PresentationTheme
let countryButton: ASButtonNode
let countryButtonArrow: ASImageNode
let phoneBackground: ASImageNode
let phoneInputNode: PhoneInputNode
@@ -90,6 +91,9 @@ private final class PhoneAndCountryNode: ASDisplayNode {
self.countryButton.titleNode.truncationMode = .byTruncatingTail
self.countryButton.setBackgroundImage(countryButtonHighlightedBackground, for: .highlighted)
self.countryButtonArrow = ASImageNode()
self.countryButtonArrow.image = generateTintedImage(image: UIImage(bundleImageName: "Item List/DisclosureArrow"), color: theme.list.disclosureArrowColor)
self.phoneBackground = ASImageNode()
self.phoneBackground.image = phoneInputBackground
self.phoneBackground.displaysAsynchronously = false
@@ -102,6 +106,7 @@ private final class PhoneAndCountryNode: ASDisplayNode {
self.addSubnode(self.phoneBackground)
self.addSubnode(self.countryButton)
self.countryButton.addSubnode(self.countryButtonArrow)
self.addSubnode(self.phoneInputNode)
self.phoneInputNode.countryCodeField.textField.keyboardAppearance = theme.rootController.keyboardColor.keyboardAppearance
@@ -241,6 +246,10 @@ private final class PhoneAndCountryNode: ASDisplayNode {
self.countryButton.frame = CGRect(origin: CGPoint(), size: CGSize(width: size.width, height: 67.0))
self.phoneBackground.frame = CGRect(origin: CGPoint(x: 0.0, y: size.height - 57.0), size: CGSize(width: size.width - inset, height: 57.0))
if let image = self.countryButtonArrow.image {
self.countryButtonArrow.frame = CGRect(origin: CGPoint(x: size.width - image.size.width - 24.0 - 3.0, y: 16.0 + UIScreenPixel), size: image.size)
}
let countryCodeFrame = CGRect(origin: CGPoint(x: 18.0, y: size.height - 58.0), size: CGSize(width: 71.0, height: 57.0))
let numberFrame = CGRect(origin: CGPoint(x: 107.0, y: size.height - 58.0), size: CGSize(width: size.width - 96.0 - 8.0 - 24.0, height: 57.0))
let placeholderFrame = numberFrame.offsetBy(dx: 0.0, dy: 17.0 - UIScreenPixel)

View File

@@ -674,6 +674,8 @@ public final class MediaPlayerScrubbingNode: ASDisplayNode {
if let statusValue = strongSelf.statusValue, Double(0.0).isLess(than: statusValue.duration) {
strongSelf.scrubbingBeginTimestamp = statusValue.timestamp
strongSelf.scrubbingTimestampValue = statusValue.timestamp
strongSelf._scrubbingTimestamp.set(.single(strongSelf.scrubbingTimestampValue))
strongSelf._scrubbingPosition.set(.single(strongSelf.scrubbingTimestampValue.flatMap { $0 / statusValue.duration }))
strongSelf.updateProgressAnimations()
}
}
@@ -682,6 +684,8 @@ public final class MediaPlayerScrubbingNode: ASDisplayNode {
if let strongSelf = self {
if let statusValue = strongSelf.statusValue, let scrubbingBeginTimestamp = strongSelf.scrubbingBeginTimestamp, Double(0.0).isLess(than: statusValue.duration) {
strongSelf.scrubbingTimestampValue = scrubbingBeginTimestamp + (statusValue.duration * Double(addedFraction)) * multiplier
strongSelf._scrubbingTimestamp.set(.single(strongSelf.scrubbingTimestampValue))
strongSelf._scrubbingPosition.set(.single(strongSelf.scrubbingTimestampValue.flatMap { $0 / statusValue.duration }))
strongSelf.updateProgressAnimations()
}
}
@@ -697,7 +701,11 @@ public final class MediaPlayerScrubbingNode: ASDisplayNode {
if let strongSelf = self {
strongSelf.scrubbingBeginTimestamp = nil
let scrubbingTimestampValue = strongSelf.scrubbingTimestampValue
strongSelf.scrubbingTimestampValue = nil
Queue.mainQueue().after(0.05, {
strongSelf._scrubbingTimestamp.set(.single(nil))
strongSelf._scrubbingPosition.set(.single(nil))
strongSelf.scrubbingTimestampValue = nil
})
if let scrubbingTimestampValue = scrubbingTimestampValue, apply {
strongSelf.seek?(scrubbingTimestampValue)
}

View File

@@ -96,6 +96,7 @@ public final class MediaPlayerTimeTextNode: ASDisplayNode {
}
public var showDurationIfNotStarted = false
public var isScrubbing = false
private var updateTimer: SwiftSignalKit.Timer?
@@ -175,13 +176,13 @@ public final class MediaPlayerTimeTextNode: ASDisplayNode {
}
if let statusValue = self.statusValue, Double(0.0).isLess(than: statusValue.duration) {
let timestamp = statusValue.timestamp - (self.trimRange?.lowerBound ?? 0.0)
let timestamp = max(0.0, statusValue.timestamp - (self.trimRange?.lowerBound ?? 0.0))
var duration = statusValue.duration
if let trimRange = self.trimRange {
duration = trimRange.upperBound - trimRange.lowerBound
}
if self.showDurationIfNotStarted && timestamp < .ulpOfOne {
if self.showDurationIfNotStarted && (timestamp < .ulpOfOne || self.isScrubbing) {
let timestamp = Int32(duration)
self.state = MediaPlayerTimeTextNodeState(hours: timestamp / (60 * 60), minutes: timestamp % (60 * 60) / 60, seconds: timestamp % 60)
} else {

View File

@@ -9,12 +9,14 @@ private final class AudioWaveformNodeParameters: NSObject {
let color: UIColor?
let gravity: AudioWaveformNode.Gravity?
let progress: CGFloat?
let trimRange: Range<CGFloat>?
/// Immutable parameter bag handed to the asynchronous drawing routine.
///
/// - Parameters:
///   - waveform: Waveform data to render, if any.
///   - color: Fill color for the waveform bars.
///   - gravity: Vertical alignment of the bars.
///   - progress: Normalized playback progress, if shown.
///   - trimRange: Normalized horizontal range to render; bars outside it are skipped.
init(waveform: AudioWaveform?, color: UIColor?, gravity: AudioWaveformNode.Gravity?, progress: CGFloat?, trimRange: Range<CGFloat>?) {
    self.waveform = waveform
    self.color = color
    self.gravity = gravity
    self.progress = progress
    self.trimRange = trimRange
    
    super.init()
}
@@ -38,6 +40,14 @@ public final class AudioWaveformNode: ASDisplayNode {
}
}
/// Normalized (0...1) horizontal slice of the waveform to draw; nil draws everything.
public var trimRange: Range<CGFloat>? {
    didSet {
        // Trigger a redraw only when the range actually changed.
        if self.trimRange != oldValue {
            self.setNeedsDisplay()
        }
    }
}
override public init() {
super.init()
@@ -67,7 +77,7 @@ public final class AudioWaveformNode: ASDisplayNode {
}
/// Packages the node's current drawing state (including the trim range) for
/// the asynchronous display pass.
override public func drawParameters(forAsyncLayer layer: _ASDisplayLayer) -> NSObjectProtocol? {
    return AudioWaveformNodeParameters(waveform: self.waveform, color: self.color, gravity: self.gravity, progress: self.progress, trimRange: self.trimRange)
}
@objc override public class func draw(_ bounds: CGRect, withParameters parameters: Any?, isCancelled: () -> Bool, isRasterizing: Bool) {
@@ -164,8 +174,19 @@ public final class AudioWaveformNode: ASDisplayNode {
let invScale = 1.0 / max(1.0, CGFloat(maxSample))
var clipRange: Range<CGFloat>?
if let trimRange = parameters.trimRange {
clipRange = trimRange.lowerBound * size.width ..< trimRange.upperBound * size.width
}
for i in 0 ..< numSamples {
let offset = CGFloat(i) * (sampleWidth + distance)
if let clipRange {
if !clipRange.contains(offset) {
continue
}
}
let peakSample = adjustedSamples[i]
var sampleHeight = CGFloat(peakSample) * peakHeight * invScale

View File

@@ -555,7 +555,7 @@ extension ChatControllerImpl {
return
}
let waveformBuffer = audio.waveform.makeBitstream()
self.chatDisplayNode.setupSendActionOnViewUpdate({ [weak self] in
if let strongSelf = self {
@@ -578,18 +578,21 @@
}
let resource: TelegramMediaResource
var waveform = audio.waveform
var finalDuration: Int = Int(audio.duration)
if let trimRange = audio.trimRange, trimRange.lowerBound > 0.0 || trimRange.upperBound < Double(audio.duration) {
let randomId = Int64.random(in: Int64.min ... Int64.max)
let tempPath = NSTemporaryDirectory() + "\(Int64.random(in: 0 ..< .max)).ogg"
resource = LocalFileAudioMediaResource(randomId: randomId, path: tempPath, trimRange: audio.trimRange)
resource = LocalFileAudioMediaResource(randomId: randomId, path: tempPath, trimRange: trimRange)
self.context.account.postbox.mediaBox.moveResourceData(audio.resource.id, toTempPath: tempPath)
waveform = waveform.subwaveform(from: trimRange.lowerBound / Double(audio.duration), to: trimRange.upperBound / Double(audio.duration))
finalDuration = Int(trimRange.upperBound - trimRange.lowerBound)
} else {
resource = audio.resource
}
let waveformBuffer = waveform.makeBitstream()
let messages: [EnqueueMessage] = [.message(text: "", attributes: attributes, inlineStickers: [:], mediaReference: .standalone(media: TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.LocalFile, id: Int64.random(in: Int64.min ... Int64.max)), partialReference: nil, resource: resource, previewRepresentations: [], videoThumbnails: [], immediateThumbnailData: nil, mimeType: "audio/ogg", size: Int64(audio.fileSize), attributes: [.Audio(isVoice: true, duration: finalDuration, title: nil, performer: nil, waveform: waveformBuffer)], alternativeRepresentations: [])), threadId: self.chatLocation.threadId, replyToMessageId: self.presentationInterfaceState.interfaceState.replyMessageSubject?.subjectModel, replyToStoryId: nil, localGroupingKey: nil, correlationId: nil, bubbleUpEmojiOrStickersets: [])]
let transformedMessages: [EnqueueMessage]

View File

@@ -157,7 +157,11 @@ final class ChatRecordingPreviewInputPanelNode: ChatInputPanelNode {
private var mediaPlayer: MediaPlayer?
private var statusValue: MediaPlayerStatus?
private let statusDisposable = MetaDisposable()
private var scrubbingDisposable: Disposable?
private var positionTimer: SwiftSignalKit.Timer?
private(set) var gestureRecognizer: ContextGesture?
@@ -242,7 +246,7 @@ final class ChatRecordingPreviewInputPanelNode: ChatInputPanelNode {
self.addSubnode(self.waveformScrubberNode)
//self.addSubnode(self.waveformButton)
//self.view.addSubview(self.trimView)
self.view.addSubview(self.trimView)
self.addSubnode(self.playButtonNode)
self.sendButton.highligthedChanged = { [weak self] highlighted in
@@ -266,9 +270,22 @@ final class ChatRecordingPreviewInputPanelNode: ChatInputPanelNode {
guard let self else {
return
}
var timestamp = timestamp
if let recordedMediaPreview = self.presentationInterfaceState?.interfaceState.mediaDraftState, case let .audio(audio) = recordedMediaPreview, let trimRange = audio.trimRange {
timestamp = max(trimRange.lowerBound, min(timestamp, trimRange.upperBound))
}
self.mediaPlayer?.seek(timestamp: timestamp)
}
self.scrubbingDisposable = (self.waveformScrubberNode.scrubbingPosition
|> deliverOnMainQueue).startStrict(next: { [weak self] value in
guard let self else {
return
}
let transition = ContainedViewLayoutTransition.animated(duration: 0.3, curve: .easeInOut)
transition.updateAlpha(node: self.playButtonNode, alpha: value != nil ? 0.0 : 1.0)
})
self.deleteButton.addTarget(self, action: #selector(self.deletePressed), forControlEvents: [.touchUpInside])
self.sendButton.addTarget(self, action: #selector(self.sendPressed), forControlEvents: [.touchUpInside])
self.viewOnceButton.addTarget(self, action: #selector(self.viewOncePressed), forControlEvents: [.touchUpInside])
@@ -280,6 +297,8 @@ final class ChatRecordingPreviewInputPanelNode: ChatInputPanelNode {
deinit {
self.mediaPlayer?.pause()
self.statusDisposable.dispose()
self.scrubbingDisposable?.dispose()
self.positionTimer?.invalidate()
}
override func didLoad() {
@@ -309,6 +328,36 @@ final class ChatRecordingPreviewInputPanelNode: ChatInputPanelNode {
self.view.disablesInteractiveTransitionGestureRecognizer = true
}
/// Lazily creates and starts the periodic position-check timer
/// (fires every 0.5s on the main queue); a no-op if one already exists.
private func ensureHasTimer() {
    guard self.positionTimer == nil else {
        return
    }
    let positionTimer = SwiftSignalKit.Timer(timeout: 0.5, repeat: true, completion: { [weak self] in
        self?.checkPosition()
    }, queue: Queue.mainQueue())
    self.positionTimer = positionTimer
    positionTimer.start()
}
/// Rewinds playback to the trim start (paused) once the extrapolated
/// playhead position passes the trim end.
func checkPosition() {
    guard let mediaPlayer = self.mediaPlayer, let statusValue = self.statusValue else {
        return
    }
    guard let draftState = self.presentationInterfaceState?.interfaceState.mediaDraftState, case let .audio(audio) = draftState, let trimRange = audio.trimRange else {
        return
    }
    // Extrapolate the current position from the last reported status.
    var currentTimestamp = statusValue.timestamp
    if !statusValue.generationTimestamp.isZero {
        currentTimestamp += CACurrentMediaTime() - statusValue.generationTimestamp
    }
    if currentTimestamp >= trimRange.upperBound {
        mediaPlayer.seek(timestamp: trimRange.lowerBound, play: false)
    }
}
/// Invalidates and releases the periodic position-check timer, if any.
private func stopTimer() {
    guard let positionTimer = self.positionTimer else {
        return
    }
    positionTimer.invalidate()
    self.positionTimer = nil
}
private func maybePresentViewOnceTooltip() {
guard let context = self.context else {
return
@@ -421,28 +470,40 @@ final class ChatRecordingPreviewInputPanelNode: ChatInputPanelNode {
let mediaManager = context.sharedContext.mediaManager
let mediaPlayer = MediaPlayer(audioSessionManager: mediaManager.audioSession, postbox: context.account.postbox, userLocation: .other, userContentType: .audio, resourceReference: .standalone(resource: audio.resource), streamable: .none, video: false, preferSoftwareDecoding: false, enableSound: true, fetchAutomatically: true)
mediaPlayer.actionAtEnd = .action { [weak self] in
guard let self, let interfaceState = self.presentationInterfaceState else {
guard let self else {
return
}
var timestamp: Double = 0.0
if let recordedMediaPreview = interfaceState.interfaceState.mediaDraftState, case let .audio(audio) = recordedMediaPreview, let trimRange = audio.trimRange {
timestamp = trimRange.lowerBound
Queue.mainQueue().async {
guard let interfaceState = self.presentationInterfaceState else {
return
}
var timestamp: Double = 0.0
if let recordedMediaPreview = interfaceState.interfaceState.mediaDraftState, case let .audio(audio) = recordedMediaPreview, let trimRange = audio.trimRange {
timestamp = trimRange.lowerBound
}
self.mediaPlayer?.seek(timestamp: timestamp, play: false)
}
self.mediaPlayer?.seek(timestamp: timestamp)
}
self.mediaPlayer = mediaPlayer
self.playButtonNode.durationLabel.defaultDuration = Double(audio.duration)
self.playButtonNode.durationLabel.status = mediaPlayer.status
self.playButtonNode.durationLabel.trimRange = audio.trimRange
self.waveformScrubberNode.status = mediaPlayer.status
self.statusDisposable.set((mediaPlayer.status
|> deliverOnMainQueue).startStrict(next: { [weak self] status in
if let strongSelf = self {
if let self {
switch status.status {
case .playing, .buffering(_, true, _, _):
strongSelf.playButtonNode.playPauseIconNode.enqueueState(.pause, animated: true)
self.statusValue = status
if let recordedMediaPreview = self.presentationInterfaceState?.interfaceState.mediaDraftState, case let .audio(audio) = recordedMediaPreview, let _ = audio.trimRange {
self.ensureHasTimer()
}
self.playButtonNode.playPauseIconNode.enqueueState(.pause, animated: true)
default:
strongSelf.playButtonNode.playPauseIconNode.enqueueState(.play, animated: true)
self.statusValue = nil
self.stopTimer()
self.playButtonNode.playPauseIconNode.enqueueState(.play, animated: true)
}
}
}))
@@ -471,7 +532,18 @@ final class ChatRecordingPreviewInputPanelNode: ChatInputPanelNode {
} else {
self.mediaPlayer?.seek(timestamp: end - 1.0, play: true)
}
self.playButtonNode.durationLabel.isScrubbing = false
Queue.mainQueue().after(0.1) {
self.waveformForegroundNode.alpha = 1.0
}
} else {
self.playButtonNode.durationLabel.isScrubbing = true
self.waveformForegroundNode.alpha = 0.0
}
let startFraction = start / Double(audio.duration)
let endFraction = end / Double(audio.duration)
self.waveformForegroundNode.trimRange = startFraction ..< endFraction
}
}
self.trimView.frame = waveformBackgroundFrame
@@ -746,7 +818,26 @@ final class ChatRecordingPreviewInputPanelNode: ChatInputPanelNode {
}
/// Handles taps on the waveform: toggles playback, respecting any trim range.
@objc func waveformPressed() {
    guard let mediaPlayer = self.mediaPlayer else {
        return
    }
    if let recordedMediaPreview = self.presentationInterfaceState?.interfaceState.mediaDraftState, case let .audio(audio) = recordedMediaPreview, let trimRange = audio.trimRange {
        // With a trim range set: pause if currently playing; otherwise start
        // playback from the trim start when the playhead is at or before it.
        let _ = (mediaPlayer.status
        |> take(1)
        |> deliverOnMainQueue).start(next: { [weak self] status in
            guard let self else {
                return
            }
            if case .playing = status.status {
                self.mediaPlayer?.pause()
            } else if status.timestamp <= trimRange.lowerBound {
                self.mediaPlayer?.seek(timestamp: trimRange.lowerBound, play: true)
            }
        })
    } else {
        mediaPlayer.togglePlayPause()
    }
}
override func minimalHeight(interfaceState: ChatPresentationInterfaceState, metrics: LayoutMetrics) -> CGFloat {