Mirror of https://github.com/Swiftgram/Telegram-iOS.git, synced 2025-07-19 09:41:29 +00:00

Commit 4180b348f1 ("Various improvements"), parent 6c337b0f84
@@ -80,6 +80,26 @@ public final class AudioWaveform: Equatable {
         return result
     }
     
+    public func subwaveform(from start: Double, to end: Double) -> AudioWaveform {
+        let normalizedStart = max(0.0, min(1.0, start))
+        let normalizedEnd = max(normalizedStart, min(1.0, end))
+        
+        let numSamples = self.samples.count / 2
+        let startIndex = Int(Double(numSamples) * normalizedStart) * 2
+        let endIndex = Int(Double(numSamples) * normalizedEnd) * 2
+        
+        let rangeLength = endIndex - startIndex
+        let subData: Data
+        
+        if rangeLength > 0 {
+            subData = self.samples.subdata(in: startIndex ..< endIndex)
+        } else {
+            subData = Data()
+        }
+        
+        return AudioWaveform(samples: subData, peak: self.peak)
+    }
+
     public static func ==(lhs: AudioWaveform, rhs: AudioWaveform) -> Bool {
        return lhs.peak == rhs.peak && lhs.samples == rhs.samples
    }
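Note: subwaveform treats the samples buffer as a sequence of 2-byte values, clamps both fractional bounds into 0...1 (and the end to at least the start), and multiplies the sample index by 2 so the slice never splits a sample. A minimal usage sketch; samplesData and the peak value are placeholders, not from this commit:

    let waveform = AudioWaveform(samples: samplesData, peak: 31)
    // Keep the middle half of the recording's waveform.
    let trimmed = waveform.subwaveform(from: 0.25, to: 0.75)
    // Out-of-range inputs are clamped: this is equivalent to (0.0, 1.0).
    let whole = waveform.subwaveform(from: -1.0, to: 2.0)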
@@ -21,6 +21,7 @@ private final class PhoneAndCountryNode: ASDisplayNode {
     let theme: PresentationTheme
     
     let countryButton: ASButtonNode
+    let countryButtonArrow: ASImageNode
     let phoneBackground: ASImageNode
     let phoneInputNode: PhoneInputNode
     
@@ -90,6 +91,9 @@ private final class PhoneAndCountryNode: ASDisplayNode {
         self.countryButton.titleNode.truncationMode = .byTruncatingTail
         self.countryButton.setBackgroundImage(countryButtonHighlightedBackground, for: .highlighted)
         
+        self.countryButtonArrow = ASImageNode()
+        self.countryButtonArrow.image = generateTintedImage(image: UIImage(bundleImageName: "Item List/DisclosureArrow"), color: theme.list.disclosureArrowColor)
+        
         self.phoneBackground = ASImageNode()
         self.phoneBackground.image = phoneInputBackground
         self.phoneBackground.displaysAsynchronously = false
@@ -102,6 +106,7 @@ private final class PhoneAndCountryNode: ASDisplayNode {
         
         self.addSubnode(self.phoneBackground)
         self.addSubnode(self.countryButton)
+        self.countryButton.addSubnode(self.countryButtonArrow)
         self.addSubnode(self.phoneInputNode)
         
         self.phoneInputNode.countryCodeField.textField.keyboardAppearance = theme.rootController.keyboardColor.keyboardAppearance
@@ -241,6 +246,10 @@ private final class PhoneAndCountryNode: ASDisplayNode {
         self.countryButton.frame = CGRect(origin: CGPoint(), size: CGSize(width: size.width, height: 67.0))
         self.phoneBackground.frame = CGRect(origin: CGPoint(x: 0.0, y: size.height - 57.0), size: CGSize(width: size.width - inset, height: 57.0))
         
+        if let image = self.countryButtonArrow.image {
+            self.countryButtonArrow.frame = CGRect(origin: CGPoint(x: size.width - image.size.width - 24.0 - 3.0, y: 16.0 + UIScreenPixel), size: image.size)
+        }
+        
         let countryCodeFrame = CGRect(origin: CGPoint(x: 18.0, y: size.height - 58.0), size: CGSize(width: 71.0, height: 57.0))
         let numberFrame = CGRect(origin: CGPoint(x: 107.0, y: size.height - 58.0), size: CGSize(width: size.width - 96.0 - 8.0 - 24.0, height: 57.0))
         let placeholderFrame = numberFrame.offsetBy(dx: 0.0, dy: 17.0 - UIScreenPixel)
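Note: the arrow's x-origin right-aligns the image inside the country button: the node width minus the image width, the 24 pt trailing inset, and a 3 pt visual adjustment. The same arithmetic with assumed values (a 375 pt-wide node and a 10 pt-wide arrow image, both hypothetical):

    let x = 375.0 - 10.0 - 24.0 - 3.0 // 338.0: the arrow hugs the trailing edge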
@@ -674,6 +674,8 @@ public final class MediaPlayerScrubbingNode: ASDisplayNode {
                 if let statusValue = strongSelf.statusValue, Double(0.0).isLess(than: statusValue.duration) {
                     strongSelf.scrubbingBeginTimestamp = statusValue.timestamp
                     strongSelf.scrubbingTimestampValue = statusValue.timestamp
+                    strongSelf._scrubbingTimestamp.set(.single(strongSelf.scrubbingTimestampValue))
+                    strongSelf._scrubbingPosition.set(.single(strongSelf.scrubbingTimestampValue.flatMap { $0 / statusValue.duration }))
                     strongSelf.updateProgressAnimations()
                 }
             }
@@ -682,6 +684,8 @@ public final class MediaPlayerScrubbingNode: ASDisplayNode {
             if let strongSelf = self {
                 if let statusValue = strongSelf.statusValue, let scrubbingBeginTimestamp = strongSelf.scrubbingBeginTimestamp, Double(0.0).isLess(than: statusValue.duration) {
                     strongSelf.scrubbingTimestampValue = scrubbingBeginTimestamp + (statusValue.duration * Double(addedFraction)) * multiplier
+                    strongSelf._scrubbingTimestamp.set(.single(strongSelf.scrubbingTimestampValue))
+                    strongSelf._scrubbingPosition.set(.single(strongSelf.scrubbingTimestampValue.flatMap { $0 / statusValue.duration }))
                     strongSelf.updateProgressAnimations()
                 }
             }
@@ -697,7 +701,11 @@ public final class MediaPlayerScrubbingNode: ASDisplayNode {
             if let strongSelf = self {
                 strongSelf.scrubbingBeginTimestamp = nil
                 let scrubbingTimestampValue = strongSelf.scrubbingTimestampValue
-                strongSelf.scrubbingTimestampValue = nil
+                Queue.mainQueue().after(0.05, {
+                    strongSelf._scrubbingTimestamp.set(.single(nil))
+                    strongSelf._scrubbingPosition.set(.single(nil))
+                    strongSelf.scrubbingTimestampValue = nil
+                })
                 if let scrubbingTimestampValue = scrubbingTimestampValue, apply {
                     strongSelf.seek?(scrubbingTimestampValue)
                 }
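Note: the two promises now publish scrubbing state while the finger moves, and the 0.05 s delayed reset lets observers consume the final value before it is cleared. A hedged sketch of a consumer of the new scrubbingPosition signal, mirroring the panel code later in this commit (names outside the diff are assumptions):

    let disposable = (scrubbingNode.scrubbingPosition
    |> deliverOnMainQueue).startStrict(next: { position in
        // position is a 0...1 fraction while scrubbing, nil otherwise
        playButtonNode.alpha = position != nil ? 0.0 : 1.0
    })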
@@ -96,6 +96,7 @@ public final class MediaPlayerTimeTextNode: ASDisplayNode {
    }
    
    public var showDurationIfNotStarted = false
+    public var isScrubbing = false
    
    private var updateTimer: SwiftSignalKit.Timer?
    
@@ -175,13 +176,13 @@ public final class MediaPlayerTimeTextNode: ASDisplayNode {
        }
        
        if let statusValue = self.statusValue, Double(0.0).isLess(than: statusValue.duration) {
-            let timestamp = statusValue.timestamp - (self.trimRange?.lowerBound ?? 0.0)
+            let timestamp = max(0.0, statusValue.timestamp - (self.trimRange?.lowerBound ?? 0.0))
            var duration = statusValue.duration
            if let trimRange = self.trimRange {
                duration = trimRange.upperBound - trimRange.lowerBound
            }
            
-            if self.showDurationIfNotStarted && timestamp < .ulpOfOne {
+            if self.showDurationIfNotStarted && (timestamp < .ulpOfOne || self.isScrubbing) {
                let timestamp = Int32(duration)
                self.state = MediaPlayerTimeTextNodeState(hours: timestamp / (60 * 60), minutes: timestamp % (60 * 60) / 60, seconds: timestamp % 60)
            } else {
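Note: the state decomposition above is plain integer arithmetic. Worked through for a hypothetical 3725-second duration:

    let t: Int32 = 3725
    let hours = t / (60 * 60)          // 1
    let minutes = t % (60 * 60) / 60   // 2
    let seconds = t % 60               // 5  -> renders as 1:02:05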
@@ -9,12 +9,14 @@ private final class AudioWaveformNodeParameters: NSObject {
    let color: UIColor?
    let gravity: AudioWaveformNode.Gravity?
    let progress: CGFloat?
+    let trimRange: Range<CGFloat>?
    
-    init(waveform: AudioWaveform?, color: UIColor?, gravity: AudioWaveformNode.Gravity?, progress: CGFloat?) {
+    init(waveform: AudioWaveform?, color: UIColor?, gravity: AudioWaveformNode.Gravity?, progress: CGFloat?, trimRange: Range<CGFloat>?) {
        self.waveform = waveform
        self.color = color
        self.gravity = gravity
        self.progress = progress
+        self.trimRange = trimRange
        
        super.init()
    }
@@ -38,6 +40,14 @@ public final class AudioWaveformNode: ASDisplayNode {
        }
    }
    
+    public var trimRange: Range<CGFloat>? {
+        didSet {
+            if self.trimRange != oldValue {
+                self.setNeedsDisplay()
+            }
+        }
+    }
+    
    override public init() {
        super.init()
        
@@ -67,7 +77,7 @@ public final class AudioWaveformNode: ASDisplayNode {
    }
    
    override public func drawParameters(forAsyncLayer layer: _ASDisplayLayer) -> NSObjectProtocol? {
-        return AudioWaveformNodeParameters(waveform: self.waveform, color: self.color, gravity: self.gravity, progress: self.progress)
+        return AudioWaveformNodeParameters(waveform: self.waveform, color: self.color, gravity: self.gravity, progress: self.progress, trimRange: self.trimRange)
    }
    
    @objc override public class func draw(_ bounds: CGRect, withParameters parameters: Any?, isCancelled: () -> Bool, isRasterizing: Bool) {
@@ -164,8 +174,19 @@ public final class AudioWaveformNode: ASDisplayNode {
            
            let invScale = 1.0 / max(1.0, CGFloat(maxSample))
            
+            var clipRange: Range<CGFloat>?
+            if let trimRange = parameters.trimRange {
+                clipRange = trimRange.lowerBound * size.width ..< trimRange.upperBound * size.width
+            }
+            
            for i in 0 ..< numSamples {
                let offset = CGFloat(i) * (sampleWidth + distance)
+                if let clipRange {
+                    if !clipRange.contains(offset) {
+                        continue
+                    }
+                }
+                
                let peakSample = adjustedSamples[i]
                
                var sampleHeight = CGFloat(peakSample) * peakHeight * invScale
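Note: trimRange is stored as fractions of the node's width; the draw loop converts it to points once and then skips any bar whose x-offset falls outside it. Worked with assumed numbers (a 200 pt-wide node, trim 0.25..<0.75):

    let clipRange = 0.25 * 200.0 ..< 0.75 * 200.0 // 50.0..<150.0
    // A bar at offset 40.0 is skipped; one at offset 100.0 is drawn.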
@@ -555,7 +555,7 @@ extension ChatControllerImpl {
            return
        }
        
-        let waveformBuffer = audio.waveform.makeBitstream()
+        
        
        self.chatDisplayNode.setupSendActionOnViewUpdate({ [weak self] in
            if let strongSelf = self {
@@ -578,18 +578,21 @@ extension ChatControllerImpl {
            }
            
            let resource: TelegramMediaResource
+            var waveform = audio.waveform
            var finalDuration: Int = Int(audio.duration)
            if let trimRange = audio.trimRange, trimRange.lowerBound > 0.0 || trimRange.upperBound < Double(audio.duration) {
                let randomId = Int64.random(in: Int64.min ... Int64.max)
                let tempPath = NSTemporaryDirectory() + "\(Int64.random(in: 0 ..< .max)).ogg"
-                resource = LocalFileAudioMediaResource(randomId: randomId, path: tempPath, trimRange: audio.trimRange)
+                resource = LocalFileAudioMediaResource(randomId: randomId, path: tempPath, trimRange: trimRange)
                self.context.account.postbox.mediaBox.moveResourceData(audio.resource.id, toTempPath: tempPath)
+                waveform = waveform.subwaveform(from: trimRange.lowerBound / Double(audio.duration), to: trimRange.upperBound / Double(audio.duration))
                finalDuration = Int(trimRange.upperBound - trimRange.lowerBound)
            } else {
                resource = audio.resource
            }
            
+            let waveformBuffer = waveform.makeBitstream()
+            
            let messages: [EnqueueMessage] = [.message(text: "", attributes: attributes, inlineStickers: [:], mediaReference: .standalone(media: TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.LocalFile, id: Int64.random(in: Int64.min ... Int64.max)), partialReference: nil, resource: resource, previewRepresentations: [], videoThumbnails: [], immediateThumbnailData: nil, mimeType: "audio/ogg", size: Int64(audio.fileSize), attributes: [.Audio(isVoice: true, duration: finalDuration, title: nil, performer: nil, waveform: waveformBuffer)], alternativeRepresentations: [])), threadId: self.chatLocation.threadId, replyToMessageId: self.presentationInterfaceState.interfaceState.replyMessageSubject?.subjectModel, replyToStoryId: nil, localGroupingKey: nil, correlationId: nil, bubbleUpEmojiOrStickersets: [])]
            
            let transformedMessages: [EnqueueMessage]
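Note: the send path now builds the waveform bitstream after trimming, converting the trim bounds from seconds into the 0...1 fractions subwaveform expects. Worked with hypothetical values (a 10 s recording trimmed to 2 s...8 s):

    let duration = 10.0
    let trimRange = 2.0 ..< 8.0
    let from = trimRange.lowerBound / duration // 0.2
    let to = trimRange.upperBound / duration   // 0.8
    // finalDuration = Int(8.0 - 2.0) = 6 seconds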
@@ -157,7 +157,11 @@ final class ChatRecordingPreviewInputPanelNode: ChatInputPanelNode {
    
    private var mediaPlayer: MediaPlayer?
    
+    private var statusValue: MediaPlayerStatus?
    private let statusDisposable = MetaDisposable()
+    private var scrubbingDisposable: Disposable?
+    
+    private var positionTimer: SwiftSignalKit.Timer?
    
    private(set) var gestureRecognizer: ContextGesture?
    
@@ -242,7 +246,7 @@ final class ChatRecordingPreviewInputPanelNode: ChatInputPanelNode {
        self.addSubnode(self.waveformScrubberNode)
        //self.addSubnode(self.waveformButton)
        
-        //self.view.addSubview(self.trimView)
+        self.view.addSubview(self.trimView)
        self.addSubnode(self.playButtonNode)
        
        self.sendButton.highligthedChanged = { [weak self] highlighted in
@@ -266,9 +270,22 @@ final class ChatRecordingPreviewInputPanelNode: ChatInputPanelNode {
            guard let self else {
                return
            }
+            var timestamp = timestamp
+            if let recordedMediaPreview = self.presentationInterfaceState?.interfaceState.mediaDraftState, case let .audio(audio) = recordedMediaPreview, let trimRange = audio.trimRange {
+                timestamp = max(trimRange.lowerBound, min(timestamp, trimRange.upperBound))
+            }
            self.mediaPlayer?.seek(timestamp: timestamp)
        }
        
+        self.scrubbingDisposable = (self.waveformScrubberNode.scrubbingPosition
+        |> deliverOnMainQueue).startStrict(next: { [weak self] value in
+            guard let self else {
+                return
+            }
+            let transition = ContainedViewLayoutTransition.animated(duration: 0.3, curve: .easeInOut)
+            transition.updateAlpha(node: self.playButtonNode, alpha: value != nil ? 0.0 : 1.0)
+        })
+        
        self.deleteButton.addTarget(self, action: #selector(self.deletePressed), forControlEvents: [.touchUpInside])
        self.sendButton.addTarget(self, action: #selector(self.sendPressed), forControlEvents: [.touchUpInside])
        self.viewOnceButton.addTarget(self, action: #selector(self.viewOncePressed), forControlEvents: [.touchUpInside])
@@ -280,6 +297,8 @@ final class ChatRecordingPreviewInputPanelNode: ChatInputPanelNode {
    deinit {
        self.mediaPlayer?.pause()
        self.statusDisposable.dispose()
+        self.scrubbingDisposable?.dispose()
+        self.positionTimer?.invalidate()
    }
    
    override func didLoad() {
@@ -309,6 +328,36 @@ final class ChatRecordingPreviewInputPanelNode: ChatInputPanelNode {
        self.view.disablesInteractiveTransitionGestureRecognizer = true
    }
    
+    private func ensureHasTimer() {
+        if self.positionTimer == nil {
+            let timer = SwiftSignalKit.Timer(timeout: 0.5, repeat: true, completion: { [weak self] in
+                self?.checkPosition()
+            }, queue: Queue.mainQueue())
+            self.positionTimer = timer
+            timer.start()
+        }
+    }
+    
+    func checkPosition() {
+        guard let statusValue = self.statusValue, let recordedMediaPreview = self.presentationInterfaceState?.interfaceState.mediaDraftState, case let .audio(audio) = recordedMediaPreview, let trimRange = audio.trimRange, let mediaPlayer = self.mediaPlayer else {
+            return
+        }
+        let timestampSeconds: Double
+        if !statusValue.generationTimestamp.isZero {
+            timestampSeconds = statusValue.timestamp + (CACurrentMediaTime() - statusValue.generationTimestamp)
+        } else {
+            timestampSeconds = statusValue.timestamp
+        }
+        if timestampSeconds >= trimRange.upperBound {
+            mediaPlayer.seek(timestamp: trimRange.lowerBound, play: false)
+        }
+    }
+    
+    private func stopTimer() {
+        self.positionTimer?.invalidate()
+        self.positionTimer = nil
+    }
+    
    private func maybePresentViewOnceTooltip() {
        guard let context = self.context else {
            return
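Note: checkPosition extrapolates the current position from the last status report instead of polling the player: the timestamp captured at generationTimestamp is advanced by the wall-clock time elapsed since. A sketch of the same computation with hypothetical numbers:

    // status.timestamp == 4.9, generated ~0.3 s ago
    let elapsed = CACurrentMediaTime() - statusValue.generationTimestamp // ~0.3
    let current = statusValue.timestamp + elapsed                        // ~5.2 s
    // once current >= trimRange.upperBound, seek back to trimRange.lowerBound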
@@ -421,28 +470,40 @@ final class ChatRecordingPreviewInputPanelNode: ChatInputPanelNode {
            let mediaManager = context.sharedContext.mediaManager
            let mediaPlayer = MediaPlayer(audioSessionManager: mediaManager.audioSession, postbox: context.account.postbox, userLocation: .other, userContentType: .audio, resourceReference: .standalone(resource: audio.resource), streamable: .none, video: false, preferSoftwareDecoding: false, enableSound: true, fetchAutomatically: true)
            mediaPlayer.actionAtEnd = .action { [weak self] in
-                guard let self, let interfaceState = self.presentationInterfaceState else {
+                guard let self else {
                    return
                }
-                var timestamp: Double = 0.0
-                if let recordedMediaPreview = interfaceState.interfaceState.mediaDraftState, case let .audio(audio) = recordedMediaPreview, let trimRange = audio.trimRange {
-                    timestamp = trimRange.lowerBound
+                Queue.mainQueue().async {
+                    guard let interfaceState = self.presentationInterfaceState else {
+                        return
+                    }
+                    var timestamp: Double = 0.0
+                    if let recordedMediaPreview = interfaceState.interfaceState.mediaDraftState, case let .audio(audio) = recordedMediaPreview, let trimRange = audio.trimRange {
+                        timestamp = trimRange.lowerBound
+                    }
+                    self.mediaPlayer?.seek(timestamp: timestamp, play: false)
                }
-                self.mediaPlayer?.seek(timestamp: timestamp)
            }
            self.mediaPlayer = mediaPlayer
            self.playButtonNode.durationLabel.defaultDuration = Double(audio.duration)
            self.playButtonNode.durationLabel.status = mediaPlayer.status
            self.playButtonNode.durationLabel.trimRange = audio.trimRange
            self.waveformScrubberNode.status = mediaPlayer.status
            
            self.statusDisposable.set((mediaPlayer.status
            |> deliverOnMainQueue).startStrict(next: { [weak self] status in
-                if let strongSelf = self {
+                if let self {
                    switch status.status {
                    case .playing, .buffering(_, true, _, _):
-                        strongSelf.playButtonNode.playPauseIconNode.enqueueState(.pause, animated: true)
+                        self.statusValue = status
+                        if let recordedMediaPreview = self.presentationInterfaceState?.interfaceState.mediaDraftState, case let .audio(audio) = recordedMediaPreview, let _ = audio.trimRange {
+                            self.ensureHasTimer()
+                        }
+                        self.playButtonNode.playPauseIconNode.enqueueState(.pause, animated: true)
                    default:
-                        strongSelf.playButtonNode.playPauseIconNode.enqueueState(.play, animated: true)
+                        self.statusValue = nil
+                        self.stopTimer()
+                        self.playButtonNode.playPauseIconNode.enqueueState(.play, animated: true)
                    }
                }
            }))
@@ -471,7 +532,18 @@ final class ChatRecordingPreviewInputPanelNode: ChatInputPanelNode {
                } else {
                    self.mediaPlayer?.seek(timestamp: end - 1.0, play: true)
                }
+                self.playButtonNode.durationLabel.isScrubbing = false
+                Queue.mainQueue().after(0.1) {
+                    self.waveformForegroundNode.alpha = 1.0
+                }
+            } else {
+                self.playButtonNode.durationLabel.isScrubbing = true
+                self.waveformForegroundNode.alpha = 0.0
            }
            
+            let startFraction = start / Double(audio.duration)
+            let endFraction = end / Double(audio.duration)
+            self.waveformForegroundNode.trimRange = startFraction ..< endFraction
        }
    }
    self.trimView.frame = waveformBackgroundFrame
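Note: the trim handles report start and end in seconds, while the waveform node's trimRange wants fractions of the full duration; the divisions do the conversion. With a hypothetical 12 s recording trimmed to 3 s...9 s:

    let startFraction = 3.0 / 12.0 // 0.25
    let endFraction = 9.0 / 12.0   // 0.75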
@@ -746,7 +818,26 @@ final class ChatRecordingPreviewInputPanelNode: ChatInputPanelNode {
    }
    
    @objc func waveformPressed() {
-        self.mediaPlayer?.togglePlayPause()
+        guard let mediaPlayer = self.mediaPlayer else {
+            return
+        }
+        if let recordedMediaPreview = self.presentationInterfaceState?.interfaceState.mediaDraftState, case let .audio(audio) = recordedMediaPreview, let trimRange = audio.trimRange {
+            let _ = (mediaPlayer.status
+            |> take(1)
+            |> deliverOnMainQueue).start(next: { [weak self] status in
+                guard let self else {
+                    return
+                }
+                
+                if case .playing = status.status {
+                    self.mediaPlayer?.pause()
+                } else if status.timestamp <= trimRange.lowerBound {
+                    self.mediaPlayer?.seek(timestamp: trimRange.lowerBound, play: true)
+                }
+            })
+        } else {
+            mediaPlayer.togglePlayPause()
+        }
    }
    
    override func minimalHeight(interfaceState: ChatPresentationInterfaceState, metrics: LayoutMetrics) -> CGFloat {
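Note: waveformPressed now reads the player status once (take(1)) instead of blindly toggling, so a tap respects the trim bounds. A hedged sketch of the same one-shot read pattern:

    let _ = (mediaPlayer.status
    |> take(1)
    |> deliverOnMainQueue).start(next: { status in
        // inspect status.status / status.timestamp exactly once,
        // then pause, seek, or play accordingly
    })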