Mirror of https://github.com/Swiftgram/Telegram-iOS.git (synced 2025-10-09 11:23:48 +00:00)

Video message recording improvements
Commit 40c4378bde, parent 2bccbfea3b
@@ -10893,3 +10893,6 @@ Sorry for the inconvenience.";
 "Chat.TapToPlayVideoMessageOnceTooltip" = "Tap to set this message to **Play Once**";
 "Chat.PlayVideoMessageOnceTooltip" = "The recipient will be able to play it only once.";
 
+"Conversation.DiscardRecordedVoiceMessageDescription" = "Are you sure you want to discard\nyour voice message?";
+"Conversation.DiscardRecordedVoiceMessageAction" = "Discard";
@@ -556,8 +556,6 @@ public final class MediaScrubberComponent: Component {
                 transition: transition
             )
             
-            let _ = leftHandleFrame
-            let _ = rightHandleFrame
             let _ = ghostLeftHandleFrame
             let _ = ghostRightHandleFrame
             
@@ -585,12 +583,15 @@ public final class MediaScrubberComponent: Component {
            transition.setFrame(view: self.ghostTrimView, frame: ghostTrimViewFrame)
            transition.setAlpha(view: self.ghostTrimView, alpha: ghostTrimVisible ? 0.75 : 0.0)
            
-           // var containerLeftEdge = leftHandleFrame.maxX
-           // var containerRightEdge = rightHandleFrame.minX
-           // if self.isAudioSelected && component.duration > 0.0 {
-           //     containerLeftEdge = ghostLeftHandleFrame.maxX
-           //     containerRightEdge = ghostRightHandleFrame.minX
-           // }
+           if case .videoMessage = component.style {
+               for (_, trackView) in self.trackViews {
+                   trackView.updateOpaqueEdges(
+                       left: leftHandleFrame.minX,
+                       right: rightHandleFrame.maxX,
+                       transition: transition
+                   )
+               }
+           }
            
            let isDraggingTracks = self.trackViews.values.contains(where: { $0.isDragging })
            let isCursorHidden = isDraggingTracks || self.trimView.isPanningTrimHandle || self.ghostTrimView.isPanningTrimHandle
@@ -738,7 +739,7 @@ private class TrackView: UIView, UIScrollViewDelegate, UIGestureRecognizerDelega
    }
    
    @objc private func handleTap(_ gestureRecognizer: UITapGestureRecognizer) {
-       guard let (track, _, _) = self.params else {
+       guard let (track, _, _, _) = self.params else {
            return
        }
        self.onSelection(track.id)
@@ -787,9 +788,42 @@ private class TrackView: UIView, UIScrollViewDelegate, UIGestureRecognizerDelega
    private var params: (
        track: MediaScrubberComponent.Track,
        isSelected: Bool,
+       availableSize: CGSize,
        duration: Double
    )?
    
+   private var leftOpaqueEdge: CGFloat?
+   private var rightOpaqueEdge: CGFloat?
+   func updateOpaqueEdges(
+       left: CGFloat,
+       right: CGFloat,
+       transition: Transition
+   ) {
+       self.leftOpaqueEdge = left
+       self.rightOpaqueEdge = right
+       
+       if let params = self.params {
+           self.updateThumbnailContainers(
+               scrubberSize: CGSize(width: params.availableSize.width, height: 33.0),
+               availableSize: params.availableSize,
+               transition: transition
+           )
+       }
+   }
+   
+   private func updateThumbnailContainers(
+       scrubberSize: CGSize,
+       availableSize: CGSize,
+       transition: Transition
+   ) {
+       let containerLeftEdge: CGFloat = self.leftOpaqueEdge ?? 0.0
+       let containerRightEdge: CGFloat = self.rightOpaqueEdge ?? availableSize.width
+       
+       transition.setFrame(view: self.videoTransparentFramesContainer, frame: CGRect(origin: CGPoint(x: 0.0, y: 0.0), size: CGSize(width: scrubberSize.width, height: scrubberSize.height)))
+       transition.setFrame(view: self.videoOpaqueFramesContainer, frame: CGRect(origin: CGPoint(x: containerLeftEdge, y: 0.0), size: CGSize(width: containerRightEdge - containerLeftEdge, height: scrubberSize.height)))
+       transition.setBounds(view: self.videoOpaqueFramesContainer, bounds: CGRect(origin: CGPoint(x: containerLeftEdge, y: 0.0), size: CGSize(width: containerRightEdge - containerLeftEdge, height: scrubberSize.height)))
+   }
+   
    func update(
        context: AccountContext,
        style: MediaScrubberComponent.Style,
@@ -800,7 +834,7 @@ private class TrackView: UIView, UIScrollViewDelegate, UIGestureRecognizerDelega
        transition: Transition
    ) -> CGSize {
        let previousParams = self.params
-       self.params = (track, isSelected, duration)
+       self.params = (track, isSelected, availableSize, duration)
        
        let fullTrackHeight: CGFloat
        let framesCornerRadius: CGFloat
@@ -968,13 +1002,12 @@ private class TrackView: UIView, UIScrollViewDelegate, UIGestureRecognizerDelega
                }
            }
        }
        
-       let containerLeftEdge: CGFloat = 0.0
-       let containerRightEdge: CGFloat = availableSize.width
-
-       transition.setFrame(view: self.videoTransparentFramesContainer, frame: CGRect(origin: CGPoint(x: 0.0, y: 0.0), size: CGSize(width: scrubberSize.width, height: scrubberSize.height)))
-       transition.setFrame(view: self.videoOpaqueFramesContainer, frame: CGRect(origin: CGPoint(x: containerLeftEdge, y: 0.0), size: CGSize(width: containerRightEdge - containerLeftEdge, height: scrubberSize.height)))
-       transition.setBounds(view: self.videoOpaqueFramesContainer, bounds: CGRect(origin: CGPoint(x: containerLeftEdge, y: 0.0), size: CGSize(width: containerRightEdge - containerLeftEdge, height: scrubberSize.height)))
+       self.updateThumbnailContainers(
+           scrubberSize: scrubberSize,
+           availableSize: availableSize,
+           transition: transition
+       )
        
        var frameAspectRatio = 0.66
        if let image = frames.first, image.size.height > 0.0 {
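The three container-layout lines that used to sit inline in TrackView's update pass now live behind updateThumbnailContainers, so updateOpaqueEdges can re-run the same math whenever the trim handles move. The snippet below is an illustrative, self-contained sketch of that geometry only — simplified stand-in types, not the real MediaScrubberComponent API: the transparent strip always spans the whole scrubber, while the opaque strip is framed to the left/right opaque edges and given a matching bounds origin so its thumbnails stay in place and are merely clipped.

import Foundation

// Hypothetical helper mirroring the updateThumbnailContainers math.
struct ThumbnailContainerLayout {
    let transparentFrame: CGRect
    let opaqueFrame: CGRect
    let opaqueBounds: CGRect
}

func thumbnailContainerLayout(
    scrubberSize: CGSize,
    availableWidth: CGFloat,
    leftOpaqueEdge: CGFloat?,
    rightOpaqueEdge: CGFloat?
) -> ThumbnailContainerLayout {
    let leftEdge = leftOpaqueEdge ?? 0.0
    let rightEdge = rightOpaqueEdge ?? availableWidth
    let opaqueRect = CGRect(x: leftEdge, y: 0.0, width: rightEdge - leftEdge, height: scrubberSize.height)
    return ThumbnailContainerLayout(
        transparentFrame: CGRect(origin: .zero, size: scrubberSize),
        opaqueFrame: opaqueRect,
        // Using the same origin for bounds keeps the content stationary under the clip.
        opaqueBounds: opaqueRect
    )
}

// Example: a 320 pt scrubber trimmed to 40...280 only reveals the opaque strip
// between the handles; the frames themselves do not shift.
let layout = thumbnailContainerLayout(scrubberSize: CGSize(width: 320, height: 33), availableWidth: 320, leftOpaqueEdge: 40, rightOpaqueEdge: 280)
print(layout.opaqueFrame) // (40.0, 0.0, 240.0, 33.0)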
@@ -1219,7 +1219,7 @@ public class VideoMessageCameraScreen: ViewController {
        self.updatedPresentationData = updatedPresentationData
        self.inputPanelFrame = inputPanelFrame
        self.allowLiveUpload = peerId.namespace != Namespaces.Peer.SecretChat
-       self.viewOnceAvailable = peerId.namespace == Namespaces.Peer.CloudUser
+       self.viewOnceAvailable = peerId.namespace == Namespaces.Peer.CloudUser && peerId != context.account.peerId
        self.completion = completion
        
        self.recordingStatus = RecordingStatus(micLevel: self.micLevelValue.get(), duration: self.durationValue.get())
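For reference, the tightened availability rule reads as a simple predicate: view-once is offered only in one-to-one cloud-user chats, and no longer in the user's own Saved Messages chat, where peerId equals the account's own peer id. The helper below is a hypothetical restatement, not code from the commit.

// Hypothetical restatement of the check above.
func isViewOnceAvailable(isCloudUserPeer: Bool, isOwnSavedMessages: Bool) -> Bool {
    return isCloudUserPeer && !isOwnSavedMessages
}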
@@ -689,6 +689,10 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
                return false
            }
            
+           if strongSelf.presentRecordedVoiceMessageDiscardAlert(action: action, performAction: false) {
+               return false
+           }
+           
            return true
        }
        
@@ -743,6 +747,10 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
                    strongSelf.openViewOnceMediaMessage(message)
                    return false
                }
+           } else if file.isVideo {
+               if !displayVoiceMessageDiscardAlert() {
+                   return false
+               }
            }
        }
        if let invoice = media as? TelegramMediaInvoice, let extendedMedia = invoice.extendedMedia {
@@ -17359,6 +17367,25 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
        return false
    }
    
+   func presentRecordedVoiceMessageDiscardAlert(action: @escaping () -> Void = {}, alertAction: (() -> Void)? = nil, delay: Bool = false, performAction: Bool = true) -> Bool {
+       if let _ = self.presentationInterfaceState.recordedMediaPreview {
+           alertAction?()
+           Queue.mainQueue().after(delay ? 0.2 : 0.0) {
+               self.present(textAlertController(context: self.context, updatedPresentationData: self.updatedPresentationData, title: nil, text: self.presentationData.strings.Conversation_DiscardRecordedVoiceMessageDescription, actions: [TextAlertAction(type: .genericAction, title: self.presentationData.strings.Common_Cancel, action: {}), TextAlertAction(type: .defaultAction, title: self.presentationData.strings.Conversation_DiscardRecordedVoiceMessageAction, action: { [weak self] in
+                   self?.stopMediaRecorder()
+                   Queue.mainQueue().after(0.1) {
+                       action()
+                   }
+               })]), in: .window(.root))
+           }
+           
+           return true
+       } else if performAction {
+           action()
+       }
+       return false
+   }
+   
    func presentAutoremoveSetup() {
        guard let peer = self.presentationInterfaceState.renderedPeer?.peer else {
            return
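The new presentRecordedVoiceMessageDiscardAlert is a gate: callers ask it whether to proceed, and when a recorded preview exists it shows the Discard confirmation, defers the pending action behind it, and reports that the caller should bail out. A minimal, self-contained sketch of that flow — stand-in names, not the real ChatControllerImpl API — is below.

import Foundation

// Simplified model of the discard gate.
enum DiscardDecision {
    case proceed                                  // nothing recorded, the action already ran
    case waitForConfirmation(onDiscard: () -> Void)
}

func confirmDiscardIfNeeded(hasRecordedPreview: Bool, action: @escaping () -> Void) -> DiscardDecision {
    if hasRecordedPreview {
        // The real code presents textAlertController with Cancel / Discard and
        // calls stopMediaRecorder() before running the action.
        return .waitForConfirmation(onDiscard: action)
    }
    action()
    return .proceed
}

// Usage: a tap handler returns false (blocking navigation) while confirmation is pending.
let decision = confirmDiscardIfNeeded(hasRecordedPreview: true) {
    print("navigate away")
}
if case .waitForConfirmation(let onDiscard) = decision {
    onDiscard() // simulates the user tapping Discard
}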
@@ -169,6 +169,7 @@ final class ChatViewOnceMessageContextExtractedContentSource: ContextExtractedCo
            return nil
        }
        
+       let context = self.context
        self.idleTimerExtensionDisposable.set(self.context.sharedContext.applicationBindings.pushIdleTimerExtension())
        
        let isIncoming = self.message.effectivelyIncoming(self.context.account.peerId)
@@ -196,6 +197,7 @@ final class ChatViewOnceMessageContextExtractedContentSource: ContextExtractedCo
        var tooltipSourceRect: CGRect = .zero
        
        if let sourceNode {
+           let videoWidth = min(404.0, chatNode.frame.width - 2.0)
            var bubbleWidth: CGFloat = 0.0
            
            if (isIncoming || "".isEmpty) {
@@ -231,7 +233,7 @@ final class ChatViewOnceMessageContextExtractedContentSource: ContextExtractedCo
            
            if let messageNode = node as? ChatMessageItemView, let copyContentNode = messageNode.getMessageContextSourceNode(stableId: self.message.stableId) {
                if isVideo {
-                   self.initialAppearanceOffset = CGPoint(x: 0.0, y: width - 20.0 - copyContentNode.frame.height)
+                   self.initialAppearanceOffset = CGPoint(x: 0.0, y: min(videoWidth, width - 20.0) - copyContentNode.frame.height)
                }
                
                messageNode.frame.origin.y = sourceRect.origin.y
@@ -253,79 +255,99 @@ final class ChatViewOnceMessageContextExtractedContentSource: ContextExtractedCo
            }
            
            let mappedParentRect = chatNode.view.convert(chatNode.bounds, to: nil)
-           tooltipSourceRect = CGRect(x: mappedParentRect.minX + (isIncoming ? 22.0 : chatNode.frame.width - bubbleWidth + 10.0), y: floorToScreenPixels((chatNode.frame.height - 75.0) / 2.0) - 43.0, width: 44.0, height: 44.0)
+           if isVideo {
+               tooltipSourceRect = CGRect(x: mappedParentRect.minX + (isIncoming ? videoWidth / 2.0 : chatNode.frame.width - videoWidth / 2.0), y: floorToScreenPixels((chatNode.frame.height - videoWidth) / 2.0) + 8.0, width: 0.0, height: 0.0)
+           } else {
+               tooltipSourceRect = CGRect(x: mappedParentRect.minX + (isIncoming ? 22.0 : chatNode.frame.width - bubbleWidth + 10.0), y: floorToScreenPixels((chatNode.frame.height - 75.0) / 2.0) - 43.0, width: 44.0, height: 44.0)
+           }
        }
        
-       if !isVideo {
-           let displayTooltip = { [weak self] in
-               guard let self else {
-                   return
-               }
-               let absoluteFrame = tooltipSourceRect
-               let location = CGRect(origin: CGPoint(x: absoluteFrame.midX, y: absoluteFrame.maxY), size: CGSize())
-               
-               let presentationData = self.context.sharedContext.currentPresentationData.with { $0 }
-               var tooltipText: String?
-               if isIncoming {
-                   tooltipText = presentationData.strings.Chat_PlayOnceVoiceMessageTooltip
-               } else if let peer = self.message.peers[self.message.id.peerId] {
-                   let peerName = EnginePeer(peer).compactDisplayTitle
-                   tooltipText = presentationData.strings.Chat_PlayOnceVoiceMessageYourTooltip(peerName).string
-               }
-               
-               if let tooltipText {
-                   let tooltipController = TooltipScreen(
-                       account: self.context.account,
-                       sharedContext: self.context.sharedContext,
-                       text: .markdown(text: tooltipText),
-                       balancedTextLayout: true,
-                       constrainWidth: 240.0,
-                       style: .customBlur(UIColor(rgb: 0x18181a), 0.0),
-                       arrowStyle: .small,
-                       icon: nil,
-                       location: .point(location, .bottom),
-                       displayDuration: .custom(3.0),
-                       inset: 8.0,
-                       cornerRadius: 11.0,
-                       shouldDismissOnTouch: { _, _ in
-                           return .ignore
-                       }
-                   )
-                   self.tooltipController = tooltipController
-                   self.present(tooltipController)
-               }
-           }
-           
-           if isIncoming {
-               let _ = (ApplicationSpecificNotice.getIncomingVoiceMessagePlayOnceTip(accountManager: self.context.sharedContext.accountManager)
-               |> deliverOnMainQueue).startStandalone(next: { [weak self] counter in
-                   guard let self else {
-                       return
-                   }
-                   if counter >= 2 {
-                       return
-                   }
-                   Queue.mainQueue().after(0.3) {
-                       displayTooltip()
-                   }
-                   let _ = ApplicationSpecificNotice.incrementIncomingVoiceMessagePlayOnceTip(accountManager: self.context.sharedContext.accountManager).startStandalone()
-               })
-           } else {
-               let _ = (ApplicationSpecificNotice.getOutgoingVoiceMessagePlayOnceTip(accountManager: self.context.sharedContext.accountManager)
-               |> deliverOnMainQueue).startStandalone(next: { [weak self] counter in
-                   guard let self else {
-                       return
-                   }
-                   if counter >= 2 {
-                       return
-                   }
-                   Queue.mainQueue().after(0.3) {
-                       displayTooltip()
-                   }
-                   let _ = ApplicationSpecificNotice.incrementOutgoingVoiceMessagePlayOnceTip(accountManager: self.context.sharedContext.accountManager).startStandalone()
-               })
-           }
-       }
+       let displayTooltip = { [weak self] in
+           guard let self else {
+               return
+           }
+           let absoluteFrame = tooltipSourceRect
+           let location = CGRect(origin: CGPoint(x: absoluteFrame.midX, y: absoluteFrame.maxY), size: CGSize())
+           
+           let presentationData = self.context.sharedContext.currentPresentationData.with { $0 }
+           var tooltipText: String?
+           if isIncoming {
+               if isVideo {
+                   tooltipText = presentationData.strings.Chat_PlayOnceVideoMessageTooltip
+               } else {
+                   tooltipText = presentationData.strings.Chat_PlayOnceVoiceMessageTooltip
+               }
+           } else if let peer = self.message.peers[self.message.id.peerId] {
+               let peerName = EnginePeer(peer).compactDisplayTitle
+               if isVideo {
+                   tooltipText = presentationData.strings.Chat_PlayOnceVideoMessageYourTooltip(peerName).string
+               } else {
+                   tooltipText = presentationData.strings.Chat_PlayOnceVoiceMessageYourTooltip(peerName).string
+               }
+           }
+           
+           if let tooltipText {
+               let tooltipController = TooltipScreen(
+                   account: self.context.account,
+                   sharedContext: self.context.sharedContext,
+                   text: .markdown(text: tooltipText),
+                   balancedTextLayout: true,
+                   constrainWidth: 240.0,
+                   style: .customBlur(UIColor(rgb: 0x18181a), 0.0),
+                   arrowStyle: .small,
+                   icon: nil,
+                   location: .point(location, .bottom),
+                   displayDuration: .custom(3.0),
+                   inset: 8.0,
+                   cornerRadius: 11.0,
+                   shouldDismissOnTouch: { _, _ in
+                       return .ignore
+                   }
+               )
+               self.tooltipController = tooltipController
+               self.present(tooltipController)
+           }
+       }
+       
+       let tooltipStateSignal: Signal<Int32, NoError>
+       let updateTooltipState: () -> Void
+       if isVideo {
+           if isIncoming {
+               tooltipStateSignal = ApplicationSpecificNotice.getIncomingVideoMessagePlayOnceTip(accountManager: context.sharedContext.accountManager)
+               updateTooltipState = {
+                   let _ = ApplicationSpecificNotice.incrementIncomingVideoMessagePlayOnceTip(accountManager: context.sharedContext.accountManager).startStandalone()
+               }
+           } else {
+               tooltipStateSignal = ApplicationSpecificNotice.getOutgoingVideoMessagePlayOnceTip(accountManager: context.sharedContext.accountManager)
+               updateTooltipState = {
+                   let _ = ApplicationSpecificNotice.incrementOutgoingVideoMessagePlayOnceTip(accountManager: context.sharedContext.accountManager).startStandalone()
+               }
+           }
+       } else {
+           if isIncoming {
+               tooltipStateSignal = ApplicationSpecificNotice.getIncomingVoiceMessagePlayOnceTip(accountManager: context.sharedContext.accountManager)
+               updateTooltipState = {
+                   let _ = ApplicationSpecificNotice.incrementIncomingVoiceMessagePlayOnceTip(accountManager: context.sharedContext.accountManager).startStandalone()
+               }
+           } else {
+               tooltipStateSignal = ApplicationSpecificNotice.getOutgoingVoiceMessagePlayOnceTip(accountManager: context.sharedContext.accountManager)
+               updateTooltipState = {
+                   let _ = ApplicationSpecificNotice.incrementOutgoingVoiceMessagePlayOnceTip(accountManager: context.sharedContext.accountManager).startStandalone()
+               }
+           }
+       }
+       
+       let _ = (tooltipStateSignal
+       |> deliverOnMainQueue).startStandalone(next: { counter in
+           if counter >= 2 {
+               return
+           }
+           Queue.mainQueue().after(0.3) {
+               displayTooltip()
+           }
+           updateTooltipState()
+       })
        
        return result
    }
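Whatever the message kind, both branches feed the same display-count gate: a per-kind counter is read, the tooltip is shown only while the counter is below two, and the counter is incremented afterwards. Below is a rough, self-contained sketch of that gate, using an in-memory dictionary as a stand-in for the ApplicationSpecificNotice counters referenced above.

import Foundation

// In-memory stand-in for the play-once tip counters.
final class PlayOnceTipGate {
    private var counters: [String: Int32] = [:]
    private let maxDisplayCount: Int32 = 2

    /// Shows the tip only while its counter is below the cap, then bumps the counter.
    func presentIfNeeded(kind: String, display: () -> Void) {
        let counter = counters[kind, default: 0]
        guard counter < maxDisplayCount else {
            return
        }
        display()
        counters[kind] = counter + 1
    }
}

let gate = PlayOnceTipGate()
gate.presentIfNeeded(kind: "incomingVideo") { print("show Play Once tooltip") } // shown
gate.presentIfNeeded(kind: "incomingVideo") { print("show Play Once tooltip") } // shown
gate.presentIfNeeded(kind: "incomingVideo") { print("show Play Once tooltip") } // suppressed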
@@ -415,9 +415,14 @@ final class ChatRecordingPreviewInputPanelNode: ChatInputPanelNode {
            prevTextInputPanelNode.viewOnceButton.isHidden = true
            prevTextInputPanelNode.viewOnce = false
            
+           self.recordMoreButton.isEnabled = false
            self.viewOnceButton.layer.animatePosition(from: prevTextInputPanelNode.viewOnceButton.position, to: self.viewOnceButton.position, duration: 0.3, timingFunction: kCAMediaTimingFunctionSpring, completion: { _ in
                prevTextInputPanelNode.viewOnceButton.isHidden = false
                prevTextInputPanelNode.viewOnceButton.update(isSelected: false, animated: false)
+               
+               Queue.mainQueue().after(0.3) {
+                   self.recordMoreButton.isEnabled = true
+               }
            })
            
            self.recordMoreButton.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2)
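The isEnabled toggling around the button hand-off is a small re-entrancy guard: "record more" stays disabled while the view-once button animates between panels and comes back 0.3 s after the transition's completion fires. A hypothetical standalone illustration of the same idea (not the real panel node):

import Dispatch
import Foundation

// Stand-in for recordMoreButton: disabled during the hand-off animation,
// re-enabled shortly after the animation completion.
final class TransitionGuardedControl {
    private(set) var isEnabled = true

    func beginTransition() {
        isEnabled = false
    }

    /// Call from the animation's completion handler.
    func transitionDidComplete() {
        DispatchQueue.main.asyncAfter(deadline: .now() + 0.3) {
            self.isEnabled = true
        }
    }
}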
@@ -150,6 +150,7 @@ final class ManagedAudioRecorderContext {
    private let beganWithTone: (Bool) -> Void
    
    private var paused = true
+   private var manuallyPaused = false
    
    private let queue: Queue
    private let mediaManager: MediaManager
@@ -413,9 +414,11 @@ final class ManagedAudioRecorderContext {
        return Signal { subscriber in
            queue.async {
                if let strongSelf = self {
-                   strongSelf.hasAudioSession = false
-                   strongSelf.stop()
-                   strongSelf.recordingState.set(.stopped)
+                   if !strongSelf.manuallyPaused {
+                       strongSelf.hasAudioSession = false
+                       strongSelf.stop()
+                       strongSelf.recordingState.set(.stopped)
+                   }
                    subscriber.putCompletion()
                }
            }
@@ -450,13 +453,17 @@ final class ManagedAudioRecorderContext {
    func pause() {
        assert(self.queue.isCurrent())
        
-       self.paused = true
+       self.manuallyPaused = true
    }
    
    func resume() {
        assert(self.queue.isCurrent())
        
-       self.paused = false
+       if self.manuallyPaused {
+           self.manuallyPaused = false
+       } else if self.paused {
+           self.start()
+       }
    }
    
    func stop() {
@@ -500,7 +507,7 @@ final class ManagedAudioRecorderContext {
            free(buffer.mData)
        }
        
-       if !self.processSamples || self.paused {
+       if !self.processSamples || self.manuallyPaused {
            return
        }
        
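Taken together, the ManagedAudioRecorderContext hunks split "the user paused recording" from "recording has stopped": a manual pause no longer tears the recorder down when the audio session goes away, resume() only restarts capture if the recorder had actually stopped, and sample processing is skipped while manually paused. A condensed sketch of that state machine follows — simplified, not the real recorder.

import Foundation

// Simplified model of the paused / manuallyPaused distinction.
final class RecorderPauseModel {
    private(set) var paused = true          // recording is not running
    private(set) var manuallyPaused = false // user tapped pause; session stays warm

    func start() {
        paused = false
    }

    func pause() {
        manuallyPaused = true
    }

    func resume() {
        if manuallyPaused {
            manuallyPaused = false  // nothing was torn down; just keep capturing
        } else if paused {
            start()                 // recording had fully stopped; restart it
        }
    }

    /// Mirrors the audio-session teardown path: only stop when the pause
    /// was not user-initiated.
    func audioSessionWasLost(stop: () -> Void) {
        if !manuallyPaused {
            paused = true
            stop()
        }
    }

    /// Mirrors the sample-callback guard.
    var shouldProcessSamples: Bool {
        return !manuallyPaused
    }
}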