From 5b1352affe48706513319dc28aa51881fc0cb40a Mon Sep 17 00:00:00 2001
From: Ilya Laktyushin
Date: Sun, 14 Jan 2024 22:49:02 +0400
Subject: [PATCH 1/2] Video message recording improvements

---
 submodules/Camera/Sources/Camera.swift        | 29 +++++---
 submodules/Camera/Sources/CameraDevice.swift  |  6 +-
 submodules/Camera/Sources/CameraOutput.swift  | 52 ++++++++------
 .../Sources/PlainButtonComponent.swift        |  2 +
 .../Sources/VideoMessageCameraScreen.swift    | 71 ++++++++++++++-----
 .../TelegramUI/Sources/ChatController.swift   | 33 +++++++--
 .../ChatRecordingPreviewInputPanelNode.swift  |  7 ++
 .../Sources/ChatTextInputPanelNode.swift      |  2 +-
 8 files changed, 144 insertions(+), 58 deletions(-)

diff --git a/submodules/Camera/Sources/Camera.swift b/submodules/Camera/Sources/Camera.swift
index 6875719fa1..5f831246e1 100644
--- a/submodules/Camera/Sources/Camera.swift
+++ b/submodules/Camera/Sources/Camera.swift
@@ -57,17 +57,17 @@ final class CameraDeviceContext {
         self.output = CameraOutput(exclusive: exclusive, ciContext: ciContext, use32BGRA: use32BGRA)
     }
 
-    func configure(position: Camera.Position, previewView: CameraSimplePreviewView?, audio: Bool, photo: Bool, metadata: Bool, preferWide: Bool = false, preferLowerFramerate: Bool = false) {
+    func configure(position: Camera.Position, previewView: CameraSimplePreviewView?, audio: Bool, photo: Bool, metadata: Bool, preferWide: Bool = false, preferLowerFramerate: Bool = false, switchAudio: Bool = true) {
         guard let session = self.session else {
             return
         }
 
         self.previewView = previewView
 
-        self.device.configure(for: session, position: position, dual: !exclusive || additional)
+        self.device.configure(for: session, position: position, dual: !self.exclusive || self.additional, switchAudio: switchAudio)
         self.device.configureDeviceFormat(maxDimensions: self.maxDimensions(additional: self.additional, preferWide: preferWide), maxFramerate: self.preferredMaxFrameRate(useLower: preferLowerFramerate))
-        self.input.configure(for: session, device: self.device, audio: audio)
-        self.output.configure(for: session, device: self.device, input: self.input, previewView: previewView, audio: audio, photo: photo, metadata: metadata)
+        self.input.configure(for: session, device: self.device, audio: audio && switchAudio)
+        self.output.configure(for: session, device: self.device, input: self.input, previewView: previewView, audio: audio && switchAudio, photo: photo, metadata: metadata)
 
         self.output.configureVideoStabilization()
 
@@ -260,7 +260,14 @@ private final class CameraContext {
             self._positionPromise.set(targetPosition)
             self.modeChange = .position
 
-            mainDeviceContext.configure(position: targetPosition, previewView: self.simplePreviewView, audio: self.initialConfiguration.audio, photo: self.initialConfiguration.photo, metadata: self.initialConfiguration.metadata, preferWide: self.initialConfiguration.preferWide, preferLowerFramerate: self.initialConfiguration.preferLowerFramerate)
+            let isRoundVideo = self.initialConfiguration.isRoundVideo
+            let preferWide = self.initialConfiguration.preferWide || (self.positionValue == .front && isRoundVideo)
+            let preferLowerFramerate = self.initialConfiguration.preferLowerFramerate || isRoundVideo
+
+            mainDeviceContext.configure(position: targetPosition, previewView: self.simplePreviewView, audio: self.initialConfiguration.audio, photo: self.initialConfiguration.photo, metadata: self.initialConfiguration.metadata, preferWide: preferWide, preferLowerFramerate: preferLowerFramerate, switchAudio: !isRoundVideo)
+            if isRoundVideo {
+                mainDeviceContext.output.markPositionChange(position: targetPosition)
+            }
 
             self.queue.after(0.5) {
                 self.modeChange = .none
@@ -277,7 +284,10 @@ private final class CameraContext {
             self.positionValue = position
             self.modeChange = .position
 
-            self.mainDeviceContext?.configure(position: position, previewView: self.simplePreviewView, audio: self.initialConfiguration.audio, photo: self.initialConfiguration.photo, metadata: self.initialConfiguration.metadata)
+            let preferWide = self.initialConfiguration.preferWide || (self.positionValue == .front && self.initialConfiguration.isRoundVideo)
+            let preferLowerFramerate = self.initialConfiguration.preferLowerFramerate || self.initialConfiguration.isRoundVideo
+
+            self.mainDeviceContext?.configure(position: position, previewView: self.simplePreviewView, audio: self.initialConfiguration.audio, photo: self.initialConfiguration.photo, metadata: self.initialConfiguration.metadata, preferWide: preferWide, preferLowerFramerate: preferLowerFramerate)
 
             self.queue.after(0.5) {
                 self.modeChange = .none
@@ -342,8 +352,11 @@ private final class CameraContext {
             self.additionalDeviceContext?.invalidate()
             self.additionalDeviceContext = nil
 
-            self.mainDeviceContext = CameraDeviceContext(session: self.session, exclusive: true, additional: false, ciContext: self.ciContext, use32BGRA: false)
-            self.mainDeviceContext?.configure(position: self.positionValue, previewView: self.simplePreviewView, audio: self.initialConfiguration.audio, photo: self.initialConfiguration.photo, metadata: self.initialConfiguration.metadata, preferWide: self.initialConfiguration.preferWide, preferLowerFramerate: self.initialConfiguration.preferLowerFramerate)
+            let preferWide = self.initialConfiguration.preferWide || (self.positionValue == .front && self.initialConfiguration.isRoundVideo)
+            let preferLowerFramerate = self.initialConfiguration.preferLowerFramerate || self.initialConfiguration.isRoundVideo
+
+            self.mainDeviceContext = CameraDeviceContext(session: self.session, exclusive: true, additional: false, ciContext: self.ciContext, use32BGRA: self.initialConfiguration.isRoundVideo)
+            self.mainDeviceContext?.configure(position: self.positionValue, previewView: self.simplePreviewView, audio: self.initialConfiguration.audio, photo: self.initialConfiguration.photo, metadata: self.initialConfiguration.metadata, preferWide: preferWide, preferLowerFramerate: preferLowerFramerate)
         }
         self.mainDeviceContext?.output.processSampleBuffer = { [weak self] sampleBuffer, pixelBuffer, connection in
             guard let self, let mainDeviceContext = self.mainDeviceContext else {
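
The round-video capture preferences above are derived inline at each configure call site. Condensed, the rule is roughly the following sketch (illustrative helper and type names, not code from the patch; patch 2 below additionally drops the front-camera condition from preferWide):

    // Sketch: deriving capture preferences for round video messages.
    // `Configuration` stands in for the camera's initial-configuration type.
    func roundVideoPreferences(_ config: Configuration, position: Camera.Position) -> (preferWide: Bool, preferLowerFramerate: Bool, switchAudio: Bool) {
        let isRound = config.isRoundVideo
        return (
            preferWide: config.preferWide || (position == .front && isRound), // wide formats crop better to the square frame
            preferLowerFramerate: config.preferLowerFramerate || isRound,     // round messages favor small encodes over high fps
            switchAudio: !isRound                                             // keep the audio input attached across camera flips
        )
    }
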
diff --git a/submodules/Camera/Sources/CameraDevice.swift b/submodules/Camera/Sources/CameraDevice.swift
index 139d429b50..d8c75fbe14 100644
--- a/submodules/Camera/Sources/CameraDevice.swift
+++ b/submodules/Camera/Sources/CameraDevice.swift
@@ -29,7 +29,7 @@ final class CameraDevice {
 
     public private(set) var audioDevice: AVCaptureDevice? = nil
 
-    func configure(for session: CameraSession, position: Camera.Position, dual: Bool) {
+    func configure(for session: CameraSession, position: Camera.Position, dual: Bool, switchAudio: Bool) {
         self.position = position
 
         var selectedDevice: AVCaptureDevice?
@@ -57,7 +57,9 @@ final class CameraDevice {
         self.videoDevice = selectedDevice
         self.videoDevicePromise.set(.single(selectedDevice))
 
-        self.audioDevice = AVCaptureDevice.default(for: .audio)
+        if switchAudio {
+            self.audioDevice = AVCaptureDevice.default(for: .audio)
+        }
     }
 
     func configureDeviceFormat(maxDimensions: CMVideoDimensions, maxFramerate: Double) {
diff --git a/submodules/Camera/Sources/CameraOutput.swift b/submodules/Camera/Sources/CameraOutput.swift
index 9b6787b011..483a7fead1 100644
--- a/submodules/Camera/Sources/CameraOutput.swift
+++ b/submodules/Camera/Sources/CameraOutput.swift
@@ -417,32 +417,40 @@ final class CameraOutput: NSObject {
 
         if let videoRecorder = self.videoRecorder, videoRecorder.isRecording {
             if case .roundVideo = self.currentMode, type == kCMMediaType_Video {
-                var transitionFactor: CGFloat = 0.0
-                let currentTimestamp = CACurrentMediaTime()
-                let duration: Double = 0.2
-                if case .front = self.currentPosition {
-                    transitionFactor = 1.0
-                    if self.lastSwitchTimestamp > 0.0, currentTimestamp - self.lastSwitchTimestamp < duration {
-                        transitionFactor = max(0.0, (currentTimestamp - self.lastSwitchTimestamp) / duration)
-                    }
-                } else {
-                    transitionFactor = 0.0
-                    if self.lastSwitchTimestamp > 0.0, currentTimestamp - self.lastSwitchTimestamp < duration {
-                        transitionFactor = 1.0 - max(0.0, (currentTimestamp - self.lastSwitchTimestamp) / duration)
-                    }
-                }
-                if let processedSampleBuffer = self.processRoundVideoSampleBuffer(sampleBuffer, additional: fromAdditionalOutput, transitionFactor: transitionFactor) {
-                    let presentationTime = CMSampleBufferGetPresentationTimeStamp(processedSampleBuffer)
-                    if let lastSampleTimestamp = self.lastSampleTimestamp, lastSampleTimestamp > presentationTime {
-
+                if !self.exclusive {
+                    var transitionFactor: CGFloat = 0.0
+                    let currentTimestamp = CACurrentMediaTime()
+                    let duration: Double = 0.2
+                    if case .front = self.currentPosition {
+                        transitionFactor = 1.0
+                        if self.lastSwitchTimestamp > 0.0, currentTimestamp - self.lastSwitchTimestamp < duration {
+                            transitionFactor = max(0.0, (currentTimestamp - self.lastSwitchTimestamp) / duration)
+                        }
                     } else {
-                        if (transitionFactor == 1.0 && fromAdditionalOutput) || (transitionFactor == 0.0 && !fromAdditionalOutput) || (transitionFactor > 0.0 && transitionFactor < 1.0) {
-                            videoRecorder.appendSampleBuffer(processedSampleBuffer)
-                            self.lastSampleTimestamp = presentationTime
+                        transitionFactor = 0.0
+                        if self.lastSwitchTimestamp > 0.0, currentTimestamp - self.lastSwitchTimestamp < duration {
+                            transitionFactor = 1.0 - max(0.0, (currentTimestamp - self.lastSwitchTimestamp) / duration)
                         }
                     }
+                    if let processedSampleBuffer = self.processRoundVideoSampleBuffer(sampleBuffer, additional: fromAdditionalOutput, transitionFactor: transitionFactor) {
+                        let presentationTime = CMSampleBufferGetPresentationTimeStamp(processedSampleBuffer)
+                        if let lastSampleTimestamp = self.lastSampleTimestamp, lastSampleTimestamp > presentationTime {
+
+                        } else {
+                            if (transitionFactor == 1.0 && fromAdditionalOutput) || (transitionFactor == 0.0 && !fromAdditionalOutput) || (transitionFactor > 0.0 && transitionFactor < 1.0) {
+                                videoRecorder.appendSampleBuffer(processedSampleBuffer)
+                                self.lastSampleTimestamp = presentationTime
+                            }
+                        }
+                    } else {
+                        videoRecorder.appendSampleBuffer(sampleBuffer)
+                    }
                 } else {
-                    videoRecorder.appendSampleBuffer(sampleBuffer)
+                    if let processedSampleBuffer = self.processRoundVideoSampleBuffer(sampleBuffer, additional: self.currentPosition == .front, transitionFactor: self.currentPosition == .front ? 1.0 : 0.0) {
+                        videoRecorder.appendSampleBuffer(processedSampleBuffer)
+                    } else {
+                        videoRecorder.appendSampleBuffer(sampleBuffer)
+                    }
                 }
             } else {
                 videoRecorder.appendSampleBuffer(sampleBuffer)
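
For reference: the dual-camera branch above computes a linear crossfade between the front and back streams over a 0.2 s window after a flip, while the exclusive (single-camera) branch now always routes frames through processRoundVideoSampleBuffer. A condensed sketch of the factor computation, under the patch's lastSwitchTimestamp semantics (the helper name is illustrative; the patch inlines this logic):

    import QuartzCore

    // Sketch: front/back crossfade factor while a camera flip is in flight.
    // 1.0 = fully front, 0.0 = fully back; the ramp lasts `duration` seconds.
    func crossfadeFactor(isFront: Bool, lastSwitchTimestamp: Double, duration: Double = 0.2) -> CGFloat {
        let elapsed = CACurrentMediaTime() - lastSwitchTimestamp
        guard lastSwitchTimestamp > 0.0, elapsed < duration else {
            return isFront ? 1.0 : 0.0 // settled: show only the current camera
        }
        let progress = CGFloat(max(0.0, elapsed / duration))
        return isFront ? progress : 1.0 - progress
    }
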
diff --git a/submodules/TelegramUI/Components/PlainButtonComponent/Sources/PlainButtonComponent.swift b/submodules/TelegramUI/Components/PlainButtonComponent/Sources/PlainButtonComponent.swift
index bb386fa85d..672468188a 100644
--- a/submodules/TelegramUI/Components/PlainButtonComponent/Sources/PlainButtonComponent.swift
+++ b/submodules/TelegramUI/Components/PlainButtonComponent/Sources/PlainButtonComponent.swift
@@ -88,6 +88,8 @@ public final class PlainButtonComponent: Component {
         override init(frame: CGRect) {
             super.init(frame: frame)
 
+            self.isExclusiveTouch = true
+
             self.contentContainer.isUserInteractionEnabled = false
             self.addSubview(self.contentContainer)
 
diff --git a/submodules/TelegramUI/Components/VideoMessageCameraScreen/Sources/VideoMessageCameraScreen.swift b/submodules/TelegramUI/Components/VideoMessageCameraScreen/Sources/VideoMessageCameraScreen.swift
index dfe1b91825..81cb98617b 100644
--- a/submodules/TelegramUI/Components/VideoMessageCameraScreen/Sources/VideoMessageCameraScreen.swift
+++ b/submodules/TelegramUI/Components/VideoMessageCameraScreen/Sources/VideoMessageCameraScreen.swift
@@ -241,15 +241,21 @@ private final class VideoMessageCameraScreenComponent: CombinedComponent {
                     return
                 }
 
+                let currentTimestamp = CACurrentMediaTime()
+                if let lastActionTimestamp = controller.lastActionTimestamp, currentTimestamp - lastActionTimestamp < 0.5 {
+                    return
+                }
+                controller.lastActionTimestamp = currentTimestamp
+
                 let initialDuration = controller.node.previewState?.composition.duration.seconds ?? 0.0
+                let isFirstRecording = initialDuration.isZero
+                controller.node.resumeCameraCapture()
+
                 controller.updatePreviewState({ _ in return nil}, transition: .spring(duration: 0.4))
 
                 controller.node.dismissAllTooltips()
                 controller.updateCameraState({ $0.updatedRecording(pressing ? .holding : .handsFree).updatedDuration(initialDuration) }, transition: .spring(duration: 0.4))
-
-                let isFirstRecording = initialDuration.isZero
-                controller.node.resumeCameraCapture()
-
+
                 controller.node.withReadyCamera(isFirstTime: !controller.node.cameraIsActive) {
                     self.resultDisposable.set((camera.startRecording()
                     |> deliverOnMainQueue).start(next: { [weak self] recordingData in
@@ -275,6 +281,11 @@ private final class VideoMessageCameraScreenComponent: CombinedComponent {
             guard let controller = self.getController(), let camera = controller.camera else {
                 return
             }
+            let currentTimestamp = CACurrentMediaTime()
+            if let lastActionTimestamp = controller.lastActionTimestamp, currentTimestamp - lastActionTimestamp < 0.5 {
+                return
+            }
+            controller.lastActionTimestamp = currentTimestamp
 
             self.resultDisposable.set((camera.stopRecording()
             |> deliverOnMainQueue).start(next: { [weak self] result in
@@ -598,7 +609,7 @@ public class VideoMessageCameraScreen: ViewController {
             self.previewContainerView = UIView()
             self.previewContainerView.clipsToBounds = true
 
-            let isDualCameraEnabled = Camera.isDualCameraSupported
+            let isDualCameraEnabled = Camera.isDualCameraSupported //!"".isEmpty //
             let isFrontPosition = "".isEmpty
 
             self.mainPreviewView = CameraSimplePreviewView(frame: .zero, main: true)
@@ -638,7 +649,9 @@ public class VideoMessageCameraScreen: ViewController {
             self.containerView.addSubview(self.previewContainerView)
             self.previewContainerView.addSubview(self.mainPreviewView)
-            self.previewContainerView.addSubview(self.additionalPreviewView)
+            if isDualCameraEnabled {
+                self.previewContainerView.addSubview(self.additionalPreviewView)
+            }
             self.previewContainerView.addSubview(self.progressView)
             self.previewContainerView.addSubview(self.previewBlurView)
             self.previewContainerView.addSubview(self.loadingView)
@@ -652,7 +665,7 @@ public class VideoMessageCameraScreen: ViewController {
                 self.mainPreviewView.removePlaceholder(delay: 0.0)
             }
             self.withReadyCamera(isFirstTime: true, {
-                if isDualCameraEnabled {
+                if !isDualCameraEnabled {
                     self.mainPreviewView.removePlaceholder(delay: 0.0)
                 }
                 self.loadingView.alpha = 0.0
@@ -675,7 +688,7 @@ public class VideoMessageCameraScreen: ViewController {
 
         func withReadyCamera(isFirstTime: Bool = false, _ f: @escaping () -> Void) {
             if #available(iOS 13.0, *) {
-                let _ = (self.additionalPreviewView.isPreviewing
+                let _ = ((self.cameraState.isDualCameraEnabled ? self.additionalPreviewView.isPreviewing : self.mainPreviewView.isPreviewing)
                 |> filter { $0 }
                 |> take(1)).startStandalone(next: { _ in
                     f()
@@ -805,7 +818,7 @@ public class VideoMessageCameraScreen: ViewController {
 
         func resumeCameraCapture() {
             if !self.mainPreviewView.isEnabled {
-                if let snapshotView = self.previewContainerView.snapshotView(afterScreenUpdates: false) {
+                if let snapshotView = self.resultPreviewView?.snapshotView(afterScreenUpdates: false) {
                     self.previewContainerView.insertSubview(snapshotView, belowSubview: self.previewBlurView)
                     self.previewSnapshotView = snapshotView
                 }
@@ -1177,7 +1190,7 @@ public class VideoMessageCameraScreen: ViewController {
     fileprivate var allowLiveUpload: Bool
     fileprivate var viewOnceAvailable: Bool
 
-    fileprivate let completion: (EnqueueMessage?) -> Void
+    fileprivate let completion: (EnqueueMessage?, Bool?, Int32?) -> Void
 
     private var audioSessionDisposable: Disposable?
@@ -1320,15 +1333,16 @@ public class VideoMessageCameraScreen: ViewController {
     public init(
         context: AccountContext,
         updatedPresentationData: (initial: PresentationData, signal: Signal<PresentationData, NoError>)?,
-        peerId: EnginePeer.Id,
+        allowLiveUpload: Bool,
+        viewOnceAvailable: Bool,
         inputPanelFrame: CGRect,
-        completion: @escaping (EnqueueMessage?) -> Void
+        completion: @escaping (EnqueueMessage?, Bool?, Int32?) -> Void
     ) {
         self.context = context
         self.updatedPresentationData = updatedPresentationData
         self.inputPanelFrame = inputPanelFrame
-        self.allowLiveUpload = peerId.namespace != Namespaces.Peer.SecretChat
-        self.viewOnceAvailable = peerId.namespace == Namespaces.Peer.CloudUser && peerId != context.account.peerId
+        self.allowLiveUpload = allowLiveUpload
+        self.viewOnceAvailable = viewOnceAvailable
         self.completion = completion
 
         self.recordingStatus = RecordingStatus(micLevel: self.micLevelValue.get(), duration: self.durationValue.get())
@@ -1360,10 +1374,21 @@ public class VideoMessageCameraScreen: ViewController {
         super.displayNodeDidLoad()
     }
 
+    fileprivate var didSend = false
+    fileprivate var lastActionTimestamp: Double?
     fileprivate var isSendingImmediately = false
-    public func sendVideoRecording() {
+
+    public func sendVideoRecording(silentPosting: Bool? = nil, scheduleTime: Int32? = nil) {
+        guard !self.didSend else {
+            return
+        }
+
+        let currentTimestamp = CACurrentMediaTime()
+        if let lastActionTimestamp = self.lastActionTimestamp, currentTimestamp - lastActionTimestamp < 0.5 {
+            return
+        }
+
         if case .none = self.cameraState.recording, self.node.results.isEmpty {
-            self.completion(nil)
+            self.completion(nil, nil, nil)
             return
         }
 
@@ -1374,6 +1399,8 @@ public class VideoMessageCameraScreen: ViewController {
             self.node.stopRecording.invoke(Void())
         }
 
+        self.didSend = true
+
         let _ = (self.currentResults
         |> take(1)
        |> deliverOnMainQueue).startStandalone(next: { [weak self] results in
@@ -1393,7 +1420,7 @@ public class VideoMessageCameraScreen: ViewController {
             }
 
             if duration < 1.0 {
-                self.completion(nil)
+                self.completion(nil, nil, nil)
                 return
             }
 
@@ -1467,12 +1494,16 @@ public class VideoMessageCameraScreen: ViewController {
                 localGroupingKey: nil,
                 correlationId: nil,
                 bubbleUpEmojiOrStickersets: []
-            ))
+            ), silentPosting, scheduleTime)
         })
     }
 
     private var waitingForNextResult = false
     public func stopVideoRecording() -> Bool {
+        guard !self.didSend else {
+            return false
+        }
+
         self.node.dismissAllTooltips()
 
         self.waitingForNextResult = true
@@ -1487,6 +1518,10 @@
     fileprivate var recordingStartTime: Double?
     fileprivate var scheduledLock = false
     public func lockVideoRecording() {
+        guard !self.didSend else {
+            return
+        }
+
         if case .none = self.cameraState.recording {
             self.scheduledLock = true
             self.node.requestUpdateLayout(transition: .spring(duration: 0.4))
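
The guards added to the record, stop, send, and lock paths above all follow the same timestamp-debounce pattern. A minimal sketch, assuming the patch's 0.5 s interval (the patch inlines this in each handler rather than using a shared helper):

    import QuartzCore

    // Sketch: ignore a repeated action within 0.5 s of the previous one.
    final class ActionDebouncer {
        private var lastActionTimestamp: Double?

        func shouldPerform(minInterval: Double = 0.5) -> Bool {
            let now = CACurrentMediaTime()
            if let last = self.lastActionTimestamp, now - last < minInterval {
                return false // too soon after the previous action
            }
            self.lastActionTimestamp = now
            return true
        }
    }
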
diff --git a/submodules/TelegramUI/Sources/ChatController.swift b/submodules/TelegramUI/Sources/ChatController.swift
index 12a2a1b8e8..a7a7ece3a6 100644
--- a/submodules/TelegramUI/Sources/ChatController.swift
+++ b/submodules/TelegramUI/Sources/ChatController.swift
@@ -683,6 +683,10 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
                 return true
             }
 
+            if let _ = strongSelf.videoRecorderValue {
+                return false
+            }
+
             strongSelf.chatDisplayNode.messageTransitionNode.dismissMessageReactionContexts()
 
             if strongSelf.presentVoiceMessageDiscardAlert(action: action, performAction: false) {
@@ -15374,14 +15378,18 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
             isScheduledMessages = true
         }
 
-        let _ = isScheduledMessages
+        var isBot = false
+        if let user = self.presentationInterfaceState.renderedPeer?.peer as? TelegramUser, user.botInfo != nil {
+            isBot = true
+        }
 
         let controller = VideoMessageCameraScreen(
             context: self.context,
             updatedPresentationData: self.updatedPresentationData,
-            peerId: peerId,
+            allowLiveUpload: peerId.namespace != Namespaces.Peer.SecretChat,
+            viewOnceAvailable: !isScheduledMessages && peerId.namespace == Namespaces.Peer.CloudUser && peerId != self.context.account.peerId && !isBot,
             inputPanelFrame: currentInputPanelFrame,
-            completion: { [weak self] message in
+            completion: { [weak self] message, silentPosting, scheduleTime in
                 guard let self, let videoController = self.videoRecorderValue else {
                     return
                 }
@@ -15400,8 +15408,7 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
                     .withUpdatedCorrelationId(correlationId)
 
                 var usedCorrelationId = false
-
-                if self.chatDisplayNode.shouldAnimateMessageTransition, let extractedView = videoController.extractVideoSnapshot() {
+                if scheduleTime == nil, self.chatDisplayNode.shouldAnimateMessageTransition, let extractedView = videoController.extractVideoSnapshot() {
                     usedCorrelationId = true
                     self.chatDisplayNode.messageTransitionNode.add(correlationId: correlationId, source: .videoMessage(ChatMessageTransitionNodeImpl.Source.VideoMessage(view: extractedView)), initiated: { [weak videoController, weak self] in
                         videoController?.hideVideoSnapshot()
@@ -15424,7 +15431,17 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
                     }
                 }, usedCorrelationId ? correlationId : nil)
 
-                self.sendMessages([message])
+                let messages = [message]
+                let transformedMessages: [EnqueueMessage]
+                if let silentPosting {
+                    transformedMessages = self.transformEnqueueMessages(messages, silentPosting: silentPosting)
+                } else if let scheduleTime {
+                    transformedMessages = self.transformEnqueueMessages(messages, silentPosting: false, scheduleTime: scheduleTime)
+                } else {
+                    transformedMessages = self.transformEnqueueMessages(messages)
+                }
+
+                self.sendMessages(transformedMessages)
             }
         )
         controller.onResume = { [weak self] in
@@ -15634,6 +15651,8 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
     }
 
     func resumeMediaRecorder() {
+        self.context.sharedContext.mediaManager.playlistControl(.playback(.pause), type: nil)
+
        if let audioRecorderValue = self.audioRecorderValue {
             audioRecorderValue.resume()
 
@@ -15743,7 +15762,7 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
                 donateSendMessageIntent(account: self.context.account, sharedContext: self.context.sharedContext, intentContext: .chat, peerIds: [peerId])
 
             case .video:
-                self.videoRecorderValue?.sendVideoRecording()
+                self.videoRecorderValue?.sendVideoRecording(silentPosting: silentPosting, scheduleTime: scheduleTime)
             }
         }
diff --git a/submodules/TelegramUI/Sources/ChatRecordingPreviewInputPanelNode.swift b/submodules/TelegramUI/Sources/ChatRecordingPreviewInputPanelNode.swift
index d12c41f77d..2e31756982 100644
--- a/submodules/TelegramUI/Sources/ChatRecordingPreviewInputPanelNode.swift
+++ b/submodules/TelegramUI/Sources/ChatRecordingPreviewInputPanelNode.swift
@@ -107,6 +107,7 @@ final class ChatRecordingPreviewInputPanelNode: ChatInputPanelNode {
         self.sendButton = HighlightTrackingButtonNode()
         self.sendButton.displaysAsynchronously = false
+        self.sendButton.isExclusiveTouch = true
         self.sendButton.setImage(PresentationResourcesChat.chatInputPanelSendButtonImage(theme), for: [])
 
         self.viewOnceButton = ChatRecordingViewOnceButtonNode(icon: .viewOnce)
@@ -195,6 +196,12 @@ final class ChatRecordingPreviewInputPanelNode: ChatInputPanelNode {
             let gestureRecognizer = ContextGesture(target: nil, action: nil)
             self.sendButton.view.addGestureRecognizer(gestureRecognizer)
             self.gestureRecognizer = gestureRecognizer
+            gestureRecognizer.shouldBegin = { [weak self] _ in
+                if let self, self.viewOnce {
+                    return false
+                }
+                return true
+            }
             gestureRecognizer.activated = { [weak self] gesture, _ in
                 guard let strongSelf = self else {
                     return
diff --git a/submodules/TelegramUI/Sources/ChatTextInputPanelNode.swift b/submodules/TelegramUI/Sources/ChatTextInputPanelNode.swift
index 6323a39915..31e3cd4f90 100644
--- a/submodules/TelegramUI/Sources/ChatTextInputPanelNode.swift
+++ b/submodules/TelegramUI/Sources/ChatTextInputPanelNode.swift
@@ -972,7 +972,7 @@ class ChatTextInputPanelNode: ChatInputPanelNode, ASEditableTextNodeDelegate, Ch
                         interfaceInteraction.finishMediaRecording(.dismiss)
                     }
                 } else {
-                    interfaceInteraction.finishMediaRecording(.dismiss)
+//                    interfaceInteraction.finishMediaRecording(.dismiss)
                 }
                 strongSelf.viewOnce = false
                 strongSelf.tooltipController?.dismiss()

From 8bcc38c24e25d27697235a93d046e78320075276 Mon Sep 17 00:00:00 2001
From: Ilya Laktyushin
Date: Mon, 15 Jan 2024 02:26:12 +0400
Subject: [PATCH 2/2] Video message recording improvements

---
 submodules/Camera/Sources/Camera.swift        | 15 ++--
 submodules/Camera/Sources/CameraInput.swift   |  5 +-
 submodules/Camera/Sources/CameraOutput.swift  | 61 ++++++++++++--
 .../Camera/Sources/CameraPreviewView.swift    | 40 +++++++---
 submodules/Camera/Sources/VideoRecorder.swift |  4 +-
 .../Sources/ThemeGridControllerNode.swift     | 20 +++--
 .../Sources/VideoMessageCameraScreen.swift    | 80 +++++++++++++++----
 .../Sources/ChatControllerNode.swift          |  3 +
 8 files changed, 182 insertions(+), 46 deletions(-)

diff --git a/submodules/Camera/Sources/Camera.swift b/submodules/Camera/Sources/Camera.swift
index 5f831246e1..55c6816e6a 100644
--- a/submodules/Camera/Sources/Camera.swift
+++ b/submodules/Camera/Sources/Camera.swift
@@ -74,12 +74,12 @@ final class CameraDeviceContext {
         self.device.resetZoom(neutral: self.exclusive || !self.additional)
     }
 
-    func invalidate() {
+    func invalidate(switchAudio: Bool = true) {
         guard let session = self.session else {
             return
         }
-        self.output.invalidate(for: session)
-        self.input.invalidate(for: session)
+        self.output.invalidate(for: session, switchAudio: switchAudio)
+        self.input.invalidate(for: session, switchAudio: switchAudio)
     }
 
     private func maxDimensions(additional: Bool, preferWide: Bool) -> CMVideoDimensions {
@@ -248,7 +248,8 @@ private final class CameraContext {
                 mainDeviceContext.output.markPositionChange(position: targetPosition)
             } else {
                 self.configure {
-                    self.mainDeviceContext?.invalidate()
+                    let isRoundVideo = self.initialConfiguration.isRoundVideo
+                    self.mainDeviceContext?.invalidate(switchAudio: !isRoundVideo)
 
                     let targetPosition: Camera.Position
                     if case .back = mainDeviceContext.device.position {
@@ -260,8 +261,8 @@ private final class CameraContext {
             self._positionPromise.set(targetPosition)
             self.modeChange = .position
-
-            let isRoundVideo = self.initialConfiguration.isRoundVideo
-            let preferWide = self.initialConfiguration.preferWide || (self.positionValue == .front && isRoundVideo)
+
+            let preferWide = self.initialConfiguration.preferWide || isRoundVideo
             let preferLowerFramerate = self.initialConfiguration.preferLowerFramerate || isRoundVideo
 
             mainDeviceContext.configure(position: targetPosition, previewView: self.simplePreviewView, audio: self.initialConfiguration.audio, photo: self.initialConfiguration.photo, metadata: self.initialConfiguration.metadata, preferWide: preferWide, preferLowerFramerate: preferLowerFramerate, switchAudio: !isRoundVideo)
@@ -352,7 +353,7 @@ private final class CameraContext {
             self.additionalDeviceContext?.invalidate()
             self.additionalDeviceContext = nil
 
-            let preferWide = self.initialConfiguration.preferWide || (self.positionValue == .front && self.initialConfiguration.isRoundVideo)
+            let preferWide = self.initialConfiguration.preferWide || self.initialConfiguration.isRoundVideo
             let preferLowerFramerate = self.initialConfiguration.preferLowerFramerate || self.initialConfiguration.isRoundVideo
 
             self.mainDeviceContext = CameraDeviceContext(session: self.session, exclusive: true, additional: false, ciContext: self.ciContext, use32BGRA: self.initialConfiguration.isRoundVideo)
diff --git a/submodules/Camera/Sources/CameraInput.swift b/submodules/Camera/Sources/CameraInput.swift
index c058c2127b..29adeca209 100644
--- a/submodules/Camera/Sources/CameraInput.swift
+++ b/submodules/Camera/Sources/CameraInput.swift
@@ -14,8 +14,11 @@ class CameraInput {
         }
     }
 
-    func invalidate(for session: CameraSession) {
+    func invalidate(for session: CameraSession, switchAudio: Bool = true) {
         for input in session.session.inputs {
+            if !switchAudio && input === self.audioInput {
+                continue
+            }
             session.session.removeInput(input)
         }
     }
diff --git a/submodules/Camera/Sources/CameraOutput.swift b/submodules/Camera/Sources/CameraOutput.swift
index 483a7fead1..ff09574e62 100644
--- a/submodules/Camera/Sources/CameraOutput.swift
+++ b/submodules/Camera/Sources/CameraOutput.swift
@@ -190,7 +190,7 @@ final class CameraOutput: NSObject {
         }
     }
 
-    func invalidate(for session: CameraSession) {
+    func invalidate(for session: CameraSession, switchAudio: Bool = true) {
         if #available(iOS 13.0, *) {
             if let previewConnection = self.previewConnection {
                 if session.session.connections.contains(where: { $0 === previewConnection }) {
@@ -214,7 +214,7 @@ final class CameraOutput: NSObject {
         if session.session.outputs.contains(where: { $0 === self.videoOutput }) {
             session.session.removeOutput(self.videoOutput)
         }
-        if session.session.outputs.contains(where: { $0 === self.audioOutput }) {
+        if switchAudio, session.session.outputs.contains(where: { $0 === self.audioOutput }) {
             session.session.removeOutput(self.audioOutput)
         }
         if session.session.outputs.contains(where: { $0 === self.photoOutput }) {
@@ -409,6 +409,14 @@ final class CameraOutput: NSObject {
     private weak var masterOutput: CameraOutput?
 
     private var lastSampleTimestamp: CMTime?
+
+    private var needsCrossfadeTransition = false
+    private var crossfadeTransitionStart: Double = 0.0
+
+    private var needsSwitchSampleOffset = false
+    private var lastAudioSampleTime: CMTime?
+    private var videoSwitchSampleTimeOffset: CMTime?
+
     func processVideoRecording(_ sampleBuffer: CMSampleBuffer, fromAdditionalOutput: Bool) {
         guard let formatDescriptor = CMSampleBufferGetFormatDescription(sampleBuffer) else {
             return
@@ -417,10 +425,10 @@ final class CameraOutput: NSObject {
 
         if let videoRecorder = self.videoRecorder, videoRecorder.isRecording {
             if case .roundVideo = self.currentMode, type == kCMMediaType_Video {
+                let currentTimestamp = CACurrentMediaTime()
+                let duration: Double = 0.2
                 if !self.exclusive {
                     var transitionFactor: CGFloat = 0.0
-                    let currentTimestamp = CACurrentMediaTime()
-                    let duration: Double = 0.2
                     if case .front = self.currentPosition {
                         transitionFactor = 1.0
                         if self.lastSwitchTimestamp > 0.0, currentTimestamp - self.lastSwitchTimestamp < duration {
@@ -446,13 +454,51 @@ final class CameraOutput: NSObject {
                         videoRecorder.appendSampleBuffer(sampleBuffer)
                     }
                 } else {
-                    if let processedSampleBuffer = self.processRoundVideoSampleBuffer(sampleBuffer, additional: self.currentPosition == .front, transitionFactor: self.currentPosition == .front ? 1.0 : 0.0) {
+                    var additional = self.currentPosition == .front
+                    var transitionFactor = self.currentPosition == .front ? 1.0 : 0.0
+                    if self.lastSwitchTimestamp > 0.0 {
+                        if self.needsCrossfadeTransition {
+                            self.needsCrossfadeTransition = false
+                            self.crossfadeTransitionStart = currentTimestamp + 0.03
+                            self.needsSwitchSampleOffset = true
+                        }
+                        if self.crossfadeTransitionStart > 0.0, currentTimestamp - self.crossfadeTransitionStart < duration {
+                            if case .front = self.currentPosition {
+                                transitionFactor = max(0.0, (currentTimestamp - self.crossfadeTransitionStart) / duration)
+                            } else {
+                                transitionFactor = 1.0 - max(0.0, (currentTimestamp - self.crossfadeTransitionStart) / duration)
+                            }
+                        } else if currentTimestamp - self.lastSwitchTimestamp < 0.05 {
+                            additional = !additional
+                            transitionFactor = 1.0 - transitionFactor
+                            self.needsCrossfadeTransition = true
+                        }
+                    }
+                    if let processedSampleBuffer = self.processRoundVideoSampleBuffer(sampleBuffer, additional: additional, transitionFactor: transitionFactor) {
                         videoRecorder.appendSampleBuffer(processedSampleBuffer)
                     } else {
                         videoRecorder.appendSampleBuffer(sampleBuffer)
                     }
                 }
             } else {
+                if type == kCMMediaType_Audio {
+                    if self.needsSwitchSampleOffset {
+                        self.needsSwitchSampleOffset = false
+
+                        if let lastAudioSampleTime = self.lastAudioSampleTime {
+                            let videoSampleTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
+                            let offset = videoSampleTime - lastAudioSampleTime
+                            if let current = self.videoSwitchSampleTimeOffset {
+                                self.videoSwitchSampleTimeOffset = current + offset
+                            } else {
+                                self.videoSwitchSampleTimeOffset = offset
+                            }
+                            self.lastAudioSampleTime = nil
+                        }
+                    }
+
+                    self.lastAudioSampleTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer) + CMSampleBufferGetDuration(sampleBuffer)
+                }
                 videoRecorder.appendSampleBuffer(sampleBuffer)
             }
         }
@@ -494,6 +540,11 @@ final class CameraOutput: NSObject {
         var sampleTimingInfo: CMSampleTimingInfo = .invalid
         CMSampleBufferGetSampleTimingInfo(sampleBuffer, at: 0, timingInfoOut: &sampleTimingInfo)
 
+        if let videoSwitchSampleTimeOffset = self.videoSwitchSampleTimeOffset {
+            sampleTimingInfo.decodeTimeStamp = sampleTimingInfo.decodeTimeStamp - videoSwitchSampleTimeOffset
+            sampleTimingInfo.presentationTimeStamp = sampleTimingInfo.presentationTimeStamp - videoSwitchSampleTimeOffset
+        }
+
         var newSampleBuffer: CMSampleBuffer?
         status = CMSampleBufferCreateForImageBuffer(
             allocator: kCFAllocatorDefault,
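
The offset bookkeeping above keeps the recording's video timeline gapless across a single-camera flip, where the session is torn down and rebuilt and capture timestamps jump. The per-frame adjustment reduces to roughly the following sketch (illustrative helper; CMTime arithmetic uses the standard CoreMedia operator overlays):

    import CoreMedia

    // Sketch: shift a frame's timing back by the accumulated switch gap so the
    // written track stays continuous after camera reconfiguration.
    func adjustedTiming(_ timing: CMSampleTimingInfo, offset: CMTime?) -> CMSampleTimingInfo {
        guard let offset else { return timing }
        var adjusted = timing
        adjusted.presentationTimeStamp = timing.presentationTimeStamp - offset
        adjusted.decodeTimeStamp = timing.decodeTimeStamp - offset
        return adjusted
    }
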
diff --git a/submodules/Camera/Sources/CameraPreviewView.swift b/submodules/Camera/Sources/CameraPreviewView.swift
index 73046cbe8d..54a3666eb9 100644
--- a/submodules/Camera/Sources/CameraPreviewView.swift
+++ b/submodules/Camera/Sources/CameraPreviewView.swift
@@ -21,6 +21,29 @@ private extension UIInterfaceOrientation {
     }
 }
 
+private class SimpleCapturePreviewLayer: AVCaptureVideoPreviewLayer {
+    public var didEnterHierarchy: (() -> Void)?
+    public var didExitHierarchy: (() -> Void)?
+
+    override open func action(forKey event: String) -> CAAction? {
+        if event == kCAOnOrderIn {
+            self.didEnterHierarchy?()
+        } else if event == kCAOnOrderOut {
+            self.didExitHierarchy?()
+        }
+        return nullAction
+    }
+
+    override public init(layer: Any) {
+        super.init(layer: layer)
+    }
+
+    required public init?(coder: NSCoder) {
+        fatalError("init(coder:) has not been implemented")
+    }
+}
+
+
 public class CameraSimplePreviewView: UIView {
     func updateOrientation() {
         guard self.videoPreviewLayer.connection?.isVideoOrientationSupported == true else {
@@ -72,11 +95,16 @@ public class CameraSimplePreviewView: UIView {
     private var previewingDisposable: Disposable?
     private let placeholderView = UIImageView()
 
-    public init(frame: CGRect, main: Bool) {
+    public init(frame: CGRect, main: Bool, roundVideo: Bool = false) {
         super.init(frame: frame)
 
-        self.videoPreviewLayer.videoGravity = main ? .resizeAspectFill : .resizeAspect
-        self.placeholderView.contentMode = main ? .scaleAspectFill : .scaleAspectFit
+        if roundVideo {
+            self.videoPreviewLayer.videoGravity = .resizeAspectFill
+            self.placeholderView.contentMode = .scaleAspectFill
+        } else {
+            self.videoPreviewLayer.videoGravity = main ? .resizeAspectFill : .resizeAspect
+            self.placeholderView.contentMode = main ? .scaleAspectFill : .scaleAspectFit
+        }
 
         self.addSubview(self.placeholderView)
     }
@@ -567,35 +595,29 @@ public class CameraPreviewView: MTKView {
         var scaleX: CGFloat
         var scaleY: CGFloat
 
-        // Rotate the layer into screen orientation.
         switch UIDevice.current.orientation {
         case .portraitUpsideDown:
             rotation = 180
             scaleX = videoPreviewRect.width / captureDeviceResolution.width
             scaleY = videoPreviewRect.height / captureDeviceResolution.height
-
         case .landscapeLeft:
             rotation = 90
             scaleX = videoPreviewRect.height / captureDeviceResolution.width
             scaleY = scaleX
-
         case .landscapeRight:
             rotation = -90
             scaleX = videoPreviewRect.height / captureDeviceResolution.width
             scaleY = scaleX
-
         default:
             rotation = 0
             scaleX = videoPreviewRect.width / captureDeviceResolution.width
             scaleY = videoPreviewRect.height / captureDeviceResolution.height
         }
 
-        // Scale and mirror the image to ensure upright presentation.
         let affineTransform = CGAffineTransform(rotationAngle: radiansForDegrees(rotation))
             .scaledBy(x: scaleX, y: -scaleY)
         overlayLayer.setAffineTransform(affineTransform)
 
-        // Cover entire screen UI.
         let rootLayerBounds = self.bounds
         overlayLayer.position = CGPoint(x: rootLayerBounds.midX, y: rootLayerBounds.midY)
     }
diff --git a/submodules/Camera/Sources/VideoRecorder.swift b/submodules/Camera/Sources/VideoRecorder.swift
index 7f9bcb05f3..1b1d11900f 100644
--- a/submodules/Camera/Sources/VideoRecorder.swift
+++ b/submodules/Camera/Sources/VideoRecorder.swift
@@ -146,7 +146,7 @@ private final class VideoRecorderImpl {
             }
 
             if failed {
-                print("error")
+                print("append video error")
                 return
             }
 
@@ -256,7 +256,7 @@ private final class VideoRecorderImpl {
             }
 
             if failed {
-                print("error")
+                print("append audio error")
                 return
             }
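
SimpleCapturePreviewLayer above observes hierarchy changes through CALayer's action lookup rather than UIView callbacks. The same technique in isolation (nullAction is a Telegram helper; NSNull() is the standard stand-in, since NSNull adopts CAAction and suppresses the implicit animation):

    import QuartzCore

    // Sketch: a layer that reports when it is attached to / removed from a hierarchy.
    final class HierarchyTrackingLayer: CALayer {
        var didEnterHierarchy: (() -> Void)?
        var didExitHierarchy: (() -> Void)?

        override func action(forKey event: String) -> CAAction? {
            if event == kCAOnOrderIn {
                self.didEnterHierarchy?()
            } else if event == kCAOnOrderOut {
                self.didExitHierarchy?()
            }
            return NSNull() // no implicit animation for any key
        }
    }
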
diff --git a/submodules/TelegramUI/Components/Settings/WallpaperGridScreen/Sources/ThemeGridControllerNode.swift b/submodules/TelegramUI/Components/Settings/WallpaperGridScreen/Sources/ThemeGridControllerNode.swift
index 50ba9af0cd..5fe89baff5 100644
--- a/submodules/TelegramUI/Components/Settings/WallpaperGridScreen/Sources/ThemeGridControllerNode.swift
+++ b/submodules/TelegramUI/Components/Settings/WallpaperGridScreen/Sources/ThemeGridControllerNode.swift
@@ -329,7 +329,10 @@ final class ThemeGridControllerNode: ASDisplayNode {
             if let strongSelf = self, !strongSelf.currentState.editing {
                 let entries = previousEntries.with { $0 }
                 if let entries = entries, !entries.isEmpty {
-                    let wallpapers = entries.map { $0.wallpaper }.filter { !$0.isColorOrGradient }
+                    var wallpapers = entries.map { $0.wallpaper }
+                    if case .peer = mode {
+                        wallpapers = wallpapers.filter { !$0.isColorOrGradient }
+                    }
                     var options = WallpaperPresentationOptions()
 
                     if wallpaper == strongSelf.presentationData.chatWallpaper, let settings = wallpaper.settings {
@@ -575,7 +578,14 @@ final class ThemeGridControllerNode: ASDisplayNode {
                 transition.updateFrame(node: strongSelf.bottomBackgroundNode, frame: CGRect(origin: CGPoint(x: 0.0, y: gridLayout.contentSize.height), size: CGSize(width: layout.size.width, height: 500.0)))
                 transition.updateFrame(node: strongSelf.bottomSeparatorNode, frame: CGRect(origin: CGPoint(x: 0.0, y: gridLayout.contentSize.height), size: CGSize(width: layout.size.width, height: UIScreenPixel)))
 
-                let params = ListViewItemLayoutParams(width: layout.size.width, leftInset: layout.safeInsets.left, rightInset: layout.safeInsets.right, availableHeight: layout.size.height)
+                let sideInset = max(16.0, floor((layout.size.width - 674.0) / 2.0))
+                var listInsets = layout.safeInsets
+                if layout.size.width >= 375.0 {
+                    listInsets.left = sideInset
+                    listInsets.right = sideInset
+                }
+
+                let params = ListViewItemLayoutParams(width: layout.size.width, leftInset: listInsets.left, rightInset: listInsets.right, availableHeight: layout.size.height)
                 let makeResetLayout = strongSelf.resetItemNode.asyncLayout()
                 let makeResetDescriptionLayout = strongSelf.resetDescriptionItemNode.asyncLayout()
@@ -588,8 +598,8 @@ final class ThemeGridControllerNode: ASDisplayNode {
                 transition.updateFrame(node: strongSelf.resetItemNode, frame: CGRect(origin: CGPoint(x: 0.0, y: gridLayout.contentSize.height + 35.0), size: resetLayout.contentSize))
                 transition.updateFrame(node: strongSelf.resetDescriptionItemNode, frame: CGRect(origin: CGPoint(x: 0.0, y: gridLayout.contentSize.height + 35.0 + resetLayout.contentSize.height), size: resetDescriptionLayout.contentSize))
 
-                let sideInset = strongSelf.leftOverlayNode.frame.maxX
-                strongSelf.maskNode.frame = CGRect(origin: CGPoint(x: sideInset, y: strongSelf.separatorNode.frame.minY + UIScreenPixel + 4.0), size: CGSize(width: layout.size.width - sideInset * 2.0, height: gridLayout.contentSize.height + 6.0))
+                let maskSideInset = strongSelf.leftOverlayNode.frame.maxX
+                strongSelf.maskNode.frame = CGRect(origin: CGPoint(x: maskSideInset, y: strongSelf.separatorNode.frame.minY + UIScreenPixel + 4.0), size: CGSize(width: layout.size.width - sideInset * 2.0, height: gridLayout.contentSize.height + 6.0))
             }
         }
     }
@@ -934,7 +944,7 @@ final class ThemeGridControllerNode: ASDisplayNode {
         let (resetDescriptionLayout, _) = makeResetDescriptionLayout(self.resetDescriptionItem, params, ItemListNeighbors(top: .none, bottom: .none))
 
         if !isChannel {
-            insets.bottom += buttonHeight + 35.0 + resetDescriptionLayout.contentSize.height + 32.0
+            listInsets.bottom += buttonHeight + 35.0 + resetDescriptionLayout.contentSize.height + 32.0
         }
 
         self.gridNode.frame = CGRect(x: 0.0, y: 0.0, width: layout.size.width, height: layout.size.height)
diff --git a/submodules/TelegramUI/Components/VideoMessageCameraScreen/Sources/VideoMessageCameraScreen.swift b/submodules/TelegramUI/Components/VideoMessageCameraScreen/Sources/VideoMessageCameraScreen.swift
index 81cb98617b..afc3a21091 100644
--- a/submodules/TelegramUI/Components/VideoMessageCameraScreen/Sources/VideoMessageCameraScreen.swift
+++ b/submodules/TelegramUI/Components/VideoMessageCameraScreen/Sources/VideoMessageCameraScreen.swift
@@ -609,11 +609,11 @@ public class VideoMessageCameraScreen: ViewController {
             self.previewContainerView = UIView()
             self.previewContainerView.clipsToBounds = true
 
-            let isDualCameraEnabled = Camera.isDualCameraSupported //!"".isEmpty //
+            let isDualCameraEnabled = Camera.isDualCameraSupported
             let isFrontPosition = "".isEmpty
 
-            self.mainPreviewView = CameraSimplePreviewView(frame: .zero, main: true)
-            self.additionalPreviewView = CameraSimplePreviewView(frame: .zero, main: false)
+            self.mainPreviewView = CameraSimplePreviewView(frame: .zero, main: true, roundVideo: true)
+            self.additionalPreviewView = CameraSimplePreviewView(frame: .zero, main: false, roundVideo: true)
 
             self.progressView = RecordingProgressView(frame: .zero)
 
@@ -746,6 +746,11 @@ public class VideoMessageCameraScreen: ViewController {
                 return
             }
             self.cameraState = self.cameraState.updatedPosition(position)
+
+            if !self.cameraState.isDualCameraEnabled {
+                self.animatePositionChange()
+            }
+
             self.requestUpdateLayout(transition: .easeInOut(duration: 0.2))
         })
 
@@ -807,6 +812,31 @@ public class VideoMessageCameraScreen: ViewController {
             self.previewContainerView.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.25, removeOnCompletion: false)
         }
 
+        private func animatePositionChange() {
+            if let snapshotView = self.mainPreviewView.snapshotView(afterScreenUpdates: false) {
+                self.previewContainerView.insertSubview(snapshotView, belowSubview: self.progressView)
+                self.previewSnapshotView = snapshotView
+
+                let action = { [weak self] in
+                    guard let self else {
+                        return
+                    }
+                    UIView.animate(withDuration: 0.2, animations: {
+                        self.previewSnapshotView?.alpha = 0.0
+                    }, completion: { _ in
+                        self.previewSnapshotView?.removeFromSuperview()
+                        self.previewSnapshotView = nil
+                    })
+                }
+
+                Queue.mainQueue().after(1.0) {
+                    action()
+                }
+
+                self.requestUpdateLayout(transition: .immediate)
+            }
+        }
+
         func pauseCameraCapture() {
             self.mainPreviewView.isEnabled = false
             self.additionalPreviewView.isEnabled = false
@@ -850,10 +880,10 @@ public class VideoMessageCameraScreen: ViewController {
                     action()
                 })
             } else {
-               Queue.mainQueue().after(1.0) {
-                    action()
-                }
-            }
+                Queue.mainQueue().after(1.0) {
+                    action()
+                }
+            }
 
             self.cameraIsActive = true
             self.requestUpdateLayout(transition: .immediate)
@@ -1084,21 +1114,37 @@
             }
             transition.setCornerRadius(layer: self.previewContainerView.layer, cornerRadius: previewSide / 2.0)
 
-            let previewInnerFrame = CGRect(origin: .zero, size: previewFrame.size)
+            let previewBounds = CGRect(origin: .zero, size: previewFrame.size)
+
+            let previewInnerSize: CGSize
+            let additionalPreviewInnerSize: CGSize
 
-            let additionalPreviewSize = CGSize(width: previewFrame.size.width, height: previewFrame.size.width / 3.0 * 4.0)
-            let additionalPreviewInnerFrame = CGRect(origin: CGPoint(x: 0.0, y: floorToScreenPixels((previewFrame.height - additionalPreviewSize.height) / 2.0)), size: additionalPreviewSize)
-            self.mainPreviewView.frame = previewInnerFrame
-            self.additionalPreviewView.frame = additionalPreviewInnerFrame
+            if self.cameraState.isDualCameraEnabled {
+                previewInnerSize = CGSize(width: previewFrame.size.width, height: previewFrame.size.width / 9.0 * 16.0)
+                additionalPreviewInnerSize = CGSize(width: previewFrame.size.width, height: previewFrame.size.width / 3.0 * 4.0)
+            } else {
+                previewInnerSize = CGSize(width: previewFrame.size.width, height: previewFrame.size.width / 3.0 * 4.0)
+                additionalPreviewInnerSize = CGSize(width: previewFrame.size.width, height: previewFrame.size.width / 3.0 * 4.0)
+            }
 
-            self.progressView.frame = previewInnerFrame
+            let previewInnerFrame = CGRect(origin: CGPoint(x: 0.0, y: floorToScreenPixels((previewFrame.height - previewInnerSize.height) / 2.0)), size: previewInnerSize)
+
+            let additionalPreviewInnerFrame = CGRect(origin: CGPoint(x: 0.0, y: floorToScreenPixels((previewFrame.height - additionalPreviewInnerSize.height) / 2.0)), size: additionalPreviewInnerSize)
+            if self.cameraState.isDualCameraEnabled {
+                self.mainPreviewView.frame = previewInnerFrame
+                self.additionalPreviewView.frame = additionalPreviewInnerFrame
+            } else {
+                self.mainPreviewView.frame = self.cameraState.position == .front ? additionalPreviewInnerFrame : previewInnerFrame
+            }
+
+            self.progressView.frame = previewBounds
             self.progressView.value = CGFloat(self.cameraState.duration / 60.0)
 
             transition.setAlpha(view: self.additionalPreviewView, alpha: self.cameraState.position == .front ? 1.0 : 0.0)
 
-            self.previewBlurView.frame = previewInnerFrame
-            self.previewSnapshotView?.frame = previewInnerFrame
-            self.loadingView.update(size: previewInnerFrame.size, transition: .immediate)
+            self.previewBlurView.frame = previewBounds
+            self.previewSnapshotView?.center = previewBounds.center
+            self.loadingView.update(size: previewBounds.size, transition: .immediate)
 
             let componentSize = self.componentHost.update(
                 transition: transition,
@@ -1168,7 +1214,7 @@ public class VideoMessageCameraScreen: ViewController {
                     resultPreviewView.addGestureRecognizer(UITapGestureRecognizer(target: self, action: #selector(self.resultTapped)))
                 }
 
-                resultPreviewView.frame = previewInnerFrame
+                resultPreviewView.frame = previewBounds
             } else if let resultPreviewView = self.resultPreviewView {
                 self.resultPreviewView = nil
                 resultPreviewView.removeFromSuperview()
diff --git a/submodules/TelegramUI/Sources/ChatControllerNode.swift b/submodules/TelegramUI/Sources/ChatControllerNode.swift
index de5355c1e1..c031ddad32 100644
--- a/submodules/TelegramUI/Sources/ChatControllerNode.swift
+++ b/submodules/TelegramUI/Sources/ChatControllerNode.swift
@@ -3729,6 +3729,9 @@ class ChatControllerNode: ASDisplayNode, UIScrollViewDelegate {
         if self.chatPresentationInterfaceState.inputTextPanelState.mediaRecordingState != nil {
             return false
         }
+        if self.chatPresentationInterfaceState.recordedMediaPreview != nil {
+            return false
+        }
         if let inputPanelNode = self.inputPanelNode as? ChatTextInputPanelNode {
             if inputPanelNode.isFocused {
                 return false