Mirror of https://github.com/Swiftgram/Telegram-iOS.git (synced 2025-06-16 05:55:20 +00:00)

Video message recording improvements

commit 5b1352affe
parent 3f6ac92df7
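Summary of the change-set: camera configuration gains a `switchAudio` flag so that flipping between the front and back cameras during a round video message no longer re-selects the audio device or re-wires the audio input, which previously could interrupt the recording's audio track. When running with dual (non-exclusive) outputs, round-video capture crossfades between the two feeds over 0.2 s; with a single output it now routes the one feed through the round-video processing path. Record, stop, and send actions are debounced via `lastActionTimestamp` and guarded by a `didSend` flag; `sendVideoRecording` accepts optional `silentPosting` and `scheduleTime` parameters that flow through the screen's completion into `transformEnqueueMessages`; `allowLiveUpload` and `viewOnceAvailable` become explicit init parameters (view-once is disabled for bots and in scheduled-message mode); the additional preview view is only installed when dual camera is supported; and the send buttons are marked `isExclusiveTouch`.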
@@ -57,17 +57,17 @@ final class CameraDeviceContext {
         self.output = CameraOutput(exclusive: exclusive, ciContext: ciContext, use32BGRA: use32BGRA)
     }
     
-    func configure(position: Camera.Position, previewView: CameraSimplePreviewView?, audio: Bool, photo: Bool, metadata: Bool, preferWide: Bool = false, preferLowerFramerate: Bool = false) {
+    func configure(position: Camera.Position, previewView: CameraSimplePreviewView?, audio: Bool, photo: Bool, metadata: Bool, preferWide: Bool = false, preferLowerFramerate: Bool = false, switchAudio: Bool = true) {
         guard let session = self.session else {
             return
         }
         
         self.previewView = previewView
         
-        self.device.configure(for: session, position: position, dual: !exclusive || additional)
+        self.device.configure(for: session, position: position, dual: !self.exclusive || self.additional, switchAudio: switchAudio)
         self.device.configureDeviceFormat(maxDimensions: self.maxDimensions(additional: self.additional, preferWide: preferWide), maxFramerate: self.preferredMaxFrameRate(useLower: preferLowerFramerate))
-        self.input.configure(for: session, device: self.device, audio: audio)
-        self.output.configure(for: session, device: self.device, input: self.input, previewView: previewView, audio: audio, photo: photo, metadata: metadata)
+        self.input.configure(for: session, device: self.device, audio: audio && switchAudio)
+        self.output.configure(for: session, device: self.device, input: self.input, previewView: previewView, audio: audio && switchAudio, photo: photo, metadata: metadata)
         
         self.output.configureVideoStabilization()
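The new `switchAudio` flag is the heart of the audio fix: when a round-video recording flips cameras, only the video path is reconfigured, and the audio device and input are left untouched. A minimal sketch of the idea, using a hypothetical toy type rather than the project's real `CameraDeviceContext`:

import AVFoundation

final class ToyDeviceContext {
    private(set) var videoDevice: AVCaptureDevice?
    private(set) var audioDevice: AVCaptureDevice?

    func configure(position: AVCaptureDevice.Position, switchAudio: Bool = true) {
        // The video device is always (re)selected for the requested position.
        self.videoDevice = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: position)
        // Audio is only touched when the caller allows it; passing `false`
        // (as the round-video camera-flip path now does) keeps the existing
        // audio device, so recorded audio is not interrupted by the switch.
        if switchAudio {
            self.audioDevice = AVCaptureDevice.default(for: .audio)
        }
    }
}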
@@ -260,7 +260,14 @@ private final class CameraContext {
             self._positionPromise.set(targetPosition)
             self.modeChange = .position
             
-            mainDeviceContext.configure(position: targetPosition, previewView: self.simplePreviewView, audio: self.initialConfiguration.audio, photo: self.initialConfiguration.photo, metadata: self.initialConfiguration.metadata, preferWide: self.initialConfiguration.preferWide, preferLowerFramerate: self.initialConfiguration.preferLowerFramerate)
+            let isRoundVideo = self.initialConfiguration.isRoundVideo
+            let preferWide = self.initialConfiguration.preferWide || (self.positionValue == .front && isRoundVideo)
+            let preferLowerFramerate = self.initialConfiguration.preferLowerFramerate || isRoundVideo
+            
+            mainDeviceContext.configure(position: targetPosition, previewView: self.simplePreviewView, audio: self.initialConfiguration.audio, photo: self.initialConfiguration.photo, metadata: self.initialConfiguration.metadata, preferWide: preferWide, preferLowerFramerate: preferLowerFramerate, switchAudio: !isRoundVideo)
+            if isRoundVideo {
+                mainDeviceContext.output.markPositionChange(position: targetPosition)
+            }
             
             self.queue.after(0.5) {
                 self.modeChange = .none
@@ -277,7 +284,10 @@ private final class CameraContext {
         self.positionValue = position
         self.modeChange = .position
         
-        self.mainDeviceContext?.configure(position: position, previewView: self.simplePreviewView, audio: self.initialConfiguration.audio, photo: self.initialConfiguration.photo, metadata: self.initialConfiguration.metadata)
+        let preferWide = self.initialConfiguration.preferWide || (self.positionValue == .front && self.initialConfiguration.isRoundVideo)
+        let preferLowerFramerate = self.initialConfiguration.preferLowerFramerate || self.initialConfiguration.isRoundVideo
+        
+        self.mainDeviceContext?.configure(position: position, previewView: self.simplePreviewView, audio: self.initialConfiguration.audio, photo: self.initialConfiguration.photo, metadata: self.initialConfiguration.metadata, preferWide: preferWide, preferLowerFramerate: preferLowerFramerate)
         
         self.queue.after(0.5) {
             self.modeChange = .none
@@ -342,8 +352,11 @@ private final class CameraContext {
             self.additionalDeviceContext?.invalidate()
             self.additionalDeviceContext = nil
             
-            self.mainDeviceContext = CameraDeviceContext(session: self.session, exclusive: true, additional: false, ciContext: self.ciContext, use32BGRA: false)
-            self.mainDeviceContext?.configure(position: self.positionValue, previewView: self.simplePreviewView, audio: self.initialConfiguration.audio, photo: self.initialConfiguration.photo, metadata: self.initialConfiguration.metadata, preferWide: self.initialConfiguration.preferWide, preferLowerFramerate: self.initialConfiguration.preferLowerFramerate)
+            let preferWide = self.initialConfiguration.preferWide || (self.positionValue == .front && self.initialConfiguration.isRoundVideo)
+            let preferLowerFramerate = self.initialConfiguration.preferLowerFramerate || self.initialConfiguration.isRoundVideo
+            
+            self.mainDeviceContext = CameraDeviceContext(session: self.session, exclusive: true, additional: false, ciContext: self.ciContext, use32BGRA: self.initialConfiguration.isRoundVideo)
+            self.mainDeviceContext?.configure(position: self.positionValue, previewView: self.simplePreviewView, audio: self.initialConfiguration.audio, photo: self.initialConfiguration.photo, metadata: self.initialConfiguration.metadata, preferWide: preferWide, preferLowerFramerate: preferLowerFramerate)
         }
         self.mainDeviceContext?.output.processSampleBuffer = { [weak self] sampleBuffer, pixelBuffer, connection in
             guard let self, let mainDeviceContext = self.mainDeviceContext else {
@@ -29,7 +29,7 @@ final class CameraDevice {
     
     public private(set) var audioDevice: AVCaptureDevice? = nil
     
-    func configure(for session: CameraSession, position: Camera.Position, dual: Bool) {
+    func configure(for session: CameraSession, position: Camera.Position, dual: Bool, switchAudio: Bool) {
         self.position = position
         
         var selectedDevice: AVCaptureDevice?
@@ -57,7 +57,9 @@ final class CameraDevice {
         self.videoDevice = selectedDevice
         self.videoDevicePromise.set(.single(selectedDevice))
         
-        self.audioDevice = AVCaptureDevice.default(for: .audio)
+        if switchAudio {
+            self.audioDevice = AVCaptureDevice.default(for: .audio)
+        }
     }
     
     func configureDeviceFormat(maxDimensions: CMVideoDimensions, maxFramerate: Double) {
@@ -417,32 +417,40 @@ final class CameraOutput: NSObject {
         
         if let videoRecorder = self.videoRecorder, videoRecorder.isRecording {
             if case .roundVideo = self.currentMode, type == kCMMediaType_Video {
-                var transitionFactor: CGFloat = 0.0
-                let currentTimestamp = CACurrentMediaTime()
-                let duration: Double = 0.2
-                if case .front = self.currentPosition {
-                    transitionFactor = 1.0
-                    if self.lastSwitchTimestamp > 0.0, currentTimestamp - self.lastSwitchTimestamp < duration {
-                        transitionFactor = max(0.0, (currentTimestamp - self.lastSwitchTimestamp) / duration)
-                    }
-                } else {
-                    transitionFactor = 0.0
-                    if self.lastSwitchTimestamp > 0.0, currentTimestamp - self.lastSwitchTimestamp < duration {
-                        transitionFactor = 1.0 - max(0.0, (currentTimestamp - self.lastSwitchTimestamp) / duration)
-                    }
-                }
-                if let processedSampleBuffer = self.processRoundVideoSampleBuffer(sampleBuffer, additional: fromAdditionalOutput, transitionFactor: transitionFactor) {
-                    let presentationTime = CMSampleBufferGetPresentationTimeStamp(processedSampleBuffer)
-                    if let lastSampleTimestamp = self.lastSampleTimestamp, lastSampleTimestamp > presentationTime {
-                        
-                    } else {
-                        if (transitionFactor == 1.0 && fromAdditionalOutput) || (transitionFactor == 0.0 && !fromAdditionalOutput) || (transitionFactor > 0.0 && transitionFactor < 1.0) {
-                            videoRecorder.appendSampleBuffer(processedSampleBuffer)
-                            self.lastSampleTimestamp = presentationTime
-                        }
-                    }
-                } else {
-                    videoRecorder.appendSampleBuffer(sampleBuffer)
-                }
+                if !self.exclusive {
+                    var transitionFactor: CGFloat = 0.0
+                    let currentTimestamp = CACurrentMediaTime()
+                    let duration: Double = 0.2
+                    if case .front = self.currentPosition {
+                        transitionFactor = 1.0
+                        if self.lastSwitchTimestamp > 0.0, currentTimestamp - self.lastSwitchTimestamp < duration {
+                            transitionFactor = max(0.0, (currentTimestamp - self.lastSwitchTimestamp) / duration)
+                        }
+                    } else {
+                        transitionFactor = 0.0
+                        if self.lastSwitchTimestamp > 0.0, currentTimestamp - self.lastSwitchTimestamp < duration {
+                            transitionFactor = 1.0 - max(0.0, (currentTimestamp - self.lastSwitchTimestamp) / duration)
+                        }
+                    }
+                    if let processedSampleBuffer = self.processRoundVideoSampleBuffer(sampleBuffer, additional: fromAdditionalOutput, transitionFactor: transitionFactor) {
+                        let presentationTime = CMSampleBufferGetPresentationTimeStamp(processedSampleBuffer)
+                        if let lastSampleTimestamp = self.lastSampleTimestamp, lastSampleTimestamp > presentationTime {
+                            
+                        } else {
+                            if (transitionFactor == 1.0 && fromAdditionalOutput) || (transitionFactor == 0.0 && !fromAdditionalOutput) || (transitionFactor > 0.0 && transitionFactor < 1.0) {
+                                videoRecorder.appendSampleBuffer(processedSampleBuffer)
+                                self.lastSampleTimestamp = presentationTime
+                            }
+                        }
+                    } else {
+                        videoRecorder.appendSampleBuffer(sampleBuffer)
+                    }
+                } else {
+                    if let processedSampleBuffer = self.processRoundVideoSampleBuffer(sampleBuffer, additional: self.currentPosition == .front, transitionFactor: self.currentPosition == .front ? 1.0 : 0.0) {
+                        videoRecorder.appendSampleBuffer(processedSampleBuffer)
+                    } else {
+                        videoRecorder.appendSampleBuffer(sampleBuffer)
+                    }
+                }
             } else {
                 videoRecorder.appendSampleBuffer(sampleBuffer)
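The transition factor above implements a 0.2-second crossfade between the back and front feeds, keyed off `lastSwitchTimestamp`; a buffer is only appended when the factor agrees with the buffer's source or the blend is mid-flight. The ramp itself can be isolated as follows — a sketch with illustrative names, not the project's API:

import QuartzCore

// Sketch: the factor ramps 0 -> 1 when switching to the front camera and
// 1 -> 0 when switching back, settling once `duration` seconds have elapsed.
func roundVideoTransitionFactor(isFront: Bool, lastSwitchTimestamp: Double) -> CGFloat {
    let duration: Double = 0.2
    let now = CACurrentMediaTime()
    guard lastSwitchTimestamp > 0.0, now - lastSwitchTimestamp < duration else {
        // No switch in flight: the factor is fully settled on one camera.
        return isFront ? 1.0 : 0.0
    }
    let progress = CGFloat(max(0.0, (now - lastSwitchTimestamp) / duration))
    return isFront ? progress : 1.0 - progress
}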
@@ -88,6 +88,8 @@ public final class PlainButtonComponent: Component {
         override init(frame: CGRect) {
             super.init(frame: frame)
             
+            self.isExclusiveTouch = true
+            
             self.contentContainer.isUserInteractionEnabled = false
             self.addSubview(self.contentContainer)
             
@@ -241,15 +241,21 @@ private final class VideoMessageCameraScreenComponent: CombinedComponent {
                 return
             }
             
+            let currentTimestamp = CACurrentMediaTime()
+            if let lastActionTimestamp = controller.lastActionTimestamp, currentTimestamp - lastActionTimestamp < 0.5 {
+                return
+            }
+            controller.lastActionTimestamp = currentTimestamp
+            
             let initialDuration = controller.node.previewState?.composition.duration.seconds ?? 0.0
-            let isFirstRecording = initialDuration.isZero
-            controller.node.resumeCameraCapture()
             
             controller.updatePreviewState({ _ in return nil}, transition: .spring(duration: 0.4))
             
             controller.node.dismissAllTooltips()
             controller.updateCameraState({ $0.updatedRecording(pressing ? .holding : .handsFree).updatedDuration(initialDuration) }, transition: .spring(duration: 0.4))
             
+            let isFirstRecording = initialDuration.isZero
+            controller.node.resumeCameraCapture()
+            
             controller.node.withReadyCamera(isFirstTime: !controller.node.cameraIsActive) {
                 self.resultDisposable.set((camera.startRecording()
                 |> deliverOnMainQueue).start(next: { [weak self] recordingData in
@@ -275,6 +281,11 @@ private final class VideoMessageCameraScreenComponent: CombinedComponent {
             guard let controller = self.getController(), let camera = controller.camera else {
                 return
             }
+            let currentTimestamp = CACurrentMediaTime()
+            if let lastActionTimestamp = controller.lastActionTimestamp, currentTimestamp - lastActionTimestamp < 0.5 {
+                return
+            }
+            controller.lastActionTimestamp = currentTimestamp
             
             self.resultDisposable.set((camera.stopRecording()
             |> deliverOnMainQueue).start(next: { [weak self] result in
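Both the start- and stop-recording handlers now share the same guard: an action arriving within 0.5 s of the previous one is dropped. The pattern in isolation, as a sketch (the real code stores `lastActionTimestamp` directly on the controller):

import QuartzCore

// Sketch of the 0.5 s action debounce used by the recording controls.
final class ActionDebouncer {
    private var lastActionTimestamp: Double?
    private let minimumInterval: Double

    init(minimumInterval: Double = 0.5) {
        self.minimumInterval = minimumInterval
    }

    // Returns true (and records the timestamp) if enough time has passed
    // since the last accepted action; otherwise the action is ignored.
    func accept() -> Bool {
        let now = CACurrentMediaTime()
        if let last = self.lastActionTimestamp, now - last < self.minimumInterval {
            return false
        }
        self.lastActionTimestamp = now
        return true
    }
}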
@@ -598,7 +609,7 @@ public class VideoMessageCameraScreen: ViewController {
         self.previewContainerView = UIView()
         self.previewContainerView.clipsToBounds = true
         
-        let isDualCameraEnabled = Camera.isDualCameraSupported
+        let isDualCameraEnabled = Camera.isDualCameraSupported //!"".isEmpty //
         let isFrontPosition = "".isEmpty
         
         self.mainPreviewView = CameraSimplePreviewView(frame: .zero, main: true)
@@ -638,7 +649,9 @@ public class VideoMessageCameraScreen: ViewController {
         self.containerView.addSubview(self.previewContainerView)
         
         self.previewContainerView.addSubview(self.mainPreviewView)
-        self.previewContainerView.addSubview(self.additionalPreviewView)
+        if isDualCameraEnabled {
+            self.previewContainerView.addSubview(self.additionalPreviewView)
+        }
         self.previewContainerView.addSubview(self.progressView)
         self.previewContainerView.addSubview(self.previewBlurView)
         self.previewContainerView.addSubview(self.loadingView)
@@ -652,7 +665,7 @@ public class VideoMessageCameraScreen: ViewController {
             self.mainPreviewView.removePlaceholder(delay: 0.0)
         }
         self.withReadyCamera(isFirstTime: true, {
-            if isDualCameraEnabled {
+            if !isDualCameraEnabled {
                 self.mainPreviewView.removePlaceholder(delay: 0.0)
             }
             self.loadingView.alpha = 0.0
@@ -675,7 +688,7 @@ public class VideoMessageCameraScreen: ViewController {
     
     func withReadyCamera(isFirstTime: Bool = false, _ f: @escaping () -> Void) {
         if #available(iOS 13.0, *) {
-            let _ = (self.additionalPreviewView.isPreviewing
+            let _ = ((self.cameraState.isDualCameraEnabled ? self.additionalPreviewView.isPreviewing : self.mainPreviewView.isPreviewing)
             |> filter { $0 }
             |> take(1)).startStandalone(next: { _ in
                 f()
@@ -805,7 +818,7 @@ public class VideoMessageCameraScreen: ViewController {
     
     func resumeCameraCapture() {
         if !self.mainPreviewView.isEnabled {
-            if let snapshotView = self.previewContainerView.snapshotView(afterScreenUpdates: false) {
+            if let snapshotView = self.resultPreviewView?.snapshotView(afterScreenUpdates: false) {
                 self.previewContainerView.insertSubview(snapshotView, belowSubview: self.previewBlurView)
                 self.previewSnapshotView = snapshotView
             }
@@ -1177,7 +1190,7 @@ public class VideoMessageCameraScreen: ViewController {
     fileprivate var allowLiveUpload: Bool
     fileprivate var viewOnceAvailable: Bool
     
-    fileprivate let completion: (EnqueueMessage?) -> Void
+    fileprivate let completion: (EnqueueMessage?, Bool?, Int32?) -> Void
     
     private var audioSessionDisposable: Disposable?
     
@@ -1320,15 +1333,16 @@ public class VideoMessageCameraScreen: ViewController {
     public init(
         context: AccountContext,
         updatedPresentationData: (initial: PresentationData, signal: Signal<PresentationData, NoError>)?,
         peerId: EnginePeer.Id,
+        allowLiveUpload: Bool,
+        viewOnceAvailable: Bool,
         inputPanelFrame: CGRect,
-        completion: @escaping (EnqueueMessage?) -> Void
+        completion: @escaping (EnqueueMessage?, Bool?, Int32?) -> Void
     ) {
         self.context = context
         self.updatedPresentationData = updatedPresentationData
         self.inputPanelFrame = inputPanelFrame
-        self.allowLiveUpload = peerId.namespace != Namespaces.Peer.SecretChat
-        self.viewOnceAvailable = peerId.namespace == Namespaces.Peer.CloudUser && peerId != context.account.peerId
+        self.allowLiveUpload = allowLiveUpload
+        self.viewOnceAvailable = viewOnceAvailable
         self.completion = completion
         
         self.recordingStatus = RecordingStatus(micLevel: self.micLevelValue.get(), duration: self.durationValue.get())
@@ -1360,10 +1374,21 @@ public class VideoMessageCameraScreen: ViewController {
         super.displayNodeDidLoad()
     }
     
    fileprivate var didSend = false
+    fileprivate var lastActionTimestamp: Double?
    fileprivate var isSendingImmediately = false
-    public func sendVideoRecording() {
+    public func sendVideoRecording(silentPosting: Bool? = nil, scheduleTime: Int32? = nil) {
        guard !self.didSend else {
            return
        }
        
+        let currentTimestamp = CACurrentMediaTime()
+        if let lastActionTimestamp = self.lastActionTimestamp, currentTimestamp - lastActionTimestamp < 0.5 {
+            return
+        }
+        
        if case .none = self.cameraState.recording, self.node.results.isEmpty {
-            self.completion(nil)
+            self.completion(nil, nil, nil)
            return
        }
        
@@ -1374,6 +1399,8 @@ public class VideoMessageCameraScreen: ViewController {
            self.node.stopRecording.invoke(Void())
        }
        
+        self.didSend = true
+        
        let _ = (self.currentResults
        |> take(1)
        |> deliverOnMainQueue).startStandalone(next: { [weak self] results in
@@ -1393,7 +1420,7 @@ public class VideoMessageCameraScreen: ViewController {
            }
            
            if duration < 1.0 {
-                self.completion(nil)
+                self.completion(nil, nil, nil)
                return
            }
            
@@ -1467,12 +1494,16 @@ public class VideoMessageCameraScreen: ViewController {
                    localGroupingKey: nil,
                    correlationId: nil,
                    bubbleUpEmojiOrStickersets: []
-                ))
+                ), silentPosting, scheduleTime)
            })
        }
        
+    private var waitingForNextResult = false
    public func stopVideoRecording() -> Bool {
        guard !self.didSend else {
            return false
        }
        
+        self.node.dismissAllTooltips()
+        
        self.waitingForNextResult = true
@@ -1487,6 +1518,10 @@ public class VideoMessageCameraScreen: ViewController {
    fileprivate var recordingStartTime: Double?
    fileprivate var scheduledLock = false
    public func lockVideoRecording() {
+        guard !self.didSend else {
+            return
+        }
+        
        if case .none = self.cameraState.recording {
            self.scheduledLock = true
            self.node.requestUpdateLayout(transition: .spring(duration: 0.4))
@@ -683,6 +683,10 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
                return true
            }
            
+            if let _ = strongSelf.videoRecorderValue {
+                return false
+            }
+            
            strongSelf.chatDisplayNode.messageTransitionNode.dismissMessageReactionContexts()
            
            if strongSelf.presentVoiceMessageDiscardAlert(action: action, performAction: false) {
@@ -15374,14 +15378,18 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
            isScheduledMessages = true
        }
        
-        let _ = isScheduledMessages
+        var isBot = false
+        if let user = self.presentationInterfaceState.renderedPeer?.peer as? TelegramUser, user.botInfo != nil {
+            isBot = true
+        }
+        
        let controller = VideoMessageCameraScreen(
            context: self.context,
            updatedPresentationData: self.updatedPresentationData,
            peerId: peerId,
+            allowLiveUpload: peerId.namespace != Namespaces.Peer.SecretChat,
+            viewOnceAvailable: !isScheduledMessages && peerId.namespace == Namespaces.Peer.CloudUser && peerId != self.context.account.peerId && !isBot,
            inputPanelFrame: currentInputPanelFrame,
-            completion: { [weak self] message in
+            completion: { [weak self] message, silentPosting, scheduleTime in
                guard let self, let videoController = self.videoRecorderValue else {
                    return
                }
@@ -15400,8 +15408,7 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
                    .withUpdatedCorrelationId(correlationId)
                
                var usedCorrelationId = false
-                
-                if self.chatDisplayNode.shouldAnimateMessageTransition, let extractedView = videoController.extractVideoSnapshot() {
+                if scheduleTime == nil, self.chatDisplayNode.shouldAnimateMessageTransition, let extractedView = videoController.extractVideoSnapshot() {
                    usedCorrelationId = true
                    self.chatDisplayNode.messageTransitionNode.add(correlationId: correlationId, source: .videoMessage(ChatMessageTransitionNodeImpl.Source.VideoMessage(view: extractedView)), initiated: { [weak videoController, weak self] in
                        videoController?.hideVideoSnapshot()
@@ -15424,7 +15431,17 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
                    }
                }, usedCorrelationId ? correlationId : nil)
                
-                self.sendMessages([message])
+                let messages = [message]
+                let transformedMessages: [EnqueueMessage]
+                if let silentPosting {
+                    transformedMessages = self.transformEnqueueMessages(messages, silentPosting: silentPosting)
+                } else if let scheduleTime {
+                    transformedMessages = self.transformEnqueueMessages(messages, silentPosting: false, scheduleTime: scheduleTime)
+                } else {
+                    transformedMessages = self.transformEnqueueMessages(messages)
+                }
+                
+                self.sendMessages(transformedMessages)
            }
        )
        controller.onResume = { [weak self] in
@@ -15634,6 +15651,8 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
    }
    
    func resumeMediaRecorder() {
+        self.context.sharedContext.mediaManager.playlistControl(.playback(.pause), type: nil)
+        
        if let audioRecorderValue = self.audioRecorderValue {
            audioRecorderValue.resume()
            
@@ -15743,7 +15762,7 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
            
            donateSendMessageIntent(account: self.context.account, sharedContext: self.context.sharedContext, intentContext: .chat, peerIds: [peerId])
        case .video:
-            self.videoRecorderValue?.sendVideoRecording()
+            self.videoRecorderValue?.sendVideoRecording(silentPosting: silentPosting, scheduleTime: scheduleTime)
        }
    }
    
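This is the one call site that forwards the chat panel's send options today; since both new parameters default to nil, existing plain-send callers compile unchanged. A hypothetical caller, assuming `controller` is the presented VideoMessageCameraScreen and `scheduleTime` an Int32 timestamp as at the call site above:

func sendExamples(controller: VideoMessageCameraScreen, scheduleTime: Int32) {
    controller.sendVideoRecording()                           // plain send
    controller.sendVideoRecording(silentPosting: true)        // silent send
    controller.sendVideoRecording(scheduleTime: scheduleTime) // scheduled send
}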
@@ -107,6 +107,7 @@ final class ChatRecordingPreviewInputPanelNode: ChatInputPanelNode {
        
        self.sendButton = HighlightTrackingButtonNode()
        self.sendButton.displaysAsynchronously = false
+        self.sendButton.isExclusiveTouch = true
        self.sendButton.setImage(PresentationResourcesChat.chatInputPanelSendButtonImage(theme), for: [])
        
        self.viewOnceButton = ChatRecordingViewOnceButtonNode(icon: .viewOnce)
@@ -195,6 +196,12 @@ final class ChatRecordingPreviewInputPanelNode: ChatInputPanelNode {
        let gestureRecognizer = ContextGesture(target: nil, action: nil)
        self.sendButton.view.addGestureRecognizer(gestureRecognizer)
        self.gestureRecognizer = gestureRecognizer
+        gestureRecognizer.shouldBegin = { [weak self] _ in
+            if let self, self.viewOnce {
+                return false
+            }
+            return true
+        }
        gestureRecognizer.activated = { [weak self] gesture, _ in
            guard let strongSelf = self else {
                return
@@ -972,7 +972,7 @@ class ChatTextInputPanelNode: ChatInputPanelNode, ASEditableTextNodeDelegate, Ch
                    interfaceInteraction.finishMediaRecording(.dismiss)
                }
            } else {
-                interfaceInteraction.finishMediaRecording(.dismiss)
+                // interfaceInteraction.finishMediaRecording(.dismiss)
            }
            strongSelf.viewOnce = false
            strongSelf.tooltipController?.dismiss()