Video message recording improvements

This commit is contained in:
Ilya Laktyushin 2024-01-13 21:46:02 +04:00
parent bb23f962b8
commit ebc8176850
5 changed files with 44 additions and 13 deletions

View File

@@ -302,6 +302,7 @@ final class CameraOutput: NSObject {
         }
         
         self.currentMode = mode
+        self.lastSampleTimestamp = nil
        
        let codecType: AVVideoCodecType
        if case .roundVideo = mode {
@@ -407,6 +408,7 @@ final class CameraOutput: NSObject {
     private weak var masterOutput: CameraOutput?
+    private var lastSampleTimestamp: CMTime?
    
    func processVideoRecording(_ sampleBuffer: CMSampleBuffer, fromAdditionalOutput: Bool) {
        guard let formatDescriptor = CMSampleBufferGetFormatDescription(sampleBuffer) else {
            return
@@ -430,8 +432,14 @@ final class CameraOutput: NSObject {
             }
         }
         
         if let processedSampleBuffer = self.processRoundVideoSampleBuffer(sampleBuffer, additional: fromAdditionalOutput, transitionFactor: transitionFactor) {
-            if (transitionFactor == 1.0 && fromAdditionalOutput) || (transitionFactor == 0.0 && !fromAdditionalOutput) || (transitionFactor > 0.0 && transitionFactor < 1.0) {
-                videoRecorder.appendSampleBuffer(processedSampleBuffer)
+            let presentationTime = CMSampleBufferGetPresentationTimeStamp(processedSampleBuffer)
+            if let lastSampleTimestamp = self.lastSampleTimestamp, lastSampleTimestamp > presentationTime {
+            } else {
+                if (transitionFactor == 1.0 && fromAdditionalOutput) || (transitionFactor == 0.0 && !fromAdditionalOutput) || (transitionFactor > 0.0 && transitionFactor < 1.0) {
+                    videoRecorder.appendSampleBuffer(processedSampleBuffer)
+                    self.lastSampleTimestamp = presentationTime
+                }
             }
         } else {
             videoRecorder.appendSampleBuffer(sampleBuffer)

View File

@@ -121,7 +121,7 @@ class CameraRoundVideoFilter {
             context.setFillColor(UIColor.white.cgColor)
             context.fill(bounds)
             context.setBlendMode(.clear)
-            context.fillEllipse(in: bounds)
+            context.fillEllipse(in: bounds.insetBy(dx: -2.0, dy: -2.0))
        })!
        
        self.resizeFilter = CIFilter(name: "CILanczosScaleTransform")

View File

@@ -71,7 +71,7 @@ enum CameraScreenTransition {
 private let viewOnceButtonTag = GenericComponentViewTag()
 
-private final class CameraScreenComponent: CombinedComponent {
+private final class VideoMessageCameraScreenComponent: CombinedComponent {
     typealias EnvironmentType = ViewControllerComponentContainer.Environment
     
     let context: AccountContext
@@ -109,7 +109,7 @@ private final class VideoMessageCameraScreenComponent: CombinedComponent {
         self.completion = completion
     }
     
-    static func ==(lhs: CameraScreenComponent, rhs: CameraScreenComponent) -> Bool {
+    static func ==(lhs: VideoMessageCameraScreenComponent, rhs: VideoMessageCameraScreenComponent) -> Bool {
         if lhs.context !== rhs.context {
             return false
         }
@@ -184,6 +184,9 @@ private final class VideoMessageCameraScreenComponent: CombinedComponent {
     if let self, let controller = getController() {
         self.startVideoRecording(pressing: !controller.scheduledLock)
         controller.scheduledLock = false
+        if controller.recordingStartTime == nil {
+            controller.recordingStartTime = CACurrentMediaTime()
+        }
     }
 })
 
 self.stopRecording.connect({ [weak self] _ in
@@ -241,7 +244,7 @@ private final class VideoMessageCameraScreenComponent: CombinedComponent {
     let duration = initialDuration + recordingData.duration
     if let self, let controller = self.getController() {
         controller.updateCameraState({ $0.updatedDuration(duration) }, transition: .easeInOut(duration: 0.1))
-        if recordingData.duration > 59.0 {
+        if duration > 59.0 {
             self.stopVideoRecording()
         }
         if isFirstRecording {
@@ -323,6 +326,10 @@ private final class VideoMessageCameraScreenComponent: CombinedComponent {
             showViewOnce = true
         }
+        if let controller = component.getController(), !controller.viewOnceAvailable {
+            showViewOnce = false
+        }
+        
         if !component.isPreviewing {
             let flipButton = flipButton.update(
                 component: CameraButton(
@@ -942,7 +949,7 @@ public class VideoMessageCameraScreen: ViewController {
     let componentSize = self.componentHost.update(
         transition: transition,
         component: AnyComponent(
-            CameraScreenComponent(
+            VideoMessageCameraScreenComponent(
                 context: self.context,
                 cameraState: self.cameraState,
                 isPreviewing: self.previewState != nil || self.transitioningToPreview,
@@ -1056,6 +1063,7 @@ public class VideoMessageCameraScreen: ViewController {
     private let updatedPresentationData: (initial: PresentationData, signal: Signal<PresentationData, NoError>)?
     private let inputPanelFrame: CGRect
     fileprivate var allowLiveUpload: Bool
+    fileprivate var viewOnceAvailable: Bool
    
    fileprivate let completion: (EnqueueMessage?) -> Void
@@ -1147,13 +1155,20 @@ public class VideoMessageCameraScreen: ViewController {
     initialPlaceholder = self.camera?.transitionImage ?? .single(nil)
 }
 
+var approximateDuration: Double
+if let recordingStartTime = self.recordingStartTime {
+    approximateDuration = CACurrentMediaTime() - recordingStartTime
+} else {
+    approximateDuration = 1.0
+}
+
 let immediateResult: Signal<RecordedVideoData?, NoError> = initialPlaceholder
 |> take(1)
 |> mapToSignal { initialPlaceholder in
     return videoFrames(asset: nil, count: count, initialPlaceholder: initialPlaceholder)
     |> map { framesAndUpdateTimestamp in
         return RecordedVideoData(
-            duration: 1.0,
+            duration: approximateDuration,
             frames: framesAndUpdateTimestamp.0,
             framesUpdateTimestamp: framesAndUpdateTimestamp.1,
             trimRange: nil
@@ -1196,14 +1211,15 @@ public class VideoMessageCameraScreen: ViewController {
     public init(
         context: AccountContext,
         updatedPresentationData: (initial: PresentationData, signal: Signal<PresentationData, NoError>)?,
+        peerId: EnginePeer.Id,
         inputPanelFrame: CGRect,
-        allowLiveUpload: Bool,
         completion: @escaping (EnqueueMessage?) -> Void
     ) {
         self.context = context
         self.updatedPresentationData = updatedPresentationData
         self.inputPanelFrame = inputPanelFrame
-        self.allowLiveUpload = allowLiveUpload
+        self.allowLiveUpload = peerId.namespace != Namespaces.Peer.SecretChat
+        self.viewOnceAvailable = peerId.namespace == Namespaces.Peer.CloudUser
         self.completion = completion
        
        self.recordingStatus = RecordingStatus(micLevel: self.micLevelValue.get(), duration: self.durationValue.get())
@@ -1265,6 +1281,11 @@ public class VideoMessageCameraScreen: ViewController {
         }
     }
     
+    if duration < 1.0 {
+        self.completion(nil)
+        return
+    }
+    
     let finalDuration: Double
     if let trimRange = self.node.previewState?.trimRange {
         finalDuration = trimRange.upperBound - trimRange.lowerBound
@@ -1350,6 +1371,7 @@ public class VideoMessageCameraScreen: ViewController {
         return true
     }
     
+    fileprivate var recordingStartTime: Double?
     fileprivate var scheduledLock = false
     public func lockVideoRecording() {
         if case .none = self.cameraState.recording {

View File

@@ -15381,8 +15381,8 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
     let controller = VideoMessageCameraScreen(
         context: self.context,
         updatedPresentationData: self.updatedPresentationData,
+        peerId: peerId,
         inputPanelFrame: currentInputPanelFrame,
-        allowLiveUpload: peerId.namespace != Namespaces.Peer.SecretChat,
         completion: { [weak self] message in
             guard let self, let videoController = self.videoRecorderValue else {
                 return
@@ -15574,7 +15574,7 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
     self.recorderDataDisposable.set((videoRecorderValue.takenRecordedData()
     |> deliverOnMainQueue).startStrict(next: { [weak self] data in
         if let strongSelf = self, let data = data {
-            if data.duration < 0.5 {
+            if data.duration < 1.0 {
                 strongSelf.recorderFeedback?.error()
                 strongSelf.recorderFeedback = nil
                 strongSelf.updateChatPresentationInterfaceState(animated: true, interactive: true, {

View File

@@ -492,10 +492,11 @@ final class ChatRecordingPreviewInputPanelNode: ChatInputPanelNode {
     }
     
     @objc func sendPressed() {
-        self.viewOnce = false
         self.tooltipController?.dismiss()
+        
         self.interfaceInteraction?.sendRecordedMedia(false, self.viewOnce)
+        self.viewOnce = false
     }
     
     private weak var tooltipController: TooltipScreen?