Video message recording improvements

Ilya Laktyushin 2024-01-13 21:46:02 +04:00
parent bb23f962b8
commit ebc8176850
5 changed files with 44 additions and 13 deletions

View File

@@ -302,6 +302,7 @@ final class CameraOutput: NSObject {
         }
         self.currentMode = mode
+        self.lastSampleTimestamp = nil
         let codecType: AVVideoCodecType
         if case .roundVideo = mode {
@@ -407,6 +408,7 @@ final class CameraOutput: NSObject {
     private weak var masterOutput: CameraOutput?
+    private var lastSampleTimestamp: CMTime?
     func processVideoRecording(_ sampleBuffer: CMSampleBuffer, fromAdditionalOutput: Bool) {
         guard let formatDescriptor = CMSampleBufferGetFormatDescription(sampleBuffer) else {
             return
@@ -430,8 +432,14 @@ final class CameraOutput: NSObject {
             }
         }
         if let processedSampleBuffer = self.processRoundVideoSampleBuffer(sampleBuffer, additional: fromAdditionalOutput, transitionFactor: transitionFactor) {
-            if (transitionFactor == 1.0 && fromAdditionalOutput) || (transitionFactor == 0.0 && !fromAdditionalOutput) || (transitionFactor > 0.0 && transitionFactor < 1.0) {
-                videoRecorder.appendSampleBuffer(processedSampleBuffer)
+            let presentationTime = CMSampleBufferGetPresentationTimeStamp(processedSampleBuffer)
+            if let lastSampleTimestamp = self.lastSampleTimestamp, lastSampleTimestamp > presentationTime {
+            } else {
+                if (transitionFactor == 1.0 && fromAdditionalOutput) || (transitionFactor == 0.0 && !fromAdditionalOutput) || (transitionFactor > 0.0 && transitionFactor < 1.0) {
+                    videoRecorder.appendSampleBuffer(processedSampleBuffer)
+                    self.lastSampleTimestamp = presentationTime
+                }
+            }
         } else {
             videoRecorder.appendSampleBuffer(sampleBuffer)
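
The change above drops out-of-order frames: for example when samples from the main and the additional camera output interleave during a transition, a frame whose presentation timestamp is older than the last appended one is now skipped instead of being written. A minimal sketch of the same guard, assuming a plain AVAssetWriterInput rather than the project's videoRecorder wrapper (MonotonicVideoWriter is a hypothetical helper, not part of this codebase):

    import AVFoundation

    final class MonotonicVideoWriter {
        private let writerInput: AVAssetWriterInput
        private var lastTimestamp: CMTime?

        init(writerInput: AVAssetWriterInput) {
            self.writerInput = writerInput
        }

        // AVAssetWriterInput expects monotonically increasing presentation
        // timestamps; skip any sample that would go backwards in time.
        func append(_ sampleBuffer: CMSampleBuffer) {
            let timestamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
            if let last = self.lastTimestamp, last > timestamp {
                return
            }
            if self.writerInput.isReadyForMoreMediaData, self.writerInput.append(sampleBuffer) {
                self.lastTimestamp = timestamp
            }
        }
    }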

View File

@@ -121,7 +121,7 @@ class CameraRoundVideoFilter {
             context.setFillColor(UIColor.white.cgColor)
             context.fill(bounds)
             context.setBlendMode(.clear)
-            context.fillEllipse(in: bounds)
+            context.fillEllipse(in: bounds.insetBy(dx: -2.0, dy: -2.0))
         })!
         self.resizeFilter = CIFilter(name: "CILanczosScaleTransform")
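
The one-line change above outsets the cleared ellipse by 2 points on each side, so the transparent cut-out extends slightly past the mask bounds and no opaque fringe is left at the circle's edge. A rough sketch of producing such a mask with UIGraphicsImageRenderer (the function name and size parameter are illustrative, not from this file):

    import UIKit

    // Draws a white square with a transparent circle punched out of it.
    // The ellipse is drawn slightly larger than the bounds so its
    // anti-aliased edge is not clipped by the mask itself.
    func makeRoundMaskImage(side: CGFloat) -> UIImage {
        let bounds = CGRect(x: 0.0, y: 0.0, width: side, height: side)
        let renderer = UIGraphicsImageRenderer(bounds: bounds)
        return renderer.image { context in
            let cgContext = context.cgContext
            cgContext.setFillColor(UIColor.white.cgColor)
            cgContext.fill(bounds)
            cgContext.setBlendMode(.clear)
            cgContext.fillEllipse(in: bounds.insetBy(dx: -2.0, dy: -2.0))
        }
    }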

View File

@@ -71,7 +71,7 @@ enum CameraScreenTransition {
 private let viewOnceButtonTag = GenericComponentViewTag()
-private final class CameraScreenComponent: CombinedComponent {
+private final class VideoMessageCameraScreenComponent: CombinedComponent {
     typealias EnvironmentType = ViewControllerComponentContainer.Environment
     let context: AccountContext
@@ -109,7 +109,7 @@ private final class CameraScreenComponent: CombinedComponent {
         self.completion = completion
     }
-    static func ==(lhs: CameraScreenComponent, rhs: CameraScreenComponent) -> Bool {
+    static func ==(lhs: VideoMessageCameraScreenComponent, rhs: VideoMessageCameraScreenComponent) -> Bool {
        if lhs.context !== rhs.context {
            return false
        }
@@ -184,6 +184,9 @@ private final class CameraScreenComponent: CombinedComponent {
                 if let self, let controller = getController() {
                     self.startVideoRecording(pressing: !controller.scheduledLock)
                     controller.scheduledLock = false
+                    if controller.recordingStartTime == nil {
+                        controller.recordingStartTime = CACurrentMediaTime()
+                    }
                 }
             })
             self.stopRecording.connect({ [weak self] _ in
@@ -241,7 +244,7 @@ private final class CameraScreenComponent: CombinedComponent {
                 let duration = initialDuration + recordingData.duration
                 if let self, let controller = self.getController() {
                     controller.updateCameraState({ $0.updatedDuration(duration) }, transition: .easeInOut(duration: 0.1))
-                    if recordingData.duration > 59.0 {
+                    if duration > 59.0 {
                         self.stopVideoRecording()
                     }
                     if isFirstRecording {
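
The condition above now checks duration, i.e. the length of previously recorded segments (initialDuration) plus the current segment, instead of recordingData.duration alone, so recording stops once the combined video message approaches one minute even when it spans several segments. Reduced to a sketch with illustrative names:

    // Illustrative bookkeeping for the 59-second cap across segments.
    struct RecordingProgress {
        var completedSegmentsDuration: Double
        var currentSegmentDuration: Double

        var total: Double {
            return self.completedSegmentsDuration + self.currentSegmentDuration
        }

        var shouldStop: Bool {
            return self.total > 59.0
        }
    }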
@@ -323,6 +326,10 @@ private final class CameraScreenComponent: CombinedComponent {
                 showViewOnce = true
             }
+            if let controller = component.getController(), !controller.viewOnceAvailable {
+                showViewOnce = false
+            }
             if !component.isPreviewing {
                 let flipButton = flipButton.update(
                     component: CameraButton(
@@ -942,7 +949,7 @@ public class VideoMessageCameraScreen: ViewController {
             let componentSize = self.componentHost.update(
                 transition: transition,
                 component: AnyComponent(
-                    CameraScreenComponent(
+                    VideoMessageCameraScreenComponent(
                         context: self.context,
                         cameraState: self.cameraState,
                         isPreviewing: self.previewState != nil || self.transitioningToPreview,
@@ -1056,6 +1063,7 @@ public class VideoMessageCameraScreen: ViewController {
     private let updatedPresentationData: (initial: PresentationData, signal: Signal<PresentationData, NoError>)?
     private let inputPanelFrame: CGRect
     fileprivate var allowLiveUpload: Bool
+    fileprivate var viewOnceAvailable: Bool
     fileprivate let completion: (EnqueueMessage?) -> Void
@@ -1147,13 +1155,20 @@ public class VideoMessageCameraScreen: ViewController {
             initialPlaceholder = self.camera?.transitionImage ?? .single(nil)
         }
+        var approximateDuration: Double
+        if let recordingStartTime = self.recordingStartTime {
+            approximateDuration = CACurrentMediaTime() - recordingStartTime
+        } else {
+            approximateDuration = 1.0
+        }
         let immediateResult: Signal<RecordedVideoData?, NoError> = initialPlaceholder
         |> take(1)
         |> mapToSignal { initialPlaceholder in
             return videoFrames(asset: nil, count: count, initialPlaceholder: initialPlaceholder)
             |> map { framesAndUpdateTimestamp in
                 return RecordedVideoData(
-                    duration: 1.0,
+                    duration: approximateDuration,
                     frames: framesAndUpdateTimestamp.0,
                     framesUpdateTimestamp: framesAndUpdateTimestamp.1,
                     trimRange: nil
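
Because the recorded asset is not yet available when this preview data is produced, the duration used to be hard-coded to 1.0. With the change above, recordingStartTime is captured when recording begins and the elapsed wall-clock time serves as an estimate until the real duration is known. A minimal sketch of that estimate (RecordingClock is a hypothetical helper, not from this file):

    import QuartzCore

    // Tracks when recording started and reports an approximate elapsed
    // duration until the real asset duration becomes available.
    final class RecordingClock {
        private var startTime: Double?

        func markStartedIfNeeded() {
            if self.startTime == nil {
                self.startTime = CACurrentMediaTime()
            }
        }

        func approximateDuration(fallback: Double = 1.0) -> Double {
            if let startTime = self.startTime {
                return CACurrentMediaTime() - startTime
            }
            return fallback
        }
    }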
@@ -1196,14 +1211,15 @@ public class VideoMessageCameraScreen: ViewController {
     public init(
         context: AccountContext,
         updatedPresentationData: (initial: PresentationData, signal: Signal<PresentationData, NoError>)?,
+        peerId: EnginePeer.Id,
         inputPanelFrame: CGRect,
-        allowLiveUpload: Bool,
         completion: @escaping (EnqueueMessage?) -> Void
     ) {
         self.context = context
         self.updatedPresentationData = updatedPresentationData
         self.inputPanelFrame = inputPanelFrame
-        self.allowLiveUpload = allowLiveUpload
+        self.allowLiveUpload = peerId.namespace != Namespaces.Peer.SecretChat
+        self.viewOnceAvailable = peerId.namespace == Namespaces.Peer.CloudUser
         self.completion = completion
         self.recordingStatus = RecordingStatus(micLevel: self.micLevelValue.get(), duration: self.durationValue.get())
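
Instead of receiving allowLiveUpload from the caller, the screen now takes the peer id and derives both capabilities itself: live upload of the partially recorded file is disabled for secret chats, and the view-once option is only offered in one-to-one cloud chats. A sketch of the same derivation (the free function is illustrative; EnginePeer.Id and Namespaces come from the host project):

    // Derive recording capabilities from the target peer (illustrative helper).
    func recordingCapabilities(for peerId: EnginePeer.Id) -> (allowLiveUpload: Bool, viewOnceAvailable: Bool) {
        // Secret chats must not stream partial uploads while recording.
        let allowLiveUpload = peerId.namespace != Namespaces.Peer.SecretChat
        // View-once only applies to private chats with a single user.
        let viewOnceAvailable = peerId.namespace == Namespaces.Peer.CloudUser
        return (allowLiveUpload, viewOnceAvailable)
    }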
@@ -1265,6 +1281,11 @@ public class VideoMessageCameraScreen: ViewController {
             }
         }
+        if duration < 1.0 {
+            self.completion(nil)
+            return
+        }
         let finalDuration: Double
         if let trimRange = self.node.previewState?.trimRange {
             finalDuration = trimRange.upperBound - trimRange.lowerBound
@@ -1350,6 +1371,7 @@ public class VideoMessageCameraScreen: ViewController {
         return true
     }
+    fileprivate var recordingStartTime: Double?
     fileprivate var scheduledLock = false
     public func lockVideoRecording() {
         if case .none = self.cameraState.recording {

View File

@@ -15381,8 +15381,8 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
         let controller = VideoMessageCameraScreen(
             context: self.context,
             updatedPresentationData: self.updatedPresentationData,
+            peerId: peerId,
             inputPanelFrame: currentInputPanelFrame,
-            allowLiveUpload: peerId.namespace != Namespaces.Peer.SecretChat,
             completion: { [weak self] message in
                 guard let self, let videoController = self.videoRecorderValue else {
                     return
@@ -15574,7 +15574,7 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
         self.recorderDataDisposable.set((videoRecorderValue.takenRecordedData()
         |> deliverOnMainQueue).startStrict(next: { [weak self] data in
             if let strongSelf = self, let data = data {
-                if data.duration < 0.5 {
+                if data.duration < 1.0 {
                     strongSelf.recorderFeedback?.error()
                     strongSelf.recorderFeedback = nil
                     strongSelf.updateChatPresentationInterfaceState(animated: true, interactive: true, {

View File

@@ -492,10 +492,11 @@ final class ChatRecordingPreviewInputPanelNode: ChatInputPanelNode {
     }
     @objc func sendPressed() {
-        self.viewOnce = false
+        self.tooltipController?.dismiss()
         self.interfaceInteraction?.sendRecordedMedia(false, self.viewOnce)
+        self.viewOnce = false
     }
     private weak var tooltipController: TooltipScreen?
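
The reordering above fixes the view-once flag being consumed too early: previously self.viewOnce was reset to false before sendRecordedMedia(false, self.viewOnce) could read it, so the send call never saw the user's choice; it is now cleared only after the call. Reduced to a sketch with illustrative names:

    // Consume the flag, then reset it for the next recording.
    final class SendButtonHandler {
        private(set) var viewOnce: Bool = false
        var send: (Bool) -> Void = { _ in }

        func toggleViewOnce() {
            self.viewOnce = !self.viewOnce
        }

        func sendPressed() {
            // Read the flag before clearing it so the send call sees the user's choice.
            self.send(self.viewOnce)
            self.viewOnce = false
        }
    }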