Various video recording improvements

Ilya Laktyushin 2024-01-28 15:27:53 +04:00
parent 79f167d01d
commit 4b48b12d1a
2 changed files with 82 additions and 51 deletions

View File

@@ -95,7 +95,9 @@ final class CameraOutput: NSObject {
private var roundVideoFilter: CameraRoundVideoFilter?
private let semaphore = DispatchSemaphore(value: 1)
private let queue = DispatchQueue(label: "")
private let videoQueue = DispatchQueue(label: "", qos: .userInitiated)
private let audioQueue = DispatchQueue(label: "")
private let metadataQueue = DispatchQueue(label: "")
private var photoCaptureRequests: [Int64: PhotoCaptureContext] = [:]
@@ -114,7 +116,7 @@ final class CameraOutput: NSObject {
self.isVideoMessage = use32BGRA
super.init()
if #available(iOS 13.0, *) {
self.photoOutput.maxPhotoQualityPrioritization = .balanced
}
@@ -135,13 +137,13 @@ final class CameraOutput: NSObject {
} else {
session.session.addOutput(self.videoOutput)
}
self.videoOutput.setSampleBufferDelegate(self, queue: self.queue)
self.videoOutput.setSampleBufferDelegate(self, queue: self.videoQueue)
} else {
Logger.shared.log("Camera", "Can't add video output")
}
if audio, session.session.canAddOutput(self.audioOutput) {
session.session.addOutput(self.audioOutput)
self.audioOutput.setSampleBufferDelegate(self, queue: self.queue)
self.audioOutput.setSampleBufferDelegate(self, queue: self.audioQueue)
}
if photo, session.session.canAddOutput(self.photoOutput) {
if session.hasMultiCam {
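
Note: sample delivery is split across dedicated serial queues, with video at .userInitiated QoS, so audio callbacks no longer queue behind frame processing (previously both outputs shared self.queue). A minimal sketch of the wiring, with assumed names:

    import AVFoundation

    // Sketch: one delegate, per-output serial queues (assumed setup, not this commit's exact wiring).
    func configureOutputs(
        delegate: AVCaptureVideoDataOutputSampleBufferDelegate & AVCaptureAudioDataOutputSampleBufferDelegate,
        videoOutput: AVCaptureVideoDataOutput,
        audioOutput: AVCaptureAudioDataOutput
    ) {
        // Video gets its own userInitiated queue so frame callbacks are scheduled promptly;
        // audio runs on a separate queue and never waits behind frame processing.
        let videoQueue = DispatchQueue(label: "camera.video", qos: .userInitiated)
        let audioQueue = DispatchQueue(label: "camera.audio")
        videoOutput.setSampleBufferDelegate(delegate, queue: videoQueue)
        audioOutput.setSampleBufferDelegate(delegate, queue: audioQueue)
    }
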
@@ -454,7 +456,9 @@ final class CameraOutput: NSObject {
if let lastSampleTimestamp = self.lastSampleTimestamp, lastSampleTimestamp > presentationTime {
} else {
if (transitionFactor == 1.0 && fromAdditionalOutput) || (transitionFactor == 0.0 && !fromAdditionalOutput) || (transitionFactor > 0.0 && transitionFactor < 1.0) {
if (transitionFactor == 1.0 && fromAdditionalOutput)
|| (transitionFactor == 0.0 && !fromAdditionalOutput)
|| (transitionFactor > 0.0 && transitionFactor < 1.0) {
videoRecorder.appendSampleBuffer(processedSampleBuffer)
self.lastSampleTimestamp = presentationTime
}
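
Note: the reformatted condition gates appends during dual-camera transitions: buffers from the additional output are written only when that camera is fully shown (factor 1.0), buffers from the main output only at factor 0.0, and any buffer is accepted mid-blend. A hypothetical helper restating the same predicate:

    import CoreGraphics

    // Hypothetical restatement of the gate above; name and signature are illustrative only.
    func shouldAppend(transitionFactor: CGFloat, fromAdditionalOutput: Bool) -> Bool {
        if transitionFactor == 1.0 { return fromAdditionalOutput }   // fully on the additional camera
        if transitionFactor == 0.0 { return !fromAdditionalOutput }  // fully on the main camera
        return transitionFactor > 0.0 && transitionFactor < 1.0      // mid-blend: accept both
    }
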
@@ -518,7 +522,7 @@ final class CameraOutput: NSObject {
return nil
}
self.semaphore.wait()
let mediaSubType = CMFormatDescriptionGetMediaSubType(formatDescription)
let extensions = CMFormatDescriptionGetExtensions(formatDescription) as! [String: Any]
@@ -528,6 +532,7 @@ final class CameraOutput: NSObject {
var newFormatDescription: CMFormatDescription?
var status = CMVideoFormatDescriptionCreate(allocator: nil, codecType: mediaSubType, width: videoMessageDimensions.width, height: videoMessageDimensions.height, extensions: updatedExtensions as CFDictionary, formatDescriptionOut: &newFormatDescription)
guard status == noErr, let newFormatDescription else {
self.semaphore.signal()
return nil
}
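
Note: the added signal() closes a semaphore leak: the early return after a failed format-description creation previously left the semaphore un-signalled, so the next wait() would block forever. A defer-based sketch that makes the pairing structural (an alternative shape, not what this commit does):

    import Foundation

    final class ExclusiveSection {
        private let semaphore = DispatchSemaphore(value: 1)

        // defer pairs every wait() with exactly one signal(), on all exit paths,
        // including early returns and thrown errors.
        func run<T>(_ work: () throws -> T) rethrows -> T {
            semaphore.wait()
            defer { semaphore.signal() }
            return try work()
        }
    }
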
@@ -592,7 +597,7 @@ extension CameraOutput: AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureA
guard CMSampleBufferDataIsReady(sampleBuffer) else {
return
}
if let videoPixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) {
self.processSampleBuffer?(sampleBuffer, videoPixelBuffer, connection)
} else {
@@ -607,7 +612,9 @@ extension CameraOutput: AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureA
}
func captureOutput(_ output: AVCaptureOutput, didDrop sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
if #available(iOS 13.0, *) {
Logger.shared.log("VideoRecorder", "Dropped sample buffer \(sampleBuffer.attachments)")
}
}
}
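
Note: the availability check is needed because CMSampleBuffer.attachments is iOS 13+. On earlier targets the drop reason is still reachable through CMGetAttachment; a sketch (assumed helper, not part of this commit):

    import CoreMedia

    // Assumed helper: read the drop reason without the iOS 13-only `attachments` API.
    func droppedFrameReason(_ sampleBuffer: CMSampleBuffer) -> String {
        // kCMSampleBufferAttachmentKey_DroppedFrameReason is set on buffers delivered via didDrop.
        let reason = CMGetAttachment(
            sampleBuffer,
            key: kCMSampleBufferAttachmentKey_DroppedFrameReason,
            attachmentModeOut: nil
        )
        return reason.map { String(describing: $0) } ?? "unknown"
    }
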

View File

@@ -40,11 +40,12 @@ private final class VideoRecorderImpl {
private var pendingAudioSampleBuffers: [CMSampleBuffer] = []
private var _duration: CMTime = .zero
private var _duration = Atomic<CMTime>(value: .zero)
public var duration: CMTime {
self.queue.sync { _duration }
return self._duration.with { $0 }
}
private var startedSession = false
private var lastVideoSampleTime: CMTime = .invalid
private var recordingStartSampleTime: CMTime = .invalid
private var recordingStopSampleTime: CMTime = .invalid
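
Note: duration moves from a queue.sync read to an Atomic wrapper, so it can be read from any thread (including the recorder queue itself) without risking a sync-onto-own-queue deadlock. Atomic is presumably the project's SwiftSignalKit type; a minimal lock-based equivalent for reference:

    import Foundation

    // Minimal sketch of an Atomic<T> wrapper; the project presumably uses SwiftSignalKit's.
    final class Atomic<T> {
        private let lock = NSLock()
        private var value: T

        init(value: T) {
            self.value = value
        }

        // Read (or derive from) the value under the lock.
        func with<R>(_ f: (T) -> R) -> R {
            lock.lock(); defer { lock.unlock() }
            return f(value)
        }

        // Replace the value under the lock and return the new value.
        func modify(_ f: (T) -> T) -> T {
            lock.lock(); defer { lock.unlock() }
            value = f(value)
            return value
        }
    }
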
@@ -59,7 +60,11 @@ private final class VideoRecorderImpl {
private let error = Atomic<Error?>(value: nil)
private var stopped = false
private var _stopped = Atomic<Bool>(value: false)
private var stopped: Bool {
return self._stopped.with { $0 }
}
private var hasAllVideoBuffers = false
private var hasAllAudioBuffers = false
@@ -113,20 +118,21 @@ private final class VideoRecorderImpl {
}
}
private var previousPresentationTime: Double?
private var previousAppendTime: Double?
public func appendVideoSampleBuffer(_ sampleBuffer: CMSampleBuffer) {
if let _ = self.hasError() {
return
}
guard let formatDescription = CMSampleBufferGetFormatDescription(sampleBuffer), CMFormatDescriptionGetMediaType(formatDescription) == kCMMediaType_Video else {
return
}
let presentationTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
self.queue.async {
guard !self.stopped && self.error.with({ $0 }) == nil else {
guard self.hasError() == nil && !self.stopped else {
return
}
guard let formatDescription = CMSampleBufferGetFormatDescription(sampleBuffer), CMFormatDescriptionGetMediaType(formatDescription) == kCMMediaType_Video else {
return
}
let presentationTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
var failed = false
if self.videoInput == nil {
Logger.shared.log("VideoRecorder", "Try adding video input")
@@ -162,33 +168,49 @@ private final class VideoRecorderImpl {
if !self.assetWriter.startWriting() {
if let error = self.assetWriter.error {
self.transitionToFailedStatus(error: .avError(error))
return
}
}
self.assetWriter.startSession(atSourceTime: presentationTime)
self.recordingStartSampleTime = presentationTime
self.lastVideoSampleTime = presentationTime
return
}
} else if self.assetWriter.status == .writing && !self.startedSession {
self.assetWriter.startSession(atSourceTime: presentationTime)
self.recordingStartSampleTime = presentationTime
self.lastVideoSampleTime = presentationTime
self.startedSession = true
}
if self.recordingStartSampleTime == .invalid || sampleBuffer.presentationTimestamp < self.recordingStartSampleTime {
return
}
if self.assetWriter.status == .writing {
if self.assetWriter.status == .writing && self.startedSession {
if self.recordingStopSampleTime != .invalid && sampleBuffer.presentationTimestamp > self.recordingStopSampleTime {
self.hasAllVideoBuffers = true
self.maybeFinish()
return
}
if let videoInput = self.videoInput, videoInput.isReadyForMoreMediaData {
if let videoInput = self.videoInput {
while !videoInput.isReadyForMoreMediaData {
let maxDate = Date(timeIntervalSinceNow: 0.05)
RunLoop.current.run(until: maxDate)
}
}
if let videoInput = self.videoInput {
let time = CACurrentMediaTime()
if let previousPresentationTime = self.previousPresentationTime, let previousAppendTime = self.previousAppendTime {
print("appending \(presentationTime.seconds) (\(presentationTime.seconds - previousPresentationTime) ) on \(time) (\(time - previousAppendTime)")
}
self.previousPresentationTime = presentationTime.seconds
self.previousAppendTime = time
if videoInput.append(sampleBuffer) {
self.lastVideoSampleTime = presentationTime
let startTime = self.recordingStartSampleTime
let duration = presentationTime - startTime
self._duration = duration
let _ = self._duration.modify { _ in return duration }
}
if !self.savedTransitionImage, let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) {
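
Note: the new code blocks the capture queue by spinning the run loop until the writer input is ready, instead of silently dropping frames that arrive while it is busy (the old isReadyForMoreMediaData guard). AVAssetWriterInput also offers a pull model; a sketch of that alternative (assumed buffering scheme, not this commit's approach):

    import AVFoundation

    // Pull model: the writer input drains pending buffers whenever it signals readiness.
    // `nextPendingBuffer` and `workQueue` are illustrative names, not from this commit.
    func drainPendingBuffers(
        into input: AVAssetWriterInput,
        on workQueue: DispatchQueue,
        nextPendingBuffer: @escaping () -> CMSampleBuffer?
    ) {
        input.requestMediaDataWhenReady(on: workQueue) {
            while input.isReadyForMoreMediaData {
                guard let buffer = nextPendingBuffer() else { return }
                input.append(buffer)
            }
        }
    }
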
@@ -220,16 +242,12 @@ private final class VideoRecorderImpl {
}
public func appendAudioSampleBuffer(_ sampleBuffer: CMSampleBuffer) {
if let _ = self.hasError() {
return
}
guard let formatDescription = CMSampleBufferGetFormatDescription(sampleBuffer), CMFormatDescriptionGetMediaType(formatDescription) == kCMMediaType_Audio else {
return
}
self.queue.async {
guard !self.stopped && self.error.with({ $0 }) == nil else {
guard self.hasError() == nil && !self.stopped else {
return
}
guard let formatDescription = CMSampleBufferGetFormatDescription(sampleBuffer), CMFormatDescriptionGetMediaType(formatDescription) == kCMMediaType_Audio else {
return
}
@@ -304,7 +322,7 @@ private final class VideoRecorderImpl {
}
return
}
self.stopped = true
let _ = self._stopped.modify { _ in return true }
self.pendingAudioSampleBuffers = []
if self.assetWriter.status == .writing {
self.assetWriter.cancelWriting()
@@ -318,7 +336,7 @@ private final class VideoRecorderImpl {
}
public var isRecording: Bool {
self.queue.sync { !(self.hasAllVideoBuffers && self.hasAllAudioBuffers) }
return !self.stopped
}
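
Note: isRecording previously did queue.sync from arbitrary threads, which deadlocks if the property is ever read from the recorder queue itself; it now reads the lock-protected stopped flag instead. An illustration of the hazard (illustrative names):

    import Foundation

    let recorderQueue = DispatchQueue(label: "recorder")

    func illustrateSyncHazard() {
        recorderQueue.async {
            // Re-entering the same serial queue synchronously never completes:
            // recorderQueue.sync { print("never runs") }  // deadlock
        }
    }
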
public func stopRecording() {
@@ -334,17 +352,17 @@ private final class VideoRecorderImpl {
}
}
public func maybeFinish() {
self.queue.async {
guard self.hasAllVideoBuffers && self.hasAllAudioBuffers && !self.stopped else {
return
}
self.stopped = true
self.finish()
private func maybeFinish() {
dispatchPrecondition(condition: .onQueue(self.queue))
guard self.hasAllVideoBuffers && self.hasAllAudioBuffers && !self.stopped else {
return
}
let _ = self._stopped.modify { _ in return true }
self.finish()
}
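
Note: maybeFinish and finish become private and queue-confined; callers are expected to hop onto the recorder queue themselves, and dispatchPrecondition traps in debug builds when that invariant is violated. The general shape of the pattern, with illustrative names:

    import Foundation

    final class QueueConfinedRecorder {
        private let queue = DispatchQueue(label: "recorder")

        // Public entry points hop onto the queue...
        func stop() {
            queue.async { self.finishOnQueue() }
        }

        // ...while queue-confined internals assert the invariant instead of re-dispatching.
        private func finishOnQueue() {
            dispatchPrecondition(condition: .onQueue(self.queue))
            // state mutation is safe here
        }
    }
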
public func finish() {
private func finish() {
dispatchPrecondition(condition: .onQueue(self.queue))
let completion = self.completion
if self.recordingStopSampleTime == .invalid {
DispatchQueue.main.async {
@@ -352,7 +370,7 @@ private final class VideoRecorderImpl {
}
return
}
if let _ = self.error.with({ $0 }) {
DispatchQueue.main.async {
completion(false, nil, nil)
@@ -421,7 +439,13 @@ private final class VideoRecorderImpl {
}
private func internalAppendAudioSampleBuffer(_ sampleBuffer: CMSampleBuffer) -> Bool {
if let audioInput = self.audioInput, audioInput.isReadyForMoreMediaData {
if self.startedSession, let audioInput = self.audioInput {
while !audioInput.isReadyForMoreMediaData {
let maxDate = Date(timeIntervalSinceNow: 0.05)
RunLoop.current.run(until: maxDate)
}
if !audioInput.append(sampleBuffer) {
if let _ = self.assetWriter.error {
return false