Merge commit 'ee745c48a6ef2bf814c8f5ed2388559691d93ec3'

Isaac 2024-01-28 18:59:20 +01:00
commit 03f5073f1d
4 changed files with 99 additions and 62 deletions

View File

@@ -85,7 +85,7 @@ final class CameraDeviceContext {
}
private func maxDimensions(additional: Bool, preferWide: Bool) -> CMVideoDimensions {
if self.isRoundVideo && !Camera.isDualCameraSupported {
if self.isRoundVideo && self.exclusive {
return CMVideoDimensions(width: 640, height: 480)
} else {
if additional || preferWide {

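Note on the hunk above: the capability check moves from "dual camera unsupported on this device" to "this session owns the camera exclusively", so the low 640x480 round-video capture path is tied to the session mode rather than the hardware. A minimal sketch of the resulting decision, in which every name and value other than 640x480 is purely illustrative:

    import CoreMedia

    // Illustrative sketch, not the project's maxDimensions implementation.
    func roundVideoMaxDimensions(exclusive: Bool, additional: Bool, preferWide: Bool) -> CMVideoDimensions {
        if exclusive {
            // Exclusive round-video capture only needs a small frame.
            return CMVideoDimensions(width: 640, height: 480)
        } else if additional || preferWide {
            return CMVideoDimensions(width: 1920, height: 1440) // illustrative value
        } else {
            return CMVideoDimensions(width: 1920, height: 1080) // illustrative value
        }
    }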
View File

@@ -95,7 +95,9 @@ final class CameraOutput: NSObject {
private var roundVideoFilter: CameraRoundVideoFilter?
private let semaphore = DispatchSemaphore(value: 1)
private let queue = DispatchQueue(label: "")
private let videoQueue = DispatchQueue(label: "", qos: .userInitiated)
private let audioQueue = DispatchQueue(label: "")
private let metadataQueue = DispatchQueue(label: "")
private var photoCaptureRequests: [Int64: PhotoCaptureContext] = [:]
@@ -114,7 +116,7 @@ final class CameraOutput: NSObject {
self.isVideoMessage = use32BGRA
super.init()
if #available(iOS 13.0, *) {
self.photoOutput.maxPhotoQualityPrioritization = .balanced
}
@@ -135,13 +137,13 @@ final class CameraOutput: NSObject {
} else {
session.session.addOutput(self.videoOutput)
}
self.videoOutput.setSampleBufferDelegate(self, queue: self.queue)
self.videoOutput.setSampleBufferDelegate(self, queue: self.videoQueue)
} else {
Logger.shared.log("Camera", "Can't add video output")
}
if audio, session.session.canAddOutput(self.audioOutput) {
session.session.addOutput(self.audioOutput)
self.audioOutput.setSampleBufferDelegate(self, queue: self.queue)
self.audioOutput.setSampleBufferDelegate(self, queue: self.audioQueue)
}
if photo, session.session.canAddOutput(self.photoOutput) {
if session.hasMultiCam {
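The two hunks above replace the single shared delegate queue with dedicated serial queues per output (video at .userInitiated QoS), so video, audio and metadata callbacks no longer contend with one another. A minimal sketch of that pattern, assuming illustrative queue labels and class name:

    import AVFoundation

    // Sketch only: each capture output gets its own serial queue so a slow
    // video frame callback cannot delay audio sample delivery, and vice versa.
    final class CaptureOutputQueues: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate {
        private let videoQueue = DispatchQueue(label: "camera.video", qos: .userInitiated)
        private let audioQueue = DispatchQueue(label: "camera.audio")

        func attach(to session: AVCaptureSession) {
            let videoOutput = AVCaptureVideoDataOutput()
            let audioOutput = AVCaptureAudioDataOutput()
            if session.canAddOutput(videoOutput) {
                session.addOutput(videoOutput)
                // Video frames are delivered on their own userInitiated queue.
                videoOutput.setSampleBufferDelegate(self, queue: self.videoQueue)
            }
            if session.canAddOutput(audioOutput) {
                session.addOutput(audioOutput)
                // Audio no longer shares a queue with video.
                audioOutput.setSampleBufferDelegate(self, queue: self.audioQueue)
            }
        }

        func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
            // Called on videoQueue for the video output, audioQueue for the audio output.
        }
    }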
@@ -305,6 +307,8 @@ final class CameraOutput: NSObject {
return .complete()
}
Logger.shared.log("CameraOutput", "startRecording")
self.currentMode = mode
self.lastSampleTimestamp = nil
self.captureOrientation = orientation
@@ -449,18 +453,19 @@ final class CameraOutput: NSObject {
transitionFactor = 1.0 - max(0.0, (currentTimestamp - self.lastSwitchTimestamp) / duration)
}
}
if let processedSampleBuffer = self.processRoundVideoSampleBuffer(sampleBuffer, additional: fromAdditionalOutput, transitionFactor: transitionFactor) {
let presentationTime = CMSampleBufferGetPresentationTimeStamp(processedSampleBuffer)
if let lastSampleTimestamp = self.lastSampleTimestamp, lastSampleTimestamp > presentationTime {
} else {
if (transitionFactor == 1.0 && fromAdditionalOutput) || (transitionFactor == 0.0 && !fromAdditionalOutput) || (transitionFactor > 0.0 && transitionFactor < 1.0) {
if (transitionFactor == 1.0 && fromAdditionalOutput)
|| (transitionFactor == 0.0 && !fromAdditionalOutput)
|| (transitionFactor > 0.0 && transitionFactor < 1.0) {
if let processedSampleBuffer = self.processRoundVideoSampleBuffer(sampleBuffer, additional: fromAdditionalOutput, transitionFactor: transitionFactor) {
let presentationTime = CMSampleBufferGetPresentationTimeStamp(processedSampleBuffer)
if let lastSampleTimestamp = self.lastSampleTimestamp, lastSampleTimestamp > presentationTime {
} else {
videoRecorder.appendSampleBuffer(processedSampleBuffer)
self.lastSampleTimestamp = presentationTime
}
}
} else {
videoRecorder.appendSampleBuffer(sampleBuffer)
}
} else {
var additional = self.currentPosition == .front
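The restructured block above changes when the round-video filter runs (the transition-factor check now happens before processing), but both versions keep the same monotonicity guard: a frame whose presentation timestamp is not later than the last appended one is dropped. A condensed sketch of that guard, with the helper name and closure parameter purely illustrative:

    import CoreMedia

    func appendIfMonotonic(_ sampleBuffer: CMSampleBuffer,
                           lastTimestamp: inout CMTime?,
                           append: (CMSampleBuffer) -> Void) {
        let presentationTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
        // Frames from the previously active output can trail in with older
        // timestamps during a camera switch; skip them so the writer's
        // timeline stays strictly increasing.
        if let last = lastTimestamp, last > presentationTime {
            return
        }
        append(sampleBuffer)
        lastTimestamp = presentationTime
    }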
@@ -518,7 +523,7 @@ final class CameraOutput: NSObject {
return nil
}
self.semaphore.wait()
let mediaSubType = CMFormatDescriptionGetMediaSubType(formatDescription)
let extensions = CMFormatDescriptionGetExtensions(formatDescription) as! [String: Any]
@@ -528,6 +533,7 @@ final class CameraOutput: NSObject {
var newFormatDescription: CMFormatDescription?
var status = CMVideoFormatDescriptionCreate(allocator: nil, codecType: mediaSubType, width: videoMessageDimensions.width, height: videoMessageDimensions.height, extensions: updatedExtensions as CFDictionary, formatDescriptionOut: &newFormatDescription)
guard status == noErr, let newFormatDescription else {
self.semaphore.signal()
return nil
}
@@ -539,8 +545,9 @@ final class CameraOutput: NSObject {
self.roundVideoFilter = filter
}
if !filter.isPrepared {
filter.prepare(with: newFormatDescription, outputRetainedBufferCountHint: 3)
filter.prepare(with: newFormatDescription, outputRetainedBufferCountHint: 4)
}
guard let newPixelBuffer = filter.render(pixelBuffer: videoPixelBuffer, additional: additional, captureOrientation: self.captureOrientation, transitionFactor: transitionFactor) else {
self.semaphore.signal()
return nil
@@ -592,7 +599,7 @@ extension CameraOutput: AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureA
guard CMSampleBufferDataIsReady(sampleBuffer) else {
return
}
if let videoPixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) {
self.processSampleBuffer?(sampleBuffer, videoPixelBuffer, connection)
} else {
@@ -607,7 +614,9 @@ extension CameraOutput: AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureA
}
func captureOutput(_ output: AVCaptureOutput, didDrop sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
if #available(iOS 13.0, *) {
Logger.shared.log("VideoRecorder", "Dropped sample buffer \(sampleBuffer.attachments)")
}
}
}

View File

@@ -40,11 +40,12 @@ private final class VideoRecorderImpl {
private var pendingAudioSampleBuffers: [CMSampleBuffer] = []
private var _duration: CMTime = .zero
private var _duration = Atomic<CMTime>(value: .zero)
public var duration: CMTime {
self.queue.sync { _duration }
return self._duration.with { $0 }
}
private var startedSession = false
private var lastVideoSampleTime: CMTime = .invalid
private var recordingStartSampleTime: CMTime = .invalid
private var recordingStopSampleTime: CMTime = .invalid
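In the hunk above, the duration (and, below, the stopped flag) moves from queue-confined state to an Atomic wrapper, so it can be read from any thread without a queue.sync hop. The project supplies its own Atomic type; a minimal lock-based stand-in with the same with/modify surface used in this diff might look like:

    import Foundation

    // Illustrative stand-in for the project's Atomic<T>.
    final class Atomic<T> {
        private let lock = NSLock()
        private var value: T

        init(value: T) {
            self.value = value
        }

        // Read (or derive from) the current value under the lock.
        func with<R>(_ f: (T) -> R) -> R {
            self.lock.lock()
            defer { self.lock.unlock() }
            return f(self.value)
        }

        // Replace the current value under the lock and return the new value.
        @discardableResult
        func modify(_ f: (T) -> T) -> T {
            self.lock.lock()
            defer { self.lock.unlock() }
            self.value = f(self.value)
            return self.value
        }
    }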
@@ -59,7 +60,11 @@ private final class VideoRecorderImpl {
private let error = Atomic<Error?>(value: nil)
private var stopped = false
private var _stopped = Atomic<Bool>(value: false)
private var stopped: Bool {
return self._stopped.with { $0 }
}
private var hasAllVideoBuffers = false
private var hasAllAudioBuffers = false
@@ -113,20 +118,21 @@ private final class VideoRecorderImpl {
}
}
private var previousPresentationTime: Double?
private var previousAppendTime: Double?
public func appendVideoSampleBuffer(_ sampleBuffer: CMSampleBuffer) {
if let _ = self.hasError() {
return
}
guard let formatDescription = CMSampleBufferGetFormatDescription(sampleBuffer), CMFormatDescriptionGetMediaType(formatDescription) == kCMMediaType_Video else {
return
}
let presentationTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
self.queue.async {
guard !self.stopped && self.error.with({ $0 }) == nil else {
guard self.hasError() == nil && !self.stopped else {
return
}
guard let formatDescription = CMSampleBufferGetFormatDescription(sampleBuffer), CMFormatDescriptionGetMediaType(formatDescription) == kCMMediaType_Video else {
return
}
let presentationTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
var failed = false
if self.videoInput == nil {
Logger.shared.log("VideoRecorder", "Try adding video input")
@@ -159,36 +165,56 @@ private final class VideoRecorderImpl {
return
}
if self.videoInput != nil && (self.audioInput != nil || !self.configuration.hasAudio) {
print("startWriting")
let start = CACurrentMediaTime()
if !self.assetWriter.startWriting() {
if let error = self.assetWriter.error {
self.transitionToFailedStatus(error: .avError(error))
return
}
}
self.assetWriter.startSession(atSourceTime: presentationTime)
self.recordingStartSampleTime = presentationTime
self.lastVideoSampleTime = presentationTime
print("started In \(CACurrentMediaTime() - start)")
return
}
} else if self.assetWriter.status == .writing && !self.startedSession {
print("Started session at \(presentationTime)")
self.assetWriter.startSession(atSourceTime: presentationTime)
self.recordingStartSampleTime = presentationTime
self.lastVideoSampleTime = presentationTime
self.startedSession = true
}
if self.recordingStartSampleTime == .invalid || sampleBuffer.presentationTimestamp < self.recordingStartSampleTime {
return
}
if self.assetWriter.status == .writing {
if self.assetWriter.status == .writing && self.startedSession {
if self.recordingStopSampleTime != .invalid && sampleBuffer.presentationTimestamp > self.recordingStopSampleTime {
self.hasAllVideoBuffers = true
self.maybeFinish()
return
}
if let videoInput = self.videoInput, videoInput.isReadyForMoreMediaData {
if let videoInput = self.videoInput {
while !videoInput.isReadyForMoreMediaData {
let maxDate = Date(timeIntervalSinceNow: 0.05)
RunLoop.current.run(until: maxDate)
}
}
if let videoInput = self.videoInput {
let time = CACurrentMediaTime()
if let previousPresentationTime = self.previousPresentationTime, let previousAppendTime = self.previousAppendTime {
print("appending \(presentationTime.seconds) (\(presentationTime.seconds - previousPresentationTime) ) on \(time) (\(time - previousAppendTime)")
}
self.previousPresentationTime = presentationTime.seconds
self.previousAppendTime = time
if videoInput.append(sampleBuffer) {
self.lastVideoSampleTime = presentationTime
let startTime = self.recordingStartSampleTime
let duration = presentationTime - startTime
self._duration = duration
let _ = self._duration.modify { _ in return duration }
}
if !self.savedTransitionImage, let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) {
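The key change in the hunk above: instead of silently skipping a frame when the writer input is momentarily busy, the new code spins the current run loop in 50 ms slices until isReadyForMoreMediaData becomes true, then appends. A condensed sketch of that wait-then-append pattern (helper name is illustrative):

    import AVFoundation
    import CoreMedia

    func appendWaitingForReadiness(_ sampleBuffer: CMSampleBuffer, to input: AVAssetWriterInput) -> Bool {
        // The recorder queue is serial, so spinning here intentionally trades a
        // little latency for not losing samples while the writer catches up.
        while !input.isReadyForMoreMediaData {
            RunLoop.current.run(until: Date(timeIntervalSinceNow: 0.05))
        }
        return input.append(sampleBuffer)
    }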
@@ -220,16 +246,12 @@ private final class VideoRecorderImpl {
}
public func appendAudioSampleBuffer(_ sampleBuffer: CMSampleBuffer) {
if let _ = self.hasError() {
return
}
guard let formatDescription = CMSampleBufferGetFormatDescription(sampleBuffer), CMFormatDescriptionGetMediaType(formatDescription) == kCMMediaType_Audio else {
return
}
self.queue.async {
guard !self.stopped && self.error.with({ $0 }) == nil else {
guard self.hasError() == nil && !self.stopped else {
return
}
guard let formatDescription = CMSampleBufferGetFormatDescription(sampleBuffer), CMFormatDescriptionGetMediaType(formatDescription) == kCMMediaType_Audio else {
return
}
@@ -304,7 +326,7 @@ private final class VideoRecorderImpl {
}
return
}
self.stopped = true
let _ = self._stopped.modify { _ in return true }
self.pendingAudioSampleBuffers = []
if self.assetWriter.status == .writing {
self.assetWriter.cancelWriting()
@@ -318,7 +340,7 @@ private final class VideoRecorderImpl {
}
public var isRecording: Bool {
self.queue.sync { !(self.hasAllVideoBuffers && self.hasAllAudioBuffers) }
return !self.stopped
}
public func stopRecording() {
@@ -334,17 +356,17 @@ private final class VideoRecorderImpl {
}
}
public func maybeFinish() {
self.queue.async {
guard self.hasAllVideoBuffers && self.hasAllAudioBuffers && !self.stopped else {
return
}
self.stopped = true
self.finish()
private func maybeFinish() {
dispatchPrecondition(condition: .onQueue(self.queue))
guard self.hasAllVideoBuffers && self.hasAllAudioBuffers && !self.stopped else {
return
}
let _ = self._stopped.modify { _ in return true }
self.finish()
}
public func finish() {
private func finish() {
dispatchPrecondition(condition: .onQueue(self.queue))
let completion = self.completion
if self.recordingStopSampleTime == .invalid {
DispatchQueue.main.async {
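In the hunk above, maybeFinish() and finish() become private and assert queue ownership with dispatchPrecondition instead of re-dispatching asynchronously; callers are now expected to already be on the recorder queue. A small sketch of that confinement pattern, with type and method names purely illustrative:

    import Dispatch

    final class QueueConfinedRecorder {
        private let queue = DispatchQueue(label: "video-recorder")

        // Must only run on `queue`; the precondition enforces the contract in debug builds.
        private func finishOnQueue() {
            dispatchPrecondition(condition: .onQueue(self.queue))
            // ... drain pending buffers and finish writing ...
        }

        // Public entry point hops onto the queue before finishing.
        func requestFinish() {
            self.queue.async {
                self.finishOnQueue()
            }
        }
    }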
@@ -352,7 +374,7 @@ private final class VideoRecorderImpl {
}
return
}
if let _ = self.error.with({ $0 }) {
DispatchQueue.main.async {
completion(false, nil, nil)
@@ -421,7 +443,13 @@ private final class VideoRecorderImpl {
}
private func internalAppendAudioSampleBuffer(_ sampleBuffer: CMSampleBuffer) -> Bool {
if let audioInput = self.audioInput, audioInput.isReadyForMoreMediaData {
if self.startedSession, let audioInput = self.audioInput {
while !audioInput.isReadyForMoreMediaData {
let maxDate = Date(timeIntervalSinceNow: 0.05)
RunLoop.current.run(until: maxDate)
}
if !audioInput.append(sampleBuffer) {
if let _ = self.assetWriter.error {
return false

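The audio path in the last hunk mirrors the video change and additionally waits for startedSession, so nothing is appended before the writer session has been started from the first video timestamp. A sketch of that ordering, where the helper name and the pending-buffer handling are illustrative:

    import AVFoundation
    import CoreMedia

    func appendAudioSample(_ sampleBuffer: CMSampleBuffer,
                           to input: AVAssetWriterInput,
                           startedSession: Bool,
                           pending: inout [CMSampleBuffer]) -> Bool {
        guard startedSession else {
            // Hold early audio until the session's source time is known.
            pending.append(sampleBuffer)
            return true
        }
        // Same wait-then-append pattern as the video input.
        while !input.isReadyForMoreMediaData {
            RunLoop.current.run(until: Date(timeIntervalSinceNow: 0.05))
        }
        return input.append(sampleBuffer)
    }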
View File

@@ -695,7 +695,7 @@ public class VideoMessageCameraScreen: ViewController {
func withReadyCamera(isFirstTime: Bool = false, _ f: @escaping () -> Void) {
let previewReady: Signal<Bool, NoError>
if #available(iOS 13.0, *) {
previewReady = self.cameraState.isDualCameraEnabled ? self.additionalPreviewView.isPreviewing : self.mainPreviewView.isPreviewing |> delay(0.2, queue: Queue.mainQueue())
previewReady = self.cameraState.isDualCameraEnabled ? self.additionalPreviewView.isPreviewing : self.mainPreviewView.isPreviewing |> delay(0.25, queue: Queue.mainQueue())
} else {
previewReady = .single(true) |> delay(0.35, queue: Queue.mainQueue())
}
@@ -740,7 +740,7 @@ public class VideoMessageCameraScreen: ViewController {
position: self.cameraState.position,
isDualEnabled: self.cameraState.isDualCameraEnabled,
audio: true,
photo: true,
photo: false,
metadata: false,
isRoundVideo: true
),