Various video/audio recording improvements

Ilya Laktyushin 2024-01-27 18:43:10 +04:00
parent bc99a483e1
commit 79f167d01d
4 changed files with 56 additions and 54 deletions

View File

@@ -274,7 +274,7 @@ private final class VideoRecorderImpl {
                 return
             }
-            if self.recordingStartSampleTime != .invalid { //self.assetWriter.status == .writing {
+            if self.recordingStartSampleTime != .invalid {
                 if sampleBuffer.presentationTimestamp < self.recordingStartSampleTime {
                     return
                 }
@@ -336,7 +336,7 @@ private final class VideoRecorderImpl {
     public func maybeFinish() {
         self.queue.async {
-            guard self.hasAllVideoBuffers && self.hasAllVideoBuffers else {
+            guard self.hasAllVideoBuffers && self.hasAllVideoBuffers && !self.stopped else {
                 return
             }
             self.stopped = true
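
Note on the maybeFinish() hunk above: the added !self.stopped condition makes finishing idempotent, so a second call cannot run the teardown twice. A minimal sketch of that guard-flag pattern, using a hypothetical type rather than the actual VideoRecorderImpl:

    import Dispatch

    // Hypothetical illustration of the one-shot guard used above: the finishing
    // work runs at most once, even if maybeFinish() is called repeatedly.
    final class OneShotRecorder {
        private let queue = DispatchQueue(label: "recorder.queue")
        private var stopped = false
        private var hasAllVideoBuffers = true

        func maybeFinish(completion: @escaping () -> Void) {
            self.queue.async {
                guard self.hasAllVideoBuffers && !self.stopped else {
                    return
                }
                self.stopped = true
                completion()
            }
        }
    }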
@@ -345,7 +345,6 @@ private final class VideoRecorderImpl {
     }
     
     public func finish() {
-        self.queue.async {
             let completion = self.completion
             if self.recordingStopSampleTime == .invalid {
                 DispatchQueue.main.async {
@@ -390,7 +389,6 @@ private final class VideoRecorderImpl {
                 }
             }
         }
-    }
     
     private func tryAppendingPendingAudioBuffers() -> Bool {
         dispatchPrecondition(condition: .onQueue(self.queue))
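
The two finish() hunks above drop the self.queue.async wrapper and its matching closing brace, so finish() now runs synchronously on whatever queue calls it; presumably call sites such as maybeFinish() are already on the recording queue and do the dispatching themselves. A reduced sketch of that call flow, with hypothetical names:

    import Dispatch

    // Hypothetical sketch: only the outer entry point hops onto the serial
    // queue; the inner finish() assumes it is already running on that queue.
    final class RecorderSketch {
        private let queue = DispatchQueue(label: "recorder.queue")
        private var stopped = false

        func maybeFinish() {
            self.queue.async {
                guard !self.stopped else {
                    return
                }
                self.stopped = true
                self.finish()
            }
        }

        private func finish() {
            dispatchPrecondition(condition: .onQueue(self.queue))
            // flush pending buffers and complete the asset writer here
        }
    }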

View File

@@ -960,6 +960,17 @@ public final class ManagedAudioSession: NSObject {
            try AVAudioSession.sharedInstance().overrideOutputAudioPort(.none)
        }
        
+        if case let .record(speaker, _) = type, !speaker, let input = AVAudioSession.sharedInstance().availableInputs?.first {
+            if let dataSources = input.dataSources {
+                for source in dataSources {
+                    if source.dataSourceName.contains("Front") {
+                        try? input.setPreferredDataSource(source)
+                        break
+                    }
+                }
+            }
+        }
+        
        if resetToBuiltin {
            var updatedType = type
            if case .record(false, let withOthers) = updatedType, self.isHeadsetPluggedInValue {
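
The block added to ManagedAudioSession above asks the first available input for a data source whose name contains "Front" and makes it the preferred one when recording with the speaker route disabled, i.e. biasing capture toward the front-facing microphone. A self-contained sketch of the same AVAudioSession calls, outside the Telegram wrapper:

    import AVFoundation

    // Prefer the "Front" data source of the first available input, if one exists.
    // Mirrors the AVAudioSession API used in the added block above.
    func preferFrontMicrophoneDataSource() {
        let session = AVAudioSession.sharedInstance()
        guard let input = session.availableInputs?.first else {
            return
        }
        if let dataSources = input.dataSources {
            for source in dataSources where source.dataSourceName.contains("Front") {
                // setPreferredDataSource(_:) throws if the source is not supported.
                try? input.setPreferredDataSource(source)
                break
            }
        }
    }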

View File

@@ -150,7 +150,6 @@ final class ManagedAudioRecorderContext {
     private let beganWithTone: (Bool) -> Void
     
     private var paused = true
-    private var manuallyPaused = false
     
     private let queue: Queue
     private let mediaManager: MediaManager
@@ -414,11 +413,9 @@ final class ManagedAudioRecorderContext {
         return Signal { subscriber in
             queue.async {
                 if let strongSelf = self {
-                    if !strongSelf.manuallyPaused {
                         strongSelf.hasAudioSession = false
                         strongSelf.stop()
                         strongSelf.recordingState.set(.stopped)
-                    }
                     subscriber.putCompletion()
                 }
             }
@@ -453,18 +450,14 @@ final class ManagedAudioRecorderContext {
     func pause() {
         assert(self.queue.isCurrent())
         
-        self.manuallyPaused = true
+        self.stop()
     }
     
     func resume() {
         assert(self.queue.isCurrent())
         
-        if self.manuallyPaused {
-            self.manuallyPaused = false
-        } else if self.paused {
             self.start()
-        }
     }
     
     func stop() {
         assert(self.queue.isCurrent())
@@ -507,7 +500,7 @@ final class ManagedAudioRecorderContext {
             free(buffer.mData)
         }
         
-        if !self.processSamples || self.manuallyPaused {
+        if !self.processSamples {
             return
         }
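
Taken together, the ManagedAudioRecorderContext hunks above remove the separate manuallyPaused flag: pause() now simply stops the recorder, resume() restarts it, and the audio-session and sample-processing paths no longer need the extra check. A reduced sketch of the simplified lifecycle, with hypothetical names (the real context also manages the audio session, tone playback and recording-state signals):

    // Hypothetical, minimal shape of the pause/resume lifecycle after this
    // commit: pause/resume become thin wrappers over stop/start.
    final class RecorderLifecycleSketch {
        private var paused = true

        func start() {
            self.paused = false
            // start the audio unit and begin appending samples
        }

        func pause() {
            // previously: set a manuallyPaused flag and keep the unit running
            self.stop()
        }

        func resume() {
            // previously: gated on the manuallyPaused / paused flags
            self.start()
        }

        func stop() {
            self.paused = true
            // stop the audio unit
        }
    }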

View File

@@ -1,5 +1,5 @@
 {
-    "app": "10.6.3",
+    "app": "10.6.4",
     "bazel": "6.4.0",
     "xcode": "15.1",
     "macos": "13.0"