diff --git a/submodules/Camera/Sources/VideoRecorder.swift b/submodules/Camera/Sources/VideoRecorder.swift
index 1d5f9352cc..bed6803b69 100644
--- a/submodules/Camera/Sources/VideoRecorder.swift
+++ b/submodules/Camera/Sources/VideoRecorder.swift
@@ -274,7 +274,7 @@ private final class VideoRecorderImpl {
                 return
             }
             
-            if self.recordingStartSampleTime != .invalid { //self.assetWriter.status == .writing {
+            if self.recordingStartSampleTime != .invalid {
                 if sampleBuffer.presentationTimestamp < self.recordingStartSampleTime {
                     return
                 }
@@ -336,7 +336,7 @@ private final class VideoRecorderImpl {
     }
     public func maybeFinish() {
         self.queue.async {
-            guard self.hasAllVideoBuffers && self.hasAllVideoBuffers else {
+            guard self.hasAllVideoBuffers && self.hasAllVideoBuffers && !self.stopped else {
                 return
             }
             self.stopped = true
@@ -345,49 +345,47 @@ private final class VideoRecorderImpl {
     }
     
     public func finish() {
-        self.queue.async {
-            let completion = self.completion
-            if self.recordingStopSampleTime == .invalid {
-                DispatchQueue.main.async {
-                    completion(false, nil, nil)
-                }
-                return
+        let completion = self.completion
+        if self.recordingStopSampleTime == .invalid {
+            DispatchQueue.main.async {
+                completion(false, nil, nil)
             }
-            
-            if let _ = self.error.with({ $0 }) {
-                DispatchQueue.main.async {
-                    completion(false, nil, nil)
-                }
-                return
+            return
+        }
+        
+        if let _ = self.error.with({ $0 }) {
+            DispatchQueue.main.async {
+                completion(false, nil, nil)
             }
-            
-            if !self.tryAppendingPendingAudioBuffers() {
-                DispatchQueue.main.async {
-                    completion(false, nil, nil)
-                }
-                return
+            return
+        }
+        
+        if !self.tryAppendingPendingAudioBuffers() {
+            DispatchQueue.main.async {
+                completion(false, nil, nil)
             }
-            
-            if self.assetWriter.status == .writing {
-                self.assetWriter.finishWriting {
-                    if let _ = self.assetWriter.error {
-                        DispatchQueue.main.async {
-                            completion(false, nil, nil)
-                        }
-                    } else {
-                        DispatchQueue.main.async {
-                            completion(true, self.transitionImage, self.positionChangeTimestamps)
-                        }
+            return
+        }
+        
+        if self.assetWriter.status == .writing {
+            self.assetWriter.finishWriting {
+                if let _ = self.assetWriter.error {
+                    DispatchQueue.main.async {
+                        completion(false, nil, nil)
+                    }
+                } else {
+                    DispatchQueue.main.async {
+                        completion(true, self.transitionImage, self.positionChangeTimestamps)
                     }
                 }
-            } else if let _ = self.assetWriter.error {
-                DispatchQueue.main.async {
-                    completion(false, nil, nil)
-                }
-            } else {
-                DispatchQueue.main.async {
-                    completion(false, nil, nil)
-                }
+            }
+        } else if let _ = self.assetWriter.error {
+            DispatchQueue.main.async {
+                completion(false, nil, nil)
+            }
+        } else {
+            DispatchQueue.main.async {
+                completion(false, nil, nil)
             }
         }
     }
diff --git a/submodules/TelegramAudio/Sources/ManagedAudioSession.swift b/submodules/TelegramAudio/Sources/ManagedAudioSession.swift
index 7b1431d82f..0c9564781f 100644
--- a/submodules/TelegramAudio/Sources/ManagedAudioSession.swift
+++ b/submodules/TelegramAudio/Sources/ManagedAudioSession.swift
@@ -960,6 +960,17 @@ public final class ManagedAudioSession: NSObject {
                 try AVAudioSession.sharedInstance().overrideOutputAudioPort(.none)
             }
             
+            if case let .record(speaker, _) = type, !speaker, let input = AVAudioSession.sharedInstance().availableInputs?.first {
+                if let dataSources = input.dataSources {
+                    for source in dataSources {
+                        if source.dataSourceName.contains("Front") {
+                            try? input.setPreferredDataSource(source)
+                            break
+                        }
+                    }
+                }
+            }
+            
             if resetToBuiltin {
                 var updatedType = type
                 if case .record(false, let withOthers) = updatedType, self.isHeadsetPluggedInValue {
diff --git a/submodules/TelegramUI/Sources/ManagedAudioRecorder.swift b/submodules/TelegramUI/Sources/ManagedAudioRecorder.swift
index cb8f91145c..18cb29e417 100644
--- a/submodules/TelegramUI/Sources/ManagedAudioRecorder.swift
+++ b/submodules/TelegramUI/Sources/ManagedAudioRecorder.swift
@@ -150,7 +150,6 @@ final class ManagedAudioRecorderContext {
     private let beganWithTone: (Bool) -> Void
     
     private var paused = true
-    private var manuallyPaused = false
     
     private let queue: Queue
     private let mediaManager: MediaManager
@@ -414,11 +413,9 @@ final class ManagedAudioRecorderContext {
             return Signal { subscriber in
                 queue.async {
                     if let strongSelf = self {
-                        if !strongSelf.manuallyPaused {
-                            strongSelf.hasAudioSession = false
-                            strongSelf.stop()
-                            strongSelf.recordingState.set(.stopped)
-                        }
+                        strongSelf.hasAudioSession = false
+                        strongSelf.stop()
+                        strongSelf.recordingState.set(.stopped)
                         subscriber.putCompletion()
                     }
                 }
@@ -453,17 +450,13 @@ final class ManagedAudioRecorderContext {
     func pause() {
         assert(self.queue.isCurrent())
         
-        self.manuallyPaused = true
+        self.stop()
     }
     
     func resume() {
         assert(self.queue.isCurrent())
         
-        if self.manuallyPaused {
-            self.manuallyPaused = false
-        } else if self.paused {
-            self.start()
-        }
+        self.start()
     }
     
     func stop() {
@@ -507,7 +500,7 @@ final class ManagedAudioRecorderContext {
             free(buffer.mData)
         }
         
-        if !self.processSamples || self.manuallyPaused {
+        if !self.processSamples {
             return
         }