mirror of
https://github.com/Swiftgram/Telegram-iOS.git
synced 2025-10-09 03:20:48 +00:00
Camera and editor improvements

This commit is contained in:
parent 2d738fbfac
commit 57eceb0aef
@@ -142,8 +142,17 @@ private final class CameraContext {
         self.session.stopRunning()
     }
     
-    func focus(at point: CGPoint) {
-        self.device.setFocusPoint(point, focusMode: .continuousAutoFocus, exposureMode: .continuousAutoExposure, monitorSubjectAreaChange: true)
+    func focus(at point: CGPoint, autoFocus: Bool) {
+        let focusMode: AVCaptureDevice.FocusMode
+        let exposureMode: AVCaptureDevice.ExposureMode
+        if autoFocus {
+            focusMode = .continuousAutoFocus
+            exposureMode = .continuousAutoExposure
+        } else {
+            focusMode = .autoFocus
+            exposureMode = .autoExpose
+        }
+        self.device.setFocusPoint(point, focusMode: focusMode, exposureMode: exposureMode, monitorSubjectAreaChange: true)
     }
     
     func setFps(_ fps: Float64) {
@@ -276,6 +285,9 @@ public final class Camera {
         self.metrics = Camera.Metrics(model: DeviceModel.current)
         
         let session = AVCaptureSession()
+        session.usesApplicationAudioSession = true
+        session.automaticallyConfiguresApplicationAudioSession = false
+        session.automaticallyConfiguresCaptureDeviceForWideColor = false
         if let previewView {
             previewView.session = session
         }
@@ -373,10 +385,10 @@ public final class Camera {
             }
         }
     }
     
-    public func focus(at point: CGPoint) {
+    public func focus(at point: CGPoint, autoFocus: Bool = true) {
         self.queue.async {
             if let context = self.contextRef?.takeUnretainedValue() {
-                context.focus(at: point)
+                context.focus(at: point, autoFocus: autoFocus)
             }
         }
    }
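
For readers scanning the hunks: the new autoFocus flag selects between continuous focus/exposure and a one-shot pass locked at a specific point. A minimal sketch of the two call patterns this commit uses (the camera and previewView instances, and tapLocation, are assumed to exist):

    // Continuous autofocus at the frame center, as done when capture starts:
    camera.focus(at: CGPoint(x: 0.5, y: 0.5), autoFocus: true)
    
    // One-shot tap-to-focus: convert the tap from view coordinates into the
    // camera's normalized space, then run a single focus/exposure pass there.
    let point = previewView.cameraPoint(for: tapLocation)
    camera.focus(at: point, autoFocus: false)
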
@@ -8,9 +8,21 @@ private let defaultFPS: Double = 30.0
 final class CameraDevice {
     var position: Camera.Position = .back
     
+    deinit {
+        if let videoDevice = self.videoDevice {
+            self.unsubscribeFromChanges(videoDevice)
+        }
+    }
+    
     public private(set) var videoDevice: AVCaptureDevice? = nil {
         didSet {
+            if let previousVideoDevice = oldValue {
+                self.unsubscribeFromChanges(previousVideoDevice)
+            }
             self.videoDevicePromise.set(.single(self.videoDevice))
+            if let videoDevice = self.videoDevice {
+                self.subscribeForChanges(videoDevice)
+            }
         }
     }
     private var videoDevicePromise = Promise<AVCaptureDevice?>()
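
The deinit and didSet additions follow the usual observe-one-object-at-a-time discipline for NotificationCenter: remove the observation on the outgoing AVCaptureDevice before adding one for the incoming device, and clean up on deallocation. A generic, self-contained sketch of the same pattern using the notification this commit observes (the class and property names are illustrative, not from the commit):

    import AVFoundation
    import Foundation
    
    final class DeviceObserver: NSObject {
        var device: AVCaptureDevice? {
            didSet {
                // Unsubscribe from the previous device first so we never
                // observe two devices at once.
                if let previous = oldValue {
                    NotificationCenter.default.removeObserver(self, name: .AVCaptureDeviceSubjectAreaDidChange, object: previous)
                }
                if let current = self.device {
                    NotificationCenter.default.addObserver(self, selector: #selector(self.subjectAreaChanged), name: .AVCaptureDeviceSubjectAreaDidChange, object: current)
                }
            }
        }
        
        deinit {
            if let current = self.device {
                NotificationCenter.default.removeObserver(self, name: .AVCaptureDeviceSubjectAreaDidChange, object: current)
            }
        }
        
        @objc private func subjectAreaChanged() {
            // Typically triggers a refocus pass, as CameraDevice does.
        }
    }
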
@@ -93,12 +105,12 @@ final class CameraDevice {
         }
     }
     
-    private func subscribeForChanges() {
-        NotificationCenter.default.addObserver(self, selector: #selector(self.subjectAreaChanged), name: Notification.Name.AVCaptureDeviceSubjectAreaDidChange, object: self.videoDevice)
+    private func subscribeForChanges(_ device: AVCaptureDevice) {
+        NotificationCenter.default.addObserver(self, selector: #selector(self.subjectAreaChanged), name: Notification.Name.AVCaptureDeviceSubjectAreaDidChange, object: device)
     }
     
-    private func unsubscribeFromChanges() {
-        NotificationCenter.default.removeObserver(self, name: Notification.Name.AVCaptureDeviceSubjectAreaDidChange, object: self.videoDevice)
+    private func unsubscribeFromChanges(_ device: AVCaptureDevice) {
+        NotificationCenter.default.removeObserver(self, name: Notification.Name.AVCaptureDeviceSubjectAreaDidChange, object: device)
     }
     
     @objc private func subjectAreaChanged() {
@@ -171,6 +183,12 @@ final class CameraDevice {
             device.focusPointOfInterest = point
             device.focusMode = focusMode
         }
         
+        device.isSubjectAreaChangeMonitoringEnabled = monitorSubjectAreaChange
+        
+        if abs(device.exposureTargetBias) > 0.0 {
+            device.setExposureTargetBias(0.0)
+        }
     }
 }
 
@@ -109,11 +109,12 @@ final class CameraOutput: NSObject {
     
     func configureVideoStabilization() {
         if let videoDataOutputConnection = self.videoOutput.connection(with: .video), videoDataOutputConnection.isVideoStabilizationSupported {
-            if #available(iOS 13.0, *) {
-                videoDataOutputConnection.preferredVideoStabilizationMode = .cinematicExtended
-            } else {
-                videoDataOutputConnection.preferredVideoStabilizationMode = .cinematic
-            }
+            videoDataOutputConnection.preferredVideoStabilizationMode = .standard
+//            if #available(iOS 13.0, *) {
+//                videoDataOutputConnection.preferredVideoStabilizationMode = .cinematicExtended
+//            } else {
+//                videoDataOutputConnection.preferredVideoStabilizationMode = .cinematic
+//            }
         }
     }
     
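
The old path picked the strongest cinematic stabilization available (.cinematicExtended needs iOS 13+); the commit pins .standard, which stabilizes less aggressively but adds less latency and cropping, and keeps the old selection commented out rather than deleting it. For reference, a standalone helper equivalent to the removed branch (the helper itself is an assumption; the modes and availability check are from the diff):

    import AVFoundation
    
    // Strongest cinematic mode the OS offers, or .off when the connection
    // does not support stabilization at all.
    func cinematicStabilizationMode(for connection: AVCaptureConnection) -> AVCaptureVideoStabilizationMode {
        guard connection.isVideoStabilizationSupported else {
            return .off
        }
        if #available(iOS 13.0, *) {
            return .cinematicExtended
        } else {
            return .cinematic
        }
    }
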
@@ -178,7 +179,7 @@ final class CameraOutput: NSObject {
         let outputFileName = NSUUID().uuidString
         let outputFilePath = NSTemporaryDirectory() + outputFileName + ".mp4"
         let outputFileURL = URL(fileURLWithPath: outputFilePath)
-        let videoRecorder = VideoRecorder(preset: MediaPreset(videoSettings: videoSettings, audioSettings: audioSettings), videoTransform: CGAffineTransform(rotationAngle: .pi / 2.0), fileUrl: outputFileURL, completion: { [weak self] result in
+        let videoRecorder = VideoRecorder(configuration: VideoRecorder.Configuration(videoSettings: videoSettings, audioSettings: audioSettings), videoTransform: CGAffineTransform(rotationAngle: .pi / 2.0), fileUrl: outputFileURL, completion: { [weak self] result in
             if case .success = result {
                 self?.recordingCompletionPipe.putNext(outputFilePath)
             } else {
@@ -186,7 +187,8 @@ final class CameraOutput: NSObject {
             }
         })
         
-        videoRecorder.start()
+        
+        videoRecorder?.start()
         self.videoRecorder = videoRecorder
         
         return Signal { subscriber in
@@ -244,13 +246,8 @@ extension CameraOutput: AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureA
 //            self.processSampleBuffer?(finalVideoPixelBuffer, connection)
 //        }
         
-        if let videoRecorder = self.videoRecorder, videoRecorder.isRecording || videoRecorder.isStopping {
-            let mediaType = sampleBuffer.type
-            if mediaType == kCMMediaType_Video {
-                videoRecorder.appendVideo(sampleBuffer: sampleBuffer)
-            } else if mediaType == kCMMediaType_Audio {
-                videoRecorder.appendAudio(sampleBuffer: sampleBuffer)
-            }
+        if let videoRecorder = self.videoRecorder, videoRecorder.isRecording {
+            videoRecorder.appendSampleBuffer(sampleBuffer)
         }
     }
     
@@ -108,6 +108,10 @@ public class CameraSimplePreviewView: UIView {
         }
         |> distinctUntilChanged
     }
     
+    public func cameraPoint(for location: CGPoint) -> CGPoint {
+        return self.videoPreviewLayer.captureDevicePointConverted(fromLayerPoint: location)
+    }
+    
 }
 
 public class CameraPreviewView: MTKView {
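
cameraPoint(for:) wraps AVCaptureVideoPreviewLayer's built-in coordinate conversion: a point in layer (view) coordinates maps into the device's normalized (0,0)-(1,1) space, which is what focusPointOfInterest expects. A minimal round-trip sketch (the layer setup is assumed, not from the commit):

    import AVFoundation
    import UIKit
    
    let previewLayer = AVCaptureVideoPreviewLayer()
    previewLayer.videoGravity = .resizeAspectFill
    
    // A tap in layer coordinates becomes a normalized device point...
    let tap = CGPoint(x: 120.0, y: 240.0)
    let devicePoint = previewLayer.captureDevicePointConverted(fromLayerPoint: tap)
    
    // ...and the inverse mapping goes back to layer coordinates.
    let layerPoint = previewLayer.layerPointConverted(fromCaptureDevicePoint: devicePoint)
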
@@ -1,129 +1,439 @@
 import Foundation
 import AVFoundation
 import SwiftSignalKit
+import TelegramCore
 
-struct MediaPreset {
-    var videoSettings: [String: Any]
-    var audioSettings: [String: Any]
-    
-    init(videoSettings: [String: Any], audioSettings: [String: Any]) {
-        self.videoSettings = videoSettings
-        self.audioSettings = audioSettings
-    }
-    
-    var hasAudio: Bool {
-        return !self.audioSettings.isEmpty
-    }
-}
+private extension CMSampleBuffer {
+    var endTime: CMTime {
+        let presentationTime = CMSampleBufferGetPresentationTimeStamp(self)
+        let duration = CMSampleBufferGetDuration(self)
+        return presentationTime + duration
+    }
+}
 
-final class VideoRecorder {
+private final class VideoRecorderImpl {
+    public enum RecorderError: LocalizedError {
+        case generic
+        case avError(Error)
+        
+        public var errorDescription: String? {
+            switch self {
+            case .generic:
+                return "Error"
+            case let .avError(error):
+                return error.localizedDescription
+            }
+        }
+    }
+    
+    private let queue = DispatchQueue(label: "VideoRecorder")
+    
+    private var assetWriter: AVAssetWriter
+    private var videoInput: AVAssetWriterInput?
+    private var audioInput: AVAssetWriterInput?
+    
+    private var pendingAudioSampleBuffers: [CMSampleBuffer] = []
+    
+    private var _duration: CMTime = .zero
+    public var duration: CMTime {
+        self.queue.sync { _duration }
+    }
+    
+    private var lastVideoSampleTime: CMTime = .invalid
+    private var recordingStartSampleTime: CMTime = .invalid
+    private var recordingStopSampleTime: CMTime = .invalid
+    
+    private let configuration: VideoRecorder.Configuration
+    private let videoTransform: CGAffineTransform
+    private let url: URL
+    fileprivate var completion: (Bool) -> Void = { _ in }
+    
+    private let error = Atomic<Error?>(value: nil)
+    
+    private var stopped = false
+    private var hasAllVideoBuffers = false
+    private var hasAllAudioBuffers = false
+    
+    public init?(configuration: VideoRecorder.Configuration, videoTransform: CGAffineTransform, fileUrl: URL) {
+        self.configuration = configuration
+        self.videoTransform = videoTransform
+        self.url = fileUrl
+        
+        try? FileManager.default.removeItem(at: url)
+        guard let assetWriter = try? AVAssetWriter(url: url, fileType: .mp4) else {
+            return nil
+        }
+        self.assetWriter = assetWriter
+        self.assetWriter.shouldOptimizeForNetworkUse = false
+    }
+    
+    private func hasError() -> Error? {
+        return self.error.with { $0 }
+    }
+    
+    public func start() {
+        self.queue.async {
+            self.recordingStartSampleTime = CMTime(seconds: CACurrentMediaTime(), preferredTimescale: CMTimeScale(NSEC_PER_SEC))
+        }
+    }
+    
+    public func appendVideoSampleBuffer(_ sampleBuffer: CMSampleBuffer) {
+        if let _ = self.hasError() {
+            return
+        }
+        
+        guard let formatDescription = CMSampleBufferGetFormatDescription(sampleBuffer), CMFormatDescriptionGetMediaType(formatDescription) == kCMMediaType_Video else {
+            return
+        }
+        
+        let presentationTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
+        self.queue.async {
+            guard !self.stopped && self.error.with({ $0 }) == nil else {
+                return
+            }
+            
+            var failed = false
+            if self.videoInput == nil {
+                let videoSettings = self.configuration.videoSettings
+                if self.assetWriter.canApply(outputSettings: videoSettings, forMediaType: .video) {
+                    let videoInput = AVAssetWriterInput(mediaType: .video, outputSettings: videoSettings, sourceFormatHint: formatDescription)
+                    videoInput.expectsMediaDataInRealTime = true
+                    videoInput.transform = self.videoTransform
+                    if self.assetWriter.canAdd(videoInput) {
+                        self.assetWriter.add(videoInput)
+                        self.videoInput = videoInput
+                    } else {
+                        failed = true
+                    }
+                } else {
+                    failed = true
+                }
+            }
+            
+            if failed {
+                print("error")
+                return
+            }
+            
+            if self.assetWriter.status == .unknown {
+                if sampleBuffer.presentationTimestamp < self.recordingStartSampleTime {
+                    return
+                }
+                if !self.assetWriter.startWriting() {
+                    if let error = self.assetWriter.error {
+                        self.transitionToFailedStatus(error: .avError(error))
+                        return
+                    }
+                }
+                
+                self.assetWriter.startSession(atSourceTime: presentationTime)
+                self.recordingStartSampleTime = presentationTime
+                self.lastVideoSampleTime = presentationTime
+            }
+            
+            if self.assetWriter.status == .writing {
+                if self.recordingStopSampleTime != .invalid && sampleBuffer.presentationTimestamp > self.recordingStopSampleTime {
+                    self.hasAllVideoBuffers = true
+                    self.maybeFinish()
+                    return
+                }
+                
+                if let videoInput = self.videoInput, videoInput.isReadyForMoreMediaData {
+                    if videoInput.append(sampleBuffer) {
+                        self.lastVideoSampleTime = presentationTime
+                        let startTime = self.recordingStartSampleTime
+                        let duration = presentationTime - startTime
+                        self._duration = duration
+                    } else {
+                        print("error")
+                    }
+                    if !self.tryAppendingPendingAudioBuffers() {
+                        self.transitionToFailedStatus(error: .generic)
+                    }
+                }
+            }
+        }
+    }
+    
public func appendAudioSampleBuffer(_ sampleBuffer: CMSampleBuffer) {
|
||||||
|
if let _ = self.hasError() {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
guard let formatDescription = CMSampleBufferGetFormatDescription(sampleBuffer), CMFormatDescriptionGetMediaType(formatDescription) == kCMMediaType_Audio else {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
self.queue.async {
|
||||||
|
guard !self.stopped && self.error.with({ $0 }) == nil else {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
var failed = false
|
||||||
|
if self.audioInput == nil {
|
||||||
|
var audioSettings = self.configuration.audioSettings
|
||||||
|
if let currentAudioStreamBasicDescription = CMAudioFormatDescriptionGetStreamBasicDescription(formatDescription) {
|
||||||
|
audioSettings[AVSampleRateKey] = currentAudioStreamBasicDescription.pointee.mSampleRate
|
||||||
|
audioSettings[AVNumberOfChannelsKey] = currentAudioStreamBasicDescription.pointee.mChannelsPerFrame
|
||||||
|
}
|
||||||
|
|
||||||
|
var audioChannelLayoutSize: Int = 0
|
||||||
|
let currentChannelLayout = CMAudioFormatDescriptionGetChannelLayout(formatDescription, sizeOut: &audioChannelLayoutSize)
|
||||||
|
let currentChannelLayoutData: Data
|
||||||
|
if let currentChannelLayout = currentChannelLayout, audioChannelLayoutSize > 0 {
|
||||||
|
currentChannelLayoutData = Data(bytes: currentChannelLayout, count: audioChannelLayoutSize)
|
||||||
|
} else {
|
||||||
|
currentChannelLayoutData = Data()
|
||||||
|
}
|
||||||
|
audioSettings[AVChannelLayoutKey] = currentChannelLayoutData
|
||||||
|
|
||||||
|
if self.assetWriter.canApply(outputSettings: audioSettings, forMediaType: .audio) {
|
||||||
|
let audioInput = AVAssetWriterInput(mediaType: .audio, outputSettings: audioSettings, sourceFormatHint: formatDescription)
|
||||||
|
audioInput.expectsMediaDataInRealTime = true
|
||||||
|
if self.assetWriter.canAdd(audioInput) {
|
||||||
|
self.assetWriter.add(audioInput)
|
||||||
|
self.audioInput = audioInput
|
||||||
|
} else {
|
||||||
|
failed = true
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
failed = true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if failed {
|
||||||
|
print("error")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
if self.assetWriter.status == .writing {
|
||||||
|
if sampleBuffer.presentationTimestamp < self.recordingStartSampleTime {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if self.recordingStopSampleTime != .invalid && sampleBuffer.presentationTimestamp > self.recordingStopSampleTime {
|
||||||
|
self.hasAllAudioBuffers = true
|
||||||
|
self.maybeFinish()
|
||||||
|
return
|
||||||
|
}
|
||||||
|
var result = false
|
||||||
|
if self.tryAppendingPendingAudioBuffers() {
|
||||||
|
if self.tryAppendingAudioSampleBuffer(sampleBuffer) {
|
||||||
|
result = true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if !result {
|
||||||
|
self.transitionToFailedStatus(error: .generic)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
+    public func cancelRecording(completion: @escaping () -> Void) {
+        self.queue.async {
+            if self.stopped {
+                DispatchQueue.main.async {
+                    completion()
+                }
+                return
+            }
+            self.stopped = true
+            self.pendingAudioSampleBuffers = []
+            if self.assetWriter.status == .writing {
+                self.assetWriter.cancelWriting()
+            }
+            let fileManager = FileManager()
+            try? fileManager.removeItem(at: self.url)
+            DispatchQueue.main.async {
+                completion()
+            }
+        }
+    }
+    
+    public var isRecording: Bool {
+        self.queue.sync { !(self.hasAllVideoBuffers && self.hasAllAudioBuffers) }
+    }
+    
+    public func stopRecording() {
+        self.queue.async {
+            self.recordingStopSampleTime = CMTime(seconds: CACurrentMediaTime(), preferredTimescale: CMTimeScale(NSEC_PER_SEC))
+        }
+    }
+    
+    public func maybeFinish() {
+        self.queue.async {
+            guard self.hasAllVideoBuffers && self.hasAllAudioBuffers else {
+                return
+            }
+            self.stopped = true
+            self.finish()
+        }
+    }
+    
+    public func finish() {
+        self.queue.async {
+            let completion = self.completion
+            if self.recordingStopSampleTime == .invalid {
+                DispatchQueue.main.async {
+                    completion(false)
+                }
+                return
+            }
+            
+            if let _ = self.error.with({ $0 }) {
+                DispatchQueue.main.async {
+                    completion(false)
+                }
+                return
+            }
+            
+            if !self.tryAppendingPendingAudioBuffers() {
+                DispatchQueue.main.async {
+                    completion(false)
+                }
+                return
+            }
+            
+            if self.assetWriter.status == .writing {
+                self.assetWriter.finishWriting {
+                    if let _ = self.assetWriter.error {
+                        DispatchQueue.main.async {
+                            completion(false)
+                        }
+                    } else {
+                        DispatchQueue.main.async {
+                            completion(true)
+                        }
+                    }
+                }
+            } else if let _ = self.assetWriter.error {
+                DispatchQueue.main.async {
+                    completion(true)
+                }
+            } else {
+                DispatchQueue.main.async {
+                    completion(true)
+                }
+            }
+        }
+    }
+    
+    private func tryAppendingPendingAudioBuffers() -> Bool {
+        dispatchPrecondition(condition: .onQueue(self.queue))
+        guard self.pendingAudioSampleBuffers.count > 0 else {
+            return true
+        }
+        
+        var result = true
+        let (sampleBuffersToAppend, pendingSampleBuffers) = self.pendingAudioSampleBuffers.stableGroup(using: { $0.endTime <= self.lastVideoSampleTime })
+        for sampleBuffer in sampleBuffersToAppend {
+            if !self.internalAppendAudioSampleBuffer(sampleBuffer) {
+                result = false
+                break
+            }
+        }
+        self.pendingAudioSampleBuffers = pendingSampleBuffers
+        return result
+    }
+    
+    private func tryAppendingAudioSampleBuffer(_ sampleBuffer: CMSampleBuffer) -> Bool {
+        dispatchPrecondition(condition: .onQueue(self.queue))
+        
+        var result = true
+        if sampleBuffer.endTime > self.lastVideoSampleTime {
+            self.pendingAudioSampleBuffers.append(sampleBuffer)
+        } else {
+            result = self.internalAppendAudioSampleBuffer(sampleBuffer)
+        }
+        return result
+    }
+    
+    private func internalAppendAudioSampleBuffer(_ sampleBuffer: CMSampleBuffer) -> Bool {
+        if let audioInput = self.audioInput, audioInput.isReadyForMoreMediaData {
+            if !audioInput.append(sampleBuffer) {
+                if let _ = self.assetWriter.error {
+                    return false
+                }
+            }
+        } else {
+            
+        }
+        return true
+    }
+    
+    private func transitionToFailedStatus(error: RecorderError) {
+        let _ = self.error.modify({ _ in return error })
+    }
+}
+
+private extension Sequence {
+    func stableGroup(using predicate: (Element) throws -> Bool) rethrows -> ([Element], [Element]) {
+        var trueGroup: [Element] = []
+        var falseGroup: [Element] = []
+        for element in self {
+            if try predicate(element) {
+                trueGroup.append(element)
+            } else {
+                falseGroup.append(element)
+            }
+        }
+        return (trueGroup, falseGroup)
+    }
+}
+
+public final class VideoRecorder {
+    var duration: Double? {
+        return self.impl.duration.seconds
+    }
+    
     enum Result {
         enum Error {
             case generic
         }
         
         case success
+        case initError(Error)
         case writeError(Error)
         case finishError(Error)
     }
     
+    struct Configuration {
+        var videoSettings: [String: Any]
+        var audioSettings: [String: Any]
+        
+        init(videoSettings: [String: Any], audioSettings: [String: Any]) {
+            self.videoSettings = videoSettings
+            self.audioSettings = audioSettings
+        }
+        
+        var hasAudio: Bool {
+            return !self.audioSettings.isEmpty
+        }
+    }
+    
+    private let impl: VideoRecorderImpl
+    fileprivate let configuration: Configuration
+    fileprivate let videoTransform: CGAffineTransform
+    fileprivate let fileUrl: URL
     private let completion: (Result) -> Void
     
-    private let queue = Queue()
-    private var assetWriter: AVAssetWriter?
+    public var isRecording: Bool {
+        return self.impl.isRecording
+    }
+    
-    private var videoInput: AVAssetWriterInput?
-    private var audioInput: AVAssetWriterInput?
+    init?(configuration: Configuration, videoTransform: CGAffineTransform, fileUrl: URL, completion: @escaping (Result) -> Void) {
+        self.configuration = configuration
     
-    private let preset: MediaPreset
-    private let videoTransform: CGAffineTransform
-    private let fileUrl: URL
-    
-    private (set) var isRecording = false
-    private (set) var isStopping = false
-    private var finishedWriting = false
-    
-    private var captureStartTimestamp: Double?
-    private var firstVideoTimestamp: CMTime?
-    private var lastVideoTimestamp: CMTime?
-    private var lastAudioTimestamp: CMTime?
-    
-    private var pendingAudioBuffers: [CMSampleBuffer] = []
-    
-    init(preset: MediaPreset, videoTransform: CGAffineTransform, fileUrl: URL, completion: @escaping (Result) -> Void) {
-        self.preset = preset
         self.videoTransform = videoTransform
         self.fileUrl = fileUrl
         self.completion = completion
-    }
-    
-    func start() {
-        self.queue.async {
-            guard self.assetWriter == nil else {
-                return
-            }
-            
-            self.captureStartTimestamp = CFAbsoluteTimeGetCurrent()
-            
-            guard let assetWriter = try? AVAssetWriter(url: self.fileUrl, fileType: .mp4) else {
-                return
-            }
-            
-            let videoInput = AVAssetWriterInput(mediaType: .video, outputSettings: self.preset.videoSettings)
-            videoInput.expectsMediaDataInRealTime = true
-            videoInput.transform = self.videoTransform
-            if assetWriter.canAdd(videoInput) {
-                assetWriter.add(videoInput)
-            }
-            
-            let audioInput: AVAssetWriterInput?
-            if self.preset.hasAudio {
-                audioInput = AVAssetWriterInput(mediaType: .audio, outputSettings: self.preset.audioSettings)
-                audioInput!.expectsMediaDataInRealTime = true
-                if assetWriter.canAdd(audioInput!) {
-                    assetWriter.add(audioInput!)
-                }
-            } else {
-                audioInput = nil
-            }
-            
-            self.assetWriter = assetWriter
-            self.videoInput = videoInput
-            self.audioInput = audioInput
-            
-            self.isRecording = true
-            
-            //assetWriter.startWriting()
-        }
-    }
-    
-    func stop() {
-        self.queue.async {
-            guard let captureStartTimestamp = self.captureStartTimestamp, abs(CFAbsoluteTimeGetCurrent() - captureStartTimestamp) > 0.5 else {
-                return
-            }
-            
-            self.isStopping = true
-            
-            if self.audioInput == nil {
-                self.finish()
-            }
-        }
-    }
-    
-    private func finish() {
-        guard let assetWriter = self.assetWriter else {
-            return
-        }
-        
-        self.queue.async {
-            self.isRecording = false
-            self.isStopping = false
-            
-            assetWriter.finishWriting {
-                self.finishedWriting = true
-                
-                if case .completed = assetWriter.status {
+        guard let impl = VideoRecorderImpl(configuration: configuration, videoTransform: videoTransform, fileUrl: fileUrl) else {
+            completion(.initError(.generic))
+            return nil
+        }
+        self.impl = impl
+        impl.completion = { [weak self] success in
+            if let self {
+                if success {
                     self.completion(.success)
                 } else {
                     self.completion(.finishError(.generic))
@@ -132,76 +442,25 @@ final class VideoRecorder {
             }
         }
     }
     
-    func appendVideo(sampleBuffer: CMSampleBuffer) {
-        self.queue.async {
-            guard let assetWriter = self.assetWriter, let videoInput = self.videoInput, (self.isRecording || self.isStopping) && !self.finishedWriting else {
-                return
-            }
-            let timestamp = sampleBuffer.presentationTimestamp
-            if let startTimestamp = self.captureStartTimestamp, timestamp.seconds < startTimestamp {
-                return
-            }
-            
-            switch assetWriter.status {
-            case .unknown:
-                break
-            case .writing:
-                if self.firstVideoTimestamp == nil {
-                    self.firstVideoTimestamp = timestamp
-                    assetWriter.startSession(atSourceTime: timestamp)
-                }
-                while !videoInput.isReadyForMoreMediaData {
-                    RunLoop.current.run(until: Date(timeIntervalSinceNow: 0.1))
-                }
-                
-                if videoInput.append(sampleBuffer) {
-                    self.lastVideoTimestamp = timestamp
-                }
-                
-                if self.audioInput != nil && self.isStopping, let lastVideoTimestamp = self.lastAudioTimestamp, let lastAudioTimestamp = self.lastAudioTimestamp, lastVideoTimestamp >= lastAudioTimestamp {
-                    self.finish()
-                }
-            case .failed:
-                self.isRecording = false
-                self.completion(.writeError(.generic))
-            default:
-                break
-            }
-        }
+    func start() {
+        self.impl.start()
     }
     
-    func appendAudio(sampleBuffer: CMSampleBuffer) {
-        self.queue.async {
-            guard let _ = self.assetWriter, let audioInput = self.audioInput, !self.isStopping && !self.finishedWriting else {
-                return
-            }
-            let timestamp = sampleBuffer.presentationTimestamp
-            
-            if let _ = self.firstVideoTimestamp {
-                if !self.pendingAudioBuffers.isEmpty {
-                    for buffer in self.pendingAudioBuffers {
-                        audioInput.append(buffer)
-                    }
-                    self.pendingAudioBuffers.removeAll()
-                }
-                
-                while !audioInput.isReadyForMoreMediaData {
-                    RunLoop.current.run(until: Date(timeIntervalSinceNow: 0.1))
-                }
-                
-                if audioInput.append(sampleBuffer) {
-                    self.lastAudioTimestamp = timestamp
-                }
-            } else {
-                self.pendingAudioBuffers.append(sampleBuffer)
-            }
-        }
+    func stop() {
+        self.impl.stopRecording()
     }
     
-    var duration: Double? {
-        guard let firstTimestamp = self.firstVideoTimestamp, let lastTimestamp = self.lastVideoTimestamp else {
-            return nil
-        }
-        return (lastTimestamp - firstTimestamp).seconds
+    func appendSampleBuffer(_ sampleBuffer: CMSampleBuffer) {
+        guard let formatDescriptor = CMSampleBufferGetFormatDescription(sampleBuffer) else {
+            return
+        }
+        let type = CMFormatDescriptionGetMediaType(formatDescriptor)
+        if type == kCMMediaType_Video {
+            self.impl.appendVideoSampleBuffer(sampleBuffer)
+        } else if type == kCMMediaType_Audio {
+            if self.configuration.hasAudio {
+                self.impl.appendAudioSampleBuffer(sampleBuffer)
+            }
+        }
     }
 }
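
One detail of the new recorder that is easy to miss: audio sample buffers are written only once the video track has advanced past their end time (endTime = presentation time + duration); anything newer is parked in pendingAudioSampleBuffers and later drained by tryAppendingPendingAudioBuffers, which splits the queue with the order-preserving stableGroup helper. A self-contained sketch of that partition (the extension is copied from the diff; the timestamps are toy data):

    private extension Sequence {
        func stableGroup(using predicate: (Element) throws -> Bool) rethrows -> ([Element], [Element]) {
            var trueGroup: [Element] = []
            var falseGroup: [Element] = []
            for element in self {
                if try predicate(element) {
                    trueGroup.append(element)
                } else {
                    falseGroup.append(element)
                }
            }
            return (trueGroup, falseGroup)
        }
    }
    
    // Buffers whose end time is at or before the last written video time are
    // safe to append; the rest stay pending, original order preserved.
    let lastVideoTime = 5.0
    let audioEndTimes = [3.2, 4.9, 5.6, 6.1]
    let (ready, pending) = audioEndTimes.stableGroup(using: { $0 <= lastVideoTime })
    // ready == [3.2, 4.9], pending == [5.6, 6.1]
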
@@ -840,7 +840,10 @@ public class CameraScreen: ViewController {
             let panGestureRecognizer = UIPanGestureRecognizer(target: self, action: #selector(self.handlePan(_:)))
             self.effectivePreviewView.addGestureRecognizer(panGestureRecognizer)
             
-            self.camera.focus(at: CGPoint(x: 0.5, y: 0.5))
+            let tapGestureRecognizer = UITapGestureRecognizer(target: self, action: #selector(self.handleTap(_:)))
+            self.effectivePreviewView.addGestureRecognizer(tapGestureRecognizer)
+            
+            self.camera.focus(at: CGPoint(x: 0.5, y: 0.5), autoFocus: true)
             self.camera.startCapture()
         }
         
@@ -856,8 +859,6 @@ public class CameraScreen: ViewController {
             }
         }
         
-        private var previewInitialPosition: CGPoint?
-        private var controlsInitialPosition: CGPoint?
         @objc private func handlePan(_ gestureRecognizer: UIPanGestureRecognizer) {
             guard let controller = self.controller else {
                 return
@@ -865,8 +866,7 @@ public class CameraScreen: ViewController {
             let translation = gestureRecognizer.translation(in: gestureRecognizer.view)
             switch gestureRecognizer.state {
             case .began:
-                self.previewInitialPosition = self.previewContainerView.center
-                self.controlsInitialPosition = self.componentHost.view?.center
+                break
             case .changed:
                 if !"".isEmpty {
                 
@@ -888,6 +888,15 @@ public class CameraScreen: ViewController {
                 break
             }
         }
         
+        @objc private func handleTap(_ gestureRecognizer: UITapGestureRecognizer) {
+            guard let previewView = self.simplePreviewView else {
+                return
+            }
+            let location = gestureRecognizer.location(in: previewView)
+            let point = previewView.cameraPoint(for: location)
+            self.camera.focus(at: point, autoFocus: false)
+        }
+        
         func animateIn() {
             self.backgroundView.alpha = 0.0
@@ -1160,7 +1160,7 @@ public final class MediaEditorScreen: ViewController {
             rotateGestureRecognizer.delegate = self
             self.previewContainerView.addGestureRecognizer(rotateGestureRecognizer)
             
-            let tapGestureRecognizer = UIPanGestureRecognizer(target: self, action: #selector(self.handleTap(_:)))
+            let tapGestureRecognizer = UITapGestureRecognizer(target: self, action: #selector(self.handleTap(_:)))
             self.previewContainerView.addGestureRecognizer(tapGestureRecognizer)
             
             self.interaction = DrawingToolsInteraction(