Various fixes

This commit is contained in:
Ilya Laktyushin 2023-07-06 22:56:18 +02:00
parent a4297cf1c5
commit 9b8d5631c1
7 changed files with 193 additions and 171 deletions

View File

@ -8,13 +8,17 @@ final class CameraSession {
private let singleSession: AVCaptureSession?
private let multiSession: Any?
let hasMultiCam: Bool
/// Selects the underlying AVFoundation session at construction time:
/// `AVCaptureMultiCamSession` requires iOS 13+ *and* hardware support for
/// simultaneous multi-camera capture; otherwise a plain `AVCaptureSession`
/// is used. Exactly one of `multiSession` / `singleSession` is non-nil.
init() {
    if #available(iOS 13.0, *), AVCaptureMultiCamSession.isMultiCamSupported {
        // `multiSession` is declared as `Any?` — presumably to avoid naming
        // the iOS 13-only type at the property declaration; confirm callers
        // downcast behind their own #available checks.
        self.multiSession = AVCaptureMultiCamSession()
        self.singleSession = nil
        self.hasMultiCam = true
    } else {
        self.singleSession = AVCaptureSession()
        self.multiSession = nil
        self.hasMultiCam = false
    }
}
@ -119,23 +123,9 @@ private final class CameraContext {
}
}
var previewView: CameraPreviewView? {
didSet {
}
}
var simplePreviewView: CameraSimplePreviewView? {
didSet {
if let oldValue {
Queue.mainQueue().async {
oldValue.invalidate()
self.simplePreviewView?.setSession(self.session.session, autoConnect: true)
}
}
}
}
var previewView: CameraPreviewView?
var simplePreviewView: CameraSimplePreviewView?
var secondaryPreviewView: CameraSimplePreviewView?
private var lastSnapshotTimestamp: Double = CACurrentMediaTime()
@ -370,7 +360,7 @@ private final class CameraContext {
self.previewNode?.enqueue(sampleBuffer)
let timestamp = CACurrentMediaTime()
if timestamp > self.lastSnapshotTimestamp + 2.5 {
if timestamp > self.lastSnapshotTimestamp + 2.5, !self.mainDeviceContext.output.isRecording {
var mirror = false
if #available(iOS 13.0, *) {
mirror = connection.inputPorts.first?.sourceDevicePosition == .front
@ -564,9 +554,9 @@ public final class Camera {
session.session.automaticallyConfiguresApplicationAudioSession = false
session.session.automaticallyConfiguresCaptureDeviceForWideColor = false
if let previewView {
previewView.setSession(session.session, autoConnect: false)
previewView.setSession(session.session, autoConnect: !session.hasMultiCam)
}
if let secondaryPreviewView {
if let secondaryPreviewView, session.hasMultiCam {
secondaryPreviewView.setSession(session.session, autoConnect: false)
}

View File

@ -27,7 +27,11 @@ class CameraInput {
if let videoInput = try? AVCaptureDeviceInput(device: device) {
self.videoInput = videoInput
if session.session.canAddInput(videoInput) {
session.session.addInputWithNoConnections(videoInput)
if session.hasMultiCam {
session.session.addInputWithNoConnections(videoInput)
} else {
session.session.addInput(videoInput)
}
}
}
}

View File

@ -118,7 +118,11 @@ final class CameraOutput: NSObject {
func configure(for session: CameraSession, device: CameraDevice, input: CameraInput, previewView: CameraSimplePreviewView?, audio: Bool, photo: Bool, metadata: Bool) {
if session.session.canAddOutput(self.videoOutput) {
session.session.addOutputWithNoConnections(self.videoOutput)
if session.hasMultiCam {
session.session.addOutputWithNoConnections(self.videoOutput)
} else {
session.session.addOutput(self.videoOutput)
}
self.videoOutput.setSampleBufferDelegate(self, queue: self.queue)
}
if audio, session.session.canAddOutput(self.audioOutput) {
@ -126,7 +130,11 @@ final class CameraOutput: NSObject {
self.audioOutput.setSampleBufferDelegate(self, queue: self.queue)
}
if photo, session.session.canAddOutput(self.photoOutput) {
session.session.addOutputWithNoConnections(self.photoOutput)
if session.hasMultiCam {
session.session.addOutputWithNoConnections(self.photoOutput)
} else {
session.session.addOutput(self.photoOutput)
}
}
if metadata, session.session.canAddOutput(self.metadataOutput) {
session.session.addOutput(self.metadataOutput)
@ -137,7 +145,7 @@ final class CameraOutput: NSObject {
}
}
if #available(iOS 13.0, *) {
if #available(iOS 13.0, *), session.hasMultiCam {
if let device = device.videoDevice, let ports = input.videoInput?.ports(for: AVMediaType.video, sourceDeviceType: device.deviceType, sourceDevicePosition: device.position) {
if let previewView {
let previewConnection = AVCaptureConnection(inputPort: ports.first!, videoPreviewLayer: previewView.videoPreviewLayer)
@ -163,57 +171,7 @@ final class CameraOutput: NSObject {
}
}
}
/// Rewires this output's manual capture connections so that the camera feed
/// from `input` drives the *other* output's video/photo outputs and the other
/// preview layer — presumably used when swapping cameras in dual-cam mode
/// (TODO confirm against callers). Manual AVCaptureConnection wiring is only
/// available on iOS 13+, hence the availability gate; on older systems this
/// is a no-op.
func reconfigure(for session: CameraSession, device: CameraDevice, input: CameraInput, otherPreviewView: CameraSimplePreviewView?, otherOutput: CameraOutput) {
    if #available(iOS 13.0, *) {
        // Tear down every connection this output previously registered.
        // The identity-based containment check avoids asking the session to
        // remove a connection it no longer owns.
        if let previewConnection = self.previewConnection {
            if session.session.connections.contains(where: { $0 === previewConnection }) {
                session.session.removeConnection(previewConnection)
            }
            self.previewConnection = nil
        }
        if let videoConnection = self.videoConnection {
            if session.session.connections.contains(where: { $0 === videoConnection }) {
                session.session.removeConnection(videoConnection)
            }
            self.videoConnection = nil
        }
        if let photoConnection = self.photoConnection {
            if session.session.connections.contains(where: { $0 === photoConnection }) {
                session.session.removeConnection(photoConnection)
            }
            self.photoConnection = nil
        }
        // Re-create connections from this device's video input ports to the
        // other output's consumers. Each add is guarded by canAddConnection,
        // so a refused connection simply leaves the corresponding property nil.
        if let device = device.videoDevice, let ports = input.videoInput?.ports(for: AVMediaType.video, sourceDeviceType: device.deviceType, sourceDevicePosition: device.position) {
            if let otherPreviewView {
                // ports.first! — relies on ports being non-empty here; the
                // surrounding `if let ports` only guarantees non-nil, not
                // non-empty (NOTE(review): confirm this cannot be []).
                let previewConnection = AVCaptureConnection(inputPort: ports.first!, videoPreviewLayer: otherPreviewView.videoPreviewLayer)
                if session.session.canAddConnection(previewConnection) {
                    session.session.addConnection(previewConnection)
                    self.previewConnection = previewConnection
                }
            }
            let videoConnection = AVCaptureConnection(inputPorts: ports, output: otherOutput.videoOutput)
            if session.session.canAddConnection(videoConnection) {
                session.session.addConnection(videoConnection)
                self.videoConnection = videoConnection
            }
            let photoConnection = AVCaptureConnection(inputPorts: ports, output: otherOutput.photoOutput)
            if session.session.canAddConnection(photoConnection) {
                session.session.addConnection(photoConnection)
                self.photoConnection = photoConnection
            }
        }
    }
}
// Intentionally empty in this revision — NOTE(review): looks like a stub left
// behind after connection swapping moved to `reconfigure`; confirm it has no
// remaining callers before deleting.
func toggleConnection() {
}
func invalidate(for session: CameraSession) {
if #available(iOS 13.0, *) {
if let previewConnection = self.previewConnection {
@ -314,6 +272,10 @@ final class CameraOutput: NSObject {
}
}
/// Whether a video recording is currently in progress, i.e. a recorder
/// instance exists for this output.
var isRecording: Bool {
    self.videoRecorder != nil
}
private var recordingCompletionPipe = ValuePipe<VideoCaptureResult>()
func startRecording(isDualCamera: Bool, position: Camera.Position? = nil) -> Signal<Double, NoError> {
guard self.videoRecorder == nil else {

View File

@ -77,7 +77,7 @@ public class CameraSimplePreviewView: UIView {
self.videoPreviewLayer.videoGravity = main ? .resizeAspectFill : .resizeAspect
self.placeholderView.contentMode = .scaleAspectFill
self.placeholderView.contentMode = main ? .scaleAspectFill : .scaleAspectFit
self.addSubview(self.placeholderView)
if main {

View File

@ -201,93 +201,157 @@ func exifOrientationForDeviceOrientation(_ deviceOrientation: UIDeviceOrientatio
}
}
/**
First crops the pixel buffer, then resizes it.
This function requires the caller to pass in both the source and destination
pixel buffers. The dimensions of destination pixel buffer should be at least
`scaleWidth` x `scaleHeight` pixels.
*/
func resizePixelBuffer(from srcPixelBuffer: CVPixelBuffer,
to dstPixelBuffer: CVPixelBuffer,
cropX: Int,
cropY: Int,
cropWidth: Int,
cropHeight: Int,
scaleWidth: Int,
scaleHeight: Int) {
assert(CVPixelBufferGetWidth(dstPixelBuffer) >= scaleWidth)
assert(CVPixelBufferGetHeight(dstPixelBuffer) >= scaleHeight)
let srcFlags = CVPixelBufferLockFlags.readOnly
let dstFlags = CVPixelBufferLockFlags(rawValue: 0)
guard kCVReturnSuccess == CVPixelBufferLockBaseAddress(srcPixelBuffer, srcFlags) else {
print("Error: could not lock source pixel buffer")
return
}
defer { CVPixelBufferUnlockBaseAddress(srcPixelBuffer, srcFlags) }
guard kCVReturnSuccess == CVPixelBufferLockBaseAddress(dstPixelBuffer, dstFlags) else {
print("Error: could not lock destination pixel buffer")
return
}
defer { CVPixelBufferUnlockBaseAddress(dstPixelBuffer, dstFlags) }
guard let srcData = CVPixelBufferGetBaseAddress(srcPixelBuffer),
let dstData = CVPixelBufferGetBaseAddress(dstPixelBuffer) else {
print("Error: could not get pixel buffer base address")
return
}
let srcBytesPerRow = CVPixelBufferGetBytesPerRow(srcPixelBuffer)
let offset = cropY*srcBytesPerRow + cropX*4
var srcBuffer = vImage_Buffer(data: srcData.advanced(by: offset),
height: vImagePixelCount(cropHeight),
width: vImagePixelCount(cropWidth),
rowBytes: srcBytesPerRow)
let dstBytesPerRow = CVPixelBufferGetBytesPerRow(dstPixelBuffer)
var dstBuffer = vImage_Buffer(data: dstData,
height: vImagePixelCount(scaleHeight),
width: vImagePixelCount(scaleWidth),
rowBytes: dstBytesPerRow)
let error = vImageScale_ARGB8888(&srcBuffer, &dstBuffer, nil, vImage_Flags(0))
if error != kvImageNoError {
print("Error:", error)
}
to dstPixelBuffer: CVPixelBuffer,
cropX: Int,
cropY: Int,
cropWidth: Int,
cropHeight: Int,
scaleWidth: Int,
scaleHeight: Int) {
assert(CVPixelBufferGetWidth(dstPixelBuffer) >= scaleWidth)
assert(CVPixelBufferGetHeight(dstPixelBuffer) >= scaleHeight)
let srcFlags = CVPixelBufferLockFlags.readOnly
let dstFlags = CVPixelBufferLockFlags(rawValue: 0)
guard kCVReturnSuccess == CVPixelBufferLockBaseAddress(srcPixelBuffer, srcFlags) else {
print("Error: could not lock source pixel buffer")
return
}
defer { CVPixelBufferUnlockBaseAddress(srcPixelBuffer, srcFlags) }
guard kCVReturnSuccess == CVPixelBufferLockBaseAddress(dstPixelBuffer, dstFlags) else {
print("Error: could not lock destination pixel buffer")
return
}
defer { CVPixelBufferUnlockBaseAddress(dstPixelBuffer, dstFlags) }
guard let srcData = CVPixelBufferGetBaseAddress(srcPixelBuffer),
let dstData = CVPixelBufferGetBaseAddress(dstPixelBuffer) else {
print("Error: could not get pixel buffer base address")
return
}
let srcBytesPerRow = CVPixelBufferGetBytesPerRow(srcPixelBuffer)
let offset = cropY*srcBytesPerRow + cropX*4
var srcBuffer = vImage_Buffer(data: srcData.advanced(by: offset),
height: vImagePixelCount(cropHeight),
width: vImagePixelCount(cropWidth),
rowBytes: srcBytesPerRow)
let dstBytesPerRow = CVPixelBufferGetBytesPerRow(dstPixelBuffer)
var dstBuffer = vImage_Buffer(data: dstData,
height: vImagePixelCount(scaleHeight),
width: vImagePixelCount(scaleWidth),
rowBytes: dstBytesPerRow)
let error = vImageScale_ARGB8888(&srcBuffer, &dstBuffer, nil, vImage_Flags(0))
if error != kvImageNoError {
print("Error:", error)
}
}
/**
Resizes a CVPixelBuffer to a new width and height.
This function requires the caller to pass in both the source and destination
pixel buffers. The dimensions of destination pixel buffer should be at least
`width` x `height` pixels.
*/
func resizePixelBuffer(from srcPixelBuffer: CVPixelBuffer,
to dstPixelBuffer: CVPixelBuffer,
width: Int, height: Int) {
resizePixelBuffer(from: srcPixelBuffer, to: dstPixelBuffer,
cropX: 0, cropY: 0,
cropWidth: CVPixelBufferGetWidth(srcPixelBuffer),
cropHeight: CVPixelBufferGetHeight(srcPixelBuffer),
scaleWidth: width, scaleHeight: height)
to dstPixelBuffer: CVPixelBuffer,
width: Int, height: Int) {
resizePixelBuffer(from: srcPixelBuffer, to: dstPixelBuffer,
cropX: 0, cropY: 0,
cropWidth: CVPixelBufferGetWidth(srcPixelBuffer),
cropHeight: CVPixelBufferGetHeight(srcPixelBuffer),
scaleWidth: width, scaleHeight: height)
}
/**
Resizes a CVPixelBuffer to a new width and height, using Core Image.
*/
func resizePixelBuffer(_ pixelBuffer: CVPixelBuffer,
width: Int, height: Int,
output: CVPixelBuffer, context: CIContext) {
let ciImage = CIImage(cvPixelBuffer: pixelBuffer)
let sx = CGFloat(width) / CGFloat(CVPixelBufferGetWidth(pixelBuffer))
let sy = CGFloat(height) / CGFloat(CVPixelBufferGetHeight(pixelBuffer))
let scaleTransform = CGAffineTransform(scaleX: sx, y: sy)
let scaledImage = ciImage.transformed(by: scaleTransform)
context.render(scaledImage, to: output)
width: Int, height: Int,
output: CVPixelBuffer, context: CIContext) {
let ciImage = CIImage(cvPixelBuffer: pixelBuffer)
let sx = CGFloat(width) / CGFloat(CVPixelBufferGetWidth(pixelBuffer))
let sy = CGFloat(height) / CGFloat(CVPixelBufferGetHeight(pixelBuffer))
let scaleTransform = CGAffineTransform(scaleX: sx, y: sy)
let scaledImage = ciImage.transformed(by: scaleTransform)
context.render(scaledImage, to: output)
}
/// Renders the contents of a pixel buffer into a `UIImage` with the given
/// orientation, via a bitmap `CGContext` backed by the buffer's memory.
///
/// - Parameters:
///   - pixelBuffer: Source buffer; must have a CPU-accessible, non-planar
///     base address.
///   - orientation: Orientation applied to the resulting `UIImage`.
/// - Returns: The image, or `nil` if the buffer's memory is inaccessible or
///   the bitmap context / image creation fails.
func imageFromCVPixelBuffer(_ pixelBuffer: CVPixelBuffer, orientation: UIImage.Orientation) -> UIImage? {
    CVPixelBufferLockBaseAddress(pixelBuffer, .readOnly)
    // Single unlock on every exit path (the original repeated the unlock in
    // each early return).
    defer { CVPixelBufferUnlockBaseAddress(pixelBuffer, .readOnly) }
    
    let width = CVPixelBufferGetWidth(pixelBuffer)
    let height = CVPixelBufferGetHeight(pixelBuffer)
    let bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer)
    
    // Bug fix: the base address was previously passed to CGContext unguarded.
    // With `data: nil` CGContext allocates its own zeroed backing store, so a
    // buffer without a CPU-accessible base address silently produced a blank
    // image instead of failing.
    guard let baseAddress = CVPixelBufferGetBaseAddress(pixelBuffer) else {
        return nil
    }
    
    let colorSpace = CGColorSpaceCreateDeviceRGB()
    // NOTE(review): no byte-order flag is set — for kCVPixelFormatType_32BGRA
    // camera buffers this likely needs CGBitmapInfo.byteOrder32Little ORed in;
    // confirm the pixel format produced by the capture pipeline before changing.
    guard let context = CGContext(
        data: baseAddress,
        width: width,
        height: height,
        bitsPerComponent: 8,
        bytesPerRow: bytesPerRow,
        space: colorSpace,
        bitmapInfo: CGImageAlphaInfo.premultipliedFirst.rawValue
    ), let cgImage = context.makeImage() else {
        return nil
    }
    return UIImage(cgImage: cgImage, scale: 1.0, orientation: orientation)
}
extension CVPixelBuffer {
    /// Creates an independent pixel-for-pixel copy of this buffer.
    ///
    /// NOTE(review): assumes a non-planar format (single base address) —
    /// planar formats such as 420v/420f would need per-plane copies; confirm
    /// callers only pass BGRA-style buffers.
    ///
    /// - Returns: A newly allocated buffer with the same dimensions and pixel
    ///   format, or `nil` if allocation fails or either buffer's memory is
    ///   inaccessible.
    func deepCopy() -> CVPixelBuffer? {
        let width = CVPixelBufferGetWidth(self)
        let height = CVPixelBufferGetHeight(self)
        let format = CVPixelBufferGetPixelFormatType(self)
        let attributes: [NSObject: AnyObject] = [
            kCVPixelBufferCGImageCompatibilityKey: true as AnyObject,
            kCVPixelBufferCGBitmapContextCompatibilityKey: true as AnyObject
        ]
        var newPixelBuffer: CVPixelBuffer?
        let status = CVPixelBufferCreate(
            kCFAllocatorDefault,
            width,
            height,
            format,
            attributes as CFDictionary,
            &newPixelBuffer
        )
        guard status == kCVReturnSuccess, let unwrappedPixelBuffer = newPixelBuffer else {
            return nil
        }
        CVPixelBufferLockBaseAddress(self, .readOnly)
        CVPixelBufferLockBaseAddress(unwrappedPixelBuffer, [])
        // Unlock both buffers on every exit path.
        defer {
            CVPixelBufferUnlockBaseAddress(self, .readOnly)
            CVPixelBufferUnlockBaseAddress(unwrappedPixelBuffer, [])
        }
        guard let sourceBaseAddress = CVPixelBufferGetBaseAddress(self),
              let destinationBaseAddress = CVPixelBufferGetBaseAddress(unwrappedPixelBuffer) else {
            return nil
        }
        let sourceBytesPerRow = CVPixelBufferGetBytesPerRow(self)
        let destinationBytesPerRow = CVPixelBufferGetBytesPerRow(unwrappedPixelBuffer)
        // Bug fix: the original did one flat memcpy of
        // `height * min(srcStride, dstStride)` bytes. CoreVideo may pad rows
        // differently for the two buffers, and when the strides differ a flat
        // copy misaligns every row after the first (and reads/writes the wrong
        // total byte count). Copy row by row instead, honouring each buffer's
        // own stride.
        let bytesToCopyPerRow = min(sourceBytesPerRow, destinationBytesPerRow)
        for row in 0..<height {
            memcpy(
                destinationBaseAddress.advanced(by: row * destinationBytesPerRow),
                sourceBaseAddress.advanced(by: row * sourceBytesPerRow),
                bytesToCopyPerRow
            )
        }
        return unwrappedPixelBuffer
    }
}

View File

@ -34,7 +34,7 @@ private final class VideoRecorderImpl {
private var videoInput: AVAssetWriterInput?
private var audioInput: AVAssetWriterInput?
private let imageContext: CIContext
private let imageContext = CIContext()
private var transitionImage: UIImage?
private var savedTransitionImage = false
@ -66,7 +66,6 @@ private final class VideoRecorderImpl {
self.configuration = configuration
self.videoTransform = videoTransform
self.url = fileUrl
self.imageContext = CIContext()
try? FileManager.default.removeItem(at: url)
guard let assetWriter = try? AVAssetWriter(url: url, fileType: .mp4) else {
@ -162,10 +161,16 @@ private final class VideoRecorderImpl {
}
if let videoInput = self.videoInput, videoInput.isReadyForMoreMediaData {
if videoInput.append(sampleBuffer) {
self.lastVideoSampleTime = presentationTime
let startTime = self.recordingStartSampleTime
let duration = presentationTime - startTime
self._duration = duration
}
if !self.savedTransitionImage, let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) {
self.savedTransitionImage = true
Queue.concurrentDefaultQueue().async {
Queue.concurrentBackgroundQueue().async {
let ciImage = CIImage(cvPixelBuffer: pixelBuffer)
if let cgImage = self.imageContext.createCGImage(ciImage, from: ciImage.extent) {
self.transitionImage = UIImage(cgImage: cgImage, scale: 1.0, orientation: .right)
@ -175,14 +180,6 @@ private final class VideoRecorderImpl {
}
}
if videoInput.append(sampleBuffer) {
self.lastVideoSampleTime = presentationTime
let startTime = self.recordingStartSampleTime
let duration = presentationTime - startTime
self._duration = duration
} else {
print("error")
}
if !self.tryAppendingPendingAudioBuffers() {
self.transitionToFailedStatus(error: .generic)
}

View File

@ -826,9 +826,9 @@ private final class CameraScreenComponent: CombinedComponent {
if isTablet {
timePosition = CGPoint(x: availableSize.width - panelWidth / 2.0, y: availableSize.height / 2.0 - 97.0)
} else {
timePosition = CGPoint(x: availableSize.width / 2.0, y: environment.safeInsets.top + 40.0)
timePosition = CGPoint(x: availableSize.width / 2.0, y: max(environment.statusBarHeight + 5.0 + 20.0, environment.safeInsets.top + topControlInset + 20.0))
}
if state.cameraState.recording != .none {
let timeBackground = timeBackground.update(
component: RoundedRectangle(color: videoRedColor, cornerRadius: 4.0),
@ -1172,7 +1172,6 @@ public class CameraScreen: ViewController {
if let self {
if modeChange != .none {
if case .dualCamera = modeChange, self.cameraPosition == .front {
} else {
if let snapshot = self.mainPreviewView.snapshotView(afterScreenUpdates: false) {
self.mainPreviewView.addSubview(snapshot)
@ -1283,7 +1282,13 @@ public class CameraScreen: ViewController {
self.mainPreviewContainerView.addSubview(cloneView)
}
} else {
if let cloneView = self.mainPreviewView.snapshotView(afterScreenUpdates: false) {
cloneView.frame = self.mainPreviewView.frame
self.additionalPreviewSnapshotView = cloneView
self.additionalPreviewContainerView.addSubview(cloneView)
}
if let cloneView = self.additionalPreviewView.snapshotView(afterScreenUpdates: false) {
cloneView.frame = self.additionalPreviewView.frame
self.mainPreviewSnapshotView = cloneView
self.mainPreviewContainerView.addSubview(cloneView)
}