From c0d0b3733dbdae4203b7fdbdb56285f22160a72f Mon Sep 17 00:00:00 2001
From: Ilya Laktyushin
Date: Sun, 18 Jun 2023 18:59:21 +0400
Subject: [PATCH] Camera and editor improvements

---
 submodules/Camera/Sources/Camera.swift        |  3 ++-
 submodules/Camera/Sources/CameraOutput.swift  | 23 ++++++++++++++++---
 .../Camera/Sources/PhotoCaptureContext.swift  |  9 +++++++-
 .../CameraScreen/Sources/CameraScreen.swift   |  3 +++
 4 files changed, 33 insertions(+), 5 deletions(-)

diff --git a/submodules/Camera/Sources/Camera.swift b/submodules/Camera/Sources/Camera.swift
index 79eb3aade5..d2b658f5f7 100644
--- a/submodules/Camera/Sources/Camera.swift
+++ b/submodules/Camera/Sources/Camera.swift
@@ -41,11 +41,12 @@ final class CameraDeviceContext {
     let device = CameraDevice()
     let input = CameraInput()
-    let output = CameraOutput()
+    let output: CameraOutput
 
     init(session: CameraSession, exclusive: Bool) {
         self.session = session
         self.exclusive = exclusive
+        self.output = CameraOutput(exclusive: exclusive)
     }
 
     func configure(position: Camera.Position, previewView: CameraSimplePreviewView?, audio: Bool, photo: Bool, metadata: Bool) {
diff --git a/submodules/Camera/Sources/CameraOutput.swift b/submodules/Camera/Sources/CameraOutput.swift
index 65e0ed5e4a..ee623b2c4f 100644
--- a/submodules/Camera/Sources/CameraOutput.swift
+++ b/submodules/Camera/Sources/CameraOutput.swift
@@ -73,6 +73,8 @@ final class CameraOutput: NSObject {
     let metadataOutput = AVCaptureMetadataOutput()
     private let faceLandmarksOutput = FaceLandmarksDataOutput()
 
+    let exclusive: Bool
+
     private var photoConnection: AVCaptureConnection?
     private var videoConnection: AVCaptureConnection?
     private var previewConnection: AVCaptureConnection?
@@ -91,7 +93,9 @@ final class CameraOutput: NSObject {
     var processCodes: (([CameraCode]) -> Void)?
     var processFaceLandmarks: (([VNFaceObservation]) -> Void)?
 
-    override init() {
+    init(exclusive: Bool) {
+        self.exclusive = exclusive
+
         super.init()
 
         self.videoOutput.alwaysDiscardsLateVideoFrames = false
@@ -270,8 +274,13 @@ final class CameraOutput: NSObject {
     }
 
     func takePhoto(orientation: AVCaptureVideoOrientation, flashMode: AVCaptureDevice.FlashMode) -> Signal<PhotoCaptureResult, NoError> {
+        var mirror = false
         if let connection = self.photoOutput.connection(with: .video) {
             connection.videoOrientation = orientation
+
+            if #available(iOS 13.0, *) {
+                mirror = connection.inputPorts.first?.sourceDevicePosition == .front
+            }
         }
 
         let settings = AVCapturePhotoSettings(format: [kCVPixelBufferPixelFormatTypeKey as String: Int(kCVPixelFormatType_32BGRA)])
@@ -280,11 +289,19 @@
             settings.previewPhotoFormat = [kCVPixelBufferPixelFormatTypeKey as String: previewPhotoPixelFormatType]
         }
         if #available(iOS 13.0, *) {
-            settings.photoQualityPrioritization = .speed
+            if self.exclusive {
+                if self.photoOutput.maxPhotoQualityPrioritization != .speed {
+                    settings.photoQualityPrioritization = .balanced
+                } else {
+                    settings.photoQualityPrioritization = .speed
+                }
+            } else {
+                settings.photoQualityPrioritization = .speed
+            }
         }
 
         let uniqueId = settings.uniqueID
-        let photoCapture = PhotoCaptureContext(settings: settings, filter: self.activeFilter)
+        let photoCapture = PhotoCaptureContext(settings: settings, filter: self.activeFilter, mirror: mirror)
         self.photoCaptureRequests[uniqueId] = photoCapture
         self.photoOutput.capturePhoto(with: settings, delegate: photoCapture)
diff --git a/submodules/Camera/Sources/PhotoCaptureContext.swift b/submodules/Camera/Sources/PhotoCaptureContext.swift
index 747f1b5400..8748cb9d18 100644
--- a/submodules/Camera/Sources/PhotoCaptureContext.swift
+++ b/submodules/Camera/Sources/PhotoCaptureContext.swift
@@ -35,9 +35,11 @@ public enum PhotoCaptureResult: Equatable {
 final class PhotoCaptureContext: NSObject, AVCapturePhotoCaptureDelegate {
     private let pipe = ValuePipe<PhotoCaptureResult>()
     private let filter: CameraFilter?
+    private let mirror: Bool
 
-    init(settings: AVCapturePhotoSettings, filter: CameraFilter?) {
+    init(settings: AVCapturePhotoSettings, filter: CameraFilter?, mirror: Bool) {
         self.filter = filter
+        self.mirror = mirror
 
         super.init()
     }
@@ -79,6 +81,11 @@ final class PhotoCaptureContext: NSObject, AVCapturePhotoCaptureDelegate {
             var image = UIImage(cgImage: cgImage, scale: 1.0, orientation: .right)
             if image.imageOrientation != .up {
                 UIGraphicsBeginImageContextWithOptions(image.size, true, image.scale)
+                if self.mirror, let context = UIGraphicsGetCurrentContext() {
+                    context.translateBy(x: image.size.width / 2.0, y: image.size.height / 2.0)
+                    context.scaleBy(x: -1.0, y: 1.0)
+                    context.translateBy(x: -image.size.width / 2.0, y: -image.size.height / 2.0)
+                }
                 image.draw(in: CGRect(origin: .zero, size: image.size))
                 if let currentImage = UIGraphicsGetImageFromCurrentImageContext() {
                     image = currentImage
diff --git a/submodules/TelegramUI/Components/CameraScreen/Sources/CameraScreen.swift b/submodules/TelegramUI/Components/CameraScreen/Sources/CameraScreen.swift
index 5b15360c6f..0aa599272b 100644
--- a/submodules/TelegramUI/Components/CameraScreen/Sources/CameraScreen.swift
+++ b/submodules/TelegramUI/Components/CameraScreen/Sources/CameraScreen.swift
@@ -524,6 +524,8 @@ private final class CameraScreenComponent: CombinedComponent {
         } else {
             captureControlsAvailableSize = availableSize
         }
+
+        let animateShutter = component.animateShutter
         let captureControls = captureControls.update(
             component: CaptureControlsComponent(
                 isTablet: isTablet,
@@ -538,6 +540,7 @@ private final class CameraScreenComponent: CombinedComponent {
                 }
                 if case .none = state.cameraState.recording {
                     if state.cameraState.mode == .photo {
+                        animateShutter()
                         state.takePhoto()
                     } else if state.cameraState.mode == .video {
                         state.startVideoRecording(pressing: false)
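
Notes (editor sketches; illustrative only, not part of the patch):

The takePhoto() hunk decides whether to mirror by reading the source
device position off the photo connection's first input port, which is
only available on iOS 13+. A minimal standalone sketch of that check,
assuming an already-configured output (the helper name `isFrontFacing`
is hypothetical):

    import AVFoundation

    func isFrontFacing(_ output: AVCapturePhotoOutput) -> Bool {
        guard let connection = output.connection(with: .video) else {
            return false
        }
        if #available(iOS 13.0, *) {
            // Each input port reports the position of the device feeding it.
            return connection.inputPorts.first?.sourceDevicePosition == .front
        }
        return false
    }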
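The quality-prioritization change avoids requesting more than the output
permits: capturing with a photoQualityPrioritization higher than the
output's maxPhotoQualityPrioritization raises an exception, which is why
.balanced is only chosen when the maximum is above .speed. A sketch of
the same rule as a helper (the helper name is hypothetical, and
`preferBalanced` stands in for the patch's `exclusive` flag):

    import AVFoundation

    @available(iOS 13.0, *)
    func qualityPrioritization(for output: AVCapturePhotoOutput, preferBalanced: Bool) -> AVCapturePhotoOutput.QualityPrioritization {
        // Never exceed what the output permits; .speed is always valid.
        if preferBalanced && output.maxPhotoQualityPrioritization != .speed {
            return .balanced
        }
        return .speed
    }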
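The PhotoCaptureContext hunk mirrors front-camera photos by flipping the
bitmap context around the image's vertical axis before redrawing. The
same transform, extracted into a self-contained sketch (the function
name `mirroredHorizontally` is hypothetical):

    import UIKit

    func mirroredHorizontally(_ image: UIImage) -> UIImage {
        UIGraphicsBeginImageContextWithOptions(image.size, true, image.scale)
        defer { UIGraphicsEndImageContext() }
        if let context = UIGraphicsGetCurrentContext() {
            // Move the origin to the center, negate x, move it back:
            // the subsequent draw is mirrored left-to-right in place.
            context.translateBy(x: image.size.width / 2.0, y: image.size.height / 2.0)
            context.scaleBy(x: -1.0, y: 1.0)
            context.translateBy(x: -image.size.width / 2.0, y: -image.size.height / 2.0)
        }
        image.draw(in: CGRect(origin: .zero, size: image.size))
        return UIGraphicsGetImageFromCurrentImageContext() ?? image
    }

The CameraScreen change simply captures component.animateShutter before
the controls are built and fires it ahead of state.takePhoto(), so the
shutter animation starts as soon as a photo capture is requested.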