Mirror of https://github.com/Swiftgram/Telegram-iOS.git (synced 2025-12-23 06:35:51 +00:00)
Camera and editor improvements
@@ -41,11 +41,12 @@ final class CameraDeviceContext {
 
     let device = CameraDevice()
     let input = CameraInput()
-    let output = CameraOutput()
+    let output: CameraOutput
     
     init(session: CameraSession, exclusive: Bool) {
         self.session = session
         self.exclusive = exclusive
+        self.output = CameraOutput(exclusive: exclusive)
     }
     
     func configure(position: Camera.Position, previewView: CameraSimplePreviewView?, audio: Bool, photo: Bool, metadata: Bool) {
@@ -73,6 +73,8 @@ final class CameraOutput: NSObject {
     let metadataOutput = AVCaptureMetadataOutput()
     private let faceLandmarksOutput = FaceLandmarksDataOutput()
     
+    let exclusive: Bool
+    
     private var photoConnection: AVCaptureConnection?
     private var videoConnection: AVCaptureConnection?
     private var previewConnection: AVCaptureConnection?
@@ -91,7 +93,9 @@ final class CameraOutput: NSObject {
     var processCodes: (([CameraCode]) -> Void)?
     var processFaceLandmarks: (([VNFaceObservation]) -> Void)?
     
-    override init() {
+    init(exclusive: Bool) {
+        self.exclusive = exclusive
+        
         super.init()
         
         self.videoOutput.alwaysDiscardsLateVideoFrames = false
@@ -270,8 +274,13 @@ final class CameraOutput: NSObject {
     }
     
     func takePhoto(orientation: AVCaptureVideoOrientation, flashMode: AVCaptureDevice.FlashMode) -> Signal<PhotoCaptureResult, NoError> {
+        var mirror = false
         if let connection = self.photoOutput.connection(with: .video) {
             connection.videoOrientation = orientation
+            
+            if #available(iOS 13.0, *) {
+                mirror = connection.inputPorts.first?.sourceDevicePosition == .front
+            }
         }
         
         let settings = AVCapturePhotoSettings(format: [kCVPixelBufferPixelFormatTypeKey as String: Int(kCVPixelFormatType_32BGRA)])
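
The new mirror flag is derived from the capture connection itself: on iOS 13 and later, AVCaptureConnection exposes its input ports' sourceDevicePosition, so takePhoto can detect a front-camera capture without the call site passing the device position down. A minimal standalone sketch of the same check (the helper name and photoOutput parameter are illustrative, not from this commit):

    import AVFoundation

    // Sketch: returns true when the photo output is currently fed by the front
    // camera, mirroring the check added in takePhoto above. Illustrative helper,
    // not part of the commit.
    func shouldMirrorCapturedPhoto(from photoOutput: AVCapturePhotoOutput) -> Bool {
        guard let connection = photoOutput.connection(with: .video) else {
            return false
        }
        if #available(iOS 13.0, *) {
            // sourceDevicePosition reports which physical camera produces
            // this connection's frames.
            return connection.inputPorts.first?.sourceDevicePosition == .front
        }
        return false
    }
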
@@ -280,11 +289,19 @@ final class CameraOutput: NSObject {
             settings.previewPhotoFormat = [kCVPixelBufferPixelFormatTypeKey as String: previewPhotoPixelFormatType]
         }
         if #available(iOS 13.0, *) {
-            settings.photoQualityPrioritization = .speed
+            if self.exclusive {
+                if self.photoOutput.maxPhotoQualityPrioritization != .speed {
+                    settings.photoQualityPrioritization = .balanced
+                } else {
+                    settings.photoQualityPrioritization = .speed
+                }
+            } else {
+                settings.photoQualityPrioritization = .speed
+            }
         }
         
         let uniqueId = settings.uniqueID
-        let photoCapture = PhotoCaptureContext(settings: settings, filter: self.activeFilter)
+        let photoCapture = PhotoCaptureContext(settings: settings, filter: self.activeFilter, mirror: mirror)
         self.photoCaptureRequests[uniqueId] = photoCapture
         self.photoOutput.capturePhoto(with: settings, delegate: photoCapture)
 
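
The exclusive flag stored on CameraOutput now drives photoQualityPrioritization: when the output serves a single (exclusive) camera and its maxPhotoQualityPrioritization allows more than .speed, the capture asks for .balanced; otherwise it stays with .speed, which presumably keeps non-exclusive (multi-camera) capture responsive. The same selection rule as a self-contained sketch (the function name is illustrative, not from this commit):

    import AVFoundation

    // Sketch of the prioritization rule added above: prefer .balanced for an
    // exclusive session when the output supports it, otherwise stay with .speed.
    @available(iOS 13.0, *)
    func qualityPrioritization(exclusive: Bool, for output: AVCapturePhotoOutput) -> AVCapturePhotoOutput.QualityPrioritization {
        if exclusive && output.maxPhotoQualityPrioritization != .speed {
            return .balanced
        }
        return .speed
    }
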
@@ -35,9 +35,11 @@ public enum PhotoCaptureResult: Equatable {
 final class PhotoCaptureContext: NSObject, AVCapturePhotoCaptureDelegate {
     private let pipe = ValuePipe<PhotoCaptureResult>()
     private let filter: CameraFilter?
+    private let mirror: Bool
     
-    init(settings: AVCapturePhotoSettings, filter: CameraFilter?) {
+    init(settings: AVCapturePhotoSettings, filter: CameraFilter?, mirror: Bool) {
         self.filter = filter
+        self.mirror = mirror
         
         super.init()
     }
@@ -79,6 +81,11 @@ final class PhotoCaptureContext: NSObject, AVCapturePhotoCaptureDelegate {
         var image = UIImage(cgImage: cgImage, scale: 1.0, orientation: .right)
         if image.imageOrientation != .up {
             UIGraphicsBeginImageContextWithOptions(image.size, true, image.scale)
+            if self.mirror, let context = UIGraphicsGetCurrentContext() {
+                context.translateBy(x: image.size.width / 2.0, y: image.size.height / 2.0)
+                context.scaleBy(x: -1.0, y: 1.0)
+                context.translateBy(x: -image.size.width / 2.0, y: -image.size.height / 2.0)
+            }
             image.draw(in: CGRect(origin: .zero, size: image.size))
             if let currentImage = UIGraphicsGetImageFromCurrentImageContext() {
                 image = currentImage
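
The mirroring is applied while the delegate re-renders the captured photo: a translate/scale/translate sequence flips the graphics context around the vertical axis through the image center, so front-camera shots come out matching the preview. A self-contained sketch of that transform (the helper name is illustrative, not from this commit):

    import UIKit

    // Sketch: horizontally mirror a UIImage by flipping the drawing context
    // around the vertical axis through the image center, as in the delegate
    // code above.
    func horizontallyMirrored(_ image: UIImage) -> UIImage {
        UIGraphicsBeginImageContextWithOptions(image.size, true, image.scale)
        defer { UIGraphicsEndImageContext() }
        if let context = UIGraphicsGetCurrentContext() {
            context.translateBy(x: image.size.width / 2.0, y: image.size.height / 2.0)
            context.scaleBy(x: -1.0, y: 1.0) // negate x around the center
            context.translateBy(x: -image.size.width / 2.0, y: -image.size.height / 2.0)
        }
        image.draw(in: CGRect(origin: .zero, size: image.size))
        return UIGraphicsGetImageFromCurrentImageContext() ?? image
    }
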
@@ -524,6 +524,8 @@ private final class CameraScreenComponent: CombinedComponent {
         } else {
             captureControlsAvailableSize = availableSize
         }
+        
+        let animateShutter = component.animateShutter
         let captureControls = captureControls.update(
             component: CaptureControlsComponent(
                 isTablet: isTablet,
@@ -538,6 +540,7 @@ private final class CameraScreenComponent: CombinedComponent {
                 }
                 if case .none = state.cameraState.recording {
                     if state.cameraState.mode == .photo {
+                        animateShutter()
                         state.takePhoto()
                     } else if state.cameraState.mode == .video {
                         state.startVideoRecording(pressing: false)