Merge commit '61c795b95b497f3acf34c386d3eb35cba2b41a0a'

Ali committed 2023-07-15 22:46:53 +04:00
commit 33e716d78b
5 changed files with 49 additions and 17 deletions

View File

@@ -460,15 +460,16 @@ private final class CameraContext {
         }
         mainDeviceContext.device.setTorchMode(self._flashMode)
+        let orientation = self.simplePreviewView?.videoPreviewLayer.connection?.videoOrientation ?? .portrait
         if let additionalDeviceContext = self.additionalDeviceContext {
             return combineLatest(
-                mainDeviceContext.output.startRecording(isDualCamera: true, position: self.positionValue),
-                additionalDeviceContext.output.startRecording(isDualCamera: true)
+                mainDeviceContext.output.startRecording(isDualCamera: true, position: self.positionValue, orientation: orientation),
+                additionalDeviceContext.output.startRecording(isDualCamera: true, orientation: .portrait)
             ) |> map { value, _ in
                 return value
             }
         } else {
-            return mainDeviceContext.output.startRecording(isDualCamera: false)
+            return mainDeviceContext.output.startRecording(isDualCamera: false, orientation: orientation)
         }
     }
@@ -486,7 +487,7 @@ private final class CameraContext {
                 if let cgImage = additionalResult.1.cgImage {
                     additionalTransitionImage = UIImage(cgImage: cgImage, scale: 1.0, orientation: .leftMirrored)
                 }
-                return .single(.finished(mainResult, (additionalResult.0, additionalTransitionImage, true), duration, positionChangeTimestamps, CACurrentMediaTime()))
+                return .single(.finished(mainResult, (additionalResult.0, additionalTransitionImage, true, additionalResult.3), duration, positionChangeTimestamps, CACurrentMediaTime()))
             } else {
                 return .complete()
             }
@@ -500,7 +501,7 @@ private final class CameraContext {
             if mirror, let cgImage = transitionImage.cgImage {
                 transitionImage = UIImage(cgImage: cgImage, scale: 1.0, orientation: .leftMirrored)
             }
-            return .finished((mainResult.0, transitionImage, mirror), nil, duration, positionChangeTimestamps, time)
+            return .finished((mainResult.0, transitionImage, mirror, mainResult.3), nil, duration, positionChangeTimestamps, time)
         } else {
             return result
         }

View File

@@ -8,7 +8,7 @@ import VideoToolbox
 import TelegramCore
 
 public enum VideoCaptureResult: Equatable {
-    case finished((String, UIImage, Bool), (String, UIImage, Bool)?, Double, [(Bool, Double)], Double)
+    case finished((String, UIImage, Bool, CGSize), (String, UIImage, Bool, CGSize)?, Double, [(Bool, Double)], Double)
     case failed
 
     public static func == (lhs: VideoCaptureResult, rhs: VideoCaptureResult) -> Bool {
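Note (not part of the diff): a minimal sketch of how a caller can read the widened result, assuming the VideoCaptureResult enum exactly as declared above; the handle(_:) function and the bound constant names are illustrative only, the new CGSize element is the recorded video's pixel dimensions added by this commit.

import UIKit

func handle(_ result: VideoCaptureResult) {
    switch result {
    case let .finished(main, additional, duration, positionChanges, timestamp):
        // main is (path, coverImage, mirrored, dimensions); dimensions is the new CGSize element.
        let (path, coverImage, mirrored, dimensions) = main
        print("recorded \(path) at \(dimensions.width)x\(dimensions.height), \(duration)s")
        _ = (coverImage, mirrored, additional, positionChanges, timestamp)
    case .failed:
        print("recording failed")
    }
}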
@@ -276,7 +276,7 @@ final class CameraOutput: NSObject {
     }
 
     private var recordingCompletionPipe = ValuePipe<VideoCaptureResult>()
-    func startRecording(isDualCamera: Bool, position: Camera.Position? = nil) -> Signal<Double, NoError> {
+    func startRecording(isDualCamera: Bool, position: Camera.Position? = nil, orientation: AVCaptureVideoOrientation) -> Signal<Double, NoError> {
         guard self.videoRecorder == nil else {
             return .complete()
         }
@@ -293,12 +293,20 @@ final class CameraOutput: NSObject {
         }
         let audioSettings = self.audioOutput.recommendedAudioSettingsForAssetWriter(writingTo: .mp4) ?? [:]
 
+        var dimensions: CGSize = CGSize(width: 1080, height: 1920)
+        if orientation == .landscapeLeft {
+            dimensions = CGSize(width: 1920, height: 1080)
+        } else if orientation == .landscapeRight {
+            dimensions = CGSize(width: 1920, height: 1080)
+        }
+
         let outputFileName = NSUUID().uuidString
         let outputFilePath = NSTemporaryDirectory() + outputFileName + ".mp4"
         let outputFileURL = URL(fileURLWithPath: outputFilePath)
-        let videoRecorder = VideoRecorder(configuration: VideoRecorder.Configuration(videoSettings: videoSettings, audioSettings: audioSettings), videoTransform: CGAffineTransform(rotationAngle: .pi / 2.0), fileUrl: outputFileURL, completion: { [weak self] result in
+        let videoRecorder = VideoRecorder(configuration: VideoRecorder.Configuration(videoSettings: videoSettings, audioSettings: audioSettings), orientation: orientation, fileUrl: outputFileURL, completion: { [weak self] result in
             if case let .success(transitionImage, duration, positionChangeTimestamps) = result {
-                self?.recordingCompletionPipe.putNext(.finished((outputFilePath, transitionImage ?? UIImage(), false), nil, duration, positionChangeTimestamps.map { ($0 == .front, $1) }, CACurrentMediaTime()))
+                self?.recordingCompletionPipe.putNext(.finished((outputFilePath, transitionImage ?? UIImage(), false, dimensions), nil, duration, positionChangeTimestamps.map { ($0 == .front, $1) }, CACurrentMediaTime()))
             } else {
                 self?.recordingCompletionPipe.putNext(.failed)
             }
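Aside (not part of the diff): the hunk above picks 1920x1080 for either landscape orientation and keeps 1080x1920 otherwise. A minimal standalone sketch of the same mapping, with the two identical landscape branches collapsed; the helper name recordedDimensions(for:) is hypothetical.

import AVFoundation
import CoreGraphics

// Output dimensions for a given capture orientation, mirroring the branch added above.
func recordedDimensions(for orientation: AVCaptureVideoOrientation) -> CGSize {
    switch orientation {
    case .landscapeLeft, .landscapeRight:
        return CGSize(width: 1920.0, height: 1080.0)
    default:
        return CGSize(width: 1080.0, height: 1920.0)
    }
}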

View File

@@ -52,6 +52,7 @@ private final class VideoRecorderImpl {
     private var positionChangeTimestamps: [(Camera.Position, CMTime)] = []
 
     private let configuration: VideoRecorder.Configuration
+    private let orientation: AVCaptureVideoOrientation
     private let videoTransform: CGAffineTransform
     private let url: URL
 
     fileprivate var completion: (Bool, UIImage?, [(Camera.Position, CMTime)]?) -> Void = { _, _, _ in }
@@ -62,9 +63,20 @@ private final class VideoRecorderImpl {
     private var hasAllVideoBuffers = false
     private var hasAllAudioBuffers = false
 
-    public init?(configuration: VideoRecorder.Configuration, videoTransform: CGAffineTransform, fileUrl: URL) {
+    public init?(configuration: VideoRecorder.Configuration, orientation: AVCaptureVideoOrientation, fileUrl: URL) {
         self.configuration = configuration
-        self.videoTransform = videoTransform
+
+        var transform: CGAffineTransform = CGAffineTransform(rotationAngle: .pi / 2.0)
+        if orientation == .landscapeLeft {
+            transform = CGAffineTransform(rotationAngle: .pi)
+        } else if orientation == .landscapeRight {
+            transform = CGAffineTransform(rotationAngle: 0.0)
+        } else if orientation == .portraitUpsideDown {
+            transform = CGAffineTransform(rotationAngle: -.pi / 2.0)
+        }
+        self.orientation = orientation
+        self.videoTransform = transform
         self.url = fileUrl
 
         try? FileManager.default.removeItem(at: url)
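Aside (not part of the diff): the init above replaces the fixed 90-degree writer transform with a per-orientation rotation. The same mapping as a standalone sketch; the helper name videoTransform(for:) is hypothetical, and the default branch covers .portrait plus any unknown case, matching the fallback above.

import AVFoundation
import CoreGraphics

// Rotation applied to the recorded video for each capture orientation.
func videoTransform(for orientation: AVCaptureVideoOrientation) -> CGAffineTransform {
    switch orientation {
    case .landscapeLeft:
        return CGAffineTransform(rotationAngle: .pi)
    case .landscapeRight:
        return CGAffineTransform(rotationAngle: 0.0)
    case .portraitUpsideDown:
        return CGAffineTransform(rotationAngle: -.pi / 2.0)
    default:
        return CGAffineTransform(rotationAngle: .pi / 2.0)
    }
}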
@@ -173,7 +185,15 @@ private final class VideoRecorderImpl {
             Queue.concurrentBackgroundQueue().async {
                 let ciImage = CIImage(cvPixelBuffer: pixelBuffer)
                 if let cgImage = self.imageContext.createCGImage(ciImage, from: ciImage.extent) {
-                    self.transitionImage = UIImage(cgImage: cgImage, scale: 1.0, orientation: .right)
+                    var orientation: UIImage.Orientation = .right
+                    if self.orientation == .landscapeLeft {
+                        orientation = .down
+                    } else if self.orientation == .landscapeRight {
+                        orientation = .up
+                    } else if self.orientation == .portraitUpsideDown {
+                        orientation = .left
+                    }
+                    self.transitionImage = UIImage(cgImage: cgImage, scale: 1.0, orientation: orientation)
                 } else {
                     self.savedTransitionImage = false
                 }
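Aside (not part of the diff): the transition (cover) image gets the analogous per-orientation treatment. A standalone sketch of the mapping used above; the helper name transitionImageOrientation(for:) is hypothetical.

import AVFoundation
import UIKit

// UIImage orientation for the cover image, per capture orientation; .right remains the portrait default.
func transitionImageOrientation(for orientation: AVCaptureVideoOrientation) -> UIImage.Orientation {
    switch orientation {
    case .landscapeLeft:
        return .down
    case .landscapeRight:
        return .up
    case .portraitUpsideDown:
        return .left
    default:
        return .right
    }
}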
@@ -452,7 +472,6 @@ public final class VideoRecorder {
     private let impl: VideoRecorderImpl
 
     fileprivate let configuration: Configuration
-    fileprivate let videoTransform: CGAffineTransform
     fileprivate let fileUrl: URL
 
     private let completion: (Result) -> Void
@@ -460,13 +479,12 @@ public final class VideoRecorder {
         return self.impl.isRecording
     }
 
-    init?(configuration: Configuration, videoTransform: CGAffineTransform, fileUrl: URL, completion: @escaping (Result) -> Void) {
+    init?(configuration: Configuration, orientation: AVCaptureVideoOrientation, fileUrl: URL, completion: @escaping (Result) -> Void) {
         self.configuration = configuration
-        self.videoTransform = videoTransform
         self.fileUrl = fileUrl
         self.completion = completion
 
-        guard let impl = VideoRecorderImpl(configuration: configuration, videoTransform: videoTransform, fileUrl: fileUrl) else {
+        guard let impl = VideoRecorderImpl(configuration: configuration, orientation: orientation, fileUrl: fileUrl) else {
             completion(.initError(.generic))
             return nil
         }

View File

@@ -495,7 +495,7 @@ private final class CameraScreenComponent: CombinedComponent {
         self.resultDisposable.set((camera.stopRecording()
         |> deliverOnMainQueue).start(next: { [weak self] result in
             if let self, case let .finished(mainResult, additionalResult, duration, positionChangeTimestamps, _) = result {
-                self.completion.invoke(.single(.video(CameraScreen.Result.Video(videoPath: mainResult.0, coverImage: mainResult.1, mirror: mainResult.2, additionalVideoPath: additionalResult?.0, additionalCoverImage: additionalResult?.1, dimensions: PixelDimensions(width: 1080, height: 1920), duration: duration, positionChangeTimestamps: positionChangeTimestamps, additionalVideoPosition: .topRight))))
+                self.completion.invoke(.single(.video(CameraScreen.Result.Video(videoPath: mainResult.0, coverImage: mainResult.1, mirror: mainResult.2, additionalVideoPath: additionalResult?.0, additionalCoverImage: additionalResult?.1, dimensions: PixelDimensions(mainResult.3), duration: duration, positionChangeTimestamps: positionChangeTimestamps, additionalVideoPosition: .topRight))))
             }
         }))
         self.isTransitioning = true

View File

@@ -5036,6 +5036,11 @@ public final class EmojiPagerContentComponent: Component {
             default:
                 return
             }
+
+            if item.0.icon == .locked {
+                return
+            }
+
             self.longPressItem = item.1
 
             if #available(iOS 13.0, *) {