Mirror of https://github.com/Swiftgram/Telegram-iOS.git, synced 2025-08-02 00:17:02 +00:00

Commit 33e716d78b
Merge commit '61c795b95b497f3acf34c386d3eb35cba2b41a0a'
@@ -460,15 +460,16 @@ private final class CameraContext {
             }
             
             mainDeviceContext.device.setTorchMode(self._flashMode)
             
+            let orientation = self.simplePreviewView?.videoPreviewLayer.connection?.videoOrientation ?? .portrait
             if let additionalDeviceContext = self.additionalDeviceContext {
                 return combineLatest(
-                    mainDeviceContext.output.startRecording(isDualCamera: true, position: self.positionValue),
-                    additionalDeviceContext.output.startRecording(isDualCamera: true)
+                    mainDeviceContext.output.startRecording(isDualCamera: true, position: self.positionValue, orientation: orientation),
+                    additionalDeviceContext.output.startRecording(isDualCamera: true, orientation: .portrait)
                 ) |> map { value, _ in
                     return value
                 }
             } else {
-                return mainDeviceContext.output.startRecording(isDualCamera: false)
+                return mainDeviceContext.output.startRecording(isDualCamera: false, orientation: orientation)
             }
         }
@@ -486,7 +487,7 @@ private final class CameraContext {
                     if let cgImage = additionalResult.1.cgImage {
                         additionalTransitionImage = UIImage(cgImage: cgImage, scale: 1.0, orientation: .leftMirrored)
                     }
-                    return .single(.finished(mainResult, (additionalResult.0, additionalTransitionImage, true), duration, positionChangeTimestamps, CACurrentMediaTime()))
+                    return .single(.finished(mainResult, (additionalResult.0, additionalTransitionImage, true, additionalResult.3), duration, positionChangeTimestamps, CACurrentMediaTime()))
                 } else {
                     return .complete()
                 }
@@ -500,7 +501,7 @@ private final class CameraContext {
                     if mirror, let cgImage = transitionImage.cgImage {
                         transitionImage = UIImage(cgImage: cgImage, scale: 1.0, orientation: .leftMirrored)
                     }
-                    return .finished((mainResult.0, transitionImage, mirror), nil, duration, positionChangeTimestamps, time)
+                    return .finished((mainResult.0, transitionImage, mirror, mainResult.3), nil, duration, positionChangeTimestamps, time)
                 } else {
                     return result
                 }
@@ -8,7 +8,7 @@ import VideoToolbox
 import TelegramCore
 
 public enum VideoCaptureResult: Equatable {
-    case finished((String, UIImage, Bool), (String, UIImage, Bool)?, Double, [(Bool, Double)], Double)
+    case finished((String, UIImage, Bool, CGSize), (String, UIImage, Bool, CGSize)?, Double, [(Bool, Double)], Double)
     case failed
     
     public static func == (lhs: VideoCaptureResult, rhs: VideoCaptureResult) -> Bool {
@@ -276,7 +276,7 @@ final class CameraOutput: NSObject {
     }
     
     private var recordingCompletionPipe = ValuePipe<VideoCaptureResult>()
-    func startRecording(isDualCamera: Bool, position: Camera.Position? = nil) -> Signal<Double, NoError> {
+    func startRecording(isDualCamera: Bool, position: Camera.Position? = nil, orientation: AVCaptureVideoOrientation) -> Signal<Double, NoError> {
         guard self.videoRecorder == nil else {
             return .complete()
         }
@@ -293,12 +293,20 @@
         }
         let audioSettings = self.audioOutput.recommendedAudioSettingsForAssetWriter(writingTo: .mp4) ?? [:]
         
+        var dimensions: CGSize = CGSize(width: 1080, height: 1920)
+        if orientation == .landscapeLeft {
+            dimensions = CGSize(width: 1920, height: 1080)
+        } else if orientation == .landscapeRight {
+            dimensions = CGSize(width: 1920, height: 1080)
+        }
+        
         let outputFileName = NSUUID().uuidString
         let outputFilePath = NSTemporaryDirectory() + outputFileName + ".mp4"
         let outputFileURL = URL(fileURLWithPath: outputFilePath)
-        let videoRecorder = VideoRecorder(configuration: VideoRecorder.Configuration(videoSettings: videoSettings, audioSettings: audioSettings), videoTransform: CGAffineTransform(rotationAngle: .pi / 2.0), fileUrl: outputFileURL, completion: { [weak self] result in
+        let videoRecorder = VideoRecorder(configuration: VideoRecorder.Configuration(videoSettings: videoSettings, audioSettings: audioSettings), orientation: orientation, fileUrl: outputFileURL, completion: { [weak self] result in
             if case let .success(transitionImage, duration, positionChangeTimestamps) = result {
-                self?.recordingCompletionPipe.putNext(.finished((outputFilePath, transitionImage ?? UIImage(), false), nil, duration, positionChangeTimestamps.map { ($0 == .front, $1) }, CACurrentMediaTime()))
+                self?.recordingCompletionPipe.putNext(.finished((outputFilePath, transitionImage ?? UIImage(), false, dimensions), nil, duration, positionChangeTimestamps.map { ($0 == .front, $1) }, CACurrentMediaTime()))
             } else {
                 self?.recordingCompletionPipe.putNext(.failed)
             }
@@ -52,6 +52,7 @@ private final class VideoRecorderImpl {
     private var positionChangeTimestamps: [(Camera.Position, CMTime)] = []
     
     private let configuration: VideoRecorder.Configuration
+    private let orientation: AVCaptureVideoOrientation
     private let videoTransform: CGAffineTransform
     private let url: URL
     fileprivate var completion: (Bool, UIImage?, [(Camera.Position, CMTime)]?) -> Void = { _, _, _ in }
@@ -62,9 +63,20 @@ private final class VideoRecorderImpl {
     private var hasAllVideoBuffers = false
     private var hasAllAudioBuffers = false
     
-    public init?(configuration: VideoRecorder.Configuration, videoTransform: CGAffineTransform, fileUrl: URL) {
+    public init?(configuration: VideoRecorder.Configuration, orientation: AVCaptureVideoOrientation, fileUrl: URL) {
         self.configuration = configuration
-        self.videoTransform = videoTransform
+        
+        var transform: CGAffineTransform = CGAffineTransform(rotationAngle: .pi / 2.0)
+        if orientation == .landscapeLeft {
+            transform = CGAffineTransform(rotationAngle: .pi)
+        } else if orientation == .landscapeRight {
+            transform = CGAffineTransform(rotationAngle: 0.0)
+        } else if orientation == .portraitUpsideDown {
+            transform = CGAffineTransform(rotationAngle: -.pi / 2.0)
+        }
+        
+        self.orientation = orientation
+        self.videoTransform = transform
         self.url = fileUrl
         
         try? FileManager.default.removeItem(at: url)
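The new VideoRecorderImpl initializer derives the asset-writer transform from the capture orientation instead of taking a transform parameter. A compact sketch of that mapping follows, with an illustrative helper name and the same rotation angles the commit uses.

    import AVFoundation
    import CoreGraphics

    // Illustrative restatement: portrait -> 90 degrees, landscapeLeft -> 180,
    // landscapeRight -> 0, portraitUpsideDown -> -90.
    func videoTransform(for orientation: AVCaptureVideoOrientation) -> CGAffineTransform {
        switch orientation {
        case .landscapeLeft:
            return CGAffineTransform(rotationAngle: .pi)
        case .landscapeRight:
            return CGAffineTransform(rotationAngle: 0.0)
        case .portraitUpsideDown:
            return CGAffineTransform(rotationAngle: -.pi / 2.0)
        default:
            return CGAffineTransform(rotationAngle: .pi / 2.0)
        }
    }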
@@ -173,7 +185,15 @@ private final class VideoRecorderImpl {
             Queue.concurrentBackgroundQueue().async {
                 let ciImage = CIImage(cvPixelBuffer: pixelBuffer)
                 if let cgImage = self.imageContext.createCGImage(ciImage, from: ciImage.extent) {
-                    self.transitionImage = UIImage(cgImage: cgImage, scale: 1.0, orientation: .right)
+                    var orientation: UIImage.Orientation = .right
+                    if self.orientation == .landscapeLeft {
+                        orientation = .down
+                    } else if self.orientation == .landscapeRight {
+                        orientation = .up
+                    } else if self.orientation == .portraitUpsideDown {
+                        orientation = .left
+                    }
+                    self.transitionImage = UIImage(cgImage: cgImage, scale: 1.0, orientation: orientation)
                 } else {
                     self.savedTransitionImage = false
                 }
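A parallel mapping picks the UIImage.Orientation used for the transition frame. Restated as a standalone function for clarity; the name transitionImageOrientation(for:) is illustrative only.

    import AVFoundation
    import UIKit

    // Illustrative restatement of the transition-image orientation chosen
    // per capture orientation in the hunk above.
    func transitionImageOrientation(for orientation: AVCaptureVideoOrientation) -> UIImage.Orientation {
        switch orientation {
        case .landscapeLeft:
            return .down
        case .landscapeRight:
            return .up
        case .portraitUpsideDown:
            return .left
        default:
            return .right
        }
    }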
@@ -452,7 +472,6 @@ public final class VideoRecorder {
     
     private let impl: VideoRecorderImpl
     fileprivate let configuration: Configuration
-    fileprivate let videoTransform: CGAffineTransform
     fileprivate let fileUrl: URL
     private let completion: (Result) -> Void
     
@@ -460,13 +479,12 @@ public final class VideoRecorder {
         return self.impl.isRecording
     }
     
-    init?(configuration: Configuration, videoTransform: CGAffineTransform, fileUrl: URL, completion: @escaping (Result) -> Void) {
+    init?(configuration: Configuration, orientation: AVCaptureVideoOrientation, fileUrl: URL, completion: @escaping (Result) -> Void) {
         self.configuration = configuration
-        self.videoTransform = videoTransform
         self.fileUrl = fileUrl
         self.completion = completion
         
-        guard let impl = VideoRecorderImpl(configuration: configuration, videoTransform: videoTransform, fileUrl: fileUrl) else {
+        guard let impl = VideoRecorderImpl(configuration: configuration, orientation: orientation, fileUrl: fileUrl) else {
             completion(.initError(.generic))
             return nil
         }
@@ -495,7 +495,7 @@ private final class CameraScreenComponent: CombinedComponent {
                 self.resultDisposable.set((camera.stopRecording()
                 |> deliverOnMainQueue).start(next: { [weak self] result in
                     if let self, case let .finished(mainResult, additionalResult, duration, positionChangeTimestamps, _) = result {
-                        self.completion.invoke(.single(.video(CameraScreen.Result.Video(videoPath: mainResult.0, coverImage: mainResult.1, mirror: mainResult.2, additionalVideoPath: additionalResult?.0, additionalCoverImage: additionalResult?.1, dimensions: PixelDimensions(width: 1080, height: 1920), duration: duration, positionChangeTimestamps: positionChangeTimestamps, additionalVideoPosition: .topRight))))
+                        self.completion.invoke(.single(.video(CameraScreen.Result.Video(videoPath: mainResult.0, coverImage: mainResult.1, mirror: mainResult.2, additionalVideoPath: additionalResult?.0, additionalCoverImage: additionalResult?.1, dimensions: PixelDimensions(mainResult.3), duration: duration, positionChangeTimestamps: positionChangeTimestamps, additionalVideoPosition: .topRight))))
                     }
                 }))
                 self.isTransitioning = true
@@ -5036,6 +5036,11 @@ public final class EmojiPagerContentComponent: Component {
                 default:
                     return
                 }
                 
+                if item.0.icon == .locked {
+                    return
+                }
+                
                 self.longPressItem = item.1
                 
                 if #available(iOS 13.0, *) {