Mirror of https://github.com/Swiftgram/Telegram-iOS.git (synced 2025-06-16 05:55:20 +00:00)

Commit: Camera and editor improvements
parent e419ccf8f3
commit 2d738fbfac
@@ -57,6 +57,7 @@ swift_library(
 "//submodules/AsyncDisplayKit:AsyncDisplayKit",
 "//submodules/Display:Display",
 "//submodules/ImageBlur:ImageBlur",
+"//submodules/TelegramCore:TelegramCore",
 ],
 visibility = [
 "//visibility:public",

@@ -40,14 +40,14 @@ private final class CameraContext {
 }
 }

+private let previewSnapshotContext = CIContext()
 private var lastSnapshotTimestamp: Double = CACurrentMediaTime()
 private func savePreviewSnapshot(pixelBuffer: CVPixelBuffer) {
 Queue.concurrentDefaultQueue().async {
-let ciContext = CIContext()
 var ciImage = CIImage(cvImageBuffer: pixelBuffer)
-ciImage = ciImage.transformed(by: CGAffineTransform(scaleX: 0.33, y: 0.33))
-ciImage = ciImage.clampedToExtent()
-if let cgImage = ciContext.createCGImage(ciImage, from: ciImage.extent) {
+let size = ciImage.extent.size
+ciImage = ciImage.clampedToExtent().applyingGaussianBlur(sigma: 40.0).cropped(to: CGRect(origin: .zero, size: size))
+if let cgImage = self.previewSnapshotContext.createCGImage(ciImage, from: ciImage.extent) {
 let uiImage = UIImage(cgImage: cgImage, scale: 1.0, orientation: .right)
 CameraSimplePreviewView.saveLastStateImage(uiImage)
 }
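For reference, a minimal standalone sketch of the snapshot pipeline the new code uses: clamp the image, apply a Gaussian blur, crop back to the original extent, and render through one shared CIContext instead of allocating a context per frame. The names below are illustrative, not the project's.

import CoreImage
import UIKit

// Shared context, created once; allocating a CIContext per frame is expensive.
let sharedSnapshotContext = CIContext()

// Blur a camera pixel buffer roughly the way the new savePreviewSnapshot does.
func blurredSnapshot(from pixelBuffer: CVPixelBuffer) -> UIImage? {
    var ciImage = CIImage(cvImageBuffer: pixelBuffer)
    let size = ciImage.extent.size
    // Clamp before blurring so the edges do not fade to transparent, then crop back.
    ciImage = ciImage.clampedToExtent()
        .applyingGaussianBlur(sigma: 40.0)
        .cropped(to: CGRect(origin: .zero, size: size))
    guard let cgImage = sharedSnapshotContext.createCGImage(ciImage, from: ciImage.extent) else {
        return nil
    }
    // The capture buffer is landscape; .right matches the portrait preview orientation.
    return UIImage(cgImage: cgImage, scale: 1.0, orientation: .right)
}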
@@ -67,7 +67,7 @@ private final class CameraContext {
 self.input.configure(for: self.session, device: self.device, audio: configuration.audio)
 self.output.configure(for: self.session, configuration: configuration)

-self.device.configureDeviceFormat(maxDimensions: CMVideoDimensions(width: 1920, height: 1080), maxFramerate: 60)
+self.device.configureDeviceFormat(maxDimensions: CMVideoDimensions(width: 1920, height: 1080), maxFramerate: self.preferredMaxFrameRate)
 self.output.configureVideoStabilization()
 }

@@ -115,6 +115,15 @@ private final class CameraContext {
 }
 }

+private var preferredMaxFrameRate: Double {
+switch DeviceModel.current {
+case .iPhone14ProMax, .iPhone13ProMax:
+return 60.0
+default:
+return 30.0
+}
+}
+
 func startCapture() {
 guard !self.session.isRunning else {
 return
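The effect of the new property: video capture is configured for 60 fps only on the Pro Max models listed above and stays at 30 fps everywhere else. A self-contained sketch of the same policy, with a stand-in for the project's DeviceModel type:

import Foundation

// Stand-in for the project's DeviceModel; only the cases relevant here.
enum Device {
    case iPhone13ProMax
    case iPhone14ProMax
    case other
}

// Same rule as preferredMaxFrameRate above: 60 fps on the large Pro models, 30 fps otherwise.
func preferredMaxFrameRate(for device: Device) -> Double {
    switch device {
    case .iPhone13ProMax, .iPhone14ProMax:
        return 60.0
    default:
        return 30.0
    }
}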
@@ -160,7 +169,7 @@ private final class CameraContext {
 self.changingPosition = true
 self.device.configure(for: self.session, position: targetPosition)
 self.input.configure(for: self.session, device: self.device, audio: self.initialConfiguration.audio)
-self.device.configureDeviceFormat(maxDimensions: CMVideoDimensions(width: 1920, height: 1080), maxFramerate: 60)
+self.device.configureDeviceFormat(maxDimensions: CMVideoDimensions(width: 1920, height: 1080), maxFramerate: self.preferredMaxFrameRate)
 self.output.configureVideoStabilization()
 self.queue.after(0.5) {
 self.changingPosition = false

@@ -173,7 +182,7 @@ private final class CameraContext {
 self.input.invalidate(for: self.session)
 self.device.configure(for: self.session, position: position)
 self.input.configure(for: self.session, device: self.device, audio: self.initialConfiguration.audio)
-self.device.configureDeviceFormat(maxDimensions: CMVideoDimensions(width: 1920, height: 1080), maxFramerate: 60)
+self.device.configureDeviceFormat(maxDimensions: CMVideoDimensions(width: 1920, height: 1080), maxFramerate: self.preferredMaxFrameRate)
 self.output.configureVideoStabilization()
 }
 }

@@ -1,6 +1,7 @@
 import Foundation
 import AVFoundation
 import SwiftSignalKit
+import TelegramCore

 private let defaultFPS: Double = 30.0

@@ -68,6 +69,14 @@ final class CameraDevice {

 if let bestFormat = candidates.last {
 device.activeFormat = bestFormat
+
+Logger.shared.log("Camera", "Available formats:")
+for format in device.formats {
+Logger.shared.log("Camera", format.description)
+}
+
+Logger.shared.log("Camera", "Selected format:")
+Logger.shared.log("Camera", bestFormat.description)
 }

 if let targetFPS = device.actualFPS(maxFramerate) {

@@ -23,6 +23,8 @@ public extension Camera {
 self = .iPhone14ProMax
 case .unknown:
 self = .unknown
+default:
+self = .unknown
 }
 }

@@ -70,6 +72,16 @@ enum DeviceModel: CaseIterable {
 case iPodTouch6
 case iPodTouch7

+case iPhone12
+case iPhone12Mini
+case iPhone12Pro
+case iPhone12ProMax
+
+case iPhone13
+case iPhone13Mini
+case iPhone13Pro
+case iPhone13ProMax
+
 case iPhone14
 case iPhone14Plus
 case iPhone14Pro

@@ -93,6 +105,22 @@ enum DeviceModel: CaseIterable {
 return "iPod7,1"
 case .iPodTouch7:
 return "iPod9,1"
+case .iPhone12:
+return "iPhone13,2"
+case .iPhone12Mini:
+return "iPhone13,1"
+case .iPhone12Pro:
+return "iPhone13,3"
+case .iPhone12ProMax:
+return "iPhone13,4"
+case .iPhone13:
+return "iPhone14,5"
+case .iPhone13Mini:
+return "iPhone14,4"
+case .iPhone13Pro:
+return "iPhone14,2"
+case .iPhone13ProMax:
+return "iPhone14,3"
 case .iPhone14:
 return "iPhone14,7"
 case .iPhone14Plus:

@@ -122,6 +150,22 @@ enum DeviceModel: CaseIterable {
 return "iPod touch 6G"
 case .iPodTouch7:
 return "iPod touch 7G"
+case .iPhone12:
+return "iPhone 12"
+case .iPhone12Mini:
+return "iPhone 12 mini"
+case .iPhone12Pro:
+return "iPhone 12 Pro"
+case .iPhone12ProMax:
+return "iPhone 12 Pro Max"
+case .iPhone13:
+return "iPhone 13"
+case .iPhone13Mini:
+return "iPhone 13 mini"
+case .iPhone13Pro:
+return "iPhone 13 Pro"
+case .iPhone13ProMax:
+return "iPhone 13 Pro Max"
 case .iPhone14:
 return "iPhone 14"
 case .iPhone14Plus:
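The new DeviceModel cases map each enum value to a raw hardware identifier such as "iPhone14,3". A hedged sketch of how such an identifier is commonly read at runtime; the project's own DeviceModel.current may obtain it differently.

import Foundation

// Reads the machine identifier (for example "iPhone14,2" on an iPhone 13 Pro) via uname.
func hardwareIdentifier() -> String {
    var systemInfo = utsname()
    uname(&systemInfo)
    let machineMirror = Mirror(reflecting: systemInfo.machine)
    return machineMirror.children.reduce(into: "") { identifier, element in
        guard let value = element.value as? Int8, value != 0 else { return }
        identifier.append(Character(UnicodeScalar(UInt8(bitPattern: value))))
    }
}

// The string can then be matched against the per-case identifiers added above,
// e.g. "iPhone13,4" resolving to .iPhone12ProMax.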
@@ -65,7 +65,6 @@ final class CameraOutput: NSObject {
 super.init()

 self.videoOutput.alwaysDiscardsLateVideoFrames = false
-//self.videoOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey: kCVPixelFormatType_32BGRA] as [String : Any]
 self.videoOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey: kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange] as [String : Any]

 self.faceLandmarksOutput.outputFaceObservations = { [weak self] observations in

@@ -21,7 +21,7 @@ public class CameraSimplePreviewView: UIView {

 static func saveLastStateImage(_ image: UIImage) {
 let imagePath = NSTemporaryDirectory() + "cameraImage.jpg"
-if let blurredImage = blurredImage(image, radius: 60.0), let data = blurredImage.jpegData(compressionQuality: 0.85) {
+if let data = image.jpegData(compressionQuality: 0.6) {
 try? data.write(to: URL(fileURLWithPath: imagePath))
 }
 }

@@ -937,7 +937,7 @@ final class ColorSpectrumComponent: Component {
 }
 }

-final class ColorSpectrumPickerView: UIView, UIGestureRecognizerDelegate {
+public final class ColorSpectrumPickerView: UIView, UIGestureRecognizerDelegate {
 private var validSize: CGSize?
 private var selectedColor: DrawingColor?

@@ -950,7 +950,7 @@ final class ColorSpectrumPickerView: UIView, UIGestureRecognizerDelegate {
 private var circleMaskView = UIView()
 private let maskCircle = SimpleShapeLayer()

-var selected: (DrawingColor) -> Void = { _ in }
+public var selected: (DrawingColor) -> Void = { _ in }

 private var bitmapData: UnsafeMutableRawPointer?

@@ -1048,7 +1048,7 @@ final class ColorSpectrumPickerView: UIView, UIGestureRecognizerDelegate {
 private var animatingIn = false
 private var scheduledAnimateOut: (() -> Void)?

-func animateIn() {
+public func animateIn() {
 self.animatingIn = true

 Queue.mainQueue().after(0.15) {

@@ -1107,7 +1107,7 @@ final class ColorSpectrumPickerView: UIView, UIGestureRecognizerDelegate {
 })
 }

-func updateLayout(size: CGSize, selectedColor: DrawingColor?) -> CGSize {
+public func updateLayout(size: CGSize, selectedColor: DrawingColor?) -> CGSize {
 let previousSize = self.validSize

 let imageSize = size

@@ -2413,10 +2413,10 @@ private final class ColorPickerSheetComponent: CombinedComponent {
 }
 }

-class ColorPickerScreen: ViewControllerComponentContainer {
+public final class ColorPickerScreen: ViewControllerComponentContainer {
 private var dismissed: () -> Void

-init(context: AccountContext, initialColor: DrawingColor, updated: @escaping (DrawingColor) -> Void, openEyedropper: @escaping () -> Void, dismissed: @escaping () -> Void = {}) {
+public init(context: AccountContext, initialColor: DrawingColor, updated: @escaping (DrawingColor) -> Void, openEyedropper: @escaping () -> Void, dismissed: @escaping () -> Void = {}) {
 self.dismissed = dismissed
 super.init(context: context, component: ColorPickerSheetComponent(context: context, initialColor: initialColor, updated: updated, openEyedropper: openEyedropper, dismissed: dismissed), navigationBarAppearance: .none)

@@ -54,7 +54,7 @@ public final class DrawingEntitiesView: UIView, TGPhotoDrawingEntitiesView {
 public weak var selectionContainerView: DrawingSelectionContainerView?

 private var tapGestureRecognizer: UITapGestureRecognizer!
-private(set) var selectedEntityView: DrawingEntityView?
+public private(set) var selectedEntityView: DrawingEntityView?

 public var getEntityCenterPosition: () -> CGPoint = { return .zero }
 public var getEntityInitialRotation: () -> CGFloat = { return 0.0 }

@@ -593,7 +593,7 @@ protocol DrawingEntityMediaView: DrawingEntityView {

 public class DrawingEntityView: UIView {
 let context: AccountContext
-let entity: DrawingEntity
+public let entity: DrawingEntity
 var isTracking = false

 public weak var selectionView: DrawingEntitySelectionView?

@@ -645,7 +645,7 @@ public class DrawingEntityView: UIView {

 }

-func update(animated: Bool = false) {
+public func update(animated: Bool = false) {
 self.updateSelectionView()
 }

@@ -89,7 +89,7 @@ public final class DrawingMediaEntityView: DrawingEntityView, DrawingEntityMedia
 }

 public var updated: (() -> Void)?
-override func update(animated: Bool) {
+public override func update(animated: Bool) {
 self.center = self.mediaEntity.position

 let size = self.mediaEntity.baseSize
File diff suppressed because it is too large.
@@ -38,7 +38,7 @@ private func generateGridImage(size: CGSize, light: Bool) -> UIImage? {
 })
 }

-final class EyedropperView: UIView {
+public final class EyedropperView: UIView {
 private weak var drawingView: DrawingView?

 private let containerView: UIView

@@ -872,7 +872,7 @@ public class CameraScreen: ViewController {

 } else {
 if translation.x < -10.0 {
-let transitionFraction = 1.0 - abs(translation.x) / self.frame.width
+let transitionFraction = 1.0 - max(0.0, translation.x * -1.0) / self.frame.width
 controller.updateTransitionProgress(transitionFraction, transition: .immediate)
 } else if translation.y < -10.0 {
 controller.presentGallery()

@@ -882,7 +882,7 @@ public class CameraScreen: ViewController {
 }
 case .ended:
 let velocity = gestureRecognizer.velocity(in: self.view)
-let transitionFraction = 1.0 - abs(translation.x) / self.frame.width
+let transitionFraction = 1.0 - max(0.0, translation.x * -1.0) / self.frame.width
 controller.completeWithTransitionProgress(transitionFraction, velocity: abs(velocity.x), dismissing: true)
 default:
 break

@@ -982,26 +982,28 @@ public class CameraScreen: ViewController {
 }

 func resumeCameraCapture() {
+if self.simplePreviewView?.isEnabled == false {
 if let snapshot = self.simplePreviewView?.snapshotView(afterScreenUpdates: false) {
 self.simplePreviewView?.addSubview(snapshot)
 self.previewSnapshotView = snapshot
 }
 self.simplePreviewView?.isEnabled = true
 self.camera.startCapture()

 if #available(iOS 13.0, *), let isPreviewing = self.simplePreviewView?.isPreviewing {
 let _ = (isPreviewing
 |> filter {
 $0
 }
 |> take(1)).start(next: { [weak self] _ in
 if let self {
 self.previewBlurPromise.set(false)
 }
 })
 } else {
 Queue.mainQueue().after(1.0) {
 self.previewBlurPromise.set(false)
 }
 }
+}
 }
@@ -1344,7 +1346,7 @@ public class CameraScreen: ViewController {
 private var isTransitioning = false
 public func updateTransitionProgress(_ transitionFraction: CGFloat, transition: ContainedViewLayoutTransition, completion: @escaping () -> Void = {}) {
 self.isTransitioning = true
-let offsetX = (1.0 - transitionFraction) * self.node.frame.width * -1.0
+let offsetX = floorToScreenPixels((1.0 - transitionFraction) * self.node.frame.width * -1.0)
 transition.updateTransform(layer: self.node.backgroundView.layer, transform: CGAffineTransform(translationX: offsetX, y: 0.0))
 transition.updateTransform(layer: self.node.containerView.layer, transform: CGAffineTransform(translationX: offsetX, y: 0.0))
 let scale = max(0.8, min(1.0, 0.8 + 0.2 * transitionFraction))

@@ -1359,7 +1361,7 @@ public class CameraScreen: ViewController {
 self.statusBar.updateStatusBarStyle(transitionFraction > 0.45 ? .White : .Ignore, animated: true)

 if let navigationController = self.navigationController as? NavigationController {
-let offsetX = transitionFraction * self.node.frame.width
+let offsetX = floorToScreenPixels(transitionFraction * self.node.frame.width)
 navigationController.updateRootContainerTransitionOffset(offsetX, transition: transition)
 }
 }

@@ -1367,7 +1369,7 @@ public class CameraScreen: ViewController {
 public func completeWithTransitionProgress(_ transitionFraction: CGFloat, velocity: CGFloat, dismissing: Bool) {
 self.isTransitioning = false
 if dismissing {
-if transitionFraction < 0.7 || velocity > 1000.0 {
+if transitionFraction < 0.7 || velocity < -1000.0 {
 self.requestDismiss(animated: true, interactive: true)
 } else {
 self.updateTransitionProgress(1.0, transition: .animated(duration: 0.4, curve: .spring), completion: { [weak self] in

@@ -464,6 +464,10 @@ public final class MediaEditor {
 }
 }

+public func play() {
+self.player?.play()
+}
+
 public func stop() {
 self.player?.pause()
 }
@@ -85,46 +85,35 @@ final class MediaEditorComposer {
 self.renderChain.update(values: self.values)
 }

-func processSampleBuffer(_ sampleBuffer: CMSampleBuffer, pool: CVPixelBufferPool?, completion: @escaping (CVPixelBuffer?) -> Void) {
-guard let textureCache = self.textureCache, let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer), let pool = pool else {
+func processSampleBuffer(_ sampleBuffer: CMSampleBuffer, pool: CVPixelBufferPool?, textureRotation: TextureRotation, completion: @escaping (CVPixelBuffer?) -> Void) {
+guard let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer), let pool = pool else {
 completion(nil)
 return
 }
 let time = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)

-let width = CVPixelBufferGetWidth(imageBuffer)
-let height = CVPixelBufferGetHeight(imageBuffer)
-let format: MTLPixelFormat = .bgra8Unorm
-var textureRef : CVMetalTexture?
-let status = CVMetalTextureCacheCreateTextureFromImage(nil, textureCache, imageBuffer, nil, format, width, height, 0, &textureRef)
-var texture: MTLTexture?
-if status == kCVReturnSuccess {
-texture = CVMetalTextureGetTexture(textureRef!)
-}
-if let texture {
-self.renderer.consumeTexture(texture)
-self.renderer.renderFrame()
+self.renderer.consumeVideoPixelBuffer(imageBuffer, rotation: textureRotation)
+self.renderer.renderFrame()

 if let finalTexture = self.renderer.finalTexture, var ciImage = CIImage(mtlTexture: finalTexture, options: [.colorSpace: self.colorSpace]) {
 ciImage = ciImage.transformed(by: CGAffineTransformMakeScale(1.0, -1.0).translatedBy(x: 0.0, y: -ciImage.extent.height))

 var pixelBuffer: CVPixelBuffer?
 CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, pool, &pixelBuffer)

 if let pixelBuffer {
 processImage(inputImage: ciImage, time: time, completion: { compositedImage in
 if var compositedImage {
 let scale = self.outputDimensions.width / self.dimensions.width
 compositedImage = compositedImage.transformed(by: CGAffineTransform(scaleX: scale, y: scale))

 self.ciContext?.render(compositedImage, to: pixelBuffer)
 completion(pixelBuffer)
 } else {
 completion(nil)
 }
 })
 return
 }
 }
-}
 completion(nil)
@@ -50,7 +50,7 @@ public final class MediaEditorPreviewView: MTKView, MTKViewDelegate, RenderTarge
 }

 func scheduleFrame() {
-Queue.mainQueue().async {
+Queue.mainQueue().justDispatch {
 self.draw()
 }
 }

@@ -247,10 +247,7 @@ public final class MediaEditorVideoExport {
 private let subject: Subject
 private let configuration: Configuration
 private let outputPath: String

-private var previousSampleTime: CMTime = .zero
-private var processedPixelBuffer: CVPixelBuffer?
-
 private var reader: AVAssetReader?

 private var videoOutput: AVAssetReaderOutput?

@@ -260,6 +257,7 @@ public final class MediaEditorVideoExport {
 private var writer: MediaEditorVideoExportWriter?
 private var composer: MediaEditorComposer?

+private var textureRotation: TextureRotation = .rotate0Degrees
 private let duration = ValuePromise<CMTime>()

 private let pauseDispatchGroup = DispatchGroup()
@@ -320,16 +318,23 @@ public final class MediaEditorVideoExport {
 return
 }

+self.textureRotation = textureRotatonForAVAsset(asset)
+
 writer.setup(configuration: self.configuration, outputPath: self.outputPath)

 let videoTracks = asset.tracks(withMediaType: .video)
 if (videoTracks.count > 0) {
-let outputSettings: [String : Any]
 var sourceFrameRate: Float = 0.0
+let outputSettings: [String: Any] = [
+kCVPixelBufferPixelFormatTypeKey as String: [kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange],
+AVVideoColorPropertiesKey: [
+AVVideoColorPrimariesKey: AVVideoColorPrimaries_ITU_R_709_2,
+AVVideoTransferFunctionKey: AVVideoTransferFunction_ITU_R_709_2,
+AVVideoYCbCrMatrixKey: AVVideoYCbCrMatrix_ITU_R_709_2
+]
+]
 if let videoTrack = videoTracks.first, videoTrack.preferredTransform.isIdentity && !self.configuration.values.requiresComposing {
-outputSettings = [kCVPixelBufferPixelFormatTypeKey as String: [kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange]]
 } else {
-outputSettings = [kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA]
 self.setupComposer()
 }
 let videoOutput = AVAssetReaderTrackOutput(track: videoTracks.first!, outputSettings: outputSettings)
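The reader output settings are now fixed to biplanar 4:2:0 video-range pixels tagged as BT.709, regardless of whether composing is required. The same dictionary, assembled on its own as a sketch (identical keys and constants to the hunk above; `videoTrack` in the usage comment is an assumed AVAssetTrack):

import AVFoundation

// Reader output settings equivalent to the ones introduced above.
let readerOutputSettings: [String: Any] = [
    kCVPixelBufferPixelFormatTypeKey as String: [kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange],
    AVVideoColorPropertiesKey: [
        AVVideoColorPrimariesKey: AVVideoColorPrimaries_ITU_R_709_2,
        AVVideoTransferFunctionKey: AVVideoTransferFunction_ITU_R_709_2,
        AVVideoYCbCrMatrixKey: AVVideoYCbCrMatrix_ITU_R_709_2
    ]
]

// Usage sketch:
// let output = AVAssetReaderTrackOutput(track: videoTrack, outputSettings: readerOutputSettings)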
@@ -516,7 +521,7 @@ public final class MediaEditorVideoExport {
 if let buffer = output.copyNextSampleBuffer() {
 if let composer = self.composer {
 let timestamp = CMSampleBufferGetPresentationTimeStamp(buffer)
-composer.processSampleBuffer(buffer, pool: writer.pixelBufferPool, completion: { pixelBuffer in
+composer.processSampleBuffer(buffer, pool: writer.pixelBufferPool, textureRotation: self.textureRotation, completion: { pixelBuffer in
 if let pixelBuffer {
 if !writer.appendPixelBuffer(pixelBuffer, at: timestamp) {
 writer.markVideoAsFinished()

@@ -3,6 +3,28 @@ import AVFoundation
 import Metal
 import MetalKit

+func textureRotatonForAVAsset(_ asset: AVAsset) -> TextureRotation {
+for track in asset.tracks {
+if track.mediaType == .video {
+let t = track.preferredTransform
+if t.a == -1.0 && t.d == -1.0 {
+return .rotate180Degrees
+} else if t.a == 1.0 && t.d == 1.0 {
+return .rotate0Degrees
+} else if t.b == -1.0 && t.c == 1.0 {
+return .rotate270Degrees
+} else if t.a == -1.0 && t.d == 1.0 {
+return .rotate270Degrees
+} else if t.a == 1.0 && t.d == -1.0 {
+return .rotate180Degrees
+} else {
+return .rotate90Degrees
+}
+}
+}
+return .rotate0Degrees
+}
+
 final class VideoTextureSource: NSObject, TextureSource, AVPlayerItemOutputPullDelegate {
 private let player: AVPlayer
 private var playerItem: AVPlayerItem?

@@ -80,23 +102,10 @@ final class VideoTextureSource: NSObject, TextureSource, AVPlayerItemOutputPullD
 for track in playerItem.asset.tracks {
 if track.mediaType == .video {
 hasVideoTrack = true
-
-let t = track.preferredTransform
-if t.a == -1.0 && t.d == -1.0 {
-self.textureRotation = .rotate180Degrees
-} else if t.a == 1.0 && t.d == 1.0 {
-self.textureRotation = .rotate0Degrees
-} else if t.b == -1.0 && t.c == 1.0 {
-self.textureRotation = .rotate270Degrees
-} else if t.a == -1.0 && t.d == 1.0 {
-self.textureRotation = .rotate270Degrees
-} else if t.a == 1.0 && t.d == -1.0 {
-self.textureRotation = .rotate180Degrees
-} else {
-self.textureRotation = .rotate90Degrees
-}
+break
 }
 }
+self.textureRotation = textureRotatonForAVAsset(playerItem.asset)
 if !hasVideoTrack {
 assertionFailure("No video track found.")
 return
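Both the exporter and the live texture source now derive the rotation from the shared textureRotatonForAVAsset helper instead of duplicating the preferredTransform checks. A reduced, self-contained usage sketch; TextureRotation below is a stand-in for the project's enum, and the asset URL in the comment is a placeholder:

import AVFoundation

// Stand-in for the project's TextureRotation enum, so the example compiles on its own.
enum TextureRotation {
    case rotate0Degrees, rotate90Degrees, rotate180Degrees, rotate270Degrees
}

// Inspect the first video track's preferredTransform once and map it to a rotation.
func textureRotation(for asset: AVAsset) -> TextureRotation {
    guard let track = asset.tracks(withMediaType: .video).first else {
        return .rotate0Degrees
    }
    let t = track.preferredTransform
    if t.a == -1.0 && t.d == -1.0 {
        return .rotate180Degrees
    } else if t.a == 1.0 && t.d == 1.0 {
        return .rotate0Degrees
    } else if t.b == -1.0 && t.c == 1.0 {
        return .rotate270Degrees
    } else {
        return .rotate90Degrees
    }
}

// let asset = AVAsset(url: videoURL)          // videoURL: any local video file (assumed)
// let rotation = textureRotation(for: asset)  // fed to both the composer and the texture source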
@@ -496,7 +496,7 @@ final class MediaEditorScreenComponent: Component {
 containerSize: CGSize(width: 40.0, height: 40.0)
 )
 let drawButtonFrame = CGRect(
-origin: CGPoint(x: floorToScreenPixels(availableSize.width / 4.0 - 3.0 - drawButtonSize.width / 2.0), y: availableSize.height - environment.safeInsets.bottom + buttonBottomInset),
+origin: CGPoint(x: floorToScreenPixels(availableSize.width / 4.0 - 3.0 - drawButtonSize.width / 2.0), y: availableSize.height - environment.safeInsets.bottom + buttonBottomInset + 1.0),
 size: drawButtonSize
 )
 if let drawButtonView = self.drawButton.view {

@@ -521,7 +521,7 @@ final class MediaEditorScreenComponent: Component {
 containerSize: CGSize(width: 40.0, height: 40.0)
 )
 let textButtonFrame = CGRect(
-origin: CGPoint(x: floorToScreenPixels(availableSize.width / 2.5 + 5.0 - textButtonSize.width / 2.0), y: availableSize.height - environment.safeInsets.bottom + buttonBottomInset),
+origin: CGPoint(x: floorToScreenPixels(availableSize.width / 2.5 + 5.0 - textButtonSize.width / 2.0), y: availableSize.height - environment.safeInsets.bottom + buttonBottomInset + 1.0),
 size: textButtonSize
 )
 if let textButtonView = self.textButton.view {

@@ -546,7 +546,7 @@ final class MediaEditorScreenComponent: Component {
 containerSize: CGSize(width: 40.0, height: 40.0)
 )
 let stickerButtonFrame = CGRect(
-origin: CGPoint(x: floorToScreenPixels(availableSize.width - availableSize.width / 2.5 - 5.0 - stickerButtonSize.width / 2.0), y: availableSize.height - environment.safeInsets.bottom + buttonBottomInset),
+origin: CGPoint(x: floorToScreenPixels(availableSize.width - availableSize.width / 2.5 - 5.0 - stickerButtonSize.width / 2.0), y: availableSize.height - environment.safeInsets.bottom + buttonBottomInset + 1.0),
 size: stickerButtonSize
 )
 if let stickerButtonView = self.stickerButton.view {

@@ -571,7 +571,7 @@ final class MediaEditorScreenComponent: Component {
 containerSize: CGSize(width: 40.0, height: 40.0)
 )
 let toolsButtonFrame = CGRect(
-origin: CGPoint(x: floorToScreenPixels(availableSize.width / 4.0 * 3.0 + 3.0 - toolsButtonSize.width / 2.0), y: availableSize.height - environment.safeInsets.bottom + buttonBottomInset),
+origin: CGPoint(x: floorToScreenPixels(availableSize.width / 4.0 * 3.0 + 3.0 - toolsButtonSize.width / 2.0), y: availableSize.height - environment.safeInsets.bottom + buttonBottomInset + 1.0),
 size: toolsButtonSize
 )
 if let toolsButtonView = self.toolsButton.view {

@@ -592,24 +592,19 @@ final class MediaEditorScreenComponent: Component {
 context: component.context,
 duration: playerState.duration,
 startPosition: playerState.timeRange?.lowerBound ?? 0.0,
-endPosition: playerState.timeRange?.upperBound ?? playerState.duration,
+endPosition: playerState.timeRange?.upperBound ?? min(playerState.duration, storyMaxVideoDuration),
 position: playerState.position,
+maxDuration: storyMaxVideoDuration,
 frames: playerState.frames,
 framesUpdateTimestamp: playerState.framesUpdateTimestamp,
-startPositionUpdated: { [weak mediaEditor] position, done in
+trimUpdated: { [weak mediaEditor] start, end, updatedEnd, done in
 if let mediaEditor {
-mediaEditor.setVideoTrimStart(position)
-mediaEditor.seek(position, andPlay: done)
-}
-},
-endPositionUpdated: { [weak mediaEditor] position, done in
-if let mediaEditor {
-mediaEditor.setVideoTrimEnd(position)
+mediaEditor.setVideoTrimStart(start)
+mediaEditor.setVideoTrimEnd(end)
 if done {
-let start = mediaEditor.values.videoTrimRange?.lowerBound ?? 0.0
 mediaEditor.seek(start, andPlay: true)
 } else {
-mediaEditor.seek(position, andPlay: false)
+mediaEditor.seek(updatedEnd ? end : start, andPlay: false)
 }
 }
 },
@@ -730,7 +725,7 @@ final class MediaEditorScreenComponent: Component {
 }
 case let .message(peerIds, _):
 if peerIds.count == 1 {
-privacyText = "User Test"
+privacyText = "1 Recipient"
 } else {
 privacyText = "\(peerIds.count) Recipients"
 }

@@ -871,6 +866,7 @@ final class MediaEditorScreenComponent: Component {
 }

 private let storyDimensions = CGSize(width: 1080.0, height: 1920.0)
+private let storyMaxVideoDuration: Double = 60.0

 public enum MediaEditorResultPrivacy: Equatable {
 case story(privacy: EngineStoryPrivacy, archive: Bool)

@@ -928,6 +924,7 @@ public final class MediaEditorScreen: ViewController {
 fileprivate final class Node: ViewControllerTracingNode, UIGestureRecognizerDelegate {
 private weak var controller: MediaEditorScreen?
 private let context: AccountContext
+private var interaction: DrawingToolsInteraction?
 private let initializationTimestamp = CACurrentMediaTime()

 fileprivate var subject: MediaEditorScreen.Subject?

@@ -1162,6 +1159,51 @@ public final class MediaEditorScreen: ViewController {
 let rotateGestureRecognizer = UIRotationGestureRecognizer(target: self, action: #selector(self.handleRotate(_:)))
 rotateGestureRecognizer.delegate = self
 self.previewContainerView.addGestureRecognizer(rotateGestureRecognizer)
+
+let tapGestureRecognizer = UIPanGestureRecognizer(target: self, action: #selector(self.handleTap(_:)))
+self.previewContainerView.addGestureRecognizer(tapGestureRecognizer)
+
+self.interaction = DrawingToolsInteraction(
+context: self.context,
+drawingView: self.drawingView,
+entitiesView: self.entitiesView,
+selectionContainerView: self.selectionContainerView,
+isVideo: false,
+updateSelectedEntity: { _ in
+
+},
+updateVideoPlayback: { [weak self] isPlaying in
+if let self, let mediaEditor = self.mediaEditor {
+if isPlaying {
+mediaEditor.play()
+} else {
+mediaEditor.stop()
+}
+}
+},
+updateColor: { [weak self] color in
+if let self, let selectedEntityView = self.entitiesView.selectedEntityView {
+selectedEntityView.entity.color = color
+selectedEntityView.update(animated: false)
+}
+},
+getCurrentImage: {
+return nil
+},
+getControllerNode: { [weak self] in
+return self
+},
+present: { [weak self] c, i, a in
+if let self {
+self.controller?.present(c, in: i, with: a)
+}
+},
+addSubview: { [weak self] view in
+if let self {
+self.view.addSubview(view)
+}
+}
+)
 }

 @objc func gestureRecognizer(_ gestureRecognizer: UIGestureRecognizer, shouldRecognizeSimultaneouslyWith otherGestureRecognizer: UIGestureRecognizer) -> Bool {
@@ -1180,6 +1222,12 @@ public final class MediaEditorScreen: ViewController {
 self.entitiesView.handleRotate(gestureRecognizer)
 }

+@objc func handleTap(_ gestureRecognizer: UITapGestureRecognizer) {
+if self.entitiesView.hasSelection {
+self.entitiesView.selectEntity(nil)
+}
+}
+
 func animateIn() {
 if let transitionIn = self.controller?.transitionIn {
 switch transitionIn {

@@ -1435,21 +1483,6 @@ public final class MediaEditorScreen: ViewController {
 }
 }

-private func insertDrawingEntity(_ entity: DrawingEntity) {
-self.entitiesView.prepareNewEntity(entity)
-self.entitiesView.add(entity)
-self.entitiesView.selectEntity(entity)
-
-if let entityView = entitiesView.getView(for: entity.uuid) {
-entityView.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2)
-entityView.layer.animateScale(from: 0.1, to: entity.scale, duration: 0.2)
-
-if let selectionView = entityView.selectionView {
-selectionView.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2, delay: 0.2)
-}
-}
-}
-
 private var drawingScreen: DrawingScreen?
 func containerLayoutUpdated(layout: ContainerViewLayout, forceUpdate: Bool = false, animateOut: Bool = false, transition: Transition) {
 guard let controller = self.controller else {

@@ -1492,71 +1525,65 @@ public final class MediaEditorScreen: ViewController {
 privacy: controller.state.privacy,
 openDrawing: { [weak self] mode in
 if let self {
+if self.entitiesView.hasSelection {
+self.entitiesView.selectEntity(nil)
+}
 switch mode {
 case .sticker:
 let controller = StickerPickerScreen(context: self.context, inputData: self.stickerPickerInputData.get())
 controller.completion = { [weak self] file in
 if let self, let file {
 let stickerEntity = DrawingStickerEntity(content: .file(file))
-self.insertDrawingEntity(stickerEntity)
+self.interaction?.insertEntity(stickerEntity)
 }
 }
 self.controller?.present(controller, in: .current)
 return
 case .text:
-break
-default:
-break
-}
+let textEntity = DrawingTextEntity(text: NSAttributedString(), style: .regular, animation: .none, font: .sanFrancisco, alignment: .center, fontSize: 1.0, color: DrawingColor(color: .white))
+self.interaction?.insertEntity(textEntity)
+return
+case .drawing:
 let controller = DrawingScreen(context: self.context, sourceHint: .storyEditor, size: self.previewContainerView.frame.size, originalSize: storyDimensions, isVideo: false, isAvatar: false, drawingView: self.drawingView, entitiesView: self.entitiesView, selectionContainerView: self.selectionContainerView, existingStickerPickerInputData: self.stickerPickerInputData)
 self.drawingScreen = controller
 self.drawingView.isUserInteractionEnabled = true

 controller.requestDismiss = { [weak controller, weak self] in
 self?.drawingScreen = nil
 controller?.animateOut({
 controller?.dismiss()
 })
 self?.drawingView.isUserInteractionEnabled = false
 self?.animateInFromTool()

 self?.entitiesView.selectEntity(nil)
 }
 controller.requestApply = { [weak controller, weak self] in
 self?.drawingScreen = nil
 controller?.animateOut({
 controller?.dismiss()
 })
 self?.drawingView.isUserInteractionEnabled = false
 self?.animateInFromTool()

 if let result = controller?.generateDrawingResultData() {
 self?.mediaEditor?.setDrawingAndEntities(data: result.data, image: result.drawingImage, entities: result.entities)
 } else {
 self?.mediaEditor?.setDrawingAndEntities(data: nil, image: nil, entities: [])
 }

 self?.entitiesView.selectEntity(nil)
 }
 self.controller?.present(controller, in: .current)
-
-switch mode {
-case .sticker:
-controller.presentStickerSelection()
-case .text:
-Queue.mainQueue().after(0.05, {
-controller.addTextEntity()
-})
-default:
-break
-}
-
 self.animateOutToTool()
+}
 }
 },
 openTools: { [weak self] in
 if let self, let mediaEditor = self.mediaEditor {
+if self.entitiesView.hasSelection {
+self.entitiesView.selectEntity(nil)
+}
 let controller = MediaToolsScreen(context: self.context, mediaEditor: mediaEditor)
 controller.dismissed = { [weak self] in
 if let self {
@@ -1605,6 +1632,8 @@ public final class MediaEditorScreen: ViewController {

 transition.setFrame(view: self.selectionContainerView, frame: CGRect(origin: .zero, size: previewFrame.size))

+self.interaction?.containerLayoutUpdated(layout: layout, transition: transition)
+
 if isFirstTime {
 self.animateIn()
 }

@@ -1968,7 +1997,10 @@ public final class MediaEditorScreen: ViewController {
 }

 mediaEditor.stop()
+
+let codableEntities = self.node.entitiesView.entities.filter { !($0 is DrawingMediaEntity) }.compactMap({ CodableDrawingEntity(entity: $0) })
+mediaEditor.setDrawingAndEntities(data: nil, image: mediaEditor.values.drawing, entities: codableEntities)

 if mediaEditor.resultIsVideo {
 let videoResult: Result.VideoResult
 let duration: Double

@@ -23,10 +23,10 @@ final class VideoScrubberComponent: Component {
 let startPosition: Double
 let endPosition: Double
 let position: Double
+let maxDuration: Double
 let frames: [UIImage]
 let framesUpdateTimestamp: Double
-let startPositionUpdated: (Double, Bool) -> Void
-let endPositionUpdated: (Double, Bool) -> Void
+let trimUpdated: (Double, Double, Bool, Bool) -> Void
 let positionUpdated: (Double, Bool) -> Void

 init(

@@ -35,10 +35,10 @@ final class VideoScrubberComponent: Component {
 startPosition: Double,
 endPosition: Double,
 position: Double,
+maxDuration: Double,
 frames: [UIImage],
 framesUpdateTimestamp: Double,
-startPositionUpdated: @escaping (Double, Bool) -> Void,
-endPositionUpdated: @escaping (Double, Bool) -> Void,
+trimUpdated: @escaping (Double, Double, Bool, Bool) -> Void,
 positionUpdated: @escaping (Double, Bool) -> Void
 ) {
 self.context = context

@@ -46,10 +46,10 @@ final class VideoScrubberComponent: Component {
 self.startPosition = startPosition
 self.endPosition = endPosition
 self.position = position
+self.maxDuration = maxDuration
 self.frames = frames
 self.framesUpdateTimestamp = framesUpdateTimestamp
-self.startPositionUpdated = startPositionUpdated
-self.endPositionUpdated = endPositionUpdated
+self.trimUpdated = trimUpdated
 self.positionUpdated = positionUpdated
 }

@@ -69,6 +69,9 @@ final class VideoScrubberComponent: Component {
 if lhs.position != rhs.position {
 return false
 }
+if lhs.maxDuration != rhs.maxDuration {
+return false
+}
 if lhs.framesUpdateTimestamp != rhs.framesUpdateTimestamp {
 return false
 }
@@ -165,22 +168,28 @@ final class VideoScrubberComponent: Component {
 let end = self.frame.width - handleWidth
 let length = end - start
 let fraction = (location.x - start) / length
-var value = max(0.0, component.duration * fraction)
-if value > component.endPosition - minumumDuration {
-value = max(0.0, component.endPosition - minumumDuration)
+var startValue = max(0.0, component.duration * fraction)
+if startValue > component.endPosition - minumumDuration {
+startValue = max(0.0, component.endPosition - minumumDuration)
+}
+var endValue = component.endPosition
+if endValue - startValue > component.maxDuration {
+let delta = (endValue - startValue) - component.maxDuration
+endValue -= delta
 }

 var transition: Transition = .immediate
 switch gestureRecognizer.state {
 case .began, .changed:
 self.isPanningHandle = true
-component.startPositionUpdated(value, false)
+component.trimUpdated(startValue, endValue, false, false)
 if case .began = gestureRecognizer.state {
 transition = .easeInOut(duration: 0.25)
 }
 case .ended, .cancelled:
 self.isPanningHandle = false
-component.startPositionUpdated(value, true)
+component.trimUpdated(startValue, endValue, false, true)
 transition = .easeInOut(duration: 0.25)
 default:
 break

@@ -197,22 +206,28 @@ final class VideoScrubberComponent: Component {
 let end = self.frame.width - handleWidth
 let length = end - start
 let fraction = (location.x - start) / length
-var value = min(component.duration, component.duration * fraction)
-if value < component.startPosition + minumumDuration {
-value = min(component.duration, component.startPosition + minumumDuration)
+var endValue = min(component.duration, component.duration * fraction)
+if endValue < component.startPosition + minumumDuration {
+endValue = min(component.duration, component.startPosition + minumumDuration)
+}
+var startValue = component.startPosition
+if endValue - startValue > component.maxDuration {
+let delta = (endValue - startValue) - component.maxDuration
+startValue += delta
 }

 var transition: Transition = .immediate
 switch gestureRecognizer.state {
 case .began, .changed:
 self.isPanningHandle = true
-component.endPositionUpdated(value, false)
+component.trimUpdated(startValue, endValue, true, false)
 if case .began = gestureRecognizer.state {
 transition = .easeInOut(duration: 0.25)
 }
 case .ended, .cancelled:
 self.isPanningHandle = false
-component.endPositionUpdated(value, true)
+component.trimUpdated(startValue, endValue, true, true)
 transition = .easeInOut(duration: 0.25)
 default:
 break
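Both trim handles now clamp the selection to the new maxDuration by dragging the opposite edge along. The rule, factored out as a standalone sketch (function and parameter names are illustrative, not the project's):

import Foundation

// Clamp a trim range: keep it at least minimumDuration long and at most maxDuration long.
// When a limit is exceeded, the handle that is not being dragged gives way, matching the
// startValue/endValue adjustments in the two gesture handlers above.
func clampedTrimRange(start: Double, end: Double, duration: Double,
                      minimumDuration: Double, maxDuration: Double,
                      draggingEnd: Bool) -> (start: Double, end: Double) {
    var startValue = max(0.0, start)
    var endValue = min(duration, end)
    if draggingEnd {
        if endValue < startValue + minimumDuration {
            endValue = min(duration, startValue + minimumDuration)
        }
        if endValue - startValue > maxDuration {
            startValue = endValue - maxDuration
        }
    } else {
        if startValue > endValue - minimumDuration {
            startValue = max(0.0, endValue - minimumDuration)
        }
        if endValue - startValue > maxDuration {
            endValue = startValue + maxDuration
        }
    }
    return (startValue, endValue)
}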
@@ -376,7 +376,7 @@ public final class TelegramRootController: NavigationController, TelegramRootCon
 switch privacy {
 case let .story(storyPrivacy, _):
 let _ = self.context.engine.messages.uploadStory(media: .image(dimensions: dimensions, data: imageData), text: caption?.string ?? "", entities: [], privacy: storyPrivacy).start()
-Queue.mainQueue().after(0.2, { [weak chatListController] in
+Queue.mainQueue().after(0.3, { [weak chatListController] in
 chatListController?.animateStoryUploadRipple()
 })
 case let .message(peerIds, timeout):

@@ -457,7 +457,7 @@ public final class TelegramRootController: NavigationController, TelegramRootCon
 }
 if case let .story(storyPrivacy, _) = privacy {
 let _ = self.context.engine.messages.uploadStory(media: .video(dimensions: dimensions, duration: Int(duration), resource: resource), text: caption?.string ?? "", entities: [], privacy: storyPrivacy).start()
-Queue.mainQueue().after(0.2, { [weak chatListController] in
+Queue.mainQueue().after(0.3, { [weak chatListController] in
 chatListController?.animateStoryUploadRipple()
 })
 } else {

@@ -468,7 +468,9 @@ public final class TelegramRootController: NavigationController, TelegramRootCon
 }

 dismissCameraImpl?()
-commit()
+Queue.mainQueue().after(0.1) {
+commit()
+}
 }
 )
 controller.cancelled = { showDraftTooltip in