Camera and editor improvements

This commit is contained in:
Ilya Laktyushin 2023-05-27 02:29:23 +04:00
parent e419ccf8f3
commit 2d738fbfac
20 changed files with 960 additions and 703 deletions

View File

@ -57,6 +57,7 @@ swift_library(
"//submodules/AsyncDisplayKit:AsyncDisplayKit",
"//submodules/Display:Display",
"//submodules/ImageBlur:ImageBlur",
"//submodules/TelegramCore:TelegramCore",
],
visibility = [
"//visibility:public",

View File

@ -40,14 +40,14 @@ private final class CameraContext {
}
}
+ private let previewSnapshotContext = CIContext()
+ private var lastSnapshotTimestamp: Double = CACurrentMediaTime()
private func savePreviewSnapshot(pixelBuffer: CVPixelBuffer) {
Queue.concurrentDefaultQueue().async {
- let ciContext = CIContext()
var ciImage = CIImage(cvImageBuffer: pixelBuffer)
ciImage = ciImage.transformed(by: CGAffineTransform(scaleX: 0.33, y: 0.33))
- ciImage = ciImage.clampedToExtent()
- if let cgImage = ciContext.createCGImage(ciImage, from: ciImage.extent) {
+ let size = ciImage.extent.size
+ ciImage = ciImage.clampedToExtent().applyingGaussianBlur(sigma: 40.0).cropped(to: CGRect(origin: .zero, size: size))
+ if let cgImage = self.previewSnapshotContext.createCGImage(ciImage, from: ciImage.extent) {
let uiImage = UIImage(cgImage: cgImage, scale: 1.0, orientation: .right)
CameraSimplePreviewView.saveLastStateImage(uiImage)
}
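A note on the hunk above: the snapshot is now blurred once at capture time with a shared CIContext, instead of creating a CIContext per call and blurring later at load. A self-contained sketch of the same Core Image pipeline (names are illustrative, not the project's API):

import CoreImage
import CoreVideo
import UIKit

// Downscale first so the blur is cheap, clamp the extent so the blur does not
// fade at the borders, then crop back to the pre-blur size.
func blurredSnapshot(pixelBuffer: CVPixelBuffer, context: CIContext) -> UIImage? {
    var image = CIImage(cvImageBuffer: pixelBuffer)
        .transformed(by: CGAffineTransform(scaleX: 0.33, y: 0.33))
    let size = image.extent.size
    image = image.clampedToExtent()
        .applyingGaussianBlur(sigma: 40.0)
        .cropped(to: CGRect(origin: .zero, size: size))
    guard let cgImage = context.createCGImage(image, from: image.extent) else {
        return nil
    }
    return UIImage(cgImage: cgImage, scale: 1.0, orientation: .right)
}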
@ -67,7 +67,7 @@ private final class CameraContext {
self.input.configure(for: self.session, device: self.device, audio: configuration.audio)
self.output.configure(for: self.session, configuration: configuration)
- self.device.configureDeviceFormat(maxDimensions: CMVideoDimensions(width: 1920, height: 1080), maxFramerate: 60)
+ self.device.configureDeviceFormat(maxDimensions: CMVideoDimensions(width: 1920, height: 1080), maxFramerate: self.preferredMaxFrameRate)
self.output.configureVideoStabilization()
}
@ -115,6 +115,15 @@ private final class CameraContext {
}
}
+ private var preferredMaxFrameRate: Double {
+ switch DeviceModel.current {
+ case .iPhone14ProMax, .iPhone13ProMax:
+ return 60.0
+ default:
+ return 30.0
+ }
+ }
func startCapture() {
guard !self.session.isRunning else {
return
@ -160,7 +169,7 @@ private final class CameraContext {
self.changingPosition = true
self.device.configure(for: self.session, position: targetPosition)
self.input.configure(for: self.session, device: self.device, audio: self.initialConfiguration.audio)
- self.device.configureDeviceFormat(maxDimensions: CMVideoDimensions(width: 1920, height: 1080), maxFramerate: 60)
+ self.device.configureDeviceFormat(maxDimensions: CMVideoDimensions(width: 1920, height: 1080), maxFramerate: self.preferredMaxFrameRate)
self.output.configureVideoStabilization()
self.queue.after(0.5) {
self.changingPosition = false
@ -173,7 +182,7 @@ private final class CameraContext {
self.input.invalidate(for: self.session)
self.device.configure(for: self.session, position: position)
self.input.configure(for: self.session, device: self.device, audio: self.initialConfiguration.audio)
- self.device.configureDeviceFormat(maxDimensions: CMVideoDimensions(width: 1920, height: 1080), maxFramerate: 60)
+ self.device.configureDeviceFormat(maxDimensions: CMVideoDimensions(width: 1920, height: 1080), maxFramerate: self.preferredMaxFrameRate)
self.output.configureVideoStabilization()
}
}
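The new preferredMaxFrameRate property caps 60 fps capture to the Pro Max models and falls back to 30 fps elsewhere. For context, a hedged sketch of how such a preference is typically applied to an AVCaptureDevice, clamped to what the active format supports (helper name and structure are assumptions, not this codebase's configureDeviceFormat):

import AVFoundation

// Clamp the preferred rate to the active format's supported range, then pin
// both min and max frame durations to lock in that rate.
func applyPreferredFrameRate(_ preferred: Double, to device: AVCaptureDevice) throws {
    guard let range = device.activeFormat.videoSupportedFrameRateRanges.first else {
        return
    }
    let fps = min(preferred, range.maxFrameRate)
    try device.lockForConfiguration()
    device.activeVideoMinFrameDuration = CMTime(value: 1, timescale: Int32(fps))
    device.activeVideoMaxFrameDuration = CMTime(value: 1, timescale: Int32(fps))
    device.unlockForConfiguration()
}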

View File

@ -1,6 +1,7 @@
import Foundation
import AVFoundation
import SwiftSignalKit
+ import TelegramCore
private let defaultFPS: Double = 30.0
@ -68,6 +69,14 @@ final class CameraDevice {
if let bestFormat = candidates.last {
device.activeFormat = bestFormat
Logger.shared.log("Camera", "Available formats:")
for format in device.formats {
Logger.shared.log("Camera", format.description)
}
Logger.shared.log("Camera", "Selected format:")
Logger.shared.log("Camera", bestFormat.description)
}
if let targetFPS = device.actualFPS(maxFramerate) {

View File

@ -23,6 +23,8 @@ public extension Camera {
self = .iPhone14ProMax
case .unknown:
self = .unknown
+ default:
+ self = .unknown
}
}
@ -70,6 +72,16 @@ enum DeviceModel: CaseIterable {
case iPodTouch6
case iPodTouch7
+ case iPhone12
+ case iPhone12Mini
+ case iPhone12Pro
+ case iPhone12ProMax
+ case iPhone13
+ case iPhone13Mini
+ case iPhone13Pro
+ case iPhone13ProMax
case iPhone14
case iPhone14Plus
case iPhone14Pro
@ -93,6 +105,22 @@ enum DeviceModel: CaseIterable {
return "iPod7,1"
case .iPodTouch7:
return "iPod9,1"
+ case .iPhone12:
+ return "iPhone13,2"
+ case .iPhone12Mini:
+ return "iPhone13,1"
+ case .iPhone12Pro:
+ return "iPhone13,3"
+ case .iPhone12ProMax:
+ return "iPhone13,4"
+ case .iPhone13:
+ return "iPhone14,5"
+ case .iPhone13Mini:
+ return "iPhone14,4"
+ case .iPhone13Pro:
+ return "iPhone14,2"
+ case .iPhone13ProMax:
+ return "iPhone14,3"
case .iPhone14:
return "iPhone14,7"
case .iPhone14Plus:
@ -122,6 +150,22 @@ enum DeviceModel: CaseIterable {
return "iPod touch 6G"
case .iPodTouch7:
return "iPod touch 7G"
+ case .iPhone12:
+ return "iPhone 12"
+ case .iPhone12Mini:
+ return "iPhone 12 mini"
+ case .iPhone12Pro:
+ return "iPhone 12 Pro"
+ case .iPhone12ProMax:
+ return "iPhone 12 Pro Max"
+ case .iPhone13:
+ return "iPhone 13"
+ case .iPhone13Mini:
+ return "iPhone 13 mini"
+ case .iPhone13Pro:
+ return "iPhone 13 Pro"
+ case .iPhone13ProMax:
+ return "iPhone 13 Pro Max"
case .iPhone14:
return "iPhone 14"
case .iPhone14Plus:
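DeviceModel.current presumably matches one of the raw identifiers above (e.g. "iPhone14,3") against the running hardware. A common way to obtain that identifier, shown as an illustrative sketch rather than the project's implementation:

import Darwin

// Query the kernel for the machine identifier, e.g. "iPhone14,3".
func machineIdentifier() -> String {
    var size = 0
    sysctlbyname("hw.machine", nil, &size, nil, 0)
    var machine = [CChar](repeating: 0, count: size)
    sysctlbyname("hw.machine", &machine, &size, nil, 0)
    return String(cString: machine)
}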

View File

@ -65,7 +65,6 @@ final class CameraOutput: NSObject {
super.init()
self.videoOutput.alwaysDiscardsLateVideoFrames = false
//self.videoOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey: kCVPixelFormatType_32BGRA] as [String : Any]
self.videoOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey: kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange] as [String : Any]
self.faceLandmarksOutput.outputFaceObservations = { [weak self] observations in

View File

@ -21,7 +21,7 @@ public class CameraSimplePreviewView: UIView {
static func saveLastStateImage(_ image: UIImage) {
let imagePath = NSTemporaryDirectory() + "cameraImage.jpg"
- if let blurredImage = blurredImage(image, radius: 60.0), let data = blurredImage.jpegData(compressionQuality: 0.85) {
+ if let data = image.jpegData(compressionQuality: 0.6) {
try? data.write(to: URL(fileURLWithPath: imagePath))
}
}

View File

@ -937,7 +937,7 @@ final class ColorSpectrumComponent: Component {
}
}
- final class ColorSpectrumPickerView: UIView, UIGestureRecognizerDelegate {
+ public final class ColorSpectrumPickerView: UIView, UIGestureRecognizerDelegate {
private var validSize: CGSize?
private var selectedColor: DrawingColor?
@ -950,7 +950,7 @@ final class ColorSpectrumPickerView: UIView, UIGestureRecognizerDelegate {
private var circleMaskView = UIView()
private let maskCircle = SimpleShapeLayer()
- var selected: (DrawingColor) -> Void = { _ in }
+ public var selected: (DrawingColor) -> Void = { _ in }
private var bitmapData: UnsafeMutableRawPointer?
@ -1048,7 +1048,7 @@ final class ColorSpectrumPickerView: UIView, UIGestureRecognizerDelegate {
private var animatingIn = false
private var scheduledAnimateOut: (() -> Void)?
- func animateIn() {
+ public func animateIn() {
self.animatingIn = true
Queue.mainQueue().after(0.15) {
@ -1107,7 +1107,7 @@ final class ColorSpectrumPickerView: UIView, UIGestureRecognizerDelegate {
})
}
- func updateLayout(size: CGSize, selectedColor: DrawingColor?) -> CGSize {
+ public func updateLayout(size: CGSize, selectedColor: DrawingColor?) -> CGSize {
let previousSize = self.validSize
let imageSize = size
@ -2413,10 +2413,10 @@ private final class ColorPickerSheetComponent: CombinedComponent {
}
}
- class ColorPickerScreen: ViewControllerComponentContainer {
+ public final class ColorPickerScreen: ViewControllerComponentContainer {
private var dismissed: () -> Void
- init(context: AccountContext, initialColor: DrawingColor, updated: @escaping (DrawingColor) -> Void, openEyedropper: @escaping () -> Void, dismissed: @escaping () -> Void = {}) {
+ public init(context: AccountContext, initialColor: DrawingColor, updated: @escaping (DrawingColor) -> Void, openEyedropper: @escaping () -> Void, dismissed: @escaping () -> Void = {}) {
self.dismissed = dismissed
super.init(context: context, component: ColorPickerSheetComponent(context: context, initialColor: initialColor, updated: updated, openEyedropper: openEyedropper, dismissed: dismissed), navigationBarAppearance: .none)

View File

@ -54,7 +54,7 @@ public final class DrawingEntitiesView: UIView, TGPhotoDrawingEntitiesView {
public weak var selectionContainerView: DrawingSelectionContainerView?
private var tapGestureRecognizer: UITapGestureRecognizer!
- private(set) var selectedEntityView: DrawingEntityView?
+ public private(set) var selectedEntityView: DrawingEntityView?
public var getEntityCenterPosition: () -> CGPoint = { return .zero }
public var getEntityInitialRotation: () -> CGFloat = { return 0.0 }
@ -593,7 +593,7 @@ protocol DrawingEntityMediaView: DrawingEntityView {
public class DrawingEntityView: UIView {
let context: AccountContext
- let entity: DrawingEntity
+ public let entity: DrawingEntity
var isTracking = false
public weak var selectionView: DrawingEntitySelectionView?
@ -645,7 +645,7 @@ public class DrawingEntityView: UIView {
}
- func update(animated: Bool = false) {
+ public func update(animated: Bool = false) {
self.updateSelectionView()
}

View File

@ -89,7 +89,7 @@ public final class DrawingMediaEntityView: DrawingEntityView, DrawingEntityMedia
}
public var updated: (() -> Void)?
- override func update(animated: Bool) {
+ public override func update(animated: Bool) {
self.center = self.mediaEntity.position
let size = self.mediaEntity.baseSize

File diff suppressed because it is too large

View File

@ -38,7 +38,7 @@ private func generateGridImage(size: CGSize, light: Bool) -> UIImage? {
})
}
- final class EyedropperView: UIView {
+ public final class EyedropperView: UIView {
private weak var drawingView: DrawingView?
private let containerView: UIView

View File

@ -872,7 +872,7 @@ public class CameraScreen: ViewController {
} else {
if translation.x < -10.0 {
- let transitionFraction = 1.0 - abs(translation.x) / self.frame.width
+ let transitionFraction = 1.0 - max(0.0, translation.x * -1.0) / self.frame.width
controller.updateTransitionProgress(transitionFraction, transition: .immediate)
} else if translation.y < -10.0 {
controller.presentGallery()
@ -882,7 +882,7 @@ public class CameraScreen: ViewController {
}
case .ended:
let velocity = gestureRecognizer.velocity(in: self.view)
- let transitionFraction = 1.0 - abs(translation.x) / self.frame.width
+ let transitionFraction = 1.0 - max(0.0, translation.x * -1.0) / self.frame.width
controller.completeWithTransitionProgress(transitionFraction, velocity: abs(velocity.x), dismissing: true)
default:
break
@ -982,6 +982,7 @@ public class CameraScreen: ViewController {
}
func resumeCameraCapture() {
+ if self.simplePreviewView?.isEnabled == false {
if let snapshot = self.simplePreviewView?.snapshotView(afterScreenUpdates: false) {
self.simplePreviewView?.addSubview(snapshot)
self.previewSnapshotView = snapshot
@ -1005,6 +1006,7 @@ public class CameraScreen: ViewController {
}
}
}
+ }
func animateInFromEditor(toGallery: Bool) {
if !toGallery {
@ -1344,7 +1346,7 @@ public class CameraScreen: ViewController {
private var isTransitioning = false
public func updateTransitionProgress(_ transitionFraction: CGFloat, transition: ContainedViewLayoutTransition, completion: @escaping () -> Void = {}) {
self.isTransitioning = true
- let offsetX = (1.0 - transitionFraction) * self.node.frame.width * -1.0
+ let offsetX = floorToScreenPixels((1.0 - transitionFraction) * self.node.frame.width * -1.0)
transition.updateTransform(layer: self.node.backgroundView.layer, transform: CGAffineTransform(translationX: offsetX, y: 0.0))
transition.updateTransform(layer: self.node.containerView.layer, transform: CGAffineTransform(translationX: offsetX, y: 0.0))
let scale = max(0.8, min(1.0, 0.8 + 0.2 * transitionFraction))
@ -1359,7 +1361,7 @@ public class CameraScreen: ViewController {
self.statusBar.updateStatusBarStyle(transitionFraction > 0.45 ? .White : .Ignore, animated: true)
if let navigationController = self.navigationController as? NavigationController {
- let offsetX = transitionFraction * self.node.frame.width
+ let offsetX = floorToScreenPixels(transitionFraction * self.node.frame.width)
navigationController.updateRootContainerTransitionOffset(offsetX, transition: transition)
}
}
@ -1367,7 +1369,7 @@ public class CameraScreen: ViewController {
public func completeWithTransitionProgress(_ transitionFraction: CGFloat, velocity: CGFloat, dismissing: Bool) {
self.isTransitioning = false
if dismissing {
- if transitionFraction < 0.7 || velocity > 1000.0 {
+ if transitionFraction < 0.7 || velocity < -1000.0 {
self.requestDismiss(animated: true, interactive: true)
} else {
self.updateTransitionProgress(1.0, transition: .animated(duration: 0.4, curve: .spring), completion: { [weak self] in
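The two offset changes above wrap the computed translation in floorToScreenPixels, a Display-module helper. Its usual definition (an assumption, not shown in this diff) snaps a value to the physical pixel grid so interactive transforms stay sharp:

import UIKit

// Round down to the nearest physical pixel; avoids blurry half-pixel offsets.
func floorToScreenPixels(_ value: CGFloat) -> CGFloat {
    let scale = UIScreen.main.scale
    return floor(value * scale) / scale
}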

View File

@ -464,6 +464,10 @@ public final class MediaEditor {
}
}
+ public func play() {
+ self.player?.play()
+ }
public func stop() {
self.player?.pause()
}

View File

@ -85,24 +85,14 @@ final class MediaEditorComposer {
self.renderChain.update(values: self.values)
}
- func processSampleBuffer(_ sampleBuffer: CMSampleBuffer, pool: CVPixelBufferPool?, completion: @escaping (CVPixelBuffer?) -> Void) {
- guard let textureCache = self.textureCache, let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer), let pool = pool else {
+ func processSampleBuffer(_ sampleBuffer: CMSampleBuffer, pool: CVPixelBufferPool?, textureRotation: TextureRotation, completion: @escaping (CVPixelBuffer?) -> Void) {
+ guard let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer), let pool = pool else {
completion(nil)
return
}
let time = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
let width = CVPixelBufferGetWidth(imageBuffer)
let height = CVPixelBufferGetHeight(imageBuffer)
- let format: MTLPixelFormat = .bgra8Unorm
- var textureRef : CVMetalTexture?
- let status = CVMetalTextureCacheCreateTextureFromImage(nil, textureCache, imageBuffer, nil, format, width, height, 0, &textureRef)
- var texture: MTLTexture?
- if status == kCVReturnSuccess {
- texture = CVMetalTextureGetTexture(textureRef!)
- }
- if let texture {
- self.renderer.consumeTexture(texture)
+ self.renderer.consumeVideoPixelBuffer(imageBuffer, rotation: textureRotation)
self.renderer.renderFrame()
if let finalTexture = self.renderer.finalTexture, var ciImage = CIImage(mtlTexture: finalTexture, options: [.colorSpace: self.colorSpace]) {
@ -126,7 +116,6 @@ final class MediaEditorComposer {
return
}
}
}
completion(nil)
}
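The composer now hands the renderer the raw pixel buffer plus a rotation instead of building a single BGRA Metal texture itself. For a 420YpCbCr8BiPlanar buffer (the format the camera output emits after this commit), a renderer typically derives separate luma and chroma textures, along these lines (illustrative sketch; consumeVideoPixelBuffer's actual implementation is not shown in this diff):

import CoreVideo
import Metal

// One r8Unorm texture for the Y plane, one rg8Unorm texture for interleaved CbCr.
func planeTextures(from buffer: CVPixelBuffer, cache: CVMetalTextureCache) -> (luma: MTLTexture, chroma: MTLTexture)? {
    func texture(plane: Int, format: MTLPixelFormat) -> MTLTexture? {
        let width = CVPixelBufferGetWidthOfPlane(buffer, plane)
        let height = CVPixelBufferGetHeightOfPlane(buffer, plane)
        var cvTexture: CVMetalTexture?
        let status = CVMetalTextureCacheCreateTextureFromImage(nil, cache, buffer, nil, format, width, height, plane, &cvTexture)
        guard status == kCVReturnSuccess, let cvTexture else {
            return nil
        }
        return CVMetalTextureGetTexture(cvTexture)
    }
    guard let luma = texture(plane: 0, format: .r8Unorm),
          let chroma = texture(plane: 1, format: .rg8Unorm) else {
        return nil
    }
    return (luma, chroma)
}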

View File

@ -50,7 +50,7 @@ public final class MediaEditorPreviewView: MTKView, MTKViewDelegate, RenderTarge
}
func scheduleFrame() {
- Queue.mainQueue().async {
+ Queue.mainQueue().justDispatch {
self.draw()
}
}
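Switching from async to justDispatch matters here because scheduleFrame is usually already called from the main thread. Assuming SwiftSignalKit's usual semantics, justDispatch runs the block inline when already on the target queue instead of waiting for the next run-loop pass, saving a frame of latency before draw(); a sketch of that behavior:

import Foundation

// Run inline if already on the main thread; otherwise hop onto it.
func justDispatchOnMain(_ f: @escaping () -> Void) {
    if Thread.isMainThread {
        f()
    } else {
        DispatchQueue.main.async(execute: f)
    }
}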

View File

@ -248,9 +248,6 @@ public final class MediaEditorVideoExport {
private let configuration: Configuration
private let outputPath: String
- private var previousSampleTime: CMTime = .zero
- private var processedPixelBuffer: CVPixelBuffer?
private var reader: AVAssetReader?
private var videoOutput: AVAssetReaderOutput?
@ -260,6 +257,7 @@ public final class MediaEditorVideoExport {
private var writer: MediaEditorVideoExportWriter?
private var composer: MediaEditorComposer?
+ private var textureRotation: TextureRotation = .rotate0Degrees
private let duration = ValuePromise<CMTime>()
private let pauseDispatchGroup = DispatchGroup()
@ -320,16 +318,23 @@ public final class MediaEditorVideoExport {
return
}
+ self.textureRotation = textureRotatonForAVAsset(asset)
writer.setup(configuration: self.configuration, outputPath: self.outputPath)
let videoTracks = asset.tracks(withMediaType: .video)
if (videoTracks.count > 0) {
- let outputSettings: [String : Any]
var sourceFrameRate: Float = 0.0
+ let outputSettings: [String: Any] = [
+ kCVPixelBufferPixelFormatTypeKey as String: [kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange],
+ AVVideoColorPropertiesKey: [
+ AVVideoColorPrimariesKey: AVVideoColorPrimaries_ITU_R_709_2,
+ AVVideoTransferFunctionKey: AVVideoTransferFunction_ITU_R_709_2,
+ AVVideoYCbCrMatrixKey: AVVideoYCbCrMatrix_ITU_R_709_2
+ ]
+ ]
if let videoTrack = videoTracks.first, videoTrack.preferredTransform.isIdentity && !self.configuration.values.requiresComposing {
- outputSettings = [kCVPixelBufferPixelFormatTypeKey as String: [kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange]]
} else {
- outputSettings = [kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA]
self.setupComposer()
}
let videoOutput = AVAssetReaderTrackOutput(track: videoTracks.first!, outputSettings: outputSettings)
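The settings above mean: when the track needs no rotation and no effects, samples pass straight through in YUV; otherwise they are routed through the composer. The surrounding exporter follows the standard AVAssetReader/AVAssetWriter pull loop, sketched here in simplified form (pausing, audio, and error handling omitted; not the exact code):

import AVFoundation

// Drain the reader into the writer, composing frames when a composer is set.
func drainVideo(output: AVAssetReaderOutput, input: AVAssetWriterInput, adaptor: AVAssetWriterInputPixelBufferAdaptor, compose: ((CMSampleBuffer) -> CVPixelBuffer?)?) {
    while input.isReadyForMoreMediaData {
        guard let sample = output.copyNextSampleBuffer() else {
            input.markAsFinished()
            return
        }
        if let compose {
            let timestamp = CMSampleBufferGetPresentationTimeStamp(sample)
            if let pixelBuffer = compose(sample) {
                adaptor.append(pixelBuffer, withPresentationTime: timestamp)
            }
        } else {
            input.append(sample)
        }
    }
}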
@ -516,7 +521,7 @@ public final class MediaEditorVideoExport {
if let buffer = output.copyNextSampleBuffer() {
if let composer = self.composer {
let timestamp = CMSampleBufferGetPresentationTimeStamp(buffer)
- composer.processSampleBuffer(buffer, pool: writer.pixelBufferPool, completion: { pixelBuffer in
+ composer.processSampleBuffer(buffer, pool: writer.pixelBufferPool, textureRotation: self.textureRotation, completion: { pixelBuffer in
if let pixelBuffer {
if !writer.appendPixelBuffer(pixelBuffer, at: timestamp) {
writer.markVideoAsFinished()

View File

@ -3,6 +3,28 @@ import AVFoundation
import Metal
import MetalKit
+ func textureRotatonForAVAsset(_ asset: AVAsset) -> TextureRotation {
+ for track in asset.tracks {
+ if track.mediaType == .video {
+ let t = track.preferredTransform
+ if t.a == -1.0 && t.d == -1.0 {
+ return .rotate180Degrees
+ } else if t.a == 1.0 && t.d == 1.0 {
+ return .rotate0Degrees
+ } else if t.b == -1.0 && t.c == 1.0 {
+ return .rotate270Degrees
+ } else if t.a == -1.0 && t.d == 1.0 {
+ return .rotate270Degrees
+ } else if t.a == 1.0 && t.d == -1.0 {
+ return .rotate180Degrees
+ } else {
+ return .rotate90Degrees
+ }
+ }
+ }
+ return .rotate0Degrees
+ }
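The matrix matching above handles the four axis-aligned orientations a preferredTransform can encode. An equivalent, more compact formulation (an alternative sketch, not what the commit uses) reads the angle straight off the transform:

import AVFoundation
import Foundation

// For pure rotations, t = [cos θ, sin θ; -sin θ, cos θ], so θ = atan2(b, a).
func rotationAngle(of track: AVAssetTrack) -> CGFloat {
    let t = track.preferredTransform
    return atan2(t.b, t.a) // 0, .pi / 2, .pi, or -.pi / 2
}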
final class VideoTextureSource: NSObject, TextureSource, AVPlayerItemOutputPullDelegate {
private let player: AVPlayer
private var playerItem: AVPlayerItem?
@ -80,23 +102,10 @@ final class VideoTextureSource: NSObject, TextureSource, AVPlayerItemOutputPullD
for track in playerItem.asset.tracks {
if track.mediaType == .video {
hasVideoTrack = true
- let t = track.preferredTransform
- if t.a == -1.0 && t.d == -1.0 {
- self.textureRotation = .rotate180Degrees
- } else if t.a == 1.0 && t.d == 1.0 {
- self.textureRotation = .rotate0Degrees
- } else if t.b == -1.0 && t.c == 1.0 {
- self.textureRotation = .rotate270Degrees
- } else if t.a == -1.0 && t.d == 1.0 {
- self.textureRotation = .rotate270Degrees
- } else if t.a == 1.0 && t.d == -1.0 {
- self.textureRotation = .rotate180Degrees
- } else {
- self.textureRotation = .rotate90Degrees
- }
break
}
}
+ self.textureRotation = textureRotatonForAVAsset(playerItem.asset)
if !hasVideoTrack {
assertionFailure("No video track found.")
return

View File

@ -496,7 +496,7 @@ final class MediaEditorScreenComponent: Component {
containerSize: CGSize(width: 40.0, height: 40.0)
)
let drawButtonFrame = CGRect(
- origin: CGPoint(x: floorToScreenPixels(availableSize.width / 4.0 - 3.0 - drawButtonSize.width / 2.0), y: availableSize.height - environment.safeInsets.bottom + buttonBottomInset),
+ origin: CGPoint(x: floorToScreenPixels(availableSize.width / 4.0 - 3.0 - drawButtonSize.width / 2.0), y: availableSize.height - environment.safeInsets.bottom + buttonBottomInset + 1.0),
size: drawButtonSize
)
if let drawButtonView = self.drawButton.view {
@ -521,7 +521,7 @@ final class MediaEditorScreenComponent: Component {
containerSize: CGSize(width: 40.0, height: 40.0)
)
let textButtonFrame = CGRect(
- origin: CGPoint(x: floorToScreenPixels(availableSize.width / 2.5 + 5.0 - textButtonSize.width / 2.0), y: availableSize.height - environment.safeInsets.bottom + buttonBottomInset),
+ origin: CGPoint(x: floorToScreenPixels(availableSize.width / 2.5 + 5.0 - textButtonSize.width / 2.0), y: availableSize.height - environment.safeInsets.bottom + buttonBottomInset + 1.0),
size: textButtonSize
)
if let textButtonView = self.textButton.view {
@ -546,7 +546,7 @@ final class MediaEditorScreenComponent: Component {
containerSize: CGSize(width: 40.0, height: 40.0)
)
let stickerButtonFrame = CGRect(
- origin: CGPoint(x: floorToScreenPixels(availableSize.width - availableSize.width / 2.5 - 5.0 - stickerButtonSize.width / 2.0), y: availableSize.height - environment.safeInsets.bottom + buttonBottomInset),
+ origin: CGPoint(x: floorToScreenPixels(availableSize.width - availableSize.width / 2.5 - 5.0 - stickerButtonSize.width / 2.0), y: availableSize.height - environment.safeInsets.bottom + buttonBottomInset + 1.0),
size: stickerButtonSize
)
if let stickerButtonView = self.stickerButton.view {
@ -571,7 +571,7 @@ final class MediaEditorScreenComponent: Component {
containerSize: CGSize(width: 40.0, height: 40.0)
)
let toolsButtonFrame = CGRect(
- origin: CGPoint(x: floorToScreenPixels(availableSize.width / 4.0 * 3.0 + 3.0 - toolsButtonSize.width / 2.0), y: availableSize.height - environment.safeInsets.bottom + buttonBottomInset),
+ origin: CGPoint(x: floorToScreenPixels(availableSize.width / 4.0 * 3.0 + 3.0 - toolsButtonSize.width / 2.0), y: availableSize.height - environment.safeInsets.bottom + buttonBottomInset + 1.0),
size: toolsButtonSize
)
if let toolsButtonView = self.toolsButton.view {
@ -592,24 +592,19 @@ final class MediaEditorScreenComponent: Component {
context: component.context,
duration: playerState.duration,
startPosition: playerState.timeRange?.lowerBound ?? 0.0,
- endPosition: playerState.timeRange?.upperBound ?? playerState.duration,
+ endPosition: playerState.timeRange?.upperBound ?? min(playerState.duration, storyMaxVideoDuration),
position: playerState.position,
+ maxDuration: storyMaxVideoDuration,
frames: playerState.frames,
framesUpdateTimestamp: playerState.framesUpdateTimestamp,
- startPositionUpdated: { [weak mediaEditor] position, done in
+ trimUpdated: { [weak mediaEditor] start, end, updatedEnd, done in
if let mediaEditor {
- mediaEditor.setVideoTrimStart(position)
- mediaEditor.seek(position, andPlay: done)
- }
- },
- endPositionUpdated: { [weak mediaEditor] position, done in
- if let mediaEditor {
- mediaEditor.setVideoTrimEnd(position)
+ mediaEditor.setVideoTrimStart(start)
+ mediaEditor.setVideoTrimEnd(end)
if done {
let start = mediaEditor.values.videoTrimRange?.lowerBound ?? 0.0
mediaEditor.seek(start, andPlay: true)
} else {
- mediaEditor.seek(position, andPlay: false)
+ mediaEditor.seek(updatedEnd ? end : start, andPlay: false)
}
}
},
@ -730,7 +725,7 @@ final class MediaEditorScreenComponent: Component {
}
case let .message(peerIds, _):
if peerIds.count == 1 {
privacyText = "User Test"
privacyText = "1 Recipient"
} else {
privacyText = "\(peerIds.count) Recipients"
}
@ -871,6 +866,7 @@ final class MediaEditorScreenComponent: Component {
}
private let storyDimensions = CGSize(width: 1080.0, height: 1920.0)
+ private let storyMaxVideoDuration: Double = 60.0
public enum MediaEditorResultPrivacy: Equatable {
case story(privacy: EngineStoryPrivacy, archive: Bool)
@ -928,6 +924,7 @@ public final class MediaEditorScreen: ViewController {
fileprivate final class Node: ViewControllerTracingNode, UIGestureRecognizerDelegate {
private weak var controller: MediaEditorScreen?
private let context: AccountContext
+ private var interaction: DrawingToolsInteraction?
private let initializationTimestamp = CACurrentMediaTime()
fileprivate var subject: MediaEditorScreen.Subject?
@ -1162,6 +1159,51 @@ public final class MediaEditorScreen: ViewController {
let rotateGestureRecognizer = UIRotationGestureRecognizer(target: self, action: #selector(self.handleRotate(_:)))
rotateGestureRecognizer.delegate = self
self.previewContainerView.addGestureRecognizer(rotateGestureRecognizer)
+ let tapGestureRecognizer = UITapGestureRecognizer(target: self, action: #selector(self.handleTap(_:)))
+ self.previewContainerView.addGestureRecognizer(tapGestureRecognizer)
+ self.interaction = DrawingToolsInteraction(
+ context: self.context,
+ drawingView: self.drawingView,
+ entitiesView: self.entitiesView,
+ selectionContainerView: self.selectionContainerView,
+ isVideo: false,
+ updateSelectedEntity: { _ in
+ },
+ updateVideoPlayback: { [weak self] isPlaying in
+ if let self, let mediaEditor = self.mediaEditor {
+ if isPlaying {
+ mediaEditor.play()
+ } else {
+ mediaEditor.stop()
+ }
+ }
+ },
+ updateColor: { [weak self] color in
+ if let self, let selectedEntityView = self.entitiesView.selectedEntityView {
+ selectedEntityView.entity.color = color
+ selectedEntityView.update(animated: false)
+ }
+ },
+ getCurrentImage: {
+ return nil
+ },
+ getControllerNode: { [weak self] in
+ return self
+ },
+ present: { [weak self] c, i, a in
+ if let self {
+ self.controller?.present(c, in: i, with: a)
+ }
+ },
+ addSubview: { [weak self] view in
+ if let self {
+ self.view.addSubview(view)
+ }
+ }
+ )
}
@objc func gestureRecognizer(_ gestureRecognizer: UIGestureRecognizer, shouldRecognizeSimultaneouslyWith otherGestureRecognizer: UIGestureRecognizer) -> Bool {
@ -1180,6 +1222,12 @@ public final class MediaEditorScreen: ViewController {
self.entitiesView.handleRotate(gestureRecognizer)
}
+ @objc func handleTap(_ gestureRecognizer: UITapGestureRecognizer) {
+ if self.entitiesView.hasSelection {
+ self.entitiesView.selectEntity(nil)
+ }
+ }
func animateIn() {
if let transitionIn = self.controller?.transitionIn {
switch transitionIn {
@ -1435,21 +1483,6 @@ public final class MediaEditorScreen: ViewController {
}
}
- private func insertDrawingEntity(_ entity: DrawingEntity) {
- self.entitiesView.prepareNewEntity(entity)
- self.entitiesView.add(entity)
- self.entitiesView.selectEntity(entity)
- if let entityView = entitiesView.getView(for: entity.uuid) {
- entityView.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2)
- entityView.layer.animateScale(from: 0.1, to: entity.scale, duration: 0.2)
- if let selectionView = entityView.selectionView {
- selectionView.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2, delay: 0.2)
- }
- }
- }
private var drawingScreen: DrawingScreen?
func containerLayoutUpdated(layout: ContainerViewLayout, forceUpdate: Bool = false, animateOut: Bool = false, transition: Transition) {
guard let controller = self.controller else {
@ -1492,23 +1525,25 @@ public final class MediaEditorScreen: ViewController {
privacy: controller.state.privacy,
openDrawing: { [weak self] mode in
if let self {
+ if self.entitiesView.hasSelection {
+ self.entitiesView.selectEntity(nil)
+ }
switch mode {
case .sticker:
let controller = StickerPickerScreen(context: self.context, inputData: self.stickerPickerInputData.get())
controller.completion = { [weak self] file in
if let self, let file {
let stickerEntity = DrawingStickerEntity(content: .file(file))
- self.insertDrawingEntity(stickerEntity)
+ self.interaction?.insertEntity(stickerEntity)
}
}
self.controller?.present(controller, in: .current)
return
case .text:
- break
- default:
- break
- }
+ let textEntity = DrawingTextEntity(text: NSAttributedString(), style: .regular, animation: .none, font: .sanFrancisco, alignment: .center, fontSize: 1.0, color: DrawingColor(color: .white))
+ self.interaction?.insertEntity(textEntity)
+ return
+ case .drawing:
let controller = DrawingScreen(context: self.context, sourceHint: .storyEditor, size: self.previewContainerView.frame.size, originalSize: storyDimensions, isVideo: false, isAvatar: false, drawingView: self.drawingView, entitiesView: self.entitiesView, selectionContainerView: self.selectionContainerView, existingStickerPickerInputData: self.stickerPickerInputData)
self.drawingScreen = controller
self.drawingView.isUserInteractionEnabled = true
@ -1540,23 +1575,15 @@ public final class MediaEditorScreen: ViewController {
self?.entitiesView.selectEntity(nil)
}
self.controller?.present(controller, in: .current)
- switch mode {
- case .sticker:
- controller.presentStickerSelection()
- case .text:
- Queue.mainQueue().after(0.05, {
- controller.addTextEntity()
- })
- default:
- break
- }
self.animateOutToTool()
}
}
},
openTools: { [weak self] in
if let self, let mediaEditor = self.mediaEditor {
+ if self.entitiesView.hasSelection {
+ self.entitiesView.selectEntity(nil)
+ }
let controller = MediaToolsScreen(context: self.context, mediaEditor: mediaEditor)
controller.dismissed = { [weak self] in
if let self {
@ -1605,6 +1632,8 @@ public final class MediaEditorScreen: ViewController {
transition.setFrame(view: self.selectionContainerView, frame: CGRect(origin: .zero, size: previewFrame.size))
+ self.interaction?.containerLayoutUpdated(layout: layout, transition: transition)
if isFirstTime {
self.animateIn()
}
@ -1969,6 +1998,9 @@ public final class MediaEditorScreen: ViewController {
mediaEditor.stop()
+ let codableEntities = self.node.entitiesView.entities.filter { !($0 is DrawingMediaEntity) }.compactMap({ CodableDrawingEntity(entity: $0) })
+ mediaEditor.setDrawingAndEntities(data: nil, image: mediaEditor.values.drawing, entities: codableEntities)
if mediaEditor.resultIsVideo {
let videoResult: Result.VideoResult
let duration: Double

View File

@ -23,10 +23,10 @@ final class VideoScrubberComponent: Component {
let startPosition: Double
let endPosition: Double
let position: Double
+ let maxDuration: Double
let frames: [UIImage]
let framesUpdateTimestamp: Double
- let startPositionUpdated: (Double, Bool) -> Void
- let endPositionUpdated: (Double, Bool) -> Void
+ let trimUpdated: (Double, Double, Bool, Bool) -> Void
let positionUpdated: (Double, Bool) -> Void
init(
@ -35,10 +35,10 @@ final class VideoScrubberComponent: Component {
startPosition: Double,
endPosition: Double,
position: Double,
+ maxDuration: Double,
frames: [UIImage],
framesUpdateTimestamp: Double,
- startPositionUpdated: @escaping (Double, Bool) -> Void,
- endPositionUpdated: @escaping (Double, Bool) -> Void,
+ trimUpdated: @escaping (Double, Double, Bool, Bool) -> Void,
positionUpdated: @escaping (Double, Bool) -> Void
) {
self.context = context
@ -46,10 +46,10 @@ final class VideoScrubberComponent: Component {
self.startPosition = startPosition
self.endPosition = endPosition
self.position = position
+ self.maxDuration = maxDuration
self.frames = frames
self.framesUpdateTimestamp = framesUpdateTimestamp
- self.startPositionUpdated = startPositionUpdated
- self.endPositionUpdated = endPositionUpdated
+ self.trimUpdated = trimUpdated
self.positionUpdated = positionUpdated
}
@ -69,6 +69,9 @@ final class VideoScrubberComponent: Component {
if lhs.position != rhs.position {
return false
}
+ if lhs.maxDuration != rhs.maxDuration {
+ return false
+ }
if lhs.framesUpdateTimestamp != rhs.framesUpdateTimestamp {
return false
}
@ -165,22 +168,28 @@ final class VideoScrubberComponent: Component {
let end = self.frame.width - handleWidth
let length = end - start
let fraction = (location.x - start) / length
- var value = max(0.0, component.duration * fraction)
- if value > component.endPosition - minumumDuration {
- value = max(0.0, component.endPosition - minumumDuration)
+ var startValue = max(0.0, component.duration * fraction)
+ if startValue > component.endPosition - minumumDuration {
+ startValue = max(0.0, component.endPosition - minumumDuration)
}
+ var endValue = component.endPosition
+ if endValue - startValue > component.maxDuration {
+ let delta = (endValue - startValue) - component.maxDuration
+ endValue -= delta
+ }
var transition: Transition = .immediate
switch gestureRecognizer.state {
case .began, .changed:
self.isPanningHandle = true
- component.startPositionUpdated(value, false)
+ component.trimUpdated(startValue, endValue, false, false)
if case .began = gestureRecognizer.state {
transition = .easeInOut(duration: 0.25)
}
case .ended, .cancelled:
self.isPanningHandle = false
- component.startPositionUpdated(value, true)
+ component.trimUpdated(startValue, endValue, false, true)
transition = .easeInOut(duration: 0.25)
default:
break
@ -197,22 +206,28 @@ final class VideoScrubberComponent: Component {
let end = self.frame.width - handleWidth
let length = end - start
let fraction = (location.x - start) / length
- var value = min(component.duration, component.duration * fraction)
- if value < component.startPosition + minumumDuration {
- value = min(component.duration, component.startPosition + minumumDuration)
+ var endValue = min(component.duration, component.duration * fraction)
+ if endValue < component.startPosition + minumumDuration {
+ endValue = min(component.duration, component.startPosition + minumumDuration)
}
+ var startValue = component.startPosition
+ if endValue - startValue > component.maxDuration {
+ let delta = (endValue - startValue) - component.maxDuration
+ startValue += delta
+ }
var transition: Transition = .immediate
switch gestureRecognizer.state {
case .began, .changed:
self.isPanningHandle = true
- component.endPositionUpdated(value, false)
+ component.trimUpdated(startValue, endValue, true, false)
if case .began = gestureRecognizer.state {
transition = .easeInOut(duration: 0.25)
}
case .ended, .cancelled:
self.isPanningHandle = false
- component.endPositionUpdated(value, true)
+ component.trimUpdated(startValue, endValue, true, true)
transition = .easeInOut(duration: 0.25)
default:
break
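Both drag handlers above apply the same rule: clamp the dragged handle against minumumDuration, then drag the opposite handle along so the selected range never exceeds maxDuration. Distilled into a standalone helper (illustrative only, not part of the commit):

// Returns a trim range of at most maxDuration, moving the non-dragged handle
// when the dragged one would stretch the selection past the limit.
func clampedTrim(start: Double, end: Double, maxDuration: Double, draggingEnd: Bool) -> (start: Double, end: Double) {
    guard end - start > maxDuration else {
        return (start, end)
    }
    let delta = (end - start) - maxDuration
    return draggingEnd ? (start + delta, end) : (start, end - delta)
}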

View File

@ -376,7 +376,7 @@ public final class TelegramRootController: NavigationController, TelegramRootCon
switch privacy {
case let .story(storyPrivacy, _):
let _ = self.context.engine.messages.uploadStory(media: .image(dimensions: dimensions, data: imageData), text: caption?.string ?? "", entities: [], privacy: storyPrivacy).start()
- Queue.mainQueue().after(0.2, { [weak chatListController] in
+ Queue.mainQueue().after(0.3, { [weak chatListController] in
chatListController?.animateStoryUploadRipple()
})
case let .message(peerIds, timeout):
@ -457,7 +457,7 @@ public final class TelegramRootController: NavigationController, TelegramRootCon
}
if case let .story(storyPrivacy, _) = privacy {
let _ = self.context.engine.messages.uploadStory(media: .video(dimensions: dimensions, duration: Int(duration), resource: resource), text: caption?.string ?? "", entities: [], privacy: storyPrivacy).start()
- Queue.mainQueue().after(0.2, { [weak chatListController] in
+ Queue.mainQueue().after(0.3, { [weak chatListController] in
chatListController?.animateStoryUploadRipple()
})
} else {
@ -468,8 +468,10 @@ public final class TelegramRootController: NavigationController, TelegramRootCon
}
dismissCameraImpl?()
+ Queue.mainQueue().after(0.1) {
commit()
+ }
}
)
controller.cancelled = { showDraftTooltip in
if showDraftTooltip {