diff --git a/submodules/AccountContext/Sources/AccountContext.swift b/submodules/AccountContext/Sources/AccountContext.swift
index 4d3c0a6bad..901370438a 100644
--- a/submodules/AccountContext/Sources/AccountContext.swift
+++ b/submodules/AccountContext/Sources/AccountContext.swift
@@ -742,8 +742,40 @@ public protocol RecentSessionsController: AnyObject {
 public protocol AttachmentFileController: AnyObject {
 }
 
+public struct StoryCameraTransitionIn {
+    public weak var sourceView: UIView?
+    public let sourceRect: CGRect
+    public let sourceCornerRadius: CGFloat
+    
+    public init(
+        sourceView: UIView,
+        sourceRect: CGRect,
+        sourceCornerRadius: CGFloat
+    ) {
+        self.sourceView = sourceView
+        self.sourceRect = sourceRect
+        self.sourceCornerRadius = sourceCornerRadius
+    }
+}
+
+public struct StoryCameraTransitionOut {
+    public weak var destinationView: UIView?
+    public let destinationRect: CGRect
+    public let destinationCornerRadius: CGFloat
+    
+    public init(
+        destinationView: UIView,
+        destinationRect: CGRect,
+        destinationCornerRadius: CGFloat
+    ) {
+        self.destinationView = destinationView
+        self.destinationRect = destinationRect
+        self.destinationCornerRadius = destinationCornerRadius
+    }
+}
+
 public protocol TelegramRootControllerInterface: NavigationController {
-    func openStoryCamera()
+    func openStoryCamera(transitionIn: StoryCameraTransitionIn?, transitionOut: @escaping (Bool) -> StoryCameraTransitionOut?)
 }
 
 public protocol SharedAccountContext: AnyObject {
diff --git a/submodules/ChatListUI/Sources/ChatListController.swift b/submodules/ChatListUI/Sources/ChatListController.swift
index 09fc926612..7c51ad45e9 100644
--- a/submodules/ChatListUI/Sources/ChatListController.swift
+++ b/submodules/ChatListUI/Sources/ChatListController.swift
@@ -2433,6 +2433,17 @@ public class ChatListControllerImpl: TelegramBaseController, ChatListController
             }
         }
         
+        var cameraTransitionIn: StoryCameraTransitionIn?
+        if let componentView = self.headerContentView.view as? ChatListHeaderComponent.View {
+            if let transitionView = componentView.storyPeerListView()?.transitionViewForItem(peerId: self.context.account.peerId) {
+                cameraTransitionIn = StoryCameraTransitionIn(
+                    sourceView: transitionView,
+                    sourceRect: transitionView.bounds,
+                    sourceCornerRadius: transitionView.bounds.height * 0.5
+                )
+            }
+        }
+        
         var initialFocusedId: AnyHashable?
         if let peer {
             initialFocusedId = AnyHashable(peer.id)
@@ -2442,7 +2453,21 @@ public class ChatListControllerImpl: TelegramBaseController, ChatListController
             return !slice.items.isEmpty
         }) {
             if let rootController = self.context.sharedContext.mainWindow?.viewController as? TelegramRootControllerInterface {
-                rootController.openStoryCamera()
+                rootController.openStoryCamera(transitionIn: cameraTransitionIn, transitionOut: { [weak self] _ in
+                    guard let self else {
+                        return nil
+                    }
+                    if let componentView = self.headerContentView.view as? ChatListHeaderComponent.View {
+                        if let transitionView = componentView.storyPeerListView()?.transitionViewForItem(peerId: self.context.account.peerId) {
+                            return StoryCameraTransitionOut(
+                                destinationView: transitionView,
+                                destinationRect: transitionView.bounds,
+                                destinationCornerRadius: transitionView.bounds.height * 0.5
+                            )
+                        }
+                    }
+                    return nil
+                })
             }
             return
diff --git a/submodules/Display/Source/TabBarController.swift b/submodules/Display/Source/TabBarController.swift
index ca3ca23938..2c09401ee1 100644
--- a/submodules/Display/Source/TabBarController.swift
+++ b/submodules/Display/Source/TabBarController.swift
@@ -19,6 +19,7 @@ public protocol TabBarController: ViewController {
     
     func updateBackgroundAlpha(_ alpha: CGFloat, transition: ContainedViewLayoutTransition)
+    func viewForCameraItem() -> UIView?
     func frameForControllerTab(controller: ViewController) -> CGRect?
     func isPointInsideContentArea(point: CGPoint) -> Bool
     func sourceNodesForController(at index: Int) -> [ASDisplayNode]?
diff --git a/submodules/DrawingUI/Sources/DrawingMediaEntity.swift b/submodules/DrawingUI/Sources/DrawingMediaEntity.swift
index 209c4997a9..15384d14c5 100644
--- a/submodules/DrawingUI/Sources/DrawingMediaEntity.swift
+++ b/submodules/DrawingUI/Sources/DrawingMediaEntity.swift
@@ -22,7 +22,6 @@ public final class DrawingMediaEntityView: DrawingEntityView, DrawingEntityMedia
         didSet {
             if let previewView = self.previewView {
                 previewView.isUserInteractionEnabled = false
-                previewView.layer.allowsEdgeAntialiasing = true
                 self.addSubview(previewView)
             }
         }
@@ -30,8 +29,6 @@ public final class DrawingMediaEntityView: DrawingEntityView, DrawingEntityMedia
     
     init(context: AccountContext, entity: DrawingMediaEntity) {
         super.init(context: context, entity: entity)
-        
-        self.layer.allowsEdgeAntialiasing = true
     }
     
     required init?(coder: NSCoder) {
diff --git a/submodules/LegacyMediaPickerUI/Sources/LegacyMediaPickers.swift b/submodules/LegacyMediaPickerUI/Sources/LegacyMediaPickers.swift
index 373f976b4b..ebb832dd1f 100644
--- a/submodules/LegacyMediaPickerUI/Sources/LegacyMediaPickers.swift
+++ b/submodules/LegacyMediaPickerUI/Sources/LegacyMediaPickers.swift
@@ -691,7 +691,7 @@ public func legacyAssetPickerEnqueueMessages(context: AccountContext, account: A
                     let adjustmentsData = MemoryBuffer(data: NSKeyedArchiver.archivedData(withRootObject: adjustments.dictionary()!))
                     let digest = MemoryBuffer(data: adjustmentsData.md5Digest())
-                    resourceAdjustments = VideoMediaResourceAdjustments(data: adjustmentsData, digest: digest)
+                    resourceAdjustments = VideoMediaResourceAdjustments(data: adjustmentsData, digest: digest, isStory: false)
                 }
                 
                 let resource: TelegramMediaResource
diff --git a/submodules/LocalMediaResources/Sources/MediaResources.swift b/submodules/LocalMediaResources/Sources/MediaResources.swift
index b8fd6473fd..0306be276e 100644
--- a/submodules/LocalMediaResources/Sources/MediaResources.swift
+++ b/submodules/LocalMediaResources/Sources/MediaResources.swift
@@ -7,24 +7,28 @@ import PersistentStringHash
 public final class VideoMediaResourceAdjustments: PostboxCoding, Equatable {
     public let data: MemoryBuffer
     public let digest: MemoryBuffer
+    public let isStory: Bool
     
-    public init(data: MemoryBuffer, digest: MemoryBuffer) {
+    public init(data: MemoryBuffer, digest: MemoryBuffer, isStory: Bool) {
         self.data = data
         self.digest = digest
+        self.isStory = isStory
     }
     
     public init(decoder: PostboxDecoder) {
         self.data = decoder.decodeBytesForKey("d")!
         self.digest = decoder.decodeBytesForKey("h")!
+        self.isStory = decoder.decodeBoolForKey("s", orElse: false)
     }
     
     public func encode(_ encoder: PostboxEncoder) {
         encoder.encodeBytes(self.data, forKey: "d")
         encoder.encodeBytes(self.digest, forKey: "h")
+        encoder.encodeBool(self.isStory, forKey: "s")
     }
     
     public static func ==(lhs: VideoMediaResourceAdjustments, rhs: VideoMediaResourceAdjustments) -> Bool {
-        return lhs.data == rhs.data && lhs.digest == rhs.digest
+        return lhs.data == rhs.data && lhs.digest == rhs.digest && lhs.isStory == rhs.isStory
     }
 }
diff --git a/submodules/ShareItems/Sources/ShareItems.swift b/submodules/ShareItems/Sources/ShareItems.swift
index 6d66c748a8..4f36bdb7ed 100644
--- a/submodules/ShareItems/Sources/ShareItems.swift
+++ b/submodules/ShareItems/Sources/ShareItems.swift
@@ -140,7 +140,7 @@ private func preparedShareItem(account: Account, to peerId: PeerId, value: [Stri
                     let adjustmentsData = MemoryBuffer(data: NSKeyedArchiver.archivedData(withRootObject: adjustments.dictionary()!))
                     let digest = MemoryBuffer(data: adjustmentsData.md5Digest())
-                    resourceAdjustments = VideoMediaResourceAdjustments(data: adjustmentsData, digest: digest)
+                    resourceAdjustments = VideoMediaResourceAdjustments(data: adjustmentsData, digest: digest, isStory: false)
                 }
                 
                 let estimatedSize = TGMediaVideoConverter.estimatedSize(for: preset, duration: finalDuration, hasAudio: true)
diff --git a/submodules/TabBarUI/Sources/TabBarController.swift b/submodules/TabBarUI/Sources/TabBarController.swift
index 4fce734805..f834824761 100644
--- a/submodules/TabBarUI/Sources/TabBarController.swift
+++ b/submodules/TabBarUI/Sources/TabBarController.swift
@@ -172,8 +172,27 @@ open class TabBarControllerImpl: ViewController, TabBarController {
         return self.tabBarControllerNode.tabBarNode.sourceNodesForController(at: index)
     }
     
+    public func viewForCameraItem() -> UIView? {
+        if let (cameraItem, _) = self.cameraItemAndAction {
+            if let cameraItemIndex = self.tabBarControllerNode.tabBarNode.tabBarItems.firstIndex(where: { $0.item === cameraItem }) {
+                return self.tabBarControllerNode.tabBarNode.viewForControllerTab(at: cameraItemIndex)
+            }
+        }
+        return nil
+    }
+    
     public func frameForControllerTab(controller: ViewController) -> CGRect? {
         if let index = self.controllers.firstIndex(of: controller) {
+            var index = index
+            if let (cameraItem, _) = self.cameraItemAndAction {
+                if let cameraItemIndex = self.tabBarControllerNode.tabBarNode.tabBarItems.firstIndex(where: { $0.item === cameraItem }) {
+                    if index == cameraItemIndex {
+                        
+                    } else if index > cameraItemIndex {
+                        index -= 1
+                    }
+                }
+            }
             return self.tabBarControllerNode.tabBarNode.frameForControllerTab(at: index).flatMap { self.tabBarControllerNode.tabBarNode.view.convert($0, to: self.view) }
         } else {
             return nil
diff --git a/submodules/TabBarUI/Sources/TabBarNode.swift b/submodules/TabBarUI/Sources/TabBarNode.swift
index 901f62c190..473a7f42aa 100644
--- a/submodules/TabBarUI/Sources/TabBarNode.swift
+++ b/submodules/TabBarUI/Sources/TabBarNode.swift
@@ -453,6 +453,11 @@ class TabBarNode: ASDisplayNode, UIGestureRecognizerDelegate {
         return container.imageNode.frame
     }
    
+    func viewForControllerTab(at index: Int) -> UIView? {
+        let container = self.tabBarNodeContainers[index]
+        return container.imageNode.view
+    }
+    
     private func reloadTabBarItems() {
         for node in self.tabBarNodeContainers {
             node.imageNode.removeFromSupernode()
diff --git a/submodules/TelegramUI/Components/CameraScreen/Sources/CameraScreen.swift b/submodules/TelegramUI/Components/CameraScreen/Sources/CameraScreen.swift
index fe369f06b7..73f927be70 100644
--- a/submodules/TelegramUI/Components/CameraScreen/Sources/CameraScreen.swift
+++ b/submodules/TelegramUI/Components/CameraScreen/Sources/CameraScreen.swift
@@ -67,6 +67,7 @@ private final class CameraScreenComponent: CombinedComponent {
     let context: AccountContext
     let camera: Camera
     let changeMode: ActionSlot<CameraMode>
+    let isDismissing: Bool
     let present: (ViewController) -> Void
     let push: (ViewController) -> Void
     let completion: ActionSlot<Signal<CameraScreen.Result, NoError>>
@@ -75,6 +76,7 @@ private final class CameraScreenComponent: CombinedComponent {
         context: AccountContext,
         camera: Camera,
         changeMode: ActionSlot<CameraMode>,
+        isDismissing: Bool,
         present: @escaping (ViewController) -> Void,
         push: @escaping (ViewController) -> Void,
         completion: ActionSlot<Signal<CameraScreen.Result, NoError>>
@@ -82,6 +84,7 @@ private final class CameraScreenComponent: CombinedComponent {
         self.context = context
         self.camera = camera
         self.changeMode = changeMode
+        self.isDismissing = isDismissing
         self.present = present
         self.push = push
         self.completion = completion
@@ -91,6 +94,9 @@ private final class CameraScreenComponent: CombinedComponent {
         if lhs.context !== rhs.context {
             return false
         }
+        if lhs.isDismissing != rhs.isDismissing {
+            return false
+        }
         return true
     }
@@ -480,7 +486,7 @@ private final class CameraScreenComponent: CombinedComponent {
             }
         }
         
-        if case .none = state.cameraState.recording {
+        if case .none = state.cameraState.recording, !component.isDismissing {
             let modeControl = modeControl.update(
                 component: ModeComponent(
                     availableModes: [.photo, .video],
@@ -565,6 +571,38 @@ public class CameraScreen: ViewController {
         case video(String, PixelDimensions)
         case asset(PHAsset)
     }
+    
+    public final class TransitionIn {
+        public weak var sourceView: UIView?
+        public let sourceRect: CGRect
+        public let sourceCornerRadius: CGFloat
+        
+        public init(
+            sourceView: UIView,
+            sourceRect: CGRect,
+            sourceCornerRadius: CGFloat
+        ) {
+            self.sourceView = sourceView
+            self.sourceRect = sourceRect
+            self.sourceCornerRadius = sourceCornerRadius
+        }
+    }
+    
+    public final class TransitionOut {
+        public weak var destinationView: UIView?
+        public let destinationRect: CGRect
+        public let destinationCornerRadius: CGFloat
+        
+        public init(
+            destinationView: UIView,
+            destinationRect: CGRect,
+            destinationCornerRadius: CGFloat
+        ) {
+            self.destinationView = destinationView
+            self.destinationRect = destinationRect
+            self.destinationCornerRadius = destinationCornerRadius
+        }
+    }
     
     fileprivate final class Node: ViewControllerTracingNode {
         private weak var controller: CameraScreen?
@@ -668,6 +706,15 @@ public class CameraScreen: ViewController {
                         UIView.animate(withDuration: 0.4) {
                             self.previewBlurView.effect = nil
                         }
+                        
+                        if let previewSnapshotView = self.previewSnapshotView {
+                            self.previewSnapshotView = nil
+                            UIView.animate(withDuration: 0.25, animations: {
+                                previewSnapshotView.alpha = 0.0
+                            }, completion: { _ in
+                                previewSnapshotView.removeFromSuperview()
+                            })
+                        }
                     }
                 }
             })
@@ -754,6 +801,7 @@ public class CameraScreen: ViewController {
                     gestureRecognizer.isEnabled = false
                     gestureRecognizer.isEnabled = true
                 } else if translation.y > 10.0 {
+                    let isFirstPanChange = self.panTranslation == nil
                     self.panTranslation = translation.y
                     if let previewInitialPosition = self.previewInitialPosition {
                         self.previewContainerView.center = CGPoint(x: previewInitialPosition.x, y: previewInitialPosition.y + translation.y)
@@ -770,6 +818,12 @@ public class CameraScreen: ViewController {
                             self.backgroundDimView.alpha = 0.0
                         })
                     }
+                    
+                    if isFirstPanChange {
+                        if let layout = self.validLayout {
+                            self.containerLayoutUpdated(layout: layout, transition: .easeInOut(duration: 0.2))
+                        }
+                    }
                 }
             }
         case .ended:
@@ -795,15 +849,18 @@ public class CameraScreen: ViewController {
                     })
                 }
             }
+            if let _ = self.panTranslation {
+                self.panTranslation = nil
+                if let layout = self.validLayout {
+                    self.containerLayoutUpdated(layout: layout, transition: .easeInOut(duration: 0.2))
+                }
+            }
         default:
             break
         }
     }
     
         func animateIn() {
-            guard let layout = self.validLayout else {
-                return
-            }
            self.backgroundDimView.alpha = 0.0
            UIView.animate(withDuration: 0.4, animations: {
                self.backgroundEffectView.effect = UIBlurEffect(style: .dark)
@@ -813,16 +870,30 @@ public class CameraScreen: ViewController {
                self.backgroundEffectView.isHidden = true
            })
            
-            self.previewContainerView.layer.animatePosition(from: CGPoint(x: 0.0, y: layout.size.height / 2.0 - layout.intrinsicInsets.bottom - 22.0), to: .zero, duration: 0.4, timingFunction: kCAMediaTimingFunctionSpring, additive: true)
-            self.componentHost.view?.layer.animatePosition(from: CGPoint(x: 0.0, y: layout.size.height / 2.0 - layout.intrinsicInsets.bottom - 22.0), to: .zero, duration: 0.4, timingFunction: kCAMediaTimingFunctionSpring, additive: true)
-            self.previewContainerView.layer.animateScale(from: 0.2, to: 1.0, duration: 0.4, timingFunction: kCAMediaTimingFunctionSpring)
-            self.componentHost.view?.layer.animateScale(from: 0.2, to: 1.0, duration: 0.4, timingFunction: kCAMediaTimingFunctionSpring)
+            if let transitionIn = self.controller?.transitionIn, let sourceView = transitionIn.sourceView {
+                let sourceLocalFrame = sourceView.convert(transitionIn.sourceRect, to: self.view)
+                let innerSourceLocalFrame = CGRect(origin: CGPoint(x: sourceLocalFrame.minX - self.previewContainerView.frame.minX, y: sourceLocalFrame.minY - self.previewContainerView.frame.minY), size: sourceLocalFrame.size)
+                
+                self.previewContainerView.layer.animatePosition(from: sourceLocalFrame.center, to: self.previewContainerView.center, duration: 0.3, timingFunction: kCAMediaTimingFunctionSpring)
+                self.previewContainerView.layer.animateBounds(from: CGRect(origin: CGPoint(x: innerSourceLocalFrame.minX, y: innerSourceLocalFrame.minY), size: sourceLocalFrame.size), to: self.previewContainerView.bounds, duration: 0.3, timingFunction: kCAMediaTimingFunctionSpring)
+                self.previewContainerView.layer.animate(
+                    from: transitionIn.sourceCornerRadius as NSNumber,
+                    to: self.previewContainerView.layer.cornerRadius as NSNumber,
+                    keyPath: "cornerRadius",
+                    timingFunction: kCAMediaTimingFunctionSpring,
+                    duration: 0.3
+                )
+                
+                if let view = self.componentHost.view {
+                    view.layer.animatePosition(from: sourceLocalFrame.center, to: view.center, duration: 0.3, timingFunction: kCAMediaTimingFunctionSpring)
+                    view.layer.animateBounds(from: CGRect(origin: CGPoint(x: innerSourceLocalFrame.minX, y: innerSourceLocalFrame.minY), size: sourceLocalFrame.size), to: view.bounds, duration: 0.3, timingFunction: kCAMediaTimingFunctionSpring)
+                }
+            }
        }
        
        func animateOut(completion: @escaping () -> Void) {
-            guard let layout = self.validLayout else {
-                return
-            }
+            self.camera.stopCapture(invalidate: true)
+            
            self.backgroundEffectView.isHidden = false
            UIView.animate(withDuration: 0.25, animations: {
@@ -830,15 +901,29 @@ public class CameraScreen: ViewController {
                self.backgroundDimView.alpha = 0.0
            })
            
-            self.previewContainerView.layer.animatePosition(from: .zero, to: CGPoint(x: 0.0, y: layout.size.height / 2.0 - layout.intrinsicInsets.bottom - 8.0), duration: 0.4, timingFunction: kCAMediaTimingFunctionSpring, removeOnCompletion: false, additive: true)
-            self.componentHost.view?.layer.animatePosition(from: .zero, to: CGPoint(x: 0.0, y: layout.size.height / 2.0 - layout.intrinsicInsets.bottom - 8.0), duration: 0.4, timingFunction: kCAMediaTimingFunctionSpring, removeOnCompletion: false, additive: true, completion: { _ in
-                completion()
-            })
-            self.previewContainerView.layer.animateScale(from: 1.0, to: 0.01, duration: 0.4, timingFunction: kCAMediaTimingFunctionSpring, removeOnCompletion: false)
-            self.previewContainerView.layer.animateBounds(from: self.previewContainerView.bounds, to: CGRect(origin: .zero, size: CGSize(width: self.previewContainerView.bounds.width, height: self.previewContainerView.bounds.width)), duration: 0.4, timingFunction: kCAMediaTimingFunctionSpring, removeOnCompletion: false)
-            let transition = ContainedViewLayoutTransition.animated(duration: 0.4, curve: .spring)
-            transition.updateCornerRadius(layer: self.previewContainerView.layer, cornerRadius: self.previewContainerView.bounds.width / 2.0)
-            self.componentHost.view?.layer.animateScale(from: 1.0, to: 0.2, duration: 0.4, timingFunction: kCAMediaTimingFunctionSpring, removeOnCompletion: false)
+            if let transitionOut = self.controller?.transitionOut(false), let destinationView = transitionOut.destinationView {
+                let sourceLocalFrame = destinationView.convert(transitionOut.destinationRect, to: self.view)
+                let innerSourceLocalFrame = CGRect(origin: CGPoint(x: sourceLocalFrame.minX - self.previewContainerView.frame.minX, y: sourceLocalFrame.minY - self.previewContainerView.frame.minY), size: sourceLocalFrame.size)
+                
+                self.previewContainerView.layer.animatePosition(from: self.previewContainerView.center, to: sourceLocalFrame.center, duration: 0.3, timingFunction: kCAMediaTimingFunctionSpring, removeOnCompletion: false, completion: { _ in
+                    completion()
+                })
+                self.previewContainerView.layer.animateBounds(from: self.previewContainerView.bounds, to: CGRect(origin: CGPoint(x: innerSourceLocalFrame.minX, y: innerSourceLocalFrame.minY), size: sourceLocalFrame.size), duration: 0.3, timingFunction: kCAMediaTimingFunctionSpring, removeOnCompletion: false)
+                self.previewContainerView.layer.animate(
+                    from: self.previewContainerView.layer.cornerRadius as NSNumber,
+                    to: transitionOut.destinationCornerRadius as NSNumber,
+                    keyPath: "cornerRadius",
+                    timingFunction: kCAMediaTimingFunctionSpring,
+                    duration: 0.3,
+                    removeOnCompletion: false
+                )
+                
+                if let view = self.componentHost.view {
+                    view.layer.animatePosition(from: view.center, to: sourceLocalFrame.center, duration: 0.3, timingFunction: kCAMediaTimingFunctionSpring)
+                    view.layer.animateBounds(from: view.bounds, to: CGRect(origin: CGPoint(x: innerSourceLocalFrame.minX, y: innerSourceLocalFrame.minY), size: sourceLocalFrame.size), duration: 0.3, timingFunction: kCAMediaTimingFunctionSpring)
+                }
+            }
+            
            self.componentHost.view?.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.15, removeOnCompletion: false)
            self.previewContainerView.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.35, removeOnCompletion: false)
        }
@@ -864,7 +949,12 @@ public class CameraScreen: ViewController {
            }
        }
        
+        private var previewSnapshotView: UIView?
        func animateInFromEditor() {
+            if let snapshot = self.simplePreviewView?.snapshotView(afterScreenUpdates: false) {
+                self.simplePreviewView?.addSubview(snapshot)
+                self.previewSnapshotView = snapshot
+            }
            self.simplePreviewView?.isEnabled = true
            self.camera.startCapture()
@@ -959,6 +1049,7 @@ public class CameraScreen: ViewController {
                context: self.context,
                camera: self.camera,
                changeMode: self.changeMode,
+                isDismissing: self.panTranslation != nil,
                present: { [weak self] c in
                    self?.controller?.present(c, in: .window(.root))
                },
@@ -980,9 +1071,11 @@ public class CameraScreen: ViewController {
                componentView.clipsToBounds = true
            }
            
-            let componentFrame = CGRect(origin: .zero, size: componentSize)
-            transition.setFrame(view: componentView, frame: CGRect(origin: componentFrame.origin, size: CGSize(width: componentFrame.width, height: componentFrame.height)))
-            
+            if self.panTranslation == nil {
+                let componentFrame = CGRect(origin: .zero, size: componentSize)
+                transition.setFrame(view: componentView, frame: componentFrame)
+            }
+            
            if isFirstTime {
                self.animateIn()
            }
@@ -991,10 +1084,12 @@ public class CameraScreen: ViewController {
            transition.setFrame(view: self.backgroundDimView, frame: CGRect(origin: .zero, size: layout.size))
            transition.setFrame(view: self.backgroundEffectView, frame: CGRect(origin: .zero, size: layout.size))
            
-            let previewFrame = CGRect(origin: CGPoint(x: 0.0, y: topInset), size: previewSize)
-            transition.setFrame(view: self.previewContainerView, frame: previewFrame)
-            transition.setFrame(view: self.effectivePreviewView, frame: CGRect(origin: .zero, size: previewFrame.size))
-            transition.setFrame(view: self.previewBlurView, frame: CGRect(origin: .zero, size: previewFrame.size))
+            if self.panTranslation == nil {
+                let previewFrame = CGRect(origin: CGPoint(x: 0.0, y: topInset), size: previewSize)
+                transition.setFrame(view: self.previewContainerView, frame: previewFrame)
+                transition.setFrame(view: self.effectivePreviewView, frame: CGRect(origin: .zero, size: previewFrame.size))
+                transition.setFrame(view: self.previewBlurView, frame: CGRect(origin: .zero, size: previewFrame.size))
+            }
        }
    }
@@ -1005,12 +1100,23 @@ public class CameraScreen: ViewController {
     private let context: AccountContext
     fileprivate let mode: Mode
     fileprivate let holder: CameraHolder?
+    fileprivate let transitionIn: TransitionIn?
+    fileprivate let transitionOut: (Bool) -> TransitionOut?
     fileprivate let completion: (Signal<CameraScreen.Result, NoError>) -> Void
     
-    public init(context: AccountContext, mode: Mode, holder: CameraHolder? = nil, completion: @escaping (Signal<CameraScreen.Result, NoError>) -> Void) {
+    public init(
+        context: AccountContext,
+        mode: Mode,
+        holder: CameraHolder? = nil,
+        transitionIn: TransitionIn?,
+        transitionOut: @escaping (Bool) -> TransitionOut?,
+        completion: @escaping (Signal<CameraScreen.Result, NoError>) -> Void
+    ) {
         self.context = context
         self.mode = mode
         self.holder = holder
+        self.transitionIn = transitionIn
+        self.transitionOut = transitionOut
         self.completion = completion
         
         super.init(navigationBarPresentationData: nil)
diff --git a/submodules/TelegramUI/Components/CameraScreen/Sources/CaptureControlsComponent.swift b/submodules/TelegramUI/Components/CameraScreen/Sources/CaptureControlsComponent.swift
index 34fc1b3b31..7269c60910 100644
--- a/submodules/TelegramUI/Components/CameraScreen/Sources/CaptureControlsComponent.swift
+++ b/submodules/TelegramUI/Components/CameraScreen/Sources/CaptureControlsComponent.swift
@@ -637,7 +637,8 @@ final class CaptureControlsComponent: Component {
             if flipButtonView.superview == nil {
                 self.addSubview(flipButtonView)
             }
-            transition.setFrame(view: flipButtonView, frame: flipButtonFrame)
+            transition.setBounds(view: flipButtonView, bounds: CGRect(origin: .zero, size: flipButtonFrame.size))
+            transition.setPosition(view: flipButtonView, position: flipButtonFrame.center)
         }
         
         var blobState: ShutterBlobView.BlobState
@@ -689,7 +690,8 @@ final class CaptureControlsComponent: Component {
                 self.addSubview(shutterButtonView)
             }
             
-            transition.setFrame(view: shutterButtonView, frame: shutterButtonFrame)
+            transition.setBounds(view: shutterButtonView, bounds: CGRect(origin: .zero, size: shutterButtonFrame.size))
+            transition.setPosition(view: shutterButtonView, position: shutterButtonFrame.center)
         }
         
         let guideSpacing: CGFloat = 9.0
diff --git a/submodules/TelegramUI/Components/LegacyInstantVideoController/Sources/LegacyInstantVideoController.swift b/submodules/TelegramUI/Components/LegacyInstantVideoController/Sources/LegacyInstantVideoController.swift
index 8c46397fcd..5b33cf7cbf 100644
--- a/submodules/TelegramUI/Components/LegacyInstantVideoController/Sources/LegacyInstantVideoController.swift
+++ b/submodules/TelegramUI/Components/LegacyInstantVideoController/Sources/LegacyInstantVideoController.swift
@@ -191,7 +191,7 @@ public func legacyInstantVideoController(theme: PresentationTheme, panelFrame: C
                 let adjustmentsData = MemoryBuffer(data: NSKeyedArchiver.archivedData(withRootObject: adjustments.dictionary()!))
                 let digest = MemoryBuffer(data: adjustmentsData.md5Digest())
-                resourceAdjustments = VideoMediaResourceAdjustments(data: adjustmentsData, digest: digest)
+                resourceAdjustments = VideoMediaResourceAdjustments(data: adjustmentsData, digest: digest, isStory: false)
             }
             
             if finalDuration.isZero || finalDuration.isNaN {
diff --git a/submodules/TelegramUI/Components/MediaEditor/Sources/BlurRenderPass.swift b/submodules/TelegramUI/Components/MediaEditor/Sources/BlurRenderPass.swift
index 2682b98b94..030fff5640 100644
--- a/submodules/TelegramUI/Components/MediaEditor/Sources/BlurRenderPass.swift
+++ b/submodules/TelegramUI/Components/MediaEditor/Sources/BlurRenderPass.swift
@@ -37,6 +37,7 @@ private final class BlurGaussianPass: RenderPass {
         let radius = round(4.0 + intensity * 26.0)
         if self.blur?.sigma != radius {
             self.blur = MPSImageGaussianBlur(device: device, sigma: radius)
+            self.blur?.edgeMode = .clamp
         }
         
         if self.cachedTexture == nil {
diff --git a/submodules/TelegramUI/Components/MediaEditor/Sources/MediaEditor.swift b/submodules/TelegramUI/Components/MediaEditor/Sources/MediaEditor.swift
index 7b4392a677..3185991061 100644
--- a/submodules/TelegramUI/Components/MediaEditor/Sources/MediaEditor.swift
+++ b/submodules/TelegramUI/Components/MediaEditor/Sources/MediaEditor.swift
@@ -33,7 +33,9 @@ public final class MediaEditor {
     public var values: MediaEditorValues {
         didSet {
-            self.updateRenderChain()
+            if !self.skipRendering {
+                self.updateRenderChain()
+            }
         }
     }
@@ -65,8 +67,7 @@ public final class MediaEditor {
     }
     
     public var resultIsVideo: Bool {
-        let hasAnimatedEntities = false
-        return self.player != nil || hasAnimatedEntities
+        return self.player != nil || self.values.entities.contains(where: { $0.entity.isAnimated })
     }
     
     public var resultImage: UIImage? {
@@ -248,8 +249,11 @@ public final class MediaEditor {
         self.setupSource()
     }
     
+    private var skipRendering = false
     public func setCrop(offset: CGPoint, scale: CGFloat, rotation: CGFloat, mirroring: Bool) {
+        self.skipRendering = true
         self.values = self.values.withUpdatedCrop(offset: offset, scale: scale, rotation: rotation, mirroring: mirroring)
+        self.skipRendering = false
     }
     
     public func getToolValue(_ key: EditorToolKey) -> Any? {
diff --git a/submodules/TelegramUI/Components/MediaEditor/Sources/MediaEditorComposer.swift b/submodules/TelegramUI/Components/MediaEditor/Sources/MediaEditorComposer.swift
index d17bed5105..d570a21434 100644
--- a/submodules/TelegramUI/Components/MediaEditor/Sources/MediaEditorComposer.swift
+++ b/submodules/TelegramUI/Components/MediaEditor/Sources/MediaEditorComposer.swift
@@ -3,6 +3,7 @@ import AVFoundation
 import UIKit
 import CoreImage
 import Metal
+import MetalKit
 import Display
 import SwiftSignalKit
 import TelegramCore
@@ -10,7 +11,6 @@ import AnimatedStickerNode
 import TelegramAnimatedStickerNode
 import YuvConversion
 import StickerResources
-import AccountContext
 
 final class MediaEditorComposer {
     let device: MTLDevice?
@@ -28,7 +28,7 @@ final class MediaEditorComposer {
     private let drawingImage: CIImage?
     private var entities: [MediaEditorComposerEntity]
     
-    init(context: AccountContext, values: MediaEditorValues, dimensions: CGSize) {
+    init(account: Account, values: MediaEditorValues, dimensions: CGSize) {
         self.values = values
         self.dimensions = dimensions
@@ -48,7 +48,7 @@ final class MediaEditorComposer {
             self.drawingImage = nil
         }
         
-        self.entities = values.entities.map { $0.entity } .compactMap { composerEntityForDrawingEntity(context: context, entity: $0) }
+        self.entities = values.entities.map { $0.entity } .compactMap { composerEntityForDrawingEntity(account: account, entity: $0) }
         
         self.device = MTLCreateSystemDefaultDevice()
         if let device = self.device {
@@ -103,15 +103,52 @@ final class MediaEditorComposer {
             }
         }
         completion(nil)
-        return
+    }
+    
+    private var filteredImage: CIImage?
+    func processImage(inputImage: UIImage, pool: CVPixelBufferPool?, time: CMTime, completion: @escaping (CVPixelBuffer?, CMTime) -> Void) {
+        guard let pool else {
+            completion(nil, time)
+            return
+        }
+        if self.filteredImage == nil, let device = self.device, let cgImage = inputImage.cgImage {
+            let textureLoader = MTKTextureLoader(device: device)
+            if let texture = try? textureLoader.newTexture(cgImage: cgImage) {
+                self.renderer.consumeTexture(texture, rotation: .rotate0Degrees)
+                self.renderer.renderFrame()
+                
+                if let finalTexture = self.renderer.finalTexture, var ciImage = CIImage(mtlTexture: finalTexture) {
+                    ciImage = ciImage.transformed(by: CGAffineTransformMakeScale(1.0, -1.0).translatedBy(x: 0.0, y: -ciImage.extent.height))
+                    self.filteredImage = ciImage
+                }
+            }
+        }
+        
+        if let image = self.filteredImage {
+            var pixelBuffer: CVPixelBuffer?
+            CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, pool, &pixelBuffer)
+            
+            if let pixelBuffer {
+                makeEditorImageFrameComposition(inputImage: image, gradientImage: self.gradientImage, drawingImage: self.drawingImage, dimensions: self.dimensions, values: self.values, entities: self.entities, time: time, completion: { compositedImage in
+                    if let compositedImage {
+                        self.ciContext?.render(compositedImage, to: pixelBuffer)
+                        completion(pixelBuffer, time)
+                    } else {
+                        completion(nil, time)
+                    }
+                })
+                return
+            }
+        }
+        completion(nil, time)
     }
     
     func processImage(inputImage: CIImage, time: CMTime, completion: @escaping (CIImage?) -> Void) {
-        return makeEditorImageFrameComposition(inputImage: inputImage, gradientImage: self.gradientImage, drawingImage: self.drawingImage, dimensions: self.dimensions, values: self.values, entities: self.entities, time: time, completion: completion)
+        makeEditorImageFrameComposition(inputImage: inputImage, gradientImage: self.gradientImage, drawingImage: self.drawingImage, dimensions: self.dimensions, values: self.values, entities: self.entities, time: time, completion: completion)
     }
 }
 
-public func makeEditorImageComposition(context: AccountContext, inputImage: UIImage, dimensions: CGSize, values: MediaEditorValues, time: CMTime, completion: @escaping (UIImage?) -> Void) {
+public func makeEditorImageComposition(account: Account, inputImage: UIImage, dimensions: CGSize, values: MediaEditorValues, time: CMTime, completion: @escaping (UIImage?) -> Void) {
     let inputImage = CIImage(image: inputImage)!
     let gradientImage: CIImage
     var drawingImage: CIImage?
@@ -126,7 +163,7 @@ public func makeEditorImageComposition(context: AccountContext, inputImage: UIIm
         drawingImage = image.transformed(by: CGAffineTransform(translationX: -dimensions.width / 2.0, y: -dimensions.height / 2.0))
     }
     
-    let entities: [MediaEditorComposerEntity] = values.entities.map { $0.entity }.compactMap { composerEntityForDrawingEntity(context: context, entity: $0) }
+    let entities: [MediaEditorComposerEntity] = values.entities.map { $0.entity }.compactMap { composerEntityForDrawingEntity(account: account, entity: $0) }
     makeEditorImageFrameComposition(inputImage: inputImage, gradientImage: gradientImage, drawingImage: drawingImage, dimensions: dimensions, values: values, entities: entities, time: time, completion: { ciImage in
         if let ciImage {
             let context = CIContext(options: [.workingColorSpace : NSNull()])
@@ -147,9 +184,16 @@ private func makeEditorImageFrameComposition(inputImage: CIImage, gradientImage:
     var mediaImage = inputImage.transformed(by: CGAffineTransform(translationX: -inputImage.extent.midX, y: -inputImage.extent.midY))
     
+    var initialScale: CGFloat
+    if mediaImage.extent.height > mediaImage.extent.width {
+        initialScale = dimensions.height / mediaImage.extent.height
+    } else {
+        initialScale = dimensions.width / mediaImage.extent.width
+    }
+    
     var cropTransform = CGAffineTransform(translationX: values.cropOffset.x, y: values.cropOffset.y * -1.0)
     cropTransform = cropTransform.rotated(by: -values.cropRotation)
-    cropTransform = cropTransform.scaledBy(x: values.cropScale, y: values.cropScale)
+    cropTransform = cropTransform.scaledBy(x: initialScale * values.cropScale, y: initialScale * values.cropScale)
     mediaImage = mediaImage.transformed(by: cropTransform)
     resultImage = mediaImage.composited(over: resultImage)
@@ -172,6 +216,7 @@ private func makeEditorImageFrameComposition(inputImage: CIImage, gradientImage:
         }
         
         resultImage = resultImage.transformed(by: CGAffineTransform(translationX: dimensions.width / 2.0, y: dimensions.height / 2.0))
+        resultImage = resultImage.cropped(to: CGRect(origin: .zero, size: dimensions))
         completion(resultImage)
     }
 }
@@ -183,21 +228,22 @@ private func makeEditorImageFrameComposition(inputImage: CIImage, gradientImage:
         let index = i
         entity.image(for: time, frameRate: frameRate, completion: { image in
             if var image = image {
-                var transform = CGAffineTransform(translationX: -image.extent.midX, y: -image.extent.midY)
-                image = image.transformed(by: transform)
+                let resetTransform = CGAffineTransform(translationX: -image.extent.width / 2.0, y: -image.extent.height / 2.0)
+                image = image.transformed(by: resetTransform)
                 
-                var scale = entity.scale * 1.0
+                var baseScale: CGFloat = 1.0
                 if let baseSize = entity.baseSize {
-                    scale *= baseSize.width / image.extent.size.width
+                    baseScale = baseSize.width / image.extent.width
                 }
-                
-                transform = CGAffineTransform(translationX: entity.position.x, y: dimensions.height - entity.position.y)
-                transform = transform.rotated(by: CGFloat.pi * 2.0 - entity.rotation)
-                transform = transform.scaledBy(x: scale, y: scale)
+                
+                var transform = CGAffineTransform.identity
+                transform = transform.translatedBy(x: -dimensions.width / 2.0 + entity.position.x, y: dimensions.height / 2.0 + entity.position.y * -1.0)
+                transform = transform.rotated(by: -entity.rotation)
+                transform = transform.scaledBy(x: entity.scale * baseScale, y: entity.scale * baseScale)
                 if entity.mirrored {
                     transform = transform.scaledBy(x: -1.0, y: 1.0)
                 }
-                
+                
                 image = image.transformed(by: transform)
                 let _ = entitiesImages.modify { current in
                     var updated = current
@@ -212,7 +258,7 @@ private func makeEditorImageFrameComposition(inputImage: CIImage, gradientImage:
     maybeFinalize()
 }
 
-private func composerEntityForDrawingEntity(context: AccountContext, entity: DrawingEntity) -> MediaEditorComposerEntity? {
+private func composerEntityForDrawingEntity(account: Account, entity: DrawingEntity) -> MediaEditorComposerEntity? {
     if let entity = entity as? DrawingStickerEntity {
         let content: MediaEditorComposerStickerEntity.Content
         switch entity.content {
@@ -221,7 +267,7 @@ private func composerEntityForDrawingEntity(context: AccountContext, entity: Dra
         case let .image(image):
             content = .image(image)
         }
-        return MediaEditorComposerStickerEntity(context: context, content: content, position: entity.position, scale: entity.scale, rotation: entity.rotation, baseSize: entity.baseSize, mirrored: entity.mirrored)
+        return MediaEditorComposerStickerEntity(account: account, content: content, position: entity.position, scale: entity.scale, rotation: entity.rotation, baseSize: entity.baseSize, mirrored: entity.mirrored)
     } else if let renderImage = entity.renderImage, let image = CIImage(image: renderImage) {
         if let entity = entity as? DrawingBubbleEntity {
             return MediaEditorComposerStaticEntity(image: image, position: entity.position, scale: 1.0, rotation: entity.rotation, baseSize: entity.size, mirrored: false)
@@ -295,7 +341,7 @@ private class MediaEditorComposerStickerEntity: MediaEditorComposerEntity {
     var imagePixelBuffer: CVPixelBuffer?
     let imagePromise = Promise<UIImage>()
     
-    init(context: AccountContext, content: Content, position: CGPoint, scale: CGFloat, rotation: CGFloat, baseSize: CGSize, mirrored: Bool) {
+    init(account: Account, content: Content, position: CGPoint, scale: CGFloat, rotation: CGFloat, baseSize: CGSize, mirrored: Bool) {
         self.content = content
         self.position = position
         self.scale = scale
@@ -307,8 +353,8 @@ private class MediaEditorComposerStickerEntity: MediaEditorComposerEntity {
         case let .file(file):
             if file.isAnimatedSticker || file.isVideoSticker || file.mimeType == "video/webm" {
                 self.isAnimated = true
-                self.source = AnimatedStickerResourceSource(account: context.account, resource: file.resource, isVideo: file.isVideoSticker || file.mimeType == "video/webm")
-                let pathPrefix = context.account.postbox.mediaBox.shortLivedResourceCachePathPrefix(file.resource.id)
+                self.source = AnimatedStickerResourceSource(account: account, resource: file.resource, isVideo: file.isVideoSticker || file.mimeType == "video/webm")
+                let pathPrefix = account.postbox.mediaBox.shortLivedResourceCachePathPrefix(file.resource.id)
                 if let source = self.source {
                     let dimensions = file.dimensions ?? PixelDimensions(width: 512, height: 512)
                     let fittedDimensions = dimensions.cgSize.aspectFitted(CGSize(width: 384, height: 384))
@@ -337,7 +383,7 @@ private class MediaEditorComposerStickerEntity: MediaEditorComposerEntity {
                 }
             } else {
                 self.isAnimated = false
-                self.disposables.add((chatMessageSticker(account: context.account, userLocation: .other, file: file, small: false, fetched: true, onlyFullSize: true, thumbnail: false, synchronousLoad: false)
+                self.disposables.add((chatMessageSticker(account: account, userLocation: .other, file: file, small: false, fetched: true, onlyFullSize: true, thumbnail: false, synchronousLoad: false)
                 |> deliverOn(self.queue)).start(next: { [weak self] generator in
                     if let strongSelf = self {
                         let context = generator(TransformImageArguments(corners: ImageCorners(), imageSize: baseSize, boundingSize: baseSize, intrinsicInsets: UIEdgeInsets()))
@@ -368,7 +414,6 @@ private class MediaEditorComposerStickerEntity: MediaEditorComposerEntity {
                 tintColor = .white
             }
             
-//            let start = CACurrentMediaTime()
             self.disposables.add((self.frameSource.get()
             |> take(1)
             |> deliverOn(self.queue)).start(next: { [weak self] frameSource in
@@ -381,17 +426,7 @@ private class MediaEditorComposerStickerEntity: MediaEditorComposerEntity {
                     completion(nil)
                     return
                 }
-                
-//                if !strongSelf.tested {
-//                    frameSource.syncWith { frameSource in
-//                        for _ in 0 ..< 60 * 3 {
-//                            let _ = frameSource.takeFrame(draw: true)
-//                        }
-//                    }
-//                    strongSelf.tested = true
-//                    print("180 frames in \(CACurrentMediaTime() - start)")
-//                }
-                
+                
                 let relativeTime = currentTime - floor(currentTime / duration) * duration
                 var t = relativeTime / duration
                 t = max(0.0, t)
@@ -415,9 +450,6 @@ private class MediaEditorComposerStickerEntity: MediaEditorComposerEntity {
                     delta = max(1, frameIndex - previousFrameIndex)
                 }
                 
-                //print("skipping: \(delta) frames")
-                
-                
                 var frame: AnimatedStickerFrame?
                 frameSource.syncWith { frameSource in
                     for i in 0 ..< delta {
@@ -425,8 +457,6 @@ private class MediaEditorComposerStickerEntity: MediaEditorComposerEntity {
                     }
                 }
                 if let frame {
-                    //print("has frame: \(CACurrentMediaTime() - start)")
-                    
                     var imagePixelBuffer: CVPixelBuffer?
                     if let pixelBuffer = strongSelf.imagePixelBuffer {
                         imagePixelBuffer = pixelBuffer
@@ -451,7 +481,6 @@ private class MediaEditorComposerStickerEntity: MediaEditorComposerEntity {
                     
                     if let imagePixelBuffer {
                         let image = render(width: frame.width, height: frame.height, bytesPerRow: frame.bytesPerRow, data: frame.data, type: frame.type, pixelBuffer: imagePixelBuffer, tintColor: tintColor)
-                        //print("image loaded in: \(CACurrentMediaTime() - start)")
                         strongSelf.image = image
                     }
                     completion(strongSelf.image)
@@ -505,7 +534,7 @@ private func render(width: Int, height: Int, bytesPerRow: Int, data: Data, type:
             guard let bytes = buffer.baseAddress?.assumingMemoryBound(to: UInt8.self) else {
                 return
             }
-            decodeYUVAToRGBA(bytes, dest, Int32(width), Int32(height), Int32(bytesPerRow))
+            decodeYUVAToRGBA(bytes, dest, Int32(width), Int32(height), Int32(width * 4))
         }
     case .argb:
         data.withUnsafeBytes { buffer -> Void in
diff --git a/submodules/TelegramUI/Components/MediaEditor/Sources/MediaEditorValues.swift b/submodules/TelegramUI/Components/MediaEditor/Sources/MediaEditorValues.swift
index e2e7c13724..4c77d18ada 100644
--- a/submodules/TelegramUI/Components/MediaEditor/Sources/MediaEditorValues.swift
+++ b/submodules/TelegramUI/Components/MediaEditor/Sources/MediaEditorValues.swift
@@ -2,8 +2,9 @@ import Foundation
 import UIKit
 import Display
 import TelegramCore
+import AVFoundation
 
-public enum EditorToolKey {
+public enum EditorToolKey: Int32 {
     case enhance
     case brightness
     case contrast
@@ -20,6 +21,7 @@ public enum EditorToolKey {
     case blur
     case curves
 }
+
 private let adjustmentToolsKeys: [EditorToolKey] = [
     .enhance,
     .brightness,
@@ -34,7 +36,26 @@ private let adjustmentToolsKeys: [EditorToolKey] = [
     .sharpen
 ]
 
-public class MediaEditorValues {
+public final class MediaEditorValues: Codable {
+    private enum CodingKeys: String, CodingKey {
+        case originalWidth
+        case originalHeight
+        case cropOffset
+        case cropSize
+        case cropScale
+        case cropRotation
+        case cropMirroring
+        
+        case gradientColors
+        
+        case videoTrimRange
+        case videoIsMuted
+        
+        case drawing
+        case entities
+        case toolValues
+    }
+    
     public let originalDimensions: PixelDimensions
     public let cropOffset: CGPoint
     public let cropSize: CGSize?
@@ -79,6 +100,79 @@ public class MediaEditorValues {
         self.toolValues = toolValues
     }
     
+    public init(from decoder: Decoder) throws {
+        let container = try decoder.container(keyedBy: CodingKeys.self)
+        
+        let width = try container.decode(Int32.self, forKey: .originalWidth)
+        let height = try container.decode(Int32.self, forKey: .originalHeight)
+        self.originalDimensions = PixelDimensions(width: width, height: height)
+        
+        self.cropOffset = try container.decode(CGPoint.self, forKey: .cropOffset)
+        self.cropSize = try container.decodeIfPresent(CGSize.self, forKey: .cropSize)
+        self.cropScale = try container.decode(CGFloat.self, forKey: .cropScale)
+        self.cropRotation = try container.decode(CGFloat.self, forKey: .cropRotation)
+        self.cropMirroring = try container.decode(Bool.self, forKey: .cropMirroring)
+        
+        if let gradientColors = try container.decodeIfPresent([DrawingColor].self, forKey: .gradientColors) {
+            self.gradientColors = gradientColors.map { $0.toUIColor() }
+        } else {
+            self.gradientColors = nil
+        }
+        
+        self.videoTrimRange = try container.decodeIfPresent(Range<Double>.self, forKey: .videoTrimRange)
+        self.videoIsMuted = try container.decode(Bool.self, forKey: .videoIsMuted)
+        
+        if let drawingData = try container.decodeIfPresent(Data.self, forKey: .drawing), let image = UIImage(data: drawingData) {
+            self.drawing = image
+        } else {
+            self.drawing = nil
+        }
+        
+        self.entities = try container.decode([CodableDrawingEntity].self, forKey: .entities)
+        
+        let values = try container.decode([CodableToolValue].self, forKey: .toolValues)
+        var toolValues: [EditorToolKey: Any] = [:]
+        for value in values {
+            let (key, value) = value.keyAndValue
+            toolValues[key] = value
+        }
+        self.toolValues = toolValues
+    }
+    
+    public func encode(to encoder: Encoder) throws {
+        var container = encoder.container(keyedBy: CodingKeys.self)
+        
+        try container.encode(self.originalDimensions.width, forKey: .originalWidth)
+        try container.encode(self.originalDimensions.height, forKey: .originalHeight)
+        
+        try container.encode(self.cropOffset, forKey: .cropOffset)
+        try container.encode(self.cropSize, forKey: .cropSize)
+        try container.encode(self.cropScale, forKey: .cropScale)
+        try container.encode(self.cropRotation, forKey: .cropRotation)
+        try container.encode(self.cropMirroring, forKey: .cropMirroring)
+        
+        if let gradientColors = self.gradientColors {
+            try container.encode(gradientColors.map { DrawingColor(color: $0) }, forKey: .gradientColors)
+        }
+        
+        try container.encodeIfPresent(self.videoTrimRange, forKey: .videoTrimRange)
+        try container.encode(self.videoIsMuted, forKey: .videoIsMuted)
+        
+        if let drawing = self.drawing, let pngDrawingData = drawing.pngData() {
+            try container.encode(pngDrawingData, forKey: .drawing)
+        }
+        
+        try container.encode(self.entities, forKey: .entities)
+        
+        var values: [CodableToolValue] = []
+        for (key, value) in self.toolValues {
+            if let toolValue = CodableToolValue(key: key, value: value) {
+                values.append(toolValue)
+            }
+        }
+        try container.encode(values, forKey: .toolValues)
+    }
+    
     func withUpdatedCrop(offset: CGPoint, scale: CGFloat, rotation: CGFloat, mirroring: Bool) -> MediaEditorValues {
         return MediaEditorValues(originalDimensions: self.originalDimensions, cropOffset: offset, cropSize: self.cropSize, cropScale: scale, cropRotation: rotation, cropMirroring: mirroring, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, drawing: self.drawing, entities: self.entities, toolValues: self.toolValues)
     }
@@ -100,7 +194,12 @@ public class MediaEditorValues {
     }
 }
 
-public struct TintValue: Equatable {
+public struct TintValue: Equatable, Codable {
+    private enum CodingKeys: String, CodingKey {
+        case color
+        case intensity
+    }
+    
     public static let initial = TintValue(
         color: .clear,
         intensity: 0.5
@@ -117,6 +216,20 @@ public struct TintValue: Equatable {
         self.intensity = intensity
     }
     
+    public init(from decoder: Decoder) throws {
+        let container = try decoder.container(keyedBy: CodingKeys.self)
+        
+        self.color = try container.decode(DrawingColor.self, forKey: .color).toUIColor()
+        self.intensity = try container.decode(Float.self, forKey: .intensity)
+    }
+    
+    public func encode(to encoder: Encoder) throws {
+        var container = encoder.container(keyedBy: CodingKeys.self)
+        
+        try container.encode(DrawingColor(color: self.color), forKey: .color)
+        try container.encode(self.intensity, forKey: .intensity)
+    }
+    
     public func withUpdatedColor(_ color: UIColor) -> TintValue {
         return TintValue(color: color, intensity: self.intensity)
     }
@@ -126,7 +239,16 @@ public struct TintValue: Equatable {
     }
 }
 
-public struct BlurValue: Equatable {
+public struct BlurValue: Equatable, Codable {
+    private enum CodingKeys: String, CodingKey {
+        case mode
+        case intensity
+        case position
+        case size
+        case falloff
+        case rotation
+    }
+    
     public static let initial = BlurValue(
         mode: .off,
         intensity: 0.5,
@@ -136,7 +258,7 @@ public struct BlurValue: Equatable {
         rotation: 0.0
     )
     
-    public enum Mode: Equatable {
+    public enum Mode: Int32, Equatable {
         case off
         case radial
         case linear
@@ -166,6 +288,28 @@ public struct BlurValue: Equatable {
         self.rotation = rotation
     }
     
+    public init(from decoder: Decoder) throws {
+        let container = try decoder.container(keyedBy: CodingKeys.self)
+        
+        self.mode = try BlurValue.Mode(rawValue: container.decode(Int32.self, forKey: .mode)) ?? .off
+        self.intensity = try container.decode(Float.self, forKey: .intensity)
+        self.position = try container.decode(CGPoint.self, forKey: .position)
+        self.size = try container.decode(Float.self, forKey: .size)
+        self.falloff = try container.decode(Float.self, forKey: .falloff)
+        self.rotation = try container.decode(Float.self, forKey: .rotation)
+    }
+    
+    public func encode(to encoder: Encoder) throws {
+        var container = encoder.container(keyedBy: CodingKeys.self)
+        
+        try container.encode(self.mode.rawValue, forKey: .mode)
+        try container.encode(self.intensity, forKey: .intensity)
+        try container.encode(self.position, forKey: .position)
+        try container.encode(self.size, forKey: .size)
+        try container.encode(self.falloff, forKey: .falloff)
+        try container.encode(self.rotation, forKey: .rotation)
+    }
+    
     public func withUpdatedMode(_ mode: Mode) -> BlurValue {
         return BlurValue(
             mode: mode,
@@ -233,8 +377,23 @@ public struct BlurValue: Equatable {
     }
 }
 
-public struct CurvesValue: Equatable {
-    public struct CurveValue: Equatable {
+public struct CurvesValue: Equatable, Codable {
+    private enum CodingKeys: String, CodingKey {
+        case all
+        case red
+        case green
+        case blue
+    }
+    
+    public struct CurveValue: Equatable, Codable {
+        private enum CodingKeys: String, CodingKey {
+            case blacks
+            case shadows
+            case midtones
+            case highlights
+            case whites
+        }
+        
         public static let initial = CurveValue(
             blacks: 0.0,
             shadows: 0.25,
@@ -304,6 +463,26 @@ public struct CurvesValue: Equatable {
             self.whites = whites
         }
         
+        public init(from decoder: Decoder) throws {
+            let container = try decoder.container(keyedBy: CodingKeys.self)
+            
+            self.blacks = try container.decode(Float.self, forKey: .blacks)
+            self.shadows = try container.decode(Float.self, forKey: .shadows)
+            self.midtones = try container.decode(Float.self, forKey: .midtones)
+            self.highlights = try container.decode(Float.self, forKey: .highlights)
+            self.whites = try container.decode(Float.self, forKey: .whites)
+        }
+        
+        public func encode(to encoder: Encoder) throws {
+            var container = encoder.container(keyedBy: CodingKeys.self)
+            
+            try container.encode(self.blacks, forKey: .blacks)
+            try container.encode(self.shadows, forKey: .shadows)
+            try container.encode(self.midtones, forKey: .midtones)
+            try container.encode(self.highlights, forKey: .highlights)
+            try container.encode(self.whites, forKey: .whites)
+        }
+        
         public func withUpdatedBlacks(_ blacks: Float) -> CurveValue {
             return CurveValue(blacks: blacks, shadows: self.shadows, midtones: self.midtones, highlights: self.highlights, whites: self.whites)
         }
@@ -349,6 +528,24 @@ public struct CurvesValue: Equatable {
         self.blue = blue
     }
     
+    public init(from decoder: Decoder) throws {
+        let container = try decoder.container(keyedBy: CodingKeys.self)
+        
+        self.all = try container.decode(CurveValue.self, forKey: .all)
+        self.red = try container.decode(CurveValue.self, forKey: .red)
+        self.green = try container.decode(CurveValue.self, forKey: .green)
+        self.blue = try container.decode(CurveValue.self, forKey: .blue)
+    }
+    
+    public func encode(to encoder: Encoder) throws {
+        var container = encoder.container(keyedBy: CodingKeys.self)
+        
+        try container.encode(self.all, forKey: .all)
+        try container.encode(self.red, forKey: .red)
+        try container.encode(self.green, forKey: .green)
+        try container.encode(self.blue, forKey: .blue)
+    }
+    
     public func withUpdatedAll(_ all: CurveValue) -> CurvesValue {
         return CurvesValue(all: all, red: self.red, green: self.green, blue: self.blue)
     }
@@ -623,3 +820,116 @@ public func curveThroughPoints(count: Int, valueAtIndex: (Int) -> Float, positio
     return (path, dataPoints)
 }
+
+public enum CodableToolValue {
+    case float(EditorToolKey, Float)
+    case tint(EditorToolKey, TintValue)
+    case blur(EditorToolKey, BlurValue)
+    case curves(EditorToolKey, CurvesValue)
+    
+    public init?(key: EditorToolKey, value: Any) {
+        if let toolValue = value as? Float {
+            self = .float(key, toolValue)
+        } else if let toolValue = value as? TintValue {
+            self = .tint(key, toolValue)
+        } else if let toolValue = value as? BlurValue {
+            self = .blur(key, toolValue)
+        } else if let toolValue = value as? CurvesValue {
+            self = .curves(key, toolValue)
+        } else {
+            return nil
+        }
+    }
+    
+    public var keyAndValue: (EditorToolKey, Any) {
+        switch self {
+        case let .float(key, value):
+            return (key, value)
+        case let .tint(key, value):
+            return (key, value)
+        case let .blur(key, value):
+            return (key, value)
+        case let .curves(key, value):
+            return (key, value)
+        }
+    }
+}
+
+extension CodableToolValue: Codable {
+    private enum CodingKeys: String, CodingKey {
+        case key
+        case type
+        case value
+    }
+    
+    private enum ToolType: Int, Codable {
+        case float
+        case tint
+        case blur
+        case curves
+    }
+    
+    public init(from decoder: Decoder) throws {
+        let container = try decoder.container(keyedBy: CodingKeys.self)
+        let type = try container.decode(ToolType.self, forKey: .type)
+        let key = EditorToolKey(rawValue: try container.decode(Int32.self, forKey: .key))!
+        switch type {
+        case .float:
+            self = .float(key, try container.decode(Float.self, forKey: .value))
+        case .tint:
+            self = .tint(key, try container.decode(TintValue.self, forKey: .value))
+        case .blur:
+            self = .blur(key, try container.decode(BlurValue.self, forKey: .value))
+        case .curves:
+            self = .curves(key, try container.decode(CurvesValue.self, forKey: .value))
+        }
+    }
+    
+    public func encode(to encoder: Encoder) throws {
+        var container = encoder.container(keyedBy: CodingKeys.self)
+        switch self {
+        case let .float(key, value):
+            try container.encode(key.rawValue, forKey: .key)
+            try container.encode(ToolType.float, forKey: .type)
+            try container.encode(value, forKey: .value)
+        case let .tint(key, value):
+            try container.encode(key.rawValue, forKey: .key)
+            try container.encode(ToolType.tint, forKey: .type)
+            try container.encode(value, forKey: .value)
+        case let .blur(key, value):
+            try container.encode(key.rawValue, forKey: .key)
+            try container.encode(ToolType.blur, forKey: .type)
+            try container.encode(value, forKey: .value)
+        case let .curves(key, value):
+            try container.encode(key.rawValue, forKey: .key)
+            try container.encode(ToolType.curves, forKey: .type)
+            try container.encode(value, forKey: .value)
+        }
+    }
+}
+
+public func recommendedVideoExportConfiguration(values: MediaEditorValues) -> MediaEditorVideoExport.Configuration {
+    let compressionProperties: [String: Any] = [
+        AVVideoAverageBitRateKey: 2000000
+    ]
+    
+    let videoSettings: [String: Any] = [
+        AVVideoCodecKey: AVVideoCodecType.h264,
+        AVVideoCompressionPropertiesKey: compressionProperties,
+        AVVideoWidthKey: 1080,
+        AVVideoHeightKey: 1920
+    ]
+    
+    let audioSettings: [String: Any] = [
+        AVFormatIDKey: kAudioFormatMPEG4AAC,
+        AVSampleRateKey: 44100,
+        AVEncoderBitRateKey: 64000,
+        AVNumberOfChannelsKey: 2
+    ]
+    
+    return MediaEditorVideoExport.Configuration(
+        videoSettings: videoSettings,
+        audioSettings: audioSettings,
+        values: values
+    )
+}
diff --git a/submodules/TelegramUI/Components/MediaEditor/Sources/MediaEditorVideoExport.swift b/submodules/TelegramUI/Components/MediaEditor/Sources/MediaEditorVideoExport.swift
index 7ec4b9fe29..0dd882bfbc 100644
--- a/submodules/TelegramUI/Components/MediaEditor/Sources/MediaEditorVideoExport.swift
+++ b/submodules/TelegramUI/Components/MediaEditor/Sources/MediaEditorVideoExport.swift
@@ -2,7 +2,7 @@ import Foundation
 import AVFoundation
 import MetalKit
 import SwiftSignalKit
-import AccountContext
+import TelegramCore
 
 enum ExportWriterStatus {
     case unknown
@@ -240,7 +240,7 @@ public final class MediaEditorVideoExport {
     
     public private(set) var internalStatus: Status = .idle
     
-    private let context: AccountContext
+    private let account: Account
     private let subject: Subject
     private let configuration: Configuration
     private let outputPath: String
@@ -266,8 +266,8 @@ public final class MediaEditorVideoExport {
     
     private let semaphore = DispatchSemaphore(value: 0)
     
-    public init(context: AccountContext, subject: Subject, configuration: Configuration, outputPath: String) {
-        self.context = context
+    public init(account: Account, subject: Subject, configuration: Configuration, outputPath: String) {
+        self.account = account
         self.subject = subject
         self.configuration = configuration
         self.outputPath = outputPath
@@ -289,16 +289,18 @@ public final class MediaEditorVideoExport {
         }
         
         if self.configuration.values.requiresComposing {
-            self.composer = MediaEditorComposer(context: self.context, values: self.configuration.values, dimensions: self.configuration.dimensions)
+            self.composer = MediaEditorComposer(account: self.account, values: self.configuration.values, dimensions: self.configuration.dimensions)
+        }
+        
+        switch self.subject {
+        case let .video(asset):
+            self.setupWithAsset(asset)
+        case let .image(image):
+            self.setupWithImage(image)
         }
-        self.setupVideoInput()
     }
     
-    private func setupVideoInput() {
-        guard case let .video(asset) = self.subject else {
-            return
-        }
-        
+    private func setupWithAsset(_ asset: AVAsset) {
         self.reader = try? AVAssetReader(asset: asset)
         guard let reader = self.reader else {
             return
@@ -363,22 +365,40 @@ public final class MediaEditorVideoExport {
         }
     }
     
+    private func setupWithImage(_ image: UIImage) {
+        self.writer = MediaEditorVideoAVAssetWriter()
+        guard let writer = self.writer else {
+            return
+        }
+        writer.setup(configuration: self.configuration, outputPath: self.outputPath)
+        writer.setupVideoInput(configuration: self.configuration, inputTransform: nil)
+    }
+    
     private func finish() {
         assert(self.queue.isCurrent())
         
-        guard let reader = self.reader, let writer = self.writer else {
+        guard let writer = self.writer else {
             return
         }
         
         let outputUrl = URL(fileURLWithPath: self.outputPath)
        
-        if reader.status == .cancelled || writer.status == .cancelled {
+        var cancelled = false
+        if let reader = self.reader, reader.status == .cancelled {
             if writer.status != .cancelled {
                 writer.cancelWriting()
             }
-            if reader.status != .cancelled {
+            cancelled = true
+        }
+        
+        if writer.status == .cancelled {
+            if let reader = self.reader, reader.status != .cancelled {
                 reader.cancelReading()
             }
+            cancelled = true
+        }
+        
+        if cancelled {
             try? FileManager().removeItem(at: outputUrl)
             self.internalStatus = .finished
             self.statusValue = .failed(.cancelled)
@@ -389,7 +409,7 @@ public final class MediaEditorVideoExport {
             try? FileManager().removeItem(at: outputUrl)
             self.internalStatus = .finished
             self.statusValue = .failed(.writing(nil))
-        } else if reader.status == .failed {
+        } else if let reader = self.reader, reader.status == .failed {
             try? FileManager().removeItem(at: outputUrl)
FileManager().removeItem(at: outputUrl) writer.cancelWriting() self.internalStatus = .finished @@ -420,6 +440,46 @@ public final class MediaEditorVideoExport { } } + private func encodeImageVideo() -> Bool { + guard let writer = self.writer, let composer = self.composer, case let .image(image) = self.subject else { + return false + } + + let duration: Double = 3.0 + let frameRate: Double = 60.0 + var position: CMTime = CMTime(value: 0, timescale: Int32(frameRate)) + + var appendFailed = false + while writer.isReadyForMoreVideoData { + if appendFailed { + return false + } + if writer.status != .writing { + writer.markVideoAsFinished() + return false + } + self.pauseDispatchGroup.wait() + composer.processImage(inputImage: image, pool: writer.pixelBufferPool, time: position, completion: { pixelBuffer, timestamp in + if let pixelBuffer { + if !writer.appendPixelBuffer(pixelBuffer, at: timestamp) { + writer.markVideoAsFinished() + appendFailed = true + } + } + Thread.sleep(forTimeInterval: 0.001) + self.semaphore.signal() + }) + self.semaphore.wait() + + position = position + CMTime(value: 1, timescale: Int32(frameRate)) + if position.seconds >= duration { + writer.markVideoAsFinished() + return false + } + } + return true + } + private func encodeVideo() -> Bool { guard let reader = self.reader, let writer = self.writer, let output = self.videoOutput else { return false @@ -551,7 +611,33 @@ public final class MediaEditorVideoExport { return self.statusPromise.get() } - public func startExport() { + + private func startImageVideoExport() { + guard self.internalStatus == .idle, let writer = self.writer else { + self.statusValue = .failed(.invalid) + return + } + + guard writer.startWriting() else { + self.statusValue = .failed(.writing(nil)) + return + } + + self.internalStatus = .exporting + + writer.startSession(atSourceTime: .zero) + + var exportForVideoOutput: MediaEditorVideoExport? = self + writer.requestVideoDataWhenReady(on: self.queue.queue) { + guard let export = exportForVideoOutput else { return } + if !export.encodeImageVideo() { + exportForVideoOutput = nil + export.finish() + } + } + } + + private func startVideoExport() { guard self.internalStatus == .idle, let writer = self.writer, let reader = self.reader else { self.statusValue = .failed(.invalid) return @@ -572,16 +658,15 @@ public final class MediaEditorVideoExport { var videoCompleted = false var audioCompleted = false - if let _ = self.videoOutput { - var sessionForVideoEncoder: MediaEditorVideoExport? = self + var exportForVideoOutput: MediaEditorVideoExport? = self writer.requestVideoDataWhenReady(on: self.queue.queue) { - guard let session = sessionForVideoEncoder else { return } - if !session.encodeVideo() { + guard let export = exportForVideoOutput else { return } + if !export.encodeVideo() { videoCompleted = true - sessionForVideoEncoder = nil + exportForVideoOutput = nil if audioCompleted { - session.finish() + export.finish() } } } @@ -590,14 +675,14 @@ public final class MediaEditorVideoExport { } if let _ = self.audioOutput { - var sessionForAudioEncoder: MediaEditorVideoExport? = self + var exportForAudioOutput: MediaEditorVideoExport? 
= self writer.requestAudioDataWhenReady(on: self.queue.queue) { - guard let session = sessionForAudioEncoder else { return } - if !session.encodeAudio() { + guard let export = exportForAudioOutput else { return } + if !export.encodeAudio() { audioCompleted = true - sessionForAudioEncoder = nil + exportForAudioOutput = nil if videoCompleted { - session.finish() + export.finish() } } } @@ -605,4 +690,13 @@ public final class MediaEditorVideoExport { audioCompleted = true } } + + public func startExport() { + switch self.subject { + case .video: + self.startVideoExport() + case .image: + self.startImageVideoExport() + } + } } diff --git a/submodules/TelegramUI/Components/MediaEditor/Sources/VideoTextureSource.swift b/submodules/TelegramUI/Components/MediaEditor/Sources/VideoTextureSource.swift index a881a83f9f..205581760f 100644 --- a/submodules/TelegramUI/Components/MediaEditor/Sources/VideoTextureSource.swift +++ b/submodules/TelegramUI/Components/MediaEditor/Sources/VideoTextureSource.swift @@ -13,7 +13,7 @@ final class VideoTextureSource: NSObject, TextureSource, AVPlayerItemOutputPullD private var displayLink: CADisplayLink? - private var preferredVideoTransform: CGAffineTransform = .identity + private var textureRotation: TextureRotation = .rotate0Degrees private var forceUpdate: Bool = false @@ -82,8 +82,32 @@ final class VideoTextureSource: NSObject, TextureSource, AVPlayerItemOutputPullD for track in playerItem.asset.tracks { if track.mediaType == .video { hasVideoTrack = true - self.preferredVideoTransform = track.preferredTransform - break + + let t = track.preferredTransform + if t.a == -1.0 && t.d == -1.0 { + self.textureRotation = .rotate180Degrees + } else if t.a == 1.0 && t.d == 1.0 { + self.textureRotation = .rotate0Degrees + } else if t.b == -1.0 && t.c == 1.0 { + self.textureRotation = .rotate270Degrees + } else if t.a == -1.0 && t.d == 1.0 { +// if (mirrored != NULL) { +// *mirrored = true; +// } + self.textureRotation = .rotate270Degrees + } else if t.a == 1.0 && t.d == -1.0 { +// if (mirrored != NULL) { +// *mirrored = true; +// } + self.textureRotation = .rotate180Degrees + } else { +// if (t.c == 1) { +// if (mirrored != NULL) { +// *mirrored = true; +// } +// } + self.textureRotation = .rotate90Degrees + } } } if !hasVideoTrack { @@ -151,13 +175,8 @@ final class VideoTextureSource: NSObject, TextureSource, AVPlayerItemOutputPullD var presentationTime: CMTime = .zero if let pixelBuffer = output.copyPixelBuffer(forItemTime: requestTime, itemTimeForDisplay: &presentationTime) { if let texture = self.pixelBufferToMTLTexture(pixelBuffer: pixelBuffer) { - self.output?.consumeTexture(texture, rotation: .rotate90Degrees) + self.output?.consumeTexture(texture, rotation: self.textureRotation) } -// -// self.handler(VideoFrame(preferredTrackTransform: self.preferredVideoTransform, -// presentationTimestamp: presentationTime, -// playerTimestamp: player.currentTime(), -// pixelBuffer: pixelBuffer)) } } diff --git a/submodules/TelegramUI/Components/MediaEditorScreen/Sources/MediaEditorScreen.swift b/submodules/TelegramUI/Components/MediaEditorScreen/Sources/MediaEditorScreen.swift index a05a4d0bb5..72af232319 100644 --- a/submodules/TelegramUI/Components/MediaEditorScreen/Sources/MediaEditorScreen.swift +++ b/submodules/TelegramUI/Components/MediaEditorScreen/Sources/MediaEditorScreen.swift @@ -1164,8 +1164,13 @@ public final class MediaEditorScreen: ViewController { } public enum Result { - case image(UIImage, NSAttributedString?) 
- case video(String, UIImage?, MediaEditorValues, NSAttributedString?) + public enum VideoResult { + case imageFile(path: String) + case videoFile(path: String) + case asset(localIdentifier: String) + } + case image(image: UIImage, dimensions: PixelDimensions, caption: NSAttributedString?) + case video(video: VideoResult, coverImage: UIImage?, values: MediaEditorValues, duration: Double, dimensions: PixelDimensions, caption: NSAttributedString?) } fileprivate let context: AccountContext @@ -1211,21 +1216,50 @@ public final class MediaEditorScreen: ViewController { } func requestCompletion(animated: Bool) { - guard let mediaEditor = self.node.mediaEditor else { + guard let mediaEditor = self.node.mediaEditor, let subject = self.node.subject else { return } if mediaEditor.resultIsVideo { - self.completion(.video("", nil, mediaEditor.values, nil), { [weak self] in + let videoResult: Result.VideoResult + let duration: Double + switch subject { + case let .image(image, _): + let tempImagePath = NSTemporaryDirectory() + "\(Int64.random(in: Int64.min ... Int64.max)).jpg" + if let data = image.jpegData(compressionQuality: 0.85) { + try? data.write(to: URL(fileURLWithPath: tempImagePath)) + } + videoResult = .imageFile(path: tempImagePath) + duration = 5.0 + case let .video(path, _): + videoResult = .videoFile(path: path) + if let videoTrimRange = mediaEditor.values.videoTrimRange { + duration = videoTrimRange.upperBound - videoTrimRange.lowerBound + } else { + duration = 5.0 + } + case let .asset(asset): + videoResult = .asset(localIdentifier: asset.localIdentifier) + if asset.mediaType == .video { + if let videoTrimRange = mediaEditor.values.videoTrimRange { + duration = videoTrimRange.upperBound - videoTrimRange.lowerBound + } else { + duration = asset.duration + } + } else { + duration = 5.0 + } + } + self.completion(.video(video: videoResult, coverImage: nil, values: mediaEditor.values, duration: duration, dimensions: PixelDimensions(width: 1080, height: 1920), caption: nil), { [weak self] in self?.node.animateOut(completion: { [weak self] in self?.dismiss() }) }) } else { if let image = mediaEditor.resultImage { - makeEditorImageComposition(context: self.context, inputImage: image, dimensions: storyDimensions, values: mediaEditor.values, time: .zero, completion: { resultImage in + makeEditorImageComposition(account: self.context.account, inputImage: image, dimensions: storyDimensions, values: mediaEditor.values, time: .zero, completion: { resultImage in if let resultImage { - self.completion(.image(resultImage, nil), { [weak self] in + self.completion(.image(image: resultImage, dimensions: PixelDimensions(resultImage.size), caption: nil), { [weak self] in self?.node.animateOut(completion: { [weak self] in self?.dismiss() }) @@ -1236,7 +1270,7 @@ public final class MediaEditorScreen: ViewController { } } - private var export: MediaEditorVideoExport? + private var videoExport: MediaEditorVideoExport? private var exportDisposable: Disposable? 
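The requestSave() path that follows drives the reworked exporter end to end. As a minimal standalone sketch of that flow (assuming `account: Account`, a `mediaEditor: MediaEditor`, and an AVAsset named `asset` are in scope; module imports as used elsewhere in this diff):

    import AVFoundation
    import TelegramCore
    import MediaEditor

    // Build the fixed 1080x1920 H.264/AAC configuration added above,
    // then run the export and watch its status signal.
    let configuration = recommendedVideoExportConfiguration(values: mediaEditor.values)
    let outputPath = NSTemporaryDirectory() + "\(Int64.random(in: 0 ..< .max)).mp4"
    let videoExport = MediaEditorVideoExport(
        account: account,
        subject: .video(asset),
        configuration: configuration,
        outputPath: outputPath
    )
    videoExport.startExport()
    let statusDisposable = videoExport.status.start(next: { status in
        if case .completed = status {
            // outputPath now holds the rendered video file
        }
    })

For a still, `subject: .image(someUIImage)` routes through startImageVideoExport(), which renders the image into a short clip frame by frame via encodeImageVideo().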
func requestSave() { @@ -1266,32 +1300,35 @@ public final class MediaEditorScreen: ViewController { if mediaEditor.resultIsVideo { let exportSubject: MediaEditorVideoExport.Subject - if case let .video(path, _) = subject { + switch subject { + case let .video(path, _): let asset = AVURLAsset(url: NSURL(fileURLWithPath: path) as URL) exportSubject = .video(asset) - } else { + case let .image(image, _): + exportSubject = .image(image) + default: fatalError() } - let configuration = recommendedExportConfiguration(mediaEditor: mediaEditor) + let configuration = recommendedVideoExportConfiguration(values: mediaEditor.values) let outputPath = NSTemporaryDirectory() + "\(Int64.random(in: 0 ..< .max)).mp4" - let export = MediaEditorVideoExport(context: self.context, subject: exportSubject, configuration: configuration, outputPath: outputPath) - self.export = export + let videoExport = MediaEditorVideoExport(account: self.context.account, subject: exportSubject, configuration: configuration, outputPath: outputPath) + self.videoExport = videoExport - export.startExport() + videoExport.startExport() - self.exportDisposable = (export.status + self.exportDisposable = (videoExport.status |> deliverOnMainQueue).start(next: { [weak self] status in if let self { if case .completed = status { - self.export = nil + self.videoExport = nil saveToPhotos(outputPath, true) } } }) } else { if let image = mediaEditor.resultImage { - makeEditorImageComposition(context: self.context, inputImage: image, dimensions: storyDimensions, values: mediaEditor.values, time: .zero, completion: { resultImage in + makeEditorImageComposition(account: self.context.account, inputImage: image, dimensions: storyDimensions, values: mediaEditor.values, time: .zero, completion: { resultImage in if let data = resultImage?.jpegData(compressionQuality: 0.8) { let outputPath = NSTemporaryDirectory() + "\(Int64.random(in: 0 ..< .max)).jpg" try? data.write(to: URL(fileURLWithPath: outputPath)) @@ -1308,30 +1345,3 @@ public final class MediaEditorScreen: ViewController { (self.displayNode as! 
Node).containerLayoutUpdated(layout: layout, transition: Transition(transition)) } } - - -private func recommendedExportConfiguration(mediaEditor: MediaEditor) -> MediaEditorVideoExport.Configuration { - let compressionProperties: [String: Any] = [ - AVVideoAverageBitRateKey: 2000000 - ] - - let videoSettings: [String: Any] = [ - AVVideoCodecKey: AVVideoCodecType.h264, - AVVideoCompressionPropertiesKey: compressionProperties, - AVVideoWidthKey: 1080, - AVVideoHeightKey: 1920 - ] - - let audioSettings: [String: Any] = [ - AVFormatIDKey: kAudioFormatMPEG4AAC, - AVSampleRateKey: 44100, - AVEncoderBitRateKey: 64000, - AVNumberOfChannelsKey: 2 - ] - - return MediaEditorVideoExport.Configuration( - videoSettings: videoSettings, - audioSettings: audioSettings, - values: mediaEditor.values - ) -} diff --git a/submodules/TelegramUI/Sources/FetchVideoMediaResource.swift b/submodules/TelegramUI/Sources/FetchVideoMediaResource.swift index 7485c0182d..1b6425d814 100644 --- a/submodules/TelegramUI/Sources/FetchVideoMediaResource.swift +++ b/submodules/TelegramUI/Sources/FetchVideoMediaResource.swift @@ -7,6 +7,7 @@ import LegacyComponents import FFMpegBinding import LocalMediaResources import LegacyMediaPickerUI +import MediaEditor private final class AVURLAssetCopyItem: MediaResourceDataFetchCopyLocalItem { private let url: URL @@ -220,15 +221,15 @@ public func fetchVideoLibraryMediaResource(account: Account, resource: VideoLibr let alreadyReceivedAsset = Atomic(value: false) requestId = PHImageManager.default().requestAVAsset(forVideo: asset, options: option, resultHandler: { avAsset, _, _ in - if avAsset == nil { + if alreadyReceivedAsset.swap(true) { return } - - if alreadyReceivedAsset.swap(true) { + guard let avAsset else { return } var adjustments: TGVideoEditAdjustments? + var mediaEditorValues: MediaEditorValues? switch resource.conversion { case .passthrough: if let asset = avAsset as? AVURLAsset { @@ -245,76 +246,128 @@ public func fetchVideoLibraryMediaResource(account: Account, resource: VideoLibr } case let .compress(adjustmentsValue): if let adjustmentsValue = adjustmentsValue { - if let dict = NSKeyedUnarchiver.unarchiveObject(with: adjustmentsValue.data.makeData()) as? [AnyHashable : Any] { + if adjustmentsValue.isStory { + if let values = try? JSONDecoder().decode(MediaEditorValues.self, from: adjustmentsValue.data.makeData()) { + mediaEditorValues = values + } + } else if let dict = NSKeyedUnarchiver.unarchiveObject(with: adjustmentsValue.data.makeData()) as? [AnyHashable : Any] { adjustments = TGVideoEditAdjustments(dictionary: dict) } } } - let updatedSize = Atomic(value: 0) - let entityRenderer: LegacyPaintEntityRenderer? = adjustments.flatMap { adjustments in - if let paintingData = adjustments.paintingData, paintingData.hasAnimation { - return LegacyPaintEntityRenderer(account: account, adjustments: adjustments) - } else { - return nil - } - } let tempFile = EngineTempBox.shared.tempFile(fileName: "video.mp4") - let signal = TGMediaVideoConverter.convert(avAsset, adjustments: adjustments, path: tempFile.path, watcher: VideoConversionWatcher(update: { path, size in - /*var value = stat() - if stat(path, &value) == 0 { - let remuxedTempFile = TempBox.shared.tempFile(fileName: "video.mp4") - if FFMpegRemuxer.remux(path, to: remuxedTempFile.path) { - TempBox.shared.dispose(tempFile) - subscriber.putNext(.moveTempFile(file: remuxedTempFile)) - } else { - TempBox.shared.dispose(remuxedTempFile) - if let data = try? 
Data(contentsOf: URL(fileURLWithPath: path), options: [.mappedRead]) { - var range: Range? - let _ = updatedSize.modify { updatedSize in - range = updatedSize ..< value.st_size - return value.st_size - } - //print("size = \(Int(value.st_size)), range: \(range!)") - subscriber.putNext(.dataPart(resourceOffset: range!.lowerBound, data: data, range: range!, complete: false)) - } - } - }*/ - }), entityRenderer: entityRenderer)! - let signalDisposable = signal.start(next: { next in - if let result = next as? TGMediaVideoConversionResult { - var value = stat() - if stat(result.fileURL.path, &value) == 0 { - let remuxedTempFile = TempBox.shared.tempFile(fileName: "video.mp4") - if let size = fileSize(result.fileURL.path), size <= 32 * 1024 * 1024, FFMpegRemuxer.remux(result.fileURL.path, to: remuxedTempFile.path) { - TempBox.shared.dispose(tempFile) - subscriber.putNext(.moveTempFile(file: remuxedTempFile)) - } else { - TempBox.shared.dispose(remuxedTempFile) - if let data = try? Data(contentsOf: result.fileURL, options: [.mappedRead]) { - var range: Range? - let _ = updatedSize.modify { updatedSize in - range = updatedSize ..< value.st_size - return value.st_size + let updatedSize = Atomic(value: 0) + if let mediaEditorValues { + let configuration = recommendedVideoExportConfiguration(values: mediaEditorValues) + let videoExport = MediaEditorVideoExport(account: account, subject: .video(avAsset), configuration: configuration, outputPath: tempFile.path) + videoExport.startExport() + + let statusDisposable = videoExport.status.start(next: { status in + switch status { + case .completed: + var value = stat() + if stat(tempFile.path, &value) == 0 { + let remuxedTempFile = TempBox.shared.tempFile(fileName: "video.mp4") + if let size = fileSize(tempFile.path), size <= 32 * 1024 * 1024, FFMpegRemuxer.remux(tempFile.path, to: remuxedTempFile.path) { + TempBox.shared.dispose(tempFile) + subscriber.putNext(.moveTempFile(file: remuxedTempFile)) + } else { + TempBox.shared.dispose(remuxedTempFile) + if let data = try? Data(contentsOf: URL(fileURLWithPath: tempFile.path), options: [.mappedRead]) { + var range: Range? + let _ = updatedSize.modify { updatedSize in + range = updatedSize ..< value.st_size + return value.st_size + } + //print("finish size = \(Int(value.st_size)), range: \(range!)") + subscriber.putNext(.dataPart(resourceOffset: range!.lowerBound, data: data, range: range!, complete: false)) + subscriber.putNext(.replaceHeader(data: data, range: 0 ..< 1024)) + subscriber.putNext(.dataPart(resourceOffset: Int64(data.count), data: Data(), range: 0 ..< 0, complete: true)) } - //print("finish size = \(Int(value.st_size)), range: \(range!)") - subscriber.putNext(.dataPart(resourceOffset: range!.lowerBound, data: data, range: range!, complete: false)) - subscriber.putNext(.replaceHeader(data: data, range: 0 ..< 1024)) - subscriber.putNext(.dataPart(resourceOffset: Int64(data.count), data: Data(), range: 0 ..< 0, complete: true)) } + } else { + subscriber.putError(.generic) } - } else { + subscriber.putCompletion() + + EngineTempBox.shared.dispose(tempFile) + case .failed: subscriber.putError(.generic) + default: + break + } + }) + + disposable.set(ActionDisposable { + statusDisposable.dispose() + videoExport.cancel() + }) + } else { + let entityRenderer: LegacyPaintEntityRenderer? 
= adjustments.flatMap { adjustments in + if let paintingData = adjustments.paintingData, paintingData.hasAnimation { + return LegacyPaintEntityRenderer(account: account, adjustments: adjustments) + } else { + return nil } - subscriber.putCompletion() - - EngineTempBox.shared.dispose(tempFile) } - }, error: { _ in - subscriber.putError(.generic) - }, completed: nil) - disposable.set(ActionDisposable { - signalDisposable?.dispose() - }) + + let signal = TGMediaVideoConverter.convert(avAsset, adjustments: adjustments, path: tempFile.path, watcher: VideoConversionWatcher(update: { path, size in + /*var value = stat() + if stat(path, &value) == 0 { + let remuxedTempFile = TempBox.shared.tempFile(fileName: "video.mp4") + if FFMpegRemuxer.remux(path, to: remuxedTempFile.path) { + TempBox.shared.dispose(tempFile) + subscriber.putNext(.moveTempFile(file: remuxedTempFile)) + } else { + TempBox.shared.dispose(remuxedTempFile) + if let data = try? Data(contentsOf: URL(fileURLWithPath: path), options: [.mappedRead]) { + var range: Range? + let _ = updatedSize.modify { updatedSize in + range = updatedSize ..< value.st_size + return value.st_size + } + //print("size = \(Int(value.st_size)), range: \(range!)") + subscriber.putNext(.dataPart(resourceOffset: range!.lowerBound, data: data, range: range!, complete: false)) + } + } + }*/ + }), entityRenderer: entityRenderer)! + let signalDisposable = signal.start(next: { next in + if let result = next as? TGMediaVideoConversionResult { + var value = stat() + if stat(result.fileURL.path, &value) == 0 { + let remuxedTempFile = TempBox.shared.tempFile(fileName: "video.mp4") + if let size = fileSize(result.fileURL.path), size <= 32 * 1024 * 1024, FFMpegRemuxer.remux(result.fileURL.path, to: remuxedTempFile.path) { + TempBox.shared.dispose(tempFile) + subscriber.putNext(.moveTempFile(file: remuxedTempFile)) + } else { + TempBox.shared.dispose(remuxedTempFile) + if let data = try? Data(contentsOf: result.fileURL, options: [.mappedRead]) { + var range: Range? + let _ = updatedSize.modify { updatedSize in + range = updatedSize ..< value.st_size + return value.st_size + } + //print("finish size = \(Int(value.st_size)), range: \(range!)") + subscriber.putNext(.dataPart(resourceOffset: range!.lowerBound, data: data, range: range!, complete: false)) + subscriber.putNext(.replaceHeader(data: data, range: 0 ..< 1024)) + subscriber.putNext(.dataPart(resourceOffset: Int64(data.count), data: Data(), range: 0 ..< 0, complete: true)) + } + } + } else { + subscriber.putError(.generic) + } + subscriber.putCompletion() + + EngineTempBox.shared.dispose(tempFile) + } + }, error: { _ in + subscriber.putError(.generic) + }, completed: nil) + disposable.set(ActionDisposable { + signalDisposable?.dispose() + }) + } }) } @@ -339,78 +392,141 @@ func fetchLocalFileVideoMediaResource(account: Account, resource: LocalFileVideo let avAsset = AVURLAsset(url: URL(fileURLWithPath: filteredPath)) var adjustments: TGVideoEditAdjustments? + var mediaEditorValues: MediaEditorValues? if let videoAdjustments = resource.adjustments { - if let dict = NSKeyedUnarchiver.unarchiveObject(with: videoAdjustments.data.makeData()) as? [AnyHashable : Any] { + if videoAdjustments.isStory { + if let values = try? JSONDecoder().decode(MediaEditorValues.self, from: videoAdjustments.data.makeData()) { + mediaEditorValues = values + } + } else if let dict = NSKeyedUnarchiver.unarchiveObject(with: videoAdjustments.data.makeData()) as? 
[AnyHashable : Any] { adjustments = TGVideoEditAdjustments(dictionary: dict) } } let tempFile = EngineTempBox.shared.tempFile(fileName: "video.mp4") let updatedSize = Atomic(value: 0) - let entityRenderer: LegacyPaintEntityRenderer? = adjustments.flatMap { adjustments in - if let paintingData = adjustments.paintingData, paintingData.hasAnimation { - return LegacyPaintEntityRenderer(account: account, adjustments: adjustments) + if let mediaEditorValues { + let configuration = recommendedVideoExportConfiguration(values: mediaEditorValues) + let subject: MediaEditorVideoExport.Subject + if filteredPath.contains(".jpg"), let data = try? Data(contentsOf: URL(fileURLWithPath: filteredPath), options: [.mappedRead]), let image = UIImage(data: data) { + subject = .image(image) } else { - return nil + subject = .video(avAsset) } - } - let signal: SSignal - if filteredPath.contains(".jpg"), let entityRenderer = entityRenderer { - if let data = try? Data(contentsOf: URL(fileURLWithPath: filteredPath), options: [.mappedRead]), let image = UIImage(data: data) { - let durationSignal: SSignal = SSignal(generator: { subscriber in - let disposable = (entityRenderer.duration()).start(next: { duration in - subscriber.putNext(duration) - subscriber.putCompletion() - }) - - return SBlockDisposable(block: { - disposable.dispose() - }) - }) - - signal = durationSignal.map(toSignal: { duration -> SSignal in - if let duration = duration as? Double { - return TGMediaVideoConverter.renderUIImage(image, duration: duration, adjustments: adjustments, path: tempFile.path, watcher: VideoConversionWatcher(update: { path, size in - var value = stat() - if stat(path, &value) == 0 { - if let data = try? Data(contentsOf: URL(fileURLWithPath: path), options: [.mappedRead]) { - var range: Range? - let _ = updatedSize.modify { updatedSize in - range = updatedSize ..< value.st_size - return value.st_size - } - //print("size = \(Int(value.st_size)), range: \(range!)") - subscriber.putNext(.dataPart(resourceOffset: range!.lowerBound, data: data, range: range!, complete: false)) + + let videoExport = MediaEditorVideoExport(account: account, subject: subject, configuration: configuration, outputPath: tempFile.path) + videoExport.startExport() + + let statusDisposable = videoExport.status.start(next: { status in + switch status { + case .completed: + var value = stat() + if stat(tempFile.path, &value) == 0 { + let remuxedTempFile = TempBox.shared.tempFile(fileName: "video.mp4") + if let size = fileSize(tempFile.path), size <= 32 * 1024 * 1024, FFMpegRemuxer.remux(tempFile.path, to: remuxedTempFile.path) { + TempBox.shared.dispose(tempFile) + subscriber.putNext(.moveTempFile(file: remuxedTempFile)) + } else { + TempBox.shared.dispose(remuxedTempFile) + if let data = try? Data(contentsOf: URL(fileURLWithPath: tempFile.path), options: [.mappedRead]) { + var range: Range? + let _ = updatedSize.modify { updatedSize in + range = updatedSize ..< value.st_size + return value.st_size } + //print("finish size = \(Int(value.st_size)), range: \(range!)") + subscriber.putNext(.dataPart(resourceOffset: range!.lowerBound, data: data, range: range!, complete: false)) + subscriber.putNext(.replaceHeader(data: data, range: 0 ..< 1024)) + subscriber.putNext(.dataPart(resourceOffset: Int64(data.count), data: Data(), range: 0 ..< 0, complete: true)) } - }), entityRenderer: entityRenderer)! 
+ } } else { - return SSignal.single(nil) + subscriber.putError(.generic) } - }) - } else { - signal = SSignal.single(nil) + subscriber.putCompletion() + + EngineTempBox.shared.dispose(tempFile) + case .failed: + subscriber.putError(.generic) + default: + break + } + }) + + let disposable = MetaDisposable() + disposable.set(ActionDisposable { + statusDisposable.dispose() + videoExport.cancel() + }) + + return ActionDisposable { + disposable.dispose() } } else { - signal = TGMediaVideoConverter.convert(avAsset, adjustments: adjustments, path: tempFile.path, watcher: VideoConversionWatcher(update: { path, size in - var value = stat() - if stat(path, &value) == 0 { - if let data = try? Data(contentsOf: URL(fileURLWithPath: path), options: [.mappedRead]) { - var range: Range? - let _ = updatedSize.modify { updatedSize in - range = updatedSize ..< Int64(value.st_size) - return value.st_size - } - //print("size = \(Int(value.st_size)), range: \(range!)") - subscriber.putNext(.dataPart(resourceOffset: range!.lowerBound, data: data, range: range!, complete: false)) - } + let entityRenderer: LegacyPaintEntityRenderer? = adjustments.flatMap { adjustments in + if let paintingData = adjustments.paintingData, paintingData.hasAnimation { + return LegacyPaintEntityRenderer(account: account, adjustments: adjustments) + } else { + return nil } - }), entityRenderer: entityRenderer)! - } + } + let signal: SSignal + if filteredPath.contains(".jpg"), let entityRenderer = entityRenderer { + if let data = try? Data(contentsOf: URL(fileURLWithPath: filteredPath), options: [.mappedRead]), let image = UIImage(data: data) { + let durationSignal: SSignal = SSignal(generator: { subscriber in + let disposable = (entityRenderer.duration()).start(next: { duration in + subscriber.putNext(duration) + subscriber.putCompletion() + }) + + return SBlockDisposable(block: { + disposable.dispose() + }) + }) + + signal = durationSignal.map(toSignal: { duration -> SSignal in + if let duration = duration as? Double { + return TGMediaVideoConverter.renderUIImage(image, duration: duration, adjustments: adjustments, path: tempFile.path, watcher: VideoConversionWatcher(update: { path, size in + var value = stat() + if stat(path, &value) == 0 { + if let data = try? Data(contentsOf: URL(fileURLWithPath: path), options: [.mappedRead]) { + var range: Range? + let _ = updatedSize.modify { updatedSize in + range = updatedSize ..< value.st_size + return value.st_size + } + //print("size = \(Int(value.st_size)), range: \(range!)") + subscriber.putNext(.dataPart(resourceOffset: range!.lowerBound, data: data, range: range!, complete: false)) + } + } + }), entityRenderer: entityRenderer)! + } else { + return SSignal.single(nil) + } + }) + } else { + signal = SSignal.single(nil) + } + } else { + signal = TGMediaVideoConverter.convert(avAsset, adjustments: adjustments, path: tempFile.path, watcher: VideoConversionWatcher(update: { path, size in + var value = stat() + if stat(path, &value) == 0 { + if let data = try? Data(contentsOf: URL(fileURLWithPath: path), options: [.mappedRead]) { + var range: Range? + let _ = updatedSize.modify { updatedSize in + range = updatedSize ..< Int64(value.st_size) + return value.st_size + } + //print("size = \(Int(value.st_size)), range: \(range!)") + subscriber.putNext(.dataPart(resourceOffset: range!.lowerBound, data: data, range: range!, complete: false)) + } + } + }), entityRenderer: entityRenderer)! + } - let signalDisposable = signal.start(next: { next in - if let result = next as? 
TGMediaVideoConversionResult { - var value = stat() - if stat(result.fileURL.path, &value) == 0 { + let signalDisposable = signal.start(next: { next in + if let result = next as? TGMediaVideoConversionResult { + var value = stat() + if stat(result.fileURL.path, &value) == 0 { // if config.remuxToFMp4 { // let tempFile = TempBox.shared.tempFile(fileName: "video.mp4") // if FFMpegRemuxer.remux(result.fileURL.path, to: tempFile.path) { @@ -423,31 +539,32 @@ func fetchLocalFileVideoMediaResource(account: Account, resource: LocalFileVideo // } else { // subscriber.putNext(.moveLocalFile(path: result.fileURL.path)) // } - if let data = try? Data(contentsOf: result.fileURL, options: [.mappedRead]) { - var range: Range? - let _ = updatedSize.modify { updatedSize in - range = updatedSize ..< value.st_size - return value.st_size + if let data = try? Data(contentsOf: result.fileURL, options: [.mappedRead]) { + var range: Range? + let _ = updatedSize.modify { updatedSize in + range = updatedSize ..< value.st_size + return value.st_size + } + //print("finish size = \(Int(value.st_size)), range: \(range!)") + subscriber.putNext(.dataPart(resourceOffset: range!.lowerBound, data: data, range: range!, complete: false)) + subscriber.putNext(.replaceHeader(data: data, range: 0 ..< 1024)) + subscriber.putNext(.dataPart(resourceOffset: 0, data: Data(), range: 0 ..< 0, complete: true)) + + EngineTempBox.shared.dispose(tempFile) } - //print("finish size = \(Int(value.st_size)), range: \(range!)") - subscriber.putNext(.dataPart(resourceOffset: range!.lowerBound, data: data, range: range!, complete: false)) - subscriber.putNext(.replaceHeader(data: data, range: 0 ..< 1024)) - subscriber.putNext(.dataPart(resourceOffset: 0, data: Data(), range: 0 ..< 0, complete: true)) - - EngineTempBox.shared.dispose(tempFile) } + subscriber.putCompletion() } - subscriber.putCompletion() + }, error: { _ in + }, completed: nil) + + let disposable = ActionDisposable { + signalDisposable?.dispose() + } + + return ActionDisposable { + disposable.dispose() } - }, error: { _ in - }, completed: nil) - - let disposable = ActionDisposable { - signalDisposable?.dispose() - } - - return ActionDisposable { - disposable.dispose() } } return throttlingContext.wrap(priority: .default, signal: signal) diff --git a/submodules/TelegramUI/Sources/TelegramRootController.swift b/submodules/TelegramUI/Sources/TelegramRootController.swift index a2e875fef3..0da07105ab 100644 --- a/submodules/TelegramUI/Sources/TelegramRootController.swift +++ b/submodules/TelegramUI/Sources/TelegramRootController.swift @@ -192,7 +192,49 @@ public final class TelegramRootController: NavigationController, TelegramRootCon tabBarController.cameraItemAndAction = ( UITabBarItem(title: "Camera", image: UIImage(bundleImageName: "Chat List/Tabs/IconCamera"), tag: 2), { [weak self] in - self?.openStoryCamera() + guard let self else { + return + } + var transitionIn: StoryCameraTransitionIn? 
+ if let cameraItemView = self.rootTabController?.viewForCameraItem() { + transitionIn = StoryCameraTransitionIn( + sourceView: cameraItemView, + sourceRect: cameraItemView.bounds, + sourceCornerRadius: cameraItemView.bounds.height / 2.0 + ) + } + self.openStoryCamera( + transitionIn: transitionIn, + transitionOut: { [weak self] finished in + guard let self else { + return nil + } + if finished { + + } else { + if let cameraItemView = self.rootTabController?.viewForCameraItem() { + return StoryCameraTransitionOut( + destinationView: cameraItemView, + destinationRect: cameraItemView.bounds, + destinationCornerRadius: cameraItemView.bounds.height / 2.0 + ) + } + } + return nil + } + ) } ) } @@ -252,7 +294,7 @@ public final class TelegramRootController: NavigationController, TelegramRootCon presentedLegacyShortcutCamera(context: self.context, saveCapturedMedia: false, saveEditedPhotos: false, mediaGrouping: true, parentController: controller) } - public func openStoryCamera() { + public func openStoryCamera(transitionIn: StoryCameraTransitionIn?, transitionOut: @escaping (Bool) -> StoryCameraTransitionOut?) { guard let controller = self.viewControllers.last as? ViewController else { return } @@ -263,7 +305,32 @@ public final class TelegramRootController: NavigationController, TelegramRootCon var presentImpl: ((ViewController) -> Void)? var returnToCameraImpl: (() -> Void)? var dismissCameraImpl: (() -> Void)? - let cameraController = CameraScreen(context: context, mode: .story, completion: { result in + let cameraController = CameraScreen( + context: context, + mode: .story, + transitionIn: transitionIn.flatMap { + if let sourceView = $0.sourceView { + return CameraScreen.TransitionIn( + sourceView: sourceView, + sourceRect: $0.sourceRect, + sourceCornerRadius: $0.sourceCornerRadius + ) + } else { + return nil + } + }, + transitionOut: { finished in + if let transitionOut = transitionOut(finished), let destinationView = transitionOut.destinationView { + return CameraScreen.TransitionOut( + destinationView: destinationView, + destinationRect: transitionOut.destinationRect, + destinationCornerRadius: transitionOut.destinationCornerRadius + ) + } else { + return nil + } + }, + completion: { result in let subject: Signal = result |> map { value -> MediaEditorScreen.Subject? in switch value { @@ -349,21 +416,31 @@ public final class TelegramRootController: NavigationController, TelegramRootCon selectionController?.displayProgress = true - switch mediaResult { - case let .image(image, _): - if let data = image.jpegData(compressionQuality: 0.8) { - if let chatListController = self.chatListController as? ChatListControllerImpl, let storyListContext = chatListController.storyListContext { - storyListContext.upload(media: .image(dimensions: PixelDimensions(image.size), data: data), text: nil, entities: nil, privacy: privacy) + if let chatListController = self.chatListController as?
ChatListControllerImpl, let storyListContext = chatListController.storyListContext { + switch mediaResult { + case let .image(image, dimensions, _): + if let data = image.jpegData(compressionQuality: 0.8) { + storyListContext.upload(media: .image(dimensions: dimensions, data: data), text: nil, entities: nil, privacy: privacy) + } + case let .video(content, _, values, duration, dimensions, _): + let adjustments: VideoMediaResourceAdjustments + if let valuesData = try? JSONEncoder().encode(values) { + let data = MemoryBuffer(data: valuesData) + let digest = MemoryBuffer(data: data.md5Digest()) + adjustments = VideoMediaResourceAdjustments(data: data, digest: digest, isStory: true) + + let resource: TelegramMediaResource + switch content { + case let .imageFile(path): + resource = LocalFileVideoMediaResource(randomId: Int64.random(in: .min ... .max), path: path, adjustments: adjustments) + case let .videoFile(path): + resource = LocalFileVideoMediaResource(randomId: Int64.random(in: .min ... .max), path: path, adjustments: adjustments) + case let .asset(localIdentifier): + resource = VideoLibraryMediaResource(localIdentifier: localIdentifier, conversion: .compress(adjustments)) + } + storyListContext.upload(media: .video(dimensions: dimensions, duration: Int(duration), resource: resource), text: nil, entities: nil, privacy: privacy) } } - case .video: - break -// let resource = VideoLibraryMediaResource(localIdentifier: asset.localIdentifier, conversion: VideoLibraryMediaResourceConversion.passthrough) -// -// if let chatListController = self.chatListController as? ChatListControllerImpl, let storyListContext = chatListController.storyListContext { -// storyListContext.upload(media: .video(dimensions: PixelDimensions(width: Int32(asset.pixelWidth), height: Int32(asset.pixelHeight)), duration: Int(asset.duration), resource: resource), privacy: privacy) -// } -// selectionController?.dismiss() } dismissCameraImpl?() commit()
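Taken together, the upload path above and the two fetchers in FetchVideoMediaResource.swift agree on a single serialization contract for story adjustments: MediaEditorValues is JSON-encoded into VideoMediaResourceAdjustments with isStory set, and decoded back on fetch to pick the MediaEditorVideoExport path over the legacy TGMediaVideoConverter one. A sketch of that round-trip (assuming MediaEditorValues conforms to Codable, as the JSONEncoder/JSONDecoder calls in this diff imply; plus the Postbox / LocalMediaResources / MediaEditor imports used in the files above):

    import Foundation

    // Mirrors the upload side in TelegramRootController above.
    func makeStoryAdjustments(values: MediaEditorValues) -> VideoMediaResourceAdjustments? {
        guard let valuesData = try? JSONEncoder().encode(values) else { return nil }
        let data = MemoryBuffer(data: valuesData)
        let digest = MemoryBuffer(data: data.md5Digest())
        // isStory: true routes fetching through MediaEditorVideoExport
        // instead of the legacy TGMediaVideoConverter path
        return VideoMediaResourceAdjustments(data: data, digest: digest, isStory: true)
    }

    // Mirrors the fetch side in FetchVideoMediaResource above.
    func decodeStoryValues(_ adjustments: VideoMediaResourceAdjustments) -> MediaEditorValues? {
        guard adjustments.isStory else { return nil }
        return try? JSONDecoder().decode(MediaEditorValues.self, from: adjustments.data.makeData())
    }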