diff --git a/submodules/AccountContext/Sources/AccountContext.swift b/submodules/AccountContext/Sources/AccountContext.swift index 1c8fad66a4..8cd3fdb621 100644 --- a/submodules/AccountContext/Sources/AccountContext.swift +++ b/submodules/AccountContext/Sources/AccountContext.swift @@ -777,6 +777,17 @@ public class MediaEditorTransitionOutExternalState { } } +public protocol CameraScreen: ViewController { + +} + +public protocol MediaEditorScreen: ViewController { +} + +public protocol MediaPickerScreen: ViewController { + func dismissAnimated() +} + public protocol MediaEditorScreenResult { var target: Stories.PendingTarget { get } } @@ -1015,7 +1026,7 @@ public protocol SharedAccountContext: AnyObject { func makeStickerEditorScreen(context: AccountContext, source: Any?, intro: Bool, transitionArguments: (UIView, CGRect, UIImage?)?, completion: @escaping (TelegramMediaFile, [String], @escaping () -> Void) -> Void, cancelled: @escaping () -> Void) -> ViewController func makeStickerMediaPickerScreen(context: AccountContext, getSourceRect: @escaping () -> CGRect?, completion: @escaping (Any?, UIView?, CGRect, UIImage?, Bool, @escaping (Bool?) -> (UIView, CGRect)?, @escaping () -> Void) -> Void, dismissed: @escaping () -> Void) -> ViewController - func makeStoryMediaPickerScreen(context: AccountContext, isDark: Bool, getSourceRect: @escaping () -> CGRect, completion: @escaping (Any, UIView, CGRect, UIImage?, @escaping (Bool?) -> (UIView, CGRect)?, @escaping () -> Void) -> Void, dismissed: @escaping () -> Void, groupsPresented: @escaping () -> Void) -> ViewController + func makeStoryMediaPickerScreen(context: AccountContext, isDark: Bool, forCollage: Bool, getSourceRect: @escaping () -> CGRect, completion: @escaping (Any, UIView, CGRect, UIImage?, @escaping (Bool?) -> (UIView, CGRect)?, @escaping () -> Void) -> Void, dismissed: @escaping () -> Void, groupsPresented: @escaping () -> Void) -> ViewController func makeStickerPickerScreen(context: AccountContext, inputData: Promise, completion: @escaping (FileMediaReference) -> Void) -> ViewController diff --git a/submodules/AttachmentTextInputPanelNode/Sources/AttachmentTextInputPanelNode.swift b/submodules/AttachmentTextInputPanelNode/Sources/AttachmentTextInputPanelNode.swift index 99a7985e27..2070783d8e 100644 --- a/submodules/AttachmentTextInputPanelNode/Sources/AttachmentTextInputPanelNode.swift +++ b/submodules/AttachmentTextInputPanelNode/Sources/AttachmentTextInputPanelNode.swift @@ -504,6 +504,7 @@ public class AttachmentTextInputPanelNode: ASDisplayNode, TGCaptionPanelView, AS public var focusUpdated: ((Bool) -> Void)? public var heightUpdated: ((Bool) -> Void)? public var timerUpdated: ((NSNumber?) -> Void)? + public var captionIsAboveUpdated: ((Bool) -> Void)? 
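The added `captionIsAboveUpdated` callback is how the caption panel reports the caption position back to its host. A minimal sketch of the intended wiring, mirroring the Objective-C wiring that `TGMediaPickerGalleryInterfaceView` performs later in this diff (the panel and editing-context instances are assumed to already exist):

```swift
import AttachmentTextInputPanelNode
import LegacyComponents

// Sketch: persist the panel's caption position on the shared editing context,
// so the gallery, the picker context and the send menu all read one value.
func wireCaptionPosition(panel: AttachmentTextInputPanelNode, editingContext: TGMediaEditingContext) {
    panel.captionIsAboveUpdated = { [weak editingContext] isAbove in
        editingContext?.setCaptionAbove(isAbove)
    }
}
```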
public func updateLayoutSize(_ size: CGSize, keyboardHeight: CGFloat, sideInset: CGFloat, animated: Bool) -> CGFloat { guard let presentationInterfaceState = self.presentationInterfaceState else { @@ -518,7 +519,7 @@ public class AttachmentTextInputPanelNode: ASDisplayNode, TGCaptionPanelView, AS } } - public func setTimeout(_ timeout: Int32, isVideo: Bool) { + public func setTimeout(_ timeout: Int32, isVideo: Bool, isCaptionAbove: Bool) { } public func animate(_ view: UIView, frame: CGRect) { diff --git a/submodules/Camera/Sources/Camera.swift b/submodules/Camera/Sources/Camera.swift index 1568fea7ca..1698ecbd76 100644 --- a/submodules/Camera/Sources/Camera.swift +++ b/submodules/Camera/Sources/Camera.swift @@ -51,7 +51,7 @@ final class CameraDeviceContext { let device = CameraDevice() let input = CameraInput() let output: CameraOutput - + init(session: CameraSession, exclusive: Bool, additional: Bool, ciContext: CIContext, colorSpace: CGColorSpace, isRoundVideo: Bool = false) { self.session = session self.exclusive = exclusive @@ -126,7 +126,7 @@ private final class CameraContext { private let audioLevelPipe = ValuePipe<Float>() fileprivate let modeChangePromise = ValuePromise(.none) - var previewView: CameraPreviewView? + var videoOutput: CameraVideoOutput? var simplePreviewView: CameraSimplePreviewView? var secondaryPreviewView: CameraSimplePreviewView? @@ -310,7 +310,7 @@ private final class CameraContext { private var micLevelPeak: Int16 = 0 private var micLevelPeakCount = 0 - + private var isDualCameraEnabled: Bool? public func setDualCameraEnabled(_ enabled: Bool, change: Bool = true) { guard enabled != self.isDualCameraEnabled else { @@ -378,6 +378,13 @@ private final class CameraContext { guard let self, let mainDeviceContext = self.mainDeviceContext else { return } + + if sampleBuffer.type == kCMMediaType_Video { + Queue.mainQueue().async { + self.videoOutput?.push(sampleBuffer) + } + } + let timestamp = CACurrentMediaTime() if timestamp > self.lastSnapshotTimestamp + 2.5, !mainDeviceContext.output.isRecording || !self.savedSnapshot { var front = false @@ -696,6 +703,26 @@ public final class Camera { public typealias ExposureMode = AVCaptureDevice.ExposureMode public typealias FlashMode = AVCaptureDevice.FlashMode + public struct CollageGrid: Hashable { + public struct Row: Hashable { + public let columns: Int + + public init(columns: Int) { + self.columns = columns + } + } + + public let rows: [Row] + + public init(rows: [Row]) { + self.rows = rows + } + + public var count: Int { + return self.rows.reduce(0) { $0 + $1.columns } + } + } + public struct Configuration { let preset: Preset let position: Position @@ -975,16 +1002,19 @@ public final class Camera { } } - public func attachPreviewView(_ view: CameraPreviewView) { - self.previewView = view - let viewRef: Unmanaged<CameraPreviewView> = Unmanaged.passRetained(view) + public func setPreviewOutput(_ output: CameraVideoOutput?) { + let outputRef: Unmanaged<CameraVideoOutput>?
= output.flatMap { Unmanaged.passRetained($0) } self.queue.async { if let context = self.contextRef?.takeUnretainedValue() { - context.previewView = viewRef.takeUnretainedValue() - viewRef.release() + if let outputRef { + context.videoOutput = outputRef.takeUnretainedValue() + outputRef.release() + } else { + context.videoOutput = nil + } } else { Queue.mainQueue().async { - viewRef.release() + outputRef?.release() } } } @@ -1108,3 +1138,15 @@ public struct CameraRecordingData { public enum CameraRecordingError { case audioInitializationError } + +public class CameraVideoOutput { + private let sink: (CMSampleBuffer) -> Void + + public init(sink: @escaping (CMSampleBuffer) -> Void) { + self.sink = sink + } + + func push(_ buffer: CMSampleBuffer) { + self.sink(buffer) + } +} diff --git a/submodules/Camera/Sources/CameraOutput.swift b/submodules/Camera/Sources/CameraOutput.swift index 276feb37a3..d1fbadf7f2 100644 --- a/submodules/Camera/Sources/CameraOutput.swift +++ b/submodules/Camera/Sources/CameraOutput.swift @@ -1,6 +1,7 @@ import Foundation import AVFoundation import UIKit +import Display import SwiftSignalKit import CoreImage import Vision @@ -286,6 +287,19 @@ final class CameraOutput: NSObject { } } +#if targetEnvironment(simulator) + let image = generateImage(CGSize(width: 1080, height: 1920), opaque: true, scale: 1.0, rotatedContext: { size, context in + let colors: [UIColor] = [UIColor(rgb: 0xff00ff), UIColor(rgb: 0xff0000), UIColor(rgb: 0x00ffff), UIColor(rgb: 0x00ff00)] + if let randomColor = colors.randomElement() { + context.setFillColor(randomColor.cgColor) + } + context.fill(CGRect(origin: .zero, size: size)) + })! + return .single(.began) + |> then( + .single(.finished(image, nil, CACurrentMediaTime())) |> delay(0.5, queue: Queue.concurrentDefaultQueue()) + ) +#else let uniqueId = settings.uniqueID let photoCapture = PhotoCaptureContext(ciContext: self.ciContext, settings: settings, orientation: orientation, mirror: mirror) self.photoCaptureRequests[uniqueId] = photoCapture @@ -295,6 +309,7 @@ final class CameraOutput: NSObject { |> afterDisposed { [weak self] in self?.photoCaptureRequests.removeValue(forKey: uniqueId) } +#endif } var isRecording: Bool { diff --git a/submodules/ContextUI/Sources/ContextActionsContainerNode.swift b/submodules/ContextUI/Sources/ContextActionsContainerNode.swift index 909a81bfcd..7bf4a2fdd5 100644 --- a/submodules/ContextUI/Sources/ContextActionsContainerNode.swift +++ b/submodules/ContextUI/Sources/ContextActionsContainerNode.swift @@ -449,6 +449,12 @@ final class InnerTextSelectionTipContainerNode: ASDisplayNode { self.targetSelectionIndex = nil icon = nil isUserInteractionEnabled = action != nil + case .collageReordering: + //TODO:localize + self.action = nil + self.text = "Hold and drag tiles to reorder them." + self.targetSelectionIndex = nil + icon = UIImage(bundleImageName: "Chat/Context Menu/Tip") } self.iconNode = ASImageNode() diff --git a/submodules/ContextUI/Sources/ContextController.swift b/submodules/ContextUI/Sources/ContextController.swift index 0a39323617..8bccf7692b 100644 --- a/submodules/ContextUI/Sources/ContextController.swift +++ b/submodules/ContextUI/Sources/ContextController.swift @@ -2359,6 +2359,7 @@ public final class ContextController: ViewController, StandalonePresentableContr case notificationTopicExceptions(text: String, action: (() -> Void)?) 
case starsReactions(topCount: Int) case videoProcessing + case collageReordering public static func ==(lhs: Tip, rhs: Tip) -> Bool { switch lhs { @@ -2416,6 +2417,12 @@ public final class ContextController: ViewController, StandalonePresentableContr } else { return false } + case .collageReordering: + if case .collageReordering = rhs { + return true + } else { + return false + } } } } diff --git a/submodules/LegacyComponents/PublicHeaders/LegacyComponents/TGMediaEditingContext.h b/submodules/LegacyComponents/PublicHeaders/LegacyComponents/TGMediaEditingContext.h index 01b5704ddd..d035252a3c 100644 --- a/submodules/LegacyComponents/PublicHeaders/LegacyComponents/TGMediaEditingContext.h +++ b/submodules/LegacyComponents/PublicHeaders/LegacyComponents/TGMediaEditingContext.h @@ -103,6 +103,12 @@ - (bool)setPaintingData:(NSData *)data entitiesData:(NSData *)entitiesData image:(UIImage *)image stillImage:(UIImage *)stillImage forItem:(NSObject *)item dataUrl:(NSURL **)dataOutUrl entitiesDataUrl:(NSURL **)entitiesDataOutUrl imageUrl:(NSURL **)imageOutUrl forVideo:(bool)video; - (void)clearPaintingData; + +- (bool)isCaptionAbove; +- (SSignal *)captionAbove; +- (void)setCaptionAbove:(bool)captionAbove; + + - (SSignal *)facesForItem:(NSObject *)item; - (void)setFaces:(NSArray *)faces forItem:(NSObject *)item; diff --git a/submodules/LegacyComponents/PublicHeaders/LegacyComponents/TGPhotoCaptionInputMixin.h b/submodules/LegacyComponents/PublicHeaders/LegacyComponents/TGPhotoCaptionInputMixin.h index fed6a04d4c..605607abaa 100644 --- a/submodules/LegacyComponents/PublicHeaders/LegacyComponents/TGPhotoCaptionInputMixin.h +++ b/submodules/LegacyComponents/PublicHeaders/LegacyComponents/TGPhotoCaptionInputMixin.h @@ -15,6 +15,7 @@ @property (nonatomic, assign) UIInterfaceOrientation interfaceOrientation; @property (nonatomic, readonly) CGFloat keyboardHeight; @property (nonatomic, assign) CGFloat contentAreaHeight; +@property (nonatomic, assign) UIEdgeInsets safeAreaInset; @property (nonatomic, assign) bool allowEntities; @property (nonatomic, copy) UIView *(^panelParentView)(void); @@ -23,6 +24,7 @@ @property (nonatomic, copy) void (^finishedWithCaption)(NSAttributedString *caption); @property (nonatomic, copy) void (^keyboardHeightChanged)(CGFloat keyboardHeight, NSTimeInterval duration, NSInteger animationCurve); @property (nonatomic, copy) void (^timerUpdated)(NSNumber *timeout); +@property (nonatomic, copy) void (^captionIsAboveUpdated)(bool captionIsAbove); - (void)createInputPanelIfNeeded; - (void)beginEditing; @@ -36,7 +38,7 @@ - (void)setCaption:(NSAttributedString *)caption animated:(bool)animated; - (void)setCaptionPanelHidden:(bool)hidden animated:(bool)animated; -- (void)setTimeout:(int32_t)timeout isVideo:(bool)isVideo; +- (void)setTimeout:(int32_t)timeout isVideo:(bool)isVideo isCaptionAbove:(bool)isCaptionAbove; - (void)updateLayoutWithFrame:(CGRect)frame edgeInsets:(UIEdgeInsets)edgeInsets animated:(bool)animated; diff --git a/submodules/LegacyComponents/PublicHeaders/LegacyComponents/TGPhotoPaintStickersContext.h b/submodules/LegacyComponents/PublicHeaders/LegacyComponents/TGPhotoPaintStickersContext.h index 4ba1397776..40d786c0f3 100644 --- a/submodules/LegacyComponents/PublicHeaders/LegacyComponents/TGPhotoPaintStickersContext.h +++ b/submodules/LegacyComponents/PublicHeaders/LegacyComponents/TGPhotoPaintStickersContext.h @@ -22,7 +22,7 @@ @property (nonatomic, readonly) UIView * _Nonnull view; -- (void)setTimeout:(int32_t)timeout isVideo:(bool)isVideo; +- 
(void)setTimeout:(int32_t)timeout isVideo:(bool)isVideo isCaptionAbove:(bool)isCaptionAbove; - (NSAttributedString * _Nonnull)caption; - (void)setCaption:(NSAttributedString * _Nullable)caption; @@ -36,6 +36,7 @@ @property (nonatomic, copy) void(^ _Nullable focusUpdated)(BOOL focused); @property (nonatomic, copy) void(^ _Nullable heightUpdated)(BOOL animated); @property (nonatomic, copy) void(^ _Nullable timerUpdated)(NSNumber * _Nullable value); +@property (nonatomic, copy) void(^ _Nullable captionIsAboveUpdated)(BOOL value); - (CGFloat)updateLayoutSize:(CGSize)size keyboardHeight:(CGFloat)keyboardHeight sideInset:(CGFloat)sideInset animated:(bool)animated; - (CGFloat)baseHeight; diff --git a/submodules/LegacyComponents/Sources/PGPhotoGaussianBlurFilter.m b/submodules/LegacyComponents/Sources/PGPhotoGaussianBlurFilter.m index fe24f1048f..d13751fa11 100644 --- a/submodules/LegacyComponents/Sources/PGPhotoGaussianBlurFilter.m +++ b/submodules/LegacyComponents/Sources/PGPhotoGaussianBlurFilter.m @@ -53,8 +53,9 @@ sumOfWeights += 2.0 * standardGaussianWeights[currentGaussianWeightIndex]; } - for (NSUInteger currentGaussianWeightIndex = 0; currentGaussianWeightIndex < blurRadius + 1; currentGaussianWeightIndex++) + for (NSUInteger currentGaussianWeightIndex = 0; currentGaussianWeightIndex < blurRadius + 1; currentGaussianWeightIndex++) { standardGaussianWeights[currentGaussianWeightIndex] = standardGaussianWeights[currentGaussianWeightIndex] / sumOfWeights; + } NSUInteger numberOfOptimizedOffsets = MIN(blurRadius / 2 + (blurRadius % 2), 7U); GLfloat *optimizedGaussianOffsets = calloc(numberOfOptimizedOffsets, sizeof(GLfloat)); diff --git a/submodules/LegacyComponents/Sources/TGMediaEditingContext.m b/submodules/LegacyComponents/Sources/TGMediaEditingContext.m index a9aa9b8e2e..67f7274bfd 100644 --- a/submodules/LegacyComponents/Sources/TGMediaEditingContext.m +++ b/submodules/LegacyComponents/Sources/TGMediaEditingContext.m @@ -126,8 +126,11 @@ SPipe *_pricePipe; SPipe *_fullSizePipe; SPipe *_cropPipe; + SPipe *_captionAbovePipe; NSAttributedString *_forcedCaption; + + bool _captionAbove; } @end @@ -196,6 +199,7 @@ _pricePipe = [[SPipe alloc] init]; _fullSizePipe = [[SPipe alloc] init]; _cropPipe = [[SPipe alloc] init]; + _captionAbovePipe = [[SPipe alloc] init]; } return self; } @@ -853,6 +857,28 @@ } } +- (bool)isCaptionAbove { + return _captionAbove; +} + +- (SSignal *)captionAbove +{ + __weak TGMediaEditingContext *weakSelf = self; + SSignal *updateSignal = [_captionAbovePipe.signalProducer() map:^NSNumber *(NSNumber *update) + { + __strong TGMediaEditingContext *strongSelf = weakSelf; + return @(strongSelf->_captionAbove); + }]; + + return [[SSignal single:@(_captionAbove)] then:updateSignal]; +} + +- (void)setCaptionAbove:(bool)captionAbove +{ + _captionAbove = captionAbove; + _captionAbovePipe.sink(@(captionAbove)); +} + - (SSignal *)facesForItem:(NSObject *)item { NSString *itemId = [self _contextualIdForItemId:item.uniqueIdentifier]; diff --git a/submodules/LegacyComponents/Sources/TGMediaPickerGalleryInterfaceView.m b/submodules/LegacyComponents/Sources/TGMediaPickerGalleryInterfaceView.m index 5ed286a8fd..b42069dd86 100644 --- a/submodules/LegacyComponents/Sources/TGMediaPickerGalleryInterfaceView.m +++ b/submodules/LegacyComponents/Sources/TGMediaPickerGalleryInterfaceView.m @@ -389,6 +389,14 @@ [strongSelf->_selectionContext setItem:(id)galleryEditableItem.editableMediaItem selected:true animated:true sender:nil]; }; + _captionMixin.captionIsAboveUpdated = ^(bool captionIsAbove) { 
+ __strong TGMediaPickerGalleryInterfaceView *strongSelf = weakSelf; + if (strongSelf == nil) + return; + + [strongSelf->_editingContext setCaptionAbove:captionIsAbove]; + }; + _captionMixin.stickersContext = stickersContext; [_captionMixin createInputPanelIfNeeded]; @@ -818,6 +826,8 @@ { id editableMediaItem = [galleryEditableItem editableMediaItem]; + bool isCaptionAbove = galleryEditableItem.editingContext.isCaptionAbove; + __weak id weakGalleryEditableItem = galleryEditableItem; [_adjustmentsDisposable setDisposable:[[[[galleryEditableItem.editingContext adjustmentsSignalForItem:editableMediaItem] mapToSignal:^SSignal *(id adjustments) { __strong id strongGalleryEditableItem = weakGalleryEditableItem; @@ -842,7 +852,7 @@ id adjustments = dict[@"adjustments"]; NSNumber *timer = dict[@"timer"]; - [strongSelf->_captionMixin setTimeout:[timer intValue] isVideo:editableMediaItem.isVideo]; + [strongSelf->_captionMixin setTimeout:[timer intValue] isVideo:editableMediaItem.isVideo isCaptionAbove:isCaptionAbove]; if ([adjustments isKindOfClass:[TGVideoEditAdjustments class]]) { @@ -1617,6 +1627,7 @@ - (void)setSafeAreaInset:(UIEdgeInsets)safeAreaInset { _safeAreaInset = safeAreaInset; + _captionMixin.safeAreaInset = safeAreaInset; [_currentItemView setSafeAreaInset:[self localSafeAreaInset]]; [self setNeedsLayout]; } diff --git a/submodules/LegacyComponents/Sources/TGMediaPickerGalleryVideoItemView.m b/submodules/LegacyComponents/Sources/TGMediaPickerGalleryVideoItemView.m index 72127e4ac4..01578096c3 100644 --- a/submodules/LegacyComponents/Sources/TGMediaPickerGalleryVideoItemView.m +++ b/submodules/LegacyComponents/Sources/TGMediaPickerGalleryVideoItemView.m @@ -222,7 +222,7 @@ UIView *scrubberBackgroundView = [[UIView alloc] initWithFrame:CGRectMake(0.0f, 0.0f, _headerView.frame.size.width, 64.0f)]; scrubberBackgroundView.autoresizingMask = UIViewAutoresizingFlexibleWidth; - scrubberBackgroundView.backgroundColor = [TGPhotoEditorInterfaceAssets toolbarTransparentBackgroundColor]; + //scrubberBackgroundView.backgroundColor = [TGPhotoEditorInterfaceAssets toolbarTransparentBackgroundColor]; [_scrubberPanelView addSubview:scrubberBackgroundView]; _scrubberView = [[TGMediaPickerGalleryVideoScrubber alloc] initWithFrame:CGRectMake(0.0f, _headerView.frame.size.height - 44.0f, _headerView.frame.size.width, 68.0f)]; diff --git a/submodules/LegacyComponents/Sources/TGMediaPickerGalleryVideoScrubber.m b/submodules/LegacyComponents/Sources/TGMediaPickerGalleryVideoScrubber.m index 317f6cec14..6e595b10f4 100644 --- a/submodules/LegacyComponents/Sources/TGMediaPickerGalleryVideoScrubber.m +++ b/submodules/LegacyComponents/Sources/TGMediaPickerGalleryVideoScrubber.m @@ -99,6 +99,12 @@ typedef enum _currentTimeLabel.backgroundColor = [UIColor clearColor]; _currentTimeLabel.text = @"0:00"; _currentTimeLabel.textColor = [UIColor whiteColor]; + _currentTimeLabel.layer.shadowOffset = CGSizeMake(0.0, 0.0); + _currentTimeLabel.layer.shadowRadius = 2.0; + _currentTimeLabel.layer.shadowColor = [UIColor blackColor].CGColor; + _currentTimeLabel.layer.shadowOpacity = 0.4; + _currentTimeLabel.layer.rasterizationScale = TGScreenScaling(); + _currentTimeLabel.layer.shouldRasterize = true; [self addSubview:_currentTimeLabel]; _inverseTimeLabel = [[UILabel alloc] initWithFrame:CGRectMake(frame.size.width - 108, 4, 100, 15)]; @@ -108,6 +114,12 @@ typedef enum _inverseTimeLabel.text = @"0:00"; _inverseTimeLabel.textAlignment = NSTextAlignmentRight; _inverseTimeLabel.textColor = [UIColor whiteColor]; + 
_inverseTimeLabel.layer.shadowOffset = CGSizeMake(0.0, 0.0); + _inverseTimeLabel.layer.shadowRadius = 2.0; + _inverseTimeLabel.layer.shadowColor = [UIColor blackColor].CGColor; + _inverseTimeLabel.layer.shadowOpacity = 0.4; + _inverseTimeLabel.layer.rasterizationScale = TGScreenScaling(); + _inverseTimeLabel.layer.shouldRasterize = true; [self addSubview:_inverseTimeLabel]; _wrapperView = [[UIControl alloc] initWithFrame:CGRectMake(8, 24, 0, 36)]; @@ -119,14 +131,19 @@ typedef enum _summaryThumbnailWrapperView = [[UIView alloc] initWithFrame:CGRectMake(0, 0, 0, 32)]; _summaryThumbnailWrapperView.clipsToBounds = true; + _summaryThumbnailWrapperView.layer.cornerRadius = 5.0; [_wrapperView addSubview:_summaryThumbnailWrapperView]; _leftCurtainView = [[UIView alloc] initWithFrame:CGRectMake(0, 0, 0, 0)]; _leftCurtainView.backgroundColor = [[TGPhotoEditorInterfaceAssets toolbarBackgroundColor] colorWithAlphaComponent:0.8f]; + _leftCurtainView.clipsToBounds = true; + _leftCurtainView.layer.cornerRadius = 5.0; [_wrapperView addSubview:_leftCurtainView]; _rightCurtainView = [[UIView alloc] initWithFrame:CGRectMake(0, 0, 0, 0)]; _rightCurtainView.backgroundColor = [[TGPhotoEditorInterfaceAssets toolbarBackgroundColor] colorWithAlphaComponent:0.8f]; + _rightCurtainView.clipsToBounds = true; + _rightCurtainView.layer.cornerRadius = 5.0; [_wrapperView addSubview:_rightCurtainView]; __weak TGMediaPickerGalleryVideoScrubber *weakSelf = self; diff --git a/submodules/LegacyComponents/Sources/TGPhotoCaptionInputMixin.m b/submodules/LegacyComponents/Sources/TGPhotoCaptionInputMixin.m index 1a0bc8ce62..60958afd97 100644 --- a/submodules/LegacyComponents/Sources/TGPhotoCaptionInputMixin.m +++ b/submodules/LegacyComponents/Sources/TGPhotoCaptionInputMixin.m @@ -15,6 +15,8 @@ CGRect _currentFrame; UIEdgeInsets _currentEdgeInsets; + + bool _currentIsCaptionAbove; } @end @@ -94,12 +96,21 @@ } }; + _inputPanel.captionIsAboveUpdated = ^(bool value) { + __strong TGPhotoCaptionInputMixin *strongSelf = weakSelf; + if (strongSelf.captionIsAboveUpdated != nil) { + strongSelf.captionIsAboveUpdated(value); + + strongSelf->_currentIsCaptionAbove = value; + [strongSelf updateLayoutWithFrame:strongSelf->_currentFrame edgeInsets:strongSelf->_currentEdgeInsets animated:true]; + } + }; _inputPanelView = inputPanel.view; _backgroundView = [[UIView alloc] init]; _backgroundView.backgroundColor = [TGPhotoEditorInterfaceAssets toolbarTransparentBackgroundColor]; - [parentView addSubview:_backgroundView]; + //[parentView addSubview:_backgroundView]; [parentView addSubview:_inputPanelView]; } @@ -123,7 +134,7 @@ _dismissTapRecognizer.enabled = false; [_dismissView addGestureRecognizer:_dismissTapRecognizer]; - [parentView insertSubview:_dismissView belowSubview:_backgroundView]; + //[parentView insertSubview:_dismissView belowSubview:_backgroundView]; } - (void)setCaption:(NSAttributedString *)caption @@ -141,8 +152,9 @@ [_inputPanel setCaption:caption]; } -- (void)setTimeout:(int32_t)timeout isVideo:(bool)isVideo { - [_inputPanel setTimeout:timeout isVideo:isVideo]; +- (void)setTimeout:(int32_t)timeout isVideo:(bool)isVideo isCaptionAbove:(bool)isCaptionAbove { + _currentIsCaptionAbove = isCaptionAbove; + [_inputPanel setTimeout:timeout isVideo:isVideo isCaptionAbove:isCaptionAbove]; } - (void)setCaptionPanelHidden:(bool)hidden animated:(bool)__unused animated @@ -222,14 +234,30 @@ CGRect frame = _currentFrame; UIEdgeInsets edgeInsets = _currentEdgeInsets; CGFloat panelHeight = [_inputPanel updateLayoutSize:frame.size 
keyboardHeight:keyboardHeight sideInset:0.0 animated:false]; - [UIView animateWithDuration:duration delay:0.0f options:(curve << 16) animations:^{ - _inputPanelView.frame = CGRectMake(edgeInsets.left, frame.size.height - panelHeight - MAX(edgeInsets.bottom, _keyboardHeight), frame.size.width, panelHeight); - - CGFloat backgroundHeight = panelHeight; - if (_keyboardHeight > 0.0) { - backgroundHeight += _keyboardHeight - edgeInsets.bottom; + + CGFloat panelY = 0.0; + if (frame.size.width > frame.size.height && !TGIsPad()) { + panelY = edgeInsets.top + frame.size.height; + } else { + if (_currentIsCaptionAbove) { + if (_keyboardHeight > 0.0) { + panelY = _safeAreaInset.top + 8.0; + } else { + panelY = _safeAreaInset.top + 8.0 + 40.0; + } + } else { + panelY = edgeInsets.top + frame.size.height - panelHeight - MAX(edgeInsets.bottom, _keyboardHeight); } - _backgroundView.frame = CGRectMake(edgeInsets.left, frame.size.height - panelHeight - MAX(edgeInsets.bottom, _keyboardHeight), frame.size.width, backgroundHeight); + } + + CGFloat backgroundHeight = panelHeight; + if (_keyboardHeight > 0.0) { + backgroundHeight += _keyboardHeight - edgeInsets.bottom; + } + + [UIView animateWithDuration:duration delay:0.0f options:(curve << 16) animations:^{ + _inputPanelView.frame = CGRectMake(edgeInsets.left, panelY, frame.size.width, panelHeight); + _backgroundView.frame = CGRectMake(edgeInsets.left, panelY, frame.size.width, backgroundHeight); } completion:nil]; if (self.keyboardHeightChanged != nil) @@ -243,11 +271,19 @@ CGFloat panelHeight = [_inputPanel updateLayoutSize:frame.size keyboardHeight:_keyboardHeight sideInset:0.0 animated:animated]; - CGFloat y = 0.0; + CGFloat panelY = 0.0; if (frame.size.width > frame.size.height && !TGIsPad()) { - y = edgeInsets.top + frame.size.height; + panelY = edgeInsets.top + frame.size.height; } else { - y = edgeInsets.top + frame.size.height - panelHeight - MAX(edgeInsets.bottom, _keyboardHeight); + if (_currentIsCaptionAbove) { + if (_keyboardHeight > 0.0) { + panelY = _safeAreaInset.top + 8.0; + } else { + panelY = _safeAreaInset.top + 8.0 + 40.0; + } + } else { + panelY = edgeInsets.top + frame.size.height - panelHeight - MAX(edgeInsets.bottom, _keyboardHeight); + } } CGFloat backgroundHeight = panelHeight; @@ -255,8 +291,8 @@ backgroundHeight += _keyboardHeight - edgeInsets.bottom; } - CGRect panelFrame = CGRectMake(edgeInsets.left, y, frame.size.width, panelHeight); - CGRect backgroundFrame = CGRectMake(edgeInsets.left, y, frame.size.width, backgroundHeight); + CGRect panelFrame = CGRectMake(edgeInsets.left, panelY, frame.size.width, panelHeight); + CGRect backgroundFrame = CGRectMake(edgeInsets.left, panelY, frame.size.width, backgroundHeight); if (animated) { [_inputPanel animateView:_inputPanelView frame:panelFrame]; diff --git a/submodules/MediaPasteboardUI/Sources/MediaPasteboardScreen.swift b/submodules/MediaPasteboardUI/Sources/MediaPasteboardScreen.swift index eaf4f3d86f..e57222dfb6 100644 --- a/submodules/MediaPasteboardUI/Sources/MediaPasteboardScreen.swift +++ b/submodules/MediaPasteboardUI/Sources/MediaPasteboardScreen.swift @@ -14,8 +14,8 @@ public func mediaPasteboardScreen( context: AccountContext, updatedPresentationData: (initial: PresentationData, signal: Signal<PresentationData, NoError>)?
= nil, peer: EnginePeer, - subjects: [MediaPickerScreen.Subject.Media], - presentMediaPicker: @escaping (_ subject: MediaPickerScreen.Subject, _ saveEditedPhotos: Bool, _ bannedSendPhotos: (Int32, Bool)?, _ bannedSendVideos: (Int32, Bool)?, _ present: @escaping (MediaPickerScreen, AttachmentMediaPickerContext?) -> Void) -> Void, + subjects: [MediaPickerScreenImpl.Subject.Media], + presentMediaPicker: @escaping (_ subject: MediaPickerScreenImpl.Subject, _ saveEditedPhotos: Bool, _ bannedSendPhotos: (Int32, Bool)?, _ bannedSendVideos: (Int32, Bool)?, _ present: @escaping (MediaPickerScreenImpl, AttachmentMediaPickerContext?) -> Void) -> Void, getSourceRect: (() -> CGRect?)? = nil, makeEntityInputView: @escaping () -> AttachmentTextInputPanelInputView? = { return nil } ) -> ViewController { diff --git a/submodules/MediaPickerUI/BUILD b/submodules/MediaPickerUI/BUILD index af3769ac24..22bd024349 100644 --- a/submodules/MediaPickerUI/BUILD +++ b/submodules/MediaPickerUI/BUILD @@ -51,6 +51,7 @@ swift_library( "//submodules/ComponentFlow", "//submodules/Components/ComponentDisplayAdapters", "//submodules/AnimatedCountLabelNode", + "//submodules/TelegramUI/Components/MediaAssetsContext", ], visibility = [ "//visibility:public", diff --git a/submodules/MediaPickerUI/Sources/LegacyMediaPickerGallery.swift b/submodules/MediaPickerUI/Sources/LegacyMediaPickerGallery.swift index d1a9f17cae..8cc6f6f846 100644 --- a/submodules/MediaPickerUI/Sources/LegacyMediaPickerGallery.swift +++ b/submodules/MediaPickerUI/Sources/LegacyMediaPickerGallery.swift @@ -11,6 +11,7 @@ import LegacyComponents import LegacyUI import LegacyMediaPickerUI import Photos +import MediaAssetsContext private func galleryFetchResultItems(fetchResult: PHFetchResult<PHAsset>, index: Int, reversed: Bool, selectionContext: TGMediaSelectionContext?, editingContext: TGMediaEditingContext, stickersContext: TGPhotoPaintStickersContext, immediateThumbnail: UIImage?) -> ([TGModernGalleryItem], TGModernGalleryItem?) { var focusItem: TGModernGalleryItem?
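Earlier in this diff, `Camera.attachPreviewView` is replaced by `setPreviewOutput`, and `CameraContext` pushes every video sample buffer to the registered `CameraVideoOutput` on the main queue. A minimal consumer sketch using only the API shown above (the rendering callback is an assumption for illustration):

```swift
import CoreMedia

// Sketch: register a sink that receives video CMSampleBuffers on the main
// queue (CameraContext dispatches them there). `render` stands in for
// whatever consumer draws the frames, e.g. a collage tile view.
func attachCollagePreview(camera: Camera, render: @escaping (CMSampleBuffer) -> Void) {
    let output = CameraVideoOutput(sink: { sampleBuffer in
        render(sampleBuffer)
    })
    camera.setPreviewOutput(output)
}

// Detaching later: camera.setPreviewOutput(nil) releases the retained output.
```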
diff --git a/submodules/MediaPickerUI/Sources/MediaGroupsAlbumGridItem.swift b/submodules/MediaPickerUI/Sources/MediaGroupsAlbumGridItem.swift index b9f111bc58..0f9a47637b 100644 --- a/submodules/MediaPickerUI/Sources/MediaGroupsAlbumGridItem.swift +++ b/submodules/MediaPickerUI/Sources/MediaGroupsAlbumGridItem.swift @@ -7,6 +7,7 @@ import TelegramPresentationData import ItemListUI import MergeLists import Photos +import MediaAssetsContext private struct MediaGroupsGridAlbumEntry: Comparable, Identifiable { let theme: PresentationTheme diff --git a/submodules/MediaPickerUI/Sources/MediaGroupsContextMenuContent.swift b/submodules/MediaPickerUI/Sources/MediaGroupsContextMenuContent.swift index 741427fc78..e2b5f31a29 100644 --- a/submodules/MediaPickerUI/Sources/MediaGroupsContextMenuContent.swift +++ b/submodules/MediaPickerUI/Sources/MediaGroupsContextMenuContent.swift @@ -6,6 +6,7 @@ import ContextUI import AccountContext import TelegramPresentationData import Photos +import MediaAssetsContext struct MediaGroupItem { let collection: PHAssetCollection diff --git a/submodules/MediaPickerUI/Sources/MediaGroupsScreen.swift b/submodules/MediaPickerUI/Sources/MediaGroupsScreen.swift index 4c5a43ebd7..d7d6cd5967 100644 --- a/submodules/MediaPickerUI/Sources/MediaGroupsScreen.swift +++ b/submodules/MediaPickerUI/Sources/MediaGroupsScreen.swift @@ -13,7 +13,7 @@ import Photos import LegacyComponents import AttachmentUI import ItemListUI -import CameraScreen +import MediaAssetsContext private enum MediaGroupsEntry: Comparable, Identifiable { enum StableId: Hashable { @@ -470,7 +470,7 @@ public final class MediaGroupsScreen: ViewController, AttachmentContainable { } else { self.updateNavigationStack { current in var mediaPickerContext: AttachmentMediaPickerContext? - if let first = current.first as? MediaPickerScreen { + if let first = current.first as? MediaPickerScreenImpl { mediaPickerContext = first.webSearchController?.mediaPickerContext ?? first.mediaPickerContext } return (current.filter { $0 !== self }, mediaPickerContext) diff --git a/submodules/MediaPickerUI/Sources/MediaPickerGridItem.swift b/submodules/MediaPickerUI/Sources/MediaPickerGridItem.swift index 4c2873fbef..b26687edc4 100644 --- a/submodules/MediaPickerUI/Sources/MediaPickerGridItem.swift +++ b/submodules/MediaPickerUI/Sources/MediaPickerGridItem.swift @@ -17,6 +17,7 @@ import ImageBlur import FastBlur import MediaEditor import RadialStatusNode +import MediaAssetsContext private let leftShadowImage: UIImage = { let baseImage = UIImage(bundleImageName: "Peer Info/MediaGridShadow")! 
@@ -48,7 +49,7 @@ private let rightShadowImage: UIImage = { enum MediaPickerGridItemContent: Equatable { case asset(PHFetchResult<PHAsset>, Int) - case media(MediaPickerScreen.Subject.Media, Int) + case media(MediaPickerScreenImpl.Subject.Media, Int) case draft(MediaEditorDraft, Int) } @@ -395,7 +396,7 @@ final class MediaPickerGridItemNode: GridItemNode { self.updateHiddenMedia() } - func setup(interaction: MediaPickerInteraction, media: MediaPickerScreen.Subject.Media, index: Int, theme: PresentationTheme, selectable: Bool, enableAnimations: Bool, stories: Bool) { + func setup(interaction: MediaPickerInteraction, media: MediaPickerScreenImpl.Subject.Media, index: Int, theme: PresentationTheme, selectable: Bool, enableAnimations: Bool, stories: Bool) { self.interaction = interaction self.theme = theme self.selectable = selectable diff --git a/submodules/MediaPickerUI/Sources/MediaPickerScreen.swift b/submodules/MediaPickerUI/Sources/MediaPickerScreen.swift index 3fab113259..3dddffaa39 100644 --- a/submodules/MediaPickerUI/Sources/MediaPickerScreen.swift +++ b/submodules/MediaPickerUI/Sources/MediaPickerScreen.swift @@ -27,6 +27,7 @@ import MediaEditor import ImageObjectSeparation import ChatSendMessageActionUI import AnimatedCountLabelNode +import MediaAssetsContext final class MediaPickerInteraction { let downloadManager: AssetDownloadManager @@ -40,16 +41,7 @@ final class MediaPickerInteraction { let selectionState: TGMediaSelectionContext? let editingState: TGMediaEditingContext var hiddenMediaId: String? - - var captionIsAboveMedia: Bool = false { - didSet { - if self.captionIsAboveMedia != oldValue { - self.captionIsAboveMediaValue.set(self.captionIsAboveMedia) - } - } - } - let captionIsAboveMediaValue = ValuePromise(false) - + init(downloadManager: AssetDownloadManager, openMedia: @escaping (PHFetchResult<PHAsset>, Int, UIImage?) -> Void, openSelectedMedia: @escaping (TGMediaSelectableItem, UIImage?) -> Void, openDraft: @escaping (MediaEditorDraft, UIImage?) -> Void, toggleSelection: @escaping (TGMediaSelectableItem, Bool, Bool) -> Bool, sendSelected: @escaping (TGMediaSelectableItem?, Bool, Int32?, Bool, ChatSendMessageActionSheetController.SendParameters?, @escaping () -> Void) -> Void, schedule: @escaping (ChatSendMessageActionSheetController.SendParameters?) -> Void, dismissInput: @escaping () -> Void, selectionState: TGMediaSelectionContext?, editingState: TGMediaEditingContext) { self.downloadManager = downloadManager self.openMedia = openMedia @@ -138,7 +130,7 @@ struct Month: Equatable { private var savedStoriesContentOffset: CGFloat? -public final class MediaPickerScreen: ViewController, AttachmentContainable { +public final class MediaPickerScreenImpl: ViewController, MediaPickerScreen, AttachmentContainable { public enum Subject { public enum Media: Equatable { case image(UIImage) @@ -207,7 +199,7 @@ public final class MediaPickerScreen: ViewController, AttachmentContainable { public var getCaptionPanelView: () -> TGCaptionPanelView? = { return nil } public var openBoost: () -> Void = { } - public var customSelection: ((MediaPickerScreen, Any) -> Void)? = nil + public var customSelection: ((MediaPickerScreenImpl, Any) -> Void)? = nil public var createFromScratch: () -> Void = {} public var presentFilePicker: () -> Void = {} @@ -250,7 +242,7 @@ public final class MediaPickerScreen: ViewController, AttachmentContainable { case media([Subject.Media]) } - private weak var controller: MediaPickerScreen? + private weak var controller: MediaPickerScreenImpl?
private var presentationData: PresentationData fileprivate let mediaAssetsContext: MediaAssetsContext @@ -307,7 +299,7 @@ public final class MediaPickerScreen: ViewController, AttachmentContainable { private var validLayout: (ContainerViewLayout, CGFloat)? - init(controller: MediaPickerScreen) { + init(controller: MediaPickerScreenImpl) { self.controller = controller self.presentationData = controller.presentationData @@ -1255,7 +1247,7 @@ public final class MediaPickerScreen: ViewController, AttachmentContainable { if parameters == nil { var textIsAboveMedia = false if let interaction = controller.interaction { - textIsAboveMedia = interaction.captionIsAboveMedia + textIsAboveMedia = interaction.editingState.isCaptionAbove() } parameters = ChatSendMessageActionSheetController.SendParameters( effect: nil, @@ -2311,7 +2303,7 @@ public final class MediaPickerScreen: ViewController, AttachmentContainable { } else { self.updateNavigationStack { current in var mediaPickerContext: AttachmentMediaPickerContext? - if let first = current.first as? MediaPickerScreen { + if let first = current.first as? MediaPickerScreenImpl { mediaPickerContext = first.webSearchController?.mediaPickerContext ?? first.mediaPickerContext } return (current.filter { $0 !== self }, mediaPickerContext) @@ -2417,7 +2409,7 @@ public final class MediaPickerScreen: ViewController, AttachmentContainable { var updateNavigationStackImpl: ((AttachmentContainable) -> Void)? let groupsController = MediaGroupsScreen(context: self.context, updatedPresentationData: self.updatedPresentationData, mediaAssetsContext: self.controllerNode.mediaAssetsContext, embedded: embedded, openGroup: { [weak self] collection in if let strongSelf = self { - let mediaPicker = MediaPickerScreen(context: strongSelf.context, updatedPresentationData: strongSelf.updatedPresentationData, peer: strongSelf.peer, threadTitle: strongSelf.threadTitle, chatLocation: strongSelf.chatLocation, isScheduledMessages: strongSelf.isScheduledMessages, bannedSendPhotos: strongSelf.bannedSendPhotos, bannedSendVideos: strongSelf.bannedSendVideos, subject: .assets(collection, mode), editingContext: strongSelf.interaction?.editingState, selectionContext: strongSelf.interaction?.selectionState) + let mediaPicker = MediaPickerScreenImpl(context: strongSelf.context, updatedPresentationData: strongSelf.updatedPresentationData, peer: strongSelf.peer, threadTitle: strongSelf.threadTitle, chatLocation: strongSelf.chatLocation, isScheduledMessages: strongSelf.isScheduledMessages, bannedSendPhotos: strongSelf.bannedSendPhotos, bannedSendVideos: strongSelf.bannedSendVideos, subject: .assets(collection, mode), editingContext: strongSelf.interaction?.editingState, selectionContext: strongSelf.interaction?.selectionState) mediaPicker.presentSchedulePicker = strongSelf.presentSchedulePicker mediaPicker.presentTimerPicker = strongSelf.presentTimerPicker @@ -2576,7 +2568,7 @@ public final class MediaPickerScreen: ViewController, AttachmentContainable { if isCaptionAboveMediaAvailable { var mediaCaptionIsAbove = false if let interaction = self.interaction { - mediaCaptionIsAbove = interaction.captionIsAboveMedia + mediaCaptionIsAbove = interaction.editingState.isCaptionAbove() } items.append(.action(ContextMenuActionItem(text: mediaCaptionIsAbove ? 
strings.Chat_SendMessageMenu_MoveCaptionDown : strings.Chat_SendMessageMenu_MoveCaptionUp, icon: { _ in return nil }, iconAnimation: ContextMenuActionItem.IconAnimation( @@ -2588,7 +2580,7 @@ public final class MediaPickerScreen: ViewController, AttachmentContainable { } if let interaction = strongSelf.interaction { - interaction.captionIsAboveMedia = !interaction.captionIsAboveMedia + interaction.editingState.setCaptionAbove(!interaction.editingState.isCaptionAbove()) } }))) } @@ -2684,13 +2676,17 @@ public final class MediaPickerScreen: ViewController, AttachmentContainable { self.selectedButtonNode.frame = CGRect(origin: CGPoint(x: self.view.bounds.width - 54.0 - self.selectedButtonNode.frame.width - safeInset, y: floorToScreenPixels((navigationHeight - self.selectedButtonNode.frame.height) / 2.0) + 1.0), size: self.selectedButtonNode.frame.size) } + public func dismissAnimated() { + self.requestDismiss(completion: {}) + } + public var mediaPickerContext: AttachmentMediaPickerContext? { return MediaPickerContext(controller: self) } } final class MediaPickerContext: AttachmentMediaPickerContext { - private weak var controller: MediaPickerScreen? + private weak var controller: MediaPickerScreenImpl? var selectionCount: Signal<Int, NoError> { return Signal { [weak self] subscriber in @@ -2791,23 +2787,33 @@ final class MediaPickerContext: AttachmentMediaPickerContext { var captionIsAboveMedia: Signal<Bool, NoError> { return Signal { [weak self] subscriber in - guard let interaction = self?.controller?.interaction else { + guard let self else { subscriber.putNext(false) subscriber.putCompletion() return EmptyDisposable } - let disposable = interaction.captionIsAboveMediaValue.get().start(next: { value in - subscriber.putNext(value) + guard let captionAbove = self.controller?.interaction?.editingState.captionAbove() else { + subscriber.putNext(false) + subscriber.putCompletion() + return EmptyDisposable + } + + let disposable = captionAbove.start(next: { caption in + if let caption = caption as? NSNumber { + subscriber.putNext(caption.boolValue) + } else { + subscriber.putNext(false) + } }, error: { _ in }, completed: { }) return ActionDisposable { - disposable.dispose() + disposable?.dispose() } } } func setCaptionIsAboveMedia(_ captionIsAboveMedia: Bool) -> Void { - self.controller?.interaction?.captionIsAboveMedia = captionIsAboveMedia + self.controller?.interaction?.editingState.setCaptionAbove(captionIsAboveMedia) } public var loadingProgress: Signal<CGFloat?, NoError> { @@ -2818,7 +2823,7 @@ final class MediaPickerContext: AttachmentMediaPickerContext { return .single(self.controller?.mainButtonState) } - init(controller: MediaPickerScreen) { + init(controller: MediaPickerScreenImpl) { self.controller = controller } @@ -2954,7 +2959,7 @@ public func wallpaperMediaPickerController( updatedPresentationData: (initial: PresentationData, signal: Signal<PresentationData, NoError>)?
= nil, peer: EnginePeer, animateAppearance: Bool, - completion: @escaping (MediaPickerScreen, Any) -> Void = { _, _ in }, + completion: @escaping (MediaPickerScreenImpl, Any) -> Void = { _, _ in }, openColors: @escaping () -> Void ) -> ViewController { let controller = AttachmentController(context: context, updatedPresentationData: updatedPresentationData, chatLocation: nil, buttons: [.standalone], initialButton: .standalone, fromMenu: false, hasTextInput: false, makeEntityInputView: { @@ -2963,7 +2968,7 @@ public func wallpaperMediaPickerController( controller.animateAppearance = animateAppearance controller.requestController = { [weak controller] _, present in let presentationData = context.sharedContext.currentPresentationData.with { $0 } - let mediaPickerController = MediaPickerScreen(context: context, updatedPresentationData: updatedPresentationData, peer: nil, threadTitle: nil, chatLocation: nil, bannedSendPhotos: nil, bannedSendVideos: nil, subject: .assets(nil, .wallpaper), mainButtonState: AttachmentMainButtonState(text: presentationData.strings.Conversation_Theme_SetColorWallpaper, font: .regular, background: .color(.clear), textColor: presentationData.theme.actionSheet.controlAccentColor, isVisible: true, progress: .none, isEnabled: true, hasShimmer: false), mainButtonAction: { + let mediaPickerController = MediaPickerScreenImpl(context: context, updatedPresentationData: updatedPresentationData, peer: nil, threadTitle: nil, chatLocation: nil, bannedSendPhotos: nil, bannedSendVideos: nil, subject: .assets(nil, .wallpaper), mainButtonState: AttachmentMainButtonState(text: presentationData.strings.Conversation_Theme_SetColorWallpaper, font: .regular, background: .color(.clear), textColor: presentationData.theme.actionSheet.controlAccentColor, isVisible: true, progress: .none, isEnabled: true, hasShimmer: false), mainButtonAction: { controller?.dismiss(animated: true) openColors() }) @@ -2985,7 +2990,7 @@ public func mediaPickerController( return nil }) controller.requestController = { _, present in - let mediaPickerController = MediaPickerScreen(context: context, updatedPresentationData: updatedPresentationData, peer: nil, threadTitle: nil, chatLocation: nil, bannedSendPhotos: nil, bannedSendVideos: nil, subject: .assets(nil, .addImage), mainButtonState: nil, mainButtonAction: nil) + let mediaPickerController = MediaPickerScreenImpl(context: context, updatedPresentationData: updatedPresentationData, peer: nil, threadTitle: nil, chatLocation: nil, bannedSendPhotos: nil, bannedSendVideos: nil, subject: .assets(nil, .addImage), mainButtonState: nil, mainButtonAction: nil) mediaPickerController.customSelection = { controller, result in completion(result) controller.dismiss(animated: true) @@ -3020,6 +3025,7 @@ public func mediaPickerController( public func storyMediaPickerController( context: AccountContext, isDark: Bool, + forCollage: Bool, getSourceRect: @escaping () -> CGRect, completion: @escaping (Any, UIView, CGRect, UIImage?, @escaping (Bool?) 
-> (UIView, CGRect)?, @escaping () -> Void) -> Void, dismissed: @escaping () -> Void, @@ -3036,7 +3042,7 @@ public func storyMediaPickerController( controller.forceSourceRect = true controller.getSourceRect = getSourceRect controller.requestController = { _, present in - let mediaPickerController = MediaPickerScreen(context: context, updatedPresentationData: updatedPresentationData, peer: nil, threadTitle: nil, chatLocation: nil, bannedSendPhotos: nil, bannedSendVideos: nil, subject: .assets(nil, .story), mainButtonState: nil, mainButtonAction: nil) + let mediaPickerController = MediaPickerScreenImpl(context: context, updatedPresentationData: updatedPresentationData, peer: nil, threadTitle: nil, chatLocation: nil, bannedSendPhotos: nil, bannedSendVideos: nil, subject: .assets(nil, .story), mainButtonState: nil, mainButtonAction: nil) mediaPickerController.groupsPresented = groupsPresented mediaPickerController.customSelection = { controller, result in if let result = result as? MediaEditorDraft { @@ -3062,7 +3068,9 @@ public func storyMediaPickerController( }) } } else if let result = result as? PHAsset { - controller.updateHiddenMediaId(result.localIdentifier) + if !forCollage { + controller.updateHiddenMediaId(result.localIdentifier) + } if let transitionView = controller.transitionView(for: result.localIdentifier, snapshot: false) { let transitionOut: (Bool?) -> (UIView, CGRect)? = { isNew in if let isNew { @@ -3107,7 +3115,7 @@ public func stickerMediaPickerController( controller.forceSourceRect = true controller.getSourceRect = getSourceRect controller.requestController = { [weak controller] _, present in - let mediaPickerController = MediaPickerScreen(context: context, updatedPresentationData: updatedPresentationData, peer: nil, threadTitle: nil, chatLocation: nil, bannedSendPhotos: nil, bannedSendVideos: nil, subject: .assets(nil, .createSticker), mainButtonState: nil, mainButtonAction: nil) + let mediaPickerController = MediaPickerScreenImpl(context: context, updatedPresentationData: updatedPresentationData, peer: nil, threadTitle: nil, chatLocation: nil, bannedSendPhotos: nil, bannedSendVideos: nil, subject: .assets(nil, .createSticker), mainButtonState: nil, mainButtonAction: nil) mediaPickerController.customSelection = { controller, result in if let result = result as? PHAsset { controller.updateHiddenMediaId(result.localIdentifier) @@ -3175,17 +3183,17 @@ public func stickerMediaPickerController( } var returnToCameraImpl: (() -> Void)? 
- let cameraScreen = CameraScreen( + let cameraScreen = CameraScreenImpl( context: context, mode: .sticker, holder: cameraHolder, - transitionIn: CameraScreen.TransitionIn( + transitionIn: CameraScreenImpl.TransitionIn( sourceView: cameraHolder.parentView, sourceRect: cameraHolder.parentView.bounds, sourceCornerRadius: 0.0 ), transitionOut: { _ in - return CameraScreen.TransitionOut( + return CameraScreenImpl.TransitionOut( destinationView: cameraHolder.parentView, destinationRect: cameraHolder.parentView.bounds, destinationCornerRadius: 0.0 diff --git a/submodules/MediaPickerUI/Sources/MediaPickerSelectedListNode.swift b/submodules/MediaPickerUI/Sources/MediaPickerSelectedListNode.swift index 8b5b6960ac..65cbce1c78 100644 --- a/submodules/MediaPickerUI/Sources/MediaPickerSelectedListNode.swift +++ b/submodules/MediaPickerUI/Sources/MediaPickerSelectedListNode.swift @@ -16,6 +16,7 @@ import ChatMessageBackground import ChatSendMessageActionUI import ComponentFlow import ComponentDisplayAdapters +import MediaAssetsContext private class MediaPickerSelectedItemNode: ASDisplayNode { let asset: TGMediaEditableItem diff --git a/submodules/TelegramCore/Sources/PendingMessages/StandaloneSendMessage.swift b/submodules/TelegramCore/Sources/PendingMessages/StandaloneSendMessage.swift index fd9398aa56..74de9a9aaa 100644 --- a/submodules/TelegramCore/Sources/PendingMessages/StandaloneSendMessage.swift +++ b/submodules/TelegramCore/Sources/PendingMessages/StandaloneSendMessage.swift @@ -128,6 +128,8 @@ public func standaloneSendEnqueueMessages( struct MessageResult { var result: PendingMessageUploadedContentResult var media: [Media] + var attributes: [MessageAttribute] + var groupingKey: Int64? } let signals: [Signal] = messages.map { message in @@ -180,7 +182,7 @@ public func standaloneSendEnqueueMessages( let content = messageContentToUpload(accountPeerId: accountPeerId, network: network, postbox: postbox, auxiliaryMethods: auxiliaryMethods, transformOutgoingMessageMedia: { _, _, _, _ in return .single(nil) - }, messageMediaPreuploadManager: MessageMediaPreuploadManager(), revalidationContext: MediaReferenceRevalidationContext(), forceReupload: false, isGrouped: false, passFetchProgress: true, forceNoBigParts: false, peerId: peerId, messageId: nil, attributes: attributes, text: text, media: media) + }, messageMediaPreuploadManager: MessageMediaPreuploadManager(), revalidationContext: MediaReferenceRevalidationContext(), forceReupload: false, isGrouped: message.groupingKey != nil, passFetchProgress: true, forceNoBigParts: false, peerId: peerId, messageId: nil, attributes: attributes, text: text, media: media) let contentResult: Signal switch content { case let .signal(value, _): @@ -190,7 +192,7 @@ public func standaloneSendEnqueueMessages( } return contentResult |> map { contentResult in - return MessageResult(result: contentResult, media: media) + return MessageResult(result: contentResult, media: media, attributes: attributes, groupingKey: message.groupingKey) } } @@ -200,7 +202,7 @@ public func standaloneSendEnqueueMessages( } |> mapToSignal { contentResults -> Signal in var progressSum: Float = 0.0 - var allResults: [(result: PendingMessageUploadedContentAndReuploadInfo, media: [Media])] = [] + var allResults: [(result: PendingMessageUploadedContentAndReuploadInfo, media: [Media], attributes: [MessageAttribute], groupingKey: Int64?)] = [] var allDone = true for result in contentResults { switch result.result { @@ -208,36 +210,60 @@ public func standaloneSendEnqueueMessages( allDone = false progressSum +=
value.progress case let .content(content): - allResults.append((content, result.media)) + allResults.append((content, result.media, result.attributes, result.groupingKey)) } } if allDone { var sendSignals: [Signal] = [] - for (content, media) in allResults { - var text: String = "" - switch content.content { - case let .text(textValue): - text = textValue - case let .media(_, textValue): - text = textValue - default: - break + var existingGroupingKeys = Set<Int64>() + for (content, media, attributes, groupingKey) in allResults { + if let currentGroupingKey = groupingKey { + if !existingGroupingKeys.contains(currentGroupingKey) { + existingGroupingKeys.insert(currentGroupingKey) + var contents: [PendingMessageUploadedContentAndReuploadInfo] = [] + for (content, _, _, _) in allResults { + contents.append(content) + } + + sendSignals.append(sendUploadedMultiMessageContent( + auxiliaryMethods: auxiliaryMethods, + postbox: postbox, + network: network, + stateManager: stateManager, + accountPeerId: stateManager.accountPeerId, + peerId: peerId, + content: contents, + attributes: attributes, + threadId: threadId + )) + } + } else { + var text: String = "" + switch content.content { + case let .text(textValue): + text = textValue + case let .media(_, textValue): + text = textValue + default: + break + } + + sendSignals.append(sendUploadedMessageContent( + auxiliaryMethods: auxiliaryMethods, + postbox: postbox, + network: network, + stateManager: stateManager, + accountPeerId: stateManager.accountPeerId, + peerId: peerId, + content: content, + text: text, + attributes: attributes, + media: media, + threadId: threadId + )) } - sendSignals.append(sendUploadedMessageContent( - auxiliaryMethods: auxiliaryMethods, - postbox: postbox, - network: network, - stateManager: stateManager, - accountPeerId: stateManager.accountPeerId, - peerId: peerId, - content: content, - text: text, - attributes: [], - media: media, - threadId: threadId - )) } return combineLatest(sendSignals) @@ -389,111 +415,111 @@ private func sendUploadedMessageContent( let sendMessageRequest: Signal<NetworkRequestResult<Api.Updates>, MTRpcError> switch content.content { - case .text: - if bubbleUpEmojiOrStickersets { - flags |= Int32(1 << 15) - } - - var replyTo: Api.InputReplyTo? - if let replyMessageId = replyMessageId { - flags |= 1 << 0 - - var replyFlags: Int32 = 0 - if threadId != nil { - replyFlags |= 1 << 0 - } - replyTo = .inputReplyToMessage(flags: replyFlags, replyToMsgId: replyMessageId, topMsgId: threadId.flatMap(Int32.init(clamping:)), replyToPeerId: nil, quoteText: nil, quoteEntities: nil, quoteOffset: nil) - } else if let replyToStoryId = replyToStoryId { - if let inputPeer = transaction.getPeer(replyToStoryId.peerId).flatMap(apiInputPeer) { - flags |= 1 << 0 - replyTo = .inputReplyToStory(peer: inputPeer, storyId: replyToStoryId.id) - } - } - - sendMessageRequest = network.requestWithAdditionalInfo(Api.functions.messages.sendMessage(flags: flags, peer: inputPeer, replyTo: replyTo, message: text, randomId: uniqueId, replyMarkup: nil, entities: messageEntities, scheduleDate: scheduleTime, sendAs: sendAsInputPeer, quickReplyShortcut: nil, effect: nil), info: .acknowledgement, tag: dependencyTag) - case let .media(inputMedia, text): - if bubbleUpEmojiOrStickersets { - flags |= Int32(1 << 15) - } - - var replyTo: Api.InputReplyTo?
- if let replyMessageId = replyMessageId { - flags |= 1 << 0 - - var replyFlags: Int32 = 0 - if threadId != nil { - replyFlags |= 1 << 0 - } - replyTo = .inputReplyToMessage(flags: replyFlags, replyToMsgId: replyMessageId, topMsgId: threadId.flatMap(Int32.init(clamping:)), replyToPeerId: nil, quoteText: nil, quoteEntities: nil, quoteOffset: nil) - } else if let replyToStoryId = replyToStoryId { - if let inputPeer = transaction.getPeer(replyToStoryId.peerId).flatMap(apiInputPeer) { - flags |= 1 << 0 - replyTo = .inputReplyToStory(peer: inputPeer, storyId: replyToStoryId.id) - } - } + case .text: + if bubbleUpEmojiOrStickersets { + flags |= Int32(1 << 15) + } + + var replyTo: Api.InputReplyTo? + if let replyMessageId = replyMessageId { + flags |= 1 << 0 - sendMessageRequest = network.request(Api.functions.messages.sendMedia(flags: flags, peer: inputPeer, replyTo: replyTo, media: inputMedia, message: text, randomId: uniqueId, replyMarkup: nil, entities: messageEntities, scheduleDate: scheduleTime, sendAs: sendAsInputPeer, quickReplyShortcut: nil, effect: nil), tag: dependencyTag) - |> map(NetworkRequestResult.result) - case let .forward(sourceInfo): - var topMsgId: Int32? - if let threadId = threadId { - flags |= Int32(1 << 9) - topMsgId = Int32(clamping: threadId) + var replyFlags: Int32 = 0 + if threadId != nil { + replyFlags |= 1 << 0 } - - if let forwardSourceInfoAttribute = forwardSourceInfoAttribute, let sourcePeer = transaction.getPeer(forwardSourceInfoAttribute.messageId.peerId), let sourceInputPeer = apiInputPeer(sourcePeer) { - sendMessageRequest = network.request(Api.functions.messages.forwardMessages(flags: flags, fromPeer: sourceInputPeer, id: [sourceInfo.messageId.id], randomId: [uniqueId], toPeer: inputPeer, topMsgId: topMsgId, scheduleDate: scheduleTime, sendAs: sendAsInputPeer, quickReplyShortcut: nil), tag: dependencyTag) - |> map(NetworkRequestResult.result) - } else { - sendMessageRequest = .fail(MTRpcError(errorCode: 400, errorDescription: "internal")) - } - case let .chatContextResult(chatContextResult): - if chatContextResult.hideVia { - flags |= Int32(1 << 11) - } - - var replyTo: Api.InputReplyTo? 
- if let replyMessageId = replyMessageId { + replyTo = .inputReplyToMessage(flags: replyFlags, replyToMsgId: replyMessageId, topMsgId: threadId.flatMap(Int32.init(clamping:)), replyToPeerId: nil, quoteText: nil, quoteEntities: nil, quoteOffset: nil) + } else if let replyToStoryId = replyToStoryId { + if let inputPeer = transaction.getPeer(replyToStoryId.peerId).flatMap(apiInputPeer) { flags |= 1 << 0 - - var replyFlags: Int32 = 0 - if threadId != nil { - replyFlags |= 1 << 0 - } - replyTo = .inputReplyToMessage(flags: replyFlags, replyToMsgId: replyMessageId, topMsgId: threadId.flatMap(Int32.init(clamping:)), replyToPeerId: nil, quoteText: nil, quoteEntities: nil, quoteOffset: nil) - } else if let replyToStoryId = replyToStoryId { - if let inputPeer = transaction.getPeer(replyToStoryId.peerId).flatMap(apiInputPeer) { - flags |= 1 << 0 - replyTo = .inputReplyToStory(peer: inputPeer, storyId: replyToStoryId.id) - } + replyTo = .inputReplyToStory(peer: inputPeer, storyId: replyToStoryId.id) } + } + + sendMessageRequest = network.requestWithAdditionalInfo(Api.functions.messages.sendMessage(flags: flags, peer: inputPeer, replyTo: replyTo, message: text, randomId: uniqueId, replyMarkup: nil, entities: messageEntities, scheduleDate: scheduleTime, sendAs: sendAsInputPeer, quickReplyShortcut: nil, effect: nil), info: .acknowledgement, tag: dependencyTag) + case let .media(inputMedia, text): + if bubbleUpEmojiOrStickersets { + flags |= Int32(1 << 15) + } + + var replyTo: Api.InputReplyTo? + if let replyMessageId = replyMessageId { + flags |= 1 << 0 + + var replyFlags: Int32 = 0 + if threadId != nil { + replyFlags |= 1 << 0 + } + replyTo = .inputReplyToMessage(flags: replyFlags, replyToMsgId: replyMessageId, topMsgId: threadId.flatMap(Int32.init(clamping:)), replyToPeerId: nil, quoteText: nil, quoteEntities: nil, quoteOffset: nil) + } else if let replyToStoryId = replyToStoryId { + if let inputPeer = transaction.getPeer(replyToStoryId.peerId).flatMap(apiInputPeer) { + flags |= 1 << 0 + replyTo = .inputReplyToStory(peer: inputPeer, storyId: replyToStoryId.id) + } + } - sendMessageRequest = network.request(Api.functions.messages.sendInlineBotResult(flags: flags, peer: inputPeer, replyTo: replyTo, randomId: uniqueId, queryId: chatContextResult.queryId, id: chatContextResult.id, scheduleDate: scheduleTime, sendAs: sendAsInputPeer, quickReplyShortcut: nil)) + sendMessageRequest = network.request(Api.functions.messages.sendMedia(flags: flags, peer: inputPeer, replyTo: replyTo, media: inputMedia, message: text, randomId: uniqueId, replyMarkup: nil, entities: messageEntities, scheduleDate: scheduleTime, sendAs: sendAsInputPeer, quickReplyShortcut: nil, effect: nil), tag: dependencyTag) + |> map(NetworkRequestResult.result) + case let .forward(sourceInfo): + var topMsgId: Int32? 
+ if let threadId = threadId { + flags |= Int32(1 << 9) + topMsgId = Int32(clamping: threadId) + } + + if let forwardSourceInfoAttribute = forwardSourceInfoAttribute, let sourcePeer = transaction.getPeer(forwardSourceInfoAttribute.messageId.peerId), let sourceInputPeer = apiInputPeer(sourcePeer) { + sendMessageRequest = network.request(Api.functions.messages.forwardMessages(flags: flags, fromPeer: sourceInputPeer, id: [sourceInfo.messageId.id], randomId: [uniqueId], toPeer: inputPeer, topMsgId: topMsgId, scheduleDate: scheduleTime, sendAs: sendAsInputPeer, quickReplyShortcut: nil), tag: dependencyTag) |> map(NetworkRequestResult.result) - case .messageScreenshot: - let replyTo: Api.InputReplyTo - - if let replyMessageId = replyMessageId { - let replyFlags: Int32 = 0 - replyTo = .inputReplyToMessage(flags: replyFlags, replyToMsgId: replyMessageId, topMsgId: nil, replyToPeerId: nil, quoteText: nil, quoteEntities: nil, quoteOffset: nil) - } else if let replyToStoryId = replyToStoryId { - if let inputPeer = transaction.getPeer(replyToStoryId.peerId).flatMap(apiInputPeer) { - flags |= 1 << 0 - replyTo = .inputReplyToStory(peer: inputPeer, storyId: replyToStoryId.id) - } else { - let replyFlags: Int32 = 0 - replyTo = .inputReplyToMessage(flags: replyFlags, replyToMsgId: 0, topMsgId: nil, replyToPeerId: nil, quoteText: nil, quoteEntities: nil, quoteOffset: nil) - } + } else { + sendMessageRequest = .fail(MTRpcError(errorCode: 400, errorDescription: "internal")) + } + case let .chatContextResult(chatContextResult): + if chatContextResult.hideVia { + flags |= Int32(1 << 11) + } + + var replyTo: Api.InputReplyTo? + if let replyMessageId = replyMessageId { + flags |= 1 << 0 + + var replyFlags: Int32 = 0 + if threadId != nil { + replyFlags |= 1 << 0 + } + replyTo = .inputReplyToMessage(flags: replyFlags, replyToMsgId: replyMessageId, topMsgId: threadId.flatMap(Int32.init(clamping:)), replyToPeerId: nil, quoteText: nil, quoteEntities: nil, quoteOffset: nil) + } else if let replyToStoryId = replyToStoryId { + if let inputPeer = transaction.getPeer(replyToStoryId.peerId).flatMap(apiInputPeer) { + flags |= 1 << 0 + replyTo = .inputReplyToStory(peer: inputPeer, storyId: replyToStoryId.id) + } + } + + sendMessageRequest = network.request(Api.functions.messages.sendInlineBotResult(flags: flags, peer: inputPeer, replyTo: replyTo, randomId: uniqueId, queryId: chatContextResult.queryId, id: chatContextResult.id, scheduleDate: scheduleTime, sendAs: sendAsInputPeer, quickReplyShortcut: nil)) + |> map(NetworkRequestResult.result) + case .messageScreenshot: + let replyTo: Api.InputReplyTo + + if let replyMessageId = replyMessageId { + let replyFlags: Int32 = 0 + replyTo = .inputReplyToMessage(flags: replyFlags, replyToMsgId: replyMessageId, topMsgId: nil, replyToPeerId: nil, quoteText: nil, quoteEntities: nil, quoteOffset: nil) + } else if let replyToStoryId = replyToStoryId { + if let inputPeer = transaction.getPeer(replyToStoryId.peerId).flatMap(apiInputPeer) { + flags |= 1 << 0 + replyTo = .inputReplyToStory(peer: inputPeer, storyId: replyToStoryId.id) } else { let replyFlags: Int32 = 0 replyTo = .inputReplyToMessage(flags: replyFlags, replyToMsgId: 0, topMsgId: nil, replyToPeerId: nil, quoteText: nil, quoteEntities: nil, quoteOffset: nil) } - - sendMessageRequest = network.request(Api.functions.messages.sendScreenshotNotification(peer: inputPeer, replyTo: replyTo, randomId: uniqueId)) - |> map(NetworkRequestResult.result) - case .secretMedia: - assertionFailure() - sendMessageRequest = 
.fail(MTRpcError(errorCode: 400, errorDescription: "internal")) + } else { + let replyFlags: Int32 = 0 + replyTo = .inputReplyToMessage(flags: replyFlags, replyToMsgId: 0, topMsgId: nil, replyToPeerId: nil, quoteText: nil, quoteEntities: nil, quoteOffset: nil) + } + + sendMessageRequest = network.request(Api.functions.messages.sendScreenshotNotification(peer: inputPeer, replyTo: replyTo, randomId: uniqueId)) + |> map(NetworkRequestResult.result) + case .secretMedia: + assertionFailure() + sendMessageRequest = .fail(MTRpcError(errorCode: 400, errorDescription: "internal")) } return sendMessageRequest @@ -524,6 +550,143 @@ private func sendUploadedMessageContent( |> switchToLatest } +private func sendUploadedMultiMessageContent( + auxiliaryMethods: AccountAuxiliaryMethods, + postbox: Postbox, + network: Network, + stateManager: AccountStateManager, + accountPeerId: PeerId, + peerId: PeerId, + content: [PendingMessageUploadedContentAndReuploadInfo], + attributes: [MessageAttribute], + threadId: Int64? +) -> Signal { + return postbox.transaction { transaction -> Signal in + if let peer = transaction.getPeer(peerId), let inputPeer = apiInputPeer(peer) { + //var forwardSourceInfoAttribute: ForwardSourceInfoAttribute? + var messageEntities: [Api.MessageEntity]? + var replyMessageId: Int32? = threadId.flatMap { threadId in + return Int32(clamping: threadId) + } + var replyToStoryId: StoryId? + var scheduleTime: Int32? + var sendAsPeerId: PeerId? + var bubbleUpEmojiOrStickersets = false + + var flags: Int32 = 0 + + for attribute in attributes { + if let replyAttribute = attribute as? ReplyMessageAttribute { + replyMessageId = replyAttribute.messageId.id + } else if let attribute = attribute as? ReplyStoryAttribute { + replyToStoryId = attribute.storyId + } else if let outgoingInfo = attribute as? OutgoingMessageInfoAttribute { + bubbleUpEmojiOrStickersets = !outgoingInfo.bubbleUpEmojiOrStickersets.isEmpty + } else if let _ = attribute as? ForwardSourceInfoAttribute { + //forwardSourceInfoAttribute = attribute + } else if let attribute = attribute as? TextEntitiesMessageAttribute { + var associatedPeers = SimpleDictionary() + for attributePeerId in attribute.associatedPeerIds { + if let peer = transaction.getPeer(attributePeerId) { + associatedPeers[peer.id] = peer + } + } + messageEntities = apiTextAttributeEntities(attribute, associatedPeers: associatedPeers) + } else if let attribute = attribute as? OutgoingContentInfoMessageAttribute { + if attribute.flags.contains(.disableLinkPreviews) { + flags |= Int32(1 << 1) + } + } else if let attribute = attribute as? NotificationInfoMessageAttribute { + if attribute.flags.contains(.muted) { + flags |= Int32(1 << 5) + } + } else if let attribute = attribute as? OutgoingScheduleInfoMessageAttribute { + flags |= Int32(1 << 10) + scheduleTime = attribute.scheduleTime + } else if let attribute = attribute as? SendAsMessageAttribute { + sendAsPeerId = attribute.peerId + } + } + + var replyTo: Api.InputReplyTo? 
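+ // replyTo resolution matches the single-message path above: a message reply yields
+ // .inputReplyToMessage (replyFlags bit 0 signals that a topMsgId is attached), a story
+ // reply yields .inputReplyToStory, and either case also sets bit 0 of the outer flags
+ // so the server expects a reply_to argument.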
+ if let replyMessageId = replyMessageId { + flags |= 1 << 0 + + var replyFlags: Int32 = 0 + if threadId != nil { + replyFlags |= 1 << 0 + } + replyTo = .inputReplyToMessage(flags: replyFlags, replyToMsgId: replyMessageId, topMsgId: threadId.flatMap(Int32.init(clamping:)), replyToPeerId: nil, quoteText: nil, quoteEntities: nil, quoteOffset: nil) + } else if let replyToStoryId = replyToStoryId { + if let inputPeer = transaction.getPeer(replyToStoryId.peerId).flatMap(apiInputPeer) { + flags |= 1 << 0 + replyTo = .inputReplyToStory(peer: inputPeer, storyId: replyToStoryId.id) + } + } + + + flags |= (1 << 7) + + if let _ = replyMessageId { + flags |= Int32(1 << 0) + } + if let _ = messageEntities { + flags |= Int32(1 << 3) + } + + if bubbleUpEmojiOrStickersets { + flags |= Int32(1 << 15) + } + + var sendAsInputPeer: Api.InputPeer? + if let sendAsPeerId = sendAsPeerId, let sendAsPeer = transaction.getPeer(sendAsPeerId), let inputPeer = apiInputPeerOrSelf(sendAsPeer, accountPeerId: accountPeerId) { + sendAsInputPeer = inputPeer + flags |= (1 << 13) + } + + let dependencyTag: PendingMessageRequestDependencyTag? = nil//(messageId: messageId) + + let sendMessageRequest: Signal, MTRpcError> + + var multiMedia: [Api.InputSingleMedia] = [] + for singleContent in content { + if case let .media(inputMedia, text) = singleContent.content { + let uniqueId = Int64.random(in: Int64.min ... Int64.max) + multiMedia.append(.inputSingleMedia(flags: 0, media: inputMedia, randomId: uniqueId, message: text, entities: nil)) + } + } + + sendMessageRequest = network.request(Api.functions.messages.sendMultiMedia(flags: flags, peer: inputPeer, replyTo: replyTo, multiMedia: multiMedia, scheduleDate: scheduleTime, sendAs: sendAsInputPeer, quickReplyShortcut: nil, effect: nil), tag: dependencyTag) + |> map(NetworkRequestResult.result) + + return sendMessageRequest + |> mapToSignal { result -> Signal in + switch result { + case .progress: + return .complete() + case .acknowledged: + return .complete() + case let .result(result): + stateManager.addUpdates(result) + return .complete() + } + } + |> mapError { error -> StandaloneSendMessagesError in + if error.errorDescription.hasPrefix("FILEREF_INVALID") || error.errorDescription.hasPrefix("FILE_REFERENCE_") { + return StandaloneSendMessagesError(peerId: peerId, reason: nil) + } else if let failureReason = sendMessageReasonForError(error.errorDescription) { + return StandaloneSendMessagesError(peerId: peerId, reason: failureReason) + } + return StandaloneSendMessagesError(peerId: peerId, reason: nil) + } + } else { + return .complete() + } + } + |> castError(StandaloneSendMessagesError.self) + |> switchToLatest +} + public func standaloneSendMessage(account: Account, peerId: PeerId, text: String, attributes: [MessageAttribute], media: StandaloneMedia?, replyToMessageId: MessageId?, threadId: Int32? 
= nil) -> Signal { let content: Signal if let media = media { @@ -657,7 +820,7 @@ private func sendMessageContent(account: Account, peerId: PeerId, attributes: [M replyTo = .inputReplyToMessage(flags: flags, replyToMsgId: threadId, topMsgId: threadId, replyToPeerId: nil, quoteText: nil, quoteEntities: nil, quoteOffset: nil) } - sendMessageRequest = account.network.request(Api.functions.messages.sendMedia(flags: flags, peer: inputPeer, replyTo: replyTo, media: inputMedia, message: text, randomId: uniqueId, replyMarkup: nil, entities: messageEntities, scheduleDate: scheduleTime, sendAs: sendAsInputPeer, quickReplyShortcut: nil, effect: nil)) + sendMessageRequest = account.network.request(Api.functions.messages.sendMedia(flags: flags, peer: inputPeer, replyTo: replyTo, media: inputMedia, message: text, randomId: uniqueId, replyMarkup: nil, entities: messageEntities, scheduleDate: scheduleTime, sendAs: sendAsInputPeer, quickReplyShortcut: nil, effect: nil)) |> `catch` { _ -> Signal in return .complete() } diff --git a/submodules/TelegramUI/Components/CameraScreen/BUILD b/submodules/TelegramUI/Components/CameraScreen/BUILD index 6e070546b7..951d4aed83 100644 --- a/submodules/TelegramUI/Components/CameraScreen/BUILD +++ b/submodules/TelegramUI/Components/CameraScreen/BUILD @@ -56,6 +56,7 @@ swift_library( "//submodules/AsyncDisplayKit", "//submodules/Display", "//submodules/TelegramCore", + "//submodules/MetalEngine", "//submodules/SSignalKit/SwiftSignalKit", "//submodules/ComponentFlow", "//submodules/Components/ViewControllerComponent", @@ -81,6 +82,9 @@ swift_library( "//submodules/TelegramNotices", "//submodules/DeviceAccess", "//submodules/TelegramUI/Components/Utils/RoundedRectWithTailPath", + "//submodules/TelegramUI/Components/MediaAssetsContext", + "//submodules/UndoUI", + "//submodules/ContextUI", ], visibility = [ diff --git a/submodules/TelegramUI/Components/CameraScreen/MetalResources/cameraScreen.metal b/submodules/TelegramUI/Components/CameraScreen/MetalResources/cameraScreen.metal index aeb2812c0e..745949c73f 100644 --- a/submodules/TelegramUI/Components/CameraScreen/MetalResources/cameraScreen.metal +++ b/submodules/TelegramUI/Components/CameraScreen/MetalResources/cameraScreen.metal @@ -78,3 +78,172 @@ fragment half4 cameraBlobFragment(RasterizerData in[[stage_in]], return half4(min(minColor, c), min(minColor, max(cAlpha, 0.231)), min(minColor, max(cAlpha, 0.188)), c); } + +struct Rectangle { + float2 origin; + float2 size; +}; + +constant static float2 quadVertices[6] = { + float2(0.0, 0.0), + float2(1.0, 0.0), + float2(0.0, 1.0), + float2(1.0, 0.0), + float2(0.0, 1.0), + float2(1.0, 1.0) +}; + +struct QuadVertexOut { + float4 position [[position]]; + float2 uv; +}; + +kernel void videoBiPlanarToRGBA( + texture2d inTextureY [[ texture(0) ]], + texture2d inTextureUV [[ texture(1) ]], + texture2d outTexture [[ texture(2) ]], + uint2 threadPosition [[ thread_position_in_grid ]] +) { + half y = inTextureY.read(threadPosition).r; + half2 uv = inTextureUV.read(uint2(threadPosition.x / 2, threadPosition.y / 2)).rg - half2(0.5, 0.5); + + half4 color(y + 1.403 * uv.y, y - 0.344 * uv.x - 0.714 * uv.y, y + 1.770 * uv.x, 1.0); + outTexture.write(color, threadPosition); +} + +kernel void videoTriPlanarToRGBA( + texture2d inTextureY [[ texture(0) ]], + texture2d inTextureU [[ texture(1) ]], + texture2d inTextureV [[ texture(2) ]], + texture2d outTexture [[ texture(3) ]], + uint2 threadPosition [[ thread_position_in_grid ]] +) { + half y = inTextureY.read(threadPosition).r; + uint2 
uvPosition = uint2(threadPosition.x / 2, threadPosition.y / 2); + half2 inUV = half2(inTextureU.read(uvPosition).r, inTextureV.read(uvPosition).r); + half2 uv = inUV - half2(0.5, 0.5); + + half4 color(y + 1.403 * uv.y, y - 0.344 * uv.x - 0.714 * uv.y, y + 1.770 * uv.x, 1.0); + outTexture.write(color, threadPosition); +} + +vertex QuadVertexOut mainVideoVertex( + const device Rectangle &rect [[ buffer(0) ]], + const device uint2 &mirror [[ buffer(1) ]], + unsigned int vid [[ vertex_id ]] +) { + float2 quadVertex = quadVertices[vid]; + + QuadVertexOut out; + + out.position = float4(rect.origin.x + quadVertex.x * rect.size.x, rect.origin.y + quadVertex.y * rect.size.y, 0.0, 1.0); + out.position.x = -1.0 + out.position.x * 2.0; + out.position.y = -1.0 + out.position.y * 2.0; + + float2 uv = float2(quadVertex.x, 1.0 - quadVertex.y); + out.uv = float2(uv.y, 1.0 - uv.x); + if (mirror.x == 1) { + out.uv.x = 1.0 - out.uv.x; + } + if (mirror.y == 1) { + out.uv.y = 1.0 - out.uv.y; + } + + return out; +} + +half4 rgb2hsv(half4 c) { + half4 K = half4(0.0, -1.0 / 3.0, 2.0 / 3.0, -1.0); + half4 p = mix(half4(c.bg, K.wz), half4(c.gb, K.xy), step(c.b, c.g)); + half4 q = mix(half4(p.xyw, c.r), half4(c.r, p.yzx), step(p.x, c.r)); + + float d = q.x - min(q.w, q.y); + float e = 1.0e-10; + return half4(abs(q.z + (q.w - q.y) / (6.0 * d + e)), d / (q.x + e), q.x, c.a); +} + +half4 hsv2rgb(half4 c) { + half4 K = half4(1.0, 2.0 / 3.0, 1.0 / 3.0, 3.0); + half3 p = abs(fract(c.xxx + K.xyz) * 6.0 - K.www); + return half4(c.z * mix(K.xxx, clamp(p - K.xxx, 0.0, 1.0), c.y), c.a); +} + +fragment half4 mainVideoFragment( + QuadVertexOut in [[stage_in]], + texture2d texture [[ texture(0) ]], + const device float &brightness [[ buffer(0) ]], + const device float &saturation [[ buffer(1) ]], + const device float4 &overlay [[ buffer(2) ]] +) { + constexpr sampler sampler(coord::normalized, address::repeat, filter::linear); + half4 color = texture.sample(sampler, in.uv); + color = rgb2hsv(color); + color.b = clamp(color.b * brightness, 0.0, 1.0); + color.g = clamp(color.g * saturation, 0.0, 1.0); + color = hsv2rgb(color); + color.rgb += half3(overlay.rgb * overlay.a); + color.rgb = min(color.rgb, half3(1.0, 1.0, 1.0)); + + return half4(color.r, color.g, color.b, color.a); +} + +constant int BLUR_SAMPLE_COUNT = 7; +constant float BLUR_OFFSETS[BLUR_SAMPLE_COUNT] = { + 1.489585, + 3.475713, + 5.461880, + 7.448104, + 9.434408, + 11.420812, + 13.407332 +}; + +constant float BLUR_WEIGHTS[BLUR_SAMPLE_COUNT] = { + 0.130498886, + 0.113685958, + 0.0886923522, + 0.0619646012, + 0.0387683809, + 0.0217213109, + 0.0108984858 +}; + +static void gaussianBlur( + texture2d inTexture, + texture2d outTexture, + float2 offset, + uint2 gid +) { + constexpr sampler sampler(coord::normalized, address::clamp_to_edge, filter::linear); + + uint2 textureDim(outTexture.get_width(), outTexture.get_height()); + if(all(gid < textureDim)) { + float3 outColor(0.0); + + float2 size(inTexture.get_width(), inTexture.get_height()); + + float2 baseTexCoord = float2(gid); + + for (int i = 0; i < BLUR_SAMPLE_COUNT; i++) { + outColor += float3(inTexture.sample(sampler, (baseTexCoord + offset * BLUR_OFFSETS[i]) / size).rgb) * BLUR_WEIGHTS[i]; + } + + outTexture.write(half4(half3(outColor), 1.0), gid); + } +} + +kernel void gaussianBlurHorizontal( + texture2d inTexture [[ texture(0) ]], + texture2d outTexture [[ texture(1) ]], + uint2 gid [[ thread_position_in_grid ]] +) { + gaussianBlur(inTexture, outTexture, float2(1, 0), gid); +} + +kernel void gaussianBlurVertical( +
texture2d inTexture [[ texture(0) ]], + texture2d outTexture [[ texture(1) ]], + uint2 gid [[ thread_position_in_grid ]] +) { + gaussianBlur(inTexture, outTexture, float2(0, 1), gid); +} diff --git a/submodules/TelegramUI/Components/CameraScreen/Sources/CameraCollage.swift b/submodules/TelegramUI/Components/CameraScreen/Sources/CameraCollage.swift new file mode 100644 index 0000000000..58246d48c1 --- /dev/null +++ b/submodules/TelegramUI/Components/CameraScreen/Sources/CameraCollage.swift @@ -0,0 +1,1403 @@ +import Foundation +import AVFoundation +import UIKit +import Display +import ComponentFlow +import SwiftSignalKit +import Camera +import ContextUI +import AccountContext +import MetalEngine +import TelegramPresentationData +import Photos + +final class CameraCollage { + final class CaptureResult { + enum Content { + case pending(CameraCollage.State.Item.Content.Placeholder?) + case image(UIImage) + case video(asset: AVAsset, thumbnail: UIImage?, duration: Double, source: CameraCollage.State.Item.Content.VideoSource) + case failed + } + + private var internalContent: Content + private var disposable: Disposable? + + init(result: Signal, snapshotView: UIView?, contentUpdated: @escaping () -> Void) { + self.internalContent = .pending(snapshotView.flatMap { .view($0) }) + self.disposable = (result + |> deliverOnMainQueue).start(next: { [weak self] value in + guard let self else { + return + } + switch value { + case .pendingImage: + contentUpdated() + case let .image(image): + self.internalContent = .image(image.image) + contentUpdated() + case let .video(video): + self.internalContent = .pending(video.coverImage.flatMap { .image($0) }) + contentUpdated() + Queue.mainQueue().after(0.05, { + let asset = AVURLAsset(url: URL(fileURLWithPath: video.videoPath)) + self.internalContent = .video(asset: asset, thumbnail: video.coverImage, duration: video.duration, source: .file(video.videoPath)) + contentUpdated() + }) + case let .asset(asset): + let targetSize = CGSize(width: 256.0, height: 256.0) + let options = PHImageRequestOptions() + let deliveryMode: PHImageRequestOptionsDeliveryMode = .highQualityFormat + options.deliveryMode = deliveryMode + options.isNetworkAccessAllowed = true + + PHImageManager.default().requestImage( + for: asset, + targetSize: targetSize, + contentMode: .aspectFit, + options: options, + resultHandler: { [weak self] image, info in + if let image, let self { + if let info { + if let cancelled = info[PHImageCancelledKey] as? Bool, cancelled { + return + } + } + self.internalContent = .pending(.image(image)) + contentUpdated() + + PHImageManager.default().requestAVAsset(forVideo: asset, options: nil, resultHandler: { [weak self] avAsset, _, _ in + if let avAsset, let self { + Queue.mainQueue().async { + self.internalContent = .video(asset: avAsset, thumbnail: nil, duration: 0.0, source: .asset(asset)) + contentUpdated() + } + } + }) + } + } + ) + default: + break + } + }) + } + + deinit { + self.disposable?.dispose() + } + + var content: State.Item.Content? 
{ + switch self.internalContent { + case let .pending(placeholder): + return .pending(placeholder) + case let .image(image): + return .image(image) + case let .video(asset, thumbnail, duration, source): + return .video(asset, thumbnail, duration, source) + case .failed: + return nil + } + } + } + + struct State { + struct Item { + enum Content { + enum VideoSource { + case file(String) + case asset(PHAsset) + } + + enum Placeholder { + case view(UIView) + case image(UIImage) + } + + case empty + case camera + case pending(Placeholder?) + case image(UIImage) + case video(AVAsset, UIImage?, Double, VideoSource) + } + + var uniqueId: Int64 + var content: Content + + var isReady: Bool { + switch self.content { + case .image, .video: + return true + default: + return false + } + } + + var isAlmostReady: Bool { + switch self.content { + case .image, .video, .pending: + return true + default: + return false + } + } + } + + struct Row { + let items: [Item] + } + + var grid: Camera.CollageGrid + var progress: Float + var innerProgress: Float + var rows: [Row] + } + private var _state: State + private var _statePromise = Promise() + var state: Signal { + return self._statePromise.get() + } + + var grid: Camera.CollageGrid { + didSet { + if self.grid != oldValue { + self._state.grid = self.grid + self.updateState() + } + } + } + var results: [CaptureResult] + var uniqueIds: [Int64] + + private(set) var cameraIndex: Int? + + init(grid: Camera.CollageGrid) { + self.grid = grid + self.results = [] + self.uniqueIds = (0 ..< 6).map { _ in Int64.random(in: .min ... .max) } + + self._state = State( + grid: grid, + progress: 0.0, + innerProgress: 0.0, + rows: CameraCollage.computeRows(grid: grid, results: [], uniqueIds: self.uniqueIds, cameraIndex: self.cameraIndex) + ) + self.updateState() + } + + func addResult(_ signal: Signal, snapshotView: UIView?) { + guard self.results.count < self.grid.count else { + return + } + let result = CaptureResult(result: signal, snapshotView: snapshotView, contentUpdated: { [weak self] in + self?.checkResults() + self?.updateState() + }) + if let cameraIndex = self.cameraIndex { + self.cameraIndex = nil + self.results.insert(result, at: cameraIndex) + } else { + self.results.append(result) + } + self.updateState() + } + + func moveItem(fromId: Int64, toId: Int64) { + guard let fromIndex = self.uniqueIds.firstIndex(where: { $0 == fromId }), let toIndex = self.uniqueIds.firstIndex(where: { $0 == toId }), toIndex < self.results.count else { + return + } + let fromItem = self.results[fromIndex] + let toItem = self.results[toIndex] + self.results[fromIndex] = toItem + self.uniqueIds[fromIndex] = toId + self.results[toIndex] = fromItem + self.uniqueIds[toIndex] = fromId + self.updateState() + } + + func retakeItem(id: Int64) { + guard let index = self.uniqueIds.firstIndex(where: { $0 == id }) else { + return + } + self.cameraIndex = index + + self.results.remove(at: index) + self.updateState() + } + + func deleteItem(id: Int64) { + guard let index = self.uniqueIds.firstIndex(where: { $0 == id }) else { + return + } + self.results.remove(at: index) + self.uniqueIds.removeAll(where: { $0 == id }) + self.uniqueIds.append(Int64.random(in: .min ... .max)) + } + + private func checkResults() { + self.results = self.results.filter { $0.content != nil } + } + + private static func computeRows(grid: Camera.CollageGrid, results: [CaptureResult], uniqueIds: [Int64], cameraIndex: Int?) 
-> [State.Row] { + var rows: [State.Row] = [] + var index = 0 + var contentIndex = 0 + var addedCamera = false + for row in grid.rows { + var items: [State.Item] = [] + for _ in 0 ..< row.columns { + if index == cameraIndex { + items.append(State.Item(uniqueId: uniqueIds[index], content: .camera)) + addedCamera = true + contentIndex -= 1 + } else if contentIndex < results.count { + if let content = results[contentIndex].content { + items.append(State.Item(uniqueId: uniqueIds[index], content: content)) + } else { + items.append(State.Item(uniqueId: uniqueIds[index], content: .empty)) + } + } else if index == results.count && !addedCamera { + items.append(State.Item(uniqueId: uniqueIds[index], content: .camera)) + } else { + items.append(State.Item(uniqueId: uniqueIds[index], content: .empty)) + } + index += 1 + contentIndex += 1 + } + rows.append(State.Row(items: items)) + } + return rows + } + + private static func computeProgress(rows: [State.Row], inner: Bool) -> Float { + var readyCount: Int = 0 + var totalCount: Int = 0 + for row in rows { + for item in row.items { + if inner { + if item.isAlmostReady { + readyCount += 1 + } + } else { + if item.isReady { + readyCount += 1 + } + } + totalCount += 1 + } + } + guard totalCount > 0 else { + return 0.0 + } + return Float(readyCount) / Float(totalCount) + } + + private func updateState() { + self._state.rows = CameraCollage.computeRows(grid: self._state.grid, results: self.results, uniqueIds: self.uniqueIds, cameraIndex: self.cameraIndex) + self._state.progress = CameraCollage.computeProgress(rows: self._state.rows, inner: false) + self._state.innerProgress = CameraCollage.computeProgress(rows: self._state.rows, inner: true) + self._statePromise.set(.single(self._state)) + } + + var isComplete: Bool { + return self._state.progress > 1.0 - .ulpOfOne + } + + var result: Signal { + guard self.isComplete else { + return .complete() + } + + var hasVideo = false + let state = self._state + +outer: for row in state.rows { + for item in row.items { + if case .video = item.content { + hasVideo = true + break outer + } + } + } + + let size = CGSize(width: 1080.0, height: 1920.0) + let rowHeight: CGFloat = ceil(size.height / CGFloat(state.rows.count)) + + if hasVideo { + var items: [CameraScreenImpl.Result.VideoCollage.Item] = [] + var itemFrame: CGRect = .zero + for row in state.rows { + let columnWidth: CGFloat = floor(size.width / CGFloat(row.items.count)) + itemFrame = CGRect(origin: itemFrame.origin, size: CGSize(width: columnWidth, height: rowHeight)) + for item in row.items { + let content: CameraScreenImpl.Result.VideoCollage.Item.Content + switch item.content { + case let .image(image): + content = .image(image) + case let .video(_, _, duration, source): + switch source { + case let .file(path): + content = .video(path, duration) + case let .asset(asset): + content = .asset(asset) + } + default: + fatalError() + } + items.append(CameraScreenImpl.Result.VideoCollage.Item(content: content, frame: itemFrame)) + itemFrame.origin.x += columnWidth + } + itemFrame.origin.x = 0.0 + itemFrame.origin.y += rowHeight + } + return .single(.videoCollage(CameraScreenImpl.Result.VideoCollage(items: items))) + } else { + let image = generateImage(size, contextGenerator: { size, context in + var itemFrame: CGRect = .zero + for row in state.rows { + let columnWidth: CGFloat = floor(size.width / CGFloat(row.items.count)) + itemFrame = CGRect(origin: itemFrame.origin, size: CGSize(width: columnWidth, height: rowHeight)) + for item in row.items { + let 
mappedItemFrame = CGRect(origin: CGPoint(x: itemFrame.minX, y: size.height - itemFrame.origin.y - rowHeight), size: CGSize(width: columnWidth, height: rowHeight)) + if case let .image(image) = item.content { + context.clip(to: mappedItemFrame) + let drawingSize = image.size.aspectFilled(mappedItemFrame.size) + let imageFrame = drawingSize.centered(around: mappedItemFrame.center) + if let cgImage = image.cgImage { + context.draw(cgImage, in: imageFrame, byTiling: false) + } + context.resetClip() + } + itemFrame.origin.x += columnWidth + } + itemFrame.origin.x = 0.0 + itemFrame.origin.y += rowHeight + } + }, opaque: true, scale: 1.0) + if let image { + return .single(.image(CameraScreenImpl.Result.Image(image: image, additionalImage: nil, additionalImagePosition: .topLeft))) + } else { + return .single(.pendingImage) + } + } + } +} + +final class CameraCollageView: UIView, UIGestureRecognizerDelegate { + final class PreviewLayer: SimpleLayer { + var dispose: () -> Void = {} + + let contentLayer: MetalEngineSubjectLayer + init(contentLayer: MetalEngineSubjectLayer) { + self.contentLayer = contentLayer + super.init() + self.addSublayer(contentLayer) + } + + required init?(coder: NSCoder) { + preconditionFailure() + } + + func update(size: CGSize, transition: ComponentTransition) { + let filledSize = CGSize(width: 320.0, height: 568.0).aspectFilled(size) + transition.setFrame(layer: self.contentLayer, frame: filledSize.centered(around: CGPoint(x: size.width / 2.0, y: size.height / 2.0))) + } + } + + final class ItemView: ContextControllerSourceView { + private let extractedContainerView = ContextExtractedContentContainingView() + + private let clippingView = UIView() + private var snapshotView: UIView? + private var cameraContainerView: UIView? + private var imageView: UIImageView? + private var previewLayer: PreviewLayer? + + private var videoPlayer: AVPlayer? + private var videoLayer: AVPlayerLayer? + private var didPlayToEndTimeObserver: NSObjectProtocol? + + private var originalCameraTransform: CATransform3D? + private var originalCameraFrame: CGRect? + + var contextAction: ((Int64, ContextExtractedContentContainingView, ContextGesture?) -> Void)? 
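+ // The cell's lifecycle follows CameraCollage.State.Item.Content, handled in
+ // update(item:size:cameraContainerView:transition:) below: .empty shows a cloned Metal
+ // preview layer, .camera hosts the live camera container view, .pending shows a snapshot
+ // placeholder behind a brief white "shutter" flash, .image swaps in a UIImageView, and
+ // .video attaches a muted AVPlayerLayer that is restarted via didPlayToEnd.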
+ + var isCamera: Bool { + if case .camera = self.item?.content { + return true + } + return false + } + + var isReady: Bool { + if case .image = self.item?.content { + return true + } + if case .video = self.item?.content { + return true + } + return false + } + + var isPlaying: Bool { + if let videoPlayer = self.videoPlayer { + return videoPlayer.rate > 0.0 + } else { + return false + } + } + + var didPlayToEnd: (() -> Void) = {} + + override init(frame: CGRect) { + super.init(frame: frame) + + self.clippingView.clipsToBounds = true + + self.addSubview(self.extractedContainerView) + + self.isGestureEnabled = false + self.targetViewForActivationProgress = self.extractedContainerView.contentView + + self.clipsToBounds = true + self.extractedContainerView.contentView.clipsToBounds = true + self.extractedContainerView.contentView.addSubview(self.clippingView) + + self.extractedContainerView.willUpdateIsExtractedToContextPreview = { [weak self] value, _ in + guard let self else { + return + } + let transition = ContainedViewLayoutTransition.animated(duration: 0.2, curve: .easeInOut) + if value { + self.clippingView.layer.cornerRadius = 12.0 + transition.updateSublayerTransformScale(layer: self.extractedContainerView.contentView.layer, scale: CGPoint(x: 0.9, y: 0.9)) + } else { + self.clippingView.layer.cornerRadius = 0.0 + self.clippingView.layer.animate(from: NSNumber(value: Float(12.0)), to: NSNumber(value: Float(0.0)), keyPath: "cornerRadius", timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue, duration: 0.2) + transition.updateSublayerTransformScale(layer: self.extractedContainerView.contentView.layer, scale: CGPoint(x: 1.0, y: 1.0)) + } + } + + self.activated = { [weak self] gesture, _ in + guard let self, let item = self.item else { + gesture.cancel() + return + } + self.contextAction?(item.uniqueId, self.extractedContainerView, gesture) + } + } + required init?(coder aDecoder: NSCoder) { + preconditionFailure() + } + + func requestContextAction() { + guard let item = self.item, self.isReady else { + return + } + self.contextAction?(item.uniqueId, self.extractedContainerView, nil) + } + + func resetPlayback() { + self.videoPlayer?.seek(to: .zero) + self.videoPlayer?.play() + } + + var getPreviewLayer: () -> PreviewLayer? = { return nil } + + private var item: CameraCollage.State.Item? + func update(item: CameraCollage.State.Item, size: CGSize, cameraContainerView: UIView?, transition: ComponentTransition) { + self.item = item + + let center = CGPoint(x: size.width / 2.0, y: size.height / 2.0) + + switch item.content { + case let .pending(placeholder): + if let placeholder { + let snapshotView: UIView + switch placeholder { + case let .view(view): + snapshotView = view + case let .image(image): + if let current = self.snapshotView as? 
UIImageView { + snapshotView = current + } else { + snapshotView = UIImageView(image: image) + snapshotView.contentMode = .scaleAspectFill + snapshotView.isUserInteractionEnabled = false + snapshotView.frame = image.size.aspectFilled(size).centered(in: CGRect(origin: .zero, size: size)) + } + } + + self.snapshotView = snapshotView + var snapshotTransition = transition + if snapshotView.superview !== self.clippingView { + snapshotTransition = .immediate + self.clippingView.addSubview(snapshotView) + + let shutterLayer = SimpleLayer() + shutterLayer.backgroundColor = UIColor.white.cgColor + shutterLayer.frame = CGRect(origin: .zero, size: size) + self.layer.addSublayer(shutterLayer) + shutterLayer.animateAlpha(from: 1.0, to: 0.0, duration: 0.25, removeOnCompletion: false, completion: { _ in + shutterLayer.removeFromSuperlayer() + }) + } + let scale = max(size.width / snapshotView.bounds.width, size.height / snapshotView.bounds.height) + snapshotTransition.setPosition(layer: snapshotView.layer, position: center) + snapshotTransition.setTransform(layer: snapshotView.layer, transform: CATransform3DMakeScale(scale, scale, 1.0)) + + if let previewLayer = self.previewLayer { + previewLayer.dispose() + previewLayer.removeFromSuperlayer() + self.previewLayer = nil + } + if let cameraContainerView = self.cameraContainerView { + cameraContainerView.removeFromSuperview() + self.cameraContainerView = nil + } + } + case .camera: + if let cameraContainerView { + self.cameraContainerView = cameraContainerView + if cameraContainerView.superview !== self.extractedContainerView.contentView { + self.originalCameraTransform = CATransform3DIdentity + self.originalCameraFrame = cameraContainerView.bounds + self.clippingView.addSubview(cameraContainerView) + } + if let originalCameraFrame = self.originalCameraFrame { + let scale = max(size.width / originalCameraFrame.width, size.height / originalCameraFrame.height) + transition.setPosition(layer: cameraContainerView.layer, position: center) + transition.setTransform(layer: cameraContainerView.layer, transform: CATransform3DMakeScale(scale, scale, 1.0)) + } + } + if let imageView = self.imageView { + imageView.superview?.bringSubviewToFront(imageView) + imageView.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2, removeOnCompletion: false, completion: { _ in + imageView.removeFromSuperview() + }) + self.imageView = nil + } + if let videoLayer = self.videoLayer { + self.videoPlayer?.pause() + self.videoPlayer = nil + + videoLayer.superlayer?.addSublayer(videoLayer) + videoLayer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2, removeOnCompletion: false, completion: { _ in + videoLayer.removeFromSuperlayer() + }) + self.videoLayer = nil + } + if let snapshotView = self.snapshotView { + snapshotView.removeFromSuperview() + self.snapshotView = nil + } + if let previewLayer = self.previewLayer { + cameraContainerView?.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.25, completion: { _ in + previewLayer.removeFromSuperlayer() + }) + self.previewLayer = nil + } + case .empty: + if let cameraContainerView = self.cameraContainerView { + cameraContainerView.removeFromSuperview() + self.cameraContainerView = nil + } + if let snapshotView = self.snapshotView { + snapshotView.removeFromSuperview() + self.snapshotView = nil + } + var imageTransition = transition + if self.previewLayer == nil, let previewLayer = self.getPreviewLayer() { + imageTransition = .immediate + self.previewLayer = previewLayer + + self.clippingView.layer.addSublayer(previewLayer) + } + if let 
imageView = self.imageView { + imageView.superview?.bringSubviewToFront(imageView) + imageView.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2, removeOnCompletion: false, completion: { _ in + imageView.removeFromSuperview() + }) + self.imageView = nil + } + if let videoLayer = self.videoLayer { + self.videoPlayer?.pause() + self.videoPlayer = nil + + videoLayer.superlayer?.addSublayer(videoLayer) + videoLayer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2, removeOnCompletion: false, completion: { _ in + videoLayer.removeFromSuperlayer() + }) + self.videoLayer = nil + } + if let previewLayer = self.previewLayer { + previewLayer.update(size: size, transition: imageTransition) + imageTransition.setFrame(layer: previewLayer, frame: CGRect(origin: .zero, size: size)) + } + case let .image(image): + if let cameraContainerView = self.cameraContainerView { + cameraContainerView.removeFromSuperview() + self.cameraContainerView = nil + } + if let snapshotView = self.snapshotView { + snapshotView.removeFromSuperview() + self.snapshotView = nil + } + if let previewLayer = self.previewLayer { + previewLayer.dispose() + previewLayer.removeFromSuperlayer() + self.previewLayer = nil + } + + var imageTransition = transition + var imageView: UIImageView + if let current = self.imageView { + imageView = current + } else { + imageTransition = .immediate + imageView = UIImageView() + imageView.contentMode = .scaleAspectFill + self.imageView = imageView + self.clippingView.addSubview(imageView) + } + imageView.image = image + imageTransition.setFrame(view: imageView, frame: CGRect(origin: .zero, size: size)) + case let .video(asset, _, _, _): + if let cameraContainerView = self.cameraContainerView { + cameraContainerView.removeFromSuperview() + self.cameraContainerView = nil + } + var delayAppearance = false + if let snapshotView = self.snapshotView { + if snapshotView is UIImageView { + + } else { + delayAppearance = true + Queue.mainQueue().after(0.2, { + snapshotView.removeFromSuperview() + }) + } + self.snapshotView = nil + } + if let previewLayer = self.previewLayer { + previewLayer.dispose() + previewLayer.removeFromSuperlayer() + self.previewLayer = nil + } + + var imageTransition = transition + if self.videoLayer == nil { + imageTransition = .immediate + let playerItem = AVPlayerItem(asset: asset) + let player = AVPlayer(playerItem: playerItem) + player.isMuted = true + if self.didPlayToEndTimeObserver == nil { + self.didPlayToEndTimeObserver = NotificationCenter.default.addObserver(forName: NSNotification.Name.AVPlayerItemDidPlayToEndTime, object: player.currentItem, queue: nil, using: { [weak self] notification in + if let self { + self.didPlayToEnd() + } + }) + } + + let videoLayer = AVPlayerLayer(player: player) + videoLayer.videoGravity = .resizeAspectFill + + if delayAppearance { + videoLayer.opacity = 0.0 + Queue.mainQueue().after(0.15, { + videoLayer.opacity = 1.0 + }) + } + + self.videoLayer = videoLayer + self.videoPlayer = player + + self.clippingView.layer.addSublayer(videoLayer) + + player.playImmediately(atRate: 1.0) + } + + if let videoLayer = self.videoLayer { + imageTransition.setFrame(layer: videoLayer, frame: CGRect(origin: .zero, size: size)) + } + } + + let bounds = CGRect(origin: .zero, size: size) + transition.setFrame(view: self.extractedContainerView, frame: bounds) + transition.setFrame(view: self.extractedContainerView.contentView, frame: bounds) + transition.setBounds(view: self.clippingView, bounds: bounds) + transition.setPosition(view: self.clippingView, position: 
bounds.center) + self.extractedContainerView.contentRect = bounds + } + + func animateIn(from size: CGSize, transition: ComponentTransition) { + guard let cameraContainerView = self.cameraContainerView, let originalCameraFrame = self.originalCameraFrame else { + return + } + + self.extractedContainerView.contentView.clipsToBounds = false + self.clippingView.clipsToBounds = false + + let scale = size.width / originalCameraFrame.width + transition.animateScale(view: cameraContainerView, from: 1.0, to: scale) + transition.animatePosition(view: cameraContainerView, from: originalCameraFrame.center, to: cameraContainerView.center, completion: { _ in + self.extractedContainerView.contentView.clipsToBounds = true + self.clippingView.clipsToBounds = true + }) + } + + func animateOut(to size: CGSize, transition: ComponentTransition, completion: @escaping () -> Void) { + guard let cameraContainerView = self.cameraContainerView, let originalCameraFrame = self.originalCameraFrame else { + return + } + + self.extractedContainerView.contentView.clipsToBounds = false + self.clippingView.clipsToBounds = false + + let scale = max(self.frame.width / originalCameraFrame.width, self.frame.height / originalCameraFrame.height) + cameraContainerView.transform = CGAffineTransform.identity + transition.animateScale(view: cameraContainerView, from: scale, to: 1.0) + transition.setPosition(view: cameraContainerView, position: CGPoint(x: size.width / 2.0, y: size.height / 2.0), completion: { _ in + self.extractedContainerView.contentView.clipsToBounds = true + self.clippingView.clipsToBounds = true + completion() + }) + } + } + + private let context: AccountContext + private let collage: CameraCollage + private weak var camera: Camera? + private weak var cameraContainerView: UIView? + + private var cameraVideoSource: CameraVideoSource? + private var cameraVideoDisposable: Disposable? + + private let cameraVideoLayer = CameraVideoLayer() + private let cloneLayers: [MetalEngineSubjectLayer] + + private var itemViews: [Int64: ItemView] = [:] + + private var state: CameraCollage.State? + private var disposable: Disposable? + + private var reorderRecognizer: ReorderGestureRecognizer? + private var reorderingItem: (id: Int64, initialPosition: CGPoint, position: CGPoint)? + + private var tapRecognizer: UITapGestureRecognizer? + + private var validLayout: CGSize? + + var getOverlayViews: (() -> [UIView])? + + var requestGridReduce: (() -> Void)? + + var isEnabled: Bool = true + + init(context: AccountContext, collage: CameraCollage, camera: Camera?, cameraContainerView: UIView?) 
{ + self.context = context + self.collage = collage + self.cameraContainerView = cameraContainerView + + self.cloneLayers = (0 ..< 6).map { _ in MetalEngineSubjectLayer() } + self.cameraVideoLayer.blurredLayer.cloneLayers = self.cloneLayers + + super.init(frame: .zero) + + self.backgroundColor = .black + + self.disposable = (collage.state + |> deliverOnMainQueue).start(next: { [weak self] state in + guard let self else { + return + } + let previousState = self.state + self.state = state + if let size = self.validLayout { + var transition: ComponentTransition = .spring(duration: 0.3) + if let previousState, previousState.innerProgress != state.innerProgress { + transition = .immediate + } + var progressUpdated = false + if let previousState, previousState.progress != state.progress { + progressUpdated = true + } + self.updateLayout(size: size, transition: transition) + + if progressUpdated { + self.resetPlayback() + } + } + }) + + let reorderRecognizer = ReorderGestureRecognizer( + shouldBegin: { [weak self] point in + guard let self, let item = self.item(at: point) else { + return (allowed: false, requiresLongPress: false, item: nil) + } + + return (allowed: true, requiresLongPress: true, item: item) + }, + willBegin: { point in + }, + began: { [weak self] item in + guard let self else { + return + } + self.setReorderingItem(item: item) + }, + ended: { [weak self] in + guard let self else { + return + } + self.setReorderingItem(item: nil) + }, + moved: { [weak self] distance in + guard let self else { + return + } + self.moveReorderingItem(distance: distance) + }, + isActiveUpdated: { _ in + } + ) + reorderRecognizer.delegate = self + self.reorderRecognizer = reorderRecognizer + self.addGestureRecognizer(reorderRecognizer) + + let tapRecognizer = UITapGestureRecognizer(target: self, action: #selector(self.handleTap)) + self.tapRecognizer = tapRecognizer + self.addGestureRecognizer(tapRecognizer) + + if let cameraVideoSource = CameraVideoSource() { + self.cameraVideoLayer.video = cameraVideoSource.currentOutput + camera?.setPreviewOutput(cameraVideoSource.cameraVideoOutput) + self.cameraVideoSource = cameraVideoSource + + self.cameraVideoDisposable = cameraVideoSource.addOnUpdated { [weak self] in + guard let self, let videoSource = self.cameraVideoSource, self.isEnabled else { + return + } + self.cameraVideoLayer.video = videoSource.currentOutput + } + } + + let videoSize = CGSize(width: 160.0 * 2.0, height: 284.0 * 2.0) + self.cameraVideoLayer.frame = CGRect(origin: CGPoint(x: 0.0, y: 0.0), size: videoSize) + self.cameraVideoLayer.blurredLayer.frame = CGRect(origin: CGPoint(x: 0.0, y: 0.0), size: videoSize) + self.cameraVideoLayer.renderSpec = RenderLayerSpec(size: RenderSize(width: Int(videoSize.width), height: Int(videoSize.height)), edgeInset: 2) + } + + required init?(coder: NSCoder) { + preconditionFailure() + } + + deinit { + self.disposable?.dispose() + self.cameraVideoDisposable?.dispose() + self.camera?.setPreviewOutput(nil) + } + + func getPreviewLayer() -> PreviewLayer { + var contentLayer = MetalEngineSubjectLayer() + for layer in self.cloneLayers { + if layer.superlayer?.superlayer == nil { + contentLayer = layer + break + } + } + return PreviewLayer(contentLayer: contentLayer) + } + + @objc private func handleTap(_ gestureRecognizer: UITapGestureRecognizer) { + self.reorderRecognizer?.isEnabled = false + self.reorderRecognizer?.isEnabled = true + + let location = gestureRecognizer.location(in: self) + if let itemView = self.item(at: location) { + 
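+ // A tap on a captured item opens the same context menu as a long press, just without
+ // an accompanying gesture.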
itemView.requestContextAction() + } + } + + func gestureRecognizer(_ gestureRecognizer: UIGestureRecognizer, shouldRecognizeSimultaneouslyWith otherGestureRecognizer: UIGestureRecognizer) -> Bool { + if otherGestureRecognizer is UITapGestureRecognizer { + return true + } + return false + } + + func item(at point: CGPoint) -> ItemView? { + for (_, itemView) in self.itemViews { + if itemView.frame.contains(point), itemView.isReady { + return itemView + } + } + return nil + } + + func setReorderingItem(item: ItemView?) { + self.tapRecognizer?.isEnabled = false + self.tapRecognizer?.isEnabled = true + + var mappedItem: (Int64, ItemView)? + if let item { + for (id, visibleItem) in self.itemViews { + if visibleItem === item { + mappedItem = (id, visibleItem) + break + } + } + } + + if self.reorderingItem?.id != mappedItem?.0 { + if let (id, itemView) = mappedItem { + self.addSubview(itemView) + self.reorderingItem = (id, itemView.center, itemView.center) + } else { + self.reorderingItem = nil + } + if let size = self.validLayout { + self.updateLayout(size: size, transition: .spring(duration: 0.4)) + } + } + } + + func moveReorderingItem(distance: CGPoint) { + if let (id, initialPosition, _) = self.reorderingItem { + let targetPosition = CGPoint(x: initialPosition.x + distance.x, y: initialPosition.y + distance.y) + self.reorderingItem = (id, initialPosition, targetPosition) + if let size = self.validLayout { + self.updateLayout(size: size, transition: .immediate) + } + + if let visibleReorderingItem = self.itemViews[id] { + for (visibleId, visibleItem) in self.itemViews { + if visibleItem === visibleReorderingItem { + continue + } + if visibleItem.frame.contains(targetPosition) { + self.collage.moveItem(fromId: id, toId: visibleId) + break + } + } + } + } + } + + func resetPlayback() { + for (_, itemView) in self.itemViews { + itemView.resetPlayback() + } + } + + func maybeResetPlayback() { + var shouldResetPlayback = true + for (_, itemView) in self.itemViews { + if itemView.isPlaying { + shouldResetPlayback = false + break + } + } + if shouldResetPlayback { + self.resetPlayback() + } + } + + func animateIn(transition: ComponentTransition) { + guard let size = self.validLayout, let (_, cameraItemView) = self.itemViews.first(where: { $0.value.isCamera }) else { + return + } + + let targetFrame = cameraItemView.frame + let sourceFrame = CGRect(origin: .zero, size: size) + + cameraItemView.frame = sourceFrame + transition.setFrame(view: cameraItemView, frame: targetFrame) + cameraItemView.animateIn(from: sourceFrame.size, transition: transition) + } + + func animateOut(transition: ComponentTransition, completion: @escaping () -> Void) { + guard let size = self.validLayout else { + completion() + return + } + guard let (_, cameraItemView) = self.itemViews.first(where: { $0.value.isCamera }) else { + if let cameraContainerView = self.cameraContainerView { + cameraContainerView.transform = CGAffineTransform.identity + cameraContainerView.frame = CGRect(origin: .zero, size: size) + cameraContainerView.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2) + cameraContainerView.layer.animateScale(from: 0.02, to: 1.0, duration: 0.3, timingFunction: kCAMediaTimingFunctionSpring, completion: { _ in + completion() + }) + self.addSubview(cameraContainerView) + } + return + } + + cameraItemView.superview?.bringSubviewToFront(cameraItemView) + + let targetFrame = CGRect(origin: .zero, size: size) + cameraItemView.animateOut(to: targetFrame.size, transition: transition, completion: completion) + 
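+ // The frame change below runs in the same transition as the internal scale animation,
+ // so the camera appears to expand from its grid cell back to full screen.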
transition.setFrame(view: cameraItemView, frame: targetFrame) + } + + var presentController: ((ViewController) -> Void)? + func contextGesture(id: Int64, sourceView: ContextExtractedContentContainingView, gesture: ContextGesture?) { + let presentationData = self.context.sharedContext.currentPresentationData.with { $0 } + + var itemList: [ContextMenuItem] = [] + if self.collage.cameraIndex == nil { + itemList.append(.action(ContextMenuActionItem(text: "Retake", icon: { theme in + return generateTintedImage(image: UIImage(bundleImageName: "Chat/Context Menu/Camera"), color: theme.contextMenu.primaryColor) + }, action: { [weak self] _, f in + f(.default) + + self?.collage.retakeItem(id: id) + }))) + } + + if self.itemViews.count > 2 { + itemList.append(.separator) + + itemList.append(.action(ContextMenuActionItem(text: "Delete", textColor: .destructive, icon: { theme in + return generateTintedImage(image: UIImage(bundleImageName: "Chat/Context Menu/Delete"), color: theme.contextMenu.destructiveColor) + }, action: { [weak self] _, f in + f(.dismissWithoutContent) + + self?.collage.deleteItem(id: id) + self?.requestGridReduce?() + }))) + } + + guard !itemList.isEmpty else { + return + } + + let items = ContextController.Items(content: .list(itemList), tip: .collageReordering) + let controller = ContextController( + presentationData: presentationData.withUpdated(theme: defaultDarkColorPresentationTheme), + source: .extracted(CollageContextExtractedContentSource(contentView: sourceView)), + items: .single(items), + recognizer: nil, + gesture: gesture + ) + controller.getOverlayViews = self.getOverlayViews + self.presentController?(controller) + } + + func updateLayout(size: CGSize, transition: ComponentTransition) { + self.validLayout = size + guard let state = self.state else { + return + } + + var validIds = Set() + + let rowHeight: CGFloat = ceil(size.height / CGFloat(state.rows.count)) + + var previousItemFrame: CGRect? 
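+ // Grid math: every row receives an equal share of the height
+ // (rowHeight = ceil(size.height / rowCount)) and each row divides the width evenly
+ // between its columns, e.g. a 2 + 1 grid on a 1080x1920 canvas produces two
+ // 540x960 cells above a single 1080x960 cell.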
+ + var itemFrame: CGRect = .zero + for row in state.rows { + let columnWidth: CGFloat = floor(size.width / CGFloat(row.items.count)) + itemFrame = CGRect(origin: itemFrame.origin, size: CGSize(width: columnWidth, height: rowHeight)) + + for item in row.items { + let id = item.uniqueId + validIds.insert(id) + + var effectiveItemFrame = itemFrame + let itemScale: CGFloat + let itemCornerRadius: CGFloat + if let reorderingItem = self.reorderingItem, item.uniqueId == reorderingItem.id { + itemScale = 0.9 + itemCornerRadius = 12.0 + effectiveItemFrame = itemFrame.size.centered(around: reorderingItem.position) + } else { + itemScale = 1.0 + itemCornerRadius = 0.0 + } + + var itemTransition = transition + let itemView: ItemView + if let current = self.itemViews[id] { + itemView = current + previousItemFrame = itemFrame + } else { + itemView = ItemView(frame: effectiveItemFrame) + itemView.clipsToBounds = true + itemView.getPreviewLayer = { [weak self] in + return self?.getPreviewLayer() + } + itemView.didPlayToEnd = { [weak self] in + self?.maybeResetPlayback() + } + self.insertSubview(itemView, at: 0) + self.itemViews[id] = itemView + + if !transition.animation.isImmediate, let previousItemFrame { + itemView.frame = previousItemFrame + } else { + itemTransition = .immediate + } + } + itemView.update(item: item, size: effectiveItemFrame.size, cameraContainerView: self.cameraContainerView, transition: itemTransition) + itemView.contextAction = { [weak self] id, sourceView, gesture in + guard let self else { + return + } + self.contextGesture(id: id, sourceView: sourceView, gesture: gesture) + } + + itemTransition.setBounds(view: itemView, bounds: CGRect(origin: .zero, size: effectiveItemFrame.size)) + itemTransition.setPosition(view: itemView, position: effectiveItemFrame.center) + itemTransition.setScale(view: itemView, scale: itemScale) + + if !itemTransition.animation.isImmediate { + let cornerTransition: ComponentTransition + if itemCornerRadius > 0.0 { + cornerTransition = ComponentTransition(animation: .curve(duration: 0.1, curve: .linear)) + } else { + cornerTransition = .easeInOut(duration: 0.4) + } + cornerTransition.setCornerRadius(layer: itemView.layer, cornerRadius: itemCornerRadius) + } else { + itemTransition.setCornerRadius(layer: itemView.layer, cornerRadius: itemCornerRadius) + } + + itemFrame.origin.x += columnWidth + } + itemFrame.origin.x = 0.0 + itemFrame.origin.y += rowHeight + } + + var removeIds: [Int64] = [] + for (id, itemView) in self.itemViews { + if !validIds.contains(id) { + removeIds.append(id) + transition.setAlpha(view: itemView, alpha: 0.0, completion: { [weak itemView] _ in + itemView?.removeFromSuperview() + }) + } + } + for id in removeIds { + self.itemViews.removeValue(forKey: id) + } + } +} + +private final class ReorderGestureRecognizer: UIGestureRecognizer { + private let shouldBegin: (CGPoint) -> (allowed: Bool, requiresLongPress: Bool, item: CameraCollageView.ItemView?) + private let willBegin: (CGPoint) -> Void + private let began: (CameraCollageView.ItemView) -> Void + private let ended: () -> Void + private let moved: (CGPoint) -> Void + private let isActiveUpdated: (Bool) -> Void + + private var initialLocation: CGPoint? + private var longTapTimer: SwiftSignalKit.Timer? + private var longPressTimer: SwiftSignalKit.Timer? + + private var itemView: CameraCollageView.ItemView? 
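+ // Two timers drive reordering: longTapTimer (0.25s) fires willBegin as an early visual
+ // cue, and longPressTimer (0.6s) moves the recognizer to .began and lifts the item.
+ // Drifting more than ~3pt before the long press fires cancels both timers and fails
+ // the recognizer (see touchesMoved below).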
+ + public init(shouldBegin: @escaping (CGPoint) -> (allowed: Bool, requiresLongPress: Bool, item: CameraCollageView.ItemView?), willBegin: @escaping (CGPoint) -> Void, began: @escaping (CameraCollageView.ItemView) -> Void, ended: @escaping () -> Void, moved: @escaping (CGPoint) -> Void, isActiveUpdated: @escaping (Bool) -> Void) { + self.shouldBegin = shouldBegin + self.willBegin = willBegin + self.began = began + self.ended = ended + self.moved = moved + self.isActiveUpdated = isActiveUpdated + + super.init(target: nil, action: nil) + } + + deinit { + self.longTapTimer?.invalidate() + self.longPressTimer?.invalidate() + } + + private func startLongTapTimer() { + self.longTapTimer?.invalidate() + let longTapTimer = SwiftSignalKit.Timer(timeout: 0.25, repeat: false, completion: { [weak self] in + self?.longTapTimerFired() + }, queue: Queue.mainQueue()) + self.longTapTimer = longTapTimer + longTapTimer.start() + } + + private func stopLongTapTimer() { + self.itemView = nil + self.longTapTimer?.invalidate() + self.longTapTimer = nil + } + + private func startLongPressTimer() { + self.longPressTimer?.invalidate() + let longPressTimer = SwiftSignalKit.Timer(timeout: 0.6, repeat: false, completion: { [weak self] in + self?.longPressTimerFired() + }, queue: Queue.mainQueue()) + self.longPressTimer = longPressTimer + longPressTimer.start() + } + + private func stopLongPressTimer() { + self.itemView = nil + self.longPressTimer?.invalidate() + self.longPressTimer = nil + } + + override public func reset() { + super.reset() + + self.itemView = nil + self.stopLongTapTimer() + self.stopLongPressTimer() + self.initialLocation = nil + + self.isActiveUpdated(false) + } + + private func longTapTimerFired() { + guard let location = self.initialLocation else { + return + } + + self.longTapTimer?.invalidate() + self.longTapTimer = nil + + self.willBegin(location) + } + + private func longPressTimerFired() { + guard let _ = self.initialLocation else { + return + } + + self.isActiveUpdated(true) + self.state = .began + self.longPressTimer?.invalidate() + self.longPressTimer = nil + self.longTapTimer?.invalidate() + self.longTapTimer = nil + if let itemView = self.itemView { + self.began(itemView) + } + self.isActiveUpdated(true) + } + + override public func touchesBegan(_ touches: Set, with event: UIEvent) { + super.touchesBegan(touches, with: event) + + if self.numberOfTouches > 1 { + self.isActiveUpdated(false) + self.state = .failed + self.ended() + return + } + + if self.state == .possible { + if let location = touches.first?.location(in: self.view) { + let (allowed, requiresLongPress, itemView) = self.shouldBegin(location) + if allowed { + self.isActiveUpdated(true) + + self.itemView = itemView + self.initialLocation = location + if requiresLongPress { + self.startLongTapTimer() + self.startLongPressTimer() + } else { + self.state = .began + if let itemView = self.itemView { + self.began(itemView) + } + } + } else { + self.isActiveUpdated(false) + self.state = .failed + } + } else { + self.isActiveUpdated(false) + self.state = .failed + } + } + } + + override public func touchesEnded(_ touches: Set, with event: UIEvent) { + super.touchesEnded(touches, with: event) + + self.initialLocation = nil + + self.stopLongTapTimer() + if self.longPressTimer != nil { + self.stopLongPressTimer() + self.isActiveUpdated(false) + self.state = .failed + } + if self.state == .began || self.state == .changed { + self.isActiveUpdated(false) + self.ended() + self.state = .failed + } + } + + override public func 
touchesCancelled(_ touches: Set<UITouch>, with event: UIEvent) { + super.touchesCancelled(touches, with: event) + + self.initialLocation = nil + + self.stopLongTapTimer() + if self.longPressTimer != nil { + self.isActiveUpdated(false) + self.stopLongPressTimer() + self.state = .failed + } + if self.state == .began || self.state == .changed { + self.isActiveUpdated(false) + self.ended() + self.state = .failed + } + } + + override public func touchesMoved(_ touches: Set<UITouch>, with event: UIEvent) { + super.touchesMoved(touches, with: event) + + if (self.state == .began || self.state == .changed), let initialLocation = self.initialLocation, let location = touches.first?.location(in: self.view) { + self.state = .changed + let offset = CGPoint(x: location.x - initialLocation.x, y: location.y - initialLocation.y) + self.moved(offset) + } else if let touch = touches.first, let initialTapLocation = self.initialLocation, self.longPressTimer != nil { + let touchLocation = touch.location(in: self.view) + let dX = touchLocation.x - initialTapLocation.x + let dY = touchLocation.y - initialTapLocation.y + + if dX * dX + dY * dY > 3.0 * 3.0 { + self.stopLongTapTimer() + self.stopLongPressTimer() + self.initialLocation = nil + self.isActiveUpdated(false) + self.state = .failed + } + } + } +} + +private final class CollageContextExtractedContentSource: ContextExtractedContentSource { + let keepInPlace: Bool = false + let ignoreContentTouches: Bool = false + let blurBackground: Bool = true + + private let contentView: ContextExtractedContentContainingView + + init(contentView: ContextExtractedContentContainingView) { + self.contentView = contentView + } + + func takeView() -> ContextControllerTakeViewInfo? { + return ContextControllerTakeViewInfo(containingItem: .view(self.contentView), contentAreaInScreenSpace: UIScreen.main.bounds) + } + + func putBack() -> ContextControllerPutBackViewInfo? { + return ContextControllerPutBackViewInfo(contentAreaInScreenSpace: UIScreen.main.bounds) + } +} diff --git a/submodules/TelegramUI/Components/CameraScreen/Sources/CameraMetalLibrary.swift b/submodules/TelegramUI/Components/CameraScreen/Sources/CameraMetalLibrary.swift new file mode 100644 index 0000000000..87edbe00bc --- /dev/null +++ b/submodules/TelegramUI/Components/CameraScreen/Sources/CameraMetalLibrary.swift @@ -0,0 +1,28 @@ +import Foundation +import UIKit +import Display +import MetalKit + +private final class BundleMarker: NSObject { +} + +private var metalLibraryValue: MTLLibrary? +func metalLibrary(device: MTLDevice) -> MTLLibrary? { + if let metalLibraryValue { + return metalLibraryValue + } + + let mainBundle = Bundle(for: BundleMarker.self) + guard let path = mainBundle.path(forResource: "CameraScreenBundle", ofType: "bundle") else { + return nil + } + guard let bundle = Bundle(path: path) else { + return nil + } + guard let library = try?
device.makeDefaultLibrary(bundle: bundle) else { + return nil + } + + metalLibraryValue = library + return library +} diff --git a/submodules/TelegramUI/Components/CameraScreen/Sources/CameraScreen.swift b/submodules/TelegramUI/Components/CameraScreen/Sources/CameraScreen.swift index 8c63e8750b..7ce78fa4e7 100644 --- a/submodules/TelegramUI/Components/CameraScreen/Sources/CameraScreen.swift +++ b/submodules/TelegramUI/Components/CameraScreen/Sources/CameraScreen.swift @@ -22,8 +22,22 @@ import CameraButtonComponent import VolumeButtons import TelegramNotices import DeviceAccess +import MediaAssetsContext +import UndoUI +import MetalEngine let videoRedColor = UIColor(rgb: 0xff3b30) +let collageGrids: [Camera.CollageGrid] = [ + Camera.CollageGrid(rows: [Camera.CollageGrid.Row(columns: 1), Camera.CollageGrid.Row(columns: 1)]), + Camera.CollageGrid(rows: [Camera.CollageGrid.Row(columns: 2)]), + Camera.CollageGrid(rows: [Camera.CollageGrid.Row(columns: 2), Camera.CollageGrid.Row(columns: 1)]), + Camera.CollageGrid(rows: [Camera.CollageGrid.Row(columns: 1), Camera.CollageGrid.Row(columns: 2)]), + Camera.CollageGrid(rows: [Camera.CollageGrid.Row(columns: 1), Camera.CollageGrid.Row(columns: 1), Camera.CollageGrid.Row(columns: 1)]), + Camera.CollageGrid(rows: [Camera.CollageGrid.Row(columns: 3)]), + Camera.CollageGrid(rows: [Camera.CollageGrid.Row(columns: 1), Camera.CollageGrid.Row(columns: 2), Camera.CollageGrid.Row(columns: 2)]), + Camera.CollageGrid(rows: [Camera.CollageGrid.Row(columns: 2), Camera.CollageGrid.Row(columns: 2), Camera.CollageGrid.Row(columns: 1)]), + Camera.CollageGrid(rows: [Camera.CollageGrid.Row(columns: 2), Camera.CollageGrid.Row(columns: 2), Camera.CollageGrid.Row(columns: 2)]) +] enum CameraMode: Equatable { case photo @@ -62,37 +76,52 @@ struct CameraState: Equatable { let recording: Recording let duration: Double let isDualCameraEnabled: Bool + let isCollageEnabled: Bool + let collageGrid: Camera.CollageGrid + let collageProgress: Float func updatedMode(_ mode: CameraMode) -> CameraState { - return CameraState(mode: mode, position: self.position, flashMode: self.flashMode, flashModeDidChange: self.flashModeDidChange, flashTint: self.flashTint, flashTintSize: self.flashTintSize, recording: self.recording, duration: self.duration, isDualCameraEnabled: self.isDualCameraEnabled) + return CameraState(mode: mode, position: self.position, flashMode: self.flashMode, flashModeDidChange: self.flashModeDidChange, flashTint: self.flashTint, flashTintSize: self.flashTintSize, recording: self.recording, duration: self.duration, isDualCameraEnabled: self.isDualCameraEnabled, isCollageEnabled: self.isCollageEnabled, collageGrid: self.collageGrid, collageProgress: self.collageProgress) } func updatedPosition(_ position: Camera.Position) -> CameraState { - return CameraState(mode: self.mode, position: position, flashMode: self.flashMode, flashModeDidChange: self.flashModeDidChange, flashTint: self.flashTint, flashTintSize: self.flashTintSize, recording: self.recording, duration: self.duration, isDualCameraEnabled: self.isDualCameraEnabled) + return CameraState(mode: self.mode, position: position, flashMode: self.flashMode, flashModeDidChange: self.flashModeDidChange, flashTint: self.flashTint, flashTintSize: self.flashTintSize, recording: self.recording, duration: self.duration, isDualCameraEnabled: self.isDualCameraEnabled, isCollageEnabled: self.isCollageEnabled, collageGrid: self.collageGrid, collageProgress: self.collageProgress) } func updatedFlashMode(_ flashMode: Camera.FlashMode) 
-> CameraState { - return CameraState(mode: self.mode, position: self.position, flashMode: flashMode, flashModeDidChange: self.flashMode != flashMode, flashTint: self.flashTint, flashTintSize: self.flashTintSize, recording: self.recording, duration: self.duration, isDualCameraEnabled: self.isDualCameraEnabled) + return CameraState(mode: self.mode, position: self.position, flashMode: flashMode, flashModeDidChange: self.flashMode != flashMode, flashTint: self.flashTint, flashTintSize: self.flashTintSize, recording: self.recording, duration: self.duration, isDualCameraEnabled: self.isDualCameraEnabled, isCollageEnabled: self.isCollageEnabled, collageGrid: self.collageGrid, collageProgress: self.collageProgress) } func updatedFlashTint(_ flashTint: FlashTint) -> CameraState { - return CameraState(mode: self.mode, position: self.position, flashMode: self.flashMode, flashModeDidChange: self.flashModeDidChange, flashTint: flashTint, flashTintSize: self.flashTintSize, recording: self.recording, duration: self.duration, isDualCameraEnabled: self.isDualCameraEnabled) + return CameraState(mode: self.mode, position: self.position, flashMode: self.flashMode, flashModeDidChange: self.flashModeDidChange, flashTint: flashTint, flashTintSize: self.flashTintSize, recording: self.recording, duration: self.duration, isDualCameraEnabled: self.isDualCameraEnabled, isCollageEnabled: self.isCollageEnabled, collageGrid: self.collageGrid, collageProgress: self.collageProgress) } func updatedFlashTintSize(_ size: CGFloat) -> CameraState { - return CameraState(mode: self.mode, position: self.position, flashMode: self.flashMode, flashModeDidChange: self.flashModeDidChange, flashTint: self.flashTint, flashTintSize: size, recording: self.recording, duration: self.duration, isDualCameraEnabled: self.isDualCameraEnabled) + return CameraState(mode: self.mode, position: self.position, flashMode: self.flashMode, flashModeDidChange: self.flashModeDidChange, flashTint: self.flashTint, flashTintSize: size, recording: self.recording, duration: self.duration, isDualCameraEnabled: self.isDualCameraEnabled, isCollageEnabled: self.isCollageEnabled, collageGrid: self.collageGrid, collageProgress: self.collageProgress) } func updatedRecording(_ recording: Recording) -> CameraState { - return CameraState(mode: self.mode, position: self.position, flashMode: self.flashMode, flashModeDidChange: self.flashModeDidChange, flashTint: self.flashTint, flashTintSize: self.flashTintSize, recording: recording, duration: self.duration, isDualCameraEnabled: self.isDualCameraEnabled) + return CameraState(mode: self.mode, position: self.position, flashMode: self.flashMode, flashModeDidChange: self.flashModeDidChange, flashTint: self.flashTint, flashTintSize: self.flashTintSize, recording: recording, duration: self.duration, isDualCameraEnabled: self.isDualCameraEnabled, isCollageEnabled: self.isCollageEnabled, collageGrid: self.collageGrid, collageProgress: self.collageProgress) } func updatedDuration(_ duration: Double) -> CameraState { - return CameraState(mode: self.mode, position: self.position, flashMode: self.flashMode, flashModeDidChange: self.flashModeDidChange, flashTint: self.flashTint, flashTintSize: self.flashTintSize, recording: self.recording, duration: duration, isDualCameraEnabled: self.isDualCameraEnabled) + return CameraState(mode: self.mode, position: self.position, flashMode: self.flashMode, flashModeDidChange: self.flashModeDidChange, flashTint: self.flashTint, flashTintSize: self.flashTintSize, recording: self.recording, 
duration: duration, isDualCameraEnabled: self.isDualCameraEnabled, isCollageEnabled: self.isCollageEnabled, collageGrid: self.collageGrid, collageProgress: self.collageProgress) } func updatedIsDualCameraEnabled(_ isDualCameraEnabled: Bool) -> CameraState { - return CameraState(mode: self.mode, position: self.position, flashMode: self.flashMode, flashModeDidChange: self.flashModeDidChange, flashTint: self.flashTint, flashTintSize: self.flashTintSize, recording: self.recording, duration: self.duration, isDualCameraEnabled: isDualCameraEnabled) + return CameraState(mode: self.mode, position: self.position, flashMode: self.flashMode, flashModeDidChange: self.flashModeDidChange, flashTint: self.flashTint, flashTintSize: self.flashTintSize, recording: self.recording, duration: self.duration, isDualCameraEnabled: isDualCameraEnabled, isCollageEnabled: self.isCollageEnabled, collageGrid: self.collageGrid, collageProgress: self.collageProgress) + } + + func updatedIsCollageEnabled(_ isCollageEnabled: Bool) -> CameraState { + return CameraState(mode: self.mode, position: self.position, flashMode: self.flashMode, flashModeDidChange: self.flashModeDidChange, flashTint: self.flashTint, flashTintSize: self.flashTintSize, recording: self.recording, duration: self.duration, isDualCameraEnabled: self.isDualCameraEnabled, isCollageEnabled: isCollageEnabled, collageGrid: self.collageGrid, collageProgress: self.collageProgress) + } + + func updatedCollageGrid(_ collageGrid: Camera.CollageGrid) -> CameraState { + return CameraState(mode: self.mode, position: self.position, flashMode: self.flashMode, flashModeDidChange: self.flashModeDidChange, flashTint: self.flashTint, flashTintSize: self.flashTintSize, recording: self.recording, duration: self.duration, isDualCameraEnabled: self.isDualCameraEnabled, isCollageEnabled: self.isCollageEnabled, collageGrid: collageGrid, collageProgress: self.collageProgress) + } + + func updatedCollageProgress(_ collageProgress: Float) -> CameraState { + return CameraState(mode: self.mode, position: self.position, flashMode: self.flashMode, flashModeDidChange: self.flashModeDidChange, flashTint: self.flashTint, flashTintSize: self.flashTintSize, recording: self.recording, duration: self.duration, isDualCameraEnabled: self.isDualCameraEnabled, isCollageEnabled: self.isCollageEnabled, collageGrid: self.collageGrid, collageProgress: collageProgress) } } @@ -109,6 +138,9 @@ private let captureControlsTag = GenericComponentViewTag() private let modeControlTag = GenericComponentViewTag() private let galleryButtonTag = GenericComponentViewTag() private let dualButtonTag = GenericComponentViewTag() +private let collageButtonTag = GenericComponentViewTag() +private let collageCarouselTag = GenericComponentViewTag() +private let disableCollageButtonTag = GenericComponentViewTag() private final class CameraScreenComponent: CombinedComponent { typealias EnvironmentType = ViewControllerComponentContainer.Environment @@ -123,10 +155,11 @@ private final class CameraScreenComponent: CombinedComponent { let animateFlipAction: ActionSlot let animateShutter: () -> Void let toggleCameraPositionAction: ActionSlot - let getController: () -> CameraScreen? + let dismissCollageSelection: ActionSlot + let getController: () -> CameraScreenImpl? 
let present: (ViewController) -> Void let push: (ViewController) -> Void - let completion: ActionSlot> + let completion: ActionSlot> init( context: AccountContext, @@ -139,10 +172,11 @@ private final class CameraScreenComponent: CombinedComponent { animateFlipAction: ActionSlot, animateShutter: @escaping () -> Void, toggleCameraPositionAction: ActionSlot, - getController: @escaping () -> CameraScreen?, + dismissCollageSelection: ActionSlot, + getController: @escaping () -> CameraScreenImpl?, present: @escaping (ViewController) -> Void, push: @escaping (ViewController) -> Void, - completion: ActionSlot> + completion: ActionSlot> ) { self.context = context self.cameraState = cameraState @@ -154,6 +188,7 @@ private final class CameraScreenComponent: CombinedComponent { self.animateFlipAction = animateFlipAction self.animateShutter = animateShutter self.toggleCameraPositionAction = toggleCameraPositionAction + self.dismissCollageSelection = dismissCollageSelection self.getController = getController self.present = present self.push = push @@ -222,11 +257,10 @@ private final class CameraScreenComponent: CombinedComponent { private let context: AccountContext private let present: (ViewController) -> Void - private let completion: ActionSlot> + private let completion: ActionSlot> private let animateShutter: () -> Void private let animateFlipAction: ActionSlot - private let toggleCameraPositionAction: ActionSlot - private let getController: () -> CameraScreen? + private let getController: () -> CameraScreenImpl? private var resultDisposable = MetaDisposable() @@ -244,40 +278,46 @@ private final class CameraScreenComponent: CombinedComponent { var displayingFlashTint = false var previousFlashMode: Camera.FlashMode? + var displayingCollageSelection = false + private let hapticFeedback = HapticFeedback() init( context: AccountContext, present: @escaping (ViewController) -> Void, - completion: ActionSlot>, + completion: ActionSlot>, animateShutter: @escaping () -> Void = {}, animateFlipAction: ActionSlot, toggleCameraPositionAction: ActionSlot, - getController: @escaping () -> CameraScreen? = { - return nil - } + dismissCollageSelection: ActionSlot, + getController: @escaping () -> CameraScreenImpl? 
) { self.context = context self.present = present self.completion = completion self.animateShutter = animateShutter self.animateFlipAction = animateFlipAction - self.toggleCameraPositionAction = toggleCameraPositionAction self.getController = getController super.init() - - Queue.concurrentDefaultQueue().async { - self.setupRecentAssetSubscription() - } - + self.setupVolumeButtonsHandler() - self.toggleCameraPositionAction.connect({ [weak self] in + toggleCameraPositionAction.connect({ [weak self] in if let self { self.togglePosition(self.animateFlipAction) } }) + + dismissCollageSelection.connect({ [weak self] in + if let self { + self.dismissCollageSelection() + } + }) + + Queue.concurrentDefaultQueue().async { + self.setupRecentAssetSubscription() + } } deinit { @@ -387,7 +427,6 @@ private final class CameraScreenComponent: CombinedComponent { if case .none = controller.cameraState.recording { switch controller.cameraState.mode { case .photo: - self.animateShutter() self.takePhoto() case .video: self.startVideoRecording(pressing: false) @@ -521,6 +560,52 @@ private final class CameraScreenComponent: CombinedComponent { self.hapticFeedback.impact(.light) } + func dismissCollageSelection() { + self.displayingCollageSelection = false + self.updated(transition: .spring(duration: 0.3)) + } + + func toggleCollageCamera() { + guard let controller = self.getController(), let _ = controller.camera else { + return + } + + controller.node.dismissAllTooltips() + + if controller.cameraState.isCollageEnabled { + self.displayingCollageSelection = !self.displayingCollageSelection + self.updated(transition: .spring(duration: 0.3)) + } else { + let isEnabled = !controller.cameraState.isCollageEnabled + self.displayingCollageSelection = isEnabled + controller.updateCameraState({ $0.updatedIsCollageEnabled(isEnabled).updatedCollageProgress(0.0) }, transition: .spring(duration: 0.3)) + } + self.hapticFeedback.impact(.light) + } + + func disableCollageCamera() { + guard let controller = self.getController(), let _ = controller.camera else { + return + } + + self.displayingCollageSelection = false + controller.updateCameraState({ $0.updatedIsCollageEnabled(false).updatedCollageProgress(0.0) }, transition: .spring(duration: 0.3)) + + self.hapticFeedback.impact(.light) + } + + func updateCollageGrid(_ grid: Camera.CollageGrid) { + guard let controller = self.getController(), let _ = controller.camera else { + return + } + + self.displayingCollageSelection = false + + controller.updateCameraState({ $0.updatedCollageGrid(grid) }, transition: .spring(duration: 0.3)) + + self.hapticFeedback.impact(.light) + } + func updateSwipeHint(_ hint: CaptureControlsComponent.SwipeHint) { guard hint != self.swipeHint else { return @@ -531,24 +616,42 @@ private final class CameraScreenComponent: CombinedComponent { var isTakingPhoto = false func takePhoto() { - guard let controller = self.getController(), let camera = controller.camera else { + guard let controller = self.getController(), let camera = controller.camera, let cameraState = self.cameraState else { return } - guard !self.isTakingPhoto else { + guard !(self.isTakingPhoto && !cameraState.isCollageEnabled) else { return } + + self.animateShutter() + self.isTakingPhoto = true controller.node.dismissAllTooltips() - let takePhoto = { + if self.displayingCollageSelection { + self.displayingCollageSelection = false + self.updated(transition: .spring(duration: 0.3)) + + //TODO:localize + let presentationData = self.context.sharedContext.currentPresentationData.with { $0 } + 
let tooltipController = UndoOverlayController(presentationData: presentationData, content: .info(title: nil, text: "Tap a tile to delete or reorder it.", timeout: 3.0, customUndoText: nil), elevatedLayout: false, action: { _ in + return true + }) + controller.present(tooltipController, in: .current) + } + + let takePhoto = { [weak self] in + guard let self else { + return + } let takePhoto = camera.takePhoto() - |> mapToSignal { value -> Signal in + |> mapToSignal { value -> Signal in switch value { case .began: return .single(.pendingImage) case let .finished(image, additionalImage, _): - return .single(.image(CameraScreen.Result.Image(image: image, additionalImage: additionalImage, additionalImagePosition: .topRight))) + return .single(.image(CameraScreenImpl.Result.Image(image: image, additionalImage: additionalImage, additionalImagePosition: .topRight))) case .failed: return .complete() } @@ -648,9 +751,14 @@ private final class CameraScreenComponent: CombinedComponent { guard case .none = controller.cameraState.recording else { return } - + controller.node.dismissAllTooltips() + if self.displayingCollageSelection { + self.displayingCollageSelection = false + self.updated(transition: .spring(duration: 0.3)) + } + let startRecording = { self.resultDisposable.set((camera.startRecording() |> deliverOnMainQueue).start(next: { [weak self] recordingData in @@ -665,11 +773,13 @@ private final class CameraScreenComponent: CombinedComponent { controller.updateCameraState({ $0.updatedRecording(pressing ? .holding : .handsFree).updatedDuration(0.0) }, transition: .spring(duration: 0.4)) + self.animateShutter() + startRecording() } func stopVideoRecording() { - guard let controller = self.getController(), let camera = controller.camera else { + guard let controller = self.getController(), let camera = controller.camera, let cameraState = self.cameraState else { return } @@ -677,7 +787,7 @@ private final class CameraScreenComponent: CombinedComponent { |> deliverOnMainQueue).start(next: { [weak self] result in if let self, case let .finished(mainResult, additionalResult, duration, positionChangeTimestamps, _) = result { self.completion.invoke(.single( - .video(CameraScreen.Result.Video( + .video(CameraScreenImpl.Result.Video( videoPath: mainResult.path, coverImage: mainResult.thumbnail, mirror: mainResult.isMirrored, @@ -691,11 +801,13 @@ private final class CameraScreenComponent: CombinedComponent { )) } })) - self.isTransitioning = true - Queue.mainQueue().after(1.25, { - self.isTransitioning = false - self.updated(transition: .immediate) - }) + if !cameraState.isCollageEnabled { + self.isTransitioning = true + Queue.mainQueue().after(1.25, { + self.isTransitioning = false + self.updated(transition: .immediate) + }) + } controller.updateCameraState({ $0.updatedRecording(.none).updatedDuration(0.0) }, transition: .spring(duration: 0.4)) @@ -722,7 +834,16 @@ private final class CameraScreenComponent: CombinedComponent { } func makeState() -> State { - return State(context: self.context, present: self.present, completion: self.completion, animateShutter: self.animateShutter, animateFlipAction: self.animateFlipAction, toggleCameraPositionAction: self.toggleCameraPositionAction, getController: self.getController) + return State( + context: self.context, + present: self.present, + completion: self.completion, + animateShutter: self.animateShutter, + animateFlipAction: self.animateFlipAction, + toggleCameraPositionAction: self.toggleCameraPositionAction, + dismissCollageSelection: 
self.dismissCollageSelection, + getController: self.getController + ) } static var body: Body { @@ -734,6 +855,9 @@ private final class CameraScreenComponent: CombinedComponent { let flashButton = Child(CameraButton.self) let flipButton = Child(CameraButton.self) let dualButton = Child(CameraButton.self) + let collageButton = Child(CameraButton.self) + let disableCollageButton = Child(CameraButton.self) + let collageCarousel = Child(CollageIconCarouselComponent.self) let modeControl = Child(ModeComponent.self) let hintLabel = Child(HintLabelComponent.self) let flashTintControl = Child(FlashTintControlComponent.self) @@ -752,7 +876,7 @@ private final class CameraScreenComponent: CombinedComponent { state.volumeButtonsListenerActive = component.hasAppeared && component.isVisible let isSticker: Bool - if let controller = controller() as? CameraScreen, case .sticker = controller.mode { + if let controller = controller() as? CameraScreenImpl, case .sticker = controller.mode { isSticker = true } else { isSticker = false @@ -887,13 +1011,14 @@ private final class CameraScreenComponent: CombinedComponent { captureControlsAvailableSize = availableSize } - let animateShutter = component.animateShutter let captureControls = captureControls.update( component: CaptureControlsComponent( isTablet: isTablet, isSticker: isSticker, hasAppeared: component.hasAppeared && hasAllRequiredAccess, hasAccess: hasAllRequiredAccess, + hideControls: component.cameraState.collageProgress > 1.0 - .ulpOfOne, + collageProgress: component.cameraState.collageProgress, tintColor: controlsTintColor, shutterState: shutterState, lastGalleryAsset: state.lastGalleryAsset, @@ -905,7 +1030,6 @@ private final class CameraScreenComponent: CombinedComponent { } if case .none = cameraState.recording { if cameraState.mode == .photo { - animateShutter() state.takePhoto() } else if cameraState.mode == .video { state.startVideoRecording(pressing: false) @@ -915,7 +1039,7 @@ private final class CameraScreenComponent: CombinedComponent { } }, shutterPressed: { [weak state] in - guard let state, let cameraState = state.cameraState, case .none = cameraState.recording else { + guard let state, let cameraState = state.cameraState, case .none = cameraState.recording, cameraState.collageProgress < 1.0 - .ulpOfOne else { return } state.startVideoRecording(pressing: true) @@ -939,7 +1063,7 @@ private final class CameraScreenComponent: CombinedComponent { state.togglePosition(animateFlipAction) }, galleryTapped: { [weak state] in - guard let controller = environment.controller() as? CameraScreen else { + guard let controller = environment.controller() as? CameraScreenImpl else { return } state?.requestMediaAccess { @@ -975,33 +1099,36 @@ private final class CameraScreenComponent: CombinedComponent { var flashButtonPosition: CGPoint? 
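Several checks in this body compare collageProgress against 1.0 - .ulpOfOne, for example the hideControls flag and the shutterPressed guard above: because the progress is a Float accumulated tile by tile, "collage complete" is tested as being within one ulp of 1.0 rather than as exact equality. A small self-contained illustration of the idiom (the loop values are illustrative, not from the patch):

// Accumulated Float fractions rarely land exactly on 1.0.
var progress: Float = 0.0
for _ in 0..<10 {
    progress += 0.1
}
print(progress == 1.0)            // false: the sum is approximately 1.0000001
print(progress > 1.0 - .ulpOfOne) // true: treated as "collage complete"
print(progress < 1.0 - .ulpOfOne) // false: the "still capturing" controls stay hidden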
let topControlInset: CGFloat = 20.0 if case .none = component.cameraState.recording, !state.isTransitioning { - let cancelButton = cancelButton.update( - component: CameraButton( - content: AnyComponentWithIdentity( - id: "cancel", - component: AnyComponent( - Image( - image: state.image(.cancel), - tintColor: controlsTintColor, - size: CGSize(width: 40.0, height: 40.0) + if !state.displayingCollageSelection { + let cancelButton = cancelButton.update( + component: CameraButton( + content: AnyComponentWithIdentity( + id: "cancel", + component: AnyComponent( + Image( + image: state.image(.cancel), + tintColor: controlsTintColor, + size: CGSize(width: 40.0, height: 40.0) + ) ) - ) - ), - action: { - guard let controller = controller() as? CameraScreen else { - return + ), + action: { + guard let controller = controller() as? CameraScreenImpl else { + return + } + controller.requestDismiss(animated: true) } - controller.requestDismiss(animated: true) - } - ).tagged(cancelButtonTag), - availableSize: CGSize(width: 40.0, height: 40.0), - transition: .immediate - ) - context.add(cancelButton - .position(CGPoint(x: isTablet ? smallPanelWidth / 2.0 : topControlInset + cancelButton.size.width / 2.0, y: max(environment.statusBarHeight + 5.0, environment.safeInsets.top + topControlInset) + cancelButton.size.height / 2.0)) - .appear(.default(scale: true)) - .disappear(.default(scale: true)) - ) + ).tagged(cancelButtonTag), + availableSize: CGSize(width: 40.0, height: 40.0), + transition: .immediate + ) + context.add(cancelButton + .position(CGPoint(x: isTablet ? smallPanelWidth / 2.0 : topControlInset + cancelButton.size.width / 2.0, y: max(environment.statusBarHeight + 5.0, environment.safeInsets.top + topControlInset) + cancelButton.size.height / 2.0)) + .appear(.default(scale: true)) + .disappear(.default(scale: true)) + .shadow(Shadow(color: UIColor(white: 0.0, alpha: 0.25), radius: 3.0, offset: .zero)) + ) + } let flashContentComponent: AnyComponentWithIdentity if component.hasAppeared { @@ -1050,58 +1177,165 @@ private final class CameraScreenComponent: CombinedComponent { } if hasAllRequiredAccess { - let flashButton = flashButton.update( - component: CameraButton( - content: flashContentComponent, - action: { [weak state] in - if let state { - state.toggleFlashMode() - } - }, - longTapAction: { [weak state] in - if let state { - state.presentFlashTint() - } - } - ).tagged(flashButtonTag), - availableSize: CGSize(width: 40.0, height: 40.0), - transition: .immediate - ) - - let position = CGPoint(x: isTablet ? 
availableSize.width - smallPanelWidth / 2.0 : availableSize.width - topControlInset - flashButton.size.width / 2.0 - 5.0, y: max(environment.statusBarHeight + 5.0, environment.safeInsets.top + topControlInset) + flashButton.size.height / 2.0) - flashButtonPosition = position - context.add(flashButton - .position(position) - .appear(.default(scale: true)) - .disappear(.default(scale: true)) - ) - - if !isSticker && !isTablet && Camera.isDualCameraSupported(forRoundVideo: false) { - let dualButton = dualButton.update( + let rightMostButtonWidth: CGFloat + if state.displayingCollageSelection { + let disableCollageButton = disableCollageButton.update( component: CameraButton( content: AnyComponentWithIdentity( - id: "dual", + id: "disableCollage", component: AnyComponent( - DualIconComponent( - isSelected: component.cameraState.isDualCameraEnabled, + CollageIconComponent( + grid: component.cameraState.collageGrid, + crossed: true, + isSelected: false, tintColor: controlsTintColor ) ) ), action: { [weak state] in if let state { - state.toggleDualCamera() + state.disableCollageCamera() } } - ).tagged(dualButtonTag), + ).tagged(disableCollageButtonTag), availableSize: CGSize(width: 40.0, height: 40.0), transition: .immediate ) - context.add(dualButton - .position(CGPoint(x: availableSize.width - topControlInset - flashButton.size.width / 2.0 - 58.0, y: max(environment.statusBarHeight + 5.0, environment.safeInsets.top + topControlInset) + dualButton.size.height / 2.0 + 2.0)) + context.add(disableCollageButton + .position(CGPoint(x: availableSize.width - topControlInset - disableCollageButton.size.width / 2.0 - 5.0, y: max(environment.statusBarHeight + 5.0, environment.safeInsets.top + topControlInset) + disableCollageButton.size.height / 2.0 + 2.0)) .appear(.default(scale: true)) .disappear(.default(scale: true)) + .shadow(Shadow(color: UIColor(white: 0.0, alpha: 0.25), radius: 3.0, offset: .zero)) ) + rightMostButtonWidth = disableCollageButton.size.width + 4.0 + } else if component.cameraState.collageProgress > 1.0 - .ulpOfOne { + rightMostButtonWidth = 0.0 + } else { + let flashButton = flashButton.update( + component: CameraButton( + content: flashContentComponent, + action: { [weak state] in + if let state { + state.toggleFlashMode() + } + }, + longTapAction: { [weak state] in + if let state { + state.presentFlashTint() + } + } + ).tagged(flashButtonTag), + availableSize: CGSize(width: 40.0, height: 40.0), + transition: .immediate + ) + + let position = CGPoint(x: isTablet ? 
availableSize.width - smallPanelWidth / 2.0 : availableSize.width - topControlInset - flashButton.size.width / 2.0 - 5.0, y: max(environment.statusBarHeight + 5.0, environment.safeInsets.top + topControlInset) + flashButton.size.height / 2.0) + flashButtonPosition = position + context.add(flashButton + .position(position) + .appear(.default(scale: true)) + .disappear(.default(scale: true)) + .shadow(Shadow(color: UIColor(white: 0.0, alpha: 0.25), radius: 3.0, offset: .zero)) + ) + rightMostButtonWidth = flashButton.size.width + } + + if !isSticker && !isTablet { + var nextButtonX = availableSize.width - topControlInset - rightMostButtonWidth / 2.0 - 58.0 + if Camera.isDualCameraSupported(forRoundVideo: false) && !component.cameraState.isCollageEnabled { + let dualButton = dualButton.update( + component: CameraButton( + content: AnyComponentWithIdentity( + id: "dual", + component: AnyComponent( + DualIconComponent( + isSelected: component.cameraState.isDualCameraEnabled, + tintColor: controlsTintColor + ) + ) + ), + action: { [weak state] in + if let state { + state.toggleDualCamera() + } + } + ).tagged(dualButtonTag), + availableSize: CGSize(width: 40.0, height: 40.0), + transition: .immediate + ) + context.add(dualButton + .position(CGPoint(x: nextButtonX, y: max(environment.statusBarHeight + 5.0, environment.safeInsets.top + topControlInset) + dualButton.size.height / 2.0 + 2.0)) + .appear(.default(scale: true)) + .disappear(.default(scale: true)) + .shadow(Shadow(color: UIColor(white: 0.0, alpha: 0.25), radius: 3.0, offset: .zero)) + ) + + nextButtonX -= dualButton.size.width + 16.0 + } + + if !component.cameraState.isDualCameraEnabled { + let collageButton = collageButton.update( + component: CameraButton( + content: AnyComponentWithIdentity( + id: "collage", + component: AnyComponent( + CollageIconComponent( + grid: component.cameraState.collageGrid, + crossed: false, + isSelected: component.cameraState.isCollageEnabled, + tintColor: controlsTintColor + ) + ) + ), + action: { [weak state] in + if let state { + state.toggleCollageCamera() + } + } + ).tagged(collageButtonTag), + availableSize: CGSize(width: 40.0, height: 40.0), + transition: .immediate + ) + var collageButtonX = nextButtonX + if rightMostButtonWidth.isZero { + collageButtonX = availableSize.width - topControlInset - collageButton.size.width / 2.0 - 5.0 + } + context.add(collageButton + .position(CGPoint(x: collageButtonX, y: max(environment.statusBarHeight + 5.0, environment.safeInsets.top + topControlInset) + collageButton.size.height / 2.0 + 2.0)) + .appear(.default(scale: true)) + .disappear(.default(scale: true)) + .shadow(Shadow(color: UIColor(white: 0.0, alpha: 0.25), radius: 3.0, offset: .zero)) + ) + nextButtonX -= collageButton.size.width + + if state.displayingCollageSelection { + let collageCarousel = collageCarousel.update( + component: CollageIconCarouselComponent( + grids: collageGrids.filter { $0 != component.cameraState.collageGrid }, + selected: { [weak state] grid in + state?.updateCollageGrid(grid) + } + ), + availableSize: CGSize(width: nextButtonX, height: 40.0), + transition: .immediate + ) + context.add(collageCarousel + .position(CGPoint(x: collageCarousel.size.width / 2.0, y: max(environment.statusBarHeight + 5.0, environment.safeInsets.top + topControlInset) + collageCarousel.size.height / 2.0 + 2.0)) + .appear(ComponentTransition.Appear({ _, view, transition in + if let view = view as? 
CollageIconCarouselComponent.View, !transition.animation.isImmediate { + view.animateIn() + } + })) + .disappear(ComponentTransition.Disappear({ view, transition, completion in + if let view = view as? CollageIconCarouselComponent.View, !transition.animation.isImmediate { + view.animateOut(completion: completion) + } else { + completion() + } + })) + ) + } + } } } } @@ -1143,7 +1377,7 @@ private final class CameraScreenComponent: CombinedComponent { isVideoRecording = true } - if isVideoRecording && !state.isTransitioning { + if isVideoRecording && !state.isTransitioning && !state.displayingCollageSelection { let duration = Int(component.cameraState.duration) let durationString = String(format: "%02d:%02d", (duration / 60) % 60, duration % 60) let timeLabel = timeLabel.update( @@ -1214,7 +1448,7 @@ private final class CameraScreenComponent: CombinedComponent { } } - if !isSticker, case .none = component.cameraState.recording, !state.isTransitioning && hasAllRequiredAccess { + if !isSticker, case .none = component.cameraState.recording, !state.isTransitioning && hasAllRequiredAccess && component.cameraState.collageProgress < 1.0 - .ulpOfOne { let availableModeControlSize: CGSize if isTablet { availableModeControlSize = CGSize(width: panelWidth, height: 120.0) @@ -1326,7 +1560,7 @@ private class BlurView: UIVisualEffectView { } } -public class CameraScreen: ViewController { +public class CameraScreenImpl: ViewController, CameraScreen { public enum Mode { case story case sticker @@ -1343,7 +1577,7 @@ public class CameraScreen: ViewController { public struct Image { public let image: UIImage public let additionalImage: UIImage? - public let additionalImagePosition: CameraScreen.PIPPosition + public let additionalImagePosition: CameraScreenImpl.PIPPosition } public struct Video { @@ -1355,16 +1589,30 @@ public class CameraScreen: ViewController { public let dimensions: PixelDimensions public let duration: Double public let positionChangeTimestamps: [(Bool, Double)] - public let additionalVideoPosition: CameraScreen.PIPPosition + public let additionalVideoPosition: CameraScreenImpl.PIPPosition + } + + public struct VideoCollage { + public struct Item { + public enum Content { + case image(UIImage) + case video(String, Double) + case asset(PHAsset) + } + public let content: Content + public let frame: CGRect + } + public let items: [Item] } case pendingImage case image(Image) case video(Video) + case videoCollage(VideoCollage) case asset(PHAsset) case draft(MediaEditorDraft) - func withPIPPosition(_ position: CameraScreen.PIPPosition) -> Result { + func withPIPPosition(_ position: CameraScreenImpl.PIPPosition) -> Result { switch self { case let .image(result): return .image(Image(image: result.image, additionalImage: result.additionalImage, additionalImagePosition: position)) @@ -1412,18 +1660,21 @@ public class CameraScreen: ViewController { } fileprivate final class Node: ViewControllerTracingNode, ASGestureRecognizerDelegate { - private weak var controller: CameraScreen? + private weak var controller: CameraScreenImpl? private let context: AccountContext fileprivate var camera: Camera? 
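The new Result.videoCollage case is the payload the collage flow ultimately hands over: one Item per tile, pairing the tile's content (a still image, a recorded video path with its duration, or a gallery PHAsset) with the tile's frame on the canvas. A self-contained sketch of that shape, using local stand-in types since the real ones are the nested structs above; the paths and frames are illustrative:

import Photos
import UIKit

// Stand-ins for CameraScreenImpl.Result.VideoCollage.Item and its Content.
enum TileContent {
    case image(UIImage)
    case video(String, Double) // file path, duration in seconds
    case asset(PHAsset)
}

struct Tile {
    let content: TileContent
    let frame: CGRect
}

// A finished two-tile collage on a 1080x1920 canvas: a video on top,
// a still below.
let tiles: [Tile] = [
    Tile(content: .video("/tmp/tile-top.mp4", 3.2), frame: CGRect(x: 0, y: 0, width: 1080, height: 960)),
    Tile(content: .image(UIImage()), frame: CGRect(x: 0, y: 960, width: 1080, height: 960))
]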
private let updateState: ActionSlot private let toggleCameraPositionAction: ActionSlot + fileprivate let dismissCollageSelection: ActionSlot fileprivate let backgroundView: UIView fileprivate let containerView: UIView fileprivate let componentHost: ComponentView private let previewContainerView: UIView - private let mainPreviewContainerView: UIView + private let collageContainerView: UIView + private var collageView: CameraCollageView? + private let mainPreviewContainerView: PortalSourceView fileprivate var mainPreviewView: CameraSimplePreviewView private let mainPreviewAnimationWrapperView: UIView @@ -1442,6 +1693,9 @@ public class CameraScreen: ViewController { private var changingPositionDisposable: Disposable? private var appliedDualCamera = false + fileprivate var collage: CameraCollage? + private var collageStateDisposable: Disposable? + private var pipPosition: PIPPosition = .topRight fileprivate var previewBlurPromise = ValuePromise(false) @@ -1461,12 +1715,14 @@ public class CameraScreen: ViewController { fileprivate var hasGallery = false fileprivate var postingAvailable = true + private var updatingCollageProgress = false + private var presentationData: PresentationData private var validLayout: ContainerViewLayout? fileprivate var didAppear: () -> Void = {} - private let completion = ActionSlot>() + private let completion = ActionSlot>() var cameraState: CameraState { didSet { @@ -1499,7 +1755,7 @@ public class CameraScreen: ViewController { if isDualCameraEnabled && previousPosition != currentPosition { self.animateDualCameraPositionSwitch() } else if dualCamWasEnabled != isDualCameraEnabled { - self.requestUpdateLayout(hasAppeared: self.hasAppeared, transition: .spring(duration: 0.4)) + self.requestUpdateLayout(transition: .spring(duration: 0.4)) UserDefaults.standard.set(isDualCameraEnabled as NSNumber, forKey: "TelegramStoryCameraIsDualEnabled") } @@ -1511,11 +1767,12 @@ public class CameraScreen: ViewController { private var galleryAuthorizationStatus: AccessType = .notDetermined private var authorizationStatusDisposables = DisposableSet() - init(controller: CameraScreen) { + init(controller: CameraScreenImpl) { self.controller = controller self.context = controller.context self.updateState = ActionSlot() self.toggleCameraPositionAction = ActionSlot() + self.dismissCollageSelection = ActionSlot() self.presentationData = self.context.sharedContext.currentPresentationData.with { $0 } @@ -1558,7 +1815,10 @@ public class CameraScreen: ViewController { cameraFrontPosition = true } - self.mainPreviewContainerView = UIView() + self.collageContainerView = UIView() + self.collageContainerView.clipsToBounds = true + + self.mainPreviewContainerView = PortalSourceView() self.mainPreviewContainerView.clipsToBounds = true self.mainPreviewView = CameraSimplePreviewView(frame: .zero, main: true) @@ -1586,7 +1846,10 @@ public class CameraScreen: ViewController { flashTintSize: 1.0, recording: .none, duration: 0.0, - isDualCameraEnabled: isDualCameraEnabled + isDualCameraEnabled: isDualCameraEnabled, + isCollageEnabled: false, + collageGrid: collageGrids[3], + collageProgress: 0.0 ) self.previewFrameLeftDimView = UIView() @@ -1612,6 +1875,7 @@ public class CameraScreen: ViewController { self.containerView.addSubview(self.previewContainerView) self.previewContainerView.addSubview(self.mainPreviewContainerView) + self.previewContainerView.addSubview(self.collageContainerView) self.previewContainerView.addSubview(self.additionalPreviewContainerView) 
self.previewContainerView.addSubview(self.previewBlurView) self.previewContainerView.addSubview(self.previewFrameLeftDimView) @@ -1626,37 +1890,67 @@ public class CameraScreen: ViewController { self.completion.connect { [weak self] result in if let self { let pipPosition = self.pipPosition - self.animateOutToEditor() - self.controller?.completion( - result - |> map { result in - return result.withPIPPosition(pipPosition) - } - |> beforeNext { [weak self] value in - guard let self else { - return - } - if case .pendingImage = value { - Queue.mainQueue().async { - self.mainPreviewView.isEnabled = false - self.additionalPreviewView.isEnabled = false - } - } else { - Queue.mainQueue().async { - if case .image = value { - Queue.mainQueue().after(0.3) { - self.previewBlurPromise.set(true) + if self.cameraState.isCollageEnabled { + if let collage = self.collage { + if collage.isComplete { + self.animateOutToEditor() + self.controller?.completion( + collage.result + |> beforeNext { [weak self] value in + guard let self else { + return } - } - self.mainPreviewView.isEnabled = false - self.additionalPreviewView.isEnabled = false - self.camera?.stopCapture() - } + Queue.mainQueue().async { + if case .image = value { + Queue.mainQueue().after(0.3) { + self.previewBlurPromise.set(true) + } + } + self.mainPreviewView.isEnabled = false + self.additionalPreviewView.isEnabled = false + self.camera?.stopCapture() + } + }, + nil, + {} + ) + } else { + collage.addResult(result, snapshotView: self.mainPreviewContainerView.snapshotView(afterScreenUpdates: false)) } - }, - nil, - {} - ) + } + } else { + self.animateOutToEditor() + self.controller?.completion( + result + |> map { result in + return result.withPIPPosition(pipPosition) + } + |> beforeNext { [weak self] value in + guard let self else { + return + } + if case .pendingImage = value { + Queue.mainQueue().async { + self.mainPreviewView.isEnabled = false + self.additionalPreviewView.isEnabled = false + } + } else { + Queue.mainQueue().async { + if case .image = value { + Queue.mainQueue().after(0.3) { + self.previewBlurPromise.set(true) + } + } + self.mainPreviewView.isEnabled = false + self.additionalPreviewView.isEnabled = false + self.camera?.stopCapture() + } + } + }, + nil, + {} + ) + } } } @@ -1695,7 +1989,7 @@ public class CameraScreen: ViewController { |> deliverOnMainQueue).start(next: { [weak self] status in if let self { self.cameraAuthorizationStatus = status - self.requestUpdateLayout(hasAppeared: self.hasAppeared, transition: .easeInOut(duration: 0.2)) + self.requestUpdateLayout(transition: .easeInOut(duration: 0.2)) self.maybeSetupCamera() } @@ -1705,7 +1999,7 @@ public class CameraScreen: ViewController { |> deliverOnMainQueue).start(next: { [weak self] status in if let self { self.microphoneAuthorizationStatus = status - self.requestUpdateLayout(hasAppeared: self.hasAppeared, transition: .easeInOut(duration: 0.2)) + self.requestUpdateLayout(transition: .easeInOut(duration: 0.2)) self.maybeSetupCamera() } @@ -1715,6 +2009,7 @@ public class CameraScreen: ViewController { deinit { self.cameraStateDisposable?.dispose() self.changingPositionDisposable?.dispose() + self.collageStateDisposable?.dispose() self.idleTimerExtensionDisposable.dispose() self.authorizationStatusDisposables.dispose() } @@ -1762,7 +2057,7 @@ public class CameraScreen: ViewController { } }) } - + fileprivate var captureStartTimestamp: Double? 
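The reworked completion.connect above is the core of the collage capture loop: while the grid still has empty tiles, each incoming shutter result is folded into the collage through addResult, together with a snapshot of the live preview that covers the tile during the hand-off, and once isComplete the next completion event forwards the assembled collage.result to the editor instead. A toy model of that fold, with strings standing in for capture results; CameraCollage itself is defined elsewhere in this PR:

// Toy stand-in for the CameraCollage accumulation.
final class ToyCollage {
    let capacity: Int
    private(set) var tiles: [String] = []

    init(capacity: Int) {
        self.capacity = capacity
    }

    var isComplete: Bool {
        return self.tiles.count >= self.capacity
    }

    func addResult(_ result: String) {
        self.tiles.append(result)
    }
}

// Mirrors the branch order above: forward only once complete,
// otherwise keep filling tiles.
func handleShutterResult(_ result: String, collage: ToyCollage, forwardToEditor: (String) -> Void) {
    if collage.isComplete {
        forwardToEditor(collage.tiles.joined(separator: " | "))
    } else {
        collage.addResult(result)
    }
}

let collage = ToyCollage(capacity: 3) // e.g. a three-tile grid
for shot in ["photo A", "video B", "photo C", "confirm"] {
    handleShutterResult(shot, collage: collage, forwardToEditor: { print($0) })
}
// Prints "photo A | video B | photo C" on the fourth event only.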
private func setupCamera() { guard self.camera == nil, let controller = self.controller else { @@ -1790,7 +2085,7 @@ public class CameraScreen: ViewController { ) isNew = true } - + self.cameraStateDisposable = combineLatest( queue: Queue.mainQueue(), camera.flashMode, @@ -1802,7 +2097,7 @@ public class CameraScreen: ViewController { let previousState = self.cameraState self.cameraState = self.cameraState.updatedPosition(position).updatedFlashMode(flashMode) if !self.animatingDualCameraPositionSwitch { - self.requestUpdateLayout(hasAppeared: self.hasAppeared, transition: .easeInOut(duration: 0.2)) + self.requestUpdateLayout(transition: .easeInOut(duration: 0.2)) } if previousState.position != self.cameraState.position { @@ -1939,7 +2234,7 @@ public class CameraScreen: ViewController { case .began: break case .changed: - if case .none = self.cameraState.recording { + if case .none = self.cameraState.recording, self.cameraState.collageProgress.isZero { if case .compact = layout.metrics.widthClass { switch controller.mode { case .story: @@ -2016,6 +2311,9 @@ public class CameraScreen: ViewController { } @objc private func handleDoubleTap(_ gestureRecognizer: UITapGestureRecognizer) { + guard !self.cameraState.isCollageEnabled else { + return + } self.toggleCameraPositionAction.invoke(Void()) } @@ -2058,7 +2356,7 @@ public class CameraScreen: ViewController { CATransaction.begin() CATransaction.setDisableActions(true) - self.requestUpdateLayout(hasAppeared: self.hasAppeared, transition: .immediate) + self.requestUpdateLayout(transition: .immediate) CATransaction.commit() self.animatingDualCameraPositionSwitch = true @@ -2101,7 +2399,7 @@ public class CameraScreen: ViewController { ) } - var animatedIn = false + private var animatedIn = false func animateIn() { guard let controller = self.controller else { return @@ -2248,7 +2546,7 @@ public class CameraScreen: ViewController { func animateOutToEditor() { self.cameraIsActive = false - self.requestUpdateLayout(hasAppeared: self.hasAppeared, transition: .immediate) + self.requestUpdateLayout(transition: .immediate) let transition = ComponentTransition(animation: .curve(duration: 0.2, curve: .easeInOut)) if let view = self.componentHost.findTaggedView(tag: cancelButtonTag) { @@ -2272,21 +2570,33 @@ public class CameraScreen: ViewController { if let view = self.componentHost.findTaggedView(tag: modeControlTag) as? 
ModeComponent.View { view.animateOutToEditor(transition: transition) } + + Queue.mainQueue().after(1.0, { + if self.cameraState.isCollageEnabled { + self.collage = nil + if let collageView = self.collageView { + collageView.removeFromSuperview() + self.collageView = nil + } + } + }) } func pauseCameraCapture() { self.mainPreviewView.isEnabled = false self.additionalPreviewView.isEnabled = false + self.collageView?.isEnabled = false + Queue.mainQueue().after(0.3) { self.previewBlurPromise.set(true) } self.camera?.stopCapture() self.cameraIsActive = false - self.requestUpdateLayout(hasAppeared: self.hasAppeared, transition: .immediate) + self.requestUpdateLayout(transition: .immediate) } - func resumeCameraCapture() { + func resumeCameraCapture(fromGallery: Bool) { if !self.mainPreviewView.isEnabled { if let snapshot = self.mainPreviewView.snapshotView(afterScreenUpdates: false) { self.mainPreviewView.addSubview(snapshot) @@ -2317,16 +2627,16 @@ public class CameraScreen: ViewController { } self.cameraIsActive = true - self.requestUpdateLayout(hasAppeared: self.hasAppeared, transition: .immediate) + self.requestUpdateLayout(transition: .immediate) } } func animateInFromEditor(toGallery: Bool) { if !toGallery { - self.resumeCameraCapture() + self.resumeCameraCapture(fromGallery: false) self.cameraIsActive = true - self.requestUpdateLayout(hasAppeared: self.hasAppeared, transition: .immediate) + self.requestUpdateLayout(transition: .immediate) let transition = ComponentTransition(animation: .curve(duration: 0.2, curve: .easeInOut)) if let view = self.componentHost.findTaggedView(tag: cancelButtonTag) { @@ -2475,12 +2785,19 @@ public class CameraScreen: ViewController { if self.additionalPreviewContainerView.bounds.contains(self.view.convert(point, to: self.additionalPreviewContainerView)) { return self.additionalPreviewContainerView } else { - return self.mainPreviewView + return self.collageView ?? 
self.mainPreviewView } } return result } + func requestUpdateLayout(transition: ComponentTransition) { + if let layout = self.validLayout { + self.containerLayoutUpdated(layout: layout, forceUpdate: true, hasAppeared: self.hasAppeared, transition: transition) + } + } + + func requestUpdateLayout(hasAppeared: Bool, transition: ComponentTransition) { if let layout = self.validLayout { self.containerLayoutUpdated(layout: layout, forceUpdate: true, hasAppeared: hasAppeared, transition: transition) @@ -2575,9 +2892,20 @@ public class CameraScreen: ViewController { panelWidth: panelWidth, animateFlipAction: self.animateFlipAction, animateShutter: { [weak self] in - self?.mainPreviewContainerView.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.25) + guard let self else { + return + } + + if self.cameraState.isCollageEnabled { + self.collageView?.resetPlayback() + } + + if !self.cameraState.isCollageEnabled, case .none = self.cameraState.recording { + self.mainPreviewContainerView.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.25) + } }, toggleCameraPositionAction: self.toggleCameraPositionAction, + dismissCollageSelection: self.dismissCollageSelection, getController: { [weak self] in return self?.controller }, @@ -2606,20 +2934,6 @@ public class CameraScreen: ViewController { transition.setFrame(view: componentView, frame: componentFrame) } - if let view = self.componentHost.findTaggedView(tag: cancelButtonTag), view.layer.shadowOpacity.isZero { - view.layer.shadowOffset = CGSize(width: 0.0, height: 0.0) - view.layer.shadowRadius = 3.0 - view.layer.shadowColor = UIColor.black.cgColor - view.layer.shadowOpacity = 0.25 - } - - if let view = self.componentHost.findTaggedView(tag: flashButtonTag), view.layer.shadowOpacity.isZero { - view.layer.shadowOffset = CGSize(width: 0.0, height: 0.0) - view.layer.shadowRadius = 3.0 - view.layer.shadowColor = UIColor.black.cgColor - view.layer.shadowOpacity = 0.25 - } - transition.setPosition(view: self.backgroundView, position: CGPoint(x: layout.size.width / 2.0, y: layout.size.height / 2.0)) transition.setBounds(view: self.backgroundView, bounds: CGRect(origin: .zero, size: layout.size)) @@ -2638,8 +2952,93 @@ public class CameraScreen: ViewController { } transition.setFrame(view: self.previewContainerView, frame: previewContainerFrame) - transition.setFrame(view: self.mainPreviewContainerView, frame: CGRect(origin: .zero, size: previewContainerFrame.size)) + transition.setFrame(view: self.collageContainerView, frame: CGRect(origin: .zero, size: previewContainerFrame.size)) + if self.cameraState.isCollageEnabled { + let collage: CameraCollage + if let current = self.collage { + collage = current + collage.grid = self.cameraState.collageGrid + } else { + collage = CameraCollage(grid: self.cameraState.collageGrid) + self.collage = collage + + self.collageStateDisposable = (collage.state + |> deliverOnMainQueue).start(next: { [weak self] collageState in + guard let self else { + return + } + self.updatingCollageProgress = true + self.controller?.updateCameraState({ state in + return state.updatedCollageProgress(collageState.progress) + }, transition: .spring(duration: 0.3)) + self.updatingCollageProgress = false + }) + + controller.galleryController = nil + } + var added = false + let collageView: CameraCollageView + if let current = self.collageView { + collageView = current + } else { + collageView = CameraCollageView(context: self.context, collage: collage, camera: self.camera, cameraContainerView: self.mainPreviewContainerView) + 
collageView.getOverlayViews = { [weak self] in + guard let self, let view = self.componentHost.view else { + return [] + } + return [view] + } + collageView.requestGridReduce = { [weak self] in + guard let self, self.cameraState.isCollageEnabled else { + return + } + if self.cameraState.collageGrid.count == 2 { + self.controller?.updateCameraState({ $0.updatedIsCollageEnabled(false).updatedCollageProgress(0.0) }, transition: .spring(duration: 0.3)) + } else { + let currentCount = self.cameraState.collageGrid.count + for grid in collageGrids.reversed() { + if grid.count == currentCount - 1 { + self.controller?.updateCameraState({ $0.updatedCollageGrid(grid) }, transition: .spring(duration: 0.3)) + break + } + } + } + } + collageView.presentController = { [weak controller] c in + controller?.presentInGlobalOverlay(c) + } + self.collageView = collageView + self.collageContainerView.addSubview(collageView) + added = true + } + transition.setFrame(view: collageView, frame: CGRect(origin: .zero, size: previewContainerFrame.size)) + if !self.updatingCollageProgress || added { + collageView.updateLayout(size: previewContainerFrame.size, transition: transition) + } + + if added { + collageView.animateIn(transition: transition) + } + + self.collageContainerView.isHidden = false + } else { + self.collageStateDisposable?.dispose() + self.collageStateDisposable = nil + + if let collageView = self.collageView { + collageView.animateOut(transition: transition, completion: { [weak collageView] in + self.previewContainerView.addSubview(self.mainPreviewContainerView) + collageView?.removeFromSuperview() + self.collageContainerView.isHidden = true + }) + self.collageView = nil + self.collage = nil + } else { + transition.setFrame(view: self.mainPreviewContainerView, frame: CGRect(origin: .zero, size: previewContainerFrame.size)) + } + } + transition.setFrame(view: self.previewBlurView, frame: CGRect(origin: .zero, size: previewContainerFrame.size)) let isDualCameraEnabled = self.cameraState.isDualCameraEnabled @@ -2800,7 +3199,7 @@ public class CameraScreen: ViewController { self.transitionOut = transitionOut } } - fileprivate let completion: (Signal, ResultTransition?, @escaping () -> Void) -> Void + fileprivate let completion: (Signal, ResultTransition?, @escaping () -> Void) -> Void public var transitionedIn: () -> Void = {} public var transitionedOut: () -> Void = {} @@ -2825,7 +3224,7 @@ public class CameraScreen: ViewController { fileprivate func updateCameraState(_ f: (CameraState) -> CameraState, transition: ComponentTransition) { self.node.cameraState = f(self.node.cameraState) - self.node.requestUpdateLayout(hasAppeared: self.node.hasAppeared, transition: transition) + self.node.requestUpdateLayout(transition: transition) } public init( @@ -2834,7 +3233,7 @@ public class CameraScreen: ViewController { holder: CameraHolder? 
= nil, transitionIn: TransitionIn?, transitionOut: @escaping (Bool) -> TransitionOut?, - completion: @escaping (Signal, ResultTransition?, @escaping () -> Void) -> Void + completion: @escaping (Signal, ResultTransition?, @escaping () -> Void) -> Void ) { self.context = context self.mode = mode @@ -2847,6 +3246,7 @@ public class CameraScreen: ViewController { self.statusBar.statusBarStyle = .Ignore self.supportedOrientations = ViewControllerSupportedOrientations(regularSize: .all, compactSize: .portrait) + self.automaticallyControlPresentationContextLayout = false self.navigationPresentation = .flatModal @@ -2954,12 +3354,13 @@ public class CameraScreen: ViewController { } self.node.dismissAllTooltips() + self.node.dismissCollageSelection.invoke(Void()) self.node.hasGallery = true self.didStopCameraCapture = false let stopCameraCapture = { [weak self] in - guard let self, !self.didStopCameraCapture else { + guard let self, !self.didStopCameraCapture, !self.cameraState.isCollageEnabled else { return } let currentTimestamp = CACurrentMediaTime() @@ -2982,14 +3383,15 @@ public class CameraScreen: ViewController { return } self.didStopCameraCapture = false - self.node.resumeCameraCapture() + self.node.resumeCameraCapture(fromGallery: true) } + var dismissControllerImpl: (() -> Void)? let controller: ViewController if let current = self.galleryController { controller = current } else { - controller = self.context.sharedContext.makeStoryMediaPickerScreen(context: self.context, isDark: true, getSourceRect: { [weak self] in + controller = self.context.sharedContext.makeStoryMediaPickerScreen(context: self.context, isDark: true, forCollage: self.cameraState.isCollageEnabled, getSourceRect: { [weak self] in if let self { if let galleryButton = self.node.componentHost.findTaggedView(tag: galleryButtonTag) { return galleryButton.convert(galleryButton.bounds, to: self.view).offsetBy(dx: 0.0, dy: -15.0) @@ -3001,45 +3403,74 @@ public class CameraScreen: ViewController { } }, completion: { [weak self] result, transitionView, transitionRect, transitionImage, transitionOut, dismissed in if let self { - stopCameraCapture() - - let resultTransition = ResultTransition( - sourceView: transitionView, - sourceRect: transitionRect, - sourceImage: transitionImage, - transitionOut: transitionOut - ) - if let asset = result as? PHAsset { - if asset.mediaType == .video && asset.duration < 1.0 { - let presentationData = self.context.sharedContext.currentPresentationData.with { $0 } - let alertController = textAlertController( - context: self.context, - forceTheme: defaultDarkColorPresentationTheme, - title: nil, - text: presentationData.strings.Story_Editor_VideoTooShort, - actions: [ - TextAlertAction(type: .defaultAction, title: presentationData.strings.Common_OK, action: {}) - ], - actionLayout: .vertical - ) - self.present(alertController, in: .window(.root)) - } else { - self.completion(.single(.asset(asset)), resultTransition, dismissed) + if self.cameraState.isCollageEnabled { + if let asset = result as? 
+                            if asset.mediaType == .video && asset.duration > 1.0 {
+                                self.node.collage?.addResult(.single(.asset(asset)), snapshotView: nil)
+                            } else {
+                                self.node.collage?.addResult(
+                                    assetImage(asset: asset, targetSize: CGSize(width: 1080, height: 1080), exact: false, deliveryMode: .highQualityFormat)
+                                    |> runOn(Queue.concurrentDefaultQueue())
+                                    |> mapToSignal { image -> Signal<CameraScreenImpl.Result, NoError> in
+                                        if let image {
+                                            return .single(.image(Result.Image(image: image, additionalImage: nil, additionalImagePosition: .topLeft)))
+                                        } else {
+                                            return .complete()
+                                        }
+                                    },
+                                    snapshotView: nil
+                                )
+                            }
+                        }
+
+                        dismissControllerImpl?()
+                    } else {
+                        stopCameraCapture()
+
+                        let resultTransition = ResultTransition(
+                            sourceView: transitionView,
+                            sourceRect: transitionRect,
+                            sourceImage: transitionImage,
+                            transitionOut: transitionOut
+                        )
+                        if let asset = result as? PHAsset {
+                            if asset.mediaType == .video && asset.duration < 1.0 {
+                                let presentationData = self.context.sharedContext.currentPresentationData.with { $0 }
+                                let alertController = textAlertController(
+                                    context: self.context,
+                                    forceTheme: defaultDarkColorPresentationTheme,
+                                    title: nil,
+                                    text: presentationData.strings.Story_Editor_VideoTooShort,
+                                    actions: [
+                                        TextAlertAction(type: .defaultAction, title: presentationData.strings.Common_OK, action: {})
+                                    ],
+                                    actionLayout: .vertical
+                                )
+                                self.present(alertController, in: .window(.root))
+                            } else {
+                                self.completion(.single(.asset(asset)), resultTransition, dismissed)
+                            }
+                        } else if let draft = result as? MediaEditorDraft {
+                            self.completion(.single(.draft(draft)), resultTransition, dismissed)
                         }
-                    } else if let draft = result as? MediaEditorDraft {
-                        self.completion(.single(.draft(draft)), resultTransition, dismissed)
                     }
                 }
             }, dismissed: { [weak self] in
                 resumeCameraCapture()
                 if let self {
                     self.node.hasGallery = false
-                    self.node.requestUpdateLayout(hasAppeared: self.node.hasAppeared, transition: .immediate)
+                    self.node.requestUpdateLayout(transition: .immediate)
                 }
             }, groupsPresented: {
                 stopCameraCapture()
             })
             self.galleryController = controller
+
+            dismissControllerImpl = { [weak controller] in
+                controller?.dismiss(animated: true)
+            }
         }
         controller.customModalStyleOverlayTransitionFactorUpdated = { [weak self, weak controller] transition in
             if let self, let controller {
@@ -3224,6 +3655,11 @@
         self.validLayout = layout
 
         super.containerLayoutUpdated(layout, transition: transition)
+
+        var presentationLayout = layout
+        presentationLayout.intrinsicInsets.bottom = 210.0
+
+        self.presentationContext.containerLayoutUpdated(presentationLayout, transition: transition)
 
         if !self.isDismissed {
             (self.displayNode as! Node).containerLayoutUpdated(layout: layout, transition: ComponentTransition(transition))
@@ -3231,98 +3667,7 @@
         }
     }
 }
 
-private final class DualIconComponent: Component {
-    typealias EnvironmentType = Empty
-
-    let isSelected: Bool
-    let tintColor: UIColor
-
-    init(
-        isSelected: Bool,
-        tintColor: UIColor
-    ) {
-        self.isSelected = isSelected
-        self.tintColor = tintColor
-    }
-
-    static func ==(lhs: DualIconComponent, rhs: DualIconComponent) -> Bool {
-        if lhs.isSelected != rhs.isSelected {
-            return false
-        }
-        if lhs.tintColor != rhs.tintColor {
-            return false
-        }
-        return true
-    }
-
-    final class View: UIView {
-        private let iconView = UIImageView()
-
-        private var component: DualIconComponent?
-        private weak var state: EmptyComponentState?
-
-        override init(frame: CGRect) {
-            super.init(frame: frame)
-
-            let image = generateImage(CGSize(width: 36.0, height: 36.0), rotatedContext: { size, context in
-                context.clear(CGRect(origin: .zero, size: size))
-
-                if let image = UIImage(bundleImageName: "Camera/DualIcon"), let cgImage = image.cgImage {
-                    context.draw(cgImage, in: CGRect(origin: CGPoint(x: floorToScreenPixels((size.width - image.size.width) / 2.0), y: floorToScreenPixels((size.height - image.size.height) / 2.0) - 1.0), size: image.size))
-                }
-            })?.withRenderingMode(.alwaysTemplate)
-
-            let selectedImage = generateImage(CGSize(width: 36.0, height: 36.0), rotatedContext: { size, context in
-                context.clear(CGRect(origin: .zero, size: size))
-                context.setFillColor(UIColor.white.cgColor)
-                context.fillEllipse(in: CGRect(origin: .zero, size: size))
-
-                if let image = UIImage(bundleImageName: "Camera/DualIcon"), let cgImage = image.cgImage {
-                    context.setBlendMode(.clear)
-                    context.clip(to: CGRect(origin: CGPoint(x: floorToScreenPixels((size.width - image.size.width) / 2.0), y: floorToScreenPixels((size.height - image.size.height) / 2.0) - 1.0), size: image.size), mask: cgImage)
-                    context.fill(CGRect(origin: .zero, size: size))
-                }
-            })?.withRenderingMode(.alwaysTemplate)
-
-            self.iconView.image = image
-            self.iconView.highlightedImage = selectedImage
-
-            self.iconView.layer.shadowOffset = CGSize(width: 0.0, height: 0.0)
-            self.iconView.layer.shadowRadius = 3.0
-            self.iconView.layer.shadowColor = UIColor.black.cgColor
-            self.iconView.layer.shadowOpacity = 0.25
-
-            self.addSubview(self.iconView)
-        }
-
-        required init?(coder: NSCoder) {
-            fatalError("init(coder:) has not been implemented")
-        }
-
-        func update(component: DualIconComponent, availableSize: CGSize, state: EmptyComponentState, environment: Environment<Empty>, transition: ComponentTransition) -> CGSize {
-            self.component = component
-            self.state = state
-
-            let size = CGSize(width: 36.0, height: 36.0)
-            self.iconView.frame = CGRect(origin: .zero, size: size)
-            self.iconView.isHighlighted = component.isSelected
-
-            self.iconView.tintColor = component.tintColor
-
-            return size
-        }
-    }
-
-    public func makeView() -> View {
-        return View(frame: CGRect())
-    }
-
-    public func update(view: View, availableSize: CGSize, state: State, environment: Environment<EnvironmentType>, transition: ComponentTransition) -> CGSize {
-        return view.update(component: self, availableSize: availableSize, state: state, environment: environment, transition: transition)
-    }
-}
-
-private func pipPositionForLocation(layout: ContainerViewLayout, position: CGPoint, velocity: CGPoint) -> CameraScreen.PIPPosition {
+private func pipPositionForLocation(layout: ContainerViewLayout, position: CGPoint, velocity: CGPoint) -> CameraScreenImpl.PIPPosition {
     var layoutInsets = layout.insets(options: [.input])
     layoutInsets.bottom += 48.0
     var result = CGPoint()
@@ -3415,7 +3760,7 @@ private func pipPositionForLocation(layout: ContainerViewLayout, position: CGPoi
         }
     }
 
-    var position: CameraScreen.PIPPosition = .topRight
+    var position: CameraScreenImpl.PIPPosition = .topRight
     if result.x == 0.0 && result.y == 0.0 {
         position = .topLeft
     } else if result.x == 1.0 && result.y == 0.0 {
@@ -3427,3 +3772,4 @@ private func pipPositionForLocation(layout: ContainerViewLayout, position: CGPoi
     }
     return position
 }
+
diff --git a/submodules/TelegramUI/Components/CameraScreen/Sources/CameraVideoLayer.swift b/submodules/TelegramUI/Components/CameraScreen/Sources/CameraVideoLayer.swift
new file mode 100644
index 0000000000..c00df1c15e
--- /dev/null
+++ b/submodules/TelegramUI/Components/CameraScreen/Sources/CameraVideoLayer.swift
@@ -0,0 +1,342 @@
+import Foundation
+import UIKit
+import SwiftSignalKit
+import MetalKit
+import MetalPerformanceShaders
+import Accelerate
+import MetalEngine
+
+public final class VideoSourceOutput {
+    public struct MirrorDirection: OptionSet {
+        public var rawValue: Int32
+
+        public init(rawValue: Int32) {
+            self.rawValue = rawValue
+        }
+
+        public static let horizontal = MirrorDirection(rawValue: 1 << 0)
+        public static let vertical = MirrorDirection(rawValue: 1 << 1)
+    }
+
+    open class DataBuffer {
+        open var pixelBuffer: CVPixelBuffer? {
+            return nil
+        }
+
+        public init() {
+        }
+    }
+
+    public final class BiPlanarTextureLayout {
+        public let y: MTLTexture
+        public let uv: MTLTexture
+
+        public init(y: MTLTexture, uv: MTLTexture) {
+            self.y = y
+            self.uv = uv
+        }
+    }
+
+    public final class TriPlanarTextureLayout {
+        public let y: MTLTexture
+        public let u: MTLTexture
+        public let v: MTLTexture
+
+        public init(y: MTLTexture, u: MTLTexture, v: MTLTexture) {
+            self.y = y
+            self.u = u
+            self.v = v
+        }
+    }
+
+    public enum TextureLayout {
+        case biPlanar(BiPlanarTextureLayout)
+        case triPlanar(TriPlanarTextureLayout)
+    }
+
+    public final class NativeDataBuffer: DataBuffer {
+        private let pixelBufferValue: CVPixelBuffer
+        override public var pixelBuffer: CVPixelBuffer? {
+            return self.pixelBufferValue
+        }
+
+        public init(pixelBuffer: CVPixelBuffer) {
+            self.pixelBufferValue = pixelBuffer
+        }
+    }
+
+    public let resolution: CGSize
+    public let textureLayout: TextureLayout
+    public let dataBuffer: DataBuffer
+    public let mirrorDirection: MirrorDirection
+    public let sourceId: Int
+
+    public init(resolution: CGSize, textureLayout: TextureLayout, dataBuffer: DataBuffer, mirrorDirection: MirrorDirection, sourceId: Int) {
+        self.resolution = resolution
+        self.textureLayout = textureLayout
+        self.dataBuffer = dataBuffer
+        self.mirrorDirection = mirrorDirection
+        self.sourceId = sourceId
+    }
+}
+
+public protocol VideoSource: AnyObject {
+    typealias Output = VideoSourceOutput
+
+    var currentOutput: Output? { get }
+
+    func addOnUpdated(_ f: @escaping () -> Void) -> Disposable
+}
+
+final class CameraVideoLayer: MetalEngineSubjectLayer, MetalEngineSubject {
+    public var internalData: MetalEngineSubjectInternalData?
+
+    public let blurredLayer: MetalEngineSubjectLayer
+
+    final class BlurState: ComputeState {
+        let computePipelineStateYUVBiPlanarToRGBA: MTLComputePipelineState
+        let computePipelineStateYUVTriPlanarToRGBA: MTLComputePipelineState
+        let computePipelineStateHorizontal: MTLComputePipelineState
+        let computePipelineStateVertical: MTLComputePipelineState
+        let downscaleKernel: MPSImageBilinearScale
+
+        required init?(device: MTLDevice) {
+            guard let library = metalLibrary(device: device) else {
+                return nil
+            }
+
+            guard let functionVideoBiPlanarToRGBA = library.makeFunction(name: "videoBiPlanarToRGBA") else {
+                return nil
+            }
+            guard let computePipelineStateYUVBiPlanarToRGBA = try? device.makeComputePipelineState(function: functionVideoBiPlanarToRGBA) else {
+                return nil
+            }
+            self.computePipelineStateYUVBiPlanarToRGBA = computePipelineStateYUVBiPlanarToRGBA
+
+            guard let functionVideoTriPlanarToRGBA = library.makeFunction(name: "videoTriPlanarToRGBA") else {
+                return nil
+            }
+            guard let computePipelineStateYUVTriPlanarToRGBA = try? device.makeComputePipelineState(function: functionVideoTriPlanarToRGBA) else {
+                return nil
+            }
+            self.computePipelineStateYUVTriPlanarToRGBA = computePipelineStateYUVTriPlanarToRGBA
+
+            guard let gaussianBlurHorizontal = library.makeFunction(name: "gaussianBlurHorizontal"), let gaussianBlurVertical = library.makeFunction(name: "gaussianBlurVertical") else {
+                return nil
+            }
+            guard let computePipelineStateHorizontal = try? device.makeComputePipelineState(function: gaussianBlurHorizontal) else {
+                return nil
+            }
+            self.computePipelineStateHorizontal = computePipelineStateHorizontal
+
+            guard let computePipelineStateVertical = try? device.makeComputePipelineState(function: gaussianBlurVertical) else {
+                return nil
+            }
+            self.computePipelineStateVertical = computePipelineStateVertical
+
+            self.downscaleKernel = MPSImageBilinearScale(device: device)
+        }
+    }
+
+    final class RenderState: RenderToLayerState {
+        let pipelineState: MTLRenderPipelineState
+
+        required init?(device: MTLDevice) {
+            guard let library = metalLibrary(device: device) else {
+                return nil
+            }
+            guard let vertexFunction = library.makeFunction(name: "mainVideoVertex"), let fragmentFunction = library.makeFunction(name: "mainVideoFragment") else {
+                return nil
+            }
+
+            let pipelineDescriptor = MTLRenderPipelineDescriptor()
+            pipelineDescriptor.vertexFunction = vertexFunction
+            pipelineDescriptor.fragmentFunction = fragmentFunction
+            pipelineDescriptor.colorAttachments[0].pixelFormat = .bgra8Unorm
+            guard let pipelineState = try? device.makeRenderPipelineState(descriptor: pipelineDescriptor) else {
+                return nil
+            }
+            self.pipelineState = pipelineState
+        }
+    }
+
+    public var video: VideoSource.Output? {
+        didSet {
+            self.setNeedsUpdate()
+        }
+    }
+
+    public var renderSpec: RenderLayerSpec?
+
+    private var rgbaTexture: PooledTexture?
+    private var downscaledTexture: PooledTexture?
+    private var blurredHorizontalTexture: PooledTexture?
+    private var blurredVerticalTexture: PooledTexture?
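+    // The pooled textures above back the conversion/blur chain: the incoming YUV
+    // frame is converted into a full-resolution RGBA texture, and for the blurred
+    // backdrop that RGBA image is downscaled to 256x256 and blurred with separate
+    // horizontal and vertical Gaussian passes (see BlurState), reusing pooled
+    // textures to avoid per-frame allocations.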
+
+    override public init() {
+        self.blurredLayer = MetalEngineSubjectLayer()
+
+        super.init()
+    }
+
+    override public init(layer: Any) {
+        self.blurredLayer = MetalEngineSubjectLayer()
+
+        super.init(layer: layer)
+    }
+
+    required public init?(coder: NSCoder) {
+        fatalError("init(coder:) has not been implemented")
+    }
+
+    public func update(context: MetalEngineSubjectContext) {
+        if self.isHidden {
+            return
+        }
+        guard let renderSpec = self.renderSpec else {
+            return
+        }
+        guard let videoTextures = self.video else {
+            return
+        }
+
+        let rgbaTextureSpec = TextureSpec(width: Int(videoTextures.resolution.width), height: Int(videoTextures.resolution.height), pixelFormat: .rgba8UnsignedNormalized)
+        if self.rgbaTexture == nil || self.rgbaTexture?.spec != rgbaTextureSpec {
+            self.rgbaTexture = MetalEngine.shared.pooledTexture(spec: rgbaTextureSpec)
+        }
+        if self.downscaledTexture == nil {
+            self.downscaledTexture = MetalEngine.shared.pooledTexture(spec: TextureSpec(width: 256, height: 256, pixelFormat: .rgba8UnsignedNormalized))
+        }
+        if self.blurredHorizontalTexture == nil {
+            self.blurredHorizontalTexture = MetalEngine.shared.pooledTexture(spec: TextureSpec(width: 256, height: 256, pixelFormat: .rgba8UnsignedNormalized))
+        }
+        if self.blurredVerticalTexture == nil {
+            self.blurredVerticalTexture = MetalEngine.shared.pooledTexture(spec: TextureSpec(width: 256, height: 256, pixelFormat: .rgba8UnsignedNormalized))
+        }
+
+        guard let rgbaTexture = self.rgbaTexture?.get(context: context) else {
+            return
+        }
+
+        let _ = context.compute(state: BlurState.self, inputs: rgbaTexture.placeholer, commands: { commandBuffer, blurState, rgbaTexture in
+            guard let rgbaTexture else {
+                return
+            }
+            guard let computeEncoder = commandBuffer.makeComputeCommandEncoder() else {
+                return
+            }
+
+            let threadgroupSize = MTLSize(width: 16, height: 16, depth: 1)
+            let threadgroupCount = MTLSize(width: (rgbaTexture.width + threadgroupSize.width - 1) / threadgroupSize.width, height: (rgbaTexture.height + threadgroupSize.height - 1) / threadgroupSize.height, depth: 1)
+
+            switch videoTextures.textureLayout {
+            case let .biPlanar(biPlanar):
+                computeEncoder.setComputePipelineState(blurState.computePipelineStateYUVBiPlanarToRGBA)
+                computeEncoder.setTexture(biPlanar.y, index: 0)
+                computeEncoder.setTexture(biPlanar.uv, index: 1)
+                computeEncoder.setTexture(rgbaTexture, index: 2)
+            case let .triPlanar(triPlanar):
+                computeEncoder.setComputePipelineState(blurState.computePipelineStateYUVTriPlanarToRGBA)
+                computeEncoder.setTexture(triPlanar.y, index: 0)
+                computeEncoder.setTexture(triPlanar.u, index: 1)
+                computeEncoder.setTexture(triPlanar.v, index: 2)
+                computeEncoder.setTexture(rgbaTexture, index: 3)
+            }
+            computeEncoder.dispatchThreadgroups(threadgroupCount, threadsPerThreadgroup: threadgroupSize)
+
+            computeEncoder.endEncoding()
+        })
+
+        if !self.blurredLayer.isHidden {
+            guard let downscaledTexture = self.downscaledTexture?.get(context: context), let blurredHorizontalTexture = self.blurredHorizontalTexture?.get(context: context), let blurredVerticalTexture = self.blurredVerticalTexture?.get(context: context) else {
+                return
+            }
+
+            let blurredTexture = context.compute(state: BlurState.self, inputs: rgbaTexture.placeholer, downscaledTexture.placeholer, blurredHorizontalTexture.placeholer, blurredVerticalTexture.placeholer, commands: { commandBuffer, blurState, rgbaTexture, downscaledTexture, blurredHorizontalTexture, blurredVerticalTexture -> MTLTexture? in
+                guard let rgbaTexture, let downscaledTexture, let blurredHorizontalTexture, let blurredVerticalTexture else {
+                    return nil
+                }
+
+                blurState.downscaleKernel.encode(commandBuffer: commandBuffer, sourceTexture: rgbaTexture, destinationTexture: downscaledTexture)
+
+                do {
+                    guard let computeEncoder = commandBuffer.makeComputeCommandEncoder() else {
+                        return nil
+                    }
+
+                    let threadgroupSize = MTLSize(width: 16, height: 16, depth: 1)
+                    let threadgroupCount = MTLSize(width: (downscaledTexture.width + threadgroupSize.width - 1) / threadgroupSize.width, height: (downscaledTexture.height + threadgroupSize.height - 1) / threadgroupSize.height, depth: 1)
+
+                    computeEncoder.setComputePipelineState(blurState.computePipelineStateHorizontal)
+                    computeEncoder.setTexture(downscaledTexture, index: 0)
+                    computeEncoder.setTexture(blurredHorizontalTexture, index: 1)
+                    computeEncoder.dispatchThreadgroups(threadgroupCount, threadsPerThreadgroup: threadgroupSize)
+
+                    computeEncoder.setComputePipelineState(blurState.computePipelineStateVertical)
+                    computeEncoder.setTexture(blurredHorizontalTexture, index: 0)
+                    computeEncoder.setTexture(blurredVerticalTexture, index: 1)
+                    computeEncoder.dispatchThreadgroups(threadgroupCount, threadsPerThreadgroup: threadgroupSize)
+
+                    computeEncoder.endEncoding()
+                }
+
+                return blurredVerticalTexture
+            })
+
+            context.renderToLayer(spec: renderSpec, state: RenderState.self, layer: self.blurredLayer, inputs: blurredTexture, commands: { encoder, placement, blurredTexture in
+                guard let blurredTexture else {
+                    return
+                }
+                let effectiveRect = placement.effectiveRect
+
+                var rect = SIMD4<Float>(Float(effectiveRect.minX), Float(effectiveRect.minY), Float(effectiveRect.width), Float(effectiveRect.height))
+                encoder.setVertexBytes(&rect, length: 4 * 4, index: 0)
+
+                var mirror = SIMD2<UInt32>(
+                    videoTextures.mirrorDirection.contains(.horizontal) ? 1 : 0,
+                    videoTextures.mirrorDirection.contains(.vertical) ? 1 : 0
+                )
+                encoder.setVertexBytes(&mirror, length: 2 * 4, index: 1)
+
+                encoder.setFragmentTexture(blurredTexture, index: 0)
+
+                var brightness: Float = 0.75
+                var saturation: Float = 1.3
+                var overlay: SIMD4<Float> = SIMD4<Float>()
+                encoder.setFragmentBytes(&brightness, length: 4, index: 0)
+                encoder.setFragmentBytes(&saturation, length: 4, index: 1)
+                encoder.setFragmentBytes(&overlay, length: 4 * 4, index: 2)
+
+                encoder.drawPrimitives(type: .triangle, vertexStart: 0, vertexCount: 6)
+            })
+        }
+
+        context.renderToLayer(spec: renderSpec, state: RenderState.self, layer: self, inputs: rgbaTexture.placeholer, commands: { encoder, placement, rgbaTexture in
+            guard let rgbaTexture else {
+                return
+            }
+
+            let effectiveRect = placement.effectiveRect
+
+            var rect = SIMD4<Float>(Float(effectiveRect.minX), Float(effectiveRect.minY), Float(effectiveRect.width), Float(effectiveRect.height))
+            encoder.setVertexBytes(&rect, length: 4 * 4, index: 0)
+
+            var mirror = SIMD2<UInt32>(
+                videoTextures.mirrorDirection.contains(.horizontal) ? 1 : 0,
+                videoTextures.mirrorDirection.contains(.vertical) ? 1 : 0
+            )
+            encoder.setVertexBytes(&mirror, length: 2 * 4, index: 1)
+
+            encoder.setFragmentTexture(rgbaTexture, index: 0)
+
+            var brightness: Float = 1.0
+            var saturation: Float = 1.0
+            var overlay: SIMD4<Float> = SIMD4<Float>()
+            encoder.setFragmentBytes(&brightness, length: 4, index: 0)
+            encoder.setFragmentBytes(&saturation, length: 4, index: 1)
+            encoder.setFragmentBytes(&overlay, length: 4 * 4, index: 2)
+
+            encoder.drawPrimitives(type: .triangle, vertexStart: 0, vertexCount: 6)
+        })
+    }
+}
diff --git a/submodules/TelegramUI/Components/CameraScreen/Sources/CameraVideoSource.swift b/submodules/TelegramUI/Components/CameraScreen/Sources/CameraVideoSource.swift
new file mode 100644
index 0000000000..dd4d917ebc
--- /dev/null
+++ b/submodules/TelegramUI/Components/CameraScreen/Sources/CameraVideoSource.swift
@@ -0,0 +1,82 @@
+import AVFoundation
+import Metal
+import CoreVideo
+import Display
+import SwiftSignalKit
+import Camera
+import MetalEngine
+
+final class CameraVideoSource: VideoSource {
+    private var device: MTLDevice
+    private var textureCache: CVMetalTextureCache?
+
+    private(set) var cameraVideoOutput: CameraVideoOutput!
+
+    public private(set) var currentOutput: Output?
+    private var onUpdatedListeners = Bag<() -> Void>()
+
+    public var sourceId: Int = 0
+    public var sizeMultiplicator: CGPoint = CGPoint(x: 1.0, y: 1.0)
+
+    public init?() {
+        self.device = MetalEngine.shared.device
+
+        self.cameraVideoOutput = CameraVideoOutput(sink: { [weak self] buffer in
+            self?.push(buffer)
+        })
+
+        CVMetalTextureCacheCreate(nil, nil, self.device, nil, &self.textureCache)
+    }
+
+    public func addOnUpdated(_ f: @escaping () -> Void) -> Disposable {
+        let index = self.onUpdatedListeners.add(f)
+
+        return ActionDisposable { [weak self] in
+            DispatchQueue.main.async {
+                guard let self else {
+                    return
+                }
+                self.onUpdatedListeners.remove(index)
+            }
+        }
+    }
+
+    private func push(_ sampleBuffer: CMSampleBuffer) {
+        guard let buffer = CMSampleBufferGetImageBuffer(sampleBuffer) else {
+            return
+        }
+
+        let width = CVPixelBufferGetWidth(buffer)
+        let height = CVPixelBufferGetHeight(buffer)
+
+        var cvMetalTextureY: CVMetalTexture?
+        var status = CVMetalTextureCacheCreateTextureFromImage(nil, self.textureCache!, buffer, nil, .r8Unorm, width, height, 0, &cvMetalTextureY)
+        guard status == kCVReturnSuccess, let yTexture = CVMetalTextureGetTexture(cvMetalTextureY!) else {
+            return
+        }
+        var cvMetalTextureUV: CVMetalTexture?
+        status = CVMetalTextureCacheCreateTextureFromImage(nil, self.textureCache!, buffer, nil, .rg8Unorm, width / 2, height / 2, 1, &cvMetalTextureUV)
+        guard status == kCVReturnSuccess, let uvTexture = CVMetalTextureGetTexture(cvMetalTextureUV!) else {
+            return
+        }
+
+        var resolution = CGSize(width: CGFloat(yTexture.width), height: CGFloat(yTexture.height))
+        resolution.width = floor(resolution.width * self.sizeMultiplicator.x)
+        resolution.height = floor(resolution.height * self.sizeMultiplicator.y)
+
+        self.currentOutput = Output(
+            resolution: resolution,
+            textureLayout: .biPlanar(Output.BiPlanarTextureLayout(
+                y: yTexture,
+                uv: uvTexture
+            )),
+            dataBuffer: Output.NativeDataBuffer(pixelBuffer: buffer),
+            mirrorDirection: [],
+            sourceId: self.sourceId
+        )
+
+        for onUpdated in self.onUpdatedListeners.copyItems() {
+            onUpdated()
+        }
+    }
+}
diff --git a/submodules/TelegramUI/Components/CameraScreen/Sources/CaptureControlsComponent.swift b/submodules/TelegramUI/Components/CameraScreen/Sources/CaptureControlsComponent.swift
index eb5b47c728..3b472fb6d2 100644
--- a/submodules/TelegramUI/Components/CameraScreen/Sources/CaptureControlsComponent.swift
+++ b/submodules/TelegramUI/Components/CameraScreen/Sources/CaptureControlsComponent.swift
@@ -6,6 +6,7 @@ import SwiftSignalKit
 import Photos
 import LocalMediaResources
 import CameraButtonComponent
+import UIKitRuntimeUtils
 
 enum ShutterButtonState: Equatable {
     case disabled
@@ -34,6 +35,7 @@ private final class ShutterButtonContentComponent: Component {
     let tintColor: UIColor
     let shutterState: ShutterButtonState
     let blobState: ShutterBlobView.BlobState
+    let collageProgress: Float
     let highlightedAction: ActionSlot<Bool>
     let updateOffsetX: ActionSlot<(CGFloat, ComponentTransition)>
     let updateOffsetY: ActionSlot<(CGFloat, ComponentTransition)>
@@ -44,6 +46,7 @@
         tintColor: UIColor,
         shutterState: ShutterButtonState,
         blobState: ShutterBlobView.BlobState,
+        collageProgress: Float,
         highlightedAction: ActionSlot<Bool>,
         updateOffsetX: ActionSlot<(CGFloat, ComponentTransition)>,
         updateOffsetY: ActionSlot<(CGFloat, ComponentTransition)>
@@ -53,6 +56,7 @@
         self.tintColor = tintColor
         self.shutterState = shutterState
         self.blobState = blobState
+        self.collageProgress = collageProgress
         self.highlightedAction = highlightedAction
         self.updateOffsetX = updateOffsetX
         self.updateOffsetY = updateOffsetY
@@ -74,17 +78,25 @@
         if lhs.blobState != rhs.blobState {
             return false
         }
+        if lhs.collageProgress != rhs.collageProgress {
+            return false
+        }
         return true
     }
 
     final class View: UIView {
         private var component: ShutterButtonContentComponent?
 
+        private let underRingLayer = SimpleShapeLayer()
         private let ringLayer = SimpleShapeLayer()
         var blobView: ShutterBlobView?
         private let innerLayer = SimpleShapeLayer()
         private let progressLayer = SimpleShapeLayer()
 
+        private let checkLayer = SimpleLayer()
+        private let checkLayerMask = SimpleShapeLayer()
+        private let checkLayerLineMask = SimpleShapeLayer()
+
         init() {
             super.init(frame: CGRect())
 
@@ -92,7 +104,34 @@
 
             self.progressLayer.strokeEnd = 0.0
 
+            let checkPath = CGMutablePath()
+            checkPath.move(to: CGPoint(x: 18.0 + 2.0, y: 18.0 + 13.0))
+            checkPath.addLine(to: CGPoint(x: 18.0 + 9.0, y: 18.0 + 20.0))
+            checkPath.addLine(to: CGPoint(x: 18.0 + 22.0, y: 18.0 + 7.0))
+
+            self.checkLayer.frame = CGRect(origin: .zero, size: CGSize(width: 60.0, height: 60.0))
+            if let filter = makeLuminanceToAlphaFilter() {
+                self.checkLayerMask.filters = [filter]
+            }
+            self.checkLayerMask.backgroundColor = UIColor.black.cgColor
+            self.checkLayerMask.fillColor = UIColor.white.cgColor
+            self.checkLayerMask.path = CGPath(ellipseIn: self.checkLayer.frame, transform: nil)
+            self.checkLayerMask.frame = self.checkLayer.frame
+
+            self.checkLayerLineMask.path = checkPath
+            self.checkLayerLineMask.lineWidth = 3.0
+            self.checkLayerLineMask.lineCap = .round
+            self.checkLayerLineMask.lineJoin = .round
+            self.checkLayerLineMask.fillColor = UIColor.clear.cgColor
+            self.checkLayerLineMask.strokeColor = UIColor.black.cgColor
+            self.checkLayerLineMask.frame = self.checkLayer.frame
+            self.checkLayerMask.addSublayer(self.checkLayerLineMask)
+
+            self.checkLayer.mask = self.checkLayerMask
+            self.checkLayer.isHidden = true
+
             self.layer.addSublayer(self.innerLayer)
+            self.layer.addSublayer(self.underRingLayer)
             self.layer.addSublayer(self.ringLayer)
             self.layer.addSublayer(self.progressLayer)
         }
@@ -102,21 +141,27 @@
         }
 
         func updateIsHighlighted(_ isHighlighted: Bool) {
-            guard let blobView = self.blobView else {
+            guard let blobView = self.blobView, let component = self.component else {
                 return
             }
             let scale: CGFloat = isHighlighted ? 0.8 : 1.0
             let transition = ComponentTransition(animation: .curve(duration: 0.3, curve: .easeInOut))
             transition.setTransform(view: blobView, transform: CATransform3DMakeScale(scale, scale, 1.0))
+            if component.collageProgress > 1.0 - .ulpOfOne {
+                transition.setTransform(layer: self.ringLayer, transform: CATransform3DMakeScale(scale, scale, 1.0))
+            }
         }
 
         func update(component: ShutterButtonContentComponent, availableSize: CGSize, transition: ComponentTransition) -> CGSize {
+            let previousComponent = self.component
             self.component = component
 
             if component.hasAppeared && self.blobView == nil {
                 self.blobView = ShutterBlobView(test: false)
                 self.addSubview(self.blobView!)
+                self.layer.addSublayer(self.checkLayer)
+
                 Queue.mainQueue().after(0.2) {
                     self.innerLayer.removeFromSuperlayer()
                 }
@@ -163,10 +208,10 @@
             }
 
-            let innerColor: UIColor
+            var innerColor: UIColor
             let innerSize: CGSize
-            let ringSize: CGSize
-            let ringWidth: CGFloat = 3.0
+            var ringSize: CGSize
+            var ringWidth: CGFloat = 3.0
             var recordingProgress: Float?
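+            // Collage capture reshapes the shutter: while the grid is filling
+            // (0 < collageProgress < 1) the ring grows thicker and doubles as a
+            // progress indicator over underRingLayer; at 1.0 the blob is swapped
+            // for a check mark drawn via checkLayerLineMask.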
             switch component.shutterState {
             case .generic, .disabled:
@@ -193,9 +238,35 @@
                 recordingProgress = 0.0
             }
 
+            if component.collageProgress > 1.0 - .ulpOfOne {
+                innerColor = component.tintColor
+                ringSize = CGSize(width: 60.0, height: 60.0)
+                ringWidth = 5.0
+            } else if component.collageProgress > 0.0 {
+                ringSize = CGSize(width: 74.0, height: 74.0)
+                ringWidth = 5.0
+            }
+
+            if component.collageProgress > 1.0 - .ulpOfOne {
+                self.blobView?.isHidden = true
+                self.checkLayer.isHidden = false
+                transition.setShapeLayerStrokeEnd(layer: self.checkLayerLineMask, strokeEnd: 1.0)
+            } else {
+                self.checkLayer.isHidden = true
+                self.blobView?.isHidden = false
+//                transition.setAlpha(layer: self.checkLayerLineMask, alpha: 0.0)
+//                transition.setShapeLayerStrokeEnd(layer: self.checkLayerLineMask, strokeEnd: 0.0, completion: { _ in
+//                    self.blobView?.isHidden = false
+//                    self.checkLayer.isHidden = true
+//                })
+            }
+
+            self.checkLayer.backgroundColor = innerColor.cgColor
+
             self.ringLayer.fillColor = UIColor.clear.cgColor
             self.ringLayer.strokeColor = component.tintColor.cgColor
             self.ringLayer.lineWidth = ringWidth
+            self.ringLayer.lineCap = .round
             let ringPath = CGPath(
                 ellipseIn: CGRect(
                     origin: CGPoint(
@@ -208,6 +279,25 @@
             transition.setShapeLayerPath(layer: self.ringLayer, path: ringPath)
             self.ringLayer.bounds = CGRect(origin: .zero, size: maximumShutterSize)
             self.ringLayer.position = CGPoint(x: maximumShutterSize.width / 2.0, y: maximumShutterSize.height / 2.0)
+            self.ringLayer.transform = CATransform3DMakeRotation(-.pi / 2.0, 0.0, 0.0, 1.0)
+
+            self.checkLayer.position = CGPoint(x: maximumShutterSize.width / 2.0, y: maximumShutterSize.height / 2.0)
+
+            if component.collageProgress > 0.0 {
+                if previousComponent?.collageProgress == 0.0 {
+                    self.ringLayer.animateRotation(from: -.pi * 3.0 / 2.0, to: -.pi / 2.0, duration: 0.3, timingFunction: kCAMediaTimingFunctionSpring)
+                }
+                transition.setShapeLayerStrokeEnd(layer: self.ringLayer, strokeEnd: CGFloat(component.collageProgress))
+            } else {
+                transition.setShapeLayerStrokeEnd(layer: self.ringLayer, strokeEnd: 1.0)
+            }
+
+            self.underRingLayer.fillColor = UIColor.clear.cgColor
+            self.underRingLayer.strokeColor = component.tintColor.withAlphaComponent(0.2).cgColor
+            self.underRingLayer.lineWidth = ringWidth
+            transition.setShapeLayerPath(layer: self.underRingLayer, path: ringPath)
+            self.underRingLayer.bounds = CGRect(origin: .zero, size: maximumShutterSize)
+            self.underRingLayer.position = CGPoint(x: maximumShutterSize.width / 2.0, y: maximumShutterSize.height / 2.0)
 
             if let blobView = self.blobView {
                 blobView.updateState(component.blobState, tintColor: innerColor, transition: transition)
@@ -462,6 +552,8 @@ final class CaptureControlsComponent: Component {
     let isSticker: Bool
     let hasAppeared: Bool
     let hasAccess: Bool
+    let hideControls: Bool
+    let collageProgress: Float
     let tintColor: UIColor
     let shutterState: ShutterButtonState
     let lastGalleryAsset: PHAsset?
@@ -482,6 +574,8 @@
         isSticker: Bool,
         hasAppeared: Bool,
         hasAccess: Bool,
+        hideControls: Bool,
+        collageProgress: Float,
         tintColor: UIColor,
         shutterState: ShutterButtonState,
         lastGalleryAsset: PHAsset?,
@@ -501,6 +595,8 @@
         self.isSticker = isSticker
         self.hasAppeared = hasAppeared
         self.hasAccess = hasAccess
+        self.hideControls = hideControls
+        self.collageProgress = collageProgress
         self.tintColor = tintColor
         self.shutterState = shutterState
         self.lastGalleryAsset = lastGalleryAsset
@@ -530,6 +626,12 @@
         if lhs.hasAccess != rhs.hasAccess {
             return false
         }
+        if lhs.hideControls != rhs.hideControls {
+            return false
+        }
+        if lhs.collageProgress != rhs.collageProgress {
+            return false
+        }
         if lhs.tintColor != rhs.tintColor {
             return false
         }
@@ -918,6 +1020,8 @@
             isTransitioning = true
         }
 
+        let hideControls = component.hideControls
+
         let galleryButtonFrame: CGRect
         let gallerySize: CGSize
         if !component.isSticker {
@@ -974,8 +1078,8 @@
 
                 let normalAlpha = component.tintColor.rgb == 0xffffff ? 1.0 : 0.6
 
-                transition.setScale(view: galleryButtonView, scale: isRecording || isTransitioning ? 0.1 : 1.0)
-                transition.setAlpha(view: galleryButtonView, alpha: isRecording || isTransitioning ? 0.0 : normalAlpha)
+                transition.setScale(view: galleryButtonView, scale: isRecording || isTransitioning || hideControls ? 0.1 : 1.0)
+                transition.setAlpha(view: galleryButtonView, alpha: isRecording || isTransitioning || hideControls ? 0.0 : normalAlpha)
             }
         } else {
             galleryButtonFrame = .zero
@@ -1017,8 +1121,8 @@
                 transition.setBounds(view: flipButtonView, bounds: CGRect(origin: .zero, size: flipButtonFrame.size))
                 transition.setPosition(view: flipButtonView, position: flipButtonFrame.center)
 
-                transition.setScale(view: flipButtonView, scale: isTransitioning ? 0.01 : 1.0)
-                transition.setAlpha(view: flipButtonView, alpha: isTransitioning ? 0.0 : 1.0)
+                transition.setScale(view: flipButtonView, scale: isTransitioning || hideControls ? 0.01 : 1.0)
+                transition.setAlpha(view: flipButtonView, alpha: isTransitioning || hideControls ? 0.0 : 1.0)
             }
         } else if let flipButtonView = self.flipButtonView.view {
             flipButtonView.removeFromSuperview()
@@ -1047,6 +1151,7 @@
             tintColor: component.tintColor,
             shutterState: component.shutterState,
             blobState: blobState,
+            collageProgress: component.collageProgress,
             highlightedAction: self.shutterHightlightedAction,
             updateOffsetX: self.shutterUpdateOffsetX,
             updateOffsetY: self.shutterUpdateOffsetY
diff --git a/submodules/TelegramUI/Components/CameraScreen/Sources/CollageIconComponent.swift b/submodules/TelegramUI/Components/CameraScreen/Sources/CollageIconComponent.swift
new file mode 100644
index 0000000000..25fc72a6a3
--- /dev/null
+++ b/submodules/TelegramUI/Components/CameraScreen/Sources/CollageIconComponent.swift
@@ -0,0 +1,329 @@
+import Foundation
+import UIKit
+import Display
+import ComponentFlow
+import Camera
+import CameraButtonComponent
+
+private func generateCollageIcon(grid: Camera.CollageGrid, crossed: Bool) -> UIImage? {
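+    // Draws a rounded-rect outline subdivided per row/column to depict the grid;
+    // when `crossed` is set, a diagonal slash (with a cleared line underneath it
+    // for contrast) marks the collage-off state.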
+    return generateImage(CGSize(width: 36.0, height: 36.0), rotatedContext: { size, context in
+        let bounds = CGRect(origin: .zero, size: size)
+        context.clear(bounds)
+
+        let lineWidth = 2.0 - UIScreenPixel
+        context.setLineWidth(lineWidth)
+        context.setStrokeColor(UIColor.white.cgColor)
+
+        let iconBounds = bounds.insetBy(dx: 11.0, dy: 9.0)
+        let path = UIBezierPath(roundedRect: iconBounds, cornerRadius: 3.0)
+        context.addPath(path.cgPath)
+        context.strokePath()
+
+        let rowHeight = iconBounds.height / CGFloat(grid.rows.count)
+
+        var yOffset: CGFloat = iconBounds.minY + lineWidth / 2.0
+        for i in 0 ..< grid.rows.count {
+            let row = grid.rows[i]
+            var xOffset: CGFloat = iconBounds.minX
+            let lineCount = max(0, row.columns - 1)
+            let colWidth = iconBounds.width / CGFloat(max(row.columns, 1))
+            for _ in 0 ..< lineCount {
+                xOffset += colWidth
+                context.move(to: CGPoint(x: xOffset, y: yOffset))
+                context.addLine(to: CGPoint(x: xOffset, y: yOffset + rowHeight))
+                context.strokePath()
+            }
+            yOffset += rowHeight
+
+            if i != grid.rows.count - 1 {
+                context.move(to: CGPoint(x: iconBounds.minX, y: yOffset - lineWidth / 2.0))
+                context.addLine(to: CGPoint(x: iconBounds.maxX, y: yOffset - lineWidth / 2.0))
+                context.strokePath()
+            }
+        }
+
+        if crossed {
+            context.setLineCap(.round)
+
+            let startPoint = CGPoint(x: iconBounds.minX - 3.0, y: iconBounds.minY - 2.0)
+            let endPoint = CGPoint(x: iconBounds.maxX + 4.0, y: iconBounds.maxY + 1.0)
+
+            context.setBlendMode(.clear)
+            context.move(to: startPoint.offsetBy(dx: 0.0, dy: lineWidth))
+            context.addLine(to: endPoint.offsetBy(dx: 0.0, dy: lineWidth))
+            context.strokePath()
+
+            context.setBlendMode(.normal)
+
+            context.move(to: startPoint)
+            context.addLine(to: endPoint)
+            context.strokePath()
+        }
+    })
+}
+
+final class CollageIconComponent: Component {
+    typealias EnvironmentType = Empty
+
+    let grid: Camera.CollageGrid
+    let crossed: Bool
+    let isSelected: Bool
+    let tintColor: UIColor
+
+    init(
+        grid: Camera.CollageGrid,
+        crossed: Bool,
+        isSelected: Bool,
+        tintColor: UIColor
+    ) {
+        self.grid = grid
+        self.crossed = crossed
+        self.isSelected = isSelected
+        self.tintColor = tintColor
+    }
+
+    static func ==(lhs: CollageIconComponent, rhs: CollageIconComponent) -> Bool {
+        if lhs.grid != rhs.grid {
+            return false
+        }
+        if lhs.crossed != rhs.crossed {
+            return false
+        }
+        if lhs.isSelected != rhs.isSelected {
+            return false
+        }
+        if lhs.tintColor != rhs.tintColor {
+            return false
+        }
+        return true
+    }
+
+    final class View: UIView {
+        private let iconView = UIImageView()
+
+        private var component: CollageIconComponent?
+        private weak var state: EmptyComponentState?
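+
+        // The selected image is the grid glyph knocked out of a filled circle
+        // (blend mode .clear), mirroring DualIconComponent; both variants are
+        // template images so tintColor applies uniformly.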
+
+        override init(frame: CGRect) {
+            super.init(frame: frame)
+
+            self.addSubview(self.iconView)
+        }
+
+        required init?(coder: NSCoder) {
+            fatalError("init(coder:) has not been implemented")
+        }
+
+        func update(component: CollageIconComponent, availableSize: CGSize, state: EmptyComponentState, environment: Environment<Empty>, transition: ComponentTransition) -> CGSize {
+            let previousComponent = self.component
+            self.component = component
+            self.state = state
+
+            if component.grid != previousComponent?.grid {
+                let image = generateCollageIcon(grid: component.grid, crossed: component.crossed)
+                let selectedImage = generateImage(CGSize(width: 36.0, height: 36.0), contextGenerator: { size, context in
+                    context.clear(CGRect(origin: .zero, size: size))
+                    context.setFillColor(UIColor.white.cgColor)
+                    context.fillEllipse(in: CGRect(origin: .zero, size: size))
+
+                    if let image, let cgImage = image.cgImage {
+                        context.setBlendMode(.clear)
+                        context.clip(to: CGRect(origin: CGPoint(x: floorToScreenPixels((size.width - image.size.width) / 2.0), y: floorToScreenPixels((size.height - image.size.height) / 2.0) - 1.0), size: image.size), mask: cgImage)
+                        context.fill(CGRect(origin: .zero, size: size))
+                    }
+                })?.withRenderingMode(.alwaysTemplate)
+
+                self.iconView.image = image
+
+                if self.iconView.isHighlighted {
+                    self.iconView.isHighlighted = false
+                    self.iconView.highlightedImage = selectedImage
+                    self.iconView.isHighlighted = true
+                } else {
+                    self.iconView.highlightedImage = selectedImage
+                }
+            }
+
+            let size = CGSize(width: 36.0, height: 36.0)
+            self.iconView.frame = CGRect(origin: .zero, size: size)
+            self.iconView.isHighlighted = component.isSelected
+
+            self.iconView.tintColor = component.tintColor
+
+            return size
+        }
+    }
+
+    public func makeView() -> View {
+        return View(frame: CGRect())
+    }
+
+    public func update(view: View, availableSize: CGSize, state: State, environment: Environment<EnvironmentType>, transition: ComponentTransition) -> CGSize {
+        return view.update(component: self, availableSize: availableSize, state: state, environment: environment, transition: transition)
+    }
+}
+
+final class CollageIconCarouselComponent: Component {
+    typealias EnvironmentType = Empty
+
+    let grids: [Camera.CollageGrid]
+    let selected: (Camera.CollageGrid) -> Void
+
+    init(
+        grids: [Camera.CollageGrid],
+        selected: @escaping (Camera.CollageGrid) -> Void
+    ) {
+        self.grids = grids
+        self.selected = selected
+    }
+
+    static func ==(lhs: CollageIconCarouselComponent, rhs: CollageIconCarouselComponent) -> Bool {
+        if lhs.grids != rhs.grids {
+            return false
+        }
+        return true
+    }
+
+    final class View: UIView {
+        private let clippingView = UIView()
+        private let scrollView = UIScrollView()
+
+        private var itemViews: [AnyHashable: ComponentView<Empty>] = [:]
+
+        private var component: CollageIconCarouselComponent?
+        private weak var state: EmptyComponentState?
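+
+        // The carousel is a plain UIScrollView of CameraButton items; clippingView
+        // carries a horizontal gradient mask so items fade at both edges, and each
+        // item rasterizes its shadow to keep scrolling cheap.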
+
+        override init(frame: CGRect) {
+            super.init(frame: frame)
+
+            self.scrollView.contentInsetAdjustmentBehavior = .never
+            self.scrollView.showsVerticalScrollIndicator = false
+            self.scrollView.showsHorizontalScrollIndicator = false
+
+            self.addSubview(self.clippingView)
+            self.clippingView.addSubview(self.scrollView)
+        }
+
+        required init?(coder: NSCoder) {
+            fatalError("init(coder:) has not been implemented")
+        }
+
+        func update(component: CollageIconCarouselComponent, availableSize: CGSize, state: EmptyComponentState, environment: Environment<Empty>, transition: ComponentTransition) -> CGSize {
+            self.component = component
+            self.state = state
+
+            let inset: CGFloat = 27.0
+            let spacing: CGFloat = 8.0
+            var contentWidth: CGFloat = inset
+            let buttonSize = CGSize(width: 40.0, height: 40.0)
+
+            var validIds: [AnyHashable] = []
+            for grid in component.grids {
+                validIds.append(grid)
+
+                let itemView: ComponentView<Empty>
+                if let current = itemViews[grid] {
+                    itemView = current
+                } else {
+                    itemView = ComponentView<Empty>()
+                    self.itemViews[grid] = itemView
+                }
+                let itemSize = itemView.update(
+                    transition: .immediate,
+                    component: AnyComponent(CameraButton(
+                        content: AnyComponentWithIdentity(
+                            id: "content",
+                            component: AnyComponent(
+                                CollageIconComponent(
+                                    grid: grid,
+                                    crossed: false,
+                                    isSelected: false,
+                                    tintColor: .white
+                                )
+                            )
+                        ),
+                        action: { [weak self] in
+                            if let component = self?.component {
+                                component.selected(grid)
+                            }
+                        }
+                    )),
+                    environment: {},
+                    containerSize: buttonSize
+                )
+                if let view = itemView.view {
+                    if view.superview == nil {
+                        self.scrollView.addSubview(view)
+
+                        view.layer.shadowOffset = CGSize(width: 0.0, height: 0.0)
+                        view.layer.shadowRadius = 3.0
+                        view.layer.shadowColor = UIColor.black.cgColor
+                        view.layer.shadowOpacity = 0.25
+                        view.layer.rasterizationScale = UIScreenScale
+                        view.layer.shouldRasterize = true
+                    }
+                    view.frame = CGRect(origin: CGPoint(x: contentWidth, y: 0.0), size: itemSize)
+                }
+                contentWidth += itemSize.width + spacing
+            }
+
+            let contentSize = CGSize(width: contentWidth, height: buttonSize.height)
+            if self.scrollView.contentSize != contentSize {
+                self.scrollView.contentSize = contentSize
+            }
+            self.scrollView.frame = CGRect(origin: .zero, size: availableSize)
+            self.clippingView.frame = CGRect(origin: .zero, size: availableSize)
+
+            if self.clippingView.mask == nil {
+                if let maskImage = generateGradientImage(size: CGSize(width: 42.0, height: 10.0), colors: [UIColor.clear, UIColor.black, UIColor.black, UIColor.clear], locations: [0.0, 0.3, 0.7, 1.0], direction: .horizontal) {
+                    let maskView = UIImageView(image: maskImage.stretchableImage(withLeftCapWidth: 13, topCapHeight: 0))
+                    self.clippingView.mask = maskView
+                }
+            }
+            self.clippingView.mask?.frame = CGRect(origin: .zero, size: availableSize)
+
+            var removeIds: [AnyHashable] = []
+            for (id, itemView) in self.itemViews {
+                if !validIds.contains(id) {
+                    removeIds.append(id)
+                    itemView.view?.removeFromSuperview()
+                }
+            }
+            for id in removeIds {
+                self.itemViews.removeValue(forKey: id)
+            }
+
+            return availableSize
+        }
+
+        func animateIn() {
+            guard self.frame.width > 0.0 else {
+                return
+            }
+            self.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.25)
+            for (_, itemView) in self.itemViews {
+                itemView.view?.layer.animatePosition(from: CGPoint(x: self.frame.width, y: 0.0), to: .zero, duration: 0.3, timingFunction: kCAMediaTimingFunctionSpring, additive: true)
+            }
+        }
+
+        func animateOut(completion: @escaping () -> Void) {
+            guard self.frame.width > 0.0 else {
+                return
+            }
+            self.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.25, removeOnCompletion: false, completion: { _ in
+                completion()
+            })
+            for (_, itemView) in self.itemViews {
+                itemView.view?.layer.animatePosition(from: .zero, to: CGPoint(x: self.frame.width + self.scrollView.contentOffset.x, y: 0.0), duration: 0.3, timingFunction: kCAMediaTimingFunctionSpring, removeOnCompletion: false, additive: true)
+            }
+        }
+    }
+
+    public func makeView() -> View {
+        return View(frame: CGRect())
+    }
+
+    public func update(view: View, availableSize: CGSize, state: State, environment: Environment<EnvironmentType>, transition: ComponentTransition) -> CGSize {
+        return view.update(component: self, availableSize: availableSize, state: state, environment: environment, transition: transition)
+    }
+}
diff --git a/submodules/TelegramUI/Components/CameraScreen/Sources/DualIconComponent.swift b/submodules/TelegramUI/Components/CameraScreen/Sources/DualIconComponent.swift
new file mode 100644
index 0000000000..736f858101
--- /dev/null
+++ b/submodules/TelegramUI/Components/CameraScreen/Sources/DualIconComponent.swift
@@ -0,0 +1,90 @@
+import Foundation
+import UIKit
+import Display
+import ComponentFlow
+
+final class DualIconComponent: Component {
+    typealias EnvironmentType = Empty
+
+    let isSelected: Bool
+    let tintColor: UIColor
+
+    init(
+        isSelected: Bool,
+        tintColor: UIColor
+    ) {
+        self.isSelected = isSelected
+        self.tintColor = tintColor
+    }
+
+    static func ==(lhs: DualIconComponent, rhs: DualIconComponent) -> Bool {
+        if lhs.isSelected != rhs.isSelected {
+            return false
+        }
+        if lhs.tintColor != rhs.tintColor {
+            return false
+        }
+        return true
+    }
+
+    final class View: UIView {
+        private let iconView = UIImageView()
+
+        private var component: DualIconComponent?
+        private weak var state: EmptyComponentState?
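+
+        // Renders the dual-camera toggle: a template glyph in the normal state,
+        // and the glyph knocked out of a filled circle in the selected state.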
+
+        override init(frame: CGRect) {
+            super.init(frame: frame)
+
+            let image = generateImage(CGSize(width: 36.0, height: 36.0), rotatedContext: { size, context in
+                context.clear(CGRect(origin: .zero, size: size))
+
+                if let image = UIImage(bundleImageName: "Camera/DualIcon"), let cgImage = image.cgImage {
+                    context.draw(cgImage, in: CGRect(origin: CGPoint(x: floorToScreenPixels((size.width - image.size.width) / 2.0), y: floorToScreenPixels((size.height - image.size.height) / 2.0) - 1.0), size: image.size))
+                }
+            })?.withRenderingMode(.alwaysTemplate)
+
+            let selectedImage = generateImage(CGSize(width: 36.0, height: 36.0), rotatedContext: { size, context in
+                context.clear(CGRect(origin: .zero, size: size))
+                context.setFillColor(UIColor.white.cgColor)
+                context.fillEllipse(in: CGRect(origin: .zero, size: size))
+
+                if let image = UIImage(bundleImageName: "Camera/DualIcon"), let cgImage = image.cgImage {
+                    context.setBlendMode(.clear)
+                    context.clip(to: CGRect(origin: CGPoint(x: floorToScreenPixels((size.width - image.size.width) / 2.0), y: floorToScreenPixels((size.height - image.size.height) / 2.0) - 1.0), size: image.size), mask: cgImage)
+                    context.fill(CGRect(origin: .zero, size: size))
+                }
+            })?.withRenderingMode(.alwaysTemplate)
+
+            self.iconView.image = image
+            self.iconView.highlightedImage = selectedImage
+
+            self.addSubview(self.iconView)
+        }
+
+        required init?(coder: NSCoder) {
+            fatalError("init(coder:) has not been implemented")
+        }
+
+        func update(component: DualIconComponent, availableSize: CGSize, state: EmptyComponentState, environment: Environment<Empty>, transition: ComponentTransition) -> CGSize {
+            self.component = component
+            self.state = state
+
+            let size = CGSize(width: 36.0, height: 36.0)
+            self.iconView.frame = CGRect(origin: .zero, size: size)
+            self.iconView.isHighlighted = component.isSelected
+
+            self.iconView.tintColor = component.tintColor
+
+            return size
+        }
+    }
+
+    public func makeView() -> View {
+        return View(frame: CGRect())
+    }
+
+    public func update(view: View, availableSize: CGSize, state: State, environment: Environment<EnvironmentType>, transition: ComponentTransition) -> CGSize {
+        return view.update(component: self, availableSize: availableSize, state: state, environment: environment, transition: transition)
+    }
+}
diff --git a/submodules/TelegramUI/Components/CameraScreen/Sources/ShutterBlobView.swift b/submodules/TelegramUI/Components/CameraScreen/Sources/ShutterBlobView.swift
index 094fe890b5..f0f68e80b7 100644
--- a/submodules/TelegramUI/Components/CameraScreen/Sources/ShutterBlobView.swift
+++ b/submodules/TelegramUI/Components/CameraScreen/Sources/ShutterBlobView.swift
@@ -230,20 +230,11 @@ final class ShutterBlobView: UIView {
     }
 
     public init?(test: Bool) {
-        let mainBundle = Bundle(for: ShutterBlobView.self)
-
-        guard let path = mainBundle.path(forResource: "CameraScreenBundle", ofType: "bundle") else {
-            return nil
-        }
-        guard let bundle = Bundle(path: path) else {
-            return nil
-        }
-
         guard let device = MTLCreateSystemDefaultDevice() else {
             return nil
         }
 
-        guard let defaultLibrary = try? device.makeDefaultLibrary(bundle: bundle) else {
+        guard let library = metalLibrary(device: device) else {
             return nil
         }
 
@@ -252,11 +243,11 @@ final class ShutterBlobView: UIView {
         }
         self.commandQueue = commandQueue
 
-        guard let loadedVertexProgram = defaultLibrary.makeFunction(name: "cameraBlobVertex") else {
+        guard let loadedVertexProgram = library.makeFunction(name: "cameraBlobVertex") else {
             return nil
         }
 
-        guard let loadedFragmentProgram = defaultLibrary.makeFunction(name: "cameraBlobFragment") else {
+        guard let loadedFragmentProgram = library.makeFunction(name: "cameraBlobFragment") else {
             return nil
         }
 
diff --git a/submodules/TelegramUI/Components/LegacyMessageInputPanel/Sources/LegacyMessageInputPanel.swift b/submodules/TelegramUI/Components/LegacyMessageInputPanel/Sources/LegacyMessageInputPanel.swift
index abea4059e8..a43da16894 100644
--- a/submodules/TelegramUI/Components/LegacyMessageInputPanel/Sources/LegacyMessageInputPanel.swift
+++ b/submodules/TelegramUI/Components/LegacyMessageInputPanel/Sources/LegacyMessageInputPanel.swift
@@ -33,12 +33,22 @@ public class LegacyMessageInputPanelNode: ASDisplayNode, TGCaptionPanelView {
     private var currentIsEditing = false
     private var currentHeight: CGFloat?
     private var currentIsVideo = false
+    private var currentIsCaptionAbove = false
 
     private let hapticFeedback = HapticFeedback()
 
     private var inputView: LegacyMessageInputPanelInputView?
     private var isEmojiKeyboardActive = false
 
+    public var sendPressed: ((NSAttributedString?) -> Void)?
+    public var focusUpdated: ((Bool) -> Void)?
+    public var heightUpdated: ((Bool) -> Void)?
+    public var timerUpdated: ((NSNumber?) -> Void)?
+    public var captionIsAboveUpdated: ((Bool) -> Void)?
+
+    private weak var undoController: UndoOverlayController?
+    private weak var tooltipController: TooltipScreen?
+
     private var validLayout: (width: CGFloat, leftInset: CGFloat, rightInset: CGFloat, bottomInset: CGFloat, keyboardHeight: CGFloat, additionalSideInsets: UIEdgeInsets, maxHeight: CGFloat, isSecondary: Bool, metrics: LayoutMetrics)?
 
     public init(
@@ -67,11 +77,6 @@ public class LegacyMessageInputPanelNode: ASDisplayNode, TGCaptionPanelView {
         }
     }
 
-    public var sendPressed: ((NSAttributedString?) -> Void)?
-    public var focusUpdated: ((Bool) -> Void)?
-    public var heightUpdated: ((Bool) -> Void)?
-    public var timerUpdated: ((NSNumber?) -> Void)?
-
     public func updateLayoutSize(_ size: CGSize, keyboardHeight: CGFloat, sideInset: CGFloat, animated: Bool) -> CGFloat {
         return self.updateLayout(width: size.width, leftInset: sideInset, rightInset: sideInset, bottomInset: 0.0, keyboardHeight: keyboardHeight, additionalSideInsets: UIEdgeInsets(), maxHeight: size.height, isSecondary: false, transition: animated ? .animated(duration: 0.2, curve: .easeInOut) : .immediate, metrics: LayoutMetrics(widthClass: .compact, heightClass: .compact, orientation: nil), isMediaInputExpanded: false)
     }
@@ -99,14 +104,15 @@ public class LegacyMessageInputPanelNode: ASDisplayNode, TGCaptionPanelView {
         transition.setFrame(view: view, frame: frame)
     }
 
-    public func setTimeout(_ timeout: Int32, isVideo: Bool) {
-        self.dismissTimeoutTooltip()
+    public func setTimeout(_ timeout: Int32, isVideo: Bool, isCaptionAbove: Bool) {
+        self.dismissAllTooltips()
         var timeout: Int32? = timeout
         if timeout == 0 {
             timeout = nil
         }
         self.currentTimeout = timeout
         self.currentIsVideo = isVideo
+        self.currentIsCaptionAbove = isCaptionAbove
     }
 
     public func activateInput() {
@@ -132,7 +138,7 @@ public class LegacyMessageInputPanelNode: ASDisplayNode, TGCaptionPanelView {
     }
 
     public func onAnimateOut() {
-        self.dismissTimeoutTooltip()
+        self.dismissAllTooltips()
     }
 
     public func baseHeight() -> CGFloat {
@@ -233,7 +239,12 @@ public class LegacyMessageInputPanelNode: ASDisplayNode, TGCaptionPanelView {
             lockMediaRecording: nil,
             stopAndPreviewMediaRecording: nil,
             discardMediaRecordingPreview: nil,
-            attachmentAction: nil,
+            attachmentAction: { [weak self] in
+                if let self {
+                    self.toggleIsCaptionAbove()
+                }
+            },
+            attachmentButtonMode: self.currentIsCaptionAbove ? .captionDown : .captionUp,
             myReaction: nil,
             likeAction: nil,
             likeOptionsAction: nil,
@@ -249,6 +260,11 @@ public class LegacyMessageInputPanelNode: ASDisplayNode, TGCaptionPanelView {
             } : nil,
             forwardAction: nil,
             moreAction: nil,
+            presentCaptionPositionTooltip: { [weak self] sourceView in
+                if let self {
+                    self.presentCaptionPositionTooltip(sourceView: sourceView)
+                }
+            },
             presentVoiceMessagesUnavailableTooltip: nil,
             presentTextLengthLimitTooltip: nil,
             presentTextFormattingTooltip: nil,
@@ -340,6 +356,31 @@ public class LegacyMessageInputPanelNode: ASDisplayNode, TGCaptionPanelView {
         }
     }
 
+    private func toggleIsCaptionAbove() {
+        //TODO:localize
+        self.currentIsCaptionAbove = !self.currentIsCaptionAbove
+        self.captionIsAboveUpdated?(self.currentIsCaptionAbove)
+        self.update(transition: .animated(duration: 0.3, curve: .spring))
+
+        self.dismissAllTooltips()
+
+        let presentationData = self.context.sharedContext.currentPresentationData.with { $0 }
+
+        let title = self.currentIsCaptionAbove ? "Caption moved up" : "Caption moved down"
+        let text = self.currentIsCaptionAbove ? "Text will be shown above the media." : "Text will be shown below the media."
+        let animationName = self.currentIsCaptionAbove ? "message_preview_sort_above" : "message_preview_sort_below"
+
+        let controller = UndoOverlayController(
+            presentationData: presentationData,
+            content: .universal(animation: animationName, scale: 1.0, colors: ["__allcolors__": UIColor.white], title: title, text: text, customUndoText: nil, timeout: 2.0),
+            elevatedLayout: false,
+            position: self.currentIsCaptionAbove ? .bottom : .top,
+            action: { _ in return false }
+        )
+        self.present(controller)
+        self.undoController = controller
+    }
+
     private func presentTimeoutSetup(sourceView: UIView, gesture: ContextGesture?) {
         self.hapticFeedback.impact(.light)
 
@@ -395,10 +436,12 @@ public class LegacyMessageInputPanelNode: ASDisplayNode, TGCaptionPanelView {
         let contextController = ContextController(presentationData: presentationData, source: .reference(HeaderContextReferenceContentSource(sourceView: sourceView)), items: .single(ContextController.Items(content: .list(items))), gesture: gesture)
         self.present(contextController)
     }
-
-    private weak var tooltipController: TooltipScreen?
-
-    private func dismissTimeoutTooltip() {
+
+    private func dismissAllTooltips() {
+        if let undoController = self.undoController {
+            self.undoController = nil
+            undoController.dismissWithCommitAction()
+        }
         if let tooltipController = self.tooltipController {
             self.tooltipController = nil
             tooltipController.dismiss()
@@ -409,7 +452,7 @@ public class LegacyMessageInputPanelNode: ASDisplayNode, TGCaptionPanelView {
         guard let superview = self.view.superview?.superview else {
             return
         }
-        self.dismissTimeoutTooltip()
+        self.dismissAllTooltips()
 
         let parentFrame = superview.convert(superview.bounds, to: nil)
         let absoluteFrame = sourceView.convert(sourceView.bounds, to: nil).offsetBy(dx: -parentFrame.minX, dy: 0.0)
@@ -449,6 +492,38 @@ public class LegacyMessageInputPanelNode: ASDisplayNode, TGCaptionPanelView {
         self.present(tooltipController)
     }
 
+    private func presentCaptionPositionTooltip(sourceView: UIView) {
+        guard let superview = self.view.superview?.superview else {
+            return
+        }
+        self.dismissAllTooltips()
+
+        let parentFrame = superview.convert(superview.bounds, to: nil)
+        let absoluteFrame = sourceView.convert(sourceView.bounds, to: nil).offsetBy(dx: -parentFrame.minX, dy: 0.0)
+        let location = CGRect(origin: CGPoint(x: absoluteFrame.midX + 2.0, y: absoluteFrame.minY + 6.0), size: CGSize())
+
+        //TODO:localize
+        let text = "Tap here to move caption up."
+        let tooltipController = TooltipScreen(
+            account: self.context.account,
+            sharedContext: self.context.sharedContext,
+            text: .plain(text: text),
+            balancedTextLayout: false,
+            style: .customBlur(UIColor(rgb: 0x18181a), 4.0),
+            arrowStyle: .small,
+            icon: nil,
+            location: .point(location, .bottom),
+            displayDuration: .default,
+            inset: 4.0,
+            cornerRadius: 10.0,
+            shouldDismissOnTouch: { _, _ in
+                return .ignore
+            }
+        )
+        self.tooltipController = tooltipController
+        self.present(tooltipController)
+    }
+
     public override func hitTest(_ point: CGPoint, with event: UIEvent?) -> UIView? {
{ let result = super.hitTest(point, with: event) if let view = self.inputPanel.view, let panelResult = view.hitTest(self.view.convert(point, to: view), with: event) { diff --git a/submodules/TelegramUI/Components/MediaAssetsContext/BUILD b/submodules/TelegramUI/Components/MediaAssetsContext/BUILD new file mode 100644 index 0000000000..060cfd6ee4 --- /dev/null +++ b/submodules/TelegramUI/Components/MediaAssetsContext/BUILD @@ -0,0 +1,19 @@ +load("@build_bazel_rules_swift//swift:swift.bzl", "swift_library") + +swift_library( + name = "MediaAssetsContext", + module_name = "MediaAssetsContext", + srcs = glob([ + "Sources/**/*.swift", + ]), + copts = [ + "-warnings-as-errors", + ], + deps = [ + "//submodules/Display", + "//submodules/SSignalKit/SwiftSignalKit", + ], + visibility = [ + "//visibility:public", + ], +) diff --git a/submodules/MediaPickerUI/Sources/FetchAssets.swift b/submodules/TelegramUI/Components/MediaAssetsContext/Sources/FetchAssets.swift similarity index 91% rename from submodules/MediaPickerUI/Sources/FetchAssets.swift rename to submodules/TelegramUI/Components/MediaAssetsContext/Sources/FetchAssets.swift index 95878b87bc..e5f3ffb0f6 100644 --- a/submodules/MediaPickerUI/Sources/FetchAssets.swift +++ b/submodules/TelegramUI/Components/MediaAssetsContext/Sources/FetchAssets.swift @@ -12,7 +12,7 @@ private let imageManager: PHCachingImageManager = { private let assetsQueue = Queue() -final class AssetDownloadManager { +public final class AssetDownloadManager { private final class DownloadingAssetContext { let identifier: String let updated: () -> Void @@ -33,13 +33,13 @@ final class AssetDownloadManager { private let queue = Queue() private var currentAssetContext: DownloadingAssetContext? - init() { + public init() { } deinit { } - func download(asset: PHAsset) { + public func download(asset: PHAsset) { self.cancelAllDownloads() let queue = self.queue @@ -70,7 +70,7 @@ final class AssetDownloadManager { }) } - func cancelAllDownloads() { + public func cancelAllDownloads() { if let currentAssetContext = self.currentAssetContext { currentAssetContext.status = .none currentAssetContext.updated() @@ -83,7 +83,7 @@ final class AssetDownloadManager { } } - func cancel(identifier: String) { + public func cancel(identifier: String) { if let currentAssetContext = self.currentAssetContext, currentAssetContext.identifier == identifier { currentAssetContext.status = .none currentAssetContext.updated() @@ -129,7 +129,7 @@ final class AssetDownloadManager { } } - func downloadProgress(identifier: String) -> Signal { + public func downloadProgress(identifier: String) -> Signal { return Signal { [weak self] subscriber in if let self { return self.downloadProgress(identifier: identifier, next: { status in @@ -145,7 +145,7 @@ final class AssetDownloadManager { } } -func checkIfAssetIsLocal(_ asset: PHAsset) -> Signal { +public func checkIfAssetIsLocal(_ asset: PHAsset) -> Signal { if asset.isLocallyAvailable == true { return .single(true) } @@ -181,7 +181,7 @@ func checkIfAssetIsLocal(_ asset: PHAsset) -> Signal { } } -enum AssetDownloadStatus { +public enum AssetDownloadStatus { case none case progress(Float) case completed @@ -242,13 +242,13 @@ private func downloadAssetMediaData(_ asset: PHAsset) -> Signal, index: Int, targetSize: CGSize, exact: Bool, deliveryMode: PHImageRequestOptionsDeliveryMode = .opportunistic, synchronous: Bool = false) -> Signal { +public func assetImage(fetchResult: PHFetchResult, index: Int, targetSize: CGSize, exact: Bool, deliveryMode: 
PHImageRequestOptionsDeliveryMode = .opportunistic, synchronous: Bool = false) -> Signal { let asset = fetchResult[index] return assetImage(asset: asset, targetSize: targetSize, exact: exact, deliveryMode: deliveryMode, synchronous: synchronous) } -func assetImage(asset: PHAsset, targetSize: CGSize, exact: Bool, deliveryMode: PHImageRequestOptionsDeliveryMode = .opportunistic, synchronous: Bool = false) -> Signal { - return Signal { subscriber in +public func assetImage(asset: PHAsset, targetSize: CGSize, exact: Bool, deliveryMode: PHImageRequestOptionsDeliveryMode = .opportunistic, synchronous: Bool = false) -> Signal { + return Signal { subscriber in let options = PHImageRequestOptions() options.deliveryMode = deliveryMode if exact { @@ -282,7 +282,7 @@ func assetImage(asset: PHAsset, targetSize: CGSize, exact: Bool, deliveryMode: P } } -func assetVideo(fetchResult: PHFetchResult, index: Int) -> Signal { +public func assetVideo(fetchResult: PHFetchResult, index: Int) -> Signal { return Signal { subscriber in let asset = fetchResult[index] diff --git a/submodules/TelegramUI/Components/CameraScreen/Sources/MediaAssetsContext.swift b/submodules/TelegramUI/Components/MediaAssetsContext/Sources/MediaAssetsContext.swift similarity index 100% rename from submodules/TelegramUI/Components/CameraScreen/Sources/MediaAssetsContext.swift rename to submodules/TelegramUI/Components/MediaAssetsContext/Sources/MediaAssetsContext.swift diff --git a/submodules/TelegramUI/Components/MediaEditor/Sources/MediaEditor.swift b/submodules/TelegramUI/Components/MediaEditor/Sources/MediaEditor.swift index 4b7755eda9..2c9d540ab9 100644 --- a/submodules/TelegramUI/Components/MediaEditor/Sources/MediaEditor.swift +++ b/submodules/TelegramUI/Components/MediaEditor/Sources/MediaEditor.swift @@ -51,6 +51,7 @@ public struct MediaEditorPlayerState: Equatable { public let tracks: [Track] public let position: Double public let isPlaying: Bool + public let collageSamples: (samples: Data, peak: Int32)? 
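// `collageSamples` is an optional tuple, and tuples do not conform to Equatable,
// so the compiler can no longer synthesize `==` for MediaEditorPlayerState; hence
// the manual Equatable implementation added just below, which compares the tuple's
// `samples` and `peak` components explicitly.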
public var isAudioOnly: Bool { var hasVideoTrack = false @@ -69,6 +70,25 @@ public struct MediaEditorPlayerState: Equatable { public var hasAudio: Bool { return true } + + public static func == (lhs: MediaEditorPlayerState, rhs: MediaEditorPlayerState) -> Bool { + if lhs.generationTimestamp != rhs.generationTimestamp { + return false + } + if lhs.tracks != rhs.tracks { + return false + } + if lhs.position != rhs.position { + return false + } + if lhs.isPlaying != rhs.isPlaying { + return false + } + if lhs.collageSamples?.samples != rhs.collageSamples?.samples || lhs.collageSamples?.peak != rhs.collageSamples?.peak { + return false + } + return true + } } public final class MediaEditor { @@ -102,8 +122,39 @@ public final class MediaEditor { } public enum Subject { + public struct VideoCollageItem { + public enum Content: Equatable { + case image(UIImage) + case video(String, Double) + case asset(PHAsset) + } + public let content: Content + public let frame: CGRect + + var isVideo: Bool { + return self.duration > 0.0 + } + + var duration: Double { + switch self.content { + case .image: + return 0.0 + case let .video(_, duration): + return duration + case let .asset(asset): + return asset.duration + } + } + + public init(content: Content, frame: CGRect) { + self.content = content + self.frame = frame + } + } + case image(UIImage, PixelDimensions) case video(String, UIImage?, Bool, String?, PixelDimensions, Double) + case videoCollage([VideoCollageItem]) case asset(PHAsset) case draft(MediaEditorDraft) case message(MessageId) @@ -117,9 +168,7 @@ public final class MediaEditor { return PixelDimensions(width: Int32(asset.pixelWidth), height: Int32(asset.pixelHeight)) case let .draft(draft): return draft.dimensions - case .message: - return PixelDimensions(width: 1080, height: 1920) - case .sticker: + case .message, .sticker, .videoCollage: return PixelDimensions(width: 1080, height: 1920) } } @@ -134,12 +183,15 @@ public final class MediaEditor { private var stickerEntity: MediaEditorComposerStickerEntity? private var player: AVPlayer? + private let playerPromise = Promise() private var playerAudioMix: AVMutableAudioMix? - private var additionalPlayer: AVPlayer? + private var additionalPlayers: [AVPlayer] = [] + private let additionalPlayersPromise = Promise<[AVPlayer]>([]) private var additionalPlayerAudioMix: AVMutableAudioMix? private var audioPlayer: AVPlayer? + private let audioPlayerPromise = Promise(nil) private var audioPlayerAudioMix: AVMutableAudioMix? private var volumeFadeIn: SwiftSignalKit.Timer? @@ -233,7 +285,7 @@ public final class MediaEditor { if case let .sticker(file) = self.subject { return file.isAnimatedSticker || file.isVideoSticker } else { - return self.player != nil || self.audioPlayer != nil || self.additionalPlayer != nil + return self.player != nil || self.audioPlayer != nil || !self.additionalPlayers.isEmpty } } @@ -244,11 +296,7 @@ public final class MediaEditor { public func getResultImage(mirror: Bool) -> UIImage? { return self.renderer.finalRenderedImage(mirror: mirror) } - - private let playerPromise = Promise() - private let additionalPlayerPromise = Promise(nil) - private let audioPlayerPromise = Promise(nil) - + private var wallpapers: ((day: UIImage, night: UIImage?))? private struct PlaybackState: Equatable { @@ -309,7 +357,7 @@ public final class MediaEditor { } public var additionalVideoDuration: Double? 
{ - if let additionalPlayer = self.additionalPlayer { + if let additionalPlayer = self.additionalPlayers.first { return min(60.0, additionalPlayer.currentItem?.asset.duration.seconds ?? 0.0) } else { return nil @@ -317,7 +365,7 @@ public final class MediaEditor { } public var originalDuration: Double? { - if self.player != nil || self.additionalPlayer != nil { + if self.player != nil || !self.additionalPlayers.isEmpty { return min(60.0, self.playerPlaybackState.duration) } else { return nil @@ -340,8 +388,8 @@ public final class MediaEditor { return (artist: artist, title: title) } - func playerAndThumbnails(promise: Promise, mirror: Bool = false) -> Signal<(AVPlayer, [UIImage], Double)?, NoError> { - return promise.get() + func playerAndThumbnails(_ signal: Signal, mirror: Bool = false) -> Signal<(AVPlayer, [UIImage], Double)?, NoError> { + return signal |> mapToSignal { player -> Signal<(AVPlayer, [UIImage], Double)?, NoError> in if let player, let asset = player.currentItem?.asset { return videoFrames(asset: asset, count: framesCount, mirror: mirror) @@ -353,16 +401,20 @@ public final class MediaEditor { } } } - + return combineLatest( - playerAndThumbnails(promise: self.playerPromise), - playerAndThumbnails(promise: self.additionalPlayerPromise, mirror: true), + playerAndThumbnails(self.playerPromise.get()), + self.additionalPlayersPromise.get() + |> mapToSignal { players in + return combineLatest(players.compactMap { playerAndThumbnails(.single($0), mirror: true) }) + }, self.audioPlayerPromise.get(), self.valuesPromise.get(), self.playerPlaybackStatePromise.get() - ) |> map { mainPlayerAndThumbnails, additionalPlayerAndThumbnails, audioPlayer, values, playbackState in - var tracks: [MediaEditorPlayerState.Track] = [] + ) |> map { [weak self] mainPlayerAndThumbnails, additionalPlayerAndThumbnails, audioPlayer, values, playbackState in + let isCollage = !values.collage.isEmpty + var tracks: [MediaEditorPlayerState.Track] = [] if let (player, frames, updateTimestamp) = mainPlayerAndThumbnails { let duration: Double if !playbackState.duration.isNaN { @@ -383,30 +435,49 @@ public final class MediaEditor { visibleInTimeline: true )) } - if let (player, frames, updateTimestamp) = additionalPlayerAndThumbnails { - let duration: Double - if !playbackState.duration.isNaN && mainPlayerAndThumbnails == nil { - duration = playbackState.duration - } else { - duration = player.currentItem?.asset.duration.seconds ?? 0.0 + + var index: Int32 = 1 + for maybeValues in additionalPlayerAndThumbnails { + if let (player, frames, updateTimestamp) = maybeValues { + let duration: Double + if !playbackState.duration.isNaN && mainPlayerAndThumbnails == nil { + duration = playbackState.duration + } else { + duration = player.currentItem?.asset.duration.seconds ?? 0.0 + } + + var trimRange: Range? + var offset: Double? 
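// For a collage, each additional track resolves its trim range and offset from the
// matching collage item (track ids start at 1 and are mapped to collage indices via
// collageItemIndexForTrackId, defined further below); the legacy single additional
// video keeps using the shared additionalVideoTrimRange/additionalVideoOffset values.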
+ if isCollage { + if let collageIndex = self?.collageItemIndexForTrackId(index) { + trimRange = values.collage[collageIndex].videoTrimRange + offset = values.collage[collageIndex].videoOffset + } + } else { + trimRange = values.additionalVideoTrimRange + offset = values.additionalVideoOffset + } + + tracks.append(MediaEditorPlayerState.Track( + id: index, + content: .video( + frames: frames, + framesUpdateTimestamp: updateTimestamp + ), + duration: duration, + trimRange: trimRange, + offset: offset, + isMain: tracks.isEmpty, + visibleInTimeline: !values.additionalVideoIsDual + )) + index += 1 } - tracks.append(MediaEditorPlayerState.Track( - id: 1, - content: .video( - frames: frames, - framesUpdateTimestamp: updateTimestamp - ), - duration: duration, - trimRange: values.additionalVideoTrimRange, - offset: values.additionalVideoOffset, - isMain: tracks.isEmpty, - visibleInTimeline: !values.additionalVideoIsDual - )) } + if let audioTrack = values.audioTrack { let (artist, title) = artistAndTitleForTrack(audioTrack) tracks.append(MediaEditorPlayerState.Track( - id: 2, + id: 1000, content: .audio( artist: artist, title: title, @@ -425,11 +496,17 @@ public final class MediaEditor { return nil } + var collageSamples: (Data, Int32)? + if let samples = values.collageTrackSamples?.samples, let peak = values.collageTrackSamples?.peak { + collageSamples = (samples, peak) + } + return MediaEditorPlayerState( generationTimestamp: CACurrentMediaTime(), tracks: tracks, position: playbackState.position, - isPlaying: playbackState.isPlaying + isPlaying: playbackState.isPlaying, + collageSamples: collageSamples ) } } @@ -466,6 +543,7 @@ public final class MediaEditor { additionalVideoTrimRange: nil, additionalVideoOffset: nil, additionalVideoVolume: nil, + collage: [], nightTheme: false, drawing: nil, maskDrawing: nil, @@ -476,6 +554,7 @@ public final class MediaEditor { audioTrackOffset: nil, audioTrackVolume: nil, audioTrackSamples: nil, + collageTrackSamples: nil, coverImageTimestamp: nil, qualityPreset: nil ) @@ -520,7 +599,7 @@ public final class MediaEditor { self.renderer.videoFinishPass.additionalTextureRotation = .rotate0DegreesMirrored } let hasTransparency = imageHasTransparency(image) - self.renderer.consume(main: .texture(texture, time, hasTransparency), additional: additionalTexture.flatMap { .texture($0, time, false) }, render: true, displayEnabled: false) + self.renderer.consume(main: .texture(texture, time, hasTransparency, nil), additionals: additionalTexture.flatMap { [.texture($0, time, false, nil)] } ?? [], render: true, displayEnabled: false) } private func setupSource(andPlay: Bool) { @@ -529,7 +608,6 @@ public final class MediaEditor { } let context = self.context - let clock = self.clock if let device = renderTarget.mtlDevice, CVMetalTextureCacheCreate(nil, nil, device, nil, &self.textureCache) != kCVReturnSuccess { print("error") } @@ -540,6 +618,7 @@ public final class MediaEditor { let player: AVPlayer? let stickerEntity: MediaEditorComposerStickerEntity? let playerIsReference: Bool + let rect: CGRect? let gradientColors: GradientColors init( @@ -548,6 +627,7 @@ public final class MediaEditor { player: AVPlayer? = nil, stickerEntity: MediaEditorComposerStickerEntity? = nil, playerIsReference: Bool = false, + rect: CGRect? 
= nil, gradientColors: GradientColors ) { self.image = image @@ -555,26 +635,20 @@ public final class MediaEditor { self.player = player self.stickerEntity = stickerEntity self.playerIsReference = playerIsReference + self.rect = rect self.gradientColors = gradientColors } } - - func makePlayer(asset: AVAsset) -> AVPlayer { - let player = AVPlayer(playerItem: AVPlayerItem(asset: asset)) - if #available(iOS 15.0, *) { - player.sourceClock = clock - } else { - player.masterClock = clock - } - player.automaticallyWaitsToMinimizeStalling = false - return player - } - - func textureSourceResult(for asset: AVAsset, gradientColors: GradientColors? = nil) -> Signal { - return Signal { subscriber in - let player = makePlayer(asset: asset) + + func textureSourceResult(for asset: AVAsset, gradientColors: GradientColors? = nil, rect: CGRect? = nil) -> Signal { + return Signal { [weak self] subscriber in + guard let self else { + subscriber.putCompletion() + return EmptyDisposable + } + let player = self.makePlayer(asset: asset) if let gradientColors { - subscriber.putNext(TextureSourceResult(player: player, gradientColors: gradientColors)) + subscriber.putNext(TextureSourceResult(player: player, rect: rect, gradientColors: gradientColors)) subscriber.putCompletion() return EmptyDisposable } else { @@ -583,7 +657,7 @@ public final class MediaEditor { imageGenerator.maximumSize = CGSize(width: 72, height: 128) imageGenerator.generateCGImagesAsynchronously(forTimes: [NSValue(time: CMTime(seconds: 0, preferredTimescale: CMTimeScale(30.0)))]) { _, image, _, _, _ in let gradientColors: GradientColors = image.flatMap({ mediaEditorGetGradientColors(from: UIImage(cgImage: $0)) }) ?? GradientColors(top: .black, bottom: .black) - subscriber.putNext(TextureSourceResult(player: player, gradientColors: gradientColors)) + subscriber.putNext(TextureSourceResult(player: player, rect: rect, gradientColors: gradientColors)) subscriber.putCompletion() } return ActionDisposable { @@ -593,8 +667,68 @@ public final class MediaEditor { } } + func textureSourceResult(for asset: PHAsset, rect: CGRect? = nil) -> Signal { + return Signal { [weak self] subscriber in + let isVideo = asset.mediaType == .video + + let targetSize = isVideo ? CGSize(width: 128.0, height: 128.0) : CGSize(width: 1920.0, height: 1920.0) + let options = PHImageRequestOptions() + let deliveryMode: PHImageRequestOptionsDeliveryMode = .highQualityFormat + options.deliveryMode = deliveryMode + options.isNetworkAccessAllowed = true + + let requestId = PHImageManager.default().requestImage( + for: asset, + targetSize: targetSize, + contentMode: .aspectFit, + options: options, + resultHandler: { [weak self] image, info in + if let image { + var degraded = false + if let info { + if let cancelled = info[PHImageCancelledKey] as? Bool, cancelled { + return + } + if let degradedValue = info[PHImageResultIsDegradedKey] as? 
Bool, degradedValue { + degraded = true + } + } + if isVideo { + PHImageManager.default().requestAVAsset(forVideo: asset, options: nil, resultHandler: { asset, _, _ in + if let asset, let player = self?.makePlayer(asset: asset) { + subscriber.putNext( + TextureSourceResult( + player: player, + rect: rect, + gradientColors: mediaEditorGetGradientColors(from: image) + ) + ) + subscriber.putCompletion() + } + }) + } else { + if !degraded { + subscriber.putNext( + TextureSourceResult( + image: image, + rect: rect, + gradientColors: mediaEditorGetGradientColors(from: image) + ) + ) + subscriber.putCompletion() + } + } + } + } + ) + return ActionDisposable { + PHImageManager.default().cancelImageRequest(requestId) + } + } + } + let textureSource: Signal - switch subject { + switch self.subject { case let .image(image, _): textureSource = .single( TextureSourceResult( @@ -625,72 +759,22 @@ public final class MediaEditor { let _ = mirror let asset = AVURLAsset(url: URL(fileURLWithPath: path)) textureSource = textureSourceResult(for: asset) - case let .asset(asset): - textureSource = Signal { subscriber in - let isVideo = asset.mediaType == .video - - let targetSize = isVideo ? CGSize(width: 128.0, height: 128.0) : CGSize(width: 1920.0, height: 1920.0) - let options = PHImageRequestOptions() - let deliveryMode: PHImageRequestOptionsDeliveryMode - if isVideo { - if #available(iOS 14.0, *), PHPhotoLibrary.authorizationStatus(for: .readWrite) == .limited { - deliveryMode = .highQualityFormat - } else { - deliveryMode = .fastFormat - } - } else { - deliveryMode = .highQualityFormat - } - options.deliveryMode = deliveryMode - options.isNetworkAccessAllowed = true - - let requestId = PHImageManager.default().requestImage( - for: asset, - targetSize: targetSize, - contentMode: .aspectFit, - options: options, - resultHandler: { image, info in - if let image { - var degraded = false - if let info { - if let cancelled = info[PHImageCancelledKey] as? Bool, cancelled { - return - } - if let degradedValue = info[PHImageResultIsDegradedKey] as? 
Bool, degradedValue { - degraded = true - } - } - if isVideo { - PHImageManager.default().requestAVAsset(forVideo: asset, options: nil, resultHandler: { asset, _, _ in - if let asset { - let player = makePlayer(asset: asset) - subscriber.putNext( - TextureSourceResult( - player: player, - gradientColors: mediaEditorGetGradientColors(from: image) - ) - ) - subscriber.putCompletion() - } - }) - } else { - if !degraded { - subscriber.putNext( - TextureSourceResult( - image: image, - gradientColors: mediaEditorGetGradientColors(from: image) - ) - ) - subscriber.putCompletion() - } - } - } - } - ) - return ActionDisposable { - PHImageManager.default().cancelImageRequest(requestId) + case let .videoCollage(items): + if let longestItem = longestCollageItem(items) { + switch longestItem.content { + case let .video(path, _): + let asset = AVURLAsset(url: URL(fileURLWithPath: path)) + textureSource = textureSourceResult(for: asset, rect: longestItem.frame) + case let .asset(asset): + textureSource = textureSourceResult(for: asset, rect: longestItem.frame) + default: + textureSource = .complete() } + } else { + textureSource = .complete() } + case let .asset(asset): + textureSource = textureSourceResult(for: asset) case let .message(messageId): textureSource = self.context.engine.data.get(TelegramEngine.EngineData.Item.Messages.Message(id: messageId)) |> mapToSignal { message in @@ -698,7 +782,7 @@ public final class MediaEditor { if let message, !"".isEmpty { if let maybeFile = message.media.first(where: { $0 is TelegramMediaFile }) as? TelegramMediaFile, maybeFile.isVideo, let path = self.context.account.postbox.mediaBox.completedResourcePath(maybeFile.resource, pathExtension: "mp4") { let asset = AVURLAsset(url: URL(fileURLWithPath: path)) - player = makePlayer(asset: asset) + player = self.makePlayer(asset: asset) } } return getChatWallpaperImage(context: self.context, messageId: messageId) @@ -750,13 +834,13 @@ public final class MediaEditor { } self.player = textureSourceResult.player - self.playerPromise.set(.single(player)) + self.playerPromise.set(.single(self.player)) if let image = textureSourceResult.image { if self.values.nightTheme, let nightImage = textureSourceResult.nightImage { - textureSource.setMainInput(.image(nightImage)) + textureSource.setMainInput(.image(nightImage, nil)) } else { - textureSource.setMainInput(.image(image)) + textureSource.setMainInput(.image(image, nil)) } if case .sticker = self.mode { @@ -795,11 +879,11 @@ public final class MediaEditor { }) } } - if let player, let playerItem = player.currentItem, !textureSourceResult.playerIsReference { - textureSource.setMainInput(.video(playerItem)) + if let player = self.player, let playerItem = player.currentItem, !textureSourceResult.playerIsReference { + textureSource.setMainInput(.video(playerItem, textureSourceResult.rect)) } - if let additionalPlayer, let playerItem = additionalPlayer.currentItem { - textureSource.setAdditionalInput(.video(playerItem)) + if self.values.collage.isEmpty, let additionalPlayer = self.additionalPlayers.first, let playerItem = additionalPlayer.currentItem { + textureSource.setAdditionalInputs([.video(playerItem, nil)]) } if let entity = textureSourceResult.stickerEntity { textureSource.setMainInput(.entity(entity)) @@ -807,6 +891,9 @@ public final class MediaEditor { self.stickerEntity = textureSourceResult.stickerEntity self.renderer.textureSource = textureSource + if !self.values.collage.isEmpty { + self.setupAdditionalVideoPlayback() + } switch self.mode { case .default: @@ -847,6 
+934,9 @@ public final class MediaEditor { return } player.playImmediately(atRate: 1.0) + for player in self.additionalPlayers { + player.playImmediately(atRate: 1.0) + } // additionalPlayer?.playImmediately(atRate: 1.0) self.audioPlayer?.playImmediately(atRate: 1.0) self.onPlaybackAction(.play) @@ -889,7 +979,7 @@ public final class MediaEditor { private func setupTimeObservers() { var observedPlayer = self.player if observedPlayer == nil { - observedPlayer = self.additionalPlayer + observedPlayer = self.additionalPlayers.first } if observedPlayer == nil { observedPlayer = self.audioPlayer @@ -922,14 +1012,14 @@ public final class MediaEditor { var start: Double = 0.0 if self.player != nil { start = self.values.videoTrimRange?.lowerBound ?? 0.0 - } else if self.additionalPlayer != nil { + } else if !self.additionalPlayers.isEmpty { start = self.values.additionalVideoTrimRange?.lowerBound ?? 0.0 } else if self.audioPlayer != nil { start = (self.values.audioTrackOffset ?? 0.0) + (self.values.audioTrackTrimRange?.lowerBound ?? 0.0) } self.player?.pause() - self.additionalPlayer?.pause() + self.additionalPlayers.forEach { $0.pause() } self.audioPlayer?.pause() self.seek(start, andPlay: true) @@ -950,8 +1040,8 @@ public final class MediaEditor { self.didPlayToEndTimeObserver = nil } - self.videoDelayTimer?.invalidate() - self.videoDelayTimer = nil + self.videoDelayTimer.values.forEach { $0.invalidate() } + self.videoDelayTimer = [:] self.audioDelayTimer?.invalidate() self.audioDelayTimer = nil @@ -1107,9 +1197,9 @@ public final class MediaEditor { if let textureSource = self.renderer.textureSource as? UniversalTextureSource { if nightTheme { - textureSource.setMainInput(.image(nightImage)) + textureSource.setMainInput(.image(nightImage, nil)) } else { - textureSource.setMainInput(.image(dayImage)) + textureSource.setMainInput(.image(dayImage, nil)) } } } @@ -1130,14 +1220,14 @@ public final class MediaEditor { private var targetTimePosition: (CMTime, Bool)? 
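// With multiple additional players, seek and rate changes now fan out: each player
// computes its own target time via videoTime(for:playerId:) (player index n maps to
// track id n + 1), and when its track starts later than the main video its playback
// is deferred with a per-player timer stored in the videoDelayTimer dictionary,
// keyed by player index, instead of the single optional timer used previously.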
private var updatingTimePosition = false public func seek(_ position: Double, andPlay play: Bool) { - if self.player == nil && self.additionalPlayer == nil && self.audioPlayer == nil { + if self.player == nil && self.additionalPlayers.isEmpty && self.audioPlayer == nil { self.initialSeekPosition = position return } self.renderer.setRate(1.0) if !play { self.player?.pause() - self.additionalPlayer?.pause() + self.additionalPlayers.forEach { $0.pause() } self.audioPlayer?.pause() self.onPlaybackAction(.pause) } @@ -1151,25 +1241,29 @@ public final class MediaEditor { if play { self.player?.play() - if self.player == nil && self.additionalPlayer == nil { + if self.player == nil && self.additionalPlayers.isEmpty { self.audioPlayer?.seek(to: targetPosition, toleranceBefore: .zero, toleranceAfter: .zero) self.audioPlayer?.play() } else { - if let _ = self.additionalPlayer { + if let _ = self.additionalPlayers.first { if self.player != nil { - let videoTime = self.videoTime(for: targetPosition) - if let videoDelay = self.videoDelay(for: targetPosition) { - self.videoDelayTimer = SwiftSignalKit.Timer(timeout: videoDelay, repeat: false, completion: { [weak self] in - self?.additionalPlayer?.seek(to: videoTime, toleranceBefore: .zero, toleranceAfter: .zero) - self?.additionalPlayer?.play() - }, queue: Queue.mainQueue()) - self.videoDelayTimer?.start() - } else { - self.additionalPlayer?.seek(to: videoTime, toleranceBefore: .zero, toleranceAfter: .zero) - self.additionalPlayer?.play() + var index: Int32 = 0 + for additionalPlayer in self.additionalPlayers { + let videoTime = self.videoTime(for: targetPosition, playerId: index) + if let videoDelay = self.videoDelay(for: targetPosition, playerId: index) { + self.videoDelayTimer[index] = SwiftSignalKit.Timer(timeout: videoDelay, repeat: false, completion: { [weak additionalPlayer] in + additionalPlayer?.seek(to: videoTime, toleranceBefore: .zero, toleranceAfter: .zero) + additionalPlayer?.play() + }, queue: Queue.mainQueue()) + self.videoDelayTimer[index]?.start() + } else { + additionalPlayer.seek(to: videoTime, toleranceBefore: .zero, toleranceAfter: .zero) + additionalPlayer.play() + } + index += 1 } } else { - self.additionalPlayer?.play() + self.additionalPlayers.forEach { $0.play() } } } @@ -1198,7 +1292,7 @@ public final class MediaEditor { return } player.pause() - self.additionalPlayer?.pause() + self.additionalPlayers.forEach { $0.pause() } self.audioPlayer?.pause() let targetPosition = CMTime(seconds: position, preferredTimescale: CMTimeScale(1000.0)) @@ -1208,11 +1302,15 @@ public final class MediaEditor { } }) - if let _ = self.videoDelay(for: targetPosition) { - } else { - self.additionalPlayer?.seek(to: self.videoTime(for: targetPosition), toleranceBefore: .zero, toleranceAfter: .zero) + var index: Int32 = 0 + for player in self.additionalPlayers { + if let _ = self.videoDelay(for: targetPosition, playerId: index) { + } else { + player.seek(to: self.videoTime(for: targetPosition, playerId: index), toleranceBefore: .zero, toleranceAfter: .zero) + } + index += 1 } - + if let _ = self.audioDelay(for: targetPosition) { } else { self.audioPlayer?.seek(to: self.audioTime(for: targetPosition), toleranceBefore: .zero, toleranceAfter: .zero) @@ -1228,7 +1326,7 @@ public final class MediaEditor { var videoStart: Double = 0.0 if self.player != nil { videoStart = self.values.videoTrimRange?.lowerBound ?? 
0.0 - } else if self.additionalPlayer != nil { + } else if !self.additionalPlayers.isEmpty { videoStart = self.values.additionalVideoTrimRange?.lowerBound ?? 0.0 } var audioStart = self.values.audioTrackTrimRange?.lowerBound ?? 0.0 @@ -1261,16 +1359,31 @@ public final class MediaEditor { } } - private var videoDelayTimer: SwiftSignalKit.Timer? - private func videoDelay(for time: CMTime) -> Double? { + private var videoDelayTimer: [Int32: SwiftSignalKit.Timer] = [:] + private func videoDelay(for time: CMTime, playerId: Int32?) -> Double? { + let playerId = self.values.collage.isEmpty ? nil : playerId + var time = time if time == .invalid { time = .zero } let mainStart = self.values.videoTrimRange?.lowerBound ?? 0.0 - var trackStart = self.values.additionalVideoTrimRange?.lowerBound ?? 0.0 - if let offset = self.values.additionalVideoOffset, offset < 0.0 { - trackStart -= offset + var trackStart: Double + if let playerId { + let trackId = playerId + 1 + if let collageIndex = self.collageItemIndexForTrackId(trackId) { + trackStart = self.values.collage[collageIndex].videoTrimRange?.lowerBound ?? 0.0 + if let offset = self.values.collage[collageIndex].videoOffset, offset < 0.0 { + trackStart -= offset + } + } else { + trackStart = 0.0 + } + } else { + trackStart = self.values.additionalVideoTrimRange?.lowerBound ?? 0.0 + if let offset = self.values.additionalVideoOffset, offset < 0.0 { + trackStart -= offset + } } if trackStart - mainStart > 0.0 { let delay = trackStart - time.seconds @@ -1281,16 +1394,32 @@ public final class MediaEditor { return nil } - private func videoTime(for time: CMTime) -> CMTime { + private func videoTime(for time: CMTime, playerId: Int32?) -> CMTime { + let playerId = self.values.collage.isEmpty ? nil : playerId + var time = time if time == .invalid { time = .zero } let seconds = time.seconds - let offset = self.values.additionalVideoOffset ?? 0.0 + let offset: Double + let trackStart: Double + if let playerId { + let trackId = playerId + 1 + if let collageIndex = self.collageItemIndexForTrackId(trackId) { + offset = self.values.collage[collageIndex].videoOffset ?? 0.0 + trackStart = self.values.collage[collageIndex].videoTrimRange?.lowerBound ?? 0.0 + } else { + offset = 0.0 + trackStart = 0.0 + } + } else { + offset = self.values.additionalVideoOffset ?? 0.0 + trackStart = self.values.additionalVideoTrimRange?.lowerBound ?? 0.0 + } + let trackOffset = max(0.0, offset) - let trackStart = self.values.additionalVideoTrimRange?.lowerBound ?? 0.0 if seconds < trackStart - min(0.0, offset) { return CMTime(seconds: trackOffset + trackStart, preferredTimescale: CMTimeScale(1000.0)) } else { @@ -1302,7 +1431,7 @@ public final class MediaEditor { var effectivePlayer: AVPlayer? if let player = self.player { effectivePlayer = player - } else if let additionalPlayer = self.additionalPlayer { + } else if let additionalPlayer = self.additionalPlayers.first { effectivePlayer = additionalPlayer } else if let audioPlayer = self.audioPlayer { effectivePlayer = audioPlayer @@ -1358,7 +1487,7 @@ public final class MediaEditor { let cmVTime = CMTimeMakeWithSeconds(time, preferredTimescale: 1000000) let futureTime = CMTimeAdd(cmHostTime, cmVTime) - if self.player == nil && self.additionalPlayer == nil, let audioPlayer = self.audioPlayer { + if self.player == nil && self.additionalPlayers.isEmpty, let audioPlayer = self.audioPlayer { let itemTime = audioPlayer.currentItem?.currentTime() ?? 
.invalid if audioPlayer.status == .readyToPlay { audioPlayer.setRate(rate, time: itemTime, atHostTime: futureTime) @@ -1374,27 +1503,31 @@ public final class MediaEditor { var itemTime = self.player?.currentItem?.currentTime() ?? .invalid self.player?.setRate(rate, time: itemTime, atHostTime: futureTime) - if let additionalPlayer = self.additionalPlayer { + if let additionalPlayer = self.additionalPlayers.first { if self.player != nil { - let videoTime = self.videoTime(for: itemTime) - if rate > 0.0 { - if let videoDelay = self.videoDelay(for: itemTime) { - self.videoDelayTimer = SwiftSignalKit.Timer(timeout: videoDelay, repeat: false, completion: { [weak self] in - self?.additionalPlayer?.seek(to: videoTime, toleranceBefore: .zero, toleranceAfter: .zero) - self?.additionalPlayer?.play() - }, queue: Queue.mainQueue()) - self.videoDelayTimer?.start() - } else { - if additionalPlayer.status == .readyToPlay { - additionalPlayer.setRate(rate, time: videoTime, atHostTime: futureTime) - additionalPlayer.play() + var index: Int32 = 0 + for additionalPlayer in self.additionalPlayers { + let videoTime = self.videoTime(for: itemTime, playerId: index) + if rate > 0.0 { + if let videoDelay = self.videoDelay(for: itemTime, playerId: index) { + self.videoDelayTimer[index] = SwiftSignalKit.Timer(timeout: videoDelay, repeat: false, completion: { [weak additionalPlayer] in + additionalPlayer?.seek(to: videoTime, toleranceBefore: .zero, toleranceAfter: .zero) + additionalPlayer?.play() + }, queue: Queue.mainQueue()) + self.videoDelayTimer[index]?.start() } else { - additionalPlayer.seek(to: videoTime, toleranceBefore: .zero, toleranceAfter: .zero) - additionalPlayer.play() + if additionalPlayer.status == .readyToPlay { + additionalPlayer.setRate(rate, time: videoTime, atHostTime: futureTime) + additionalPlayer.play() + } else { + additionalPlayer.seek(to: videoTime, toleranceBefore: .zero, toleranceAfter: .zero) + additionalPlayer.play() + } } + } else { + additionalPlayer.pause() } - } else { - additionalPlayer.pause() + index += 1 } } else { itemTime = additionalPlayer.currentItem?.currentTime() ?? 
.invalid @@ -1443,8 +1576,8 @@ public final class MediaEditor { } else { self.onPlaybackAction(.pause) - self.videoDelayTimer?.invalidate() - self.videoDelayTimer = nil + self.videoDelayTimer.values.forEach { $0.invalidate() } + self.videoDelayTimer = [:] self.audioDelayTimer?.invalidate() self.audioDelayTimer = nil @@ -1453,7 +1586,7 @@ public final class MediaEditor { public func invalidate() { self.player?.pause() - self.additionalPlayer?.pause() + self.additionalPlayers.forEach { $0.pause() } self.audioPlayer?.pause() self.onPlaybackAction(.pause) self.renderer.textureSource?.invalidate() @@ -1461,8 +1594,8 @@ public final class MediaEditor { self.audioDelayTimer?.invalidate() self.audioDelayTimer = nil - self.videoDelayTimer?.invalidate() - self.videoDelayTimer = nil + self.videoDelayTimer.values.forEach { $0.invalidate() } + self.videoDelayTimer = [:] } private func updateVideoTimePosition() { @@ -1471,7 +1604,7 @@ public final class MediaEditor { } self.updatingTimePosition = true - if self.player == nil && self.additionalPlayer == nil, let audioPlayer = self.audioPlayer { + if self.player == nil && self.additionalPlayers.isEmpty, let audioPlayer = self.audioPlayer { audioPlayer.seek(to: targetPosition, toleranceBefore: .zero, toleranceAfter: .zero, completionHandler: { [weak self] _ in if let self { if let (currentTargetPosition, _) = self.targetTimePosition, currentTargetPosition == targetPosition { @@ -1494,11 +1627,15 @@ public final class MediaEditor { } }) - if let additionalPlayer = self.additionalPlayer { + if let additionalPlayer = self.additionalPlayers.first { if self.player != nil { - if let _ = self.videoDelay(for: targetPosition) { - } else { - self.additionalPlayer?.seek(to: self.videoTime(for: targetPosition), toleranceBefore: .zero, toleranceAfter: .zero) + var index: Int32 = 0 + for additionalPlayer in self.additionalPlayers { + if let _ = self.videoDelay(for: targetPosition, playerId: index) { + } else { + additionalPlayer.seek(to: self.videoTime(for: targetPosition, playerId: index), toleranceBefore: .zero, toleranceAfter: .zero) + } + index += 1 } } else { additionalPlayer.seek(to: targetPosition, toleranceBefore: .zero, toleranceAfter: .zero, completionHandler: { [weak self] _ in @@ -1541,6 +1678,44 @@ public final class MediaEditor { } } + public func setupCollage(_ items: [MediaEditor.Subject.VideoCollageItem]) { + let longestItem = longestCollageItem(items) + var collage: [MediaEditorValues.VideoCollageItem] = [] + for item in items { + var content: MediaEditorValues.VideoCollageItem.Content + if item.content == longestItem?.content { + content = .main + } else { + switch item.content { + case let .image(image): + let tempImagePath = NSTemporaryDirectory() + "\(Int64.random(in: Int64.min ... Int64.max)).jpg" + if let data = image.jpegData(compressionQuality: 0.85) { + try? 
data.write(to: URL(fileURLWithPath: tempImagePath)) + } + content = .imageFile(path: tempImagePath) + case let .video(path, _): + content = .videoFile(path: path) + case let .asset(asset): + content = .asset(localIdentifier: asset.localIdentifier, isVideo: asset.mediaType == .video) + } + } + collage.append(MediaEditorValues.VideoCollageItem( + content: content, + frame: item.frame, + videoTrimRange: nil, + videoOffset: nil, + videoVolume: nil + )) + } + + self.updateValues(mode: .skipRendering) { values in + return values.withUpdatedCollage(collage) + } + + self.setupAdditionalVideoPlayback() + self.updateAdditionalVideoPlaybackRange() + } + public func setAdditionalVideo(_ path: String?, isDual: Bool = false, positionChanges: [VideoPositionChange]) { self.updateValues(mode: .skipRendering) { values in var values = values.withUpdatedAdditionalVideo(path: path, isDual: isDual, positionChanges: positionChanges) @@ -1550,25 +1725,24 @@ public final class MediaEditor { return values } - if let additionalPlayer = self.additionalPlayer { - additionalPlayer.pause() - - self.additionalPlayer = nil - self.additionalPlayerPromise.set(.single(nil)) + if !self.additionalPlayers.isEmpty { + self.additionalPlayers.forEach { $0.pause() } + self.additionalPlayers = [] + self.additionalPlayersPromise.set(.single([])) self.additionalPlayerAudioMix = nil if let textureSource = self.renderer.textureSource as? UniversalTextureSource { textureSource.forceUpdates = true self.renderer.videoFinishPass.animateAdditionalRemoval { [weak textureSource] in if let textureSource { - textureSource.setAdditionalInput(nil) + textureSource.setAdditionalInputs([]) textureSource.forceUpdates = false } } } - self.videoDelayTimer?.invalidate() - self.videoDelayTimer = nil + self.videoDelayTimer.values.forEach { $0.invalidate() } + self.videoDelayTimer = [:] if self.player == nil { self.invalidateTimeObservers() @@ -1581,37 +1755,130 @@ public final class MediaEditor { if self.player == nil { self.invalidateTimeObservers() self.setupTimeObservers() - self.additionalPlayer?.play() + self.additionalPlayers.forEach { $0.play() } } } private func setupAdditionalVideoPlayback() { - guard let additionalVideoPath = self.values.additionalVideoPath else { - return + if !self.values.collage.isEmpty { + var signals: [Signal<(UniversalTextureSource.Input, AVPlayer?), NoError>] = [] + + for item in self.values.collage { + switch item.content { + case .main: + break + case let .imageFile(path): + if let image = UIImage(contentsOfFile: path) { + signals.append(.single((.image(image, item.frame), nil))) + } + case let .videoFile(path): + let asset = AVURLAsset(url: URL(fileURLWithPath: path)) + let player = self.makePlayer(asset: asset) + if let playerItem = player.currentItem { + signals.append(.single((.video(playerItem, item.frame), player))) + } + case let .asset(localIdentifier, _): + let fetchResult = PHAsset.fetchAssets(withLocalIdentifiers: [localIdentifier], options: nil) + if fetchResult.count != 0 { + let asset = fetchResult.object(at: 0) + signals.append(Signal { subscriber in + let options = PHVideoRequestOptions() + options.isNetworkAccessAllowed = true + options.deliveryMode = .highQualityFormat + + PHImageManager.default().requestAVAsset(forVideo: asset, options: options, resultHandler: { [weak self] avAsset, _, _ in + guard let self, let avAsset else { + subscriber.putCompletion() + return + } + let player = self.makePlayer(asset: avAsset) + if let playerItem = player.currentItem { + subscriber.putNext((.video(playerItem, 
item.frame), player)) + } + subscriber.putCompletion() + }) + + return EmptyDisposable + }) + } + } + } + + let _ = (combineLatest(signals) + |> deliverOnMainQueue).start(next: { [weak self] results in + guard let self else { + return + } + var additionalInputs: [UniversalTextureSource.Input] = [] + var additionalPlayers: [AVPlayer] = [] + + for (input, player) in results { + additionalInputs.append(input) + if let player { + additionalPlayers.append(player) + } + } + + self.additionalPlayers = additionalPlayers + self.additionalPlayersPromise.set(.single(additionalPlayers)) + + (self.renderer.textureSource as? UniversalTextureSource)?.setAdditionalInputs(additionalInputs) + + for player in additionalPlayers { + player.play() + } + + if let asset = self.player?.currentItem?.asset { + self.maybeGenerateAudioSamples(asset: asset, collage: true) + } + }) + } else if let additionalVideoPath = self.values.additionalVideoPath { + let asset = AVURLAsset(url: URL(fileURLWithPath: additionalVideoPath)) + let player = self.makePlayer(asset: asset) + guard let playerItem = player.currentItem else { + return + } + + let audioMix = AVMutableAudioMix() + let audioMixInputParameters = AVMutableAudioMixInputParameters(track: asset.tracks(withMediaType: .audio).first) + if let volume = self.values.additionalVideoVolume { + audioMixInputParameters.setVolume(Float(volume), at: .zero) + } + audioMix.inputParameters = [audioMixInputParameters] + player.currentItem?.audioMix = audioMix + + self.additionalPlayers = [player] + self.additionalPlayersPromise.set(.single([player])) + self.additionalPlayerAudioMix = audioMix + + (self.renderer.textureSource as? UniversalTextureSource)?.setAdditionalInputs([.video(playerItem, nil)]) } - let asset = AVURLAsset(url: URL(fileURLWithPath: additionalVideoPath)) - let playerItem = AVPlayerItem(asset: asset) - let player = AVPlayer(playerItem: playerItem) - if #available(iOS 15.0, *) { - player.sourceClock = clock - } else { - player.masterClock = clock + } + + public func collageItemIndexForTrackId(_ trackId: Int32) -> Int? { + var collageIndex = -1 + var trackIndex = 0 + for item in self.values.collage { + if case .videoFile = item.content { + trackIndex += 1 + } else if case .asset(_, true) = item.content { + trackIndex += 1 + } + collageIndex += 1 + + if trackIndex == trackId { + return collageIndex + } } - player.automaticallyWaitsToMinimizeStalling = false - - let audioMix = AVMutableAudioMix() - let audioMixInputParameters = AVMutableAudioMixInputParameters(track: asset.tracks(withMediaType: .audio).first) - if let volume = self.values.additionalVideoVolume { - audioMixInputParameters.setVolume(Float(volume), at: .zero) + return nil + } + + public func playerIndexForTrackId(_ trackId: Int32) -> Int? { + let index = trackId - 1 + if index >= self.additionalPlayers.count { + return nil } - audioMix.inputParameters = [audioMixInputParameters] - player.currentItem?.audioMix = audioMix - - self.additionalPlayer = player - self.additionalPlayerPromise.set(.single(player)) - self.additionalPlayerAudioMix = audioMix - - (self.renderer.textureSource as? 
UniversalTextureSource)?.setAdditionalInput(.video(playerItem)) + return Int(index) } public func setAdditionalVideoPosition(_ position: CGPoint, scale: CGFloat, rotation: CGFloat) { @@ -1620,9 +1887,19 @@ public final class MediaEditor { } } - public func setAdditionalVideoTrimRange(_ trimRange: Range, apply: Bool) { - self.updateValues(mode: .generic) { values in - return values.withUpdatedAdditionalVideoTrimRange(trimRange) + public func setAdditionalVideoTrimRange(_ trimRange: Range, trackId: Int32? = nil, apply: Bool) { + if let trackId { + if let index = self.collageItemIndexForTrackId(trackId) { + self.updateValues(mode: .generic) { values in + var updatedCollage = values.collage + updatedCollage[index] = values.collage[index].withUpdatedVideoTrimRange(trimRange) + return values.withUpdatedCollage(updatedCollage) + } + } + } else { + self.updateValues(mode: .generic) { values in + return values.withUpdatedAdditionalVideoTrimRange(trimRange) + } } if apply { @@ -1630,9 +1907,19 @@ public final class MediaEditor { } } - public func setAdditionalVideoOffset(_ offset: Double?, apply: Bool) { - self.updateValues(mode: .generic) { values in - return values.withUpdatedAdditionalVideoOffset(offset) + public func setAdditionalVideoOffset(_ offset: Double?, trackId: Int32? = nil, apply: Bool) { + if let trackId { + if let index = self.collageItemIndexForTrackId(trackId) { + self.updateValues(mode: .generic) { values in + var updatedCollage = values.collage + updatedCollage[index] = values.collage[index].withUpdatedVideoOffset(offset) + return values.withUpdatedCollage(updatedCollage) + } + } + } else { + self.updateValues(mode: .generic) { values in + return values.withUpdatedAdditionalVideoOffset(offset) + } } if apply { @@ -1640,25 +1927,47 @@ public final class MediaEditor { } } - public func setAdditionalVideoVolume(_ volume: CGFloat?) { - self.updateValues(mode: .skipRendering) { values in - return values.withUpdatedAdditionalVideoVolume(volume) + public func setAdditionalVideoVolume(_ volume: CGFloat?, trackId: Int32? = nil) { + if let trackId { + if let index = self.collageItemIndexForTrackId(trackId) { + self.updateValues(mode: .generic) { values in + var updatedCollage = values.collage + updatedCollage[index] = values.collage[index].withUpdatedVideoVolume(volume) + return values.withUpdatedCollage(updatedCollage) + } + } + } else { + self.updateValues(mode: .skipRendering) { values in + return values.withUpdatedAdditionalVideoVolume(volume) + } } - if let audioMix = self.additionalPlayerAudioMix, let asset = self.additionalPlayer?.currentItem?.asset { + if let audioMix = self.additionalPlayerAudioMix, let asset = self.additionalPlayers.first?.currentItem?.asset { let audioMixInputParameters = AVMutableAudioMixInputParameters(track: asset.tracks(withMediaType: .audio).first) audioMixInputParameters.setVolume(Float(volume ?? 1.0), at: .zero) audioMix.inputParameters = [audioMixInputParameters] - self.additionalPlayer?.currentItem?.audioMix = audioMix + self.additionalPlayers.first?.currentItem?.audioMix = audioMix } } private func updateAdditionalVideoPlaybackRange() { + if !self.values.collage.isEmpty { + var trackId: Int32 = 0 + for player in self.additionalPlayers { + if let index = self.collageItemIndexForTrackId(trackId), let upperBound = self.values.collage[index].videoTrimRange?.upperBound { + let offset = max(0.0, self.values.collage[index].videoOffset ?? 
0.0) + player.currentItem?.forwardPlaybackEndTime = CMTime(seconds: offset + upperBound, preferredTimescale: CMTimeScale(1000)) + } else { + player.currentItem?.forwardPlaybackEndTime = .invalid + } + trackId += 1 + } + } if let upperBound = self.values.additionalVideoTrimRange?.upperBound { let offset = max(0.0, self.values.additionalVideoOffset ?? 0.0) - self.additionalPlayer?.currentItem?.forwardPlaybackEndTime = CMTime(seconds: offset + upperBound, preferredTimescale: CMTimeScale(1000)) + self.additionalPlayers.first?.currentItem?.forwardPlaybackEndTime = CMTime(seconds: offset + upperBound, preferredTimescale: CMTimeScale(1000)) } else { - self.additionalPlayer?.currentItem?.forwardPlaybackEndTime = .invalid + self.additionalPlayers.first?.currentItem?.forwardPlaybackEndTime = .invalid } } @@ -1711,7 +2020,7 @@ public final class MediaEditor { self.audioPlayer = audioPlayer self.audioPlayerPromise.set(.single(audioPlayer)) self.audioPlayerAudioMix = audioMix - self.maybeGenerateAudioSamples(asset: audioAsset) + self.maybeGenerateAudioSamples(asset: audioAsset, collage: false) self.setupTimeObservers() } @@ -1886,7 +2195,7 @@ public final class MediaEditor { } } - private func maybeGenerateAudioSamples(asset: AVAsset) { + private func maybeGenerateAudioSamples(asset: AVAsset, collage: Bool) { Queue.concurrentDefaultQueue().async { guard let audioTrack = asset.tracks(withMediaType: .audio).first else { return @@ -1928,13 +2237,30 @@ public final class MediaEditor { } Queue.mainQueue().async { self.updateValues(mode: .skipRendering) { values in - return values.withUpdatedAudioTrackSamples(MediaAudioTrackSamples(samples: samplesData, peak: peak)) + let samples = MediaAudioTrackSamples(samples: samplesData, peak: peak) + if collage { + return values.withUpdatedCollageTrackSamples(samples) + } else { + return values.withUpdatedAudioTrackSamples(samples) + } } } } catch { } } } + + private func makePlayer(asset: AVAsset) -> AVPlayer { + let player = AVPlayer(playerItem: AVPlayerItem(asset: asset)) + if #available(iOS 15.0, *) { + player.sourceClock = clock + } else { + player.masterClock = clock + } + player.automaticallyWaitsToMinimizeStalling = false + return player + } + } public func videoFrames(asset: AVAsset?, count: Int, initialPlaceholder: UIImage? = nil, initialTimestamp: Double? = nil, mirror: Bool = false) -> Signal<([UIImage], Double), NoError> { @@ -2049,3 +2375,20 @@ public func videoFrames(asset: AVAsset?, count: Int, initialPlaceholder: UIImage return .single((frames, CACurrentMediaTime())) } } + +private func longestCollageItem(_ items: [MediaEditor.Subject.VideoCollageItem]) -> MediaEditor.Subject.VideoCollageItem? { + var longestItem: MediaEditor.Subject.VideoCollageItem? 
{ +    var longestItem: MediaEditor.Subject.VideoCollageItem?
+ for item in items { + guard item.isVideo else { + continue + } + if let current = longestItem { + if item.duration > current.duration { + longestItem = item + } + } else { + longestItem = item + } + } + return longestItem +} diff --git a/submodules/TelegramUI/Components/MediaEditor/Sources/MediaEditorComposer.swift b/submodules/TelegramUI/Components/MediaEditor/Sources/MediaEditorComposer.swift index 77a9ceea8e..782fa83366 100644 --- a/submodules/TelegramUI/Components/MediaEditor/Sources/MediaEditorComposer.swift +++ b/submodules/TelegramUI/Components/MediaEditor/Sources/MediaEditorComposer.swift @@ -52,15 +52,15 @@ private func roundedCornersMaskImage(size: CGSize) -> CIImage { final class MediaEditorComposer { enum Input { - case texture(MTLTexture, CMTime, Bool) - case videoBuffer(VideoPixelBuffer) + case texture(MTLTexture, CMTime, Bool, CGRect?) + case videoBuffer(VideoPixelBuffer, CGRect?) case ciImage(CIImage, CMTime) var timestamp: CMTime { switch self { - case let .texture(_, timestamp, _): + case let .texture(_, timestamp, _, _): return timestamp - case let .videoBuffer(videoBuffer): + case let .videoBuffer(videoBuffer, _): return videoBuffer.timestamp case let .ciImage(_, timestamp): return timestamp @@ -69,10 +69,10 @@ final class MediaEditorComposer { var rendererInput: MediaEditorRenderer.Input { switch self { - case let .texture(texture, timestamp, hasTransparency): - return .texture(texture, timestamp, hasTransparency) - case let .videoBuffer(videoBuffer): - return .videoBuffer(videoBuffer) + case let .texture(texture, timestamp, hasTransparency, rect): + return .texture(texture, timestamp, hasTransparency, rect) + case let .videoBuffer(videoBuffer, rect): + return .videoBuffer(videoBuffer, rect) case let .ciImage(image, timestamp): return .ciImage(image, timestamp) } @@ -150,21 +150,26 @@ final class MediaEditorComposer { self.renderer.videoFinishPass.update(values: self.values, videoDuration: videoDuration, additionalVideoDuration: additionalVideoDuration) } - var previousAdditionalInput: Input? - func process(main: Input, additional: Input?, timestamp: CMTime, pool: CVPixelBufferPool?, completion: @escaping (CVPixelBuffer?) -> Void) { + var previousAdditionalInput: [Int: Input] = [:] + func process(main: Input, additional: [Input?], timestamp: CMTime, pool: CVPixelBufferPool?, completion: @escaping (CVPixelBuffer?) -> Void) { guard let pool, let ciContext = self.ciContext else { completion(nil) return } - var additional = additional - if let additional { - self.previousAdditionalInput = additional - } else { - additional = self.previousAdditionalInput + var index = 0 + var augmentedAdditionals: [Input?] = [] + for input in additional { + if let input { + self.previousAdditionalInput[index] = input + augmentedAdditionals.append(input) + } else { + augmentedAdditionals.append(self.previousAdditionalInput[index]) + } + index += 1 } - self.renderer.consume(main: main.rendererInput, additional: additional?.rendererInput, render: true) + self.renderer.consume(main: main.rendererInput, additionals: augmentedAdditionals.compactMap { $0 }.map { $0.rendererInput }, render: true) if let resultTexture = self.renderer.resultTexture, var ciImage = CIImage(mtlTexture: resultTexture, options: [.colorSpace: self.colorSpace]) { ciImage = ciImage.transformed(by: CGAffineTransformMakeScale(1.0, -1.0).translatedBy(x: 0.0, y: -ciImage.extent.height)) @@ -190,13 +195,13 @@ final class MediaEditorComposer { completion(nil) } - private var cachedTexture: MTLTexture? 
- func textureForImage(_ image: UIImage) -> MTLTexture? { - if let cachedTexture = self.cachedTexture { + private var cachedTextures: [Int: MTLTexture] = [:] + func textureForImage(index: Int, image: UIImage) -> MTLTexture? { + if let cachedTexture = self.cachedTextures[index] { return cachedTexture } if let device = self.device, let texture = loadTexture(image: image, device: device) { - self.cachedTexture = texture + self.cachedTextures[index] = texture return texture } return nil diff --git a/submodules/TelegramUI/Components/MediaEditor/Sources/MediaEditorRenderer.swift b/submodules/TelegramUI/Components/MediaEditor/Sources/MediaEditorRenderer.swift index 517e2c71f0..3892925a85 100644 --- a/submodules/TelegramUI/Components/MediaEditor/Sources/MediaEditorRenderer.swift +++ b/submodules/TelegramUI/Components/MediaEditor/Sources/MediaEditorRenderer.swift @@ -59,15 +59,15 @@ protocol RenderTarget: AnyObject { final class MediaEditorRenderer { enum Input { - case texture(MTLTexture, CMTime, Bool) - case videoBuffer(VideoPixelBuffer) + case texture(MTLTexture, CMTime, Bool, CGRect?) + case videoBuffer(VideoPixelBuffer, CGRect?) case ciImage(CIImage, CMTime) var timestamp: CMTime { switch self { - case let .texture(_, timestamp, _): + case let .texture(_, timestamp, _, _): return timestamp - case let .videoBuffer(videoBuffer): + case let .videoBuffer(videoBuffer, _): return videoBuffer.timestamp case let .ciImage(_, timestamp): return timestamp @@ -85,7 +85,7 @@ final class MediaEditorRenderer { private let ciInputPass = CIInputPass() private let mainVideoInputPass = VideoInputPass() - private let additionalVideoInputPass = VideoInputPass() + private var additionalVideoInputPass: [Int : VideoInputPass] = [:] let videoFinishPass = VideoFinishPass() private let outputRenderPass = OutputRenderPass() @@ -103,7 +103,7 @@ final class MediaEditorRenderer { private var currentMainInput: Input? var currentMainInputMask: MTLTexture? - private var currentAdditionalInput: Input? + private var currentAdditionalInputs: [Input] = [] private(set) var resultTexture: MTLTexture? var displayEnabled = true @@ -156,7 +156,6 @@ final class MediaEditorRenderer { self.commandQueue?.label = "Media Editor Command Queue" self.ciInputPass.setup(device: device, library: library) self.mainVideoInputPass.setup(device: device, library: library) - self.additionalVideoInputPass.setup(device: device, library: library) self.videoFinishPass.setup(device: device, library: library) self.renderPasses.forEach { $0.setup(device: device, library: library) } } @@ -186,23 +185,25 @@ final class MediaEditorRenderer { } private func combinedTextureFromCurrentInputs(device: MTLDevice, commandBuffer: MTLCommandBuffer, textureCache: CVMetalTextureCache) -> MTLTexture? { - var mainTexture: MTLTexture? - var additionalTexture: MTLTexture? - var hasTransparency = false + guard let library = self.library else { + return nil + } + var passMainInput: VideoFinishPass.Input? + var passAdditionalInputs: [VideoFinishPass.Input] = [] - func textureFromInput(_ input: MediaEditorRenderer.Input, videoInputPass: VideoInputPass) -> (MTLTexture, Bool)? { + func textureFromInput(_ input: MediaEditorRenderer.Input, videoInputPass: VideoInputPass) -> VideoFinishPass.Input? 
{ switch input { - case let .texture(texture, _, hasTransparency): - return (texture, hasTransparency) - case let .videoBuffer(videoBuffer): + case let .texture(texture, _, hasTransparency, rect): + return VideoFinishPass.Input(texture: texture, hasTransparency: hasTransparency, rect: rect) + case let .videoBuffer(videoBuffer, rect): if let texture = videoInputPass.processPixelBuffer(videoBuffer, textureCache: textureCache, device: device, commandBuffer: commandBuffer) { - return (texture, false) + return VideoFinishPass.Input(texture: texture, hasTransparency: false, rect: rect) } else { return nil } case let .ciImage(image, _): if let texture = self.ciInputPass.processCIImage(image, device: device, commandBuffer: commandBuffer) { - return (texture, true) + return VideoFinishPass.Input(texture: texture, hasTransparency: true, rect: nil) } else { return nil } @@ -213,16 +214,26 @@ final class MediaEditorRenderer { return nil } - if let (texture, transparency) = textureFromInput(mainInput, videoInputPass: self.mainVideoInputPass) { - mainTexture = texture - hasTransparency = transparency + if let input = textureFromInput(mainInput, videoInputPass: self.mainVideoInputPass) { + passMainInput = input } - if let additionalInput = self.currentAdditionalInput, let (texture, _) = textureFromInput(additionalInput, videoInputPass: self.additionalVideoInputPass) { - additionalTexture = texture + var index = 0 + for additionalInput in self.currentAdditionalInputs { + let videoInputPass: VideoInputPass + if let current = self.additionalVideoInputPass[index] { + videoInputPass = current + } else { + videoInputPass = VideoInputPass() + videoInputPass.setup(device: device, library: library) + self.additionalVideoInputPass[index] = videoInputPass + } + if let input = textureFromInput(additionalInput, videoInputPass: videoInputPass) { + passAdditionalInputs.append(input) + } + index += 1 } - - if let mainTexture { - return self.videoFinishPass.process(input: mainTexture, inputMask: self.currentMainInputMask, hasTransparency: hasTransparency, secondInput: additionalTexture, timestamp: mainInput.timestamp, device: device, commandBuffer: commandBuffer) + if let passMainInput { + return self.videoFinishPass.process(input: passMainInput, inputMask: self.currentMainInputMask, hasTransparency: passMainInput.hasTransparency, secondInput: passAdditionalInputs, timestamp: mainInput.timestamp, device: device, commandBuffer: commandBuffer) } else { return nil } @@ -300,7 +311,7 @@ final class MediaEditorRenderer { } if let onNextAdditionalRender = self.onNextAdditionalRender { - if self.currentAdditionalInput != nil { + if !self.currentAdditionalInputs.isEmpty { self.onNextAdditionalRender = nil Queue.mainQueue().after(0.016) { onNextAdditionalRender() @@ -327,7 +338,7 @@ final class MediaEditorRenderer { func consume( main: MediaEditorRenderer.Input, - additional: MediaEditorRenderer.Input?, + additionals: [MediaEditorRenderer.Input], render: Bool, displayEnabled: Bool = true ) { @@ -338,7 +349,7 @@ final class MediaEditorRenderer { } self.currentMainInput = main - self.currentAdditionalInput = additional + self.currentAdditionalInputs = additionals if render { self.renderFrame() diff --git a/submodules/TelegramUI/Components/MediaEditor/Sources/MediaEditorValues.swift b/submodules/TelegramUI/Components/MediaEditor/Sources/MediaEditorValues.swift index 828e7500d8..5fbfe5950a 100644 --- a/submodules/TelegramUI/Components/MediaEditor/Sources/MediaEditorValues.swift +++ 
b/submodules/TelegramUI/Components/MediaEditor/Sources/MediaEditorValues.swift
@@ -300,6 +300,9 @@ public final class MediaEditorValues: Codable, Equatable {
         if lhs.additionalVideoVolume != rhs.additionalVideoVolume {
             return false
         }
+        if lhs.collage != rhs.collage {
+            return false
+        }
         if lhs.drawing !== rhs.drawing {
             return false
         }
@@ -324,6 +327,9 @@ public final class MediaEditorValues: Codable, Equatable {
         if lhs.audioTrackSamples != rhs.audioTrackSamples {
             return false
         }
+        if lhs.collageTrackSamples != rhs.collageTrackSamples {
+            return false
+        }
         if lhs.coverImageTimestamp != rhs.coverImageTimestamp {
             return false
         }
@@ -387,6 +393,7 @@ public final class MediaEditorValues: Codable, Equatable {
         case additionalVideoTrimRange
         case additionalVideoOffset
         case additionalVideoVolume
+        case collage
         case nightTheme
         case drawing
@@ -401,6 +408,131 @@ public final class MediaEditorValues: Codable, Equatable {
         case qualityPreset
     }
     
+    public struct VideoCollageItem: Codable, Equatable {
+        enum DecodingError: Error {
+            case generic
+        }
+        
+        private enum CodingKeys: String, CodingKey {
+            case contentType
+            case contentValue
+            case isVideo
+            case frame
+            case videoTrimRange
+            case videoOffset
+            case videoVolume
+        }
+        
+        public enum Content: Equatable {
+            case main
+            case imageFile(path: String)
+            case videoFile(path: String)
+            case asset(localIdentifier: String, isVideo: Bool)
+            
+            public var isVideo: Bool {
+                switch self {
+                case .videoFile, .asset(_, true):
+                    return true
+                default:
+                    return false
+                }
+            }
+        }
+        
+        public let content: Content
+        public let frame: CGRect
+        
+        public let videoTrimRange: Range<Double>?
+        public let videoOffset: Double?
+        public let videoVolume: CGFloat?
+        
+        public init(
+            content: Content,
+            frame: CGRect,
+            videoTrimRange: Range<Double>?,
+            videoOffset: Double?,
+            videoVolume: CGFloat?
+        ) {
+            self.content = content
+            self.frame = frame
+            self.videoTrimRange = videoTrimRange
+            self.videoOffset = videoOffset
+            self.videoVolume = videoVolume
+        }
+        
+        public init(from decoder: Decoder) throws {
+            let container = try decoder.container(keyedBy: CodingKeys.self)
+            switch try container.decode(Int32.self, forKey: .contentType) {
+            case 0:
+                self.content = .main
+            case 1:
+                self.content = .imageFile(path: try container.decode(String.self, forKey: .contentValue))
+            case 2:
+                self.content = .videoFile(path: try container.decode(String.self, forKey: .contentValue))
+            case 3:
+                self.content = .asset(localIdentifier: try container.decode(String.self, forKey: .contentValue), isVideo: try container.decode(Bool.self, forKey: .isVideo))
+            default:
+                throw DecodingError.generic
+            }
+            self.frame = try container.decode(CGRect.self, forKey: .frame)
+            self.videoTrimRange = try container.decodeIfPresent(Range<Double>.self, forKey: .videoTrimRange)
+            self.videoOffset = try container.decodeIfPresent(Double.self, forKey: .videoOffset)
+            self.videoVolume = try container.decodeIfPresent(CGFloat.self, forKey: .videoVolume)
+        }
+        
+        public func encode(to encoder: any Encoder) throws {
+            var container = encoder.container(keyedBy: CodingKeys.self)
+            switch self.content {
+            case .main:
+                try container.encode(Int32(0), forKey: .contentType)
+            case let .imageFile(value):
+                try container.encode(Int32(1), forKey: .contentType)
+                try container.encode(value, forKey: .contentValue)
+            case let .videoFile(value):
+                try container.encode(Int32(2), forKey: .contentType)
+                try container.encode(value, forKey: .contentValue)
+            case let .asset(value, isVideo):
+                try container.encode(Int32(3), forKey: .contentType)
+                try container.encode(value, forKey: .contentValue)
+                try container.encode(isVideo, forKey: .isVideo)
+            }
+            try container.encode(self.frame, forKey: .frame)
+            try container.encodeIfPresent(self.videoTrimRange, forKey: .videoTrimRange)
+            try container.encodeIfPresent(self.videoOffset, forKey: .videoOffset)
+            try container.encodeIfPresent(self.videoVolume, forKey: .videoVolume)
+        }
+        
+        func withUpdatedVideoTrimRange(_ videoTrimRange: Range<Double>?) -> VideoCollageItem {
+            return VideoCollageItem(
+                content: self.content,
+                frame: self.frame,
+                videoTrimRange: videoTrimRange,
+                videoOffset: self.videoOffset,
+                videoVolume: self.videoVolume
+            )
+        }
+        
+        func withUpdatedVideoOffset(_ videoOffset: Double?) -> VideoCollageItem {
+            return VideoCollageItem(
+                content: self.content,
+                frame: self.frame,
+                videoTrimRange: self.videoTrimRange,
+                videoOffset: videoOffset,
+                videoVolume: self.videoVolume
+            )
+        }
+        
+        func withUpdatedVideoVolume(_ videoVolume: CGFloat?) -> VideoCollageItem {
+            return VideoCollageItem(
+                content: self.content,
+                frame: self.frame,
+                videoTrimRange: self.videoTrimRange,
+                videoOffset: self.videoOffset,
+                videoVolume: videoVolume
+            )
+        }
+    }
+    
     public let peerId: EnginePeer.Id
     public let originalDimensions: PixelDimensions
@@ -425,11 +557,13 @@ public final class MediaEditorValues: Codable, Equatable {
     public let additionalVideoScale: CGFloat?
     public let additionalVideoRotation: CGFloat?
     public let additionalVideoPositionChanges: [VideoPositionChange]
-    
+
     public let additionalVideoTrimRange: Range<Double>?
     public let additionalVideoOffset: Double?
     public let additionalVideoVolume: CGFloat?
     
+    public let collage: [VideoCollageItem]
+    
     public let nightTheme: Bool
     public let drawing: UIImage?
     public let maskDrawing: UIImage?
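Note: VideoCollageItem serializes its Content through a contentType/contentValue tag pair rather than synthesized Codable, so each case survives persistence. A sketch of the round trip this enables (module-internal; the unit-square tile frames are an assumption, the diff does not pin down the coordinate space):

    import CoreGraphics
    import Foundation

    let items: [MediaEditorValues.VideoCollageItem] = [
        MediaEditorValues.VideoCollageItem(
            content: .main,
            frame: CGRect(x: 0.0, y: 0.0, width: 0.5, height: 1.0),
            videoTrimRange: 0.0 ..< 3.0,
            videoOffset: nil,
            videoVolume: 1.0
        ),
        MediaEditorValues.VideoCollageItem(
            content: .asset(localIdentifier: "local-id", isVideo: true),
            frame: CGRect(x: 0.5, y: 0.0, width: 0.5, height: 1.0),
            videoTrimRange: nil,
            videoOffset: nil,
            videoVolume: nil
        )
    ]
    do {
        let data = try JSONEncoder().encode(items)
        let decoded = try JSONDecoder().decode([MediaEditorValues.VideoCollageItem].self, from: data)
        assert(decoded == items) // contentType/contentValue tagging preserves each case
    } catch {
        assertionFailure("collage items failed to round-trip: \(error)")
    }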
@@ -442,6 +576,8 @@ public final class MediaEditorValues: Codable, Equatable {
     public let audioTrackVolume: CGFloat?
     public let audioTrackSamples: MediaAudioTrackSamples?
     
+    public let collageTrackSamples: MediaAudioTrackSamples?
+    
     public let coverImageTimestamp: Double?
     
     public let qualityPreset: MediaQualityPreset?
@@ -482,6 +618,7 @@ public final class MediaEditorValues: Codable, Equatable {
         additionalVideoTrimRange: Range<Double>?,
         additionalVideoOffset: Double?,
         additionalVideoVolume: CGFloat?,
+        collage: [VideoCollageItem],
         nightTheme: Bool,
         drawing: UIImage?,
         maskDrawing: UIImage?,
@@ -492,6 +629,7 @@ public final class MediaEditorValues: Codable, Equatable {
         audioTrackOffset: Double?,
         audioTrackVolume: CGFloat?,
         audioTrackSamples: MediaAudioTrackSamples?,
+        collageTrackSamples: MediaAudioTrackSamples?,
         coverImageTimestamp: Double?,
         qualityPreset: MediaQualityPreset?
     ) {
@@ -518,6 +656,7 @@ public final class MediaEditorValues: Codable, Equatable {
         self.additionalVideoTrimRange = additionalVideoTrimRange
         self.additionalVideoOffset = additionalVideoOffset
         self.additionalVideoVolume = additionalVideoVolume
+        self.collage = collage
         self.nightTheme = nightTheme
         self.drawing = drawing
         self.maskDrawing = maskDrawing
@@ -528,6 +667,7 @@ public final class MediaEditorValues: Codable, Equatable {
         self.audioTrackOffset = audioTrackOffset
         self.audioTrackVolume = audioTrackVolume
         self.audioTrackSamples = audioTrackSamples
+        self.collageTrackSamples = collageTrackSamples
         self.coverImageTimestamp = coverImageTimestamp
         self.qualityPreset = qualityPreset
     }
@@ -570,6 +710,8 @@ public final class MediaEditorValues: Codable, Equatable {
         self.additionalVideoOffset = try container.decodeIfPresent(Double.self, forKey: .additionalVideoOffset)
         self.additionalVideoVolume = try container.decodeIfPresent(CGFloat.self, forKey: .additionalVideoVolume)
         
+        self.collage = try container.decodeIfPresent([VideoCollageItem].self, forKey: .collage) ?? []
+        
         self.nightTheme = try container.decodeIfPresent(Bool.self, forKey: .nightTheme) ?? false
         if let drawingData = try container.decodeIfPresent(Data.self, forKey: .drawing), let image = UIImage(data: drawingData) {
             self.drawing = image
@@ -598,6 +740,7 @@ public final class MediaEditorValues: Codable, Equatable {
         self.audioTrackVolume = try container.decodeIfPresent(CGFloat.self, forKey: .audioTrackVolume)
         self.audioTrackSamples = nil
+        self.collageTrackSamples = nil
         
         self.coverImageTimestamp = try container.decodeIfPresent(Double.self, forKey: .coverImageTimestamp)
@@ -639,6 +782,8 @@ public final class MediaEditorValues: Codable, Equatable {
         try container.encodeIfPresent(self.additionalVideoOffset, forKey: .additionalVideoOffset)
         try container.encodeIfPresent(self.additionalVideoVolume, forKey: .additionalVideoVolume)
         
+        try container.encode(self.collage, forKey: .collage)
+        
         try container.encode(self.nightTheme, forKey: .nightTheme)
         if let drawing = self.drawing, let pngDrawingData = drawing.pngData() {
             try container.encode(pngDrawingData, forKey: .drawing)
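Note: collage is decoded with decodeIfPresent and an empty-array fallback, so drafts written before this change still decode, while collageTrackSamples, like audioTrackSamples, is transient and reset to nil on decode. A standalone illustration of that fallback pattern with hypothetical types:

    import Foundation

    struct Draft: Codable {
        var title: String
        var collage: [Int]

        enum CodingKeys: String, CodingKey { case title, collage }

        init(from decoder: Decoder) throws {
            let container = try decoder.container(keyedBy: CodingKeys.self)
            self.title = try container.decode(String.self, forKey: .title)
            // Missing in old payloads -> default to empty, mirroring `collage` above.
            self.collage = try container.decodeIfPresent([Int].self, forKey: .collage) ?? []
        }
    }

    let old = Data(#"{"title": "story"}"#.utf8)
    let draft = try? JSONDecoder().decode(Draft.self, from: old)
    print(draft?.collage ?? []) // [] rather than a decoding error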
@@ -668,109 +813,117 @@ public final class MediaEditorValues: Codable, Equatable {
     }
     
     public func makeCopy() -> MediaEditorValues {
-        return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset)
+        return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, collage: self.collage, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, collageTrackSamples: self.collageTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset)
     }
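Note: every withUpdated* copier below re-invokes the full memberwise initializer, so the two new arguments (collage, collageTrackSamples) must be threaded through each call site; omitting one would silently drop the collage on an unrelated edit. A reduced illustration of the pattern (hypothetical two-field value instead of the real three dozen fields):

    import CoreGraphics

    struct Values {
        let volume: CGFloat
        let collage: [Int]

        func withUpdatedVolume(_ volume: CGFloat) -> Values {
            // Every stored property must be repeated; forgetting `collage` here
            // would silently reset it whenever the volume is edited.
            return Values(volume: volume, collage: self.collage)
        }
    }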
     func withUpdatedCrop(offset: CGPoint, scale: CGFloat, rotation: CGFloat, mirroring: Bool) -> MediaEditorValues {
-        return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: offset, cropRect: self.cropRect, cropScale: scale, cropRotation: rotation, cropMirroring: mirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset)
+        return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: offset, cropRect: self.cropRect, cropScale: scale, cropRotation: rotation, cropMirroring: mirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, collage: self.collage, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, collageTrackSamples: self.collageTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset)
     }
     
     public func withUpdatedCropRect(cropRect: CGRect, rotation: CGFloat, mirroring: Bool) -> MediaEditorValues {
-        return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: .zero, cropRect: cropRect, cropScale: 1.0, cropRotation: rotation, cropMirroring: mirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset)
+        return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: .zero, cropRect: cropRect, cropScale: 1.0, cropRotation: rotation, cropMirroring: mirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, collage: self.collage, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, collageTrackSamples: self.collageTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset)
     }
     
     func withUpdatedGradientColors(gradientColors: [UIColor]) -> MediaEditorValues {
-        return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset)
+        return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, collage: self.collage, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, collageTrackSamples: self.collageTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset)
     }
     
     func withUpdatedVideoIsMuted(_ videoIsMuted: Bool) -> MediaEditorValues {
-        return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset)
+        return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, collage: self.collage, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, collageTrackSamples: self.collageTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset)
     }
     
     func withUpdatedVideoIsFullHd(_ videoIsFullHd: Bool) -> MediaEditorValues {
-        return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset)
+        return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, collage: self.collage, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, collageTrackSamples: self.collageTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset)
     }
     func withUpdatedVideoIsMirrored(_ videoIsMirrored: Bool) -> MediaEditorValues {
-        return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset)
+        return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, collage: self.collage, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, collageTrackSamples: self.collageTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset)
     }
     
     func withUpdatedVideoVolume(_ videoVolume: CGFloat?) -> MediaEditorValues {
-        return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset)
+        return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, collage: self.collage, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, collageTrackSamples: self.collageTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset)
     }
     
     func withUpdatedAdditionalVideo(path: String?, isDual: Bool, positionChanges: [VideoPositionChange]) -> MediaEditorValues {
-        return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: path, additionalVideoIsDual: isDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: positionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset)
+        return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: path, additionalVideoIsDual: isDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: positionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, collage: self.collage, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, collageTrackSamples: self.collageTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset)
     }
     
     func withUpdatedAdditionalVideo(position: CGPoint, scale: CGFloat, rotation: CGFloat) -> MediaEditorValues {
-        return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: position, additionalVideoScale: scale, additionalVideoRotation: rotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset)
+        return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: position, additionalVideoScale: scale, additionalVideoRotation: rotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, collage: self.collage, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, collageTrackSamples: self.collageTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset)
     }
     
     func withUpdatedAdditionalVideoTrimRange(_ additionalVideoTrimRange: Range<Double>?) -> MediaEditorValues {
-        return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset)
+        return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, collage: self.collage, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, collageTrackSamples: self.collageTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset)
     }
     func withUpdatedAdditionalVideoOffset(_ additionalVideoOffset: Double?) -> MediaEditorValues {
-        return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset)
+        return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, collage: self.collage, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, collageTrackSamples: self.collageTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset)
     }
     
     func withUpdatedAdditionalVideoVolume(_ additionalVideoVolume: CGFloat?) -> MediaEditorValues {
-        return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: additionalVideoVolume, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset)
+        return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: additionalVideoVolume, collage: self.collage, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, collageTrackSamples: self.collageTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset)
     }
+    
+    func withUpdatedCollage(_ collage: [VideoCollageItem]) -> MediaEditorValues {
+        return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, collage: collage, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, collageTrackSamples: self.collageTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset)
+    }
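Note: withUpdatedCollage is the single mutation point for the collage array; per-tile edits go through VideoCollageItem's own withUpdated* helpers and a rebuilt array. A sketch (module-internal access assumed, since these helpers are not public):

    // Sketch: mute every video tile except the first, then write the
    // collage back through the single update entry point.
    func muteAllButFirstTile(values: MediaEditorValues) -> MediaEditorValues {
        var collage = values.collage
        for index in collage.indices where index != 0 && collage[index].content.isVideo {
            collage[index] = collage[index].withUpdatedVideoVolume(0.0)
        }
        return values.withUpdatedCollage(collage)
    }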
     func withUpdatedVideoTrimRange(_ videoTrimRange: Range<Double>) -> MediaEditorValues {
-        return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset)
+        return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, collage: self.collage, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, collageTrackSamples: self.collageTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset)
     }
     
     func withUpdatedDrawingAndEntities(drawing: UIImage?, entities: [CodableDrawingEntity]) -> MediaEditorValues {
-        return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, nightTheme: self.nightTheme, drawing: drawing, maskDrawing: self.maskDrawing, entities: entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset)
+        return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, collage: self.collage, nightTheme: self.nightTheme, drawing: drawing, maskDrawing: self.maskDrawing, entities: entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, collageTrackSamples: self.collageTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset)
     }
     
     public func withUpdatedMaskDrawing(maskDrawing: UIImage?) -> MediaEditorValues {
-        return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset)
+        return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, collage: self.collage, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, collageTrackSamples: self.collageTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset)
     }
     
     func withUpdatedToolValues(_ toolValues: [EditorToolKey: Any]) -> MediaEditorValues {
-        return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset)
+        return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, collage: self.collage, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, collageTrackSamples: self.collageTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset)
     }
     
     func withUpdatedAudioTrack(_ audioTrack: MediaAudioTrack?) -> MediaEditorValues {
-        return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset)
+        return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, collage: self.collage, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, collageTrackSamples: self.collageTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset)
     }
     
     func withUpdatedAudioTrackTrimRange(_ audioTrackTrimRange: Range<Double>?)
-> MediaEditorValues { - return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset) + return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, collage: self.collage, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, collageTrackSamples: self.collageTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset) } func withUpdatedAudioTrackOffset(_ audioTrackOffset: Double?) 
-> MediaEditorValues { - return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset) + return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, collage: self.collage, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, collageTrackSamples: self.collageTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset) } func withUpdatedAudioTrackVolume(_ audioTrackVolume: CGFloat?) 
-> MediaEditorValues { - return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: audioTrackVolume, audioTrackSamples: self.audioTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset) + return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, collage: self.collage, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: audioTrackVolume, audioTrackSamples: self.audioTrackSamples, collageTrackSamples: self.collageTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset) } func withUpdatedAudioTrackSamples(_ audioTrackSamples: MediaAudioTrackSamples?) 
-> MediaEditorValues { - return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: audioTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset) + return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, collage: self.collage, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: audioTrackSamples, collageTrackSamples: self.collageTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset) + } + + func withUpdatedCollageTrackSamples(_ collageTrackSamples: MediaAudioTrackSamples?) 
-> MediaEditorValues { + return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, collage: self.collage, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, collageTrackSamples: collageTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset) } func withUpdatedNightTheme(_ nightTheme: Bool) -> MediaEditorValues { - return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, nightTheme: nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset) + return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: 
self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, collage: self.collage, nightTheme: nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, collageTrackSamples: self.collageTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset) } public func withUpdatedEntities(_ entities: [CodableDrawingEntity]) -> MediaEditorValues { - return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset) + return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, collage: self.collage, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: 
self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, collageTrackSamples: self.collageTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset) } public func withUpdatedCoverImageTimestamp(_ coverImageTimestamp: Double?) -> MediaEditorValues { - return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, coverImageTimestamp: coverImageTimestamp, qualityPreset: self.qualityPreset) + return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, collage: self.collage, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, collageTrackSamples: self.collageTrackSamples, coverImageTimestamp: coverImageTimestamp, qualityPreset: self.qualityPreset) } public func withUpdatedQualityPreset(_ qualityPreset: MediaQualityPreset?) 
-> MediaEditorValues { - return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: qualityPreset) + return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, collage: self.collage, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, collageTrackSamples: self.collageTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: qualityPreset) } public var resultDimensions: PixelDimensions { diff --git a/submodules/TelegramUI/Components/MediaEditor/Sources/MediaEditorVideoExport.swift b/submodules/TelegramUI/Components/MediaEditor/Sources/MediaEditorVideoExport.swift index 27e40f3e64..302feaa287 100644 --- a/submodules/TelegramUI/Components/MediaEditor/Sources/MediaEditorVideoExport.swift +++ b/submodules/TelegramUI/Components/MediaEditor/Sources/MediaEditorVideoExport.swift @@ -5,6 +5,7 @@ import SwiftSignalKit import TelegramCore import Postbox import ImageTransparency +import Photos enum ExportWriterStatus { case unknown @@ -96,15 +97,9 @@ public final class MediaEditorVideoExport { return nil } } - + var 
additionalVideoStartTime: CMTime { - let lowerBound = self.values.additionalVideoTrimRange?.lowerBound ?? 0.0 - let offset = -min(0.0, self.values.additionalVideoOffset ?? 0.0) - if !lowerBound.isZero || !offset.isZero { - return CMTime(seconds: offset + lowerBound, preferredTimescale: CMTimeScale(NSEC_PER_SEC)) - } else { - return .zero - } + return videoStartTime(trimRange: self.values.additionalVideoTrimRange, offset: self.values.additionalVideoOffset) } var audioTimeRange: CMTimeRange? { @@ -189,12 +184,40 @@ public final class MediaEditorVideoExport { private var reader: AVAssetReader? private var videoOutput: AVAssetReaderOutput? private var textureRotation: TextureRotation = .rotate0Degrees + private var videoRect: CGRect? private var frameRate: Float? - private var additionalVideoOutput: AVAssetReaderOutput? - private var additionalTextureRotation: TextureRotation = .rotate0Degrees - private var additionalFrameRate: Float? - private var additionalVideoDuration: Double? + class VideoOutput { + enum Output { + case videoOutput(AVAssetReaderOutput) + case image(UIImage) + } + let output: Output + let rect: CGRect? + let textureRotation: TextureRotation + let duration: Double + let frameRate: Float + let startTime: CMTime + + init( + output: Output, + rect: CGRect?, + textureRotation: TextureRotation, + duration: Double, + frameRate: Float, + startTime: CMTime + ) { + self.output = output + self.rect = rect + self.textureRotation = textureRotation + self.duration = duration + self.frameRate = frameRate + self.startTime = startTime + } + + var skippingUpdate = false + } + private var additionalVideoOutput: [Int: VideoOutput] = [:] private var mainComposeFramerate: Float? @@ -251,8 +274,8 @@ public final class MediaEditorVideoExport { } enum Input { - case image(UIImage) - case video(AVAsset) + case image(image: UIImage, rect: CGRect?) + case video(asset: AVAsset, rect: CGRect?, rotation: TextureRotation, duration: Double, trimRange: Range?, offset: Double?, volume: CGFloat?) case sticker(TelegramMediaFile) var isVideo: Bool { @@ -266,9 +289,52 @@ public final class MediaEditorVideoExport { private func setup() { var mainAsset: AVAsset? + var signals: [Signal] = [] + + var mainRect: CGRect? var additionalAsset: AVAsset? 
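A note on the `withUpdated*` helpers earlier in this diff: `MediaEditorValues` is immutable, so adding `collage` and `collageTrackSamples` meant re-invoking the full memberwise initializer in every helper. A minimal sketch of that copy-with pattern (toy types and hypothetical names, not the real initializer):

```swift
import UIKit

// Toy illustration of the copy-with pattern used by MediaEditorValues.
// Because the type is immutable, each `withUpdated*` helper rebuilds the
// whole value, which is why adding one field touched every helper above.
struct ToyEditorValues {
    let drawing: UIImage?
    let collage: [String]            // stand-in for the real collage item type
    let coverImageTimestamp: Double?

    func withUpdatedDrawing(_ drawing: UIImage?) -> ToyEditorValues {
        // Copy every field, substituting only the one being updated.
        return ToyEditorValues(drawing: drawing, collage: self.collage, coverImageTimestamp: self.coverImageTimestamp)
    }
}
```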
-        if let additionalPath = self.configuration.values.additionalVideoPath {
-            additionalAsset = AVURLAsset(url: URL(fileURLWithPath: additionalPath))
+        if !self.configuration.values.collage.isEmpty {
+            for item in self.configuration.values.collage {
+                switch item.content {
+                case .main:
+                    mainRect = item.frame
+                case let .imageFile(path):
+                    if let image = UIImage(contentsOfFile: path) {
+                        signals.append(.single(.image(image: image, rect: item.frame)))
+                    }
+                case let .videoFile(path):
+                    let asset = AVURLAsset(url: URL(fileURLWithPath: path))
+                    signals.append(.single(.video(asset: asset, rect: item.frame, rotation: textureRotatonForAVAsset(asset, mirror: false), duration: asset.duration.seconds, trimRange: item.videoTrimRange, offset: item.videoOffset, volume: item.videoVolume)))
+                case let .asset(localIdentifier, _):
+                    let fetchResult = PHAsset.fetchAssets(withLocalIdentifiers: [localIdentifier], options: nil)
+                    if fetchResult.count != 0 {
+                        let asset = fetchResult.object(at: 0)
+                        
+                        let signal: Signal<Input, NoError> = Signal { subscriber in
+                            let options = PHVideoRequestOptions()
+                            options.isNetworkAccessAllowed = true
+                            options.deliveryMode = .highQualityFormat
+                            
+                            PHImageManager.default().requestAVAsset(forVideo: asset, options: options, resultHandler: { avAsset, _, _ in
+                                guard let avAsset else {
+                                    subscriber.putCompletion()
+                                    return
+                                }
+                                subscriber.putNext(.video(asset: avAsset, rect: item.frame, rotation: textureRotatonForAVAsset(avAsset, mirror: false), duration: avAsset.duration.seconds, trimRange: item.videoTrimRange, offset: item.videoOffset, volume: item.videoVolume))
+                                subscriber.putCompletion()
+                            })
+                            
+                            return EmptyDisposable
+                        }
+                        
+                        signals.append(signal)
+                    }
+                }
+            }
+        } else if let additionalPath = self.configuration.values.additionalVideoPath {
+            let asset = AVURLAsset(url: URL(fileURLWithPath: additionalPath))
+            additionalAsset = asset
+            signals = [.single(.video(asset: asset, rect: nil, rotation: textureRotatonForAVAsset(asset, mirror: true), duration: asset.duration.seconds, trimRange: nil, offset: nil, volume: nil))]
         }
         
         var audioAsset: AVAsset?
@@ -278,16 +344,14 @@ public final class MediaEditorVideoExport {
         }
         
         var mainInput: Input
-        let additionalInput: Input? = additionalAsset.flatMap { .video($0) }
         var isStory = true
-        
         switch self.subject {
         case let .video(asset, isStoryValue):
             mainAsset = asset
-            mainInput = .video(asset)
+            mainInput = .video(asset: asset, rect: mainRect, rotation: textureRotatonForAVAsset(asset), duration: asset.duration.seconds, trimRange: nil, offset: nil, volume: nil)
             isStory = isStoryValue
         case let .image(image):
-            mainInput = .image(image)
+            mainInput = .image(image: image, rect: nil)
         case let .sticker(file):
             mainInput = .sticker(file)
         }
@@ -324,7 +388,13 @@ public final class MediaEditorVideoExport {
         }
         self.durationValue = duration
         
-        self.setupWithInputs(main: mainInput, additional: additionalInput, audio: audioAsset, isStory: isStory)
+        let _ = (combineLatest(signals)
+        |> deliverOn(self.queue)).start(next: { [weak self] additionalInputs in
+            guard let self else {
+                return
+            }
+            self.setupWithInputs(main: mainInput, additional: additionalInputs, audio: audioAsset, isStory: isStory)
+        })
     }
     
     private func setupComposer() {
@@ -337,6 +407,11 @@ public final class MediaEditorVideoExport {
             duration = nil
         }
         
+        var additionalVideoDuration: Double?
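The `.asset` branch above defers setup until Photos delivers an `AVAsset` for each collage item, and `combineLatest` fires `setupWithInputs` only once every per-item signal has produced its input. A standalone sketch of the same Photos lookup without SwiftSignalKit (helper name hypothetical; assumes photo-library access is already authorized):

```swift
import Photos
import AVFoundation

// Resolve a Photos local identifier to a playable AVAsset, as the `.asset`
// collage branch does. Delivery is asynchronous and may hit the network
// when the original lives in iCloud.
func fetchCollageVideoAsset(localIdentifier: String, completion: @escaping (AVAsset?) -> Void) {
    let fetchResult = PHAsset.fetchAssets(withLocalIdentifiers: [localIdentifier], options: nil)
    guard let phAsset = fetchResult.firstObject else {
        completion(nil)                       // unknown identifier
        return
    }
    let options = PHVideoRequestOptions()
    options.isNetworkAccessAllowed = true     // allow iCloud downloads
    options.deliveryMode = .highQualityFormat
    PHImageManager.default().requestAVAsset(forVideo: phAsset, options: options) { avAsset, _, _ in
        completion(avAsset)                   // nil for stills or on failure
    }
}
```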
+        if self.configuration.values.collage.isEmpty, let output = self.additionalVideoOutput.values.first {
+            additionalVideoDuration = output.duration
+        }
+        
         self.composer = MediaEditorComposer(
             postbox: self.postbox,
             values: self.configuration.values,
@@ -344,19 +419,38 @@ public final class MediaEditorVideoExport {
             outputDimensions: self.configuration.dimensions,
             textScale: self.textScale,
             videoDuration: duration,
-            additionalVideoDuration: self.additionalVideoDuration
+            additionalVideoDuration: additionalVideoDuration
         )
     }
     
-    private func setupWithInputs(main: Input, additional: Input?, audio: AVAsset?, isStory: Bool) {
+    private func setupWithInputs(main: Input, additional: [Input], audio: AVAsset?, isStory: Bool) {
         var hasVideoOrAudio = false
-        if main.isVideo || additional?.isVideo == true || audio != nil {
+        if main.isVideo || audio != nil {
             hasVideoOrAudio = true
         }
+        for input in additional {
+            if input.isVideo {
+                hasVideoOrAudio = true
+            }
+        }
+        
+        enum AdditionalTrack {
+            case image(image: UIImage, rect: CGRect?)
+            case video(track: AVMutableCompositionTrack, rect: CGRect?, rotation: TextureRotation, duration: Double, frameRate: Float, startTime: CMTime?)
+        }
+        
+        func frameRate(for track: AVCompositionTrack) -> Float {
+            if track.nominalFrameRate > 0.0 {
+                return track.nominalFrameRate
+            } else if track.minFrameDuration.seconds > 0.0 {
+                return Float(1.0 / track.minFrameDuration.seconds)
+            }
+            return 30.0
+        }
         
         var composition: AVMutableComposition?
         var mainVideoTrack: AVMutableCompositionTrack?
-        var additionalVideoTrack: AVMutableCompositionTrack?
+        var additionalTracks: [AdditionalTrack] = []
         var audioMix: AVMutableAudioMix?
         
         if hasVideoOrAudio, let duration = self.durationValue {
@@ -376,8 +470,9 @@ public final class MediaEditorVideoExport {
         }
         
         var readerRange = wholeRange
-        if case let .video(asset) = main {
-            self.textureRotation = textureRotatonForAVAsset(asset)
+        if case let .video(asset, rect, rotation, _, _, _, _) = main {
+            self.videoRect = rect
+            self.textureRotation = rotation
             if let videoAssetTrack = asset.tracks(withMediaType: .video).first {
                 if let compositionTrack = composition?.addMutableTrack(withMediaType: .video, preferredTrackID: kCMPersistentTrackID_Invalid) {
                     mainVideoTrack = compositionTrack
@@ -402,10 +497,42 @@ public final class MediaEditorVideoExport {
                 readerRange = timeRange
             }
         }
-        if let additional, case let .video(asset) = additional {
-            self.additionalTextureRotation = textureRotatonForAVAsset(asset, mirror: true)
-            self.additionalVideoDuration = asset.duration.seconds
-            
+        
+        if !self.configuration.values.collage.isEmpty {
+            for input in additional {
+                switch input {
+                case let .image(image, rect):
+                    additionalTracks.append(.image(image: image, rect: rect))
+                case let .video(asset, rect, rotation, duration, trimRange, offset, volume):
+                    let startTime = videoStartTime(trimRange: trimRange, offset: offset)
+                    let timeRange = clampedRange(trackDuration: asset.duration, trackTrimRange: videoTimeRange(trimRange: trimRange), trackStart: startTime, maxDuration: readerRange.end)
+                    
+                    if let videoAssetTrack = asset.tracks(withMediaType: .video).first {
+                        if let compositionTrack = composition?.addMutableTrack(withMediaType: .video, preferredTrackID: kCMPersistentTrackID_Invalid) {
+                            additionalTracks.append(.video(track: compositionTrack, rect: rect, rotation: rotation, duration: duration, frameRate: frameRate(for: compositionTrack), startTime: startTime))
+                            
+                            compositionTrack.preferredTransform = videoAssetTrack.preferredTransform
+                            
+                            try? compositionTrack.insertTimeRange(timeRange, of: videoAssetTrack, at: startTime)
+                        }
+                    }
+                    if let audioAssetTrack = asset.tracks(withMediaType: .audio).first, volume ?? 1.0 > 0.01 {
+                        if let compositionTrack = composition?.addMutableTrack(withMediaType: .audio, preferredTrackID: kCMPersistentTrackID_Invalid) {
+                            try? compositionTrack.insertTimeRange(timeRange, of: audioAssetTrack, at: startTime)
+                            
+                            if let volume, volume != 1.0 {
+                                let trackParameters = AVMutableAudioMixInputParameters(track: compositionTrack)
+                                trackParameters.trackID = compositionTrack.trackID
+                                trackParameters.setVolume(Float(volume), at: .zero)
+                                audioMixParameters.append(trackParameters)
+                            }
+                        }
+                    }
+                default:
+                    break
+                }
+            }
+        } else if let additional = additional.first, case let .video(asset, _, rotation, duration, _, _, _) = additional {
             let startTime: CMTime
             let timeRange: CMTimeRange
             if mainVideoTrack == nil {
@@ -418,7 +545,8 @@ public final class MediaEditorVideoExport {
             
             if let videoAssetTrack = asset.tracks(withMediaType: .video).first {
                 if let compositionTrack = composition?.addMutableTrack(withMediaType: .video, preferredTrackID: kCMPersistentTrackID_Invalid) {
-                    additionalVideoTrack = compositionTrack
+                    additionalTracks.append(.video(track: compositionTrack, rect: nil, rotation: rotation, duration: duration, frameRate: frameRate(for: compositionTrack), startTime: self.configuration.additionalVideoStartTime))
+                    
                     compositionTrack.preferredTransform = videoAssetTrack.preferredTransform
                     
                     try? compositionTrack.insertTimeRange(timeRange, of: videoAssetTrack, at: startTime)
@@ -440,9 +568,10 @@ public final class MediaEditorVideoExport {
                 readerRange = timeRange
             }
         }
+        
         if let audio, let audioAssetTrack = audio.tracks(withMediaType: .audio).first {
             let startTime: CMTime
-            if mainVideoTrack == nil && additionalVideoTrack == nil {
+            if mainVideoTrack == nil && additionalTracks.isEmpty {
                 startTime = .zero
             } else {
                 startTime = self.configuration.audioStartTime
@@ -510,35 +639,51 @@ public final class MediaEditorVideoExport {
             }
             self.videoOutput = videoOutput
         }
-        if let additionalVideoTrack {
-            let videoOutput = AVAssetReaderTrackOutput(track: additionalVideoTrack, outputSettings: outputSettings)
-            videoOutput.alwaysCopiesSampleData = true
-            if reader.canAdd(videoOutput) {
-                reader.add(videoOutput)
-            } else {
-                self.internalStatus = .finished
-                self.statusValue = .failed(.addVideoOutput)
+        
+        var additionalIndex = 0
+        for track in additionalTracks {
+            switch track {
+            case let .image(image, rect):
+                self.additionalVideoOutput[additionalIndex] = VideoOutput(
+                    output: .image(image),
+                    rect: rect,
+                    textureRotation: .rotate0Degrees,
+                    duration: 0.0,
+                    frameRate: 0.0,
+                    startTime: .zero
+                )
+            case let .video(track, rect, rotation, duration, frameRate, startTime):
+                let videoOutput = AVAssetReaderTrackOutput(track: track, outputSettings: outputSettings)
+                videoOutput.alwaysCopiesSampleData = true
+                if reader.canAdd(videoOutput) {
+                    reader.add(videoOutput)
+                } else {
+                    self.internalStatus = .finished
+                    self.statusValue = .failed(.addVideoOutput)
+                }
+                
+                self.additionalVideoOutput[additionalIndex] = VideoOutput(
+                    output: .videoOutput(videoOutput),
+                    rect: rect,
+                    textureRotation: rotation,
+                    duration: duration,
+                    frameRate: frameRate,
+                    startTime: startTime ?? .zero
+                )
             }
-            self.additionalVideoOutput = videoOutput
+            additionalIndex += 1
         }
-        
-        func frameRate(for track: AVCompositionTrack) -> Float {
-            if track.nominalFrameRate > 0.0 {
-                return track.nominalFrameRate
-            } else if track.minFrameDuration.seconds > 0.0 {
-                return Float(1.0 / track.minFrameDuration.seconds)
-            }
-            return 30.0
-        }
-        
+        
         if let mainVideoTrack {
             self.frameRate = frameRate(for: mainVideoTrack)
         }
-        if let additionalVideoTrack {
-            self.additionalFrameRate = frameRate(for: additionalVideoTrack)
+        
+        var additionalFrameRate: Float?
+        if self.configuration.values.collage.isEmpty, let output = self.additionalVideoOutput.values.first {
+            additionalFrameRate = output.frameRate
         }
-        let sourceFrameRate: Float = (self.frameRate ?? self.additionalFrameRate) ?? 30.0
+        let sourceFrameRate: Float = (self.frameRate ?? additionalFrameRate) ?? 30.0
         self.mainComposeFramerate = round(sourceFrameRate / 30.0) * 30.0
         
         writer.setupVideoInput(configuration: self.configuration, preferredTransform: nil, sourceFrameRate: sourceFrameRate)
@@ -559,10 +704,10 @@ public final class MediaEditorVideoExport {
             writer.setupAudioInput(configuration: self.configuration)
         }
     }
-    }
-    
-    private var skippingAdditionalCopyUpdate = false
+        
+        self.start()
+    }
     
     private func encodeVideo() -> Bool {
         guard let writer = self.writer else {
             return false
@@ -587,7 +732,7 @@ public final class MediaEditorVideoExport {
         var updatedProgress = false
         
         var mainInput: MediaEditorComposer.Input?
-        var additionalInput: MediaEditorComposer.Input?
+        var additionalInput: [MediaEditorComposer.Input?] = []
         var mainTimestamp: CMTime?
         if let videoOutput = self.videoOutput {
             if let sampleBuffer = videoOutput.copyNextSampleBuffer() {
@@ -598,7 +743,7 @@ public final class MediaEditorVideoExport {
                     pixelBuffer: pixelBuffer,
                     rotation: self.textureRotation,
                     timestamp: timestamp
-                ))
+                ), self.videoRect)
                 
                 if let duration = self.durationValue {
                     let startTime = self.reader?.timeRange.start.seconds ?? 0.0
@@ -612,39 +757,53 @@ public final class MediaEditorVideoExport {
                 return false
             }
         }
-        if let additionalVideoOutput = self.additionalVideoOutput {
-            if let mainTimestamp, mainTimestamp < self.configuration.additionalVideoStartTime {
-                
-            } else {
-                if self.skippingAdditionalCopyUpdate {
-                    self.skippingAdditionalCopyUpdate = false
-                } else if let sampleBuffer = additionalVideoOutput.copyNextSampleBuffer() {
-                    if let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) {
-                        let timestamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
-                        additionalInput = .videoBuffer(VideoPixelBuffer(
-                            pixelBuffer: pixelBuffer,
-                            rotation: self.additionalTextureRotation,
-                            timestamp: timestamp
-                        ))
-                        
-                        if !updatedProgress, let duration = self.durationValue {
-                            let startTime = self.reader?.timeRange.start.seconds ?? 0.0
-                            let progress = (timestamp.seconds - startTime) / duration.seconds
-                            self.statusValue = .progress(Float(progress))
-                            updatedProgress = true
-                        }
-                    }
-                    if let additionalFrameRate = self.additionalFrameRate, let mainComposeFramerate = self.mainComposeFramerate {
-                        let additionalFrameRate = round(additionalFrameRate / 30.0) * 30.0
-                        if Int(mainComposeFramerate) == Int(additionalFrameRate) * 2 {
-                            self.skippingAdditionalCopyUpdate = true
+        
+        for i in 0 ..< self.additionalVideoOutput.count {
+            if let additionalVideoOutput = self.additionalVideoOutput[i] {
+                if let mainTimestamp, mainTimestamp < additionalVideoOutput.startTime {
+                    
+                } else {
+                    if additionalVideoOutput.skippingUpdate {
+                        additionalVideoOutput.skippingUpdate = false
+                    } else {
+                        switch additionalVideoOutput.output {
+                        case let .image(image):
+                            if let texture = self.composer?.textureForImage(index: i, image: image) {
+                                additionalInput.append(.texture(texture, .zero, false, additionalVideoOutput.rect))
+                            }
+                        case let .videoOutput(videoOutput):
+                            if let sampleBuffer = videoOutput.copyNextSampleBuffer() {
+                                if let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) {
+                                    let timestamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
+                                    additionalInput.append(.videoBuffer(VideoPixelBuffer(
+                                        pixelBuffer: pixelBuffer,
+                                        rotation: additionalVideoOutput.textureRotation,
+                                        timestamp: timestamp
+                                    ), additionalVideoOutput.rect))
+                                    
+                                    if !updatedProgress, let duration = self.durationValue {
+                                        let startTime = self.reader?.timeRange.start.seconds ?? 0.0
+                                        let progress = (timestamp.seconds - startTime) / duration.seconds
+                                        self.statusValue = .progress(Float(progress))
+                                        updatedProgress = true
+                                    }
+                                }
+                                if let mainComposeFramerate = self.mainComposeFramerate {
+                                    let additionalFrameRate = round(additionalVideoOutput.frameRate / 30.0) * 30.0
+                                    if Int(mainComposeFramerate) == Int(additionalFrameRate) * 2 {
+                                        additionalVideoOutput.skippingUpdate = true
+                                    }
+                                }
+                            }
                         }
                     }
                 }
             }
         }
-        if case let .image(image) = self.subject, let texture = self.composer?.textureForImage(image) {
-            mainInput = .texture(texture, self.imageArguments?.position ?? .zero, imageHasTransparency(image))
+        
+        
+        if case let .image(image) = self.subject, let texture = self.composer?.textureForImage(index: -1, image: image) {
+            mainInput = .texture(texture, self.imageArguments?.position ?? .zero, imageHasTransparency(image), nil)
             
             if !updatedProgress, let imageArguments = self.imageArguments, let duration = self.durationValue {
                 let progress = imageArguments.position.seconds / duration.seconds
@@ -659,7 +818,7 @@ public final class MediaEditorVideoExport {
             timestamp = imageArguments.position
         } else {
             if case .image = self.subject {
-                timestamp = additionalInput?.timestamp
+                timestamp = additionalInput.first??.timestamp
             } else {
                 timestamp = mainInput?.timestamp
             }
@@ -749,7 +908,7 @@ public final class MediaEditorVideoExport {
         return true
     }
     
-    public func start() {
+    private func start() {
         guard self.internalStatus == .idle, let writer = self.writer else {
             self.statusValue = .failed(.invalid)
             return
@@ -765,7 +924,7 @@ public final class MediaEditorVideoExport {
             return
         }
         
-        if self.additionalVideoOutput == nil {
+        if self.additionalVideoOutput.isEmpty {
             switch self.subject {
             case .image, .sticker:
                 self.imageArguments = (Double(self.configuration.frameRate), CMTime(value: 0, timescale: Int32(self.configuration.frameRate)))
@@ -923,3 +1082,21 @@ public final class MediaEditorVideoExport {
         return self.statusPromise.get()
     }
 }
+
+private func videoStartTime(trimRange: Range<Double>?, offset: Double?) -> CMTime {
+    let lowerBound = trimRange?.lowerBound ?? 0.0
+    let offset = -min(0.0, offset ?? 0.0)
+    if !lowerBound.isZero || !offset.isZero {
+        return CMTime(seconds: offset + lowerBound, preferredTimescale: CMTimeScale(NSEC_PER_SEC))
+    } else {
+        return .zero
+    }
+}
+
+private func videoTimeRange(trimRange: Range<Double>?) -> CMTimeRange? {
+    if let videoTrimRange = trimRange {
+        return CMTimeRange(start: CMTime(seconds: videoTrimRange.lowerBound, preferredTimescale: CMTimeScale(NSEC_PER_SEC)), end: CMTime(seconds: videoTrimRange.upperBound, preferredTimescale: CMTimeScale(NSEC_PER_SEC)))
+    } else {
+        return nil
+    }
+}
diff --git a/submodules/TelegramUI/Components/MediaEditor/Sources/UniversalTextureSource.swift b/submodules/TelegramUI/Components/MediaEditor/Sources/UniversalTextureSource.swift
index e4205d8405..b9a2b9c19f 100644
--- a/submodules/TelegramUI/Components/MediaEditor/Sources/UniversalTextureSource.swift
+++ b/submodules/TelegramUI/Components/MediaEditor/Sources/UniversalTextureSource.swift
@@ -7,8 +7,8 @@ import SwiftSignalKit
 
 final class UniversalTextureSource: TextureSource {
     enum Input {
-        case image(UIImage)
-        case video(AVPlayerItem)
+        case image(UIImage, CGRect?)
+        case video(AVPlayerItem, CGRect?)
         case entity(MediaEditorComposerEntity)
         
         fileprivate func createContext(renderTarget: RenderTarget, queue: DispatchQueue, additional: Bool) -> InputContext {
@@ -28,7 +28,7 @@ final class UniversalTextureSource: TextureSource {
     private let queue: DispatchQueue
     
     private var mainInputContext: InputContext?
-    private var additionalInputContext: InputContext?
+    private var additionalInputContexts: [InputContext] = []
     
     var forceUpdates = false
     private var rate: Float = 1.0
@@ -48,7 +48,7 @@ final class UniversalTextureSource: TextureSource {
     }
     
     var mainImage: UIImage? {
-        if let mainInput = self.mainInputContext?.input, case let .image(image) = mainInput {
+        if let mainInput = self.mainInputContext?.input, case let .image(image, _) = mainInput {
            return image
        }
        return nil
@@ -62,15 +62,11 @@ final class UniversalTextureSource: TextureSource {
         self.update(forced: true)
     }
     
-    func setAdditionalInput(_ input: Input?) {
+    func setAdditionalInputs(_ inputs: [Input]) {
         guard let renderTarget = self.renderTarget else {
             return
         }
-        if let input {
-            self.additionalInputContext = input.createContext(renderTarget: renderTarget, queue: self.queue, additional: true)
-        } else {
-            self.additionalInputContext = nil
-        }
+        self.additionalInputContexts = inputs.map { $0.createContext(renderTarget: renderTarget, queue: self.queue, additional: true) }
         
         self.update(forced: true)
     }
@@ -79,7 +75,7 @@ final class UniversalTextureSource: TextureSource {
         self.rate = rate
     }
     
-    private var previousAdditionalOutput: MediaEditorRenderer.Input?
+    private var previousAdditionalOutput: [Int: MediaEditorRenderer.Input] = [:]
     private var readyForMoreData = Atomic<Bool>(value: true)
     private func update(forced: Bool) {
         let time = CACurrentMediaTime()
@@ -89,7 +85,15 @@ final class UniversalTextureSource: TextureSource {
             fps = 30
         }
         
-        let needsDisplayLink = (self.mainInputContext?.needsDisplayLink ?? false) || (self.additionalInputContext?.needsDisplayLink ?? false)
+        var additionalsNeedDisplayLink = false
+        for context in self.additionalInputContexts {
+            if context.needsDisplayLink {
+                additionalsNeedDisplayLink = true
+                break
+            }
+        }
+        
+        let needsDisplayLink = (self.mainInputContext?.needsDisplayLink ?? false) || additionalsNeedDisplayLink
         if needsDisplayLink {
             if self.displayLink == nil {
                 let displayLink = CADisplayLink(target: DisplayLinkTarget({ [weak self] in
@@ -122,22 +126,32 @@ final class UniversalTextureSource: TextureSource {
                     return
                 }
                 if let main {
-                    self.output?.consume(main: main, additional: nil, render: true)
+                    self.output?.consume(main: main, additionals: [], render: true)
                 }
                 let _ = self.readyForMoreData.swap(true)
             })
         } else {
             let main = self.mainInputContext?.output(time: time)
-            var additional = self.additionalInputContext?.output(time: time)
-            if let additional {
-                self.previousAdditionalOutput = additional
-            } else if self.additionalInputContext != nil {
-                additional = self.previousAdditionalOutput
+            var additionals: [(Int, InputContext.Output?)] = []
+            var index = 0
+            for context in self.additionalInputContexts {
+                additionals.append((index, context.output(time: time)))
+                index += 1
+            }
+            for (index, output) in additionals {
+                if let output {
+                    self.previousAdditionalOutput[index] = output
+                }
+            }
+            for (index, output) in additionals {
+                if output == nil {
+                    additionals[index] = (index, self.previousAdditionalOutput[index])
+                }
             }
             guard let main else {
                 return
             }
-            self.output?.consume(main: main, additional: additional, render: true)
+            self.output?.consume(main: main, additionals: additionals.compactMap { $0.1 }, render: true)
         }
     }
@@ -148,7 +162,7 @@ final class UniversalTextureSource: TextureSource {
     
     func invalidate() {
         self.mainInputContext?.invalidate()
-        self.additionalInputContext?.invalidate()
+        self.additionalInputContexts.forEach { $0.invalidate() }
     }
     
     private class DisplayLinkTarget {
@@ -168,6 +182,8 @@ protocol InputContext {
     
     var input: Input { get }
     
+    var rect: CGRect? { get }
+    
     var useAsyncOutput: Bool { get }
     
     func output(time: Double) -> Output?
     func asyncOutput(time: Double, completion: @escaping (Output?) -> Void)
@@ -191,12 +207,14 @@ private class ImageInputContext: InputContext {
     fileprivate var input: Input
     private var texture: MTLTexture?
     private var hasTransparency = false
+    fileprivate var rect: CGRect?
     
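The reworked `update(forced:)` above caches the last output per additional input, so a tile whose source produced nothing this tick keeps showing its previous frame. The same bookkeeping in isolation (toy `String` outputs standing in for `MediaEditorRenderer.Input`):

```swift
// Per-index frame reuse as in UniversalTextureSource.update(): an input that
// yields nil this tick falls back to whatever it produced last time.
var previousOutput: [Int: String] = [:]

func collectOutputs(_ fresh: [String?]) -> [String] {
    var outputs: [(Int, String?)] = fresh.enumerated().map { ($0.offset, $0.element) }
    for (index, output) in outputs {
        if let output {
            previousOutput[index] = output   // remember the newest frame
        }
    }
    for (index, output) in outputs where output == nil {
        outputs[index] = (index, previousOutput[index])   // reuse the cached frame
    }
    return outputs.compactMap { $0.1 }
}

print(collectOutputs(["tile0-frame1", nil]))   // ["tile0-frame1"] (tile 1 has no history yet)
print(collectOutputs([nil, "tile1-frame1"]))   // ["tile0-frame1", "tile1-frame1"]
```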
     init(input: Input, renderTarget: RenderTarget, queue: DispatchQueue) {
-        guard case let .image(image) = input else {
+        guard case let .image(image, rect) = input else {
             fatalError()
         }
         self.input = input
+        self.rect = rect
         if let device = renderTarget.mtlDevice {
             self.texture = loadTexture(image: image, device: device)
         }
@@ -204,7 +222,7 @@ private class ImageInputContext: InputContext {
     }
     
     func output(time: Double) -> Output? {
-        return self.texture.flatMap { .texture($0, .zero, self.hasTransparency) }
+        return self.texture.flatMap { .texture($0, .zero, self.hasTransparency, self.rect) }
     }
     
     func invalidate() {
@@ -220,23 +238,26 @@ private class VideoInputContext: NSObject, InputContext, AVPlayerItemOutputPullDelegate {
     fileprivate var input: Input
     private var videoOutput: AVPlayerItemVideoOutput?
     private var textureRotation: TextureRotation = .rotate0Degrees
+    fileprivate var rect: CGRect?
     
     var playerItem: AVPlayerItem {
-        guard case let .video(playerItem) = self.input else {
+        guard case let .video(playerItem, _) = self.input else {
             fatalError()
         }
         return playerItem
     }
     
     init(input: Input, renderTarget: RenderTarget, queue: DispatchQueue, additional: Bool) {
-        guard case .video = input else {
+        guard case let .video(_, rect) = input else {
             fatalError()
         }
         self.input = input
+        self.rect = rect
+        
         super.init()
         
         //TODO: mirror if self.additionalPlayer == nil && self.mirror
-        self.textureRotation = textureRotatonForAVAsset(self.playerItem.asset, mirror: additional)
+        self.textureRotation = textureRotatonForAVAsset(self.playerItem.asset, mirror: rect == nil ? additional : false)
         
         let colorProperties: [String: Any] = [
             AVVideoColorPrimariesKey: AVVideoColorPrimaries_ITU_R_709_2,
@@ -270,7 +291,7 @@ private class VideoInputContext: NSObject, InputContext, AVPlayerItemOutputPullDelegate {
         if let pixelBuffer = videoOutput.copyPixelBuffer(forItemTime: requestTime, itemTimeForDisplay: &presentationTime) {
             videoPixelBuffer = VideoPixelBuffer(pixelBuffer: pixelBuffer, rotation: self.textureRotation, timestamp: presentationTime)
         }
-        return videoPixelBuffer.flatMap { .videoBuffer($0) }
+        return videoPixelBuffer.flatMap { .videoBuffer($0, self.rect) }
     }
     
     func invalidate() {
@@ -290,6 +311,8 @@ final class EntityInputContext: NSObject, InputContext, AVPlayerItemOutputPullDelegate {
     internal var input: Input
     private var textureRotation: TextureRotation = .rotate0Degrees
+    var rect: CGRect?
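`VideoInputContext` above now derives mirroring from the tile rect: only the legacy full-frame dual-camera input keeps the mirrored rotation, while collage tiles never mirror. Restated as a tiny predicate (function name hypothetical):

```swift
import CoreGraphics

// Mirroring rule from VideoInputContext.init: a collage tile (rect != nil)
// is never mirrored; the legacy full-frame additional video keeps the
// dual-camera mirroring.
func shouldMirror(rect: CGRect?, isAdditional: Bool) -> Bool {
    return rect == nil ? isAdditional : false
}

assert(shouldMirror(rect: nil, isAdditional: true))                                                    // dual camera
assert(!shouldMirror(rect: CGRect(x: 0, y: 0, width: 0.5, height: 0.5), isAdditional: true))           // collage tile
```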
+ var entity: MediaEditorComposerEntity { guard case let .entity(entity) = self.input else { fatalError() diff --git a/submodules/TelegramUI/Components/MediaEditor/Sources/VideoFinishPass.swift b/submodules/TelegramUI/Components/MediaEditor/Sources/VideoFinishPass.swift index a1ccfeb9aa..c0b2df1182 100644 --- a/submodules/TelegramUI/Components/MediaEditor/Sources/VideoFinishPass.swift +++ b/submodules/TelegramUI/Components/MediaEditor/Sources/VideoFinishPass.swift @@ -106,6 +106,124 @@ private func verticesData( ] } +private func verticesData( + size: CGSize, + textureRotation: TextureRotation, + containerSize: CGSize, + textureRect: CGRect, + z: Float = 0.0 +) -> [VertexData] { + let textureRect = CGRect(origin: CGPoint(x: textureRect.origin.x, y: containerSize.height - textureRect.maxY ), size: textureRect.size) + + let containerAspect = textureRect.width / textureRect.height + let imageAspect = size.width / size.height + + let texCoordScale: simd_float2 + if imageAspect > containerAspect { + texCoordScale = simd_float2(Float(containerAspect / imageAspect), 1.0) + } else { + texCoordScale = simd_float2(1.0, Float(imageAspect / containerAspect)) + } + + let scaledTopLeft = simd_float2(0.5 - texCoordScale.x * 0.5, 0.5 + texCoordScale.y * 0.5) + let scaledTopRight = simd_float2(0.5 + texCoordScale.x * 0.5, 0.5 + texCoordScale.y * 0.5) + let scaledBottomLeft = simd_float2(0.5 - texCoordScale.x * 0.5, 0.5 - texCoordScale.y * 0.5) + let scaledBottomRight = simd_float2(0.5 + texCoordScale.x * 0.5, 0.5 - texCoordScale.y * 0.5) + + let topLeft: simd_float2 + let topRight: simd_float2 + let bottomLeft: simd_float2 + let bottomRight: simd_float2 + + switch textureRotation { + case .rotate0Degrees: + topLeft = scaledTopLeft + topRight = scaledTopRight + bottomLeft = scaledBottomLeft + bottomRight = scaledBottomRight + case .rotate0DegreesMirrored: + topLeft = scaledTopRight + topRight = scaledTopLeft + bottomLeft = scaledBottomRight + bottomRight = scaledBottomLeft + case .rotate180Degrees: + topLeft = scaledBottomRight + topRight = scaledBottomLeft + bottomLeft = scaledTopRight + bottomRight = scaledTopLeft + case .rotate90Degrees: + topLeft = scaledTopRight + topRight = scaledBottomRight + bottomLeft = scaledTopLeft + bottomRight = scaledBottomLeft + case .rotate90DegreesMirrored: + topLeft = scaledBottomRight + topRight = scaledTopRight + bottomLeft = scaledBottomLeft + bottomRight = scaledTopLeft + case .rotate270Degrees: + topLeft = scaledBottomLeft + topRight = scaledTopLeft + bottomLeft = scaledBottomRight + bottomRight = scaledTopRight + } + + let containerSize = CGSize(width: containerSize.width, height: containerSize.height) + + let centerX = Float(textureRect.midX - containerSize.width / 2.0) + let centerY = Float(textureRect.midY - containerSize.height / 2.0) + + let halfWidth = Float(textureRect.width / 2.0) + let halfHeight = Float(textureRect.height / 2.0) + + let angle = Float.pi + let cosAngle = cos(angle) + let sinAngle = sin(angle) + + return [ + VertexData( + pos: simd_float4( + x: (centerX + (halfWidth * cosAngle) - (halfHeight * sinAngle)) / Float(containerSize.width) * 2.0, + y: (centerY + (halfWidth * sinAngle) + (halfHeight * cosAngle)) / Float(containerSize.height) * 2.0, + z: z, + w: 1 + ), + texCoord: topLeft, + localPos: simd_float2(0.0, 0.0) + ), + VertexData( + pos: simd_float4( + x: (centerX - (halfWidth * cosAngle) - (halfHeight * sinAngle)) / Float(containerSize.width) * 2.0, + y: (centerY - (halfWidth * sinAngle) + (halfHeight * cosAngle)) / 
Float(containerSize.height) * 2.0, + z: z, + w: 1 + ), + texCoord: topRight, + localPos: simd_float2(1.0, 0.0) + ), + VertexData( + pos: simd_float4( + x: (centerX + (halfWidth * cosAngle) + (halfHeight * sinAngle)) / Float(containerSize.width) * 2.0, + y: (centerY + (halfWidth * sinAngle) - (halfHeight * cosAngle)) / Float(containerSize.height) * 2.0, + z: z, + w: 1 + ), + texCoord: bottomLeft, + localPos: simd_float2(0.0, 1.0) + ), + VertexData( + pos: simd_float4( + x: (centerX - (halfWidth * cosAngle) + (halfHeight * sinAngle)) / Float(containerSize.width) * 2.0, + y: (centerY - (halfWidth * sinAngle) - (halfHeight * cosAngle)) / Float(containerSize.height) * 2.0, + z: z, + w: 1 + ), + texCoord: bottomRight, + localPos: simd_float2(1.0, 1.0) + ) + ] +} + private func lookupSpringValue(_ t: CGFloat) -> CGFloat { let table: [(CGFloat, CGFloat)] = [ (0.0, 0.0), @@ -198,6 +316,41 @@ final class VideoFinishPass: RenderPass { } } + func encodeVideo( + using encoder: MTLRenderCommandEncoder, + containerSize: CGSize, + texture: MTLTexture, + textureRotation: TextureRotation, + rect: CGRect, + zPosition: Float, + device: MTLDevice + ) { + encoder.setFragmentTexture(texture, index: 0) + encoder.setFragmentTexture(texture, index: 1) + + let vertices = verticesData( + size: CGSize(width: texture.width, height: texture.height), + textureRotation: textureRotation, + containerSize: containerSize, + textureRect: rect, + z: zPosition + ) + let buffer = device.makeBuffer( + bytes: vertices, + length: MemoryLayout<VertexData>.stride * vertices.count, + options: []) + encoder.setVertexBuffer(buffer, offset: 0, index: 0) + + var parameters = VideoEncodeParameters( + dimensions: simd_float2(Float(rect.size.width), Float(rect.size.height)), + roundness: 0.0, + alpha: 1.0, + isOpaque: 1.0 + ) + encoder.setFragmentBytes(&parameters, length: MemoryLayout<VideoEncodeParameters>.size, index: 0) + encoder.drawPrimitives(type: .triangleStrip, vertexStart: 0, vertexCount: 4) + } + func encodeVideo( using encoder: MTLRenderCommandEncoder, containerSize: CGSize, @@ -228,7 +381,14 @@ final class VideoFinishPass: RenderPass { height: position.size.height * position.scale * position.baseScale ) - let vertices = verticesData(textureRotation: textureRotation, containerSize: containerSize, position: center, size: size, rotation: position.rotation, z: zPosition) + let vertices = verticesData( + textureRotation: textureRotation, + containerSize: containerSize, + position: center, + size: size, + rotation: position.rotation, + z: zPosition + ) let buffer = device.makeBuffer( bytes: vertices, length: MemoryLayout<VertexData>.stride * vertices.count, options: []) @@ -491,34 +651,40 @@ final class VideoFinishPass: RenderPass { return (backgroundVideoState, foregroundVideoState, disappearingVideoState) } + struct Input { + let texture: MTLTexture + let hasTransparency: Bool + let rect: CGRect? + } + func process( - input: MTLTexture, + input: Input, inputMask: MTLTexture?, hasTransparency: Bool, - secondInput: MTLTexture?, + secondInput: [Input], timestamp: CMTime, device: MTLDevice, commandBuffer: MTLCommandBuffer ) -> MTLTexture?
{ if !self.isStory { - return input + return input.texture } let baseScale: CGFloat if !self.isSticker { - if input.height > input.width { - baseScale = max(canvasSize.width / CGFloat(input.width), canvasSize.height / CGFloat(input.height)) + if input.texture.height > input.texture.width { + baseScale = max(canvasSize.width / CGFloat(input.texture.width), canvasSize.height / CGFloat(input.texture.height)) } else { - baseScale = canvasSize.width / CGFloat(input.width) + baseScale = canvasSize.width / CGFloat(input.texture.width) } } else { - if input.height > input.width { - baseScale = canvasSize.width / CGFloat(input.width) + if input.texture.height > input.texture.width { + baseScale = canvasSize.width / CGFloat(input.texture.width) } else { - baseScale = canvasSize.width / CGFloat(input.height) + baseScale = canvasSize.width / CGFloat(input.texture.height) } } - self.mainPosition = self.mainPosition.with(size: CGSize(width: input.width, height: input.height), baseScale: baseScale) + self.mainPosition = self.mainPosition.with(size: CGSize(width: input.texture.width, height: input.texture.height), baseScale: baseScale) let containerSize = canvasSize @@ -527,11 +693,11 @@ final class VideoFinishPass: RenderPass { textureDescriptor.textureType = .type2D textureDescriptor.width = Int(containerSize.width) textureDescriptor.height = Int(containerSize.height) - textureDescriptor.pixelFormat = input.pixelFormat + textureDescriptor.pixelFormat = input.texture.pixelFormat textureDescriptor.storageMode = .private textureDescriptor.usage = [.shaderRead, .shaderWrite, .renderTarget] guard let texture = device.makeTexture(descriptor: textureDescriptor) else { - return input + return input.texture } self.cachedTexture = texture texture.label = "finishedTexture" @@ -547,7 +713,7 @@ final class VideoFinishPass: RenderPass { renderPassDescriptor.colorAttachments[0].storeAction = .store renderPassDescriptor.colorAttachments[0].clearColor = MTLClearColor(red: 0.0, green: 0.0, blue: 0.0, alpha: 0.0) guard let renderCommandEncoder = commandBuffer.makeRenderCommandEncoder(descriptor: renderPassDescriptor) else { - return input + return input.texture } renderCommandEncoder.setViewport(MTLViewport( @@ -566,52 +732,78 @@ final class VideoFinishPass: RenderPass { renderCommandEncoder.setRenderPipelineState(self.mainPipelineState!) 
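The verticesData(size:textureRotation:containerSize:textureRect:z:) overload added above places a collage cell by flipping its rect to a bottom-left origin, recentering it on the container's midpoint, and normalizing to Metal clip space, where both axes run from -1.0 to 1.0. A worked example of the center mapping (numbers are illustrative only):

    import CoreGraphics

    let container = CGSize(width: 1080.0, height: 1920.0)
    // Top-left cell of a 2x2 grid, after the y-flip to a bottom-left origin:
    let cell = CGRect(x: 0.0, y: 960.0, width: 540.0, height: 960.0)

    let centerX = Float(cell.midX - container.width / 2.0)  // -270
    let centerY = Float(cell.midY - container.height / 2.0) // 480

    let ndcX = centerX / Float(container.width) * 2.0  // -0.5
    let ndcY = centerY / Float(container.height) * 2.0 // 0.5
    // The quad's corners are then offset from this center by +/- halfWidth and
    // +/- halfHeight (rotated by the fixed angle) before the same normalization.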
- let (mainVideoState, additionalVideoState, transitionVideoState) = self.transitionState(for: timestamp, mainInput: input, additionalInput: secondInput) - - if let transitionVideoState { + if let rect = input.rect { self.encodeVideo( using: renderCommandEncoder, containerSize: containerSize, - texture: transitionVideoState.texture, - textureRotation: transitionVideoState.textureRotation, - maskTexture: nil, - hasTransparency: false, - position: transitionVideoState.position, - roundness: transitionVideoState.roundness, - alpha: transitionVideoState.alpha, - zPosition: 0.75, + texture: input.texture, + textureRotation: self.mainTextureRotation, + rect: rect, + zPosition: 0.0, device: device ) - } - - self.encodeVideo( - using: renderCommandEncoder, - containerSize: containerSize, - texture: mainVideoState.texture, - textureRotation: mainVideoState.textureRotation, - maskTexture: inputMask, - hasTransparency: hasTransparency, - position: mainVideoState.position, - roundness: mainVideoState.roundness, - alpha: mainVideoState.alpha, - zPosition: 0.0, - device: device - ) - - if let additionalVideoState { + + for input in secondInput { + if let rect = input.rect { + self.encodeVideo( + using: renderCommandEncoder, + containerSize: containerSize, + texture: input.texture, + textureRotation: self.mainTextureRotation, + rect: rect, + zPosition: 0.0, + device: device + ) + } + } + } else { + let (mainVideoState, additionalVideoState, transitionVideoState) = self.transitionState(for: timestamp, mainInput: input.texture, additionalInput: secondInput.first?.texture) + + if let transitionVideoState { + self.encodeVideo( + using: renderCommandEncoder, + containerSize: containerSize, + texture: transitionVideoState.texture, + textureRotation: transitionVideoState.textureRotation, + maskTexture: nil, + hasTransparency: false, + position: transitionVideoState.position, + roundness: transitionVideoState.roundness, + alpha: transitionVideoState.alpha, + zPosition: 0.75, + device: device + ) + } + self.encodeVideo( using: renderCommandEncoder, containerSize: containerSize, - texture: additionalVideoState.texture, - textureRotation: additionalVideoState.textureRotation, - maskTexture: nil, - hasTransparency: false, - position: additionalVideoState.position, - roundness: additionalVideoState.roundness, - alpha: additionalVideoState.alpha, - zPosition: 0.5, + texture: mainVideoState.texture, + textureRotation: mainVideoState.textureRotation, + maskTexture: inputMask, + hasTransparency: hasTransparency, + position: mainVideoState.position, + roundness: mainVideoState.roundness, + alpha: mainVideoState.alpha, + zPosition: 0.0, device: device ) + + if let additionalVideoState { + self.encodeVideo( + using: renderCommandEncoder, + containerSize: containerSize, + texture: additionalVideoState.texture, + textureRotation: additionalVideoState.textureRotation, + maskTexture: nil, + hasTransparency: false, + position: additionalVideoState.position, + roundness: additionalVideoState.roundness, + alpha: additionalVideoState.alpha, + zPosition: 0.5, + device: device + ) + } } renderCommandEncoder.endEncoding() diff --git a/submodules/TelegramUI/Components/MediaEditorScreen/Sources/CollageHighlightView.swift b/submodules/TelegramUI/Components/MediaEditorScreen/Sources/CollageHighlightView.swift new file mode 100644 index 0000000000..5d9148a109 --- /dev/null +++ b/submodules/TelegramUI/Components/MediaEditorScreen/Sources/CollageHighlightView.swift @@ -0,0 +1,44 @@ +import Foundation +import UIKit +import Display + +final 
class CollageHighlightView: UIView { + private let borderLayer = SimpleLayer() + private let gradientView = UIImageView() + + override public init(frame: CGRect) { + super.init(frame: frame) + + self.borderLayer.cornerRadius = 12.0 + self.borderLayer.borderWidth = 4.0 + self.borderLayer.borderColor = UIColor.white.cgColor + + self.layer.mask = self.borderLayer + + self.addSubview(self.gradientView) + } + + required public init?(coder: NSCoder) { + fatalError("init(coder:) has not been implemented") + } + + func update(size: CGSize, corners: CACornerMask, completion: @escaping () -> Void) { + self.borderLayer.maskedCorners = corners + self.borderLayer.frame = CGRect(origin: .zero, size: size) + + let color = UIColor.white.withAlphaComponent(0.7) + + let gradientWidth = size.width * 3.0 + self.gradientView.image = generateGradientImage( + size: CGSize(width: gradientWidth, height: 24.0), + colors: [UIColor.white.withAlphaComponent(0.0), color, color, color, UIColor.white.withAlphaComponent(0.0)], + locations: [0.0, 0.2, 0.5, 0.8, 1.0], + direction: .horizontal + ) + + self.gradientView.frame = CGRect(origin: CGPoint(x: -gradientWidth, y: 0.0), size: CGSize(width: gradientWidth, height: size.height)) + self.gradientView.layer.animatePosition(from: .zero, to: CGPoint(x: gradientWidth * 2.0, y: 0.0), duration: 1.4, additive: true, completion: { _ in + completion() + }) + } +} diff --git a/submodules/TelegramUI/Components/MediaEditorScreen/Sources/EditStories.swift b/submodules/TelegramUI/Components/MediaEditorScreen/Sources/EditStories.swift index aee5cbbcb9..02770cd84f 100644 --- a/submodules/TelegramUI/Components/MediaEditorScreen/Sources/EditStories.swift +++ b/submodules/TelegramUI/Components/MediaEditorScreen/Sources/EditStories.swift @@ -10,7 +10,7 @@ import SaveToCameraRoll import ImageCompression import LocalMediaResources -public extension MediaEditorScreen { +public extension MediaEditorScreenImpl { static func makeEditStoryController( context: AccountContext, peer: EnginePeer, @@ -18,16 +18,16 @@ public extension MediaEditorScreen { videoPlaybackPosition: Double?, cover: Bool, repost: Bool, - transitionIn: MediaEditorScreen.TransitionIn, - transitionOut: MediaEditorScreen.TransitionOut?, + transitionIn: MediaEditorScreenImpl.TransitionIn, + transitionOut: MediaEditorScreenImpl.TransitionOut?, completed: @escaping () -> Void = {}, willDismiss: @escaping () -> Void = {}, update: @escaping (Disposable?) -> Void - ) -> MediaEditorScreen? { + ) -> MediaEditorScreenImpl? 
{ guard let peerReference = PeerReference(peer._asPeer()) else { return nil } - let subject: Signal<MediaEditorScreen.Subject?, NoError> + let subject: Signal<MediaEditorScreenImpl.Subject?, NoError> subject = getStorySource(engine: context.engine, peerId: peer.id, id: Int64(storyItem.id)) |> mapToSignal { source in if !repost, let source { @@ -35,14 +35,14 @@ public extension MediaEditorScreen { } else { let media = storyItem.media._asMedia() return fetchMediaData(context: context, postbox: context.account.postbox, userLocation: .peer(peerReference.id), customUserContentType: .story, mediaReference: .story(peer: peerReference, id: storyItem.id, media: media)) - |> mapToSignal { (value, isImage) -> Signal<MediaEditorScreen.Subject?, NoError> in + |> mapToSignal { (value, isImage) -> Signal<MediaEditorScreenImpl.Subject?, NoError> in guard case let .data(data) = value, data.complete else { return .complete() } if let image = UIImage(contentsOfFile: data.path) { return .single(nil) |> then( - .single(.image(image, PixelDimensions(image.size), nil, .bottomRight)) + .single(.image(image: image, dimensions: PixelDimensions(image.size), additionalImage: nil, additionalImagePosition: .bottomRight)) |> delay(0.1, queue: Queue.mainQueue()) ) } else { @@ -56,7 +56,7 @@ public extension MediaEditorScreen { } return .single(nil) |> then( - .single(.video(symlinkPath, nil, false, nil, nil, PixelDimensions(width: 720, height: 1280), duration ?? 0.0, [], .bottomRight)) + .single(.video(videoPath: symlinkPath, thumbnail: nil, mirror: false, additionalVideoPath: nil, additionalThumbnail: nil, dimensions: PixelDimensions(width: 720, height: 1280), duration: duration ?? 0.0, videoPositionChanges: [], additionalVideoPosition: .bottomRight)) ) } } @@ -95,7 +95,7 @@ public extension MediaEditorScreen { } var updateProgressImpl: ((Float) -> Void)? - let controller = MediaEditorScreen( + let controller = MediaEditorScreenImpl( context: context, mode: .storyEditor, subject: subject, @@ -110,7 +110,7 @@ public extension MediaEditorScreen { transitionOut: { finished, isNew in if repost && finished { if let transitionOut = externalState.transitionOut?(externalState.storyTarget, externalState.isPeerArchived), let destinationView = transitionOut.destinationView { - return MediaEditorScreen.TransitionOut( + return MediaEditorScreenImpl.TransitionOut( destinationView: destinationView, destinationRect: transitionOut.destinationRect, destinationCornerRadius: transitionOut.destinationCornerRadius diff --git a/submodules/TelegramUI/Components/MediaEditorScreen/Sources/MediaCoverScreen.swift b/submodules/TelegramUI/Components/MediaEditorScreen/Sources/MediaCoverScreen.swift index db9dc21545..1b958dfa1b 100644 --- a/submodules/TelegramUI/Components/MediaEditorScreen/Sources/MediaCoverScreen.swift +++ b/submodules/TelegramUI/Components/MediaEditorScreen/Sources/MediaCoverScreen.swift @@ -327,6 +327,7 @@ private final class MediaCoverScreenComponent: Component { maxDuration: storyMaxVideoDuration, isPlaying: playerState.isPlaying, tracks: visibleTracks, + isCollage: false, portalView: controller.portalView, positionUpdated: { [weak state] position, apply in if let mediaEditor = state?.mediaEditor { diff --git a/submodules/TelegramUI/Components/MediaEditorScreen/Sources/MediaEditorDrafts.swift b/submodules/TelegramUI/Components/MediaEditorScreen/Sources/MediaEditorDrafts.swift index d29a7083b9..0408508dac 100644 --- a/submodules/TelegramUI/Components/MediaEditorScreen/Sources/MediaEditorDrafts.swift +++ b/submodules/TelegramUI/Components/MediaEditorScreen/Sources/MediaEditorDrafts.swift @@ -9,7 +9,7 @@ import AccountContext import MediaEditor import DrawingUI -extension
MediaEditorScreen { +extension MediaEditorScreenImpl { func isEligibleForDraft() -> Bool { if self.isEditingStory { return false @@ -173,6 +173,8 @@ extension MediaEditorScreen { innerSaveDraft(media: .image(image: image, dimensions: dimensions)) case let .video(path, _, _, _, _, dimensions, _, _, _): innerSaveDraft(media: .video(path: path, dimensions: dimensions, duration: duration)) + case let .videoCollage(items): + let _ = items case let .asset(asset): if asset.mediaType == .video { PHImageManager.default().requestAVAsset(forVideo: asset, options: nil) { avAsset, _, _ in diff --git a/submodules/TelegramUI/Components/MediaEditorScreen/Sources/MediaEditorRecording.swift b/submodules/TelegramUI/Components/MediaEditorScreen/Sources/MediaEditorRecording.swift index 19e5a01424..f3999a5273 100644 --- a/submodules/TelegramUI/Components/MediaEditorScreen/Sources/MediaEditorRecording.swift +++ b/submodules/TelegramUI/Components/MediaEditorScreen/Sources/MediaEditorRecording.swift @@ -10,9 +10,9 @@ import TelegramPresentationData import DeviceAccess import AccountContext -extension MediaEditorScreen { +extension MediaEditorScreenImpl { final class Recording { - private weak var controller: MediaEditorScreen? + private weak var controller: MediaEditorScreenImpl? private var recorder: EntityVideoRecorder? @@ -37,7 +37,7 @@ extension MediaEditorScreen { var isLocked = false - init(controller: MediaEditorScreen) { + init(controller: MediaEditorScreenImpl) { self.controller = controller self.authorizationStatusDisposables.add((DeviceAccess.authorizationStatus(subject: .camera(.video)) diff --git a/submodules/TelegramUI/Components/MediaEditorScreen/Sources/MediaEditorScreen.swift b/submodules/TelegramUI/Components/MediaEditorScreen/Sources/MediaEditorScreen.swift index 159e256163..e992d859a0 100644 --- a/submodules/TelegramUI/Components/MediaEditorScreen/Sources/MediaEditorScreen.swift +++ b/submodules/TelegramUI/Components/MediaEditorScreen/Sources/MediaEditorScreen.swift @@ -64,6 +64,7 @@ final class MediaEditorScreenComponent: Component { public final class ExternalState { public fileprivate(set) var derivedInputHeight: CGFloat = 0.0 + public fileprivate(set) var timelineHeight: CGFloat = 0.0 public init() { } @@ -85,6 +86,7 @@ final class MediaEditorScreenComponent: Component { let isDisplayingTool: DrawingScreenType? 
let isInteractingWithEntities: Bool let isSavingAvailable: Bool + let isCollageTimelineOpen: Bool let hasAppeared: Bool let isDismissing: Bool let bottomSafeInset: CGFloat @@ -101,6 +103,7 @@ final class MediaEditorScreenComponent: Component { isDisplayingTool: DrawingScreenType?, isInteractingWithEntities: Bool, isSavingAvailable: Bool, + isCollageTimelineOpen: Bool, hasAppeared: Bool, isDismissing: Bool, bottomSafeInset: CGFloat, @@ -116,6 +119,7 @@ final class MediaEditorScreenComponent: Component { self.isDisplayingTool = isDisplayingTool self.isInteractingWithEntities = isInteractingWithEntities self.isSavingAvailable = isSavingAvailable + self.isCollageTimelineOpen = isCollageTimelineOpen self.hasAppeared = hasAppeared self.isDismissing = isDismissing self.bottomSafeInset = bottomSafeInset @@ -140,6 +144,9 @@ final class MediaEditorScreenComponent: Component { if lhs.isSavingAvailable != rhs.isSavingAvailable { return false } + if lhs.isCollageTimelineOpen != rhs.isCollageTimelineOpen { + return false + } if lhs.hasAppeared != rhs.hasAppeared { return false } @@ -444,7 +451,7 @@ final class MediaEditorScreenComponent: Component { }, requestLayout: { [weak self] transition in if let self { - (self.environment?.controller() as? MediaEditorScreen)?.node.requestLayout(forceUpdate: true, transition: ComponentTransition(transition)) + (self.environment?.controller() as? MediaEditorScreenImpl)?.node.requestLayout(forceUpdate: true, transition: ComponentTransition(transition)) } } ) @@ -722,7 +729,7 @@ final class MediaEditorScreenComponent: Component { return availableSize } let environment = environment[ViewControllerComponentContainer.Environment.self].value - guard let controller = environment.controller() as? MediaEditorScreen else { + guard let controller = environment.controller() as? 
MediaEditorScreenImpl else { return availableSize } self.environment = environment @@ -744,6 +751,7 @@ final class MediaEditorScreenComponent: Component { let isRecordingAdditionalVideo = controller.node.recording.isActive + let previousComponent = self.component self.component = component self.state = state @@ -1216,6 +1224,9 @@ final class MediaEditorScreenComponent: Component { if case let .video(_, _, _, additionalPath, _, _, _, _, _) = subject, additionalPath != nil { canRecordVideo = false } + if case .videoCollage = subject { + canRecordVideo = false + } } self.inputPanel.parentState = state @@ -1231,6 +1242,7 @@ final class MediaEditorScreenComponent: Component { maxLength: Int(component.context.userLimits.maxStoryCaptionLength), queryTypes: [.mention, .hashtag], alwaysDarkWhenHasText: false, + useGrayBackground: component.isCollageTimelineOpen, resetInputContents: nil, nextInputMode: { _ in return nextInputMode }, areVoiceMessagesAvailable: false, @@ -1303,6 +1315,7 @@ final class MediaEditorScreenComponent: Component { }, forwardAction: nil, moreAction: nil, + presentCaptionPositionTooltip: nil, presentVoiceMessagesUnavailableTooltip: nil, presentTextLengthLimitTooltip: { [weak controller] in guard let controller else { @@ -1460,6 +1473,31 @@ final class MediaEditorScreenComponent: Component { ) } + var animateRightButtonsSwitch = false + if let previousComponent, previousComponent.isCollageTimelineOpen != component.isCollageTimelineOpen { + animateRightButtonsSwitch = true + } + + var buttonTransition = transition + if animateRightButtonsSwitch { + buttonTransition = .immediate + for button in [self.muteButton, self.playbackButton] { + if let view = button.view { + if let snapshotView = view.snapshotView(afterScreenUpdates: false) { + snapshotView.frame = view.frame + view.superview?.addSubview(snapshotView) + snapshotView.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.25, removeOnCompletion: false, completion: { _ in + snapshotView.removeFromSuperview() + }) + snapshotView.layer.animateScale(from: 1.0, to: 0.01, duration: 0.25, removeOnCompletion: false) + + view.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.25) + view.layer.animateScale(from: 0.01, to: 1.0, duration: 0.25) + } + } + } + } + let saveButtonSize = self.saveButton.update( transition: transition, component: AnyComponent(CameraButton( @@ -1493,13 +1531,14 @@ final class MediaEditorScreenComponent: Component { let saveButtonAlpha = component.isSavingAvailable ? topButtonsAlpha : 0.3 saveButtonView.isUserInteractionEnabled = component.isSavingAvailable - transition.setPosition(view: saveButtonView, position: saveButtonFrame.center) - transition.setBounds(view: saveButtonView, bounds: CGRect(origin: .zero, size: saveButtonFrame.size)) + buttonTransition.setPosition(view: saveButtonView, position: saveButtonFrame.center) + buttonTransition.setBounds(view: saveButtonView, bounds: CGRect(origin: .zero, size: saveButtonFrame.size)) transition.setScale(view: saveButtonView, scale: displayTopButtons ? 1.0 : 0.01) transition.setAlpha(view: saveButtonView, alpha: displayTopButtons && !component.isDismissing && !component.isInteractingWithEntities ? 
saveButtonAlpha : 0.0) } var topButtonOffsetX: CGFloat = 0.0 + var topButtonOffsetY: CGFloat = 0.0 if let subject = controller.node.subject, case .message = subject { let isNightTheme = mediaEditor?.values.nightTheme == true @@ -1651,8 +1690,19 @@ final class MediaEditorScreenComponent: Component { environment: {}, containerSize: CGSize(width: 44.0, height: 44.0) ) + + var xOffset: CGFloat + var yOffset: CGFloat = 0.0 + if component.isCollageTimelineOpen { + xOffset = 0.0 + yOffset = 50.0 + topButtonOffsetY + } else { + xOffset = -50.0 - topButtonOffsetX + yOffset = 0.0 + } + let muteButtonFrame = CGRect( - origin: CGPoint(x: availableSize.width - 20.0 - muteButtonSize.width - 50.0 - topButtonOffsetX, y: max(environment.statusBarHeight + 10.0, environment.safeInsets.top + 20.0)), + origin: CGPoint(x: availableSize.width - 20.0 - muteButtonSize.width + xOffset, y: max(environment.statusBarHeight + 10.0, environment.safeInsets.top + 20.0) + yOffset), size: muteButtonSize ) if let muteButtonView = self.muteButton.view { @@ -1663,13 +1713,14 @@ final class MediaEditorScreenComponent: Component { muteButtonView.layer.animateAlpha(from: 0.0, to: muteButtonView.alpha, duration: self.animatingButtons ? 0.1 : 0.2) muteButtonView.layer.animateScale(from: 0.4, to: 1.0, duration: self.animatingButtons ? 0.1 : 0.2) } - transition.setPosition(view: muteButtonView, position: muteButtonFrame.center) - transition.setBounds(view: muteButtonView, bounds: CGRect(origin: .zero, size: muteButtonFrame.size)) + buttonTransition.setPosition(view: muteButtonView, position: muteButtonFrame.center) + buttonTransition.setBounds(view: muteButtonView, bounds: CGRect(origin: .zero, size: muteButtonFrame.size)) transition.setScale(view: muteButtonView, scale: displayTopButtons ? 1.0 : 0.01) transition.setAlpha(view: muteButtonView, alpha: displayTopButtons && !component.isDismissing && !component.isInteractingWithEntities ? topButtonsAlpha : 0.0) } topButtonOffsetX += 50.0 + topButtonOffsetY += 50.0 } else { if let muteButtonView = self.muteButton.view, muteButtonView.superview != nil { muteButtonView.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2, removeOnCompletion: false, completion: { [weak muteButtonView] _ in @@ -1728,8 +1779,20 @@ final class MediaEditorScreenComponent: Component { environment: {}, containerSize: CGSize(width: 44.0, height: 44.0) ) + + + var xOffset: CGFloat + var yOffset: CGFloat = 0.0 + if component.isCollageTimelineOpen { + xOffset = 0.0 + yOffset = 50.0 + topButtonOffsetY + } else { + xOffset = -50.0 - topButtonOffsetX + yOffset = 0.0 + } + let playbackButtonFrame = CGRect( - origin: CGPoint(x: availableSize.width - 20.0 - playbackButtonSize.width - 50.0 - topButtonOffsetX, y: max(environment.statusBarHeight + 10.0, environment.safeInsets.top + 20.0)), + origin: CGPoint(x: availableSize.width - 20.0 - playbackButtonSize.width + xOffset, y: max(environment.statusBarHeight + 10.0, environment.safeInsets.top + 20.0) + yOffset), size: playbackButtonSize ) if let playbackButtonView = self.playbackButton.view { @@ -1740,13 +1803,14 @@ final class MediaEditorScreenComponent: Component { playbackButtonView.layer.animateAlpha(from: 0.0, to: playbackButtonView.alpha, duration: self.animatingButtons ? 0.1 : 0.2) playbackButtonView.layer.animateScale(from: 0.4, to: 1.0, duration: self.animatingButtons ? 
0.1 : 0.2) } - transition.setPosition(view: playbackButtonView, position: playbackButtonFrame.center) - transition.setBounds(view: playbackButtonView, bounds: CGRect(origin: .zero, size: playbackButtonFrame.size)) + buttonTransition.setPosition(view: playbackButtonView, position: playbackButtonFrame.center) + buttonTransition.setBounds(view: playbackButtonView, bounds: CGRect(origin: .zero, size: playbackButtonFrame.size)) transition.setScale(view: playbackButtonView, scale: displayTopButtons ? 1.0 : 0.01) transition.setAlpha(view: playbackButtonView, alpha: displayTopButtons && !component.isDismissing && !component.isInteractingWithEntities ? topButtonsAlpha : 0.0) } topButtonOffsetX += 50.0 + topButtonOffsetY += 50.0 } else { if let playbackButtonView = self.playbackButton.view, playbackButtonView.superview != nil { playbackButtonView.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2, removeOnCompletion: false, completion: { [weak playbackButtonView] _ in @@ -1820,6 +1884,17 @@ final class MediaEditorScreenComponent: Component { let isAudioOnly = playerState.isAudioOnly let hasMainVideoTrack = playerState.tracks.contains(where: { $0.id == 0 }) + var isCollage = false + if let mediaEditor, !mediaEditor.values.collage.isEmpty { + var videoCount = 1 + for item in mediaEditor.values.collage { + if item.content.isVideo { + videoCount += 1 + } + } + isCollage = videoCount > 1 + } + let scrubber: ComponentView<Empty> if let current = self.scrubber { scrubber = current @@ -1840,6 +1915,9 @@ final class MediaEditorScreenComponent: Component { maxDuration: maxDuration, isPlaying: playerState.isPlaying, tracks: visibleTracks, + isCollage: isCollage, + isCollageSelected: component.isCollageTimelineOpen, + collageSamples: playerState.collageSamples, positionUpdated: { [weak mediaEditor] position, apply in if let mediaEditor { mediaEditor.seek(position, andPlay: apply) } }, @@ -1850,7 +1928,7 @@ final class MediaEditorScreenComponent: Component { return } let trimRange = start..<end } else if trackId > 0 { + mediaEditor.setAdditionalVideoTrimRange(trimRange, trackId: isCollage ? trackId : nil, apply: apply) if hasMainVideoTrack { if apply { mediaEditor.play() } } @@ -1895,7 +1973,7 @@ final class MediaEditorScreenComponent: Component { guard let mediaEditor else { return } - if trackId == 2 { + if trackId == 1000 { mediaEditor.setAudioTrackOffset(offset, apply: apply) if isAudioOnly { let offset = (mediaEditor.values.audioTrackOffset ?? 0.0) @@ -1928,8 +2006,8 @@ final class MediaEditorScreenComponent: Component { mediaEditor.stop() } } - } else if trackId == 1 { - mediaEditor.setAdditionalVideoOffset(offset, apply: apply) + } else if trackId > 0 { + mediaEditor.setAdditionalVideoOffset(offset, trackId: isCollage ?
trackId : nil, apply: apply) } }, trackLongPressed: { [weak controller] trackId, sourceView in @@ -1937,11 +2015,28 @@ final class MediaEditorScreenComponent: Component { return } controller.node.presentTrackOptions(trackId: trackId, sourceView: sourceView) + }, + collageSelectionUpdated: { [weak controller] in + guard let controller else { + return + } + controller.node.openCollageTimeline() + }, + trackSelectionUpdated: { [weak controller] trackId in + guard let controller else { + return + } + controller.node.highlightCollageItem(trackId: trackId) } )), environment: {}, containerSize: CGSize(width: previewSize.width - scrubberInset * 2.0, height: availableSize.height) ) + if component.isCollageTimelineOpen { + component.externalState.timelineHeight = scrubberSize.height + 65.0 + } else { + component.externalState.timelineHeight = 0.0 + } let scrubberFrame = CGRect(origin: CGPoint(x: floorToScreenPixels((availableSize.width - scrubberSize.width) / 2.0), y: availableSize.height - environment.safeInsets.bottom - scrubberSize.height + controlsBottomInset - inputPanelSize.height + 3.0), size: scrubberSize) if let scrubberView = scrubber.view { @@ -2394,7 +2489,7 @@ final class MediaEditorScreenComponent: Component { let storyDimensions = CGSize(width: 1080.0, height: 1920.0) let storyMaxVideoDuration: Double = 60.0 -public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate { +public final class MediaEditorScreenImpl: ViewController, MediaEditorScreen, UIDropInteractionDelegate { public enum Mode { public enum StickerEditorMode { case generic @@ -2468,13 +2563,13 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate } final class Node: ViewControllerTracingNode, ASGestureRecognizerDelegate, UIScrollViewDelegate { - private weak var controller: MediaEditorScreen? + private weak var controller: MediaEditorScreenImpl? private let context: AccountContext fileprivate var interaction: DrawingToolsInteraction? private let initializationTimestamp = CACurrentMediaTime() - var subject: MediaEditorScreen.Subject? - var actualSubject: MediaEditorScreen.Subject? + var subject: MediaEditorScreenImpl.Subject? + var actualSubject: MediaEditorScreenImpl.Subject? private var subjectDisposable: Disposable? private var appInForegroundDisposable: Disposable? @@ -2552,7 +2647,7 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate private var playbackPositionDisposable: Disposable? 
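The scrubber callbacks in the hunks above move from the old fixed ids (0 = video, 1 = additional video, 2 = audio) to a sparse scheme that leaves room for collage tracks: 0 stays the main video, any positive id is an additional or collage video track, and audio moves to 1000. A small sketch of the dispatch convention as these handlers appear to use it (the function name is illustrative, not from this diff):

    // Hypothetical helper mirroring the trackId branches seen above.
    func describeTrack(_ trackId: Int32) -> String {
        if trackId == 1000 {
            return "audio track"
        } else if trackId == 0 {
            return "main video"
        } else if trackId > 0 {
            return "additional/collage video #\(trackId)"
        } else {
            return "unknown track"
        }
    }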
- var recording: MediaEditorScreen.Recording + var recording: MediaEditorScreenImpl.Recording private let locationManager = LocationManager() @@ -2561,7 +2656,7 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate private let readyValue = Promise<Bool>() - init(controller: MediaEditorScreen) { + init(controller: MediaEditorScreenImpl) { self.controller = controller self.context = controller.context @@ -2631,7 +2726,7 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate self.stickerMaskPreviewView.backgroundColor = UIColor(rgb: 0xffffff, alpha: 0.3) self.stickerMaskPreviewView.isUserInteractionEnabled = false - self.recording = MediaEditorScreen.Recording(controller: controller) + self.recording = MediaEditorScreenImpl.Recording(controller: controller) super.init() @@ -2825,7 +2920,7 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate self.stickerCutoutStatusDisposable?.dispose() } - private func setup(with subject: MediaEditorScreen.Subject) { + private func setup(with subject: MediaEditorScreenImpl.Subject) { guard let controller = self.controller else { return } @@ -3038,6 +3133,8 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate } } } + } else if case let .videoCollage(items) = effectiveSubject { + mediaEditor.setupCollage(items.map { $0.editorItem }) } else if case let .message(messageIds) = effectiveSubject { let isNightTheme = mediaEditor.values.nightTheme let _ = ((self.context.engine.data.get( @@ -3129,7 +3226,7 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate self.mediaEditor = mediaEditor self.mediaEditorPromise.set(.single(mediaEditor)) - if controller.isEmbeddedEditor == true { + if controller.isEmbeddedEditor { mediaEditor.onFirstDisplay = { [weak self] in if let self { if let transitionInView = self.transitionInView { @@ -3475,6 +3572,8 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate if case .stickerEditor = controller.mode { hasSwipeToDismiss = false hasSwipeToEnhance = false + } else if self.isCollageTimelineOpen { + hasSwipeToEnhance = false } let translation = gestureRecognizer.translation(in: self.view) @@ -3549,6 +3648,9 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate private var previousRotateTimestamp: Double?
@objc func handlePan(_ gestureRecognizer: UIPanGestureRecognizer) { + guard !self.isCollageTimelineOpen else { + return + } if gestureRecognizer.numberOfTouches == 2, let subject = self.subject, case .message = subject, !self.entitiesView.hasSelection { return } @@ -3561,6 +3663,9 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate } @objc func handlePinch(_ gestureRecognizer: UIPinchGestureRecognizer) { + guard !self.isCollageTimelineOpen else { + return + } if gestureRecognizer.numberOfTouches == 2, let subject = self.subject, case .message = subject, !self.entitiesView.hasSelection { return } @@ -3573,6 +3678,9 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate } @objc func handleRotate(_ gestureRecognizer: UIRotationGestureRecognizer) { + guard !self.isCollageTimelineOpen else { + return + } if gestureRecognizer.numberOfTouches == 2, let subject = self.subject, case .message = subject, !self.entitiesView.hasSelection { return } @@ -3587,6 +3695,13 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate guard !self.recording.isActive, let controller = self.controller else { return } + + if self.isCollageTimelineOpen { + self.isCollageTimelineOpen = false + self.requestLayout(forceUpdate: true, transition: .spring(duration: 0.4)) + return + } + let location = gestureRecognizer.location(in: self.view) var entitiesHitTestResult = self.entitiesView.hitTest(self.view.convert(location, to: self.entitiesView), with: nil) if entitiesHitTestResult is DrawingMediaEntityView { @@ -4450,16 +4565,26 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate } func presentTrackOptions(trackId: Int32, sourceView: UIView) { + guard let mediaEditor = self.mediaEditor else { + return + } let isVideo = trackId != 2 let actionTitle: String = isVideo ? self.presentationData.strings.MediaEditor_RemoveVideo : self.presentationData.strings.MediaEditor_RemoveAudio + let isCollage = !mediaEditor.values.collage.isEmpty let value: CGFloat - if trackId == 0 { - value = self.mediaEditor?.values.videoVolume ?? 1.0 - } else if trackId == 1 { - value = self.mediaEditor?.values.additionalVideoVolume ?? 1.0 - } else if trackId == 2 { - value = self.mediaEditor?.values.audioTrackVolume ?? 1.0 + if trackId == 1000 { + value = mediaEditor.values.audioTrackVolume ?? 1.0 + } else if trackId == 0 { + value = mediaEditor.values.videoVolume ?? 1.0 + } else if trackId > 0 { + if !isCollage { + value = mediaEditor.values.additionalVideoVolume ?? 1.0 + } else if let index = mediaEditor.collageItemIndexForTrackId(trackId) { + value = mediaEditor.values.collage[index].videoVolume ?? 1.0 + } else { + value = 1.0 + } } else { value = 1.0 } @@ -4468,20 +4593,21 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate items.append( .custom(VolumeSliderContextItem(minValue: 0.0, maxValue: 1.5, value: value, valueChanged: { [weak self] value, _ in if let self, let mediaEditor = self.mediaEditor { - if trackId == 0 { + if trackId == 1000 { + mediaEditor.setAudioTrackVolume(value) + } else if trackId == 0 { if mediaEditor.values.videoIsMuted { mediaEditor.setVideoIsMuted(false) } mediaEditor.setVideoVolume(value) - } else if trackId == 1 { - mediaEditor.setAdditionalVideoVolume(value) - } else if trackId == 2 { - mediaEditor.setAudioTrackVolume(value) + } else if trackId > 0 { + mediaEditor.setAdditionalVideoVolume(value, trackId: isCollage ? 
trackId : nil) } } }), false) ) - if trackId != 0 { + + if trackId != 0 && !isCollage { items.append( .action( ContextMenuActionItem( @@ -4838,7 +4964,45 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate func viewForZooming(in scrollView: UIScrollView) -> UIView? { return self.previewContentContainerView } + + private var isCollageTimelineOpen = false + func openCollageTimeline() { + self.isCollageTimelineOpen = true + self.requestLayout(forceUpdate: true, transition: .spring(duration: 0.4)) + } + func highlightCollageItem(trackId: Int32) { + if let collageIndex = self.mediaEditor?.collageItemIndexForTrackId(trackId), let frame = self.mediaEditor?.values.collage[collageIndex].frame { + let mappedFrame = CGRect( + x: frame.minX / storyDimensions.width * self.previewContainerView.bounds.width, + y: frame.minY / storyDimensions.height * self.previewContainerView.bounds.height, + width: frame.width / storyDimensions.width * self.previewContainerView.bounds.width, + height: frame.height / storyDimensions.height * self.previewContainerView.bounds.height + ) + + var corners: CACornerMask = [] + if frame.minX <= .ulpOfOne && frame.minY <= .ulpOfOne { + corners.insert(.layerMinXMinYCorner) + } + if frame.minX <= .ulpOfOne && frame.maxY >= storyDimensions.height - .ulpOfOne { + corners.insert(.layerMinXMaxYCorner) + } + if frame.maxX >= storyDimensions.width - .ulpOfOne && frame.minY <= .ulpOfOne { + corners.insert(.layerMaxXMinYCorner) + } + if frame.maxX >= storyDimensions.width - .ulpOfOne && frame.maxY >= storyDimensions.height - .ulpOfOne { + corners.insert(.layerMaxXMaxYCorner) + } + + let highlightView = CollageHighlightView() + highlightView.update(size: mappedFrame.size, corners: corners, completion: { [weak highlightView] in + highlightView?.removeFromSuperview() + }) + highlightView.frame = mappedFrame + self.previewContainerView.addSubview(highlightView) + } + } + func requestLayout(forceUpdate: Bool, transition: ComponentTransition) { guard let layout = self.validLayout else { return @@ -4909,6 +5073,7 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate isDisplayingTool: self.isDisplayingTool, isInteractingWithEntities: self.isInteractingWithEntities, isSavingAvailable: controller.isSavingAvailable, + isCollageTimelineOpen: self.isCollageTimelineOpen, hasAppeared: self.hasAppeared, isDismissing: self.isDismissing && !self.isDismissBySwipeSuppressed, bottomSafeInset: layout.intrinsicInsets.bottom, @@ -5079,7 +5244,7 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate let controller = DrawingScreen( context: self.context, sourceHint: .storyEditor, - size: self.previewContainerView.frame.size, + size: self.previewContainerView.bounds.size, originalSize: storyDimensions, isVideo: self.mediaEditor?.sourceIsVideo ?? 
false, isAvatar: false, @@ -5379,7 +5544,19 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate if layout.size.height < 680.0, case .stickerEditor = controller.mode { previewFrame = previewFrame.offsetBy(dx: 0.0, dy: -44.0) } - transition.setFrame(view: self.previewContainerView, frame: previewFrame) + + var previewScale: CGFloat = 1.0 + var previewOffset: CGFloat = 0.0 + if self.componentExternalState.timelineHeight > 0.0 { + let clippedHeight = previewFrame.size.height - self.componentExternalState.timelineHeight + previewOffset = -self.componentExternalState.timelineHeight / 2.0 + previewScale = clippedHeight / previewFrame.size.height + } + + transition.setBounds(view: self.previewContainerView, bounds: CGRect(origin: .zero, size: previewFrame.size)) + transition.setPosition(view: self.previewContainerView, position: previewFrame.center.offsetBy(dx: 0.0, dy: previewOffset)) + transition.setScale(view: self.previewContainerView, scale: previewScale) + transition.setFrame(view: self.previewScrollView, frame: CGRect(origin: .zero, size: previewSize)) if self.previewScrollView.contentSize == .zero { @@ -5469,9 +5646,51 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate } public enum Subject { + public struct VideoCollageItem { + public enum Content { + case image(UIImage) + case video(String, Double) + case asset(PHAsset) + + var editorContent: MediaEditor.Subject.VideoCollageItem.Content { + switch self { + case let .image(image): + return .image(image) + case let .video(path, duration): + return .video(path, duration) + case let .asset(asset): + return .asset(asset) + } + } + + var duration: Double { + switch self { + case .image: + return 0.0 + case let .video(_, duration): + return duration + case let .asset(asset): + return asset.duration + } + } + } + public let content: Content + public let frame: CGRect + + var editorItem: MediaEditor.Subject.VideoCollageItem { + return MediaEditor.Subject.VideoCollageItem(content: self.content.editorContent, frame: self.frame) + } + + public init(content: Content, frame: CGRect) { + self.content = content + self.frame = frame + } + } + case empty(PixelDimensions) - case image(UIImage, PixelDimensions, UIImage?, PIPPosition) - case video(String, UIImage?, Bool, String?, UIImage?, PixelDimensions, Double, [(Bool, Double)], PIPPosition) + case image(image: UIImage, dimensions: PixelDimensions, additionalImage: UIImage?, additionalImagePosition: PIPPosition) + case video(videoPath: String, thumbnail: UIImage?, mirror: Bool, additionalVideoPath: String?, additionalThumbnail: UIImage?, dimensions: PixelDimensions, duration: Double, videoPositionChanges: [(Bool, Double)], additionalVideoPosition: PIPPosition) + case videoCollage(items: [VideoCollageItem]) case asset(PHAsset) case draft(MediaEditorDraft, Int64?) 
case message([MessageId]) @@ -5487,9 +5706,7 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate return PixelDimensions(width: Int32(asset.pixelWidth), height: Int32(asset.pixelHeight)) case let .draft(draft, _): return draft.dimensions - case .message: - return PixelDimensions(width: 1080, height: 1920) - case .sticker: + case .message, .sticker, .videoCollage: return PixelDimensions(width: 1080, height: 1920) } } @@ -5505,6 +5722,8 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate return .image(image, dimensions) case let .video(videoPath, transitionImage, mirror, additionalVideoPath, _, dimensions, duration, _, _): return .video(videoPath, transitionImage, mirror, additionalVideoPath, dimensions, duration) + case let .videoCollage(items): + return .videoCollage(items.map { $0.editorItem }) case let .asset(asset): return .asset(asset) case let .draft(draft, _): @@ -5528,6 +5747,8 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate return false case .video: return true + case .videoCollage: + return true case let .asset(asset): return asset.mediaType == .video case let .draft(draft, _): @@ -5546,6 +5767,7 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate case videoFile(path: String) case asset(localIdentifier: String) } + case image(image: UIImage, dimensions: PixelDimensions) case video(video: VideoResult, coverImage: UIImage?, values: MediaEditorValues, duration: Double, dimensions: PixelDimensions) case sticker(file: TelegramMediaFile, emoji: [String]) @@ -5607,7 +5829,7 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate fileprivate let transitionOut: (Bool, Bool?) -> TransitionOut? public var cancelled: (Bool) -> Void = { _ in } - public var completion: (MediaEditorScreen.Result, @escaping (@escaping () -> Void) -> Void) -> Void = { _, _ in } + public var completion: (MediaEditorScreenImpl.Result, @escaping (@escaping () -> Void) -> Void) -> Void = { _, _ in } public var dismissed: () -> Void = { } public var willDismiss: () -> Void = { } public var sendSticker: ((FileMediaReference, UIView, CGRect) -> Bool)? @@ -5640,7 +5862,7 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate initialLink: (url: String, name: String?)? = nil, transitionIn: TransitionIn?, transitionOut: @escaping (Bool, Bool?) 
-> TransitionOut?, - completion: @escaping (MediaEditorScreen.Result, @escaping (@escaping () -> Void) -> Void) -> Void + completion: @escaping (MediaEditorScreenImpl.Result, @escaping (@escaping () -> Void) -> Void) -> Void ) { self.context = context self.mode = mode @@ -6493,7 +6715,7 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate if self.isEmbeddedEditor && !(hasAnyChanges || hasEntityChanges) { self.saveDraft(id: randomId, edit: true) - self.completion(MediaEditorScreen.Result(media: nil, mediaAreas: [], caption: caption, coverTimestamp: mediaEditor.values.coverImageTimestamp, options: self.state.privacy, stickers: stickers, randomId: randomId), { [weak self] finished in + self.completion(MediaEditorScreenImpl.Result(media: nil, mediaAreas: [], caption: caption, coverTimestamp: mediaEditor.values.coverImageTimestamp, options: self.state.privacy, stickers: stickers, randomId: randomId), { [weak self] finished in self?.node.animateOut(finished: true, saveDraft: false, completion: { [weak self] in self?.dismiss() Queue.mainQueue().justDispatch { @@ -6593,6 +6815,50 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate avAssetGenerator.cancelAllCGImageGeneration() } } + case let .videoCollage(items): + var maxDurationItem: (Double, Subject.VideoCollageItem)? + for item in items { + switch item.content { + case .image: + break + case let .video(_, duration): + if let (maxDuration, _) = maxDurationItem { + if duration > maxDuration { + maxDurationItem = (duration, item) + } + } else { + maxDurationItem = (duration, item) + } + case let .asset(asset): + if let (maxDuration, _) = maxDurationItem { + if asset.duration > maxDuration { + maxDurationItem = (asset.duration, item) + } + } else { + maxDurationItem = (asset.duration, item) + } + } + } + guard let (maxDuration, mainItem) = maxDurationItem else { + fatalError() + } + switch mainItem.content { + case let .video(path, _): + videoResult = .single(.videoFile(path: path)) + case let .asset(asset): + videoResult = .single(.asset(localIdentifier: asset.localIdentifier)) + default: + fatalError() + } + let image = generateImage(storyDimensions, opaque: false, scale: 1.0, rotatedContext: { size, context in + context.clear(CGRect(origin: .zero, size: size)) + })! 
+ firstFrame = .single((image, nil)) + if let videoTrimRange = mediaEditor.values.videoTrimRange { + duration = videoTrimRange.upperBound - videoTrimRange.lowerBound + } else { + duration = min(maxDuration, storyMaxVideoDuration) + } case let .asset(asset): videoResult = .single(.asset(localIdentifier: asset.localIdentifier)) if asset.mediaType == .video { @@ -6766,7 +7032,7 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate makeEditorImageComposition(context: self.node.ciContext, postbox: self.context.account.postbox, inputImage: inputImage, dimensions: storyDimensions, values: mediaEditor.values, time: firstFrameTime, textScale: 2.0, completion: { [weak self] coverImage in if let self { Logger.shared.log("MediaEditor", "Completed with video \(videoResult)") - self.completion(MediaEditorScreen.Result(media: .video(video: videoResult, coverImage: coverImage, values: mediaEditor.values, duration: duration, dimensions: mediaEditor.values.resultDimensions), mediaAreas: mediaAreas, caption: caption, coverTimestamp: mediaEditor.values.coverImageTimestamp, options: self.state.privacy, stickers: stickers, randomId: randomId), { [weak self] finished in + self.completion(MediaEditorScreenImpl.Result(media: .video(video: videoResult, coverImage: coverImage, values: mediaEditor.values, duration: duration, dimensions: mediaEditor.values.resultDimensions), mediaAreas: mediaAreas, caption: caption, coverTimestamp: mediaEditor.values.coverImageTimestamp, options: self.state.privacy, stickers: stickers, randomId: randomId), { [weak self] finished in self?.node.animateOut(finished: true, saveDraft: false, completion: { [weak self] in self?.dismiss() Queue.mainQueue().justDispatch { @@ -6789,7 +7055,7 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate makeEditorImageComposition(context: self.node.ciContext, postbox: self.context.account.postbox, inputImage: image, dimensions: storyDimensions, values: mediaEditor.values, time: .zero, textScale: 2.0, completion: { [weak self] resultImage in if let self, let resultImage { Logger.shared.log("MediaEditor", "Completed with image \(resultImage)") - self.completion(MediaEditorScreen.Result(media: .image(image: resultImage, dimensions: PixelDimensions(resultImage.size)), mediaAreas: mediaAreas, caption: caption, coverTimestamp: nil, options: self.state.privacy, stickers: stickers, randomId: randomId), { [weak self] finished in + self.completion(MediaEditorScreenImpl.Result(media: .image(image: resultImage, dimensions: PixelDimensions(resultImage.size)), mediaAreas: mediaAreas, caption: caption, coverTimestamp: nil, options: self.state.privacy, stickers: stickers, randomId: randomId), { [weak self] finished in self?.node.animateOut(finished: true, saveDraft: false, completion: { [weak self] in self?.dismiss() Queue.mainQueue().justDispatch { @@ -6933,7 +7199,7 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate if isVideo { self.uploadSticker(file, action: .send) } else { - self.completion(MediaEditorScreen.Result( + self.completion(MediaEditorScreenImpl.Result( media: .sticker(file: file, emoji: self.effectiveStickerEmoji()), mediaAreas: [], caption: NSAttributedString(), @@ -7343,15 +7609,15 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate case let .complete(resource, _): let navigationController = self.navigationController as? 
NavigationController - let result: MediaEditorScreen.Result + let result: MediaEditorScreenImpl.Result switch action { case .update: - result = MediaEditorScreen.Result(media: .sticker(file: file, emoji: emojis)) + result = MediaEditorScreenImpl.Result(media: .sticker(file: file, emoji: emojis)) case .upload, .send: let file = stickerFile(resource: resource, thumbnailResource: file.previewRepresentations.first?.resource, size: resource.size ?? 0, dimensions: dimensions, duration: self.preferredStickerDuration(), isVideo: isVideo) - result = MediaEditorScreen.Result(media: .sticker(file: file, emoji: emojis)) + result = MediaEditorScreenImpl.Result(media: .sticker(file: file, emoji: emojis)) default: - result = MediaEditorScreen.Result() + result = MediaEditorScreenImpl.Result() } self.completion(result, { [weak self] finished in @@ -7469,6 +7735,9 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate case let .video(path, _, _, _, _, _, _, _, _): let asset = AVURLAsset(url: NSURL(fileURLWithPath: path) as URL) exportSubject = .single(.video(asset: asset, isStory: true)) + case let .videoCollage(items): + let _ = items + exportSubject = .complete() case let .image(image, _, _, _): exportSubject = .single(.image(image: image)) case let .asset(asset): @@ -7518,7 +7787,8 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate exportSubject = .single(.sticker(file: file)) } - let _ = exportSubject.start(next: { [weak self] exportSubject in + let _ = (exportSubject + |> deliverOnMainQueue).start(next: { [weak self] exportSubject in guard let self else { return } @@ -7538,9 +7808,7 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate let outputPath = NSTemporaryDirectory() + "\(Int64.random(in: 0 ..< .max)).\(fileExtension)" let videoExport = MediaEditorVideoExport(postbox: self.context.account.postbox, subject: exportSubject, configuration: configuration, outputPath: outputPath, textScale: 2.0) self.videoExport = videoExport - - videoExport.start() - + self.exportDisposable.set((videoExport.status |> deliverOnMainQueue).start(next: { [weak self] status in if let self { @@ -8287,7 +8555,7 @@ extension MediaScrubberComponent.Track { case let .video(frames, framesUpdateTimestamp): content = .video(frames: frames, framesUpdateTimestamp: framesUpdateTimestamp) case let .audio(artist, title, samples, peak): - content = .audio(artist: artist, title: title, samples: samples, peak: peak) + content = .audio(artist: artist, title: title, samples: samples, peak: peak, isTimeline: false) } self.init( id: track.id, diff --git a/submodules/TelegramUI/Components/MediaEditorScreen/Sources/StoryPreviewComponent.swift b/submodules/TelegramUI/Components/MediaEditorScreen/Sources/StoryPreviewComponent.swift index 572a04dd49..569e8f4273 100644 --- a/submodules/TelegramUI/Components/MediaEditorScreen/Sources/StoryPreviewComponent.swift +++ b/submodules/TelegramUI/Components/MediaEditorScreen/Sources/StoryPreviewComponent.swift @@ -267,6 +267,7 @@ final class StoryPreviewComponent: Component { stopAndPreviewMediaRecording: nil, discardMediaRecordingPreview: nil, attachmentAction: { }, + attachmentButtonMode: .attach, myReaction: nil, likeAction: nil, likeOptionsAction: nil, @@ -274,6 +275,7 @@ final class StoryPreviewComponent: Component { timeoutAction: nil, forwardAction: {}, moreAction: { _, _ in }, + presentCaptionPositionTooltip: nil, presentVoiceMessagesUnavailableTooltip: nil, presentTextLengthLimitTooltip: nil, 
presentTextFormattingTooltip: nil, diff --git a/submodules/TelegramUI/Components/MediaScrubberComponent/BUILD b/submodules/TelegramUI/Components/MediaScrubberComponent/BUILD index 06766ec3c3..cda70abc0f 100644 --- a/submodules/TelegramUI/Components/MediaScrubberComponent/BUILD +++ b/submodules/TelegramUI/Components/MediaScrubberComponent/BUILD @@ -18,6 +18,7 @@ swift_library( "//submodules/Components/MultilineTextComponent", "//submodules/TelegramUI/Components/MediaEditor", "//submodules/TelegramUI/Components/AudioWaveformComponent", + "//submodules/UIKitRuntimeUtils", ], visibility = [ "//visibility:public", diff --git a/submodules/TelegramUI/Components/MediaScrubberComponent/Sources/MediaScrubberComponent.swift b/submodules/TelegramUI/Components/MediaScrubberComponent/Sources/MediaScrubberComponent.swift index 7c955ae8a1..1aa98c48f0 100644 --- a/submodules/TelegramUI/Components/MediaScrubberComponent/Sources/MediaScrubberComponent.swift +++ b/submodules/TelegramUI/Components/MediaScrubberComponent/Sources/MediaScrubberComponent.swift @@ -10,11 +10,13 @@ import AccountContext import AudioWaveformComponent import MultilineTextComponent import MediaEditor +import UIKitRuntimeUtils private let handleWidth: CGFloat = 14.0 private let trackHeight: CGFloat = 39.0 private let collapsedTrackHeight: CGFloat = 26.0 private let trackSpacing: CGFloat = 4.0 +private let collageTrackSpacing: CGFloat = 8.0 private let borderHeight: CGFloat = 1.0 + UIScreenPixel public final class MediaScrubberComponent: Component { @@ -23,7 +25,7 @@ public final class MediaScrubberComponent: Component { public struct Track: Equatable { public enum Content: Equatable { case video(frames: [UIImage], framesUpdateTimestamp: Double) - case audio(artist: String?, title: String?, samples: Data?, peak: Int32) + case audio(artist: String?, title: String?, samples: Data?, peak: Int32, isTimeline: Bool) public static func ==(lhs: Content, rhs: Content) -> Bool { switch lhs { @@ -33,9 +35,9 @@ public final class MediaScrubberComponent: Component { } else { return false } - case let .audio(lhsArtist, lhsTitle, lhsSamples, lhsPeak): - if case let .audio(rhsArtist, rhsTitle, rhsSamples, rhsPeak) = rhs { - return lhsArtist == rhsArtist && lhsTitle == rhsTitle && lhsSamples == rhsSamples && lhsPeak == rhsPeak + case let .audio(lhsArtist, lhsTitle, lhsSamples, lhsPeak, lhsIsTimeline): + if case let .audio(rhsArtist, rhsTitle, rhsSamples, rhsPeak, rhsIsTimeline) = rhs { + return lhsArtist == rhsArtist && lhsTitle == rhsTitle && lhsSamples == rhsSamples && lhsPeak == rhsPeak && lhsIsTimeline == rhsIsTimeline } else { return false } @@ -85,6 +87,10 @@ public final class MediaScrubberComponent: Component { let isPlaying: Bool let tracks: [Track] + let isCollage: Bool + let isCollageSelected: Bool + let collageSamples: (samples: Data, peak: Int32)? + let portalView: PortalView? let positionUpdated: (Double, Bool) -> Void @@ -92,6 +98,8 @@ public final class MediaScrubberComponent: Component { let trackTrimUpdated: (Int32, Double, Double, Bool, Bool) -> Void let trackOffsetUpdated: (Int32, Double, Bool) -> Void let trackLongPressed: (Int32, UIView) -> Void + let collageSelectionUpdated: () -> Void + let trackSelectionUpdated: (Int32) -> Void public init( context: AccountContext, @@ -103,12 +111,17 @@ public final class MediaScrubberComponent: Component { maxDuration: Double, isPlaying: Bool, tracks: [Track], + isCollage: Bool, + isCollageSelected: Bool = false, + collageSamples: (samples: Data, peak: Int32)? = nil, portalView: PortalView? 
= nil, positionUpdated: @escaping (Double, Bool) -> Void, coverPositionUpdated: @escaping (Double, Bool, @escaping () -> Void) -> Void = { _, _, _ in }, trackTrimUpdated: @escaping (Int32, Double, Double, Bool, Bool) -> Void, trackOffsetUpdated: @escaping (Int32, Double, Bool) -> Void, - trackLongPressed: @escaping (Int32, UIView) -> Void + trackLongPressed: @escaping (Int32, UIView) -> Void, + collageSelectionUpdated: @escaping () -> Void = {}, + trackSelectionUpdated: @escaping (Int32) -> Void = { _ in } ) { self.context = context self.style = style @@ -119,12 +132,17 @@ public final class MediaScrubberComponent: Component { self.maxDuration = maxDuration self.isPlaying = isPlaying self.tracks = tracks + self.isCollage = isCollage + self.isCollageSelected = isCollageSelected + self.collageSamples = collageSamples self.portalView = portalView self.positionUpdated = positionUpdated self.coverPositionUpdated = coverPositionUpdated self.trackTrimUpdated = trackTrimUpdated self.trackOffsetUpdated = trackOffsetUpdated self.trackLongPressed = trackLongPressed + self.collageSelectionUpdated = collageSelectionUpdated + self.trackSelectionUpdated = trackSelectionUpdated } public static func ==(lhs: MediaScrubberComponent, rhs: MediaScrubberComponent) -> Bool { @@ -152,15 +170,27 @@ public final class MediaScrubberComponent: Component { if lhs.tracks != rhs.tracks { return false } + if lhs.isCollage != rhs.isCollage { + return false + } + if lhs.isCollageSelected != rhs.isCollageSelected { + return false + } + if lhs.collageSamples?.samples != rhs.collageSamples?.samples || lhs.collageSamples?.peak != rhs.collageSamples?.peak { + return false + } return true } public final class View: UIView, UIGestureRecognizerDelegate { + private let trackContainerView: UIView + private var collageTrackView: TrackView? private var trackViews: [Int32: TrackView] = [:] private let trimView: TrimView private let ghostTrimView: TrimView private let cursorContentView: UIView private let cursorView: HandleView + private let cursorImageView: UIImageView private var cursorDisplayLink: SharedDisplayLinkDriver.Link? private var cursorPositionAnimation: (start: Double, from: Double, to: Double, ended: Bool)? @@ -175,11 +205,13 @@ public final class MediaScrubberComponent: Component { private weak var state: EmptyComponentState? 
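// Collage mode splits the hierarchy in two: the per-track views, the trim
// handles and the ghost trim all live inside `trackContainerView`, so they
// can be blurred and faded as one unit when the collage timeline is
// collapsed, while `collageTrackView` renders the single summary row and
// `cursorImageView` carries the cursor artwork separately from the
// interactive `cursorView` handle.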
override init(frame: CGRect) { + self.trackContainerView = UIView() self.trimView = TrimView(frame: .zero) self.ghostTrimView = TrimView(frame: .zero) self.ghostTrimView.isHollow = true self.cursorContentView = UIView() self.cursorView = HandleView() + self.cursorImageView = UIImageView() super.init(frame: frame) @@ -201,14 +233,18 @@ public final class MediaScrubberComponent: Component { context.addPath(path.cgPath) context.fillPath() })?.stretchableImage(withLeftCapWidth: Int(handleWidth / 2.0), topCapHeight: 25) - self.cursorView.image = positionImage + self.cursorView.image = nil self.cursorView.isUserInteractionEnabled = true self.cursorView.hitTestSlop = UIEdgeInsets(top: -8.0, left: -9.0, bottom: -8.0, right: -9.0) - - self.addSubview(self.ghostTrimView) - self.addSubview(self.trimView) + + self.cursorImageView.image = positionImage + + self.addSubview(self.trackContainerView) + self.trackContainerView.addSubview(self.ghostTrimView) + self.trackContainerView.addSubview(self.trimView) self.addSubview(self.cursorContentView) self.addSubview(self.cursorView) + self.cursorView.addSubview(self.cursorImageView) self.cursorView.addGestureRecognizer(UIPanGestureRecognizer(target: self, action: #selector(self.handleCursorPan(_:)))) @@ -303,6 +339,11 @@ public final class MediaScrubberComponent: Component { guard let component = self.component, case .began = gestureRecognizer.state else { return } + + guard !component.isCollage || component.isCollageSelected else { + return + } + let point = gestureRecognizer.location(in: self) for (id, trackView) in self.trackViews { if trackView.frame.contains(point) { @@ -385,8 +426,18 @@ public final class MediaScrubberComponent: Component { } private var effectiveCursorHeight: CGFloat { - let additionalTracksCount = max(0, (self.component?.tracks.count ?? 1) - 1) - return 50.0 + CGFloat(additionalTracksCount) * 30.0 + var height: CGFloat = 50.0 + if let component = self.component { + if !component.isCollage || component.isCollageSelected { + let trackHeight = component.isCollage ? 34.0 : 30.0 + let additionalTracksCount = max(0, (component.tracks.count) - 1) + height += CGFloat(additionalTracksCount) * trackHeight + } + if component.isCollage && !component.isCollageSelected { + height = 37.0 + } + } + return height } private func updateCursorPosition() { @@ -421,6 +472,7 @@ public final class MediaScrubberComponent: Component { public func update(component: MediaScrubberComponent, availableSize: CGSize, state: EmptyComponentState, environment: Environment, transition: ComponentTransition) -> CGSize { let isFirstTime = self.component == nil + let previousComponent = self.component self.component = component self.state = state @@ -452,7 +504,7 @@ public final class MediaScrubberComponent: Component { context.addPath(path.cgPath) context.strokePath() }) - self.cursorView.image = positionImage + self.cursorImageView.image = positionImage } } @@ -465,6 +517,8 @@ public final class MediaScrubberComponent: Component { var lowestVideoId: Int32? + let effectiveTrackSpacing = component.isCollage ? 
collageTrackSpacing : trackSpacing + var validIds = Set() for track in component.tracks { let id = track.id @@ -506,6 +560,7 @@ public final class MediaScrubberComponent: Component { return } self.selectedTrackId = id + self.component?.trackSelectionUpdated(id) self.state?.updated(transition: .easeInOut(duration: 0.2)) } trackView.offsetUpdated = { [weak self] offset, apply in @@ -522,28 +577,80 @@ public final class MediaScrubberComponent: Component { } self.trackViews[id] = trackView - self.insertSubview(trackView, at: 0) + self.trackContainerView.insertSubview(trackView, at: 0) if !isFirstTime { animateTrackIn = true } } + var isSelected = id == self.selectedTrackId + if component.isCollage && !component.isCollageSelected { + isSelected = false + } + let trackSize = trackView.update( context: component.context, style: component.style, track: track, - isSelected: id == self.selectedTrackId, + isSelected: isSelected, availableSize: availableSize, duration: self.duration, transition: trackTransition ) trackLayout[id] = (CGRect(origin: CGPoint(x: 0.0, y: totalHeight), size: trackSize), trackTransition, animateTrackIn) - totalHeight += trackSize.height - totalHeight += trackSpacing + if component.isCollage && !component.isCollageSelected { + + } else { + totalHeight += trackSize.height + totalHeight += effectiveTrackSpacing + } + } + totalHeight -= effectiveTrackSpacing + + if component.isCollage { + if !component.isCollageSelected { + totalHeight = collapsedTrackHeight + } + + var trackTransition = transition + + let trackView: TrackView + if let current = self.collageTrackView { + trackView = current + } else { + trackTransition = .immediate + trackView = TrackView() + trackView.onSelection = { [weak self] _ in + guard let self else { + return + } + self.component?.collageSelectionUpdated() + } + self.insertSubview(trackView, belowSubview: self.cursorView) + self.collageTrackView = trackView + } + + let trackSize = trackView.update( + context: component.context, + style: component.style, + track: MediaScrubberComponent.Track( + id: 1024, + content: .audio(artist: nil, title: "Timeline", samples: component.collageSamples?.samples, peak: component.collageSamples?.peak ?? 0, isTimeline: true), + duration: component.maxDuration, + trimRange: nil, + offset: nil, + isMain: false + ), + isSelected: false, + availableSize: availableSize, + duration: self.duration, + transition: trackTransition + ) + trackTransition.setFrame(view: trackView, frame: CGRect(origin: .zero, size: trackSize)) + trackTransition.setAlpha(view: trackView, alpha: component.isCollageSelected ? 
0.0 : 1.0) } - totalHeight -= trackSpacing for track in component.tracks { guard let trackView = self.trackViews[track.id], let (trackFrame, trackTransition, animateTrackIn) = trackLayout[track.id] else { @@ -552,7 +659,7 @@ public final class MediaScrubberComponent: Component { trackTransition.setFrame(view: trackView, frame: CGRect(origin: CGPoint(x: 0.0, y: totalHeight - trackFrame.maxY), size: trackFrame.size)) if animateTrackIn { trackView.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2) - trackView.layer.animatePosition(from: CGPoint(x: 0.0, y: trackFrame.height + trackSpacing), to: .zero, duration: 0.35, timingFunction: kCAMediaTimingFunctionSpring, additive: true) + trackView.layer.animatePosition(from: CGPoint(x: 0.0, y: trackFrame.height + effectiveTrackSpacing), to: .zero, duration: 0.35, timingFunction: kCAMediaTimingFunctionSpring, additive: true) } } @@ -592,7 +699,7 @@ public final class MediaScrubberComponent: Component { trimViewVisualInsets.left = delta } - if lowestVideoId == 0 && track.id == 1 { + if (lowestVideoId == 0 && track.id == 1) || component.isCollage { trimViewVisualInsets = .zero trackViewWidth = trackView.containerView.frame.width mainTrimDuration = track.duration @@ -653,11 +760,19 @@ public final class MediaScrubberComponent: Component { selectedTrackFrame = mainTrackFrame } - let trimViewFrame = CGRect(origin: CGPoint(x: trimViewOffset, y: selectedTrackFrame.minY), size: scrubberSize) + var trimViewFrame = CGRect(origin: CGPoint(x: trimViewOffset, y: selectedTrackFrame.minY), size: scrubberSize) + + var trimVisible = true + if component.isCollage && !component.isCollageSelected { + trimVisible = false + trimViewFrame = trimViewFrame.offsetBy(dx: 0.0, dy: collapsedTrackHeight - trackHeight) + } + transition.setFrame(view: self.trimView, frame: trimViewFrame) + transition.setAlpha(view: self.trimView, alpha: trimVisible ? 1.0 : 0.0) var ghostTrimVisible = false - if let lowestVideoId, self.selectedTrackId != lowestVideoId { + if let lowestVideoId, !component.isCollage && self.selectedTrackId != lowestVideoId { ghostTrimVisible = true } @@ -709,7 +824,40 @@ public final class MediaScrubberComponent: Component { self.updateCursorPosition() } - return CGSize(width: availableSize.width, height: totalHeight) + transition.setFrame(view: self.cursorImageView, frame: CGRect(origin: .zero, size: self.cursorView.frame.size)) + + if component.isCollage { + transition.setAlpha(view: self.trackContainerView, alpha: component.isCollageSelected ? 
1.0 : 0.0) + } + + if let previousComponent, component.isCollage, previousComponent.isCollageSelected != component.isCollageSelected { + if let blurFilter = makeBlurFilter() { + if component.isCollageSelected { + blurFilter.setValue(0.0 as NSNumber, forKey: "inputRadius") + self.trackContainerView.layer.filters = [blurFilter] + self.trackContainerView.layer.animate(from: 20.0 as NSNumber, to: 0.0 as NSNumber, keyPath: "filters.gaussianBlur.inputRadius", timingFunction: CAMediaTimingFunctionName.easeOut.rawValue, duration: 0.3, completion: { [weak self] completed in + guard let self, completed else { + return + } + self.trackContainerView.layer.filters = [] + }) + } else { + blurFilter.setValue(0.0 as NSNumber, forKey: "inputRadius") + self.trackContainerView.layer.filters = [blurFilter] + self.trackContainerView.layer.animate(from: 0.0 as NSNumber, to: 20.0 as NSNumber, keyPath: "filters.gaussianBlur.inputRadius", timingFunction: CAMediaTimingFunctionName.easeOut.rawValue, duration: 0.4, completion: { [weak self] completed in + guard let self, completed else { + return + } + self.trackContainerView.layer.filters = [] + }) + } + } + } + + let size = CGSize(width: availableSize.width, height: totalHeight) + transition.setFrame(view: self.trackContainerView, frame: CGRect(origin: .zero, size: size)) + + return size } public override func point(inside point: CGPoint, with event: UIEvent?) -> Bool { @@ -1124,7 +1272,7 @@ private class TrackView: UIView, UIScrollViewDelegate, UIGestureRecognizerDelega } frameOffset += frameSize.width } - case let .audio(artist, title, samples, peak): + case let .audio(artist, title, samples, peak, isTimeline): var components: [String] = [] var trackTitle = "" if let artist { @@ -1161,7 +1309,15 @@ private class TrackView: UIView, UIScrollViewDelegate, UIGestureRecognizerDelega } let spacing: CGFloat = 4.0 - let iconSize = CGSize(width: 14.0, height: 14.0) + var iconSize = CGSize(width: 14.0, height: 14.0) + var trackTitleAlpha: CGFloat = 1.0 + if isTimeline { + if previousParams == nil { + self.audioIconView.image = UIImage(bundleImageName: "Media Editor/Timeline") + } + iconSize = CGSize(width: 24.0, height: 24.0) + trackTitleAlpha = 0.7 + } let contentTotalWidth = iconSize.width + audioTitleSize.width + spacing let audioContentTransition = transition @@ -1181,16 +1337,16 @@ private class TrackView: UIView, UIScrollViewDelegate, UIGestureRecognizerDelega self.audioContentContainerView.addSubview(self.audioIconView) self.audioContentContainerView.addSubview(view) } - transition.setAlpha(view: view, alpha: trackTitleIsVisible ? 1.0 : 0.0) + transition.setAlpha(view: view, alpha: trackTitleIsVisible ? trackTitleAlpha : 0.0) let audioTitleFrame = CGRect(origin: CGPoint(x: audioIconFrame.maxX + spacing, y: floorToScreenPixels((scrubberSize.height - audioTitleSize.height) / 2.0)), size: audioTitleSize) view.bounds = CGRect(origin: .zero, size: audioTitleFrame.size) audioContentTransition.setPosition(view: view, position: audioTitleFrame.center) } - transition.setAlpha(view: self.audioIconView, alpha: trackTitleIsVisible ? 1.0 : 0.0) + transition.setAlpha(view: self.audioIconView, alpha: trackTitleIsVisible ? trackTitleAlpha : 0.0) var previousSamples: Data? 
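// The `isTimeline` flag reuses the audio-track layout for the collapsed
// collage row: the dedicated Media Editor/Timeline icon replaces the music
// note, the icon grows to 24pt, and the title is dimmed to 70% opacity so it
// reads as a summary rather than a selectable track.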
- if let previousParams, case let .audio(_ , _, previousSamplesValue, _) = previousParams.track.content { + if let previousParams, case let .audio(_ , _, previousSamplesValue, _, _) = previousParams.track.content { previousSamples = previousSamplesValue } diff --git a/submodules/TelegramUI/Components/MessageInputActionButtonComponent/Sources/MessageInputActionButtonComponent.swift b/submodules/TelegramUI/Components/MessageInputActionButtonComponent/Sources/MessageInputActionButtonComponent.swift index e521dabde9..bbaefbd936 100644 --- a/submodules/TelegramUI/Components/MessageInputActionButtonComponent/Sources/MessageInputActionButtonComponent.swift +++ b/submodules/TelegramUI/Components/MessageInputActionButtonComponent/Sources/MessageInputActionButtonComponent.swift @@ -3,6 +3,7 @@ import UIKit import Display import ComponentFlow import AppBundle +import TelegramCore import ChatTextInputMediaRecordingButton import AccountContext import TelegramPresentationData @@ -10,7 +11,7 @@ import ChatPresentationInterfaceState import MoreHeaderButton import ContextUI import ReactionButtonListComponent -import TelegramCore +import LottieComponent private class ButtonIcon: Equatable { enum IconType: Equatable { @@ -131,6 +132,8 @@ public final class MessageInputActionButtonComponent: Component { case more case like(reaction: MessageReaction.Reaction?, file: TelegramMediaFile?, animationFileId: Int64?) case repost + case captionUp + case captionDown } public enum Action { @@ -228,6 +231,7 @@ public final class MessageInputActionButtonComponent: Component { private let sendIconView: UIImageView private var reactionHeartView: UIImageView? private var moreButton: MoreHeaderButton? + private var animation: ComponentView? private var reactionIconView: ReactionIconView? private var component: MessageInputActionButtonComponent? @@ -423,12 +427,51 @@ public final class MessageInputActionButtonComponent: Component { self.addSubnode(moreButton) } + switch component.mode { + case .captionUp, .captionDown: + var startingPosition: LottieComponent.StartingPosition = .begin + let animation: ComponentView + if let current = self.animation { + animation = current + } else { + animation = ComponentView() + self.animation = animation + startingPosition = .end + } + + let playOnce = ActionSlot() + let animationName = component.mode == .captionUp ? "message_preview_sort_above" : "message_preview_sort_below" + let _ = animation.update( + transition: transition, + component: AnyComponent(LottieComponent( + content: LottieComponent.AppBundleContent(name: animationName), + color: .white, + startingPosition: startingPosition, + playOnce: playOnce + )), + environment: {}, + containerSize: CGSize(width: 30.0, height: 30.0) + ) + if let view = animation.view { + if view.superview == nil { + self.referenceNode.view.addSubview(view) + } + } + if let previousComponent, previousComponent.mode != component.mode { + playOnce.invoke(Void()) + } + default: + break + } + var sendAlpha: CGFloat = 0.0 var microphoneAlpha: CGFloat = 0.0 var moreAlpha: CGFloat = 0.0 switch component.mode { case .none: break + case .captionUp, .captionDown: + sendAlpha = 0.0 case .send, .apply, .attach, .delete, .forward, .removeVideoInput, .repost: sendAlpha = 1.0 case let .like(reaction, _, _): @@ -603,6 +646,13 @@ public final class MessageInputActionButtonComponent: Component { transition.setScale(view: moreButton.view, scale: moreAlpha == 0.0 ? 
0.01 : 1.0) } + if let view = self.animation?.view { + let buttonSize = CGSize(width: 30.0, height: 30.0) + let iconFrame = CGRect(origin: CGPoint(x: 2.0 + floorToScreenPixels((availableSize.width - buttonSize.width) * 0.5), y: floorToScreenPixels((availableSize.height - buttonSize.height) * 0.5)), size: buttonSize) + transition.setPosition(view: view, position: iconFrame.center) + transition.setBounds(view: view, bounds: CGRect(origin: CGPoint(), size: iconFrame.size)) + } + if let micButton = self.micButton { micButton.hasShadow = component.hasShadow micButton.hidesOnLock = component.hasShadow @@ -621,7 +671,7 @@ public final class MessageInputActionButtonComponent: Component { if previousComponent?.mode != component.mode { switch component.mode { - case .none, .send, .apply, .voiceInput, .attach, .delete, .forward, .unavailableVoiceInput, .more, .like, .repost: + case .none, .send, .apply, .voiceInput, .attach, .delete, .forward, .unavailableVoiceInput, .more, .like, .repost, .captionUp, .captionDown: micButton.updateMode(mode: .audio, animated: !transition.animation.isImmediate) case .videoInput, .removeVideoInput: micButton.updateMode(mode: .video, animated: !transition.animation.isImmediate) diff --git a/submodules/TelegramUI/Components/MessageInputPanelComponent/Sources/MessageInputPanelComponent.swift b/submodules/TelegramUI/Components/MessageInputPanelComponent/Sources/MessageInputPanelComponent.swift index b481ee2ecf..321b3b75cf 100644 --- a/submodules/TelegramUI/Components/MessageInputPanelComponent/Sources/MessageInputPanelComponent.swift +++ b/submodules/TelegramUI/Components/MessageInputPanelComponent/Sources/MessageInputPanelComponent.swift @@ -56,6 +56,12 @@ public final class MessageInputPanelComponent: Component { case emoji } + public enum AttachmentButtonMode: Hashable { + case attach + case captionUp + case captionDown + } + public struct MyReaction: Equatable { public let reaction: MessageReaction.Reaction public let file: TelegramMediaFile? @@ -157,6 +163,7 @@ public final class MessageInputPanelComponent: Component { public let maxLength: Int? public let queryTypes: ContextQueryTypes public let alwaysDarkWhenHasText: Bool + public let useGrayBackground: Bool public let resetInputContents: SendMessageInput? public let nextInputMode: (Bool) -> InputMode? public let areVoiceMessagesAvailable: Bool @@ -170,6 +177,7 @@ public final class MessageInputPanelComponent: Component { public let stopAndPreviewMediaRecording: (() -> Void)? public let discardMediaRecordingPreview: (() -> Void)? public let attachmentAction: (() -> Void)? + public let attachmentButtonMode: AttachmentButtonMode? public let myReaction: MyReaction? public let likeAction: (() -> Void)? public let likeOptionsAction: ((UIView, ContextGesture?) -> Void)? @@ -177,6 +185,7 @@ public final class MessageInputPanelComponent: Component { public let timeoutAction: ((UIView, ContextGesture?) -> Void)? public let forwardAction: (() -> Void)? public let moreAction: ((UIView, ContextGesture?) -> Void)? + public let presentCaptionPositionTooltip: ((UIView) -> Void)? public let presentVoiceMessagesUnavailableTooltip: ((UIView) -> Void)? public let presentTextLengthLimitTooltip: (() -> Void)? public let presentTextFormattingTooltip: (() -> Void)? 
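The new AttachmentButtonMode lets the attachment slot double as a caption-position toggle. A minimal sketch of how a presenting screen might derive the mode, assuming a hypothetical isCaptionAbove flag for state the panel itself does not track (taps still arrive through attachmentAction):

// Hypothetical helper, a sketch rather than part of this change: picks the
// panel's attachment button mode from the caller's own caption state.
func captionToggleMode(isCaptionAbove: Bool, allowsCaptionToggle: Bool) -> MessageInputPanelComponent.AttachmentButtonMode {
    guard allowsCaptionToggle else {
        // Fall back to the regular attachment (paperclip) behavior.
        return .attach
    }
    // Advertise the move a tap would perform: if the caption currently sits
    // above the media, offer to move it down, and vice versa.
    return isCaptionAbove ? .captionDown : .captionUp
}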
@@ -212,6 +221,7 @@ public final class MessageInputPanelComponent: Component { maxLength: Int?, queryTypes: ContextQueryTypes, alwaysDarkWhenHasText: Bool, + useGrayBackground: Bool = false, resetInputContents: SendMessageInput?, nextInputMode: @escaping (Bool) -> InputMode?, areVoiceMessagesAvailable: Bool, @@ -225,6 +235,7 @@ public final class MessageInputPanelComponent: Component { stopAndPreviewMediaRecording: (() -> Void)?, discardMediaRecordingPreview: (() -> Void)?, attachmentAction: (() -> Void)?, + attachmentButtonMode: AttachmentButtonMode? = nil, myReaction: MyReaction?, likeAction: (() -> Void)?, likeOptionsAction: ((UIView, ContextGesture?) -> Void)?, @@ -232,6 +243,7 @@ public final class MessageInputPanelComponent: Component { timeoutAction: ((UIView, ContextGesture?) -> Void)?, forwardAction: (() -> Void)?, moreAction: ((UIView, ContextGesture?) -> Void)?, + presentCaptionPositionTooltip: ((UIView) -> Void)?, presentVoiceMessagesUnavailableTooltip: ((UIView) -> Void)?, presentTextLengthLimitTooltip: (() -> Void)?, presentTextFormattingTooltip: (() -> Void)?, @@ -267,6 +279,7 @@ public final class MessageInputPanelComponent: Component { self.maxLength = maxLength self.queryTypes = queryTypes self.alwaysDarkWhenHasText = alwaysDarkWhenHasText + self.useGrayBackground = useGrayBackground self.resetInputContents = resetInputContents self.areVoiceMessagesAvailable = areVoiceMessagesAvailable self.presentController = presentController @@ -279,6 +292,7 @@ public final class MessageInputPanelComponent: Component { self.stopAndPreviewMediaRecording = stopAndPreviewMediaRecording self.discardMediaRecordingPreview = discardMediaRecordingPreview self.attachmentAction = attachmentAction + self.attachmentButtonMode = attachmentButtonMode self.myReaction = myReaction self.likeAction = likeAction self.likeOptionsAction = likeOptionsAction @@ -286,6 +300,7 @@ public final class MessageInputPanelComponent: Component { self.timeoutAction = timeoutAction self.forwardAction = forwardAction self.moreAction = moreAction + self.presentCaptionPositionTooltip = presentCaptionPositionTooltip self.presentVoiceMessagesUnavailableTooltip = presentVoiceMessagesUnavailableTooltip self.presentTextLengthLimitTooltip = presentTextLengthLimitTooltip self.presentTextFormattingTooltip = presentTextFormattingTooltip @@ -340,6 +355,9 @@ public final class MessageInputPanelComponent: Component { if lhs.alwaysDarkWhenHasText != rhs.alwaysDarkWhenHasText { return false } + if lhs.useGrayBackground != rhs.useGrayBackground { + return false + } if lhs.resetInputContents != rhs.resetInputContents { return false } @@ -409,6 +427,9 @@ public final class MessageInputPanelComponent: Component { if (lhs.attachmentAction == nil) != (rhs.attachmentAction == nil) { return false } + if lhs.attachmentButtonMode != rhs.attachmentButtonMode { + return false + } if lhs.myReaction != rhs.myReaction { return false } @@ -456,7 +477,6 @@ public final class MessageInputPanelComponent: Component { private let inputActionButton = ComponentView() private let likeButton = ComponentView() private let stickerButton = ComponentView() - private let reactionButton = ComponentView() private let timeoutButton = ComponentView() private var mediaRecordingVibrancyContainer: UIView @@ -484,6 +504,8 @@ public final class MessageInputPanelComponent: Component { private var viewStatsCountText: AnimatedCountLabelView? private var reactionStatsCountText: AnimatedCountLabelView? 
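// One-shot guard: the caption-position tooltip is shown at most once per
// panel instance, and only after the user has typed more than three
// characters while the button is in .captionUp mode (see the update pass
// below).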
+ private var didDisplayCaptionPositionTooltip = false + private let hapticFeedback = HapticFeedback() private var component: MessageInputPanelComponent? @@ -717,7 +739,10 @@ public final class MessageInputPanelComponent: Component { func update(component: MessageInputPanelComponent, availableSize: CGSize, state: EmptyComponentState, environment: Environment, transition: ComponentTransition) -> CGSize { let previousPlaceholder = self.component?.placeholder - var insets = UIEdgeInsets(top: 14.0, left: 9.0, bottom: 6.0, right: 41.0) + let defaultInsets = UIEdgeInsets(top: 14.0, left: 9.0, bottom: 6.0, right: 41.0) + var insets = defaultInsets + + let layoutFromTop = component.attachmentButtonMode == .captionDown if let _ = component.attachmentAction { insets.left = 41.0 @@ -858,7 +883,10 @@ public final class MessageInputPanelComponent: Component { containerSize: availableTextFieldSize ) if !isEditing && component.setMediaRecordingActive == nil { - insets.right = insets.left + insets.right = defaultInsets.left + } + if component.attachmentButtonMode != .attach && !isEditing && !self.textFieldExternalState.hasText { + insets.left = defaultInsets.left } var headerHeight: CGFloat = 0.0 @@ -938,7 +966,7 @@ public final class MessageInputPanelComponent: Component { fieldBackgroundFrame.size.height += headerHeight transition.setFrame(view: self.vibrancyEffectView, frame: CGRect(origin: CGPoint(), size: fieldBackgroundFrame.size)) - self.vibrancyEffectView.isHidden = component.style == .media + self.vibrancyEffectView.isHidden = false // component.style == .media transition.setFrame(view: self.fieldBackgroundView, frame: fieldBackgroundFrame) self.fieldBackgroundView.update(size: fieldBackgroundFrame.size, cornerRadius: headerHeight > 0.0 ? 18.0 : baseFieldHeight * 0.5, transition: transition.containedViewLayoutTransition) @@ -1195,7 +1223,24 @@ public final class MessageInputPanelComponent: Component { if component.attachmentAction != nil { let attachmentButtonMode: MessageInputActionButtonComponent.Mode - attachmentButtonMode = .attach + + var attachmentVisible = isEditing || self.textFieldExternalState.hasText + switch component.attachmentButtonMode { + case .captionUp: + attachmentButtonMode = .captionUp + case .captionDown: + attachmentButtonMode = .captionDown + default: + attachmentButtonMode = .attach + attachmentVisible = !(hasMediaRecording || hasMediaEditing || !isEditing) + } + + if attachmentButtonMode == .captionUp && !self.didDisplayCaptionPositionTooltip && self.textFieldExternalState.textLength > 3 { + self.didDisplayCaptionPositionTooltip = true + if let sourceView = self.attachmentButton.view { + component.presentCaptionPositionTooltip?(sourceView) + } + } let attachmentButtonSize = self.attachmentButton.update( transition: transition, @@ -1210,7 +1255,7 @@ public final class MessageInputPanelComponent: Component { switch mode { case .delete: break - case .attach: + case .attach, .captionUp, .captionDown: component.attachmentAction?() default: break @@ -1245,10 +1290,16 @@ public final class MessageInputPanelComponent: Component { if attachmentButtonView.superview == nil { self.addSubview(attachmentButtonView) } - let attachmentButtonFrame = CGRect(origin: CGPoint(x: floor((insets.left - attachmentButtonSize.width) * 0.5) + (fieldBackgroundFrame.minX - fieldFrame.minX), y: size.height - insets.bottom - baseFieldHeight + floor((baseFieldHeight - attachmentButtonSize.height) * 0.5)), size: attachmentButtonSize) + var attachmentButtonPosition = floor((baseFieldHeight - 
attachmentButtonSize.height) * 0.5) + if layoutFromTop { + attachmentButtonPosition += 14.0 + } else { + attachmentButtonPosition = size.height - insets.bottom - baseFieldHeight + attachmentButtonPosition + } + let attachmentButtonFrame = CGRect(origin: CGPoint(x: floor((insets.left - attachmentButtonSize.width) * 0.5) + (fieldBackgroundFrame.minX - fieldFrame.minX), y: attachmentButtonPosition), size: attachmentButtonSize) transition.setPosition(view: attachmentButtonView, position: attachmentButtonFrame.center) transition.setBounds(view: attachmentButtonView, bounds: CGRect(origin: CGPoint(), size: attachmentButtonFrame.size)) - transition.setAlpha(view: attachmentButtonView, alpha: (hasMediaRecording || hasMediaEditing || !isEditing) ? 0.0 : 1.0) + transition.setAlpha(view: attachmentButtonView, alpha: attachmentVisible ? 1.0 : 0.0) transition.setScale(view: attachmentButtonView, scale: hasMediaEditing ? 0.001 : 1.0) } } @@ -1326,6 +1377,7 @@ public final class MessageInputPanelComponent: Component { } } + var inputActionButtonAlpha = 1.0 let inputActionButtonMode: MessageInputActionButtonComponent.Mode if case .editor = component.style { if isEditing { @@ -1334,7 +1386,10 @@ public final class MessageInputPanelComponent: Component { inputActionButtonMode = component.hasRecordedVideo ? .removeVideoInput : .videoInput } } else if case .media = component.style { - inputActionButtonMode = isEditing ? .apply : .none + inputActionButtonMode = .apply + if !isEditing { + inputActionButtonAlpha = 0.0 + } } else { if hasMediaEditing { inputActionButtonMode = .send @@ -1494,10 +1549,16 @@ public final class MessageInputPanelComponent: Component { if inputActionButtonView.superview == nil { self.addSubview(inputActionButtonView) } - let inputActionButtonFrame = CGRect(origin: CGPoint(x: inputActionButtonOriginX, y: size.height - insets.bottom - baseFieldHeight + floor((baseFieldHeight - inputActionButtonSize.height) * 0.5)), size: inputActionButtonSize) + var inputActionButtonPosition = floor((baseFieldHeight - inputActionButtonSize.height) * 0.5) + if layoutFromTop { + inputActionButtonPosition += 14.0 + } else { + inputActionButtonPosition = size.height - insets.bottom - baseFieldHeight + inputActionButtonPosition + } + let inputActionButtonFrame = CGRect(origin: CGPoint(x: inputActionButtonOriginX, y: inputActionButtonPosition), size: inputActionButtonSize) transition.setPosition(view: inputActionButtonView, position: inputActionButtonFrame.center) transition.setBounds(view: inputActionButtonView, bounds: CGRect(origin: CGPoint(), size: inputActionButtonFrame.size)) - transition.setAlpha(view: inputActionButtonView, alpha: likeActionReplacesInputAction ? 0.0 : 1.0) + transition.setAlpha(view: inputActionButtonView, alpha: likeActionReplacesInputAction ? 
0.0 : inputActionButtonAlpha) if rightButtonsOffsetX != 0.0 { if hasLikeAction { @@ -1699,8 +1760,10 @@ public final class MessageInputPanelComponent: Component { } var fieldBackgroundIsDark = false - if component.style == .media { - + if component.useGrayBackground { + fieldBackgroundIsDark = false + } else if component.style == .media { + fieldBackgroundIsDark = true } else if self.textFieldExternalState.hasText && component.alwaysDarkWhenHasText { fieldBackgroundIsDark = true } else if isEditing || component.style == .editor { diff --git a/submodules/TelegramUI/Components/PeerInfo/PeerInfoScreen/Sources/PeerInfoScreen.swift b/submodules/TelegramUI/Components/PeerInfo/PeerInfoScreen/Sources/PeerInfoScreen.swift index 51531704cd..902500d680 100644 --- a/submodules/TelegramUI/Components/PeerInfo/PeerInfoScreen/Sources/PeerInfoScreen.swift +++ b/submodules/TelegramUI/Components/PeerInfo/PeerInfoScreen/Sources/PeerInfoScreen.swift @@ -9971,6 +9971,7 @@ final class PeerInfoScreenNode: ViewControllerTracingNode, PeerInfoScreenNodePro let controller = self.context.sharedContext.makeStoryMediaPickerScreen( context: self.context, isDark: false, + forCollage: false, getSourceRect: { return .zero }, completion: { [weak self] result, transitionView, transitionRect, transitionImage, transitionOut, dismissed in guard let self else { diff --git a/submodules/TelegramUI/Components/PeerInfo/PeerInfoVisualMediaPaneNode/Sources/PeerInfoStoryPaneNode.swift b/submodules/TelegramUI/Components/PeerInfo/PeerInfoVisualMediaPaneNode/Sources/PeerInfoStoryPaneNode.swift index f3c14f0fba..0273a7f3ac 100644 --- a/submodules/TelegramUI/Components/PeerInfo/PeerInfoVisualMediaPaneNode/Sources/PeerInfoStoryPaneNode.swift +++ b/submodules/TelegramUI/Components/PeerInfo/PeerInfoVisualMediaPaneNode/Sources/PeerInfoStoryPaneNode.swift @@ -2543,15 +2543,15 @@ public final class PeerInfoStoryPaneNode: ASDisplayNode, PeerInfoPaneNode, ASScr } } - guard let controller = MediaEditorScreen.makeEditStoryController( + guard let controller = MediaEditorScreenImpl.makeEditStoryController( context: self.context, peer: peer, storyItem: item, videoPlaybackPosition: nil, cover: false, repost: false, - transitionIn: .gallery(MediaEditorScreen.TransitionIn.GalleryTransitionIn(sourceView: self.itemGrid.view, sourceRect: foundItemLayer?.frame ?? .zero, sourceImage: sourceImage)), - transitionOut: MediaEditorScreen.TransitionOut(destinationView: self.itemGrid.view, destinationRect: foundItemLayer?.frame ?? .zero, destinationCornerRadius: 0.0), + transitionIn: .gallery(MediaEditorScreenImpl.TransitionIn.GalleryTransitionIn(sourceView: self.itemGrid.view, sourceRect: foundItemLayer?.frame ?? .zero, sourceImage: sourceImage)), + transitionOut: MediaEditorScreenImpl.TransitionOut(destinationView: self.itemGrid.view, destinationRect: foundItemLayer?.frame ?? 
.zero, destinationCornerRadius: 0.0), update: { [weak self] disposable in guard let self else { return diff --git a/submodules/TelegramUI/Components/Resources/FetchVideoMediaResource/Sources/FetchVideoMediaResource.swift b/submodules/TelegramUI/Components/Resources/FetchVideoMediaResource/Sources/FetchVideoMediaResource.swift index 357e5a00ad..eadb888ff6 100644 --- a/submodules/TelegramUI/Components/Resources/FetchVideoMediaResource/Sources/FetchVideoMediaResource.swift +++ b/submodules/TelegramUI/Components/Resources/FetchVideoMediaResource/Sources/FetchVideoMediaResource.swift @@ -245,7 +245,6 @@ public func fetchVideoLibraryMediaResource(postbox: Postbox, resource: VideoLibr let configuration = recommendedVideoExportConfiguration(values: mediaEditorValues, duration: 5.0, image: true, frameRate: 30.0) let videoExport = MediaEditorVideoExport(postbox: postbox, subject: .image(image: image), configuration: configuration, outputPath: tempFile.path) - videoExport.start() let statusDisposable = videoExport.status.start(next: { status in switch status { @@ -349,7 +348,6 @@ public func fetchVideoLibraryMediaResource(postbox: Postbox, resource: VideoLibr let duration: Double = avAsset.duration.seconds let configuration = recommendedVideoExportConfiguration(values: mediaEditorValues, duration: duration, frameRate: 30.0) let videoExport = MediaEditorVideoExport(postbox: postbox, subject: .video(asset: avAsset, isStory: isStory), configuration: configuration, outputPath: tempFile.path) - videoExport.start() let statusDisposable = videoExport.status.start(next: { status in switch status { @@ -554,7 +552,6 @@ public func fetchLocalFileVideoMediaResource(postbox: Postbox, resource: LocalFi let configuration = recommendedVideoExportConfiguration(values: mediaEditorValues, duration: duration, frameRate: 30.0) let videoExport = MediaEditorVideoExport(postbox: postbox, subject: subject, configuration: configuration, outputPath: tempFile.path) - videoExport.start() let statusDisposable = videoExport.status.start(next: { status in switch status { @@ -900,6 +897,7 @@ private extension MediaEditorValues { additionalVideoTrimRange: nil, additionalVideoOffset: nil, additionalVideoVolume: nil, + collage: [], nightTheme: false, drawing: nil, maskDrawing: nil, @@ -910,6 +908,7 @@ private extension MediaEditorValues { audioTrackOffset: nil, audioTrackVolume: nil, audioTrackSamples: nil, + collageTrackSamples: nil, coverImageTimestamp: nil, qualityPreset: qualityPreset ) @@ -1044,6 +1043,7 @@ private extension MediaEditorValues { additionalVideoTrimRange: nil, additionalVideoOffset: nil, additionalVideoVolume: nil, + collage: [], nightTheme: false, drawing: drawing, maskDrawing: nil, @@ -1054,6 +1054,7 @@ private extension MediaEditorValues { audioTrackOffset: nil, audioTrackVolume: nil, audioTrackSamples: nil, + collageTrackSamples: nil, coverImageTimestamp: nil, qualityPreset: qualityPreset ) diff --git a/submodules/TelegramUI/Components/Settings/BusinessIntroSetupScreen/BUILD b/submodules/TelegramUI/Components/Settings/BusinessIntroSetupScreen/BUILD index 024c651c5b..02989c42f4 100644 --- a/submodules/TelegramUI/Components/Settings/BusinessIntroSetupScreen/BUILD +++ b/submodules/TelegramUI/Components/Settings/BusinessIntroSetupScreen/BUILD @@ -41,7 +41,6 @@ swift_library( "//submodules/TelegramUI/Components/PeerAllowedReactionsScreen", "//submodules/TelegramUI/Components/EmojiActionIconComponent", "//submodules/TelegramUI/Components/TextFieldComponent", - "//submodules/TelegramUI/Components/CameraScreen", ], 
visibility = [ "//visibility:public", diff --git a/submodules/TelegramUI/Components/Settings/BusinessIntroSetupScreen/Sources/BusinessIntroSetupScreen.swift b/submodules/TelegramUI/Components/Settings/BusinessIntroSetupScreen/Sources/BusinessIntroSetupScreen.swift index 679b12f004..67d34c804d 100644 --- a/submodules/TelegramUI/Components/Settings/BusinessIntroSetupScreen/Sources/BusinessIntroSetupScreen.swift +++ b/submodules/TelegramUI/Components/Settings/BusinessIntroSetupScreen/Sources/BusinessIntroSetupScreen.swift @@ -22,7 +22,6 @@ import EntityKeyboard import PeerAllowedReactionsScreen import EmojiActionIconComponent import TextFieldComponent -import CameraScreen final class BusinessIntroSetupScreenComponent: Component { typealias EnvironmentType = ViewControllerComponentContainer.Environment diff --git a/submodules/TelegramUI/Components/Settings/PeerNameColorScreen/Sources/ChannelAppearanceScreen.swift b/submodules/TelegramUI/Components/Settings/PeerNameColorScreen/Sources/ChannelAppearanceScreen.swift index 8554d99355..412be719b1 100644 --- a/submodules/TelegramUI/Components/Settings/PeerNameColorScreen/Sources/ChannelAppearanceScreen.swift +++ b/submodules/TelegramUI/Components/Settings/PeerNameColorScreen/Sources/ChannelAppearanceScreen.swift @@ -617,7 +617,7 @@ final class ChannelAppearanceScreenComponent: Component { let level = boostStatus.level let requiredCustomWallpaperLevel = Int(BoostSubject.customWallpaper.requiredLevel(group: self.isGroup, context: component.context, configuration: premiumConfiguration)) - let controller = MediaPickerScreen(context: component.context, peer: nil, threadTitle: nil, chatLocation: nil, bannedSendPhotos: nil, bannedSendVideos: nil, subject: .assets(nil, .wallpaper)) + let controller = MediaPickerScreenImpl(context: component.context, peer: nil, threadTitle: nil, chatLocation: nil, bannedSendPhotos: nil, bannedSendVideos: nil, subject: .assets(nil, .wallpaper)) controller.customSelection = { [weak self] _, asset in guard let self, let asset = asset as? PHAsset else { return diff --git a/submodules/TelegramUI/Components/Settings/WallpaperGridScreen/Sources/ThemeGridController.swift b/submodules/TelegramUI/Components/Settings/WallpaperGridScreen/Sources/ThemeGridController.swift index 534452bab4..942def632c 100644 --- a/submodules/TelegramUI/Components/Settings/WallpaperGridScreen/Sources/ThemeGridController.swift +++ b/submodules/TelegramUI/Components/Settings/WallpaperGridScreen/Sources/ThemeGridController.swift @@ -231,7 +231,7 @@ public final class ThemeGridController: ViewController { } } - let controller = MediaPickerScreen(context: strongSelf.context, peer: nil, threadTitle: nil, chatLocation: nil, bannedSendPhotos: nil, bannedSendVideos: nil, subject: .assets(nil, .wallpaper)) + let controller = MediaPickerScreenImpl(context: strongSelf.context, peer: nil, threadTitle: nil, chatLocation: nil, bannedSendPhotos: nil, bannedSendVideos: nil, subject: .assets(nil, .wallpaper)) controller.customSelection = { [weak self] _, asset in guard let strongSelf = self, let asset = asset as? 
PHAsset else { return diff --git a/submodules/TelegramUI/Components/Stories/StoryContainerScreen/Sources/StoryItemSetContainerComponent.swift b/submodules/TelegramUI/Components/Stories/StoryContainerScreen/Sources/StoryItemSetContainerComponent.swift index 53974ed884..3029dac857 100644 --- a/submodules/TelegramUI/Components/Stories/StoryContainerScreen/Sources/StoryItemSetContainerComponent.swift +++ b/submodules/TelegramUI/Components/Stories/StoryContainerScreen/Sources/StoryItemSetContainerComponent.swift @@ -2937,6 +2937,7 @@ public final class StoryItemSetContainerComponent: Component { } self.sendMessageContext.presentAttachmentMenu(view: self, subject: .default) }, + attachmentButtonMode: component.slice.effectivePeer.isService ? nil : .attach, myReaction: component.slice.item.storyItem.myReaction.flatMap { value -> MessageInputPanelComponent.MyReaction? in var centerAnimation: TelegramMediaFile? var animationFileId: Int64? @@ -3007,6 +3008,7 @@ public final class StoryItemSetContainerComponent: Component { } self.performMoreAction(sourceView: sourceView, gesture: gesture) }, + presentCaptionPositionTooltip: nil, presentVoiceMessagesUnavailableTooltip: { [weak self] view in guard let self, let component = self.component, self.voiceMessagesRestrictedTooltipController == nil else { return @@ -5403,7 +5405,7 @@ public final class StoryItemSetContainerComponent: Component { } } - guard let controller = MediaEditorScreen.makeEditStoryController( + guard let controller = MediaEditorScreenImpl.makeEditStoryController( context: component.context, peer: component.slice.effectivePeer, storyItem: component.slice.item.storyItem, diff --git a/submodules/TelegramUI/Components/Stories/StoryContainerScreen/Sources/StoryItemSetContainerViewSendMessage.swift b/submodules/TelegramUI/Components/Stories/StoryContainerScreen/Sources/StoryItemSetContainerViewSendMessage.swift index 932be07c77..1e832103f2 100644 --- a/submodules/TelegramUI/Components/Stories/StoryContainerScreen/Sources/StoryItemSetContainerViewSendMessage.swift +++ b/submodules/TelegramUI/Components/Stories/StoryContainerScreen/Sources/StoryItemSetContainerViewSendMessage.swift @@ -1483,7 +1483,7 @@ final class StoryItemSetContainerSendMessage { return } - let currentMediaController = Atomic(value: nil) + let currentMediaController = Atomic(value: nil) let currentFilesController = Atomic(value: nil) let currentLocationController = Atomic(value: nil) @@ -1869,11 +1869,11 @@ final class StoryItemSetContainerSendMessage { peer: EnginePeer, replyToMessageId: EngineMessage.Id?, replyToStoryId: StoryId?, - subject: MediaPickerScreen.Subject = .assets(nil, .default), + subject: MediaPickerScreenImpl.Subject = .assets(nil, .default), saveEditedPhotos: Bool, bannedSendPhotos: (Int32, Bool)?, bannedSendVideos: (Int32, Bool)?, - present: @escaping (MediaPickerScreen, AttachmentMediaPickerContext?) -> Void, + present: @escaping (MediaPickerScreenImpl, AttachmentMediaPickerContext?) -> Void, updateMediaPickerContext: @escaping (AttachmentMediaPickerContext?) 
-> Void, completion: @escaping ([Any], Bool, Int32?, ChatSendMessageActionSheetController.SendParameters?, @escaping (String) -> UIView?, @escaping () -> Void) -> Void ) { @@ -1881,7 +1881,7 @@ final class StoryItemSetContainerSendMessage { return } let theme = component.theme - let controller = MediaPickerScreen(context: component.context, updatedPresentationData: (component.context.sharedContext.currentPresentationData.with({ $0 }).withUpdated(theme: theme), component.context.sharedContext.presentationData |> map { $0.withUpdated(theme: theme) }), peer: peer, threadTitle: nil, chatLocation: .peer(id: peer.id), bannedSendPhotos: bannedSendPhotos, bannedSendVideos: bannedSendVideos, subject: subject, saveEditedPhotos: saveEditedPhotos) + let controller = MediaPickerScreenImpl(context: component.context, updatedPresentationData: (component.context.sharedContext.currentPresentationData.with({ $0 }).withUpdated(theme: theme), component.context.sharedContext.presentationData |> map { $0.withUpdated(theme: theme) }), peer: peer, threadTitle: nil, chatLocation: .peer(id: peer.id), bannedSendPhotos: bannedSendPhotos, bannedSendVideos: bannedSendVideos, subject: subject, saveEditedPhotos: saveEditedPhotos) let mediaPickerContext = controller.mediaPickerContext controller.openCamera = { [weak self, weak view] cameraView in guard let self, let view else { @@ -2195,7 +2195,7 @@ final class StoryItemSetContainerSendMessage { }) } - func presentMediaPasteboard(view: StoryItemSetContainerComponent.View, subjects: [MediaPickerScreen.Subject.Media]) { + func presentMediaPasteboard(view: StoryItemSetContainerComponent.View, subjects: [MediaPickerScreenImpl.Subject.Media]) { guard let component = view.component else { return } @@ -2315,7 +2315,7 @@ final class StoryItemSetContainerSendMessage { }) } - private func getCaptionPanelView(view: StoryItemSetContainerComponent.View, peer: EnginePeer, mediaPicker: MediaPickerScreen? = nil) -> TGCaptionPanelView? { + private func getCaptionPanelView(view: StoryItemSetContainerComponent.View, peer: EnginePeer, mediaPicker: MediaPickerScreenImpl? = nil) -> TGCaptionPanelView? 
{ guard let component = view.component else { return nil } diff --git a/submodules/TelegramUI/Components/VideoMessageCameraScreen/Sources/VideoMessageCameraScreen.swift b/submodules/TelegramUI/Components/VideoMessageCameraScreen/Sources/VideoMessageCameraScreen.swift index d8d2908bae..b2069496b0 100644 --- a/submodules/TelegramUI/Components/VideoMessageCameraScreen/Sources/VideoMessageCameraScreen.swift +++ b/submodules/TelegramUI/Components/VideoMessageCameraScreen/Sources/VideoMessageCameraScreen.swift @@ -1837,7 +1837,7 @@ public class VideoMessageCameraScreen: ViewController { guard let self else { return } - let values = MediaEditorValues(peerId: self.context.account.peerId, originalDimensions: dimensions, cropOffset: .zero, cropRect: CGRect(origin: .zero, size: dimensions.cgSize), cropScale: 1.0, cropRotation: 0.0, cropMirroring: false, cropOrientation: nil, gradientColors: nil, videoTrimRange: self.node.previewState?.trimRange, videoIsMuted: false, videoIsFullHd: false, videoIsMirrored: false, videoVolume: nil, additionalVideoPath: nil, additionalVideoIsDual: false, additionalVideoPosition: nil, additionalVideoScale: nil, additionalVideoRotation: nil, additionalVideoPositionChanges: [], additionalVideoTrimRange: nil, additionalVideoOffset: nil, additionalVideoVolume: nil, nightTheme: false, drawing: nil, maskDrawing: nil, entities: [], toolValues: [:], audioTrack: nil, audioTrackTrimRange: nil, audioTrackOffset: nil, audioTrackVolume: nil, audioTrackSamples: nil, coverImageTimestamp: nil, qualityPreset: .videoMessage) + let values = MediaEditorValues(peerId: self.context.account.peerId, originalDimensions: dimensions, cropOffset: .zero, cropRect: CGRect(origin: .zero, size: dimensions.cgSize), cropScale: 1.0, cropRotation: 0.0, cropMirroring: false, cropOrientation: nil, gradientColors: nil, videoTrimRange: self.node.previewState?.trimRange, videoIsMuted: false, videoIsFullHd: false, videoIsMirrored: false, videoVolume: nil, additionalVideoPath: nil, additionalVideoIsDual: false, additionalVideoPosition: nil, additionalVideoScale: nil, additionalVideoRotation: nil, additionalVideoPositionChanges: [], additionalVideoTrimRange: nil, additionalVideoOffset: nil, additionalVideoVolume: nil, collage: [], nightTheme: false, drawing: nil, maskDrawing: nil, entities: [], toolValues: [:], audioTrack: nil, audioTrackTrimRange: nil, audioTrackOffset: nil, audioTrackVolume: nil, audioTrackSamples: nil, collageTrackSamples: nil, coverImageTimestamp: nil, qualityPreset: .videoMessage) var resourceAdjustments: VideoMediaResourceAdjustments? = nil if let valuesData = try? 
JSONEncoder().encode(values) { diff --git a/submodules/TelegramUI/Images.xcassets/Media Editor/Timeline.imageset/Contents.json b/submodules/TelegramUI/Images.xcassets/Media Editor/Timeline.imageset/Contents.json new file mode 100644 index 0000000000..5b38656cd5 --- /dev/null +++ b/submodules/TelegramUI/Images.xcassets/Media Editor/Timeline.imageset/Contents.json @@ -0,0 +1,12 @@ +{ + "images" : [ + { + "filename" : "Timeline_24.pdf", + "idiom" : "universal" + } + ], + "info" : { + "author" : "xcode", + "version" : 1 + } +} diff --git a/submodules/TelegramUI/Images.xcassets/Media Editor/Timeline.imageset/Timeline_24.pdf b/submodules/TelegramUI/Images.xcassets/Media Editor/Timeline.imageset/Timeline_24.pdf new file mode 100644 index 0000000000..db0173d45a Binary files /dev/null and b/submodules/TelegramUI/Images.xcassets/Media Editor/Timeline.imageset/Timeline_24.pdf differ diff --git a/submodules/TelegramUI/Sources/Chat/ChatControllerOpenStorySharing.swift b/submodules/TelegramUI/Sources/Chat/ChatControllerOpenStorySharing.swift index 20b2ba01a1..91548dcfb2 100644 --- a/submodules/TelegramUI/Sources/Chat/ChatControllerOpenStorySharing.swift +++ b/submodules/TelegramUI/Sources/Chat/ChatControllerOpenStorySharing.swift @@ -30,7 +30,7 @@ import MediaEditorScreen extension ChatControllerImpl { func openStorySharing(messages: [Message]) { let context = self.context - let subject: Signal = .single(.message(messages.map { $0.id })) + let subject: Signal = .single(.message(messages.map { $0.id })) let externalState = MediaEditorTransitionOutExternalState( storyTarget: nil, @@ -39,7 +39,7 @@ extension ChatControllerImpl { transitionOut: nil ) - let controller = MediaEditorScreen( + let controller = MediaEditorScreenImpl( context: context, mode: .storyEditor, subject: subject, diff --git a/submodules/TelegramUI/Sources/Chat/ChatControllerPaste.swift b/submodules/TelegramUI/Sources/Chat/ChatControllerPaste.swift index 9e251e8af9..3bce455ff0 100644 --- a/submodules/TelegramUI/Sources/Chat/ChatControllerPaste.swift +++ b/submodules/TelegramUI/Sources/Chat/ChatControllerPaste.swift @@ -13,7 +13,7 @@ import MediaEditor import ChatEntityKeyboardInputNode extension ChatControllerImpl { - func displayPasteMenu(_ subjects: [MediaPickerScreen.Subject.Media]) { + func displayPasteMenu(_ subjects: [MediaPickerScreenImpl.Subject.Media]) { let _ = (self.context.sharedContext.accountManager.transaction { transaction -> GeneratedMediaStoreSettings in let entry = transaction.getSharedData(ApplicationSpecificSharedDataKeys.generatedMediaStoreSettings)?.get(GeneratedMediaStoreSettings.self) return entry ?? 
GeneratedMediaStoreSettings.defaultSettings @@ -183,6 +183,7 @@ extension ChatControllerImpl { additionalVideoTrimRange: nil, additionalVideoOffset: nil, additionalVideoVolume: nil, + collage: [], nightTheme: false, drawing: nil, maskDrawing: blackImage, @@ -193,6 +194,7 @@ extension ChatControllerImpl { audioTrackOffset: nil, audioTrackVolume: nil, audioTrackSamples: nil, + collageTrackSamples: nil, coverImageTimestamp: nil, qualityPreset: nil ) @@ -206,7 +208,6 @@ extension ChatControllerImpl { configuration: configuration, outputPath: path ) - videoExport.start() let _ = (videoExport.status |> deliverOnMainQueue).startStandalone(next: { [weak self] status in diff --git a/submodules/TelegramUI/Sources/ChatControllerOpenAttachmentMenu.swift b/submodules/TelegramUI/Sources/ChatControllerOpenAttachmentMenu.swift index 693a426bec..8d154d3a89 100644 --- a/submodules/TelegramUI/Sources/ChatControllerOpenAttachmentMenu.swift +++ b/submodules/TelegramUI/Sources/ChatControllerOpenAttachmentMenu.swift @@ -269,7 +269,7 @@ extension ChatControllerImpl { let inputText = strongSelf.presentationInterfaceState.interfaceState.effectiveInputState.inputText - let currentMediaController = Atomic(value: nil) + let currentMediaController = Atomic(value: nil) let currentFilesController = Atomic(value: nil) let currentLocationController = Atomic(value: nil) @@ -1159,7 +1159,7 @@ extension ChatControllerImpl { self.present(actionSheet, in: .window(.root)) } - func presentMediaPicker(subject: MediaPickerScreen.Subject = .assets(nil, .default), saveEditedPhotos: Bool, bannedSendPhotos: (Int32, Bool)?, bannedSendVideos: (Int32, Bool)?, present: @escaping (MediaPickerScreen, AttachmentMediaPickerContext?) -> Void, updateMediaPickerContext: @escaping (AttachmentMediaPickerContext?) -> Void, completion: @escaping ([Any], Bool, Int32?, ChatSendMessageActionSheetController.SendParameters?, @escaping (String) -> UIView?, @escaping () -> Void) -> Void) { + func presentMediaPicker(subject: MediaPickerScreenImpl.Subject = .assets(nil, .default), saveEditedPhotos: Bool, bannedSendPhotos: (Int32, Bool)?, bannedSendVideos: (Int32, Bool)?, present: @escaping (MediaPickerScreenImpl, AttachmentMediaPickerContext?) -> Void, updateMediaPickerContext: @escaping (AttachmentMediaPickerContext?) -> Void, completion: @escaping ([Any], Bool, Int32?, ChatSendMessageActionSheetController.SendParameters?, @escaping (String) -> UIView?, @escaping () -> Void) -> Void) { var isScheduledMessages = false if case .scheduledMessages = self.presentationInterfaceState.subject { isScheduledMessages = true @@ -1168,7 +1168,7 @@ extension ChatControllerImpl { if let cachedData = self.peerView?.cachedData as? CachedChannelData, cachedData.flags.contains(.paidMediaAllowed) { paidMediaAllowed = true } - let controller = MediaPickerScreen( + let controller = MediaPickerScreenImpl( context: self.context, updatedPresentationData: self.updatedPresentationData, peer: (self.presentationInterfaceState.renderedPeer?.peer).flatMap(EnginePeer.init), @@ -1767,19 +1767,19 @@ extension ChatControllerImpl { guard let self else { return } - let subject: Signal + let subject: Signal if let asset = result as? PHAsset { subject = .single(.asset(asset)) } else if let image = result as? UIImage { - subject = .single(.image(image, PixelDimensions(image.size), nil, .bottomRight)) - } else if let result = result as? 
+                subject = .single(.image(image: image, dimensions: PixelDimensions(image.size), additionalImage: nil, additionalImagePosition: .bottomRight))
+            } else if let result = result as? Signal<CameraScreenImpl.Result, NoError> {
                 subject = result
-                |> map { value -> MediaEditorScreen.Subject? in
+                |> map { value -> MediaEditorScreenImpl.Subject? in
                     switch value {
                     case .pendingImage:
                         return nil
                     case let .image(image):
-                        return .image(image.image, PixelDimensions(image.image.size), nil, .topLeft)
+                        return .image(image: image.image, dimensions: PixelDimensions(image.image.size), additionalImage: nil, additionalImagePosition: .topLeft)
                     default:
                         return nil
                     }
@@ -1788,12 +1788,12 @@ extension ChatControllerImpl {
             subject = .single(.empty(PixelDimensions(width: 1080, height: 1920)))
         }
 
-        let editorController = MediaEditorScreen(
+        let editorController = MediaEditorScreenImpl(
             context: self.context,
             mode: .stickerEditor(mode: .generic),
             subject: subject,
             transitionIn: fromCamera ? .camera : transitionView.flatMap({ .gallery(
-                MediaEditorScreen.TransitionIn.GalleryTransitionIn(
+                MediaEditorScreenImpl.TransitionIn.GalleryTransitionIn(
                     sourceView: $0,
                     sourceRect: transitionRect,
                     sourceImage: transitionImage
@@ -1801,7 +1801,7 @@ extension ChatControllerImpl {
             ) }),
             transitionOut: { finished, isNew in
                 if !finished, let transitionView {
-                    return MediaEditorScreen.TransitionOut(
+                    return MediaEditorScreenImpl.TransitionOut(
                         destinationView: transitionView,
                         destinationRect: transitionView.bounds,
                         destinationCornerRadius: 0.0
@@ -1818,7 +1818,7 @@ extension ChatControllerImpl {
                         self?.enqueueStickerFile(file)
                     }
                 }
-            } as (MediaEditorScreen.Result, @escaping (@escaping () -> Void) -> Void) -> Void
+            } as (MediaEditorScreenImpl.Result, @escaping (@escaping () -> Void) -> Void) -> Void
         )
         editorController.cancelled = { _ in
             cancelled()
diff --git a/submodules/TelegramUI/Sources/ChatRecordingPreviewInputPanelNode.swift b/submodules/TelegramUI/Sources/ChatRecordingPreviewInputPanelNode.swift
index 23f8124bfc..a01348b9ce 100644
--- a/submodules/TelegramUI/Sources/ChatRecordingPreviewInputPanelNode.swift
+++ b/submodules/TelegramUI/Sources/ChatRecordingPreviewInputPanelNode.swift
@@ -333,6 +333,7 @@ final class ChatRecordingPreviewInputPanelNode: ChatInputPanelNode {
                     isMain: true
                 )
             ],
+            isCollage: false,
             positionUpdated: { _, _ in },
             trackTrimUpdated: { [weak self] _, start, end, updatedEnd, apply in
                 if let self {
diff --git a/submodules/TelegramUI/Sources/NavigateToChatController.swift b/submodules/TelegramUI/Sources/NavigateToChatController.swift
index b2b58e5eae..2d7cf17cbe 100644
--- a/submodules/TelegramUI/Sources/NavigateToChatController.swift
+++ b/submodules/TelegramUI/Sources/NavigateToChatController.swift
@@ -15,7 +15,6 @@ import AttachmentUI
 import ForumCreateTopicScreen
 import LegacyInstantVideoController
 import StoryContainerScreen
-import CameraScreen
 import MediaEditorScreen
 import ChatControllerInteraction
 import SavedMessagesScreen
diff --git a/submodules/TelegramUI/Sources/SharedAccountContext.swift b/submodules/TelegramUI/Sources/SharedAccountContext.swift
index 0eb67abd7a..20b2179546 100644
--- a/submodules/TelegramUI/Sources/SharedAccountContext.swift
+++ b/submodules/TelegramUI/Sources/SharedAccountContext.swift
@@ -2560,21 +2560,21 @@ public final class SharedAccountContextImpl: SharedAccountContext {
     }
 
     public func makeBotPreviewEditorScreen(context: AccountContext, source: Any?, target: Stories.PendingTarget, transitionArguments: (UIView, CGRect, UIImage?)?, transitionOut: @escaping () -> BotPreviewEditorTransitionOut?, externalState: MediaEditorTransitionOutExternalState, completion: @escaping (MediaEditorScreenResult, @escaping (@escaping () -> Void) -> Void) -> Void, cancelled: @escaping () -> Void) -> ViewController {
-        let subject: Signal<MediaEditorScreen.Subject?, NoError>
+        let subject: Signal<MediaEditorScreenImpl.Subject?, NoError>
         if let asset = source as? PHAsset {
             subject = .single(.asset(asset))
         } else if let image = source as? UIImage {
-            subject = .single(.image(image, PixelDimensions(image.size), nil, .bottomRight))
+            subject = .single(.image(image: image, dimensions: PixelDimensions(image.size), additionalImage: nil, additionalImagePosition: .bottomRight))
         } else {
             subject = .single(.empty(PixelDimensions(width: 1080, height: 1920)))
         }
 
-        let editorController = MediaEditorScreen(
+        let editorController = MediaEditorScreenImpl(
             context: context,
             mode: .botPreview,
             subject: subject,
             customTarget: nil,
             transitionIn: transitionArguments.flatMap { .gallery(
-                MediaEditorScreen.TransitionIn.GalleryTransitionIn(
+                MediaEditorScreenImpl.TransitionIn.GalleryTransitionIn(
                     sourceView: $0.0,
                     sourceRect: $0.1,
                     sourceImage: $0.2
@@ -2582,13 +2582,13 @@ public final class SharedAccountContextImpl: SharedAccountContext {
             ) },
             transitionOut: { finished, isNew in
                 if !finished, let transitionArguments {
-                    return MediaEditorScreen.TransitionOut(
+                    return MediaEditorScreenImpl.TransitionOut(
                         destinationView: transitionArguments.0,
                         destinationRect: transitionArguments.0.bounds,
                         destinationCornerRadius: 0.0
                     )
                 } else if finished, let transitionOut = transitionOut(), let destinationView = transitionOut.destinationView {
-                    return MediaEditorScreen.TransitionOut(
+                    return MediaEditorScreenImpl.TransitionOut(
                         destinationView: destinationView,
                         destinationRect: transitionOut.destinationRect,
                         destinationCornerRadius: transitionOut.destinationCornerRadius,
@@ -2598,7 +2598,7 @@ public final class SharedAccountContextImpl: SharedAccountContext {
                 return nil
             }, completion: { result, commit in
                 completion(result, commit)
-            } as (MediaEditorScreen.Result, @escaping (@escaping () -> Void) -> Void) -> Void
+            } as (MediaEditorScreenImpl.Result, @escaping (@escaping () -> Void) -> Void) -> Void
         )
         editorController.cancelled = { _ in
             cancelled()
@@ -2607,8 +2607,8 @@ public final class SharedAccountContextImpl: SharedAccountContext {
     }
 
     public func makeStickerEditorScreen(context: AccountContext, source: Any?, intro: Bool, transitionArguments: (UIView, CGRect, UIImage?)?, completion: @escaping (TelegramMediaFile, [String], @escaping () -> Void) -> Void, cancelled: @escaping () -> Void) -> ViewController {
-        let subject: Signal<MediaEditorScreen.Subject?, NoError>
-        var mode: MediaEditorScreen.Mode.StickerEditorMode
+        let subject: Signal<MediaEditorScreenImpl.Subject?, NoError>
+        var mode: MediaEditorScreenImpl.Mode.StickerEditorMode
         var fromCamera = false
         if let (file, emoji) = source as? (TelegramMediaFile, [String]) {
             subject = .single(.sticker(file, emoji))
@@ -2617,16 +2617,16 @@ public final class SharedAccountContextImpl: SharedAccountContext {
             subject = .single(.asset(asset))
             mode = .addingToPack
         } else if let image = source as? UIImage {
-            subject = .single(.image(image, PixelDimensions(image.size), nil, .bottomRight))
+            subject = .single(.image(image: image, dimensions: PixelDimensions(image.size), additionalImage: nil, additionalImagePosition: .bottomRight))
             mode = .addingToPack
-        } else if let source = source as? Signal<CameraScreen.Result, NoError> {
+        } else if let source = source as? Signal<CameraScreenImpl.Result, NoError> {
             subject = source
-            |> map { value -> MediaEditorScreen.Subject? in
+            |> map { value -> MediaEditorScreenImpl.Subject? in
                 switch value {
                 case .pendingImage:
                     return nil
                 case let .image(image):
-                    return .image(image.image, PixelDimensions(image.image.size), nil, .topLeft)
+                    return .image(image: image.image, dimensions: PixelDimensions(image.image.size), additionalImage: nil, additionalImagePosition: .topLeft)
                 default:
                     return nil
                 }
@@ -2640,12 +2640,12 @@ public final class SharedAccountContextImpl: SharedAccountContext {
         if intro {
             mode = .businessIntro
         }
-        let editorController = MediaEditorScreen(
+        let editorController = MediaEditorScreenImpl(
             context: context,
             mode: .stickerEditor(mode: mode),
             subject: subject,
             transitionIn: fromCamera ? .camera : transitionArguments.flatMap { .gallery(
-                MediaEditorScreen.TransitionIn.GalleryTransitionIn(
+                MediaEditorScreenImpl.TransitionIn.GalleryTransitionIn(
                     sourceView: $0.0,
                     sourceRect: $0.1,
                     sourceImage: $0.2
@@ -2653,7 +2653,7 @@ public final class SharedAccountContextImpl: SharedAccountContext {
             ) },
             transitionOut: { finished, isNew in
                 if !finished, let transitionArguments {
-                    return MediaEditorScreen.TransitionOut(
+                    return MediaEditorScreenImpl.TransitionOut(
                         destinationView: transitionArguments.0,
                         destinationRect: transitionArguments.0.bounds,
                         destinationCornerRadius: 0.0
@@ -2666,7 +2666,7 @@ public final class SharedAccountContextImpl: SharedAccountContext {
                     commit({})
                 })
             }
-            } as (MediaEditorScreen.Result, @escaping (@escaping () -> Void) -> Void) -> Void
+            } as (MediaEditorScreenImpl.Result, @escaping (@escaping () -> Void) -> Void) -> Void
         )
         editorController.cancelled = { _ in
             cancelled()
@@ -2675,15 +2675,15 @@ public final class SharedAccountContextImpl: SharedAccountContext {
     }
 
     public func makeStoryMediaEditorScreen(context: AccountContext, source: Any?, text: String?, link: (url: String, name: String?)?, completion: @escaping (MediaEditorScreenResult, @escaping (@escaping () -> Void) -> Void) -> Void) -> ViewController {
-        let subject: Signal<MediaEditorScreen.Subject?, NoError>
+        let subject: Signal<MediaEditorScreenImpl.Subject?, NoError>
         if let image = source as? UIImage {
-            subject = .single(.image(image, PixelDimensions(image.size), nil, .bottomRight))
+            subject = .single(.image(image: image, dimensions: PixelDimensions(image.size), additionalImage: nil, additionalImagePosition: .bottomRight))
         } else if let path = source as? String {
-            subject = .single(.video(path, nil, false, nil, nil, PixelDimensions(width: 1080, height: 1920), 0.0, [], .bottomRight))
+            subject = .single(.video(videoPath: path, thumbnail: nil, mirror: false, additionalVideoPath: nil, additionalThumbnail: nil, dimensions: PixelDimensions(width: 1080, height: 1920), duration: 0.0, videoPositionChanges: [], additionalVideoPosition: .bottomRight))
         } else {
             subject = .single(.empty(PixelDimensions(width: 1080, height: 1920)))
         }
 
-        let editorController = MediaEditorScreen(
+        let editorController = MediaEditorScreenImpl(
             context: context,
             mode: .storyEditor,
             subject: subject,
@@ -2695,7 +2695,7 @@ public final class SharedAccountContextImpl: SharedAccountContext {
                 return nil
             }, completion: { result, commit in
                 completion(result, commit)
-            } as (MediaEditorScreen.Result, @escaping (@escaping () -> Void) -> Void) -> Void
+            } as (MediaEditorScreenImpl.Result, @escaping (@escaping () -> Void) -> Void) -> Void
         )
 //        editorController.cancelled = { _ in
 //            cancelled()
@@ -2707,8 +2707,8 @@ public final class SharedAccountContextImpl: SharedAccountContext {
         return mediaPickerController(context: context, hasSearch: hasSearch, completion: completion)
     }
 
-    public func makeStoryMediaPickerScreen(context: AccountContext, isDark: Bool, getSourceRect: @escaping () -> CGRect, completion: @escaping (Any, UIView, CGRect, UIImage?, @escaping (Bool?) -> (UIView, CGRect)?, @escaping () -> Void) -> Void, dismissed: @escaping () -> Void, groupsPresented: @escaping () -> Void) -> ViewController {
-        return storyMediaPickerController(context: context, isDark: isDark, getSourceRect: getSourceRect, completion: completion, dismissed: dismissed, groupsPresented: groupsPresented)
+    public func makeStoryMediaPickerScreen(context: AccountContext, isDark: Bool, forCollage: Bool, getSourceRect: @escaping () -> CGRect, completion: @escaping (Any, UIView, CGRect, UIImage?, @escaping (Bool?) -> (UIView, CGRect)?, @escaping () -> Void) -> Void, dismissed: @escaping () -> Void, groupsPresented: @escaping () -> Void) -> ViewController {
+        return storyMediaPickerController(context: context, isDark: isDark, forCollage: forCollage, getSourceRect: getSourceRect, completion: completion, dismissed: dismissed, groupsPresented: groupsPresented)
     }
 
     public func makeStickerMediaPickerScreen(context: AccountContext, getSourceRect: @escaping () -> CGRect?, completion: @escaping (Any?, UIView?, CGRect, UIImage?, Bool, @escaping (Bool?) -> (UIView, CGRect)?, @escaping () -> Void) -> Void, dismissed: @escaping () -> Void) -> ViewController {
diff --git a/submodules/TelegramUI/Sources/TelegramRootController.swift b/submodules/TelegramUI/Sources/TelegramRootController.swift
index 291257a38b..8b285bf426 100644
--- a/submodules/TelegramUI/Sources/TelegramRootController.swift
+++ b/submodules/TelegramUI/Sources/TelegramRootController.swift
@@ -319,12 +319,12 @@ public final class TelegramRootController: NavigationController, TelegramRootCon
         var returnToCameraImpl: (() -> Void)?
         var dismissCameraImpl: (() -> Void)?
         var showDraftTooltipImpl: (() -> Void)?
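Note how the hunks above migrate `MediaEditorScreenImpl.Subject` construction from positional payloads (`.image(image, PixelDimensions(image.size), nil, .bottomRight)`) to labeled associated values. A rough sketch of why that matters at wide call sites such as the nine-argument `.video` case, using hypothetical stand-in types rather than the real declarations:

```swift
import UIKit

// Hypothetical, heavily simplified stand-in for MediaEditorScreenImpl.Subject.
enum Subject {
    enum PIPPosition {
        case topLeft, topRight, bottomLeft, bottomRight
    }

    case image(image: UIImage, dimensions: CGSize, additionalImage: UIImage?, additionalImagePosition: PIPPosition)
    case video(videoPath: String, thumbnail: UIImage?, mirror: Bool, duration: Double)
    case empty(dimensions: CGSize)
}

// Labels make the call site self-describing and stop same-typed arguments
// (e.g. two UIImage? values, or a duration next to an offset) from being
// transposed silently.
let subject: Subject = .image(
    image: UIImage(),
    dimensions: CGSize(width: 1080, height: 1920),
    additionalImage: nil,
    additionalImagePosition: .bottomRight
)
```

Pattern matches still bind positionally (`case let .image(image, dimensions, _, position)` keeps compiling), which would explain why the hunks in this diff mostly touch construction sites rather than switches.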
-        let cameraController = CameraScreen(
+        let cameraController = CameraScreenImpl(
             context: context,
             mode: .story,
             transitionIn: transitionIn.flatMap {
                 if let sourceView = $0.sourceView {
-                    return CameraScreen.TransitionIn(
+                    return CameraScreenImpl.TransitionIn(
                         sourceView: sourceView,
                         sourceRect: $0.sourceRect,
                         sourceCornerRadius: $0.sourceCornerRadius
@@ -335,7 +335,7 @@ public final class TelegramRootController: NavigationController, TelegramRootCon
             },
             transitionOut: { finished in
                 if let transitionOut = (externalState.transitionOut ?? transitionOut)(finished ? externalState.storyTarget : nil, externalState.isPeerArchived), let destinationView = transitionOut.destinationView {
-                    return CameraScreen.TransitionOut(
+                    return CameraScreenImpl.TransitionOut(
                         destinationView: destinationView,
                         destinationRect: transitionOut.destinationRect,
                         destinationCornerRadius: transitionOut.destinationCornerRadius,
@@ -346,9 +346,9 @@ public final class TelegramRootController: NavigationController, TelegramRootCon
                 }
             },
             completion: { result, resultTransition, dismissed in
-                let subject: Signal<MediaEditorScreen.Subject?, NoError> = result
-                |> map { value -> MediaEditorScreen.Subject? in
-                    func editorPIPPosition(_ position: CameraScreen.PIPPosition) -> MediaEditorScreen.PIPPosition {
+                let subject: Signal<MediaEditorScreenImpl.Subject?, NoError> = result
+                |> map { value -> MediaEditorScreenImpl.Subject? in
+                    func editorPIPPosition(_ position: CameraScreenImpl.PIPPosition) -> MediaEditorScreenImpl.PIPPosition {
                         switch position {
                         case .topLeft:
                             return .topLeft
@@ -364,9 +364,23 @@ public final class TelegramRootController: NavigationController, TelegramRootCon
                     case .pendingImage:
                         return nil
                     case let .image(image):
-                        return .image(image.image, PixelDimensions(image.image.size), image.additionalImage, editorPIPPosition(image.additionalImagePosition))
+                        return .image(image: image.image, dimensions: PixelDimensions(image.image.size), additionalImage: image.additionalImage, additionalImagePosition: editorPIPPosition(image.additionalImagePosition))
                     case let .video(video):
-                        return .video(video.videoPath, video.coverImage, video.mirror, video.additionalVideoPath, video.additionalCoverImage, video.dimensions, video.duration, video.positionChangeTimestamps, editorPIPPosition(video.additionalVideoPosition))
+                        return .video(videoPath: video.videoPath, thumbnail: video.coverImage, mirror: video.mirror, additionalVideoPath: video.additionalVideoPath, additionalThumbnail: video.additionalCoverImage, dimensions: video.dimensions, duration: video.duration, videoPositionChanges: video.positionChangeTimestamps, additionalVideoPosition: editorPIPPosition(video.additionalVideoPosition))
+                    case let .videoCollage(collage):
+                        func editorCollageItem(_ item: CameraScreenImpl.Result.VideoCollage.Item) -> MediaEditorScreenImpl.Subject.VideoCollageItem {
+                            let content: MediaEditorScreenImpl.Subject.VideoCollageItem.Content
+                            switch item.content {
+                            case let .image(image):
+                                content = .image(image)
+                            case let .video(path, duration):
+                                content = .video(path, duration)
+                            case let .asset(asset):
+                                content = .asset(asset)
+                            }
+                            return MediaEditorScreenImpl.Subject.VideoCollageItem(content: content, frame: item.frame)
+                        }
+                        return .videoCollage(items: collage.items.map { editorCollageItem($0) })
                     case let .asset(asset):
                         return .asset(asset)
                     case let .draft(draft):
@@ -374,10 +388,10 @@ public final class TelegramRootController: NavigationController, TelegramRootCon
                     }
                 }
 
-                var transitionIn: MediaEditorScreen.TransitionIn?
+                var transitionIn: MediaEditorScreenImpl.TransitionIn?
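The new `.videoCollage` branch above bridges the camera's collage items into the editor's mirrored `VideoCollageItem` type with an exhaustive switch. A self-contained sketch of that bridging pattern follows; the types are hypothetical stand-ins, not the real `CameraScreenImpl`/`MediaEditorScreenImpl` declarations:

```swift
import CoreGraphics

// Stand-ins for the two modules' parallel collage item types.
enum CameraCollageContent {
    case image(String)            // image path, simplified from UIImage
    case video(String, Double)    // video path + duration
}

struct CameraCollageItem {
    let content: CameraCollageContent
    let frame: CGRect
}

enum EditorCollageContent {
    case image(String)
    case video(String, Double)
}

struct EditorCollageItem {
    let content: EditorCollageContent
    let frame: CGRect
}

// Exhaustive switch: if the camera grows a new content case, every
// bridge like this one fails to compile until the editor handles it.
func editorItem(_ item: CameraCollageItem) -> EditorCollageItem {
    let content: EditorCollageContent
    switch item.content {
    case let .image(path):
        content = .image(path)
    case let .video(path, duration):
        content = .video(path, duration)
    }
    return EditorCollageItem(content: content, frame: item.frame)
}

let editorItems = [CameraCollageItem(content: .video("a.mp4", 3.0), frame: .zero)].map(editorItem)
```

Keeping two separate enums rather than sharing one type preserves the camera/editor module boundary; the cost is exactly one mapping function like the sketch above.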
                 if let resultTransition, let sourceView = resultTransition.sourceView {
                     transitionIn = .gallery(
-                        MediaEditorScreen.TransitionIn.GalleryTransitionIn(
+                        MediaEditorScreenImpl.TransitionIn.GalleryTransitionIn(
                             sourceView: sourceView,
                             sourceRect: resultTransition.sourceRect,
                             sourceImage: resultTransition.sourceImage
@@ -398,7 +412,7 @@ public final class TelegramRootController: NavigationController, TelegramRootCon
                     }
                 }
 
-                let controller = MediaEditorScreen(
+                let controller = MediaEditorScreenImpl(
                     context: context,
                     mode: .storyEditor,
                     subject: subject,
@@ -406,14 +420,14 @@ public final class TelegramRootController: NavigationController, TelegramRootCon
                     transitionIn: transitionIn,
                     transitionOut: { finished, isNew in
                         if finished, let transitionOut = (externalState.transitionOut ?? transitionOut)(externalState.storyTarget, false), let destinationView = transitionOut.destinationView {
-                            return MediaEditorScreen.TransitionOut(
+                            return MediaEditorScreenImpl.TransitionOut(
                                 destinationView: destinationView,
                                 destinationRect: transitionOut.destinationRect,
                                 destinationCornerRadius: transitionOut.destinationCornerRadius,
                                 completion: transitionOut.completion
                             )
                         } else if !finished, let resultTransition, let (destinationView, destinationRect) = resultTransition.transitionOut(isNew) {
-                            return MediaEditorScreen.TransitionOut(
+                            return MediaEditorScreenImpl.TransitionOut(
                                 destinationView: destinationView,
                                 destinationRect: destinationRect,
                                 destinationCornerRadius: 0.0,
@@ -469,7 +483,7 @@ public final class TelegramRootController: NavigationController, TelegramRootCon
                             dismissCameraImpl?()
                         })
                     }
-                } as (MediaEditorScreen.Result, @escaping (@escaping () -> Void) -> Void) -> Void
+                } as (MediaEditorScreenImpl.Result, @escaping (@escaping () -> Void) -> Void) -> Void
                 )
                 controller.cancelled = { showDraftTooltip in
                     if showDraftTooltip {
@@ -525,7 +539,7 @@ public final class TelegramRootController: NavigationController, TelegramRootCon
     }
 
     public func proceedWithStoryUpload(target: Stories.PendingTarget, result: MediaEditorScreenResult, existingMedia: EngineMedia?, forwardInfo: Stories.PendingForwardInfo?, externalState: MediaEditorTransitionOutExternalState, commit: @escaping (@escaping () -> Void) -> Void) {
-        guard let result = result as? MediaEditorScreen.Result else {
+        guard let result = result as? MediaEditorScreenImpl.Result else {
             return
         }
         let context = self.context
@@ -734,7 +748,7 @@ public final class TelegramRootController: NavigationController, TelegramRootCon
 
 //Xcode 16
 #if canImport(ContactProvider)
-extension MediaEditorScreen.Result: @retroactive MediaEditorScreenResult {
+extension MediaEditorScreenImpl.Result: @retroactive MediaEditorScreenResult {
     public var target: Stories.PendingTarget {
         if let sendAsPeerId = self.options.sendAsPeerId {
             return .peer(sendAsPeerId)
@@ -744,7 +758,7 @@ extension MediaEditorScreen.Result: @retroactive MediaEditorScreenResult {
     }
 }
 #else
-extension MediaEditorScreen.Result: MediaEditorScreenResult {
+extension MediaEditorScreenImpl.Result: MediaEditorScreenResult {
     public var target: Stories.PendingTarget {
         if let sendAsPeerId = self.options.sendAsPeerId {
             return .peer(sendAsPeerId)
diff --git a/submodules/WebUI/Sources/WebAppMessagePreviewScreen.swift b/submodules/WebUI/Sources/WebAppMessagePreviewScreen.swift
index 62eb845139..5769326ba9 100644
--- a/submodules/WebUI/Sources/WebAppMessagePreviewScreen.swift
+++ b/submodules/WebUI/Sources/WebAppMessagePreviewScreen.swift
@@ -461,7 +461,7 @@ public final class WebAppMessagePreviewScreen: ViewControllerComponentContainer
     fileprivate func proceed() {
         let requestPeerType = self.preparedMessage.peerTypes.requestPeerTypes
-        let controller = self.context.sharedContext.makePeerSelectionController(PeerSelectionControllerParams(context: self.context, filter: [.excludeRecent, .doNotSearchMessages], requestPeerType: requestPeerType, hasContactSelector: false, multipleSelection: true, immediatelyActivateMultipleSelection: true))
+        let controller = self.context.sharedContext.makePeerSelectionController(PeerSelectionControllerParams(context: self.context, filter: [.excludeRecent, .doNotSearchMessages], requestPeerType: requestPeerType, hasContactSelector: false, multipleSelection: true, selectForumThreads: true, immediatelyActivateMultipleSelection: true))
         controller.multiplePeersSelected = { [weak self, weak controller] peers, _, _, _, _, _ in
             guard let self else {