[WIP] Story collage

commit d2b5476293 (parent 0794812bae)

@@ -777,6 +777,17 @@ public class MediaEditorTransitionOutExternalState {
}
}

public protocol CameraScreen: ViewController {

}

public protocol MediaEditorScreen: ViewController {
}

public protocol MediaPickerScreen: ViewController {
func dismissAnimated()
}

public protocol MediaEditorScreenResult {
var target: Stories.PendingTarget { get }
}

@@ -1015,7 +1026,7 @@ public protocol SharedAccountContext: AnyObject {
func makeStickerEditorScreen(context: AccountContext, source: Any?, intro: Bool, transitionArguments: (UIView, CGRect, UIImage?)?, completion: @escaping (TelegramMediaFile, [String], @escaping () -> Void) -> Void, cancelled: @escaping () -> Void) -> ViewController

func makeStickerMediaPickerScreen(context: AccountContext, getSourceRect: @escaping () -> CGRect?, completion: @escaping (Any?, UIView?, CGRect, UIImage?, Bool, @escaping (Bool?) -> (UIView, CGRect)?, @escaping () -> Void) -> Void, dismissed: @escaping () -> Void) -> ViewController
func makeStoryMediaPickerScreen(context: AccountContext, isDark: Bool, getSourceRect: @escaping () -> CGRect, completion: @escaping (Any, UIView, CGRect, UIImage?, @escaping (Bool?) -> (UIView, CGRect)?, @escaping () -> Void) -> Void, dismissed: @escaping () -> Void, groupsPresented: @escaping () -> Void) -> ViewController
func makeStoryMediaPickerScreen(context: AccountContext, isDark: Bool, forCollage: Bool, getSourceRect: @escaping () -> CGRect, completion: @escaping (Any, UIView, CGRect, UIImage?, @escaping (Bool?) -> (UIView, CGRect)?, @escaping () -> Void) -> Void, dismissed: @escaping () -> Void, groupsPresented: @escaping () -> Void) -> ViewController

func makeStickerPickerScreen(context: AccountContext, inputData: Promise<StickerPickerInput>, completion: @escaping (FileMediaReference) -> Void) -> ViewController

@@ -504,6 +504,7 @@ public class AttachmentTextInputPanelNode: ASDisplayNode, TGCaptionPanelView, AS
public var focusUpdated: ((Bool) -> Void)?
public var heightUpdated: ((Bool) -> Void)?
public var timerUpdated: ((NSNumber?) -> Void)?
public var captionIsAboveUpdated: ((Bool) -> Void)?

public func updateLayoutSize(_ size: CGSize, keyboardHeight: CGFloat, sideInset: CGFloat, animated: Bool) -> CGFloat {
guard let presentationInterfaceState = self.presentationInterfaceState else {

@@ -518,7 +519,7 @@ public class AttachmentTextInputPanelNode: ASDisplayNode, TGCaptionPanelView, AS
}
}

public func setTimeout(_ timeout: Int32, isVideo: Bool) {
public func setTimeout(_ timeout: Int32, isVideo: Bool, isCaptionAbove: Bool) {
}

public func animate(_ view: UIView, frame: CGRect) {

@@ -51,7 +51,7 @@ final class CameraDeviceContext {
let device = CameraDevice()
let input = CameraInput()
let output: CameraOutput

init(session: CameraSession, exclusive: Bool, additional: Bool, ciContext: CIContext, colorSpace: CGColorSpace, isRoundVideo: Bool = false) {
self.session = session
self.exclusive = exclusive

@@ -126,7 +126,7 @@ private final class CameraContext {
private let audioLevelPipe = ValuePipe<Float>()
fileprivate let modeChangePromise = ValuePromise<Camera.ModeChange>(.none)

var previewView: CameraPreviewView?
var videoOutput: CameraVideoOutput?

var simplePreviewView: CameraSimplePreviewView?
var secondaryPreviewView: CameraSimplePreviewView?

@@ -310,7 +310,7 @@ private final class CameraContext {

private var micLevelPeak: Int16 = 0
private var micLevelPeakCount = 0

private var isDualCameraEnabled: Bool?
public func setDualCameraEnabled(_ enabled: Bool, change: Bool = true) {
guard enabled != self.isDualCameraEnabled else {

@@ -378,6 +378,13 @@ private final class CameraContext {
guard let self, let mainDeviceContext = self.mainDeviceContext else {
return
}

if sampleBuffer.type == kCMMediaType_Video {
Queue.mainQueue().async {
self.videoOutput?.push(sampleBuffer)
}
}

let timestamp = CACurrentMediaTime()
if timestamp > self.lastSnapshotTimestamp + 2.5, !mainDeviceContext.output.isRecording || !self.savedSnapshot {
var front = false

@@ -696,6 +703,26 @@ public final class Camera {
public typealias ExposureMode = AVCaptureDevice.ExposureMode
public typealias FlashMode = AVCaptureDevice.FlashMode

public struct CollageGrid: Hashable {
public struct Row: Hashable {
public let columns: Int

public init(columns: Int) {
self.columns = columns
}
}

public let rows: [Row]

public init(rows: [Row]) {
self.rows = rows
}

public var count: Int {
return self.rows.reduce(0) { $0 + $1.columns }
}
}

public struct Configuration {
let preset: Preset
let position: Position
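
Note: a minimal usage sketch of the CollageGrid API added above (illustrative, not part of the commit; the 2x2 layout is an assumed configuration):

// Hypothetical example: a collage of two rows with two tiles each.
let grid = Camera.CollageGrid(rows: [
    Camera.CollageGrid.Row(columns: 2),
    Camera.CollageGrid.Row(columns: 2)
])
// `count` sums the columns across rows: 2 + 2 = 4 tiles.
assert(grid.count == 4)
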
@@ -975,16 +1002,19 @@ public final class Camera {
}
}

public func attachPreviewView(_ view: CameraPreviewView) {
self.previewView = view
let viewRef: Unmanaged<CameraPreviewView> = Unmanaged.passRetained(view)
public func setPreviewOutput(_ output: CameraVideoOutput?) {
let outputRef: Unmanaged<CameraVideoOutput>? = output.flatMap { Unmanaged.passRetained($0) }
self.queue.async {
if let context = self.contextRef?.takeUnretainedValue() {
context.previewView = viewRef.takeUnretainedValue()
viewRef.release()
if let outputRef {
context.videoOutput = outputRef.takeUnretainedValue()
outputRef.release()
} else {
context.videoOutput = nil
}
} else {
Queue.mainQueue().async {
viewRef.release()
outputRef?.release()
}
}
}
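
Note: setPreviewOutput above crosses from the caller's thread onto the camera queue, so it manually retains the output with Unmanaged before the hop and releases it on both paths. A simplified sketch of that hand-off pattern; Box, handOff, and worker are illustrative names, not from the commit:

import Foundation

final class Box {}

// Sketch: retain before the async hop, release once the consumer has
// taken its own strong reference (or on the fallback path).
func handOff(_ object: Box?, to worker: DispatchQueue, store: @escaping (Box?) -> Void) {
    let ref: Unmanaged<Box>? = object.flatMap { Unmanaged.passRetained($0) }
    worker.async {
        if let ref {
            store(ref.takeUnretainedValue()) // consumer keeps its own strong reference
            ref.release()                    // balances passRetained above
        } else {
            store(nil)
        }
    }
}
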
@@ -1108,3 +1138,15 @@ public struct CameraRecordingData {
public enum CameraRecordingError {
case audioInitializationError
}

public class CameraVideoOutput {
private let sink: (CMSampleBuffer) -> Void

public init(sink: @escaping (CMSampleBuffer) -> Void) {
self.sink = sink
}

func push(_ buffer: CMSampleBuffer) {
self.sink(buffer)
}
}
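
Note: a minimal sketch of how the new CameraVideoOutput is presumably wired up via setPreviewOutput (assumed usage, not from the commit; `camera` stands for an existing Camera instance):

import CoreMedia

// Every video sample buffer the camera captures is pushed into the sink;
// here we just log the frame's presentation timestamp.
let videoOutput = CameraVideoOutput(sink: { sampleBuffer in
    let time = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
    print("frame at \(time.seconds)s")
})
camera.setPreviewOutput(videoOutput)
// Detach when the collage preview is torn down:
camera.setPreviewOutput(nil)
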
@@ -1,6 +1,7 @@
import Foundation
import AVFoundation
import UIKit
import Display
import SwiftSignalKit
import CoreImage
import Vision

@@ -286,6 +287,19 @@ final class CameraOutput: NSObject {
}
}

#if targetEnvironment(simulator)
let image = generateImage(CGSize(width: 1080, height: 1920), opaque: true, scale: 1.0, rotatedContext: { size, context in
let colors: [UIColor] = [UIColor(rgb: 0xff00ff), UIColor(rgb: 0xff0000), UIColor(rgb: 0x00ffff), UIColor(rgb: 0x00ff00)]
if let randomColor = colors.randomElement() {
context.setFillColor(randomColor.cgColor)
}
context.fill(CGRect(origin: .zero, size: size))
})!
return .single(.began)
|> then(
.single(.finished(image, nil, CACurrentMediaTime())) |> delay(0.5, queue: Queue.concurrentDefaultQueue())
)
#else
let uniqueId = settings.uniqueID
let photoCapture = PhotoCaptureContext(ciContext: self.ciContext, settings: settings, orientation: orientation, mirror: mirror)
self.photoCaptureRequests[uniqueId] = photoCapture

@@ -295,6 +309,7 @@ final class CameraOutput: NSObject {
|> afterDisposed { [weak self] in
self?.photoCaptureRequests.removeValue(forKey: uniqueId)
}
#endif
}

var isRecording: Bool {

@@ -449,6 +449,12 @@ final class InnerTextSelectionTipContainerNode: ASDisplayNode {
self.targetSelectionIndex = nil
icon = nil
isUserInteractionEnabled = action != nil
case .collageReordering:
//TODO:localize
self.action = nil
self.text = "Hold and drag tiles to reorder them."
self.targetSelectionIndex = nil
icon = UIImage(bundleImageName: "Chat/Context Menu/Tip")
}

self.iconNode = ASImageNode()

@@ -2359,6 +2359,7 @@ public final class ContextController: ViewController, StandalonePresentableContr
case notificationTopicExceptions(text: String, action: (() -> Void)?)
case starsReactions(topCount: Int)
case videoProcessing
case collageReordering

public static func ==(lhs: Tip, rhs: Tip) -> Bool {
switch lhs {

@@ -2416,6 +2417,12 @@ public final class ContextController: ViewController, StandalonePresentableContr
} else {
return false
}
case .collageReordering:
if case .collageReordering = rhs {
return true
} else {
return false
}
}
}
}

@@ -103,6 +103,12 @@
- (bool)setPaintingData:(NSData *)data entitiesData:(NSData *)entitiesData image:(UIImage *)image stillImage:(UIImage *)stillImage forItem:(NSObject<TGMediaEditableItem> *)item dataUrl:(NSURL **)dataOutUrl entitiesDataUrl:(NSURL **)entitiesDataOutUrl imageUrl:(NSURL **)imageOutUrl forVideo:(bool)video;
- (void)clearPaintingData;

- (bool)isCaptionAbove;
- (SSignal *)captionAbove;
- (void)setCaptionAbove:(bool)captionAbove;

- (SSignal *)facesForItem:(NSObject<TGMediaEditableItem> *)item;
- (void)setFaces:(NSArray *)faces forItem:(NSObject<TGMediaEditableItem> *)item;

@@ -15,6 +15,7 @@
@property (nonatomic, assign) UIInterfaceOrientation interfaceOrientation;
@property (nonatomic, readonly) CGFloat keyboardHeight;
@property (nonatomic, assign) CGFloat contentAreaHeight;
@property (nonatomic, assign) UIEdgeInsets safeAreaInset;
@property (nonatomic, assign) bool allowEntities;

@property (nonatomic, copy) UIView *(^panelParentView)(void);

@@ -23,6 +24,7 @@
@property (nonatomic, copy) void (^finishedWithCaption)(NSAttributedString *caption);
@property (nonatomic, copy) void (^keyboardHeightChanged)(CGFloat keyboardHeight, NSTimeInterval duration, NSInteger animationCurve);
@property (nonatomic, copy) void (^timerUpdated)(NSNumber *timeout);
@property (nonatomic, copy) void (^captionIsAboveUpdated)(bool captionIsAbove);

- (void)createInputPanelIfNeeded;
- (void)beginEditing;

@@ -36,7 +38,7 @@
- (void)setCaption:(NSAttributedString *)caption animated:(bool)animated;
- (void)setCaptionPanelHidden:(bool)hidden animated:(bool)animated;

- (void)setTimeout:(int32_t)timeout isVideo:(bool)isVideo;
- (void)setTimeout:(int32_t)timeout isVideo:(bool)isVideo isCaptionAbove:(bool)isCaptionAbove;

- (void)updateLayoutWithFrame:(CGRect)frame edgeInsets:(UIEdgeInsets)edgeInsets animated:(bool)animated;

@@ -22,7 +22,7 @@

@property (nonatomic, readonly) UIView * _Nonnull view;

- (void)setTimeout:(int32_t)timeout isVideo:(bool)isVideo;
- (void)setTimeout:(int32_t)timeout isVideo:(bool)isVideo isCaptionAbove:(bool)isCaptionAbove;

- (NSAttributedString * _Nonnull)caption;
- (void)setCaption:(NSAttributedString * _Nullable)caption;

@@ -36,6 +36,7 @@
@property (nonatomic, copy) void(^ _Nullable focusUpdated)(BOOL focused);
@property (nonatomic, copy) void(^ _Nullable heightUpdated)(BOOL animated);
@property (nonatomic, copy) void(^ _Nullable timerUpdated)(NSNumber * _Nullable value);
@property (nonatomic, copy) void(^ _Nullable captionIsAboveUpdated)(BOOL value);

- (CGFloat)updateLayoutSize:(CGSize)size keyboardHeight:(CGFloat)keyboardHeight sideInset:(CGFloat)sideInset animated:(bool)animated;
- (CGFloat)baseHeight;

@@ -53,8 +53,9 @@
sumOfWeights += 2.0 * standardGaussianWeights[currentGaussianWeightIndex];
}

for (NSUInteger currentGaussianWeightIndex = 0; currentGaussianWeightIndex < blurRadius + 1; currentGaussianWeightIndex++)
for (NSUInteger currentGaussianWeightIndex = 0; currentGaussianWeightIndex < blurRadius + 1; currentGaussianWeightIndex++) {
standardGaussianWeights[currentGaussianWeightIndex] = standardGaussianWeights[currentGaussianWeightIndex] / sumOfWeights;
}

NSUInteger numberOfOptimizedOffsets = MIN(blurRadius / 2 + (blurRadius % 2), 7U);
GLfloat *optimizedGaussianOffsets = calloc(numberOfOptimizedOffsets, sizeof(GLfloat));

@@ -126,8 +126,11 @@
SPipe *_pricePipe;
SPipe *_fullSizePipe;
SPipe *_cropPipe;
SPipe *_captionAbovePipe;

NSAttributedString *_forcedCaption;

bool _captionAbove;
}
@end

@@ -196,6 +199,7 @@
_pricePipe = [[SPipe alloc] init];
_fullSizePipe = [[SPipe alloc] init];
_cropPipe = [[SPipe alloc] init];
_captionAbovePipe = [[SPipe alloc] init];
}
return self;
}

@@ -853,6 +857,28 @@
}
}

- (bool)isCaptionAbove {
return _captionAbove;
}

- (SSignal *)captionAbove
{
__weak TGMediaEditingContext *weakSelf = self;
SSignal *updateSignal = [_captionAbovePipe.signalProducer() map:^NSNumber *(NSNumber *update)
{
__strong TGMediaEditingContext *strongSelf = weakSelf;
return @(strongSelf->_captionAbove);
}];

return [[SSignal single:@(_captionAbove)] then:updateSignal];
}

- (void)setCaptionAbove:(bool)captionAbove
{
_captionAbove = captionAbove;
_captionAbovePipe.sink(@(captionAbove));
}

- (SSignal *)facesForItem:(NSObject<TGMediaEditableItem> *)item
{
NSString *itemId = [self _contextualIdForItemId:item.uniqueIdentifier];
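
Note: the captionAbove accessor above follows the usual "current value first, then updates" signal pattern: emit the stored flag immediately, then map every pipe event back to the latest value. A Swift sketch of the same pattern using SwiftSignalKit's ValuePromise (an assumed equivalent, not code from the commit):

import SwiftSignalKit

// ValuePromise stores the current value and replays it to new subscribers,
// which models the SPipe + stored bool pair in the Objective-C code above.
final class CaptionAboveState {
    private let value = ValuePromise<Bool>(false, ignoreRepeated: true)

    // Mirrors -setCaptionAbove: (store, then push through the pipe).
    func set(_ captionAbove: Bool) {
        self.value.set(captionAbove)
    }

    // Mirrors -captionAbove (current value, then subsequent updates).
    func signal() -> Signal<Bool, NoError> {
        return self.value.get()
    }
}
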
@@ -389,6 +389,14 @@
[strongSelf->_selectionContext setItem:(id<TGMediaSelectableItem>)galleryEditableItem.editableMediaItem selected:true animated:true sender:nil];
};

_captionMixin.captionIsAboveUpdated = ^(bool captionIsAbove) {
__strong TGMediaPickerGalleryInterfaceView *strongSelf = weakSelf;
if (strongSelf == nil)
return;

[strongSelf->_editingContext setCaptionAbove:captionIsAbove];
};

_captionMixin.stickersContext = stickersContext;
[_captionMixin createInputPanelIfNeeded];

@@ -818,6 +826,8 @@
{
id<TGMediaEditableItem> editableMediaItem = [galleryEditableItem editableMediaItem];

bool isCaptionAbove = galleryEditableItem.editingContext.isCaptionAbove;

__weak id<TGModernGalleryEditableItem> weakGalleryEditableItem = galleryEditableItem;
[_adjustmentsDisposable setDisposable:[[[[galleryEditableItem.editingContext adjustmentsSignalForItem:editableMediaItem] mapToSignal:^SSignal *(id<TGMediaEditAdjustments> adjustments) {
__strong id<TGModernGalleryEditableItem> strongGalleryEditableItem = weakGalleryEditableItem;

@@ -842,7 +852,7 @@
id<TGMediaEditAdjustments> adjustments = dict[@"adjustments"];
NSNumber *timer = dict[@"timer"];

[strongSelf->_captionMixin setTimeout:[timer intValue] isVideo:editableMediaItem.isVideo];
[strongSelf->_captionMixin setTimeout:[timer intValue] isVideo:editableMediaItem.isVideo isCaptionAbove:isCaptionAbove];

if ([adjustments isKindOfClass:[TGVideoEditAdjustments class]])
{

@@ -1617,6 +1627,7 @@
- (void)setSafeAreaInset:(UIEdgeInsets)safeAreaInset
{
_safeAreaInset = safeAreaInset;
_captionMixin.safeAreaInset = safeAreaInset;
[_currentItemView setSafeAreaInset:[self localSafeAreaInset]];
[self setNeedsLayout];
}

@@ -222,7 +222,7 @@

UIView *scrubberBackgroundView = [[UIView alloc] initWithFrame:CGRectMake(0.0f, 0.0f, _headerView.frame.size.width, 64.0f)];
scrubberBackgroundView.autoresizingMask = UIViewAutoresizingFlexibleWidth;
scrubberBackgroundView.backgroundColor = [TGPhotoEditorInterfaceAssets toolbarTransparentBackgroundColor];
//scrubberBackgroundView.backgroundColor = [TGPhotoEditorInterfaceAssets toolbarTransparentBackgroundColor];
[_scrubberPanelView addSubview:scrubberBackgroundView];

_scrubberView = [[TGMediaPickerGalleryVideoScrubber alloc] initWithFrame:CGRectMake(0.0f, _headerView.frame.size.height - 44.0f, _headerView.frame.size.width, 68.0f)];

@@ -99,6 +99,12 @@ typedef enum
_currentTimeLabel.backgroundColor = [UIColor clearColor];
_currentTimeLabel.text = @"0:00";
_currentTimeLabel.textColor = [UIColor whiteColor];
_currentTimeLabel.layer.shadowOffset = CGSizeMake(0.0, 0.0);
_currentTimeLabel.layer.shadowRadius = 2.0;
_currentTimeLabel.layer.shadowColor = [UIColor blackColor].CGColor;
_currentTimeLabel.layer.shadowOpacity = 0.4;
_currentTimeLabel.layer.rasterizationScale = TGScreenScaling();
_currentTimeLabel.layer.shouldRasterize = true;
[self addSubview:_currentTimeLabel];

_inverseTimeLabel = [[UILabel alloc] initWithFrame:CGRectMake(frame.size.width - 108, 4, 100, 15)];

@@ -108,6 +114,12 @@ typedef enum
_inverseTimeLabel.text = @"0:00";
_inverseTimeLabel.textAlignment = NSTextAlignmentRight;
_inverseTimeLabel.textColor = [UIColor whiteColor];
_inverseTimeLabel.layer.shadowOffset = CGSizeMake(0.0, 0.0);
_inverseTimeLabel.layer.shadowRadius = 2.0;
_inverseTimeLabel.layer.shadowColor = [UIColor blackColor].CGColor;
_inverseTimeLabel.layer.shadowOpacity = 0.4;
_inverseTimeLabel.layer.rasterizationScale = TGScreenScaling();
_inverseTimeLabel.layer.shouldRasterize = true;
[self addSubview:_inverseTimeLabel];

_wrapperView = [[UIControl alloc] initWithFrame:CGRectMake(8, 24, 0, 36)];

@@ -119,14 +131,19 @@ typedef enum

_summaryThumbnailWrapperView = [[UIView alloc] initWithFrame:CGRectMake(0, 0, 0, 32)];
_summaryThumbnailWrapperView.clipsToBounds = true;
_summaryThumbnailWrapperView.layer.cornerRadius = 5.0;
[_wrapperView addSubview:_summaryThumbnailWrapperView];

_leftCurtainView = [[UIView alloc] initWithFrame:CGRectMake(0, 0, 0, 0)];
_leftCurtainView.backgroundColor = [[TGPhotoEditorInterfaceAssets toolbarBackgroundColor] colorWithAlphaComponent:0.8f];
_leftCurtainView.clipsToBounds = true;
_leftCurtainView.layer.cornerRadius = 5.0;
[_wrapperView addSubview:_leftCurtainView];

_rightCurtainView = [[UIView alloc] initWithFrame:CGRectMake(0, 0, 0, 0)];
_rightCurtainView.backgroundColor = [[TGPhotoEditorInterfaceAssets toolbarBackgroundColor] colorWithAlphaComponent:0.8f];
_rightCurtainView.clipsToBounds = true;
_rightCurtainView.layer.cornerRadius = 5.0;
[_wrapperView addSubview:_rightCurtainView];

__weak TGMediaPickerGalleryVideoScrubber *weakSelf = self;

@@ -15,6 +15,8 @@

CGRect _currentFrame;
UIEdgeInsets _currentEdgeInsets;

bool _currentIsCaptionAbove;
}
@end

@@ -94,12 +96,21 @@
}
};

_inputPanel.captionIsAboveUpdated = ^(bool value) {
__strong TGPhotoCaptionInputMixin *strongSelf = weakSelf;
if (strongSelf.captionIsAboveUpdated != nil) {
strongSelf.captionIsAboveUpdated(value);

strongSelf->_currentIsCaptionAbove = value;
[strongSelf updateLayoutWithFrame:strongSelf->_currentFrame edgeInsets:strongSelf->_currentEdgeInsets animated:true];
}
};

_inputPanelView = inputPanel.view;

_backgroundView = [[UIView alloc] init];
_backgroundView.backgroundColor = [TGPhotoEditorInterfaceAssets toolbarTransparentBackgroundColor];
[parentView addSubview:_backgroundView];
//[parentView addSubview:_backgroundView];
[parentView addSubview:_inputPanelView];
}

@@ -123,7 +134,7 @@
_dismissTapRecognizer.enabled = false;
[_dismissView addGestureRecognizer:_dismissTapRecognizer];

[parentView insertSubview:_dismissView belowSubview:_backgroundView];
//[parentView insertSubview:_dismissView belowSubview:_backgroundView];
}

- (void)setCaption:(NSAttributedString *)caption

@@ -141,8 +152,9 @@
[_inputPanel setCaption:caption];
}

- (void)setTimeout:(int32_t)timeout isVideo:(bool)isVideo {
[_inputPanel setTimeout:timeout isVideo:isVideo];
- (void)setTimeout:(int32_t)timeout isVideo:(bool)isVideo isCaptionAbove:(bool)isCaptionAbove {
_currentIsCaptionAbove = isCaptionAbove;
[_inputPanel setTimeout:timeout isVideo:isVideo isCaptionAbove:isCaptionAbove];
}

- (void)setCaptionPanelHidden:(bool)hidden animated:(bool)__unused animated

@@ -222,14 +234,30 @@
CGRect frame = _currentFrame;
UIEdgeInsets edgeInsets = _currentEdgeInsets;
CGFloat panelHeight = [_inputPanel updateLayoutSize:frame.size keyboardHeight:keyboardHeight sideInset:0.0 animated:false];
[UIView animateWithDuration:duration delay:0.0f options:(curve << 16) animations:^{
_inputPanelView.frame = CGRectMake(edgeInsets.left, frame.size.height - panelHeight - MAX(edgeInsets.bottom, _keyboardHeight), frame.size.width, panelHeight);

CGFloat backgroundHeight = panelHeight;
if (_keyboardHeight > 0.0) {
backgroundHeight += _keyboardHeight - edgeInsets.bottom;

CGFloat panelY = 0.0;
if (frame.size.width > frame.size.height && !TGIsPad()) {
panelY = edgeInsets.top + frame.size.height;
} else {
if (_currentIsCaptionAbove) {
if (_keyboardHeight > 0.0) {
panelY = _safeAreaInset.top + 8.0;
} else {
panelY = _safeAreaInset.top + 8.0 + 40.0;
}
} else {
panelY = edgeInsets.top + frame.size.height - panelHeight - MAX(edgeInsets.bottom, _keyboardHeight);
}
_backgroundView.frame = CGRectMake(edgeInsets.left, frame.size.height - panelHeight - MAX(edgeInsets.bottom, _keyboardHeight), frame.size.width, backgroundHeight);
}

CGFloat backgroundHeight = panelHeight;
if (_keyboardHeight > 0.0) {
backgroundHeight += _keyboardHeight - edgeInsets.bottom;
}

[UIView animateWithDuration:duration delay:0.0f options:(curve << 16) animations:^{
_inputPanelView.frame = CGRectMake(edgeInsets.left, panelY, frame.size.width, panelHeight);
_backgroundView.frame = CGRectMake(edgeInsets.left, panelY, frame.size.width, backgroundHeight);
} completion:nil];

if (self.keyboardHeightChanged != nil)

@@ -243,11 +271,19 @@

CGFloat panelHeight = [_inputPanel updateLayoutSize:frame.size keyboardHeight:_keyboardHeight sideInset:0.0 animated:animated];

CGFloat y = 0.0;
CGFloat panelY = 0.0;
if (frame.size.width > frame.size.height && !TGIsPad()) {
y = edgeInsets.top + frame.size.height;
panelY = edgeInsets.top + frame.size.height;
} else {
y = edgeInsets.top + frame.size.height - panelHeight - MAX(edgeInsets.bottom, _keyboardHeight);
if (_currentIsCaptionAbove) {
if (_keyboardHeight > 0.0) {
panelY = _safeAreaInset.top + 8.0;
} else {
panelY = _safeAreaInset.top + 8.0 + 40.0;
}
} else {
panelY = edgeInsets.top + frame.size.height - panelHeight - MAX(edgeInsets.bottom, _keyboardHeight);
}
}

CGFloat backgroundHeight = panelHeight;

@@ -255,8 +291,8 @@
backgroundHeight += _keyboardHeight - edgeInsets.bottom;
}

CGRect panelFrame = CGRectMake(edgeInsets.left, y, frame.size.width, panelHeight);
CGRect backgroundFrame = CGRectMake(edgeInsets.left, y, frame.size.width, backgroundHeight);
CGRect panelFrame = CGRectMake(edgeInsets.left, panelY, frame.size.width, panelHeight);
CGRect backgroundFrame = CGRectMake(edgeInsets.left, panelY, frame.size.width, backgroundHeight);

if (animated) {
[_inputPanel animateView:_inputPanelView frame:panelFrame];
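
Note: both layout paths above now derive the panel origin from the same three-way decision. A condensed Swift restatement of that computation (function and parameter names are assumed for illustration, not from the commit):

import UIKit

// Condensed form of the panelY logic shared by the keyboard and frame
// layout passes in TGPhotoCaptionInputMixin.
func panelY(frame: CGRect, edgeInsets: UIEdgeInsets, safeAreaTop: CGFloat,
            panelHeight: CGFloat, keyboardHeight: CGFloat,
            isCaptionAbove: Bool, isPad: Bool) -> CGFloat {
    if frame.width > frame.height && !isPad {
        // Landscape phone: the panel is parked below the visible area.
        return edgeInsets.top + frame.height
    }
    if isCaptionAbove {
        // Caption above the media: pin to the safe area; reserve an extra
        // 40pt row only while the keyboard is hidden.
        return keyboardHeight > 0.0 ? safeAreaTop + 8.0 : safeAreaTop + 8.0 + 40.0
    }
    // Default: bottom-aligned, lifted above the keyboard when present.
    return edgeInsets.top + frame.height - panelHeight - max(edgeInsets.bottom, keyboardHeight)
}
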
@@ -14,8 +14,8 @@ public func mediaPasteboardScreen(
context: AccountContext,
updatedPresentationData: (initial: PresentationData, signal: Signal<PresentationData, NoError>)? = nil,
peer: EnginePeer,
subjects: [MediaPickerScreen.Subject.Media],
presentMediaPicker: @escaping (_ subject: MediaPickerScreen.Subject, _ saveEditedPhotos: Bool, _ bannedSendPhotos: (Int32, Bool)?, _ bannedSendVideos: (Int32, Bool)?, _ present: @escaping (MediaPickerScreen, AttachmentMediaPickerContext?) -> Void) -> Void,
subjects: [MediaPickerScreenImpl.Subject.Media],
presentMediaPicker: @escaping (_ subject: MediaPickerScreenImpl.Subject, _ saveEditedPhotos: Bool, _ bannedSendPhotos: (Int32, Bool)?, _ bannedSendVideos: (Int32, Bool)?, _ present: @escaping (MediaPickerScreenImpl, AttachmentMediaPickerContext?) -> Void) -> Void,
getSourceRect: (() -> CGRect?)? = nil,
makeEntityInputView: @escaping () -> AttachmentTextInputPanelInputView? = { return nil }
) -> ViewController {

@@ -51,6 +51,7 @@ swift_library(
"//submodules/ComponentFlow",
"//submodules/Components/ComponentDisplayAdapters",
"//submodules/AnimatedCountLabelNode",
"//submodules/TelegramUI/Components/MediaAssetsContext",
],
visibility = [
"//visibility:public",

@@ -11,6 +11,7 @@ import LegacyComponents
import LegacyUI
import LegacyMediaPickerUI
import Photos
import MediaAssetsContext

private func galleryFetchResultItems(fetchResult: PHFetchResult<PHAsset>, index: Int, reversed: Bool, selectionContext: TGMediaSelectionContext?, editingContext: TGMediaEditingContext, stickersContext: TGPhotoPaintStickersContext, immediateThumbnail: UIImage?) -> ([TGModernGalleryItem], TGModernGalleryItem?) {
var focusItem: TGModernGalleryItem?

@@ -7,6 +7,7 @@ import TelegramPresentationData
import ItemListUI
import MergeLists
import Photos
import MediaAssetsContext

private struct MediaGroupsGridAlbumEntry: Comparable, Identifiable {
let theme: PresentationTheme

@@ -6,6 +6,7 @@ import ContextUI
import AccountContext
import TelegramPresentationData
import Photos
import MediaAssetsContext

struct MediaGroupItem {
let collection: PHAssetCollection

@@ -13,7 +13,7 @@ import Photos
import LegacyComponents
import AttachmentUI
import ItemListUI
import CameraScreen
import MediaAssetsContext

private enum MediaGroupsEntry: Comparable, Identifiable {
enum StableId: Hashable {

@@ -470,7 +470,7 @@ public final class MediaGroupsScreen: ViewController, AttachmentContainable {
} else {
self.updateNavigationStack { current in
var mediaPickerContext: AttachmentMediaPickerContext?
if let first = current.first as? MediaPickerScreen {
if let first = current.first as? MediaPickerScreenImpl {
mediaPickerContext = first.webSearchController?.mediaPickerContext ?? first.mediaPickerContext
}
return (current.filter { $0 !== self }, mediaPickerContext)

@@ -17,6 +17,7 @@ import ImageBlur
import FastBlur
import MediaEditor
import RadialStatusNode
import MediaAssetsContext

private let leftShadowImage: UIImage = {
let baseImage = UIImage(bundleImageName: "Peer Info/MediaGridShadow")!

@@ -48,7 +49,7 @@ private let rightShadowImage: UIImage = {

enum MediaPickerGridItemContent: Equatable {
case asset(PHFetchResult<PHAsset>, Int)
case media(MediaPickerScreen.Subject.Media, Int)
case media(MediaPickerScreenImpl.Subject.Media, Int)
case draft(MediaEditorDraft, Int)
}

@@ -395,7 +396,7 @@ final class MediaPickerGridItemNode: GridItemNode {
self.updateHiddenMedia()
}

func setup(interaction: MediaPickerInteraction, media: MediaPickerScreen.Subject.Media, index: Int, theme: PresentationTheme, selectable: Bool, enableAnimations: Bool, stories: Bool) {
func setup(interaction: MediaPickerInteraction, media: MediaPickerScreenImpl.Subject.Media, index: Int, theme: PresentationTheme, selectable: Bool, enableAnimations: Bool, stories: Bool) {
self.interaction = interaction
self.theme = theme
self.selectable = selectable

@@ -27,6 +27,7 @@ import MediaEditor
import ImageObjectSeparation
import ChatSendMessageActionUI
import AnimatedCountLabelNode
import MediaAssetsContext

final class MediaPickerInteraction {
let downloadManager: AssetDownloadManager

@@ -40,16 +41,7 @@ final class MediaPickerInteraction {
let selectionState: TGMediaSelectionContext?
let editingState: TGMediaEditingContext
var hiddenMediaId: String?

var captionIsAboveMedia: Bool = false {
didSet {
if self.captionIsAboveMedia != oldValue {
self.captionIsAboveMediaValue.set(self.captionIsAboveMedia)
}
}
}
let captionIsAboveMediaValue = ValuePromise<Bool>(false)

init(downloadManager: AssetDownloadManager, openMedia: @escaping (PHFetchResult<PHAsset>, Int, UIImage?) -> Void, openSelectedMedia: @escaping (TGMediaSelectableItem, UIImage?) -> Void, openDraft: @escaping (MediaEditorDraft, UIImage?) -> Void, toggleSelection: @escaping (TGMediaSelectableItem, Bool, Bool) -> Bool, sendSelected: @escaping (TGMediaSelectableItem?, Bool, Int32?, Bool, ChatSendMessageActionSheetController.SendParameters?, @escaping () -> Void) -> Void, schedule: @escaping (ChatSendMessageActionSheetController.SendParameters?) -> Void, dismissInput: @escaping () -> Void, selectionState: TGMediaSelectionContext?, editingState: TGMediaEditingContext) {
self.downloadManager = downloadManager
self.openMedia = openMedia

@@ -138,7 +130,7 @@ struct Month: Equatable {

private var savedStoriesContentOffset: CGFloat?

public final class MediaPickerScreen: ViewController, AttachmentContainable {
public final class MediaPickerScreenImpl: ViewController, MediaPickerScreen, AttachmentContainable {
public enum Subject {
public enum Media: Equatable {
case image(UIImage)

@@ -207,7 +199,7 @@ public final class MediaPickerScreen: ViewController, AttachmentContainable {
public var getCaptionPanelView: () -> TGCaptionPanelView? = { return nil }
public var openBoost: () -> Void = { }

public var customSelection: ((MediaPickerScreen, Any) -> Void)? = nil
public var customSelection: ((MediaPickerScreenImpl, Any) -> Void)? = nil

public var createFromScratch: () -> Void = {}
public var presentFilePicker: () -> Void = {}

@@ -250,7 +242,7 @@ public final class MediaPickerScreen: ViewController, AttachmentContainable {
case media([Subject.Media])
}

private weak var controller: MediaPickerScreen?
private weak var controller: MediaPickerScreenImpl?
private var presentationData: PresentationData
fileprivate let mediaAssetsContext: MediaAssetsContext

@@ -307,7 +299,7 @@ public final class MediaPickerScreen: ViewController, AttachmentContainable {

private var validLayout: (ContainerViewLayout, CGFloat)?

init(controller: MediaPickerScreen) {
init(controller: MediaPickerScreenImpl) {
self.controller = controller
self.presentationData = controller.presentationData

@@ -1255,7 +1247,7 @@ public final class MediaPickerScreen: ViewController, AttachmentContainable {
if parameters == nil {
var textIsAboveMedia = false
if let interaction = controller.interaction {
textIsAboveMedia = interaction.captionIsAboveMedia
textIsAboveMedia = interaction.editingState.isCaptionAbove()
}
parameters = ChatSendMessageActionSheetController.SendParameters(
effect: nil,

@@ -2311,7 +2303,7 @@ public final class MediaPickerScreen: ViewController, AttachmentContainable {
} else {
self.updateNavigationStack { current in
var mediaPickerContext: AttachmentMediaPickerContext?
if let first = current.first as? MediaPickerScreen {
if let first = current.first as? MediaPickerScreenImpl {
mediaPickerContext = first.webSearchController?.mediaPickerContext ?? first.mediaPickerContext
}
return (current.filter { $0 !== self }, mediaPickerContext)

@@ -2417,7 +2409,7 @@ public final class MediaPickerScreen: ViewController, AttachmentContainable {
var updateNavigationStackImpl: ((AttachmentContainable) -> Void)?
let groupsController = MediaGroupsScreen(context: self.context, updatedPresentationData: self.updatedPresentationData, mediaAssetsContext: self.controllerNode.mediaAssetsContext, embedded: embedded, openGroup: { [weak self] collection in
if let strongSelf = self {
let mediaPicker = MediaPickerScreen(context: strongSelf.context, updatedPresentationData: strongSelf.updatedPresentationData, peer: strongSelf.peer, threadTitle: strongSelf.threadTitle, chatLocation: strongSelf.chatLocation, isScheduledMessages: strongSelf.isScheduledMessages, bannedSendPhotos: strongSelf.bannedSendPhotos, bannedSendVideos: strongSelf.bannedSendVideos, subject: .assets(collection, mode), editingContext: strongSelf.interaction?.editingState, selectionContext: strongSelf.interaction?.selectionState)
let mediaPicker = MediaPickerScreenImpl(context: strongSelf.context, updatedPresentationData: strongSelf.updatedPresentationData, peer: strongSelf.peer, threadTitle: strongSelf.threadTitle, chatLocation: strongSelf.chatLocation, isScheduledMessages: strongSelf.isScheduledMessages, bannedSendPhotos: strongSelf.bannedSendPhotos, bannedSendVideos: strongSelf.bannedSendVideos, subject: .assets(collection, mode), editingContext: strongSelf.interaction?.editingState, selectionContext: strongSelf.interaction?.selectionState)

mediaPicker.presentSchedulePicker = strongSelf.presentSchedulePicker
mediaPicker.presentTimerPicker = strongSelf.presentTimerPicker

@@ -2576,7 +2568,7 @@ public final class MediaPickerScreen: ViewController, AttachmentContainable {
if isCaptionAboveMediaAvailable {
var mediaCaptionIsAbove = false
if let interaction = self.interaction {
mediaCaptionIsAbove = interaction.captionIsAboveMedia
mediaCaptionIsAbove = interaction.editingState.isCaptionAbove()
}

items.append(.action(ContextMenuActionItem(text: mediaCaptionIsAbove ? strings.Chat_SendMessageMenu_MoveCaptionDown : strings.Chat_SendMessageMenu_MoveCaptionUp, icon: { _ in return nil }, iconAnimation: ContextMenuActionItem.IconAnimation(

@@ -2588,7 +2580,7 @@ public final class MediaPickerScreen: ViewController, AttachmentContainable {
}

if let interaction = strongSelf.interaction {
interaction.captionIsAboveMedia = !interaction.captionIsAboveMedia
interaction.editingState.setCaptionAbove(!interaction.editingState.isCaptionAbove())
}
})))
}

@@ -2684,13 +2676,17 @@ public final class MediaPickerScreen: ViewController, AttachmentContainable {
self.selectedButtonNode.frame = CGRect(origin: CGPoint(x: self.view.bounds.width - 54.0 - self.selectedButtonNode.frame.width - safeInset, y: floorToScreenPixels((navigationHeight - self.selectedButtonNode.frame.height) / 2.0) + 1.0), size: self.selectedButtonNode.frame.size)
}

public func dismissAnimated() {
self.requestDismiss(completion: {})
}

public var mediaPickerContext: AttachmentMediaPickerContext? {
return MediaPickerContext(controller: self)
}
}

final class MediaPickerContext: AttachmentMediaPickerContext {
private weak var controller: MediaPickerScreen?
private weak var controller: MediaPickerScreenImpl?

var selectionCount: Signal<Int, NoError> {
return Signal { [weak self] subscriber in

@@ -2791,23 +2787,32 @@ final class MediaPickerContext: AttachmentMediaPickerContext {

var captionIsAboveMedia: Signal<Bool, NoError> {
return Signal { [weak self] subscriber in
guard let interaction = self?.controller?.interaction else {
guard let self else {
subscriber.putNext(false)
subscriber.putCompletion()

return EmptyDisposable
}
let disposable = interaction.captionIsAboveMediaValue.get().start(next: { value in
subscriber.putNext(value)
guard let captionAbove = self.controller?.interaction?.editingState.captionAbove() else {
subscriber.putNext(false)
subscriber.putCompletion()
return EmptyDisposable
}

let disposable = captionAbove.start(next: { caption in
if let caption = caption as? NSNumber {
subscriber.putNext(caption.boolValue)
} else {
subscriber.putNext(false)
}
}, error: { _ in }, completed: { })
return ActionDisposable {
disposable.dispose()
disposable?.dispose()
}
}
}

func setCaptionIsAboveMedia(_ captionIsAboveMedia: Bool) -> Void {
self.controller?.interaction?.captionIsAboveMedia = captionIsAboveMedia
self.controller?.interaction?.editingState.setCaptionAbove(captionIsAboveMedia)
}

public var loadingProgress: Signal<CGFloat?, NoError> {
|
||||
return .single(self.controller?.mainButtonState)
|
||||
}
|
||||
|
||||
init(controller: MediaPickerScreen) {
|
||||
init(controller: MediaPickerScreenImpl) {
|
||||
self.controller = controller
|
||||
}
|
||||
|
||||
@ -2954,7 +2959,7 @@ public func wallpaperMediaPickerController(
|
||||
updatedPresentationData: (initial: PresentationData, signal: Signal<PresentationData, NoError>)? = nil,
|
||||
peer: EnginePeer,
|
||||
animateAppearance: Bool,
|
||||
completion: @escaping (MediaPickerScreen, Any) -> Void = { _, _ in },
|
||||
completion: @escaping (MediaPickerScreenImpl, Any) -> Void = { _, _ in },
|
||||
openColors: @escaping () -> Void
|
||||
) -> ViewController {
|
||||
let controller = AttachmentController(context: context, updatedPresentationData: updatedPresentationData, chatLocation: nil, buttons: [.standalone], initialButton: .standalone, fromMenu: false, hasTextInput: false, makeEntityInputView: {
|
||||
@ -2963,7 +2968,7 @@ public func wallpaperMediaPickerController(
|
||||
controller.animateAppearance = animateAppearance
|
||||
controller.requestController = { [weak controller] _, present in
|
||||
let presentationData = context.sharedContext.currentPresentationData.with { $0 }
|
||||
let mediaPickerController = MediaPickerScreen(context: context, updatedPresentationData: updatedPresentationData, peer: nil, threadTitle: nil, chatLocation: nil, bannedSendPhotos: nil, bannedSendVideos: nil, subject: .assets(nil, .wallpaper), mainButtonState: AttachmentMainButtonState(text: presentationData.strings.Conversation_Theme_SetColorWallpaper, font: .regular, background: .color(.clear), textColor: presentationData.theme.actionSheet.controlAccentColor, isVisible: true, progress: .none, isEnabled: true, hasShimmer: false), mainButtonAction: {
|
||||
let mediaPickerController = MediaPickerScreenImpl(context: context, updatedPresentationData: updatedPresentationData, peer: nil, threadTitle: nil, chatLocation: nil, bannedSendPhotos: nil, bannedSendVideos: nil, subject: .assets(nil, .wallpaper), mainButtonState: AttachmentMainButtonState(text: presentationData.strings.Conversation_Theme_SetColorWallpaper, font: .regular, background: .color(.clear), textColor: presentationData.theme.actionSheet.controlAccentColor, isVisible: true, progress: .none, isEnabled: true, hasShimmer: false), mainButtonAction: {
|
||||
controller?.dismiss(animated: true)
|
||||
openColors()
|
||||
})
|
||||
@ -2985,7 +2990,7 @@ public func mediaPickerController(
|
||||
return nil
|
||||
})
|
||||
controller.requestController = { _, present in
|
||||
let mediaPickerController = MediaPickerScreen(context: context, updatedPresentationData: updatedPresentationData, peer: nil, threadTitle: nil, chatLocation: nil, bannedSendPhotos: nil, bannedSendVideos: nil, subject: .assets(nil, .addImage), mainButtonState: nil, mainButtonAction: nil)
|
||||
let mediaPickerController = MediaPickerScreenImpl(context: context, updatedPresentationData: updatedPresentationData, peer: nil, threadTitle: nil, chatLocation: nil, bannedSendPhotos: nil, bannedSendVideos: nil, subject: .assets(nil, .addImage), mainButtonState: nil, mainButtonAction: nil)
|
||||
mediaPickerController.customSelection = { controller, result in
|
||||
completion(result)
|
||||
controller.dismiss(animated: true)
|
||||
@ -3020,6 +3025,7 @@ public func mediaPickerController(
|
||||
public func storyMediaPickerController(
|
||||
context: AccountContext,
|
||||
isDark: Bool,
|
||||
forCollage: Bool,
|
||||
getSourceRect: @escaping () -> CGRect,
|
||||
completion: @escaping (Any, UIView, CGRect, UIImage?, @escaping (Bool?) -> (UIView, CGRect)?, @escaping () -> Void) -> Void,
|
||||
dismissed: @escaping () -> Void,
|
||||
@ -3036,7 +3042,7 @@ public func storyMediaPickerController(
|
||||
controller.forceSourceRect = true
|
||||
controller.getSourceRect = getSourceRect
|
||||
controller.requestController = { _, present in
|
||||
let mediaPickerController = MediaPickerScreen(context: context, updatedPresentationData: updatedPresentationData, peer: nil, threadTitle: nil, chatLocation: nil, bannedSendPhotos: nil, bannedSendVideos: nil, subject: .assets(nil, .story), mainButtonState: nil, mainButtonAction: nil)
|
||||
let mediaPickerController = MediaPickerScreenImpl(context: context, updatedPresentationData: updatedPresentationData, peer: nil, threadTitle: nil, chatLocation: nil, bannedSendPhotos: nil, bannedSendVideos: nil, subject: .assets(nil, .story), mainButtonState: nil, mainButtonAction: nil)
|
||||
mediaPickerController.groupsPresented = groupsPresented
|
||||
mediaPickerController.customSelection = { controller, result in
|
||||
if let result = result as? MediaEditorDraft {
|
||||
@ -3062,7 +3068,9 @@ public func storyMediaPickerController(
|
||||
})
|
||||
}
|
||||
} else if let result = result as? PHAsset {
|
||||
controller.updateHiddenMediaId(result.localIdentifier)
|
||||
if !forCollage {
|
||||
controller.updateHiddenMediaId(result.localIdentifier)
|
||||
}
|
||||
if let transitionView = controller.transitionView(for: result.localIdentifier, snapshot: false) {
|
||||
let transitionOut: (Bool?) -> (UIView, CGRect)? = { isNew in
|
||||
if let isNew {
|
||||
@ -3107,7 +3115,7 @@ public func stickerMediaPickerController(
|
||||
controller.forceSourceRect = true
|
||||
controller.getSourceRect = getSourceRect
|
||||
controller.requestController = { [weak controller] _, present in
|
||||
let mediaPickerController = MediaPickerScreen(context: context, updatedPresentationData: updatedPresentationData, peer: nil, threadTitle: nil, chatLocation: nil, bannedSendPhotos: nil, bannedSendVideos: nil, subject: .assets(nil, .createSticker), mainButtonState: nil, mainButtonAction: nil)
|
||||
let mediaPickerController = MediaPickerScreenImpl(context: context, updatedPresentationData: updatedPresentationData, peer: nil, threadTitle: nil, chatLocation: nil, bannedSendPhotos: nil, bannedSendVideos: nil, subject: .assets(nil, .createSticker), mainButtonState: nil, mainButtonAction: nil)
|
||||
mediaPickerController.customSelection = { controller, result in
|
||||
if let result = result as? PHAsset {
|
||||
controller.updateHiddenMediaId(result.localIdentifier)
|
||||
@ -3175,17 +3183,17 @@ public func stickerMediaPickerController(
|
||||
}
|
||||
|
||||
var returnToCameraImpl: (() -> Void)?
|
||||
let cameraScreen = CameraScreen(
|
||||
let cameraScreen = CameraScreenImpl(
|
||||
context: context,
|
||||
mode: .sticker,
|
||||
holder: cameraHolder,
|
||||
transitionIn: CameraScreen.TransitionIn(
|
||||
transitionIn: CameraScreenImpl.TransitionIn(
|
||||
sourceView: cameraHolder.parentView,
|
||||
sourceRect: cameraHolder.parentView.bounds,
|
||||
sourceCornerRadius: 0.0
|
||||
),
|
||||
transitionOut: { _ in
|
||||
return CameraScreen.TransitionOut(
|
||||
return CameraScreenImpl.TransitionOut(
|
||||
destinationView: cameraHolder.parentView,
|
||||
destinationRect: cameraHolder.parentView.bounds,
|
||||
destinationCornerRadius: 0.0
|
||||
|
@ -16,6 +16,7 @@ import ChatMessageBackground
|
||||
import ChatSendMessageActionUI
|
||||
import ComponentFlow
|
||||
import ComponentDisplayAdapters
|
||||
import MediaAssetsContext
|
||||
|
||||
private class MediaPickerSelectedItemNode: ASDisplayNode {
|
||||
let asset: TGMediaEditableItem
|
||||
|
@ -128,6 +128,8 @@ public func standaloneSendEnqueueMessages(
|
||||
struct MessageResult {
|
||||
var result: PendingMessageUploadedContentResult
|
||||
var media: [Media]
|
||||
var attributes: [MessageAttribute]
|
||||
var groupingKey: Int64?
|
||||
}
|
||||
|
||||
let signals: [Signal<MessageResult, PendingMessageUploadError>] = messages.map { message in
|
||||
@ -180,7 +182,7 @@ public func standaloneSendEnqueueMessages(
|
||||
|
||||
let content = messageContentToUpload(accountPeerId: accountPeerId, network: network, postbox: postbox, auxiliaryMethods: auxiliaryMethods, transformOutgoingMessageMedia: { _, _, _, _ in
|
||||
return .single(nil)
|
||||
}, messageMediaPreuploadManager: MessageMediaPreuploadManager(), revalidationContext: MediaReferenceRevalidationContext(), forceReupload: false, isGrouped: false, passFetchProgress: true, forceNoBigParts: false, peerId: peerId, messageId: nil, attributes: attributes, text: text, media: media)
|
||||
}, messageMediaPreuploadManager: MessageMediaPreuploadManager(), revalidationContext: MediaReferenceRevalidationContext(), forceReupload: false, isGrouped: message.groupingKey != nil, passFetchProgress: true, forceNoBigParts: false, peerId: peerId, messageId: nil, attributes: attributes, text: text, media: media)
|
||||
let contentResult: Signal<PendingMessageUploadedContentResult, PendingMessageUploadError>
|
||||
switch content {
|
||||
case let .signal(value, _):
|
||||
@ -190,7 +192,7 @@ public func standaloneSendEnqueueMessages(
|
||||
}
|
||||
return contentResult
|
||||
|> map { contentResult in
|
||||
return MessageResult(result: contentResult, media: media)
|
||||
return MessageResult(result: contentResult, media: media, attributes: attributes, groupingKey: 12345)
|
||||
}
|
||||
}
|
||||
|
||||
@ -200,7 +202,7 @@ public func standaloneSendEnqueueMessages(
|
||||
}
|
||||
|> mapToSignal { contentResults -> Signal<StandaloneSendMessageStatus, StandaloneSendMessagesError> in
|
||||
var progressSum: Float = 0.0
|
||||
var allResults: [(result: PendingMessageUploadedContentAndReuploadInfo, media: [Media])] = []
|
||||
var allResults: [(result: PendingMessageUploadedContentAndReuploadInfo, media: [Media], attributes: [MessageAttribute], groupingKey: Int64?)] = []
|
||||
var allDone = true
|
||||
for result in contentResults {
|
||||
switch result.result {
|
||||
@ -208,36 +210,60 @@ public func standaloneSendEnqueueMessages(
|
||||
allDone = false
|
||||
progressSum += value.progress
|
||||
case let .content(content):
|
||||
allResults.append((content, result.media))
|
||||
allResults.append((content, result.media, result.attributes, result.groupingKey))
|
||||
}
|
||||
}
|
||||
if allDone {
|
||||
var sendSignals: [Signal<Never, StandaloneSendMessagesError>] = []
|
||||
|
||||
for (content, media) in allResults {
|
||||
var text: String = ""
|
||||
switch content.content {
|
||||
case let .text(textValue):
|
||||
text = textValue
|
||||
case let .media(_, textValue):
|
||||
text = textValue
|
||||
default:
|
||||
break
|
||||
var existingGroupingKeys = Set<Int64>()
|
||||
for (content, media, attributes, groupingKey) in allResults {
|
||||
if let currentGroupingKey = groupingKey {
|
||||
if !existingGroupingKeys.contains(currentGroupingKey) {
|
||||
existingGroupingKeys.insert(currentGroupingKey)
|
||||
var contents: [PendingMessageUploadedContentAndReuploadInfo] = []
|
||||
for (content, _, _, _) in allResults {
|
||||
contents.append(content)
|
||||
}
|
||||
|
||||
sendSignals.append(sendUploadedMultiMessageContent(
|
||||
auxiliaryMethods: auxiliaryMethods,
|
||||
postbox: postbox,
|
||||
network: network,
|
||||
stateManager: stateManager,
|
||||
accountPeerId: stateManager.accountPeerId,
|
||||
peerId: peerId,
|
||||
content: contents,
|
||||
attributes: attributes,
|
||||
threadId: threadId
|
||||
))
|
||||
}
|
||||
} else {
|
||||
var text: String = ""
|
||||
switch content.content {
|
||||
case let .text(textValue):
|
||||
text = textValue
|
||||
case let .media(_, textValue):
|
||||
text = textValue
|
||||
default:
|
||||
break
|
||||
}
|
||||
|
||||
sendSignals.append(sendUploadedMessageContent(
|
||||
auxiliaryMethods: auxiliaryMethods,
|
||||
postbox: postbox,
|
||||
network: network,
|
||||
stateManager: stateManager,
|
||||
accountPeerId: stateManager.accountPeerId,
|
||||
peerId: peerId,
|
||||
content: content,
|
||||
text: text,
|
||||
attributes: attributes,
|
||||
media: media,
|
||||
threadId: threadId
|
||||
))
|
||||
}
|
||||
|
||||
sendSignals.append(sendUploadedMessageContent(
|
||||
auxiliaryMethods: auxiliaryMethods,
|
||||
postbox: postbox,
|
||||
network: network,
|
||||
stateManager: stateManager,
|
||||
accountPeerId: stateManager.accountPeerId,
|
||||
peerId: peerId,
|
||||
content: content,
|
||||
text: text,
|
||||
attributes: [],
|
||||
media: media,
|
||||
threadId: threadId
|
||||
))
|
||||
}
|
||||
|
||||
return combineLatest(sendSignals)
|
||||
@ -389,111 +415,111 @@ private func sendUploadedMessageContent(
|
||||
|
||||
let sendMessageRequest: Signal<NetworkRequestResult<Api.Updates>, MTRpcError>
|
||||
switch content.content {
|
||||
case .text:
|
||||
if bubbleUpEmojiOrStickersets {
|
||||
flags |= Int32(1 << 15)
|
||||
}
|
||||
|
||||
var replyTo: Api.InputReplyTo?
|
||||
if let replyMessageId = replyMessageId {
|
||||
flags |= 1 << 0
|
||||
|
||||
var replyFlags: Int32 = 0
|
||||
if threadId != nil {
|
||||
replyFlags |= 1 << 0
|
||||
}
|
||||
replyTo = .inputReplyToMessage(flags: replyFlags, replyToMsgId: replyMessageId, topMsgId: threadId.flatMap(Int32.init(clamping:)), replyToPeerId: nil, quoteText: nil, quoteEntities: nil, quoteOffset: nil)
|
||||
} else if let replyToStoryId = replyToStoryId {
|
||||
if let inputPeer = transaction.getPeer(replyToStoryId.peerId).flatMap(apiInputPeer) {
|
||||
flags |= 1 << 0
|
||||
replyTo = .inputReplyToStory(peer: inputPeer, storyId: replyToStoryId.id)
|
||||
}
|
||||
}
|
||||
|
||||
sendMessageRequest = network.requestWithAdditionalInfo(Api.functions.messages.sendMessage(flags: flags, peer: inputPeer, replyTo: replyTo, message: text, randomId: uniqueId, replyMarkup: nil, entities: messageEntities, scheduleDate: scheduleTime, sendAs: sendAsInputPeer, quickReplyShortcut: nil, effect: nil), info: .acknowledgement, tag: dependencyTag)
|
||||
case let .media(inputMedia, text):
|
||||
if bubbleUpEmojiOrStickersets {
|
||||
flags |= Int32(1 << 15)
|
||||
}
|
||||
|
||||
var replyTo: Api.InputReplyTo?
|
||||
if let replyMessageId = replyMessageId {
|
||||
flags |= 1 << 0
|
||||
|
||||
var replyFlags: Int32 = 0
|
||||
if threadId != nil {
|
||||
replyFlags |= 1 << 0
|
||||
}
|
||||
replyTo = .inputReplyToMessage(flags: replyFlags, replyToMsgId: replyMessageId, topMsgId: threadId.flatMap(Int32.init(clamping:)), replyToPeerId: nil, quoteText: nil, quoteEntities: nil, quoteOffset: nil)
|
||||
} else if let replyToStoryId = replyToStoryId {
|
||||
if let inputPeer = transaction.getPeer(replyToStoryId.peerId).flatMap(apiInputPeer) {
|
||||
flags |= 1 << 0
|
||||
replyTo = .inputReplyToStory(peer: inputPeer, storyId: replyToStoryId.id)
|
||||
}
|
||||
}
|
||||
case .text:
|
||||
if bubbleUpEmojiOrStickersets {
|
||||
flags |= Int32(1 << 15)
|
||||
}
|
||||
|
||||
var replyTo: Api.InputReplyTo?
|
||||
if let replyMessageId = replyMessageId {
|
||||
flags |= 1 << 0
|
||||
|
||||
sendMessageRequest = network.request(Api.functions.messages.sendMedia(flags: flags, peer: inputPeer, replyTo: replyTo, media: inputMedia, message: text, randomId: uniqueId, replyMarkup: nil, entities: messageEntities, scheduleDate: scheduleTime, sendAs: sendAsInputPeer, quickReplyShortcut: nil, effect: nil), tag: dependencyTag)
|
||||
|> map(NetworkRequestResult.result)
|
||||
case let .forward(sourceInfo):
|
||||
var topMsgId: Int32?
|
||||
if let threadId = threadId {
|
||||
flags |= Int32(1 << 9)
|
||||
topMsgId = Int32(clamping: threadId)
|
||||
var replyFlags: Int32 = 0
|
||||
if threadId != nil {
|
||||
replyFlags |= 1 << 0
|
||||
}
|
||||
|
||||
if let forwardSourceInfoAttribute = forwardSourceInfoAttribute, let sourcePeer = transaction.getPeer(forwardSourceInfoAttribute.messageId.peerId), let sourceInputPeer = apiInputPeer(sourcePeer) {
|
||||
sendMessageRequest = network.request(Api.functions.messages.forwardMessages(flags: flags, fromPeer: sourceInputPeer, id: [sourceInfo.messageId.id], randomId: [uniqueId], toPeer: inputPeer, topMsgId: topMsgId, scheduleDate: scheduleTime, sendAs: sendAsInputPeer, quickReplyShortcut: nil), tag: dependencyTag)
|
||||
|> map(NetworkRequestResult.result)
|
||||
} else {
|
||||
sendMessageRequest = .fail(MTRpcError(errorCode: 400, errorDescription: "internal"))
|
||||
}
|
||||
case let .chatContextResult(chatContextResult):
|
||||
if chatContextResult.hideVia {
|
||||
flags |= Int32(1 << 11)
|
||||
}
|
||||
|
||||
var replyTo: Api.InputReplyTo?
|
||||
if let replyMessageId = replyMessageId {
|
||||
replyTo = .inputReplyToMessage(flags: replyFlags, replyToMsgId: replyMessageId, topMsgId: threadId.flatMap(Int32.init(clamping:)), replyToPeerId: nil, quoteText: nil, quoteEntities: nil, quoteOffset: nil)
|
||||
} else if let replyToStoryId = replyToStoryId {
|
||||
if let inputPeer = transaction.getPeer(replyToStoryId.peerId).flatMap(apiInputPeer) {
|
||||
flags |= 1 << 0
|
||||
|
||||
var replyFlags: Int32 = 0
|
||||
if threadId != nil {
|
||||
replyFlags |= 1 << 0
|
||||
}
|
||||
replyTo = .inputReplyToMessage(flags: replyFlags, replyToMsgId: replyMessageId, topMsgId: threadId.flatMap(Int32.init(clamping:)), replyToPeerId: nil, quoteText: nil, quoteEntities: nil, quoteOffset: nil)
|
||||
} else if let replyToStoryId = replyToStoryId {
|
||||
if let inputPeer = transaction.getPeer(replyToStoryId.peerId).flatMap(apiInputPeer) {
|
||||
flags |= 1 << 0
|
||||
replyTo = .inputReplyToStory(peer: inputPeer, storyId: replyToStoryId.id)
|
||||
}
|
||||
replyTo = .inputReplyToStory(peer: inputPeer, storyId: replyToStoryId.id)
|
||||
}
|
||||
}
|
||||
|
||||
sendMessageRequest = network.requestWithAdditionalInfo(Api.functions.messages.sendMessage(flags: flags, peer: inputPeer, replyTo: replyTo, message: text, randomId: uniqueId, replyMarkup: nil, entities: messageEntities, scheduleDate: scheduleTime, sendAs: sendAsInputPeer, quickReplyShortcut: nil, effect: nil), info: .acknowledgement, tag: dependencyTag)
|
||||
case let .media(inputMedia, text):
|
||||
if bubbleUpEmojiOrStickersets {
|
||||
flags |= Int32(1 << 15)
|
||||
}
|
||||
|
||||
var replyTo: Api.InputReplyTo?
|
||||
if let replyMessageId = replyMessageId {
|
||||
flags |= 1 << 0
|
||||
|
||||
var replyFlags: Int32 = 0
|
||||
if threadId != nil {
|
||||
replyFlags |= 1 << 0
|
||||
}
|
||||
replyTo = .inputReplyToMessage(flags: replyFlags, replyToMsgId: replyMessageId, topMsgId: threadId.flatMap(Int32.init(clamping:)), replyToPeerId: nil, quoteText: nil, quoteEntities: nil, quoteOffset: nil)
|
||||
} else if let replyToStoryId = replyToStoryId {
|
||||
if let inputPeer = transaction.getPeer(replyToStoryId.peerId).flatMap(apiInputPeer) {
|
||||
flags |= 1 << 0
|
||||
replyTo = .inputReplyToStory(peer: inputPeer, storyId: replyToStoryId.id)
|
||||
}
|
||||
}
|
||||
|
||||
sendMessageRequest = network.request(Api.functions.messages.sendInlineBotResult(flags: flags, peer: inputPeer, replyTo: replyTo, randomId: uniqueId, queryId: chatContextResult.queryId, id: chatContextResult.id, scheduleDate: scheduleTime, sendAs: sendAsInputPeer, quickReplyShortcut: nil))
|
||||
sendMessageRequest = network.request(Api.functions.messages.sendMedia(flags: flags, peer: inputPeer, replyTo: replyTo, media: inputMedia, message: text, randomId: uniqueId, replyMarkup: nil, entities: messageEntities, scheduleDate: scheduleTime, sendAs: sendAsInputPeer, quickReplyShortcut: nil, effect: nil), tag: dependencyTag)
|
||||
|> map(NetworkRequestResult.result)
|
||||
case let .forward(sourceInfo):
|
||||
var topMsgId: Int32?
|
||||
if let threadId = threadId {
|
||||
flags |= Int32(1 << 9)
|
||||
topMsgId = Int32(clamping: threadId)
|
||||
}
|
||||
|
||||
if let forwardSourceInfoAttribute = forwardSourceInfoAttribute, let sourcePeer = transaction.getPeer(forwardSourceInfoAttribute.messageId.peerId), let sourceInputPeer = apiInputPeer(sourcePeer) {
|
||||
sendMessageRequest = network.request(Api.functions.messages.forwardMessages(flags: flags, fromPeer: sourceInputPeer, id: [sourceInfo.messageId.id], randomId: [uniqueId], toPeer: inputPeer, topMsgId: topMsgId, scheduleDate: scheduleTime, sendAs: sendAsInputPeer, quickReplyShortcut: nil), tag: dependencyTag)
|
||||
|> map(NetworkRequestResult.result)
|
||||
case .messageScreenshot:
|
||||
let replyTo: Api.InputReplyTo
|
||||
|
||||
if let replyMessageId = replyMessageId {
|
||||
let replyFlags: Int32 = 0
|
||||
replyTo = .inputReplyToMessage(flags: replyFlags, replyToMsgId: replyMessageId, topMsgId: nil, replyToPeerId: nil, quoteText: nil, quoteEntities: nil, quoteOffset: nil)
|
||||
} else if let replyToStoryId = replyToStoryId {
|
||||
if let inputPeer = transaction.getPeer(replyToStoryId.peerId).flatMap(apiInputPeer) {
|
||||
flags |= 1 << 0
|
||||
replyTo = .inputReplyToStory(peer: inputPeer, storyId: replyToStoryId.id)
|
||||
} else {
|
||||
let replyFlags: Int32 = 0
|
||||
replyTo = .inputReplyToMessage(flags: replyFlags, replyToMsgId: 0, topMsgId: nil, replyToPeerId: nil, quoteText: nil, quoteEntities: nil, quoteOffset: nil)
|
||||
}
|
||||
} else {
|
||||
sendMessageRequest = .fail(MTRpcError(errorCode: 400, errorDescription: "internal"))
|
||||
}
|
||||
case let .chatContextResult(chatContextResult):
|
||||
if chatContextResult.hideVia {
|
||||
flags |= Int32(1 << 11)
|
||||
}
|
||||
|
||||
var replyTo: Api.InputReplyTo?
|
||||
if let replyMessageId = replyMessageId {
|
||||
flags |= 1 << 0
|
||||
|
||||
var replyFlags: Int32 = 0
|
||||
if threadId != nil {
|
||||
replyFlags |= 1 << 0
|
||||
}
|
||||
replyTo = .inputReplyToMessage(flags: replyFlags, replyToMsgId: replyMessageId, topMsgId: threadId.flatMap(Int32.init(clamping:)), replyToPeerId: nil, quoteText: nil, quoteEntities: nil, quoteOffset: nil)
|
||||
} else if let replyToStoryId = replyToStoryId {
|
||||
if let inputPeer = transaction.getPeer(replyToStoryId.peerId).flatMap(apiInputPeer) {
|
||||
flags |= 1 << 0
|
||||
replyTo = .inputReplyToStory(peer: inputPeer, storyId: replyToStoryId.id)
|
||||
}
|
||||
}
|
||||
|
||||
sendMessageRequest = network.request(Api.functions.messages.sendInlineBotResult(flags: flags, peer: inputPeer, replyTo: replyTo, randomId: uniqueId, queryId: chatContextResult.queryId, id: chatContextResult.id, scheduleDate: scheduleTime, sendAs: sendAsInputPeer, quickReplyShortcut: nil))
|
||||
|> map(NetworkRequestResult.result)
|
||||
case .messageScreenshot:
|
||||
let replyTo: Api.InputReplyTo
|
||||
|
||||
if let replyMessageId = replyMessageId {
|
||||
let replyFlags: Int32 = 0
|
||||
replyTo = .inputReplyToMessage(flags: replyFlags, replyToMsgId: replyMessageId, topMsgId: nil, replyToPeerId: nil, quoteText: nil, quoteEntities: nil, quoteOffset: nil)
|
||||
} else if let replyToStoryId = replyToStoryId {
|
||||
if let inputPeer = transaction.getPeer(replyToStoryId.peerId).flatMap(apiInputPeer) {
|
||||
flags |= 1 << 0
|
||||
replyTo = .inputReplyToStory(peer: inputPeer, storyId: replyToStoryId.id)
|
||||
} else {
|
||||
let replyFlags: Int32 = 0
|
||||
replyTo = .inputReplyToMessage(flags: replyFlags, replyToMsgId: 0, topMsgId: nil, replyToPeerId: nil, quoteText: nil, quoteEntities: nil, quoteOffset: nil)
|
||||
}
|
||||
|
||||
sendMessageRequest = network.request(Api.functions.messages.sendScreenshotNotification(peer: inputPeer, replyTo: replyTo, randomId: uniqueId))
|
||||
|> map(NetworkRequestResult.result)
|
||||
case .secretMedia:
|
||||
assertionFailure()
|
||||
sendMessageRequest = .fail(MTRpcError(errorCode: 400, errorDescription: "internal"))
|
||||
} else {
|
||||
let replyFlags: Int32 = 0
|
||||
replyTo = .inputReplyToMessage(flags: replyFlags, replyToMsgId: 0, topMsgId: nil, replyToPeerId: nil, quoteText: nil, quoteEntities: nil, quoteOffset: nil)
|
||||
}
|
||||
|
||||
sendMessageRequest = network.request(Api.functions.messages.sendScreenshotNotification(peer: inputPeer, replyTo: replyTo, randomId: uniqueId))
|
||||
|> map(NetworkRequestResult.result)
|
||||
case .secretMedia:
|
||||
assertionFailure()
|
||||
sendMessageRequest = .fail(MTRpcError(errorCode: 400, errorDescription: "internal"))
|
||||
}
|
||||
|
||||
return sendMessageRequest
|
||||
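Note on the `flags` values above: Telegram's TL serialization packs optional API arguments into a single conditional bitmask, so each `flags |= Int32(1 << n)` marks that the parameter bound to bit n is present (bit 0 the reply, bit 10 the schedule date, bit 13 the send-as peer, and so on). A minimal sketch of the convention (standalone, not part of this diff):

    // Sketch: TL conditional fields. A parameter is serialized only
    // when its designated bit is set in `flags`.
    var flags: Int32 = 0
    var scheduleDate: Int32? = nil

    let sendLater = true
    if sendLater {
        flags |= Int32(1 << 10) // bit 10: scheduleDate is present
        scheduleDate = 1_700_000_000 // hypothetical unix timestamp
    }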
@ -524,6 +550,143 @@ private func sendUploadedMessageContent(
    |> switchToLatest
}

private func sendUploadedMultiMessageContent(
    auxiliaryMethods: AccountAuxiliaryMethods,
    postbox: Postbox,
    network: Network,
    stateManager: AccountStateManager,
    accountPeerId: PeerId,
    peerId: PeerId,
    content: [PendingMessageUploadedContentAndReuploadInfo],
    attributes: [MessageAttribute],
    threadId: Int64?
) -> Signal<Never, StandaloneSendMessagesError> {
    return postbox.transaction { transaction -> Signal<Never, StandaloneSendMessagesError> in
        if let peer = transaction.getPeer(peerId), let inputPeer = apiInputPeer(peer) {
            //var forwardSourceInfoAttribute: ForwardSourceInfoAttribute?
            var messageEntities: [Api.MessageEntity]?
            var replyMessageId: Int32? = threadId.flatMap { threadId in
                return Int32(clamping: threadId)
            }
            var replyToStoryId: StoryId?
            var scheduleTime: Int32?
            var sendAsPeerId: PeerId?
            var bubbleUpEmojiOrStickersets = false

            var flags: Int32 = 0

            for attribute in attributes {
                if let replyAttribute = attribute as? ReplyMessageAttribute {
                    replyMessageId = replyAttribute.messageId.id
                } else if let attribute = attribute as? ReplyStoryAttribute {
                    replyToStoryId = attribute.storyId
                } else if let outgoingInfo = attribute as? OutgoingMessageInfoAttribute {
                    bubbleUpEmojiOrStickersets = !outgoingInfo.bubbleUpEmojiOrStickersets.isEmpty
                } else if let _ = attribute as? ForwardSourceInfoAttribute {
                    //forwardSourceInfoAttribute = attribute
                } else if let attribute = attribute as? TextEntitiesMessageAttribute {
                    var associatedPeers = SimpleDictionary<PeerId, Peer>()
                    for attributePeerId in attribute.associatedPeerIds {
                        if let peer = transaction.getPeer(attributePeerId) {
                            associatedPeers[peer.id] = peer
                        }
                    }
                    messageEntities = apiTextAttributeEntities(attribute, associatedPeers: associatedPeers)
                } else if let attribute = attribute as? OutgoingContentInfoMessageAttribute {
                    if attribute.flags.contains(.disableLinkPreviews) {
                        flags |= Int32(1 << 1)
                    }
                } else if let attribute = attribute as? NotificationInfoMessageAttribute {
                    if attribute.flags.contains(.muted) {
                        flags |= Int32(1 << 5)
                    }
                } else if let attribute = attribute as? OutgoingScheduleInfoMessageAttribute {
                    flags |= Int32(1 << 10)
                    scheduleTime = attribute.scheduleTime
                } else if let attribute = attribute as? SendAsMessageAttribute {
                    sendAsPeerId = attribute.peerId
                }
            }

            var replyTo: Api.InputReplyTo?
            if let replyMessageId = replyMessageId {
                flags |= 1 << 0

                var replyFlags: Int32 = 0
                if threadId != nil {
                    replyFlags |= 1 << 0
                }
                replyTo = .inputReplyToMessage(flags: replyFlags, replyToMsgId: replyMessageId, topMsgId: threadId.flatMap(Int32.init(clamping:)), replyToPeerId: nil, quoteText: nil, quoteEntities: nil, quoteOffset: nil)
            } else if let replyToStoryId = replyToStoryId {
                if let inputPeer = transaction.getPeer(replyToStoryId.peerId).flatMap(apiInputPeer) {
                    flags |= 1 << 0
                    replyTo = .inputReplyToStory(peer: inputPeer, storyId: replyToStoryId.id)
                }
            }

            flags |= (1 << 7)

            if let _ = replyMessageId {
                flags |= Int32(1 << 0)
            }
            if let _ = messageEntities {
                flags |= Int32(1 << 3)
            }

            if bubbleUpEmojiOrStickersets {
                flags |= Int32(1 << 15)
            }

            var sendAsInputPeer: Api.InputPeer?
            if let sendAsPeerId = sendAsPeerId, let sendAsPeer = transaction.getPeer(sendAsPeerId), let inputPeer = apiInputPeerOrSelf(sendAsPeer, accountPeerId: accountPeerId) {
                sendAsInputPeer = inputPeer
                flags |= (1 << 13)
            }

            let dependencyTag: PendingMessageRequestDependencyTag? = nil //(messageId: messageId)

            let sendMessageRequest: Signal<NetworkRequestResult<Api.Updates>, MTRpcError>

            var multiMedia: [Api.InputSingleMedia] = []
            for singleContent in content {
                if case let .media(inputMedia, text) = singleContent.content {
                    let uniqueId = Int64.random(in: Int64.min ... Int64.max)
                    multiMedia.append(.inputSingleMedia(flags: 0, media: inputMedia, randomId: uniqueId, message: text, entities: nil))
                }
            }

            sendMessageRequest = network.request(Api.functions.messages.sendMultiMedia(flags: flags, peer: inputPeer, replyTo: replyTo, multiMedia: multiMedia, scheduleDate: scheduleTime, sendAs: sendAsInputPeer, quickReplyShortcut: nil, effect: nil), tag: dependencyTag)
            |> map(NetworkRequestResult.result)

            return sendMessageRequest
            |> mapToSignal { result -> Signal<Never, MTRpcError> in
                switch result {
                case .progress:
                    return .complete()
                case .acknowledged:
                    return .complete()
                case let .result(result):
                    stateManager.addUpdates(result)
                    return .complete()
                }
            }
            |> mapError { error -> StandaloneSendMessagesError in
                if error.errorDescription.hasPrefix("FILEREF_INVALID") || error.errorDescription.hasPrefix("FILE_REFERENCE_") {
                    return StandaloneSendMessagesError(peerId: peerId, reason: nil)
                } else if let failureReason = sendMessageReasonForError(error.errorDescription) {
                    return StandaloneSendMessagesError(peerId: peerId, reason: failureReason)
                }
                return StandaloneSendMessagesError(peerId: peerId, reason: nil)
            }
        } else {
            return .complete()
        }
    }
    |> castError(StandaloneSendMessagesError.self)
    |> switchToLatest
}

public func standaloneSendMessage(account: Account, peerId: PeerId, text: String, attributes: [MessageAttribute], media: StandaloneMedia?, replyToMessageId: MessageId?, threadId: Int32? = nil) -> Signal<Float, StandaloneSendMessageError> {
    let content: Signal<StandaloneSendMessageEvent, StandaloneSendMessageError>
    if let media = media {
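The new sendUploadedMultiMessageContent path (used for grouped media such as the story collage) issues a single messages.sendMultiMedia call: each item carries its own random id and caption inside an inputSingleMedia, while reply, schedule, and send-as state stay on the container. A hedged usage sketch of the per-item payload (`photoA`/`photoB` are hypothetical, already-uploaded Api.InputMedia values):

    // Sketch: building the album payload, mirroring the loop above.
    var multiMedia: [Api.InputSingleMedia] = []
    for (inputMedia, caption) in [(photoA, "first"), (photoB, "second")] {
        multiMedia.append(.inputSingleMedia(
            flags: 0,
            media: inputMedia,
            randomId: Int64.random(in: Int64.min ... Int64.max), // per-item dedup key
            message: caption,
            entities: nil
        ))
    }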
@ -657,7 +820,7 @@ private func sendMessageContent(account: Account, peerId: PeerId, attributes: [M
        replyTo = .inputReplyToMessage(flags: flags, replyToMsgId: threadId, topMsgId: threadId, replyToPeerId: nil, quoteText: nil, quoteEntities: nil, quoteOffset: nil)
    }

    sendMessageRequest = account.network.request(Api.functions.messages.sendMedia(flags: flags, peer: inputPeer, replyTo: replyTo, media: inputMedia, message: text, randomId: uniqueId, replyMarkup: nil, entities: messageEntities, scheduleDate: scheduleTime, sendAs: sendAsInputPeer, quickReplyShortcut: nil, effect: nil))
    |> `catch` { _ -> Signal<Api.Updates, NoError> in
        return .complete()
    }
@ -56,6 +56,7 @@ swift_library(
        "//submodules/AsyncDisplayKit",
        "//submodules/Display",
        "//submodules/TelegramCore",
        "//submodules/MetalEngine",
        "//submodules/SSignalKit/SwiftSignalKit",
        "//submodules/ComponentFlow",
        "//submodules/Components/ViewControllerComponent",
@ -81,6 +82,9 @@ swift_library(
        "//submodules/TelegramNotices",
        "//submodules/DeviceAccess",
        "//submodules/TelegramUI/Components/Utils/RoundedRectWithTailPath",
        "//submodules/TelegramUI/Components/MediaAssetsContext",
        "//submodules/UndoUI",
        "//submodules/ContextUI",
    ],
    visibility = [
@ -78,3 +78,172 @@ fragment half4 cameraBlobFragment(RasterizerData in[[stage_in]],

    return half4(min(minColor, c), min(minColor, max(cAlpha, 0.231)), min(minColor, max(cAlpha, 0.188)), c);
}

struct Rectangle {
    float2 origin;
    float2 size;
};

constant static float2 quadVertices[6] = {
    float2(0.0, 0.0),
    float2(1.0, 0.0),
    float2(0.0, 1.0),
    float2(1.0, 0.0),
    float2(0.0, 1.0),
    float2(1.0, 1.0)
};

struct QuadVertexOut {
    float4 position [[position]];
    float2 uv;
};

kernel void videoBiPlanarToRGBA(
    texture2d<half, access::read> inTextureY [[ texture(0) ]],
    texture2d<half, access::read> inTextureUV [[ texture(1) ]],
    texture2d<half, access::write> outTexture [[ texture(2) ]],
    uint2 threadPosition [[ thread_position_in_grid ]]
) {
    half y = inTextureY.read(threadPosition).r;
    half2 uv = inTextureUV.read(uint2(threadPosition.x / 2, threadPosition.y / 2)).rg - half2(0.5, 0.5);

    half4 color(y + 1.403 * uv.y, y - 0.344 * uv.x - 0.714 * uv.y, y + 1.770 * uv.x, 1.0);
    outTexture.write(color, threadPosition);
}

kernel void videoTriPlanarToRGBA(
    texture2d<half, access::read> inTextureY [[ texture(0) ]],
    texture2d<half, access::read> inTextureU [[ texture(1) ]],
    texture2d<half, access::read> inTextureV [[ texture(2) ]],
    texture2d<half, access::write> outTexture [[ texture(3) ]],
    uint2 threadPosition [[ thread_position_in_grid ]]
) {
    half y = inTextureY.read(threadPosition).r;
    uint2 uvPosition = uint2(threadPosition.x / 2, threadPosition.y / 2);
    half2 inUV = half2(inTextureU.read(uvPosition).r, inTextureV.read(uvPosition).r);
    half2 uv = inUV - half2(0.5, 0.5);

    half4 color(y + 1.403 * uv.y, y - 0.344 * uv.x - 0.714 * uv.y, y + 1.770 * uv.x, 1.0);
    outTexture.write(color, threadPosition);
}

vertex QuadVertexOut mainVideoVertex(
    const device Rectangle &rect [[ buffer(0) ]],
    const device uint2 &mirror [[ buffer(1) ]],
    unsigned int vid [[ vertex_id ]]
) {
    float2 quadVertex = quadVertices[vid];

    QuadVertexOut out;

    out.position = float4(rect.origin.x + quadVertex.x * rect.size.x, rect.origin.y + quadVertex.y * rect.size.y, 0.0, 1.0);
    out.position.x = -1.0 + out.position.x * 2.0;
    out.position.y = -1.0 + out.position.y * 2.0;

    float2 uv = float2(quadVertex.x, 1.0 - quadVertex.y);
    out.uv = float2(uv.y, 1.0 - uv.x);
    if (mirror.x == 1) {
        out.uv.x = 1.0 - out.uv.x;
    }
    if (mirror.y == 1) {
        out.uv.y = 1.0 - out.uv.y;
    }

    return out;
}

half4 rgb2hsv(half4 c) {
    half4 K = half4(0.0, -1.0 / 3.0, 2.0 / 3.0, -1.0);
    half4 p = mix(half4(c.bg, K.wz), half4(c.gb, K.xy), step(c.b, c.g));
    half4 q = mix(half4(p.xyw, c.r), half4(c.r, p.yzx), step(p.x, c.r));

    float d = q.x - min(q.w, q.y);
    float e = 1.0e-10;
    return half4(abs(q.z + (q.w - q.y) / (6.0 * d + e)), d / (q.x + e), q.x, c.a);
}

half4 hsv2rgb(half4 c) {
    half4 K = half4(1.0, 2.0 / 3.0, 1.0 / 3.0, 3.0);
    half3 p = abs(fract(c.xxx + K.xyz) * 6.0 - K.www);
    return half4(c.z * mix(K.xxx, clamp(p - K.xxx, 0.0, 1.0), c.y), c.a);
}

fragment half4 mainVideoFragment(
    QuadVertexOut in [[stage_in]],
    texture2d<half> texture [[ texture(0) ]],
    const device float &brightness [[ buffer(0) ]],
    const device float &saturation [[ buffer(1) ]],
    const device float4 &overlay [[ buffer(2) ]]
) {
    constexpr sampler sampler(coord::normalized, address::repeat, filter::linear);
    half4 color = texture.sample(sampler, in.uv);
    color = rgb2hsv(color);
    color.b = clamp(color.b * brightness, 0.0, 1.0);
    color.g = clamp(color.g * saturation, 0.0, 1.0);
    color = hsv2rgb(color);
    color.rgb += half3(overlay.rgb * overlay.a);
    color.rgb = min(color.rgb, half3(1.0, 1.0, 1.0));

    return half4(color.r, color.g, color.b, color.a);
}

constant int BLUR_SAMPLE_COUNT = 7;
constant float BLUR_OFFSETS[BLUR_SAMPLE_COUNT] = {
    1.489585,
    3.475713,
    5.461880,
    7.448104,
    9.434408,
    11.420812,
    13.407332
};

constant float BLUR_WEIGHTS[BLUR_SAMPLE_COUNT] = {
    0.130498886,
    0.113685958,
    0.0886923522,
    0.0619646012,
    0.0387683809,
    0.0217213109,
    0.0108984858
};

static void gaussianBlur(
    texture2d<half, access::sample> inTexture,
    texture2d<half, access::write> outTexture,
    float2 offset,
    uint2 gid
) {
    constexpr sampler sampler(coord::normalized, address::clamp_to_edge, filter::linear);

    uint2 textureDim(outTexture.get_width(), outTexture.get_height());
    if (all(gid < textureDim)) {
        float3 outColor(0.0);

        float2 size(inTexture.get_width(), inTexture.get_height());

        float2 baseTexCoord = float2(gid);

        // Sample symmetrically in both directions along the blur axis;
        // the weight table covers one half of the (centre-less) kernel.
        for (int i = 0; i < BLUR_SAMPLE_COUNT; i++) {
            outColor += float3(inTexture.sample(sampler, (baseTexCoord + offset * BLUR_OFFSETS[i]) / size).rgb) * BLUR_WEIGHTS[i];
            outColor += float3(inTexture.sample(sampler, (baseTexCoord - offset * BLUR_OFFSETS[i]) / size).rgb) * BLUR_WEIGHTS[i];
        }

        outTexture.write(half4(half3(outColor), 1.0), gid);
    }
}

kernel void gaussianBlurHorizontal(
    texture2d<half, access::sample> inTexture [[ texture(0) ]],
    texture2d<half, access::write> outTexture [[ texture(1) ]],
    uint2 gid [[ thread_position_in_grid ]]
) {
    gaussianBlur(inTexture, outTexture, float2(1, 0), gid);
}

kernel void gaussianBlurVertical(
    texture2d<half, access::sample> inTexture [[ texture(0) ]],
    texture2d<half, access::write> outTexture [[ texture(1) ]],
    uint2 gid [[ thread_position_in_grid ]]
) {
    gaussianBlur(inTexture, outTexture, float2(0, 1), gid);
}
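The blur constants above implement the linear-sampling trick: because the sampler filters bilinearly, a single fetch at a fractional offset between two texels returns their weighted average, so the seven offset/weight pairs per direction stand in for fourteen discrete Gaussian taps. Assuming the constants were derived the usual way (the commit does not include a generator), the merge looks roughly like this:

    // Sketch: folding pairs of discrete Gaussian taps into bilinear
    // fetches; offset = i + w2 / (w1 + w2), weight = w1 + w2.
    func linearTaps(discrete: [Double]) -> [(offset: Double, weight: Double)] {
        var taps: [(offset: Double, weight: Double)] = []
        var i = 1 // index 0 is the centre tap
        while i + 1 < discrete.count {
            let w1 = discrete[i]
            let w2 = discrete[i + 1]
            taps.append((offset: Double(i) + w2 / (w1 + w2), weight: w1 + w2))
            i += 2
        }
        return taps
    }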
File diff suppressed because it is too large
@ -0,0 +1,28 @@
import Foundation
import UIKit
import Display
import MetalKit

private final class BundleMarker: NSObject {
}

private var metalLibraryValue: MTLLibrary?
func metalLibrary(device: MTLDevice) -> MTLLibrary? {
    if let metalLibraryValue {
        return metalLibraryValue
    }

    let mainBundle = Bundle(for: BundleMarker.self)
    guard let path = mainBundle.path(forResource: "CameraScreenBundle", ofType: "bundle") else {
        return nil
    }
    guard let bundle = Bundle(path: path) else {
        return nil
    }
    guard let library = try? device.makeDefaultLibrary(bundle: bundle) else {
        return nil
    }

    metalLibraryValue = library
    return library
}
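The helper memoizes the compiled library, so every pipeline-state construction after the first costs only the cached lookup. A hedged usage sketch (the kernel name matches the shader file above):

    // Sketch: turning the cached library into a compute pipeline.
    if let device = MTLCreateSystemDefaultDevice(),
       let library = metalLibrary(device: device),
       let function = library.makeFunction(name: "videoBiPlanarToRGBA"),
       let pipeline = try? device.makeComputePipelineState(function: function) {
        _ = pipeline // ready to encode, as CameraVideoLayer does below
    }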
File diff suppressed because it is too large
@ -0,0 +1,342 @@
import Foundation
import UIKit
import SwiftSignalKit
import MetalKit
import MetalPerformanceShaders
import Accelerate
import MetalEngine

public final class VideoSourceOutput {
    public struct MirrorDirection: OptionSet {
        public var rawValue: Int32

        public init(rawValue: Int32) {
            self.rawValue = rawValue
        }

        public static let horizontal = MirrorDirection(rawValue: 1 << 0)
        public static let vertical = MirrorDirection(rawValue: 1 << 1)
    }

    open class DataBuffer {
        open var pixelBuffer: CVPixelBuffer? {
            return nil
        }

        public init() {
        }
    }

    public final class BiPlanarTextureLayout {
        public let y: MTLTexture
        public let uv: MTLTexture

        public init(y: MTLTexture, uv: MTLTexture) {
            self.y = y
            self.uv = uv
        }
    }

    public final class TriPlanarTextureLayout {
        public let y: MTLTexture
        public let u: MTLTexture
        public let v: MTLTexture

        public init(y: MTLTexture, u: MTLTexture, v: MTLTexture) {
            self.y = y
            self.u = u
            self.v = v
        }
    }

    public enum TextureLayout {
        case biPlanar(BiPlanarTextureLayout)
        case triPlanar(TriPlanarTextureLayout)
    }

    public final class NativeDataBuffer: DataBuffer {
        private let pixelBufferValue: CVPixelBuffer
        override public var pixelBuffer: CVPixelBuffer? {
            return self.pixelBufferValue
        }

        public init(pixelBuffer: CVPixelBuffer) {
            self.pixelBufferValue = pixelBuffer
        }
    }

    public let resolution: CGSize
    public let textureLayout: TextureLayout
    public let dataBuffer: DataBuffer
    public let mirrorDirection: MirrorDirection
    public let sourceId: Int

    public init(resolution: CGSize, textureLayout: TextureLayout, dataBuffer: DataBuffer, mirrorDirection: MirrorDirection, sourceId: Int) {
        self.resolution = resolution
        self.textureLayout = textureLayout
        self.dataBuffer = dataBuffer
        self.mirrorDirection = mirrorDirection
        self.sourceId = sourceId
    }
}

public protocol VideoSource: AnyObject {
    typealias Output = VideoSourceOutput

    var currentOutput: Output? { get }

    func addOnUpdated(_ f: @escaping () -> Void) -> Disposable
}

final class CameraVideoLayer: MetalEngineSubjectLayer, MetalEngineSubject {
    public var internalData: MetalEngineSubjectInternalData?

    public let blurredLayer: MetalEngineSubjectLayer

    final class BlurState: ComputeState {
        let computePipelineStateYUVBiPlanarToRGBA: MTLComputePipelineState
        let computePipelineStateYUVTriPlanarToRGBA: MTLComputePipelineState
        let computePipelineStateHorizontal: MTLComputePipelineState
        let computePipelineStateVertical: MTLComputePipelineState
        let downscaleKernel: MPSImageBilinearScale

        required init?(device: MTLDevice) {
            guard let library = metalLibrary(device: device) else {
                return nil
            }

            guard let functionVideoBiPlanarToRGBA = library.makeFunction(name: "videoBiPlanarToRGBA") else {
                return nil
            }
            guard let computePipelineStateYUVBiPlanarToRGBA = try? device.makeComputePipelineState(function: functionVideoBiPlanarToRGBA) else {
                return nil
            }
            self.computePipelineStateYUVBiPlanarToRGBA = computePipelineStateYUVBiPlanarToRGBA

            guard let functionVideoTriPlanarToRGBA = library.makeFunction(name: "videoTriPlanarToRGBA") else {
                return nil
            }
            guard let computePipelineStateYUVTriPlanarToRGBA = try? device.makeComputePipelineState(function: functionVideoTriPlanarToRGBA) else {
                return nil
            }
            self.computePipelineStateYUVTriPlanarToRGBA = computePipelineStateYUVTriPlanarToRGBA

            guard let gaussianBlurHorizontal = library.makeFunction(name: "gaussianBlurHorizontal"), let gaussianBlurVertical = library.makeFunction(name: "gaussianBlurVertical") else {
                return nil
            }
            guard let computePipelineStateHorizontal = try? device.makeComputePipelineState(function: gaussianBlurHorizontal) else {
                return nil
            }
            self.computePipelineStateHorizontal = computePipelineStateHorizontal

            guard let computePipelineStateVertical = try? device.makeComputePipelineState(function: gaussianBlurVertical) else {
                return nil
            }
            self.computePipelineStateVertical = computePipelineStateVertical

            self.downscaleKernel = MPSImageBilinearScale(device: device)
        }
    }

    final class RenderState: RenderToLayerState {
        let pipelineState: MTLRenderPipelineState

        required init?(device: MTLDevice) {
            guard let library = metalLibrary(device: device) else {
                return nil
            }
            guard let vertexFunction = library.makeFunction(name: "mainVideoVertex"), let fragmentFunction = library.makeFunction(name: "mainVideoFragment") else {
                return nil
            }

            let pipelineDescriptor = MTLRenderPipelineDescriptor()
            pipelineDescriptor.vertexFunction = vertexFunction
            pipelineDescriptor.fragmentFunction = fragmentFunction
            pipelineDescriptor.colorAttachments[0].pixelFormat = .bgra8Unorm
            guard let pipelineState = try? device.makeRenderPipelineState(descriptor: pipelineDescriptor) else {
                return nil
            }
            self.pipelineState = pipelineState
        }
    }

    public var video: VideoSource.Output? {
        didSet {
            self.setNeedsUpdate()
        }
    }

    public var renderSpec: RenderLayerSpec?

    private var rgbaTexture: PooledTexture?
    private var downscaledTexture: PooledTexture?
    private var blurredHorizontalTexture: PooledTexture?
    private var blurredVerticalTexture: PooledTexture?

    override public init() {
        self.blurredLayer = MetalEngineSubjectLayer()

        super.init()
    }

    override public init(layer: Any) {
        self.blurredLayer = MetalEngineSubjectLayer()

        super.init(layer: layer)
    }

    required public init?(coder: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }

    public func update(context: MetalEngineSubjectContext) {
        if self.isHidden {
            return
        }
        guard let renderSpec = self.renderSpec else {
            return
        }
        guard let videoTextures = self.video else {
            return
        }

        let rgbaTextureSpec = TextureSpec(width: Int(videoTextures.resolution.width), height: Int(videoTextures.resolution.height), pixelFormat: .rgba8UnsignedNormalized)
        if self.rgbaTexture == nil || self.rgbaTexture?.spec != rgbaTextureSpec {
            self.rgbaTexture = MetalEngine.shared.pooledTexture(spec: rgbaTextureSpec)
        }
        if self.downscaledTexture == nil {
            self.downscaledTexture = MetalEngine.shared.pooledTexture(spec: TextureSpec(width: 256, height: 256, pixelFormat: .rgba8UnsignedNormalized))
        }
        if self.blurredHorizontalTexture == nil {
            self.blurredHorizontalTexture = MetalEngine.shared.pooledTexture(spec: TextureSpec(width: 256, height: 256, pixelFormat: .rgba8UnsignedNormalized))
        }
        if self.blurredVerticalTexture == nil {
            self.blurredVerticalTexture = MetalEngine.shared.pooledTexture(spec: TextureSpec(width: 256, height: 256, pixelFormat: .rgba8UnsignedNormalized))
        }

        guard let rgbaTexture = self.rgbaTexture?.get(context: context) else {
            return
        }

        let _ = context.compute(state: BlurState.self, inputs: rgbaTexture.placeholer, commands: { commandBuffer, blurState, rgbaTexture in
            guard let rgbaTexture else {
                return
            }
            guard let computeEncoder = commandBuffer.makeComputeCommandEncoder() else {
                return
            }

            let threadgroupSize = MTLSize(width: 16, height: 16, depth: 1)
            let threadgroupCount = MTLSize(width: (rgbaTexture.width + threadgroupSize.width - 1) / threadgroupSize.width, height: (rgbaTexture.height + threadgroupSize.height - 1) / threadgroupSize.height, depth: 1)

            switch videoTextures.textureLayout {
            case let .biPlanar(biPlanar):
                computeEncoder.setComputePipelineState(blurState.computePipelineStateYUVBiPlanarToRGBA)
                computeEncoder.setTexture(biPlanar.y, index: 0)
                computeEncoder.setTexture(biPlanar.uv, index: 1)
                computeEncoder.setTexture(rgbaTexture, index: 2)
            case let .triPlanar(triPlanar):
                computeEncoder.setComputePipelineState(blurState.computePipelineStateYUVTriPlanarToRGBA)
                computeEncoder.setTexture(triPlanar.y, index: 0)
                computeEncoder.setTexture(triPlanar.u, index: 1)
                computeEncoder.setTexture(triPlanar.v, index: 2)
                computeEncoder.setTexture(rgbaTexture, index: 3)
            }
            computeEncoder.dispatchThreadgroups(threadgroupCount, threadsPerThreadgroup: threadgroupSize)

            computeEncoder.endEncoding()
        })

        if !self.blurredLayer.isHidden {
            guard let downscaledTexture = self.downscaledTexture?.get(context: context), let blurredHorizontalTexture = self.blurredHorizontalTexture?.get(context: context), let blurredVerticalTexture = self.blurredVerticalTexture?.get(context: context) else {
                return
            }

            let blurredTexture = context.compute(state: BlurState.self, inputs: rgbaTexture.placeholer, downscaledTexture.placeholer, blurredHorizontalTexture.placeholer, blurredVerticalTexture.placeholer, commands: { commandBuffer, blurState, rgbaTexture, downscaledTexture, blurredHorizontalTexture, blurredVerticalTexture -> MTLTexture? in
                guard let rgbaTexture, let downscaledTexture, let blurredHorizontalTexture, let blurredVerticalTexture else {
                    return nil
                }

                blurState.downscaleKernel.encode(commandBuffer: commandBuffer, sourceTexture: rgbaTexture, destinationTexture: downscaledTexture)

                do {
                    guard let computeEncoder = commandBuffer.makeComputeCommandEncoder() else {
                        return nil
                    }

                    let threadgroupSize = MTLSize(width: 16, height: 16, depth: 1)
                    let threadgroupCount = MTLSize(width: (downscaledTexture.width + threadgroupSize.width - 1) / threadgroupSize.width, height: (downscaledTexture.height + threadgroupSize.height - 1) / threadgroupSize.height, depth: 1)

                    computeEncoder.setComputePipelineState(blurState.computePipelineStateHorizontal)
                    computeEncoder.setTexture(downscaledTexture, index: 0)
                    computeEncoder.setTexture(blurredHorizontalTexture, index: 1)
                    computeEncoder.dispatchThreadgroups(threadgroupCount, threadsPerThreadgroup: threadgroupSize)

                    computeEncoder.setComputePipelineState(blurState.computePipelineStateVertical)
                    computeEncoder.setTexture(blurredHorizontalTexture, index: 0)
                    computeEncoder.setTexture(blurredVerticalTexture, index: 1)
                    computeEncoder.dispatchThreadgroups(threadgroupCount, threadsPerThreadgroup: threadgroupSize)

                    computeEncoder.endEncoding()
                }

                return blurredVerticalTexture
            })

            context.renderToLayer(spec: renderSpec, state: RenderState.self, layer: self.blurredLayer, inputs: blurredTexture, commands: { encoder, placement, blurredTexture in
                guard let blurredTexture else {
                    return
                }
                let effectiveRect = placement.effectiveRect

                var rect = SIMD4<Float>(Float(effectiveRect.minX), Float(effectiveRect.minY), Float(effectiveRect.width), Float(effectiveRect.height))
                encoder.setVertexBytes(&rect, length: 4 * 4, index: 0)

                var mirror = SIMD2<UInt32>(
                    videoTextures.mirrorDirection.contains(.horizontal) ? 1 : 0,
                    videoTextures.mirrorDirection.contains(.vertical) ? 1 : 0
                )
                encoder.setVertexBytes(&mirror, length: 2 * 4, index: 1)

                encoder.setFragmentTexture(blurredTexture, index: 0)

                var brightness: Float = 0.75
                var saturation: Float = 1.3
                var overlay: SIMD4<Float> = SIMD4<Float>()
                encoder.setFragmentBytes(&brightness, length: 4, index: 0)
                encoder.setFragmentBytes(&saturation, length: 4, index: 1)
                encoder.setFragmentBytes(&overlay, length: 4 * 4, index: 2)

                encoder.drawPrimitives(type: .triangle, vertexStart: 0, vertexCount: 6)
            })
        }

        context.renderToLayer(spec: renderSpec, state: RenderState.self, layer: self, inputs: rgbaTexture.placeholer, commands: { encoder, placement, rgbaTexture in
            guard let rgbaTexture else {
                return
            }

            let effectiveRect = placement.effectiveRect

            var rect = SIMD4<Float>(Float(effectiveRect.minX), Float(effectiveRect.minY), Float(effectiveRect.width), Float(effectiveRect.height))
            encoder.setVertexBytes(&rect, length: 4 * 4, index: 0)

            var mirror = SIMD2<UInt32>(
                videoTextures.mirrorDirection.contains(.horizontal) ? 1 : 0,
                videoTextures.mirrorDirection.contains(.vertical) ? 1 : 0
            )
            encoder.setVertexBytes(&mirror, length: 2 * 4, index: 1)

            encoder.setFragmentTexture(rgbaTexture, index: 0)

            var brightness: Float = 1.0
            var saturation: Float = 1.0
            var overlay: SIMD4<Float> = SIMD4<Float>()
            encoder.setFragmentBytes(&brightness, length: 4, index: 0)
            encoder.setFragmentBytes(&saturation, length: 4, index: 1)
            encoder.setFragmentBytes(&overlay, length: 4 * 4, index: 2)

            encoder.drawPrimitives(type: .triangle, vertexStart: 0, vertexCount: 6)
        })
    }
}
@ -0,0 +1,82 @@
import AVFoundation
import Metal
import CoreVideo
import Display
import SwiftSignalKit
import Camera
import MetalEngine

final class CameraVideoSource: VideoSource {
    private var device: MTLDevice
    private var textureCache: CVMetalTextureCache?

    private(set) var cameraVideoOutput: CameraVideoOutput!

    public private(set) var currentOutput: Output?
    private var onUpdatedListeners = Bag<() -> Void>()

    public var sourceId: Int = 0
    public var sizeMultiplicator: CGPoint = CGPoint(x: 1.0, y: 1.0)

    public init?() {
        self.device = MetalEngine.shared.device

        self.cameraVideoOutput = CameraVideoOutput(sink: { [weak self] buffer in
            self?.push(buffer)
        })

        CVMetalTextureCacheCreate(nil, nil, self.device, nil, &self.textureCache)
    }

    public func addOnUpdated(_ f: @escaping () -> Void) -> Disposable {
        let index = self.onUpdatedListeners.add(f)

        return ActionDisposable { [weak self] in
            DispatchQueue.main.async {
                guard let self else {
                    return
                }
                self.onUpdatedListeners.remove(index)
            }
        }
    }

    private func push(_ sampleBuffer: CMSampleBuffer) {
        guard let buffer = CMSampleBufferGetImageBuffer(sampleBuffer) else {
            return
        }

        let width = CVPixelBufferGetWidth(buffer)
        let height = CVPixelBufferGetHeight(buffer)

        var cvMetalTextureY: CVMetalTexture?
        var status = CVMetalTextureCacheCreateTextureFromImage(nil, self.textureCache!, buffer, nil, .r8Unorm, width, height, 0, &cvMetalTextureY)
        guard status == kCVReturnSuccess, let yTexture = CVMetalTextureGetTexture(cvMetalTextureY!) else {
            return
        }
        var cvMetalTextureUV: CVMetalTexture?
        status = CVMetalTextureCacheCreateTextureFromImage(nil, self.textureCache!, buffer, nil, .rg8Unorm, width / 2, height / 2, 1, &cvMetalTextureUV)
        guard status == kCVReturnSuccess, let uvTexture = CVMetalTextureGetTexture(cvMetalTextureUV!) else {
            return
        }

        var resolution = CGSize(width: CGFloat(yTexture.width), height: CGFloat(yTexture.height))
        resolution.width = floor(resolution.width * self.sizeMultiplicator.x)
        resolution.height = floor(resolution.height * self.sizeMultiplicator.y)

        self.currentOutput = Output(
            resolution: resolution,
            textureLayout: .biPlanar(Output.BiPlanarTextureLayout(
                y: yTexture,
                uv: uvTexture
            )),
            dataBuffer: Output.NativeDataBuffer(pixelBuffer: buffer),
            mirrorDirection: [],
            sourceId: self.sourceId
        )

        for onUpdated in self.onUpdatedListeners.copyItems() {
            onUpdated()
        }
    }
}
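push(_:) wraps each bi-planar (NV12) camera frame as two Metal textures with no copy: plane 0 is the full-resolution luma exposed as .r8Unorm, and plane 1 is the half-resolution interleaved chroma exposed as .rg8Unorm, which is exactly what the videoBiPlanarToRGBA kernel above consumes. A small sketch for inspecting the planes that back those textures:

    import CoreVideo

    // Sketch: the two planes of a 420YpCbCr8BiPlanar pixel buffer.
    func planeSizes(of buffer: CVPixelBuffer) -> [(width: Int, height: Int)] {
        return (0 ..< CVPixelBufferGetPlaneCount(buffer)).map { plane in
            (width: CVPixelBufferGetWidthOfPlane(buffer, plane),
             height: CVPixelBufferGetHeightOfPlane(buffer, plane))
        }
    }
    // A 1920x1080 frame yields [(1920, 1080), (960, 540)]: the Y plane
    // and the half-resolution UV plane mapped above.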
@ -6,6 +6,7 @@ import SwiftSignalKit
import Photos
import LocalMediaResources
import CameraButtonComponent
import UIKitRuntimeUtils

enum ShutterButtonState: Equatable {
    case disabled
@ -34,6 +35,7 @@ private final class ShutterButtonContentComponent: Component
    let tintColor: UIColor
    let shutterState: ShutterButtonState
    let blobState: ShutterBlobView.BlobState
    let collageProgress: Float
    let highlightedAction: ActionSlot<Bool>
    let updateOffsetX: ActionSlot<(CGFloat, ComponentTransition)>
    let updateOffsetY: ActionSlot<(CGFloat, ComponentTransition)>
@ -44,6 +46,7 @@ private final class ShutterButtonContentComponent: Component
    tintColor: UIColor,
    shutterState: ShutterButtonState,
    blobState: ShutterBlobView.BlobState,
    collageProgress: Float,
    highlightedAction: ActionSlot<Bool>,
    updateOffsetX: ActionSlot<(CGFloat, ComponentTransition)>,
    updateOffsetY: ActionSlot<(CGFloat, ComponentTransition)>
@ -53,6 +56,7 @@ private final class ShutterButtonContentComponent: Component
    self.tintColor = tintColor
    self.shutterState = shutterState
    self.blobState = blobState
    self.collageProgress = collageProgress
    self.highlightedAction = highlightedAction
    self.updateOffsetX = updateOffsetX
    self.updateOffsetY = updateOffsetY
@ -74,17 +78,25 @@ private final class ShutterButtonContentComponent: Component
    if lhs.blobState != rhs.blobState {
        return false
    }
    if lhs.collageProgress != rhs.collageProgress {
        return false
    }
    return true
}

final class View: UIView {
    private var component: ShutterButtonContentComponent?

    private let underRingLayer = SimpleShapeLayer()
    private let ringLayer = SimpleShapeLayer()
    var blobView: ShutterBlobView?
    private let innerLayer = SimpleShapeLayer()
    private let progressLayer = SimpleShapeLayer()

    private let checkLayer = SimpleLayer()
    private let checkLayerMask = SimpleShapeLayer()
    private let checkLayerLineMask = SimpleShapeLayer()

    init() {
        super.init(frame: CGRect())

@ -92,7 +104,34 @@ private final class ShutterButtonContentComponent: Component

        self.progressLayer.strokeEnd = 0.0

        let checkPath = CGMutablePath()
        checkPath.move(to: CGPoint(x: 18.0 + 2.0, y: 18.0 + 13.0))
        checkPath.addLine(to: CGPoint(x: 18.0 + 9.0, y: 18.0 + 20.0))
        checkPath.addLine(to: CGPoint(x: 18.0 + 22.0, y: 18.0 + 7.0))

        self.checkLayer.frame = CGRect(origin: .zero, size: CGSize(width: 60.0, height: 60.0))
        if let filter = makeLuminanceToAlphaFilter() {
            self.checkLayerMask.filters = [filter]
        }
        self.checkLayerMask.backgroundColor = UIColor.black.cgColor
        self.checkLayerMask.fillColor = UIColor.white.cgColor
        self.checkLayerMask.path = CGPath(ellipseIn: self.checkLayer.frame, transform: nil)
        self.checkLayerMask.frame = self.checkLayer.frame

        self.checkLayerLineMask.path = checkPath
        self.checkLayerLineMask.lineWidth = 3.0
        self.checkLayerLineMask.lineCap = .round
        self.checkLayerLineMask.lineJoin = .round
        self.checkLayerLineMask.fillColor = UIColor.clear.cgColor
        self.checkLayerLineMask.strokeColor = UIColor.black.cgColor
        self.checkLayerLineMask.frame = self.checkLayer.frame
        self.checkLayerMask.addSublayer(self.checkLayerLineMask)

        self.checkLayer.mask = self.checkLayerMask
        self.checkLayer.isHidden = true

        self.layer.addSublayer(self.innerLayer)
        self.layer.addSublayer(self.underRingLayer)
        self.layer.addSublayer(self.ringLayer)
        self.layer.addSublayer(self.progressLayer)
    }
@ -102,21 +141,27 @@ private final class ShutterButtonContentComponent: Component
    }

    func updateIsHighlighted(_ isHighlighted: Bool) {
        guard let blobView = self.blobView else {
        guard let blobView = self.blobView, let component = self.component else {
            return
        }
        let scale: CGFloat = isHighlighted ? 0.8 : 1.0
        let transition = ComponentTransition(animation: .curve(duration: 0.3, curve: .easeInOut))
        transition.setTransform(view: blobView, transform: CATransform3DMakeScale(scale, scale, 1.0))
        if component.collageProgress > 1.0 - .ulpOfOne {
            transition.setTransform(layer: self.ringLayer, transform: CATransform3DMakeScale(scale, scale, 1.0))
        }
    }

    func update(component: ShutterButtonContentComponent, availableSize: CGSize, transition: ComponentTransition) -> CGSize {
        let previousComponent = self.component
        self.component = component

        if component.hasAppeared && self.blobView == nil {
            self.blobView = ShutterBlobView(test: false)
            self.addSubview(self.blobView!)

            self.layer.addSublayer(self.checkLayer)

            Queue.mainQueue().after(0.2) {
                self.innerLayer.removeFromSuperlayer()
            }
@ -163,10 +208,10 @@ private final class ShutterButtonContentComponent: Component
        }
    }

    let innerColor: UIColor
    var innerColor: UIColor
    let innerSize: CGSize
    let ringSize: CGSize
    let ringWidth: CGFloat = 3.0
    var ringSize: CGSize
    var ringWidth: CGFloat = 3.0
    var recordingProgress: Float?
    switch component.shutterState {
    case .generic, .disabled:
@ -193,9 +238,35 @@ private final class ShutterButtonContentComponent: Component
        recordingProgress = 0.0
    }

    if component.collageProgress > 1.0 - .ulpOfOne {
        innerColor = component.tintColor
        ringSize = CGSize(width: 60.0, height: 60.0)
        ringWidth = 5.0
    } else if component.collageProgress > 0.0 {
        ringSize = CGSize(width: 74.0, height: 74.0)
        ringWidth = 5.0
    }

    if component.collageProgress > 1.0 - .ulpOfOne {
        self.blobView?.isHidden = true
        self.checkLayer.isHidden = false
        transition.setShapeLayerStrokeEnd(layer: self.checkLayerLineMask, strokeEnd: 1.0)
    } else {
        self.checkLayer.isHidden = true
        self.blobView?.isHidden = false
//        transition.setAlpha(layer: self.checkLayerLineMask, alpha: 0.0)
//        transition.setShapeLayerStrokeEnd(layer: self.checkLayerLineMask, strokeEnd: 0.0, completion: { _ in
//            self.blobView?.isHidden = false
//            self.checkLayer.isHidden = true
//        })
    }

    self.checkLayer.backgroundColor = innerColor.cgColor

    self.ringLayer.fillColor = UIColor.clear.cgColor
    self.ringLayer.strokeColor = component.tintColor.cgColor
    self.ringLayer.lineWidth = ringWidth
    self.ringLayer.lineCap = .round
    let ringPath = CGPath(
        ellipseIn: CGRect(
            origin: CGPoint(
@ -208,6 +279,25 @@ private final class ShutterButtonContentComponent: Component
    transition.setShapeLayerPath(layer: self.ringLayer, path: ringPath)
    self.ringLayer.bounds = CGRect(origin: .zero, size: maximumShutterSize)
    self.ringLayer.position = CGPoint(x: maximumShutterSize.width / 2.0, y: maximumShutterSize.height / 2.0)
    self.ringLayer.transform = CATransform3DMakeRotation(-.pi / 2.0, 0.0, 0.0, 1.0)

    self.checkLayer.position = CGPoint(x: maximumShutterSize.width / 2.0, y: maximumShutterSize.height / 2.0)

    if component.collageProgress > 0.0 {
        if previousComponent?.collageProgress == 0.0 {
            self.ringLayer.animateRotation(from: -.pi * 3.0 / 2.0, to: -.pi / 2.0, duration: 0.3, timingFunction: kCAMediaTimingFunctionSpring)
        }
        transition.setShapeLayerStrokeEnd(layer: self.ringLayer, strokeEnd: CGFloat(component.collageProgress))
    } else {
        transition.setShapeLayerStrokeEnd(layer: self.ringLayer, strokeEnd: 1.0)
    }

    self.underRingLayer.fillColor = UIColor.clear.cgColor
    self.underRingLayer.strokeColor = component.tintColor.withAlphaComponent(0.2).cgColor
    self.underRingLayer.lineWidth = ringWidth
    transition.setShapeLayerPath(layer: self.underRingLayer, path: ringPath)
    self.underRingLayer.bounds = CGRect(origin: .zero, size: maximumShutterSize)
    self.underRingLayer.position = CGPoint(x: maximumShutterSize.width / 2.0, y: maximumShutterSize.height / 2.0)

    if let blobView = self.blobView {
        blobView.updateState(component.blobState, tintColor: innerColor, transition: transition)
@ -462,6 +552,8 @@ final class CaptureControlsComponent: Component
    let isSticker: Bool
    let hasAppeared: Bool
    let hasAccess: Bool
    let hideControls: Bool
    let collageProgress: Float
    let tintColor: UIColor
    let shutterState: ShutterButtonState
    let lastGalleryAsset: PHAsset?
@ -482,6 +574,8 @@ final class CaptureControlsComponent: Component
    isSticker: Bool,
    hasAppeared: Bool,
    hasAccess: Bool,
    hideControls: Bool,
    collageProgress: Float,
    tintColor: UIColor,
    shutterState: ShutterButtonState,
    lastGalleryAsset: PHAsset?,
@ -501,6 +595,8 @@ final class CaptureControlsComponent: Component
    self.isSticker = isSticker
    self.hasAppeared = hasAppeared
    self.hasAccess = hasAccess
    self.hideControls = hideControls
    self.collageProgress = collageProgress
    self.tintColor = tintColor
    self.shutterState = shutterState
    self.lastGalleryAsset = lastGalleryAsset
@ -530,6 +626,12 @@ final class CaptureControlsComponent: Component
    if lhs.hasAccess != rhs.hasAccess {
        return false
    }
    if lhs.hideControls != rhs.hideControls {
        return false
    }
    if lhs.collageProgress != rhs.collageProgress {
        return false
    }
    if lhs.tintColor != rhs.tintColor {
        return false
    }
@ -918,6 +1020,8 @@ final class CaptureControlsComponent: Component
        isTransitioning = true
    }

    let hideControls = component.hideControls

    let galleryButtonFrame: CGRect
    let gallerySize: CGSize
    if !component.isSticker {
@ -974,8 +1078,8 @@ final class CaptureControlsComponent: Component

        let normalAlpha = component.tintColor.rgb == 0xffffff ? 1.0 : 0.6

        transition.setScale(view: galleryButtonView, scale: isRecording || isTransitioning ? 0.1 : 1.0)
        transition.setAlpha(view: galleryButtonView, alpha: isRecording || isTransitioning ? 0.0 : normalAlpha)
        transition.setScale(view: galleryButtonView, scale: isRecording || isTransitioning || hideControls ? 0.1 : 1.0)
        transition.setAlpha(view: galleryButtonView, alpha: isRecording || isTransitioning || hideControls ? 0.0 : normalAlpha)
    }
} else {
    galleryButtonFrame = .zero
@ -1017,8 +1121,8 @@ final class CaptureControlsComponent: Component
    transition.setBounds(view: flipButtonView, bounds: CGRect(origin: .zero, size: flipButtonFrame.size))
    transition.setPosition(view: flipButtonView, position: flipButtonFrame.center)

    transition.setScale(view: flipButtonView, scale: isTransitioning ? 0.01 : 1.0)
    transition.setAlpha(view: flipButtonView, alpha: isTransitioning ? 0.0 : 1.0)
    transition.setScale(view: flipButtonView, scale: isTransitioning || hideControls ? 0.01 : 1.0)
    transition.setAlpha(view: flipButtonView, alpha: isTransitioning || hideControls ? 0.0 : 1.0)
}
} else if let flipButtonView = self.flipButtonView.view {
    flipButtonView.removeFromSuperview()
@ -1047,6 +1151,7 @@ final class CaptureControlsComponent: Component
    tintColor: component.tintColor,
    shutterState: component.shutterState,
    blobState: blobState,
    collageProgress: component.collageProgress,
    highlightedAction: self.shutterHightlightedAction,
    updateOffsetX: self.shutterUpdateOffsetX,
    updateOffsetY: self.shutterUpdateOffsetY
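In the capture-controls changes above, the shutter ring doubles as the collage progress indicator: strokeEnd tracks collageProgress, a spring rotation plays when progress first leaves zero, and at full progress the blob is hidden in favour of the check mark. A minimal sketch of the same strokeEnd pattern on a bare CAShapeLayer (outside the component machinery):

    import UIKit

    // Sketch: circular progress via strokeEnd, as the shutter ring does.
    let ring = CAShapeLayer()
    ring.path = CGPath(ellipseIn: CGRect(x: 0.0, y: 0.0, width: 60.0, height: 60.0), transform: nil)
    ring.fillColor = UIColor.clear.cgColor
    ring.strokeColor = UIColor.white.cgColor
    ring.lineWidth = 5.0
    ring.transform = CATransform3DMakeRotation(-.pi / 2.0, 0.0, 0.0, 1.0) // start at 12 o'clock
    ring.strokeEnd = 0.5 // e.g. collageProgress == 0.5 draws half the ring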
@ -0,0 +1,329 @@
|
||||
import Foundation
|
||||
import UIKit
|
||||
import Display
|
||||
import ComponentFlow
|
||||
import Camera
|
||||
import CameraButtonComponent
|
||||
|
||||
private func generateCollageIcon(grid: Camera.CollageGrid, crossed: Bool) -> UIImage? {
|
||||
return generateImage(CGSize(width: 36.0, height: 36.0), rotatedContext: { size, context in
|
||||
let bounds = CGRect(origin: .zero, size: size)
|
||||
context.clear(bounds)
|
||||
|
||||
let lineWidth = 2.0 - UIScreenPixel
|
||||
context.setLineWidth(lineWidth)
|
||||
context.setStrokeColor(UIColor.white.cgColor)
|
||||
|
||||
let iconBounds = bounds.insetBy(dx: 11.0, dy: 9.0)
|
||||
let path = UIBezierPath(roundedRect: iconBounds, cornerRadius: 3.0)
|
||||
context.addPath(path.cgPath)
|
||||
context.strokePath()
|
||||
|
||||
let rowHeight = iconBounds.height / CGFloat(grid.rows.count)
|
||||
|
||||
var yOffset: CGFloat = iconBounds.minY + lineWidth / 2.0
|
||||
for i in 0 ..< grid.rows.count {
|
||||
let row = grid.rows[i]
|
||||
var xOffset: CGFloat = iconBounds.minX
|
||||
let lineCount = max(0, row.columns - 1)
|
||||
let colWidth = iconBounds.width / CGFloat(max(row.columns, 1))
|
||||
for _ in 0 ..< lineCount {
|
||||
xOffset += colWidth
|
||||
context.move(to: CGPoint(x: xOffset, y: yOffset))
|
||||
context.addLine(to: CGPoint(x: xOffset, y: yOffset + rowHeight))
|
||||
context.strokePath()
|
||||
}
|
||||
yOffset += rowHeight
|
||||
|
||||
if i != grid.rows.count - 1 {
|
||||
context.move(to: CGPoint(x: iconBounds.minX, y: yOffset - lineWidth / 2.0))
|
||||
context.addLine(to: CGPoint(x: iconBounds.maxX, y: yOffset - lineWidth / 2.0))
|
||||
context.strokePath()
|
||||
}
|
||||
}
|
||||
|
||||
if crossed {
|
||||
context.setLineCap(.round)
|
||||
|
||||
let startPoint = CGPoint(x: iconBounds.minX - 3.0, y: iconBounds.minY - 2.0)
|
||||
let endPoint = CGPoint(x: iconBounds.maxX + 4.0, y: iconBounds.maxY + 1.0)
|
||||
|
||||
context.setBlendMode(.clear)
|
||||
context.move(to: startPoint.offsetBy(dx: 0.0, dy: lineWidth))
|
||||
context.addLine(to: endPoint.offsetBy(dx: 0.0, dy: lineWidth))
|
||||
context.strokePath()
|
||||
|
||||
context.setBlendMode(.normal)
|
||||
|
||||
context.move(to: startPoint)
|
||||
context.addLine(to: endPoint)
|
||||
context.strokePath()
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
final class CollageIconComponent: Component {
|
||||
typealias EnvironmentType = Empty
|
||||
|
||||
let grid: Camera.CollageGrid
|
||||
let crossed: Bool
|
||||
let isSelected: Bool
|
||||
let tintColor: UIColor
|
||||
|
||||
init(
|
||||
grid: Camera.CollageGrid,
|
||||
crossed: Bool,
|
||||
isSelected: Bool,
|
||||
tintColor: UIColor
|
||||
) {
|
||||
self.grid = grid
|
||||
self.crossed = crossed
|
||||
self.isSelected = isSelected
|
||||
self.tintColor = tintColor
|
||||
}
|
||||
|
||||
static func ==(lhs: CollageIconComponent, rhs: CollageIconComponent) -> Bool {
|
||||
if lhs.grid != rhs.grid {
|
||||
return false
|
||||
}
|
||||
if lhs.crossed != rhs.crossed {
|
||||
return false
|
||||
}
|
||||
if lhs.isSelected != rhs.isSelected {
|
||||
return false
|
||||
}
|
||||
if lhs.tintColor != rhs.tintColor {
|
||||
return false
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
final class View: UIView {
|
||||
private let iconView = UIImageView()
|
||||
|
||||
private var component: CollageIconComponent?
|
||||
private weak var state: EmptyComponentState?
|
||||
|
||||
override init(frame: CGRect) {
|
||||
super.init(frame: frame)
|
||||
|
||||
self.addSubview(self.iconView)
|
||||
}
|
||||
|
||||
required init?(coder: NSCoder) {
|
||||
fatalError("init(coder:) has not been implemented")
|
||||
}
|
||||
|
||||
func update(component: CollageIconComponent, availableSize: CGSize, state: EmptyComponentState, environment: Environment<EnvironmentType>, transition: ComponentTransition) -> CGSize {
|
||||
let previousComponent = self.component
|
||||
self.component = component
|
||||
self.state = state
|
||||
|
||||
if component.grid != previousComponent?.grid {
|
||||
let image = generateCollageIcon(grid: component.grid, crossed: component.crossed)
|
||||
let selectedImage = generateImage(CGSize(width: 36.0, height: 36.0), contextGenerator: { size, context in
|
||||
context.clear(CGRect(origin: .zero, size: size))
|
||||
context.setFillColor(UIColor.white.cgColor)
|
||||
context.fillEllipse(in: CGRect(origin: .zero, size: size))
|
||||
|
||||
if let image, let cgImage = image.cgImage {
|
||||
context.setBlendMode(.clear)
|
||||
context.clip(to: CGRect(origin: CGPoint(x: floorToScreenPixels((size.width - image.size.width) / 2.0), y: floorToScreenPixels((size.height - image.size.height) / 2.0) - 1.0), size: image.size), mask: cgImage)
|
||||
context.fill(CGRect(origin: .zero, size: size))
|
||||
}
|
||||
})?.withRenderingMode(.alwaysTemplate)
|
||||
|
||||
self.iconView.image = image
|
||||
|
||||
if self.iconView.isHighlighted {
|
||||
self.iconView.isHighlighted = false
|
||||
self.iconView.highlightedImage = selectedImage
|
||||
self.iconView.isHighlighted = true
|
||||
} else {
|
||||
self.iconView.highlightedImage = selectedImage
|
||||
}
|
||||
}
|
||||
|
||||
let size = CGSize(width: 36.0, height: 36.0)
|
||||
self.iconView.frame = CGRect(origin: .zero, size: size)
|
||||
self.iconView.isHighlighted = component.isSelected
|
||||
|
||||
self.iconView.tintColor = component.tintColor
|
||||
|
||||
return size
|
||||
}
|
||||
}
|
||||
|
||||
public func makeView() -> View {
|
||||
return View(frame: CGRect())
|
||||
}
|
||||
|
||||
public func update(view: View, availableSize: CGSize, state: State, environment: Environment<EnvironmentType>, transition: ComponentTransition) -> CGSize {
|
||||
return view.update(component: self, availableSize: availableSize, state: state, environment: environment, transition: transition)
|
||||
}
|
||||
}
final class CollageIconCarouselComponent: Component {
    typealias EnvironmentType = Empty
    
    let grids: [Camera.CollageGrid]
    let selected: (Camera.CollageGrid) -> Void
    
    init(
        grids: [Camera.CollageGrid],
        selected: @escaping (Camera.CollageGrid) -> Void
    ) {
        self.grids = grids
        self.selected = selected
    }
    
    static func ==(lhs: CollageIconCarouselComponent, rhs: CollageIconCarouselComponent) -> Bool {
        if lhs.grids != rhs.grids {
            return false
        }
        return true
    }
    
    final class View: UIView {
        private let clippingView = UIView()
        private let scrollView = UIScrollView()
        
        private var itemViews: [AnyHashable: ComponentView<Empty>] = [:]
        
        private var component: CollageIconCarouselComponent?
        private weak var state: EmptyComponentState?
        
        override init(frame: CGRect) {
            super.init(frame: frame)
            
            self.scrollView.contentInsetAdjustmentBehavior = .never
            self.scrollView.showsVerticalScrollIndicator = false
            self.scrollView.showsHorizontalScrollIndicator = false
            
            self.addSubview(self.clippingView)
            self.clippingView.addSubview(self.scrollView)
        }
        
        required init?(coder: NSCoder) {
            fatalError("init(coder:) has not been implemented")
        }
        
        func update(component: CollageIconCarouselComponent, availableSize: CGSize, state: EmptyComponentState, environment: Environment<EnvironmentType>, transition: ComponentTransition) -> CGSize {
            self.component = component
            self.state = state
            
            let inset: CGFloat = 27.0
            let spacing: CGFloat = 8.0
            var contentWidth: CGFloat = inset
            let buttonSize = CGSize(width: 40.0, height: 40.0)
            
            var validIds: [AnyHashable] = []
            for grid in component.grids {
                validIds.append(grid)
                
                let itemView: ComponentView<Empty>
                if let current = self.itemViews[grid] {
                    itemView = current
                } else {
                    itemView = ComponentView()
                    self.itemViews[grid] = itemView
                }
                let itemSize = itemView.update(
                    transition: .immediate,
                    component: AnyComponent(CameraButton(
                        content: AnyComponentWithIdentity(
                            id: "content",
                            component: AnyComponent(
                                CollageIconComponent(
                                    grid: grid,
                                    crossed: false,
                                    isSelected: false,
                                    tintColor: .white
                                )
                            )
                        ),
                        action: { [weak self] in
                            if let component = self?.component {
                                component.selected(grid)
                            }
                        }
                    )),
                    environment: {},
                    containerSize: buttonSize
                )
                if let view = itemView.view {
                    if view.superview == nil {
                        self.scrollView.addSubview(view)
                        
                        view.layer.shadowOffset = CGSize(width: 0.0, height: 0.0)
                        view.layer.shadowRadius = 3.0
                        view.layer.shadowColor = UIColor.black.cgColor
                        view.layer.shadowOpacity = 0.25
                        view.layer.rasterizationScale = UIScreenScale
                        view.layer.shouldRasterize = true
                    }
                    view.frame = CGRect(origin: CGPoint(x: contentWidth, y: 0.0), size: itemSize)
                }
                contentWidth += itemSize.width + spacing
            }
            
            let contentSize = CGSize(width: contentWidth, height: buttonSize.height)
            if self.scrollView.contentSize != contentSize {
                self.scrollView.contentSize = contentSize
            }
            self.scrollView.frame = CGRect(origin: .zero, size: availableSize)
            self.clippingView.frame = CGRect(origin: .zero, size: availableSize)
            
            if self.clippingView.mask == nil {
                if let maskImage = generateGradientImage(size: CGSize(width: 42.0, height: 10.0), colors: [UIColor.clear, UIColor.black, UIColor.black, UIColor.clear], locations: [0.0, 0.3, 0.7, 1.0], direction: .horizontal) {
                    let maskView = UIImageView(image: maskImage.stretchableImage(withLeftCapWidth: 13, topCapHeight: 0))
                    self.clippingView.mask = maskView
                }
            }
            self.clippingView.mask?.frame = CGRect(origin: .zero, size: availableSize)
            
            var removeIds: [AnyHashable] = []
            for (id, itemView) in self.itemViews {
                if !validIds.contains(id) {
                    removeIds.append(id)
                    itemView.view?.removeFromSuperview()
                }
            }
            for id in removeIds {
                self.itemViews.removeValue(forKey: id)
            }
            
            return availableSize
        }
        
        func animateIn() {
            guard self.frame.width > 0.0 else {
                return
            }
            self.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.25)
            for (_, itemView) in self.itemViews {
                itemView.view?.layer.animatePosition(from: CGPoint(x: self.frame.width, y: 0.0), to: .zero, duration: 0.3, timingFunction: kCAMediaTimingFunctionSpring, additive: true)
            }
        }
        
        func animateOut(completion: @escaping () -> Void) {
            guard self.frame.width > 0.0 else {
                completion()
                return
            }
            self.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.25, removeOnCompletion: false, completion: { _ in
                completion()
            })
            for (_, itemView) in self.itemViews {
                itemView.view?.layer.animatePosition(from: .zero, to: CGPoint(x: self.frame.width + self.scrollView.contentOffset.x, y: 0.0), duration: 0.3, timingFunction: kCAMediaTimingFunctionSpring, removeOnCompletion: false, additive: true)
            }
        }
    }
    
    public func makeView() -> View {
        return View(frame: CGRect())
    }
    
    public func update(view: View, availableSize: CGSize, state: State, environment: Environment<EnvironmentType>, transition: ComponentTransition) -> CGSize {
        return view.update(component: self, availableSize: availableSize, state: state, environment: environment, transition: transition)
    }
}
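Note on the carousel above: the soft fade at both edges comes from masking clippingView with a 42pt gradient image stretched from its caps, and each item gets a rasterized shadow so scrolling stays cheap. The edge-fade idea in isolation, as a hedged CAGradientLayer sketch (the helper name and fixed fade width are illustrative, not from the diff):

import UIKit

// Illustrative: fade a horizontally scrollable row out at the left and right
// edges with a gradient mask layer; re-apply on layout changes.
func applyEdgeFadeMask(to view: UIView, fadeWidth: CGFloat = 13.0) {
    let mask = CAGradientLayer()
    mask.frame = view.bounds
    mask.startPoint = CGPoint(x: 0.0, y: 0.5)
    mask.endPoint = CGPoint(x: 1.0, y: 0.5)
    let fade = fadeWidth / max(view.bounds.width, 1.0)
    mask.colors = [UIColor.clear.cgColor, UIColor.black.cgColor, UIColor.black.cgColor, UIColor.clear.cgColor]
    mask.locations = [0.0, NSNumber(value: Double(fade)), NSNumber(value: Double(1.0 - fade)), 1.0]
    view.layer.mask = mask
}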
@ -0,0 +1,90 @@
import Foundation
import UIKit
import Display
import ComponentFlow

final class DualIconComponent: Component {
    typealias EnvironmentType = Empty
    
    let isSelected: Bool
    let tintColor: UIColor
    
    init(
        isSelected: Bool,
        tintColor: UIColor
    ) {
        self.isSelected = isSelected
        self.tintColor = tintColor
    }
    
    static func ==(lhs: DualIconComponent, rhs: DualIconComponent) -> Bool {
        if lhs.isSelected != rhs.isSelected {
            return false
        }
        if lhs.tintColor != rhs.tintColor {
            return false
        }
        return true
    }
    
    final class View: UIView {
        private let iconView = UIImageView()
        
        private var component: DualIconComponent?
        private weak var state: EmptyComponentState?
        
        override init(frame: CGRect) {
            super.init(frame: frame)
            
            let image = generateImage(CGSize(width: 36.0, height: 36.0), rotatedContext: { size, context in
                context.clear(CGRect(origin: .zero, size: size))
                
                if let image = UIImage(bundleImageName: "Camera/DualIcon"), let cgImage = image.cgImage {
                    context.draw(cgImage, in: CGRect(origin: CGPoint(x: floorToScreenPixels((size.width - image.size.width) / 2.0), y: floorToScreenPixels((size.height - image.size.height) / 2.0) - 1.0), size: image.size))
                }
            })?.withRenderingMode(.alwaysTemplate)
            
            let selectedImage = generateImage(CGSize(width: 36.0, height: 36.0), rotatedContext: { size, context in
                context.clear(CGRect(origin: .zero, size: size))
                context.setFillColor(UIColor.white.cgColor)
                context.fillEllipse(in: CGRect(origin: .zero, size: size))
                
                if let image = UIImage(bundleImageName: "Camera/DualIcon"), let cgImage = image.cgImage {
                    context.setBlendMode(.clear)
                    context.clip(to: CGRect(origin: CGPoint(x: floorToScreenPixels((size.width - image.size.width) / 2.0), y: floorToScreenPixels((size.height - image.size.height) / 2.0) - 1.0), size: image.size), mask: cgImage)
                    context.fill(CGRect(origin: .zero, size: size))
                }
            })?.withRenderingMode(.alwaysTemplate)
            
            self.iconView.image = image
            self.iconView.highlightedImage = selectedImage
            
            self.addSubview(self.iconView)
        }
        
        required init?(coder: NSCoder) {
            fatalError("init(coder:) has not been implemented")
        }
        
        func update(component: DualIconComponent, availableSize: CGSize, state: EmptyComponentState, environment: Environment<EnvironmentType>, transition: ComponentTransition) -> CGSize {
            self.component = component
            self.state = state
            
            let size = CGSize(width: 36.0, height: 36.0)
            self.iconView.frame = CGRect(origin: .zero, size: size)
            self.iconView.isHighlighted = component.isSelected
            
            self.iconView.tintColor = component.tintColor
            
            return size
        }
    }
    
    public func makeView() -> View {
        return View(frame: CGRect())
    }
    
    public func update(view: View, availableSize: CGSize, state: State, environment: Environment<EnvironmentType>, transition: ComponentTransition) -> CGSize {
        return view.update(component: self, availableSize: availableSize, state: state, environment: environment, transition: transition)
    }
}
@ -230,20 +230,11 @@ final class ShutterBlobView: UIView {
    }
    
    public init?(test: Bool) {
        let mainBundle = Bundle(for: ShutterBlobView.self)
        
        guard let path = mainBundle.path(forResource: "CameraScreenBundle", ofType: "bundle") else {
            return nil
        }
        guard let bundle = Bundle(path: path) else {
            return nil
        }
        
        guard let device = MTLCreateSystemDefaultDevice() else {
            return nil
        }
        
        guard let defaultLibrary = try? device.makeDefaultLibrary(bundle: bundle) else {
        guard let library = metalLibrary(device: device) else {
            return nil
        }
        
@ -252,11 +243,11 @@ final class ShutterBlobView: UIView {
        }
        self.commandQueue = commandQueue
        
        guard let loadedVertexProgram = defaultLibrary.makeFunction(name: "cameraBlobVertex") else {
        guard let loadedVertexProgram = library.makeFunction(name: "cameraBlobVertex") else {
            return nil
        }
        
        guard let loadedFragmentProgram = defaultLibrary.makeFunction(name: "cameraBlobFragment") else {
        guard let loadedFragmentProgram = library.makeFunction(name: "cameraBlobFragment") else {
            return nil
        }
        
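This hunk drops the per-bundle Metal library lookup (device.makeDefaultLibrary(bundle:)) in favor of a shared metalLibrary(device:) helper, so all camera shaders resolve through one code path. The helper's body is not part of this diff; a hedged sketch of what such a helper typically does, with an assumed lookup order:

import Metal

// Assumed shape of the helper: try the device's default library first, then a
// named .metallib resource. The real helper's lookup order may differ.
func metalLibrary(device: MTLDevice) -> MTLLibrary? {
    if let library = device.makeDefaultLibrary() {
        return library
    }
    guard let url = Bundle.main.url(forResource: "default", withExtension: "metallib") else {
        return nil
    }
    return try? device.makeLibrary(URL: url)
}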
@ -33,12 +33,22 @@ public class LegacyMessageInputPanelNode: ASDisplayNode, TGCaptionPanelView {
    private var currentIsEditing = false
    private var currentHeight: CGFloat?
    private var currentIsVideo = false
    private var currentIsCaptionAbove = false
    
    private let hapticFeedback = HapticFeedback()
    
    private var inputView: LegacyMessageInputPanelInputView?
    private var isEmojiKeyboardActive = false
    
    public var sendPressed: ((NSAttributedString?) -> Void)?
    public var focusUpdated: ((Bool) -> Void)?
    public var heightUpdated: ((Bool) -> Void)?
    public var timerUpdated: ((NSNumber?) -> Void)?
    public var captionIsAboveUpdated: ((Bool) -> Void)?
    
    private weak var undoController: UndoOverlayController?
    private weak var tooltipController: TooltipScreen?
    
    private var validLayout: (width: CGFloat, leftInset: CGFloat, rightInset: CGFloat, bottomInset: CGFloat, keyboardHeight: CGFloat, additionalSideInsets: UIEdgeInsets, maxHeight: CGFloat, isSecondary: Bool, metrics: LayoutMetrics)?
    
    public init(
@ -67,11 +77,6 @@ public class LegacyMessageInputPanelNode: ASDisplayNode, TGCaptionPanelView {
        }
    }
    
    public var sendPressed: ((NSAttributedString?) -> Void)?
    public var focusUpdated: ((Bool) -> Void)?
    public var heightUpdated: ((Bool) -> Void)?
    public var timerUpdated: ((NSNumber?) -> Void)?
    
    public func updateLayoutSize(_ size: CGSize, keyboardHeight: CGFloat, sideInset: CGFloat, animated: Bool) -> CGFloat {
        return self.updateLayout(width: size.width, leftInset: sideInset, rightInset: sideInset, bottomInset: 0.0, keyboardHeight: keyboardHeight, additionalSideInsets: UIEdgeInsets(), maxHeight: size.height, isSecondary: false, transition: animated ? .animated(duration: 0.2, curve: .easeInOut) : .immediate, metrics: LayoutMetrics(widthClass: .compact, heightClass: .compact, orientation: nil), isMediaInputExpanded: false)
    }
@ -99,14 +104,15 @@ public class LegacyMessageInputPanelNode: ASDisplayNode, TGCaptionPanelView {
        transition.setFrame(view: view, frame: frame)
    }
    
    public func setTimeout(_ timeout: Int32, isVideo: Bool) {
        self.dismissTimeoutTooltip()
    public func setTimeout(_ timeout: Int32, isVideo: Bool, isCaptionAbove: Bool) {
        self.dismissAllTooltips()
        var timeout: Int32? = timeout
        if timeout == 0 {
            timeout = nil
        }
        self.currentTimeout = timeout
        self.currentIsVideo = isVideo
        self.currentIsCaptionAbove = isCaptionAbove
    }
    
    public func activateInput() {
@ -132,7 +138,7 @@ public class LegacyMessageInputPanelNode: ASDisplayNode, TGCaptionPanelView {
    }
    
    public func onAnimateOut() {
        self.dismissTimeoutTooltip()
        self.dismissAllTooltips()
    }
    
    public func baseHeight() -> CGFloat {
@ -233,7 +239,12 @@ public class LegacyMessageInputPanelNode: ASDisplayNode, TGCaptionPanelView {
            lockMediaRecording: nil,
            stopAndPreviewMediaRecording: nil,
            discardMediaRecordingPreview: nil,
            attachmentAction: nil,
            attachmentAction: { [weak self] in
                if let self {
                    self.toggleIsCaptionAbove()
                }
            },
            attachmentButtonMode: self.currentIsCaptionAbove ? .captionDown : .captionUp,
            myReaction: nil,
            likeAction: nil,
            likeOptionsAction: nil,
@ -249,6 +260,11 @@ public class LegacyMessageInputPanelNode: ASDisplayNode, TGCaptionPanelView {
            } : nil,
            forwardAction: nil,
            moreAction: nil,
            presentCaptionPositionTooltip: { [weak self] sourceView in
                if let self {
                    self.presentCaptionPositionTooltip(sourceView: sourceView)
                }
            },
            presentVoiceMessagesUnavailableTooltip: nil,
            presentTextLengthLimitTooltip: nil,
            presentTextFormattingTooltip: nil,
@ -340,6 +356,31 @@ public class LegacyMessageInputPanelNode: ASDisplayNode, TGCaptionPanelView {
        }
    }
    
    private func toggleIsCaptionAbove() {
        //TODO:localize
        self.currentIsCaptionAbove = !self.currentIsCaptionAbove
        self.captionIsAboveUpdated?(self.currentIsCaptionAbove)
        self.update(transition: .animated(duration: 0.3, curve: .spring))
        
        self.dismissAllTooltips()
        
        let presentationData = self.context.sharedContext.currentPresentationData.with { $0 }
        
        let title = self.currentIsCaptionAbove ? "Caption moved up" : "Caption moved down"
        let text = self.currentIsCaptionAbove ? "Text will be shown above the media." : "Text will be shown below the media."
        let animationName = self.currentIsCaptionAbove ? "message_preview_sort_above" : "message_preview_sort_below"
        
        let controller = UndoOverlayController(
            presentationData: presentationData,
            content: .universal(animation: animationName, scale: 1.0, colors: ["__allcolors__": UIColor.white], title: title, text: text, customUndoText: nil, timeout: 2.0),
            elevatedLayout: false,
            position: self.currentIsCaptionAbove ? .bottom : .top,
            action: { _ in return false }
        )
        self.present(controller)
        self.undoController = controller
    }
    
    private func presentTimeoutSetup(sourceView: UIView, gesture: ContextGesture?) {
        self.hapticFeedback.impact(.light)
        
@ -395,10 +436,12 @@ public class LegacyMessageInputPanelNode: ASDisplayNode, TGCaptionPanelView {
        let contextController = ContextController(presentationData: presentationData, source: .reference(HeaderContextReferenceContentSource(sourceView: sourceView)), items: .single(ContextController.Items(content: .list(items))), gesture: gesture)
        self.present(contextController)
    }
    
    private weak var tooltipController: TooltipScreen?
    
    private func dismissTimeoutTooltip() {
    
    private func dismissAllTooltips() {
        if let undoController = self.undoController {
            self.undoController = nil
            undoController.dismissWithCommitAction()
        }
        if let tooltipController = self.tooltipController {
            self.tooltipController = nil
            tooltipController.dismiss()
@ -409,7 +452,7 @@ public class LegacyMessageInputPanelNode: ASDisplayNode, TGCaptionPanelView {
        guard let superview = self.view.superview?.superview else {
            return
        }
        self.dismissTimeoutTooltip()
        self.dismissAllTooltips()
        
        let parentFrame = superview.convert(superview.bounds, to: nil)
        let absoluteFrame = sourceView.convert(sourceView.bounds, to: nil).offsetBy(dx: -parentFrame.minX, dy: 0.0)
@ -449,6 +492,38 @@ public class LegacyMessageInputPanelNode: ASDisplayNode, TGCaptionPanelView {
        self.present(tooltipController)
    }
    
    private func presentCaptionPositionTooltip(sourceView: UIView) {
        guard let superview = self.view.superview?.superview else {
            return
        }
        self.dismissAllTooltips()
        
        let parentFrame = superview.convert(superview.bounds, to: nil)
        let absoluteFrame = sourceView.convert(sourceView.bounds, to: nil).offsetBy(dx: -parentFrame.minX, dy: 0.0)
        let location = CGRect(origin: CGPoint(x: absoluteFrame.midX + 2.0, y: absoluteFrame.minY + 6.0), size: CGSize())
        
        //TODO:localize
        let text = "Tap here to move caption up."
        let tooltipController = TooltipScreen(
            account: self.context.account,
            sharedContext: self.context.sharedContext,
            text: .plain(text: text),
            balancedTextLayout: false,
            style: .customBlur(UIColor(rgb: 0x18181a), 4.0),
            arrowStyle: .small,
            icon: nil,
            location: .point(location, .bottom),
            displayDuration: .default,
            inset: 4.0,
            cornerRadius: 10.0,
            shouldDismissOnTouch: { _, _ in
                return .ignore
            }
        )
        self.tooltipController = tooltipController
        self.present(tooltipController)
    }
    
    public override func hitTest(_ point: CGPoint, with event: UIEvent?) -> UIView? {
        let result = super.hitTest(point, with: event)
        if let view = self.inputPanel.view, let panelResult = view.hitTest(self.view.convert(point, to: view), with: event) {
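The caption-position feature above is callback-driven: the attachment button toggles currentIsCaptionAbove, observers hear about it through the new captionIsAboveUpdated closure, and an undo toast confirms the move. A minimal consumer sketch (the wiring function and its owner are hypothetical, not part of this diff):

// Hypothetical owner wiring the new callback; `panel` stands in for an
// existing LegacyMessageInputPanelNode instance.
func wireCaptionPosition(panel: LegacyMessageInputPanelNode) {
    panel.captionIsAboveUpdated = { isAbove in
        // Persist the choice and re-layout so the caption renders above or below the media.
        print("caption above media: \(isAbove)")
    }
    // A timeout of 0 is treated as "no timeout" by the panel.
    panel.setTimeout(0, isVideo: true, isCaptionAbove: false)
}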
submodules/TelegramUI/Components/MediaAssetsContext/BUILD (new file)
@ -0,0 +1,19 @@
load("@build_bazel_rules_swift//swift:swift.bzl", "swift_library")

swift_library(
    name = "MediaAssetsContext",
    module_name = "MediaAssetsContext",
    srcs = glob([
        "Sources/**/*.swift",
    ]),
    copts = [
        "-warnings-as-errors",
    ],
    deps = [
        "//submodules/Display",
        "//submodules/SSignalKit/SwiftSignalKit",
    ],
    visibility = [
        "//visibility:public",
    ],
)
@ -12,7 +12,7 @@ private let imageManager: PHCachingImageManager = {

private let assetsQueue = Queue()

final class AssetDownloadManager {
public final class AssetDownloadManager {
    private final class DownloadingAssetContext {
        let identifier: String
        let updated: () -> Void
@ -33,13 +33,13 @@ final class AssetDownloadManager {
    private let queue = Queue()
    private var currentAssetContext: DownloadingAssetContext?
    
    init() {
    public init() {
    }
    
    deinit {
    }
    
    func download(asset: PHAsset) {
    public func download(asset: PHAsset) {
        self.cancelAllDownloads()
        
        let queue = self.queue
@ -70,7 +70,7 @@ final class AssetDownloadManager {
        })
    }
    
    func cancelAllDownloads() {
    public func cancelAllDownloads() {
        if let currentAssetContext = self.currentAssetContext {
            currentAssetContext.status = .none
            currentAssetContext.updated()
@ -83,7 +83,7 @@ final class AssetDownloadManager {
        }
    }
    
    func cancel(identifier: String) {
    public func cancel(identifier: String) {
        if let currentAssetContext = self.currentAssetContext, currentAssetContext.identifier == identifier {
            currentAssetContext.status = .none
            currentAssetContext.updated()
@ -129,7 +129,7 @@ final class AssetDownloadManager {
        }
    }
    
    func downloadProgress(identifier: String) -> Signal<AssetDownloadStatus, NoError> {
    public func downloadProgress(identifier: String) -> Signal<AssetDownloadStatus, NoError> {
        return Signal { [weak self] subscriber in
            if let self {
                return self.downloadProgress(identifier: identifier, next: { status in
@ -145,7 +145,7 @@ final class AssetDownloadManager {
    }
}

func checkIfAssetIsLocal(_ asset: PHAsset) -> Signal<Bool, NoError> {
public func checkIfAssetIsLocal(_ asset: PHAsset) -> Signal<Bool, NoError> {
    if asset.isLocallyAvailable == true {
        return .single(true)
    }
@ -181,7 +181,7 @@ func checkIfAssetIsLocal(_ asset: PHAsset) -> Signal<Bool, NoError> {
    }
}

enum AssetDownloadStatus {
public enum AssetDownloadStatus {
    case none
    case progress(Float)
    case completed
@ -242,13 +242,13 @@ private func downloadAssetMediaData(_ asset: PHAsset) -> Signal<AssetDownloadSta
    }
}

func assetImage(fetchResult: PHFetchResult<PHAsset>, index: Int, targetSize: CGSize, exact: Bool, deliveryMode: PHImageRequestOptionsDeliveryMode = .opportunistic, synchronous: Bool = false) -> Signal<UIImage?, NoError> {
public func assetImage(fetchResult: PHFetchResult<PHAsset>, index: Int, targetSize: CGSize, exact: Bool, deliveryMode: PHImageRequestOptionsDeliveryMode = .opportunistic, synchronous: Bool = false) -> Signal<UIImage?, NoError> {
    let asset = fetchResult[index]
    return assetImage(asset: asset, targetSize: targetSize, exact: exact, deliveryMode: deliveryMode, synchronous: synchronous)
}

func assetImage(asset: PHAsset, targetSize: CGSize, exact: Bool, deliveryMode: PHImageRequestOptionsDeliveryMode = .opportunistic, synchronous: Bool = false) -> Signal<UIImage?, NoError> {
    return Signal { subscriber in
public func assetImage(asset: PHAsset, targetSize: CGSize, exact: Bool, deliveryMode: PHImageRequestOptionsDeliveryMode = .opportunistic, synchronous: Bool = false) -> Signal<UIImage?, NoError> {
    return Signal { subscriber in
        let options = PHImageRequestOptions()
        options.deliveryMode = deliveryMode
        if exact {
@ -282,7 +282,7 @@ func assetImage(asset: PHAsset, targetSize: CGSize, exact: Bool, deliveryMode: P
    }
}

func assetVideo(fetchResult: PHFetchResult<PHAsset>, index: Int) -> Signal<AVAsset?, NoError> {
public func assetVideo(fetchResult: PHFetchResult<PHAsset>, index: Int) -> Signal<AVAsset?, NoError> {
    return Signal { subscriber in
        let asset = fetchResult[index]
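The visibility changes in this file exist so the story collage picker can drive iCloud asset downloads from outside the new MediaAssetsContext module. A hedged sketch of the now-public surface in use (the fetch setup and print statements are assumptions, not part of the diff):

import Photos
import SwiftSignalKit

// Hedged usage sketch: fetch the first image asset and track its download.
func downloadFirstImageAsset(manager: AssetDownloadManager) {
    let fetchResult = PHAsset.fetchAssets(with: .image, options: nil)
    guard fetchResult.count > 0 else {
        return
    }
    let asset = fetchResult[0]
    manager.download(asset: asset)
    let _ = manager.downloadProgress(identifier: asset.localIdentifier).start(next: { status in
        switch status {
        case .none:
            break
        case let .progress(value):
            print("downloading: \(value)")
        case .completed:
            print("download complete")
        }
    })
}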
File diff suppressed because it is too large
@ -52,15 +52,15 @@ private func roundedCornersMaskImage(size: CGSize) -> CIImage {

final class MediaEditorComposer {
    enum Input {
        case texture(MTLTexture, CMTime, Bool)
        case videoBuffer(VideoPixelBuffer)
        case texture(MTLTexture, CMTime, Bool, CGRect?)
        case videoBuffer(VideoPixelBuffer, CGRect?)
        case ciImage(CIImage, CMTime)
        
        var timestamp: CMTime {
            switch self {
            case let .texture(_, timestamp, _):
            case let .texture(_, timestamp, _, _):
                return timestamp
            case let .videoBuffer(videoBuffer):
            case let .videoBuffer(videoBuffer, _):
                return videoBuffer.timestamp
            case let .ciImage(_, timestamp):
                return timestamp
@ -69,10 +69,10 @@ final class MediaEditorComposer {
        
        var rendererInput: MediaEditorRenderer.Input {
            switch self {
            case let .texture(texture, timestamp, hasTransparency):
                return .texture(texture, timestamp, hasTransparency)
            case let .videoBuffer(videoBuffer):
                return .videoBuffer(videoBuffer)
            case let .texture(texture, timestamp, hasTransparency, rect):
                return .texture(texture, timestamp, hasTransparency, rect)
            case let .videoBuffer(videoBuffer, rect):
                return .videoBuffer(videoBuffer, rect)
            case let .ciImage(image, timestamp):
                return .ciImage(image, timestamp)
            }
@ -150,21 +150,26 @@ final class MediaEditorComposer {
        self.renderer.videoFinishPass.update(values: self.values, videoDuration: videoDuration, additionalVideoDuration: additionalVideoDuration)
    }
    
    var previousAdditionalInput: Input?
    func process(main: Input, additional: Input?, timestamp: CMTime, pool: CVPixelBufferPool?, completion: @escaping (CVPixelBuffer?) -> Void) {
    var previousAdditionalInput: [Int: Input] = [:]
    func process(main: Input, additional: [Input?], timestamp: CMTime, pool: CVPixelBufferPool?, completion: @escaping (CVPixelBuffer?) -> Void) {
        guard let pool, let ciContext = self.ciContext else {
            completion(nil)
            return
        }
        
        var additional = additional
        if let additional {
            self.previousAdditionalInput = additional
        } else {
            additional = self.previousAdditionalInput
        var index = 0
        var augmentedAdditionals: [Input?] = []
        for input in additional {
            if let input {
                self.previousAdditionalInput[index] = input
                augmentedAdditionals.append(input)
            } else {
                augmentedAdditionals.append(self.previousAdditionalInput[index])
            }
            index += 1
        }
        
        self.renderer.consume(main: main.rendererInput, additional: additional?.rendererInput, render: true)
        self.renderer.consume(main: main.rendererInput, additionals: augmentedAdditionals.compactMap { $0 }.map { $0.rendererInput }, render: true)
        
        if let resultTexture = self.renderer.resultTexture, var ciImage = CIImage(mtlTexture: resultTexture, options: [.colorSpace: self.colorSpace]) {
            ciImage = ciImage.transformed(by: CGAffineTransformMakeScale(1.0, -1.0).translatedBy(x: 0.0, y: -ciImage.extent.height))
@ -190,13 +195,13 @@ final class MediaEditorComposer {
        completion(nil)
    }
    
    private var cachedTexture: MTLTexture?
    func textureForImage(_ image: UIImage) -> MTLTexture? {
        if let cachedTexture = self.cachedTexture {
    private var cachedTextures: [Int: MTLTexture] = [:]
    func textureForImage(index: Int, image: UIImage) -> MTLTexture? {
        if let cachedTexture = self.cachedTextures[index] {
            return cachedTexture
        }
        if let device = self.device, let texture = loadTexture(image: image, device: device) {
            self.cachedTexture = texture
            self.cachedTextures[index] = texture
            return texture
        }
        return nil
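process(main:additional:...) now takes one optional input per collage tile and falls back, per index, to the last frame seen for that tile, so shorter or stalled videos keep showing their final frame instead of dropping out of the composite. The hold-last-value pattern in isolation, as a generic sketch (the type and names are illustrative, not the composer's):

// Generic sketch of the per-index "hold last value" pattern used above.
struct FrameHolder<T> {
    private var previous: [Int: T] = [:]

    // Replaces nil entries with the last value seen at the same index;
    // indices that never produced a value are skipped, mirroring compactMap.
    mutating func fill(_ inputs: [T?]) -> [T] {
        var result: [T] = []
        for (index, input) in inputs.enumerated() {
            if let input {
                previous[index] = input
                result.append(input)
            } else if let held = previous[index] {
                result.append(held)
            }
        }
        return result
    }
}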
@ -59,15 +59,15 @@ protocol RenderTarget: AnyObject {

final class MediaEditorRenderer {
    enum Input {
        case texture(MTLTexture, CMTime, Bool)
        case videoBuffer(VideoPixelBuffer)
        case texture(MTLTexture, CMTime, Bool, CGRect?)
        case videoBuffer(VideoPixelBuffer, CGRect?)
        case ciImage(CIImage, CMTime)
        
        var timestamp: CMTime {
            switch self {
            case let .texture(_, timestamp, _):
            case let .texture(_, timestamp, _, _):
                return timestamp
            case let .videoBuffer(videoBuffer):
            case let .videoBuffer(videoBuffer, _):
                return videoBuffer.timestamp
            case let .ciImage(_, timestamp):
                return timestamp
@ -85,7 +85,7 @@ final class MediaEditorRenderer {
    
    private let ciInputPass = CIInputPass()
    private let mainVideoInputPass = VideoInputPass()
    private let additionalVideoInputPass = VideoInputPass()
    private var additionalVideoInputPass: [Int : VideoInputPass] = [:]
    let videoFinishPass = VideoFinishPass()
    
    private let outputRenderPass = OutputRenderPass()
@ -103,7 +103,7 @@ final class MediaEditorRenderer {
    
    private var currentMainInput: Input?
    var currentMainInputMask: MTLTexture?
    private var currentAdditionalInput: Input?
    private var currentAdditionalInputs: [Input] = []
    private(set) var resultTexture: MTLTexture?
    
    var displayEnabled = true
@ -156,7 +156,6 @@ final class MediaEditorRenderer {
        self.commandQueue?.label = "Media Editor Command Queue"
        self.ciInputPass.setup(device: device, library: library)
        self.mainVideoInputPass.setup(device: device, library: library)
        self.additionalVideoInputPass.setup(device: device, library: library)
        self.videoFinishPass.setup(device: device, library: library)
        self.renderPasses.forEach { $0.setup(device: device, library: library) }
    }
@ -186,23 +185,25 @@ final class MediaEditorRenderer {
    }
    
    private func combinedTextureFromCurrentInputs(device: MTLDevice, commandBuffer: MTLCommandBuffer, textureCache: CVMetalTextureCache) -> MTLTexture? {
        var mainTexture: MTLTexture?
        var additionalTexture: MTLTexture?
        var hasTransparency = false
        guard let library = self.library else {
            return nil
        }
        var passMainInput: VideoFinishPass.Input?
        var passAdditionalInputs: [VideoFinishPass.Input] = []
        
        func textureFromInput(_ input: MediaEditorRenderer.Input, videoInputPass: VideoInputPass) -> (MTLTexture, Bool)? {
        func textureFromInput(_ input: MediaEditorRenderer.Input, videoInputPass: VideoInputPass) -> VideoFinishPass.Input? {
            switch input {
            case let .texture(texture, _, hasTransparency):
                return (texture, hasTransparency)
            case let .videoBuffer(videoBuffer):
            case let .texture(texture, _, hasTransparency, rect):
                return VideoFinishPass.Input(texture: texture, hasTransparency: hasTransparency, rect: rect)
            case let .videoBuffer(videoBuffer, rect):
                if let texture = videoInputPass.processPixelBuffer(videoBuffer, textureCache: textureCache, device: device, commandBuffer: commandBuffer) {
                    return (texture, false)
                    return VideoFinishPass.Input(texture: texture, hasTransparency: false, rect: rect)
                } else {
                    return nil
                }
            case let .ciImage(image, _):
                if let texture = self.ciInputPass.processCIImage(image, device: device, commandBuffer: commandBuffer) {
                    return (texture, true)
                    return VideoFinishPass.Input(texture: texture, hasTransparency: true, rect: nil)
                } else {
                    return nil
                }
@ -213,16 +214,26 @@ final class MediaEditorRenderer {
            return nil
        }
        
        if let (texture, transparency) = textureFromInput(mainInput, videoInputPass: self.mainVideoInputPass) {
            mainTexture = texture
            hasTransparency = transparency
        if let input = textureFromInput(mainInput, videoInputPass: self.mainVideoInputPass) {
            passMainInput = input
        }
        if let additionalInput = self.currentAdditionalInput, let (texture, _) = textureFromInput(additionalInput, videoInputPass: self.additionalVideoInputPass) {
            additionalTexture = texture
        var index = 0
        for additionalInput in self.currentAdditionalInputs {
            let videoInputPass: VideoInputPass
            if let current = self.additionalVideoInputPass[index] {
                videoInputPass = current
            } else {
                videoInputPass = VideoInputPass()
                videoInputPass.setup(device: device, library: library)
                self.additionalVideoInputPass[index] = videoInputPass
            }
            if let input = textureFromInput(additionalInput, videoInputPass: videoInputPass) {
                passAdditionalInputs.append(input)
            }
            index += 1
        }
        
        if let mainTexture {
            return self.videoFinishPass.process(input: mainTexture, inputMask: self.currentMainInputMask, hasTransparency: hasTransparency, secondInput: additionalTexture, timestamp: mainInput.timestamp, device: device, commandBuffer: commandBuffer)
        if let passMainInput {
            return self.videoFinishPass.process(input: passMainInput, inputMask: self.currentMainInputMask, hasTransparency: passMainInput.hasTransparency, secondInput: passAdditionalInputs, timestamp: mainInput.timestamp, device: device, commandBuffer: commandBuffer)
        } else {
            return nil
        }
@ -300,7 +311,7 @@ final class MediaEditorRenderer {
        }
        
        if let onNextAdditionalRender = self.onNextAdditionalRender {
            if self.currentAdditionalInput != nil {
            if !self.currentAdditionalInputs.isEmpty {
                self.onNextAdditionalRender = nil
                Queue.mainQueue().after(0.016) {
                    onNextAdditionalRender()
@ -327,7 +338,7 @@ final class MediaEditorRenderer {
    
    func consume(
        main: MediaEditorRenderer.Input,
        additional: MediaEditorRenderer.Input?,
        additionals: [MediaEditorRenderer.Input],
        render: Bool,
        displayEnabled: Bool = true
    ) {
@ -338,7 +349,7 @@ final class MediaEditorRenderer {
        }
        
        self.currentMainInput = main
        self.currentAdditionalInput = additional
        self.currentAdditionalInputs = additionals
        
        if render {
            self.renderFrame()
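Each additional collage video now gets its own VideoInputPass, created lazily per index and cached in a dictionary, since an input pass carries per-stream Metal state that cannot be shared between simultaneously decoded videos. The lazy per-index cache in isolation (a generic sketch, not the renderer's actual type):

// Generic sketch of the lazy per-index cache used for the input passes above.
final class PassCache<Pass> {
    private var passes: [Int: Pass] = [:]
    private let make: () -> Pass

    init(make: @escaping () -> Pass) {
        self.make = make
    }

    // Returns the cached pass for an index, creating and storing it on first use.
    func pass(at index: Int) -> Pass {
        if let current = passes[index] {
            return current
        }
        let pass = make()
        passes[index] = pass
        return pass
    }
}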
@ -300,6 +300,9 @@ public final class MediaEditorValues: Codable, Equatable {
        if lhs.additionalVideoVolume != rhs.additionalVideoVolume {
            return false
        }
        if lhs.collage != rhs.collage {
            return false
        }
        if lhs.drawing !== rhs.drawing {
            return false
        }
@ -324,6 +327,9 @@ public final class MediaEditorValues: Codable, Equatable {
        if lhs.audioTrackSamples != rhs.audioTrackSamples {
            return false
        }
        if lhs.collageTrackSamples != rhs.collageTrackSamples {
            return false
        }
        if lhs.coverImageTimestamp != rhs.coverImageTimestamp {
            return false
        }
@ -387,6 +393,7 @@ public final class MediaEditorValues: Codable, Equatable {
        case additionalVideoTrimRange
        case additionalVideoOffset
        case additionalVideoVolume
        case collage
        
        case nightTheme
        case drawing
@ -401,6 +408,131 @@ public final class MediaEditorValues: Codable, Equatable {
        case qualityPreset
    }
    
    public struct VideoCollageItem: Codable, Equatable {
        enum DecodingError: Error {
            case generic
        }
        
        private enum CodingKeys: String, CodingKey {
            case contentType
            case contentValue
            case isVideo
            case frame
            case videoTrimRange
            case videoOffset
            case videoVolume
        }
        
        public enum Content: Equatable {
            case main
            case imageFile(path: String)
            case videoFile(path: String)
            case asset(localIdentifier: String, isVideo: Bool)
            
            public var isVideo: Bool {
                switch self {
                case .videoFile, .asset(_, true):
                    return true
                default:
                    return false
                }
            }
        }
        
        public let content: Content
        public let frame: CGRect
        
        public let videoTrimRange: Range<Double>?
        public let videoOffset: Double?
        public let videoVolume: CGFloat?
        
        public init(
            content: Content,
            frame: CGRect,
            videoTrimRange: Range<Double>?,
            videoOffset: Double?,
            videoVolume: CGFloat?
        ) {
            self.content = content
            self.frame = frame
            self.videoTrimRange = videoTrimRange
            self.videoOffset = videoOffset
            self.videoVolume = videoVolume
        }
        
        public init(from decoder: Decoder) throws {
            let container = try decoder.container(keyedBy: CodingKeys.self)
            switch try container.decode(Int32.self, forKey: .contentType) {
            case 0:
                self.content = .main
            case 1:
                self.content = .imageFile(path: try container.decode(String.self, forKey: .contentValue))
            case 2:
                self.content = .videoFile(path: try container.decode(String.self, forKey: .contentValue))
            case 3:
                self.content = .asset(localIdentifier: try container.decode(String.self, forKey: .contentValue), isVideo: try container.decode(Bool.self, forKey: .isVideo))
            default:
                throw DecodingError.generic
            }
            self.frame = try container.decode(CGRect.self, forKey: .frame)
            self.videoTrimRange = try container.decodeIfPresent(Range<Double>.self, forKey: .videoTrimRange)
            self.videoOffset = try container.decodeIfPresent(Double.self, forKey: .videoOffset)
            self.videoVolume = try container.decodeIfPresent(CGFloat.self, forKey: .videoVolume)
        }
        
        public func encode(to encoder: any Encoder) throws {
            var container = encoder.container(keyedBy: CodingKeys.self)
            switch self.content {
            case .main:
                try container.encode(Int32(0), forKey: .contentType)
            case let .imageFile(value):
                try container.encode(Int32(1), forKey: .contentType)
                try container.encode(value, forKey: .contentValue)
            case let .videoFile(value):
                try container.encode(Int32(2), forKey: .contentType)
                try container.encode(value, forKey: .contentValue)
            case let .asset(value, isVideo):
                try container.encode(Int32(3), forKey: .contentType)
                try container.encode(value, forKey: .contentValue)
                try container.encode(isVideo, forKey: .isVideo)
            }
            try container.encode(self.frame, forKey: .frame)
            try container.encodeIfPresent(self.videoTrimRange, forKey: .videoTrimRange)
            try container.encodeIfPresent(self.videoOffset, forKey: .videoOffset)
            try container.encodeIfPresent(self.videoVolume, forKey: .videoVolume)
        }
        
        func withUpdatedVideoTrimRange(_ videoTrimRange: Range<Double>?) -> VideoCollageItem {
            return VideoCollageItem(
                content: self.content,
                frame: self.frame,
                videoTrimRange: videoTrimRange,
                videoOffset: self.videoOffset,
                videoVolume: self.videoVolume
            )
        }
        
        func withUpdatedVideoOffset(_ videoOffset: Double?) -> VideoCollageItem {
            return VideoCollageItem(
                content: self.content,
                frame: self.frame,
                videoTrimRange: self.videoTrimRange,
                videoOffset: videoOffset,
                videoVolume: self.videoVolume
            )
        }
        
        func withUpdatedVideoVolume(_ videoVolume: CGFloat?) -> VideoCollageItem {
            return VideoCollageItem(
                content: self.content,
                frame: self.frame,
                videoTrimRange: self.videoTrimRange,
                videoOffset: self.videoOffset,
                videoVolume: videoVolume
            )
        }
    }
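VideoCollageItem serializes its Content enum by hand: an Int32 discriminator (0 = main, 1 = imageFile, 2 = videoFile, 3 = asset) plus a contentValue payload, which keeps the stored format stable if the enum later gains cases. A round-trip sketch against the type as declared above (values are illustrative; JSON coders stand in for whatever serializer the app actually uses):

import Foundation
import UIKit

// Round-trip check for the manual discriminator coding (illustrative values).
func collageItemRoundTrip() throws {
    let item = MediaEditorValues.VideoCollageItem(
        content: .asset(localIdentifier: "ABC-123", isVideo: true),
        frame: CGRect(x: 0.0, y: 0.0, width: 0.5, height: 0.5),
        videoTrimRange: 0.0..<3.0,
        videoOffset: nil,
        videoVolume: 1.0
    )
    let data = try JSONEncoder().encode(item)
    let decoded = try JSONDecoder().decode(MediaEditorValues.VideoCollageItem.self, from: data)
    assert(decoded == item)
}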
    public let peerId: EnginePeer.Id
    
    public let originalDimensions: PixelDimensions
@ -425,11 +557,13 @@ public final class MediaEditorValues: Codable, Equatable {
    public let additionalVideoScale: CGFloat?
    public let additionalVideoRotation: CGFloat?
    public let additionalVideoPositionChanges: [VideoPositionChange]
    
    public let additionalVideoTrimRange: Range<Double>?
    public let additionalVideoOffset: Double?
    public let additionalVideoVolume: CGFloat?
    
    public let collage: [VideoCollageItem]
    
    public let nightTheme: Bool
    public let drawing: UIImage?
    public let maskDrawing: UIImage?
@ -442,6 +576,8 @@ public final class MediaEditorValues: Codable, Equatable {
    public let audioTrackVolume: CGFloat?
    public let audioTrackSamples: MediaAudioTrackSamples?
    
    public let collageTrackSamples: MediaAudioTrackSamples?
    
    public let coverImageTimestamp: Double?
    
    public let qualityPreset: MediaQualityPreset?
@ -482,6 +618,7 @@ public final class MediaEditorValues: Codable, Equatable {
        additionalVideoTrimRange: Range<Double>?,
        additionalVideoOffset: Double?,
        additionalVideoVolume: CGFloat?,
        collage: [VideoCollageItem],
        nightTheme: Bool,
        drawing: UIImage?,
        maskDrawing: UIImage?,
@ -492,6 +629,7 @@ public final class MediaEditorValues: Codable, Equatable {
        audioTrackOffset: Double?,
        audioTrackVolume: CGFloat?,
        audioTrackSamples: MediaAudioTrackSamples?,
        collageTrackSamples: MediaAudioTrackSamples?,
        coverImageTimestamp: Double?,
        qualityPreset: MediaQualityPreset?
    ) {
@ -518,6 +656,7 @@ public final class MediaEditorValues: Codable, Equatable {
        self.additionalVideoTrimRange = additionalVideoTrimRange
        self.additionalVideoOffset = additionalVideoOffset
        self.additionalVideoVolume = additionalVideoVolume
        self.collage = collage
        self.nightTheme = nightTheme
        self.drawing = drawing
        self.maskDrawing = maskDrawing
@ -528,6 +667,7 @@ public final class MediaEditorValues: Codable, Equatable {
        self.audioTrackOffset = audioTrackOffset
        self.audioTrackVolume = audioTrackVolume
        self.audioTrackSamples = audioTrackSamples
        self.collageTrackSamples = collageTrackSamples
        self.coverImageTimestamp = coverImageTimestamp
        self.qualityPreset = qualityPreset
    }
@ -570,6 +710,8 @@ public final class MediaEditorValues: Codable, Equatable {
        self.additionalVideoOffset = try container.decodeIfPresent(Double.self, forKey: .additionalVideoOffset)
        self.additionalVideoVolume = try container.decodeIfPresent(CGFloat.self, forKey: .additionalVideoVolume)
        
        self.collage = try container.decodeIfPresent([VideoCollageItem].self, forKey: .collage) ?? []
        
        self.nightTheme = try container.decodeIfPresent(Bool.self, forKey: .nightTheme) ?? false
        if let drawingData = try container.decodeIfPresent(Data.self, forKey: .drawing), let image = UIImage(data: drawingData) {
            self.drawing = image
@ -598,6 +740,7 @@ public final class MediaEditorValues: Codable, Equatable {
        self.audioTrackVolume = try container.decodeIfPresent(CGFloat.self, forKey: .audioTrackVolume)
        
        self.audioTrackSamples = nil
        self.collageTrackSamples = nil
        
        self.coverImageTimestamp = try container.decodeIfPresent(Double.self, forKey: .coverImageTimestamp)
        
@ -639,6 +782,8 @@ public final class MediaEditorValues: Codable, Equatable {
        try container.encodeIfPresent(self.additionalVideoOffset, forKey: .additionalVideoOffset)
        try container.encodeIfPresent(self.additionalVideoVolume, forKey: .additionalVideoVolume)
        
        try container.encode(self.collage, forKey: .collage)
        
        try container.encode(self.nightTheme, forKey: .nightTheme)
        if let drawing = self.drawing, let pngDrawingData = drawing.pngData() {
            try container.encode(pngDrawingData, forKey: .drawing)
@ -668,109 +813,117 @@ public final class MediaEditorValues: Codable, Equatable {
    }
    
    public func makeCopy() -> MediaEditorValues {
        return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset)
        return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, collage: self.collage, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, collageTrackSamples: self.collageTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset)
    }
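makeCopy() and the withUpdated... variants below each spell out the full memberwise initializer, so adding collage and collageTrackSamples means touching every one of these call sites; missing one would silently reset collage state along that edit path. For value types, a single copy-and-mutate funnel avoids that class of bug, as in this hedged sketch (MediaEditorValues itself is an immutable class, which is why it repeats the initializer instead):

// Illustrative alternative for value types: one mutation funnel means a new
// stored property only has to be handled in a single place. Names are hypothetical.
struct EditorStateSketch {
    var videoIsMuted: Bool = false
    var collage: [String] = []

    func updated(_ mutate: (inout EditorStateSketch) -> Void) -> EditorStateSketch {
        var copy = self
        mutate(&copy)
        return copy
    }
}

let muted = EditorStateSketch().updated { $0.videoIsMuted = true }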
|
||||
|
||||
func withUpdatedCrop(offset: CGPoint, scale: CGFloat, rotation: CGFloat, mirroring: Bool) -> MediaEditorValues {
|
||||
return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: offset, cropRect: self.cropRect, cropScale: scale, cropRotation: rotation, cropMirroring: mirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset)
|
||||
return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: offset, cropRect: self.cropRect, cropScale: scale, cropRotation: rotation, cropMirroring: mirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, collage: self.collage, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, collageTrackSamples: self.collageTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset)
|
||||
}
|
||||
|
||||
public func withUpdatedCropRect(cropRect: CGRect, rotation: CGFloat, mirroring: Bool) -> MediaEditorValues {
|
||||
return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: .zero, cropRect: cropRect, cropScale: 1.0, cropRotation: rotation, cropMirroring: mirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset)
|
||||
return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: .zero, cropRect: cropRect, cropScale: 1.0, cropRotation: rotation, cropMirroring: mirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, collage: self.collage, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, collageTrackSamples: self.collageTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset)
|
||||
}
|
||||
|
||||
func withUpdatedGradientColors(gradientColors: [UIColor]) -> MediaEditorValues {
|
||||
return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset)
|
||||
return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, collage: self.collage, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, collageTrackSamples: self.collageTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset)
|
||||
}
|
||||
|
||||
func withUpdatedVideoIsMuted(_ videoIsMuted: Bool) -> MediaEditorValues {
|
||||
return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset)
|
||||
return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, collage: self.collage, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, collageTrackSamples: self.collageTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset)
}
func withUpdatedVideoIsFullHd(_ videoIsFullHd: Bool) -> MediaEditorValues {
return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset)
return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, collage: self.collage, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, collageTrackSamples: self.collageTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset)
}
func withUpdatedVideoIsMirrored(_ videoIsMirrored: Bool) -> MediaEditorValues {
return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset)
return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, collage: self.collage, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, collageTrackSamples: self.collageTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset)
}
func withUpdatedVideoVolume(_ videoVolume: CGFloat?) -> MediaEditorValues {
return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset)
return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, collage: self.collage, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, collageTrackSamples: self.collageTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset)
}
func withUpdatedAdditionalVideo(path: String?, isDual: Bool, positionChanges: [VideoPositionChange]) -> MediaEditorValues {
return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: path, additionalVideoIsDual: isDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: positionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset)
return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: path, additionalVideoIsDual: isDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: positionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, collage: self.collage, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, collageTrackSamples: self.collageTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset)
}
func withUpdatedAdditionalVideo(position: CGPoint, scale: CGFloat, rotation: CGFloat) -> MediaEditorValues {
return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: position, additionalVideoScale: scale, additionalVideoRotation: rotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset)
return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: position, additionalVideoScale: scale, additionalVideoRotation: rotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, collage: self.collage, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, collageTrackSamples: self.collageTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset)
}
func withUpdatedAdditionalVideoTrimRange(_ additionalVideoTrimRange: Range<Double>?) -> MediaEditorValues {
return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset)
return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, collage: self.collage, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, collageTrackSamples: self.collageTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset)
}
func withUpdatedAdditionalVideoOffset(_ additionalVideoOffset: Double?) -> MediaEditorValues {
return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset)
return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, collage: self.collage, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, collageTrackSamples: self.collageTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset)
}
func withUpdatedAdditionalVideoVolume(_ additionalVideoVolume: CGFloat?) -> MediaEditorValues {
return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: additionalVideoVolume, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset)
return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: additionalVideoVolume, collage: self.collage, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, collageTrackSamples: self.collageTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset)
}
func withUpdatedCollage(_ collage: [VideoCollageItem]) -> MediaEditorValues {
return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, collage: collage, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, collageTrackSamples: self.collageTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset)
}
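Aside (not part of the diff): the withUpdated... helpers above all follow the same copy-and-override pattern. Each one re-invokes the memberwise initializer of MediaEditorValues, passing `self.` for every stored property except the one being changed, which is why adding the `collage` and `collageTrackSamples` properties in this commit touches every helper. A minimal sketch of that pattern on a hypothetical two-field value type; the names EditorState, isMuted, and volume are illustrative and not from the source:

struct EditorState {
    let isMuted: Bool
    let volume: Double

    // Copy-and-override: rebuild the whole value, substituting one field.
    // Adding a stored property forces an edit to every such call site,
    // which is exactly what this diff does for `collage` and
    // `collageTrackSamples`.
    func withUpdatedIsMuted(_ isMuted: Bool) -> EditorState {
        return EditorState(isMuted: isMuted, volume: self.volume)
    }
}

A common way to avoid that churn (not what this code does) is to declare the stored properties as `var`, mutate a local `var copy = self`, and return the copy; newly added properties then carry over without editing each helper.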
func withUpdatedVideoTrimRange(_ videoTrimRange: Range<Double>) -> MediaEditorValues {
return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset)
return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, collage: self.collage, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, collageTrackSamples: self.collageTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset)
}
func withUpdatedDrawingAndEntities(drawing: UIImage?, entities: [CodableDrawingEntity]) -> MediaEditorValues {
return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, nightTheme: self.nightTheme, drawing: drawing, maskDrawing: self.maskDrawing, entities: entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset)
return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, collage: self.collage, nightTheme: self.nightTheme, drawing: drawing, maskDrawing: self.maskDrawing, entities: entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, collageTrackSamples: self.collageTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset)
}
public func withUpdatedMaskDrawing(maskDrawing: UIImage?) -> MediaEditorValues {
return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset)
return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, collage: self.collage, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, collageTrackSamples: self.collageTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset)
}
func withUpdatedToolValues(_ toolValues: [EditorToolKey: Any]) -> MediaEditorValues {
return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset)
return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, collage: self.collage, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, collageTrackSamples: self.collageTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset)
}
func withUpdatedAudioTrack(_ audioTrack: MediaAudioTrack?) -> MediaEditorValues {
return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset)
return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, collage: self.collage, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, collageTrackSamples: self.collageTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset)
}
func withUpdatedAudioTrackTrimRange(_ audioTrackTrimRange: Range<Double>?) -> MediaEditorValues {
return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset)
return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, collage: self.collage, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, collageTrackSamples: self.collageTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset)
}
func withUpdatedAudioTrackOffset(_ audioTrackOffset: Double?) -> MediaEditorValues {
return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset)
return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, collage: self.collage, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, collageTrackSamples: self.collageTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset)
}
func withUpdatedAudioTrackVolume(_ audioTrackVolume: CGFloat?) -> MediaEditorValues {
return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: audioTrackVolume, audioTrackSamples: self.audioTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset)
return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, collage: self.collage, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: audioTrackVolume, audioTrackSamples: self.audioTrackSamples, collageTrackSamples: self.collageTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset)
}
func withUpdatedAudioTrackSamples(_ audioTrackSamples: MediaAudioTrackSamples?) -> MediaEditorValues {
return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: audioTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset)
return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, collage: self.collage, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: audioTrackSamples, collageTrackSamples: self.collageTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset)
}
func withUpdatedCollageTrackSamples(_ collageTrackSamples: MediaAudioTrackSamples?) -> MediaEditorValues {
return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, collage: self.collage, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, collageTrackSamples: collageTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset)
}
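Aside (illustrative only, not from the diff): because every helper returns a fresh MediaEditorValues, callers can chain updates, e.g. to thread the new collage state through in one expression. The locals `values`, `items`, and `samples` below are assumed, not taken from the source:

let updated = values
    .withUpdatedCollage(items)
    .withUpdatedCollageTrackSamples(samples)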
func withUpdatedNightTheme(_ nightTheme: Bool) -> MediaEditorValues {
return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, nightTheme: nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset)
return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, collage: self.collage, nightTheme: nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, collageTrackSamples: self.collageTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset)
}
public func withUpdatedEntities(_ entities: [CodableDrawingEntity]) -> MediaEditorValues {
return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset)
|
||||
return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, collage: self.collage, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, collageTrackSamples: self.collageTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: self.qualityPreset)
|
||||
}
|
||||
|
||||
public func withUpdatedCoverImageTimestamp(_ coverImageTimestamp: Double?) -> MediaEditorValues {
|
||||
return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, coverImageTimestamp: coverImageTimestamp, qualityPreset: self.qualityPreset)
|
||||
return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, collage: self.collage, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, collageTrackSamples: self.collageTrackSamples, coverImageTimestamp: coverImageTimestamp, qualityPreset: self.qualityPreset)
|
||||
}
|
||||
|
||||
public func withUpdatedQualityPreset(_ qualityPreset: MediaQualityPreset?) -> MediaEditorValues {
|
||||
return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: qualityPreset)
|
||||
return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, collage: self.collage, nightTheme: self.nightTheme, drawing: self.drawing, maskDrawing: self.maskDrawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, collageTrackSamples: self.collageTrackSamples, coverImageTimestamp: self.coverImageTimestamp, qualityPreset: qualityPreset)
|
||||
}
|
||||
|
||||
public var resultDimensions: PixelDimensions {
|
||||
|
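Note: the withUpdated* family above is a copy-returning builder over an immutable value type; each call re-invokes the full initializer with one argument swapped. A minimal generic sketch of the same pattern, with a hypothetical Settings type that is not part of this change:

struct Settings {
    let width: Int
    let height: Int

    func withUpdatedWidth(_ width: Int) -> Settings {
        return Settings(width: width, height: self.height)
    }
}
// Settings(width: 720, height: 1280).withUpdatedWidth(1080).width == 1080
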
@ -5,6 +5,7 @@ import SwiftSignalKit
import TelegramCore
import Postbox
import ImageTransparency
import Photos

enum ExportWriterStatus {
    case unknown
@ -96,15 +97,9 @@ public final class MediaEditorVideoExport {
            return nil
        }
    }

    var additionalVideoStartTime: CMTime {
        let lowerBound = self.values.additionalVideoTrimRange?.lowerBound ?? 0.0
        let offset = -min(0.0, self.values.additionalVideoOffset ?? 0.0)
        if !lowerBound.isZero || !offset.isZero {
            return CMTime(seconds: offset + lowerBound, preferredTimescale: CMTimeScale(NSEC_PER_SEC))
        } else {
            return .zero
        }
        return videoStartTime(trimRange: self.values.additionalVideoTrimRange, offset: self.values.additionalVideoOffset)
    }

    var audioTimeRange: CMTimeRange? {
@ -189,12 +184,40 @@ public final class MediaEditorVideoExport {
    private var reader: AVAssetReader?
    private var videoOutput: AVAssetReaderOutput?
    private var textureRotation: TextureRotation = .rotate0Degrees
    private var videoRect: CGRect?
    private var frameRate: Float?

    private var additionalVideoOutput: AVAssetReaderOutput?
    private var additionalTextureRotation: TextureRotation = .rotate0Degrees
    private var additionalFrameRate: Float?
    private var additionalVideoDuration: Double?
    class VideoOutput {
        enum Output {
            case videoOutput(AVAssetReaderOutput)
            case image(UIImage)
        }
        let output: Output
        let rect: CGRect?
        let textureRotation: TextureRotation
        let duration: Double
        let frameRate: Float
        let startTime: CMTime

        init(
            output: Output,
            rect: CGRect?,
            textureRotation: TextureRotation,
            duration: Double,
            frameRate: Float,
            startTime: CMTime
        ) {
            self.output = output
            self.rect = rect
            self.textureRotation = textureRotation
            self.duration = duration
            self.frameRate = frameRate
            self.startTime = startTime
        }

        var skippingUpdate = false
    }
    private var additionalVideoOutput: [Int: VideoOutput] = [:]

    private var mainComposeFramerate: Float?

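Note: VideoOutput unifies still and moving collage slots behind one record, so the encode loop can treat every slot uniformly. A sketch of how the two cases might be populated; the rects and timings are illustrative, and trackOutput is assumed to come from the reader setup shown later in this diff:

func exampleSlots(stillImage: UIImage, trackOutput: AVAssetReaderTrackOutput) -> [Int: VideoOutput] {
    return [
        // Still slot: no reader output, no meaningful timing.
        0: VideoOutput(
            output: .image(stillImage),
            rect: CGRect(x: 0.0, y: 0.0, width: 540.0, height: 960.0),
            textureRotation: .rotate0Degrees,
            duration: 0.0,
            frameRate: 0.0,
            startTime: .zero
        ),
        // Video slot: wraps the track output created for its composition track.
        1: VideoOutput(
            output: .videoOutput(trackOutput),
            rect: CGRect(x: 540.0, y: 0.0, width: 540.0, height: 960.0),
            textureRotation: .rotate0Degrees,
            duration: 3.0,
            frameRate: 30.0,
            startTime: .zero
        )
    ]
}
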
@ -251,8 +274,8 @@ public final class MediaEditorVideoExport {
    }

    enum Input {
        case image(UIImage)
        case video(AVAsset)
        case image(image: UIImage, rect: CGRect?)
        case video(asset: AVAsset, rect: CGRect?, rotation: TextureRotation, duration: Double, trimRange: Range<Double>?, offset: Double?, volume: CGFloat?)
        case sticker(TelegramMediaFile)

        var isVideo: Bool {
@ -266,9 +289,52 @@ public final class MediaEditorVideoExport {
    private func setup() {
        var mainAsset: AVAsset?

        var signals: [Signal<Input, NoError>] = []

        var mainRect: CGRect?
        var additionalAsset: AVAsset?
        if let additionalPath = self.configuration.values.additionalVideoPath {
            additionalAsset = AVURLAsset(url: URL(fileURLWithPath: additionalPath))
        if !self.configuration.values.collage.isEmpty {
            for item in self.configuration.values.collage {
                switch item.content {
                case .main:
                    mainRect = item.frame
                case let .imageFile(path):
                    if let image = UIImage(contentsOfFile: path) {
                        signals.append(.single(.image(image: image, rect: item.frame)))
                    }
                case let .videoFile(path):
                    let asset = AVURLAsset(url: URL(fileURLWithPath: path))
                    signals.append(.single(.video(asset: asset, rect: item.frame, rotation: textureRotatonForAVAsset(asset, mirror: false), duration: asset.duration.seconds, trimRange: item.videoTrimRange, offset: item.videoOffset, volume: item.videoVolume)))
                case let .asset(localIdentifier, _):
                    let fetchResult = PHAsset.fetchAssets(withLocalIdentifiers: [localIdentifier], options: nil)
                    if fetchResult.count != 0 {
                        let asset = fetchResult.object(at: 0)

                        let signal: Signal<Input, NoError> = Signal { subscriber in
                            let options = PHVideoRequestOptions()
                            options.isNetworkAccessAllowed = true
                            options.deliveryMode = .highQualityFormat

                            PHImageManager.default().requestAVAsset(forVideo: asset, options: options, resultHandler: { avAsset, _, _ in
                                guard let avAsset else {
                                    subscriber.putCompletion()
                                    return
                                }
                                subscriber.putNext(.video(asset: avAsset, rect: item.frame, rotation: textureRotatonForAVAsset(avAsset, mirror: false), duration: avAsset.duration.seconds, trimRange: item.videoTrimRange, offset: item.videoOffset, volume: item.videoVolume))
                                subscriber.putCompletion()
                            })

                            return EmptyDisposable
                        }

                        signals.append(signal)
                    }
                }
            }
        } else if let additionalPath = self.configuration.values.additionalVideoPath {
            let asset = AVURLAsset(url: URL(fileURLWithPath: additionalPath))
            additionalAsset = asset
            signals = [.single(.video(asset: asset, rect: nil, rotation: textureRotatonForAVAsset(asset, mirror: true), duration: asset.duration.seconds, trimRange: nil, offset: nil, volume: nil))]
        }

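Note: for exposition only, the same Photos fetch can be expressed with async/await instead of a SwiftSignalKit Signal. This is an illustrative alternative, not part of the change; the function name is hypothetical:

import AVFoundation
import Photos

func loadCollageVideoAsset(localIdentifier: String) async -> AVAsset? {
    let fetchResult = PHAsset.fetchAssets(withLocalIdentifiers: [localIdentifier], options: nil)
    guard let phAsset = fetchResult.firstObject else {
        return nil
    }
    let options = PHVideoRequestOptions()
    options.isNetworkAccessAllowed = true
    options.deliveryMode = .highQualityFormat
    return await withCheckedContinuation { continuation in
        PHImageManager.default().requestAVAsset(forVideo: phAsset, options: options) { avAsset, _, _ in
            // The result handler fires once; resume with whatever we got (possibly nil).
            continuation.resume(returning: avAsset)
        }
    }
}
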
        var audioAsset: AVAsset?
@ -278,16 +344,14 @@
        }

        var mainInput: Input
        let additionalInput: Input? = additionalAsset.flatMap { .video($0) }
        var isStory = true

        switch self.subject {
        case let .video(asset, isStoryValue):
            mainAsset = asset
            mainInput = .video(asset)
            mainInput = .video(asset: asset, rect: mainRect, rotation: textureRotatonForAVAsset(asset), duration: asset.duration.seconds, trimRange: nil, offset: nil, volume: nil)
            isStory = isStoryValue
        case let .image(image):
            mainInput = .image(image)
            mainInput = .image(image: image, rect: nil)
        case let .sticker(file):
            mainInput = .sticker(file)
        }
@ -324,7 +388,13 @@
        }
        self.durationValue = duration

        self.setupWithInputs(main: mainInput, additional: additionalInput, audio: audioAsset, isStory: isStory)
        let _ = (combineLatest(signals)
        |> deliverOn(self.queue)).start(next: { [weak self] additionalInputs in
            guard let self else {
                return
            }
            self.setupWithInputs(main: mainInput, additional: additionalInputs, audio: audioAsset, isStory: isStory)
        })
    }

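Note: combineLatest here appears to gate setup on every collage input having produced a value. A toy restatement of that gating under the assumption that each loader calls back exactly once, using DispatchGroup purely for exposition (loaders are assumed to call back on the main queue; Input stands in for the real type):

import Foundation

func loadAll<Input>(_ loaders: [(@escaping (Input) -> Void) -> Void], completion: @escaping ([Input]) -> Void) {
    let group = DispatchGroup()
    var collected: [Int: Input] = [:]
    for (index, load) in loaders.enumerated() {
        group.enter()
        load { input in
            collected[index] = input    // keep original slot order via the index
            group.leave()
        }
    }
    group.notify(queue: .main) {
        completion((0 ..< loaders.count).compactMap { collected[$0] })
    }
}
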
    private func setupComposer() {
@ -337,6 +407,11 @@
            duration = nil
        }

        var additionalVideoDuration: Double?
        if self.configuration.values.collage.isEmpty, let output = self.additionalVideoOutput.values.first {
            additionalVideoDuration = output.duration
        }

        self.composer = MediaEditorComposer(
            postbox: self.postbox,
            values: self.configuration.values,
@ -344,19 +419,38 @@
            outputDimensions: self.configuration.dimensions,
            textScale: self.textScale,
            videoDuration: duration,
            additionalVideoDuration: self.additionalVideoDuration
            additionalVideoDuration: additionalVideoDuration
        )
    }

    private func setupWithInputs(main: Input, additional: Input?, audio: AVAsset?, isStory: Bool) {
    private func setupWithInputs(main: Input, additional: [Input], audio: AVAsset?, isStory: Bool) {
        var hasVideoOrAudio = false
        if main.isVideo || additional?.isVideo == true || audio != nil {
        if main.isVideo || audio != nil {
            hasVideoOrAudio = true
        }
        for input in additional {
            if input.isVideo {
                hasVideoOrAudio = true
            }
        }

        enum AdditionalTrack {
            case image(image: UIImage, rect: CGRect?)
            case video(track: AVMutableCompositionTrack, rect: CGRect?, rotation: TextureRotation, duration: Double, frameRate: Float, startTime: CMTime?)
        }

        func frameRate(for track: AVCompositionTrack) -> Float {
            if track.nominalFrameRate > 0.0 {
                return track.nominalFrameRate
            } else if track.minFrameDuration.seconds > 0.0 {
                return Float(1.0 / track.minFrameDuration.seconds)
            }
            return 30.0
        }

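Note: the frame-rate helper above resolves in three steps; a worked restatement of the same fallback chain, for clarity only:

import CoreMedia

func resolvedFrameRate(nominal: Float, minFrameDuration: CMTime) -> Float {
    if nominal > 0.0 {
        return nominal                            // 1. trust the declared rate
    } else if minFrameDuration.seconds > 0.0 {
        return Float(1.0 / minFrameDuration.seconds) // 2. derive it from frame spacing
    }
    return 30.0                                   // 3. last-resort default
}
// resolvedFrameRate(nominal: 0, minFrameDuration: CMTime(value: 1, timescale: 60)) == 60.0
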
        var composition: AVMutableComposition?
        var mainVideoTrack: AVMutableCompositionTrack?
        var additionalVideoTrack: AVMutableCompositionTrack?
        var additionalTracks: [AdditionalTrack] = []
        var audioMix: AVMutableAudioMix?

        if hasVideoOrAudio, let duration = self.durationValue {
@ -376,8 +470,9 @@
            }

            var readerRange = wholeRange
            if case let .video(asset) = main {
                self.textureRotation = textureRotatonForAVAsset(asset)
            if case let .video(asset, rect, rotation, _, _, _, _) = main {
                self.videoRect = rect
                self.textureRotation = rotation
                if let videoAssetTrack = asset.tracks(withMediaType: .video).first {
                    if let compositionTrack = composition?.addMutableTrack(withMediaType: .video, preferredTrackID: kCMPersistentTrackID_Invalid) {
                        mainVideoTrack = compositionTrack
@ -402,10 +497,42 @@
                    readerRange = timeRange
                }
            }
            if let additional, case let .video(asset) = additional {
                self.additionalTextureRotation = textureRotatonForAVAsset(asset, mirror: true)
                self.additionalVideoDuration = asset.duration.seconds

            if !self.configuration.values.collage.isEmpty {
                for input in additional {
                    switch input {
                    case let .image(image, rect):
                        additionalTracks.append(.image(image: image, rect: rect))
                    case let .video(asset, rect, rotation, duration, trimRange, offset, volume):
                        let startTime = videoStartTime(trimRange: trimRange, offset: offset)
                        let timeRange = clampedRange(trackDuration: asset.duration, trackTrimRange: videoTimeRange(trimRange: trimRange), trackStart: startTime, maxDuration: readerRange.end)

                        if let videoAssetTrack = asset.tracks(withMediaType: .video).first {
                            if let compositionTrack = composition?.addMutableTrack(withMediaType: .video, preferredTrackID: kCMPersistentTrackID_Invalid) {
                                additionalTracks.append(.video(track: compositionTrack, rect: rect, rotation: rotation, duration: duration, frameRate: frameRate(for: compositionTrack), startTime: startTime))

                                compositionTrack.preferredTransform = videoAssetTrack.preferredTransform

                                try? compositionTrack.insertTimeRange(timeRange, of: videoAssetTrack, at: startTime)
                            }
                        }
                        if let audioAssetTrack = asset.tracks(withMediaType: .audio).first, volume ?? 1.0 > 0.01 {
                            if let compositionTrack = composition?.addMutableTrack(withMediaType: .audio, preferredTrackID: kCMPersistentTrackID_Invalid) {
                                try? compositionTrack.insertTimeRange(timeRange, of: audioAssetTrack, at: startTime)

                                if let volume, volume != 1.0 {
                                    let trackParameters = AVMutableAudioMixInputParameters(track: compositionTrack)
                                    trackParameters.trackID = compositionTrack.trackID
                                    trackParameters.setVolume(Float(volume), at: .zero)
                                    audioMixParameters.append(trackParameters)
                                }
                            }
                        }
                    default:
                        break
                    }
                }
            } else if let additional = additional.first, case let .video(asset, _, rotation, duration, _, _, _) = additional {
                let startTime: CMTime
                let timeRange: CMTimeRange
                if mainVideoTrack == nil {
@ -418,7 +545,8 @@

                if let videoAssetTrack = asset.tracks(withMediaType: .video).first {
                    if let compositionTrack = composition?.addMutableTrack(withMediaType: .video, preferredTrackID: kCMPersistentTrackID_Invalid) {
                        additionalVideoTrack = compositionTrack
                        additionalTracks.append(.video(track: compositionTrack, rect: nil, rotation: rotation, duration: duration, frameRate: frameRate(for: compositionTrack), startTime: self.configuration.additionalVideoStartTime))

                        compositionTrack.preferredTransform = videoAssetTrack.preferredTransform

                        try? compositionTrack.insertTimeRange(timeRange, of: videoAssetTrack, at: startTime)
@ -440,9 +568,10 @@
                    readerRange = timeRange
                }
            }

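Note: per-slot volume above goes through AVMutableAudioMix; a self-contained sketch of the same pattern with generic names, assuming one composition audio track:

import AVFoundation

// Minimal sketch: attenuate one composition audio track to a fixed volume from t = 0.
func makeAudioMix(for track: AVMutableCompositionTrack, volume: Float) -> AVAudioMix {
    let parameters = AVMutableAudioMixInputParameters(track: track)
    parameters.trackID = track.trackID
    parameters.setVolume(volume, at: .zero)
    let mix = AVMutableAudioMix()
    mix.inputParameters = [parameters]
    return mix
}
// The resulting mix is handed to the AVAssetReaderAudioMixOutput (or player item) consuming the composition.
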
            if let audio, let audioAssetTrack = audio.tracks(withMediaType: .audio).first {
                let startTime: CMTime
                if mainVideoTrack == nil && additionalVideoTrack == nil {
                if mainVideoTrack == nil && additionalTracks.isEmpty {
                    startTime = .zero
                } else {
                    startTime = self.configuration.audioStartTime
@ -510,35 +639,51 @@
            }
            self.videoOutput = videoOutput
        }
        if let additionalVideoTrack {
            let videoOutput = AVAssetReaderTrackOutput(track: additionalVideoTrack, outputSettings: outputSettings)
            videoOutput.alwaysCopiesSampleData = true
            if reader.canAdd(videoOutput) {
                reader.add(videoOutput)
            } else {
                self.internalStatus = .finished
                self.statusValue = .failed(.addVideoOutput)

        var additionalIndex = 0
        for track in additionalTracks {
            switch track {
            case let .image(image, rect):
                self.additionalVideoOutput[additionalIndex] = VideoOutput(
                    output: .image(image),
                    rect: rect,
                    textureRotation: .rotate0Degrees,
                    duration: 0.0,
                    frameRate: 0.0,
                    startTime: .zero
                )
            case let .video(track, rect, rotation, duration, frameRate, startTime):
                let videoOutput = AVAssetReaderTrackOutput(track: track, outputSettings: outputSettings)
                videoOutput.alwaysCopiesSampleData = true
                if reader.canAdd(videoOutput) {
                    reader.add(videoOutput)
                } else {
                    self.internalStatus = .finished
                    self.statusValue = .failed(.addVideoOutput)
                }

                self.additionalVideoOutput[additionalIndex] = VideoOutput(
                    output: .videoOutput(videoOutput),
                    rect: rect,
                    textureRotation: rotation,
                    duration: duration,
                    frameRate: frameRate,
                    startTime: startTime ?? .zero
                )
            }
            self.additionalVideoOutput = videoOutput
            additionalIndex += 1
        }

        func frameRate(for track: AVCompositionTrack) -> Float {
            if track.nominalFrameRate > 0.0 {
                return track.nominalFrameRate
            } else if track.minFrameDuration.seconds > 0.0 {
                return Float(1.0 / track.minFrameDuration.seconds)
            }
            return 30.0
        }

        if let mainVideoTrack {
            self.frameRate = frameRate(for: mainVideoTrack)
        }
        if let additionalVideoTrack {
            self.additionalFrameRate = frameRate(for: additionalVideoTrack)

        var additionalFrameRate: Float?
        if self.configuration.values.collage.isEmpty, let output = self.additionalVideoOutput.values.first {
            additionalFrameRate = output.frameRate
        }
        let sourceFrameRate: Float = (self.frameRate ?? self.additionalFrameRate) ?? 30.0
        let sourceFrameRate: Float = (self.frameRate ?? additionalFrameRate) ?? 30.0
        self.mainComposeFramerate = round(sourceFrameRate / 30.0) * 30.0
        writer.setupVideoInput(configuration: self.configuration, preferredTransform: nil, sourceFrameRate: sourceFrameRate)

@ -559,10 +704,10 @@
            writer.setupAudioInput(configuration: self.configuration)
        }
    }
}

    private var skippingAdditionalCopyUpdate = false

        self.start()
    }

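Note: mainComposeFramerate snaps the source rate to the nearest multiple of 30; a quick check of that rounding with representative rates:

// round(sourceFrameRate / 30.0) * 30.0 snaps to the nearest multiple of 30:
//   24 fps   -> round(0.8)   * 30 = 30
//   29.97    -> round(0.999) * 30 = 30
//   48 fps   -> round(1.6)   * 30 = 60
//   60 fps   -> round(2.0)   * 30 = 60
let rates: [Float] = [24.0, 29.97, 48.0, 60.0]
let snapped = rates.map { round($0 / 30.0) * 30.0 }   // [30.0, 30.0, 60.0, 60.0]
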
    private func encodeVideo() -> Bool {
        guard let writer = self.writer else {
            return false
@ -587,7 +732,7 @@
        var updatedProgress = false

        var mainInput: MediaEditorComposer.Input?
        var additionalInput: MediaEditorComposer.Input?
        var additionalInput: [MediaEditorComposer.Input?] = []
        var mainTimestamp: CMTime?
        if let videoOutput = self.videoOutput {
            if let sampleBuffer = videoOutput.copyNextSampleBuffer() {
@ -598,7 +743,7 @@
                    pixelBuffer: pixelBuffer,
                    rotation: self.textureRotation,
                    timestamp: timestamp
                ))
                ), self.videoRect)

                if let duration = self.durationValue {
                    let startTime = self.reader?.timeRange.start.seconds ?? 0.0
@ -612,39 +757,53 @@
                return false
            }
        }
        if let additionalVideoOutput = self.additionalVideoOutput {
            if let mainTimestamp, mainTimestamp < self.configuration.additionalVideoStartTime {

            } else {
                if self.skippingAdditionalCopyUpdate {
                    self.skippingAdditionalCopyUpdate = false
                } else if let sampleBuffer = additionalVideoOutput.copyNextSampleBuffer() {
                    if let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) {
                        let timestamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
                        additionalInput = .videoBuffer(VideoPixelBuffer(
                            pixelBuffer: pixelBuffer,
                            rotation: self.additionalTextureRotation,
                            timestamp: timestamp
                        ))

                        if !updatedProgress, let duration = self.durationValue {
                            let startTime = self.reader?.timeRange.start.seconds ?? 0.0
                            let progress = (timestamp.seconds - startTime) / duration.seconds
                            self.statusValue = .progress(Float(progress))
                            updatedProgress = true
                        }
                    }
                    if let additionalFrameRate = self.additionalFrameRate, let mainComposeFramerate = self.mainComposeFramerate {
                        let additionalFrameRate = round(additionalFrameRate / 30.0) * 30.0
                        if Int(mainComposeFramerate) == Int(additionalFrameRate) * 2 {
                            self.skippingAdditionalCopyUpdate = true

        for i in 0 ..< self.additionalVideoOutput.count {
            if let additionalVideoOutput = self.additionalVideoOutput[i] {
                if let mainTimestamp, mainTimestamp < additionalVideoOutput.startTime {

                } else {
                    if additionalVideoOutput.skippingUpdate {
                        additionalVideoOutput.skippingUpdate = false
                    } else {
                        switch additionalVideoOutput.output {
                        case let .image(image):
                            if let texture = self.composer?.textureForImage(index: i, image: image) {
                                additionalInput.append(.texture(texture, .zero, false, additionalVideoOutput.rect))
                            }
                        case let .videoOutput(videoOutput):
                            if let sampleBuffer = videoOutput.copyNextSampleBuffer() {
                                if let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) {
                                    let timestamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
                                    additionalInput.append(.videoBuffer(VideoPixelBuffer(
                                        pixelBuffer: pixelBuffer,
                                        rotation: additionalVideoOutput.textureRotation,
                                        timestamp: timestamp
                                    ), additionalVideoOutput.rect))

                                    if !updatedProgress, let duration = self.durationValue {
                                        let startTime = self.reader?.timeRange.start.seconds ?? 0.0
                                        let progress = (timestamp.seconds - startTime) / duration.seconds
                                        self.statusValue = .progress(Float(progress))
                                        updatedProgress = true
                                    }
                                }
                                if let mainComposeFramerate = self.mainComposeFramerate {
                                    let additionalFrameRate = round(additionalVideoOutput.frameRate / 30.0) * 30.0
                                    if Int(mainComposeFramerate) == Int(additionalFrameRate) * 2 {
                                        additionalVideoOutput.skippingUpdate = true
                                    }
                                }
                            }
                        }
                    }
                }
            }
        }
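Note: the skippingUpdate flag halves a slot's pull rate when the compose framerate is exactly twice the slot's rate. A compact sketch of that cadence (values illustrative):

let mainFps = 60
let slotFps = 30
var skip = false
var pulls = 0
for _ in 0 ..< 8 {
    if skip {
        skip = false            // reuse the slot's previous frame this tick
        continue
    }
    pulls += 1                  // copyNextSampleBuffer() would happen here
    if mainFps == slotFps * 2 {
        skip = true             // skip the next tick to stay in sync
    }
}
// pulls == 4: the 30 fps slot is sampled on every other 60 fps main frame.
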
        if case let .image(image) = self.subject, let texture = self.composer?.textureForImage(image) {
            mainInput = .texture(texture, self.imageArguments?.position ?? .zero, imageHasTransparency(image))

        if case let .image(image) = self.subject, let texture = self.composer?.textureForImage(index: -1, image: image) {
            mainInput = .texture(texture, self.imageArguments?.position ?? .zero, imageHasTransparency(image), nil)

            if !updatedProgress, let imageArguments = self.imageArguments, let duration = self.durationValue {
                let progress = imageArguments.position.seconds / duration.seconds
@ -659,7 +818,7 @@
            timestamp = imageArguments.position
        } else {
            if case .image = self.subject {
                timestamp = additionalInput?.timestamp
                timestamp = additionalInput.first??.timestamp
            } else {
                timestamp = mainInput?.timestamp
            }
@ -749,7 +908,7 @@
        return true
    }

    public func start() {
    private func start() {
        guard self.internalStatus == .idle, let writer = self.writer else {
            self.statusValue = .failed(.invalid)
            return
@ -765,7 +924,7 @@
            return
        }

        if self.additionalVideoOutput == nil {
        if self.additionalVideoOutput.isEmpty {
            switch self.subject {
            case .image, .sticker:
                self.imageArguments = (Double(self.configuration.frameRate), CMTime(value: 0, timescale: Int32(self.configuration.frameRate)))
@ -923,3 +1082,21 @@
        return self.statusPromise.get()
    }
}

private func videoStartTime(trimRange: Range<Double>?, offset: Double?) -> CMTime {
    let lowerBound = trimRange?.lowerBound ?? 0.0
    let offset = -min(0.0, offset ?? 0.0)
    if !lowerBound.isZero || !offset.isZero {
        return CMTime(seconds: offset + lowerBound, preferredTimescale: CMTimeScale(NSEC_PER_SEC))
    } else {
        return .zero
    }
}

private func videoTimeRange(trimRange: Range<Double>?) -> CMTimeRange? {
    if let videoTrimRange = trimRange {
        return CMTimeRange(start: CMTime(seconds: videoTrimRange.lowerBound, preferredTimescale: CMTimeScale(NSEC_PER_SEC)), end: CMTime(seconds: videoTrimRange.upperBound, preferredTimescale: CMTimeScale(NSEC_PER_SEC)))
    } else {
        return nil
    }
}

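Note: a worked example for the two helpers above, assuming a slot trimmed to 2.0..<5.0 s with a -1.5 s sync offset:

// videoStartTime(trimRange: 2.0..<5.0, offset: -1.5):
//   lowerBound = 2.0, offset = -min(0.0, -1.5) = 1.5  ->  CMTime(seconds: 3.5)
// videoTimeRange(trimRange: 2.0..<5.0):
//   -> CMTimeRange from 2.0 s to 5.0 s (nil when the slot is untrimmed)
let start = videoStartTime(trimRange: 2.0 ..< 5.0, offset: -1.5)   // 3.5 s
let range = videoTimeRange(trimRange: 2.0 ..< 5.0)                 // [2.0 s, 5.0 s)
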
@ -7,8 +7,8 @@ import SwiftSignalKit

final class UniversalTextureSource: TextureSource {
    enum Input {
        case image(UIImage)
        case video(AVPlayerItem)
        case image(UIImage, CGRect?)
        case video(AVPlayerItem, CGRect?)
        case entity(MediaEditorComposerEntity)

        fileprivate func createContext(renderTarget: RenderTarget, queue: DispatchQueue, additional: Bool) -> InputContext {
@ -28,7 +28,7 @@ final class UniversalTextureSource: TextureSource {
    private let queue: DispatchQueue

    private var mainInputContext: InputContext?
    private var additionalInputContext: InputContext?
    private var additionalInputContexts: [InputContext] = []

    var forceUpdates = false
    private var rate: Float = 1.0
@ -48,7 +48,7 @@ final class UniversalTextureSource: TextureSource {
    }

    var mainImage: UIImage? {
        if let mainInput = self.mainInputContext?.input, case let .image(image) = mainInput {
        if let mainInput = self.mainInputContext?.input, case let .image(image, _) = mainInput {
            return image
        }
        return nil
@ -62,15 +62,11 @@ final class UniversalTextureSource: TextureSource {
        self.update(forced: true)
    }

    func setAdditionalInput(_ input: Input?) {
    func setAdditionalInputs(_ inputs: [Input]) {
        guard let renderTarget = self.renderTarget else {
            return
        }
        if let input {
            self.additionalInputContext = input.createContext(renderTarget: renderTarget, queue: self.queue, additional: true)
        } else {
            self.additionalInputContext = nil
        }
        self.additionalInputContexts = inputs.map { $0.createContext(renderTarget: renderTarget, queue: self.queue, additional: true) }
        self.update(forced: true)
    }

@ -79,7 +75,7 @@
        self.rate = rate
    }

    private var previousAdditionalOutput: MediaEditorRenderer.Input?
    private var previousAdditionalOutput: [Int: MediaEditorRenderer.Input] = [:]
    private var readyForMoreData = Atomic<Bool>(value: true)
    private func update(forced: Bool) {
        let time = CACurrentMediaTime()
@ -89,7 +85,15 @@
            fps = 30
        }

        let needsDisplayLink = (self.mainInputContext?.needsDisplayLink ?? false) || (self.additionalInputContext?.needsDisplayLink ?? false)
        var additionalsNeedDisplayLink = false
        for context in self.additionalInputContexts {
            if context.needsDisplayLink {
                additionalsNeedDisplayLink = true
                break
            }
        }

        let needsDisplayLink = (self.mainInputContext?.needsDisplayLink ?? false) || additionalsNeedDisplayLink
        if needsDisplayLink {
            if self.displayLink == nil {
                let displayLink = CADisplayLink(target: DisplayLinkTarget({ [weak self] in
@ -122,22 +126,32 @@
                    return
                }
                if let main {
                    self.output?.consume(main: main, additional: nil, render: true)
                    self.output?.consume(main: main, additionals: [], render: true)
                }
                let _ = self.readyForMoreData.swap(true)
            })
        } else {
            let main = self.mainInputContext?.output(time: time)
            var additional = self.additionalInputContext?.output(time: time)
            if let additional {
                self.previousAdditionalOutput = additional
            } else if self.additionalInputContext != nil {
                additional = self.previousAdditionalOutput
            var additionals: [(Int, InputContext.Output?)] = []
            var index = 0
            for context in self.additionalInputContexts {
                additionals.append((index, context.output(time: time)))
                index += 1
            }
            for (index, output) in additionals {
                if let output {
                    self.previousAdditionalOutput[index] = output
                }
            }
            for (index, output) in additionals {
                if output == nil {
                    additionals[index] = (index, self.previousAdditionalOutput[index])
                }
            }
            guard let main else {
                return
            }
            self.output?.consume(main: main, additional: additional, render: true)
            self.output?.consume(main: main, additionals: additionals.compactMap { $0.1 }, render: true)
        }
    }

@ -148,7 +162,7 @@

    func invalidate() {
        self.mainInputContext?.invalidate()
        self.additionalInputContext?.invalidate()
        self.additionalInputContexts.forEach { $0.invalidate() }
    }

    private class DisplayLinkTarget {
@ -168,6 +182,8 @@ protocol InputContext {

    var input: Input { get }

    var rect: CGRect? { get }

    var useAsyncOutput: Bool { get }
    func output(time: Double) -> Output?
    func asyncOutput(time: Double, completion: @escaping (Output?) -> Void)
@ -191,12 +207,14 @@ private class ImageInputContext: InputContext {
    fileprivate var input: Input
    private var texture: MTLTexture?
    private var hasTransparency = false
    fileprivate var rect: CGRect?

    init(input: Input, renderTarget: RenderTarget, queue: DispatchQueue) {
        guard case let .image(image) = input else {
        guard case let .image(image, rect) = input else {
            fatalError()
        }
        self.input = input
        self.rect = rect
        if let device = renderTarget.mtlDevice {
            self.texture = loadTexture(image: image, device: device)
        }
@ -204,7 +222,7 @@
    }

    func output(time: Double) -> Output? {
        return self.texture.flatMap { .texture($0, .zero, self.hasTransparency) }
        return self.texture.flatMap { .texture($0, .zero, self.hasTransparency, self.rect) }
    }

    func invalidate() {
@ -220,23 +238,26 @@ private class VideoInputContext: NSObject, InputContext, AVPlayerItemOutputPullD
    fileprivate var input: Input
    private var videoOutput: AVPlayerItemVideoOutput?
    private var textureRotation: TextureRotation = .rotate0Degrees
    fileprivate var rect: CGRect?

    var playerItem: AVPlayerItem {
        guard case let .video(playerItem) = self.input else {
        guard case let .video(playerItem, _) = self.input else {
            fatalError()
        }
        return playerItem
    }

    init(input: Input, renderTarget: RenderTarget, queue: DispatchQueue, additional: Bool) {
        guard case .video = input else {
        guard case let .video(_, rect) = input else {
            fatalError()
        }
        self.input = input
        self.rect = rect

        super.init()

        //TODO: mirror if self.additionalPlayer == nil && self.mirror
        self.textureRotation = textureRotatonForAVAsset(self.playerItem.asset, mirror: additional)
        self.textureRotation = textureRotatonForAVAsset(self.playerItem.asset, mirror: rect == nil ? additional : false)

        let colorProperties: [String: Any] = [
            AVVideoColorPrimariesKey: AVVideoColorPrimaries_ITU_R_709_2,
@ -270,7 +291,7 @@ private class VideoInputContext: NSObject, InputContext, AVPlayerItemOutputPullD
        if let pixelBuffer = videoOutput.copyPixelBuffer(forItemTime: requestTime, itemTimeForDisplay: &presentationTime) {
            videoPixelBuffer = VideoPixelBuffer(pixelBuffer: pixelBuffer, rotation: self.textureRotation, timestamp: presentationTime)
        }
        return videoPixelBuffer.flatMap { .videoBuffer($0) }
        return videoPixelBuffer.flatMap { .videoBuffer($0, self.rect) }
    }

    func invalidate() {
@ -290,6 +311,8 @@ final class EntityInputContext: NSObject, InputContext, AVPlayerItemOutputPullDe
    internal var input: Input
    private var textureRotation: TextureRotation = .rotate0Degrees

    var rect: CGRect?

    var entity: MediaEditorComposerEntity {
        guard case let .entity(entity) = self.input else {
            fatalError()

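Note: update(forced:) above keeps the last delivered frame per slot, so a slot that produced nothing this tick reuses its previous output. A condensed, self-contained restatement of that cache (String stands in for MediaEditorRenderer.Input):

var cache: [Int: String] = [:]                 // persists across ticks
func resolve(_ outputs: [String?]) -> [String] {
    for (index, output) in outputs.enumerated() {
        if let output {
            cache[index] = output              // remember the newest frame
        }
    }
    return outputs.enumerated().compactMap { index, output in
        output ?? cache[index]                 // fall back to the last known frame
    }
}
// resolve(["a", nil]) == ["a"] on the first tick; once cache[1] is filled by a
// later tick, a nil in slot 1 resolves to that cached frame instead of dropping it.
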
@ -106,6 +106,124 @@ private func verticesData(
    ]
}

private func verticesData(
    size: CGSize,
    textureRotation: TextureRotation,
    containerSize: CGSize,
    textureRect: CGRect,
    z: Float = 0.0
) -> [VertexData] {
    let textureRect = CGRect(origin: CGPoint(x: textureRect.origin.x, y: containerSize.height - textureRect.maxY), size: textureRect.size)

    let containerAspect = textureRect.width / textureRect.height
    let imageAspect = size.width / size.height

    let texCoordScale: simd_float2
    if imageAspect > containerAspect {
        texCoordScale = simd_float2(Float(containerAspect / imageAspect), 1.0)
    } else {
        texCoordScale = simd_float2(1.0, Float(imageAspect / containerAspect))
    }

    let scaledTopLeft = simd_float2(0.5 - texCoordScale.x * 0.5, 0.5 + texCoordScale.y * 0.5)
    let scaledTopRight = simd_float2(0.5 + texCoordScale.x * 0.5, 0.5 + texCoordScale.y * 0.5)
    let scaledBottomLeft = simd_float2(0.5 - texCoordScale.x * 0.5, 0.5 - texCoordScale.y * 0.5)
    let scaledBottomRight = simd_float2(0.5 + texCoordScale.x * 0.5, 0.5 - texCoordScale.y * 0.5)

    let topLeft: simd_float2
    let topRight: simd_float2
    let bottomLeft: simd_float2
    let bottomRight: simd_float2

    switch textureRotation {
    case .rotate0Degrees:
        topLeft = scaledTopLeft
        topRight = scaledTopRight
        bottomLeft = scaledBottomLeft
        bottomRight = scaledBottomRight
    case .rotate0DegreesMirrored:
        topLeft = scaledTopRight
        topRight = scaledTopLeft
        bottomLeft = scaledBottomRight
        bottomRight = scaledBottomLeft
    case .rotate180Degrees:
        topLeft = scaledBottomRight
        topRight = scaledBottomLeft
        bottomLeft = scaledTopRight
        bottomRight = scaledTopLeft
    case .rotate90Degrees:
        topLeft = scaledTopRight
        topRight = scaledBottomRight
        bottomLeft = scaledTopLeft
        bottomRight = scaledBottomLeft
    case .rotate90DegreesMirrored:
        topLeft = scaledBottomRight
        topRight = scaledTopRight
        bottomLeft = scaledBottomLeft
        bottomRight = scaledTopLeft
    case .rotate270Degrees:
        topLeft = scaledBottomLeft
        topRight = scaledTopLeft
        bottomLeft = scaledBottomRight
        bottomRight = scaledTopRight
    }

    let containerSize = CGSize(width: containerSize.width, height: containerSize.height)

    let centerX = Float(textureRect.midX - containerSize.width / 2.0)
    let centerY = Float(textureRect.midY - containerSize.height / 2.0)

    let halfWidth = Float(textureRect.width / 2.0)
    let halfHeight = Float(textureRect.height / 2.0)

    let angle = Float.pi
    let cosAngle = cos(angle)
    let sinAngle = sin(angle)

    return [
        VertexData(
            pos: simd_float4(
                x: (centerX + (halfWidth * cosAngle) - (halfHeight * sinAngle)) / Float(containerSize.width) * 2.0,
                y: (centerY + (halfWidth * sinAngle) + (halfHeight * cosAngle)) / Float(containerSize.height) * 2.0,
                z: z,
                w: 1
            ),
            texCoord: topLeft,
            localPos: simd_float2(0.0, 0.0)
        ),
        VertexData(
            pos: simd_float4(
                x: (centerX - (halfWidth * cosAngle) - (halfHeight * sinAngle)) / Float(containerSize.width) * 2.0,
                y: (centerY - (halfWidth * sinAngle) + (halfHeight * cosAngle)) / Float(containerSize.height) * 2.0,
                z: z,
                w: 1
            ),
            texCoord: topRight,
            localPos: simd_float2(1.0, 0.0)
        ),
        VertexData(
            pos: simd_float4(
                x: (centerX + (halfWidth * cosAngle) + (halfHeight * sinAngle)) / Float(containerSize.width) * 2.0,
                y: (centerY + (halfWidth * sinAngle) - (halfHeight * cosAngle)) / Float(containerSize.height) * 2.0,
                z: z,
                w: 1
            ),
            texCoord: bottomLeft,
            localPos: simd_float2(0.0, 1.0)
        ),
        VertexData(
            pos: simd_float4(
                x: (centerX - (halfWidth * cosAngle) + (halfHeight * sinAngle)) / Float(containerSize.width) * 2.0,
                y: (centerY - (halfWidth * sinAngle) - (halfHeight * cosAngle)) / Float(containerSize.height) * 2.0,
                z: z,
                w: 1
            ),
            texCoord: bottomRight,
            localPos: simd_float2(1.0, 1.0)
        )
    ]
}

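Note: texCoordScale above implements aspect-fill cropping by shrinking the sampled UV window rather than stretching the quad. A worked case:

// A 16:9 source (imageAspect ~ 1.78) in a square slot (containerAspect = 1.0).
// imageAspect > containerAspect, so the sampled U range shrinks:
//   texCoordScale = (1.0 / 1.78, 1.0) ~ (0.5625, 1.0)
//   U window = [0.5 - 0.28125, 0.5 + 0.28125], a center crop of the width.
let imageAspect: CGFloat = 16.0 / 9.0
let containerAspect: CGFloat = 1.0
let uScale = Float(containerAspect / imageAspect)   // ~0.5625; V stays at 1.0
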
private func lookupSpringValue(_ t: CGFloat) -> CGFloat {
    let table: [(CGFloat, CGFloat)] = [
        (0.0, 0.0),
@ -198,6 +316,41 @@ final class VideoFinishPass: RenderPass {
        }
    }

    func encodeVideo(
        using encoder: MTLRenderCommandEncoder,
        containerSize: CGSize,
        texture: MTLTexture,
        textureRotation: TextureRotation,
        rect: CGRect,
        zPosition: Float,
        device: MTLDevice
    ) {
        encoder.setFragmentTexture(texture, index: 0)
        encoder.setFragmentTexture(texture, index: 1)

        let vertices = verticesData(
            size: CGSize(width: texture.width, height: texture.height),
            textureRotation: textureRotation,
            containerSize: containerSize,
            textureRect: rect,
            z: zPosition
        )
        let buffer = device.makeBuffer(
            bytes: vertices,
            length: MemoryLayout<VertexData>.stride * vertices.count,
            options: [])
        encoder.setVertexBuffer(buffer, offset: 0, index: 0)

        var parameters = VideoEncodeParameters(
            dimensions: simd_float2(Float(rect.size.width), Float(rect.size.height)),
            roundness: 0.0,
            alpha: 1.0,
            isOpaque: 1.0
        )
        encoder.setFragmentBytes(&parameters, length: MemoryLayout<VideoEncodeParameters>.size, index: 0)
        encoder.drawPrimitives(type: .triangleStrip, vertexStart: 0, vertexCount: 4)
    }

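Note: verticesData maps the slot rect into Metal clip space after flipping Y (UIKit's origin is top-left; clip space is centered with +Y up). A worked mapping for a 1080x1920 container and a slot covering the top-left quadrant:

let container = CGSize(width: 1080.0, height: 1920.0)
let slot = CGRect(x: 0.0, y: 0.0, width: 540.0, height: 960.0)
// Flip: origin.y = 1920 - 960 = 960 -> rect (0, 960, 540, 960)
let flipped = CGRect(x: slot.origin.x, y: container.height - slot.maxY, width: slot.width, height: slot.height)
let ndcX = Float(flipped.midX - container.width / 2.0) / Float(container.width) * 2.0    // -0.5
let ndcY = Float(flipped.midY - container.height / 2.0) / Float(container.height) * 2.0  //  0.5
// The quad's center lands at (-0.5, 0.5): the upper-left quadrant of clip space.
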
    func encodeVideo(
        using encoder: MTLRenderCommandEncoder,
        containerSize: CGSize,
@ -228,7 +381,14 @@
            height: position.size.height * position.scale * position.baseScale
        )

        let vertices = verticesData(textureRotation: textureRotation, containerSize: containerSize, position: center, size: size, rotation: position.rotation, z: zPosition)
        let vertices = verticesData(
            textureRotation: textureRotation,
            containerSize: containerSize,
            position: center,
            size: size,
            rotation: position.rotation,
            z: zPosition
        )
        let buffer = device.makeBuffer(
            bytes: vertices,
            length: MemoryLayout<VertexData>.stride * vertices.count,
@ -491,34 +651,40 @@
        return (backgroundVideoState, foregroundVideoState, disappearingVideoState)
    }

    struct Input {
        let texture: MTLTexture
        let hasTransparency: Bool
        let rect: CGRect?
    }

    func process(
        input: MTLTexture,
        input: Input,
        inputMask: MTLTexture?,
        hasTransparency: Bool,
        secondInput: MTLTexture?,
        secondInput: [Input],
        timestamp: CMTime,
        device: MTLDevice,
        commandBuffer: MTLCommandBuffer
    ) -> MTLTexture? {
        if !self.isStory {
            return input
            return input.texture
        }

        let baseScale: CGFloat
        if !self.isSticker {
            if input.height > input.width {
                baseScale = max(canvasSize.width / CGFloat(input.width), canvasSize.height / CGFloat(input.height))
            if input.texture.height > input.texture.width {
                baseScale = max(canvasSize.width / CGFloat(input.texture.width), canvasSize.height / CGFloat(input.texture.height))
            } else {
                baseScale = canvasSize.width / CGFloat(input.width)
                baseScale = canvasSize.width / CGFloat(input.texture.width)
            }
        } else {
            if input.height > input.width {
                baseScale = canvasSize.width / CGFloat(input.width)
            if input.texture.height > input.texture.width {
                baseScale = canvasSize.width / CGFloat(input.texture.width)
            } else {
                baseScale = canvasSize.width / CGFloat(input.height)
                baseScale = canvasSize.width / CGFloat(input.texture.height)
            }
        }
        self.mainPosition = self.mainPosition.with(size: CGSize(width: input.width, height: input.height), baseScale: baseScale)
        self.mainPosition = self.mainPosition.with(size: CGSize(width: input.texture.width, height: input.texture.height), baseScale: baseScale)

        let containerSize = canvasSize

@ -527,11 +693,11 @@
            textureDescriptor.textureType = .type2D
            textureDescriptor.width = Int(containerSize.width)
            textureDescriptor.height = Int(containerSize.height)
            textureDescriptor.pixelFormat = input.pixelFormat
            textureDescriptor.pixelFormat = input.texture.pixelFormat
            textureDescriptor.storageMode = .private
            textureDescriptor.usage = [.shaderRead, .shaderWrite, .renderTarget]
            guard let texture = device.makeTexture(descriptor: textureDescriptor) else {
                return input
                return input.texture
            }
            self.cachedTexture = texture
            texture.label = "finishedTexture"
@ -547,7 +713,7 @@
        renderPassDescriptor.colorAttachments[0].storeAction = .store
        renderPassDescriptor.colorAttachments[0].clearColor = MTLClearColor(red: 0.0, green: 0.0, blue: 0.0, alpha: 0.0)
        guard let renderCommandEncoder = commandBuffer.makeRenderCommandEncoder(descriptor: renderPassDescriptor) else {
            return input
            return input.texture
        }

        renderCommandEncoder.setViewport(MTLViewport(
@ -566,52 +732,78 @@

        renderCommandEncoder.setRenderPipelineState(self.mainPipelineState!)

        let (mainVideoState, additionalVideoState, transitionVideoState) = self.transitionState(for: timestamp, mainInput: input, additionalInput: secondInput)

        if let transitionVideoState {
        if let rect = input.rect {
            self.encodeVideo(
                using: renderCommandEncoder,
                containerSize: containerSize,
                texture: transitionVideoState.texture,
                textureRotation: transitionVideoState.textureRotation,
                maskTexture: nil,
                hasTransparency: false,
                position: transitionVideoState.position,
                roundness: transitionVideoState.roundness,
                alpha: transitionVideoState.alpha,
                zPosition: 0.75,
                texture: input.texture,
                textureRotation: self.mainTextureRotation,
                rect: rect,
                zPosition: 0.0,
                device: device
            )
        }

        self.encodeVideo(
            using: renderCommandEncoder,
            containerSize: containerSize,
            texture: mainVideoState.texture,
            textureRotation: mainVideoState.textureRotation,
            maskTexture: inputMask,
            hasTransparency: hasTransparency,
            position: mainVideoState.position,
            roundness: mainVideoState.roundness,
            alpha: mainVideoState.alpha,
            zPosition: 0.0,
            device: device
        )

        if let additionalVideoState {

            for input in secondInput {
                if let rect = input.rect {
                    self.encodeVideo(
                        using: renderCommandEncoder,
                        containerSize: containerSize,
                        texture: input.texture,
                        textureRotation: self.mainTextureRotation,
                        rect: rect,
                        zPosition: 0.0,
                        device: device
                    )
                }
            }
        } else {
            let (mainVideoState, additionalVideoState, transitionVideoState) = self.transitionState(for: timestamp, mainInput: input.texture, additionalInput: secondInput.first?.texture)

            if let transitionVideoState {
                self.encodeVideo(
                    using: renderCommandEncoder,
                    containerSize: containerSize,
                    texture: transitionVideoState.texture,
                    textureRotation: transitionVideoState.textureRotation,
                    maskTexture: nil,
                    hasTransparency: false,
                    position: transitionVideoState.position,
                    roundness: transitionVideoState.roundness,
                    alpha: transitionVideoState.alpha,
                    zPosition: 0.75,
                    device: device
                )
            }

            self.encodeVideo(
                using: renderCommandEncoder,
                containerSize: containerSize,
                texture: additionalVideoState.texture,
                textureRotation: additionalVideoState.textureRotation,
                maskTexture: nil,
                hasTransparency: false,
                position: additionalVideoState.position,
                roundness: additionalVideoState.roundness,
                alpha: additionalVideoState.alpha,
                zPosition: 0.5,
                texture: mainVideoState.texture,
                textureRotation: mainVideoState.textureRotation,
                maskTexture: inputMask,
                hasTransparency: hasTransparency,
                position: mainVideoState.position,
                roundness: mainVideoState.roundness,
                alpha: mainVideoState.alpha,
                zPosition: 0.0,
                device: device
            )

            if let additionalVideoState {
                self.encodeVideo(
                    using: renderCommandEncoder,
                    containerSize: containerSize,
                    texture: additionalVideoState.texture,
                    textureRotation: additionalVideoState.textureRotation,
                    maskTexture: nil,
                    hasTransparency: false,
                    position: additionalVideoState.position,
                    roundness: additionalVideoState.roundness,
                    alpha: additionalVideoState.alpha,
                    zPosition: 0.5,
                    device: device
                )
            }
        }

        renderCommandEncoder.endEncoding()
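Note: layer ordering in the non-collage path is driven by zPosition; a one-glance summary of the values encoded above:

// zPosition layering in VideoFinishPass (non-collage path):
let mainZ: Float = 0.0          // base video
let additionalZ: Float = 0.5    // dual-camera inset, drawn over the base
let transitionZ: Float = 0.75   // transitioning copy, topmost during position swaps
// Collage slots are all encoded at 0.0, presumably safe because slot rects do not overlap.
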
@ -0,0 +1,44 @@
import Foundation
import UIKit
import Display

final class CollageHighlightView: UIView {
    private let borderLayer = SimpleLayer()
    private let gradientView = UIImageView()

    override public init(frame: CGRect) {
        super.init(frame: frame)

        self.borderLayer.cornerRadius = 12.0
        self.borderLayer.borderWidth = 4.0
        self.borderLayer.borderColor = UIColor.white.cgColor

        self.layer.mask = self.borderLayer

        self.addSubview(self.gradientView)
    }

    required public init?(coder: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }

    func update(size: CGSize, corners: CACornerMask, completion: @escaping () -> Void) {
        self.borderLayer.maskedCorners = corners
        self.borderLayer.frame = CGRect(origin: .zero, size: size)

        let color = UIColor.white.withAlphaComponent(0.7)

        let gradientWidth = size.width * 3.0
        self.gradientView.image = generateGradientImage(
            size: CGSize(width: gradientWidth, height: 24.0),
            colors: [UIColor.white.withAlphaComponent(0.0), color, color, color, UIColor.white.withAlphaComponent(0.0)],
            locations: [0.0, 0.2, 0.5, 0.8, 1.0],
            direction: .horizontal
        )

        self.gradientView.frame = CGRect(origin: CGPoint(x: -gradientWidth, y: 0.0), size: CGSize(width: gradientWidth, height: size.height))
        self.gradientView.layer.animatePosition(from: .zero, to: CGPoint(x: gradientWidth * 2.0, y: 0.0), duration: 1.4, additive: true, completion: { _ in
            completion()
        })
    }
}
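Note: a usage sketch for the highlight view above; the frame and host view are illustrative, and the shimmer can be looped by calling update again from the completion:

let highlight = CollageHighlightView(frame: CGRect(x: 0.0, y: 0.0, width: 180.0, height: 320.0))
collageSlotView.addSubview(highlight)   // collageSlotView: hypothetical host view
highlight.update(size: highlight.bounds.size, corners: [.layerMinXMinYCorner, .layerMaxXMinYCorner], completion: {
    // One shimmer pass finished; loop here or remove the view.
    highlight.removeFromSuperview()
})
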
@ -10,7 +10,7 @@ import SaveToCameraRoll
import ImageCompression
import LocalMediaResources

public extension MediaEditorScreen {
public extension MediaEditorScreenImpl {
    static func makeEditStoryController(
        context: AccountContext,
        peer: EnginePeer,
@ -18,16 +18,16 @@ public extension MediaEditorScreen {
        videoPlaybackPosition: Double?,
        cover: Bool,
        repost: Bool,
        transitionIn: MediaEditorScreen.TransitionIn,
        transitionOut: MediaEditorScreen.TransitionOut?,
        transitionIn: MediaEditorScreenImpl.TransitionIn,
        transitionOut: MediaEditorScreenImpl.TransitionOut?,
        completed: @escaping () -> Void = {},
        willDismiss: @escaping () -> Void = {},
        update: @escaping (Disposable?) -> Void
    ) -> MediaEditorScreen? {
    ) -> MediaEditorScreenImpl? {
        guard let peerReference = PeerReference(peer._asPeer()) else {
            return nil
        }
        let subject: Signal<MediaEditorScreen.Subject?, NoError>
        let subject: Signal<MediaEditorScreenImpl.Subject?, NoError>
        subject = getStorySource(engine: context.engine, peerId: peer.id, id: Int64(storyItem.id))
        |> mapToSignal { source in
            if !repost, let source {
@ -35,14 +35,14 @@ public extension MediaEditorScreen {
            } else {
                let media = storyItem.media._asMedia()
                return fetchMediaData(context: context, postbox: context.account.postbox, userLocation: .peer(peerReference.id), customUserContentType: .story, mediaReference: .story(peer: peerReference, id: storyItem.id, media: media))
                |> mapToSignal { (value, isImage) -> Signal<MediaEditorScreen.Subject?, NoError> in
                |> mapToSignal { (value, isImage) -> Signal<MediaEditorScreenImpl.Subject?, NoError> in
                    guard case let .data(data) = value, data.complete else {
                        return .complete()
                    }
                    if let image = UIImage(contentsOfFile: data.path) {
                        return .single(nil)
                        |> then(
                            .single(.image(image, PixelDimensions(image.size), nil, .bottomRight))
                            .single(.image(image: image, dimensions: PixelDimensions(image.size), additionalImage: nil, additionalImagePosition: .bottomRight))
                            |> delay(0.1, queue: Queue.mainQueue())
                        )
                    } else {
@ -56,7 +56,7 @@ public extension MediaEditorScreen {
                        }
                        return .single(nil)
                        |> then(
                            .single(.video(symlinkPath, nil, false, nil, nil, PixelDimensions(width: 720, height: 1280), duration ?? 0.0, [], .bottomRight))
                            .single(.video(videoPath: symlinkPath, thumbnail: nil, mirror: false, additionalVideoPath: nil, additionalThumbnail: nil, dimensions: PixelDimensions(width: 720, height: 1280), duration: duration ?? 0.0, videoPositionChanges: [], additionalVideoPosition: .bottomRight))
                        )
                    }
                }
@ -95,7 +95,7 @@ public extension MediaEditorScreen {
        }

        var updateProgressImpl: ((Float) -> Void)?
        let controller = MediaEditorScreen(
        let controller = MediaEditorScreenImpl(
            context: context,
            mode: .storyEditor,
            subject: subject,
@ -110,7 +110,7 @@
            transitionOut: { finished, isNew in
                if repost && finished {
                    if let transitionOut = externalState.transitionOut?(externalState.storyTarget, externalState.isPeerArchived), let destinationView = transitionOut.destinationView {
                        return MediaEditorScreen.TransitionOut(
                        return MediaEditorScreenImpl.TransitionOut(
                            destinationView: destinationView,
                            destinationRect: transitionOut.destinationRect,
                            destinationCornerRadius: transitionOut.destinationCornerRadius

@ -327,6 +327,7 @@ private final class MediaCoverScreenComponent: Component {
            maxDuration: storyMaxVideoDuration,
            isPlaying: playerState.isPlaying,
            tracks: visibleTracks,
            isCollage: false,
            portalView: controller.portalView,
            positionUpdated: { [weak state] position, apply in
                if let mediaEditor = state?.mediaEditor {

@ -9,7 +9,7 @@ import AccountContext
import MediaEditor
import DrawingUI

extension MediaEditorScreen {
extension MediaEditorScreenImpl {
    func isEligibleForDraft() -> Bool {
        if self.isEditingStory {
            return false
@ -173,6 +173,8 @@ extension MediaEditorScreen {
            innerSaveDraft(media: .image(image: image, dimensions: dimensions))
        case let .video(path, _, _, _, _, dimensions, _, _, _):
            innerSaveDraft(media: .video(path: path, dimensions: dimensions, duration: duration))
        case let .videoCollage(items):
            let _ = items
        case let .asset(asset):
            if asset.mediaType == .video {
                PHImageManager.default().requestAVAsset(forVideo: asset, options: nil) { avAsset, _, _ in

@ -10,9 +10,9 @@ import TelegramPresentationData
import DeviceAccess
import AccountContext

extension MediaEditorScreen {
extension MediaEditorScreenImpl {
    final class Recording {
        private weak var controller: MediaEditorScreen?
        private weak var controller: MediaEditorScreenImpl?

        private var recorder: EntityVideoRecorder?

@ -37,7 +37,7 @@ extension MediaEditorScreen {

        var isLocked = false

        init(controller: MediaEditorScreen) {
        init(controller: MediaEditorScreenImpl) {
            self.controller = controller

            self.authorizationStatusDisposables.add((DeviceAccess.authorizationStatus(subject: .camera(.video))

@ -64,6 +64,7 @@ final class MediaEditorScreenComponent: Component {

    public final class ExternalState {
        public fileprivate(set) var derivedInputHeight: CGFloat = 0.0
        public fileprivate(set) var timelineHeight: CGFloat = 0.0

        public init() {
        }
@ -85,6 +86,7 @@ final class MediaEditorScreenComponent: Component {
    let isDisplayingTool: DrawingScreenType?
    let isInteractingWithEntities: Bool
    let isSavingAvailable: Bool
    let isCollageTimelineOpen: Bool
    let hasAppeared: Bool
    let isDismissing: Bool
    let bottomSafeInset: CGFloat
@ -101,6 +103,7 @@
        isDisplayingTool: DrawingScreenType?,
        isInteractingWithEntities: Bool,
        isSavingAvailable: Bool,
        isCollageTimelineOpen: Bool,
        hasAppeared: Bool,
        isDismissing: Bool,
        bottomSafeInset: CGFloat,
@ -116,6 +119,7 @@
|
||||
self.isDisplayingTool = isDisplayingTool
|
||||
self.isInteractingWithEntities = isInteractingWithEntities
|
||||
self.isSavingAvailable = isSavingAvailable
|
||||
self.isCollageTimelineOpen = isCollageTimelineOpen
|
||||
self.hasAppeared = hasAppeared
|
||||
self.isDismissing = isDismissing
|
||||
self.bottomSafeInset = bottomSafeInset
|
||||
@ -140,6 +144,9 @@ final class MediaEditorScreenComponent: Component {
|
||||
if lhs.isSavingAvailable != rhs.isSavingAvailable {
|
||||
return false
|
||||
}
|
||||
if lhs.isCollageTimelineOpen != rhs.isCollageTimelineOpen {
|
||||
return false
|
||||
}
|
||||
if lhs.hasAppeared != rhs.hasAppeared {
|
||||
return false
|
||||
}
|
||||
@ -444,7 +451,7 @@ final class MediaEditorScreenComponent: Component {
|
||||
},
|
||||
requestLayout: { [weak self] transition in
|
||||
if let self {
|
||||
(self.environment?.controller() as? MediaEditorScreen)?.node.requestLayout(forceUpdate: true, transition: ComponentTransition(transition))
|
||||
(self.environment?.controller() as? MediaEditorScreenImpl)?.node.requestLayout(forceUpdate: true, transition: ComponentTransition(transition))
|
||||
}
|
||||
}
|
||||
)
|
||||
@ -722,7 +729,7 @@ final class MediaEditorScreenComponent: Component {
|
||||
return availableSize
|
||||
}
|
||||
let environment = environment[ViewControllerComponentContainer.Environment.self].value
|
||||
guard let controller = environment.controller() as? MediaEditorScreen else {
|
||||
guard let controller = environment.controller() as? MediaEditorScreenImpl else {
|
||||
return availableSize
|
||||
}
|
||||
self.environment = environment
|
||||
@ -744,6 +751,7 @@ final class MediaEditorScreenComponent: Component {
|
||||
|
||||
let isRecordingAdditionalVideo = controller.node.recording.isActive
|
||||
|
||||
let previousComponent = self.component
|
||||
self.component = component
|
||||
self.state = state
|
||||
|
||||
@ -1216,6 +1224,9 @@ final class MediaEditorScreenComponent: Component {
|
||||
if case let .video(_, _, _, additionalPath, _, _, _, _, _) = subject, additionalPath != nil {
|
||||
canRecordVideo = false
|
||||
}
|
||||
if case .videoCollage = subject {
|
||||
canRecordVideo = false
|
||||
}
|
||||
}
|
||||
|
||||
self.inputPanel.parentState = state
|
||||
@ -1231,6 +1242,7 @@ final class MediaEditorScreenComponent: Component {
|
||||
maxLength: Int(component.context.userLimits.maxStoryCaptionLength),
|
||||
queryTypes: [.mention, .hashtag],
|
||||
alwaysDarkWhenHasText: false,
|
||||
useGrayBackground: component.isCollageTimelineOpen,
|
||||
resetInputContents: nil,
|
||||
nextInputMode: { _ in return nextInputMode },
|
||||
areVoiceMessagesAvailable: false,
|
||||
@ -1303,6 +1315,7 @@ final class MediaEditorScreenComponent: Component {
|
||||
},
|
||||
forwardAction: nil,
|
||||
moreAction: nil,
|
||||
presentCaptionPositionTooltip: nil,
|
||||
presentVoiceMessagesUnavailableTooltip: nil,
|
||||
presentTextLengthLimitTooltip: { [weak controller] in
|
||||
guard let controller else {
|
||||
@ -1460,6 +1473,31 @@ final class MediaEditorScreenComponent: Component {
|
||||
)
|
||||
}
|
||||
|
||||
var animateRightButtonsSwitch = false
|
||||
if let previousComponent, previousComponent.isCollageTimelineOpen != component.isCollageTimelineOpen {
|
||||
animateRightButtonsSwitch = true
|
||||
}
|
||||
|
||||
var buttonTransition = transition
|
||||
if animateRightButtonsSwitch {
|
||||
buttonTransition = .immediate
|
||||
for button in [self.muteButton, self.playbackButton] {
|
||||
if let view = button.view {
|
||||
if let snapshotView = view.snapshotView(afterScreenUpdates: false) {
|
||||
snapshotView.frame = view.frame
|
||||
view.superview?.addSubview(snapshotView)
|
||||
snapshotView.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.25, removeOnCompletion: false, completion: { _ in
|
||||
snapshotView.removeFromSuperview()
|
||||
})
|
||||
snapshotView.layer.animateScale(from: 1.0, to: 0.01, duration: 0.25, removeOnCompletion: false)
|
||||
|
||||
view.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.25)
|
||||
view.layer.animateScale(from: 0.01, to: 1.0, duration: 0.25)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let saveButtonSize = self.saveButton.update(
|
||||
transition: transition,
|
||||
component: AnyComponent(CameraButton(
|
||||
@ -1493,13 +1531,14 @@ final class MediaEditorScreenComponent: Component {
|
||||
let saveButtonAlpha = component.isSavingAvailable ? topButtonsAlpha : 0.3
|
||||
saveButtonView.isUserInteractionEnabled = component.isSavingAvailable
|
||||
|
||||
transition.setPosition(view: saveButtonView, position: saveButtonFrame.center)
|
||||
transition.setBounds(view: saveButtonView, bounds: CGRect(origin: .zero, size: saveButtonFrame.size))
|
||||
buttonTransition.setPosition(view: saveButtonView, position: saveButtonFrame.center)
|
||||
buttonTransition.setBounds(view: saveButtonView, bounds: CGRect(origin: .zero, size: saveButtonFrame.size))
|
||||
transition.setScale(view: saveButtonView, scale: displayTopButtons ? 1.0 : 0.01)
|
||||
transition.setAlpha(view: saveButtonView, alpha: displayTopButtons && !component.isDismissing && !component.isInteractingWithEntities ? saveButtonAlpha : 0.0)
|
||||
}
|
||||
|
||||
var topButtonOffsetX: CGFloat = 0.0
|
||||
var topButtonOffsetY: CGFloat = 0.0
|
||||
|
||||
if let subject = controller.node.subject, case .message = subject {
|
||||
let isNightTheme = mediaEditor?.values.nightTheme == true
|
||||
@ -1651,8 +1690,19 @@ final class MediaEditorScreenComponent: Component {
|
||||
environment: {},
|
||||
containerSize: CGSize(width: 44.0, height: 44.0)
|
||||
)
|
||||
|
||||
var xOffset: CGFloat
|
||||
var yOffset: CGFloat = 0.0
|
||||
if component.isCollageTimelineOpen {
|
||||
xOffset = 0.0
|
||||
yOffset = 50.0 + topButtonOffsetY
|
||||
} else {
|
||||
xOffset = -50.0 - topButtonOffsetX
|
||||
yOffset = 0.0
|
||||
}
|
||||
|
||||
let muteButtonFrame = CGRect(
|
||||
origin: CGPoint(x: availableSize.width - 20.0 - muteButtonSize.width - 50.0 - topButtonOffsetX, y: max(environment.statusBarHeight + 10.0, environment.safeInsets.top + 20.0)),
|
||||
origin: CGPoint(x: availableSize.width - 20.0 - muteButtonSize.width + xOffset, y: max(environment.statusBarHeight + 10.0, environment.safeInsets.top + 20.0) + yOffset),
|
||||
size: muteButtonSize
|
||||
)
|
||||
if let muteButtonView = self.muteButton.view {
|
||||
@ -1663,13 +1713,14 @@ final class MediaEditorScreenComponent: Component {
|
||||
muteButtonView.layer.animateAlpha(from: 0.0, to: muteButtonView.alpha, duration: self.animatingButtons ? 0.1 : 0.2)
|
||||
muteButtonView.layer.animateScale(from: 0.4, to: 1.0, duration: self.animatingButtons ? 0.1 : 0.2)
|
||||
}
|
||||
transition.setPosition(view: muteButtonView, position: muteButtonFrame.center)
|
||||
transition.setBounds(view: muteButtonView, bounds: CGRect(origin: .zero, size: muteButtonFrame.size))
|
||||
buttonTransition.setPosition(view: muteButtonView, position: muteButtonFrame.center)
|
||||
buttonTransition.setBounds(view: muteButtonView, bounds: CGRect(origin: .zero, size: muteButtonFrame.size))
|
||||
transition.setScale(view: muteButtonView, scale: displayTopButtons ? 1.0 : 0.01)
|
||||
transition.setAlpha(view: muteButtonView, alpha: displayTopButtons && !component.isDismissing && !component.isInteractingWithEntities ? topButtonsAlpha : 0.0)
|
||||
}
|
||||
|
||||
topButtonOffsetX += 50.0
|
||||
topButtonOffsetY += 50.0
|
||||
} else {
|
||||
if let muteButtonView = self.muteButton.view, muteButtonView.superview != nil {
|
||||
muteButtonView.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2, removeOnCompletion: false, completion: { [weak muteButtonView] _ in
|
||||
@ -1728,8 +1779,20 @@ final class MediaEditorScreenComponent: Component {
|
||||
environment: {},
|
||||
containerSize: CGSize(width: 44.0, height: 44.0)
|
||||
)
|
||||
|
||||
|
||||
var xOffset: CGFloat
|
||||
var yOffset: CGFloat = 0.0
|
||||
if component.isCollageTimelineOpen {
|
||||
xOffset = 0.0
|
||||
yOffset = 50.0 + topButtonOffsetY
|
||||
} else {
|
||||
xOffset = -50.0 - topButtonOffsetX
|
||||
yOffset = 0.0
|
||||
}
|
||||
|
||||
let playbackButtonFrame = CGRect(
|
||||
origin: CGPoint(x: availableSize.width - 20.0 - playbackButtonSize.width - 50.0 - topButtonOffsetX, y: max(environment.statusBarHeight + 10.0, environment.safeInsets.top + 20.0)),
|
||||
origin: CGPoint(x: availableSize.width - 20.0 - playbackButtonSize.width + xOffset, y: max(environment.statusBarHeight + 10.0, environment.safeInsets.top + 20.0) + yOffset),
|
||||
size: playbackButtonSize
|
||||
)
|
||||
if let playbackButtonView = self.playbackButton.view {
|
||||
@ -1740,13 +1803,14 @@ final class MediaEditorScreenComponent: Component {
|
||||
playbackButtonView.layer.animateAlpha(from: 0.0, to: playbackButtonView.alpha, duration: self.animatingButtons ? 0.1 : 0.2)
|
||||
playbackButtonView.layer.animateScale(from: 0.4, to: 1.0, duration: self.animatingButtons ? 0.1 : 0.2)
|
||||
}
|
||||
transition.setPosition(view: playbackButtonView, position: playbackButtonFrame.center)
|
||||
transition.setBounds(view: playbackButtonView, bounds: CGRect(origin: .zero, size: playbackButtonFrame.size))
|
||||
buttonTransition.setPosition(view: playbackButtonView, position: playbackButtonFrame.center)
|
||||
buttonTransition.setBounds(view: playbackButtonView, bounds: CGRect(origin: .zero, size: playbackButtonFrame.size))
|
||||
transition.setScale(view: playbackButtonView, scale: displayTopButtons ? 1.0 : 0.01)
|
||||
transition.setAlpha(view: playbackButtonView, alpha: displayTopButtons && !component.isDismissing && !component.isInteractingWithEntities ? topButtonsAlpha : 0.0)
|
||||
}
|
||||
|
||||
topButtonOffsetX += 50.0
|
||||
topButtonOffsetY += 50.0
|
||||
} else {
|
||||
if let playbackButtonView = self.playbackButton.view, playbackButtonView.superview != nil {
|
||||
playbackButtonView.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2, removeOnCompletion: false, completion: { [weak playbackButtonView] _ in
|
||||
@ -1820,6 +1884,17 @@ final class MediaEditorScreenComponent: Component {
|
||||
let isAudioOnly = playerState.isAudioOnly
|
||||
let hasMainVideoTrack = playerState.tracks.contains(where: { $0.id == 0 })
|
||||
|
||||
var isCollage = false
|
||||
if let mediaEditor, !mediaEditor.values.collage.isEmpty {
|
||||
var videoCount = 1
|
||||
for item in mediaEditor.values.collage {
|
||||
if item.content.isVideo {
|
||||
videoCount += 1
|
||||
}
|
||||
}
|
||||
isCollage = videoCount > 1
|
||||
}
|
||||
|
||||
let scrubber: ComponentView<Empty>
|
||||
if let current = self.scrubber {
|
||||
scrubber = current
|
||||
@ -1840,6 +1915,9 @@ final class MediaEditorScreenComponent: Component {
|
||||
maxDuration: maxDuration,
|
||||
isPlaying: playerState.isPlaying,
|
||||
tracks: visibleTracks,
|
||||
isCollage: isCollage,
|
||||
isCollageSelected: component.isCollageTimelineOpen,
|
||||
collageSamples: playerState.collageSamples,
|
||||
positionUpdated: { [weak mediaEditor] position, apply in
|
||||
if let mediaEditor {
|
||||
mediaEditor.seek(position, andPlay: apply)
|
||||
@ -1850,7 +1928,7 @@ final class MediaEditorScreenComponent: Component {
|
||||
return
|
||||
}
|
||||
let trimRange = start..<end
|
||||
if trackId == 2 {
|
||||
if trackId == 1000 {
|
||||
mediaEditor.setAudioTrackTrimRange(trimRange, apply: apply)
|
||||
if isAudioOnly {
|
||||
let offset = (mediaEditor.values.audioTrackOffset ?? 0.0)
|
||||
@ -1867,8 +1945,8 @@ final class MediaEditorScreenComponent: Component {
|
||||
mediaEditor.stop()
|
||||
}
|
||||
}
|
||||
} else if trackId == 1 {
|
||||
mediaEditor.setAdditionalVideoTrimRange(trimRange, apply: apply)
|
||||
} else if trackId > 0 {
|
||||
mediaEditor.setAdditionalVideoTrimRange(trimRange, trackId: isCollage ? trackId : nil, apply: apply)
|
||||
if hasMainVideoTrack {
|
||||
if apply {
|
||||
mediaEditor.play()
|
||||
@ -1895,7 +1973,7 @@ final class MediaEditorScreenComponent: Component {
|
||||
guard let mediaEditor else {
|
||||
return
|
||||
}
|
||||
if trackId == 2 {
|
||||
if trackId == 1000 {
|
||||
mediaEditor.setAudioTrackOffset(offset, apply: apply)
|
||||
if isAudioOnly {
|
||||
let offset = (mediaEditor.values.audioTrackOffset ?? 0.0)
|
||||
@ -1928,8 +2006,8 @@ final class MediaEditorScreenComponent: Component {
|
||||
mediaEditor.stop()
|
||||
}
|
||||
}
|
||||
} else if trackId == 1 {
|
||||
mediaEditor.setAdditionalVideoOffset(offset, apply: apply)
|
||||
} else if trackId > 0 {
|
||||
mediaEditor.setAdditionalVideoOffset(offset, trackId: isCollage ? trackId : nil, apply: apply)
|
||||
}
|
||||
},
|
||||
trackLongPressed: { [weak controller] trackId, sourceView in
|
||||
@ -1937,11 +2015,28 @@ final class MediaEditorScreenComponent: Component {
|
||||
return
|
||||
}
|
||||
controller.node.presentTrackOptions(trackId: trackId, sourceView: sourceView)
|
||||
},
|
||||
collageSelectionUpdated: { [weak controller] in
|
||||
guard let controller else {
|
||||
return
|
||||
}
|
||||
controller.node.openCollageTimeline()
|
||||
},
|
||||
trackSelectionUpdated: { [weak controller] trackId in
|
||||
guard let controller else {
|
||||
return
|
||||
}
|
||||
controller.node.highlightCollageItem(trackId: trackId)
|
||||
}
|
||||
)),
|
||||
environment: {},
|
||||
containerSize: CGSize(width: previewSize.width - scrubberInset * 2.0, height: availableSize.height)
|
||||
)
|
||||
if component.isCollageTimelineOpen {
|
||||
component.externalState.timelineHeight = scrubberSize.height + 65.0
|
||||
} else {
|
||||
component.externalState.timelineHeight = 0.0
|
||||
}
|
||||
|
||||
let scrubberFrame = CGRect(origin: CGPoint(x: floorToScreenPixels((availableSize.width - scrubberSize.width) / 2.0), y: availableSize.height - environment.safeInsets.bottom - scrubberSize.height + controlsBottomInset - inputPanelSize.height + 3.0), size: scrubberSize)
|
||||
if let scrubberView = scrubber.view {
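The trim and offset handlers above share a new track-id convention. As a reading aid (an illustrative helper inferred from this diff, not part of the commit): id 0 is the main video track, positive ids address additional or per-collage-item video tracks, and 1000 now addresses the audio track (previously 2).

// Illustrative only — the commit branches on these values inline.
func describeTrack(_ trackId: Int32, isCollage: Bool) -> String {
    if trackId == 1000 {
        return "audio track"
    } else if trackId == 0 {
        return "main video track"
    } else if trackId > 0 {
        return isCollage ? "collage video track \(trackId)" : "additional video track"
    } else {
        return "unknown track"
    }
}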
@ -2394,7 +2489,7 @@ final class MediaEditorScreenComponent: Component {
let storyDimensions = CGSize(width: 1080.0, height: 1920.0)
let storyMaxVideoDuration: Double = 60.0

public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate {
public final class MediaEditorScreenImpl: ViewController, MediaEditorScreen, UIDropInteractionDelegate {
public enum Mode {
public enum StickerEditorMode {
case generic
@ -2468,13 +2563,13 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
}

final class Node: ViewControllerTracingNode, ASGestureRecognizerDelegate, UIScrollViewDelegate {
private weak var controller: MediaEditorScreen?
private weak var controller: MediaEditorScreenImpl?
private let context: AccountContext
fileprivate var interaction: DrawingToolsInteraction?
private let initializationTimestamp = CACurrentMediaTime()

var subject: MediaEditorScreen.Subject?
var actualSubject: MediaEditorScreen.Subject?
var subject: MediaEditorScreenImpl.Subject?
var actualSubject: MediaEditorScreenImpl.Subject?

private var subjectDisposable: Disposable?
private var appInForegroundDisposable: Disposable?
@ -2552,7 +2647,7 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate

private var playbackPositionDisposable: Disposable?

var recording: MediaEditorScreen.Recording
var recording: MediaEditorScreenImpl.Recording

private let locationManager = LocationManager()

@ -2561,7 +2656,7 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate

private let readyValue = Promise<Bool>()

init(controller: MediaEditorScreen) {
init(controller: MediaEditorScreenImpl) {
self.controller = controller
self.context = controller.context

@ -2631,7 +2726,7 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
self.stickerMaskPreviewView.backgroundColor = UIColor(rgb: 0xffffff, alpha: 0.3)
self.stickerMaskPreviewView.isUserInteractionEnabled = false

self.recording = MediaEditorScreen.Recording(controller: controller)
self.recording = MediaEditorScreenImpl.Recording(controller: controller)

super.init()

@ -2825,7 +2920,7 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
self.stickerCutoutStatusDisposable?.dispose()
}

private func setup(with subject: MediaEditorScreen.Subject) {
private func setup(with subject: MediaEditorScreenImpl.Subject) {
guard let controller = self.controller else {
return
}
@ -3038,6 +3133,8 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
}
}
}
} else if case let .videoCollage(items) = effectiveSubject {
mediaEditor.setupCollage(items.map { $0.editorItem })
} else if case let .message(messageIds) = effectiveSubject {
let isNightTheme = mediaEditor.values.nightTheme
let _ = ((self.context.engine.data.get(
@ -3129,7 +3226,7 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
self.mediaEditor = mediaEditor
self.mediaEditorPromise.set(.single(mediaEditor))

if controller.isEmbeddedEditor == true {
if controller.isEmbeddedEditor {
mediaEditor.onFirstDisplay = { [weak self] in
if let self {
if let transitionInView = self.transitionInView {
@ -3475,6 +3572,8 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
if case .stickerEditor = controller.mode {
hasSwipeToDismiss = false
hasSwipeToEnhance = false
} else if self.isCollageTimelineOpen {
hasSwipeToEnhance = false
}

let translation = gestureRecognizer.translation(in: self.view)
@ -3549,6 +3648,9 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
private var previousRotateTimestamp: Double?

@objc func handlePan(_ gestureRecognizer: UIPanGestureRecognizer) {
guard !self.isCollageTimelineOpen else {
return
}
if gestureRecognizer.numberOfTouches == 2, let subject = self.subject, case .message = subject, !self.entitiesView.hasSelection {
return
}
@ -3561,6 +3663,9 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
}

@objc func handlePinch(_ gestureRecognizer: UIPinchGestureRecognizer) {
guard !self.isCollageTimelineOpen else {
return
}
if gestureRecognizer.numberOfTouches == 2, let subject = self.subject, case .message = subject, !self.entitiesView.hasSelection {
return
}
@ -3573,6 +3678,9 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
}

@objc func handleRotate(_ gestureRecognizer: UIRotationGestureRecognizer) {
guard !self.isCollageTimelineOpen else {
return
}
if gestureRecognizer.numberOfTouches == 2, let subject = self.subject, case .message = subject, !self.entitiesView.hasSelection {
return
}
@ -3587,6 +3695,13 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
guard !self.recording.isActive, let controller = self.controller else {
return
}

if self.isCollageTimelineOpen {
self.isCollageTimelineOpen = false
self.requestLayout(forceUpdate: true, transition: .spring(duration: 0.4))
return
}

let location = gestureRecognizer.location(in: self.view)
var entitiesHitTestResult = self.entitiesView.hitTest(self.view.convert(location, to: self.entitiesView), with: nil)
if entitiesHitTestResult is DrawingMediaEntityView {
@ -4450,16 +4565,26 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
}

func presentTrackOptions(trackId: Int32, sourceView: UIView) {
guard let mediaEditor = self.mediaEditor else {
return
}
let isVideo = trackId != 2
let actionTitle: String = isVideo ? self.presentationData.strings.MediaEditor_RemoveVideo : self.presentationData.strings.MediaEditor_RemoveAudio
let isCollage = !mediaEditor.values.collage.isEmpty

let value: CGFloat
if trackId == 0 {
value = self.mediaEditor?.values.videoVolume ?? 1.0
} else if trackId == 1 {
value = self.mediaEditor?.values.additionalVideoVolume ?? 1.0
} else if trackId == 2 {
value = self.mediaEditor?.values.audioTrackVolume ?? 1.0
if trackId == 1000 {
value = mediaEditor.values.audioTrackVolume ?? 1.0
} else if trackId == 0 {
value = mediaEditor.values.videoVolume ?? 1.0
} else if trackId > 0 {
if !isCollage {
value = mediaEditor.values.additionalVideoVolume ?? 1.0
} else if let index = mediaEditor.collageItemIndexForTrackId(trackId) {
value = mediaEditor.values.collage[index].videoVolume ?? 1.0
} else {
value = 1.0
}
} else {
value = 1.0
}
@ -4468,20 +4593,21 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
items.append(
.custom(VolumeSliderContextItem(minValue: 0.0, maxValue: 1.5, value: value, valueChanged: { [weak self] value, _ in
if let self, let mediaEditor = self.mediaEditor {
if trackId == 0 {
if trackId == 1000 {
mediaEditor.setAudioTrackVolume(value)
} else if trackId == 0 {
if mediaEditor.values.videoIsMuted {
mediaEditor.setVideoIsMuted(false)
}
mediaEditor.setVideoVolume(value)
} else if trackId == 1 {
mediaEditor.setAdditionalVideoVolume(value)
} else if trackId == 2 {
mediaEditor.setAudioTrackVolume(value)
} else if trackId > 0 {
mediaEditor.setAdditionalVideoVolume(value, trackId: isCollage ? trackId : nil)
}
}
}), false)
)
if trackId != 0 {

if trackId != 0 && !isCollage {
items.append(
.action(
ContextMenuActionItem(
@ -4838,7 +4964,45 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
func viewForZooming(in scrollView: UIScrollView) -> UIView? {
return self.previewContentContainerView
}

private var isCollageTimelineOpen = false
func openCollageTimeline() {
self.isCollageTimelineOpen = true
self.requestLayout(forceUpdate: true, transition: .spring(duration: 0.4))
}

func highlightCollageItem(trackId: Int32) {
if let collageIndex = self.mediaEditor?.collageItemIndexForTrackId(trackId), let frame = self.mediaEditor?.values.collage[collageIndex].frame {
let mappedFrame = CGRect(
x: frame.minX / storyDimensions.width * self.previewContainerView.bounds.width,
y: frame.minY / storyDimensions.height * self.previewContainerView.bounds.height,
width: frame.width / storyDimensions.width * self.previewContainerView.bounds.width,
height: frame.height / storyDimensions.height * self.previewContainerView.bounds.height
)

var corners: CACornerMask = []
if frame.minX <= .ulpOfOne && frame.minY <= .ulpOfOne {
corners.insert(.layerMinXMinYCorner)
}
if frame.minX <= .ulpOfOne && frame.maxY >= storyDimensions.height - .ulpOfOne {
corners.insert(.layerMinXMaxYCorner)
}
if frame.maxX >= storyDimensions.width - .ulpOfOne && frame.minY <= .ulpOfOne {
corners.insert(.layerMaxXMinYCorner)
}
if frame.maxX >= storyDimensions.width - .ulpOfOne && frame.maxY >= storyDimensions.height - .ulpOfOne {
corners.insert(.layerMaxXMaxYCorner)
}

let highlightView = CollageHighlightView()
highlightView.update(size: mappedFrame.size, corners: corners, completion: { [weak highlightView] in
highlightView?.removeFromSuperview()
})
highlightView.frame = mappedFrame
self.previewContainerView.addSubview(highlightView)
}
}
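A worked example of the highlight mapping above, with illustrative numbers that are not part of the commit: a collage item covering the top half of the 1080x1920 story canvas, shown in a 360x640 preview container.

// itemFrame is given in story coordinates; previewSize is the on-screen container.
let itemFrame = CGRect(x: 0.0, y: 0.0, width: 1080.0, height: 960.0)
let previewSize = CGSize(width: 360.0, height: 640.0)
let mapped = CGRect(
    x: itemFrame.minX / 1080.0 * previewSize.width,         // 0.0
    y: itemFrame.minY / 1920.0 * previewSize.height,        // 0.0
    width: itemFrame.width / 1080.0 * previewSize.width,    // 360.0
    height: itemFrame.height / 1920.0 * previewSize.height  // 320.0
)
// The item touches the canvas's top-left and top-right edges, so only
// .layerMinXMinYCorner and .layerMaxXMinYCorner are rounded on the highlight.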
func requestLayout(forceUpdate: Bool, transition: ComponentTransition) {
guard let layout = self.validLayout else {
return
@ -4909,6 +5073,7 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
isDisplayingTool: self.isDisplayingTool,
isInteractingWithEntities: self.isInteractingWithEntities,
isSavingAvailable: controller.isSavingAvailable,
isCollageTimelineOpen: self.isCollageTimelineOpen,
hasAppeared: self.hasAppeared,
isDismissing: self.isDismissing && !self.isDismissBySwipeSuppressed,
bottomSafeInset: layout.intrinsicInsets.bottom,
@ -5079,7 +5244,7 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
let controller = DrawingScreen(
context: self.context,
sourceHint: .storyEditor,
size: self.previewContainerView.frame.size,
size: self.previewContainerView.bounds.size,
originalSize: storyDimensions,
isVideo: self.mediaEditor?.sourceIsVideo ?? false,
isAvatar: false,
@ -5379,7 +5544,19 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
if layout.size.height < 680.0, case .stickerEditor = controller.mode {
previewFrame = previewFrame.offsetBy(dx: 0.0, dy: -44.0)
}
transition.setFrame(view: self.previewContainerView, frame: previewFrame)

var previewScale: CGFloat = 1.0
var previewOffset: CGFloat = 0.0
if self.componentExternalState.timelineHeight > 0.0 {
let clippedHeight = previewFrame.size.height - self.componentExternalState.timelineHeight
previewOffset = -self.componentExternalState.timelineHeight / 2.0
previewScale = clippedHeight / previewFrame.size.height
}

transition.setBounds(view: self.previewContainerView, bounds: CGRect(origin: .zero, size: previewFrame.size))
transition.setPosition(view: self.previewContainerView, position: previewFrame.center.offsetBy(dx: 0.0, dy: previewOffset))
transition.setScale(view: self.previewContainerView, scale: previewScale)
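For intuition, with illustrative numbers that are not taken from the commit: a 640-point preview sharing space with a 160-point collage timeline is scaled and shifted as follows.

let previewHeight: CGFloat = 640.0
let timelineHeight: CGFloat = 160.0
let clippedHeight = previewHeight - timelineHeight // 480.0
let previewScale = clippedHeight / previewHeight   // 0.75
let previewOffset = -timelineHeight / 2.0          // -80.0, shifts the preview up
// The preview shrinks to 75% and re-centers in the space left above the timeline.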
transition.setFrame(view: self.previewScrollView, frame: CGRect(origin: .zero, size: previewSize))

if self.previewScrollView.contentSize == .zero {
@ -5469,9 +5646,51 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
}

public enum Subject {
public struct VideoCollageItem {
public enum Content {
case image(UIImage)
case video(String, Double)
case asset(PHAsset)

var editorContent: MediaEditor.Subject.VideoCollageItem.Content {
switch self {
case let .image(image):
return .image(image)
case let .video(path, duration):
return .video(path, duration)
case let .asset(asset):
return .asset(asset)
}
}

var duration: Double {
switch self {
case .image:
return 0.0
case let .video(_, duration):
return duration
case let .asset(asset):
return asset.duration
}
}
}
public let content: Content
public let frame: CGRect

var editorItem: MediaEditor.Subject.VideoCollageItem {
return MediaEditor.Subject.VideoCollageItem(content: self.content.editorContent, frame: self.frame)
}

public init(content: Content, frame: CGRect) {
self.content = content
self.frame = frame
}
}

case empty(PixelDimensions)
case image(UIImage, PixelDimensions, UIImage?, PIPPosition)
case video(String, UIImage?, Bool, String?, UIImage?, PixelDimensions, Double, [(Bool, Double)], PIPPosition)
case image(image: UIImage, dimensions: PixelDimensions, additionalImage: UIImage?, additionalImagePosition: PIPPosition)
case video(videoPath: String, thumbnail: UIImage?, mirror: Bool, additionalVideoPath: String?, additionalThumbnail: UIImage?, dimensions: PixelDimensions, duration: Double, videoPositionChanges: [(Bool, Double)], additionalVideoPosition: PIPPosition)
case videoCollage(items: [VideoCollageItem])
case asset(PHAsset)
case draft(MediaEditorDraft, Int64?)
case message([MessageId])
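A minimal construction sketch for the new collage case (the path, image, and frames below are assumptions; frames are expressed in the 1080x1920 story coordinate space used by storyDimensions):

// Illustrative only: a video in the top half, a photo in the bottom half.
// somePhoto stands in for any UIImage.
let topVideo = MediaEditorScreenImpl.Subject.VideoCollageItem(
    content: .video("/path/to/top.mp4", 12.0),
    frame: CGRect(x: 0.0, y: 0.0, width: 1080.0, height: 960.0)
)
let bottomPhoto = MediaEditorScreenImpl.Subject.VideoCollageItem(
    content: .image(somePhoto),
    frame: CGRect(x: 0.0, y: 960.0, width: 1080.0, height: 960.0)
)
let subject: MediaEditorScreenImpl.Subject = .videoCollage(items: [topVideo, bottomPhoto])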
@ -5487,9 +5706,7 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
return PixelDimensions(width: Int32(asset.pixelWidth), height: Int32(asset.pixelHeight))
case let .draft(draft, _):
return draft.dimensions
case .message:
return PixelDimensions(width: 1080, height: 1920)
case .sticker:
case .message, .sticker, .videoCollage:
return PixelDimensions(width: 1080, height: 1920)
}
}
@ -5505,6 +5722,8 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
return .image(image, dimensions)
case let .video(videoPath, transitionImage, mirror, additionalVideoPath, _, dimensions, duration, _, _):
return .video(videoPath, transitionImage, mirror, additionalVideoPath, dimensions, duration)
case let .videoCollage(items):
return .videoCollage(items.map { $0.editorItem })
case let .asset(asset):
return .asset(asset)
case let .draft(draft, _):
@ -5528,6 +5747,8 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
return false
case .video:
return true
case .videoCollage:
return true
case let .asset(asset):
return asset.mediaType == .video
case let .draft(draft, _):
@ -5546,6 +5767,7 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
case videoFile(path: String)
case asset(localIdentifier: String)
}

case image(image: UIImage, dimensions: PixelDimensions)
case video(video: VideoResult, coverImage: UIImage?, values: MediaEditorValues, duration: Double, dimensions: PixelDimensions)
case sticker(file: TelegramMediaFile, emoji: [String])
@ -5607,7 +5829,7 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
fileprivate let transitionOut: (Bool, Bool?) -> TransitionOut?

public var cancelled: (Bool) -> Void = { _ in }
public var completion: (MediaEditorScreen.Result, @escaping (@escaping () -> Void) -> Void) -> Void = { _, _ in }
public var completion: (MediaEditorScreenImpl.Result, @escaping (@escaping () -> Void) -> Void) -> Void = { _, _ in }
public var dismissed: () -> Void = { }
public var willDismiss: () -> Void = { }
public var sendSticker: ((FileMediaReference, UIView, CGRect) -> Bool)?
@ -5640,7 +5862,7 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
initialLink: (url: String, name: String?)? = nil,
transitionIn: TransitionIn?,
transitionOut: @escaping (Bool, Bool?) -> TransitionOut?,
completion: @escaping (MediaEditorScreen.Result, @escaping (@escaping () -> Void) -> Void) -> Void
completion: @escaping (MediaEditorScreenImpl.Result, @escaping (@escaping () -> Void) -> Void) -> Void
) {
self.context = context
self.mode = mode
@ -6493,7 +6715,7 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
if self.isEmbeddedEditor && !(hasAnyChanges || hasEntityChanges) {
self.saveDraft(id: randomId, edit: true)

self.completion(MediaEditorScreen.Result(media: nil, mediaAreas: [], caption: caption, coverTimestamp: mediaEditor.values.coverImageTimestamp, options: self.state.privacy, stickers: stickers, randomId: randomId), { [weak self] finished in
self.completion(MediaEditorScreenImpl.Result(media: nil, mediaAreas: [], caption: caption, coverTimestamp: mediaEditor.values.coverImageTimestamp, options: self.state.privacy, stickers: stickers, randomId: randomId), { [weak self] finished in
self?.node.animateOut(finished: true, saveDraft: false, completion: { [weak self] in
self?.dismiss()
Queue.mainQueue().justDispatch {
@ -6593,6 +6815,50 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
avAssetGenerator.cancelAllCGImageGeneration()
}
}
case let .videoCollage(items):
var maxDurationItem: (Double, Subject.VideoCollageItem)?
for item in items {
switch item.content {
case .image:
break
case let .video(_, duration):
if let (maxDuration, _) = maxDurationItem {
if duration > maxDuration {
maxDurationItem = (duration, item)
}
} else {
maxDurationItem = (duration, item)
}
case let .asset(asset):
if let (maxDuration, _) = maxDurationItem {
if asset.duration > maxDuration {
maxDurationItem = (asset.duration, item)
}
} else {
maxDurationItem = (asset.duration, item)
}
}
}
guard let (maxDuration, mainItem) = maxDurationItem else {
fatalError()
}
switch mainItem.content {
case let .video(path, _):
videoResult = .single(.videoFile(path: path))
case let .asset(asset):
videoResult = .single(.asset(localIdentifier: asset.localIdentifier))
default:
fatalError()
}
let image = generateImage(storyDimensions, opaque: false, scale: 1.0, rotatedContext: { size, context in
context.clear(CGRect(origin: .zero, size: size))
})!
firstFrame = .single((image, nil))
if let videoTrimRange = mediaEditor.values.videoTrimRange {
duration = videoTrimRange.upperBound - videoTrimRange.lowerBound
} else {
duration = min(maxDuration, storyMaxVideoDuration)
}
case let .asset(asset):
videoResult = .single(.asset(localIdentifier: asset.localIdentifier))
if asset.mediaType == .video {
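The .videoCollage branch above scans the items for the longest video or asset to serve as the export's main track. A compact near-equivalent using the Content.duration helper defined earlier (a sketch, not the commit's code; unlike the loop, it would also return an image item when no video is present, since images report 0.0):

let longestItem = items.max(by: { $0.content.duration < $1.content.duration })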
@ -6766,7 +7032,7 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
makeEditorImageComposition(context: self.node.ciContext, postbox: self.context.account.postbox, inputImage: inputImage, dimensions: storyDimensions, values: mediaEditor.values, time: firstFrameTime, textScale: 2.0, completion: { [weak self] coverImage in
if let self {
Logger.shared.log("MediaEditor", "Completed with video \(videoResult)")
self.completion(MediaEditorScreen.Result(media: .video(video: videoResult, coverImage: coverImage, values: mediaEditor.values, duration: duration, dimensions: mediaEditor.values.resultDimensions), mediaAreas: mediaAreas, caption: caption, coverTimestamp: mediaEditor.values.coverImageTimestamp, options: self.state.privacy, stickers: stickers, randomId: randomId), { [weak self] finished in
self.completion(MediaEditorScreenImpl.Result(media: .video(video: videoResult, coverImage: coverImage, values: mediaEditor.values, duration: duration, dimensions: mediaEditor.values.resultDimensions), mediaAreas: mediaAreas, caption: caption, coverTimestamp: mediaEditor.values.coverImageTimestamp, options: self.state.privacy, stickers: stickers, randomId: randomId), { [weak self] finished in
self?.node.animateOut(finished: true, saveDraft: false, completion: { [weak self] in
self?.dismiss()
Queue.mainQueue().justDispatch {
@ -6789,7 +7055,7 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
makeEditorImageComposition(context: self.node.ciContext, postbox: self.context.account.postbox, inputImage: image, dimensions: storyDimensions, values: mediaEditor.values, time: .zero, textScale: 2.0, completion: { [weak self] resultImage in
if let self, let resultImage {
Logger.shared.log("MediaEditor", "Completed with image \(resultImage)")
self.completion(MediaEditorScreen.Result(media: .image(image: resultImage, dimensions: PixelDimensions(resultImage.size)), mediaAreas: mediaAreas, caption: caption, coverTimestamp: nil, options: self.state.privacy, stickers: stickers, randomId: randomId), { [weak self] finished in
self.completion(MediaEditorScreenImpl.Result(media: .image(image: resultImage, dimensions: PixelDimensions(resultImage.size)), mediaAreas: mediaAreas, caption: caption, coverTimestamp: nil, options: self.state.privacy, stickers: stickers, randomId: randomId), { [weak self] finished in
self?.node.animateOut(finished: true, saveDraft: false, completion: { [weak self] in
self?.dismiss()
Queue.mainQueue().justDispatch {
@ -6933,7 +7199,7 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
if isVideo {
self.uploadSticker(file, action: .send)
} else {
self.completion(MediaEditorScreen.Result(
self.completion(MediaEditorScreenImpl.Result(
media: .sticker(file: file, emoji: self.effectiveStickerEmoji()),
mediaAreas: [],
caption: NSAttributedString(),
@ -7343,15 +7609,15 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
case let .complete(resource, _):
let navigationController = self.navigationController as? NavigationController

let result: MediaEditorScreen.Result
let result: MediaEditorScreenImpl.Result
switch action {
case .update:
result = MediaEditorScreen.Result(media: .sticker(file: file, emoji: emojis))
result = MediaEditorScreenImpl.Result(media: .sticker(file: file, emoji: emojis))
case .upload, .send:
let file = stickerFile(resource: resource, thumbnailResource: file.previewRepresentations.first?.resource, size: resource.size ?? 0, dimensions: dimensions, duration: self.preferredStickerDuration(), isVideo: isVideo)
result = MediaEditorScreen.Result(media: .sticker(file: file, emoji: emojis))
result = MediaEditorScreenImpl.Result(media: .sticker(file: file, emoji: emojis))
default:
result = MediaEditorScreen.Result()
result = MediaEditorScreenImpl.Result()
}

self.completion(result, { [weak self] finished in
@ -7469,6 +7735,9 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
case let .video(path, _, _, _, _, _, _, _, _):
let asset = AVURLAsset(url: NSURL(fileURLWithPath: path) as URL)
exportSubject = .single(.video(asset: asset, isStory: true))
case let .videoCollage(items):
let _ = items
exportSubject = .complete()
case let .image(image, _, _, _):
exportSubject = .single(.image(image: image))
case let .asset(asset):
@ -7518,7 +7787,8 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
exportSubject = .single(.sticker(file: file))
}

let _ = exportSubject.start(next: { [weak self] exportSubject in
let _ = (exportSubject
|> deliverOnMainQueue).start(next: { [weak self] exportSubject in
guard let self else {
return
}
@ -7538,9 +7808,7 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
let outputPath = NSTemporaryDirectory() + "\(Int64.random(in: 0 ..< .max)).\(fileExtension)"
let videoExport = MediaEditorVideoExport(postbox: self.context.account.postbox, subject: exportSubject, configuration: configuration, outputPath: outputPath, textScale: 2.0)
self.videoExport = videoExport

videoExport.start()

self.exportDisposable.set((videoExport.status
|> deliverOnMainQueue).start(next: { [weak self] status in
if let self {
@ -8287,7 +8555,7 @@ extension MediaScrubberComponent.Track {
case let .video(frames, framesUpdateTimestamp):
content = .video(frames: frames, framesUpdateTimestamp: framesUpdateTimestamp)
case let .audio(artist, title, samples, peak):
content = .audio(artist: artist, title: title, samples: samples, peak: peak)
content = .audio(artist: artist, title: title, samples: samples, peak: peak, isTimeline: false)
}
self.init(
id: track.id,

@ -267,6 +267,7 @@ final class StoryPreviewComponent: Component {
stopAndPreviewMediaRecording: nil,
discardMediaRecordingPreview: nil,
attachmentAction: { },
attachmentButtonMode: .attach,
myReaction: nil,
likeAction: nil,
likeOptionsAction: nil,
@ -274,6 +275,7 @@ final class StoryPreviewComponent: Component {
timeoutAction: nil,
forwardAction: {},
moreAction: { _, _ in },
presentCaptionPositionTooltip: nil,
presentVoiceMessagesUnavailableTooltip: nil,
presentTextLengthLimitTooltip: nil,
presentTextFormattingTooltip: nil,

@ -18,6 +18,7 @@ swift_library(
"//submodules/Components/MultilineTextComponent",
"//submodules/TelegramUI/Components/MediaEditor",
"//submodules/TelegramUI/Components/AudioWaveformComponent",
"//submodules/UIKitRuntimeUtils",
],
visibility = [
"//visibility:public",

@ -10,11 +10,13 @@ import AccountContext
import AudioWaveformComponent
import MultilineTextComponent
import MediaEditor
import UIKitRuntimeUtils

private let handleWidth: CGFloat = 14.0
private let trackHeight: CGFloat = 39.0
private let collapsedTrackHeight: CGFloat = 26.0
private let trackSpacing: CGFloat = 4.0
private let collageTrackSpacing: CGFloat = 8.0
private let borderHeight: CGFloat = 1.0 + UIScreenPixel

public final class MediaScrubberComponent: Component {
@ -23,7 +25,7 @@ public final class MediaScrubberComponent: Component {
public struct Track: Equatable {
public enum Content: Equatable {
case video(frames: [UIImage], framesUpdateTimestamp: Double)
case audio(artist: String?, title: String?, samples: Data?, peak: Int32)
case audio(artist: String?, title: String?, samples: Data?, peak: Int32, isTimeline: Bool)

public static func ==(lhs: Content, rhs: Content) -> Bool {
switch lhs {
@ -33,9 +35,9 @@ public final class MediaScrubberComponent: Component {
} else {
return false
}
case let .audio(lhsArtist, lhsTitle, lhsSamples, lhsPeak):
if case let .audio(rhsArtist, rhsTitle, rhsSamples, rhsPeak) = rhs {
return lhsArtist == rhsArtist && lhsTitle == rhsTitle && lhsSamples == rhsSamples && lhsPeak == rhsPeak
case let .audio(lhsArtist, lhsTitle, lhsSamples, lhsPeak, lhsIsTimeline):
if case let .audio(rhsArtist, rhsTitle, rhsSamples, rhsPeak, rhsIsTimeline) = rhs {
return lhsArtist == rhsArtist && lhsTitle == rhsTitle && lhsSamples == rhsSamples && lhsPeak == rhsPeak && lhsIsTimeline == rhsIsTimeline
} else {
return false
}
@ -85,6 +87,10 @@ public final class MediaScrubberComponent: Component {
let isPlaying: Bool

let tracks: [Track]
let isCollage: Bool
let isCollageSelected: Bool
let collageSamples: (samples: Data, peak: Int32)?

let portalView: PortalView?

let positionUpdated: (Double, Bool) -> Void
@ -92,6 +98,8 @@ public final class MediaScrubberComponent: Component {
let trackTrimUpdated: (Int32, Double, Double, Bool, Bool) -> Void
let trackOffsetUpdated: (Int32, Double, Bool) -> Void
let trackLongPressed: (Int32, UIView) -> Void
let collageSelectionUpdated: () -> Void
let trackSelectionUpdated: (Int32) -> Void

public init(
context: AccountContext,
@ -103,12 +111,17 @@ public final class MediaScrubberComponent: Component {
maxDuration: Double,
isPlaying: Bool,
tracks: [Track],
isCollage: Bool,
isCollageSelected: Bool = false,
collageSamples: (samples: Data, peak: Int32)? = nil,
portalView: PortalView? = nil,
positionUpdated: @escaping (Double, Bool) -> Void,
coverPositionUpdated: @escaping (Double, Bool, @escaping () -> Void) -> Void = { _, _, _ in },
trackTrimUpdated: @escaping (Int32, Double, Double, Bool, Bool) -> Void,
trackOffsetUpdated: @escaping (Int32, Double, Bool) -> Void,
trackLongPressed: @escaping (Int32, UIView) -> Void
trackLongPressed: @escaping (Int32, UIView) -> Void,
collageSelectionUpdated: @escaping () -> Void = {},
trackSelectionUpdated: @escaping (Int32) -> Void = { _ in }
) {
self.context = context
self.style = style
@ -119,12 +132,17 @@ public final class MediaScrubberComponent: Component {
self.maxDuration = maxDuration
self.isPlaying = isPlaying
self.tracks = tracks
self.isCollage = isCollage
self.isCollageSelected = isCollageSelected
self.collageSamples = collageSamples
self.portalView = portalView
self.positionUpdated = positionUpdated
self.coverPositionUpdated = coverPositionUpdated
self.trackTrimUpdated = trackTrimUpdated
self.trackOffsetUpdated = trackOffsetUpdated
self.trackLongPressed = trackLongPressed
self.collageSelectionUpdated = collageSelectionUpdated
self.trackSelectionUpdated = trackSelectionUpdated
}

public static func ==(lhs: MediaScrubberComponent, rhs: MediaScrubberComponent) -> Bool {
@ -152,15 +170,27 @@ public final class MediaScrubberComponent: Component {
if lhs.tracks != rhs.tracks {
return false
}
if lhs.isCollage != rhs.isCollage {
return false
}
if lhs.isCollageSelected != rhs.isCollageSelected {
return false
}
if lhs.collageSamples?.samples != rhs.collageSamples?.samples || lhs.collageSamples?.peak != rhs.collageSamples?.peak {
return false
}
return true
}

public final class View: UIView, UIGestureRecognizerDelegate {
private let trackContainerView: UIView
private var collageTrackView: TrackView?
private var trackViews: [Int32: TrackView] = [:]
private let trimView: TrimView
private let ghostTrimView: TrimView
private let cursorContentView: UIView
private let cursorView: HandleView
private let cursorImageView: UIImageView

private var cursorDisplayLink: SharedDisplayLinkDriver.Link?
private var cursorPositionAnimation: (start: Double, from: Double, to: Double, ended: Bool)?
@ -175,11 +205,13 @@ public final class MediaScrubberComponent: Component {
private weak var state: EmptyComponentState?

override init(frame: CGRect) {
self.trackContainerView = UIView()
self.trimView = TrimView(frame: .zero)
self.ghostTrimView = TrimView(frame: .zero)
self.ghostTrimView.isHollow = true
self.cursorContentView = UIView()
self.cursorView = HandleView()
self.cursorImageView = UIImageView()

super.init(frame: frame)

@ -201,14 +233,18 @@ public final class MediaScrubberComponent: Component {
context.addPath(path.cgPath)
context.fillPath()
})?.stretchableImage(withLeftCapWidth: Int(handleWidth / 2.0), topCapHeight: 25)
self.cursorView.image = positionImage
self.cursorView.image = nil
self.cursorView.isUserInteractionEnabled = true
self.cursorView.hitTestSlop = UIEdgeInsets(top: -8.0, left: -9.0, bottom: -8.0, right: -9.0)

self.addSubview(self.ghostTrimView)
self.addSubview(self.trimView)

self.cursorImageView.image = positionImage

self.addSubview(self.trackContainerView)
self.trackContainerView.addSubview(self.ghostTrimView)
self.trackContainerView.addSubview(self.trimView)
self.addSubview(self.cursorContentView)
self.addSubview(self.cursorView)
self.cursorView.addSubview(self.cursorImageView)

self.cursorView.addGestureRecognizer(UIPanGestureRecognizer(target: self, action: #selector(self.handleCursorPan(_:))))

@ -303,6 +339,11 @@ public final class MediaScrubberComponent: Component {
guard let component = self.component, case .began = gestureRecognizer.state else {
return
}

guard !component.isCollage || component.isCollageSelected else {
return
}

let point = gestureRecognizer.location(in: self)
for (id, trackView) in self.trackViews {
if trackView.frame.contains(point) {
@ -385,8 +426,18 @@ public final class MediaScrubberComponent: Component {
}

private var effectiveCursorHeight: CGFloat {
let additionalTracksCount = max(0, (self.component?.tracks.count ?? 1) - 1)
return 50.0 + CGFloat(additionalTracksCount) * 30.0
var height: CGFloat = 50.0
if let component = self.component {
if !component.isCollage || component.isCollageSelected {
let trackHeight = component.isCollage ? 34.0 : 30.0
let additionalTracksCount = max(0, (component.tracks.count) - 1)
height += CGFloat(additionalTracksCount) * trackHeight
}
if component.isCollage && !component.isCollageSelected {
height = 37.0
}
}
return height
}
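Concretely (illustrative counts): three collage tracks span 50 + 2 * 34 = 118 points while the collage timeline is expanded, and collapse to the fixed 37-point strip otherwise. The same rule as a pure function, as a sketch:

func cursorHeight(trackCount: Int, isCollage: Bool, isCollageSelected: Bool) -> CGFloat {
    if isCollage && !isCollageSelected {
        return 37.0
    }
    let perTrack: CGFloat = isCollage ? 34.0 : 30.0
    return 50.0 + CGFloat(max(0, trackCount - 1)) * perTrack
}
// cursorHeight(trackCount: 3, isCollage: true, isCollageSelected: true) == 118.0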
|
||||
|
||||
private func updateCursorPosition() {
|
||||
@ -421,6 +472,7 @@ public final class MediaScrubberComponent: Component {
|
||||
|
||||
public func update(component: MediaScrubberComponent, availableSize: CGSize, state: EmptyComponentState, environment: Environment<EnvironmentType>, transition: ComponentTransition) -> CGSize {
|
||||
let isFirstTime = self.component == nil
|
||||
let previousComponent = self.component
|
||||
self.component = component
|
||||
self.state = state
|
||||
|
||||
@ -452,7 +504,7 @@ public final class MediaScrubberComponent: Component {
|
||||
context.addPath(path.cgPath)
|
||||
context.strokePath()
|
||||
})
|
||||
self.cursorView.image = positionImage
|
||||
self.cursorImageView.image = positionImage
|
||||
}
|
||||
}
|
||||
|
||||
@ -465,6 +517,8 @@ public final class MediaScrubberComponent: Component {
|
||||
|
||||
var lowestVideoId: Int32?
|
||||
|
||||
let effectiveTrackSpacing = component.isCollage ? collageTrackSpacing : trackSpacing
|
||||
|
||||
var validIds = Set<Int32>()
|
||||
for track in component.tracks {
|
||||
let id = track.id
|
||||
@ -506,6 +560,7 @@ public final class MediaScrubberComponent: Component {
|
||||
return
|
||||
}
|
||||
self.selectedTrackId = id
|
||||
self.component?.trackSelectionUpdated(id)
|
||||
self.state?.updated(transition: .easeInOut(duration: 0.2))
|
||||
}
|
||||
trackView.offsetUpdated = { [weak self] offset, apply in
|
||||
@@ -522,28 +577,80 @@ public final class MediaScrubberComponent: Component {
                    }
                    self.trackViews[id] = trackView

                    self.insertSubview(trackView, at: 0)
                    self.trackContainerView.insertSubview(trackView, at: 0)

                    if !isFirstTime {
                        animateTrackIn = true
                    }
                }

                var isSelected = id == self.selectedTrackId
                if component.isCollage && !component.isCollageSelected {
                    isSelected = false
                }

                let trackSize = trackView.update(
                    context: component.context,
                    style: component.style,
                    track: track,
                    isSelected: id == self.selectedTrackId,
                    isSelected: isSelected,
                    availableSize: availableSize,
                    duration: self.duration,
                    transition: trackTransition
                )
                trackLayout[id] = (CGRect(origin: CGPoint(x: 0.0, y: totalHeight), size: trackSize), trackTransition, animateTrackIn)

                totalHeight += trackSize.height
                totalHeight += trackSpacing
                if component.isCollage && !component.isCollageSelected {

                } else {
                    totalHeight += trackSize.height
                    totalHeight += effectiveTrackSpacing
                }
            }
            totalHeight -= effectiveTrackSpacing

            if component.isCollage {
                if !component.isCollageSelected {
                    totalHeight = collapsedTrackHeight
                }

                var trackTransition = transition

                let trackView: TrackView
                if let current = self.collageTrackView {
                    trackView = current
                } else {
                    trackTransition = .immediate
                    trackView = TrackView()
                    trackView.onSelection = { [weak self] _ in
                        guard let self else {
                            return
                        }
                        self.component?.collageSelectionUpdated()
                    }
                    self.insertSubview(trackView, belowSubview: self.cursorView)
                    self.collageTrackView = trackView
                }

                let trackSize = trackView.update(
                    context: component.context,
                    style: component.style,
                    track: MediaScrubberComponent.Track(
                        id: 1024,
                        content: .audio(artist: nil, title: "Timeline", samples: component.collageSamples?.samples, peak: component.collageSamples?.peak ?? 0, isTimeline: true),
                        duration: component.maxDuration,
                        trimRange: nil,
                        offset: nil,
                        isMain: false
                    ),
                    isSelected: false,
                    availableSize: availableSize,
                    duration: self.duration,
                    transition: trackTransition
                )
                trackTransition.setFrame(view: trackView, frame: CGRect(origin: .zero, size: trackSize))
                trackTransition.setAlpha(view: trackView, alpha: component.isCollageSelected ? 0.0 : 1.0)
            }
            totalHeight -= trackSpacing

            for track in component.tracks {
                guard let trackView = self.trackViews[track.id], let (trackFrame, trackTransition, animateTrackIn) = trackLayout[track.id] else {
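Worth noting about the block above: while a collage is collapsed, the scrubber swaps the per-item tracks for one synthetic track that reuses the audio renderer via the new isTimeline flag, and tapping it fires collageSelectionUpdated to expand the stack. A reduced sketch of that placeholder track, using the same shapes as the diff (binding it to a local constant is illustrative):

    let timelineTrack = MediaScrubberComponent.Track(
        id: 1024, // reserved id for the synthetic collage timeline, as in the diff
        content: .audio(
            artist: nil,
            title: "Timeline",
            samples: component.collageSamples?.samples,
            peak: component.collageSamples?.peak ?? 0,
            isTimeline: true
        ),
        duration: component.maxDuration,
        trimRange: nil,
        offset: nil,
        isMain: false
    )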
@@ -552,7 +659,7 @@ public final class MediaScrubberComponent: Component {
                trackTransition.setFrame(view: trackView, frame: CGRect(origin: CGPoint(x: 0.0, y: totalHeight - trackFrame.maxY), size: trackFrame.size))
                if animateTrackIn {
                    trackView.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2)
                    trackView.layer.animatePosition(from: CGPoint(x: 0.0, y: trackFrame.height + trackSpacing), to: .zero, duration: 0.35, timingFunction: kCAMediaTimingFunctionSpring, additive: true)
                    trackView.layer.animatePosition(from: CGPoint(x: 0.0, y: trackFrame.height + effectiveTrackSpacing), to: .zero, duration: 0.35, timingFunction: kCAMediaTimingFunctionSpring, additive: true)
                }
            }

@@ -592,7 +699,7 @@ public final class MediaScrubberComponent: Component {
                    trimViewVisualInsets.left = delta
                }

                if lowestVideoId == 0 && track.id == 1 {
                if (lowestVideoId == 0 && track.id == 1) || component.isCollage {
                    trimViewVisualInsets = .zero
                    trackViewWidth = trackView.containerView.frame.width
                    mainTrimDuration = track.duration
@@ -653,11 +760,19 @@ public final class MediaScrubberComponent: Component {
                selectedTrackFrame = mainTrackFrame
            }

            let trimViewFrame = CGRect(origin: CGPoint(x: trimViewOffset, y: selectedTrackFrame.minY), size: scrubberSize)
            var trimViewFrame = CGRect(origin: CGPoint(x: trimViewOffset, y: selectedTrackFrame.minY), size: scrubberSize)

            var trimVisible = true
            if component.isCollage && !component.isCollageSelected {
                trimVisible = false
                trimViewFrame = trimViewFrame.offsetBy(dx: 0.0, dy: collapsedTrackHeight - trackHeight)
            }

            transition.setFrame(view: self.trimView, frame: trimViewFrame)
            transition.setAlpha(view: self.trimView, alpha: trimVisible ? 1.0 : 0.0)

            var ghostTrimVisible = false
            if let lowestVideoId, self.selectedTrackId != lowestVideoId {
            if let lowestVideoId, !component.isCollage && self.selectedTrackId != lowestVideoId {
                ghostTrimVisible = true
            }

@@ -709,7 +824,40 @@ public final class MediaScrubberComponent: Component {
                self.updateCursorPosition()
            }

            return CGSize(width: availableSize.width, height: totalHeight)
            transition.setFrame(view: self.cursorImageView, frame: CGRect(origin: .zero, size: self.cursorView.frame.size))

            if component.isCollage {
                transition.setAlpha(view: self.trackContainerView, alpha: component.isCollageSelected ? 1.0 : 0.0)
            }

            if let previousComponent, component.isCollage, previousComponent.isCollageSelected != component.isCollageSelected {
                if let blurFilter = makeBlurFilter() {
                    if component.isCollageSelected {
                        blurFilter.setValue(0.0 as NSNumber, forKey: "inputRadius")
                        self.trackContainerView.layer.filters = [blurFilter]
                        self.trackContainerView.layer.animate(from: 20.0 as NSNumber, to: 0.0 as NSNumber, keyPath: "filters.gaussianBlur.inputRadius", timingFunction: CAMediaTimingFunctionName.easeOut.rawValue, duration: 0.3, completion: { [weak self] completed in
                            guard let self, completed else {
                                return
                            }
                            self.trackContainerView.layer.filters = []
                        })
                    } else {
                        blurFilter.setValue(0.0 as NSNumber, forKey: "inputRadius")
                        self.trackContainerView.layer.filters = [blurFilter]
                        self.trackContainerView.layer.animate(from: 0.0 as NSNumber, to: 20.0 as NSNumber, keyPath: "filters.gaussianBlur.inputRadius", timingFunction: CAMediaTimingFunctionName.easeOut.rawValue, duration: 0.4, completion: { [weak self] completed in
                            guard let self, completed else {
                                return
                            }
                            self.trackContainerView.layer.filters = []
                        })
                    }
                }
            }

            let size = CGSize(width: availableSize.width, height: totalHeight)
            transition.setFrame(view: self.trackContainerView, frame: CGRect(origin: .zero, size: size))

            return size
        }

        public override func point(inside point: CGPoint, with event: UIEvent?) -> Bool {
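The expand/collapse transition above animates a Gaussian blur on the track container and then clears the filter so the layer renders unfiltered at rest. A hedged sketch of the same pattern with plain Core Animation; makeBlurFilter() is assumed to be this codebase's wrapper around the private gaussianBlur CAFilter:

    import QuartzCore

    // Sketch, not a drop-in: animates "filters.gaussianBlur.inputRadius" the way
    // the diff does, then removes the filter once the animation completes (the
    // diff clears it in both directions; when blurring in, the container has
    // already been faded out by then).
    func animateBlurRadius(on layer: CALayer, from: CGFloat, to: CGFloat, duration: CFTimeInterval) {
        guard let blurFilter = makeBlurFilter() else { // assumed helper
            return
        }
        blurFilter.setValue(to as NSNumber, forKey: "inputRadius")
        layer.filters = [blurFilter]

        let animation = CABasicAnimation(keyPath: "filters.gaussianBlur.inputRadius")
        animation.fromValue = from as NSNumber
        animation.toValue = to as NSNumber
        animation.duration = duration
        animation.timingFunction = CAMediaTimingFunction(name: .easeOut)

        CATransaction.begin()
        CATransaction.setCompletionBlock {
            layer.filters = [] // render unfiltered once the radius settles
        }
        layer.add(animation, forKey: "blurRadius")
        CATransaction.commit()
    }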
@@ -1124,7 +1272,7 @@ private class TrackView: UIView, UIScrollViewDelegate, UIGestureRecognizerDelega
                        }
                        frameOffset += frameSize.width
                    }
                case let .audio(artist, title, samples, peak):
                case let .audio(artist, title, samples, peak, isTimeline):
                    var components: [String] = []
                    var trackTitle = ""
                    if let artist {
@@ -1161,7 +1309,15 @@ private class TrackView: UIView, UIScrollViewDelegate, UIGestureRecognizerDelega
                    }

                    let spacing: CGFloat = 4.0
                    let iconSize = CGSize(width: 14.0, height: 14.0)
                    var iconSize = CGSize(width: 14.0, height: 14.0)
                    var trackTitleAlpha: CGFloat = 1.0
                    if isTimeline {
                        if previousParams == nil {
                            self.audioIconView.image = UIImage(bundleImageName: "Media Editor/Timeline")
                        }
                        iconSize = CGSize(width: 24.0, height: 24.0)
                        trackTitleAlpha = 0.7
                    }
                    let contentTotalWidth = iconSize.width + audioTitleSize.width + spacing

                    let audioContentTransition = transition
@@ -1181,16 +1337,16 @@ private class TrackView: UIView, UIScrollViewDelegate, UIGestureRecognizerDelega
                            self.audioContentContainerView.addSubview(self.audioIconView)
                            self.audioContentContainerView.addSubview(view)
                        }
                        transition.setAlpha(view: view, alpha: trackTitleIsVisible ? 1.0 : 0.0)
                        transition.setAlpha(view: view, alpha: trackTitleIsVisible ? trackTitleAlpha : 0.0)

                        let audioTitleFrame = CGRect(origin: CGPoint(x: audioIconFrame.maxX + spacing, y: floorToScreenPixels((scrubberSize.height - audioTitleSize.height) / 2.0)), size: audioTitleSize)
                        view.bounds = CGRect(origin: .zero, size: audioTitleFrame.size)
                        audioContentTransition.setPosition(view: view, position: audioTitleFrame.center)
                    }
                    transition.setAlpha(view: self.audioIconView, alpha: trackTitleIsVisible ? 1.0 : 0.0)
                    transition.setAlpha(view: self.audioIconView, alpha: trackTitleIsVisible ? trackTitleAlpha : 0.0)

                    var previousSamples: Data?
                    if let previousParams, case let .audio(_, _, previousSamplesValue, _) = previousParams.track.content {
                    if let previousParams, case let .audio(_, _, previousSamplesValue, _, _) = previousParams.track.content {
                        previousSamples = previousSamplesValue
                    }

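For orientation, the pattern matches above imply that the track content enum gained an isTimeline flag on its audio case. A reconstructed shape, inferred from the call sites rather than quoted from the diff (parameter types are assumptions):

    enum TrackContent {
        // The video case is untouched by this commit and omitted here.
        case audio(artist: String?, title: String?, samples: Data?, peak: Int32, isTimeline: Bool)
    }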
@@ -3,6 +3,7 @@ import UIKit
import Display
import ComponentFlow
import AppBundle
import TelegramCore
import ChatTextInputMediaRecordingButton
import AccountContext
import TelegramPresentationData
@@ -10,7 +11,7 @@ import ChatPresentationInterfaceState
import MoreHeaderButton
import ContextUI
import ReactionButtonListComponent
import TelegramCore
import LottieComponent

private class ButtonIcon: Equatable {
    enum IconType: Equatable {
@@ -131,6 +132,8 @@ public final class MessageInputActionButtonComponent: Component {
        case more
        case like(reaction: MessageReaction.Reaction?, file: TelegramMediaFile?, animationFileId: Int64?)
        case repost
        case captionUp
        case captionDown
    }

    public enum Action {
@@ -228,6 +231,7 @@ public final class MessageInputActionButtonComponent: Component {
        private let sendIconView: UIImageView
        private var reactionHeartView: UIImageView?
        private var moreButton: MoreHeaderButton?
        private var animation: ComponentView<Empty>?
        private var reactionIconView: ReactionIconView?

        private var component: MessageInputActionButtonComponent?
@@ -423,12 +427,51 @@ public final class MessageInputActionButtonComponent: Component {
                self.addSubnode(moreButton)
            }

            switch component.mode {
            case .captionUp, .captionDown:
                var startingPosition: LottieComponent.StartingPosition = .begin
                let animation: ComponentView<Empty>
                if let current = self.animation {
                    animation = current
                } else {
                    animation = ComponentView<Empty>()
                    self.animation = animation
                    startingPosition = .end
                }

                let playOnce = ActionSlot<Void>()
                let animationName = component.mode == .captionUp ? "message_preview_sort_above" : "message_preview_sort_below"
                let _ = animation.update(
                    transition: transition,
                    component: AnyComponent(LottieComponent(
                        content: LottieComponent.AppBundleContent(name: animationName),
                        color: .white,
                        startingPosition: startingPosition,
                        playOnce: playOnce
                    )),
                    environment: {},
                    containerSize: CGSize(width: 30.0, height: 30.0)
                )
                if let view = animation.view {
                    if view.superview == nil {
                        self.referenceNode.view.addSubview(view)
                    }
                }
                if let previousComponent, previousComponent.mode != component.mode {
                    playOnce.invoke(Void())
                }
            default:
                break
            }

            var sendAlpha: CGFloat = 0.0
            var microphoneAlpha: CGFloat = 0.0
            var moreAlpha: CGFloat = 0.0
            switch component.mode {
            case .none:
                break
            case .captionUp, .captionDown:
                sendAlpha = 0.0
            case .send, .apply, .attach, .delete, .forward, .removeVideoInput, .repost:
                sendAlpha = 1.0
            case let .like(reaction, _, _):
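The caption toggle above keeps a single LottieComponent alive and replays it only when the mode actually flips between captionUp and captionDown. A reduced sketch of that play-once pattern, using the same LottieComponent and ActionSlot APIs that appear in the diff (the free-standing function is illustrative):

    func updateCaptionToggle(mode: Mode, previousMode: Mode?, animation: ComponentView<Empty>, transition: ComponentTransition) {
        let playOnce = ActionSlot<Void>()
        let animationName = mode == .captionUp ? "message_preview_sort_above" : "message_preview_sort_below"
        let _ = animation.update(
            transition: transition,
            component: AnyComponent(LottieComponent(
                content: LottieComponent.AppBundleContent(name: animationName),
                color: .white,
                startingPosition: .begin,
                playOnce: playOnce
            )),
            environment: {},
            containerSize: CGSize(width: 30.0, height: 30.0)
        )
        // Replay only on an actual captionUp <-> captionDown flip.
        if let previousMode, previousMode != mode {
            playOnce.invoke(Void())
        }
    }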
@@ -603,6 +646,13 @@ public final class MessageInputActionButtonComponent: Component {
                transition.setScale(view: moreButton.view, scale: moreAlpha == 0.0 ? 0.01 : 1.0)
            }

            if let view = self.animation?.view {
                let buttonSize = CGSize(width: 30.0, height: 30.0)
                let iconFrame = CGRect(origin: CGPoint(x: 2.0 + floorToScreenPixels((availableSize.width - buttonSize.width) * 0.5), y: floorToScreenPixels((availableSize.height - buttonSize.height) * 0.5)), size: buttonSize)
                transition.setPosition(view: view, position: iconFrame.center)
                transition.setBounds(view: view, bounds: CGRect(origin: CGPoint(), size: iconFrame.size))
            }

            if let micButton = self.micButton {
                micButton.hasShadow = component.hasShadow
                micButton.hidesOnLock = component.hasShadow
@@ -621,7 +671,7 @@ public final class MessageInputActionButtonComponent: Component {

            if previousComponent?.mode != component.mode {
                switch component.mode {
                case .none, .send, .apply, .voiceInput, .attach, .delete, .forward, .unavailableVoiceInput, .more, .like, .repost:
                case .none, .send, .apply, .voiceInput, .attach, .delete, .forward, .unavailableVoiceInput, .more, .like, .repost, .captionUp, .captionDown:
                    micButton.updateMode(mode: .audio, animated: !transition.animation.isImmediate)
                case .videoInput, .removeVideoInput:
                    micButton.updateMode(mode: .video, animated: !transition.animation.isImmediate)

@@ -56,6 +56,12 @@ public final class MessageInputPanelComponent: Component {
        case emoji
    }

    public enum AttachmentButtonMode: Hashable {
        case attach
        case captionUp
        case captionDown
    }

    public struct MyReaction: Equatable {
        public let reaction: MessageReaction.Reaction
        public let file: TelegramMediaFile?
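AttachmentButtonMode lets the story editor repurpose the attachment slot as a caption-position toggle. A small sketch of the mapping the panel performs further down in this diff (the standalone function is illustrative; the case names are from the code):

    func actionButtonMode(for mode: MessageInputPanelComponent.AttachmentButtonMode?) -> MessageInputActionButtonComponent.Mode {
        switch mode {
        case .captionUp:
            return .captionUp
        case .captionDown:
            return .captionDown
        case .attach, .none:
            return .attach
        }
    }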
@@ -157,6 +163,7 @@ public final class MessageInputPanelComponent: Component {
    public let maxLength: Int?
    public let queryTypes: ContextQueryTypes
    public let alwaysDarkWhenHasText: Bool
    public let useGrayBackground: Bool
    public let resetInputContents: SendMessageInput?
    public let nextInputMode: (Bool) -> InputMode?
    public let areVoiceMessagesAvailable: Bool
@@ -170,6 +177,7 @@ public final class MessageInputPanelComponent: Component {
    public let stopAndPreviewMediaRecording: (() -> Void)?
    public let discardMediaRecordingPreview: (() -> Void)?
    public let attachmentAction: (() -> Void)?
    public let attachmentButtonMode: AttachmentButtonMode?
    public let myReaction: MyReaction?
    public let likeAction: (() -> Void)?
    public let likeOptionsAction: ((UIView, ContextGesture?) -> Void)?
@@ -177,6 +185,7 @@ public final class MessageInputPanelComponent: Component {
    public let timeoutAction: ((UIView, ContextGesture?) -> Void)?
    public let forwardAction: (() -> Void)?
    public let moreAction: ((UIView, ContextGesture?) -> Void)?
    public let presentCaptionPositionTooltip: ((UIView) -> Void)?
    public let presentVoiceMessagesUnavailableTooltip: ((UIView) -> Void)?
    public let presentTextLengthLimitTooltip: (() -> Void)?
    public let presentTextFormattingTooltip: (() -> Void)?
@@ -212,6 +221,7 @@ public final class MessageInputPanelComponent: Component {
        maxLength: Int?,
        queryTypes: ContextQueryTypes,
        alwaysDarkWhenHasText: Bool,
        useGrayBackground: Bool = false,
        resetInputContents: SendMessageInput?,
        nextInputMode: @escaping (Bool) -> InputMode?,
        areVoiceMessagesAvailable: Bool,
@@ -225,6 +235,7 @@ public final class MessageInputPanelComponent: Component {
        stopAndPreviewMediaRecording: (() -> Void)?,
        discardMediaRecordingPreview: (() -> Void)?,
        attachmentAction: (() -> Void)?,
        attachmentButtonMode: AttachmentButtonMode? = nil,
        myReaction: MyReaction?,
        likeAction: (() -> Void)?,
        likeOptionsAction: ((UIView, ContextGesture?) -> Void)?,
@@ -232,6 +243,7 @@ public final class MessageInputPanelComponent: Component {
        timeoutAction: ((UIView, ContextGesture?) -> Void)?,
        forwardAction: (() -> Void)?,
        moreAction: ((UIView, ContextGesture?) -> Void)?,
        presentCaptionPositionTooltip: ((UIView) -> Void)?,
        presentVoiceMessagesUnavailableTooltip: ((UIView) -> Void)?,
        presentTextLengthLimitTooltip: (() -> Void)?,
        presentTextFormattingTooltip: (() -> Void)?,
@@ -267,6 +279,7 @@ public final class MessageInputPanelComponent: Component {
        self.maxLength = maxLength
        self.queryTypes = queryTypes
        self.alwaysDarkWhenHasText = alwaysDarkWhenHasText
        self.useGrayBackground = useGrayBackground
        self.resetInputContents = resetInputContents
        self.areVoiceMessagesAvailable = areVoiceMessagesAvailable
        self.presentController = presentController
@@ -279,6 +292,7 @@ public final class MessageInputPanelComponent: Component {
        self.stopAndPreviewMediaRecording = stopAndPreviewMediaRecording
        self.discardMediaRecordingPreview = discardMediaRecordingPreview
        self.attachmentAction = attachmentAction
        self.attachmentButtonMode = attachmentButtonMode
        self.myReaction = myReaction
        self.likeAction = likeAction
        self.likeOptionsAction = likeOptionsAction
@@ -286,6 +300,7 @@ public final class MessageInputPanelComponent: Component {
        self.timeoutAction = timeoutAction
        self.forwardAction = forwardAction
        self.moreAction = moreAction
        self.presentCaptionPositionTooltip = presentCaptionPositionTooltip
        self.presentVoiceMessagesUnavailableTooltip = presentVoiceMessagesUnavailableTooltip
        self.presentTextLengthLimitTooltip = presentTextLengthLimitTooltip
        self.presentTextFormattingTooltip = presentTextFormattingTooltip
@@ -340,6 +355,9 @@ public final class MessageInputPanelComponent: Component {
        if lhs.alwaysDarkWhenHasText != rhs.alwaysDarkWhenHasText {
            return false
        }
        if lhs.useGrayBackground != rhs.useGrayBackground {
            return false
        }
        if lhs.resetInputContents != rhs.resetInputContents {
            return false
        }
@@ -409,6 +427,9 @@ public final class MessageInputPanelComponent: Component {
        if (lhs.attachmentAction == nil) != (rhs.attachmentAction == nil) {
            return false
        }
        if lhs.attachmentButtonMode != rhs.attachmentButtonMode {
            return false
        }
        if lhs.myReaction != rhs.myReaction {
            return false
        }
@@ -456,7 +477,6 @@ public final class MessageInputPanelComponent: Component {
        private let inputActionButton = ComponentView<Empty>()
        private let likeButton = ComponentView<Empty>()
        private let stickerButton = ComponentView<Empty>()
        private let reactionButton = ComponentView<Empty>()
        private let timeoutButton = ComponentView<Empty>()

        private var mediaRecordingVibrancyContainer: UIView
@@ -484,6 +504,8 @@ public final class MessageInputPanelComponent: Component {
        private var viewStatsCountText: AnimatedCountLabelView?
        private var reactionStatsCountText: AnimatedCountLabelView?

        private var didDisplayCaptionPositionTooltip = false

        private let hapticFeedback = HapticFeedback()

        private var component: MessageInputPanelComponent?
@@ -717,7 +739,10 @@ public final class MessageInputPanelComponent: Component {
        func update(component: MessageInputPanelComponent, availableSize: CGSize, state: EmptyComponentState, environment: Environment<Empty>, transition: ComponentTransition) -> CGSize {
            let previousPlaceholder = self.component?.placeholder

            var insets = UIEdgeInsets(top: 14.0, left: 9.0, bottom: 6.0, right: 41.0)
            let defaultInsets = UIEdgeInsets(top: 14.0, left: 9.0, bottom: 6.0, right: 41.0)
            var insets = defaultInsets

            let layoutFromTop = component.attachmentButtonMode == .captionDown

            if let _ = component.attachmentAction {
                insets.left = 41.0
@@ -858,7 +883,10 @@ public final class MessageInputPanelComponent: Component {
                containerSize: availableTextFieldSize
            )
            if !isEditing && component.setMediaRecordingActive == nil {
                insets.right = insets.left
                insets.right = defaultInsets.left
            }
            if component.attachmentButtonMode != .attach && !isEditing && !self.textFieldExternalState.hasText {
                insets.left = defaultInsets.left
            }

            var headerHeight: CGFloat = 0.0
@@ -938,7 +966,7 @@ public final class MessageInputPanelComponent: Component {
            fieldBackgroundFrame.size.height += headerHeight

            transition.setFrame(view: self.vibrancyEffectView, frame: CGRect(origin: CGPoint(), size: fieldBackgroundFrame.size))
            self.vibrancyEffectView.isHidden = component.style == .media
            self.vibrancyEffectView.isHidden = false // component.style == .media

            transition.setFrame(view: self.fieldBackgroundView, frame: fieldBackgroundFrame)
            self.fieldBackgroundView.update(size: fieldBackgroundFrame.size, cornerRadius: headerHeight > 0.0 ? 18.0 : baseFieldHeight * 0.5, transition: transition.containedViewLayoutTransition)
@@ -1195,7 +1223,24 @@ public final class MessageInputPanelComponent: Component {

            if component.attachmentAction != nil {
                let attachmentButtonMode: MessageInputActionButtonComponent.Mode
                attachmentButtonMode = .attach

                var attachmentVisible = isEditing || self.textFieldExternalState.hasText
                switch component.attachmentButtonMode {
                case .captionUp:
                    attachmentButtonMode = .captionUp
                case .captionDown:
                    attachmentButtonMode = .captionDown
                default:
                    attachmentButtonMode = .attach
                    attachmentVisible = !(hasMediaRecording || hasMediaEditing || !isEditing)
                }

                if attachmentButtonMode == .captionUp && !self.didDisplayCaptionPositionTooltip && self.textFieldExternalState.textLength > 3 {
                    self.didDisplayCaptionPositionTooltip = true
                    if let sourceView = self.attachmentButton.view {
                        component.presentCaptionPositionTooltip?(sourceView)
                    }
                }

                let attachmentButtonSize = self.attachmentButton.update(
                    transition: transition,
@@ -1210,7 +1255,7 @@ public final class MessageInputPanelComponent: Component {
                        switch mode {
                        case .delete:
                            break
                        case .attach:
                        case .attach, .captionUp, .captionDown:
                            component.attachmentAction?()
                        default:
                            break
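One detail in the block above is easy to miss: the caption-position tooltip fires at most once per panel instance, and only after the user has typed more than three characters, so it never appears over an empty caption. The gate reduces to something like this (presentTooltip stands in for component.presentCaptionPositionTooltip and is an assumption):

    import UIKit

    var didDisplayCaptionPositionTooltip = false

    func maybePresentCaptionPositionTooltip(textLength: Int, sourceView: UIView?, presentTooltip: (UIView) -> Void) {
        guard !didDisplayCaptionPositionTooltip, textLength > 3, let sourceView else {
            return
        }
        didDisplayCaptionPositionTooltip = true
        presentTooltip(sourceView)
    }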
@@ -1245,10 +1290,16 @@ public final class MessageInputPanelComponent: Component {
                    if attachmentButtonView.superview == nil {
                        self.addSubview(attachmentButtonView)
                    }
                    let attachmentButtonFrame = CGRect(origin: CGPoint(x: floor((insets.left - attachmentButtonSize.width) * 0.5) + (fieldBackgroundFrame.minX - fieldFrame.minX), y: size.height - insets.bottom - baseFieldHeight + floor((baseFieldHeight - attachmentButtonSize.height) * 0.5)), size: attachmentButtonSize)
                    var attachmentButtonPosition = floor((baseFieldHeight - attachmentButtonSize.height) * 0.5)
                    if layoutFromTop {
                        attachmentButtonPosition += 14.0
                    } else {
                        attachmentButtonPosition = size.height - insets.bottom - baseFieldHeight + attachmentButtonPosition
                    }
                    let attachmentButtonFrame = CGRect(origin: CGPoint(x: floor((insets.left - attachmentButtonSize.width) * 0.5) + (fieldBackgroundFrame.minX - fieldFrame.minX), y: attachmentButtonPosition), size: attachmentButtonSize)
                    transition.setPosition(view: attachmentButtonView, position: attachmentButtonFrame.center)
                    transition.setBounds(view: attachmentButtonView, bounds: CGRect(origin: CGPoint(), size: attachmentButtonFrame.size))
                    transition.setAlpha(view: attachmentButtonView, alpha: (hasMediaRecording || hasMediaEditing || !isEditing) ? 0.0 : 1.0)
                    transition.setAlpha(view: attachmentButtonView, alpha: attachmentVisible ? 1.0 : 0.0)
                    transition.setScale(view: attachmentButtonView, scale: hasMediaEditing ? 0.001 : 1.0)
                }
            }
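Both buttons now share one vertical rule: when the caption sits below the media (layoutFromTop), they hang 14 points below the field's top; otherwise they keep the old bottom-anchored position. As a standalone sketch (parameter names mirror the locals in the diff, the function itself is illustrative):

    import CoreGraphics

    func buttonOriginY(buttonHeight: CGFloat, baseFieldHeight: CGFloat, panelHeight: CGFloat, bottomInset: CGFloat, layoutFromTop: Bool) -> CGFloat {
        var y = floor((baseFieldHeight - buttonHeight) * 0.5) // centered in the base field
        if layoutFromTop {
            y += 14.0 // pinned near the top edge
        } else {
            y = panelHeight - bottomInset - baseFieldHeight + y // classic bottom anchor
        }
        return y
    }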
@@ -1326,6 +1377,7 @@ public final class MessageInputPanelComponent: Component {
                }
            }

            var inputActionButtonAlpha = 1.0
            let inputActionButtonMode: MessageInputActionButtonComponent.Mode
            if case .editor = component.style {
                if isEditing {
@@ -1334,7 +1386,10 @@ public final class MessageInputPanelComponent: Component {
                    inputActionButtonMode = component.hasRecordedVideo ? .removeVideoInput : .videoInput
                }
            } else if case .media = component.style {
                inputActionButtonMode = isEditing ? .apply : .none
                inputActionButtonMode = .apply
                if !isEditing {
                    inputActionButtonAlpha = 0.0
                }
            } else {
                if hasMediaEditing {
                    inputActionButtonMode = .send
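A plausible reading of the media-style change above (an inference, not stated in the diff): keeping the mode fixed at .apply and fading via inputActionButtonAlpha avoids tearing the button down and rebuilding it when editing starts, so the show/hide can animate. Reduced to a sketch:

    func applyButtonPresentation(isEditing: Bool) -> (mode: MessageInputActionButtonComponent.Mode, alpha: CGFloat) {
        // Stable mode + animated alpha instead of swapping to .none.
        return (.apply, isEditing ? 1.0 : 0.0)
    }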
@@ -1494,10 +1549,16 @@ public final class MessageInputPanelComponent: Component {
                if inputActionButtonView.superview == nil {
                    self.addSubview(inputActionButtonView)
                }
                let inputActionButtonFrame = CGRect(origin: CGPoint(x: inputActionButtonOriginX, y: size.height - insets.bottom - baseFieldHeight + floor((baseFieldHeight - inputActionButtonSize.height) * 0.5)), size: inputActionButtonSize)
                var inputActionButtonPosition = floor((baseFieldHeight - inputActionButtonSize.height) * 0.5)
                if layoutFromTop {
                    inputActionButtonPosition += 14.0
                } else {
                    inputActionButtonPosition = size.height - insets.bottom - baseFieldHeight + inputActionButtonPosition
                }
                let inputActionButtonFrame = CGRect(origin: CGPoint(x: inputActionButtonOriginX, y: inputActionButtonPosition), size: inputActionButtonSize)
                transition.setPosition(view: inputActionButtonView, position: inputActionButtonFrame.center)
                transition.setBounds(view: inputActionButtonView, bounds: CGRect(origin: CGPoint(), size: inputActionButtonFrame.size))
                transition.setAlpha(view: inputActionButtonView, alpha: likeActionReplacesInputAction ? 0.0 : 1.0)
                transition.setAlpha(view: inputActionButtonView, alpha: likeActionReplacesInputAction ? 0.0 : inputActionButtonAlpha)

                if rightButtonsOffsetX != 0.0 {
                    if hasLikeAction {
@@ -1699,8 +1760,10 @@ public final class MessageInputPanelComponent: Component {
            }

            var fieldBackgroundIsDark = false
            if component.style == .media {

            if component.useGrayBackground {
                fieldBackgroundIsDark = false
            } else if component.style == .media {
                fieldBackgroundIsDark = true
            } else if self.textFieldExternalState.hasText && component.alwaysDarkWhenHasText {
                fieldBackgroundIsDark = true
            } else if isEditing || component.style == .editor {

@@ -9971,6 +9971,7 @@ final class PeerInfoScreenNode: ViewControllerTracingNode, PeerInfoScreenNodePro
        let controller = self.context.sharedContext.makeStoryMediaPickerScreen(
            context: self.context,
            isDark: false,
            forCollage: false,
            getSourceRect: { return .zero },
            completion: { [weak self] result, transitionView, transitionRect, transitionImage, transitionOut, dismissed in
                guard let self else {

@@ -2543,15 +2543,15 @@ public final class PeerInfoStoryPaneNode: ASDisplayNode, PeerInfoPaneNode, ASScr
            }
        }

        guard let controller = MediaEditorScreen.makeEditStoryController(
        guard let controller = MediaEditorScreenImpl.makeEditStoryController(
            context: self.context,
            peer: peer,
            storyItem: item,
            videoPlaybackPosition: nil,
            cover: false,
            repost: false,
            transitionIn: .gallery(MediaEditorScreen.TransitionIn.GalleryTransitionIn(sourceView: self.itemGrid.view, sourceRect: foundItemLayer?.frame ?? .zero, sourceImage: sourceImage)),
            transitionOut: MediaEditorScreen.TransitionOut(destinationView: self.itemGrid.view, destinationRect: foundItemLayer?.frame ?? .zero, destinationCornerRadius: 0.0),
            transitionIn: .gallery(MediaEditorScreenImpl.TransitionIn.GalleryTransitionIn(sourceView: self.itemGrid.view, sourceRect: foundItemLayer?.frame ?? .zero, sourceImage: sourceImage)),
            transitionOut: MediaEditorScreenImpl.TransitionOut(destinationView: self.itemGrid.view, destinationRect: foundItemLayer?.frame ?? .zero, destinationCornerRadius: 0.0),
            update: { [weak self] disposable in
                guard let self else {
                    return

@@ -245,7 +245,6 @@ public func fetchVideoLibraryMediaResource(postbox: Postbox, resource: VideoLibr

    let configuration = recommendedVideoExportConfiguration(values: mediaEditorValues, duration: 5.0, image: true, frameRate: 30.0)
    let videoExport = MediaEditorVideoExport(postbox: postbox, subject: .image(image: image), configuration: configuration, outputPath: tempFile.path)
    videoExport.start()

    let statusDisposable = videoExport.status.start(next: { status in
        switch status {
@@ -349,7 +348,6 @@ public func fetchVideoLibraryMediaResource(postbox: Postbox, resource: VideoLibr
    let duration: Double = avAsset.duration.seconds
    let configuration = recommendedVideoExportConfiguration(values: mediaEditorValues, duration: duration, frameRate: 30.0)
    let videoExport = MediaEditorVideoExport(postbox: postbox, subject: .video(asset: avAsset, isStory: isStory), configuration: configuration, outputPath: tempFile.path)
    videoExport.start()

    let statusDisposable = videoExport.status.start(next: { status in
        switch status {
@@ -554,7 +552,6 @@ public func fetchLocalFileVideoMediaResource(postbox: Postbox, resource: LocalFi

    let configuration = recommendedVideoExportConfiguration(values: mediaEditorValues, duration: duration, frameRate: 30.0)
    let videoExport = MediaEditorVideoExport(postbox: postbox, subject: subject, configuration: configuration, outputPath: tempFile.path)
    videoExport.start()

    let statusDisposable = videoExport.status.start(next: { status in
        switch status {
@@ -900,6 +897,7 @@ private extension MediaEditorValues {
        additionalVideoTrimRange: nil,
        additionalVideoOffset: nil,
        additionalVideoVolume: nil,
        collage: [],
        nightTheme: false,
        drawing: nil,
        maskDrawing: nil,
@@ -910,6 +908,7 @@ private extension MediaEditorValues {
        audioTrackOffset: nil,
        audioTrackVolume: nil,
        audioTrackSamples: nil,
        collageTrackSamples: nil,
        coverImageTimestamp: nil,
        qualityPreset: qualityPreset
    )
@@ -1044,6 +1043,7 @@ private extension MediaEditorValues {
        additionalVideoTrimRange: nil,
        additionalVideoOffset: nil,
        additionalVideoVolume: nil,
        collage: [],
        nightTheme: false,
        drawing: drawing,
        maskDrawing: nil,
@@ -1054,6 +1054,7 @@ private extension MediaEditorValues {
        audioTrackOffset: nil,
        audioTrackVolume: nil,
        audioTrackSamples: nil,
        collageTrackSamples: nil,
        coverImageTimestamp: nil,
        qualityPreset: qualityPreset
    )

@@ -41,7 +41,6 @@ swift_library(
        "//submodules/TelegramUI/Components/PeerAllowedReactionsScreen",
        "//submodules/TelegramUI/Components/EmojiActionIconComponent",
        "//submodules/TelegramUI/Components/TextFieldComponent",
        "//submodules/TelegramUI/Components/CameraScreen",
    ],
    visibility = [
        "//visibility:public",

@@ -22,7 +22,6 @@ import EntityKeyboard
import PeerAllowedReactionsScreen
import EmojiActionIconComponent
import TextFieldComponent
import CameraScreen

final class BusinessIntroSetupScreenComponent: Component {
    typealias EnvironmentType = ViewControllerComponentContainer.Environment

@@ -617,7 +617,7 @@ final class ChannelAppearanceScreenComponent: Component {
        let level = boostStatus.level
        let requiredCustomWallpaperLevel = Int(BoostSubject.customWallpaper.requiredLevel(group: self.isGroup, context: component.context, configuration: premiumConfiguration))

        let controller = MediaPickerScreen(context: component.context, peer: nil, threadTitle: nil, chatLocation: nil, bannedSendPhotos: nil, bannedSendVideos: nil, subject: .assets(nil, .wallpaper))
        let controller = MediaPickerScreenImpl(context: component.context, peer: nil, threadTitle: nil, chatLocation: nil, bannedSendPhotos: nil, bannedSendVideos: nil, subject: .assets(nil, .wallpaper))
        controller.customSelection = { [weak self] _, asset in
            guard let self, let asset = asset as? PHAsset else {
                return

@@ -231,7 +231,7 @@ public final class ThemeGridController: ViewController {
        }
    }

    let controller = MediaPickerScreen(context: strongSelf.context, peer: nil, threadTitle: nil, chatLocation: nil, bannedSendPhotos: nil, bannedSendVideos: nil, subject: .assets(nil, .wallpaper))
    let controller = MediaPickerScreenImpl(context: strongSelf.context, peer: nil, threadTitle: nil, chatLocation: nil, bannedSendPhotos: nil, bannedSendVideos: nil, subject: .assets(nil, .wallpaper))
    controller.customSelection = { [weak self] _, asset in
        guard let strongSelf = self, let asset = asset as? PHAsset else {
            return

@@ -2937,6 +2937,7 @@ public final class StoryItemSetContainerComponent: Component {
                }
                self.sendMessageContext.presentAttachmentMenu(view: self, subject: .default)
            },
            attachmentButtonMode: component.slice.effectivePeer.isService ? nil : .attach,
            myReaction: component.slice.item.storyItem.myReaction.flatMap { value -> MessageInputPanelComponent.MyReaction? in
                var centerAnimation: TelegramMediaFile?
                var animationFileId: Int64?
@@ -3007,6 +3008,7 @@ public final class StoryItemSetContainerComponent: Component {
                }
                self.performMoreAction(sourceView: sourceView, gesture: gesture)
            },
            presentCaptionPositionTooltip: nil,
            presentVoiceMessagesUnavailableTooltip: { [weak self] view in
                guard let self, let component = self.component, self.voiceMessagesRestrictedTooltipController == nil else {
                    return
@@ -5403,7 +5405,7 @@ public final class StoryItemSetContainerComponent: Component {
            }
        }

        guard let controller = MediaEditorScreen.makeEditStoryController(
        guard let controller = MediaEditorScreenImpl.makeEditStoryController(
            context: component.context,
            peer: component.slice.effectivePeer,
            storyItem: component.slice.item.storyItem,

@@ -1483,7 +1483,7 @@ final class StoryItemSetContainerSendMessage {
            return
        }

        let currentMediaController = Atomic<MediaPickerScreen?>(value: nil)
        let currentMediaController = Atomic<MediaPickerScreenImpl?>(value: nil)
        let currentFilesController = Atomic<AttachmentFileController?>(value: nil)
        let currentLocationController = Atomic<LocationPickerController?>(value: nil)

@@ -1869,11 +1869,11 @@ final class StoryItemSetContainerSendMessage {
        peer: EnginePeer,
        replyToMessageId: EngineMessage.Id?,
        replyToStoryId: StoryId?,
        subject: MediaPickerScreen.Subject = .assets(nil, .default),
        subject: MediaPickerScreenImpl.Subject = .assets(nil, .default),
        saveEditedPhotos: Bool,
        bannedSendPhotos: (Int32, Bool)?,
        bannedSendVideos: (Int32, Bool)?,
        present: @escaping (MediaPickerScreen, AttachmentMediaPickerContext?) -> Void,
        present: @escaping (MediaPickerScreenImpl, AttachmentMediaPickerContext?) -> Void,
        updateMediaPickerContext: @escaping (AttachmentMediaPickerContext?) -> Void,
        completion: @escaping ([Any], Bool, Int32?, ChatSendMessageActionSheetController.SendParameters?, @escaping (String) -> UIView?, @escaping () -> Void) -> Void
    ) {
@@ -1881,7 +1881,7 @@ final class StoryItemSetContainerSendMessage {
            return
        }
        let theme = component.theme
        let controller = MediaPickerScreen(context: component.context, updatedPresentationData: (component.context.sharedContext.currentPresentationData.with({ $0 }).withUpdated(theme: theme), component.context.sharedContext.presentationData |> map { $0.withUpdated(theme: theme) }), peer: peer, threadTitle: nil, chatLocation: .peer(id: peer.id), bannedSendPhotos: bannedSendPhotos, bannedSendVideos: bannedSendVideos, subject: subject, saveEditedPhotos: saveEditedPhotos)
        let controller = MediaPickerScreenImpl(context: component.context, updatedPresentationData: (component.context.sharedContext.currentPresentationData.with({ $0 }).withUpdated(theme: theme), component.context.sharedContext.presentationData |> map { $0.withUpdated(theme: theme) }), peer: peer, threadTitle: nil, chatLocation: .peer(id: peer.id), bannedSendPhotos: bannedSendPhotos, bannedSendVideos: bannedSendVideos, subject: subject, saveEditedPhotos: saveEditedPhotos)
        let mediaPickerContext = controller.mediaPickerContext
        controller.openCamera = { [weak self, weak view] cameraView in
            guard let self, let view else {
@@ -2195,7 +2195,7 @@ final class StoryItemSetContainerSendMessage {
        })
    }

    func presentMediaPasteboard(view: StoryItemSetContainerComponent.View, subjects: [MediaPickerScreen.Subject.Media]) {
    func presentMediaPasteboard(view: StoryItemSetContainerComponent.View, subjects: [MediaPickerScreenImpl.Subject.Media]) {
        guard let component = view.component else {
            return
        }
@@ -2315,7 +2315,7 @@ final class StoryItemSetContainerSendMessage {
        })
    }

    private func getCaptionPanelView(view: StoryItemSetContainerComponent.View, peer: EnginePeer, mediaPicker: MediaPickerScreen? = nil) -> TGCaptionPanelView? {
    private func getCaptionPanelView(view: StoryItemSetContainerComponent.View, peer: EnginePeer, mediaPicker: MediaPickerScreenImpl? = nil) -> TGCaptionPanelView? {
        guard let component = view.component else {
            return nil
        }

@@ -1837,7 +1837,7 @@ public class VideoMessageCameraScreen: ViewController {
            guard let self else {
                return
            }
            let values = MediaEditorValues(peerId: self.context.account.peerId, originalDimensions: dimensions, cropOffset: .zero, cropRect: CGRect(origin: .zero, size: dimensions.cgSize), cropScale: 1.0, cropRotation: 0.0, cropMirroring: false, cropOrientation: nil, gradientColors: nil, videoTrimRange: self.node.previewState?.trimRange, videoIsMuted: false, videoIsFullHd: false, videoIsMirrored: false, videoVolume: nil, additionalVideoPath: nil, additionalVideoIsDual: false, additionalVideoPosition: nil, additionalVideoScale: nil, additionalVideoRotation: nil, additionalVideoPositionChanges: [], additionalVideoTrimRange: nil, additionalVideoOffset: nil, additionalVideoVolume: nil, nightTheme: false, drawing: nil, maskDrawing: nil, entities: [], toolValues: [:], audioTrack: nil, audioTrackTrimRange: nil, audioTrackOffset: nil, audioTrackVolume: nil, audioTrackSamples: nil, coverImageTimestamp: nil, qualityPreset: .videoMessage)
            let values = MediaEditorValues(peerId: self.context.account.peerId, originalDimensions: dimensions, cropOffset: .zero, cropRect: CGRect(origin: .zero, size: dimensions.cgSize), cropScale: 1.0, cropRotation: 0.0, cropMirroring: false, cropOrientation: nil, gradientColors: nil, videoTrimRange: self.node.previewState?.trimRange, videoIsMuted: false, videoIsFullHd: false, videoIsMirrored: false, videoVolume: nil, additionalVideoPath: nil, additionalVideoIsDual: false, additionalVideoPosition: nil, additionalVideoScale: nil, additionalVideoRotation: nil, additionalVideoPositionChanges: [], additionalVideoTrimRange: nil, additionalVideoOffset: nil, additionalVideoVolume: nil, collage: [], nightTheme: false, drawing: nil, maskDrawing: nil, entities: [], toolValues: [:], audioTrack: nil, audioTrackTrimRange: nil, audioTrackOffset: nil, audioTrackVolume: nil, audioTrackSamples: nil, collageTrackSamples: nil, coverImageTimestamp: nil, qualityPreset: .videoMessage)

            var resourceAdjustments: VideoMediaResourceAdjustments? = nil
            if let valuesData = try? JSONEncoder().encode(values) {

submodules/TelegramUI/Images.xcassets/Media Editor/Timeline.imageset/Contents.json (vendored, new file, 12 additions)
@@ -0,0 +1,12 @@
{
  "images" : [
    {
      "filename" : "Timeline_24.pdf",
      "idiom" : "universal"
    }
  ],
  "info" : {
    "author" : "xcode",
    "version" : 1
  }
}
submodules/TelegramUI/Images.xcassets/Media Editor/Timeline.imageset/Timeline_24.pdf (vendored, new binary file)
Binary file not shown.
@@ -30,7 +30,7 @@ import MediaEditorScreen
extension ChatControllerImpl {
    func openStorySharing(messages: [Message]) {
        let context = self.context
        let subject: Signal<MediaEditorScreen.Subject?, NoError> = .single(.message(messages.map { $0.id }))
        let subject: Signal<MediaEditorScreenImpl.Subject?, NoError> = .single(.message(messages.map { $0.id }))

        let externalState = MediaEditorTransitionOutExternalState(
            storyTarget: nil,
@@ -39,7 +39,7 @@ extension ChatControllerImpl {
            transitionOut: nil
        )

        let controller = MediaEditorScreen(
        let controller = MediaEditorScreenImpl(
            context: context,
            mode: .storyEditor,
            subject: subject,

@@ -13,7 +13,7 @@ import MediaEditor
import ChatEntityKeyboardInputNode

extension ChatControllerImpl {
    func displayPasteMenu(_ subjects: [MediaPickerScreen.Subject.Media]) {
    func displayPasteMenu(_ subjects: [MediaPickerScreenImpl.Subject.Media]) {
        let _ = (self.context.sharedContext.accountManager.transaction { transaction -> GeneratedMediaStoreSettings in
            let entry = transaction.getSharedData(ApplicationSpecificSharedDataKeys.generatedMediaStoreSettings)?.get(GeneratedMediaStoreSettings.self)
            return entry ?? GeneratedMediaStoreSettings.defaultSettings
@@ -183,6 +183,7 @@ extension ChatControllerImpl {
            additionalVideoTrimRange: nil,
            additionalVideoOffset: nil,
            additionalVideoVolume: nil,
            collage: [],
            nightTheme: false,
            drawing: nil,
            maskDrawing: blackImage,
@@ -193,6 +194,7 @@ extension ChatControllerImpl {
            audioTrackOffset: nil,
            audioTrackVolume: nil,
            audioTrackSamples: nil,
            collageTrackSamples: nil,
            coverImageTimestamp: nil,
            qualityPreset: nil
        )
@@ -206,7 +208,6 @@ extension ChatControllerImpl {
            configuration: configuration,
            outputPath: path
        )
        videoExport.start()

        let _ = (videoExport.status
        |> deliverOnMainQueue).startStandalone(next: { [weak self] status in

@@ -269,7 +269,7 @@ extension ChatControllerImpl {

    let inputText = strongSelf.presentationInterfaceState.interfaceState.effectiveInputState.inputText

    let currentMediaController = Atomic<MediaPickerScreen?>(value: nil)
    let currentMediaController = Atomic<MediaPickerScreenImpl?>(value: nil)
    let currentFilesController = Atomic<AttachmentFileControllerImpl?>(value: nil)
    let currentLocationController = Atomic<LocationPickerController?>(value: nil)

@@ -1159,7 +1159,7 @@ extension ChatControllerImpl {
        self.present(actionSheet, in: .window(.root))
    }

    func presentMediaPicker(subject: MediaPickerScreen.Subject = .assets(nil, .default), saveEditedPhotos: Bool, bannedSendPhotos: (Int32, Bool)?, bannedSendVideos: (Int32, Bool)?, present: @escaping (MediaPickerScreen, AttachmentMediaPickerContext?) -> Void, updateMediaPickerContext: @escaping (AttachmentMediaPickerContext?) -> Void, completion: @escaping ([Any], Bool, Int32?, ChatSendMessageActionSheetController.SendParameters?, @escaping (String) -> UIView?, @escaping () -> Void) -> Void) {
    func presentMediaPicker(subject: MediaPickerScreenImpl.Subject = .assets(nil, .default), saveEditedPhotos: Bool, bannedSendPhotos: (Int32, Bool)?, bannedSendVideos: (Int32, Bool)?, present: @escaping (MediaPickerScreenImpl, AttachmentMediaPickerContext?) -> Void, updateMediaPickerContext: @escaping (AttachmentMediaPickerContext?) -> Void, completion: @escaping ([Any], Bool, Int32?, ChatSendMessageActionSheetController.SendParameters?, @escaping (String) -> UIView?, @escaping () -> Void) -> Void) {
        var isScheduledMessages = false
        if case .scheduledMessages = self.presentationInterfaceState.subject {
            isScheduledMessages = true
@@ -1168,7 +1168,7 @@ extension ChatControllerImpl {
        if let cachedData = self.peerView?.cachedData as? CachedChannelData, cachedData.flags.contains(.paidMediaAllowed) {
            paidMediaAllowed = true
        }
        let controller = MediaPickerScreen(
        let controller = MediaPickerScreenImpl(
            context: self.context,
            updatedPresentationData: self.updatedPresentationData,
            peer: (self.presentationInterfaceState.renderedPeer?.peer).flatMap(EnginePeer.init),
@@ -1767,19 +1767,19 @@ extension ChatControllerImpl {
            guard let self else {
                return
            }
            let subject: Signal<MediaEditorScreen.Subject?, NoError>
            let subject: Signal<MediaEditorScreenImpl.Subject?, NoError>
            if let asset = result as? PHAsset {
                subject = .single(.asset(asset))
            } else if let image = result as? UIImage {
                subject = .single(.image(image, PixelDimensions(image.size), nil, .bottomRight))
            } else if let result = result as? Signal<CameraScreen.Result, NoError> {
                subject = .single(.image(image: image, dimensions: PixelDimensions(image.size), additionalImage: nil, additionalImagePosition: .bottomRight))
            } else if let result = result as? Signal<CameraScreenImpl.Result, NoError> {
                subject = result
                |> map { value -> MediaEditorScreen.Subject? in
                |> map { value -> MediaEditorScreenImpl.Subject? in
                    switch value {
                    case .pendingImage:
                        return nil
                    case let .image(image):
                        return .image(image.image, PixelDimensions(image.image.size), nil, .topLeft)
                        return .image(image: image.image, dimensions: PixelDimensions(image.image.size), additionalImage: nil, additionalImagePosition: .topLeft)
                    default:
                        return nil
                    }
@@ -1788,12 +1788,12 @@ extension ChatControllerImpl {
            subject = .single(.empty(PixelDimensions(width: 1080, height: 1920)))
        }

        let editorController = MediaEditorScreen(
        let editorController = MediaEditorScreenImpl(
            context: self.context,
            mode: .stickerEditor(mode: .generic),
            subject: subject,
            transitionIn: fromCamera ? .camera : transitionView.flatMap({ .gallery(
                MediaEditorScreen.TransitionIn.GalleryTransitionIn(
                MediaEditorScreenImpl.TransitionIn.GalleryTransitionIn(
                    sourceView: $0,
                    sourceRect: transitionRect,
                    sourceImage: transitionImage
@@ -1801,7 +1801,7 @@ extension ChatControllerImpl {
            ) }),
            transitionOut: { finished, isNew in
                if !finished, let transitionView {
                    return MediaEditorScreen.TransitionOut(
                    return MediaEditorScreenImpl.TransitionOut(
                        destinationView: transitionView,
                        destinationRect: transitionView.bounds,
                        destinationCornerRadius: 0.0
@@ -1818,7 +1818,7 @@ extension ChatControllerImpl {
                    self?.enqueueStickerFile(file)
                }
            }
        } as (MediaEditorScreen.Result, @escaping (@escaping () -> Void) -> Void) -> Void
        } as (MediaEditorScreenImpl.Result, @escaping (@escaping () -> Void) -> Void) -> Void
        )
        editorController.cancelled = { _ in
            cancelled()

@@ -333,6 +333,7 @@ final class ChatRecordingPreviewInputPanelNode: ChatInputPanelNode {
                    isMain: true
                )
            ],
            isCollage: false,
            positionUpdated: { _, _ in },
            trackTrimUpdated: { [weak self] _, start, end, updatedEnd, apply in
                if let self {

@@ -15,7 +15,6 @@ import AttachmentUI
import ForumCreateTopicScreen
import LegacyInstantVideoController
import StoryContainerScreen
import CameraScreen
import MediaEditorScreen
import ChatControllerInteraction
import SavedMessagesScreen

@ -2560,21 +2560,21 @@ public final class SharedAccountContextImpl: SharedAccountContext {
|
||||
}
|
||||
|
||||
public func makeBotPreviewEditorScreen(context: AccountContext, source: Any?, target: Stories.PendingTarget, transitionArguments: (UIView, CGRect, UIImage?)?, transitionOut: @escaping () -> BotPreviewEditorTransitionOut?, externalState: MediaEditorTransitionOutExternalState, completion: @escaping (MediaEditorScreenResult, @escaping (@escaping () -> Void) -> Void) -> Void, cancelled: @escaping () -> Void) -> ViewController {
|
||||
let subject: Signal<MediaEditorScreen.Subject?, NoError>
|
||||
let subject: Signal<MediaEditorScreenImpl.Subject?, NoError>
|
||||
if let asset = source as? PHAsset {
|
||||
subject = .single(.asset(asset))
|
||||
} else if let image = source as? UIImage {
|
||||
subject = .single(.image(image, PixelDimensions(image.size), nil, .bottomRight))
|
||||
subject = .single(.image(image: image, dimensions: PixelDimensions(image.size), additionalImage: nil, additionalImagePosition: .bottomRight))
|
||||
} else {
|
||||
subject = .single(.empty(PixelDimensions(width: 1080, height: 1920)))
|
||||
}
|
||||
let editorController = MediaEditorScreen(
|
||||
let editorController = MediaEditorScreenImpl(
|
||||
context: context,
|
||||
mode: .botPreview,
|
||||
subject: subject,
|
||||
customTarget: nil,
|
||||
transitionIn: transitionArguments.flatMap { .gallery(
|
||||
MediaEditorScreen.TransitionIn.GalleryTransitionIn(
|
||||
MediaEditorScreenImpl.TransitionIn.GalleryTransitionIn(
|
||||
sourceView: $0.0,
|
||||
sourceRect: $0.1,
|
||||
sourceImage: $0.2
|
||||
@ -2582,13 +2582,13 @@ public final class SharedAccountContextImpl: SharedAccountContext {
|
||||
) },
|
||||
transitionOut: { finished, isNew in
|
||||
if !finished, let transitionArguments {
|
||||
return MediaEditorScreen.TransitionOut(
|
||||
return MediaEditorScreenImpl.TransitionOut(
|
||||
destinationView: transitionArguments.0,
|
||||
destinationRect: transitionArguments.0.bounds,
|
||||
destinationCornerRadius: 0.0
|
||||
)
|
||||
} else if finished, let transitionOut = transitionOut(), let destinationView = transitionOut.destinationView {
|
||||
return MediaEditorScreen.TransitionOut(
|
||||
return MediaEditorScreenImpl.TransitionOut(
|
||||
destinationView: destinationView,
|
||||
destinationRect: transitionOut.destinationRect,
|
||||
destinationCornerRadius: transitionOut.destinationCornerRadius,
|
||||
@ -2598,7 +2598,7 @@ public final class SharedAccountContextImpl: SharedAccountContext {
|
||||
return nil
|
||||
}, completion: { result, commit in
|
||||
completion(result, commit)
|
||||
} as (MediaEditorScreen.Result, @escaping (@escaping () -> Void) -> Void) -> Void
|
||||
} as (MediaEditorScreenImpl.Result, @escaping (@escaping () -> Void) -> Void) -> Void
|
||||
)
|
||||
editorController.cancelled = { _ in
|
||||
cancelled()
|
||||
@ -2607,8 +2607,8 @@ public final class SharedAccountContextImpl: SharedAccountContext {
|
||||
}
|
||||
|
||||
public func makeStickerEditorScreen(context: AccountContext, source: Any?, intro: Bool, transitionArguments: (UIView, CGRect, UIImage?)?, completion: @escaping (TelegramMediaFile, [String], @escaping () -> Void) -> Void, cancelled: @escaping () -> Void) -> ViewController {
|
||||
let subject: Signal<MediaEditorScreen.Subject?, NoError>
|
||||
var mode: MediaEditorScreen.Mode.StickerEditorMode
|
||||
let subject: Signal<MediaEditorScreenImpl.Subject?, NoError>
|
||||
var mode: MediaEditorScreenImpl.Mode.StickerEditorMode
|
||||
var fromCamera = false
|
||||
if let (file, emoji) = source as? (TelegramMediaFile, [String]) {
|
||||
subject = .single(.sticker(file, emoji))
|
||||
@ -2617,16 +2617,16 @@ public final class SharedAccountContextImpl: SharedAccountContext {
|
||||
subject = .single(.asset(asset))
|
||||
mode = .addingToPack
|
||||
} else if let image = source as? UIImage {
|
||||
subject = .single(.image(image, PixelDimensions(image.size), nil, .bottomRight))
|
||||
subject = .single(.image(image: image, dimensions: PixelDimensions(image.size), additionalImage: nil, additionalImagePosition: .bottomRight))
|
||||
mode = .addingToPack
|
||||
} else if let source = source as? Signal<CameraScreen.Result, NoError> {
|
||||
} else if let source = source as? Signal<CameraScreenImpl.Result, NoError> {
|
||||
subject = source
|
||||
|> map { value -> MediaEditorScreen.Subject? in
|
||||
|> map { value -> MediaEditorScreenImpl.Subject? in
|
||||
switch value {
|
||||
case .pendingImage:
|
||||
return nil
|
||||
case let .image(image):
|
||||
return .image(image.image, PixelDimensions(image.image.size), nil, .topLeft)
|
||||
return .image(image: image.image, dimensions: PixelDimensions(image.image.size), additionalImage: nil, additionalImagePosition: .topLeft)
|
||||
default:
|
||||
return nil
|
||||
}
|
||||
@ -2640,12 +2640,12 @@ public final class SharedAccountContextImpl: SharedAccountContext {
|
||||
if intro {
|
||||
mode = .businessIntro
|
||||
}
|
||||
let editorController = MediaEditorScreen(
|
||||
let editorController = MediaEditorScreenImpl(
|
||||
context: context,
|
||||
mode: .stickerEditor(mode: mode),
|
||||
subject: subject,
|
||||
transitionIn: fromCamera ? .camera : transitionArguments.flatMap { .gallery(
|
||||
MediaEditorScreen.TransitionIn.GalleryTransitionIn(
|
||||
MediaEditorScreenImpl.TransitionIn.GalleryTransitionIn(
|
||||
sourceView: $0.0,
|
||||
sourceRect: $0.1,
|
||||
sourceImage: $0.2
|
||||
@ -2653,7 +2653,7 @@ public final class SharedAccountContextImpl: SharedAccountContext {
|
||||
) },
|
||||
transitionOut: { finished, isNew in
|
||||
if !finished, let transitionArguments {
|
||||
return MediaEditorScreen.TransitionOut(
|
||||
return MediaEditorScreenImpl.TransitionOut(
|
||||
destinationView: transitionArguments.0,
|
||||
destinationRect: transitionArguments.0.bounds,
|
||||
destinationCornerRadius: 0.0
|
||||
@ -2666,7 +2666,7 @@ public final class SharedAccountContextImpl: SharedAccountContext {
|
||||
commit({})
|
||||
})
|
||||
}
|
||||
} as (MediaEditorScreen.Result, @escaping (@escaping () -> Void) -> Void) -> Void
|
||||
} as (MediaEditorScreenImpl.Result, @escaping (@escaping () -> Void) -> Void) -> Void
|
||||
)
|
||||
editorController.cancelled = { _ in
|
||||
cancelled()
|
||||
@ -2675,15 +2675,15 @@ public final class SharedAccountContextImpl: SharedAccountContext {
|
||||
}
|
||||
|
||||
public func makeStoryMediaEditorScreen(context: AccountContext, source: Any?, text: String?, link: (url: String, name: String?)?, completion: @escaping (MediaEditorScreenResult, @escaping (@escaping () -> Void) -> Void) -> Void) -> ViewController {
|
||||
let subject: Signal<MediaEditorScreen.Subject?, NoError>
|
||||
let subject: Signal<MediaEditorScreenImpl.Subject?, NoError>
|
||||
if let image = source as? UIImage {
|
||||
subject = .single(.image(image, PixelDimensions(image.size), nil, .bottomRight))
|
||||
subject = .single(.image(image: image, dimensions: PixelDimensions(image.size), additionalImage: nil, additionalImagePosition: .bottomRight))
|
||||
} else if let path = source as? String {
|
||||
subject = .single(.video(path, nil, false, nil, nil, PixelDimensions(width: 1080, height: 1920), 0.0, [], .bottomRight))
|
||||
subject = .single(.video(videoPath: path, thumbnail: nil, mirror: false, additionalVideoPath: nil, additionalThumbnail: nil, dimensions: PixelDimensions(width: 1080, height: 1920), duration: 0.0, videoPositionChanges: [], additionalVideoPosition: .bottomRight))
|
||||
} else {
|
||||
subject = .single(.empty(PixelDimensions(width: 1080, height: 1920)))
|
||||
}
|
||||
let editorController = MediaEditorScreen(
|
||||
let editorController = MediaEditorScreenImpl(
|
||||
context: context,
|
||||
mode: .storyEditor,
|
||||
subject: subject,
|
||||
@ -2695,7 +2695,7 @@ public final class SharedAccountContextImpl: SharedAccountContext {
|
||||
return nil
|
||||
}, completion: { result, commit in
|
||||
completion(result, commit)
|
||||
} as (MediaEditorScreen.Result, @escaping (@escaping () -> Void) -> Void) -> Void
|
||||
} as (MediaEditorScreenImpl.Result, @escaping (@escaping () -> Void) -> Void) -> Void
|
||||
)
|
||||
// editorController.cancelled = { _ in
|
||||
// cancelled()
|
||||
@ -2707,8 +2707,8 @@ public final class SharedAccountContextImpl: SharedAccountContext {
return mediaPickerController(context: context, hasSearch: hasSearch, completion: completion)
}

public func makeStoryMediaPickerScreen(context: AccountContext, isDark: Bool, getSourceRect: @escaping () -> CGRect, completion: @escaping (Any, UIView, CGRect, UIImage?, @escaping (Bool?) -> (UIView, CGRect)?, @escaping () -> Void) -> Void, dismissed: @escaping () -> Void, groupsPresented: @escaping () -> Void) -> ViewController {
return storyMediaPickerController(context: context, isDark: isDark, getSourceRect: getSourceRect, completion: completion, dismissed: dismissed, groupsPresented: groupsPresented)
public func makeStoryMediaPickerScreen(context: AccountContext, isDark: Bool, forCollage: Bool, getSourceRect: @escaping () -> CGRect, completion: @escaping (Any, UIView, CGRect, UIImage?, @escaping (Bool?) -> (UIView, CGRect)?, @escaping () -> Void) -> Void, dismissed: @escaping () -> Void, groupsPresented: @escaping () -> Void) -> ViewController {
return storyMediaPickerController(context: context, isDark: isDark, forCollage: forCollage, getSourceRect: getSourceRect, completion: completion, dismissed: dismissed, groupsPresented: groupsPresented)
}
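For reference, a hedged sketch of how a caller might invoke the extended factory; only the method signature is taken from this diff, everything else (context, the closure bodies) is placeholder:

let picker = context.sharedContext.makeStoryMediaPickerScreen(
    context: context,
    isDark: true,
    forCollage: true, // new flag: configure the picker for selecting collage items
    getSourceRect: { .zero },
    completion: { asset, sourceView, sourceRect, sourceImage, transitionOut, completed in
        // hand the selected media off to the editor here
    },
    dismissed: {},
    groupsPresented: {}
)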

public func makeStickerMediaPickerScreen(context: AccountContext, getSourceRect: @escaping () -> CGRect?, completion: @escaping (Any?, UIView?, CGRect, UIImage?, Bool, @escaping (Bool?) -> (UIView, CGRect)?, @escaping () -> Void) -> Void, dismissed: @escaping () -> Void) -> ViewController {

@ -319,12 +319,12 @@ public final class TelegramRootController: NavigationController, TelegramRootCon
var returnToCameraImpl: (() -> Void)?
var dismissCameraImpl: (() -> Void)?
var showDraftTooltipImpl: (() -> Void)?
let cameraController = CameraScreen(
let cameraController = CameraScreenImpl(
context: context,
mode: .story,
transitionIn: transitionIn.flatMap {
if let sourceView = $0.sourceView {
return CameraScreen.TransitionIn(
return CameraScreenImpl.TransitionIn(
sourceView: sourceView,
sourceRect: $0.sourceRect,
sourceCornerRadius: $0.sourceCornerRadius
@ -335,7 +335,7 @@ public final class TelegramRootController: NavigationController, TelegramRootCon
},
transitionOut: { finished in
if let transitionOut = (externalState.transitionOut ?? transitionOut)(finished ? externalState.storyTarget : nil, externalState.isPeerArchived), let destinationView = transitionOut.destinationView {
return CameraScreen.TransitionOut(
return CameraScreenImpl.TransitionOut(
destinationView: destinationView,
destinationRect: transitionOut.destinationRect,
destinationCornerRadius: transitionOut.destinationCornerRadius,
@ -346,9 +346,9 @@ public final class TelegramRootController: NavigationController, TelegramRootCon
}
},
completion: { result, resultTransition, dismissed in
let subject: Signal<MediaEditorScreen.Subject?, NoError> = result
|> map { value -> MediaEditorScreen.Subject? in
func editorPIPPosition(_ position: CameraScreen.PIPPosition) -> MediaEditorScreen.PIPPosition {
let subject: Signal<MediaEditorScreenImpl.Subject?, NoError> = result
|> map { value -> MediaEditorScreenImpl.Subject? in
func editorPIPPosition(_ position: CameraScreenImpl.PIPPosition) -> MediaEditorScreenImpl.PIPPosition {
switch position {
case .topLeft:
return .topLeft
@ -364,9 +364,23 @@ public final class TelegramRootController: NavigationController, TelegramRootCon
case .pendingImage:
return nil
case let .image(image):
return .image(image.image, PixelDimensions(image.image.size), image.additionalImage, editorPIPPosition(image.additionalImagePosition))
return .image(image: image.image, dimensions: PixelDimensions(image.image.size), additionalImage: image.additionalImage, additionalImagePosition: editorPIPPosition(image.additionalImagePosition))
case let .video(video):
return .video(video.videoPath, video.coverImage, video.mirror, video.additionalVideoPath, video.additionalCoverImage, video.dimensions, video.duration, video.positionChangeTimestamps, editorPIPPosition(video.additionalVideoPosition))
return .video(videoPath: video.videoPath, thumbnail: video.coverImage, mirror: video.mirror, additionalVideoPath: video.additionalVideoPath, additionalThumbnail: video.additionalCoverImage, dimensions: video.dimensions, duration: video.duration, videoPositionChanges: video.positionChangeTimestamps, additionalVideoPosition: editorPIPPosition(video.additionalVideoPosition))
case let .videoCollage(collage):
func editorCollageItem(_ item: CameraScreenImpl.Result.VideoCollage.Item) -> MediaEditorScreenImpl.Subject.VideoCollageItem {
let content: MediaEditorScreenImpl.Subject.VideoCollageItem.Content
switch item.content {
case let .image(image):
content = .image(image)
case let .video(path, duration):
content = .video(path, duration)
case let .asset(asset):
content = .asset(asset)
}
return MediaEditorScreenImpl.Subject.VideoCollageItem(content: content, frame: item.frame)
}
return .videoCollage(items: collage.items.map { editorCollageItem($0) })
case let .asset(asset):
return .asset(asset)
case let .draft(draft):
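The new .videoCollage case is the heart of this commit: each collage slot pairs a piece of content with the frame it occupies. The destination type is not shown in this diff, but from the way it is constructed above it plausibly looks like the following sketch (the PHAsset payload for .asset is an assumption):

import Photos
import UIKit

struct VideoCollageItem {
    enum Content {
        case image(UIImage)
        case video(String, Double) // path and duration, as mapped above
        case asset(PHAsset)        // assumption: library items arrive as PHAsset
    }
    let content: Content
    let frame: CGRect              // the slot this item occupies on the collage canvas
}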
@ -374,10 +388,10 @@ public final class TelegramRootController: NavigationController, TelegramRootCon
}
}

var transitionIn: MediaEditorScreen.TransitionIn?
var transitionIn: MediaEditorScreenImpl.TransitionIn?
if let resultTransition, let sourceView = resultTransition.sourceView {
transitionIn = .gallery(
MediaEditorScreen.TransitionIn.GalleryTransitionIn(
MediaEditorScreenImpl.TransitionIn.GalleryTransitionIn(
sourceView: sourceView,
sourceRect: resultTransition.sourceRect,
sourceImage: resultTransition.sourceImage
@ -398,7 +412,7 @@ public final class TelegramRootController: NavigationController, TelegramRootCon
}
}

let controller = MediaEditorScreen(
let controller = MediaEditorScreenImpl(
context: context,
mode: .storyEditor,
subject: subject,
@ -406,14 +420,14 @@ public final class TelegramRootController: NavigationController, TelegramRootCon
transitionIn: transitionIn,
transitionOut: { finished, isNew in
if finished, let transitionOut = (externalState.transitionOut ?? transitionOut)(externalState.storyTarget, false), let destinationView = transitionOut.destinationView {
return MediaEditorScreen.TransitionOut(
return MediaEditorScreenImpl.TransitionOut(
destinationView: destinationView,
destinationRect: transitionOut.destinationRect,
destinationCornerRadius: transitionOut.destinationCornerRadius,
completion: transitionOut.completion
)
} else if !finished, let resultTransition, let (destinationView, destinationRect) = resultTransition.transitionOut(isNew) {
return MediaEditorScreen.TransitionOut(
return MediaEditorScreenImpl.TransitionOut(
destinationView: destinationView,
destinationRect: destinationRect,
destinationCornerRadius: 0.0,
@ -469,7 +483,7 @@ public final class TelegramRootController: NavigationController, TelegramRootCon
dismissCameraImpl?()
})
}
} as (MediaEditorScreen.Result, @escaping (@escaping () -> Void) -> Void) -> Void
} as (MediaEditorScreenImpl.Result, @escaping (@escaping () -> Void) -> Void) -> Void
)
controller.cancelled = { showDraftTooltip in
if showDraftTooltip {
@ -525,7 +539,7 @@ public final class TelegramRootController: NavigationController, TelegramRootCon
}

public func proceedWithStoryUpload(target: Stories.PendingTarget, result: MediaEditorScreenResult, existingMedia: EngineMedia?, forwardInfo: Stories.PendingForwardInfo?, externalState: MediaEditorTransitionOutExternalState, commit: @escaping (@escaping () -> Void) -> Void) {
guard let result = result as? MediaEditorScreen.Result else {
guard let result = result as? MediaEditorScreenImpl.Result else {
return
}
let context = self.context
@ -734,7 +748,7 @@ public final class TelegramRootController: NavigationController, TelegramRootCon

//Xcode 16
#if canImport(ContactProvider)
extension MediaEditorScreen.Result: @retroactive MediaEditorScreenResult {
extension MediaEditorScreenImpl.Result: @retroactive MediaEditorScreenResult {
public var target: Stories.PendingTarget {
if let sendAsPeerId = self.options.sendAsPeerId {
return .peer(sendAsPeerId)
@ -744,7 +758,7 @@ extension MediaEditorScreen.Result: @retroactive MediaEditorScreenResult {
}
}
#else
extension MediaEditorScreen.Result: MediaEditorScreenResult {
extension MediaEditorScreenImpl.Result: MediaEditorScreenResult {
public var target: Stories.PendingTarget {
if let sendAsPeerId = self.options.sendAsPeerId {
return .peer(sendAsPeerId)
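The duplicated extension guarded by #if canImport(ContactProvider) is a compiler-version gate: @retroactive (Swift 6, shipped with Xcode 16) marks a deliberate conformance of a type from one module to a protocol from another, while older compilers reject the attribute outright. There is no #if check for SDK versions, so canImport of an iOS 18-only framework appears to stand in for "building with Xcode 16", as the //Xcode 16 comment hints. A sketch of the pattern, assuming EditorResult and ScreenResult are declared in different modules (in a single file the attribute would be rejected as non-retroactive):

#if canImport(ContactProvider) // proxy for the iOS 18 SDK / Swift 6 toolchain
extension EditorResult: @retroactive ScreenResult {}
#else
extension EditorResult: ScreenResult {}
#endif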
@ -461,7 +461,7 @@ public final class WebAppMessagePreviewScreen: ViewControllerComponentContainer
fileprivate func proceed() {
let requestPeerType = self.preparedMessage.peerTypes.requestPeerTypes

let controller = self.context.sharedContext.makePeerSelectionController(PeerSelectionControllerParams(context: self.context, filter: [.excludeRecent, .doNotSearchMessages], requestPeerType: requestPeerType, hasContactSelector: false, multipleSelection: true, immediatelyActivateMultipleSelection: true))
let controller = self.context.sharedContext.makePeerSelectionController(PeerSelectionControllerParams(context: self.context, filter: [.excludeRecent, .doNotSearchMessages], requestPeerType: requestPeerType, hasContactSelector: false, multipleSelection: true, selectForumThreads: true, immediatelyActivateMultipleSelection: true))

controller.multiplePeersSelected = { [weak self, weak controller] peers, _, _, _, _, _ in
guard let self else {