Mirror of https://github.com/Swiftgram/Telegram-iOS.git (synced 2025-06-16 05:55:20 +00:00)

commit 4bf474bbe5
Merge commit '5ecb819963619cdf0d4f9cadfff7380247303743'
@@ -9,13 +9,6 @@ import ComponentFlow
 import LottieAnimationComponent
 import ReactionSelectionNode
 
-public func decodeDrawingEntities(data: Data) -> [DrawingEntity] {
-    if let codableEntities = try? JSONDecoder().decode([CodableDrawingEntity].self, from: data) {
-        return codableEntities.map { $0.entity }
-    }
-    return []
-}
-
 private func makeEntityView(context: AccountContext, entity: DrawingEntity) -> DrawingEntityView? {
     if let entity = entity as? DrawingBubbleEntity {
         return DrawingBubbleEntityView(context: context, entity: entity)
@@ -3170,6 +3170,7 @@ public final class DrawingToolsInteraction {
             } else {
                 if self.isVideo {
                     entityView.seek(to: 0.0)
+                    entityView.play()
                 }
                 
                 entityView.animateInsertion()
@@ -34,6 +34,9 @@ typedef enum
 
 - (CMTimeRange)trimTimeRange;
 
+- (NSDictionary *)tintValue;
+- (NSDictionary *)curvesValue;
+
 - (bool)trimApplied;
 
 - (bool)isCropAndRotationEqualWith:(id<TGMediaEditAdjustments>)adjustments;
@@ -19,8 +19,15 @@ void TGDispatchAfter(double delay, dispatch_queue_t queue, dispatch_block_t bloc
 
 int deviceMemorySize();
 int cpuCoreCount();
 
+@interface UIColor (Int32)
+
+- (int32_t)int32Value;
+
+@end
+
 #define UIColorRGB(rgb) ([[UIColor alloc] initWithRed:(((rgb >> 16) & 0xff) / 255.0f) green:(((rgb >> 8) & 0xff) / 255.0f) blue:(((rgb) & 0xff) / 255.0f) alpha:1.0f])
+#define UIColorARGB(rgb) ([[UIColor alloc] initWithRed:(((rgb >> 16) & 0xff) / 255.0f) green:(((rgb >> 8) & 0xff) / 255.0f) blue:(((rgb) & 0xff) / 255.0f) alpha:(((rgb >> 24) & 0xff) / 255.0f)])
 #define UIColorRGBA(rgb,a) ([[UIColor alloc] initWithRed:(((rgb >> 16) & 0xff) / 255.0f) green:(((rgb >> 8) & 0xff) / 255.0f) blue:(((rgb) & 0xff) / 255.0f) alpha:a])
 
 #define TGRestrictedToMainThread {if(![[NSThread currentThread] isMainThread]) TGLegacyLog(@"***** Warning: main thread-bound operation is running in background! *****");}
@@ -144,3 +144,27 @@ NSString *TGComponentsPathForResource(NSString *name, NSString *type) {
     }
     return [bundle pathForResource:name ofType:type];
 }
+
+@implementation UIColor (Int32)
+
+- (int32_t)int32Value {
+    CGFloat red, green, blue, alpha;
+    if (![self getRed:&red green:&green blue:&blue alpha:&alpha]) {
+        if ([self getWhite:&red alpha:&alpha]) {
+            green = red;
+            blue = red;
+        } else {
+            red = green = blue = alpha = 1.0;
+        }
+    }
+    
+    int8_t r = (int8_t)(red * 255);
+    int8_t g = (int8_t)(green * 255);
+    int8_t b = (int8_t)(blue * 255);
+    int8_t a = (int8_t)(alpha * 255);
+    
+    int32_t intValue = (a << 24) | (r << 16) | (g << 8) | b;
+    return intValue;
+}
+
+@end
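
Editor's note: taken together, the new `-int32Value` category and the `UIColorARGB` macro define a 32-bit ARGB round trip for colors. Below is a minimal Swift sketch of the same packing; `packARGB`/`unpackARGB` are illustrative helpers, not part of the commit. The sketch masks each channel to 8 bits explicitly, since the `int8_t` intermediates in the Objective-C version sign-extend when promoted for the shifts.

import UIKit

// Pack a color into 0xAARRGGBB, mirroring -[UIColor int32Value].
// Masking with 0xff keeps each channel in its byte.
func packARGB(_ color: UIColor) -> Int32 {
    var r: CGFloat = 0, g: CGFloat = 0, b: CGFloat = 0, a: CGFloat = 0
    _ = color.getRed(&r, green: &g, blue: &b, alpha: &a) // grayscale fallback omitted in this sketch
    let ri = Int32(r * 255) & 0xff, gi = Int32(g * 255) & 0xff
    let bi = Int32(b * 255) & 0xff, ai = Int32(a * 255) & 0xff
    return (ai << 24) | (ri << 16) | (gi << 8) | bi
}

// Unpack, mirroring the UIColorARGB macro's shifts and masks.
func unpackARGB(_ value: Int32) -> UIColor {
    return UIColor(
        red: CGFloat((value >> 16) & 0xff) / 255.0,
        green: CGFloat((value >> 8) & 0xff) / 255.0,
        blue: CGFloat(value & 0xff) / 255.0,
        alpha: CGFloat((value >> 24) & 0xff) / 255.0
    )
}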
@@ -31,6 +31,9 @@ typedef enum
 
 @property (nonatomic, assign) PGCurvesType activeType;
 
+- (instancetype)initWithDictionary:(NSDictionary *)dictionary;
+- (NSDictionary *)dictionary;
+
 @end
 
 @interface PGCurvesTool : PGPhotoTool
@@ -116,6 +116,41 @@ const NSUInteger PGCurveDataStep = 2;
     return interpolatedPoints;
 }
 
+- (instancetype)initWithDictionary:(NSDictionary *)dictionary {
+    if (dictionary.count == 0) {
+        return nil;
+    }
+    
+    PGCurvesValue *value = [[PGCurvesValue alloc] init];
+    if (dictionary[@"blacks"]) {
+        value.blacksLevel = [dictionary[@"blacks"] floatValue];
+    }
+    if (dictionary[@"shadows"]) {
+        value.shadowsLevel = [dictionary[@"shadows"] floatValue];
+    }
+    if (dictionary[@"midtones"]) {
+        value.midtonesLevel = [dictionary[@"midtones"] floatValue];
+    }
+    if (dictionary[@"highlights"]) {
+        value.highlightsLevel = [dictionary[@"highlights"] floatValue];
+    }
+    if (dictionary[@"whites"]) {
+        value.whitesLevel = [dictionary[@"whites"] floatValue];
+    }
+    
+    return value;
+}
+
+- (NSDictionary *)dictionary {
+    return @{
+        @"blacks": @(self.blacksLevel),
+        @"shadows": @(self.shadowsLevel),
+        @"midtones": @(self.midtonesLevel),
+        @"highlights": @(self.highlightsLevel),
+        @"whites": @(self.whitesLevel)
+    };
+}
+
 @end
 
 @implementation PGCurvesToolValue
@@ -156,6 +191,36 @@ const NSUInteger PGCurveDataStep = 2;
     return value;
 }
 
+- (instancetype)initWithDictionary:(NSDictionary *)dictionary {
+    if (dictionary.count == 0) {
+        return nil;
+    }
+    
+    PGCurvesToolValue *value = [[PGCurvesToolValue alloc] init];
+    if (dictionary[@"luminance"]) {
+        value.luminanceCurve = [[PGCurvesValue alloc] initWithDictionary:dictionary[@"luminance"]];
+    }
+    if (dictionary[@"red"]) {
+        value.redCurve = [[PGCurvesValue alloc] initWithDictionary:dictionary[@"red"]];
+    }
+    if (dictionary[@"green"]) {
+        value.greenCurve = [[PGCurvesValue alloc] initWithDictionary:dictionary[@"green"]];
+    }
+    if (dictionary[@"blue"]) {
+        value.blueCurve = [[PGCurvesValue alloc] initWithDictionary:dictionary[@"blue"]];
+    }
+    return value;
+}
+
+- (NSDictionary *)dictionary {
+    return @{
+        @"luminance": self.luminanceCurve.dictionary,
+        @"red": self.redCurve.dictionary,
+        @"green": self.greenCurve.dictionary,
+        @"blue": self.blueCurve.dictionary
+    };
+}
+
 @end
 
 
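
Editor's note: the two `dictionary` methods produce a nested, plist-friendly payload: the tool value keyed by channel, each channel a flat map of level keys. A sketch of the resulting shape in Swift (key names are from the diff; the literal values are illustrative). Note that `initWithDictionary:` returns nil for an empty dictionary, so absent tools round-trip as absent.

// Illustrative only: the nested shape produced by
// -[PGCurvesToolValue dictionary] / -[PGCurvesValue dictionary].
let curvesPayload: [String: [String: Float]] = [
    "luminance": ["blacks": 0.0, "shadows": 0.25, "midtones": 0.5, "highlights": 0.75, "whites": 1.0],
    "red":       ["blacks": 0.0, "shadows": 0.25, "midtones": 0.5, "highlights": 0.75, "whites": 1.0],
    "green":     ["blacks": 0.0, "shadows": 0.25, "midtones": 0.5, "highlights": 0.75, "whites": 1.0],
    "blue":      ["blacks": 0.0, "shadows": 0.25, "midtones": 0.5, "highlights": 0.75, "whites": 1.0],
]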
@@ -49,6 +49,11 @@
         [(PGPhotoSkinPass *)_pass setIntensity:value.floatValue / 100];
 }
 
+- (bool)isAvialableForVideo
+{
+    return false;
+}
+
 - (bool)requiresFaces
 {
     return true;
@@ -2,13 +2,16 @@
 
 @interface PGTintToolValue : NSObject <PGCustomToolValue>
 
-@property (nonatomic, assign) UIColor *shadowsColor;
-@property (nonatomic, assign) UIColor *highlightsColor;
+@property (nonatomic, strong) UIColor *shadowsColor;
+@property (nonatomic, strong) UIColor *highlightsColor;
 @property (nonatomic, assign) CGFloat shadowsIntensity;
 @property (nonatomic, assign) CGFloat highlightsIntensity;
 
 @property (nonatomic, assign) bool editingHighlights;
 
+- (instancetype)initWithDictionary:(NSDictionary *)dictionary;
+- (NSDictionary *)dictionary;
+
 @end
 
 @interface PGTintTool : PGPhotoTool
@@ -62,6 +62,36 @@
     return value;
 }
 
+- (instancetype)initWithDictionary:(NSDictionary *)dictionary {
+    if (dictionary.count == 0) {
+        return nil;
+    }
+    
+    PGTintToolValue *value = [[PGTintToolValue alloc] init];
+    if (dictionary[@"shadowsColor"]) {
+        value.shadowsColor = UIColorARGB([dictionary[@"shadowsColor"] intValue]);
+    }
+    if (dictionary[@"shadowsIntensity"]) {
+        value.shadowsIntensity = [dictionary[@"shadowsIntensity"] floatValue];
+    }
+    if (dictionary[@"highlightsColor"]) {
+        value.highlightsColor = UIColorARGB([dictionary[@"highlightsColor"] intValue]);
+    }
+    if (dictionary[@"highlightsIntensity"]) {
+        value.highlightsIntensity = [dictionary[@"highlightsIntensity"] floatValue];
+    }
+    return value;
+}
+
+- (NSDictionary *)dictionary {
+    return @{
+        @"shadowsColor": @(self.shadowsColor.int32Value),
+        @"shadowsIntensity": @(self.shadowsIntensity),
+        @"highlightsColor": @(self.highlightsColor.int32Value),
+        @"highlightsIntensity": @(self.highlightsIntensity)
+    };
+}
+
 @end
 
 
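
Editor's note: `PGTintToolValue` persists its colors through the packed-int round trip defined earlier, `int32Value` on the way out and `UIColorARGB` on the way back in. A short sketch of the equivalent round trip, reusing the illustrative `packARGB`/`unpackARGB` helpers from the earlier note:

// Round trip a tint color the way -dictionary / -initWithDictionary: do.
let shadows = UIColor(red: 0.2, green: 0.4, blue: 0.6, alpha: 1.0)
let stored = packARGB(shadows)    // persisted as an NSNumber in the dictionary
let restored = unpackARGB(stored) // what UIColorARGB reconstructs
// Each channel is quantized to 8 bits, so the round trip is exact to 1/255.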
@@ -421,7 +421,8 @@
     _videoDimensions = item.dimensions;
     
     if (_entitiesView == nil) {
-        _entitiesView = [item.stickersContext drawingEntitiesViewWithSize:item.dimensions];
+        CGSize maxSize = CGSizeMake(1920.0, 1920.0);
+        _entitiesView = [item.stickersContext drawingEntitiesViewWithSize:TGFitSize(item.dimensions, maxSize)];
         _entitiesView.hidden = true;
         _entitiesView.userInteractionEnabled = false;
         [_contentWrapperView addSubview:_entitiesView];
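
Editor's note: the drawing-entities view is now capped at 1920x1920 via `TGFitSize` from LegacyComponents. A sketch of the fit, assuming the usual aspect-fit-downscale semantics (an assumption about `TGFitSize`, hence the local `fitSize` stand-in):

import CoreGraphics

// Aspect-fit `size` into `maxSize`; only scales down, never up (assumed behavior).
func fitSize(_ size: CGSize, _ maxSize: CGSize) -> CGSize {
    if size.width <= maxSize.width && size.height <= maxSize.height {
        return size
    }
    let scale = min(maxSize.width / size.width, maxSize.height / size.height)
    return CGSize(width: round(size.width * scale), height: round(size.height * scale))
}

// A 4K video's drawing surface stays within the 1920-point cap:
let capped = fitSize(CGSize(width: 3840, height: 2160), CGSize(width: 1920, height: 1920))
// capped == CGSize(width: 1920, height: 1080)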
@@ -755,7 +756,7 @@
     if (self.bounds.size.width > self.bounds.size.height)
         _containerView.frame = self.bounds;
     else
-        _containerView.frame = CGRectMake(0, 0, self.bounds.size.width, self.bounds.size.height - 44.0);
+        _containerView.frame = CGRectMake(0, 0, self.bounds.size.width, self.bounds.size.height);
     
     [self _layoutPlayerView];
     
@@ -812,7 +813,6 @@
     _paintingImageView.frame = _imageView.frame;
     _videoView.frame = _imageView.frame;
     
-    
     CGSize originalSize = self.item.asset.originalSize;
     
     CGSize rotatedCropSize = cropRect.size;
@@ -831,8 +831,10 @@
     _contentWrapperView.frame = CGRectMake(0.0f, 0.0f, _contentView.bounds.size.width, _contentView.bounds.size.height);
     
     CGRect rect = [TGPhotoDrawingController fittedCropRect:cropRect originalSize:originalSize keepOriginalSize:true];
-    _entitiesView.frame = CGRectMake(0, 0, rect.size.width, rect.size.height);
-    _entitiesView.transform = CGAffineTransformMakeRotation(0.0);
+    _entitiesView.bounds = CGRectMake(0, 0, rect.size.width, rect.size.height);
+    _entitiesView.center = CGPointMake(_contentWrapperView.bounds.size.width / 2.0, _contentWrapperView.bounds.size.height / 2.0);
+    CGFloat entitiesScale = _contentWrapperView.bounds.size.width / rect.size.width;
+    _entitiesView.transform = CGAffineTransformMakeScale(entitiesScale, entitiesScale);
     
     CGSize fittedOriginalSize = TGScaleToSize(originalSize, [TGPhotoDrawingController maximumPaintingSize]);
     CGSize rotatedSize = TGRotatedContentSize(fittedOriginalSize, 0.0);
@@ -949,10 +951,12 @@
     CGSize originalSize = _videoDimensions;
     CGRect cropRect = CGRectMake(0, 0, _videoDimensions.width, _videoDimensions.height);
     UIImageOrientation cropOrientation = UIImageOrientationUp;
+    bool cropMirrored = false;
     if (adjustments != nil)
     {
         cropRect = adjustments.cropRect;
         cropOrientation = adjustments.cropOrientation;
+        cropMirrored = adjustments.cropMirrored;
     }
     
     CGContextConcatCTM(UIGraphicsGetCurrentContext(), TGVideoCropTransformForOrientation(cropOrientation, _playerWrapperView.bounds.size, false));
@@ -786,7 +786,7 @@
 
 + (SSignal *)hashForAVAsset:(AVAsset *)avAsset adjustments:(TGMediaVideoEditAdjustments *)adjustments
 {
-    if ([adjustments trimApplied] || [adjustments cropAppliedForAvatar:false] || adjustments.sendAsGif)
+    if ([adjustments trimApplied] || [adjustments cropAppliedForAvatar:false] || adjustments.sendAsGif || [adjustments toolsApplied] || [adjustments hasPainting])
         return [SSignal single:nil];
     
     NSURL *fileUrl = nil;
@@ -413,9 +413,9 @@ NSString * const TGPhotoCropOriginalAspectRatio = @"original";
     if (saving)
     {
         CGFloat containerHeight = self.view.frame.size.height;
-        if (_forVideo && self.view.frame.size.width < self.view.frame.size.height) {
-            containerHeight -= 44.0;
-        }
+        // if (_forVideo && self.view.frame.size.width < self.view.frame.size.height) {
+        //     containerHeight -= 44.0;
+        // }
         CGSize fittedSize = TGScaleToSize(snapshotView.frame.size, self.view.frame.size);
         targetFrame = CGRectMake((self.view.frame.size.width - fittedSize.width) / 2,
                                  (containerHeight - fittedSize.height) / 2,
@@ -451,12 +451,7 @@ const NSTimeInterval TGPhotoQualityPreviewDuration = 15.0f;
 
 - (CGRect)transitionOutSourceFrameForReferenceFrame:(CGRect)referenceFrame orientation:(UIInterfaceOrientation)orientation
 {
-    bool hasOnScreenNavigation = false;
-    if (@available(iOS 11.0, *)) {
-        hasOnScreenNavigation = (self.viewLoaded && self.view.safeAreaInsets.bottom > FLT_EPSILON) || self.context.safeAreaInset.bottom > FLT_EPSILON;
-    }
-    
-    CGRect containerFrame = [TGPhotoQualityController photoContainerFrameForParentViewFrame:self.view.frame toolbarLandscapeSize:self.toolbarLandscapeSize orientation:orientation panelSize:TGPhotoEditorQualityPanelSize hasOnScreenNavigation:hasOnScreenNavigation];
+    CGRect containerFrame = [TGPhotoQualityController photoContainerFrameForParentViewFrame:self.view.frame toolbarLandscapeSize:self.toolbarLandscapeSize orientation:orientation panelSize:TGPhotoEditorQualityPanelSize hasOnScreenNavigation:self.hasOnScreenNavigation];
     CGSize fittedSize = TGScaleToSize(referenceFrame.size, containerFrame.size);
     CGRect sourceFrame = CGRectMake(containerFrame.origin.x + (containerFrame.size.width - fittedSize.width) / 2, containerFrame.origin.y + (containerFrame.size.height - fittedSize.height) / 2, fittedSize.width, fittedSize.height);
 
@@ -466,20 +461,7 @@ const NSTimeInterval TGPhotoQualityPreviewDuration = 15.0f;
 - (CGRect)_targetFrameForTransitionInFromFrame:(CGRect)fromFrame
 {
     CGSize referenceSize = [self referenceViewSize];
-#pragma clang diagnostic push
-#pragma clang diagnostic ignored "-Wdeprecated-declarations"
-    UIInterfaceOrientation orientation = self.interfaceOrientation;
-#pragma clang diagnostic pop
-    
-    if ([UIDevice currentDevice].userInterfaceIdiom == UIUserInterfaceIdiomPad)
-        orientation = UIInterfaceOrientationPortrait;
-    
-    bool hasOnScreenNavigation = false;
-    if (@available(iOS 11.0, *)) {
-        hasOnScreenNavigation = (self.viewLoaded && self.view.safeAreaInsets.bottom > FLT_EPSILON) || self.context.safeAreaInset.bottom > FLT_EPSILON;
-    }
-    
-    CGRect containerFrame = [TGPhotoQualityController photoContainerFrameForParentViewFrame:CGRectMake(0, 0, referenceSize.width, referenceSize.height) toolbarLandscapeSize:self.toolbarLandscapeSize orientation:orientation panelSize:TGPhotoEditorQualityPanelSize hasOnScreenNavigation:hasOnScreenNavigation];
+    CGRect containerFrame = [TGPhotoQualityController photoContainerFrameForParentViewFrame:CGRectMake(0, 0, referenceSize.width, referenceSize.height) toolbarLandscapeSize:self.toolbarLandscapeSize orientation:self.effectiveOrientation panelSize:TGPhotoEditorQualityPanelSize hasOnScreenNavigation:self.hasOnScreenNavigation];
     CGSize fittedSize = TGScaleToSize(fromFrame.size, containerFrame.size);
     CGRect toFrame = CGRectMake(containerFrame.origin.x + (containerFrame.size.width - fittedSize.width) / 2, containerFrame.origin.y + (containerFrame.size.height - fittedSize.height) / 2, fittedSize.width, fittedSize.height);
 
@@ -9,6 +9,9 @@
 #import "TGPhotoPaintStickerEntity.h"
 #import "TGPhotoPaintTextEntity.h"
 
+#import "PGTintTool.h"
+#import "PGCurvesTool.h"
+
 const NSTimeInterval TGVideoEditMinimumTrimmableDuration = 1.5;
 const NSTimeInterval TGVideoEditMaximumGifDuration = 30.5;
 
|
|||||||
id value = dictionary[@"tools"][key];
|
id value = dictionary[@"tools"][key];
|
||||||
if ([value isKindOfClass:[NSNumber class]]) {
|
if ([value isKindOfClass:[NSNumber class]]) {
|
||||||
tools[key] = value;
|
tools[key] = value;
|
||||||
|
} else if ([value isKindOfClass:[NSDictionary class]]) {
|
||||||
|
if ([key isEqualToString:@"tint"]) {
|
||||||
|
PGTintToolValue *tintValue = [[PGTintToolValue alloc] initWithDictionary:value];
|
||||||
|
if (tintValue != nil) {
|
||||||
|
tools[key] = tintValue;
|
||||||
|
}
|
||||||
|
} else if ([key isEqualToString:@"curves"]) {
|
||||||
|
PGCurvesToolValue *curvesValues = [[PGCurvesToolValue alloc] initWithDictionary:value];
|
||||||
|
if (curvesValues != nil) {
|
||||||
|
tools[key] = curvesValues;
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
adjustments->_toolValues = tools;
|
adjustments->_toolValues = tools;
|
||||||
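
Editor's note: scalar tools stay `NSNumber`s in the persisted `tools` dictionary, while `tint` and `curves` are now stored as nested dictionaries and rehydrated into typed values by this parser. A sketch of a payload it would accept; the key names come from the diff, the scalar key and all values are illustrative:

// Illustrative "tools" entry of a serialized adjustments dictionary:
// scalars stay numbers, tint/curves become nested dictionaries.
let tools: [String: Any] = [
    "exposure": 50.0,                    // hypothetical scalar tool value
    "tint": [
        "shadowsColor": 0xFF336699,      // packed ARGB int
        "shadowsIntensity": 40.0,
        "highlightsColor": 0xFF996633,
        "highlightsIntensity": 20.0,
    ],
    "curves": [
        "luminance": ["blacks": 0.0, "shadows": 0.25, "midtones": 0.5,
                      "highlights": 0.75, "whites": 1.0],
    ],
]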
@@ -250,6 +265,10 @@ const NSTimeInterval TGVideoEditMaximumGifDuration = 30.5;
         id value = self.toolValues[key];
         if ([value isKindOfClass:[NSNumber class]]) {
             tools[key] = value;
+        } else if ([value isKindOfClass:[PGTintToolValue class]]) {
+            tools[key] = ((PGTintToolValue *)value).dictionary;
+        } else if ([value isKindOfClass:[PGCurvesToolValue class]]) {
+            tools[key] = ((PGCurvesToolValue *)value).dictionary;
         }
     }
     dict[@"tools"] = tools;
@@ -318,6 +337,70 @@ const NSTimeInterval TGVideoEditMaximumGifDuration = 30.5;
     return CMTimeRangeMake(CMTimeMakeWithSeconds(self.trimStartValue , NSEC_PER_SEC), CMTimeMakeWithSeconds((self.trimEndValue - self.trimStartValue), NSEC_PER_SEC));
 }
 
+- (NSDictionary *)tintValue {
+    PGTintToolValue *tintValue = self.toolValues[@"tint"];
+    if (tintValue != nil) {
+        return @{
+            @"shadowsColor": tintValue.shadowsColor,
+            @"shadowsIntensity": @(tintValue.shadowsIntensity),
+            @"highlightsColor": tintValue.highlightsColor,
+            @"highlightsIntensity": @(tintValue.highlightsIntensity)
+        };
+    } else {
+        return nil;
+    }
+}
+
+- (NSDictionary *)curvesValue {
+    PGCurvesToolValue *curvesValue = self.toolValues[@"curves"];
+    if (curvesValue != nil) {
+        NSMutableDictionary *result = [[NSMutableDictionary alloc] init];
+        PGCurvesValue *luminanceCurve = curvesValue.luminanceCurve;
+        if (luminanceCurve != nil) {
+            result[@"luminance"] = @{
+                @"blacks": @(luminanceCurve.blacksLevel),
+                @"shadows": @(luminanceCurve.shadowsLevel),
+                @"midtones": @(luminanceCurve.midtonesLevel),
+                @"highlights": @(luminanceCurve.highlightsLevel),
+                @"whites": @(luminanceCurve.whitesLevel)
+            };
+        }
+        PGCurvesValue *redCurve = curvesValue.redCurve;
+        if (redCurve != nil) {
+            result[@"red"] = @{
+                @"blacks": @(redCurve.blacksLevel),
+                @"shadows": @(redCurve.shadowsLevel),
+                @"midtones": @(redCurve.midtonesLevel),
+                @"highlights": @(redCurve.highlightsLevel),
+                @"whites": @(redCurve.whitesLevel)
+            };
+        }
+        PGCurvesValue *greenCurve = curvesValue.greenCurve;
+        if (greenCurve != nil) {
+            result[@"green"] = @{
+                @"blacks": @(greenCurve.blacksLevel),
+                @"shadows": @(greenCurve.shadowsLevel),
+                @"midtones": @(greenCurve.midtonesLevel),
+                @"highlights": @(greenCurve.highlightsLevel),
+                @"whites": @(greenCurve.whitesLevel)
+            };
+        }
+        PGCurvesValue *blueCurve = curvesValue.blueCurve;
+        if (blueCurve != nil) {
+            result[@"blue"] = @{
+                @"blacks": @(blueCurve.blacksLevel),
+                @"shadows": @(blueCurve.shadowsLevel),
+                @"midtones": @(blueCurve.midtonesLevel),
+                @"highlights": @(blueCurve.highlightsLevel),
+                @"whites": @(blueCurve.whitesLevel)
+            };
+        }
+        return result;
+    } else {
+        return nil;
+    }
+}
+
 - (bool)toolsApplied
 {
     if (self.toolValues.count > 0)
@@ -8,9 +8,9 @@ import AnimatedStickerNode
 import TelegramAnimatedStickerNode
 import YuvConversion
 import StickerResources
-import DrawingUI
 import SolidRoundedButtonNode
 import MediaEditor
+import DrawingUI
 
 protocol LegacyPaintEntity {
     var position: CGPoint { get }
@@ -178,6 +178,11 @@ public class LegacyMessageInputPanelNode: ASDisplayNode, TGCaptionPanelView {
             self.scheduledMessageInput = nil
         }
         
+        var hasTimer = self.chatLocation.peerId?.namespace == Namespaces.Peer.CloudUser && !self.isScheduledMessages
+        if self.chatLocation.peerId?.isRepliesOrSavedMessages(accountPeerId: self.context.account.peerId) == true {
+            hasTimer = false
+        }
+        
         self.inputPanel.parentState = self.state
         let inputPanelSize = self.inputPanel.update(
             transition: Transition(transition),
@@ -224,7 +229,7 @@ public class LegacyMessageInputPanelNode: ASDisplayNode, TGCaptionPanelView {
                     self.toggleInputMode()
                 }
             },
-            timeoutAction: self.chatLocation.peerId?.namespace == Namespaces.Peer.CloudUser && !self.isScheduledMessages ? { [weak self] sourceView, gesture in
+            timeoutAction: hasTimer ? { [weak self] sourceView, gesture in
                 if let self {
                     self.presentTimeoutSetup(sourceView: sourceView, gesture: gesture)
                 }
@@ -1,6 +1,17 @@
 import Foundation
 import TelegramCore
 
+public func decodeCodableDrawingEntities(data: Data) -> [CodableDrawingEntity] {
+    if let codableEntities = try? JSONDecoder().decode([CodableDrawingEntity].self, from: data) {
+        return codableEntities
+    }
+    return []
+}
+
+public func decodeDrawingEntities(data: Data) -> [DrawingEntity] {
+    return decodeCodableDrawingEntities(data: data).map { $0.entity }
+}
+
 public enum CodableDrawingEntity: Equatable {
     public static func == (lhs: CodableDrawingEntity, rhs: CodableDrawingEntity) -> Bool {
         return lhs.entity.isEqual(to: rhs.entity)
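
Editor's note: this mirrors the helper removed from DrawingUI in the first hunk; decoding now lives beside `CodableDrawingEntity` in MediaEditor, with a new intermediate exposing the codable wrappers themselves. A usage sketch, assuming `data` is a JSON-encoded `[CodableDrawingEntity]`:

import Foundation

// Decode persisted drawing entities from a JSON blob; both helpers
// fail soft and return [] on malformed input.
func restoreEntities(from data: Data) {
    let wrappers = decodeCodableDrawingEntities(data: data) // [CodableDrawingEntity]
    let entities = decodeDrawingEntities(data: data)        // unwrapped [DrawingEntity]
    print("restored \(wrappers.count) wrappers, \(entities.count) entities")
}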
@@ -304,10 +304,11 @@ public final class MediaEditor {
                 peerId: context.account.peerId,
                 originalDimensions: subject.dimensions,
                 cropOffset: .zero,
-                cropSize: nil,
+                cropRect: nil,
                 cropScale: 1.0,
                 cropRotation: 0.0,
                 cropMirroring: false,
+                cropOrientation: nil,
                 gradientColors: nil,
                 videoTrimRange: nil,
                 videoIsMuted: false,
@@ -325,7 +326,8 @@ public final class MediaEditor {
                 audioTrackTrimRange: nil,
                 audioTrackOffset: nil,
                 audioTrackVolume: nil,
-                audioTrackSamples: nil
+                audioTrackSamples: nil,
+                qualityPreset: nil
             )
         }
         self.valuesPromise.set(.single(self.values))
@@ -121,9 +121,9 @@ final class MediaEditorComposer {
         if let pixelBuffer {
             processImage(inputImage: ciImage, time: time, completion: { compositedImage in
                 if var compositedImage {
-                    let scale = self.outputDimensions.width / self.dimensions.width
+                    let scale = self.outputDimensions.width / compositedImage.extent.width
                     compositedImage = compositedImage.transformed(by: CGAffineTransform(scaleX: scale, y: scale))
                     
                     self.ciContext?.render(compositedImage, to: pixelBuffer)
                     completion(pixelBuffer)
                 } else {
@@ -160,9 +160,9 @@ final class MediaEditorComposer {
         if let pixelBuffer, let context = self.ciContext {
             makeEditorImageFrameComposition(context: context, inputImage: image, gradientImage: self.gradientImage, drawingImage: self.drawingImage, dimensions: self.dimensions, outputDimensions: self.outputDimensions, values: self.values, entities: self.entities, time: time, completion: { compositedImage in
                 if var compositedImage {
-                    let scale = self.outputDimensions.width / self.dimensions.width
+                    let scale = self.outputDimensions.width / compositedImage.extent.width
                     compositedImage = compositedImage.samplingLinear().transformed(by: CGAffineTransform(scaleX: scale, y: scale))
                     
                     self.ciContext?.render(compositedImage, to: pixelBuffer)
                     completion(pixelBuffer)
                 } else {
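
Editor's note: scaling by the composited image's actual extent rather than the nominal editor dimensions matters once non-story exports can crop the frame, because the extent after cropping no longer equals `dimensions`. A toy illustration of the difference, with assumed sizes:

import CoreGraphics

// Assumed sizes: editor canvas 1080 wide, output buffer 720 wide,
// but the composited extent was cropped to 900.
let outputWidth: CGFloat = 720
let editorWidth: CGFloat = 1080
let croppedExtentWidth: CGFloat = 900

let oldScale = outputWidth / editorWidth        // 0.666... would underfill the buffer
let newScale = outputWidth / croppedExtentWidth // 0.8     fills it exactly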
@@ -218,24 +218,38 @@ public func makeEditorImageComposition(context: CIContext, postbox: Postbox, inp
 
 private func makeEditorImageFrameComposition(context: CIContext, inputImage: CIImage, gradientImage: CIImage, drawingImage: CIImage?, dimensions: CGSize, outputDimensions: CGSize, values: MediaEditorValues, entities: [MediaEditorComposerEntity], time: CMTime, textScale: CGFloat = 1.0, completion: @escaping (CIImage?) -> Void) {
     var resultImage = CIImage(color: .black).cropped(to: CGRect(origin: .zero, size: dimensions)).transformed(by: CGAffineTransform(translationX: -dimensions.width / 2.0, y: -dimensions.height / 2.0))
-    resultImage = gradientImage.composited(over: resultImage)
+    if values.isStory {
+        resultImage = gradientImage.composited(over: resultImage)
+    }
     
     var mediaImage = inputImage.samplingLinear().transformed(by: CGAffineTransform(translationX: -inputImage.extent.midX, y: -inputImage.extent.midY))
     
     var initialScale: CGFloat
-    if mediaImage.extent.height > mediaImage.extent.width {
+    if mediaImage.extent.height > mediaImage.extent.width && values.isStory {
         initialScale = max(dimensions.width / mediaImage.extent.width, dimensions.height / mediaImage.extent.height)
     } else {
         initialScale = dimensions.width / mediaImage.extent.width
     }
     
-    var cropTransform = CGAffineTransform(translationX: values.cropOffset.x, y: values.cropOffset.y * -1.0)
-    cropTransform = cropTransform.rotated(by: -values.cropRotation)
-    cropTransform = cropTransform.scaledBy(x: initialScale * values.cropScale, y: initialScale * values.cropScale)
-    mediaImage = mediaImage.transformed(by: cropTransform)
-    resultImage = mediaImage.composited(over: resultImage)
+    if values.isStory {
+        var cropTransform: CGAffineTransform = CGAffineTransform(translationX: values.cropOffset.x, y: values.cropOffset.y * -1.0)
+        cropTransform = cropTransform.rotated(by: -values.cropRotation)
+        cropTransform = cropTransform.scaledBy(x: initialScale * values.cropScale, y: initialScale * values.cropScale)
+        mediaImage = mediaImage.transformed(by: cropTransform)
+        resultImage = mediaImage.composited(over: resultImage)
+    } else {
+        var horizontalScale = initialScale
+        if values.cropMirroring {
+            horizontalScale *= -1.0
+        }
+        mediaImage = mediaImage.transformed(by: CGAffineTransformMakeScale(horizontalScale, initialScale))
+        resultImage = mediaImage.composited(over: resultImage)
+    }
     
-    if let drawingImage {
+    if var drawingImage {
+        if values.isStory {
+            drawingImage = drawingImage.transformed(by: CGAffineTransformMakeScale(initialScale, initialScale))
+        }
         resultImage = drawingImage.samplingLinear().composited(over: resultImage)
     }
     
@@ -254,7 +268,26 @@ private func makeEditorImageFrameComposition(context: CIContext, inputImage: CII
     }
     
     resultImage = resultImage.transformed(by: CGAffineTransform(translationX: dimensions.width / 2.0, y: dimensions.height / 2.0))
-    resultImage = resultImage.cropped(to: CGRect(origin: .zero, size: dimensions))
+    if values.isStory {
+        resultImage = resultImage.cropped(to: CGRect(origin: .zero, size: dimensions))
+    } else {
+        let originalDimensions = values.originalDimensions.cgSize
+        var cropRect = values.cropRect ?? .zero
+        if cropRect.isEmpty {
+            cropRect = CGRect(origin: .zero, size: originalDimensions)
+        }
+        let scale = dimensions.width / originalDimensions.width
+        let scaledCropRect = CGRect(origin: CGPoint(x: cropRect.minX * scale, y: dimensions.height - cropRect.maxY * scale), size: CGSize(width: cropRect.width * scale, height: cropRect.height * scale))
+        resultImage = resultImage.cropped(to: scaledCropRect)
+        resultImage = resultImage.transformed(by: CGAffineTransformMakeTranslation(-scaledCropRect.minX, -scaledCropRect.minY))
+        
+        if let orientation = values.cropOrientation, orientation != .up {
+            let rotation = orientation.rotation
+            resultImage = resultImage.transformed(by: CGAffineTransformMakeTranslation(-resultImage.extent.width / 2.0, -resultImage.extent.height / 2.0))
+            resultImage = resultImage.transformed(by: CGAffineTransformMakeRotation(rotation))
+            resultImage = resultImage.transformed(by: CGAffineTransformMakeTranslation(resultImage.extent.width / 2.0, resultImage.extent.height / 2.0))
+        }
+    }
     completion(resultImage)
 }
 }
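
Editor's note: the `y` term in `scaledCropRect` converts the crop rectangle from UIKit's top-left origin into Core Image's bottom-left origin while scaling from original media space to the working dimensions. A standalone sketch of that conversion (the `ciCropRect` helper and the sizes are illustrative):

import CoreGraphics

// Convert a top-left-origin crop rect (UIKit) into a bottom-left-origin
// rect (Core Image), scaled from original media space to working space.
func ciCropRect(cropRect: CGRect, originalWidth: CGFloat, working: CGSize) -> CGRect {
    let scale = working.width / originalWidth
    return CGRect(x: cropRect.minX * scale,
                  y: working.height - cropRect.maxY * scale, // flip the vertical axis
                  width: cropRect.width * scale,
                  height: cropRect.height * scale)
}

// Assumed example: crop the top half of a 1000x2000 image, working size 500x1000.
let r = ciCropRect(cropRect: CGRect(x: 0, y: 0, width: 1000, height: 1000),
                   originalWidth: 1000, working: CGSize(width: 500, height: 1000))
// r == CGRect(x: 0, y: 500, width: 500, height: 500) — the upper half in CI coordinates.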
@@ -273,7 +306,6 @@ private func makeEditorImageFrameComposition(context: CIContext, inputImage: CII
     if let scale = entity.baseScale {
         baseScale = scale
     } else if let _ = entity.baseDrawingSize {
-        // baseScale = textScale
     } else if let baseSize = entity.baseSize {
         baseScale = baseSize.width / image.extent.width
     }
@@ -99,6 +99,130 @@ public struct MediaAudioTrackSamples: Equatable {
     }
 }
 
+public enum MediaQualityPreset: Int32 {
+    case compressedDefault
+    case compressedVeryLow
+    case compressedLow
+    case compressedMedium
+    case compressedHigh
+    case compressedVeryHigh
+    case animation
+    case videoMessage
+    case profileLow
+    case profile
+    case profileHigh
+    case profileVeryHigh
+    case passthrough
+    
+    var hasAudio: Bool {
+        switch self {
+        case .animation, .profileLow, .profile, .profileHigh, .profileVeryHigh:
+            return false
+        default:
+            return true
+        }
+    }
+    
+    var maximumDimensions: CGFloat {
+        switch self {
+        case .compressedVeryLow:
+            return 480.0
+        case .compressedLow:
+            return 640.0
+        case .compressedMedium:
+            return 848.0
+        case .compressedHigh:
+            return 1280.0
+        case .compressedVeryHigh:
+            return 1920.0
+        case .videoMessage:
+            return 384.0
+        case .profileLow:
+            return 720.0
+        case .profile, .profileHigh, .profileVeryHigh:
+            return 800.0
+        default:
+            return 848.0
+        }
+    }
+    
+    var videoBitrateKbps: Int {
+        switch self {
+        case .compressedVeryLow:
+            return 400
+        case .compressedLow:
+            return 700
+        case .compressedMedium:
+            return 1600
+        case .compressedHigh:
+            return 3000
+        case .compressedVeryHigh:
+            return 6600
+        case .videoMessage:
+            return 1000
+        case .profileLow:
+            return 1100
+        case .profile:
+            return 1500
+        case .profileHigh:
+            return 2000
+        case .profileVeryHigh:
+            return 2400
+        default:
+            return 900
+        }
+    }
+    
+    var audioBitrateKbps: Int {
+        switch self {
+        case .compressedVeryLow, .compressedLow:
+            return 32
+        case .compressedMedium, .compressedHigh, .compressedVeryHigh, .videoMessage:
+            return 64
+        default:
+            return 0
+        }
+    }
+    
+    var audioChannelsCount: Int {
+        switch self {
+        case .compressedVeryLow, .compressedLow:
+            return 1
+        default:
+            return 2
+        }
+    }
+}
+
+public enum MediaCropOrientation: Int32 {
+    case up
+    case down
+    case left
+    case right
+    
+    var rotation: CGFloat {
+        switch self {
+        case .up:
+            return 0.0
+        case .down:
+            return .pi
+        case .left:
+            return .pi / 2.0
+        case .right:
+            return -.pi / 2.0
+        }
+    }
+    
+    var isSideward: Bool {
+        switch self {
+        case .left, .right:
+            return true
+        default:
+            return false
+        }
+    }
+}
+
 public final class MediaEditorValues: Codable, Equatable {
     public static func == (lhs: MediaEditorValues, rhs: MediaEditorValues) -> Bool {
         if lhs.peerId != rhs.peerId {
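
Editor's note: each preset bundles the knobs an exporter needs, a dimension cap, video/audio bitrates, channel count, and whether audio is kept at all. A hedged sketch of how a consumer might turn a preset into encoder settings; the settings keys are standard AVFoundation, but the wiring is illustrative and not from this commit:

import AVFoundation

// Illustrative: derive AVAssetWriter-style video settings from a preset.
func videoSettings(for preset: MediaQualityPreset, source: CGSize) -> [String: Any] {
    let maxSide = preset.maximumDimensions
    let scale = min(1.0, maxSide / max(source.width, source.height)) // downscale only
    return [
        AVVideoCodecKey: AVVideoCodecType.h264,
        AVVideoWidthKey: Int(source.width * scale),
        AVVideoHeightKey: Int(source.height * scale),
        AVVideoCompressionPropertiesKey: [
            AVVideoAverageBitRateKey: preset.videoBitrateKbps * 1000,
        ],
    ]
}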
@@ -110,7 +234,7 @@ public final class MediaEditorValues: Codable, Equatable {
         if lhs.cropOffset != rhs.cropOffset {
             return false
         }
-        if lhs.cropSize != rhs.cropSize {
+        if lhs.cropRect != rhs.cropRect {
             return false
         }
         if lhs.cropScale != rhs.cropScale {
@@ -122,6 +246,9 @@ public final class MediaEditorValues: Codable, Equatable {
         if lhs.cropMirroring != rhs.cropMirroring {
             return false
         }
+        if lhs.cropOrientation != rhs.cropOrientation {
+            return false
+        }
         if lhs.gradientColors != rhs.gradientColors {
             return false
         }
@@ -207,45 +334,43 @@ public final class MediaEditorValues: Codable, Equatable {
 
     private enum CodingKeys: String, CodingKey {
         case peerId
         
         case originalWidth
         case originalHeight
         case cropOffset
-        case cropSize
+        case cropRect
         case cropScale
         case cropRotation
         case cropMirroring
+        case cropOrientation
         case gradientColors
         
         case videoTrimRange
         case videoIsMuted
         case videoIsFullHd
        case videoIsMirrored
         
         case additionalVideoPath
         case additionalVideoPosition
         case additionalVideoScale
         case additionalVideoRotation
         case additionalVideoPositionChanges
         
         case drawing
         case entities
         case toolValues
         
         case audioTrack
         case audioTrackTrimRange
         case audioTrackOffset
         case audioTrackVolume
+        case qualityPreset
     }
     
     public let peerId: EnginePeer.Id
     
     public let originalDimensions: PixelDimensions
     public let cropOffset: CGPoint
-    public let cropSize: CGSize?
+    public let cropRect: CGRect?
     public let cropScale: CGFloat
     public let cropRotation: CGFloat
     public let cropMirroring: Bool
+    public let cropOrientation: MediaCropOrientation?
     
     public let gradientColors: [UIColor]?
 
@@ -270,14 +395,21 @@ public final class MediaEditorValues: Codable, Equatable {
     public let audioTrackVolume: CGFloat?
     public let audioTrackSamples: MediaAudioTrackSamples?
     
-    init(
+    public let qualityPreset: MediaQualityPreset?
+    
+    var isStory: Bool {
+        return self.qualityPreset == nil
+    }
+    
+    public init(
         peerId: EnginePeer.Id,
         originalDimensions: PixelDimensions,
         cropOffset: CGPoint,
-        cropSize: CGSize?,
+        cropRect: CGRect?,
         cropScale: CGFloat,
         cropRotation: CGFloat,
         cropMirroring: Bool,
+        cropOrientation: MediaCropOrientation?,
         gradientColors: [UIColor]?,
         videoTrimRange: Range<Double>?,
         videoIsMuted: Bool,
@@ -295,15 +427,17 @@ public final class MediaEditorValues: Codable, Equatable {
         audioTrackTrimRange: Range<Double>?,
         audioTrackOffset: Double?,
         audioTrackVolume: CGFloat?,
-        audioTrackSamples: MediaAudioTrackSamples?
+        audioTrackSamples: MediaAudioTrackSamples?,
+        qualityPreset: MediaQualityPreset?
     ) {
         self.peerId = peerId
         self.originalDimensions = originalDimensions
         self.cropOffset = cropOffset
-        self.cropSize = cropSize
+        self.cropRect = cropRect
         self.cropScale = cropScale
         self.cropRotation = cropRotation
         self.cropMirroring = cropMirroring
+        self.cropOrientation = cropOrientation
         self.gradientColors = gradientColors
         self.videoTrimRange = videoTrimRange
         self.videoIsMuted = videoIsMuted
@@ -322,6 +456,7 @@ public final class MediaEditorValues: Codable, Equatable {
         self.audioTrackOffset = audioTrackOffset
         self.audioTrackVolume = audioTrackVolume
         self.audioTrackSamples = audioTrackSamples
+        self.qualityPreset = qualityPreset
     }
     
     public init(from decoder: Decoder) throws {
@@ -334,10 +469,11 @@ public final class MediaEditorValues: Codable, Equatable {
         self.originalDimensions = PixelDimensions(width: width, height: height)
         
         self.cropOffset = try container.decode(CGPoint.self, forKey: .cropOffset)
-        self.cropSize = try container.decodeIfPresent(CGSize.self, forKey: .cropSize)
+        self.cropRect = try container.decodeIfPresent(CGRect.self, forKey: .cropRect)
         self.cropScale = try container.decode(CGFloat.self, forKey: .cropScale)
         self.cropRotation = try container.decode(CGFloat.self, forKey: .cropRotation)
         self.cropMirroring = try container.decode(Bool.self, forKey: .cropMirroring)
+        self.cropOrientation = (try container.decodeIfPresent(Int32.self, forKey: .cropOrientation)).flatMap { MediaCropOrientation(rawValue: $0) }
         
         if let gradientColors = try container.decodeIfPresent([DrawingColor].self, forKey: .gradientColors) {
             self.gradientColors = gradientColors.map { $0.toUIColor() }
@@ -378,6 +514,8 @@ public final class MediaEditorValues: Codable, Equatable {
         self.audioTrackVolume = try container.decodeIfPresent(CGFloat.self, forKey: .audioTrackVolume)
         
         self.audioTrackSamples = nil
+        
+        self.qualityPreset = (try container.decodeIfPresent(Int32.self, forKey: .qualityPreset)).flatMap { MediaQualityPreset(rawValue: $0) }
     }
     
     public func encode(to encoder: Encoder) throws {
@ -389,10 +527,11 @@ public final class MediaEditorValues: Codable, Equatable {
|
|||||||
try container.encode(self.originalDimensions.height, forKey: .originalHeight)
|
try container.encode(self.originalDimensions.height, forKey: .originalHeight)
|
||||||
|
|
||||||
try container.encode(self.cropOffset, forKey: .cropOffset)
|
try container.encode(self.cropOffset, forKey: .cropOffset)
|
||||||
try container.encode(self.cropSize, forKey: .cropSize)
|
try container.encode(self.cropRect, forKey: .cropRect)
|
||||||
try container.encode(self.cropScale, forKey: .cropScale)
|
try container.encode(self.cropScale, forKey: .cropScale)
|
||||||
try container.encode(self.cropRotation, forKey: .cropRotation)
|
try container.encode(self.cropRotation, forKey: .cropRotation)
|
||||||
try container.encode(self.cropMirroring, forKey: .cropMirroring)
|
try container.encode(self.cropMirroring, forKey: .cropMirroring)
|
||||||
|
try container.encodeIfPresent(self.cropOrientation?.rawValue, forKey: .cropOrientation)
|
||||||
|
|
||||||
if let gradientColors = self.gradientColors {
|
if let gradientColors = self.gradientColors {
|
||||||
try container.encode(gradientColors.map { DrawingColor(color: $0) }, forKey: .gradientColors)
|
try container.encode(gradientColors.map { DrawingColor(color: $0) }, forKey: .gradientColors)
|
||||||
@ -427,71 +566,73 @@ public final class MediaEditorValues: Codable, Equatable {
|
|||||||
try container.encodeIfPresent(self.audioTrackTrimRange, forKey: .audioTrackTrimRange)
|
try container.encodeIfPresent(self.audioTrackTrimRange, forKey: .audioTrackTrimRange)
|
||||||
try container.encodeIfPresent(self.audioTrackOffset, forKey: .audioTrackOffset)
|
try container.encodeIfPresent(self.audioTrackOffset, forKey: .audioTrackOffset)
|
||||||
try container.encodeIfPresent(self.audioTrackVolume, forKey: .audioTrackVolume)
|
try container.encodeIfPresent(self.audioTrackVolume, forKey: .audioTrackVolume)
|
||||||
|
|
||||||
|
try container.encodeIfPresent(self.qualityPreset?.rawValue, forKey: .qualityPreset)
|
||||||
}
|
}
|
||||||
|
|
||||||
public func makeCopy() -> MediaEditorValues {
|
public func makeCopy() -> MediaEditorValues {
|
||||||
return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropSize: self.cropSize, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: self.additionalVideoPath, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, drawing: self.drawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples)
|
return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: self.additionalVideoPath, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, drawing: self.drawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, qualityPreset: self.qualityPreset)
|
||||||
}
|
}
|
||||||
|
|
||||||
func withUpdatedCrop(offset: CGPoint, scale: CGFloat, rotation: CGFloat, mirroring: Bool) -> MediaEditorValues {
|
func withUpdatedCrop(offset: CGPoint, scale: CGFloat, rotation: CGFloat, mirroring: Bool) -> MediaEditorValues {
|
||||||
return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: offset, cropSize: self.cropSize, cropScale: scale, cropRotation: rotation, cropMirroring: mirroring, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: self.additionalVideoPath, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, drawing: self.drawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples)
|
return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: offset, cropRect: self.cropRect, cropScale: scale, cropRotation: rotation, cropMirroring: mirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: self.additionalVideoPath, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, drawing: self.drawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, qualityPreset: self.qualityPreset)
|
||||||
}
|
}
|
||||||
|
|
||||||
func withUpdatedGradientColors(gradientColors: [UIColor]) -> MediaEditorValues {
|
func withUpdatedGradientColors(gradientColors: [UIColor]) -> MediaEditorValues {
|
||||||
return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropSize: self.cropSize, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, gradientColors: gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: self.additionalVideoPath, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, drawing: self.drawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples)
|
return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: self.additionalVideoPath, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, drawing: self.drawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, qualityPreset: self.qualityPreset)
|
||||||
}
|
}
|
||||||
|
|
||||||
func withUpdatedVideoIsMuted(_ videoIsMuted: Bool) -> MediaEditorValues {
|
func withUpdatedVideoIsMuted(_ videoIsMuted: Bool) -> MediaEditorValues {
|
||||||
return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropSize: self.cropSize, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: self.additionalVideoPath, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, drawing: self.drawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples)
|
return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: self.additionalVideoPath, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, drawing: self.drawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, qualityPreset: self.qualityPreset)
|
||||||
}
|
}
|
||||||
|
|
||||||
func withUpdatedVideoIsFullHd(_ videoIsFullHd: Bool) -> MediaEditorValues {
|
func withUpdatedVideoIsFullHd(_ videoIsFullHd: Bool) -> MediaEditorValues {
|
||||||
return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropSize: self.cropSize, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: self.additionalVideoPath, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, drawing: self.drawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples)
|
return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: self.additionalVideoPath, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, drawing: self.drawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, qualityPreset: self.qualityPreset)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
func withUpdatedVideoIsMirrored(_ videoIsMirrored: Bool) -> MediaEditorValues {
|
func withUpdatedVideoIsMirrored(_ videoIsMirrored: Bool) -> MediaEditorValues {
|
||||||
return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropSize: self.cropSize, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: videoIsMirrored, additionalVideoPath: self.additionalVideoPath, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, drawing: self.drawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples)
|
return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: videoIsMirrored, additionalVideoPath: self.additionalVideoPath, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, drawing: self.drawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, qualityPreset: self.qualityPreset)
|
||||||
}
|
}
|
||||||
|
|
||||||
func withUpdatedAdditionalVideo(path: String, positionChanges: [VideoPositionChange]) -> MediaEditorValues {
|
func withUpdatedAdditionalVideo(path: String, positionChanges: [VideoPositionChange]) -> MediaEditorValues {
|
||||||
return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropSize: self.cropSize, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: path, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: positionChanges, drawing: self.drawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples)
|
return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: path, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: positionChanges, drawing: self.drawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, qualityPreset: self.qualityPreset)
|
||||||
}
|
}
|
||||||
|
|
||||||
func withUpdatedAdditionalVideo(position: CGPoint, scale: CGFloat, rotation: CGFloat) -> MediaEditorValues {
|
func withUpdatedAdditionalVideo(position: CGPoint, scale: CGFloat, rotation: CGFloat) -> MediaEditorValues {
|
||||||
return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropSize: self.cropSize, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: self.additionalVideoPath, additionalVideoPosition: position, additionalVideoScale: scale, additionalVideoRotation: rotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, drawing: self.drawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples)
|
return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: self.additionalVideoPath, additionalVideoPosition: position, additionalVideoScale: scale, additionalVideoRotation: rotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, drawing: self.drawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, qualityPreset: self.qualityPreset)
|
||||||
}
|
}
|
||||||
|
|
||||||
func withUpdatedVideoTrimRange(_ videoTrimRange: Range<Double>) -> MediaEditorValues {
|
func withUpdatedVideoTrimRange(_ videoTrimRange: Range<Double>) -> MediaEditorValues {
|
||||||
return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropSize: self.cropSize, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: self.additionalVideoPath, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, drawing: self.drawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples)
|
return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: self.additionalVideoPath, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, drawing: self.drawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, qualityPreset: self.qualityPreset)
|
||||||
}
|
}
|
||||||
|
|
||||||
func withUpdatedDrawingAndEntities(drawing: UIImage?, entities: [CodableDrawingEntity]) -> MediaEditorValues {
|
func withUpdatedDrawingAndEntities(drawing: UIImage?, entities: [CodableDrawingEntity]) -> MediaEditorValues {
|
||||||
return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropSize: self.cropSize, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: self.additionalVideoPath, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, drawing: drawing, entities: entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples)
|
return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: self.additionalVideoPath, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, drawing: drawing, entities: entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, qualityPreset: self.qualityPreset)
|
||||||
}
|
}
|
||||||
|
|
||||||
func withUpdatedToolValues(_ toolValues: [EditorToolKey: Any]) -> MediaEditorValues {
|
func withUpdatedToolValues(_ toolValues: [EditorToolKey: Any]) -> MediaEditorValues {
|
||||||
return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropSize: self.cropSize, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: self.additionalVideoPath, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, drawing: self.drawing, entities: self.entities, toolValues: toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples)
|
return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: self.additionalVideoPath, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, drawing: self.drawing, entities: self.entities, toolValues: toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, qualityPreset: self.qualityPreset)
|
||||||
}
|
}
|
||||||
|
|
||||||
func withUpdatedAudioTrack(_ audioTrack: MediaAudioTrack?) -> MediaEditorValues {
|
func withUpdatedAudioTrack(_ audioTrack: MediaAudioTrack?) -> MediaEditorValues {
|
||||||
return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropSize: self.cropSize, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: self.additionalVideoPath, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, drawing: self.drawing, entities: self.entities, toolValues: self.toolValues, audioTrack: audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples)
|
return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: self.additionalVideoPath, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, drawing: self.drawing, entities: self.entities, toolValues: self.toolValues, audioTrack: audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, qualityPreset: self.qualityPreset)
|
||||||
}
|
}
|
||||||
|
|
||||||
func withUpdatedAudioTrackTrimRange(_ audioTrackTrimRange: Range<Double>?) -> MediaEditorValues {
|
func withUpdatedAudioTrackTrimRange(_ audioTrackTrimRange: Range<Double>?) -> MediaEditorValues {
|
||||||
return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropSize: self.cropSize, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: self.additionalVideoPath, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, drawing: self.drawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples)
|
return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: self.additionalVideoPath, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, drawing: self.drawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, qualityPreset: self.qualityPreset)
|
||||||
}
|
}
|
||||||
|
|
||||||
func withUpdatedAudioTrackOffset(_ audioTrackOffset: Double?) -> MediaEditorValues {
|
func withUpdatedAudioTrackOffset(_ audioTrackOffset: Double?) -> MediaEditorValues {
|
||||||
return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropSize: self.cropSize, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: self.additionalVideoPath, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, drawing: self.drawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples)
|
return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: self.additionalVideoPath, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, drawing: self.drawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, qualityPreset: self.qualityPreset)
|
||||||
}
|
}
|
||||||
|
|
||||||
func withUpdatedAudioTrackVolume(_ audioTrackVolume: CGFloat?) -> MediaEditorValues {
|
func withUpdatedAudioTrackVolume(_ audioTrackVolume: CGFloat?) -> MediaEditorValues {
|
||||||
return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropSize: self.cropSize, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: self.additionalVideoPath, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, drawing: self.drawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: audioTrackVolume, audioTrackSamples: self.audioTrackSamples)
|
return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: self.additionalVideoPath, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, drawing: self.drawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: audioTrackVolume, audioTrackSamples: self.audioTrackSamples, qualityPreset: self.qualityPreset)
|
||||||
}
|
}
|
||||||
|
|
||||||
func withUpdatedAudioTrackSamples(_ audioTrackSamples: MediaAudioTrackSamples?) -> MediaEditorValues {
|
func withUpdatedAudioTrackSamples(_ audioTrackSamples: MediaAudioTrackSamples?) -> MediaEditorValues {
|
||||||
return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropSize: self.cropSize, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: self.additionalVideoPath, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, drawing: self.drawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: audioTrackSamples)
|
return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: self.additionalVideoPath, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, drawing: self.drawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: audioTrackSamples, qualityPreset: self.qualityPreset)
|
||||||
}
|
}
|
||||||
|
|
||||||
public var resultDimensions: PixelDimensions {
|
public var resultDimensions: PixelDimensions {
|
||||||
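
All of the `withUpdated...` helpers above follow the same copy-with-change pattern: each call re-creates an immutable `MediaEditorValues`, threading the new `cropRect`, `cropOrientation` and `qualityPreset` fields through unchanged. A minimal sketch of the pattern on a toy type (names here are illustrative, not from this commit):

    import CoreGraphics

    struct ToyValues {
        let scale: CGFloat
        let rotation: CGFloat

        // Rebuild the whole value, carrying every untouched field across.
        func withUpdatedScale(_ scale: CGFloat) -> ToyValues {
            return ToyValues(scale: scale, rotation: self.rotation)
        }
    }

    let base = ToyValues(scale: 1.0, rotation: 0.0)
    let zoomed = base.withUpdatedScale(2.0) // rotation stays 0.0

The cost of the pattern is visible in this hunk: adding one stored property means touching every builder.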
@@ -515,6 +656,9 @@ public final class MediaEditorValues: Codable, Equatable {
         if self.cropMirroring {
             return true
         }
+        if (self.cropOrientation ?? .up) != .up {
+            return true
+        }
         if self.videoTrimRange != nil {
             return true
         }
@@ -527,6 +671,9 @@ public final class MediaEditorValues: Codable, Equatable {
         if !self.toolValues.isEmpty {
             return true
         }
+        if self.audioTrack != nil {
+            return true
+        }
         return false
     }
 }
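
The two added checks mean a non-default crop orientation or an attached audio track now counts as an edit on its own. Because `cropOrientation` is optional, `?? .up` treats a missing value as "not rotated". A standalone restatement, assuming the orientation type behaves like `UIImage.Orientation` (the concrete type is not shown in this hunk):

    import UIKit

    func orientationIsEdited(_ orientation: UIImage.Orientation?) -> Bool {
        // nil is treated as .up, i.e. no rotation was applied
        return (orientation ?? .up) != .up
    }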
@@ -1267,46 +1414,89 @@ private let hasHEVCHardwareEncoder: Bool = {
     return result == noErr
 }()
 
+func targetSize(cropSize: CGSize, rotateSideward: Bool = false) -> CGSize {
+    let blockSize: CGFloat = 16.0
+    
+    var adjustedCropSize = cropSize
+    if rotateSideward {
+        adjustedCropSize = CGSize(width: cropSize.height, height: cropSize.width)
+    }
+    
+    let renderWidth = (adjustedCropSize.width / blockSize).rounded(.down) * blockSize
+    let renderHeight = (adjustedCropSize.height * renderWidth / adjustedCropSize.width).rounded(.down)
+    
+//    if fmod(renderHeight, blockSize) != 0 {
+//        renderHeight = (adjustedCropSize.height / blockSize).rounded(.down) * blockSize
+//    }
+    
+    return CGSize(width: renderWidth, height: renderHeight)
+}
+
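
`targetSize` exists because hardware video encoders generally want dimensions aligned to the codec block size: the width is floored to a multiple of 16 and the height is scaled to preserve the aspect ratio, then floored to a whole pixel (the commented-out lines suggest block-aligning the height was considered and left out). A worked example of the same math with concrete numbers:

    import CoreGraphics

    // Sketch of the targetSize arithmetic for a 1082x1925 crop.
    let blockSize: CGFloat = 16.0
    let cropSize = CGSize(width: 1082.0, height: 1925.0)

    let renderWidth = (cropSize.width / blockSize).rounded(.down) * blockSize
    // 1082 / 16 = 67.625 -> 67 * 16 = 1072
    let renderHeight = (cropSize.height * renderWidth / cropSize.width).rounded(.down)
    // 1925 * 1072 / 1082 ≈ 1907.2 -> 1907

    print(CGSize(width: renderWidth, height: renderHeight)) // (1072.0, 1907.0)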
 public func recommendedVideoExportConfiguration(values: MediaEditorValues, duration: Double, image: Bool = false, forceFullHd: Bool = false, frameRate: Float) -> MediaEditorVideoExport.Configuration {
     let compressionProperties: [String: Any]
     let codecType: AVVideoCodecType
     
-    var bitrate: Int = 3700
+    var videoBitrate: Int = 3700
+    var audioBitrate: Int = 64
+    var audioNumberOfChannels = 2
     if image {
-        bitrate = 5000
+        videoBitrate = 5000
     } else {
         if duration < 10 {
-            bitrate = 5800
+            videoBitrate = 5800
         } else if duration < 20 {
-            bitrate = 5500
+            videoBitrate = 5500
         } else if duration < 30 {
-            bitrate = 5000
+            videoBitrate = 5000
         }
     }
-    if hasHEVCHardwareEncoder {
+    let width: Int
+    let height: Int
+    
+    var useHEVC = hasHEVCHardwareEncoder
+    if let qualityPreset = values.qualityPreset {
+        let maxSize = CGSize(width: qualityPreset.maximumDimensions, height: qualityPreset.maximumDimensions)
+        var resultSize = values.originalDimensions.cgSize
+        if let cropRect = values.cropRect, !cropRect.isEmpty {
+            resultSize = targetSize(cropSize: cropRect.size.aspectFitted(maxSize), rotateSideward: values.cropOrientation?.isSideward ?? false)
+        } else {
+            resultSize = targetSize(cropSize: resultSize.aspectFitted(maxSize), rotateSideward: values.cropOrientation?.isSideward ?? false)
+        }
+        
+        width = Int(resultSize.width)
+        height = Int(resultSize.height)
+        
+        videoBitrate = qualityPreset.videoBitrateKbps
+        audioBitrate = qualityPreset.audioBitrateKbps
+        audioNumberOfChannels = qualityPreset.audioChannelsCount
+        
+        useHEVC = false
+    } else {
+        if values.videoIsFullHd {
+            width = 1080
+            height = 1920
+        } else {
+            width = 720
+            height = 1280
+        }
+    }
+    
+    if useHEVC {
         codecType = AVVideoCodecType.hevc
         compressionProperties = [
-            AVVideoAverageBitRateKey: bitrate * 1000,
+            AVVideoAverageBitRateKey: videoBitrate * 1000,
             AVVideoProfileLevelKey: kVTProfileLevel_HEVC_Main_AutoLevel
         ]
     } else {
         codecType = AVVideoCodecType.h264
         compressionProperties = [
-            AVVideoAverageBitRateKey: bitrate * 1000,
+            AVVideoAverageBitRateKey: videoBitrate * 1000,
             AVVideoProfileLevelKey: AVVideoProfileLevelH264HighAutoLevel,
             AVVideoH264EntropyModeKey: AVVideoH264EntropyModeCABAC
         ]
     }
-    
-    let width: Int
-    let height: Int
-    if values.videoIsFullHd {
-        width = 1080
-        height = 1920
-    } else {
-        width = 720
-        height = 1280
-    }
-    
     let videoSettings: [String: Any] = [
         AVVideoCodecKey: codecType,
@@ -1318,8 +1508,8 @@ public func recommendedVideoExportConfiguration(values: MediaEditorValues, durat
     let audioSettings: [String: Any] = [
         AVFormatIDKey: kAudioFormatMPEG4AAC,
         AVSampleRateKey: 44100,
-        AVEncoderBitRateKey: 64000,
+        AVEncoderBitRateKey: audioBitrate * 1000,
-        AVNumberOfChannelsKey: 2
+        AVNumberOfChannelsKey: audioNumberOfChannels
     ]
     
     return MediaEditorVideoExport.Configuration(
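
The restructured function now resolves dimensions and bitrates in one place: a `qualityPreset`, when present, supplies the maximum dimensions, video/audio bitrates and channel count and forces H.264 (`useHEVC = false`); without a preset the fixed 1080×1920 / 720×1280 story sizes apply. The default video bitrate buckets can be restated on their own (kbps values taken directly from the hunk above):

    // Default video bitrate in kbps before any quality preset overrides it.
    func defaultVideoBitrate(duration: Double, isImage: Bool) -> Int {
        if isImage {
            return 5000
        } else if duration < 10 {
            return 5800
        } else if duration < 20 {
            return 5500
        } else if duration < 30 {
            return 5000
        } else {
            return 3700
        }
    }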
@@ -188,8 +188,8 @@ public final class MediaEditorVideoAVAssetWriter: MediaEditorVideoExportWriter {
 
 public final class MediaEditorVideoExport {
     public enum Subject {
-        case image(UIImage)
+        case image(image: UIImage)
-        case video(AVAsset)
+        case video(asset: AVAsset, isStory: Bool)
     }
     
     public struct Configuration {
@@ -247,7 +247,12 @@ public final class MediaEditorVideoExport {
     }
     
     var composerDimensions: CGSize {
-        return CGSize(width: 1080.0, height: 1920.0)
+        if self.values.isStory {
+            return CGSize(width: 1080.0, height: 1920.0)
+        } else {
+            let maxSize = CGSize(width: 1920.0, height: 1920.0)
+            return targetSize(cropSize: self.values.originalDimensions.cgSize.aspectFitted(maxSize))
+        }
     }
     
     var dimensions: CGSize {
@@ -351,12 +356,12 @@ public final class MediaEditorVideoExport {
     }
     
     private func setup() {
-        if case let .video(asset) = self.subject {
+        if case let .video(asset, isStory) = self.subject {
             if let trimmedVideoDuration = self.configuration.timeRange?.duration {
                 self.durationValue = trimmedVideoDuration
             } else {
                 asset.loadValuesAsynchronously(forKeys: ["tracks", "duration"]) {
-                    if asset.duration.seconds > 60.0 {
+                    if asset.duration.seconds > 60.0 && isStory {
                         self.durationValue = CMTime(seconds: 60.0, preferredTimescale: CMTimeScale(NSEC_PER_SEC))
                     } else {
                         self.durationValue = asset.duration
@@ -368,7 +373,7 @@ public final class MediaEditorVideoExport {
         }
         
         switch self.subject {
-        case let .video(asset):
+        case let .video(asset, _):
             var additionalAsset: AVAsset?
             if let additionalPath = self.configuration.values.additionalVideoPath {
                 additionalAsset = AVURLAsset(url: URL(fileURLWithPath: additionalPath))
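
The duration logic now caps an untrimmed asset at 60 seconds only when it is a story export; non-story exports keep the full asset duration. The clamp in isolation (a sketch of the branch added above):

    import CoreMedia
    import Foundation

    func clampedDuration(_ assetDuration: CMTime, isStory: Bool) -> CMTime {
        if assetDuration.seconds > 60.0 && isStory {
            return CMTime(seconds: 60.0, preferredTimescale: CMTimeScale(NSEC_PER_SEC))
        } else {
            return assetDuration
        }
    }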
@@ -4834,15 +4834,15 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
         switch subject {
         case let .video(path, _, _, _, _, _, _, _, _):
             let asset = AVURLAsset(url: NSURL(fileURLWithPath: path) as URL)
-            exportSubject = .single(.video(asset))
+            exportSubject = .single(.video(asset: asset, isStory: true))
         case let .image(image, _, _, _):
-            exportSubject = .single(.image(image))
+            exportSubject = .single(.image(image: image))
         case let .asset(asset):
             exportSubject = Signal { subscriber in
                 if asset.mediaType == .video {
                     PHImageManager.default().requestAVAsset(forVideo: asset, options: nil) { avAsset, _, _ in
                         if let avAsset {
-                            subscriber.putNext(.video(avAsset))
+                            subscriber.putNext(.video(asset: avAsset, isStory: true))
                             subscriber.putCompletion()
                         }
                     }
@@ -4851,7 +4851,7 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
                     options.deliveryMode = .highQualityFormat
                     PHImageManager.default().requestImage(for: asset, targetSize: PHImageManagerMaximumSize, contentMode: .default, options: options) { image, _ in
                         if let image {
-                            subscriber.putNext(.image(image))
+                            subscriber.putNext(.image(image: image))
                             subscriber.putCompletion()
                         }
                     }
@@ -4861,10 +4861,10 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
         case let .draft(draft, _):
             if draft.isVideo {
                 let asset = AVURLAsset(url: NSURL(fileURLWithPath: draft.fullPath(engine: context.engine)) as URL)
-                exportSubject = .single(.video(asset))
+                exportSubject = .single(.video(asset: asset, isStory: true))
             } else {
                 if let image = UIImage(contentsOfFile: draft.fullPath(engine: context.engine)) {
-                    exportSubject = .single(.image(image))
+                    exportSubject = .single(.image(image: image))
                 } else {
                     fatalError()
                 }
@@ -4876,7 +4876,7 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
             return
         }
         var duration: Double = 0.0
-        if case let .video(video) = exportSubject {
+        if case let .video(video, _) = exportSubject {
             duration = video.duration.seconds
         }
         let configuration = recommendedVideoExportConfiguration(values: mediaEditor.values, duration: duration, forceFullHd: true, frameRate: 60.0)
@@ -460,7 +460,7 @@ final class VideoScrubberComponent: Component {
             let updatedPosition: Double
             if let (start, from, to, _) = self.positionAnimation {
                 var from = from
-                if let offset = component.audioData?.offset {
+                if component.audioOnly, let offset = component.audioData?.offset {
                     from -= offset
                 }
                 let duration = to - from
@@ -471,13 +471,13 @@ final class VideoScrubberComponent: Component {
                 }
             } else {
                 var position = component.position
-                if let offset = component.audioData?.offset {
+                if component.audioOnly, let offset = component.audioData?.offset {
                     position -= offset
                 }
                 let advance = component.isPlaying ? timestamp - component.generationTimestamp : 0.0
                 updatedPosition = max(component.startPosition, min(component.endPosition, position + advance))
             }
-            let cursorHeight: CGFloat = component.audioData != nil ? 80.0 : 50.0
+            let cursorHeight: CGFloat = component.audioData != nil && !component.audioOnly ? 80.0 : 50.0
             self.cursorView.frame = cursorFrame(size: scrubberSize, height: cursorHeight, position: updatedPosition, duration: trimDuration)
         }
         
@@ -489,6 +489,7 @@ final class VideoScrubberComponent: Component {
             
             var trimDuration = component.duration
             
+            var isFirstTime = false
             var animateAudioAppearance = false
             if let previousComponent {
                 if previousComponent.audioData == nil, component.audioData != nil {
@@ -499,6 +500,8 @@ final class VideoScrubberComponent: Component {
                     self.isAudioSelected = false
                     animateAudioAppearance = true
                 }
+            } else {
+                isFirstTime = true
             }
             
             let scrubberSpacing: CGFloat = 4.0
@@ -587,6 +590,11 @@ final class VideoScrubberComponent: Component {
             audioTransition.setFrame(view: self.audioScrollView, frame: CGRect(origin: CGPoint(x: 0.0, y: 0.0), size: CGSize(width: availableSize.width, height: audioScrubberHeight)))
             self.audioScrollView.contentSize = CGSize(width: audioTotalWidth, height: audioScrubberHeight)
             
+            if isFirstTime, let offset = component.audioData?.offset, let duration = component.audioData?.duration, duration > 0.0 {
+                let contentOffset = offset * audioTotalWidth / duration
+                self.audioScrollView.contentOffset = CGPoint(x: contentOffset, y: 0.0)
+            }
+            
             audioTransition.setCornerRadius(layer: self.audioClippingView.layer, cornerRadius: self.isAudioSelected ? 0.0 : 9.0)
             
             let audioContainerFrame = CGRect(origin: .zero, size: CGSize(width: audioTotalWidth, height: audioScrubberHeight))
@@ -797,7 +805,7 @@ final class VideoScrubberComponent: Component {
             self.positionAnimation = nil
             self.displayLink?.isPaused = true
             
-            let cursorHeight: CGFloat = component.audioData != nil ? 80.0 : 50.0
+            let cursorHeight: CGFloat = component.audioData != nil && !component.audioOnly ? 80.0 : 50.0
             var cursorPosition = component.position
             if component.audioOnly, let audioOffset = component.audioData?.offset {
                 cursorPosition -= audioOffset
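
Across these hunks the rule is consistent: only when the scrubber shows a lone audio track (`audioOnly`) is the playback position shifted back by the track's offset, and the taller 80-point cursor is reserved for the combined video-plus-audio layout. The position adjustment in isolation:

    // Sketch of the cursor position rule used in the hunks above.
    func adjustedCursorPosition(position: Double, audioOnly: Bool, audioOffset: Double?) -> Double {
        var position = position
        if audioOnly, let offset = audioOffset {
            position -= offset
        }
        return position
    }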
@@ -208,7 +208,7 @@ private final class FetchVideoLibraryMediaResourceContext {
 
 private let throttlingContext = FetchVideoLibraryMediaResourceContext()
 
-public func fetchVideoLibraryMediaResource(postbox: Postbox, resource: VideoLibraryMediaResource) -> Signal<MediaResourceDataFetchResult, MediaResourceDataFetchError> {
+public func fetchVideoLibraryMediaResource(postbox: Postbox, resource: VideoLibraryMediaResource, alwaysUseModernPipeline: Bool = true) -> Signal<MediaResourceDataFetchResult, MediaResourceDataFetchError> {
     let signal = Signal<MediaResourceDataFetchResult, MediaResourceDataFetchError> { subscriber in
         subscriber.putNext(.reset)
         let fetchResult = PHAsset.fetchAssets(withLocalIdentifiers: [resource.localIdentifier], options: nil)
@@ -248,7 +248,7 @@ public func fetchVideoLibraryMediaResource(postbox: Postbox, resource: VideoLibr
                         Logger.shared.log("FetchVideoResource", "Requesting video export")
                         
                         let configuration = recommendedVideoExportConfiguration(values: mediaEditorValues, duration: 5.0, image: true, frameRate: 30.0)
-                        let videoExport = MediaEditorVideoExport(postbox: postbox, subject: .image(image), configuration: configuration, outputPath: tempFile.path)
+                        let videoExport = MediaEditorVideoExport(postbox: postbox, subject: .image(image: image), configuration: configuration, outputPath: tempFile.path)
                         videoExport.start()
                         
                         let statusDisposable = videoExport.status.start(next: { status in
@@ -299,6 +299,7 @@ public func fetchVideoLibraryMediaResource(postbox: Postbox, resource: VideoLibr
                 let options = PHVideoRequestOptions()
                 options.isNetworkAccessAllowed = true
                 options.deliveryMode = .highQualityFormat
+                let dimensions = PixelDimensions(width: Int32(asset.pixelWidth), height: Int32(asset.pixelHeight))
                 requestId = PHImageManager.default().requestAVAsset(forVideo: asset, options: options, resultHandler: { avAsset, _, _ in
                     if alreadyReceivedAsset.swap(true) {
                         return
@@ -307,6 +308,7 @@ public func fetchVideoLibraryMediaResource(postbox: Postbox, resource: VideoLibr
                         return
                     }
                     
+                    var isStory = false
                     var adjustments: TGVideoEditAdjustments?
                     var mediaEditorValues: MediaEditorValues?
                     switch resource.conversion {
@@ -324,13 +326,24 @@ public func fetchVideoLibraryMediaResource(postbox: Postbox, resource: VideoLibr
                             adjustments = nil
                         }
                     case let .compress(adjustmentsValue):
+                        let defaultPreset = TGMediaVideoConversionPreset(rawValue: UInt32(UserDefaults.standard.integer(forKey: "TG_preferredVideoPreset_v0")))
+                        let qualityPreset = MediaQualityPreset(preset: defaultPreset)
                         if let adjustmentsValue = adjustmentsValue {
                             if adjustmentsValue.isStory {
+                                isStory = true
                                 if let values = try? JSONDecoder().decode(MediaEditorValues.self, from: adjustmentsValue.data.makeData()) {
                                     mediaEditorValues = values
                                 }
-                            } else if let dict = NSKeyedUnarchiver.unarchiveObject(with: adjustmentsValue.data.makeData()) as? [AnyHashable : Any] {
+                            } else if let dict = NSKeyedUnarchiver.unarchiveObject(with: adjustmentsValue.data.makeData()) as? [AnyHashable : Any], let legacyAdjustments = TGVideoEditAdjustments(dictionary: dict) {
-                                adjustments = TGVideoEditAdjustments(dictionary: dict)
+                                if alwaysUseModernPipeline {
+                                    mediaEditorValues = MediaEditorValues(legacyAdjustments: legacyAdjustments, defaultPreset: qualityPreset)
+                                } else {
+                                    adjustments = legacyAdjustments
+                                }
+                            }
+                        } else {
+                            if alwaysUseModernPipeline {
+                                mediaEditorValues = MediaEditorValues(dimensions: dimensions, qualityPreset: qualityPreset)
                             }
                         }
                     }
@@ -339,7 +352,7 @@ public func fetchVideoLibraryMediaResource(postbox: Postbox, resource: VideoLibr
                     if let mediaEditorValues {
                         let duration: Double = avAsset.duration.seconds
                         let configuration = recommendedVideoExportConfiguration(values: mediaEditorValues, duration: duration, frameRate: 30.0)
-                        let videoExport = MediaEditorVideoExport(postbox: postbox, subject: .video(avAsset), configuration: configuration, outputPath: tempFile.path)
+                        let videoExport = MediaEditorVideoExport(postbox: postbox, subject: .video(asset: avAsset, isStory: isStory), configuration: configuration, outputPath: tempFile.path)
                         videoExport.start()
                         
                         let statusDisposable = videoExport.status.start(next: { status in
@@ -464,7 +477,7 @@ public func fetchVideoLibraryMediaResource(postbox: Postbox, resource: VideoLibr
     return throttlingContext.wrap(priority: .default, signal: signal)
 }
 
-public func fetchLocalFileVideoMediaResource(postbox: Postbox, resource: LocalFileVideoMediaResource) -> Signal<MediaResourceDataFetchResult, MediaResourceDataFetchError> {
+public func fetchLocalFileVideoMediaResource(postbox: Postbox, resource: LocalFileVideoMediaResource, alwaysUseModernPipeline: Bool = true) -> Signal<MediaResourceDataFetchResult, MediaResourceDataFetchError> {
     let signal = Signal<MediaResourceDataFetchResult, MediaResourceDataFetchError> { subscriber in
         subscriber.putNext(.reset)
 
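
The conversion branch now distinguishes three inputs: story adjustments carry JSON-encoded `MediaEditorValues`, legacy adjustments carry an `NSKeyedUnarchiver` dictionary, and with `alwaysUseModernPipeline` the legacy dictionary is converted into `MediaEditorValues` instead of driving the old converter. Schematically (stand-in types, not the module's own):

    import Foundation

    enum ConversionPipeline {
        case modern   // JSONDecoder -> MediaEditorValues, or legacy values converted
        case legacy   // TGVideoEditAdjustments driving the old converter
    }

    func choosePipeline(isStory: Bool, alwaysUseModernPipeline: Bool) -> ConversionPipeline {
        if isStory || alwaysUseModernPipeline {
            return .modern
        } else {
            return .legacy
        }
    }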
@@ -473,16 +486,31 @@ public func fetchLocalFileVideoMediaResource(postbox: Postbox, resource: LocalFi
             filteredPath = String(filteredPath[filteredPath.index(filteredPath.startIndex, offsetBy: "file://".count)])
         }
 
+        let defaultPreset = TGMediaVideoConversionPreset(rawValue: UInt32(UserDefaults.standard.integer(forKey: "TG_preferredVideoPreset_v0")))
+        let qualityPreset = MediaQualityPreset(preset: defaultPreset)
+
+        let isImage = filteredPath.contains(".jpg")
+        var isStory = false
         let avAsset = AVURLAsset(url: URL(fileURLWithPath: filteredPath))
         var adjustments: TGVideoEditAdjustments?
         var mediaEditorValues: MediaEditorValues?
         if let videoAdjustments = resource.adjustments {
             if videoAdjustments.isStory {
+                isStory = true
                 if let values = try? JSONDecoder().decode(MediaEditorValues.self, from: videoAdjustments.data.makeData()) {
                     mediaEditorValues = values
                 }
-            } else if let dict = NSKeyedUnarchiver.unarchiveObject(with: videoAdjustments.data.makeData()) as? [AnyHashable : Any] {
-                adjustments = TGVideoEditAdjustments(dictionary: dict)
+            } else if let dict = NSKeyedUnarchiver.unarchiveObject(with: videoAdjustments.data.makeData()) as? [AnyHashable : Any], let legacyAdjustments = TGVideoEditAdjustments(dictionary: dict) {
+                if alwaysUseModernPipeline && !isImage {
+                    mediaEditorValues = MediaEditorValues(legacyAdjustments: legacyAdjustments, defaultPreset: qualityPreset)
+                } else {
+                    adjustments = legacyAdjustments
+                }
+            }
+        } else {
+            if alwaysUseModernPipeline && !isImage, let track = avAsset.tracks(withMediaType: .video).first {
+                let dimensions = track.naturalSize.applying(track.preferredTransform)
+                mediaEditorValues = MediaEditorValues(dimensions: PixelDimensions(dimensions), qualityPreset: qualityPreset)
             }
         }
         let tempFile = EngineTempBox.shared.tempFile(fileName: "video.mp4")
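One subtlety in the new dimension math: applying preferredTransform to naturalSize yields negative width or height for 90- or 270-degree rotated tracks, so the downstream PixelDimensions conversion has to cope with the sign. A defensive variant, shown only as an assumption about what a sign-safe version would look like:

import AVFoundation

// Hedged sketch of a sign-safe variant; the diff itself passes the
// transformed size straight to PixelDimensions.
func displayDimensions(of track: AVAssetTrack) -> CGSize {
    let transformed = track.naturalSize.applying(track.preferredTransform)
    return CGSize(width: abs(transformed.width), height: abs(transformed.height))
}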
@@ -491,10 +519,10 @@ public func fetchLocalFileVideoMediaResource(postbox: Postbox, resource: LocalFi
             let duration: Double = avAsset.duration.seconds
             let configuration = recommendedVideoExportConfiguration(values: mediaEditorValues, duration: duration, frameRate: 30.0)
             let subject: MediaEditorVideoExport.Subject
-            if filteredPath.contains(".jpg"), let data = try? Data(contentsOf: URL(fileURLWithPath: filteredPath), options: [.mappedRead]), let image = UIImage(data: data) {
-                subject = .image(image)
+            if isImage, let data = try? Data(contentsOf: URL(fileURLWithPath: filteredPath), options: [.mappedRead]), let image = UIImage(data: data) {
+                subject = .image(image: image)
             } else {
-                subject = .video(avAsset)
+                subject = .video(asset: avAsset, isStory: isStory)
             }
 
             let videoExport = MediaEditorVideoExport(postbox: postbox, subject: subject, configuration: configuration, outputPath: tempFile.path)
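Hoisting the duplicated filteredPath.contains(".jpg") test into the single isImage flag keeps both call sites consistent, though the substring check remains loose: it matches ".jpg" anywhere in the path and misses other image extensions. A stricter, hypothetical alternative compares the path extension instead:

import Foundation

// Hypothetical stricter check; the diff keeps the substring test.
func isImagePath(_ path: String) -> Bool {
    let ext = (path as NSString).pathExtension.lowercased()
    return ["jpg", "jpeg", "png", "heic"].contains(ext)
}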
@@ -556,7 +584,7 @@ public func fetchLocalFileVideoMediaResource(postbox: Postbox, resource: LocalFi
                 }
             }
             let signal: SSignal
-            if filteredPath.contains(".jpg"), let entityRenderer = entityRenderer {
+            if isImage, let entityRenderer = entityRenderer {
                 if let data = try? Data(contentsOf: URL(fileURLWithPath: filteredPath), options: [.mappedRead]), let image = UIImage(data: data) {
                     let durationSignal: SSignal = SSignal(generator: { subscriber in
                         let disposable = (entityRenderer.duration()).start(next: { duration in
@@ -687,8 +715,8 @@ public func fetchVideoLibraryMediaResourceHash(resource: VideoLibraryMediaResour
             adjustments = nil
         case let .compress(adjustmentsValue):
             if let adjustmentsValue = adjustmentsValue {
-                if let dict = NSKeyedUnarchiver.unarchiveObject(with: adjustmentsValue.data.makeData()) as? [AnyHashable : Any] {
-                    adjustments = TGVideoEditAdjustments(dictionary: dict)
+                if let dict = NSKeyedUnarchiver.unarchiveObject(with: adjustmentsValue.data.makeData()) as? [AnyHashable : Any], let legacyAdjustments = TGVideoEditAdjustments(dictionary: dict) {
+                    adjustments = legacyAdjustments
                 }
             }
         }
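The same micro-refactor appears in all three fetch functions: TGVideoEditAdjustments(dictionary:) is failable, so binding its result inside the if let condition makes a failed parse skip the branch entirely, where the old code assigned a possibly-nil result. The pattern in isolation (data and adjustments are stand-ins for the surrounding variables):

// Pattern sketch, not a verbatim excerpt.
if let dict = NSKeyedUnarchiver.unarchiveObject(with: data) as? [AnyHashable: Any],
   let legacyAdjustments = TGVideoEditAdjustments(dictionary: dict) {
    // legacyAdjustments is guaranteed non-nil here; the old code assigned the
    // failable initializer's result directly and could store nil.
    adjustments = legacyAdjustments
}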
@@ -765,3 +793,220 @@ public func fetchLocalFileGifMediaResource(resource: LocalFileGifMediaResource)
         }
     }
 }
+
+private extension MediaQualityPreset {
+    init(preset: TGMediaVideoConversionPreset) {
+        var qualityPreset: MediaQualityPreset
+        switch preset {
+        case TGMediaVideoConversionPresetCompressedDefault:
+            qualityPreset = .compressedDefault
+        case TGMediaVideoConversionPresetCompressedVeryLow:
+            qualityPreset = .compressedVeryLow
+        case TGMediaVideoConversionPresetCompressedLow:
+            qualityPreset = .compressedLow
+        case TGMediaVideoConversionPresetCompressedMedium:
+            qualityPreset = .compressedMedium
+        case TGMediaVideoConversionPresetCompressedHigh:
+            qualityPreset = .compressedHigh
+        case TGMediaVideoConversionPresetCompressedVeryHigh:
+            qualityPreset = .compressedVeryHigh
+        case TGMediaVideoConversionPresetProfileLow:
+            qualityPreset = .profileLow
+        case TGMediaVideoConversionPresetProfile:
+            qualityPreset = .profile
+        case TGMediaVideoConversionPresetProfileHigh:
+            qualityPreset = .profileHigh
+        case TGMediaVideoConversionPresetProfileVeryHigh:
+            qualityPreset = .profileVeryHigh
+        case TGMediaVideoConversionPresetAnimation:
+            qualityPreset = .animation
+        case TGMediaVideoConversionPresetVideoMessage:
+            qualityPreset = .videoMessage
+        default:
+            qualityPreset = .compressedMedium
+        }
+        self = qualityPreset
+    }
+}
+
+private extension UIImage.Orientation {
+    var cropOrientation: MediaCropOrientation {
+        switch self {
+        case .up:
+            return .up
+        case .down:
+            return .down
+        case .left:
+            return .left
+        case .right:
+            return .right
+        default:
+            return .up
+        }
+    }
+}
+
+private extension MediaEditorValues {
+    convenience init(dimensions: PixelDimensions, qualityPreset: MediaQualityPreset) {
+        self.init(
+            peerId: EnginePeer.Id(0),
+            originalDimensions: dimensions,
+            cropOffset: .zero,
+            cropRect: nil,
+            cropScale: 1.0,
+            cropRotation: 0.0,
+            cropMirroring: false,
+            cropOrientation: nil,
+            gradientColors: nil,
+            videoTrimRange: nil,
+            videoIsMuted: false,
+            videoIsFullHd: true,
+            videoIsMirrored: false,
+            additionalVideoPath: nil,
+            additionalVideoPosition: nil,
+            additionalVideoScale: nil,
+            additionalVideoRotation: nil,
+            additionalVideoPositionChanges: [],
+            drawing: nil,
+            entities: [],
+            toolValues: [:],
+            audioTrack: nil,
+            audioTrackTrimRange: nil,
+            audioTrackOffset: nil,
+            audioTrackVolume: nil,
+            audioTrackSamples: nil,
+            qualityPreset: qualityPreset
+        )
+    }
+
+    convenience init(legacyAdjustments: TGVideoEditAdjustments, defaultPreset: MediaQualityPreset) {
+        var videoTrimRange: Range<Double>?
+        if legacyAdjustments.trimStartValue > 0.0 || !legacyAdjustments.trimEndValue.isZero {
+            videoTrimRange = legacyAdjustments.trimStartValue ..< legacyAdjustments.trimEndValue
+        }
+
+        var entities: [CodableDrawingEntity] = []
+        var drawing: UIImage?
+
+        if let paintingData = legacyAdjustments.paintingData {
+            if let entitiesData = paintingData.entitiesData {
+                entities = decodeCodableDrawingEntities(data: entitiesData)
+            }
+            if let imagePath = paintingData.imagePath, let image = UIImage(contentsOfFile: imagePath) {
+                drawing = image
+            }
+        }
+
+        var toolValues: [EditorToolKey: Any] = [:]
+        if let tools = legacyAdjustments.toolValues {
+            for (key, value) in tools {
+                if let floatValue = (value as? NSNumber)?.floatValue {
+                    if key == AnyHashable("enhance") {
+                        toolValues[.enhance] = floatValue / 100.0
+                    }
+                    if key == AnyHashable("exposure") {
+                        toolValues[.brightness] = floatValue / 100.0
+                    }
+                    if key == AnyHashable("contrast") {
+                        toolValues[.contrast] = floatValue / 100.0
+                    }
+                    if key == AnyHashable("saturation") {
+                        toolValues[.saturation] = floatValue / 100.0
+                    }
+                    if key == AnyHashable("warmth") {
+                        toolValues[.warmth] = floatValue / 100.0
+                    }
+                    if key == AnyHashable("fade") {
+                        toolValues[.fade] = floatValue / 100.0
+                    }
+                    if key == AnyHashable("vignette") {
+                        toolValues[.vignette] = floatValue / 100.0
+                    }
+                    if key == AnyHashable("grain") {
+                        toolValues[.grain] = floatValue / 100.0
+                    }
+                    if key == AnyHashable("highlights") {
+                        toolValues[.highlights] = floatValue / 100.0
+                    }
+                    if key == AnyHashable("shadows") {
+                        toolValues[.shadows] = floatValue / 100.0
+                    }
+                }
+            }
+        }
+        if let value = legacyAdjustments.tintValue() {
+            let shadowsColor = value["shadowsColor"] as? UIColor
+            let shadowsIntensity = (value["shadowsIntensity"] as? NSNumber)?.floatValue
+            let highlightsColor = value["highlightsColor"] as? UIColor
+            let highlightsIntensity = (value["highlightsIntensity"] as? NSNumber)?.floatValue
+
+            if let shadowsColor, let shadowsIntensity, shadowsColor.alpha > 0.0 {
+                let shadowsTintValue = TintValue(color: shadowsColor, intensity: shadowsIntensity / 100.0)
+                toolValues[.shadowsTint] = shadowsTintValue
+            }
+            if let highlightsColor, let highlightsIntensity, highlightsColor.alpha > 0.0 {
+                let highlightsTintValue = TintValue(color: highlightsColor, intensity: highlightsIntensity / 100.0)
+                toolValues[.highlightsTint] = highlightsTintValue
+            }
+        }
+        if let value = legacyAdjustments.curvesValue() {
+            func readValue(_ key: String) -> CurvesValue.CurveValue? {
+                if let values = value[key] as? [AnyHashable: Any] {
+                    if let blacks = values["blacks"] as? NSNumber, let shadows = values["shadows"] as? NSNumber, let midtones = values["midtones"] as? NSNumber, let highlights = values["highlights"] as? NSNumber, let whites = values["whites"] as? NSNumber {
+                        return CurvesValue.CurveValue(
+                            blacks: blacks.floatValue / 100.0,
+                            shadows: shadows.floatValue / 100.0,
+                            midtones: midtones.floatValue / 100.0,
+                            highlights: highlights.floatValue / 100.0,
+                            whites: whites.floatValue / 100.0
+                        )
+                    }
+                }
+                return nil
+            }
+            if let all = readValue("luminance"), let red = readValue("red"), let green = readValue("green"), let blue = readValue("blue") {
+                toolValues[.curves] = CurvesValue(
+                    all: all,
+                    red: red,
+                    green: green,
+                    blue: blue
+                )
+            }
+        }
+
+        var qualityPreset = MediaQualityPreset(preset: legacyAdjustments.preset)
+        if qualityPreset == .compressedDefault {
+            qualityPreset = defaultPreset
+        }
+
+        self.init(
+            peerId: EnginePeer.Id(0),
+            originalDimensions: PixelDimensions(legacyAdjustments.originalSize),
+            cropOffset: .zero,
+            cropRect: legacyAdjustments.cropRect,
+            cropScale: 1.0,
+            cropRotation: legacyAdjustments.cropRotation,
+            cropMirroring: legacyAdjustments.cropMirrored,
+            cropOrientation: legacyAdjustments.cropOrientation.cropOrientation,
+            gradientColors: nil,
+            videoTrimRange: videoTrimRange,
+            videoIsMuted: legacyAdjustments.sendAsGif,
+            videoIsFullHd: true,
+            videoIsMirrored: false,
+            additionalVideoPath: nil,
+            additionalVideoPosition: nil,
+            additionalVideoScale: nil,
+            additionalVideoRotation: nil,
+            additionalVideoPositionChanges: [],
+            drawing: drawing,
+            entities: entities,
+            toolValues: toolValues,
+            audioTrack: nil,
+            audioTrackTrimRange: nil,
+            audioTrackOffset: nil,
+            audioTrackVolume: nil,
+            audioTrackSamples: nil,
+            qualityPreset: qualityPreset
+        )
+    }
+}
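The conversion above is mostly a field-by-field mapping, but the recurring / 100.0 is the one step that changes meaning: the legacy sliders, tint intensities, and curve points are stored on a -100...100 (or 0...100) scale, while the modern tool values are normalized. A worked example with illustrative numbers:

// Illustrative only: the legacy "exposure" slider maps to the modern .brightness key.
let legacyExposure: Float = 35.0                 // legacy slider position, -100...100
let modernBrightness = legacyExposure / 100.0    // 0.35, the normalized value

Note also the preset fallback at the end of the legacy initializer: an explicit legacy preset wins, and .compressedDefault falls through to the caller-supplied default.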
@@ -409,7 +409,15 @@ public final class TextFieldComponent: Component {
         }
 
         public func textViewDidBeginEditing(_ textView: UITextView) {
+            guard let component = self.component else {
+                return
+            }
             self.state?.updated(transition: Transition(animation: .curve(duration: 0.5, curve: .spring)).withUserData(AnimationHint(kind: .textFocusChanged)))
+            if component.isOneLineWhenUnfocused {
+                Queue.mainQueue().justDispatch {
+                    self.textView.selectedTextRange = self.textView.textRange(from: self.textView.endOfDocument, to: self.textView.endOfDocument)
+                }
+            }
         }
 
         public func textViewDidEndEditing(_ textView: UITextView) {
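The Queue.mainQueue().justDispatch hop is presumably what makes the caret move stick: setting selectedTextRange synchronously inside textViewDidBeginEditing can be overwritten while UIKit finishes its own focus bookkeeping, so the assignment is deferred by one runloop turn. An equivalent sketch with a plain UITextView and GCD, assuming the project's queue helper is unavailable:

import UIKit

// Hedged sketch: defer caret placement until after the focus transition.
func moveCaretToEnd(of textView: UITextView) {
    DispatchQueue.main.async {
        textView.selectedTextRange = textView.textRange(
            from: textView.endOfDocument,
            to: textView.endOfDocument
        )
    }
}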