Mirror of https://github.com/Swiftgram/Telegram-iOS.git (synced 2025-07-04 10:30:42 +00:00)

commit 9a0d6b5ad9
Merge commit '8324c5a55c26821da21d0db50ba02e64a8ec562d' into beta
@@ -5694,3 +5694,5 @@ Any member of this group will be able to see messages in the channel.";
 "SettingsSearch_Synonyms_ChatFolders" = "";
 
 "EditProfile.NameAndPhotoOrVideoHelp" = "Enter your name and add an optional profile photo or video.";
+
+"Settings.RemoveConfirmation" = "Remove";
@@ -256,6 +256,7 @@ public final class AvatarNode: ASDisplayNode {
 var iconColor = theme.chatList.unpinnedArchiveAvatarColor.foregroundColor
 var backgroundColor = theme.chatList.unpinnedArchiveAvatarColor.backgroundColors.topColor
 let animationBackgroundNode = ASImageNode()
+animationBackgroundNode.isUserInteractionEnabled = false
 animationBackgroundNode.frame = self.imageNode.frame
 if let overrideImage = self.overrideImage, case let .archivedChatsIcon(hiddenByDefault) = overrideImage {
 let backgroundColors: (UIColor, UIColor)
@@ -274,6 +275,7 @@ public final class AvatarNode: ASDisplayNode {
 self.addSubnode(animationBackgroundNode)
 
 let animationNode = AnimationNode(animation: "anim_archiveAvatar", colors: ["box1.box1.Fill 1": iconColor, "box3.box3.Fill 1": iconColor, "box2.box2.Fill 1": backgroundColor], scale: 0.1653828)
+animationNode.isUserInteractionEnabled = false
 animationNode.completion = { [weak animationBackgroundNode, weak self] in
 self?.imageNode.isHidden = false
 animationBackgroundNode?.removeFromSupernode()
@@ -344,6 +346,7 @@ public final class AvatarNode: ASDisplayNode {
 if self.editOverlayNode == nil {
 let editOverlayNode = AvatarEditOverlayNode()
 editOverlayNode.frame = self.imageNode.frame
+editOverlayNode.isUserInteractionEnabled = false
 self.addSubnode(editOverlayNode)
 
 self.editOverlayNode = editOverlayNode
@@ -84,7 +84,7 @@ final class NavigationSplitContainer: ASDisplayNode {
 }
 
 func update(layout: ContainerViewLayout, masterControllers: [ViewController], detailControllers: [ViewController], transition: ContainedViewLayoutTransition) {
-let masterWidth: CGFloat = 375.0 // min(max(320.0, floor(layout.size.width / 3.0)), floor(layout.size.width / 2.0))
+let masterWidth: CGFloat = min(max(320.0, floor(layout.size.width / 3.0)), floor(layout.size.width / 2.0))
 let detailWidth = layout.size.width - masterWidth
 
 self.masterScrollToTopView.frame = CGRect(origin: CGPoint(x: 0.0, y: -1.0), size: CGSize(width: masterWidth, height: 1.0))
@@ -296,11 +296,13 @@ public class ItemListAvatarAndNameInfoItemNode: ListViewItemNode, ItemListItemNo
 self.avatarNode = AvatarNode(font: avatarFont)
 
 self.updatingAvatarOverlay = ASImageNode()
+self.updatingAvatarOverlay.isUserInteractionEnabled = false
 self.updatingAvatarOverlay.displayWithoutProcessing = true
 self.updatingAvatarOverlay.displaysAsynchronously = false
 
 self.activityIndicator = ActivityIndicator(type: .custom(.white, 22.0, 1.0, false))
 self.activityIndicator.isHidden = true
+self.activityIndicator.isUserInteractionEnabled = false
 
 self.nameNode = TextNode()
 self.nameNode.isUserInteractionEnabled = false
@@ -45,7 +45,7 @@
 @property (nonatomic, copy) void (^cameraPressed)(TGAttachmentCameraView *cameraView);
 @property (nonatomic, copy) void (^sendPressed)(TGMediaAsset *currentItem, bool asFiles, bool silentPosting, int32_t scheduleTime);
 @property (nonatomic, copy) void (^avatarCompletionBlock)(UIImage *image);
-@property (nonatomic, copy) void (^avatarVideoCompletionBlock)(UIImage *image, NSURL *url, TGVideoEditAdjustments *adjustments);
+@property (nonatomic, copy) void (^avatarVideoCompletionBlock)(UIImage *image, AVAsset *asset, TGVideoEditAdjustments *adjustments);
 
 @property (nonatomic, copy) void (^editorOpened)(void);
 @property (nonatomic, copy) void (^editorClosed)(void);
@@ -1,3 +1,4 @@
+#import <AVFoundation/AVFoundation.h>
 #import <LegacyComponents/TGOverlayControllerWindow.h>
 #import <LegacyComponents/TGOverlayController.h>
 #import <LegacyComponents/LegacyComponentsContext.h>
@@ -76,7 +76,7 @@ typedef enum
 @property (nonatomic, copy) NSDictionary *(^descriptionGenerator)(id, NSString *, NSArray *, NSString *);
 @property (nonatomic, copy) void (^avatarCompletionBlock)(UIImage *image);
 @property (nonatomic, copy) void (^completionBlock)(NSArray *signals, bool silentPosting, int32_t scheduleTime);
-@property (nonatomic, copy) void (^avatarVideoCompletionBlock)(UIImage *image, NSURL *url, TGVideoEditAdjustments *adjustments);
+@property (nonatomic, copy) void (^avatarVideoCompletionBlock)(UIImage *image, AVAsset *asset, TGVideoEditAdjustments *adjustments);
 @property (nonatomic, copy) void (^singleCompletionBlock)(id<TGMediaEditableItem> item, TGMediaEditingContext *editingContext);
 @property (nonatomic, copy) void (^dismissalBlock)(void);
 @property (nonatomic, copy) void (^selectionBlock)(TGMediaAsset *asset, UIImage *);
@@ -94,7 +94,7 @@ typedef enum
 - (NSArray *)resultSignalsWithCurrentItem:(TGMediaAsset *)currentItem descriptionGenerator:(id (^)(id, NSString *, NSArray *, NSString *))descriptionGenerator;
 
 - (void)completeWithAvatarImage:(UIImage *)image;
-- (void)completeWithAvatarVideo:(NSURL *)url adjustments:(TGVideoEditAdjustments *)adjustments image:(UIImage *)image;
+- (void)completeWithAvatarVideo:(AVAsset *)asset adjustments:(TGVideoEditAdjustments *)adjustments image:(UIImage *)image;
 - (void)completeWithCurrentItem:(TGMediaAsset *)currentItem silentPosting:(bool)silentPosting scheduleTime:(int32_t)scheduleTime;
 
 - (void)dismiss;
@@ -1,4 +1,5 @@
 #import <Foundation/Foundation.h>
+#import <AVFoundation/AVFoundation.h>
 #import <LegacyComponents/LegacyComponentsContext.h>
 
 @class TGViewController;
@@ -13,7 +14,7 @@ typedef void (^TGMediaAvatarPresentImpl)(id<LegacyComponentsContext>, void (^)(U
 @interface TGMediaAvatarMenuMixin : NSObject
 
 @property (nonatomic, copy) void (^didFinishWithImage)(UIImage *image);
-@property (nonatomic, copy) void (^didFinishWithVideo)(UIImage *image, NSURL *url, TGVideoEditAdjustments *adjustments);
+@property (nonatomic, copy) void (^didFinishWithVideo)(UIImage *image, AVAsset *asset, TGVideoEditAdjustments *adjustments);
 @property (nonatomic, copy) void (^didFinishWithDelete)(void);
 @property (nonatomic, copy) void (^didFinishWithView)(void);
 @property (nonatomic, copy) void (^didDismiss)(void);
@@ -7,6 +7,8 @@
 
 @property (nonatomic, strong) UIImage *image;
 
+@property (nonatomic, readonly) UIView *clipView;
+
 @property (nonatomic, readonly) CGSize originalSize;
 @property (nonatomic, assign) CGRect cropRect;
 @property (nonatomic, assign) UIImageOrientation cropOrientation;
@@ -37,6 +39,7 @@
 - (void)animateTransitionOut;
 - (void)transitionInFinishedFromCamera:(bool)fromCamera;
 
+- (void)attachEntitiesView;
 - (void)closeCurtains;
 - (void)openCurtains;
 
@@ -53,7 +53,7 @@ typedef enum {
 @property (nonatomic, copy) void (^willFinishEditing)(id<TGMediaEditAdjustments> adjustments, id temporaryRep, bool hasChanges);
 @property (nonatomic, copy) void (^didFinishRenderingFullSizeImage)(UIImage *fullSizeImage);
 @property (nonatomic, copy) void (^didFinishEditing)(id<TGMediaEditAdjustments> adjustments, UIImage *resultImage, UIImage *thumbnailImage, bool hasChanges);
-@property (nonatomic, copy) void (^didFinishEditingVideo)(NSURL *url, id<TGMediaEditAdjustments> adjustments, UIImage *resultImage, UIImage *thumbnailImage, bool hasChanges);
+@property (nonatomic, copy) void (^didFinishEditingVideo)(AVAsset *asset, id<TGMediaEditAdjustments> adjustments, UIImage *resultImage, UIImage *thumbnailImage, bool hasChanges);
 
 @property (nonatomic, assign) bool skipInitialTransition;
 @property (nonatomic, assign) bool dontHideStatusBar;
@@ -9,6 +9,8 @@
 bool _dismissing;
 UIView *_transitionView;
 bool _noTransitionToSnapshot;
+
+bool _animateScale;
 }
 
 @property (nonatomic, weak) id<TGMediaEditableItem> item;
@@ -36,6 +38,7 @@
 
 @property (nonatomic, assign) TGPhotoEditorTab availableTabs;
 
+@property (nonatomic, assign) TGPhotoEditorTab switchingFromTab;
 @property (nonatomic, assign) TGPhotoEditorTab switchingToTab;
 
 - (void)transitionOutSwitching:(bool)switching completion:(void (^)(void))completion;
@@ -37,7 +37,7 @@ typedef enum
 - (instancetype)editAdjustmentsWithPreset:(TGMediaVideoConversionPreset)preset maxDuration:(NSTimeInterval)maxDuration;
 - (instancetype)editAdjustmentsWithPreset:(TGMediaVideoConversionPreset)preset videoStartValue:(NSTimeInterval)videoStartValue trimStartValue:(NSTimeInterval)trimStartValue trimEndValue:(NSTimeInterval)trimEndValue;
 + (instancetype)editAdjustmentsWithOriginalSize:(CGSize)originalSize preset:(TGMediaVideoConversionPreset)preset;
-+ (instancetype)editAdjustmentsWithPhotoEditorValues:(PGPhotoEditorValues *)values;
++ (instancetype)editAdjustmentsWithPhotoEditorValues:(PGPhotoEditorValues *)values preset:(TGMediaVideoConversionPreset)preset;
 + (instancetype)editAdjustmentsWithDictionary:(NSDictionary *)dictionary;
 
 + (instancetype)editAdjustmentsWithOriginalSize:(CGSize)originalSize
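
The header hunks above change every avatar-video callback and completion method from passing an NSURL to passing an AVAsset. A minimal sketch of how a caller can bridge between the two representations, assuming the video is file-backed; the helper names are illustrative and not part of the diff:

#import <AVFoundation/AVFoundation.h>

// Wrap a local file URL into the AVAsset the callbacks now expect,
// as the finishedWithVideo hunk further below does.
static AVAsset *AssetForLocalVideoURL(NSURL *url)
{
    return [[AVURLAsset alloc] initWithURL:url options:nil];
}

// Recover a file URL from a file-backed asset, as the camera-controller
// hunk further below does before forwarding the result.
static NSURL *FileURLForAsset(AVAsset *asset)
{
    if ([asset isKindOfClass:[AVURLAsset class]]) {
        return [(AVURLAsset *)asset URL];
    }
    return nil;
}
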
@ -898,7 +898,7 @@ const NSUInteger TGAttachmentDisplayedAssetLimit = 500;
|
|||||||
};
|
};
|
||||||
|
|
||||||
__weak TGPhotoEditorController *weakController = controller;
|
__weak TGPhotoEditorController *weakController = controller;
|
||||||
controller.didFinishEditing = ^(__unused id<TGMediaEditAdjustments> adjustments, UIImage *resultImage, __unused UIImage *thumbnailImage, __unused bool hasChanges)
|
controller.didFinishEditing = ^(id<TGMediaEditAdjustments> adjustments, UIImage *resultImage, __unused UIImage *thumbnailImage, __unused bool hasChanges)
|
||||||
{
|
{
|
||||||
if (!hasChanges)
|
if (!hasChanges)
|
||||||
return;
|
return;
|
||||||
@ -911,10 +911,36 @@ const NSUInteger TGAttachmentDisplayedAssetLimit = 500;
|
|||||||
if (strongController == nil)
|
if (strongController == nil)
|
||||||
return;
|
return;
|
||||||
|
|
||||||
if (strongSelf.avatarCompletionBlock != nil)
|
if (adjustments.paintingData.hasAnimation) {
|
||||||
strongSelf.avatarCompletionBlock(resultImage);
|
TGVideoEditAdjustments *videoAdjustments = adjustments;
|
||||||
|
if ([videoAdjustments isKindOfClass:[PGPhotoEditorValues class]]) {
|
||||||
|
videoAdjustments = [TGVideoEditAdjustments editAdjustmentsWithPhotoEditorValues:(PGPhotoEditorValues *)adjustments preset:TGMediaVideoConversionPresetProfileVeryHigh];
|
||||||
|
}
|
||||||
|
|
||||||
|
NSString *filePath = [NSTemporaryDirectory() stringByAppendingPathComponent:[[NSString alloc] initWithFormat:@"gifvideo_%x.jpg", (int)arc4random()]];
|
||||||
|
NSData *data = UIImageJPEGRepresentation(resultImage, 0.8);
|
||||||
|
[data writeToFile:filePath atomically:true];
|
||||||
|
|
||||||
|
UIImage *previewImage = resultImage;
|
||||||
|
if ([adjustments cropAppliedForAvatar:false] || adjustments.hasPainting || adjustments.toolsApplied)
|
||||||
|
{
|
||||||
|
UIImage *paintingImage = adjustments.paintingData.stillImage;
|
||||||
|
if (paintingImage == nil) {
|
||||||
|
paintingImage = adjustments.paintingData.image;
|
||||||
|
}
|
||||||
|
UIImage *thumbnailImage = TGPhotoEditorVideoExtCrop(resultImage, paintingImage, adjustments.cropOrientation, adjustments.cropRotation, adjustments.cropRect, adjustments.cropMirrored, TGScaleToFill(asset.dimensions, CGSizeMake(800, 800)), adjustments.originalSize, true, true, true);
|
||||||
|
if (thumbnailImage != nil) {
|
||||||
|
previewImage = thumbnailImage;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (strongSelf.avatarVideoCompletionBlock != nil)
|
||||||
|
strongSelf.avatarVideoCompletionBlock(previewImage, [NSURL fileURLWithPath:filePath], videoAdjustments);
|
||||||
|
} else {
|
||||||
|
if (strongSelf.avatarCompletionBlock != nil)
|
||||||
|
strongSelf.avatarCompletionBlock(resultImage);
|
||||||
|
}
|
||||||
};
|
};
|
||||||
controller.didFinishEditingVideo = ^(NSURL *url, id<TGMediaEditAdjustments> adjustments, UIImage *resultImage, UIImage *thumbnailImage, bool hasChanges) {
|
controller.didFinishEditingVideo = ^(AVAsset *asset, id<TGMediaEditAdjustments> adjustments, UIImage *resultImage, UIImage *thumbnailImage, bool hasChanges) {
|
||||||
if (!hasChanges)
|
if (!hasChanges)
|
||||||
return;
|
return;
|
||||||
|
|
||||||
@ -927,7 +953,7 @@ const NSUInteger TGAttachmentDisplayedAssetLimit = 500;
|
|||||||
return;
|
return;
|
||||||
|
|
||||||
if (strongSelf.avatarVideoCompletionBlock != nil)
|
if (strongSelf.avatarVideoCompletionBlock != nil)
|
||||||
strongSelf.avatarVideoCompletionBlock(resultImage, url, adjustments);
|
strongSelf.avatarVideoCompletionBlock(resultImage, asset, adjustments);
|
||||||
};
|
};
|
||||||
controller.requestThumbnailImage = ^(id<TGMediaEditableItem> editableItem)
|
controller.requestThumbnailImage = ^(id<TGMediaEditableItem> editableItem)
|
||||||
{
|
{
|
||||||
|
@ -1768,7 +1768,7 @@ static CGPoint TGCameraControllerClampPointToScreenSize(__unused id self, __unus
|
|||||||
});
|
});
|
||||||
};
|
};
|
||||||
|
|
||||||
controller.didFinishEditingVideo = ^(NSURL *url, id<TGMediaEditAdjustments> adjustments, UIImage *resultImage, UIImage *thumbnailImage, bool hasChanges) {
|
controller.didFinishEditingVideo = ^(AVAsset *asset, id<TGMediaEditAdjustments> adjustments, UIImage *resultImage, UIImage *thumbnailImage, bool hasChanges) {
|
||||||
if (!hasChanges)
|
if (!hasChanges)
|
||||||
return;
|
return;
|
||||||
|
|
||||||
@ -1779,7 +1779,7 @@ static CGPoint TGCameraControllerClampPointToScreenSize(__unused id self, __unus
|
|||||||
TGDispatchOnMainThread(^
|
TGDispatchOnMainThread(^
|
||||||
{
|
{
|
||||||
if (strongSelf.finishedWithVideo != nil)
|
if (strongSelf.finishedWithVideo != nil)
|
||||||
strongSelf.finishedWithVideo(nil, url, resultImage, 0, CGSizeZero, adjustments, nil, nil, nil, nil);
|
strongSelf.finishedWithVideo(nil, [(AVURLAsset *)asset URL], resultImage, 0, CGSizeZero, adjustments, nil, nil, nil, nil);
|
||||||
|
|
||||||
__strong TGPhotoEditorController *strongController = weakController;
|
__strong TGPhotoEditorController *strongController = weakController;
|
||||||
if (strongController != nil)
|
if (strongController != nil)
|
||||||
@ -2592,7 +2592,7 @@ static CGPoint TGCameraControllerClampPointToScreenSize(__unused id self, __unus
|
|||||||
if (animated) {
|
if (animated) {
|
||||||
dict[@"isAnimation"] = @true;
|
dict[@"isAnimation"] = @true;
|
||||||
if ([adjustments isKindOfClass:[PGPhotoEditorValues class]]) {
|
if ([adjustments isKindOfClass:[PGPhotoEditorValues class]]) {
|
||||||
dict[@"adjustments"] = [TGVideoEditAdjustments editAdjustmentsWithPhotoEditorValues:(PGPhotoEditorValues *)adjustments];
|
dict[@"adjustments"] = [TGVideoEditAdjustments editAdjustmentsWithPhotoEditorValues:(PGPhotoEditorValues *)adjustments preset:TGMediaVideoConversionPresetAnimation];
|
||||||
} else {
|
} else {
|
||||||
dict[@"adjustments"] = adjustments;
|
dict[@"adjustments"] = adjustments;
|
||||||
}
|
}
|
||||||
|
@ -566,10 +566,10 @@
|
|||||||
self.avatarCompletionBlock(image);
|
self.avatarCompletionBlock(image);
|
||||||
}
|
}
|
||||||
|
|
||||||
- (void)completeWithAvatarVideo:(NSURL *)url adjustments:(TGVideoEditAdjustments *)adjustments image:(UIImage *)image
|
- (void)completeWithAvatarVideo:(AVAsset *)asset adjustments:(TGVideoEditAdjustments *)adjustments image:(UIImage *)image
|
||||||
{
|
{
|
||||||
if (self.avatarVideoCompletionBlock != nil)
|
if (self.avatarVideoCompletionBlock != nil)
|
||||||
self.avatarVideoCompletionBlock(image, url, adjustments);
|
self.avatarVideoCompletionBlock(image, asset, adjustments);
|
||||||
}
|
}
|
||||||
|
|
||||||
- (void)completeWithCurrentItem:(TGMediaAsset *)currentItem silentPosting:(bool)silentPosting scheduleTime:(int32_t)scheduleTime
|
- (void)completeWithCurrentItem:(TGMediaAsset *)currentItem silentPosting:(bool)silentPosting scheduleTime:(int32_t)scheduleTime
|
||||||
@ -904,7 +904,7 @@
|
|||||||
if (animated) {
|
if (animated) {
|
||||||
dict[@"isAnimation"] = @true;
|
dict[@"isAnimation"] = @true;
|
||||||
if ([adjustments isKindOfClass:[PGPhotoEditorValues class]]) {
|
if ([adjustments isKindOfClass:[PGPhotoEditorValues class]]) {
|
||||||
dict[@"adjustments"] = [TGVideoEditAdjustments editAdjustmentsWithPhotoEditorValues:(PGPhotoEditorValues *)adjustments];
|
dict[@"adjustments"] = [TGVideoEditAdjustments editAdjustmentsWithPhotoEditorValues:(PGPhotoEditorValues *)adjustments preset:TGMediaVideoConversionPresetAnimation];
|
||||||
} else {
|
} else {
|
||||||
dict[@"adjustments"] = adjustments;
|
dict[@"adjustments"] = adjustments;
|
||||||
}
|
}
|
||||||
|
@ -406,6 +406,7 @@
|
|||||||
}
|
}
|
||||||
|
|
||||||
TGPhotoEditorController *controller = [[TGPhotoEditorController alloc] initWithContext:_context item:editableItem intent:intent adjustments:nil caption:nil screenImage:thumbnailImage availableTabs:[TGPhotoEditorController defaultTabsForAvatarIntent] selectedTab:TGPhotoEditorCropTab];
|
TGPhotoEditorController *controller = [[TGPhotoEditorController alloc] initWithContext:_context item:editableItem intent:intent adjustments:nil caption:nil screenImage:thumbnailImage availableTabs:[TGPhotoEditorController defaultTabsForAvatarIntent] selectedTab:TGPhotoEditorCropTab];
|
||||||
|
controller.stickersContext = self.stickersContext;
|
||||||
controller.editingContext = self.editingContext;
|
controller.editingContext = self.editingContext;
|
||||||
controller.didFinishRenderingFullSizeImage = ^(UIImage *resultImage)
|
controller.didFinishRenderingFullSizeImage = ^(UIImage *resultImage)
|
||||||
{
|
{
|
||||||
@ -415,7 +416,7 @@
|
|||||||
|
|
||||||
[[strongSelf->_assetsLibrary saveAssetWithImage:resultImage] startWithNext:nil];
|
[[strongSelf->_assetsLibrary saveAssetWithImage:resultImage] startWithNext:nil];
|
||||||
};
|
};
|
||||||
controller.didFinishEditing = ^(__unused id<TGMediaEditAdjustments> adjustments, UIImage *resultImage, __unused UIImage *thumbnailImage, bool hasChanges)
|
controller.didFinishEditing = ^(id<TGMediaEditAdjustments> adjustments, UIImage *resultImage, __unused UIImage *thumbnailImage, bool hasChanges)
|
||||||
{
|
{
|
||||||
if (!hasChanges)
|
if (!hasChanges)
|
||||||
return;
|
return;
|
||||||
@ -424,9 +425,34 @@
|
|||||||
if (strongSelf == nil)
|
if (strongSelf == nil)
|
||||||
return;
|
return;
|
||||||
|
|
||||||
[(TGMediaAssetsController *)strongSelf.navigationController completeWithAvatarImage:resultImage];
|
if (adjustments.paintingData.hasAnimation) {
|
||||||
|
TGVideoEditAdjustments *videoAdjustments = adjustments;
|
||||||
|
if ([videoAdjustments isKindOfClass:[PGPhotoEditorValues class]]) {
|
||||||
|
videoAdjustments = [TGVideoEditAdjustments editAdjustmentsWithPhotoEditorValues:(PGPhotoEditorValues *)adjustments preset:TGMediaVideoConversionPresetProfileVeryHigh];
|
||||||
|
}
|
||||||
|
|
||||||
|
NSString *filePath = [NSTemporaryDirectory() stringByAppendingPathComponent:[[NSString alloc] initWithFormat:@"gifvideo_%x.jpg", (int)arc4random()]];
|
||||||
|
NSData *data = UIImageJPEGRepresentation(resultImage, 0.8);
|
||||||
|
[data writeToFile:filePath atomically:true];
|
||||||
|
|
||||||
|
UIImage *previewImage = resultImage;
|
||||||
|
if ([adjustments cropAppliedForAvatar:false] || adjustments.hasPainting || adjustments.toolsApplied)
|
||||||
|
{
|
||||||
|
UIImage *paintingImage = adjustments.paintingData.stillImage;
|
||||||
|
if (paintingImage == nil) {
|
||||||
|
paintingImage = adjustments.paintingData.image;
|
||||||
|
}
|
||||||
|
UIImage *thumbnailImage = TGPhotoEditorVideoExtCrop(resultImage, paintingImage, adjustments.cropOrientation, adjustments.cropRotation, adjustments.cropRect, adjustments.cropMirrored, TGScaleToFill(asset.dimensions, CGSizeMake(800, 800)), adjustments.originalSize, true, true, true);
|
||||||
|
if (thumbnailImage != nil) {
|
||||||
|
previewImage = thumbnailImage;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
[(TGMediaAssetsController *)strongSelf.navigationController completeWithAvatarVideo:[NSURL fileURLWithPath:filePath] adjustments:videoAdjustments image:previewImage];
|
||||||
|
} else {
|
||||||
|
[(TGMediaAssetsController *)strongSelf.navigationController completeWithAvatarImage:resultImage];
|
||||||
|
}
|
||||||
};
|
};
|
||||||
controller.didFinishEditingVideo = ^(NSURL *url, id<TGMediaEditAdjustments> adjustments, UIImage *resultImage, UIImage *thumbnailImage, bool hasChanges) {
|
controller.didFinishEditingVideo = ^(AVAsset *asset, id<TGMediaEditAdjustments> adjustments, UIImage *resultImage, UIImage *thumbnailImage, bool hasChanges) {
|
||||||
if (!hasChanges)
|
if (!hasChanges)
|
||||||
return;
|
return;
|
||||||
|
|
||||||
@ -434,7 +460,7 @@
|
|||||||
if (strongSelf == nil)
|
if (strongSelf == nil)
|
||||||
return;
|
return;
|
||||||
|
|
||||||
[(TGMediaAssetsController *)strongSelf.navigationController completeWithAvatarVideo:url adjustments:adjustments image:resultImage];
|
[(TGMediaAssetsController *)strongSelf.navigationController completeWithAvatarVideo:asset adjustments:adjustments image:resultImage];
|
||||||
};
|
};
|
||||||
controller.requestThumbnailImage = ^(id<TGMediaEditableItem> editableItem)
|
controller.requestThumbnailImage = ^(id<TGMediaEditableItem> editableItem)
|
||||||
{
|
{
|
||||||
|
@ -127,7 +127,7 @@
|
|||||||
|
|
||||||
[strongController dismissAnimated:false];
|
[strongController dismissAnimated:false];
|
||||||
};
|
};
|
||||||
carouselItem.avatarVideoCompletionBlock = ^(UIImage *image, NSURL *url, TGVideoEditAdjustments *adjustments) {
|
carouselItem.avatarVideoCompletionBlock = ^(UIImage *image, AVAsset *asset, TGVideoEditAdjustments *adjustments) {
|
||||||
__strong TGMediaAvatarMenuMixin *strongSelf = weakSelf;
|
__strong TGMediaAvatarMenuMixin *strongSelf = weakSelf;
|
||||||
if (strongSelf == nil)
|
if (strongSelf == nil)
|
||||||
return;
|
return;
|
||||||
@ -137,7 +137,7 @@
|
|||||||
return;
|
return;
|
||||||
|
|
||||||
if (strongSelf.didFinishWithVideo != nil)
|
if (strongSelf.didFinishWithVideo != nil)
|
||||||
strongSelf.didFinishWithVideo(image, url, adjustments);
|
strongSelf.didFinishWithVideo(image, asset, adjustments);
|
||||||
|
|
||||||
[strongController dismissAnimated:false];
|
[strongController dismissAnimated:false];
|
||||||
};
|
};
|
||||||
@ -285,7 +285,7 @@
|
|||||||
controller = [[TGCameraController alloc] initWithContext:[windowManager context] saveEditedPhotos:_saveEditedPhotos saveCapturedMedia:_saveCapturedMedia camera:cameraView.previewView.camera previewView:cameraView.previewView intent:_signup ? TGCameraControllerSignupAvatarIntent : TGCameraControllerAvatarIntent];
|
controller = [[TGCameraController alloc] initWithContext:[windowManager context] saveEditedPhotos:_saveEditedPhotos saveCapturedMedia:_saveCapturedMedia camera:cameraView.previewView.camera previewView:cameraView.previewView intent:_signup ? TGCameraControllerSignupAvatarIntent : TGCameraControllerAvatarIntent];
|
||||||
else
|
else
|
||||||
controller = [[TGCameraController alloc] initWithContext:[windowManager context] saveEditedPhotos:_saveEditedPhotos saveCapturedMedia:_saveCapturedMedia intent:_signup ? TGCameraControllerSignupAvatarIntent : TGCameraControllerAvatarIntent];
|
controller = [[TGCameraController alloc] initWithContext:[windowManager context] saveEditedPhotos:_saveEditedPhotos saveCapturedMedia:_saveCapturedMedia intent:_signup ? TGCameraControllerSignupAvatarIntent : TGCameraControllerAvatarIntent];
|
||||||
|
controller.stickersContext = _stickersContext;
|
||||||
controller.shouldStoreCapturedAssets = true;
|
controller.shouldStoreCapturedAssets = true;
|
||||||
|
|
||||||
TGCameraControllerWindow *controllerWindow = [[TGCameraControllerWindow alloc] initWithManager:windowManager parentController:_parentController contentController:controller];
|
TGCameraControllerWindow *controllerWindow = [[TGCameraControllerWindow alloc] initWithManager:windowManager parentController:_parentController contentController:controller];
|
||||||
@ -355,13 +355,13 @@
|
|||||||
[menuController dismissAnimated:false];
|
[menuController dismissAnimated:false];
|
||||||
};
|
};
|
||||||
|
|
||||||
controller.finishedWithVideo = ^(__unused TGOverlayController *controller, NSURL *videoURL, UIImage *previewImage, __unused NSTimeInterval duration, __unused CGSize dimensions, TGVideoEditAdjustments *adjustments, __unused NSString *caption, __unused NSArray *entities, __unused NSArray *stickers, __unused NSNumber *timer){
|
controller.finishedWithVideo = ^(__unused TGOverlayController *controller, NSURL *url, UIImage *previewImage, __unused NSTimeInterval duration, __unused CGSize dimensions, TGVideoEditAdjustments *adjustments, __unused NSString *caption, __unused NSArray *entities, __unused NSArray *stickers, __unused NSNumber *timer){
|
||||||
__strong TGMediaAvatarMenuMixin *strongSelf = weakSelf;
|
__strong TGMediaAvatarMenuMixin *strongSelf = weakSelf;
|
||||||
if (strongSelf == nil)
|
if (strongSelf == nil)
|
||||||
return;
|
return;
|
||||||
|
|
||||||
if (strongSelf.didFinishWithVideo != nil)
|
if (strongSelf.didFinishWithVideo != nil)
|
||||||
strongSelf.didFinishWithVideo(previewImage, videoURL, adjustments);
|
strongSelf.didFinishWithVideo(previewImage, [[AVURLAsset alloc] initWithURL:url options:nil], adjustments);
|
||||||
|
|
||||||
[menuController dismissAnimated:false];
|
[menuController dismissAnimated:false];
|
||||||
};
|
};
|
||||||
@ -459,6 +459,7 @@
|
|||||||
|
|
||||||
TGMediaAssetsController *controller = [TGMediaAssetsController controllerWithContext:context assetGroup:group intent:strongSelf->_signup ? TGMediaAssetsControllerSetSignupProfilePhotoIntent : TGMediaAssetsControllerSetProfilePhotoIntent recipientName:nil saveEditedPhotos:strongSelf->_saveEditedPhotos allowGrouping:false selectionLimit:10];
|
TGMediaAssetsController *controller = [TGMediaAssetsController controllerWithContext:context assetGroup:group intent:strongSelf->_signup ? TGMediaAssetsControllerSetSignupProfilePhotoIntent : TGMediaAssetsControllerSetProfilePhotoIntent recipientName:nil saveEditedPhotos:strongSelf->_saveEditedPhotos allowGrouping:false selectionLimit:10];
|
||||||
__weak TGMediaAssetsController *weakController = controller;
|
__weak TGMediaAssetsController *weakController = controller;
|
||||||
|
controller.stickersContext = _stickersContext;
|
||||||
controller.avatarCompletionBlock = ^(UIImage *resultImage)
|
controller.avatarCompletionBlock = ^(UIImage *resultImage)
|
||||||
{
|
{
|
||||||
__strong TGMediaAvatarMenuMixin *strongSelf = weakSelf;
|
__strong TGMediaAvatarMenuMixin *strongSelf = weakSelf;
|
||||||
@ -472,13 +473,13 @@
|
|||||||
if (strongController != nil && strongController.dismissalBlock != nil)
|
if (strongController != nil && strongController.dismissalBlock != nil)
|
||||||
strongController.dismissalBlock();
|
strongController.dismissalBlock();
|
||||||
};
|
};
|
||||||
controller.avatarVideoCompletionBlock = ^(UIImage *image, NSURL *url, TGVideoEditAdjustments *adjustments) {
|
controller.avatarVideoCompletionBlock = ^(UIImage *image, AVAsset *asset, TGVideoEditAdjustments *adjustments) {
|
||||||
__strong TGMediaAvatarMenuMixin *strongSelf = weakSelf;
|
__strong TGMediaAvatarMenuMixin *strongSelf = weakSelf;
|
||||||
if (strongSelf == nil)
|
if (strongSelf == nil)
|
||||||
return;
|
return;
|
||||||
|
|
||||||
if (strongSelf.didFinishWithVideo != nil)
|
if (strongSelf.didFinishWithVideo != nil)
|
||||||
strongSelf.didFinishWithVideo(image, url, adjustments);
|
strongSelf.didFinishWithVideo(image, asset, adjustments);
|
||||||
|
|
||||||
__strong TGMediaAssetsController *strongController = weakController;
|
__strong TGMediaAssetsController *strongController = weakController;
|
||||||
if (strongController != nil && strongController.dismissalBlock != nil)
|
if (strongController != nil && strongController.dismissalBlock != nil)
|
||||||
|
@ -800,7 +800,7 @@
|
|||||||
CGRect fittedCropRect = [TGPhotoPaintController fittedCropRect:cropRect originalSize:originalSize keepOriginalSize:false];
|
CGRect fittedCropRect = [TGPhotoPaintController fittedCropRect:cropRect originalSize:originalSize keepOriginalSize:false];
|
||||||
_contentWrapperView.frame = CGRectMake(0.0f, 0.0f, fittedContentSize.width, fittedContentSize.height);
|
_contentWrapperView.frame = CGRectMake(0.0f, 0.0f, fittedContentSize.width, fittedContentSize.height);
|
||||||
|
|
||||||
CGFloat contentScale = ratio;//_contentView.bounds.size.width / fittedCropRect.size.width;
|
CGFloat contentScale = ratio;
|
||||||
_contentWrapperView.transform = CGAffineTransformMakeScale(contentScale, contentScale);
|
_contentWrapperView.transform = CGAffineTransformMakeScale(contentScale, contentScale);
|
||||||
_contentWrapperView.frame = CGRectMake(0.0f, 0.0f, _contentView.bounds.size.width, _contentView.bounds.size.height);
|
_contentWrapperView.frame = CGRectMake(0.0f, 0.0f, _contentView.bounds.size.width, _contentView.bounds.size.height);
|
||||||
|
|
||||||
|
@ -240,6 +240,9 @@
|
|||||||
return;
|
return;
|
||||||
|
|
||||||
TGMediaVideoConversionPreset preset = TGMediaVideoConversionPresetAnimation;
|
TGMediaVideoConversionPreset preset = TGMediaVideoConversionPresetAnimation;
|
||||||
|
if (adjustments.preset == TGMediaVideoConversionPresetProfile || adjustments.preset == TGMediaVideoConversionPresetProfileHigh || adjustments.preset == TGMediaVideoConversionPresetProfileVeryHigh) {
|
||||||
|
preset = adjustments.preset;
|
||||||
|
}
|
||||||
|
|
||||||
NSError *error = nil;
|
NSError *error = nil;
|
||||||
|
|
||||||
@ -283,7 +286,8 @@
|
|||||||
if (watcher != nil)
|
if (watcher != nil)
|
||||||
liveUploadData = [watcher fileUpdated:true];
|
liveUploadData = [watcher fileUpdated:true];
|
||||||
|
|
||||||
contextResult = [TGMediaVideoConversionResult resultWithFileURL:outputUrl fileSize:0 duration:CMTimeGetSeconds(resultContext.timeRange.duration) dimensions:resultContext.dimensions coverImage:coverImage liveUploadData:liveUploadData];
|
NSUInteger fileSize = [[[NSFileManager defaultManager] attributesOfItemAtPath:outputUrl.path error:nil] fileSize];
|
||||||
|
contextResult = [TGMediaVideoConversionResult resultWithFileURL:outputUrl fileSize:fileSize duration:CMTimeGetSeconds(resultContext.timeRange.duration) dimensions:resultContext.dimensions coverImage:coverImage liveUploadData:liveUploadData];
|
||||||
return [resultContext finishedContext];
|
return [resultContext finishedContext];
|
||||||
}];
|
}];
|
||||||
|
|
||||||
|
@ -42,6 +42,8 @@ const CGFloat TGPhotoAvatarCropViewCurtainMargin = 200;
|
|||||||
|
|
||||||
CGFloat _currentDiameter;
|
CGFloat _currentDiameter;
|
||||||
|
|
||||||
|
UIView *_entitiesWrapperView;
|
||||||
|
|
||||||
__weak PGPhotoEditorView *_fullPreviewView;
|
__weak PGPhotoEditorView *_fullPreviewView;
|
||||||
__weak UIImageView *_fullPaintingView;
|
__weak UIImageView *_fullPaintingView;
|
||||||
__weak TGPhotoEntitiesContainerView *_fullEntitiesView;
|
__weak TGPhotoEntitiesContainerView *_fullEntitiesView;
|
||||||
@ -91,9 +93,16 @@ const CGFloat TGPhotoAvatarCropViewCurtainMargin = 200;
|
|||||||
_fullPaintingView.frame = _fullPreviewView.frame;
|
_fullPaintingView.frame = _fullPreviewView.frame;
|
||||||
[_wrapperView addSubview:_fullPaintingView];
|
[_wrapperView addSubview:_fullPaintingView];
|
||||||
|
|
||||||
|
_entitiesWrapperView = [[UIView alloc] init];
|
||||||
_fullEntitiesView = fullEntitiesView;
|
_fullEntitiesView = fullEntitiesView;
|
||||||
_fullEntitiesView.frame = _fullPreviewView.frame;
|
_fullEntitiesView.frame = CGRectMake(0.0, 0.0, _fullEntitiesView.frame.size.width, _fullEntitiesView.frame.size.height);
|
||||||
[_wrapperView addSubview:_fullEntitiesView];
|
_entitiesWrapperView.frame = _fullEntitiesView.frame;
|
||||||
|
|
||||||
|
CGFloat entitiesScale = _fullPreviewView.frame.size.width / _entitiesWrapperView.frame.size.width;
|
||||||
|
_entitiesWrapperView.transform = CGAffineTransformMakeScale(entitiesScale, entitiesScale);
|
||||||
|
_entitiesWrapperView.frame = _fullPreviewView.frame;
|
||||||
|
[_entitiesWrapperView addSubview:_fullEntitiesView];
|
||||||
|
[_wrapperView addSubview:_entitiesWrapperView];
|
||||||
|
|
||||||
_flashView = [[UIView alloc] init];
|
_flashView = [[UIView alloc] init];
|
||||||
_flashView.alpha = 0.0;
|
_flashView.alpha = 0.0;
|
||||||
@ -137,10 +146,19 @@ const CGFloat TGPhotoAvatarCropViewCurtainMargin = 200;
|
|||||||
|
|
||||||
UITapGestureRecognizer *tapRecognier = [[UITapGestureRecognizer alloc] initWithTarget:self action:@selector(handleTap:)];
|
UITapGestureRecognizer *tapRecognier = [[UITapGestureRecognizer alloc] initWithTarget:self action:@selector(handleTap:)];
|
||||||
[_wrapperView addGestureRecognizer:tapRecognier];
|
[_wrapperView addGestureRecognizer:tapRecognier];
|
||||||
|
|
||||||
|
_clipView = [[UIView alloc] init];
|
||||||
|
_clipView.clipsToBounds = true;
|
||||||
|
_clipView.userInteractionEnabled = false;
|
||||||
|
[self addSubview:_clipView];
|
||||||
}
|
}
|
||||||
return self;
|
return self;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
- (void)attachEntitiesView {
|
||||||
|
[_entitiesWrapperView addSubview:_fullEntitiesView];
|
||||||
|
}
|
||||||
|
|
||||||
- (void)dealloc
|
- (void)dealloc
|
||||||
{
|
{
|
||||||
_scrollView.delegate = nil;
|
_scrollView.delegate = nil;
|
||||||
@ -635,6 +653,8 @@ const CGFloat TGPhotoAvatarCropViewCurtainMargin = 200;
|
|||||||
{
|
{
|
||||||
[self _layoutOverlayViews];
|
[self _layoutOverlayViews];
|
||||||
|
|
||||||
|
_clipView.frame = self.bounds;
|
||||||
|
|
||||||
_flashView.frame = self.bounds;
|
_flashView.frame = self.bounds;
|
||||||
|
|
||||||
if (_scrollView.superview == nil)
|
if (_scrollView.superview == nil)
|
||||||
|
@ -16,6 +16,7 @@
|
|||||||
|
|
||||||
#import "TGMediaPickerGalleryVideoScrubber.h"
|
#import "TGMediaPickerGalleryVideoScrubber.h"
|
||||||
#import "TGModernGalleryVideoView.h"
|
#import "TGModernGalleryVideoView.h"
|
||||||
|
#import "TGPhotoEntitiesContainerView.h"
|
||||||
|
|
||||||
#import "TGPhotoPaintController.h"
|
#import "TGPhotoPaintController.h"
|
||||||
|
|
||||||
@ -257,6 +258,58 @@ const CGFloat TGPhotoAvatarPreviewLandscapePanelSize = TGPhotoAvatarPreviewPanel
|
|||||||
[self.view insertSubview:_transitionView belowSubview:_wrapperView];
|
[self.view insertSubview:_transitionView belowSubview:_wrapperView];
|
||||||
}
|
}
|
||||||
|
|
||||||
|
- (void)animateTransitionIn {
|
||||||
|
if (self.initialAppearance) {
|
||||||
|
[super animateTransitionIn];
|
||||||
|
return;
|
||||||
|
} else {
|
||||||
|
_animateScale = true;
|
||||||
|
|
||||||
|
[self transitEntities:_previewView];
|
||||||
|
|
||||||
|
[super animateTransitionIn];
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
+ (CGRect)fittedCropRect:(CGRect)cropRect originalSize:(CGSize)originalSize fitSize:(CGSize)fitSize {
|
||||||
|
CGSize fittedOriginalSize = TGScaleToSize(originalSize, fitSize);
|
||||||
|
CGFloat scale = fittedOriginalSize.width / originalSize.width;
|
||||||
|
|
||||||
|
CGSize size = fittedOriginalSize;
|
||||||
|
|
||||||
|
return CGRectMake(-cropRect.origin.x * scale, -cropRect.origin.y * scale, size.width, size.height);
|
||||||
|
}
|
||||||
|
|
||||||
|
- (void)transitEntities:(UIView *)parentView {
|
||||||
|
UIView *containerView = [[UIView alloc] init];
|
||||||
|
[parentView addSubview:containerView];
|
||||||
|
|
||||||
|
containerView.frame = CGRectMake(0.0, 0.0, _fullEntitiesView.frame.size.width, _fullEntitiesView.frame.size.height);
|
||||||
|
[containerView addSubview:_fullEntitiesView];
|
||||||
|
|
||||||
|
CGFloat paintingScale = _fullEntitiesView.frame.size.width / _photoEditor.originalSize.width;
|
||||||
|
_fullEntitiesView.frame = CGRectMake(-_photoEditor.cropRect.origin.x * paintingScale, -_photoEditor.cropRect.origin.y * paintingScale, _fullEntitiesView.frame.size.width, _fullEntitiesView.frame.size.height);
|
||||||
|
|
||||||
|
CGFloat cropScale = 1.0;
|
||||||
|
if (_photoEditor.originalSize.width > _photoEditor.originalSize.height) {
|
||||||
|
cropScale = _photoEditor.originalSize.height / _photoEditor.cropRect.size.height;
|
||||||
|
} else {
|
||||||
|
cropScale = _photoEditor.originalSize.width / _photoEditor.cropRect.size.width;
|
||||||
|
}
|
||||||
|
|
||||||
|
UIImageOrientation imageOrientation = _photoEditor.cropOrientation;
|
||||||
|
if ([parentView isKindOfClass:[TGPhotoEditorPreviewView class]])
|
||||||
|
imageOrientation = UIImageOrientationUp;
|
||||||
|
|
||||||
|
CGAffineTransform rotationTransform = CGAffineTransformMakeRotation(TGRotationForOrientation(imageOrientation));
|
||||||
|
if ([parentView isKindOfClass:[TGPhotoEditorPreviewView class]] && _photoEditor.cropMirrored) {
|
||||||
|
rotationTransform = CGAffineTransformMakeScale(-1.0, 1.0);
|
||||||
|
}
|
||||||
|
CGFloat scale = parentView.frame.size.width / _fullEntitiesView.frame.size.width;
|
||||||
|
containerView.transform = CGAffineTransformScale(rotationTransform, scale * cropScale, scale * cropScale);
|
||||||
|
containerView.frame = CGRectMake(0.0, 0.0, parentView.frame.size.width, parentView.frame.size.height);
|
||||||
|
}
|
||||||
|
|
||||||
- (void)transitionIn
|
- (void)transitionIn
|
||||||
{
|
{
|
||||||
if (_portraitToolsWrapperView.frame.size.height < FLT_EPSILON) {
|
if (_portraitToolsWrapperView.frame.size.height < FLT_EPSILON) {
|
||||||
@ -373,13 +426,22 @@ const CGFloat TGPhotoAvatarPreviewLandscapePanelSize = TGPhotoAvatarPreviewPanel
|
|||||||
|
|
||||||
[_cropView closeCurtains];
|
[_cropView closeCurtains];
|
||||||
|
|
||||||
|
[self transitEntities:_cropView.clipView];
|
||||||
|
|
||||||
|
CGAffineTransform initialTransform = _previewView.transform;
|
||||||
[UIView animateWithDuration:0.3f delay:0.0f options:UIViewAnimationOptionCurveEaseInOut | UIViewAnimationOptionLayoutSubviews animations:^
|
[UIView animateWithDuration:0.3f delay:0.0f options:UIViewAnimationOptionCurveEaseInOut | UIViewAnimationOptionLayoutSubviews animations:^
|
||||||
{
|
{
|
||||||
_previewView.frame = targetFrame;
|
CGFloat scale = targetFrame.size.width / _previewView.frame.size.width;
|
||||||
|
_previewView.center = CGPointMake(CGRectGetMidX(targetFrame), CGRectGetMidY(targetFrame));
|
||||||
|
_previewView.transform = CGAffineTransformScale(initialTransform, scale, scale);
|
||||||
|
|
||||||
_cropView.center = CGPointMake(CGRectGetMidX(targetCropViewFrame), CGRectGetMidY(targetCropViewFrame));
|
_cropView.center = CGPointMake(CGRectGetMidX(targetCropViewFrame), CGRectGetMidY(targetCropViewFrame));
|
||||||
_cropView.transform = CGAffineTransformMakeScale(targetCropViewScale, targetCropViewScale);
|
_cropView.transform = CGAffineTransformMakeScale(targetCropViewScale, targetCropViewScale);
|
||||||
} completion:^(__unused BOOL finished)
|
} completion:^(__unused BOOL finished)
|
||||||
{
|
{
|
||||||
|
_fullEntitiesView.frame = CGRectMake(0, 0, _fullEntitiesView.frame.size.width, _fullEntitiesView.frame.size.height);
|
||||||
|
_previewView.transform = initialTransform;
|
||||||
|
_previewView.frame = targetFrame;
|
||||||
[_cropView removeFromSuperview];
|
[_cropView removeFromSuperview];
|
||||||
_previewView.alpha = 1.0;
|
_previewView.alpha = 1.0;
|
||||||
if (self.finishedTransitionOut != nil)
|
if (self.finishedTransitionOut != nil)
|
||||||
@ -504,8 +566,13 @@ const CGFloat TGPhotoAvatarPreviewLandscapePanelSize = TGPhotoAvatarPreviewPanel
|
|||||||
{
|
{
|
||||||
_appeared = true;
|
_appeared = true;
|
||||||
|
|
||||||
|
if (!self.initialAppearance) {
|
||||||
|
[_fullEntitiesView.superview removeFromSuperview];
|
||||||
|
_fullEntitiesView.frame = CGRectMake(0, 0, _fullEntitiesView.frame.size.width, _fullEntitiesView.frame.size.height);
|
||||||
|
[_cropView attachEntitiesView];
|
||||||
|
}
|
||||||
|
|
||||||
if ([transitionView isKindOfClass:[TGPhotoEditorPreviewView class]]) {
|
if ([transitionView isKindOfClass:[TGPhotoEditorPreviewView class]]) {
|
||||||
|
|
||||||
} else {
|
} else {
|
||||||
[transitionView removeFromSuperview];
|
[transitionView removeFromSuperview];
|
||||||
}
|
}
|
||||||
|
@ -345,7 +345,9 @@
|
|||||||
_fullPaintingView.frame = _fullPreviewView.frame;
|
_fullPaintingView.frame = _fullPreviewView.frame;
|
||||||
|
|
||||||
_fullEntitiesView = [[TGPhotoEntitiesContainerView alloc] init];
|
_fullEntitiesView = [[TGPhotoEntitiesContainerView alloc] init];
|
||||||
_fullEntitiesView.frame = _fullPreviewView.frame;
|
_fullEntitiesView.userInteractionEnabled = false;
|
||||||
|
CGRect rect = [TGPhotoPaintController fittedCropRect:_photoEditor.cropRect originalSize:_photoEditor.originalSize keepOriginalSize:true];
|
||||||
|
_fullEntitiesView.frame = CGRectMake(0, 0, rect.size.width, rect.size.height);
|
||||||
}
|
}
|
||||||
|
|
||||||
_dotMarkerView = [[UIImageView alloc] initWithImage:TGCircleImage(7.0, [TGPhotoEditorInterfaceAssets accentColor])];
|
_dotMarkerView = [[UIImageView alloc] initWithImage:TGCircleImage(7.0, [TGPhotoEditorInterfaceAssets accentColor])];
|
||||||
@ -1060,8 +1062,13 @@
|
|||||||
UIImage *image = result[@"image"];
|
UIImage *image = result[@"image"];
|
||||||
UIImage *thumbnailImage = result[@"thumbnail"];
|
UIImage *thumbnailImage = result[@"thumbnail"];
|
||||||
|
|
||||||
if (avatar && completion != nil)
|
if (avatar && image.size.width < 150.0) {
|
||||||
|
image = TGScaleImageToPixelSize(image, CGSizeMake(150.0, 150.0));
|
||||||
|
}
|
||||||
|
|
||||||
|
if (avatar && completion != nil) {
|
||||||
completion(image);
|
completion(image);
|
||||||
|
}
|
||||||
|
|
||||||
if (!saveOnly && didFinishEditing != nil)
|
if (!saveOnly && didFinishEditing != nil)
|
||||||
didFinishEditing(editorValues, image, thumbnailImage, true);
|
didFinishEditing(editorValues, image, thumbnailImage, true);
|
||||||
@ -1164,6 +1171,7 @@
|
|||||||
UIView *snapshotView = nil;
|
UIView *snapshotView = nil;
|
||||||
|
|
||||||
TGPhotoEditorTabController *currentController = _currentTabController;
|
TGPhotoEditorTabController *currentController = _currentTabController;
|
||||||
|
TGPhotoEditorTab switchingFromTab = TGPhotoEditorNoneTab;
|
||||||
if (currentController != nil)
|
if (currentController != nil)
|
||||||
{
|
{
|
||||||
if (![currentController isDismissAllowed])
|
if (![currentController isDismissAllowed])
|
||||||
@ -1171,13 +1179,18 @@
|
|||||||
|
|
||||||
[self savePaintingData];
|
[self savePaintingData];
|
||||||
|
|
||||||
|
bool resetTransform = false;
|
||||||
|
if ([self presentedForAvatarCreation] && tab == TGPhotoEditorCropTab && [currentController isKindOfClass:[TGPhotoPaintController class]]) {
|
||||||
|
resetTransform = true;
|
||||||
|
}
|
||||||
|
|
||||||
currentController.switchingToTab = tab;
|
currentController.switchingToTab = tab;
|
||||||
[currentController transitionOutSwitching:true completion:^
|
[currentController transitionOutSwitching:true completion:^
|
||||||
{
|
{
|
||||||
[currentController removeFromParentViewController];
|
[currentController removeFromParentViewController];
|
||||||
[currentController.view removeFromSuperview];
|
[currentController.view removeFromSuperview];
|
||||||
|
|
||||||
if ([self presentedForAvatarCreation] && tab == TGPhotoEditorCropTab) {
|
if (resetTransform) {
|
||||||
_previewView.transform = CGAffineTransformIdentity;
|
_previewView.transform = CGAffineTransformIdentity;
|
||||||
}
|
}
|
||||||
}];
|
}];
|
||||||
@ -1193,6 +1206,9 @@
|
|||||||
{
|
{
|
||||||
_backgroundView.alpha = 0.0f;
|
_backgroundView.alpha = 0.0f;
|
||||||
} completion:nil];
|
} completion:nil];
|
||||||
|
switchingFromTab = TGPhotoEditorCropTab;
|
||||||
|
} else if ([currentController isKindOfClass:[TGPhotoToolsController class]]) {
|
||||||
|
switchingFromTab = TGPhotoEditorToolsTab;
|
||||||
}
|
}
|
||||||
|
|
||||||
isInitialAppearance = false;
|
isInitialAppearance = false;
|
||||||
@ -1266,6 +1282,7 @@
|
|||||||
cropController.fullPreviewView = _fullPreviewView;
|
cropController.fullPreviewView = _fullPreviewView;
|
||||||
cropController.fullPaintingView = _fullPaintingView;
|
cropController.fullPaintingView = _fullPaintingView;
|
||||||
cropController.fullEntitiesView = _fullEntitiesView;
|
cropController.fullEntitiesView = _fullEntitiesView;
|
||||||
|
cropController.fullEntitiesView.userInteractionEnabled = false;
|
||||||
cropController.fromCamera = [self presentedFromCamera];
|
cropController.fromCamera = [self presentedFromCamera];
|
||||||
cropController.skipTransitionIn = skipInitialTransition;
|
cropController.skipTransitionIn = skipInitialTransition;
|
||||||
if (snapshotImage != nil)
|
if (snapshotImage != nil)
|
||||||
@ -1273,7 +1290,7 @@
|
|||||||
cropController.toolbarLandscapeSize = TGPhotoEditorToolbarSize;
|
cropController.toolbarLandscapeSize = TGPhotoEditorToolbarSize;
|
||||||
cropController.controlVideoPlayback = ^(bool play) {
|
cropController.controlVideoPlayback = ^(bool play) {
|
||||||
__strong TGPhotoEditorController *strongSelf = weakSelf;
|
__strong TGPhotoEditorController *strongSelf = weakSelf;
|
||||||
if (strongSelf == nil)
|
if (strongSelf == nil || strongSelf->_progressVisible)
|
||||||
return;
|
return;
|
||||||
if (play) {
|
if (play) {
|
||||||
[strongSelf startVideoPlayback:false];
|
[strongSelf startVideoPlayback:false];
|
||||||
@ -1289,7 +1306,7 @@
|
|||||||
};
|
};
|
||||||
cropController.togglePlayback = ^{
|
cropController.togglePlayback = ^{
|
||||||
__strong TGPhotoEditorController *strongSelf = weakSelf;
|
__strong TGPhotoEditorController *strongSelf = weakSelf;
|
||||||
if (strongSelf == nil || !strongSelf->_item.isVideo)
|
if (strongSelf == nil || !strongSelf->_item.isVideo || strongSelf->_progressVisible)
|
||||||
return;
|
return;
|
||||||
|
|
||||||
if (strongSelf->_isPlaying) {
|
if (strongSelf->_isPlaying) {
|
||||||
@ -1534,7 +1551,7 @@
|
|||||||
|
|
||||||
case TGPhotoEditorToolsTab:
|
case TGPhotoEditorToolsTab:
|
||||||
{
|
{
|
||||||
TGPhotoToolsController *toolsController = [[TGPhotoToolsController alloc] initWithContext:_context photoEditor:_photoEditor previewView:_previewView];
|
TGPhotoToolsController *toolsController = [[TGPhotoToolsController alloc] initWithContext:_context photoEditor:_photoEditor previewView:_previewView entitiesView:_fullEntitiesView];
|
||||||
toolsController.toolbarLandscapeSize = TGPhotoEditorToolbarSize;
|
toolsController.toolbarLandscapeSize = TGPhotoEditorToolbarSize;
|
||||||
toolsController.beginTransitionIn = ^UIView *(CGRect *referenceFrame, UIView **parentView, bool *noTransitionView)
|
toolsController.beginTransitionIn = ^UIView *(CGRect *referenceFrame, UIView **parentView, bool *noTransitionView)
|
||||||
{
|
{
|
||||||
@ -1605,6 +1622,7 @@
|
|||||||
_currentTabController = controller;
|
_currentTabController = controller;
|
||||||
_currentTabController.item = _item;
|
_currentTabController.item = _item;
|
||||||
_currentTabController.intent = _intent;
|
_currentTabController.intent = _intent;
|
||||||
|
_currentTabController.switchingFromTab = switchingFromTab;
|
||||||
_currentTabController.initialAppearance = isInitialAppearance;
|
_currentTabController.initialAppearance = isInitialAppearance;
|
||||||
|
|
||||||
if (![_currentTabController isKindOfClass:[TGPhotoPaintController class]])
|
if (![_currentTabController isKindOfClass:[TGPhotoPaintController class]])
|
||||||
@ -1981,7 +1999,7 @@
|
|||||||
|
|
||||||
TGDispatchOnMainThread(^{
|
TGDispatchOnMainThread(^{
|
||||||
if (self.didFinishEditingVideo != nil)
|
if (self.didFinishEditingVideo != nil)
|
||||||
self.didFinishEditingVideo(asset.URL, [adjustments editAdjustmentsWithPreset:preset videoStartValue:videoStartValue trimStartValue:trimStartValue trimEndValue:trimEndValue], fullImage, nil, true);
|
self.didFinishEditingVideo(asset, [adjustments editAdjustmentsWithPreset:preset videoStartValue:videoStartValue trimStartValue:trimStartValue trimEndValue:trimEndValue], fullImage, nil, true);
|
||||||
|
|
||||||
[self dismissAnimated:true];
|
[self dismissAnimated:true];
|
||||||
});
|
});
|
||||||
|
@@ -169,45 +169,35 @@ const CGFloat TGPhotoEditorToolbarSize = 49.0f;

 _transitionInProgress = true;

+CGAffineTransform initialTransform = _transitionView.transform;
 [UIView animateWithDuration:0.3f delay:0.0f options:UIViewAnimationOptionCurveEaseInOut | UIViewAnimationOptionLayoutSubviews animations:^
 {
-_transitionView.frame = _transitionTargetFrame;
+if (_animateScale) {
+CGFloat scale = _transitionTargetFrame.size.width / _transitionView.frame.size.width;
+_transitionView.center = CGPointMake(CGRectGetMidX(_transitionTargetFrame), CGRectGetMidY(_transitionTargetFrame));
+_transitionView.transform = CGAffineTransformScale(initialTransform, scale, scale);
+} else {
+_transitionView.frame = _transitionTargetFrame;
+}
 } completion:^(BOOL finished) {
 _transitionInProgress = false;

 UIView *transitionView = _transitionView;
 _transitionView = nil;

-if (self.finishedTransitionIn != nil)
-{
-self.finishedTransitionIn();
-self.finishedTransitionIn = nil;
-}
-
-[self _finishedTransitionInWithView:transitionView];
+if (_animateScale) {
+_transitionView.transform = initialTransform;
+_transitionView.frame = _transitionTargetFrame;
+}
+
+if (self.finishedTransitionIn != nil)
+{
+self.finishedTransitionIn();
+self.finishedTransitionIn = nil;
+}
+
+[self _finishedTransitionInWithView:transitionView];
 }];

-// POPSpringAnimation *animation = [TGPhotoEditorAnimation prepareTransitionAnimationForPropertyNamed:kPOPViewFrame];
-// if (self.transitionSpeed > FLT_EPSILON)
-// animation.springSpeed = self.transitionSpeed;
-// animation.fromValue = [NSValue valueWithCGRect:_transitionView.frame];
-// animation.toValue = [NSValue valueWithCGRect:_transitionTargetFrame];
-// animation.completionBlock = ^(__unused POPAnimation *animation, __unused BOOL finished)
-// {
-// _transitionInProgress = false;
-//
-// UIView *transitionView = _transitionView;
-// _transitionView = nil;
-//
-// if (self.finishedTransitionIn != nil)
-// {
-// self.finishedTransitionIn();
-// self.finishedTransitionIn = nil;
-// }
-//
-// [self _finishedTransitionInWithView:transitionView];
-// };
-// [_transitionView pop_addAnimation:animation forKey:@"frame"];
 }

 - (void)prepareForCustomTransitionOut
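Note: the hunk above switches the transition-in animation from assigning `frame` to driving `center` plus a scaled `transform` whenever `_animateScale` is set, which is the usual way to animate a view that already carries a non-identity transform. A minimal Swift/UIKit sketch of the same idea follows; the helper name and view parameter are illustrative, not part of the commit.

    import UIKit

    // Hypothetical helper mirroring the _animateScale branch above: move the view by
    // center and scale it by transform instead of assigning a new frame mid-animation.
    func animateScaledTransition(of view: UIView, to targetFrame: CGRect) {
        let initialTransform = view.transform
        let scale = targetFrame.width / view.frame.width
        UIView.animate(withDuration: 0.3, delay: 0.0, options: [.curveEaseInOut]) {
            view.center = CGPoint(x: targetFrame.midX, y: targetFrame.midY)
            view.transform = initialTransform.scaledBy(x: scale, y: scale)
        } completion: { _ in
            // After the animation, reset the transform and commit the final frame,
            // matching what the completion block in the diff does.
            view.transform = initialTransform
            view.frame = targetFrame
        }
    }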
@@ -98,6 +98,8 @@ const CGFloat TGPhotoPaintStickerKeyboardSize = 260.0f;
 id<TGPhotoPaintStickersScreen> _stickersScreen;

 bool _appeared;
+bool _skipEntitiesSetup;
+bool _entitiesReady;

 TGPhotoPaintFont *_selectedTextFont;
 TGPhotoPaintTextEntityStyle _selectedTextStyle;
@@ -152,6 +154,10 @@ const CGFloat TGPhotoPaintStickerKeyboardSize = 260.0f;
 self.photoEditor = photoEditor;
 self.previewView = previewView;
 _entitiesContainerView = entitiesView;
+if (entitiesView != nil) {
+_skipEntitiesSetup = true;
+}
+entitiesView.userInteractionEnabled = true;

 _brushes = @
 [
@@ -263,7 +269,9 @@ const CGFloat TGPhotoPaintStickerKeyboardSize = 260.0f;

 [strongSelf updateSettingsButton];
 };
-[_contentWrapperView addSubview:_entitiesContainerView];
+if (!_skipEntitiesSetup) {
+[_contentWrapperView addSubview:_entitiesContainerView];
+}
 _undoManager.entitiesContainer = _entitiesContainerView;

 _dimView = [[UIView alloc] init];
@@ -508,7 +516,9 @@ const CGFloat TGPhotoPaintStickerKeyboardSize = 260.0f;
 [super viewDidLoad];

 PGPhotoEditor *photoEditor = _photoEditor;
-[_entitiesContainerView setupWithPaintingData:photoEditor.paintingData];
+if (!_skipEntitiesSetup) {
+[_entitiesContainerView setupWithPaintingData:photoEditor.paintingData];
+}
 for (TGPhotoPaintEntityView *view in _entitiesContainerView.subviews)
 {
 if (![view isKindOfClass:[TGPhotoPaintEntityView class]])
@@ -1809,7 +1819,6 @@ const CGFloat TGPhotoPaintStickerKeyboardSize = 260.0f;

 if (self.presentedForAvatarCreation) {
 _canvasView.hidden = true;
-_entitiesContainerView.hidden = true;
 }
 }

@@ -1858,7 +1867,7 @@ const CGFloat TGPhotoPaintStickerKeyboardSize = 260.0f;

 [self setupCanvas];
 _entitiesContainerView.hidden = false;

 TGPhotoEditorPreviewView *previewView = _previewView;
 [previewView setPaintingHidden:true];
 previewView.hidden = false;
@@ -1880,8 +1889,10 @@ const CGFloat TGPhotoPaintStickerKeyboardSize = 260.0f;
 CGPoint boundsCenter = TGPaintCenterOfRect(_contentWrapperView.bounds);
 _entitiesContainerView.center = TGPaintAddPoints(boundsCenter, offset);

-[_contentWrapperView addSubview:_entitiesContainerView];
+if (!_skipEntitiesSetup || _entitiesReady) {
+[_contentWrapperView addSubview:_entitiesContainerView];
+}
+_entitiesReady = true;
 [self resetScrollView];
 }

@@ -1899,6 +1910,8 @@ const CGFloat TGPhotoPaintStickerKeyboardSize = 260.0f;

 - (void)transitionOutSwitching:(bool)__unused switching completion:(void (^)(void))completion
 {
+[_stickersScreen invalidate];
+
 TGPhotoEditorPreviewView *previewView = self.previewView;
 previewView.interactionEnded = nil;

@@ -1930,9 +1943,7 @@ const CGFloat TGPhotoPaintStickerKeyboardSize = 260.0f;
 - (void)_animatePreviewViewTransitionOutToFrame:(CGRect)targetFrame saving:(bool)saving parentView:(UIView *)parentView completion:(void (^)(void))completion
 {
 _dismissing = true;

-[_stickersScreen invalidate];
-
 [_entitySelectionView removeFromSuperview];
 _entitySelectionView = nil;

@@ -2315,6 +2326,13 @@ const CGFloat TGPhotoPaintStickerKeyboardSize = 260.0f;

 previewView.frame = previewFrame;

+if ([self presentedForAvatarCreation]) {
+CGAffineTransform transform = CGAffineTransformMakeRotation(TGRotationForOrientation(photoEditor.cropOrientation));
+if (photoEditor.cropMirrored)
+transform = CGAffineTransformScale(transform, -1.0f, 1.0f);
+previewView.transform = transform;
+}
+
 CGSize fittedOriginalSize = CGSizeMake(originalSize.width * ratio, originalSize.height * ratio);
 CGSize rotatedSize = TGRotatedContentSize(fittedOriginalSize, rotation);
 CGPoint centerPoint = CGPointMake(rotatedSize.width / 2.0f, rotatedSize.height / 2.0f);
@@ -3,9 +3,10 @@
 @class PGPhotoEditor;
 @class PGPhotoTool;
 @class TGPhotoEditorPreviewView;
+@class TGPhotoEntitiesContainerView;

 @interface TGPhotoToolsController : TGPhotoEditorTabController

-- (instancetype)initWithContext:(id<LegacyComponentsContext>)context photoEditor:(PGPhotoEditor *)photoEditor previewView:(TGPhotoEditorPreviewView *)previewView;
+- (instancetype)initWithContext:(id<LegacyComponentsContext>)context photoEditor:(PGPhotoEditor *)photoEditor previewView:(TGPhotoEditorPreviewView *)previewView entitiesView:(TGPhotoEntitiesContainerView *)entitiesView;

 @end
@@ -22,6 +22,9 @@
 #import "TGPhotoEditorPreviewView.h"
 #import "TGPhotoEditorHUDView.h"
 #import "TGPhotoEditorSparseView.h"
+#import "TGPhotoEntitiesContainerView.h"
+
+#import "TGPhotoPaintController.h"

 const CGFloat TGPhotoEditorToolsPanelSize = 180.0f;
 const CGFloat TGPhotoEditorToolsLandscapePanelSize = TGPhotoEditorToolsPanelSize + 40.0f;
@@ -32,6 +35,7 @@ const CGFloat TGPhotoEditorToolsLandscapePanelSize = TGPhotoEditorToolsPanelSize
 bool _appeared;
 bool _scheduledTransitionIn;
 CGFloat _cellWidth;
+int _entitiesReady;

 NSArray *_allTools;
 NSArray *_simpleTools;
@@ -44,6 +48,7 @@ const CGFloat TGPhotoEditorToolsLandscapePanelSize = TGPhotoEditorToolsPanelSize
 TGPhotoEditorCollectionView *_portraitCollectionView;
 TGPhotoEditorCollectionView *_landscapeCollectionView;
 TGPhotoEditorHUDView *_hudView;
+TGPhotoEntitiesContainerView *_entitiesView;

 void (^_changeBlock)(PGPhotoTool *, id, bool);
 void (^_interactionBegan)(void);
@@ -52,6 +57,8 @@ const CGFloat TGPhotoEditorToolsLandscapePanelSize = TGPhotoEditorToolsPanelSize
 bool _preview;
 TGPhotoEditorTab _currentTab;

+UIView *_entitiesWrapperView;
+
 UIView <TGPhotoEditorToolView> *_toolAreaView;
 UIView <TGPhotoEditorToolView> *_portraitToolControlView;
 UIView <TGPhotoEditorToolView> *_landscapeToolControlView;
@@ -64,14 +71,15 @@ const CGFloat TGPhotoEditorToolsLandscapePanelSize = TGPhotoEditorToolsPanelSize

 @implementation TGPhotoToolsController

-- (instancetype)initWithContext:(id<LegacyComponentsContext>)context photoEditor:(PGPhotoEditor *)photoEditor previewView:(TGPhotoEditorPreviewView *)previewView
+- (instancetype)initWithContext:(id<LegacyComponentsContext>)context photoEditor:(PGPhotoEditor *)photoEditor previewView:(TGPhotoEditorPreviewView *)previewView entitiesView:(TGPhotoEntitiesContainerView *)entitiesView
 {
 self = [super initWithContext:context];
 if (self != nil)
 {
 self.photoEditor = photoEditor;
 self.previewView = previewView;
+_entitiesView = entitiesView;

 __weak TGPhotoToolsController *weakSelf = self;
 _changeBlock = ^(PGPhotoTool *tool, __unused id newValue, bool animated)
 {
@@ -99,6 +107,30 @@ const CGFloat TGPhotoEditorToolsLandscapePanelSize = TGPhotoEditorToolsPanelSize
 _landscapeCollectionView.toolsDataSource = nil;
 }

+- (void)layoutEntitiesView {
+if (_entitiesReady < 2 || _dismissing)
+return;
+
+_entitiesWrapperView.transform = CGAffineTransformIdentity;
+_entitiesWrapperView.frame = CGRectMake(0.0, 0.0, _entitiesView.frame.size.width, _entitiesView.frame.size.height);
+[_entitiesWrapperView addSubview:_entitiesView];
+
+CGFloat paintingScale = _entitiesView.frame.size.width / _photoEditor.originalSize.width;
+_entitiesView.frame = CGRectMake(-_photoEditor.cropRect.origin.x * paintingScale, -_photoEditor.cropRect.origin.y * paintingScale, _entitiesView.frame.size.width, _entitiesView.frame.size.height);
+
+CGFloat cropScale = 1.0;
+if (_photoEditor.originalSize.width > _photoEditor.originalSize.height) {
+cropScale = _photoEditor.originalSize.height / _photoEditor.cropRect.size.height;
+} else {
+cropScale = _photoEditor.originalSize.width / _photoEditor.cropRect.size.width;
+}
+
+CGFloat scale = _previewView.frame.size.width / _entitiesView.frame.size.width;
+CGAffineTransform rotationTransform = CGAffineTransformMakeRotation(TGRotationForOrientation(_photoEditor.cropOrientation));
+_entitiesWrapperView.transform = CGAffineTransformScale(rotationTransform, scale * cropScale, scale * cropScale);
+_entitiesWrapperView.frame = [_previewView convertRect:_previewView.bounds toView:_entitiesWrapperView.superview];
+}
+
 - (void)loadView
 {
 [super loadView];
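Note: the new `layoutEntitiesView` above composes two scale factors before applying the rotation transform: a crop factor (full original size over the crop rect, taken along the shorter side) and a display factor (preview width over entities-view width). A minimal Swift transcription of that math, assuming the same meaning for the fields it reads, is sketched below; the function name is illustrative.

    import CoreGraphics

    // Sketch of the scale computation used by layoutEntitiesView above.
    func entitiesWrapperScale(originalSize: CGSize, cropRect: CGRect,
                              previewWidth: CGFloat, entitiesWidth: CGFloat) -> CGFloat {
        // Ratio between the full original image and the visible crop.
        let cropScale: CGFloat
        if originalSize.width > originalSize.height {
            cropScale = originalSize.height / cropRect.height
        } else {
            cropScale = originalSize.width / cropRect.width
        }
        // Ratio between the on-screen preview and the entities container.
        let displayScale = previewWidth / entitiesWidth
        return displayScale * cropScale
    }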
@@ -189,6 +221,10 @@ const CGFloat TGPhotoEditorToolsLandscapePanelSize = TGPhotoEditorToolsPanelSize
 _wrapperView = [[TGPhotoEditorSparseView alloc] initWithFrame:CGRectZero];
 [self.view addSubview:_wrapperView];

+_entitiesWrapperView = [[UIView alloc] init];
+_entitiesWrapperView.userInteractionEnabled = false;
+[_wrapperView addSubview:_entitiesWrapperView];
+
 _portraitToolsWrapperView = [[UIView alloc] initWithFrame:CGRectZero];
 _portraitToolsWrapperView.alpha = 0.0f;
 [_wrapperView addSubview:_portraitToolsWrapperView];
@@ -473,6 +509,9 @@ const CGFloat TGPhotoEditorToolsLandscapePanelSize = TGPhotoEditorToolsPanelSize
 TGPhotoEditorPreviewView *previewView = _previewView;
 previewView.hidden = false;
 [previewView performTransitionInIfNeeded];
+
+_entitiesReady++;
+[self layoutEntitiesView];
 }

 - (void)prepareForCustomTransitionOut
@@ -975,6 +1014,7 @@ const CGFloat TGPhotoEditorToolsLandscapePanelSize = TGPhotoEditorToolsPanelSize
 [_landscapeCollectionView.collectionViewLayout invalidateLayout];

 [self updatePreviewView];
+[self layoutEntitiesView];
 }

 - (TGPhotoEditorTab)availableTabs
@@ -33,8 +33,6 @@
 editableItem = [[TGCameraCapturedVideo alloc] initWithURL:video];
 }

-
-
 void (^present)(UIImage *) = ^(UIImage *screenImage) {
 TGPhotoEditorController *controller = [[TGPhotoEditorController alloc] initWithContext:[windowManager context] item:editableItem intent:TGPhotoEditorControllerAvatarIntent adjustments:nil caption:nil screenImage:screenImage availableTabs:[TGPhotoEditorController defaultTabsForAvatarIntent] selectedTab:TGPhotoEditorCropTab];
 // controller.stickersContext = _stickersContext;
@@ -45,9 +43,12 @@
 if (didFinishWithImage != nil)
 didFinishWithImage(resultImage);
 };
-controller.didFinishEditingVideo = ^(NSURL *url, id<TGMediaEditAdjustments> adjustments, UIImage *resultImage, UIImage *thumbnailImage, bool hasChanges) {
-if (didFinishWithVideo != nil)
-didFinishWithVideo(resultImage, url, adjustments);
+controller.didFinishEditingVideo = ^(AVAsset *asset, id<TGMediaEditAdjustments> adjustments, UIImage *resultImage, UIImage *thumbnailImage, bool hasChanges) {
+if (didFinishWithVideo != nil) {
+if ([asset isKindOfClass:[AVURLAsset class]]) {
+didFinishWithVideo(resultImage, [(AVURLAsset *)asset URL], adjustments);
+}
+}
 };
 controller.requestThumbnailImage = ^(id<TGMediaEditableItem> editableItem)
 {
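Note: `didFinishEditingVideo` now hands back an `AVAsset` instead of a bare file `NSURL`, so callers that still need a path have to check for `AVURLAsset` first, exactly as the hunk above does. A minimal Swift sketch of that caller-side pattern follows; the wrapper function is illustrative and not part of the commit.

    import AVFoundation
    import UIKit

    // Hypothetical consumer of the AVAsset-based callback: only AVURLAsset carries a file URL.
    func handleEditedVideo(asset: AVAsset, resultImage: UIImage?) {
        if let urlAsset = asset as? AVURLAsset {
            print("edited video is backed by a file at \(urlAsset.url)")
        } else {
            // e.g. a composition produced in-memory; there is no single file URL,
            // so the asset has to be exported or converted instead.
            print("edited video has no backing file URL")
        }
    }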
@@ -136,7 +136,7 @@ const NSTimeInterval TGVideoEditMaximumGifDuration = 30.5;
 return adjustments;
 }

-+ (instancetype)editAdjustmentsWithPhotoEditorValues:(PGPhotoEditorValues *)values {
++ (instancetype)editAdjustmentsWithPhotoEditorValues:(PGPhotoEditorValues *)values preset:(TGMediaVideoConversionPreset)preset {
 TGVideoEditAdjustments *adjustments = [[[self class] alloc] init];
 adjustments->_originalSize = values.originalSize;
 CGRect cropRect = values.cropRect;
@@ -150,7 +150,7 @@ const NSTimeInterval TGVideoEditMaximumGifDuration = 30.5;
 adjustments->_cropMirrored = values.cropMirrored;
 adjustments->_paintingData = [values.paintingData dataForAnimation];
 adjustments->_sendAsGif = true;
-adjustments->_preset = TGMediaVideoConversionPresetAnimation;
+adjustments->_preset = preset;

 return adjustments;
 }
@@ -23,16 +23,16 @@ public func presentLegacyAvatarEditor(theme: PresentationTheme, image: UIImage?,
 if let image = image {
 imageCompletion(image)
 }
-}, didFinishWithVideo: { image, url, adjustments in
-if let image = image, let url = url {
-videoCompletion(image, url, adjustments)
+}, didFinishWithVideo: { image, asset, adjustments in
+if let image = image {
+// videoCompletion(image, url, adjustments)
 }
 }, dismissed: { [weak legacyController] in
 legacyController?.dismiss()
 })
 }

-public func presentLegacyAvatarPicker(holder: Atomic<NSObject?>, signup: Bool, theme: PresentationTheme, present: (ViewController, Any?) -> Void, openCurrent: (() -> Void)?, completion: @escaping (UIImage) -> Void, videoCompletion: @escaping (UIImage, URL, TGVideoEditAdjustments?) -> Void = { _, _, _ in}) {
+public func presentLegacyAvatarPicker(holder: Atomic<NSObject?>, signup: Bool, theme: PresentationTheme, present: (ViewController, Any?) -> Void, openCurrent: (() -> Void)?, completion: @escaping (UIImage) -> Void, videoCompletion: @escaping (UIImage, Any?, TGVideoEditAdjustments?) -> Void = { _, _, _ in}) {
 let legacyController = LegacyController(presentation: .custom, theme: theme)
 legacyController.statusBar.statusBarStyle = .Ignore

@@ -53,11 +53,11 @@ public func presentLegacyAvatarPicker(holder: Atomic<NSObject?>, signup: Bool, t
 }
 completion(image)
 }
-mixin.didFinishWithVideo = { image, url, adjustments in
-guard let image = image, let url = url else {
+mixin.didFinishWithVideo = { image, asset, adjustments in
+guard let image = image else {
 return
 }
-videoCompletion(image, url, adjustments)
+videoCompletion(image, asset, adjustments)
 }
 mixin.didFinishWithView = {
 openCurrent?()
@@ -287,6 +287,7 @@ final class FFMpegMediaFrameSourceContext: NSObject {

 fileprivate var requestedDataOffset: Int?
 fileprivate let fetchedDataDisposable = MetaDisposable()
+fileprivate let keepDataDisposable = MetaDisposable()
 fileprivate let fetchedFullDataDisposable = MetaDisposable()
 fileprivate var requestedCompleteFetch = false

@@ -294,6 +295,7 @@ final class FFMpegMediaFrameSourceContext: NSObject {
 didSet {
 self.fetchedDataDisposable.dispose()
 self.fetchedFullDataDisposable.dispose()
+self.keepDataDisposable.dispose()
 }
 }

@@ -316,6 +318,7 @@ final class FFMpegMediaFrameSourceContext: NSObject {

 self.fetchedDataDisposable.dispose()
 self.fetchedFullDataDisposable.dispose()
+self.keepDataDisposable.dispose()
 }

 func initializeState(postbox: Postbox, resourceReference: MediaResourceReference, tempFilePath: String?, streamable: Bool, video: Bool, preferSoftwareDecoding: Bool, fetchAutomatically: Bool, maximumFetchSize: Int?) {
@@ -341,6 +344,10 @@ final class FFMpegMediaFrameSourceContext: NSObject {
 }
 }

+if self.tempFilePath == nil {
+self.keepDataDisposable.set(postbox.mediaBox.keepResource(id: resourceReference.resource.id).start())
+}
+
 if streamable {
 if self.tempFilePath == nil {
 self.fetchedDataDisposable.set(fetchedMediaResource(mediaBox: postbox.mediaBox, reference: resourceReference, range: (0 ..< Int(Int32.max), .elevated), statsCategory: self.statsCategory ?? .generic, preferBackgroundReferenceRevalidation: streamable).start())
@@ -174,9 +174,9 @@ public func fetchedAvatarGalleryEntries(account: Account, peer: Peer) -> Signal<
 for photo in photos {
 let indexData = GalleryItemIndexData(position: index, totalCount: Int32(photos.count))
 if result.isEmpty, let first = initialEntries.first {
-result.append(.image(photo.image.imageId, photo.image.reference, first.representations, photo.image.videoRepresentations.map({ VideoRepresentationWithReference(representation: $0, reference: MediaResourceReference.avatar(peer: peerReference, resource: $0.resource)) }), peer, photo.date, indexData, photo.messageId, photo.image.immediateThumbnailData, nil))
+result.append(.image(photo.image.imageId, photo.image.reference, first.representations, photo.image.videoRepresentations.map({ VideoRepresentationWithReference(representation: $0, reference: MediaResourceReference.avatarList(peer: peerReference, resource: $0.resource)) }), peer, photo.date, indexData, photo.messageId, photo.image.immediateThumbnailData, nil))
 } else {
-result.append(.image(photo.image.imageId, photo.image.reference, photo.image.representations.map({ ImageRepresentationWithReference(representation: $0, reference: MediaResourceReference.standalone(resource: $0.resource)) }), photo.image.videoRepresentations.map({ VideoRepresentationWithReference(representation: $0, reference: MediaResourceReference.avatar(peer: peerReference, resource: $0.resource)) }), peer, photo.date, indexData, photo.messageId, photo.image.immediateThumbnailData, nil))
+result.append(.image(photo.image.imageId, photo.image.reference, photo.image.representations.map({ ImageRepresentationWithReference(representation: $0, reference: MediaResourceReference.avatarList(peer: peerReference, resource: $0.resource)) }), photo.image.videoRepresentations.map({ VideoRepresentationWithReference(representation: $0, reference: MediaResourceReference.avatarList(peer: peerReference, resource: $0.resource)) }), peer, photo.date, indexData, photo.messageId, photo.image.immediateThumbnailData, nil))
 }
 index += 1
 }
@@ -202,9 +202,9 @@ public func fetchedAvatarGalleryEntries(account: Account, peer: Peer, firstEntry
 for photo in photos {
 let indexData = GalleryItemIndexData(position: index, totalCount: Int32(photos.count))
 if result.isEmpty, let first = initialEntries.first {
-result.append(.image(photo.image.imageId, photo.image.reference, first.representations, photo.image.videoRepresentations.map({ VideoRepresentationWithReference(representation: $0, reference: MediaResourceReference.avatar(peer: peerReference, resource: $0.resource)) }), peer, photo.date, indexData, photo.messageId, photo.image.immediateThumbnailData, nil))
+result.append(.image(photo.image.imageId, photo.image.reference, first.representations, photo.image.videoRepresentations.map({ VideoRepresentationWithReference(representation: $0, reference: MediaResourceReference.avatarList(peer: peerReference, resource: $0.resource)) }), peer, photo.date, indexData, photo.messageId, photo.image.immediateThumbnailData, nil))
 } else {
-result.append(.image(photo.image.imageId, photo.image.reference, photo.image.representations.map({ ImageRepresentationWithReference(representation: $0, reference: MediaResourceReference.standalone(resource: $0.resource)) }), photo.image.videoRepresentations.map({ VideoRepresentationWithReference(representation: $0, reference: MediaResourceReference.avatar(peer: peerReference, resource: $0.resource)) }), peer, photo.date, indexData, photo.messageId, photo.image.immediateThumbnailData, nil))
+result.append(.image(photo.image.imageId, photo.image.reference, photo.image.representations.map({ ImageRepresentationWithReference(representation: $0, reference: MediaResourceReference.avatarList(peer: peerReference, resource: $0.resource)) }), photo.image.videoRepresentations.map({ VideoRepresentationWithReference(representation: $0, reference: MediaResourceReference.avatarList(peer: peerReference, resource: $0.resource)) }), peer, photo.date, indexData, photo.messageId, photo.image.immediateThumbnailData, nil))
 }
 index += 1
 }
@@ -627,8 +627,6 @@ public class AvatarGalleryController: ViewController, StandalonePresentableContr
 entries.insert(previousFirstEntry, at: index)
 }

-
-
 entries = normalizeEntries(entries)
 self.galleryNode.pager.replaceItems(entries.map({ entry in PeerAvatarImageGalleryItem(context: self.context, peer: self.peer, presentationData: presentationData, entry: entry, sourceCorners: self.sourceCorners, delete: self.canDelete ? { [weak self] in
 self?.deleteEntry(entry)
@@ -638,6 +636,10 @@ public class AvatarGalleryController: ViewController, StandalonePresentableContr
 self?.editEntry(entry)
 }) }), centralItemIndex: 0, synchronous: true)
 self.entries = entries
+
+if let firstEntry = self.entries.first {
+self._hiddenMedia.set(.single(firstEntry))
+}
 }
 } else {
 // if let messageId = messageId {
@@ -845,7 +847,7 @@ public class AvatarGalleryController: ViewController, StandalonePresentableContr
 }
 let actionSheet = ActionSheetController(presentationData: presentationData)
 let items: [ActionSheetItem] = [
-ActionSheetButtonItem(title: presentationData.strings.Common_Delete, color: .destructive, action: { [weak actionSheet] in
+ActionSheetButtonItem(title: presentationData.strings.Settings_RemoveConfirmation, color: .destructive, action: { [weak actionSheet] in
 actionSheet?.dismissAnimated()
 proceed()
 })
@@ -259,7 +259,10 @@ final class PeerAvatarImageGalleryItemNode: ZoomableContentGalleryItemNode {
 id = image.0.id
 category = image.9
 } else {
-id = Int64(entry.peer?.id.id ?? 1)
+id = Int64(entry.peer?.id.id ?? 0)
+if let resource = entry.videoRepresentations.first?.representation.resource as? CloudPhotoSizeMediaResource {
+id = id &+ resource.photoId
+}
 }
 if let video = entry.videoRepresentations.last, let peerReference = PeerReference(self.peer) {
 if video != previousVideoRepresentations?.last {
@@ -132,6 +132,14 @@ public enum ResourceDataRequestOption {
 case incremental(waitUntilFetchStatus: Bool)
 }

+private final class MediaBoxKeepResourceContext {
+let subscribers = Bag<Void>()
+
+var isEmpty: Bool {
+return self.subscribers.isEmpty
+}
+}
+
 public final class MediaBox {
 public let basePath: String

@@ -145,6 +153,7 @@ public final class MediaBox {
 private var cachedRepresentationContexts: [CachedMediaResourceRepresentationKey: CachedMediaResourceRepresentationContext] = [:]

 private var fileContexts: [WrappedMediaResourceId: MediaBoxFileContext] = [:]
+private var keepResourceContexts: [WrappedMediaResourceId: MediaBoxKeepResourceContext] = [:]

 private var wrappedFetchResource = Promise<(MediaResource, Signal<[(Range<Int>, MediaBoxFetchPriority)], NoError>, MediaResourceFetchParameters?) -> Signal<MediaResourceDataFetchResult, MediaResourceDataFetchError>>()
 public var preFetchedResourcePath: (MediaResource) -> String? = { _ in return nil }
@@ -204,6 +213,10 @@ public final class MediaBox {
 return ResourceStorePaths(partial: "\(self.basePath)/\(fileNameForId(id))_partial", complete: "\(self.basePath)/\(fileNameForId(id))")
 }

+private func fileNamesForId(_ id: MediaResourceId) -> ResourceStorePaths {
+return ResourceStorePaths(partial: "\(fileNameForId(id))_partial", complete: "\(fileNameForId(id))")
+}
+
 private func cachedRepresentationPathsForId(_ id: MediaResourceId, representation: CachedMediaResourceRepresentation) -> ResourceStorePaths {
 let cacheString: String
 switch representation.keepDuration {
@@ -697,6 +710,38 @@ public final class MediaBox {
 }
 }

+public func keepResource(id: MediaResourceId) -> Signal<Never, NoError> {
+return Signal { subscriber in
+let disposable = MetaDisposable()
+
+let dataQueue = self.dataQueue
+self.dataQueue.async {
+let context: MediaBoxKeepResourceContext
+if let current = self.keepResourceContexts[WrappedMediaResourceId(id)] {
+context = current
+} else {
+context = MediaBoxKeepResourceContext()
+self.keepResourceContexts[WrappedMediaResourceId(id)] = context
+}
+let index = context.subscribers.add(Void())
+
+disposable.set(ActionDisposable { [weak self, weak context] in
+dataQueue.async {
+guard let strongSelf = self, let context = context, let currentContext = strongSelf.keepResourceContexts[WrappedMediaResourceId(id)], currentContext === context else {
+return
+}
+currentContext.subscribers.remove(index)
+if currentContext.isEmpty {
+strongSelf.keepResourceContexts.removeValue(forKey: WrappedMediaResourceId(id))
+}
+}
+})
+}
+
+return disposable
+}
+}
+
 public func cancelInteractiveResourceFetch(_ resource: MediaResource) {
 self.dataQueue.async {
 if let (fileContext, releaseContext) = self.fileContext(for: resource) {
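Note: `keepResource(id:)` above keeps a subscriber registered only for as long as the returned signal's disposable stays alive, and the FFMpeg frame source earlier in this diff relies on exactly that to pin streamed video data while playback is active. A minimal usage sketch under that assumption; the `ResourcePin` type is illustrative, not part of the commit.

    import SwiftSignalKit
    import Postbox

    // Hypothetical consumer: pin a resource for the lifetime of some work, then release it.
    final class ResourcePin {
        private let disposable = MetaDisposable()

        func pin(mediaBox: MediaBox, id: MediaResourceId) {
            // Starting the signal adds a subscriber to the keep-context on the data queue.
            self.disposable.set(mediaBox.keepResource(id: id).start())
        }

        func release() {
            // Disposing removes the subscriber; once the context is empty the
            // resource becomes eligible for cache cleanup again.
            self.disposable.dispose()
        }
    }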
@@ -991,7 +1036,20 @@ public final class MediaBox {
 public func removeOtherCachedResources(paths: [String]) -> Signal<Void, NoError> {
 return Signal { subscriber in
 self.dataQueue.async {
-for path in paths {
+var keepPrefixes: [String] = []
+for id in self.keepResourceContexts.keys {
+let resourcePaths = self.fileNamesForId(id.id)
+keepPrefixes.append(resourcePaths.partial)
+keepPrefixes.append(resourcePaths.complete)
+}
+
+outer: for path in paths {
+for prefix in keepPrefixes {
+if path.starts(with: prefix) {
+continue outer
+}
+}
+
 unlink(self.basePath + "/" + path)
 }
 subscriber.putCompletion()
@@ -1007,6 +1065,9 @@ public final class MediaBox {
 if self.fileContexts[id] != nil {
 continue
 }
+if self.keepResourceContexts[id] != nil {
+continue
+}
 let paths = self.storePathsForId(id.id)
 unlink(paths.complete)
 unlink(paths.partial)
@@ -1044,35 +1105,4 @@ public final class MediaBox {
 return EmptyDisposable
 }
 }
-
-public func clearFileContexts() -> Signal<Void, NoError> {
-return Signal { subscriber in
-self.dataQueue.async {
-for (id, _) in self.fileContexts {
-let paths = self.storePathsForId(id.id)
-unlink(paths.complete)
-unlink(paths.partial)
-unlink(paths.partial + ".meta")
-}
-self.fileContexts.removeAll()
-subscriber.putCompletion()
-}
-return EmptyDisposable
-}
-}
-
-public func fileConxtets() -> Signal<[(partial: String, complete: String)], NoError> {
-return Signal { subscriber in
-self.dataQueue.async {
-var result: [(partial: String, complete: String)] = []
-for (id, _) in self.fileContexts {
-let paths = self.storePathsForId(id.id)
-result.append((partial: paths.partial, complete: paths.complete))
-}
-subscriber.putNext(result)
-subscriber.putCompletion()
-}
-return EmptyDisposable
-}
-}
 }
@@ -684,9 +684,9 @@ func editSettingsController(context: AccountContext, currentName: ItemListAvatar
 completedProfilePhotoImpl(image)
 }
 }
-mixin.didFinishWithVideo = { image, url, adjustments in
-if let image = image, let url = url {
-completedProfileVideoImpl(image, url, adjustments)
+mixin.didFinishWithVideo = { image, asset, adjustments in
+if let image = image {
+// completedProfileVideoImpl(image, url, adjustments)
 }
 }
 mixin.didFinishWithDelete = {
@@ -1418,9 +1418,9 @@ public func settingsController(context: AccountContext, accountManager: AccountM
 completedProfilePhotoImpl(image)
 }
 }
-mixin.didFinishWithVideo = { image, url, adjustments in
-if let image = image, let url = url {
-completedProfileVideoImpl(image, url, adjustments)
+mixin.didFinishWithVideo = { image, asset, adjustments in
+if let image = image {
+// completedProfileVideoImpl(image, url, adjustments)
 }
 }
 mixin.didFinishWithDelete = {
File diff suppressed because it is too large
Binary file not shown.
@@ -686,7 +686,7 @@ public final class AuthorizationSequenceController: NavigationController, MFMail
 transaction.setState(UnauthorizedAccountState(isTestingEnvironment: strongSelf.account.testingEnvironment, masterDatacenterId: strongSelf.account.masterDatacenterId, contents: .phoneEntry(countryCode: countryCode, number: "")))
 }).start()
 }, displayCancel: displayCancel)
-controller.signUpWithName = { [weak self, weak controller] firstName, lastName, avatarData, avatarUrl, avatarAdjustments in
+controller.signUpWithName = { [weak self, weak controller] firstName, lastName, avatarData, avatarAsset, avatarAdjustments in
 if let strongSelf = self {
 controller?.inProgress = true

@@ -696,15 +696,9 @@ public final class AuthorizationSequenceController: NavigationController, MFMail
 }

 let avatarVideo: Signal<UploadedPeerPhotoData?, NoError>?
-if let avatarUrl = avatarUrl {
+if let avatarAsset = avatarAsset as? AVAsset {
 let account = strongSelf.account
 avatarVideo = Signal<TelegramMediaResource?, NoError> { subscriber in
-var filteredPath = avatarUrl.path
-if filteredPath.hasPrefix("file://") {
-filteredPath = String(filteredPath[filteredPath.index(filteredPath.startIndex, offsetBy: "file://".count)])
-}
-
-let avAsset = AVURLAsset(url: URL(fileURLWithPath: filteredPath))
 let entityRenderer: LegacyPaintEntityRenderer? = avatarAdjustments.flatMap { adjustments in
 if let paintingData = adjustments.paintingData, paintingData.hasAnimation {
 return LegacyPaintEntityRenderer(account: nil, adjustments: adjustments)
@@ -713,7 +707,7 @@ public final class AuthorizationSequenceController: NavigationController, MFMail
 }
 }

-let signal = TGMediaVideoConverter.convert(avAsset, adjustments: avatarAdjustments, watcher: nil, entityRenderer: entityRenderer)!
+let signal = TGMediaVideoConverter.convert(avatarAsset, adjustments: avatarAdjustments, watcher: nil, entityRenderer: entityRenderer)!

 let signalDisposable = signal.start(next: { next in
 if let result = next as? TGMediaVideoConversionResult {
@@ -22,9 +22,9 @@ final class AuthorizationSequenceSignUpController: ViewController {
 var initialName: (String, String) = ("", "")
 private var termsOfService: UnauthorizedAccountTermsOfService?

-var signUpWithName: ((String, String, Data?, URL?, TGVideoEditAdjustments?) -> Void)?
+var signUpWithName: ((String, String, Data?, Any?, TGVideoEditAdjustments?) -> Void)?

-var avatarUrl: URL?
+var avatarAsset: Any?
 var avatarAdjustments: TGVideoEditAdjustments?

 private let hapticFeedback = HapticFeedback()
@@ -91,11 +91,11 @@ final class AuthorizationSequenceSignUpController: ViewController {
 self?.present(c, in: .window(.root), with: a)
 }, openCurrent: nil, completion: { image in
 self?.controllerNode.currentPhoto = image
-self?.avatarUrl = nil
+self?.avatarAsset = nil
 self?.avatarAdjustments = nil
-}, videoCompletion: { image, url, adjustments in
+}, videoCompletion: { image, asset, adjustments in
 self?.controllerNode.currentPhoto = image
-self?.avatarUrl = url
+self?.avatarAsset = asset
 self?.avatarAdjustments = adjustments
 })
 })
@@ -159,7 +159,7 @@ final class AuthorizationSequenceSignUpController: ViewController {
 if let name = name {
 self.signUpWithName?(name.0, name.1, self.controllerNode.currentPhoto.flatMap({ image in
 return compressImageToJPEG(image, quality: 0.7)
-}), self.avatarUrl, self.avatarAdjustments)
+}), self.avatarAsset, self.avatarAdjustments)
 }
 }
 }
@@ -328,7 +328,7 @@ public func createChannelController(context: AccountContext) -> ViewController {
 }
 }

-let completedChannelVideoImpl: (UIImage, URL, TGVideoEditAdjustments?) -> Void = { image, url, adjustments in
+let completedChannelVideoImpl: (UIImage, Any?, TGVideoEditAdjustments?) -> Void = { image, asset, adjustments in
 if let data = image.jpegData(compressionQuality: 0.6) {
 let photoResource = LocalFileMediaResource(fileId: arc4random64())
 context.account.postbox.mediaBox.storeResourceData(photoResource.id, data: data)
@@ -345,12 +345,6 @@ public func createChannelController(context: AccountContext) -> ViewController {
 }

 let signal = Signal<TelegramMediaResource?, UploadPeerPhotoError> { subscriber in
-var filteredPath = url.path
-if filteredPath.hasPrefix("file://") {
-filteredPath = String(filteredPath[filteredPath.index(filteredPath.startIndex, offsetBy: "file://".count)])
-}
-
-let avAsset = AVURLAsset(url: URL(fileURLWithPath: filteredPath))
 let entityRenderer: LegacyPaintEntityRenderer? = adjustments.flatMap { adjustments in
 if let paintingData = adjustments.paintingData, paintingData.hasAnimation {
 return LegacyPaintEntityRenderer(account: context.account, adjustments: adjustments)
@@ -359,7 +353,31 @@ public func createChannelController(context: AccountContext) -> ViewController {
 }
 }
 let uploadInterface = LegacyLiveUploadInterface(account: context.account)
-let signal = TGMediaVideoConverter.convert(avAsset, adjustments: adjustments, watcher: uploadInterface, entityRenderer: entityRenderer)!
+let signal: SSignal
+if let asset = asset as? AVAsset {
+signal = TGMediaVideoConverter.convert(asset, adjustments: adjustments, watcher: uploadInterface, entityRenderer: entityRenderer)!
+} else if let url = asset as? URL, let data = try? Data(contentsOf: url, options: [.mappedRead]), let image = UIImage(data: data), let entityRenderer = entityRenderer {
+let durationSignal: SSignal = SSignal(generator: { subscriber in
+let disposable = (entityRenderer.duration()).start(next: { duration in
+subscriber?.putNext(duration)
+subscriber?.putCompletion()
+})
+
+return SBlockDisposable(block: {
+disposable.dispose()
+})
+})
+signal = durationSignal.map(toSignal: { duration -> SSignal? in
+if let duration = duration as? Double {
+return TGMediaVideoConverter.renderUIImage(image, duration: duration, adjustments: adjustments, watcher: nil, entityRenderer: entityRenderer)!
+} else {
+return SSignal.single(nil)
+}
+})
+
+} else {
+signal = SSignal.complete()
+}

 let signalDisposable = signal.start(next: { next in
 if let result = next as? TGMediaVideoConversionResult {
@@ -438,9 +456,9 @@ public func createChannelController(context: AccountContext) -> ViewController {
 completedChannelPhotoImpl(image)
 }
 }
-mixin.didFinishWithVideo = { image, url, adjustments in
-if let image = image, let url = url {
-completedChannelVideoImpl(image, url, adjustments)
+mixin.didFinishWithVideo = { image, asset, adjustments in
+if let image = image, let asset = asset {
+completedChannelVideoImpl(image, asset, adjustments)
 }
 }
 if stateValue.with({ $0.avatar }) != nil {
@@ -586,7 +586,7 @@ public func createGroupControllerImpl(context: AccountContext, peerIds: [PeerId]
 }
 }

-let completedGroupVideoImpl: (UIImage, URL, TGVideoEditAdjustments?) -> Void = { image, url, adjustments in
+let completedGroupVideoImpl: (UIImage, Any?, TGVideoEditAdjustments?) -> Void = { image, asset, adjustments in
 if let data = image.jpegData(compressionQuality: 0.6) {
 let photoResource = LocalFileMediaResource(fileId: arc4random64())
 context.account.postbox.mediaBox.storeResourceData(photoResource.id, data: data)
@@ -603,12 +603,7 @@ public func createGroupControllerImpl(context: AccountContext, peerIds: [PeerId]
 }

 let signal = Signal<TelegramMediaResource?, UploadPeerPhotoError> { subscriber in
-var filteredPath = url.path
-if filteredPath.hasPrefix("file://") {
-filteredPath = String(filteredPath[filteredPath.index(filteredPath.startIndex, offsetBy: "file://".count)])
-}
-
-let avAsset = AVURLAsset(url: URL(fileURLWithPath: filteredPath))
 let entityRenderer: LegacyPaintEntityRenderer? = adjustments.flatMap { adjustments in
 if let paintingData = adjustments.paintingData, paintingData.hasAnimation {
 return LegacyPaintEntityRenderer(account: context.account, adjustments: adjustments)
@@ -617,7 +612,31 @@ public func createGroupControllerImpl(context: AccountContext, peerIds: [PeerId]
 }
 }
 let uploadInterface = LegacyLiveUploadInterface(account: context.account)
-let signal = TGMediaVideoConverter.convert(avAsset, adjustments: adjustments, watcher: uploadInterface, entityRenderer: entityRenderer)!
+let signal: SSignal
+if let asset = asset as? AVAsset {
+signal = TGMediaVideoConverter.convert(asset, adjustments: adjustments, watcher: uploadInterface, entityRenderer: entityRenderer)!
+} else if let url = asset as? URL, let data = try? Data(contentsOf: url, options: [.mappedRead]), let image = UIImage(data: data), let entityRenderer = entityRenderer {
+let durationSignal: SSignal = SSignal(generator: { subscriber in
+let disposable = (entityRenderer.duration()).start(next: { duration in
+subscriber?.putNext(duration)
+subscriber?.putCompletion()
+})
+
+return SBlockDisposable(block: {
+disposable.dispose()
+})
+})
+signal = durationSignal.map(toSignal: { duration -> SSignal? in
+if let duration = duration as? Double {
+return TGMediaVideoConverter.renderUIImage(image, duration: duration, adjustments: adjustments, watcher: nil, entityRenderer: entityRenderer)!
+} else {
+return SSignal.single(nil)
+}
+})
+
+} else {
+signal = SSignal.complete()
+}

 let signalDisposable = signal.start(next: { next in
 if let result = next as? TGMediaVideoConversionResult {
@@ -696,9 +715,9 @@ public func createGroupControllerImpl(context: AccountContext, peerIds: [PeerId]
 completedGroupPhotoImpl(image)
 }
 }
-mixin.didFinishWithVideo = { image, url, adjustments in
-if let image = image, let url = url {
-completedGroupVideoImpl(image, url, adjustments)
+mixin.didFinishWithVideo = { image, asset, adjustments in
+if let image = image, let asset = asset {
+completedGroupVideoImpl(image, asset, adjustments)
 }
 }
 if stateValue.with({ $0.avatar }) != nil {
@ -334,6 +334,9 @@ final class PeerInfoAvatarListItemNode: ASDisplayNode {
videoRepresentations = videoRepresentationsValue
immediateThumbnailData = immediateThumbnail
id = Int64(self.peer.id.id)
+ if let resource = videoRepresentations.first?.representation.resource as? CloudPhotoSizeMediaResource {
+ id = id &+ resource.photoId
+ }
case let .image(reference, imageRepresentations, videoRepresentationsValue, immediateThumbnail):
representations = imageRepresentations
videoRepresentations = videoRepresentationsValue
@ -350,7 +353,7 @@ final class PeerInfoAvatarListItemNode: ASDisplayNode {
let videoFileReference = FileMediaReference.avatarList(peer: peerReference, media: TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.LocalFile, id: 0), partialReference: nil, resource: video.representation.resource, previewRepresentations: representations.map { $0.representation }, videoThumbnails: [], immediateThumbnailData: immediateThumbnailData, mimeType: "video/mp4", size: nil, attributes: [.Animated, .Video(duration: 0, size: video.representation.dimensions, flags: [])]))
let videoContent = NativeVideoContent(id: .profileVideo(id, nil), fileReference: videoFileReference, streamVideo: isMediaStreamable(resource: video.representation.resource) ? .conservative : .none, loopVideo: true, enableSound: false, fetchAutomatically: true, onlyFullSizeThumbnail: false, autoFetchFullSizeThumbnail: true, startTimestamp: video.representation.startTimestamp, continuePlayingWithoutSoundOnLostAudioSession: false, placeholderColor: .clear)

if videoContent.id != self.videoContent?.id {
self.videoContent = videoContent
self.videoStartTimestamp = video.representation.startTimestamp
self.setupVideoPlayback()
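The hunks above (and the matching ones in the other avatar nodes below) fold the cloud photo id into the video content id using Swift's wrapping addition, so each profile video gets a stable, distinct id without risking an Int64 overflow trap. A minimal sketch of that pattern; the helper name is illustrative, not the actual Telegram code:

    // Sketch: combine a base identifier with a resource identifier
    // using wrapping addition (&+), which wraps instead of trapping on overflow.
    func combinedVideoId(baseId: Int64, photoId: Int64?) -> Int64 {
        var id = baseId
        if let photoId = photoId {
            id = id &+ photoId
        }
        return id
    }

    let id = combinedVideoId(baseId: 123, photoId: Int64.max) // wraps, no crash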
@ -913,9 +916,13 @@ final class PeerInfoAvatarListContainerNode: ASDisplayNode {
guard case let .image(image) = item else {
return false
}

var items: [PeerInfoAvatarListItem] = []
var entries: [AvatarGalleryEntry] = []
let previousIndex = self.currentIndex

+ var index = 0
+ var deletedIndex: Int?
for entry in self.galleryEntries {
switch entry {
case let .topImage(representations, videoRepresentations, _, _, immediateThumbnailData, _):
@ -925,9 +932,25 @@ final class PeerInfoAvatarListContainerNode: ASDisplayNode {
if image.0 != reference {
entries.append(entry)
items.append(.image(reference, representations, videoRepresentations, immediateThumbnailData))
+ } else {
+ deletedIndex = index
}
}
+ index += 1
}

+ if let peer = self.peer, peer is TelegramGroup || peer is TelegramChannel, deletedIndex == 0 {
+ self.galleryEntries = []
+ self.items = []
+ self.itemsUpdated?([])
+ self.currentIndex = 0
+ if let size = self.validLayout {
+ self.updateItems(size: size, update: true, transition: .immediate, stripTransition: .immediate, synchronous: true)
+ }
+ return true
+ }
+
self.galleryEntries = normalizeEntries(entries)
self.items = items
self.itemsUpdated?(items)
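The deletion path above rebuilds the gallery list while remembering the position the removed entry occupied; when the first (profile) photo of a group or channel is the one deleted, the whole list is cleared. A compact sketch of the same bookkeeping, assuming a plain array of ids rather than AvatarGalleryEntry:

    // Sketch: filter out one element while recording the index it occupied.
    func removeEntry(_ target: Int, from entries: [Int]) -> (remaining: [Int], deletedIndex: Int?) {
        var remaining: [Int] = []
        var deletedIndex: Int?
        for (index, entry) in entries.enumerated() {
            if entry != target {
                remaining.append(entry)
            } else {
                deletedIndex = index
            }
        }
        return (remaining, deletedIndex)
    }

    let (rest, removedAt) = removeEntry(2, from: [1, 2, 3]) // rest == [1, 3], removedAt == 1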
@ -1129,7 +1152,7 @@ final class PeerInfoAvatarListContainerNode: ASDisplayNode {
stripTransition.updateAlpha(node: self.loadingStripNode, alpha: self.loading ? 1.0 : 0.0)

self.activeStripNode.isHidden = self.stripNodes.count < 2
- self.loadingStripNode.isHidden = !self.loading
+ self.loadingStripNode.isHidden = self.stripNodes.count < 2 || !self.loading
}

if let item = self.items.first, let itemNode = self.itemNodes[item.id] {
@ -1239,6 +1262,9 @@ final class PeerInfoAvatarTransformContainerNode: ASDisplayNode {
videoRepresentations = videoRepresentationsValue
immediateThumbnailData = immediateThumbnail
id = Int64(peer.id.id)
+ if let resource = videoRepresentations.first?.representation.resource as? CloudPhotoSizeMediaResource {
+ id = id &+ resource.photoId
+ }
case let .image(reference, imageRepresentations, videoRepresentationsValue, immediateThumbnail):
representations = imageRepresentations
videoRepresentations = videoRepresentationsValue
@ -1254,6 +1280,8 @@ final class PeerInfoAvatarTransformContainerNode: ASDisplayNode {
let videoFileReference = FileMediaReference.avatarList(peer: peerReference, media: TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.LocalFile, id: 0), partialReference: nil, resource: video.representation.resource, previewRepresentations: representations.map { $0.representation }, videoThumbnails: [], immediateThumbnailData: immediateThumbnailData, mimeType: "video/mp4", size: nil, attributes: [.Animated, .Video(duration: 0, size: video.representation.dimensions, flags: [])]))
let videoContent = NativeVideoContent(id: .profileVideo(id, nil), fileReference: videoFileReference, streamVideo: isMediaStreamable(resource: video.representation.resource) ? .conservative : .none, loopVideo: true, enableSound: false, fetchAutomatically: true, onlyFullSizeThumbnail: false, autoFetchFullSizeThumbnail: true, startTimestamp: video.representation.startTimestamp, continuePlayingWithoutSoundOnLostAudioSession: false, placeholderColor: .clear)
if videoContent.id != self.videoContent?.id {
+ self.videoNode?.removeFromSupernode()
+
let mediaManager = self.context.sharedContext.mediaManager
let videoNode = UniversalVideoNode(postbox: self.context.account.postbox, audioSession: mediaManager.audioSession, manager: mediaManager.universalVideoManager, decoration: GalleryVideoDecoration(), content: videoContent, priority: .embedded)
videoNode.isUserInteractionEnabled = false
@ -1520,6 +1548,9 @@ final class PeerInfoEditingAvatarNode: ASDisplayNode {
videoRepresentations = videoRepresentationsValue
immediateThumbnailData = immediateThumbnail
id = Int64(peer.id.id)
+ if let resource = videoRepresentations.first?.representation.resource as? CloudPhotoSizeMediaResource {
+ id = id &+ resource.photoId
+ }
case let .image(reference, imageRepresentations, videoRepresentationsValue, immediateThumbnail):
representations = imageRepresentations
videoRepresentations = videoRepresentationsValue
@ -1535,6 +1566,8 @@ final class PeerInfoEditingAvatarNode: ASDisplayNode {
let videoFileReference = FileMediaReference.avatarList(peer: peerReference, media: TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.LocalFile, id: 0), partialReference: nil, resource: video.representation.resource, previewRepresentations: representations.map { $0.representation }, videoThumbnails: [], immediateThumbnailData: immediateThumbnailData, mimeType: "video/mp4", size: nil, attributes: [.Animated, .Video(duration: 0, size: video.representation.dimensions, flags: [])]))
let videoContent = NativeVideoContent(id: .profileVideo(id, nil), fileReference: videoFileReference, streamVideo: isMediaStreamable(resource: video.representation.resource) ? .conservative : .none, loopVideo: true, enableSound: false, fetchAutomatically: true, onlyFullSizeThumbnail: false, autoFetchFullSizeThumbnail: true, startTimestamp: video.representation.startTimestamp, continuePlayingWithoutSoundOnLostAudioSession: false, placeholderColor: .clear)
if videoContent.id != self.videoContent?.id {
+ self.videoNode?.removeFromSupernode()
+
let mediaManager = self.context.sharedContext.mediaManager
let videoNode = UniversalVideoNode(postbox: self.context.account.postbox, audioSession: mediaManager.audioSession, manager: mediaManager.universalVideoManager, decoration: GalleryVideoDecoration(), content: videoContent, priority: .gallery)
videoNode.isUserInteractionEnabled = false
@ -2580,6 +2613,10 @@ final class PeerInfoHeaderNode: ASDisplayNode {
}

func initiateAvatarExpansion(gallery: Bool, first: Bool) {
+ if let peer = self.peer, peer.profileImageRepresentations.isEmpty && gallery {
+ self.requestOpenAvatarForEditing?(false)
+ return
+ }
if self.isAvatarExpanded || gallery {
if let currentEntry = self.avatarListNode.listContainerNode.currentEntry, let firstEntry = self.avatarListNode.listContainerNode.galleryEntries.first {
let entry = first ? firstEntry : currentEntry
@ -612,36 +612,41 @@ private final class PeerInfoInteraction {

private let enabledBioEntities: EnabledEntityTypes = [.url, .mention, .hashtag]

- private func settingsItems(data: PeerInfoScreenData?, context: AccountContext, presentationData: PresentationData, interaction: PeerInfoInteraction) -> [(AnyHashable, [PeerInfoScreenItem])] {
+ private enum SettingsSection: Int, CaseIterable {
+ case edit
+ case phone
+ case accounts
+ case proxy
+ case shortcuts
+ case advanced
+ case extra
+ case support
+ }
+
+ private func settingsItems(data: PeerInfoScreenData?, context: AccountContext, presentationData: PresentationData, interaction: PeerInfoInteraction, isExpanded: Bool) -> [(AnyHashable, [PeerInfoScreenItem])] {
guard let data = data else {
return []
}

- enum Section: Int, CaseIterable {
- case edit
- case phone
- case accounts
- case proxy
- case shortcuts
- case advanced
- case extra
- case support
- }
-
- var items: [Section: [PeerInfoScreenItem]] = [:]
- for section in Section.allCases {
+ var items: [SettingsSection: [PeerInfoScreenItem]] = [:]
+ for section in SettingsSection.allCases {
items[section] = []
}

let setPhotoTitle: String
+ let displaySetPhoto: Bool
if let peer = data.peer, !peer.profileImageRepresentations.isEmpty {
setPhotoTitle = presentationData.strings.Settings_SetNewProfilePhotoOrVideo
+ displaySetPhoto = isExpanded
} else {
setPhotoTitle = presentationData.strings.Settings_SetProfilePhotoOrVideo
+ displaySetPhoto = true
+ }
+ if displaySetPhoto {
+ items[.edit]!.append(PeerInfoScreenActionItem(id: 0, text: setPhotoTitle, icon: UIImage(bundleImageName: "Settings/SetAvatar"), action: {
+ interaction.openSettings(.avatar)
+ }))
}
- items[.edit]!.append(PeerInfoScreenActionItem(id: 0, text: setPhotoTitle, icon: UIImage(bundleImageName: "Settings/SetAvatar"), action: {
- interaction.openSettings(.avatar)
- }))
if let peer = data.peer, peer.addressName == nil {
items[.edit]!.append(PeerInfoScreenActionItem(id: 1, text: presentationData.strings.Settings_SetUsername, icon: UIImage(bundleImageName: "Settings/SetUsername"), action: {
interaction.openSettings(.username)
@ -784,7 +789,7 @@ private func settingsItems(data: PeerInfoScreenData?, context: AccountContext, p
}))

var result: [(AnyHashable, [PeerInfoScreenItem])] = []
- for section in Section.allCases {
+ for section in SettingsSection.allCases {
if let sectionItems = items[section], !sectionItems.isEmpty {
result.append((section, sectionItems))
}
@ -830,10 +835,14 @@ private func settingsEditingItems(data: PeerInfoScreenData?, state: PeerInfoStat

if let user = data.peer as? TelegramUser {
items[.info]!.append(PeerInfoScreenDisclosureItem(id: ItemPhoneNumber, label: .text(user.phone.flatMap({ formatPhoneNumber($0) }) ?? ""), text: presentationData.strings.Settings_PhoneNumber, action: {
interaction.openSettings(.phoneNumber)
}))
}
- items[.info]!.append(PeerInfoScreenDisclosureItem(id: ItemUsername, label: .text(data.peer?.addressName.flatMap({ "@\($0)" }) ?? ""), text: presentationData.strings.Settings_Username, action: {
+ var username = ""
+ if let addressName = data.peer?.addressName, !addressName.isEmpty {
+ username = "@\(addressName)"
+ }
+ items[.info]!.append(PeerInfoScreenDisclosureItem(id: ItemUsername, label: .text(username), text: presentationData.strings.Settings_Username, action: {
interaction.openSettings(.username)
}))

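The refactor above lifts the section enum out of `settingsItems` (renamed to `SettingsSection`) so that both the item builder and the layout code further down can reason about which section comes first. The assembly pattern, pre-seeding a dictionary keyed by a `CaseIterable` enum, appending into it, then emitting only the non-empty sections in declaration order, can be sketched independently of the Telegram item types; `Item` below is a stand-in:

    // Sketch of the enum-keyed section assembly used above; `Item` is a placeholder type.
    enum Section: Int, CaseIterable {
        case edit, phone, accounts, proxy, shortcuts, advanced, extra, support
    }

    struct Item { let id: Int; let text: String }

    func buildSections(makeItems: (Section) -> [Item]) -> [(Section, [Item])] {
        var items: [Section: [Item]] = [:]
        for section in Section.allCases {
            items[section] = []
        }
        for section in Section.allCases {
            items[section]!.append(contentsOf: makeItems(section))
        }
        // Keep only non-empty sections, in the order the enum declares them.
        return Section.allCases.compactMap { section in
            guard let sectionItems = items[section], !sectionItems.isEmpty else { return nil }
            return (section, sectionItems)
        }
    }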
@ -2113,11 +2122,11 @@ private final class PeerInfoScreenNode: ViewControllerTracingNode, UIScrollViewD
galleryController.avatarPhotoEditCompletion = { [weak self] image in
self?.updateProfilePhoto(image)
}
- galleryController.avatarVideoEditCompletion = { [weak self] image, url, adjustments in
+ galleryController.avatarVideoEditCompletion = { [weak self] image, asset, adjustments in
- self?.updateProfileVideo(image, url: url, adjustments: adjustments)
+ self?.updateProfileVideo(image, asset: asset, adjustments: adjustments)
}
galleryController.removedEntry = { [weak self] entry in
- self?.headerNode.avatarListNode.listContainerNode.deleteItem(PeerInfoAvatarListItem(entry: entry))
+ let _ = self?.headerNode.avatarListNode.listContainerNode.deleteItem(PeerInfoAvatarListItem(entry: entry))
}
strongSelf.hiddenAvatarRepresentationDisposable.set((galleryController.hiddenMedia |> deliverOnMainQueue).start(next: { entry in
self?.headerNode.updateAvatarIsHidden(entry: entry)
@ -2497,7 +2506,7 @@ private final class PeerInfoScreenNode: ViewControllerTracingNode, UIScrollViewD
case .search:
strongSelf.activateSearch()
case .editPhoto, .editVideo:
- strongSelf.openAvatarOptions()
+ break
}
}
}

@ -3837,68 +3846,7 @@ private final class PeerInfoScreenNode: ViewControllerTracingNode, UIScrollViewD
}
}
}

- private func editAvatarItem(_ item: PeerInfoAvatarListItem) {
- guard case let .image(reference, representations, videoRepresentations, _) = item else {
- return
- }
-
- let mediaReference: AnyMediaReference
- if let video = videoRepresentations.last {
- mediaReference = .standalone(media: TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.LocalFile, id: 0), partialReference: nil, resource: video.representation.resource, previewRepresentations: [], videoThumbnails: [], immediateThumbnailData: nil, mimeType: "video/mp4", size: nil, attributes: [.Animated, .Video(duration: 0, size: video.representation.dimensions, flags: [])]))
- } else {
- let media = TelegramMediaImage(imageId: MediaId(namespace: 0, id: 0), representations: representations.map({ $0.representation }), immediateThumbnailData: nil, reference: nil, partialReference: nil, flags: [])
- mediaReference = .standalone(media: media)
- }
-
- var dismissStatus: (() -> Void)?
- let statusController = OverlayStatusController(theme: self.presentationData.theme, type: .loading(cancelled: {
- dismissStatus?()
- }))
- dismissStatus = { [weak self, weak statusController] in
- self?.editAvatarDisposable.set(nil)
- statusController?.dismiss()
- }
- self.controller?.present(statusController, in: .window(.root))
-
- self.editAvatarDisposable.set((fetchMediaData(context: self.context, postbox: self.context.account.postbox, mediaReference: mediaReference)
- |> deliverOnMainQueue).start(next: { [weak self] state, isImage in
- guard let strongSelf = self else {
- return
- }
- switch state {
- case .progress:
- break
- case let .data(data):
- dismissStatus?()
-
- let image: UIImage?
- let video: URL?
- if isImage {
- if let fileData = try? Data(contentsOf: URL(fileURLWithPath: data.path)) {
- image = UIImage(data: fileData)
- } else {
- image = nil
- }
- video = nil
- } else {
- image = nil
- video = URL(fileURLWithPath: data.path)
- }
-
- presentLegacyAvatarEditor(theme: strongSelf.presentationData.theme, image: image, video: video, present: { [weak self] c, a in
- if let strongSelf = self {
- strongSelf.controller?.present(c, in: .window(.root), with: a, blockInteraction: true)
- }
- }, imageCompletion: { [weak self] image in
- self?.updateProfilePhoto(image)
- }, videoCompletion: { [weak self] image, url, adjustments in
- self?.updateProfileVideo(image, url: url, adjustments: adjustments)
- })
- }
- }))
- }
-
private func setMainAvatar(_ item: PeerInfoAvatarListItem) {
if self.data?.peer?.id == self.context.account.peerId {
if case let .image(reference, _, _, _) = item {
@ -3911,11 +3859,13 @@ private final class PeerInfoScreenNode: ViewControllerTracingNode, UIScrollViewD
}
}

- private func deleteAvatar(_ item: PeerInfoAvatarListItem) {
+ private func deleteAvatar(_ item: PeerInfoAvatarListItem, remove: Bool = true) {
if self.data?.peer?.id == self.context.account.peerId {
if case let .image(reference, _, _, _) = item {
if let reference = reference {
- let _ = removeAccountPhoto(network: self.context.account.network, reference: reference).start()
+ if remove {
+ let _ = removeAccountPhoto(network: self.context.account.network, reference: reference).start()
+ }
let dismiss = self.headerNode.avatarListNode.listContainerNode.deleteItem(item)
if dismiss {
if self.headerNode.isAvatarExpanded {
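`deleteAvatar` now takes a `remove` flag so the same local-cleanup path can run without re-issuing the network request when the photo was already deleted elsewhere (the `didFinishWithDelete` handler further down calls it with `remove: false`). A generic sketch of that pattern, with placeholder names:

    // Sketch: a defaulted flag lets callers skip the remote side effect
    // while still running the shared local cleanup. Names are illustrative.
    func deleteEntry(_ id: Int, removeRemotely: Bool = true) {
        if removeRemotely {
            sendRemoteDelete(id)   // only when the server does not already know
        }
        removeFromLocalList(id)    // always keep the local state consistent
    }

    func sendRemoteDelete(_ id: Int) { /* network call */ }
    func removeFromLocalList(_ id: Int) { /* local state update */ }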
@ -3954,59 +3904,6 @@ private final class PeerInfoScreenNode: ViewControllerTracingNode, UIScrollViewD
}
}

- private func openAvatarOptions() {
- let item = self.headerNode.avatarListNode.listContainerNode.currentItemNode?.item
- let index = self.headerNode.avatarListNode.listContainerNode.currentIndex
-
- let actionSheet = ActionSheetController(presentationData: self.presentationData)
- let dismissAction: () -> Void = { [weak actionSheet] in
- actionSheet?.dismissAnimated()
- }
-
- var items: [ActionSheetItem] = []
- items.append( ActionSheetButtonItem(title: self.presentationData.strings.Settings_SetNewProfilePhotoOrVideo, color: .accent, action: { [weak self] in
- dismissAction()
- self?.openAvatarForEditing(hasRemove: false)
- }))
-
- if let item = item, case let .image(image) = item {
- if index > 0 {
- let setMainTitle: String
- if image.2.isEmpty {
- setMainTitle = self.presentationData.strings.ProfilePhoto_SetMainPhoto
- } else {
- setMainTitle = self.presentationData.strings.ProfilePhoto_SetMainVideo
- }
- items.append(ActionSheetButtonItem(title: setMainTitle, color: .accent, action: { [weak self] in
- dismissAction()
- self?.setMainAvatar(item)
- }))
- }
-
- // items.append(ActionSheetButtonItem(title: self.presentationData.strings.ProfilePhoto_OpenInEditor, color: .accent, action: { [weak self] in
- // dismissAction()
- // self?.editAvatarItem(item)
- // }))
-
- let deleteTitle: String
- if image.2.isEmpty {
- deleteTitle = self.presentationData.strings.GroupInfo_SetGroupPhotoDelete
- } else {
- deleteTitle = self.presentationData.strings.Settings_RemoveVideo
- }
- items.append(ActionSheetButtonItem(title: deleteTitle, color: .destructive, action: { [weak self] in
- dismissAction()
- self?.deleteAvatar(item)
- }))
- }
- actionSheet.setItemGroups([
- ActionSheetItemGroup(items: items),
- ActionSheetItemGroup(items: [ActionSheetButtonItem(title: presentationData.strings.Common_Cancel, action: { dismissAction() })])
- ])
- self.view.endEditing(true)
- self.controller?.present(actionSheet, in: .window(.root))
- }
-
private func updateProfilePhoto(_ image: UIImage) {
guard let data = image.jpegData(compressionQuality: 0.6) else {
return
@ -4064,7 +3961,7 @@ private final class PeerInfoScreenNode: ViewControllerTracingNode, UIScrollViewD
}
}

- private func updateProfileVideo(_ image: UIImage, url: URL, adjustments: TGVideoEditAdjustments?) {
+ private func updateProfileVideo(_ image: UIImage, asset: Any?, adjustments: TGVideoEditAdjustments?) {
guard let data = image.jpegData(compressionQuality: 0.6) else {
return
}
@ -4093,12 +3990,6 @@ private final class PeerInfoScreenNode: ViewControllerTracingNode, UIScrollViewD

let account = self.context.account
let signal = Signal<TelegramMediaResource, UploadPeerPhotoError> { [weak self] subscriber in
- var filteredPath = url.path
- if filteredPath.hasPrefix("file://") {
- filteredPath = String(filteredPath[filteredPath.index(filteredPath.startIndex, offsetBy: "file://".count)])
- }
-
- let avAsset = AVURLAsset(url: URL(fileURLWithPath: filteredPath))
let entityRenderer: LegacyPaintEntityRenderer? = adjustments.flatMap { adjustments in
if let paintingData = adjustments.paintingData, paintingData.hasAnimation {
return LegacyPaintEntityRenderer(account: account, adjustments: adjustments)
@ -4107,7 +3998,31 @@ private final class PeerInfoScreenNode: ViewControllerTracingNode, UIScrollViewD
}
}
let uploadInterface = LegacyLiveUploadInterface(account: account)
- let signal = TGMediaVideoConverter.convert(avAsset, adjustments: adjustments, watcher: uploadInterface, entityRenderer: entityRenderer)!
+ let signal: SSignal
+ if let asset = asset as? AVAsset {
+ signal = TGMediaVideoConverter.convert(asset, adjustments: adjustments, watcher: uploadInterface, entityRenderer: entityRenderer)!
+ } else if let url = asset as? URL, let data = try? Data(contentsOf: url, options: [.mappedRead]), let image = UIImage(data: data), let entityRenderer = entityRenderer {
+ let durationSignal: SSignal = SSignal(generator: { subscriber in
+ let disposable = (entityRenderer.duration()).start(next: { duration in
+ subscriber?.putNext(duration)
+ subscriber?.putCompletion()
+ })
+
+ return SBlockDisposable(block: {
+ disposable.dispose()
+ })
+ })
+ signal = durationSignal.map(toSignal: { duration -> SSignal? in
+ if let duration = duration as? Double {
+ return TGMediaVideoConverter.renderUIImage(image, duration: duration, adjustments: adjustments, watcher: nil, entityRenderer: entityRenderer)!
+ } else {
+ return SSignal.single(nil)
+ }
+ })
+
+ } else {
+ signal = SSignal.complete()
+ }
+
let signalDisposable = signal.start(next: { next in
if let result = next as? TGMediaVideoConversionResult {
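The new branch above has to produce an `SSignal` (the legacy reactive type used by TGMediaVideoConverter) in both cases: a real `AVAsset` goes through the normal converter, while a still image is first asked for its animation duration via the entity renderer and only then rendered into a looping video. A rough, self-contained sketch of the same "fetch a value, then switch to the real work" shape, expressed with SwiftSignalKit rather than the legacy SSignal API purely for illustration; `fetchDuration` and `renderLoopingVideo` are placeholder parameters, not real Telegram functions:

    import Foundation
    import SwiftSignalKit

    // Sketch: bridge a callback-based producer into a Signal, tie its lifetime to a
    // disposable, then chain into the signal that does the actual rendering.
    func renderedVideo(fetchDuration: @escaping (@escaping (Double) -> Void) -> Disposable,
                       renderLoopingVideo: @escaping (Double) -> Signal<URL, NoError>) -> Signal<URL, NoError> {
        let durationSignal = Signal<Double, NoError> { subscriber in
            let innerDisposable = fetchDuration { duration in
                subscriber.putNext(duration)
                subscriber.putCompletion()
            }
            return ActionDisposable {
                innerDisposable.dispose()
            }
        }
        return durationSignal
        |> mapToSignal { duration in
            return renderLoopingVideo(duration)
        }
    }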
@ -4255,10 +4170,10 @@ private final class PeerInfoScreenNode: ViewControllerTracingNode, UIScrollViewD
self?.updateProfilePhoto(image)
}
}
- mixin.didFinishWithVideo = { [weak self] image, url, adjustments in
+ mixin.didFinishWithVideo = { [weak self] image, asset, adjustments in
- if let image = image, let url = url {
+ if let image = image, let asset = asset {
completion()
- self?.updateProfileVideo(image, url: url, adjustments: adjustments)
+ self?.updateProfileVideo(image, asset: asset, adjustments: adjustments)
}
}
mixin.didFinishWithDelete = {
@ -4267,7 +4182,7 @@ private final class PeerInfoScreenNode: ViewControllerTracingNode, UIScrollViewD
}

if let item = item {
- strongSelf.deleteAvatar(item)
+ strongSelf.deleteAvatar(item, remove: false)
}

let _ = strongSelf.currentAvatarMixin.swap(nil)
@ -5059,19 +4974,21 @@ private final class PeerInfoScreenNode: ViewControllerTracingNode, UIScrollViewD
} else {
transition.updateFrame(node: self.headerNode, frame: headerFrame)
}
- if !self.isMediaOnly {
- contentHeight += headerHeight
- if !self.isSettings {
- contentHeight += sectionSpacing
- }
- } else {
+ if self.isMediaOnly {
contentHeight += navigationHeight
}

var validRegularSections: [AnyHashable] = []
if !self.isMediaOnly {
- let items = self.isSettings ? settingsItems(data: self.data, context: self.context, presentationData: self.presentationData, interaction: self.interaction) : infoItems(data: self.data, context: self.context, presentationData: self.presentationData, interaction: self.interaction, nearbyPeerDistance: self.nearbyPeerDistance, callMessages: self.callMessages)
+ let items = self.isSettings ? settingsItems(data: self.data, context: self.context, presentationData: self.presentationData, interaction: self.interaction, isExpanded: self.headerNode.isAvatarExpanded) : infoItems(data: self.data, context: self.context, presentationData: self.presentationData, interaction: self.interaction, nearbyPeerDistance: self.nearbyPeerDistance, callMessages: self.callMessages)

+ contentHeight += headerHeight
+ if !self.isSettings {
+ contentHeight += sectionSpacing
+ } else if let (section, _) = items.first, let sectionValue = section.base as? SettingsSection, sectionValue != .edit && !self.state.isEditing {
+ contentHeight += sectionSpacing
+ }
+
for (sectionId, sectionItems) in items {
validRegularSections.append(sectionId)

@ -43,6 +43,7 @@ private final class ProfileDataPhotoPreloadContext {

let disposable: Disposable
var value: Any?
+ var skipNext = false
var emptyTimer: SwiftSignalKit.Timer?

init(disposable: Disposable) {
@ -210,6 +211,9 @@ private final class PeerChannelMemberCategoriesContextsManagerImpl {
let context: ProfileDataPhotoPreloadContext
if let current = self.profileDataPhotoPreloadContexts[peerId] {
context = current
+ if let _ = context.value {
+ context.skipNext = true
+ }
} else {
let disposable = MetaDisposable()
context = ProfileDataPhotoPreloadContext(disposable: disposable)
@ -219,6 +223,10 @@ private final class PeerChannelMemberCategoriesContextsManagerImpl {
guard let context = context else {
return
}
+ if context.skipNext {
+ context.skipNext = false
+ return
+ }
context.value = value
for f in context.subscribers.copyItems() {
f(value)
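The preload context above gains a `skipNext` flag: when an existing context that already holds a value is reused, the next incoming update is dropped exactly once, presumably so that restarting the producer does not immediately overwrite or re-emit what subscribers already have. A small standalone sketch of that guard, with a simplified placeholder context type:

    // Sketch: drop exactly one incoming update after reusing a context that already has a value.
    // The "why" comment below reflects an assumption, not the original author's note.
    final class PreloadContext<Value> {
        var value: Value?
        var skipNext = false
        var subscribers: [(Value) -> Void] = []

        func reuse() {
            // Assumption: the next push would duplicate what is already cached.
            if value != nil {
                skipNext = true
            }
        }

        func push(_ newValue: Value) {
            if skipNext {
                skipNext = false
                return
            }
            value = newValue
            for subscriber in subscribers {
                subscriber(newValue)
            }
        }
    }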
@ -14,6 +14,8 @@ static_library(
"tgcalls/tgcalls/legacy/**",
"tgcalls/tgcalls/platform/tdesktop/**",
"tgcalls/tgcalls/platform/windows/**",
+ "tgcalls/tgcalls/platform/darwin/VideoCameraCapturerMac.*",
+ "tgcalls/tgcalls/platform/darwin/VideoMetalViewMac.*",
]),
has_cpp = True,
headers = merge_maps([
@ -15,6 +15,8 @@ objc_library(
"tgcalls/tgcalls/legacy/**",
"tgcalls/tgcalls/platform/tdesktop/**",
"tgcalls/tgcalls/platform/windows/**",
+ "tgcalls/tgcalls/platform/darwin/VideoCameraCapturerMac.*",
+ "tgcalls/tgcalls/platform/darwin/VideoMetalViewMac.*",
]),
hdrs = glob([
"PublicHeaders/**/*.h",
@ -61,7 +61,7 @@
- (instancetype _Nonnull)init {
self = [super init];
if (self != nil) {
- _interface = tgcalls::CreateVideoCapture();
+ _interface = tgcalls::VideoCaptureInterface::Create();
}
return self;
}
@ -541,7 +541,7 @@ static void (*InternalVoipLoggingFunction)(NSString *) = NULL;
- (void)acceptVideo:(OngoingCallThreadLocalContextVideoCapturer * _Nullable)videoCapturer {
if (_tgVoip && _videoCapturer == nil) {
_videoCapturer = videoCapturer;
- _tgVoip->acceptVideo([_videoCapturer getInterface]);
+ _tgVoip->requestVideo([_videoCapturer getInterface]);
}
}

@ -1 +1 @@
- Subproject commit 659712186b39c3f077e3ad091d1de036154064a7
+ Subproject commit 83c85d20ccdde154acca4b964317de1e695f95d1
@ -443,7 +443,7 @@ public func legacyEnqueueWebSearchMessages(_ selectionState: TGMediaSelectionCon
if animated {
dict["isAnimation"] = true
if let photoEditorValues = adjustments as? PGPhotoEditorValues {
- dict["adjustments"] = TGVideoEditAdjustments(photoEditorValues: photoEditorValues)
+ dict["adjustments"] = TGVideoEditAdjustments(photoEditorValues: photoEditorValues, preset: TGMediaVideoConversionPresetAnimation)
}

let filePath = NSTemporaryDirectory().appending("/gifvideo_\(arc4random()).jpg")
|
Loading…
x
Reference in New Issue
Block a user