Merge branch 'master' into experimental-2

This commit is contained in:
Ali 2020-07-16 21:17:22 +04:00
commit fcf1ed89c0
46 changed files with 4742 additions and 4597 deletions

View File

@ -5694,3 +5694,5 @@ Any member of this group will be able to see messages in the channel.";
"SettingsSearch_Synonyms_ChatFolders" = "";
"EditProfile.NameAndPhotoOrVideoHelp" = "Enter your name and add an optional profile photo or video.";
"Settings.RemoveConfirmation" = "Remove";

View File

@ -256,6 +256,7 @@ public final class AvatarNode: ASDisplayNode {
var iconColor = theme.chatList.unpinnedArchiveAvatarColor.foregroundColor
var backgroundColor = theme.chatList.unpinnedArchiveAvatarColor.backgroundColors.topColor
let animationBackgroundNode = ASImageNode()
animationBackgroundNode.isUserInteractionEnabled = false
animationBackgroundNode.frame = self.imageNode.frame
if let overrideImage = self.overrideImage, case let .archivedChatsIcon(hiddenByDefault) = overrideImage {
let backgroundColors: (UIColor, UIColor)
@ -274,6 +275,7 @@ public final class AvatarNode: ASDisplayNode {
self.addSubnode(animationBackgroundNode)
let animationNode = AnimationNode(animation: "anim_archiveAvatar", colors: ["box1.box1.Fill 1": iconColor, "box3.box3.Fill 1": iconColor, "box2.box2.Fill 1": backgroundColor], scale: 0.1653828)
animationNode.isUserInteractionEnabled = false
animationNode.completion = { [weak animationBackgroundNode, weak self] in
self?.imageNode.isHidden = false
animationBackgroundNode?.removeFromSupernode()
@ -344,6 +346,7 @@ public final class AvatarNode: ASDisplayNode {
if self.editOverlayNode == nil {
let editOverlayNode = AvatarEditOverlayNode()
editOverlayNode.frame = self.imageNode.frame
editOverlayNode.isUserInteractionEnabled = false
self.addSubnode(editOverlayNode)
self.editOverlayNode = editOverlayNode

View File

@ -84,7 +84,7 @@ final class NavigationSplitContainer: ASDisplayNode {
}
func update(layout: ContainerViewLayout, masterControllers: [ViewController], detailControllers: [ViewController], transition: ContainedViewLayoutTransition) {
let masterWidth: CGFloat = 375.0 // min(max(320.0, floor(layout.size.width / 3.0)), floor(layout.size.width / 2.0))
let masterWidth: CGFloat = min(max(320.0, floor(layout.size.width / 3.0)), floor(layout.size.width / 2.0))
let detailWidth = layout.size.width - masterWidth
self.masterScrollToTopView.frame = CGRect(origin: CGPoint(x: 0.0, y: -1.0), size: CGSize(width: masterWidth, height: 1.0))

View File

@ -296,11 +296,13 @@ public class ItemListAvatarAndNameInfoItemNode: ListViewItemNode, ItemListItemNo
self.avatarNode = AvatarNode(font: avatarFont)
self.updatingAvatarOverlay = ASImageNode()
self.updatingAvatarOverlay.isUserInteractionEnabled = false
self.updatingAvatarOverlay.displayWithoutProcessing = true
self.updatingAvatarOverlay.displaysAsynchronously = false
self.activityIndicator = ActivityIndicator(type: .custom(.white, 22.0, 1.0, false))
self.activityIndicator.isHidden = true
self.activityIndicator.isUserInteractionEnabled = false
self.nameNode = TextNode()
self.nameNode.isUserInteractionEnabled = false

View File

@ -45,7 +45,7 @@
@property (nonatomic, copy) void (^cameraPressed)(TGAttachmentCameraView *cameraView);
@property (nonatomic, copy) void (^sendPressed)(TGMediaAsset *currentItem, bool asFiles, bool silentPosting, int32_t scheduleTime);
@property (nonatomic, copy) void (^avatarCompletionBlock)(UIImage *image);
@property (nonatomic, copy) void (^avatarVideoCompletionBlock)(UIImage *image, NSURL *url, TGVideoEditAdjustments *adjustments);
@property (nonatomic, copy) void (^avatarVideoCompletionBlock)(UIImage *image, AVAsset *asset, TGVideoEditAdjustments *adjustments);
@property (nonatomic, copy) void (^editorOpened)(void);
@property (nonatomic, copy) void (^editorClosed)(void);

View File

@ -1,3 +1,4 @@
#import <AVFoundation/AVFoundation.h>
#import <LegacyComponents/TGOverlayControllerWindow.h>
#import <LegacyComponents/TGOverlayController.h>
#import <LegacyComponents/LegacyComponentsContext.h>

View File

@ -76,7 +76,7 @@ typedef enum
@property (nonatomic, copy) NSDictionary *(^descriptionGenerator)(id, NSString *, NSArray *, NSString *);
@property (nonatomic, copy) void (^avatarCompletionBlock)(UIImage *image);
@property (nonatomic, copy) void (^completionBlock)(NSArray *signals, bool silentPosting, int32_t scheduleTime);
@property (nonatomic, copy) void (^avatarVideoCompletionBlock)(UIImage *image, NSURL *url, TGVideoEditAdjustments *adjustments);
@property (nonatomic, copy) void (^avatarVideoCompletionBlock)(UIImage *image, AVAsset *asset, TGVideoEditAdjustments *adjustments);
@property (nonatomic, copy) void (^singleCompletionBlock)(id<TGMediaEditableItem> item, TGMediaEditingContext *editingContext);
@property (nonatomic, copy) void (^dismissalBlock)(void);
@property (nonatomic, copy) void (^selectionBlock)(TGMediaAsset *asset, UIImage *);
@ -94,7 +94,7 @@ typedef enum
- (NSArray *)resultSignalsWithCurrentItem:(TGMediaAsset *)currentItem descriptionGenerator:(id (^)(id, NSString *, NSArray *, NSString *))descriptionGenerator;
- (void)completeWithAvatarImage:(UIImage *)image;
- (void)completeWithAvatarVideo:(NSURL *)url adjustments:(TGVideoEditAdjustments *)adjustments image:(UIImage *)image;
- (void)completeWithAvatarVideo:(AVAsset *)asset adjustments:(TGVideoEditAdjustments *)adjustments image:(UIImage *)image;
- (void)completeWithCurrentItem:(TGMediaAsset *)currentItem silentPosting:(bool)silentPosting scheduleTime:(int32_t)scheduleTime;
- (void)dismiss;

View File

@ -1,4 +1,5 @@
#import <Foundation/Foundation.h>
#import <AVFoundation/AVFoundation.h>
#import <LegacyComponents/LegacyComponentsContext.h>
@class TGViewController;
@ -13,7 +14,7 @@ typedef void (^TGMediaAvatarPresentImpl)(id<LegacyComponentsContext>, void (^)(U
@interface TGMediaAvatarMenuMixin : NSObject
@property (nonatomic, copy) void (^didFinishWithImage)(UIImage *image);
@property (nonatomic, copy) void (^didFinishWithVideo)(UIImage *image, NSURL *url, TGVideoEditAdjustments *adjustments);
@property (nonatomic, copy) void (^didFinishWithVideo)(UIImage *image, AVAsset *asset, TGVideoEditAdjustments *adjustments);
@property (nonatomic, copy) void (^didFinishWithDelete)(void);
@property (nonatomic, copy) void (^didFinishWithView)(void);
@property (nonatomic, copy) void (^didDismiss)(void);

View File

@ -53,7 +53,7 @@ typedef enum {
@property (nonatomic, copy) void (^willFinishEditing)(id<TGMediaEditAdjustments> adjustments, id temporaryRep, bool hasChanges);
@property (nonatomic, copy) void (^didFinishRenderingFullSizeImage)(UIImage *fullSizeImage);
@property (nonatomic, copy) void (^didFinishEditing)(id<TGMediaEditAdjustments> adjustments, UIImage *resultImage, UIImage *thumbnailImage, bool hasChanges);
@property (nonatomic, copy) void (^didFinishEditingVideo)(NSURL *url, id<TGMediaEditAdjustments> adjustments, UIImage *resultImage, UIImage *thumbnailImage, bool hasChanges);
@property (nonatomic, copy) void (^didFinishEditingVideo)(AVAsset *asset, id<TGMediaEditAdjustments> adjustments, UIImage *resultImage, UIImage *thumbnailImage, bool hasChanges);
@property (nonatomic, assign) bool skipInitialTransition;
@property (nonatomic, assign) bool dontHideStatusBar;

View File

@ -37,7 +37,7 @@ typedef enum
- (instancetype)editAdjustmentsWithPreset:(TGMediaVideoConversionPreset)preset maxDuration:(NSTimeInterval)maxDuration;
- (instancetype)editAdjustmentsWithPreset:(TGMediaVideoConversionPreset)preset videoStartValue:(NSTimeInterval)videoStartValue trimStartValue:(NSTimeInterval)trimStartValue trimEndValue:(NSTimeInterval)trimEndValue;
+ (instancetype)editAdjustmentsWithOriginalSize:(CGSize)originalSize preset:(TGMediaVideoConversionPreset)preset;
+ (instancetype)editAdjustmentsWithPhotoEditorValues:(PGPhotoEditorValues *)values;
+ (instancetype)editAdjustmentsWithPhotoEditorValues:(PGPhotoEditorValues *)values preset:(TGMediaVideoConversionPreset)preset;
+ (instancetype)editAdjustmentsWithDictionary:(NSDictionary *)dictionary;
+ (instancetype)editAdjustmentsWithOriginalSize:(CGSize)originalSize

View File

@ -898,7 +898,7 @@ const NSUInteger TGAttachmentDisplayedAssetLimit = 500;
};
__weak TGPhotoEditorController *weakController = controller;
controller.didFinishEditing = ^(__unused id<TGMediaEditAdjustments> adjustments, UIImage *resultImage, __unused UIImage *thumbnailImage, __unused bool hasChanges)
controller.didFinishEditing = ^(id<TGMediaEditAdjustments> adjustments, UIImage *resultImage, __unused UIImage *thumbnailImage, __unused bool hasChanges)
{
if (!hasChanges)
return;
@ -911,10 +911,36 @@ const NSUInteger TGAttachmentDisplayedAssetLimit = 500;
if (strongController == nil)
return;
if (adjustments.paintingData.hasAnimation) {
TGVideoEditAdjustments *videoAdjustments = adjustments;
if ([videoAdjustments isKindOfClass:[PGPhotoEditorValues class]]) {
videoAdjustments = [TGVideoEditAdjustments editAdjustmentsWithPhotoEditorValues:(PGPhotoEditorValues *)adjustments preset:TGMediaVideoConversionPresetProfileVeryHigh];
}
NSString *filePath = [NSTemporaryDirectory() stringByAppendingPathComponent:[[NSString alloc] initWithFormat:@"gifvideo_%x.jpg", (int)arc4random()]];
NSData *data = UIImageJPEGRepresentation(resultImage, 0.8);
[data writeToFile:filePath atomically:true];
UIImage *previewImage = resultImage;
if ([adjustments cropAppliedForAvatar:false] || adjustments.hasPainting || adjustments.toolsApplied)
{
UIImage *paintingImage = adjustments.paintingData.stillImage;
if (paintingImage == nil) {
paintingImage = adjustments.paintingData.image;
}
UIImage *thumbnailImage = TGPhotoEditorVideoExtCrop(resultImage, paintingImage, adjustments.cropOrientation, adjustments.cropRotation, adjustments.cropRect, adjustments.cropMirrored, TGScaleToFill(asset.dimensions, CGSizeMake(800, 800)), adjustments.originalSize, true, true, true);
if (thumbnailImage != nil) {
previewImage = thumbnailImage;
}
}
if (strongSelf.avatarVideoCompletionBlock != nil)
strongSelf.avatarVideoCompletionBlock(previewImage, [NSURL fileURLWithPath:filePath], videoAdjustments);
} else {
if (strongSelf.avatarCompletionBlock != nil)
strongSelf.avatarCompletionBlock(resultImage);
}
};
controller.didFinishEditingVideo = ^(NSURL *url, id<TGMediaEditAdjustments> adjustments, UIImage *resultImage, UIImage *thumbnailImage, bool hasChanges) {
controller.didFinishEditingVideo = ^(AVAsset *asset, id<TGMediaEditAdjustments> adjustments, UIImage *resultImage, UIImage *thumbnailImage, bool hasChanges) {
if (!hasChanges)
return;
@ -927,7 +953,7 @@ const NSUInteger TGAttachmentDisplayedAssetLimit = 500;
return;
if (strongSelf.avatarVideoCompletionBlock != nil)
strongSelf.avatarVideoCompletionBlock(resultImage, url, adjustments);
strongSelf.avatarVideoCompletionBlock(resultImage, asset, adjustments);
};
controller.requestThumbnailImage = ^(id<TGMediaEditableItem> editableItem)
{

View File

@ -1768,7 +1768,7 @@ static CGPoint TGCameraControllerClampPointToScreenSize(__unused id self, __unus
});
};
controller.didFinishEditingVideo = ^(NSURL *url, id<TGMediaEditAdjustments> adjustments, UIImage *resultImage, UIImage *thumbnailImage, bool hasChanges) {
controller.didFinishEditingVideo = ^(AVAsset *asset, id<TGMediaEditAdjustments> adjustments, UIImage *resultImage, UIImage *thumbnailImage, bool hasChanges) {
if (!hasChanges)
return;
@ -1779,7 +1779,7 @@ static CGPoint TGCameraControllerClampPointToScreenSize(__unused id self, __unus
TGDispatchOnMainThread(^
{
if (strongSelf.finishedWithVideo != nil)
strongSelf.finishedWithVideo(nil, url, resultImage, 0, CGSizeZero, adjustments, nil, nil, nil, nil);
strongSelf.finishedWithVideo(nil, [(AVURLAsset *)asset URL], resultImage, 0, CGSizeZero, adjustments, nil, nil, nil, nil);
__strong TGPhotoEditorController *strongController = weakController;
if (strongController != nil)
@ -2592,7 +2592,7 @@ static CGPoint TGCameraControllerClampPointToScreenSize(__unused id self, __unus
if (animated) {
dict[@"isAnimation"] = @true;
if ([adjustments isKindOfClass:[PGPhotoEditorValues class]]) {
dict[@"adjustments"] = [TGVideoEditAdjustments editAdjustmentsWithPhotoEditorValues:(PGPhotoEditorValues *)adjustments];
dict[@"adjustments"] = [TGVideoEditAdjustments editAdjustmentsWithPhotoEditorValues:(PGPhotoEditorValues *)adjustments preset:TGMediaVideoConversionPresetAnimation];
} else {
dict[@"adjustments"] = adjustments;
}

View File

@ -566,10 +566,10 @@
self.avatarCompletionBlock(image);
}
- (void)completeWithAvatarVideo:(NSURL *)url adjustments:(TGVideoEditAdjustments *)adjustments image:(UIImage *)image
- (void)completeWithAvatarVideo:(AVAsset *)asset adjustments:(TGVideoEditAdjustments *)adjustments image:(UIImage *)image
{
if (self.avatarVideoCompletionBlock != nil)
self.avatarVideoCompletionBlock(image, url, adjustments);
self.avatarVideoCompletionBlock(image, asset, adjustments);
}
- (void)completeWithCurrentItem:(TGMediaAsset *)currentItem silentPosting:(bool)silentPosting scheduleTime:(int32_t)scheduleTime
@ -904,7 +904,7 @@
if (animated) {
dict[@"isAnimation"] = @true;
if ([adjustments isKindOfClass:[PGPhotoEditorValues class]]) {
dict[@"adjustments"] = [TGVideoEditAdjustments editAdjustmentsWithPhotoEditorValues:(PGPhotoEditorValues *)adjustments];
dict[@"adjustments"] = [TGVideoEditAdjustments editAdjustmentsWithPhotoEditorValues:(PGPhotoEditorValues *)adjustments preset:TGMediaVideoConversionPresetAnimation];
} else {
dict[@"adjustments"] = adjustments;
}

View File

@ -406,6 +406,7 @@
}
TGPhotoEditorController *controller = [[TGPhotoEditorController alloc] initWithContext:_context item:editableItem intent:intent adjustments:nil caption:nil screenImage:thumbnailImage availableTabs:[TGPhotoEditorController defaultTabsForAvatarIntent] selectedTab:TGPhotoEditorCropTab];
controller.stickersContext = self.stickersContext;
controller.editingContext = self.editingContext;
controller.didFinishRenderingFullSizeImage = ^(UIImage *resultImage)
{
@ -415,7 +416,7 @@
[[strongSelf->_assetsLibrary saveAssetWithImage:resultImage] startWithNext:nil];
};
controller.didFinishEditing = ^(__unused id<TGMediaEditAdjustments> adjustments, UIImage *resultImage, __unused UIImage *thumbnailImage, bool hasChanges)
controller.didFinishEditing = ^(id<TGMediaEditAdjustments> adjustments, UIImage *resultImage, __unused UIImage *thumbnailImage, bool hasChanges)
{
if (!hasChanges)
return;
@ -424,9 +425,34 @@
if (strongSelf == nil)
return;
if (adjustments.paintingData.hasAnimation) {
TGVideoEditAdjustments *videoAdjustments = adjustments;
if ([videoAdjustments isKindOfClass:[PGPhotoEditorValues class]]) {
videoAdjustments = [TGVideoEditAdjustments editAdjustmentsWithPhotoEditorValues:(PGPhotoEditorValues *)adjustments preset:TGMediaVideoConversionPresetProfileVeryHigh];
}
NSString *filePath = [NSTemporaryDirectory() stringByAppendingPathComponent:[[NSString alloc] initWithFormat:@"gifvideo_%x.jpg", (int)arc4random()]];
NSData *data = UIImageJPEGRepresentation(resultImage, 0.8);
[data writeToFile:filePath atomically:true];
UIImage *previewImage = resultImage;
if ([adjustments cropAppliedForAvatar:false] || adjustments.hasPainting || adjustments.toolsApplied)
{
UIImage *paintingImage = adjustments.paintingData.stillImage;
if (paintingImage == nil) {
paintingImage = adjustments.paintingData.image;
}
UIImage *thumbnailImage = TGPhotoEditorVideoExtCrop(resultImage, paintingImage, adjustments.cropOrientation, adjustments.cropRotation, adjustments.cropRect, adjustments.cropMirrored, TGScaleToFill(asset.dimensions, CGSizeMake(800, 800)), adjustments.originalSize, true, true, true);
if (thumbnailImage != nil) {
previewImage = thumbnailImage;
}
}
[(TGMediaAssetsController *)strongSelf.navigationController completeWithAvatarVideo:[NSURL fileURLWithPath:filePath] adjustments:videoAdjustments image:previewImage];
} else {
[(TGMediaAssetsController *)strongSelf.navigationController completeWithAvatarImage:resultImage];
}
};
controller.didFinishEditingVideo = ^(NSURL *url, id<TGMediaEditAdjustments> adjustments, UIImage *resultImage, UIImage *thumbnailImage, bool hasChanges) {
controller.didFinishEditingVideo = ^(AVAsset *asset, id<TGMediaEditAdjustments> adjustments, UIImage *resultImage, UIImage *thumbnailImage, bool hasChanges) {
if (!hasChanges)
return;
@ -434,7 +460,7 @@
if (strongSelf == nil)
return;
[(TGMediaAssetsController *)strongSelf.navigationController completeWithAvatarVideo:url adjustments:adjustments image:resultImage];
[(TGMediaAssetsController *)strongSelf.navigationController completeWithAvatarVideo:asset adjustments:adjustments image:resultImage];
};
controller.requestThumbnailImage = ^(id<TGMediaEditableItem> editableItem)
{

View File

@ -127,7 +127,7 @@
[strongController dismissAnimated:false];
};
carouselItem.avatarVideoCompletionBlock = ^(UIImage *image, NSURL *url, TGVideoEditAdjustments *adjustments) {
carouselItem.avatarVideoCompletionBlock = ^(UIImage *image, AVAsset *asset, TGVideoEditAdjustments *adjustments) {
__strong TGMediaAvatarMenuMixin *strongSelf = weakSelf;
if (strongSelf == nil)
return;
@ -137,7 +137,7 @@
return;
if (strongSelf.didFinishWithVideo != nil)
strongSelf.didFinishWithVideo(image, url, adjustments);
strongSelf.didFinishWithVideo(image, asset, adjustments);
[strongController dismissAnimated:false];
};
@ -285,7 +285,7 @@
controller = [[TGCameraController alloc] initWithContext:[windowManager context] saveEditedPhotos:_saveEditedPhotos saveCapturedMedia:_saveCapturedMedia camera:cameraView.previewView.camera previewView:cameraView.previewView intent:_signup ? TGCameraControllerSignupAvatarIntent : TGCameraControllerAvatarIntent];
else
controller = [[TGCameraController alloc] initWithContext:[windowManager context] saveEditedPhotos:_saveEditedPhotos saveCapturedMedia:_saveCapturedMedia intent:_signup ? TGCameraControllerSignupAvatarIntent : TGCameraControllerAvatarIntent];
controller.stickersContext = _stickersContext;
controller.shouldStoreCapturedAssets = true;
TGCameraControllerWindow *controllerWindow = [[TGCameraControllerWindow alloc] initWithManager:windowManager parentController:_parentController contentController:controller];
@ -355,13 +355,13 @@
[menuController dismissAnimated:false];
};
controller.finishedWithVideo = ^(__unused TGOverlayController *controller, NSURL *videoURL, UIImage *previewImage, __unused NSTimeInterval duration, __unused CGSize dimensions, TGVideoEditAdjustments *adjustments, __unused NSString *caption, __unused NSArray *entities, __unused NSArray *stickers, __unused NSNumber *timer){
controller.finishedWithVideo = ^(__unused TGOverlayController *controller, NSURL *url, UIImage *previewImage, __unused NSTimeInterval duration, __unused CGSize dimensions, TGVideoEditAdjustments *adjustments, __unused NSString *caption, __unused NSArray *entities, __unused NSArray *stickers, __unused NSNumber *timer){
__strong TGMediaAvatarMenuMixin *strongSelf = weakSelf;
if (strongSelf == nil)
return;
if (strongSelf.didFinishWithVideo != nil)
strongSelf.didFinishWithVideo(previewImage, videoURL, adjustments);
strongSelf.didFinishWithVideo(previewImage, [[AVURLAsset alloc] initWithURL:url options:nil], adjustments);
[menuController dismissAnimated:false];
};
@ -459,6 +459,7 @@
TGMediaAssetsController *controller = [TGMediaAssetsController controllerWithContext:context assetGroup:group intent:strongSelf->_signup ? TGMediaAssetsControllerSetSignupProfilePhotoIntent : TGMediaAssetsControllerSetProfilePhotoIntent recipientName:nil saveEditedPhotos:strongSelf->_saveEditedPhotos allowGrouping:false selectionLimit:10];
__weak TGMediaAssetsController *weakController = controller;
controller.stickersContext = _stickersContext;
controller.avatarCompletionBlock = ^(UIImage *resultImage)
{
__strong TGMediaAvatarMenuMixin *strongSelf = weakSelf;
@ -472,13 +473,13 @@
if (strongController != nil && strongController.dismissalBlock != nil)
strongController.dismissalBlock();
};
controller.avatarVideoCompletionBlock = ^(UIImage *image, NSURL *url, TGVideoEditAdjustments *adjustments) {
controller.avatarVideoCompletionBlock = ^(UIImage *image, AVAsset *asset, TGVideoEditAdjustments *adjustments) {
__strong TGMediaAvatarMenuMixin *strongSelf = weakSelf;
if (strongSelf == nil)
return;
if (strongSelf.didFinishWithVideo != nil)
strongSelf.didFinishWithVideo(image, url, adjustments);
strongSelf.didFinishWithVideo(image, asset, adjustments);
__strong TGMediaAssetsController *strongController = weakController;
if (strongController != nil && strongController.dismissalBlock != nil)

View File

@ -800,7 +800,7 @@
CGRect fittedCropRect = [TGPhotoPaintController fittedCropRect:cropRect originalSize:originalSize keepOriginalSize:false];
_contentWrapperView.frame = CGRectMake(0.0f, 0.0f, fittedContentSize.width, fittedContentSize.height);
CGFloat contentScale = ratio;//_contentView.bounds.size.width / fittedCropRect.size.width;
CGFloat contentScale = ratio;
_contentWrapperView.transform = CGAffineTransformMakeScale(contentScale, contentScale);
_contentWrapperView.frame = CGRectMake(0.0f, 0.0f, _contentView.bounds.size.width, _contentView.bounds.size.height);

View File

@ -240,6 +240,9 @@
return;
TGMediaVideoConversionPreset preset = TGMediaVideoConversionPresetAnimation;
if (adjustments.preset == TGMediaVideoConversionPresetProfile || adjustments.preset == TGMediaVideoConversionPresetProfileHigh || adjustments.preset == TGMediaVideoConversionPresetProfileVeryHigh) {
preset = adjustments.preset;
}
NSError *error = nil;
@ -283,7 +286,8 @@
if (watcher != nil)
liveUploadData = [watcher fileUpdated:true];
contextResult = [TGMediaVideoConversionResult resultWithFileURL:outputUrl fileSize:0 duration:CMTimeGetSeconds(resultContext.timeRange.duration) dimensions:resultContext.dimensions coverImage:coverImage liveUploadData:liveUploadData];
NSUInteger fileSize = [[[NSFileManager defaultManager] attributesOfItemAtPath:outputUrl.path error:nil] fileSize];
contextResult = [TGMediaVideoConversionResult resultWithFileURL:outputUrl fileSize:fileSize duration:CMTimeGetSeconds(resultContext.timeRange.duration) dimensions:resultContext.dimensions coverImage:coverImage liveUploadData:liveUploadData];
return [resultContext finishedContext];
}];

View File

@ -42,6 +42,8 @@ const CGFloat TGPhotoAvatarCropViewCurtainMargin = 200;
CGFloat _currentDiameter;
UIView *_entitiesWrapperView;
__weak PGPhotoEditorView *_fullPreviewView;
__weak UIImageView *_fullPaintingView;
__weak TGPhotoEntitiesContainerView *_fullEntitiesView;
@ -91,9 +93,16 @@ const CGFloat TGPhotoAvatarCropViewCurtainMargin = 200;
_fullPaintingView.frame = _fullPreviewView.frame;
[_wrapperView addSubview:_fullPaintingView];
_entitiesWrapperView = [[UIView alloc] init];
_fullEntitiesView = fullEntitiesView;
_fullEntitiesView.frame = _fullPreviewView.frame;
[_wrapperView addSubview:_fullEntitiesView];
_fullEntitiesView.frame = CGRectMake(0.0, 0.0, _fullEntitiesView.frame.size.width, _fullEntitiesView.frame.size.height);
_entitiesWrapperView.frame = _fullEntitiesView.frame;
CGFloat entitiesScale = _fullPreviewView.frame.size.width / _entitiesWrapperView.frame.size.width;
_entitiesWrapperView.transform = CGAffineTransformMakeScale(entitiesScale, entitiesScale);
_entitiesWrapperView.frame = _fullPreviewView.frame;
[_entitiesWrapperView addSubview:_fullEntitiesView];
[_wrapperView addSubview:_entitiesWrapperView];
_flashView = [[UIView alloc] init];
_flashView.alpha = 0.0;

View File

@ -345,7 +345,9 @@
_fullPaintingView.frame = _fullPreviewView.frame;
_fullEntitiesView = [[TGPhotoEntitiesContainerView alloc] init];
_fullEntitiesView.frame = _fullPreviewView.frame;
_fullEntitiesView.userInteractionEnabled = false;
CGRect rect = [TGPhotoPaintController fittedCropRect:_photoEditor.cropRect originalSize:_photoEditor.originalSize keepOriginalSize:true];
_fullEntitiesView.frame = CGRectMake(0, 0, rect.size.width, rect.size.height);
}
_dotMarkerView = [[UIImageView alloc] initWithImage:TGCircleImage(7.0, [TGPhotoEditorInterfaceAssets accentColor])];
@ -1060,8 +1062,13 @@
UIImage *image = result[@"image"];
UIImage *thumbnailImage = result[@"thumbnail"];
if (avatar && completion != nil)
if (avatar && image.size.width < 150.0) {
image = TGScaleImageToPixelSize(image, CGSizeMake(150.0, 150.0));
}
if (avatar && completion != nil) {
completion(image);
}
if (!saveOnly && didFinishEditing != nil)
didFinishEditing(editorValues, image, thumbnailImage, true);
@ -1266,6 +1273,7 @@
cropController.fullPreviewView = _fullPreviewView;
cropController.fullPaintingView = _fullPaintingView;
cropController.fullEntitiesView = _fullEntitiesView;
cropController.fullEntitiesView.userInteractionEnabled = false;
cropController.fromCamera = [self presentedFromCamera];
cropController.skipTransitionIn = skipInitialTransition;
if (snapshotImage != nil)
@ -1534,7 +1542,7 @@
case TGPhotoEditorToolsTab:
{
TGPhotoToolsController *toolsController = [[TGPhotoToolsController alloc] initWithContext:_context photoEditor:_photoEditor previewView:_previewView];
TGPhotoToolsController *toolsController = [[TGPhotoToolsController alloc] initWithContext:_context photoEditor:_photoEditor previewView:_previewView entitiesView:_fullEntitiesView];
toolsController.toolbarLandscapeSize = TGPhotoEditorToolbarSize;
toolsController.beginTransitionIn = ^UIView *(CGRect *referenceFrame, UIView **parentView, bool *noTransitionView)
{
@ -1981,7 +1989,7 @@
TGDispatchOnMainThread(^{
if (self.didFinishEditingVideo != nil)
self.didFinishEditingVideo(asset.URL, [adjustments editAdjustmentsWithPreset:preset videoStartValue:videoStartValue trimStartValue:trimStartValue trimEndValue:trimEndValue], fullImage, nil, true);
self.didFinishEditingVideo(asset, [adjustments editAdjustmentsWithPreset:preset videoStartValue:videoStartValue trimStartValue:trimStartValue trimEndValue:trimEndValue], fullImage, nil, true);
[self dismissAnimated:true];
});

View File

@ -186,28 +186,6 @@ const CGFloat TGPhotoEditorToolbarSize = 49.0f;
[self _finishedTransitionInWithView:transitionView];
}];
// POPSpringAnimation *animation = [TGPhotoEditorAnimation prepareTransitionAnimationForPropertyNamed:kPOPViewFrame];
// if (self.transitionSpeed > FLT_EPSILON)
// animation.springSpeed = self.transitionSpeed;
// animation.fromValue = [NSValue valueWithCGRect:_transitionView.frame];
// animation.toValue = [NSValue valueWithCGRect:_transitionTargetFrame];
// animation.completionBlock = ^(__unused POPAnimation *animation, __unused BOOL finished)
// {
// _transitionInProgress = false;
//
// UIView *transitionView = _transitionView;
// _transitionView = nil;
//
// if (self.finishedTransitionIn != nil)
// {
// self.finishedTransitionIn();
// self.finishedTransitionIn = nil;
// }
//
// [self _finishedTransitionInWithView:transitionView];
// };
// [_transitionView pop_addAnimation:animation forKey:@"frame"];
}
- (void)prepareForCustomTransitionOut

View File

@ -98,6 +98,7 @@ const CGFloat TGPhotoPaintStickerKeyboardSize = 260.0f;
id<TGPhotoPaintStickersScreen> _stickersScreen;
bool _appeared;
bool _skipEntitiesSetup;
TGPhotoPaintFont *_selectedTextFont;
TGPhotoPaintTextEntityStyle _selectedTextStyle;
@ -152,6 +153,10 @@ const CGFloat TGPhotoPaintStickerKeyboardSize = 260.0f;
self.photoEditor = photoEditor;
self.previewView = previewView;
_entitiesContainerView = entitiesView;
if (entitiesView != nil) {
_skipEntitiesSetup = true;
}
entitiesView.userInteractionEnabled = true;
_brushes = @
[
@ -508,7 +513,9 @@ const CGFloat TGPhotoPaintStickerKeyboardSize = 260.0f;
[super viewDidLoad];
PGPhotoEditor *photoEditor = _photoEditor;
if (!_skipEntitiesSetup) {
[_entitiesContainerView setupWithPaintingData:photoEditor.paintingData];
}
for (TGPhotoPaintEntityView *view in _entitiesContainerView.subviews)
{
if (![view isKindOfClass:[TGPhotoPaintEntityView class]])
@ -1899,6 +1906,8 @@ const CGFloat TGPhotoPaintStickerKeyboardSize = 260.0f;
- (void)transitionOutSwitching:(bool)__unused switching completion:(void (^)(void))completion
{
[_stickersScreen invalidate];
TGPhotoEditorPreviewView *previewView = self.previewView;
previewView.interactionEnded = nil;
@ -1931,8 +1940,6 @@ const CGFloat TGPhotoPaintStickerKeyboardSize = 260.0f;
{
_dismissing = true;
[_stickersScreen invalidate];
[_entitySelectionView removeFromSuperview];
_entitySelectionView = nil;

View File

@ -3,9 +3,10 @@
@class PGPhotoEditor;
@class PGPhotoTool;
@class TGPhotoEditorPreviewView;
@class TGPhotoEntitiesContainerView;
@interface TGPhotoToolsController : TGPhotoEditorTabController
- (instancetype)initWithContext:(id<LegacyComponentsContext>)context photoEditor:(PGPhotoEditor *)photoEditor previewView:(TGPhotoEditorPreviewView *)previewView;
- (instancetype)initWithContext:(id<LegacyComponentsContext>)context photoEditor:(PGPhotoEditor *)photoEditor previewView:(TGPhotoEditorPreviewView *)previewView entitiesView:(TGPhotoEntitiesContainerView *)entitiesView;
@end

View File

@ -22,6 +22,9 @@
#import "TGPhotoEditorPreviewView.h"
#import "TGPhotoEditorHUDView.h"
#import "TGPhotoEditorSparseView.h"
#import "TGPhotoEntitiesContainerView.h"
#import "TGPhotoPaintController.h"
const CGFloat TGPhotoEditorToolsPanelSize = 180.0f;
const CGFloat TGPhotoEditorToolsLandscapePanelSize = TGPhotoEditorToolsPanelSize + 40.0f;
@ -44,6 +47,7 @@ const CGFloat TGPhotoEditorToolsLandscapePanelSize = TGPhotoEditorToolsPanelSize
TGPhotoEditorCollectionView *_portraitCollectionView;
TGPhotoEditorCollectionView *_landscapeCollectionView;
TGPhotoEditorHUDView *_hudView;
TGPhotoEntitiesContainerView *_entitiesView;
void (^_changeBlock)(PGPhotoTool *, id, bool);
void (^_interactionBegan)(void);
@ -52,6 +56,8 @@ const CGFloat TGPhotoEditorToolsLandscapePanelSize = TGPhotoEditorToolsPanelSize
bool _preview;
TGPhotoEditorTab _currentTab;
UIView *_entitiesWrapperView;
UIView <TGPhotoEditorToolView> *_toolAreaView;
UIView <TGPhotoEditorToolView> *_portraitToolControlView;
UIView <TGPhotoEditorToolView> *_landscapeToolControlView;
@ -64,13 +70,14 @@ const CGFloat TGPhotoEditorToolsLandscapePanelSize = TGPhotoEditorToolsPanelSize
@implementation TGPhotoToolsController
- (instancetype)initWithContext:(id<LegacyComponentsContext>)context photoEditor:(PGPhotoEditor *)photoEditor previewView:(TGPhotoEditorPreviewView *)previewView
- (instancetype)initWithContext:(id<LegacyComponentsContext>)context photoEditor:(PGPhotoEditor *)photoEditor previewView:(TGPhotoEditorPreviewView *)previewView entitiesView:(TGPhotoEntitiesContainerView *)entitiesView
{
self = [super initWithContext:context];
if (self != nil)
{
self.photoEditor = photoEditor;
self.previewView = previewView;
_entitiesView = entitiesView;
__weak TGPhotoToolsController *weakSelf = self;
_changeBlock = ^(PGPhotoTool *tool, __unused id newValue, bool animated)
@ -99,6 +106,29 @@ const CGFloat TGPhotoEditorToolsLandscapePanelSize = TGPhotoEditorToolsPanelSize
_landscapeCollectionView.toolsDataSource = nil;
}
// Lays out the full-resolution entities container (stickers/text overlays) inside
// _entitiesWrapperView so that it tracks the photo editor's current crop rect and
// crop rotation. Called after layout/orientation changes (see the updatePreviewView
// call site in this class).
// NOTE(review): reconstructed from a diff view; enclosing class context (ivars
// _photoEditor, _entitiesView, _entitiesWrapperView) is declared elsewhere in the file.
- (void)layoutEntitiesView {
// Content size of the editing canvas for the current crop rect / orientation.
CGSize fittedContentSize = [TGPhotoPaintController fittedContentSize:_photoEditor.cropRect orientation:_photoEditor.cropOrientation originalSize:_photoEditor.originalSize];
// NOTE(review): fittedCropRect appears unused in this method — confirm before removing.
CGRect fittedCropRect = [TGPhotoPaintController fittedCropRect:_photoEditor.cropRect originalSize:_photoEditor.originalSize keepOriginalSize:false];
_entitiesWrapperView.frame = CGRectMake(0.0f, 0.0f, fittedContentSize.width, fittedContentSize.height);
// The entities view keeps the original (uncropped, keepOriginalSize:true) bounds
// and is rotated to match the crop rotation rather than being resized to the crop.
CGRect rect = [TGPhotoPaintController fittedCropRect:self.photoEditor.cropRect originalSize:self.photoEditor.originalSize keepOriginalSize:true];
_entitiesView.frame = CGRectMake(0, 0, rect.size.width, rect.size.height);
_entitiesView.transform = CGAffineTransformMakeRotation(_photoEditor.cropRotation);
// Compute where the crop center lands inside the rotated painting canvas, then
// offset the entities view from the wrapper's center by that amount so the
// overlays stay registered with the cropped image.
CGSize fittedOriginalSize = TGScaleToSize(_photoEditor.originalSize, [TGPhotoPaintController maximumPaintingSize]);
CGSize rotatedSize = TGRotatedContentSize(fittedOriginalSize, _photoEditor.cropRotation);
CGPoint centerPoint = CGPointMake(rotatedSize.width / 2.0f, rotatedSize.height / 2.0f);
// Scale factor from original-pixel space to the (bounded) painting space.
CGFloat scale = fittedOriginalSize.width / _photoEditor.originalSize.width;
CGPoint offset = TGPaintSubtractPoints(centerPoint, [TGPhotoPaintController fittedCropRect:_photoEditor.cropRect centerScale:scale]);
CGPoint boundsCenter = TGPaintCenterOfRect(_entitiesWrapperView.bounds);
_entitiesView.center = TGPaintAddPoints(boundsCenter, offset);
// Reparent lazily: attach only if the entities view is not already (or no longer)
// hosted by the wrapper, so repeated layout passes do not churn the view hierarchy.
if (_entitiesView.superview != _entitiesWrapperView) {
[_entitiesWrapperView addSubview:_entitiesView];
}
}
- (void)loadView
{
[super loadView];
@ -189,6 +219,10 @@ const CGFloat TGPhotoEditorToolsLandscapePanelSize = TGPhotoEditorToolsPanelSize
_wrapperView = [[TGPhotoEditorSparseView alloc] initWithFrame:CGRectZero];
[self.view addSubview:_wrapperView];
_entitiesWrapperView = [[UIView alloc] init];
_entitiesWrapperView.userInteractionEnabled = false;
[_wrapperView addSubview:_entitiesWrapperView];
_portraitToolsWrapperView = [[UIView alloc] initWithFrame:CGRectZero];
_portraitToolsWrapperView.alpha = 0.0f;
[_wrapperView addSubview:_portraitToolsWrapperView];
@ -975,6 +1009,7 @@ const CGFloat TGPhotoEditorToolsLandscapePanelSize = TGPhotoEditorToolsPanelSize
[_landscapeCollectionView.collectionViewLayout invalidateLayout];
[self updatePreviewView];
[self layoutEntitiesView];
}
- (TGPhotoEditorTab)availableTabs

View File

@ -33,8 +33,6 @@
editableItem = [[TGCameraCapturedVideo alloc] initWithURL:video];
}
void (^present)(UIImage *) = ^(UIImage *screenImage) {
TGPhotoEditorController *controller = [[TGPhotoEditorController alloc] initWithContext:[windowManager context] item:editableItem intent:TGPhotoEditorControllerAvatarIntent adjustments:nil caption:nil screenImage:screenImage availableTabs:[TGPhotoEditorController defaultTabsForAvatarIntent] selectedTab:TGPhotoEditorCropTab];
// controller.stickersContext = _stickersContext;
@ -45,9 +43,12 @@
if (didFinishWithImage != nil)
didFinishWithImage(resultImage);
};
controller.didFinishEditingVideo = ^(NSURL *url, id<TGMediaEditAdjustments> adjustments, UIImage *resultImage, UIImage *thumbnailImage, bool hasChanges) {
if (didFinishWithVideo != nil)
didFinishWithVideo(resultImage, url, adjustments);
controller.didFinishEditingVideo = ^(AVAsset *asset, id<TGMediaEditAdjustments> adjustments, UIImage *resultImage, UIImage *thumbnailImage, bool hasChanges) {
if (didFinishWithVideo != nil) {
if ([asset isKindOfClass:[AVURLAsset class]]) {
didFinishWithVideo(resultImage, [(AVURLAsset *)asset URL], adjustments);
}
}
};
controller.requestThumbnailImage = ^(id<TGMediaEditableItem> editableItem)
{

View File

@ -136,7 +136,7 @@ const NSTimeInterval TGVideoEditMaximumGifDuration = 30.5;
return adjustments;
}
+ (instancetype)editAdjustmentsWithPhotoEditorValues:(PGPhotoEditorValues *)values {
+ (instancetype)editAdjustmentsWithPhotoEditorValues:(PGPhotoEditorValues *)values preset:(TGMediaVideoConversionPreset)preset {
TGVideoEditAdjustments *adjustments = [[[self class] alloc] init];
adjustments->_originalSize = values.originalSize;
CGRect cropRect = values.cropRect;
@ -150,7 +150,7 @@ const NSTimeInterval TGVideoEditMaximumGifDuration = 30.5;
adjustments->_cropMirrored = values.cropMirrored;
adjustments->_paintingData = [values.paintingData dataForAnimation];
adjustments->_sendAsGif = true;
adjustments->_preset = TGMediaVideoConversionPresetAnimation;
adjustments->_preset = preset;
return adjustments;
}

View File

@ -23,16 +23,16 @@ public func presentLegacyAvatarEditor(theme: PresentationTheme, image: UIImage?,
if let image = image {
imageCompletion(image)
}
}, didFinishWithVideo: { image, url, adjustments in
if let image = image, let url = url {
videoCompletion(image, url, adjustments)
}, didFinishWithVideo: { image, asset, adjustments in
if let image = image {
// videoCompletion(image, url, adjustments)
}
}, dismissed: { [weak legacyController] in
legacyController?.dismiss()
})
}
public func presentLegacyAvatarPicker(holder: Atomic<NSObject?>, signup: Bool, theme: PresentationTheme, present: (ViewController, Any?) -> Void, openCurrent: (() -> Void)?, completion: @escaping (UIImage) -> Void, videoCompletion: @escaping (UIImage, URL, TGVideoEditAdjustments?) -> Void = { _, _, _ in}) {
public func presentLegacyAvatarPicker(holder: Atomic<NSObject?>, signup: Bool, theme: PresentationTheme, present: (ViewController, Any?) -> Void, openCurrent: (() -> Void)?, completion: @escaping (UIImage) -> Void, videoCompletion: @escaping (UIImage, Any?, TGVideoEditAdjustments?) -> Void = { _, _, _ in}) {
let legacyController = LegacyController(presentation: .custom, theme: theme)
legacyController.statusBar.statusBarStyle = .Ignore
@ -53,11 +53,11 @@ public func presentLegacyAvatarPicker(holder: Atomic<NSObject?>, signup: Bool, t
}
completion(image)
}
mixin.didFinishWithVideo = { image, url, adjustments in
guard let image = image, let url = url else {
mixin.didFinishWithVideo = { image, asset, adjustments in
guard let image = image else {
return
}
videoCompletion(image, url, adjustments)
videoCompletion(image, asset, adjustments)
}
mixin.didFinishWithView = {
openCurrent?()

View File

@ -287,6 +287,7 @@ final class FFMpegMediaFrameSourceContext: NSObject {
fileprivate var requestedDataOffset: Int?
fileprivate let fetchedDataDisposable = MetaDisposable()
fileprivate let keepDataDisposable = MetaDisposable()
fileprivate let fetchedFullDataDisposable = MetaDisposable()
fileprivate var requestedCompleteFetch = false
@ -294,6 +295,7 @@ final class FFMpegMediaFrameSourceContext: NSObject {
didSet {
self.fetchedDataDisposable.dispose()
self.fetchedFullDataDisposable.dispose()
self.keepDataDisposable.dispose()
}
}
@ -316,6 +318,7 @@ final class FFMpegMediaFrameSourceContext: NSObject {
self.fetchedDataDisposable.dispose()
self.fetchedFullDataDisposable.dispose()
self.keepDataDisposable.dispose()
}
func initializeState(postbox: Postbox, resourceReference: MediaResourceReference, tempFilePath: String?, streamable: Bool, video: Bool, preferSoftwareDecoding: Bool, fetchAutomatically: Bool, maximumFetchSize: Int?) {
@ -341,6 +344,10 @@ final class FFMpegMediaFrameSourceContext: NSObject {
}
}
if self.tempFilePath == nil {
self.keepDataDisposable.set(postbox.mediaBox.keepResource(id: resourceReference.resource.id).start())
}
if streamable {
if self.tempFilePath == nil {
self.fetchedDataDisposable.set(fetchedMediaResource(mediaBox: postbox.mediaBox, reference: resourceReference, range: (0 ..< Int(Int32.max), .elevated), statsCategory: self.statsCategory ?? .generic, preferBackgroundReferenceRevalidation: streamable).start())

View File

@ -174,9 +174,9 @@ public func fetchedAvatarGalleryEntries(account: Account, peer: Peer) -> Signal<
for photo in photos {
let indexData = GalleryItemIndexData(position: index, totalCount: Int32(photos.count))
if result.isEmpty, let first = initialEntries.first {
result.append(.image(photo.image.imageId, photo.image.reference, first.representations, photo.image.videoRepresentations.map({ VideoRepresentationWithReference(representation: $0, reference: MediaResourceReference.avatar(peer: peerReference, resource: $0.resource)) }), peer, photo.date, indexData, photo.messageId, photo.image.immediateThumbnailData, nil))
result.append(.image(photo.image.imageId, photo.image.reference, first.representations, photo.image.videoRepresentations.map({ VideoRepresentationWithReference(representation: $0, reference: MediaResourceReference.avatarList(peer: peerReference, resource: $0.resource)) }), peer, photo.date, indexData, photo.messageId, photo.image.immediateThumbnailData, nil))
} else {
result.append(.image(photo.image.imageId, photo.image.reference, photo.image.representations.map({ ImageRepresentationWithReference(representation: $0, reference: MediaResourceReference.standalone(resource: $0.resource)) }), photo.image.videoRepresentations.map({ VideoRepresentationWithReference(representation: $0, reference: MediaResourceReference.avatar(peer: peerReference, resource: $0.resource)) }), peer, photo.date, indexData, photo.messageId, photo.image.immediateThumbnailData, nil))
result.append(.image(photo.image.imageId, photo.image.reference, photo.image.representations.map({ ImageRepresentationWithReference(representation: $0, reference: MediaResourceReference.avatarList(peer: peerReference, resource: $0.resource)) }), photo.image.videoRepresentations.map({ VideoRepresentationWithReference(representation: $0, reference: MediaResourceReference.avatarList(peer: peerReference, resource: $0.resource)) }), peer, photo.date, indexData, photo.messageId, photo.image.immediateThumbnailData, nil))
}
index += 1
}
@ -202,9 +202,9 @@ public func fetchedAvatarGalleryEntries(account: Account, peer: Peer, firstEntry
for photo in photos {
let indexData = GalleryItemIndexData(position: index, totalCount: Int32(photos.count))
if result.isEmpty, let first = initialEntries.first {
result.append(.image(photo.image.imageId, photo.image.reference, first.representations, photo.image.videoRepresentations.map({ VideoRepresentationWithReference(representation: $0, reference: MediaResourceReference.avatar(peer: peerReference, resource: $0.resource)) }), peer, photo.date, indexData, photo.messageId, photo.image.immediateThumbnailData, nil))
result.append(.image(photo.image.imageId, photo.image.reference, first.representations, photo.image.videoRepresentations.map({ VideoRepresentationWithReference(representation: $0, reference: MediaResourceReference.avatarList(peer: peerReference, resource: $0.resource)) }), peer, photo.date, indexData, photo.messageId, photo.image.immediateThumbnailData, nil))
} else {
result.append(.image(photo.image.imageId, photo.image.reference, photo.image.representations.map({ ImageRepresentationWithReference(representation: $0, reference: MediaResourceReference.standalone(resource: $0.resource)) }), photo.image.videoRepresentations.map({ VideoRepresentationWithReference(representation: $0, reference: MediaResourceReference.avatar(peer: peerReference, resource: $0.resource)) }), peer, photo.date, indexData, photo.messageId, photo.image.immediateThumbnailData, nil))
result.append(.image(photo.image.imageId, photo.image.reference, photo.image.representations.map({ ImageRepresentationWithReference(representation: $0, reference: MediaResourceReference.avatarList(peer: peerReference, resource: $0.resource)) }), photo.image.videoRepresentations.map({ VideoRepresentationWithReference(representation: $0, reference: MediaResourceReference.avatarList(peer: peerReference, resource: $0.resource)) }), peer, photo.date, indexData, photo.messageId, photo.image.immediateThumbnailData, nil))
}
index += 1
}
@ -627,8 +627,6 @@ public class AvatarGalleryController: ViewController, StandalonePresentableContr
entries.insert(previousFirstEntry, at: index)
}
entries = normalizeEntries(entries)
self.galleryNode.pager.replaceItems(entries.map({ entry in PeerAvatarImageGalleryItem(context: self.context, peer: self.peer, presentationData: presentationData, entry: entry, sourceCorners: self.sourceCorners, delete: self.canDelete ? { [weak self] in
self?.deleteEntry(entry)
@ -638,6 +636,10 @@ public class AvatarGalleryController: ViewController, StandalonePresentableContr
self?.editEntry(entry)
}) }), centralItemIndex: 0, synchronous: true)
self.entries = entries
if let firstEntry = self.entries.first {
self._hiddenMedia.set(.single(firstEntry))
}
}
} else {
// if let messageId = messageId {
@ -845,7 +847,7 @@ public class AvatarGalleryController: ViewController, StandalonePresentableContr
}
let actionSheet = ActionSheetController(presentationData: presentationData)
let items: [ActionSheetItem] = [
ActionSheetButtonItem(title: presentationData.strings.Common_Delete, color: .destructive, action: { [weak actionSheet] in
ActionSheetButtonItem(title: presentationData.strings.Settings_RemoveConfirmation, color: .destructive, action: { [weak actionSheet] in
actionSheet?.dismissAnimated()
proceed()
})

View File

@ -259,7 +259,10 @@ final class PeerAvatarImageGalleryItemNode: ZoomableContentGalleryItemNode {
id = image.0.id
category = image.9
} else {
id = Int64(entry.peer?.id.id ?? 1)
id = Int64(entry.peer?.id.id ?? 0)
if let resource = entry.videoRepresentations.first?.representation.resource as? CloudPhotoSizeMediaResource {
id = id &+ resource.photoId
}
}
if let video = entry.videoRepresentations.last, let peerReference = PeerReference(self.peer) {
if video != previousVideoRepresentations?.last {

View File

@ -132,6 +132,14 @@ public enum ResourceDataRequestOption {
case incremental(waitUntilFetchStatus: Bool)
}
/// Tracks active "keep alive" subscriptions for a single resource id.
/// While at least one subscriber is registered, `MediaBox` skips this
/// resource when purging cached data. The `Bag` stores one `Void` token
/// per subscriber; the index returned by `add` is used to remove it later.
private final class MediaBoxKeepResourceContext {
    // One entry per active keepResource() subscriber.
    let subscribers = Bag<Void>()
    
    /// True when no subscribers remain and the context can be discarded.
    var isEmpty: Bool {
        return self.subscribers.isEmpty
    }
}
public final class MediaBox {
public let basePath: String
@ -145,6 +153,7 @@ public final class MediaBox {
private var cachedRepresentationContexts: [CachedMediaResourceRepresentationKey: CachedMediaResourceRepresentationContext] = [:]
private var fileContexts: [WrappedMediaResourceId: MediaBoxFileContext] = [:]
private var keepResourceContexts: [WrappedMediaResourceId: MediaBoxKeepResourceContext] = [:]
private var wrappedFetchResource = Promise<(MediaResource, Signal<[(Range<Int>, MediaBoxFetchPriority)], NoError>, MediaResourceFetchParameters?) -> Signal<MediaResourceDataFetchResult, MediaResourceDataFetchError>>()
public var preFetchedResourcePath: (MediaResource) -> String? = { _ in return nil }
@ -204,6 +213,10 @@ public final class MediaBox {
return ResourceStorePaths(partial: "\(self.basePath)/\(fileNameForId(id))_partial", complete: "\(self.basePath)/\(fileNameForId(id))")
}
/// Returns the partial/complete file names for the given resource id,
/// relative to the media box base path (see `storePathsForId` for the
/// absolute-path variant).
private func fileNamesForId(_ id: MediaResourceId) -> ResourceStorePaths {
    let baseName = fileNameForId(id)
    return ResourceStorePaths(partial: baseName + "_partial", complete: baseName)
}
private func cachedRepresentationPathsForId(_ id: MediaResourceId, representation: CachedMediaResourceRepresentation) -> ResourceStorePaths {
let cacheString: String
switch representation.keepDuration {
@ -697,6 +710,38 @@ public final class MediaBox {
}
}
/// Pins a resource so it is excluded from cache purging for as long as the
/// returned signal's subscription is held. The signal never emits or
/// completes; disposing it releases the pin. Reference-counted: the
/// per-resource context is dropped once the last subscriber disposes.
public func keepResource(id: MediaResourceId) -> Signal<Never, NoError> {
    return Signal { subscriber in
        let disposable = MetaDisposable()
        let dataQueue = self.dataQueue
        self.dataQueue.async {
            let key = WrappedMediaResourceId(id)
            // Reuse the existing context for this id, or create one.
            let keepContext: MediaBoxKeepResourceContext
            if let existing = self.keepResourceContexts[key] {
                keepContext = existing
            } else {
                keepContext = MediaBoxKeepResourceContext()
                self.keepResourceContexts[key] = keepContext
            }
            let subscriberIndex = keepContext.subscribers.add(Void())
            disposable.set(ActionDisposable { [weak self, weak keepContext] in
                dataQueue.async {
                    guard let strongSelf = self, let keepContext = keepContext else {
                        return
                    }
                    // Only unregister if the stored context is still the one we
                    // subscribed to (it may have been replaced in the meantime).
                    guard let storedContext = strongSelf.keepResourceContexts[key], storedContext === keepContext else {
                        return
                    }
                    storedContext.subscribers.remove(subscriberIndex)
                    if storedContext.isEmpty {
                        strongSelf.keepResourceContexts.removeValue(forKey: key)
                    }
                }
            })
        }
        return disposable
    }
}
public func cancelInteractiveResourceFetch(_ resource: MediaResource) {
self.dataQueue.async {
if let (fileContext, releaseContext) = self.fileContext(for: resource) {
@ -991,7 +1036,20 @@ public final class MediaBox {
public func removeOtherCachedResources(paths: [String]) -> Signal<Void, NoError> {
return Signal { subscriber in
self.dataQueue.async {
for path in paths {
var keepPrefixes: [String] = []
for id in self.keepResourceContexts.keys {
let resourcePaths = self.fileNamesForId(id.id)
keepPrefixes.append(resourcePaths.partial)
keepPrefixes.append(resourcePaths.complete)
}
outer: for path in paths {
for prefix in keepPrefixes {
if path.starts(with: prefix) {
continue outer
}
}
unlink(self.basePath + "/" + path)
}
subscriber.putCompletion()
@ -1007,6 +1065,9 @@ public final class MediaBox {
if self.fileContexts[id] != nil {
continue
}
if self.keepResourceContexts[id] != nil {
continue
}
let paths = self.storePathsForId(id.id)
unlink(paths.complete)
unlink(paths.partial)
@ -1044,35 +1105,4 @@ public final class MediaBox {
return EmptyDisposable
}
}
/// Deletes the on-disk data (complete file, partial file and partial
/// metadata) for every resource that currently has an open file context,
/// then drops all of those contexts. Runs on the data queue; the returned
/// signal completes when cleanup has finished.
public func clearFileContexts() -> Signal<Void, NoError> {
    return Signal { subscriber in
        self.dataQueue.async {
            for id in self.fileContexts.keys {
                let paths = self.storePathsForId(id.id)
                unlink(paths.complete)
                unlink(paths.partial)
                unlink(paths.partial + ".meta")
            }
            self.fileContexts.removeAll()
            subscriber.putCompletion()
        }
        return EmptyDisposable
    }
}
/// Emits the (partial, complete) store paths of every currently open file
/// context as a single snapshot, then completes. Evaluated on the data queue.
/// NOTE(review): the name "fileConxtets" is a typo for "fileContexts", but it
/// is public API — renaming would break callers, so it is kept as-is.
public func fileConxtets() -> Signal<[(partial: String, complete: String)], NoError> {
    return Signal { subscriber in
        self.dataQueue.async {
            let result = self.fileContexts.keys.map { id -> (partial: String, complete: String) in
                let paths = self.storePathsForId(id.id)
                return (partial: paths.partial, complete: paths.complete)
            }
            subscriber.putNext(result)
            subscriber.putCompletion()
        }
        return EmptyDisposable
    }
}
}

View File

@ -684,9 +684,9 @@ func editSettingsController(context: AccountContext, currentName: ItemListAvatar
completedProfilePhotoImpl(image)
}
}
mixin.didFinishWithVideo = { image, url, adjustments in
if let image = image, let url = url {
completedProfileVideoImpl(image, url, adjustments)
mixin.didFinishWithVideo = { image, asset, adjustments in
if let image = image {
// completedProfileVideoImpl(image, url, adjustments)
}
}
mixin.didFinishWithDelete = {

View File

@ -1418,9 +1418,9 @@ public func settingsController(context: AccountContext, accountManager: AccountM
completedProfilePhotoImpl(image)
}
}
mixin.didFinishWithVideo = { image, url, adjustments in
if let image = image, let url = url {
completedProfileVideoImpl(image, url, adjustments)
mixin.didFinishWithVideo = { image, asset, adjustments in
if let image = image {
// completedProfileVideoImpl(image, url, adjustments)
}
}
mixin.didFinishWithDelete = {

View File

@ -686,7 +686,7 @@ public final class AuthorizationSequenceController: NavigationController, MFMail
transaction.setState(UnauthorizedAccountState(isTestingEnvironment: strongSelf.account.testingEnvironment, masterDatacenterId: strongSelf.account.masterDatacenterId, contents: .phoneEntry(countryCode: countryCode, number: "")))
}).start()
}, displayCancel: displayCancel)
controller.signUpWithName = { [weak self, weak controller] firstName, lastName, avatarData, avatarUrl, avatarAdjustments in
controller.signUpWithName = { [weak self, weak controller] firstName, lastName, avatarData, avatarAsset, avatarAdjustments in
if let strongSelf = self {
controller?.inProgress = true
@ -696,15 +696,9 @@ public final class AuthorizationSequenceController: NavigationController, MFMail
}
let avatarVideo: Signal<UploadedPeerPhotoData?, NoError>?
if let avatarUrl = avatarUrl {
if let avatarAsset = avatarAsset as? AVAsset {
let account = strongSelf.account
avatarVideo = Signal<TelegramMediaResource?, NoError> { subscriber in
var filteredPath = avatarUrl.path
if filteredPath.hasPrefix("file://") {
filteredPath = String(filteredPath[filteredPath.index(filteredPath.startIndex, offsetBy: "file://".count)])
}
let avAsset = AVURLAsset(url: URL(fileURLWithPath: filteredPath))
let entityRenderer: LegacyPaintEntityRenderer? = avatarAdjustments.flatMap { adjustments in
if let paintingData = adjustments.paintingData, paintingData.hasAnimation {
return LegacyPaintEntityRenderer(account: nil, adjustments: adjustments)
@ -713,7 +707,7 @@ public final class AuthorizationSequenceController: NavigationController, MFMail
}
}
let signal = TGMediaVideoConverter.convert(avAsset, adjustments: avatarAdjustments, watcher: nil, entityRenderer: entityRenderer)!
let signal = TGMediaVideoConverter.convert(avatarAsset, adjustments: avatarAdjustments, watcher: nil, entityRenderer: entityRenderer)!
let signalDisposable = signal.start(next: { next in
if let result = next as? TGMediaVideoConversionResult {

View File

@ -22,9 +22,9 @@ final class AuthorizationSequenceSignUpController: ViewController {
var initialName: (String, String) = ("", "")
private var termsOfService: UnauthorizedAccountTermsOfService?
var signUpWithName: ((String, String, Data?, URL?, TGVideoEditAdjustments?) -> Void)?
var signUpWithName: ((String, String, Data?, Any?, TGVideoEditAdjustments?) -> Void)?
var avatarUrl: URL?
var avatarAsset: Any?
var avatarAdjustments: TGVideoEditAdjustments?
private let hapticFeedback = HapticFeedback()
@ -91,11 +91,11 @@ final class AuthorizationSequenceSignUpController: ViewController {
self?.present(c, in: .window(.root), with: a)
}, openCurrent: nil, completion: { image in
self?.controllerNode.currentPhoto = image
self?.avatarUrl = nil
self?.avatarAsset = nil
self?.avatarAdjustments = nil
}, videoCompletion: { image, url, adjustments in
}, videoCompletion: { image, asset, adjustments in
self?.controllerNode.currentPhoto = image
self?.avatarUrl = url
self?.avatarAsset = asset
self?.avatarAdjustments = adjustments
})
})
@ -159,7 +159,7 @@ final class AuthorizationSequenceSignUpController: ViewController {
if let name = name {
self.signUpWithName?(name.0, name.1, self.controllerNode.currentPhoto.flatMap({ image in
return compressImageToJPEG(image, quality: 0.7)
}), self.avatarUrl, self.avatarAdjustments)
}), self.avatarAsset, self.avatarAdjustments)
}
}
}

View File

@ -328,7 +328,7 @@ public func createChannelController(context: AccountContext) -> ViewController {
}
}
let completedChannelVideoImpl: (UIImage, URL, TGVideoEditAdjustments?) -> Void = { image, url, adjustments in
let completedChannelVideoImpl: (UIImage, Any?, TGVideoEditAdjustments?) -> Void = { image, asset, adjustments in
if let data = image.jpegData(compressionQuality: 0.6) {
let photoResource = LocalFileMediaResource(fileId: arc4random64())
context.account.postbox.mediaBox.storeResourceData(photoResource.id, data: data)
@ -345,12 +345,6 @@ public func createChannelController(context: AccountContext) -> ViewController {
}
let signal = Signal<TelegramMediaResource?, UploadPeerPhotoError> { subscriber in
var filteredPath = url.path
if filteredPath.hasPrefix("file://") {
filteredPath = String(filteredPath[filteredPath.index(filteredPath.startIndex, offsetBy: "file://".count)])
}
let avAsset = AVURLAsset(url: URL(fileURLWithPath: filteredPath))
let entityRenderer: LegacyPaintEntityRenderer? = adjustments.flatMap { adjustments in
if let paintingData = adjustments.paintingData, paintingData.hasAnimation {
return LegacyPaintEntityRenderer(account: context.account, adjustments: adjustments)
@ -359,7 +353,31 @@ public func createChannelController(context: AccountContext) -> ViewController {
}
}
let uploadInterface = LegacyLiveUploadInterface(account: context.account)
let signal = TGMediaVideoConverter.convert(avAsset, adjustments: adjustments, watcher: uploadInterface, entityRenderer: entityRenderer)!
let signal: SSignal
if let asset = asset as? AVAsset {
signal = TGMediaVideoConverter.convert(asset, adjustments: adjustments, watcher: uploadInterface, entityRenderer: entityRenderer)!
} else if let url = asset as? URL, let data = try? Data(contentsOf: url, options: [.mappedRead]), let image = UIImage(data: data), let entityRenderer = entityRenderer {
let durationSignal: SSignal = SSignal(generator: { subscriber in
let disposable = (entityRenderer.duration()).start(next: { duration in
subscriber?.putNext(duration)
subscriber?.putCompletion()
})
return SBlockDisposable(block: {
disposable.dispose()
})
})
signal = durationSignal.map(toSignal: { duration -> SSignal? in
if let duration = duration as? Double {
return TGMediaVideoConverter.renderUIImage(image, duration: duration, adjustments: adjustments, watcher: nil, entityRenderer: entityRenderer)!
} else {
return SSignal.single(nil)
}
})
} else {
signal = SSignal.complete()
}
let signalDisposable = signal.start(next: { next in
if let result = next as? TGMediaVideoConversionResult {
@ -438,9 +456,9 @@ public func createChannelController(context: AccountContext) -> ViewController {
completedChannelPhotoImpl(image)
}
}
mixin.didFinishWithVideo = { image, url, adjustments in
if let image = image, let url = url {
completedChannelVideoImpl(image, url, adjustments)
mixin.didFinishWithVideo = { image, asset, adjustments in
if let image = image, let asset = asset {
completedChannelVideoImpl(image, asset, adjustments)
}
}
if stateValue.with({ $0.avatar }) != nil {

View File

@ -586,7 +586,7 @@ public func createGroupControllerImpl(context: AccountContext, peerIds: [PeerId]
}
}
let completedGroupVideoImpl: (UIImage, URL, TGVideoEditAdjustments?) -> Void = { image, url, adjustments in
let completedGroupVideoImpl: (UIImage, Any?, TGVideoEditAdjustments?) -> Void = { image, asset, adjustments in
if let data = image.jpegData(compressionQuality: 0.6) {
let photoResource = LocalFileMediaResource(fileId: arc4random64())
context.account.postbox.mediaBox.storeResourceData(photoResource.id, data: data)
@ -603,12 +603,7 @@ public func createGroupControllerImpl(context: AccountContext, peerIds: [PeerId]
}
let signal = Signal<TelegramMediaResource?, UploadPeerPhotoError> { subscriber in
var filteredPath = url.path
if filteredPath.hasPrefix("file://") {
filteredPath = String(filteredPath[filteredPath.index(filteredPath.startIndex, offsetBy: "file://".count)])
}
let avAsset = AVURLAsset(url: URL(fileURLWithPath: filteredPath))
let entityRenderer: LegacyPaintEntityRenderer? = adjustments.flatMap { adjustments in
if let paintingData = adjustments.paintingData, paintingData.hasAnimation {
return LegacyPaintEntityRenderer(account: context.account, adjustments: adjustments)
@ -617,7 +612,31 @@ public func createGroupControllerImpl(context: AccountContext, peerIds: [PeerId]
}
}
let uploadInterface = LegacyLiveUploadInterface(account: context.account)
let signal = TGMediaVideoConverter.convert(avAsset, adjustments: adjustments, watcher: uploadInterface, entityRenderer: entityRenderer)!
let signal: SSignal
if let asset = asset as? AVAsset {
signal = TGMediaVideoConverter.convert(asset, adjustments: adjustments, watcher: uploadInterface, entityRenderer: entityRenderer)!
} else if let url = asset as? URL, let data = try? Data(contentsOf: url, options: [.mappedRead]), let image = UIImage(data: data), let entityRenderer = entityRenderer {
let durationSignal: SSignal = SSignal(generator: { subscriber in
let disposable = (entityRenderer.duration()).start(next: { duration in
subscriber?.putNext(duration)
subscriber?.putCompletion()
})
return SBlockDisposable(block: {
disposable.dispose()
})
})
signal = durationSignal.map(toSignal: { duration -> SSignal? in
if let duration = duration as? Double {
return TGMediaVideoConverter.renderUIImage(image, duration: duration, adjustments: adjustments, watcher: nil, entityRenderer: entityRenderer)!
} else {
return SSignal.single(nil)
}
})
} else {
signal = SSignal.complete()
}
let signalDisposable = signal.start(next: { next in
if let result = next as? TGMediaVideoConversionResult {
@ -696,9 +715,9 @@ public func createGroupControllerImpl(context: AccountContext, peerIds: [PeerId]
completedGroupPhotoImpl(image)
}
}
mixin.didFinishWithVideo = { image, url, adjustments in
if let image = image, let url = url {
completedGroupVideoImpl(image, url, adjustments)
mixin.didFinishWithVideo = { image, asset, adjustments in
if let image = image, let asset = asset {
completedGroupVideoImpl(image, asset, adjustments)
}
}
if stateValue.with({ $0.avatar }) != nil {

View File

@ -334,6 +334,9 @@ final class PeerInfoAvatarListItemNode: ASDisplayNode {
videoRepresentations = videoRepresentationsValue
immediateThumbnailData = immediateThumbnail
id = Int64(self.peer.id.id)
if let resource = videoRepresentations.first?.representation.resource as? CloudPhotoSizeMediaResource {
id = id &+ resource.photoId
}
case let .image(reference, imageRepresentations, videoRepresentationsValue, immediateThumbnail):
representations = imageRepresentations
videoRepresentations = videoRepresentationsValue
@ -913,9 +916,13 @@ final class PeerInfoAvatarListContainerNode: ASDisplayNode {
guard case let .image(image) = item else {
return false
}
var items: [PeerInfoAvatarListItem] = []
var entries: [AvatarGalleryEntry] = []
let previousIndex = self.currentIndex
var index = 0
var deletedIndex: Int?
for entry in self.galleryEntries {
switch entry {
case let .topImage(representations, videoRepresentations, _, _, immediateThumbnailData, _):
@ -925,9 +932,25 @@ final class PeerInfoAvatarListContainerNode: ASDisplayNode {
if image.0 != reference {
entries.append(entry)
items.append(.image(reference, representations, videoRepresentations, immediateThumbnailData))
} else {
deletedIndex = index
}
}
index += 1
}
if let peer = self.peer, peer is TelegramGroup || peer is TelegramChannel, deletedIndex == 0 {
self.galleryEntries = []
self.items = []
self.itemsUpdated?([])
self.currentIndex = 0
if let size = self.validLayout {
self.updateItems(size: size, update: true, transition: .immediate, stripTransition: .immediate, synchronous: true)
}
return true
}
self.galleryEntries = normalizeEntries(entries)
self.items = items
self.itemsUpdated?(items)
@ -1129,7 +1152,7 @@ final class PeerInfoAvatarListContainerNode: ASDisplayNode {
stripTransition.updateAlpha(node: self.loadingStripNode, alpha: self.loading ? 1.0 : 0.0)
self.activeStripNode.isHidden = self.stripNodes.count < 2
self.loadingStripNode.isHidden = !self.loading
self.loadingStripNode.isHidden = self.stripNodes.count < 2 || !self.loading
}
if let item = self.items.first, let itemNode = self.itemNodes[item.id] {
@ -1239,6 +1262,9 @@ final class PeerInfoAvatarTransformContainerNode: ASDisplayNode {
videoRepresentations = videoRepresentationsValue
immediateThumbnailData = immediateThumbnail
id = Int64(peer.id.id)
if let resource = videoRepresentations.first?.representation.resource as? CloudPhotoSizeMediaResource {
id = id &+ resource.photoId
}
case let .image(reference, imageRepresentations, videoRepresentationsValue, immediateThumbnail):
representations = imageRepresentations
videoRepresentations = videoRepresentationsValue
@ -1254,6 +1280,8 @@ final class PeerInfoAvatarTransformContainerNode: ASDisplayNode {
let videoFileReference = FileMediaReference.avatarList(peer: peerReference, media: TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.LocalFile, id: 0), partialReference: nil, resource: video.representation.resource, previewRepresentations: representations.map { $0.representation }, videoThumbnails: [], immediateThumbnailData: immediateThumbnailData, mimeType: "video/mp4", size: nil, attributes: [.Animated, .Video(duration: 0, size: video.representation.dimensions, flags: [])]))
let videoContent = NativeVideoContent(id: .profileVideo(id, nil), fileReference: videoFileReference, streamVideo: isMediaStreamable(resource: video.representation.resource) ? .conservative : .none, loopVideo: true, enableSound: false, fetchAutomatically: true, onlyFullSizeThumbnail: false, autoFetchFullSizeThumbnail: true, startTimestamp: video.representation.startTimestamp, continuePlayingWithoutSoundOnLostAudioSession: false, placeholderColor: .clear)
if videoContent.id != self.videoContent?.id {
self.videoNode?.removeFromSupernode()
let mediaManager = self.context.sharedContext.mediaManager
let videoNode = UniversalVideoNode(postbox: self.context.account.postbox, audioSession: mediaManager.audioSession, manager: mediaManager.universalVideoManager, decoration: GalleryVideoDecoration(), content: videoContent, priority: .embedded)
videoNode.isUserInteractionEnabled = false
@ -1520,6 +1548,9 @@ final class PeerInfoEditingAvatarNode: ASDisplayNode {
videoRepresentations = videoRepresentationsValue
immediateThumbnailData = immediateThumbnail
id = Int64(peer.id.id)
if let resource = videoRepresentations.first?.representation.resource as? CloudPhotoSizeMediaResource {
id = id &+ resource.photoId
}
case let .image(reference, imageRepresentations, videoRepresentationsValue, immediateThumbnail):
representations = imageRepresentations
videoRepresentations = videoRepresentationsValue
@ -1535,6 +1566,8 @@ final class PeerInfoEditingAvatarNode: ASDisplayNode {
let videoFileReference = FileMediaReference.avatarList(peer: peerReference, media: TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.LocalFile, id: 0), partialReference: nil, resource: video.representation.resource, previewRepresentations: representations.map { $0.representation }, videoThumbnails: [], immediateThumbnailData: immediateThumbnailData, mimeType: "video/mp4", size: nil, attributes: [.Animated, .Video(duration: 0, size: video.representation.dimensions, flags: [])]))
let videoContent = NativeVideoContent(id: .profileVideo(id, nil), fileReference: videoFileReference, streamVideo: isMediaStreamable(resource: video.representation.resource) ? .conservative : .none, loopVideo: true, enableSound: false, fetchAutomatically: true, onlyFullSizeThumbnail: false, autoFetchFullSizeThumbnail: true, startTimestamp: video.representation.startTimestamp, continuePlayingWithoutSoundOnLostAudioSession: false, placeholderColor: .clear)
if videoContent.id != self.videoContent?.id {
self.videoNode?.removeFromSupernode()
let mediaManager = self.context.sharedContext.mediaManager
let videoNode = UniversalVideoNode(postbox: self.context.account.postbox, audioSession: mediaManager.audioSession, manager: mediaManager.universalVideoManager, decoration: GalleryVideoDecoration(), content: videoContent, priority: .gallery)
videoNode.isUserInteractionEnabled = false
@ -2580,6 +2613,10 @@ final class PeerInfoHeaderNode: ASDisplayNode {
}
func initiateAvatarExpansion(gallery: Bool, first: Bool) {
if let peer = self.peer, peer.profileImageRepresentations.isEmpty && gallery {
self.requestOpenAvatarForEditing?(false)
return
}
if self.isAvatarExpanded || gallery {
if let currentEntry = self.avatarListNode.listContainerNode.currentEntry, let firstEntry = self.avatarListNode.listContainerNode.galleryEntries.first {
let entry = first ? firstEntry : currentEntry

View File

@ -612,12 +612,7 @@ private final class PeerInfoInteraction {
private let enabledBioEntities: EnabledEntityTypes = [.url, .mention, .hashtag]
private func settingsItems(data: PeerInfoScreenData?, context: AccountContext, presentationData: PresentationData, interaction: PeerInfoInteraction) -> [(AnyHashable, [PeerInfoScreenItem])] {
guard let data = data else {
return []
}
enum Section: Int, CaseIterable {
private enum SettingsSection: Int, CaseIterable {
case edit
case phone
case accounts
@ -628,20 +623,30 @@ private func settingsItems(data: PeerInfoScreenData?, context: AccountContext, p
case support
}
var items: [Section: [PeerInfoScreenItem]] = [:]
for section in Section.allCases {
private func settingsItems(data: PeerInfoScreenData?, context: AccountContext, presentationData: PresentationData, interaction: PeerInfoInteraction, isExpanded: Bool) -> [(AnyHashable, [PeerInfoScreenItem])] {
guard let data = data else {
return []
}
var items: [SettingsSection: [PeerInfoScreenItem]] = [:]
for section in SettingsSection.allCases {
items[section] = []
}
let setPhotoTitle: String
let displaySetPhoto: Bool
if let peer = data.peer, !peer.profileImageRepresentations.isEmpty {
setPhotoTitle = presentationData.strings.Settings_SetNewProfilePhotoOrVideo
displaySetPhoto = isExpanded
} else {
setPhotoTitle = presentationData.strings.Settings_SetProfilePhotoOrVideo
displaySetPhoto = true
}
if displaySetPhoto {
items[.edit]!.append(PeerInfoScreenActionItem(id: 0, text: setPhotoTitle, icon: UIImage(bundleImageName: "Settings/SetAvatar"), action: {
interaction.openSettings(.avatar)
}))
}
if let peer = data.peer, peer.addressName == nil {
items[.edit]!.append(PeerInfoScreenActionItem(id: 1, text: presentationData.strings.Settings_SetUsername, icon: UIImage(bundleImageName: "Settings/SetUsername"), action: {
interaction.openSettings(.username)
@ -784,7 +789,7 @@ private func settingsItems(data: PeerInfoScreenData?, context: AccountContext, p
}))
var result: [(AnyHashable, [PeerInfoScreenItem])] = []
for section in Section.allCases {
for section in SettingsSection.allCases {
if let sectionItems = items[section], !sectionItems.isEmpty {
result.append((section, sectionItems))
}
@ -833,7 +838,11 @@ private func settingsEditingItems(data: PeerInfoScreenData?, state: PeerInfoStat
interaction.openSettings(.phoneNumber)
}))
}
items[.info]!.append(PeerInfoScreenDisclosureItem(id: ItemUsername, label: .text(data.peer?.addressName.flatMap({ "@\($0)" }) ?? ""), text: presentationData.strings.Settings_Username, action: {
var username = ""
if let addressName = data.peer?.addressName, !addressName.isEmpty {
username = "@\(addressName)"
}
items[.info]!.append(PeerInfoScreenDisclosureItem(id: ItemUsername, label: .text(username), text: presentationData.strings.Settings_Username, action: {
interaction.openSettings(.username)
}))
@ -2113,11 +2122,11 @@ private final class PeerInfoScreenNode: ViewControllerTracingNode, UIScrollViewD
galleryController.avatarPhotoEditCompletion = { [weak self] image in
self?.updateProfilePhoto(image)
}
galleryController.avatarVideoEditCompletion = { [weak self] image, url, adjustments in
self?.updateProfileVideo(image, url: url, adjustments: adjustments)
galleryController.avatarVideoEditCompletion = { [weak self] image, asset, adjustments in
self?.updateProfileVideo(image, asset: asset, adjustments: adjustments)
}
galleryController.removedEntry = { [weak self] entry in
self?.headerNode.avatarListNode.listContainerNode.deleteItem(PeerInfoAvatarListItem(entry: entry))
let _ = self?.headerNode.avatarListNode.listContainerNode.deleteItem(PeerInfoAvatarListItem(entry: entry))
}
strongSelf.hiddenAvatarRepresentationDisposable.set((galleryController.hiddenMedia |> deliverOnMainQueue).start(next: { entry in
self?.headerNode.updateAvatarIsHidden(entry: entry)
@ -2497,7 +2506,7 @@ private final class PeerInfoScreenNode: ViewControllerTracingNode, UIScrollViewD
case .search:
strongSelf.activateSearch()
case .editPhoto, .editVideo:
strongSelf.openAvatarOptions()
break
}
}
@ -3838,67 +3847,6 @@ private final class PeerInfoScreenNode: ViewControllerTracingNode, UIScrollViewD
}
}
private func editAvatarItem(_ item: PeerInfoAvatarListItem) {
guard case let .image(reference, representations, videoRepresentations, _) = item else {
return
}
let mediaReference: AnyMediaReference
if let video = videoRepresentations.last {
mediaReference = .standalone(media: TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.LocalFile, id: 0), partialReference: nil, resource: video.representation.resource, previewRepresentations: [], videoThumbnails: [], immediateThumbnailData: nil, mimeType: "video/mp4", size: nil, attributes: [.Animated, .Video(duration: 0, size: video.representation.dimensions, flags: [])]))
} else {
let media = TelegramMediaImage(imageId: MediaId(namespace: 0, id: 0), representations: representations.map({ $0.representation }), immediateThumbnailData: nil, reference: nil, partialReference: nil, flags: [])
mediaReference = .standalone(media: media)
}
var dismissStatus: (() -> Void)?
let statusController = OverlayStatusController(theme: self.presentationData.theme, type: .loading(cancelled: {
dismissStatus?()
}))
dismissStatus = { [weak self, weak statusController] in
self?.editAvatarDisposable.set(nil)
statusController?.dismiss()
}
self.controller?.present(statusController, in: .window(.root))
self.editAvatarDisposable.set((fetchMediaData(context: self.context, postbox: self.context.account.postbox, mediaReference: mediaReference)
|> deliverOnMainQueue).start(next: { [weak self] state, isImage in
guard let strongSelf = self else {
return
}
switch state {
case .progress:
break
case let .data(data):
dismissStatus?()
let image: UIImage?
let video: URL?
if isImage {
if let fileData = try? Data(contentsOf: URL(fileURLWithPath: data.path)) {
image = UIImage(data: fileData)
} else {
image = nil
}
video = nil
} else {
image = nil
video = URL(fileURLWithPath: data.path)
}
presentLegacyAvatarEditor(theme: strongSelf.presentationData.theme, image: image, video: video, present: { [weak self] c, a in
if let strongSelf = self {
strongSelf.controller?.present(c, in: .window(.root), with: a, blockInteraction: true)
}
}, imageCompletion: { [weak self] image in
self?.updateProfilePhoto(image)
}, videoCompletion: { [weak self] image, url, adjustments in
self?.updateProfileVideo(image, url: url, adjustments: adjustments)
})
}
}))
}
private func setMainAvatar(_ item: PeerInfoAvatarListItem) {
if self.data?.peer?.id == self.context.account.peerId {
if case let .image(reference, _, _, _) = item {
@ -3911,11 +3859,13 @@ private final class PeerInfoScreenNode: ViewControllerTracingNode, UIScrollViewD
}
}
private func deleteAvatar(_ item: PeerInfoAvatarListItem) {
private func deleteAvatar(_ item: PeerInfoAvatarListItem, remove: Bool = true) {
if self.data?.peer?.id == self.context.account.peerId {
if case let .image(reference, _, _, _) = item {
if let reference = reference {
if remove {
let _ = removeAccountPhoto(network: self.context.account.network, reference: reference).start()
}
let dismiss = self.headerNode.avatarListNode.listContainerNode.deleteItem(item)
if dismiss {
if self.headerNode.isAvatarExpanded {
@ -3954,59 +3904,6 @@ private final class PeerInfoScreenNode: ViewControllerTracingNode, UIScrollViewD
}
}
private func openAvatarOptions() {
let item = self.headerNode.avatarListNode.listContainerNode.currentItemNode?.item
let index = self.headerNode.avatarListNode.listContainerNode.currentIndex
let actionSheet = ActionSheetController(presentationData: self.presentationData)
let dismissAction: () -> Void = { [weak actionSheet] in
actionSheet?.dismissAnimated()
}
var items: [ActionSheetItem] = []
items.append( ActionSheetButtonItem(title: self.presentationData.strings.Settings_SetNewProfilePhotoOrVideo, color: .accent, action: { [weak self] in
dismissAction()
self?.openAvatarForEditing(hasRemove: false)
}))
if let item = item, case let .image(image) = item {
if index > 0 {
let setMainTitle: String
if image.2.isEmpty {
setMainTitle = self.presentationData.strings.ProfilePhoto_SetMainPhoto
} else {
setMainTitle = self.presentationData.strings.ProfilePhoto_SetMainVideo
}
items.append(ActionSheetButtonItem(title: setMainTitle, color: .accent, action: { [weak self] in
dismissAction()
self?.setMainAvatar(item)
}))
}
// items.append(ActionSheetButtonItem(title: self.presentationData.strings.ProfilePhoto_OpenInEditor, color: .accent, action: { [weak self] in
// dismissAction()
// self?.editAvatarItem(item)
// }))
let deleteTitle: String
if image.2.isEmpty {
deleteTitle = self.presentationData.strings.GroupInfo_SetGroupPhotoDelete
} else {
deleteTitle = self.presentationData.strings.Settings_RemoveVideo
}
items.append(ActionSheetButtonItem(title: deleteTitle, color: .destructive, action: { [weak self] in
dismissAction()
self?.deleteAvatar(item)
}))
}
actionSheet.setItemGroups([
ActionSheetItemGroup(items: items),
ActionSheetItemGroup(items: [ActionSheetButtonItem(title: presentationData.strings.Common_Cancel, action: { dismissAction() })])
])
self.view.endEditing(true)
self.controller?.present(actionSheet, in: .window(.root))
}
private func updateProfilePhoto(_ image: UIImage) {
guard let data = image.jpegData(compressionQuality: 0.6) else {
return
@ -4064,7 +3961,7 @@ private final class PeerInfoScreenNode: ViewControllerTracingNode, UIScrollViewD
}
}
private func updateProfileVideo(_ image: UIImage, url: URL, adjustments: TGVideoEditAdjustments?) {
private func updateProfileVideo(_ image: UIImage, asset: Any?, adjustments: TGVideoEditAdjustments?) {
guard let data = image.jpegData(compressionQuality: 0.6) else {
return
}
@ -4093,12 +3990,6 @@ private final class PeerInfoScreenNode: ViewControllerTracingNode, UIScrollViewD
let account = self.context.account
let signal = Signal<TelegramMediaResource, UploadPeerPhotoError> { [weak self] subscriber in
var filteredPath = url.path
if filteredPath.hasPrefix("file://") {
filteredPath = String(filteredPath[filteredPath.index(filteredPath.startIndex, offsetBy: "file://".count)])
}
let avAsset = AVURLAsset(url: URL(fileURLWithPath: filteredPath))
let entityRenderer: LegacyPaintEntityRenderer? = adjustments.flatMap { adjustments in
if let paintingData = adjustments.paintingData, paintingData.hasAnimation {
return LegacyPaintEntityRenderer(account: account, adjustments: adjustments)
@ -4107,7 +3998,31 @@ private final class PeerInfoScreenNode: ViewControllerTracingNode, UIScrollViewD
}
}
let uploadInterface = LegacyLiveUploadInterface(account: account)
let signal = TGMediaVideoConverter.convert(avAsset, adjustments: adjustments, watcher: uploadInterface, entityRenderer: entityRenderer)!
let signal: SSignal
if let asset = asset as? AVAsset {
signal = TGMediaVideoConverter.convert(asset, adjustments: adjustments, watcher: uploadInterface, entityRenderer: entityRenderer)!
} else if let url = asset as? URL, let data = try? Data(contentsOf: url, options: [.mappedRead]), let image = UIImage(data: data), let entityRenderer = entityRenderer {
let durationSignal: SSignal = SSignal(generator: { subscriber in
let disposable = (entityRenderer.duration()).start(next: { duration in
subscriber?.putNext(duration)
subscriber?.putCompletion()
})
return SBlockDisposable(block: {
disposable.dispose()
})
})
signal = durationSignal.map(toSignal: { duration -> SSignal? in
if let duration = duration as? Double {
return TGMediaVideoConverter.renderUIImage(image, duration: duration, adjustments: adjustments, watcher: nil, entityRenderer: entityRenderer)!
} else {
return SSignal.single(nil)
}
})
} else {
signal = SSignal.complete()
}
let signalDisposable = signal.start(next: { next in
if let result = next as? TGMediaVideoConversionResult {
@ -4255,10 +4170,10 @@ private final class PeerInfoScreenNode: ViewControllerTracingNode, UIScrollViewD
self?.updateProfilePhoto(image)
}
}
mixin.didFinishWithVideo = { [weak self] image, url, adjustments in
if let image = image, let url = url {
mixin.didFinishWithVideo = { [weak self] image, asset, adjustments in
if let image = image, let asset = asset {
completion()
self?.updateProfileVideo(image, url: url, adjustments: adjustments)
self?.updateProfileVideo(image, asset: asset, adjustments: adjustments)
}
}
mixin.didFinishWithDelete = {
@ -4267,7 +4182,7 @@ private final class PeerInfoScreenNode: ViewControllerTracingNode, UIScrollViewD
}
if let item = item {
strongSelf.deleteAvatar(item)
strongSelf.deleteAvatar(item, remove: false)
}
let _ = strongSelf.currentAvatarMixin.swap(nil)
@ -5059,18 +4974,20 @@ private final class PeerInfoScreenNode: ViewControllerTracingNode, UIScrollViewD
} else {
transition.updateFrame(node: self.headerNode, frame: headerFrame)
}
if !self.isMediaOnly {
contentHeight += headerHeight
if !self.isSettings {
contentHeight += sectionSpacing
}
} else {
if self.isMediaOnly {
contentHeight += navigationHeight
}
var validRegularSections: [AnyHashable] = []
if !self.isMediaOnly {
let items = self.isSettings ? settingsItems(data: self.data, context: self.context, presentationData: self.presentationData, interaction: self.interaction) : infoItems(data: self.data, context: self.context, presentationData: self.presentationData, interaction: self.interaction, nearbyPeerDistance: self.nearbyPeerDistance, callMessages: self.callMessages)
let items = self.isSettings ? settingsItems(data: self.data, context: self.context, presentationData: self.presentationData, interaction: self.interaction, isExpanded: self.headerNode.isAvatarExpanded) : infoItems(data: self.data, context: self.context, presentationData: self.presentationData, interaction: self.interaction, nearbyPeerDistance: self.nearbyPeerDistance, callMessages: self.callMessages)
contentHeight += headerHeight
if !self.isSettings {
contentHeight += sectionSpacing
} else if let (section, _) = items.first, let sectionValue = section.base as? SettingsSection, sectionValue != .edit && !self.state.isEditing {
contentHeight += sectionSpacing
}
for (sectionId, sectionItems) in items {
validRegularSections.append(sectionId)

View File

@ -43,6 +43,7 @@ private final class ProfileDataPhotoPreloadContext {
let disposable: Disposable
var value: Any?
var skipNext = false
var emptyTimer: SwiftSignalKit.Timer?
init(disposable: Disposable) {
@ -210,6 +211,9 @@ private final class PeerChannelMemberCategoriesContextsManagerImpl {
let context: ProfileDataPhotoPreloadContext
if let current = self.profileDataPhotoPreloadContexts[peerId] {
context = current
if let _ = context.value {
context.skipNext = true
}
} else {
let disposable = MetaDisposable()
context = ProfileDataPhotoPreloadContext(disposable: disposable)
@ -219,6 +223,10 @@ private final class PeerChannelMemberCategoriesContextsManagerImpl {
guard let context = context else {
return
}
if context.skipNext {
context.skipNext = false
return
}
context.value = value
for f in context.subscribers.copyItems() {
f(value)

View File

@ -14,6 +14,8 @@ static_library(
"tgcalls/tgcalls/legacy/**",
"tgcalls/tgcalls/platform/tdesktop/**",
"tgcalls/tgcalls/platform/windows/**",
"tgcalls/tgcalls/platform/darwin/VideoCameraCapturerMac.*",
"tgcalls/tgcalls/platform/darwin/VideoMetalViewMac.*",
]),
has_cpp = True,
headers = merge_maps([

View File

@ -15,6 +15,8 @@ objc_library(
"tgcalls/tgcalls/legacy/**",
"tgcalls/tgcalls/platform/tdesktop/**",
"tgcalls/tgcalls/platform/windows/**",
"tgcalls/tgcalls/platform/darwin/VideoCameraCapturerMac.*",
"tgcalls/tgcalls/platform/darwin/VideoMetalViewMac.*",
]),
hdrs = glob([
"PublicHeaders/**/*.h",

View File

@ -61,7 +61,7 @@
- (instancetype _Nonnull)init {
self = [super init];
if (self != nil) {
_interface = tgcalls::CreateVideoCapture();
_interface = tgcalls::VideoCaptureInterface::Create();
}
return self;
}
@ -541,7 +541,7 @@ static void (*InternalVoipLoggingFunction)(NSString *) = NULL;
- (void)acceptVideo:(OngoingCallThreadLocalContextVideoCapturer * _Nullable)videoCapturer {
if (_tgVoip && _videoCapturer == nil) {
_videoCapturer = videoCapturer;
_tgVoip->acceptVideo([_videoCapturer getInterface]);
_tgVoip->requestVideo([_videoCapturer getInterface]);
}
}

@ -1 +1 @@
Subproject commit 659712186b39c3f077e3ad091d1de036154064a7
Subproject commit 83c85d20ccdde154acca4b964317de1e695f95d1

View File

@ -443,7 +443,7 @@ public func legacyEnqueueWebSearchMessages(_ selectionState: TGMediaSelectionCon
if animated {
dict["isAnimation"] = true
if let photoEditorValues = adjustments as? PGPhotoEditorValues {
dict["adjustments"] = TGVideoEditAdjustments(photoEditorValues: photoEditorValues)
dict["adjustments"] = TGVideoEditAdjustments(photoEditorValues: photoEditorValues, preset: TGMediaVideoConversionPresetAnimation)
}
let filePath = NSTemporaryDirectory().appending("/gifvideo_\(arc4random()).jpg")