Video avatar fixes

Ilya Laktyushin 2020-07-16 16:03:12 +03:00
parent 187e260374
commit 2abe662fed
29 changed files with 4506 additions and 4464 deletions

View File

@ -5694,3 +5694,5 @@ Any member of this group will be able to see messages in the channel.";
"SettingsSearch_Synonyms_ChatFolders" = "";
"EditProfile.NameAndPhotoOrVideoHelp" = "Enter your name and add an optional profile photo or video.";
"Settings.RemoveConfirmation" = "Remove";

View File

@ -45,7 +45,7 @@
@property (nonatomic, copy) void (^cameraPressed)(TGAttachmentCameraView *cameraView);
@property (nonatomic, copy) void (^sendPressed)(TGMediaAsset *currentItem, bool asFiles, bool silentPosting, int32_t scheduleTime);
@property (nonatomic, copy) void (^avatarCompletionBlock)(UIImage *image);
@property (nonatomic, copy) void (^avatarVideoCompletionBlock)(UIImage *image, NSURL *url, TGVideoEditAdjustments *adjustments);
@property (nonatomic, copy) void (^avatarVideoCompletionBlock)(UIImage *image, AVAsset *asset, TGVideoEditAdjustments *adjustments);
@property (nonatomic, copy) void (^editorOpened)(void);
@property (nonatomic, copy) void (^editorClosed)(void);

View File

@ -1,3 +1,4 @@
#import <AVFoundation/AVFoundation.h>
#import <LegacyComponents/TGOverlayControllerWindow.h>
#import <LegacyComponents/TGOverlayController.h>
#import <LegacyComponents/LegacyComponentsContext.h>

View File

@ -76,7 +76,7 @@ typedef enum
@property (nonatomic, copy) NSDictionary *(^descriptionGenerator)(id, NSString *, NSArray *, NSString *);
@property (nonatomic, copy) void (^avatarCompletionBlock)(UIImage *image);
@property (nonatomic, copy) void (^completionBlock)(NSArray *signals, bool silentPosting, int32_t scheduleTime);
@property (nonatomic, copy) void (^avatarVideoCompletionBlock)(UIImage *image, NSURL *url, TGVideoEditAdjustments *adjustments);
@property (nonatomic, copy) void (^avatarVideoCompletionBlock)(UIImage *image, AVAsset *asset, TGVideoEditAdjustments *adjustments);
@property (nonatomic, copy) void (^singleCompletionBlock)(id<TGMediaEditableItem> item, TGMediaEditingContext *editingContext);
@property (nonatomic, copy) void (^dismissalBlock)(void);
@property (nonatomic, copy) void (^selectionBlock)(TGMediaAsset *asset, UIImage *);
@ -94,7 +94,7 @@ typedef enum
- (NSArray *)resultSignalsWithCurrentItem:(TGMediaAsset *)currentItem descriptionGenerator:(id (^)(id, NSString *, NSArray *, NSString *))descriptionGenerator;
- (void)completeWithAvatarImage:(UIImage *)image;
- (void)completeWithAvatarVideo:(NSURL *)url adjustments:(TGVideoEditAdjustments *)adjustments image:(UIImage *)image;
- (void)completeWithAvatarVideo:(AVAsset *)asset adjustments:(TGVideoEditAdjustments *)adjustments image:(UIImage *)image;
- (void)completeWithCurrentItem:(TGMediaAsset *)currentItem silentPosting:(bool)silentPosting scheduleTime:(int32_t)scheduleTime;
- (void)dismiss;

View File

@ -1,4 +1,5 @@
#import <Foundation/Foundation.h>
#import <AVFoundation/AVFoundation.h>
#import <LegacyComponents/LegacyComponentsContext.h>
@class TGViewController;
@ -13,7 +14,7 @@ typedef void (^TGMediaAvatarPresentImpl)(id<LegacyComponentsContext>, void (^)(U
@interface TGMediaAvatarMenuMixin : NSObject
@property (nonatomic, copy) void (^didFinishWithImage)(UIImage *image);
@property (nonatomic, copy) void (^didFinishWithVideo)(UIImage *image, NSURL *url, TGVideoEditAdjustments *adjustments);
@property (nonatomic, copy) void (^didFinishWithVideo)(UIImage *image, AVAsset *asset, TGVideoEditAdjustments *adjustments);
@property (nonatomic, copy) void (^didFinishWithDelete)(void);
@property (nonatomic, copy) void (^didFinishWithView)(void);
@property (nonatomic, copy) void (^didDismiss)(void);
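Throughout this commit the video avatar callbacks stop passing an NSURL and hand back the edited AVAsset instead. A minimal Swift sketch of the adaptation callers need, assuming nothing beyond AVFoundation (the helper is illustrative, not part of this commit): only file-backed assets still expose a URL, so the old URL-based paths have to downcast first.

import AVFoundation

// Sketch only: recover a file URL from the asset now delivered by
// didFinishWithVideo / avatarVideoCompletionBlock. Compositions and other
// non-file-backed assets return nil here and must be handled as AVAssets.
func fileURL(from asset: AVAsset?) -> URL? {
    return (asset as? AVURLAsset)?.url
}

This is the same check the camera and editor paths below perform with [(AVURLAsset *)asset URL] and [asset isKindOfClass:[AVURLAsset class]].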

View File

@ -53,7 +53,7 @@ typedef enum {
@property (nonatomic, copy) void (^willFinishEditing)(id<TGMediaEditAdjustments> adjustments, id temporaryRep, bool hasChanges);
@property (nonatomic, copy) void (^didFinishRenderingFullSizeImage)(UIImage *fullSizeImage);
@property (nonatomic, copy) void (^didFinishEditing)(id<TGMediaEditAdjustments> adjustments, UIImage *resultImage, UIImage *thumbnailImage, bool hasChanges);
@property (nonatomic, copy) void (^didFinishEditingVideo)(NSURL *url, id<TGMediaEditAdjustments> adjustments, UIImage *resultImage, UIImage *thumbnailImage, bool hasChanges);
@property (nonatomic, copy) void (^didFinishEditingVideo)(AVAsset *asset, id<TGMediaEditAdjustments> adjustments, UIImage *resultImage, UIImage *thumbnailImage, bool hasChanges);
@property (nonatomic, assign) bool skipInitialTransition;
@property (nonatomic, assign) bool dontHideStatusBar;

View File

@ -914,7 +914,7 @@ const NSUInteger TGAttachmentDisplayedAssetLimit = 500;
if (strongSelf.avatarCompletionBlock != nil)
strongSelf.avatarCompletionBlock(resultImage);
};
controller.didFinishEditingVideo = ^(NSURL *url, id<TGMediaEditAdjustments> adjustments, UIImage *resultImage, UIImage *thumbnailImage, bool hasChanges) {
controller.didFinishEditingVideo = ^(AVAsset *asset, id<TGMediaEditAdjustments> adjustments, UIImage *resultImage, UIImage *thumbnailImage, bool hasChanges) {
if (!hasChanges)
return;
@ -927,7 +927,7 @@ const NSUInteger TGAttachmentDisplayedAssetLimit = 500;
return;
if (strongSelf.avatarVideoCompletionBlock != nil)
strongSelf.avatarVideoCompletionBlock(resultImage, url, adjustments);
strongSelf.avatarVideoCompletionBlock(resultImage, asset, adjustments);
};
controller.requestThumbnailImage = ^(id<TGMediaEditableItem> editableItem)
{

View File

@ -1768,7 +1768,7 @@ static CGPoint TGCameraControllerClampPointToScreenSize(__unused id self, __unus
});
};
controller.didFinishEditingVideo = ^(NSURL *url, id<TGMediaEditAdjustments> adjustments, UIImage *resultImage, UIImage *thumbnailImage, bool hasChanges) {
controller.didFinishEditingVideo = ^(AVAsset *asset, id<TGMediaEditAdjustments> adjustments, UIImage *resultImage, UIImage *thumbnailImage, bool hasChanges) {
if (!hasChanges)
return;
@ -1779,7 +1779,7 @@ static CGPoint TGCameraControllerClampPointToScreenSize(__unused id self, __unus
TGDispatchOnMainThread(^
{
if (strongSelf.finishedWithVideo != nil)
strongSelf.finishedWithVideo(nil, url, resultImage, 0, CGSizeZero, adjustments, nil, nil, nil, nil);
strongSelf.finishedWithVideo(nil, [(AVURLAsset *)asset URL], resultImage, 0, CGSizeZero, adjustments, nil, nil, nil, nil);
__strong TGPhotoEditorController *strongController = weakController;
if (strongController != nil)

View File

@ -566,10 +566,10 @@
self.avatarCompletionBlock(image);
}
- (void)completeWithAvatarVideo:(NSURL *)url adjustments:(TGVideoEditAdjustments *)adjustments image:(UIImage *)image
- (void)completeWithAvatarVideo:(AVAsset *)asset adjustments:(TGVideoEditAdjustments *)adjustments image:(UIImage *)image
{
if (self.avatarVideoCompletionBlock != nil)
self.avatarVideoCompletionBlock(image, url, adjustments);
self.avatarVideoCompletionBlock(image, asset, adjustments);
}
- (void)completeWithCurrentItem:(TGMediaAsset *)currentItem silentPosting:(bool)silentPosting scheduleTime:(int32_t)scheduleTime

View File

@ -426,7 +426,7 @@
[(TGMediaAssetsController *)strongSelf.navigationController completeWithAvatarImage:resultImage];
};
controller.didFinishEditingVideo = ^(NSURL *url, id<TGMediaEditAdjustments> adjustments, UIImage *resultImage, UIImage *thumbnailImage, bool hasChanges) {
controller.didFinishEditingVideo = ^(AVAsset *asset, id<TGMediaEditAdjustments> adjustments, UIImage *resultImage, UIImage *thumbnailImage, bool hasChanges) {
if (!hasChanges)
return;
@ -434,7 +434,7 @@
if (strongSelf == nil)
return;
[(TGMediaAssetsController *)strongSelf.navigationController completeWithAvatarVideo:url adjustments:adjustments image:resultImage];
[(TGMediaAssetsController *)strongSelf.navigationController completeWithAvatarVideo:asset adjustments:adjustments image:resultImage];
};
controller.requestThumbnailImage = ^(id<TGMediaEditableItem> editableItem)
{

View File

@ -127,7 +127,7 @@
[strongController dismissAnimated:false];
};
carouselItem.avatarVideoCompletionBlock = ^(UIImage *image, NSURL *url, TGVideoEditAdjustments *adjustments) {
carouselItem.avatarVideoCompletionBlock = ^(UIImage *image, AVAsset *asset, TGVideoEditAdjustments *adjustments) {
__strong TGMediaAvatarMenuMixin *strongSelf = weakSelf;
if (strongSelf == nil)
return;
@ -137,7 +137,7 @@
return;
if (strongSelf.didFinishWithVideo != nil)
strongSelf.didFinishWithVideo(image, url, adjustments);
strongSelf.didFinishWithVideo(image, asset, adjustments);
[strongController dismissAnimated:false];
};
@ -285,7 +285,7 @@
controller = [[TGCameraController alloc] initWithContext:[windowManager context] saveEditedPhotos:_saveEditedPhotos saveCapturedMedia:_saveCapturedMedia camera:cameraView.previewView.camera previewView:cameraView.previewView intent:_signup ? TGCameraControllerSignupAvatarIntent : TGCameraControllerAvatarIntent];
else
controller = [[TGCameraController alloc] initWithContext:[windowManager context] saveEditedPhotos:_saveEditedPhotos saveCapturedMedia:_saveCapturedMedia intent:_signup ? TGCameraControllerSignupAvatarIntent : TGCameraControllerAvatarIntent];
controller.stickersContext = _stickersContext;
controller.shouldStoreCapturedAssets = true;
TGCameraControllerWindow *controllerWindow = [[TGCameraControllerWindow alloc] initWithManager:windowManager parentController:_parentController contentController:controller];
@ -355,13 +355,13 @@
[menuController dismissAnimated:false];
};
controller.finishedWithVideo = ^(__unused TGOverlayController *controller, NSURL *videoURL, UIImage *previewImage, __unused NSTimeInterval duration, __unused CGSize dimensions, TGVideoEditAdjustments *adjustments, __unused NSString *caption, __unused NSArray *entities, __unused NSArray *stickers, __unused NSNumber *timer){
controller.finishedWithVideo = ^(__unused TGOverlayController *controller, NSURL *url, UIImage *previewImage, __unused NSTimeInterval duration, __unused CGSize dimensions, TGVideoEditAdjustments *adjustments, __unused NSString *caption, __unused NSArray *entities, __unused NSArray *stickers, __unused NSNumber *timer){
__strong TGMediaAvatarMenuMixin *strongSelf = weakSelf;
if (strongSelf == nil)
return;
if (strongSelf.didFinishWithVideo != nil)
strongSelf.didFinishWithVideo(previewImage, videoURL, adjustments);
strongSelf.didFinishWithVideo(previewImage, [[AVURLAsset alloc] initWithURL:url options:nil], adjustments);
[menuController dismissAnimated:false];
};
@ -459,6 +459,7 @@
TGMediaAssetsController *controller = [TGMediaAssetsController controllerWithContext:context assetGroup:group intent:strongSelf->_signup ? TGMediaAssetsControllerSetSignupProfilePhotoIntent : TGMediaAssetsControllerSetProfilePhotoIntent recipientName:nil saveEditedPhotos:strongSelf->_saveEditedPhotos allowGrouping:false selectionLimit:10];
__weak TGMediaAssetsController *weakController = controller;
controller.stickersContext = _stickersContext;
controller.avatarCompletionBlock = ^(UIImage *resultImage)
{
__strong TGMediaAvatarMenuMixin *strongSelf = weakSelf;
@ -472,13 +473,13 @@
if (strongController != nil && strongController.dismissalBlock != nil)
strongController.dismissalBlock();
};
controller.avatarVideoCompletionBlock = ^(UIImage *image, NSURL *url, TGVideoEditAdjustments *adjustments) {
controller.avatarVideoCompletionBlock = ^(UIImage *image, AVAsset *asset, TGVideoEditAdjustments *adjustments) {
__strong TGMediaAvatarMenuMixin *strongSelf = weakSelf;
if (strongSelf == nil)
return;
if (strongSelf.didFinishWithVideo != nil)
strongSelf.didFinishWithVideo(image, url, adjustments);
strongSelf.didFinishWithVideo(image, asset, adjustments);
__strong TGMediaAssetsController *strongController = weakController;
if (strongController != nil && strongController.dismissalBlock != nil)

View File

@ -345,6 +345,7 @@
_fullPaintingView.frame = _fullPreviewView.frame;
_fullEntitiesView = [[TGPhotoEntitiesContainerView alloc] init];
_fullEntitiesView.userInteractionEnabled = false;
_fullEntitiesView.frame = _fullPreviewView.frame;
}
@ -1266,6 +1267,7 @@
cropController.fullPreviewView = _fullPreviewView;
cropController.fullPaintingView = _fullPaintingView;
cropController.fullEntitiesView = _fullEntitiesView;
cropController.fullEntitiesView.userInteractionEnabled = false;
cropController.fromCamera = [self presentedFromCamera];
cropController.skipTransitionIn = skipInitialTransition;
if (snapshotImage != nil)
@ -1534,7 +1536,7 @@
case TGPhotoEditorToolsTab:
{
TGPhotoToolsController *toolsController = [[TGPhotoToolsController alloc] initWithContext:_context photoEditor:_photoEditor previewView:_previewView];
TGPhotoToolsController *toolsController = [[TGPhotoToolsController alloc] initWithContext:_context photoEditor:_photoEditor previewView:_previewView entitiesView:_fullEntitiesView];
toolsController.toolbarLandscapeSize = TGPhotoEditorToolbarSize;
toolsController.beginTransitionIn = ^UIView *(CGRect *referenceFrame, UIView **parentView, bool *noTransitionView)
{
@ -1981,7 +1983,7 @@
TGDispatchOnMainThread(^{
if (self.didFinishEditingVideo != nil)
self.didFinishEditingVideo(asset.URL, [adjustments editAdjustmentsWithPreset:preset videoStartValue:videoStartValue trimStartValue:trimStartValue trimEndValue:trimEndValue], fullImage, nil, true);
self.didFinishEditingVideo(asset, [adjustments editAdjustmentsWithPreset:preset videoStartValue:videoStartValue trimStartValue:trimStartValue trimEndValue:trimEndValue], fullImage, nil, true);
[self dismissAnimated:true];
});

View File

@ -152,6 +152,7 @@ const CGFloat TGPhotoPaintStickerKeyboardSize = 260.0f;
self.photoEditor = photoEditor;
self.previewView = previewView;
_entitiesContainerView = entitiesView;
entitiesView.userInteractionEnabled = true;
_brushes = @
[

View File

@ -3,9 +3,10 @@
@class PGPhotoEditor;
@class PGPhotoTool;
@class TGPhotoEditorPreviewView;
@class TGPhotoEntitiesContainerView;
@interface TGPhotoToolsController : TGPhotoEditorTabController
- (instancetype)initWithContext:(id<LegacyComponentsContext>)context photoEditor:(PGPhotoEditor *)photoEditor previewView:(TGPhotoEditorPreviewView *)previewView;
- (instancetype)initWithContext:(id<LegacyComponentsContext>)context photoEditor:(PGPhotoEditor *)photoEditor previewView:(TGPhotoEditorPreviewView *)previewView entitiesView:(TGPhotoEntitiesContainerView *)entitiesView;
@end

View File

@ -44,6 +44,7 @@ const CGFloat TGPhotoEditorToolsLandscapePanelSize = TGPhotoEditorToolsPanelSize
TGPhotoEditorCollectionView *_portraitCollectionView;
TGPhotoEditorCollectionView *_landscapeCollectionView;
TGPhotoEditorHUDView *_hudView;
TGPhotoEntitiesContainerView *_entitiesView;
void (^_changeBlock)(PGPhotoTool *, id, bool);
void (^_interactionBegan)(void);
@ -64,14 +65,15 @@ const CGFloat TGPhotoEditorToolsLandscapePanelSize = TGPhotoEditorToolsPanelSize
@implementation TGPhotoToolsController
- (instancetype)initWithContext:(id<LegacyComponentsContext>)context photoEditor:(PGPhotoEditor *)photoEditor previewView:(TGPhotoEditorPreviewView *)previewView
- (instancetype)initWithContext:(id<LegacyComponentsContext>)context photoEditor:(PGPhotoEditor *)photoEditor previewView:(TGPhotoEditorPreviewView *)previewView entitiesView:(TGPhotoEntitiesContainerView *)entitiesView
{
self = [super initWithContext:context];
if (self != nil)
{
self.photoEditor = photoEditor;
self.previewView = previewView;
_entitiesView = entitiesView;
__weak TGPhotoToolsController *weakSelf = self;
_changeBlock = ^(PGPhotoTool *tool, __unused id newValue, bool animated)
{

View File

@ -33,8 +33,6 @@
editableItem = [[TGCameraCapturedVideo alloc] initWithURL:video];
}
void (^present)(UIImage *) = ^(UIImage *screenImage) {
TGPhotoEditorController *controller = [[TGPhotoEditorController alloc] initWithContext:[windowManager context] item:editableItem intent:TGPhotoEditorControllerAvatarIntent adjustments:nil caption:nil screenImage:screenImage availableTabs:[TGPhotoEditorController defaultTabsForAvatarIntent] selectedTab:TGPhotoEditorCropTab];
// controller.stickersContext = _stickersContext;
@ -45,9 +43,12 @@
if (didFinishWithImage != nil)
didFinishWithImage(resultImage);
};
controller.didFinishEditingVideo = ^(NSURL *url, id<TGMediaEditAdjustments> adjustments, UIImage *resultImage, UIImage *thumbnailImage, bool hasChanges) {
if (didFinishWithVideo != nil)
didFinishWithVideo(resultImage, url, adjustments);
controller.didFinishEditingVideo = ^(AVAsset *asset, id<TGMediaEditAdjustments> adjustments, UIImage *resultImage, UIImage *thumbnailImage, bool hasChanges) {
if (didFinishWithVideo != nil) {
if ([asset isKindOfClass:[AVURLAsset class]]) {
didFinishWithVideo(resultImage, [(AVURLAsset *)asset URL], adjustments);
}
}
};
controller.requestThumbnailImage = ^(id<TGMediaEditableItem> editableItem)
{

View File

@ -23,16 +23,16 @@ public func presentLegacyAvatarEditor(theme: PresentationTheme, image: UIImage?,
if let image = image {
imageCompletion(image)
}
}, didFinishWithVideo: { image, url, adjustments in
if let image = image, let url = url {
videoCompletion(image, url, adjustments)
}, didFinishWithVideo: { image, asset, adjustments in
if let image = image {
// videoCompletion(image, url, adjustments)
}
}, dismissed: { [weak legacyController] in
legacyController?.dismiss()
})
}
public func presentLegacyAvatarPicker(holder: Atomic<NSObject?>, signup: Bool, theme: PresentationTheme, present: (ViewController, Any?) -> Void, openCurrent: (() -> Void)?, completion: @escaping (UIImage) -> Void, videoCompletion: @escaping (UIImage, URL, TGVideoEditAdjustments?) -> Void = { _, _, _ in}) {
public func presentLegacyAvatarPicker(holder: Atomic<NSObject?>, signup: Bool, theme: PresentationTheme, present: (ViewController, Any?) -> Void, openCurrent: (() -> Void)?, completion: @escaping (UIImage) -> Void, videoCompletion: @escaping (UIImage, Any?, TGVideoEditAdjustments?) -> Void = { _, _, _ in}) {
let legacyController = LegacyController(presentation: .custom, theme: theme)
legacyController.statusBar.statusBarStyle = .Ignore
@ -53,11 +53,11 @@ public func presentLegacyAvatarPicker(holder: Atomic<NSObject?>, signup: Bool, t
}
completion(image)
}
mixin.didFinishWithVideo = { image, url, adjustments in
guard let image = image, let url = url else {
mixin.didFinishWithVideo = { image, asset, adjustments in
guard let image = image else {
return
}
videoCompletion(image, url, adjustments)
videoCompletion(image, asset, adjustments)
}
mixin.didFinishWithView = {
openCurrent?()

View File

@ -174,9 +174,9 @@ public func fetchedAvatarGalleryEntries(account: Account, peer: Peer) -> Signal<
for photo in photos {
let indexData = GalleryItemIndexData(position: index, totalCount: Int32(photos.count))
if result.isEmpty, let first = initialEntries.first {
result.append(.image(photo.image.imageId, photo.image.reference, first.representations, photo.image.videoRepresentations.map({ VideoRepresentationWithReference(representation: $0, reference: MediaResourceReference.avatar(peer: peerReference, resource: $0.resource)) }), peer, photo.date, indexData, photo.messageId, photo.image.immediateThumbnailData, nil))
result.append(.image(photo.image.imageId, photo.image.reference, first.representations, photo.image.videoRepresentations.map({ VideoRepresentationWithReference(representation: $0, reference: MediaResourceReference.avatarList(peer: peerReference, resource: $0.resource)) }), peer, photo.date, indexData, photo.messageId, photo.image.immediateThumbnailData, nil))
} else {
result.append(.image(photo.image.imageId, photo.image.reference, photo.image.representations.map({ ImageRepresentationWithReference(representation: $0, reference: MediaResourceReference.standalone(resource: $0.resource)) }), photo.image.videoRepresentations.map({ VideoRepresentationWithReference(representation: $0, reference: MediaResourceReference.avatar(peer: peerReference, resource: $0.resource)) }), peer, photo.date, indexData, photo.messageId, photo.image.immediateThumbnailData, nil))
result.append(.image(photo.image.imageId, photo.image.reference, photo.image.representations.map({ ImageRepresentationWithReference(representation: $0, reference: MediaResourceReference.avatarList(peer: peerReference, resource: $0.resource)) }), photo.image.videoRepresentations.map({ VideoRepresentationWithReference(representation: $0, reference: MediaResourceReference.avatarList(peer: peerReference, resource: $0.resource)) }), peer, photo.date, indexData, photo.messageId, photo.image.immediateThumbnailData, nil))
}
index += 1
}
@ -202,9 +202,9 @@ public func fetchedAvatarGalleryEntries(account: Account, peer: Peer, firstEntry
for photo in photos {
let indexData = GalleryItemIndexData(position: index, totalCount: Int32(photos.count))
if result.isEmpty, let first = initialEntries.first {
result.append(.image(photo.image.imageId, photo.image.reference, first.representations, photo.image.videoRepresentations.map({ VideoRepresentationWithReference(representation: $0, reference: MediaResourceReference.avatar(peer: peerReference, resource: $0.resource)) }), peer, photo.date, indexData, photo.messageId, photo.image.immediateThumbnailData, nil))
result.append(.image(photo.image.imageId, photo.image.reference, first.representations, photo.image.videoRepresentations.map({ VideoRepresentationWithReference(representation: $0, reference: MediaResourceReference.avatarList(peer: peerReference, resource: $0.resource)) }), peer, photo.date, indexData, photo.messageId, photo.image.immediateThumbnailData, nil))
} else {
result.append(.image(photo.image.imageId, photo.image.reference, photo.image.representations.map({ ImageRepresentationWithReference(representation: $0, reference: MediaResourceReference.standalone(resource: $0.resource)) }), photo.image.videoRepresentations.map({ VideoRepresentationWithReference(representation: $0, reference: MediaResourceReference.avatar(peer: peerReference, resource: $0.resource)) }), peer, photo.date, indexData, photo.messageId, photo.image.immediateThumbnailData, nil))
result.append(.image(photo.image.imageId, photo.image.reference, photo.image.representations.map({ ImageRepresentationWithReference(representation: $0, reference: MediaResourceReference.avatarList(peer: peerReference, resource: $0.resource)) }), photo.image.videoRepresentations.map({ VideoRepresentationWithReference(representation: $0, reference: MediaResourceReference.avatarList(peer: peerReference, resource: $0.resource)) }), peer, photo.date, indexData, photo.messageId, photo.image.immediateThumbnailData, nil))
}
index += 1
}
@ -845,7 +845,7 @@ public class AvatarGalleryController: ViewController, StandalonePresentableContr
}
let actionSheet = ActionSheetController(presentationData: presentationData)
let items: [ActionSheetItem] = [
ActionSheetButtonItem(title: presentationData.strings.Common_Delete, color: .destructive, action: { [weak actionSheet] in
ActionSheetButtonItem(title: presentationData.strings.Settings_RemoveConfirmation, color: .destructive, action: { [weak actionSheet] in
actionSheet?.dismissAnimated()
proceed()
})

View File

@ -259,7 +259,10 @@ final class PeerAvatarImageGalleryItemNode: ZoomableContentGalleryItemNode {
id = image.0.id
category = image.9
} else {
id = Int64(entry.peer?.id.id ?? 1)
id = Int64(entry.peer?.id.id ?? 0)
if let resource = entry.videoRepresentations.first?.representation.resource as? CloudPhotoSizeMediaResource {
id = id &+ resource.photoId
}
}
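The gallery item id is now derived by folding the cloud photo id into the peer id with &+, Swift's overflow-wrapping addition, so each avatar entry gets a distinct, deterministic video-content id without risking an arithmetic trap. A small illustration of the same derivation, with the surrounding types reduced to plain integers (the function is hypothetical):

// Illustration only: in the diff the photo id comes from CloudPhotoSizeMediaResource.
func profileVideoContentId(peerId: Int64, photoId: Int64?) -> Int64 {
    var id = peerId
    if let photoId = photoId {
        // &+ wraps on overflow instead of trapping; both operands are arbitrary
        // 64-bit values, so a plain + could crash at runtime.
        id = id &+ photoId
    }
    return id
}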
if let video = entry.videoRepresentations.last, let peerReference = PeerReference(self.peer) {
if video != previousVideoRepresentations?.last {

View File

@ -684,9 +684,9 @@ func editSettingsController(context: AccountContext, currentName: ItemListAvatar
completedProfilePhotoImpl(image)
}
}
mixin.didFinishWithVideo = { image, url, adjustments in
if let image = image, let url = url {
completedProfileVideoImpl(image, url, adjustments)
mixin.didFinishWithVideo = { image, asset, adjustments in
if let image = image {
// completedProfileVideoImpl(image, url, adjustments)
}
}
mixin.didFinishWithDelete = {

View File

@ -1418,9 +1418,9 @@ public func settingsController(context: AccountContext, accountManager: AccountM
completedProfilePhotoImpl(image)
}
}
mixin.didFinishWithVideo = { image, url, adjustments in
if let image = image, let url = url {
completedProfileVideoImpl(image, url, adjustments)
mixin.didFinishWithVideo = { image, asset, adjustments in
if let image = image {
// completedProfileVideoImpl(image, url, adjustments)
}
}
mixin.didFinishWithDelete = {

View File

@ -686,7 +686,7 @@ public final class AuthorizationSequenceController: NavigationController, MFMail
transaction.setState(UnauthorizedAccountState(isTestingEnvironment: strongSelf.account.testingEnvironment, masterDatacenterId: strongSelf.account.masterDatacenterId, contents: .phoneEntry(countryCode: countryCode, number: "")))
}).start()
}, displayCancel: displayCancel)
controller.signUpWithName = { [weak self, weak controller] firstName, lastName, avatarData, avatarUrl, avatarAdjustments in
controller.signUpWithName = { [weak self, weak controller] firstName, lastName, avatarData, avatarAsset, avatarAdjustments in
if let strongSelf = self {
controller?.inProgress = true
@ -696,15 +696,9 @@ public final class AuthorizationSequenceController: NavigationController, MFMail
}
let avatarVideo: Signal<UploadedPeerPhotoData?, NoError>?
if let avatarUrl = avatarUrl {
if let avatarAsset = avatarAsset as? AVAsset {
let account = strongSelf.account
avatarVideo = Signal<TelegramMediaResource?, NoError> { subscriber in
var filteredPath = avatarUrl.path
if filteredPath.hasPrefix("file://") {
filteredPath = String(filteredPath[filteredPath.index(filteredPath.startIndex, offsetBy: "file://".count)])
}
let avAsset = AVURLAsset(url: URL(fileURLWithPath: filteredPath))
let entityRenderer: LegacyPaintEntityRenderer? = avatarAdjustments.flatMap { adjustments in
if let paintingData = adjustments.paintingData, paintingData.hasAnimation {
return LegacyPaintEntityRenderer(account: nil, adjustments: adjustments)
@ -713,7 +707,7 @@ public final class AuthorizationSequenceController: NavigationController, MFMail
}
}
let signal = TGMediaVideoConverter.convert(avAsset, adjustments: avatarAdjustments, watcher: nil, entityRenderer: entityRenderer)!
let signal = TGMediaVideoConverter.convert(avatarAsset, adjustments: avatarAdjustments, watcher: nil, entityRenderer: entityRenderer)!
let signalDisposable = signal.start(next: { next in
if let result = next as? TGMediaVideoConversionResult {

View File

@ -22,9 +22,9 @@ final class AuthorizationSequenceSignUpController: ViewController {
var initialName: (String, String) = ("", "")
private var termsOfService: UnauthorizedAccountTermsOfService?
var signUpWithName: ((String, String, Data?, URL?, TGVideoEditAdjustments?) -> Void)?
var signUpWithName: ((String, String, Data?, Any?, TGVideoEditAdjustments?) -> Void)?
var avatarUrl: URL?
var avatarAsset: Any?
var avatarAdjustments: TGVideoEditAdjustments?
private let hapticFeedback = HapticFeedback()
@ -91,11 +91,11 @@ final class AuthorizationSequenceSignUpController: ViewController {
self?.present(c, in: .window(.root), with: a)
}, openCurrent: nil, completion: { image in
self?.controllerNode.currentPhoto = image
self?.avatarUrl = nil
self?.avatarAsset = nil
self?.avatarAdjustments = nil
}, videoCompletion: { image, url, adjustments in
}, videoCompletion: { image, asset, adjustments in
self?.controllerNode.currentPhoto = image
self?.avatarUrl = url
self?.avatarAsset = asset
self?.avatarAdjustments = adjustments
})
})
@ -159,7 +159,7 @@ final class AuthorizationSequenceSignUpController: ViewController {
if let name = name {
self.signUpWithName?(name.0, name.1, self.controllerNode.currentPhoto.flatMap({ image in
return compressImageToJPEG(image, quality: 0.7)
}), self.avatarUrl, self.avatarAdjustments)
}), self.avatarAsset, self.avatarAdjustments)
}
}
}

View File

@ -328,7 +328,7 @@ public func createChannelController(context: AccountContext) -> ViewController {
}
}
let completedChannelVideoImpl: (UIImage, URL, TGVideoEditAdjustments?) -> Void = { image, url, adjustments in
let completedChannelVideoImpl: (UIImage, Any?, TGVideoEditAdjustments?) -> Void = { image, asset, adjustments in
if let data = image.jpegData(compressionQuality: 0.6) {
let photoResource = LocalFileMediaResource(fileId: arc4random64())
context.account.postbox.mediaBox.storeResourceData(photoResource.id, data: data)
@ -345,12 +345,6 @@ public func createChannelController(context: AccountContext) -> ViewController {
}
let signal = Signal<TelegramMediaResource?, UploadPeerPhotoError> { subscriber in
var filteredPath = url.path
if filteredPath.hasPrefix("file://") {
filteredPath = String(filteredPath[filteredPath.index(filteredPath.startIndex, offsetBy: "file://".count)])
}
let avAsset = AVURLAsset(url: URL(fileURLWithPath: filteredPath))
let entityRenderer: LegacyPaintEntityRenderer? = adjustments.flatMap { adjustments in
if let paintingData = adjustments.paintingData, paintingData.hasAnimation {
return LegacyPaintEntityRenderer(account: context.account, adjustments: adjustments)
@ -359,7 +353,31 @@ public func createChannelController(context: AccountContext) -> ViewController {
}
}
let uploadInterface = LegacyLiveUploadInterface(account: context.account)
let signal = TGMediaVideoConverter.convert(avAsset, adjustments: adjustments, watcher: uploadInterface, entityRenderer: entityRenderer)!
let signal: SSignal
if let asset = asset as? AVAsset {
signal = TGMediaVideoConverter.convert(asset, adjustments: adjustments, watcher: uploadInterface, entityRenderer: entityRenderer)!
} else if let url = asset as? URL, let data = try? Data(contentsOf: url, options: [.mappedRead]), let image = UIImage(data: data), let entityRenderer = entityRenderer {
let durationSignal: SSignal = SSignal(generator: { subscriber in
let disposable = (entityRenderer.duration()).start(next: { duration in
subscriber?.putNext(duration)
subscriber?.putCompletion()
})
return SBlockDisposable(block: {
disposable.dispose()
})
})
signal = durationSignal.map(toSignal: { duration -> SSignal? in
if let duration = duration as? Double {
return TGMediaVideoConverter.renderUIImage(image, duration: duration, adjustments: adjustments, watcher: uploadInterface, entityRenderer: entityRenderer)!
} else {
return SSignal.single(nil)
}
})
} else {
signal = SSignal.complete()
}
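The same three-way branch appears again below in createGroupControllerImpl and in PeerInfoScreenNode's updateProfileVideo: a real AVAsset goes straight to TGMediaVideoConverter.convert, a still image passed as a URL is rendered for the duration reported by the entity renderer, and anything else completes the signal empty. A sketch of that shared shape, using only the LegacyComponents/SSignalKit calls already visible in this diff (the helper itself is hypothetical, not part of the commit):

// Hypothetical consolidation of the branch this commit introduces in three places;
// the calls are taken verbatim from the diff, the wrapper is not.
private func avatarVideoConversionSignal(asset: Any?, adjustments: TGVideoEditAdjustments?, uploadInterface: LegacyLiveUploadInterface, entityRenderer: LegacyPaintEntityRenderer?) -> SSignal {
    if let asset = asset as? AVAsset {
        // A real video: convert the asset directly.
        return TGMediaVideoConverter.convert(asset, adjustments: adjustments, watcher: uploadInterface, entityRenderer: entityRenderer)!
    } else if let url = asset as? URL, let data = try? Data(contentsOf: url, options: [.mappedRead]), let image = UIImage(data: data), let entityRenderer = entityRenderer {
        // A still image with animated painting entities: render it for the
        // duration reported by the entity renderer.
        let durationSignal: SSignal = SSignal(generator: { subscriber in
            let disposable = (entityRenderer.duration()).start(next: { duration in
                subscriber?.putNext(duration)
                subscriber?.putCompletion()
            })
            return SBlockDisposable(block: {
                disposable.dispose()
            })
        })
        return durationSignal.map(toSignal: { duration -> SSignal? in
            if let duration = duration as? Double {
                return TGMediaVideoConverter.renderUIImage(image, duration: duration, adjustments: adjustments, watcher: uploadInterface, entityRenderer: entityRenderer)!
            } else {
                return SSignal.single(nil)
            }
        })
    } else {
        return SSignal.complete()
    }
}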
let signalDisposable = signal.start(next: { next in
if let result = next as? TGMediaVideoConversionResult {
@ -438,9 +456,9 @@ public func createChannelController(context: AccountContext) -> ViewController {
completedChannelPhotoImpl(image)
}
}
mixin.didFinishWithVideo = { image, url, adjustments in
if let image = image, let url = url {
completedChannelVideoImpl(image, url, adjustments)
mixin.didFinishWithVideo = { image, asset, adjustments in
if let image = image, let asset = asset {
completedChannelVideoImpl(image, asset, adjustments)
}
}
if stateValue.with({ $0.avatar }) != nil {

View File

@ -586,7 +586,7 @@ public func createGroupControllerImpl(context: AccountContext, peerIds: [PeerId]
}
}
let completedGroupVideoImpl: (UIImage, URL, TGVideoEditAdjustments?) -> Void = { image, url, adjustments in
let completedGroupVideoImpl: (UIImage, Any?, TGVideoEditAdjustments?) -> Void = { image, asset, adjustments in
if let data = image.jpegData(compressionQuality: 0.6) {
let photoResource = LocalFileMediaResource(fileId: arc4random64())
context.account.postbox.mediaBox.storeResourceData(photoResource.id, data: data)
@ -603,12 +603,7 @@ public func createGroupControllerImpl(context: AccountContext, peerIds: [PeerId]
}
let signal = Signal<TelegramMediaResource?, UploadPeerPhotoError> { subscriber in
var filteredPath = url.path
if filteredPath.hasPrefix("file://") {
filteredPath = String(filteredPath[filteredPath.index(filteredPath.startIndex, offsetBy: "file://".count)])
}
let avAsset = AVURLAsset(url: URL(fileURLWithPath: filteredPath))
let entityRenderer: LegacyPaintEntityRenderer? = adjustments.flatMap { adjustments in
if let paintingData = adjustments.paintingData, paintingData.hasAnimation {
return LegacyPaintEntityRenderer(account: context.account, adjustments: adjustments)
@ -617,7 +612,31 @@ public func createGroupControllerImpl(context: AccountContext, peerIds: [PeerId]
}
}
let uploadInterface = LegacyLiveUploadInterface(account: context.account)
let signal = TGMediaVideoConverter.convert(avAsset, adjustments: adjustments, watcher: uploadInterface, entityRenderer: entityRenderer)!
let signal: SSignal
if let asset = asset as? AVAsset {
signal = TGMediaVideoConverter.convert(asset, adjustments: adjustments, watcher: uploadInterface, entityRenderer: entityRenderer)!
} else if let url = asset as? URL, let data = try? Data(contentsOf: url, options: [.mappedRead]), let image = UIImage(data: data), let entityRenderer = entityRenderer {
let durationSignal: SSignal = SSignal(generator: { subscriber in
let disposable = (entityRenderer.duration()).start(next: { duration in
subscriber?.putNext(duration)
subscriber?.putCompletion()
})
return SBlockDisposable(block: {
disposable.dispose()
})
})
signal = durationSignal.map(toSignal: { duration -> SSignal? in
if let duration = duration as? Double {
return TGMediaVideoConverter.renderUIImage(image, duration: duration, adjustments: adjustments, watcher: uploadInterface, entityRenderer: entityRenderer)!
} else {
return SSignal.single(nil)
}
})
} else {
signal = SSignal.complete()
}
let signalDisposable = signal.start(next: { next in
if let result = next as? TGMediaVideoConversionResult {
@ -696,9 +715,9 @@ public func createGroupControllerImpl(context: AccountContext, peerIds: [PeerId]
completedGroupPhotoImpl(image)
}
}
mixin.didFinishWithVideo = { image, url, adjustments in
if let image = image, let url = url {
completedGroupVideoImpl(image, url, adjustments)
mixin.didFinishWithVideo = { image, asset, adjustments in
if let image = image, let asset = asset {
completedGroupVideoImpl(image, asset, adjustments)
}
}
if stateValue.with({ $0.avatar }) != nil {

View File

@ -334,6 +334,9 @@ final class PeerInfoAvatarListItemNode: ASDisplayNode {
videoRepresentations = videoRepresentationsValue
immediateThumbnailData = immediateThumbnail
id = Int64(self.peer.id.id)
if let resource = videoRepresentations.first?.representation.resource as? CloudPhotoSizeMediaResource {
id = id &+ resource.photoId
}
case let .image(reference, imageRepresentations, videoRepresentationsValue, immediateThumbnail):
representations = imageRepresentations
videoRepresentations = videoRepresentationsValue
@ -350,7 +353,7 @@ final class PeerInfoAvatarListItemNode: ASDisplayNode {
let videoFileReference = FileMediaReference.avatarList(peer: peerReference, media: TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.LocalFile, id: 0), partialReference: nil, resource: video.representation.resource, previewRepresentations: representations.map { $0.representation }, videoThumbnails: [], immediateThumbnailData: immediateThumbnailData, mimeType: "video/mp4", size: nil, attributes: [.Animated, .Video(duration: 0, size: video.representation.dimensions, flags: [])]))
let videoContent = NativeVideoContent(id: .profileVideo(id, nil), fileReference: videoFileReference, streamVideo: isMediaStreamable(resource: video.representation.resource) ? .conservative : .none, loopVideo: true, enableSound: false, fetchAutomatically: true, onlyFullSizeThumbnail: false, autoFetchFullSizeThumbnail: true, startTimestamp: video.representation.startTimestamp, continuePlayingWithoutSoundOnLostAudioSession: false, placeholderColor: .clear)
if videoContent.id != self.videoContent?.id {
if videoContent.id != self.videoContent?.id {
self.videoContent = videoContent
self.videoStartTimestamp = video.representation.startTimestamp
self.setupVideoPlayback()
@ -1129,7 +1132,7 @@ final class PeerInfoAvatarListContainerNode: ASDisplayNode {
stripTransition.updateAlpha(node: self.loadingStripNode, alpha: self.loading ? 1.0 : 0.0)
self.activeStripNode.isHidden = self.stripNodes.count < 2
self.loadingStripNode.isHidden = !self.loading
self.loadingStripNode.isHidden = self.stripNodes.count < 2 || !self.loading
}
if let item = self.items.first, let itemNode = self.itemNodes[item.id] {
@ -1239,6 +1242,9 @@ final class PeerInfoAvatarTransformContainerNode: ASDisplayNode {
videoRepresentations = videoRepresentationsValue
immediateThumbnailData = immediateThumbnail
id = Int64(peer.id.id)
if let resource = videoRepresentations.first?.representation.resource as? CloudPhotoSizeMediaResource {
id = id &+ resource.photoId
}
case let .image(reference, imageRepresentations, videoRepresentationsValue, immediateThumbnail):
representations = imageRepresentations
videoRepresentations = videoRepresentationsValue
@ -1254,6 +1260,8 @@ final class PeerInfoAvatarTransformContainerNode: ASDisplayNode {
let videoFileReference = FileMediaReference.avatarList(peer: peerReference, media: TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.LocalFile, id: 0), partialReference: nil, resource: video.representation.resource, previewRepresentations: representations.map { $0.representation }, videoThumbnails: [], immediateThumbnailData: immediateThumbnailData, mimeType: "video/mp4", size: nil, attributes: [.Animated, .Video(duration: 0, size: video.representation.dimensions, flags: [])]))
let videoContent = NativeVideoContent(id: .profileVideo(id, nil), fileReference: videoFileReference, streamVideo: isMediaStreamable(resource: video.representation.resource) ? .conservative : .none, loopVideo: true, enableSound: false, fetchAutomatically: true, onlyFullSizeThumbnail: false, autoFetchFullSizeThumbnail: true, startTimestamp: video.representation.startTimestamp, continuePlayingWithoutSoundOnLostAudioSession: false, placeholderColor: .clear)
if videoContent.id != self.videoContent?.id {
self.videoNode?.removeFromSupernode()
let mediaManager = self.context.sharedContext.mediaManager
let videoNode = UniversalVideoNode(postbox: self.context.account.postbox, audioSession: mediaManager.audioSession, manager: mediaManager.universalVideoManager, decoration: GalleryVideoDecoration(), content: videoContent, priority: .embedded)
videoNode.isUserInteractionEnabled = false
@ -1520,6 +1528,9 @@ final class PeerInfoEditingAvatarNode: ASDisplayNode {
videoRepresentations = videoRepresentationsValue
immediateThumbnailData = immediateThumbnail
id = Int64(peer.id.id)
if let resource = videoRepresentations.first?.representation.resource as? CloudPhotoSizeMediaResource {
id = id &+ resource.photoId
}
case let .image(reference, imageRepresentations, videoRepresentationsValue, immediateThumbnail):
representations = imageRepresentations
videoRepresentations = videoRepresentationsValue
@ -1535,6 +1546,8 @@ final class PeerInfoEditingAvatarNode: ASDisplayNode {
let videoFileReference = FileMediaReference.avatarList(peer: peerReference, media: TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.LocalFile, id: 0), partialReference: nil, resource: video.representation.resource, previewRepresentations: representations.map { $0.representation }, videoThumbnails: [], immediateThumbnailData: immediateThumbnailData, mimeType: "video/mp4", size: nil, attributes: [.Animated, .Video(duration: 0, size: video.representation.dimensions, flags: [])]))
let videoContent = NativeVideoContent(id: .profileVideo(id, nil), fileReference: videoFileReference, streamVideo: isMediaStreamable(resource: video.representation.resource) ? .conservative : .none, loopVideo: true, enableSound: false, fetchAutomatically: true, onlyFullSizeThumbnail: false, autoFetchFullSizeThumbnail: true, startTimestamp: video.representation.startTimestamp, continuePlayingWithoutSoundOnLostAudioSession: false, placeholderColor: .clear)
if videoContent.id != self.videoContent?.id {
self.videoNode?.removeFromSupernode()
let mediaManager = self.context.sharedContext.mediaManager
let videoNode = UniversalVideoNode(postbox: self.context.account.postbox, audioSession: mediaManager.audioSession, manager: mediaManager.universalVideoManager, decoration: GalleryVideoDecoration(), content: videoContent, priority: .gallery)
videoNode.isUserInteractionEnabled = false
@ -2580,6 +2593,10 @@ final class PeerInfoHeaderNode: ASDisplayNode {
}
func initiateAvatarExpansion(gallery: Bool, first: Bool) {
if let peer = self.peer, peer.profileImageRepresentations.isEmpty && gallery {
self.requestOpenAvatarForEditing?(false)
return
}
if self.isAvatarExpanded || gallery {
if let currentEntry = self.avatarListNode.listContainerNode.currentEntry, let firstEntry = self.avatarListNode.listContainerNode.galleryEntries.first {
let entry = first ? firstEntry : currentEntry

View File

@ -612,36 +612,41 @@ private final class PeerInfoInteraction {
private let enabledBioEntities: EnabledEntityTypes = [.url, .mention, .hashtag]
private func settingsItems(data: PeerInfoScreenData?, context: AccountContext, presentationData: PresentationData, interaction: PeerInfoInteraction) -> [(AnyHashable, [PeerInfoScreenItem])] {
private enum SettingsSection: Int, CaseIterable {
case edit
case phone
case accounts
case proxy
case shortcuts
case advanced
case extra
case support
}
private func settingsItems(data: PeerInfoScreenData?, context: AccountContext, presentationData: PresentationData, interaction: PeerInfoInteraction, isExpanded: Bool) -> [(AnyHashable, [PeerInfoScreenItem])] {
guard let data = data else {
return []
}
enum Section: Int, CaseIterable {
case edit
case phone
case accounts
case proxy
case shortcuts
case advanced
case extra
case support
}
var items: [Section: [PeerInfoScreenItem]] = [:]
for section in Section.allCases {
var items: [SettingsSection: [PeerInfoScreenItem]] = [:]
for section in SettingsSection.allCases {
items[section] = []
}
let setPhotoTitle: String
let displaySetPhoto: Bool
if let peer = data.peer, !peer.profileImageRepresentations.isEmpty {
setPhotoTitle = presentationData.strings.Settings_SetNewProfilePhotoOrVideo
displaySetPhoto = isExpanded
} else {
setPhotoTitle = presentationData.strings.Settings_SetProfilePhotoOrVideo
displaySetPhoto = true
}
if displaySetPhoto {
items[.edit]!.append(PeerInfoScreenActionItem(id: 0, text: setPhotoTitle, icon: UIImage(bundleImageName: "Settings/SetAvatar"), action: {
interaction.openSettings(.avatar)
}))
}
items[.edit]!.append(PeerInfoScreenActionItem(id: 0, text: setPhotoTitle, icon: UIImage(bundleImageName: "Settings/SetAvatar"), action: {
interaction.openSettings(.avatar)
}))
if let peer = data.peer, peer.addressName == nil {
items[.edit]!.append(PeerInfoScreenActionItem(id: 1, text: presentationData.strings.Settings_SetUsername, icon: UIImage(bundleImageName: "Settings/SetUsername"), action: {
interaction.openSettings(.username)
@ -784,7 +789,7 @@ private func settingsItems(data: PeerInfoScreenData?, context: AccountContext, p
}))
var result: [(AnyHashable, [PeerInfoScreenItem])] = []
for section in Section.allCases {
for section in SettingsSection.allCases {
if let sectionItems = items[section], !sectionItems.isEmpty {
result.append((section, sectionItems))
}
@ -830,10 +835,14 @@ private func settingsEditingItems(data: PeerInfoScreenData?, state: PeerInfoStat
if let user = data.peer as? TelegramUser {
items[.info]!.append(PeerInfoScreenDisclosureItem(id: ItemPhoneNumber, label: .text(user.phone.flatMap({ formatPhoneNumber($0) }) ?? ""), text: presentationData.strings.Settings_PhoneNumber, action: {
interaction.openSettings(.phoneNumber)
interaction.openSettings(.phoneNumber)
}))
}
items[.info]!.append(PeerInfoScreenDisclosureItem(id: ItemUsername, label: .text(data.peer?.addressName.flatMap({ "@\($0)" }) ?? ""), text: presentationData.strings.Settings_Username, action: {
var username = ""
if let addressName = data.peer?.addressName, !addressName.isEmpty {
username = "@\(addressName)"
}
items[.info]!.append(PeerInfoScreenDisclosureItem(id: ItemUsername, label: .text(username), text: presentationData.strings.Settings_Username, action: {
interaction.openSettings(.username)
}))
@ -2113,11 +2122,11 @@ private final class PeerInfoScreenNode: ViewControllerTracingNode, UIScrollViewD
galleryController.avatarPhotoEditCompletion = { [weak self] image in
self?.updateProfilePhoto(image)
}
galleryController.avatarVideoEditCompletion = { [weak self] image, url, adjustments in
self?.updateProfileVideo(image, url: url, adjustments: adjustments)
galleryController.avatarVideoEditCompletion = { [weak self] image, asset, adjustments in
self?.updateProfileVideo(image, asset: asset, adjustments: adjustments)
}
galleryController.removedEntry = { [weak self] entry in
self?.headerNode.avatarListNode.listContainerNode.deleteItem(PeerInfoAvatarListItem(entry: entry))
let _ = self?.headerNode.avatarListNode.listContainerNode.deleteItem(PeerInfoAvatarListItem(entry: entry))
}
strongSelf.hiddenAvatarRepresentationDisposable.set((galleryController.hiddenMedia |> deliverOnMainQueue).start(next: { entry in
self?.headerNode.updateAvatarIsHidden(entry: entry)
@ -2497,7 +2506,7 @@ private final class PeerInfoScreenNode: ViewControllerTracingNode, UIScrollViewD
case .search:
strongSelf.activateSearch()
case .editPhoto, .editVideo:
strongSelf.openAvatarOptions()
break
}
}
@ -3886,15 +3895,15 @@ private final class PeerInfoScreenNode: ViewControllerTracingNode, UIScrollViewD
video = URL(fileURLWithPath: data.path)
}
presentLegacyAvatarEditor(theme: strongSelf.presentationData.theme, image: image, video: video, present: { [weak self] c, a in
if let strongSelf = self {
strongSelf.controller?.present(c, in: .window(.root), with: a, blockInteraction: true)
}
}, imageCompletion: { [weak self] image in
self?.updateProfilePhoto(image)
}, videoCompletion: { [weak self] image, url, adjustments in
self?.updateProfileVideo(image, url: url, adjustments: adjustments)
})
// presentLegacyAvatarEditor(theme: strongSelf.presentationData.theme, image: image, video: video, present: { [weak self] c, a in
// if let strongSelf = self {
// strongSelf.controller?.present(c, in: .window(.root), with: a, blockInteraction: true)
// }
// }, imageCompletion: { [weak self] image in
// self?.updateProfilePhoto(image)
// }, videoCompletion: { [weak self] image, url, adjustments in
// self?.updateProfileVideo(image, url: url, adjustments: adjustments)
// })
}
}))
}
@ -3911,11 +3920,13 @@ private final class PeerInfoScreenNode: ViewControllerTracingNode, UIScrollViewD
}
}
private func deleteAvatar(_ item: PeerInfoAvatarListItem) {
private func deleteAvatar(_ item: PeerInfoAvatarListItem, remove: Bool = true) {
if self.data?.peer?.id == self.context.account.peerId {
if case let .image(reference, _, _, _) = item {
if let reference = reference {
let _ = removeAccountPhoto(network: self.context.account.network, reference: reference).start()
if remove {
let _ = removeAccountPhoto(network: self.context.account.network, reference: reference).start()
}
let dismiss = self.headerNode.avatarListNode.listContainerNode.deleteItem(item)
if dismiss {
if self.headerNode.isAvatarExpanded {
@ -3954,59 +3965,6 @@ private final class PeerInfoScreenNode: ViewControllerTracingNode, UIScrollViewD
}
}
private func openAvatarOptions() {
let item = self.headerNode.avatarListNode.listContainerNode.currentItemNode?.item
let index = self.headerNode.avatarListNode.listContainerNode.currentIndex
let actionSheet = ActionSheetController(presentationData: self.presentationData)
let dismissAction: () -> Void = { [weak actionSheet] in
actionSheet?.dismissAnimated()
}
var items: [ActionSheetItem] = []
items.append( ActionSheetButtonItem(title: self.presentationData.strings.Settings_SetNewProfilePhotoOrVideo, color: .accent, action: { [weak self] in
dismissAction()
self?.openAvatarForEditing(hasRemove: false)
}))
if let item = item, case let .image(image) = item {
if index > 0 {
let setMainTitle: String
if image.2.isEmpty {
setMainTitle = self.presentationData.strings.ProfilePhoto_SetMainPhoto
} else {
setMainTitle = self.presentationData.strings.ProfilePhoto_SetMainVideo
}
items.append(ActionSheetButtonItem(title: setMainTitle, color: .accent, action: { [weak self] in
dismissAction()
self?.setMainAvatar(item)
}))
}
// items.append(ActionSheetButtonItem(title: self.presentationData.strings.ProfilePhoto_OpenInEditor, color: .accent, action: { [weak self] in
// dismissAction()
// self?.editAvatarItem(item)
// }))
let deleteTitle: String
if image.2.isEmpty {
deleteTitle = self.presentationData.strings.GroupInfo_SetGroupPhotoDelete
} else {
deleteTitle = self.presentationData.strings.Settings_RemoveVideo
}
items.append(ActionSheetButtonItem(title: deleteTitle, color: .destructive, action: { [weak self] in
dismissAction()
self?.deleteAvatar(item)
}))
}
actionSheet.setItemGroups([
ActionSheetItemGroup(items: items),
ActionSheetItemGroup(items: [ActionSheetButtonItem(title: presentationData.strings.Common_Cancel, action: { dismissAction() })])
])
self.view.endEditing(true)
self.controller?.present(actionSheet, in: .window(.root))
}
private func updateProfilePhoto(_ image: UIImage) {
guard let data = image.jpegData(compressionQuality: 0.6) else {
return
@ -4064,7 +4022,7 @@ private final class PeerInfoScreenNode: ViewControllerTracingNode, UIScrollViewD
}
}
private func updateProfileVideo(_ image: UIImage, url: URL, adjustments: TGVideoEditAdjustments?) {
private func updateProfileVideo(_ image: UIImage, asset: Any?, adjustments: TGVideoEditAdjustments?) {
guard let data = image.jpegData(compressionQuality: 0.6) else {
return
}
@ -4093,12 +4051,6 @@ private final class PeerInfoScreenNode: ViewControllerTracingNode, UIScrollViewD
let account = self.context.account
let signal = Signal<TelegramMediaResource, UploadPeerPhotoError> { [weak self] subscriber in
var filteredPath = url.path
if filteredPath.hasPrefix("file://") {
filteredPath = String(filteredPath[filteredPath.index(filteredPath.startIndex, offsetBy: "file://".count)])
}
let avAsset = AVURLAsset(url: URL(fileURLWithPath: filteredPath))
let entityRenderer: LegacyPaintEntityRenderer? = adjustments.flatMap { adjustments in
if let paintingData = adjustments.paintingData, paintingData.hasAnimation {
return LegacyPaintEntityRenderer(account: account, adjustments: adjustments)
@ -4107,7 +4059,31 @@ private final class PeerInfoScreenNode: ViewControllerTracingNode, UIScrollViewD
}
}
let uploadInterface = LegacyLiveUploadInterface(account: account)
let signal = TGMediaVideoConverter.convert(avAsset, adjustments: adjustments, watcher: uploadInterface, entityRenderer: entityRenderer)!
let signal: SSignal
if let asset = asset as? AVAsset {
signal = TGMediaVideoConverter.convert(asset, adjustments: adjustments, watcher: uploadInterface, entityRenderer: entityRenderer)!
} else if let url = asset as? URL, let data = try? Data(contentsOf: url, options: [.mappedRead]), let image = UIImage(data: data), let entityRenderer = entityRenderer {
let durationSignal: SSignal = SSignal(generator: { subscriber in
let disposable = (entityRenderer.duration()).start(next: { duration in
subscriber?.putNext(duration)
subscriber?.putCompletion()
})
return SBlockDisposable(block: {
disposable.dispose()
})
})
signal = durationSignal.map(toSignal: { duration -> SSignal? in
if let duration = duration as? Double {
return TGMediaVideoConverter.renderUIImage(image, duration: duration, adjustments: adjustments, watcher: uploadInterface, entityRenderer: entityRenderer)!
} else {
return SSignal.single(nil)
}
})
} else {
signal = SSignal.complete()
}
let signalDisposable = signal.start(next: { next in
if let result = next as? TGMediaVideoConversionResult {
@ -4255,10 +4231,10 @@ private final class PeerInfoScreenNode: ViewControllerTracingNode, UIScrollViewD
self?.updateProfilePhoto(image)
}
}
mixin.didFinishWithVideo = { [weak self] image, url, adjustments in
if let image = image, let url = url {
mixin.didFinishWithVideo = { [weak self] image, asset, adjustments in
if let image = image, let asset = asset {
completion()
self?.updateProfileVideo(image, url: url, adjustments: adjustments)
self?.updateProfileVideo(image, asset: asset, adjustments: adjustments)
}
}
mixin.didFinishWithDelete = {
@ -4267,7 +4243,7 @@ private final class PeerInfoScreenNode: ViewControllerTracingNode, UIScrollViewD
}
if let item = item {
strongSelf.deleteAvatar(item)
strongSelf.deleteAvatar(item, remove: false)
}
let _ = strongSelf.currentAvatarMixin.swap(nil)
@ -5059,19 +5035,21 @@ private final class PeerInfoScreenNode: ViewControllerTracingNode, UIScrollViewD
} else {
transition.updateFrame(node: self.headerNode, frame: headerFrame)
}
if !self.isMediaOnly {
contentHeight += headerHeight
if !self.isSettings {
contentHeight += sectionSpacing
}
} else {
if self.isMediaOnly {
contentHeight += navigationHeight
}
var validRegularSections: [AnyHashable] = []
if !self.isMediaOnly {
let items = self.isSettings ? settingsItems(data: self.data, context: self.context, presentationData: self.presentationData, interaction: self.interaction) : infoItems(data: self.data, context: self.context, presentationData: self.presentationData, interaction: self.interaction, nearbyPeerDistance: self.nearbyPeerDistance, callMessages: self.callMessages)
let items = self.isSettings ? settingsItems(data: self.data, context: self.context, presentationData: self.presentationData, interaction: self.interaction, isExpanded: self.headerNode.isAvatarExpanded) : infoItems(data: self.data, context: self.context, presentationData: self.presentationData, interaction: self.interaction, nearbyPeerDistance: self.nearbyPeerDistance, callMessages: self.callMessages)
contentHeight += headerHeight
if !self.isSettings {
contentHeight += sectionSpacing
} else if let (section, _) = items.first, let sectionValue = section.base as? SettingsSection, sectionValue != .edit && !self.state.isEditing {
contentHeight += sectionSpacing
}
for (sectionId, sectionItems) in items {
validRegularSections.append(sectionId)