Video avatar fixes

This commit is contained in:
Ilya Laktyushin 2020-07-01 04:43:39 +03:00
parent a2b82285db
commit cccb11a5d6
40 changed files with 4022 additions and 3922 deletions

View File

@ -5635,6 +5635,8 @@ Any member of this group will be able to see messages in the channel.";
"Settings.SetProfilePhotoOrVideo" = "Set Profile Photo or Video";
"Settings.SetNewProfilePhotoOrVideo" = "Set New Profile Photo or Video";
"Settings.ViewVideo" = "View Video";
"Settings.RemoveVideo" = "Remove Video";
"Conversation.Unarchive" = "Unarchive";
"Conversation.UnarchiveDone" = "The chat was moved to your main list.";

View File

@ -24,7 +24,7 @@ typedef void (^TGMediaAvatarPresentImpl)(id<LegacyComponentsContext>, void (^)(U
- (instancetype)initWithContext:(id<LegacyComponentsContext>)context parentController:(TGViewController *)parentController hasDeleteButton:(bool)hasDeleteButton saveEditedPhotos:(bool)saveEditedPhotos saveCapturedMedia:(bool)saveCapturedMedia;
- (instancetype)initWithContext:(id<LegacyComponentsContext>)context parentController:(TGViewController *)parentController hasDeleteButton:(bool)hasDeleteButton personalPhoto:(bool)personalPhoto saveEditedPhotos:(bool)saveEditedPhotos saveCapturedMedia:(bool)saveCapturedMedia;
- (instancetype)initWithContext:(id<LegacyComponentsContext>)context parentController:(TGViewController *)parentController hasSearchButton:(bool)hasSearchButton hasDeleteButton:(bool)hasDeleteButton hasViewButton:(bool)hasViewButton personalPhoto:(bool)personalPhoto saveEditedPhotos:(bool)saveEditedPhotos saveCapturedMedia:(bool)saveCapturedMedia signup:(bool)signup;
- (instancetype)initWithContext:(id<LegacyComponentsContext>)context parentController:(TGViewController *)parentController hasSearchButton:(bool)hasSearchButton hasDeleteButton:(bool)hasDeleteButton hasViewButton:(bool)hasViewButton personalPhoto:(bool)personalPhoto isVideo:(bool)isVideo saveEditedPhotos:(bool)saveEditedPhotos saveCapturedMedia:(bool)saveCapturedMedia signup:(bool)signup;
- (TGMenuSheetController *)present;
@end

View File

@ -4,8 +4,6 @@
@interface TGPhotoAvatarCropView : UIView
@property (nonatomic, readonly) PGPhotoEditorView *fullPreviewView;
@property (nonatomic, strong) UIImage *image;
@property (nonatomic, readonly) CGSize originalSize;
@ -22,7 +20,7 @@
@property (nonatomic, readonly) bool isTracking;
@property (nonatomic, readonly) bool isAnimating;
- (instancetype)initWithOriginalSize:(CGSize)originalSize screenSize:(CGSize)screenSize;
- (instancetype)initWithOriginalSize:(CGSize)originalSize screenSize:(CGSize)screenSize fullPreviewView:(PGPhotoEditorView *)fullPreviewView;
- (void)setSnapshotImage:(UIImage *)image;
- (void)setSnapshotView:(UIView *)snapshotView;
@ -35,8 +33,12 @@
- (void)animateTransitionIn;
- (void)animateTransitionOutSwitching:(bool)switching;
- (void)animateTransitionOut;
- (void)transitionInFinishedFromCamera:(bool)fromCamera;
- (void)closeCurtains;
- (void)openCurtains;
- (void)invalidateCropRect;
- (UIImage *)currentImage;

View File

@ -15,8 +15,7 @@ typedef NS_OPTIONS(NSUInteger, TGPhotoEditorTab) {
TGPhotoEditorAspectRatioTab = 1 << 10,
TGPhotoEditorTintTab = 1 << 11,
TGPhotoEditorBlurTab = 1 << 12,
TGPhotoEditorCurvesTab = 1 << 13,
TGPhotoEditorPreviewTab = 1 << 14
TGPhotoEditorCurvesTab = 1 << 13
};
typedef enum

View File

@ -15,6 +15,7 @@ typedef enum
TGMediaVideoConversionPresetVideoMessage,
TGMediaVideoConversionPresetProfile,
TGMediaVideoConversionPresetProfileHigh,
TGMediaVideoConversionPresetProfileVeryHigh,
TGMediaVideoConversionPresetPassthrough
} TGMediaVideoConversionPreset;

View File

@ -169,6 +169,25 @@
return MAX(_originalSize.width, _originalSize.height) * 0.005f;
}
// Converts a crop rect expressed in pixel coordinates of the original image
// into the normalized [0.0, 1.0] coordinate space that GPUImageCropFilter's
// cropRegion expects. Each component is clamped to [0, 1] so the region can
// never exceed the image bounds, even for an out-of-range input rect.
// Note: assumes _originalSize has non-zero width and height — TODO confirm
// callers guarantee this before the first crop is applied.
- (CGRect)normalizedCropRect:(CGRect)cropRect
{
return CGRectMake(MAX(0.0, MIN(1.0, cropRect.origin.x / _originalSize.width)),
                  MAX(0.0, MIN(1.0, cropRect.origin.y / _originalSize.height)),
                  MAX(0.0, MIN(1.0, cropRect.size.width / _originalSize.width)),
                  MAX(0.0, MIN(1.0, cropRect.size.height / _originalSize.height)));
}
// Stores the new crop rect (in original-image pixel coordinates) and pushes
// its normalized equivalent straight into the active crop filter. Messaging
// a nil _cropFilter is a harmless no-op, so no guard is needed here.
- (void)setCropRect:(CGRect)cropRect
{
_cropRect = cropRect;
_cropFilter.cropRegion = [self normalizedCropRect:cropRect];
}
// Records the crop orientation only; unlike setCropRect: this does not
// reconfigure any filter — the orientation is consumed later when the
// processing chain is (re)built.
- (void)setCropOrientation:(UIImageOrientation)cropOrientation
{
_cropOrientation = cropOrientation;
}
- (void)setPlayerItem:(AVPlayerItem *)playerItem forCropRect:(CGRect)cropRect cropRotation:(CGFloat)cropRotation cropOrientation:(UIImageOrientation)cropOrientation cropMirrored:(bool)cropMirrored {
[_toolComposer invalidate];
_currentProcessChain = nil;
@ -182,11 +201,10 @@
_rotationMode = kGPUImageNoRotation;
if (cropOrientation != UIImageOrientationUp || cropMirrored || hasCropping) {
CGRect normalizedCropRect = CGRectMake(0.0f, 0.0f, 1.0f, 1.0f);
if (hasCropping) {
normalizedCropRect = CGRectMake(MAX(0.0, MIN(1.0, cropRect.origin.x / _originalSize.width)), MAX(0.0, MIN(1.0, cropRect.origin.y / _originalSize.height)), MAX(0.0, MIN(1.0, cropRect.size.width / _originalSize.width)), MAX(0.0, MIN(1.0, cropRect.size.height / _originalSize.height)));
}
_cropFilter = [[GPUImageCropFilter alloc] initWithCropRegion:normalizedCropRect];
if (_cropFilter == nil)
_cropFilter = [[GPUImageCropFilter alloc] initWithCropRegion:[self normalizedCropRect:cropRect]];
else
_cropFilter.cropRegion = [self normalizedCropRect:cropRect];
if (cropOrientation != UIImageOrientationUp || cropMirrored) {
switch (cropOrientation) {
case UIImageOrientationLeft:
@ -411,15 +429,17 @@
}
_finalFilter = lastFilter;
if (_cropFilter != nil && self.cropOnLast) {
if (self.cropOnLast) {
if (_cropFilter == nil)
_cropFilter = [[GPUImageCropFilter alloc] initWithCropRegion:[self normalizedCropRect:_cropRect]];
for (PGPhotoEditorView *view in _additionalOutputs) {
[_finalFilter addTarget:view];
}
[_finalFilter addTarget:_cropFilter];
_finalFilter = _cropFilter;
if (previewOutput != nil) {
[_finalFilter addTarget:previewOutput.imageView];
[_cropFilter addTarget:previewOutput.imageView];
}
} else {
if (previewOutput != nil) {
@ -442,15 +462,29 @@
if (_finalFilter == nil)
return;
[_cropFilter removeAllTargets];
[_finalFilter removeAllTargets];
if (self.previewOutput != nil) {
[_finalFilter addTarget:self.previewOutput.imageView];
}
for (PGPhotoEditorView *view in _additionalOutputs) {
[_finalFilter addTarget:view];
if (self.cropOnLast) {
for (PGPhotoEditorView *view in _additionalOutputs) {
[_finalFilter addTarget:view];
}
[_finalFilter addTarget:_cropFilter];
if (self.previewOutput != nil) {
[_cropFilter addTarget:self.previewOutput.imageView];
}
} else {
for (PGPhotoEditorView *view in _additionalOutputs) {
[_finalFilter addTarget:view];
}
if (self.previewOutput != nil) {
[_finalFilter addTarget:self.previewOutput.imageView];
}
}
if (_histogramGenerator != nil && !self.standalone) {
[_finalFilter addTarget:_histogramGenerator];

View File

@ -979,7 +979,7 @@ const NSUInteger TGAttachmentDisplayedAssetLimit = 500;
[transition dismissAnimated:true completion:^
{
strongSelf->_hiddenItem = nil;
[strongSelf updateHiddenCellAnimated:false];
[strongSelf updateHiddenCellAnimated:true];
dispatch_async(dispatch_get_main_queue(), ^
{

View File

@ -27,6 +27,7 @@
bool _saveCapturedMedia;
bool _saveEditedPhotos;
bool _signup;
bool _isVideo;
}
@end
@ -39,10 +40,10 @@
- (instancetype)initWithContext:(id<LegacyComponentsContext>)context parentController:(TGViewController *)parentController hasDeleteButton:(bool)hasDeleteButton personalPhoto:(bool)personalPhoto saveEditedPhotos:(bool)saveEditedPhotos saveCapturedMedia:(bool)saveCapturedMedia
{
return [self initWithContext:context parentController:parentController hasSearchButton:false hasDeleteButton:hasDeleteButton hasViewButton:false personalPhoto:personalPhoto saveEditedPhotos:saveEditedPhotos saveCapturedMedia:saveCapturedMedia signup:false];
return [self initWithContext:context parentController:parentController hasSearchButton:false hasDeleteButton:hasDeleteButton hasViewButton:false personalPhoto:personalPhoto isVideo:false saveEditedPhotos:saveEditedPhotos saveCapturedMedia:saveCapturedMedia signup:false];
}
- (instancetype)initWithContext:(id<LegacyComponentsContext>)context parentController:(TGViewController *)parentController hasSearchButton:(bool)hasSearchButton hasDeleteButton:(bool)hasDeleteButton hasViewButton:(bool)hasViewButton personalPhoto:(bool)personalPhoto saveEditedPhotos:(bool)saveEditedPhotos saveCapturedMedia:(bool)saveCapturedMedia signup:(bool)signup
- (instancetype)initWithContext:(id<LegacyComponentsContext>)context parentController:(TGViewController *)parentController hasSearchButton:(bool)hasSearchButton hasDeleteButton:(bool)hasDeleteButton hasViewButton:(bool)hasViewButton personalPhoto:(bool)personalPhoto isVideo:(bool)isVideo saveEditedPhotos:(bool)saveEditedPhotos saveCapturedMedia:(bool)saveCapturedMedia signup:(bool)signup
{
self = [super init];
if (self != nil)
@ -55,6 +56,7 @@
_hasDeleteButton = hasDeleteButton;
_hasViewButton = hasViewButton;
_personalPhoto = ![TGCameraController useLegacyCamera] ? personalPhoto : false;
_isVideo = isVideo;
_signup = signup;
}
return self;
@ -179,7 +181,7 @@
if (_hasViewButton)
{
TGMenuSheetButtonItemView *viewItem = [[TGMenuSheetButtonItemView alloc] initWithTitle:TGLocalized(@"Settings.ViewPhoto") type:TGMenuSheetButtonTypeDefault fontSize:20.0 action:^
TGMenuSheetButtonItemView *viewItem = [[TGMenuSheetButtonItemView alloc] initWithTitle:_isVideo ? TGLocalized(@"Settings.ViewVideo") : TGLocalized(@"Settings.ViewPhoto") type:TGMenuSheetButtonTypeDefault fontSize:20.0 action:^
{
__strong TGMediaAvatarMenuMixin *strongSelf = weakSelf;
if (strongSelf == nil)
@ -197,7 +199,7 @@
if (_hasDeleteButton)
{
TGMenuSheetButtonItemView *deleteItem = [[TGMenuSheetButtonItemView alloc] initWithTitle:TGLocalized(@"GroupInfo.SetGroupPhotoDelete") type:TGMenuSheetButtonTypeDestructive fontSize:20.0 action:^
TGMenuSheetButtonItemView *deleteItem = [[TGMenuSheetButtonItemView alloc] initWithTitle:_isVideo ? TGLocalized(@"Settings.RemoveVideo") : TGLocalized(@"GroupInfo.SetGroupPhotoDelete") type:TGMenuSheetButtonTypeDestructive fontSize:20.0 action:^
{
__strong TGMediaAvatarMenuMixin *strongSelf = weakSelf;
if (strongSelf == nil)

View File

@ -158,11 +158,7 @@ typedef enum
[strongSelf->_trimView setTrimming:true animated:true];
if (strongSelf->_hasDotPicker) {
[strongSelf setDotHandleHidden:true animated:false];
} else {
[strongSelf setScrubberHandleHidden:true animated:false];
}
[strongSelf setScrubberHandleHidden:true animated:false];
};
_trimView.didEndEditing = ^
{
@ -210,11 +206,7 @@ typedef enum
[strongSelf->_trimView setTrimming:isTrimmed animated:true];
if (strongSelf->_hasDotPicker) {
[strongSelf setDotHandleHidden:false animated:true];
} else {
[strongSelf setScrubberHandleHidden:false animated:true];
}
[strongSelf setScrubberHandleHidden:false animated:true];
[strongSelf cancelZoomIn];
if (strongSelf->_zoomedIn)
@ -262,13 +254,8 @@ typedef enum
strongSelf->_trimStartValue = trimStartPosition;
strongSelf->_trimEndValue = trimEndPosition;
if (strongSelf->_hasDotPicker) {
if (strongSelf->_value < trimStartPosition) {
strongSelf->_value = trimStartPosition;
}
} else {
[strongSelf setValue:trimStartPosition];
}
[strongSelf setValue:trimStartPosition];
UIView *handle = strongSelf->_scrubberHandle;
handle.center = CGPointMake(trimView.frame.origin.x + 12 + handle.frame.size.width / 2, handle.center.y);
@ -333,13 +320,7 @@ typedef enum
strongSelf->_trimStartValue = trimStartPosition;
strongSelf->_trimEndValue = trimEndPosition;
if (strongSelf->_hasDotPicker) {
if (strongSelf->_value > trimEndPosition) {
strongSelf->_value = trimEndPosition;
}
} else {
[strongSelf setValue:trimEndPosition];
}
[strongSelf setValue:trimEndPosition];
UIView *handle = strongSelf->_scrubberHandle;
handle.center = CGPointMake(CGRectGetMaxX(trimView.frame) - 12 - handle.frame.size.width / 2, handle.center.y);
@ -464,8 +445,6 @@ typedef enum
- (void)setHasDotPicker:(bool)hasDotPicker {
_hasDotPicker = hasDotPicker;
_dotHandle.hidden = !hasDotPicker;
_scrubberHandle.hidden = true;
_tapGestureRecognizer.enabled = hasDotPicker;
}
@ -690,12 +669,7 @@ typedef enum
frameAspectRatio = originalAspectRatio;
_thumbnailAspectRatio = frameAspectRatio;
if (_hasDotPicker) {
CGSize videoSize = TGFillSize([self _thumbnailSize], _dotImageView.frame.size);
_dotImageView.frame = CGRectMake(TGScreenPixelFloor((_dotContentView.frame.size.width - videoSize.width) / 2.0), 0.0, videoSize.width, videoSize.height);
}
NSInteger thumbnailCount = (NSInteger)CGCeil(_summaryThumbnailWrapperView.frame.size.width / [self _thumbnailSizeWithAspectRatio:frameAspectRatio orientation:_cropOrientation].width);
if ([dataSource respondsToSelector:@selector(videoScrubber:evenlySpacedTimestamps:startingAt:endingAt:)])

View File

@ -125,7 +125,7 @@
CGSize dimensions = [avAsset tracksWithMediaType:AVMediaTypeVideo].firstObject.naturalSize;
TGMediaVideoConversionPreset preset = adjustments.sendAsGif ? TGMediaVideoConversionPresetAnimation : [self presetFromAdjustments:adjustments];
if (!CGSizeEqualToSize(dimensions, CGSizeZero) && preset != TGMediaVideoConversionPresetAnimation && preset != TGMediaVideoConversionPresetVideoMessage && preset != TGMediaVideoConversionPresetProfile && preset != TGMediaVideoConversionPresetProfileHigh && preset != TGMediaVideoConversionPresetPassthrough)
if (!CGSizeEqualToSize(dimensions, CGSizeZero) && preset != TGMediaVideoConversionPresetAnimation && preset != TGMediaVideoConversionPresetVideoMessage && preset != TGMediaVideoConversionPresetProfile && preset != TGMediaVideoConversionPresetProfileHigh && preset != TGMediaVideoConversionPresetProfileVeryHigh && preset != TGMediaVideoConversionPresetPassthrough)
{
TGMediaVideoConversionPreset bestPreset = [self bestAvailablePresetForDimensions:dimensions];
if (preset > bestPreset)
@ -169,7 +169,13 @@
[self processWithConversionContext:context completionBlock:^
{
TGMediaVideoConversionContext *resultContext = context.value;
[resultContext.imageGenerator generateCGImagesAsynchronouslyForTimes:@[ [NSValue valueWithCMTime:kCMTimeZero] ] completionHandler:^(__unused CMTime requestedTime, CGImageRef _Nullable image, __unused CMTime actualTime, AVAssetImageGeneratorResult result, __unused NSError * _Nullable error)
NSTimeInterval videoStartValue = 0.0;
if (adjustments.videoStartValue > 0.0) {
videoStartValue = adjustments.videoStartValue - adjustments.trimStartValue;
}
[resultContext.imageGenerator generateCGImagesAsynchronouslyForTimes:@[ [NSValue valueWithCMTime:CMTimeMakeWithSeconds(videoStartValue, NSEC_PER_SEC)] ] completionHandler:^(__unused CMTime requestedTime, CGImageRef _Nullable image, __unused CMTime actualTime, AVAssetImageGeneratorResult result, __unused NSError * _Nullable error)
{
UIImage *coverImage = nil;
if (result == AVAssetImageGeneratorSucceeded)
@ -183,7 +189,6 @@
liveUploadData = [watcher fileUpdated:true];
NSUInteger fileSize = [[[NSFileManager defaultManager] attributesOfItemAtPath:outputUrl.path error:nil] fileSize];
contextResult = [TGMediaVideoConversionResult resultWithFileURL:outputUrl fileSize:fileSize duration:CMTimeGetSeconds(resultContext.timeRange.duration) dimensions:resultContext.dimensions coverImage:coverImage liveUploadData:liveUploadData];
return [resultContext finishedContext];
}];
@ -347,23 +352,8 @@
outputDimensions = CGSizeMake(outputDimensions.height, outputDimensions.width);
AVMutableCompositionTrack *compositionTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
if (adjustments.videoStartValue > 0.0 && adjustments.videoStartValue > adjustments.trimStartValue) {
NSTimeInterval trimEndValue = adjustments.trimEndValue > adjustments.trimStartValue ? adjustments.trimEndValue : CMTimeGetSeconds(videoTrack.timeRange.duration);
CMTimeRange firstRange = CMTimeRangeMake(CMTimeMakeWithSeconds(adjustments.videoStartValue, NSEC_PER_SEC), CMTimeMakeWithSeconds(trimEndValue - adjustments.videoStartValue, NSEC_PER_SEC));
NSError *error;
[compositionTrack insertTimeRange:firstRange ofTrack:videoTrack atTime:kCMTimeZero error:&error];
NSLog(@"");
[compositionTrack insertTimeRange:CMTimeRangeMake(CMTimeMakeWithSeconds(adjustments.trimStartValue, NSEC_PER_SEC), CMTimeMakeWithSeconds(adjustments.videoStartValue - adjustments.trimStartValue, NSEC_PER_SEC)) ofTrack:videoTrack atTime:firstRange.duration error:&error];
NSLog(@"");
// instructionTimeRange = CMTimeRangeMake(kCMTimeZero, );
} else {
[compositionTrack insertTimeRange:timeRange ofTrack:videoTrack atTime:kCMTimeZero error:NULL];
}
[compositionTrack insertTimeRange:timeRange ofTrack:videoTrack atTime:kCMTimeZero error:NULL];
CMTime frameDuration = CMTimeMake(1, 30);
if (videoTrack.nominalFrameRate > 0)
frameDuration = CMTimeMake(1, (int32_t)videoTrack.nominalFrameRate);
@ -534,6 +524,7 @@
output.videoComposition = videoComposition;
AVAssetImageGenerator *imageGenerator = [[AVAssetImageGenerator alloc] initWithAsset:composition];
imageGenerator.appliesPreferredTrackTransform = true;
imageGenerator.videoComposition = videoComposition;
imageGenerator.maximumSize = maxDimensions;
imageGenerator.requestedTimeToleranceBefore = kCMTimeZero;
@ -1258,6 +1249,7 @@ static CGFloat progressOfSampleBufferInTimeRange(CMSampleBufferRef sampleBuffer,
case TGMediaVideoConversionPresetProfile:
case TGMediaVideoConversionPresetProfileHigh:
case TGMediaVideoConversionPresetProfileVeryHigh:
return (CGSize){ 800.0f, 800.0f };
default:
@ -1267,7 +1259,7 @@ static CGFloat progressOfSampleBufferInTimeRange(CMSampleBufferRef sampleBuffer,
+ (bool)keepAudioForPreset:(TGMediaVideoConversionPreset)preset
{
return preset != TGMediaVideoConversionPresetAnimation && preset != TGMediaVideoConversionPresetProfile && preset != TGMediaVideoConversionPresetProfileHigh;
return preset != TGMediaVideoConversionPresetAnimation && preset != TGMediaVideoConversionPresetProfile && preset != TGMediaVideoConversionPresetProfileHigh && preset != TGMediaVideoConversionPresetProfileVeryHigh;
}
+ (NSDictionary *)audioSettingsForPreset:(TGMediaVideoConversionPreset)preset
@ -1344,11 +1336,14 @@ static CGFloat progressOfSampleBufferInTimeRange(CMSampleBufferRef sampleBuffer,
return 300;
case TGMediaVideoConversionPresetProfile:
return 1400;
return 1500;
case TGMediaVideoConversionPresetProfileHigh:
return 2000;
case TGMediaVideoConversionPresetProfileVeryHigh:
return 2500;
default:
return 900;
}
@ -1379,6 +1374,7 @@ static CGFloat progressOfSampleBufferInTimeRange(CMSampleBufferRef sampleBuffer,
case TGMediaVideoConversionPresetAnimation:
case TGMediaVideoConversionPresetProfile:
case TGMediaVideoConversionPresetProfileHigh:
case TGMediaVideoConversionPresetProfileVeryHigh:
return 0;
default:
@ -1408,6 +1404,7 @@ static CGFloat progressOfSampleBufferInTimeRange(CMSampleBufferRef sampleBuffer,
case TGMediaVideoConversionPresetAnimation:
case TGMediaVideoConversionPresetProfile:
case TGMediaVideoConversionPresetProfileHigh:
case TGMediaVideoConversionPresetProfileVeryHigh:
return 0;
default:

View File

@ -28,6 +28,7 @@ const CGFloat TGPhotoAvatarCropButtonsWrapperSize = 61.0f;
TGModernButton *_resetButton;
TGPhotoAvatarCropView *_cropView;
UIView *_snapshotView;
UIImage *_snapshotImage;
@ -87,7 +88,7 @@ const CGFloat TGPhotoAvatarCropButtonsWrapperSize = 61.0f;
[self.view addSubview:_wrapperView];
PGPhotoEditor *photoEditor = self.photoEditor;
_cropView = [[TGPhotoAvatarCropView alloc] initWithOriginalSize:photoEditor.originalSize screenSize:[self referenceViewSize]];
_cropView = [[TGPhotoAvatarCropView alloc] initWithOriginalSize:photoEditor.originalSize screenSize:[self referenceViewSize] fullPreviewView:nil];
[_cropView setCropRect:photoEditor.cropRect];
[_cropView setCropOrientation:photoEditor.cropOrientation];
[_cropView setCropMirrored:photoEditor.cropMirrored];
@ -349,14 +350,14 @@ const CGFloat TGPhotoAvatarCropButtonsWrapperSize = 61.0f;
CGRect referenceBounds = CGRectMake(0, 0, referenceSize.width, referenceSize.height);
CGRect containerFrame = [TGPhotoEditorTabController photoContainerFrameForParentViewFrame:referenceBounds toolbarLandscapeSize:self.toolbarLandscapeSize orientation:orientation panelSize:TGPhotoEditorPanelSize hasOnScreenNavigation:self.hasOnScreenNavigation];
if (self.switchingToTab == TGPhotoEditorPreviewTab)
{
containerFrame = [TGPhotoEditorTabController photoContainerFrameForParentViewFrame:referenceBounds toolbarLandscapeSize:self.toolbarLandscapeSize orientation:orientation panelSize:0 hasOnScreenNavigation:self.hasOnScreenNavigation];
}
else if (self.switchingToTab == TGPhotoEditorPaintTab)
{
containerFrame = [TGPhotoPaintController photoContainerFrameForParentViewFrame:referenceBounds toolbarLandscapeSize:self.toolbarLandscapeSize orientation:orientation panelSize:TGPhotoPaintTopPanelSize + TGPhotoPaintBottomPanelSize hasOnScreenNavigation:self.hasOnScreenNavigation];
}
// if (self.switchingToTab == TGPhotoEditorPreviewTab)
// {
// containerFrame = [TGPhotoEditorTabController photoContainerFrameForParentViewFrame:referenceBounds toolbarLandscapeSize:self.toolbarLandscapeSize orientation:orientation panelSize:0 hasOnScreenNavigation:self.hasOnScreenNavigation];
// }
// else if (self.switchingToTab == TGPhotoEditorPaintTab)
// {
// containerFrame = [TGPhotoPaintController photoContainerFrameForParentViewFrame:referenceBounds toolbarLandscapeSize:self.toolbarLandscapeSize orientation:orientation panelSize:TGPhotoPaintTopPanelSize + TGPhotoPaintBottomPanelSize hasOnScreenNavigation:self.hasOnScreenNavigation];
// }
CGSize fittedSize = TGScaleToSize(cropRectFrame.size, containerFrame.size);
CGRect targetFrame = CGRectMake(containerFrame.origin.x + (containerFrame.size.width - fittedSize.width) / 2,
@ -478,11 +479,7 @@ const CGFloat TGPhotoAvatarCropButtonsWrapperSize = 61.0f;
CGSize referenceSize = [self referenceViewSize];
UIInterfaceOrientation orientation = self.effectiveOrientation;
bool hasOnScreenNavigation = false;
if (iosMajorVersion() >= 11)
hasOnScreenNavigation = (self.viewLoaded && self.view.safeAreaInsets.bottom > FLT_EPSILON) || self.context.safeAreaInset.bottom > FLT_EPSILON;
CGRect containerFrame = [TGPhotoEditorTabController photoContainerFrameForParentViewFrame:CGRectMake(0, 0, referenceSize.width, referenceSize.height) toolbarLandscapeSize:self.toolbarLandscapeSize orientation:orientation panelSize:0.0f hasOnScreenNavigation:hasOnScreenNavigation];
CGRect containerFrame = [TGPhotoEditorTabController photoContainerFrameForParentViewFrame:CGRectMake(0, 0, referenceSize.width, referenceSize.height) toolbarLandscapeSize:self.toolbarLandscapeSize orientation:orientation panelSize:0.0f hasOnScreenNavigation:self.hasOnScreenNavigation];
CGRect targetFrame = CGRectZero;

View File

@ -11,6 +11,8 @@
#import "PGPhotoEditorView.h"
const CGFloat TGPhotoAvatarCropViewOverscreenSize = 1000;
const CGFloat TGPhotoAvatarCropViewCurtainSize = 300;
const CGFloat TGPhotoAvatarCropViewCurtainMargin = 200;
@interface TGPhotoAvatarCropView () <UIScrollViewDelegate>
{
@ -28,17 +30,22 @@ const CGFloat TGPhotoAvatarCropViewOverscreenSize = 1000;
UIView *_leftOverlayView;
UIView *_rightOverlayView;
UIView *_bottomOverlayView;
UIView *_topCurtainView;
UIView *_bottomCurtainView;
UIImageView *_areaMaskView;
bool _imageReloadingNeeded;
CGFloat _currentDiameter;
PGPhotoEditorView *_fullPreviewView;
}
@end
@implementation TGPhotoAvatarCropView
- (instancetype)initWithOriginalSize:(CGSize)originalSize screenSize:(CGSize)screenSize
- (instancetype)initWithOriginalSize:(CGSize)originalSize screenSize:(CGSize)screenSize fullPreviewView:(PGPhotoEditorView *)fullPreviewView
{
self = [super initWithFrame:CGRectZero];
if (self != nil)
@ -67,10 +74,22 @@ const CGFloat TGPhotoAvatarCropViewOverscreenSize = 1000;
_imageView.userInteractionEnabled = false;
[_wrapperView addSubview:_imageView];
_fullPreviewView = [[PGPhotoEditorView alloc] initWithFrame:_imageView.frame];
_fullPreviewView = fullPreviewView;
_fullPreviewView.center = _imageView.center;
CGSize fittedSize = TGScaleToSize(_originalSize, CGSizeMake(1024, 1024));
CGFloat scale = _imageView.bounds.size.width / fittedSize.width;
_fullPreviewView.transform = CGAffineTransformMakeScale(self.cropMirrored ? -scale : scale, scale);
_fullPreviewView.userInteractionEnabled = false;
[_wrapperView addSubview:_fullPreviewView];
_topCurtainView = [[UIView alloc] initWithFrame:CGRectZero];
_topCurtainView.backgroundColor = [UIColor blackColor];
[self addSubview:_topCurtainView];
_bottomCurtainView = [[UIView alloc] initWithFrame:CGRectZero];
_bottomCurtainView.backgroundColor = [UIColor blackColor];
[self addSubview:_bottomCurtainView];
_topOverlayView = [[UIView alloc] initWithFrame:CGRectZero];
_topOverlayView.backgroundColor = [TGPhotoEditorInterfaceAssets cropTransparentOverlayColor];
_topOverlayView.userInteractionEnabled = false;
@ -336,18 +355,24 @@ const CGFloat TGPhotoAvatarCropViewOverscreenSize = 1000;
- (void)scrollViewDidEndDragging:(UIScrollView *)__unused scrollView willDecelerate:(BOOL)decelerate
{
if (!decelerate)
if (!decelerate) {
[self scrollViewDidEndDecelerating:scrollView];
if (self.croppingChanged != nil)
self.croppingChanged();
}
}
- (void)scrollViewDidEndDecelerating:(UIScrollView *)__unused scrollView
- (void)scrollViewDidEndDecelerating:(UIScrollView *)scrollView
{
_isAnimating = false;
[self _updateCropRect];
if (self.croppingChanged != nil)
self.croppingChanged();
if (!scrollView.isTracking) {
if (self.croppingChanged != nil)
self.croppingChanged();
}
[self reloadImageIfNeeded];
@ -410,7 +435,10 @@ const CGFloat TGPhotoAvatarCropViewOverscreenSize = 1000;
{
_cropMirrored = cropMirrored;
_imageView.transform = CGAffineTransformMakeScale(self.cropMirrored ? -1.0f : 1.0f, 1.0f);
_fullPreviewView.transform = _imageView.transform;
CGSize fittedSize = TGScaleToSize(_originalSize, CGSizeMake(1024, 1024));
CGFloat scale = _imageView.bounds.size.width / fittedSize.width;
_fullPreviewView.transform = CGAffineTransformMakeScale(self.cropMirrored ? -scale : scale, scale);
}
- (void)invalidateCropRect
@ -513,6 +541,18 @@ const CGFloat TGPhotoAvatarCropViewOverscreenSize = 1000;
}];
}
// Fades out all four crop-dimming overlays and the circular area mask as
// part of the view's exit transition. The views are only hidden via alpha,
// not removed, so a subsequent transition-in can restore them.
- (void)animateTransitionOut
{
[UIView animateWithDuration:0.2f animations:^
{
_topOverlayView.alpha = 0.0f;
_leftOverlayView.alpha = 0.0f;
_rightOverlayView.alpha = 0.0f;
_bottomOverlayView.alpha = 0.0f;
_areaMaskView.alpha = 0.0f;
}];
}
- (void)hideImageForCustomTransition
{
_scrollView.hidden = true;
@ -522,10 +562,10 @@ const CGFloat TGPhotoAvatarCropViewOverscreenSize = 1000;
- (void)_layoutOverlayViews
{
CGRect topOverlayFrame = CGRectMake(0, -TGPhotoAvatarCropViewOverscreenSize, self.frame.size.width, TGPhotoAvatarCropViewOverscreenSize);
CGRect leftOverlayFrame = CGRectMake(-TGPhotoAvatarCropViewOverscreenSize, -TGPhotoAvatarCropViewOverscreenSize, TGPhotoAvatarCropViewOverscreenSize, self.frame.size.height + 2 * TGPhotoAvatarCropViewOverscreenSize);
CGRect rightOverlayFrame = CGRectMake(self.frame.size.width, -TGPhotoAvatarCropViewOverscreenSize, TGPhotoAvatarCropViewOverscreenSize, self.frame.size.height + 2 * TGPhotoAvatarCropViewOverscreenSize);
CGRect bottomOverlayFrame = CGRectMake(0, self.frame.size.height, self.frame.size.width, TGPhotoAvatarCropViewOverscreenSize);
CGRect topOverlayFrame = CGRectMake(0, -TGPhotoAvatarCropViewOverscreenSize, self.bounds.size.width, TGPhotoAvatarCropViewOverscreenSize);
CGRect leftOverlayFrame = CGRectMake(-TGPhotoAvatarCropViewOverscreenSize, -TGPhotoAvatarCropViewOverscreenSize, TGPhotoAvatarCropViewOverscreenSize, self.bounds.size.height + 2 * TGPhotoAvatarCropViewOverscreenSize);
CGRect rightOverlayFrame = CGRectMake(self.bounds.size.width, -TGPhotoAvatarCropViewOverscreenSize, TGPhotoAvatarCropViewOverscreenSize, self.bounds.size.height + 2 * TGPhotoAvatarCropViewOverscreenSize);
CGRect bottomOverlayFrame = CGRectMake(0, self.bounds.size.height, self.bounds.size.width, TGPhotoAvatarCropViewOverscreenSize);
_topOverlayView.frame = topOverlayFrame;
_leftOverlayView.frame = leftOverlayFrame;
@ -533,6 +573,40 @@ const CGFloat TGPhotoAvatarCropViewOverscreenSize = 1000;
_bottomOverlayView.frame = bottomOverlayFrame;
}
// Animates the black "curtain" bars growing inward from just beyond the top
// and bottom edges until each is TGPhotoAvatarCropViewCurtainSize tall,
// covering the area above and below the crop region. Start frames are 1pt
// tall (not 0) so the views have valid geometry before the ease-out
// animation begins; the horizontal margin overhangs both sides so the bars
// still cover the view during rotation/resizing.
- (void)closeCurtains {
CGRect topFrame = CGRectMake(-TGPhotoAvatarCropViewCurtainMargin, -TGPhotoAvatarCropViewCurtainSize, self.bounds.size.width + TGPhotoAvatarCropViewCurtainMargin * 2.0, 1.0);
CGRect bottomFrame = CGRectMake(-TGPhotoAvatarCropViewCurtainMargin, self.bounds.size.height + TGPhotoAvatarCropViewCurtainSize, self.bounds.size.width + TGPhotoAvatarCropViewCurtainMargin * 2.0, 1.0);
_topCurtainView.frame = topFrame;
_bottomCurtainView.frame = bottomFrame;
[UIView animateWithDuration:0.3f delay:0.0f options:UIViewAnimationOptionCurveEaseOut animations:^
{
CGRect topFrame = CGRectMake(-TGPhotoAvatarCropViewCurtainMargin, -TGPhotoAvatarCropViewCurtainSize, self.bounds.size.width + TGPhotoAvatarCropViewCurtainMargin * 2.0, TGPhotoAvatarCropViewCurtainSize);
CGRect bottomFrame = CGRectMake(-TGPhotoAvatarCropViewCurtainMargin, self.bounds.size.height, self.bounds.size.width + TGPhotoAvatarCropViewCurtainMargin * 2.0, TGPhotoAvatarCropViewCurtainSize);
_topCurtainView.frame = topFrame;
_bottomCurtainView.frame = bottomFrame;
} completion:nil];
}
// Reverse of -closeCurtains: first snaps the curtain bars to their fully
// closed frames (in case close was interrupted mid-animation), then
// ease-in animates them back out to 1pt-tall slivers beyond the top and
// bottom edges, revealing the content again.
- (void)openCurtains {
CGRect topFrame = CGRectMake(-TGPhotoAvatarCropViewCurtainMargin, -TGPhotoAvatarCropViewCurtainSize, self.bounds.size.width + TGPhotoAvatarCropViewCurtainMargin * 2.0, TGPhotoAvatarCropViewCurtainSize);
CGRect bottomFrame = CGRectMake(-TGPhotoAvatarCropViewCurtainMargin, self.bounds.size.height, self.bounds.size.width + TGPhotoAvatarCropViewCurtainMargin * 2.0, TGPhotoAvatarCropViewCurtainSize);
_topCurtainView.frame = topFrame;
_bottomCurtainView.frame = bottomFrame;
[UIView animateWithDuration:0.3f delay:0.0f options:UIViewAnimationOptionCurveEaseIn animations:^
{
CGRect topFrame = CGRectMake(-TGPhotoAvatarCropViewCurtainMargin, -TGPhotoAvatarCropViewCurtainSize, self.bounds.size.width + TGPhotoAvatarCropViewCurtainMargin * 2.0, 1.0);
CGRect bottomFrame = CGRectMake(-TGPhotoAvatarCropViewCurtainMargin, self.bounds.size.height + TGPhotoAvatarCropViewCurtainSize, self.bounds.size.width + TGPhotoAvatarCropViewCurtainMargin * 2.0, 1.0);
_topCurtainView.frame = topFrame;
_bottomCurtainView.frame = bottomFrame;
} completion:nil];
}
- (void)layoutSubviews
{
[self _layoutOverlayViews];

View File

@ -3,6 +3,7 @@
@class PGPhotoEditor;
@class PGPhotoTool;
@class TGPhotoEditorPreviewView;
@class PGPhotoEditorView;
@class TGMediaPickerGalleryVideoScrubber;
@interface TGPhotoAvatarPreviewController : TGPhotoEditorTabController
@ -14,7 +15,7 @@
@property (nonatomic, copy) void (^croppingChanged)(void);
@property (nonatomic, copy) void (^togglePlayback)(void);
- (instancetype)initWithContext:(id<LegacyComponentsContext>)context photoEditor:(PGPhotoEditor *)photoEditor previewView:(TGPhotoEditorPreviewView *)previewView scrubberView:(TGMediaPickerGalleryVideoScrubber *)scrubberView dotImageView:(UIView *)dotImageView;
- (instancetype)initWithContext:(id<LegacyComponentsContext>)context photoEditor:(PGPhotoEditor *)photoEditor previewView:(TGPhotoEditorPreviewView *)previewView scrubberView:(TGMediaPickerGalleryVideoScrubber *)scrubberView dotImageView:(UIView *)dotImageView fullPreviewView:(PGPhotoEditorView *)fullPreviewView;
- (void)setImage:(UIImage *)image;
- (void)setSnapshotImage:(UIImage *)snapshotImage;

View File

@ -17,6 +17,8 @@
#import "TGMediaPickerGalleryVideoScrubber.h"
#import "TGModernGalleryVideoView.h"
#import "TGPhotoPaintController.h"
const CGFloat TGPhotoAvatarPreviewPanelSize = 96.0f;
const CGFloat TGPhotoAvatarPreviewLandscapePanelSize = TGPhotoAvatarPreviewPanelSize + 40.0f;
@ -30,6 +32,7 @@ const CGFloat TGPhotoAvatarPreviewLandscapePanelSize = TGPhotoAvatarPreviewPanel
UIView *_wrapperView;
TGPhotoAvatarCropView *_cropView;
PGPhotoEditorView *_fullPreviewView;
UIView *_portraitToolsWrapperView;
UIView *_landscapeToolsWrapperView;
@ -43,6 +46,8 @@ const CGFloat TGPhotoAvatarPreviewLandscapePanelSize = TGPhotoAvatarPreviewPanel
UIView *_portraitToolControlView;
UIView *_landscapeToolControlView;
UILabel *_coverLabel;
bool _scheduledTransitionIn;
}
@property (nonatomic, weak) PGPhotoEditor *photoEditor;
@ -52,13 +57,14 @@ const CGFloat TGPhotoAvatarPreviewLandscapePanelSize = TGPhotoAvatarPreviewPanel
@implementation TGPhotoAvatarPreviewController
- (instancetype)initWithContext:(id<LegacyComponentsContext>)context photoEditor:(PGPhotoEditor *)photoEditor previewView:(TGPhotoEditorPreviewView *)previewView scrubberView:(TGMediaPickerGalleryVideoScrubber *)scrubberView dotImageView:(UIView *)dotImageView
- (instancetype)initWithContext:(id<LegacyComponentsContext>)context photoEditor:(PGPhotoEditor *)photoEditor previewView:(TGPhotoEditorPreviewView *)previewView scrubberView:(TGMediaPickerGalleryVideoScrubber *)scrubberView dotImageView:(UIView *)dotImageView fullPreviewView:(PGPhotoEditorView *)fullPreviewView
{
self = [super initWithContext:context];
if (self != nil)
{
self.photoEditor = photoEditor;
self.previewView = previewView;
_fullPreviewView = fullPreviewView;
_scrubberView = scrubberView;
_dotImageView = dotImageView;
@ -70,8 +76,8 @@ const CGFloat TGPhotoAvatarPreviewLandscapePanelSize = TGPhotoAvatarPreviewPanel
{
[super loadView];
self.view.autoresizingMask = UIViewAutoresizingFlexibleWidth | UIViewAutoresizingFlexibleHeight;
// [self.view addSubview:_previewView];
[_previewView performTransitionInWithCompletion:^{}];
_wrapperView = [[UIView alloc] initWithFrame:CGRectZero];
[self.view addSubview:_wrapperView];
@ -98,7 +104,7 @@ const CGFloat TGPhotoAvatarPreviewLandscapePanelSize = TGPhotoAvatarPreviewPanel
};
PGPhotoEditor *photoEditor = self.photoEditor;
_cropView = [[TGPhotoAvatarCropView alloc] initWithOriginalSize:photoEditor.originalSize screenSize:[self referenceViewSize]];
_cropView = [[TGPhotoAvatarCropView alloc] initWithOriginalSize:photoEditor.originalSize screenSize:[self referenceViewSize] fullPreviewView:_fullPreviewView];
[_cropView setCropRect:photoEditor.cropRect];
[_cropView setCropOrientation:photoEditor.cropOrientation];
[_cropView setCropMirrored:photoEditor.cropMirrored];
@ -137,11 +143,10 @@ const CGFloat TGPhotoAvatarPreviewLandscapePanelSize = TGPhotoAvatarPreviewPanel
_cropView.interactionEnded = interactionEnded;
[_wrapperView addSubview:_cropView];
_portraitToolsWrapperView = [[UIView alloc] initWithFrame:CGRectZero];
[_wrapperView addSubview:_portraitToolsWrapperView];
if (self.item.isVideo) {
_portraitToolsWrapperView = [[UIView alloc] initWithFrame:CGRectZero];
_portraitToolsWrapperView.alpha = 0.0f;
[_wrapperView addSubview:_portraitToolsWrapperView];
_portraitWrapperBackgroundView = [[UIView alloc] initWithFrame:_portraitToolsWrapperView.bounds];
_portraitWrapperBackgroundView.autoresizingMask = UIViewAutoresizingFlexibleWidth | UIViewAutoresizingFlexibleHeight;
_portraitWrapperBackgroundView.backgroundColor = [TGPhotoEditorInterfaceAssets toolbarTransparentBackgroundColor];
@ -149,7 +154,6 @@ const CGFloat TGPhotoAvatarPreviewLandscapePanelSize = TGPhotoAvatarPreviewPanel
[_portraitToolsWrapperView addSubview:_portraitWrapperBackgroundView];
_landscapeToolsWrapperView = [[UIView alloc] initWithFrame:CGRectZero];
_landscapeToolsWrapperView.alpha = 0.0f;
[_wrapperView addSubview:_landscapeToolsWrapperView];
_landscapeWrapperBackgroundView = [[UIView alloc] initWithFrame:_landscapeToolsWrapperView.bounds];
@ -178,6 +182,7 @@ const CGFloat TGPhotoAvatarPreviewLandscapePanelSize = TGPhotoAvatarPreviewPanel
[_coverLabel sizeToFit];
[_portraitToolsWrapperView addSubview:_coverLabel];
_dotImageView.alpha = 1.0f;
[_wrapperView addSubview:_dotImageView];
}
}
@ -186,8 +191,6 @@ const CGFloat TGPhotoAvatarPreviewLandscapePanelSize = TGPhotoAvatarPreviewPanel
{
[super viewWillAppear:animated];
self.photoEditor.additionalOutputs = @[_cropView.fullPreviewView];
if (_appeared)
return;
@ -214,8 +217,6 @@ const CGFloat TGPhotoAvatarPreviewLandscapePanelSize = TGPhotoAvatarPreviewPanel
if (_imagePendingLoad != nil)
[_cropView setImage:_imagePendingLoad];
[self transitionIn];
}
- (BOOL)shouldAutorotate
@ -260,6 +261,11 @@ const CGFloat TGPhotoAvatarPreviewLandscapePanelSize = TGPhotoAvatarPreviewPanel
- (void)transitionIn
{
if (_portraitToolsWrapperView.frame.size.height < FLT_EPSILON) {
_scheduledTransitionIn = true;
return;
}
_scrubberView.layer.rasterizationScale = [UIScreen mainScreen].scale;
_scrubberView.layer.shouldRasterize = true;
@ -273,37 +279,44 @@ const CGFloat TGPhotoAvatarPreviewLandscapePanelSize = TGPhotoAvatarPreviewPanel
_scrubberView.layer.shouldRasterize = false;
}];
switch (self.effectiveOrientation)
{
case UIInterfaceOrientationLandscapeLeft:
if (!self.initialAppearance) {
switch (self.effectiveOrientation)
{
_landscapeToolsWrapperView.transform = CGAffineTransformMakeTranslation(-_landscapeToolsWrapperView.frame.size.width / 3.0f * 2.0f, 0.0f);
[UIView animateWithDuration:0.3 delay:0.0 options:7 << 16 animations:^
case UIInterfaceOrientationLandscapeLeft:
{
_landscapeToolsWrapperView.transform = CGAffineTransformIdentity;
} completion:nil];
}
break;
case UIInterfaceOrientationLandscapeRight:
{
_landscapeToolsWrapperView.transform = CGAffineTransformMakeTranslation(_landscapeToolsWrapperView.frame.size.width / 3.0f * 2.0f, 0.0f);
[UIView animateWithDuration:0.3 delay:0.0 options:7 << 16 animations:^
_landscapeToolsWrapperView.transform = CGAffineTransformMakeTranslation(-_landscapeToolsWrapperView.frame.size.width / 3.0f * 2.0f, 0.0f);
[UIView animateWithDuration:0.3 delay:0.0 options:7 << 16 animations:^
{
_landscapeToolsWrapperView.transform = CGAffineTransformIdentity;
} completion:nil];
}
break;
case UIInterfaceOrientationLandscapeRight:
{
_landscapeToolsWrapperView.transform = CGAffineTransformIdentity;
} completion:nil];
}
break;
default:
{
_portraitToolsWrapperView.transform = CGAffineTransformMakeTranslation(0.0f, _portraitToolsWrapperView.frame.size.height / 3.0f * 2.0f);
[UIView animateWithDuration:0.3 delay:0.0 options:7 << 16 animations:^
_landscapeToolsWrapperView.transform = CGAffineTransformMakeTranslation(_landscapeToolsWrapperView.frame.size.width / 3.0f * 2.0f, 0.0f);
[UIView animateWithDuration:0.3 delay:0.0 options:7 << 16 animations:^
{
_landscapeToolsWrapperView.transform = CGAffineTransformIdentity;
} completion:nil];
}
break;
default:
{
_portraitToolsWrapperView.transform = CGAffineTransformIdentity;
} completion:nil];
CGFloat offset = _portraitToolsWrapperView.frame.size.height / 3.0f * 2.0f;
CGAffineTransform initialDotImageViewTransform = _dotImageView.transform;
_dotImageView.transform = CGAffineTransformTranslate(initialDotImageViewTransform, 0.0, offset * 4.444);
_portraitToolsWrapperView.transform = CGAffineTransformMakeTranslation(0.0f, offset);
[UIView animateWithDuration:0.3 delay:0.0 options:7 << 16 animations:^
{
_portraitToolsWrapperView.transform = CGAffineTransformIdentity;
_dotImageView.transform = initialDotImageViewTransform;
} completion:nil];
}
break;
}
break;
}
}
@ -313,16 +326,52 @@ const CGFloat TGPhotoAvatarPreviewLandscapePanelSize = TGPhotoAvatarPreviewPanel
_dismissing = true;
}
[_cropView animateTransitionOutSwitching:switching];
self.photoEditor.additionalOutputs = @[];
[self.view insertSubview:_previewView belowSubview:_wrapperView];
_previewView.frame = [_wrapperView convertRect:_cropView.frame toView:self.view];
TGPhotoEditorPreviewView *previewView = self.previewView;
previewView.touchedUp = nil;
previewView.touchedDown = nil;
previewView.tapped = nil;
previewView.interactionEnded = nil;
[_cropView animateTransitionOut];
[_videoAreaView.superview bringSubviewToFront:_videoAreaView];
if (switching)
{
_switching = true;
UIInterfaceOrientation orientation = self.effectiveOrientation;
CGRect cropRectFrame = [_cropView cropRectFrameForView:self.view];
CGSize referenceSize = [self referenceViewSizeForOrientation:orientation];
CGRect referenceBounds = CGRectMake(0, 0, referenceSize.width, referenceSize.height);
CGRect containerFrame = [TGPhotoEditorTabController photoContainerFrameForParentViewFrame:referenceBounds toolbarLandscapeSize:self.toolbarLandscapeSize orientation:orientation panelSize:TGPhotoEditorPanelSize hasOnScreenNavigation:self.hasOnScreenNavigation];
if (self.switchingToTab == TGPhotoEditorPaintTab)
{
containerFrame = [TGPhotoPaintController photoContainerFrameForParentViewFrame:referenceBounds toolbarLandscapeSize:self.toolbarLandscapeSize orientation:orientation panelSize:TGPhotoPaintTopPanelSize + TGPhotoPaintBottomPanelSize hasOnScreenNavigation:self.hasOnScreenNavigation];
}
CGSize fittedSize = TGScaleToSize(cropRectFrame.size, containerFrame.size);
CGRect targetFrame = CGRectMake(containerFrame.origin.x + (containerFrame.size.width - fittedSize.width) / 2, containerFrame.origin.y + (containerFrame.size.height - fittedSize.height) / 2, fittedSize.width, fittedSize.height);
CGFloat targetCropViewScale = targetFrame.size.width / _cropView.frame.size.width;
CGRect targetCropViewFrame = [self.view convertRect:targetFrame toView:_wrapperView];
_previewView.alpha = 0.0;
[_cropView closeCurtains];
[UIView animateWithDuration:0.3f delay:0.0f options:UIViewAnimationOptionCurveEaseInOut | UIViewAnimationOptionLayoutSubviews animations:^
{
_previewView.frame = targetFrame;
_cropView.center = CGPointMake(CGRectGetMidX(targetCropViewFrame), CGRectGetMidY(targetCropViewFrame));
_cropView.transform = CGAffineTransformMakeScale(targetCropViewScale, targetCropViewScale);
} completion:^(__unused BOOL finished)
{
_previewView.alpha = 1.0;
if (self.finishedTransitionOut != nil)
self.finishedTransitionOut();
if (completion != nil)
completion();
}];
}
switch (self.effectiveOrientation)
{
@ -346,10 +395,15 @@ const CGFloat TGPhotoAvatarPreviewLandscapePanelSize = TGPhotoAvatarPreviewPanel
default:
{
CGFloat offset = _portraitToolsWrapperView.frame.size.height / 3.0f * 2.0f;
CGAffineTransform initialDotImageViewTransform = _dotImageView.transform;
[UIView animateWithDuration:0.3 delay:0.0 options:7 << 16 animations:^
{
_portraitToolsWrapperView.transform = CGAffineTransformMakeTranslation(0.0f, _portraitToolsWrapperView.frame.size.height / 3.0f * 2.0f);
} completion:nil];
_portraitToolsWrapperView.transform = CGAffineTransformMakeTranslation(0.0f, offset);
_dotImageView.transform = CGAffineTransformTranslate(initialDotImageViewTransform, 0.0, offset * 4.444);
} completion:^(__unused BOOL finished) {
_dotImageView.transform = initialDotImageViewTransform;
}];
}
break;
}
@ -358,12 +412,13 @@ const CGFloat TGPhotoAvatarPreviewLandscapePanelSize = TGPhotoAvatarPreviewPanel
{
_portraitToolsWrapperView.alpha = 0.0f;
_landscapeToolsWrapperView.alpha = 0.0f;
_videoAreaView.alpha = 0.0f;
_dotImageView.alpha = 0.0f;
} completion:^(__unused BOOL finished)
{
if (completion != nil)
completion();
if (!switching) {
if (completion != nil)
completion();
}
}];
}
@ -371,8 +426,6 @@ const CGFloat TGPhotoAvatarPreviewLandscapePanelSize = TGPhotoAvatarPreviewPanel
{
_dismissing = true;
self.photoEditor.additionalOutputs = @[];
TGPhotoEditorPreviewView *previewView = self.previewView;
[previewView prepareForTransitionOut];
@ -426,15 +479,16 @@ const CGFloat TGPhotoAvatarPreviewLandscapePanelSize = TGPhotoAvatarPreviewPanel
_appeared = true;
if ([transitionView isKindOfClass:[TGPhotoEditorPreviewView class]]) {
[self.view insertSubview:transitionView atIndex:0];
} else {
[transitionView removeFromSuperview];
}
TGPhotoEditorPreviewView *previewView = _previewView;
previewView.hidden = false;
previewView.hidden = true;
[previewView performTransitionInIfNeeded];
[_cropView openCurtains];
[_cropView transitionInFinishedFromCamera:(self.fromCamera && self.initialAppearance)];
PGPhotoEditor *photoEditor = self.photoEditor;
@ -467,12 +521,13 @@ const CGFloat TGPhotoAvatarPreviewLandscapePanelSize = TGPhotoAvatarPreviewPanel
- (CGRect)transitionOutReferenceFrame
{
return [_cropView cropRectFrameForView:self.view];
TGPhotoEditorPreviewView *previewView = _previewView;
return previewView.frame;
}
- (UIView *)transitionOutReferenceView
{
return [_cropView cropSnapshotView];
return _previewView;
}
- (id)currentResultRepresentation
@ -493,7 +548,13 @@ const CGFloat TGPhotoAvatarPreviewLandscapePanelSize = TGPhotoAvatarPreviewPanel
{
[super viewWillLayoutSubviews];
[self updateLayout:[[LegacyComponentsGlobals provider] applicationStatusBarOrientation]];
if (_scheduledTransitionIn) {
_scheduledTransitionIn = false;
[self transitionIn];
}
}
- (CGRect)transitionOutSourceFrameForReferenceFrame:(CGRect)referenceFrame orientation:(UIInterfaceOrientation)orientation
@ -508,11 +569,34 @@ const CGFloat TGPhotoAvatarPreviewLandscapePanelSize = TGPhotoAvatarPreviewPanel
- (CGRect)_targetFrameForTransitionInFromFrame:(CGRect)fromFrame
{
CGSize referenceSize = [self referenceViewSize];
CGRect containerFrame = [TGPhotoAvatarPreviewController photoContainerFrameForParentViewFrame:CGRectMake(0, 0, referenceSize.width, referenceSize.height) toolbarLandscapeSize:self.toolbarLandscapeSize orientation:self.effectiveOrientation panelSize:0 hasOnScreenNavigation:self.hasOnScreenNavigation];
CGSize fittedSize = TGScaleToSize(fromFrame.size, containerFrame.size);
CGRect toFrame = CGRectMake(containerFrame.origin.x + (containerFrame.size.width - fittedSize.width) / 2, containerFrame.origin.y + (containerFrame.size.height - fittedSize.height) / 2, fittedSize.width, fittedSize.height);
UIInterfaceOrientation orientation = self.effectiveOrientation;
return toFrame;
CGRect containerFrame = [TGPhotoEditorTabController photoContainerFrameForParentViewFrame:CGRectMake(0, 0, referenceSize.width, referenceSize.height) toolbarLandscapeSize:self.toolbarLandscapeSize orientation:orientation panelSize:0.0f hasOnScreenNavigation:self.hasOnScreenNavigation];
CGRect targetFrame = CGRectZero;
CGFloat shortSide = MIN(referenceSize.width, referenceSize.height);
CGFloat diameter = shortSide - [TGPhotoAvatarCropView areaInsetSize].width * 2;
if (self.initialAppearance && (self.fromCamera || !self.skipTransitionIn))
{
CGSize referenceSize = fromFrame.size;
if ([_transitionView isKindOfClass:[UIImageView class]])
referenceSize = ((UIImageView *)_transitionView).image.size;
CGSize fittedSize = TGScaleToFill(referenceSize, CGSizeMake(diameter, diameter));
targetFrame = CGRectMake(containerFrame.origin.x + (containerFrame.size.width - fittedSize.width) / 2,
containerFrame.origin.y + (containerFrame.size.height - fittedSize.height) / 2,
fittedSize.width, fittedSize.height);
}
else
{
targetFrame = CGRectMake(containerFrame.origin.x + (containerFrame.size.width - diameter) / 2,
containerFrame.origin.y + (containerFrame.size.height - diameter) / 2,
diameter, diameter);
}
return targetFrame;
}
+ (CGRect)photoContainerFrameForParentViewFrame:(CGRect)parentViewFrame toolbarLandscapeSize:(CGFloat)toolbarLandscapeSize orientation:(UIInterfaceOrientation)orientation panelSize:(CGFloat)panelSize hasOnScreenNavigation:(bool)hasOnScreenNavigation

View File

@ -66,6 +66,7 @@
TGPhotoToolbarView *_portraitToolbarView;
TGPhotoToolbarView *_landscapeToolbarView;
TGPhotoEditorPreviewView *_previewView;
PGPhotoEditorView *_fullPreviewView;
PGPhotoEditor *_photoEditor;
@ -326,7 +327,13 @@
[_photoEditor setPreviewOutput:_previewView];
[self updatePreviewView];
if ([self presentedForAvatarCreation]) {
CGSize fittedSize = TGScaleToSize(_photoEditor.originalSize, CGSizeMake(1024, 1024));
_fullPreviewView = [[PGPhotoEditorView alloc] initWithFrame:CGRectMake(0, 0, fittedSize.width, fittedSize.height)];
_photoEditor.additionalOutputs = @[_fullPreviewView];
[self.view addSubview:_fullPreviewView];
}
_dotMarkerView = [[UIImageView alloc] initWithImage:TGCircleImage(7.0, [TGPhotoEditorInterfaceAssets accentColor])];
[_scrubberView addSubview:_dotMarkerView];
_dotMarkerView.center = CGPointMake(30.0, -20.0);
@ -345,6 +352,7 @@
if ([self presentedForAvatarCreation] && _item.isVideo) {
_scrubberView = [[TGMediaPickerGalleryVideoScrubber alloc] initWithFrame:CGRectMake(0.0f, 0.0, _portraitToolbarView.frame.size.width, 68.0f)];
_scrubberView.hasDotPicker = true;
_scrubberView.dataSource = self;
_scrubberView.delegate = self;
_scrubberView.clipsToBounds = false;
@ -470,12 +478,17 @@
if (_item.isVideo) {
signal = [self.requestOriginalFullSizeImage(_item, position) deliverOn:_queue];
} else {
bool avatar = [self presentedForAvatarCreation];
signal = [[[[self.requestOriginalFullSizeImage(_item, position) takeLast] deliverOn:_queue] filter:^bool(id image)
{
return [image isKindOfClass:[UIImage class]];
}] map:^UIImage *(UIImage *image)
{
return TGPhotoEditorCrop(image, nil, _photoEditor.cropOrientation, _photoEditor.cropRotation, _photoEditor.cropRect, _photoEditor.cropMirrored, TGPhotoEditorScreenImageMaxSize(), _photoEditor.originalSize, true);
if (avatar) {
return image;
} else {
return TGPhotoEditorCrop(image, nil, _photoEditor.cropOrientation, _photoEditor.cropRotation, _photoEditor.cropRect, _photoEditor.cropMirrored, TGPhotoEditorScreenImageMaxSize(), _photoEditor.originalSize, true);
}
}];
}
}
@ -620,6 +633,11 @@
}];
}
- (void)returnFullPreviewView {
_fullPreviewView.frame = CGRectMake(-10000, 0, _fullPreviewView.frame.size.width, _fullPreviewView.frame.size.height);
[self.view addSubview:_fullPreviewView];
}
- (void)startVideoPlayback:(bool)reset {
if (reset && _player == nil) {
_scheduledVideoPlayback = true;
@ -1029,7 +1047,6 @@
_portraitToolbarView.alpha = 0.0f;
_landscapeToolbarView.alpha = 0.0f;
[UIView animateWithDuration:0.3f delay:delay options:UIViewAnimationOptionCurveLinear animations:^
{
_portraitToolbarView.alpha = 1.0f;
@ -1097,11 +1114,7 @@
{
if (![currentController isDismissAllowed])
return;
transitionReferenceFrame = [currentController transitionOutReferenceFrame];
transitionReferenceView = [currentController transitionOutReferenceView];
transitionNoTransitionView = [currentController isKindOfClass:[TGPhotoAvatarPreviewController class]];
currentController.switchingToTab = tab;
[currentController transitionOutSwitching:true completion:^
{
@ -1109,6 +1122,10 @@
[currentController.view removeFromSuperview];
}];
transitionReferenceFrame = [currentController transitionOutReferenceFrame];
transitionReferenceView = [currentController transitionOutReferenceView];
transitionNoTransitionView = false;
if ([currentController isKindOfClass:[TGPhotoCropController class]])
{
_backgroundView.alpha = 1.0f;
@ -1143,11 +1160,23 @@
}
}
if ([self presentedForAvatarCreation])
transitionNoTransitionView = true;
snapshotImage = _screenImage;
}
_switchingTab = true;
if ([_currentTabController isKindOfClass:[TGPhotoAvatarPreviewController class]]) {
if (_item.isVideo && !_isPlaying) {
[self setPlayButtonHidden:true animated:false];
[self startVideoPlayback:false];
} else if (!_item.isVideo) {
[_photoEditor processAnimated:false completion:nil];
}
}
TGPhotoEditorBackButton backButtonType = TGPhotoEditorBackButtonCancel;
TGPhotoEditorDoneButton doneButtonType = TGPhotoEditorDoneButtonCheck;
@ -1163,12 +1192,10 @@
{
bool skipInitialTransition = (![self presentedFromCamera] && self.navigationController != nil) || self.skipInitialTransition;
TGPhotoAvatarPreviewController *cropController = [[TGPhotoAvatarPreviewController alloc] initWithContext:_context photoEditor:_photoEditor previewView:_previewView scrubberView:_scrubberView dotImageView:_dotImageView];
TGPhotoAvatarPreviewController *cropController = [[TGPhotoAvatarPreviewController alloc] initWithContext:_context photoEditor:_photoEditor previewView:_previewView scrubberView:_scrubberView dotImageView:_dotImageView fullPreviewView:_fullPreviewView];
cropController.fromCamera = [self presentedFromCamera];
cropController.skipTransitionIn = skipInitialTransition;
if (snapshotView != nil)
[cropController setSnapshotView:snapshotView];
else if (snapshotImage != nil)
if (snapshotImage != nil)
[cropController setSnapshotImage:snapshotImage];
cropController.toolbarLandscapeSize = TGPhotoEditorToolbarSize;
cropController.controlVideoPlayback = ^(bool play) {
@ -1190,7 +1217,7 @@
[strongSelf stopVideoPlayback:false];
[strongSelf setPlayButtonHidden:false animated:true];
} else {
[strongSelf startVideoPlayback:true];
[strongSelf startVideoPlayback:false];
[strongSelf setPlayButtonHidden:true animated:true];
}
};
@ -1258,7 +1285,9 @@
}
strongSelf->_switchingTab = false;
[strongSelf startVideoPlayback:true];
if (isInitialAppearance)
[strongSelf startVideoPlayback:true];
};
cropController.finishedTransitionOut = ^
{
@ -1273,40 +1302,12 @@
}
[strongSelf->_currentTabController _finishedTransitionInWithView:nil];
[strongSelf returnFullPreviewView];
};
[[[[self.requestOriginalFullSizeImage(_item, 0) reduceLeftWithPassthrough:nil with:^id(__unused id current, __unused id next, void (^emit)(id))
{
if ([next isKindOfClass:[UIImage class]])
{
if ([next degraded])
{
emit(next);
return current;
}
return next;
}
else
{
return current;
}
}] filter:^bool(id result)
{
return (result != nil);
}] deliverOn:[SQueue mainQueue]] startWithNext:^(UIImage *image)
{
if (cropController.dismissing && !cropController.switching)
return;
[self updateDoneButtonEnabled:!image.degraded animated:true];
if (image.degraded) {
return;
} else {
self.fullSizeImage = image;
[cropController setImage:image];
}
}];
controller = cropController;
doneButtonType = TGPhotoEditorDoneButtonDone;
}
else
{
@ -1441,7 +1442,9 @@
strongSelf.finishedTransitionIn();
strongSelf->_switchingTab = false;
[strongSelf startVideoPlayback:true];
if (isInitialAppearance)
[strongSelf startVideoPlayback:true];
};
controller = paintController;
@ -1471,9 +1474,9 @@
strongSelf->_switchingTab = false;
[strongSelf startVideoPlayback:true];
if (isInitialAppearance)
[strongSelf startVideoPlayback:true];
};
controller = toolsController;
}
break;
@ -1510,71 +1513,11 @@
}
break;
case TGPhotoEditorPreviewTab:
{
if ([_currentTabController isKindOfClass:[TGPhotoToolsController class]]) {
[_scrubberView reloadDataAndReset:false];
[self updateDotImage:false];
}
TGPhotoAvatarPreviewController *previewController = [[TGPhotoAvatarPreviewController alloc] initWithContext:_context photoEditor:_photoEditor previewView:_previewView scrubberView:_scrubberView dotImageView:_dotImageView];
previewController.item = _item;
previewController.toolbarLandscapeSize = TGPhotoEditorToolbarSize;
previewController.beginTransitionIn = ^UIView *(CGRect *referenceFrame, UIView **parentView, bool *noTransitionView)
{
*referenceFrame = transitionReferenceFrame;
*parentView = transitionParentView;
*noTransitionView = transitionNoTransitionView;
__strong TGPhotoEditorController *strongSelf = weakSelf;
if (strongSelf != nil) {
[strongSelf startVideoPlayback:true];
}
return transitionReferenceView;
};
previewController.finishedTransitionIn = ^
{
__strong TGPhotoEditorController *strongSelf = weakSelf;
if (strongSelf == nil)
return;
if (isInitialAppearance && strongSelf.finishedTransitionIn != nil)
strongSelf.finishedTransitionIn();
strongSelf->_switchingTab = false;
};
previewController.controlVideoPlayback = ^(bool play) {
__strong TGPhotoEditorController *strongSelf = weakSelf;
if (strongSelf == nil)
return;
if (play) {
[strongSelf startVideoPlayback:false];
} else {
[strongSelf stopVideoPlayback:false];
}
};
previewController.controlVideoSeek = ^(NSTimeInterval position) {
__strong TGPhotoEditorController *strongSelf = weakSelf;
if (strongSelf != nil)
[strongSelf seekVideo:position];
};
previewController.controlVideoEndTime = ^(NSTimeInterval endTime) {
__strong TGPhotoEditorController *strongSelf = weakSelf;
if (strongSelf != nil)
[strongSelf setVideoEndTime:endTime];
};
controller = previewController;
doneButtonType = TGPhotoEditorDoneButtonDone;
}
break;
default:
break;
}
if ([self presentedForAvatarCreation] && !isInitialAppearance && tab != TGPhotoEditorPreviewTab) {
if ([self presentedForAvatarCreation] && !isInitialAppearance && tab != TGPhotoEditorCropTab) {
backButtonType = TGPhotoEditorBackButtonBack;
}
@ -1707,7 +1650,7 @@
- (void)dismissEditor
{
if (![_currentTabController isKindOfClass:[TGPhotoAvatarPreviewController class]] && [self presentedForAvatarCreation]) {
[self presentTab:TGPhotoEditorPreviewTab];
[self presentTab:TGPhotoEditorCropTab];
return;
}
@ -1800,7 +1743,7 @@
- (void)doneButtonPressed
{
if ([self presentedForAvatarCreation] && ![_currentTabController isKindOfClass:[TGPhotoAvatarPreviewController class]]) {
[self presentTab:TGPhotoEditorPreviewTab];
[self presentTab:TGPhotoEditorCropTab];
} else {
[self applyEditor];
}
@ -1899,7 +1842,14 @@
}
NSTimeInterval duration = trimEndValue - trimStartValue;
TGMediaVideoConversionPreset preset = duration < 4.0 ? TGMediaVideoConversionPresetProfileHigh : TGMediaVideoConversionPresetProfile;
TGMediaVideoConversionPreset preset;
if (duration <= 2.5) {
preset = TGMediaVideoConversionPresetProfileVeryHigh;
} else if (duration <= 5.0) {
preset = TGMediaVideoConversionPresetProfileHigh;
} else {
preset = TGMediaVideoConversionPresetProfile;
}
TGDispatchOnMainThread(^{
if (self.didFinishEditingVideo != nil)
@ -2256,6 +2206,8 @@
CGFloat portraitToolbarViewBottomEdge = screenSide;
if ([UIDevice currentDevice].userInterfaceIdiom == UIUserInterfaceIdiomPad)
portraitToolbarViewBottomEdge = screenEdges.bottom;
CGFloat previousWidth = _portraitToolbarView.frame.size.width;
_portraitToolbarView.frame = CGRectMake(screenEdges.left, portraitToolbarViewBottomEdge - TGPhotoEditorToolbarSize - safeAreaInset.bottom, referenceSize.width, TGPhotoEditorToolbarSize + safeAreaInset.bottom);
_scrubberView.frame = CGRectMake(0.0, 0.0, _portraitToolbarView.frame.size.width, _scrubberView.frame.size.height);
@ -2267,7 +2219,8 @@
[_scrubberView reloadData];
[_scrubberView resetToStart];
} else {
[_scrubberView reloadThumbnails];
if (previousWidth != _portraitToolbarView.frame.size.width)
[_scrubberView reloadThumbnails];
}
});
}

View File

@ -147,7 +147,8 @@ const CGFloat TGPhotoEditorToolbarSize = 49.0f;
}
else
{
_transitionView = [referenceView snapshotViewAfterScreenUpdates:false];
if (![referenceView isKindOfClass:[TGPhotoEditorPreviewView class]])
_transitionView = [referenceView snapshotViewAfterScreenUpdates:false];
if (_transitionView == nil) {
_transitionView = referenceView;
}
@ -168,27 +169,45 @@ const CGFloat TGPhotoEditorToolbarSize = 49.0f;
_transitionInProgress = true;
POPSpringAnimation *animation = [TGPhotoEditorAnimation prepareTransitionAnimationForPropertyNamed:kPOPViewFrame];
if (self.transitionSpeed > FLT_EPSILON)
animation.springSpeed = self.transitionSpeed;
animation.fromValue = [NSValue valueWithCGRect:_transitionView.frame];
animation.toValue = [NSValue valueWithCGRect:_transitionTargetFrame];
animation.completionBlock = ^(__unused POPAnimation *animation, __unused BOOL finished)
[UIView animateWithDuration:0.3f delay:0.0f options:UIViewAnimationOptionCurveEaseInOut | UIViewAnimationOptionLayoutSubviews animations:^
{
_transitionView.frame = _transitionTargetFrame;
} completion:^(BOOL finished) {
_transitionInProgress = false;
UIView *transitionView = _transitionView;
_transitionView = nil;
if (self.finishedTransitionIn != nil)
{
self.finishedTransitionIn();
self.finishedTransitionIn = nil;
}
[self _finishedTransitionInWithView:transitionView];
};
[_transitionView pop_addAnimation:animation forKey:@"frame"];
UIView *transitionView = _transitionView;
_transitionView = nil;
if (self.finishedTransitionIn != nil)
{
self.finishedTransitionIn();
self.finishedTransitionIn = nil;
}
[self _finishedTransitionInWithView:transitionView];
}];
// POPSpringAnimation *animation = [TGPhotoEditorAnimation prepareTransitionAnimationForPropertyNamed:kPOPViewFrame];
// if (self.transitionSpeed > FLT_EPSILON)
// animation.springSpeed = self.transitionSpeed;
// animation.fromValue = [NSValue valueWithCGRect:_transitionView.frame];
// animation.toValue = [NSValue valueWithCGRect:_transitionTargetFrame];
// animation.completionBlock = ^(__unused POPAnimation *animation, __unused BOOL finished)
// {
// _transitionInProgress = false;
//
// UIView *transitionView = _transitionView;
// _transitionView = nil;
//
// if (self.finishedTransitionIn != nil)
// {
// self.finishedTransitionIn();
// self.finishedTransitionIn = nil;
// }
//
// [self _finishedTransitionInWithView:transitionView];
// };
// [_transitionView pop_addAnimation:animation forKey:@"frame"];
}
- (void)prepareForCustomTransitionOut

View File

@ -181,7 +181,7 @@ const CGFloat TGPhotoEditorToolsLandscapePanelSize = TGPhotoEditorToolsPanelSize
if (strongSelf != nil)
[strongSelf setPreview:!strongSelf->_preview animated:true];
};
previewView.customTouchDownHandling = forVideo;
previewView.customTouchDownHandling = true;
[self.view addSubview:_previewView];
_wrapperView = [[TGPhotoEditorSparseView alloc] initWithFrame:CGRectZero];

View File

@ -19,7 +19,7 @@ public func presentLegacyAvatarPicker(holder: Atomic<NSObject?>, signup: Bool, t
present(legacyController, nil)
let mixin = TGMediaAvatarMenuMixin(context: legacyController.context, parentController: emptyController, hasSearchButton: false, hasDeleteButton: false, hasViewButton: openCurrent != nil, personalPhoto: true, saveEditedPhotos: false, saveCapturedMedia: false, signup: signup)!
let mixin = TGMediaAvatarMenuMixin(context: legacyController.context, parentController: emptyController, hasSearchButton: false, hasDeleteButton: false, hasViewButton: openCurrent != nil, personalPhoto: true, isVideo: false, saveEditedPhotos: false, saveCapturedMedia: false, signup: signup)!
let _ = holder.swap(mixin)
mixin.didFinishWithImage = { image in
guard let image = image else {

View File

@ -230,47 +230,6 @@ final class PeerAvatarImageGalleryItemNode: ZoomableContentGalleryItemNode {
self.fetchDisposable.set(fetchedMediaResource(mediaBox: self.context.account.postbox.mediaBox, reference: representations[largestIndex].reference).start())
}
// self.statusDisposable.set((self.context.account.postbox.mediaBox.resourceStatus(largestSize.resource)
// |> deliverOnMainQueue).start(next: { [weak self] status in
// if let strongSelf = self {
// let previousStatus = strongSelf.status
// strongSelf.status = status
// switch status {
// case .Remote:
// strongSelf.statusNode.isHidden = false
// strongSelf.statusNodeContainer.isUserInteractionEnabled = true
// strongSelf.statusNode.transitionToState(.download(.white), completion: {})
// case let .Fetching(_, progress):
// strongSelf.statusNode.isHidden = false
// strongSelf.statusNodeContainer.isUserInteractionEnabled = true
// let adjustedProgress = max(progress, 0.027)
// strongSelf.statusNode.transitionToState(.progress(color: .white, lineWidth: nil, value: CGFloat(adjustedProgress), cancelEnabled: true), completion: {})
// case .Local:
// if let previousStatus = previousStatus, case .Fetching = previousStatus {
// strongSelf.statusNode.transitionToState(.progress(color: .white, lineWidth: nil, value: 1.0, cancelEnabled: true), completion: {
// if let strongSelf = self {
// strongSelf.statusNode.alpha = 0.0
// strongSelf.statusNodeContainer.isUserInteractionEnabled = false
// strongSelf.statusNode.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2, completion: { _ in
// if let strongSelf = self {
// strongSelf.statusNode.transitionToState(.none, animated: false, completion: {})
// }
// })
// }
// })
// } else if !strongSelf.statusNode.isHidden && !strongSelf.statusNode.alpha.isZero {
// strongSelf.statusNode.alpha = 0.0
// strongSelf.statusNodeContainer.isUserInteractionEnabled = false
// strongSelf.statusNode.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2, completion: { _ in
// if let strongSelf = self {
// strongSelf.statusNode.transitionToState(.none, animated: false, completion: {})
// }
// })
// }
// }
// }
// }))
var id: Int64?
if case let .image(image) = entry {
id = image.0.id
@ -287,13 +246,15 @@ final class PeerAvatarImageGalleryItemNode: ZoomableContentGalleryItemNode {
}
}
videoNode.canAttachContent = true
if let startTimestamp = video.startTimestamp {
videoNode.seek(startTimestamp)
}
if videoNode.hasAttachedContext {
videoNode.play()
}
self.videoContent = videoContent
self.videoNode = videoNode
videoNode.updateLayout(size: largestSize.dimensions.cgSize, transition: .immediate)
self.contentNode.addSubnode(videoNode)

View File

@ -754,7 +754,7 @@ public func channelInfoController(context: AccountContext, peerId: PeerId) -> Vi
}
}
let mixin = TGMediaAvatarMenuMixin(context: legacyController.context, parentController: emptyController, hasSearchButton: true, hasDeleteButton: hasPhotos, hasViewButton: false, personalPhoto: false, saveEditedPhotos: false, saveCapturedMedia: false, signup: true)!
let mixin = TGMediaAvatarMenuMixin(context: legacyController.context, parentController: emptyController, hasSearchButton: true, hasDeleteButton: hasPhotos, hasViewButton: false, personalPhoto: false, isVideo: false, saveEditedPhotos: false, saveCapturedMedia: false, signup: true)!
let _ = currentAvatarMixin.swap(mixin)
mixin.requestSearchController = { assetsController in
let controller = WebSearchController(context: context, peer: peer, configuration: searchBotsConfiguration, mode: .avatar(initialQuery: peer?.displayTitle(strings: presentationData.strings, displayOrder: presentationData.nameDisplayOrder), completion: { result in

View File

@ -1479,7 +1479,7 @@ public func groupInfoController(context: AccountContext, peerId originalPeerId:
}
}
let mixin = TGMediaAvatarMenuMixin(context: legacyController.context, parentController: emptyController, hasSearchButton: true, hasDeleteButton: hasPhotos, hasViewButton: false, personalPhoto: false, saveEditedPhotos: false, saveCapturedMedia: false, signup: true)!
let mixin = TGMediaAvatarMenuMixin(context: legacyController.context, parentController: emptyController, hasSearchButton: true, hasDeleteButton: hasPhotos, hasViewButton: false, personalPhoto: false, isVideo: false, saveEditedPhotos: false, saveCapturedMedia: false, signup: true)!
let _ = currentAvatarMixin.swap(mixin)
mixin.requestSearchController = { assetsController in
let controller = WebSearchController(context: context, peer: peer, configuration: searchBotsConfiguration, mode: .avatar(initialQuery: peer?.displayTitle(strings: presentationData.strings, displayOrder: presentationData.nameDisplayOrder), completion: { result in

View File

@ -14,6 +14,7 @@ swift_library(
"//submodules/Display:Display",
"//submodules/AccountContext:AccountContext",
"//submodules/DeviceAccess:DeviceAccess",
"//submodules/LegacyComponents:LegacyComponents",
],
visibility = [
"//visibility:public",

View File

@ -9,6 +9,7 @@ import Display
import MobileCoreServices
import DeviceAccess
import AccountContext
import LegacyComponents
public enum FetchMediaDataState {
case progress(Float)

View File

@ -532,7 +532,7 @@ func editSettingsController(context: AccountContext, currentName: ItemListAvatar
updateState {
$0.withUpdatedUpdatingAvatar(.image(representation, true))
}
updateAvatarDisposable.set((updateAccountPhoto(account: context.account, resource: resource, videoResource: nil, mapResourceToAvatarSizes: { resource, representations in
updateAvatarDisposable.set((updateAccountPhoto(account: context.account, resource: resource, videoResource: nil, videoStartTimestamp: nil, mapResourceToAvatarSizes: { resource, representations in
return mapResourceToAvatarSizes(postbox: context.account.postbox, resource: resource, representations: representations)
}) |> deliverOnMainQueue).start(next: { result in
switch result {
@ -556,6 +556,11 @@ func editSettingsController(context: AccountContext, currentName: ItemListAvatar
$0.withUpdatedUpdatingAvatar(.image(representation, true))
}
var videoStartTimestamp: Double? = nil
if let adjustments = adjustments, adjustments.videoStartValue > 0.0 {
videoStartTimestamp = adjustments.videoStartValue - adjustments.trimStartValue
}
let signal = Signal<TelegramMediaResource, UploadPeerPhotoError> { subscriber in
var filteredPath = url.path
if filteredPath.hasPrefix("file://") {
@ -606,9 +611,11 @@ func editSettingsController(context: AccountContext, currentName: ItemListAvatar
}
}
updateAvatarDisposable.set((signal
|> mapToSignal { videoResource in
return updateAccountPhoto(account: context.account, resource: photoResource, videoResource: videoResource, mapResourceToAvatarSizes: { resource, representations in
return updateAccountPhoto(account: context.account, resource: photoResource, videoResource: videoResource, videoStartTimestamp: videoStartTimestamp, mapResourceToAvatarSizes: { resource, representations in
return mapResourceToAvatarSizes(postbox: context.account.postbox, resource: resource, representations: representations)
})
} |> deliverOnMainQueue).start(next: { result in
@ -624,7 +631,7 @@ func editSettingsController(context: AccountContext, currentName: ItemListAvatar
}
}
let mixin = TGMediaAvatarMenuMixin(context: legacyController.context, parentController: emptyController, hasSearchButton: true, hasDeleteButton: hasPhotos, hasViewButton: hasPhotos, personalPhoto: true, saveEditedPhotos: false, saveCapturedMedia: false, signup: false)!
let mixin = TGMediaAvatarMenuMixin(context: legacyController.context, parentController: emptyController, hasSearchButton: true, hasDeleteButton: hasPhotos, hasViewButton: hasPhotos, personalPhoto: true, isVideo: false, saveEditedPhotos: false, saveCapturedMedia: false, signup: false)!
let _ = currentAvatarMixin.swap(mixin)
mixin.requestSearchController = { assetsController in
let controller = WebSearchController(context: context, peer: peer, configuration: searchBotsConfiguration, mode: .avatar(initialQuery: nil, completion: { result in
@ -652,7 +659,7 @@ func editSettingsController(context: AccountContext, currentName: ItemListAvatar
return $0.withUpdatedUpdatingAvatar(ItemListAvatarAndNameInfoItemUpdatingAvatar.none)
}
}
updateAvatarDisposable.set((updateAccountPhoto(account: context.account, resource: nil, videoResource: nil, mapResourceToAvatarSizes: { resource, representations in
updateAvatarDisposable.set((updateAccountPhoto(account: context.account, resource: nil, videoResource: nil, videoStartTimestamp: nil, mapResourceToAvatarSizes: { resource, representations in
return mapResourceToAvatarSizes(postbox: context.account.postbox, resource: resource, representations: representations)
}) |> deliverOnMainQueue).start(next: { result in
switch result {

View File

@ -1295,7 +1295,7 @@ public func settingsController(context: AccountContext, accountManager: AccountM
state.updatingAvatar = .image(representation, true)
return state
}
updateAvatarDisposable.set((updateAccountPhoto(account: context.account, resource: resource, videoResource: nil, mapResourceToAvatarSizes: { resource, representations in
updateAvatarDisposable.set((updateAccountPhoto(account: context.account, resource: resource, videoResource: nil, videoStartTimestamp: nil, mapResourceToAvatarSizes: { resource, representations in
return mapResourceToAvatarSizes(postbox: context.account.postbox, resource: resource, representations: representations)
}) |> deliverOnMainQueue).start(next: { result in
switch result {
@ -1323,6 +1323,11 @@ public func settingsController(context: AccountContext, accountManager: AccountM
return state
}
var videoStartTimestamp: Double? = nil
if let adjustments = adjustments, adjustments.videoStartValue > 0.0 {
videoStartTimestamp = adjustments.videoStartValue - adjustments.trimStartValue
}
let signal = Signal<TelegramMediaResource, UploadPeerPhotoError> { subscriber in
var filteredPath = url.path
if filteredPath.hasPrefix("file://") {
@ -1375,7 +1380,7 @@ public func settingsController(context: AccountContext, accountManager: AccountM
updateAvatarDisposable.set((signal
|> mapToSignal { videoResource in
return updateAccountPhoto(account: context.account, resource: photoResource, videoResource: videoResource, mapResourceToAvatarSizes: { resource, representations in
return updateAccountPhoto(account: context.account, resource: photoResource, videoResource: videoResource, videoStartTimestamp: videoStartTimestamp, mapResourceToAvatarSizes: { resource, representations in
return mapResourceToAvatarSizes(postbox: context.account.postbox, resource: resource, representations: representations)
})
} |> deliverOnMainQueue).start(next: { result in
@ -1393,7 +1398,7 @@ public func settingsController(context: AccountContext, accountManager: AccountM
}
}
let mixin = TGMediaAvatarMenuMixin(context: legacyController.context, parentController: emptyController, hasSearchButton: true, hasDeleteButton: hasPhotos, hasViewButton: false, personalPhoto: true, saveEditedPhotos: false, saveCapturedMedia: false, signup: false)!
let mixin = TGMediaAvatarMenuMixin(context: legacyController.context, parentController: emptyController, hasSearchButton: true, hasDeleteButton: hasPhotos, hasViewButton: false, personalPhoto: true, isVideo: false, saveEditedPhotos: false, saveCapturedMedia: false, signup: false)!
let _ = currentAvatarMixin.swap(mixin)
mixin.requestSearchController = { assetsController in
let controller = WebSearchController(context: context, peer: peer, configuration: searchBotsConfiguration, mode: .avatar(initialQuery: nil, completion: { result in
@ -1423,7 +1428,7 @@ public func settingsController(context: AccountContext, accountManager: AccountM
}
return state
}
updateAvatarDisposable.set((updateAccountPhoto(account: context.account, resource: nil, videoResource: nil, mapResourceToAvatarSizes: { resource, representations in
updateAvatarDisposable.set((updateAccountPhoto(account: context.account, resource: nil, videoResource: nil, videoStartTimestamp: nil, mapResourceToAvatarSizes: { resource, representations in
return mapResourceToAvatarSizes(postbox: context.account.postbox, resource: resource, representations: representations)
}) |> deliverOnMainQueue).start(next: { result in
switch result {

View File

@ -88,21 +88,29 @@ public final class TelegramMediaImage: Media, Equatable, Codable {
public final class VideoRepresentation: Equatable, PostboxCoding {
public let dimensions: PixelDimensions
public let resource: TelegramMediaResource
public let startTimestamp: Double?
public init(dimensions: PixelDimensions, resource: TelegramMediaResource) {
public init(dimensions: PixelDimensions, resource: TelegramMediaResource, startTimestamp: Double?) {
self.dimensions = dimensions
self.resource = resource
self.startTimestamp = startTimestamp
}
public init(decoder: PostboxDecoder) {
self.dimensions = PixelDimensions(width: decoder.decodeInt32ForKey("w", orElse: 0), height: decoder.decodeInt32ForKey("h", orElse: 0))
self.resource = decoder.decodeObjectForKey("r") as! TelegramMediaResource
self.startTimestamp = decoder.decodeOptionalDoubleForKey("s")
}
public func encode(_ encoder: PostboxEncoder) {
encoder.encodeInt32(self.dimensions.width, forKey: "w")
encoder.encodeInt32(self.dimensions.height, forKey: "h")
encoder.encodeObject(self.resource, forKey: "r")
if let startTimestamp = self.startTimestamp {
encoder.encodeDouble(startTimestamp, forKey: "s")
} else {
encoder.encodeNil(forKey: "s")
}
}
public static func ==(lhs: VideoRepresentation, rhs: VideoRepresentation) -> Bool {
@ -115,6 +123,9 @@ public final class TelegramMediaImage: Media, Equatable, Codable {
if !lhs.resource.isEqual(to: rhs.resource) {
return false
}
if lhs.startTimestamp != rhs.startTimestamp {
return false
}
return true
}
}

View File

@ -106,7 +106,7 @@ fileprivate let parsers: [Int32 : (BufferReader) -> Any?] = {
dict[997055186] = { return Api.PollAnswerVoters.parse_pollAnswerVoters($0) }
dict[-1705233435] = { return Api.account.PasswordSettings.parse_passwordSettings($0) }
dict[-288727837] = { return Api.LangPackLanguage.parse_langPackLanguage($0) }
dict[1130084743] = { return Api.VideoSize.parse_videoSize($0) }
dict[-399391402] = { return Api.VideoSize.parse_videoSize($0) }
dict[-1000708810] = { return Api.help.AppUpdate.parse_noAppUpdate($0) }
dict[497489295] = { return Api.help.AppUpdate.parse_appUpdate($0) }
dict[-209337866] = { return Api.LangPackDifference.parse_langPackDifference($0) }
@ -254,7 +254,6 @@ fileprivate let parsers: [Int32 : (BufferReader) -> Any?] = {
dict[-1512627963] = { return Api.Update.parse_updateDialogFilterOrder($0) }
dict[889491791] = { return Api.Update.parse_updateDialogFilters($0) }
dict[643940105] = { return Api.Update.parse_updatePhoneCallSignalingData($0) }
dict[-1812551503] = { return Api.Update.parse_updateChannelParticipant($0) }
dict[136574537] = { return Api.messages.VotesList.parse_votesList($0) }
dict[1558266229] = { return Api.PopularContact.parse_popularContact($0) }
dict[-373643672] = { return Api.FolderPeer.parse_folderPeer($0) }

View File

@ -4710,50 +4710,58 @@ public extension Api {
}
public enum VideoSize: TypeConstructorDescription {
case videoSize(type: String, location: Api.FileLocation, w: Int32, h: Int32, size: Int32)
case videoSize(flags: Int32, type: String, location: Api.FileLocation, w: Int32, h: Int32, size: Int32, videoStartTs: Double?)
public func serialize(_ buffer: Buffer, _ boxed: Swift.Bool) {
switch self {
case .videoSize(let type, let location, let w, let h, let size):
case .videoSize(let flags, let type, let location, let w, let h, let size, let videoStartTs):
if boxed {
buffer.appendInt32(1130084743)
buffer.appendInt32(-399391402)
}
serializeInt32(flags, buffer: buffer, boxed: false)
serializeString(type, buffer: buffer, boxed: false)
location.serialize(buffer, true)
serializeInt32(w, buffer: buffer, boxed: false)
serializeInt32(h, buffer: buffer, boxed: false)
serializeInt32(size, buffer: buffer, boxed: false)
if Int(flags) & Int(1 << 0) != 0 {serializeDouble(videoStartTs!, buffer: buffer, boxed: false)}
break
}
}
public func descriptionFields() -> (String, [(String, Any)]) {
switch self {
case .videoSize(let type, let location, let w, let h, let size):
return ("videoSize", [("type", type), ("location", location), ("w", w), ("h", h), ("size", size)])
case .videoSize(let flags, let type, let location, let w, let h, let size, let videoStartTs):
return ("videoSize", [("flags", flags), ("type", type), ("location", location), ("w", w), ("h", h), ("size", size), ("videoStartTs", videoStartTs)])
}
}
public static func parse_videoSize(_ reader: BufferReader) -> VideoSize? {
var _1: String?
_1 = parseString(reader)
var _2: Api.FileLocation?
var _1: Int32?
_1 = reader.readInt32()
var _2: String?
_2 = parseString(reader)
var _3: Api.FileLocation?
if let signature = reader.readInt32() {
_2 = Api.parse(reader, signature: signature) as? Api.FileLocation
_3 = Api.parse(reader, signature: signature) as? Api.FileLocation
}
var _3: Int32?
_3 = reader.readInt32()
var _4: Int32?
_4 = reader.readInt32()
var _5: Int32?
_5 = reader.readInt32()
var _6: Int32?
_6 = reader.readInt32()
var _7: Double?
if Int(_1!) & Int(1 << 0) != 0 {_7 = reader.readDouble() }
let _c1 = _1 != nil
let _c2 = _2 != nil
let _c3 = _3 != nil
let _c4 = _4 != nil
let _c5 = _5 != nil
if _c1 && _c2 && _c3 && _c4 && _c5 {
return Api.VideoSize.videoSize(type: _1!, location: _2!, w: _3!, h: _4!, size: _5!)
let _c6 = _6 != nil
let _c7 = (Int(_1!) & Int(1 << 0) == 0) || _7 != nil
if _c1 && _c2 && _c3 && _c4 && _c5 && _c6 && _c7 {
return Api.VideoSize.videoSize(flags: _1!, type: _2!, location: _3!, w: _4!, h: _5!, size: _6!, videoStartTs: _7)
}
else {
return nil
@ -6029,7 +6037,6 @@ public extension Api {
case updateDialogFilterOrder(order: [Int32])
case updateDialogFilters
case updatePhoneCallSignalingData(phoneCallId: Int64, data: Buffer)
case updateChannelParticipant(channelId: Int32, prevParticipant: Api.ChannelParticipant, newParticipant: Api.ChannelParticipant, qts: Int32)
public func serialize(_ buffer: Buffer, _ boxed: Swift.Bool) {
switch self {
@ -6710,15 +6717,6 @@ public extension Api {
serializeInt64(phoneCallId, buffer: buffer, boxed: false)
serializeBytes(data, buffer: buffer, boxed: false)
break
case .updateChannelParticipant(let channelId, let prevParticipant, let newParticipant, let qts):
if boxed {
buffer.appendInt32(-1812551503)
}
serializeInt32(channelId, buffer: buffer, boxed: false)
prevParticipant.serialize(buffer, true)
newParticipant.serialize(buffer, true)
serializeInt32(qts, buffer: buffer, boxed: false)
break
}
}
@ -6886,8 +6884,6 @@ public extension Api {
return ("updateDialogFilters", [])
case .updatePhoneCallSignalingData(let phoneCallId, let data):
return ("updatePhoneCallSignalingData", [("phoneCallId", phoneCallId), ("data", data)])
case .updateChannelParticipant(let channelId, let prevParticipant, let newParticipant, let qts):
return ("updateChannelParticipant", [("channelId", channelId), ("prevParticipant", prevParticipant), ("newParticipant", newParticipant), ("qts", qts)])
}
}
@ -8233,30 +8229,6 @@ public extension Api {
return nil
}
}
public static func parse_updateChannelParticipant(_ reader: BufferReader) -> Update? {
var _1: Int32?
_1 = reader.readInt32()
var _2: Api.ChannelParticipant?
if let signature = reader.readInt32() {
_2 = Api.parse(reader, signature: signature) as? Api.ChannelParticipant
}
var _3: Api.ChannelParticipant?
if let signature = reader.readInt32() {
_3 = Api.parse(reader, signature: signature) as? Api.ChannelParticipant
}
var _4: Int32?
_4 = reader.readInt32()
let _c1 = _1 != nil
let _c2 = _2 != nil
let _c3 = _3 != nil
let _c4 = _4 != nil
if _c1 && _c2 && _c3 && _c4 {
return Api.Update.updateChannelParticipant(channelId: _1!, prevParticipant: _2!, newParticipant: _3!, qts: _4!)
}
else {
return nil
}
}
}
public enum PopularContact: TypeConstructorDescription {

View File

@ -4421,13 +4421,12 @@ public extension Api {
})
}
public static func getBroadcastStats(flags: Int32, channel: Api.InputChannel, tzOffset: Int32) -> (FunctionDescription, Buffer, DeserializeFunctionResponse<Api.stats.BroadcastStats>) {
public static func getBroadcastStats(flags: Int32, channel: Api.InputChannel) -> (FunctionDescription, Buffer, DeserializeFunctionResponse<Api.stats.BroadcastStats>) {
let buffer = Buffer()
buffer.appendInt32(-433058374)
buffer.appendInt32(-1421720550)
serializeInt32(flags, buffer: buffer, boxed: false)
channel.serialize(buffer, true)
serializeInt32(tzOffset, buffer: buffer, boxed: false)
return (FunctionDescription(name: "stats.getBroadcastStats", parameters: [("flags", flags), ("channel", channel), ("tzOffset", tzOffset)]), buffer, DeserializeFunctionResponse { (buffer: Buffer) -> Api.stats.BroadcastStats? in
return (FunctionDescription(name: "stats.getBroadcastStats", parameters: [("flags", flags), ("channel", channel)]), buffer, DeserializeFunctionResponse { (buffer: Buffer) -> Api.stats.BroadcastStats? in
let reader = BufferReader(buffer)
var result: Api.stats.BroadcastStats?
if let signature = reader.readInt32() {
@ -6803,13 +6802,14 @@ public extension Api {
})
}
public static func uploadProfilePhoto(flags: Int32, file: Api.InputFile?, video: Api.InputFile?) -> (FunctionDescription, Buffer, DeserializeFunctionResponse<Api.photos.Photo>) {
public static func uploadProfilePhoto(flags: Int32, file: Api.InputFile?, video: Api.InputFile?, videoStartTs: Double?) -> (FunctionDescription, Buffer, DeserializeFunctionResponse<Api.photos.Photo>) {
let buffer = Buffer()
buffer.appendInt32(28740206)
buffer.appendInt32(-1980559511)
serializeInt32(flags, buffer: buffer, boxed: false)
if Int(flags) & Int(1 << 0) != 0 {file!.serialize(buffer, true)}
if Int(flags) & Int(1 << 1) != 0 {video!.serialize(buffer, true)}
return (FunctionDescription(name: "photos.uploadProfilePhoto", parameters: [("flags", flags), ("file", file), ("video", video)]), buffer, DeserializeFunctionResponse { (buffer: Buffer) -> Api.photos.Photo? in
if Int(flags) & Int(1 << 2) != 0 {serializeDouble(videoStartTs!, buffer: buffer, boxed: false)}
return (FunctionDescription(name: "photos.uploadProfilePhoto", parameters: [("flags", flags), ("file", file), ("video", video), ("videoStartTs", videoStartTs)]), buffer, DeserializeFunctionResponse { (buffer: Buffer) -> Api.photos.Photo? in
let reader = BufferReader(buffer)
var result: Api.photos.Photo?
if let signature = reader.readInt32() {

View File

@ -470,7 +470,7 @@ public func signUpWithName(accountManager: AccountManager, account: Unauthorized
let resource = LocalFileMediaResource(fileId: arc4random64())
account.postbox.mediaBox.storeResourceData(resource.id, data: avatarData)
return updatePeerPhotoInternal(postbox: account.postbox, network: account.network, stateManager: nil, accountPeerId: user.id, peer: .single(user), photo: uploadedPeerPhoto(postbox: account.postbox, network: account.network, resource: resource), video: nil, mapResourceToAvatarSizes: { _, _ in .single([:]) })
return updatePeerPhotoInternal(postbox: account.postbox, network: account.network, stateManager: nil, accountPeerId: user.id, peer: .single(user), photo: uploadedPeerPhoto(postbox: account.postbox, network: account.network, resource: resource), video: nil, videoStartTimestamp: nil, mapResourceToAvatarSizes: { _, _ in .single([:]) })
|> `catch` { _ -> Signal<UpdatePeerPhotoStatus, SignUpError> in
return .complete()
}

View File

@ -15,8 +15,8 @@ public enum UploadPeerPhotoError {
case generic
}
public func updateAccountPhoto(account: Account, resource: MediaResource?, videoResource: MediaResource?, mapResourceToAvatarSizes: @escaping (MediaResource, [TelegramMediaImageRepresentation]) -> Signal<[Int: Data], NoError>) -> Signal<UpdatePeerPhotoStatus, UploadPeerPhotoError> {
return updatePeerPhoto(postbox: account.postbox, network: account.network, stateManager: account.stateManager, accountPeerId: account.peerId, peerId: account.peerId, photo: resource.flatMap({ uploadedPeerPhoto(postbox: account.postbox, network: account.network, resource: $0) }), video: videoResource.flatMap({ uploadedPeerVideo(postbox: account.postbox, network: account.network, messageMediaPreuploadManager: account.messageMediaPreuploadManager, resource: $0) |> map(Optional.init) }), mapResourceToAvatarSizes: mapResourceToAvatarSizes)
public func updateAccountPhoto(account: Account, resource: MediaResource?, videoResource: MediaResource?, videoStartTimestamp: Double?, mapResourceToAvatarSizes: @escaping (MediaResource, [TelegramMediaImageRepresentation]) -> Signal<[Int: Data], NoError>) -> Signal<UpdatePeerPhotoStatus, UploadPeerPhotoError> {
return updatePeerPhoto(postbox: account.postbox, network: account.network, stateManager: account.stateManager, accountPeerId: account.peerId, peerId: account.peerId, photo: resource.flatMap({ uploadedPeerPhoto(postbox: account.postbox, network: account.network, resource: $0) }), video: videoResource.flatMap({ uploadedPeerVideo(postbox: account.postbox, network: account.network, messageMediaPreuploadManager: account.messageMediaPreuploadManager, resource: $0) |> map(Optional.init) }), videoStartTimestamp: videoStartTimestamp, mapResourceToAvatarSizes: mapResourceToAvatarSizes)
}
public struct UploadedPeerPhotoData {
@ -49,11 +49,11 @@ public func uploadedPeerVideo(postbox: Postbox, network: Network, messageMediaPr
}
}
public func updatePeerPhoto(postbox: Postbox, network: Network, stateManager: AccountStateManager?, accountPeerId: PeerId, peerId: PeerId, photo: Signal<UploadedPeerPhotoData, NoError>?, video: Signal<UploadedPeerPhotoData?, NoError>? = nil, mapResourceToAvatarSizes: @escaping (MediaResource, [TelegramMediaImageRepresentation]) -> Signal<[Int: Data], NoError>) -> Signal<UpdatePeerPhotoStatus, UploadPeerPhotoError> {
return updatePeerPhotoInternal(postbox: postbox, network: network, stateManager: stateManager, accountPeerId: accountPeerId, peer: postbox.loadedPeerWithId(peerId), photo: photo, video: video, mapResourceToAvatarSizes: mapResourceToAvatarSizes)
public func updatePeerPhoto(postbox: Postbox, network: Network, stateManager: AccountStateManager?, accountPeerId: PeerId, peerId: PeerId, photo: Signal<UploadedPeerPhotoData, NoError>?, video: Signal<UploadedPeerPhotoData?, NoError>? = nil, videoStartTimestamp: Double? = nil, mapResourceToAvatarSizes: @escaping (MediaResource, [TelegramMediaImageRepresentation]) -> Signal<[Int: Data], NoError>) -> Signal<UpdatePeerPhotoStatus, UploadPeerPhotoError> {
return updatePeerPhotoInternal(postbox: postbox, network: network, stateManager: stateManager, accountPeerId: accountPeerId, peer: postbox.loadedPeerWithId(peerId), photo: photo, video: video, videoStartTimestamp: videoStartTimestamp, mapResourceToAvatarSizes: mapResourceToAvatarSizes)
}
public func updatePeerPhotoInternal(postbox: Postbox, network: Network, stateManager: AccountStateManager?, accountPeerId: PeerId, peer: Signal<Peer, NoError>, photo: Signal<UploadedPeerPhotoData, NoError>?, video: Signal<UploadedPeerPhotoData?, NoError>?, mapResourceToAvatarSizes: @escaping (MediaResource, [TelegramMediaImageRepresentation]) -> Signal<[Int: Data], NoError>) -> Signal<UpdatePeerPhotoStatus, UploadPeerPhotoError> {
public func updatePeerPhotoInternal(postbox: Postbox, network: Network, stateManager: AccountStateManager?, accountPeerId: PeerId, peer: Signal<Peer, NoError>, photo: Signal<UploadedPeerPhotoData, NoError>?, video: Signal<UploadedPeerPhotoData?, NoError>?, videoStartTimestamp: Double?, mapResourceToAvatarSizes: @escaping (MediaResource, [TelegramMediaImageRepresentation]) -> Signal<[Int: Data], NoError>) -> Signal<UpdatePeerPhotoStatus, UploadPeerPhotoError> {
return peer
|> mapError { _ in return .generic }
|> mapToSignal { peer -> Signal<UpdatePeerPhotoStatus, UploadPeerPhotoError> in
@ -116,12 +116,15 @@ public func updatePeerPhotoInternal(postbox: Postbox, network: Network, stateMan
}
}
var flags: Int32 = 0
var flags: Int32 = (1 << 0)
if let _ = videoFile {
flags |= (1 << 0)
flags |= (1 << 1)
if let _ = videoStartTimestamp {
flags |= (1 << 2)
}
}
return network.request(Api.functions.photos.uploadProfilePhoto(flags: flags, file: file, video: videoFile))
return network.request(Api.functions.photos.uploadProfilePhoto(flags: flags, file: file, video: videoFile, videoStartTs: videoStartTimestamp))
|> mapError { _ in return UploadPeerPhotoError.generic }
|> mapToSignal { photo -> Signal<(UpdatePeerPhotoStatus, MediaResource?), UploadPeerPhotoError> in
var representations: [TelegramMediaImageRepresentation] = []
@ -151,16 +154,14 @@ public func updatePeerPhotoInternal(postbox: Postbox, network: Network, stateMan
if let videoSizes = videoSizes {
for size in videoSizes {
switch size {
case let .videoSize(type, location, w, h, size):
case let .videoSize(_, type, location, w, h, size, videoStartTs):
let resource: TelegramMediaResource
switch location {
case let .fileLocationToBeDeprecated(volumeId, localId):
resource = CloudPhotoSizeMediaResource(datacenterId: dcId, photoId: id, accessHash: accessHash, sizeSpec: type, volumeId: volumeId, localId: localId, size: Int(size), fileReference: fileReference.makeData())
}
videoRepresentations.append(TelegramMediaImage.VideoRepresentation(
dimensions: PixelDimensions(width: w, height: h),
resource: resource))
videoRepresentations.append(TelegramMediaImage.VideoRepresentation(dimensions: PixelDimensions(width: w, height: h), resource: resource, startTimestamp: videoStartTs))
}
}
}

View File

@ -198,10 +198,10 @@ private func requestChannelStats(postbox: Postbox, network: Network, datacenterI
signal = network.download(datacenterId: Int(datacenterId), isMedia: false, tag: nil)
|> castError(MTRpcError.self)
|> mapToSignal { worker in
return worker.request(Api.functions.stats.getBroadcastStats(flags: flags, channel: inputChannel, tzOffset: 0))
return worker.request(Api.functions.stats.getBroadcastStats(flags: flags, channel: inputChannel))
}
} else {
signal = network.request(Api.functions.stats.getBroadcastStats(flags: flags, channel: inputChannel, tzOffset: 0))
signal = network.request(Api.functions.stats.getBroadcastStats(flags: flags, channel: inputChannel))
}
return signal

View File

@ -156,7 +156,7 @@ func telegramMediaFileFromApiDocument(_ document: Api.Document) -> TelegramMedia
if let videoThumbs = videoThumbs {
for thumb in videoThumbs {
switch thumb {
case let .videoSize(type, location, w, h, _):
case let .videoSize(_, type, location, w, h, _, _):
let resource: TelegramMediaResource
switch location {
case let .fileLocationToBeDeprecated(volumeId, localId):

View File

@ -44,16 +44,14 @@ func telegramMediaImageFromApiPhoto(_ photo: Api.Photo) -> TelegramMediaImage? {
if let videoSizes = videoSizes {
for size in videoSizes {
switch size {
case let .videoSize(type, location, w, h, size):
case let .videoSize(_, type, location, w, h, size, videoStartTs):
let resource: TelegramMediaResource
switch location {
case let .fileLocationToBeDeprecated(volumeId, localId):
resource = CloudPhotoSizeMediaResource(datacenterId: dcId, photoId: id, accessHash: accessHash, sizeSpec: type, volumeId: volumeId, localId: localId, size: Int(size), fileReference: fileReference.makeData())
}
videoRepresentations.append(TelegramMediaImage.VideoRepresentation(
dimensions: PixelDimensions(width: w, height: h),
resource: resource))
videoRepresentations.append(TelegramMediaImage.VideoRepresentation(dimensions: PixelDimensions(width: w, height: h), resource: resource, startTimestamp: videoStartTs))
}
}
}

View File

@ -325,7 +325,7 @@ public func createChannelController(context: AccountContext) -> ViewController {
}
}
let mixin = TGMediaAvatarMenuMixin(context: legacyController.context, parentController: emptyController, hasSearchButton: true, hasDeleteButton: stateValue.with({ $0.avatar }) != nil, hasViewButton: false, personalPhoto: false, saveEditedPhotos: false, saveCapturedMedia: false, signup: true)!
let mixin = TGMediaAvatarMenuMixin(context: legacyController.context, parentController: emptyController, hasSearchButton: true, hasDeleteButton: stateValue.with({ $0.avatar }) != nil, hasViewButton: false, personalPhoto: false, isVideo: false, saveEditedPhotos: false, saveCapturedMedia: false, signup: true)!
let _ = currentAvatarMixin.swap(mixin)
mixin.requestSearchController = { assetsController in
let controller = WebSearchController(context: context, peer: peer, configuration: searchBotsConfiguration, mode: .avatar(initialQuery: title, completion: { result in

View File

@ -583,7 +583,7 @@ public func createGroupControllerImpl(context: AccountContext, peerIds: [PeerId]
}
}
let mixin = TGMediaAvatarMenuMixin(context: legacyController.context, parentController: emptyController, hasSearchButton: true, hasDeleteButton: stateValue.with({ $0.avatar }) != nil, hasViewButton: false, personalPhoto: false, saveEditedPhotos: false, saveCapturedMedia: false, signup: true)!
let mixin = TGMediaAvatarMenuMixin(context: legacyController.context, parentController: emptyController, hasSearchButton: true, hasDeleteButton: stateValue.with({ $0.avatar }) != nil, hasViewButton: false, personalPhoto: false, isVideo: false, saveEditedPhotos: false, saveCapturedMedia: false, signup: true)!
let _ = currentAvatarMixin.swap(mixin)
mixin.requestSearchController = { assetsController in
let controller = WebSearchController(context: context, peer: peer, configuration: searchBotsConfiguration, mode: .avatar(initialQuery: title, completion: { result in

View File

@ -244,7 +244,10 @@ final class PeerInfoAvatarListItemNode: ASDisplayNode {
strongSelf.videoNode?.isHidden = !owns
}
}
if let startTimestamp = video.startTimestamp {
videoNode.seek(startTimestamp)
}
self.videoContent = videoContent
self.videoNode = videoNode

View File

@ -3303,7 +3303,7 @@ private final class PeerInfoScreenNode: ViewControllerTracingNode, UIScrollViewD
}))
}
let mixin = TGMediaAvatarMenuMixin(context: legacyController.context, parentController: emptyController, hasSearchButton: true, hasDeleteButton: hasPhotos, hasViewButton: false, personalPhoto: false, saveEditedPhotos: false, saveCapturedMedia: false, signup: true)!
let mixin = TGMediaAvatarMenuMixin(context: legacyController.context, parentController: emptyController, hasSearchButton: true, hasDeleteButton: hasPhotos, hasViewButton: false, personalPhoto: false, isVideo: false, saveEditedPhotos: false, saveCapturedMedia: false, signup: true)!
let _ = strongSelf.currentAvatarMixin.swap(mixin)
mixin.requestSearchController = { assetsController in
guard let strongSelf = self else {