Mirror of https://github.com/Swiftgram/Telegram-iOS.git

Video avatar fixes

Commit: cccb11a5d6
Parent: a2b82285db
@@ -5635,6 +5635,8 @@ Any member of this group will be able to see messages in the channel.";
 
 "Settings.SetProfilePhotoOrVideo" = "Set Profile Photo or Video";
 "Settings.SetNewProfilePhotoOrVideo" = "Set New Profile Photo or Video";
+"Settings.ViewVideo" = "View Video";
+"Settings.RemoveVideo" = "Remove Video";
 
 "Conversation.Unarchive" = "Unarchive";
 "Conversation.UnarchiveDone" = "The chat was moved to your main list.";
@@ -24,7 +24,7 @@ typedef void (^TGMediaAvatarPresentImpl)(id<LegacyComponentsContext>, void (^)(U
 
 - (instancetype)initWithContext:(id<LegacyComponentsContext>)context parentController:(TGViewController *)parentController hasDeleteButton:(bool)hasDeleteButton saveEditedPhotos:(bool)saveEditedPhotos saveCapturedMedia:(bool)saveCapturedMedia;
 - (instancetype)initWithContext:(id<LegacyComponentsContext>)context parentController:(TGViewController *)parentController hasDeleteButton:(bool)hasDeleteButton personalPhoto:(bool)personalPhoto saveEditedPhotos:(bool)saveEditedPhotos saveCapturedMedia:(bool)saveCapturedMedia;
-- (instancetype)initWithContext:(id<LegacyComponentsContext>)context parentController:(TGViewController *)parentController hasSearchButton:(bool)hasSearchButton hasDeleteButton:(bool)hasDeleteButton hasViewButton:(bool)hasViewButton personalPhoto:(bool)personalPhoto saveEditedPhotos:(bool)saveEditedPhotos saveCapturedMedia:(bool)saveCapturedMedia signup:(bool)signup;
+- (instancetype)initWithContext:(id<LegacyComponentsContext>)context parentController:(TGViewController *)parentController hasSearchButton:(bool)hasSearchButton hasDeleteButton:(bool)hasDeleteButton hasViewButton:(bool)hasViewButton personalPhoto:(bool)personalPhoto isVideo:(bool)isVideo saveEditedPhotos:(bool)saveEditedPhotos saveCapturedMedia:(bool)saveCapturedMedia signup:(bool)signup;
 - (TGMenuSheetController *)present;
 
 @end
@@ -4,8 +4,6 @@
 
 @interface TGPhotoAvatarCropView : UIView
 
-@property (nonatomic, readonly) PGPhotoEditorView *fullPreviewView;
-
 @property (nonatomic, strong) UIImage *image;
 
 @property (nonatomic, readonly) CGSize originalSize;
@@ -22,7 +20,7 @@
 @property (nonatomic, readonly) bool isTracking;
 @property (nonatomic, readonly) bool isAnimating;
 
-- (instancetype)initWithOriginalSize:(CGSize)originalSize screenSize:(CGSize)screenSize;
+- (instancetype)initWithOriginalSize:(CGSize)originalSize screenSize:(CGSize)screenSize fullPreviewView:(PGPhotoEditorView *)fullPreviewView;
 
 - (void)setSnapshotImage:(UIImage *)image;
 - (void)setSnapshotView:(UIView *)snapshotView;
@@ -35,8 +33,12 @@
 
 - (void)animateTransitionIn;
 - (void)animateTransitionOutSwitching:(bool)switching;
+- (void)animateTransitionOut;
 - (void)transitionInFinishedFromCamera:(bool)fromCamera;
 
+- (void)closeCurtains;
+- (void)openCurtains;
+
 - (void)invalidateCropRect;
 
 - (UIImage *)currentImage;
@@ -15,8 +15,7 @@ typedef NS_OPTIONS(NSUInteger, TGPhotoEditorTab) {
 TGPhotoEditorAspectRatioTab = 1 << 10,
 TGPhotoEditorTintTab = 1 << 11,
 TGPhotoEditorBlurTab = 1 << 12,
-TGPhotoEditorCurvesTab = 1 << 13,
-TGPhotoEditorPreviewTab = 1 << 14
+TGPhotoEditorCurvesTab = 1 << 13
 };
 
 typedef enum
@@ -15,6 +15,7 @@ typedef enum
 TGMediaVideoConversionPresetVideoMessage,
 TGMediaVideoConversionPresetProfile,
 TGMediaVideoConversionPresetProfileHigh,
+TGMediaVideoConversionPresetProfileVeryHigh,
 TGMediaVideoConversionPresetPassthrough
 } TGMediaVideoConversionPreset;
 
@@ -169,6 +169,25 @@
 return MAX(_originalSize.width, _originalSize.height) * 0.005f;
 }
 
+- (CGRect)normalizedCropRect:(CGRect)cropRect
+{
+CGRect normalizedCropRect = CGRectMake(0.0f, 0.0f, 1.0f, 1.0f);
+normalizedCropRect = CGRectMake(MAX(0.0, MIN(1.0, cropRect.origin.x / _originalSize.width)), MAX(0.0, MIN(1.0, cropRect.origin.y / _originalSize.height)), MAX(0.0, MIN(1.0, cropRect.size.width / _originalSize.width)), MAX(0.0, MIN(1.0, cropRect.size.height / _originalSize.height)));
+return normalizedCropRect;
+}
+
+- (void)setCropRect:(CGRect)cropRect
+{
+_cropRect = cropRect;
+_cropFilter.cropRegion = [self normalizedCropRect:cropRect];
+}
+
+- (void)setCropOrientation:(UIImageOrientation)cropOrientation
+{
+_cropOrientation = cropOrientation;
+
+}
+
 - (void)setPlayerItem:(AVPlayerItem *)playerItem forCropRect:(CGRect)cropRect cropRotation:(CGFloat)cropRotation cropOrientation:(UIImageOrientation)cropOrientation cropMirrored:(bool)cropMirrored {
 [_toolComposer invalidate];
 _currentProcessChain = nil;
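The normalizedCropRect: helper added above maps a crop rectangle given in source-pixel coordinates into the unit square that a GPUImage crop region expects, clamping each component to [0, 1]. A minimal standalone sketch of the same math (the free-function form and its name are illustrative, not part of the commit):

    #import <UIKit/UIKit.h>

    // Clamp a pixel-space crop rect into a normalized [0, 1] crop region,
    // dividing by the full frame size, as the method above does.
    static CGRect NormalizedCropRect(CGRect cropRect, CGSize originalSize) {
        CGFloat x = MAX(0.0, MIN(1.0, cropRect.origin.x / originalSize.width));
        CGFloat y = MAX(0.0, MIN(1.0, cropRect.origin.y / originalSize.height));
        CGFloat w = MAX(0.0, MIN(1.0, cropRect.size.width / originalSize.width));
        CGFloat h = MAX(0.0, MIN(1.0, cropRect.size.height / originalSize.height));
        return CGRectMake(x, y, w, h);
    }

For a 1920x1080 frame and a crop rect of (480, 270, 960, 540), this yields the normalized region (0.25, 0.25, 0.5, 0.5).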
@@ -182,11 +201,10 @@
 
 _rotationMode = kGPUImageNoRotation;
 if (cropOrientation != UIImageOrientationUp || cropMirrored || hasCropping) {
-CGRect normalizedCropRect = CGRectMake(0.0f, 0.0f, 1.0f, 1.0f);
-if (hasCropping) {
-normalizedCropRect = CGRectMake(MAX(0.0, MIN(1.0, cropRect.origin.x / _originalSize.width)), MAX(0.0, MIN(1.0, cropRect.origin.y / _originalSize.height)), MAX(0.0, MIN(1.0, cropRect.size.width / _originalSize.width)), MAX(0.0, MIN(1.0, cropRect.size.height / _originalSize.height)));
-}
-_cropFilter = [[GPUImageCropFilter alloc] initWithCropRegion:normalizedCropRect];
+if (_cropFilter == nil)
+_cropFilter = [[GPUImageCropFilter alloc] initWithCropRegion:[self normalizedCropRect:cropRect]];
+else
+_cropFilter.cropRegion = [self normalizedCropRect:cropRect];
 if (cropOrientation != UIImageOrientationUp || cropMirrored) {
 switch (cropOrientation) {
 case UIImageOrientationLeft:
@@ -411,15 +429,17 @@
 }
 _finalFilter = lastFilter;
 
-if (_cropFilter != nil && self.cropOnLast) {
+if (self.cropOnLast) {
+if (_cropFilter == nil)
+_cropFilter = [[GPUImageCropFilter alloc] initWithCropRegion:[self normalizedCropRect:_cropRect]];
+
 for (PGPhotoEditorView *view in _additionalOutputs) {
 [_finalFilter addTarget:view];
 }
 [_finalFilter addTarget:_cropFilter];
-_finalFilter = _cropFilter;
 
 if (previewOutput != nil) {
-[_finalFilter addTarget:previewOutput.imageView];
+[_cropFilter addTarget:previewOutput.imageView];
 }
 } else {
 if (previewOutput != nil) {
@@ -442,15 +462,29 @@
 if (_finalFilter == nil)
 return;
 
+[_cropFilter removeAllTargets];
 [_finalFilter removeAllTargets];
 
-if (self.previewOutput != nil) {
-[_finalFilter addTarget:self.previewOutput.imageView];
+if (self.cropOnLast) {
+for (PGPhotoEditorView *view in _additionalOutputs) {
+[_finalFilter addTarget:view];
+}
+
+[_finalFilter addTarget:_cropFilter];
+
+if (self.previewOutput != nil) {
+[_cropFilter addTarget:self.previewOutput.imageView];
+}
+} else {
+for (PGPhotoEditorView *view in _additionalOutputs) {
+[_finalFilter addTarget:view];
+}
+
+if (self.previewOutput != nil) {
+[_finalFilter addTarget:self.previewOutput.imageView];
+}
 }
 
-for (PGPhotoEditorView *view in _additionalOutputs) {
-[_finalFilter addTarget:view];
-}
 
 if (_histogramGenerator != nil && !self.standalone) {
 [_finalFilter addTarget:_histogramGenerator];
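The two hunks above change how the crop filter is wired into the GPUImage chain: the additional outputs (the full-size avatar preview) keep receiving uncropped frames from the final filter, and only the small preview image view is fed through the crop filter when cropOnLast is set. A rough sketch of that target wiring under those assumptions (generic GPUImage types stand in for the project's own view classes, and the helper itself is illustrative):

    #import <GPUImage/GPUImage.h>

    // Sketch: attach outputs either behind a trailing crop filter (cropOnLast)
    // or directly to the final filter, mirroring the branches above.
    static void AttachOutputs(GPUImageOutput *finalFilter,
                              GPUImageCropFilter *cropFilter,
                              NSArray<GPUImageView *> *additionalOutputs,
                              GPUImageView *previewView,
                              BOOL cropOnLast) {
        for (GPUImageView *view in additionalOutputs) {
            [finalFilter addTarget:view];        // uncropped frames for the full preview
        }
        if (cropOnLast) {
            [finalFilter addTarget:cropFilter];  // crop applied only for the small preview
            if (previewView != nil) {
                [cropFilter addTarget:previewView];
            }
        } else if (previewView != nil) {
            [finalFilter addTarget:previewView];
        }
    }

Keeping the crop filter out of the shared path is what lets the avatar crop view show the full, uncropped video while the editor preview still shows the cropped result.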
@@ -979,7 +979,7 @@ const NSUInteger TGAttachmentDisplayedAssetLimit = 500;
 [transition dismissAnimated:true completion:^
 {
 strongSelf->_hiddenItem = nil;
-[strongSelf updateHiddenCellAnimated:false];
+[strongSelf updateHiddenCellAnimated:true];
 
 dispatch_async(dispatch_get_main_queue(), ^
 {
@@ -27,6 +27,7 @@
 bool _saveCapturedMedia;
 bool _saveEditedPhotos;
 bool _signup;
+bool _isVideo;
 }
 @end
 
@@ -39,10 +40,10 @@
 
 - (instancetype)initWithContext:(id<LegacyComponentsContext>)context parentController:(TGViewController *)parentController hasDeleteButton:(bool)hasDeleteButton personalPhoto:(bool)personalPhoto saveEditedPhotos:(bool)saveEditedPhotos saveCapturedMedia:(bool)saveCapturedMedia
 {
-return [self initWithContext:context parentController:parentController hasSearchButton:false hasDeleteButton:hasDeleteButton hasViewButton:false personalPhoto:personalPhoto saveEditedPhotos:saveEditedPhotos saveCapturedMedia:saveCapturedMedia signup:false];
+return [self initWithContext:context parentController:parentController hasSearchButton:false hasDeleteButton:hasDeleteButton hasViewButton:false personalPhoto:personalPhoto isVideo:false saveEditedPhotos:saveEditedPhotos saveCapturedMedia:saveCapturedMedia signup:false];
 }
 
-- (instancetype)initWithContext:(id<LegacyComponentsContext>)context parentController:(TGViewController *)parentController hasSearchButton:(bool)hasSearchButton hasDeleteButton:(bool)hasDeleteButton hasViewButton:(bool)hasViewButton personalPhoto:(bool)personalPhoto saveEditedPhotos:(bool)saveEditedPhotos saveCapturedMedia:(bool)saveCapturedMedia signup:(bool)signup
+- (instancetype)initWithContext:(id<LegacyComponentsContext>)context parentController:(TGViewController *)parentController hasSearchButton:(bool)hasSearchButton hasDeleteButton:(bool)hasDeleteButton hasViewButton:(bool)hasViewButton personalPhoto:(bool)personalPhoto isVideo:(bool)isVideo saveEditedPhotos:(bool)saveEditedPhotos saveCapturedMedia:(bool)saveCapturedMedia signup:(bool)signup
 {
 self = [super init];
 if (self != nil)
@@ -55,6 +56,7 @@
 _hasDeleteButton = hasDeleteButton;
 _hasViewButton = hasViewButton;
 _personalPhoto = ![TGCameraController useLegacyCamera] ? personalPhoto : false;
+_isVideo = isVideo;
 _signup = signup;
 }
 return self;
@@ -179,7 +181,7 @@
 
 if (_hasViewButton)
 {
-TGMenuSheetButtonItemView *viewItem = [[TGMenuSheetButtonItemView alloc] initWithTitle:TGLocalized(@"Settings.ViewPhoto") type:TGMenuSheetButtonTypeDefault fontSize:20.0 action:^
+TGMenuSheetButtonItemView *viewItem = [[TGMenuSheetButtonItemView alloc] initWithTitle:_isVideo ? TGLocalized(@"Settings.ViewVideo") : TGLocalized(@"Settings.ViewPhoto") type:TGMenuSheetButtonTypeDefault fontSize:20.0 action:^
 {
 __strong TGMediaAvatarMenuMixin *strongSelf = weakSelf;
 if (strongSelf == nil)
@@ -197,7 +199,7 @@
 
 if (_hasDeleteButton)
 {
-TGMenuSheetButtonItemView *deleteItem = [[TGMenuSheetButtonItemView alloc] initWithTitle:TGLocalized(@"GroupInfo.SetGroupPhotoDelete") type:TGMenuSheetButtonTypeDestructive fontSize:20.0 action:^
+TGMenuSheetButtonItemView *deleteItem = [[TGMenuSheetButtonItemView alloc] initWithTitle:_isVideo ? TGLocalized(@"Settings.RemoveVideo") : TGLocalized(@"GroupInfo.SetGroupPhotoDelete") type:TGMenuSheetButtonTypeDestructive fontSize:20.0 action:^
 {
 __strong TGMediaAvatarMenuMixin *strongSelf = weakSelf;
 if (strongSelf == nil)
@@ -158,11 +158,7 @@ typedef enum
 
 [strongSelf->_trimView setTrimming:true animated:true];
 
-if (strongSelf->_hasDotPicker) {
-[strongSelf setDotHandleHidden:true animated:false];
-} else {
-[strongSelf setScrubberHandleHidden:true animated:false];
-}
+[strongSelf setScrubberHandleHidden:true animated:false];
 };
 _trimView.didEndEditing = ^
 {
@@ -210,11 +206,7 @@ typedef enum
 
 [strongSelf->_trimView setTrimming:isTrimmed animated:true];
 
-if (strongSelf->_hasDotPicker) {
-[strongSelf setDotHandleHidden:false animated:true];
-} else {
-[strongSelf setScrubberHandleHidden:false animated:true];
-}
+[strongSelf setScrubberHandleHidden:false animated:true];
 
 [strongSelf cancelZoomIn];
 if (strongSelf->_zoomedIn)
@@ -262,13 +254,8 @@ typedef enum
 strongSelf->_trimStartValue = trimStartPosition;
 strongSelf->_trimEndValue = trimEndPosition;
 
-if (strongSelf->_hasDotPicker) {
-if (strongSelf->_value < trimStartPosition) {
-strongSelf->_value = trimStartPosition;
-}
-} else {
-[strongSelf setValue:trimStartPosition];
-}
+[strongSelf setValue:trimStartPosition];
 UIView *handle = strongSelf->_scrubberHandle;
 handle.center = CGPointMake(trimView.frame.origin.x + 12 + handle.frame.size.width / 2, handle.center.y);
 
@@ -333,13 +320,7 @@ typedef enum
 strongSelf->_trimStartValue = trimStartPosition;
 strongSelf->_trimEndValue = trimEndPosition;
 
-if (strongSelf->_hasDotPicker) {
-if (strongSelf->_value > trimEndPosition) {
-strongSelf->_value = trimEndPosition;
-}
-} else {
-[strongSelf setValue:trimEndPosition];
-}
+[strongSelf setValue:trimEndPosition];
 
 UIView *handle = strongSelf->_scrubberHandle;
 handle.center = CGPointMake(CGRectGetMaxX(trimView.frame) - 12 - handle.frame.size.width / 2, handle.center.y);
@@ -464,8 +445,6 @@ typedef enum
 
 - (void)setHasDotPicker:(bool)hasDotPicker {
 _hasDotPicker = hasDotPicker;
-_dotHandle.hidden = !hasDotPicker;
-_scrubberHandle.hidden = true;
 _tapGestureRecognizer.enabled = hasDotPicker;
 }
 
@@ -691,11 +670,6 @@ typedef enum
 
 _thumbnailAspectRatio = frameAspectRatio;
-
-if (_hasDotPicker) {
-CGSize videoSize = TGFillSize([self _thumbnailSize], _dotImageView.frame.size);
-_dotImageView.frame = CGRectMake(TGScreenPixelFloor((_dotContentView.frame.size.width - videoSize.width) / 2.0), 0.0, videoSize.width, videoSize.height);
-}
 
 NSInteger thumbnailCount = (NSInteger)CGCeil(_summaryThumbnailWrapperView.frame.size.width / [self _thumbnailSizeWithAspectRatio:frameAspectRatio orientation:_cropOrientation].width);
 
 if ([dataSource respondsToSelector:@selector(videoScrubber:evenlySpacedTimestamps:startingAt:endingAt:)])
@@ -125,7 +125,7 @@
 
 CGSize dimensions = [avAsset tracksWithMediaType:AVMediaTypeVideo].firstObject.naturalSize;
 TGMediaVideoConversionPreset preset = adjustments.sendAsGif ? TGMediaVideoConversionPresetAnimation : [self presetFromAdjustments:adjustments];
-if (!CGSizeEqualToSize(dimensions, CGSizeZero) && preset != TGMediaVideoConversionPresetAnimation && preset != TGMediaVideoConversionPresetVideoMessage && preset != TGMediaVideoConversionPresetProfile && preset != TGMediaVideoConversionPresetProfileHigh && preset != TGMediaVideoConversionPresetPassthrough)
+if (!CGSizeEqualToSize(dimensions, CGSizeZero) && preset != TGMediaVideoConversionPresetAnimation && preset != TGMediaVideoConversionPresetVideoMessage && preset != TGMediaVideoConversionPresetProfile && preset != TGMediaVideoConversionPresetProfileHigh && preset != TGMediaVideoConversionPresetProfileVeryHigh && preset != TGMediaVideoConversionPresetPassthrough)
 {
 TGMediaVideoConversionPreset bestPreset = [self bestAvailablePresetForDimensions:dimensions];
 if (preset > bestPreset)
@@ -169,7 +169,13 @@
 [self processWithConversionContext:context completionBlock:^
 {
 TGMediaVideoConversionContext *resultContext = context.value;
-[resultContext.imageGenerator generateCGImagesAsynchronouslyForTimes:@[ [NSValue valueWithCMTime:kCMTimeZero] ] completionHandler:^(__unused CMTime requestedTime, CGImageRef _Nullable image, __unused CMTime actualTime, AVAssetImageGeneratorResult result, __unused NSError * _Nullable error)
+NSTimeInterval videoStartValue = 0.0;
+if (adjustments.videoStartValue > 0.0) {
+videoStartValue = adjustments.videoStartValue - adjustments.trimStartValue;
+}
+
+[resultContext.imageGenerator generateCGImagesAsynchronouslyForTimes:@[ [NSValue valueWithCMTime:CMTimeMakeWithSeconds(videoStartValue, NSEC_PER_SEC)] ] completionHandler:^(__unused CMTime requestedTime, CGImageRef _Nullable image, __unused CMTime actualTime, AVAssetImageGeneratorResult result, __unused NSError * _Nullable error)
 {
 UIImage *coverImage = nil;
 if (result == AVAssetImageGeneratorSucceeded)
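The change above makes the generated cover image come from the selected cover position rather than from the very first frame: adjustments.videoStartValue is stored on the untrimmed timeline, so it is shifted by trimStartValue before being handed to the image generator. A hedged sketch of that calculation (parameter names mirror the adjustment fields used above; the helper itself is illustrative):

    #import <UIKit/UIKit.h>
    #import <AVFoundation/AVFoundation.h>

    // Sketch: grab a cover frame at the chosen cover position of a trimmed clip.
    static void RequestCoverImage(AVAssetImageGenerator *generator,
                                  NSTimeInterval videoStartValue,  // cover position in the source asset
                                  NSTimeInterval trimStartValue,   // start of the trimmed range
                                  void (^completion)(UIImage *)) {
        NSTimeInterval offset = 0.0;
        if (videoStartValue > 0.0) {
            offset = videoStartValue - trimStartValue;  // convert to the trimmed timeline
        }
        NSValue *time = [NSValue valueWithCMTime:CMTimeMakeWithSeconds(offset, NSEC_PER_SEC)];
        [generator generateCGImagesAsynchronouslyForTimes:@[ time ]
                                        completionHandler:^(CMTime requestedTime, CGImageRef image, CMTime actualTime, AVAssetImageGeneratorResult result, NSError *error) {
            UIImage *cover = (result == AVAssetImageGeneratorSucceeded && image != NULL) ? [UIImage imageWithCGImage:image] : nil;
            completion(cover);
        }];
    }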
@@ -183,7 +189,6 @@
 liveUploadData = [watcher fileUpdated:true];
-
 NSUInteger fileSize = [[[NSFileManager defaultManager] attributesOfItemAtPath:outputUrl.path error:nil] fileSize];
 
 contextResult = [TGMediaVideoConversionResult resultWithFileURL:outputUrl fileSize:fileSize duration:CMTimeGetSeconds(resultContext.timeRange.duration) dimensions:resultContext.dimensions coverImage:coverImage liveUploadData:liveUploadData];
 return [resultContext finishedContext];
 }];
@@ -347,23 +352,8 @@
 outputDimensions = CGSizeMake(outputDimensions.height, outputDimensions.width);
 
 AVMutableCompositionTrack *compositionTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
-if (adjustments.videoStartValue > 0.0 && adjustments.videoStartValue > adjustments.trimStartValue) {
-NSTimeInterval trimEndValue = adjustments.trimEndValue > adjustments.trimStartValue ? adjustments.trimEndValue : CMTimeGetSeconds(videoTrack.timeRange.duration);
-
-CMTimeRange firstRange = CMTimeRangeMake(CMTimeMakeWithSeconds(adjustments.videoStartValue, NSEC_PER_SEC), CMTimeMakeWithSeconds(trimEndValue - adjustments.videoStartValue, NSEC_PER_SEC));
-
-NSError *error;
-
-[compositionTrack insertTimeRange:firstRange ofTrack:videoTrack atTime:kCMTimeZero error:&error];
-NSLog(@"");
-[compositionTrack insertTimeRange:CMTimeRangeMake(CMTimeMakeWithSeconds(adjustments.trimStartValue, NSEC_PER_SEC), CMTimeMakeWithSeconds(adjustments.videoStartValue - adjustments.trimStartValue, NSEC_PER_SEC)) ofTrack:videoTrack atTime:firstRange.duration error:&error];
-
-NSLog(@"");
-
-// instructionTimeRange = CMTimeRangeMake(kCMTimeZero, );
-} else {
-[compositionTrack insertTimeRange:timeRange ofTrack:videoTrack atTime:kCMTimeZero error:NULL];
-}
+[compositionTrack insertTimeRange:timeRange ofTrack:videoTrack atTime:kCMTimeZero error:NULL];
 CMTime frameDuration = CMTimeMake(1, 30);
 if (videoTrack.nominalFrameRate > 0)
 frameDuration = CMTimeMake(1, (int32_t)videoTrack.nominalFrameRate);
@@ -534,6 +524,7 @@
 output.videoComposition = videoComposition;
 
 AVAssetImageGenerator *imageGenerator = [[AVAssetImageGenerator alloc] initWithAsset:composition];
+imageGenerator.appliesPreferredTrackTransform = true;
 imageGenerator.videoComposition = videoComposition;
 imageGenerator.maximumSize = maxDimensions;
 imageGenerator.requestedTimeToleranceBefore = kCMTimeZero;
@@ -1258,6 +1249,7 @@ static CGFloat progressOfSampleBufferInTimeRange(CMSampleBufferRef sampleBuffer,
 
 case TGMediaVideoConversionPresetProfile:
 case TGMediaVideoConversionPresetProfileHigh:
+case TGMediaVideoConversionPresetProfileVeryHigh:
 return (CGSize){ 800.0f, 800.0f };
 
 default:
@@ -1267,7 +1259,7 @@ static CGFloat progressOfSampleBufferInTimeRange(CMSampleBufferRef sampleBuffer,
 
 + (bool)keepAudioForPreset:(TGMediaVideoConversionPreset)preset
 {
-return preset != TGMediaVideoConversionPresetAnimation && preset != TGMediaVideoConversionPresetProfile && preset != TGMediaVideoConversionPresetProfileHigh;
+return preset != TGMediaVideoConversionPresetAnimation && preset != TGMediaVideoConversionPresetProfile && preset != TGMediaVideoConversionPresetProfileHigh && preset != TGMediaVideoConversionPresetProfileVeryHigh;
 }
 
 + (NSDictionary *)audioSettingsForPreset:(TGMediaVideoConversionPreset)preset
@@ -1344,11 +1336,14 @@ static CGFloat progressOfSampleBufferInTimeRange(CMSampleBufferRef sampleBuffer,
 return 300;
 
 case TGMediaVideoConversionPresetProfile:
-return 1400;
+return 1500;
 
 case TGMediaVideoConversionPresetProfileHigh:
 return 2000;
 
+case TGMediaVideoConversionPresetProfileVeryHigh:
+return 2500;
+
 default:
 return 900;
 }
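For orientation, the video bitrate mapping for the profile presets after this change, as a small sketch (values in kbps, taken from the hunk above; the helper name is illustrative and it assumes the TGMediaVideoConversionPreset enum shown earlier):

    #import <Foundation/Foundation.h>

    // Approximate video bitrates (kbps) per profile preset after this commit.
    static NSInteger ProfileVideoBitrateKbps(TGMediaVideoConversionPreset preset) {
        switch (preset) {
            case TGMediaVideoConversionPresetProfile:         return 1500;
            case TGMediaVideoConversionPresetProfileHigh:     return 2000;
            case TGMediaVideoConversionPresetProfileVeryHigh: return 2500;
            default:                                          return 900;
        }
    }

Per the other hunks in this file, all three profile presets share the 800x800 output size and drop the audio track, so the preset effectively only selects the bitrate.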
@@ -1379,6 +1374,7 @@ static CGFloat progressOfSampleBufferInTimeRange(CMSampleBufferRef sampleBuffer,
 case TGMediaVideoConversionPresetAnimation:
 case TGMediaVideoConversionPresetProfile:
 case TGMediaVideoConversionPresetProfileHigh:
+case TGMediaVideoConversionPresetProfileVeryHigh:
 return 0;
 
 default:
@@ -1408,6 +1404,7 @@ static CGFloat progressOfSampleBufferInTimeRange(CMSampleBufferRef sampleBuffer,
 case TGMediaVideoConversionPresetAnimation:
 case TGMediaVideoConversionPresetProfile:
 case TGMediaVideoConversionPresetProfileHigh:
+case TGMediaVideoConversionPresetProfileVeryHigh:
 return 0;
 
 default:
@@ -28,6 +28,7 @@ const CGFloat TGPhotoAvatarCropButtonsWrapperSize = 61.0f;
 TGModernButton *_resetButton;
 
 TGPhotoAvatarCropView *_cropView;
+
 UIView *_snapshotView;
 UIImage *_snapshotImage;
 
@@ -87,7 +88,7 @@ const CGFloat TGPhotoAvatarCropButtonsWrapperSize = 61.0f;
 [self.view addSubview:_wrapperView];
 
 PGPhotoEditor *photoEditor = self.photoEditor;
-_cropView = [[TGPhotoAvatarCropView alloc] initWithOriginalSize:photoEditor.originalSize screenSize:[self referenceViewSize]];
+_cropView = [[TGPhotoAvatarCropView alloc] initWithOriginalSize:photoEditor.originalSize screenSize:[self referenceViewSize] fullPreviewView:nil];
 [_cropView setCropRect:photoEditor.cropRect];
 [_cropView setCropOrientation:photoEditor.cropOrientation];
 [_cropView setCropMirrored:photoEditor.cropMirrored];
@@ -349,14 +350,14 @@ const CGFloat TGPhotoAvatarCropButtonsWrapperSize = 61.0f;
 CGRect referenceBounds = CGRectMake(0, 0, referenceSize.width, referenceSize.height);
 CGRect containerFrame = [TGPhotoEditorTabController photoContainerFrameForParentViewFrame:referenceBounds toolbarLandscapeSize:self.toolbarLandscapeSize orientation:orientation panelSize:TGPhotoEditorPanelSize hasOnScreenNavigation:self.hasOnScreenNavigation];
 
-if (self.switchingToTab == TGPhotoEditorPreviewTab)
-{
-containerFrame = [TGPhotoEditorTabController photoContainerFrameForParentViewFrame:referenceBounds toolbarLandscapeSize:self.toolbarLandscapeSize orientation:orientation panelSize:0 hasOnScreenNavigation:self.hasOnScreenNavigation];
-}
-else if (self.switchingToTab == TGPhotoEditorPaintTab)
-{
-containerFrame = [TGPhotoPaintController photoContainerFrameForParentViewFrame:referenceBounds toolbarLandscapeSize:self.toolbarLandscapeSize orientation:orientation panelSize:TGPhotoPaintTopPanelSize + TGPhotoPaintBottomPanelSize hasOnScreenNavigation:self.hasOnScreenNavigation];
-}
+// if (self.switchingToTab == TGPhotoEditorPreviewTab)
+// {
+// containerFrame = [TGPhotoEditorTabController photoContainerFrameForParentViewFrame:referenceBounds toolbarLandscapeSize:self.toolbarLandscapeSize orientation:orientation panelSize:0 hasOnScreenNavigation:self.hasOnScreenNavigation];
+// }
+// else if (self.switchingToTab == TGPhotoEditorPaintTab)
+// {
+// containerFrame = [TGPhotoPaintController photoContainerFrameForParentViewFrame:referenceBounds toolbarLandscapeSize:self.toolbarLandscapeSize orientation:orientation panelSize:TGPhotoPaintTopPanelSize + TGPhotoPaintBottomPanelSize hasOnScreenNavigation:self.hasOnScreenNavigation];
+// }
 
 CGSize fittedSize = TGScaleToSize(cropRectFrame.size, containerFrame.size);
 CGRect targetFrame = CGRectMake(containerFrame.origin.x + (containerFrame.size.width - fittedSize.width) / 2,
@@ -478,11 +479,7 @@ const CGFloat TGPhotoAvatarCropButtonsWrapperSize = 61.0f;
 CGSize referenceSize = [self referenceViewSize];
 UIInterfaceOrientation orientation = self.effectiveOrientation;
 
-bool hasOnScreenNavigation = false;
-if (iosMajorVersion() >= 11)
-hasOnScreenNavigation = (self.viewLoaded && self.view.safeAreaInsets.bottom > FLT_EPSILON) || self.context.safeAreaInset.bottom > FLT_EPSILON;
-
-CGRect containerFrame = [TGPhotoEditorTabController photoContainerFrameForParentViewFrame:CGRectMake(0, 0, referenceSize.width, referenceSize.height) toolbarLandscapeSize:self.toolbarLandscapeSize orientation:orientation panelSize:0.0f hasOnScreenNavigation:hasOnScreenNavigation];
+CGRect containerFrame = [TGPhotoEditorTabController photoContainerFrameForParentViewFrame:CGRectMake(0, 0, referenceSize.width, referenceSize.height) toolbarLandscapeSize:self.toolbarLandscapeSize orientation:orientation panelSize:0.0f hasOnScreenNavigation:self.hasOnScreenNavigation];
 
 CGRect targetFrame = CGRectZero;
 
@@ -11,6 +11,8 @@
 #import "PGPhotoEditorView.h"
 
 const CGFloat TGPhotoAvatarCropViewOverscreenSize = 1000;
+const CGFloat TGPhotoAvatarCropViewCurtainSize = 300;
+const CGFloat TGPhotoAvatarCropViewCurtainMargin = 200;
 
 @interface TGPhotoAvatarCropView () <UIScrollViewDelegate>
 {
@@ -28,17 +30,22 @@ const CGFloat TGPhotoAvatarCropViewOverscreenSize = 1000;
 UIView *_leftOverlayView;
 UIView *_rightOverlayView;
 UIView *_bottomOverlayView;
+
+UIView *_topCurtainView;
+UIView *_bottomCurtainView;
 UIImageView *_areaMaskView;
 
 bool _imageReloadingNeeded;
 
 CGFloat _currentDiameter;
+
+PGPhotoEditorView *_fullPreviewView;
 }
 @end
 
 @implementation TGPhotoAvatarCropView
 
-- (instancetype)initWithOriginalSize:(CGSize)originalSize screenSize:(CGSize)screenSize
+- (instancetype)initWithOriginalSize:(CGSize)originalSize screenSize:(CGSize)screenSize fullPreviewView:(PGPhotoEditorView *)fullPreviewView
 {
 self = [super initWithFrame:CGRectZero];
 if (self != nil)
@@ -67,10 +74,22 @@ const CGFloat TGPhotoAvatarCropViewOverscreenSize = 1000;
 _imageView.userInteractionEnabled = false;
 [_wrapperView addSubview:_imageView];
 
-_fullPreviewView = [[PGPhotoEditorView alloc] initWithFrame:_imageView.frame];
+_fullPreviewView = fullPreviewView;
+_fullPreviewView.center = _imageView.center;
+CGSize fittedSize = TGScaleToSize(_originalSize, CGSizeMake(1024, 1024));
+CGFloat scale = _imageView.bounds.size.width / fittedSize.width;
+_fullPreviewView.transform = CGAffineTransformMakeScale(self.cropMirrored ? -scale : scale, scale);
 _fullPreviewView.userInteractionEnabled = false;
 [_wrapperView addSubview:_fullPreviewView];
 
+_topCurtainView = [[UIView alloc] initWithFrame:CGRectZero];
+_topCurtainView.backgroundColor = [UIColor blackColor];
+[self addSubview:_topCurtainView];
+
+_bottomCurtainView = [[UIView alloc] initWithFrame:CGRectZero];
+_bottomCurtainView.backgroundColor = [UIColor blackColor];
+[self addSubview:_bottomCurtainView];
+
 _topOverlayView = [[UIView alloc] initWithFrame:CGRectZero];
 _topOverlayView.backgroundColor = [TGPhotoEditorInterfaceAssets cropTransparentOverlayColor];
 _topOverlayView.userInteractionEnabled = false;
@@ -336,18 +355,24 @@ const CGFloat TGPhotoAvatarCropViewOverscreenSize = 1000;
 
 - (void)scrollViewDidEndDragging:(UIScrollView *)__unused scrollView willDecelerate:(BOOL)decelerate
 {
-if (!decelerate)
+if (!decelerate) {
 [self scrollViewDidEndDecelerating:scrollView];
+
+if (self.croppingChanged != nil)
+self.croppingChanged();
+}
 }
 
-- (void)scrollViewDidEndDecelerating:(UIScrollView *)__unused scrollView
+- (void)scrollViewDidEndDecelerating:(UIScrollView *)scrollView
 {
 _isAnimating = false;
 
 [self _updateCropRect];
 
-if (self.croppingChanged != nil)
-self.croppingChanged();
+if (!scrollView.isTracking) {
+if (self.croppingChanged != nil)
+self.croppingChanged();
+}
 
 [self reloadImageIfNeeded];
 
@@ -410,7 +435,10 @@ const CGFloat TGPhotoAvatarCropViewOverscreenSize = 1000;
 {
 _cropMirrored = cropMirrored;
 _imageView.transform = CGAffineTransformMakeScale(self.cropMirrored ? -1.0f : 1.0f, 1.0f);
-_fullPreviewView.transform = _imageView.transform;
+CGSize fittedSize = TGScaleToSize(_originalSize, CGSizeMake(1024, 1024));
+CGFloat scale = _imageView.bounds.size.width / fittedSize.width;
+_fullPreviewView.transform = CGAffineTransformMakeScale(self.cropMirrored ? -scale : scale, scale);
 }
 
 - (void)invalidateCropRect
@@ -513,6 +541,18 @@ const CGFloat TGPhotoAvatarCropViewOverscreenSize = 1000;
 }];
 }
 
+- (void)animateTransitionOut
+{
+[UIView animateWithDuration:0.2f animations:^
+{
+_topOverlayView.alpha = 0.0f;
+_leftOverlayView.alpha = 0.0f;
+_rightOverlayView.alpha = 0.0f;
+_bottomOverlayView.alpha = 0.0f;
+_areaMaskView.alpha = 0.0f;
+}];
+}
+
 - (void)hideImageForCustomTransition
 {
 _scrollView.hidden = true;
@@ -522,10 +562,10 @@ const CGFloat TGPhotoAvatarCropViewOverscreenSize = 1000;
 
 - (void)_layoutOverlayViews
 {
-CGRect topOverlayFrame = CGRectMake(0, -TGPhotoAvatarCropViewOverscreenSize, self.frame.size.width, TGPhotoAvatarCropViewOverscreenSize);
-CGRect leftOverlayFrame = CGRectMake(-TGPhotoAvatarCropViewOverscreenSize, -TGPhotoAvatarCropViewOverscreenSize, TGPhotoAvatarCropViewOverscreenSize, self.frame.size.height + 2 * TGPhotoAvatarCropViewOverscreenSize);
-CGRect rightOverlayFrame = CGRectMake(self.frame.size.width, -TGPhotoAvatarCropViewOverscreenSize, TGPhotoAvatarCropViewOverscreenSize, self.frame.size.height + 2 * TGPhotoAvatarCropViewOverscreenSize);
-CGRect bottomOverlayFrame = CGRectMake(0, self.frame.size.height, self.frame.size.width, TGPhotoAvatarCropViewOverscreenSize);
+CGRect topOverlayFrame = CGRectMake(0, -TGPhotoAvatarCropViewOverscreenSize, self.bounds.size.width, TGPhotoAvatarCropViewOverscreenSize);
+CGRect leftOverlayFrame = CGRectMake(-TGPhotoAvatarCropViewOverscreenSize, -TGPhotoAvatarCropViewOverscreenSize, TGPhotoAvatarCropViewOverscreenSize, self.bounds.size.height + 2 * TGPhotoAvatarCropViewOverscreenSize);
+CGRect rightOverlayFrame = CGRectMake(self.bounds.size.width, -TGPhotoAvatarCropViewOverscreenSize, TGPhotoAvatarCropViewOverscreenSize, self.bounds.size.height + 2 * TGPhotoAvatarCropViewOverscreenSize);
+CGRect bottomOverlayFrame = CGRectMake(0, self.bounds.size.height, self.bounds.size.width, TGPhotoAvatarCropViewOverscreenSize);
 
 _topOverlayView.frame = topOverlayFrame;
 _leftOverlayView.frame = leftOverlayFrame;
@@ -533,6 +573,40 @@ const CGFloat TGPhotoAvatarCropViewOverscreenSize = 1000;
 _bottomOverlayView.frame = bottomOverlayFrame;
 }
 
+- (void)closeCurtains {
+CGRect topFrame = CGRectMake(-TGPhotoAvatarCropViewCurtainMargin, -TGPhotoAvatarCropViewCurtainSize, self.bounds.size.width + TGPhotoAvatarCropViewCurtainMargin * 2.0, 1.0);
+CGRect bottomFrame = CGRectMake(-TGPhotoAvatarCropViewCurtainMargin, self.bounds.size.height + TGPhotoAvatarCropViewCurtainSize, self.bounds.size.width + TGPhotoAvatarCropViewCurtainMargin * 2.0, 1.0);
+
+_topCurtainView.frame = topFrame;
+_bottomCurtainView.frame = bottomFrame;
+
+[UIView animateWithDuration:0.3f delay:0.0f options:UIViewAnimationOptionCurveEaseOut animations:^
+{
+CGRect topFrame = CGRectMake(-TGPhotoAvatarCropViewCurtainMargin, -TGPhotoAvatarCropViewCurtainSize, self.bounds.size.width + TGPhotoAvatarCropViewCurtainMargin * 2.0, TGPhotoAvatarCropViewCurtainSize);
+CGRect bottomFrame = CGRectMake(-TGPhotoAvatarCropViewCurtainMargin, self.bounds.size.height, self.bounds.size.width + TGPhotoAvatarCropViewCurtainMargin * 2.0, TGPhotoAvatarCropViewCurtainSize);
+
+_topCurtainView.frame = topFrame;
+_bottomCurtainView.frame = bottomFrame;
+} completion:nil];
+}
+
+- (void)openCurtains {
+CGRect topFrame = CGRectMake(-TGPhotoAvatarCropViewCurtainMargin, -TGPhotoAvatarCropViewCurtainSize, self.bounds.size.width + TGPhotoAvatarCropViewCurtainMargin * 2.0, TGPhotoAvatarCropViewCurtainSize);
+CGRect bottomFrame = CGRectMake(-TGPhotoAvatarCropViewCurtainMargin, self.bounds.size.height, self.bounds.size.width + TGPhotoAvatarCropViewCurtainMargin * 2.0, TGPhotoAvatarCropViewCurtainSize);
+
+_topCurtainView.frame = topFrame;
+_bottomCurtainView.frame = bottomFrame;
+
+[UIView animateWithDuration:0.3f delay:0.0f options:UIViewAnimationOptionCurveEaseIn animations:^
+{
+CGRect topFrame = CGRectMake(-TGPhotoAvatarCropViewCurtainMargin, -TGPhotoAvatarCropViewCurtainSize, self.bounds.size.width + TGPhotoAvatarCropViewCurtainMargin * 2.0, 1.0);
+CGRect bottomFrame = CGRectMake(-TGPhotoAvatarCropViewCurtainMargin, self.bounds.size.height + TGPhotoAvatarCropViewCurtainSize, self.bounds.size.width + TGPhotoAvatarCropViewCurtainMargin * 2.0, 1.0);
+
+_topCurtainView.frame = topFrame;
+_bottomCurtainView.frame = bottomFrame;
+} completion:nil];
+}
+
 - (void)layoutSubviews
 {
 [self _layoutOverlayViews];
@@ -3,6 +3,7 @@
 @class PGPhotoEditor;
 @class PGPhotoTool;
 @class TGPhotoEditorPreviewView;
+@class PGPhotoEditorView;
 @class TGMediaPickerGalleryVideoScrubber;
 
 @interface TGPhotoAvatarPreviewController : TGPhotoEditorTabController
@@ -14,7 +15,7 @@
 @property (nonatomic, copy) void (^croppingChanged)(void);
 @property (nonatomic, copy) void (^togglePlayback)(void);
 
-- (instancetype)initWithContext:(id<LegacyComponentsContext>)context photoEditor:(PGPhotoEditor *)photoEditor previewView:(TGPhotoEditorPreviewView *)previewView scrubberView:(TGMediaPickerGalleryVideoScrubber *)scrubberView dotImageView:(UIView *)dotImageView;
+- (instancetype)initWithContext:(id<LegacyComponentsContext>)context photoEditor:(PGPhotoEditor *)photoEditor previewView:(TGPhotoEditorPreviewView *)previewView scrubberView:(TGMediaPickerGalleryVideoScrubber *)scrubberView dotImageView:(UIView *)dotImageView fullPreviewView:(PGPhotoEditorView *)fullPreviewView;
 
 - (void)setImage:(UIImage *)image;
 - (void)setSnapshotImage:(UIImage *)snapshotImage;
@@ -17,6 +17,8 @@
 #import "TGMediaPickerGalleryVideoScrubber.h"
 #import "TGModernGalleryVideoView.h"
 
+#import "TGPhotoPaintController.h"
+
 const CGFloat TGPhotoAvatarPreviewPanelSize = 96.0f;
 const CGFloat TGPhotoAvatarPreviewLandscapePanelSize = TGPhotoAvatarPreviewPanelSize + 40.0f;
 
@@ -30,6 +32,7 @@ const CGFloat TGPhotoAvatarPreviewLandscapePanelSize = TGPhotoAvatarPreviewPanel
 UIView *_wrapperView;
 
 TGPhotoAvatarCropView *_cropView;
+PGPhotoEditorView *_fullPreviewView;
 
 UIView *_portraitToolsWrapperView;
 UIView *_landscapeToolsWrapperView;
@@ -43,6 +46,8 @@ const CGFloat TGPhotoAvatarPreviewLandscapePanelSize = TGPhotoAvatarPreviewPanel
 UIView *_portraitToolControlView;
 UIView *_landscapeToolControlView;
 UILabel *_coverLabel;
+
+bool _scheduledTransitionIn;
 }
 
 @property (nonatomic, weak) PGPhotoEditor *photoEditor;
@@ -52,13 +57,14 @@ const CGFloat TGPhotoAvatarPreviewLandscapePanelSize = TGPhotoAvatarPreviewPanel
 
 @implementation TGPhotoAvatarPreviewController
 
-- (instancetype)initWithContext:(id<LegacyComponentsContext>)context photoEditor:(PGPhotoEditor *)photoEditor previewView:(TGPhotoEditorPreviewView *)previewView scrubberView:(TGMediaPickerGalleryVideoScrubber *)scrubberView dotImageView:(UIView *)dotImageView
+- (instancetype)initWithContext:(id<LegacyComponentsContext>)context photoEditor:(PGPhotoEditor *)photoEditor previewView:(TGPhotoEditorPreviewView *)previewView scrubberView:(TGMediaPickerGalleryVideoScrubber *)scrubberView dotImageView:(UIView *)dotImageView fullPreviewView:(PGPhotoEditorView *)fullPreviewView
 {
 self = [super initWithContext:context];
 if (self != nil)
 {
 self.photoEditor = photoEditor;
 self.previewView = previewView;
+_fullPreviewView = fullPreviewView;
 _scrubberView = scrubberView;
 
 _dotImageView = dotImageView;
@@ -71,7 +77,7 @@ const CGFloat TGPhotoAvatarPreviewLandscapePanelSize = TGPhotoAvatarPreviewPanel
 [super loadView];
 self.view.autoresizingMask = UIViewAutoresizingFlexibleWidth | UIViewAutoresizingFlexibleHeight;
 
-// [self.view addSubview:_previewView];
+[_previewView performTransitionInWithCompletion:^{}];
 
 _wrapperView = [[UIView alloc] initWithFrame:CGRectZero];
 [self.view addSubview:_wrapperView];
@@ -98,7 +104,7 @@ const CGFloat TGPhotoAvatarPreviewLandscapePanelSize = TGPhotoAvatarPreviewPanel
 };
 
 PGPhotoEditor *photoEditor = self.photoEditor;
-_cropView = [[TGPhotoAvatarCropView alloc] initWithOriginalSize:photoEditor.originalSize screenSize:[self referenceViewSize]];
+_cropView = [[TGPhotoAvatarCropView alloc] initWithOriginalSize:photoEditor.originalSize screenSize:[self referenceViewSize] fullPreviewView:_fullPreviewView];
 [_cropView setCropRect:photoEditor.cropRect];
 [_cropView setCropOrientation:photoEditor.cropOrientation];
 [_cropView setCropMirrored:photoEditor.cropMirrored];
@@ -137,11 +143,10 @@ const CGFloat TGPhotoAvatarPreviewLandscapePanelSize = TGPhotoAvatarPreviewPanel
 _cropView.interactionEnded = interactionEnded;
 [_wrapperView addSubview:_cropView];
 
-if (self.item.isVideo) {
-_portraitToolsWrapperView = [[UIView alloc] initWithFrame:CGRectZero];
-_portraitToolsWrapperView.alpha = 0.0f;
-[_wrapperView addSubview:_portraitToolsWrapperView];
+_portraitToolsWrapperView = [[UIView alloc] initWithFrame:CGRectZero];
+[_wrapperView addSubview:_portraitToolsWrapperView];
 
+if (self.item.isVideo) {
 _portraitWrapperBackgroundView = [[UIView alloc] initWithFrame:_portraitToolsWrapperView.bounds];
 _portraitWrapperBackgroundView.autoresizingMask = UIViewAutoresizingFlexibleWidth | UIViewAutoresizingFlexibleHeight;
 _portraitWrapperBackgroundView.backgroundColor = [TGPhotoEditorInterfaceAssets toolbarTransparentBackgroundColor];
@@ -149,7 +154,6 @@ const CGFloat TGPhotoAvatarPreviewLandscapePanelSize = TGPhotoAvatarPreviewPanel
 [_portraitToolsWrapperView addSubview:_portraitWrapperBackgroundView];
 
 _landscapeToolsWrapperView = [[UIView alloc] initWithFrame:CGRectZero];
-_landscapeToolsWrapperView.alpha = 0.0f;
 [_wrapperView addSubview:_landscapeToolsWrapperView];
 
 _landscapeWrapperBackgroundView = [[UIView alloc] initWithFrame:_landscapeToolsWrapperView.bounds];
@@ -178,6 +182,7 @@ const CGFloat TGPhotoAvatarPreviewLandscapePanelSize = TGPhotoAvatarPreviewPanel
 [_coverLabel sizeToFit];
 [_portraitToolsWrapperView addSubview:_coverLabel];
 
+_dotImageView.alpha = 1.0f;
 [_wrapperView addSubview:_dotImageView];
 }
 }
@@ -186,8 +191,6 @@ const CGFloat TGPhotoAvatarPreviewLandscapePanelSize = TGPhotoAvatarPreviewPanel
 {
 [super viewWillAppear:animated];
 
-self.photoEditor.additionalOutputs = @[_cropView.fullPreviewView];
-
 if (_appeared)
 return;
 
@@ -214,8 +217,6 @@ const CGFloat TGPhotoAvatarPreviewLandscapePanelSize = TGPhotoAvatarPreviewPanel
 
 if (_imagePendingLoad != nil)
 [_cropView setImage:_imagePendingLoad];
-
-[self transitionIn];
 }
 
 - (BOOL)shouldAutorotate
@@ -260,6 +261,11 @@ const CGFloat TGPhotoAvatarPreviewLandscapePanelSize = TGPhotoAvatarPreviewPanel
 
 - (void)transitionIn
 {
+if (_portraitToolsWrapperView.frame.size.height < FLT_EPSILON) {
+_scheduledTransitionIn = true;
+return;
+}
+
 _scrubberView.layer.rasterizationScale = [UIScreen mainScreen].scale;
 _scrubberView.layer.shouldRasterize = true;
 
@@ -273,37 +279,44 @@ const CGFloat TGPhotoAvatarPreviewLandscapePanelSize = TGPhotoAvatarPreviewPanel
 _scrubberView.layer.shouldRasterize = false;
 }];
 
-switch (self.effectiveOrientation)
-{
-case UIInterfaceOrientationLandscapeLeft:
+if (!self.initialAppearance) {
+switch (self.effectiveOrientation)
 {
-_landscapeToolsWrapperView.transform = CGAffineTransformMakeTranslation(-_landscapeToolsWrapperView.frame.size.width / 3.0f * 2.0f, 0.0f);
-[UIView animateWithDuration:0.3 delay:0.0 options:7 << 16 animations:^
+case UIInterfaceOrientationLandscapeLeft:
 {
-_landscapeToolsWrapperView.transform = CGAffineTransformIdentity;
-} completion:nil];
-}
-break;
+_landscapeToolsWrapperView.transform = CGAffineTransformMakeTranslation(-_landscapeToolsWrapperView.frame.size.width / 3.0f * 2.0f, 0.0f);
+[UIView animateWithDuration:0.3 delay:0.0 options:7 << 16 animations:^
+{
+_landscapeToolsWrapperView.transform = CGAffineTransformIdentity;
+} completion:nil];
+}
+break;
 
 case UIInterfaceOrientationLandscapeRight:
-{
-_landscapeToolsWrapperView.transform = CGAffineTransformMakeTranslation(_landscapeToolsWrapperView.frame.size.width / 3.0f * 2.0f, 0.0f);
-[UIView animateWithDuration:0.3 delay:0.0 options:7 << 16 animations:^
 {
-_landscapeToolsWrapperView.transform = CGAffineTransformIdentity;
-} completion:nil];
-}
-break;
+_landscapeToolsWrapperView.transform = CGAffineTransformMakeTranslation(_landscapeToolsWrapperView.frame.size.width / 3.0f * 2.0f, 0.0f);
+[UIView animateWithDuration:0.3 delay:0.0 options:7 << 16 animations:^
+{
+_landscapeToolsWrapperView.transform = CGAffineTransformIdentity;
+} completion:nil];
+}
+break;
 
 default:
-{
-_portraitToolsWrapperView.transform = CGAffineTransformMakeTranslation(0.0f, _portraitToolsWrapperView.frame.size.height / 3.0f * 2.0f);
-[UIView animateWithDuration:0.3 delay:0.0 options:7 << 16 animations:^
 {
-_portraitToolsWrapperView.transform = CGAffineTransformIdentity;
-} completion:nil];
+CGFloat offset = _portraitToolsWrapperView.frame.size.height / 3.0f * 2.0f;
+CGAffineTransform initialDotImageViewTransform = _dotImageView.transform;
+_dotImageView.transform = CGAffineTransformTranslate(initialDotImageViewTransform, 0.0, offset * 4.444);
+_portraitToolsWrapperView.transform = CGAffineTransformMakeTranslation(0.0f, offset);
+
+[UIView animateWithDuration:0.3 delay:0.0 options:7 << 16 animations:^
+{
+_portraitToolsWrapperView.transform = CGAffineTransformIdentity;
+_dotImageView.transform = initialDotImageViewTransform;
+} completion:nil];
+}
+break;
 }
-break;
 }
 }
 
@@ -313,16 +326,52 @@ const CGFloat TGPhotoAvatarPreviewLandscapePanelSize = TGPhotoAvatarPreviewPanel
 _dismissing = true;
 }
 
-[_cropView animateTransitionOutSwitching:switching];
-self.photoEditor.additionalOutputs = @[];
+[self.view insertSubview:_previewView belowSubview:_wrapperView];
+_previewView.frame = [_wrapperView convertRect:_cropView.frame toView:self.view];
 
-TGPhotoEditorPreviewView *previewView = self.previewView;
-previewView.touchedUp = nil;
-previewView.touchedDown = nil;
-previewView.tapped = nil;
-previewView.interactionEnded = nil;
+[_cropView animateTransitionOut];
 
-[_videoAreaView.superview bringSubviewToFront:_videoAreaView];
+if (switching)
+{
+_switching = true;
+
|
UIInterfaceOrientation orientation = self.effectiveOrientation;
|
||||||
|
|
||||||
|
CGRect cropRectFrame = [_cropView cropRectFrameForView:self.view];
|
||||||
|
CGSize referenceSize = [self referenceViewSizeForOrientation:orientation];
|
||||||
|
CGRect referenceBounds = CGRectMake(0, 0, referenceSize.width, referenceSize.height);
|
||||||
|
CGRect containerFrame = [TGPhotoEditorTabController photoContainerFrameForParentViewFrame:referenceBounds toolbarLandscapeSize:self.toolbarLandscapeSize orientation:orientation panelSize:TGPhotoEditorPanelSize hasOnScreenNavigation:self.hasOnScreenNavigation];
|
||||||
|
|
||||||
|
if (self.switchingToTab == TGPhotoEditorPaintTab)
|
||||||
|
{
|
||||||
|
containerFrame = [TGPhotoPaintController photoContainerFrameForParentViewFrame:referenceBounds toolbarLandscapeSize:self.toolbarLandscapeSize orientation:orientation panelSize:TGPhotoPaintTopPanelSize + TGPhotoPaintBottomPanelSize hasOnScreenNavigation:self.hasOnScreenNavigation];
|
||||||
|
}
|
||||||
|
|
||||||
|
CGSize fittedSize = TGScaleToSize(cropRectFrame.size, containerFrame.size);
|
||||||
|
CGRect targetFrame = CGRectMake(containerFrame.origin.x + (containerFrame.size.width - fittedSize.width) / 2, containerFrame.origin.y + (containerFrame.size.height - fittedSize.height) / 2, fittedSize.width, fittedSize.height);
|
||||||
|
|
||||||
|
CGFloat targetCropViewScale = targetFrame.size.width / _cropView.frame.size.width;
|
||||||
|
CGRect targetCropViewFrame = [self.view convertRect:targetFrame toView:_wrapperView];
|
||||||
|
|
||||||
|
_previewView.alpha = 0.0;
|
||||||
|
|
||||||
|
[_cropView closeCurtains];
|
||||||
|
|
||||||
|
[UIView animateWithDuration:0.3f delay:0.0f options:UIViewAnimationOptionCurveEaseInOut | UIViewAnimationOptionLayoutSubviews animations:^
|
||||||
|
{
|
||||||
|
_previewView.frame = targetFrame;
|
||||||
|
_cropView.center = CGPointMake(CGRectGetMidX(targetCropViewFrame), CGRectGetMidY(targetCropViewFrame));
|
||||||
|
_cropView.transform = CGAffineTransformMakeScale(targetCropViewScale, targetCropViewScale);
|
||||||
|
} completion:^(__unused BOOL finished)
|
||||||
|
{
|
||||||
|
_previewView.alpha = 1.0;
|
||||||
|
if (self.finishedTransitionOut != nil)
|
||||||
|
self.finishedTransitionOut();
|
||||||
|
|
||||||
|
if (completion != nil)
|
||||||
|
completion();
|
||||||
|
}];
|
||||||
|
}
|
||||||
|
|
||||||
switch (self.effectiveOrientation)
|
switch (self.effectiveOrientation)
|
||||||
{
|
{
|
||||||
@ -346,10 +395,15 @@ const CGFloat TGPhotoAvatarPreviewLandscapePanelSize = TGPhotoAvatarPreviewPanel
|
|||||||
|
|
||||||
default:
|
default:
|
||||||
{
|
{
|
||||||
|
CGFloat offset = _portraitToolsWrapperView.frame.size.height / 3.0f * 2.0f;
|
||||||
|
CGAffineTransform initialDotImageViewTransform = _dotImageView.transform;
|
||||||
[UIView animateWithDuration:0.3 delay:0.0 options:7 << 16 animations:^
|
[UIView animateWithDuration:0.3 delay:0.0 options:7 << 16 animations:^
|
||||||
{
|
{
|
||||||
_portraitToolsWrapperView.transform = CGAffineTransformMakeTranslation(0.0f, _portraitToolsWrapperView.frame.size.height / 3.0f * 2.0f);
|
_portraitToolsWrapperView.transform = CGAffineTransformMakeTranslation(0.0f, offset);
|
||||||
} completion:nil];
|
_dotImageView.transform = CGAffineTransformTranslate(initialDotImageViewTransform, 0.0, offset * 4.444);
|
||||||
|
} completion:^(__unused BOOL finished) {
|
||||||
|
_dotImageView.transform = initialDotImageViewTransform;
|
||||||
|
}];
|
||||||
}
|
}
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
@ -358,12 +412,13 @@ const CGFloat TGPhotoAvatarPreviewLandscapePanelSize = TGPhotoAvatarPreviewPanel
|
|||||||
{
|
{
|
||||||
_portraitToolsWrapperView.alpha = 0.0f;
|
_portraitToolsWrapperView.alpha = 0.0f;
|
||||||
_landscapeToolsWrapperView.alpha = 0.0f;
|
_landscapeToolsWrapperView.alpha = 0.0f;
|
||||||
_videoAreaView.alpha = 0.0f;
|
|
||||||
_dotImageView.alpha = 0.0f;
|
_dotImageView.alpha = 0.0f;
|
||||||
} completion:^(__unused BOOL finished)
|
} completion:^(__unused BOOL finished)
|
||||||
{
|
{
|
||||||
if (completion != nil)
|
if (!switching) {
|
||||||
completion();
|
if (completion != nil)
|
||||||
|
completion();
|
||||||
|
}
|
||||||
}];
|
}];
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -371,8 +426,6 @@ const CGFloat TGPhotoAvatarPreviewLandscapePanelSize = TGPhotoAvatarPreviewPanel
|
|||||||
{
|
{
|
||||||
_dismissing = true;
|
_dismissing = true;
|
||||||
|
|
||||||
self.photoEditor.additionalOutputs = @[];
|
|
||||||
|
|
||||||
TGPhotoEditorPreviewView *previewView = self.previewView;
|
TGPhotoEditorPreviewView *previewView = self.previewView;
|
||||||
[previewView prepareForTransitionOut];
|
[previewView prepareForTransitionOut];
|
||||||
|
|
||||||
@ -426,15 +479,16 @@ const CGFloat TGPhotoAvatarPreviewLandscapePanelSize = TGPhotoAvatarPreviewPanel
|
|||||||
_appeared = true;
|
_appeared = true;
|
||||||
|
|
||||||
if ([transitionView isKindOfClass:[TGPhotoEditorPreviewView class]]) {
|
if ([transitionView isKindOfClass:[TGPhotoEditorPreviewView class]]) {
|
||||||
[self.view insertSubview:transitionView atIndex:0];
|
|
||||||
} else {
|
} else {
|
||||||
[transitionView removeFromSuperview];
|
[transitionView removeFromSuperview];
|
||||||
}
|
}
|
||||||
|
|
||||||
TGPhotoEditorPreviewView *previewView = _previewView;
|
TGPhotoEditorPreviewView *previewView = _previewView;
|
||||||
previewView.hidden = false;
|
previewView.hidden = true;
|
||||||
[previewView performTransitionInIfNeeded];
|
[previewView performTransitionInIfNeeded];
|
||||||
|
|
||||||
|
[_cropView openCurtains];
|
||||||
[_cropView transitionInFinishedFromCamera:(self.fromCamera && self.initialAppearance)];
|
[_cropView transitionInFinishedFromCamera:(self.fromCamera && self.initialAppearance)];
|
||||||
|
|
||||||
PGPhotoEditor *photoEditor = self.photoEditor;
|
PGPhotoEditor *photoEditor = self.photoEditor;
|
||||||
@ -467,12 +521,13 @@ const CGFloat TGPhotoAvatarPreviewLandscapePanelSize = TGPhotoAvatarPreviewPanel
|
|||||||
|
|
||||||
- (CGRect)transitionOutReferenceFrame
|
- (CGRect)transitionOutReferenceFrame
|
||||||
{
|
{
|
||||||
return [_cropView cropRectFrameForView:self.view];
|
TGPhotoEditorPreviewView *previewView = _previewView;
|
||||||
|
return previewView.frame;
|
||||||
}
|
}
|
||||||
|
|
||||||
- (UIView *)transitionOutReferenceView
|
- (UIView *)transitionOutReferenceView
|
||||||
{
|
{
|
||||||
return [_cropView cropSnapshotView];
|
return _previewView;
|
||||||
}
|
}
|
||||||
|
|
||||||
- (id)currentResultRepresentation
|
- (id)currentResultRepresentation
|
||||||
@ -493,7 +548,13 @@ const CGFloat TGPhotoAvatarPreviewLandscapePanelSize = TGPhotoAvatarPreviewPanel
|
|||||||
{
|
{
|
||||||
[super viewWillLayoutSubviews];
|
[super viewWillLayoutSubviews];
|
||||||
|
|
||||||
|
|
||||||
[self updateLayout:[[LegacyComponentsGlobals provider] applicationStatusBarOrientation]];
|
[self updateLayout:[[LegacyComponentsGlobals provider] applicationStatusBarOrientation]];
|
||||||
|
|
||||||
|
if (_scheduledTransitionIn) {
|
||||||
|
_scheduledTransitionIn = false;
|
||||||
|
[self transitionIn];
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
- (CGRect)transitionOutSourceFrameForReferenceFrame:(CGRect)referenceFrame orientation:(UIInterfaceOrientation)orientation
|
- (CGRect)transitionOutSourceFrameForReferenceFrame:(CGRect)referenceFrame orientation:(UIInterfaceOrientation)orientation
|
||||||
@ -508,11 +569,34 @@ const CGFloat TGPhotoAvatarPreviewLandscapePanelSize = TGPhotoAvatarPreviewPanel
|
|||||||
- (CGRect)_targetFrameForTransitionInFromFrame:(CGRect)fromFrame
|
- (CGRect)_targetFrameForTransitionInFromFrame:(CGRect)fromFrame
|
||||||
{
|
{
|
||||||
CGSize referenceSize = [self referenceViewSize];
|
CGSize referenceSize = [self referenceViewSize];
|
||||||
CGRect containerFrame = [TGPhotoAvatarPreviewController photoContainerFrameForParentViewFrame:CGRectMake(0, 0, referenceSize.width, referenceSize.height) toolbarLandscapeSize:self.toolbarLandscapeSize orientation:self.effectiveOrientation panelSize:0 hasOnScreenNavigation:self.hasOnScreenNavigation];
|
UIInterfaceOrientation orientation = self.effectiveOrientation;
|
||||||
CGSize fittedSize = TGScaleToSize(fromFrame.size, containerFrame.size);
|
|
||||||
CGRect toFrame = CGRectMake(containerFrame.origin.x + (containerFrame.size.width - fittedSize.width) / 2, containerFrame.origin.y + (containerFrame.size.height - fittedSize.height) / 2, fittedSize.width, fittedSize.height);
|
|
||||||
|
|
||||||
return toFrame;
|
CGRect containerFrame = [TGPhotoEditorTabController photoContainerFrameForParentViewFrame:CGRectMake(0, 0, referenceSize.width, referenceSize.height) toolbarLandscapeSize:self.toolbarLandscapeSize orientation:orientation panelSize:0.0f hasOnScreenNavigation:self.hasOnScreenNavigation];
|
||||||
|
|
||||||
|
CGRect targetFrame = CGRectZero;
|
||||||
|
|
||||||
|
CGFloat shortSide = MIN(referenceSize.width, referenceSize.height);
|
||||||
|
CGFloat diameter = shortSide - [TGPhotoAvatarCropView areaInsetSize].width * 2;
|
||||||
|
if (self.initialAppearance && (self.fromCamera || !self.skipTransitionIn))
|
||||||
|
{
|
||||||
|
CGSize referenceSize = fromFrame.size;
|
||||||
|
if ([_transitionView isKindOfClass:[UIImageView class]])
|
||||||
|
referenceSize = ((UIImageView *)_transitionView).image.size;
|
||||||
|
|
||||||
|
CGSize fittedSize = TGScaleToFill(referenceSize, CGSizeMake(diameter, diameter));
|
||||||
|
|
||||||
|
targetFrame = CGRectMake(containerFrame.origin.x + (containerFrame.size.width - fittedSize.width) / 2,
|
||||||
|
containerFrame.origin.y + (containerFrame.size.height - fittedSize.height) / 2,
|
||||||
|
fittedSize.width, fittedSize.height);
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
targetFrame = CGRectMake(containerFrame.origin.x + (containerFrame.size.width - diameter) / 2,
|
||||||
|
containerFrame.origin.y + (containerFrame.size.height - diameter) / 2,
|
||||||
|
diameter, diameter);
|
||||||
|
}
|
||||||
|
|
||||||
|
return targetFrame;
|
||||||
}
|
}
|
||||||
|
|
||||||
+ (CGRect)photoContainerFrameForParentViewFrame:(CGRect)parentViewFrame toolbarLandscapeSize:(CGFloat)toolbarLandscapeSize orientation:(UIInterfaceOrientation)orientation panelSize:(CGFloat)panelSize hasOnScreenNavigation:(bool)hasOnScreenNavigation
|
+ (CGRect)photoContainerFrameForParentViewFrame:(CGRect)parentViewFrame toolbarLandscapeSize:(CGFloat)toolbarLandscapeSize orientation:(UIInterfaceOrientation)orientation panelSize:(CGFloat)panelSize hasOnScreenNavigation:(bool)hasOnScreenNavigation
|
||||||
|
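Note on the last hunk above: the new -_targetFrameForTransitionInFromFrame: derives a circular crop diameter from the short side of the reference view and, on the initial appearance, scale-to-fills the incoming image into that square before centering it in the photo container. A minimal Swift sketch of that fitting rule, assuming a TGScaleToFill-style helper rather than the project's actual function:

    import CoreGraphics

    // Scale `size` up or down so it fully covers `target`, preserving aspect ratio
    // (the behaviour TGScaleToFill is used for in the hunk above).
    func scaleToFill(_ size: CGSize, into target: CGSize) -> CGSize {
        guard size.width > 0, size.height > 0 else { return target }
        let scale = max(target.width / size.width, target.height / size.height)
        return CGSize(width: size.width * scale, height: size.height * scale)
    }

    // Square crop target: the short screen side minus the crop-area insets on both
    // sides, centered inside the container frame. Names are assumed for illustration.
    func avatarTargetFrame(imageSize: CGSize, shortSide: CGFloat, areaInset: CGFloat, container: CGRect) -> CGRect {
        let diameter = shortSide - areaInset * 2
        let fitted = scaleToFill(imageSize, into: CGSize(width: diameter, height: diameter))
        return CGRect(x: container.midX - fitted.width / 2,
                      y: container.midY - fitted.height / 2,
                      width: fitted.width,
                      height: fitted.height)
    }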
@ -66,6 +66,7 @@
 TGPhotoToolbarView *_portraitToolbarView;
 TGPhotoToolbarView *_landscapeToolbarView;
 TGPhotoEditorPreviewView *_previewView;
+PGPhotoEditorView *_fullPreviewView;

 PGPhotoEditor *_photoEditor;

@ -326,6 +327,12 @@
 [_photoEditor setPreviewOutput:_previewView];
 [self updatePreviewView];

+if ([self presentedForAvatarCreation]) {
+CGSize fittedSize = TGScaleToSize(_photoEditor.originalSize, CGSizeMake(1024, 1024));
+_fullPreviewView = [[PGPhotoEditorView alloc] initWithFrame:CGRectMake(0, 0, fittedSize.width, fittedSize.height)];
+_photoEditor.additionalOutputs = @[_fullPreviewView];
+[self.view addSubview:_fullPreviewView];
+}

 _dotMarkerView = [[UIImageView alloc] initWithImage:TGCircleImage(7.0, [TGPhotoEditorInterfaceAssets accentColor])];
 [_scrubberView addSubview:_dotMarkerView];
@ -345,6 +352,7 @@

 if ([self presentedForAvatarCreation] && _item.isVideo) {
 _scrubberView = [[TGMediaPickerGalleryVideoScrubber alloc] initWithFrame:CGRectMake(0.0f, 0.0, _portraitToolbarView.frame.size.width, 68.0f)];
+_scrubberView.hasDotPicker = true;
 _scrubberView.dataSource = self;
 _scrubberView.delegate = self;
 _scrubberView.clipsToBounds = false;
@ -470,12 +478,17 @@
 if (_item.isVideo) {
 signal = [self.requestOriginalFullSizeImage(_item, position) deliverOn:_queue];
 } else {
+bool avatar = [self presentedForAvatarCreation];
 signal = [[[[self.requestOriginalFullSizeImage(_item, position) takeLast] deliverOn:_queue] filter:^bool(id image)
 {
 return [image isKindOfClass:[UIImage class]];
 }] map:^UIImage *(UIImage *image)
 {
-return TGPhotoEditorCrop(image, nil, _photoEditor.cropOrientation, _photoEditor.cropRotation, _photoEditor.cropRect, _photoEditor.cropMirrored, TGPhotoEditorScreenImageMaxSize(), _photoEditor.originalSize, true);
+if (avatar) {
+return image;
+} else {
+return TGPhotoEditorCrop(image, nil, _photoEditor.cropOrientation, _photoEditor.cropRotation, _photoEditor.cropRect, _photoEditor.cropMirrored, TGPhotoEditorScreenImageMaxSize(), _photoEditor.originalSize, true);
+}
 }];
 }
 }
@ -620,6 +633,11 @@
 }];
 }

+- (void)returnFullPreviewView {
+_fullPreviewView.frame = CGRectMake(-10000, 0, _fullPreviewView.frame.size.width, _fullPreviewView.frame.size.height);
+[self.view addSubview:_fullPreviewView];
+}

 - (void)startVideoPlayback:(bool)reset {
 if (reset && _player == nil) {
 _scheduledVideoPlayback = true;
@ -1029,7 +1047,6 @@

 _portraitToolbarView.alpha = 0.0f;
 _landscapeToolbarView.alpha = 0.0f;

 [UIView animateWithDuration:0.3f delay:delay options:UIViewAnimationOptionCurveLinear animations:^
 {
 _portraitToolbarView.alpha = 1.0f;
@ -1098,10 +1115,6 @@
 if (![currentController isDismissAllowed])
 return;

-transitionReferenceFrame = [currentController transitionOutReferenceFrame];
-transitionReferenceView = [currentController transitionOutReferenceView];
-transitionNoTransitionView = [currentController isKindOfClass:[TGPhotoAvatarPreviewController class]];

 currentController.switchingToTab = tab;
 [currentController transitionOutSwitching:true completion:^
 {
@ -1109,6 +1122,10 @@
 [currentController.view removeFromSuperview];
 }];

+transitionReferenceFrame = [currentController transitionOutReferenceFrame];
+transitionReferenceView = [currentController transitionOutReferenceView];
+transitionNoTransitionView = false;

 if ([currentController isKindOfClass:[TGPhotoCropController class]])
 {
 _backgroundView.alpha = 1.0f;
@ -1143,11 +1160,23 @@
 }
 }

+if ([self presentedForAvatarCreation])
+transitionNoTransitionView = true;

 snapshotImage = _screenImage;
 }

 _switchingTab = true;

+if ([_currentTabController isKindOfClass:[TGPhotoAvatarPreviewController class]]) {
+if (_item.isVideo && !_isPlaying) {
+[self setPlayButtonHidden:true animated:false];
+[self startVideoPlayback:false];
+} else if (!_item.isVideo) {
+[_photoEditor processAnimated:false completion:nil];
+}
+}

 TGPhotoEditorBackButton backButtonType = TGPhotoEditorBackButtonCancel;
 TGPhotoEditorDoneButton doneButtonType = TGPhotoEditorDoneButtonCheck;

@ -1163,12 +1192,10 @@
 {
 bool skipInitialTransition = (![self presentedFromCamera] && self.navigationController != nil) || self.skipInitialTransition;

-TGPhotoAvatarPreviewController *cropController = [[TGPhotoAvatarPreviewController alloc] initWithContext:_context photoEditor:_photoEditor previewView:_previewView scrubberView:_scrubberView dotImageView:_dotImageView];
+TGPhotoAvatarPreviewController *cropController = [[TGPhotoAvatarPreviewController alloc] initWithContext:_context photoEditor:_photoEditor previewView:_previewView scrubberView:_scrubberView dotImageView:_dotImageView fullPreviewView:_fullPreviewView];
 cropController.fromCamera = [self presentedFromCamera];
 cropController.skipTransitionIn = skipInitialTransition;
-if (snapshotView != nil)
-[cropController setSnapshotView:snapshotView];
-else if (snapshotImage != nil)
+if (snapshotImage != nil)
 [cropController setSnapshotImage:snapshotImage];
 cropController.toolbarLandscapeSize = TGPhotoEditorToolbarSize;
 cropController.controlVideoPlayback = ^(bool play) {
@ -1190,7 +1217,7 @@
 [strongSelf stopVideoPlayback:false];
 [strongSelf setPlayButtonHidden:false animated:true];
 } else {
-[strongSelf startVideoPlayback:true];
+[strongSelf startVideoPlayback:false];
 [strongSelf setPlayButtonHidden:true animated:true];
 }
 };
@ -1258,7 +1285,9 @@
 }

 strongSelf->_switchingTab = false;
-[strongSelf startVideoPlayback:true];
+if (isInitialAppearance)
+[strongSelf startVideoPlayback:true];
 };
 cropController.finishedTransitionOut = ^
 {
@ -1273,40 +1302,12 @@
 }

 [strongSelf->_currentTabController _finishedTransitionInWithView:nil];

+[strongSelf returnFullPreviewView];
 };

-[[[[self.requestOriginalFullSizeImage(_item, 0) reduceLeftWithPassthrough:nil with:^id(__unused id current, __unused id next, void (^emit)(id))
-{
-if ([next isKindOfClass:[UIImage class]])
-{
-if ([next degraded])
-{
-emit(next);
-return current;
-}
-return next;
-}
-else
-{
-return current;
-}
-}] filter:^bool(id result)
-{
-return (result != nil);
-}] deliverOn:[SQueue mainQueue]] startWithNext:^(UIImage *image)
-{
-if (cropController.dismissing && !cropController.switching)
-return;

-[self updateDoneButtonEnabled:!image.degraded animated:true];
-if (image.degraded) {
-return;
-} else {
-self.fullSizeImage = image;
-[cropController setImage:image];
-}
-}];
 controller = cropController;

+doneButtonType = TGPhotoEditorDoneButtonDone;
 }
 else
 {
@ -1441,7 +1442,9 @@
 strongSelf.finishedTransitionIn();

 strongSelf->_switchingTab = false;
-[strongSelf startVideoPlayback:true];
+if (isInitialAppearance)
+[strongSelf startVideoPlayback:true];
 };

 controller = paintController;
@ -1471,9 +1474,9 @@

 strongSelf->_switchingTab = false;

-[strongSelf startVideoPlayback:true];
+if (isInitialAppearance)
+[strongSelf startVideoPlayback:true];
 };

 controller = toolsController;
 }
 break;
@ -1510,71 +1513,11 @@
 }
 break;

-case TGPhotoEditorPreviewTab:
-{
-if ([_currentTabController isKindOfClass:[TGPhotoToolsController class]]) {
-[_scrubberView reloadDataAndReset:false];
-[self updateDotImage:false];
-}

-TGPhotoAvatarPreviewController *previewController = [[TGPhotoAvatarPreviewController alloc] initWithContext:_context photoEditor:_photoEditor previewView:_previewView scrubberView:_scrubberView dotImageView:_dotImageView];
-previewController.item = _item;
-previewController.toolbarLandscapeSize = TGPhotoEditorToolbarSize;
-previewController.beginTransitionIn = ^UIView *(CGRect *referenceFrame, UIView **parentView, bool *noTransitionView)
-{
-*referenceFrame = transitionReferenceFrame;
-*parentView = transitionParentView;
-*noTransitionView = transitionNoTransitionView;

-__strong TGPhotoEditorController *strongSelf = weakSelf;
-if (strongSelf != nil) {
-[strongSelf startVideoPlayback:true];
-}

-return transitionReferenceView;
-};
-previewController.finishedTransitionIn = ^
-{
-__strong TGPhotoEditorController *strongSelf = weakSelf;
-if (strongSelf == nil)
-return;

-if (isInitialAppearance && strongSelf.finishedTransitionIn != nil)
-strongSelf.finishedTransitionIn();

-strongSelf->_switchingTab = false;
-};
-previewController.controlVideoPlayback = ^(bool play) {
-__strong TGPhotoEditorController *strongSelf = weakSelf;
-if (strongSelf == nil)
-return;
-if (play) {
-[strongSelf startVideoPlayback:false];
-} else {
-[strongSelf stopVideoPlayback:false];
-}
-};
-previewController.controlVideoSeek = ^(NSTimeInterval position) {
-__strong TGPhotoEditorController *strongSelf = weakSelf;
-if (strongSelf != nil)
-[strongSelf seekVideo:position];
-};
-previewController.controlVideoEndTime = ^(NSTimeInterval endTime) {
-__strong TGPhotoEditorController *strongSelf = weakSelf;
-if (strongSelf != nil)
-[strongSelf setVideoEndTime:endTime];
-};
-controller = previewController;

-doneButtonType = TGPhotoEditorDoneButtonDone;
-}
-break;

 default:
 break;
 }

-if ([self presentedForAvatarCreation] && !isInitialAppearance && tab != TGPhotoEditorPreviewTab) {
+if ([self presentedForAvatarCreation] && !isInitialAppearance && tab != TGPhotoEditorCropTab) {
 backButtonType = TGPhotoEditorBackButtonBack;
 }

@ -1707,7 +1650,7 @@
 - (void)dismissEditor
 {
 if (![_currentTabController isKindOfClass:[TGPhotoAvatarPreviewController class]] && [self presentedForAvatarCreation]) {
-[self presentTab:TGPhotoEditorPreviewTab];
+[self presentTab:TGPhotoEditorCropTab];
 return;
 }

@ -1800,7 +1743,7 @@
 - (void)doneButtonPressed
 {
 if ([self presentedForAvatarCreation] && ![_currentTabController isKindOfClass:[TGPhotoAvatarPreviewController class]]) {
-[self presentTab:TGPhotoEditorPreviewTab];
+[self presentTab:TGPhotoEditorCropTab];
 } else {
 [self applyEditor];
 }
@ -1899,7 +1842,14 @@
 }

 NSTimeInterval duration = trimEndValue - trimStartValue;
-TGMediaVideoConversionPreset preset = duration < 4.0 ? TGMediaVideoConversionPresetProfileHigh : TGMediaVideoConversionPresetProfile;
+TGMediaVideoConversionPreset preset;
+if (duration <= 2.5) {
+preset = TGMediaVideoConversionPresetProfileVeryHigh;
+} else if (duration <= 5.0) {
+preset = TGMediaVideoConversionPresetProfileHigh;
+} else {
+preset = TGMediaVideoConversionPresetProfile;
+}

 TGDispatchOnMainThread(^{
 if (self.didFinishEditingVideo != nil)
@ -2256,6 +2206,8 @@
 CGFloat portraitToolbarViewBottomEdge = screenSide;
 if ([UIDevice currentDevice].userInterfaceIdiom == UIUserInterfaceIdiomPad)
 portraitToolbarViewBottomEdge = screenEdges.bottom;

+CGFloat previousWidth = _portraitToolbarView.frame.size.width;
 _portraitToolbarView.frame = CGRectMake(screenEdges.left, portraitToolbarViewBottomEdge - TGPhotoEditorToolbarSize - safeAreaInset.bottom, referenceSize.width, TGPhotoEditorToolbarSize + safeAreaInset.bottom);

 _scrubberView.frame = CGRectMake(0.0, 0.0, _portraitToolbarView.frame.size.width, _scrubberView.frame.size.height);
@ -2267,7 +2219,8 @@
 [_scrubberView reloadData];
 [_scrubberView resetToStart];
 } else {
-[_scrubberView reloadThumbnails];
+if (previousWidth != _portraitToolbarView.frame.size.width)
+[_scrubberView reloadThumbnails];
 }
 });
 }
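One detail worth calling out in the TGPhotoEditorController changes above: the trimmed-duration check that used to pick between two conversion presets now picks between three tiers. A minimal Swift sketch of the same tiering, with the enum and case names assumed to mirror the Objective-C constants rather than taken from the project:

    import Foundation

    // Assumed stand-in for TGMediaVideoConversionPreset; not the project's type.
    enum VideoConversionPreset {
        case profileVeryHigh
        case profileHigh
        case profile
    }

    // Shorter trimmed avatar videos get a higher-quality preset, as in the hunk above.
    func preset(forTrimmedDuration duration: TimeInterval) -> VideoConversionPreset {
        if duration <= 2.5 {
            return .profileVeryHigh
        } else if (duration <= 5.0) {
            return .profileHigh
        } else {
            return .profile
        }
    }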
@ -147,7 +147,8 @@ const CGFloat TGPhotoEditorToolbarSize = 49.0f;
 }
 else
 {
-_transitionView = [referenceView snapshotViewAfterScreenUpdates:false];
+if (![referenceView isKindOfClass:[TGPhotoEditorPreviewView class]])
+_transitionView = [referenceView snapshotViewAfterScreenUpdates:false];
 if (_transitionView == nil) {
 _transitionView = referenceView;
 }
@ -168,27 +169,45 @@ const CGFloat TGPhotoEditorToolbarSize = 49.0f;

 _transitionInProgress = true;

-POPSpringAnimation *animation = [TGPhotoEditorAnimation prepareTransitionAnimationForPropertyNamed:kPOPViewFrame];
-if (self.transitionSpeed > FLT_EPSILON)
-animation.springSpeed = self.transitionSpeed;
-animation.fromValue = [NSValue valueWithCGRect:_transitionView.frame];
-animation.toValue = [NSValue valueWithCGRect:_transitionTargetFrame];
-animation.completionBlock = ^(__unused POPAnimation *animation, __unused BOOL finished)
+[UIView animateWithDuration:0.3f delay:0.0f options:UIViewAnimationOptionCurveEaseInOut | UIViewAnimationOptionLayoutSubviews animations:^
 {
+_transitionView.frame = _transitionTargetFrame;
+} completion:^(BOOL finished) {
 _transitionInProgress = false;

 UIView *transitionView = _transitionView;
 _transitionView = nil;

 if (self.finishedTransitionIn != nil)
 {
 self.finishedTransitionIn();
 self.finishedTransitionIn = nil;
 }

 [self _finishedTransitionInWithView:transitionView];
-};
-[_transitionView pop_addAnimation:animation forKey:@"frame"];
+}];

+// POPSpringAnimation *animation = [TGPhotoEditorAnimation prepareTransitionAnimationForPropertyNamed:kPOPViewFrame];
+// if (self.transitionSpeed > FLT_EPSILON)
+// animation.springSpeed = self.transitionSpeed;
+// animation.fromValue = [NSValue valueWithCGRect:_transitionView.frame];
+// animation.toValue = [NSValue valueWithCGRect:_transitionTargetFrame];
+// animation.completionBlock = ^(__unused POPAnimation *animation, __unused BOOL finished)
+// {
+// _transitionInProgress = false;
+//
+// UIView *transitionView = _transitionView;
+// _transitionView = nil;
+//
+// if (self.finishedTransitionIn != nil)
+// {
+// self.finishedTransitionIn();
+// self.finishedTransitionIn = nil;
+// }
+//
+// [self _finishedTransitionInWithView:transitionView];
+// };
+// [_transitionView pop_addAnimation:animation forKey:@"frame"];
 }

 - (void)prepareForCustomTransitionOut
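The TGPhotoEditorTabController hunk above swaps the POP spring animation for a plain UIView block animation while keeping the same completion work (clearing _transitionView and firing finishedTransitionIn). A rough Swift equivalent of that pattern, with the helper name assumed for illustration:

    import UIKit

    // Animate a transition view into its target frame and hand control back once the
    // animation finishes, mirroring the replacement in the hunk above.
    func runTransition(of transitionView: UIView, to targetFrame: CGRect, completion: @escaping (UIView) -> Void) {
        UIView.animate(withDuration: 0.3,
                       delay: 0,
                       options: [.curveEaseInOut, .layoutSubviews],
                       animations: {
            transitionView.frame = targetFrame
        }, completion: { _ in
            completion(transitionView)
        })
    }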
@ -181,7 +181,7 @@ const CGFloat TGPhotoEditorToolsLandscapePanelSize = TGPhotoEditorToolsPanelSize
|
|||||||
if (strongSelf != nil)
|
if (strongSelf != nil)
|
||||||
[strongSelf setPreview:!strongSelf->_preview animated:true];
|
[strongSelf setPreview:!strongSelf->_preview animated:true];
|
||||||
};
|
};
|
||||||
previewView.customTouchDownHandling = forVideo;
|
previewView.customTouchDownHandling = true;
|
||||||
[self.view addSubview:_previewView];
|
[self.view addSubview:_previewView];
|
||||||
|
|
||||||
_wrapperView = [[TGPhotoEditorSparseView alloc] initWithFrame:CGRectZero];
|
_wrapperView = [[TGPhotoEditorSparseView alloc] initWithFrame:CGRectZero];
|
||||||
|
@ -19,7 +19,7 @@ public func presentLegacyAvatarPicker(holder: Atomic<NSObject?>, signup: Bool, t
|
|||||||
|
|
||||||
present(legacyController, nil)
|
present(legacyController, nil)
|
||||||
|
|
||||||
let mixin = TGMediaAvatarMenuMixin(context: legacyController.context, parentController: emptyController, hasSearchButton: false, hasDeleteButton: false, hasViewButton: openCurrent != nil, personalPhoto: true, saveEditedPhotos: false, saveCapturedMedia: false, signup: signup)!
|
let mixin = TGMediaAvatarMenuMixin(context: legacyController.context, parentController: emptyController, hasSearchButton: false, hasDeleteButton: false, hasViewButton: openCurrent != nil, personalPhoto: true, isVideo: false, saveEditedPhotos: false, saveCapturedMedia: false, signup: signup)!
|
||||||
let _ = holder.swap(mixin)
|
let _ = holder.swap(mixin)
|
||||||
mixin.didFinishWithImage = { image in
|
mixin.didFinishWithImage = { image in
|
||||||
guard let image = image else {
|
guard let image = image else {
|
||||||
|
@ -230,47 +230,6 @@ final class PeerAvatarImageGalleryItemNode: ZoomableContentGalleryItemNode {
|
|||||||
self.fetchDisposable.set(fetchedMediaResource(mediaBox: self.context.account.postbox.mediaBox, reference: representations[largestIndex].reference).start())
|
self.fetchDisposable.set(fetchedMediaResource(mediaBox: self.context.account.postbox.mediaBox, reference: representations[largestIndex].reference).start())
|
||||||
}
|
}
|
||||||
|
|
||||||
// self.statusDisposable.set((self.context.account.postbox.mediaBox.resourceStatus(largestSize.resource)
|
|
||||||
// |> deliverOnMainQueue).start(next: { [weak self] status in
|
|
||||||
// if let strongSelf = self {
|
|
||||||
// let previousStatus = strongSelf.status
|
|
||||||
// strongSelf.status = status
|
|
||||||
// switch status {
|
|
||||||
// case .Remote:
|
|
||||||
// strongSelf.statusNode.isHidden = false
|
|
||||||
// strongSelf.statusNodeContainer.isUserInteractionEnabled = true
|
|
||||||
// strongSelf.statusNode.transitionToState(.download(.white), completion: {})
|
|
||||||
// case let .Fetching(_, progress):
|
|
||||||
// strongSelf.statusNode.isHidden = false
|
|
||||||
// strongSelf.statusNodeContainer.isUserInteractionEnabled = true
|
|
||||||
// let adjustedProgress = max(progress, 0.027)
|
|
||||||
// strongSelf.statusNode.transitionToState(.progress(color: .white, lineWidth: nil, value: CGFloat(adjustedProgress), cancelEnabled: true), completion: {})
|
|
||||||
// case .Local:
|
|
||||||
// if let previousStatus = previousStatus, case .Fetching = previousStatus {
|
|
||||||
// strongSelf.statusNode.transitionToState(.progress(color: .white, lineWidth: nil, value: 1.0, cancelEnabled: true), completion: {
|
|
||||||
// if let strongSelf = self {
|
|
||||||
// strongSelf.statusNode.alpha = 0.0
|
|
||||||
// strongSelf.statusNodeContainer.isUserInteractionEnabled = false
|
|
||||||
// strongSelf.statusNode.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2, completion: { _ in
|
|
||||||
// if let strongSelf = self {
|
|
||||||
// strongSelf.statusNode.transitionToState(.none, animated: false, completion: {})
|
|
||||||
// }
|
|
||||||
// })
|
|
||||||
// }
|
|
||||||
// })
|
|
||||||
// } else if !strongSelf.statusNode.isHidden && !strongSelf.statusNode.alpha.isZero {
|
|
||||||
// strongSelf.statusNode.alpha = 0.0
|
|
||||||
// strongSelf.statusNodeContainer.isUserInteractionEnabled = false
|
|
||||||
// strongSelf.statusNode.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2, completion: { _ in
|
|
||||||
// if let strongSelf = self {
|
|
||||||
// strongSelf.statusNode.transitionToState(.none, animated: false, completion: {})
|
|
||||||
// }
|
|
||||||
// })
|
|
||||||
// }
|
|
||||||
// }
|
|
||||||
// }
|
|
||||||
// }))
|
|
||||||
|
|
||||||
var id: Int64?
|
var id: Int64?
|
||||||
if case let .image(image) = entry {
|
if case let .image(image) = entry {
|
||||||
id = image.0.id
|
id = image.0.id
|
||||||
@ -287,10 +246,12 @@ final class PeerAvatarImageGalleryItemNode: ZoomableContentGalleryItemNode {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
videoNode.canAttachContent = true
|
videoNode.canAttachContent = true
|
||||||
|
if let startTimestamp = video.startTimestamp {
|
||||||
|
videoNode.seek(startTimestamp)
|
||||||
|
}
|
||||||
if videoNode.hasAttachedContext {
|
if videoNode.hasAttachedContext {
|
||||||
videoNode.play()
|
videoNode.play()
|
||||||
}
|
}
|
||||||
|
|
||||||
self.videoContent = videoContent
|
self.videoContent = videoContent
|
||||||
self.videoNode = videoNode
|
self.videoNode = videoNode
|
||||||
|
|
||||||
|
@ -754,7 +754,7 @@ public func channelInfoController(context: AccountContext, peerId: PeerId) -> Vi
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
let mixin = TGMediaAvatarMenuMixin(context: legacyController.context, parentController: emptyController, hasSearchButton: true, hasDeleteButton: hasPhotos, hasViewButton: false, personalPhoto: false, saveEditedPhotos: false, saveCapturedMedia: false, signup: true)!
|
let mixin = TGMediaAvatarMenuMixin(context: legacyController.context, parentController: emptyController, hasSearchButton: true, hasDeleteButton: hasPhotos, hasViewButton: false, personalPhoto: false, isVideo: false, saveEditedPhotos: false, saveCapturedMedia: false, signup: true)!
|
||||||
let _ = currentAvatarMixin.swap(mixin)
|
let _ = currentAvatarMixin.swap(mixin)
|
||||||
mixin.requestSearchController = { assetsController in
|
mixin.requestSearchController = { assetsController in
|
||||||
let controller = WebSearchController(context: context, peer: peer, configuration: searchBotsConfiguration, mode: .avatar(initialQuery: peer?.displayTitle(strings: presentationData.strings, displayOrder: presentationData.nameDisplayOrder), completion: { result in
|
let controller = WebSearchController(context: context, peer: peer, configuration: searchBotsConfiguration, mode: .avatar(initialQuery: peer?.displayTitle(strings: presentationData.strings, displayOrder: presentationData.nameDisplayOrder), completion: { result in
|
||||||
|
@ -1479,7 +1479,7 @@ public func groupInfoController(context: AccountContext, peerId originalPeerId:
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
let mixin = TGMediaAvatarMenuMixin(context: legacyController.context, parentController: emptyController, hasSearchButton: true, hasDeleteButton: hasPhotos, hasViewButton: false, personalPhoto: false, saveEditedPhotos: false, saveCapturedMedia: false, signup: true)!
|
let mixin = TGMediaAvatarMenuMixin(context: legacyController.context, parentController: emptyController, hasSearchButton: true, hasDeleteButton: hasPhotos, hasViewButton: false, personalPhoto: false, isVideo: false, saveEditedPhotos: false, saveCapturedMedia: false, signup: true)!
|
||||||
let _ = currentAvatarMixin.swap(mixin)
|
let _ = currentAvatarMixin.swap(mixin)
|
||||||
mixin.requestSearchController = { assetsController in
|
mixin.requestSearchController = { assetsController in
|
||||||
let controller = WebSearchController(context: context, peer: peer, configuration: searchBotsConfiguration, mode: .avatar(initialQuery: peer?.displayTitle(strings: presentationData.strings, displayOrder: presentationData.nameDisplayOrder), completion: { result in
|
let controller = WebSearchController(context: context, peer: peer, configuration: searchBotsConfiguration, mode: .avatar(initialQuery: peer?.displayTitle(strings: presentationData.strings, displayOrder: presentationData.nameDisplayOrder), completion: { result in
|
||||||
|
@ -14,6 +14,7 @@ swift_library(
|
|||||||
"//submodules/Display:Display",
|
"//submodules/Display:Display",
|
||||||
"//submodules/AccountContext:AccountContext",
|
"//submodules/AccountContext:AccountContext",
|
||||||
"//submodules/DeviceAccess:DeviceAccess",
|
"//submodules/DeviceAccess:DeviceAccess",
|
||||||
|
"//submodules/LegacyComponents:LegacyComponents",
|
||||||
],
|
],
|
||||||
visibility = [
|
visibility = [
|
||||||
"//visibility:public",
|
"//visibility:public",
|
||||||
|
@ -9,6 +9,7 @@ import Display
|
|||||||
import MobileCoreServices
|
import MobileCoreServices
|
||||||
import DeviceAccess
|
import DeviceAccess
|
||||||
import AccountContext
|
import AccountContext
|
||||||
|
import LegacyComponents
|
||||||
|
|
||||||
public enum FetchMediaDataState {
|
public enum FetchMediaDataState {
|
||||||
case progress(Float)
|
case progress(Float)
|
||||||
|
@ -532,7 +532,7 @@ func editSettingsController(context: AccountContext, currentName: ItemListAvatar
|
|||||||
updateState {
|
updateState {
|
||||||
$0.withUpdatedUpdatingAvatar(.image(representation, true))
|
$0.withUpdatedUpdatingAvatar(.image(representation, true))
|
||||||
}
|
}
|
||||||
updateAvatarDisposable.set((updateAccountPhoto(account: context.account, resource: resource, videoResource: nil, mapResourceToAvatarSizes: { resource, representations in
|
updateAvatarDisposable.set((updateAccountPhoto(account: context.account, resource: resource, videoResource: nil, videoStartTimestamp: nil, mapResourceToAvatarSizes: { resource, representations in
|
||||||
return mapResourceToAvatarSizes(postbox: context.account.postbox, resource: resource, representations: representations)
|
return mapResourceToAvatarSizes(postbox: context.account.postbox, resource: resource, representations: representations)
|
||||||
}) |> deliverOnMainQueue).start(next: { result in
|
}) |> deliverOnMainQueue).start(next: { result in
|
||||||
switch result {
|
switch result {
|
||||||
@ -556,6 +556,11 @@ func editSettingsController(context: AccountContext, currentName: ItemListAvatar
|
|||||||
$0.withUpdatedUpdatingAvatar(.image(representation, true))
|
$0.withUpdatedUpdatingAvatar(.image(representation, true))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
var videoStartTimestamp: Double? = nil
|
||||||
|
if let adjustments = adjustments, adjustments.videoStartValue > 0.0 {
|
||||||
|
videoStartTimestamp = adjustments.videoStartValue - adjustments.trimStartValue
|
||||||
|
}
|
||||||
|
|
||||||
let signal = Signal<TelegramMediaResource, UploadPeerPhotoError> { subscriber in
|
let signal = Signal<TelegramMediaResource, UploadPeerPhotoError> { subscriber in
|
||||||
var filteredPath = url.path
|
var filteredPath = url.path
|
||||||
if filteredPath.hasPrefix("file://") {
|
if filteredPath.hasPrefix("file://") {
|
||||||
@ -606,9 +611,11 @@ func editSettingsController(context: AccountContext, currentName: ItemListAvatar
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
updateAvatarDisposable.set((signal
|
updateAvatarDisposable.set((signal
|
||||||
|> mapToSignal { videoResource in
|
|> mapToSignal { videoResource in
|
||||||
return updateAccountPhoto(account: context.account, resource: photoResource, videoResource: videoResource, mapResourceToAvatarSizes: { resource, representations in
|
return updateAccountPhoto(account: context.account, resource: photoResource, videoResource: videoResource, videoStartTimestamp: videoStartTimestamp, mapResourceToAvatarSizes: { resource, representations in
|
||||||
return mapResourceToAvatarSizes(postbox: context.account.postbox, resource: resource, representations: representations)
|
return mapResourceToAvatarSizes(postbox: context.account.postbox, resource: resource, representations: representations)
|
||||||
})
|
})
|
||||||
} |> deliverOnMainQueue).start(next: { result in
|
} |> deliverOnMainQueue).start(next: { result in
|
||||||
@ -624,7 +631,7 @@ func editSettingsController(context: AccountContext, currentName: ItemListAvatar
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
let mixin = TGMediaAvatarMenuMixin(context: legacyController.context, parentController: emptyController, hasSearchButton: true, hasDeleteButton: hasPhotos, hasViewButton: hasPhotos, personalPhoto: true, saveEditedPhotos: false, saveCapturedMedia: false, signup: false)!
|
let mixin = TGMediaAvatarMenuMixin(context: legacyController.context, parentController: emptyController, hasSearchButton: true, hasDeleteButton: hasPhotos, hasViewButton: hasPhotos, personalPhoto: true, isVideo: false, saveEditedPhotos: false, saveCapturedMedia: false, signup: false)!
|
||||||
let _ = currentAvatarMixin.swap(mixin)
|
let _ = currentAvatarMixin.swap(mixin)
|
||||||
mixin.requestSearchController = { assetsController in
|
mixin.requestSearchController = { assetsController in
|
||||||
let controller = WebSearchController(context: context, peer: peer, configuration: searchBotsConfiguration, mode: .avatar(initialQuery: nil, completion: { result in
|
let controller = WebSearchController(context: context, peer: peer, configuration: searchBotsConfiguration, mode: .avatar(initialQuery: nil, completion: { result in
|
||||||
@ -652,7 +659,7 @@ func editSettingsController(context: AccountContext, currentName: ItemListAvatar
|
|||||||
return $0.withUpdatedUpdatingAvatar(ItemListAvatarAndNameInfoItemUpdatingAvatar.none)
|
return $0.withUpdatedUpdatingAvatar(ItemListAvatarAndNameInfoItemUpdatingAvatar.none)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
updateAvatarDisposable.set((updateAccountPhoto(account: context.account, resource: nil, videoResource: nil, mapResourceToAvatarSizes: { resource, representations in
|
updateAvatarDisposable.set((updateAccountPhoto(account: context.account, resource: nil, videoResource: nil, videoStartTimestamp: nil, mapResourceToAvatarSizes: { resource, representations in
|
||||||
return mapResourceToAvatarSizes(postbox: context.account.postbox, resource: resource, representations: representations)
|
return mapResourceToAvatarSizes(postbox: context.account.postbox, resource: resource, representations: representations)
|
||||||
}) |> deliverOnMainQueue).start(next: { result in
|
}) |> deliverOnMainQueue).start(next: { result in
|
||||||
switch result {
|
switch result {
|
||||||
|
@ -1295,7 +1295,7 @@ public func settingsController(context: AccountContext, accountManager: AccountM
|
|||||||
state.updatingAvatar = .image(representation, true)
|
state.updatingAvatar = .image(representation, true)
|
||||||
return state
|
return state
|
||||||
}
|
}
|
||||||
updateAvatarDisposable.set((updateAccountPhoto(account: context.account, resource: resource, videoResource: nil, mapResourceToAvatarSizes: { resource, representations in
|
updateAvatarDisposable.set((updateAccountPhoto(account: context.account, resource: resource, videoResource: nil, videoStartTimestamp: nil, mapResourceToAvatarSizes: { resource, representations in
|
||||||
return mapResourceToAvatarSizes(postbox: context.account.postbox, resource: resource, representations: representations)
|
return mapResourceToAvatarSizes(postbox: context.account.postbox, resource: resource, representations: representations)
|
||||||
}) |> deliverOnMainQueue).start(next: { result in
|
}) |> deliverOnMainQueue).start(next: { result in
|
||||||
switch result {
|
switch result {
|
||||||
@ -1323,6 +1323,11 @@ public func settingsController(context: AccountContext, accountManager: AccountM
|
|||||||
return state
|
return state
|
||||||
}
|
}
|
||||||
|
|
||||||
|
var videoStartTimestamp: Double? = nil
|
||||||
|
if let adjustments = adjustments, adjustments.videoStartValue > 0.0 {
|
||||||
|
videoStartTimestamp = adjustments.videoStartValue - adjustments.trimStartValue
|
||||||
|
}
|
||||||
|
|
||||||
let signal = Signal<TelegramMediaResource, UploadPeerPhotoError> { subscriber in
|
let signal = Signal<TelegramMediaResource, UploadPeerPhotoError> { subscriber in
|
||||||
var filteredPath = url.path
|
var filteredPath = url.path
|
||||||
if filteredPath.hasPrefix("file://") {
|
if filteredPath.hasPrefix("file://") {
|
||||||
@ -1375,7 +1380,7 @@ public func settingsController(context: AccountContext, accountManager: AccountM
|
|||||||
|
|
||||||
updateAvatarDisposable.set((signal
|
updateAvatarDisposable.set((signal
|
||||||
|> mapToSignal { videoResource in
|
|> mapToSignal { videoResource in
|
||||||
return updateAccountPhoto(account: context.account, resource: photoResource, videoResource: videoResource, mapResourceToAvatarSizes: { resource, representations in
|
return updateAccountPhoto(account: context.account, resource: photoResource, videoResource: videoResource, videoStartTimestamp: videoStartTimestamp, mapResourceToAvatarSizes: { resource, representations in
|
||||||
return mapResourceToAvatarSizes(postbox: context.account.postbox, resource: resource, representations: representations)
|
return mapResourceToAvatarSizes(postbox: context.account.postbox, resource: resource, representations: representations)
|
||||||
})
|
})
|
||||||
} |> deliverOnMainQueue).start(next: { result in
|
} |> deliverOnMainQueue).start(next: { result in
|
||||||
@ -1393,7 +1398,7 @@ public func settingsController(context: AccountContext, accountManager: AccountM
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
let mixin = TGMediaAvatarMenuMixin(context: legacyController.context, parentController: emptyController, hasSearchButton: true, hasDeleteButton: hasPhotos, hasViewButton: false, personalPhoto: true, saveEditedPhotos: false, saveCapturedMedia: false, signup: false)!
|
let mixin = TGMediaAvatarMenuMixin(context: legacyController.context, parentController: emptyController, hasSearchButton: true, hasDeleteButton: hasPhotos, hasViewButton: false, personalPhoto: true, isVideo: false, saveEditedPhotos: false, saveCapturedMedia: false, signup: false)!
|
||||||
let _ = currentAvatarMixin.swap(mixin)
|
let _ = currentAvatarMixin.swap(mixin)
|
||||||
mixin.requestSearchController = { assetsController in
|
mixin.requestSearchController = { assetsController in
|
||||||
let controller = WebSearchController(context: context, peer: peer, configuration: searchBotsConfiguration, mode: .avatar(initialQuery: nil, completion: { result in
|
let controller = WebSearchController(context: context, peer: peer, configuration: searchBotsConfiguration, mode: .avatar(initialQuery: nil, completion: { result in
|
||||||
@ -1423,7 +1428,7 @@ public func settingsController(context: AccountContext, accountManager: AccountM
|
|||||||
}
|
}
|
||||||
return state
|
return state
|
||||||
}
|
}
|
||||||
updateAvatarDisposable.set((updateAccountPhoto(account: context.account, resource: nil, videoResource: nil, mapResourceToAvatarSizes: { resource, representations in
|
updateAvatarDisposable.set((updateAccountPhoto(account: context.account, resource: nil, videoResource: nil, videoStartTimestamp: nil, mapResourceToAvatarSizes: { resource, representations in
|
||||||
return mapResourceToAvatarSizes(postbox: context.account.postbox, resource: resource, representations: representations)
|
return mapResourceToAvatarSizes(postbox: context.account.postbox, resource: resource, representations: representations)
|
||||||
}) |> deliverOnMainQueue).start(next: { result in
|
}) |> deliverOnMainQueue).start(next: { result in
|
||||||
switch result {
|
switch result {
|
||||||
@@ -88,21 +88,29 @@ public final class TelegramMediaImage: Media, Equatable, Codable {
 public final class VideoRepresentation: Equatable, PostboxCoding {
 public let dimensions: PixelDimensions
 public let resource: TelegramMediaResource
+public let startTimestamp: Double?

-public init(dimensions: PixelDimensions, resource: TelegramMediaResource) {
+public init(dimensions: PixelDimensions, resource: TelegramMediaResource, startTimestamp: Double?) {
 self.dimensions = dimensions
 self.resource = resource
+self.startTimestamp = startTimestamp
 }

 public init(decoder: PostboxDecoder) {
 self.dimensions = PixelDimensions(width: decoder.decodeInt32ForKey("w", orElse: 0), height: decoder.decodeInt32ForKey("h", orElse: 0))
 self.resource = decoder.decodeObjectForKey("r") as! TelegramMediaResource
+self.startTimestamp = decoder.decodeOptionalDoubleForKey("s")
 }

 public func encode(_ encoder: PostboxEncoder) {
 encoder.encodeInt32(self.dimensions.width, forKey: "w")
 encoder.encodeInt32(self.dimensions.height, forKey: "h")
 encoder.encodeObject(self.resource, forKey: "r")
+if let startTimestamp = self.startTimestamp {
+encoder.encodeDouble(startTimestamp, forKey: "s")
+} else {
+encoder.encodeNil(forKey: "s")
+}
 }

 public static func ==(lhs: VideoRepresentation, rhs: VideoRepresentation) -> Bool {
@@ -115,6 +123,9 @@ public final class TelegramMediaImage: Media, Equatable, Codable {
 if !lhs.resource.isEqual(to: rhs.resource) {
 return false
 }
+if lhs.startTimestamp != rhs.startTimestamp {
+return false
+}
 return true
 }
 }
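The hunks above add an optional startTimestamp to TelegramMediaImage.VideoRepresentation, persist it under the "s" key, and fold it into the equality check. A minimal standalone sketch of why the new field has to take part in == (stub value types here, not the real TelegramCore/Postbox classes):

// Standalone stub mirroring the shape of TelegramMediaImage.VideoRepresentation;
// PixelDimensions and TelegramMediaResource are replaced with plain values for illustration.
struct StubVideoRepresentation: Equatable {
    let width: Int32
    let height: Int32
    let resourceId: String
    let startTimestamp: Double?   // new optional field; nil means "play from the beginning"

    static func ==(lhs: StubVideoRepresentation, rhs: StubVideoRepresentation) -> Bool {
        if lhs.width != rhs.width || lhs.height != rhs.height { return false }
        if lhs.resourceId != rhs.resourceId { return false }
        // Without this check, two representations differing only in their start
        // timestamp would compare equal and the change would go unnoticed.
        if lhs.startTimestamp != rhs.startTimestamp { return false }
        return true
    }
}

let a = StubVideoRepresentation(width: 800, height: 800, resourceId: "r1", startTimestamp: nil)
let b = StubVideoRepresentation(width: 800, height: 800, resourceId: "r1", startTimestamp: 1.5)
print(a == b) // false once startTimestamp participates in equality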
@@ -106,7 +106,7 @@ fileprivate let parsers: [Int32 : (BufferReader) -> Any?] = {
 dict[997055186] = { return Api.PollAnswerVoters.parse_pollAnswerVoters($0) }
 dict[-1705233435] = { return Api.account.PasswordSettings.parse_passwordSettings($0) }
 dict[-288727837] = { return Api.LangPackLanguage.parse_langPackLanguage($0) }
-dict[1130084743] = { return Api.VideoSize.parse_videoSize($0) }
+dict[-399391402] = { return Api.VideoSize.parse_videoSize($0) }
 dict[-1000708810] = { return Api.help.AppUpdate.parse_noAppUpdate($0) }
 dict[497489295] = { return Api.help.AppUpdate.parse_appUpdate($0) }
 dict[-209337866] = { return Api.LangPackDifference.parse_langPackDifference($0) }
@@ -254,7 +254,6 @@ fileprivate let parsers: [Int32 : (BufferReader) -> Any?] = {
 dict[-1512627963] = { return Api.Update.parse_updateDialogFilterOrder($0) }
 dict[889491791] = { return Api.Update.parse_updateDialogFilters($0) }
 dict[643940105] = { return Api.Update.parse_updatePhoneCallSignalingData($0) }
-dict[-1812551503] = { return Api.Update.parse_updateChannelParticipant($0) }
 dict[136574537] = { return Api.messages.VotesList.parse_votesList($0) }
 dict[1558266229] = { return Api.PopularContact.parse_popularContact($0) }
 dict[-373643672] = { return Api.FolderPeer.parse_folderPeer($0) }
@@ -4710,50 +4710,58 @@ public extension Api {

 }
 public enum VideoSize: TypeConstructorDescription {
-case videoSize(type: String, location: Api.FileLocation, w: Int32, h: Int32, size: Int32)
+case videoSize(flags: Int32, type: String, location: Api.FileLocation, w: Int32, h: Int32, size: Int32, videoStartTs: Double?)

 public func serialize(_ buffer: Buffer, _ boxed: Swift.Bool) {
 switch self {
-case .videoSize(let type, let location, let w, let h, let size):
+case .videoSize(let flags, let type, let location, let w, let h, let size, let videoStartTs):
 if boxed {
-buffer.appendInt32(1130084743)
+buffer.appendInt32(-399391402)
 }
+serializeInt32(flags, buffer: buffer, boxed: false)
 serializeString(type, buffer: buffer, boxed: false)
 location.serialize(buffer, true)
 serializeInt32(w, buffer: buffer, boxed: false)
 serializeInt32(h, buffer: buffer, boxed: false)
 serializeInt32(size, buffer: buffer, boxed: false)
+if Int(flags) & Int(1 << 0) != 0 {serializeDouble(videoStartTs!, buffer: buffer, boxed: false)}
 break
 }
 }

 public func descriptionFields() -> (String, [(String, Any)]) {
 switch self {
-case .videoSize(let type, let location, let w, let h, let size):
-return ("videoSize", [("type", type), ("location", location), ("w", w), ("h", h), ("size", size)])
+case .videoSize(let flags, let type, let location, let w, let h, let size, let videoStartTs):
+return ("videoSize", [("flags", flags), ("type", type), ("location", location), ("w", w), ("h", h), ("size", size), ("videoStartTs", videoStartTs)])
 }
 }

 public static func parse_videoSize(_ reader: BufferReader) -> VideoSize? {
-var _1: String?
-_1 = parseString(reader)
-var _2: Api.FileLocation?
+var _1: Int32?
+_1 = reader.readInt32()
+var _2: String?
+_2 = parseString(reader)
+var _3: Api.FileLocation?
 if let signature = reader.readInt32() {
-_2 = Api.parse(reader, signature: signature) as? Api.FileLocation
+_3 = Api.parse(reader, signature: signature) as? Api.FileLocation
 }
-var _3: Int32?
-_3 = reader.readInt32()
 var _4: Int32?
 _4 = reader.readInt32()
 var _5: Int32?
 _5 = reader.readInt32()
+var _6: Int32?
+_6 = reader.readInt32()
+var _7: Double?
+if Int(_1!) & Int(1 << 0) != 0 {_7 = reader.readDouble() }
 let _c1 = _1 != nil
 let _c2 = _2 != nil
 let _c3 = _3 != nil
 let _c4 = _4 != nil
 let _c5 = _5 != nil
-if _c1 && _c2 && _c3 && _c4 && _c5 {
-return Api.VideoSize.videoSize(type: _1!, location: _2!, w: _3!, h: _4!, size: _5!)
+let _c6 = _6 != nil
+let _c7 = (Int(_1!) & Int(1 << 0) == 0) || _7 != nil
+if _c1 && _c2 && _c3 && _c4 && _c5 && _c6 && _c7 {
+return Api.VideoSize.videoSize(flags: _1!, type: _2!, location: _3!, w: _4!, h: _5!, size: _6!, videoStartTs: _7)
 }
 else {
 return nil
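In the new parse_videoSize, _7 (the videoStartTs) is read only when bit 0 of the freshly parsed flags value is set, and _c7 accepts a missing value only when that bit is clear. A self-contained sketch of that conditional-field convention, using a simplified reader rather than the real BufferReader:

// Simplified illustration of the flags convention used by parse_videoSize:
// an optional field is present on the wire only when its flag bit is set.
struct SimpleReader {
    private var values: [Any]
    private var index = 0
    init(_ values: [Any]) { self.values = values }
    mutating func readInt32() -> Int32? {
        defer { index += 1 }
        return index < values.count ? values[index] as? Int32 : nil
    }
    mutating func readDouble() -> Double? {
        defer { index += 1 }
        return index < values.count ? values[index] as? Double : nil
    }
}

// Parses (flags, size, videoStartTs?) where videoStartTs exists only if flags bit 0 is set.
func parseVideoStart(_ reader: inout SimpleReader) -> (size: Int32, startTs: Double?)? {
    guard let flags = reader.readInt32(), let size = reader.readInt32() else { return nil }
    var startTs: Double?
    if Int(flags) & Int(1 << 0) != 0 {
        guard let value = reader.readDouble() else { return nil } // flagged but missing: malformed
        startTs = value
    }
    return (size, startTs)
}

var withTs = SimpleReader([Int32(1), Int32(2048), 1.5])
var withoutTs = SimpleReader([Int32(0), Int32(2048)])
print(String(describing: parseVideoStart(&withTs)))    // Optional((size: 2048, startTs: Optional(1.5)))
print(String(describing: parseVideoStart(&withoutTs))) // Optional((size: 2048, startTs: nil))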
@@ -6029,7 +6037,6 @@ public extension Api {
 case updateDialogFilterOrder(order: [Int32])
 case updateDialogFilters
 case updatePhoneCallSignalingData(phoneCallId: Int64, data: Buffer)
-case updateChannelParticipant(channelId: Int32, prevParticipant: Api.ChannelParticipant, newParticipant: Api.ChannelParticipant, qts: Int32)

 public func serialize(_ buffer: Buffer, _ boxed: Swift.Bool) {
 switch self {
@@ -6710,15 +6717,6 @@ public extension Api {
 serializeInt64(phoneCallId, buffer: buffer, boxed: false)
 serializeBytes(data, buffer: buffer, boxed: false)
 break
-case .updateChannelParticipant(let channelId, let prevParticipant, let newParticipant, let qts):
-if boxed {
-buffer.appendInt32(-1812551503)
-}
-serializeInt32(channelId, buffer: buffer, boxed: false)
-prevParticipant.serialize(buffer, true)
-newParticipant.serialize(buffer, true)
-serializeInt32(qts, buffer: buffer, boxed: false)
-break
 }
 }

@@ -6886,8 +6884,6 @@ public extension Api {
 return ("updateDialogFilters", [])
 case .updatePhoneCallSignalingData(let phoneCallId, let data):
 return ("updatePhoneCallSignalingData", [("phoneCallId", phoneCallId), ("data", data)])
-case .updateChannelParticipant(let channelId, let prevParticipant, let newParticipant, let qts):
-return ("updateChannelParticipant", [("channelId", channelId), ("prevParticipant", prevParticipant), ("newParticipant", newParticipant), ("qts", qts)])
 }
 }

@@ -8233,30 +8229,6 @@ public extension Api {
 return nil
 }
 }
-public static func parse_updateChannelParticipant(_ reader: BufferReader) -> Update? {
-var _1: Int32?
-_1 = reader.readInt32()
-var _2: Api.ChannelParticipant?
-if let signature = reader.readInt32() {
-_2 = Api.parse(reader, signature: signature) as? Api.ChannelParticipant
-}
-var _3: Api.ChannelParticipant?
-if let signature = reader.readInt32() {
-_3 = Api.parse(reader, signature: signature) as? Api.ChannelParticipant
-}
-var _4: Int32?
-_4 = reader.readInt32()
-let _c1 = _1 != nil
-let _c2 = _2 != nil
-let _c3 = _3 != nil
-let _c4 = _4 != nil
-if _c1 && _c2 && _c3 && _c4 {
-return Api.Update.updateChannelParticipant(channelId: _1!, prevParticipant: _2!, newParticipant: _3!, qts: _4!)
-}
-else {
-return nil
-}
-}

 }
 public enum PopularContact: TypeConstructorDescription {
@@ -4421,13 +4421,12 @@ public extension Api {
 })
 }

-public static func getBroadcastStats(flags: Int32, channel: Api.InputChannel, tzOffset: Int32) -> (FunctionDescription, Buffer, DeserializeFunctionResponse<Api.stats.BroadcastStats>) {
+public static func getBroadcastStats(flags: Int32, channel: Api.InputChannel) -> (FunctionDescription, Buffer, DeserializeFunctionResponse<Api.stats.BroadcastStats>) {
 let buffer = Buffer()
-buffer.appendInt32(-433058374)
+buffer.appendInt32(-1421720550)
 serializeInt32(flags, buffer: buffer, boxed: false)
 channel.serialize(buffer, true)
-serializeInt32(tzOffset, buffer: buffer, boxed: false)
-return (FunctionDescription(name: "stats.getBroadcastStats", parameters: [("flags", flags), ("channel", channel), ("tzOffset", tzOffset)]), buffer, DeserializeFunctionResponse { (buffer: Buffer) -> Api.stats.BroadcastStats? in
+return (FunctionDescription(name: "stats.getBroadcastStats", parameters: [("flags", flags), ("channel", channel)]), buffer, DeserializeFunctionResponse { (buffer: Buffer) -> Api.stats.BroadcastStats? in
 let reader = BufferReader(buffer)
 var result: Api.stats.BroadcastStats?
 if let signature = reader.readInt32() {
@@ -6803,13 +6802,14 @@ public extension Api {
 })
 }

-public static func uploadProfilePhoto(flags: Int32, file: Api.InputFile?, video: Api.InputFile?) -> (FunctionDescription, Buffer, DeserializeFunctionResponse<Api.photos.Photo>) {
+public static func uploadProfilePhoto(flags: Int32, file: Api.InputFile?, video: Api.InputFile?, videoStartTs: Double?) -> (FunctionDescription, Buffer, DeserializeFunctionResponse<Api.photos.Photo>) {
 let buffer = Buffer()
-buffer.appendInt32(28740206)
+buffer.appendInt32(-1980559511)
 serializeInt32(flags, buffer: buffer, boxed: false)
 if Int(flags) & Int(1 << 0) != 0 {file!.serialize(buffer, true)}
 if Int(flags) & Int(1 << 1) != 0 {video!.serialize(buffer, true)}
-return (FunctionDescription(name: "photos.uploadProfilePhoto", parameters: [("flags", flags), ("file", file), ("video", video)]), buffer, DeserializeFunctionResponse { (buffer: Buffer) -> Api.photos.Photo? in
+if Int(flags) & Int(1 << 2) != 0 {serializeDouble(videoStartTs!, buffer: buffer, boxed: false)}
+return (FunctionDescription(name: "photos.uploadProfilePhoto", parameters: [("flags", flags), ("file", file), ("video", video), ("videoStartTs", videoStartTs)]), buffer, DeserializeFunctionResponse { (buffer: Buffer) -> Api.photos.Photo? in
 let reader = BufferReader(buffer)
 var result: Api.photos.Photo?
 if let signature = reader.readInt32() {
@@ -470,7 +470,7 @@ public func signUpWithName(accountManager: AccountManager, account: Unauthorized
 let resource = LocalFileMediaResource(fileId: arc4random64())
 account.postbox.mediaBox.storeResourceData(resource.id, data: avatarData)

-return updatePeerPhotoInternal(postbox: account.postbox, network: account.network, stateManager: nil, accountPeerId: user.id, peer: .single(user), photo: uploadedPeerPhoto(postbox: account.postbox, network: account.network, resource: resource), video: nil, mapResourceToAvatarSizes: { _, _ in .single([:]) })
+return updatePeerPhotoInternal(postbox: account.postbox, network: account.network, stateManager: nil, accountPeerId: user.id, peer: .single(user), photo: uploadedPeerPhoto(postbox: account.postbox, network: account.network, resource: resource), video: nil, videoStartTimestamp: nil, mapResourceToAvatarSizes: { _, _ in .single([:]) })
 |> `catch` { _ -> Signal<UpdatePeerPhotoStatus, SignUpError> in
 return .complete()
 }
@@ -15,8 +15,8 @@ public enum UploadPeerPhotoError {
 case generic
 }

-public func updateAccountPhoto(account: Account, resource: MediaResource?, videoResource: MediaResource?, mapResourceToAvatarSizes: @escaping (MediaResource, [TelegramMediaImageRepresentation]) -> Signal<[Int: Data], NoError>) -> Signal<UpdatePeerPhotoStatus, UploadPeerPhotoError> {
-return updatePeerPhoto(postbox: account.postbox, network: account.network, stateManager: account.stateManager, accountPeerId: account.peerId, peerId: account.peerId, photo: resource.flatMap({ uploadedPeerPhoto(postbox: account.postbox, network: account.network, resource: $0) }), video: videoResource.flatMap({ uploadedPeerVideo(postbox: account.postbox, network: account.network, messageMediaPreuploadManager: account.messageMediaPreuploadManager, resource: $0) |> map(Optional.init) }), mapResourceToAvatarSizes: mapResourceToAvatarSizes)
+public func updateAccountPhoto(account: Account, resource: MediaResource?, videoResource: MediaResource?, videoStartTimestamp: Double?, mapResourceToAvatarSizes: @escaping (MediaResource, [TelegramMediaImageRepresentation]) -> Signal<[Int: Data], NoError>) -> Signal<UpdatePeerPhotoStatus, UploadPeerPhotoError> {
+return updatePeerPhoto(postbox: account.postbox, network: account.network, stateManager: account.stateManager, accountPeerId: account.peerId, peerId: account.peerId, photo: resource.flatMap({ uploadedPeerPhoto(postbox: account.postbox, network: account.network, resource: $0) }), video: videoResource.flatMap({ uploadedPeerVideo(postbox: account.postbox, network: account.network, messageMediaPreuploadManager: account.messageMediaPreuploadManager, resource: $0) |> map(Optional.init) }), videoStartTimestamp: videoStartTimestamp, mapResourceToAvatarSizes: mapResourceToAvatarSizes)
 }

 public struct UploadedPeerPhotoData {
@ -49,11 +49,11 @@ public func uploadedPeerVideo(postbox: Postbox, network: Network, messageMediaPr
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
public func updatePeerPhoto(postbox: Postbox, network: Network, stateManager: AccountStateManager?, accountPeerId: PeerId, peerId: PeerId, photo: Signal<UploadedPeerPhotoData, NoError>?, video: Signal<UploadedPeerPhotoData?, NoError>? = nil, mapResourceToAvatarSizes: @escaping (MediaResource, [TelegramMediaImageRepresentation]) -> Signal<[Int: Data], NoError>) -> Signal<UpdatePeerPhotoStatus, UploadPeerPhotoError> {
|
public func updatePeerPhoto(postbox: Postbox, network: Network, stateManager: AccountStateManager?, accountPeerId: PeerId, peerId: PeerId, photo: Signal<UploadedPeerPhotoData, NoError>?, video: Signal<UploadedPeerPhotoData?, NoError>? = nil, videoStartTimestamp: Double? = nil, mapResourceToAvatarSizes: @escaping (MediaResource, [TelegramMediaImageRepresentation]) -> Signal<[Int: Data], NoError>) -> Signal<UpdatePeerPhotoStatus, UploadPeerPhotoError> {
|
||||||
return updatePeerPhotoInternal(postbox: postbox, network: network, stateManager: stateManager, accountPeerId: accountPeerId, peer: postbox.loadedPeerWithId(peerId), photo: photo, video: video, mapResourceToAvatarSizes: mapResourceToAvatarSizes)
|
return updatePeerPhotoInternal(postbox: postbox, network: network, stateManager: stateManager, accountPeerId: accountPeerId, peer: postbox.loadedPeerWithId(peerId), photo: photo, video: video, videoStartTimestamp: videoStartTimestamp, mapResourceToAvatarSizes: mapResourceToAvatarSizes)
|
||||||
}
|
}
|
||||||
|
|
||||||
public func updatePeerPhotoInternal(postbox: Postbox, network: Network, stateManager: AccountStateManager?, accountPeerId: PeerId, peer: Signal<Peer, NoError>, photo: Signal<UploadedPeerPhotoData, NoError>?, video: Signal<UploadedPeerPhotoData?, NoError>?, mapResourceToAvatarSizes: @escaping (MediaResource, [TelegramMediaImageRepresentation]) -> Signal<[Int: Data], NoError>) -> Signal<UpdatePeerPhotoStatus, UploadPeerPhotoError> {
|
public func updatePeerPhotoInternal(postbox: Postbox, network: Network, stateManager: AccountStateManager?, accountPeerId: PeerId, peer: Signal<Peer, NoError>, photo: Signal<UploadedPeerPhotoData, NoError>?, video: Signal<UploadedPeerPhotoData?, NoError>?, videoStartTimestamp: Double?, mapResourceToAvatarSizes: @escaping (MediaResource, [TelegramMediaImageRepresentation]) -> Signal<[Int: Data], NoError>) -> Signal<UpdatePeerPhotoStatus, UploadPeerPhotoError> {
|
||||||
return peer
|
return peer
|
||||||
|> mapError { _ in return .generic }
|
|> mapError { _ in return .generic }
|
||||||
|> mapToSignal { peer -> Signal<UpdatePeerPhotoStatus, UploadPeerPhotoError> in
|
|> mapToSignal { peer -> Signal<UpdatePeerPhotoStatus, UploadPeerPhotoError> in
|
||||||
@ -116,12 +116,15 @@ public func updatePeerPhotoInternal(postbox: Postbox, network: Network, stateMan
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
var flags: Int32 = 0
|
var flags: Int32 = (1 << 0)
|
||||||
if let _ = videoFile {
|
if let _ = videoFile {
|
||||||
flags |= (1 << 0)
|
flags |= (1 << 1)
|
||||||
|
if let _ = videoStartTimestamp {
|
||||||
|
flags |= (1 << 2)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
return network.request(Api.functions.photos.uploadProfilePhoto(flags: flags, file: file, video: videoFile))
|
return network.request(Api.functions.photos.uploadProfilePhoto(flags: flags, file: file, video: videoFile, videoStartTs: videoStartTimestamp))
|
||||||
|> mapError { _ in return UploadPeerPhotoError.generic }
|
|> mapError { _ in return UploadPeerPhotoError.generic }
|
||||||
|> mapToSignal { photo -> Signal<(UpdatePeerPhotoStatus, MediaResource?), UploadPeerPhotoError> in
|
|> mapToSignal { photo -> Signal<(UpdatePeerPhotoStatus, MediaResource?), UploadPeerPhotoError> in
|
||||||
var representations: [TelegramMediaImageRepresentation] = []
|
var representations: [TelegramMediaImageRepresentation] = []
|
||||||
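With this change the request always sets bit 0 (the still photo file), sets bit 1 when a video is attached, and bit 2 only when a start timestamp accompanies that video, matching the photos.uploadProfilePhoto serialization earlier in the diff. A standalone sketch of the same flag composition (plain booleans in place of the real upload signals):

// Illustration of how the uploadProfilePhoto flags are composed after this change.
// Bit 0: a photo file is attached, bit 1: a video is attached,
// bit 2: a video start timestamp is attached (only meaningful together with bit 1).
func profilePhotoFlags(hasFile: Bool, hasVideo: Bool, videoStartTimestamp: Double?) -> Int32 {
    var flags: Int32 = 0
    if hasFile {
        flags |= (1 << 0)
    }
    if hasVideo {
        flags |= (1 << 1)
        if videoStartTimestamp != nil {
            flags |= (1 << 2)
        }
    }
    return flags
}

print(profilePhotoFlags(hasFile: true, hasVideo: false, videoStartTimestamp: nil)) // 1
print(profilePhotoFlags(hasFile: true, hasVideo: true, videoStartTimestamp: nil))  // 3
print(profilePhotoFlags(hasFile: true, hasVideo: true, videoStartTimestamp: 1.5))  // 7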
@@ -151,16 +154,14 @@ public func updatePeerPhotoInternal(postbox: Postbox, network: Network, stateMan
 if let videoSizes = videoSizes {
 for size in videoSizes {
 switch size {
-case let .videoSize(type, location, w, h, size):
+case let .videoSize(_, type, location, w, h, size, videoStartTs):
 let resource: TelegramMediaResource
 switch location {
 case let .fileLocationToBeDeprecated(volumeId, localId):
 resource = CloudPhotoSizeMediaResource(datacenterId: dcId, photoId: id, accessHash: accessHash, sizeSpec: type, volumeId: volumeId, localId: localId, size: Int(size), fileReference: fileReference.makeData())
 }

-videoRepresentations.append(TelegramMediaImage.VideoRepresentation(
-dimensions: PixelDimensions(width: w, height: h),
-resource: resource))
+videoRepresentations.append(TelegramMediaImage.VideoRepresentation(dimensions: PixelDimensions(width: w, height: h), resource: resource, startTimestamp: videoStartTs))
 }
 }
 }
@@ -198,10 +198,10 @@ private func requestChannelStats(postbox: Postbox, network: Network, datacenterI
 signal = network.download(datacenterId: Int(datacenterId), isMedia: false, tag: nil)
 |> castError(MTRpcError.self)
 |> mapToSignal { worker in
-return worker.request(Api.functions.stats.getBroadcastStats(flags: flags, channel: inputChannel, tzOffset: 0))
+return worker.request(Api.functions.stats.getBroadcastStats(flags: flags, channel: inputChannel))
 }
 } else {
-signal = network.request(Api.functions.stats.getBroadcastStats(flags: flags, channel: inputChannel, tzOffset: 0))
+signal = network.request(Api.functions.stats.getBroadcastStats(flags: flags, channel: inputChannel))
 }

 return signal
@@ -156,7 +156,7 @@ func telegramMediaFileFromApiDocument(_ document: Api.Document) -> TelegramMedia
 if let videoThumbs = videoThumbs {
 for thumb in videoThumbs {
 switch thumb {
-case let .videoSize(type, location, w, h, _):
+case let .videoSize(_, type, location, w, h, _, _):
 let resource: TelegramMediaResource
 switch location {
 case let .fileLocationToBeDeprecated(volumeId, localId):
@@ -44,16 +44,14 @@ func telegramMediaImageFromApiPhoto(_ photo: Api.Photo) -> TelegramMediaImage? {
 if let videoSizes = videoSizes {
 for size in videoSizes {
 switch size {
-case let .videoSize(type, location, w, h, size):
+case let .videoSize(_, type, location, w, h, size, videoStartTs):
 let resource: TelegramMediaResource
 switch location {
 case let .fileLocationToBeDeprecated(volumeId, localId):
 resource = CloudPhotoSizeMediaResource(datacenterId: dcId, photoId: id, accessHash: accessHash, sizeSpec: type, volumeId: volumeId, localId: localId, size: Int(size), fileReference: fileReference.makeData())
 }

-videoRepresentations.append(TelegramMediaImage.VideoRepresentation(
-dimensions: PixelDimensions(width: w, height: h),
-resource: resource))
+videoRepresentations.append(TelegramMediaImage.VideoRepresentation(dimensions: PixelDimensions(width: w, height: h), resource: resource, startTimestamp: videoStartTs))
 }
 }
 }
File diff suppressed because it is too large
Binary file not shown.
@@ -325,7 +325,7 @@ public func createChannelController(context: AccountContext) -> ViewController {
 }
 }

-let mixin = TGMediaAvatarMenuMixin(context: legacyController.context, parentController: emptyController, hasSearchButton: true, hasDeleteButton: stateValue.with({ $0.avatar }) != nil, hasViewButton: false, personalPhoto: false, saveEditedPhotos: false, saveCapturedMedia: false, signup: true)!
+let mixin = TGMediaAvatarMenuMixin(context: legacyController.context, parentController: emptyController, hasSearchButton: true, hasDeleteButton: stateValue.with({ $0.avatar }) != nil, hasViewButton: false, personalPhoto: false, isVideo: false, saveEditedPhotos: false, saveCapturedMedia: false, signup: true)!
 let _ = currentAvatarMixin.swap(mixin)
 mixin.requestSearchController = { assetsController in
 let controller = WebSearchController(context: context, peer: peer, configuration: searchBotsConfiguration, mode: .avatar(initialQuery: title, completion: { result in
@@ -583,7 +583,7 @@ public func createGroupControllerImpl(context: AccountContext, peerIds: [PeerId]
 }
 }

-let mixin = TGMediaAvatarMenuMixin(context: legacyController.context, parentController: emptyController, hasSearchButton: true, hasDeleteButton: stateValue.with({ $0.avatar }) != nil, hasViewButton: false, personalPhoto: false, saveEditedPhotos: false, saveCapturedMedia: false, signup: true)!
+let mixin = TGMediaAvatarMenuMixin(context: legacyController.context, parentController: emptyController, hasSearchButton: true, hasDeleteButton: stateValue.with({ $0.avatar }) != nil, hasViewButton: false, personalPhoto: false, isVideo: false, saveEditedPhotos: false, saveCapturedMedia: false, signup: true)!
 let _ = currentAvatarMixin.swap(mixin)
 mixin.requestSearchController = { assetsController in
 let controller = WebSearchController(context: context, peer: peer, configuration: searchBotsConfiguration, mode: .avatar(initialQuery: title, completion: { result in
@@ -244,6 +244,9 @@ final class PeerInfoAvatarListItemNode: ASDisplayNode {
 strongSelf.videoNode?.isHidden = !owns
 }
 }
+if let startTimestamp = video.startTimestamp {
+videoNode.seek(startTimestamp)
+}

 self.videoContent = videoContent
 self.videoNode = videoNode
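Here the avatar list item seeks the looping video to its stored start timestamp as soon as the node is set up. A rough standalone equivalent using AVFoundation (AVPlayer in place of the app's own video node, purely illustrative):

import AVFoundation

// Illustrative only: the app uses its own video node, not AVPlayer directly.
// Seeks a player to an optional start timestamp before playback begins.
func applyStartTimestamp(_ startTimestamp: Double?, to player: AVPlayer) {
    guard let startTimestamp = startTimestamp else {
        return // no stored offset: the video simply plays from the beginning
    }
    let time = CMTime(seconds: startTimestamp, preferredTimescale: 600)
    // Tight tolerances so the first rendered frame matches the chosen offset.
    player.seek(to: time, toleranceBefore: .zero, toleranceAfter: .zero)
}

With a real player this would be called right after the item is configured, passing the decoded startTimestamp, e.g. applyStartTimestamp(1.5, to: player).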
@@ -3303,7 +3303,7 @@ private final class PeerInfoScreenNode: ViewControllerTracingNode, UIScrollViewD
 }))
 }

-let mixin = TGMediaAvatarMenuMixin(context: legacyController.context, parentController: emptyController, hasSearchButton: true, hasDeleteButton: hasPhotos, hasViewButton: false, personalPhoto: false, saveEditedPhotos: false, saveCapturedMedia: false, signup: true)!
+let mixin = TGMediaAvatarMenuMixin(context: legacyController.context, parentController: emptyController, hasSearchButton: true, hasDeleteButton: hasPhotos, hasViewButton: false, personalPhoto: false, isVideo: false, saveEditedPhotos: false, saveCapturedMedia: false, signup: true)!
 let _ = strongSelf.currentAvatarMixin.swap(mixin)
 mixin.requestSearchController = { assetsController in
 guard let strongSelf = self else {