Video avatar fixes

Ilya Laktyushin 2020-06-30 17:47:42 +03:00
parent 51a95f3f6f
commit 53572b9631
24 changed files with 742 additions and 389 deletions

View File

@ -16,7 +16,7 @@ UIImage *TGReducedAttachmentImage(UIImage *source, CGSize originalSize, bool att
UIImage *TGBlurredBackgroundImage(UIImage *source, CGSize size);
UIImage *TGRoundImage(UIImage *source, CGSize size);
UIImage *TGBlurredAlphaImage(UIImage *source, CGSize size);
UIImage *TGBlurredRectangularImage(UIImage *source, CGSize size, CGSize renderSize, uint32_t *averageColor, void (^pixelProcessingBlock)(void *, int, int, int));
UIImage *TGBlurredRectangularImage(UIImage *source, bool more, CGSize size, CGSize renderSize, uint32_t *averageColor, void (^pixelProcessingBlock)(void *, int, int, int));
UIImage *TGCropBackdropImage(UIImage *source, CGSize size);
UIImage *TGCameraPositionSwitchImage(UIImage *source, CGSize size);

View File

@ -1,11 +1,12 @@
#import <UIKit/UIKit.h>
@class AVPlayer;
@class PGPhotoEditorView;
@interface TGPhotoAvatarCropView : UIView
@property (nonatomic, readonly) PGPhotoEditorView *fullPreviewView;
@property (nonatomic, strong) UIImage *image;
@property (nonatomic, strong) AVPlayer *player;
@property (nonatomic, readonly) CGSize originalSize;
@property (nonatomic, assign) CGRect cropRect;
@ -16,6 +17,8 @@
@property (nonatomic, copy) void(^interactionBegan)(void);
@property (nonatomic, copy) void(^interactionEnded)(void);
@property (nonatomic, copy) void(^tapped)(void);
@property (nonatomic, readonly) bool isTracking;
@property (nonatomic, readonly) bool isAnimating;
@ -35,7 +38,6 @@
- (void)transitionInFinishedFromCamera:(bool)fromCamera;
- (void)invalidateCropRect;
- (void)invalidateVideoView;
- (UIImage *)currentImage;

View File

@ -13,7 +13,9 @@ typedef enum
TGMediaVideoConversionPresetCompressedVeryHigh,
TGMediaVideoConversionPresetAnimation,
TGMediaVideoConversionPresetVideoMessage,
TGMediaVideoConversionPresetProfile
TGMediaVideoConversionPresetProfile,
TGMediaVideoConversionPresetProfileHigh,
TGMediaVideoConversionPresetPassthrough
} TGMediaVideoConversionPreset;
@interface TGVideoEditAdjustments : NSObject <TGMediaEditAdjustments>
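
The two new presets are consumed further down in this commit: the converter caps both profile presets at 800x800 and drops their audio, and the editor controller picks between them by clip length. A minimal sketch of that selection, using the 4-second threshold from the TGPhotoEditorController hunk below:

    NSTimeInterval duration = trimEndValue - trimStartValue;
    TGMediaVideoConversionPreset preset = duration < 4.0
        ? TGMediaVideoConversionPresetProfileHigh   // short avatar clips get the higher bitrate
        : TGMediaVideoConversionPresetProfile;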

View File

@ -30,6 +30,8 @@
@property (nonatomic, readonly) bool enableStickers;
@property (nonatomic, assign) bool cropOnLast;
@property (nonatomic, readonly) bool forVideo;
@property (nonatomic, assign) bool standalone;

View File

@ -391,7 +391,7 @@
_currentProcessChain = processChain;
GPUImageOutput <GPUImageInput> *lastFilter = ((PGPhotoProcessPass *)_currentProcessChain.firstObject).filter;
if (_cropFilter != nil) {
if (_cropFilter != nil && !self.cropOnLast) {
[_currentInput addTarget:_cropFilter];
[_cropFilter addTarget:lastFilter];
} else {
@ -411,12 +411,23 @@
}
_finalFilter = lastFilter;
if (previewOutput != nil) {
[_finalFilter addTarget:previewOutput.imageView];
}
for (PGPhotoEditorView *view in _additionalOutputs) {
[_finalFilter addTarget:view];
if (_cropFilter != nil && self.cropOnLast) {
for (PGPhotoEditorView *view in _additionalOutputs) {
[_finalFilter addTarget:view];
}
[_finalFilter addTarget:_cropFilter];
_finalFilter = _cropFilter;
if (previewOutput != nil) {
[_finalFilter addTarget:previewOutput.imageView];
}
} else {
if (previewOutput != nil) {
[_finalFilter addTarget:previewOutput.imageView];
}
for (PGPhotoEditorView *view in _additionalOutputs) {
[_finalFilter addTarget:view];
}
}
if (_histogramGenerator != nil && !self.standalone) {
@ -428,6 +439,9 @@
- (void)setAdditionalOutputs:(NSArray *)additionalOutputs {
_additionalOutputs = additionalOutputs;
if (_finalFilter == nil)
return;
[_finalFilter removeAllTargets];
if (self.previewOutput != nil) {
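
With cropOnLast set, the crop filter is moved to the end of the chain, so additional outputs attached before it receive the full uncropped frame while the preview still shows the cropped result. A hedged sketch of the caller side, assembled from the avatar-editor hunks later in this commit:

    // TGPhotoEditorController (avatar creation): crop at the end of the chain.
    _photoEditor.cropOnLast = true;

    // TGPhotoAvatarPreviewController: feed the full-frame preview while visible,
    self.photoEditor.additionalOutputs = @[_cropView.fullPreviewView];
    // and detach it again on transition out.
    self.photoEditor.additionalOutputs = @[];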

View File

@ -1703,7 +1703,7 @@ UIImage *TGBlurredAlphaImage(UIImage *source, CGSize size)
return image;
}
UIImage *TGBlurredRectangularImage(UIImage *source, CGSize size, CGSize renderSize, uint32_t *averageColor, void (^pixelProcessingBlock)(void *, int, int, int))
UIImage *TGBlurredRectangularImage(UIImage *source, bool more, CGSize size, CGSize renderSize, uint32_t *averageColor, void (^pixelProcessingBlock)(void *, int, int, int))
{
CGSize fittedSize = fitSize(size, CGSizeMake(90, 90));
CGSize fittedRenderSize = CGSizeMake(fittedSize.width / size.width * renderSize.width, fittedSize.height / size.height * renderSize.height);
@ -1733,7 +1733,12 @@ UIImage *TGBlurredRectangularImage(UIImage *source, CGSize size, CGSize renderSi
[source drawInRect:CGRectMake((blurredContextSize.width - fittedRenderSize.width) / 2.0f, (blurredContextSize.height - fittedRenderSize.height) / 2.0f, fittedRenderSize.width, fittedRenderSize.height) blendMode:kCGBlendModeCopy alpha:1.0f];
UIGraphicsPopContext();
fastBlur((int)blurredContextSize.width, (int)blurredContextSize.height, (int)blurredBytesPerRow, blurredMemory);
if (more) {
fastBlurMore((int)blurredContextSize.width, (int)blurredContextSize.height, (int)blurredBytesPerRow, blurredMemory);
fastBlurMore((int)blurredContextSize.width, (int)blurredContextSize.height, (int)blurredBytesPerRow, blurredMemory);
} else {
fastBlur((int)blurredContextSize.width, (int)blurredContextSize.height, (int)blurredBytesPerRow, blurredMemory);
}
if (averageColor != NULL)
{
@ -2283,12 +2288,12 @@ UIImage *TGCropBackdropImage(UIImage *source, CGSize size)
UIImage *TGCameraPositionSwitchImage(UIImage *source, CGSize size)
{
return TGBlurredRectangularImage(source, size, size, NULL, nil);
return TGBlurredRectangularImage(source, false, size, size, NULL, nil);
}
UIImage *TGCameraModeSwitchImage(UIImage *source, CGSize size)
{
return TGBlurredRectangularImage(source, size, size, NULL, nil);
return TGBlurredRectangularImage(source, false, size, size, NULL, nil);
}
UIImage *TGScaleAndCropImageToPixelSize(UIImage *source, CGSize size, CGSize renderSize, uint32_t *averageColor, void (^pixelProcessingBlock)(void *, int, int, int))
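
The new bool selects between the existing single fastBlur pass and a heavier double fastBlurMore pass. Existing call sites in this file pass false to keep the previous look; the stronger-blur call below is illustrative only:

    // unchanged behaviour (as in TGCameraPositionSwitchImage / TGCameraModeSwitchImage above)
    UIImage *light = TGBlurredRectangularImage(source, false, size, size, NULL, nil);
    // illustrative: two fastBlurMore passes instead of one fastBlur
    UIImage *heavy = TGBlurredRectangularImage(source, true, size, size, NULL, nil);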

View File

@ -707,8 +707,13 @@
if (grouping && selectedItems.count > 1)
groupedId = @([self generateGroupedId]);
for (TGMediaAsset *asset in selectedItems)
for (TGMediaAsset *item in selectedItems)
{
TGMediaAsset *asset = item;
if ([asset isKindOfClass:[TGCameraCapturedVideo class]]) {
asset = ((TGCameraCapturedVideo *)asset).originalAsset;
}
switch (asset.type)
{
case TGMediaAssetPhotoType:
@ -1044,84 +1049,77 @@
case TGMediaAssetGifType:
{
NSString *caption = [editingContext captionForItem:asset];
NSArray *entities = [editingContext entitiesForItem:asset];
TGCameraCapturedVideo *video = (TGCameraCapturedVideo *)item;
[signals addObject:[[[TGMediaAssetImageSignals imageDataForAsset:asset allowNetworkAccess:false] mapToSignal:^SSignal *(TGMediaAssetImageData *assetData)
TGVideoEditAdjustments *adjustments = (TGVideoEditAdjustments *)[editingContext adjustmentsForItem:video];
NSString *caption = [editingContext captionForItem:video];
NSArray *entities = [editingContext entitiesForItem:video];
NSNumber *timer = [editingContext timerForItem:video];
UIImage *(^cropVideoThumbnail)(UIImage *, CGSize, CGSize, bool) = ^UIImage *(UIImage *image, CGSize targetSize, CGSize sourceSize, bool resize)
{
NSString *tempFileName = TGTemporaryFileName(nil);
NSData *data = assetData.imageData;
const char *gif87Header = "GIF87";
const char *gif89Header = "GIF89";
if (data.length >= 5 && (!memcmp(data.bytes, gif87Header, 5) || !memcmp(data.bytes, gif89Header, 5)))
if ([adjustments cropAppliedForAvatar:false] || adjustments.hasPainting || adjustments.toolsApplied)
{
return [[[TGGifConverter convertGifToMp4:data] map:^id(NSDictionary *result)
{
NSString *filePath = result[@"path"];
NSMutableDictionary *dict = [[NSMutableDictionary alloc] init];
dict[@"type"] = @"file";
dict[@"tempFileUrl"] = [NSURL fileURLWithPath:filePath];
dict[@"fileName"] = @"animation.mp4";
dict[@"mimeType"] = @"video/mp4";
dict[@"isAnimation"] = @true;
if (result[@"dimensions"] != nil) {
dict[@"dimensions"] = result[@"dimensions"];
}
if (result[@"duration"] != nil) {
dict[@"duration"] = result[@"duration"];
}
if (result[@"previewImage"] != nil) {
dict[@"previewImage"] = result[@"previewImage"];
}
id generatedItem = descriptionGenerator(dict, caption, entities, nil);
return generatedItem;
}] catch:^SSignal *(id error)
{
[data writeToURL:[NSURL fileURLWithPath:tempFileName] atomically:true];
NSMutableDictionary *dict = [[NSMutableDictionary alloc] init];
dict[@"type"] = @"file";
dict[@"tempFileUrl"] = [NSURL fileURLWithPath:tempFileName];
dict[@"fileName"] = assetData.fileName;
dict[@"mimeType"] = TGMimeTypeForFileUTI(assetData.fileUTI);
id generatedItem = descriptionGenerator(dict, caption, entities, nil);
return [SSignal single:generatedItem];
}];
CGRect scaledCropRect = CGRectMake(adjustments.cropRect.origin.x * image.size.width / adjustments.originalSize.width, adjustments.cropRect.origin.y * image.size.height / adjustments.originalSize.height, adjustments.cropRect.size.width * image.size.width / adjustments.originalSize.width, adjustments.cropRect.size.height * image.size.height / adjustments.originalSize.height);
UIImage *paintingImage = adjustments.paintingData.stillImage;
if (paintingImage == nil) {
paintingImage = adjustments.paintingData.image;
}
if (adjustments.toolsApplied) {
image = [PGPhotoEditor resultImageForImage:image adjustments:adjustments];
}
return TGPhotoEditorCrop(image, paintingImage, adjustments.cropOrientation, 0, scaledCropRect, adjustments.cropMirrored, targetSize, sourceSize, resize);
}
else
{
[data writeToURL:[NSURL fileURLWithPath:tempFileName] atomically:true];
NSMutableDictionary *dict = [[NSMutableDictionary alloc] init];
dict[@"type"] = @"file";
dict[@"tempFileUrl"] = [NSURL fileURLWithPath:tempFileName];
dict[@"fileName"] = assetData.fileName;
dict[@"mimeType"] = TGMimeTypeForFileUTI(assetData.fileUTI);
id generatedItem = descriptionGenerator(dict, caption, entities, nil);
return [SSignal single:generatedItem];
}
}] catch:^SSignal *(id error)
{
if (![error isKindOfClass:[NSNumber class]])
return [SSignal complete];
return [inlineThumbnailSignal(asset) map:^id(UIImage *image)
return image;
};
CGSize imageSize = TGFillSize(video.originalSize, CGSizeMake(512, 512));
SSignal *trimmedVideoThumbnailSignal = [[video avAsset] mapToSignal:^SSignal *(AVURLAsset *avAsset) {
return [[TGMediaAssetImageSignals videoThumbnailForAVAsset:avAsset size:imageSize timestamp:CMTimeMakeWithSeconds(adjustments.trimStartValue, NSEC_PER_SEC)] map:^UIImage *(UIImage *image)
{
NSMutableDictionary *dict = [[NSMutableDictionary alloc] init];
dict[@"type"] = @"cloudPhoto";
dict[@"document"] = @true;
dict[@"asset"] = asset;
dict[@"previewImage"] = image;
id generatedItem = descriptionGenerator(dict, caption, entities, nil);
return generatedItem;
return cropVideoThumbnail(image, TGScaleToFill(video.originalSize, CGSizeMake(512, 512)), video.originalSize, true);
}];
}];
SSignal *videoThumbnailSignal = [[video thumbnailImageSignal] map:^UIImage *(UIImage *image)
{
return cropVideoThumbnail(image, image.size, image.size, false);
}];
SSignal *thumbnailSignal = adjustments.trimStartValue > FLT_EPSILON ? trimmedVideoThumbnailSignal : videoThumbnailSignal;
TGMediaVideoConversionPreset preset = TGMediaVideoConversionPresetAnimation;
if (adjustments != nil) {
adjustments = [adjustments editAdjustmentsWithPreset:preset maxDuration:0.0];
} else {
adjustments = [TGVideoEditAdjustments editAdjustmentsWithOriginalSize:video.originalSize preset:preset];
}
CGSize dimensions = [TGMediaVideoConverter dimensionsFor:video.originalSize adjustments:adjustments preset:preset];
NSTimeInterval duration = adjustments.trimApplied ? (adjustments.trimEndValue - adjustments.trimStartValue) : video.videoDuration;
[signals addObject:[thumbnailSignal map:^id(UIImage *image)
{
NSMutableDictionary *dict = [[NSMutableDictionary alloc] init];
dict[@"type"] = @"cameraVideo";
dict[@"url"] = ((TGCameraCapturedVideo *)item).immediateAVAsset.URL;
dict[@"previewImage"] = image;
dict[@"adjustments"] = adjustments;
dict[@"dimensions"] = [NSValue valueWithCGSize:dimensions];
dict[@"duration"] = @(duration);
if (adjustments.paintingData.stickers.count > 0)
dict[@"stickers"] = adjustments.paintingData.stickers;
if (timer != nil)
dict[@"timer"] = timer;
else if (groupedId != nil && !hasAnyTimers)
dict[@"groupedId"] = groupedId;
id generatedItem = descriptionGenerator(dict, caption, entities, nil);
return generatedItem;
}]];
i++;
}
break;

View File

@ -143,7 +143,7 @@
};
[itemViews addObject:carouselItem];
TGMenuSheetButtonItemView *galleryItem = [[TGMenuSheetButtonItemView alloc] initWithTitle:TGLocalized(@"AttachmentMenu.PhotoOrVideo") type:TGMenuSheetButtonTypeDefault fontSize:20.0 action:^
TGMenuSheetButtonItemView *galleryItem = [[TGMenuSheetButtonItemView alloc] initWithTitle:_signup ? TGLocalized(@"Common.ChoosePhoto") : TGLocalized(@"AttachmentMenu.PhotoOrVideo") type:TGMenuSheetButtonTypeDefault fontSize:20.0 action:^
{
__strong TGMediaAvatarMenuMixin *strongSelf = weakSelf;
if (strongSelf == nil)

View File

@ -1635,7 +1635,7 @@
[images enumerateObjectsUsingBlock:^(UIImage *image, NSUInteger index, __unused BOOL *stop)
{
if (index < timestamps.count)
[strongSelf->_scrubberView setThumbnailImage:image forTimestamp:[timestamps[index] doubleValue] isSummaryThubmnail:isSummaryThumbnails];
[strongSelf->_scrubberView setThumbnailImage:image forTimestamp:[timestamps[index] doubleValue] index:index isSummaryThubmnail:isSummaryThumbnails];
}];
} completed:^
{

View File

@ -40,7 +40,9 @@
- (void)ignoreThumbnails;
- (void)resetThumbnails;
- (void)setThumbnailImage:(UIImage *)image forTimestamp:(NSTimeInterval)timestamp isSummaryThubmnail:(bool)isSummaryThumbnail;
- (void)updateThumbnails;
- (void)setThumbnailImage:(UIImage *)image forTimestamp:(NSTimeInterval)timestamp index:(NSInteger)index isSummaryThubmnail:(bool)isSummaryThumbnail;
- (void)setRecipientName:(NSString *)recipientName;
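
The index-aware setter lets the scrubber replace an already-created thumbnail in place (with a cross-fade) instead of always appending a new view, and updateThumbnails re-applies the current crop to every thumbnail (the selector keeps the pre-existing "Thubmnail" spelling). Caller-side usage, taken from the item-view and editor-controller hunks in this commit:

    [_scrubberView setThumbnailImage:image
                        forTimestamp:[timestamps[index] doubleValue]
                               index:index
                  isSummaryThubmnail:isSummaryThumbnails];

    // after the crop rect/orientation changes:
    [_scrubberView updateThumbnails];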

View File

@ -58,7 +58,6 @@ typedef enum
NSTimeInterval _duration;
bool _ignoreThumbnailLoad;
bool _fadingThumbnailViews;
CGFloat _thumbnailAspectRatio;
NSArray *_summaryTimestamps;
NSMutableArray *_summaryThumbnailViews;
@ -741,6 +740,41 @@ typedef enum
[self _resetZooming];
}
- (void)updateThumbnails {
UIView *snapshotView = [_summaryThumbnailWrapperView snapshotViewAfterScreenUpdates:true];
snapshotView.frame = _summaryThumbnailWrapperView.frame;
[_summaryThumbnailWrapperView.superview insertSubview:snapshotView aboveSubview:_summaryThumbnailWrapperView];
id<TGMediaPickerGalleryVideoScrubberDataSource> dataSource = self.dataSource;
if ([dataSource respondsToSelector:@selector(videoScrubberOriginalSize:cropRect:cropOrientation:cropMirrored:)])
_originalSize = [dataSource videoScrubberOriginalSize:self cropRect:&_cropRect cropOrientation:&_cropOrientation cropMirrored:&_cropMirrored];
for (TGMediaPickerGalleryVideoScrubberThumbnailView *view in _summaryThumbnailWrapperView.subviews) {
view.cropRect = _cropRect;
view.cropOrientation = _cropOrientation;
view.cropMirrored = _cropMirrored;
[view updateCropping];
}
for (TGMediaPickerGalleryVideoScrubberThumbnailView *view in _zoomedThumbnailWrapperView.subviews) {
view.cropRect = _cropRect;
view.cropOrientation = _cropOrientation;
view.cropMirrored = _cropMirrored;
[view updateCropping];
}
if (snapshotView != nil)
{
[UIView animateWithDuration:0.2f animations:^
{
snapshotView.alpha = 0.0f;
} completion:^(__unused BOOL finished)
{
[snapshotView removeFromSuperview];
}];
}
}
- (void)reloadData
{
[self reloadDataAndReset:true];
@ -771,23 +805,29 @@ typedef enum
[self reloadThumbnails];
}
- (void)setThumbnailImage:(UIImage *)image forTimestamp:(NSTimeInterval)__unused timestamp isSummaryThubmnail:(bool)isSummaryThumbnail
- (void)setThumbnailImage:(UIImage *)image forTimestamp:(NSTimeInterval)__unused timestamp index:(NSInteger)index isSummaryThubmnail:(bool)isSummaryThumbnail
{
TGMediaPickerGalleryVideoScrubberThumbnailView *thumbnailView = [[TGMediaPickerGalleryVideoScrubberThumbnailView alloc] initWithImage:image originalSize:_originalSize cropRect:_cropRect cropOrientation:_cropOrientation cropMirrored:_cropMirrored];
bool exists = false;
if (isSummaryThumbnail)
{
[_summaryThumbnailWrapperView addSubview:thumbnailView];
[_summaryThumbnailViews addObject:thumbnailView];
if (_summaryThumbnailViews.count >= index + 1) {
exists = true;
[_summaryThumbnailViews[index] setImage:image animated:true];
} else {
TGMediaPickerGalleryVideoScrubberThumbnailView *thumbnailView = [[TGMediaPickerGalleryVideoScrubberThumbnailView alloc] initWithImage:image originalSize:_originalSize cropRect:_cropRect cropOrientation:_cropOrientation cropMirrored:_cropMirrored];
[_summaryThumbnailWrapperView addSubview:thumbnailView];
[_summaryThumbnailViews addObject:thumbnailView];
}
}
else
{
TGMediaPickerGalleryVideoScrubberThumbnailView *thumbnailView = [[TGMediaPickerGalleryVideoScrubberThumbnailView alloc] initWithImage:image originalSize:_originalSize cropRect:_cropRect cropOrientation:_cropOrientation cropMirrored:_cropMirrored];
[_zoomedThumbnailWrapperView addSubview:thumbnailView];
[_zoomedThumbnailViews addObject:thumbnailView];
}
if ((isSummaryThumbnail && _summaryThumbnailViews.count == _summaryTimestamps.count)
|| (!isSummaryThumbnail && _zoomedThumbnailViews.count == _zoomedTimestamps.count))
if (!exists && ((isSummaryThumbnail && _summaryThumbnailViews.count == _summaryTimestamps.count)
|| (!isSummaryThumbnail && _zoomedThumbnailViews.count == _zoomedTimestamps.count)))
{
if (!_ignoreThumbnailLoad)
{
@ -806,13 +846,11 @@ typedef enum
if (snapshotView != nil)
{
_fadingThumbnailViews = true;
[UIView animateWithDuration:0.3f animations:^
[UIView animateWithDuration:0.2f animations:^
{
snapshotView.alpha = 0.0f;
} completion:^(__unused BOOL finished)
{
_fadingThumbnailViews = false;
[snapshotView removeFromSuperview];
}];
}

View File

@ -2,8 +2,16 @@
@interface TGMediaPickerGalleryVideoScrubberThumbnailView : UIView
@property (nonatomic, assign) CGRect cropRect;
@property (nonatomic, assign) UIImageOrientation cropOrientation;
@property (nonatomic, assign) bool cropMirrored;
@property (nonatomic, strong) UIImage *image;
- (void)setImage:(UIImage *)image animated:(bool)animated;
- (instancetype)initWithImage:(UIImage *)image originalSize:(CGSize)originalSize cropRect:(CGRect)cropRect cropOrientation:(UIImageOrientation)cropOrientation cropMirrored:(bool)cropMirrored;
- (void)updateCropping;
- (void)updateCropping:(bool)animated;
@end
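
The thumbnail view now exposes its crop parameters, an animated image swap, and an animated re-crop, which the editor controller uses to keep the cover-frame dot in sync after cropping. Usage lifted from the hunks elsewhere in this commit:

    // from the croppingChanged handler in TGPhotoEditorController.m:
    _dotImageView.cropRect = _photoEditor.cropRect;
    _dotImageView.cropOrientation = _photoEditor.cropOrientation;
    _dotImageView.cropMirrored = _photoEditor.cropMirrored;
    [_dotImageView updateCropping:true];

    // from the scrubber: cross-fade an existing thumbnail to a new image
    [_summaryThumbnailViews[index] setImage:image animated:true];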

View File

@ -7,9 +7,6 @@
@interface TGMediaPickerGalleryVideoScrubberThumbnailView ()
{
CGSize _originalSize;
CGRect _cropRect;
UIImageOrientation _cropOrientation;
bool _cropMirrored;
UIImageView *_imageView;
UIView *_stripeView;
@ -52,14 +49,50 @@
- (void)setFrame:(CGRect)frame
{
[super setFrame:frame];
if (_imageView == nil)
return;
[self updateCropping];
}
- (void)setImage:(UIImage *)image
{
_image = image;
_imageView.image = image;
}
- (void)setImage:(UIImage *)image animated:(bool)animated
{
if (animated) {
UIView *snapshotView = [self snapshotViewAfterScreenUpdates:false];
[self addSubview:snapshotView];
[self setImage:image];
[UIView animateWithDuration:0.2f animations:^
{
snapshotView.alpha = 0.0f;
} completion:^(__unused BOOL finished)
{
[snapshotView removeFromSuperview];
}];
} else {
[self setImage:image];
}
}
- (void)updateCropping {
[self updateCropping:false];
}
- (void)updateCropping:(bool)animated {
if (_imageView.image == nil || _cropRect.size.width < FLT_EPSILON)
return;
UIView *snapshotView;
if (animated) {
snapshotView = [self snapshotViewAfterScreenUpdates:false];
[self addSubview:snapshotView];
}
CGAffineTransform transform = CGAffineTransformMakeRotation(TGRotationForOrientation(_cropOrientation));
if (_cropMirrored)
transform = CGAffineTransformScale(transform, -1.0f, 1.0f);
@ -83,11 +116,22 @@
cropRect = CGRectMake(originalSize.width - cropRect.size.width - cropRect.origin.x, originalSize.height - cropRect.size.height - cropRect.origin.y, cropRect.size.width, cropRect.size.height);
}
CGFloat ratio = self.frame.size.width / cropRect.size.width;
CGFloat ratio = self.bounds.size.width / cropRect.size.width;
_imageView.frame = CGRectMake(-cropRect.origin.x * ratio, -cropRect.origin.y * ratio, originalSize.width * ratio, originalSize.height * ratio);
CGFloat thickness = 1.0f - TGRetinaPixel;
_stripeView.frame = CGRectMake(self.frame.size.width - thickness, 0, thickness, self.frame.size.height);
_stripeView.frame = CGRectMake(self.bounds.size.width - thickness, 0, thickness, self.bounds.size.height);
if (snapshotView != nil)
{
[UIView animateWithDuration:0.2f animations:^
{
snapshotView.alpha = 0.0f;
} completion:^(__unused BOOL finished)
{
[snapshotView removeFromSuperview];
}];
}
}
@end

View File

@ -125,7 +125,7 @@
CGSize dimensions = [avAsset tracksWithMediaType:AVMediaTypeVideo].firstObject.naturalSize;
TGMediaVideoConversionPreset preset = adjustments.sendAsGif ? TGMediaVideoConversionPresetAnimation : [self presetFromAdjustments:adjustments];
if (!CGSizeEqualToSize(dimensions, CGSizeZero) && preset != TGMediaVideoConversionPresetAnimation && preset != TGMediaVideoConversionPresetVideoMessage && preset != TGMediaVideoConversionPresetProfile)
if (!CGSizeEqualToSize(dimensions, CGSizeZero) && preset != TGMediaVideoConversionPresetAnimation && preset != TGMediaVideoConversionPresetVideoMessage && preset != TGMediaVideoConversionPresetProfile && preset != TGMediaVideoConversionPresetProfileHigh && preset != TGMediaVideoConversionPresetPassthrough)
{
TGMediaVideoConversionPreset bestPreset = [self bestAvailablePresetForDimensions:dimensions];
if (preset > bestPreset)
@ -182,7 +182,9 @@
if (watcher != nil)
liveUploadData = [watcher fileUpdated:true];
contextResult = [TGMediaVideoConversionResult resultWithFileURL:outputUrl fileSize:0 duration:CMTimeGetSeconds(resultContext.timeRange.duration) dimensions:resultContext.dimensions coverImage:coverImage liveUploadData:liveUploadData];
NSUInteger fileSize = [[[NSFileManager defaultManager] attributesOfItemAtPath:outputUrl.path error:nil] fileSize];
contextResult = [TGMediaVideoConversionResult resultWithFileURL:outputUrl fileSize:fileSize duration:CMTimeGetSeconds(resultContext.timeRange.duration) dimensions:resultContext.dimensions coverImage:coverImage liveUploadData:liveUploadData];
return [resultContext finishedContext];
}];
@ -581,6 +583,9 @@
if (output == nil)
return false;
if (preset == TGMediaVideoConversionPresetPassthrough)
outputSettings = nil;
AVAssetReader *assetReader = [[AVAssetReader alloc] initWithAsset:composition error:error];
if (assetReader == nil)
return false;
@ -1252,6 +1257,7 @@ static CGFloat progressOfSampleBufferInTimeRange(CMSampleBufferRef sampleBuffer,
return (CGSize){ 240.0f, 240.0f };
case TGMediaVideoConversionPresetProfile:
case TGMediaVideoConversionPresetProfileHigh:
return (CGSize){ 800.0f, 800.0f };
default:
@ -1261,7 +1267,7 @@ static CGFloat progressOfSampleBufferInTimeRange(CMSampleBufferRef sampleBuffer,
+ (bool)keepAudioForPreset:(TGMediaVideoConversionPreset)preset
{
return preset != TGMediaVideoConversionPresetAnimation && preset != TGMediaVideoConversionPresetProfile;
return preset != TGMediaVideoConversionPresetAnimation && preset != TGMediaVideoConversionPresetProfile && preset != TGMediaVideoConversionPresetProfileHigh;
}
+ (NSDictionary *)audioSettingsForPreset:(TGMediaVideoConversionPreset)preset
@ -1340,6 +1346,9 @@ static CGFloat progressOfSampleBufferInTimeRange(CMSampleBufferRef sampleBuffer,
case TGMediaVideoConversionPresetProfile:
return 1400;
case TGMediaVideoConversionPresetProfileHigh:
return 2000;
default:
return 900;
}
@ -1369,6 +1378,7 @@ static CGFloat progressOfSampleBufferInTimeRange(CMSampleBufferRef sampleBuffer,
case TGMediaVideoConversionPresetAnimation:
case TGMediaVideoConversionPresetProfile:
case TGMediaVideoConversionPresetProfileHigh:
return 0;
default:
@ -1397,6 +1407,7 @@ static CGFloat progressOfSampleBufferInTimeRange(CMSampleBufferRef sampleBuffer,
case TGMediaVideoConversionPresetAnimation:
case TGMediaVideoConversionPresetProfile:
case TGMediaVideoConversionPresetProfileHigh:
return 0;
default:
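
For reference, how the new presets behave in this converter (values restated from the hunks above; their units are not spelled out in this diff): ProfileHigh shares the 800x800 cap with Profile, raises the video bitrate from 1400 to 2000, and likewise drops audio; Passthrough clears outputSettings so samples are written without re-encoding. A hedged sketch of querying the output dimensions through the class method already used in the picker hunk earlier (originalSize and adjustments stand in for the caller's values):

    CGSize dims = [TGMediaVideoConverter dimensionsFor:originalSize
                                           adjustments:adjustments
                                                preset:TGMediaVideoConversionPresetProfileHigh]; // capped at 800x800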

View File

@ -97,7 +97,6 @@ const CGFloat TGPhotoAvatarCropButtonsWrapperSize = 61.0f;
if (strongSelf == nil)
return;
PGPhotoEditor *photoEditor = strongSelf.photoEditor;
photoEditor.cropRect = strongSelf->_cropView.cropRect;
photoEditor.cropOrientation = strongSelf->_cropView.cropOrientation;
photoEditor.cropMirrored = strongSelf->_cropView.cropMirrored;
@ -210,7 +209,6 @@ const CGFloat TGPhotoAvatarCropButtonsWrapperSize = 61.0f;
- (void)setPlayer:(AVPlayer *)player
{
[_cropView setPlayer:player];
}
- (void)setSnapshotImage:(UIImage *)snapshotImage
@ -277,11 +275,10 @@ const CGFloat TGPhotoAvatarCropButtonsWrapperSize = 61.0f;
{
[_cropView hideImageForCustomTransition];
[_cropView animateTransitionOutSwitching:false];
[_cropView invalidateVideoView];
[UIView animateWithDuration:0.3f animations:^
{
_buttonsWrapperView.alpha = 0.0f;
_buttonsWrapperView.alpha = 0.0f;
} completion:nil];
}
@ -290,7 +287,6 @@ const CGFloat TGPhotoAvatarCropButtonsWrapperSize = 61.0f;
_dismissing = true;
[_cropView animateTransitionOutSwitching:switching];
[_cropView invalidateVideoView];
if (switching)
{

View File

@ -8,7 +8,7 @@
#import <LegacyComponents/TGPhotoEditorAnimation.h>
#import "TGPhotoEditorInterfaceAssets.h"
#import "TGModernGalleryVideoView.h"
#import "PGPhotoEditorView.h"
const CGFloat TGPhotoAvatarCropViewOverscreenSize = 1000;
@ -21,7 +21,6 @@ const CGFloat TGPhotoAvatarCropViewOverscreenSize = 1000;
UIScrollView *_scrollView;
UIView *_wrapperView;
UIImageView *_imageView;
TGModernGalleryVideoView *_videoView;
UIView *_snapshotView;
CGSize _snapshotSize;
@ -65,8 +64,13 @@ const CGFloat TGPhotoAvatarCropViewOverscreenSize = 1000;
_imageView = [[UIImageView alloc] initWithFrame:CGRectMake(0, 0, _wrapperView.frame.size.width, _wrapperView.frame.size.height)];
_imageView.autoresizingMask = UIViewAutoresizingFlexibleWidth | UIViewAutoresizingFlexibleHeight;
_imageView.userInteractionEnabled = false;
[_wrapperView addSubview:_imageView];
_fullPreviewView = [[PGPhotoEditorView alloc] initWithFrame:_imageView.frame];
_fullPreviewView.userInteractionEnabled = false;
[_wrapperView addSubview:_fullPreviewView];
_topOverlayView = [[UIView alloc] initWithFrame:CGRectZero];
_topOverlayView.backgroundColor = [TGPhotoEditorInterfaceAssets cropTransparentOverlayColor];
_topOverlayView.userInteractionEnabled = false;
@ -92,10 +96,18 @@ const CGFloat TGPhotoAvatarCropViewOverscreenSize = 1000;
[self addSubview:_areaMaskView];
[self updateCircleImageWithReferenceSize:screenSize];
UITapGestureRecognizer *tapRecognier = [[UITapGestureRecognizer alloc] initWithTarget:self action:@selector(handleTap:)];
[_wrapperView addGestureRecognizer:tapRecognier];
}
return self;
}
- (void)handleTap:(UITapGestureRecognizer *)gestureRecognizer {
if (self.tapped != nil)
self.tapped();
}
- (void)updateCircleImageWithReferenceSize:(CGSize)referenceSize
{
CGFloat shortSide = MIN(referenceSize.width, referenceSize.height);
@ -139,23 +151,6 @@ const CGFloat TGPhotoAvatarCropViewOverscreenSize = 1000;
[self reloadImageIfNeeded];
}
- (void)setPlayer:(AVPlayer *)player
{
_player = player;
_videoView = [[TGModernGalleryVideoView alloc] initWithFrame:_imageView.bounds player:player];
_videoView.frame = _imageView.frame;
_videoView.playerLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
_videoView.playerLayer.opaque = false;
_videoView.playerLayer.backgroundColor = nil;
[_imageView.superview insertSubview:_videoView aboveSubview:_imageView];
}
- (void)invalidateVideoView
{
_videoView.player = nil;
}
- (void)reloadImageIfNeeded
{
if (!_imageReloadingNeeded)
@ -415,7 +410,7 @@ const CGFloat TGPhotoAvatarCropViewOverscreenSize = 1000;
{
_cropMirrored = cropMirrored;
_imageView.transform = CGAffineTransformMakeScale(self.cropMirrored ? -1.0f : 1.0f, 1.0f);
_videoView.transform = _imageView.transform;
_fullPreviewView.transform = _imageView.transform;
}
- (void)invalidateCropRect
@ -489,24 +484,6 @@ const CGFloat TGPhotoAvatarCropViewOverscreenSize = 1000;
- (void)transitionInFinishedFromCamera:(bool)fromCamera
{
// if (fromCamera)
// {
// [UIView animateWithDuration:0.3f animations:^
// {
// _topOverlayView.alpha = 1.0f;
// _leftOverlayView.alpha = 1.0f;
// _rightOverlayView.alpha = 1.0f;
// _bottomOverlayView.alpha = 1.0f;
// }];
// }
// else
// {
// _topOverlayView.alpha = 1.0f;
// _leftOverlayView.alpha = 1.0f;
// _rightOverlayView.alpha = 1.0f;
// _bottomOverlayView.alpha = 1.0f;
// }
_scrollView.hidden = false;
_scrollView.backgroundColor = [UIColor clearColor];

View File

@ -7,9 +7,22 @@
@interface TGPhotoAvatarPreviewController : TGPhotoEditorTabController
- (instancetype)initWithContext:(id<LegacyComponentsContext>)context photoEditor:(PGPhotoEditor *)photoEditor previewView:(TGPhotoEditorPreviewView *)previewView scrubberView:(TGMediaPickerGalleryVideoScrubber *)scrubberView dotImageView:(UIImageView *)dotImageView;
@property (nonatomic, assign) bool switching;
@property (nonatomic, assign) bool skipTransitionIn;
@property (nonatomic, assign) bool fromCamera;
@property (nonatomic, copy) void (^croppingChanged)(void);
@property (nonatomic, copy) void (^togglePlayback)(void);
- (instancetype)initWithContext:(id<LegacyComponentsContext>)context photoEditor:(PGPhotoEditor *)photoEditor previewView:(TGPhotoEditorPreviewView *)previewView scrubberView:(TGMediaPickerGalleryVideoScrubber *)scrubberView dotImageView:(UIView *)dotImageView;
- (void)setImage:(UIImage *)image;
- (void)setSnapshotImage:(UIImage *)snapshotImage;
- (void)setSnapshotView:(UIView *)snapshotView;
- (void)beginScrubbing:(bool)flash;
- (void)endScrubbing:(bool)flash completion:(bool (^)(void))completion;
- (void)_finishedTransitionIn;
@end

View File

@ -12,7 +12,7 @@
#import "TGPhotoEditorController.h"
#import "TGPhotoEditorPreviewView.h"
#import "TGPhotoEditorSparseView.h"
#import "TGPhotoAvatarCropView.h"
#import "TGMediaPickerGalleryVideoScrubber.h"
#import "TGModernGalleryVideoView.h"
@ -23,17 +23,21 @@ const CGFloat TGPhotoAvatarPreviewLandscapePanelSize = TGPhotoAvatarPreviewPanel
@interface TGPhotoAvatarPreviewController ()
{
bool _appeared;
UIImage *_imagePendingLoad;
UIView *_snapshotView;
UIImage *_snapshotImage;
TGPhotoEditorSparseView *_wrapperView;
TGMediaPickerGalleryVideoScrubber *_scrubberView;
UIImageView *_dotImageView;
UIView *_wrapperView;
TGPhotoAvatarCropView *_cropView;
UIView *_portraitToolsWrapperView;
UIView *_landscapeToolsWrapperView;
UIView *_portraitWrapperBackgroundView;
UIView *_landscapeWrapperBackgroundView;
TGMediaPickerGalleryVideoScrubber *_scrubberView;
UIView *_dotImageView;
UIView *_videoAreaView;
UIView *_flashView;
UIView *_portraitToolControlView;
@ -48,7 +52,7 @@ const CGFloat TGPhotoAvatarPreviewLandscapePanelSize = TGPhotoAvatarPreviewPanel
@implementation TGPhotoAvatarPreviewController
- (instancetype)initWithContext:(id<LegacyComponentsContext>)context photoEditor:(PGPhotoEditor *)photoEditor previewView:(TGPhotoEditorPreviewView *)previewView scrubberView:(TGMediaPickerGalleryVideoScrubber *)scrubberView dotImageView:(UIImageView *)dotImageView
- (instancetype)initWithContext:(id<LegacyComponentsContext>)context photoEditor:(PGPhotoEditor *)photoEditor previewView:(TGPhotoEditorPreviewView *)previewView scrubberView:(TGMediaPickerGalleryVideoScrubber *)scrubberView dotImageView:(UIView *)dotImageView
{
self = [super initWithContext:context];
if (self != nil)
@ -67,12 +71,73 @@ const CGFloat TGPhotoAvatarPreviewLandscapePanelSize = TGPhotoAvatarPreviewPanel
[super loadView];
self.view.autoresizingMask = UIViewAutoresizingFlexibleWidth | UIViewAutoresizingFlexibleHeight;
[self.view addSubview:_previewView];
// [self.view addSubview:_previewView];
_wrapperView = [[UIView alloc] initWithFrame:CGRectZero];
[self.view addSubview:_wrapperView];
__weak TGPhotoAvatarPreviewController *weakSelf = self;
void(^interactionBegan)(void) = ^
{
__strong TGPhotoAvatarPreviewController *strongSelf = weakSelf;
if (strongSelf == nil)
return;
self.controlVideoPlayback(false);
};
void(^interactionEnded)(void) = ^
{
__strong TGPhotoAvatarPreviewController *strongSelf = weakSelf;
if (strongSelf == nil)
return;
if ([strongSelf shouldAutorotate])
[TGViewController attemptAutorotation];
self.controlVideoPlayback(true);
};
PGPhotoEditor *photoEditor = self.photoEditor;
_cropView = [[TGPhotoAvatarCropView alloc] initWithOriginalSize:photoEditor.originalSize screenSize:[self referenceViewSize]];
[_cropView setCropRect:photoEditor.cropRect];
[_cropView setCropOrientation:photoEditor.cropOrientation];
[_cropView setCropMirrored:photoEditor.cropMirrored];
_cropView.tapped = ^{
__strong TGPhotoAvatarPreviewController *strongSelf = weakSelf;
if (strongSelf == nil)
return;
if (strongSelf.togglePlayback != nil)
strongSelf.togglePlayback();
};
_cropView.croppingChanged = ^
{
__strong TGPhotoAvatarPreviewController *strongSelf = weakSelf;
if (strongSelf == nil)
return;
photoEditor.cropRect = strongSelf->_cropView.cropRect;
photoEditor.cropOrientation = strongSelf->_cropView.cropOrientation;
photoEditor.cropMirrored = strongSelf->_cropView.cropMirrored;
if (strongSelf.croppingChanged != nil)
strongSelf.croppingChanged();
};
if (_snapshotView != nil)
{
[_cropView setSnapshotView:_snapshotView];
_snapshotView = nil;
}
else if (_snapshotImage != nil)
{
[_cropView setSnapshotImage:_snapshotImage];
_snapshotImage = nil;
}
_cropView.interactionBegan = interactionBegan;
_cropView.interactionEnded = interactionEnded;
[_wrapperView addSubview:_cropView];
if (self.item.isVideo) {
_wrapperView = [[TGPhotoEditorSparseView alloc] initWithFrame:CGRectZero];
[self.view addSubview:_wrapperView];
_portraitToolsWrapperView = [[UIView alloc] initWithFrame:CGRectZero];
_portraitToolsWrapperView.alpha = 0.0f;
[_wrapperView addSubview:_portraitToolsWrapperView];
@ -117,9 +182,38 @@ const CGFloat TGPhotoAvatarPreviewLandscapePanelSize = TGPhotoAvatarPreviewPanel
}
}
- (void)viewWillAppear:(BOOL)animated
{
[super viewWillAppear:animated];
self.photoEditor.additionalOutputs = @[_cropView.fullPreviewView];
if (_appeared)
return;
if (self.initialAppearance && self.skipTransitionIn)
{
[self _finishedTransitionInWithView:nil];
if (self.finishedTransitionIn != nil)
{
self.finishedTransitionIn();
self.finishedTransitionIn = nil;
}
}
else
{
[self transitionIn];
}
}
- (void)viewDidAppear:(BOOL)animated
{
[super viewDidAppear:animated];
_appeared = true;
if (_imagePendingLoad != nil)
[_cropView setImage:_imagePendingLoad];
[self transitionIn];
}
@ -127,12 +221,39 @@ const CGFloat TGPhotoAvatarPreviewLandscapePanelSize = TGPhotoAvatarPreviewPanel
- (BOOL)shouldAutorotate
{
TGPhotoEditorPreviewView *previewView = self.previewView;
return (!previewView.isTracking && [super shouldAutorotate]);
return (!previewView.isTracking && !_cropView.isTracking && [super shouldAutorotate]);
}
- (bool)isDismissAllowed
{
return _appeared;
return _appeared && !_cropView.isTracking && !_cropView.isAnimating;
}
#pragma mark -
- (void)setImage:(UIImage *)image
{
if (_dismissing && !_switching)
return;
if (!_appeared)
{
_imagePendingLoad = image;
return;
}
[_cropView setImage:image];
}
- (void)setSnapshotImage:(UIImage *)snapshotImage
{
_snapshotImage = snapshotImage;
[_cropView _replaceSnapshotImage:snapshotImage];
}
- (void)setSnapshotView:(UIView *)snapshotView
{
_snapshotView = snapshotView;
}
#pragma mark - Transition
@ -142,6 +263,8 @@ const CGFloat TGPhotoAvatarPreviewLandscapePanelSize = TGPhotoAvatarPreviewPanel
_scrubberView.layer.rasterizationScale = [UIScreen mainScreen].scale;
_scrubberView.layer.shouldRasterize = true;
[_cropView animateTransitionIn];
[UIView animateWithDuration:0.3f animations:^
{
_portraitToolsWrapperView.alpha = 1.0f;
@ -190,6 +313,9 @@ const CGFloat TGPhotoAvatarPreviewLandscapePanelSize = TGPhotoAvatarPreviewPanel
_dismissing = true;
}
[_cropView animateTransitionOutSwitching:switching];
self.photoEditor.additionalOutputs = @[];
TGPhotoEditorPreviewView *previewView = self.previewView;
previewView.touchedUp = nil;
previewView.touchedDown = nil;
@ -233,6 +359,7 @@ const CGFloat TGPhotoAvatarPreviewLandscapePanelSize = TGPhotoAvatarPreviewPanel
_portraitToolsWrapperView.alpha = 0.0f;
_landscapeToolsWrapperView.alpha = 0.0f;
_videoAreaView.alpha = 0.0f;
_dotImageView.alpha = 0.0f;
} completion:^(__unused BOOL finished)
{
if (completion != nil)
@ -308,42 +435,49 @@ const CGFloat TGPhotoAvatarPreviewLandscapePanelSize = TGPhotoAvatarPreviewPanel
previewView.hidden = false;
[previewView performTransitionInIfNeeded];
[_cropView transitionInFinishedFromCamera:(self.fromCamera && self.initialAppearance)];
PGPhotoEditor *photoEditor = self.photoEditor;
[photoEditor processAnimated:false completion:nil];
}
- (void)_finishedTransitionIn
{
// [_cropView animateTransitionIn];
[_cropView transitionInFinishedFromCamera:true];
self.finishedTransitionIn();
self.finishedTransitionIn = nil;
}
- (void)prepareForCustomTransitionOut
{
[_cropView hideImageForCustomTransition];
[_cropView animateTransitionOutSwitching:false];
_previewView.hidden = true;
[UIView animateWithDuration:0.3f animations:^
{
_portraitToolsWrapperView.alpha = 0.0f;
_landscapeToolsWrapperView.alpha = 0.0f;
_videoAreaView.alpha = 0.0f;
_dotImageView.alpha = 0.0f;
} completion:nil];
}
- (CGRect)transitionOutReferenceFrame
{
TGPhotoEditorPreviewView *previewView = _previewView;
return previewView.frame;
return [_cropView cropRectFrameForView:self.view];
}
- (UIView *)transitionOutReferenceView
{
return _previewView;
}
- (UIView *)snapshotView
{
TGPhotoEditorPreviewView *previewView = self.previewView;
return [previewView originalSnapshotView];
return [_cropView cropSnapshotView];
}
- (id)currentResultRepresentation
{
return [self snapshotView];
// return TGPaintCombineCroppedImages(self.photoEditor.currentResultImage, self.photoEditor.paintingData.image, true, self.photoEditor.originalSize, self.photoEditor.cropRect, self.photoEditor.cropOrientation, self.photoEditor.cropRotation, self.photoEditor.cropMirrored);
return [_cropView cropSnapshotView];
}
#pragma mark - Layout
@ -400,8 +534,6 @@ const CGFloat TGPhotoAvatarPreviewLandscapePanelSize = TGPhotoAvatarPreviewPanel
CGSize referenceSize = [self referenceViewSize];
CGFloat screenSide = MAX(referenceSize.width, referenceSize.height);
_wrapperView.frame = CGRectMake((referenceSize.width - screenSide) / 2, (referenceSize.height - screenSide) / 2, screenSide, screenSide);
CGFloat panelSize = UIInterfaceOrientationIsPortrait(orientation) ? TGPhotoAvatarPreviewPanelSize : TGPhotoAvatarPreviewLandscapePanelSize;
CGFloat panelToolbarPortraitSize = panelSize + TGPhotoEditorToolbarSize;
@ -421,15 +553,12 @@ const CGFloat TGPhotoAvatarPreviewLandscapePanelSize = TGPhotoAvatarPreviewPanel
[UIView performWithoutAnimation:^
{
_landscapeToolsWrapperView.frame = CGRectMake(0, screenEdges.top, panelToolbarLandscapeSize, _landscapeToolsWrapperView.frame.size.height);
// _landscapeCollectionView.frame = CGRectMake(panelToolbarLandscapeSize - panelSize, 0, panelSize, _landscapeCollectionView.frame.size.height);
}];
_landscapeToolsWrapperView.frame = CGRectMake(screenEdges.left, screenEdges.top, panelToolbarLandscapeSize, referenceSize.height);
// _landscapeCollectionView.frame = CGRectMake(_landscapeCollectionView.frame.origin.x, _landscapeCollectionView.frame.origin.y, _landscapeCollectionView.frame.size.width, _landscapeToolsWrapperView.frame.size.height);
_portraitToolsWrapperView.frame = CGRectMake(screenEdges.left, screenSide - panelToolbarPortraitSize, referenceSize.width, panelToolbarPortraitSize);
// _portraitCollectionView.frame = CGRectMake(0, 0, _portraitToolsWrapperView.frame.size.width, panelSize);
_portraitToolsWrapperView.frame = CGRectMake((screenSide - referenceSize.width) / 2, screenSide - panelToolbarPortraitSize, referenceSize.width, panelToolbarPortraitSize);
}
break;
@ -439,14 +568,12 @@ const CGFloat TGPhotoAvatarPreviewLandscapePanelSize = TGPhotoAvatarPreviewPanel
[UIView performWithoutAnimation:^
{
_landscapeToolsWrapperView.frame = CGRectMake(screenSide - panelToolbarLandscapeSize, screenEdges.top, panelToolbarLandscapeSize, _landscapeToolsWrapperView.frame.size.height);
// _landscapeCollectionView.frame = CGRectMake(0, 0, panelSize, _landscapeCollectionView.frame.size.height);
}];
_landscapeToolsWrapperView.frame = CGRectMake(screenEdges.right - panelToolbarLandscapeSize, screenEdges.top, panelToolbarLandscapeSize, referenceSize.height);
// _landscapeCollectionView.frame = CGRectMake(_landscapeCollectionView.frame.origin.x, _landscapeCollectionView.frame.origin.y, _landscapeCollectionView.frame.size.width, _landscapeToolsWrapperView.frame.size.height);
_portraitToolsWrapperView.frame = CGRectMake(screenEdges.top, screenSide - panelToolbarPortraitSize, referenceSize.width, panelToolbarPortraitSize);
// _portraitCollectionView.frame = CGRectMake(0, 0, _portraitToolsWrapperView.frame.size.width, panelSize);
_portraitToolsWrapperView.frame = CGRectMake((screenSide - referenceSize.width) / 2, screenSide - panelToolbarPortraitSize, referenceSize.width, panelToolbarPortraitSize);
}
@ -465,12 +592,10 @@ const CGFloat TGPhotoAvatarPreviewLandscapePanelSize = TGPhotoAvatarPreviewPanel
else
x = screenSide - TGPhotoAvatarPreviewPanelSize;
_landscapeToolsWrapperView.frame = CGRectMake(x, screenEdges.top, panelToolbarLandscapeSize, referenceSize.height);
// _landscapeCollectionView.frame = CGRectMake(_landscapeCollectionView.frame.origin.x, _landscapeCollectionView.frame.origin.y, panelSize, _landscapeToolsWrapperView.frame.size.height);
_portraitToolsWrapperView.frame = CGRectMake(screenEdges.left, screenEdges.bottom - panelToolbarPortraitSize, referenceSize.width, panelToolbarPortraitSize);
_coverLabel.frame = CGRectMake(floor((_portraitToolsWrapperView.frame.size.width - _coverLabel.frame.size.width) / 2.0), CGRectGetMaxY(_scrubberView.frame) + 6.0, _coverLabel.frame.size.width, _coverLabel.frame.size.height);
// _portraitCollectionView.frame = CGRectMake(0, 0, _portraitToolsWrapperView.frame.size.width, panelSize);
}
break;
}
@ -499,13 +624,29 @@ const CGFloat TGPhotoAvatarPreviewLandscapePanelSize = TGPhotoAvatarPreviewPanel
- (void)updateLayout:(UIInterfaceOrientation)orientation
{
if ([self inFormSheet] || TGIsPad())
orientation = UIInterfaceOrientationPortrait;
orientation = [self effectiveOrientation:orientation];
CGSize referenceSize = [self referenceViewSize];
if ([UIDevice currentDevice].userInterfaceIdiom == UIUserInterfaceIdiomPad)
[_cropView updateCircleImageWithReferenceSize:referenceSize];
CGFloat screenSide = MAX(referenceSize.width, referenceSize.height);
_wrapperView.frame = CGRectMake((referenceSize.width - screenSide) / 2, (referenceSize.height - screenSide) / 2, screenSide, screenSide);
UIEdgeInsets screenEdges = UIEdgeInsetsMake((screenSide - self.view.frame.size.height) / 2, (screenSide - self.view.frame.size.width) / 2, (screenSide + self.view.frame.size.height) / 2, (screenSide + self.view.frame.size.width) / 2);
if (_dismissing)
return;
if (!_dismissing)
[self updateToolViews];
[self updatePreviewView];
[self updateToolViews];
CGRect containerFrame = [TGPhotoEditorTabController photoContainerFrameForParentViewFrame:CGRectMake(0, 0, referenceSize.width, referenceSize.height) toolbarLandscapeSize:self.toolbarLandscapeSize orientation:orientation panelSize:0.0f hasOnScreenNavigation:self.hasOnScreenNavigation];
containerFrame = CGRectOffset(containerFrame, screenEdges.left, screenEdges.top);
CGFloat shortSide = MIN(referenceSize.width, referenceSize.height);
CGFloat diameter = shortSide - [TGPhotoAvatarCropView areaInsetSize].width * 2;
_cropView.frame = CGRectMake(containerFrame.origin.x + (containerFrame.size.width - diameter) / 2, containerFrame.origin.y + (containerFrame.size.height - diameter) / 2, diameter, diameter);
}
- (TGPhotoEditorTab)availableTabs
@ -555,11 +696,11 @@ const CGFloat TGPhotoAvatarPreviewLandscapePanelSize = TGPhotoAvatarPreviewPanel
#pragma mark - Cropping
- (void)rotate {
// [_cropView rotate90DegreesCCWAnimated:true];
[_cropView rotate90DegreesCCWAnimated:true];
}
- (void)mirror {
// [_cropView mirror];
[_cropView mirror];
}
- (void)beginScrubbing:(bool)flash

View File

@ -39,11 +39,13 @@
#import "TGPhotoToolsController.h"
#import "TGPhotoPaintController.h"
#import "TGPhotoQualityController.h"
#import "TGPhotoAvatarCropController.h"
#import "TGPhotoAvatarPreviewController.h"
#import "TGPhotoAvatarCropView.h"
#import "TGMessageImageViewOverlayView.h"
#import "TGMediaPickerGalleryVideoScrubber.h"
#import "TGMediaPickerGalleryVideoScrubberThumbnailView.h"
#import "TGMenuSheetController.h"
@ -107,11 +109,13 @@
bool _wasPlaying;
bool _initializedScrubber;
NSArray *_cachedThumbnails;
TGMediaPickerGalleryVideoScrubber *_scrubberView;
bool _resetDotPosition;
NSTimeInterval _dotPosition;
UIImageView *_dotMarkerView;
UIImageView *_dotImageView;
TGMediaPickerGalleryVideoScrubberThumbnailView *_dotImageView;
UIView *_dotImageSnapshotView;
bool _requestingThumbnails;
@ -154,6 +158,7 @@
_photoEditor = [[PGPhotoEditor alloc] initWithOriginalSize:_item.originalSize adjustments:adjustments forVideo:item.isVideo enableStickers:(intent & TGPhotoEditorControllerSignupAvatarIntent) == 0];
if ([self presentedForAvatarCreation])
{
_photoEditor.cropOnLast = true;
CGFloat shortSide = MIN(_item.originalSize.width, _item.originalSize.height);
_photoEditor.cropRect = CGRectMake((_item.originalSize.width - shortSide) / 2, (_item.originalSize.height - shortSide) / 2, shortSide, shortSide);
}
@ -209,6 +214,10 @@
_containerView = [[UIView alloc] initWithFrame:CGRectZero];
[_wrapperView addSubview:_containerView];
_progressView = [[TGMessageImageViewOverlayView alloc] initWithFrame:CGRectMake(0.0f, 0.0f, 60.0f, 60.0f)];
[_progressView setRadius:60.0];
_progressView.userInteractionEnabled = false;
__weak TGPhotoEditorController *weakSelf = self;
void(^toolbarCancelPressed)(void) = ^
@ -247,7 +256,7 @@
switch (tab)
{
default:
[strongSelf presentEditorTab:tab];
[strongSelf presentTab:tab];
break;
case TGPhotoEditorToolsTab:
@ -257,7 +266,7 @@
if ([strongSelf->_currentTabController isKindOfClass:[TGPhotoToolsController class]])
[strongSelf->_currentTabController handleTabAction:tab];
else
[strongSelf presentEditorTab:TGPhotoEditorToolsTab];
[strongSelf presentTab:TGPhotoEditorToolsTab];
break;
case TGPhotoEditorPaintTab:
@ -265,7 +274,7 @@
if ([strongSelf->_currentTabController isKindOfClass:[TGPhotoPaintController class]])
[strongSelf->_currentTabController handleTabAction:tab];
else
[strongSelf presentEditorTab:TGPhotoEditorPaintTab];
[strongSelf presentTab:TGPhotoEditorPaintTab];
break;
case TGPhotoEditorStickerTab:
@ -276,7 +285,7 @@
case TGPhotoEditorRotateTab:
case TGPhotoEditorMirrorTab:
case TGPhotoEditorAspectRatioTab:
if ([strongSelf->_currentTabController isKindOfClass:[TGPhotoCropController class]] || [strongSelf->_currentTabController isKindOfClass:[TGPhotoAvatarCropController class]])
if ([strongSelf->_currentTabController isKindOfClass:[TGPhotoCropController class]] || [strongSelf->_currentTabController isKindOfClass:[TGPhotoAvatarPreviewController class]])
[strongSelf->_currentTabController handleTabAction:tab];
break;
}
@ -322,9 +331,15 @@
[_scrubberView addSubview:_dotMarkerView];
_dotMarkerView.center = CGPointMake(30.0, -20.0);
_dotImageView = [[UIImageView alloc] initWithFrame:CGRectMake(0.0, 0.0, 160.0, 160.0)];
_dotImageView = [[TGMediaPickerGalleryVideoScrubberThumbnailView alloc] initWithImage:nil originalSize:_item.originalSize cropRect:CGRectZero cropOrientation:UIImageOrientationUp cropMirrored:false];
_dotImageView.frame = CGRectMake(0.0, 0.0, 160.0, 160.0);
_dotImageView.userInteractionEnabled = true;
CAShapeLayer* maskLayer = [CAShapeLayer new];
maskLayer.frame = _dotImageView.bounds;
maskLayer.path = [UIBezierPath bezierPathWithOvalInRect:_dotImageView.bounds].CGPath;
_dotImageView.layer.mask = maskLayer;
UITapGestureRecognizer *dotTapRecognizer = [[UITapGestureRecognizer alloc] initWithTarget:self action:@selector(handleDotTap)];
[_dotImageView addGestureRecognizer:dotTapRecognizer];
@ -337,7 +352,7 @@
[self detectFaces];
[self presentEditorTab:_currentTab];
[self presentTab:_currentTab];
}
- (void)handleDotTap {
@ -495,12 +510,7 @@
TGDispatchOnMainThread(^
{
if ([strongSelf->_currentTabController isKindOfClass:[TGPhotoAvatarCropController class]])
[(TGPhotoAvatarCropController *)strongSelf->_currentTabController setPlayer:strongSelf->_player];
[strongSelf->_previewView performTransitionInWithCompletion:^
{
}];
[strongSelf->_previewView performTransitionInWithCompletion:^{}];
if (strongSelf->_scheduledVideoPlayback) {
strongSelf->_scheduledVideoPlayback = false;
@ -519,12 +529,14 @@
if (strongSelf->_dismissed)
return;
if (progressVisible)
strongSelf->_hadProgress = true;
[strongSelf setProgressVisible:progressVisible value:progress animated:true];
[strongSelf updateDoneButtonEnabled:doneEnabled animated:true];
if (progressVisible)
strongSelf->_hadProgress = true;
if (strongSelf->_hadProgress) {
if (strongSelf->_hadProgress && !progressVisible) {
[strongSelf->_progressView setPlay];
[strongSelf->_scrubberView reloadThumbnails];
}
});
@ -566,8 +578,8 @@
- (void)_setupPlaybackStartedObserver
{
CMTime startTime = CMTimeMake(10, 100);
if (_photoEditor.trimStartValue > DBL_EPSILON)
startTime = CMTimeMakeWithSeconds(_photoEditor.trimStartValue + 0.1, NSEC_PER_SEC);
if (_scrubberView.trimStartValue > DBL_EPSILON)
startTime = CMTimeMakeWithSeconds(_scrubberView.trimStartValue + 0.1, NSEC_PER_SEC);
__weak TGPhotoEditorController *weakSelf = self;
_playerStartedObserver = [_player addBoundaryTimeObserverForTimes:@[[NSValue valueWithCMTime:startTime]] queue:NULL usingBlock:^
@ -589,14 +601,13 @@
if (_playerReachedEndObserver != nil)
[_player removeTimeObserver:_playerReachedEndObserver];
PGPhotoEditor *photoEditor = _photoEditor;
CMTime endTime = CMTimeSubtract(_player.currentItem.duration, CMTimeMake(10, 100));
if (photoEditor.trimEndValue > DBL_EPSILON && photoEditor.trimEndValue < CMTimeGetSeconds(_player.currentItem.duration))
endTime = CMTimeMakeWithSeconds(photoEditor.trimEndValue - 0.1, NSEC_PER_SEC);
if (_scrubberView.trimEndValue > DBL_EPSILON && _scrubberView.trimEndValue < CMTimeGetSeconds(_player.currentItem.duration))
endTime = CMTimeMakeWithSeconds(_scrubberView.trimEndValue - 0.1, NSEC_PER_SEC);
CMTime startTime = CMTimeMake(5, 100);
if (photoEditor.trimStartValue > DBL_EPSILON)
startTime = CMTimeMakeWithSeconds(photoEditor.trimStartValue + 0.05, NSEC_PER_SEC);
if (_scrubberView.trimStartValue > DBL_EPSILON)
startTime = CMTimeMakeWithSeconds(_scrubberView.trimStartValue + 0.05, NSEC_PER_SEC);
__weak TGPhotoEditorController *weakSelf = self;
_playerReachedEndObserver = [_player addBoundaryTimeObserverForTimes:@[[NSValue valueWithCMTime:endTime]] queue:NULL usingBlock:^
@ -604,7 +615,7 @@
__strong TGPhotoEditorController *strongSelf = weakSelf;
if (strongSelf != nil && !strongSelf->_dismissed) {
[strongSelf->_player seekToTime:startTime];
[strongSelf->_scrubberView setValue:strongSelf->_photoEditor.trimEndValue resetPosition:true];
[strongSelf->_scrubberView setValue:strongSelf->_scrubberView.trimStartValue resetPosition:true];
}
}];
}
@ -617,8 +628,8 @@
if (reset) {
NSTimeInterval startPosition = 0.0f;
if (_photoEditor.trimStartValue > DBL_EPSILON)
startPosition = _photoEditor.trimStartValue;
if (_scrubberView.trimStartValue > DBL_EPSILON)
startPosition = _scrubberView.trimStartValue;
CMTime targetTime = CMTimeMakeWithSeconds(startPosition, NSEC_PER_SEC);
[_player.currentItem seekToTime:targetTime toleranceBefore:kCMTimeZero toleranceAfter:kCMTimeZero];
@ -650,6 +661,8 @@
[_player removeObserver:self forKeyPath:@"rate" context:nil];
_registeredKeypathObserver = false;
}
[_scrubberView setIsPlaying:false];
}
_isPlaying = false;
@ -1064,7 +1077,7 @@
}
}
- (void)presentEditorTab:(TGPhotoEditorTab)tab
- (void)presentTab:(TGPhotoEditorTab)tab
{
if (_switchingTab || (tab == _currentTab && _currentTabController != nil))
return;
@ -1087,7 +1100,7 @@
transitionReferenceFrame = [currentController transitionOutReferenceFrame];
transitionReferenceView = [currentController transitionOutReferenceView];
transitionNoTransitionView = [currentController isKindOfClass:[TGPhotoAvatarCropController class]];
transitionNoTransitionView = [currentController isKindOfClass:[TGPhotoAvatarPreviewController class]];
currentController.switchingToTab = tab;
[currentController transitionOutSwitching:true completion:^
@ -1142,59 +1155,15 @@
TGPhotoEditorTabController *controller = nil;
switch (tab)
{
case TGPhotoEditorPaintTab:
{
TGPhotoPaintController *paintController = [[TGPhotoPaintController alloc] initWithContext:_context photoEditor:_photoEditor previewView:_previewView];
paintController.stickersContext = _stickersContext;
paintController.toolbarLandscapeSize = TGPhotoEditorToolbarSize;
paintController.controlVideoPlayback = ^(bool play) {
__strong TGPhotoEditorController *strongSelf = weakSelf;
if (strongSelf == nil)
return;
if (play) {
[strongSelf startVideoPlayback:false];
} else {
[strongSelf stopVideoPlayback:false];
}
};
paintController.beginTransitionIn = ^UIView *(CGRect *referenceFrame, UIView **parentView, bool *noTransitionView)
{
__strong TGPhotoEditorController *strongSelf = weakSelf;
if (strongSelf == nil)
return nil;
*referenceFrame = transitionReferenceFrame;
*parentView = transitionParentView;
*noTransitionView = transitionNoTransitionView;
return transitionReferenceView;
};
paintController.finishedTransitionIn = ^
{
__strong TGPhotoEditorController *strongSelf = weakSelf;
if (strongSelf == nil)
return;
if (isInitialAppearance && strongSelf.finishedTransitionIn != nil)
strongSelf.finishedTransitionIn();
strongSelf->_switchingTab = false;
[strongSelf startVideoPlayback:true];
};
controller = paintController;
}
break;
case TGPhotoEditorCropTab:
{
__block UIView *initialBackgroundView = nil;
if ([self presentedForAvatarCreation])
{
TGPhotoAvatarCropController *cropController = [[TGPhotoAvatarCropController alloc] initWithContext:_context photoEditor:_photoEditor previewView:_previewView];
bool skipInitialTransition = (![self presentedFromCamera] && self.navigationController != nil) || self.skipInitialTransition;
TGPhotoAvatarPreviewController *cropController = [[TGPhotoAvatarPreviewController alloc] initWithContext:_context photoEditor:_photoEditor previewView:_previewView scrubberView:_scrubberView dotImageView:_dotImageView];
cropController.fromCamera = [self presentedFromCamera];
cropController.skipTransitionIn = skipInitialTransition;
if (snapshotView != nil)
@ -1212,13 +1181,37 @@
[strongSelf stopVideoPlayback:false];
}
};
cropController.togglePlayback = ^{
__strong TGPhotoEditorController *strongSelf = weakSelf;
if (strongSelf == nil || !strongSelf->_item.isVideo)
return;
if (strongSelf->_isPlaying) {
[strongSelf stopVideoPlayback:false];
[strongSelf setPlayButtonHidden:false animated:true];
} else {
[strongSelf startVideoPlayback:true];
[strongSelf setPlayButtonHidden:true animated:true];
}
};
cropController.croppingChanged = ^{
__strong TGPhotoEditorController *strongSelf = weakSelf;
if (strongSelf != nil) {
[strongSelf->_scrubberView updateThumbnails];
strongSelf->_dotImageView.cropRect = strongSelf->_photoEditor.cropRect;
strongSelf->_dotImageView.cropOrientation = strongSelf->_photoEditor.cropOrientation;
strongSelf->_dotImageView.cropMirrored = strongSelf->_photoEditor.cropMirrored;
[strongSelf->_dotImageView updateCropping:true];
}
};
cropController.beginTransitionIn = ^UIView *(CGRect *referenceFrame, UIView **parentView, bool *noTransitionView)
{
__strong TGPhotoEditorController *strongSelf = weakSelf;
*referenceFrame = transitionReferenceFrame;
*noTransitionView = transitionNoTransitionView;
*parentView = transitionParentView;
if (strongSelf != nil)
{
UIView *backgroundView = nil;
@ -1240,7 +1233,7 @@
}
[UIView animateWithDuration:0.3f animations:^
{
{
backgroundView.alpha = 1.0f;
}];
}
@ -1304,19 +1297,15 @@
{
if (cropController.dismissing && !cropController.switching)
return;
[self updateDoneButtonEnabled:!image.degraded animated:true];
if (image.degraded)
{
if (image.degraded) {
return;
}
else
{
} else {
self.fullSizeImage = image;
[cropController setImage:image];
}
}];
controller = cropController;
}
else
@ -1415,6 +1404,50 @@
}
break;
case TGPhotoEditorPaintTab:
{
TGPhotoPaintController *paintController = [[TGPhotoPaintController alloc] initWithContext:_context photoEditor:_photoEditor previewView:_previewView];
paintController.stickersContext = _stickersContext;
paintController.toolbarLandscapeSize = TGPhotoEditorToolbarSize;
paintController.controlVideoPlayback = ^(bool play) {
__strong TGPhotoEditorController *strongSelf = weakSelf;
if (strongSelf == nil)
return;
if (play) {
[strongSelf startVideoPlayback:false];
} else {
[strongSelf stopVideoPlayback:false];
}
};
paintController.beginTransitionIn = ^UIView *(CGRect *referenceFrame, UIView **parentView, bool *noTransitionView)
{
__strong TGPhotoEditorController *strongSelf = weakSelf;
if (strongSelf == nil)
return nil;
*referenceFrame = transitionReferenceFrame;
*parentView = transitionParentView;
*noTransitionView = transitionNoTransitionView;
return transitionReferenceView;
};
paintController.finishedTransitionIn = ^
{
__strong TGPhotoEditorController *strongSelf = weakSelf;
if (strongSelf == nil)
return;
if (isInitialAppearance && strongSelf.finishedTransitionIn != nil)
strongSelf.finishedTransitionIn();
strongSelf->_switchingTab = false;
[strongSelf startVideoPlayback:true];
};
controller = paintController;
}
break;
case TGPhotoEditorToolsTab:
{
TGPhotoToolsController *toolsController = [[TGPhotoToolsController alloc] initWithContext:_context photoEditor:_photoEditor previewView:_previewView];
@ -1495,14 +1528,7 @@
__strong TGPhotoEditorController *strongSelf = weakSelf;
if (strongSelf != nil) {
if ([currentController isKindOfClass:[TGPhotoAvatarCropController class]]) {
dispatch_async(dispatch_get_main_queue(), ^{
[strongSelf stopVideoPlayback:false];
[strongSelf seekVideo:0];
});
} else {
[strongSelf startVideoPlayback:true];
}
[strongSelf startVideoPlayback:true];
}
return transitionReferenceView;
@ -1516,10 +1542,6 @@
if (isInitialAppearance && strongSelf.finishedTransitionIn != nil)
strongSelf.finishedTransitionIn();
if ([currentController isKindOfClass:[TGPhotoAvatarCropController class]]) {
[strongSelf startVideoPlayback:false];
}
strongSelf->_switchingTab = false;
};
previewController.controlVideoPlayback = ^(bool play) {
@ -1684,8 +1706,8 @@
- (void)dismissEditor
{
if ((![_currentTabController isKindOfClass:[TGPhotoAvatarPreviewController class]] && ![_currentTabController isKindOfClass:[TGPhotoAvatarCropController class]]) && [self presentedForAvatarCreation]) {
[self presentEditorTab:TGPhotoEditorPreviewTab];
if (![_currentTabController isKindOfClass:[TGPhotoAvatarPreviewController class]] && [self presentedForAvatarCreation]) {
[self presentTab:TGPhotoEditorPreviewTab];
return;
}
@ -1778,7 +1800,7 @@
- (void)doneButtonPressed
{
if ([self presentedForAvatarCreation] && ![_currentTabController isKindOfClass:[TGPhotoAvatarPreviewController class]]) {
[self presentEditorTab:TGPhotoEditorPreviewTab];
[self presentTab:TGPhotoEditorPreviewTab];
} else {
[self applyEditor];
}
@ -1876,9 +1898,12 @@
fullImage = TGPhotoEditorVideoCrop(image, paintingImage, adjustments.cropOrientation, adjustments.cropRotation, adjustments.cropRect, adjustments.cropMirrored, CGSizeMake(640, 640), item.originalSize, true, false);
}
NSTimeInterval duration = trimEndValue - trimStartValue;
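// Clips shorter than 4 seconds get the higher-quality profile preset; longer clips use the standard one, presumably to keep the uploaded file size bounded.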
TGMediaVideoConversionPreset preset = duration < 4.0 ? TGMediaVideoConversionPresetProfileHigh : TGMediaVideoConversionPresetProfile;
TGDispatchOnMainThread(^{
if (self.didFinishEditingVideo != nil)
self.didFinishEditingVideo(asset.URL, [adjustments editAdjustmentsWithPreset:TGMediaVideoConversionPresetProfile videoStartValue:videoStartValue trimStartValue:trimStartValue trimEndValue:trimEndValue], fullImage, nil, true);
self.didFinishEditingVideo(asset.URL, [adjustments editAdjustmentsWithPreset:preset videoStartValue:videoStartValue trimStartValue:trimStartValue trimEndValue:trimEndValue], fullImage, nil, true);
[self dismissAnimated:true];
});
@ -2234,7 +2259,7 @@
_portraitToolbarView.frame = CGRectMake(screenEdges.left, portraitToolbarViewBottomEdge - TGPhotoEditorToolbarSize - safeAreaInset.bottom, referenceSize.width, TGPhotoEditorToolbarSize + safeAreaInset.bottom);
_scrubberView.frame = CGRectMake(0.0, 0.0, _portraitToolbarView.frame.size.width, _scrubberView.frame.size.height);
dispatch_async(dispatch_get_main_queue(), ^{
if (!_initializedScrubber) {
[_scrubberView layoutSubviews];
@ -2250,15 +2275,15 @@
- (void)_setScreenImage:(UIImage *)screenImage
{
_screenImage = screenImage;
if ([_currentTabController isKindOfClass:[TGPhotoAvatarCropController class]])
[(TGPhotoAvatarCropController *)_currentTabController setSnapshotImage:screenImage];
if ([_currentTabController isKindOfClass:[TGPhotoAvatarPreviewController class]])
[(TGPhotoAvatarPreviewController *)_currentTabController setSnapshotImage:screenImage];
}
- (void)_finishedTransitionIn
{
_switchingTab = false;
if ([_currentTabController isKindOfClass:[TGPhotoAvatarCropController class]])
[(TGPhotoAvatarCropController *)_currentTabController _finishedTransitionIn];
if ([_currentTabController isKindOfClass:[TGPhotoAvatarPreviewController class]])
[(TGPhotoAvatarPreviewController *)_currentTabController _finishedTransitionIn];
}
- (CGFloat)toolbarLandscapeSize
@ -2271,25 +2296,23 @@
return _transitionWrapperView;
}
- (void)layoutProgressView {
if (_progressView.superview == nil)
[_containerView addSubview:_progressView];
CGSize referenceSize = [self referenceViewSize];
CGRect containerFrame = [TGPhotoEditorTabController photoContainerFrameForParentViewFrame:CGRectMake(0, 0, referenceSize.width, referenceSize.height) toolbarLandscapeSize:self.toolbarLandscapeSize orientation:self.effectiveOrientation panelSize:0.0 hasOnScreenNavigation:self.hasOnScreenNavigation];
_progressView.frame = (CGRect){{CGFloor(CGRectGetMidX(containerFrame) - _progressView.frame.size.width / 2.0f), CGFloor(CGRectGetMidY(containerFrame) - _progressView.frame.size.height / 2.0f)}, _progressView.frame.size};
}
- (void)setProgressVisible:(bool)progressVisible value:(CGFloat)value animated:(bool)animated
{
_progressVisible = progressVisible;
if (progressVisible && _progressView == nil)
{
_progressView = [[TGMessageImageViewOverlayView alloc] initWithFrame:CGRectMake(0.0f, 0.0f, 50.0f, 50.0f)];
_progressView.userInteractionEnabled = false;
CGSize referenceSize = [self referenceViewSize];
CGRect containerFrame = [TGPhotoEditorTabController photoContainerFrameForParentViewFrame:CGRectMake(0, 0, referenceSize.width, referenceSize.height) toolbarLandscapeSize:self.toolbarLandscapeSize orientation:self.effectiveOrientation panelSize:0.0 hasOnScreenNavigation:self.hasOnScreenNavigation];
_progressView.frame = (CGRect){{CGFloor(CGRectGetMidX(containerFrame) - _progressView.frame.size.width / 2.0f), CGFloor(CGRectGetMidY(containerFrame) - _progressView.frame.size.height / 2.0f)}, _progressView.frame.size};
}
if (progressVisible)
{
if (_progressView.superview == nil)
[_containerView addSubview:_progressView];
[self layoutProgressView];
_progressView.alpha = 1.0f;
}
@ -2306,8 +2329,9 @@
[_progressView removeFromSuperview];
}];
}
else
else {
[_progressView removeFromSuperview];
}
}
[_progressView setProgress:value cancelEnabled:false animated:animated];
@ -2383,23 +2407,29 @@
- (void)setPlayButtonHidden:(bool)hidden animated:(bool)animated
{
// if (animated)
// {
// _actionButton.hidden = false;
// [UIView animateWithDuration:0.15f animations:^
// {
// _actionButton.alpha = hidden ? 0.0f : 1.0f;
// } completion:^(BOOL finished)
// {
// if (finished)
// _actionButton.hidden = hidden;
// }];
// }
// else
// {
// _actionButton.alpha = hidden ? 0.0f : 1.0f;
// _actionButton.hidden = hidden;
// }
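// The play indicator is now drawn by the progress overlay view rather than the old action button commented out above.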
if (!hidden) {
[_progressView setPlay];
[self layoutProgressView];
}
if (animated)
{
_progressView.hidden = false;
_progressView.alpha = 0.0f;
[UIView animateWithDuration:0.15f animations:^
{
_progressView.alpha = hidden ? 0.0f : 1.0f;
} completion:^(BOOL finished)
{
if (finished)
_progressView.hidden = hidden;
}];
}
else
{
_progressView.alpha = hidden ? 0.0f : 1.0f;
_progressView.hidden = hidden;
}
}
#pragma mark - Video Scrubber Data Source & Delegate
@ -2417,10 +2447,7 @@
- (CGFloat)videoScrubberThumbnailAspectRatio:(TGMediaPickerGalleryVideoScrubber *)__unused videoScrubber
{
if (CGSizeEqualToSize(self.item.originalSize, CGSizeZero))
return 1.0f;
return self.item.originalSize.width / self.item.originalSize.height;
return 1.0f;
}
- (void)videoScrubberDidBeginScrubbing:(TGMediaPickerGalleryVideoScrubber *)__unused videoScrubber
@ -2436,6 +2463,36 @@
[previewController beginScrubbing:true];
}
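// Shrinks the enlarged cover preview back into the scrubber marker and clears it; called when trimming invalidates the chosen cover frame.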
- (void)resetDotImage {
UIView *snapshotView = nil;
UIView *dotSnapshotView = nil;
if (_dotImageView.image != nil) {
dotSnapshotView = [_dotMarkerView snapshotViewAfterScreenUpdates:false];
dotSnapshotView.frame = _dotMarkerView.frame;
[_dotMarkerView.superview addSubview:dotSnapshotView];
snapshotView = [_dotImageView snapshotViewAfterScreenUpdates:false];
snapshotView.frame = [_dotImageView.superview convertRect:_dotImageView.frame toView:_dotMarkerView.superview];
[_dotMarkerView.superview addSubview:snapshotView];
}
if (snapshotView != nil) {
[UIView animateWithDuration:0.15 animations:^{
snapshotView.center = _dotMarkerView.center;
snapshotView.transform = CGAffineTransformMakeScale(0.05, 0.05);
snapshotView.alpha = 0.0f;
dotSnapshotView.transform = CGAffineTransformMakeScale(0.3, 0.3);
dotSnapshotView.alpha = 0.0f;
} completion:^(BOOL finished) {
[snapshotView removeFromSuperview];
[dotSnapshotView removeFromSuperview];
}];
}
_dotImageView.image = nil;
_dotMarkerView.hidden = true;
}
- (void)updateDotImage:(bool)animated {
AVPlayer *player = _player;
if (player == nil) {
@ -2459,19 +2516,7 @@
[editor setImage:image forCropRect:adjustments.cropRect cropRotation:0.0 cropOrientation:adjustments.cropOrientation cropMirrored:adjustments.cropMirrored fullSize:false];
image = editor.currentResultImage;
}
UIGraphicsBeginImageContextWithOptions(CGSizeMake(160.0, 160.0), false, 1.0);
CGContextRef context = UIGraphicsGetCurrentContext();
CGContextAddEllipseInRect(context, CGRectMake(0.0, 0.0, 160.0, 160.0));
CGContextClip(context);
CGSize filledSize = TGScaleToFill(image.size, CGSizeMake(160, 160));
[image drawInRect:CGRectMake((160.0 - filledSize.width) / 2.0, (160.0 - filledSize.height) / 2.0, filledSize.width, filledSize.height)];
UIImage *finalImage = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
TGDispatchOnMainThread(^{
if (animated) {
UIView *snapshotView = nil;
@ -2499,7 +2544,13 @@
}];
}
_dotImageView.image = finalImage;
_dotMarkerView.hidden = false;
_dotImageView.image = image;
_dotImageView.cropRect = _photoEditor.cropRect;
_dotImageView.cropOrientation = _photoEditor.cropOrientation;
_dotImageView.cropMirrored = _photoEditor.cropMirrored;
[_dotImageView updateCropping];
[_scrubberView addSubview:_dotMarkerView];
_dotMarkerView.center = CGPointMake([_scrubberView scrubberPositionForPosition:_dotPosition].x + 7.0, 9.5);
@ -2516,7 +2567,7 @@
containerFrame.origin.x += screenEdges.left;
containerFrame.origin.y += screenEdges.top;
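// The scale now subtracts the crop area insets so the enlarged dot preview matches the visible crop region.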
CGFloat scale = containerFrame.size.width / 160.0;
CGFloat scale = (containerFrame.size.width - [TGPhotoAvatarCropView areaInsetSize].width * 2.0) / 160.0;
_dotImageView.center = CGPointMake(CGRectGetMidX(containerFrame), CGRectGetMidY(containerFrame));
_dotImageView.transform = CGAffineTransformMakeScale(scale, scale);
@ -2539,7 +2590,12 @@
[_dotImageView addSubview:snapshotView];
}
_dotImageView.image = finalImage;
_dotMarkerView.hidden = false;
_dotImageView.image = image;
_dotImageView.cropRect = _photoEditor.cropRect;
_dotImageView.cropOrientation = _photoEditor.cropOrientation;
_dotImageView.cropMirrored = _photoEditor.cropMirrored;
[_dotImageView updateCropping];
}
}
});
@ -2594,21 +2650,34 @@
- (void)videoScrubberDidEndEditing:(TGMediaPickerGalleryVideoScrubber *)videoScrubber
{
if (_resetDotPosition) {
_dotPosition = videoScrubber.trimStartValue;
_resetDotPosition = false;
}
[self setVideoEndTime:videoScrubber.trimEndValue];
[self seekVideo:videoScrubber.trimStartValue];
[self stopVideoPlayback:false];
[videoScrubber resetToStart];
[self startVideoPlayback:true];
[self setPlayButtonHidden:true animated:false];
}
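// Dragging either trim handle past the chosen cover position resets the cover dot; it is re-anchored at the new trim start once editing ends.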
- (void)videoScrubber:(TGMediaPickerGalleryVideoScrubber *)__unused videoScrubber editingStartValueDidChange:(NSTimeInterval)startValue
{
if (startValue > _dotPosition) {
_resetDotPosition = true;
[self resetDotImage];
}
[self seekVideo:startValue];
}
- (void)videoScrubber:(TGMediaPickerGalleryVideoScrubber *)__unused videoScrubber editingEndValueDidChange:(NSTimeInterval)endValue
{
if (endValue < _dotPosition) {
_resetDotPosition = true;
[self resetDotImage];
}
[self seekVideo:endValue];
}
@ -2635,6 +2704,18 @@
return timestamps;
}
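// Placeholder strip shown while real thumbnails are being generated: the first slot reuses the screen image, the rest a blurred copy of it.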
- (NSArray *)_placeholderThumbnails:(NSArray *)timestamps {
NSMutableArray *thumbnails = [[NSMutableArray alloc] init];
UIImage *blurredImage = TGBlurredRectangularImage(_screenImage, true, _screenImage.size, _screenImage.size, NULL, nil);
for (__unused NSNumber *value in timestamps) {
if (thumbnails.count == 0)
[thumbnails addObject:_screenImage];
else
[thumbnails addObject:blurredImage];
}
return thumbnails;
}
- (void)videoScrubber:(TGMediaPickerGalleryVideoScrubber *)__unused videoScrubber requestThumbnailImagesForTimestamps:(NSArray *)timestamps size:(CGSize)size isSummaryThumbnails:(bool)isSummaryThumbnails
{
if (timestamps.count == 0)
@ -2643,18 +2724,27 @@
id<TGMediaEditAdjustments> adjustments = [_photoEditor exportAdjustments];
SSignal *thumbnailsSignal = nil;
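// Thumbnails are reused from the cache when available; otherwise placeholders are emitted first and replaced once the real thumbnails arrive (they are cached on first delivery below).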
if ([self.item isKindOfClass:[TGMediaAsset class]]) {
thumbnailsSignal = [TGMediaAssetImageSignals videoThumbnailsForAsset:(TGMediaAsset *)self.item size:size timestamps:timestamps];
if (_cachedThumbnails != nil) {
thumbnailsSignal = [SSignal single:_cachedThumbnails];
} else if ([self.item isKindOfClass:[TGMediaAsset class]]) {
thumbnailsSignal = [[SSignal single:[self _placeholderThumbnails:timestamps]] then:[TGMediaAssetImageSignals videoThumbnailsForAsset:(TGMediaAsset *)self.item size:size timestamps:timestamps]];
} else if ([self.item isKindOfClass:[TGCameraCapturedVideo class]]) {
thumbnailsSignal = [[((TGCameraCapturedVideo *)self.item).avAsset takeLast] mapToSignal:^SSignal *(AVAsset *avAsset) {
return [TGMediaAssetImageSignals videoThumbnailsForAVAsset:avAsset size:size timestamps:timestamps];
return [[SSignal single:[self _placeholderThumbnails:timestamps]] then:[TGMediaAssetImageSignals videoThumbnailsForAVAsset:avAsset size:size timestamps:timestamps]];
}];
}
_requestingThumbnails = true;
__weak TGPhotoEditorController *weakSelf = self;
[_thumbnailsDisposable setDisposable:[[[thumbnailsSignal map:^NSArray *(NSArray *images) {
[_thumbnailsDisposable setDisposable:[[[[thumbnailsSignal onNext:^(NSArray *images) {
__strong TGPhotoEditorController *strongSelf = weakSelf;
if (strongSelf == nil)
return;
if (strongSelf->_cachedThumbnails == nil)
strongSelf->_cachedThumbnails = images;
}] map:^NSArray *(NSArray *images) {
if (adjustments.toolsApplied) {
NSMutableArray *editedImages = [[NSMutableArray alloc] init];
PGPhotoEditor *editor = [[PGPhotoEditor alloc] initWithOriginalSize:adjustments.originalSize adjustments:adjustments forVideo:false enableStickers:true];
@ -2681,7 +2771,7 @@
[images enumerateObjectsUsingBlock:^(UIImage *image, NSUInteger index, __unused BOOL *stop)
{
if (index < timestamps.count)
[strongSelf->_scrubberView setThumbnailImage:image forTimestamp:[timestamps[index] doubleValue] isSummaryThubmnail:isSummaryThumbnails];
[strongSelf->_scrubberView setThumbnailImage:image forTimestamp:[timestamps[index] doubleValue] index:index isSummaryThubmnail:isSummaryThumbnails];
}];
if (strongSelf->_dotImageSnapshotView != nil) {

View File

@ -130,6 +130,8 @@ const NSTimeInterval TGVideoEditMaximumGifDuration = 30.5;
TGVideoEditAdjustments *adjustments = [[[self class] alloc] init];
adjustments->_originalSize = originalSize;
adjustments->_preset = preset;
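// The animation preset now implies sending as a GIF.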
if (preset == TGMediaVideoConversionPresetAnimation)
adjustments->_sendAsGif = true;
return adjustments;
}
@ -165,10 +167,10 @@ const NSTimeInterval TGVideoEditMaximumGifDuration = 30.5;
adjustments->_trimStartValue = _trimStartValue;
adjustments->_trimEndValue = _trimEndValue;
adjustments->_paintingData = _paintingData;
adjustments->_sendAsGif = _sendAsGif;
adjustments->_preset = preset;
adjustments->_toolValues = _toolValues;
adjustments->_videoStartValue = _videoStartValue;
adjustments->_sendAsGif = preset == TGMediaVideoConversionPresetAnimation ? true : _sendAsGif;
if (maxDuration > DBL_EPSILON)
{

View File

@ -66,13 +66,13 @@ public enum AvatarGalleryEntry: Equatable {
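// The immediate thumbnail data was previously compared with `!=`, so otherwise identical entries never compared equal; both cases now use `==`.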
public static func ==(lhs: AvatarGalleryEntry, rhs: AvatarGalleryEntry) -> Bool {
switch lhs {
case let .topImage(lhsRepresentations, lhsIndexData, lhsImmediateThumbnailData):
if case let .topImage(rhsRepresentations, rhsIndexData, rhsImmediateThumbnailData) = rhs, lhsRepresentations == rhsRepresentations, lhsIndexData == rhsIndexData, lhsImmediateThumbnailData != rhsImmediateThumbnailData {
if case let .topImage(rhsRepresentations, rhsIndexData, rhsImmediateThumbnailData) = rhs, lhsRepresentations == rhsRepresentations, lhsIndexData == rhsIndexData, lhsImmediateThumbnailData == rhsImmediateThumbnailData {
return true
} else {
return false
}
case let .image(lhsId, lhsImageReference, lhsRepresentations, lhsVideoRepresentations, lhsPeer, lhsDate, lhsIndexData, lhsMessageId, lhsImmediateThumbnailData):
if case let .image(rhsId, rhsImageReference, rhsRepresentations, rhsVideoRepresentations, rhsPeer, rhsDate, rhsIndexData, rhsMessageId, rhsImmediateThumbnailData) = rhs, lhsId == rhsId, lhsImageReference == rhsImageReference, lhsRepresentations == rhsRepresentations, lhsVideoRepresentations == rhsVideoRepresentations, arePeersEqual(lhsPeer, rhsPeer), lhsDate == rhsDate, lhsIndexData == rhsIndexData, lhsMessageId == rhsMessageId, lhsImmediateThumbnailData != rhsImmediateThumbnailData {
if case let .image(rhsId, rhsImageReference, rhsRepresentations, rhsVideoRepresentations, rhsPeer, rhsDate, rhsIndexData, rhsMessageId, rhsImmediateThumbnailData) = rhs, lhsId == rhsId, lhsImageReference == rhsImageReference, lhsRepresentations == rhsRepresentations, lhsVideoRepresentations == rhsVideoRepresentations, arePeersEqual(lhsPeer, rhsPeer), lhsDate == rhsDate, lhsIndexData == rhsIndexData, lhsMessageId == rhsMessageId, lhsImmediateThumbnailData == rhsImmediateThumbnailData {
return true
} else {
return false
@ -265,7 +265,7 @@ public class AvatarGalleryController: ViewController, StandalonePresentableContr
} : nil, setMain: { [weak self] in
self?.setMainEntry(entry)
})
}), centralItemIndex: 0, synchronous: !isFirstTime)
}), centralItemIndex: strongSelf.centralEntryIndex, synchronous: !isFirstTime)
let ready = strongSelf.galleryNode.pager.ready() |> timeout(2.0, queue: Queue.mainQueue(), alternate: .single(Void())) |> afterNext { [weak strongSelf] _ in
strongSelf?.didSetReady = true

View File

@ -549,6 +549,13 @@ func editSettingsController(context: AccountContext, currentName: ItemListAvatar
let completedVideoImpl: (UIImage, URL, TGVideoEditAdjustments?) -> Void = { image, url, adjustments in
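// The still-frame resource and placeholder representation are now created up front (moved from further down) so the updating-avatar state appears before video conversion finishes.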
if let data = image.jpegData(compressionQuality: 0.6) {
let photoResource = LocalFileMediaResource(fileId: arc4random64())
context.account.postbox.mediaBox.storeResourceData(photoResource.id, data: data)
let representation = TelegramMediaImageRepresentation(dimensions: PixelDimensions(width: 640, height: 640), resource: photoResource)
updateState {
$0.withUpdatedUpdatingAvatar(.image(representation, true))
}
let signal = Signal<TelegramMediaResource, UploadPeerPhotoError> { subscriber in
var filteredPath = url.path
if filteredPath.hasPrefix("file://") {
@ -568,6 +575,10 @@ func editSettingsController(context: AccountContext, currentName: ItemListAvatar
let signalDisposable = signal.start(next: { next in
if let result = next as? TGMediaVideoConversionResult {
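// A cover frame produced by the conversion overwrites the initially stored still image for the photo resource.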
if let image = result.coverImage, let data = image.jpegData(compressionQuality: 0.7) {
context.account.postbox.mediaBox.storeResourceData(photoResource.id, data: data)
}
var value = stat()
if stat(result.fileURL.path, &value) == 0 {
if let data = try? Data(contentsOf: result.fileURL) {
@ -577,7 +588,7 @@ func editSettingsController(context: AccountContext, currentName: ItemListAvatar
} else {
resource = LocalFileMediaResource(fileId: arc4random64())
}
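// The converted data is now stored synchronously, presumably so it is fully written before the resource is handed to the uploader.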
context.account.postbox.mediaBox.storeResourceData(resource.id, data: data)
context.account.postbox.mediaBox.storeResourceData(resource.id, data: data, synchronous: true)
subscriber.putNext(resource)
}
}
@ -595,16 +606,9 @@ func editSettingsController(context: AccountContext, currentName: ItemListAvatar
}
}
let resource = LocalFileMediaResource(fileId: arc4random64())
context.account.postbox.mediaBox.storeResourceData(resource.id, data: data)
let representation = TelegramMediaImageRepresentation(dimensions: PixelDimensions(width: 640, height: 640), resource: resource)
updateState {
$0.withUpdatedUpdatingAvatar(.image(representation, true))
}
updateAvatarDisposable.set((signal
|> mapToSignal { videoResource in
return updateAccountPhoto(account: context.account, resource: resource, videoResource: videoResource, mapResourceToAvatarSizes: { resource, representations in
return updateAccountPhoto(account: context.account, resource: photoResource, videoResource: videoResource, mapResourceToAvatarSizes: { resource, representations in
return mapResourceToAvatarSizes(postbox: context.account.postbox, resource: resource, representations: representations)
})
} |> deliverOnMainQueue).start(next: { result in

View File

@ -1314,6 +1314,15 @@ public func settingsController(context: AccountContext, accountManager: AccountM
let completedVideoImpl: (UIImage, URL, TGVideoEditAdjustments?) -> Void = { image, url, adjustments in
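// Same reordering as in the edit-settings flow above: the photo resource and placeholder are created before conversion starts.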
if let data = image.jpegData(compressionQuality: 0.6) {
let photoResource = LocalFileMediaResource(fileId: arc4random64())
context.account.postbox.mediaBox.storeResourceData(photoResource.id, data: data)
let representation = TelegramMediaImageRepresentation(dimensions: PixelDimensions(width: 640, height: 640), resource: photoResource)
updateState { state in
var state = state
state.updatingAvatar = .image(representation, true)
return state
}
let signal = Signal<TelegramMediaResource, UploadPeerPhotoError> { subscriber in
var filteredPath = url.path
if filteredPath.hasPrefix("file://") {
@ -1333,6 +1342,10 @@ public func settingsController(context: AccountContext, accountManager: AccountM
let signalDisposable = signal.start(next: { next in
if let result = next as? TGMediaVideoConversionResult {
if let image = result.coverImage, let data = image.jpegData(compressionQuality: 0.7) {
context.account.postbox.mediaBox.storeResourceData(photoResource.id, data: data)
}
var value = stat()
if stat(result.fileURL.path, &value) == 0 {
if let data = try? Data(contentsOf: result.fileURL) {
@ -1342,7 +1355,7 @@ public func settingsController(context: AccountContext, accountManager: AccountM
} else {
resource = LocalFileMediaResource(fileId: arc4random64())
}
context.account.postbox.mediaBox.storeResourceData(resource.id, data: data)
context.account.postbox.mediaBox.storeResourceData(resource.id, data: data, synchronous: true)
subscriber.putNext(resource)
}
}
@ -1359,19 +1372,10 @@ public func settingsController(context: AccountContext, accountManager: AccountM
disposable.dispose()
}
}
let resource = LocalFileMediaResource(fileId: arc4random64())
context.account.postbox.mediaBox.storeResourceData(resource.id, data: data)
let representation = TelegramMediaImageRepresentation(dimensions: PixelDimensions(width: 640, height: 640), resource: resource)
updateState { state in
var state = state
state.updatingAvatar = .image(representation, true)
return state
}
updateAvatarDisposable.set((signal
|> mapToSignal { videoResource in
return updateAccountPhoto(account: context.account, resource: resource, videoResource: videoResource, mapResourceToAvatarSizes: { resource, representations in
return updateAccountPhoto(account: context.account, resource: photoResource, videoResource: videoResource, mapResourceToAvatarSizes: { resource, representations in
return mapResourceToAvatarSizes(postbox: context.account.postbox, resource: resource, representations: representations)
})
} |> deliverOnMainQueue).start(next: { result in

View File

@ -182,7 +182,7 @@ func legacyInstantVideoController(theme: PresentationTheme, panelFrame: CGRect,
let resource: TelegramMediaResource
if let liveUploadData = liveUploadData as? LegacyLiveUploadInterfaceResult, resourceAdjustments == nil, let data = try? Data(contentsOf: videoUrl) {
resource = LocalFileMediaResource(fileId: liveUploadData.id)
context.account.postbox.mediaBox.storeResourceData(resource.id, data: data)
context.account.postbox.mediaBox.storeResourceData(resource.id, data: data, synchronous: true)
} else {
resource = LocalFileVideoMediaResource(randomId: arc4random64(), path: videoUrl.path, adjustments: resourceAdjustments)
}