Mirror of https://github.com/Swiftgram/Telegram-iOS.git (synced 2025-06-16 05:55:20 +00:00)

Photo editor improvements

Commit 025130a8b9 (parent 91ff180db5)
@@ -5540,3 +5540,7 @@ Any member of this group will be able to see messages in the channel.";

"Conversation.PrivateChannelTimeLimitedAlertJoin" = "Join";

"KeyCommand.SearchInChat" = "Search in Chat";

"PhotoEditor.SkinTool" = "Soften Skin";
"PhotoEditor.BlurToolPortrait" = "Portrait";
"PhotoEditor.SelectCoverFrame" = "Choose a cover for your profile video";
@@ -695,9 +695,9 @@ final class UniversalVideoGalleryItemNode: ZoomableContentGalleryItemNode {

 if !isAnimated && !disablePlayerControls && !disablePictureInPicture {
     let rightBarButtonItem = UIBarButtonItem(image: pictureInPictureButtonImage, style: .plain, target: self, action: #selector(self.pictureInPictureButtonPressed))
     barButtonItems.append(rightBarButtonItem)
-    hasPictureInPicture = true
+    self.hasPictureInPicture = true
 } else {
-    hasPictureInPicture = false
+    self.hasPictureInPicture = false
 }
 self._rightBarButtonItems.set(.single(barButtonItems))
submodules/LegacyComponents/LegacyImages.xcassets/Editor/BlurLinear.imageset/Contents.json (vendored, new file, 12 lines)
@@ -0,0 +1,12 @@
{
  "images" : [
    {
      "filename" : "ic_editor_blurlinear.pdf",
      "idiom" : "universal"
    }
  ],
  "info" : {
    "author" : "xcode",
    "version" : 1
  }
}
Binary file not shown.
submodules/LegacyComponents/LegacyImages.xcassets/Editor/BlurOff.imageset/Contents.json (vendored, new file, 12 lines)
@@ -0,0 +1,12 @@
{
  "images" : [
    {
      "filename" : "ic_editor_blurnope.pdf",
      "idiom" : "universal"
    }
  ],
  "info" : {
    "author" : "xcode",
    "version" : 1
  }
}
BIN submodules/LegacyComponents/LegacyImages.xcassets/Editor/BlurOff.imageset/ic_editor_blurnope.pdf (vendored, new file)
Binary file not shown.
submodules/LegacyComponents/LegacyImages.xcassets/Editor/BlurPortrait.imageset/Contents.json (vendored, new file, 12 lines)
@@ -0,0 +1,12 @@
{
  "images" : [
    {
      "filename" : "ic_editor_blurportrait.pdf",
      "idiom" : "universal"
    }
  ],
  "info" : {
    "author" : "xcode",
    "version" : 1
  }
}
Binary file not shown.
submodules/LegacyComponents/LegacyImages.xcassets/Editor/BlurRadial.imageset/Contents.json (vendored, new file, 12 lines)
@@ -0,0 +1,12 @@
{
  "images" : [
    {
      "filename" : "ic_editor_blurradian.pdf",
      "idiom" : "universal"
    }
  ],
  "info" : {
    "author" : "xcode",
    "version" : 1
  }
}
Binary file not shown.
@@ -24,8 +24,8 @@ typedef enum
     PGCameraModeUndefined,
     PGCameraModePhoto,
     PGCameraModeVideo,
-    PGCameraModeSquare,
-    PGCameraModeClip
+    PGCameraModeSquarePhoto,
+    PGCameraModeSquareVideo
 } PGCameraMode;

 typedef enum
@@ -57,6 +57,8 @@

@property (nonatomic, assign) CGRect previewViewFrame;

- (instancetype)initWithFrame:(CGRect)frame avatar:(bool)avatar;

- (void)setDocumentFrameHidden:(bool)hidden;
- (void)setCameraMode:(PGCameraMode)mode;
- (void)updateForCameraModeChangeWithPreviousMode:(PGCameraMode)previousMode;
@@ -10,4 +10,6 @@

- (void)setHidden:(bool)hidden animated:(bool)animated;

- (instancetype)initWithFrame:(CGRect)frame avatar:(bool)avatar;

@end
@@ -1,7 +1,6 @@
 #import <Foundation/Foundation.h>

 #import <Photos/Photos.h>
-#import <AssetsLibrary/AssetsLibrary.h>

 #import <LegacyComponents/TGMediaSelectionContext.h>
 #import <LegacyComponents/TGMediaEditingContext.h>
@@ -30,7 +29,6 @@ typedef enum
 @interface TGMediaAsset : NSObject <TGMediaSelectableItem>

 @property (nonatomic, readonly) NSString *identifier;
 @property (nonatomic, readonly) NSURL *url;
 @property (nonatomic, readonly) CGSize dimensions;
 @property (nonatomic, readonly) NSDate *date;
 @property (nonatomic, readonly) bool isVideo;
@@ -44,10 +42,8 @@ typedef enum
 @property (nonatomic, readonly) TGMediaAssetSubtype subtypes;

 - (instancetype)initWithPHAsset:(PHAsset *)asset;
-- (instancetype)initWithALAsset:(ALAsset *)asset;

 @property (nonatomic, readonly) PHAsset *backingAsset;
-@property (nonatomic, readonly) ALAsset *backingLegacyAsset;

 + (PHAssetMediaType)assetMediaTypeForAssetType:(TGMediaAssetType)assetType;
@@ -9,7 +9,6 @@

 @property (nonatomic, readonly) NSUInteger count;

-- (instancetype)initForALAssetsReversed:(bool)reversed;
 - (instancetype)initWithPHFetchResult:(PHFetchResult *)fetchResult reversed:(bool)reversed;

 - (TGMediaAsset *)assetAtIndex:(NSUInteger)index;
@@ -17,6 +16,4 @@

 - (NSSet *)itemsIdentifiers;

-- (void)_appendALAsset:(ALAsset *)asset;

 @end
@@ -1,5 +1,4 @@
 #import <Photos/Photos.h>
-#import <AssetsLibrary/AssetsLibrary.h>

 #import <LegacyComponents/TGMediaAsset.h>

@@ -33,12 +32,9 @@ typedef enum

 @property (nonatomic, readonly) PHFetchResult *backingFetchResult;
 @property (nonatomic, readonly) PHAssetCollection *backingAssetCollection;
-@property (nonatomic, readonly) ALAssetsGroup *backingAssetsGroup;

 - (instancetype)initWithPHFetchResult:(PHFetchResult *)fetchResult;
 - (instancetype)initWithPHAssetCollection:(PHAssetCollection *)collection fetchResult:(PHFetchResult *)fetchResult;
-- (instancetype)initWithALAssetsGroup:(ALAssetsGroup *)assetsGroup;
-- (instancetype)initWithALAssetsGroup:(ALAssetsGroup *)assetsGroup subtype:(TGMediaAssetGroupSubtype)subtype;

 - (NSArray *)latestAssets;
@@ -1,14 +1,18 @@
#import <UIKit/UIKit.h>

@class AVPlayer;

@interface TGPhotoAvatarCropView : UIView

@property (nonatomic, strong) UIImage *image;
@property (nonatomic, strong) AVPlayer *player;

@property (nonatomic, assign) CGRect cropRect;
@property (nonatomic, assign) UIImageOrientation cropOrientation;
@property (nonatomic, assign) bool cropMirrored;

@property (nonatomic, copy) void(^croppingChanged)(void);
@property (nonatomic, copy) void(^interactionBegan)(void);
@property (nonatomic, copy) void(^interactionEnded)(void);

@property (nonatomic, readonly) bool isTracking;
@@ -30,6 +34,7 @@

- (void)transitionInFinishedFromCamera:(bool)fromCamera;

- (void)invalidateCropRect;
- (void)invalidateVideoView;

- (void)hideImageForCustomTransition;
@@ -54,6 +54,7 @@ typedef enum {

@property (nonatomic, assign) bool skipInitialTransition;
@property (nonatomic, assign) bool dontHideStatusBar;
@property (nonatomic, strong) PGCameraShotMetadata *metadata;
@property (nonatomic, strong) NSArray *faces;

@property (nonatomic, strong) TGPhotoEntitiesContainerView *entitiesView;
@@ -32,6 +32,8 @@

@property (nonatomic, copy) void (^tabsChanged)(void);

@property (nonatomic, copy) void (^controlVideoPlayback)(bool);
@property (nonatomic, copy) void (^controlVideoSeek)(NSTimeInterval);
@property (nonatomic, copy) void (^controlVideoEndTime)(NSTimeInterval);

@property (nonatomic, assign) TGPhotoEditorTab availableTabs;
@@ -16,6 +16,7 @@ typedef NS_OPTIONS(NSUInteger, TGPhotoEditorTab) {
    TGPhotoEditorTintTab = 1 << 11,
    TGPhotoEditorBlurTab = 1 << 12,
    TGPhotoEditorCurvesTab = 1 << 13,
    TGPhotoEditorPreviewTab = 1 << 14
};

typedef enum
@@ -3,12 +3,6 @@

// Base classes
#import "GPUImageContext.h"
#import "GPUImageOutput.h"
#import "GPUImageView.h"
#import "GPUImageVideoCamera.h"
#import "GPUImageStillCamera.h"
#import "GPUImagePicture.h"
#import "GPUImageRawDataInput.h"
#import "GPUImageRawDataOutput.h"
#import "GPUImageFilterGroup.h"
#import "GPUImageFramebuffer.h"
#import "GPUImageFramebufferCache.h"
@@ -16,4 +10,5 @@

// Filters
#import "GPUImageFilter.h"
#import "GPUImageTwoInputFilter.h"
#import "GPUImageThreeInputFilter.h"
#import "GPUImageGaussianBlurFilter.h"
submodules/LegacyComponents/Sources/GPUImageDissolveBlendFilter.h (new executable file, 11 lines)
@@ -0,0 +1,11 @@
#import "GPUImageTwoInputFilter.h"

@interface GPUImageDissolveBlendFilter : GPUImageTwoInputFilter
{
    GLint mixUniform;
}

// Mix ranges from 0.0 (only image 1) to 1.0 (only image 2), with 0.5 (half of either) as the normal level
@property(readwrite, nonatomic) CGFloat mix;

@end
submodules/LegacyComponents/Sources/GPUImageDissolveBlendFilter.m (new executable file, 52 lines)
@@ -0,0 +1,52 @@
#import "GPUImageDissolveBlendFilter.h"

NSString *const kGPUImageDissolveBlendFragmentShaderString = SHADER_STRING
(
 varying highp vec2 texCoord;
 varying highp vec2 texCoord2;

 uniform sampler2D sourceImage;
 uniform sampler2D inputImageTexture2;
 uniform lowp float mixturePercent;

 void main()
 {
     lowp vec4 textureColor = texture2D(sourceImage, texCoord);
     lowp vec4 textureColor2 = texture2D(inputImageTexture2, texCoord2);

     gl_FragColor = mix(textureColor, textureColor2, mixturePercent);
 }
);

@implementation GPUImageDissolveBlendFilter

@synthesize mix = _mix;

#pragma mark -
#pragma mark Initialization and teardown

- (id)init;
{
    if (!(self = [super initWithFragmentShaderFromString:kGPUImageDissolveBlendFragmentShaderString]))
    {
        return nil;
    }

    mixUniform = [filterProgram uniformIndex:@"mixturePercent"];
    self.mix = 0.5;

    return self;
}

#pragma mark -
#pragma mark Accessors

- (void)setMix:(CGFloat)newValue;
{
    _mix = newValue;

    [self setFloat:_mix forUniform:mixUniform program:filterProgram];
}

@end
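For reference, a minimal usage sketch for the dissolve blend. The GPUImagePicture sources and the firstImage/secondImage UIImage inputs here follow the stock GPUImage framework and are assumptions, not part of this commit:

// Cross-fade two stills; mix = 0.25 keeps 75% of the first image.
GPUImagePicture *first = [[GPUImagePicture alloc] initWithImage:firstImage];
GPUImagePicture *second = [[GPUImagePicture alloc] initWithImage:secondImage];

GPUImageDissolveBlendFilter *dissolve = [[GPUImageDissolveBlendFilter alloc] init];
dissolve.mix = 0.25;

[first addTarget:dissolve];  // becomes input texture 1
[second addTarget:dissolve]; // becomes input texture 2

[dissolve useNextFrameForImageCapture];
[first processImage];
[second processImage];
UIImage *blended = [dissolve imageFromCurrentFramebuffer];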
submodules/LegacyComponents/Sources/GPUImageExposureFilter.h (new executable file, 11 lines)
@@ -0,0 +1,11 @@
#import "GPUImageFilter.h"

@interface GPUImageExposureFilter : GPUImageFilter
{
    GLint exposureUniform;
}

// Exposure ranges from -10.0 to 10.0, with 0.0 as the normal level
@property(readwrite, nonatomic) CGFloat exposure;

@end
submodules/LegacyComponents/Sources/GPUImageExposureFilter.m (new executable file, 49 lines)
@@ -0,0 +1,49 @@
#import "GPUImageExposureFilter.h"

NSString *const kGPUImageExposureFragmentShaderString = SHADER_STRING
(
 varying highp vec2 texCoord;

 uniform sampler2D sourceImage;
 uniform highp float exposure;

 void main()
 {
     highp vec4 textureColor = texture2D(sourceImage, texCoord);

     gl_FragColor = vec4(textureColor.rgb * pow(2.0, exposure), textureColor.w);
 }
);

@implementation GPUImageExposureFilter

@synthesize exposure = _exposure;

#pragma mark -
#pragma mark Initialization and teardown

- (id)init;
{
    if (!(self = [super initWithFragmentShaderFromString:kGPUImageExposureFragmentShaderString]))
    {
        return nil;
    }

    exposureUniform = [filterProgram uniformIndex:@"exposure"];
    self.exposure = 0.0;

    return self;
}

#pragma mark -
#pragma mark Accessors

- (void)setExposure:(CGFloat)newValue;
{
    _exposure = newValue;

    [self setFloat:_exposure forUniform:exposureUniform program:filterProgram];
}

@end
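The shader scales linear RGB by pow(2.0, exposure), so the property is effectively measured in photographic stops. A short sketch (the variable name is illustrative):

GPUImageExposureFilter *exposureFilter = [[GPUImageExposureFilter alloc] init];
exposureFilter.exposure = 1.0;  // +1 stop: a linear value of 0.25 becomes 0.5
exposureFilter.exposure = -1.0; // -1 stop: 0.25 becomes 0.125
// Large positive values clip to white once rgb * 2^exposure exceeds 1.0.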
submodules/LegacyComponents/Sources/GPUImageFilterGroup.h (new executable file, 19 lines)
@@ -0,0 +1,19 @@
#import "GPUImageOutput.h"
#import "GPUImageFilter.h"

@interface GPUImageFilterGroup : GPUImageOutput <GPUImageInput>
{
    NSMutableArray *filters;
    BOOL isEndProcessing;
}

@property(readwrite, nonatomic, strong) GPUImageOutput<GPUImageInput> *terminalFilter;
@property(readwrite, nonatomic, strong) NSArray *initialFilters;
@property(readwrite, nonatomic, strong) GPUImageOutput<GPUImageInput> *inputFilterToIgnoreForUpdates;

// Filter management
- (void)addFilter:(GPUImageOutput<GPUImageInput> *)newFilter;
- (GPUImageOutput<GPUImageInput> *)filterAtIndex:(NSUInteger)filterIndex;
- (NSUInteger)filterCount;

@end
submodules/LegacyComponents/Sources/GPUImageFilterGroup.m (new executable file, 207 lines)
@@ -0,0 +1,207 @@
#import "GPUImageFilterGroup.h"

@implementation GPUImageFilterGroup

@synthesize terminalFilter = _terminalFilter;
@synthesize initialFilters = _initialFilters;
@synthesize inputFilterToIgnoreForUpdates = _inputFilterToIgnoreForUpdates;

- (id)init;
{
    if (!(self = [super init]))
    {
        return nil;
    }

    filters = [[NSMutableArray alloc] init];

    return self;
}

#pragma mark -
#pragma mark Filter management

- (void)addFilter:(GPUImageOutput<GPUImageInput> *)newFilter;
{
    [filters addObject:newFilter];
}

- (GPUImageOutput<GPUImageInput> *)filterAtIndex:(NSUInteger)filterIndex;
{
    return [filters objectAtIndex:filterIndex];
}

- (NSUInteger)filterCount;
{
    return [filters count];
}

#pragma mark -
#pragma mark Still image processing

- (void)useNextFrameForImageCapture;
{
    [self.terminalFilter useNextFrameForImageCapture];
}

- (CGImageRef)newCGImageFromCurrentlyProcessedOutput;
{
    return [self.terminalFilter newCGImageFromCurrentlyProcessedOutput];
}

#pragma mark -
#pragma mark GPUImageOutput overrides

- (void)setTargetToIgnoreForUpdates:(id<GPUImageInput>)targetToIgnoreForUpdates;
{
    [_terminalFilter setTargetToIgnoreForUpdates:targetToIgnoreForUpdates];
}

- (void)addTarget:(id<GPUImageInput>)newTarget atTextureLocation:(NSInteger)textureLocation;
{
    [_terminalFilter addTarget:newTarget atTextureLocation:textureLocation];
}

- (void)removeTarget:(id<GPUImageInput>)targetToRemove;
{
    [_terminalFilter removeTarget:targetToRemove];
}

- (void)removeAllTargets;
{
    [_terminalFilter removeAllTargets];
}

- (NSArray *)targets;
{
    return [_terminalFilter targets];
}

- (void)setFrameProcessingCompletionBlock:(void (^)(GPUImageOutput *, CMTime))frameProcessingCompletionBlock;
{
    [_terminalFilter setFrameProcessingCompletionBlock:frameProcessingCompletionBlock];
}

- (void (^)(GPUImageOutput *, CMTime))frameProcessingCompletionBlock;
{
    return [_terminalFilter frameProcessingCompletionBlock];
}

#pragma mark -
#pragma mark GPUImageInput protocol

- (void)newFrameReadyAtTime:(CMTime)frameTime atIndex:(NSInteger)textureIndex;
{
    for (GPUImageOutput<GPUImageInput> *currentFilter in _initialFilters)
    {
        if (currentFilter != self.inputFilterToIgnoreForUpdates)
        {
            [currentFilter newFrameReadyAtTime:frameTime atIndex:textureIndex];
        }
    }
}

- (void)setInputFramebuffer:(GPUImageFramebuffer *)newInputFramebuffer atIndex:(NSInteger)textureIndex;
{
    for (GPUImageOutput<GPUImageInput> *currentFilter in _initialFilters)
    {
        [currentFilter setInputFramebuffer:newInputFramebuffer atIndex:textureIndex];
    }
}

- (NSInteger)nextAvailableTextureIndex;
{
//    if ([_initialFilters count] > 0)
//    {
//        return [[_initialFilters objectAtIndex:0] nextAvailableTextureIndex];
//    }

    return 0;
}

- (void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex;
{
    for (GPUImageOutput<GPUImageInput> *currentFilter in _initialFilters)
    {
        [currentFilter setInputSize:newSize atIndex:textureIndex];
    }
}

- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex;
{
    for (GPUImageOutput<GPUImageInput> *currentFilter in _initialFilters)
    {
        [currentFilter setInputRotation:newInputRotation atIndex:(NSInteger)textureIndex];
    }
}

- (void)forceProcessingAtSize:(CGSize)frameSize;
{
    for (GPUImageOutput<GPUImageInput> *currentFilter in filters)
    {
        [currentFilter forceProcessingAtSize:frameSize];
    }
}

- (void)forceProcessingAtSizeRespectingAspectRatio:(CGSize)frameSize;
{
    for (GPUImageOutput<GPUImageInput> *currentFilter in filters)
    {
        [currentFilter forceProcessingAtSizeRespectingAspectRatio:frameSize];
    }
}

- (CGSize)maximumOutputSize;
{
    // I'm temporarily disabling adjustments for smaller output sizes until I figure out how to make this work better
    return CGSizeZero;

    /*
    if (CGSizeEqualToSize(cachedMaximumOutputSize, CGSizeZero))
    {
        for (id<GPUImageInput> currentTarget in _initialFilters)
        {
            if ([currentTarget maximumOutputSize].width > cachedMaximumOutputSize.width)
            {
                cachedMaximumOutputSize = [currentTarget maximumOutputSize];
            }
        }
    }

    return cachedMaximumOutputSize;
    */
}

- (void)endProcessing;
{
    if (!isEndProcessing)
    {
        isEndProcessing = YES;

        for (id<GPUImageInput> currentTarget in _initialFilters)
        {
            [currentTarget endProcessing];
        }
    }
}

- (BOOL)wantsMonochromeInput;
{
    BOOL allInputsWantMonochromeInput = YES;
    for (GPUImageOutput<GPUImageInput> *currentFilter in _initialFilters)
    {
        allInputsWantMonochromeInput = allInputsWantMonochromeInput && [currentFilter wantsMonochromeInput];
    }

    return allInputsWantMonochromeInput;
}

- (void)setCurrentlyReceivingMonochromeInput:(BOOL)newValue;
{
    for (GPUImageOutput<GPUImageInput> *currentFilter in _initialFilters)
    {
        [currentFilter setCurrentlyReceivingMonochromeInput:newValue];
    }
}

@end
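A group does no rendering of its own: incoming GPUImageInput calls fan out to initialFilters, while target management is proxied to terminalFilter, so both must be wired explicitly. A minimal sketch (not part of the diff) chaining two of the filters added in this commit:

GPUImageFilterGroup *group = [[GPUImageFilterGroup alloc] init];

GPUImageExposureFilter *exposure = [[GPUImageExposureFilter alloc] init];
GPUImageSharpenFilter *sharpen = [[GPUImageSharpenFilter alloc] init];
[group addFilter:exposure];
[group addFilter:sharpen];

[exposure addTarget:sharpen];       // internal chain

group.initialFilters = @[exposure]; // receives the group's input frames
group.terminalFilter = sharpen;     // delivers the group's output to targets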
submodules/LegacyComponents/Sources/GPUImageGaussianBlurFilter.h (new executable file, 36 lines)
@@ -0,0 +1,36 @@
#import "GPUImageTwoPassTextureSamplingFilter.h"

/** A Gaussian blur filter
    Interpolated optimization based on Daniel Rákos' work at http://rastergrid.com/blog/2010/09/efficient-gaussian-blur-with-linear-sampling/
*/

@interface GPUImageGaussianBlurFilter : GPUImageTwoPassTextureSamplingFilter
{
    BOOL shouldResizeBlurRadiusWithImageSize;
    CGFloat _blurRadiusInPixels;
}

/** A multiplier for the spacing between texels, ranging from 0.0 on up, with a default of 1.0. Adjusting this may slightly increase the blur strength, but will introduce artifacts in the result.
*/
@property (readwrite, nonatomic) CGFloat texelSpacingMultiplier;

/** A radius in pixels to use for the blur, with a default of 2.0. This adjusts the sigma variable in the Gaussian distribution function.
*/
@property (readwrite, nonatomic) CGFloat blurRadiusInPixels;

/** Setting these properties will allow the blur radius to scale with the size of the image. These properties are mutually exclusive; setting either will set the other to 0.
*/
@property (readwrite, nonatomic) CGFloat blurRadiusAsFractionOfImageWidth;
@property (readwrite, nonatomic) CGFloat blurRadiusAsFractionOfImageHeight;

/// The number of times to sequentially blur the incoming image. The more passes, the slower the filter.
@property(readwrite, nonatomic) NSUInteger blurPasses;

+ (NSString *)vertexShaderForStandardBlurOfRadius:(NSUInteger)blurRadius sigma:(CGFloat)sigma;
+ (NSString *)fragmentShaderForStandardBlurOfRadius:(NSUInteger)blurRadius sigma:(CGFloat)sigma;
+ (NSString *)vertexShaderForOptimizedBlurOfRadius:(NSUInteger)blurRadius sigma:(CGFloat)sigma;
+ (NSString *)fragmentShaderForOptimizedBlurOfRadius:(NSUInteger)blurRadius sigma:(CGFloat)sigma;

- (void)switchToVertexShader:(NSString *)newVertexShader fragmentShader:(NSString *)newFragmentShader;

@end
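Typical configuration, per the property docs above (a sketch, not part of the diff):

GPUImageGaussianBlurFilter *blur = [[GPUImageGaussianBlurFilter alloc] init];
blur.blurRadiusInPixels = 8.0; // sigma; the setter in the .m below rounds it and regenerates the shaders
blur.blurPasses = 2;           // re-blur the already-blurred output once more

// Alternatively, let the radius scale with the image (mutually exclusive properties):
blur.blurRadiusAsFractionOfImageWidth = 0.01; // resets the height fraction to 0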
submodules/LegacyComponents/Sources/GPUImageGaussianBlurFilter.m (new executable file, 485 lines)
@@ -0,0 +1,485 @@
#import "GPUImageGaussianBlurFilter.h"

@implementation GPUImageGaussianBlurFilter

@synthesize texelSpacingMultiplier = _texelSpacingMultiplier;
@synthesize blurRadiusInPixels = _blurRadiusInPixels;
@synthesize blurRadiusAsFractionOfImageWidth = _blurRadiusAsFractionOfImageWidth;
@synthesize blurRadiusAsFractionOfImageHeight = _blurRadiusAsFractionOfImageHeight;
@synthesize blurPasses = _blurPasses;

#pragma mark -
#pragma mark Initialization and teardown

- (id)initWithFirstStageVertexShaderFromString:(NSString *)firstStageVertexShaderString firstStageFragmentShaderFromString:(NSString *)firstStageFragmentShaderString secondStageVertexShaderFromString:(NSString *)secondStageVertexShaderString secondStageFragmentShaderFromString:(NSString *)secondStageFragmentShaderString
{
    if (!(self = [super initWithFirstStageVertexShaderFromString:firstStageVertexShaderString firstStageFragmentShaderFromString:firstStageFragmentShaderString secondStageVertexShaderFromString:secondStageVertexShaderString secondStageFragmentShaderFromString:secondStageFragmentShaderString]))
    {
        return nil;
    }

    self.texelSpacingMultiplier = 1.0;
    _blurRadiusInPixels = 2.0;
    shouldResizeBlurRadiusWithImageSize = NO;

    return self;
}

- (id)init;
{
    NSString *currentGaussianBlurVertexShader = [[self class] vertexShaderForOptimizedBlurOfRadius:4 sigma:2.0];
    NSString *currentGaussianBlurFragmentShader = [[self class] fragmentShaderForOptimizedBlurOfRadius:4 sigma:2.0];

    return [self initWithFirstStageVertexShaderFromString:currentGaussianBlurVertexShader firstStageFragmentShaderFromString:currentGaussianBlurFragmentShader secondStageVertexShaderFromString:currentGaussianBlurVertexShader secondStageFragmentShaderFromString:currentGaussianBlurFragmentShader];
}

#pragma mark -
#pragma mark Auto-generation of optimized Gaussian shaders

// "Implementation limit of 32 varying components exceeded" - Max number of varyings for these GPUs

+ (NSString *)vertexShaderForStandardBlurOfRadius:(NSUInteger)blurRadius sigma:(CGFloat)sigma;
{
    if (blurRadius < 1)
    {
        return kGPUImageVertexShaderString;
    }

    // NSLog(@"Max varyings: %d", [GPUImageContext maximumVaryingVectorsForThisDevice]);
    NSMutableString *shaderString = [[NSMutableString alloc] init];

    // Header
    [shaderString appendFormat:@"\
     attribute vec4 position;\n\
     attribute vec4 inputTexCoord;\n\
     \n\
     uniform float texelWidthOffset;\n\
     uniform float texelHeightOffset;\n\
     \n\
     varying vec2 blurCoordinates[%lu];\n\
     \n\
     void main()\n\
     {\n\
     gl_Position = position;\n\
     \n\
     vec2 singleStepOffset = vec2(texelWidthOffset, texelHeightOffset);\n", (unsigned long)(blurRadius * 2 + 1) ];

    // Inner offset loop
    for (NSUInteger currentBlurCoordinateIndex = 0; currentBlurCoordinateIndex < (blurRadius * 2 + 1); currentBlurCoordinateIndex++)
    {
        NSInteger offsetFromCenter = currentBlurCoordinateIndex - blurRadius;
        if (offsetFromCenter < 0)
        {
            [shaderString appendFormat:@"blurCoordinates[%ld] = inputTexCoord.xy - singleStepOffset * %f;\n", (unsigned long)currentBlurCoordinateIndex, (GLfloat)(-offsetFromCenter)];
        }
        else if (offsetFromCenter > 0)
        {
            [shaderString appendFormat:@"blurCoordinates[%ld] = inputTexCoord.xy + singleStepOffset * %f;\n", (unsigned long)currentBlurCoordinateIndex, (GLfloat)(offsetFromCenter)];
        }
        else
        {
            [shaderString appendFormat:@"blurCoordinates[%ld] = inputTexCoord.xy;\n", (unsigned long)currentBlurCoordinateIndex];
        }
    }

    // Footer
    [shaderString appendString:@"}\n"];

    return shaderString;
}

+ (NSString *)fragmentShaderForStandardBlurOfRadius:(NSUInteger)blurRadius sigma:(CGFloat)sigma;
{
    if (blurRadius < 1)
    {
        return kGPUImagePassthroughFragmentShaderString;
    }

    // First, generate the normal Gaussian weights for a given sigma
    GLfloat *standardGaussianWeights = calloc(blurRadius + 1, sizeof(GLfloat));
    GLfloat sumOfWeights = 0.0;
    for (NSUInteger currentGaussianWeightIndex = 0; currentGaussianWeightIndex < blurRadius + 1; currentGaussianWeightIndex++)
    {
        standardGaussianWeights[currentGaussianWeightIndex] = (1.0 / sqrt(2.0 * M_PI * pow(sigma, 2.0))) * exp(-pow(currentGaussianWeightIndex, 2.0) / (2.0 * pow(sigma, 2.0)));

        if (currentGaussianWeightIndex == 0)
        {
            sumOfWeights += standardGaussianWeights[currentGaussianWeightIndex];
        }
        else
        {
            sumOfWeights += 2.0 * standardGaussianWeights[currentGaussianWeightIndex];
        }
    }

    // Next, normalize these weights to prevent the clipping of the Gaussian curve at the end of the discrete samples from reducing luminance
    for (NSUInteger currentGaussianWeightIndex = 0; currentGaussianWeightIndex < blurRadius + 1; currentGaussianWeightIndex++)
    {
        standardGaussianWeights[currentGaussianWeightIndex] = standardGaussianWeights[currentGaussianWeightIndex] / sumOfWeights;
    }

    // Finally, generate the shader from these weights
    NSMutableString *shaderString = [[NSMutableString alloc] init];

    // Header
    [shaderString appendFormat:@"\
     uniform sampler2D sourceImage;\n\
     \n\
     varying highp vec2 blurCoordinates[%lu];\n\
     \n\
     void main()\n\
     {\n\
     lowp vec4 sum = vec4(0.0);\n", (unsigned long)(blurRadius * 2 + 1) ];

    // Inner texture loop
    for (NSUInteger currentBlurCoordinateIndex = 0; currentBlurCoordinateIndex < (blurRadius * 2 + 1); currentBlurCoordinateIndex++)
    {
        NSInteger offsetFromCenter = currentBlurCoordinateIndex - blurRadius;
        if (offsetFromCenter < 0)
        {
            [shaderString appendFormat:@"sum += texture2D(sourceImage, blurCoordinates[%lu]) * %f;\n", (unsigned long)currentBlurCoordinateIndex, standardGaussianWeights[-offsetFromCenter]];
        }
        else
        {
            [shaderString appendFormat:@"sum += texture2D(sourceImage, blurCoordinates[%lu]) * %f;\n", (unsigned long)currentBlurCoordinateIndex, standardGaussianWeights[offsetFromCenter]];
        }
    }

    // Footer
    [shaderString appendString:@"\
     gl_FragColor = sum;\n\
     }\n"];

    free(standardGaussianWeights);
    return shaderString;
}

+ (NSString *)vertexShaderForOptimizedBlurOfRadius:(NSUInteger)blurRadius sigma:(CGFloat)sigma;
{
    if (blurRadius < 1)
    {
        return kGPUImageVertexShaderString;
    }

    // First, generate the normal Gaussian weights for a given sigma
    GLfloat *standardGaussianWeights = calloc(blurRadius + 1, sizeof(GLfloat));
    GLfloat sumOfWeights = 0.0;
    for (NSUInteger currentGaussianWeightIndex = 0; currentGaussianWeightIndex < blurRadius + 1; currentGaussianWeightIndex++)
    {
        standardGaussianWeights[currentGaussianWeightIndex] = (1.0 / sqrt(2.0 * M_PI * pow(sigma, 2.0))) * exp(-pow(currentGaussianWeightIndex, 2.0) / (2.0 * pow(sigma, 2.0)));

        if (currentGaussianWeightIndex == 0)
        {
            sumOfWeights += standardGaussianWeights[currentGaussianWeightIndex];
        }
        else
        {
            sumOfWeights += 2.0 * standardGaussianWeights[currentGaussianWeightIndex];
        }
    }

    // Next, normalize these weights to prevent the clipping of the Gaussian curve at the end of the discrete samples from reducing luminance
    for (NSUInteger currentGaussianWeightIndex = 0; currentGaussianWeightIndex < blurRadius + 1; currentGaussianWeightIndex++)
    {
        standardGaussianWeights[currentGaussianWeightIndex] = standardGaussianWeights[currentGaussianWeightIndex] / sumOfWeights;
    }

    // From these weights we calculate the offsets to read interpolated values from
    NSUInteger numberOfOptimizedOffsets = MIN(blurRadius / 2 + (blurRadius % 2), 7);
    GLfloat *optimizedGaussianOffsets = calloc(numberOfOptimizedOffsets, sizeof(GLfloat));

    for (NSUInteger currentOptimizedOffset = 0; currentOptimizedOffset < numberOfOptimizedOffsets; currentOptimizedOffset++)
    {
        GLfloat firstWeight = standardGaussianWeights[currentOptimizedOffset*2 + 1];
        GLfloat secondWeight = standardGaussianWeights[currentOptimizedOffset*2 + 2];

        GLfloat optimizedWeight = firstWeight + secondWeight;

        optimizedGaussianOffsets[currentOptimizedOffset] = (firstWeight * (currentOptimizedOffset*2 + 1) + secondWeight * (currentOptimizedOffset*2 + 2)) / optimizedWeight;
    }

    NSMutableString *shaderString = [[NSMutableString alloc] init];
    // Header
    [shaderString appendFormat:@"\
     attribute vec4 position;\n\
     attribute vec4 inputTexCoord;\n\
     \n\
     uniform float texelWidthOffset;\n\
     uniform float texelHeightOffset;\n\
     \n\
     varying vec2 blurCoordinates[%lu];\n\
     \n\
     void main()\n\
     {\n\
     gl_Position = position;\n\
     \n\
     vec2 singleStepOffset = vec2(texelWidthOffset, texelHeightOffset);\n", (unsigned long)(1 + (numberOfOptimizedOffsets * 2))];

    // Inner offset loop
    [shaderString appendString:@"blurCoordinates[0] = inputTexCoord.xy;\n"];
    for (NSUInteger currentOptimizedOffset = 0; currentOptimizedOffset < numberOfOptimizedOffsets; currentOptimizedOffset++)
    {
        [shaderString appendFormat:@"\
         blurCoordinates[%lu] = inputTexCoord.xy + singleStepOffset * %f;\n\
         blurCoordinates[%lu] = inputTexCoord.xy - singleStepOffset * %f;\n", (unsigned long)((currentOptimizedOffset * 2) + 1), optimizedGaussianOffsets[currentOptimizedOffset], (unsigned long)((currentOptimizedOffset * 2) + 2), optimizedGaussianOffsets[currentOptimizedOffset]];
    }

    // Footer
    [shaderString appendString:@"}\n"];

    free(optimizedGaussianOffsets);
    free(standardGaussianWeights);
    return shaderString;
}

+ (NSString *)fragmentShaderForOptimizedBlurOfRadius:(NSUInteger)blurRadius sigma:(CGFloat)sigma;
{
    if (blurRadius < 1)
    {
        return kGPUImagePassthroughFragmentShaderString;
    }

    // First, generate the normal Gaussian weights for a given sigma
    GLfloat *standardGaussianWeights = calloc(blurRadius + 1, sizeof(GLfloat));
    GLfloat sumOfWeights = 0.0;
    for (NSUInteger currentGaussianWeightIndex = 0; currentGaussianWeightIndex < blurRadius + 1; currentGaussianWeightIndex++)
    {
        standardGaussianWeights[currentGaussianWeightIndex] = (1.0 / sqrt(2.0 * M_PI * pow(sigma, 2.0))) * exp(-pow(currentGaussianWeightIndex, 2.0) / (2.0 * pow(sigma, 2.0)));

        if (currentGaussianWeightIndex == 0)
        {
            sumOfWeights += standardGaussianWeights[currentGaussianWeightIndex];
        }
        else
        {
            sumOfWeights += 2.0 * standardGaussianWeights[currentGaussianWeightIndex];
        }
    }

    // Next, normalize these weights to prevent the clipping of the Gaussian curve at the end of the discrete samples from reducing luminance
    for (NSUInteger currentGaussianWeightIndex = 0; currentGaussianWeightIndex < blurRadius + 1; currentGaussianWeightIndex++)
    {
        standardGaussianWeights[currentGaussianWeightIndex] = standardGaussianWeights[currentGaussianWeightIndex] / sumOfWeights;
    }

    // From these weights we calculate the offsets to read interpolated values from
    NSUInteger numberOfOptimizedOffsets = MIN(blurRadius / 2 + (blurRadius % 2), 7);
    NSUInteger trueNumberOfOptimizedOffsets = blurRadius / 2 + (blurRadius % 2);

    NSMutableString *shaderString = [[NSMutableString alloc] init];

    // Header
    [shaderString appendFormat:@"\
     uniform sampler2D sourceImage;\n\
     uniform highp float texelWidthOffset;\n\
     uniform highp float texelHeightOffset;\n\
     \n\
     varying highp vec2 blurCoordinates[%lu];\n\
     \n\
     void main()\n\
     {\n\
     lowp vec4 sum = vec4(0.0);\n", (unsigned long)(1 + (numberOfOptimizedOffsets * 2)) ];

    // Inner texture loop
    [shaderString appendFormat:@"sum += texture2D(sourceImage, blurCoordinates[0]) * %f;\n", standardGaussianWeights[0]];

    for (NSUInteger currentBlurCoordinateIndex = 0; currentBlurCoordinateIndex < numberOfOptimizedOffsets; currentBlurCoordinateIndex++)
    {
        GLfloat firstWeight = standardGaussianWeights[currentBlurCoordinateIndex * 2 + 1];
        GLfloat secondWeight = standardGaussianWeights[currentBlurCoordinateIndex * 2 + 2];
        GLfloat optimizedWeight = firstWeight + secondWeight;

        [shaderString appendFormat:@"sum += texture2D(sourceImage, blurCoordinates[%lu]) * %f;\n", (unsigned long)((currentBlurCoordinateIndex * 2) + 1), optimizedWeight];
        [shaderString appendFormat:@"sum += texture2D(sourceImage, blurCoordinates[%lu]) * %f;\n", (unsigned long)((currentBlurCoordinateIndex * 2) + 2), optimizedWeight];
    }

    // If the number of required samples exceeds the amount we can pass in via varyings, we have to do dependent texture reads in the fragment shader
    if (trueNumberOfOptimizedOffsets > numberOfOptimizedOffsets)
    {
        [shaderString appendString:@"highp vec2 singleStepOffset = vec2(texelWidthOffset, texelHeightOffset);\n"];

        for (NSUInteger currentOverlowTextureRead = numberOfOptimizedOffsets; currentOverlowTextureRead < trueNumberOfOptimizedOffsets; currentOverlowTextureRead++)
        {
            GLfloat firstWeight = standardGaussianWeights[currentOverlowTextureRead * 2 + 1];
            GLfloat secondWeight = standardGaussianWeights[currentOverlowTextureRead * 2 + 2];

            GLfloat optimizedWeight = firstWeight + secondWeight;
            GLfloat optimizedOffset = (firstWeight * (currentOverlowTextureRead * 2 + 1) + secondWeight * (currentOverlowTextureRead * 2 + 2)) / optimizedWeight;

            [shaderString appendFormat:@"sum += texture2D(sourceImage, blurCoordinates[0] + singleStepOffset * %f) * %f;\n", optimizedOffset, optimizedWeight];
            [shaderString appendFormat:@"sum += texture2D(sourceImage, blurCoordinates[0] - singleStepOffset * %f) * %f;\n", optimizedOffset, optimizedWeight];
        }
    }

    // Footer
    [shaderString appendString:@"\
     gl_FragColor = sum;\n\
     }\n"];

    free(standardGaussianWeights);
    return shaderString;
}

- (void)setupFilterForSize:(CGSize)filterFrameSize;
{
    [super setupFilterForSize:filterFrameSize];

    if (shouldResizeBlurRadiusWithImageSize)
    {
        if (self.blurRadiusAsFractionOfImageWidth > 0)
        {
            self.blurRadiusInPixels = filterFrameSize.width * self.blurRadiusAsFractionOfImageWidth;
        }
        else
        {
            self.blurRadiusInPixels = filterFrameSize.height * self.blurRadiusAsFractionOfImageHeight;
        }
    }
}

#pragma mark -
#pragma mark Rendering

- (void)renderToTextureWithVertices:(const GLfloat *)vertices textureCoordinates:(const GLfloat *)textureCoordinates;
{
    [super renderToTextureWithVertices:vertices textureCoordinates:textureCoordinates];

    for (NSUInteger currentAdditionalBlurPass = 1; currentAdditionalBlurPass < _blurPasses; currentAdditionalBlurPass++)
    {
        [super renderToTextureWithVertices:vertices textureCoordinates:[[self class] textureCoordinatesForRotation:kGPUImageNoRotation]];
    }
}

- (void)switchToVertexShader:(NSString *)newVertexShader fragmentShader:(NSString *)newFragmentShader;
{
    runSynchronouslyOnVideoProcessingQueue(^{
        [GPUImageContext useImageProcessingContext];

        filterProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:newVertexShader fragmentShaderString:newFragmentShader];

        if (!filterProgram.initialized)
        {
            [self initializeAttributes];

            if (![filterProgram link])
            {
                NSString *progLog = [filterProgram programLog];
                NSLog(@"Program link log: %@", progLog);
                NSString *fragLog = [filterProgram fragmentShaderLog];
                NSLog(@"Fragment shader compile log: %@", fragLog);
                NSString *vertLog = [filterProgram vertexShaderLog];
                NSLog(@"Vertex shader compile log: %@", vertLog);
                filterProgram = nil;
                NSAssert(NO, @"Filter shader link failed");
            }
        }

        filterPositionAttribute = [filterProgram attributeIndex:@"position"];
        filterTextureCoordinateAttribute = [filterProgram attributeIndex:@"inputTexCoord"];
        filterInputTextureUniform = [filterProgram uniformIndex:@"sourceImage"]; // This does assume a name of "inputImageTexture" for the fragment shader
        verticalPassTexelWidthOffsetUniform = [filterProgram uniformIndex:@"texelWidthOffset"];
        verticalPassTexelHeightOffsetUniform = [filterProgram uniformIndex:@"texelHeightOffset"];
        [GPUImageContext setActiveShaderProgram:filterProgram];

        glEnableVertexAttribArray(filterPositionAttribute);
        glEnableVertexAttribArray(filterTextureCoordinateAttribute);

        secondFilterProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:newVertexShader fragmentShaderString:newFragmentShader];

        if (!secondFilterProgram.initialized)
        {
            [self initializeSecondaryAttributes];

            if (![secondFilterProgram link])
            {
                NSString *progLog = [secondFilterProgram programLog];
                NSLog(@"Program link log: %@", progLog);
                NSString *fragLog = [secondFilterProgram fragmentShaderLog];
                NSLog(@"Fragment shader compile log: %@", fragLog);
                NSString *vertLog = [secondFilterProgram vertexShaderLog];
                NSLog(@"Vertex shader compile log: %@", vertLog);
                secondFilterProgram = nil;
                NSAssert(NO, @"Filter shader link failed");
            }
        }

        secondFilterPositionAttribute = [secondFilterProgram attributeIndex:@"position"];
        secondFilterTextureCoordinateAttribute = [secondFilterProgram attributeIndex:@"inputTexCoord"];
        secondFilterInputTextureUniform = [secondFilterProgram uniformIndex:@"sourceImage"]; // This does assume a name of "inputImageTexture" for the fragment shader
        secondFilterInputTextureUniform2 = [secondFilterProgram uniformIndex:@"inputImageTexture2"]; // This does assume a name of "inputImageTexture2" for second input texture in the fragment shader
        horizontalPassTexelWidthOffsetUniform = [secondFilterProgram uniformIndex:@"texelWidthOffset"];
        horizontalPassTexelHeightOffsetUniform = [secondFilterProgram uniformIndex:@"texelHeightOffset"];
        [GPUImageContext setActiveShaderProgram:secondFilterProgram];

        glEnableVertexAttribArray(secondFilterPositionAttribute);
        glEnableVertexAttribArray(secondFilterTextureCoordinateAttribute);

        [self setupFilterForSize:[self sizeOfFBO]];
        glFinish();
    });

}

#pragma mark -
#pragma mark Accessors

- (void)setTexelSpacingMultiplier:(CGFloat)newValue;
{
    _texelSpacingMultiplier = newValue;

    _verticalTexelSpacing = _texelSpacingMultiplier;
    _horizontalTexelSpacing = _texelSpacingMultiplier;

    [self setupFilterForSize:[self sizeOfFBO]];
}

// inputRadius for Core Image's CIGaussianBlur is really sigma in the Gaussian equation, so I'm using that for my blur radius, to be consistent
- (void)setBlurRadiusInPixels:(CGFloat)newValue;
{
    // 7.0 is the limit for blur size for hardcoded varying offsets

    if (round(newValue) != _blurRadiusInPixels)
    {
        _blurRadiusInPixels = round(newValue); // For now, only do integral sigmas

        NSUInteger calculatedSampleRadius = 0;
        if (_blurRadiusInPixels >= 1) // Avoid a divide-by-zero error here
        {
            // Calculate the number of pixels to sample from by setting a bottom limit for the contribution of the outermost pixel
            CGFloat minimumWeightToFindEdgeOfSamplingArea = 1.0/256.0;
            calculatedSampleRadius = floor(sqrt(-2.0 * pow(_blurRadiusInPixels, 2.0) * log(minimumWeightToFindEdgeOfSamplingArea * sqrt(2.0 * M_PI * pow(_blurRadiusInPixels, 2.0))) ));
            calculatedSampleRadius += calculatedSampleRadius % 2; // There's nothing to gain from handling odd radius sizes, due to the optimizations I use
        }

        // NSLog(@"Blur radius: %f, calculated sample radius: %d", _blurRadiusInPixels, calculatedSampleRadius);
        //
        NSString *newGaussianBlurVertexShader = [[self class] vertexShaderForOptimizedBlurOfRadius:calculatedSampleRadius sigma:_blurRadiusInPixels];
        NSString *newGaussianBlurFragmentShader = [[self class] fragmentShaderForOptimizedBlurOfRadius:calculatedSampleRadius sigma:_blurRadiusInPixels];

        // NSLog(@"Optimized vertex shader: \n%@", newGaussianBlurVertexShader);
        // NSLog(@"Optimized fragment shader: \n%@", newGaussianBlurFragmentShader);
        //
        [self switchToVertexShader:newGaussianBlurVertexShader fragmentShader:newGaussianBlurFragmentShader];
    }
    shouldResizeBlurRadiusWithImageSize = NO;
}

- (void)setBlurRadiusAsFractionOfImageWidth:(CGFloat)blurRadiusAsFractionOfImageWidth
{
    if (blurRadiusAsFractionOfImageWidth < 0) return;

    shouldResizeBlurRadiusWithImageSize = _blurRadiusAsFractionOfImageWidth != blurRadiusAsFractionOfImageWidth && blurRadiusAsFractionOfImageWidth > 0;
    _blurRadiusAsFractionOfImageWidth = blurRadiusAsFractionOfImageWidth;
    _blurRadiusAsFractionOfImageHeight = 0;
}

- (void)setBlurRadiusAsFractionOfImageHeight:(CGFloat)blurRadiusAsFractionOfImageHeight
{
    if (blurRadiusAsFractionOfImageHeight < 0) return;

    shouldResizeBlurRadiusWithImageSize = _blurRadiusAsFractionOfImageHeight != blurRadiusAsFractionOfImageHeight && blurRadiusAsFractionOfImageHeight > 0;
    _blurRadiusAsFractionOfImageHeight = blurRadiusAsFractionOfImageHeight;
    _blurRadiusAsFractionOfImageWidth = 0;
}

@end
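The "optimized" shader pair above leans on GPU linear texture filtering: two adjacent Gaussian taps with weights w1 (at distance d1) and w2 (at d2) collapse into one bilinear tap of weight w1 + w2 placed at (w1*d1 + w2*d2)/(w1 + w2), which is exactly what vertexShaderForOptimizedBlurOfRadius:sigma: precomputes. A standalone sketch of the same arithmetic (illustrative only; LogOptimizedGaussianOffsets is not part of the commit):

#import <Foundation/Foundation.h>

// Mirrors the weight/offset math above. For radius 4, sigma 2.0 it prints
// two merged taps at offsets ~1.41 and ~3.29 instead of four single-texel taps.
// radius is assumed even, as setBlurRadiusInPixels enforces.
static void LogOptimizedGaussianOffsets(NSUInteger radius, CGFloat sigma)
{
    double *weights = calloc(radius + 1, sizeof(double));
    double sum = 0.0;
    for (NSUInteger i = 0; i < radius + 1; i++)
    {
        weights[i] = (1.0 / sqrt(2.0 * M_PI * sigma * sigma)) * exp(-((double)(i * i)) / (2.0 * sigma * sigma));
        sum += (i == 0) ? weights[i] : 2.0 * weights[i];
    }
    for (NSUInteger i = 0; i < radius + 1; i++)
    {
        weights[i] /= sum; // normalize so the truncated kernel preserves luminance
    }

    NSUInteger optimizedCount = MIN(radius / 2 + (radius % 2), 7);
    for (NSUInteger i = 0; i < optimizedCount; i++)
    {
        double w1 = weights[i * 2 + 1], w2 = weights[i * 2 + 2];
        NSLog(@"tap %lu: weight %f at offset %f", (unsigned long)i, w1 + w2,
              (w1 * (i * 2 + 1) + w2 * (i * 2 + 2)) / (w1 + w2));
    }
    free(weights);
}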
submodules/LegacyComponents/Sources/GPUImageSharpenFilter.h (new executable file, 12 lines)
@@ -0,0 +1,12 @@
#import "GPUImageFilter.h"

@interface GPUImageSharpenFilter : GPUImageFilter
{
    GLint sharpnessUniform;
    GLint imageWidthFactorUniform, imageHeightFactorUniform;
}

// Sharpness ranges from -4.0 to 4.0, with 0.0 as the normal level
@property(readwrite, nonatomic) CGFloat sharpness;

@end
submodules/LegacyComponents/Sources/GPUImageSharpenFilter.m (new executable file, 120 lines)
@@ -0,0 +1,120 @@
#import "GPUImageSharpenFilter.h"

NSString *const kGPUImageSharpenVertexShaderString = SHADER_STRING
(
 attribute vec4 position;
 attribute vec4 inputTexCoord;

 uniform float imageWidthFactor;
 uniform float imageHeightFactor;
 uniform float sharpness;

 varying vec2 texCoord;
 varying vec2 leftTextureCoordinate;
 varying vec2 rightTextureCoordinate;
 varying vec2 topTextureCoordinate;
 varying vec2 bottomTextureCoordinate;

 varying float centerMultiplier;
 varying float edgeMultiplier;

 void main()
 {
     gl_Position = position;

     vec2 widthStep = vec2(imageWidthFactor, 0.0);
     vec2 heightStep = vec2(0.0, imageHeightFactor);

     texCoord = inputTexCoord.xy;
     leftTextureCoordinate = inputTexCoord.xy - widthStep;
     rightTextureCoordinate = inputTexCoord.xy + widthStep;
     topTextureCoordinate = inputTexCoord.xy + heightStep;
     bottomTextureCoordinate = inputTexCoord.xy - heightStep;

     centerMultiplier = 1.0 + 4.0 * sharpness;
     edgeMultiplier = sharpness;
 }
);


NSString *const kGPUImageSharpenFragmentShaderString = SHADER_STRING
(
 precision highp float;

 varying highp vec2 texCoord;
 varying highp vec2 leftTextureCoordinate;
 varying highp vec2 rightTextureCoordinate;
 varying highp vec2 topTextureCoordinate;
 varying highp vec2 bottomTextureCoordinate;

 varying highp float centerMultiplier;
 varying highp float edgeMultiplier;

 uniform sampler2D sourceImage;

 void main()
 {
     mediump vec3 textureColor = texture2D(sourceImage, texCoord).rgb;
     mediump vec3 leftTextureColor = texture2D(sourceImage, leftTextureCoordinate).rgb;
     mediump vec3 rightTextureColor = texture2D(sourceImage, rightTextureCoordinate).rgb;
     mediump vec3 topTextureColor = texture2D(sourceImage, topTextureCoordinate).rgb;
     mediump vec3 bottomTextureColor = texture2D(sourceImage, bottomTextureCoordinate).rgb;

     gl_FragColor = vec4((textureColor * centerMultiplier - (leftTextureColor * edgeMultiplier + rightTextureColor * edgeMultiplier + topTextureColor * edgeMultiplier + bottomTextureColor * edgeMultiplier)), texture2D(sourceImage, bottomTextureCoordinate).w);
 }
);


@implementation GPUImageSharpenFilter

@synthesize sharpness = _sharpness;

#pragma mark -
#pragma mark Initialization and teardown

- (id)init;
{
    if (!(self = [super initWithVertexShaderFromString:kGPUImageSharpenVertexShaderString fragmentShaderFromString:kGPUImageSharpenFragmentShaderString]))
    {
        return nil;
    }

    sharpnessUniform = [filterProgram uniformIndex:@"sharpness"];
    self.sharpness = 0.0;

    imageWidthFactorUniform = [filterProgram uniformIndex:@"imageWidthFactor"];
    imageHeightFactorUniform = [filterProgram uniformIndex:@"imageHeightFactor"];

    return self;
}

- (void)setupFilterForSize:(CGSize)filterFrameSize;
{
    runSynchronouslyOnVideoProcessingQueue(^{
        [GPUImageContext setActiveShaderProgram:filterProgram];

        if (GPUImageRotationSwapsWidthAndHeight(inputRotation))
        {
            glUniform1f(imageWidthFactorUniform, 1.0 / filterFrameSize.height);
            glUniform1f(imageHeightFactorUniform, 1.0 / filterFrameSize.width);
        }
        else
        {
            glUniform1f(imageWidthFactorUniform, 1.0 / filterFrameSize.width);
            glUniform1f(imageHeightFactorUniform, 1.0 / filterFrameSize.height);
        }
    });
}

#pragma mark -
#pragma mark Accessors

- (void)setSharpness:(CGFloat)newValue;
{
    _sharpness = newValue;

    [self setFloat:_sharpness forUniform:sharpnessUniform program:filterProgram];
}

@end
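The fragment shader is a Laplacian-style sharpen over the four edge neighbours: the output is center * (1.0 + 4.0 * sharpness) minus sharpness times each neighbour, so 0.0 is the identity and negative values soften. A usage sketch (not part of the diff):

GPUImageSharpenFilter *sharpenFilter = [[GPUImageSharpenFilter alloc] init];
sharpenFilter.sharpness = 0.75; // within the documented -4.0...4.0 range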
@@ -0,0 +1,21 @@
#import "GPUImageTwoInputFilter.h"

extern NSString *const kGPUImageThreeInputTextureVertexShaderString;

@interface GPUImageThreeInputFilter : GPUImageTwoInputFilter
{
    GPUImageFramebuffer *thirdInputFramebuffer;

    GLint filterThirdTextureCoordinateAttribute;
    GLint filterInputTextureUniform3;
    GPUImageRotationMode inputRotation3;
    GLuint filterSourceTexture3;
    CMTime thirdFrameTime;

    BOOL hasSetSecondTexture, hasReceivedThirdFrame, thirdFrameWasVideo;
    BOOL thirdFrameCheckDisabled;
}

- (void)disableThirdFrameCheck;

@end
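A minimal three-input shader sketch for this class. Per the uniform lookups in the implementation that follows, the samplers must be named sourceImage, inputImageTexture2 and inputImageTexture3; texCoord/texCoord2/texCoord3 come from kGPUImageThreeInputTextureVertexShaderString. The kAverageThreeFragmentShaderString name is hypothetical, not part of the commit:

NSString *const kAverageThreeFragmentShaderString = SHADER_STRING
(
 varying highp vec2 texCoord;
 varying highp vec2 texCoord2;
 varying highp vec2 texCoord3;

 uniform sampler2D sourceImage;
 uniform sampler2D inputImageTexture2;
 uniform sampler2D inputImageTexture3;

 void main()
 {
     // Average the three inputs, sampled at their own coordinates.
     gl_FragColor = (texture2D(sourceImage, texCoord) +
                     texture2D(inputImageTexture2, texCoord2) +
                     texture2D(inputImageTexture3, texCoord3)) / 3.0;
 }
);

GPUImageThreeInputFilter *average = [[GPUImageThreeInputFilter alloc] initWithFragmentShaderFromString:kAverageThreeFragmentShaderString];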
328
submodules/LegacyComponents/Sources/GPUImageThreeInputFilter.m
Normal file
328
submodules/LegacyComponents/Sources/GPUImageThreeInputFilter.m
Normal file
@ -0,0 +1,328 @@
|
||||
#import "GPUImageThreeInputFilter.h"
|
||||
|
||||
|
||||
NSString *const kGPUImageThreeInputTextureVertexShaderString = SHADER_STRING
|
||||
(
|
||||
attribute vec4 position;
|
||||
attribute vec4 inputTexCoord;
|
||||
attribute vec4 inputTexCoord2;
|
||||
attribute vec4 inputTexCoord3;
|
||||
|
||||
varying vec2 texCoord;
|
||||
varying vec2 texCoord2;
|
||||
varying vec2 texCoord3;
|
||||
|
||||
void main()
|
||||
{
|
||||
gl_Position = position;
|
||||
texCoord = inputTexCoord.xy;
|
||||
texCoord2 = inputTexCoord2.xy;
|
||||
texCoord3 = inputTexCoord3.xy;
|
||||
}
|
||||
);
|
||||
|
||||
@implementation GPUImageThreeInputFilter
|
||||
|
||||
#pragma mark -
|
||||
#pragma mark Initialization and teardown
|
||||
|
||||
- (id)initWithFragmentShaderFromString:(NSString *)fragmentShaderString;
|
||||
{
|
||||
if (!(self = [self initWithVertexShaderFromString:kGPUImageThreeInputTextureVertexShaderString fragmentShaderFromString:fragmentShaderString]))
|
||||
{
|
||||
return nil;
|
||||
}
|
||||
|
||||
return self;
|
||||
}
|
||||
|
||||
- (id)initWithVertexShaderFromString:(NSString *)vertexShaderString fragmentShaderFromString:(NSString *)fragmentShaderString;
|
||||
{
|
||||
if (!(self = [super initWithVertexShaderFromString:vertexShaderString fragmentShaderFromString:fragmentShaderString]))
|
||||
{
|
||||
return nil;
|
||||
}
|
||||
|
||||
inputRotation3 = kGPUImageNoRotation;
|
||||
|
||||
hasSetSecondTexture = NO;
|
||||
|
||||
hasReceivedThirdFrame = NO;
|
||||
thirdFrameWasVideo = NO;
|
||||
thirdFrameCheckDisabled = NO;
|
||||
|
||||
thirdFrameTime = kCMTimeInvalid;
|
||||
|
||||
runSynchronouslyOnVideoProcessingQueue(^{
|
||||
[GPUImageContext useImageProcessingContext];
|
||||
filterThirdTextureCoordinateAttribute = [filterProgram attributeIndex:@"inputTexCoord3"];
|
||||
|
||||
filterInputTextureUniform3 = [filterProgram uniformIndex:@"inputImageTexture3"]; // This does assume a name of "inputImageTexture3" for the third input texture in the fragment shader
|
||||
glEnableVertexAttribArray(filterThirdTextureCoordinateAttribute);
|
||||
});
|
||||
|
||||
return self;
|
||||
}
|
||||
|
||||
- (void)initializeAttributes;
|
||||
{
|
||||
[super initializeAttributes];
|
||||
[filterProgram addAttribute:@"inputTexCoord3"];
|
||||
}
|
||||
|
||||
- (void)disableThirdFrameCheck;
|
||||
{
|
||||
thirdFrameCheckDisabled = YES;
|
||||
}
|
||||
|
||||
#pragma mark -
|
||||
#pragma mark Rendering
|
||||
|
||||
- (void)renderToTextureWithVertices:(const GLfloat *)vertices textureCoordinates:(const GLfloat *)textureCoordinates;
|
||||
{
|
||||
if (self.preventRendering)
|
||||
{
|
||||
[firstInputFramebuffer unlock];
|
||||
[secondInputFramebuffer unlock];
|
||||
[thirdInputFramebuffer unlock];
|
||||
return;
|
||||
}
|
||||
|
||||
[GPUImageContext setActiveShaderProgram:filterProgram];
|
||||
outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:[self sizeOfFBO] textureOptions:self.outputTextureOptions onlyTexture:NO];
|
||||
[outputFramebuffer activateFramebuffer];
|
||||
if (usingNextFrameForImageCapture)
|
||||
{
|
||||
[outputFramebuffer lock];
|
||||
}
|
||||
|
||||
[self setUniformsForProgramAtIndex:0];
|
||||
|
||||
glClearColor(backgroundColorRed, backgroundColorGreen, backgroundColorBlue, backgroundColorAlpha);
|
||||
glClear(GL_COLOR_BUFFER_BIT);
|
||||
|
||||
glActiveTexture(GL_TEXTURE2);
|
||||
glBindTexture(GL_TEXTURE_2D, [firstInputFramebuffer texture]);
|
||||
glUniform1i(filterInputTextureUniform, 2);
|
||||
|
||||
glActiveTexture(GL_TEXTURE3);
|
||||
glBindTexture(GL_TEXTURE_2D, [secondInputFramebuffer texture]);
|
||||
glUniform1i(filterInputTextureUniform2, 3);
|
||||
|
||||
glActiveTexture(GL_TEXTURE4);
|
||||
glBindTexture(GL_TEXTURE_2D, [thirdInputFramebuffer texture]);
|
||||
glUniform1i(filterInputTextureUniform3, 4);
|
||||
|
||||
glVertexAttribPointer(filterPositionAttribute, 2, GL_FLOAT, 0, 0, vertices);
|
||||
glVertexAttribPointer(filterTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, textureCoordinates);
|
||||
glVertexAttribPointer(filterSecondTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, [[self class] textureCoordinatesForRotation:inputRotation2]);
|
||||
glVertexAttribPointer(filterThirdTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, [[self class] textureCoordinatesForRotation:inputRotation3]);
|
||||
|
||||
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
|
||||
[firstInputFramebuffer unlock];
|
||||
[secondInputFramebuffer unlock];
|
||||
[thirdInputFramebuffer unlock];
|
||||
if (usingNextFrameForImageCapture)
|
||||
{
|
||||
dispatch_semaphore_signal(imageCaptureSemaphore);
|
||||
}
|
||||
}
|
||||
|
||||
#pragma mark -
|
||||
#pragma mark GPUImageInput
- (NSInteger)nextAvailableTextureIndex;
{
    if (hasSetSecondTexture)
    {
        return 2;
    }
    else if (hasSetFirstTexture)
    {
        return 1;
    }
    else
    {
        return 0;
    }
}

- (void)setInputFramebuffer:(GPUImageFramebuffer *)newInputFramebuffer atIndex:(NSInteger)textureIndex;
{
    if (textureIndex == 0)
    {
        firstInputFramebuffer = newInputFramebuffer;
        hasSetFirstTexture = YES;
        [firstInputFramebuffer lock];
    }
    else if (textureIndex == 1)
    {
        secondInputFramebuffer = newInputFramebuffer;
        hasSetSecondTexture = YES;
        [secondInputFramebuffer lock];
    }
    else
    {
        thirdInputFramebuffer = newInputFramebuffer;
        [thirdInputFramebuffer lock];
    }
}

- (void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex;
{
    if (textureIndex == 0)
    {
        [super setInputSize:newSize atIndex:textureIndex];

        if (CGSizeEqualToSize(newSize, CGSizeZero))
        {
            hasSetFirstTexture = NO;
        }
    }
    else if (textureIndex == 1)
    {
        if (CGSizeEqualToSize(newSize, CGSizeZero))
        {
            hasSetSecondTexture = NO;
        }
    }
}

- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex;
{
    if (textureIndex == 0)
    {
        inputRotation = newInputRotation;
    }
    else if (textureIndex == 1)
    {
        inputRotation2 = newInputRotation;
    }
    else
    {
        inputRotation3 = newInputRotation;
    }
}

- (CGSize)rotatedSize:(CGSize)sizeToRotate forIndex:(NSInteger)textureIndex;
{
    CGSize rotatedSize = sizeToRotate;

    GPUImageRotationMode rotationToCheck;
    if (textureIndex == 0)
    {
        rotationToCheck = inputRotation;
    }
    else if (textureIndex == 1)
    {
        rotationToCheck = inputRotation2;
    }
    else
    {
        rotationToCheck = inputRotation3;
    }

    if (GPUImageRotationSwapsWidthAndHeight(rotationToCheck))
    {
        rotatedSize.width = sizeToRotate.height;
        rotatedSize.height = sizeToRotate.width;
    }

    return rotatedSize;
}

- (void)newFrameReadyAtTime:(CMTime)frameTime atIndex:(NSInteger)textureIndex;
{
    // You can set up infinite update loops, so this helps to short circuit them
    if (hasReceivedFirstFrame && hasReceivedSecondFrame && hasReceivedThirdFrame)
    {
        return;
    }

    BOOL updatedMovieFrameOppositeStillImage = NO;

    if (textureIndex == 0)
    {
        hasReceivedFirstFrame = YES;
        firstFrameTime = frameTime;
        if (secondFrameCheckDisabled)
        {
            hasReceivedSecondFrame = YES;
        }
        if (thirdFrameCheckDisabled)
        {
            hasReceivedThirdFrame = YES;
        }

        if (!CMTIME_IS_INDEFINITE(frameTime))
        {
            if (CMTIME_IS_INDEFINITE(secondFrameTime))
            {
                updatedMovieFrameOppositeStillImage = YES;
            }
        }
    }
    else if (textureIndex == 1)
    {
        hasReceivedSecondFrame = YES;
        secondFrameTime = frameTime;
        if (firstFrameCheckDisabled)
        {
            hasReceivedFirstFrame = YES;
        }
        if (thirdFrameCheckDisabled)
        {
            hasReceivedThirdFrame = YES;
        }

        if (!CMTIME_IS_INDEFINITE(frameTime))
        {
            if (CMTIME_IS_INDEFINITE(firstFrameTime))
            {
                updatedMovieFrameOppositeStillImage = YES;
            }
        }
    }
    else
    {
        hasReceivedThirdFrame = YES;
        thirdFrameTime = frameTime;
        if (firstFrameCheckDisabled)
        {
            hasReceivedFirstFrame = YES;
        }
        if (secondFrameCheckDisabled)
        {
            hasReceivedSecondFrame = YES;
        }

        if (!CMTIME_IS_INDEFINITE(frameTime))
        {
            if (CMTIME_IS_INDEFINITE(firstFrameTime))
            {
                updatedMovieFrameOppositeStillImage = YES;
            }
        }
    }

    // || (hasReceivedFirstFrame && secondFrameCheckDisabled) || (hasReceivedSecondFrame && firstFrameCheckDisabled)
    if ((hasReceivedFirstFrame && hasReceivedSecondFrame && hasReceivedThirdFrame) || updatedMovieFrameOppositeStillImage)
    {
        static const GLfloat imageVertices[] = {
            -1.0f, -1.0f,
            1.0f, -1.0f,
            -1.0f, 1.0f,
            1.0f, 1.0f,
        };

        [self renderToTextureWithVertices:imageVertices textureCoordinates:[[self class] textureCoordinatesForRotation:inputRotation]];

        [self informTargetsAboutNewFrameAtTime:frameTime];

        hasReceivedFirstFrame = NO;
        hasReceivedSecondFrame = NO;
        hasReceivedThirdFrame = NO;
    }
}

@end
30
submodules/LegacyComponents/Sources/GPUImageToneCurveFilter.h
Executable file
@ -0,0 +1,30 @@
#import "GPUImageFilter.h"

@interface GPUImageToneCurveFilter : GPUImageFilter

@property(readwrite, nonatomic, copy) NSArray *redControlPoints;
@property(readwrite, nonatomic, copy) NSArray *greenControlPoints;
@property(readwrite, nonatomic, copy) NSArray *blueControlPoints;
@property(readwrite, nonatomic, copy) NSArray *rgbCompositeControlPoints;

// Initialization and teardown
- (id)initWithACVData:(NSData*)data;

- (id)initWithACV:(NSString*)curveFilename;
- (id)initWithACVURL:(NSURL*)curveFileURL;

// This lets you set all three red, green, and blue tone curves at once.
// NOTE: This function is deprecated because the same effect can be accomplished
// using the rgbComposite channel rather than setting all 3 R, G, and B channels.
- (void)setRGBControlPoints:(NSArray *)points DEPRECATED_ATTRIBUTE;

- (void)setPointsWithACV:(NSString*)curveFilename;
- (void)setPointsWithACVURL:(NSURL*)curveFileURL;

// Curve calculation
- (NSMutableArray *)getPreparedSplineCurve:(NSArray *)points;
- (NSMutableArray *)splineCurve:(NSArray *)points;
- (NSMutableArray *)secondDerivative:(NSArray *)cgPoints;
- (void)updateToneCurveTexture;

@end
603
submodules/LegacyComponents/Sources/GPUImageToneCurveFilter.m
Normal file
@ -0,0 +1,603 @@
#import "GPUImageToneCurveFilter.h"

#pragma mark -
#pragma mark GPUImageACVFile Helper

// GPUImageACVFile
//
// ACV File format Parser
// Please refer to http://www.adobe.com/devnet-apps/photoshop/fileformatashtml/PhotoshopFileFormats.htm#50577411_pgfId-1056330
//
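// Layout sketch (all integers big-endian int16): version, curveCount, then for each
// curve a pointCount followed by pointCount (output, input) pairs in the 0-255 range.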

@interface GPUImageACVFile : NSObject {
    short version;
    short totalCurves;

    NSArray *rgbCompositeCurvePoints;
    NSArray *redCurvePoints;
    NSArray *greenCurvePoints;
    NSArray *blueCurvePoints;
}

@property(strong, nonatomic) NSArray *rgbCompositeCurvePoints;
@property(strong, nonatomic) NSArray *redCurvePoints;
@property(strong, nonatomic) NSArray *greenCurvePoints;
@property(strong, nonatomic) NSArray *blueCurvePoints;

- (id)initWithACVFileData:(NSData*)data;

unsigned short int16WithBytes(Byte* bytes);
@end

@implementation GPUImageACVFile

@synthesize rgbCompositeCurvePoints, redCurvePoints, greenCurvePoints, blueCurvePoints;

- (id)initWithACVFileData:(NSData *)data {
    self = [super init];
    if (self != nil)
    {
        if (data.length == 0)
        {
            NSLog(@"failed to init ACVFile with data:%@", data);

            return self;
        }

        Byte* rawBytes = (Byte*) [data bytes];
        version = int16WithBytes(rawBytes);
        rawBytes += 2;

        totalCurves = int16WithBytes(rawBytes);
        rawBytes += 2;

        NSMutableArray *curves = [NSMutableArray new];

        float pointRate = (1.0 / 255);
        // The following is the data for each curve specified by count above
        for (NSInteger curveIndex = 0; curveIndex < totalCurves; curveIndex++)
        {
            unsigned short pointCount = int16WithBytes(rawBytes);
            rawBytes += 2;

            NSMutableArray *points = [NSMutableArray new];
            // point count * 4
            // Curve points. Each curve point is a pair of short integers where
            // the first number is the output value (vertical coordinate on the
            // Curves dialog graph) and the second is the input value. All coordinates have range 0 to 255.
            for (NSInteger pointIndex = 0; pointIndex < pointCount; pointIndex++)
            {
                unsigned short y = int16WithBytes(rawBytes);
                rawBytes += 2;
                unsigned short x = int16WithBytes(rawBytes);
                rawBytes += 2;
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
                [points addObject:[NSValue valueWithCGSize:CGSizeMake(x * pointRate, y * pointRate)]];
#else
                [points addObject:[NSValue valueWithSize:CGSizeMake(x * pointRate, y * pointRate)]];
#endif
            }
            [curves addObject:points];
        }
        rgbCompositeCurvePoints = [curves objectAtIndex:0];
        redCurvePoints = [curves objectAtIndex:1];
        greenCurvePoints = [curves objectAtIndex:2];
        blueCurvePoints = [curves objectAtIndex:3];
    }
    return self;
}
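
// ACV data is big-endian; memcpy into a local avoids unaligned reads before byte-swapping.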
unsigned short int16WithBytes(Byte* bytes) {
    uint16_t result;
    memcpy(&result, bytes, sizeof(result));
    return CFSwapInt16BigToHost(result);
}
@end

#pragma mark -
#pragma mark GPUImageToneCurveFilter Implementation

NSString *const kGPUImageToneCurveFragmentShaderString = SHADER_STRING
(
 varying highp vec2 texCoord;
 uniform sampler2D sourceImage;
 uniform sampler2D toneCurveTexture;

 void main()
 {
     lowp vec4 textureColor = texture2D(sourceImage, texCoord);
     lowp float redCurveValue = texture2D(toneCurveTexture, vec2(textureColor.r, 0.0)).r;
     lowp float greenCurveValue = texture2D(toneCurveTexture, vec2(textureColor.g, 0.0)).g;
     lowp float blueCurveValue = texture2D(toneCurveTexture, vec2(textureColor.b, 0.0)).b;

     gl_FragColor = vec4(redCurveValue, greenCurveValue, blueCurveValue, textureColor.a);
 }
);

@interface GPUImageToneCurveFilter()
{
    GLint toneCurveTextureUniform;
    GLuint toneCurveTexture;
    GLubyte *toneCurveByteArray;

    NSArray *_redCurve, *_greenCurve, *_blueCurve, *_rgbCompositeCurve;
}

@end

@implementation GPUImageToneCurveFilter

@synthesize rgbCompositeControlPoints = _rgbCompositeControlPoints;
@synthesize redControlPoints = _redControlPoints;
@synthesize greenControlPoints = _greenControlPoints;
@synthesize blueControlPoints = _blueControlPoints;

#pragma mark -
#pragma mark Initialization and teardown

- (id)init;
{
    if (!(self = [super initWithFragmentShaderFromString:kGPUImageToneCurveFragmentShaderString]))
    {
        return nil;
    }

    toneCurveTextureUniform = [filterProgram uniformIndex:@"toneCurveTexture"];
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
    NSArray *defaultCurve = [NSArray arrayWithObjects:[NSValue valueWithCGPoint:CGPointMake(0.0, 0.0)], [NSValue valueWithCGPoint:CGPointMake(0.5, 0.5)], [NSValue valueWithCGPoint:CGPointMake(1.0, 1.0)], nil];
#else
    NSArray *defaultCurve = [NSArray arrayWithObjects:[NSValue valueWithPoint:NSMakePoint(0.0, 0.0)], [NSValue valueWithPoint:NSMakePoint(0.5, 0.5)], [NSValue valueWithPoint:NSMakePoint(1.0, 1.0)], nil];
#endif
    [self setRgbCompositeControlPoints:defaultCurve];
    [self setRedControlPoints:defaultCurve];
    [self setGreenControlPoints:defaultCurve];
    [self setBlueControlPoints:defaultCurve];

    return self;
}

// This pulls in Adobe ACV curve files to specify the tone curve
- (id)initWithACVData:(NSData *)data {
    if (!(self = [super initWithFragmentShaderFromString:kGPUImageToneCurveFragmentShaderString]))
    {
        return nil;
    }

    toneCurveTextureUniform = [filterProgram uniformIndex:@"toneCurveTexture"];

    GPUImageACVFile *curve = [[GPUImageACVFile alloc] initWithACVFileData:data];

    [self setRgbCompositeControlPoints:curve.rgbCompositeCurvePoints];
    [self setRedControlPoints:curve.redCurvePoints];
    [self setGreenControlPoints:curve.greenCurvePoints];
    [self setBlueControlPoints:curve.blueCurvePoints];

    curve = nil;

    return self;
}

- (id)initWithACV:(NSString*)curveFilename
{
    return [self initWithACVURL:[[NSBundle mainBundle] URLForResource:curveFilename
                                                        withExtension:@"acv"]];
}

- (id)initWithACVURL:(NSURL*)curveFileURL
{
    NSData* fileData = [NSData dataWithContentsOfURL:curveFileURL];
    return [self initWithACVData:fileData];
}

- (void)setPointsWithACV:(NSString*)curveFilename
{
    [self setPointsWithACVURL:[[NSBundle mainBundle] URLForResource:curveFilename withExtension:@"acv"]];
}

- (void)setPointsWithACVURL:(NSURL*)curveFileURL
{
    NSData* fileData = [NSData dataWithContentsOfURL:curveFileURL];
    GPUImageACVFile *curve = [[GPUImageACVFile alloc] initWithACVFileData:fileData];

    [self setRgbCompositeControlPoints:curve.rgbCompositeCurvePoints];
    [self setRedControlPoints:curve.redCurvePoints];
    [self setGreenControlPoints:curve.greenCurvePoints];
    [self setBlueControlPoints:curve.blueCurvePoints];

    curve = nil;
}

- (void)dealloc
{
    runSynchronouslyOnVideoProcessingQueue(^{
        [GPUImageContext useImageProcessingContext];

        if (toneCurveTexture)
        {
            glDeleteTextures(1, &toneCurveTexture);
            toneCurveTexture = 0;
            free(toneCurveByteArray);
        }
    });
}

#pragma mark -
#pragma mark Curve calculation

- (NSArray *)getPreparedSplineCurve:(NSArray *)points
{
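    // Returns, for each of the 0-255 input levels, a signed offset from the identity curve;
    // -updateToneCurveTexture later adds these offsets back onto the pixel values.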
    if (points && [points count] > 0)
    {
        // Sort the array.
        NSArray *sortedPoints = [points sortedArrayUsingComparator:^NSComparisonResult(id a, id b) {
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
            float x1 = [(NSValue *)a CGPointValue].x;
            float x2 = [(NSValue *)b CGPointValue].x;
#else
            float x1 = [(NSValue *)a pointValue].x;
            float x2 = [(NSValue *)b pointValue].x;
#endif
            if (x1 < x2)
                return NSOrderedAscending;
            else if (x1 > x2)
                return NSOrderedDescending;
            return NSOrderedSame;
        }];

        // Convert from (0, 1) to (0, 255).
        NSMutableArray *convertedPoints = [NSMutableArray arrayWithCapacity:[sortedPoints count]];
        for (int i = 0; i < [points count]; i++){
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
            CGPoint point = [[sortedPoints objectAtIndex:i] CGPointValue];
#else
            NSPoint point = [[sortedPoints objectAtIndex:i] pointValue];
#endif
            point.x = point.x * 255;
            point.y = point.y * 255;

#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
            [convertedPoints addObject:[NSValue valueWithCGPoint:point]];
#else
            [convertedPoints addObject:[NSValue valueWithPoint:point]];
#endif
        }

        NSMutableArray *splinePoints = [self splineCurve:convertedPoints];

        // If we have a first point like (0.3, 0) we'll be missing some points at the beginning
        // that should be 0.
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
        CGPoint firstSplinePoint = [[splinePoints objectAtIndex:0] CGPointValue];
#else
        NSPoint firstSplinePoint = [[splinePoints objectAtIndex:0] pointValue];
#endif

        if (firstSplinePoint.x > 0) {
            for (int i = firstSplinePoint.x; i >= 0; i--) {
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
                CGPoint newCGPoint = CGPointMake(i, 0);
                [splinePoints insertObject:[NSValue valueWithCGPoint:newCGPoint] atIndex:0];
#else
                NSPoint newNSPoint = NSMakePoint(i, 0);
                [splinePoints insertObject:[NSValue valueWithPoint:newNSPoint] atIndex:0];
#endif
            }
        }

        // Insert points similarly at the end, if necessary.
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
        CGPoint lastSplinePoint = [[splinePoints lastObject] CGPointValue];

        if (lastSplinePoint.x < 255) {
            for (int i = lastSplinePoint.x + 1; i <= 255; i++) {
                CGPoint newCGPoint = CGPointMake(i, 255);
                [splinePoints addObject:[NSValue valueWithCGPoint:newCGPoint]];
            }
        }
#else
        NSPoint lastSplinePoint = [[splinePoints lastObject] pointValue];

        if (lastSplinePoint.x < 255) {
            for (int i = lastSplinePoint.x + 1; i <= 255; i++) {
                NSPoint newNSPoint = NSMakePoint(i, 255);
                [splinePoints addObject:[NSValue valueWithPoint:newNSPoint]];
            }
        }
#endif

        // Prepare the spline points.
        NSMutableArray *preparedSplinePoints = [NSMutableArray arrayWithCapacity:[splinePoints count]];
        for (int i = 0; i < [splinePoints count]; i++)
        {
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
            CGPoint newPoint = [[splinePoints objectAtIndex:i] CGPointValue];
#else
            NSPoint newPoint = [[splinePoints objectAtIndex:i] pointValue];
#endif
            CGPoint origPoint = CGPointMake(newPoint.x, newPoint.x);

            float distance = sqrt(pow((origPoint.x - newPoint.x), 2.0) + pow((origPoint.y - newPoint.y), 2.0));

            if (origPoint.y > newPoint.y)
            {
                distance = -distance;
            }

            [preparedSplinePoints addObject:[NSNumber numberWithFloat:distance]];
        }

        return preparedSplinePoints;
    }

    return nil;
}

- (NSMutableArray *)splineCurve:(NSArray *)points
{
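    // Natural cubic spline interpolation: given the control points and their second
    // derivatives, emit one output value per integer x between each pair of points.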
    NSMutableArray *sdA = [self secondDerivative:points];

    // [points count] is equal to [sdA count]
    NSInteger n = [sdA count];
    if (n < 1)
    {
        return nil;
    }
    double sd[n];

    // From NSMutableArray to sd[n];
    for (int i = 0; i < n; i++)
    {
        sd[i] = [[sdA objectAtIndex:i] doubleValue];
    }

    NSMutableArray *output = [NSMutableArray arrayWithCapacity:(n + 1)];

    for (int i = 0; i < n - 1; i++)
    {
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
        CGPoint cur = [[points objectAtIndex:i] CGPointValue];
        CGPoint next = [[points objectAtIndex:(i + 1)] CGPointValue];
#else
        NSPoint cur = [[points objectAtIndex:i] pointValue];
        NSPoint next = [[points objectAtIndex:(i + 1)] pointValue];
#endif

        for (int x = cur.x; x < (int)next.x; x++)
        {
            double t = (double)(x - cur.x) / (next.x - cur.x);

            double a = 1 - t;
            double b = t;
            double h = next.x - cur.x;

            double y = a*cur.y + b*next.y + (h*h/6)*((a*a*a - a)*sd[i] + (b*b*b - b)*sd[i + 1]);

            if (y > 255.0)
            {
                y = 255.0;
            }
            else if (y < 0.0)
            {
                y = 0.0;
            }
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
            [output addObject:[NSValue valueWithCGPoint:CGPointMake(x, y)]];
#else
            [output addObject:[NSValue valueWithPoint:NSMakePoint(x, y)]];
#endif
        }
    }

    // The above always misses the last point because the last point is the last next, so we approach but don't equal it.
    [output addObject:[points lastObject]];
    return output;
}

- (NSMutableArray *)secondDerivative:(NSArray *)points
{
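    // Second derivatives for a natural cubic spline: build the tridiagonal system and
    // solve it by forward elimination and back substitution (the Thomas algorithm),
    // with the boundary second derivatives pinned to zero.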
    const NSInteger n = [points count];
    if ((n <= 0) || (n == 1))
    {
        return nil;
    }

    double matrix[n][3];
    double result[n];
    matrix[0][1] = 1;
    // What about matrix[0][0] and matrix[0][2]? Assuming 0 for now (Brad L.)
    matrix[0][0] = 0;
    matrix[0][2] = 0;

    for (int i = 1; i < n - 1; i++)
    {
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
        CGPoint P1 = [[points objectAtIndex:(i - 1)] CGPointValue];
        CGPoint P2 = [[points objectAtIndex:i] CGPointValue];
        CGPoint P3 = [[points objectAtIndex:(i + 1)] CGPointValue];
#else
        NSPoint P1 = [[points objectAtIndex:(i - 1)] pointValue];
        NSPoint P2 = [[points objectAtIndex:i] pointValue];
        NSPoint P3 = [[points objectAtIndex:(i + 1)] pointValue];
#endif

        matrix[i][0] = (double)(P2.x - P1.x) / 6;
        matrix[i][1] = (double)(P3.x - P1.x) / 3;
        matrix[i][2] = (double)(P3.x - P2.x) / 6;
        result[i] = (double)(P3.y - P2.y) / (P3.x - P2.x) - (double)(P2.y - P1.y) / (P2.x - P1.x);
    }

    // What about result[0] and result[n-1]? Assuming 0 for now (Brad L.)
    result[0] = 0;
    result[n - 1] = 0;

    matrix[n - 1][1] = 1;
    // What about matrix[n-1][0] and matrix[n-1][2]? For now, assuming they are 0 (Brad L.)
    matrix[n - 1][0] = 0;
    matrix[n - 1][2] = 0;

    // solving pass1 (up->down)
    for (int i = 1; i < n; i++)
    {
        double k = matrix[i][0] / matrix[i - 1][1];
        matrix[i][1] -= k * matrix[i - 1][2];
        matrix[i][0] = 0;
        result[i] -= k * result[i - 1];
    }
    // solving pass2 (down->up)
    for (NSInteger i = n - 2; i >= 0; i--)
    {
        double k = matrix[i][2] / matrix[i + 1][1];
        matrix[i][1] -= k * matrix[i + 1][0];
        matrix[i][2] = 0;
        result[i] -= k * result[i + 1];
    }

    double y2[n];
    for (int i = 0; i < n; i++) y2[i] = result[i] / matrix[i][1];

    NSMutableArray *output = [NSMutableArray arrayWithCapacity:n];
    for (int i = 0; i < n; i++)
    {
        [output addObject:[NSNumber numberWithDouble:y2[i]]];
    }

    return output;
}

- (void)updateToneCurveTexture;
{
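    // Builds a 256x1 BGRA lookup texture on texture unit 3: each entry holds the
    // curve-adjusted value for its input level, sampled per channel by the fragment shader.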
    runSynchronouslyOnVideoProcessingQueue(^{
        [GPUImageContext useImageProcessingContext];
        if (!toneCurveTexture)
        {
            glActiveTexture(GL_TEXTURE3);
            glGenTextures(1, &toneCurveTexture);
            glBindTexture(GL_TEXTURE_2D, toneCurveTexture);
            glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
            glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
            glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
            glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

            toneCurveByteArray = calloc(256 * 4, sizeof(GLubyte));
        }
        else
        {
            glActiveTexture(GL_TEXTURE3);
            glBindTexture(GL_TEXTURE_2D, toneCurveTexture);
        }

        if (([_redCurve count] >= 256) && ([_greenCurve count] >= 256) && ([_blueCurve count] >= 256) && ([_rgbCompositeCurve count] >= 256))
        {
            for (unsigned int currentCurveIndex = 0; currentCurveIndex < 256; currentCurveIndex++)
            {
                // BGRA for upload to texture
                GLubyte b = fmin(fmax(currentCurveIndex + [[_blueCurve objectAtIndex:currentCurveIndex] floatValue], 0), 255);
                toneCurveByteArray[currentCurveIndex * 4] = fmin(fmax(b + [[_rgbCompositeCurve objectAtIndex:b] floatValue], 0), 255);
                GLubyte g = fmin(fmax(currentCurveIndex + [[_greenCurve objectAtIndex:currentCurveIndex] floatValue], 0), 255);
                toneCurveByteArray[currentCurveIndex * 4 + 1] = fmin(fmax(g + [[_rgbCompositeCurve objectAtIndex:g] floatValue], 0), 255);
                GLubyte r = fmin(fmax(currentCurveIndex + [[_redCurve objectAtIndex:currentCurveIndex] floatValue], 0), 255);
                toneCurveByteArray[currentCurveIndex * 4 + 2] = fmin(fmax(r + [[_rgbCompositeCurve objectAtIndex:r] floatValue], 0), 255);
                toneCurveByteArray[currentCurveIndex * 4 + 3] = 255;
            }

            glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, 256 /*width*/, 1 /*height*/, 0, GL_BGRA, GL_UNSIGNED_BYTE, toneCurveByteArray);
        }
    });
}

#pragma mark -
#pragma mark Rendering

- (void)renderToTextureWithVertices:(const GLfloat *)vertices textureCoordinates:(const GLfloat *)textureCoordinates;
{
    if (self.preventRendering)
    {
        [firstInputFramebuffer unlock];
        return;
    }

    [GPUImageContext setActiveShaderProgram:filterProgram];
    outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:[self sizeOfFBO] textureOptions:self.outputTextureOptions onlyTexture:NO];
    [outputFramebuffer activateFramebuffer];
    if (usingNextFrameForImageCapture)
    {
        [outputFramebuffer lock];
    }

    glClearColor(backgroundColorRed, backgroundColorGreen, backgroundColorBlue, backgroundColorAlpha);
    glClear(GL_COLOR_BUFFER_BIT);

    glActiveTexture(GL_TEXTURE2);
    glBindTexture(GL_TEXTURE_2D, [firstInputFramebuffer texture]);
    glUniform1i(filterInputTextureUniform, 2);

    glActiveTexture(GL_TEXTURE3);
    glBindTexture(GL_TEXTURE_2D, toneCurveTexture);
    glUniform1i(toneCurveTextureUniform, 3);

    glVertexAttribPointer(filterPositionAttribute, 2, GL_FLOAT, 0, 0, vertices);
    glVertexAttribPointer(filterTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, textureCoordinates);

    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
    [firstInputFramebuffer unlock];
    if (usingNextFrameForImageCapture)
    {
        dispatch_semaphore_signal(imageCaptureSemaphore);
    }
}

#pragma mark -
#pragma mark Accessors

- (void)setRGBControlPoints:(NSArray *)points
{
    _redControlPoints = [points copy];
    _redCurve = [self getPreparedSplineCurve:_redControlPoints];

    _greenControlPoints = [points copy];
    _greenCurve = [self getPreparedSplineCurve:_greenControlPoints];

    _blueControlPoints = [points copy];
    _blueCurve = [self getPreparedSplineCurve:_blueControlPoints];

    [self updateToneCurveTexture];
}

- (void)setRgbCompositeControlPoints:(NSArray *)newValue
{
    _rgbCompositeControlPoints = [newValue copy];
    _rgbCompositeCurve = [self getPreparedSplineCurve:_rgbCompositeControlPoints];

    [self updateToneCurveTexture];
}

- (void)setRedControlPoints:(NSArray *)newValue;
{
    _redControlPoints = [newValue copy];
    _redCurve = [self getPreparedSplineCurve:_redControlPoints];

    [self updateToneCurveTexture];
}

- (void)setGreenControlPoints:(NSArray *)newValue
{
    _greenControlPoints = [newValue copy];
    _greenCurve = [self getPreparedSplineCurve:_greenControlPoints];

    [self updateToneCurveTexture];
}

- (void)setBlueControlPoints:(NSArray *)newValue
{
    _blueControlPoints = [newValue copy];
    _blueCurve = [self getPreparedSplineCurve:_blueControlPoints];

    [self updateToneCurveTexture];
}

@end
19
submodules/LegacyComponents/Sources/GPUImageTwoPassFilter.h
Executable file
@ -0,0 +1,19 @@
#import "GPUImageFilter.h"

@interface GPUImageTwoPassFilter : GPUImageFilter
{
    GPUImageFramebuffer *secondOutputFramebuffer;

    GLProgram *secondFilterProgram;
    GLint secondFilterPositionAttribute, secondFilterTextureCoordinateAttribute;
    GLint secondFilterInputTextureUniform, secondFilterInputTextureUniform2;

    NSMutableDictionary *secondProgramUniformStateRestorationBlocks;
}

// Initialization and teardown
- (id)initWithFirstStageVertexShaderFromString:(NSString *)firstStageVertexShaderString firstStageFragmentShaderFromString:(NSString *)firstStageFragmentShaderString secondStageVertexShaderFromString:(NSString *)secondStageVertexShaderString secondStageFragmentShaderFromString:(NSString *)secondStageFragmentShaderString;
- (id)initWithFirstStageFragmentShaderFromString:(NSString *)firstStageFragmentShaderString secondStageFragmentShaderFromString:(NSString *)secondStageFragmentShaderString;
- (void)initializeSecondaryAttributes;

@end
201
submodules/LegacyComponents/Sources/GPUImageTwoPassFilter.m
Executable file
@ -0,0 +1,201 @@
#import "GPUImageTwoPassFilter.h"

@implementation GPUImageTwoPassFilter

#pragma mark -
#pragma mark Initialization and teardown

- (id)initWithFirstStageVertexShaderFromString:(NSString *)firstStageVertexShaderString firstStageFragmentShaderFromString:(NSString *)firstStageFragmentShaderString secondStageVertexShaderFromString:(NSString *)secondStageVertexShaderString secondStageFragmentShaderFromString:(NSString *)secondStageFragmentShaderString;
{
    if (!(self = [super initWithVertexShaderFromString:firstStageVertexShaderString fragmentShaderFromString:firstStageFragmentShaderString]))
    {
        return nil;
    }

    secondProgramUniformStateRestorationBlocks = [NSMutableDictionary dictionaryWithCapacity:10];

    runSynchronouslyOnVideoProcessingQueue(^{
        [GPUImageContext useImageProcessingContext];

        secondFilterProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:secondStageVertexShaderString fragmentShaderString:secondStageFragmentShaderString];

        if (!secondFilterProgram.initialized)
        {
            [self initializeSecondaryAttributes];

            if (![secondFilterProgram link])
            {
                NSString *progLog = [secondFilterProgram programLog];
                NSLog(@"Program link log: %@", progLog);
                NSString *fragLog = [secondFilterProgram fragmentShaderLog];
                NSLog(@"Fragment shader compile log: %@", fragLog);
                NSString *vertLog = [secondFilterProgram vertexShaderLog];
                NSLog(@"Vertex shader compile log: %@", vertLog);
                secondFilterProgram = nil;
                NSAssert(NO, @"Filter shader link failed");
            }
        }

        secondFilterPositionAttribute = [secondFilterProgram attributeIndex:@"position"];
        secondFilterTextureCoordinateAttribute = [secondFilterProgram attributeIndex:@"inputTexCoord"];
        secondFilterInputTextureUniform = [secondFilterProgram uniformIndex:@"sourceImage"]; // This does assume a name of "sourceImage" for the fragment shader
        secondFilterInputTextureUniform2 = [secondFilterProgram uniformIndex:@"inputImageTexture2"]; // This does assume a name of "inputImageTexture2" for the second input texture in the fragment shader

        [GPUImageContext setActiveShaderProgram:secondFilterProgram];

        glEnableVertexAttribArray(secondFilterPositionAttribute);
        glEnableVertexAttribArray(secondFilterTextureCoordinateAttribute);
    });

    return self;
}

- (id)initWithFirstStageFragmentShaderFromString:(NSString *)firstStageFragmentShaderString secondStageFragmentShaderFromString:(NSString *)secondStageFragmentShaderString;
{
    if (!(self = [self initWithFirstStageVertexShaderFromString:kGPUImageVertexShaderString firstStageFragmentShaderFromString:firstStageFragmentShaderString secondStageVertexShaderFromString:kGPUImageVertexShaderString secondStageFragmentShaderFromString:secondStageFragmentShaderString]))
    {
        return nil;
    }

    return self;
}

- (void)initializeSecondaryAttributes;
{
    [secondFilterProgram addAttribute:@"position"];
    [secondFilterProgram addAttribute:@"inputTexCoord"];
}

#pragma mark -
#pragma mark Managing targets

- (GPUImageFramebuffer *)framebufferForOutput;
{
    return secondOutputFramebuffer;
}

- (void)removeOutputFramebuffer;
{
    secondOutputFramebuffer = nil;
}

#pragma mark -
#pragma mark Rendering

- (void)renderToTextureWithVertices:(const GLfloat *)vertices textureCoordinates:(const GLfloat *)textureCoordinates;
{
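    // Pass 1 draws filterProgram into outputFramebuffer; pass 2 samples that result with
    // secondFilterProgram into secondOutputFramebuffer, which is what targets read from.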
    if (self.preventRendering)
    {
        [firstInputFramebuffer unlock];
        return;
    }

    [GPUImageContext setActiveShaderProgram:filterProgram];

    outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:[self sizeOfFBO] textureOptions:self.outputTextureOptions onlyTexture:NO];
    [outputFramebuffer activateFramebuffer];

    [self setUniformsForProgramAtIndex:0];

    glClearColor(backgroundColorRed, backgroundColorGreen, backgroundColorBlue, backgroundColorAlpha);
    glClear(GL_COLOR_BUFFER_BIT);

    glActiveTexture(GL_TEXTURE2);
    glBindTexture(GL_TEXTURE_2D, [firstInputFramebuffer texture]);

    glUniform1i(filterInputTextureUniform, 2);

    glVertexAttribPointer(filterPositionAttribute, 2, GL_FLOAT, 0, 0, vertices);
    glVertexAttribPointer(filterTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, textureCoordinates);

    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);

    [firstInputFramebuffer unlock];
    firstInputFramebuffer = nil;

    // This assumes that any two-pass filter that says it desires monochrome input is using the first pass for a luminance conversion, which can be dropped
//    if (!currentlyReceivingMonochromeInput)
//    {
        // Run the first stage of the two-pass filter
//        [super renderToTextureWithVertices:vertices textureCoordinates:textureCoordinates];
//    }

    // Run the second stage of the two-pass filter
    secondOutputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:[self sizeOfFBO] textureOptions:self.outputTextureOptions onlyTexture:NO];
    [secondOutputFramebuffer activateFramebuffer];
    [GPUImageContext setActiveShaderProgram:secondFilterProgram];
    if (usingNextFrameForImageCapture)
    {
        [secondOutputFramebuffer lock];
    }

    [self setUniformsForProgramAtIndex:1];

    glActiveTexture(GL_TEXTURE3);
    glBindTexture(GL_TEXTURE_2D, [outputFramebuffer texture]);
    glVertexAttribPointer(secondFilterTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, [[self class] textureCoordinatesForRotation:kGPUImageNoRotation]);

    // TODO: Re-enable this monochrome optimization
//    if (!currentlyReceivingMonochromeInput)
//    {
//        glActiveTexture(GL_TEXTURE3);
//        glBindTexture(GL_TEXTURE_2D, outputTexture);
//        glVertexAttribPointer(secondFilterTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, [[self class] textureCoordinatesForRotation:kGPUImageNoRotation]);
//    }
//    else
//    {
//        glActiveTexture(GL_TEXTURE3);
//        glBindTexture(GL_TEXTURE_2D, sourceTexture);
//        glVertexAttribPointer(secondFilterTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, textureCoordinates);
//    }

    glUniform1i(secondFilterInputTextureUniform, 3);

    glVertexAttribPointer(secondFilterPositionAttribute, 2, GL_FLOAT, 0, 0, vertices);

    glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
    glClear(GL_COLOR_BUFFER_BIT);

    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
    [outputFramebuffer unlock];
    outputFramebuffer = nil;

    if (usingNextFrameForImageCapture)
    {
        dispatch_semaphore_signal(imageCaptureSemaphore);
    }
}

- (void)setAndExecuteUniformStateCallbackAtIndex:(GLint)uniform forProgram:(GLProgram *)shaderProgram toBlock:(dispatch_block_t)uniformStateBlock;
{
    // TODO: Deal with the fact that two-pass filters may have the same shader program identifier
    if (shaderProgram == filterProgram)
    {
        [uniformStateRestorationBlocks setObject:[uniformStateBlock copy] forKey:[NSNumber numberWithInt:uniform]];
    }
    else
    {
        [secondProgramUniformStateRestorationBlocks setObject:[uniformStateBlock copy] forKey:[NSNumber numberWithInt:uniform]];
    }
    uniformStateBlock();
}

- (void)setUniformsForProgramAtIndex:(NSUInteger)programIndex;
{
    if (programIndex == 0)
    {
        [uniformStateRestorationBlocks enumerateKeysAndObjectsUsingBlock:^(id key, id obj, BOOL *stop){
            dispatch_block_t currentBlock = obj;
            currentBlock();
        }];
    }
    else
    {
        [secondProgramUniformStateRestorationBlocks enumerateKeysAndObjectsUsingBlock:^(id key, id obj, BOOL *stop){
            dispatch_block_t currentBlock = obj;
            currentBlock();
        }];
    }
}

@end
13
submodules/LegacyComponents/Sources/GPUImageTwoPassTextureSamplingFilter.h
@ -0,0 +1,13 @@
#import "GPUImageTwoPassFilter.h"

@interface GPUImageTwoPassTextureSamplingFilter : GPUImageTwoPassFilter
{
    GLint verticalPassTexelWidthOffsetUniform, verticalPassTexelHeightOffsetUniform, horizontalPassTexelWidthOffsetUniform, horizontalPassTexelHeightOffsetUniform;
    GLfloat verticalPassTexelWidthOffset, verticalPassTexelHeightOffset, horizontalPassTexelWidthOffset, horizontalPassTexelHeightOffset;
    CGFloat _verticalTexelSpacing, _horizontalTexelSpacing;
}

// This sets the spacing between texels (in pixels) when sampling for the first and second passes. By default, this is 1.0
@property(readwrite, nonatomic) CGFloat verticalTexelSpacing, horizontalTexelSpacing;

@end
85
submodules/LegacyComponents/Sources/GPUImageTwoPassTextureSamplingFilter.m
@ -0,0 +1,85 @@
#import "GPUImageTwoPassTextureSamplingFilter.h"

@implementation GPUImageTwoPassTextureSamplingFilter

@synthesize verticalTexelSpacing = _verticalTexelSpacing;
@synthesize horizontalTexelSpacing = _horizontalTexelSpacing;

#pragma mark -
#pragma mark Initialization and teardown

- (id)initWithFirstStageVertexShaderFromString:(NSString *)firstStageVertexShaderString firstStageFragmentShaderFromString:(NSString *)firstStageFragmentShaderString secondStageVertexShaderFromString:(NSString *)secondStageVertexShaderString secondStageFragmentShaderFromString:(NSString *)secondStageFragmentShaderString
{
    if (!(self = [super initWithFirstStageVertexShaderFromString:firstStageVertexShaderString firstStageFragmentShaderFromString:firstStageFragmentShaderString secondStageVertexShaderFromString:secondStageVertexShaderString secondStageFragmentShaderFromString:secondStageFragmentShaderString]))
    {
        return nil;
    }

    runSynchronouslyOnVideoProcessingQueue(^{
        [GPUImageContext useImageProcessingContext];

        verticalPassTexelWidthOffsetUniform = [filterProgram uniformIndex:@"texelWidthOffset"];
        verticalPassTexelHeightOffsetUniform = [filterProgram uniformIndex:@"texelHeightOffset"];

        horizontalPassTexelWidthOffsetUniform = [secondFilterProgram uniformIndex:@"texelWidthOffset"];
        horizontalPassTexelHeightOffsetUniform = [secondFilterProgram uniformIndex:@"texelHeightOffset"];
    });

    self.verticalTexelSpacing = 1.0;
    self.horizontalTexelSpacing = 1.0;

    return self;
}

- (void)setUniformsForProgramAtIndex:(NSUInteger)programIndex;
{
    [super setUniformsForProgramAtIndex:programIndex];

    if (programIndex == 0)
    {
        glUniform1f(verticalPassTexelWidthOffsetUniform, verticalPassTexelWidthOffset);
        glUniform1f(verticalPassTexelHeightOffsetUniform, verticalPassTexelHeightOffset);
    }
    else
    {
        glUniform1f(horizontalPassTexelWidthOffsetUniform, horizontalPassTexelWidthOffset);
        glUniform1f(horizontalPassTexelHeightOffsetUniform, horizontalPassTexelHeightOffset);
    }
}

- (void)setupFilterForSize:(CGSize)filterFrameSize;
{
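    // The texel offsets are in normalized texture coordinates, hence the division by the
    // framebuffer dimensions below.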
    runSynchronouslyOnVideoProcessingQueue(^{
        // The first pass through the framebuffer may rotate the inbound image, so need to account for that by changing up the kernel ordering for that pass
        if (GPUImageRotationSwapsWidthAndHeight(inputRotation))
        {
            verticalPassTexelWidthOffset = _verticalTexelSpacing / filterFrameSize.height;
            verticalPassTexelHeightOffset = 0.0;
        }
        else
        {
            verticalPassTexelWidthOffset = 0.0;
            verticalPassTexelHeightOffset = _verticalTexelSpacing / filterFrameSize.height;
        }

        horizontalPassTexelWidthOffset = _horizontalTexelSpacing / filterFrameSize.width;
        horizontalPassTexelHeightOffset = 0.0;
    });
}

#pragma mark -
#pragma mark Accessors

- (void)setVerticalTexelSpacing:(CGFloat)newValue;
{
    _verticalTexelSpacing = newValue;
    [self setupFilterForSize:[self sizeOfFBO]];
}

- (void)setHorizontalTexelSpacing:(CGFloat)newValue;
{
    _horizontalTexelSpacing = newValue;
    [self setupFilterForSize:[self sizeOfFBO]];
}

@end
@ -384,7 +384,7 @@ NSString *const PGCameraAdjustingFocusKey = @"adjustingFocus";
        NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageDataSampleBuffer];
        UIImage *image = [[UIImage alloc] initWithData:imageData];

        if (self.cameraMode == PGCameraModeSquare)
        if (self.cameraMode == PGCameraModeSquarePhoto || self.cameraMode == PGCameraModeSquareVideo)
        {
            CGFloat shorterSide = MIN(image.size.width, image.size.height);
            CGFloat longerSide = MAX(image.size.width, image.size.height);
@ -636,7 +636,7 @@ NSString *const PGCameraAdjustingFocusKey = @"adjustingFocus";

- (bool)flashActive
{
    if (self.cameraMode == PGCameraModeVideo || self.cameraMode == PGCameraModeClip)
    if (self.cameraMode == PGCameraModeVideo || self.cameraMode == PGCameraModeSquareVideo)
        return self.captureSession.videoDevice.torchActive;

    return self.captureSession.videoDevice.flashActive;
@ -644,7 +644,7 @@ NSString *const PGCameraAdjustingFocusKey = @"adjustingFocus";

- (bool)flashAvailable
{
    if (self.cameraMode == PGCameraModeVideo || self.cameraMode == PGCameraModeClip)
    if (self.cameraMode == PGCameraModeVideo || self.cameraMode == PGCameraModeSquareVideo)
        return self.captureSession.videoDevice.torchAvailable;

    return self.captureSession.videoDevice.flashAvailable;
@ -110,7 +110,7 @@ const NSInteger PGCameraFrameRate = 30;
        TGLegacyLog(@"ERROR: camera can't create video device");
    }

    if (_currentMode == PGCameraModePhoto || _currentMode == PGCameraModeSquare)
    if (_currentMode == PGCameraModePhoto || _currentMode == PGCameraModeSquarePhoto)
    {
#if !TARGET_IPHONE_SIMULATOR
        self.sessionPreset = AVCaptureSessionPresetPhoto;
@ -188,7 +188,7 @@ const NSInteger PGCameraFrameRate = 30;
    if (self.currentCameraPosition != _preferredCameraPosition)
        return true;

    if (self.currentMode == PGCameraModeVideo || self.currentMode == PGCameraModeClip)
    if (self.currentMode == PGCameraModeVideo || self.currentMode == PGCameraModeSquareVideo)
        return true;

    if (self.zoomLevel > FLT_EPSILON)
@ -224,7 +224,7 @@ const NSInteger PGCameraFrameRate = 30;

    if (self.currentMode != PGCameraModePhoto)
    {
        if (self.currentMode == PGCameraModeVideo || self.currentMode == PGCameraModeClip)
        if (self.currentMode == PGCameraModeVideo || self.currentMode == PGCameraModeSquareVideo)
            self.sessionPreset = AVCaptureSessionPresetPhoto;

        _currentMode = PGCameraModePhoto;
@ -260,7 +260,7 @@ const NSInteger PGCameraFrameRate = 30;
    switch (mode)
    {
        case PGCameraModePhoto:
        case PGCameraModeSquare:
        case PGCameraModeSquarePhoto:
        {
            [self _removeAudioInputEndAudioSession:true];
            self.sessionPreset = AVCaptureSessionPresetPhoto;
@ -269,7 +269,7 @@ const NSInteger PGCameraFrameRate = 30;
            break;

        case PGCameraModeVideo:
        case PGCameraModeClip:
        case PGCameraModeSquareVideo:
        {
            self.sessionPreset = AVCaptureSessionPresetInputPriority;
            [self switchToBestVideoFormatForDevice:_videoDevice];
@ -528,7 +528,7 @@ const NSInteger PGCameraFrameRate = 30;
    switch (self.currentMode)
    {
        case PGCameraModeVideo:
        case PGCameraModeClip:
        case PGCameraModeSquareVideo:
            return _videoFlashMode;

        default:
@ -543,7 +543,7 @@ const NSInteger PGCameraFrameRate = 30;
    switch (self.currentMode)
    {
        case PGCameraModeVideo:
        case PGCameraModeClip:
        case PGCameraModeSquareVideo:
        {
            AVCaptureTorchMode torchMode = [PGCameraCaptureSession _deviceTorchModeForCameraFlashMode:mode];
            if (device.hasTorch && [device isTorchModeSupported:torchMode])
@ -660,7 +660,7 @@ const NSInteger PGCameraFrameRate = 30;

    [self commitConfiguration];

    if (self.currentMode == PGCameraModeVideo || self.currentMode == PGCameraModeClip)
    if (self.currentMode == PGCameraModeVideo || self.currentMode == PGCameraModeSquareVideo)
        [self setFrameRate:PGCameraFrameRate forDevice:deviceForTargetPosition];
    else
        [self setFrameRate:0 forDevice:deviceForTargetPosition];
@ -4,7 +4,8 @@ typedef enum
{
    PGBlurToolTypeNone,
    PGBlurToolTypeRadial,
    PGBlurToolTypeLinear
    PGBlurToolTypeLinear,
    PGBlurToolTypePortrait
} PGBlurToolType;

@interface PGPhotoBlurPass : PGPhotoProcessPass
@ -21,6 +21,7 @@

#import "PGPhotoToolComposer.h"
#import "PGEnhanceTool.h"
#import "PGSkinTool.h"
#import "PGExposureTool.h"
#import "PGContrastTool.h"
#import "PGWarmthTool.h"
@ -263,7 +264,7 @@
    if (self.previewOutput == nil && !self.standalone)
        return;

    if (self.forVideo) {
    if (![_currentInput isKindOfClass:[PGPhotoEditorPicture class]]) {
        [_queue dispatch:^
        {
            [self updateProcessChain];
@ -536,7 +537,8 @@
    static dispatch_once_t onceToken;
    dispatch_once(&onceToken, ^
    {
        tools = @[ [PGEnhanceTool class],
        tools = @[ [PGSkinTool class],
                   [PGEnhanceTool class],
                   [PGExposureTool class],
                   [PGContrastTool class],
                   [PGSaturationTool class],
@ -1,4 +1,4 @@
#import "PGPhotoGaussianBlurFIlter.h"
#import "PGPhotoGaussianBlurFilter.h"

#import "PGPhotoProcessPass.h"
8
submodules/LegacyComponents/Sources/PGPhotoSkinPass.h
Normal file
@ -0,0 +1,8 @@
#import "PGPhotoProcessPass.h"

@interface PGPhotoSkinPass : PGPhotoProcessPass

@property (nonatomic, assign) CGFloat intensity;

@end
33
submodules/LegacyComponents/Sources/PGPhotoSkinPass.m
Normal file
@ -0,0 +1,33 @@
#import "PGPhotoSkinPass.h"
#import "YUGPUImageHighPassSkinSmoothingFilter.h"
@implementation PGPhotoSkinPass

- (instancetype)init
{
    self = [super init];
    if (self != nil)
    {
        YUGPUImageHighPassSkinSmoothingFilter *filter = [[YUGPUImageHighPassSkinSmoothingFilter alloc] init];
        _filter = filter;
    }
    return self;
}

- (void)setIntensity:(CGFloat)intensity
{
    _intensity = intensity;
    [self updateParameters];
}

- (void)updateParameters
{
    [(YUGPUImageHighPassSkinSmoothingFilter *)_filter setAmount:0.75 * _intensity];
}

- (void)invalidate
{

}

@end
@ -42,6 +42,7 @@ typedef enum

@property (nonatomic, readonly) bool isSimple;
@property (nonatomic, readonly) bool isAvialableForVideo;
@property (nonatomic, readonly) bool requiresFaces;

@property (nonatomic, weak) PGPhotoToolComposer *toolComposer;
@ -43,6 +43,11 @@
    return true;
}

- (bool)requiresFaces
{
    return false;
}

- (NSInteger)order
{
    return _order;
5
submodules/LegacyComponents/Sources/PGSkinTool.h
Normal file
@ -0,0 +1,5 @@
#import "PGPhotoTool.h"

@interface PGSkinTool : PGPhotoTool

@end
57
submodules/LegacyComponents/Sources/PGSkinTool.m
Normal file
@ -0,0 +1,57 @@
#import "PGSkinTool.h"

#import "LegacyComponentsInternal.h"

#import "PGPhotoSkinPass.h"

@implementation PGSkinTool

- (instancetype)init
{
    self = [super init];
    if (self != nil)
    {
        _identifier = @"skin";
        _type = PGPhotoToolTypePass;
        _order = 0;

        _pass = [[PGPhotoSkinPass alloc] init];

        _minimumValue = 0;
        _maximumValue = 100;
        _defaultValue = 0;

        self.value = @(_defaultValue);
    }
    return self;
}

- (NSString *)title
{
    return TGLocalized(@"PhotoEditor.SkinTool");
}

- (PGPhotoProcessPass *)pass
{
    [self updatePassParameters];

    return _pass;
}

- (bool)shouldBeSkipped
{
    return (ABS(((NSNumber *)self.displayValue).floatValue - self.defaultValue) < FLT_EPSILON);
}

- (void)updatePassParameters
{
    NSNumber *value = (NSNumber *)self.displayValue;
    [(PGPhotoSkinPass *)_pass setIntensity:value.floatValue / 100];
}

- (bool)requiresFaces
{
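    // Skin smoothing is only meaningful on faces, so the tool is gated on face detection.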
    return true;
}

@end
@ -1000,10 +1000,7 @@ const NSUInteger TGAttachmentDisplayedAssetLimit = 500;
            break;

        case TGMediaAssetGifType:
            if (_forProfilePhoto)
                cellIdentifier = TGAttachmentPhotoCellIdentifier;
            else
                cellIdentifier = TGAttachmentGifCellIdentifier;
            cellIdentifier = TGAttachmentGifCellIdentifier;
            break;

        default:
@ -292,7 +292,7 @@ static CGPoint TGCameraControllerClampPointToScreenSize(__unused id self, __unus

    if ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPhone)
    {
        _interfaceView = [[TGCameraMainPhoneView alloc] initWithFrame:screenBounds];
        _interfaceView = [[TGCameraMainPhoneView alloc] initWithFrame:screenBounds avatar:_intent == TGCameraControllerAvatarIntent];
        [_interfaceView setInterfaceOrientation:interfaceOrientation animated:false];
    }
    else
@ -420,7 +420,7 @@ static CGPoint TGCameraControllerClampPointToScreenSize(__unused id self, __unus
        }
    };

    if (_intent != TGCameraControllerGenericIntent)
    if (_intent != TGCameraControllerGenericIntent && _intent != TGCameraControllerAvatarIntent)
        [_interfaceView setHasModeControl:false];

    if (iosMajorVersion() >= 11)
@ -510,9 +510,9 @@ static CGPoint TGCameraControllerClampPointToScreenSize(__unused id self, __unus
        strongSelf.view.userInteractionEnabled = false;

        PGCameraMode currentMode = strongSelf->_camera.cameraMode;
        bool generalModeNotChanged = (mode == PGCameraModePhoto && currentMode == PGCameraModeSquare) || (mode == PGCameraModeSquare && currentMode == PGCameraModePhoto) || (mode == PGCameraModeVideo && currentMode == PGCameraModeClip) || (mode == PGCameraModeClip && currentMode == PGCameraModeVideo);
        bool generalModeNotChanged = (mode == PGCameraModePhoto && currentMode == PGCameraModeSquarePhoto) || (mode == PGCameraModeSquarePhoto && currentMode == PGCameraModePhoto) || (mode == PGCameraModeVideo && currentMode == PGCameraModeSquareVideo) || (mode == PGCameraModeSquareVideo && currentMode == PGCameraModeVideo);

        if ((mode == PGCameraModeVideo || mode == PGCameraModeClip) && !generalModeNotChanged)
        if ((mode == PGCameraModeVideo || mode == PGCameraModeSquareVideo) && !generalModeNotChanged)
        {
            [[LegacyComponentsGlobals provider] pauseMusicPlayback];
        }
@ -920,6 +920,7 @@ static CGPoint TGCameraControllerClampPointToScreenSize(__unused id self, __unus
            break;

        case PGCameraModeVideo:
        case PGCameraModeSquareVideo:
        {
            if (!_camera.isRecordingVideo)
            {
@ -932,11 +933,6 @@ static CGPoint TGCameraControllerClampPointToScreenSize(__unused id self, __unus
        }
            break;

        case PGCameraModeClip:
        {
        }
            break;

        default:
            break;
    }
@ -955,7 +951,7 @@ static CGPoint TGCameraControllerClampPointToScreenSize(__unused id self, __unus

    __weak TGCameraController *weakSelf = self;
    PGCameraMode cameraMode = _camera.cameraMode;
    if (cameraMode == PGCameraModePhoto || cameraMode == PGCameraModeSquare)
    if (cameraMode == PGCameraModePhoto || cameraMode == PGCameraModeSquarePhoto)
    {
        _camera.disabled = true;

@ -2377,8 +2373,8 @@ static CGPoint TGCameraControllerClampPointToScreenSize(__unused id self, __unus
        }
            break;

        case PGCameraModeSquare:
        case PGCameraModeClip:
        case PGCameraModeSquarePhoto:
        case PGCameraModeSquareVideo:
        {
            CGRect rect = [self _cameraPreviewFrameForScreenSize:screenSize mode:PGCameraModePhoto];
            CGFloat topOffset = CGRectGetMidY(rect) - rect.size.width / 2;
@ -2410,7 +2406,7 @@ static CGPoint TGCameraControllerClampPointToScreenSize(__unused id self, __unus
    }
    else
    {
        if (mode == PGCameraModeSquare)
        if (mode == PGCameraModeSquarePhoto || mode == PGCameraModeSquareVideo)
            return CGRectMake(0, (screenSize.height - screenSize.width) / 2, screenSize.width, screenSize.width);

        return CGRectMake(0, 0, screenSize.width, screenSize.height);
@ -100,7 +100,7 @@
@synthesize cancelPressed;
@synthesize actionHandle = _actionHandle;

- (instancetype)initWithFrame:(CGRect)frame
- (instancetype)initWithFrame:(CGRect)frame avatar:(bool)avatar
{
    self = [super initWithFrame:frame];
    if (self != nil)
@ -216,7 +216,7 @@
        [_shutterButton addTarget:self action:@selector(shutterButtonPressed) forControlEvents:UIControlEventTouchDown];
        [_bottomPanelView addSubview:_shutterButton];

        _modeControl = [[TGCameraModeControl alloc] initWithFrame:CGRectMake(0, 0, frame.size.width, _modeControlHeight)];
        _modeControl = [[TGCameraModeControl alloc] initWithFrame:CGRectMake(0, 0, frame.size.width, _modeControlHeight) avatar:avatar];
        [_bottomPanelView addSubview:_modeControl];

        _flipButton = [[TGCameraFlipButton alloc] initWithFrame:CGRectMake(0, 0, 56, 56) large:true];
@ -443,7 +443,7 @@
    UIInterfaceOrientation orientation = _interfaceOrientation;
    PGCameraMode cameraMode = _modeControl.cameraMode;

    if (UIInterfaceOrientationIsLandscape(orientation) && !((cameraMode == PGCameraModePhoto && previousMode == PGCameraModeSquare) || (cameraMode == PGCameraModeSquare && previousMode == PGCameraModePhoto)))
    if (UIInterfaceOrientationIsLandscape(orientation) && !((cameraMode == PGCameraModePhoto && previousMode == PGCameraModeSquarePhoto) || (cameraMode == PGCameraModeSquarePhoto && previousMode == PGCameraModePhoto)))
    {
        if (cameraMode == PGCameraModeVideo)
            _timecodeView.hidden = true;
@ -42,7 +42,7 @@ const CGFloat TGCameraTabletPanelViewWidth = 102.0f;
@synthesize shutterReleased;
@synthesize cancelPressed;

- (instancetype)initWithFrame:(CGRect)frame
- (instancetype)initWithFrame:(CGRect)frame avatar:(bool)avatar
{
    self = [super initWithFrame:frame];
    if (self != nil)
@ -83,7 +83,7 @@ const CGFloat TGCameraTabletPanelViewWidth = 102.0f;
        [_shutterButton addTarget:self action:@selector(shutterButtonReleased) forControlEvents:UIControlEventTouchUpInside];
        [_panelView addSubview:_shutterButton];

        _modeControl = [[TGCameraModeControl alloc] initWithFrame:CGRectMake(0, 0, _panelView.frame.size.width, 260)];
        _modeControl = [[TGCameraModeControl alloc] initWithFrame:CGRectMake(0, 0, _panelView.frame.size.width, 260) avatar:avatar];
        [_panelView addSubview:_modeControl];

        __weak TGCameraMainTabletView *weakSelf = self;
@@ -42,7 +42,7 @@
switch (_modeControl.cameraMode)
{
case PGCameraModePhoto:
case PGCameraModeSquare:
case PGCameraModeSquarePhoto:
{
[_shutterButton setButtonMode:TGCameraShutterButtonNormalMode animated:true];
[_timecodeView setHidden:true animated:true];
@@ -56,7 +56,7 @@
}
break;

case PGCameraModeClip:
case PGCameraModeSquareVideo:
{
[_shutterButton setButtonMode:TGCameraShutterButtonVideoMode animated:true];
[_timecodeView setHidden:true animated:true];

@@ -22,7 +22,7 @@ const CGFloat TGCameraModeControlVerticalInteritemSpace = 29.0f;

@implementation TGCameraModeControl

- (instancetype)initWithFrame:(CGRect)frame
- (instancetype)initWithFrame:(CGRect)frame avatar:(bool)avatar
{
self = [super initWithFrame:frame];
if (self != nil)
@@ -60,13 +60,19 @@ const CGFloat TGCameraModeControlVerticalInteritemSpace = 29.0f;
_wrapperView.opaque = false;
[_maskView addSubview:_wrapperView];

_buttons = @
[
[self _createButtonForMode:PGCameraModeVideo title:TGLocalized(@"Camera.VideoMode")],
[self _createButtonForMode:PGCameraModePhoto title:TGLocalized(@"Camera.PhotoMode")],
// [self _createButtonForMode:PGCameraModeSquare title:TGLocalized(@"Camera.SquareMode")],
// [self _createButtonForMode:PGCameraModeClip title:TGLocalized(@"Camera.MomentMode")]
];
if (avatar) {
_buttons = @
[
[self _createButtonForMode:PGCameraModeSquareVideo title:TGLocalized(@"Camera.VideoMode")],
[self _createButtonForMode:PGCameraModePhoto title:TGLocalized(@"Camera.PhotoMode")]
];
} else {
_buttons = @
[
[self _createButtonForMode:PGCameraModeVideo title:TGLocalized(@"Camera.VideoMode")],
[self _createButtonForMode:PGCameraModePhoto title:TGLocalized(@"Camera.PhotoMode")]
];
}

for (UIButton *button in _buttons)
[_wrapperView addSubview:button];

@@ -20,6 +20,11 @@
return self.dimensions;
}

- (NSTimeInterval)originalDuration
{
return self.videoDuration;
}

- (SSignal *)thumbnailImageSignal
{
CGFloat scale = MIN(2.0f, TGScreenScaling());

@@ -29,77 +29,21 @@
return self;
}

- (instancetype)initWithALAsset:(ALAsset *)asset
{
self = [super init];
if (self != nil)
{
_backingLegacyAsset = asset;
}
return self;
}

- (NSString *)identifier
{
if (_cachedUniqueId == nil)
{
if (self.backingAsset != nil)
_cachedUniqueId = self.backingAsset.localIdentifier;
else
_cachedUniqueId = self.url.absoluteString;
}

return _cachedUniqueId;
}

- (NSURL *)url
{
if (self.backingLegacyAsset != nil)
{
if (!_cachedLegacyAssetUrl)
_cachedLegacyAssetUrl = [self.backingLegacyAsset defaultRepresentation].url;

return _cachedLegacyAssetUrl;
}

return nil;
}

- (CGSize)dimensions
{
if (self.backingAsset != nil)
{
return CGSizeMake(self.backingAsset.pixelWidth, self.backingAsset.pixelHeight);
}
else if (self.backingLegacyAsset != nil)
{
CGSize dimensions = self.backingLegacyAsset.defaultRepresentation.dimensions;

if (self.isVideo)
{
bool videoRotated = false;
if (_cachedLegacyVideoRotated == nil)
{
CGImageRef thumbnailImage = self.backingLegacyAsset.aspectRatioThumbnail;
CGSize thumbnailSize = CGSizeMake(CGImageGetWidth(thumbnailImage), CGImageGetHeight(thumbnailImage));
bool thumbnailIsWide = (thumbnailSize.width > thumbnailSize.height);
bool videoIsWide = (dimensions.width > dimensions.height);

videoRotated = (thumbnailIsWide != videoIsWide);
_cachedLegacyVideoRotated = @(videoRotated);
}
else
{
videoRotated = _cachedLegacyVideoRotated.boolValue;
}

if (videoRotated)
dimensions = CGSizeMake(dimensions.height, dimensions.width);
}

return dimensions;
}

return CGSizeZero;
}

@@ -107,9 +51,6 @@
{
if (self.backingAsset != nil)
return self.backingAsset.creationDate;
else if (self.backingLegacyAsset != nil)
return [self.backingLegacyAsset valueForProperty:ALAssetPropertyDate];

return nil;
}

@@ -127,9 +68,6 @@
{
if (self.backingAsset != nil)
return [self.backingAsset valueForKey:@"uniformTypeIdentifier"];
else if (self.backingLegacyAsset != nil)
return self.backingLegacyAsset.defaultRepresentation.UTI;

return nil;
}

@@ -145,8 +83,6 @@
}
}
return fileName;
} else if (self.backingLegacyAsset != nil) {
return self.backingLegacyAsset.defaultRepresentation.filename;
}
return nil;
}
@@ -167,15 +103,6 @@
else
_cachedType = @([TGMediaAsset assetTypeForPHAssetMediaType:self.backingAsset.mediaType]);
}
else if (self.backingLegacyAsset != nil)
{
if ([[self.backingLegacyAsset valueForProperty:ALAssetPropertyType] isEqualToString:ALAssetTypeVideo])
_cachedType = @(TGMediaAssetVideoType);
else if ([self _isGif])
_cachedType = @(TGMediaAssetGifType);
else
_cachedType = @(TGMediaAssetPhotoType);
}
}

return _cachedType.intValue;
@@ -195,9 +122,6 @@
{
if (self.backingAsset != nil)
return self.backingAsset.duration;
else if (self.backingLegacyAsset != nil)
return [[self.backingLegacyAsset valueForProperty:ALAssetPropertyDuration] doubleValue];

return 0;
}

@@ -1,7 +1,6 @@
#import "TGMediaAssetFetchResult.h"

#import <Photos/Photos.h>
#import <AssetsLibrary/AssetsLibrary.h>

#import "TGMediaAsset.h"

@@ -16,17 +15,6 @@

@implementation TGMediaAssetFetchResult

- (instancetype)initForALAssetsReversed:(bool)reversed
{
self = [super init];
if (self != nil)
{
_assets = [[NSMutableArray alloc] init];
_reversed = reversed;
}
return self;
}

- (instancetype)initWithPHFetchResult:(PHFetchResult *)fetchResult reversed:(bool)reversed
{
self = [super init];
@@ -92,12 +80,4 @@
return itemsIds;
}

- (void)_appendALAsset:(ALAsset *)asset
{
if (asset == nil)
return;

[_assets addObject:[[TGMediaAsset alloc] initWithALAsset:asset]];
}

@end

@@ -38,75 +38,16 @@
if (_backingFetchResult == nil)
{
PHFetchOptions *options = [[PHFetchOptions alloc] init];
//if (_assetType != TGMediaPickerAssetAnyType)
// options.predicate = [NSPredicate predicateWithFormat:@"mediaType = %i", [TGMediaAssetsLibrary _assetMediaTypeForAssetType:_assetType]];

_backingFetchResult = [PHAsset fetchAssetsInAssetCollection:_backingAssetCollection options:options];
}
}
return self;
}

- (instancetype)initWithALAssetsGroup:(ALAssetsGroup *)assetsGroup
{
bool isCameraRoll = ([[assetsGroup valueForProperty:ALAssetsGroupPropertyType] integerValue] == ALAssetsGroupSavedPhotos);
TGMediaAssetGroupSubtype subtype = isCameraRoll ? TGMediaAssetGroupSubtypeCameraRoll : TGMediaAssetGroupSubtypeNone;
return [self initWithALAssetsGroup:assetsGroup subtype:subtype];
}

- (instancetype)initWithALAssetsGroup:(ALAssetsGroup *)assetsGroup subtype:(TGMediaAssetGroupSubtype)subtype
{
self = [super init];
if (self != nil)
{
_backingAssetsGroup = assetsGroup;
_subtype = subtype;

if (subtype == TGMediaAssetGroupSubtypeVideos)
{
_title = TGLocalized(@"MediaPicker.Videos");

[self.backingAssetsGroup setAssetsFilter:[ALAssetsFilter allVideos]];
_cachedAssetCount = @(self.backingAssetsGroup.numberOfAssets);
[self.backingAssetsGroup setAssetsFilter:[ALAssetsFilter allAssets]];
}
else
{
_isCameraRoll = ([[assetsGroup valueForProperty:ALAssetsGroupPropertyType] integerValue] == ALAssetsGroupSavedPhotos);
if (_isCameraRoll)
{
_subtype = TGMediaAssetGroupSubtypeCameraRoll;
}
else
{
_isPhotoStream = ([[assetsGroup valueForProperty:ALAssetsGroupPropertyType] integerValue] == ALAssetsGroupPhotoStream);
_subtype = _isPhotoStream ? TGMediaAssetGroupSubtypeMyPhotoStream : TGMediaAssetGroupSubtypeRegular;
}
}

NSMutableArray *latestAssets = [[NSMutableArray alloc] init];
[assetsGroup enumerateAssetsWithOptions:NSEnumerationReverse usingBlock:^(ALAsset *asset, __unused NSUInteger index, BOOL *stop)
{
if (asset != nil && (_subtype != TGMediaAssetGroupSubtypeVideos || [[asset valueForProperty:ALAssetPropertyType] isEqualToString:ALAssetTypeVideo]))
{
[latestAssets addObject:[[TGMediaAsset alloc] initWithALAsset:asset]];
}
if (latestAssets.count == 3 && stop != NULL)
*stop = true;
}];

_latestAssets = latestAssets;
}
return self;
}

- (NSString *)identifier
{
if (self.backingAssetCollection != nil)
return self.backingAssetCollection.localIdentifier;
else if (_backingAssetsGroup != nil)
return [self.backingAssetsGroup valueForProperty:ALAssetsGroupPropertyPersistentID];

return _identifier;
}

@@ -116,9 +57,6 @@
return _title;
if (_backingAssetCollection != nil)
return _backingAssetCollection.localizedTitle;
if (_backingAssetsGroup != nil)
return [_backingAssetsGroup valueForProperty:ALAssetsGroupPropertyName];

return nil;
}

@@ -128,18 +66,6 @@
{
return self.backingFetchResult.count;
}
else if (self.backingAssetsGroup != nil)
{
if (self.subtype == TGMediaAssetGroupSubtypeVideos)
{
if (_cachedAssetCount != nil)
return _cachedAssetCount.integerValue;

return -1;
}
return self.backingAssetsGroup.numberOfAssets;
}

return 0;
}

@@ -153,15 +79,7 @@
{
if (_isCameraRoll)
return TGMediaAssetGroupSubtypeCameraRoll;
}
else if (self.backingAssetsGroup != nil)
{
if (_isCameraRoll)
return TGMediaAssetGroupSubtypeCameraRoll;
else if (_subtype != TGMediaAssetGroupSubtypeNone)
return _subtype;
}

}
return TGMediaAssetGroupSubtypeRegular;
}

@@ -1,10 +1,8 @@
#import "TGMediaAssetImageSignals.h"

#import <Photos/Photos.h>
#import <AssetsLibrary/AssetsLibrary.h>

#import "TGMediaAssetModernImageSignals.h"
#import "TGMediaAssetLegacyImageSignals.h"

#import "TGPhotoEditorUtils.h"

@@ -27,10 +25,7 @@ static Class TGMediaAssetImageSignalsClass = nil;

+ (void)load
{
if ([TGMediaAssetsLibrary usesPhotoFramework])
TGMediaAssetImageSignalsClass = [TGMediaAssetModernImageSignals class];
else
TGMediaAssetImageSignalsClass = [TGMediaAssetLegacyImageSignals class];
TGMediaAssetImageSignalsClass = [TGMediaAssetModernImageSignals class];
}

+ (SSignal *)imageForAsset:(TGMediaAsset *)asset imageType:(TGMediaAssetImageType)imageType size:(CGSize)size
@@ -116,6 +111,8 @@ static Class TGMediaAssetImageSignalsClass = nil;
AVAssetImageGenerator *generator = [AVAssetImageGenerator assetImageGeneratorWithAsset:avAsset];
generator.appliesPreferredTrackTransform = true;
generator.maximumSize = size;
generator.requestedTimeToleranceBefore = kCMTimeZero;
generator.requestedTimeToleranceAfter = kCMTimeZero;

[generator generateCGImagesAsynchronouslyForTimes:timestamps completionHandler:^(__unused CMTime requestedTime, CGImageRef imageRef, __unused CMTime actualTime, AVAssetImageGeneratorResult result, NSError *error)
{

@@ -1,5 +0,0 @@
#import <LegacyComponents/TGMediaAssetImageSignals.h>

@interface TGMediaAssetLegacyImageSignals : TGMediaAssetImageSignals

@end

@@ -1,325 +0,0 @@
#import "TGMediaAssetLegacyImageSignals.h"

#import "LegacyComponentsInternal.h"

#import <AssetsLibrary/AssetsLibrary.h>

#import "TGMediaAsset.h"

@implementation TGMediaAssetLegacyImageSignals

+ (SSignal *)imageForAsset:(TGMediaAsset *)asset imageType:(TGMediaAssetImageType)imageType size:(CGSize)size allowNetworkAccess:(bool)__unused allowNetworkAccess
{
if (imageType == TGMediaAssetImageTypeFastScreen)
{
return [[self imageForAsset:asset imageType:TGMediaAssetImageTypeAspectRatioThumbnail size:CGSizeZero] then:[self imageForAsset:asset imageType:TGMediaAssetImageTypeScreen size:size]];
}

switch (imageType)
{
case TGMediaAssetImageTypeThumbnail:
{
return [SSignal single:[UIImage imageWithCGImage:asset.backingLegacyAsset.thumbnail]];
}
break;

case TGMediaAssetImageTypeAspectRatioThumbnail:
{
return [SSignal single:[UIImage imageWithCGImage:asset.backingLegacyAsset.aspectRatioThumbnail]];
}
break;

case TGMediaAssetImageTypeScreen:
case TGMediaAssetImageTypeFullSize:
{
if (imageType == TGMediaAssetImageTypeScreen && asset.isVideo)
return [SSignal single:[UIImage imageWithCGImage:asset.backingLegacyAsset.defaultRepresentation.fullScreenImage]];

if (imageType == TGMediaAssetImageTypeFullSize)
size = TGMediaAssetImageLegacySizeLimit;

return [[[[SSignal alloc] initWithGenerator:^id<SDisposable>(SSubscriber *subscriber)
{
ALAssetRepresentation *representation = asset.backingLegacyAsset.defaultRepresentation;
CGDataProviderDirectCallbacks callbacks =
{
.version = 0,
.getBytePointer = NULL,
.releaseBytePointer = NULL,
.getBytesAtPosition = TGGetAssetBytesCallback,
.releaseInfo = TGReleaseAssetCallback,
};

CGDataProviderRef provider = CGDataProviderCreateDirect((void *)CFBridgingRetain(representation), representation.size, &callbacks);
CGImageSourceRef source = CGImageSourceCreateWithDataProvider(provider, NULL);

CGImageRef imageRef = CGImageSourceCreateThumbnailAtIndex(source, 0, (__bridge CFDictionaryRef)@
{
(NSString *)kCGImageSourceCreateThumbnailFromImageAlways : @(YES),
(NSString *)kCGImageSourceThumbnailMaxPixelSize : @((NSInteger)MAX(size.width, size.height)),
(NSString *)kCGImageSourceCreateThumbnailWithTransform : @(YES)
});

if (source != NULL)
CFRelease(source);

if (provider != NULL)
CFRelease(provider);

NSMutableDictionary *result = [[NSMutableDictionary alloc] init];
if (imageRef != nil && representation != nil)
{
result[@"imageRef"] = (__bridge id)(imageRef);
result[@"representation"] = representation;

[subscriber putNext:result];
[subscriber putCompletion];
}
else
{
[subscriber putError:nil];
}

return [[SBlockDisposable alloc] initWithBlock:^
{
if (imageRef != NULL)
CFRelease(imageRef);
}];
}] mapToSignal:^SSignal *(NSDictionary *result)
{
return [self _editedImageWithCGImage:(__bridge CGImageRef)(result[@"imageRef"]) representation:result[@"representation"]];
}] startOn:[self _processingQueue]];
}
break;

default:
break;
}

return [SSignal fail:nil];
}

+ (SSignal *)livePhotoForAsset:(TGMediaAsset *)asset
{
return [SSignal fail:nil];
}

+ (SSignal *)imageDataForAsset:(TGMediaAsset *)asset allowNetworkAccess:(bool)__unused allowNetworkAccess
{
return [[[SSignal alloc] initWithGenerator:^id<SDisposable>(SSubscriber *subscriber)
{
ALAssetRepresentation *representation = asset.backingLegacyAsset.defaultRepresentation;
NSUInteger size = (NSUInteger)representation.size;
void *bytes = malloc(size);
for (NSUInteger offset = 0; offset < size; )
{
NSError *error = nil;
offset += [representation getBytes:bytes + offset fromOffset:(long long)offset length:256 * 1024 error:&error];
if (error != nil)
{
[subscriber putError:nil];
return nil;
}
}

NSData *imageData = [[NSData alloc] initWithBytesNoCopy:bytes length:size freeWhenDone:true];
NSArray *fileNameComponents = [representation.url.absoluteString.lastPathComponent componentsSeparatedByString:@"?"];
NSString *fileName = fileNameComponents.firstObject;

TGMediaAssetImageData *data = [[TGMediaAssetImageData alloc] init];
data.fileName = fileName;
data.fileUTI = representation.UTI;
data.imageData = imageData;

[subscriber putNext:data];
[subscriber putCompletion];

return nil;
}] startOn:[self _processingQueue]];
}

+ (SSignal *)imageMetadataWithAsset:(TGMediaAsset *)asset
{
return [SSignal single:asset.backingLegacyAsset.defaultRepresentation.metadata];
}

+ (SSignal *)fileAttributesForAsset:(TGMediaAsset *)asset
{
return [[[SSignal alloc] initWithGenerator:^id<SDisposable>(SSubscriber *subscriber)
{
ALAssetRepresentation *representation = asset.backingLegacyAsset.defaultRepresentation;
NSArray *fileNameComponents = [representation.url.absoluteString.lastPathComponent componentsSeparatedByString:@"?"];
NSString *fileName = fileNameComponents.firstObject;
NSString *fileUTI = representation.UTI;

TGMediaAssetImageFileAttributes *attributes = [[TGMediaAssetImageFileAttributes alloc] init];
attributes.fileName = fileName;
attributes.fileUTI = fileUTI;
attributes.dimensions = representation.dimensions;
attributes.fileSize = (NSUInteger)representation.size;

[subscriber putNext:attributes];
[subscriber putCompletion];

return nil;
}] startOn:[self _processingQueue]];
}

+ (void)startCachingImagesForAssets:(NSArray *)__unused assets imageType:(TGMediaAssetImageType)__unused imageType size:(CGSize)__unused size
{

}

+ (void)stopCachingImagesForAssets:(NSArray *)__unused assets imageType:(TGMediaAssetImageType)__unused imageType size:(CGSize)__unused size
{

}

+ (void)stopCachingImagesForAllAssets
{

}

+ (SSignal *)saveUncompressedVideoForAsset:(TGMediaAsset *)asset toPath:(NSString *)path allowNetworkAccess:(bool)__unused allowNetworkAccess
{
return [[SSignal alloc] initWithGenerator:^id<SDisposable>(SSubscriber *subscriber)
{
NSOutputStream *os = [[NSOutputStream alloc] initToFileAtPath:path append:false];
[os open];

ALAssetRepresentation *representation = asset.backingLegacyAsset.defaultRepresentation;
long long size = representation.size;

uint8_t buf[128 * 1024];
for (long long offset = 0; offset < size; offset += 128 * 1024)
{
long long batchSize = MIN(128 * 1024, size - offset);
NSUInteger readBytes = [representation getBytes:buf fromOffset:offset length:(NSUInteger)batchSize error:nil];
[os write:buf maxLength:readBytes];
}

[os close];

NSArray *fileNameComponents = [representation.url.absoluteString.lastPathComponent componentsSeparatedByString:@"?"];
NSString *fileName = fileNameComponents.firstObject;

[subscriber putNext:fileName];
[subscriber putCompletion];

return nil;
}];
}

+ (SSignal *)playerItemForVideoAsset:(TGMediaAsset *)asset
{
return [SSignal single:[AVPlayerItem playerItemWithURL:asset.url]];
}

+ (SSignal *)avAssetForVideoAsset:(TGMediaAsset *)asset allowNetworkAccess:(bool)__unused allowNetworkAccess
{
return [SSignal single:[[AVURLAsset alloc] initWithURL:asset.url options:nil]];
}

+ (bool)usesPhotoFramework
{
return false;
}

+ (SSignal *)_editedImageWithCGImage:(CGImageRef)cgImage representation:(ALAssetRepresentation *)representation
{
return [[SSignal alloc] initWithGenerator:^id<SDisposable>(SSubscriber *subscriber)
{
NSError *error = nil;
CGSize originalImageSize = CGSizeMake([representation.metadata[@"PixelWidth"] floatValue], [representation.metadata[@"PixelHeight"] floatValue]);

NSData *xmpData = [representation.metadata[@"AdjustmentXMP"] dataUsingEncoding:NSUTF8StringEncoding];

CIContext *context = [CIContext contextWithOptions:nil];

CIImage *ciImage = [CIImage imageWithCGImage:cgImage];
NSArray *filterArray = [CIFilter filterArrayFromSerializedXMP:xmpData inputImageExtent:ciImage.extent error:&error];
if ((originalImageSize.width != CGImageGetWidth(cgImage)) || (originalImageSize.height != CGImageGetHeight(cgImage)))
{
CGFloat zoom = MIN(originalImageSize.width / CGImageGetWidth(cgImage), originalImageSize.height / CGImageGetHeight(cgImage));

bool hasTranslation = false;
bool hasCrop = false;

for (CIFilter *filter in filterArray)
{
if ([filter.name isEqualToString:@"CIAffineTransform"] && !hasTranslation)
{
hasTranslation = true;
CGAffineTransform t = [[filter valueForKey:@"inputTransform"] CGAffineTransformValue];
t.tx /= zoom;
t.ty /= zoom;
[filter setValue:[NSValue valueWithCGAffineTransform:t] forKey:@"inputTransform"];
}

if ([filter.name isEqualToString:@"CICrop"] && !hasCrop)
{
hasCrop = true;
CGRect r = [[filter valueForKey:@"inputRectangle"] CGRectValue];
r.origin.x /= zoom;
r.origin.y /= zoom;
r.size.width /= zoom;
r.size.height /= zoom;
[filter setValue:[NSValue valueWithCGRect:r] forKey:@"inputRectangle"];
}
}
}

for (CIFilter *filter in filterArray)
{
[filter setValue:ciImage forKey:kCIInputImageKey];
ciImage = [filter outputImage];
}

CGImageRef editedImage = [context createCGImage:ciImage fromRect:ciImage.extent];
UIImage *resultImage = [UIImage imageWithCGImage:editedImage];
CGImageRelease(editedImage);

if (error == nil)
{
[subscriber putNext:resultImage];
[subscriber putCompletion];
}
else
{
[subscriber putError:error];
}

return nil;
}];
}

+ (SQueue *)_processingQueue
{
static dispatch_once_t onceToken;
static SQueue *queue;
dispatch_once(&onceToken, ^
{
queue = [[SQueue alloc] init];
});
return queue;
}

static size_t TGGetAssetBytesCallback(void *info, void *buffer, off_t position, size_t count)
{
ALAssetRepresentation *rep = (__bridge id)info;

NSError *error = nil;
size_t countRead = [rep getBytes:(uint8_t *)buffer fromOffset:position length:count error:&error];

if (countRead == 0 && error)
TGLegacyLog(@"error occured while reading an asset: %@", error);

return countRead;
}

static void TGReleaseAssetCallback(void *info)
{
CFRelease(info);
}

@end

@@ -1250,14 +1250,14 @@

switch (intent)
{
case TGMediaAssetsControllerSetProfilePhotoIntent:
case TGMediaAssetsControllerSetSignupProfilePhotoIntent:
case TGMediaAssetsControllerSetCustomWallpaperIntent:
case TGMediaAssetsControllerPassportIntent:
case TGMediaAssetsControllerPassportMultipleIntent:
assetType = TGMediaAssetPhotoType;
break;

case TGMediaAssetsControllerSetProfilePhotoIntent:
case TGMediaAssetsControllerSendMediaIntent:
assetType = TGMediaAssetAnyType;
break;

@@ -1,5 +0,0 @@
#import <LegacyComponents/TGMediaAssetsLibrary.h>

@interface TGMediaAssetsLegacyLibrary : TGMediaAssetsLibrary

@end

@@ -1,431 +0,0 @@
#import "TGMediaAssetsLegacyLibrary.h"

#import <AssetsLibrary/AssetsLibrary.h>

#import <LegacyComponents/TGObserverProxy.h>

@interface TGMediaAssetsLegacyLibrary ()
{
ALAssetsLibrary *_assetsLibrary;
TGObserverProxy *_assetsChangeObserver;
SPipe *_libraryChangePipe;
}
@end

@implementation TGMediaAssetsLegacyLibrary

- (instancetype)initForAssetType:(TGMediaAssetType)assetType
{
self = [super initForAssetType:assetType];
if (self != nil)
{
_assetsLibrary = [[ALAssetsLibrary alloc] init];
_assetsChangeObserver = [[TGObserverProxy alloc] initWithTarget:self targetSelector:@selector(assetsLibraryDidChange:) name:ALAssetsLibraryChangedNotification];
_libraryChangePipe = [[SPipe alloc] init];
}
return self;
}

- (SSignal *)assetWithIdentifier:(NSString *)identifier
{
if (identifier.length == 0)
return [SSignal fail:nil];

return [[SSignal alloc] initWithGenerator:^id<SDisposable>(SSubscriber *subscriber)
{
[_assetsLibrary assetForURL:[NSURL URLWithString:identifier] resultBlock:^(ALAsset *asset)
{
if (asset != nil)
{
[subscriber putNext:[[TGMediaAsset alloc] initWithALAsset:asset]];
[subscriber putCompletion];
}
else
{
[subscriber putError:nil];
}
} failureBlock:^(__unused NSError *error)
{
[subscriber putError:nil];
}];

return nil;
}];
}

- (SSignal *)assetGroups
{
SSignal *(^groupsSignal)(void) = ^
{
return [[[[[[SSignal alloc] initWithGenerator:^id<SDisposable>(SSubscriber *subscriber)
{
[_assetsLibrary enumerateGroupsWithTypes:ALAssetsGroupAll usingBlock:^(ALAssetsGroup *assetsGroup, __unused BOOL *stop)
{
if (assetsGroup != nil)
{
if (self.assetType != TGMediaAssetAnyType)
[assetsGroup setAssetsFilter:[TGMediaAssetsLegacyLibrary _assetsFilterForAssetType:self.assetType]];

TGMediaAssetGroup *group = [[TGMediaAssetGroup alloc] initWithALAssetsGroup:assetsGroup];
[subscriber putNext:group];
}
else
{
[subscriber putCompletion];
}
} failureBlock:^(NSError *error)
{
[subscriber putError:error];
}];

return nil;
}] then:[[SSignal alloc] initWithGenerator:^id<SDisposable>(SSubscriber *subscriber)
{
[_assetsLibrary enumerateGroupsWithTypes:ALAssetsGroupAll usingBlock:^(ALAssetsGroup *assetsGroup, __unused BOOL *stop)
{
if (assetsGroup != nil)
{
if ([[assetsGroup valueForProperty:ALAssetsGroupPropertyType] integerValue] == ALAssetsGroupSavedPhotos)
{
TGMediaAssetGroup *group = [[TGMediaAssetGroup alloc] initWithALAssetsGroup:assetsGroup subtype:TGMediaAssetGroupSubtypeVideos];
[subscriber putNext:group];
[subscriber putCompletion];
}
}
else
{
[subscriber putCompletion];
}
} failureBlock:^(NSError *error)
{
[subscriber putError:error];
}];

return nil;
}]] reduceLeft:[[NSMutableArray alloc] init] with:^id(NSMutableArray *groups, id group)
{
[groups addObject:group];
return groups;
}] map:^NSMutableArray *(NSMutableArray *groups)
{
[groups sortUsingFunction:TGMediaAssetGroupComparator context:nil];
return groups;
}] startOn:_queue];
};

SSignal *updateSignal = [[self libraryChanged] mapToSignal:^SSignal *(__unused id change)
{
return groupsSignal();
}];

return [groupsSignal() then:updateSignal];
}

- (SSignal *)cameraRollGroup
{
return [[SSignal alloc] initWithGenerator:^id<SDisposable>(SSubscriber *subscriber)
{
[_assetsLibrary enumerateGroupsWithTypes:ALAssetsGroupSavedPhotos usingBlock:^(ALAssetsGroup *group, BOOL *stop)
{
if (group != nil)
{
if (self.assetType != TGMediaAssetAnyType)
[group setAssetsFilter:[TGMediaAssetsLegacyLibrary _assetsFilterForAssetType:self.assetType]];

[subscriber putNext:[[TGMediaAssetGroup alloc] initWithALAssetsGroup:group]];
[subscriber putCompletion];

if (stop != NULL)
*stop = true;
}
else
{
[subscriber putError:nil];
}
} failureBlock:^(NSError *error)
{
[subscriber putError:error];
}];

return nil;
}];
}

- (SSignal *)assetsOfAssetGroup:(TGMediaAssetGroup *)assetGroup reversed:(bool)reversed
{
NSParameterAssert(assetGroup);

SSignal *(^fetchSignal)(TGMediaAssetGroup *) = ^SSignal *(TGMediaAssetGroup *group)
{
return [[[SSignal alloc] initWithGenerator:^id<SDisposable>(SSubscriber *subscriber)
{
TGMediaAssetFetchResult *mediaFetchResult = [[TGMediaAssetFetchResult alloc] initForALAssetsReversed:reversed];

NSEnumerationOptions options = kNilOptions;
if (group.isReversed)
options = NSEnumerationReverse;

[group.backingAssetsGroup enumerateAssetsWithOptions:options usingBlock:^(ALAsset *asset, __unused NSUInteger index, __unused BOOL *stop)
{
if (asset != nil && (assetGroup.subtype != TGMediaAssetGroupSubtypeVideos || [[asset valueForProperty:ALAssetPropertyType] isEqualToString:ALAssetTypeVideo]))
{
[mediaFetchResult _appendALAsset:asset];
}
}];

[subscriber putNext:mediaFetchResult];
[subscriber putCompletion];

return nil;
}] startOn:_queue];
};

SSignal *updateSignal = [[self libraryChanged] mapToSignal:^SSignal *(__unused id change)
{
return fetchSignal(assetGroup);
}];

return [fetchSignal(assetGroup) then:updateSignal];
}

- (SSignal *)updatedAssetsForAssets:(NSArray *)assets
{
SSignal *(^updatedAssetSignal)(TGMediaAsset *) = ^SSignal *(TGMediaAsset *asset)
{
return [[[SSignal alloc] initWithGenerator:^id<SDisposable>(SSubscriber *subscriber)
{
[_assetsLibrary assetForURL:asset.url resultBlock:^(ALAsset *asset)
{
if (asset != nil)
{
TGMediaAsset *updatedAsset = [[TGMediaAsset alloc] initWithALAsset:asset];
[subscriber putNext:updatedAsset];
[subscriber putCompletion];
}
else
{
[subscriber putError:nil];
}
} failureBlock:^(__unused NSError *error)
{
[subscriber putError:nil];
}];

return nil;
}] catch:^SSignal *(__unused id error)
{
return [SSignal complete];
}];
};

NSMutableArray *signals = [[NSMutableArray alloc] init];
for (TGMediaAsset *asset in assets)
[signals addObject:updatedAssetSignal(asset)];

SSignal *combinedSignal = nil;
for (SSignal *signal in signals)
{
if (combinedSignal == nil)
combinedSignal = signal;
else
combinedSignal = [combinedSignal then:signal];
}

return [combinedSignal reduceLeft:[[NSMutableArray alloc] init] with:^id(NSMutableArray *array, TGMediaAsset *updatedAsset)
{
[array addObject:updatedAsset];
return array;
}];
}

- (SSignal *)filterDeletedAssets:(NSArray *)assets
{
SSignal *(^assetDeletedSignal)(TGMediaAsset *) = ^SSignal *(TGMediaAsset *asset)
{
return [[[[SSignal alloc] initWithGenerator:^id<SDisposable>(SSubscriber *subscriber)
{
[_assetsLibrary assetForURL:asset.url resultBlock:^(ALAsset *asset)
{
[subscriber putNext:@(asset != nil)];
[subscriber putCompletion];
} failureBlock:^(__unused NSError *error)
{
[subscriber putNext:@(false)];
[subscriber putCompletion];
}];

return nil;
}] filter:^bool(NSNumber *exists)
{
return !exists.boolValue;
}] map:^TGMediaAsset *(__unused id exists)
{
return asset;
}];
};

NSMutableArray *signals = [[NSMutableArray alloc] init];
for (TGMediaAsset *asset in assets)
[signals addObject:assetDeletedSignal(asset)];

SSignal *combinedSignal = nil;
for (SSignal *signal in signals)
{
if (combinedSignal == nil)
combinedSignal = signal;
else
combinedSignal = [combinedSignal then:signal];
}

return [combinedSignal reduceLeft:[[NSMutableArray alloc] init] with:^id(NSMutableArray *array, TGMediaAsset *deletedAsset)
{
[array addObject:deletedAsset];
return array;
}];
}

#pragma mark -

- (void)assetsLibraryDidChange:(NSNotification *)__unused notification
{
__strong TGMediaAssetsLegacyLibrary *strongSelf = self;
if (strongSelf != nil)
strongSelf->_libraryChangePipe.sink([SSignal single:@(true)]);
}

- (SSignal *)libraryChanged
{
return [[_libraryChangePipe.signalProducer() map:^SSignal *(id data) {
return [[SSignal single:data] delay:0.5 onQueue:_queue];
}] switchToLatest];
}

#pragma mark -

- (SSignal *)saveAssetWithImage:(UIImage *)image
{
return [[SSignal alloc] initWithGenerator:^id<SDisposable>(SSubscriber *subscriber)
{
[_assetsLibrary writeImageToSavedPhotosAlbum:image.CGImage orientation:(ALAssetOrientation)image.imageOrientation completionBlock:^(NSURL *assetURL, NSError *error)
{
if (assetURL != nil && error == nil)
[subscriber putCompletion];
else
[subscriber putError:error];
}];

return nil;
}];
}

- (SSignal *)saveAssetWithImageData:(NSData *)imageData
{
return [[SSignal alloc] initWithGenerator:^id<SDisposable>(SSubscriber *subscriber)
{
[_assetsLibrary writeImageDataToSavedPhotosAlbum:imageData metadata:nil completionBlock:^(NSURL *assetURL, NSError *error)
{
if (assetURL != nil && error == nil)
[subscriber putCompletion];
else
[subscriber putError:error];
}];

return nil;
}];
}

- (SSignal *)_saveAssetWithUrl:(NSURL *)url isVideo:(bool)isVideo
{
return [[SSignal alloc] initWithGenerator:^id<SDisposable>(SSubscriber *subscriber)
{
void (^writeCompletionBlock)(NSURL *, NSError *) = ^(NSURL *assetURL, NSError *error)
{
if (assetURL != nil && error == nil)
[subscriber putCompletion];
else
[subscriber putError:error];
};

if (!isVideo)
{
NSData *data = [[NSData alloc] initWithContentsOfURL:url options:NSDataReadingMappedIfSafe error:nil];
[_assetsLibrary writeImageDataToSavedPhotosAlbum:data metadata:nil completionBlock:writeCompletionBlock];
}
else
{
[_assetsLibrary writeVideoAtPathToSavedPhotosAlbum:url completionBlock:writeCompletionBlock];
}

return nil;
}];
}

+ (ALAssetsFilter *)_assetsFilterForAssetType:(TGMediaAssetType)assetType
{
switch (assetType)
{
case TGMediaAssetPhotoType:
return [ALAssetsFilter allPhotos];

case TGMediaAssetVideoType:
return [ALAssetsFilter allVideos];

default:
return [ALAssetsFilter allAssets];
}
}

+ (SSignal *)authorizationStatusSignal
{
if (TGMediaLibraryCachedAuthorizationStatus != TGMediaLibraryAuthorizationStatusNotDetermined)
return [SSignal single:@(TGMediaLibraryCachedAuthorizationStatus)];

return [SSignal single:@(TGMediaLibraryAuthorizationStatusAuthorized)];
}

+ (void)requestAuthorizationForAssetType:(TGMediaAssetType)assetType completion:(void (^)(TGMediaLibraryAuthorizationStatus, TGMediaAssetGroup *))completion
{
TGMediaLibraryAuthorizationStatus currentStatus = [self authorizationStatus];
if (currentStatus == TGMediaLibraryAuthorizationStatusDenied || currentStatus == TGMediaLibraryAuthorizationStatusRestricted)
{
completion(currentStatus, nil);
}
else
{
TGMediaAssetsLibrary *library = [self libraryForAssetType:assetType];
[[library cameraRollGroup] startWithNext:^(TGMediaAssetGroup *group)
{
TGMediaLibraryCachedAuthorizationStatus = [self authorizationStatus];
completion([self authorizationStatus], group);
} error:^(__unused id error)
{
TGMediaLibraryCachedAuthorizationStatus = [self authorizationStatus];
completion([self authorizationStatus], nil);
} completed:nil];
}
}

+ (TGMediaLibraryAuthorizationStatus)authorizationStatus
{
return [self _authorizationStatusForALAuthorizationStatus:[ALAssetsLibrary authorizationStatus]];
}

+ (TGMediaLibraryAuthorizationStatus)_authorizationStatusForALAuthorizationStatus:(ALAuthorizationStatus)status
{
switch (status)
{
case ALAuthorizationStatusRestricted:
return TGMediaLibraryAuthorizationStatusRestricted;

case ALAuthorizationStatusDenied:
return TGMediaLibraryAuthorizationStatusDenied;

case ALAuthorizationStatusAuthorized:
return TGMediaLibraryAuthorizationStatusAuthorized;

default:
return TGMediaLibraryAuthorizationStatusNotDetermined;
}
}

@end

@@ -3,7 +3,6 @@
#import "LegacyComponentsInternal.h"

#import "TGMediaAssetsModernLibrary.h"
#import "TGMediaAssetsLegacyLibrary.h"

@implementation TGMediaAssetsLibrary

@@ -11,10 +10,7 @@ static Class TGMediaAssetsLibraryClass = nil;

+ (void)load
{
if ([self usesPhotoFramework])
TGMediaAssetsLibraryClass = [TGMediaAssetsModernLibrary class];
else
TGMediaAssetsLibraryClass = [TGMediaAssetsLegacyLibrary class];
TGMediaAssetsLibraryClass = [TGMediaAssetsModernLibrary class];

[TGMediaAssetsLibraryClass authorizationStatus];
}
@@ -77,30 +73,6 @@ NSInteger TGMediaAssetGroupComparator(TGMediaAssetGroup *group1, TGMediaAssetGro
return nil;
}

- (SSignal *)_legacyAssetsOfAssetGroup:(TGMediaAssetGroup *)assetGroup reversed:(bool)reversed
{
NSParameterAssert(assetGroup);
return [[[SSignal alloc] initWithGenerator:^id<SDisposable>(SSubscriber *subscriber)
{
TGMediaAssetFetchResult *mediaFetchResult = [[TGMediaAssetFetchResult alloc] init];

NSEnumerationOptions options = kNilOptions;
if (reversed)
options = NSEnumerationReverse;

[assetGroup.backingAssetsGroup enumerateAssetsWithOptions:options usingBlock:^(ALAsset *asset, __unused NSUInteger index, __unused BOOL *stop)
{
if (asset != nil)
[mediaFetchResult _appendALAsset:asset];
}];

[subscriber putNext:mediaFetchResult];
[subscriber putCompletion];

return nil;
}] startOn:_queue];
}

#pragma mark -

- (SSignal *)saveAssetWithImage:(UIImage *)__unused image

@@ -260,7 +260,7 @@
return TGMediaAssetsVideoCellKind;

case TGMediaAssetGifType:
if (_intent == TGMediaAssetsControllerSetProfilePhotoIntent || _intent == TGMediaAssetsControllerSetSignupProfilePhotoIntent || _intent == TGMediaAssetsControllerPassportIntent || _intent == TGMediaAssetsControllerPassportMultipleIntent)
if (_intent == TGMediaAssetsControllerSetSignupProfilePhotoIntent || _intent == TGMediaAssetsControllerPassportIntent || _intent == TGMediaAssetsControllerPassportMultipleIntent)
return TGMediaAssetsPhotoCellKind;
else
return TGMediaAssetsGifCellKind;

@@ -93,7 +93,7 @@

NSMutableArray *itemViews = [[NSMutableArray alloc] init];

TGAttachmentCarouselItemView *carouselItem = [[TGAttachmentCarouselItemView alloc] initWithContext:_context camera:true selfPortrait:_personalPhoto forProfilePhoto:true assetType:TGMediaAssetPhotoType saveEditedPhotos:_saveEditedPhotos allowGrouping:false];
TGAttachmentCarouselItemView *carouselItem = [[TGAttachmentCarouselItemView alloc] initWithContext:_context camera:true selfPortrait:_personalPhoto forProfilePhoto:true assetType:_signup ? TGMediaAssetPhotoType : TGMediaAssetAnyType saveEditedPhotos:_saveEditedPhotos allowGrouping:false];
carouselItem.stickersContext = _stickersContext;
carouselItem.parentController = _parentController;
carouselItem.openEditor = true;

@@ -218,7 +218,7 @@
scrubberBackgroundView.backgroundColor = [TGPhotoEditorInterfaceAssets toolbarTransparentBackgroundColor];
[_scrubberPanelView addSubview:scrubberBackgroundView];

_scrubberView = [[TGMediaPickerGalleryVideoScrubber alloc] initWithFrame:CGRectMake(0.0f, _headerView.frame.size.height - 44.0f, _headerView.frame.size.width, 44.0f)];
_scrubberView = [[TGMediaPickerGalleryVideoScrubber alloc] initWithFrame:CGRectMake(0.0f, _headerView.frame.size.height - 44.0f, _headerView.frame.size.width, 68.0f)];
_scrubberView.autoresizingMask = UIViewAutoresizingFlexibleWidth;
_scrubberView.dataSource = self;
_scrubberView.delegate = self;
@@ -738,7 +738,7 @@
mirrored = adjustments.cropMirrored;
}

_scrubberView.maximumLength = adjustments.sendAsGif ? TGVideoEditMaximumGifDuration : 0.0;
// _scrubberView.maximumLength = adjustments.sendAsGif ? TGVideoEditMaximumGifDuration : 0.0;

[self _layoutPlayerViewWithCropRect:cropRect videoFrameSize:videoFrameSize orientation:orientation mirrored:mirrored];
}
@@ -1274,13 +1274,7 @@

- (void)positionTimerEvent
{
[self updatePositionAndForceStartTime:false];
}

- (void)updatePositionAndForceStartTime:(bool)forceStartTime
{
NSTimeInterval value = forceStartTime ? _scrubberView.trimStartValue : CMTimeGetSeconds(_player.currentItem.currentTime);
[_scrubberView setValue:value];
[_scrubberView setValue:CMTimeGetSeconds(_player.currentItem.currentTime)];
}

- (void)_seekToPosition:(NSTimeInterval)position manual:(bool)__unused manual

@@ -15,9 +15,14 @@
@property (nonatomic, assign) NSTimeInterval trimStartValue;
@property (nonatomic, assign) NSTimeInterval trimEndValue;

@property (nonatomic, assign) NSTimeInterval dotValue;

@property (nonatomic, assign) NSTimeInterval maximumLength;


@property (nonatomic, assign) bool disableZoom;
@property (nonatomic, assign) bool disableTimeDisplay;

@property (nonatomic, assign) bool isPlaying;
@property (nonatomic, assign) NSTimeInterval value;
- (void)setValue:(NSTimeInterval)value resetPosition:(bool)resetPosition;

@@ -39,6 +39,8 @@ typedef enum
UIView *_rightCurtainView;
UIControl *_scrubberHandle;

UIImageView *_dotView;

UIPanGestureRecognizer *_panGestureRecognizer;
UILongPressGestureRecognizer *_pressGestureRecognizer;

@@ -46,7 +48,6 @@ typedef enum
bool _endedInteraction;

bool _scrubbing;
CGFloat _scrubbingPosition;

NSTimeInterval _duration;

@@ -125,6 +126,11 @@ typedef enum
_rightCurtainView.backgroundColor = [[TGPhotoEditorInterfaceAssets toolbarBackgroundColor] colorWithAlphaComponent:0.8f];
[_wrapperView addSubview:_rightCurtainView];

_dotView = [[UIImageView alloc] initWithFrame:CGRectMake(0, 0, 8, 8)];
_dotView.image = TGCircleImage(8.0, [TGPhotoEditorInterfaceAssets accentColor]);
_dotView.hidden = true;
[self addSubview:_dotView];

__weak TGMediaPickerGalleryVideoScrubber *weakSelf = self;
_trimView = [[TGMediaPickerGalleryVideoTrimView alloc] initWithFrame:CGRectZero];
_trimView.exclusiveTouch = true;
@@ -396,7 +402,7 @@ typedef enum

- (bool)zoomAvailable
{
if (_zoomedIn || _preparingToZoomIn || _summaryTimestamps.count == 0)
if (_disableZoom || _zoomedIn || _preparingToZoomIn || _summaryTimestamps.count == 0)
return false;

return _duration > 1.0f;
@@ -854,7 +860,7 @@ typedef enum

- (void)setValue:(NSTimeInterval)value resetPosition:(bool)resetPosition
{
if (_duration < FLT_EPSILON)
if (_duration < FLT_EPSILON || _scrubbing)
return;

if (value > _duration)
@@ -952,7 +958,7 @@ typedef enum

NSString *text = [NSString stringWithFormat:@"%@ / %@", [TGMediaPickerGalleryVideoScrubber _stringFromTotalSeconds:(NSInteger)self.value], [TGMediaPickerGalleryVideoScrubber _stringFromTotalSeconds:(NSInteger)self.duration]];

_inverseTimeLabel.text = text;
_inverseTimeLabel.text = self.disableTimeDisplay ? @"" : text;
}

#pragma mark - Scrubber Handle
@@ -1099,6 +1105,8 @@ typedef enum

_scrubbing = false;

[self setDotValue:_value];

id<TGMediaPickerGalleryVideoScrubberDelegate> delegate = self.delegate;
if ([delegate respondsToSelector:@selector(videoScrubberDidEndScrubbing:)])
[delegate videoScrubberDidEndScrubbing:self];
@@ -1192,6 +1200,29 @@ typedef enum
return CGRectMake(origin, 24, width, 40);
}

#pragma mark - Dot

- (void)setDotValue:(NSTimeInterval)dotValue
{
_dotValue = dotValue;

if (dotValue > FLT_EPSILON) {
_dotView.hidden = false;

CGPoint point = [self _scrubberPositionForPosition:dotValue duration:_duration zoomedIn:false];
_dotView.frame = CGRectMake(_wrapperView.frame.origin.x + point.x - _dotView.frame.size.width / 2.0, 8.0f, _dotView.frame.size.width, _dotView.frame.size.height);

_dotView.alpha = 0.0f;
_dotView.transform = CGAffineTransformMakeScale(0.25, 0.25);
[UIView animateWithDuration:0.2 animations:^{
_dotView.alpha = 1.0;
_dotView.transform = CGAffineTransformIdentity;
}];
} else {
_dotView.hidden = true;
}
}

#pragma mark - Trimming

- (bool)hasTrimming

@@ -2,19 +2,19 @@
#import <LegacyComponents/TGPhotoEditorUtils.h>

const CGFloat TGPaintEllipticalBrushHardness = 0.89f;
const CGFloat TGPaintEllipticalBrushAngle = 125.0f;
const CGFloat TGPaintEllipticalBrushAngle = 110.0f;
const CGFloat TGPaintEllipticalBrushRoundness = 0.35f;

@implementation TGPaintEllipticalBrush

- (CGFloat)spacing
{
return 0.05f;
return 0.075f;
}

- (CGFloat)alpha
{
return 0.5f;
return 0.17f;
}

- (CGFloat)angle

@@ -2,6 +2,7 @@

@class PGPhotoEditor;
@class TGPhotoEditorPreviewView;
@class AVPlayer;

@interface TGPhotoAvatarCropController : TGPhotoEditorTabController

@@ -16,6 +17,7 @@
- (instancetype)initWithContext:(id<LegacyComponentsContext>)context photoEditor:(PGPhotoEditor *)photoEditor previewView:(TGPhotoEditorPreviewView *)previewView;

- (void)setImage:(UIImage *)image;
- (void)setPlayer:(AVPlayer *)player;
- (void)setSnapshotImage:(UIImage *)snapshotImage;
- (void)setSnapshotView:(UIView *)snapshotView;

@@ -62,6 +62,14 @@ const CGFloat TGPhotoAvatarCropButtonsWrapperSize = 61.0f;
[super loadView];

__weak TGPhotoAvatarCropController *weakSelf = self;
void(^interactionBegan)(void) = ^
{
__strong TGPhotoAvatarCropController *strongSelf = weakSelf;
if (strongSelf == nil)
return;

self.controlVideoPlayback(false);
};
void(^interactionEnded)(void) = ^
{
__strong TGPhotoAvatarCropController *strongSelf = weakSelf;
@@ -70,6 +78,8 @@ const CGFloat TGPhotoAvatarCropButtonsWrapperSize = 61.0f;

if ([strongSelf shouldAutorotate])
[TGViewController attemptAutorotation];

self.controlVideoPlayback(true);
};

_wrapperView = [[UIView alloc] initWithFrame:self.view.bounds];
@@ -102,6 +112,7 @@ const CGFloat TGPhotoAvatarCropButtonsWrapperSize = 61.0f;
[_cropView setSnapshotImage:_snapshotImage];
_snapshotImage = nil;
}
_cropView.interactionBegan = interactionBegan;
_cropView.interactionEnded = interactionEnded;
[_wrapperView addSubview:_cropView];

@@ -113,7 +124,7 @@ const CGFloat TGPhotoAvatarCropButtonsWrapperSize = 61.0f;
_rotateButton.hitTestEdgeInsets = UIEdgeInsetsMake(-10, -10, -10, -10);
[_rotateButton addTarget:self action:@selector(rotate) forControlEvents:UIControlEventTouchUpInside];
[_rotateButton setImage:TGComponentsImageNamed(@"PhotoEditorRotateIcon") forState:UIControlStateNormal];
[_buttonsWrapperView addSubview:_rotateButton];
// [_buttonsWrapperView addSubview:_rotateButton];

_mirrorButton = [[TGModernButton alloc] initWithFrame:CGRectMake(0, 0, 36, 36)];
_mirrorButton.exclusiveTouch = true;
@@ -121,7 +132,7 @@ const CGFloat TGPhotoAvatarCropButtonsWrapperSize = 61.0f;
_mirrorButton.hitTestEdgeInsets = UIEdgeInsetsMake(-10, -10, -10, -10);
[_mirrorButton addTarget:self action:@selector(mirror) forControlEvents:UIControlEventTouchUpInside];
[_mirrorButton setImage:TGComponentsImageNamed(@"PhotoEditorMirrorIcon") forState:UIControlStateNormal];
[_buttonsWrapperView addSubview:_mirrorButton];
// [_buttonsWrapperView addSubview:_mirrorButton];

_resetButton = [[TGModernButton alloc] init];
_resetButton.contentEdgeInsets = UIEdgeInsetsMake(0.0f, 8.0f, 0.0f, 8.0f);
@@ -197,6 +208,11 @@ const CGFloat TGPhotoAvatarCropButtonsWrapperSize = 61.0f;
[_cropView setImage:image];
}

- (void)setPlayer:(AVPlayer *)player
{
[_cropView setPlayer:player];
}

- (void)setSnapshotImage:(UIImage *)snapshotImage
{
_snapshotImage = snapshotImage;
@@ -246,8 +262,11 @@ const CGFloat TGPhotoAvatarCropButtonsWrapperSize = 61.0f;

- (void)_finishedTransitionIn
{
[_cropView animateTransitionIn];
// [_cropView animateTransitionIn];
[_cropView transitionInFinishedFromCamera:true];

self.finishedTransitionIn();
self.finishedTransitionIn = nil;
}

- (void)prepareForCustomTransitionOut
@@ -265,6 +284,7 @@ const CGFloat TGPhotoAvatarCropButtonsWrapperSize = 61.0f;
_dismissing = true;

[_cropView animateTransitionOutSwitching:switching];
[_cropView invalidateVideoView];

if (switching)
{
@@ -276,33 +296,44 @@ const CGFloat TGPhotoAvatarCropButtonsWrapperSize = 61.0f;

PGPhotoEditor *photoEditor = self.photoEditor;

dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^
{
if (dispatch_semaphore_wait(_waitSemaphore, dispatch_time(DISPATCH_TIME_NOW, (int64_t)(3.0 * NSEC_PER_SEC))))
{
TGLegacyLog(@"Photo crop on switching failed");
return;
}

UIImage *croppedImage = [_cropView croppedImageWithMaxSize:TGPhotoEditorScreenImageMaxSize()];
[photoEditor setImage:croppedImage forCropRect:_cropView.cropRect cropRotation:0.0f cropOrientation:_cropView.cropOrientation cropMirrored:_cropView.cropMirrored fullSize:false];

[photoEditor processAnimated:false completion:^
{
TGDispatchOnMainThread(^
{
[previewView setSnapshotImage:croppedImage];

if (!previewView.hidden)
[previewView performTransitionInWithCompletion:nil];
else
[previewView setNeedsTransitionIn];
});
}];
if (self.item.isVideo) {
if (!previewView.hidden)
[previewView performTransitionInWithCompletion:nil];
else
[previewView setNeedsTransitionIn];

if (self.finishedPhotoProcessing != nil)
self.finishedPhotoProcessing();
});
} else {
dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^
{
if (dispatch_semaphore_wait(_waitSemaphore, dispatch_time(DISPATCH_TIME_NOW, (int64_t)(3.0 * NSEC_PER_SEC))))
{
TGLegacyLog(@"Photo crop on switching failed");
return;
}


UIImage *croppedImage = [_cropView croppedImageWithMaxSize:TGPhotoEditorScreenImageMaxSize()];
[photoEditor setImage:croppedImage forCropRect:_cropView.cropRect cropRotation:0.0f cropOrientation:_cropView.cropOrientation cropMirrored:_cropView.cropMirrored fullSize:false];

[photoEditor processAnimated:false completion:^
{
TGDispatchOnMainThread(^
{
[previewView setSnapshotImage:croppedImage];

if (!previewView.hidden)
[previewView performTransitionInWithCompletion:nil];
else
[previewView setNeedsTransitionIn];
});
}];

if (self.finishedPhotoProcessing != nil)
self.finishedPhotoProcessing();
});
}

UIInterfaceOrientation orientation = [[LegacyComponentsGlobals provider] applicationStatusBarOrientation];
if ([self inFormSheet] || [UIDevice currentDevice].userInterfaceIdiom == UIUserInterfaceIdiomPad)
@@ -317,7 +348,11 @@ const CGFloat TGPhotoAvatarCropButtonsWrapperSize = 61.0f;
CGRect referenceBounds = CGRectMake(0, 0, referenceSize.width, referenceSize.height);
CGRect containerFrame = [TGPhotoEditorTabController photoContainerFrameForParentViewFrame:referenceBounds toolbarLandscapeSize:self.toolbarLandscapeSize orientation:orientation panelSize:TGPhotoEditorPanelSize hasOnScreenNavigation:hasOnScreenNavigation];

if (self.switchingToTab == TGPhotoEditorPaintTab)
if (self.switchingToTab == TGPhotoEditorPreviewTab)
{
containerFrame = [TGPhotoEditorTabController photoContainerFrameForParentViewFrame:referenceBounds toolbarLandscapeSize:self.toolbarLandscapeSize orientation:orientation panelSize:0 hasOnScreenNavigation:hasOnScreenNavigation];
}
else if (self.switchingToTab == TGPhotoEditorPaintTab)
{
containerFrame = [TGPhotoPaintController photoContainerFrameForParentViewFrame:referenceBounds toolbarLandscapeSize:self.toolbarLandscapeSize orientation:orientation panelSize:TGPhotoPaintTopPanelSize + TGPhotoPaintBottomPanelSize hasOnScreenNavigation:hasOnScreenNavigation];
}
@@ -640,7 +675,28 @@ const CGFloat TGPhotoAvatarCropButtonsWrapperSize = 61.0f;

- (TGPhotoEditorTab)availableTabs
{
return iosMajorVersion() >= 7 ? (TGPhotoEditorPaintTab | TGPhotoEditorToolsTab) : TGPhotoEditorNoneTab;
return TGPhotoEditorRotateTab | TGPhotoEditorMirrorTab;
}

- (void)handleTabAction:(TGPhotoEditorTab)tab
{
switch (tab)
{
case TGPhotoEditorRotateTab:
{
[self rotate];
}
break;

case TGPhotoEditorMirrorTab:
{
[self mirror];
}
break;

default:
break;
}
}

@end

@ -1,4 +1,5 @@
|
||||
#import "TGPhotoAvatarCropView.h"
|
||||
#import <AVFoundation/AVFoundation.h>
|
||||
|
||||
#import <LegacyComponents/LegacyComponents.h>
|
||||
|
||||
@ -7,6 +8,8 @@
|
||||
#import <LegacyComponents/TGPhotoEditorAnimation.h>
|
||||
#import "TGPhotoEditorInterfaceAssets.h"
|
||||
|
||||
#import "TGModernGalleryVideoView.h"
|
||||
|
||||
const CGFloat TGPhotoAvatarCropViewOverscreenSize = 1000;
|
||||
|
||||
@interface TGPhotoAvatarCropView () <UIScrollViewDelegate>
|
||||
@ -18,6 +21,7 @@ const CGFloat TGPhotoAvatarCropViewOverscreenSize = 1000;
|
||||
UIScrollView *_scrollView;
|
||||
UIView *_wrapperView;
|
||||
UIImageView *_imageView;
|
||||
TGModernGalleryVideoView *_videoView;
|
||||
UIView *_snapshotView;
|
||||
CGSize _snapshotSize;
|
||||
|
||||
@ -135,6 +139,23 @@ const CGFloat TGPhotoAvatarCropViewOverscreenSize = 1000;
|
||||
[self reloadImageIfNeeded];
|
||||
}
|
||||
|
||||
- (void)setPlayer:(AVPlayer *)player
|
||||
{
|
||||
_player = player;
|
||||
|
||||
_videoView = [[TGModernGalleryVideoView alloc] initWithFrame:_imageView.bounds player:player];
|
||||
_videoView.frame = _imageView.frame;
|
||||
_videoView.playerLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
|
||||
_videoView.playerLayer.opaque = false;
|
||||
_videoView.playerLayer.backgroundColor = nil;
|
||||
[_imageView.superview insertSubview:_videoView aboveSubview:_imageView];
|
||||
}
|
||||
|
||||
- (void)invalidateVideoView
|
||||
{
|
||||
_videoView.player = nil;
|
||||
}
|
||||
|
||||
- (void)reloadImageIfNeeded
|
||||
{
|
||||
if (!_imageReloadingNeeded)
|
||||
@ -283,6 +304,18 @@ const CGFloat TGPhotoAvatarCropViewOverscreenSize = 1000;
|
||||
|
||||
#pragma mark - Scroll View
|
||||
|
||||
- (void)scrollViewWillBeginDragging:(UIScrollView *)scrollView
|
||||
{
|
||||
if (self.interactionBegan != nil)
|
||||
self.interactionBegan();
|
||||
}
|
||||
|
||||
- (void)scrollViewWillBeginZooming:(UIScrollView *)scrollView withView:(UIView *)view
|
||||
{
|
||||
if (self.interactionBegan != nil)
|
||||
self.interactionBegan();
|
||||
}
|
||||
|
||||
- (void)scrollViewDidZoom:(UIScrollView *)__unused scrollView
|
||||
{
|
||||
[self adjustScrollView];
|
||||
@ -382,6 +415,7 @@ const CGFloat TGPhotoAvatarCropViewOverscreenSize = 1000;
|
||||
{
|
||||
_cropMirrored = cropMirrored;
|
||||
_imageView.transform = CGAffineTransformMakeScale(self.cropMirrored ? -1.0f : 1.0f, 1.0f);
|
||||
_videoView.transform = _imageView.transform;
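// A negative x-scale flips content horizontally; copying the transform keeps
// the live video preview mirrored in lockstep with the still image.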
}

- (void)invalidateCropRect
@ -438,27 +472,36 @@ const CGFloat TGPhotoAvatarCropViewOverscreenSize = 1000;
_leftOverlayView.alpha = 0.0f;
_rightOverlayView.alpha = 0.0f;
_bottomOverlayView.alpha = 0.0f;
}

- (void)transitionInFinishedFromCamera:(bool)fromCamera
{
if (fromCamera)
{
[UIView animateWithDuration:0.3f animations:^
{
_topOverlayView.alpha = 1.0f;
_leftOverlayView.alpha = 1.0f;
_rightOverlayView.alpha = 1.0f;
_bottomOverlayView.alpha = 1.0f;
}];
}
else

[UIView animateWithDuration:0.3f animations:^
{
_topOverlayView.alpha = 1.0f;
_leftOverlayView.alpha = 1.0f;
_rightOverlayView.alpha = 1.0f;
_bottomOverlayView.alpha = 1.0f;
}
_areaMaskView.alpha = 1.0f;
}];
}

- (void)transitionInFinishedFromCamera:(bool)fromCamera
{
// if (fromCamera)
// {
// [UIView animateWithDuration:0.3f animations:^
// {
// _topOverlayView.alpha = 1.0f;
// _leftOverlayView.alpha = 1.0f;
// _rightOverlayView.alpha = 1.0f;
// _bottomOverlayView.alpha = 1.0f;
// }];
// }
// else
// {
// _topOverlayView.alpha = 1.0f;
// _leftOverlayView.alpha = 1.0f;
// _rightOverlayView.alpha = 1.0f;
// _bottomOverlayView.alpha = 1.0f;
// }

_scrollView.hidden = false;
_scrollView.backgroundColor = [UIColor clearColor];

@ -0,0 +1,14 @@
#import <LegacyComponents/TGPhotoEditorTabController.h>

@class PGPhotoEditor;
@class PGPhotoTool;
@class TGPhotoEditorPreviewView;

@interface TGPhotoAvatarPreviewController : TGPhotoEditorTabController

- (instancetype)initWithContext:(id<LegacyComponentsContext>)context photoEditor:(PGPhotoEditor *)photoEditor previewView:(TGPhotoEditorPreviewView *)previewView;

- (void)setScrubberPosition:(NSTimeInterval)position reset:(bool)reset;
- (void)setScrubberPlaying:(bool)value;

@end

@ -0,0 +1,872 @@
#import "TGPhotoAvatarPreviewController.h"

#import "LegacyComponentsInternal.h"

#import <LegacyComponents/TGPhotoEditorAnimation.h>
#import "TGPhotoEditorInterfaceAssets.h"

#import "PGPhotoEditor.h"
#import <LegacyComponents/TGPhotoEditorUtils.h>
#import <LegacyComponents/TGPaintUtils.h>

#import "TGPhotoEditorController.h"
#import "TGPhotoEditorPreviewView.h"
#import "TGPhotoEditorSparseView.h"

#import "TGMediaPickerGalleryVideoScrubber.h"

const CGFloat TGPhotoAvatarPreviewPanelSize = 96.0f;
const CGFloat TGPhotoAvatarPreviewLandscapePanelSize = TGPhotoAvatarPreviewPanelSize + 40.0f;

@interface TGPhotoAvatarPreviewController () <TGMediaPickerGalleryVideoScrubberDataSource, TGMediaPickerGalleryVideoScrubberDelegate>
{
bool _appeared;

TGPhotoEditorSparseView *_wrapperView;
UIView *_portraitToolsWrapperView;
UIView *_landscapeToolsWrapperView;
UIView *_portraitWrapperBackgroundView;
UIView *_landscapeWrapperBackgroundView;

UIView *_videoAreaView;
UIView *_flashView;
UIView *_portraitToolControlView;
UIView *_landscapeToolControlView;
UIImageView *_areaMaskView;
CGFloat _currentDiameter;

TGMediaPickerGalleryVideoScrubber *_scrubberView;
UILabel *_coverLabel;
bool _wasPlayingBeforeScrubbing;
bool _requestingThumbnails;
SMetaDisposable *_thumbnailsDisposable;
}

@property (nonatomic, weak) PGPhotoEditor *photoEditor;
@property (nonatomic, weak) TGPhotoEditorPreviewView *previewView;

@end

@implementation TGPhotoAvatarPreviewController

- (instancetype)initWithContext:(id<LegacyComponentsContext>)context photoEditor:(PGPhotoEditor *)photoEditor previewView:(TGPhotoEditorPreviewView *)previewView
{
self = [super initWithContext:context];
if (self != nil)
{
self.photoEditor = photoEditor;
self.previewView = previewView;

_thumbnailsDisposable = [[SMetaDisposable alloc] init];
}
return self;
}

- (void)loadView
{
[super loadView];
self.view.autoresizingMask = UIViewAutoresizingFlexibleWidth | UIViewAutoresizingFlexibleHeight;

[self.view addSubview:_previewView];

_wrapperView = [[TGPhotoEditorSparseView alloc] initWithFrame:CGRectZero];
[self.view addSubview:_wrapperView];

_portraitToolsWrapperView = [[UIView alloc] initWithFrame:CGRectZero];
_portraitToolsWrapperView.alpha = 0.0f;
[_wrapperView addSubview:_portraitToolsWrapperView];

_portraitWrapperBackgroundView = [[UIView alloc] initWithFrame:_portraitToolsWrapperView.bounds];
_portraitWrapperBackgroundView.autoresizingMask = UIViewAutoresizingFlexibleWidth | UIViewAutoresizingFlexibleHeight;
_portraitWrapperBackgroundView.backgroundColor = [TGPhotoEditorInterfaceAssets toolbarTransparentBackgroundColor];
_portraitWrapperBackgroundView.userInteractionEnabled = false;
[_portraitToolsWrapperView addSubview:_portraitWrapperBackgroundView];

_landscapeToolsWrapperView = [[UIView alloc] initWithFrame:CGRectZero];
_landscapeToolsWrapperView.alpha = 0.0f;
[_wrapperView addSubview:_landscapeToolsWrapperView];

_landscapeWrapperBackgroundView = [[UIView alloc] initWithFrame:_landscapeToolsWrapperView.bounds];
_landscapeWrapperBackgroundView.autoresizingMask = UIViewAutoresizingFlexibleWidth | UIViewAutoresizingFlexibleHeight;
_landscapeWrapperBackgroundView.backgroundColor = [TGPhotoEditorInterfaceAssets toolbarTransparentBackgroundColor];
_landscapeWrapperBackgroundView.userInteractionEnabled = false;
[_landscapeToolsWrapperView addSubview:_landscapeWrapperBackgroundView];

_videoAreaView = [[UIView alloc] init];
[self.view insertSubview:_videoAreaView belowSubview:_wrapperView];

_flashView = [[UIView alloc] init];
_flashView.alpha = 0.0;
_flashView.backgroundColor = [UIColor whiteColor];
_flashView.userInteractionEnabled = false;
[_videoAreaView addSubview:_flashView];

_areaMaskView = [[UIImageView alloc] init];
_areaMaskView.alpha = 0.0f;
[self.view insertSubview:_areaMaskView aboveSubview:_videoAreaView];

_scrubberView = [[TGMediaPickerGalleryVideoScrubber alloc] initWithFrame:CGRectMake(0.0f, 0.0, _portraitToolsWrapperView.frame.size.width, 68.0f)];
_scrubberView.autoresizingMask = UIViewAutoresizingFlexibleWidth;
_scrubberView.dataSource = self;
_scrubberView.delegate = self;
[_portraitToolsWrapperView addSubview:_scrubberView];

_coverLabel = [[UILabel alloc] init];
_coverLabel.alpha = 0.7f;
_coverLabel.backgroundColor = [UIColor clearColor];
_coverLabel.font = TGSystemFontOfSize(14.0f);
_coverLabel.textColor = [UIColor whiteColor];
_coverLabel.text = TGLocalized(@"PhotoEditor.SelectCoverFrame");
[_coverLabel sizeToFit];
[_portraitToolsWrapperView addSubview:_coverLabel];
}

- (void)dealloc
{
[_thumbnailsDisposable dispose];
}

- (void)viewDidAppear:(BOOL)animated
{
[super viewDidAppear:animated];

[self transitionIn];
}

- (void)viewDidLoad
{
[super viewDidLoad];

_scrubberView.allowsTrimming = true;
_scrubberView.disableZoom = true;
_scrubberView.disableTimeDisplay = true;
_scrubberView.trimStartValue = 0.0;
_scrubberView.trimEndValue = self.item.originalDuration;
[_scrubberView reloadData];
[_scrubberView resetToStart];
}

- (BOOL)shouldAutorotate
{
TGPhotoEditorPreviewView *previewView = self.previewView;
return (!previewView.isTracking && [super shouldAutorotate]);
}

- (bool)isDismissAllowed
{
return _appeared;
}

#pragma mark - Transition

- (void)transitionIn
{
[UIView animateWithDuration:0.3f animations:^
{
_portraitToolsWrapperView.alpha = 1.0f;
_landscapeToolsWrapperView.alpha = 1.0f;
}];

UIInterfaceOrientation orientation = self.interfaceOrientation;
if ([UIDevice currentDevice].userInterfaceIdiom == UIUserInterfaceIdiomPad)
orientation = UIInterfaceOrientationPortrait;

switch (orientation)
{
case UIInterfaceOrientationLandscapeLeft:
{
_landscapeToolsWrapperView.transform = CGAffineTransformMakeTranslation(-_landscapeToolsWrapperView.frame.size.width / 3.0f * 2.0f, 0.0f);
[UIView animateWithDuration:0.3 delay:0.0 options:7 << 16 animations:^
{
_landscapeToolsWrapperView.transform = CGAffineTransformIdentity;
} completion:nil];
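// 7 << 16 places animation curve 7 (the undocumented keyboard curve) into the
// UIViewAnimationOptions curve bit field, matching system-driven motion.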
}
break;

case UIInterfaceOrientationLandscapeRight:
{
_landscapeToolsWrapperView.transform = CGAffineTransformMakeTranslation(_landscapeToolsWrapperView.frame.size.width / 3.0f * 2.0f, 0.0f);
[UIView animateWithDuration:0.3 delay:0.0 options:7 << 16 animations:^
{
_landscapeToolsWrapperView.transform = CGAffineTransformIdentity;
} completion:nil];
}
break;

default:
{
_portraitToolsWrapperView.transform = CGAffineTransformMakeTranslation(0.0f, _portraitToolsWrapperView.frame.size.height / 3.0f * 2.0f);
[UIView animateWithDuration:0.3 delay:0.0 options:7 << 16 animations:^
{
_portraitToolsWrapperView.transform = CGAffineTransformIdentity;
} completion:nil];
}
break;
}
}

- (void)transitionOutSwitching:(bool)__unused switching completion:(void (^)(void))completion
{
TGPhotoEditorPreviewView *previewView = self.previewView;
previewView.touchedUp = nil;
previewView.touchedDown = nil;
previewView.tapped = nil;
previewView.interactionEnded = nil;

[_videoAreaView.superview bringSubviewToFront:_videoAreaView];

UIInterfaceOrientation orientation = self.interfaceOrientation;
if ([UIDevice currentDevice].userInterfaceIdiom == UIUserInterfaceIdiomPad)
orientation = UIInterfaceOrientationPortrait;

switch (orientation)
{
case UIInterfaceOrientationLandscapeLeft:
{
[UIView animateWithDuration:0.3 delay:0.0 options:7 << 16 animations:^
{
_landscapeToolsWrapperView.transform = CGAffineTransformMakeTranslation(-_landscapeToolsWrapperView.frame.size.width / 3.0f * 2.0f, 0.0f);
} completion:nil];
}
break;

case UIInterfaceOrientationLandscapeRight:
{
[UIView animateWithDuration:0.3 delay:0.0 options:7 << 16 animations:^
{
_landscapeToolsWrapperView.transform = CGAffineTransformMakeTranslation(_landscapeToolsWrapperView.frame.size.width / 3.0f * 2.0f, 0.0f);
} completion:nil];
}
break;

default:
{
[UIView animateWithDuration:0.3 delay:0.0 options:7 << 16 animations:^
{
_portraitToolsWrapperView.transform = CGAffineTransformMakeTranslation(0.0f, _portraitToolsWrapperView.frame.size.height / 3.0f * 2.0f);
} completion:nil];
}
break;
}

[UIView animateWithDuration:0.2f animations:^
{
_portraitToolsWrapperView.alpha = 0.0f;
_landscapeToolsWrapperView.alpha = 0.0f;
_videoAreaView.alpha = 0.0f;
} completion:^(__unused BOOL finished)
{
if (completion != nil)
completion();
}];
}

- (void)_animatePreviewViewTransitionOutToFrame:(CGRect)targetFrame saving:(bool)saving parentView:(UIView *)parentView completion:(void (^)(void))completion
{
_dismissing = true;

TGPhotoEditorPreviewView *previewView = self.previewView;
[previewView prepareForTransitionOut];

UIView *snapshotView = nil;
POPSpringAnimation *snapshotAnimation = nil;

if (saving && CGRectIsNull(targetFrame) && parentView != nil)
{
snapshotView = [previewView snapshotViewAfterScreenUpdates:false];
snapshotView.frame = previewView.frame;

CGSize fittedSize = TGScaleToSize(previewView.frame.size, self.view.frame.size);
targetFrame = CGRectMake((self.view.frame.size.width - fittedSize.width) / 2, (self.view.frame.size.height - fittedSize.height) / 2, fittedSize.width, fittedSize.height);

[parentView addSubview:snapshotView];

snapshotAnimation = [TGPhotoEditorAnimation prepareTransitionAnimationForPropertyNamed:kPOPViewFrame];
snapshotAnimation.fromValue = [NSValue valueWithCGRect:snapshotView.frame];
snapshotAnimation.toValue = [NSValue valueWithCGRect:targetFrame];
}

POPSpringAnimation *previewAnimation = [TGPhotoEditorAnimation prepareTransitionAnimationForPropertyNamed:kPOPViewFrame];
previewAnimation.fromValue = [NSValue valueWithCGRect:previewView.frame];
previewAnimation.toValue = [NSValue valueWithCGRect:targetFrame];

POPSpringAnimation *previewAlphaAnimation = [TGPhotoEditorAnimation prepareTransitionAnimationForPropertyNamed:kPOPViewAlpha];
previewAlphaAnimation.fromValue = @(previewView.alpha);
previewAlphaAnimation.toValue = @(0.0f);

NSMutableArray *animations = [NSMutableArray arrayWithArray:@[ previewAnimation, previewAlphaAnimation ]];
if (snapshotAnimation != nil)
[animations addObject:snapshotAnimation];

[TGPhotoEditorAnimation performBlock:^(__unused bool allFinished)
{
[snapshotView removeFromSuperview];

if (completion != nil)
completion();
} whenCompletedAllAnimations:animations];

if (snapshotAnimation != nil)
[snapshotView pop_addAnimation:snapshotAnimation forKey:@"frame"];
[previewView pop_addAnimation:previewAnimation forKey:@"frame"];
[previewView pop_addAnimation:previewAlphaAnimation forKey:@"alpha"];
}

- (void)_finishedTransitionInWithView:(UIView *)transitionView
{
_appeared = true;

[transitionView removeFromSuperview];

TGPhotoEditorPreviewView *previewView = _previewView;
previewView.hidden = false;
[previewView performTransitionInIfNeeded];

PGPhotoEditor *photoEditor = self.photoEditor;
[photoEditor processAnimated:false completion:nil];
}

- (void)prepareForCustomTransitionOut
{
_previewView.hidden = true;
[UIView animateWithDuration:0.3f animations:^
{
_portraitToolsWrapperView.alpha = 0.0f;
_landscapeToolsWrapperView.alpha = 0.0f;
} completion:nil];
}

- (CGRect)transitionOutReferenceFrame
{
TGPhotoEditorPreviewView *previewView = _previewView;
return previewView.frame;
}

- (UIView *)transitionOutReferenceView
{
return _previewView;
}

- (UIView *)snapshotView
{
TGPhotoEditorPreviewView *previewView = self.previewView;
return [previewView originalSnapshotView];
}

- (id)currentResultRepresentation
{
return [self snapshotView];
// return TGPaintCombineCroppedImages(self.photoEditor.currentResultImage, self.photoEditor.paintingData.image, true, self.photoEditor.originalSize, self.photoEditor.cropRect, self.photoEditor.cropOrientation, self.photoEditor.cropRotation, self.photoEditor.cropMirrored);
}

#pragma mark - Layout

- (void)willRotateToInterfaceOrientation:(UIInterfaceOrientation)toInterfaceOrientation duration:(NSTimeInterval)duration
{
[self.view setNeedsLayout];

[super willRotateToInterfaceOrientation:toInterfaceOrientation duration:duration];
}

- (void)viewWillLayoutSubviews
{
[super viewWillLayoutSubviews];

[self updateLayout:[[LegacyComponentsGlobals provider] applicationStatusBarOrientation]];
}

- (CGRect)transitionOutSourceFrameForReferenceFrame:(CGRect)referenceFrame orientation:(UIInterfaceOrientation)orientation
{
bool hasOnScreenNavigation = false;
if (iosMajorVersion() >= 11)
hasOnScreenNavigation = (self.viewLoaded && self.view.safeAreaInsets.bottom > FLT_EPSILON) || self.context.safeAreaInset.bottom > FLT_EPSILON;

CGRect containerFrame = [TGPhotoAvatarPreviewController photoContainerFrameForParentViewFrame:self.view.frame toolbarLandscapeSize:self.toolbarLandscapeSize orientation:orientation panelSize:0 hasOnScreenNavigation:hasOnScreenNavigation];
CGSize fittedSize = TGScaleToSize(referenceFrame.size, containerFrame.size);
CGRect sourceFrame = CGRectMake(containerFrame.origin.x + (containerFrame.size.width - fittedSize.width) / 2, containerFrame.origin.y + (containerFrame.size.height - fittedSize.height) / 2, fittedSize.width, fittedSize.height);

return sourceFrame;
}

- (CGRect)_targetFrameForTransitionInFromFrame:(CGRect)fromFrame
{
CGSize referenceSize = [self referenceViewSize];
UIInterfaceOrientation orientation = self.interfaceOrientation;

if ([UIDevice currentDevice].userInterfaceIdiom == UIUserInterfaceIdiomPad)
orientation = UIInterfaceOrientationPortrait;

bool hasOnScreenNavigation = false;
if (iosMajorVersion() >= 11)
hasOnScreenNavigation = (self.viewLoaded && self.view.safeAreaInsets.bottom > FLT_EPSILON) || self.context.safeAreaInset.bottom > FLT_EPSILON;

CGRect containerFrame = [TGPhotoAvatarPreviewController photoContainerFrameForParentViewFrame:CGRectMake(0, 0, referenceSize.width, referenceSize.height) toolbarLandscapeSize:self.toolbarLandscapeSize orientation:orientation panelSize:0 hasOnScreenNavigation:hasOnScreenNavigation];
CGSize fittedSize = TGScaleToSize(fromFrame.size, containerFrame.size);
CGRect toFrame = CGRectMake(containerFrame.origin.x + (containerFrame.size.width - fittedSize.width) / 2, containerFrame.origin.y + (containerFrame.size.height - fittedSize.height) / 2, fittedSize.width, fittedSize.height);

return toFrame;
}

+ (CGRect)photoContainerFrameForParentViewFrame:(CGRect)parentViewFrame toolbarLandscapeSize:(CGFloat)toolbarLandscapeSize orientation:(UIInterfaceOrientation)orientation panelSize:(CGFloat)panelSize hasOnScreenNavigation:(bool)hasOnScreenNavigation
{
CGRect frame = [TGPhotoEditorTabController photoContainerFrameForParentViewFrame:parentViewFrame toolbarLandscapeSize:toolbarLandscapeSize orientation:orientation panelSize:panelSize hasOnScreenNavigation:hasOnScreenNavigation];

return frame;
}

- (void)updateToolViews
{
UIInterfaceOrientation orientation = self.interfaceOrientation;
if ([self inFormSheet] || TGIsPad())
{
_landscapeToolsWrapperView.hidden = true;
orientation = UIInterfaceOrientationPortrait;
}

CGSize referenceSize = [self referenceViewSize];

CGFloat screenSide = MAX(referenceSize.width, referenceSize.height) + 2 * TGPhotoAvatarPreviewPanelSize;
_wrapperView.frame = CGRectMake((referenceSize.width - screenSide) / 2, (referenceSize.height - screenSide) / 2, screenSide, screenSide);

CGFloat panelSize = UIInterfaceOrientationIsPortrait(orientation) ? TGPhotoAvatarPreviewPanelSize : TGPhotoAvatarPreviewLandscapePanelSize;
// if (_portraitToolControlView != nil)
// panelSize = TGPhotoEditorPanelSize;

CGFloat panelToolbarPortraitSize = panelSize + TGPhotoEditorToolbarSize;
CGFloat panelToolbarLandscapeSize = panelSize + TGPhotoEditorToolbarSize;

bool hasOnScreenNavigation = false;
if (iosMajorVersion() >= 11)
hasOnScreenNavigation = (self.viewLoaded && self.view.safeAreaInsets.bottom > FLT_EPSILON) || self.context.safeAreaInset.bottom > FLT_EPSILON;

UIEdgeInsets safeAreaInset = [TGViewController safeAreaInsetForOrientation:orientation hasOnScreenNavigation:hasOnScreenNavigation];
UIEdgeInsets screenEdges = UIEdgeInsetsMake((screenSide - referenceSize.height) / 2, (screenSide - referenceSize.width) / 2, (screenSide + referenceSize.height) / 2, (screenSide + referenceSize.width) / 2);
screenEdges.top += safeAreaInset.top;
screenEdges.left += safeAreaInset.left;
screenEdges.bottom -= safeAreaInset.bottom;
screenEdges.right -= safeAreaInset.right;
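// screenEdges maps the oversized square wrapper back to the visible screen
// rectangle, inset by the safe area, so the panel frames below can be laid
// out in wrapper coordinates.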

switch (orientation)
{
case UIInterfaceOrientationLandscapeLeft:
{
[UIView performWithoutAnimation:^
{
_landscapeToolsWrapperView.frame = CGRectMake(0, screenEdges.top, panelToolbarLandscapeSize, _landscapeToolsWrapperView.frame.size.height);
// _landscapeCollectionView.frame = CGRectMake(panelToolbarLandscapeSize - panelSize, 0, panelSize, _landscapeCollectionView.frame.size.height);
}];

_landscapeToolsWrapperView.frame = CGRectMake(screenEdges.left, screenEdges.top, panelToolbarLandscapeSize, referenceSize.height);
// _landscapeCollectionView.frame = CGRectMake(_landscapeCollectionView.frame.origin.x, _landscapeCollectionView.frame.origin.y, _landscapeCollectionView.frame.size.width, _landscapeToolsWrapperView.frame.size.height);

_portraitToolsWrapperView.frame = CGRectMake(screenEdges.left, screenSide - panelToolbarPortraitSize, referenceSize.width, panelToolbarPortraitSize);
// _portraitCollectionView.frame = CGRectMake(0, 0, _portraitToolsWrapperView.frame.size.width, panelSize);

_portraitToolsWrapperView.frame = CGRectMake((screenSide - referenceSize.width) / 2, screenSide - panelToolbarPortraitSize, referenceSize.width, panelToolbarPortraitSize);
}
break;

case UIInterfaceOrientationLandscapeRight:
{
[UIView performWithoutAnimation:^
{
_landscapeToolsWrapperView.frame = CGRectMake(screenSide - panelToolbarLandscapeSize, screenEdges.top, panelToolbarLandscapeSize, _landscapeToolsWrapperView.frame.size.height);
// _landscapeCollectionView.frame = CGRectMake(0, 0, panelSize, _landscapeCollectionView.frame.size.height);
}];

_landscapeToolsWrapperView.frame = CGRectMake(screenEdges.right - panelToolbarLandscapeSize, screenEdges.top, panelToolbarLandscapeSize, referenceSize.height);
// _landscapeCollectionView.frame = CGRectMake(_landscapeCollectionView.frame.origin.x, _landscapeCollectionView.frame.origin.y, _landscapeCollectionView.frame.size.width, _landscapeToolsWrapperView.frame.size.height);

_portraitToolsWrapperView.frame = CGRectMake(screenEdges.top, screenSide - panelToolbarPortraitSize, referenceSize.width, panelToolbarPortraitSize);
// _portraitCollectionView.frame = CGRectMake(0, 0, _portraitToolsWrapperView.frame.size.width, panelSize);

_portraitToolsWrapperView.frame = CGRectMake((screenSide - referenceSize.width) / 2, screenSide - panelToolbarPortraitSize, referenceSize.width, panelToolbarPortraitSize);
}
break;

default:
{
[UIView performWithoutAnimation:^
{
_portraitToolControlView.frame = CGRectMake(0, 0, referenceSize.width, panelSize);
}];

CGFloat x = _landscapeToolsWrapperView.frame.origin.x;
if (x < screenSide / 2)
x = 0;
else
x = screenSide - TGPhotoAvatarPreviewPanelSize;
_landscapeToolsWrapperView.frame = CGRectMake(x, screenEdges.top, panelToolbarLandscapeSize, referenceSize.height);
// _landscapeCollectionView.frame = CGRectMake(_landscapeCollectionView.frame.origin.x, _landscapeCollectionView.frame.origin.y, panelSize, _landscapeToolsWrapperView.frame.size.height);

_portraitToolsWrapperView.frame = CGRectMake(screenEdges.left, screenEdges.bottom - panelToolbarPortraitSize, referenceSize.width, panelToolbarPortraitSize);

_scrubberView.frame = CGRectMake(0.0, 0.0, _portraitToolsWrapperView.frame.size.width, _scrubberView.frame.size.height);
_coverLabel.frame = CGRectMake(floor((_portraitToolsWrapperView.frame.size.width - _coverLabel.frame.size.width) / 2.0), CGRectGetMaxY(_scrubberView.frame) + 6.0, _coverLabel.frame.size.width, _coverLabel.frame.size.height);
// _portraitCollectionView.frame = CGRectMake(0, 0, _portraitToolsWrapperView.frame.size.width, panelSize);
}
break;
}
}

- (void)updatePreviewView
{
UIInterfaceOrientation orientation = self.interfaceOrientation;
if ([self inFormSheet] || TGIsPad())
orientation = UIInterfaceOrientationPortrait;

CGSize referenceSize = [self referenceViewSize];

PGPhotoEditor *photoEditor = self.photoEditor;
TGPhotoEditorPreviewView *previewView = self.previewView;

if (_dismissing || previewView.superview != self.view)
return;

bool hasOnScreenNavigation = false;
if (iosMajorVersion() >= 11)
hasOnScreenNavigation = (self.viewLoaded && self.view.safeAreaInsets.bottom > FLT_EPSILON) || self.context.safeAreaInset.bottom > FLT_EPSILON;

CGRect containerFrame = [TGPhotoAvatarPreviewController photoContainerFrameForParentViewFrame:CGRectMake(0, 0, referenceSize.width, referenceSize.height) toolbarLandscapeSize:self.toolbarLandscapeSize orientation:orientation panelSize:0 hasOnScreenNavigation:hasOnScreenNavigation];
CGSize fittedSize = TGScaleToSize(photoEditor.rotatedCropSize, containerFrame.size);
previewView.frame = CGRectMake(containerFrame.origin.x + (containerFrame.size.width - fittedSize.width) / 2, containerFrame.origin.y + (containerFrame.size.height - fittedSize.height) / 2, fittedSize.width, fittedSize.height);

[UIView performWithoutAnimation:^
{
_videoAreaView.frame = _previewView.frame;
_flashView.frame = _videoAreaView.bounds;
_areaMaskView.frame = _previewView.frame;

[self updateCircleImage];
}];
}

- (void)updateCircleImage
{
CGFloat diameter = _areaMaskView.frame.size.width;

if (fabs(diameter - _currentDiameter) < DBL_EPSILON)
return;

_currentDiameter = diameter;

UIGraphicsBeginImageContextWithOptions(CGSizeMake(diameter, diameter), false, 0.0f);
CGContextRef context = UIGraphicsGetCurrentContext();
CGContextSetFillColorWithColor(context, [TGPhotoEditorInterfaceAssets cropTransparentOverlayColor].CGColor);

UIBezierPath *path = [UIBezierPath bezierPathWithOvalInRect:CGRectMake(0, 0, diameter, diameter)];
[path appendPath:[UIBezierPath bezierPathWithRect:CGRectMake(0, 0, diameter, diameter)]];
path.usesEvenOddFillRule = true;
[path fill];
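// With the even-odd rule, the oval and the enclosing rect cancel where they
// overlap, leaving a dimmed square with a clear circular hole for the avatar.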

UIImage *areaMaskImage = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();

_areaMaskView.image = areaMaskImage;
}

- (void)updateLayout:(UIInterfaceOrientation)orientation
{
if ([self inFormSheet] || TGIsPad())
orientation = UIInterfaceOrientationPortrait;

if (!_dismissing)
[self updateToolViews];
dispatch_async(dispatch_get_main_queue(), ^{
[_scrubberView reloadThumbnails];
});

[self updatePreviewView];
}

- (TGPhotoEditorTab)availableTabs
{
return TGPhotoEditorPaintTab | TGPhotoEditorToolsTab;
}

- (TGPhotoEditorTab)activeTab
{
return TGPhotoEditorCropTab;
}

- (TGPhotoEditorTab)highlightedTabs
{
bool hasSimpleValue = false;
bool hasBlur = false;
bool hasCurves = false;
bool hasTint = false;

// for (PGPhotoTool *tool in _allTools)
// {
// if (tool.isSimple)
// {
// if (tool.stringValue != nil)
// hasSimpleValue = true;
// }
// else if ([tool isKindOfClass:[PGBlurTool class]] && tool.stringValue != nil)
// {
// hasBlur = true;
// }
// else if ([tool isKindOfClass:[PGCurvesTool class]] && tool.stringValue != nil)
// {
// hasCurves = true;
// }
// else if ([tool isKindOfClass:[PGTintTool class]] && tool.stringValue != nil)
// {
// hasTint = true;
// }
// }
//
TGPhotoEditorTab tabs = TGPhotoEditorNoneTab;

if (hasSimpleValue)
tabs |= TGPhotoEditorToolsTab;
if (hasBlur)
tabs |= TGPhotoEditorBlurTab;
if (hasCurves)
tabs |= TGPhotoEditorCurvesTab;
if (hasTint)
tabs |= TGPhotoEditorTintTab;

return tabs;
}

- (void)setPlayButtonHidden:(bool)hidden animated:(bool)animated
{
// if (animated)
// {
// _actionButton.hidden = false;
// [UIView animateWithDuration:0.15f animations:^
// {
// _actionButton.alpha = hidden ? 0.0f : 1.0f;
// } completion:^(BOOL finished)
// {
// if (finished)
// _actionButton.hidden = hidden;
// }];
// }
// else
// {
// _actionButton.alpha = hidden ? 0.0f : 1.0f;
// _actionButton.hidden = hidden;
// }
}

#pragma mark - Video Scrubber Data Source & Delegate

#pragma mark Scrubbing

- (NSTimeInterval)videoScrubberDuration:(TGMediaPickerGalleryVideoScrubber *)__unused videoScrubber
{
return self.item.originalDuration;
}

- (CGFloat)videoScrubberThumbnailAspectRatio:(TGMediaPickerGalleryVideoScrubber *)__unused videoScrubber
{
if (CGSizeEqualToSize(self.item.originalSize, CGSizeZero))
return 1.0f;

return self.item.originalSize.width / self.item.originalSize.height;
}

- (void)videoScrubberDidBeginScrubbing:(TGMediaPickerGalleryVideoScrubber *)__unused videoScrubber
{
_wasPlayingBeforeScrubbing = true;
self.controlVideoPlayback(false);

_scrubberView.dotValue = 0.0;

_coverLabel.alpha = 1.0f;

[self setPlayButtonHidden:true animated:false];

[UIView animateWithDuration:0.2 animations:^{
_areaMaskView.alpha = 1.0f;
}];
}

- (void)videoScrubberDidEndScrubbing:(TGMediaPickerGalleryVideoScrubber *)__unused videoScrubber
{
[UIView animateWithDuration:0.12 animations:^{
_flashView.alpha = 1.0f;
} completion:^(BOOL finished) {
[UIView animateWithDuration:0.2 animations:^{
_flashView.alpha = 0.0f;
} completion:^(BOOL finished) {
TGDispatchAfter(1.0, dispatch_get_main_queue(), ^{
[UIView animateWithDuration:0.2 animations:^{
_areaMaskView.alpha = 0.0f;
_coverLabel.alpha = 0.7f;
}];

self.controlVideoPlayback(true);
});
}];
}];
}

- (void)videoScrubber:(TGMediaPickerGalleryVideoScrubber *)__unused videoScrubber valueDidChange:(NSTimeInterval)position
{
self.controlVideoSeek(position);
}

#pragma mark Trimming

- (bool)hasTrimming
{
return _scrubberView.hasTrimming;
}

- (CMTimeRange)trimRange
{
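// CMTimeRange is (start, duration), so the trim end must be passed as
// trimEndValue - trimStartValue rather than as an absolute timestamp.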
return CMTimeRangeMake(CMTimeMakeWithSeconds(_scrubberView.trimStartValue, NSEC_PER_SEC), CMTimeMakeWithSeconds((_scrubberView.trimEndValue - _scrubberView.trimStartValue), NSEC_PER_SEC));
}

- (void)videoScrubberDidBeginEditing:(TGMediaPickerGalleryVideoScrubber *)__unused videoScrubber
{
self.controlVideoPlayback(false);

[self setPlayButtonHidden:true animated:false];
}

- (void)videoScrubberDidEndEditing:(TGMediaPickerGalleryVideoScrubber *)videoScrubber
{
[self updatePlayerRange:videoScrubber.trimEndValue];

self.controlVideoSeek(videoScrubber.trimStartValue);
self.controlVideoPlayback(true);

[self setPlayButtonHidden:false animated:true];
}

- (void)videoScrubber:(TGMediaPickerGalleryVideoScrubber *)__unused videoScrubber editingStartValueDidChange:(NSTimeInterval)startValue
{
self.controlVideoSeek(startValue);
}

- (void)videoScrubber:(TGMediaPickerGalleryVideoScrubber *)__unused videoScrubber editingEndValueDidChange:(NSTimeInterval)endValue
{
self.controlVideoSeek(endValue);
}

- (void)updatePlayerRange:(NSTimeInterval)trimEndValue
{
self.controlVideoEndTime(trimEndValue);
}

#pragma mark Thumbnails

- (NSArray *)videoScrubber:(TGMediaPickerGalleryVideoScrubber *)videoScrubber evenlySpacedTimestamps:(NSInteger)count startingAt:(NSTimeInterval)startTimestamp endingAt:(NSTimeInterval)endTimestamp
{
if (endTimestamp < startTimestamp)
return nil;

if (count == 0)
return nil;

NSTimeInterval duration = [self videoScrubberDuration:videoScrubber];
if (endTimestamp > duration)
endTimestamp = duration;

NSTimeInterval interval = (endTimestamp - startTimestamp) / count;
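// Samples are left-aligned: the i-th timestamp is startTimestamp + i * interval,
// so the last of the count thumbnails sits one interval short of endTimestamp.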

NSMutableArray *timestamps = [[NSMutableArray alloc] init];
for (NSInteger i = 0; i < count; i++)
[timestamps addObject:@(startTimestamp + i * interval)];

return timestamps;
}

- (void)videoScrubber:(TGMediaPickerGalleryVideoScrubber *)__unused videoScrubber requestThumbnailImagesForTimestamps:(NSArray *)timestamps size:(CGSize)size isSummaryThumbnails:(bool)isSummaryThumbnails
{
if (timestamps.count == 0)
return;

id<TGMediaEditAdjustments> adjustments = [self.photoEditor exportAdjustments];

SSignal *thumbnailsSignal = nil;
if ([self.item isKindOfClass:[TGMediaAsset class]])
thumbnailsSignal = [TGMediaAssetImageSignals videoThumbnailsForAsset:(TGMediaAsset *)self.item size:size timestamps:timestamps];
else if ([self.item isKindOfClass:[TGCameraCapturedVideo class]])
thumbnailsSignal = [((TGCameraCapturedVideo *)self.item).avAsset mapToSignal:^SSignal *(AVAsset *avAsset) {
return [TGMediaAssetImageSignals videoThumbnailsForAVAsset:avAsset size:size timestamps:timestamps];
}];

_requestingThumbnails = true;

__weak TGPhotoAvatarPreviewController *weakSelf = self;
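// Weak/strong dance: the signal's handlers capture self weakly so the disposable
// cannot retain the controller, then re-strongify for the duration of each call.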
[_thumbnailsDisposable setDisposable:[[[thumbnailsSignal map:^NSArray *(NSArray *images) {
if (adjustments.toolsApplied) {
NSMutableArray *editedImages = [[NSMutableArray alloc] init];
PGPhotoEditor *editor = [[PGPhotoEditor alloc] initWithOriginalSize:adjustments.originalSize adjustments:adjustments forVideo:false enableStickers:true];
editor.standalone = true;
for (UIImage *image in images) {
[editor setImage:image forCropRect:adjustments.cropRect cropRotation:0.0 cropOrientation:adjustments.cropOrientation cropMirrored:adjustments.cropMirrored fullSize:false];
UIImage *resultImage = editor.currentResultImage;
if (resultImage != nil) {
[editedImages addObject:resultImage];
} else {
[editedImages addObject:image];
}
}
return editedImages;
} else {
return images;
}
}] deliverOn:[SQueue mainQueue]] startWithNext:^(NSArray *images)
{
__strong TGPhotoAvatarPreviewController *strongSelf = weakSelf;
if (strongSelf == nil)
return;

[images enumerateObjectsUsingBlock:^(UIImage *image, NSUInteger index, __unused BOOL *stop)
{
if (index < timestamps.count)
[strongSelf->_scrubberView setThumbnailImage:image forTimestamp:[timestamps[index] doubleValue] isSummaryThubmnail:isSummaryThumbnails];
}];
} completed:^
{
__strong TGPhotoAvatarPreviewController *strongSelf = weakSelf;
if (strongSelf != nil)
strongSelf->_requestingThumbnails = false;
}]];
}

- (void)videoScrubberDidFinishRequestingThumbnails:(TGMediaPickerGalleryVideoScrubber *)__unused videoScrubber
{
_requestingThumbnails = false;

// [self setScrubbingPanelHidden:false animated:true];
}

- (void)videoScrubberDidCancelRequestingThumbnails:(TGMediaPickerGalleryVideoScrubber *)__unused videoScrubber
{
_requestingThumbnails = false;
}

- (CGSize)videoScrubberOriginalSize:(TGMediaPickerGalleryVideoScrubber *)__unused videoScrubber cropRect:(CGRect *)cropRect cropOrientation:(UIImageOrientation *)cropOrientation cropMirrored:(bool *)cropMirrored
{
id<TGMediaEditAdjustments> adjustments = [self.photoEditor exportAdjustments];
if (cropRect != NULL)
*cropRect = (adjustments != nil) ? adjustments.cropRect : CGRectMake(0, 0, self.item.originalSize.width, self.item.originalSize.height);

if (cropOrientation != NULL)
*cropOrientation = (adjustments != nil) ? adjustments.cropOrientation : UIImageOrientationUp;

if (cropMirrored != NULL)
*cropMirrored = adjustments.cropMirrored;

return self.item.originalSize;
}

- (void)setScrubberPosition:(NSTimeInterval)position reset:(bool)reset
{
[_scrubberView setValue:_scrubberView.trimStartValue resetPosition:reset];
}

- (void)setScrubberPlaying:(bool)value
{
[_scrubberView setIsPlaying:value];
}

@end

@ -1,17 +0,0 @@
#import "TGPhotoEditorTabController.h"

#import <LegacyComponents/LegacyComponentsContext.h>

@class PGPhotoEditor;
@class TGSuggestionContext;
@class TGPhotoEditorPreviewView;

@interface TGPhotoCaptionController : TGPhotoEditorTabController

@property (nonatomic, copy) void (^captionSet)(NSString *caption, NSArray *entities);

@property (nonatomic, strong) TGSuggestionContext *suggestionContext;

- (instancetype)initWithContext:(id<LegacyComponentsContext>)context photoEditor:(PGPhotoEditor *)photoEditor previewView:(TGPhotoEditorPreviewView *)previewView caption:(NSString *)caption;

@end

@ -1,357 +0,0 @@
#import "TGPhotoCaptionController.h"

#import <LegacyComponents/LegacyComponents.h>

#import <LegacyComponents/TGPhotoEditorAnimation.h>

#import <LegacyComponents/TGPhotoEditorUtils.h>

#import "PGPhotoEditor.h"
#import "TGPhotoCaptionInputMixin.h"

#import "TGSuggestionContext.h"

#import "TGPhotoEditorController.h"
#import "TGPhotoEditorPreviewView.h"

@interface TGPhotoCaptionController ()
{
UIView *_wrapperView;
TGPhotoCaptionInputMixin *_captionMixin;
NSString *_initialCaption;

bool _transitionedIn;
CGFloat _keyboardHeight;

bool _appeared;

id<LegacyComponentsContext> _context;
}

@property (nonatomic, weak) PGPhotoEditor *photoEditor;
@property (nonatomic, weak) TGPhotoEditorPreviewView *previewView;

@end

@implementation TGPhotoCaptionController

- (instancetype)initWithContext:(id<LegacyComponentsContext>)context photoEditor:(PGPhotoEditor *)photoEditor previewView:(TGPhotoEditorPreviewView *)previewView caption:(NSString *)caption
{
self = [super init];
if (self != nil)
{
_context = context;
self.photoEditor = photoEditor;
self.previewView = previewView;

_initialCaption = caption;

__weak TGPhotoCaptionController *weakSelf = self;
_captionMixin = [[TGPhotoCaptionInputMixin alloc] initWithKeyCommandController:[context keyCommandController]];
_captionMixin.panelParentView = ^UIView *
{
__strong TGPhotoCaptionController *strongSelf = weakSelf;
if (strongSelf == nil)
return nil;

return strongSelf.view;
};

_captionMixin.finishedWithCaption = ^(NSString *caption, NSArray *entities)
{
__strong TGPhotoCaptionController *strongSelf = weakSelf;
if (strongSelf == nil)
return;

strongSelf->_dismissing = true;

if (strongSelf.captionSet != nil)
strongSelf.captionSet(caption, entities);

if (strongSelf->_keyboardHeight == 0)
[strongSelf->_captionMixin.inputPanel setCollapsed:true animated:true];
};

_captionMixin.keyboardHeightChanged = ^(CGFloat keyboardHeight, NSTimeInterval duration, NSInteger animationCurve)
{
__strong TGPhotoCaptionController *strongSelf = weakSelf;
if (strongSelf == nil)
return;

strongSelf->_keyboardHeight = keyboardHeight;

if (!strongSelf->_transitionedIn)
{
strongSelf.transitionInPending = false;
strongSelf->_transitionedIn = true;
[strongSelf animateTransitionInWithDuration:duration curve:animationCurve];
}
else
{
[UIView animateWithDuration:duration delay:0.0f options:animationCurve animations:^
{
[strongSelf updateLayout:[[LegacyComponentsGlobals provider] applicationStatusBarOrientation]];
} completion:nil];
}
};
}
return self;
}

- (void)loadView
{
[super loadView];
self.view.autoresizingMask = UIViewAutoresizingFlexibleWidth | UIViewAutoresizingFlexibleHeight;

_wrapperView = [[UIView alloc] initWithFrame:CGRectZero];
[self.view addSubview:_wrapperView];

TGPhotoEditorPreviewView *previewView = _previewView;
previewView.interactionEnded = ^{ };
[self.view addSubview:_previewView];
}

- (void)viewDidLoad
{
[super viewDidLoad];

TGPhotoEditorPreviewView *previewView = _previewView;
if (self.initialAppearance || ![_transitionView isKindOfClass:[TGPhotoEditorPreviewView class]])
previewView.hidden = true;
}

- (void)viewDidAppear:(BOOL)animated
{
[super viewDidAppear:animated];

_captionMixin.suggestionContext = self.suggestionContext;
[_captionMixin beginEditing];

if (_keyboardHeight == 0 && !_transitionedIn)
{
[_captionMixin.inputPanel setCollapsed:true];
_transitionedIn = true;
[self animateTransitionInWithDuration:0.25 curve:UIViewAnimationOptionCurveEaseInOut];
self.transitionInPending = false;
[_captionMixin.inputPanel setCollapsed:false animated:true];
}
}

- (void)viewWillAppear:(BOOL)__unused animated
{

}

- (void)transitionOutSwitching:(bool)__unused switching completion:(void (^)(void))completion
{
TGPhotoEditorPreviewView *previewView = self.previewView;
previewView.interactionEnded = nil;

if (completion != nil)
completion();
}

- (void)prepareTransitionInWithReferenceView:(UIView *)referenceView referenceFrame:(CGRect)referenceFrame parentView:(UIView *)__unused parentView noTransitionView:(bool)__unused noTransitionView
{
self.transitionInPending = true;

if (parentView == nil)
parentView = referenceView.superview.superview;

UIView *transitionViewSuperview = nil;
UIImage *transitionImage = nil;
if ([referenceView isKindOfClass:[UIImageView class]])
transitionImage = ((UIImageView *)referenceView).image;

if (transitionImage != nil)
{
_transitionView = [[UIImageView alloc] initWithImage:transitionImage];
_transitionView.clipsToBounds = true;
_transitionView.contentMode = UIViewContentModeScaleAspectFill;
transitionViewSuperview = parentView;
}
else
{
_transitionView = referenceView;
transitionViewSuperview = self.view;
}

_transitionView.frame = referenceFrame;
[transitionViewSuperview addSubview:_transitionView];

}

- (CGRect)_targetFrameForTransitionInFromFrame:(CGRect)fromFrame
{
CGSize referenceSize = [self referenceViewSize];

if ([self inFormSheet])
referenceSize = CGSizeMake(540.0f, 620.0f);

CGRect containerFrame = CGRectMake(0, 0, referenceSize.width, referenceSize.height);
CGSize fittedSize = TGScaleToSize(fromFrame.size, containerFrame.size);
CGRect toFrame = CGRectMake(containerFrame.origin.x + (containerFrame.size.width - fittedSize.width) / 2,
containerFrame.origin.y + (containerFrame.size.height - fittedSize.height - _keyboardHeight) / 2,
fittedSize.width,
fittedSize.height);

return toFrame;
}

- (void)animateTransitionInWithDuration:(NSTimeInterval)duration curve:(NSInteger)curve
{
if ([_transitionView isKindOfClass:[TGPhotoEditorPreviewView class]])
_transitionView.hidden = false;

self.transitionInProgress = true;

[UIView animateWithDuration:duration delay:0.0f options:curve animations:^
{
_transitionView.frame = [self _targetFrameForTransitionInFromFrame:_transitionView.frame];
} completion:^(__unused BOOL finished)
{
self.transitionInProgress = false;

UIView *transitionView = _transitionView;
_transitionView = nil;

if (self.finishedTransitionIn != nil)
{
self.finishedTransitionIn();
self.finishedTransitionIn = nil;
}

[self _finishedTransitionInWithView:transitionView];
}];
}

- (BOOL)shouldAutorotate
{
return false;
}

- (bool)isDismissAllowed
{
return _appeared;
}

- (void)_animatePreviewViewTransitionOutToFrame:(CGRect)targetFrame saving:(bool)saving parentView:(UIView *)parentView completion:(void (^)(void))completion
{
_dismissing = true;

TGPhotoEditorPreviewView *previewView = self.previewView;
[previewView prepareForTransitionOut];

UIView *snapshotView = nil;
POPSpringAnimation *snapshotAnimation = nil;

if (saving && CGRectIsNull(targetFrame) && parentView != nil)
{
snapshotView = [previewView snapshotViewAfterScreenUpdates:false];
snapshotView.frame = previewView.frame;

CGSize fittedSize = TGScaleToSize(previewView.frame.size, self.view.frame.size);
targetFrame = CGRectMake((self.view.frame.size.width - fittedSize.width) / 2,
(self.view.frame.size.height - fittedSize.height) / 2,
fittedSize.width,
fittedSize.height);

[parentView addSubview:snapshotView];

snapshotAnimation = [TGPhotoEditorAnimation prepareTransitionAnimationForPropertyNamed:kPOPViewFrame];
snapshotAnimation.fromValue = [NSValue valueWithCGRect:snapshotView.frame];
snapshotAnimation.toValue = [NSValue valueWithCGRect:targetFrame];
}

POPSpringAnimation *previewAnimation = [TGPhotoEditorAnimation prepareTransitionAnimationForPropertyNamed:kPOPViewFrame];
previewAnimation.fromValue = [NSValue valueWithCGRect:previewView.frame];
previewAnimation.toValue = [NSValue valueWithCGRect:targetFrame];

POPSpringAnimation *previewAlphaAnimation = [TGPhotoEditorAnimation prepareTransitionAnimationForPropertyNamed:kPOPViewAlpha];
previewAlphaAnimation.fromValue = @(previewView.alpha);
previewAlphaAnimation.toValue = @(0.0f);

NSMutableArray *animations = [NSMutableArray arrayWithArray:@[ previewAnimation, previewAlphaAnimation ]];
if (snapshotAnimation != nil)
[animations addObject:snapshotAnimation];

[TGPhotoEditorAnimation performBlock:^(__unused bool allFinished)
{
[snapshotView removeFromSuperview];

if (completion != nil)
completion();
} whenCompletedAllAnimations:animations];

if (snapshotAnimation != nil)
[snapshotView pop_addAnimation:snapshotAnimation forKey:@"frame"];
[previewView pop_addAnimation:previewAnimation forKey:@"frame"];
[previewView pop_addAnimation:previewAlphaAnimation forKey:@"alpha"];
}

- (void)_finishedTransitionInWithView:(UIView *)transitionView
{
_appeared = true;
[_captionMixin enableDismissal];

TGPhotoEditorPreviewView *previewView = _previewView;
previewView.hidden = false;
[previewView performTransitionInIfNeeded];

if (![transitionView isKindOfClass:[TGPhotoEditorPreviewView class]])
[transitionView removeFromSuperview];
}

- (CGRect)transitionOutReferenceFrame
{
TGPhotoEditorPreviewView *previewView = _previewView;
return previewView.frame;
}

- (UIView *)transitionOutReferenceView
{
return _previewView;
}

- (UIView *)snapshotView
{
TGPhotoEditorPreviewView *previewView = self.previewView;
return [previewView originalSnapshotView];
}

- (id)currentResultRepresentation
{
return self.photoEditor.currentResultImage;
}

- (void)viewWillLayoutSubviews
{
[super viewWillLayoutSubviews];

[self updateLayout:[[LegacyComponentsGlobals provider] applicationStatusBarOrientation]];
}

- (void)updateLayout:(UIInterfaceOrientation)__unused orientation
{
CGSize referenceSize = [self referenceViewSize];

CGFloat screenSide = MAX(referenceSize.width, referenceSize.height) + 2 * TGPhotoEditorPanelSize;
_wrapperView.frame = CGRectMake((referenceSize.width - screenSide) / 2, (referenceSize.height - screenSide) / 2, screenSide, screenSide);

PGPhotoEditor *photoEditor = self.photoEditor;
TGPhotoEditorPreviewView *previewView = self.previewView;

if (_dismissing || previewView.superview != self.view || self.transitionInPending)
return;

CGRect containerFrame = CGRectMake(0, 0, referenceSize.width, referenceSize.height);
CGSize fittedSize = TGScaleToSize(photoEditor.rotatedCropSize, containerFrame.size);
previewView.frame = CGRectMake(containerFrame.origin.x + (containerFrame.size.width - fittedSize.width) / 2,
containerFrame.origin.y + (containerFrame.size.height - fittedSize.height - _keyboardHeight) / 2,
fittedSize.width,
fittedSize.height);

[_captionMixin setContentAreaHeight:self.view.frame.size.height];
}

@end

@ -107,7 +107,7 @@ NSString * const TGPhotoCropOriginalAspectRatio = @"original";
[self.view addSubview:_wrapperView];

PGPhotoEditor *photoEditor = self.photoEditor;
_cropView = [[TGPhotoCropView alloc] initWithOriginalSize:photoEditor.originalSize hasArbitraryRotation:!_forVideo];
_cropView = [[TGPhotoCropView alloc] initWithOriginalSize:photoEditor.originalSize hasArbitraryRotation:true];
[_cropView setCropRect:photoEditor.cropRect];
[_cropView setCropOrientation:photoEditor.cropOrientation];
[_cropView setRotation:photoEditor.cropRotation];

@ -21,6 +21,7 @@
TGPhotoEditorBlurTypeButton *_offButton;
TGPhotoEditorBlurTypeButton *_radialButton;
TGPhotoEditorBlurTypeButton *_linearButton;
TGPhotoEditorBlurTypeButton *_portraitButton;

TGPhotoEditorSliderView *_sliderView;

@ -59,28 +60,35 @@
_titleLabel.textColor = [TGPhotoEditorInterfaceAssets editorItemTitleColor];
_titleLabel.userInteractionEnabled = false;
[self addSubview:_titleLabel];

_offButton = [[TGPhotoEditorBlurTypeButton alloc] initWithFrame:CGRectZero];
_offButton.tag = PGBlurToolTypeNone;
[_offButton addTarget:self action:@selector(blurButtonPressed:) forControlEvents:UIControlEventTouchUpInside];
[_offButton setImage:TGComponentsImageNamed(@"PhotoEditorBlurOff")];
[_offButton setImage:TGTintedImage([UIImage imageNamed:@"Editor/BlurOff"], [UIColor whiteColor])];
[_offButton setTitle:TGLocalized(@"PhotoEditor.BlurToolOff")];
[_buttonsWrapper addSubview:_offButton];

_radialButton = [[TGPhotoEditorBlurTypeButton alloc] initWithFrame:CGRectZero];
_radialButton.tag = PGBlurToolTypeRadial;
[_radialButton addTarget:self action:@selector(blurButtonPressed:) forControlEvents:UIControlEventTouchUpInside];
[_radialButton setImage:TGComponentsImageNamed(@"PhotoEditorBlurRadial")];
[_radialButton setImage:TGTintedImage([UIImage imageNamed:@"Editor/BlurRadial"], [UIColor whiteColor])];
[_radialButton setTitle:TGLocalized(@"PhotoEditor.BlurToolRadial")];
[_buttonsWrapper addSubview:_radialButton];

_linearButton = [[TGPhotoEditorBlurTypeButton alloc] initWithFrame:CGRectZero];
_linearButton.tag = PGBlurToolTypeLinear;
[_linearButton addTarget:self action:@selector(blurButtonPressed:) forControlEvents:UIControlEventTouchUpInside];
[_linearButton setImage:TGComponentsImageNamed(@"PhotoEditorBlurLinear")];
[_linearButton setImage:TGTintedImage([UIImage imageNamed:@"Editor/BlurLinear"], [UIColor whiteColor])];
[_linearButton setTitle:TGLocalized(@"PhotoEditor.BlurToolLinear")];
[_buttonsWrapper addSubview:_linearButton];

_portraitButton = [[TGPhotoEditorBlurTypeButton alloc] initWithFrame:CGRectZero];
_portraitButton.tag = PGBlurToolTypePortrait;
[_portraitButton addTarget:self action:@selector(blurButtonPressed:) forControlEvents:UIControlEventTouchUpInside];
[_portraitButton setImage:TGTintedImage([UIImage imageNamed:@"Editor/BlurPortrait"], [UIColor whiteColor])];
[_portraitButton setTitle:TGLocalized(@"PhotoEditor.BlurToolPortrait")];
[_buttonsWrapper addSubview:_portraitButton];
|
||||
|
||||
_sliderView = [[TGPhotoEditorSliderView alloc] initWithFrame:CGRectZero];
|
||||
_sliderView.alpha = 0.0f;
|
||||
_sliderView.hidden = true;
|
||||
@ -258,9 +266,28 @@
|
||||
{
|
||||
_titleLabel.frame = CGRectMake((self.frame.size.width - _titleLabel.frame.size.width) / 2, 10, _titleLabel.frame.size.width, _titleLabel.frame.size.height);
|
||||
|
||||
_offButton.frame = CGRectMake(CGFloor(self.frame.size.width / 4 - 50), self.frame.size.height / 2 - 42, 100, 100);
|
||||
_radialButton.frame = CGRectMake(self.frame.size.width / 2 - 50, self.frame.size.height / 2 - 42, 100, 100);
|
||||
_linearButton.frame = CGRectMake(CGCeil(self.frame.size.width / 2 + self.frame.size.width / 4 - 50), self.frame.size.height / 2 - 42, 100, 100);
|
||||
// _offButton.frame = CGRectMake(CGFloor(self.frame.size.width / 4 - 50), self.frame.size.height / 2 - 42, 100, 100);
|
||||
// _radialButton.frame = CGRectMake(self.frame.size.width / 2 - 75, self.frame.size.height / 2 - 42, 100, 100);
|
||||
// _linearButton.frame = CGRectMake(CGCeil(self.frame.size.width / 2 - 50), self.frame.size.height / 2 - 42, 100, 100);
|
||||
// _portraitButton.frame = CGRectMake(CGCeil(self.frame.size.width / 2 + self.frame.size.width / 4 - 50), self.frame.size.height / 2 - 42, 100, 100);
|
||||
|
||||
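    // Annotation (editor's note): the block below is the new symmetric layout for the
    // four blur-type buttons. The two right buttons mirror the two left ones via
    // (width - leftX - buttonWidth), so the row stays centered at any panel width;
    // the 1.5/8 and 3.75/10 fractions are hand-tuned anchor positions.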
    NSArray *buttons = @[_offButton, _radialButton, _linearButton, _portraitButton];

    UIView *leftButton = buttons.firstObject;
    UIView *centerLeftButton = [buttons objectAtIndex:1];
    UIView *centerRightButton = [buttons objectAtIndex:2];
    UIView *rightButton = buttons.lastObject;

    CGFloat offset = self.frame.size.height / 2 - 42;
    CGSize buttonSize = CGSizeMake(100.0, 100.0);

    leftButton.frame = CGRectMake(CGFloor(self.frame.size.width / 8 * 1.5 - 3 - buttonSize.width / 2), offset, buttonSize.width, buttonSize.height);

    centerLeftButton.frame = CGRectMake(CGFloor(self.frame.size.width / 10 * 3.75 + 5 - buttonSize.width / 2), offset, buttonSize.width, buttonSize.height);

    centerRightButton.frame = CGRectMake(CGCeil(self.frame.size.width - centerLeftButton.frame.origin.x - buttonSize.width), offset, buttonSize.width, buttonSize.height);

    rightButton.frame = CGRectMake(CGCeil(self.frame.size.width - leftButton.frame.origin.x - buttonSize.width), offset, buttonSize.width, buttonSize.height);

    _sliderView.frame = CGRectMake(TGPhotoEditorSliderViewMargin, (self.frame.size.height - 32) / 2, self.frame.size.width - 2 * TGPhotoEditorSliderViewMargin, 32);
}

@ -19,6 +19,7 @@

#import "PGPhotoEditor.h"
#import "PGEnhanceTool.h"
#import "TGPaintFaceDetector.h"

#import <LegacyComponents/PGPhotoEditorValues.h>
#import <LegacyComponents/TGVideoEditAdjustments.h>
@ -33,12 +34,12 @@
#import <LegacyComponents/TGMediaAssetsLibrary.h>
#import <LegacyComponents/TGMediaAssetImageSignals.h>

#import "TGPhotoCaptionController.h"
#import "TGPhotoCropController.h"
#import "TGPhotoAvatarCropController.h"
#import "TGPhotoToolsController.h"
#import "TGPhotoPaintController.h"
#import "TGPhotoQualityController.h"
#import "TGPhotoAvatarCropController.h"
#import "TGPhotoAvatarPreviewController.h"

#import "TGMessageImageViewOverlayView.h"

@ -75,6 +76,7 @@
    SMetaDisposable *_playerItemDisposable;
    id _playerStartedObserver;
    id _playerReachedEndObserver;
    NSTimer *_positionTimer;

    id<TGMediaEditAdjustments> _initialAdjustments;
    NSString *_caption;
@ -92,6 +94,8 @@
    bool _progressVisible;
    TGMessageImageViewOverlayView *_progressView;

    SMetaDisposable *_faceDetectorDisposable;

    id<LegacyComponentsContext> _context;
}

@ -126,7 +130,7 @@
    _screenImage = screenImage;

    _queue = [[SQueue alloc] init];
    _photoEditor = [[PGPhotoEditor alloc] initWithOriginalSize:_item.originalSize adjustments:adjustments forVideo:(intent == TGPhotoEditorControllerVideoIntent) enableStickers:(intent & TGPhotoEditorControllerSignupAvatarIntent) == 0];
    _photoEditor = [[PGPhotoEditor alloc] initWithOriginalSize:_item.originalSize adjustments:adjustments forVideo:(intent == TGPhotoEditorControllerVideoIntent || intent == TGPhotoEditorControllerAvatarIntent) enableStickers:(intent & TGPhotoEditorControllerSignupAvatarIntent) == 0];
    if ([self presentedForAvatarCreation])
    {
        CGFloat shortSide = MIN(_item.originalSize.width, _item.originalSize.height);
@ -147,7 +151,9 @@

- (void)dealloc
{
    [_positionTimer invalidate];
    [_actionHandle reset];
    [_faceDetectorDisposable dispose];
}

- (void)loadView
@ -245,7 +251,7 @@
        case TGPhotoEditorRotateTab:
        case TGPhotoEditorMirrorTab:
        case TGPhotoEditorAspectRatioTab:
            if ([strongSelf->_currentTabController isKindOfClass:[TGPhotoCropController class]])
            if ([strongSelf->_currentTabController isKindOfClass:[TGPhotoCropController class]] || [strongSelf->_currentTabController isKindOfClass:[TGPhotoAvatarCropController class]])
                [strongSelf->_currentTabController handleTabAction:tab];
            break;
    }
@ -302,6 +308,8 @@
    [_photoEditor setPreviewOutput:_previewView];
    [self updatePreviewView];

    [self detectFaces];

    [self presentEditorTab:_currentTab];
}

@ -364,7 +372,7 @@
{
    [super viewDidLoad];

    if ([_currentTabController isKindOfClass:[TGPhotoCropController class]] || [_currentTabController isKindOfClass:[TGPhotoCaptionController class]] || [_currentTabController isKindOfClass:[TGPhotoAvatarCropController class]])
    if ([_currentTabController isKindOfClass:[TGPhotoCropController class]])
        return;

    NSTimeInterval position = 0;
@ -407,9 +415,12 @@
    _player.muted = true;

    [_photoEditor setPlayerItem:_playerItem forCropRect:_photoEditor.cropRect cropRotation:0.0 cropOrientation:_photoEditor.cropOrientation cropMirrored:_photoEditor.cropMirrored];


    TGDispatchOnMainThread(^
    {
        if ([_currentTabController isKindOfClass:[TGPhotoAvatarCropController class]])
            [(TGPhotoAvatarCropController *)_currentTabController setPlayer:_player];

        [_previewView performTransitionInWithCompletion:^
        {
        }];
@ -465,41 +476,87 @@

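// Annotation (editor's note): the observer installed below implements looped preview
// playback: it fires just before the trim end (or the item's natural end, offset by
// 0.1 s) and seeks back to just after the trim start, so the avatar video keeps
// cycling within the trimmed range and the scrubber is reset accordingly.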
- (void)_setupPlaybackReachedEndObserver
{
    PGPhotoEditor *photoEditor = _photoEditor;
    CMTime endTime = CMTimeSubtract(_player.currentItem.duration, CMTimeMake(10, 100));
    if (_photoEditor.trimEndValue > DBL_EPSILON && _photoEditor.trimEndValue < CMTimeGetSeconds(_player.currentItem.duration))
        endTime = CMTimeMakeWithSeconds(_photoEditor.trimEndValue - 0.1, NSEC_PER_SEC);
    if (photoEditor.trimEndValue > DBL_EPSILON && photoEditor.trimEndValue < CMTimeGetSeconds(_player.currentItem.duration))
        endTime = CMTimeMakeWithSeconds(photoEditor.trimEndValue - 0.1, NSEC_PER_SEC);

    CMTime startTime = CMTimeMake(5, 100);
    if (_photoEditor.trimStartValue > DBL_EPSILON)
        startTime = CMTimeMakeWithSeconds(_photoEditor.trimStartValue + 0.05, NSEC_PER_SEC);
    if (photoEditor.trimStartValue > DBL_EPSILON)
        startTime = CMTimeMakeWithSeconds(photoEditor.trimStartValue + 0.05, NSEC_PER_SEC);

    __weak TGPhotoEditorController *weakSelf = self;
    _playerReachedEndObserver = [_player addBoundaryTimeObserverForTimes:@[[NSValue valueWithCMTime:endTime]] queue:NULL usingBlock:^
    {
        __strong TGPhotoEditorController *strongSelf = weakSelf;
        if (strongSelf != nil)
        if (strongSelf != nil) {
            [strongSelf->_player seekToTime:startTime];
            if ([strongSelf->_currentTabController isKindOfClass:[TGPhotoAvatarPreviewController class]]) {
                [(TGPhotoAvatarPreviewController *)strongSelf->_currentTabController setScrubberPosition:photoEditor.trimStartValue reset:true];
            }
        }
    }];
}

- (void)startVideoPlayback {
    NSTimeInterval startPosition = 0.0f;
    if (_photoEditor.trimStartValue > DBL_EPSILON)
        startPosition = _photoEditor.trimStartValue;

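// Annotation (editor's note): startVideoPlayback: now takes a reset flag. With
// reset == true it performs the full setup pass (seek to the trim start, install the
// playback-started observer, begin KVO on the player's rate) before playing; with
// reset == false it simply resumes. In both cases a 0.25 s timer keeps the scrubber
// position in sync via positionTimerEvent.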
- (void)startVideoPlayback:(bool)reset {
    if (reset) {
        NSTimeInterval startPosition = 0.0f;
        if (_photoEditor.trimStartValue > DBL_EPSILON)
            startPosition = _photoEditor.trimStartValue;

        CMTime targetTime = CMTimeMakeWithSeconds(startPosition, NSEC_PER_SEC);
        [_player.currentItem seekToTime:targetTime toleranceBefore:kCMTimeZero toleranceAfter:kCMTimeZero];

        [self _setupPlaybackStartedObserver];

        [_player addObserver:self forKeyPath:@"rate" options:NSKeyValueObservingOptionNew context:nil];
    }

    CMTime targetTime = CMTimeMakeWithSeconds(startPosition, NSEC_PER_SEC);
    [_player.currentItem seekToTime:targetTime toleranceBefore:kCMTimeZero toleranceAfter:kCMTimeZero];

    [self _setupPlaybackStartedObserver];
    [_player play];

    _positionTimer = [TGTimerTarget scheduledMainThreadTimerWithTarget:self action:@selector(positionTimerEvent) interval:0.25 repeat:true];
    [self positionTimerEvent];
}

- (void)stopVideoPlayback {
    if (_playerStartedObserver != nil)
        [_player removeTimeObserver:_playerStartedObserver];
    if (_playerReachedEndObserver != nil)
        [_player removeTimeObserver:_playerReachedEndObserver];
- (void)stopVideoPlayback:(bool)reset {
    if (reset) {
        if (_playerStartedObserver != nil)
            [_player removeTimeObserver:_playerStartedObserver];
        if (_playerReachedEndObserver != nil)
            [_player removeTimeObserver:_playerReachedEndObserver];

        [_player removeObserver:self forKeyPath:@"rate" context:nil];
    }
    [_player pause];

    [_positionTimer invalidate];
    _positionTimer = nil;
}

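// Annotation (editor's note): rate changes observed here via KVO are forwarded to the
// avatar preview's scrubber, so its play/pause indicator mirrors the AVPlayer state
// (rate > FLT_EPSILON means playing).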
- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)__unused change context:(void *)__unused context
{
    if (object == _player && [keyPath isEqualToString:@"rate"])
    {
        if ([_currentTabController isKindOfClass:[TGPhotoAvatarPreviewController class]]) {
            [(TGPhotoAvatarPreviewController *)_currentTabController setScrubberPlaying:_player.rate > FLT_EPSILON];
        }
    }
}

- (void)positionTimerEvent
{
    if ([_currentTabController isKindOfClass:[TGPhotoAvatarPreviewController class]]) {
        [(TGPhotoAvatarPreviewController *)_currentTabController setScrubberPosition:CMTimeGetSeconds(_player.currentItem.currentTime) reset:false];
    }
}

- (void)seekVideo:(NSTimeInterval)position {
    CMTime targetTime = CMTimeMakeWithSeconds(position, NSEC_PER_SEC);
    [_player.currentItem seekToTime:targetTime toleranceBefore:kCMTimeZero toleranceAfter:kCMTimeZero];
}

- (void)setVideoEndTime:(NSTimeInterval)endTime {
    _player.currentItem.forwardPlaybackEndTime = CMTimeMakeWithSeconds(endTime, NSEC_PER_SEC);
}

- (void)viewWillAppear:(BOOL)animated
@ -927,9 +984,9 @@
        if (strongSelf == nil)
            return;
        if (play) {
            [strongSelf->_player play];
            [strongSelf startVideoPlayback:false];
        } else {
            [strongSelf->_player pause];
            [strongSelf stopVideoPlayback:false];
        }
    };
    paintController.beginTransitionIn = ^UIView *(CGRect *referenceFrame, UIView **parentView, bool *noTransitionView)
@ -954,7 +1011,7 @@
            strongSelf.finishedTransitionIn();

        strongSelf->_switchingTab = false;
        [strongSelf startVideoPlayback];
        [strongSelf startVideoPlayback:true];
    };

    controller = paintController;
@ -977,6 +1034,16 @@
    else if (snapshotImage != nil)
        [cropController setSnapshotImage:snapshotImage];
    cropController.toolbarLandscapeSize = TGPhotoEditorToolbarSize;
    cropController.controlVideoPlayback = ^(bool play) {
        __strong TGPhotoEditorController *strongSelf = weakSelf;
        if (strongSelf == nil)
            return;
        if (play) {
            [strongSelf startVideoPlayback:false];
        } else {
            [strongSelf stopVideoPlayback:false];
        }
    };
    cropController.beginTransitionIn = ^UIView *(CGRect *referenceFrame, UIView **parentView, bool *noTransitionView)
    {
        __strong TGPhotoEditorController *strongSelf = weakSelf;
@ -1030,6 +1097,7 @@
        }

        strongSelf->_switchingTab = false;
        [strongSelf startVideoPlayback:true];
    };
    cropController.finishedTransitionOut = ^
    {
@ -1205,7 +1273,7 @@

        strongSelf->_switchingTab = false;

        [strongSelf startVideoPlayback];
        [strongSelf startVideoPlayback:true];
    };

    controller = toolsController;
@ -1244,6 +1312,55 @@
        }
        break;

        case TGPhotoEditorPreviewTab:
        {
            TGPhotoAvatarPreviewController *previewController = [[TGPhotoAvatarPreviewController alloc] initWithContext:_context photoEditor:_photoEditor previewView:_previewView];
            previewController.item = _item;
            previewController.toolbarLandscapeSize = TGPhotoEditorToolbarSize;
            previewController.beginTransitionIn = ^UIView *(CGRect *referenceFrame, UIView **parentView, bool *noTransitionView)
            {
                *referenceFrame = transitionReferenceFrame;
                *parentView = transitionParentView;
                *noTransitionView = transitionNoTransitionView;

                return transitionReferenceView;
            };
            previewController.finishedTransitionIn = ^
            {
                __strong TGPhotoEditorController *strongSelf = weakSelf;
                if (strongSelf == nil)
                    return;

                if (isInitialAppearance && strongSelf.finishedTransitionIn != nil)
                    strongSelf.finishedTransitionIn();

                strongSelf->_switchingTab = false;
                [strongSelf startVideoPlayback:true];
            };
            previewController.controlVideoPlayback = ^(bool play) {
                __strong TGPhotoEditorController *strongSelf = weakSelf;
                if (strongSelf == nil)
                    return;
                if (play) {
                    [strongSelf startVideoPlayback:false];
                } else {
                    [strongSelf stopVideoPlayback:false];
                }
            };
            previewController.controlVideoSeek = ^(NSTimeInterval position) {
                __strong TGPhotoEditorController *strongSelf = weakSelf;
                if (strongSelf != nil)
                    [strongSelf seekVideo:position];
            };
            previewController.controlVideoEndTime = ^(NSTimeInterval endTime) {
                __strong TGPhotoEditorController *strongSelf = weakSelf;
                if (strongSelf != nil)
                    [strongSelf setVideoEndTime:endTime];
            };
            controller = previewController;
        }
        break;

        default:
            break;
    }
@ -1396,6 +1513,11 @@

- (void)dismissEditor
{
    if ([_currentTabController isKindOfClass:[TGPhotoAvatarPreviewController class]]) {
        [self presentEditorTab:TGPhotoEditorCropTab];
        return;
    }

    if (![_currentTabController isDismissAllowed])
        return;

@ -1484,7 +1606,11 @@

- (void)doneButtonPressed
{
    [self applyEditor];
    if ([_currentTabController isKindOfClass:[TGPhotoAvatarCropController class]]) {
        [self presentEditorTab:TGPhotoEditorPreviewTab];
    } else {
        [self applyEditor];
    }
}

- (void)applyEditor
@ -1908,6 +2034,56 @@
    [_landscapeToolbarView setInfoString:string];
}

- (void)detectFaces
{
    if (_faceDetectorDisposable == nil)
        _faceDetectorDisposable = [[SMetaDisposable alloc] init];

    id<TGMediaEditableItem> item = _item;
    CGSize originalSize = _photoEditor.originalSize;

    if (self.requestOriginalScreenSizeImage == nil)
        return;

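    // Annotation (editor's note): the chain below first consults the editing context's
    // face cache; the cached signal deliberately fails when empty so that `catch` falls
    // through to the on-device detector, which runs on a background queue against a
    // non-degraded screen-size image and stores its result back into the context.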
    SSignal *cachedSignal = [[self.editingContext facesForItem:item] mapToSignal:^SSignal *(id result)
    {
        if (result == nil)
            return [SSignal fail:nil];
        return [SSignal single:result];
    }];
    SSignal *imageSignal = [self.requestOriginalScreenSizeImage(item, 0) take:1];
    SSignal *detectSignal = [[imageSignal filter:^bool(UIImage *image)
    {
        if (![image isKindOfClass:[UIImage class]])
            return false;

        if (image.degraded)
            return false;

        return true;
    }] mapToSignal:^SSignal *(UIImage *image) {
        return [[TGPaintFaceDetector detectFacesInImage:image originalSize:originalSize] startOn:[SQueue concurrentDefaultQueue]];
    }];

    __weak TGPhotoEditorController *weakSelf = self;
    [_faceDetectorDisposable setDisposable:[[[cachedSignal catch:^SSignal *(__unused id error)
    {
        return detectSignal;
    }] deliverOn:[SQueue mainQueue]] startWithNext:^(NSArray *next)
    {
        __strong TGPhotoEditorController *strongSelf = weakSelf;
        if (strongSelf == nil)
            return;

        [strongSelf.editingContext setFaces:next forItem:item];

        if (next.count == 0)
            return;

        strongSelf->_faces = next;
    }]];
}

+ (TGPhotoEditorTab)defaultTabsForAvatarIntent
{
    static dispatch_once_t onceToken;

@ -0,0 +1,5 @@
#import <UIKit/UIKit.h>

@interface TGPhotoEditorSparseView : UIView

@end

@ -1,6 +1,6 @@
#import "TGPhotoPaintSparseView.h"
#import "TGPhotoEditorSparseView.h"

@implementation TGPhotoPaintSparseView
@implementation TGPhotoEditorSparseView

- (UIView *)hitTest:(CGPoint)point withEvent:(UIEvent *)event
{
@ -1,11 +1,11 @@
#import "TGPhotoPaintSparseView.h"
#import "TGPhotoEditorSparseView.h"
#import "TGPhotoPaintStickersContext.h"

@class TGPaintingData;
@class TGPhotoPaintEntity;
@class TGPhotoPaintEntityView;

@interface TGPhotoEntitiesContainerView : TGPhotoPaintSparseView
@interface TGPhotoEntitiesContainerView : TGPhotoEditorSparseView

@property (nonatomic, strong) id<TGPhotoPaintStickersContext> stickersContext;

@ -68,7 +68,7 @@ const CGFloat TGPhotoPaintStickerKeyboardSize = 260.0f;
    UIView *_scrollContentView;

    UIButton *_containerView;
    TGPhotoPaintSparseView *_wrapperView;
    TGPhotoEditorSparseView *_wrapperView;
    UIView *_portraitToolsWrapperView;
    UIView *_landscapeToolsWrapperView;

@ -120,10 +120,7 @@ const CGFloat TGPhotoPaintStickerKeyboardSize = 260.0f;
    UIImage *_stillImage;

    TGPaintingWrapperView *_paintingWrapperView;

    SMetaDisposable *_faceDetectorDisposable;
    NSArray *_faces;


    bool _enableStickers;

    NSData *_eyedropperBackgroundData;
@ -183,7 +180,6 @@ const CGFloat TGPhotoPaintStickerKeyboardSize = 260.0f;
- (void)dealloc
{
    [_actionHandle reset];
    [_faceDetectorDisposable dispose];
}

- (void)loadView
@ -299,7 +295,7 @@ const CGFloat TGPhotoPaintStickerKeyboardSize = 260.0f;
    _eyedropperView.hidden = true;
    [_selectionContainerView addSubview:_eyedropperView];

    _wrapperView = [[TGPhotoPaintSparseView alloc] initWithFrame:CGRectZero];
    _wrapperView = [[TGPhotoEditorSparseView alloc] initWithFrame:CGRectZero];
    [self.view addSubview:_wrapperView];

    _portraitToolsWrapperView = [[UIView alloc] initWithFrame:CGRectZero];
@ -527,8 +523,6 @@ const CGFloat TGPhotoPaintStickerKeyboardSize = 260.0f;
    };

    [self updateActionsView];

    [self performFaceDetection];
}

- (void)viewDidAppear:(BOOL)animated
@ -2478,13 +2472,13 @@ const CGFloat TGPhotoPaintStickerKeyboardSize = 260.0f;

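// Annotation (editor's note): this method scans the detected faces circularly from a
// random starting index and returns the first face whose anchor is not already
// occupied; the diff also moves it off the removed _faces ivar onto the faces
// accessor that forwards to the parent TGPhotoEditorController.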
- (TGPaintFace *)_randomFaceWithVacantAnchor:(TGPhotoMaskAnchor)anchor documentId:(int64_t)documentId
{
    NSInteger randomIndex = (NSInteger)arc4random_uniform((uint32_t)_faces.count);
    NSInteger count = _faces.count;
    NSInteger remaining = _faces.count;
    NSInteger randomIndex = (NSInteger)arc4random_uniform((uint32_t)self.faces.count);
    NSInteger count = self.faces.count;
    NSInteger remaining = self.faces.count;

    for (NSInteger i = randomIndex; remaining > 0; (i = (i + 1) % count), remaining--)
    {
        TGPaintFace *face = _faces[i];
        TGPaintFace *face = self.faces[i];
        if (![self _isFaceAnchorOccupied:face anchor:anchor documentId:documentId])
            return face;
    }
@ -2543,68 +2537,20 @@ const CGFloat TGPhotoPaintStickerKeyboardSize = 260.0f;
        if ([TGPhotoMaskPosition anchorOfMask:mask] != anchor)
            continue;

        if ((documentId == maskDocumentId || _faces.count > 1) && TGPaintDistance(entity.position, anchorPoint) < minDistance)
        if ((documentId == maskDocumentId || self.faces.count > 1) && TGPaintDistance(entity.position, anchorPoint) < minDistance)
            return true;
    }

    return false;
}

- (void)performFaceDetection
- (NSArray *)faces
{
    TGPhotoEditorController *editorController = (TGPhotoEditorController *)self.parentViewController;
    if (![editorController isKindOfClass:[TGPhotoEditorController class]])
        return;

    if (self.intent == TGPhotoEditorControllerVideoIntent)
        return;

    if (_faceDetectorDisposable == nil)
        _faceDetectorDisposable = [[SMetaDisposable alloc] init];

    id<TGMediaEditableItem> item = self.item;
    CGSize originalSize = _photoEditor.originalSize;

    if (editorController.requestOriginalScreenSizeImage == nil)
        return;

    SSignal *cachedSignal = [[editorController.editingContext facesForItem:item] mapToSignal:^SSignal *(id result)
    {
        if (result == nil)
            return [SSignal fail:nil];
        return [SSignal single:result];
    }];
    SSignal *imageSignal = [editorController.requestOriginalScreenSizeImage(item, 0) take:1];
    SSignal *detectSignal = [[imageSignal filter:^bool(UIImage *image)
    {
        if (![image isKindOfClass:[UIImage class]])
            return false;

        if (image.degraded)
            return false;

        return true;
    }] mapToSignal:^SSignal *(UIImage *image) {
        return [[TGPaintFaceDetector detectFacesInImage:image originalSize:originalSize] startOn:[SQueue concurrentDefaultQueue]];
    }];

    __weak TGPhotoPaintController *weakSelf = self;
    [_faceDetectorDisposable setDisposable:[[[cachedSignal catch:^SSignal *(__unused id error)
    {
        return detectSignal;
    }] deliverOn:[SQueue mainQueue]] startWithNext:^(NSArray *next)
    {
        [editorController.editingContext setFaces:next forItem:item];

        if (next.count == 0)
            return;

        __strong TGPhotoPaintController *strongSelf = weakSelf;
        if (strongSelf == nil)
            return;

        strongSelf->_faces = next;
    }]];
    if ([editorController isKindOfClass:[TGPhotoEditorController class]])
        return editorController.faces;
    else
        return @[];
}

- (UIRectEdge)preferredScreenEdgesDeferringSystemGestures

@ -1,5 +1,5 @@
#import "TGPhotoPaintSparseView.h"
#import "TGPhotoEditorSparseView.h"

@interface TGPhotoPaintSelectionContainerView : TGPhotoPaintSparseView
@interface TGPhotoPaintSelectionContainerView : TGPhotoEditorSparseView

@end

@ -1,5 +0,0 @@
#import <UIKit/UIKit.h>

@interface TGPhotoPaintSparseView : UIView

@end

@ -8,6 +8,4 @@

- (instancetype)initWithContext:(id<LegacyComponentsContext>)context photoEditor:(PGPhotoEditor *)photoEditor previewView:(TGPhotoEditorPreviewView *)previewView;

- (void)updateValues;

@end

@ -21,14 +21,11 @@
#import "TGPhotoEditorController.h"
#import "TGPhotoEditorPreviewView.h"
#import "TGPhotoEditorHUDView.h"
#import "TGPhotoEditorSparseView.h"

const CGFloat TGPhotoEditorToolsPanelSize = 180.0f;
const CGFloat TGPhotoEditorToolsLandscapePanelSize = TGPhotoEditorToolsPanelSize + 40.0f;

@interface TGPhotoToolsWrapperView : UIView

@end

@interface TGPhotoToolsController () <TGPhotoEditorCollectionViewToolsDataSource>
{
    NSValue *_contentOffsetAfterRotation;
@ -38,7 +35,7 @@ const CGFloat TGPhotoEditorToolsLandscapePanelSize = TGPhotoEditorToolsPanelSize
    NSArray *_allTools;
    NSArray *_simpleTools;

    TGPhotoToolsWrapperView *_wrapperView;
    TGPhotoEditorSparseView *_wrapperView;
    UIView *_portraitToolsWrapperView;
    UIView *_landscapeToolsWrapperView;
    UIView *_portraitWrapperBackgroundView;
@ -73,21 +70,7 @@ const CGFloat TGPhotoEditorToolsLandscapePanelSize = TGPhotoEditorToolsPanelSize
{
    self.photoEditor = photoEditor;
    self.previewView = previewView;

    NSMutableArray *tools = [[NSMutableArray alloc] init];
    NSMutableArray *simpleTools = [[NSMutableArray alloc] init];
    for (PGPhotoTool *tool in photoEditor.tools)
    {
        if (!tool.isHidden)
        {
            [tools addObject:tool];
            if (tool.isSimple)
                [simpleTools addObject:tool];
        }
    }
    _allTools = tools;
    _simpleTools = simpleTools;


    __weak TGPhotoToolsController *weakSelf = self;
    _changeBlock = ^(PGPhotoTool *tool, __unused id newValue, bool animated)
    {
@ -120,6 +103,29 @@ const CGFloat TGPhotoEditorToolsLandscapePanelSize = TGPhotoEditorToolsPanelSize
    [super loadView];
    self.view.autoresizingMask = UIViewAutoresizingFlexibleWidth | UIViewAutoresizingFlexibleHeight;

    TGPhotoEditorController *editorController = (TGPhotoEditorController *)self.parentViewController;
    NSArray *faces;
    if ([editorController isKindOfClass:[TGPhotoEditorController class]]) {
        faces = editorController.faces;
    }

    NSMutableArray *tools = [[NSMutableArray alloc] init];
    NSMutableArray *simpleTools = [[NSMutableArray alloc] init];
    for (PGPhotoTool *tool in self.photoEditor.tools)
    {
        if (tool.requiresFaces && faces.count < 1) {
            continue;
        }
        if (!tool.isHidden)
        {
            [tools addObject:tool];
            if (tool.isSimple)
                [simpleTools addObject:tool];
        }
    }
    _allTools = tools;
    _simpleTools = simpleTools;

    __weak TGPhotoToolsController *weakSelf = self;
    _interactionBegan = ^
    {
@ -177,7 +183,7 @@ const CGFloat TGPhotoEditorToolsLandscapePanelSize = TGPhotoEditorToolsPanelSize
    previewView.customTouchDownHandling = forVideo;
    [self.view addSubview:_previewView];

    _wrapperView = [[TGPhotoToolsWrapperView alloc] initWithFrame:CGRectZero];
    _wrapperView = [[TGPhotoEditorSparseView alloc] initWithFrame:CGRectZero];
    [self.view addSubview:_wrapperView];

    _portraitToolsWrapperView = [[UIView alloc] initWithFrame:CGRectZero];
@ -662,12 +668,6 @@ const CGFloat TGPhotoEditorToolsLandscapePanelSize = TGPhotoEditorToolsPanelSize
    return _interactionEnded;
}

- (void)updateValues
{
    [_portraitCollectionView reloadData];
    [_landscapeCollectionView reloadData];
}

#pragma mark - Layout

- (void)_prepareCollectionViewsForTransitionFromOrientation:(UIInterfaceOrientation)fromOrientation toOrientation:(UIInterfaceOrientation)toOrientation
@ -1069,17 +1069,3 @@ const CGFloat TGPhotoEditorToolsLandscapePanelSize = TGPhotoEditorToolsPanelSize
}

@end


@implementation TGPhotoToolsWrapperView

- (UIView *)hitTest:(CGPoint)point withEvent:(UIEvent *)event
{
    UIView *result = [super hitTest:point withEvent:event];
    if (result == self)
        return nil;

    return result;
}

@end

@ -41,7 +41,6 @@ typedef enum
    bool _endedInteraction;

    bool _scrubbing;
    CGFloat _scrubbingPosition;

    NSTimeInterval _duration;
    NSTimeInterval _trimStartValue;

@ -0,0 +1,30 @@
#import "GPUImage.h"

typedef NS_ENUM(NSInteger, YUGPUImageHighPassSkinSmoothingRadiusUnit) {
    YUGPUImageHighPassSkinSmoothingRadiusUnitPixel = 1,
    YUGPUImageHighPassSkinSmoothingRadiusUnitFractionOfImageWidth = 2
};

@interface YUGPUImageHighPassSkinSmoothingRadius : NSObject <NSCopying,NSSecureCoding>

@property (nonatomic,readonly) CGFloat value;
@property (nonatomic,readonly) YUGPUImageHighPassSkinSmoothingRadiusUnit unit;

- (instancetype)init NS_UNAVAILABLE;

+ (instancetype)radiusInPixels:(CGFloat)pixels;
+ (instancetype)radiusAsFractionOfImageWidth:(CGFloat)fraction;

@end

@interface YUGPUImageHighPassSkinSmoothingFilter : GPUImageFilterGroup

@property (nonatomic) CGFloat amount;

@property (nonatomic,copy) NSArray<NSValue *> *controlPoints;

@property (nonatomic,copy) YUGPUImageHighPassSkinSmoothingRadius *radius;

@property (nonatomic) CGFloat sharpnessFactor;

@end

@ -0,0 +1,328 @@
#import "YUGPUImageHighPassSkinSmoothingFilter.h"

#import "GPUImageExposureFilter.h"
#import "GPUImageDissolveBlendFilter.h"
#import "GPUImageSharpenFilter.h"
#import "GPUImageToneCurveFilter.h"

|
||||
SHADER_STRING
|
||||
(
|
||||
precision lowp float;
|
||||
varying highp vec2 texCoord;
|
||||
uniform sampler2D sourceImage;
|
||||
|
||||
void main() {
|
||||
vec4 color = texture2D(sourceImage,texCoord);
|
||||
|
||||
float hardLightColor = color.b;
|
||||
for (int i = 0; i < 3; ++i)
|
||||
{
|
||||
if (hardLightColor < 0.5) {
|
||||
hardLightColor = hardLightColor * hardLightColor * 2.;
|
||||
} else {
|
||||
hardLightColor = 1. - (1. - hardLightColor) * (1. - hardLightColor) * 2.;
|
||||
}
|
||||
}
|
||||
|
||||
float k = 255.0 / (164.0 - 75.0);
|
||||
hardLightColor = (hardLightColor - 75.0 / 255.0) * k;
|
||||
|
||||
gl_FragColor = vec4(vec3(hardLightColor),color.a);
|
||||
}
|
||||
);
|
||||
|
||||
NSString * const YUGPUImageGreenAndBlueChannelOverlayFragmentShaderString =
|
||||
SHADER_STRING
|
||||
(
|
||||
precision lowp float;
|
||||
varying highp vec2 texCoord;
|
||||
uniform sampler2D sourceImage;
|
||||
|
||||
void main() {
|
||||
vec4 image = texture2D(sourceImage, texCoord);
|
||||
vec4 base = vec4(image.g,image.g,image.g,1.0);
|
||||
vec4 overlay = vec4(image.b,image.b,image.b,1.0);
|
||||
float ba = 2.0 * overlay.b * base.b + overlay.b * (1.0 - base.a) + base.b * (1.0 - overlay.a);
|
||||
gl_FragColor = vec4(ba,ba,ba,image.a);
|
||||
}
|
||||
);
|
||||
|
||||
NSString * const YUGPUImageStillImageHighPassFilterFragmentShaderString =
|
||||
SHADER_STRING
|
||||
(
|
||||
precision lowp float;
|
||||
varying highp vec2 texCoord;
|
||||
varying highp vec2 texCoord2;
|
||||
|
||||
uniform sampler2D sourceImage;
|
||||
uniform sampler2D inputImageTexture2;
|
||||
|
||||
void main() {
|
||||
vec4 image = texture2D(sourceImage, texCoord);
|
||||
vec4 blurredImage = texture2D(inputImageTexture2, texCoord);
|
||||
gl_FragColor = vec4((image.rgb - blurredImage.rgb + vec3(0.5,0.5,0.5)), image.a);
|
||||
}
|
||||
);
|
||||
|
||||
@interface YUGPUImageStillImageHighPassFilter : GPUImageFilterGroup
|
||||
|
||||
@property (nonatomic) CGFloat radiusInPixels;
|
||||
@property (nonatomic, weak) GPUImageGaussianBlurFilter *blurFilter;
|
||||
|
||||
@end
|
||||
|
||||
@implementation YUGPUImageStillImageHighPassFilter
|
||||
|
||||
- (instancetype)init {
|
||||
if (self = [super init]) {
|
||||
GPUImageGaussianBlurFilter *blurFilter = [[GPUImageGaussianBlurFilter alloc] init];
|
||||
[self addFilter:blurFilter];
|
||||
self.blurFilter = blurFilter;
|
||||
|
||||
GPUImageTwoInputFilter *filter = [[GPUImageTwoInputFilter alloc] initWithFragmentShaderFromString:YUGPUImageStillImageHighPassFilterFragmentShaderString];
|
||||
[self addFilter:filter];
|
||||
|
||||
[blurFilter addTarget:filter atTextureLocation:1];
|
||||
|
||||
self.initialFilters = @[blurFilter,filter];
|
||||
self.terminalFilter = filter;
|
||||
}
|
||||
return self;
|
||||
}
|
||||
|
||||
- (void)setRadiusInPixels:(CGFloat)radiusInPixels {
|
||||
self.blurFilter.blurRadiusInPixels = radiusInPixels;
|
||||
}
|
||||
|
||||
- (CGFloat)radiusInPixels {
|
||||
return self.blurFilter.blurRadiusInPixels;
|
||||
}
|
||||
|
||||
@end
|
||||
|
||||
@interface YUCIHighPassSkinSmoothingMaskGenerator : GPUImageFilterGroup
|
||||
|
||||
@property (nonatomic) CGFloat highPassRadiusInPixels;
|
||||
|
||||
@property (nonatomic,weak) YUGPUImageStillImageHighPassFilter *highPassFilter;
|
||||
|
||||
@end
|
||||
|
||||
@implementation YUCIHighPassSkinSmoothingMaskGenerator
|
||||
|
||||
- (instancetype)init {
|
||||
if (self = [super init]) {
|
||||
GPUImageFilter *channelOverlayFilter = [[GPUImageFilter alloc] initWithFragmentShaderFromString:YUGPUImageGreenAndBlueChannelOverlayFragmentShaderString];
|
||||
[self addFilter:channelOverlayFilter];
|
||||
|
||||
YUGPUImageStillImageHighPassFilter *highpassFilter = [[YUGPUImageStillImageHighPassFilter alloc] init];
|
||||
[self addFilter:highpassFilter];
|
||||
self.highPassFilter = highpassFilter;
|
||||
|
||||
GPUImageFilter *maskBoostFilter = [[GPUImageFilter alloc] initWithFragmentShaderFromString:YUCIHighPassSkinSmoothingMaskBoostFilterFragmentShaderString];
|
||||
[self addFilter:maskBoostFilter];
|
||||
|
||||
[channelOverlayFilter addTarget:highpassFilter];
|
||||
[highpassFilter addTarget:maskBoostFilter];
|
||||
|
||||
self.initialFilters = @[channelOverlayFilter];
|
||||
self.terminalFilter = maskBoostFilter;
|
||||
}
|
||||
return self;
|
||||
}
|
||||
|
||||
- (void)setHighPassRadiusInPixels:(CGFloat)highPassRadiusInPixels {
|
||||
self.highPassFilter.radiusInPixels = highPassRadiusInPixels;
|
||||
}
|
||||
|
||||
- (CGFloat)highPassRadiusInPixels {
|
||||
return self.highPassFilter.radiusInPixels;
|
||||
}
|
||||
|
||||
@end
|
||||
|
||||
@interface YUGPUImageHighPassSkinSmoothingRadius ()
|
||||
|
||||
@property (nonatomic) CGFloat value;
|
||||
@property (nonatomic) YUGPUImageHighPassSkinSmoothingRadiusUnit unit;
|
||||
|
||||
@end
|
||||
|
||||
@implementation YUGPUImageHighPassSkinSmoothingRadius
|
||||
|
||||
+ (instancetype)radiusInPixels:(CGFloat)pixels {
|
||||
YUGPUImageHighPassSkinSmoothingRadius *radius = [YUGPUImageHighPassSkinSmoothingRadius new];
|
||||
radius.unit = YUGPUImageHighPassSkinSmoothingRadiusUnitPixel;
|
||||
radius.value = pixels;
|
||||
return radius;
|
||||
}
|
||||
|
||||
+ (instancetype)radiusAsFractionOfImageWidth:(CGFloat)fraction {
|
||||
YUGPUImageHighPassSkinSmoothingRadius *radius = [YUGPUImageHighPassSkinSmoothingRadius new];
|
||||
radius.unit = YUGPUImageHighPassSkinSmoothingRadiusUnitFractionOfImageWidth;
|
||||
radius.value = fraction;
|
||||
return radius;
|
||||
}
|
||||
|
||||
- (id)copyWithZone:(NSZone *)zone {
|
||||
return self;
|
||||
}
|
||||
|
||||
- (instancetype)initWithCoder:(NSCoder *)aDecoder {
|
||||
if (self = [super init]) {
|
||||
self.value = [[aDecoder decodeObjectOfClass:[NSNumber class] forKey:NSStringFromSelector(@selector(value))] floatValue];
|
||||
self.unit = [[aDecoder decodeObjectOfClass:[NSNumber class] forKey:NSStringFromSelector(@selector(unit))] integerValue];
|
||||
}
|
||||
return self;
|
||||
}
|
||||
|
||||
- (void)encodeWithCoder:(NSCoder *)aCoder {
|
||||
[aCoder encodeObject:@(self.value) forKey:NSStringFromSelector(@selector(value))];
|
||||
[aCoder encodeObject:@(self.unit) forKey:NSStringFromSelector(@selector(unit))];
|
||||
}
|
||||
|
||||
+ (BOOL)supportsSecureCoding {
|
||||
return YES;
|
||||
}
|
||||
|
||||
@end
|
||||
|
||||
NSString * const YUGPUImageHighpassSkinSmoothingCompositingFilterFragmentShaderString =
|
||||
SHADER_STRING
|
||||
(
|
||||
precision lowp float;
|
||||
varying highp vec2 texCoord;
|
||||
varying highp vec2 texCoord2;
|
||||
varying highp vec2 texCoord3;
|
||||
|
||||
uniform sampler2D sourceImage;
|
||||
uniform sampler2D inputImageTexture2;
|
||||
uniform sampler2D inputImageTexture3;
|
||||
|
||||
void main() {
|
||||
vec4 image = texture2D(sourceImage, texCoord);
|
||||
vec4 toneCurvedImage = texture2D(inputImageTexture2, texCoord);
|
||||
vec4 mask = texture2D(inputImageTexture3, texCoord);
|
||||
gl_FragColor = vec4(mix(image.rgb,toneCurvedImage.rgb,1.0 - mask.b),1.0);
|
||||
}
|
||||
);
|
||||
|
||||
@interface YUGPUImageHighPassSkinSmoothingFilter ()
|
||||
|
||||
@property (nonatomic,weak) YUCIHighPassSkinSmoothingMaskGenerator *maskGenerator;
|
||||
|
||||
@property (nonatomic,weak) GPUImageDissolveBlendFilter *dissolveFilter;
|
||||
|
||||
@property (nonatomic,weak) GPUImageSharpenFilter *sharpenFilter;
|
||||
|
||||
@property (nonatomic,weak) GPUImageToneCurveFilter *skinToneCurveFilter;
|
||||
|
||||
@property (nonatomic) CGSize currentInputSize;
|
||||
|
||||
@end
|
||||
|
||||
@implementation YUGPUImageHighPassSkinSmoothingFilter
|
||||
|
||||
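// Annotation (editor's note): the init below wires the filter graph: an exposure
// filter (-1.0) feeds the mask generator; a tone curve brightens skin tones; a
// dissolve blend (driven by `amount`) mixes the original with the tone-curved image;
// the three-input compositor applies that mix only where the mask is dark; and a
// final sharpen pass restores edge crispness.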
- (instancetype)init {
    if (self = [super init]) {
        GPUImageExposureFilter *exposureFilter = [[GPUImageExposureFilter alloc] init];
        exposureFilter.exposure = -1.0;
        [self addFilter:exposureFilter];

        YUCIHighPassSkinSmoothingMaskGenerator *maskGenerator = [[YUCIHighPassSkinSmoothingMaskGenerator alloc] init];
        [self addFilter:maskGenerator];
        self.maskGenerator = maskGenerator;
        [exposureFilter addTarget:maskGenerator];

        GPUImageToneCurveFilter *skinToneCurveFilter = [[GPUImageToneCurveFilter alloc] init];
        [self addFilter:skinToneCurveFilter];
        self.skinToneCurveFilter = skinToneCurveFilter;

        GPUImageDissolveBlendFilter *dissolveFilter = [[GPUImageDissolveBlendFilter alloc] init];
        [self addFilter:dissolveFilter];
        self.dissolveFilter = dissolveFilter;

        [skinToneCurveFilter addTarget:dissolveFilter atTextureLocation:1];

        GPUImageThreeInputFilter *composeFilter = [[GPUImageThreeInputFilter alloc] initWithFragmentShaderFromString:YUGPUImageHighpassSkinSmoothingCompositingFilterFragmentShaderString];
        [self addFilter:composeFilter];

        [maskGenerator addTarget:composeFilter atTextureLocation:2];
        [self.dissolveFilter addTarget:composeFilter atTextureLocation:1];

        GPUImageSharpenFilter *sharpen = [[GPUImageSharpenFilter alloc] init];
        [self addFilter:sharpen];
        [composeFilter addTarget:sharpen];
        self.sharpenFilter = sharpen;

        self.initialFilters = @[exposureFilter,skinToneCurveFilter,dissolveFilter,composeFilter];
        self.terminalFilter = sharpen;

        //set defaults
        self.amount = 0.75;
        self.radius = [YUGPUImageHighPassSkinSmoothingRadius radiusAsFractionOfImageWidth:4.5/750.0];
        self.sharpnessFactor = 0.4;

        CGPoint controlPoint0 = CGPointMake(0, 0);
        CGPoint controlPoint1 = CGPointMake(120/255.0, 146/255.0);
        CGPoint controlPoint2 = CGPointMake(1.0, 1.0);

        self.controlPoints = @[[NSValue valueWithCGPoint:controlPoint0],
                               [NSValue valueWithCGPoint:controlPoint1],
                               [NSValue valueWithCGPoint:controlPoint2]];
    }
    return self;
}

- (void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex {
    [super setInputSize:newSize atIndex:textureIndex];
    self.currentInputSize = newSize;
    [self updateHighPassRadius];
}

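// Annotation (editor's note): converts the configured radius to pixels - a pixel
// radius is used as-is, while a fraction-of-width radius is resolved against the
// current input size so the blur scales with resolution; the mask generator is only
// updated when the value actually changes.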
- (void)updateHighPassRadius {
    CGSize inputSize = self.currentInputSize;
    if (inputSize.width * inputSize.height > 0) {
        CGFloat radiusInPixels = 0;
        switch (self.radius.unit) {
            case YUGPUImageHighPassSkinSmoothingRadiusUnitPixel:
                radiusInPixels = self.radius.value;
                break;
            case YUGPUImageHighPassSkinSmoothingRadiusUnitFractionOfImageWidth:
                radiusInPixels = ceil(inputSize.width * self.radius.value);
                break;
            default:
                break;
        }
        if (radiusInPixels != self.maskGenerator.highPassRadiusInPixels) {
            self.maskGenerator.highPassRadiusInPixels = radiusInPixels;
        }
    }
}

- (void)setRadius:(YUGPUImageHighPassSkinSmoothingRadius *)radius {
    _radius = radius.copy;
    [self updateHighPassRadius];
}

- (void)setControlPoints:(NSArray<NSValue *> *)controlPoints {
    self.skinToneCurveFilter.rgbCompositeControlPoints = controlPoints;
}

- (NSArray<NSValue *> *)controlPoints {
    return self.skinToneCurveFilter.rgbCompositeControlPoints;
}

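// Annotation (editor's note): `amount` drives both the dissolve mix (how much of the
// smoothed image shows through) and the sharpen strength, which is scaled by
// `sharpnessFactor` so the two stay proportional as the slider moves.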
- (void)setAmount:(CGFloat)amount {
    _amount = amount;
    self.dissolveFilter.mix = amount;
    self.sharpenFilter.sharpness = self.sharpnessFactor * amount;
}

- (void)setSharpnessFactor:(CGFloat)sharpnessFactor {
    _sharpnessFactor = sharpnessFactor;
    self.sharpenFilter.sharpness = sharpnessFactor * self.amount;
}

@end

@ -20,6 +20,11 @@ private let actionImage = generateTintedImage(image: UIImage(bundleImageName: "C
private let nameFont = Font.medium(15.0)
private let dateFont = Font.regular(14.0)

enum AvatarGalleryItemFooterContent {
    case info
    case own(Bool)
}

final class AvatarGalleryItemFooterContentNode: GalleryFooterContentNode {
    private let context: AccountContext
    private var presentationData: PresentationData
@ -30,6 +35,8 @@ final class AvatarGalleryItemFooterContentNode: GalleryFooterContentNode {
    private let actionButton: UIButton
    private let nameNode: ASTextNode
    private let dateNode: ASTextNode
    private let mainNode: ASTextNode
    private let setMainButton: HighlightableButtonNode

    private var currentNameText: String?
    private var currentDateText: String?
@ -42,6 +49,12 @@ final class AvatarGalleryItemFooterContentNode: GalleryFooterContentNode {

    var share: ((GalleryControllerInteraction) -> Void)?

    var setMain: (() -> Void)? {
        didSet {
            self.setMainButton.isHidden = self.setMain == nil
        }
    }

    init(context: AccountContext, presentationData: PresentationData) {
        self.context = context
        self.presentationData = presentationData
@ -50,6 +63,7 @@ final class AvatarGalleryItemFooterContentNode: GalleryFooterContentNode {

        self.deleteButton = UIButton()
        self.deleteButton.isHidden = true

        self.actionButton = UIButton()

        self.deleteButton.setImage(deleteImage, for: [.normal])
@ -65,6 +79,16 @@ final class AvatarGalleryItemFooterContentNode: GalleryFooterContentNode {
        self.dateNode.isUserInteractionEnabled = false
        self.dateNode.displaysAsynchronously = false

        self.setMainButton = HighlightableButtonNode()
        self.setMainButton.isHidden = true
        self.setMainButton.setAttributedTitle(NSAttributedString(string: "Set as Main Photo", font: Font.regular(17.0), textColor: .white), for: .normal)

        self.mainNode = ASTextNode()
        self.mainNode.maximumNumberOfLines = 1
        self.mainNode.isUserInteractionEnabled = false
        self.mainNode.displaysAsynchronously = false
        self.mainNode.attributedText = NSAttributedString(string: "Main Photo", font: Font.regular(17.0), textColor: UIColor(rgb: 0x808080))

        super.init()

        self.view.addSubview(self.deleteButton)
@ -72,15 +96,18 @@ final class AvatarGalleryItemFooterContentNode: GalleryFooterContentNode {

        self.addSubnode(self.nameNode)
        self.addSubnode(self.dateNode)
        self.addSubnode(self.setMainButton)
        self.addSubnode(self.mainNode)

        self.deleteButton.addTarget(self, action: #selector(self.deleteButtonPressed), for: [.touchUpInside])
        self.actionButton.addTarget(self, action: #selector(self.actionButtonPressed), for: [.touchUpInside])
        self.setMainButton.addTarget(self, action: #selector(self.setMainButtonPressed), forControlEvents: .touchUpInside)
    }

    deinit {
    }

    func setEntry(_ entry: AvatarGalleryEntry) {
    func setEntry(_ entry: AvatarGalleryEntry, content: AvatarGalleryItemFooterContent) {
        var nameText: String?
        var dateText: String?
        switch entry {
@ -107,6 +134,19 @@ final class AvatarGalleryItemFooterContentNode: GalleryFooterContentNode {
                self.dateNode.attributedText = nil
            }
        }

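        // Annotation (editor's note): .info keeps the name/date labels for other
        // users' photos, while .own(isMainPhoto) swaps them for the "Main Photo" label
        // or the "Set as Main Photo" button on the current user's own avatars.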
        switch content {
        case .info:
            self.nameNode.isHidden = false
            self.dateNode.isHidden = false
            self.mainNode.isHidden = true
            self.setMainButton.isHidden = true
        case let .own(isMainPhoto):
            self.nameNode.isHidden = true
            self.dateNode.isHidden = true
            self.mainNode.isHidden = !isMainPhoto
            self.setMainButton.isHidden = isMainPhoto
        }
    }

    override func updateLayout(size: CGSize, metrics: LayoutMetrics, leftInset: CGFloat, rightInset: CGFloat, bottomInset: CGFloat, contentInset: CGFloat, transition: ContainedViewLayoutTransition) -> CGFloat {
@ -117,8 +157,9 @@ final class AvatarGalleryItemFooterContentNode: GalleryFooterContentNode {
        self.actionButton.frame = CGRect(origin: CGPoint(x: leftInset, y: panelHeight - bottomInset - 44.0), size: CGSize(width: 44.0, height: 44.0))
        self.deleteButton.frame = CGRect(origin: CGPoint(x: width - 44.0 - rightInset, y: panelHeight - bottomInset - 44.0), size: CGSize(width: 44.0, height: 44.0))

        let nameSize = self.nameNode.measure(CGSize(width: width - 44.0 * 2.0 - 8.0 * 2.0 - leftInset - rightInset, height: CGFloat.greatestFiniteMagnitude))
        let dateSize = self.dateNode.measure(CGSize(width: width - 44.0 * 2.0 - 8.0 * 2.0, height: CGFloat.greatestFiniteMagnitude))
        let constrainedSize = CGSize(width: width - 44.0 * 2.0 - 8.0 * 2.0 - leftInset - rightInset, height: CGFloat.greatestFiniteMagnitude)
        let nameSize = self.nameNode.measure(constrainedSize)
        let dateSize = self.dateNode.measure(constrainedSize)

        if nameSize.height.isZero {
            self.dateNode.frame = CGRect(origin: CGPoint(x: floor((width - dateSize.width) / 2.0), y: panelHeight - bottomInset - 44.0 + floor((44.0 - dateSize.height) / 2.0)), size: dateSize)
@ -128,6 +169,12 @@ final class AvatarGalleryItemFooterContentNode: GalleryFooterContentNode {
            self.dateNode.frame = CGRect(origin: CGPoint(x: floor((width - dateSize.width) / 2.0), y: panelHeight - bottomInset - 44.0 + floor((44.0 - dateSize.height - nameSize.height - labelsSpacing) / 2.0) + nameSize.height + labelsSpacing), size: dateSize)
        }

        let mainSize = self.mainNode.measure(constrainedSize)
        self.mainNode.frame = CGRect(origin: CGPoint(x: floor((width - mainSize.width) / 2.0), y: panelHeight - bottomInset - 44.0 + floor((44.0 - mainSize.height) / 2.0)), size: mainSize)

        let mainButtonSize = self.setMainButton.measure(constrainedSize)
        self.setMainButton.frame = CGRect(origin: CGPoint(x: floor((width - mainButtonSize.width) / 2.0), y: panelHeight - bottomInset - 44.0 + floor((44.0 - mainButtonSize.height) / 2.0)), size: mainButtonSize)

        return panelHeight
    }

@ -136,6 +183,7 @@ final class AvatarGalleryItemFooterContentNode: GalleryFooterContentNode {
        self.actionButton.alpha = 1.0
        self.nameNode.alpha = 1.0
        self.dateNode.alpha = 1.0
        self.setMainButton.alpha = 1.0
    }

    override func animateOut(toHeight: CGFloat, nextContentNode: GalleryFooterContentNode, transition: ContainedViewLayoutTransition, completion: @escaping () -> Void) {
@ -143,6 +191,7 @@ final class AvatarGalleryItemFooterContentNode: GalleryFooterContentNode {
        self.actionButton.alpha = 0.0
        self.nameNode.alpha = 0.0
        self.dateNode.alpha = 0.0
        self.setMainButton.alpha = 0.0
        completion()
    }

@ -171,4 +220,8 @@ final class AvatarGalleryItemFooterContentNode: GalleryFooterContentNode {
            self.share?(controllerInteraction)
        }
    }

    @objc private func setMainButtonPressed() {

    }
}

@ -174,7 +174,16 @@ final class PeerAvatarImageGalleryItemNode: ZoomableContentGalleryItemNode {
        if self.entry != entry {
            self.entry = entry

            self.footerContentNode.setEntry(entry)
            var footerContent: AvatarGalleryItemFooterContent
            if self.peer.id == self.context.account.peerId {
                footerContent = .own(true)
            } else {
                footerContent = .info
            }

            self.peer.largeProfileImage

            self.footerContentNode.setEntry(entry, content: footerContent)

            if let largestSize = largestImageRepresentation(entry.representations.map({ $0.representation })) {
                let displaySize = largestSize.dimensions.cgSize.fitted(CGSize(width: 1280.0, height: 1280.0)).dividedByScreenScale().integralFloor

@ -528,7 +528,7 @@ func editSettingsController(context: AccountContext, currentName: ItemListAvatar
            }
        }

        let mixin = TGMediaAvatarMenuMixin(context: legacyController.context, parentController: emptyController, hasSearchButton: true, hasDeleteButton: hasPhotos, hasViewButton: hasPhotos, personalPhoto: true, saveEditedPhotos: false, saveCapturedMedia: false, signup: true)!
        let mixin = TGMediaAvatarMenuMixin(context: legacyController.context, parentController: emptyController, hasSearchButton: true, hasDeleteButton: hasPhotos, hasViewButton: hasPhotos, personalPhoto: true, saveEditedPhotos: false, saveCapturedMedia: false, signup: false)!
        let _ = currentAvatarMixin.swap(mixin)
        mixin.requestSearchController = { assetsController in
            let controller = WebSearchController(context: context, peer: peer, configuration: searchBotsConfiguration, mode: .avatar(initialQuery: nil, completion: { result in