Video editing preview

Ilya Laktyushin 2020-05-14 19:40:27 +01:00
parent d7e7b968ca
commit ec68cb0d19
44 changed files with 1050 additions and 167 deletions

Binary file not shown. (Before: 111 B)

Binary file not shown. (Before: 122 B)

Binary file not shown. (Before: 845 B)

Binary file not shown. (Before: 652 B)

View File

@@ -2,7 +2,7 @@
filegroup(
name = "LegacyComponentsResources",
srcs = glob([
"LegacyComponentsResources.bundle/**/*",
"Resources/LegacyComponentsResources.bundle/**/*",
], exclude = ["Resources/LegacyComponentsResources.bundle/**/.*"]),
visibility = ["//visibility:public"],
)

View File

@@ -5,9 +5,6 @@
@interface PGPhotoEditorValues : NSObject <TGMediaEditAdjustments>
@property (nonatomic, readonly) CGFloat cropRotation;
@property (nonatomic, readonly) NSDictionary *toolValues;
- (bool)toolsApplied;
+ (instancetype)editorValuesWithOriginalSize:(CGSize)originalSize cropRect:(CGRect)cropRect cropRotation:(CGFloat)cropRotation cropOrientation:(UIImageOrientation)cropOrientation cropLockedAspectRatio:(CGFloat)cropLockedAspectRatio cropMirrored:(bool)cropMirrored toolValues:(NSDictionary *)toolValues paintingData:(TGPaintingData *)paintingData sendAsGif:(bool)sendAsGif;

View File

@@ -29,7 +29,9 @@
@property (nonatomic, readonly) bool cropMirrored;
@property (nonatomic, readonly) bool sendAsGif;
@property (nonatomic, readonly) TGPaintingData *paintingData;
@property (nonatomic, readonly) NSDictionary *toolValues;
- (bool)toolsApplied;
- (bool)hasPainting;
- (bool)cropAppliedForAvatar:(bool)forAvatar;

View File

@@ -39,6 +39,7 @@ typedef enum
cropMirrored:(bool)cropMirrored
trimStartValue:(NSTimeInterval)trimStartValue
trimEndValue:(NSTimeInterval)trimEndValue
toolValues:(NSDictionary *)toolValues
paintingData:(TGPaintingData *)paintingData
sendAsGif:(bool)sendAsGif
preset:(TGMediaVideoConversionPreset)preset;

Binary file not shown. (Before: 6.6 KiB)

Binary file not shown. (Before: 11 KiB)

View File

@@ -300,7 +300,7 @@ NSString *const kGPUImagePassthroughFragmentShaderString = SHADER_STRING
[GPUImageContext setActiveShaderProgram:filterProgram];
outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:[self sizeOfFBO] textureOptions:self.outputTextureOptions onlyTexture:NO];
outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:[self sizeOfFBO] textureOptions:self.outputTextureOptions onlyTexture:NO mark: true];
[outputFramebuffer activateFramebuffer];
if (usingNextFrameForImageCapture)
{

View File

@@ -30,6 +30,8 @@ typedef struct GPUTextureOptions {
@property (nonatomic, readonly) GLuint texture;
@property (nonatomic, readonly) BOOL missingFramebuffer;
@property (nonatomic, assign) BOOL mark;
// Initialization and teardown
- (id)initWithSize:(CGSize)framebufferSize;
- (id)initWithSize:(CGSize)framebufferSize textureOptions:(GPUTextureOptions)fboTextureOptions onlyTexture:(BOOL)onlyGenerateTexture;

View File

@@ -1,6 +1,8 @@
#import "GPUImageFramebuffer.h"
#import "GPUImageOutput.h"
#import "TGTimerTarget.h"
@interface GPUImageFramebuffer()
{
GLuint framebuffer;
@@ -24,6 +26,9 @@ void dataProviderReleaseCallback (void *info, const void *data, size_t size);
void dataProviderUnlockCallback (void *info, const void *data, size_t size);
@implementation GPUImageFramebuffer
{
NSTimer *fixer;
}
#pragma mark -
#pragma mark Initialization and teardown
@@ -252,6 +257,9 @@ void dataProviderUnlockCallback (void *info, const void *data, size_t size);
}
framebufferReferenceCount++;
[fixer invalidate];
fixer = nil;
}
- (void)unlock
@@ -263,12 +271,20 @@ void dataProviderUnlockCallback (void *info, const void *data, size_t size);
NSAssert(framebufferReferenceCount > 0, @"Tried to overrelease a framebuffer, did you forget to call -useNextFrameForImageCapture before using -imageFromCurrentFramebuffer?");
framebufferReferenceCount--;
if (framebufferReferenceCount < 1)
{
[[GPUImageContext sharedFramebufferCache] returnFramebufferToCache:self];
} else if (framebufferReferenceCount == 1) {
fixer = [TGTimerTarget scheduledMainThreadTimerWithTarget:self action:@selector(fixTick) interval:0.3 repeat:false];
}
}
- (void)fixTick {
[self clearAllLocks];
[self destroyFramebuffer];
}
- (void)clearAllLocks
{
framebufferReferenceCount = 0;
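The `fixer` timer above is a leak guard: when the reference count drops to exactly 1, a one-shot 0.3 s timer is scheduled, and if nothing locks or releases the framebuffer before it fires, `fixTick` force-clears the locks and destroys it; a fresh `lock` invalidates the timer. A minimal sketch of the same pattern, reusing the `TGTimerTarget` call from this hunk (the `watchdog` name is illustrative):

NSTimer *watchdog = [TGTimerTarget scheduledMainThreadTimerWithTarget:self action:@selector(fixTick) interval:0.3 repeat:false];
// ... and if the resource is touched again before the timer fires:
[watchdog invalidate];
watchdog = nil;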

View File

@@ -7,6 +7,7 @@
// Framebuffer management
- (GPUImageFramebuffer *)fetchFramebufferForSize:(CGSize)framebufferSize textureOptions:(GPUTextureOptions)textureOptions onlyTexture:(BOOL)onlyTexture;
- (GPUImageFramebuffer *)fetchFramebufferForSize:(CGSize)framebufferSize onlyTexture:(BOOL)onlyTexture;
- (GPUImageFramebuffer *)fetchFramebufferForSize:(CGSize)framebufferSize textureOptions:(GPUTextureOptions)textureOptions onlyTexture:(BOOL)onlyTexture mark:(BOOL)mark;
- (void)returnFramebufferToCache:(GPUImageFramebuffer *)framebuffer;
- (void)purgeAllUnassignedFramebuffers;
- (void)addFramebufferToActiveImageCaptureList:(GPUImageFramebuffer *)framebuffer;
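The four-argument fetch is additive: the old three-argument form now forwards with `mark:false` (see the implementation below), so only opted-in call sites, like the passthrough filter above, receive marked framebuffers. A sketch of both call shapes, with `size` and `options` standing in for the caller's values:

GPUImageFramebuffer *marked = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:size textureOptions:options onlyTexture:NO mark:true];
GPUImageFramebuffer *unmarked = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:size textureOptions:options onlyTexture:NO]; // forwards mark:false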

View File

@@ -77,7 +77,11 @@
}
}
- (GPUImageFramebuffer *)fetchFramebufferForSize:(CGSize)framebufferSize textureOptions:(GPUTextureOptions)textureOptions onlyTexture:(BOOL)onlyTexture
- (GPUImageFramebuffer *)fetchFramebufferForSize:(CGSize)framebufferSize textureOptions:(GPUTextureOptions)textureOptions onlyTexture:(BOOL)onlyTexture {
return [self fetchFramebufferForSize:framebufferSize textureOptions:textureOptions onlyTexture:onlyTexture mark:false];
}
- (GPUImageFramebuffer *)fetchFramebufferForSize:(CGSize)framebufferSize textureOptions:(GPUTextureOptions)textureOptions onlyTexture:(BOOL)onlyTexture mark:(BOOL)mark
{
__block GPUImageFramebuffer *framebufferFromCache = nil;
// dispatch_sync(framebufferCacheQueue, ^{
@@ -90,6 +94,7 @@
{
// Nothing in the cache, create a new framebuffer to use
framebufferFromCache = [[GPUImageFramebuffer alloc] initWithSize:framebufferSize textureOptions:textureOptions onlyTexture:onlyTexture];
framebufferFromCache.mark = mark;
}
else
{
@@ -115,6 +120,7 @@
if (framebufferFromCache == nil)
{
framebufferFromCache = [[GPUImageFramebuffer alloc] initWithSize:framebufferSize textureOptions:textureOptions onlyTexture:onlyTexture];
framebufferFromCache.mark = mark;
}
}
});
@@ -142,7 +148,7 @@
[framebuffer clearAllLocks];
// dispatch_async(framebufferCacheQueue, ^{
runAsynchronouslyOnVideoProcessingQueue(^{
runAsynchronouslyOnVideoProcessingQueue(^{
CGSize framebufferSize = framebuffer.size;
GPUTextureOptions framebufferTextureOptions = framebuffer.textureOptions;
NSString *lookupHash = [self hashForSize:framebufferSize textureOptions:framebufferTextureOptions onlyTexture:framebuffer.missingFramebuffer];

View File

@@ -122,10 +122,9 @@ NSString *const kGPUImageTwoInputTextureVertexShaderString = SHADER_STRING
[firstInputFramebuffer unlock];
[secondInputFramebuffer unlock];
if (usingNextFrameForImageCapture)
{
dispatch_semaphore_signal(imageCaptureSemaphore);
}
}
#pragma mark -

View File

@@ -215,4 +215,9 @@
return false;
}
- (bool)isAvialableForVideo
{
return false;
}
@end

View File

@@ -155,4 +155,9 @@
);
}
- (bool)isAvialableForVideo
{
return false;
}
@end

View File

@@ -29,11 +29,14 @@
@property (nonatomic, readonly) bool enableStickers;
@property (nonatomic, readonly) bool forVideo;
- (instancetype)initWithOriginalSize:(CGSize)originalSize adjustments:(id<TGMediaEditAdjustments>)adjustments forVideo:(bool)forVideo enableStickers:(bool)enableStickers;
- (void)cleanup;
- (void)setImage:(UIImage *)image forCropRect:(CGRect)cropRect cropRotation:(CGFloat)cropRotation cropOrientation:(UIImageOrientation)cropOrientation cropMirrored:(bool)cropMirrored fullSize:(bool)fullSize;
- (void)setVideoAsset:(AVAsset *)asset;
- (void)processAnimated:(bool)animated completion:(void (^)(void))completion;
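A sketch of how a caller might drive the new video path through this interface, assuming an AVAsset obtained elsewhere (for instance via TGMediaAssetImageSignals, as in the picker changes below) and a preview view already attached to the editor:

PGPhotoEditor *editor = [[PGPhotoEditor alloc] initWithOriginalSize:originalSize adjustments:adjustments forVideo:true enableStickers:true];
[editor setVideoAsset:avAsset];
[editor processAnimated:false completion:nil]; // begins looping playback through the filter chain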

View File

@@ -14,6 +14,8 @@
#import <LegacyComponents/TGVideoEditAdjustments.h>
#import <LegacyComponents/TGPaintingData.h>
#import "PGVideoMovie.h"
#import "PGPhotoToolComposer.h"
#import "PGEnhanceTool.h"
#import "PGExposureTool.h"
@@ -38,7 +40,7 @@
id<TGMediaEditAdjustments> _initialAdjustments;
PGPhotoEditorPicture *_currentInput;
GPUImageOutput *_currentInput;
NSArray *_currentProcessChain;
GPUImageOutput <GPUImageInput> *_finalFilter;
@@ -53,9 +55,9 @@
SPipe *_histogramPipe;
SQueue *_queue;
bool _forVideo;
SQueue *_videoQueue;
bool _playing;
bool _processing;
bool _needsReprocessing;
@@ -71,6 +73,7 @@
if (self != nil)
{
_queue = [[SQueue alloc] init];
_videoQueue = [[SQueue alloc] init];
_forVideo = forVideo;
_enableStickers = enableStickers;
@@ -105,6 +108,10 @@
- (void)dealloc
{
if ([_currentInput isKindOfClass:[PGVideoMovie class]]) {
[(PGVideoMovie *)_currentInput cancelProcessing];
}
TGDispatchAfter(1.5f, dispatch_get_main_queue(), ^
{
[[GPUImageContext sharedFramebufferCache] purgeAllUnassignedFramebuffers];
@@ -122,7 +129,9 @@
for (Class toolClass in [PGPhotoEditor availableTools])
{
PGPhotoTool *toolInstance = [[toolClass alloc] init];
[tools addObject:toolInstance];
if (!_forVideo || toolInstance.isAvialableForVideo) {
[tools addObject:toolInstance];
}
}
return tools;
@@ -146,6 +155,19 @@
_fullSize = fullSize;
}
- (void)setVideoAsset:(AVAsset *)asset {
[_toolComposer invalidate];
_currentProcessChain = nil;
[_currentInput removeAllTargets];
PGVideoMovie *movie = [[PGVideoMovie alloc] initWithAsset:asset];
movie.shouldRepeat = true;
movie.playAtActualSpeed = true;
_currentInput = movie;
_fullSize = true;
}
#pragma mark - Properties
- (CGSize)rotatedCropSize
@@ -183,6 +205,25 @@
if (self.previewOutput == nil)
return;
if (self.forVideo) {
[_queue dispatch:^
{
[self updateProcessChain];
GPUImageOutput *currentInput = _currentInput;
if (!_playing) {
_playing = true;
[_videoQueue dispatch:^{
if ([currentInput isKindOfClass:[PGVideoMovie class]]) {
[(PGVideoMovie *)currentInput startProcessing];
}
}];
}
}];
return;
}
if (iosMajorVersion() < 7)
animated = false;
@@ -196,86 +237,96 @@
[_queue dispatch:^
{
NSMutableArray *processChain = [NSMutableArray array];
for (PGPhotoTool *tool in _toolComposer.advancedTools)
{
if (!tool.shouldBeSkipped && tool.pass != nil)
[processChain addObject:tool.pass];
}
_toolComposer.imageSize = _cropRect.size;
[processChain addObject:_toolComposer];
[self updateProcessChain];
if (!self.forVideo && capture)
[_finalFilter useNextFrameForImageCapture];
TGPhotoEditorPreviewView *previewOutput = self.previewOutput;
if (![_currentProcessChain isEqualToArray:processChain])
{
[_currentInput removeAllTargets];
for (PGPhotoProcessPass *pass in _currentProcessChain)
[pass.filter removeAllTargets];
_currentProcessChain = processChain;
GPUImageOutput <GPUImageInput> *lastFilter = ((PGPhotoProcessPass *)_currentProcessChain.firstObject).filter;
[_currentInput addTarget:lastFilter];
NSInteger chainLength = _currentProcessChain.count;
if (chainLength > 1)
{
for (NSInteger i = 1; i < chainLength; i++)
{
PGPhotoProcessPass *pass = ((PGPhotoProcessPass *)_currentProcessChain[i]);
GPUImageOutput <GPUImageInput> *filter = pass.filter;
[lastFilter addTarget:filter];
lastFilter = filter;
}
}
_finalFilter = lastFilter;
[_finalFilter addTarget:previewOutput.imageView];
[_finalFilter addTarget:_histogramGenerator];
}
if (capture)
[_finalFilter useNextFrameForImageCapture];
for (PGPhotoProcessPass *pass in _currentProcessChain)
[pass process];
if (animated)
{
TGDispatchOnMainThread(^
{
[previewOutput prepareTransitionFadeView];
});
}
[_currentInput processSynchronous:true completion:^
{
if (completion != nil)
completion();
_processing = false;
if ([_currentInput isKindOfClass:[PGPhotoEditorPicture class]]) {
PGPhotoEditorPicture *picture = (PGPhotoEditorPicture *)_currentInput;
if (animated)
{
TGDispatchOnMainThread(^
{
[previewOutput performTransitionFade];
[previewOutput prepareTransitionFadeView];
});
}
if (_needsReprocessing && !synchronous)
[picture processSynchronous:true completion:^
{
_needsReprocessing = false;
[self processAnimated:false completion:nil];
}
}];
if (completion != nil)
completion();
_processing = false;
if (animated)
{
TGDispatchOnMainThread(^
{
[previewOutput performTransitionFade];
});
}
if (_needsReprocessing && !synchronous)
{
_needsReprocessing = false;
[self processAnimated:false completion:nil];
}
}];
} else {
}
} synchronous:synchronous];
}
- (void)updateProcessChain {
NSMutableArray *processChain = [NSMutableArray array];
for (PGPhotoTool *tool in _toolComposer.advancedTools)
{
if (!tool.shouldBeSkipped && tool.pass != nil)
[processChain addObject:tool.pass];
}
_toolComposer.imageSize = _cropRect.size;
[processChain addObject:_toolComposer];
TGPhotoEditorPreviewView *previewOutput = self.previewOutput;
if (![_currentProcessChain isEqualToArray:processChain])
{
[_currentInput removeAllTargets];
for (PGPhotoProcessPass *pass in _currentProcessChain)
[pass.filter removeAllTargets];
_currentProcessChain = processChain;
GPUImageOutput <GPUImageInput> *lastFilter = ((PGPhotoProcessPass *)_currentProcessChain.firstObject).filter;
[_currentInput addTarget:lastFilter];
NSInteger chainLength = _currentProcessChain.count;
if (chainLength > 1)
{
for (NSInteger i = 1; i < chainLength; i++)
{
PGPhotoProcessPass *pass = ((PGPhotoProcessPass *)_currentProcessChain[i]);
GPUImageOutput <GPUImageInput> *filter = pass.filter;
[lastFilter addTarget:filter];
lastFilter = filter;
}
}
_finalFilter = lastFilter;
[_finalFilter addTarget:previewOutput.imageView];
if (!self.forVideo)
[_finalFilter addTarget:_histogramGenerator];
}
}
#pragma mark - Result
- (void)createResultImageWithCompletion:(void (^)(UIImage *image))completion
@@ -318,13 +369,6 @@
PGPhotoEditorValues *editorValues = (PGPhotoEditorValues *)adjustments;
self.cropRotation = editorValues.cropRotation;
for (PGPhotoTool *tool in self.tools)
{
id value = editorValues.toolValues[tool.identifier];
if (value != nil && [value isKindOfClass:[tool valueClass]])
tool.value = [value copy];
}
}
else if ([adjustments isKindOfClass:[TGVideoEditAdjustments class]])
{
@@ -334,6 +378,13 @@
self.sendAsGif = videoAdjustments.sendAsGif;
self.preset = videoAdjustments.preset;
}
for (PGPhotoTool *tool in self.tools)
{
id value = adjustments.toolValues[tool.identifier];
if (value != nil && [value isKindOfClass:[tool valueClass]])
tool.value = [value copy];
}
}
- (id<TGMediaEditAdjustments>)exportAdjustments
@@ -343,25 +394,25 @@
- (id<TGMediaEditAdjustments>)exportAdjustmentsWithPaintingData:(TGPaintingData *)paintingData
{
NSMutableDictionary *toolValues = [[NSMutableDictionary alloc] init];
for (PGPhotoTool *tool in self.tools)
{
if (!tool.shouldBeSkipped && (!_forVideo || tool.isAvialableForVideo))
{
if (!([tool.value isKindOfClass:[NSNumber class]] && ABS([tool.value floatValue] - (float)tool.defaultValue) < FLT_EPSILON))
toolValues[tool.identifier] = [tool.value copy];
}
}
if (!_forVideo)
{
NSMutableDictionary *toolValues = [[NSMutableDictionary alloc] init];
for (PGPhotoTool *tool in self.tools)
{
if (!tool.shouldBeSkipped)
{
if (!([tool.value isKindOfClass:[NSNumber class]] && ABS([tool.value floatValue] - (float)tool.defaultValue) < FLT_EPSILON))
toolValues[tool.identifier] = [tool.value copy];
}
}
return [PGPhotoEditorValues editorValuesWithOriginalSize:self.originalSize cropRect:self.cropRect cropRotation:self.cropRotation cropOrientation:self.cropOrientation cropLockedAspectRatio:self.cropLockedAspectRatio cropMirrored:self.cropMirrored toolValues:toolValues paintingData:paintingData sendAsGif:self.sendAsGif];
}
else
{
TGVideoEditAdjustments *initialAdjustments = (TGVideoEditAdjustments *)_initialAdjustments;
return [TGVideoEditAdjustments editAdjustmentsWithOriginalSize:self.originalSize cropRect:self.cropRect cropOrientation:self.cropOrientation cropLockedAspectRatio:self.cropLockedAspectRatio cropMirrored:self.cropMirrored trimStartValue:initialAdjustments.trimStartValue trimEndValue:initialAdjustments.trimEndValue paintingData:paintingData sendAsGif:self.sendAsGif preset:self.preset];
return [TGVideoEditAdjustments editAdjustmentsWithOriginalSize:self.originalSize cropRect:self.cropRect cropOrientation:self.cropOrientation cropLockedAspectRatio:self.cropLockedAspectRatio cropMirrored:self.cropMirrored trimStartValue:initialAdjustments.trimStartValue trimEndValue:initialAdjustments.trimEndValue toolValues:toolValues paintingData:paintingData sendAsGif:self.sendAsGif preset:self.preset];
}
}
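After this refactor the tool values are collected once, up front, and feed both export branches, so video adjustments now carry them too. A sketch of the round trip (the cast mirrors checks used elsewhere in this file):

id<TGMediaEditAdjustments> adjustments = [editor exportAdjustmentsWithPaintingData:paintingData];
if ([adjustments isKindOfClass:[TGVideoEditAdjustments class]] && [(TGVideoEditAdjustments *)adjustments toolsApplied]) {
    // tool values survived export and will reach the video pipeline
}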

View File

@@ -12,6 +12,7 @@
@synthesize cropMirrored = _cropMirrored;
@synthesize paintingData = _paintingData;
@synthesize sendAsGif = _sendAsGif;
@synthesize toolValues = _toolValues;
+ (instancetype)editorValuesWithOriginalSize:(CGSize)originalSize cropRect:(CGRect)cropRect cropRotation:(CGFloat)cropRotation cropOrientation:(UIImageOrientation)cropOrientation cropLockedAspectRatio:(CGFloat)cropLockedAspectRatio cropMirrored:(bool)cropMirrored toolValues:(NSDictionary *)toolValues paintingData:(TGPaintingData *)paintingData sendAsGif:(bool)sendAsGif
{

View File

@@ -98,6 +98,14 @@
return [_interpolationFilter frameProcessingCompletionBlock];
}
- (GPUImageFramebuffer *)framebufferForOutput {
return [_interpolationFilter framebufferForOutput];
}
- (void)removeOutputFramebuffer {
[_interpolationFilter removeOutputFramebuffer];
}
#pragma mark - GPUImageInput
- (void)newFrameReadyAtTime:(CMTime)frameTime atIndex:(NSInteger)textureIndex

View File

@@ -32,9 +32,8 @@
@property (nonatomic, readonly) GPUImageOutput <GPUImageInput> *filter;
- (void)updateParameters;
- (void)process;
- (void)invalidate;
@end
extern NSString *const PGPhotoEnhanceColorSwapShaderString;
extern NSString *const PGPhotoEnhanceColorSwapShaderString;

View File

@@ -148,11 +148,6 @@ NSString *const PGPhotoEnhanceColorSwapShaderString = PGShaderString
[_filter removeAllTargets];
}
- (void)process
{
}
- (void)invalidate
{

View File

@@ -41,6 +41,7 @@ typedef enum
@property (nonatomic, readonly) PGPhotoProcessPass *pass;
@property (nonatomic, readonly) bool isSimple;
@property (nonatomic, readonly) bool isAvialableForVideo;
@property (nonatomic, weak) PGPhotoToolComposer *toolComposer;

View File

@@ -38,6 +38,11 @@
return true;
}
- (bool)isAvialableForVideo
{
return true;
}
- (NSInteger)order
{
return _order;

View File

@@ -40,8 +40,7 @@
- (bool)shouldBeSkipped
{
return false;
//return (fabsf(((NSNumber *)self.displayValue).floatValue - self.defaultValue) < FLT_EPSILON);
return (fabs(((NSNumber *)self.displayValue).floatValue - self.defaultValue) < FLT_EPSILON);
}
- (void)updatePassParameters
@@ -50,4 +49,9 @@
[(PGPhotoSharpenPass *)_pass setSharpness:0.125f + value.floatValue / 100 * 0.6f];
}
- (bool)isAvialableForVideo
{
return false;
}
@end

View File

@@ -0,0 +1,26 @@
#import <Foundation/Foundation.h>
#import <AVFoundation/AVFoundation.h>
#import "GPUImageContext.h"
#import "GPUImageOutput.h"
@interface PGVideoMovie : GPUImageOutput
@property (readwrite, retain) AVAsset *asset;
@property (readonly, nonatomic) bool shouldRepeat;
@property (readonly, nonatomic) CGFloat progress;
@property (readonly, nonatomic) AVAssetReader *assetReader;
@property (readonly, nonatomic) BOOL audioEncodingIsFinished;
@property (readonly, nonatomic) BOOL videoEncodingIsFinished;
- (instancetype)initWithAsset:(AVAsset *)asset;
- (BOOL)readNextVideoFrameFromOutput:(AVAssetReaderOutput *)readerVideoTrackOutput;
- (BOOL)readNextAudioSampleFromOutput:(AVAssetReaderOutput *)readerAudioTrackOutput;
- (void)startProcessing;
- (void)endProcessing;
- (void)cancelProcessing;
- (void)processMovieFrame:(CMSampleBufferRef)movieSampleBuffer;
@end
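Since PGVideoMovie subclasses GPUImageOutput, it plugs into a filter chain like any other source. A minimal sketch, assuming `filter` is some GPUImageOutput <GPUImageInput> as in PGPhotoEditor's process chain:

PGVideoMovie *movie = [[PGVideoMovie alloc] initWithAsset:asset];
movie.shouldRepeat = true;
[movie addTarget:filter];
[movie startProcessing]; // runs a synchronous read loop, hence the dedicated _videoQueue in PGPhotoEditor
// ... later:
[movie cancelProcessing];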

View File

@@ -0,0 +1,682 @@
#import "PGVideoMovie.h"
#import "GPUImageFilter.h"
GLfloat kColorConversion601Default[] = {
1.164, 1.164, 1.164,
0.0, -0.392, 2.017,
1.596, -0.813, 0.0,
};
GLfloat kColorConversion601FullRangeDefault[] = {
1.0, 1.0, 1.0,
0.0, -0.343, 1.765,
1.4, -0.711, 0.0,
};
GLfloat kColorConversion709Default[] = {
1.164, 1.164, 1.164,
0.0, -0.213, 2.112,
1.793, -0.533, 0.0,
};
GLfloat *kColorConversion601 = kColorConversion601Default;
GLfloat *kColorConversion601FullRange = kColorConversion601FullRangeDefault;
GLfloat *kColorConversion709 = kColorConversion709Default;
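// glUniformMatrix3fv() consumes these arrays column-major; e.g.
// kColorConversion601FullRangeDefault encodes the standard full-range
// BT.601 transform (the shaders below first subtract vec2(0.5, 0.5)
// from the chroma samples):
//   R = Y + 1.400 * (Cr - 0.5)
//   G = Y - 0.343 * (Cb - 0.5) - 0.711 * (Cr - 0.5)
//   B = Y + 1.765 * (Cb - 0.5)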
NSString *const kYUVVideoRangeConversionForRGFragmentShaderString = SHADER_STRING
(
varying highp vec2 texCoord;
uniform sampler2D luminanceTexture;
uniform sampler2D chrominanceTexture;
uniform mediump mat3 colorConversionMatrix;
void main()
{
mediump vec3 yuv;
lowp vec3 rgb;
yuv.x = texture2D(luminanceTexture, texCoord).r;
yuv.yz = texture2D(chrominanceTexture, texCoord).rg - vec2(0.5, 0.5);
rgb = colorConversionMatrix * yuv;
gl_FragColor = vec4(rgb, 1);
}
);
NSString *const kYUVFullRangeConversionForLAFragmentShaderString = SHADER_STRING
(
varying highp vec2 texCoord;
uniform sampler2D luminanceTexture;
uniform sampler2D chrominanceTexture;
uniform mediump mat3 colorConversionMatrix;
void main()
{
mediump vec3 yuv;
lowp vec3 rgb;
yuv.x = texture2D(luminanceTexture, texCoord).r;
yuv.yz = texture2D(chrominanceTexture, texCoord).ra - vec2(0.5, 0.5);
rgb = colorConversionMatrix * yuv;
gl_FragColor = vec4(rgb, 1);
}
);
NSString *const kYUVVideoRangeConversionForLAFragmentShaderString = SHADER_STRING
(
varying highp vec2 texCoord;
uniform sampler2D luminanceTexture;
uniform sampler2D chrominanceTexture;
uniform mediump mat3 colorConversionMatrix;
void main()
{
mediump vec3 yuv;
lowp vec3 rgb;
yuv.x = texture2D(luminanceTexture, texCoord).r - (16.0/255.0);
yuv.yz = texture2D(chrominanceTexture, texCoord).ra - vec2(0.5, 0.5);
rgb = colorConversionMatrix * yuv;
gl_FragColor = vec4(rgb, 1);
}
);
@interface PGVideoMovie () <AVPlayerItemOutputPullDelegate>
{
BOOL audioEncodingIsFinished, videoEncodingIsFinished;
// GPUImageMovieWriter *synchronizedMovieWriter;
AVAssetReader *reader;
AVPlayerItemVideoOutput *playerItemOutput;
CADisplayLink *displayLink;
CMTime previousFrameTime, processingFrameTime;
CFAbsoluteTime previousActualFrameTime;
BOOL keepLooping;
GLuint luminanceTexture, chrominanceTexture;
GLProgram *yuvConversionProgram;
GLint yuvConversionPositionAttribute, yuvConversionTextureCoordinateAttribute;
GLint yuvConversionLuminanceTextureUniform, yuvConversionChrominanceTextureUniform;
GLint yuvConversionMatrixUniform;
const GLfloat *_preferredConversion;
BOOL isFullYUVRange;
int imageBufferWidth, imageBufferHeight;
}
- (void)processAsset;
@end
@implementation PGVideoMovie
@synthesize asset = _asset;
@synthesize shouldRepeat = _shouldRepeat;
#pragma mark -
#pragma mark Initialization and teardown
- (instancetype)initWithAsset:(AVAsset *)asset;
{
if (!(self = [super init]))
{
return nil;
}
[self yuvConversionSetup];
self.asset = asset;
return self;
}
- (void)yuvConversionSetup;
{
if ([GPUImageContext supportsFastTextureUpload])
{
runSynchronouslyOnVideoProcessingQueue(^{
[GPUImageContext useImageProcessingContext];
_preferredConversion = kColorConversion709;
isFullYUVRange = YES;
yuvConversionProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:kGPUImageVertexShaderString fragmentShaderString:kYUVFullRangeConversionForLAFragmentShaderString];
if (!yuvConversionProgram.initialized)
{
[yuvConversionProgram addAttribute:@"position"];
[yuvConversionProgram addAttribute:@"inputTextureCoordinate"];
if (![yuvConversionProgram link])
{
NSString *progLog = [yuvConversionProgram programLog];
NSLog(@"Program link log: %@", progLog);
NSString *fragLog = [yuvConversionProgram fragmentShaderLog];
NSLog(@"Fragment shader compile log: %@", fragLog);
NSString *vertLog = [yuvConversionProgram vertexShaderLog];
NSLog(@"Vertex shader compile log: %@", vertLog);
yuvConversionProgram = nil;
NSAssert(NO, @"Filter shader link failed");
}
}
yuvConversionPositionAttribute = [yuvConversionProgram attributeIndex:@"position"];
yuvConversionTextureCoordinateAttribute = [yuvConversionProgram attributeIndex:@"inputTextureCoordinate"];
yuvConversionLuminanceTextureUniform = [yuvConversionProgram uniformIndex:@"luminanceTexture"];
yuvConversionChrominanceTextureUniform = [yuvConversionProgram uniformIndex:@"chrominanceTexture"];
yuvConversionMatrixUniform = [yuvConversionProgram uniformIndex:@"colorConversionMatrix"];
[GPUImageContext setActiveShaderProgram:yuvConversionProgram];
glEnableVertexAttribArray(yuvConversionPositionAttribute);
glEnableVertexAttribArray(yuvConversionTextureCoordinateAttribute);
});
}
}
- (void)dealloc
{
[playerItemOutput setDelegate:nil queue:nil];
// Moved into endProcessing
//if (self.playerItem && (displayLink != nil))
//{
// [displayLink invalidate]; // remove from all run loops
// displayLink = nil;
//}
}
#pragma mark -
#pragma mark Movie processing
//- (void)enableSynchronizedEncodingUsingMovieWriter:(GPUImageMovieWriter *)movieWriter;
//{
// synchronizedMovieWriter = movieWriter;
// movieWriter.encodingLiveVideo = NO;
//}
- (void)startProcessing
{
if (_shouldRepeat) self->keepLooping = true;
[self processAsset];
}
- (AVAssetReader*)createAssetReader
{
NSError *error = nil;
AVAssetReader *assetReader = [AVAssetReader assetReaderWithAsset:self.asset error:&error];
NSMutableDictionary *outputSettings = [NSMutableDictionary dictionary];
if ([GPUImageContext supportsFastTextureUpload]) {
[outputSettings setObject:@(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) forKey:(id)kCVPixelBufferPixelFormatTypeKey];
isFullYUVRange = YES;
}
else {
[outputSettings setObject:@(kCVPixelFormatType_32BGRA) forKey:(id)kCVPixelBufferPixelFormatTypeKey];
isFullYUVRange = NO;
}
// Maybe set alwaysCopiesSampleData to NO on iOS 5.0 for faster video decoding
AVAssetReaderTrackOutput *readerVideoTrackOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:[[self.asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] outputSettings:outputSettings];
readerVideoTrackOutput.alwaysCopiesSampleData = NO;
[assetReader addOutput:readerVideoTrackOutput];
// NSArray *audioTracks = [self.asset tracksWithMediaType:AVMediaTypeAudio];
// BOOL shouldRecordAudioTrack = (([audioTracks count] > 0) && (self.audioEncodingTarget != nil) );
// AVAssetReaderTrackOutput *readerAudioTrackOutput = nil;
//
// if (shouldRecordAudioTrack)
// {
// [self.audioEncodingTarget setShouldInvalidateAudioSampleWhenDone:YES];
//
// // This might need to be extended to handle movies with more than one audio track
// AVAssetTrack* audioTrack = [audioTracks objectAtIndex:0];
// readerAudioTrackOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:audioTrack outputSettings:nil];
// readerAudioTrackOutput.alwaysCopiesSampleData = NO;
// [assetReader addOutput:readerAudioTrackOutput];
// }
return assetReader;
}
- (void)processAsset
{
reader = [self createAssetReader];
AVAssetReaderOutput *readerVideoTrackOutput = nil;
AVAssetReaderOutput *readerAudioTrackOutput = nil;
audioEncodingIsFinished = YES;
for( AVAssetReaderOutput *output in reader.outputs ) {
if( [output.mediaType isEqualToString:AVMediaTypeAudio] ) {
audioEncodingIsFinished = NO;
readerAudioTrackOutput = output;
}
else if( [output.mediaType isEqualToString:AVMediaTypeVideo] ) {
readerVideoTrackOutput = output;
}
}
if ([reader startReading] == NO) {
return;
}
__unsafe_unretained PGVideoMovie *weakSelf = self;
// if (synchronizedMovieWriter != nil)
// {
// [synchronizedMovieWriter setVideoInputReadyCallback:^{
// BOOL success = [weakSelf readNextVideoFrameFromOutput:readerVideoTrackOutput];
// return success;
// }];
//
// [synchronizedMovieWriter setAudioInputReadyCallback:^{
// BOOL success = [weakSelf readNextAudioSampleFromOutput:readerAudioTrackOutput];
// return success;
// }];
//
// [synchronizedMovieWriter enableSynchronizationCallbacks];
// }
// else
// {
while (reader.status == AVAssetReaderStatusReading && (!_shouldRepeat || keepLooping))
{
[weakSelf readNextVideoFrameFromOutput:readerVideoTrackOutput];
if ((readerAudioTrackOutput) && (!audioEncodingIsFinished))
{
[weakSelf readNextAudioSampleFromOutput:readerAudioTrackOutput];
}
}
if (reader.status == AVAssetReaderStatusCompleted) {
[reader cancelReading];
if (keepLooping) {
reader = nil;
[self startProcessing];
} else {
[weakSelf endProcessing];
}
}
// }
}
- (void)outputMediaDataWillChange:(AVPlayerItemOutput *)sender
{
[displayLink setPaused:false];
}
- (void)displayLinkCallback:(CADisplayLink *)sender
{
CFTimeInterval nextVSync = ([sender timestamp] + [sender duration]);
CMTime outputItemTime = [playerItemOutput itemTimeForHostTime:nextVSync];
[self processPixelBufferAtTime:outputItemTime];
}
- (void)processPixelBufferAtTime:(CMTime)outputItemTime
{
if ([playerItemOutput hasNewPixelBufferForItemTime:outputItemTime])
{
__unsafe_unretained PGVideoMovie *weakSelf = self;
CVPixelBufferRef pixelBuffer = [playerItemOutput copyPixelBufferForItemTime:outputItemTime itemTimeForDisplay:NULL];
if (pixelBuffer != NULL)
{
runSynchronouslyOnVideoProcessingQueue(^{
[weakSelf processMovieFrame:pixelBuffer withSampleTime:outputItemTime];
CFRelease(pixelBuffer);
});
}
}
}
- (BOOL)readNextVideoFrameFromOutput:(AVAssetReaderOutput *)readerVideoTrackOutput;
{
if (reader.status == AVAssetReaderStatusReading && !videoEncodingIsFinished)
{
CMSampleBufferRef sampleBufferRef = [readerVideoTrackOutput copyNextSampleBuffer];
if (sampleBufferRef)
{
CMTime currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(sampleBufferRef);
CMTime differenceFromLastFrame = CMTimeSubtract(currentSampleTime, previousFrameTime);
CFAbsoluteTime currentActualTime = CFAbsoluteTimeGetCurrent();
CGFloat frameTimeDifference = CMTimeGetSeconds(differenceFromLastFrame);
CGFloat actualTimeDifference = currentActualTime - previousActualFrameTime;
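// Pace decoding to presentation time: if the stream's timestamps have
// advanced further than wall-clock time since the last frame, sleep off
// the difference so playback runs at actual speed.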
if (frameTimeDifference > actualTimeDifference)
{
usleep(1000000.0 * (frameTimeDifference - actualTimeDifference));
}
previousFrameTime = currentSampleTime;
previousActualFrameTime = CFAbsoluteTimeGetCurrent();
__unsafe_unretained PGVideoMovie *weakSelf = self;
runSynchronouslyOnVideoProcessingQueue(^{
[weakSelf processMovieFrame:sampleBufferRef];
CMSampleBufferInvalidate(sampleBufferRef);
CFRelease(sampleBufferRef);
});
return YES;
}
else
{
if (!keepLooping) {
videoEncodingIsFinished = YES;
if( videoEncodingIsFinished && audioEncodingIsFinished )
[self endProcessing];
}
}
}
// else if (synchronizedMovieWriter != nil)
// {
// if (reader.status == AVAssetReaderStatusCompleted)
// {
// [self endProcessing];
// }
// }
return NO;
}
- (BOOL)readNextAudioSampleFromOutput:(AVAssetReaderOutput *)readerAudioTrackOutput;
{
if (reader.status == AVAssetReaderStatusReading && ! audioEncodingIsFinished)
{
CMSampleBufferRef audioSampleBufferRef = [readerAudioTrackOutput copyNextSampleBuffer];
if (audioSampleBufferRef)
{
CFRelease(audioSampleBufferRef);
return YES;
}
else
{
if (!keepLooping) {
audioEncodingIsFinished = YES;
if (videoEncodingIsFinished && audioEncodingIsFinished)
[self endProcessing];
}
}
}
// else if (synchronizedMovieWriter != nil)
// {
// if (reader.status == AVAssetReaderStatusCompleted || reader.status == AVAssetReaderStatusFailed ||
// reader.status == AVAssetReaderStatusCancelled)
// {
// [self endProcessing];
// }
// }
return NO;
}
- (void)processMovieFrame:(CMSampleBufferRef)movieSampleBuffer;
{
CMTime currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(movieSampleBuffer);
CVImageBufferRef movieFrame = CMSampleBufferGetImageBuffer(movieSampleBuffer);
processingFrameTime = currentSampleTime;
[self processMovieFrame:movieFrame withSampleTime:currentSampleTime];
}
- (CGFloat)progress
{
if (AVAssetReaderStatusReading == reader.status)
{
float current = processingFrameTime.value * 1.0f / processingFrameTime.timescale;
float duration = self.asset.duration.value * 1.0f / self.asset.duration.timescale;
return current / duration;
}
else if ( AVAssetReaderStatusCompleted == reader.status )
{
return 1.f;
}
else
{
return 0.f;
}
}
- (void)processMovieFrame:(CVPixelBufferRef)movieFrame withSampleTime:(CMTime)currentSampleTime
{
int bufferHeight = (int) CVPixelBufferGetHeight(movieFrame);
int bufferWidth = (int) CVPixelBufferGetWidth(movieFrame);
CFTypeRef colorAttachments = CVBufferGetAttachment(movieFrame, kCVImageBufferYCbCrMatrixKey, NULL);
if (colorAttachments != NULL)
{
if (CFStringCompare(colorAttachments, kCVImageBufferYCbCrMatrix_ITU_R_601_4, 0) == kCFCompareEqualTo)
{
if (isFullYUVRange)
{
_preferredConversion = kColorConversion601FullRange;
}
else
{
_preferredConversion = kColorConversion601;
}
}
else
{
_preferredConversion = kColorConversion709;
}
}
else
{
if (isFullYUVRange)
{
_preferredConversion = kColorConversion601FullRange;
}
else
{
_preferredConversion = kColorConversion601;
}
}
CFAbsoluteTime startTime = CFAbsoluteTimeGetCurrent();
[GPUImageContext useImageProcessingContext];
if ([GPUImageContext supportsFastTextureUpload])
{
CVOpenGLESTextureRef luminanceTextureRef = NULL;
CVOpenGLESTextureRef chrominanceTextureRef = NULL;
if (CVPixelBufferGetPlaneCount(movieFrame) > 0)
{
CVPixelBufferLockBaseAddress(movieFrame,0);
if ((imageBufferWidth != bufferWidth) && (imageBufferHeight != bufferHeight))
{
imageBufferWidth = bufferWidth;
imageBufferHeight = bufferHeight;
}
CVReturn err;
// Y-plane
glActiveTexture(GL_TEXTURE4);
if ([GPUImageContext deviceSupportsRedTextures])
{
err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, [[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache], movieFrame, NULL, GL_TEXTURE_2D, GL_LUMINANCE, bufferWidth, bufferHeight, GL_LUMINANCE, GL_UNSIGNED_BYTE, 0, &luminanceTextureRef);
}
else
{
err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, [[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache], movieFrame, NULL, GL_TEXTURE_2D, GL_LUMINANCE, bufferWidth, bufferHeight, GL_LUMINANCE, GL_UNSIGNED_BYTE, 0, &luminanceTextureRef);
}
if (err)
{
NSLog(@"Error at CVOpenGLESTextureCacheCreateTextureFromImage %d", err);
}
luminanceTexture = CVOpenGLESTextureGetName(luminanceTextureRef);
glBindTexture(GL_TEXTURE_2D, luminanceTexture);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
// UV-plane
glActiveTexture(GL_TEXTURE5);
if ([GPUImageContext deviceSupportsRedTextures])
{
err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, [[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache], movieFrame, NULL, GL_TEXTURE_2D, GL_LUMINANCE_ALPHA, bufferWidth/2, bufferHeight/2, GL_LUMINANCE_ALPHA, GL_UNSIGNED_BYTE, 1, &chrominanceTextureRef);
}
else
{
err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, [[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache], movieFrame, NULL, GL_TEXTURE_2D, GL_LUMINANCE_ALPHA, bufferWidth/2, bufferHeight/2, GL_LUMINANCE_ALPHA, GL_UNSIGNED_BYTE, 1, &chrominanceTextureRef);
}
if (err)
{
NSLog(@"Error at CVOpenGLESTextureCacheCreateTextureFromImage %d", err);
}
chrominanceTexture = CVOpenGLESTextureGetName(chrominanceTextureRef);
glBindTexture(GL_TEXTURE_2D, chrominanceTexture);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
[self convertYUVToRGBOutput];
for (id<GPUImageInput> currentTarget in targets)
{
NSInteger indexOfObject = [targets indexOfObject:currentTarget];
NSInteger targetTextureIndex = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];
[currentTarget setInputSize:CGSizeMake(bufferWidth, bufferHeight) atIndex:targetTextureIndex];
[currentTarget setInputFramebuffer:outputFramebuffer atIndex:targetTextureIndex];
}
[outputFramebuffer unlock];
for (id<GPUImageInput> currentTarget in targets)
{
NSInteger indexOfObject = [targets indexOfObject:currentTarget];
NSInteger targetTextureIndex = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];
[currentTarget newFrameReadyAtTime:currentSampleTime atIndex:targetTextureIndex];
}
CVPixelBufferUnlockBaseAddress(movieFrame, 0);
CFRelease(luminanceTextureRef);
CFRelease(chrominanceTextureRef);
}
}
else
{
CVPixelBufferLockBaseAddress(movieFrame, 0);
outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:CGSizeMake(bufferWidth, bufferHeight) textureOptions:self.outputTextureOptions onlyTexture:YES];
glBindTexture(GL_TEXTURE_2D, [outputFramebuffer texture]);
glTexImage2D(GL_TEXTURE_2D, 0, self.outputTextureOptions.internalFormat, bufferWidth, bufferHeight, 0, self.outputTextureOptions.format, self.outputTextureOptions.type, CVPixelBufferGetBaseAddress(movieFrame));
for (id<GPUImageInput> currentTarget in targets)
{
NSInteger indexOfObject = [targets indexOfObject:currentTarget];
NSInteger targetTextureIndex = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];
[currentTarget setInputSize:CGSizeMake(bufferWidth, bufferHeight) atIndex:targetTextureIndex];
[currentTarget setInputFramebuffer:outputFramebuffer atIndex:targetTextureIndex];
}
[outputFramebuffer unlock];
for (id<GPUImageInput> currentTarget in targets)
{
NSInteger indexOfObject = [targets indexOfObject:currentTarget];
NSInteger targetTextureIndex = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];
[currentTarget newFrameReadyAtTime:currentSampleTime atIndex:targetTextureIndex];
}
CVPixelBufferUnlockBaseAddress(movieFrame, 0);
}
}
- (void)endProcessing
{
keepLooping = NO;
[displayLink setPaused:YES];
for (id<GPUImageInput> currentTarget in targets)
{
[currentTarget endProcessing];
}
if (displayLink != nil)
{
[displayLink invalidate];
displayLink = nil;
}
}
- (void)cancelProcessing
{
if (reader) {
[reader cancelReading];
}
[self endProcessing];
}
- (void)convertYUVToRGBOutput;
{
[GPUImageContext setActiveShaderProgram:yuvConversionProgram];
outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:CGSizeMake(imageBufferWidth, imageBufferHeight) onlyTexture:NO];
[outputFramebuffer activateFramebuffer];
glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
static const GLfloat squareVertices[] = {
-1.0f, -1.0f,
1.0f, -1.0f,
-1.0f, 1.0f,
1.0f, 1.0f,
};
static const GLfloat textureCoordinates[] = {
0.0f, 0.0f,
1.0f, 0.0f,
0.0f, 1.0f,
1.0f, 1.0f,
};
glActiveTexture(GL_TEXTURE4);
glBindTexture(GL_TEXTURE_2D, luminanceTexture);
glUniform1i(yuvConversionLuminanceTextureUniform, 4);
glActiveTexture(GL_TEXTURE5);
glBindTexture(GL_TEXTURE_2D, chrominanceTexture);
glUniform1i(yuvConversionChrominanceTextureUniform, 5);
glUniformMatrix3fv(yuvConversionMatrixUniform, 1, GL_FALSE, _preferredConversion);
glVertexAttribPointer(yuvConversionPositionAttribute, 2, GL_FLOAT, 0, 0, squareVertices);
glVertexAttribPointer(yuvConversionTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, textureCoordinates);
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
}
- (AVAssetReader*)assetReader {
return reader;
}
- (BOOL)audioEncodingIsFinished {
return audioEncodingIsFinished;
}
- (BOOL)videoEncodingIsFinished {
return videoEncodingIsFinished;
}
@end

View File

@@ -911,7 +911,11 @@ const NSUInteger TGAttachmentDisplayedAssetLimit = 500;
controller.requestOriginalFullSizeImage = ^(id<TGMediaEditableItem> editableItem, NSTimeInterval position)
{
return [editableItem originalImageSignal:position];
if (editableItem.isVideo && [editableItem isKindOfClass:[TGMediaAsset class]]) {
return [TGMediaAssetImageSignals avAssetForVideoAsset:(TGMediaAsset *)editableItem];
} else {
return [editableItem originalImageSignal:position];
}
};
TGOverlayControllerWindow *controllerWindow = [[TGOverlayControllerWindow alloc] initWithManager:windowManager parentController:_parentController contentController:controller];

View File

@@ -429,7 +429,11 @@
controller.requestOriginalFullSizeImage = ^(id<TGMediaEditableItem> editableItem, NSTimeInterval position)
{
return [editableItem originalImageSignal:position];
if (editableItem.isVideo && [editableItem isKindOfClass:[TGMediaAsset class]]) {
return [TGMediaAssetImageSignals avAssetForVideoAsset:(TGMediaAsset *)editableItem];
} else {
return [editableItem originalImageSignal:position];
}
};
[self.navigationController pushViewController:controller animated:true];

View File

@@ -559,7 +559,11 @@
controller.requestOriginalFullSizeImage = ^SSignal *(id<TGMediaEditableItem> editableItem, NSTimeInterval position)
{
return [editableItem originalImageSignal:position];
if (editableItem.isVideo && [editableItem isKindOfClass:[TGMediaAsset class]]) {
return [TGMediaAssetImageSignals avAssetForVideoAsset:(TGMediaAsset *)editableItem];
} else {
return [editableItem originalImageSignal:position];
}
};
controller.requestAdjustments = ^id<TGMediaEditAdjustments> (id<TGMediaEditableItem> editableItem)

View File

@@ -72,7 +72,7 @@
if ([self.asset isKindOfClass:[TGMediaAsset class]] && ((TGMediaAsset *)self.asset).subtypes & TGMediaAssetSubtypePhotoLive)
return TGPhotoEditorCropTab | TGPhotoEditorPaintTab | TGPhotoEditorToolsTab | TGPhotoEditorTimerTab;
else
return TGPhotoEditorCropTab | TGPhotoEditorPaintTab | TGPhotoEditorQualityTab | TGPhotoEditorTimerTab;
return TGPhotoEditorCropTab | TGPhotoEditorToolsTab | TGPhotoEditorPaintTab | TGPhotoEditorQualityTab;
}
- (Class)viewClass

View File

@@ -1313,7 +1313,7 @@
[self updatePlayerRange:trimEndValue];
}
TGVideoEditAdjustments *updatedAdjustments = [TGVideoEditAdjustments editAdjustmentsWithOriginalSize:_videoDimensions cropRect:cropRect cropOrientation:adjustments.cropOrientation cropLockedAspectRatio:adjustments.cropLockedAspectRatio cropMirrored:adjustments.cropMirrored trimStartValue:trimStartValue trimEndValue:trimEndValue paintingData:adjustments.paintingData sendAsGif:sendAsGif preset:adjustments.preset];
TGVideoEditAdjustments *updatedAdjustments = [TGVideoEditAdjustments editAdjustmentsWithOriginalSize:_videoDimensions cropRect:cropRect cropOrientation:adjustments.cropOrientation cropLockedAspectRatio:adjustments.cropLockedAspectRatio cropMirrored:adjustments.cropMirrored trimStartValue:trimStartValue trimEndValue:trimEndValue toolValues:adjustments.toolValues paintingData:adjustments.paintingData sendAsGif:sendAsGif preset:adjustments.preset];
[self.item.editingContext setAdjustments:updatedAdjustments forItem:self.item.editableMediaItem];
if (sendAsGif)
@@ -1405,7 +1405,7 @@
UIImageOrientation cropOrientation = (adjustments != nil) ? adjustments.cropOrientation : UIImageOrientationUp;
CGFloat cropLockedAspectRatio = (adjustments != nil) ? adjustments.cropLockedAspectRatio : 0.0f;
TGVideoEditAdjustments *updatedAdjustments = [TGVideoEditAdjustments editAdjustmentsWithOriginalSize:_videoDimensions cropRect:cropRect cropOrientation:cropOrientation cropLockedAspectRatio:cropLockedAspectRatio cropMirrored:adjustments.cropMirrored trimStartValue:_scrubberView.trimStartValue trimEndValue:_scrubberView.trimEndValue paintingData:adjustments.paintingData sendAsGif:adjustments.sendAsGif preset:adjustments.preset];
TGVideoEditAdjustments *updatedAdjustments = [TGVideoEditAdjustments editAdjustmentsWithOriginalSize:_videoDimensions cropRect:cropRect cropOrientation:cropOrientation cropLockedAspectRatio:cropLockedAspectRatio cropMirrored:adjustments.cropMirrored trimStartValue:_scrubberView.trimStartValue trimEndValue:_scrubberView.trimEndValue toolValues:adjustments.toolValues paintingData:adjustments.paintingData sendAsGif:adjustments.sendAsGif preset:adjustments.preset];
[self.item.editingContext setAdjustments:updatedAdjustments forItem:self.item.editableMediaItem];
}

View File

@@ -378,6 +378,14 @@
[assetReader addOutput:output];
AVAssetWriterInput *input = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:outputSettings];
NSDictionary *sourcePixelBufferAttributesDictionary = [NSDictionary dictionaryWithObjectsAndKeys: [NSNumber numberWithInt:kCVPixelFormatType_32BGRA], kCVPixelBufferPixelFormatTypeKey,
[NSNumber numberWithInt:dimensions.width], kCVPixelBufferWidthKey,
[NSNumber numberWithInt:dimensions.height], kCVPixelBufferHeightKey,
nil];
AVAssetWriterInputPixelBufferAdaptor *pixelBufferInput = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:input sourcePixelBufferAttributes:sourcePixelBufferAttributesDictionary];
[assetWriter addInput:input];
videoProcessor = [[TGMediaSampleBufferProcessor alloc] initWithAssetReaderOutput:output assetWriterInput:input];

View File

@@ -378,18 +378,32 @@
}
else
{
signal = [[[[self.requestOriginalFullSizeImage(_item, position) takeLast] deliverOn:_queue] filter:^bool(id image)
{
return [image isKindOfClass:[UIImage class]];
}] map:^UIImage *(UIImage *image)
{
return TGPhotoEditorCrop(image, nil, _photoEditor.cropOrientation, _photoEditor.cropRotation, _photoEditor.cropRect, _photoEditor.cropMirrored, TGPhotoEditorScreenImageMaxSize(), _photoEditor.originalSize, true);
}];
if (_item.isVideo) {
signal = [[self.requestOriginalFullSizeImage(_item, position) takeLast] deliverOn:_queue];
} else {
signal = [[[[self.requestOriginalFullSizeImage(_item, position) takeLast] deliverOn:_queue] filter:^bool(id image)
{
return [image isKindOfClass:[UIImage class]];
}] map:^UIImage *(UIImage *image)
{
return TGPhotoEditorCrop(image, nil, _photoEditor.cropOrientation, _photoEditor.cropRotation, _photoEditor.cropRect, _photoEditor.cropMirrored, TGPhotoEditorScreenImageMaxSize(), _photoEditor.originalSize, true);
}];
}
}
[signal startWithNext:^(UIImage *next)
[signal startWithNext:^(id next)
{
[_photoEditor setImage:next forCropRect:_photoEditor.cropRect cropRotation:_photoEditor.cropRotation cropOrientation:_photoEditor.cropOrientation cropMirrored:_photoEditor.cropMirrored fullSize:false];
if ([next isKindOfClass:[UIImage class]]) {
[_photoEditor setImage:(UIImage *)next forCropRect:_photoEditor.cropRect cropRotation:_photoEditor.cropRotation cropOrientation:_photoEditor.cropOrientation cropMirrored:_photoEditor.cropMirrored fullSize:false];
} else if ([next isKindOfClass:[AVAsset class]]) {
[_photoEditor setVideoAsset:(AVAsset *)next];
TGDispatchOnMainThread(^
{
[_previewView performTransitionInWithCompletion:^
{
}];
});
}
if (_ignoreDefaultPreviewViewTransitionIn)
{

View File

@@ -446,11 +446,8 @@ const CGFloat TGPhotoEditorToolbarSize = 49.0f;
if ([editorValues hasPainting])
highlightedButtons |= TGPhotoEditorPaintTab;
if ([editorValues isKindOfClass:[PGPhotoEditorValues class]])
{
if ([(PGPhotoEditorValues *)editorValues toolsApplied])
highlightedButtons |= TGPhotoEditorToolsTab;
}
if ([editorValues toolsApplied])
highlightedButtons |= TGPhotoEditorToolsTab;
return highlightedButtons;
}

View File

@@ -312,11 +312,12 @@ const CGFloat TGPhotoStickerSelectionViewHandleSide = 30.0f;
{
CGContextRef context = UIGraphicsGetCurrentContext();
CGFloat thickness = 1;
CGFloat thickness = 1.5f;
CGFloat radius = rect.size.width / 2.0f - 5.5f;
CGContextSetFillColorWithColor(context, [UIColor whiteColor].CGColor);
CGContextSetShadowWithColor(context, CGSizeZero, 2.5f, [UIColor colorWithWhite:0.0f alpha:0.3f].CGColor);
UIColor *color = UIColorRGBA(0xeaeaea, 0.8);
CGContextSetFillColorWithColor(context, color.CGColor);
CGFloat radSpace = TGDegreesToRadians(4.0f);
CGFloat radLen = TGDegreesToRadians(4.0f);
@@ -339,19 +340,29 @@ const CGFloat TGPhotoStickerSelectionViewHandleSide = 30.0f;
CGContextFillPath(context);
CGContextSetFillColorWithColor(context, TGAccentColor().CGColor);
CGContextSetStrokeColorWithColor(context, [UIColor whiteColor].CGColor);
CGContextSetStrokeColorWithColor(context, color.CGColor);
CGContextSetLineWidth(context, thickness);
void (^drawEllipse)(CGPoint) = ^(CGPoint center)
void (^drawEllipse)(CGPoint, bool) = ^(CGPoint center, bool clear)
{
CGContextSetShadowWithColor(context, CGSizeZero, 2.5f, [UIColor clearColor].CGColor);
CGContextFillEllipseInRect(context, CGRectMake(center.x - 4.5f, center.y - 4.5f, 9.0f, 9.0f));
CGContextStrokeEllipseInRect(context, CGRectMake(center.x - 4.5f, center.y - 4.5f, 9.0f, 9.0f));
CGRect rect = CGRectMake(center.x - 4.5f, center.y - 4.5f, 9.0f, 9.0f);
if (clear) {
rect = CGRectInset(rect, -thickness, -thickness);
CGContextFillEllipseInRect(context, rect);
} else {
CGContextStrokeEllipseInRect(context, rect);
}
};
CGContextSetBlendMode(context, kCGBlendModeClear);
drawEllipse(CGPointMake(5.5f, centerPoint.y));
drawEllipse(CGPointMake(rect.size.width - 5.5f, centerPoint.y));
drawEllipse(CGPointMake(5.5f, centerPoint.y), true);
drawEllipse(CGPointMake(rect.size.width - 5.5f, centerPoint.y), true);
CGContextSetBlendMode(context, kCGBlendModeNormal);
drawEllipse(CGPointMake(5.5f, centerPoint.y), false);
drawEllipse(CGPointMake(rect.size.width - 5.5f, centerPoint.y), false);
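// Two passes: the clear-blend pass punches a fully transparent hole
// (inset by -thickness so it also erases under the outline), then the
// normal pass strokes the ring, leaving a see-through drag handle.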
}
- (void)layoutSubviews

View File

@@ -1,6 +1,7 @@
#import "TGPhotoTextEntityView.h"
#import "TGColor.h"
#import "LegacyComponentsInternal.h"
#import <LegacyComponents/TGPaintUtils.h>
@@ -354,13 +355,14 @@ const CGFloat TGPhotoTextSelectionViewHandleSide = 30.0f;
{
CGContextRef context = UIGraphicsGetCurrentContext();
CGFloat space = 3.0f;
CGFloat length = 3.0f;
CGFloat thickness = 1;
CGFloat space = 4.0f;
CGFloat length = 4.5f;
CGFloat thickness = 1.5f;
CGRect selectionBounds = CGRectInset(rect, 5.5f, 5.5f);
CGContextSetFillColorWithColor(context, [UIColor whiteColor].CGColor);
CGContextSetShadowWithColor(context, CGSizeZero, 2.5f, [UIColor colorWithWhite:0.0f alpha:0.3f].CGColor);
UIColor *color = UIColorRGBA(0xeaeaea, 0.8);
CGContextSetFillColorWithColor(context, color.CGColor);
CGPoint centerPoint = TGPaintCenterOfRect(rect);
@@ -384,19 +386,29 @@ const CGFloat TGPhotoTextSelectionViewHandleSide = 30.0f;
CGContextFillPath(context);
CGContextSetFillColorWithColor(context, TGAccentColor().CGColor);
CGContextSetStrokeColorWithColor(context, [UIColor whiteColor].CGColor);
CGContextSetStrokeColorWithColor(context, color.CGColor);
CGContextSetLineWidth(context, thickness);
void (^drawEllipse)(CGPoint) = ^(CGPoint center)
void (^drawEllipse)(CGPoint, bool) = ^(CGPoint center, bool clear)
{
CGContextSetShadowWithColor(context, CGSizeZero, 2.5f, [UIColor clearColor].CGColor);
CGContextFillEllipseInRect(context, CGRectMake(center.x - 4.5f, center.y - 4.5f, 9.0f, 9.0f));
CGContextStrokeEllipseInRect(context, CGRectMake(center.x - 4.5f, center.y - 4.5f, 9.0f, 9.0f));
CGRect rect = CGRectMake(center.x - 4.5f, center.y - 4.5f, 9.0f, 9.0f);
if (clear) {
rect = CGRectInset(rect, -thickness, -thickness);
CGContextFillEllipseInRect(context, rect);
} else {
CGContextStrokeEllipseInRect(context, rect);
}
};
drawEllipse(CGPointMake(5.5f, centerPoint.y));
drawEllipse(CGPointMake(rect.size.width - 5.5f, centerPoint.y));
CGContextSetBlendMode(context, kCGBlendModeClear);
drawEllipse(CGPointMake(5.5f, centerPoint.y), true);
drawEllipse(CGPointMake(rect.size.width - 5.5f, centerPoint.y), true);
CGContextSetBlendMode(context, kCGBlendModeNormal);
drawEllipse(CGPointMake(5.5f, centerPoint.y), false);
drawEllipse(CGPointMake(rect.size.width - 5.5f, centerPoint.y), false);
}
- (void)layoutSubviews

View File

@@ -977,7 +977,11 @@ const CGFloat TGPhotoEditorToolsLandscapePanelSize = TGPhotoEditorToolsPanelSize
- (TGPhotoEditorTab)availableTabs
{
return TGPhotoEditorToolsTab | TGPhotoEditorTintTab | TGPhotoEditorBlurTab | TGPhotoEditorCurvesTab;
if (self.photoEditor.forVideo) {
return TGPhotoEditorToolsTab | TGPhotoEditorTintTab | TGPhotoEditorCurvesTab;
} else {
return TGPhotoEditorToolsTab | TGPhotoEditorTintTab | TGPhotoEditorBlurTab | TGPhotoEditorCurvesTab;
}
}
- (PGPhotoTool *)toolForTab:(TGPhotoEditorTab)tab

View File

@@ -16,6 +16,7 @@ const NSTimeInterval TGVideoEditMaximumGifDuration = 30.5;
@synthesize cropMirrored = _cropMirrored;
@synthesize paintingData = _paintingData;
@synthesize sendAsGif = _sendAsGif;
@synthesize toolValues = _toolValues;
+ (instancetype)editAdjustmentsWithOriginalSize:(CGSize)originalSize
cropRect:(CGRect)cropRect
@@ -24,6 +25,7 @@ const NSTimeInterval TGVideoEditMaximumGifDuration = 30.5;
cropMirrored:(bool)cropMirrored
trimStartValue:(NSTimeInterval)trimStartValue
trimEndValue:(NSTimeInterval)trimEndValue
toolValues:(NSDictionary *)toolValues
paintingData:(TGPaintingData *)paintingData
sendAsGif:(bool)sendAsGif
preset:(TGMediaVideoConversionPreset)preset
@@ -36,6 +38,7 @@ const NSTimeInterval TGVideoEditMaximumGifDuration = 30.5;
adjustments->_cropMirrored = cropMirrored;
adjustments->_trimStartValue = trimStartValue;
adjustments->_trimEndValue = trimEndValue;
adjustments->_toolValues = toolValues;
adjustments->_paintingData = paintingData;
adjustments->_sendAsGif = sendAsGif;
adjustments->_preset = preset;
@@ -95,6 +98,7 @@ const NSTimeInterval TGVideoEditMaximumGifDuration = 30.5;
adjustments->_cropMirrored = _cropMirrored;
adjustments->_trimStartValue = _trimStartValue;
adjustments->_trimEndValue = _trimEndValue;
adjustments->_toolValues = _toolValues;
adjustments->_paintingData = _paintingData;
adjustments->_sendAsGif = _sendAsGif;
adjustments->_preset = preset;
@@ -179,9 +183,17 @@ const NSTimeInterval TGVideoEditMaximumGifDuration = 30.5;
return CMTimeRangeMake(CMTimeMakeWithSeconds(self.trimStartValue , NSEC_PER_SEC), CMTimeMakeWithSeconds((self.trimEndValue - self.trimStartValue), NSEC_PER_SEC));
}
- (bool)toolsApplied
{
if (self.toolValues.count > 0)
return true;
return false;
}
- (bool)isDefaultValuesForAvatar:(bool)forAvatar
{
return ![self cropAppliedForAvatar:forAvatar] && ![self hasPainting] && !_sendAsGif && _preset == TGMediaVideoConversionPresetCompressedDefault;
return ![self cropAppliedForAvatar:forAvatar] && ![self toolsApplied] && ![self hasPainting] && !_sendAsGif && _preset == TGMediaVideoConversionPresetCompressedDefault;
}
- (bool)isCropEqualWith:(id<TGMediaEditAdjustments>)adjusments
@@ -197,38 +209,41 @@ const NSTimeInterval TGVideoEditMaximumGifDuration = 30.5;
- (BOOL)isEqual:(id)object
{
if (object == self)
return YES;
return true;
if (!object || ![object isKindOfClass:[self class]])
return NO;
return false;
TGVideoEditAdjustments *adjustments = (TGVideoEditAdjustments *)object;
if (!_CGRectEqualToRectWithEpsilon(self.cropRect, adjustments.cropRect, [self _cropRectEpsilon]))
return NO;
return false;
if (self.cropOrientation != adjustments.cropOrientation)
return NO;
return false;
if (ABS(self.cropLockedAspectRatio - adjustments.cropLockedAspectRatio) > FLT_EPSILON)
return NO;
return false;
if (self.cropMirrored != adjustments.cropMirrored)
return NO;
return false;
if (fabs(self.trimStartValue - adjustments.trimStartValue) > FLT_EPSILON)
return NO;
return false;
if (fabs(self.trimEndValue - adjustments.trimEndValue) > FLT_EPSILON)
return NO;
return false;
if (![self.toolValues isEqual:adjustments.toolValues])
return false;
if ((self.paintingData != nil && ![self.paintingData isEqual:adjustments.paintingData]) || (self.paintingData == nil && adjustments.paintingData != nil))
return NO;
return false;
if (self.sendAsGif != adjustments.sendAsGif)
return NO;
return false;
return YES;
return true;
}
- (CGFloat)_cropRectEpsilon
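With toolValues threaded through the factory and the copy helper above, building video adjustments looks like this sketch (the tool key and value are illustrative; real keys come from PGPhotoTool identifiers):

TGVideoEditAdjustments *adjustments = [TGVideoEditAdjustments editAdjustmentsWithOriginalSize:originalSize cropRect:cropRect cropOrientation:UIImageOrientationUp cropLockedAspectRatio:0.0f cropMirrored:false trimStartValue:0.0 trimEndValue:duration toolValues:@{@"enhance": @(50.0f)} paintingData:nil sendAsGif:false preset:TGMediaVideoConversionPresetCompressedDefault];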

View File

@@ -980,7 +980,7 @@ typedef enum
if (trimStartValue > DBL_EPSILON || trimEndValue < _duration - DBL_EPSILON)
{
adjustments = [TGVideoEditAdjustments editAdjustmentsWithOriginalSize:dimensions cropRect:CGRectMake(0.0f, 0.0f, dimensions.width, dimensions.height) cropOrientation:UIImageOrientationUp cropLockedAspectRatio:1.0 cropMirrored:false trimStartValue:trimStartValue trimEndValue:trimEndValue paintingData:nil sendAsGif:false preset:TGMediaVideoConversionPresetVideoMessage];
adjustments = [TGVideoEditAdjustments editAdjustmentsWithOriginalSize:dimensions cropRect:CGRectMake(0.0f, 0.0f, dimensions.width, dimensions.height) cropOrientation:UIImageOrientationUp cropLockedAspectRatio:1.0 cropMirrored:false trimStartValue:trimStartValue trimEndValue:trimEndValue toolValues:nil paintingData:nil sendAsGif:false preset:TGMediaVideoConversionPresetVideoMessage];
duration = trimEndValue - trimStartValue;
}

View File

@@ -106,7 +106,7 @@ private func preparedShareItem(account: Account, to peerId: PeerId, value: [Stri
cropRect = CGRect(x: (size.width - shortestSide) / 2.0, y: (size.height - shortestSide) / 2.0, width: shortestSide, height: shortestSide)
}
adjustments = TGVideoEditAdjustments(originalSize: size, cropRect: cropRect, cropOrientation: .up, cropLockedAspectRatio: 1.0, cropMirrored: false, trimStartValue: 0.0, trimEndValue: 0.0, paintingData: nil, sendAsGif: false, preset: TGMediaVideoConversionPresetVideoMessage)
adjustments = TGVideoEditAdjustments(originalSize: size, cropRect: cropRect, cropOrientation: .up, cropLockedAspectRatio: 1.0, cropMirrored: false, trimStartValue: 0.0, trimEndValue: 0.0, toolValues: nil, paintingData: nil, sendAsGif: false, preset: TGMediaVideoConversionPresetVideoMessage)
}
}
var finalDuration: Double = CMTimeGetSeconds(asset.duration)

View File

@@ -46,6 +46,7 @@ private func animationItem(account: Account, emojis: Signal<[TelegramMediaFile],
return data.complete
}).start(next: { next in
subscriber.putNext(ManagedAnimationItem(source: .resource(account.postbox.mediaBox, file.resource), loop: loop, callbacks: callbacks))
subscriber.putCompletion()
})
return ActionDisposable {