Video editor fixes

Ilya Laktyushin 2020-05-23 20:23:30 +03:00
parent fb656b8da1
commit 7d7ac395da
13 changed files with 216 additions and 125 deletions

View File

@@ -200,6 +200,29 @@ NSString *const kGPUImagePassthroughFragmentShaderString = SHADER_STRING
return image;
}
- (CIImage *)newCIImageFromCurrentlyProcessedOutput {
// Give it three seconds to process, then abort if they forgot to set up the image capture properly
double timeoutForImageCapture = 3.0;
dispatch_time_t convertedTimeout = dispatch_time(DISPATCH_TIME_NOW, (int64_t)(timeoutForImageCapture * NSEC_PER_SEC));
if (dispatch_semaphore_wait(imageCaptureSemaphore, convertedTimeout) != 0)
{
return NULL;
}
GPUImageFramebuffer* framebuffer = [self framebufferForOutput];
usingNextFrameForImageCapture = NO;
dispatch_semaphore_signal(imageCaptureSemaphore);
CIImage *image = [framebuffer newCIImageFromFramebufferContents];
return image;
}
- (void)commitImageCapture {
dispatch_semaphore_signal(imageCaptureSemaphore);
}
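// Typical capture sequence (hypothetical sketch; the real call sites are the
// processAnimated:/currentResultCIImage changes later in this commit):
//   [filter useNextFrameForImageCapture];
//   // ...render one frame through the filter chain...
//   CIImage *image = [filter newCIImageFromCurrentlyProcessedOutput];
//   // ...consume the image...
//   [filter commitImageCapture]; // re-signals the capture semaphore for the next capture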
#pragma mark -
#pragma mark Managing the display FBOs

View File

@@ -1,17 +1,12 @@
#import <Foundation/Foundation.h>
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
#import <OpenGLES/EAGL.h>
#import <OpenGLES/ES2/gl.h>
#import <OpenGLES/ES2/glext.h>
#else
#import <OpenGL/OpenGL.h>
#import <OpenGL/gl.h>
#endif
#import <QuartzCore/QuartzCore.h>
#import <CoreMedia/CoreMedia.h>
#import <CoreImage/CoreImage.h>
typedef struct GPUTextureOptions {
GLenum minFilter;
@@ -49,6 +44,7 @@ typedef struct GPUTextureOptions {
// Image capture
- (CGImageRef)newCGImageFromFramebufferContents;
- (CIImage *)newCIImageFromFramebufferContents;
- (void)restoreRenderTarget;
// Raw data bytes

View File

@@ -380,6 +380,75 @@ void dataProviderUnlockCallback (void *info, __unused const void *data, __unused
return cgImageFromBytes;
}
- (CIImage *)newCIImageFromFramebufferContents
{
// a CGImage can only be created from a 'normal' color texture
NSAssert(self.textureOptions.internalFormat == GL_RGBA, @"For conversion to a CGImage the output texture format for this filter must be GL_RGBA.");
NSAssert(self.textureOptions.type == GL_UNSIGNED_BYTE, @"For conversion to a CGImage the type of the output texture of this filter must be GL_UNSIGNED_BYTE.");
__block CIImage *ciImageFromBytes;
runSynchronouslyOnVideoProcessingQueue(^{
[GPUImageContext useImageProcessingContext];
NSUInteger totalBytesForImage = (int)_size.width * (int)_size.height * 4;
// It appears that the width of a texture must be padded out to be a multiple of 8 (32 bytes) if reading from it using a texture cache
GLubyte *rawImagePixels;
CGDataProviderRef dataProvider = NULL;
if ([GPUImageContext supportsFastTextureUpload])
{
NSUInteger paddedWidthOfImage = (NSUInteger)(CVPixelBufferGetBytesPerRow(renderTarget) / 4.0);
NSUInteger paddedBytesForImage = paddedWidthOfImage * (int)_size.height * 4;
glFinish();
CFRetain(renderTarget); // I need to retain the pixel buffer here and release in the data source callback to prevent its bytes from being prematurely deallocated during a photo write operation
[self lockForReading];
rawImagePixels = (GLubyte *)CVPixelBufferGetBaseAddress(renderTarget);
// dataProvider = CGDataProviderCreateWithData((__bridge_retained void*)self, rawImagePixels, paddedBytesForImage, dataProviderUnlockCallback);
[[GPUImageContext sharedFramebufferCache] addFramebufferToActiveImageCaptureList:self]; // In case the framebuffer is swapped out on the filter, need to have a strong reference to it somewhere for it to hang on while the image is in existence
ciImageFromBytes = [[CIImage alloc] initWithCVPixelBuffer:renderTarget options:nil];
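// Unlike the CGImage path above, the CIImage wraps renderTarget directly
// (CIImage retains the CVPixelBuffer), so no byte copy or CGDataProvider is needed here.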
[self restoreRenderTarget];
[self unlock];
[[GPUImageContext sharedFramebufferCache] removeFramebufferFromActiveImageCaptureList:self];
}
// else
// {
// [self activateFramebuffer];
// rawImagePixels = (GLubyte *)malloc(totalBytesForImage);
// glReadPixels(0, 0, (int)_size.width, (int)_size.height, GL_RGBA, GL_UNSIGNED_BYTE, rawImagePixels);
// dataProvider = CGDataProviderCreateWithData(NULL, rawImagePixels, totalBytesForImage, dataProviderReleaseCallback);
// [self unlock]; // Don't need to keep this around anymore
// }
// CGColorSpaceRef defaultRGBColorSpace = CGColorSpaceCreateDeviceRGB();
//
//
// CIImage *image = [[CIImage alloc] initWithImageProvider:dataProvider size:<#(size_t)#> :<#(size_t)#> format:kCIFormatRGBA8 colorSpace:defaultRGBColorSpace options:<#(nullable NSDictionary<CIImageOption,id> *)#>]
// if ([GPUImageContext supportsFastTextureUpload])
// {
// cgImageFromBytes = CGImageCreate((int)_size.width, (int)_size.height, 8, 32, CVPixelBufferGetBytesPerRow(renderTarget), defaultRGBColorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst, dataProvider, NULL, NO, kCGRenderingIntentDefault);
// }
// else
// {
// cgImageFromBytes = CGImageCreate((int)_size.width, (int)_size.height, 8, 32, 4 * (int)_size.width, defaultRGBColorSpace, kCGBitmapByteOrderDefault | kCGImageAlphaPremultipliedLast, dataProvider, NULL, NO, kCGRenderingIntentDefault);
// }
// Capture image with current device orientation
// CGDataProviderRelease(dataProvider);
// CGColorSpaceRelease(defaultRGBColorSpace);
});
return ciImageFromBytes;
}
- (void)restoreRenderTarget
{
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE

View File

@@ -2,6 +2,7 @@
#import "GPUImageFramebuffer.h"
#import <UIKit/UIKit.h>
#import <CoreImage/CoreImage.h>
void runOnMainQueueWithoutDeadlocking(void (^block)(void));
void runSynchronouslyOnVideoProcessingQueue(void (^block)(void));
@@ -79,6 +80,8 @@ void reportAvailableMemoryForGPUImage(NSString *tag);
- (void)useNextFrameForImageCapture;
- (CGImageRef)newCGImageFromCurrentlyProcessedOutput;
- (CIImage *)newCIImageFromCurrentlyProcessedOutput;
- (void)commitImageCapture;
- (UIImage *)imageFromCurrentFramebuffer;
- (UIImage *)imageFromCurrentFramebufferWithOrientation:(UIImageOrientation)imageOrientation;

View File

@@ -278,6 +278,15 @@ void reportAvailableMemoryForGPUImage(NSString *tag)
return nil;
}
- (CIImage *)newCIImageFromCurrentlyProcessedOutput {
return nil;
}
- (void)commitImageCapture
{
}
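// Base-class stubs; the working implementations are the ones added to GPUImageFilter earlier in this commit.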
- (BOOL)providesMonochromeOutput
{
return NO;

View File

@@ -28,7 +28,7 @@
- (instancetype)initWithCIImage:(CIImage *)ciImage
{
EAGLContext *context = [[GPUImageContext sharedImageProcessingContext] context];
[EAGLContext setCurrentContext:[[GPUImageContext sharedImageProcessingContext] context]];
[EAGLContext setCurrentContext:context];
GLsizei backingWidth = ciImage.extent.size.width;
GLsizei backingHeight = ciImage.extent.size.height;

View File

@@ -41,6 +41,9 @@
- (CIImage *)CIImageWithSize:(CGSize)size
{
EAGLContext *context = [[GPUImageContext sharedImageProcessingContext] context];
[EAGLContext setCurrentContext:context];
CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
CIImage *image = [[CIImage alloc] initWithTexture:self.texture size:size flipped:true colorSpace:colorSpace];
CGColorSpaceRelease(colorSpace);

View File

@@ -240,9 +240,13 @@
}];
}
} else if ([currentInput isKindOfClass:[GPUImageTextureInput class]]) {
if (capture)
[_finalFilter useNextFrameForImageCapture];
[(GPUImageTextureInput *)currentInput processTextureWithFrameTime:kCMTimeZero synchronous:synchronous];
if (completion != nil)
completion();
[_finalFilter commitImageCapture];
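// Presumably balances useNextFrameForImageCapture above: re-signals the capture
// semaphore once processing and the completion block have finished with the frame.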
}
} synchronous:synchronous];
return;
@@ -386,11 +390,12 @@
- (CIImage *)currentResultCIImage {
__block CIImage *image = nil;
GPUImageOutput *currentInput = _currentInput;
[self processAnimated:false capture:false synchronous:true completion:^
[self processAnimated:false capture:true synchronous:true completion:^
{
if ([currentInput isKindOfClass:[GPUImageTextureInput class]]) {
image = [_textureOutput CIImageWithSize:[(GPUImageTextureInput *)currentInput textureSize]];
}
image = [_finalFilter newCIImageFromCurrentlyProcessedOutput];
// if ([currentInput isKindOfClass:[GPUImageTextureInput class]]) {
// image = [_textureOutput CIImageWithSize:[(GPUImageTextureInput *)currentInput textureSize]];
// }
}];
return image;
}
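// The result is now read back from _finalFilter (hence capture:true above), so the returned
// CIImage includes the applied adjustments rather than the untouched input texture.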

View File

@@ -172,7 +172,7 @@ const NSUInteger PGPhotoEnhanceSegments = 4;
NSUInteger hMin = PGPhotoEnhanceHistogramBins - 1;
for (NSUInteger j = 0; j < hMin; ++j)
{
if (cdfs[j] != 0)
if (cdfs[i][j] != 0)
hMin = j;
}
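// cdfs presumably holds one cumulative histogram per enhancement segment; indexing with
// [i][j] makes hMin come from the current segment's CDF instead of an incorrect lookup.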

View File

@@ -6,18 +6,18 @@
@interface PGVideoMovie : GPUImageOutput
@property (readwrite, retain) AVAsset *asset;
@property (readwrite, retain) AVPlayerItem *playerItem;
@property (nonatomic, assign) bool shouldRepeat;
@property (readonly, nonatomic) CGFloat progress;
@property (readonly, nonatomic) AVAssetReader *assetReader;
@property (readonly, nonatomic) bool audioEncodingIsFinished;
@property (readonly, nonatomic) bool videoEncodingIsFinished;
- (instancetype)initWithAsset:(AVAsset *)asset;
- (instancetype)initWithPlayerItem:(AVPlayerItem *)playerItem;
- (BOOL)readNextVideoFrameFromOutput:(AVAssetReaderOutput *)readerVideoTrackOutput;
- (BOOL)readNextAudioSampleFromOutput:(AVAssetReaderOutput *)readerAudioTrackOutput;
- (void)startProcessing;
- (void)endProcessing;
- (void)cancelProcessing;
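// An AVPlayerItem-backed path now sits alongside the AVAsset one; -startProcessing picks
// -processPlayerItem when playerItem is set (see the implementation changes below).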

View File

@@ -89,8 +89,6 @@ NSString *const kYUVVideoRangeConversionForLAFragmentShaderString = SHADER_STRING
@interface PGVideoMovie () <AVPlayerItemOutputPullDelegate>
{
BOOL audioEncodingIsFinished, videoEncodingIsFinished;
// GPUImageMovieWriter *synchronizedMovieWriter;
AVAssetReader *reader;
AVPlayerItemVideoOutput *playerItemOutput;
CADisplayLink *displayLink;
@@ -116,6 +114,9 @@ NSString *const kYUVVideoRangeConversionForLAFragmentShaderString = SHADER_STRING
@end
@implementation PGVideoMovie
{
bool videoEncodingIsFinished;
}
@synthesize asset = _asset;
@synthesize shouldRepeat = _shouldRepeat;
@@ -153,6 +154,20 @@ NSString *const kYUVVideoRangeConversionForLAFragmentShaderString = SHADER_STRING
return self;
}
- (instancetype)initWithPlayerItem:(AVPlayerItem *)playerItem;
{
if (!(self = [super init]))
{
return nil;
}
[self yuvConversionSetup];
self.playerItem = playerItem;
return self;
}
- (void)yuvConversionSetup
{
if ([GPUImageContext supportsFastTextureUpload])
@@ -221,7 +236,11 @@ NSString *const kYUVVideoRangeConversionForLAFragmentShaderString = SHADER_STRING
{
if (_shouldRepeat) self->keepLooping = true;
[self processAsset];
if (self.playerItem != nil) {
[self processPlayerItem];
} else {
[self processAsset];
}
}
- (AVAssetReader*)createAssetReader
@@ -240,24 +259,9 @@ NSString *const kYUVVideoRangeConversionForLAFragmentShaderString = SHADER_STRING
}
AVAssetReaderTrackOutput *readerVideoTrackOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:[[self.asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] outputSettings:outputSettings];
readerVideoTrackOutput.alwaysCopiesSampleData = NO;
readerVideoTrackOutput.alwaysCopiesSampleData = false;
[assetReader addOutput:readerVideoTrackOutput];
// NSArray *audioTracks = [self.asset tracksWithMediaType:AVMediaTypeAudio];
// BOOL shouldRecordAudioTrack = (([audioTracks count] > 0) && (self.audioEncodingTarget != nil) );
// AVAssetReaderTrackOutput *readerAudioTrackOutput = nil;
//
// if (shouldRecordAudioTrack)
// {
// [self.audioEncodingTarget setShouldInvalidateAudioSampleWhenDone:YES];
//
// // This might need to be extended to handle movies with more than one audio track
// AVAssetTrack* audioTrack = [audioTracks objectAtIndex:0];
// readerAudioTrackOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:audioTrack outputSettings:nil];
// readerAudioTrackOutput.alwaysCopiesSampleData = NO;
// [assetReader addOutput:readerAudioTrackOutput];
// }
return assetReader;
}
@@ -266,65 +270,59 @@ NSString *const kYUVVideoRangeConversionForLAFragmentShaderString = SHADER_STRING
reader = [self createAssetReader];
AVAssetReaderOutput *readerVideoTrackOutput = nil;
AVAssetReaderOutput *readerAudioTrackOutput = nil;
audioEncodingIsFinished = YES;
for( AVAssetReaderOutput *output in reader.outputs ) {
if( [output.mediaType isEqualToString:AVMediaTypeAudio] ) {
audioEncodingIsFinished = NO;
readerAudioTrackOutput = output;
}
else if( [output.mediaType isEqualToString:AVMediaTypeVideo] ) {
for (AVAssetReaderOutput *output in reader.outputs) {
if( [output.mediaType isEqualToString:AVMediaTypeVideo] ) {
readerVideoTrackOutput = output;
}
}
if ([reader startReading] == NO) {
if (![reader startReading]) {
return;
}
__unsafe_unretained PGVideoMovie *weakSelf = self;
// if (synchronizedMovieWriter != nil)
// {
// [synchronizedMovieWriter setVideoInputReadyCallback:^{
// BOOL success = [weakSelf readNextVideoFrameFromOutput:readerVideoTrackOutput];
// return success;
// }];
//
// [synchronizedMovieWriter setAudioInputReadyCallback:^{
// BOOL success = [weakSelf readNextAudioSampleFromOutput:readerAudioTrackOutput];
// return success;
// }];
//
// [synchronizedMovieWriter enableSynchronizationCallbacks];
// }
// else
// {
while (reader.status == AVAssetReaderStatusReading && (!_shouldRepeat || keepLooping))
{
[weakSelf readNextVideoFrameFromOutput:readerVideoTrackOutput];
while (reader.status == AVAssetReaderStatusReading && (!_shouldRepeat || keepLooping))
{
[weakSelf readNextVideoFrameFromOutput:readerVideoTrackOutput];
}
if ((readerAudioTrackOutput) && (!audioEncodingIsFinished))
{
[weakSelf readNextAudioSampleFromOutput:readerAudioTrackOutput];
}
if (reader.status == AVAssetReaderStatusCompleted) {
[reader cancelReading];
if (keepLooping) {
reader = nil;
[self startProcessing];
} else {
[weakSelf endProcessing];
}
if (reader.status == AVAssetReaderStatusCompleted) {
[reader cancelReading];
}
}
if (keepLooping) {
reader = nil;
[self startProcessing];
} else {
[weakSelf endProcessing];
}
- (void)processPlayerItem
{
runSynchronouslyOnVideoProcessingQueue(^{
displayLink = [CADisplayLink displayLinkWithTarget:self selector:@selector(displayLinkCallback:)];
[displayLink addToRunLoop:[NSRunLoop currentRunLoop] forMode:NSRunLoopCommonModes];
[displayLink setPaused:YES];
dispatch_queue_t videoProcessingQueue = [GPUImageContext sharedContextQueue];
NSMutableDictionary *pixBuffAttributes = [NSMutableDictionary dictionary];
if ([GPUImageContext supportsFastTextureUpload]) {
[pixBuffAttributes setObject:@(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) forKey:(id)kCVPixelBufferPixelFormatTypeKey];
}
// }
else {
[pixBuffAttributes setObject:@(kCVPixelFormatType_32BGRA) forKey:(id)kCVPixelBufferPixelFormatTypeKey];
}
playerItemOutput = [[AVPlayerItemVideoOutput alloc] initWithPixelBufferAttributes:pixBuffAttributes];
[playerItemOutput setDelegate:self queue:videoProcessingQueue];
[_playerItem addOutput:playerItemOutput];
[playerItemOutput requestNotificationOfMediaDataChangeWithAdvanceInterval:0.1];
});
}
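// Pull-model playback: the display link stays paused until -outputMediaDataWillChange: fires,
// after which frames are presumably copied out of playerItemOutput in displayLinkCallback:.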
- (void)outputMediaDataWillChange:(AVPlayerItemOutput *)sender
@@ -385,56 +383,20 @@ NSString *const kYUVVideoRangeConversionForLAFragmentShaderString = SHADER_STRING
CFRelease(sampleBufferRef);
});
return YES;
return true;
}
else
{
if (!keepLooping) {
videoEncodingIsFinished = YES;
if( videoEncodingIsFinished && audioEncodingIsFinished )
videoEncodingIsFinished = true;
if (videoEncodingIsFinished)
[self endProcessing];
}
}
}
// else if (synchronizedMovieWriter != nil)
// {
// if (reader.status == AVAssetReaderStatusCompleted)
// {
// [self endProcessing];
// }
// }
return NO;
return false;
}
- (BOOL)readNextAudioSampleFromOutput:(AVAssetReaderOutput *)readerAudioTrackOutput;
{
if (reader.status == AVAssetReaderStatusReading && ! audioEncodingIsFinished)
{
CMSampleBufferRef audioSampleBufferRef = [readerAudioTrackOutput copyNextSampleBuffer];
if (audioSampleBufferRef)
{
CFRelease(audioSampleBufferRef);
return YES;
}
else
{
if (!keepLooping) {
audioEncodingIsFinished = YES;
if (videoEncodingIsFinished && audioEncodingIsFinished)
[self endProcessing];
}
}
}
// else if (synchronizedMovieWriter != nil)
// {
// if (reader.status == AVAssetReaderStatusCompleted || reader.status == AVAssetReaderStatusFailed ||
// reader.status == AVAssetReaderStatusCancelled)
// {
// [self endProcessing];
// }
// }
return NO;
}
- (void)processMovieFrame:(CMSampleBufferRef)movieSampleBuffer;
{
@@ -692,10 +654,6 @@ NSString *const kYUVVideoRangeConversionForLAFragmentShaderString = SHADER_STRING
return reader;
}
- (BOOL)audioEncodingIsFinished {
return audioEncodingIsFinished;
}
- (BOOL)videoEncodingIsFinished {
return videoEncodingIsFinished;
}

View File

@@ -34,6 +34,8 @@
#import <LegacyComponents/TGMenuView.h>
#import "PGPhotoEditor.h"
@interface TGMediaPickerGalleryVideoItemView() <TGMediaPickerGalleryVideoScrubberDataSource, TGMediaPickerGalleryVideoScrubberDelegate>
{
UIView *_containerView;
@@ -1442,6 +1444,8 @@
return;
AVAsset *avAsset = self.item.avAsset ?: _player.currentItem.asset;
TGMediaEditingContext *editingContext = self.item.editingContext;
id<TGMediaEditableItem> editableItem = self.item.editableMediaItem;
SSignal *thumbnailsSignal = nil;
if ([self.item.asset isKindOfClass:[TGMediaAsset class]] && ![self itemIsLivePhoto])
@@ -1452,7 +1456,26 @@
_requestingThumbnails = true;
__weak TGMediaPickerGalleryVideoItemView *weakSelf = self;
[_thumbnailsDisposable setDisposable:[[thumbnailsSignal deliverOn:[SQueue mainQueue]] startWithNext:^(NSArray *images)
[_thumbnailsDisposable setDisposable:[[[thumbnailsSignal map:^NSArray *(NSArray *images) {
id<TGMediaEditAdjustments> adjustments = [editingContext adjustmentsForItem:editableItem];
if (adjustments.toolsApplied) {
NSMutableArray *editedImages = [[NSMutableArray alloc] init];
PGPhotoEditor *editor = [[PGPhotoEditor alloc] initWithOriginalSize:adjustments.originalSize adjustments:adjustments forVideo:false enableStickers:true];
editor.standalone = true;
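// Run each scrubber thumbnail through a standalone PGPhotoEditor so the strip previews the
// applied adjustments; the unedited image is kept if rendering fails.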
for (UIImage *image in images) {
[editor setImage:image forCropRect:adjustments.cropRect cropRotation:0.0 cropOrientation:adjustments.cropOrientation cropMirrored:adjustments.cropMirrored fullSize:false];
UIImage *resultImage = editor.currentResultImage;
if (resultImage != nil) {
[editedImages addObject:resultImage];
} else {
[editedImages addObject:image];
}
}
return editedImages;
} else {
return images;
}
}] deliverOn:[SQueue mainQueue]] startWithNext:^(NSArray *images)
{
__strong TGMediaPickerGalleryVideoItemView *strongSelf = weakSelf;
if (strongSelf == nil)

View File

@@ -207,8 +207,10 @@
+ (UIImage *)qualityIconForPreset:(TGMediaVideoConversionPreset)preset
{
CGSize size = CGSizeMake(27.0f, 22.0f);
CGRect rect = CGRectInset(CGRectMake(0.0f, 0.0f, size.width, size.height), 1.0, 1.0);
CGFloat lineWidth = 2.0f - TGScreenPixel;
CGSize size = CGSizeMake(28.0f, 22.0f);
CGRect rect = CGRectInset(CGRectMake(0.0f, 0.0f, size.width, size.height), lineWidth / 2.0, lineWidth / 2.0);
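// Insetting by half the stroke width keeps the outline (stroked with lineWidth below)
// fully inside the 28x22 canvas.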
UIGraphicsBeginImageContextWithOptions(size, false, 0.0f);
CGContextRef context = UIGraphicsGetCurrentContext();
@@ -245,13 +247,13 @@
CGContextAddPath(context, path.CGPath);
CGContextSetStrokeColorWithColor(context, [UIColor whiteColor].CGColor);
CGContextSetLineWidth(context, 2.0f - TGScreenPixel);
CGContextSetLineWidth(context, lineWidth);
CGContextStrokePath(context);
UIFont *font = [TGFont roundedFontOfSize:11];
CGSize textSize = [label sizeWithFont:font];
[[UIColor whiteColor] setFill];
[label drawInRect:CGRectMake(floor(size.width - textSize.width) / 2.0f, 4.0f, textSize.width, textSize.height) withFont:font];
[label drawInRect:CGRectMake((size.width - textSize.width) / 2.0f + TGScreenPixel, 4.0f, textSize.width, textSize.height) withFont:font];
UIImage *result = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();