Video editor fixes
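Converts the synchronous CIImage capture path to a completion-based one: newCIImageFromCurrentlyProcessedOutput, newCIImageFromFramebufferContents and currentResultCIImage now deliver the image through a completion block along with an unlock continuation, so the backing framebuffer stays locked until the caller has consumed the image. Also removes the timer-based "fixer" watchdog from GPUImageFramebuffer.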

Ilya Laktyushin 2020-05-28 06:00:36 +03:00
parent 310e71c71b
commit ae6bc476ae
8 changed files with 60 additions and 108 deletions

View File

@@ -183,23 +183,24 @@ NSString *const kGPUImagePassthroughFragmentShaderString = SHADER_STRING
     return image;
 }
 
-- (CIImage *)newCIImageFromCurrentlyProcessedOutput {
+- (void)newCIImageFromCurrentlyProcessedOutput:(void (^)(CIImage *image, void(^unlock)(void)))completion
+{
     // Give it three seconds to process, then abort if they forgot to set up the image capture properly
     double timeoutForImageCapture = 3.0;
     dispatch_time_t convertedTimeout = dispatch_time(DISPATCH_TIME_NOW, (int64_t)(timeoutForImageCapture * NSEC_PER_SEC));
 
     if (dispatch_semaphore_wait(imageCaptureSemaphore, convertedTimeout) != 0)
     {
-        return NULL;
+        completion(nil, ^{});
+        return;
     }
 
-    GPUImageFramebuffer* framebuffer = [self framebufferForOutput];
+    GPUImageFramebuffer *framebuffer = [self framebufferForOutput];
 
     usingNextFrameForImageCapture = NO;
     dispatch_semaphore_signal(imageCaptureSemaphore);
 
-    CIImage *image = [framebuffer newCIImageFromFramebufferContents];
-    return image;
+    [framebuffer newCIImageFromFramebufferContents:completion];
 }
 
 - (void)commitImageCapture {
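Note: with the completion-based signature the caller no longer owns a returned CIImage; it must invoke the supplied unlock block when done with it. A minimal caller-side sketch under assumed names (filter and ciContext are illustrative, not part of this commit):

    [filter useNextFrameForImageCapture];
    // ...render a frame through the filter chain...
    [filter newCIImageFromCurrentlyProcessedOutput:^(CIImage *image, void (^unlock)(void)) {
        if (image == nil) {
            unlock(); // the timeout path passes a no-op unlock
            return;
        }
        // Copy the pixels out while the framebuffer is still locked...
        CGImageRef cgImage = [ciContext createCGImage:image fromRect:image.extent];
        unlock(); // ...then release the read lock and the retained render target
        // use cgImage, then CGImageRelease(cgImage)
    }];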

View File

@@ -46,7 +46,7 @@ typedef struct GPUTextureOptions {
 // Image capture
 - (CGImageRef)newCGImageFromFramebufferContents;
-- (CIImage *)newCIImageFromFramebufferContents;
+- (void)newCIImageFromFramebufferContents:(void (^)(CIImage *image, void(^unlock)(void)))completion;
 - (void)restoreRenderTarget;
 
 // Raw data bytes

View File

@@ -1,17 +1,14 @@
 #import "GPUImageFramebuffer.h"
 #import "GPUImageOutput.h"
-#import "TGTimerTarget.h"
 
 @interface GPUImageFramebuffer()
 {
     GLuint framebuffer;
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
     CVPixelBufferRef renderTarget;
     CVOpenGLESTextureRef renderTexture;
     NSUInteger readLockCount;
-#else
-#endif
     NSUInteger framebufferReferenceCount;
     BOOL referenceCountingDisabled;
 }
@@ -26,9 +23,6 @@ void dataProviderReleaseCallback (void *info, const void *data, size_t size);
 void dataProviderUnlockCallback (void *info, const void *data, size_t size);
 
 @implementation GPUImageFramebuffer
-{
-    NSTimer *fixer;
-}
 
 #pragma mark -
 #pragma mark Initialization and teardown
@@ -292,9 +286,6 @@ static BOOL mark = false;
     }
 
     framebufferReferenceCount++;
-
-    [fixer invalidate];
-    fixer = nil;
 }
 
 - (void)unlock
@@ -310,18 +301,9 @@ static BOOL mark = false;
     if (framebufferReferenceCount < 1)
     {
         [[GPUImageContext sharedFramebufferCache] returnFramebufferToCache:self];
-        [fixer invalidate];
-        fixer = nil;
-    } else if (framebufferReferenceCount == 1 && self.mark) {
-//        fixer = [TGTimerTarget scheduledMainThreadTimerWithTarget:self action:@selector(fixTick) interval:0.35 repeat:false];
     }
 }
 
-- (void)fixTick {
-    [self clearAllLocks];
-    [self destroyFramebuffer];
-}
-
 - (void)clearAllLocks
 {
     framebufferReferenceCount = 0;
@@ -412,82 +394,42 @@ void dataProviderUnlockCallback (void *info, __unused const void *data, __unused
     return cgImageFromBytes;
 }
 
-- (CIImage *)newCIImageFromFramebufferContents
+- (void)newCIImageFromFramebufferContents:(void (^)(CIImage *image, void(^unlock)(void)))completion
 {
     // a CGImage can only be created from a 'normal' color texture
     NSAssert(self.textureOptions.internalFormat == GL_RGBA, @"For conversion to a CGImage the output texture format for this filter must be GL_RGBA.");
     NSAssert(self.textureOptions.type == GL_UNSIGNED_BYTE, @"For conversion to a CGImage the type of the output texture of this filter must be GL_UNSIGNED_BYTE.");
 
-    __block CIImage *ciImageFromBytes;
+    __block CIImage *ciImage;
     runSynchronouslyOnVideoProcessingQueue(^{
         [GPUImageContext useImageProcessingContext];
 
-        NSUInteger totalBytesForImage = (int)_size.width * (int)_size.height * 4;
-        // It appears that the width of a texture must be padded out to be a multiple of 8 (32 bytes) if reading from it using a texture cache
-
-        GLubyte *rawImagePixels;
-
-        CGDataProviderRef dataProvider = NULL;
         if ([GPUImageContext supportsFastTextureUpload])
         {
-            NSUInteger paddedWidthOfImage = (NSUInteger)(CVPixelBufferGetBytesPerRow(renderTarget) / 4.0);
-            NSUInteger paddedBytesForImage = paddedWidthOfImage * (int)_size.height * 4;
-
             glFinish();
-            CFRetain(renderTarget); // I need to retain the pixel buffer here and release in the data source callback to prevent its bytes from being prematurely deallocated during a photo write operation
+            CFRetain(renderTarget);
             [self lockForReading];
-            rawImagePixels = (GLubyte *)CVPixelBufferGetBaseAddress(renderTarget);
-//            dataProvider = CGDataProviderCreateWithData((__bridge_retained void*)self, rawImagePixels, paddedBytesForImage, dataProviderUnlockCallback);
-            [[GPUImageContext sharedFramebufferCache] addFramebufferToActiveImageCaptureList:self]; // In case the framebuffer is swapped out on the filter, need to have a strong reference to it somewhere for it to hang on while the image is in existence
-            ciImageFromBytes = [[CIImage alloc] initWithCVPixelBuffer:renderTarget options:nil];
+            [[GPUImageContext sharedFramebufferCache] addFramebufferToActiveImageCaptureList:self];
+            ciImage = [[CIImage alloc] initWithCVPixelBuffer:renderTarget options:nil];
+        }
+    });
+
+    completion(ciImage, ^{
+        runSynchronouslyOnVideoProcessingQueue(^{
             [self restoreRenderTarget];
             [self unlock];
             [[GPUImageContext sharedFramebufferCache] removeFramebufferFromActiveImageCaptureList:self];
-        }
-//        else
-//        {
-//            [self activateFramebuffer];
-//            rawImagePixels = (GLubyte *)malloc(totalBytesForImage);
-//            glReadPixels(0, 0, (int)_size.width, (int)_size.height, GL_RGBA, GL_UNSIGNED_BYTE, rawImagePixels);
-//            dataProvider = CGDataProviderCreateWithData(NULL, rawImagePixels, totalBytesForImage, dataProviderReleaseCallback);
-//            [self unlock]; // Don't need to keep this around anymore
-//        }
-//        CGColorSpaceRef defaultRGBColorSpace = CGColorSpaceCreateDeviceRGB();
-//
-//        CIImage *image = [[CIImage alloc] initWithImageProvider:dataProvider size:<#(size_t)#> :<#(size_t)#> format:kCIFormatRGBA8 colorSpace:defaultRGBColorSpace options:<#(nullable NSDictionary<CIImageOption,id> *)#>]
-//        if ([GPUImageContext supportsFastTextureUpload])
-//        {
-//            cgImageFromBytes = CGImageCreate((int)_size.width, (int)_size.height, 8, 32, CVPixelBufferGetBytesPerRow(renderTarget), defaultRGBColorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst, dataProvider, NULL, NO, kCGRenderingIntentDefault);
-//        }
-//        else
-//        {
-//            cgImageFromBytes = CGImageCreate((int)_size.width, (int)_size.height, 8, 32, 4 * (int)_size.width, defaultRGBColorSpace, kCGBitmapByteOrderDefault | kCGImageAlphaPremultipliedLast, dataProvider, NULL, NO, kCGRenderingIntentDefault);
-//        }
-//        // Capture image with current device orientation
-//        CGDataProviderRelease(dataProvider);
-//        CGColorSpaceRelease(defaultRGBColorSpace);
+        });
     });
-
-    return ciImageFromBytes;
 }
 
 - (void)restoreRenderTarget
 {
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
     [self unlockAfterReading];
     CFRelease(renderTarget);
-#else
-#endif
 }
 
 #pragma mark -
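Note: the deleted fixer timer (and its fixTick force-unlock) was a watchdog for framebuffers left locked after an image capture. With the completion-based API that workaround becomes unnecessary: the unlock continuation handed to the caller runs [self restoreRenderTarget], [self unlock] and removes the framebuffer from the active-capture list on the video processing queue, releasing the buffer deterministically once the CIImage has been consumed.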

View File

@@ -80,7 +80,7 @@ void reportAvailableMemoryForGPUImage(NSString *tag);
 - (void)useNextFrameForImageCapture;
 - (CGImageRef)newCGImageFromCurrentlyProcessedOutput;
-- (CIImage *)newCIImageFromCurrentlyProcessedOutput;
+- (void)newCIImageFromCurrentlyProcessedOutput:(void (^)(CIImage *image, void(^unlock)(void)))completion;
 - (void)commitImageCapture;
 - (UIImage *)imageFromCurrentFramebuffer;

View File

@@ -278,8 +278,9 @@ void reportAvailableMemoryForGPUImage(NSString *tag)
     return nil;
 }
 
-- (CIImage *)newCIImageFromCurrentlyProcessedOutput {
-    return nil;
+- (void)newCIImageFromCurrentlyProcessedOutput:(void (^)(CIImage *image, void(^unlock)(void)))completion
+{
 }
 
 - (void)commitImageCapture

View File

@@ -48,7 +48,7 @@
 - (void)createResultImageWithCompletion:(void (^)(UIImage *image))completion;
 - (UIImage *)currentResultImage;
-- (CIImage *)currentResultCIImage;
+- (void)currentResultCIImage:(void (^)(CIImage *image, void(^unlock)(void)))completion;
 
 - (bool)hasDefaultCropping;

View File

@@ -428,13 +428,12 @@
     return image;
 }
 
-- (CIImage *)currentResultCIImage {
-    __block CIImage *image = nil;
+- (void)currentResultCIImage:(void (^)(CIImage *image, void(^unlock)(void)))completion
+{
     [self processAnimated:false capture:true synchronous:true completion:^
     {
-        image = [_finalFilter newCIImageFromCurrentlyProcessedOutput];
+        [_finalFilter newCIImageFromCurrentlyProcessedOutput:completion];
     }];
-    return image;
 }
 
 #pragma mark - Editor Values

View File

@@ -378,36 +378,45 @@
         __block CIImage *overlayCIImage = nil;
         videoComposition = [AVMutableVideoComposition videoCompositionWithAsset:avAsset applyingCIFiltersWithHandler:^(AVAsynchronousCIImageFilteringRequest * _Nonnull request) {
-            __block CIImage *resultImage = request.sourceImage;
+            CIImage *resultImage = request.sourceImage;
             if (backgroundCIImage != nil) {
                 resultImage = backgroundCIImage;
             }
+
+            void (^process)(CIImage *, void(^)(void)) = ^(CIImage *resultImage, void(^unlock)(void)) {
+                CGSize size = resultImage.extent.size;
+                if (overlayImage != nil && overlayImage.size.width > 0.0) {
+                    if (overlayCIImage == nil) {
+                        overlayCIImage = [[CIImage alloc] initWithImage:overlayImage];
+                        CGFloat scale = size.width / overlayCIImage.extent.size.width;
+                        overlayCIImage = [overlayCIImage imageByApplyingTransform:CGAffineTransformMakeScale(scale, scale)];
+                    }
+                    resultImage = [overlayCIImage imageByCompositingOverImage:resultImage];
+                }
+                if (entityRenderer != nil) {
+                    [entityRenderer entitiesForTime:request.compositionTime fps:fps size:size completion:^(NSArray<CIImage *> *images) {
+                        CIImage *mergedImage = resultImage;
+                        for (CIImage *image in images) {
+                            mergedImage = [image imageByCompositingOverImage:mergedImage];
+                        }
+                        [request finishWithImage:mergedImage context:ciContext];
+                        unlock();
+                    }];
+                } else {
+                    [request finishWithImage:resultImage context:ciContext];
+                    unlock();
+                }
+            };
+
             if (editor != nil) {
                 [editor setCIImage:resultImage];
-                resultImage = editor.currentResultCIImage;
-            }
-            CGSize size = resultImage.extent.size;
-            if (overlayImage != nil && overlayImage.size.width > 0.0) {
-                if (overlayCIImage == nil) {
-                    overlayCIImage = [[CIImage alloc] initWithImage:overlayImage];
-                    CGFloat scale = size.width / overlayCIImage.extent.size.width;
-                    overlayCIImage = [overlayCIImage imageByApplyingTransform:CGAffineTransformMakeScale(scale, scale)];
-                }
-                resultImage = [overlayCIImage imageByCompositingOverImage:resultImage];
-            }
-            if (entityRenderer != nil) {
-                [entityRenderer entitiesForTime:request.compositionTime fps:fps size:size completion:^(NSArray<CIImage *> *images) {
-                    for (CIImage *image in images) {
-                        resultImage = [image imageByCompositingOverImage:resultImage];
-                    }
-                    [request finishWithImage:resultImage context:ciContext];
+                [editor currentResultCIImage:^(CIImage *image, void(^unlock)(void)) {
+                    process(image, unlock);
                 }];
             } else {
-                [request finishWithImage:resultImage context:ciContext];
+                process(resultImage, ^{});
             }
         }];
     } else {
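Taken together, capture now chains completions instead of returning images. A sketch of the resulting call flow, with names taken from the diffs above:

    // AVMutableVideoComposition filtering handler
    //   -> [editor currentResultCIImage:^(CIImage *image, void (^unlock)(void)) { ... }]
    //        -> [_finalFilter newCIImageFromCurrentlyProcessedOutput:completion]
    //             -> [framebuffer newCIImageFromFramebufferContents:completion]
    // The CIImage wraps the framebuffer's CVPixelBufferRef directly, so the shared
    // process block calls unlock() only after -finishWithImage:context: has consumed
    // the pixels; the editor-less path passes an empty unlock block.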