Mirror of https://github.com/Swiftgram/Telegram-iOS.git
Synced 2025-06-16 05:55:20 +00:00

Video editor fixes

This commit is contained in:
parent 310e71c71b
commit ae6bc476ae
@@ -183,14 +183,16 @@ NSString *const kGPUImagePassthroughFragmentShaderString = SHADER_STRING
     return image;
 }
 
-- (CIImage *)newCIImageFromCurrentlyProcessedOutput {
+- (void)newCIImageFromCurrentlyProcessedOutput:(void (^)(CIImage *image, void(^unlock)(void)))completion
+{
     // Give it three seconds to process, then abort if they forgot to set up the image capture properly
     double timeoutForImageCapture = 3.0;
     dispatch_time_t convertedTimeout = dispatch_time(DISPATCH_TIME_NOW, (int64_t)(timeoutForImageCapture * NSEC_PER_SEC));
 
     if (dispatch_semaphore_wait(imageCaptureSemaphore, convertedTimeout) != 0)
     {
-        return NULL;
+        completion(nil, ^{});
+        return;
     }
 
     GPUImageFramebuffer *framebuffer = [self framebufferForOutput];
@@ -198,8 +200,7 @@ NSString *const kGPUImagePassthroughFragmentShaderString = SHADER_STRING
     usingNextFrameForImageCapture = NO;
     dispatch_semaphore_signal(imageCaptureSemaphore);
 
-    CIImage *image = [framebuffer newCIImageFromFramebufferContents];
-    return image;
+    [framebuffer newCIImageFromFramebufferContents:completion];
 }
 
 - (void)commitImageCapture {
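
Note: the two hunks above replace the synchronous capture API with a completion-based one. Instead of returning a CIImage (or NULL when the capture semaphore times out), the method now delivers the image together with an unlock block that releases the underlying framebuffer once the caller is done. A minimal call-site sketch follows; `filter` and the surrounding code are illustrative, not part of this commit:

    // Hypothetical caller of the new API; `filter` is any GPUImageOutput
    // subclass that is about to process a frame.
    [filter useNextFrameForImageCapture];
    // ... run the frame through the filter chain ...
    [filter newCIImageFromCurrentlyProcessedOutput:^(CIImage *image, void(^unlock)(void)) {
        if (image == nil) {
            // Capture semaphore timed out; the old API returned NULL here.
            unlock();
            return;
        }
        // Use `image` while the backing pixel buffer is still locked ...
        unlock(); // ... then allow the framebuffer to be recycled.
    }];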
@@ -46,7 +46,7 @@ typedef struct GPUTextureOptions {
 
 // Image capture
 - (CGImageRef)newCGImageFromFramebufferContents;
-- (CIImage *)newCIImageFromFramebufferContents;
+- (void)newCIImageFromFramebufferContents:(void (^)(CIImage *image, void(^unlock)(void)))completion;
 - (void)restoreRenderTarget;
 
 // Raw data bytes
@@ -1,17 +1,14 @@
 #import "GPUImageFramebuffer.h"
 #import "GPUImageOutput.h"
 
-#import "TGTimerTarget.h"
-
 @interface GPUImageFramebuffer()
 {
     GLuint framebuffer;
 #if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-
     CVPixelBufferRef renderTarget;
     CVOpenGLESTextureRef renderTexture;
     NSUInteger readLockCount;
 #else
 #endif
 
     NSUInteger framebufferReferenceCount;
     BOOL referenceCountingDisabled;
 }
@@ -26,9 +23,6 @@ void dataProviderReleaseCallback (void *info, const void *data, size_t size);
 void dataProviderUnlockCallback (void *info, const void *data, size_t size);
 
 @implementation GPUImageFramebuffer
-{
-    NSTimer *fixer;
-}
 
 #pragma mark -
 #pragma mark Initialization and teardown
@@ -292,9 +286,6 @@ static BOOL mark = false;
     }
 
     framebufferReferenceCount++;
-
-    [fixer invalidate];
-    fixer = nil;
 }
 
 - (void)unlock
@@ -310,18 +301,9 @@ static BOOL mark = false;
     if (framebufferReferenceCount < 1)
     {
         [[GPUImageContext sharedFramebufferCache] returnFramebufferToCache:self];
-        [fixer invalidate];
-        fixer = nil;
-    } else if (framebufferReferenceCount == 1 && self.mark) {
-        // fixer = [TGTimerTarget scheduledMainThreadTimerWithTarget:self action:@selector(fixTick) interval:0.35 repeat:false];
     }
 }
 
-- (void)fixTick {
-    [self clearAllLocks];
-    [self destroyFramebuffer];
-}
-
 - (void)clearAllLocks
 {
     framebufferReferenceCount = 0;
@@ -412,82 +394,42 @@ void dataProviderUnlockCallback (void *info, __unused const void *data, __unused
     return cgImageFromBytes;
 }
 
-- (CIImage *)newCIImageFromFramebufferContents
+- (void)newCIImageFromFramebufferContents:(void (^)(CIImage *image, void(^unlock)(void)))completion
 {
     // a CGImage can only be created from a 'normal' color texture
     NSAssert(self.textureOptions.internalFormat == GL_RGBA, @"For conversion to a CGImage the output texture format for this filter must be GL_RGBA.");
     NSAssert(self.textureOptions.type == GL_UNSIGNED_BYTE, @"For conversion to a CGImage the type of the output texture of this filter must be GL_UNSIGNED_BYTE.");
 
-    __block CIImage *ciImageFromBytes;
+    __block CIImage *ciImage;
 
     runSynchronouslyOnVideoProcessingQueue(^{
         [GPUImageContext useImageProcessingContext];
 
-        NSUInteger totalBytesForImage = (int)_size.width * (int)_size.height * 4;
-        // It appears that the width of a texture must be padded out to be a multiple of 8 (32 bytes) if reading from it using a texture cache
-
-        GLubyte *rawImagePixels;
-
-        CGDataProviderRef dataProvider = NULL;
         if ([GPUImageContext supportsFastTextureUpload])
         {
-            NSUInteger paddedWidthOfImage = (NSUInteger)(CVPixelBufferGetBytesPerRow(renderTarget) / 4.0);
-            NSUInteger paddedBytesForImage = paddedWidthOfImage * (int)_size.height * 4;
-
             glFinish();
-            CFRetain(renderTarget); // I need to retain the pixel buffer here and release in the data source callback to prevent its bytes from being prematurely deallocated during a photo write operation
+            CFRetain(renderTarget);
             [self lockForReading];
-            rawImagePixels = (GLubyte *)CVPixelBufferGetBaseAddress(renderTarget);
-
-            // dataProvider = CGDataProviderCreateWithData((__bridge_retained void*)self, rawImagePixels, paddedBytesForImage, dataProviderUnlockCallback);
-            [[GPUImageContext sharedFramebufferCache] addFramebufferToActiveImageCaptureList:self]; // In case the framebuffer is swapped out on the filter, need to have a strong reference to it somewhere for it to hang on while the image is in existence
+            [[GPUImageContext sharedFramebufferCache] addFramebufferToActiveImageCaptureList:self];
 
-            ciImageFromBytes = [[CIImage alloc] initWithCVPixelBuffer:renderTarget options:nil];
+            ciImage = [[CIImage alloc] initWithCVPixelBuffer:renderTarget options:nil];
         }
-//        else
-//        {
-//            [self activateFramebuffer];
-//            rawImagePixels = (GLubyte *)malloc(totalBytesForImage);
-//            glReadPixels(0, 0, (int)_size.width, (int)_size.height, GL_RGBA, GL_UNSIGNED_BYTE, rawImagePixels);
-//            dataProvider = CGDataProviderCreateWithData(NULL, rawImagePixels, totalBytesForImage, dataProviderReleaseCallback);
-//            [self unlock]; // Don't need to keep this around anymore
-//        }
-
-//        CGColorSpaceRef defaultRGBColorSpace = CGColorSpaceCreateDeviceRGB();
-//
-//
-//        CIImage *image = [[CIImage alloc] initWithImageProvider:dataProvider size:<#(size_t)#> :<#(size_t)#> format:kCIFormatRGBA8 colorSpace:defaultRGBColorSpace options:<#(nullable NSDictionary<CIImageOption,id> *)#>]
-
-//        if ([GPUImageContext supportsFastTextureUpload])
-//        {
-//            cgImageFromBytes = CGImageCreate((int)_size.width, (int)_size.height, 8, 32, CVPixelBufferGetBytesPerRow(renderTarget), defaultRGBColorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst, dataProvider, NULL, NO, kCGRenderingIntentDefault);
-//        }
-//        else
-//        {
-//            cgImageFromBytes = CGImageCreate((int)_size.width, (int)_size.height, 8, 32, 4 * (int)_size.width, defaultRGBColorSpace, kCGBitmapByteOrderDefault | kCGImageAlphaPremultipliedLast, dataProvider, NULL, NO, kCGRenderingIntentDefault);
-//        }
-
-        // Capture image with current device orientation
-//        CGDataProviderRelease(dataProvider);
-//        CGColorSpaceRelease(defaultRGBColorSpace);
     });
 
-    return ciImageFromBytes;
+    completion(ciImage, ^{
+        runSynchronouslyOnVideoProcessingQueue(^{
+            [self restoreRenderTarget];
+            [self unlock];
+            [[GPUImageContext sharedFramebufferCache] removeFramebufferFromActiveImageCaptureList:self];
+        });
+    });
 }
 
 - (void)restoreRenderTarget
 {
 #if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
     [self unlockAfterReading];
     CFRelease(renderTarget);
 #else
 #endif
 }
 
 #pragma mark -
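
Note: the completion now receives a CIImage created directly from the framebuffer's CVPixelBuffer, so that buffer has to stay retained and locked while Core Image reads from it; the unlock block then performs restoreRenderTarget, unlock, and removal from the active-image-capture list on the video processing queue. A sketch of the resulting contract, assuming a hypothetical `ciContext` (CIContext) and `target` (CVPixelBufferRef) at the call site:

    [framebuffer newCIImageFromFramebufferContents:^(CIImage *image, void(^unlock)(void)) {
        if (image != nil) {
            // `image` wraps the framebuffer's pixel buffer without copying,
            // so render from it before releasing the framebuffer.
            [ciContext render:image toCVPixelBuffer:target];
        }
        unlock(); // restoreRenderTarget + unlock + leave the active-capture list
    }];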
@@ -80,7 +80,7 @@ void reportAvailableMemoryForGPUImage(NSString *tag);
 
 - (void)useNextFrameForImageCapture;
 - (CGImageRef)newCGImageFromCurrentlyProcessedOutput;
-- (CIImage *)newCIImageFromCurrentlyProcessedOutput;
+- (void)newCIImageFromCurrentlyProcessedOutput:(void (^)(CIImage *image, void(^unlock)(void)))completion;
 - (void)commitImageCapture;
 
 - (UIImage *)imageFromCurrentFramebuffer;
@@ -278,8 +278,9 @@ void reportAvailableMemoryForGPUImage(NSString *tag)
     return nil;
 }
 
-- (CIImage *)newCIImageFromCurrentlyProcessedOutput {
-    return nil;
+- (void)newCIImageFromCurrentlyProcessedOutput:(void (^)(CIImage *image, void(^unlock)(void)))completion
+{
+
 }
 
 - (void)commitImageCapture
@@ -48,7 +48,7 @@
 
 - (void)createResultImageWithCompletion:(void (^)(UIImage *image))completion;
 - (UIImage *)currentResultImage;
-- (CIImage *)currentResultCIImage;
+- (void)currentResultCIImage:(void (^)(CIImage *image, void(^unlock)(void)))completion;
 
 - (bool)hasDefaultCropping;
 
@@ -428,13 +428,12 @@
     return image;
 }
 
-- (CIImage *)currentResultCIImage {
-    __block CIImage *image = nil;
+- (void)currentResultCIImage:(void (^)(CIImage *image, void(^unlock)(void)))completion
+{
     [self processAnimated:false capture:true synchronous:true completion:^
     {
-        image = [_finalFilter newCIImageFromCurrentlyProcessedOutput];
+        [_finalFilter newCIImageFromCurrentlyProcessedOutput:completion];
     }];
-    return image;
 }
 
 #pragma mark - Editor Values
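
Note: PGPhotoEditor now threads the completion (and its unlock block) straight through to the final filter instead of returning the image from a synchronous processing pass. A hypothetical call site:

    [editor currentResultCIImage:^(CIImage *image, void(^unlock)(void)) {
        if (image != nil) {
            // composite or render `image` here
        }
        unlock(); // let the final filter's framebuffer be reused
    }];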
@@ -378,17 +378,13 @@
 
     __block CIImage *overlayCIImage = nil;
     videoComposition = [AVMutableVideoComposition videoCompositionWithAsset:avAsset applyingCIFiltersWithHandler:^(AVAsynchronousCIImageFilteringRequest * _Nonnull request) {
-        __block CIImage *resultImage = request.sourceImage;
+        CIImage *resultImage = request.sourceImage;
 
         if (backgroundCIImage != nil) {
             resultImage = backgroundCIImage;
         }
 
-        if (editor != nil) {
-            [editor setCIImage:resultImage];
-            resultImage = editor.currentResultCIImage;
-        }
-
+        void (^process)(CIImage *, void(^)(void)) = ^(CIImage *resultImage, void(^unlock)(void)) {
         CGSize size = resultImage.extent.size;
         if (overlayImage != nil && overlayImage.size.width > 0.0) {
             if (overlayCIImage == nil) {
@@ -401,13 +397,26 @@
 
         if (entityRenderer != nil) {
             [entityRenderer entitiesForTime:request.compositionTime fps:fps size:size completion:^(NSArray<CIImage *> *images) {
+                CIImage *mergedImage = resultImage;
                 for (CIImage *image in images) {
-                    resultImage = [image imageByCompositingOverImage:resultImage];
+                    mergedImage = [image imageByCompositingOverImage:mergedImage];
                 }
-                [request finishWithImage:resultImage context:ciContext];
+                [request finishWithImage:mergedImage context:ciContext];
+                unlock();
             }];
         } else {
             [request finishWithImage:resultImage context:ciContext];
+            unlock();
         }
+        };
+
+        if (editor != nil) {
+            [editor setCIImage:resultImage];
+            [editor currentResultCIImage:^(CIImage *image, void(^unlock)(void)) {
+                process(image, unlock);
+            }];
+        } else {
+            process(resultImage, ^{});
+        }
     }];
 } else {
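
Note: the last two hunks restructure the per-frame handler so that finishWithImage:context: is only called once the editor has delivered its CIImage, with unlock() invoked after the request has consumed the image; accumulating into a local mergedImage also stops the entity-renderer completion from mutating the block-captured resultImage. A skeleton of the resulting handler shape, where avAsset, editor, and ciContext come from the surrounding method and the harness is illustrative:

    videoComposition = [AVMutableVideoComposition videoCompositionWithAsset:avAsset applyingCIFiltersWithHandler:^(AVAsynchronousCIImageFilteringRequest * _Nonnull request) {
        void (^process)(CIImage *, void(^)(void)) = ^(CIImage *image, void(^unlock)(void)) {
            [request finishWithImage:image context:ciContext];
            unlock(); // only after the request has consumed the image
        };
        if (editor != nil) {
            [editor setCIImage:request.sourceImage];
            [editor currentResultCIImage:^(CIImage *image, void(^unlock)(void)) {
                process(image, unlock);
            }];
        } else {
            process(request.sourceImage, ^{}); // nothing to unlock
        }
    }];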