Build ChatListUI
362 submodules/LegacyComponents/Sources/GPUImageOutput.m (Executable file)
@@ -0,0 +1,362 @@
#import "GPUImageOutput.h"
//#import "GPUImagePicture.h"
#import <mach/mach.h>

void runOnMainQueueWithoutDeadlocking(void (^block)(void))
{
    if ([NSThread isMainThread])
    {
        block();
    }
    else
    {
        dispatch_sync(dispatch_get_main_queue(), block);
    }
}

void runSynchronouslyOnVideoProcessingQueue(void (^block)(void))
{
    dispatch_queue_t videoProcessingQueue = [GPUImageContext sharedContextQueue];
#if !OS_OBJECT_USE_OBJC
    if (dispatch_get_current_queue() == videoProcessingQueue)
#else
    // dispatch_get_current_queue() is deprecated; detect whether we are already on the
    // context queue via the queue-specific key that GPUImageContext sets on it.
    if (dispatch_get_specific([GPUImageContext contextKey]))
#endif
    {
        block();
    }
    else
    {
        dispatch_sync(videoProcessingQueue, block);
    }
}

void runAsynchronouslyOnVideoProcessingQueue(void (^block)(void))
{
    dispatch_queue_t videoProcessingQueue = [GPUImageContext sharedContextQueue];

#if !OS_OBJECT_USE_OBJC
    if (dispatch_get_current_queue() == videoProcessingQueue)
#else
    if (dispatch_get_specific([GPUImageContext contextKey]))
#endif
    {
        block();
    }
    else
    {
        dispatch_async(videoProcessingQueue, block);
    }
}

void runSynchronouslyOnContextQueue(GPUImageContext *context, void (^block)(void))
{
    dispatch_queue_t videoProcessingQueue = [context contextQueue];
#if !OS_OBJECT_USE_OBJC
    if (dispatch_get_current_queue() == videoProcessingQueue)
#else
    if (dispatch_get_specific([GPUImageContext contextKey]))
#endif
    {
        block();
    }
    else
    {
        dispatch_sync(videoProcessingQueue, block);
    }
}

void runAsynchronouslyOnContextQueue(GPUImageContext *context, void (^block)(void))
{
    dispatch_queue_t videoProcessingQueue = [context contextQueue];

#if !OS_OBJECT_USE_OBJC
    if (dispatch_get_current_queue() == videoProcessingQueue)
#else
    if (dispatch_get_specific([GPUImageContext contextKey]))
#endif
    {
        block();
    }
    else
    {
        dispatch_async(videoProcessingQueue, block);
    }
}

void reportAvailableMemoryForGPUImage(NSString *tag)
{
    if (!tag)
        tag = @"Default";
    
    struct task_basic_info info;
    mach_msg_type_number_t size = sizeof(info);
    kern_return_t kerr = task_info(mach_task_self(),
                                   TASK_BASIC_INFO,
                                   (task_info_t)&info,
                                   &size);
    if (kerr == KERN_SUCCESS) {
        NSLog(@"%@ - Memory used: %u", tag, (unsigned int)info.resident_size); // in bytes
    } else {
        NSLog(@"%@ - Error: %s", tag, mach_error_string(kerr));
    }
}

@implementation GPUImageOutput

@synthesize shouldSmoothlyScaleOutput = _shouldSmoothlyScaleOutput;
@synthesize shouldIgnoreUpdatesToThisTarget = _shouldIgnoreUpdatesToThisTarget;
@synthesize targetToIgnoreForUpdates = _targetToIgnoreForUpdates;
@synthesize frameProcessingCompletionBlock = _frameProcessingCompletionBlock;
@synthesize enabled = _enabled;
@synthesize outputTextureOptions = _outputTextureOptions;

#pragma mark -
#pragma mark Initialization and teardown

- (id)init
{
    if (!(self = [super init]))
    {
        return nil;
    }
    
    targets = [[NSMutableArray alloc] init];
    targetTextureIndices = [[NSMutableArray alloc] init];
    _enabled = YES;
    allTargetsWantMonochromeData = YES;
    usingNextFrameForImageCapture = NO;
    
    // set default texture options
    _outputTextureOptions.minFilter = GL_LINEAR;
    _outputTextureOptions.magFilter = GL_LINEAR;
    _outputTextureOptions.wrapS = GL_CLAMP_TO_EDGE;
    _outputTextureOptions.wrapT = GL_CLAMP_TO_EDGE;
    _outputTextureOptions.internalFormat = GL_RGBA;
    _outputTextureOptions.format = GL_BGRA;
    _outputTextureOptions.type = GL_UNSIGNED_BYTE;
    
    return self;
}

- (void)dealloc
{
    [self removeAllTargets];
}

#pragma mark -
#pragma mark Managing targets

- (void)setInputFramebufferForTarget:(id<GPUImageInput>)target atIndex:(NSInteger)inputTextureIndex
{
    [target setInputFramebuffer:[self framebufferForOutput] atIndex:inputTextureIndex];
}

- (GPUImageFramebuffer *)framebufferForOutput
{
    return outputFramebuffer;
}

- (void)removeOutputFramebuffer
{
    outputFramebuffer = nil;
}

- (void)notifyTargetsAboutNewOutputTexture
{
    for (id<GPUImageInput> currentTarget in targets)
    {
        NSInteger indexOfObject = [targets indexOfObject:currentTarget];
        NSInteger textureIndex = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];
        
        [self setInputFramebufferForTarget:currentTarget atIndex:textureIndex];
    }
}

- (NSArray*)targets
{
    return [NSArray arrayWithArray:targets];
}

- (void)addTarget:(id<GPUImageInput>)newTarget
{
    NSInteger nextAvailableTextureIndex = [newTarget nextAvailableTextureIndex];
    [self addTarget:newTarget atTextureLocation:nextAvailableTextureIndex];
    
    if ([newTarget shouldIgnoreUpdatesToThisTarget])
    {
        _targetToIgnoreForUpdates = newTarget;
    }
}

- (void)addTarget:(id<GPUImageInput>)newTarget atTextureLocation:(NSInteger)textureLocation
{
    if (newTarget == nil || [targets containsObject:newTarget])
    {
        return;
    }
    
    cachedMaximumOutputSize = CGSizeZero;
    runSynchronouslyOnVideoProcessingQueue(^{
        [self setInputFramebufferForTarget:newTarget atIndex:textureLocation];
        [targets addObject:newTarget];
        [targetTextureIndices addObject:[NSNumber numberWithInteger:textureLocation]];
        
        allTargetsWantMonochromeData = allTargetsWantMonochromeData && [newTarget wantsMonochromeInput];
    });
}

- (void)removeTarget:(id<GPUImageInput>)targetToRemove
{
    if (![targets containsObject:targetToRemove])
    {
        return;
    }
    
    if (_targetToIgnoreForUpdates == targetToRemove)
    {
        _targetToIgnoreForUpdates = nil;
    }
    
    cachedMaximumOutputSize = CGSizeZero;
    
    NSInteger indexOfObject = [targets indexOfObject:targetToRemove];
    NSInteger textureIndexOfTarget = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];
    
    runSynchronouslyOnVideoProcessingQueue(^{
        [targetToRemove setInputSize:CGSizeZero atIndex:textureIndexOfTarget];
        [targetToRemove setInputRotation:kGPUImageNoRotation atIndex:textureIndexOfTarget];
        
        [targetTextureIndices removeObjectAtIndex:indexOfObject];
        [targets removeObject:targetToRemove];
        [targetToRemove endProcessing];
    });
}

- (void)removeAllTargets
{
    cachedMaximumOutputSize = CGSizeZero;
    runSynchronouslyOnVideoProcessingQueue(^{
        for (id<GPUImageInput> targetToRemove in targets)
        {
            NSInteger indexOfObject = [targets indexOfObject:targetToRemove];
            NSInteger textureIndexOfTarget = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];
            
            [targetToRemove setInputSize:CGSizeZero atIndex:textureIndexOfTarget];
            [targetToRemove setInputRotation:kGPUImageNoRotation atIndex:textureIndexOfTarget];
        }
        [targets removeAllObjects];
        [targetTextureIndices removeAllObjects];
        
        allTargetsWantMonochromeData = YES;
    });
}

#pragma mark -
#pragma mark Manage the output texture

// No-op in this base class; concrete outputs override these as needed.
- (void)forceProcessingAtSize:(CGSize)__unused frameSize
{
}

- (void)forceProcessingAtSizeRespectingAspectRatio:(CGSize)__unused frameSize
{
}

#pragma mark -
#pragma mark Still image processing

- (void)useNextFrameForImageCapture
{
}

- (CGImageRef)newCGImageFromCurrentlyProcessedOutput
{
    return nil;
}

- (BOOL)providesMonochromeOutput
{
    return NO;
}

#pragma mark -
#pragma mark Platform-specific image output methods

#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE

- (UIImage *)imageFromCurrentFramebuffer
{
    UIDeviceOrientation deviceOrientation = [[UIDevice currentDevice] orientation];
    UIImageOrientation imageOrientation = UIImageOrientationLeft;
    switch (deviceOrientation)
    {
        case UIDeviceOrientationPortrait:
            imageOrientation = UIImageOrientationUp;
            break;
        case UIDeviceOrientationPortraitUpsideDown:
            imageOrientation = UIImageOrientationDown;
            break;
        case UIDeviceOrientationLandscapeLeft:
            imageOrientation = UIImageOrientationLeft;
            break;
        case UIDeviceOrientationLandscapeRight:
            imageOrientation = UIImageOrientationRight;
            break;
        default:
            imageOrientation = UIImageOrientationUp;
            break;
    }
    
    return [self imageFromCurrentFramebufferWithOrientation:imageOrientation];
}

- (UIImage *)imageFromCurrentFramebufferWithOrientation:(UIImageOrientation)imageOrientation
{
    CGImageRef cgImageFromBytes = [self newCGImageFromCurrentlyProcessedOutput];
    UIImage *finalImage = [UIImage imageWithCGImage:cgImageFromBytes scale:1.0 orientation:imageOrientation];
    CGImageRelease(cgImageFromBytes);
    
    return finalImage;
}

- (CGSize)inputTextureSize
{
    return inputTextureSize;
}

#else

- (NSImage *)imageFromCurrentFramebuffer
{
    return [self imageFromCurrentFramebufferWithOrientation:UIImageOrientationLeft];
}

- (NSImage *)imageFromCurrentFramebufferWithOrientation:(UIImageOrientation)imageOrientation
{
    CGImageRef cgImageFromBytes = [self newCGImageFromCurrentlyProcessedOutput];
    NSImage *finalImage = [[NSImage alloc] initWithCGImage:cgImageFromBytes size:NSZeroSize];
    CGImageRelease(cgImageFromBytes);
    
    return finalImage;
}

- (NSImage *)imageByFilteringImage:(NSImage *)imageToFilter
{
    CGImageRef image = [self newCGImageByFilteringCGImage:[imageToFilter CGImageForProposedRect:NULL context:[NSGraphicsContext currentContext] hints:nil]];
    NSImage *processedImage = [[NSImage alloc] initWithCGImage:image size:NSZeroSize];
    CGImageRelease(image);
    return processedImage;
}

- (CGImageRef)newCGImageByFilteringImage:(NSImage *)imageToFilter
{
    return [self newCGImageByFilteringCGImage:[imageToFilter CGImageForProposedRect:NULL context:[NSGraphicsContext currentContext] hints:nil]];
}

#endif

@end
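
For context, GPUImageOutput is the base node of a GPUImage-style rendering chain: the functions at the top of the file funnel work onto the shared video-processing queue, and the instance methods manage the list of downstream targets. Below is a minimal usage sketch, not part of this commit; it assumes some concrete GPUImageOutput subclass and some GPUImageInput-conforming object exist elsewhere in LegacyComponents, and it only calls the helpers and methods defined in this file.

    // Hypothetical source and sink; any concrete subclass / conforming object would do.
    GPUImageOutput *source = /* e.g. a filter or picture source */ nil;
    id<GPUImageInput> sink = /* e.g. another filter or a rendering view */ nil;

    [source addTarget:sink];     // attaches the sink on the shared video-processing queue
    // ... frames are processed and pushed to every attached target ...
    [source removeTarget:sink];  // resets the sink's input size/rotation, then detaches it
    [source removeAllTargets];   // detaches everything and resets monochrome tracking

    // Work that touches GL state is funnelled through the shared context queue,
    // synchronously or asynchronously, via the free functions declared above:
    runSynchronouslyOnVideoProcessingQueue(^{
        // safe to touch the OpenGL ES context here
    });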