Memory leak fixes

Commit 30639d9a88 (parent 7afa98c52c)
Author: Ali
Date: 2023-09-26 16:55:04 +04:00
18 changed files with 288 additions and 153 deletions


@@ -785,6 +785,7 @@ public final class ChatListContainerNode: ASDisplayNode, UIGestureRecognizerDelegate
}, openArchiveSettings: { [weak self] in
self?.openArchiveSettings()
}, autoSetReady: !animated, isMainTab: index == 0)
self.pendingItemNode?.2.dispose()
let disposable = MetaDisposable()
self.pendingItemNode = (id, itemNode, disposable)
@@ -801,6 +802,7 @@ public final class ChatListContainerNode: ASDisplayNode, UIGestureRecognizerDelegate
return
}
strongSelf.pendingItemNode?.2.dispose()
strongSelf.pendingItemNode = nil
itemNode.listNode.tempTopInset = strongSelf.tempTopInset
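
The pattern in this hunk: dispose the previous pending item's MetaDisposable before the tuple is overwritten or cleared, since dropping the reference alone leaves the old subscription running. A minimal Swift sketch of the same rule, assuming SwiftSignalKit's MetaDisposable (the holder type is hypothetical):

    import SwiftSignalKit

    final class PendingItemHolder {
        // Mirrors the (id, itemNode, disposable) tuple; simplified to two fields.
        private var pending: (id: Int, disposable: MetaDisposable)?

        func beginLoading(id: Int) {
            // Dispose the previous item's disposable before overwriting the
            // tuple; dropping the reference alone would leak the subscription.
            self.pending?.disposable.dispose()
            let disposable = MetaDisposable()
            self.pending = (id, disposable)
        }

        func finishLoading() {
            // Same rule when clearing: dispose first, then release.
            self.pending?.disposable.dispose()
            self.pending = nil
        }
    }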


@@ -2,6 +2,8 @@
#import <CoreVideo/CoreVideo.h>
#import <AVFoundation/AVFoundation.h>
@class TGVideoCameraRendererBuffer;
@interface TGVideoCameraGLRenderer : NSObject
@property (nonatomic, readonly) __attribute__((NSObject)) CMFormatDescriptionRef outputFormatDescription;
@@ -13,7 +15,7 @@
- (void)prepareForInputWithFormatDescription:(CMFormatDescriptionRef)inputFormatDescription outputRetainedBufferCountHint:(size_t)outputRetainedBufferCountHint;
- (void)reset;
- (CVPixelBufferRef)copyRenderedPixelBuffer:(CVPixelBufferRef)pixelBuffer;
- (void)setPreviousPixelBuffer:(CVPixelBufferRef)previousPixelBuffer;
- (TGVideoCameraRendererBuffer *)copyRenderedPixelBuffer:(TGVideoCameraRendererBuffer *)pixelBuffer;
- (void)setPreviousPixelBuffer:(TGVideoCameraRendererBuffer *)previousPixelBuffer;
@end


@@ -1,9 +1,11 @@
#import <UIKit/UIKit.h>
#import <CoreVideo/CoreVideo.h>
@class TGVideoCameraRendererBuffer;
@interface TGVideoCameraGLView : UIView
- (void)displayPixelBuffer:(CVPixelBufferRef)pixelBuffer;
- (void)displayPixelBuffer:(TGVideoCameraRendererBuffer *)pixelBuffer;
- (void)flushPixelBufferCache;
- (void)reset;


@@ -2,6 +2,7 @@
#import <CoreMedia/CoreMedia.h>
@protocol TGVideoCameraMovieRecorderDelegate;
@class TGVideoCameraRendererBuffer;
@interface TGVideoCameraMovieRecorder : NSObject
@@ -15,7 +16,7 @@
- (void)prepareToRecord;
- (void)appendVideoPixelBuffer:(CVPixelBufferRef)pixelBuffer withPresentationTime:(CMTime)presentationTime;
- (void)appendVideoPixelBuffer:(TGVideoCameraRendererBuffer *)pixelBuffer withPresentationTime:(CMTime)presentationTime;
- (void)appendVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer;
- (void)appendAudioSampleBuffer:(CMSampleBufferRef)sampleBuffer;


@@ -1387,6 +1387,14 @@ UIImage *TGSecretBlurredAttachmentImage(UIImage *source, CGSize size, uint32_t *
return TGSecretBlurredAttachmentWithCornerRadiusImage(source, size, averageColor, attachmentBorder, 13, position);
}
#if DEBUG
@interface DebugTGSecretBlurredAttachmentWithCornerRadiusImage : UIImage
@end
@implementation DebugTGSecretBlurredAttachmentWithCornerRadiusImage
@end
#endif
UIImage *TGSecretBlurredAttachmentWithCornerRadiusImage(UIImage *source, CGSize size, uint32_t *averageColor, bool attachmentBorder, CGFloat cornerRadius, int position)
{
CGFloat scale = TGScreenScaling(); //TGIsRetina() ? 2.0f : 1.0f;
@@ -1496,7 +1504,11 @@ UIImage *TGSecretBlurredAttachmentWithCornerRadiusImage(UIImage *source, CGSize
}
CGImageRef bitmapImage = CGBitmapContextCreateImage(targetContext);
#if DEBUG
UIImage *image = [[DebugTGSecretBlurredAttachmentWithCornerRadiusImage alloc] initWithCGImage:bitmapImage];
#else
UIImage *image = [[UIImage alloc] initWithCGImage:bitmapImage];
#endif
CGImageRelease(bitmapImage);
CGContextRelease(targetContext);
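
The DEBUG-only UIImage subclass above changes nothing functionally; it only gives these blurred attachment images a distinctive class name, so leaked instances stand out in Xcode's memory graph or in Instruments. The same trick in Swift, as a sketch (the subclass and function names are hypothetical):

    import UIKit

    #if DEBUG
    // Behaviorally identical to UIImage; exists only so allocations show up
    // under their own class name in the memory graph debugger.
    final class DebugBlurredAttachmentImage: UIImage {}
    #endif

    func makeAttachmentImage(from cgImage: CGImage) -> UIImage {
        #if DEBUG
        return DebugBlurredAttachmentImage(cgImage: cgImage)
        #else
        return UIImage(cgImage: cgImage)
        #endif
    }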


@@ -213,62 +213,8 @@ typedef enum
_trimView.startHandleMoved = ^(CGPoint translation)
{
__strong TGMediaPickerGalleryVideoScrubber *strongSelf = weakSelf;
if (strongSelf == nil)
return;
if (strongSelf->_animatingZoomIn)
return;
UIView *trimView = strongSelf->_trimView;
CGRect availableTrimRect = [strongSelf _scrubbingRect];
CGRect normalScrubbingRect = [strongSelf _scrubbingRectZoomedIn:false];
CGFloat originX = MAX(0, trimView.frame.origin.x + translation.x);
CGFloat delta = originX - trimView.frame.origin.x;
CGFloat maxWidth = availableTrimRect.size.width + normalScrubbingRect.origin.x * 2 - originX;
CGRect trimViewRect = CGRectMake(originX, trimView.frame.origin.y, MIN(maxWidth, trimView.frame.size.width - delta), trimView.frame.size.height);
NSTimeInterval trimStartPosition = 0.0;
NSTimeInterval trimEndPosition = 0.0;
[strongSelf _trimStartPosition:&trimStartPosition trimEndPosition:&trimEndPosition forTrimFrame:trimViewRect duration:strongSelf.duration];
NSTimeInterval duration = trimEndPosition - trimStartPosition;
if (trimEndPosition - trimStartPosition < self.minimumLength)
return;
if (strongSelf.maximumLength > DBL_EPSILON && duration > strongSelf.maximumLength)
{
trimViewRect = CGRectMake(trimView.frame.origin.x + delta, trimView.frame.origin.y, trimView.frame.size.width, trimView.frame.size.height);
[strongSelf _trimStartPosition:&trimStartPosition trimEndPosition:&trimEndPosition forTrimFrame:trimViewRect duration:strongSelf.duration];
}
trimView.frame = trimViewRect;
[strongSelf _layoutTrimCurtainViews];
strongSelf->_trimStartValue = trimStartPosition;
strongSelf->_trimEndValue = trimEndPosition;
[strongSelf setValue:trimStartPosition];
UIView *handle = strongSelf->_scrubberHandle;
handle.center = CGPointMake(trimView.frame.origin.x + 12 + handle.frame.size.width / 2, handle.center.y);
UIView *dotHandle = strongSelf->_dotHandle;
dotHandle.center = CGPointMake(trimView.frame.origin.x + 12 + dotHandle.frame.size.width / 2, dotHandle.center.y);
id<TGMediaPickerGalleryVideoScrubberDelegate> delegate = strongSelf.delegate;
if ([delegate respondsToSelector:@selector(videoScrubber:editingStartValueDidChange:)])
[delegate videoScrubber:strongSelf editingStartValueDidChange:trimStartPosition];
[strongSelf cancelZoomIn];
if ([strongSelf zoomAvailable])
{
strongSelf->_pivotSource = TGMediaPickerGalleryVideoScrubberPivotSourceTrimStart;
[strongSelf performSelector:@selector(zoomIn) withObject:nil afterDelay:TGVideoScrubberZoomActivationInterval];
if (strongSelf) {
[strongSelf startHandleMoved:translation];
}
};
_trimView.endHandleMoved = ^(CGPoint translation)
@@ -418,6 +364,65 @@ typedef enum
return self;
}
- (void)startHandleMoved:(CGPoint)translation {
TGMediaPickerGalleryVideoScrubber *strongSelf = self;
if (strongSelf->_animatingZoomIn)
return;
UIView *trimView = strongSelf->_trimView;
CGRect availableTrimRect = [strongSelf _scrubbingRect];
CGRect normalScrubbingRect = [strongSelf _scrubbingRectZoomedIn:false];
CGFloat originX = MAX(0, trimView.frame.origin.x + translation.x);
CGFloat delta = originX - trimView.frame.origin.x;
CGFloat maxWidth = availableTrimRect.size.width + normalScrubbingRect.origin.x * 2 - originX;
CGRect trimViewRect = CGRectMake(originX, trimView.frame.origin.y, MIN(maxWidth, trimView.frame.size.width - delta), trimView.frame.size.height);
NSTimeInterval trimStartPosition = 0.0;
NSTimeInterval trimEndPosition = 0.0;
[strongSelf _trimStartPosition:&trimStartPosition trimEndPosition:&trimEndPosition forTrimFrame:trimViewRect duration:strongSelf.duration];
NSTimeInterval duration = trimEndPosition - trimStartPosition;
if (trimEndPosition - trimStartPosition < self.minimumLength)
return;
if (strongSelf.maximumLength > DBL_EPSILON && duration > strongSelf.maximumLength)
{
trimViewRect = CGRectMake(trimView.frame.origin.x + delta, trimView.frame.origin.y, trimView.frame.size.width, trimView.frame.size.height);
[strongSelf _trimStartPosition:&trimStartPosition trimEndPosition:&trimEndPosition forTrimFrame:trimViewRect duration:strongSelf.duration];
}
trimView.frame = trimViewRect;
[strongSelf _layoutTrimCurtainViews];
strongSelf->_trimStartValue = trimStartPosition;
strongSelf->_trimEndValue = trimEndPosition;
[strongSelf setValue:trimStartPosition];
UIView *handle = strongSelf->_scrubberHandle;
handle.center = CGPointMake(trimView.frame.origin.x + 12 + handle.frame.size.width / 2, handle.center.y);
UIView *dotHandle = strongSelf->_dotHandle;
dotHandle.center = CGPointMake(trimView.frame.origin.x + 12 + dotHandle.frame.size.width / 2, dotHandle.center.y);
id<TGMediaPickerGalleryVideoScrubberDelegate> delegate = strongSelf.delegate;
if ([delegate respondsToSelector:@selector(videoScrubber:editingStartValueDidChange:)])
[delegate videoScrubber:strongSelf editingStartValueDidChange:trimStartPosition];
[strongSelf cancelZoomIn];
if ([strongSelf zoomAvailable])
{
strongSelf->_pivotSource = TGMediaPickerGalleryVideoScrubberPivotSourceTrimStart;
[strongSelf performSelector:@selector(zoomIn) withObject:nil afterDelay:TGVideoScrubberZoomActivationInterval];
}
}
- (void)setHasDotPicker:(bool)hasDotPicker {
_hasDotPicker = hasDotPicker;
_tapGestureRecognizer.enabled = hasDotPicker;
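
The scrubber change above is a retain-cycle fix: the old block body slipped in a direct self reference (self.minimumLength), which strongly captured self inside a stored block; the body now lives in a regular method, and the block shrinks to a weak-self trampoline. A Swift sketch of the same restructuring (names hypothetical):

    import UIKit

    final class ScrubberSketch {
        var startHandleMoved: ((CGPoint) -> Void)?

        func setup() {
            // Keep the closure to a weak capture plus one call, so it retains
            // nothing; all state is reached through the method's receiver.
            self.startHandleMoved = { [weak self] translation in
                self?.handleStartMoved(translation)
            }
        }

        private func handleStartMoved(_ translation: CGPoint) {
            // The heavy trim/zoom logic lives here, where `self` is an
            // ordinary receiver rather than a reference captured by a
            // stored closure.
        }
    }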


@@ -3,6 +3,56 @@
#import <OpenGLES/ES2/glext.h>
#import <LegacyComponents/TGPaintShader.h>
#import "TGVideoCameraPipeline.h"
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdeprecated-declarations"
#ifndef GLES_SILENCE_DEPRECATION
#define GLES_SILENCE_DEPRECATION
#endif
@interface TGVideoCameraGLRendererBufferPool : NSObject
@property (nonatomic, assign) CVPixelBufferPoolRef pool;
@end
@implementation TGVideoCameraGLRendererBufferPool
- (instancetype)initWithRetainedPool:(CVPixelBufferPoolRef)pool {
self = [super init];
if (self != nil) {
_pool = pool;
}
return self;
}
- (void)dealloc {
if (_pool) {
CVPixelBufferPoolRelease(_pool);
}
}
@end
@implementation TGVideoCameraRendererBuffer
- (instancetype)initWithRetainedBuffer:(CVPixelBufferRef)buffer {
self = [super init];
if (self != nil) {
_buffer = buffer;
}
return self;
}
- (void)dealloc {
if (_buffer) {
CVPixelBufferRelease(_buffer);
}
}
@end
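
TGVideoCameraRendererBuffer and TGVideoCameraGLRendererBufferPool wrap raw CVPixelBufferRef / CVPixelBufferPoolRef values in ARC-managed objects whose dealloc performs the release, tying each buffer's lifetime to ordinary object ownership instead of hand-balanced CFRetain/CFRelease pairs. The same ownership pattern in Swift, as a sketch (the wrapper name is hypothetical):

    import CoreVideo

    // Takes over a +1 (retained) pixel buffer and releases it exactly once,
    // when the last strong reference to the wrapper goes away.
    final class RetainedPixelBuffer {
        private let retained: Unmanaged<CVPixelBuffer>

        var buffer: CVPixelBuffer {
            return self.retained.takeUnretainedValue()
        }

        init(retainedBuffer: Unmanaged<CVPixelBuffer>) {
            self.retained = retainedBuffer
        }

        deinit {
            // Balances the retain the caller handed in.
            self.retained.release()
        }
    }

On the Objective-C side this is exactly what initWithRetainedBuffer: plus CVPixelBufferRelease in dealloc implement.
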
@interface TGVideoCameraGLRenderer ()
{
@@ -10,11 +60,11 @@
CVOpenGLESTextureCacheRef _textureCache;
CVOpenGLESTextureCacheRef _prevTextureCache;
CVOpenGLESTextureCacheRef _renderTextureCache;
CVPixelBufferPoolRef _bufferPool;
TGVideoCameraGLRendererBufferPool *_bufferPool;
CFDictionaryRef _bufferPoolAuxAttributes;
CMFormatDescriptionRef _outputFormatDescription;
CVPixelBufferRef _previousPixelBuffer;
TGVideoCameraRendererBuffer *_previousPixelBuffer;
TGPaintShader *_shader;
GLint _frameUniform;
@@ -194,20 +244,12 @@
return _previousPixelBuffer != NULL;
}
- (void)setPreviousPixelBuffer:(CVPixelBufferRef)previousPixelBuffer
- (void)setPreviousPixelBuffer:(TGVideoCameraRendererBuffer *)previousPixelBuffer
{
if (_previousPixelBuffer != NULL)
{
CFRelease(_previousPixelBuffer);
_previousPixelBuffer = NULL;
}
_previousPixelBuffer = previousPixelBuffer;
if (_previousPixelBuffer != NULL)
CFRetain(_previousPixelBuffer);
}
- (CVPixelBufferRef)copyRenderedPixelBuffer:(CVPixelBufferRef)pixelBuffer
- (TGVideoCameraRendererBuffer *)copyRenderedPixelBuffer:(TGVideoCameraRendererBuffer *)pixelBuffer
{
static const GLfloat squareVertices[] =
{
@@ -217,13 +259,15 @@
1.0f, 1.0f,
};
if (_offscreenBufferHandle == 0)
return NULL;
if (_offscreenBufferHandle == 0) {
return NULL;
}
if (pixelBuffer == NULL)
return NULL;
if (pixelBuffer == NULL) {
return nil;
}
const CMVideoDimensions srcDimensions = { (int32_t)CVPixelBufferGetWidth(pixelBuffer), (int32_t)CVPixelBufferGetHeight(pixelBuffer) };
const CMVideoDimensions srcDimensions = { (int32_t)CVPixelBufferGetWidth(pixelBuffer.buffer), (int32_t)CVPixelBufferGetHeight(pixelBuffer.buffer) };
const CMVideoDimensions dstDimensions = CMVideoFormatDescriptionGetDimensions(_outputFormatDescription);
EAGLContext *oldContext = [EAGLContext currentContext];
@@ -237,35 +281,72 @@
CVOpenGLESTextureRef srcTexture = NULL;
CVOpenGLESTextureRef prevTexture = NULL;
CVOpenGLESTextureRef dstTexture = NULL;
CVPixelBufferRef dstPixelBuffer = NULL;
CVPixelBufferRef dstPixelBufferValue = NULL;
err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, _textureCache, pixelBuffer, NULL, GL_TEXTURE_2D, GL_RGBA, srcDimensions.width, srcDimensions.height, GL_BGRA, GL_UNSIGNED_BYTE, 0, &srcTexture);
err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, _textureCache, pixelBuffer.buffer, NULL, GL_TEXTURE_2D, GL_RGBA, srcDimensions.width, srcDimensions.height, GL_BGRA, GL_UNSIGNED_BYTE, 0, &srcTexture);
if (!srcTexture || err)
goto bail;
if (!srcTexture || err) {
if (oldContext != _context) {
[EAGLContext setCurrentContext:oldContext];
}
if (srcTexture) {
CFRelease(srcTexture);
}
if (prevTexture) {
CFRelease(prevTexture);
}
if (dstTexture) {
CFRelease(dstTexture);
}
return nil;
}
bool hasPreviousTexture = false;
if (_previousPixelBuffer != NULL)
{
err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, _prevTextureCache, _previousPixelBuffer, NULL, GL_TEXTURE_2D, GL_RGBA, srcDimensions.width, srcDimensions.height, GL_BGRA, GL_UNSIGNED_BYTE, 0, &prevTexture);
err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, _prevTextureCache, _previousPixelBuffer.buffer, NULL, GL_TEXTURE_2D, GL_RGBA, srcDimensions.width, srcDimensions.height, GL_BGRA, GL_UNSIGNED_BYTE, 0, &prevTexture);
if (!prevTexture || err)
goto bail;
if (!prevTexture || err) {
if (oldContext != _context) {
[EAGLContext setCurrentContext:oldContext];
}
if (srcTexture) {
CFRelease(srcTexture);
}
if (prevTexture) {
CFRelease(prevTexture);
}
if (dstTexture) {
CFRelease(dstTexture);
}
return nil;
}
hasPreviousTexture = true;
}
err = CVPixelBufferPoolCreatePixelBufferWithAuxAttributes(kCFAllocatorDefault, _bufferPool, _bufferPoolAuxAttributes, &dstPixelBuffer);
if (err == kCVReturnWouldExceedAllocationThreshold)
{
err = CVPixelBufferPoolCreatePixelBufferWithAuxAttributes(kCFAllocatorDefault, _bufferPool.pool, _bufferPoolAuxAttributes, &dstPixelBufferValue);
if (err == kCVReturnWouldExceedAllocationThreshold) {
CVOpenGLESTextureCacheFlush(_renderTextureCache, 0);
err = CVPixelBufferPoolCreatePixelBufferWithAuxAttributes(kCFAllocatorDefault, _bufferPool, _bufferPoolAuxAttributes, &dstPixelBuffer);
err = CVPixelBufferPoolCreatePixelBufferWithAuxAttributes(kCFAllocatorDefault, _bufferPool.pool, _bufferPoolAuxAttributes, &dstPixelBufferValue);
}
TGVideoCameraRendererBuffer *dstPixelBuffer = nil;
if (dstPixelBufferValue) {
dstPixelBuffer = [[TGVideoCameraRendererBuffer alloc] initWithRetainedBuffer:dstPixelBufferValue];
}
if (err)
goto bail;
err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, _renderTextureCache, dstPixelBuffer, NULL, GL_TEXTURE_2D, GL_RGBA, dstDimensions.width, dstDimensions.height, GL_BGRA, GL_UNSIGNED_BYTE, 0, &dstTexture);
err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, _renderTextureCache, dstPixelBuffer.buffer, NULL, GL_TEXTURE_2D, GL_RGBA, dstDimensions.width, dstDimensions.height, GL_BGRA, GL_UNSIGNED_BYTE, 0, &dstTexture);
if (!dstTexture || err)
goto bail;
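
The error paths above are the mechanical part of this fix: several shared goto bail exits are replaced with explicit cleanup at each early return, so every texture reference is released on every exit. In Swift the same invariant is usually expressed once with defer; a generic sketch (all helper names hypothetical):

    func renderOnce() -> Bool {
        let texture = acquireTexture()
        defer {
            // Runs on every exit path below, including the early returns,
            // so the release cannot be forgotten on a new error branch.
            releaseTexture(texture)
        }
        guard uploadSucceeded() else { return false }
        guard drawSucceeded() else { return false }
        return true
    }

    // Hypothetical stand-ins for the CoreVideo texture-cache calls.
    func acquireTexture() -> Int { return 1 }
    func releaseTexture(_ texture: Int) {}
    func uploadSucceeded() -> Bool { return true }
    func drawSucceeded() -> Bool { return true }
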
@@ -388,7 +469,10 @@ bail:
_noMirrorUniform = [_shader uniformForKey:@"noMirror"];
size_t maxRetainedBufferCount = clientRetainedBufferCountHint + 1;
_bufferPool = [TGVideoCameraGLRenderer createPixelBufferPoolWithWidth:(int32_t)outputSize.width height:(int32_t)outputSize.height pixelFormat:kCVPixelFormatType_32BGRA maxBufferCount:(int32_t)maxRetainedBufferCount];
CVPixelBufferPoolRef bufferPoolValue = [TGVideoCameraGLRenderer createPixelBufferPoolWithWidth:(int32_t)outputSize.width height:(int32_t)outputSize.height pixelFormat:kCVPixelFormatType_32BGRA maxBufferCount:(int32_t)maxRetainedBufferCount];
if (bufferPoolValue) {
_bufferPool = [[TGVideoCameraGLRendererBufferPool alloc] initWithRetainedPool:bufferPoolValue];
}
if (!_bufferPool)
{
@@ -397,11 +481,11 @@ bail:
}
_bufferPoolAuxAttributes = [TGVideoCameraGLRenderer createPixelBufferPoolAuxAttribute:(int32_t)maxRetainedBufferCount];
[TGVideoCameraGLRenderer preallocatePixelBuffersInPool:_bufferPool auxAttributes:_bufferPoolAuxAttributes];
[TGVideoCameraGLRenderer preallocatePixelBuffersInPool:_bufferPool.pool auxAttributes:_bufferPoolAuxAttributes];
CMFormatDescriptionRef outputFormatDescription = NULL;
CVPixelBufferRef testPixelBuffer = NULL;
CVPixelBufferPoolCreatePixelBufferWithAuxAttributes(kCFAllocatorDefault, _bufferPool, _bufferPoolAuxAttributes, &testPixelBuffer);
CVPixelBufferPoolCreatePixelBufferWithAuxAttributes(kCFAllocatorDefault, _bufferPool.pool, _bufferPoolAuxAttributes, &testPixelBuffer);
if (!testPixelBuffer)
{
success = false;
@@ -460,11 +544,7 @@ bail:
_renderTextureCache = 0;
}
if (_bufferPool)
{
CFRelease(_bufferPool);
_bufferPool = NULL;
}
_bufferPool = nil;
if (_bufferPoolAuxAttributes)
{
@@ -525,3 +605,5 @@ bail:
}
@end
#pragma clang diagnostic pop


@@ -6,6 +6,7 @@
#import <LegacyComponents/TGPaintShader.h>
#import "LegacyComponentsInternal.h"
#import "TGVideoCameraPipeline.h"
@interface TGVideoCameraGLView ()
{
@@ -134,7 +135,7 @@ bail:
[self reset];
}
- (void)displayPixelBuffer:(CVPixelBufferRef)pixelBuffer
- (void)displayPixelBuffer:(TGVideoCameraRendererBuffer *)pixelBuffer
{
static const GLfloat squareVertices[] =
{
@@ -161,10 +162,10 @@ bail:
return;
}
size_t frameWidth = CVPixelBufferGetWidth(pixelBuffer);
size_t frameHeight = CVPixelBufferGetHeight(pixelBuffer);
size_t frameWidth = CVPixelBufferGetWidth(pixelBuffer.buffer);
size_t frameHeight = CVPixelBufferGetHeight(pixelBuffer.buffer);
CVOpenGLESTextureRef texture = NULL;
CVReturn err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, _textureCache, pixelBuffer, NULL, GL_TEXTURE_2D, GL_RGBA, (GLsizei)frameWidth, (GLsizei)frameHeight, GL_BGRA, GL_UNSIGNED_BYTE, 0, &texture);
CVReturn err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, _textureCache, pixelBuffer.buffer, NULL, GL_TEXTURE_2D, GL_RGBA, (GLsizei)frameWidth, (GLsizei)frameHeight, GL_BGRA, GL_UNSIGNED_BYTE, 0, &texture);
if (!texture || err)
return;


@@ -1,6 +1,8 @@
#import "TGVideoCameraMovieRecorder.h"
#import <AVFoundation/AVFoundation.h>
#import "TGVideoCameraPipeline.h"
typedef enum {
TGMovieRecorderStatusIdle = 0,
TGMovieRecorderStatusPreparingToRecord,
@@ -146,7 +148,7 @@ typedef enum {
} );
}
- (void)appendVideoPixelBuffer:(CVPixelBufferRef)pixelBuffer withPresentationTime:(CMTime)presentationTime
- (void)appendVideoPixelBuffer:(TGVideoCameraRendererBuffer *)pixelBuffer withPresentationTime:(CMTime)presentationTime
{
CMSampleBufferRef sampleBuffer = NULL;
@@ -155,7 +157,7 @@ typedef enum {
timingInfo.decodeTimeStamp = kCMTimeInvalid;
timingInfo.presentationTimeStamp = presentationTime;
CMSampleBufferCreateForImageBuffer(kCFAllocatorDefault, pixelBuffer, true, NULL, NULL, _videoTrackSourceFormatDescription, &timingInfo, &sampleBuffer);
CMSampleBufferCreateForImageBuffer(kCFAllocatorDefault, pixelBuffer.buffer, true, NULL, NULL, _videoTrackSourceFormatDescription, &timingInfo, &sampleBuffer);
if (sampleBuffer)
{


@@ -4,6 +4,13 @@
@protocol TGVideoCameraPipelineDelegate;
@interface TGVideoCameraRendererBuffer : NSObject
@property (nonatomic, assign) CVPixelBufferRef buffer;
- (instancetype)initWithRetainedBuffer:(CVPixelBufferRef)buffer;
@end
@interface TGVideoCameraPipeline : NSObject
@@ -40,7 +47,7 @@
- (void)capturePipeline:(TGVideoCameraPipeline *)capturePipeline didStopRunningWithError:(NSError *)error;
- (void)capturePipeline:(TGVideoCameraPipeline *)capturePipeline previewPixelBufferReadyForDisplay:(CVPixelBufferRef)previewPixelBuffer;
- (void)capturePipeline:(TGVideoCameraPipeline *)capturePipeline previewPixelBufferReadyForDisplay:(TGVideoCameraRendererBuffer *)previewPixelBuffer;
- (void)capturePipelineDidRunOutOfPreviewBuffers:(TGVideoCameraPipeline *)capturePipeline;
- (void)capturePipelineRecordingDidStart:(TGVideoCameraPipeline *)capturePipeline;


@@ -57,7 +57,7 @@ const NSInteger TGVideoCameraRetainedBufferCount = 16;
NSTimeInterval _resultDuration;
CVPixelBufferRef _previousPixelBuffer;
TGVideoCameraRendererBuffer *_previousPixelBuffer;
int32_t _repeatingCount;
int16_t _micLevelPeak;
@@ -72,7 +72,7 @@ const NSInteger TGVideoCameraRetainedBufferCount = 16;
os_unfair_lock _recordLock;
bool _startRecordAfterAudioBuffer;
CVPixelBufferRef _currentPreviewPixelBuffer;
TGVideoCameraRendererBuffer *_currentPreviewPixelBuffer;
NSMutableDictionary *_thumbnails;
NSTimeInterval _firstThumbnailTime;
@@ -332,13 +332,10 @@ const NSInteger TGVideoCameraRetainedBufferCount = 16;
return;
self.outputVideoFormatDescription = NULL;
[_renderer reset];
if (_currentPreviewPixelBuffer != NULL)
{
CFRelease(_currentPreviewPixelBuffer);
_currentPreviewPixelBuffer = NULL;
}
_currentPreviewPixelBuffer = nil;
});
}
@@ -440,23 +437,23 @@ const NSInteger TGVideoCameraRetainedBufferCount = 16;
}
}
- (UIImage *)imageFromImageBuffer:(CVPixelBufferRef)imageBuffer
- (UIImage *)imageFromImageBuffer:(TGVideoCameraRendererBuffer *)imageBuffer
{
CVPixelBufferLockBaseAddress(imageBuffer, 0);
CVPixelBufferLockBaseAddress(imageBuffer.buffer, 0);
void *baseAddress = CVPixelBufferGetBaseAddress(imageBuffer);
void *baseAddress = CVPixelBufferGetBaseAddress(imageBuffer.buffer);
size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer.buffer);
size_t width = CVPixelBufferGetWidth(imageBuffer);
size_t height = CVPixelBufferGetHeight(imageBuffer);
size_t width = CVPixelBufferGetWidth(imageBuffer.buffer);
size_t height = CVPixelBufferGetHeight(imageBuffer.buffer);
CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
CGContextRef context = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
CGImageRef cgImage = CGBitmapContextCreateImage(context);
CVPixelBufferUnlockBaseAddress(imageBuffer,0);
CVPixelBufferUnlockBaseAddress(imageBuffer.buffer, 0);
CGContextRelease(context);
CGColorSpaceRelease(colorSpace);
@@ -470,7 +467,7 @@ const NSInteger TGVideoCameraRetainedBufferCount = 16;
- (void)renderVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer
{
CVPixelBufferRef renderedPixelBuffer = NULL;
TGVideoCameraRendererBuffer *renderedPixelBuffer = nil;
CMTime timestamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
@synchronized (_renderer)
@@ -486,8 +483,7 @@ const NSInteger TGVideoCameraRetainedBufferCount = 16;
_repeatingCount = 11;
[_renderer setPreviousPixelBuffer:_previousPixelBuffer];
CFRelease(_previousPixelBuffer);
_previousPixelBuffer = NULL;
_previousPixelBuffer = nil;
}
if (_repeatingCount > 0)
@@ -506,7 +502,11 @@ const NSInteger TGVideoCameraRetainedBufferCount = 16;
[_renderer setPreviousPixelBuffer:NULL];
}
CVPixelBufferRef sourcePixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
CVPixelBufferRef sourcePixelBufferValue = CMSampleBufferGetImageBuffer(sampleBuffer);
TGVideoCameraRendererBuffer *sourcePixelBuffer = nil;
if (sourcePixelBufferValue) {
sourcePixelBuffer = [[TGVideoCameraRendererBuffer alloc] initWithRetainedBuffer:CVPixelBufferRetain(sourcePixelBufferValue)];
}
renderedPixelBuffer = [_renderer copyRenderedPixelBuffer:sourcePixelBuffer];
@synchronized (self)
@@ -542,14 +542,11 @@ const NSInteger TGVideoCameraRetainedBufferCount = 16;
if (!repeatingFrames)
{
if (_previousPixelBuffer != NULL)
{
CFRelease(_previousPixelBuffer);
if (_previousPixelBuffer != NULL) {
_previousPixelBuffer = NULL;
}
_previousPixelBuffer = sourcePixelBuffer;
CFRetain(sourcePixelBuffer);
}
}
}
@@ -568,8 +565,6 @@ const NSInteger TGVideoCameraRetainedBufferCount = 16;
if (_recordingStatus == TGVideoCameraRecordingStatusRecording)
[_recorder appendVideoPixelBuffer:renderedPixelBuffer withPresentationTime:timestamp];
}
CFRelease(renderedPixelBuffer);
}
else
{
@@ -577,33 +572,27 @@ const NSInteger TGVideoCameraRetainedBufferCount = 16;
}
}
- (void)outputPreviewPixelBuffer:(CVPixelBufferRef)previewPixelBuffer
- (void)outputPreviewPixelBuffer:(TGVideoCameraRendererBuffer *)previewPixelBuffer
{
if (_currentPreviewPixelBuffer != NULL)
{
CFRelease(_currentPreviewPixelBuffer);
_currentPreviewPixelBuffer = NULL;
}
if (_previousPixelBuffer != NULL)
{
_currentPreviewPixelBuffer = previewPixelBuffer;
CFRetain(_currentPreviewPixelBuffer);
}
[self invokeDelegateCallbackAsync:^
{
CVPixelBufferRef currentPreviewPixelBuffer = NULL;
TGVideoCameraRendererBuffer *currentPreviewPixelBuffer = nil;
@synchronized (self)
{
currentPreviewPixelBuffer = _currentPreviewPixelBuffer;
if (currentPreviewPixelBuffer != NULL)
{
CFRetain(currentPreviewPixelBuffer);
if (_currentPreviewPixelBuffer != NULL)
{
CFRelease(_currentPreviewPixelBuffer);
_currentPreviewPixelBuffer = NULL;
if (currentPreviewPixelBuffer != NULL) {
if (_currentPreviewPixelBuffer != NULL) {
_currentPreviewPixelBuffer = nil;
}
}
}
@@ -611,7 +600,6 @@ const NSInteger TGVideoCameraRetainedBufferCount = 16;
if (currentPreviewPixelBuffer != NULL)
{
[_delegate capturePipeline:self previewPixelBufferReadyForDisplay:currentPreviewPixelBuffer];
CFRelease(currentPreviewPixelBuffer);
}
}];
}
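
The reworked outputPreviewPixelBuffer: hands the latest preview buffer to the delegate with a take-and-clear step inside a @synchronized block, so exactly one strong reference survives and the old CFRetain/CFRelease bookkeeping disappears. A Swift sketch of that handoff, assuming an ARC-managed buffer wrapper as above (holder and lock names hypothetical):

    import Foundation

    final class PreviewBufferHolder<Buffer: AnyObject> {
        private let lock = NSLock()
        private var current: Buffer?

        func store(_ buffer: Buffer) {
            self.lock.lock()
            self.current = buffer
            self.lock.unlock()
        }

        // Returns the latest buffer and clears the stored reference in one
        // critical section; the caller becomes the sole owner.
        func take() -> Buffer? {
            self.lock.lock()
            defer { self.lock.unlock() }
            let value = self.current
            self.current = nil
            return value
        }
    }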


@@ -1273,7 +1273,7 @@ typedef enum
{
}
- (void)capturePipeline:(TGVideoCameraPipeline *)__unused capturePipeline previewPixelBufferReadyForDisplay:(CVPixelBufferRef)previewPixelBuffer
- (void)capturePipeline:(TGVideoCameraPipeline *)__unused capturePipeline previewPixelBufferReadyForDisplay:(TGVideoCameraRendererBuffer *)previewPixelBuffer
{
if (!_gpuAvailable)
return;


@@ -132,6 +132,10 @@ static inline int writeOggPage(ogg_page *page, TGDataItem *fileItem)
return self;
}
- (void)dealloc {
[self cleanup];
}
- (void)cleanup
{
if (_encoder != NULL)


@@ -3241,7 +3241,20 @@ final class PostboxImpl {
let disposable = signal.start(next: { next in
subscriber.putNext((next.0, next.1, nil))
})
return ActionDisposable { [weak self] in
final class MarkedActionDisposable: Disposable {
let disposable: ActionDisposable
init(_ f: @escaping () -> Void) {
self.disposable = ActionDisposable(action: f)
}
func dispose() {
self.disposable.dispose()
}
}
return MarkedActionDisposable { [weak self] in
disposable.dispose()
if let strongSelf = self {
strongSelf.queue.justDispatch {
@@ -4268,7 +4281,7 @@ public class Postbox {
).start(next: subscriber.putNext, error: subscriber.putError, completed: subscriber.putCompletion))
}
return disposable
return disposable.strict()
}
}
@@ -4306,7 +4319,7 @@ public class Postbox {
).start(next: subscriber.putNext, error: subscriber.putError, completed: subscriber.putCompletion))
}
return disposable
return disposable.strict()
}
}
@@ -4346,7 +4359,7 @@ public class Postbox {
).start(next: subscriber.putNext, error: subscriber.putError, completed: subscriber.putCompletion))
}
return disposable
return disposable.strict()
}
}
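
disposable.strict() and the local MarkedActionDisposable serve the same diagnostic goal: strict() flags a disposable that is deallocated without ever being disposed, and the one-off subclass gives this particular disposable its own class name in the memory graph. A sketch of what a strict wrapper can look like, assuming a SwiftSignalKit-style Disposable protocol (the implementation details are hypothetical, and thread safety is omitted):

    protocol Disposable {
        func dispose()
    }

    final class StrictDisposable: Disposable {
        private let inner: Disposable
        private var isDisposed = false

        init(_ inner: Disposable) {
            self.inner = inner
        }

        func dispose() {
            self.isDisposed = true
            self.inner.dispose()
        }

        deinit {
            // Deallocating an undisposed disposable is exactly the leak this
            // commit hunts for, so fail loudly in debug builds.
            #if DEBUG
            assert(self.isDisposed, "Disposable deallocated without dispose()")
            #endif
        }
    }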


@@ -84,6 +84,9 @@ final class ViewTracker {
}
func removeMessageHistoryView(index: Bag<(MutableMessageHistoryView, ValuePipe<(MessageHistoryView, ViewUpdateType)>)>.Index) {
#if DEBUG
assert(self.messageHistoryViews.get(index) != nil)
#endif
self.messageHistoryViews.remove(index)
self.updateTrackedHoles()


@@ -191,10 +191,22 @@ private final class MultipartUploadManager {
}
}
deinit {
let uploadingParts = self.uploadingParts
let dataDisposable = self.dataDisposable
self.queue.async {
for (_, (_, disposable)) in uploadingParts {
disposable.dispose()
}
dataDisposable.dispose()
}
}
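
The new deinit guarantees that in-flight part uploads and the data subscription are disposed even if the manager is released mid-upload. Note how it copies uploadingParts and dataDisposable into locals before hopping onto the queue: by the time the closure runs, self is gone, so only captured values may be touched. A condensed Swift sketch of the same shape (types simplified, SwiftSignalKit assumed):

    import SwiftSignalKit

    final class UploadManagerSketch {
        private let queue = Queue()
        private var uploadingParts: [Int: (size: Int, disposable: Disposable)] = [:]
        private let dataDisposable = MetaDisposable()

        deinit {
            // Capture by value: inside the async closure `self` no longer
            // exists, so the state must travel as plain locals.
            let uploadingParts = self.uploadingParts
            let dataDisposable = self.dataDisposable
            self.queue.async {
                for (_, (_, disposable)) in uploadingParts {
                    disposable.dispose()
                }
                dataDisposable.dispose()
            }
        }
    }
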
func start() {
self.queue.async {
self.dataDisposable.set((self.dataSignal
|> deliverOn(self.queue)).start(next: { [weak self] data in
|> deliverOn(self.queue)).startStrict(next: { [weak self] data in
if let strongSelf = self {
strongSelf.resourceData = data
strongSelf.checkState()
@@ -276,11 +288,11 @@ private final class MultipartUploadManager {
self.headerPartState = .uploading
let part = self.uploadPart(UploadPart(fileId: self.fileId, index: partIndex, data: partData, bigTotalParts: currentBigTotalParts, bigPart: self.bigParts))
|> deliverOn(self.queue)
self.uploadingParts[0] = (partSize, part.start(error: { [weak self] _ in
self.uploadingParts[0] = (partSize, part.startStrict(error: { [weak self] _ in
self?.completed(nil)
}, completed: { [weak self] in
if let strongSelf = self {
let _ = strongSelf.uploadingParts.removeValue(forKey: 0)
strongSelf.uploadingParts.removeValue(forKey: 0)?.1.dispose()
strongSelf.headerPartState = .ready
strongSelf.checkState()
}
@@ -350,11 +362,11 @@ private final class MultipartUploadManager {
break
}
}
self.uploadingParts[nextOffset] = (partSize, part.start(error: { [weak self] _ in
self.uploadingParts[nextOffset] = (partSize, part.startStrict(error: { [weak self] _ in
self?.completed(nil)
}, completed: { [weak self] in
if let strongSelf = self {
let _ = strongSelf.uploadingParts.removeValue(forKey: nextOffset)
strongSelf.uploadingParts.removeValue(forKey: nextOffset)?.1.dispose()
strongSelf.uploadedParts[partOffset] = partSize
if partIndex == 0 {
strongSelf.headerPartState = .ready


@@ -1515,7 +1515,6 @@ private func extractAccountManagerState(records: AccountRecordsView<TelegramAcco
if !buildConfig.isAppStoreBuild {
if value >= 2000 * 1024 * 1024 {
if self.contextValue?.context.sharedContext.immediateExperimentalUISettings.crashOnMemoryPressure == true {
preconditionFailure()
}
}
}


@@ -163,7 +163,7 @@ private final class PeerChannelMemberCategoriesContextsManagerImpl {
self.profileDataPreloadContexts[peerId] = context
if let customData = customData {
disposable.add(customData.start())
disposable.add(customData.startStrict())
}
/*disposable.set(signal.start(next: { [weak context] value in
@@ -195,7 +195,7 @@ private final class PeerChannelMemberCategoriesContextsManagerImpl {
current.subscribers.remove(index)
if current.subscribers.isEmpty {
if current.emptyTimer == nil {
let timer = SwiftSignalKit.Timer(timeout: 60.0, repeat: false, completion: { [weak context] in
let timer = SwiftSignalKit.Timer(timeout: 1.0, repeat: false, completion: { [weak context] in
if let current = strongSelf.profileDataPreloadContexts[peerId], let context = context, current === context {
if current.subscribers.isEmpty {
strongSelf.profileDataPreloadContexts.removeValue(forKey: peerId)