Mirror of https://github.com/Swiftgram/Telegram-iOS.git (synced 2025-06-16 05:55:20 +00:00)

Commit 6603bc2882
Merge commit '15ecbd113638c7496f6c988fa44e244e9c3db699'
@@ -4,6 +4,7 @@
 
 @class PGCameraMovieWriter;
 @class PGRectangleDetector;
+@class SQueue;
 
 @interface PGCameraCaptureSession : AVCaptureSession
 
@@ -65,4 +66,6 @@
 
 + (bool)_isZoomAvailableForDevice:(AVCaptureDevice *)device;
 
++ (SQueue *)cameraQueue;
+
 @end
@@ -120,7 +120,7 @@ NSString *const PGCameraAdjustingFocusKey = @"adjustingFocus";
         TGLegacyLog(@"ERROR: Camera runtime error: %@", notification.userInfo[AVCaptureSessionErrorKey]);
 
         __weak PGCamera *weakSelf = self;
-        TGDispatchAfter(1.5f, [PGCamera cameraQueue]._dispatch_queue, ^
+        TGDispatchAfter(1.5f, [PGCameraCaptureSession cameraQueue]._dispatch_queue, ^
         {
             __strong PGCamera *strongSelf = weakSelf;
             if (strongSelf == nil || strongSelf->_invalidated)
@@ -198,7 +198,7 @@ NSString *const PGCameraAdjustingFocusKey = @"adjustingFocus";
     [previewView setupWithCamera:self];
 
     __weak PGCamera *weakSelf = self;
-    [[PGCamera cameraQueue] dispatch:^
+    [[PGCameraCaptureSession cameraQueue] dispatch:^
     {
         __strong PGCamera *strongSelf = weakSelf;
         if (strongSelf == nil || strongSelf->_invalidated)
@@ -225,7 +225,7 @@ NSString *const PGCameraAdjustingFocusKey = @"adjustingFocus";
     if (_invalidated)
         return;
 
-    [[PGCamera cameraQueue] dispatch:^
+    [[PGCameraCaptureSession cameraQueue] dispatch:^
     {
         if (self.captureSession.isRunning)
             return;
@@ -261,10 +261,11 @@ NSString *const PGCameraAdjustingFocusKey = @"adjustingFocus";
 
     TGLegacyLog(@"Camera: stop capture");
 
-    [[PGCamera cameraQueue] dispatch:^
+    [[PGCameraCaptureSession cameraQueue] dispatch:^
     {
         if (_invalidated)
         {
+#if !TARGET_IPHONE_SIMULATOR
             [self.captureSession beginConfiguration];
 
             [self.captureSession resetFlashMode];
@@ -279,16 +280,21 @@ NSString *const PGCameraAdjustingFocusKey = @"adjustingFocus";
             for (AVCaptureOutput *output in self.captureSession.outputs)
                 [self.captureSession removeOutput:output];
 
-#if !TARGET_IPHONE_SIMULATOR
             [self.captureSession commitConfiguration];
 #endif
         }
 
         TGLegacyLog(@"Camera: stop running");
 #if !TARGET_IPHONE_SIMULATOR
-        [self.captureSession stopRunning];
+        @try {
+            [self.captureSession stopRunning];
+        } @catch (NSException *exception) {
+            TGLegacyLog(@"Camera: caught exception – %@", exception.description);
+            [self.captureSession commitConfiguration];
+            [self.captureSession stopRunning];
+            TGLegacyLog(@"Camera: seems to be successfully resolved");
+        }
 #endif
 
         _capturing = false;
 
         TGDispatchOnMainThread(^
@@ -328,9 +334,9 @@ NSString *const PGCameraAdjustingFocusKey = @"adjustingFocus";
     };
 
     if (synchronous)
-        [[PGCamera cameraQueue] dispatchSync:block];
+        [[PGCameraCaptureSession cameraQueue] dispatchSync:block];
     else
-        [[PGCamera cameraQueue] dispatch:block];
+        [[PGCameraCaptureSession cameraQueue] dispatch:block];
 }
 
 #pragma mark -
@@ -361,7 +367,7 @@ NSString *const PGCameraAdjustingFocusKey = @"adjustingFocus";
 {
     bool videoMirrored = !self.disableResultMirroring ? _previewView.captureConnection.videoMirrored : false;
 
-    [[PGCamera cameraQueue] dispatch:^
+    [[PGCameraCaptureSession cameraQueue] dispatch:^
     {
         if (!self.captureSession.isRunning || self.captureSession.imageOutput.isCapturingStillImage || _invalidated)
             return;
@@ -410,13 +416,13 @@ NSString *const PGCameraAdjustingFocusKey = @"adjustingFocus";
         if (CFAbsoluteTimeGetCurrent() - _captureStartTime > 0.4)
             takePhoto();
         else
-            TGDispatchAfter(0.4 - delta, [[PGCamera cameraQueue] _dispatch_queue], takePhoto);
+            TGDispatchAfter(0.4 - delta, [[PGCameraCaptureSession cameraQueue] _dispatch_queue], takePhoto);
     }];
 }
 
 - (void)startVideoRecordingForMoment:(bool)moment completion:(void (^)(NSURL *, CGAffineTransform transform, CGSize dimensions, NSTimeInterval duration, bool success))completion
 {
-    [[PGCamera cameraQueue] dispatch:^
+    [[PGCameraCaptureSession cameraQueue] dispatch:^
     {
         if (!self.captureSession.isRunning || _invalidated)
             return;
@@ -443,7 +449,7 @@ NSString *const PGCameraAdjustingFocusKey = @"adjustingFocus";
         if (CFAbsoluteTimeGetCurrent() - _captureStartTime > 1.5)
             startRecording();
         else
-            TGDispatchAfter(1.5, [[PGCamera cameraQueue] _dispatch_queue], startRecording);
+            TGDispatchAfter(1.5, [[PGCameraCaptureSession cameraQueue] _dispatch_queue], startRecording);
 
         TGDispatchOnMainThread(^
         {
@@ -455,7 +461,7 @@ NSString *const PGCameraAdjustingFocusKey = @"adjustingFocus";
 
 - (void)stopVideoRecording
 {
-    [[PGCamera cameraQueue] dispatch:^
+    [[PGCameraCaptureSession cameraQueue] dispatch:^
     {
         [self.captureSession stopVideoRecording];
 
@@ -496,7 +502,7 @@ NSString *const PGCameraAdjustingFocusKey = @"adjustingFocus";
         if (strongSelf == nil)
             return;
 
-        [[PGCamera cameraQueue] dispatch:^
+        [[PGCameraCaptureSession cameraQueue] dispatch:^
         {
             strongSelf.captureSession.currentMode = cameraMode;
 
@@ -584,7 +590,7 @@ NSString *const PGCameraAdjustingFocusKey = @"adjustingFocus";
 
 - (void)_setFocusPoint:(CGPoint)point focusMode:(AVCaptureFocusMode)focusMode exposureMode:(AVCaptureExposureMode)exposureMode monitorSubjectAreaChange:(bool)monitorSubjectAreaChange
 {
-    [[PGCamera cameraQueue] dispatch:^
+    [[PGCameraCaptureSession cameraQueue] dispatch:^
     {
         if (self.disabled)
             return;
@@ -600,7 +606,7 @@ NSString *const PGCameraAdjustingFocusKey = @"adjustingFocus";
 
 - (void)beginExposureTargetBiasChange
 {
-    [[PGCamera cameraQueue] dispatch:^
+    [[PGCameraCaptureSession cameraQueue] dispatch:^
     {
         if (self.disabled)
             return;
@@ -611,7 +617,7 @@ NSString *const PGCameraAdjustingFocusKey = @"adjustingFocus";
 
 - (void)setExposureTargetBias:(CGFloat)bias
 {
-    [[PGCamera cameraQueue] dispatch:^
+    [[PGCameraCaptureSession cameraQueue] dispatch:^
     {
         if (self.disabled)
             return;
@@ -622,7 +628,7 @@ NSString *const PGCameraAdjustingFocusKey = @"adjustingFocus";
 
 - (void)endExposureTargetBiasChange
 {
-    [[PGCamera cameraQueue] dispatch:^
+    [[PGCameraCaptureSession cameraQueue] dispatch:^
     {
         if (self.disabled)
             return;
@@ -661,7 +667,7 @@ NSString *const PGCameraAdjustingFocusKey = @"adjustingFocus";
 
 - (void)setFlashMode:(PGCameraFlashMode)flashMode
 {
-    [[PGCamera cameraQueue] dispatch:^
+    [[PGCameraCaptureSession cameraQueue] dispatch:^
     {
         self.captureSession.currentFlashMode = flashMode;
     }];
@@ -689,7 +695,7 @@ NSString *const PGCameraAdjustingFocusKey = @"adjustingFocus";
         if (strongSelf == nil)
            return;
 
-        [[PGCamera cameraQueue] dispatch:^
+        [[PGCameraCaptureSession cameraQueue] dispatch:^
        {
             [strongSelf.captureSession setCurrentCameraPosition:targetCameraPosition];
 
@@ -744,7 +750,7 @@ NSString *const PGCameraAdjustingFocusKey = @"adjustingFocus";
     if (self.cameraMode == PGCameraModeVideo) {
         animated = false;
     }
-    [[PGCamera cameraQueue] dispatch:^
+    [[PGCameraCaptureSession cameraQueue] dispatch:^
     {
         if (self.disabled)
             return;
@@ -786,18 +792,6 @@ NSString *const PGCameraAdjustingFocusKey = @"adjustingFocus";
     return ([PGCameraCaptureSession _deviceWithCameraPosition:PGCameraPositionFront] != nil);
 }
 
-+ (SQueue *)cameraQueue
-{
-    static dispatch_once_t onceToken;
-    static SQueue *queue = nil;
-    dispatch_once(&onceToken, ^
-    {
-        queue = [[SQueue alloc] init];
-    });
-
-    return queue;
-}
-
 + (AVCaptureVideoOrientation)_videoOrientationForInterfaceOrientation:(UIInterfaceOrientation)deviceOrientation mirrored:(bool)mirrored
 {
     switch (deviceOrientation)
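Taken together, the hunks above reroute every camera operation from the old `[PGCamera cameraQueue]` to the class-level `[PGCameraCaptureSession cameraQueue]`, so all session mutations funnel through one shared serial queue (the `+ (SQueue *)cameraQueue` singleton is deleted from PGCamera in the last hunk and re-added on the capture session further down). A minimal Swift sketch of the same pattern, assuming plain GCD instead of the project's SQueue; the names are illustrative only:

```swift
import Foundation

// Hypothetical sketch: one shared serial queue owned by the capture-session type
// rather than by each camera wrapper. CameraQueue and its helpers are illustrative,
// not the project's SQueue API.
enum CameraQueue {
    // A single serial queue for all capture-session mutations.
    static let queue = DispatchQueue(label: "org.example.camera-session")

    // Run work asynchronously on the camera queue.
    static func dispatch(_ work: @escaping () -> Void) {
        queue.async { work() }
    }

    // Run work synchronously, e.g. for teardown paths that must finish before returning.
    static func dispatchSync(_ work: () -> Void) {
        queue.sync(execute: work)
    }
}

// Usage: every start/stop/reconfigure call goes through the same queue, which is
// what the [[PGCameraCaptureSession cameraQueue] dispatch:^{ ... }] rewrites achieve.
CameraQueue.dispatch {
    // reconfigure the session here
}
```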
@@ -208,6 +208,7 @@ const NSInteger PGCameraFrameRate = 30;
 
 - (void)reset
 {
+    NSAssert([[PGCameraCaptureSession cameraQueue] isCurrentQueue], @"[[PGCameraCaptureSession cameraQueue] isCurrentQueue]");
     [self beginConfiguration];
 
     [self _removeAudioInputEndAudioSession:true];
@@ -259,6 +260,8 @@ const NSInteger PGCameraFrameRate = 30;
 
 - (void)setCurrentMode:(PGCameraMode)mode
 {
+    NSAssert([[PGCameraCaptureSession cameraQueue] isCurrentQueue], @"[[PGCameraCaptureSession cameraQueue] isCurrentQueue]");
+
     _currentMode = mode;
 
     [self beginConfiguration];
@@ -804,6 +807,7 @@ const NSInteger PGCameraFrameRate = 30;
 
 - (void)setCurrentCameraPosition:(PGCameraPosition)position
 {
+    NSAssert([[PGCameraCaptureSession cameraQueue] isCurrentQueue], @"[[PGCameraCaptureSession cameraQueue] isCurrentQueue]");
     AVCaptureDevice *deviceForTargetPosition = [PGCameraCaptureSession _deviceWithCameraPosition:position];
     if ([_videoDevice isEqual:deviceForTargetPosition])
         return;
@@ -1123,4 +1127,18 @@ static UIImageOrientation TGSnapshotOrientationForVideoOrientation(bool mirrored
     }
 }
 
+#pragma mark -
+
++ (SQueue *)cameraQueue
+{
+    static dispatch_once_t onceToken;
+    static SQueue *queue = nil;
+    dispatch_once(&onceToken, ^
+    {
+        queue = [[SQueue alloc] init];
+    });
+
+    return queue;
+}
+
 @end
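The `NSAssert([[PGCameraCaptureSession cameraQueue] isCurrentQueue], ...)` lines added to `reset`, `setCurrentMode:` and `setCurrentCameraPosition:` make the queue ownership explicit: these methods must already be running on the camera queue. A hedged Swift equivalent of that guard, using GCD's `dispatchPrecondition` rather than SQueue's `isCurrentQueue` (names below are assumed for the sketch):

```swift
import Foundation

// Minimal sketch of the queue-ownership assertion the diff adds:
// debug builds trap if a session mutation runs off the camera queue.
let cameraQueue = DispatchQueue(label: "org.example.camera-session")

func resetSession() {
    // Equivalent in spirit to NSAssert([[PGCameraCaptureSession cameraQueue] isCurrentQueue], ...)
    dispatchPrecondition(condition: .onQueue(cameraQueue))
    // ... begin/commit configuration, rebuild inputs and outputs ...
}

// Correct call site: hop onto the queue first, then mutate the session.
cameraQueue.async {
    resetSession()
}
```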
@@ -425,7 +425,6 @@ final class WallpaperGalleryItemNode: GalleryItemNode {
                 }
 
                 strongSelf.presentationData = presentationData
-                strongSelf.nativeNode.updateBubbleTheme(bubbleTheme: presentationData.theme, bubbleCorners: presentationData.chatBubbleCorners)
 
                 if let (layout, _) = strongSelf.validLayout {
                     strongSelf.updateMessagesLayout(layout: layout, offset: CGPoint(), transition: .animated(duration: 0.3, curve: .easeInOut))
@@ -533,7 +532,9 @@ final class WallpaperGalleryItemNode: GalleryItemNode {
 
         switch entry {
         case let .wallpaper(wallpaper, _):
-            self.nativeNode.update(wallpaper: wallpaper)
+            Queue.mainQueue().justDispatch {
+                self.nativeNode.update(wallpaper: wallpaper)
+            }
 
             if case let .file(file) = wallpaper, file.isPattern {
                 self.nativeNode.isHidden = false
@@ -559,15 +560,14 @@ final class WallpaperGalleryItemNode: GalleryItemNode {
                 self.nativeNode.update(wallpaper: wallpaper)
                 self.patternButtonNode.isSelected = false
             } else {
+                self.nativeNode._internalUpdateIsSettingUpWallpaper()
                 self.nativeNode.isHidden = true
                 self.patternButtonNode.isSelected = false
                 self.playButtonNode.setIcon(self.playButtonRotateImage)
             }
         case .asset:
             self.nativeNode._internalUpdateIsSettingUpWallpaper()
-            //self.nativeNode.update(wallpaper: .color(0xff000000))
-            self.nativeNode.isHidden = false
+            self.nativeNode.isHidden = true
             self.patternButtonNode.isSelected = false
             self.playButtonNode.setIcon(self.playButtonRotateImage)
         default:
@@ -1322,6 +1322,8 @@ final class WallpaperGalleryItemNode: GalleryItemNode {
     }
 
     private func updateMessagesLayout(layout: ContainerViewLayout, offset: CGPoint, transition: ContainedViewLayoutTransition) {
+        self.nativeNode.updateBubbleTheme(bubbleTheme: self.presentationData.theme, bubbleCorners: self.presentationData.chatBubbleCorners)
+
         var bottomInset: CGFloat = 132.0
 
         var items: [ListViewItem] = []
@@ -1468,7 +1470,7 @@ final class WallpaperGalleryItemNode: GalleryItemNode {
 
         if let _ = serviceMessageText, let messageNodes = self.messageNodes, let node = messageNodes.last {
             if let backgroundNode = node.subnodes?.first?.subnodes?.first?.subnodes?.first?.subnodes?.first, let backdropNode = node.subnodes?.first?.subnodes?.first?.subnodes?.first?.subnodes?.last?.subnodes?.last?.subnodes?.first {
                 backdropNode.isHidden = true
                 let serviceBackgroundFrame = backgroundNode.view.convert(backgroundNode.bounds, to: self.view).offsetBy(dx: 0.0, dy: -1.0).insetBy(dx: 0.0, dy: -1.0)
                 transition.updateFrame(node: self.serviceBackgroundNode, frame: serviceBackgroundFrame)
                 self.serviceBackgroundNode.update(size: serviceBackgroundFrame.size, cornerRadius: serviceBackgroundFrame.height / 2.0, transition: transition)
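In the `.wallpaper` case above, `self.nativeNode.update(wallpaper:)` is now wrapped in `Queue.mainQueue().justDispatch { ... }`, which appears to defer the update to a later main-queue pass instead of running it inline while the gallery entry is applied (`Queue` is the project's own queue helper). A rough plain-GCD analogue of that deferral, with an assumed helper name:

```swift
import Dispatch

// Sketch of the deferral the diff introduces: push the wallpaper update to the next
// main-queue iteration so the current layout/apply pass finishes first.
// DispatchQueue.main.async is assumed here as the standard-library analogue of
// Queue.mainQueue().justDispatch.
func applyWallpaperDeferred(_ apply: @escaping () -> Void) {
    DispatchQueue.main.async {
        apply()
    }
}

applyWallpaperDeferred {
    // e.g. nativeNode.update(wallpaper: wallpaper)
}
```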
@@ -423,7 +423,6 @@ final class WallpaperOptionButtonNode: HighlightTrackingButtonNode {
 
     func setEnabled(_ enabled: Bool) {
         let alpha: CGFloat = enabled ? 1.0 : 0.4
-        self.backgroundNode.alpha = alpha
         self.checkNode.alpha = alpha
         self.colorNode.alpha = alpha
         self.textNode.alpha = alpha
@@ -164,6 +164,10 @@ func _internal_setExistingChatWallpaper(account: Account, messageId: MessageId,
     return account.postbox.transaction { transaction -> Peer? in
         if let peer = transaction.getPeer(messageId.peerId), let message = transaction.getMessage(messageId) {
             if let action = message.media.first(where: { $0 is TelegramMediaAction }) as? TelegramMediaAction, case let .setChatWallpaper(wallpaper) = action.action {
+                var wallpaper = wallpaper
+                if let settings = settings {
+                    wallpaper = wallpaper.withUpdatedSettings(settings)
+                }
                 transaction.updatePeerCachedData(peerIds: Set([peer.id]), update: { _, current in
                     if let current = current as? CachedUserData {
                         return current.withUpdatedWallpaper(wallpaper)
@@ -858,11 +858,18 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
             }
             strongSelf.chatDisplayNode.dismissInput()
             let wallpaperPreviewController = WallpaperGalleryController(context: strongSelf.context, source: .wallpaper(wallpaper, nil, [], nil, nil, nil), mode: .peer(EnginePeer(peer), true))
-            wallpaperPreviewController.apply = { wallpaper, options, _, _ in
-                let _ = (strongSelf.context.engine.themes.setExistingChatWallpaper(messageId: message.id, settings: nil)
-                |> deliverOnMainQueue).start(completed: { [weak wallpaperPreviewController] in
+            wallpaperPreviewController.apply = { [weak wallpaperPreviewController] entry, options, _, _ in
+                if case let .wallpaper(wallpaper, _) = entry, case let .file(file) = wallpaper, !file.isPattern && options.contains(.blur) {
+                    uploadCustomPeerWallpaper(context: strongSelf.context, wallpaper: entry, mode: options, cropRect: nil, brightness: nil, peerId: message.id.peerId, completion: {
                         wallpaperPreviewController?.dismiss()
                     })
+                } else {
+                    let _ = (strongSelf.context.engine.themes.setExistingChatWallpaper(messageId: message.id, settings: nil)
+                    |> deliverOnMainQueue).start()
+                    Queue.mainQueue().after(0.1) {
+                        wallpaperPreviewController?.dismiss()
+                    }
+                }
             }
             strongSelf.push(wallpaperPreviewController)
             return true
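The rewritten `apply` handler captures the preview controller weakly and branches on the selected entry: a blurred, non-pattern file wallpaper goes through `uploadCustomPeerWallpaper` (so the blur is baked into the uploaded file), while everything else keeps using `setExistingChatWallpaper`. A simplified sketch of that closure shape, with placeholder types and helpers rather than the real Telegram APIs:

```swift
import Foundation

// Placeholder stand-ins for the real controller, wallpaper entry and engine calls.
final class PreviewController {
    func dismiss() {}
}

enum WallpaperChoice {
    case existing(id: Int64)
    case blurredFile(data: Data)
}

func uploadCustomWallpaper(_ data: Data, completion: @escaping () -> Void) { completion() }
func setExistingWallpaper(_ id: Int64, completion: @escaping () -> Void) { completion() }

func makeApplyHandler(for controller: PreviewController) -> (WallpaperChoice) -> Void {
    // Capture the controller weakly so the handler cannot keep the dismissed screen alive.
    return { [weak controller] choice in
        switch choice {
        case let .blurredFile(data):
            // Edits such as blur require uploading a new file before it can be applied.
            uploadCustomWallpaper(data) {
                controller?.dismiss()
            }
        case let .existing(id):
            // The original document can be reused as-is.
            setExistingWallpaper(id) {
                controller?.dismiss()
            }
        }
    }
}
```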
@@ -5861,7 +5868,7 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
             var themeEmoticon = themeEmoticon
             if let themeEmoticonPreview = themeEmoticonPreview {
                 if !themeEmoticonPreview.isEmpty {
-                    if themeEmoticon != themeEmoticonPreview {
+                    if themeEmoticon?.strippedEmoji != themeEmoticonPreview.strippedEmoji {
                         chatWallpaper = nil
                         themeEmoticon = themeEmoticonPreview
                     }
@@ -864,6 +864,7 @@ final class WallpaperBackgroundNodeImpl: ASDisplayNode, WallpaperBackgroundNode
         self.bakedBackgroundView.isHidden = true
 
         self.dimLayer = SimpleLayer()
+        self.dimLayer.opacity = 0.0
         self.dimLayer.backgroundColor = UIColor.black.cgColor
 
         super.init()
@@ -904,8 +905,19 @@ final class WallpaperBackgroundNodeImpl: ASDisplayNode, WallpaperBackgroundNode
             return
         }
         var dimAlpha: Float = 0.0
-        if case let .file(file) = wallpaper, !file.isPattern {
-            if let intensity = file.settings.intensity, intensity < 100, theme.overallDarkAppearance == true {
+        if theme.overallDarkAppearance == true {
+            var intensity: Int32?
+            switch wallpaper {
+            case let .image(_, settings):
+                intensity = settings.intensity
+            case let .file(file):
+                if !file.isPattern {
+                    intensity = file.settings.intensity
+                }
+            default:
+                break
+            }
+            if let intensity, intensity < 100 {
                 dimAlpha = 1.0 - max(0.0, min(1.0, Float(intensity) / 100.0))
             }
         }
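The new dimming logic only applies in dark appearance and now reads an intensity from both `.image` and non-pattern `.file` wallpapers, dimming by `1 - intensity / 100` clamped to [0, 1]. A self-contained sketch of that calculation, using a simplified stand-in for the wallpaper type:

```swift
// Simplified sketch of the new dim computation; Wallpaper here is a stand-in for the
// real TelegramWallpaper type, keeping only what the calculation needs.
enum Wallpaper {
    case color(UInt32)
    case image(intensity: Int32?)
    case file(isPattern: Bool, intensity: Int32?)
}

func dimAlpha(for wallpaper: Wallpaper, darkAppearance: Bool) -> Float {
    guard darkAppearance else { return 0.0 }

    var intensity: Int32?
    switch wallpaper {
    case let .image(value):
        intensity = value
    case let .file(isPattern, value):
        // Pattern wallpapers carry their own intensity semantics and are left undimmed here.
        if !isPattern {
            intensity = value
        }
    default:
        break
    }

    guard let intensity, intensity < 100 else { return 0.0 }
    // Intensity 100 means no dim, 0 means fully dimmed; the result is clamped to [0, 1].
    return 1.0 - max(0.0, min(1.0, Float(intensity) / 100.0))
}

// Example: a photo wallpaper at 60% intensity in dark mode yields a dim of 0.4.
let alpha = dimAlpha(for: .file(isPattern: false, intensity: 60), darkAppearance: true)
```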
@@ -258,7 +258,7 @@ public func wallpaperImage(account: Account, accountManager: AccountManager<Tele
             if let thumbnailImage = thumbnailImage {
                 let thumbnailSize = CGSize(width: thumbnailImage.width, height: thumbnailImage.height)
 
-                let initialThumbnailContextFittingSize = fittedSize.fitted(CGSize(width: 90.0, height: 90.0))
+                let initialThumbnailContextFittingSize = fittedSize.fitted(CGSize(width: 240.0, height: 240.0))
 
                 let thumbnailContextSize = thumbnailSize.aspectFitted(initialThumbnailContextFittingSize)
                 guard let thumbnailContext = DrawingContext(size: thumbnailContextSize, scale: 1.0) else {
@@ -268,6 +268,7 @@ public func wallpaperImage(account: Account, accountManager: AccountManager<Tele
                     c.draw(thumbnailImage, in: CGRect(origin: CGPoint(), size: thumbnailContextSize))
                 }
                 telegramFastBlurMore(Int32(thumbnailContextSize.width), Int32(thumbnailContextSize.height), Int32(thumbnailContext.bytesPerRow), thumbnailContext.bytes)
+                telegramFastBlurMore(Int32(thumbnailContextSize.width), Int32(thumbnailContextSize.height), Int32(thumbnailContext.bytesPerRow), thumbnailContext.bytes)
 
                 var thumbnailContextFittingSize = CGSize(width: floor(arguments.drawingSize.width * 0.5), height: floor(arguments.drawingSize.width * 0.5))
                 if thumbnailContextFittingSize.width < 150.0 || thumbnailContextFittingSize.height < 150.0 {
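For blurred wallpaper thumbnails, the fitting box grows from 90x90 to 240x240 and `telegramFastBlurMore` now runs twice, which suggests the goal is a smoother, less pixelated blurred placeholder. The `aspectFitted` helper used above is the project's own CGSize extension; the sketch below shows the underlying aspect-fit math as a stand-alone approximation:

```swift
import CoreGraphics

// Stand-alone approximation of CGSize.aspectFitted(_:): scale a size so it fits
// inside a bounding box while preserving its aspect ratio.
func aspectFitted(_ size: CGSize, into bounding: CGSize) -> CGSize {
    guard size.width > 0.0, size.height > 0.0 else { return bounding }
    let scale = min(bounding.width / size.width, bounding.height / size.height)
    return CGSize(width: floor(size.width * scale), height: floor(size.height * scale))
}

// With the new 240x240 fitting size, a 1280x720 thumbnail is blurred at roughly 240x135
// instead of about 90x50, so far less detail is lost before the final upscale.
let blurContextSize = aspectFitted(CGSize(width: 1280.0, height: 720.0), into: CGSize(width: 240.0, height: 240.0))
```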