Video message reply for stories

Ilya Laktyushin 2023-07-03 23:17:48 +02:00
parent bd418c8689
commit 08fd6f80a3
23 changed files with 289 additions and 90 deletions

View File

@ -995,7 +995,7 @@ private final class DrawingScreenComponent: CombinedComponent {
}
func addTextEntity() {
let textEntity = DrawingTextEntity(text: NSAttributedString(), style: .regular, animation: .none, font: .sanFrancisco, alignment: .center, fontSize: 1.0, color: DrawingColor(color: .white))
let textEntity = DrawingTextEntity(text: NSAttributedString(), style: .filled, animation: .none, font: .sanFrancisco, alignment: .center, fontSize: 1.0, color: DrawingColor(color: .white))
self.insertEntity.invoke(textEntity)
}
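
Both this call site and the matching ones in MediaEditorScreen below now default new text entities to the filled style. A minimal sketch of a shared factory that could consolidate the duplicated initializer call (the helper name is hypothetical; the initializer signature is the one used above):

    // Hypothetical helper, not part of this commit; mirrors the initializer above.
    private func makeDefaultTextEntity() -> DrawingTextEntity {
        return DrawingTextEntity(
            text: NSAttributedString(),
            style: .filled,
            animation: .none,
            font: .sanFrancisco,
            alignment: .center,
            fontSize: 1.0,
            color: DrawingColor(color: .white)
        )
    }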

View File

@ -222,6 +222,7 @@ private final class StickerSelectionComponent: Component {
let context = component.context
let stickerPeekBehavior = EmojiContentPeekBehaviorImpl(
context: context,
forceTheme: defaultDarkColorPresentationTheme,
interaction: nil,
chatPeerId: nil,
present: { c, a in
@ -859,6 +860,7 @@ public class StickerPickerScreen: ViewController {
if let controller = self.controller {
stickerPeekBehavior = EmojiContentPeekBehaviorImpl(
context: controller.context,
forceTheme: defaultDarkColorPresentationTheme,
interaction: nil,
chatPeerId: nil,
present: { [weak controller] c, a in

View File

@ -25,16 +25,18 @@
@property (nonatomic, copy) void(^onStop)(void);
@property (nonatomic, copy) void(^onCancel)(void);
@property (nonatomic, copy) void(^didDismiss)(void);
@property (nonatomic, copy) void(^didStop)(void);
@property (nonatomic, copy) void(^displaySlowmodeTooltip)(void);
@property (nonatomic, copy) void (^presentScheduleController)(void (^)(int32_t));
- (instancetype)initWithContext:(id<LegacyComponentsContext>)context assets:(TGVideoMessageCaptureControllerAssets *)assets transitionInView:(UIView *(^)(void))transitionInView parentController:(TGViewController *)parentController controlsFrame:(CGRect)controlsFrame isAlreadyLocked:(bool (^)(void))isAlreadyLocked liveUploadInterface:(id<TGLiveUploadInterface>)liveUploadInterface pallete:(TGModernConversationInputMicPallete *)pallete slowmodeTimestamp:(int32_t)slowmodeTimestamp slowmodeView:(UIView *(^)(void))slowmodeView canSendSilently:(bool)canSendSilently canSchedule:(bool)canSchedule reminder:(bool)reminder;
- (instancetype)initWithContext:(id<LegacyComponentsContext>)context forStory:(bool)forStory assets:(TGVideoMessageCaptureControllerAssets *)assets transitionInView:(UIView *(^)(void))transitionInView parentController:(TGViewController *)parentController controlsFrame:(CGRect)controlsFrame isAlreadyLocked:(bool (^)(void))isAlreadyLocked liveUploadInterface:(id<TGLiveUploadInterface>)liveUploadInterface pallete:(TGModernConversationInputMicPallete *)pallete slowmodeTimestamp:(int32_t)slowmodeTimestamp slowmodeView:(UIView *(^)(void))slowmodeView canSendSilently:(bool)canSendSilently canSchedule:(bool)canSchedule reminder:(bool)reminder;
- (void)buttonInteractionUpdate:(CGPoint)value;
- (void)setLocked;
- (CGRect)frameForSendButton;
- (void)send;
- (void)complete;
- (void)dismiss:(bool)cancelled;
- (bool)stop;
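
A rough Swift-side sketch of wiring the new story mode through this API, assuming the bridged initializer above together with the didStop and send members it declares; argument values other than forStory are placeholders, not taken from the commit:

    // Sketch only: parameters other than forStory are illustrative placeholders.
    let controller = TGVideoMessageCaptureController(
        context: legacyContext,
        forStory: true,
        assets: assets,
        transitionInView: { return nil },
        parentController: baseController,
        controlsFrame: panelFrame,
        isAlreadyLocked: { return false },
        liveUploadInterface: nil,
        pallete: nil,
        slowmodeTimestamp: 0,
        slowmodeView: nil,
        canSendSilently: false,
        canSchedule: false,
        reminder: false
    )
    controller.didStop = {
        // Recording stopped: the caller can switch its input panel into a preview state.
    }
    // Later, when the user confirms the preview:
    controller.send()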

View File

@ -1,10 +1,11 @@
#import <UIKit/UIKit.h>
#import <LegacyComponents/TGVideoMessageScrubber.h>
#import <LegacyComponents/TGPhotoEditorSparseView.h>
@class TGVideoMessageCaptureControllerAssets;
@class TGModernConversationInputMicPallete;
@interface TGVideoMessageControls : UIView
@interface TGVideoMessageControls : TGPhotoEditorSparseView
@property (nonatomic, readonly) TGVideoMessageScrubber *scrubberView;
@ -23,7 +24,7 @@
@property (nonatomic, weak) id<TGVideoMessageScrubberDelegate, TGVideoMessageScrubberDataSource> parent;
- (instancetype)initWithFrame:(CGRect)frame assets:(TGVideoMessageCaptureControllerAssets *)assets slowmodeTimestamp:(int32_t)slowmodeTimestamp slowmodeView:(UIView *(^)(void))slowmodeView;
- (instancetype)initWithFrame:(CGRect)frame forStory:(bool)forStory assets:(TGVideoMessageCaptureControllerAssets *)assets slowmodeTimestamp:(int32_t)slowmodeTimestamp slowmodeView:(UIView *(^)(void))slowmodeView;
- (void)captureStarted;
- (void)recordingStarted;

View File

@ -39,6 +39,8 @@
- (void)setThumbnailImage:(UIImage *)image forTimestamp:(NSTimeInterval)timestamp isSummaryThubmnail:(bool)isSummaryThumbnail;
- (instancetype)initWithFrame:(CGRect)frame forStory:(bool)forStory;
@end
@protocol TGVideoMessageScrubberDelegate <NSObject>

View File

@ -29,6 +29,7 @@
#import "TGMediaPickerSendActionSheetController.h"
#import "TGOverlayControllerWindow.h"
#import <LegacyComponents/TGPhotoEditorSparseView.h>
const NSTimeInterval TGVideoMessageMaximumDuration = 60.0;
@ -63,12 +64,13 @@ typedef enum
@interface TGVideoMessageCaptureController () <TGVideoCameraPipelineDelegate, TGVideoMessageScrubberDataSource, TGVideoMessageScrubberDelegate, UIGestureRecognizerDelegate>
{
SQueue *_queue;
AVCaptureDevicePosition _preferredPosition;
TGVideoCameraPipeline *_capturePipeline;
NSURL *_url;
PGCameraVolumeButtonHandler *_buttonHandler;
bool _forStory;
bool _autorotationWasEnabled;
bool _dismissed;
bool _gpuAvailable;
@ -149,12 +151,13 @@ typedef enum
@implementation TGVideoMessageCaptureController
- (instancetype)initWithContext:(id<LegacyComponentsContext>)context assets:(TGVideoMessageCaptureControllerAssets *)assets transitionInView:(UIView *(^)(void))transitionInView parentController:(TGViewController *)parentController controlsFrame:(CGRect)controlsFrame isAlreadyLocked:(bool (^)(void))isAlreadyLocked liveUploadInterface:(id<TGLiveUploadInterface>)liveUploadInterface pallete:(TGModernConversationInputMicPallete *)pallete slowmodeTimestamp:(int32_t)slowmodeTimestamp slowmodeView:(UIView *(^)(void))slowmodeView canSendSilently:(bool)canSendSilently canSchedule:(bool)canSchedule reminder:(bool)reminder
- (instancetype)initWithContext:(id<LegacyComponentsContext>)context forStory:(bool)forStory assets:(TGVideoMessageCaptureControllerAssets *)assets transitionInView:(UIView *(^)(void))transitionInView parentController:(TGViewController *)parentController controlsFrame:(CGRect)controlsFrame isAlreadyLocked:(bool (^)(void))isAlreadyLocked liveUploadInterface:(id<TGLiveUploadInterface>)liveUploadInterface pallete:(TGModernConversationInputMicPallete *)pallete slowmodeTimestamp:(int32_t)slowmodeTimestamp slowmodeView:(UIView *(^)(void))slowmodeView canSendSilently:(bool)canSendSilently canSchedule:(bool)canSchedule reminder:(bool)reminder
{
self = [super initWithContext:context];
if (self != nil)
{
_context = context;
_forStory = forStory;
_transitionInView = [transitionInView copy];
self.isAlreadyLocked = isAlreadyLocked;
_liveUploadInterface = liveUploadInterface;
@ -253,15 +256,17 @@ typedef enum
{
[super loadView];
self.view = [[TGPhotoEditorSparseView alloc] initWithFrame:self.view.frame];
self.view.backgroundColor = [UIColor clearColor];
CGFloat bottomOffset = self.view.frame.size.height - CGRectGetMaxY(_controlsFrame);
if (bottomOffset > 44.0) {
bottomOffset = 0.0f;
}
CGRect wrapperFrame = TGIsPad() ? CGRectMake(0.0f, 0.0f, self.view.frame.size.width, CGRectGetMaxY(_controlsFrame) + bottomOffset) : CGRectMake(0.0f, 0.0f, self.view.frame.size.width, CGRectGetMinY(_controlsFrame));
CGRect wrapperFrame = TGIsPad() || _forStory ? CGRectMake(0.0f, 0.0f, self.view.frame.size.width, CGRectGetMaxY(_controlsFrame) + bottomOffset) : CGRectMake(0.0f, 0.0f, self.view.frame.size.width, CGRectGetMinY(_controlsFrame));
_wrapperView = [[UIView alloc] initWithFrame:wrapperFrame];
_wrapperView = [[TGPhotoEditorSparseView alloc] initWithFrame:wrapperFrame];
_wrapperView.clipsToBounds = true;
[self.view addSubview:_wrapperView];
@ -278,7 +283,9 @@ typedef enum
effect = [UIBlurEffect effectWithStyle:self.pallete.isDark ? UIBlurEffectStyleDark : UIBlurEffectStyleLight];
_blurView = [[UIVisualEffectView alloc] initWithEffect:effect];
[_wrapperView addSubview:_blurView];
if (!_forStory) {
[_wrapperView addSubview:_blurView];
}
if (type == TGVideoMessageTransitionTypeSimplified)
{
@ -289,7 +296,9 @@ typedef enum
_fadeView = [[UIView alloc] initWithFrame:fadeFrame];
_fadeView.alpha = 0.0f;
_fadeView.backgroundColor = [curtainColor colorWithAlphaComponent:0.4f];
[_wrapperView addSubview:_fadeView];
if (!_forStory) {
[_wrapperView addSubview:_fadeView];
}
}
}
else
@ -297,7 +306,9 @@ typedef enum
_fadeView = [[UIView alloc] initWithFrame:fadeFrame];
_fadeView.alpha = 0.0f;
_fadeView.backgroundColor = [curtainColor colorWithAlphaComponent:0.6f];
[_wrapperView addSubview:_fadeView];
if (!_forStory) {
[_wrapperView addSubview:_fadeView];
}
}
CGFloat minSide = MIN(_wrapperView.frame.size.width, _wrapperView.frame.size.height);
@ -352,7 +363,7 @@ typedef enum
CGRect controlsFrame = _controlsFrame;
_controlsView = [[TGVideoMessageControls alloc] initWithFrame:controlsFrame assets:_assets slowmodeTimestamp:_slowmodeTimestamp slowmodeView:_slowmodeView];
_controlsView = [[TGVideoMessageControls alloc] initWithFrame:controlsFrame forStory:_forStory assets:_assets slowmodeTimestamp:_slowmodeTimestamp slowmodeView:_slowmodeView];
_controlsView.pallete = self.pallete;
_controlsView.clipsToBounds = true;
_controlsView.parent = self;
@ -406,7 +417,9 @@ typedef enum
_separatorView = [[UIView alloc] initWithFrame:CGRectMake(controlsFrame.origin.x, controlsFrame.origin.y - TGScreenPixel, controlsFrame.size.width, TGScreenPixel)];
_separatorView.backgroundColor = self.pallete != nil ? self.pallete.borderColor : UIColorRGB(0xb2b2b2);
_separatorView.userInteractionEnabled = false;
[self.view addSubview:_separatorView];
if (!_forStory) {
[self.view addSubview:_separatorView];
}
if ([TGVideoCameraPipeline cameraPositionChangeAvailable])
{
@ -634,7 +647,7 @@ typedef enum
{
[super viewWillLayoutSubviews];
CGRect fadeFrame = TGIsPad() ? self.view.bounds : CGRectMake(0.0f, 0.0f, _wrapperView.frame.size.width, _wrapperView.frame.size.height);
CGRect fadeFrame = TGIsPad() || _forStory ? self.view.bounds : CGRectMake(0.0f, 0.0f, _wrapperView.frame.size.width, _wrapperView.frame.size.height);
_blurView.frame = fadeFrame;
}
@ -682,6 +695,7 @@ typedef enum
self.view.backgroundColor = [UIColor clearColor];
self.view.userInteractionEnabled = false;
_circleWrapperView.layer.allowsGroupOpacity = true;
[UIView animateWithDuration:0.15 animations:^
{
_circleWrapperView.alpha = 0.0f;
@ -746,9 +760,18 @@ typedef enum
[_activityDisposable dispose];
[self stopRecording:^{}];
if (self.didStop != nil) {
self.didStop();
}
return true;
}
- (void)send
{
[self sendPressed];
}
- (bool)sendPressed
{
if (_slowmodeTimestamp != 0) {

View File

@ -36,6 +36,8 @@ static CGRect viewFrame(UIView *view)
UIImageView *_slideToCancelArrow;
UILabel *_slideToCancelLabel;
bool _forStory;
TGModernButton *_cancelButton;
TGModernButton *_deleteButton;
@ -59,10 +61,11 @@ static CGRect viewFrame(UIView *view)
@implementation TGVideoMessageControls
- (instancetype)initWithFrame:(CGRect)frame assets:(TGVideoMessageCaptureControllerAssets *)assets slowmodeTimestamp:(int32_t)slowmodeTimestamp slowmodeView:(UIView *(^)(void))slowmodeView {
- (instancetype)initWithFrame:(CGRect)frame forStory:(bool)forStory assets:(TGVideoMessageCaptureControllerAssets *)assets slowmodeTimestamp:(int32_t)slowmodeTimestamp slowmodeView:(UIView *(^)(void))slowmodeView {
self = [super initWithFrame:frame];
if (self != nil) {
_assets = assets;
_forStory = forStory;
_slowmodeTimestamp = slowmodeTimestamp;
_generateSlowmodeView = [slowmodeView copy];
}
@ -270,8 +273,12 @@ static CGRect viewFrame(UIView *view)
_sendButton.transform = CGAffineTransformMakeScale(0.01, 0.01);
_sendButton.alpha = 0.0f;
transform = CGAffineTransformMakeTranslation(0.0f, -44.0f);
transform = CGAffineTransformScale(transform, 0.25f, 0.25f);
if (_forStory) {
transform = CGAffineTransformMakeScale(0.25f, 0.25f);
} else {
transform = CGAffineTransformMakeTranslation(0.0f, -44.0f);
transform = CGAffineTransformScale(transform, 0.25f, 0.25f);
}
_deleteButton.transform = transform;
_deleteButton.alpha = 0.0f;
@ -361,7 +368,9 @@ static CGRect viewFrame(UIView *view)
_deleteButton.adjustsImageWhenDisabled = false;
_deleteButton.adjustsImageWhenHighlighted = false;
[_deleteButton addTarget:self action:@selector(deleteButtonPressed) forControlEvents:UIControlEventTouchUpInside];
[self addSubview:_deleteButton];
if (!_forStory) {
[self addSubview:_deleteButton];
}
CGAffineTransform transform = CGAffineTransformMakeTranslation(0.0f, 45.0f);
transform = CGAffineTransformScale(transform, 0.88f, 0.88f);
@ -380,7 +389,9 @@ static CGRect viewFrame(UIView *view)
_longPressGestureRecognizer.minimumPressDuration = 0.4;
[_sendButton addGestureRecognizer:_longPressGestureRecognizer];
[self addSubview:_sendButton];
if (!_forStory) {
[self addSubview:_sendButton];
}
if (_slowmodeTimestamp != 0) {
int32_t timestamp = (int32_t)[[NSDate date] timeIntervalSince1970];
@ -410,16 +421,21 @@ static CGRect viewFrame(UIView *view)
}
}
_scrubberView = [[TGVideoMessageScrubber alloc] init];
_scrubberView = [[TGVideoMessageScrubber alloc] initWithFrame:CGRectZero forStory:_forStory];
_scrubberView.pallete = self.pallete;
_scrubberView.dataSource = self.parent;
_scrubberView.delegate = self.parent;
[self addSubview:_scrubberView];
if (_forStory) {
_scrubberView.alpha = 0.0f;
}
[self layoutSubviews];
transform = CGAffineTransformMakeTranslation(0.0f, 44.0f);
_scrubberView.transform = transform;
//transform = CGAffineTransformMakeTranslation(0.0f, 44.0f);
//_scrubberView.transform = transform;
int animationCurveOption = iosMajorVersion() >= 7 ? (7 << 16) : 0;
[UIView animateWithDuration:0.25 delay:0.0 options:UIViewAnimationOptionBeginFromCurrentState | animationCurveOption animations:^
@ -467,13 +483,17 @@ static CGRect viewFrame(UIView *view)
- (void)showScrubberView
{
int animationCurveOption = iosMajorVersion() >= 7 ? (7 << 16) : 0;
[UIView animateWithDuration:0.2 delay:0.0 options:UIViewAnimationOptionBeginFromCurrentState | animationCurveOption animations:^
{
CGAffineTransform transform = CGAffineTransformMakeTranslation(0.0f, -22.0f);
transform = CGAffineTransformScale(transform, 0.25f, 0.25f);
_scrubberView.transform = CGAffineTransformIdentity;
} completion:nil];
if (_forStory) {
CGAffineTransform transform = CGAffineTransformMakeScale(0.25f, 0.25f);
_scrubberView.transform = transform;
[UIView animateWithDuration:0.2 delay:0.0 options:UIViewAnimationOptionBeginFromCurrentState | animationCurveOption animations:^{
_scrubberView.transform = CGAffineTransformIdentity;
} completion:nil];
[UIView animateWithDuration:0.2 animations:^{
_scrubberView.alpha = 1.0f;
}];
}
}
- (void)deleteButtonPressed
@ -570,7 +590,17 @@ static CGFloat floorToScreenPixels(CGFloat value) {
_sendButton.layer.sublayerTransform = CATransform3DIdentity;
}
_deleteButton.center = CGPointMake(24.0f, self.bounds.size.height / 2.0f);
setViewFrame(_scrubberView, CGRectMake(46.0f, (self.frame.size.height - 33.0f) / 2.0f, self.frame.size.width - 46.0f * 2.0f, 33.0f));
CGFloat offset = 0.0f;
CGFloat height = 33.0f;
CGFloat sideInset = 46.0f;
if (_forStory) {
offset += 4.0;
sideInset -= 5.0f;
height = 40.0f;
}
setViewFrame(_scrubberView, CGRectMake(sideInset, (self.frame.size.height - height) / 2.0f + offset, self.frame.size.width - sideInset * 2.0f, height));
}
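
For a concrete sense of the layout change above, a small Swift sketch that mirrors the Objective-C math; the 375x50 controls size is purely illustrative:

    // Mirrors the scrubber frame computation above; sizes are illustrative.
    func scrubberFrame(forStory: Bool, in bounds: CGRect) -> CGRect {
        var offset: CGFloat = 0.0
        var height: CGFloat = 33.0
        var sideInset: CGFloat = 46.0
        if forStory {
            offset += 4.0
            sideInset -= 5.0
            height = 40.0
        }
        return CGRect(x: sideInset,
                      y: (bounds.height - height) / 2.0 + offset,
                      width: bounds.width - sideInset * 2.0,
                      height: height)
    }

    // scrubberFrame(forStory: true,  in: CGRect(x: 0, y: 0, width: 375, height: 50)) -> (41.0, 9.0, 293.0, 40.0)
    // scrubberFrame(forStory: false, in: CGRect(x: 0, y: 0, width: 375, height: 50)) -> (46.0, 8.5, 283.0, 33.0)

So stories get a slightly taller, wider scrubber, shifted 4 points down.
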
- (CGRect)frameForSendButton {

View File

@ -25,6 +25,8 @@ typedef enum
@interface TGVideoMessageScrubber () <UIGestureRecognizerDelegate>
{
bool _forStory;
UIControl *_wrapperView;
UIView *_summaryThumbnailSnapshotView;
UIView *_zoomedThumbnailWrapperView;
@ -64,24 +66,27 @@ typedef enum
@implementation TGVideoMessageScrubber
- (instancetype)initWithFrame:(CGRect)frame
- (instancetype)initWithFrame:(CGRect)frame forStory:(bool)forStory
{
self = [super initWithFrame:frame];
if (self != nil)
{
_allowsTrimming = true;
_forStory = forStory;
self.clipsToBounds = true;
self.layer.cornerRadius = 16.0f;
_wrapperView = [[UIControl alloc] initWithFrame:CGRectMake(0, 0, 0, 33)];
CGFloat height = _forStory ? 40.0 : 33.0;
_wrapperView = [[UIControl alloc] initWithFrame:CGRectMake(0, 0, 0, height)];
_wrapperView.hitTestEdgeInsets = UIEdgeInsetsMake(-5, -10, -5, -10);
[self addSubview:_wrapperView];
_zoomedThumbnailWrapperView = [[UIView alloc] initWithFrame:CGRectMake(0, 0, 0, 33)];
_zoomedThumbnailWrapperView = [[UIView alloc] initWithFrame:CGRectMake(0, 0, 0, height)];
[_wrapperView addSubview:_zoomedThumbnailWrapperView];
_summaryThumbnailWrapperView = [[UIView alloc] initWithFrame:CGRectMake(0, 0, 0, 33)];
_summaryThumbnailWrapperView = [[UIView alloc] initWithFrame:CGRectMake(0, 0, 0, height)];
_summaryThumbnailWrapperView.clipsToBounds = true;
[_wrapperView addSubview:_summaryThumbnailWrapperView];
@ -100,7 +105,7 @@ typedef enum
[_wrapperView addSubview:_rightCurtainView];
__weak TGVideoMessageScrubber *weakSelf = self;
_trimView = [[TGVideoMessageTrimView alloc] initWithFrame:CGRectZero];
_trimView = [[TGVideoMessageTrimView alloc] initWithFrame:CGRectZero forStory:forStory];
_trimView.exclusiveTouch = true;
_trimView.trimmingEnabled = _allowsTrimming;
_trimView.didBeginEditing = ^(__unused bool start)
@ -264,7 +269,7 @@ typedef enum
};
[_wrapperView addSubview:_trimView];
_scrubberHandle = [[UIControl alloc] initWithFrame:CGRectMake(0, -1, 8, 33.0f)];
_scrubberHandle = [[UIControl alloc] initWithFrame:CGRectMake(0, -1, 8, height)];
_scrubberHandle.hitTestEdgeInsets = UIEdgeInsetsMake(-5, -10, -5, -10);
//[_wrapperView addSubview:_scrubberHandle];
@ -310,7 +315,10 @@ typedef enum
_rightCurtainView.backgroundColor = [pallete.backgroundColor colorWithAlphaComponent:0.8f];
CGSize size = _leftMaskView.image.size;
UIGraphicsBeginImageContextWithOptions(_leftMaskView.image.size, false, 0.0f);
if (_forStory) {
size.height = 40.0;
}
UIGraphicsBeginImageContextWithOptions(size, false, 0.0f);
CGContextRef context = UIGraphicsGetCurrentContext();
CGContextSetFillColorWithColor(context, pallete.backgroundColor.CGColor);
CGContextFillRect(context, CGRectMake(0.0f, 0.0f, size.width, size.height));
@ -323,13 +331,15 @@ typedef enum
_leftMaskView.image = maskView;
_rightMaskView.image = [UIImage imageWithCGImage:maskView.CGImage scale:maskView.scale orientation:UIImageOrientationUpMirrored];
size = CGSizeMake(16.0f, 33.0f);
UIGraphicsBeginImageContextWithOptions(_leftMaskView.image.size, false, 0.0f);
CGFloat height = _forStory ? 40.0f : 33.0f;
size = CGSizeMake(16.0f, height);
UIGraphicsBeginImageContextWithOptions(size, false, 0.0f);
context = UIGraphicsGetCurrentContext();
CGContextSetFillColorWithColor(context, pallete.buttonColor.CGColor);
CGContextFillEllipseInRect(context, CGRectMake(0.0f, 0.0f, size.width * 2.0f, size.height));
CGContextSetFillColorWithColor(context, pallete.iconColor.CGColor);
UIBezierPath *path = [UIBezierPath bezierPathWithRoundedRect:CGRectMake(8.0f, 12.0f, 1.666f, 9.0f) cornerRadius:0.833f];
UIBezierPath *path = [UIBezierPath bezierPathWithRoundedRect:CGRectMake(8.0f, (size.height - 9.0) / 2.0, 1.666f, 9.0f) cornerRadius:0.833f];
CGContextAddPath(context, path.CGPath);
CGContextFillPath(context);
UIImage *handleImage = UIGraphicsGetImageFromCurrentImageContext();
@ -477,7 +487,8 @@ typedef enum
- (CGSize)_thumbnailSizeWithAspectRatio:(CGFloat)__unused aspectRatio orientation:(UIImageOrientation)__unused orientation
{
return CGSizeMake(33, 33);
CGFloat height = _forStory ? 40.0f : 33.0f;
return CGSizeMake(height, height);
}
- (void)_layoutSummaryThumbnailViews
@ -785,7 +796,8 @@ typedef enum
origin = 2;
}
return CGRectMake(origin, 0, width, 33);
CGFloat height = _forStory ? 40.0f : 33.0f;
return CGRectMake(origin, 0, width, height);
}
#pragma mark - Trimming
@ -861,7 +873,8 @@ typedef enum
CGFloat minX = (CGFloat)startPosition * trimRect.size.width / (CGFloat)duration + trimRect.origin.x - normalScrubbingRect.origin.x;
CGFloat maxX = (CGFloat)endPosition * trimRect.size.width / (CGFloat)duration + trimRect.origin.x + normalScrubbingRect.origin.x;
return CGRectMake(minX, 0, maxX - minX, 33);
CGFloat height = _forStory ? 40.0f : 33.0f;
return CGRectMake(minX, 0, maxX - minX, height);
}
- (void)_layoutTrimView
@ -887,13 +900,15 @@ typedef enum
_leftCurtainView.hidden = !self.allowsTrimming;
_rightCurtainView.hidden = !self.allowsTrimming;
CGFloat height = _forStory ? 40.0f : 33.0f;
if (self.allowsTrimming)
{
CGRect scrubbingRect = [self _scrubbingRect];
CGRect normalScrubbingRect = [self _scrubbingRect];
_leftCurtainView.frame = CGRectMake(scrubbingRect.origin.x - 16.0f, 0.0f, _trimView.frame.origin.x - scrubbingRect.origin.x + normalScrubbingRect.origin.x + 16.0f, 33);
_rightCurtainView.frame = CGRectMake(CGRectGetMaxX(_trimView.frame) - 16.0f, 0.0f, scrubbingRect.origin.x + scrubbingRect.size.width - CGRectGetMaxX(_trimView.frame) - scrubbingRect.origin.x + normalScrubbingRect.origin.x + 32.0f, 33);
_leftCurtainView.frame = CGRectMake(scrubbingRect.origin.x - 16.0f, 0.0f, _trimView.frame.origin.x - scrubbingRect.origin.x + normalScrubbingRect.origin.x + 16.0f, height);
_rightCurtainView.frame = CGRectMake(CGRectGetMaxX(_trimView.frame) - 16.0f, 0.0f, scrubbingRect.origin.x + scrubbingRect.size.width - CGRectGetMaxX(_trimView.frame) - scrubbingRect.origin.x + normalScrubbingRect.origin.x + 32.0f, height);
}
}
@ -903,16 +918,20 @@ typedef enum
{
[super setFrame:frame];
_summaryThumbnailWrapperView.frame = CGRectMake(0.0f, 0.0f, frame.size.width, 33);
CGFloat height = _forStory ? 40.0f : 33.0f;
_summaryThumbnailWrapperView.frame = CGRectMake(0.0f, 0.0f, frame.size.width, height);
_zoomedThumbnailWrapperView.frame = _summaryThumbnailWrapperView.frame;
_leftMaskView.frame = CGRectMake(0.0f, 0.0f, 16.0f, 33.0f);
_rightMaskView.frame = CGRectMake(frame.size.width - 16.0f, 0.0f, 16.0f, 33.0f);
_leftMaskView.frame = CGRectMake(0.0f, 0.0f, 16.0f, height);
_rightMaskView.frame = CGRectMake(frame.size.width - 16.0f, 0.0f, 16.0f, height);
}
- (void)layoutSubviews
{
_wrapperView.frame = CGRectMake(0, 0, self.frame.size.width, 33);
CGFloat height = _forStory ? 40.0f : 33.0f;
_wrapperView.frame = CGRectMake(0, 0, self.frame.size.width, height);
[self _layoutTrimView];
[self _updateScrubberAnimationsAndResetCurrentPosition:true];

View File

@ -12,4 +12,6 @@
- (void)setTrimming:(bool)trimming animated:(bool)animated;
- (void)setLeftHandleImage:(UIImage *)leftHandleImage rightHandleImage:(UIImage *)rightHandleImage;
- (instancetype)initWithFrame:(CGRect)frame forStory:(bool)forStory;
@end

View File

@ -23,21 +23,23 @@
@implementation TGVideoMessageTrimView
- (instancetype)initWithFrame:(CGRect)frame
- (instancetype)initWithFrame:(CGRect)frame forStory:(bool)forStory
{
self = [super initWithFrame:frame];
if (self != nil)
{
self.hitTestEdgeInsets = UIEdgeInsetsMake(-5, -25, -5, -25);
_leftSegmentView = [[UIButton alloc] initWithFrame:CGRectMake(0, 0, 16, 33)];
CGFloat height = forStory ? 40.0 : 33.0;
_leftSegmentView = [[UIButton alloc] initWithFrame:CGRectMake(0, 0, 16, height)];
_leftSegmentView.exclusiveTouch = true;
_leftSegmentView.adjustsImageWhenHighlighted = false;
[_leftSegmentView setBackgroundImage:TGComponentsImageNamed(@"VideoMessageLeftHandle") forState:UIControlStateNormal];
_leftSegmentView.hitTestEdgeInsets = UIEdgeInsetsMake(-5, -25, -5, -10);
[self addSubview:_leftSegmentView];
_rightSegmentView = [[UIButton alloc] initWithFrame:CGRectMake(0, 0, 16, 33)];
_rightSegmentView = [[UIButton alloc] initWithFrame:CGRectMake(0, 0, 16, height)];
_rightSegmentView.exclusiveTouch = true;
_rightSegmentView.adjustsImageWhenHighlighted = false;
[_rightSegmentView setBackgroundImage:TGComponentsImageNamed(@"VideoMessageRightHandle") forState:UIControlStateNormal];

View File

@ -34,6 +34,7 @@ private final class LegacyComponentsOverlayWindowManagerImpl: NSObject, LegacyCo
init(parentController: ViewController?, theme: PresentationTheme?) {
self.parentController = parentController
self.controller = LegacyController(presentation: .custom, theme: theme)
self.controller?.hasSparseContainerView = (parentController as? LegacyController)?.hasSparseContainerView ?? false
super.init()
@ -429,6 +430,14 @@ open class LegacyController: ViewController, PresentableController, AttachmentCo
}
private var enableContainerLayoutUpdates = false
public var hasSparseContainerView = false {
didSet {
if self.isNodeLoaded {
self.controllerNode.hasSparseContainerView = self.hasSparseContainerView
}
}
}
public var disposables = DisposableSet()
open var requestAttachmentMenuExpansion: () -> Void = {}
@ -483,6 +492,8 @@ open class LegacyController: ViewController, PresentableController, AttachmentCo
override open func loadDisplayNode() {
self.displayNode = LegacyControllerNode()
self.displayNodeDidLoad()
self.controllerNode.hasSparseContainerView = self.hasSparseContainerView
}
override open func viewWillAppear(_ animated: Bool) {

View File

@ -3,6 +3,17 @@ import UIKit
import AsyncDisplayKit
import Display
private class LegacyTracingLayerView: UITracingLayerView {
var isSparse = false
override func hitTest(_ point: CGPoint, with event: UIEvent?) -> UIView? {
let result = super.hitTest(point, with: event)
if result === self && self.isSparse {
return nil
}
return result
}
}
final class LegacyControllerNode: ASDisplayNode {
private var containerLayout: ContainerViewLayout?
@ -15,16 +26,30 @@ final class LegacyControllerNode: ASDisplayNode {
}
}
public var hasSparseContainerView = false {
didSet {
if self.isNodeLoaded {
(self.view as? LegacyTracingLayerView)?.isSparse = self.hasSparseContainerView
}
}
}
override init() {
super.init()
self.setViewBlock({
return UITracingLayerView()
return LegacyTracingLayerView()
})
// self.clipsToBounds = true
}
override func didLoad() {
super.didLoad()
(self.view as? LegacyTracingLayerView)?.isSparse = self.hasSparseContainerView
}
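
The isSparse flag makes the legacy container return nil from hitTest when a touch lands on the container itself, so empty areas pass touches through. A short usage sketch, assuming a caller similar to InstantVideoController later in this commit:

    // Sketch: an overlay LegacyController whose empty areas pass touches through.
    let legacyController = LegacyController(presentation: .custom, theme: theme)
    legacyController.hasSparseContainerView = true
    // Only the legacy subviews (e.g. the recording controls) receive touches;
    // taps elsewhere reach the story view below the overlay.
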
func containerLayoutUpdated(_ layout: ContainerViewLayout, navigationBarHeight: CGFloat, transition: ContainedViewLayoutTransition) {
self.containerLayout = layout
let size = CGSize(width: layout.size.width - layout.intrinsicInsets.left - layout.intrinsicInsets.right, height: layout.size.height)

View File

@ -2647,6 +2647,7 @@ public final class EmojiContentPeekBehaviorImpl: EmojiContentPeekBehavior {
}
private let context: AccountContext
private let forceTheme: PresentationTheme?
private let interaction: Interaction?
private let chatPeerId: EnginePeer.Id?
private let present: (ViewController, Any?) -> Void
@ -2654,8 +2655,9 @@ public final class EmojiContentPeekBehaviorImpl: EmojiContentPeekBehavior {
private var viewRecords: [ViewRecord] = []
private weak var peekController: PeekController?
public init(context: AccountContext, interaction: Interaction?, chatPeerId: EnginePeer.Id?, present: @escaping (ViewController, Any?) -> Void) {
public init(context: AccountContext, forceTheme: PresentationTheme? = nil, interaction: Interaction?, chatPeerId: EnginePeer.Id?, present: @escaping (ViewController, Any?) -> Void) {
self.context = context
self.forceTheme = forceTheme
self.interaction = interaction
self.chatPeerId = chatPeerId
self.present = present
@ -2926,7 +2928,10 @@ public final class EmojiContentPeekBehaviorImpl: EmojiContentPeekBehavior {
return nil
}
let presentationData = strongSelf.context.sharedContext.currentPresentationData.with { $0 }
var presentationData = strongSelf.context.sharedContext.currentPresentationData.with { $0 }
if let forceTheme = strongSelf.forceTheme {
presentationData = presentationData.withUpdated(theme: forceTheme)
}
let controller = PeekController(presentationData: presentationData, content: content, sourceView: {
return (sourceView, sourceRect)
})
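
Because forceTheme defaults to nil, existing call sites keep compiling unchanged; only the sticker picker opts into the forced dark theme. A minimal caller-side sketch, using the same arguments as the StickerPickerScreen hunk above:

    // Sketch of the opt-in: mirrors the call added in StickerPickerScreen.
    let peekBehavior = EmojiContentPeekBehaviorImpl(
        context: context,
        forceTheme: defaultDarkColorPresentationTheme,
        interaction: nil,
        chatPeerId: nil,
        present: { controller, arguments in
            // present the peek controller over the dark story UI
        }
    )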

View File

@ -22,6 +22,7 @@ public final class InstantVideoController: LegacyController, StandalonePresentab
public var onDismiss: ((Bool) -> Void)?
public var onStop: (() -> Void)?
public var didStop: (() -> Void)?
private let micLevelValue = ValuePromise<Float>(0.0)
private let durationValue = ValuePromise<TimeInterval>(0.0)
@ -35,6 +36,7 @@ public final class InstantVideoController: LegacyController, StandalonePresentab
super.init(presentation: presentation, theme: theme, initialLayout: initialLayout)
self.hasSparseContainerView = true
self.lockOrientation = true
}
@ -55,12 +57,16 @@ public final class InstantVideoController: LegacyController, StandalonePresentab
self?.durationValue.set(duration)
}
captureController.onDismiss = { [weak self] _, isCancelled in
guard let strongSelf = self else { return }
if !strongSelf.dismissed {
self?.dismissed = true
self?.onDismiss?(isCancelled)
guard let self = self else { return }
if !self.dismissed {
self.dismissed = true
self.onDismiss?(isCancelled)
}
}
captureController.didStop = { [weak self] in
guard let self else { return }
self.didStop?()
}
captureController.onStop = { [weak self] in
self?.onStop?()
}
@ -114,6 +120,12 @@ public final class InstantVideoController: LegacyController, StandalonePresentab
captureController.buttonInteractionUpdate(CGPoint(x: value, y: 0.0))
}
}
public func send() {
if let captureController = self.captureController {
captureController.send()
}
}
}
public func legacyInputMicPalette(from theme: PresentationTheme) -> TGModernConversationInputMicPallete {
@ -121,7 +133,7 @@ public func legacyInputMicPalette(from theme: PresentationTheme) -> TGModernConv
return TGModernConversationInputMicPallete(dark: theme.overallDarkAppearance, buttonColor: inputPanelTheme.actionControlFillColor, iconColor: inputPanelTheme.actionControlForegroundColor, backgroundColor: theme.rootController.navigationBar.opaqueBackgroundColor, borderColor: inputPanelTheme.panelSeparatorColor, lock: inputPanelTheme.panelControlAccentColor, textColor: inputPanelTheme.primaryTextColor, secondaryTextColor: inputPanelTheme.secondaryTextColor, recording: inputPanelTheme.mediaRecordingDotColor)
}
public func legacyInstantVideoController(theme: PresentationTheme, panelFrame: CGRect, context: AccountContext, peerId: PeerId, slowmodeState: ChatSlowmodeState?, hasSchedule: Bool, send: @escaping (InstantVideoController, EnqueueMessage?) -> Void, displaySlowmodeTooltip: @escaping (UIView, CGRect) -> Void, presentSchedulePicker: @escaping (@escaping (Int32) -> Void) -> Void) -> InstantVideoController {
public func legacyInstantVideoController(theme: PresentationTheme, forStory: Bool, panelFrame: CGRect, context: AccountContext, peerId: PeerId, slowmodeState: ChatSlowmodeState?, hasSchedule: Bool, send: @escaping (InstantVideoController, EnqueueMessage?) -> Void, displaySlowmodeTooltip: @escaping (UIView, CGRect) -> Void, presentSchedulePicker: @escaping (@escaping (Int32) -> Void) -> Void) -> InstantVideoController {
let isSecretChat = peerId.namespace == Namespaces.Peer.SecretChat
let legacyController = InstantVideoController(presentation: .custom, theme: theme)
@ -129,6 +141,7 @@ public func legacyInstantVideoController(theme: PresentationTheme, panelFrame: C
legacyController.lockOrientation = true
legacyController.statusBar.statusBarStyle = .Hide
let baseController = TGViewController(context: legacyController.context)!
baseController.view.isUserInteractionEnabled = false
legacyController.bind(controller: baseController)
legacyController.presentationCompleted = { [weak legacyController, weak baseController] in
if let legacyController = legacyController, let baseController = baseController {
@ -143,7 +156,7 @@ public func legacyInstantVideoController(theme: PresentationTheme, panelFrame: C
slowmodeValidUntil = timestamp
}
let controller = TGVideoMessageCaptureController(context: legacyController.context, assets: TGVideoMessageCaptureControllerAssets(send: PresentationResourcesChat.chatInputPanelSendButtonImage(theme)!, slideToCancel: PresentationResourcesChat.chatInputPanelMediaRecordingCancelArrowImage(theme)!, actionDelete: generateTintedImage(image: UIImage(bundleImageName: "Chat/Input/Accessory Panels/MessageSelectionTrash"), color: theme.chat.inputPanel.panelControlAccentColor))!, transitionInView: {
let controller = TGVideoMessageCaptureController(context: legacyController.context, forStory: forStory, assets: TGVideoMessageCaptureControllerAssets(send: PresentationResourcesChat.chatInputPanelSendButtonImage(theme)!, slideToCancel: PresentationResourcesChat.chatInputPanelMediaRecordingCancelArrowImage(theme)!, actionDelete: generateTintedImage(image: UIImage(bundleImageName: "Chat/Input/Accessory Panels/MessageSelectionTrash"), color: theme.chat.inputPanel.panelControlAccentColor))!, transitionInView: {
return nil
}, parentController: baseController, controlsFrame: panelFrame, isAlreadyLocked: {
return false

View File

@ -1139,6 +1139,7 @@ final class MediaEditorScreenComponent: Component {
videoRecordingStatus: nil,
isRecordingLocked: false,
recordedAudioPreview: nil,
hasRecordedVideoPreview: false,
wasRecordingDismissed: false,
timeoutValue: timeoutValue,
timeoutSelected: timeoutSelected,
@ -2201,7 +2202,7 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
if let layout = self.validLayout, (layout.inputHeight ?? 0.0) > 0.0 {
self.view.endEditing(true)
} else {
let textEntity = DrawingTextEntity(text: NSAttributedString(), style: .regular, animation: .none, font: .sanFrancisco, alignment: .center, fontSize: 1.0, color: DrawingColor(color: .white))
let textEntity = DrawingTextEntity(text: NSAttributedString(), style: .filled, animation: .none, font: .sanFrancisco, alignment: .center, fontSize: 1.0, color: DrawingColor(color: .white))
self.interaction?.insertEntity(textEntity)
}
}
@ -2803,7 +2804,7 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
self.controller?.present(controller, in: .window(.root))
return
case .text:
let textEntity = DrawingTextEntity(text: NSAttributedString(), style: .regular, animation: .none, font: .sanFrancisco, alignment: .center, fontSize: 1.0, color: DrawingColor(color: .white))
let textEntity = DrawingTextEntity(text: NSAttributedString(), style: .filled, animation: .none, font: .sanFrancisco, alignment: .center, fontSize: 1.0, color: DrawingColor(color: .white))
self.interaction?.insertEntity(textEntity)
self.hasAnyChanges = true
@ -3325,7 +3326,7 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
let text = "Subscribe to **Telegram Premium** to make your stories disappear \(timeoutString)."
let context = self.context
let controller = UndoOverlayController(presentationData: presentationData, content: .autoDelete(isOn: true, title: nil, text: text, customUndoText: "More"), elevatedLayout: false, position: .bottom, animateInAsReplacement: false, action: { [weak self] action in
let controller = UndoOverlayController(presentationData: presentationData, content: .autoDelete(isOn: true, title: nil, text: text, customUndoText: "More"), elevatedLayout: false, position: .top, animateInAsReplacement: false, action: { [weak self] action in
if case .undo = action, let self {
let controller = context.sharedContext.makePremiumIntroController(context: context, source: .settings)
self.push(controller)

View File

@ -272,6 +272,7 @@ final class StoryPreviewComponent: Component {
videoRecordingStatus: nil,
isRecordingLocked: false,
recordedAudioPreview: nil,
hasRecordedVideoPreview: false,
wasRecordingDismissed: false,
timeoutValue: nil,
timeoutSelected: false,

View File

@ -120,9 +120,7 @@ public final class MediaPreviewPanelComponent: Component {
public let vibrancyContainer: UIView
private let trackingLayer: HierarchyTrackingLayer
private let indicator = ComponentView<Empty>()
private let timerFont: UIFont
private let timerText = ComponentView<Empty>()

View File

@ -173,7 +173,7 @@ public final class MessageInputActionButtonComponent: Component {
if self.micButton == nil {
let micButton = ChatTextInputMediaRecordingButton(
context: component.context,
theme: component.theme,
theme: defaultDarkPresentationTheme,
useDarkTheme: true,
strings: component.strings,
presentController: component.presentController
@ -182,6 +182,8 @@ public final class MessageInputActionButtonComponent: Component {
micButton.statusBarHost = component.context.sharedContext.mainWindow?.statusBarHost
self.addSubview(micButton)
micButton.disablesInteractiveKeyboardGestureRecognizer = true
micButton.beginRecording = { [weak self] in
guard let self, let component = self.component else {
return
@ -356,7 +358,7 @@ public final class MessageInputActionButtonComponent: Component {
if let micButton = self.micButton {
if themeUpdated {
micButton.updateTheme(theme: component.theme)
micButton.updateTheme(theme: defaultDarkPresentationTheme)
}
let micButtonFrame = CGRect(origin: CGPoint(), size: availableSize)

View File

@ -86,6 +86,7 @@ public final class MessageInputPanelComponent: Component {
public let videoRecordingStatus: InstantVideoControllerRecordingStatus?
public let isRecordingLocked: Bool
public let recordedAudioPreview: ChatRecordedMediaPreview?
public let hasRecordedVideoPreview: Bool
public let wasRecordingDismissed: Bool
public let timeoutValue: String?
public let timeoutSelected: Bool
@ -124,6 +125,7 @@ public final class MessageInputPanelComponent: Component {
videoRecordingStatus: InstantVideoControllerRecordingStatus?,
isRecordingLocked: Bool,
recordedAudioPreview: ChatRecordedMediaPreview?,
hasRecordedVideoPreview: Bool,
wasRecordingDismissed: Bool,
timeoutValue: String?,
timeoutSelected: Bool,
@ -162,6 +164,7 @@ public final class MessageInputPanelComponent: Component {
self.isRecordingLocked = isRecordingLocked
self.wasRecordingDismissed = wasRecordingDismissed
self.recordedAudioPreview = recordedAudioPreview
self.hasRecordedVideoPreview = hasRecordedVideoPreview
self.timeoutValue = timeoutValue
self.timeoutSelected = timeoutSelected
self.displayGradient = displayGradient
@ -214,6 +217,9 @@ public final class MessageInputPanelComponent: Component {
if lhs.recordedAudioPreview !== rhs.recordedAudioPreview {
return false
}
if lhs.hasRecordedVideoPreview != rhs.hasRecordedVideoPreview {
return false
}
if lhs.timeoutValue != rhs.timeoutValue {
return false
}
@ -468,8 +474,8 @@ public final class MessageInputPanelComponent: Component {
self.textFieldExternalState.initialText = initialText
}
let hasMediaRecording = component.audioRecorder != nil || component.videoRecordingStatus != nil
let hasMediaEditing = component.recordedAudioPreview != nil
let hasMediaRecording = component.audioRecorder != nil || (component.videoRecordingStatus != nil && !component.hasRecordedVideoPreview)
let hasMediaEditing = component.recordedAudioPreview != nil || component.hasRecordedVideoPreview
let topGradientHeight: CGFloat = 32.0
if self.gradientView.image == nil {
@ -814,6 +820,8 @@ public final class MessageInputPanelComponent: Component {
if case .up = action {
if component.recordedAudioPreview != nil {
component.sendMessageAction()
} else if component.hasRecordedVideoPreview {
component.sendMessageAction()
} else if case let .text(string) = self.getSendMessageInput(), string.string.trimmingCharacters(in: .whitespacesAndNewlines).isEmpty {
} else {
component.sendMessageAction()
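
Taken together, the two flags introduced in this file drive the panel like a small state table; a sketch of the derived values, restating the expressions added above with the resulting states spelled out:

    // Derived panel state (sketch of the logic above):
    // audio recording active        -> hasMediaRecording = true,  hasMediaEditing = false
    // video recording active        -> hasMediaRecording = true,  hasMediaEditing = false
    // video stopped, preview shown  -> hasMediaRecording = false, hasMediaEditing = true
    // audio preview shown           -> hasMediaRecording = false, hasMediaEditing = true
    let hasMediaRecording = component.audioRecorder != nil
        || (component.videoRecordingStatus != nil && !component.hasRecordedVideoPreview)
    let hasMediaEditing = component.recordedAudioPreview != nil
        || component.hasRecordedVideoPreview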

View File

@ -320,7 +320,7 @@ public final class StoryItemSetContainerComponent: Component {
var captionItem: CaptionItem?
let inputBackground = ComponentView<Empty>()
var videoRecordingBackgroundView: UIVisualEffectView?
let inputPanel = ComponentView<Empty>()
let inputPanelExternalState = MessageInputPanelComponent.ExternalState()
private let inputPanelBackground = ComponentView<Empty>()
@ -487,6 +487,7 @@ public final class StoryItemSetContainerComponent: Component {
if self.sendMessageContext.videoRecorderValue !== videoRecorder {
let previousVideoRecorderValue = self.sendMessageContext.videoRecorderValue
self.sendMessageContext.videoRecorderValue = videoRecorder
self.component?.controller()?.lockOrientation = videoRecorder != nil
if let videoRecorder = videoRecorder {
self.sendMessageContext.wasRecordingDismissed = false
@ -511,7 +512,13 @@ public final class StoryItemSetContainerComponent: Component {
})
}*/
let _ = self
//TODO:editing
}
videoRecorder.didStop = { [weak self] in
guard let self else {
return
}
self.sendMessageContext.hasRecordedVideoPreview = true
self.state?.updated(transition: Transition(animation: .curve(duration: 0.4, curve: .spring)))
}
self.component?.controller()?.present(videoRecorder, in: .window(.root))
@ -521,10 +528,10 @@ public final class StoryItemSetContainerComponent: Component {
}
if let previousVideoRecorderValue {
previousVideoRecorderValue.dismissVideo()
let _ = previousVideoRecorderValue.dismissVideo()
}
self.state?.updated(transition: .immediate)
self.state?.updated(transition: Transition(animation: .curve(duration: 0.4, curve: .spring)))
}
})
}
@ -1723,6 +1730,7 @@ public final class StoryItemSetContainerComponent: Component {
guard let self else {
return
}
self.sendMessageContext.videoRecorderValue?.dismissVideo()
self.sendMessageContext.discardMediaRecordingPreview(view: self)
},
attachmentAction: { [weak self] in
@ -1775,9 +1783,10 @@ public final class StoryItemSetContainerComponent: Component {
self.state?.updated(transition: Transition(animation: .curve(duration: 0.2, curve: .easeInOut)))
},
audioRecorder: self.sendMessageContext.audioRecorderValue,
videoRecordingStatus: self.sendMessageContext.videoRecorderValue?.audioStatus,
videoRecordingStatus: !self.sendMessageContext.hasRecordedVideoPreview ? self.sendMessageContext.videoRecorderValue?.audioStatus : nil,
isRecordingLocked: self.sendMessageContext.isMediaRecordingLocked,
recordedAudioPreview: self.sendMessageContext.recordedAudioPreview,
hasRecordedVideoPreview: self.sendMessageContext.hasRecordedVideoPreview,
wasRecordingDismissed: self.sendMessageContext.wasRecordingDismissed,
timeoutValue: nil,
timeoutSelected: false,
@ -1804,6 +1813,31 @@ public final class StoryItemSetContainerComponent: Component {
}
keyboardHeight = max(keyboardHeight, inputMediaNodeHeight)
let hasRecordingBlurBackground = self.sendMessageContext.videoRecorderValue != nil || self.sendMessageContext.hasRecordedVideoPreview
if hasRecordingBlurBackground {
let videoRecordingBackgroundView: UIVisualEffectView
if let current = self.videoRecordingBackgroundView {
videoRecordingBackgroundView = current
} else {
videoRecordingBackgroundView = UIVisualEffectView(effect: nil)
UIView.animate(withDuration: 0.3, animations: {
videoRecordingBackgroundView.effect = UIBlurEffect(style: .dark)
})
if let inputPanelView = self.inputPanel.view {
inputPanelView.superview?.insertSubview(videoRecordingBackgroundView, belowSubview: inputPanelView)
}
self.videoRecordingBackgroundView = videoRecordingBackgroundView
}
transition.setFrame(view: videoRecordingBackgroundView, frame: CGRect(origin: .zero, size: availableSize))
} else if let videoRecordingBackgroundView = self.videoRecordingBackgroundView {
self.videoRecordingBackgroundView = nil
UIView.animate(withDuration: 0.3, animations: {
videoRecordingBackgroundView.effect = nil
}, completion: { _ in
videoRecordingBackgroundView.removeFromSuperview()
})
}
let inputPanelBackgroundSize = self.inputPanelBackground.update(
transition: transition,
component: AnyComponent(BlurredGradientComponent(position: .bottom, dark: true, tag: nil)),
@ -1814,7 +1848,7 @@ public final class StoryItemSetContainerComponent: Component {
if inputPanelBackgroundView.superview == nil {
self.addSubview(inputPanelBackgroundView)
}
let isVisible = inputHeight > 44.0
let isVisible = inputHeight > 44.0 && !hasRecordingBlurBackground
transition.setFrame(view: inputPanelBackgroundView, frame: CGRect(origin: CGPoint(x: 0.0, y: isVisible ? availableSize.height - inputPanelBackgroundSize.height : availableSize.height), size: inputPanelBackgroundSize))
transition.setAlpha(view: inputPanelBackgroundView, alpha: isVisible ? 1.0 : 0.0, delay: isVisible ? 0.0 : 0.4)
}
@ -2516,6 +2550,9 @@ public final class StoryItemSetContainerComponent: Component {
if self.sendMessageContext.recordedAudioPreview != nil {
effectiveDisplayReactions = false
}
if self.sendMessageContext.hasRecordedVideoPreview {
effectiveDisplayReactions = false
}
if self.voiceMessagesRestrictedTooltipController != nil {
effectiveDisplayReactions = false
}

View File

@ -59,6 +59,10 @@ final class StoryItemSetContainerSendMessage {
var audioRecorder = Promise<ManagedAudioRecorder?>()
var recordedAudioPreview: ChatRecordedMediaPreview?
var videoRecorderValue: InstantVideoController?
var videoRecorder = Promise<InstantVideoController?>()
var hasRecordedVideoPreview = false
var inputMediaNodeData: ChatEntityKeyboardInputNode.InputData?
var inputMediaNodeDataDisposable: Disposable?
var inputMediaNodeStateContext = ChatEntityKeyboardInputNode.StateContext()
@ -66,9 +70,6 @@ final class StoryItemSetContainerSendMessage {
var inputMediaNode: ChatEntityKeyboardInputNode?
var inputMediaNodeBackground = SimpleLayer()
var videoRecorderValue: InstantVideoController?
var tempVideoRecorderValue: InstantVideoController?
var videoRecorder = Promise<InstantVideoController?>()
let controllerNavigationDisposable = MetaDisposable()
let enqueueMediaMessageDisposable = MetaDisposable()
let navigationActionDisposable = MetaDisposable()
@ -450,6 +451,11 @@ final class StoryItemSetContainerSendMessage {
let _ = enqueueMessages(account: component.context.account, peerId: peerId, messages: messages).start()
view.state?.updated(transition: Transition(animation: .curve(duration: 0.3, curve: .spring)))
} else if self.hasRecordedVideoPreview, let videoRecorderValue = self.videoRecorderValue {
videoRecorderValue.send()
self.hasRecordedVideoPreview = false
self.videoRecorder.set(.single(nil))
view.state?.updated(transition: Transition(animation: .curve(duration: 0.3, curve: .spring)))
} else {
switch inputPanelView.getSendMessageInput() {
@ -515,7 +521,7 @@ final class StoryItemSetContainerSendMessage {
if isVideo {
if self.videoRecorderValue == nil {
if let currentInputPanelFrame = view.inputPanel.view?.frame {
self.videoRecorder.set(.single(legacyInstantVideoController(theme: component.theme, panelFrame: view.convert(currentInputPanelFrame, to: nil), context: component.context, peerId: peer.id, slowmodeState: nil, hasSchedule: true, send: { [weak self, weak view] videoController, message in
self.videoRecorder.set(.single(legacyInstantVideoController(theme: defaultDarkPresentationTheme, forStory: true, panelFrame: view.convert(currentInputPanelFrame, to: nil), context: component.context, peerId: peer.id, slowmodeState: nil, hasSchedule: true, send: { [weak self, weak view] videoController, message in
guard let self, let view, let component = view.component else {
return
}
@ -599,8 +605,16 @@ final class StoryItemSetContainerSendMessage {
}
})
} else if let videoRecorderValue = self.videoRecorderValue {
let _ = videoRecorderValue
self.videoRecorder.set(.single(nil))
self.wasRecordingDismissed = !sendAction
if sendAction {
videoRecorderValue.completeVideo()
} else {
self.videoRecorder.set(.single(nil))
}
self.hasRecordedVideoPreview = false
view.state?.updated(transition: Transition(animation: .curve(duration: 0.3, curve: .spring)))
}
}
})
@ -633,11 +647,8 @@ final class StoryItemSetContainerSendMessage {
})
} else if let videoRecorderValue = self.videoRecorderValue {
if videoRecorderValue.stopVideo() {
/*self.updateChatPresentationInterfaceState(animated: true, interactive: true, {
$0.updatedInputTextPanelState { panelState in
return panelState.withUpdatedMediaRecordingState(.video(status: .editing, isLocked: false))
}
})*/
self.hasRecordedVideoPreview = true
view.state?.updated(transition: Transition(animation: .curve(duration: 0.3, curve: .spring)))
} else {
self.videoRecorder.set(.single(nil))
}
@ -649,6 +660,11 @@ final class StoryItemSetContainerSendMessage {
self.recordedAudioPreview = nil
self.wasRecordingDismissed = true
view.state?.updated(transition: Transition(animation: .curve(duration: 0.3, curve: .spring)))
} else if self.hasRecordedVideoPreview {
self.videoRecorder.set(.single(nil))
self.hasRecordedVideoPreview = false
self.wasRecordingDismissed = true
view.state?.updated(transition: Transition(animation: .curve(duration: 0.3, curve: .spring)))
}
}
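
A sketch of the resulting recording flow on the story side, summarizing the transitions implemented above; the enum is illustrative and not part of the commit:

    // Illustrative summary of the video reply flow in StoryItemSetContainerSendMessage.
    enum VideoReplyState {
        case idle        // videoRecorder is nil
        case recording   // videoRecorder set via legacyInstantVideoController(forStory: true, ...)
        case preview     // recording stopped -> hasRecordedVideoPreview = true
    }
    // Transitions:
    //   recording -> preview : stopVideo() returns true, or the controller's didStop fires
    //   preview   -> idle    : send()/completeVideo() on send, or videoRecorder.set(.single(nil)) on discard
    //   recording -> idle    : dismissVideo() when the recording is cancelled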

View File

@ -354,7 +354,6 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
private var audioRecorderStatusDisposable: Disposable?
private var videoRecorderValue: InstantVideoController?
private var tempVideoRecorderValue: InstantVideoController?
private var videoRecorder = Promise<InstantVideoController?>()
private var videoRecorderDisposable: Disposable?
@ -15407,7 +15406,7 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
isScheduledMessages = true
}
self.videoRecorder.set(.single(legacyInstantVideoController(theme: self.presentationData.theme, panelFrame: self.view.convert(currentInputPanelFrame, to: nil), context: self.context, peerId: peerId, slowmodeState: !isScheduledMessages ? self.presentationInterfaceState.slowmodeState : nil, hasSchedule: !isScheduledMessages && peerId.namespace != Namespaces.Peer.SecretChat, send: { [weak self] videoController, message in
self.videoRecorder.set(.single(legacyInstantVideoController(theme: self.presentationData.theme, forStory: false, panelFrame: self.view.convert(currentInputPanelFrame, to: nil), context: self.context, peerId: peerId, slowmodeState: !isScheduledMessages ? self.presentationInterfaceState.slowmodeState : nil, hasSchedule: !isScheduledMessages && peerId.namespace != Namespaces.Peer.SecretChat, send: { [weak self] videoController, message in
if let strongSelf = self {
guard let message = message else {
strongSelf.videoRecorder.set(.single(nil))