Video Messages Improvements

(cherry picked from commit e614343888ebdb7208399b785815b5dbc3f8ad26)
Ilya Laktyushin 2021-07-31 00:27:42 +03:00 committed by Ali
parent 51b1f7a836
commit 40b7feac42
6 changed files with 193 additions and 12 deletions

View File

@@ -1,5 +1,12 @@
#import <UIKit/UIKit.h>
@interface TGVideoMessageShimmerView : UIView
- (void)updateAbsoluteRect:(CGRect)absoluteRect containerSize:(CGSize)containerSize;
@end
@interface TGVideoMessageRingView : UIView
@property (nonatomic, strong) UIColor *accentColor;

View File

@@ -96,6 +96,7 @@ typedef enum
UIView *_separatorView;
UIImageView *_placeholderView;
TGVideoMessageShimmerView *_shimmerView;
bool _automaticDismiss;
NSTimeInterval _startTimestamp;
@@ -329,6 +330,10 @@ typedef enum
_placeholderView.image = [TGVideoMessageCaptureController startImage];
[_circleView addSubview:_placeholderView];
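// Shimmer overlay on top of the start placeholder image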
_shimmerView = [[TGVideoMessageShimmerView alloc] initWithFrame:_circleView.bounds];
[_shimmerView updateAbsoluteRect:_circleView.bounds containerSize:_circleView.bounds.size];
[_circleView addSubview:_shimmerView];
if (iosMajorVersion() >= 11)
{
_shadowView.accessibilityIgnoresInvertColors = true;
@@ -1182,9 +1187,11 @@ typedef enum
[UIView animateWithDuration:0.3 delay:delay options:kNilOptions animations:^
{
_placeholderView.alpha = 0.0f;
_shimmerView.alpha = 0.0f;
_switchButton.alpha = 1.0f;
} completion:^(__unused BOOL finished)
{
_shimmerView.hidden = true;
_placeholderView.hidden = true;
_placeholderView.alpha = 1.0f;
}];

View File

@@ -2,6 +2,168 @@
#import "TGColor.h"
#import "LegacyComponentsInternal.h"
@interface TGVideoMessageShimmerEffectForegroundView : UIView
{
UIView *_imageContainerView;
UIView *_imageView;
CGFloat _size;
bool _hasContainerSize;
CGRect _absoluteRect;
CGSize _containerSize;
}
- (instancetype)initWithSize:(CGFloat)size alpha:(CGFloat)alpha;
@end
@implementation TGVideoMessageShimmerEffectForegroundView
- (instancetype)initWithSize:(CGFloat)size alpha:(CGFloat)alpha {
self = [super initWithFrame:CGRectZero];
if (self != nil) {
_size = size;
_imageContainerView = [[UIView alloc] init];
_imageView = [[UIView alloc] init];
self.clipsToBounds = true;
[_imageContainerView addSubview:_imageView];
[self addSubview:_imageContainerView];
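// Draw a horizontal transparent → white → transparent gradient strip; it becomes a stretchable pattern image used as the shimmer highlight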
UIGraphicsBeginImageContextWithOptions(CGSizeMake(size, 16), false, 0.0f);
CGContextRef context = UIGraphicsGetCurrentContext();
CGRect bounds = CGRectMake(0, 0, size, 16);
CGContextClearRect(context, bounds);
CGContextClipToRect(context, bounds);
UIColor *transparentColor = [UIColor colorWithWhite:1.0 alpha:0.0];
UIColor *peakColor = [UIColor colorWithWhite:1.0 alpha:alpha];
CGColorRef colors[3] = {
CGColorRetain(transparentColor.CGColor),
CGColorRetain(peakColor.CGColor),
CGColorRetain(transparentColor.CGColor)
};
CFArrayRef colorsArray = CFArrayCreate(kCFAllocatorDefault, (const void **)&colors, 3, NULL);
CGFloat locations[3] = {0.0f, 0.5, 1.0};
CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
CGGradientRef gradient = CGGradientCreateWithColors(colorSpace, colorsArray, (CGFloat const *)&locations);
CGContextDrawLinearGradient(context, gradient, CGPointMake(0, 0), CGPointMake(size, 0), kNilOptions);
CFRelease(colorsArray);
CFRelease(colors[0]);
CFRelease(colors[1]);
CGColorSpaceRelease(colorSpace);
CFRelease(gradient);
UIImage *image = [UIGraphicsGetImageFromCurrentImageContext() stretchableImageWithLeftCapWidth:25 topCapHeight:25];
UIGraphicsEndImageContext();
_imageView.backgroundColor = [UIColor colorWithPatternImage:image];
}
return self;
}
- (void)updateAbsoluteRect:(CGRect)absoluteRect containerSize:(CGSize)containerSize {
_hasContainerSize = true;
CGRect previousAbsoluteRect = _absoluteRect;
CGSize previousContainerSize = _containerSize;
_absoluteRect = absoluteRect;
_containerSize = containerSize;
if (!CGSizeEqualToSize(previousContainerSize, containerSize)) {
[self setupAnimation];
}
if (!CGRectEqualToRect(previousAbsoluteRect, absoluteRect)) {
_imageContainerView.frame = CGRectMake(-absoluteRect.origin.x, -absoluteRect.origin.y, containerSize.width, containerSize.height);
}
}
- (void)setupAnimation {
if (!_hasContainerSize) {
return;
}
CGFloat gradientHeight = _size;
_imageView.frame = CGRectMake(-gradientHeight, 0, gradientHeight, _containerSize.height);
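// Sweep the strip from just past the left edge across the full container width, repeating indefinitely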
CABasicAnimation *animation = [CABasicAnimation animationWithKeyPath:@"position.x"];
animation.fromValue = @(_imageView.center.x);
animation.toValue = @(_imageView.center.x + _containerSize.width + gradientHeight);
animation.duration = 1.3f;
animation.repeatCount = INFINITY;
animation.beginTime = 1.0;
[_imageView.layer addAnimation:animation forKey:@"position"];
}
@end
@interface TGVideoMessageShimmerView ()
{
TGVideoMessageShimmerEffectForegroundView *_effectView;
UIImageView *_imageView;
UIView *_borderView;
UIView *_borderMaskView;
TGVideoMessageShimmerEffectForegroundView *_borderEffectView;
}
@end
@implementation TGVideoMessageShimmerView
- (instancetype)initWithFrame:(CGRect)frame {
self = [super initWithFrame:frame];
if (self != nil) {
self.clipsToBounds = true;
self.layer.cornerRadius = frame.size.width / 2.0f;
if (iosMajorVersion() >= 13) {
self.layer.cornerCurve = kCACornerCurveCircular;
}
_effectView = [[TGVideoMessageShimmerEffectForegroundView alloc] initWithSize:320 alpha:0.3];
_effectView.layer.compositingFilter = @"screenBlendMode";
_effectView.frame = self.bounds;
_borderView = [[UIView alloc] initWithFrame:self.bounds];
_borderMaskView = [[UIView alloc] initWithFrame:self.bounds];
_borderMaskView.layer.borderWidth = 1.0;
_borderMaskView.layer.borderColor = [UIColor whiteColor].CGColor;
_borderMaskView.layer.cornerRadius = frame.size.width / 2.0f;
if (iosMajorVersion() >= 13) {
_borderMaskView.layer.cornerCurve = kCACornerCurveCircular;
}
_borderView.maskView = _borderMaskView;
_borderEffectView = [[TGVideoMessageShimmerEffectForegroundView alloc] initWithSize:400 alpha:0.45];
_borderEffectView.layer.compositingFilter = @"screenBlendMode";
_borderEffectView.frame = self.bounds;
[self addSubview:_effectView];
[self addSubview:_borderView];
[_borderView addSubview:_borderEffectView];
}
return self;
}
- (void)updateAbsoluteRect:(CGRect)absoluteRect containerSize:(CGSize)containerSize {
[_effectView updateAbsoluteRect:absoluteRect containerSize:containerSize];
[_borderEffectView updateAbsoluteRect:absoluteRect containerSize:containerSize];
}
@end
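For reference, the shimmer effect added above amounts to sweeping a narrow transparent → white → transparent gradient strip across the view with a repeating Core Animation animation, composited with a screen blend. A minimal standalone sketch of the same idea in Swift follows; the ShimmerStripView name and the CAGradientLayer-based approach are illustrative only and are not part of this commit, which draws the strip with Core Graphics as shown above.

import UIKit

// Illustrative sketch (not from this commit): a gradient strip swept across
// the view with a repeating animation, the same technique as TGVideoMessageShimmerView.
final class ShimmerStripView: UIView {
    private let gradientLayer = CAGradientLayer()

    override init(frame: CGRect) {
        super.init(frame: frame)
        clipsToBounds = true

        // Transparent → white → transparent horizontal gradient, like the Core Graphics strip above.
        let clear = UIColor(white: 1.0, alpha: 0.0).cgColor
        let peak = UIColor(white: 1.0, alpha: 0.3).cgColor
        gradientLayer.colors = [clear, peak, clear]
        gradientLayer.locations = [0.0, 0.5, 1.0]
        gradientLayer.startPoint = CGPoint(x: 0.0, y: 0.5)
        gradientLayer.endPoint = CGPoint(x: 1.0, y: 0.5)
        layer.addSublayer(gradientLayer)
    }

    required init?(coder: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }

    override func layoutSubviews() {
        super.layoutSubviews()

        // Park the strip just off the left edge, then slide it past the right edge forever.
        let stripWidth: CGFloat = 320.0
        gradientLayer.frame = CGRect(x: -stripWidth, y: 0.0, width: stripWidth, height: bounds.height)

        let animation = CABasicAnimation(keyPath: "position.x")
        animation.fromValue = gradientLayer.position.x
        animation.toValue = gradientLayer.position.x + bounds.width + stripWidth
        animation.duration = 1.3
        animation.repeatCount = .infinity
        gradientLayer.removeAnimation(forKey: "position")
        gradientLayer.add(animation, forKey: "position")
    }
}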
@interface TGVideoMessageRingView ()
{
CGFloat _value;

View File

@@ -281,7 +281,6 @@ private final class MediaPlayerContext {
CMTimebaseSetRate(loadedState.controlTimebase.timebase, rate: 0.0)
}
}
let currentTimestamp = CMTimeGetSeconds(CMTimebaseGetTime(loadedState.controlTimebase.timebase))
var duration: Double = 0.0
if let videoTrackFrameBuffer = loadedState.mediaBuffers.videoBuffer {
duration = max(duration, CMTimeGetSeconds(videoTrackFrameBuffer.duration))

View File

@@ -200,8 +200,12 @@ final class InstantVideoRadialStatusNode: ASDisplayNode, UIGestureRecognizerDele
self.hapticFeedback.impact(.light)
}
}
self.seekTo?(min(0.99, fraction), false)
let newProgress = min(0.99, fraction)
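// Skip the seek if the scrubbing position moved by less than 0.005 since the last dispatched value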
if let seekingProgress = self.seekingProgress, abs(seekingProgress - CGFloat(newProgress)) < 0.005 {
} else {
self.seekTo?(newProgress, false)
self.seekingProgress = CGFloat(fraction)
}
case .ended, .cancelled:
self.seeking = false
self.seekTo?(min(0.99, fraction), true)
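The change above throttles scrubbing: while the gesture is in flight, a new seek is dispatched only when the fraction has moved by at least 0.005 from the previously sent value, and the final seek on .ended/.cancelled is always sent. A self-contained sketch of that throttling pattern follows; the SeekThrottle type and its API are illustrative assumptions, not part of this codebase.

import CoreGraphics

// Illustrative sketch (not from this codebase): drop scrubbing updates whose
// progress differs from the last dispatched value by less than a threshold.
struct SeekThrottle {
    private(set) var lastSentProgress: CGFloat? = nil
    let threshold: CGFloat = 0.005

    // Returns the clamped progress to dispatch, or nil if the change is too small to matter.
    mutating func progressToSend(for fraction: Double) -> Double? {
        let newProgress = min(0.99, fraction)
        if let last = lastSentProgress, abs(last - CGFloat(newProgress)) < threshold {
            return nil
        }
        lastSentProgress = CGFloat(newProgress)
        return newProgress
    }
}

// Usage: only invoke the seek callback when the throttle says the change is worth sending.
// var throttle = SeekThrottle()
// if let progress = throttle.progressToSend(for: fraction) {
//     seekTo?(progress, false)
// }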

View File

@@ -720,21 +720,23 @@ public func convertMarkdownToAttributes(_ text: NSAttributedString) -> NSAttribu
let entity = string.substring(with: match.range(at: 7))
let substring = string.substring(with: match.range(at: 6)) + text + string.substring(with: match.range(at: 9))
let textInputAttribute: NSAttributedString.Key?
switch entity {
case "`":
result.append(NSAttributedString(string: substring, attributes: [ChatTextInputAttributes.monospace: true as NSNumber]))
offsetRanges.append((NSMakeRange(matchIndex + match.range(at: 6).length, text.count), match.range(at: 6).length * 2))
textInputAttribute = ChatTextInputAttributes.monospace
case "**":
result.append(NSAttributedString(string: substring, attributes: [ChatTextInputAttributes.bold: true as NSNumber]))
offsetRanges.append((NSMakeRange(matchIndex + match.range(at: 6).length, text.count), match.range(at: 6).length * 2))
textInputAttribute = ChatTextInputAttributes.bold
case "__":
result.append(NSAttributedString(string: substring, attributes: [ChatTextInputAttributes.italic: true as NSNumber]))
offsetRanges.append((NSMakeRange(matchIndex + match.range(at: 6).length, text.count), match.range(at: 6).length * 2))
textInputAttribute = ChatTextInputAttributes.italic
case "~~":
result.append(NSAttributedString(string: substring, attributes: [ChatTextInputAttributes.strikethrough: true as NSNumber]))
offsetRanges.append((NSMakeRange(matchIndex + match.range(at: 6).length, text.count), match.range(at: 6).length * 2))
textInputAttribute = ChatTextInputAttributes.strikethrough
default:
break
textInputAttribute = nil
}
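// Append the substring once with the resolved attribute, instead of duplicating the append in every case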
if let textInputAttribute = textInputAttribute {
result.append(NSAttributedString(string: substring, attributes: [textInputAttribute: true as NSNumber]))
offsetRanges.append((NSMakeRange(matchIndex + match.range(at: 6).length, text.count), match.range(at: 6).length * 2))
}
stringOffset -= match.range(at: 7).length * 2