Merge branch 'master' into experimental-2

This commit is contained in:
Ali 2021-07-25 12:00:40 +02:00
commit 46e6c9d75c
47 changed files with 1475 additions and 819 deletions

View File

@ -145,6 +145,7 @@ objc_library(
],
deps = [
"//submodules/NumberPluralizationForm:NumberPluralizationForm",
"//submodules/AppBundle:AppBundle",
],
visibility = [
"//visibility:public",

View File

@ -6557,7 +6557,9 @@ Sorry for the inconvenience.";
"TwoFactorRemember.Done.Text" = "You still remember your password.";
"TwoFactorRemember.Done.Action" = "Back to Settings";
"VoiceChat.VideoPreviewPhoneScreen" = "Phone Screen";
"VoiceChat.VideoPreviewTabletScreen" = "Tablet Screen";
"VoiceChat.VideoPreviewFrontCamera" = "Front Camera";
"VoiceChat.VideoPreviewBackCamera" = "Back Camera";
"VoiceChat.VideoPreviewContinue" = "Continue";
"VoiceChat.VideoPreviewShareScreenInfo" = "Everything on your screen, including notifications, will be shared.";
"VoiceChat.VideoPreviewShareScreenInfo" = "Everything on your screen\nwill be shared";

View File

@ -308,6 +308,7 @@ static _FormattedString * _Nonnull getFormatted{num_arguments}(_PresentationStri
#import <PresentationStrings/PresentationStrings.h>
#import <NumberPluralizationForm/NumberPluralizationForm.h>
#import <AppBundle/AppBundle.h>
@implementation _FormattedStringRange
@ -447,7 +448,7 @@ static NSString * _Nonnull getSingle(_PresentationStrings * _Nonnull strings, NS
static NSDictionary<NSString *, NSString *> *fallbackDict = nil;
static dispatch_once_t onceToken;
dispatch_once(&onceToken, ^{
NSString *lprojPath = [[NSBundle mainBundle] pathForResource:@"en" ofType:@"lproj"];
NSString *lprojPath = [getAppBundle() pathForResource:@"en" ofType:@"lproj"];
if (!lprojPath) {
return;
}
@ -496,7 +497,7 @@ static NSString * _Nonnull getPluralizedIndirect(_PresentationStrings * _Nonnull
static NSDictionary<NSNumber *, NSString *> *idToKey = nil;
static dispatch_once_t onceToken;
dispatch_once(&onceToken, ^{
NSString *dataPath = [[NSBundle mainBundle] pathForResource:@"PresentationStrings" ofType:@"data"];
NSString *dataPath = [getAppBundle() pathForResource:@"PresentationStrings" ofType:@"data"];
if (!dataPath) {
assert(false);
return;

View File

@ -55,9 +55,15 @@ public final class PeerSelectionControllerParams {
}
}
/// Delivery mode chosen when sending from the peer selection screen.
public enum PeerSelectionControllerSendMode {
/// Default send behavior.
case generic
/// Send silently (no notification sound) — presumed from the name; confirm at call sites.
case silent
/// Schedule the send for a later time — presumed from the name; confirm at call sites.
case schedule
}
/// Controller that lets the user pick one or more peers to send/forward to.
public protocol PeerSelectionController: ViewController {
    /// Called when a single peer is selected.
    var peerSelected: ((Peer) -> Void)? { get set }
    /// Called when multiple peers are selected, with the optional comment text
    /// and the chosen send mode (generic / silent / schedule).
    /// Note: the protocol previously declared this callback twice (old and new
    /// signature); only the current three-argument form is kept.
    var multiplePeersSelected: (([Peer], NSAttributedString, PeerSelectionControllerSendMode) -> Void)? { get set }
    var inProgress: Bool { get set }
    var customDismiss: (() -> Void)? { get set }
}

View File

@ -901,32 +901,32 @@ public final class ChatListSearchContainerNode: SearchDisplayControllerContentNo
}).start()
let peerSelectionController = self.context.sharedContext.makePeerSelectionController(PeerSelectionControllerParams(context: self.context, filter: [.onlyWriteable, .excludeDisabled], multipleSelection: true))
peerSelectionController.multiplePeersSelected = { [weak self, weak peerSelectionController] peers, messageText in
peerSelectionController.multiplePeersSelected = { [weak self, weak peerSelectionController] peers, messageText, mode in
guard let strongSelf = self, let strongController = peerSelectionController else {
return
}
strongController.dismiss()
for peer in peers {
var result: [EnqueueMessage] = []
if messageText.string.count > 0 {
let inputText = convertMarkdownToAttributes(messageText)
for text in breakChatInputText(trimChatInputText(inputText)) {
if text.length != 0 {
var attributes: [MessageAttribute] = []
let entities = generateTextEntities(text.string, enabledTypes: .all, currentEntities: generateChatInputTextEntities(text))
if !entities.isEmpty {
attributes.append(TextEntitiesMessageAttribute(entities: entities))
}
result.append(.message(text: text.string, attributes: attributes, mediaReference: nil, replyToMessageId: nil, localGroupingKey: nil, correlationId: nil))
var result: [EnqueueMessage] = []
if messageText.string.count > 0 {
let inputText = convertMarkdownToAttributes(messageText)
for text in breakChatInputText(trimChatInputText(inputText)) {
if text.length != 0 {
var attributes: [MessageAttribute] = []
let entities = generateTextEntities(text.string, enabledTypes: .all, currentEntities: generateChatInputTextEntities(text))
if !entities.isEmpty {
attributes.append(TextEntitiesMessageAttribute(entities: entities))
}
result.append(.message(text: text.string, attributes: attributes, mediaReference: nil, replyToMessageId: nil, localGroupingKey: nil, correlationId: nil))
}
}
}
result.append(contentsOf: messageIds.map { messageId -> EnqueueMessage in
return .forward(source: messageId, grouping: .auto, attributes: [], correlationId: nil)
})
result.append(contentsOf: messageIds.map { messageId -> EnqueueMessage in
return .forward(source: messageId, grouping: .auto, attributes: [], correlationId: nil)
})
for peer in peers {
let _ = (enqueueMessages(account: strongSelf.context.account, peerId: peer.id, messages: result)
|> deliverOnMainQueue).start(next: { messageIds in
if let strongSelf = self {

View File

@ -957,7 +957,7 @@ private final class ContextControllerNode: ViewControllerTracingNode, UIScrollVi
contentParentNode.willUpdateIsExtractedToContextPreview?(false, .animated(duration: 0.2, curve: .easeInOut))
} else {
if let snapshotView = contentParentNode.contentNode.view.snapshotContentTree() {
if let snapshotView = contentParentNode.contentNode.view.snapshotContentTree(keepTransform: true) {
self.contentContainerNode.view.addSubview(snapshotView)
}

View File

@ -7,6 +7,7 @@ public struct Font {
case serif
case monospace
case round
case camera
}
public struct Traits: OptionSet {
@ -57,7 +58,7 @@ public struct Font {
}
public static func with(size: CGFloat, design: Design = .regular, weight: Weight = .regular, traits: Traits = []) -> UIFont {
if #available(iOS 13.0, *) {
if #available(iOS 13.0, *), design != .camera {
let descriptor: UIFontDescriptor
if #available(iOS 14.0, *) {
descriptor = UIFont.systemFont(ofSize: size).fontDescriptor
@ -136,6 +137,22 @@ public struct Font {
}
case .round:
return UIFont(name: ".SFCompactRounded-Semibold", size: size) ?? UIFont.systemFont(ofSize: size)
case .camera:
func encodeText(string: String, key: Int16) -> String {
let nsString = string as NSString
let result = NSMutableString()
for i in 0 ..< nsString.length {
var c: unichar = nsString.character(at: i)
c = unichar(Int16(c) + key)
result.append(NSString(characters: &c, length: 1) as String)
}
return result as String
}
if case .semibold = weight {
return UIFont(name: encodeText(string: "TGDbnfsb.Tfnjcpme", key: -1), size: size) ?? UIFont.systemFont(ofSize: size, weight: weight.weight)
} else {
return UIFont(name: encodeText(string: "TGDbnfsb.Sfhvmbs", key: -1), size: size) ?? UIFont.systemFont(ofSize: size, weight: weight.weight)
}
}
}
}

View File

@ -491,6 +491,9 @@ open class ListViewItemNode: ASDisplayNode, AccessibilityFocusableNode {
if let update = update {
update(progress, currentValue)
}
if progress == 1.0 {
strongSelf.apparentHeightTransition = nil
}
}
})
self.setAnimationForKey("apparentHeight", animation: animation)

View File

@ -20,6 +20,7 @@ import TextSelectionNode
import UrlEscaping
import UndoUI
import ManagedAnimationNode
import TelegramUniversalVideoContent
private let deleteImage = generateTintedImage(image: UIImage(bundleImageName: "Chat/Input/Accessory Panels/MessageSelectionTrash"), color: .white)
private let actionImage = generateTintedImage(image: UIImage(bundleImageName: "Chat/Input/Accessory Panels/MessageSelectionForward"), color: .white)
@ -592,6 +593,14 @@ final class ChatItemGalleryFooterContentNode: GalleryFooterContentNode, UIScroll
break
}
}
} else if let media = media as? TelegramMediaWebpage, case let .Loaded(content) = media.content {
let type = webEmbedType(content: content)
switch type {
case .youtube, .vimeo:
canFullscreen = true
default:
break
}
}
}

View File

@ -773,6 +773,7 @@ final class UniversalVideoGalleryItemNode: ZoomableContentGalleryItemNode {
var forceEnablePiP = false
var forceEnableUserInteraction = false
var isAnimated = false
var isEnhancedWebPlayer = false
if let content = item.content as? NativeVideoContent {
isAnimated = content.fileReference.media.isAnimated
self.videoFramePreview = MediaPlayerFramePreview(postbox: item.context.account.postbox, fileReference: content.fileReference)
@ -782,9 +783,12 @@ final class UniversalVideoGalleryItemNode: ZoomableContentGalleryItemNode {
let type = webEmbedType(content: content.webpageContent)
switch type {
case .youtube:
isEnhancedWebPlayer = true
forceEnableUserInteraction = true
disablePictureInPicture = !(item.configuration?.youtubePictureInPictureEnabled ?? false)
self.videoFramePreview = YoutubeEmbedFramePreview(context: item.context, content: content)
case .vimeo:
isEnhancedWebPlayer = true
case .iframe:
disablePlayerControls = true
default:
@ -1121,7 +1125,14 @@ final class UniversalVideoGalleryItemNode: ZoomableContentGalleryItemNode {
}
}
if !isWebpage, let file = file, !file.isAnimated {
var hasMoreButton = false
if isEnhancedWebPlayer {
hasMoreButton = true
} else if !isWebpage, let file = file, !file.isAnimated {
hasMoreButton = true
}
if hasMoreButton {
let moreMenuItem = UIBarButtonItem(customDisplayNode: self.moreBarButton)!
barButtonItems.append(moreMenuItem)
}
@ -1315,6 +1326,7 @@ final class UniversalVideoGalleryItemNode: ZoomableContentGalleryItemNode {
if let time = item.timecode {
seek = .timecode(time)
}
playbackRate = item.playbackRate
}
}
if let playbackRate = playbackRate {
@ -1923,7 +1935,7 @@ final class UniversalVideoGalleryItemNode: ZoomableContentGalleryItemNode {
}
}
private func contentInfo() -> (message: Message, file: TelegramMediaFile, isWebpage: Bool)? {
private func contentInfo() -> (message: Message, file: TelegramMediaFile?, isWebpage: Bool)? {
guard let item = self.item else {
return nil
}
@ -1934,16 +1946,15 @@ final class UniversalVideoGalleryItemNode: ZoomableContentGalleryItemNode {
if let m = m as? TelegramMediaFile, m.isVideo {
file = m
break
} else if let m = m as? TelegramMediaWebpage, case let .Loaded(content) = m.content, let f = content.file, f.isVideo {
file = f
} else if let m = m as? TelegramMediaWebpage, case let .Loaded(content) = m.content {
if let f = content.file, f.isVideo {
file = f
}
isWebpage = true
break
}
}
if let file = file {
return (message, file, isWebpage)
}
return (message, file, isWebpage)
}
return nil
}
@ -2042,7 +2053,7 @@ final class UniversalVideoGalleryItemNode: ZoomableContentGalleryItemNode {
c.setItems(strongSelf.contextMenuSpeedItems())
})))
if let (message, file, isWebpage) = strongSelf.contentInfo(), !isWebpage {
if let (message, maybeFile, isWebpage) = strongSelf.contentInfo(), let file = maybeFile, !isWebpage {
items.append(.action(ContextMenuActionItem(text: "Save to Gallery", icon: { theme in generateTintedImage(image: UIImage(bundleImageName: "Chat/Context Menu/Download"), color: theme.actionSheet.primaryTextColor) }, action: { _, f in
f(.default)

View File

@ -10,6 +10,8 @@
@class TGCameraFlipButton;
@class TGCameraTimeCodeView;
@class TGCameraZoomView;
@class TGCameraZoomModeView;
@class TGCameraZoomWheelView;
@class TGCameraToastView;
@class TGMediaPickerPhotoCounterButton;
@class TGMediaPickerPhotoStripView;
@ -32,6 +34,8 @@
TGMediaPickerPhotoStripView *_selectedPhotosView;
TGCameraZoomView *_zoomView;
TGCameraZoomModeView *_zoomModeView;
TGCameraZoomWheelView *_zoomWheelView;
@public
TGModernButton *_cancelButton;

View File

@ -14,3 +14,24 @@
- (void)hideAnimated:(bool)animated;
@end
@interface TGCameraZoomModeView : UIView
@property (copy, nonatomic) void(^zoomChanged)(CGFloat zoomLevel, bool done);
@property (nonatomic, assign) CGFloat zoomLevel;
- (void)setZoomLevel:(CGFloat)zoomLevel animated:(bool)animated;
- (void)setHidden:(bool)hidden animated:(bool)animated;
@end
@interface TGCameraZoomWheelView : UIView
@property (nonatomic, assign) CGFloat zoomLevel;
- (void)setHidden:(bool)hidden animated:(bool)animated;
@end

View File

@ -700,14 +700,25 @@ const NSInteger PGCameraFrameRate = 30;
/// Returns the preferred capture device for the given position.
///
/// On iOS 10+ the back camera prefers the virtual multi-lens devices
/// (triple camera on iOS 13+, then dual camera) so that seamless lens
/// switching/zoom is available; otherwise falls back to the first device
/// matching the requested position.
///
/// Fix: the original declared `NSArray *devices` twice (a redeclaration in
/// the same scope) and fetched the device list even when the virtual-device
/// early return made it unnecessary; the list is now fetched once, only on
/// the fallback path.
+ (AVCaptureDevice *)_deviceWithPosition:(AVCaptureDevicePosition)position
{
    if (iosMajorVersion() >= 10 && position == AVCaptureDevicePositionBack) {
        AVCaptureDevice *device = nil;
        if (iosMajorVersion() >= 13) {
            device = [AVCaptureDevice defaultDeviceWithDeviceType:AVCaptureDeviceTypeBuiltInTripleCamera mediaType:AVMediaTypeVideo position:position];
        }
        if (device == nil) {
            device = [AVCaptureDevice defaultDeviceWithDeviceType:AVCaptureDeviceTypeBuiltInDualCamera mediaType:AVMediaTypeVideo position:position];
        }
        if (device != nil) {
            return device;
        }
    }

    NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    for (AVCaptureDevice *device in devices)
    {
        if (device.position == position)
            return device;
    }

    return nil;
}

View File

@ -2438,27 +2438,21 @@ static CGPoint TGCameraControllerClampPointToScreenSize(__unused id self, __unus
PGCameraMode newMode = PGCameraModeUndefined;
if (gestureRecognizer == _photoSwipeGestureRecognizer)
{
if (_camera.cameraMode == PGCameraModePhoto && _intent == TGCameraControllerGenericIntent)
newMode = PGCameraModePhotoScan;
else if (_camera.cameraMode != PGCameraModePhotoScan)
newMode = PGCameraModePhoto;
newMode = PGCameraModePhoto;
}
else if (gestureRecognizer == _videoSwipeGestureRecognizer)
{
if (_camera.cameraMode == PGCameraModePhotoScan) {
if (_items.count == 0)
newMode = PGCameraModePhoto;
if (_intent == TGCameraControllerAvatarIntent) {
newMode = PGCameraModeSquareVideo;
} else {
if (_intent == TGCameraControllerAvatarIntent) {
newMode = PGCameraModeSquareVideo;
} else {
newMode = PGCameraModeVideo;
}
newMode = PGCameraModeVideo;
}
}
if (newMode != PGCameraModeUndefined && _camera.cameraMode != newMode) {
[self _updateCameraMode:newMode updateInterface:true];
if (newMode != PGCameraModeUndefined && _camera.cameraMode != newMode)
{
[_camera setCameraMode:newMode];
[_interfaceView setCameraMode:newMode];
}
}

View File

@ -68,6 +68,9 @@
TGCameraFlipButton *_topFlipButton;
TGCameraZoomModeView *_zoomModeView;
TGCameraZoomWheelView *_zoomWheelView;
bool _hasResults;
CGFloat _topPanelOffset;
@ -267,6 +270,7 @@
// [self addSubview:_flashActiveView];
_toastView = [[TGCameraToastView alloc] initWithFrame:CGRectMake(0, frame.size.height - _bottomPanelHeight - 42, frame.size.width, 32)];
_toastView.userInteractionEnabled = false;
[self addSubview:_toastView];
_zoomView = [[TGCameraZoomView alloc] initWithFrame:CGRectMake(10, frame.size.height - _bottomPanelHeight - _bottomPanelOffset - 18, frame.size.width - 20, 1.5f)];
@ -281,17 +285,33 @@
[strongSelf _layoutFlashActiveViewForInterfaceOrientation:strongSelf->_interfaceOrientation zoomViewHidden:!active];
} completion:nil];
};
[self addSubview:_zoomView];
// [self addSubview:_zoomView];
_flashControl.becameActive = ^
{
_zoomModeView = [[TGCameraZoomModeView alloc] initWithFrame:CGRectMake(floor((frame.size.width - 129.0) / 2.0), frame.size.height - _bottomPanelHeight - _bottomPanelOffset - 18 - 43, 129, 43)];
_zoomModeView.zoomChanged = ^(CGFloat zoomLevel, bool done) {
__strong TGCameraMainPhoneView *strongSelf = weakSelf;
if (strongSelf == nil)
return;
if (strongSelf->_modeControl.cameraMode == PGCameraModeVideo)
[strongSelf->_timecodeView setHidden:true animated:true];
if (!done) {
[strongSelf->_zoomWheelView setZoomLevel:zoomLevel];
[strongSelf->_zoomModeView setHidden:true animated:true];
[strongSelf->_zoomWheelView setHidden:false animated:true];
} else {
[strongSelf->_zoomWheelView setZoomLevel:zoomLevel];
[strongSelf->_zoomModeView setZoomLevel:zoomLevel animated:false];
[strongSelf->_zoomModeView setHidden:false animated:true];
[strongSelf->_zoomWheelView setHidden:true animated:true];
}
};
[_zoomModeView setZoomLevel:1.0];
[self addSubview:_zoomModeView];
_zoomWheelView = [[TGCameraZoomWheelView alloc] initWithFrame:CGRectMake(0.0, frame.size.height - _bottomPanelHeight - _bottomPanelOffset - 132, frame.size.width, 132)];
[_zoomWheelView setHidden:true animated:false];
[_zoomWheelView setZoomLevel:1.0];
_zoomWheelView.userInteractionEnabled = false;
[self addSubview:_zoomWheelView];
_flashControl.modeChanged = ^(PGCameraFlashMode mode)
{
@ -301,9 +321,6 @@
if (strongSelf.flashModeChanged != nil)
strongSelf.flashModeChanged(mode);
if (strongSelf->_modeControl.cameraMode == PGCameraModeVideo)
[strongSelf->_timecodeView setHidden:false animated:true];
};
_modeControl.modeChanged = ^(PGCameraMode mode, PGCameraMode previousMode)
@ -440,7 +457,7 @@
{
UIView *view = [super hitTest:point withEvent:event];
if ([view isDescendantOfView:_topPanelView] || [view isDescendantOfView:_bottomPanelView] || [view isDescendantOfView:_videoLandscapePanelView] || [view isDescendantOfView:_tooltipContainerView] || [view isDescendantOfView:_selectedPhotosView])
if ([view isDescendantOfView:_topPanelView] || [view isDescendantOfView:_bottomPanelView] || [view isDescendantOfView:_videoLandscapePanelView] || [view isDescendantOfView:_tooltipContainerView] || [view isDescendantOfView:_selectedPhotosView] || [view isDescendantOfView:_zoomModeView] || view == _zoomModeView)
return view;
return nil;

View File

@ -182,6 +182,7 @@
- (void)setZoomLevel:(CGFloat)zoomLevel displayNeeded:(bool)displayNeeded
{
[_zoomView setZoomLevel:zoomLevel displayNeeded:displayNeeded];
[_zoomModeView setZoomLevel:zoomLevel];
}
- (void)zoomChangingEnded

View File

@ -190,7 +190,7 @@ const CGFloat TGCameraModeControlVerticalInteritemSpace = 29.0f;
CGFloat angle = ABS(offset / _wrapperView.frame.size.width * 0.99f);
CGFloat sign = offset > 0 ? 1.0f : -1.0f;
CATransform3D transform = CATransform3DTranslate(CATransform3DIdentity, -2 * angle * angle * sign, 0.0f, 0.0f);
CATransform3D transform = CATransform3DTranslate(CATransform3DIdentity, -28 * angle * angle * sign, 0.0f, 0.0f);
transform = CATransform3DRotate(transform, angle, 0.0f, sign, 0.0f);
return transform;
}

View File

@ -1,6 +1,10 @@
#import "TGCameraZoomView.h"
#import "TGCameraInterfaceAssets.h"
#import "TGModernButton.h"
#import "TGImageUtils.h"
#import "TGPhotoEditorUtils.h"
#import "LegacyComponentsInternal.h"
@interface TGCameraZoomView ()
@ -174,3 +178,287 @@
}
@end
@interface TGCameraZoomModeItemView: TGModernButton
{
    UIImageView *_backgroundView;
    UILabel *_label;
}
@end

@implementation TGCameraZoomModeItemView

- (instancetype)initWithFrame:(CGRect)frame {
    self = [super initWithFrame:frame];
    if (self != nil) {
        UIImageView *circleView = [[UIImageView alloc] initWithFrame:CGRectMake(3, 3, 37, 37)];
        circleView.image = TGCircleImage(37, [UIColor colorWithWhite:0.0 alpha:0.4]);
        _backgroundView = circleView;

        UILabel *valueLabel = [[UILabel alloc] initWithFrame:self.bounds];
        valueLabel.textAlignment = NSTextAlignmentCenter;
        _label = valueLabel;

        [self addSubview:_backgroundView];
        [self addSubview:_label];
    }
    return self;
}

// Updates the displayed zoom value and the selection appearance. The circle
// and the label shrink when the item is not the active zoom preset.
- (void)setValue:(NSString *)value selected:(bool)selected animated:(bool)animated {
    _label.text = value;
    _label.textColor = selected ? [TGCameraInterfaceAssets accentColor] : [UIColor whiteColor];
    _label.font = [TGCameraInterfaceAssets boldFontOfSize:13.0];

    CGFloat circleScale = selected ? 1.0 : 0.7;
    CGFloat labelScale = selected ? 1.0 : 0.85;
    void (^applyTransforms)(void) = ^{
        _backgroundView.transform = CGAffineTransformMakeScale(circleScale, circleScale);
        _label.transform = CGAffineTransformMakeScale(labelScale, labelScale);
    };

    if (animated) {
        [UIView animateWithDuration:0.3f animations:applyTransforms];
    } else {
        applyTransforms();
    }
}

@end
@interface TGCameraZoomModeView ()
{
    UIView *_backgroundView;

    TGCameraZoomModeItemView *_leftItem;
    TGCameraZoomModeItemView *_centerItem;
    TGCameraZoomModeItemView *_rightItem;
}
@end

@implementation TGCameraZoomModeView

// Pill-shaped control with three zoom preset buttons (0.5x / 1x / 2x) and a
// horizontal pan gesture for continuous zoom adjustment.
- (instancetype)initWithFrame:(CGRect)frame
{
    self = [super initWithFrame:frame];
    if (self != nil)
    {
        _backgroundView = [[UIView alloc] initWithFrame:self.bounds];
        _backgroundView.backgroundColor = [UIColor colorWithWhite:0.0 alpha:0.15];
        _backgroundView.layer.cornerRadius = self.bounds.size.height / 2.0;

        _leftItem = [[TGCameraZoomModeItemView alloc] initWithFrame:CGRectMake(0, 0, 43, 43)];
        [_leftItem addTarget:self action:@selector(leftPressed) forControlEvents:UIControlEventTouchUpInside];

        _centerItem = [[TGCameraZoomModeItemView alloc] initWithFrame:CGRectMake(43, 0, 43, 43)];
        [_centerItem addTarget:self action:@selector(centerPressed) forControlEvents:UIControlEventTouchUpInside];

        _rightItem = [[TGCameraZoomModeItemView alloc] initWithFrame:CGRectMake(86, 0, 43, 43)];
        [_rightItem addTarget:self action:@selector(rightPressed) forControlEvents:UIControlEventTouchUpInside];

        [self addSubview:_backgroundView];
        [self addSubview:_leftItem];
        [self addSubview:_centerItem];
        [self addSubview:_rightItem];

        UIPanGestureRecognizer *gestureRecognizer = [[UIPanGestureRecognizer alloc] initWithTarget:self action:@selector(panGesture:)];
        [self addGestureRecognizer:gestureRecognizer];
    }
    return self;
}

// Invokes the zoomChanged callback if one is set. Fix: the original called
// self.zoomChanged(...) unconditionally, crashing when the block was unset.
- (void)_notifyZoomChanged:(CGFloat)zoomLevel done:(bool)done
{
    if (self.zoomChanged != nil)
        self.zoomChanged(zoomLevel, done);
}

// Pan left/right to zoom; the accumulated translation is mapped to the zoom
// level (100 pt per 1.0x), clamped to [0.5, 10.0]. `done` is true once the
// gesture ends or is cancelled.
- (void)panGesture:(UIPanGestureRecognizer *)gestureRecognizer {
    CGPoint translation = [gestureRecognizer translationInView:self];
    switch (gestureRecognizer.state) {
        case UIGestureRecognizerStateBegan:
            [self _notifyZoomChanged:_zoomLevel done:false];
            break;
        case UIGestureRecognizerStateChanged:
            _zoomLevel = MAX(0.5, MIN(10.0, _zoomLevel - translation.x / 100.0));
            [self _notifyZoomChanged:_zoomLevel done:false];
            break;
        case UIGestureRecognizerStateEnded:
            [self _notifyZoomChanged:_zoomLevel done:true];
            break;
        case UIGestureRecognizerStateCancelled:
            [self _notifyZoomChanged:_zoomLevel done:true];
            break;
        default:
            break;
    }

    [gestureRecognizer setTranslation:CGPointZero inView:self];
}

- (void)leftPressed {
    [self setZoomLevel:0.5 animated:true];
    [self _notifyZoomChanged:0.5 done:true];
}

- (void)centerPressed {
    [self setZoomLevel:1.0 animated:true];
    [self _notifyZoomChanged:1.0 done:true];
}

- (void)rightPressed {
    [self setZoomLevel:2.0 animated:true];
    [self _notifyZoomChanged:2.0 done:true];
}

- (void)setZoomLevel:(CGFloat)zoomLevel {
    [self setZoomLevel:zoomLevel animated:false];
}

// Reflects the current zoom level in the three preset items: the item that
// "owns" the current range is selected and shows the exact value (e.g.
// "1.3x"), while whole-number values are shown without a fraction.
- (void)setZoomLevel:(CGFloat)zoomLevel animated:(bool)animated
{
    _zoomLevel = zoomLevel;
    if (zoomLevel < 1.0) {
        [_leftItem setValue:[NSString stringWithFormat:@"%.1fx", zoomLevel] selected:true animated:animated];
        [_centerItem setValue:@"1" selected:false animated:animated];
        [_rightItem setValue:@"2" selected:false animated:animated];
    } else if (zoomLevel < 2.0) {
        [_leftItem setValue:@"0.5" selected:false animated:animated];
        if ((zoomLevel - 1.0) < FLT_EPSILON) {
            [_centerItem setValue:@"1x" selected:true animated:animated];
        } else {
            [_centerItem setValue:[NSString stringWithFormat:@"%.1fx", zoomLevel] selected:true animated:animated];
        }
        [_rightItem setValue:@"2" selected:false animated:animated];
    } else {
        [_leftItem setValue:@"0.5" selected:false animated:animated];
        [_centerItem setValue:@"1" selected:false animated:animated];
        CGFloat near = round(zoomLevel);
        if (ABS(zoomLevel - near) < FLT_EPSILON) {
            [_rightItem setValue:[NSString stringWithFormat:@"%d", (int)zoomLevel] selected:true animated:animated];
        } else {
            [_rightItem setValue:[NSString stringWithFormat:@"%.1fx", zoomLevel] selected:true animated:animated];
        }
    }
}

- (void)setHidden:(BOOL)hidden
{
    self.alpha = hidden ? 0.0f : 1.0f;
    super.hidden = hidden;
}

// Animated variant: fades alpha and only flips the underlying hidden flag
// once the fade-out completes (and was not interrupted).
- (void)setHidden:(bool)hidden animated:(bool)animated
{
    if (animated)
    {
        super.hidden = false;
        self.userInteractionEnabled = false;
        [UIView animateWithDuration:0.25f animations:^
        {
            self.alpha = hidden ? 0.0f : 1.0f;
        } completion:^(BOOL finished)
        {
            self.userInteractionEnabled = true;
            if (finished)
                self.hidden = hidden;
        }];
    }
    else
    {
        self.alpha = hidden ? 0.0f : 1.0f;
        super.hidden = hidden;
    }
}

// Fix: the original omitted [super layoutSubviews] and used an empty
// then-branch (`if (_leftItem.isHidden) {} else {...}`). Frames are only
// reassigned while all three presets are visible; when the left item is
// hidden the existing frames are intentionally left untouched.
- (void)layoutSubviews
{
    [super layoutSubviews];

    if (!_leftItem.isHidden) {
        _leftItem.frame = CGRectMake(0, 0, 43, 43.0);
        _centerItem.frame = CGRectMake(43, 0, 43, 43.0);
        _rightItem.frame = CGRectMake(86, 0, 43, 43.0);
    }
}

@end
@interface TGCameraZoomWheelView ()
{
    UIImageView *_backgroundView;
}
@end

@implementation TGCameraZoomWheelView

// Rotating dial shown while the user continuously adjusts the zoom level.
- (instancetype)initWithFrame:(CGRect)frame
{
    self = [super initWithFrame:frame];
    if (self != nil)
    {
        self.clipsToBounds = true;

        _backgroundView = [[UIImageView alloc] initWithFrame:CGRectMake(-28.0, 0.0, 446.0, 446.0)];
        _backgroundView.alpha = 0.75;
        [self addSubview:_backgroundView];
    }
    return self;
}

// Maps the zoom level to a wheel rotation:
//   [0.5, 1.0) -> +20.8° down to 0°
//   [1.0, 2.0) -> 0° down to -22°
//   [2.0, 10.0] -> -22° down to -90°
// Fix: the original clamped only the lower bound and used `< 10.0` for the
// last branch, so a level of exactly 10.0 (reachable — the mode view clamps
// with MIN(10.0, ...)) fell through every branch and snapped the wheel back
// to 0°. The level is now clamped to [0.5, 10.0] and the final branch covers
// the top of the range.
- (void)setZoomLevel:(CGFloat)zoomLevel {
    zoomLevel = MAX(0.5, MIN(10.0, zoomLevel));
    _zoomLevel = zoomLevel;

    CGFloat angle = 0.0;
    if (zoomLevel < 1.0) {
        CGFloat delta = (zoomLevel - 0.5) / 0.5;
        angle = TGDegreesToRadians(20.8) * (1.0 - delta);
    } else if (zoomLevel < 2.0) {
        CGFloat delta = zoomLevel - 1.0;
        angle = TGDegreesToRadians(-22.0) * delta;
    } else {
        CGFloat delta = (zoomLevel - 2.0) / 8.0;
        angle = TGDegreesToRadians(-22.0) + TGDegreesToRadians(-68.0) * delta;
    }
    _backgroundView.transform = CGAffineTransformMakeRotation(angle);
}

- (void)setHidden:(BOOL)hidden
{
    self.alpha = hidden ? 0.0f : 1.0f;
    super.hidden = hidden;
}

// Animated variant: fades alpha and only flips the underlying hidden flag
// once the fade-out completes (and was not interrupted).
- (void)setHidden:(bool)hidden animated:(bool)animated
{
    if (animated)
    {
        super.hidden = false;
        self.userInteractionEnabled = false;
        [UIView animateWithDuration:0.25f animations:^
        {
            self.alpha = hidden ? 0.0f : 1.0f;
        } completion:^(BOOL finished)
        {
            self.userInteractionEnabled = true;
            if (finished)
                self.hidden = hidden;
        }];
    }
    else
    {
        self.alpha = hidden ? 0.0f : 1.0f;
        super.hidden = hidden;
    }
}

@end

View File

@ -245,19 +245,6 @@
if (_selectionContext.allowGrouping)
{
/*_groupButton = [[TGMediaPickerGroupButton alloc] initWithFrame:CGRectMake(0, 0, 38.0f, 38.0f)];
[_groupButton setHidden:true animated:false];
_groupButton.selected = _selectionContext.grouping;
[_groupButton addTarget:self action:@selector(toggleGrouping) forControlEvents:UIControlEventTouchUpInside];
[_wrapperView addSubview:_groupButton];
_groupingChangedDisposable = [[_selectionContext groupingChangedSignal] startWithNext:^(NSNumber *next)
{
__strong TGMediaPickerGalleryInterfaceView *strongSelf = weakSelf;
if (strongSelf != nil)
[strongSelf->_groupButton setSelected:next.boolValue];
}];*/
if (_editingContext != nil)
{
_timersChangedDisposable = [_editingContext.timersUpdatedSignal startWithNext:^(__unused NSNumber *next)

View File

@ -300,8 +300,8 @@ typedef enum
}
CGFloat minSide = MIN(_wrapperView.frame.size.width, _wrapperView.frame.size.height);
CGFloat diameter = minSide > 320.0f ? 240.0f : 216.0f;
CGFloat shadowSize = minSide > 320.0f ? 21.0f : 19.0f;
CGFloat diameter = MIN(404.0, minSide - 24.0f);
CGFloat shadowSize = 21.0f;
CGFloat circleWrapperViewLength = diameter + shadowSize * 2.0;
_circleWrapperView = [[UIView alloc] initWithFrame:(CGRect){
@ -335,14 +335,14 @@ typedef enum
_placeholderView.accessibilityIgnoresInvertColors = true;
}
CGFloat ringViewLength = minSide > 320.0f ? 260.0f : 234.0f;
CGFloat ringViewLength = diameter - 8.0f;
_ringView = [[TGVideoMessageRingView alloc] initWithFrame:(CGRect){
.origin.x = (_circleWrapperView.bounds.size.width - ringViewLength) / 2.0f,
.origin.y = (_circleWrapperView.bounds.size.height - ringViewLength) / 2.0f,
.size.width = ringViewLength,
.size.height = ringViewLength
}];
_ringView.accentColor = self.pallete != nil ? self.pallete.buttonColor : TGAccentColor();
_ringView.accentColor = [UIColor colorWithWhite:1.0 alpha:0.6];
[_circleWrapperView addSubview:_ringView];
CGRect controlsFrame = _controlsFrame;

View File

@ -295,6 +295,9 @@ private final class CallVideoNode: ASDisplayNode, PreviewVideoNode {
}
func updateIsBlurred(isBlurred: Bool, light: Bool = false, animated: Bool = true) {
if self.hasScheduledUnblur {
self.hasScheduledUnblur = false
}
if self.isBlurred == isBlurred {
return
}
@ -326,18 +329,22 @@ private final class CallVideoNode: ASDisplayNode, PreviewVideoNode {
}
}
private var hasScheduledUnblur = false
func flip(withBackground: Bool) {
if withBackground {
self.backgroundColor = .black
}
UIView.transition(with: withBackground ? self.videoTransformContainer.view : self.view, duration: 0.4, options: [.transitionFlipFromLeft, .curveEaseOut], animations: {
UIView.performWithoutAnimation {
self.updateIsBlurred(isBlurred: true, light: true, animated: false)
self.updateIsBlurred(isBlurred: true, light: false, animated: false)
}
}) { finished in
self.backgroundColor = nil
self.hasScheduledUnblur = true
Queue.mainQueue().after(0.5) {
self.updateIsBlurred(isBlurred: false)
if self.hasScheduledUnblur {
self.updateIsBlurred(isBlurred: false)
}
}
}
}

View File

@ -99,6 +99,9 @@ final class GroupVideoNode: ASDisplayNode, PreviewVideoNode {
}
func updateIsBlurred(isBlurred: Bool, light: Bool = false, animated: Bool = true) {
if self.hasScheduledUnblur {
self.hasScheduledUnblur = false
}
if self.isBlurred == isBlurred {
return
}
@ -128,6 +131,7 @@ final class GroupVideoNode: ASDisplayNode, PreviewVideoNode {
}
}
private var hasScheduledUnblur = false
func flip(withBackground: Bool) {
if withBackground {
self.backgroundColor = .black
@ -145,16 +149,21 @@ final class GroupVideoNode: ASDisplayNode, PreviewVideoNode {
}
}) { finished in
self.backgroundColor = nil
self.hasScheduledUnblur = true
if let snapshotView = snapshotView {
Queue.mainQueue().after(0.3) {
snapshotView.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2, removeOnCompletion: false, completion: { [weak snapshotView] _ in
snapshotView?.removeFromSuperview()
})
self.updateIsBlurred(isBlurred: false)
if self.hasScheduledUnblur {
self.updateIsBlurred(isBlurred: false)
}
}
} else {
Queue.mainQueue().after(0.4) {
self.updateIsBlurred(isBlurred: false)
if self.hasScheduledUnblur {
self.updateIsBlurred(isBlurred: false)
}
}
}
}

View File

@ -957,7 +957,11 @@ public final class PresentationCallImpl: PresentationCall {
screencastCapturer.injectPixelBuffer(screencastFrame.0, rotation: screencastFrame.1)
}))
self.screencastAudioDataDisposable.set((screencastBufferServerContext.audioData
|> deliverOnMainQueue).start(next: { _ in
|> deliverOnMainQueue).start(next: { [weak self] data in
guard let strongSelf = self else {
return
}
strongSelf.ongoingContext?.addExternalAudioData(data: data)
}))
self.screencastStateDisposable.set((screencastBufferServerContext.isActive
|> distinctUntilChanged

View File

@ -121,7 +121,6 @@ private class VoiceChatCameraPreviewControllerNode: ViewControllerTracingNode, U
private let dimNode: ASDisplayNode
private let wrappingScrollNode: ASScrollNode
private let contentContainerNode: ASDisplayNode
private let effectNode: ASDisplayNode
private let backgroundNode: ASDisplayNode
private let contentBackgroundNode: ASDisplayNode
private let titleNode: ASTextNode
@ -129,18 +128,13 @@ private class VoiceChatCameraPreviewControllerNode: ViewControllerTracingNode, U
private let shimmerNode: ShimmerEffectForegroundNode
private let doneButton: SolidRoundedButtonNode
private var broadcastPickerView: UIView?
private let cancelButton: SolidRoundedButtonNode
private let microphoneButton: HighlightTrackingButtonNode
private let microphoneEffectView: UIVisualEffectView
private let microphoneIconNode: VoiceChatMicrophoneNode
private let cancelButton: HighlightableButtonNode
private let placeholderTextNode: ImmediateTextNode
private let placeholderIconNode: ASImageNode
private let tabsNode: TabsSegmentedControlNode
private var selectedTabIndex: Int = 0
private var wheelNode: WheelControlNode
private var selectedTabIndex: Int = 1
private var containerLayout: (ContainerViewLayout, CGFloat)?
private var applicationStateDisposable: Disposable?
@ -176,15 +170,7 @@ private class VoiceChatCameraPreviewControllerNode: ViewControllerTracingNode, U
self.backgroundNode.clipsToBounds = true
self.backgroundNode.cornerRadius = 16.0
let backgroundColor = UIColor(rgb: 0x1c1c1e)
let textColor: UIColor = .white
let buttonColor: UIColor = UIColor(rgb: 0x2b2b2f)
let buttonTextColor: UIColor = .white
let blurStyle: UIBlurEffect.Style = .dark
self.effectNode = ASDisplayNode(viewBlock: {
return UIVisualEffectView(effect: UIBlurEffect(style: blurStyle))
})
let backgroundColor = UIColor(rgb: 0x000000)
self.contentBackgroundNode = ASDisplayNode()
self.contentBackgroundNode.backgroundColor = backgroundColor
@ -192,9 +178,9 @@ private class VoiceChatCameraPreviewControllerNode: ViewControllerTracingNode, U
let title = self.presentationData.strings.VoiceChat_VideoPreviewTitle
self.titleNode = ASTextNode()
self.titleNode.attributedText = NSAttributedString(string: title, font: Font.bold(17.0), textColor: textColor)
self.titleNode.attributedText = NSAttributedString(string: title, font: Font.bold(17.0), textColor: UIColor(rgb: 0xffffff))
self.doneButton = SolidRoundedButtonNode(theme: SolidRoundedButtonTheme(backgroundColor: accentColor, foregroundColor: .white), font: .bold, height: 52.0, cornerRadius: 11.0, gloss: false)
self.doneButton = SolidRoundedButtonNode(theme: SolidRoundedButtonTheme(backgroundColor: UIColor(rgb: 0xffffff), foregroundColor: UIColor(rgb: 0x4f5352)), font: .bold, height: 48.0, cornerRadius: 24.0, gloss: false)
self.doneButton.title = self.presentationData.strings.VoiceChat_VideoPreviewContinue
if #available(iOS 12.0, *) {
@ -206,8 +192,8 @@ private class VoiceChatCameraPreviewControllerNode: ViewControllerTracingNode, U
self.broadcastPickerView = broadcastPickerView
}
self.cancelButton = SolidRoundedButtonNode(theme: SolidRoundedButtonTheme(backgroundColor: buttonColor, foregroundColor: buttonTextColor), font: .regular, height: 52.0, cornerRadius: 11.0, gloss: false)
self.cancelButton.title = self.presentationData.strings.Common_Cancel
self.cancelButton = HighlightableButtonNode()
self.cancelButton.setAttributedTitle(NSAttributedString(string: self.presentationData.strings.Common_Cancel, font: Font.regular(17.0), textColor: UIColor(rgb: 0xffffff)), for: [])
self.previewContainerNode = ASDisplayNode()
self.previewContainerNode.clipsToBounds = true
@ -217,19 +203,6 @@ private class VoiceChatCameraPreviewControllerNode: ViewControllerTracingNode, U
self.shimmerNode = ShimmerEffectForegroundNode(size: 200.0)
self.previewContainerNode.addSubnode(self.shimmerNode)
self.microphoneButton = HighlightTrackingButtonNode()
self.microphoneButton.isSelected = true
self.microphoneEffectView = UIVisualEffectView(effect: UIBlurEffect(style: .light))
self.microphoneEffectView.clipsToBounds = true
self.microphoneEffectView.layer.cornerRadius = 24.0
self.microphoneEffectView.isUserInteractionEnabled = false
self.microphoneIconNode = VoiceChatMicrophoneNode()
// self.microphoneIconNode.alpha = 0.75
self.microphoneIconNode.update(state: .init(muted: false, filled: true, color: .white), animated: false)
self.tabsNode = TabsSegmentedControlNode(items: [TabsSegmentedControlNode.Item(title: "Front Camera"), TabsSegmentedControlNode.Item(title: "Back Camera"), TabsSegmentedControlNode.Item(title: "Share Screen")], selectedIndex: 0)
self.placeholderTextNode = ImmediateTextNode()
self.placeholderTextNode.alpha = 0.0
self.placeholderTextNode.maximumNumberOfLines = 3
@ -240,6 +213,8 @@ private class VoiceChatCameraPreviewControllerNode: ViewControllerTracingNode, U
self.placeholderIconNode.contentMode = .scaleAspectFit
self.placeholderIconNode.displaysAsynchronously = false
self.wheelNode = WheelControlNode(items: [WheelControlNode.Item(title: self.presentationData.strings.VoiceChat_VideoPreviewPhoneScreen), WheelControlNode.Item(title: self.presentationData.strings.VoiceChat_VideoPreviewFrontCamera), WheelControlNode.Item(title: self.presentationData.strings.VoiceChat_VideoPreviewBackCamera)], selectedIndex: self.selectedTabIndex)
super.init()
self.backgroundColor = nil
@ -254,8 +229,8 @@ private class VoiceChatCameraPreviewControllerNode: ViewControllerTracingNode, U
self.wrappingScrollNode.addSubnode(self.backgroundNode)
self.wrappingScrollNode.addSubnode(self.contentContainerNode)
self.backgroundNode.addSubnode(self.effectNode)
self.backgroundNode.addSubnode(self.contentBackgroundNode)
self.contentContainerNode.addSubnode(self.previewContainerNode)
self.contentContainerNode.addSubnode(self.titleNode)
self.contentContainerNode.addSubnode(self.doneButton)
if let broadcastPickerView = self.broadcastPickerView {
@ -263,32 +238,25 @@ private class VoiceChatCameraPreviewControllerNode: ViewControllerTracingNode, U
}
self.contentContainerNode.addSubnode(self.cancelButton)
self.contentContainerNode.addSubnode(self.previewContainerNode)
self.previewContainerNode.addSubnode(self.cameraNode)
self.previewContainerNode.addSubnode(self.placeholderIconNode)
self.previewContainerNode.addSubnode(self.placeholderTextNode)
if self.cameraNode is GroupVideoNode {
self.previewContainerNode.addSubnode(self.microphoneButton)
self.microphoneButton.view.addSubview(self.microphoneEffectView)
self.microphoneButton.addSubnode(self.microphoneIconNode)
}
self.previewContainerNode.addSubnode(self.tabsNode)
self.previewContainerNode.addSubnode(self.wheelNode)
self.tabsNode.selectedIndexChanged = { [weak self] index in
self.wheelNode.selectedIndexChanged = { [weak self] index in
if let strongSelf = self {
if (index == 0 && strongSelf.selectedTabIndex == 1) || (index == 1 && strongSelf.selectedTabIndex == 0) {
if (index == 1 && strongSelf.selectedTabIndex == 2) || (index == 2 && strongSelf.selectedTabIndex == 1) {
strongSelf.switchCamera?()
}
if index == 2 && [0, 1].contains(strongSelf.selectedTabIndex) {
if index == 0 && [1, 2].contains(strongSelf.selectedTabIndex) {
strongSelf.broadcastPickerView?.isHidden = false
strongSelf.cameraNode.updateIsBlurred(isBlurred: true, light: false, animated: true)
let transition = ContainedViewLayoutTransition.animated(duration: 0.3, curve: .easeInOut)
transition.updateAlpha(node: strongSelf.placeholderTextNode, alpha: 1.0)
transition.updateAlpha(node: strongSelf.placeholderIconNode, alpha: 1.0)
} else if [0, 1].contains(index) && strongSelf.selectedTabIndex == 2 {
} else if [1, 2].contains(index) && strongSelf.selectedTabIndex == 0 {
strongSelf.broadcastPickerView?.isHidden = true
strongSelf.cameraNode.updateIsBlurred(isBlurred: false, light: false, animated: true)
let transition = ContainedViewLayoutTransition.animated(duration: 0.3, curve: .easeInOut)
@ -301,27 +269,10 @@ private class VoiceChatCameraPreviewControllerNode: ViewControllerTracingNode, U
self.doneButton.pressed = { [weak self] in
if let strongSelf = self {
strongSelf.shareCamera?(strongSelf.microphoneButton.isSelected)
}
}
self.cancelButton.pressed = { [weak self] in
if let strongSelf = self {
strongSelf.cancel?()
}
}
self.microphoneButton.addTarget(self, action: #selector(self.microphonePressed), forControlEvents: .touchUpInside)
self.microphoneButton.highligthedChanged = { [weak self] highlighted in
if let strongSelf = self {
if highlighted {
let transition: ContainedViewLayoutTransition = .animated(duration: 0.3, curve: .spring)
transition.updateSublayerTransformScale(node: strongSelf.microphoneButton, scale: 0.9)
} else {
let transition: ContainedViewLayoutTransition = .animated(duration: 0.5, curve: .spring)
transition.updateSublayerTransformScale(node: strongSelf.microphoneButton, scale: 1.0)
}
strongSelf.shareCamera?(true)
}
}
self.cancelButton.addTarget(self, action: #selector(self.cancelPressed), forControlEvents: .touchUpInside)
self.readyDisposable.set(self.cameraNode.ready.start(next: { [weak self] ready in
if let strongSelf = self, ready {
@ -338,12 +289,6 @@ private class VoiceChatCameraPreviewControllerNode: ViewControllerTracingNode, U
self.applicationStateDisposable?.dispose()
}
@objc private func microphonePressed() {
self.hapticFeedback.impact(.light)
self.microphoneButton.isSelected = !self.microphoneButton.isSelected
self.microphoneIconNode.update(state: .init(muted: !self.microphoneButton.isSelected, filled: true, color: .white), animated: true)
}
func updatePresentationData(_ presentationData: PresentationData) {
self.presentationData = presentationData
}
@ -351,11 +296,37 @@ private class VoiceChatCameraPreviewControllerNode: ViewControllerTracingNode, U
override func didLoad() {
super.didLoad()
let leftSwipeGestureRecognizer = UISwipeGestureRecognizer(target: self, action: #selector(self.leftSwipeGesture))
leftSwipeGestureRecognizer.direction = .left
let rightSwipeGestureRecognizer = UISwipeGestureRecognizer(target: self, action: #selector(self.rightSwipeGesture))
rightSwipeGestureRecognizer.direction = .right
self.view.addGestureRecognizer(leftSwipeGestureRecognizer)
self.view.addGestureRecognizer(rightSwipeGestureRecognizer)
if #available(iOSApplicationExtension 11.0, iOS 11.0, *) {
self.wrappingScrollNode.view.contentInsetAdjustmentBehavior = .never
}
}
@objc func leftSwipeGesture() {
if self.selectedTabIndex < 2 {
self.wheelNode.setSelectedIndex(self.selectedTabIndex + 1, animated: true)
self.wheelNode.selectedIndexChanged(self.wheelNode.selectedIndex)
}
}
@objc func rightSwipeGesture() {
if self.selectedTabIndex > 0 {
self.wheelNode.setSelectedIndex(self.selectedTabIndex - 1, animated: true)
self.wheelNode.selectedIndexChanged(self.wheelNode.selectedIndex)
}
}
@objc func cancelPressed() {
self.cancel?()
}
@objc func dimTapGesture(_ recognizer: UITapGestureRecognizer) {
if case .ended = recognizer.state {
self.cancel?()
@ -447,98 +418,71 @@ private class VoiceChatCameraPreviewControllerNode: ViewControllerTracingNode, U
isTablet = false
}
var insets = layout.insets(options: [.statusBar, .input])
let cleanInsets = layout.insets(options: [.statusBar])
var insets = layout.insets(options: [.statusBar])
insets.top = max(10.0, insets.top)
var buttonOffset: CGFloat = 60.0
let bottomInset: CGFloat = isTablet ? 31.0 : 10.0 + cleanInsets.bottom
let titleHeight: CGFloat = 54.0
var contentHeight = titleHeight + bottomInset + 52.0 + 17.0
let innerContentHeight: CGFloat = layout.size.height - contentHeight - 160.0
var width = horizontalContainerFillingSizeForLayout(layout: layout, sideInset: layout.safeInsets.left)
let contentSize: CGSize
if isLandscape {
if isTablet {
width = 870.0
contentHeight = 690.0
contentSize = CGSize(width: 870.0, height: 690.0)
} else {
contentHeight = layout.size.height
width = layout.size.width
contentSize = CGSize(width: layout.size.width, height: layout.size.height)
}
} else {
if isTablet {
width = 600.0
contentHeight = 960.0
contentSize = CGSize(width: 600.0, height: 960.0)
} else {
contentHeight = titleHeight + bottomInset + 52.0 + 17.0 + innerContentHeight + buttonOffset
contentSize = CGSize(width: layout.size.width, height: layout.size.height - insets.top - 8.0)
}
}
let previewInset: CGFloat = 16.0
let sideInset = floor((layout.size.width - width) / 2.0)
let sideInset = floor((layout.size.width - contentSize.width) / 2.0)
let contentFrame: CGRect
if isTablet {
contentFrame = CGRect(origin: CGPoint(x: sideInset, y: floor((layout.size.height - contentHeight) / 2.0)), size: CGSize(width: width, height: contentHeight))
contentFrame = CGRect(origin: CGPoint(x: sideInset, y: floor((layout.size.height - contentSize.height) / 2.0)), size: contentSize)
} else {
contentFrame = CGRect(origin: CGPoint(x: sideInset, y: layout.size.height - contentHeight), size: CGSize(width: width, height: contentHeight))
contentFrame = CGRect(origin: CGPoint(x: sideInset, y: layout.size.height - contentSize.height), size: contentSize)
}
var backgroundFrame = CGRect(origin: CGPoint(x: contentFrame.minX, y: contentFrame.minY), size: CGSize(width: contentFrame.width, height: contentFrame.height))
var backgroundFrame = contentFrame
if !isTablet {
backgroundFrame.size.height += 2000.0
}
if backgroundFrame.minY < contentFrame.minY {
backgroundFrame.origin.y = contentFrame.minY
}
transition.updateAlpha(node: self.titleNode, alpha: isLandscape && !isTablet ? 0.0 : 1.0)
transition.updateFrame(node: self.backgroundNode, frame: backgroundFrame)
transition.updateFrame(node: self.effectNode, frame: CGRect(origin: CGPoint(), size: backgroundFrame.size))
transition.updateFrame(node: self.contentBackgroundNode, frame: CGRect(origin: CGPoint(), size: backgroundFrame.size))
transition.updateFrame(node: self.wrappingScrollNode, frame: CGRect(origin: CGPoint(), size: layout.size))
transition.updateFrame(node: self.dimNode, frame: CGRect(origin: CGPoint(), size: layout.size))
let titleSize = self.titleNode.measure(CGSize(width: width, height: titleHeight))
let titleFrame = CGRect(origin: CGPoint(x: floor((contentFrame.width - titleSize.width) / 2.0), y: 18.0), size: titleSize)
let titleSize = self.titleNode.measure(CGSize(width: contentFrame.width, height: .greatestFiniteMagnitude))
let titleFrame = CGRect(origin: CGPoint(x: floor((contentFrame.width - titleSize.width) / 2.0), y: 20.0), size: titleSize)
transition.updateFrame(node: self.titleNode, frame: titleFrame)
var previewSize: CGSize
var previewFrame: CGRect
let previewAspectRatio: CGFloat = 1.85
if isLandscape {
let previewHeight = contentHeight - 21.0 - 52.0 - 10.0
previewSize = CGSize(width: min(contentFrame.width - layout.safeInsets.left - layout.safeInsets.right, previewHeight * 1.7778), height: previewHeight)
if isTablet {
previewSize.width -= previewInset * 2.0
previewSize.height -= 46.0
}
let previewHeight = contentFrame.height
previewSize = CGSize(width: min(contentFrame.width - layout.safeInsets.left - layout.safeInsets.right, ceil(previewHeight * previewAspectRatio)), height: previewHeight)
previewFrame = CGRect(origin: CGPoint(x: floorToScreenPixels((contentFrame.width - previewSize.width) / 2.0), y: 0.0), size: previewSize)
if isTablet {
previewFrame.origin.y += 56.0
}
} else {
previewSize = CGSize(width: contentFrame.width - previewInset * 2.0, height: contentHeight - 243.0 - bottomInset + (120.0 - buttonOffset))
if isTablet {
previewSize.height += 17.0
}
previewFrame = CGRect(origin: CGPoint(x: previewInset, y: 56.0), size: previewSize)
previewSize = CGSize(width: contentFrame.width, height: min(contentFrame.height, ceil(contentFrame.width * previewAspectRatio)))
previewFrame = CGRect(origin: CGPoint(), size: previewSize)
}
transition.updateFrame(node: self.previewContainerNode, frame: previewFrame)
transition.updateFrame(node: self.shimmerNode, frame: CGRect(origin: CGPoint(), size: previewFrame.size))
self.shimmerNode.update(foregroundColor: UIColor(rgb: 0xffffff, alpha: 0.07))
self.shimmerNode.updateAbsoluteRect(previewFrame, within: layout.size)
let cancelButtonSize = self.cancelButton.measure(CGSize(width: (previewFrame.width - titleSize.width) / 2.0, height: .greatestFiniteMagnitude))
let cancelButtonFrame = CGRect(origin: CGPoint(x: previewFrame.minX + 17.0, y: 20.0), size: cancelButtonSize)
transition.updateFrame(node: self.cancelButton, frame: cancelButtonFrame)
self.cameraNode.frame = CGRect(origin: CGPoint(), size: previewSize)
self.cameraNode.updateLayout(size: previewSize, layoutMode: isLandscape ? .fillHorizontal : .fillVertical, transition: .immediate)
let microphoneFrame = CGRect(x: 8.0, y: previewSize.height - 48.0 - 8.0 - 48.0, width: 48.0, height: 48.0)
transition.updateFrame(node: self.microphoneButton, frame: microphoneFrame)
transition.updateFrame(view: self.microphoneEffectView, frame: CGRect(origin: CGPoint(), size: microphoneFrame.size))
transition.updateFrameAsPositionAndBounds(node: self.microphoneIconNode, frame: CGRect(origin: CGPoint(x: 1.0, y: 0.0), size: microphoneFrame.size).insetBy(dx: 6.0, dy: 6.0))
self.microphoneIconNode.transform = CATransform3DMakeScale(1.2, 1.2, 1.0)
let tabsFrame = CGRect(x: 8.0, y: previewSize.height - 40.0 - 8.0, width: previewSize.width - 16.0, height: 40.0)
self.tabsNode.updateLayout(size: tabsFrame.size, transition: transition)
transition.updateFrame(node: self.tabsNode, frame: tabsFrame)
self.placeholderTextNode.attributedText = NSAttributedString(string: presentationData.strings.VoiceChat_VideoPreviewShareScreenInfo, font: Font.semibold(14.0), textColor: .white)
self.placeholderTextNode.attributedText = NSAttributedString(string: presentationData.strings.VoiceChat_VideoPreviewShareScreenInfo, font: Font.semibold(16.0), textColor: .white)
self.placeholderIconNode.image = generateTintedImage(image: UIImage(bundleImageName: isTablet ? "Call/ScreenShareTablet" : "Call/ScreenSharePhone"), color: .white)
let placeholderTextSize = self.placeholderTextNode.updateLayout(CGSize(width: previewSize.width - 80.0, height: 100.0))
@ -547,44 +491,26 @@ private class VoiceChatCameraPreviewControllerNode: ViewControllerTracingNode, U
transition.updateFrame(node: self.placeholderIconNode, frame: CGRect(origin: CGPoint(x: floor((previewSize.width - imageSize.width) / 2.0), y: floorToScreenPixels(previewSize.height / 2.0) - imageSize.height - 8.0), size: imageSize))
}
if isLandscape {
var buttonsCount: Int = 2
let buttonInset: CGFloat = 16.0
let buttonMaxWidth: CGFloat = 360.0
let buttonInset: CGFloat = 6.0
var leftButtonInset = buttonInset
let availableWidth: CGFloat
if isTablet {
availableWidth = contentFrame.width - layout.safeInsets.left - layout.safeInsets.right - previewInset * 2.0
leftButtonInset += previewInset
} else {
availableWidth = contentFrame.width - layout.safeInsets.left - layout.safeInsets.right
}
let buttonWidth = floorToScreenPixels((availableWidth - CGFloat(buttonsCount + 1) * buttonInset) / CGFloat(buttonsCount))
let buttonWidth = min(buttonMaxWidth, contentFrame.width - buttonInset * 2.0)
let doneButtonHeight = self.doneButton.updateLayout(width: buttonWidth, transition: transition)
transition.updateFrame(node: self.doneButton, frame: CGRect(x: floorToScreenPixels((contentFrame.width - buttonWidth) / 2.0), y: previewFrame.maxY - doneButtonHeight - buttonInset, width: buttonWidth, height: doneButtonHeight))
self.broadcastPickerView?.frame = self.doneButton.frame
let cameraButtonHeight = self.doneButton.updateLayout(width: buttonWidth, transition: transition)
let cancelButtonHeight = self.cancelButton.updateLayout(width: buttonWidth, transition: transition)
transition.updateFrame(node: self.cancelButton, frame: CGRect(x: layout.safeInsets.left + leftButtonInset, y: previewFrame.maxY + 10.0, width: buttonWidth, height: cancelButtonHeight))
transition.updateFrame(node: self.doneButton, frame: CGRect(x: layout.safeInsets.left + leftButtonInset + buttonWidth + buttonInset, y: previewFrame.maxY + 10.0, width: buttonWidth, height: cameraButtonHeight))
self.broadcastPickerView?.frame = self.doneButton.frame
} else {
let bottomInset = isTablet ? 21.0 : insets.bottom + 16.0
let buttonInset: CGFloat = 16.0
let cameraButtonHeight = self.doneButton.updateLayout(width: contentFrame.width - buttonInset * 2.0, transition: transition)
transition.updateFrame(node: self.doneButton, frame: CGRect(x: buttonInset, y: contentHeight - cameraButtonHeight - bottomInset - buttonOffset, width: contentFrame.width, height: cameraButtonHeight))
self.broadcastPickerView?.frame = self.doneButton.frame
let cancelButtonHeight = self.cancelButton.updateLayout(width: contentFrame.width - buttonInset * 2.0, transition: transition)
transition.updateFrame(node: self.cancelButton, frame: CGRect(x: buttonInset, y: contentHeight - cancelButtonHeight - bottomInset, width: contentFrame.width, height: cancelButtonHeight))
}
let wheelFrame = CGRect(origin: CGPoint(x: 16.0 + previewFrame.minX, y: previewFrame.maxY - doneButtonHeight - buttonInset - 36.0 - 20.0), size: CGSize(width: previewFrame.width - 32.0, height: 36.0))
self.wheelNode.updateLayout(size: wheelFrame.size, transition: transition)
transition.updateFrame(node: self.wheelNode, frame: wheelFrame)
transition.updateFrame(node: self.contentContainerNode, frame: contentFrame)
}
}
private let textFont = Font.medium(14.0)
private let textFont = Font.with(size: 14.0, design: .camera, weight: .regular)
private let selectedTextFont = Font.with(size: 14.0, design: .camera, weight: .semibold)
class TabsSegmentedControlNode: ASDisplayNode, UIGestureRecognizerDelegate {
private class WheelControlNode: ASDisplayNode, UIGestureRecognizerDelegate {
struct Item: Equatable {
public let title: String
@ -593,12 +519,9 @@ class TabsSegmentedControlNode: ASDisplayNode, UIGestureRecognizerDelegate {
}
}
private var blurEffectView: UIVisualEffectView?
private var vibrancyEffectView: UIVisualEffectView?
private let selectionNode: ASDisplayNode
private let maskNode: ASDisplayNode
private let containerNode: ASDisplayNode
private var itemNodes: [HighlightTrackingButtonNode]
private var highlightedItemNodes: [HighlightTrackingButtonNode]
private var validLayout: CGSize?
@ -632,84 +555,56 @@ class TabsSegmentedControlNode: ASDisplayNode, UIGestureRecognizerDelegate {
public var selectedIndexChanged: (Int) -> Void = { _ in }
private var gestureRecognizer: UIPanGestureRecognizer?
private var gestureSelectedIndex: Int?
public init(items: [Item], selectedIndex: Int) {
self._items = items
self._selectedIndex = selectedIndex
self.selectionNode = ASDisplayNode()
self.selectionNode.clipsToBounds = true
self.selectionNode.backgroundColor = .black
self.selectionNode.alpha = 0.75
self.maskNode = ASDisplayNode()
self.maskNode.setLayerBlock({
let maskLayer = CAGradientLayer()
maskLayer.colors = [UIColor.clear.cgColor, UIColor.white.cgColor, UIColor.white.cgColor, UIColor.clear.cgColor]
maskLayer.locations = [0.0, 0.15, 0.85, 1.0]
maskLayer.startPoint = CGPoint(x: 0.0, y: 0.0)
maskLayer.endPoint = CGPoint(x: 1.0, y: 0.0)
return maskLayer
})
self.containerNode = ASDisplayNode()
self.itemNodes = items.map { item in
let itemNode = HighlightTrackingButtonNode()
itemNode.contentEdgeInsets = UIEdgeInsets(top: 0.0, left: 8.0, bottom: 0.0, right: 8.0)
itemNode.imageNode.isHidden = true
itemNode.titleNode.maximumNumberOfLines = 1
itemNode.titleNode.truncationMode = .byTruncatingTail
itemNode.titleNode.alpha = 0.75
itemNode.accessibilityLabel = item.title
itemNode.accessibilityTraits = [.button]
itemNode.setTitle(item.title, with: textFont, with: .black, for: .normal)
return itemNode
}
self.highlightedItemNodes = items.map { item in
let itemNode = HighlightTrackingButtonNode()
itemNode.isUserInteractionEnabled = false
itemNode.isHidden = true
itemNode.contentEdgeInsets = UIEdgeInsets(top: 0.0, left: 8.0, bottom: 0.0, right: 8.0)
itemNode.imageNode.isHidden = true
itemNode.titleNode.maximumNumberOfLines = 1
itemNode.titleNode.truncationMode = .byTruncatingTail
itemNode.setTitle(item.title, with: textFont, with: .white, for: .normal)
itemNode.hitTestSlop = UIEdgeInsets(top: -10.0, left: -5.0, bottom: -10.0, right: -5.0)
itemNode.setTitle(item.title.uppercased(), with: textFont, with: .white, for: .normal)
itemNode.titleNode.shadowColor = UIColor.black.cgColor
itemNode.titleNode.shadowOffset = CGSize()
itemNode.titleNode.layer.shadowRadius = 2.0
itemNode.titleNode.layer.shadowOpacity = 0.3
itemNode.titleNode.layer.masksToBounds = false
itemNode.titleNode.layer.shouldRasterize = true
itemNode.titleNode.layer.rasterizationScale = UIScreen.main.scale
return itemNode
}
super.init()
self.clipsToBounds = true
if #available(iOS 13.0, *) {
self.layer.cornerCurve = .continuous
self.selectionNode.layer.cornerCurve = .continuous
}
self.addSubnode(self.containerNode)
self.itemNodes.forEach(self.containerNode.addSubnode(_:))
self.setupButtons()
}
override func didLoad() {
super.didLoad()
self.view.layer.mask = self.maskNode.layer
self.view.disablesInteractiveTransitionGestureRecognizer = true
let gestureRecognizer = UIPanGestureRecognizer(target: self, action: #selector(self.panGesture(_:)))
gestureRecognizer.delegate = self
self.view.addGestureRecognizer(gestureRecognizer)
self.gestureRecognizer = gestureRecognizer
let blurEffect = UIBlurEffect(style: .light)
let blurEffectView = UIVisualEffectView(effect: blurEffect)
self.blurEffectView = blurEffectView
self.view.addSubview(blurEffectView)
let vibrancyEffect: UIVibrancyEffect
if #available(iOS 13.0, *) {
vibrancyEffect = UIVibrancyEffect(blurEffect: blurEffect, style: .label)
} else {
vibrancyEffect = UIVibrancyEffect(blurEffect: blurEffect)
}
let vibrancyEffectView = UIVisualEffectView(effect: vibrancyEffect)
self.vibrancyEffectView = vibrancyEffectView
blurEffectView.contentView.addSubview(vibrancyEffectView)
self.itemNodes.forEach(vibrancyEffectView.contentView.addSubnode(_:))
vibrancyEffectView.contentView.addSubnode(self.selectionNode)
self.highlightedItemNodes.forEach(self.addSubnode(_:))
}
func updateLayout(size: CGSize, transition: ContainedViewLayoutTransition) {
@ -717,48 +612,27 @@ class TabsSegmentedControlNode: ASDisplayNode, UIGestureRecognizerDelegate {
let bounds = CGRect(origin: CGPoint(), size: size)
self.cornerRadius = size.height / 2.0
if let blurEffectView = self.blurEffectView {
transition.updateFrame(view: blurEffectView, frame: bounds)
}
if let vibrancyEffectView = self.vibrancyEffectView {
transition.updateFrame(view: vibrancyEffectView, frame: bounds)
}
let selectedIndex: Int
if let gestureSelectedIndex = self.gestureSelectedIndex {
selectedIndex = gestureSelectedIndex
} else {
selectedIndex = self.selectedIndex
}
transition.updateFrame(node: self.maskNode, frame: bounds)
let spacing: CGFloat = 15.0
if !self.itemNodes.isEmpty {
let itemSize = CGSize(width: floorToScreenPixels(size.width / CGFloat(self.itemNodes.count)), height: size.height)
let selectionFrame = CGRect(origin: CGPoint(x: itemSize.width * CGFloat(selectedIndex), y: 0.0), size: itemSize).insetBy(dx: 4.0, dy: 4.0)
transition.updateFrameAsPositionAndBounds(node: self.selectionNode, frame: selectionFrame)
self.selectionNode.cornerRadius = selectionFrame.height / 2.0
var leftOffset: CGFloat = 0.0
var selectedItemNode: ASDisplayNode?
for i in 0 ..< self.itemNodes.count {
let itemNode = self.itemNodes[i]
let highlightedItemNode = self.highlightedItemNodes[i]
let _ = itemNode.measure(itemSize)
transition.updateFrame(node: itemNode, frame: CGRect(origin: CGPoint(x: itemSize.width * CGFloat(i), y: (size.height - itemSize.height) / 2.0), size: itemSize))
transition.updateFrame(node: highlightedItemNode, frame: CGRect(origin: CGPoint(x: itemSize.width * CGFloat(i), y: (size.height - itemSize.height) / 2.0), size: itemSize))
let itemSize = itemNode.measure(size)
transition.updateFrame(node: itemNode, frame: CGRect(origin: CGPoint(x: leftOffset, y: (size.height - itemSize.height) / 2.0), size: itemSize))
let isSelected = selectedIndex == i
leftOffset += itemSize.width + spacing
let isSelected = self.selectedIndex == i
if isSelected {
selectedItemNode = itemNode
}
if itemNode.isSelected != isSelected {
if case .animated = transition {
UIView.transition(with: itemNode.view, duration: 0.2, options: .transitionCrossDissolve, animations: {
itemNode.isSelected = isSelected
highlightedItemNode.isHidden = !isSelected
}, completion: nil)
} else {
itemNode.isSelected = isSelected
highlightedItemNode.isHidden = !isSelected
}
itemNode.isSelected = isSelected
let title = itemNode.attributedTitle(for: .normal)?.string ?? ""
itemNode.setTitle(title, with: isSelected ? selectedTextFont : textFont, with: isSelected ? UIColor(rgb: 0xffd60a) : .white, for: .normal)
if isSelected {
itemNode.accessibilityTraits.insert(.selected)
} else {
@ -766,6 +640,27 @@ class TabsSegmentedControlNode: ASDisplayNode, UIGestureRecognizerDelegate {
}
}
}
let totalWidth = leftOffset - spacing
if let selectedItemNode = selectedItemNode {
let itemCenter = selectedItemNode.frame.center
transition.updateFrame(node: self.containerNode, frame: CGRect(x: bounds.width / 2.0 - itemCenter.x, y: 0.0, width: totalWidth, height: bounds.height))
for i in 0 ..< self.itemNodes.count {
let itemNode = self.itemNodes[i]
let convertedBounds = itemNode.view.convert(itemNode.bounds, to: self.view)
let position = convertedBounds.center
let offset = position.x - bounds.width / 2.0
let angle = abs(offset / bounds.width * 0.99)
let sign: CGFloat = offset > 0 ? 1.0 : -1.0
var transform = CATransform3DMakeTranslation(-22.0 * angle * angle * sign, 0.0, 0.0)
transform = CATransform3DRotate(transform, angle, 0.0, sign, 0.0)
transition.animateView {
itemNode.transform = transform
}
}
}
}
}
@ -773,61 +668,6 @@ class TabsSegmentedControlNode: ASDisplayNode, UIGestureRecognizerDelegate {
for i in 0 ..< self.itemNodes.count {
let itemNode = self.itemNodes[i]
itemNode.addTarget(self, action: #selector(self.buttonPressed(_:)), forControlEvents: .touchUpInside)
itemNode.highligthedChanged = { [weak self, weak itemNode] highlighted in
if let strongSelf = self, let itemNode = itemNode {
let transition = ContainedViewLayoutTransition.animated(duration: 0.25, curve: .easeInOut)
if strongSelf.selectedIndex == i {
if let gestureRecognizer = strongSelf.gestureRecognizer, case .began = gestureRecognizer.state {
} else {
strongSelf.updateButtonsHighlights(highlightedIndex: highlighted ? i : nil, gestureSelectedIndex: strongSelf.gestureSelectedIndex)
}
} else if highlighted {
transition.updateAlpha(node: itemNode, alpha: 0.4)
}
if !highlighted {
transition.updateAlpha(node: itemNode, alpha: 1.0)
}
}
}
}
}
private func updateButtonsHighlights(highlightedIndex: Int?, gestureSelectedIndex: Int?) {
let transition = ContainedViewLayoutTransition.animated(duration: 0.25, curve: .easeInOut)
if highlightedIndex == nil && gestureSelectedIndex == nil {
transition.updateTransformScale(node: self.selectionNode, scale: 1.0)
} else {
transition.updateTransformScale(node: self.selectionNode, scale: 0.96)
}
for i in 0 ..< self.itemNodes.count {
let itemNode = self.itemNodes[i]
let highlightedItemNode = self.highlightedItemNodes[i]
if i == highlightedIndex || i == gestureSelectedIndex {
transition.updateTransformScale(node: itemNode, scale: 0.96)
transition.updateTransformScale(node: highlightedItemNode, scale: 0.96)
} else {
transition.updateTransformScale(node: itemNode, scale: 1.0)
transition.updateTransformScale(node: highlightedItemNode, scale: 1.0)
}
}
}
private func updateButtonsHighlights() {
let transition = ContainedViewLayoutTransition.animated(duration: 0.25, curve: .easeInOut)
if let gestureSelectedIndex = self.gestureSelectedIndex {
for i in 0 ..< self.itemNodes.count {
let itemNode = self.itemNodes[i]
let highlightedItemNode = self.highlightedItemNodes[i]
transition.updateTransformScale(node: itemNode, scale: i == gestureSelectedIndex ? 0.96 : 1.0)
transition.updateTransformScale(node: highlightedItemNode, scale: i == gestureSelectedIndex ? 0.96 : 1.0)
}
} else {
for itemNode in self.itemNodes {
transition.updateTransformScale(node: itemNode, scale: 1.0)
}
for itemNode in self.highlightedItemNodes {
transition.updateTransformScale(node: itemNode, scale: 1.0)
}
}
}
@ -842,42 +682,4 @@ class TabsSegmentedControlNode: ASDisplayNode, UIGestureRecognizerDelegate {
self.updateLayout(size: size, transition: .animated(duration: 0.2, curve: .slide))
}
}
public override func gestureRecognizerShouldBegin(_ gestureRecognizer: UIGestureRecognizer) -> Bool {
return self.selectionNode.frame.contains(gestureRecognizer.location(in: self.view))
}
@objc private func panGesture(_ recognizer: UIPanGestureRecognizer) {
let location = recognizer.location(in: self.view)
switch recognizer.state {
case .changed:
if !self.selectionNode.frame.contains(location) {
let point = CGPoint(x: max(0.0, min(self.bounds.width, location.x)), y: 1.0)
for i in 0 ..< self.itemNodes.count {
let itemNode = self.itemNodes[i]
if itemNode.frame.contains(point) {
if i != self.gestureSelectedIndex {
self.gestureSelectedIndex = i
self.updateButtonsHighlights(highlightedIndex: nil, gestureSelectedIndex: i)
if let size = self.validLayout {
self.updateLayout(size: size, transition: .animated(duration: 0.35, curve: .slide))
}
}
break
}
}
}
case .ended:
if let gestureSelectedIndex = self.gestureSelectedIndex {
if gestureSelectedIndex != self.selectedIndex {
self._selectedIndex = gestureSelectedIndex
self.selectedIndexChanged(gestureSelectedIndex)
}
self.gestureSelectedIndex = nil
}
self.updateButtonsHighlights(highlightedIndex: nil, gestureSelectedIndex: nil)
default:
break
}
}
}

View File

@ -9,14 +9,10 @@
</head>
<body>
<div class="container">
<iframe id="player" src="https://player.vimeo.com/video/%@?api=1&badge=0&byline=0&portrait=0&title=0&player_id=player" width="100%" height="100%" frameborder="0"></iframe>
<iframe id="player" src="https://player.vimeo.com/video/%@?badge=0&byline=0&portrait=0&title=0" width="100%" height="100%" frameborder="0"></iframe>
</div>
<script src="https://player.vimeo.com/api/player.js"></script>
<script>
var Froogaloop=function(){function e(a){return new e.fn.init(a)}function g(a,c,b){if(!b.contentWindow.postMessage)return!1;a=JSON.stringify({method:a,value:c});b.contentWindow.postMessage(a,h)}function l(a){var c,b;try{c=JSON.parse(a.data),b=c.event||c.method}catch(e){}"ready"!=b||k||(k=!0);if(!/^https?:\/\/player.vimeo.com/.test(a.origin))return!1;"*"===h&&(h=a.origin);a=c.value;var m=c.data,f=""===f?null:c.player_id;c=f?d[f][b]:d[b];b=[];if(!c)return!1;void 0!==a&&b.push(a);m&&b.push(m);f&&b.push(f);
return 0<b.length?c.apply(null,b):c.call()}function n(a,c,b){b?(d[b]||(d[b]={}),d[b][a]=c):d[a]=c}var d={},k=!1,h="*";e.fn=e.prototype={element:null,init:function(a){"string"===typeof a&&(a=document.getElementById(a));this.element=a;return this},api:function(a,c){if(!this.element||!a)return!1;var b=this.element,d=""!==b.id?b.id:null,e=c&&c.constructor&&c.call&&c.apply?null:c,f=c&&c.constructor&&c.call&&c.apply?c:null;f&&n(a,f,d);g(a,e,b);return this},addEvent:function(a,c){if(!this.element)return!1;
var b=this.element,d=""!==b.id?b.id:null;n(a,c,d);"ready"!=a?g("addEventListener",a,b):"ready"==a&&k&&c.call(null,d);return this},removeEvent:function(a){if(!this.element)return!1;var c=this.element,b=""!==c.id?c.id:null;a:{if(b&&d[b]){if(!d[b][a]){b=!1;break a}d[b][a]=null}else{if(!d[a]){b=!1;break a}d[a]=null}b=!0}"ready"!=a&&b&&g("removeEventListener",a,c)}};e.fn.init.prototype=e.fn;window.addEventListener?window.addEventListener("message",l,!1):window.attachEvent("onmessage",l);return window.Froogaloop=
window.$f=e}();
var iframe;
var player;
function invoke(command) {
@ -26,7 +22,7 @@
var played = false;
function play() {
if (played) {
player.api("play");
player.play();
} else {
invoke("autoplay");
played = true;
@ -34,43 +30,45 @@
}
function pause() {
player.api("pause");
player.pause();
}
function seek(timestamp) {
player.api("seekTo", timestamp);
player.setCurrentTime(timestamp)
}
function setRate(rate) {
player.setPlaybackRate(rate)
}
(function() {
var playbackState = 0;
var playbackState = 1;
var duration = 0.0;
var position = 0.0;
var downloadProgress = 0.0;
iframe = document.querySelectorAll("iframe")[0];
player = $f(iframe);
player = new Vimeo.Player(iframe);
player.getCurrentTime().then(function(seconds) {
position = seconds;
});
player.getDuration().then(function(seconds) {
duration = seconds;
});
function updateState() {
player.getPaused().then(function(paused) {
playbackState = paused ? 0 : 1;
});
player.getCurrentTime().then(function(seconds) {
position = seconds;
});
player.getDuration().then(function(seconds) {
duration = seconds;
});
window.location.href = "embed://onState?playback=" + playbackState + "&position=" + position + "&duration=" + duration + "&download=" + downloadProgress;
}
player.addEvent("ready", function(player_id) {
window.location.href = "embed://onReady?data=" + player_id;
player.addEvent("play", onPlay);
player.addEvent("pause", onPause);
player.addEvent("finish", onFinish);
player.addEvent("playProgress", onPlayProgress);
player.addEvent("loadProgress", onLoadProgress);
window.setInterval(updateState, 500);
invoke("initialize");
if (%@) {
invoke("autoplay");
}
});
}
function onPlay(data) {
playbackState = 1;
@ -95,6 +93,16 @@
function onLoadProgress(data) {
downloadProgress = data.percent;
}
player.on('play', onPlay);
player.on('pause', onPause);
player.on("ended", onFinish);
window.setInterval(updateState, 500);
if (%@) {
invoke("autoplay");
}
})();
</script>
</body>

View File

@ -4,17 +4,17 @@ function initialize() {
controls.style.display = "none";
}
var sidedock = document.getElementsByClassName("sidedock")[0];
var sidedock = document.getElementsByClassName("vp-sidedock")[0];
if (sidedock != null) {
sidedock.style.display = "none";
}
var video = document.getElementsByTagName("video")[0];
if (video != null) {
video.setAttribute("webkit-playsinline", "");
video.setAttribute("playsinline", "");
video.webkitEnterFullscreen = undefined;
}
// var video = document.getElementsByTagName("video")[0];
// if (video != null) {
// video.setAttribute("webkit-playsinline", "");
// video.setAttribute("playsinline", "");
// video.webkitEnterFullscreen = undefined;
// }
}
function eventFire(el, etype){

View File

@ -60,6 +60,10 @@
player.seekTo(timestamp, true);
}
function setRate(rate) {
player.setPlaybackRate(rate);
}
function updateState() {
window.location.href = "embed://onState?failed=" + failed + "&playback=" + playbackState + "&position=" + position + "&duration=" + duration + "&download=" + downloadProgress + '&quality=' + quality + '&availableQualities=' + availableQualities + '&storyboard=' + storyboardSpec;
}

View File

@ -6603,10 +6603,18 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
let _ = ApplicationSpecificNotice.incrementChatMessageOptionsTip(accountManager: strongSelf.context.sharedContext.accountManager, count: 4).start()
let controller = ChatSendMessageActionSheetController(context: strongSelf.context, controllerInteraction: strongSelf.controllerInteraction, interfaceState: strongSelf.presentationInterfaceState, gesture: gesture, sourceSendButton: node, textInputNode: textInputNode, completion: { [weak self] in
let controller = ChatSendMessageActionSheetController(context: strongSelf.context, interfaceState: strongSelf.presentationInterfaceState, gesture: gesture, sourceSendButton: node, textInputNode: textInputNode, completion: { [weak self] in
if let strongSelf = self {
strongSelf.supportedOrientations = previousSupportedOrientations
}
}, sendMessage: { [weak self] silently in
if let strongSelf = self {
strongSelf.controllerInteraction?.sendCurrentMessage(silently)
}
}, schedule: { [weak self] in
if let strongSelf = self {
strongSelf.controllerInteraction?.scheduleCurrentMessage()
}
})
strongSelf.sendMessageActionsController = controller
if layout.isNonExclusive {
@ -10917,80 +10925,97 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
}
controller.present(textAlertController(context: context, title: nil, text: presentationData.strings.Forward_ErrorDisabledForChat, actions: [TextAlertAction(type: .defaultAction, title: presentationData.strings.Common_OK, action: {})]), in: .window(.root))
}
controller.multiplePeersSelected = { [weak self, weak controller] peers, messageText in
controller.multiplePeersSelected = { [weak self, weak controller] peers, messageText, mode in
guard let strongSelf = self, let strongController = controller else {
return
}
strongController.dismiss()
for peer in peers {
var result: [EnqueueMessage] = []
if messageText.string.count > 0 {
let inputText = convertMarkdownToAttributes(messageText)
for text in breakChatInputText(trimChatInputText(inputText)) {
if text.length != 0 {
var attributes: [MessageAttribute] = []
let entities = generateTextEntities(text.string, enabledTypes: .all, currentEntities: generateChatInputTextEntities(text))
if !entities.isEmpty {
attributes.append(TextEntitiesMessageAttribute(entities: entities))
}
result.append(.message(text: text.string, attributes: attributes, mediaReference: nil, replyToMessageId: nil, localGroupingKey: nil, correlationId: nil))
var result: [EnqueueMessage] = []
if messageText.string.count > 0 {
let inputText = convertMarkdownToAttributes(messageText)
for text in breakChatInputText(trimChatInputText(inputText)) {
if text.length != 0 {
var attributes: [MessageAttribute] = []
let entities = generateTextEntities(text.string, enabledTypes: .all, currentEntities: generateChatInputTextEntities(text))
if !entities.isEmpty {
attributes.append(TextEntitiesMessageAttribute(entities: entities))
}
result.append(.message(text: text.string, attributes: attributes, mediaReference: nil, replyToMessageId: nil, localGroupingKey: nil, correlationId: nil))
}
}
}
result.append(contentsOf: messages.map { message -> EnqueueMessage in
return .forward(source: message.id, grouping: .auto, attributes: [], correlationId: nil)
})
result.append(contentsOf: messages.map { message -> EnqueueMessage in
return .forward(source: message.id, grouping: .auto, attributes: [], correlationId: nil)
})
let _ = (enqueueMessages(account: strongSelf.context.account, peerId: peer.id, messages: result)
|> deliverOnMainQueue).start(next: { messageIds in
if let strongSelf = self {
let signals: [Signal<Bool, NoError>] = messageIds.compactMap({ id -> Signal<Bool, NoError>? in
guard let id = id else {
return nil
}
return strongSelf.context.account.pendingMessageManager.pendingMessageStatus(id)
|> mapToSignal { status, _ -> Signal<Bool, NoError> in
if status != nil {
return .never()
} else {
return .single(true)
let commit: ([EnqueueMessage]) -> Void = { result in
for peer in peers {
let _ = (enqueueMessages(account: strongSelf.context.account, peerId: peer.id, messages: result)
|> deliverOnMainQueue).start(next: { messageIds in
if let strongSelf = self {
let signals: [Signal<Bool, NoError>] = messageIds.compactMap({ id -> Signal<Bool, NoError>? in
guard let id = id else {
return nil
}
return strongSelf.context.account.pendingMessageManager.pendingMessageStatus(id)
|> mapToSignal { status, _ -> Signal<Bool, NoError> in
if status != nil {
return .never()
} else {
return .single(true)
}
}
|> take(1)
})
if strongSelf.shareStatusDisposable == nil {
strongSelf.shareStatusDisposable = MetaDisposable()
}
|> take(1)
})
if strongSelf.shareStatusDisposable == nil {
strongSelf.shareStatusDisposable = MetaDisposable()
strongSelf.shareStatusDisposable?.set((combineLatest(signals)
|> deliverOnMainQueue).start())
}
strongSelf.shareStatusDisposable?.set((combineLatest(signals)
|> deliverOnMainQueue).start())
}
})
})
let presentationData = strongSelf.context.sharedContext.currentPresentationData.with { $0 }
let text: String
var savedMessages = false
if peers.count == 1, let peerId = peers.first?.id, peerId == strongSelf.context.account.peerId {
text = messages.count == 1 ? presentationData.strings.Conversation_ForwardTooltip_SavedMessages_One : presentationData.strings.Conversation_ForwardTooltip_SavedMessages_Many
savedMessages = true
} else {
if peers.count == 1, let peer = peers.first {
let peerName = peer.id == strongSelf.context.account.peerId ? presentationData.strings.DialogList_SavedMessages : peer.displayTitle(strings: presentationData.strings, displayOrder: presentationData.nameDisplayOrder)
text = messages.count == 1 ? presentationData.strings.Conversation_ForwardTooltip_Chat_One(peerName).string : presentationData.strings.Conversation_ForwardTooltip_Chat_Many(peerName).string
} else if peers.count == 2, let firstPeer = peers.first, let secondPeer = peers.last {
let firstPeerName = firstPeer.id == strongSelf.context.account.peerId ? presentationData.strings.DialogList_SavedMessages : firstPeer.displayTitle(strings: presentationData.strings, displayOrder: presentationData.nameDisplayOrder)
let secondPeerName = secondPeer.id == strongSelf.context.account.peerId ? presentationData.strings.DialogList_SavedMessages : secondPeer.displayTitle(strings: presentationData.strings, displayOrder: presentationData.nameDisplayOrder)
text = messages.count == 1 ? presentationData.strings.Conversation_ForwardTooltip_TwoChats_One(firstPeerName, secondPeerName).string : presentationData.strings.Conversation_ForwardTooltip_TwoChats_Many(firstPeerName, secondPeerName).string
} else if let peer = peers.first {
let peerName = peer.displayTitle(strings: presentationData.strings, displayOrder: presentationData.nameDisplayOrder)
text = messages.count == 1 ? presentationData.strings.Conversation_ForwardTooltip_ManyChats_One(peerName, "\(peers.count - 1)").string : presentationData.strings.Conversation_ForwardTooltip_ManyChats_Many(peerName, "\(peers.count - 1)").string
let presentationData = strongSelf.context.sharedContext.currentPresentationData.with { $0 }
let text: String
var savedMessages = false
if peers.count == 1, let peerId = peers.first?.id, peerId == strongSelf.context.account.peerId {
text = messages.count == 1 ? presentationData.strings.Conversation_ForwardTooltip_SavedMessages_One : presentationData.strings.Conversation_ForwardTooltip_SavedMessages_Many
savedMessages = true
} else {
text = ""
if peers.count == 1, let peer = peers.first {
let peerName = peer.id == strongSelf.context.account.peerId ? presentationData.strings.DialogList_SavedMessages : peer.displayTitle(strings: presentationData.strings, displayOrder: presentationData.nameDisplayOrder)
text = messages.count == 1 ? presentationData.strings.Conversation_ForwardTooltip_Chat_One(peerName).string : presentationData.strings.Conversation_ForwardTooltip_Chat_Many(peerName).string
} else if peers.count == 2, let firstPeer = peers.first, let secondPeer = peers.last {
let firstPeerName = firstPeer.id == strongSelf.context.account.peerId ? presentationData.strings.DialogList_SavedMessages : firstPeer.displayTitle(strings: presentationData.strings, displayOrder: presentationData.nameDisplayOrder)
let secondPeerName = secondPeer.id == strongSelf.context.account.peerId ? presentationData.strings.DialogList_SavedMessages : secondPeer.displayTitle(strings: presentationData.strings, displayOrder: presentationData.nameDisplayOrder)
text = messages.count == 1 ? presentationData.strings.Conversation_ForwardTooltip_TwoChats_One(firstPeerName, secondPeerName).string : presentationData.strings.Conversation_ForwardTooltip_TwoChats_Many(firstPeerName, secondPeerName).string
} else if let peer = peers.first {
let peerName = peer.displayTitle(strings: presentationData.strings, displayOrder: presentationData.nameDisplayOrder)
text = messages.count == 1 ? presentationData.strings.Conversation_ForwardTooltip_ManyChats_One(peerName, "\(peers.count - 1)").string : presentationData.strings.Conversation_ForwardTooltip_ManyChats_Many(peerName, "\(peers.count - 1)").string
} else {
text = ""
}
}
}
strongSelf.present(UndoOverlayController(presentationData: presentationData, content: .forward(savedMessages: savedMessages, text: text), elevatedLayout: false, animateInAsReplacement: true, action: { _ in return false }), in: .current)
strongSelf.present(UndoOverlayController(presentationData: presentationData, content: .forward(savedMessages: savedMessages, text: text), elevatedLayout: false, animateInAsReplacement: true, action: { _ in return false }), in: .current)
}
}
switch mode {
case .generic:
commit(result)
case .silent:
let transformedMessages = strongSelf.transformEnqueueMessages(result, silentPosting: true)
commit(transformedMessages)
case .schedule:
strongSelf.presentScheduleTimePicker(completion: { [weak self] scheduleTime in
if let strongSelf = self {
let transformedMessages = strongSelf.transformEnqueueMessages(result, silentPosting: false, scheduleTime: scheduleTime)
commit(transformedMessages)
}
})
}
}
controller.peerSelected = { [weak self, weak controller] peer in

View File

@ -36,6 +36,11 @@ class ChatMessageInstantVideoItemNode: ChatMessageItemView, UIGestureRecognizerD
private var appliedHasAvatar = false
private var appliedCurrentlyPlaying = false
private var appliedAutomaticDownload = false
private var avatarOffset: CGFloat?
private var animatingHeight: Bool {
return self.apparentHeightTransition != nil
}
private var forwardInfoNode: ChatMessageForwardInfoNode?
private var forwardBackgroundNode: ASImageNode?
@ -52,8 +57,6 @@ class ChatMessageInstantVideoItemNode: ChatMessageItemView, UIGestureRecognizerD
private var recognizer: TapLongTapOrDoubleTapGestureRecognizer?
private var seekRecognizer: UIPanGestureRecognizer?
private var currentSwipeAction: ChatControllerInteractionSwipeAction?
override var visibility: ListViewItemNodeVisibility {
@ -75,6 +78,17 @@ class ChatMessageInstantVideoItemNode: ChatMessageItemView, UIGestureRecognizerD
super.init(layerBacked: false)
self.interactiveVideoNode.shouldOpen = { [weak self] in
if let strongSelf = self {
if let item = strongSelf.item, item.message.id.namespace == Namespaces.Message.Local {
return false
}
return !strongSelf.animatingHeight
} else {
return false
}
}
self.containerNode.shouldBegin = { [weak self] location in
guard let strongSelf = self else {
return false
@ -82,6 +96,9 @@ class ChatMessageInstantVideoItemNode: ChatMessageItemView, UIGestureRecognizerD
if !strongSelf.interactiveVideoNode.frame.contains(location) {
return false
}
if strongSelf.appliedCurrentlyPlaying && !strongSelf.interactiveVideoNode.isPlaying {
return false
}
if let action = strongSelf.gestureRecognized(gesture: .tap, location: location, recognizer: nil) {
if case .action = action {
return false
@ -120,7 +137,7 @@ class ChatMessageInstantVideoItemNode: ChatMessageItemView, UIGestureRecognizerD
self.addSubnode(self.messageAccessibilityArea)
self.messageAccessibilityArea.activate = { [weak self] in
guard let strongSelf = self, let accessibilityData = strongSelf.accessibilityData else {
guard let strongSelf = self, let _ = strongSelf.accessibilityData else {
return false
}
@ -161,6 +178,9 @@ class ChatMessageInstantVideoItemNode: ChatMessageItemView, UIGestureRecognizerD
if strongSelf.selectionNode != nil {
return false
}
if strongSelf.appliedCurrentlyPlaying && !strongSelf.interactiveVideoNode.isPlaying {
return false
}
let action = item.controllerInteraction.canSetupReply(item.message)
strongSelf.currentSwipeAction = action
if case .none = action {
@ -172,12 +192,6 @@ class ChatMessageInstantVideoItemNode: ChatMessageItemView, UIGestureRecognizerD
return false
}
self.view.addGestureRecognizer(replyRecognizer)
let seekRecognizer = UIPanGestureRecognizer(target: self, action: #selector(self.seekGesture(_:)))
seekRecognizer.isEnabled = false
seekRecognizer.delegate = self
self.seekRecognizer = seekRecognizer
self.interactiveVideoNode.view.addGestureRecognizer(seekRecognizer)
}
override func updateAccessibilityData(_ accessibilityData: ChatMessageAccessibilityData) {
@ -331,8 +345,9 @@ class ChatMessageInstantVideoItemNode: ChatMessageItemView, UIGestureRecognizerD
}
var isPlaying = false
var displaySize = layoutConstants.instantVideo.dimensions
let maximumDisplaySize = CGSize(width: params.width - 20.0, height: params.width - 20.0)
let normalDisplaySize = layoutConstants.instantVideo.dimensions
var displaySize = normalDisplaySize
let maximumDisplaySize = CGSize(width: min(404, params.width - 20.0), height: min(404, params.width - 20.0))
var effectiveAvatarInset = avatarInset
if item.associatedData.currentlyPlayingMessageId == item.message.index {
isPlaying = true
@ -493,7 +508,7 @@ class ChatMessageInstantVideoItemNode: ChatMessageItemView, UIGestureRecognizerD
forwardBackgroundImage = graphics.chatServiceBubbleFillImage
}
var maxContentWidth = videoLayout.contentSize.width
var maxContentWidth = normalDisplaySize.width
var actionButtonsFinalize: ((CGFloat) -> (CGSize, (_ animated: Bool) -> ChatMessageActionButtonsNode))?
if let replyMarkup = replyMarkup {
let (minWidth, buttonsLayout) = actionButtonsLayout(item.context, item.presentationData.theme, item.presentationData.chatBubbleCorners, item.presentationData.strings, replyMarkup, item.message, maxContentWidth)
@ -530,6 +545,7 @@ class ChatMessageInstantVideoItemNode: ChatMessageItemView, UIGestureRecognizerD
strongSelf.appliedHasAvatar = hasAvatar
strongSelf.appliedForwardInfo = (forwardSource, forwardAuthorSignature)
strongSelf.appliedCurrentlyPlaying = isPlaying
strongSelf.appliedAutomaticDownload = automaticDownload
strongSelf.updateAccessibilityData(accessibilityData)
@ -540,178 +556,189 @@ class ChatMessageInstantVideoItemNode: ChatMessageItemView, UIGestureRecognizerD
videoLayoutData = .constrained(left: max(0.0, availableContentWidth - videoFrame.width), right: 0.0)
}
if currentItem != nil && currentPlaying != isPlaying {
} else {
let animating = (currentItem != nil && currentPlaying != isPlaying) || strongSelf.animatingHeight
if !animating {
strongSelf.interactiveVideoNode.frame = videoFrame
videoApply(videoLayoutData, transition)
}
if currentPlaying != isPlaying {
if isPlaying {
strongSelf.avatarOffset = -100.0
} else {
strongSelf.avatarOffset = nil
}
strongSelf.updateSelectionState(animated: true)
strongSelf.updateAttachedAvatarNodeOffset(offset: strongSelf.avatarOffset ?? 0.0, transition: .animated(duration: 0.3, curve: .easeInOut))
}
strongSelf.interactiveVideoNode.view.disablesInteractiveTransitionGestureRecognizer = isPlaying
strongSelf.seekRecognizer?.isEnabled = isPlaying
strongSelf.contextSourceNode.contentRect = videoFrame
strongSelf.containerNode.targetNodeForActivationProgressContentRect = strongSelf.contextSourceNode.contentRect
if let updatedShareButtonNode = updatedShareButtonNode {
if updatedShareButtonNode !== strongSelf.shareButtonNode {
if let shareButtonNode = strongSelf.shareButtonNode {
shareButtonNode.removeFromSupernode()
}
strongSelf.shareButtonNode = updatedShareButtonNode
strongSelf.addSubnode(updatedShareButtonNode)
updatedShareButtonNode.addTarget(strongSelf, action: #selector(strongSelf.shareButtonPressed), forControlEvents: .touchUpInside)
}
let buttonSize = updatedShareButtonNode.update(presentationData: item.presentationData, chatLocation: item.chatLocation, subject: item.associatedData.subject, message: item.message, account: item.context.account)
updatedShareButtonNode.frame = CGRect(origin: CGPoint(x: videoFrame.maxX - 7.0, y: videoFrame.maxY - 24.0 - buttonSize.height), size: buttonSize)
} else if let shareButtonNode = strongSelf.shareButtonNode {
shareButtonNode.removeFromSupernode()
strongSelf.shareButtonNode = nil
}
if let updatedReplyBackgroundNode = updatedReplyBackgroundNode {
if strongSelf.replyBackgroundNode == nil {
strongSelf.replyBackgroundNode = updatedReplyBackgroundNode
strongSelf.addSubnode(updatedReplyBackgroundNode)
updatedReplyBackgroundNode.image = replyBackgroundImage
} else {
strongSelf.replyBackgroundNode?.image = replyBackgroundImage
}
} else if let replyBackgroundNode = strongSelf.replyBackgroundNode {
replyBackgroundNode.removeFromSupernode()
strongSelf.replyBackgroundNode = nil
}
if let (viaBotLayout, viaBotApply) = viaBotApply {
let viaBotNode = viaBotApply()
if strongSelf.viaBotNode == nil {
strongSelf.viaBotNode = viaBotNode
strongSelf.addSubnode(viaBotNode)
}
let viaBotFrame = CGRect(origin: CGPoint(x: (!incoming ? (params.leftInset + layoutConstants.bubble.edgeInset + 10.0) : (params.width - params.rightInset - viaBotLayout.size.width - layoutConstants.bubble.edgeInset - 10.0)), y: 8.0), size: viaBotLayout.size)
viaBotNode.frame = viaBotFrame
strongSelf.replyBackgroundNode?.frame = CGRect(origin: CGPoint(x: viaBotFrame.minX - 4.0, y: viaBotFrame.minY - 2.0), size: CGSize(width: viaBotFrame.size.width + 8.0, height: viaBotFrame.size.height + 5.0))
} else if let viaBotNode = strongSelf.viaBotNode {
viaBotNode.removeFromSupernode()
strongSelf.viaBotNode = nil
}
if let (replyInfoSize, replyInfoApply) = replyInfoApply {
let replyInfoNode = replyInfoApply()
if strongSelf.replyInfoNode == nil {
strongSelf.replyInfoNode = replyInfoNode
strongSelf.addSubnode(replyInfoNode)
}
var viaBotSize = CGSize()
if let viaBotNode = strongSelf.viaBotNode {
viaBotSize = viaBotNode.frame.size
}
let replyInfoFrame = CGRect(origin: CGPoint(x: (!incoming ? (params.leftInset + layoutConstants.bubble.edgeInset + 10.0) : (params.width - params.rightInset - max(replyInfoSize.width, viaBotSize.width) - layoutConstants.bubble.edgeInset - 10.0)), y: 8.0 + viaBotSize.height), size: replyInfoSize)
if let viaBotNode = strongSelf.viaBotNode {
if replyInfoFrame.minX < viaBotNode.frame.minX {
viaBotNode.frame = viaBotNode.frame.offsetBy(dx: replyInfoFrame.minX - viaBotNode.frame.minX, dy: 0.0)
}
}
replyInfoNode.frame = replyInfoFrame
strongSelf.replyBackgroundNode?.frame = CGRect(origin: CGPoint(x: replyInfoFrame.minX - 4.0, y: replyInfoFrame.minY - viaBotSize.height - 2.0), size: CGSize(width: max(replyInfoFrame.size.width, viaBotSize.width) + 8.0, height: replyInfoFrame.size.height + viaBotSize.height + 5.0))
} else if let replyInfoNode = strongSelf.replyInfoNode {
replyInfoNode.removeFromSupernode()
strongSelf.replyInfoNode = nil
}
if isFailed {
let deliveryFailedNode: ChatMessageDeliveryFailedNode
var isAppearing = false
if let current = strongSelf.deliveryFailedNode {
deliveryFailedNode = current
} else {
isAppearing = true
deliveryFailedNode = ChatMessageDeliveryFailedNode(tapped: {
if let item = self?.item {
item.controllerInteraction.requestRedeliveryOfFailedMessages(item.content.firstMessage.id)
if !animating {
if let updatedShareButtonNode = updatedShareButtonNode {
if updatedShareButtonNode !== strongSelf.shareButtonNode {
if let shareButtonNode = strongSelf.shareButtonNode {
shareButtonNode.removeFromSupernode()
}
strongSelf.shareButtonNode = updatedShareButtonNode
strongSelf.addSubnode(updatedShareButtonNode)
updatedShareButtonNode.addTarget(strongSelf, action: #selector(strongSelf.shareButtonPressed), forControlEvents: .touchUpInside)
}
let buttonSize = updatedShareButtonNode.update(presentationData: item.presentationData, chatLocation: item.chatLocation, subject: item.associatedData.subject, message: item.message, account: item.context.account)
updatedShareButtonNode.frame = CGRect(origin: CGPoint(x: min(params.width - buttonSize.width - 8.0, videoFrame.maxX - 7.0), y: videoFrame.maxY - 24.0 - buttonSize.height), size: buttonSize)
} else if let shareButtonNode = strongSelf.shareButtonNode {
shareButtonNode.removeFromSupernode()
strongSelf.shareButtonNode = nil
}
if let updatedReplyBackgroundNode = updatedReplyBackgroundNode {
if strongSelf.replyBackgroundNode == nil {
strongSelf.replyBackgroundNode = updatedReplyBackgroundNode
strongSelf.addSubnode(updatedReplyBackgroundNode)
updatedReplyBackgroundNode.image = replyBackgroundImage
} else {
strongSelf.replyBackgroundNode?.image = replyBackgroundImage
}
} else if let replyBackgroundNode = strongSelf.replyBackgroundNode {
replyBackgroundNode.removeFromSupernode()
strongSelf.replyBackgroundNode = nil
}
if let (viaBotLayout, viaBotApply) = viaBotApply {
let viaBotNode = viaBotApply()
if strongSelf.viaBotNode == nil {
strongSelf.viaBotNode = viaBotNode
strongSelf.addSubnode(viaBotNode)
}
let viaBotFrame = CGRect(origin: CGPoint(x: (!incoming ? (params.leftInset + layoutConstants.bubble.edgeInset + 10.0) : (params.width - params.rightInset - viaBotLayout.size.width - layoutConstants.bubble.edgeInset - 10.0)), y: 8.0), size: viaBotLayout.size)
viaBotNode.frame = viaBotFrame
strongSelf.replyBackgroundNode?.frame = CGRect(origin: CGPoint(x: viaBotFrame.minX - 4.0, y: viaBotFrame.minY - 2.0), size: CGSize(width: viaBotFrame.size.width + 8.0, height: viaBotFrame.size.height + 5.0))
} else if let viaBotNode = strongSelf.viaBotNode {
viaBotNode.removeFromSupernode()
strongSelf.viaBotNode = nil
}
if let (replyInfoSize, replyInfoApply) = replyInfoApply {
let replyInfoNode = replyInfoApply()
if strongSelf.replyInfoNode == nil {
strongSelf.replyInfoNode = replyInfoNode
strongSelf.addSubnode(replyInfoNode)
}
var viaBotSize = CGSize()
if let viaBotNode = strongSelf.viaBotNode {
viaBotSize = viaBotNode.frame.size
}
let replyInfoFrame = CGRect(origin: CGPoint(x: (!incoming ? (params.leftInset + layoutConstants.bubble.edgeInset + 10.0) : (params.width - params.rightInset - max(replyInfoSize.width, viaBotSize.width) - layoutConstants.bubble.edgeInset - 10.0)), y: 8.0 + viaBotSize.height), size: replyInfoSize)
if let viaBotNode = strongSelf.viaBotNode {
if replyInfoFrame.minX < viaBotNode.frame.minX {
viaBotNode.frame = viaBotNode.frame.offsetBy(dx: replyInfoFrame.minX - viaBotNode.frame.minX, dy: 0.0)
}
}
replyInfoNode.frame = replyInfoFrame
strongSelf.replyBackgroundNode?.frame = CGRect(origin: CGPoint(x: replyInfoFrame.minX - 4.0, y: replyInfoFrame.minY - viaBotSize.height - 2.0), size: CGSize(width: max(replyInfoFrame.size.width, viaBotSize.width) + 8.0, height: replyInfoFrame.size.height + viaBotSize.height + 5.0))
} else if let replyInfoNode = strongSelf.replyInfoNode {
replyInfoNode.removeFromSupernode()
strongSelf.replyInfoNode = nil
}
if isFailed {
let deliveryFailedNode: ChatMessageDeliveryFailedNode
var isAppearing = false
if let current = strongSelf.deliveryFailedNode {
deliveryFailedNode = current
} else {
isAppearing = true
deliveryFailedNode = ChatMessageDeliveryFailedNode(tapped: {
if let item = self?.item {
item.controllerInteraction.requestRedeliveryOfFailedMessages(item.content.firstMessage.id)
}
})
strongSelf.deliveryFailedNode = deliveryFailedNode
strongSelf.addSubnode(deliveryFailedNode)
}
let deliveryFailedSize = deliveryFailedNode.updateLayout(theme: item.presentationData.theme.theme)
let deliveryFailedFrame = CGRect(origin: CGPoint(x: videoFrame.maxX + deliveryFailedInset - deliveryFailedSize.width, y: videoFrame.maxY - deliveryFailedSize.height), size: deliveryFailedSize)
if isAppearing {
deliveryFailedNode.frame = deliveryFailedFrame
transition.animatePositionAdditive(node: deliveryFailedNode, offset: CGPoint(x: deliveryFailedInset, y: 0.0))
} else {
transition.updateFrame(node: deliveryFailedNode, frame: deliveryFailedFrame)
}
} else if let deliveryFailedNode = strongSelf.deliveryFailedNode {
strongSelf.deliveryFailedNode = nil
transition.updateAlpha(node: deliveryFailedNode, alpha: 0.0)
transition.updateFrame(node: deliveryFailedNode, frame: deliveryFailedNode.frame.offsetBy(dx: 24.0, dy: 0.0), completion: { [weak deliveryFailedNode] _ in
deliveryFailedNode?.removeFromSupernode()
})
strongSelf.deliveryFailedNode = deliveryFailedNode
strongSelf.addSubnode(deliveryFailedNode)
}
let deliveryFailedSize = deliveryFailedNode.updateLayout(theme: item.presentationData.theme.theme)
let deliveryFailedFrame = CGRect(origin: CGPoint(x: videoFrame.maxX + deliveryFailedInset - deliveryFailedSize.width, y: videoFrame.maxY - deliveryFailedSize.height), size: deliveryFailedSize)
if isAppearing {
deliveryFailedNode.frame = deliveryFailedFrame
transition.animatePositionAdditive(node: deliveryFailedNode, offset: CGPoint(x: deliveryFailedInset, y: 0.0))
} else {
transition.updateFrame(node: deliveryFailedNode, frame: deliveryFailedFrame)
}
} else if let deliveryFailedNode = strongSelf.deliveryFailedNode {
strongSelf.deliveryFailedNode = nil
transition.updateAlpha(node: deliveryFailedNode, alpha: 0.0)
transition.updateFrame(node: deliveryFailedNode, frame: deliveryFailedNode.frame.offsetBy(dx: 24.0, dy: 0.0), completion: { [weak deliveryFailedNode] _ in
deliveryFailedNode?.removeFromSupernode()
})
}
if let updatedForwardBackgroundNode = updatedForwardBackgroundNode {
if strongSelf.forwardBackgroundNode == nil {
strongSelf.forwardBackgroundNode = updatedForwardBackgroundNode
strongSelf.addSubnode(updatedForwardBackgroundNode)
updatedForwardBackgroundNode.image = forwardBackgroundImage
}
} else if let forwardBackgroundNode = strongSelf.forwardBackgroundNode {
forwardBackgroundNode.removeFromSupernode()
strongSelf.forwardBackgroundNode = nil
}
if let (forwardInfoSize, forwardInfoApply) = forwardInfoSizeApply {
let forwardInfoNode = forwardInfoApply(forwardInfoSize.width)
if strongSelf.forwardInfoNode == nil {
strongSelf.forwardInfoNode = forwardInfoNode
strongSelf.addSubnode(forwardInfoNode)
forwardInfoNode.openPsa = { [weak strongSelf] type, sourceNode in
guard let strongSelf = strongSelf, let item = strongSelf.item else {
return
}
item.controllerInteraction.displayPsa(type, sourceNode)
if let updatedForwardBackgroundNode = updatedForwardBackgroundNode {
if strongSelf.forwardBackgroundNode == nil {
strongSelf.forwardBackgroundNode = updatedForwardBackgroundNode
strongSelf.addSubnode(updatedForwardBackgroundNode)
updatedForwardBackgroundNode.image = forwardBackgroundImage
}
} else if let forwardBackgroundNode = strongSelf.forwardBackgroundNode {
forwardBackgroundNode.removeFromSupernode()
strongSelf.forwardBackgroundNode = nil
}
let forwardInfoFrame = CGRect(origin: CGPoint(x: (!incoming ? (params.leftInset + layoutConstants.bubble.edgeInset + 12.0) : (params.width - params.rightInset - forwardInfoSize.width - layoutConstants.bubble.edgeInset - 12.0)), y: 8.0), size: forwardInfoSize)
forwardInfoNode.frame = forwardInfoFrame
strongSelf.forwardBackgroundNode?.frame = CGRect(origin: CGPoint(x: forwardInfoFrame.minX - 6.0, y: forwardInfoFrame.minY - 2.0), size: CGSize(width: forwardInfoFrame.size.width + 10.0, height: forwardInfoFrame.size.height + 4.0))
} else if let forwardInfoNode = strongSelf.forwardInfoNode {
forwardInfoNode.removeFromSupernode()
strongSelf.forwardInfoNode = nil
}
if let actionButtonsSizeAndApply = actionButtonsSizeAndApply {
var animated = false
if let _ = strongSelf.actionButtonsNode {
if case .System = animation {
animated = true
}
}
let actionButtonsNode = actionButtonsSizeAndApply.1(animated)
let previousFrame = actionButtonsNode.frame
let actionButtonsFrame = CGRect(origin: CGPoint(x: videoFrame.minX, y: videoFrame.maxY), size: actionButtonsSizeAndApply.0)
actionButtonsNode.frame = actionButtonsFrame
if actionButtonsNode !== strongSelf.actionButtonsNode {
strongSelf.actionButtonsNode = actionButtonsNode
actionButtonsNode.buttonPressed = { button in
if let strongSelf = self {
strongSelf.performMessageButtonAction(button: button)
if let (forwardInfoSize, forwardInfoApply) = forwardInfoSizeApply {
let forwardInfoNode = forwardInfoApply(forwardInfoSize.width)
if strongSelf.forwardInfoNode == nil {
strongSelf.forwardInfoNode = forwardInfoNode
strongSelf.addSubnode(forwardInfoNode)
forwardInfoNode.openPsa = { [weak strongSelf] type, sourceNode in
guard let strongSelf = strongSelf, let item = strongSelf.item else {
return
}
item.controllerInteraction.displayPsa(type, sourceNode)
}
}
actionButtonsNode.buttonLongTapped = { button in
if let strongSelf = self {
strongSelf.presentMessageButtonContextMenu(button: button)
let forwardInfoFrame = CGRect(origin: CGPoint(x: (!incoming ? (params.leftInset + layoutConstants.bubble.edgeInset + 12.0) : (params.width - params.rightInset - forwardInfoSize.width - layoutConstants.bubble.edgeInset - 12.0)), y: 8.0), size: forwardInfoSize)
forwardInfoNode.frame = forwardInfoFrame
strongSelf.forwardBackgroundNode?.frame = CGRect(origin: CGPoint(x: forwardInfoFrame.minX - 6.0, y: forwardInfoFrame.minY - 2.0), size: CGSize(width: forwardInfoFrame.size.width + 10.0, height: forwardInfoFrame.size.height + 4.0))
} else if let forwardInfoNode = strongSelf.forwardInfoNode {
forwardInfoNode.removeFromSupernode()
strongSelf.forwardInfoNode = nil
}
if let actionButtonsSizeAndApply = actionButtonsSizeAndApply {
var animated = false
if let _ = strongSelf.actionButtonsNode {
if case .System = animation {
animated = true
}
}
strongSelf.addSubnode(actionButtonsNode)
} else {
if case let .System(duration) = animation {
actionButtonsNode.layer.animateFrame(from: previousFrame, to: actionButtonsFrame, duration: duration, timingFunction: kCAMediaTimingFunctionSpring)
let actionButtonsNode = actionButtonsSizeAndApply.1(animated)
let previousFrame = actionButtonsNode.frame
let actionButtonsFrame = CGRect(origin: CGPoint(x: videoFrame.minX, y: videoFrame.maxY), size: actionButtonsSizeAndApply.0)
actionButtonsNode.frame = actionButtonsFrame
if actionButtonsNode !== strongSelf.actionButtonsNode {
strongSelf.actionButtonsNode = actionButtonsNode
actionButtonsNode.buttonPressed = { button in
if let strongSelf = self {
strongSelf.performMessageButtonAction(button: button)
}
}
actionButtonsNode.buttonLongTapped = { button in
if let strongSelf = self {
strongSelf.presentMessageButtonContextMenu(button: button)
}
}
strongSelf.addSubnode(actionButtonsNode)
} else {
if case let .System(duration) = animation {
actionButtonsNode.layer.animateFrame(from: previousFrame, to: actionButtonsFrame, duration: duration, timingFunction: kCAMediaTimingFunctionSpring)
}
}
} else if let actionButtonsNode = strongSelf.actionButtonsNode {
actionButtonsNode.removeFromSupernode()
strongSelf.actionButtonsNode = nil
}
} else if let actionButtonsNode = strongSelf.actionButtonsNode {
actionButtonsNode.removeFromSupernode()
strongSelf.actionButtonsNode = nil
}
}
})
@ -862,30 +889,6 @@ class ChatMessageInstantVideoItemNode: ChatMessageItemView, UIGestureRecognizerD
}
}
/// Restricts the seek pan recognizer to mostly-horizontal drags so that
/// vertical chat-list scrolling is not intercepted; all other recognizers
/// are allowed to begin unconditionally.
override func gestureRecognizerShouldBegin(_ gestureRecognizer: UIGestureRecognizer) -> Bool {
    guard let panGestureRecognizer = gestureRecognizer as? UIPanGestureRecognizer, panGestureRecognizer === self.seekRecognizer else {
        return true
    }
    let velocity = panGestureRecognizer.velocity(in: self.interactiveVideoNode.view)
    return abs(velocity.x) > abs(velocity.y)
}
// Tracks whether playback was active when a seek gesture started.
// NOTE(review): declared but not written in the visible code — presumably
// set elsewhere in this class; confirm before removing.
private var wasPlaying = false

/// Handles horizontal pan-to-seek over the instant video bubble: pauses on
/// begin, scrubs while the finger moves, and resumes playback on release.
/// @param recognizer The pan recognizer installed on this node.
@objc func seekGesture(_ recognizer: UIPanGestureRecognizer) {
    let location = recognizer.location(in: self.interactiveVideoNode.view)
    let width = self.interactiveVideoNode.bounds.size.width
    // Clamp to [0, 1]: the pan may travel outside the video bounds, which
    // would otherwise yield an out-of-range seek fraction. Also guard
    // against a zero width (layout not yet applied) to avoid NaN.
    let fraction = width > 0.0 ? max(0.0, min(1.0, Double(location.x / width))) : 0.0
    switch recognizer.state {
    case .began:
        self.interactiveVideoNode.pause()
    case .changed:
        self.interactiveVideoNode.seekTo(fraction)
    case .ended, .cancelled:
        self.interactiveVideoNode.seekTo(fraction)
        self.interactiveVideoNode.play()
    default:
        break
    }
}
@objc func swipeToReplyGesture(_ recognizer: ChatSwipeToReplyRecognizer) {
switch recognizer.state {
case .began:
@ -914,7 +917,7 @@ class ChatMessageInstantVideoItemNode: ChatMessageItemView, UIGestureRecognizerD
bounds.origin.x = -translation.x
self.bounds = bounds
self.updateAttachedAvatarNodeOffset(offset: translation.x, transition: .immediate)
self.updateAttachedAvatarNodeOffset(offset: self.avatarOffset ?? translation.x, transition: .immediate)
if let swipeToReplyNode = self.swipeToReplyNode {
swipeToReplyNode.frame = CGRect(origin: CGPoint(x: bounds.size.width, y: floor((self.contentSize.height - 33.0) / 2.0)), size: CGSize(width: 33.0, height: 33.0))
@ -951,7 +954,7 @@ class ChatMessageInstantVideoItemNode: ChatMessageItemView, UIGestureRecognizerD
self.bounds = bounds
self.layer.animateBounds(from: previousBounds, to: bounds, duration: 0.3, timingFunction: kCAMediaTimingFunctionSpring)
self.updateAttachedAvatarNodeOffset(offset: 0.0, transition: .animated(duration: 0.3, curve: .spring))
self.updateAttachedAvatarNodeOffset(offset: self.avatarOffset ?? 0.0, transition: .animated(duration: 0.3, curve: .spring))
if let swipeToReplyNode = self.swipeToReplyNode {
self.swipeToReplyNode = nil
@ -991,7 +994,7 @@ class ChatMessageInstantVideoItemNode: ChatMessageItemView, UIGestureRecognizerD
selected = selectionState.selectedIds.contains(item.message.id)
incoming = item.message.effectivelyIncoming(item.context.account.peerId)
let offset: CGFloat = incoming ? 42.0 : 0.0
let offset: CGFloat = incoming || self.appliedCurrentlyPlaying ? 42.0 : 0.0
if let selectionNode = self.selectionNode {
let selectionFrame = CGRect(origin: CGPoint(x: -offset, y: 0.0), size: CGSize(width: self.contentBounds.size.width, height: self.contentBounds.size.height))
@ -1086,7 +1089,7 @@ class ChatMessageInstantVideoItemNode: ChatMessageItemView, UIGestureRecognizerD
override func animateFrameTransition(_ progress: CGFloat, _ currentValue: CGFloat) {
super.animateFrameTransition(progress, currentValue)
guard let item = self.appliedItem, let params = self.appliedParams, progress > 0.0, let (initialHeight, targetHeight) = self.apparentHeightTransition else {
guard let item = self.appliedItem, let params = self.appliedParams, progress > 0.0, let (initialHeight, targetHeight) = self.apparentHeightTransition, !targetHeight.isZero && !initialHeight.isZero else {
return
}
@ -1100,7 +1103,7 @@ class ChatMessageInstantVideoItemNode: ChatMessageItemView, UIGestureRecognizerD
var isPlaying = false
var displaySize = layoutConstants.instantVideo.dimensions
let maximumDisplaySize = CGSize(width: params.width - 20.0, height: params.width - 20.0)
let maximumDisplaySize = CGSize(width: min(404, params.width - 20.0), height: min(404, params.width - 20.0))
if item.associatedData.currentlyPlayingMessageId == item.message.index {
isPlaying = true
}
@ -1154,5 +1157,54 @@ class ChatMessageInstantVideoItemNode: ChatMessageItemView, UIGestureRecognizerD
videoLayoutData = .constrained(left: max(0.0, availableContentWidth - videoFrame.width), right: 0.0)
}
videoApply(videoLayoutData, .immediate)
if let shareButtonNode = self.shareButtonNode {
let buttonSize = shareButtonNode.frame.size
shareButtonNode.frame = CGRect(origin: CGPoint(x: min(params.width - buttonSize.width - 8.0, videoFrame.maxX - 7.0), y: videoFrame.maxY - 24.0 - buttonSize.height), size: buttonSize)
}
if let viaBotNode = self.viaBotNode {
let viaBotLayout = viaBotNode.frame
let viaBotFrame = CGRect(origin: CGPoint(x: (!incoming ? (params.leftInset + layoutConstants.bubble.edgeInset + 10.0) : (params.width - params.rightInset - viaBotLayout.size.width - layoutConstants.bubble.edgeInset - 10.0)), y: 8.0), size: viaBotLayout.size)
viaBotNode.frame = viaBotFrame
self.replyBackgroundNode?.frame = CGRect(origin: CGPoint(x: viaBotFrame.minX - 4.0, y: viaBotFrame.minY - 2.0), size: CGSize(width: viaBotFrame.size.width + 8.0, height: viaBotFrame.size.height + 5.0))
}
if let replyInfoNode = self.replyInfoNode {
var viaBotSize = CGSize()
if let viaBotNode = self.viaBotNode {
viaBotSize = viaBotNode.frame.size
}
let replyInfoSize = replyInfoNode.frame.size
let replyInfoFrame = CGRect(origin: CGPoint(x: (!incoming ? (params.leftInset + layoutConstants.bubble.edgeInset + 10.0) : (params.width - params.rightInset - max(replyInfoSize.width, viaBotSize.width) - layoutConstants.bubble.edgeInset - 10.0)), y: 8.0 + viaBotSize.height), size: replyInfoSize)
if let viaBotNode = self.viaBotNode {
if replyInfoFrame.minX < viaBotNode.frame.minX {
viaBotNode.frame = viaBotNode.frame.offsetBy(dx: replyInfoFrame.minX - viaBotNode.frame.minX, dy: 0.0)
}
}
replyInfoNode.frame = replyInfoFrame
self.replyBackgroundNode?.frame = CGRect(origin: CGPoint(x: replyInfoFrame.minX - 4.0, y: replyInfoFrame.minY - viaBotSize.height - 2.0), size: CGSize(width: max(replyInfoFrame.size.width, viaBotSize.width) + 8.0, height: replyInfoFrame.size.height + viaBotSize.height + 5.0))
}
if let deliveryFailedNode = self.deliveryFailedNode {
let deliveryFailedSize = deliveryFailedNode.frame.size
let deliveryFailedFrame = CGRect(origin: CGPoint(x: videoFrame.maxX + deliveryFailedInset - deliveryFailedSize.width, y: videoFrame.maxY - deliveryFailedSize.height), size: deliveryFailedSize)
deliveryFailedNode.frame = deliveryFailedFrame
}
if let forwardInfoNode = self.forwardInfoNode {
let forwardInfoSize = forwardInfoNode.frame.size
let forwardInfoFrame = CGRect(origin: CGPoint(x: (!incoming ? (params.leftInset + layoutConstants.bubble.edgeInset + 12.0) : (params.width - params.rightInset - forwardInfoSize.width - layoutConstants.bubble.edgeInset - 12.0)), y: 8.0), size: forwardInfoSize)
forwardInfoNode.frame = forwardInfoFrame
self.forwardBackgroundNode?.frame = CGRect(origin: CGPoint(x: forwardInfoFrame.minX - 6.0, y: forwardInfoFrame.minY - 2.0), size: CGSize(width: forwardInfoFrame.size.width + 10.0, height: forwardInfoFrame.size.height + 4.0))
}
if let actionButtonsNode = self.actionButtonsNode {
let actionButtonsSize = actionButtonsNode.frame.size
let actionButtonsFrame = CGRect(origin: CGPoint(x: videoFrame.minX, y: videoFrame.maxY), size: actionButtonsSize)
actionButtonsNode.frame = actionButtonsFrame
}
}
}

View File

@ -37,6 +37,7 @@ class ChatMessageInteractiveInstantVideoNode: ASDisplayNode {
private let secretVideoPlaceholder: TransformImageNode
private var statusNode: RadialStatusNode?
private var disappearingStatusNode: RadialStatusNode?
private var playbackStatusNode: InstantVideoRadialStatusNode?
private(set) var videoFrame: CGRect?
@ -82,7 +83,7 @@ class ChatMessageInteractiveInstantVideoNode: ASDisplayNode {
}
}
private var animating = false
var shouldOpen: () -> Bool = { return true }
override init() {
self.secretVideoPlaceholderBackground = ASImageNode()
@ -494,8 +495,10 @@ class ChatMessageInteractiveInstantVideoNode: ASDisplayNode {
}
strongSelf.secretVideoPlaceholderBackground.frame = displayVideoFrame
let placeholderFrame = displayVideoFrame.insetBy(dx: 2.0, dy: 2.0)
strongSelf.secretVideoPlaceholder.frame = placeholderFrame
let placeholderFrame = videoFrame.insetBy(dx: 2.0, dy: 2.0)
strongSelf.secretVideoPlaceholder.bounds = CGRect(origin: CGPoint(), size: videoFrame.size)
strongSelf.secretVideoPlaceholder.transform = CATransform3DMakeScale(imageScale, imageScale, 1.0)
strongSelf.secretVideoPlaceholder.position = displayVideoFrame.center
let makeSecretPlaceholderLayout = strongSelf.secretVideoPlaceholder.asyncLayout()
let arguments = TransformImageArguments(corners: ImageCorners(radius: placeholderFrame.size.width / 2.0), imageSize: placeholderFrame.size, boundingSize: placeholderFrame.size, intrinsicInsets: UIEdgeInsets())
let applySecretPlaceholder = makeSecretPlaceholderLayout(arguments)
@ -607,19 +610,26 @@ class ChatMessageInteractiveInstantVideoNode: ASDisplayNode {
if self.statusNode == nil {
let statusNode = RadialStatusNode(backgroundNodeColor: item.presentationData.theme.theme.chat.message.mediaOverlayControlColors.fillColor)
self.isUserInteractionEnabled = false
statusNode.frame = CGRect(origin: CGPoint(x: videoFrame.origin.x + floorToScreenPixels((videoFrame.size.width - 50.0) / 2.0), y: videoFrame.origin.y + floorToScreenPixels((videoFrame.size.height - 50.0) / 2.0)), size: CGSize(width: 50.0, height: 50.0))
self.statusNode = statusNode
self.addSubnode(statusNode)
}
} else {
if let statusNode = self.statusNode {
statusNode.transitionToState(.none, completion: { [weak statusNode] in
self.disappearingStatusNode = statusNode
statusNode.transitionToState(.none, completion: { [weak statusNode, weak self] in
statusNode?.removeFromSupernode()
if self?.disappearingStatusNode === statusNode {
self?.disappearingStatusNode = nil
}
})
self.statusNode = nil
}
}
let statusFrame = CGRect(origin: CGPoint(x: videoFrame.origin.x + floorToScreenPixels((videoFrame.size.width - 50.0) / 2.0), y: videoFrame.origin.y + floorToScreenPixels((videoFrame.size.height - 50.0) / 2.0)), size: CGSize(width: 50.0, height: 50.0))
self.statusNode?.frame = statusFrame
self.disappearingStatusNode?.frame = statusFrame
var state: RadialStatusNodeState
switch status.mediaStatus {
case var .fetchStatus(fetchStatus):
@ -679,7 +689,16 @@ class ChatMessageInteractiveInstantVideoNode: ASDisplayNode {
if let current = self.playbackStatusNode {
playbackStatusNode = current
} else {
playbackStatusNode = InstantVideoRadialStatusNode(color: UIColor(white: 1.0, alpha: 0.6))
playbackStatusNode = InstantVideoRadialStatusNode(color: UIColor(white: 1.0, alpha: 0.6), hasSeek: true)
playbackStatusNode.seekTo = { [weak self] position, play in
guard let strongSelf = self else {
return
}
strongSelf.seekTo(position)
if play {
strongSelf.play()
}
}
self.addSubnode(playbackStatusNode)
self.playbackStatusNode = playbackStatusNode
}
@ -696,9 +715,7 @@ class ChatMessageInteractiveInstantVideoNode: ASDisplayNode {
} else {
if let playbackStatusNode = self.playbackStatusNode {
self.playbackStatusNode = nil
playbackStatusNode.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2, removeOnCompletion: false, completion: { [weak playbackStatusNode] _ in
playbackStatusNode?.removeFromSupernode()
})
playbackStatusNode.removeFromSupernode()
}
self.durationNode?.status = .single(nil)
@ -737,7 +754,7 @@ class ChatMessageInteractiveInstantVideoNode: ASDisplayNode {
}
private func activateVideoPlayback() {
guard let item = self.item else {
guard let item = self.item, self.shouldOpen() else {
return
}
if self.infoBackgroundNode.alpha.isZero {
@ -757,6 +774,14 @@ class ChatMessageInteractiveInstantVideoNode: ASDisplayNode {
if !self.bounds.contains(point) {
return nil
}
if let playbackNode = self.playbackStatusNode, !self.isPlaying, !playbackNode.frame.insetBy(dx: 0.15 * playbackNode.frame.width, dy: 0.15 * playbackNode.frame.height).contains(point) {
let distanceFromCenter = point.distanceTo(playbackNode.position)
if distanceFromCenter < 0.15 * playbackNode.frame.width {
return self.view
} else {
return playbackNode.view
}
}
if let statusNode = self.statusNode, statusNode.supernode != nil, !statusNode.isHidden, statusNode.frame.contains(point) {
return self.view
}
@ -831,6 +856,14 @@ class ChatMessageInteractiveInstantVideoNode: ASDisplayNode {
}
}
/// True while the media status reports an active `.playing` playback state;
/// false for paused, fetching, or unknown status.
var isPlaying: Bool {
    guard let status = self.status, case let .playbackStatus(playbackStatus) = status.mediaStatus else {
        return false
    }
    if case .playing = playbackStatus {
        return true
    }
    return false
}
func seekTo(_ position: Double) {
if let duration = self.playbackStatusNode?.duration {
self.videoNode?.seek(position * duration)

View File

@ -14,12 +14,13 @@ final class ChatSendMessageActionSheetController: ViewController {
}
private let context: AccountContext
private let controllerInteraction: ChatControllerInteraction?
private let interfaceState: ChatPresentationInterfaceState
private let gesture: ContextGesture
private let sourceSendButton: ASDisplayNode
private let textInputNode: EditableTextNode
private let completion: () -> Void
private let sendMessage: (Bool) -> Void
private let schedule: () -> Void
private var presentationDataDisposable: Disposable?
@ -29,14 +30,15 @@ final class ChatSendMessageActionSheetController: ViewController {
private let hapticFeedback = HapticFeedback()
init(context: AccountContext, controllerInteraction: ChatControllerInteraction?, interfaceState: ChatPresentationInterfaceState, gesture: ContextGesture, sourceSendButton: ASDisplayNode, textInputNode: EditableTextNode, completion: @escaping () -> Void) {
init(context: AccountContext, interfaceState: ChatPresentationInterfaceState, gesture: ContextGesture, sourceSendButton: ASDisplayNode, textInputNode: EditableTextNode, completion: @escaping () -> Void, sendMessage: @escaping (Bool) -> Void, schedule: @escaping () -> Void) {
self.context = context
self.controllerInteraction = controllerInteraction
self.interfaceState = interfaceState
self.gesture = gesture
self.sourceSendButton = sourceSendButton
self.textInputNode = textInputNode
self.completion = completion
self.sendMessage = sendMessage
self.schedule = schedule
super.init(navigationBarPresentationData: nil)
@ -77,13 +79,13 @@ final class ChatSendMessageActionSheetController: ViewController {
}
self.displayNode = ChatSendMessageActionSheetControllerNode(context: self.context, reminders: reminders, gesture: gesture, sourceSendButton: self.sourceSendButton, textInputNode: self.textInputNode, forwardedCount: forwardedCount, send: { [weak self] in
self?.controllerInteraction?.sendCurrentMessage(false)
self?.sendMessage(false)
self?.dismiss(cancel: false)
}, sendSilently: { [weak self] in
self?.controllerInteraction?.sendCurrentMessage(true)
self?.sendMessage(true)
self?.dismiss(cancel: false)
}, schedule: !canSchedule ? nil : { [weak self] in
self?.controllerInteraction?.scheduleCurrentMessage()
self?.schedule()
self?.dismiss(cancel: false)
}, cancel: { [weak self] in
self?.dismiss(cancel: true)

View File

@ -9,15 +9,36 @@ import LegacyComponents
private final class InstantVideoRadialStatusNodeParameters: NSObject {
let color: UIColor
let progress: CGFloat
let dimProgress: CGFloat
let playProgress: CGFloat
init(color: UIColor, progress: CGFloat) {
init(color: UIColor, progress: CGFloat, dimProgress: CGFloat, playProgress: CGFloat) {
self.color = color
self.progress = progress
self.dimProgress = dimProgress
self.playProgress = playProgress
}
}
final class InstantVideoRadialStatusNode: ASDisplayNode {
/// Radians-to-degrees conversion helper for angle math in this file.
private extension CGFloat {
    var degrees: CGFloat {
        return self * 180.0 / .pi
    }
}
/// Bearing, in radians, from this point toward `otherPoint`, as returned by
/// `atan2f` (0 along +x, positive toward +y, range (-pi, pi]).
private extension CGPoint {
    func angle(to otherPoint: CGPoint) -> CGFloat {
        let deltaX = Float(otherPoint.x - self.x)
        let deltaY = Float(otherPoint.y - self.y)
        return CGFloat(atan2f(deltaY, deltaX))
    }
}
final class InstantVideoRadialStatusNode: ASDisplayNode, UIGestureRecognizerDelegate {
private let color: UIColor
private let hasSeek: Bool
private let hapticFeedback = HapticFeedback()
private var effectiveProgress: CGFloat = 0.0 {
didSet {
@ -25,6 +46,22 @@ final class InstantVideoRadialStatusNode: ASDisplayNode {
}
}
private var seeking = false
private var seekingProgress: CGFloat?
private var dimmed = false
private var effectiveDimProgress: CGFloat = 0.0 {
didSet {
self.setNeedsDisplay()
}
}
private var effectivePlayProgress: CGFloat = 0.0 {
didSet {
self.setNeedsDisplay()
}
}
private var _statusValue: MediaPlayerStatus?
private var statusValue: MediaPlayerStatus? {
get {
@ -58,27 +95,122 @@ final class InstantVideoRadialStatusNode: ASDisplayNode {
}
}
init(color: UIColor) {
var tapGestureRecognizer: UITapGestureRecognizer?
var panGestureRecognizer: UIPanGestureRecognizer?
var seekTo: ((Double, Bool) -> Void)?
init(color: UIColor, hasSeek: Bool) {
self.color = color
self.hasSeek = hasSeek
super.init()
self.isOpaque = false
self.statusDisposable = (self.statusValuePromise.get()
|> deliverOnMainQueue).start(next: { [weak self] status in
if let strongSelf = self {
strongSelf.statusValue = status
}
})
|> deliverOnMainQueue).start(next: { [weak self] status in
if let strongSelf = self {
strongSelf.statusValue = status
}
})
}
deinit {
    // Cancel the media status subscription started in init so the
    // deliverOnMainQueue callback cannot fire after deallocation.
    self.statusDisposable?.dispose()
}
/// Installs the tap and pan recognizers used for circular scrubbing.
/// Both are skipped entirely when seeking is disabled for this node.
override func didLoad() {
    super.didLoad()

    guard self.hasSeek else {
        return
    }

    let tapGestureRecognizer = UITapGestureRecognizer(target: self, action: #selector(self.tapGesture(_:)))
    tapGestureRecognizer.delegate = self
    // Store the recognizer: gestureRecognizerShouldBegin compares against
    // self.tapGestureRecognizer / self.panGestureRecognizer by identity;
    // without these assignments those properties stay nil, the checks never
    // match, and the central exclusion zone is never applied.
    self.tapGestureRecognizer = tapGestureRecognizer
    self.view.addGestureRecognizer(tapGestureRecognizer)

    let panGestureRecognizer = UIPanGestureRecognizer(target: self, action: #selector(self.panGesture(_:)))
    panGestureRecognizer.delegate = self
    self.panGestureRecognizer = panGestureRecognizer
    self.view.addGestureRecognizer(panGestureRecognizer)
}
/// Blocks the seek tap/pan recognizers inside the central 15%-of-width
/// radius (reserved for the play/pause hit area); any other recognizer,
/// and any touch outside that zone, may begin.
override func gestureRecognizerShouldBegin(_ gestureRecognizer: UIGestureRecognizer) -> Bool {
    guard gestureRecognizer === self.tapGestureRecognizer || gestureRecognizer === self.panGestureRecognizer else {
        return true
    }
    let center = CGPoint(x: self.bounds.width / 2.0, y: self.bounds.height / 2.0)
    let location = gestureRecognizer.location(in: self.view)
    let distanceFromCenter = location.distanceTo(center)
    return distanceFromCenter >= self.bounds.width * 0.15
}
/// Maps a tap on the circular progress ring to a playback fraction
/// (12 o'clock = 0, clockwise to 1) and seeks there, resuming playback.
@objc private func tapGesture(_ gestureRecognizer: UITapGestureRecognizer) {
    let center = CGPoint(x: self.bounds.width / 2.0, y: self.bounds.height / 2.0)
    let location = gestureRecognizer.location(in: self.view)
    // Shift by pi/2 so the zero fraction sits at the top of the circle,
    // then normalize the bearing into [0, 2*pi).
    var angle = center.angle(to: location) + CGFloat.pi / 2.0
    if angle < 0.0 {
        angle += CGFloat.pi * 2.0
    }
    let fraction = max(0.0, min(1.0, Double(angle / (2.0 * CGFloat.pi))))
    // Cap at 0.99 so a seek to the very end does not immediately finish playback.
    self.seekTo?(min(0.99, fraction), true)
}
/// Circular scrubbing: converts the pan location around the ring into a
/// playback fraction (12 o'clock = 0, clockwise to 1) and drives seekTo.
/// Playback resumes only when the gesture ends or is cancelled.
@objc private func panGesture(_ gestureRecognizer: UIPanGestureRecognizer) {
let center = CGPoint(x: self.bounds.width / 2.0, y: self.bounds.height / 2.0)
let location = gestureRecognizer.location(in: self.view)
// Shift by pi/2 so fraction 0 sits at the top, then normalize to [0, 2*pi).
var angle = center.angle(to: location) + CGFloat.pi / 2.0
if angle < 0.0 {
angle = CGFloat.pi * 2.0 + angle
}
let fraction = max(0.0, min(1.0, Double(angle / (2.0 * CGFloat.pi))))
switch gestureRecognizer.state {
case .began:
self.seeking = true
// Spring the play icon out (playProgress -> 0) while the user scrubs.
let playAnimation = POPSpringAnimation()
playAnimation.property = POPAnimatableProperty.property(withName: "playProgress", initializer: { property in
property?.readBlock = { node, values in
values?.pointee = (node as! InstantVideoRadialStatusNode).effectivePlayProgress
}
property?.writeBlock = { node, values in
(node as! InstantVideoRadialStatusNode).effectivePlayProgress = values!.pointee
}
property?.threshold = 0.01
}) as? POPAnimatableProperty
playAnimation.fromValue = self.effectivePlayProgress as NSNumber
playAnimation.toValue = 0.0 as NSNumber
playAnimation.springSpeed = 20
playAnimation.springBounciness = 8
self.pop_add(playAnimation, forKey: "playProgress")
case .changed:
// Light haptic when the finger crosses the 12-o'clock boundary in
// either direction (end -> start or start -> end of the track).
if let seekingProgress = self.seekingProgress {
if seekingProgress > 0.98 && fraction > 0.0 && fraction < 0.05 {
self.hapticFeedback.impact(.light)
} else if seekingProgress > 0.0 && seekingProgress < 0.05 && fraction > 0.98 {
self.hapticFeedback.impact(.light)
}
}
// Scrub without resuming playback; cap at 0.99 so we never seek to the
// exact end of the clip mid-gesture.
self.seekTo?(min(0.99, fraction), false)
self.seekingProgress = CGFloat(fraction)
case .ended, .cancelled:
self.seeking = false
// Final seek, this time requesting playback to resume.
self.seekTo?(min(0.99, fraction), true)
self.seekingProgress = nil
default:
break
}
}
/// Snapshots the values the async draw pass needs. The body contained a
/// duplicated return (merge residue): the first, reachable statement used a
/// 2-argument initializer that the parameters class (now carrying
/// dimProgress/playProgress) no longer declares, shadowing the correct call.
override func drawParameters(forAsyncLayer layer: _ASDisplayLayer) -> NSObjectProtocol? {
    return InstantVideoRadialStatusNodeParameters(color: self.color, progress: self.effectiveProgress, dimProgress: self.effectiveDimProgress, playProgress: self.effectivePlayProgress)
}
@objc override class func draw(_ bounds: CGRect, withParameters parameters: Any?, isCancelled: () -> Bool, isRasterizing: Bool) {
@ -93,20 +225,59 @@ final class InstantVideoRadialStatusNode: ASDisplayNode {
if let parameters = parameters as? InstantVideoRadialStatusNodeParameters {
context.setStrokeColor(parameters.color.cgColor)
if !parameters.dimProgress.isZero {
context.setFillColor(UIColor(rgb: 0x000000, alpha: 0.35 * min(1.0, parameters.dimProgress)).cgColor)
context.fillEllipse(in: bounds)
}
context.setBlendMode(.normal)
var progress = parameters.progress
let startAngle = -CGFloat.pi / 2.0
let endAngle = CGFloat(progress) * 2.0 * CGFloat.pi + startAngle
progress = min(1.0, progress)
let lineWidth: CGFloat = 4.0
var lineWidth: CGFloat = 4.0
lineWidth += 1.0 * parameters.dimProgress
let pathDiameter = bounds.size.width - lineWidth - 8.0
var pathDiameter = bounds.size.width - lineWidth - 8.0
pathDiameter -= (18.0 * 2.0) * parameters.dimProgress
if !parameters.dimProgress.isZero {
context.setStrokeColor(parameters.color.withAlphaComponent(0.2 * parameters.dimProgress).cgColor)
context.setLineWidth(lineWidth)
context.strokeEllipse(in: CGRect(x: (bounds.size.width - pathDiameter) / 2.0 , y: (bounds.size.height - pathDiameter) / 2.0, width: pathDiameter, height: pathDiameter))
if !parameters.playProgress.isZero {
context.saveGState()
context.translateBy(x: bounds.width / 2.0, y: bounds.height / 2.0)
context.scaleBy(x: 1.0 + 1.4 * parameters.playProgress, y: 1.0 + 1.4 * parameters.playProgress)
context.translateBy(x: -bounds.width / 2.0, y: -bounds.height / 2.0)
let iconSize = CGSize(width: 15.0, height: 18.0)
context.translateBy(x: (bounds.width - iconSize.width) / 2.0 + 2.0, y: (bounds.height - iconSize.height) / 2.0)
context.setFillColor(UIColor(rgb: 0xffffff).withAlphaComponent(min(1.0, parameters.playProgress)).cgColor)
let _ = try? drawSvgPath(context, path: "M1.71891969,0.209353049 C0.769586558,-0.350676705 0,0.0908839327 0,1.18800046 L0,16.8564753 C0,17.9569971 0.750549162,18.357187 1.67393713,17.7519379 L14.1073836,9.60224049 C15.0318735,8.99626906 15.0094718,8.04970371 14.062401,7.49100858 L1.71891969,0.209353049 ")
context.fillPath()
context.restoreGState()
}
}
context.setStrokeColor(parameters.color.cgColor)
let path = UIBezierPath(arcCenter: CGPoint(x: bounds.size.width / 2.0, y: bounds.size.height / 2.0), radius: pathDiameter / 2.0, startAngle: startAngle, endAngle: endAngle, clockwise:true)
path.lineWidth = lineWidth
path.lineCapStyle = .round
path.stroke()
let handleSide = 16.0 * min(1.0, (parameters.dimProgress * 2.0))
let handleSize = CGSize(width: handleSide, height: handleSide)
let handlePosition = CGPoint(x: 0.5 * pathDiameter * cos(endAngle), y: 0.5 * pathDiameter * sin(endAngle)).offsetBy(dx: bounds.size.width / 2.0, dy: bounds.size.height / 2.0)
let handleFrame = CGRect(origin: CGPoint(x: floorToScreenPixels(handlePosition.x - handleSize.width / 2.0), y: floorToScreenPixels(handlePosition.y - handleSize.height / 2.0)), size: handleSize)
context.setFillColor(UIColor.white.cgColor)
context.fillEllipse(in: handleFrame)
}
}
@ -118,7 +289,58 @@ final class InstantVideoRadialStatusNode: ASDisplayNode {
timestampAndDuration = nil
}
if let (timestamp, duration, baseRate) = timestampAndDuration, let statusValue = self.statusValue {
var dimmed = false
if let statusValue = self.statusValue {
dimmed = statusValue.status == .paused
}
if self.seeking {
dimmed = true
}
if !self.hasSeek {
dimmed = false
}
if dimmed != self.dimmed {
self.dimmed = dimmed
let animation = POPSpringAnimation()
animation.property = POPAnimatableProperty.property(withName: "dimProgress", initializer: { property in
property?.readBlock = { node, values in
values?.pointee = (node as! InstantVideoRadialStatusNode).effectiveDimProgress
}
property?.writeBlock = { node, values in
(node as! InstantVideoRadialStatusNode).effectiveDimProgress = values!.pointee
}
property?.threshold = 0.01
}) as? POPAnimatableProperty
animation.fromValue = self.effectiveDimProgress as NSNumber
animation.toValue = (dimmed ? 1.0 : 0.0) as NSNumber
animation.springSpeed = 20
animation.springBounciness = 8
self.pop_add(animation, forKey: "dimProgress")
let playAnimation = POPSpringAnimation()
playAnimation.property = POPAnimatableProperty.property(withName: "playProgress", initializer: { property in
property?.readBlock = { node, values in
values?.pointee = (node as! InstantVideoRadialStatusNode).effectivePlayProgress
}
property?.writeBlock = { node, values in
(node as! InstantVideoRadialStatusNode).effectivePlayProgress = values!.pointee
}
property?.threshold = 0.01
}) as? POPAnimatableProperty
playAnimation.fromValue = self.effectivePlayProgress as NSNumber
playAnimation.toValue = (dimmed ? 1.0 : 0.0) as NSNumber
playAnimation.springSpeed = 20
playAnimation.springBounciness = 8
self.pop_add(playAnimation, forKey: "playProgress")
}
if self.seeking, let progress = self.seekingProgress {
self.pop_removeAnimation(forKey: "progress")
self.effectiveProgress = progress
} else if let (timestamp, duration, baseRate) = timestampAndDuration, let statusValue = self.statusValue {
let progress = CGFloat(timestamp / duration)
if progress.isNaN || !progress.isFinite {
@ -148,6 +370,9 @@ final class InstantVideoRadialStatusNode: ASDisplayNode {
self.pop_add(animation, forKey: "progress")
}
} else {
self.pop_removeAnimation(forKey: "dimProgress")
self.effectiveDimProgress = 0.0
self.pop_removeAnimation(forKey: "progress")
self.effectiveProgress = 0.0
}

View File

@ -37,7 +37,7 @@ final class OverlayInstantVideoDecoration: UniversalVideoDecoration {
self.contentContainerNode.clipsToBounds = true
self.foregroundContainerNode = ASDisplayNode()
self.progressNode = InstantVideoRadialStatusNode(color: UIColor(white: 1.0, alpha: 0.6))
self.progressNode = InstantVideoRadialStatusNode(color: UIColor(white: 1.0, alpha: 0.6), hasSeek: false)
self.foregroundContainerNode.addSubnode(self.progressNode)
self.foregroundNode = self.foregroundContainerNode
}

View File

@ -5649,32 +5649,32 @@ private final class PeerInfoScreenNode: ViewControllerTracingNode, UIScrollViewD
func forwardMessages(messageIds: Set<MessageId>?) {
if let messageIds = messageIds ?? self.state.selectedMessageIds, !messageIds.isEmpty {
let peerSelectionController = self.context.sharedContext.makePeerSelectionController(PeerSelectionControllerParams(context: self.context, filter: [.onlyWriteable, .excludeDisabled], multipleSelection: true))
peerSelectionController.multiplePeersSelected = { [weak self, weak peerSelectionController] peers, messageText in
peerSelectionController.multiplePeersSelected = { [weak self, weak peerSelectionController] peers, messageText, mode in
guard let strongSelf = self, let strongController = peerSelectionController else {
return
}
strongController.dismiss()
for peer in peers {
var result: [EnqueueMessage] = []
if messageText.string.count > 0 {
let inputText = convertMarkdownToAttributes(messageText)
for text in breakChatInputText(trimChatInputText(inputText)) {
if text.length != 0 {
var attributes: [MessageAttribute] = []
let entities = generateTextEntities(text.string, enabledTypes: .all, currentEntities: generateChatInputTextEntities(text))
if !entities.isEmpty {
attributes.append(TextEntitiesMessageAttribute(entities: entities))
}
result.append(.message(text: text.string, attributes: attributes, mediaReference: nil, replyToMessageId: nil, localGroupingKey: nil, correlationId: nil))
var result: [EnqueueMessage] = []
if messageText.string.count > 0 {
let inputText = convertMarkdownToAttributes(messageText)
for text in breakChatInputText(trimChatInputText(inputText)) {
if text.length != 0 {
var attributes: [MessageAttribute] = []
let entities = generateTextEntities(text.string, enabledTypes: .all, currentEntities: generateChatInputTextEntities(text))
if !entities.isEmpty {
attributes.append(TextEntitiesMessageAttribute(entities: entities))
}
result.append(.message(text: text.string, attributes: attributes, mediaReference: nil, replyToMessageId: nil, localGroupingKey: nil, correlationId: nil))
}
}
}
result.append(contentsOf: messageIds.map { messageId -> EnqueueMessage in
return .forward(source: messageId, grouping: .auto, attributes: [], correlationId: nil)
})
result.append(contentsOf: messageIds.map { messageId -> EnqueueMessage in
return .forward(source: messageId, grouping: .auto, attributes: [], correlationId: nil)
})
for peer in peers {
let _ = (enqueueMessages(account: strongSelf.context.account, peerId: peer.id, messages: result)
|> deliverOnMainQueue).start(next: { messageIds in
if let strongSelf = self {

View File

@ -19,7 +19,7 @@ public final class PeerSelectionControllerImpl: ViewController, PeerSelectionCon
private var customTitle: String?
public var peerSelected: ((Peer) -> Void)?
public var multiplePeersSelected: (([Peer], NSAttributedString) -> Void)?
public var multiplePeersSelected: (([Peer], NSAttributedString, PeerSelectionControllerSendMode) -> Void)?
private let filter: ChatListNodePeersFilter
private let attemptSelection: ((Peer) -> Void)?
@ -150,14 +150,16 @@ public final class PeerSelectionControllerImpl: ViewController, PeerSelectionCon
override public func loadDisplayNode() {
self.displayNode = PeerSelectionControllerNode(context: self.context, filter: self.filter, hasChatListSelector: self.hasChatListSelector, hasContactSelector: self.hasContactSelector, hasGlobalSearch: self.hasGlobalSearch, createNewGroup: self.createNewGroup, present: { [weak self] c, a in
self?.present(c, in: .window(.root), with: a)
}, presentInGlobalOverlay: { [weak self] c, a in
self?.presentInGlobalOverlay(c, with: a)
}, dismiss: { [weak self] in
self?.presentingViewController?.dismiss(animated: false, completion: nil)
})
self.peerSelectionNode.navigationBar = self.navigationBar
self.peerSelectionNode.requestSend = { [weak self] peers, text in
self?.multiplePeersSelected?(peers, text)
self.peerSelectionNode.requestSend = { [weak self] peers, text, mode in
self?.multiplePeersSelected?(peers, text, mode)
}
self.peerSelectionNode.requestDeactivateSearch = { [weak self] in

View File

@ -16,6 +16,7 @@ import SegmentedControlNode
final class PeerSelectionControllerNode: ASDisplayNode {
private let context: AccountContext
private let present: (ViewController, Any?) -> Void
private let presentInGlobalOverlay: (ViewController, Any?) -> Void
private let dismiss: () -> Void
private let filter: ChatListNodePeersFilter
private let hasGlobalSearch: Bool
@ -55,7 +56,7 @@ final class PeerSelectionControllerNode: ASDisplayNode {
var requestOpenDisabledPeer: ((Peer) -> Void)?
var requestOpenPeerFromSearch: ((Peer) -> Void)?
var requestOpenMessageFromSearch: ((Peer, MessageId) -> Void)?
var requestSend: (([Peer], NSAttributedString) -> Void)?
var requestSend: (([Peer], NSAttributedString, PeerSelectionControllerSendMode) -> Void)?
private var presentationData: PresentationData
private var presentationDataDisposable: Disposable?
@ -65,9 +66,10 @@ final class PeerSelectionControllerNode: ASDisplayNode {
return self.readyValue.get()
}
init(context: AccountContext, filter: ChatListNodePeersFilter, hasChatListSelector: Bool, hasContactSelector: Bool, hasGlobalSearch: Bool, createNewGroup: (() -> Void)?, present: @escaping (ViewController, Any?) -> Void, dismiss: @escaping () -> Void) {
init(context: AccountContext, filter: ChatListNodePeersFilter, hasChatListSelector: Bool, hasContactSelector: Bool, hasGlobalSearch: Bool, createNewGroup: (() -> Void)?, present: @escaping (ViewController, Any?) -> Void, presentInGlobalOverlay: @escaping (ViewController, Any?) -> Void, dismiss: @escaping () -> Void) {
self.context = context
self.present = present
self.presentInGlobalOverlay = presentInGlobalOverlay
self.dismiss = dismiss
self.filter = filter
self.hasGlobalSearch = hasGlobalSearch
@ -266,7 +268,32 @@ final class PeerSelectionControllerNode: ASDisplayNode {
}, openLinkEditing: {
}, reportPeerIrrelevantGeoLocation: {
}, displaySlowmodeTooltip: { _, _ in
}, displaySendMessageOptions: { _, _ in
}, displaySendMessageOptions: { [weak self] node, gesture in
guard let strongSelf = self, let textInputPanelNode = strongSelf.textInputPanelNode else {
return
}
textInputPanelNode.loadTextInputNodeIfNeeded()
guard let textInputNode = textInputPanelNode.textInputNode else {
return
}
// let previousSupportedOrientations = strongSelf.supportedOrientations
// if layout.size.width > layout.size.height {
// strongSelf.supportedOrientations = ViewControllerSupportedOrientations(regularSize: .all, compactSize: .landscape)
// } else {
// strongSelf.supportedOrientations = ViewControllerSupportedOrientations(regularSize: .all, compactSize: .portrait)
// }
let controller = ChatSendMessageActionSheetController(context: strongSelf.context, interfaceState: strongSelf.presentationInterfaceState, gesture: gesture, sourceSendButton: node, textInputNode: textInputNode, completion: { [weak self] in
if let strongSelf = self {
// strongSelf.supportedOrientations = previousSupportedOrientations
}
}, sendMessage: { [weak textInputPanelNode] silently in
textInputPanelNode?.sendMessage(silently ? .silent : .generic)
}, schedule: { [weak textInputPanelNode] in
textInputPanelNode?.sendMessage(.schedule)
})
// strongSelf.sendMessageActionsController = controller
strongSelf.presentInGlobalOverlay(controller, nil)
}, openScheduledMessages: {
}, openPeersNearby: {
}, displaySearchResultsTooltip: { _, _ in
@ -311,7 +338,7 @@ final class PeerSelectionControllerNode: ASDisplayNode {
} else {
let textInputPanelNode = PeerSelectionTextInputPanelNode(presentationInterfaceState: self.presentationInterfaceState, presentController: { [weak self] c in self?.present(c, nil) })
textInputPanelNode.interfaceInteraction = self.interfaceInteraction
textInputPanelNode.sendMessage = { [weak self] in
textInputPanelNode.sendMessage = { [weak self] mode in
guard let strongSelf = self else {
return
}
@ -327,7 +354,7 @@ final class PeerSelectionControllerNode: ASDisplayNode {
}
}
if !selectedPeers.isEmpty {
strongSelf.requestSend?(selectedPeers, effectiveInputText)
strongSelf.requestSend?(selectedPeers, effectiveInputText, mode)
}
} else {
var selectedPeerIds: [PeerId] = []
@ -345,7 +372,7 @@ final class PeerSelectionControllerNode: ASDisplayNode {
selectedPeers.append(peer)
}
}
strongSelf.requestSend?(selectedPeers, effectiveInputText)
strongSelf.requestSend?(selectedPeers, effectiveInputText, mode)
}
}
}

View File

@ -109,7 +109,7 @@ class PeerSelectionTextInputPanelNode: ChatInputPanelNode, ASEditableTextNodeDel
private var validLayout: (CGFloat, CGFloat, CGFloat, UIEdgeInsets, CGFloat, LayoutMetrics, Bool)?
var sendMessage: () -> Void = { }
var sendMessage: (PeerSelectionControllerSendMode) -> Void = { _ in }
var updateHeight: (Bool) -> Void = { _ in }
private var updatingInputState = false
@ -896,7 +896,7 @@ class PeerSelectionTextInputPanelNode: ChatInputPanelNode, ASEditableTextNodeDel
}
}
self.sendMessage()
self.sendMessage(.generic)
}
@objc func textInputBackgroundViewTap(_ recognizer: UITapGestureRecognizer) {

View File

@ -71,6 +71,9 @@ final class GenericEmbedImplementation: WebEmbedImplementation {
func seek(timestamp: Double) {
}
func setBaseRate(_ baseRate: Double) {
}
func pageReady() {
self.status = MediaPlayerStatus(generationTimestamp: 0.0, duration: 0.0, dimensions: CGSize(), timestamp: 0.0, baseRate: 1.0, seekId: 0, status: .playing, soundEnabled: true)
self.updateStatus?(self.status)

View File

@ -88,6 +88,9 @@ final class TwitchEmbedImplementation: WebEmbedImplementation {
func seek(timestamp: Double) {
}
func setBaseRate(_ baseRate: Double) {
}
func pageReady() {
// Queue.mainQueue().after(delay: 0.5) {
// if let onPlaybackStarted = self.onPlaybackStarted {

View File

@ -88,6 +88,7 @@ final class VimeoEmbedImplementation: WebEmbedImplementation {
private let videoId: String
private let timestamp: Int
private var baseRate: Double = 1.0
private var status : MediaPlayerStatus
private var ready: Bool = false
@ -160,14 +161,32 @@ final class VimeoEmbedImplementation: WebEmbedImplementation {
eval("seek(\(timestamp));", nil)
}
self.status = MediaPlayerStatus(generationTimestamp: self.status.generationTimestamp, duration: self.status.duration, dimensions: self.status.dimensions, timestamp: timestamp, baseRate: 1.0, seekId: self.status.seekId + 1, status: self.status.status, soundEnabled: self.status.soundEnabled)
if let updateStatus = self.updateStatus {
updateStatus(self.status)
}
self.status = MediaPlayerStatus(generationTimestamp: self.status.generationTimestamp, duration: self.status.duration, dimensions: self.status.dimensions, timestamp: timestamp, baseRate: self.status.baseRate, seekId: self.status.seekId + 1, status: self.status.status, soundEnabled: self.status.soundEnabled)
self.updateStatus?(self.status)
self.ignorePosition = 2
}
func setBaseRate(_ baseRate: Double) {
var baseRate = baseRate
if baseRate < 0.5 {
baseRate = 0.5
}
if baseRate > 2.0 {
baseRate = 2.0
}
if !self.ready {
self.baseRate = baseRate
}
if let eval = self.evalImpl {
eval("setRate(\(baseRate));", nil)
}
self.status = MediaPlayerStatus(generationTimestamp: self.status.generationTimestamp, duration: self.status.duration, dimensions: self.status.dimensions, timestamp: self.status.timestamp, baseRate: baseRate, seekId: self.status.seekId + 1, status: self.status.status, soundEnabled: true)
self.updateStatus?(self.status)
}
func pageReady() {
}
@ -210,6 +229,11 @@ final class VimeoEmbedImplementation: WebEmbedImplementation {
}
}
if !self.ready {
self.ready = true
self.play()
}
if let updateStatus = self.updateStatus, let playback = playback, let duration = duration {
let playbackStatus: MediaPlayerPlaybackStatus
switch playback {
@ -226,10 +250,12 @@ final class VimeoEmbedImplementation: WebEmbedImplementation {
if case .playing = playbackStatus, !self.started {
self.started = true
self.onPlaybackStarted?()
Queue.mainQueue().after(0.5) {
self.onPlaybackStarted?()
}
}
self.status = MediaPlayerStatus(generationTimestamp: self.status.generationTimestamp, duration: Double(duration), dimensions: self.status.dimensions, timestamp: newTimestamp, baseRate: 1.0, seekId: self.status.seekId, status: playbackStatus, soundEnabled: true)
self.status = MediaPlayerStatus(generationTimestamp: self.status.generationTimestamp, duration: Double(duration), dimensions: self.status.dimensions, timestamp: newTimestamp, baseRate: self.status.baseRate, seekId: self.status.seekId, status: playbackStatus, soundEnabled: true)
updateStatus(self.status)
}
}

View File

@ -13,6 +13,7 @@ protocol WebEmbedImplementation {
func pause()
func togglePlayPause()
func seek(timestamp: Double)
func setBaseRate(_ baseRate: Double)
func pageReady()
func callback(url: URL)
@ -169,6 +170,10 @@ final class WebEmbedPlayerNode: ASDisplayNode, WKNavigationDelegate {
self.impl.seek(timestamp: timestamp)
}
func setBaseRate(_ baseRate: Double) {
self.impl.setBaseRate(baseRate)
}
func webView(_ webView: WKWebView, didStartProvisionalNavigation navigation: WKNavigation!) {
}

View File

@ -171,6 +171,7 @@ final class WebEmbedVideoContentNode: ASDisplayNode, UniversalVideoContentNode {
}
func setBaseRate(_ baseRate: Double) {
self.playerNode.setBaseRate(baseRate)
}
func addPlaybackCompleted(_ f: @escaping () -> Void) -> Int {

View File

@ -99,6 +99,7 @@ final class YoutubeEmbedImplementation: WebEmbedImplementation {
}
private var timestamp: Int
private var baseRate: Double = 1.0
private var ignoreEarlierTimestamps = false
private var status: MediaPlayerStatus
@ -106,6 +107,8 @@ final class YoutubeEmbedImplementation: WebEmbedImplementation {
private var started = false
private var ignorePosition: Int?
private var isPlaying = true
private enum PlaybackDelay {
case none
case afterPositionUpdates(count: Int)
@ -185,6 +188,8 @@ final class YoutubeEmbedImplementation: WebEmbedImplementation {
return
}
self.isPlaying = true
if let eval = self.evalImpl {
eval("play();", nil)
}
@ -193,6 +198,7 @@ final class YoutubeEmbedImplementation: WebEmbedImplementation {
}
func pause() {
self.isPlaying = false
if let eval = self.evalImpl {
eval("pause();", nil)
}
@ -200,9 +206,9 @@ final class YoutubeEmbedImplementation: WebEmbedImplementation {
func togglePlayPause() {
if case .playing = self.status.status {
pause()
self.pause()
} else {
play()
self.play()
}
}
@ -216,12 +222,32 @@ final class YoutubeEmbedImplementation: WebEmbedImplementation {
eval("seek(\(timestamp));", nil)
}
self.status = MediaPlayerStatus(generationTimestamp: self.status.generationTimestamp, duration: self.status.duration, dimensions: self.status.dimensions, timestamp: timestamp, baseRate: 1.0, seekId: self.status.seekId + 1, status: self.status.status, soundEnabled: true)
self.status = MediaPlayerStatus(generationTimestamp: self.status.generationTimestamp, duration: self.status.duration, dimensions: self.status.dimensions, timestamp: timestamp, baseRate: self.status.baseRate, seekId: self.status.seekId + 1, status: self.status.status, soundEnabled: true)
self.updateStatus?(self.status)
self.ignorePosition = 2
}
func setBaseRate(_ baseRate: Double) {
var baseRate = baseRate
if baseRate < 0.5 {
baseRate = 0.5
}
if baseRate > 2.0 {
baseRate = 2.0
}
if !self.ready {
self.baseRate = baseRate
}
if let eval = self.evalImpl {
eval("setRate(\(baseRate));", nil)
}
self.status = MediaPlayerStatus(generationTimestamp: self.status.generationTimestamp, duration: self.status.duration, dimensions: self.status.dimensions, timestamp: self.status.timestamp, baseRate: baseRate, seekId: self.status.seekId + 1, status: self.status.status, soundEnabled: true)
self.updateStatus?(self.status)
}
func pageReady() {
}
@ -282,6 +308,7 @@ final class YoutubeEmbedImplementation: WebEmbedImplementation {
switch playback {
case 0:
if newTimestamp > Double(duration) - 1.0 {
self.isPlaying = false
playbackStatus = .paused
newTimestamp = 0.0
} else {
@ -292,9 +319,9 @@ final class YoutubeEmbedImplementation: WebEmbedImplementation {
case 2:
playbackStatus = .paused
case 3:
playbackStatus = .buffering(initial: false, whilePlaying: true, progress: 0.0, display: false)
playbackStatus = .buffering(initial: !self.started, whilePlaying: self.isPlaying, progress: 0.0, display: false)
default:
playbackStatus = .buffering(initial: true, whilePlaying: false, progress: 0.0, display: false)
playbackStatus = .buffering(initial: true, whilePlaying: true, progress: 0.0, display: false)
}
if case .playing = playbackStatus, !self.started {
@ -304,7 +331,7 @@ final class YoutubeEmbedImplementation: WebEmbedImplementation {
self.onPlaybackStarted?()
}
self.status = MediaPlayerStatus(generationTimestamp: self.status.generationTimestamp, duration: Double(duration), dimensions: self.status.dimensions, timestamp: newTimestamp, baseRate: 1.0, seekId: self.status.seekId, status: playbackStatus, soundEnabled: true)
self.status = MediaPlayerStatus(generationTimestamp: self.status.generationTimestamp, duration: Double(duration), dimensions: self.status.dimensions, timestamp: newTimestamp, baseRate: self.status.baseRate, seekId: self.status.seekId, status: playbackStatus, soundEnabled: true)
updateStatus(self.status)
}
}
@ -326,12 +353,20 @@ final class YoutubeEmbedImplementation: WebEmbedImplementation {
self.play()
}
if self.baseRate != 1.0 {
self.setBaseRate(self.baseRate)
}
print("YT ready in \(CFAbsoluteTimeGetCurrent() - self.benchmarkStartTime)")
Queue.mainQueue().async {
self.play()
let delay = self.timestamp > 0 ? 2.8 : 2.0
if self.timestamp > 0 {
self.seek(timestamp: Double(self.timestamp))
self.play()
} else {
self.play()
}
Queue.mainQueue().after(delay, {
if !self.started {
self.play()

View File

@ -252,20 +252,7 @@ private func ongoingDataSavingForTypeWebrtc(_ type: VoiceCallDataSaving) -> Ongo
}
}
/*private func ongoingDataSavingForTypeWebrtcCustom(_ type: VoiceCallDataSaving) -> OngoingCallDataSavingWebrtcCustom {
switch type {
case .never:
return .never
case .cellular:
return .cellular
case .always:
return .always
default:
return .never
}
}*/
private protocol OngoingCallThreadLocalContextProtocol: class {
private protocol OngoingCallThreadLocalContextProtocol: AnyObject {
func nativeSetNetworkType(_ type: NetworkType)
func nativeSetIsMuted(_ value: Bool)
func nativeSetIsLowBatteryLevel(_ value: Bool)
@ -277,6 +264,7 @@ private protocol OngoingCallThreadLocalContextProtocol: class {
func nativeDebugInfo() -> String
func nativeVersion() -> String
func nativeGetDerivedState() -> Data
func addExternalAudioData(data: Data)
}
private final class OngoingCallThreadLocalContextHolder {
@ -329,6 +317,9 @@ extension OngoingCallThreadLocalContext: OngoingCallThreadLocalContextProtocol {
func nativeGetDerivedState() -> Data {
return self.getDerivedState()
}
func addExternalAudioData(data: Data) {
}
}
public final class OngoingCallVideoCapturer {
@ -517,6 +508,10 @@ extension OngoingCallThreadLocalContextWebrtc: OngoingCallThreadLocalContextProt
func nativeGetDerivedState() -> Data {
return self.getDerivedState()
}
func addExternalAudioData(data: Data) {
self.addExternalAudioData(data)
}
}
private extension OngoingCallContextState.State {
@ -586,8 +581,6 @@ extension OngoingCallVideoOrientation {
return .orientation180
case .rotation270:
return .orientation270
@unknown default:
return .orientation0
}
}
}
@ -1026,4 +1019,10 @@ public final class OngoingCallContext {
}
}
}
public func addExternalAudioData(data: Data) {
self.withContext { context in
context.addExternalAudioData(data: data)
}
}
}

View File

@ -221,6 +221,8 @@ typedef NS_ENUM(int32_t, OngoingCallDataSavingWebrtc) {
- (void)addSignalingData:(NSData * _Nonnull)data;
- (void)switchAudioOutput:(NSString * _Nonnull)deviceId;
- (void)switchAudioInput:(NSString * _Nonnull)deviceId;
- (void)addExternalAudioData:(NSData * _Nonnull)data;
@end
typedef struct {

View File

@ -1264,6 +1264,15 @@ static void (*InternalVoipLoggingFunction)(NSString *) = NULL;
_tgVoip->setAudioInputDevice(deviceId.UTF8String);
}
- (void)addExternalAudioData:(NSData * _Nonnull)data {
if (_tgVoip) {
std::vector<uint8_t> samples;
samples.resize(data.length);
[data getBytes:samples.data() length:data.length];
_tgVoip->addExternalAudioSamples(std::move(samples));
}
}
@end
namespace {

@ -1 +1 @@
Subproject commit 9c00e5d67f39cbfc8cc764b3c87c0f526eb0040f
Subproject commit e8f7d439309abd4da1c15b97141c546295fcdcb4