From af267cae64c46c9e01cdbb03ec20bf0bf019d915 Mon Sep 17 00:00:00 2001
From: Isaac <>
Date: Fri, 20 Sep 2024 22:33:36 +0800
Subject: [PATCH] Video player and calls

---
 .../Sources/AccountContext.swift              |   2 +-
 .../Sources/PresentationCallManager.swift     |   3 +-
 .../Sources/UniversalVideoNode.swift          |   9 +-
 .../Sources/AvatarVideoNode.swift             |   2 +-
 .../Sources/ChatImportActivityScreen.swift    |   2 +-
 .../Sources/DrawingStickerEntityView.swift    |   1 +
 .../ChatVideoGalleryItemScrubberView.swift    |   1 +
 .../GalleryUI/Sources/GalleryController.swift |  11 +
 .../Items/UniversalVideoGalleryItem.swift     | 368 +++++++-------
 .../InstantPagePlayableVideoNode.swift        |   2 +-
 .../Sources/PeerAvatarImageGalleryItem.swift  |   2 +-
 .../Sources/PeerInfoAvatarListNode.swift      |   2 +-
 submodules/Postbox/Sources/MediaBox.swift     |  67 +--
 .../Sources/MediaBoxFileContextV2Impl.swift   |  28 +-
 .../Postbox/Sources/MediaBoxFileManager.swift |   8 +-
 .../Sources/PhoneDemoComponent.swift          |   2 +-
 .../Sources/ShareLoadingContainerNode.swift   |   2 +-
 .../Sources/PresentationCallManager.swift     | 122 +++--
 .../ScheduleVideoChatSheetScreen.swift        | 466 ++++++++++++++++++
 .../VideoChatActionButtonComponent.swift      |   5 +-
 .../Sources/VideoChatMicButtonComponent.swift |  49 +-
 .../VideoChatScheduledInfoComponent.swift     | 213 ++++++++
 .../Sources/VideoChatScreen.swift             | 124 ++++-
 .../Sources/VoiceChatController.swift         |   6 +-
 .../Network/FetchedMediaResource.swift        |  12 +
 .../Sources/ChatBotInfoItem.swift             |   2 +-
 .../ChatMessageActionBubbleContentNode.swift  |   2 +-
 ...atMessageInteractiveInstantVideoNode.swift |   2 +-
 .../ChatMessageInteractiveMediaNode.swift     |  23 +-
 ...ageProfilePhotoSuggestionContentNode.swift |   2 +-
 .../Sources/ChatQrCodeScreen.swift            |   2 +-
 ...PeerInfoAvatarTransformContainerNode.swift |   2 +-
 .../Sources/PeerInfoEditingAvatarNode.swift   |   2 +-
 .../Sources/PeerInfoScreen.swift              |   5 +-
 .../Sources/StoryItemContentComponent.swift   |   1 +
 .../NavigationSettings.imageset/Contents.json |  12 +
 .../videosettings_30.pdf                      | Bin 0 -> 5753 bytes
 .../Contents.json                             |  12 +
 .../videosettingsauto_30.pdf                  | Bin 0 -> 5233 bytes
 .../Contents.json                             |  12 +
 .../videosettingshd_30.pdf                    | Bin 0 -> 5183 bytes
 .../Contents.json                             |  12 +
 .../videosettingssd_30.pdf                    | Bin 0 -> 5423 bytes
 .../TelegramUI/Sources/AccountContext.swift   |   4 +-
 .../TelegramUI/Sources/AppDelegate.swift      |   2 +-
 .../TelegramUI/Sources/ChatController.swift   |   2 +-
 .../Sources/OverlayInstantVideoNode.swift     |   4 +-
 .../Sources/SharedMediaPlayer.swift           |   2 +-
 .../TelegramUniversalVideoContent/BUILD       |   1 +
 .../Sources/HLSVideoContent.swift             | 391 +++++++------
 .../Sources/NativeVideoContent.swift          |   2 +-
 .../Sources/OverlayUniversalVideoNode.swift   |   4 +-
 .../Sources/PlatformVideoContent.swift        |   2 +-
 .../Sources/SystemVideoContent.swift          |   2 +-
 .../Sources/WebEmbedVideoContent.swift        |   2 +-
 submodules/TelegramVoip/BUILD                 |   1 +
 .../WrappedMediaStreamingContext.swift        | 160 +++++-
 .../Sources/WebSearchVideoGalleryItem.swift   |   2 +-
 58 files changed, 1643 insertions(+), 538 deletions(-)
 create mode 100644 submodules/TelegramCallsUI/Sources/ScheduleVideoChatSheetScreen.swift
 create mode 100644 submodules/TelegramCallsUI/Sources/VideoChatScheduledInfoComponent.swift
 create mode 100644 submodules/TelegramUI/Images.xcassets/Media Gallery/NavigationSettings.imageset/Contents.json
 create mode 100644 submodules/TelegramUI/Images.xcassets/Media Gallery/NavigationSettings.imageset/videosettings_30.pdf
 create mode 100644 submodules/TelegramUI/Images.xcassets/Media Gallery/NavigationSettingsQAuto.imageset/Contents.json
 create mode 100644 submodules/TelegramUI/Images.xcassets/Media Gallery/NavigationSettingsQAuto.imageset/videosettingsauto_30.pdf
 create mode 100644 submodules/TelegramUI/Images.xcassets/Media Gallery/NavigationSettingsQHD.imageset/Contents.json
 create mode 100644 submodules/TelegramUI/Images.xcassets/Media Gallery/NavigationSettingsQHD.imageset/videosettingshd_30.pdf
 create mode 100644 submodules/TelegramUI/Images.xcassets/Media Gallery/NavigationSettingsQSD.imageset/Contents.json
 create mode 100644 submodules/TelegramUI/Images.xcassets/Media Gallery/NavigationSettingsQSD.imageset/videosettingssd_30.pdf

diff --git a/submodules/AccountContext/Sources/AccountContext.swift b/submodules/AccountContext/Sources/AccountContext.swift
index 8a704ace67..366086569a 100644
--- a/submodules/AccountContext/Sources/AccountContext.swift
+++ b/submodules/AccountContext/Sources/AccountContext.swift
@@ -1135,7 +1135,7 @@ public protocol AccountContext: AnyObject {
     func chatLocationUnreadCount(for location: ChatLocation, contextHolder: Atomic) -> Signal
     func applyMaxReadIndex(for location: ChatLocation, contextHolder: Atomic, messageIndex: MessageIndex)
 
-    func scheduleGroupCall(peerId: PeerId)
+    func scheduleGroupCall(peerId: PeerId, parentController: ViewController)
     func joinGroupCall(peerId: PeerId, invite: String?, requestJoinAsPeerId: ((@escaping (PeerId?) -> Void) -> Void)?, activeCall: EngineGroupCallDescription)
     func requestCall(peerId: PeerId, isVideo: Bool, completion: @escaping () -> Void)
 }
diff --git a/submodules/AccountContext/Sources/PresentationCallManager.swift b/submodules/AccountContext/Sources/PresentationCallManager.swift
index 310c6846ce..d4d605a69a 100644
--- a/submodules/AccountContext/Sources/PresentationCallManager.swift
+++ b/submodules/AccountContext/Sources/PresentationCallManager.swift
@@ -4,6 +4,7 @@ import AsyncDisplayKit
 import TelegramCore
 import SwiftSignalKit
 import TelegramAudio
+import Display
 
 public enum RequestCallResult {
     case requested
@@ -472,5 +473,5 @@ public protocol PresentationCallManager: AnyObject {
     func requestCall(context: AccountContext, peerId: EnginePeer.Id, isVideo: Bool, endCurrentIfAny: Bool) -> RequestCallResult
     func joinGroupCall(context: AccountContext, peerId: EnginePeer.Id, invite: String?, requestJoinAsPeerId: ((@escaping (EnginePeer.Id?)
-> Void) -> Void)?, initialCall: EngineGroupCallDescription, endCurrentIfAny: Bool) -> JoinGroupCallManagerResult - func scheduleGroupCall(context: AccountContext, peerId: EnginePeer.Id, endCurrentIfAny: Bool) -> RequestScheduleGroupCallResult + func scheduleGroupCall(context: AccountContext, peerId: EnginePeer.Id, endCurrentIfAny: Bool, parentController: ViewController) -> RequestScheduleGroupCallResult } diff --git a/submodules/AccountContext/Sources/UniversalVideoNode.swift b/submodules/AccountContext/Sources/UniversalVideoNode.swift index d224b6aa78..788431c2be 100644 --- a/submodules/AccountContext/Sources/UniversalVideoNode.swift +++ b/submodules/AccountContext/Sources/UniversalVideoNode.swift @@ -48,7 +48,7 @@ public protocol UniversalVideoContent { var dimensions: CGSize { get } var duration: Double { get } - func makeContentNode(postbox: Postbox, audioSession: ManagedAudioSession) -> UniversalVideoContentNode & ASDisplayNode + func makeContentNode(accountId: AccountRecordId, postbox: Postbox, audioSession: ManagedAudioSession) -> UniversalVideoContentNode & ASDisplayNode func isEqual(to other: UniversalVideoContent) -> Bool } @@ -90,6 +90,7 @@ public enum UniversalVideoNodeFetchControl { } public final class UniversalVideoNode: ASDisplayNode { + private let accountId: AccountRecordId private let postbox: Postbox private let audioSession: ManagedAudioSession private let manager: UniversalVideoManager @@ -135,11 +136,12 @@ public final class UniversalVideoNode: ASDisplayNode { if self.canAttachContent { assert(self.contentRequestIndex == nil) + let accountId = self.accountId let content = self.content let postbox = self.postbox let audioSession = self.audioSession self.contentRequestIndex = self.manager.attachUniversalVideoContent(content: self.content, priority: self.priority, create: { - return content.makeContentNode(postbox: postbox, audioSession: audioSession) + return content.makeContentNode(accountId: accountId, postbox: postbox, audioSession: audioSession) }, update: { [weak self] contentNodeAndFlags in if let strongSelf = self { strongSelf.updateContentNode(contentNodeAndFlags) @@ -160,7 +162,8 @@ public final class UniversalVideoNode: ASDisplayNode { return self.contentNode != nil } - public init(postbox: Postbox, audioSession: ManagedAudioSession, manager: UniversalVideoManager, decoration: UniversalVideoDecoration, content: UniversalVideoContent, priority: UniversalVideoPriority, autoplay: Bool = false, snapshotContentWhenGone: Bool = false) { + public init(accountId: AccountRecordId, postbox: Postbox, audioSession: ManagedAudioSession, manager: UniversalVideoManager, decoration: UniversalVideoDecoration, content: UniversalVideoContent, priority: UniversalVideoPriority, autoplay: Bool = false, snapshotContentWhenGone: Bool = false) { + self.accountId = accountId self.postbox = postbox self.audioSession = audioSession self.manager = manager diff --git a/submodules/AvatarVideoNode/Sources/AvatarVideoNode.swift b/submodules/AvatarVideoNode/Sources/AvatarVideoNode.swift index 6cc94e44f2..85c701d480 100644 --- a/submodules/AvatarVideoNode/Sources/AvatarVideoNode.swift +++ b/submodules/AvatarVideoNode/Sources/AvatarVideoNode.swift @@ -234,7 +234,7 @@ public final class AvatarVideoNode: ASDisplayNode { if self.videoNode == nil { let context = self.context let mediaManager = context.sharedContext.mediaManager - let videoNode = UniversalVideoNode(postbox: context.account.postbox, audioSession: mediaManager.audioSession, manager: mediaManager.universalVideoManager, decoration: 
VideoDecoration(), content: videoContent, priority: .embedded) + let videoNode = UniversalVideoNode(accountId: context.account.id, postbox: context.account.postbox, audioSession: mediaManager.audioSession, manager: mediaManager.universalVideoManager, decoration: VideoDecoration(), content: videoContent, priority: .embedded) videoNode.clipsToBounds = true videoNode.isUserInteractionEnabled = false videoNode.isHidden = true diff --git a/submodules/ChatImportUI/Sources/ChatImportActivityScreen.swift b/submodules/ChatImportUI/Sources/ChatImportActivityScreen.swift index fcc1047125..9634edd0a8 100644 --- a/submodules/ChatImportUI/Sources/ChatImportActivityScreen.swift +++ b/submodules/ChatImportUI/Sources/ChatImportActivityScreen.swift @@ -464,7 +464,7 @@ public final class ChatImportActivityScreen: ViewController { let videoContent = NativeVideoContent(id: .message(1, EngineMedia.Id(namespace: 0, id: 1)), userLocation: .other, fileReference: .standalone(media: dummyFile), streamVideo: .none, loopVideo: true, enableSound: false, fetchAutomatically: true, onlyFullSizeThumbnail: false, continuePlayingWithoutSoundOnLostAudioSession: false, placeholderColor: .black, storeAfterDownload: nil) - let videoNode = UniversalVideoNode(postbox: context.account.postbox, audioSession: context.sharedContext.mediaManager.audioSession, manager: context.sharedContext.mediaManager.universalVideoManager, decoration: decoration, content: videoContent, priority: .embedded) + let videoNode = UniversalVideoNode(accountId: context.account.id, postbox: context.account.postbox, audioSession: context.sharedContext.mediaManager.audioSession, manager: context.sharedContext.mediaManager.universalVideoManager, decoration: decoration, content: videoContent, priority: .embedded) videoNode.frame = CGRect(origin: CGPoint(), size: CGSize(width: 2.0, height: 2.0)) videoNode.alpha = 0.01 self.videoNode = videoNode diff --git a/submodules/DrawingUI/Sources/DrawingStickerEntityView.swift b/submodules/DrawingUI/Sources/DrawingStickerEntityView.swift index ae5a1f2e6e..c1c8f5d686 100644 --- a/submodules/DrawingUI/Sources/DrawingStickerEntityView.swift +++ b/submodules/DrawingUI/Sources/DrawingStickerEntityView.swift @@ -330,6 +330,7 @@ public class DrawingStickerEntityView: DrawingEntityView { private func setupWithVideo(_ file: TelegramMediaFile) { let videoNode = UniversalVideoNode( + accountId: self.context.account.id, postbox: self.context.account.postbox, audioSession: self.context.sharedContext.mediaManager.audioSession, manager: self.context.sharedContext.mediaManager.universalVideoManager, diff --git a/submodules/GalleryUI/Sources/ChatVideoGalleryItemScrubberView.swift b/submodules/GalleryUI/Sources/ChatVideoGalleryItemScrubberView.swift index bee6431d99..b98b543865 100644 --- a/submodules/GalleryUI/Sources/ChatVideoGalleryItemScrubberView.swift +++ b/submodules/GalleryUI/Sources/ChatVideoGalleryItemScrubberView.swift @@ -9,6 +9,7 @@ import UniversalMediaPlayer import TelegramPresentationData import RangeSet import ShimmerEffect +import TelegramUniversalVideoContent private let textFont = Font.with(size: 13.0, design: .regular, weight: .regular, traits: [.monospacedNumbers]) diff --git a/submodules/GalleryUI/Sources/GalleryController.swift b/submodules/GalleryUI/Sources/GalleryController.swift index c5e76739a1..1448c0f424 100644 --- a/submodules/GalleryUI/Sources/GalleryController.swift +++ b/submodules/GalleryUI/Sources/GalleryController.swift @@ -578,6 +578,7 @@ public class GalleryController: ViewController, 
StandalonePresentableController, private let landscape: Bool private let timecode: Double? private var playbackRate: Double? + private var videoQuality: UniversalVideoContentVideoQuality = .auto private let accountInUseDisposable = MetaDisposable() private let disposable = MetaDisposable() @@ -1757,6 +1758,16 @@ public class GalleryController: ViewController, StandalonePresentableController, } } + func updateSharedVideoQuality(_ videoQuality: UniversalVideoContentVideoQuality) { + self.videoQuality = videoQuality + + self.galleryNode.pager.forEachItemNode { itemNode in + if let itemNode = itemNode as? UniversalVideoGalleryItemNode { + itemNode.updateVideoQuality(videoQuality) + } + } + } + public var keyShortcuts: [KeyShortcut] { var keyShortcuts: [KeyShortcut] = [] keyShortcuts.append( diff --git a/submodules/GalleryUI/Sources/Items/UniversalVideoGalleryItem.swift b/submodules/GalleryUI/Sources/Items/UniversalVideoGalleryItem.swift index fd7be6047b..9f361c67be 100644 --- a/submodules/GalleryUI/Sources/Items/UniversalVideoGalleryItem.swift +++ b/submodules/GalleryUI/Sources/Items/UniversalVideoGalleryItem.swift @@ -769,6 +769,8 @@ final class UniversalVideoGalleryItemNode: ZoomableContentGalleryItemNode { private var moreBarButtonRate: Double = 1.0 private var moreBarButtonRateTimestamp: Double? + private let settingsBarButton: MoreHeaderButton + private var videoNode: UniversalVideoNode? private var videoNodeUserInteractionEnabled: Bool = false private var videoFramePreview: FramePreview? @@ -798,6 +800,7 @@ final class UniversalVideoGalleryItemNode: ZoomableContentGalleryItemNode { private var item: UniversalVideoGalleryItem? private var playbackRate: Double? + private var videoQuality: UniversalVideoContentVideoQuality = .auto private let playbackRatePromise = ValuePromise() private let statusDisposable = MetaDisposable() @@ -849,11 +852,15 @@ final class UniversalVideoGalleryItemNode: ZoomableContentGalleryItemNode { self.moreBarButton.isUserInteractionEnabled = true self.moreBarButton.setContent(.more(optionsCircleImage(dark: false))) + self.settingsBarButton = MoreHeaderButton() + self.settingsBarButton.isUserInteractionEnabled = true + super.init() self.clipsToBounds = true self.moreBarButton.addTarget(self, action: #selector(self.moreButtonPressed), forControlEvents: .touchUpInside) + self.settingsBarButton.addTarget(self, action: #selector(self.settingsButtonPressed), forControlEvents: .touchUpInside) self.footerContentNode.interacting = { [weak self] value in self?.isInteractingPromise.set(value) @@ -966,7 +973,10 @@ final class UniversalVideoGalleryItemNode: ZoomableContentGalleryItemNode { } self.moreBarButton.contextAction = { [weak self] sourceNode, gesture in - self?.openMoreMenu(sourceNode: sourceNode, gesture: gesture) + guard let self else { + return + } + self.openMoreMenu(sourceNode: self.moreBarButton.referenceNode, gesture: gesture, isSettings: false) } self.titleContentView = GalleryTitleView(frame: CGRect()) @@ -1106,6 +1116,7 @@ final class UniversalVideoGalleryItemNode: ZoomableContentGalleryItemNode { var forceEnableUserInteraction = false var isAnimated = false var isEnhancedWebPlayer = false + var isAdaptive = false if let content = item.content as? 
NativeVideoContent { isAnimated = content.fileReference.media.isAnimated self.videoFramePreview = MediaPlayerFramePreview(postbox: item.context.account.postbox, userLocation: content.userLocation, userContentType: .video, fileReference: content.fileReference) @@ -1129,6 +1140,14 @@ final class UniversalVideoGalleryItemNode: ZoomableContentGalleryItemNode { } else if let _ = item.content as? PlatformVideoContent { disablePlayerControls = true forceEnablePiP = true + } else if let _ = item.content as? HLSVideoContent { + isAdaptive = true + } + + if isAdaptive { + self.settingsBarButton.setContent(.image(generateTintedImage(image: UIImage(bundleImageName: "Media Gallery/NavigationSettingsQAuto"), color: .white))) + } else { + self.settingsBarButton.setContent(.image(generateTintedImage(image: UIImage(bundleImageName: "Media Gallery/NavigationSettings"), color: .white))) } let dimensions = item.content.dimensions @@ -1149,7 +1168,7 @@ final class UniversalVideoGalleryItemNode: ZoomableContentGalleryItemNode { let mediaManager = item.context.sharedContext.mediaManager - let videoNode = UniversalVideoNode(postbox: item.context.account.postbox, audioSession: mediaManager.audioSession, manager: mediaManager.universalVideoManager, decoration: GalleryVideoDecoration(), content: item.content, priority: .gallery) + let videoNode = UniversalVideoNode(accountId: item.context.account.id, postbox: item.context.account.postbox, audioSession: mediaManager.audioSession, manager: mediaManager.universalVideoManager, decoration: GalleryVideoDecoration(), content: item.content, priority: .gallery) let videoScale: CGFloat if item.content is WebEmbedVideoContent { videoScale = 1.0 @@ -1250,7 +1269,9 @@ final class UniversalVideoGalleryItemNode: ZoomableContentGalleryItemNode { } let status = messageMediaFileStatus(context: item.context, messageId: message.id, file: file) if !isWebpage { - scrubberView.setFetchStatusSignal(status, strings: self.presentationData.strings, decimalSeparator: self.presentationData.dateTimeFormat.decimalSeparator, fileSize: file.size) + if !NativeVideoContent.isHLSVideo(file: file) { + scrubberView.setFetchStatusSignal(status, strings: self.presentationData.strings, decimalSeparator: self.presentationData.dateTimeFormat.decimalSeparator, fileSize: file.size) + } } self.requiresDownload = !isMediaStreamable(message: message, media: file) @@ -1443,6 +1464,7 @@ final class UniversalVideoGalleryItemNode: ZoomableContentGalleryItemNode { rightBarButtonItem.accessibilityLabel = self.presentationData.strings.Gallery_VoiceOver_Stickers barButtonItems.append(rightBarButtonItem) } + if forceEnablePiP || (!isAnimated && !disablePlayerControls && !disablePictureInPicture) { let rightBarButtonItem = UIBarButtonItem(image: pictureInPictureButtonImage, style: .plain, target: self, action: #selector(self.pictureInPictureButtonPressed)) rightBarButtonItem.accessibilityLabel = self.presentationData.strings.Gallery_VoiceOver_PictureInPicture @@ -1487,6 +1509,12 @@ final class UniversalVideoGalleryItemNode: ZoomableContentGalleryItemNode { hasMoreButton = true } + if !isAnimated && !disablePlayerControls { + let settingsMenuItem = UIBarButtonItem(customDisplayNode: self.settingsBarButton)! + settingsMenuItem.accessibilityLabel = self.presentationData.strings.Settings_Title + barButtonItems.append(settingsMenuItem) + } + if hasMoreButton { let moreMenuItem = UIBarButtonItem(customDisplayNode: self.moreBarButton)! 
moreMenuItem.accessibilityLabel = self.presentationData.strings.Common_More @@ -2169,7 +2197,7 @@ final class UniversalVideoGalleryItemNode: ZoomableContentGalleryItemNode { let baseNavigationController = self.baseNavigationController() let mediaManager = self.context.sharedContext.mediaManager var expandImpl: (() -> Void)? - let overlayNode = OverlayUniversalVideoNode(postbox: self.context.account.postbox, audioSession: context.sharedContext.mediaManager.audioSession, manager: context.sharedContext.mediaManager.universalVideoManager, content: item.content, expand: { + let overlayNode = OverlayUniversalVideoNode(accountId: self.context.account.id, postbox: self.context.account.postbox, audioSession: context.sharedContext.mediaManager.audioSession, manager: context.sharedContext.mediaManager.universalVideoManager, content: item.content, expand: { expandImpl?() }, close: { [weak mediaManager] in mediaManager?.setOverlayVideoNode(nil) @@ -2271,7 +2299,7 @@ final class UniversalVideoGalleryItemNode: ZoomableContentGalleryItemNode { if #available(iOSApplicationExtension 15.0, iOS 15.0, *), AVPictureInPictureController.isPictureInPictureSupported(), isNativePictureInPictureSupported { self.disablePictureInPicturePlaceholder = true - let overlayVideoNode = UniversalVideoNode(postbox: self.context.account.postbox, audioSession: self.context.sharedContext.mediaManager.audioSession, manager: self.context.sharedContext.mediaManager.universalVideoManager, decoration: GalleryVideoDecoration(), content: item.content, priority: .overlay) + let overlayVideoNode = UniversalVideoNode(accountId: self.context.account.id, postbox: self.context.account.postbox, audioSession: self.context.sharedContext.mediaManager.audioSession, manager: self.context.sharedContext.mediaManager.universalVideoManager, decoration: GalleryVideoDecoration(), content: item.content, priority: .overlay) let absoluteRect = videoNode.view.convert(videoNode.view.bounds, to: nil) overlayVideoNode.frame = absoluteRect overlayVideoNode.updateLayout(size: absoluteRect.size, transition: .immediate) @@ -2354,7 +2382,7 @@ final class UniversalVideoGalleryItemNode: ZoomableContentGalleryItemNode { shouldBeDismissed = .single(false) } - let overlayNode = OverlayUniversalVideoNode(postbox: self.context.account.postbox, audioSession: context.sharedContext.mediaManager.audioSession, manager: context.sharedContext.mediaManager.universalVideoManager, content: item.content, shouldBeDismissed: shouldBeDismissed, expand: { + let overlayNode = OverlayUniversalVideoNode(accountId: self.context.account.id, postbox: self.context.account.postbox, audioSession: context.sharedContext.mediaManager.audioSession, manager: context.sharedContext.mediaManager.universalVideoManager, content: item.content, shouldBeDismissed: shouldBeDismissed, expand: { expandImpl?() }, close: { [weak mediaManager] in mediaManager?.setOverlayVideoNode(nil) @@ -2501,7 +2529,7 @@ final class UniversalVideoGalleryItemNode: ZoomableContentGalleryItemNode { self.moreBarButton.contextAction?(self.moreBarButton.containerNode, nil) } - private func openMoreMenu(sourceNode: ASDisplayNode, gesture: ContextGesture?) { + private func openMoreMenu(sourceNode: ContextReferenceContentNode, gesture: ContextGesture?, isSettings: Bool) { guard let controller = self.baseNavigationController()?.topViewController as? 
ViewController else { return } @@ -2510,12 +2538,12 @@ final class UniversalVideoGalleryItemNode: ZoomableContentGalleryItemNode { if case let .message(message, _) = self.item?.contentInfo, let _ = message.adAttribute { items = self.adMenuMainItems() } else { - items = self.contextMenuMainItems(dismiss: { + items = self.contextMenuMainItems(isSettings: isSettings, dismiss: { dismissImpl?() }) } - let contextController = ContextController(presentationData: self.presentationData.withUpdated(theme: defaultDarkColorPresentationTheme), source: .reference(HeaderContextReferenceContentSource(controller: controller, sourceNode: self.moreBarButton.referenceNode)), items: items |> map { ContextController.Items(content: .list($0)) }, gesture: gesture) + let contextController = ContextController(presentationData: self.presentationData.withUpdated(theme: defaultDarkColorPresentationTheme), source: .reference(HeaderContextReferenceContentSource(controller: controller, sourceNode: sourceNode)), items: items |> map { ContextController.Items(content: .list($0)) }, gesture: gesture) self.isShowingContextMenuPromise.set(true) controller.presentInGlobalOverlay(contextController) dismissImpl = { [weak contextController] in @@ -2666,7 +2694,7 @@ final class UniversalVideoGalleryItemNode: ZoomableContentGalleryItemNode { } - private func contextMenuMainItems(dismiss: @escaping () -> Void) -> Signal<[ContextMenuItem], NoError> { + private func contextMenuMainItems(isSettings: Bool, dismiss: @escaping () -> Void) -> Signal<[ContextMenuItem], NoError> { guard let videoNode = self.videoNode, let item = self.item else { return .single([]) } @@ -2687,172 +2715,172 @@ final class UniversalVideoGalleryItemNode: ZoomableContentGalleryItemNode { var items: [ContextMenuItem] = [] - var speedValue: String = strongSelf.presentationData.strings.PlaybackSpeed_Normal - var speedIconText: String = "1x" - var didSetSpeedValue = false - for (text, iconText, speed) in strongSelf.speedList(strings: strongSelf.presentationData.strings) { - if abs(speed - status.baseRate) < 0.01 { - speedValue = text - speedIconText = iconText - didSetSpeedValue = true - break - } - } - if !didSetSpeedValue && status.baseRate != 1.0 { - speedValue = String(format: "%.1fx", status.baseRate) - speedIconText = speedValue - } - - items.append(.action(ContextMenuActionItem(text: strongSelf.presentationData.strings.PlaybackSpeed_Title, textLayout: .secondLineWithValue(speedValue), icon: { theme in - return optionsRateImage(rate: speedIconText, isLarge: false, color: theme.contextMenu.primaryColor) - }, action: { c, _ in - guard let strongSelf = self else { - c?.dismiss(completion: nil) - return - } - - c?.setItems(strongSelf.contextMenuSpeedItems(dismiss: dismiss) |> map { ContextController.Items(content: .list($0)) }, minHeight: nil, animated: true) - }))) - - items.append(.separator) - - if let videoQualityState = strongSelf.videoNode?.videoQualityState(), !videoQualityState.available.isEmpty { - //TODO:localize - - let qualityText: String - switch videoQualityState.preferred { - case .auto: - if videoQualityState.current != 0 { - qualityText = "Auto (\(videoQualityState.current)p)" - } else { - qualityText = "Auto" + if isSettings { + var speedValue: String = strongSelf.presentationData.strings.PlaybackSpeed_Normal + var speedIconText: String = "1x" + var didSetSpeedValue = false + for (text, iconText, speed) in strongSelf.speedList(strings: strongSelf.presentationData.strings) { + if abs(speed - status.baseRate) < 0.01 { + speedValue = text + 
speedIconText = iconText + didSetSpeedValue = true + break } - case let .quality(value): - qualityText = "\(value)p" + } + if !didSetSpeedValue && status.baseRate != 1.0 { + speedValue = String(format: "%.1fx", status.baseRate) + speedIconText = speedValue } - items.append(.action(ContextMenuActionItem(text: "Video Quality", textLayout: .secondLineWithValue(qualityText), icon: { _ in - return nil + items.append(.action(ContextMenuActionItem(text: strongSelf.presentationData.strings.PlaybackSpeed_Title, textLayout: .secondLineWithValue(speedValue), icon: { theme in + return optionsRateImage(rate: speedIconText, isLarge: false, color: theme.contextMenu.primaryColor) }, action: { c, _ in guard let strongSelf = self else { c?.dismiss(completion: nil) return } - - c?.setItems(.single(ContextController.Items(content: .list(strongSelf.contextMenuVideoQualityItems(dismiss: dismiss)))), minHeight: nil, animated: true) + + c?.pushItems(items: strongSelf.contextMenuSpeedItems(dismiss: dismiss) |> map { ContextController.Items(content: .list($0)) }) }))) - items.append(.separator) - } - - if let (message, _, _) = strongSelf.contentInfo() { - let context = strongSelf.context - items.append(.action(ContextMenuActionItem(text: strongSelf.presentationData.strings.SharedMedia_ViewInChat, icon: { theme in generateTintedImage(image: UIImage(bundleImageName: "Chat/Context Menu/GoToMessage"), color: theme.contextMenu.primaryColor)}, action: { [weak self] _, f in - guard let strongSelf = self, let peer = peer else { - return + if let videoQualityState = strongSelf.videoNode?.videoQualityState(), !videoQualityState.available.isEmpty { + items.append(.separator) + + //TODO:localize + + let qualityText: String + switch videoQualityState.preferred { + case .auto: + if videoQualityState.current != 0 { + qualityText = "Auto (\(videoQualityState.current)p)" + } else { + qualityText = "Auto" + } + case let .quality(value): + qualityText = "\(value)p" } - if let navigationController = strongSelf.baseNavigationController() { - strongSelf.beginCustomDismiss(true) + + items.append(.action(ContextMenuActionItem(text: "Video Quality", textLayout: .secondLineWithValue(qualityText), icon: { _ in + return nil + }, action: { c, _ in + guard let strongSelf = self else { + c?.dismiss(completion: nil) + return + } - context.sharedContext.navigateToChatController(NavigateToChatControllerParams(navigationController: navigationController, context: context, chatLocation: .peer(peer), subject: .message(id: .id(message.id), highlight: ChatControllerSubject.MessageHighlight(quote: nil), timecode: nil, setupReply: false))) - - Queue.mainQueue().after(0.3) { - strongSelf.completeCustomDismiss() + c?.pushItems(items: .single(ContextController.Items(content: .list(strongSelf.contextMenuVideoQualityItems(dismiss: dismiss))))) + }))) + } + } else { + if let (message, _, _) = strongSelf.contentInfo() { + let context = strongSelf.context + items.append(.action(ContextMenuActionItem(text: strongSelf.presentationData.strings.SharedMedia_ViewInChat, icon: { theme in generateTintedImage(image: UIImage(bundleImageName: "Chat/Context Menu/GoToMessage"), color: theme.contextMenu.primaryColor)}, action: { [weak self] _, f in + guard let strongSelf = self, let peer = peer else { + return + } + if let navigationController = strongSelf.baseNavigationController() { + strongSelf.beginCustomDismiss(true) + + context.sharedContext.navigateToChatController(NavigateToChatControllerParams(navigationController: navigationController, context: context, chatLocation: 
.peer(peer), subject: .message(id: .id(message.id), highlight: ChatControllerSubject.MessageHighlight(quote: nil), timecode: nil, setupReply: false))) + + Queue.mainQueue().after(0.3) { + strongSelf.completeCustomDismiss() + } + } + f(.default) + }))) + } + + // if #available(iOS 11.0, *) { + // items.append(.action(ContextMenuActionItem(text: "AirPlay", textColor: .primary, icon: { theme in generateTintedImage(image: UIImage(bundleImageName: "Media Gallery/AirPlay"), color: theme.contextMenu.primaryColor) }, action: { [weak self] _, f in + // f(.default) + // guard let strongSelf = self else { + // return + // } + // strongSelf.beginAirPlaySetup() + // }))) + // } + + if let (message, _, _) = strongSelf.contentInfo() { + for media in message.media { + if let webpage = media as? TelegramMediaWebpage, case let .Loaded(content) = webpage.content { + let url = content.url + + let item = OpenInItem.url(url: url) + let openText = strongSelf.presentationData.strings.Conversation_FileOpenIn + items.append(.action(ContextMenuActionItem(text: openText, textColor: .primary, icon: { theme in generateTintedImage(image: UIImage(bundleImageName: "Chat/Context Menu/Share"), color: theme.contextMenu.primaryColor) }, action: { _, f in + f(.default) + + if let strongSelf = self, let controller = strongSelf.galleryController() { + var presentationData = strongSelf.context.sharedContext.currentPresentationData.with { $0 } + if !presentationData.theme.overallDarkAppearance { + presentationData = presentationData.withUpdated(theme: defaultDarkColorPresentationTheme) + } + let actionSheet = OpenInActionSheetController(context: strongSelf.context, forceTheme: presentationData.theme, item: item, openUrl: { [weak self] url in + if let strongSelf = self { + strongSelf.context.sharedContext.openExternalUrl(context: strongSelf.context, urlContext: .generic, url: url, forceExternal: true, presentationData: presentationData, navigationController: strongSelf.baseNavigationController(), dismissInput: {}) + } + }) + controller.present(actionSheet, in: .window(.root)) + } + }))) + break } } - f(.default) - }))) - } - -// if #available(iOS 11.0, *) { -// items.append(.action(ContextMenuActionItem(text: "AirPlay", textColor: .primary, icon: { theme in generateTintedImage(image: UIImage(bundleImageName: "Media Gallery/AirPlay"), color: theme.contextMenu.primaryColor) }, action: { [weak self] _, f in -// f(.default) -// guard let strongSelf = self else { -// return -// } -// strongSelf.beginAirPlaySetup() -// }))) -// } - - if let (message, _, _) = strongSelf.contentInfo() { - for media in message.media { - if let webpage = media as? 
TelegramMediaWebpage, case let .Loaded(content) = webpage.content { - let url = content.url - - let item = OpenInItem.url(url: url) - let openText = strongSelf.presentationData.strings.Conversation_FileOpenIn - items.append(.action(ContextMenuActionItem(text: openText, textColor: .primary, icon: { theme in generateTintedImage(image: UIImage(bundleImageName: "Chat/Context Menu/Share"), color: theme.contextMenu.primaryColor) }, action: { _, f in - f(.default) - - if let strongSelf = self, let controller = strongSelf.galleryController() { - var presentationData = strongSelf.context.sharedContext.currentPresentationData.with { $0 } - if !presentationData.theme.overallDarkAppearance { - presentationData = presentationData.withUpdated(theme: defaultDarkColorPresentationTheme) - } - let actionSheet = OpenInActionSheetController(context: strongSelf.context, forceTheme: presentationData.theme, item: item, openUrl: { [weak self] url in - if let strongSelf = self { - strongSelf.context.sharedContext.openExternalUrl(context: strongSelf.context, urlContext: .generic, url: url, forceExternal: true, presentationData: presentationData, navigationController: strongSelf.baseNavigationController(), dismissInput: {}) - } - }) - controller.present(actionSheet, in: .window(.root)) - } - }))) - break - } } - } - - if let (message, maybeFile, _) = strongSelf.contentInfo(), let file = maybeFile, !message.isCopyProtected() && !item.peerIsCopyProtected && message.paidContent == nil { - items.append(.action(ContextMenuActionItem(text: strongSelf.presentationData.strings.Gallery_SaveVideo, icon: { theme in generateTintedImage(image: UIImage(bundleImageName: "Chat/Context Menu/Download"), color: theme.actionSheet.primaryTextColor) }, action: { _, f in - f(.default) - - if let strongSelf = self { - switch strongSelf.fetchStatus { - case .Local: - let _ = (SaveToCameraRoll.saveToCameraRoll(context: strongSelf.context, postbox: strongSelf.context.account.postbox, userLocation: .peer(message.id.peerId), mediaReference: .message(message: MessageReference(message), media: file)) - |> deliverOnMainQueue).start(completed: { - guard let strongSelf = self else { - return - } + + if let (message, maybeFile, _) = strongSelf.contentInfo(), let file = maybeFile, !message.isCopyProtected() && !item.peerIsCopyProtected && message.paidContent == nil { + items.append(.action(ContextMenuActionItem(text: strongSelf.presentationData.strings.Gallery_SaveVideo, icon: { theme in generateTintedImage(image: UIImage(bundleImageName: "Chat/Context Menu/Download"), color: theme.actionSheet.primaryTextColor) }, action: { _, f in + f(.default) + + if let strongSelf = self { + switch strongSelf.fetchStatus { + case .Local: + let _ = (SaveToCameraRoll.saveToCameraRoll(context: strongSelf.context, postbox: strongSelf.context.account.postbox, userLocation: .peer(message.id.peerId), mediaReference: .message(message: MessageReference(message), media: file)) + |> deliverOnMainQueue).start(completed: { + guard let strongSelf = self else { + return + } + guard let controller = strongSelf.galleryController() else { + return + } + controller.present(UndoOverlayController(presentationData: strongSelf.presentationData, content: .mediaSaved(text: strongSelf.presentationData.strings.Gallery_VideoSaved), elevatedLayout: false, animateInAsReplacement: false, action: { _ in return false }), in: .window(.root)) + }) + default: guard let controller = strongSelf.galleryController() else { return } - controller.present(UndoOverlayController(presentationData: 
strongSelf.presentationData, content: .mediaSaved(text: strongSelf.presentationData.strings.Gallery_VideoSaved), elevatedLayout: false, animateInAsReplacement: false, action: { _ in return false }), in: .window(.root)) - }) - default: - guard let controller = strongSelf.galleryController() else { - return + controller.present(textAlertController(context: strongSelf.context, title: nil, text: strongSelf.presentationData.strings.Gallery_WaitForVideoDownoad, actions: [TextAlertAction(type: .defaultAction, title: strongSelf.presentationData.strings.Common_OK, action: { + })]), in: .window(.root)) } - controller.present(textAlertController(context: strongSelf.context, title: nil, text: strongSelf.presentationData.strings.Gallery_WaitForVideoDownoad, actions: [TextAlertAction(type: .defaultAction, title: strongSelf.presentationData.strings.Common_OK, action: { - })]), in: .window(.root)) } - } - }))) - } - - if let peer, let (message, _, _) = strongSelf.contentInfo(), canSendMessagesToPeer(peer._asPeer()) { - items.append(.action(ContextMenuActionItem(text: strongSelf.presentationData.strings.Conversation_ContextMenuReply, icon: { theme in generateTintedImage(image: UIImage(bundleImageName: "Chat/Context Menu/Reply"), color: theme.contextMenu.primaryColor)}, action: { [weak self] _, f in - if let self, let navigationController = self.baseNavigationController() { - self.beginCustomDismiss(true) - - context.sharedContext.navigateToChatController(NavigateToChatControllerParams(navigationController: navigationController, context: context, chatLocation: .peer(peer), subject: .message(id: .id(message.id), highlight: ChatControllerSubject.MessageHighlight(quote: nil), timecode: nil, setupReply: true))) - - Queue.mainQueue().after(0.3) { - self.completeCustomDismiss() + }))) + } + + if let peer, let (message, _, _) = strongSelf.contentInfo(), canSendMessagesToPeer(peer._asPeer()) { + items.append(.action(ContextMenuActionItem(text: strongSelf.presentationData.strings.Conversation_ContextMenuReply, icon: { theme in generateTintedImage(image: UIImage(bundleImageName: "Chat/Context Menu/Reply"), color: theme.contextMenu.primaryColor)}, action: { [weak self] _, f in + if let self, let navigationController = self.baseNavigationController() { + self.beginCustomDismiss(true) + + context.sharedContext.navigateToChatController(NavigateToChatControllerParams(navigationController: navigationController, context: context, chatLocation: .peer(peer), subject: .message(id: .id(message.id), highlight: ChatControllerSubject.MessageHighlight(quote: nil), timecode: nil, setupReply: true))) + + Queue.mainQueue().after(0.3) { + self.completeCustomDismiss() + } } - } - f(.default) - }))) - } - - if strongSelf.canDelete() { - items.append(.action(ContextMenuActionItem(text: strongSelf.presentationData.strings.Common_Delete, textColor: .destructive, icon: { theme in generateTintedImage(image: UIImage(bundleImageName: "Chat/Context Menu/Delete"), color: theme.contextMenu.destructiveColor) }, action: { _, f in - f(.default) - - if let strongSelf = self { - strongSelf.footerContentNode.deleteButtonPressed() - } - }))) + f(.default) + }))) + } + + if strongSelf.canDelete() { + items.append(.action(ContextMenuActionItem(text: strongSelf.presentationData.strings.Common_Delete, textColor: .destructive, icon: { theme in generateTintedImage(image: UIImage(bundleImageName: "Chat/Context Menu/Delete"), color: theme.contextMenu.destructiveColor) }, action: { _, f in + f(.default) + + if let strongSelf = self { + 
strongSelf.footerContentNode.deleteButtonPressed() + } + }))) + } } return items @@ -2877,11 +2905,7 @@ final class UniversalVideoGalleryItemNode: ZoomableContentGalleryItemNode { items.append(.action(ContextMenuActionItem(text: strongSelf.presentationData.strings.Common_Back, icon: { theme in return generateTintedImage(image: UIImage(bundleImageName: "Chat/Context Menu/Back"), color: theme.actionSheet.primaryTextColor) }, iconPosition: .left, action: { c, _ in - guard let strongSelf = self else { - c?.dismiss(completion: nil) - return - } - c?.setItems(strongSelf.contextMenuMainItems(dismiss: dismiss) |> map { ContextController.Items(content: .list($0)) }, minHeight: nil, animated: true) + c?.popItems() }))) let sliderValuePromise = ValuePromise(nil) @@ -2938,12 +2962,8 @@ final class UniversalVideoGalleryItemNode: ZoomableContentGalleryItemNode { items.append(.action(ContextMenuActionItem(text: self.presentationData.strings.Common_Back, icon: { theme in return generateTintedImage(image: UIImage(bundleImageName: "Chat/Context Menu/Back"), color: theme.actionSheet.primaryTextColor) - }, iconPosition: .left, action: { [weak self] c, _ in - guard let self else { - c?.dismiss(completion: nil) - return - } - c?.setItems(self.contextMenuMainItems(dismiss: dismiss) |> map { ContextController.Items(content: .list($0)) }, minHeight: nil, animated: true) + }, iconPosition: .left, action: { c, _ in + c?.popItems() }))) do { @@ -2967,6 +2987,7 @@ final class UniversalVideoGalleryItemNode: ZoomableContentGalleryItemNode { return } videoNode.setVideoQuality(.auto) + self.settingsBarButton.setContent(.image(generateTintedImage(image: UIImage(bundleImageName: "Media Gallery/NavigationSettingsQAuto"), color: .white))) /*if let controller = strongSelf.galleryController() as? GalleryController { controller.updateSharedPlaybackRate(rate) @@ -2990,6 +3011,11 @@ final class UniversalVideoGalleryItemNode: ZoomableContentGalleryItemNode { return } videoNode.setVideoQuality(.quality(quality)) + if quality >= 700 { + self.settingsBarButton.setContent(.image(generateTintedImage(image: UIImage(bundleImageName: "Media Gallery/NavigationSettingsQHD"), color: .white))) + } else { + self.settingsBarButton.setContent(.image(generateTintedImage(image: UIImage(bundleImageName: "Media Gallery/NavigationSettingsQSD"), color: .white))) + } /*if let controller = strongSelf.galleryController() as? GalleryController { controller.updateSharedPlaybackRate(rate) @@ -3082,6 +3108,10 @@ final class UniversalVideoGalleryItemNode: ZoomableContentGalleryItemNode { }) } + @objc private func settingsButtonPressed() { + self.openMoreMenu(sourceNode: self.settingsBarButton.referenceNode, gesture: nil, isSettings: true) + } + override func adjustForPreviewing() { super.adjustForPreviewing() @@ -3102,6 +3132,12 @@ final class UniversalVideoGalleryItemNode: ZoomableContentGalleryItemNode { self.playbackRatePromise.set(self.playbackRate ?? 
1.0) } + func updateVideoQuality(_ videoQuality: UniversalVideoContentVideoQuality) { + self.videoQuality = videoQuality + + self.videoNode?.setVideoQuality(videoQuality) + } + public func seekToStart() { self.videoNode?.seek(0.0) self.videoNode?.play() diff --git a/submodules/InstantPageUI/Sources/InstantPagePlayableVideoNode.swift b/submodules/InstantPageUI/Sources/InstantPagePlayableVideoNode.swift index df36f4c416..cdbc22dc5f 100644 --- a/submodules/InstantPageUI/Sources/InstantPagePlayableVideoNode.swift +++ b/submodules/InstantPageUI/Sources/InstantPagePlayableVideoNode.swift @@ -58,7 +58,7 @@ final class InstantPagePlayableVideoNode: ASDisplayNode, InstantPageNode, Galler fileValue = file } - self.videoNode = UniversalVideoNode(postbox: context.account.postbox, audioSession: context.sharedContext.mediaManager.audioSession, manager: context.sharedContext.mediaManager.universalVideoManager, decoration: GalleryVideoDecoration(), content: NativeVideoContent(id: .instantPage(webPage.webpageId, media.media.id!), userLocation: userLocation, fileReference: .webPage(webPage: WebpageReference(webPage), media: fileValue!), imageReference: imageReference, streamVideo: streamVideo ? .conservative : .none, loopVideo: true, enableSound: false, fetchAutomatically: true, placeholderColor: theme.pageBackgroundColor, storeAfterDownload: nil), priority: .embedded, autoplay: true) + self.videoNode = UniversalVideoNode(accountId: context.account.id, postbox: context.account.postbox, audioSession: context.sharedContext.mediaManager.audioSession, manager: context.sharedContext.mediaManager.universalVideoManager, decoration: GalleryVideoDecoration(), content: NativeVideoContent(id: .instantPage(webPage.webpageId, media.media.id!), userLocation: userLocation, fileReference: .webPage(webPage: WebpageReference(webPage), media: fileValue!), imageReference: imageReference, streamVideo: streamVideo ? .conservative : .none, loopVideo: true, enableSound: false, fetchAutomatically: true, placeholderColor: theme.pageBackgroundColor, storeAfterDownload: nil), priority: .embedded, autoplay: true) self.videoNode.isUserInteractionEnabled = false self.statusNode = RadialStatusNode(backgroundNodeColor: UIColor(white: 0.0, alpha: 0.6)) diff --git a/submodules/PeerAvatarGalleryUI/Sources/PeerAvatarImageGalleryItem.swift b/submodules/PeerAvatarGalleryUI/Sources/PeerAvatarImageGalleryItem.swift index 5d15ce66df..759746fba5 100644 --- a/submodules/PeerAvatarGalleryUI/Sources/PeerAvatarImageGalleryItem.swift +++ b/submodules/PeerAvatarGalleryUI/Sources/PeerAvatarImageGalleryItem.swift @@ -281,7 +281,7 @@ final class PeerAvatarImageGalleryItemNode: ZoomableContentGalleryItemNode { let mediaManager = self.context.sharedContext.mediaManager let videoFileReference = FileMediaReference.avatarList(peer: peerReference, media: TelegramMediaFile(fileId: EngineMedia.Id(namespace: Namespaces.Media.LocalFile, id: 0), partialReference: nil, resource: video.representation.resource, previewRepresentations: representations.map { $0.representation }, videoThumbnails: [], immediateThumbnailData: entry.immediateThumbnailData, mimeType: "video/mp4", size: nil, attributes: [.Animated, .Video(duration: 0, size: video.representation.dimensions, flags: [], preloadSize: nil, coverTime: nil, videoCodec: nil)], alternativeRepresentations: [])) let videoContent = NativeVideoContent(id: .profileVideo(id, category), userLocation: .other, fileReference: videoFileReference, streamVideo: isMediaStreamable(resource: video.representation.resource) ? 
.conservative : .none, loopVideo: true, enableSound: false, fetchAutomatically: true, onlyFullSizeThumbnail: true, useLargeThumbnail: true, continuePlayingWithoutSoundOnLostAudioSession: false, placeholderColor: .clear, storeAfterDownload: nil) - let videoNode = UniversalVideoNode(postbox: self.context.account.postbox, audioSession: mediaManager.audioSession, manager: mediaManager.universalVideoManager, decoration: GalleryVideoDecoration(), content: videoContent, priority: .overlay) + let videoNode = UniversalVideoNode(accountId: self.context.account.id, postbox: self.context.account.postbox, audioSession: mediaManager.audioSession, manager: mediaManager.universalVideoManager, decoration: GalleryVideoDecoration(), content: videoContent, priority: .overlay) videoNode.isUserInteractionEnabled = false videoNode.isHidden = true self.videoStartTimestamp = video.representation.startTimestamp diff --git a/submodules/PeerInfoAvatarListNode/Sources/PeerInfoAvatarListNode.swift b/submodules/PeerInfoAvatarListNode/Sources/PeerInfoAvatarListNode.swift index b9f3a37afc..dfb059861b 100644 --- a/submodules/PeerInfoAvatarListNode/Sources/PeerInfoAvatarListNode.swift +++ b/submodules/PeerInfoAvatarListNode/Sources/PeerInfoAvatarListNode.swift @@ -366,7 +366,7 @@ public final class PeerInfoAvatarListItemNode: ASDisplayNode { } let mediaManager = self.context.sharedContext.mediaManager - let videoNode = UniversalVideoNode(postbox: self.context.account.postbox, audioSession: mediaManager.audioSession, manager: mediaManager.universalVideoManager, decoration: GalleryVideoDecoration(), content: videoContent, priority: .secondaryOverlay) + let videoNode = UniversalVideoNode(accountId: self.context.account.id, postbox: self.context.account.postbox, audioSession: mediaManager.audioSession, manager: mediaManager.universalVideoManager, decoration: GalleryVideoDecoration(), content: videoContent, priority: .secondaryOverlay) videoNode.isUserInteractionEnabled = false videoNode.canAttachContent = true videoNode.isHidden = true diff --git a/submodules/Postbox/Sources/MediaBox.swift b/submodules/Postbox/Sources/MediaBox.swift index 06c4fbe654..ac98ad54e3 100644 --- a/submodules/Postbox/Sources/MediaBox.swift +++ b/submodules/Postbox/Sources/MediaBox.swift @@ -140,8 +140,8 @@ public final class MediaBox { private let statusQueue = Queue() private let concurrentQueue = Queue.concurrentDefaultQueue() - private let dataQueue = Queue(name: "MediaBox-Data") - private let dataFileManager: MediaBoxFileManager + public let dataQueue = Queue(name: "MediaBox-Data") + public let dataFileManager: MediaBoxFileManager private let cacheQueue = Queue() private let timeBasedCleanup: TimeBasedCleanup @@ -209,60 +209,6 @@ public final class MediaBox { self.dataFileManager = MediaBoxFileManager(queue: self.dataQueue) let _ = self.ensureDirectoryCreated - - //self.updateResourceIndex() - - /*#if DEBUG - self.dataQueue.async { - for _ in 0 ..< 5 { - let tempFile = TempBox.shared.tempFile(fileName: "file") - print("MediaBox test: file \(tempFile.path)") - let queue2 = Queue.concurrentDefaultQueue() - if let fileContext = MediaBoxFileContextV2Impl(queue: self.dataQueue, manager: self.dataFileManager, storageBox: self.storageBox, resourceId: tempFile.path.data(using: .utf8)!, path: tempFile.path + "_complete", partialPath: tempFile.path + "_partial", metaPath: tempFile.path + "_partial" + ".meta") { - let _ = fileContext.fetched( - range: 0 ..< Int64.max, - priority: .default, - fetch: { ranges in - return ranges - |> filter { !$0.isEmpty } - |> 
take(1) - |> castError(MediaResourceDataFetchError.self) - |> mapToSignal { _ in - return Signal { subscriber in - queue2.async { - subscriber.putNext(.resourceSizeUpdated(524288)) - } - queue2.async { - subscriber.putNext(.resourceSizeUpdated(393216)) - } - queue2.async { - subscriber.putNext(.resourceSizeUpdated(655360)) - } - queue2.async { - subscriber.putNext(.resourceSizeUpdated(169608)) - } - queue2.async { - subscriber.putNext(.dataPart(resourceOffset: 131072, data: Data(repeating: 0xbb, count: 38536), range: 0 ..< 38536, complete: true)) - } - queue2.async { - subscriber.putNext(.dataPart(resourceOffset: 0, data: Data(repeating: 0xaa, count: 131072), range: 0 ..< 131072, complete: false)) - } - - return EmptyDisposable - } - } - }, - error: { _ in - }, - completed: { - assert(try! Data(contentsOf: URL(fileURLWithPath: tempFile.path + "_complete")) == Data(repeating: 0xaa, count: 131072) + Data(repeating: 0xbb, count: 38536)) - let _ = fileContext.addReference() - } - ) - } - } - } - #endif*/ } public func setMaxStoreTimes(general: Int32, shortLived: Int32, gigabytesLimit: Int32) { @@ -641,21 +587,12 @@ public final class MediaBox { paths.partial + ".meta" ]) - #if true if let fileContext = MediaBoxFileContextV2Impl(queue: self.dataQueue, manager: self.dataFileManager, storageBox: self.storageBox, resourceId: id.stringRepresentation.data(using: .utf8)!, path: paths.complete, partialPath: paths.partial, metaPath: paths.partial + ".meta") { context = fileContext self.fileContexts[resourceId] = fileContext } else { return nil } - #else - if let fileContext = MediaBoxFileContextImpl(queue: self.dataQueue, manager: self.dataFileManager, storageBox: self.storageBox, resourceId: id.stringRepresentation.data(using: .utf8)!, path: paths.complete, partialPath: paths.partial, metaPath: paths.partial + ".meta") { - context = fileContext - self.fileContexts[resourceId] = fileContext - } else { - return nil - } - #endif } if let context = context { let index = context.addReference() diff --git a/submodules/Postbox/Sources/MediaBoxFileContextV2Impl.swift b/submodules/Postbox/Sources/MediaBoxFileContextV2Impl.swift index 6bc400325a..3d1e70b889 100644 --- a/submodules/Postbox/Sources/MediaBoxFileContextV2Impl.swift +++ b/submodules/Postbox/Sources/MediaBoxFileContextV2Impl.swift @@ -2,7 +2,7 @@ import Foundation import RangeSet import SwiftSignalKit -final class MediaBoxFileContextV2Impl: MediaBoxFileContext { +public final class MediaBoxFileContextV2Impl: MediaBoxFileContext { private final class RangeRequest { let value: Range let priority: MediaBoxFetchPriority @@ -99,7 +99,7 @@ final class MediaBoxFileContextV2Impl: MediaBoxFileContext { private final class PartialState { private let queue: Queue private let manager: MediaBoxFileManager - private let storageBox: StorageBox + private let storageBox: StorageBox? 
private let resourceId: Data private let partialPath: String private let fullPath: String @@ -124,7 +124,7 @@ final class MediaBoxFileContextV2Impl: MediaBoxFileContext { init( queue: Queue, manager: MediaBoxFileManager, - storageBox: StorageBox, + storageBox: StorageBox?, resourceId: Data, partialPath: String, fullPath: String, @@ -461,7 +461,7 @@ final class MediaBoxFileContextV2Impl: MediaBoxFileContext { self.fileMap.fill(range) self.fileMap.serialize(manager: self.manager, to: self.metaPath) - self.storageBox.update(id: self.resourceId, size: self.fileMap.sum) + self.storageBox?.update(id: self.resourceId, size: self.fileMap.sum) } else { postboxLog("MediaBoxFileContextV2Impl: error seeking file to \(resourceOffset) at \(self.partialPath)") } @@ -474,7 +474,7 @@ final class MediaBoxFileContextV2Impl: MediaBoxFileContext { private func processMovedFile() { if let size = fileSize(self.fullPath) { self.isComplete = true - self.storageBox.update(id: self.resourceId, size: size) + self.storageBox?.update(id: self.resourceId, size: size) } } @@ -623,7 +623,7 @@ final class MediaBoxFileContextV2Impl: MediaBoxFileContext { private let queue: Queue private let manager: MediaBoxFileManager - private let storageBox: StorageBox + private let storageBox: StorageBox? private let resourceId: Data private let path: String private let partialPath: String @@ -637,10 +637,10 @@ final class MediaBoxFileContextV2Impl: MediaBoxFileContext { return self.references.isEmpty } - init?( + public init?( queue: Queue, manager: MediaBoxFileManager, - storageBox: StorageBox, + storageBox: StorageBox?, resourceId: Data, path: String, partialPath: String, @@ -683,7 +683,7 @@ final class MediaBoxFileContextV2Impl: MediaBoxFileContext { } } - func data(range: Range, waitUntilAfterInitialFetch: Bool, next: @escaping (MediaResourceData) -> Void) -> Disposable { + public func data(range: Range, waitUntilAfterInitialFetch: Bool, next: @escaping (MediaResourceData) -> Void) -> Disposable { assert(self.queue.isCurrent()) if let size = fileSize(self.path) { @@ -708,7 +708,7 @@ final class MediaBoxFileContextV2Impl: MediaBoxFileContext { } } - func fetched( + public func fetched( range: Range, priority: MediaBoxFetchPriority, fetch: @escaping (Signal<[(Range, MediaBoxFetchPriority)], NoError>) -> Signal, @@ -734,7 +734,7 @@ final class MediaBoxFileContextV2Impl: MediaBoxFileContext { } } - func fetchedFullRange( + public func fetchedFullRange( fetch: @escaping (Signal<[(Range, MediaBoxFetchPriority)], NoError>) -> Signal, error: @escaping (MediaResourceDataFetchError) -> Void, completed: @escaping () -> Void @@ -758,7 +758,7 @@ final class MediaBoxFileContextV2Impl: MediaBoxFileContext { } } - func cancelFullRangeFetches() { + public func cancelFullRangeFetches() { assert(self.queue.isCurrent()) if let partialState = self.partialState { @@ -766,7 +766,7 @@ final class MediaBoxFileContextV2Impl: MediaBoxFileContext { } } - func rangeStatus(next: @escaping (RangeSet) -> Void, completed: @escaping () -> Void) -> Disposable { + public func rangeStatus(next: @escaping (RangeSet) -> Void, completed: @escaping () -> Void) -> Disposable { assert(self.queue.isCurrent()) if let size = fileSize(self.path) { @@ -781,7 +781,7 @@ final class MediaBoxFileContextV2Impl: MediaBoxFileContext { } } - func status(next: @escaping (MediaResourceStatus) -> Void, completed: @escaping () -> Void, size: Int64?) -> Disposable { + public func status(next: @escaping (MediaResourceStatus) -> Void, completed: @escaping () -> Void, size: Int64?) 
-> Disposable { assert(self.queue.isCurrent()) if let _ = fileSize(self.path) { diff --git a/submodules/Postbox/Sources/MediaBoxFileManager.swift b/submodules/Postbox/Sources/MediaBoxFileManager.swift index bc963b8e4c..22799bc0a5 100644 --- a/submodules/Postbox/Sources/MediaBoxFileManager.swift +++ b/submodules/Postbox/Sources/MediaBoxFileManager.swift @@ -2,13 +2,13 @@ import Foundation import SwiftSignalKit import ManagedFile -final class MediaBoxFileManager { - enum Mode { +public final class MediaBoxFileManager { + public enum Mode { case read case readwrite } - enum AccessError: Error { + public enum AccessError: Error { case generic } @@ -129,7 +129,7 @@ final class MediaBoxFileManager { private var nextItemId: Int = 0 private let maxOpenFiles: Int - init(queue: Queue?) { + public init(queue: Queue?) { self.queue = queue self.maxOpenFiles = 16 } diff --git a/submodules/PremiumUI/Sources/PhoneDemoComponent.swift b/submodules/PremiumUI/Sources/PhoneDemoComponent.swift index 34b2ff7a8e..c3bb78f7ee 100644 --- a/submodules/PremiumUI/Sources/PhoneDemoComponent.swift +++ b/submodules/PremiumUI/Sources/PhoneDemoComponent.swift @@ -233,7 +233,7 @@ private final class PhoneView: UIView { hintDimensions: CGSize(width: 1170, height: 1754), storeAfterDownload: nil ) - let videoNode = UniversalVideoNode(postbox: context.account.postbox, audioSession: context.sharedContext.mediaManager.audioSession, manager: context.sharedContext.mediaManager.universalVideoManager, decoration: VideoDecoration(), content: videoContent, priority: .embedded) + let videoNode = UniversalVideoNode(accountId: context.account.id, postbox: context.account.postbox, audioSession: context.sharedContext.mediaManager.audioSession, manager: context.sharedContext.mediaManager.universalVideoManager, decoration: VideoDecoration(), content: videoContent, priority: .embedded) videoNode.canAttachContent = true self.videoNode = videoNode diff --git a/submodules/ShareController/Sources/ShareLoadingContainerNode.swift b/submodules/ShareController/Sources/ShareLoadingContainerNode.swift index ebf18f121b..3b5bbc6448 100644 --- a/submodules/ShareController/Sources/ShareLoadingContainerNode.swift +++ b/submodules/ShareController/Sources/ShareLoadingContainerNode.swift @@ -283,7 +283,7 @@ public final class ShareProlongedLoadingContainerNode: ASDisplayNode, ShareConte let videoContent = NativeVideoContent(id: .message(1, EngineMedia.Id(namespace: 0, id: 1)), userLocation: .other, fileReference: .standalone(media: dummyFile), streamVideo: .none, loopVideo: true, enableSound: false, fetchAutomatically: true, onlyFullSizeThumbnail: false, continuePlayingWithoutSoundOnLostAudioSession: false, placeholderColor: .black, storeAfterDownload: nil) - let videoNode = UniversalVideoNode(postbox: postbox, audioSession: mediaManager.audioSession, manager: mediaManager.universalVideoManager, decoration: decoration, content: videoContent, priority: .embedded) + let videoNode = UniversalVideoNode(accountId: AccountRecordId(rawValue: 0), postbox: postbox, audioSession: mediaManager.audioSession, manager: mediaManager.universalVideoManager, decoration: decoration, content: videoContent, priority: .embedded) videoNode.frame = CGRect(origin: CGPoint(), size: CGSize(width: 2.0, height: 2.0)) videoNode.alpha = 0.01 self.videoNode = videoNode diff --git a/submodules/TelegramCallsUI/Sources/PresentationCallManager.swift b/submodules/TelegramCallsUI/Sources/PresentationCallManager.swift index de7135864c..527a5a3074 100644 --- 
a/submodules/TelegramCallsUI/Sources/PresentationCallManager.swift +++ b/submodules/TelegramCallsUI/Sources/PresentationCallManager.swift @@ -634,7 +634,7 @@ public final class PresentationCallManagerImpl: PresentationCallManager { } } - private func requestScheduleGroupCall(accountContext: AccountContext, peerId: PeerId, internalId: CallSessionInternalId = CallSessionInternalId()) -> Signal { + private func requestScheduleGroupCall(accountContext: AccountContext, peerId: PeerId, internalId: CallSessionInternalId = CallSessionInternalId(), parentController: ViewController) -> Signal { let (presentationData, present, openSettings) = self.getDeviceAccessData() let isVideo = false @@ -668,7 +668,7 @@ public final class PresentationCallManagerImpl: PresentationCallManager { accountContext.engine.data.get(TelegramEngine.EngineData.Item.Peer.Peer(id: peerId)) ) |> deliverOnMainQueue - |> mapToSignal { [weak self] accessEnabled, peer -> Signal in + |> mapToSignal { [weak self, weak parentController] accessEnabled, peer -> Signal in guard let strongSelf = self else { return .single(false) } @@ -681,46 +681,98 @@ public final class PresentationCallManagerImpl: PresentationCallManager { if let peer = peer, case let .channel(channel) = peer, case .broadcast = channel.info { isChannel = true } - - let call = PresentationGroupCallImpl( - accountContext: accountContext, - audioSession: strongSelf.audioSession, - callKitIntegration: nil, - getDeviceAccessData: strongSelf.getDeviceAccessData, - initialCall: nil, - internalId: internalId, - peerId: peerId, - isChannel: isChannel, - invite: nil, - joinAsPeerId: nil, - isStream: false - ) - strongSelf.updateCurrentGroupCall(call) - strongSelf.currentGroupCallPromise.set(.single(call)) - strongSelf.hasActiveGroupCallsPromise.set(true) - strongSelf.removeCurrentGroupCallDisposable.set((call.canBeRemoved - |> filter { $0 } - |> take(1) - |> deliverOnMainQueue).start(next: { [weak call] value in - guard let strongSelf = self, let call = call else { - return + + if shouldUseV2VideoChatImpl(context: accountContext) { + if let parentController { + parentController.push(ScheduleVideoChatSheetScreen( + context: accountContext, + scheduleAction: { timestamp in + guard let self else { + return + } + + let call = PresentationGroupCallImpl( + accountContext: accountContext, + audioSession: self.audioSession, + callKitIntegration: nil, + getDeviceAccessData: self.getDeviceAccessData, + initialCall: nil, + internalId: internalId, + peerId: peerId, + isChannel: isChannel, + invite: nil, + joinAsPeerId: nil, + isStream: false + ) + call.schedule(timestamp: timestamp) + + self.updateCurrentGroupCall(call) + self.currentGroupCallPromise.set(.single(call)) + self.hasActiveGroupCallsPromise.set(true) + self.removeCurrentGroupCallDisposable.set((call.canBeRemoved + |> filter { $0 } + |> take(1) + |> deliverOnMainQueue).start(next: { [weak self, weak call] value in + guard let self, let call else { + return + } + if value { + if self.currentGroupCall === call { + self.updateCurrentGroupCall(nil) + self.currentGroupCallPromise.set(.single(nil)) + self.hasActiveGroupCallsPromise.set(false) + } + } + })) + } + )) } - if value { - if strongSelf.currentGroupCall === call { - strongSelf.updateCurrentGroupCall(nil) - strongSelf.currentGroupCallPromise.set(.single(nil)) - strongSelf.hasActiveGroupCallsPromise.set(false) + + return .single(true) + } else { + let call = PresentationGroupCallImpl( + accountContext: accountContext, + audioSession: strongSelf.audioSession, + 
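// Call-site shape of the new flow above: the sheet owns the date picking, and the caller only
// receives a unix timestamp once the user confirms. ScheduleVideoChatSheetScreen and its
// initializer come from this patch; presentSchedulingSheet and startScheduledCall are hypothetical
// helpers shown purely for orientation.
import Display
import AccountContext

func presentSchedulingSheet(context: AccountContext, parentController: ViewController) {
    parentController.push(ScheduleVideoChatSheetScreen(
        context: context,
        scheduleAction: { timestamp in
            // `timestamp` is seconds since 1970, taken from the picker's selected date.
            startScheduledCall(at: timestamp)
        }
    ))
}

func startScheduledCall(at timestamp: Int32) {
    // In the patch, this is where PresentationGroupCallImpl is created and
    // call.schedule(timestamp:) is invoked.
}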
callKitIntegration: nil, + getDeviceAccessData: strongSelf.getDeviceAccessData, + initialCall: nil, + internalId: internalId, + peerId: peerId, + isChannel: isChannel, + invite: nil, + joinAsPeerId: nil, + isStream: false + ) + strongSelf.updateCurrentGroupCall(call) + strongSelf.currentGroupCallPromise.set(.single(call)) + strongSelf.hasActiveGroupCallsPromise.set(true) + strongSelf.removeCurrentGroupCallDisposable.set((call.canBeRemoved + |> filter { $0 } + |> take(1) + |> deliverOnMainQueue).start(next: { [weak call] value in + guard let strongSelf = self, let call = call else { + return } - } - })) + if value { + if strongSelf.currentGroupCall === call { + strongSelf.updateCurrentGroupCall(nil) + strongSelf.currentGroupCallPromise.set(.single(nil)) + strongSelf.hasActiveGroupCallsPromise.set(false) + } + } + })) + } return .single(true) } } - public func scheduleGroupCall(context: AccountContext, peerId: PeerId, endCurrentIfAny: Bool) -> RequestScheduleGroupCallResult { - let begin: () -> Void = { [weak self] in - let _ = self?.requestScheduleGroupCall(accountContext: context, peerId: peerId).start() + public func scheduleGroupCall(context: AccountContext, peerId: PeerId, endCurrentIfAny: Bool, parentController: ViewController) -> RequestScheduleGroupCallResult { + let begin: () -> Void = { [weak self, weak parentController] in + guard let parentController else { + return + } + let _ = self?.requestScheduleGroupCall(accountContext: context, peerId: peerId, parentController: parentController).start() } if let currentGroupCall = self.currentGroupCallValue { diff --git a/submodules/TelegramCallsUI/Sources/ScheduleVideoChatSheetScreen.swift b/submodules/TelegramCallsUI/Sources/ScheduleVideoChatSheetScreen.swift new file mode 100644 index 0000000000..08b7019d97 --- /dev/null +++ b/submodules/TelegramCallsUI/Sources/ScheduleVideoChatSheetScreen.swift @@ -0,0 +1,466 @@ +import Foundation +import UIKit +import Display +import ComponentFlow +import ViewControllerComponent +import AccountContext +import SheetComponent +import ButtonComponent +import TelegramCore +import AnimatedTextComponent +import MultilineTextComponent +import BalancedTextComponent +import TelegramPresentationData +import TelegramStringFormatting +import Markdown + +private final class ScheduleVideoChatSheetContentComponent: Component { + typealias EnvironmentType = ViewControllerComponentContainer.Environment + + let scheduleAction: (Int32) -> Void + let dismiss: () -> Void + + init( + scheduleAction: @escaping (Int32) -> Void, + dismiss: @escaping () -> Void + ) { + self.scheduleAction = scheduleAction + self.dismiss = dismiss + } + + static func ==(lhs: ScheduleVideoChatSheetContentComponent, rhs: ScheduleVideoChatSheetContentComponent) -> Bool { + return true + } + + final class View: UIView { + private let button = ComponentView() + private let cancelButton = ComponentView() + + private let title = ComponentView() + private let mainText = ComponentView() + private var pickerView: UIDatePicker? + + private let calendar = Calendar(identifier: .gregorian) + private let dateFormatter: DateFormatter + + private var component: ScheduleVideoChatSheetContentComponent? + private weak var state: EmptyComponentState? 
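// Why `==` on the content component above can simply return true: its only inputs are closures,
// which are not Equatable, so prop-driven invalidation is effectively disabled and re-renders are
// driven by state updates instead. A generic illustration of that convention (TapActionProps is a
// hypothetical example type):
struct TapActionProps: Equatable {
    let onTap: () -> Void

    static func ==(lhs: TapActionProps, rhs: TapActionProps) -> Bool {
        // Closures cannot be compared; treat any two instances as equal.
        return true
    }
}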
+ + override init(frame: CGRect) { + self.dateFormatter = DateFormatter() + self.dateFormatter.timeStyle = .none + self.dateFormatter.dateStyle = .short + self.dateFormatter.timeZone = TimeZone.current + + super.init(frame: frame) + } + + required init?(coder: NSCoder) { + fatalError("init(coder:) has not been implemented") + } + + deinit { + } + + @objc private func scheduleDatePickerUpdated() { + self.state?.updated(transition: .immediate) + } + + private func updateSchedulePickerLimits() { + let timeZone = TimeZone(secondsFromGMT: 0)! + var calendar = Calendar(identifier: .gregorian) + calendar.timeZone = timeZone + let currentDate = Date() + var components = calendar.dateComponents(Set([.era, .year, .month, .day, .hour, .minute, .second]), from: currentDate) + components.second = 0 + + let roundedDate = calendar.date(from: components)! + let next1MinDate = calendar.date(byAdding: .minute, value: 1, to: roundedDate) + + let minute = components.minute ?? 0 + components.minute = 0 + let roundedToHourDate = calendar.date(from: components)! + components.hour = 0 + + let roundedToMidnightDate = calendar.date(from: components)! + let nextTwoHourDate = calendar.date(byAdding: .hour, value: minute > 30 ? 4 : 3, to: roundedToHourDate) + let maxDate = calendar.date(byAdding: .day, value: 8, to: roundedToMidnightDate) + + if let date = calendar.date(byAdding: .day, value: 365, to: currentDate) { + self.pickerView?.maximumDate = date + } + if let next1MinDate = next1MinDate, let nextTwoHourDate = nextTwoHourDate { + self.pickerView?.minimumDate = next1MinDate + self.pickerView?.maximumDate = maxDate + self.pickerView?.date = nextTwoHourDate + } + } + + func update(component: ScheduleVideoChatSheetContentComponent, availableSize: CGSize, state: EmptyComponentState, environment: Environment, transition: ComponentTransition) -> CGSize { + let previousComponent = self.component + let _ = previousComponent + + self.component = component + self.state = state + + let environment = environment[EnvironmentType.self].value + + let sideInset: CGFloat = 16.0 + + var contentHeight: CGFloat = 0.0 + contentHeight += 16.0 + + //TODO:localize + let titleString = NSMutableAttributedString() + titleString.append(NSAttributedString(string: "Schedule Video Chat", font: Font.semibold(17.0), textColor: environment.theme.list.itemPrimaryTextColor)) + + let titleSize = self.title.update( + transition: .immediate, + component: AnyComponent(MultilineTextComponent( + text: .plain(titleString), + maximumNumberOfLines: 1 + )), + environment: {}, + containerSize: CGSize(width: availableSize.width - sideInset * 2.0, height: 1000.0) + ) + if let titleView = self.title.view { + if titleView.superview == nil { + self.addSubview(titleView) + } + transition.setFrame(view: titleView, frame: CGRect(origin: CGPoint(x: floor((availableSize.width - titleSize.width) * 0.5), y: contentHeight), size: titleSize)) + } + contentHeight += titleSize.height + contentHeight += 16.0 + + let pickerView: UIDatePicker + if let current = self.pickerView { + pickerView = current + } else { + let textColor = UIColor.white + UILabel.setDateLabel(textColor) + + pickerView = UIDatePicker() + pickerView.timeZone = TimeZone(secondsFromGMT: 0) + pickerView.datePickerMode = .countDownTimer + pickerView.datePickerMode = .dateAndTime + pickerView.locale = Locale.current + pickerView.timeZone = TimeZone.current + pickerView.minuteInterval = 1 + self.addSubview(pickerView) + pickerView.addTarget(self, action: #selector(self.scheduleDatePickerUpdated), for: 
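// The limits installed above keep the picker between "one minute from now" and roughly a week
// ahead, and preselect a slot three to four hours out, rounded to the hour (the code first sets a
// one-year maximum, then immediately tightens it to eight days). A standalone sketch of the same
// arithmetic; schedulePickerLimits is a hypothetical name.
import Foundation

func schedulePickerLimits(now: Date = Date(), calendar: Calendar = Calendar(identifier: .gregorian)) -> (minimum: Date, maximum: Date, initial: Date) {
    var components = calendar.dateComponents([.era, .year, .month, .day, .hour, .minute, .second], from: now)
    components.second = 0
    let roundedToMinute = calendar.date(from: components)!

    let minute = components.minute ?? 0
    components.minute = 0
    let roundedToHour = calendar.date(from: components)!
    components.hour = 0
    let roundedToMidnight = calendar.date(from: components)!

    let minimum = calendar.date(byAdding: .minute, value: 1, to: roundedToMinute)!
    // Past half past the hour, jump four hours ahead instead of three.
    let initial = calendar.date(byAdding: .hour, value: minute > 30 ? 4 : 3, to: roundedToHour)!
    let maximum = calendar.date(byAdding: .day, value: 8, to: roundedToMidnight)!
    return (minimum, maximum, initial)
}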
.valueChanged) + if #available(iOS 13.4, *) { + pickerView.preferredDatePickerStyle = .wheels + } + pickerView.setValue(textColor, forKey: "textColor") + self.pickerView = pickerView + self.addSubview(pickerView) + + self.updateSchedulePickerLimits() + } + + let pickerFrame = CGRect(origin: CGPoint(x: sideInset, y: contentHeight), size: CGSize(width: availableSize.width - sideInset * 2.0, height: 216.0)) + transition.setFrame(view: pickerView, frame: pickerFrame) + contentHeight += pickerFrame.height + contentHeight += 26.0 + + let date = pickerView.date + let calendar = Calendar(identifier: .gregorian) + let currentTimestamp = Int32(CFAbsoluteTimeGetCurrent() + kCFAbsoluteTimeIntervalSince1970) + let timestamp = Int32(date.timeIntervalSince1970) + let time = stringForMessageTimestamp(timestamp: timestamp, dateTimeFormat: PresentationDateTimeFormat()) + let buttonTitle: String + if calendar.isDateInToday(date) { + buttonTitle = environment.strings.ScheduleVoiceChat_ScheduleToday(time).string + } else if calendar.isDateInTomorrow(date) { + buttonTitle = environment.strings.ScheduleVoiceChat_ScheduleTomorrow(time).string + } else { + buttonTitle = environment.strings.ScheduleVoiceChat_ScheduleOn(self.dateFormatter.string(from: date), time).string + } + + let delta = timestamp - currentTimestamp + + let isGroup = "".isEmpty + let intervalString = scheduledTimeIntervalString(strings: environment.strings, value: max(60, delta)) + + let text: String = isGroup ? environment.strings.ScheduleVoiceChat_GroupText(intervalString).string : environment.strings.ScheduleLiveStream_ChannelText(intervalString).string + + let mainText = NSMutableAttributedString() + mainText.append(parseMarkdownIntoAttributedString(text, attributes: MarkdownAttributes( + body: MarkdownAttributeSet( + font: Font.regular(14.0), + textColor: UIColor(rgb: 0x8e8e93) + ), + bold: MarkdownAttributeSet( + font: Font.semibold(14.0), + textColor: UIColor(rgb: 0x8e8e93) + ), + link: MarkdownAttributeSet( + font: Font.regular(14.0), + textColor: environment.theme.list.itemAccentColor, + additionalAttributes: [:] + ), + linkAttribute: { attributes in + return ("URL", "") + } + ))) + + let mainTextSize = self.mainText.update( + transition: .immediate, + component: AnyComponent(BalancedTextComponent( + text: .plain(mainText), + horizontalAlignment: .center, + maximumNumberOfLines: 0, + lineSpacing: 0.2 + )), + environment: {}, + containerSize: CGSize(width: availableSize.width - sideInset * 2.0, height: 1000.0) + ) + if let mainTextView = self.mainText.view { + if mainTextView.superview == nil { + self.addSubview(mainTextView) + } + transition.setFrame(view: mainTextView, frame: CGRect(origin: CGPoint(x: floor((availableSize.width - mainTextSize.width) * 0.5), y: contentHeight), size: mainTextSize)) + } + contentHeight += mainTextSize.height + contentHeight += 10.0 + + var buttonContents: [AnyComponentWithIdentity] = [] + buttonContents.append(AnyComponentWithIdentity(id: AnyHashable(0 as Int), component: AnyComponent( + Text(text: buttonTitle, font: Font.semibold(17.0), color: environment.theme.list.itemCheckColors.foregroundColor) + ))) + let buttonTransition = transition + let buttonSize = self.button.update( + transition: buttonTransition, + component: AnyComponent(ButtonComponent( + background: ButtonComponent.Background( + color: UIColor(rgb: 0x3252EF), + foreground: .white, + pressedColor: UIColor(rgb: 0x3252EF).withMultipliedAlpha(0.8) + ), + content: AnyComponentWithIdentity(id: AnyHashable(0 as Int), component: AnyComponent( + 
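// The confirm button's title above collapses to just a time for today or tomorrow and falls back
// to a short date otherwise. The same branching with plain DateFormatter, outside the localized
// PresentationStrings plumbing; scheduleButtonTitle is a hypothetical helper.
import Foundation

func scheduleButtonTitle(for date: Date) -> String {
    let calendar = Calendar(identifier: .gregorian)
    let timeFormatter = DateFormatter()
    timeFormatter.dateStyle = .none
    timeFormatter.timeStyle = .short
    let dayFormatter = DateFormatter()
    dayFormatter.dateStyle = .short
    dayFormatter.timeStyle = .none

    let time = timeFormatter.string(from: date)
    if calendar.isDateInToday(date) {
        return "Schedule today at \(time)"
    } else if calendar.isDateInTomorrow(date) {
        return "Schedule tomorrow at \(time)"
    } else {
        return "Schedule on \(dayFormatter.string(from: date)) at \(time)"
    }
}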
HStack(buttonContents, spacing: 5.0) + )), + isEnabled: true, + tintWhenDisabled: false, + displaysProgress: false, + action: { [weak self] in + guard let self, let component = self.component, let pickerView = self.pickerView else { + return + } + component.scheduleAction(Int32(pickerView.date.timeIntervalSince1970)) + } + )), + environment: {}, + containerSize: CGSize(width: availableSize.width - sideInset * 2.0, height: 50.0) + ) + let buttonFrame = CGRect(origin: CGPoint(x: sideInset, y: contentHeight), size: buttonSize) + if let buttonView = self.button.view { + if buttonView.superview == nil { + self.addSubview(buttonView) + } + transition.setFrame(view: buttonView, frame: buttonFrame) + } + contentHeight += buttonSize.height + contentHeight += 10.0 + + let cancelButtonSize = self.cancelButton.update( + transition: buttonTransition, + component: AnyComponent(ButtonComponent( + background: ButtonComponent.Background( + color: UIColor(rgb: 0x2B2B2F), + foreground: .white, + pressedColor: UIColor(rgb: 0x2B2B2F).withMultipliedAlpha(0.8) + ), + content: AnyComponentWithIdentity(id: AnyHashable(0 as Int), component: AnyComponent( + Text(text: "Cancel", font: Font.semibold(17.0), color: environment.theme.list.itemPrimaryTextColor) + )), + isEnabled: true, + tintWhenDisabled: false, + displaysProgress: false, + action: { [weak self] in + guard let self, let component = self.component else { + return + } + component.dismiss() + } + )), + environment: {}, + containerSize: CGSize(width: availableSize.width - sideInset * 2.0, height: 50.0) + ) + let cancelButtonFrame = CGRect(origin: CGPoint(x: sideInset, y: contentHeight), size: cancelButtonSize) + if let cancelButtonView = self.cancelButton.view { + if cancelButtonView.superview == nil { + self.addSubview(cancelButtonView) + } + transition.setFrame(view: cancelButtonView, frame: cancelButtonFrame) + } + contentHeight += cancelButtonSize.height + + if environment.safeInsets.bottom.isZero { + contentHeight += 16.0 + } else { + contentHeight += environment.safeInsets.bottom + 14.0 + } + + return CGSize(width: availableSize.width, height: contentHeight) + } + } + + func makeView() -> View { + return View(frame: CGRect()) + } + + func update(view: View, availableSize: CGSize, state: EmptyComponentState, environment: Environment, transition: ComponentTransition) -> CGSize { + return view.update(component: self, availableSize: availableSize, state: state, environment: environment, transition: transition) + } +} + +private final class ScheduleVideoChatSheetScreenComponent: Component { + typealias EnvironmentType = ViewControllerComponentContainer.Environment + + let context: AccountContext + let scheduleAction: (Int32) -> Void + + init( + context: AccountContext, + scheduleAction: @escaping (Int32) -> Void + ) { + self.context = context + self.scheduleAction = scheduleAction + } + + static func ==(lhs: ScheduleVideoChatSheetScreenComponent, rhs: ScheduleVideoChatSheetScreenComponent) -> Bool { + if lhs.context !== rhs.context { + return false + } + return true + } + + final class View: UIView { + private let sheet = ComponentView<(ViewControllerComponentContainer.Environment, SheetComponentEnvironment)>() + private let sheetAnimateOut = ActionSlot>() + + private var component: ScheduleVideoChatSheetScreenComponent? + private var environment: EnvironmentType? 
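// The content view above sizes itself by measuring each child and accumulating `contentHeight`
// from top to bottom. A plain-UIKit miniature of that layout pass; layoutColumn is a hypothetical
// helper that returns the total height.
import UIKit

func layoutColumn(views: [UIView], width: CGFloat, sideInset: CGFloat, spacing: CGFloat) -> CGFloat {
    var contentHeight: CGFloat = 0.0
    for view in views {
        let size = view.sizeThatFits(CGSize(width: width - sideInset * 2.0, height: .greatestFiniteMagnitude))
        // Center each child horizontally, then advance the running height.
        view.frame = CGRect(origin: CGPoint(x: floor((width - size.width) * 0.5), y: contentHeight), size: size)
        contentHeight += size.height + spacing
    }
    return contentHeight
}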
+ + override init(frame: CGRect) { + super.init(frame: frame) + } + + required init?(coder: NSCoder) { + fatalError("init(coder:) has not been implemented") + } + + func update(component: ScheduleVideoChatSheetScreenComponent, availableSize: CGSize, state: EmptyComponentState, environment: Environment, transition: ComponentTransition) -> CGSize { + self.component = component + + let environment = environment[ViewControllerComponentContainer.Environment.self].value + self.environment = environment + + let sheetEnvironment = SheetComponentEnvironment( + isDisplaying: environment.isVisible, + isCentered: environment.metrics.widthClass == .regular, + hasInputHeight: !environment.inputHeight.isZero, + regularMetricsSize: CGSize(width: 430.0, height: 900.0), + dismiss: { [weak self] _ in + guard let self, let environment = self.environment else { + return + } + self.sheetAnimateOut.invoke(Action { _ in + if let controller = environment.controller() { + controller.dismiss(completion: nil) + } + }) + } + ) + let _ = self.sheet.update( + transition: transition, + component: AnyComponent(SheetComponent( + content: AnyComponent(ScheduleVideoChatSheetContentComponent( + scheduleAction: { [weak self] timestamp in + guard let self else { + return + } + self.sheetAnimateOut.invoke(Action { [weak self] _ in + guard let self, let component = self.component else { + return + } + if let controller = self.environment?.controller() { + controller.dismiss(completion: nil) + } + + component.scheduleAction(timestamp) + }) + }, + dismiss: { [weak self] in + guard let self else { + return + } + self.sheetAnimateOut.invoke(Action { [weak self] _ in + guard let self else { + return + } + if let controller = self.environment?.controller() { + controller.dismiss(completion: nil) + } + }) + } + )), + backgroundColor: .color(UIColor(rgb: 0x1C1C1E)), + animateOut: self.sheetAnimateOut + )), + environment: { + environment + sheetEnvironment + }, + containerSize: availableSize + ) + if let sheetView = self.sheet.view { + if sheetView.superview == nil { + self.addSubview(sheetView) + } + transition.setFrame(view: sheetView, frame: CGRect(origin: CGPoint(), size: availableSize)) + } + + return availableSize + } + } + + func makeView() -> View { + return View(frame: CGRect()) + } + + func update(view: View, availableSize: CGSize, state: EmptyComponentState, environment: Environment, transition: ComponentTransition) -> CGSize { + return view.update(component: self, availableSize: availableSize, state: state, environment: environment, transition: transition) + } +} + +public class ScheduleVideoChatSheetScreen: ViewControllerComponentContainer { + public init(context: AccountContext, scheduleAction: @escaping (Int32) -> Void) { + super.init(context: context, component: ScheduleVideoChatSheetScreenComponent( + context: context, + scheduleAction: scheduleAction + ), navigationBarAppearance: .none, theme: .dark) + + self.statusBar.statusBarStyle = .Ignore + self.navigationPresentation = .flatModal + self.blocksBackgroundWhenInOverlay = true + } + + required public init(coder aDecoder: NSCoder) { + fatalError("init(coder:) has not been implemented") + } + + deinit { + } + + override public func containerLayoutUpdated(_ layout: ContainerViewLayout, transition: ContainedViewLayoutTransition) { + super.containerLayoutUpdated(layout, transition: transition) + } + + override public func viewDidAppear(_ animated: Bool) { + super.viewDidAppear(animated) + + self.view.disablesInteractiveModalDismiss = true + } +} diff --git 
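// The screen above animates the sheet out first and only reports the chosen timestamp once the
// dismissal completes, so the action never fires behind a still-visible sheet. The same ordering
// expressed with plain UIKit; MiniScheduleSheet and onSchedule are hypothetical.
import UIKit

final class MiniScheduleSheet: UIViewController {
    var onSchedule: ((Int32) -> Void)?

    func confirm(date: Date) {
        let timestamp = Int32(date.timeIntervalSince1970)
        let onSchedule = self.onSchedule
        // Dismiss first; deliver the result from the completion handler.
        self.dismiss(animated: true) {
            onSchedule?(timestamp)
        }
    }
}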
a/submodules/TelegramCallsUI/Sources/VideoChatActionButtonComponent.swift b/submodules/TelegramCallsUI/Sources/VideoChatActionButtonComponent.swift index 13c69b2cf2..8d6b3f69bd 100644 --- a/submodules/TelegramCallsUI/Sources/VideoChatActionButtonComponent.swift +++ b/submodules/TelegramCallsUI/Sources/VideoChatActionButtonComponent.swift @@ -66,6 +66,7 @@ final class VideoChatActionButtonComponent: Component { case muted case unmuted case raiseHand + case scheduled } let strings: PresentationStrings @@ -156,7 +157,7 @@ final class VideoChatActionButtonComponent: Component { backgroundColor = !isActive ? UIColor(rgb: 0x002E5D) : UIColor(rgb: 0x027FFF) case .unmuted: backgroundColor = !isActive ? UIColor(rgb: 0x124B21) : UIColor(rgb: 0x34C659) - case .raiseHand: + case .raiseHand, .scheduled: backgroundColor = UIColor(rgb: 0x3252EF) } iconDiameter = 60.0 @@ -169,7 +170,7 @@ final class VideoChatActionButtonComponent: Component { backgroundColor = !isActive ? UIColor(rgb: 0x002E5D) : UIColor(rgb: 0x027FFF) case .unmuted: backgroundColor = !isActive ? UIColor(rgb: 0x124B21) : UIColor(rgb: 0x34C659) - case .raiseHand: + case .raiseHand, .scheduled: backgroundColor = UIColor(rgb: 0x3252EF) } iconDiameter = 60.0 diff --git a/submodules/TelegramCallsUI/Sources/VideoChatMicButtonComponent.swift b/submodules/TelegramCallsUI/Sources/VideoChatMicButtonComponent.swift index 9e7c87b5bb..8ecf340e20 100644 --- a/submodules/TelegramCallsUI/Sources/VideoChatMicButtonComponent.swift +++ b/submodules/TelegramCallsUI/Sources/VideoChatMicButtonComponent.swift @@ -175,11 +175,17 @@ private final class GlowView: UIView { } final class VideoChatMicButtonComponent: Component { + enum ScheduledState: Equatable { + case start + case toggleSubscription(isSubscribed: Bool) + } + enum Content: Equatable { case connecting case muted case unmuted(pushToTalk: Bool) case raiseHand + case scheduled(state: ScheduledState) } let call: PresentationGroupCall @@ -187,19 +193,22 @@ final class VideoChatMicButtonComponent: Component { let isCollapsed: Bool let updateUnmutedStateIsPushToTalk: (Bool?) -> Void let raiseHand: () -> Void + let scheduleAction: () -> Void init( call: PresentationGroupCall, content: Content, isCollapsed: Bool, updateUnmutedStateIsPushToTalk: @escaping (Bool?) -> Void, - raiseHand: @escaping () -> Void + raiseHand: @escaping () -> Void, + scheduleAction: @escaping () -> Void ) { self.call = call self.content = content self.isCollapsed = isCollapsed self.updateUnmutedStateIsPushToTalk = updateUnmutedStateIsPushToTalk self.raiseHand = raiseHand + self.scheduleAction = scheduleAction } static func ==(lhs: VideoChatMicButtonComponent, rhs: VideoChatMicButtonComponent) -> Bool { @@ -245,7 +254,7 @@ final class VideoChatMicButtonComponent: Component { self.beginTrackingTimestamp = CFAbsoluteTimeGetCurrent() if let component = self.component { switch component.content { - case .connecting, .unmuted, .raiseHand: + case .connecting, .unmuted, .raiseHand, .scheduled: self.beginTrackingWasPushToTalk = false case .muted: self.beginTrackingWasPushToTalk = true @@ -291,6 +300,8 @@ final class VideoChatMicButtonComponent: Component { self.icon.playRandomAnimation() component.raiseHand() + case .scheduled: + component.scheduleAction() } } } @@ -322,6 +333,17 @@ final class VideoChatMicButtonComponent: Component { titleText = isPushToTalk ? 
"You are Live" : "Tap to Mute" case .raiseHand: titleText = "Raise Hand" + case let .scheduled(state): + switch state { + case .start: + titleText = "Start Now" + case let .toggleSubscription(isSubscribed): + if isSubscribed { + titleText = "Clear Reminder" + } else { + titleText = "Set Reminder" + } + } } self.isEnabled = isEnabled @@ -390,12 +412,14 @@ final class VideoChatMicButtonComponent: Component { case .connecting: context.setFillColor(UIColor(white: 0.1, alpha: 1.0).cgColor) context.fill(CGRect(origin: CGPoint(), size: size)) - case .muted, .unmuted, .raiseHand: + case .muted, .unmuted, .raiseHand, .scheduled: let colors: [UIColor] if case .muted = component.content { colors = [UIColor(rgb: 0x0080FF), UIColor(rgb: 0x00A1FE)] } else if case .raiseHand = component.content { colors = [UIColor(rgb: 0x3252EF), UIColor(rgb: 0xC64688)] + } else if case .scheduled = component.content { + colors = [UIColor(rgb: 0x3252EF), UIColor(rgb: 0xC64688)] } else { colors = [UIColor(rgb: 0x33C659), UIColor(rgb: 0x0BA8A5)] } @@ -477,10 +501,21 @@ final class VideoChatMicButtonComponent: Component { self.icon.enqueueState(.unmute) case .raiseHand: self.icon.enqueueState(.hand) + case let .scheduled(state): + switch state { + case .start: + self.icon.enqueueState(.start) + case let .toggleSubscription(isSubscribed): + if isSubscribed { + self.icon.enqueueState(.unsubscribe) + } else { + self.icon.enqueueState(.subscribe) + } + } } switch component.content { - case .muted, .unmuted, .raiseHand: + case .muted, .unmuted, .raiseHand, .scheduled: let blobSize = CGRect(origin: CGPoint(), size: CGSize(width: 116.0, height: 116.0)).insetBy(dx: -40.0, dy: -40.0).size let blobTintTransition: ComponentTransition @@ -512,6 +547,8 @@ final class VideoChatMicButtonComponent: Component { blobsColor = UIColor(rgb: 0x0086FF) } else if case .raiseHand = component.content { blobsColor = UIColor(rgb: 0x914BAD) + } else if case .scheduled = component.content { + blobsColor = UIColor(rgb: 0x914BAD) } else { blobsColor = UIColor(rgb: 0x33C758) } @@ -528,7 +565,7 @@ final class VideoChatMicButtonComponent: Component { blobView.updateLevel(CGFloat(value), immediately: false) }) } - case .connecting, .muted, .raiseHand: + case .connecting, .muted, .raiseHand, .scheduled: if let audioLevelDisposable = self.audioLevelDisposable { self.audioLevelDisposable = nil audioLevelDisposable.dispose() @@ -561,6 +598,8 @@ final class VideoChatMicButtonComponent: Component { glowColor = UIColor(rgb: 0x0086FF) } else if case .raiseHand = component.content { glowColor = UIColor(rgb: 0x3252EF) + } else if case .scheduled = component.content { + glowColor = UIColor(rgb: 0x3252EF) } else { glowColor = UIColor(rgb: 0x33C758) } diff --git a/submodules/TelegramCallsUI/Sources/VideoChatScheduledInfoComponent.swift b/submodules/TelegramCallsUI/Sources/VideoChatScheduledInfoComponent.swift new file mode 100644 index 0000000000..3c97c76306 --- /dev/null +++ b/submodules/TelegramCallsUI/Sources/VideoChatScheduledInfoComponent.swift @@ -0,0 +1,213 @@ +import Foundation +import UIKit +import Display +import ComponentFlow +import MultilineTextComponent +import TelegramPresentationData +import TelegramStringFormatting +import HierarchyTrackingLayer + +private let purple = UIColor(rgb: 0x3252ef) +private let pink = UIColor(rgb: 0xef436c) + +private let latePurple = UIColor(rgb: 0x974aa9) +private let latePink = UIColor(rgb: 0xf0436c) + +final class VideoChatScheduledInfoComponent: Component { + let timestamp: Int32 + let strings: PresentationStrings + + init( 
+ timestamp: Int32, + strings: PresentationStrings + ) { + self.timestamp = timestamp + self.strings = strings + } + + static func ==(lhs: VideoChatScheduledInfoComponent, rhs: VideoChatScheduledInfoComponent) -> Bool { + if lhs.timestamp != rhs.timestamp { + return false + } + if lhs.strings !== rhs.strings { + return false + } + return true + } + + final class View: UIView { + private let title = ComponentView() + private let countdownText = ComponentView() + private let dateText = ComponentView() + + private let countdownContainerView: UIView + private let countdownMaskView: UIView + private let countdownGradientLayer: SimpleGradientLayer + private let hierarchyTrackingLayer: HierarchyTrackingLayer + + private var component: VideoChatScheduledInfoComponent? + private var isUpdating: Bool = false + + override init(frame: CGRect) { + self.countdownContainerView = UIView() + self.countdownMaskView = UIView() + + self.countdownGradientLayer = SimpleGradientLayer() + self.countdownGradientLayer.type = .radial + self.countdownGradientLayer.colors = [pink.cgColor, purple.cgColor, purple.cgColor] + self.countdownGradientLayer.locations = [0.0, 0.85, 1.0] + self.countdownGradientLayer.startPoint = CGPoint(x: 1.0, y: 0.0) + self.countdownGradientLayer.endPoint = CGPoint(x: 0.0, y: 1.0) + + self.hierarchyTrackingLayer = HierarchyTrackingLayer() + + super.init(frame: frame) + + self.layer.addSublayer(self.hierarchyTrackingLayer) + + self.countdownContainerView.layer.addSublayer(self.countdownGradientLayer) + self.addSubview(self.countdownContainerView) + + self.countdownContainerView.mask = self.countdownMaskView + + self.hierarchyTrackingLayer.isInHierarchyUpdated = { [weak self] value in + guard let self else { + return + } + if value { + self.updateAnimations() + } + } + } + + required init?(coder: NSCoder) { + fatalError("init(coder:) has not been implemented") + } + + private func updateAnimations() { + if let _ = self.countdownGradientLayer.animation(forKey: "movement") { + } else { + let previousValue = self.countdownGradientLayer.startPoint + let newValue = CGPoint(x: CGFloat.random(in: 0.65 ..< 0.85), y: CGFloat.random(in: 0.1 ..< 0.45)) + self.countdownGradientLayer.startPoint = newValue + + CATransaction.begin() + + let animation = CABasicAnimation(keyPath: "startPoint") + animation.duration = Double.random(in: 0.8 ..< 1.4) + animation.fromValue = previousValue + animation.toValue = newValue + + CATransaction.setCompletionBlock { [weak self] in + guard let self else { + return + } + if self.hierarchyTrackingLayer.isInHierarchy { + self.updateAnimations() + } + } + + self.countdownGradientLayer.add(animation, forKey: "movement") + CATransaction.commit() + } + } + + func update(component: VideoChatScheduledInfoComponent, availableSize: CGSize, state: EmptyComponentState, environment: Environment, transition: ComponentTransition) -> CGSize { + self.isUpdating = true + defer { + self.isUpdating = false + } + + self.component = component + + let titleSize = self.title.update( + transition: .immediate, + component: AnyComponent(MultilineTextComponent( + text: .plain(NSAttributedString(string: "Starts in", font: Font.with(size: 23.0, design: .round, weight: .semibold), textColor: .white)) + )), + environment: {}, + containerSize: CGSize(width: availableSize.width - 16.0 * 2.0, height: 200.0) + ) + + let remainingSeconds: Int32 = max(0, component.timestamp - Int32(Date().timeIntervalSince1970)) + let countdownText: String + if remainingSeconds >= 86400 { + countdownText = 
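// The shimmer above works by drifting the radial gradient's startPoint to a new random position and
// re-triggering itself from the animation's completion block. A minimal sketch of that loop;
// ShimmerDriver is hypothetical, uses stand-in colors, and replaces the component's
// HierarchyTrackingLayer gate with a plain isActive flag.
import UIKit

final class ShimmerDriver {
    let gradientLayer = CAGradientLayer()
    private var isActive = false

    init() {
        self.gradientLayer.type = .radial
        self.gradientLayer.colors = [UIColor.systemPink.cgColor, UIColor.systemBlue.cgColor, UIColor.systemBlue.cgColor]
        self.gradientLayer.locations = [0.0, 0.85, 1.0]
        self.gradientLayer.startPoint = CGPoint(x: 1.0, y: 0.0)
        self.gradientLayer.endPoint = CGPoint(x: 0.0, y: 1.0)
    }

    func start() {
        self.isActive = true
        self.scheduleNextMovement()
    }

    func stop() {
        self.isActive = false
    }

    private func scheduleNextMovement() {
        guard self.isActive, self.gradientLayer.animation(forKey: "movement") == nil else {
            return
        }
        let previousValue = self.gradientLayer.startPoint
        let newValue = CGPoint(x: CGFloat.random(in: 0.65 ..< 0.85), y: CGFloat.random(in: 0.1 ..< 0.45))
        self.gradientLayer.startPoint = newValue   // update the model value first

        CATransaction.begin()
        let animation = CABasicAnimation(keyPath: "startPoint")
        animation.duration = Double.random(in: 0.8 ..< 1.4)
        animation.fromValue = previousValue
        animation.toValue = newValue
        CATransaction.setCompletionBlock { [weak self] in
            self?.scheduleNextMovement()   // loop while still active
        }
        self.gradientLayer.add(animation, forKey: "movement")
        CATransaction.commit()
    }
}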
scheduledTimeIntervalString(strings: component.strings, value: remainingSeconds) + } else { + countdownText = textForTimeout(value: abs(remainingSeconds)) + /*if remainingSeconds < 0 && !self.isLate { + self.isLate = true + self.foregroundGradientLayer.colors = [latePink.cgColor, latePurple.cgColor, latePurple.cgColor] + }*/ + } + + let countdownTextSize = self.countdownText.update( + transition: .immediate, + component: AnyComponent(MultilineTextComponent( + text: .plain(NSAttributedString(string: countdownText, font: Font.with(size: 68.0, design: .round, weight: .semibold, traits: [.monospacedNumbers]), textColor: .white)) + )), + environment: {}, + containerSize: CGSize(width: availableSize.width - 16.0 * 2.0, height: 400.0) + ) + + let dateText = humanReadableStringForTimestamp(strings: component.strings, dateTimeFormat: PresentationDateTimeFormat(), timestamp: component.timestamp, alwaysShowTime: true).string + + let dateTextSize = self.dateText.update( + transition: .immediate, + component: AnyComponent(MultilineTextComponent( + text: .plain(NSAttributedString(string: dateText, font: Font.with(size: 23.0, design: .round, weight: .semibold), textColor: .white)) + )), + environment: {}, + containerSize: CGSize(width: availableSize.width - 16.0 * 2.0, height: 400.0) + ) + + let titleSpacing: CGFloat = 5.0 + let dateSpacing: CGFloat = 5.0 + + let contentHeight: CGFloat = titleSize.height + titleSpacing + countdownTextSize.height + dateSpacing + dateTextSize.height + + let titleFrame = CGRect(origin: CGPoint(x: floor((availableSize.width - titleSize.width) * 0.5), y: floor((availableSize.height - contentHeight) * 0.5)), size: titleSize) + let countdownTextFrame = CGRect(origin: CGPoint(x: floor((availableSize.width - countdownTextSize.width) * 0.5), y: titleFrame.maxY + titleSpacing), size: countdownTextSize) + let dateTextFrame = CGRect(origin: CGPoint(x: floor((availableSize.width - dateTextSize.width) * 0.5), y: countdownTextFrame.maxY + dateSpacing), size: dateTextSize) + + if let titleView = self.title.view { + if titleView.superview == nil { + self.addSubview(titleView) + } + transition.setPosition(view: titleView, position: titleFrame.center) + titleView.bounds = CGRect(origin: CGPoint(), size: titleFrame.size) + } + + if let countdownTextView = self.countdownText.view { + if countdownTextView.superview == nil { + self.countdownMaskView.addSubview(countdownTextView) + } + transition.setFrame(view: countdownTextView, frame: CGRect(origin: CGPoint(), size: countdownTextFrame.size)) + } + + transition.setFrame(view: self.countdownContainerView, frame: countdownTextFrame) + transition.setFrame(view: self.countdownMaskView, frame: CGRect(origin: CGPoint(), size: countdownTextFrame.size)) + transition.setFrame(layer: self.countdownGradientLayer, frame: CGRect(origin: CGPoint(), size: countdownTextFrame.size)) + + if let dateTextView = self.dateText.view { + if dateTextView.superview == nil { + self.addSubview(dateTextView) + } + transition.setPosition(view: dateTextView, position: dateTextFrame.center) + dateTextView.bounds = CGRect(origin: CGPoint(), size: dateTextFrame.size) + } + + self.updateAnimations() + + return availableSize + } + } + + func makeView() -> View { + return View() + } + + func update(view: View, availableSize: CGSize, state: EmptyComponentState, environment: Environment, transition: ComponentTransition) -> CGSize { + return view.update(component: self, availableSize: availableSize, state: state, environment: environment, transition: transition) + } +} diff --git 
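// The gradient above is revealed only through the countdown digits: the gradient lives in a
// container view, and a view holding the text is installed as that container's mask. A compact
// UIKit-only sketch of the same trick; makeGradientCountdownLabel is a hypothetical helper.
import UIKit

func makeGradientCountdownLabel(text: String, size: CGSize) -> UIView {
    let container = UIView(frame: CGRect(origin: .zero, size: size))

    let gradientLayer = CAGradientLayer()
    gradientLayer.type = .radial
    gradientLayer.colors = [UIColor.systemPink.cgColor, UIColor.systemBlue.cgColor]
    gradientLayer.frame = container.bounds
    container.layer.addSublayer(gradientLayer)

    let label = UILabel(frame: container.bounds)
    label.text = text
    label.font = UIFont.monospacedDigitSystemFont(ofSize: 68.0, weight: .semibold)
    label.textAlignment = .center
    label.textColor = .white   // only the alpha channel matters for a mask

    let maskView = UIView(frame: container.bounds)
    maskView.addSubview(label)
    container.mask = maskView   // gradient shows through wherever the digits are opaque

    return container
}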
a/submodules/TelegramCallsUI/Sources/VideoChatScreen.swift b/submodules/TelegramCallsUI/Sources/VideoChatScreen.swift index a193b72312..906f913916 100644 --- a/submodules/TelegramCallsUI/Sources/VideoChatScreen.swift +++ b/submodules/TelegramCallsUI/Sources/VideoChatScreen.swift @@ -73,6 +73,7 @@ final class VideoChatScreenComponent: Component { let microphoneButton = ComponentView() let participants = ComponentView() + var scheduleInfo: ComponentView? var reconnectedAsEventsDisposable: Disposable? @@ -561,6 +562,13 @@ final class VideoChatScreenComponent: Component { self.isUpdating = false } + let alphaTransition: ComponentTransition + if transition.animation.isImmediate { + alphaTransition = .immediate + } else { + alphaTransition = .easeInOut(duration: 0.25) + } + let environment = environment[ViewControllerComponentContainer.Environment.self].value let themeUpdated = self.environment?.theme !== environment.theme @@ -1058,10 +1066,16 @@ final class VideoChatScreenComponent: Component { } let idleTitleStatusText: String - if let callState = self.callState, callState.networkState == .connected, let members = self.members { - idleTitleStatusText = environment.strings.VoiceChat_Panel_Members(Int32(max(1, members.totalCount))) + if let callState = self.callState { + if callState.networkState == .connected, let members = self.members { + idleTitleStatusText = environment.strings.VoiceChat_Panel_Members(Int32(max(1, members.totalCount))) + } else if callState.scheduleTimestamp != nil { + idleTitleStatusText = "scheduled" + } else { + idleTitleStatusText = "connecting..." + } } else { - idleTitleStatusText = "connecting..." + idleTitleStatusText = " " } let titleSize = self.title.update( transition: transition, @@ -1324,35 +1338,88 @@ final class VideoChatScreenComponent: Component { let participantsFrame = CGRect(origin: CGPoint(x: 0.0, y: 0.0), size: participantsSize) if let participantsView = self.participants.view { if participantsView.superview == nil { + participantsView.layer.allowsGroupOpacity = true self.containerView.addSubview(participantsView) } transition.setFrame(view: participantsView, frame: participantsFrame) + var participantsAlpha: CGFloat = 1.0 + if let callState = self.callState, callState.scheduleTimestamp != nil { + participantsAlpha = 0.0 + } + alphaTransition.setAlpha(view: participantsView, alpha: participantsAlpha) + } + + if let callState = self.callState, let scheduleTimestamp = callState.scheduleTimestamp { + let scheduleInfo: ComponentView + var scheduleInfoTransition = transition + if let current = self.scheduleInfo { + scheduleInfo = current + } else { + scheduleInfoTransition = scheduleInfoTransition.withAnimation(.none) + scheduleInfo = ComponentView() + self.scheduleInfo = scheduleInfo + } + let scheduleInfoSize = scheduleInfo.update( + transition: scheduleInfoTransition, + component: AnyComponent(VideoChatScheduledInfoComponent( + timestamp: scheduleTimestamp, + strings: environment.strings + )), + environment: {}, + containerSize: participantsSize + ) + let scheduleInfoFrame = CGRect(origin: CGPoint(x: 0.0, y: 0.0), size: scheduleInfoSize) + if let scheduleInfoView = scheduleInfo.view { + if scheduleInfoView.superview == nil { + scheduleInfoView.isUserInteractionEnabled = false + self.containerView.addSubview(scheduleInfoView) + } + scheduleInfoTransition.setFrame(view: scheduleInfoView, frame: scheduleInfoFrame) + } + } else if let scheduleInfo = self.scheduleInfo { + self.scheduleInfo = nil + if let scheduleInfoView = scheduleInfo.view { + 
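// The scheduled-info view above is placed without animation on the layout pass that creates it
// (transition.withAnimation(.none)) and animated on later passes. The same "first placement is
// immediate" rule in plain UIKit; BadgeHost and updateBadge are hypothetical.
import UIKit

final class BadgeHost: UIView {
    private var badgeView: UIView?

    func updateBadge(frame: CGRect, animated: Bool) {
        let badgeView: UIView
        var animateThisPass = animated
        if let current = self.badgeView {
            badgeView = current
        } else {
            badgeView = UIView()
            badgeView.backgroundColor = .systemBlue
            self.addSubview(badgeView)
            self.badgeView = badgeView
            animateThisPass = false   // never animate the very first placement
        }
        if animateThisPass {
            UIView.animate(withDuration: 0.25) {
                badgeView.frame = frame
            }
        } else {
            badgeView.frame = frame
        }
    }
}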
alphaTransition.setAlpha(view: scheduleInfoView, alpha: 0.0, completion: { [weak scheduleInfoView] _ in + scheduleInfoView?.removeFromSuperview() + }) + } } let micButtonContent: VideoChatMicButtonComponent.Content let actionButtonMicrophoneState: VideoChatActionButtonComponent.MicrophoneState if let callState = self.callState { - switch callState.networkState { - case .connecting: - micButtonContent = .connecting - actionButtonMicrophoneState = .connecting - case .connected: - if let callState = callState.muteState { - if callState.canUnmute { - if self.isPushToTalkActive { - micButtonContent = .unmuted(pushToTalk: self.isPushToTalkActive) - actionButtonMicrophoneState = .unmuted + if callState.scheduleTimestamp != nil { + let scheduledState: VideoChatMicButtonComponent.ScheduledState + if callState.canManageCall { + scheduledState = .start + } else { + scheduledState = .toggleSubscription(isSubscribed: callState.subscribedToScheduled) + } + micButtonContent = .scheduled(state: scheduledState) + actionButtonMicrophoneState = .scheduled + } else { + switch callState.networkState { + case .connecting: + micButtonContent = .connecting + actionButtonMicrophoneState = .connecting + case .connected: + if let callState = callState.muteState { + if callState.canUnmute { + if self.isPushToTalkActive { + micButtonContent = .unmuted(pushToTalk: self.isPushToTalkActive) + actionButtonMicrophoneState = .unmuted + } else { + micButtonContent = .muted + actionButtonMicrophoneState = .muted + } } else { - micButtonContent = .muted - actionButtonMicrophoneState = .muted + micButtonContent = .raiseHand + actionButtonMicrophoneState = .raiseHand } } else { - micButtonContent = .raiseHand - actionButtonMicrophoneState = .raiseHand + micButtonContent = .unmuted(pushToTalk: false) + actionButtonMicrophoneState = .unmuted } - } else { - micButtonContent = .unmuted(pushToTalk: false) - actionButtonMicrophoneState = .unmuted } } } else { @@ -1412,6 +1479,23 @@ final class VideoChatScreenComponent: Component { if !callState.raisedHand { component.call.raiseHand() } + }, + scheduleAction: { [weak self] in + guard let self, let component = self.component else { + return + } + guard let callState = self.callState else { + return + } + guard callState.scheduleTimestamp != nil else { + return + } + + if callState.canManageCall { + component.call.startScheduled() + } else { + component.call.toggleScheduledSubscription(!callState.subscribedToScheduled) + } } )), environment: {}, diff --git a/submodules/TelegramCallsUI/Sources/VoiceChatController.swift b/submodules/TelegramCallsUI/Sources/VoiceChatController.swift index 533e8b89ca..db52366c58 100644 --- a/submodules/TelegramCallsUI/Sources/VoiceChatController.swift +++ b/submodules/TelegramCallsUI/Sources/VoiceChatController.swift @@ -7097,7 +7097,7 @@ final class VoiceChatContextReferenceContentSource: ContextReferenceContentSourc } } -private func calculateUseV2(context: AccountContext) -> Bool { +public func shouldUseV2VideoChatImpl(context: AccountContext) -> Bool { var useV2 = true if context.sharedContext.immediateExperimentalUISettings.disableCallV2 { useV2 = false @@ -7109,7 +7109,7 @@ private func calculateUseV2(context: AccountContext) -> Bool { } public func makeVoiceChatControllerInitialData(sharedContext: SharedAccountContext, accountContext: AccountContext, call: PresentationGroupCall) -> Signal { - let useV2 = calculateUseV2(context: accountContext) + let useV2 = shouldUseV2VideoChatImpl(context: accountContext) if useV2 { return 
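// The schedule-button action added above makes a single decision: the call admin starts the call
// immediately, everyone else toggles their reminder subscription. A self-contained sketch of that
// decision; ScheduledCallState, ScheduledTapOutcome and handleScheduledTap are hypothetical names
// mirroring the patch's call-state fields.
struct ScheduledCallState {
    var canManageCall: Bool
    var subscribedToScheduled: Bool
}

enum ScheduledTapOutcome: Equatable {
    case startNow
    case setSubscription(Bool)
}

func handleScheduledTap(state: ScheduledCallState) -> ScheduledTapOutcome {
    if state.canManageCall {
        return .startNow                                      // call.startScheduled()
    } else {
        return .setSubscription(!state.subscribedToScheduled) // call.toggleScheduledSubscription(_:)
    }
}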
VideoChatScreenV2Impl.initialData(call: call) |> map { $0 as Any } @@ -7119,7 +7119,7 @@ public func makeVoiceChatControllerInitialData(sharedContext: SharedAccountConte } public func makeVoiceChatController(sharedContext: SharedAccountContext, accountContext: AccountContext, call: PresentationGroupCall, initialData: Any) -> VoiceChatController { - let useV2 = calculateUseV2(context: accountContext) + let useV2 = shouldUseV2VideoChatImpl(context: accountContext) if useV2 { return VideoChatScreenV2Impl(initialData: initialData as! VideoChatScreenV2Impl.InitialData, call: call) diff --git a/submodules/TelegramCore/Sources/Network/FetchedMediaResource.swift b/submodules/TelegramCore/Sources/Network/FetchedMediaResource.swift index 113edc7caf..af95079f06 100644 --- a/submodules/TelegramCore/Sources/Network/FetchedMediaResource.swift +++ b/submodules/TelegramCore/Sources/Network/FetchedMediaResource.swift @@ -184,6 +184,12 @@ private func findMediaResource(media: Media, previousMedia: Media?, resource: Me return representation.resource } } + + for alternativeRepresentation in file.alternativeRepresentations { + if let result = findMediaResource(media: alternativeRepresentation, previousMedia: previousMedia, resource: resource) { + return result + } + } } } else if let webPage = media as? TelegramMediaWebpage, case let .Loaded(content) = webPage.content { if let image = content.image, let result = findMediaResource(media: image, previousMedia: previousMedia, resource: resource) { @@ -254,6 +260,12 @@ func findMediaResourceById(media: Media, resourceId: MediaResourceId) -> Telegra return representation.resource } } + + for alternativeRepresentation in file.alternativeRepresentations { + if let result = findMediaResourceById(media: alternativeRepresentation, resourceId: resourceId) { + return result + } + } } else if let webPage = media as? 
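// The lookup change above makes resource resolution recurse into a file's alternative
// representations (the per-quality variants used by HLS) after checking the file itself. A
// simplified, self-contained sketch of that search; MiniResource and MiniFile are hypothetical
// stand-ins for TelegramMediaFile and its resources.
struct MiniResource: Equatable {
    let id: String
}

struct MiniFile {
    let resources: [MiniResource]
    let alternativeRepresentations: [MiniFile]
}

func findResource(in file: MiniFile, id: String) -> MiniResource? {
    if let direct = file.resources.first(where: { $0.id == id }) {
        return direct
    }
    for alternative in file.alternativeRepresentations {
        if let result = findResource(in: alternative, id: id) {
            return result
        }
    }
    return nil
}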
TelegramMediaWebpage, case let .Loaded(content) = webPage.content { if let image = content.image, let result = findMediaResourceById(media: image, resourceId: resourceId) { return result diff --git a/submodules/TelegramUI/Components/Chat/ChatBotInfoItem/Sources/ChatBotInfoItem.swift b/submodules/TelegramUI/Components/Chat/ChatBotInfoItem/Sources/ChatBotInfoItem.swift index 8ee63a28ac..2bff980564 100644 --- a/submodules/TelegramUI/Components/Chat/ChatBotInfoItem/Sources/ChatBotInfoItem.swift +++ b/submodules/TelegramUI/Components/Chat/ChatBotInfoItem/Sources/ChatBotInfoItem.swift @@ -151,7 +151,7 @@ public final class ChatBotInfoItemNode: ListViewItemNode { continuePlayingWithoutSoundOnLostAudioSession: false, storeAfterDownload: nil ) - let videoNode = UniversalVideoNode(postbox: context.account.postbox, audioSession: context.sharedContext.mediaManager.audioSession, manager: context.sharedContext.mediaManager.universalVideoManager, decoration: VideoDecoration(), content: videoContent, priority: .embedded) + let videoNode = UniversalVideoNode(accountId: context.account.id, postbox: context.account.postbox, audioSession: context.sharedContext.mediaManager.audioSession, manager: context.sharedContext.mediaManager.universalVideoManager, decoration: VideoDecoration(), content: videoContent, priority: .embedded) videoNode.canAttachContent = true self.videoNode = videoNode diff --git a/submodules/TelegramUI/Components/Chat/ChatMessageActionBubbleContentNode/Sources/ChatMessageActionBubbleContentNode.swift b/submodules/TelegramUI/Components/Chat/ChatMessageActionBubbleContentNode/Sources/ChatMessageActionBubbleContentNode.swift index cebcfdff62..7b56c5b951 100644 --- a/submodules/TelegramUI/Components/Chat/ChatMessageActionBubbleContentNode/Sources/ChatMessageActionBubbleContentNode.swift +++ b/submodules/TelegramUI/Components/Chat/ChatMessageActionBubbleContentNode/Sources/ChatMessageActionBubbleContentNode.swift @@ -276,7 +276,7 @@ public class ChatMessageActionBubbleContentNode: ChatMessageBubbleContentNode { let videoContent = NativeVideoContent(id: .profileVideo(id, "action"), userLocation: .peer(item.message.id.peerId), fileReference: videoFileReference, streamVideo: isMediaStreamable(resource: video.resource) ? 
.conservative : .none, loopVideo: true, enableSound: false, fetchAutomatically: true, onlyFullSizeThumbnail: false, useLargeThumbnail: true, autoFetchFullSizeThumbnail: true, continuePlayingWithoutSoundOnLostAudioSession: false, placeholderColor: .clear, storeAfterDownload: nil) if videoContent.id != strongSelf.videoContent?.id { let mediaManager = item.context.sharedContext.mediaManager - let videoNode = UniversalVideoNode(postbox: item.context.account.postbox, audioSession: mediaManager.audioSession, manager: mediaManager.universalVideoManager, decoration: GalleryVideoDecoration(), content: videoContent, priority: .secondaryOverlay) + let videoNode = UniversalVideoNode(accountId: item.context.account.id, postbox: item.context.account.postbox, audioSession: mediaManager.audioSession, manager: mediaManager.universalVideoManager, decoration: GalleryVideoDecoration(), content: videoContent, priority: .secondaryOverlay) videoNode.isUserInteractionEnabled = false videoNode.ownsContentNodeUpdated = { [weak self] owns in if let strongSelf = self { diff --git a/submodules/TelegramUI/Components/Chat/ChatMessageInteractiveInstantVideoNode/Sources/ChatMessageInteractiveInstantVideoNode.swift b/submodules/TelegramUI/Components/Chat/ChatMessageInteractiveInstantVideoNode/Sources/ChatMessageInteractiveInstantVideoNode.swift index 5dcd3a1f81..9bd38d928d 100644 --- a/submodules/TelegramUI/Components/Chat/ChatMessageInteractiveInstantVideoNode/Sources/ChatMessageInteractiveInstantVideoNode.swift +++ b/submodules/TelegramUI/Components/Chat/ChatMessageInteractiveInstantVideoNode/Sources/ChatMessageInteractiveInstantVideoNode.swift @@ -763,7 +763,7 @@ public class ChatMessageInteractiveInstantVideoNode: ASDisplayNode { }) } let mediaManager = item.context.sharedContext.mediaManager - let videoNode = UniversalVideoNode(postbox: item.context.account.postbox, audioSession: mediaManager.audioSession, manager: mediaManager.universalVideoManager, decoration: ChatBubbleInstantVideoDecoration(inset: 2.0, backgroundImage: instantVideoBackgroundImage, tapped: { + let videoNode = UniversalVideoNode(accountId: item.context.account.id, postbox: item.context.account.postbox, audioSession: mediaManager.audioSession, manager: mediaManager.universalVideoManager, decoration: ChatBubbleInstantVideoDecoration(inset: 2.0, backgroundImage: instantVideoBackgroundImage, tapped: { if let strongSelf = self { if let item = strongSelf.item { if strongSelf.infoBackgroundNode.alpha.isZero { diff --git a/submodules/TelegramUI/Components/Chat/ChatMessageInteractiveMediaNode/Sources/ChatMessageInteractiveMediaNode.swift b/submodules/TelegramUI/Components/Chat/ChatMessageInteractiveMediaNode/Sources/ChatMessageInteractiveMediaNode.swift index e1586b4821..d37ea5fcd8 100644 --- a/submodules/TelegramUI/Components/Chat/ChatMessageInteractiveMediaNode/Sources/ChatMessageInteractiveMediaNode.swift +++ b/submodules/TelegramUI/Components/Chat/ChatMessageInteractiveMediaNode/Sources/ChatMessageInteractiveMediaNode.swift @@ -1659,7 +1659,7 @@ public final class ChatMessageInteractiveMediaNode: ASDisplayNode, GalleryItemTr let loopVideo = updatedVideoFile.isAnimated let videoContent: UniversalVideoContent - if NativeVideoContent.isHLSVideo(file: updatedVideoFile), context.sharedContext.immediateExperimentalUISettings.dynamicStreaming { + if !"".isEmpty && NativeVideoContent.isHLSVideo(file: updatedVideoFile), context.sharedContext.immediateExperimentalUISettings.dynamicStreaming { videoContent = HLSVideoContent(id: .message(message.id, 
message.stableId, updatedVideoFile.fileId), userLocation: .peer(message.id.peerId), fileReference: .message(message: MessageReference(message), media: updatedVideoFile), streamVideo: true, loopVideo: loopVideo) } else { videoContent = NativeVideoContent(id: .message(message.stableId, updatedVideoFile.fileId), userLocation: .peer(message.id.peerId), fileReference: .message(message: MessageReference(message), media: updatedVideoFile), streamVideo: streamVideo ? .conservative : .none, loopVideo: loopVideo, enableSound: false, fetchAutomatically: false, onlyFullSizeThumbnail: (onlyFullSizeVideoThumbnail ?? false), continuePlayingWithoutSoundOnLostAudioSession: isInlinePlayableVideo, placeholderColor: emptyColor, captureProtected: message.isCopyProtected() || isExtendedMedia, storeAfterDownload: { [weak context] in @@ -1669,7 +1669,7 @@ public final class ChatMessageInteractiveMediaNode: ASDisplayNode, GalleryItemTr let _ = storeDownloadedMedia(storeManager: context.downloadedMediaStoreManager, media: .message(message: MessageReference(message), media: updatedVideoFile), peerId: peerId).startStandalone() }) } - let videoNode = UniversalVideoNode(postbox: context.account.postbox, audioSession: mediaManager.audioSession, manager: mediaManager.universalVideoManager, decoration: decoration, content: videoContent, priority: .embedded) + let videoNode = UniversalVideoNode(accountId: context.account.id, postbox: context.account.postbox, audioSession: mediaManager.audioSession, manager: mediaManager.universalVideoManager, decoration: decoration, content: videoContent, priority: .embedded) videoNode.isUserInteractionEnabled = false videoNode.ownsContentNodeUpdated = { [weak self] owns in if let strongSelf = self { @@ -2162,10 +2162,15 @@ public final class ChatMessageInteractiveMediaNode: ASDisplayNode, GalleryItemTr if let duration = file.duration, !message.flags.contains(.Unsent) { let durationString = file.isAnimated ? gifTitle : stringForDuration(playerDuration > 0 ? playerDuration : Int32(duration), position: playerPosition) if isMediaStreamable(message: message, media: file) { - badgeContent = .mediaDownload(backgroundColor: messageTheme.mediaDateAndStatusFillColor, foregroundColor: messageTheme.mediaDateAndStatusTextColor, duration: durationString, size: active ? sizeString : nil, muted: muted, active: active) - mediaDownloadState = .fetching(progress: automaticPlayback ? nil : adjustedProgress) - if self.playerStatus?.status == .playing { - mediaDownloadState = nil + if NativeVideoContent.isHLSVideo(file: file) { + mediaDownloadState = .fetching(progress: nil) + badgeContent = .text(inset: 12.0, backgroundColor: messageTheme.mediaDateAndStatusFillColor, foregroundColor: messageTheme.mediaDateAndStatusTextColor, text: NSAttributedString(string: durationString), iconName: nil) + } else { + badgeContent = .mediaDownload(backgroundColor: messageTheme.mediaDateAndStatusFillColor, foregroundColor: messageTheme.mediaDateAndStatusTextColor, duration: durationString, size: active ? sizeString : nil, muted: muted, active: active) + mediaDownloadState = .fetching(progress: automaticPlayback ? nil : adjustedProgress) + if self.playerStatus?.status == .playing { + mediaDownloadState = nil + } } state = automaticPlayback ? .none : .play(messageTheme.mediaOverlayControlColors.foregroundColor) } else { @@ -2264,7 +2269,11 @@ public final class ChatMessageInteractiveMediaNode: ASDisplayNode, GalleryItemTr do { let durationString = file.isAnimated ? gifTitle : stringForDuration(playerDuration > 0 ? 
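// The badge change above treats HLS items differently: an adaptive stream has no single file size
// to report and no meaningful byte-level progress, so the bubble shows only the duration and an
// indeterminate fetch state instead of the size/progress badge. A small sketch of that decision;
// BadgeKind and badge(isHLS:duration:sizeText:) are hypothetical names.
enum BadgeKind: Equatable {
    case durationOnly(String)                           // HLS: e.g. "1:23"
    case downloadProgress(duration: String, size: String?)   // regular files: duration plus size
}

func badge(isHLS: Bool, duration: String, sizeText: String?) -> BadgeKind {
    if isHLS {
        return .durationOnly(duration)
    } else {
        return .downloadProgress(duration: duration, size: sizeText)
    }
}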
playerDuration : (file.duration.flatMap { Int32(floor($0)) } ?? 0), position: playerPosition) if wideLayout { - if isMediaStreamable(message: message, media: file), let fileSize = file.size, fileSize > 0 && fileSize != .max { + if NativeVideoContent.isHLSVideo(file: file) { + state = automaticPlayback ? .none : .play(messageTheme.mediaOverlayControlColors.foregroundColor) + mediaDownloadState = nil + badgeContent = .text(inset: 12.0, backgroundColor: messageTheme.mediaDateAndStatusFillColor, foregroundColor: messageTheme.mediaDateAndStatusTextColor, text: NSAttributedString(string: durationString), iconName: nil) + } else if isMediaStreamable(message: message, media: file), let fileSize = file.size, fileSize > 0 && fileSize != .max { state = automaticPlayback ? .none : .play(messageTheme.mediaOverlayControlColors.foregroundColor) badgeContent = .mediaDownload(backgroundColor: messageTheme.mediaDateAndStatusFillColor, foregroundColor: messageTheme.mediaDateAndStatusTextColor, duration: durationString, size: dataSizeString(fileSize, formatting: formatting), muted: muted, active: true) mediaDownloadState = .remote diff --git a/submodules/TelegramUI/Components/Chat/ChatMessageProfilePhotoSuggestionContentNode/Sources/ChatMessageProfilePhotoSuggestionContentNode.swift b/submodules/TelegramUI/Components/Chat/ChatMessageProfilePhotoSuggestionContentNode/Sources/ChatMessageProfilePhotoSuggestionContentNode.swift index 47c902badc..cbcbc37fc4 100644 --- a/submodules/TelegramUI/Components/Chat/ChatMessageProfilePhotoSuggestionContentNode/Sources/ChatMessageProfilePhotoSuggestionContentNode.swift +++ b/submodules/TelegramUI/Components/Chat/ChatMessageProfilePhotoSuggestionContentNode/Sources/ChatMessageProfilePhotoSuggestionContentNode.swift @@ -222,7 +222,7 @@ public class ChatMessageProfilePhotoSuggestionContentNode: ChatMessageBubbleCont let videoContent = NativeVideoContent(id: .profileVideo(id, "action"), userLocation: .peer(item.message.id.peerId), fileReference: videoFileReference, streamVideo: isMediaStreamable(resource: video.resource) ? 
.conservative : .none, loopVideo: true, enableSound: false, fetchAutomatically: true, onlyFullSizeThumbnail: false, useLargeThumbnail: true, autoFetchFullSizeThumbnail: true, continuePlayingWithoutSoundOnLostAudioSession: false, placeholderColor: .clear, storeAfterDownload: nil) if videoContent.id != strongSelf.videoContent?.id { let mediaManager = item.context.sharedContext.mediaManager - let videoNode = UniversalVideoNode(postbox: item.context.account.postbox, audioSession: mediaManager.audioSession, manager: mediaManager.universalVideoManager, decoration: GalleryVideoDecoration(), content: videoContent, priority: .secondaryOverlay) + let videoNode = UniversalVideoNode(accountId: item.context.account.id, postbox: item.context.account.postbox, audioSession: mediaManager.audioSession, manager: mediaManager.universalVideoManager, decoration: GalleryVideoDecoration(), content: videoContent, priority: .secondaryOverlay) videoNode.isUserInteractionEnabled = false videoNode.ownsContentNodeUpdated = { [weak self] owns in if let strongSelf = self { diff --git a/submodules/TelegramUI/Components/Chat/ChatQrCodeScreen/Sources/ChatQrCodeScreen.swift b/submodules/TelegramUI/Components/Chat/ChatQrCodeScreen/Sources/ChatQrCodeScreen.swift index 86eb90094c..9cef08d2d2 100644 --- a/submodules/TelegramUI/Components/Chat/ChatQrCodeScreen/Sources/ChatQrCodeScreen.swift +++ b/submodules/TelegramUI/Components/Chat/ChatQrCodeScreen/Sources/ChatQrCodeScreen.swift @@ -2286,7 +2286,7 @@ private class MessageContentNode: ASDisplayNode, ContentNode { } } else { let videoContent = NativeVideoContent(id: .message(message.stableId, video.fileId), userLocation: .peer(message.id.peerId), fileReference: .message(message: MessageReference(message), media: video), streamVideo: .conservative, loopVideo: true, enableSound: false, fetchAutomatically: false, onlyFullSizeThumbnail: self.isStatic, continuePlayingWithoutSoundOnLostAudioSession: true, placeholderColor: .clear, captureProtected: false, storeAfterDownload: nil) - let videoNode = UniversalVideoNode(postbox: self.context.account.postbox, audioSession: self.context.sharedContext.mediaManager.audioSession, manager: self.context.sharedContext.mediaManager.universalVideoManager, decoration: GalleryVideoDecoration(), content: videoContent, priority: .overlay, autoplay: !self.isStatic) + let videoNode = UniversalVideoNode(accountId: self.context.account.id, postbox: self.context.account.postbox, audioSession: self.context.sharedContext.mediaManager.audioSession, manager: self.context.sharedContext.mediaManager.universalVideoManager, decoration: GalleryVideoDecoration(), content: videoContent, priority: .overlay, autoplay: !self.isStatic) self.videoStatusDisposable.set((videoNode.status |> deliverOnMainQueue).startStrict(next: { [weak self] status in diff --git a/submodules/TelegramUI/Components/PeerInfo/PeerInfoScreen/Sources/PeerInfoAvatarTransformContainerNode.swift b/submodules/TelegramUI/Components/PeerInfo/PeerInfoScreen/Sources/PeerInfoAvatarTransformContainerNode.swift index 827ec82b64..a07718b06f 100644 --- a/submodules/TelegramUI/Components/PeerInfo/PeerInfoScreen/Sources/PeerInfoAvatarTransformContainerNode.swift +++ b/submodules/TelegramUI/Components/PeerInfo/PeerInfoScreen/Sources/PeerInfoAvatarTransformContainerNode.swift @@ -333,7 +333,7 @@ final class PeerInfoAvatarTransformContainerNode: ASDisplayNode { self.videoNode?.removeFromSupernode() let mediaManager = self.context.sharedContext.mediaManager - let videoNode = UniversalVideoNode(postbox: 
self.context.account.postbox, audioSession: mediaManager.audioSession, manager: mediaManager.universalVideoManager, decoration: GalleryVideoDecoration(), content: videoContent, priority: .embedded) + let videoNode = UniversalVideoNode(accountId: self.context.account.id, postbox: self.context.account.postbox, audioSession: mediaManager.audioSession, manager: mediaManager.universalVideoManager, decoration: GalleryVideoDecoration(), content: videoContent, priority: .embedded) videoNode.isUserInteractionEnabled = false videoNode.isHidden = true diff --git a/submodules/TelegramUI/Components/PeerInfo/PeerInfoScreen/Sources/PeerInfoEditingAvatarNode.swift b/submodules/TelegramUI/Components/PeerInfo/PeerInfoScreen/Sources/PeerInfoEditingAvatarNode.swift index ea1c47a629..9d34dd106c 100644 --- a/submodules/TelegramUI/Components/PeerInfo/PeerInfoScreen/Sources/PeerInfoEditingAvatarNode.swift +++ b/submodules/TelegramUI/Components/PeerInfo/PeerInfoScreen/Sources/PeerInfoEditingAvatarNode.swift @@ -168,7 +168,7 @@ final class PeerInfoEditingAvatarNode: ASDisplayNode { self.videoNode?.removeFromSupernode() let mediaManager = self.context.sharedContext.mediaManager - let videoNode = UniversalVideoNode(postbox: self.context.account.postbox, audioSession: mediaManager.audioSession, manager: mediaManager.universalVideoManager, decoration: GalleryVideoDecoration(), content: videoContent, priority: .gallery) + let videoNode = UniversalVideoNode(accountId: self.context.account.id, postbox: self.context.account.postbox, audioSession: mediaManager.audioSession, manager: mediaManager.universalVideoManager, decoration: GalleryVideoDecoration(), content: videoContent, priority: .gallery) videoNode.isUserInteractionEnabled = false self.videoStartTimestamp = video.representation.startTimestamp self.videoContent = videoContent diff --git a/submodules/TelegramUI/Components/PeerInfo/PeerInfoScreen/Sources/PeerInfoScreen.swift b/submodules/TelegramUI/Components/PeerInfo/PeerInfoScreen/Sources/PeerInfoScreen.swift index 37a38e0da3..c253385710 100644 --- a/submodules/TelegramUI/Components/PeerInfo/PeerInfoScreen/Sources/PeerInfoScreen.swift +++ b/submodules/TelegramUI/Components/PeerInfo/PeerInfoScreen/Sources/PeerInfoScreen.swift @@ -7067,7 +7067,10 @@ final class PeerInfoScreenNode: ViewControllerTracingNode, PeerInfoScreenNodePro } private func scheduleGroupCall() { - self.context.scheduleGroupCall(peerId: self.peerId) + guard let controller = self.controller else { + return + } + self.context.scheduleGroupCall(peerId: self.peerId, parentController: controller) } private func createExternalStream(credentialsPromise: Promise?) 
{ diff --git a/submodules/TelegramUI/Components/Stories/StoryContainerScreen/Sources/StoryItemContentComponent.swift b/submodules/TelegramUI/Components/Stories/StoryContainerScreen/Sources/StoryItemContentComponent.swift index a5b630e67b..18c6d85990 100644 --- a/submodules/TelegramUI/Components/Stories/StoryContainerScreen/Sources/StoryItemContentComponent.swift +++ b/submodules/TelegramUI/Components/Stories/StoryContainerScreen/Sources/StoryItemContentComponent.swift @@ -200,6 +200,7 @@ final class StoryItemContentComponent: Component { if case let .file(file) = currentMessageMedia, let peerReference = PeerReference(component.peer._asPeer()) { if self.videoNode == nil { let videoNode = UniversalVideoNode( + accountId: component.context.account.id, postbox: component.context.account.postbox, audioSession: component.context.sharedContext.mediaManager.audioSession, manager: component.context.sharedContext.mediaManager.universalVideoManager, diff --git a/submodules/TelegramUI/Images.xcassets/Media Gallery/NavigationSettings.imageset/Contents.json b/submodules/TelegramUI/Images.xcassets/Media Gallery/NavigationSettings.imageset/Contents.json new file mode 100644 index 0000000000..cc44172ecd --- /dev/null +++ b/submodules/TelegramUI/Images.xcassets/Media Gallery/NavigationSettings.imageset/Contents.json @@ -0,0 +1,12 @@ +{ + "images" : [ + { + "filename" : "videosettings_30.pdf", + "idiom" : "universal" + } + ], + "info" : { + "author" : "xcode", + "version" : 1 + } +} diff --git a/submodules/TelegramUI/Images.xcassets/Media Gallery/NavigationSettings.imageset/videosettings_30.pdf b/submodules/TelegramUI/Images.xcassets/Media Gallery/NavigationSettings.imageset/videosettings_30.pdf new file mode 100644 index 0000000000000000000000000000000000000000..028e8b9ab0560c94a99440d10e540916ef2f1022 GIT binary patch literal 5753 [base85-encoded binary payload omitted: new vector PDF icon asset] literal 0 HcmV?d00001
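The imagesets added above are universal, single-scale vector PDF icons, so at runtime they resolve by name like any other asset. A minimal sketch, assuming a hypothetical helper that picks between the plain and the HD settings glyph (the helper and mapping are illustrative, not part of this patch):

import UIKit

// Hypothetical helper: maps a quality flag to one of the imageset names added
// above and resolves it from the main asset catalog.
func videoSettingsIcon(isHD: Bool) -> UIImage? {
    let name = isHD ? "NavigationSettingsQHD" : "NavigationSettings"
    return UIImage(named: name)
}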
diff --git a/submodules/TelegramUI/Images.xcassets/Media Gallery/NavigationSettingsQHD.imageset/Contents.json b/submodules/TelegramUI/Images.xcassets/Media Gallery/NavigationSettingsQHD.imageset/Contents.json new file mode 100644 index 0000000000..1f7bfc2bb2 --- /dev/null +++ b/submodules/TelegramUI/Images.xcassets/Media Gallery/NavigationSettingsQHD.imageset/Contents.json @@ -0,0 +1,12 @@ +{ + "images" : [ + { + "filename" : "videosettingshd_30.pdf", + "idiom" : "universal" + } + ], + "info" : { + "author" : "xcode", + "version" : 1 + } +} diff --git a/submodules/TelegramUI/Images.xcassets/Media Gallery/NavigationSettingsQHD.imageset/videosettingshd_30.pdf b/submodules/TelegramUI/Images.xcassets/Media Gallery/NavigationSettingsQHD.imageset/videosettingshd_30.pdf new file mode 100644 index 0000000000000000000000000000000000000000..735db423a85e5782578d6a4063aeb6b1b112f554 GIT binary patch literal 5183 [base85-encoded binary payload omitted: new vector PDF icon asset] literal 0 HcmV?d00001 diff --git a/submodules/TelegramUI/Sources/AccountContext.swift b/submodules/TelegramUI/Sources/AccountContext.swift index 31c602eaee..2b14a05883 100644 --- a/submodules/TelegramUI/Sources/AccountContext.swift +++ b/submodules/TelegramUI/Sources/AccountContext.swift @@ -578,8 +578,8 @@ public final class AccountContextImpl: AccountContext { } } - public func scheduleGroupCall(peerId: PeerId) { - let _ = self.sharedContext.callManager?.scheduleGroupCall(context: self, peerId: peerId, endCurrentIfAny: true) + public func scheduleGroupCall(peerId: PeerId, parentController: ViewController) { + let _ = self.sharedContext.callManager?.scheduleGroupCall(context: self, peerId: peerId, endCurrentIfAny: true, parentController: parentController) } public func joinGroupCall(peerId: PeerId, invite: String?, requestJoinAsPeerId: ((@escaping (PeerId?) -> Void) -> Void)?, activeCall: EngineGroupCallDescription) { diff --git a/submodules/TelegramUI/Sources/AppDelegate.swift b/submodules/TelegramUI/Sources/AppDelegate.swift index a5ce5c753c..488dc74dc3 100644 --- a/submodules/TelegramUI/Sources/AppDelegate.swift +++ b/submodules/TelegramUI/Sources/AppDelegate.swift @@ -1753,7 +1753,7 @@ private func extractAccountManagerState(records: AccountRecordsView Void)? - init(postbox: Postbox, audioSession: ManagedAudioSession, manager: UniversalVideoManager, content: UniversalVideoContent, close: @escaping () -> Void) { + init(accountId: AccountRecordId, postbox: Postbox, audioSession: ManagedAudioSession, manager: UniversalVideoManager, content: UniversalVideoContent, close: @escaping () -> Void) { self.close = close self.content = content var togglePlayPauseImpl: (() -> Void)?
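The scheduleGroupCall change above threads a parentController through because scheduling is now presented as a sheet, and presenting UI needs a live controller to present from. A minimal sketch of that pattern using plain UIKit stand-ins; the alert below is only a placeholder for the real scheduling sheet, which this sketch does not reproduce:

import UIKit

// Illustrative only: presenting a scheduling sheet requires a parent controller.
func presentScheduleSheet(from parentController: UIViewController, schedule: @escaping (Date) -> Void) {
    let sheet = UIAlertController(title: "Schedule Video Chat", message: nil, preferredStyle: .actionSheet)
    sheet.addAction(UIAlertAction(title: "Start in one hour", style: .default) { _ in
        schedule(Date(timeIntervalSinceNow: 60.0 * 60.0))
    })
    sheet.addAction(UIAlertAction(title: "Cancel", style: .cancel))
    parentController.present(sheet, animated: true)
}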
let decoration = OverlayInstantVideoDecoration(tapped: { togglePlayPauseImpl?() }) - self.videoNode = UniversalVideoNode(postbox: postbox, audioSession: audioSession, manager: manager, decoration: decoration, content: content, priority: .secondaryOverlay, snapshotContentWhenGone: true) + self.videoNode = UniversalVideoNode(accountId: accountId, postbox: postbox, audioSession: audioSession, manager: manager, decoration: decoration, content: content, priority: .secondaryOverlay, snapshotContentWhenGone: true) self.decoration = decoration super.init() diff --git a/submodules/TelegramUI/Sources/SharedMediaPlayer.swift b/submodules/TelegramUI/Sources/SharedMediaPlayer.swift index a5961e2876..6c6229f201 100644 --- a/submodules/TelegramUI/Sources/SharedMediaPlayer.swift +++ b/submodules/TelegramUI/Sources/SharedMediaPlayer.swift @@ -236,7 +236,7 @@ final class SharedMediaPlayer { if let mediaManager = strongSelf.mediaManager, let item = item as? MessageMediaPlaylistItem { switch playbackData.source { case let .telegramFile(fileReference, _, _): - let videoNode = OverlayInstantVideoNode(postbox: strongSelf.account.postbox, audioSession: strongSelf.audioSession, manager: mediaManager.universalVideoManager, content: NativeVideoContent(id: .message(item.message.stableId, fileReference.media.fileId), userLocation: .peer(item.message.id.peerId), fileReference: fileReference, enableSound: false, baseRate: rateValue, isAudioVideoMessage: true, captureProtected: item.message.isCopyProtected(), storeAfterDownload: nil), close: { [weak mediaManager] in + let videoNode = OverlayInstantVideoNode(accountId: strongSelf.account.id, postbox: strongSelf.account.postbox, audioSession: strongSelf.audioSession, manager: mediaManager.universalVideoManager, content: NativeVideoContent(id: .message(item.message.stableId, fileReference.media.fileId), userLocation: .peer(item.message.id.peerId), fileReference: fileReference, enableSound: false, baseRate: rateValue, isAudioVideoMessage: true, captureProtected: item.message.isCopyProtected(), storeAfterDownload: nil), close: { [weak mediaManager] in mediaManager?.setPlaylist(nil, type: .voice, control: .playback(.pause)) }) strongSelf.playbackItem = .instantVideo(videoNode) diff --git a/submodules/TelegramUniversalVideoContent/BUILD b/submodules/TelegramUniversalVideoContent/BUILD index 9baefbc0b0..b705ae876b 100644 --- a/submodules/TelegramUniversalVideoContent/BUILD +++ b/submodules/TelegramUniversalVideoContent/BUILD @@ -24,6 +24,7 @@ swift_library( "//submodules/AppBundle:AppBundle", "//submodules/Utils/RangeSet:RangeSet", "//submodules/TelegramVoip", + "//submodules/ManagedFile", ], visibility = [ "//visibility:public", diff --git a/submodules/TelegramUniversalVideoContent/Sources/HLSVideoContent.swift b/submodules/TelegramUniversalVideoContent/Sources/HLSVideoContent.swift index f04db2abf9..3cd8c3b4e3 100644 --- a/submodules/TelegramUniversalVideoContent/Sources/HLSVideoContent.swift +++ b/submodules/TelegramUniversalVideoContent/Sources/HLSVideoContent.swift @@ -12,6 +12,7 @@ import AccountContext import PhotoResources import RangeSet import TelegramVoip +import ManagedFile public final class HLSVideoContent: UniversalVideoContent { public let id: AnyHashable @@ -40,8 +41,8 @@ public final class HLSVideoContent: UniversalVideoContent { self.fetchAutomatically = fetchAutomatically } - public func makeContentNode(postbox: Postbox, audioSession: ManagedAudioSession) -> UniversalVideoContentNode & ASDisplayNode { - return HLSVideoContentNode(postbox: postbox, 
audioSessionManager: audioSession, userLocation: self.userLocation, fileReference: self.fileReference, streamVideo: self.streamVideo, loopVideo: self.loopVideo, enableSound: self.enableSound, baseRate: self.baseRate, fetchAutomatically: self.fetchAutomatically) + public func makeContentNode(accountId: AccountRecordId, postbox: Postbox, audioSession: ManagedAudioSession) -> UniversalVideoContentNode & ASDisplayNode { + return HLSVideoContentNode(accountId: accountId, postbox: postbox, audioSessionManager: audioSession, userLocation: self.userLocation, fileReference: self.fileReference, streamVideo: self.streamVideo, loopVideo: self.loopVideo, enableSound: self.enableSound, baseRate: self.baseRate, fetchAutomatically: self.fetchAutomatically) } public func isEqual(to other: UniversalVideoContent) -> Bool { @@ -60,7 +61,7 @@ public final class HLSVideoContent: UniversalVideoContent { private final class HLSVideoContentNode: ASDisplayNode, UniversalVideoContentNode { private final class HLSServerSource: SharedHLSServer.Source { - let id: UUID + let id: String let postbox: Postbox let userLocation: MediaResourceUserLocation let playlistFiles: [Int: FileMediaReference] @@ -68,8 +69,8 @@ private final class HLSVideoContentNode: ASDisplayNode, UniversalVideoContentNod private var playlistFetchDisposables: [Int: Disposable] = [:] - init(id: UUID, postbox: Postbox, userLocation: MediaResourceUserLocation, playlistFiles: [Int: FileMediaReference], qualityFiles: [Int: FileMediaReference]) { - self.id = id + init(accountId: Int64, fileId: Int64, postbox: Postbox, userLocation: MediaResourceUserLocation, playlistFiles: [Int: FileMediaReference], qualityFiles: [Int: FileMediaReference]) { + self.id = "\(UInt64(bitPattern: accountId))_\(fileId)" self.postbox = postbox self.userLocation = userLocation self.playlistFiles = playlistFiles @@ -143,10 +144,11 @@ private final class HLSVideoContentNode: ASDisplayNode, UniversalVideoContentNod return .never() } - func fileData(id: Int64, range: Range) -> Signal<(Data, Int)?, NoError> { - guard let file = self.qualityFiles.values.first(where: { $0.media.fileId.id == id }) else { + func fileData(id: Int64, range: Range) -> Signal<(TempBoxFile, Range, Int)?, NoError> { + guard let (quality, file) = self.qualityFiles.first(where: { $0.value.media.fileId.id == id }) else { return .single(nil) } + let _ = quality guard let size = file.media.size else { return .single(nil) } @@ -156,77 +158,80 @@ private final class HLSVideoContentNode: ASDisplayNode, UniversalVideoContentNod let mappedRange: Range = Int64(range.lowerBound) ..< Int64(range.upperBound) - return Signal { subscriber in - if let fetchResource = postbox.mediaBox.fetchResource { - let location = MediaResourceStorageLocation(userLocation: userLocation, reference: file.resourceReference(file.media.resource)) - let params = MediaResourceFetchParameters( - tag: TelegramMediaResourceFetchTag(statsCategory: .video, userContentType: .video), - info: TelegramCloudMediaResourceFetchInfo(reference: file.resourceReference(file.media.resource), preferBackgroundReferenceRevalidation: true, continueInBackground: true), - location: location, - contentType: .video, - isRandomAccessAllowed: true - ) - - final class StoredState { - let range: Range - var data: Data - var ranges: RangeSet - - init(range: Range) { - self.range = range - self.data = Data(count: Int(range.upperBound - range.lowerBound)) - self.ranges = RangeSet(range) - } - } - let storedState = Atomic(value: StoredState(range: mappedRange)) - - return 
fetchResource(file.media.resource, .single([(mappedRange, .elevated)]), params).start(next: { result in - switch result { - case let .dataPart(resourceOffset, data, _, _): - if !data.isEmpty { - let partRange = resourceOffset ..< (resourceOffset + Int64(data.count)) - var isReady = false - storedState.with { storedState in - let overlapRange = partRange.clamped(to: storedState.range) - guard !overlapRange.isEmpty else { - return - } - let innerRange = (overlapRange.lowerBound - storedState.range.lowerBound) ..< (overlapRange.upperBound - storedState.range.lowerBound) - let dataStart = overlapRange.lowerBound - partRange.lowerBound - let dataEnd = overlapRange.upperBound - partRange.lowerBound - let innerData = data.subdata(in: Int(dataStart) ..< Int(dataEnd)) - storedState.data.replaceSubrange(Int(innerRange.lowerBound) ..< Int(innerRange.upperBound), with: innerData) - storedState.ranges.subtract(RangeSet(overlapRange)) - if storedState.ranges.isEmpty { - isReady = true - } - } - if isReady { - subscriber.putNext((storedState.with({ $0.data }), Int(size))) - subscriber.putCompletion() - } - } - default: - break - } - }) - } else { + let queue = postbox.mediaBox.dataQueue + return Signal<(TempBoxFile, Range, Int)?, NoError> { subscriber in + guard let fetchResource = postbox.mediaBox.fetchResource else { return EmptyDisposable } - /*let fetchDisposable = freeMediaFileResourceInteractiveFetched(postbox: postbox, userLocation: userLocation, fileReference: file, resource: file.media.resource, range: (mappedRange, .elevated)).startStandalone() + let location = MediaResourceStorageLocation(userLocation: userLocation, reference: file.resourceReference(file.media.resource)) + let params = MediaResourceFetchParameters( + tag: TelegramMediaResourceFetchTag(statsCategory: .video, userContentType: .video), + info: TelegramCloudMediaResourceFetchInfo(reference: file.resourceReference(file.media.resource), preferBackgroundReferenceRevalidation: true, continueInBackground: true), + location: location, + contentType: .video, + isRandomAccessAllowed: true + ) - let dataDisposable = postbox.mediaBox.resourceData(file.media.resource, size: size, in: mappedRange).startStandalone(next: { value, isComplete in - if isComplete { - subscriber.putNext((value, Int(size))) - subscriber.putCompletion() + let completeFile = TempBox.shared.tempFile(fileName: "data") + let partialFile = TempBox.shared.tempFile(fileName: "data") + let metaFile = TempBox.shared.tempFile(fileName: "data") + + guard let fileContext = MediaBoxFileContextV2Impl( + queue: queue, + manager: postbox.mediaBox.dataFileManager, + storageBox: nil, + resourceId: file.media.resource.id.stringRepresentation.data(using: .utf8)!, + path: completeFile.path, + partialPath: partialFile.path, + metaPath: metaFile.path + ) else { + return EmptyDisposable + } + + let fetchDisposable = fileContext.fetched( + range: mappedRange, + priority: .default, + fetch: { intervals in + return fetchResource(file.media.resource, intervals, params) + }, + error: { _ in + }, + completed: { } - }) + ) + + #if DEBUG + let startTime = CFAbsoluteTimeGetCurrent() + #endif + + let dataDisposable = fileContext.data( + range: mappedRange, + waitUntilAfterInitialFetch: true, + next: { result in + if result.complete { + #if DEBUG + let fetchTime = CFAbsoluteTimeGetCurrent() - startTime + print("Fetching \(quality)p part took \(fetchTime * 1000.0) ms") + #endif + subscriber.putNext((partialFile, Int(result.offset) ..< Int(result.offset + result.size), Int(size))) + 
subscriber.putCompletion() + } + } + ) + return ActionDisposable { - fetchDisposable.dispose() - dataDisposable.dispose() - }*/ + queue.async { + fetchDisposable.dispose() + dataDisposable.dispose() + fileContext.cancelFullRangeFetches() + + TempBox.shared.dispose(completeFile) + TempBox.shared.dispose(metaFile) + } + } } + |> runOn(queue) } } @@ -244,6 +249,7 @@ private final class HLSVideoContentNode: ASDisplayNode, UniversalVideoContentNod private var initializedStatus = false private var statusValue = MediaPlayerStatus(generationTimestamp: 0.0, duration: 0.0, dimensions: CGSize(), timestamp: 0.0, baseRate: 1.0, seekId: 0, status: .paused, soundEnabled: true) + private var baseRate: Double = 1.0 private var isBuffering = false private var seekId: Int = 0 private let _status = ValuePromise() @@ -272,7 +278,7 @@ private final class HLSVideoContentNode: ASDisplayNode, UniversalVideoContentNod private let imageNode: TransformImageNode private var playerItem: AVPlayerItem? - private let player: AVPlayer + private var player: AVPlayer? private let playerNode: ASDisplayNode private var loadProgressDisposable: Disposable? @@ -296,24 +302,38 @@ private final class HLSVideoContentNode: ASDisplayNode, UniversalVideoContentNod private var preferredVideoQuality: UniversalVideoContentVideoQuality = .auto - init(postbox: Postbox, audioSessionManager: ManagedAudioSession, userLocation: MediaResourceUserLocation, fileReference: FileMediaReference, streamVideo: Bool, loopVideo: Bool, enableSound: Bool, baseRate: Double, fetchAutomatically: Bool) { + init(accountId: AccountRecordId, postbox: Postbox, audioSessionManager: ManagedAudioSession, userLocation: MediaResourceUserLocation, fileReference: FileMediaReference, streamVideo: Bool, loopVideo: Bool, enableSound: Bool, baseRate: Double, fetchAutomatically: Bool) { self.postbox = postbox self.fileReference = fileReference self.approximateDuration = fileReference.media.duration ?? 0.0 self.audioSessionManager = audioSessionManager self.userLocation = userLocation + self.baseRate = baseRate + + if var dimensions = fileReference.media.dimensions { + if let thumbnail = fileReference.media.previewRepresentations.first { + let dimensionsVertical = dimensions.width < dimensions.height + let thumbnailVertical = thumbnail.dimensions.width < thumbnail.dimensions.height + if dimensionsVertical != thumbnailVertical { + dimensions = PixelDimensions(width: dimensions.height, height: dimensions.width) + } + } + self.dimensions = dimensions.cgSize + } else { + self.dimensions = CGSize(width: 128.0, height: 128.0) + } self.imageNode = TransformImageNode() - var startTime = CFAbsoluteTimeGetCurrent() - - let player = AVPlayer(playerItem: nil) + var player: AVPlayer? 
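The surrounding hunks store the requested baseRate and, on iOS 16 and later, mirror it into AVPlayer.defaultRate so the chosen speed survives pause/play cycles. A minimal sketch of that pattern, assuming a caller that owns the player:

import AVFoundation

// Apply a playback speed without accidentally starting a paused player.
func applyBaseRate(_ baseRate: Double, to player: AVPlayer) {
    if #available(iOS 16.0, *) {
        // defaultRate is the rate used the next time play() is called.
        player.defaultRate = Float(baseRate)
    }
    // Only touch `rate` while playing; assigning a non-zero rate to a paused
    // AVPlayer starts playback.
    if player.rate != 0.0 {
        player.rate = Float(baseRate)
    }
}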
+ player = AVPlayer(playerItem: nil) self.player = player - if !enableSound { - player.volume = 0.0 + if #available(iOS 16.0, *) { + player?.defaultRate = Float(baseRate) + } + if !enableSound { + player?.volume = 0.0 } - - print("Player created in \((CFAbsoluteTimeGetCurrent() - startTime) * 1000.0) ms") self.playerNode = ASDisplayNode() self.playerNode.setLayerBlock({ @@ -363,10 +383,9 @@ private final class HLSVideoContentNode: ASDisplayNode, UniversalVideoContentNod } } if !playlistFiles.isEmpty && playlistFiles.keys == qualityFiles.keys { - self.playerSource = HLSServerSource(id: UUID(), postbox: postbox, userLocation: userLocation, playlistFiles: playlistFiles, qualityFiles: qualityFiles) + self.playerSource = HLSServerSource(accountId: accountId.int64, fileId: fileReference.media.fileId.id, postbox: postbox, userLocation: userLocation, playlistFiles: playlistFiles, qualityFiles: qualityFiles) } - super.init() self.imageNode.setSignal(internalMediaGridMessageVideo(postbox: postbox, userLocation: self.userLocation, videoReference: fileReference) |> map { [weak self] getSize, getData in @@ -386,49 +405,39 @@ private final class HLSVideoContentNode: ASDisplayNode, UniversalVideoContentNod self.addSubnode(self.imageNode) self.addSubnode(self.playerNode) - self.player.actionAtItemEnd = .pause + self.player?.actionAtItemEnd = .pause self.imageNode.imageUpdated = { [weak self] _ in self?._ready.set(.single(Void())) } - self.player.addObserver(self, forKeyPath: "rate", options: [], context: nil) + self.player?.addObserver(self, forKeyPath: "rate", options: [], context: nil) self._bufferingStatus.set(.single(nil)) - startTime = CFAbsoluteTimeGetCurrent() - if let playerSource = self.playerSource { - self.serverDisposable = SharedHLSServer.shared.registerPlayer(source: playerSource) - - let playerItem: AVPlayerItem - let assetUrl = "http://127.0.0.1:\(SharedHLSServer.shared.port)/\(playerSource.id)/master.m3u8" - #if DEBUG - print("HLSVideoContentNode: playing \(assetUrl)") - #endif - playerItem = AVPlayerItem(url: URL(string: assetUrl)!) - print("Player item created in \((CFAbsoluteTimeGetCurrent() - startTime) * 1000.0) ms") - - if #available(iOS 14.0, *) { - playerItem.startsOnFirstEligibleVariant = true - } - - startTime = CFAbsoluteTimeGetCurrent() - self.setPlayerItem(playerItem) - print("Set player item in \((CFAbsoluteTimeGetCurrent() - startTime) * 1000.0) ms") + self.serverDisposable = SharedHLSServer.shared.registerPlayer(source: playerSource, completion: { [weak self] in + Queue.mainQueue().async { + guard let self else { + return + } + + let playerItem: AVPlayerItem + let assetUrl = "http://127.0.0.1:\(SharedHLSServer.shared.port)/\(playerSource.id)/master.m3u8" + #if DEBUG + print("HLSVideoContentNode: playing \(assetUrl)") + #endif + playerItem = AVPlayerItem(url: URL(string: assetUrl)!) 
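The player item here is created from a loopback URL served by the shared HLS server, so AVPlayer treats local content like any remote HLS stream. A self-contained sketch of the same pattern with placeholder port and source id:

import AVFoundation

// Build an AVPlayer for a master playlist served from localhost.
func makeLocalHLSPlayer(port: UInt16, sourceId: String) -> AVPlayer {
    let url = URL(string: "http://127.0.0.1:\(port)/\(sourceId)/master.m3u8")!
    let item = AVPlayerItem(url: url)
    if #available(iOS 14.0, *) {
        // Begin with the first eligible variant instead of probing, which
        // shortens startup for content served from the local machine.
        item.startsOnFirstEligibleVariant = true
    }
    let player = AVPlayer(playerItem: item)
    player.actionAtItemEnd = .pause
    return player
}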
+ + if #available(iOS 14.0, *) { + playerItem.startsOnFirstEligibleVariant = true + } + + self.setPlayerItem(playerItem) + } + }) } - self.didPlayToEndTimeObserver = NotificationCenter.default.addObserver(forName: NSNotification.Name.AVPlayerItemDidPlayToEndTime, object: self.player.currentItem, queue: nil, using: { [weak self] notification in - self?.performActionAtEnd() - }) - - self.failureObserverId = NotificationCenter.default.addObserver(forName: AVPlayerItem.failedToPlayToEndTimeNotification, object: self.player.currentItem, queue: .main, using: { notification in - print("Player Error: \(notification.description)") - }) - self.errorObserverId = NotificationCenter.default.addObserver(forName: AVPlayerItem.newErrorLogEntryNotification, object: self.player.currentItem, queue: .main, using: { notification in - print("Player Error: \(notification.description)") - }) - self.didBecomeActiveObserver = NotificationCenter.default.addObserver(forName: UIApplication.willEnterForegroundNotification, object: nil, queue: nil, using: { [weak self] _ in guard let strongSelf = self, let layer = strongSelf.playerNode.layer as? AVPlayerLayer else { return @@ -441,16 +450,10 @@ private final class HLSVideoContentNode: ASDisplayNode, UniversalVideoContentNod } layer.player = nil }) - if let currentItem = self.player.currentItem { - currentItem.addObserver(self, forKeyPath: "presentationSize", options: [], context: nil) - } } deinit { - self.player.removeObserver(self, forKeyPath: "rate") - if let currentItem = self.player.currentItem { - currentItem.removeObserver(self, forKeyPath: "presentationSize") - } + self.player?.removeObserver(self, forKeyPath: "rate") self.setPlayerItem(nil) @@ -459,15 +462,16 @@ private final class HLSVideoContentNode: ASDisplayNode, UniversalVideoContentNod self.loadProgressDisposable?.dispose() self.statusDisposable?.dispose() - if let didPlayToEndTimeObserver = self.didPlayToEndTimeObserver { - NotificationCenter.default.removeObserver(didPlayToEndTimeObserver) - } if let didBecomeActiveObserver = self.didBecomeActiveObserver { NotificationCenter.default.removeObserver(didBecomeActiveObserver) } if let willResignActiveObserver = self.willResignActiveObserver { NotificationCenter.default.removeObserver(willResignActiveObserver) } + + if let didPlayToEndTimeObserver = self.didPlayToEndTimeObserver { + NotificationCenter.default.removeObserver(didPlayToEndTimeObserver) + } if let failureObserverId = self.failureObserverId { NotificationCenter.default.removeObserver(failureObserverId) } @@ -486,14 +490,53 @@ private final class HLSVideoContentNode: ASDisplayNode, UniversalVideoContentNod playerItem.removeObserver(self, forKeyPath: "playbackLikelyToKeepUp") playerItem.removeObserver(self, forKeyPath: "playbackBufferFull") playerItem.removeObserver(self, forKeyPath: "status") - if let playerItemFailedToPlayToEndTimeObserver = self.playerItemFailedToPlayToEndTimeObserver { - NotificationCenter.default.removeObserver(playerItemFailedToPlayToEndTimeObserver) - self.playerItemFailedToPlayToEndTimeObserver = nil - } + playerItem.removeObserver(self, forKeyPath: "presentationSize") + } + + if let playerItemFailedToPlayToEndTimeObserver = self.playerItemFailedToPlayToEndTimeObserver { + self.playerItemFailedToPlayToEndTimeObserver = nil + NotificationCenter.default.removeObserver(playerItemFailedToPlayToEndTimeObserver) + } + + if let didPlayToEndTimeObserver = self.didPlayToEndTimeObserver { + self.didPlayToEndTimeObserver = nil + 
NotificationCenter.default.removeObserver(didPlayToEndTimeObserver) + } + if let failureObserverId = self.failureObserverId { + self.failureObserverId = nil + NotificationCenter.default.removeObserver(failureObserverId) + } + if let errorObserverId = self.errorObserverId { + self.errorObserverId = nil + NotificationCenter.default.removeObserver(errorObserverId) } self.playerItem = item + if let item { + self.didPlayToEndTimeObserver = NotificationCenter.default.addObserver(forName: NSNotification.Name.AVPlayerItemDidPlayToEndTime, object: item, queue: nil, using: { [weak self] notification in + self?.performActionAtEnd() + }) + + self.failureObserverId = NotificationCenter.default.addObserver(forName: AVPlayerItem.failedToPlayToEndTimeNotification, object: item, queue: .main, using: { notification in +#if DEBUG + print("Player Error: \(notification.description)") +#endif + }) + self.errorObserverId = NotificationCenter.default.addObserver(forName: AVPlayerItem.newErrorLogEntryNotification, object: item, queue: .main, using: { [weak item] notification in + if let item { + let event = item.errorLog()?.events.last + if let event { + let _ = event +#if DEBUG + print("Player Error: \(event.errorComment ?? "")") +#endif + } + } + }) + item.addObserver(self, forKeyPath: "presentationSize", options: [], context: nil) + } + if let playerItem = self.playerItem { playerItem.addObserver(self, forKeyPath: "playbackBufferEmpty", options: .new, context: nil) playerItem.addObserver(self, forKeyPath: "playbackLikelyToKeepUp", options: .new, context: nil) @@ -507,23 +550,26 @@ private final class HLSVideoContentNode: ASDisplayNode, UniversalVideoContentNod }) } - self.player.replaceCurrentItem(with: self.playerItem) + self.player?.replaceCurrentItem(with: self.playerItem) } private func updateStatus() { - let isPlaying = !self.player.rate.isZero + guard let player = self.player else { + return + } + let isPlaying = !player.rate.isZero let status: MediaPlayerPlaybackStatus if self.isBuffering { status = .buffering(initial: false, whilePlaying: isPlaying, progress: 0.0, display: true) } else { status = isPlaying ? .playing : .paused } - var timestamp = self.player.currentTime().seconds + var timestamp = player.currentTime().seconds if timestamp.isFinite && !timestamp.isNaN { } else { timestamp = 0.0 } - self.statusValue = MediaPlayerStatus(generationTimestamp: CACurrentMediaTime(), duration: Double(self.approximateDuration), dimensions: CGSize(), timestamp: timestamp, baseRate: Double(self.player.rate), seekId: self.seekId, status: status, soundEnabled: true) + self.statusValue = MediaPlayerStatus(generationTimestamp: CACurrentMediaTime(), duration: Double(self.approximateDuration), dimensions: CGSize(), timestamp: timestamp, baseRate: self.baseRate, seekId: self.seekId, status: status, soundEnabled: true) self._status.set(self.statusValue) if case .playing = status { @@ -543,9 +589,11 @@ private final class HLSVideoContentNode: ASDisplayNode, UniversalVideoContentNod override func observeValue(forKeyPath keyPath: String?, of object: Any?, change: [NSKeyValueChangeKey : Any]?, context: UnsafeMutableRawPointer?) 
{ if keyPath == "rate" { - let isPlaying = !self.player.rate.isZero - if isPlaying { - self.isBuffering = false + if let player = self.player { + let isPlaying = !player.rate.isZero + if isPlaying { + self.isBuffering = false + } } self.updateStatus() } else if keyPath == "playbackBufferEmpty" { @@ -555,7 +603,7 @@ private final class HLSVideoContentNode: ASDisplayNode, UniversalVideoContentNod self.isBuffering = false self.updateStatus() } else if keyPath == "presentationSize" { - if let currentItem = self.player.currentItem { + if let currentItem = self.player?.currentItem { print("Presentation size: \(Int(currentItem.presentationSize.height))") } } @@ -573,42 +621,57 @@ private final class HLSVideoContentNode: ASDisplayNode, UniversalVideoContentNod transition.updateFrame(node: self.imageNode, frame: CGRect(origin: CGPoint(), size: size)) - let makeImageLayout = self.imageNode.asyncLayout() - let applyImageLayout = makeImageLayout(TransformImageArguments(corners: ImageCorners(), imageSize: size, boundingSize: size, intrinsicInsets: UIEdgeInsets())) - applyImageLayout() + if let dimensions = self.dimensions { + let imageSize = CGSize(width: floor(dimensions.width / 2.0), height: floor(dimensions.height / 2.0)) + let makeLayout = self.imageNode.asyncLayout() + let applyLayout = makeLayout(TransformImageArguments(corners: ImageCorners(), imageSize: imageSize, boundingSize: imageSize, intrinsicInsets: UIEdgeInsets(), emptyColor: .clear)) + applyLayout() + } } func play() { assert(Queue.mainQueue().isCurrent()) if !self.initializedStatus { - self._status.set(MediaPlayerStatus(generationTimestamp: 0.0, duration: Double(self.approximateDuration), dimensions: CGSize(), timestamp: 0.0, baseRate: 1.0, seekId: self.seekId, status: .buffering(initial: true, whilePlaying: true, progress: 0.0, display: true), soundEnabled: true)) + self._status.set(MediaPlayerStatus(generationTimestamp: 0.0, duration: Double(self.approximateDuration), dimensions: CGSize(), timestamp: 0.0, baseRate: self.baseRate, seekId: self.seekId, status: .buffering(initial: true, whilePlaying: true, progress: 0.0, display: true), soundEnabled: true)) } if !self.hasAudioSession { - if self.player.volume != 0.0 { + if self.player?.volume != 0.0 { self.audioSessionDisposable.set(self.audioSessionManager.push(audioSessionType: .play(mixWithOthers: false), activate: { [weak self] _ in - self?.hasAudioSession = true - self?.player.play() + guard let self else { + return + } + self.hasAudioSession = true + self.player?.play() }, deactivate: { [weak self] _ in - self?.hasAudioSession = false - self?.player.pause() + guard let self else { + return .complete() + } + self.hasAudioSession = false + self.player?.pause() + return .complete() })) } else { - self.player.play() + self.player?.play() } } else { - self.player.play() + self.player?.play() } } func pause() { assert(Queue.mainQueue().isCurrent()) - self.player.pause() + self.player?.pause() } func togglePlayPause() { assert(Queue.mainQueue().isCurrent()) - if self.player.rate.isZero { + + guard let player = self.player else { + return + } + + if player.rate.isZero { self.play() } else { self.pause() @@ -621,15 +684,15 @@ private final class HLSVideoContentNode: ASDisplayNode, UniversalVideoContentNod if !self.hasAudioSession { self.audioSessionDisposable.set(self.audioSessionManager.push(audioSessionType: .play(mixWithOthers: false), activate: { [weak self] _ in self?.hasAudioSession = true - self?.player.volume = 1.0 + self?.player?.volume = 1.0 }, deactivate: { [weak self] _ in 
self?.hasAudioSession = false - self?.player.pause() + self?.player?.pause() return .complete() })) } } else { - self.player.volume = 0.0 + self.player?.volume = 0.0 self.hasAudioSession = false self.audioSessionDisposable.set(nil) } @@ -638,16 +701,16 @@ private final class HLSVideoContentNode: ASDisplayNode, UniversalVideoContentNod func seek(_ timestamp: Double) { assert(Queue.mainQueue().isCurrent()) self.seekId += 1 - self.player.seek(to: CMTime(seconds: timestamp, preferredTimescale: 30)) + self.player?.seek(to: CMTime(seconds: timestamp, preferredTimescale: 30)) } func playOnceWithSound(playAndRecord: Bool, seek: MediaPlayerSeek, actionAtEnd: MediaPlayerPlayOnceWithSoundActionAtEnd) { - self.player.volume = 1.0 + self.player?.volume = 1.0 self.play() } func setSoundMuted(soundMuted: Bool) { - self.player.volume = soundMuted ? 0.0 : 1.0 + self.player?.volume = soundMuted ? 0.0 : 1.0 } func continueWithOverridingAmbientMode(isAmbient: Bool) { @@ -657,7 +720,7 @@ private final class HLSVideoContentNode: ASDisplayNode, UniversalVideoContentNod } func continuePlayingWithoutSound(actionAtEnd: MediaPlayerPlayOnceWithSoundActionAtEnd) { - self.player.volume = 0.0 + self.player?.volume = 0.0 self.hasAudioSession = false self.audioSessionDisposable.set(nil) } @@ -666,13 +729,23 @@ private final class HLSVideoContentNode: ASDisplayNode, UniversalVideoContentNod } func setBaseRate(_ baseRate: Double) { - self.player.rate = Float(baseRate) + guard let player = self.player else { + return + } + self.baseRate = baseRate + if #available(iOS 16.0, *) { + player.defaultRate = Float(baseRate) + } + if player.rate != 0.0 { + player.rate = Float(baseRate) + } + self.updateStatus() } func setVideoQuality(_ videoQuality: UniversalVideoContentVideoQuality) { self.preferredVideoQuality = videoQuality - guard let currentItem = self.player.currentItem else { + guard let currentItem = self.player?.currentItem else { return } guard let playerSource = self.playerSource else { @@ -694,7 +767,7 @@ private final class HLSVideoContentNode: ASDisplayNode, UniversalVideoContentNod } func videoQualityState() -> (current: Int, preferred: UniversalVideoContentVideoQuality, available: [Int])? 
{ - guard let currentItem = self.player.currentItem else { + guard let currentItem = self.player?.currentItem else { return nil } guard let playerSource = self.playerSource else { diff --git a/submodules/TelegramUniversalVideoContent/Sources/NativeVideoContent.swift b/submodules/TelegramUniversalVideoContent/Sources/NativeVideoContent.swift index 6bc07d1c0c..e7fd264a92 100644 --- a/submodules/TelegramUniversalVideoContent/Sources/NativeVideoContent.swift +++ b/submodules/TelegramUniversalVideoContent/Sources/NativeVideoContent.swift @@ -137,7 +137,7 @@ public final class NativeVideoContent: UniversalVideoContent { self.hasSentFramesToDisplay = hasSentFramesToDisplay } - public func makeContentNode(postbox: Postbox, audioSession: ManagedAudioSession) -> UniversalVideoContentNode & ASDisplayNode { + public func makeContentNode(accountId: AccountRecordId, postbox: Postbox, audioSession: ManagedAudioSession) -> UniversalVideoContentNode & ASDisplayNode { return NativeVideoContentNode(postbox: postbox, audioSessionManager: audioSession, userLocation: self.userLocation, fileReference: self.fileReference, imageReference: self.imageReference, streamVideo: self.streamVideo, loopVideo: self.loopVideo, enableSound: self.enableSound, soundMuted: self.soundMuted, beginWithAmbientSound: self.beginWithAmbientSound, mixWithOthers: self.mixWithOthers, baseRate: self.baseRate, baseVideoQuality: self.baseVideoQuality, fetchAutomatically: self.fetchAutomatically, onlyFullSizeThumbnail: self.onlyFullSizeThumbnail, useLargeThumbnail: self.useLargeThumbnail, autoFetchFullSizeThumbnail: self.autoFetchFullSizeThumbnail, startTimestamp: self.startTimestamp, endTimestamp: self.endTimestamp, continuePlayingWithoutSoundOnLostAudioSession: self.continuePlayingWithoutSoundOnLostAudioSession, placeholderColor: self.placeholderColor, tempFilePath: self.tempFilePath, isAudioVideoMessage: self.isAudioVideoMessage, captureProtected: self.captureProtected, hintDimensions: self.hintDimensions, storeAfterDownload: self.storeAfterDownload, displayImage: self.displayImage, hasSentFramesToDisplay: self.hasSentFramesToDisplay) } diff --git a/submodules/TelegramUniversalVideoContent/Sources/OverlayUniversalVideoNode.swift b/submodules/TelegramUniversalVideoContent/Sources/OverlayUniversalVideoNode.swift index 1d8cc7c36a..d71b1eec99 100644 --- a/submodules/TelegramUniversalVideoContent/Sources/OverlayUniversalVideoNode.swift +++ b/submodules/TelegramUniversalVideoContent/Sources/OverlayUniversalVideoNode.swift @@ -41,7 +41,7 @@ public final class OverlayUniversalVideoNode: OverlayMediaItemNode, AVPictureInP private var statusDisposable: Disposable? private var status: MediaPlayerStatus? 
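The setVideoQuality and videoQualityState changes above expose manual quality selection on the current AVPlayerItem. One stock-AVFoundation way to express such a preference, not necessarily what this player does internally, is to cap variant selection on the item; a zero size removes the cap, i.e. automatic quality:

import AVFoundation
import CoreGraphics

// Limit ABR variant selection to a maximum vertical resolution, or lift the limit.
func applyPreferredQuality(maxHeight: Int?, to item: AVPlayerItem) {
    if let maxHeight {
        item.preferredMaximumResolution = CGSize(width: CGFloat(maxHeight) * 16.0 / 9.0, height: CGFloat(maxHeight))
    } else {
        item.preferredMaximumResolution = .zero
    }
}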
-    public init(postbox: Postbox, audioSession: ManagedAudioSession, manager: UniversalVideoManager, content: UniversalVideoContent, shouldBeDismissed: Signal = .single(false), expand: @escaping () -> Void, close: @escaping () -> Void) {
+    public init(accountId: AccountRecordId, postbox: Postbox, audioSession: ManagedAudioSession, manager: UniversalVideoManager, content: UniversalVideoContent, shouldBeDismissed: Signal = .single(false), expand: @escaping () -> Void, close: @escaping () -> Void) {
         self.content = content
         self.defaultExpand = expand
@@ -62,7 +62,7 @@ public final class OverlayUniversalVideoNode: OverlayMediaItemNode, AVPictureInP
         }, controlsAreShowingUpdated: { value in
             controlsAreShowingUpdatedImpl?(value)
         })
-        self.videoNode = UniversalVideoNode(postbox: postbox, audioSession: audioSession, manager: manager, decoration: decoration, content: content, priority: .overlay)
+        self.videoNode = UniversalVideoNode(accountId: accountId, postbox: postbox, audioSession: audioSession, manager: manager, decoration: decoration, content: content, priority: .overlay)
         self.decoration = decoration

         super.init()
diff --git a/submodules/TelegramUniversalVideoContent/Sources/PlatformVideoContent.swift b/submodules/TelegramUniversalVideoContent/Sources/PlatformVideoContent.swift
index f7a6529cb1..e673a81ed4 100644
--- a/submodules/TelegramUniversalVideoContent/Sources/PlatformVideoContent.swift
+++ b/submodules/TelegramUniversalVideoContent/Sources/PlatformVideoContent.swift
@@ -95,7 +95,7 @@ public final class PlatformVideoContent: UniversalVideoContent {
         self.fetchAutomatically = fetchAutomatically
     }

-    public func makeContentNode(postbox: Postbox, audioSession: ManagedAudioSession) -> UniversalVideoContentNode & ASDisplayNode {
+    public func makeContentNode(accountId: AccountRecordId, postbox: Postbox, audioSession: ManagedAudioSession) -> UniversalVideoContentNode & ASDisplayNode {
         return PlatformVideoContentNode(postbox: postbox, audioSessionManager: audioSession, userLocation: self.userLocation, content: self.content, streamVideo: self.streamVideo, loopVideo: self.loopVideo, enableSound: self.enableSound, baseRate: self.baseRate, fetchAutomatically: self.fetchAutomatically)
     }
diff --git a/submodules/TelegramUniversalVideoContent/Sources/SystemVideoContent.swift b/submodules/TelegramUniversalVideoContent/Sources/SystemVideoContent.swift
index 66bca4d094..8f436da4c4 100644
--- a/submodules/TelegramUniversalVideoContent/Sources/SystemVideoContent.swift
+++ b/submodules/TelegramUniversalVideoContent/Sources/SystemVideoContent.swift
@@ -29,7 +29,7 @@ public final class SystemVideoContent: UniversalVideoContent {
         self.duration = duration
     }

-    public func makeContentNode(postbox: Postbox, audioSession: ManagedAudioSession) -> UniversalVideoContentNode & ASDisplayNode {
+    public func makeContentNode(accountId: AccountRecordId, postbox: Postbox, audioSession: ManagedAudioSession) -> UniversalVideoContentNode & ASDisplayNode {
         return SystemVideoContentNode(postbox: postbox, audioSessionManager: audioSession, userLocation: self.userLocation, url: self.url, imageReference: self.imageReference, intrinsicDimensions: self.dimensions, approximateDuration: self.duration)
     }
 }
diff --git a/submodules/TelegramUniversalVideoContent/Sources/WebEmbedVideoContent.swift b/submodules/TelegramUniversalVideoContent/Sources/WebEmbedVideoContent.swift
index 2cba6fdf64..5b204ce3d0 100644
--- a/submodules/TelegramUniversalVideoContent/Sources/WebEmbedVideoContent.swift
+++ b/submodules/TelegramUniversalVideoContent/Sources/WebEmbedVideoContent.swift
@@ -36,7 +36,7 @@ public final class WebEmbedVideoContent: UniversalVideoContent {
         self.openUrl = openUrl
     }

-    public func makeContentNode(postbox: Postbox, audioSession: ManagedAudioSession) -> UniversalVideoContentNode & ASDisplayNode {
+    public func makeContentNode(accountId: AccountRecordId, postbox: Postbox, audioSession: ManagedAudioSession) -> UniversalVideoContentNode & ASDisplayNode {
         return WebEmbedVideoContentNode(postbox: postbox, audioSessionManager: audioSession, userLocation: self.userLocation, webPage: self.webPage, webpageContent: self.webpageContent, forcedTimestamp: self.forcedTimestamp, openUrl: self.openUrl)
     }
 }
diff --git a/submodules/TelegramVoip/BUILD b/submodules/TelegramVoip/BUILD
index 62595d2fb6..ab815d0e7f 100644
--- a/submodules/TelegramVoip/BUILD
+++ b/submodules/TelegramVoip/BUILD
@@ -18,6 +18,7 @@ swift_library(
         "//submodules/TgVoip:TgVoip",
         "//submodules/TgVoipWebrtc:TgVoipWebrtc",
         "//submodules/FFMpegBinding",
+        "//submodules/ManagedFile",
     ],
     visibility = [
         "//visibility:public",
diff --git a/submodules/TelegramVoip/Sources/WrappedMediaStreamingContext.swift b/submodules/TelegramVoip/Sources/WrappedMediaStreamingContext.swift
index 1acafab9a5..8610235ba8 100644
--- a/submodules/TelegramVoip/Sources/WrappedMediaStreamingContext.swift
+++ b/submodules/TelegramVoip/Sources/WrappedMediaStreamingContext.swift
@@ -5,7 +5,7 @@ import TelegramCore
 import Network
 import Postbox
 import FFMpegBinding
-
+import ManagedFile

 @available(iOS 12.0, macOS 14.0, *)
 public final class WrappedMediaStreamingContext {
@@ -275,7 +275,7 @@ public final class ExternalMediaStreamingContext: SharedHLSServerSource {
         }
     }

-    func fileData(id: Int64, range: Range) -> Signal<(Data, Int)?, NoError> {
+    func fileData(id: Int64, range: Range) -> Signal<(TempBoxFile, Range, Int)?, NoError> {
         return .never()
     }
 }
@@ -285,8 +285,8 @@ public final class ExternalMediaStreamingContext: SharedHLSServerSource {
     private let impl: QueueLocalObject
     private var hlsServerDisposable: Disposable?
-    public var id: UUID {
-        return self.internalId
+    public var id: String {
+        return self.internalId.uuidString
     }

     public init(id: CallSessionInternalId, rejoinNeeded: @escaping () -> Void) {
@@ -296,7 +296,7 @@ public final class ExternalMediaStreamingContext: SharedHLSServerSource {
             return Impl(queue: queue, rejoinNeeded: rejoinNeeded)
         })

-        self.hlsServerDisposable = SharedHLSServer.shared.registerPlayer(source: self)
+        self.hlsServerDisposable = SharedHLSServer.shared.registerPlayer(source: self, completion: {})
     }

     deinit {
@@ -331,7 +331,7 @@ public final class ExternalMediaStreamingContext: SharedHLSServerSource {
         }
     }

-    public func fileData(id: Int64, range: Range) -> Signal<(Data, Int)?, NoError> {
+    public func fileData(id: Int64, range: Range) -> Signal<(TempBoxFile, Range, Int)?, NoError> {
         return self.impl.signalWith { impl, subscriber in
             impl.fileData(id: id, range: range).start(next: subscriber.putNext)
         }
@@ -339,12 +339,12 @@
 }

 public protocol SharedHLSServerSource: AnyObject {
-    var id: UUID { get }
+    var id: String { get }

     func masterPlaylistData() -> Signal
     func playlistData(quality: Int) -> Signal
     func partData(index: Int, quality: Int) -> Signal
-    func fileData(id: Int64, range: Range) -> Signal<(Data, Int)?, NoError>
+    func fileData(id: Int64, range: Range) -> Signal<(TempBoxFile, Range, Int)?, NoError>
 }

 @available(iOS 12.0, macOS 14.0, *)
@@ -387,14 +387,66 @@ public final class SharedHLSServer {
     private var listener: NWListener?

     private var sourceReferences = Bag()
+    private var referenceCheckTimer: SwiftSignalKit.Timer?
+    private var shutdownTimer: SwiftSignalKit.Timer?

     init(queue: Queue, port: UInt16) {
         self.queue = queue
         self.port = NWEndpoint.Port(rawValue: port)!
-        self.start()
     }

-    func start() {
+    deinit {
+        self.referenceCheckTimer?.invalidate()
+        self.shutdownTimer?.invalidate()
+    }
+
+    private func updateNeedsListener() {
+        var isEmpty = true
+        for item in self.sourceReferences.copyItems() {
+            if let _ = item.source {
+                isEmpty = false
+                break
+            }
+        }
+
+        if isEmpty {
+            if self.listener != nil {
+                if self.shutdownTimer == nil {
+                    self.shutdownTimer = SwiftSignalKit.Timer(timeout: 1.0, repeat: false, completion: { [weak self] in
+                        guard let self else {
+                            return
+                        }
+                        self.shutdownTimer = nil
+                        self.stopListener()
+                    }, queue: self.queue)
+                    self.shutdownTimer?.start()
+                }
+            }
+            if let referenceCheckTimer = self.referenceCheckTimer {
+                self.referenceCheckTimer = nil
+                referenceCheckTimer.invalidate()
+            }
+        } else {
+            if let shutdownTimer = self.shutdownTimer {
+                self.shutdownTimer = nil
+                shutdownTimer.invalidate()
+            }
+            if self.listener == nil {
+                self.startListener()
+            }
+            if self.referenceCheckTimer == nil {
+                self.referenceCheckTimer = SwiftSignalKit.Timer(timeout: 1.0, repeat: true, completion: { [weak self] in
+                    guard let self else {
+                        return
+                    }
+                    self.updateNeedsListener()
+                }, queue: self.queue)
+                self.referenceCheckTimer?.start()
+            }
+        }
+    }
+
+    private func startListener() {
         let listener: NWListener
         do {
             listener = try NWListener(using: .tcp, on: self.port)
@@ -411,8 +463,8 @@
             self.handleConnection(connection: connection)
         }

-        listener.stateUpdateHandler = { [weak self] state in
-            guard let self else {
+        listener.stateUpdateHandler = { [weak self, weak listener] state in
+            guard let self, let listener else {
                 return
             }
             switch state {
@@ -420,9 +472,9 @@
                 Logger.shared.log("SharedHLSServer", "Server is ready on port \(self.port)")
             case let .failed(error):
                 Logger.shared.log("SharedHLSServer", "Server failed with error: \(error)")
-                self.listener?.cancel()
+                listener.cancel()

-                self.listener?.start(queue: self.queue.queue)
+                listener.start(queue: self.queue.queue)
             default:
                 break
             }
@@ -431,9 +483,17 @@
         listener.start(queue: self.queue.queue)
     }

+    private func stopListener() {
+        guard let listener = self.listener else {
+            return
+        }
+        self.listener = nil
+        listener.cancel()
+    }
+
     private func handleConnection(connection: NWConnection) {
         connection.start(queue: self.queue.queue)
-        connection.receive(minimumIncompleteLength: 1, maximumLength: 1024, completion: { [weak self] data, _, isComplete, error in
+        connection.receive(minimumIncompleteLength: 1, maximumLength: 32 * 1024, completion: { [weak self] data, _, isComplete, error in
             guard let self else {
                 return
             }
@@ -488,10 +548,7 @@
             self.sendErrorAndClose(connection: connection, error: .notFound)
             return
         }
-        guard let streamId = UUID(uuidString: String(requestPath[requestPath.startIndex ..< firstSlash.lowerBound])) else {
-            self.sendErrorAndClose(connection: connection)
-            return
-        }
+        let streamId = String(requestPath[requestPath.startIndex ..< firstSlash.lowerBound])
         guard let source = self.sourceReferences.copyItems().first(where: { $0.source?.id == streamId })?.source else {
             self.sendErrorAndClose(connection: connection)
             return
         }
@@ -581,13 +638,14 @@
         }
         let _ = (source.fileData(id: fileIdValue, range: requestRange.lowerBound ..< requestRange.upperBound + 1)
         |> deliverOn(self.queue)
+        //|> timeout(5.0, queue: self.queue, alternate: .single(nil))
         |> take(1)).start(next: { [weak self] result in
             guard let self else {
                 return
             }

-            if let (data, totalSize) = result {
-                self.sendResponseAndClose(connection: connection, data: data, range: requestRange, totalSize: totalSize)
+            if let (tempFile, tempFileRange, totalSize) = result {
+                self.sendResponseFileAndClose(connection: connection, file: tempFile, fileRange: tempFileRange, range: requestRange, totalSize: totalSize)
             } else {
                 self.sendErrorAndClose(connection: connection, error: .internalServerError)
             }
@@ -628,9 +686,62 @@
         })
     }

-    func registerPlayer(source: SharedHLSServerSource) -> Disposable {
+    private static func sendRemainingFileRange(queue: Queue, connection: NWConnection, tempFile: TempBoxFile, managedFile: ManagedFile, remainingRange: Range, fileSize: Int) -> Void {
+        let blockSize = 256 * 1024
+
+        let clippedLowerBound = min(remainingRange.lowerBound, fileSize)
+        var clippedUpperBound = min(remainingRange.upperBound, fileSize)
+        clippedUpperBound = min(clippedUpperBound, clippedLowerBound + blockSize)
+
+        if clippedUpperBound == clippedLowerBound {
+            TempBox.shared.dispose(tempFile)
+            connection.cancel()
+        } else {
+            let _ = managedFile.seek(position: Int64(clippedLowerBound))
+            let data = managedFile.readData(count: Int(clippedUpperBound - clippedLowerBound))
+            let nextRange = clippedUpperBound ..< remainingRange.upperBound
+
+            connection.send(content: data, completion: .contentProcessed { error in
+                queue.async {
+                    if let error {
+                        Logger.shared.log("SharedHLSServer", "Failed to send response: \(error)")
+                        connection.cancel()
+                        TempBox.shared.dispose(tempFile)
+                    } else {
+                        sendRemainingFileRange(queue: queue, connection: connection, tempFile: tempFile, managedFile: managedFile, remainingRange: nextRange, fileSize: fileSize)
+                    }
+                }
+            })
+        }
+    }
+
+    private func sendResponseFileAndClose(connection: NWConnection, file: TempBoxFile, fileRange: Range, range: Range, totalSize: Int) {
+        let queue = self.queue
+
+        guard let managedFile = ManagedFile(queue: nil, path: file.path, mode: .read), let fileSize = managedFile.getSize() else {
+            self.sendErrorAndClose(connection: connection, error: .internalServerError)
+            TempBox.shared.dispose(file)
+            return
+        }
+
+        var responseHeaders = "HTTP/1.1 200 OK\r\n"
+        responseHeaders.append("Content-Length: \(fileRange.upperBound - fileRange.lowerBound)\r\n")
+        responseHeaders.append("Content-Range: bytes \(range.lowerBound)-\(range.upperBound)/\(totalSize)\r\n")
+        responseHeaders.append("Content-Type: application/octet-stream\r\n")
+        responseHeaders.append("Connection: close\r\n")
+        responseHeaders.append("Access-Control-Allow-Origin: *\r\n")
+        responseHeaders.append("\r\n")
+
+        connection.send(content: responseHeaders.data(using: .utf8)!, completion: .contentProcessed({ _ in }))
+
+        Impl.sendRemainingFileRange(queue: queue, connection: connection, tempFile: file, managedFile: managedFile, remainingRange: fileRange, fileSize: Int(fileSize))
+    }
+
+    func registerPlayer(source: SharedHLSServerSource, completion: @escaping () -> Void) -> Disposable {
         let queue = self.queue
         let index = self.sourceReferences.add(SourceReference(source: source))
+        self.updateNeedsListener()
+        completion()

         return ActionDisposable { [weak self] in
             queue.async {
@@ -638,6 +749,7 @@
                     return
                 }
                 self.sourceReferences.remove(index)
+                self.updateNeedsListener()
             }
         }
     }
@@ -655,11 +767,11 @@
         })
     }

-    public func registerPlayer(source: SharedHLSServerSource) -> Disposable {
+    public func registerPlayer(source: SharedHLSServerSource, completion: @escaping () -> Void) -> Disposable {
         let disposable = MetaDisposable()

         self.impl.with { impl in
-            disposable.set(impl.registerPlayer(source: source))
+            disposable.set(impl.registerPlayer(source: source, completion: completion))
         }

         return disposable
diff --git a/submodules/WebSearchUI/Sources/WebSearchVideoGalleryItem.swift b/submodules/WebSearchUI/Sources/WebSearchVideoGalleryItem.swift
index 142323c37d..d3696d5d0d 100644
--- a/submodules/WebSearchUI/Sources/WebSearchVideoGalleryItem.swift
+++ b/submodules/WebSearchUI/Sources/WebSearchVideoGalleryItem.swift
@@ -165,7 +165,7 @@ final class WebSearchVideoGalleryItemNode: ZoomableContentGalleryItemNode {
         let mediaManager = item.context.sharedContext.mediaManager

-        let videoNode = UniversalVideoNode(postbox: item.context.account.postbox, audioSession: mediaManager.audioSession, manager: mediaManager.universalVideoManager, decoration: GalleryVideoDecoration(), content: item.content, priority: .gallery)
+        let videoNode = UniversalVideoNode(accountId: item.context.account.id, postbox: item.context.account.postbox, audioSession: mediaManager.audioSession, manager: mediaManager.universalVideoManager, decoration: GalleryVideoDecoration(), content: item.content, priority: .gallery)
         let videoSize = CGSize(width: item.content.dimensions.width * 2.0, height: item.content.dimensions.height * 2.0)
         videoNode.updateLayout(size: videoSize, transition: .immediate)
         self.videoNode = videoNode
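
Note on the UniversalVideoContent changes above: makeContentNode(postbox:audioSession:) becomes makeContentNode(accountId:postbox:audioSession:), and UniversalVideoNode / OverlayUniversalVideoNode gain a matching accountId: parameter, presumably so the HLS-backed content node can identify the owning account when talking to the shared local server. A minimal sketch of an updated call site follows; it assumes a `context: AccountContext` and an existing `content` and `decoration` in scope (those variable names are placeholders, not part of the patch):

    // Call sites now thread the account record id through to the video node.
    let mediaManager = context.sharedContext.mediaManager
    let videoNode = UniversalVideoNode(
        accountId: context.account.id,            // new parameter introduced by this patch
        postbox: context.account.postbox,
        audioSession: mediaManager.audioSession,
        manager: mediaManager.universalVideoManager,
        decoration: decoration,
        content: content,
        priority: .gallery
    )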
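Note on the SharedHLSServer changes: the listener is no longer started unconditionally in init. registerPlayer(source:completion:) now calls updateNeedsListener(), which starts the NWListener only while at least one live source is registered and, via a one-shot shutdown timer, cancels it roughly a second after the last source goes away; a repeating reference-check timer appears to catch sources that were deallocated without being explicitly unregistered. The completion closure runs on the server's internal queue once the source has been added. A usage sketch, assuming `source` conforms to the revised SharedHLSServerSource protocol (the onReady callback name is illustrative only):

    // Registration now drives the listener lifecycle; keep the disposable alive while playing.
    let serverDisposable = SharedHLSServer.shared.registerPlayer(source: source, completion: {
        // Runs after the source is registered and the listener has been asked to start;
        // from here it should be safe to begin issuing playlist requests.
        onReady()
    })

    // Later, when playback ends:
    serverDisposable.dispose()   // removes the reference; the listener shuts down about 1s after the last source is gone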
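Note on the new file-based range responses: fileData(id:range:) now yields a TempBoxFile plus the byte range inside that file instead of an in-memory Data blob, and sendResponseFileAndClose streams it through ManagedFile in 256 KB blocks. Each block is sent only after the previous send's completion fires, so the recursion in sendRemainingFileRange doubles as backpressure, and the temp file is disposed on error or after the final block. An illustrative split for a 600,000-byte range under the patch's block size (numbers are an example, not from the patch):

    // blockSize = 256 * 1024 = 262_144 bytes per send
    // send 1: bytes 0 ..< 262_144
    // send 2: bytes 262_144 ..< 524_288
    // send 3: bytes 524_288 ..< 600_000 (75_712 bytes); the remaining range is then empty,
    //         so the temp file is disposed and the connection is cancelled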