mirror of
https://github.com/Swiftgram/Telegram-iOS.git
synced 2025-10-09 03:20:48 +00:00
Merge branch 'master' of gitlab.com:peter-iakovlev/telegram-ios
This commit is contained in:
commit
75e9a0adba
4
.gitignore
vendored
4
.gitignore
vendored
@ -65,3 +65,7 @@ xcodeproj.bazelrc
|
||||
build-input/*
|
||||
**/*.pyc
|
||||
*.pyc
|
||||
submodules/OpusBinding/SharedHeaders/*
|
||||
submodules/FFMpegBinding/SharedHeaders/*
|
||||
submodules/OpenSSLEncryptionProvider/SharedHeaders/*
|
||||
|
||||
|
@ -202,6 +202,7 @@ public protocol UniversalVideoManager: AnyObject {
|
||||
func removePlaybackCompleted(id: AnyHashable, index: Int)
|
||||
func statusSignal(content: UniversalVideoContent) -> Signal<MediaPlayerStatus?, NoError>
|
||||
func bufferingStatusSignal(content: UniversalVideoContent) -> Signal<(RangeSet<Int64>, Int64)?, NoError>
|
||||
func isNativePictureInPictureActiveSignal(content: UniversalVideoContent) -> Signal<Bool, NoError>
|
||||
}
|
||||
|
||||
public enum AudioRecordingState: Equatable {
|
||||
|
@ -19,6 +19,7 @@ public protocol UniversalVideoContentNode: AnyObject {
|
||||
var ready: Signal<Void, NoError> { get }
|
||||
var status: Signal<MediaPlayerStatus, NoError> { get }
|
||||
var bufferingStatus: Signal<(RangeSet<Int64>, Int64)?, NoError> { get }
|
||||
var isNativePictureInPictureActive: Signal<Bool, NoError> { get }
|
||||
|
||||
func updateLayout(size: CGSize, actualSize: CGSize, transition: ContainedViewLayoutTransition)
|
||||
|
||||
@ -41,6 +42,8 @@ public protocol UniversalVideoContentNode: AnyObject {
|
||||
func fetchControl(_ control: UniversalVideoNodeFetchControl)
|
||||
func notifyPlaybackControlsHidden(_ hidden: Bool)
|
||||
func setCanPlaybackWithoutHierarchy(_ canPlaybackWithoutHierarchy: Bool)
|
||||
func enterNativePictureInPicture() -> Bool
|
||||
func exitNativePictureInPicture()
|
||||
}
|
||||
|
||||
public protocol UniversalVideoContent {
|
||||
@ -100,7 +103,7 @@ public final class UniversalVideoNode: ASDisplayNode {
|
||||
private let autoplay: Bool
|
||||
private let snapshotContentWhenGone: Bool
|
||||
|
||||
private var contentNode: (UniversalVideoContentNode & ASDisplayNode)?
|
||||
private(set) var contentNode: (UniversalVideoContentNode & ASDisplayNode)?
|
||||
private var contentNodeId: Int32?
|
||||
|
||||
private var playbackCompletedIndex: Int?
|
||||
@ -125,6 +128,11 @@ public final class UniversalVideoNode: ASDisplayNode {
|
||||
return self._bufferingStatus.get()
|
||||
}
|
||||
|
||||
private let _isNativePictureInPictureActive = Promise<Bool>()
|
||||
public var isNativePictureInPictureActive: Signal<Bool, NoError> {
|
||||
return self._isNativePictureInPictureActive.get()
|
||||
}
|
||||
|
||||
private let _ready = Promise<Void>()
|
||||
public var ready: Signal<Void, NoError> {
|
||||
return self._ready.get()
|
||||
@ -181,6 +189,7 @@ public final class UniversalVideoNode: ASDisplayNode {
|
||||
|
||||
self._status.set(self.manager.statusSignal(content: self.content))
|
||||
self._bufferingStatus.set(self.manager.bufferingStatusSignal(content: self.content))
|
||||
self._isNativePictureInPictureActive.set(self.manager.isNativePictureInPictureActiveSignal(content: self.content))
|
||||
|
||||
self.decoration.setStatus(self.status)
|
||||
|
||||
@ -418,4 +427,22 @@ public final class UniversalVideoNode: ASDisplayNode {
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
public func enterNativePictureInPicture() -> Bool {
|
||||
var result = false
|
||||
self.manager.withUniversalVideoContent(id: self.content.id, { contentNode in
|
||||
if let contentNode = contentNode {
|
||||
result = contentNode.enterNativePictureInPicture()
|
||||
}
|
||||
})
|
||||
return result
|
||||
}
|
||||
|
||||
public func exitNativePictureInPicture() {
|
||||
self.manager.withUniversalVideoContent(id: self.content.id, { contentNode in
|
||||
if let contentNode = contentNode {
|
||||
contentNode.exitNativePictureInPicture()
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
@ -67,7 +67,7 @@ private class AvatarNodeParameters: NSObject {
|
||||
}
|
||||
}
|
||||
|
||||
private func calculateColors(context: AccountContext?, explicitColorIndex: Int?, peerId: EnginePeer.Id?, nameColor: PeerNameColor?, icon: AvatarNodeIcon, theme: PresentationTheme?) -> [UIColor] {
|
||||
public func calculateAvatarColors(context: AccountContext?, explicitColorIndex: Int?, peerId: EnginePeer.Id?, nameColor: PeerNameColor?, icon: AvatarNodeIcon, theme: PresentationTheme?) -> [UIColor] {
|
||||
let colorIndex: Int
|
||||
if let explicitColorIndex = explicitColorIndex {
|
||||
colorIndex = explicitColorIndex
|
||||
@ -183,7 +183,7 @@ private func ==(lhs: AvatarNodeState, rhs: AvatarNodeState) -> Bool {
|
||||
}
|
||||
}
|
||||
|
||||
private enum AvatarNodeIcon: Equatable {
|
||||
public enum AvatarNodeIcon: Equatable {
|
||||
case none
|
||||
case savedMessagesIcon
|
||||
case repliesIcon
|
||||
@ -577,7 +577,7 @@ public final class AvatarNode: ASDisplayNode {
|
||||
self.editOverlayNode?.isHidden = true
|
||||
}
|
||||
|
||||
parameters = AvatarNodeParameters(theme: theme, accountPeerId: accountPeerId, peerId: peer.id, colors: calculateColors(context: nil, explicitColorIndex: nil, peerId: peer.id, nameColor: peer.nameColor, icon: icon, theme: theme), letters: peer.displayLetters, font: self.font, icon: icon, explicitColorIndex: nil, hasImage: true, clipStyle: clipStyle)
|
||||
parameters = AvatarNodeParameters(theme: theme, accountPeerId: accountPeerId, peerId: peer.id, colors: calculateAvatarColors(context: nil, explicitColorIndex: nil, peerId: peer.id, nameColor: peer.nameColor, icon: icon, theme: theme), letters: peer.displayLetters, font: self.font, icon: icon, explicitColorIndex: nil, hasImage: true, clipStyle: clipStyle)
|
||||
} else {
|
||||
self.imageReady.set(.single(true))
|
||||
self.displaySuspended = false
|
||||
@ -586,7 +586,7 @@ public final class AvatarNode: ASDisplayNode {
|
||||
}
|
||||
|
||||
self.editOverlayNode?.isHidden = true
|
||||
let colors = calculateColors(context: nil, explicitColorIndex: nil, peerId: peer?.id ?? EnginePeer.Id(0), nameColor: peer?.nameColor, icon: icon, theme: theme)
|
||||
let colors = calculateAvatarColors(context: nil, explicitColorIndex: nil, peerId: peer?.id ?? EnginePeer.Id(0), nameColor: peer?.nameColor, icon: icon, theme: theme)
|
||||
parameters = AvatarNodeParameters(theme: theme, accountPeerId: accountPeerId, peerId: peer?.id ?? EnginePeer.Id(0), colors: colors, letters: peer?.displayLetters ?? [], font: self.font, icon: icon, explicitColorIndex: nil, hasImage: false, clipStyle: clipStyle)
|
||||
|
||||
if let badgeView = self.badgeView {
|
||||
@ -754,7 +754,7 @@ public final class AvatarNode: ASDisplayNode {
|
||||
self.editOverlayNode?.isHidden = true
|
||||
}
|
||||
|
||||
parameters = AvatarNodeParameters(theme: theme, accountPeerId: account.peerId, peerId: peer.id, colors: calculateColors(context: genericContext, explicitColorIndex: nil, peerId: peer.id, nameColor: peer.nameColor, icon: icon, theme: theme), letters: peer.displayLetters, font: self.font, icon: icon, explicitColorIndex: nil, hasImage: true, clipStyle: clipStyle)
|
||||
parameters = AvatarNodeParameters(theme: theme, accountPeerId: account.peerId, peerId: peer.id, colors: calculateAvatarColors(context: genericContext, explicitColorIndex: nil, peerId: peer.id, nameColor: peer.nameColor, icon: icon, theme: theme), letters: peer.displayLetters, font: self.font, icon: icon, explicitColorIndex: nil, hasImage: true, clipStyle: clipStyle)
|
||||
} else {
|
||||
self.imageReady.set(.single(true))
|
||||
self.displaySuspended = false
|
||||
@ -763,7 +763,7 @@ public final class AvatarNode: ASDisplayNode {
|
||||
}
|
||||
|
||||
self.editOverlayNode?.isHidden = true
|
||||
let colors = calculateColors(context: genericContext, explicitColorIndex: nil, peerId: peer?.id ?? EnginePeer.Id(0), nameColor: peer?.nameColor, icon: icon, theme: theme)
|
||||
let colors = calculateAvatarColors(context: genericContext, explicitColorIndex: nil, peerId: peer?.id ?? EnginePeer.Id(0), nameColor: peer?.nameColor, icon: icon, theme: theme)
|
||||
parameters = AvatarNodeParameters(theme: theme, accountPeerId: account.peerId, peerId: peer?.id ?? EnginePeer.Id(0), colors: colors, letters: peer?.displayLetters ?? [], font: self.font, icon: icon, explicitColorIndex: nil, hasImage: false, clipStyle: clipStyle)
|
||||
|
||||
if let badgeView = self.badgeView {
|
||||
@ -800,9 +800,9 @@ public final class AvatarNode: ASDisplayNode {
|
||||
|
||||
let parameters: AvatarNodeParameters
|
||||
if let icon = icon, case .phone = icon {
|
||||
parameters = AvatarNodeParameters(theme: nil, accountPeerId: nil, peerId: nil, colors: calculateColors(context: nil, explicitColorIndex: explicitIndex, peerId: nil, nameColor: nil, icon: .phoneIcon, theme: nil), letters: [], font: self.font, icon: .phoneIcon, explicitColorIndex: explicitIndex, hasImage: false, clipStyle: .round)
|
||||
parameters = AvatarNodeParameters(theme: nil, accountPeerId: nil, peerId: nil, colors: calculateAvatarColors(context: nil, explicitColorIndex: explicitIndex, peerId: nil, nameColor: nil, icon: .phoneIcon, theme: nil), letters: [], font: self.font, icon: .phoneIcon, explicitColorIndex: explicitIndex, hasImage: false, clipStyle: .round)
|
||||
} else {
|
||||
parameters = AvatarNodeParameters(theme: nil, accountPeerId: nil, peerId: nil, colors: calculateColors(context: nil, explicitColorIndex: explicitIndex, peerId: nil, nameColor: nil, icon: .none, theme: nil), letters: letters, font: self.font, icon: .none, explicitColorIndex: explicitIndex, hasImage: false, clipStyle: .round)
|
||||
parameters = AvatarNodeParameters(theme: nil, accountPeerId: nil, peerId: nil, colors: calculateAvatarColors(context: nil, explicitColorIndex: explicitIndex, peerId: nil, nameColor: nil, icon: .none, theme: nil), letters: letters, font: self.font, icon: .none, explicitColorIndex: explicitIndex, hasImage: false, clipStyle: .round)
|
||||
}
|
||||
|
||||
self.displaySuspended = true
|
||||
|
@ -8,11 +8,15 @@ public final class BundleIconComponent: Component {
|
||||
public let name: String
|
||||
public let tintColor: UIColor?
|
||||
public let maxSize: CGSize?
|
||||
public let shadowColor: UIColor?
|
||||
public let shadowBlur: CGFloat
|
||||
|
||||
public init(name: String, tintColor: UIColor?, maxSize: CGSize? = nil) {
|
||||
public init(name: String, tintColor: UIColor?, maxSize: CGSize? = nil, shadowColor: UIColor? = nil, shadowBlur: CGFloat = 0.0) {
|
||||
self.name = name
|
||||
self.tintColor = tintColor
|
||||
self.maxSize = maxSize
|
||||
self.shadowColor = shadowColor
|
||||
self.shadowBlur = shadowBlur
|
||||
}
|
||||
|
||||
public static func ==(lhs: BundleIconComponent, rhs: BundleIconComponent) -> Bool {
|
||||
@ -25,6 +29,12 @@ public final class BundleIconComponent: Component {
|
||||
if lhs.maxSize != rhs.maxSize {
|
||||
return false
|
||||
}
|
||||
if lhs.shadowColor != rhs.shadowColor {
|
||||
return false
|
||||
}
|
||||
if lhs.shadowBlur != rhs.shadowBlur {
|
||||
return false
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
@ -40,12 +50,24 @@ public final class BundleIconComponent: Component {
|
||||
}
|
||||
|
||||
func update(component: BundleIconComponent, availableSize: CGSize, transition: ComponentTransition) -> CGSize {
|
||||
if self.component?.name != component.name || self.component?.tintColor != component.tintColor {
|
||||
if self.component?.name != component.name || self.component?.tintColor != component.tintColor || self.component?.shadowColor != component.shadowColor || self.component?.shadowBlur != component.shadowBlur {
|
||||
var image: UIImage?
|
||||
if let tintColor = component.tintColor {
|
||||
self.image = generateTintedImage(image: UIImage(bundleImageName: component.name), color: tintColor, backgroundColor: nil)
|
||||
image = generateTintedImage(image: UIImage(bundleImageName: component.name), color: tintColor, backgroundColor: nil)
|
||||
} else {
|
||||
self.image = UIImage(bundleImageName: component.name)
|
||||
image = UIImage(bundleImageName: component.name)
|
||||
}
|
||||
if let imageValue = image, let shadowColor = component.shadowColor, component.shadowBlur != 0.0 {
|
||||
image = generateImage(CGSize(width: imageValue.size.width + component.shadowBlur * 2.0, height: imageValue.size.height + component.shadowBlur * 2.0), contextGenerator: { size, context in
|
||||
context.clear(CGRect(origin: CGPoint(), size: size))
|
||||
context.setShadow(offset: CGSize(), blur: component.shadowBlur, color: shadowColor.cgColor)
|
||||
|
||||
if let cgImage = imageValue.cgImage {
|
||||
context.draw(cgImage, in: CGRect(origin: CGPoint(x: component.shadowBlur, y: component.shadowBlur), size: imageValue.size))
|
||||
}
|
||||
})
|
||||
}
|
||||
self.image = image
|
||||
}
|
||||
self.component = component
|
||||
|
||||
|
@ -27,7 +27,7 @@ let package = Package(
|
||||
publicHeadersPath: "Public",
|
||||
cSettings: [
|
||||
.headerSearchPath("Public"),
|
||||
.unsafeFlags(["-I../../../../core-xprojects/ffmpeg/build/ffmpeg/include"])
|
||||
.headerSearchPath("SharedHeaders/ffmpeg/include"),
|
||||
]),
|
||||
]
|
||||
)
|
||||
|
@ -604,6 +604,8 @@ private final class PictureInPictureContentImpl: NSObject, PictureInPictureConte
|
||||
private var hiddenMediaManagerIndex: Int?
|
||||
|
||||
private var messageRemovedDisposable: Disposable?
|
||||
|
||||
private var isNativePictureInPictureActiveDisposable: Disposable?
|
||||
|
||||
init(context: AccountContext, overlayController: OverlayMediaController, mediaManager: MediaManager, accountId: AccountRecordId, hiddenMedia: (MessageId, Media)?, videoNode: UniversalVideoNode, canSkip: Bool, willBegin: @escaping (PictureInPictureContentImpl) -> Void, didEnd: @escaping (PictureInPictureContentImpl) -> Void, expand: @escaping (@escaping () -> Void) -> Void) {
|
||||
self.overlayController = overlayController
|
||||
@ -617,30 +619,84 @@ private final class PictureInPictureContentImpl: NSObject, PictureInPictureConte
|
||||
|
||||
super.init()
|
||||
|
||||
let contentDelegate = PlaybackDelegate(node: self.node)
|
||||
self.contentDelegate = contentDelegate
|
||||
if let videoLayer = videoNode.getVideoLayer() {
|
||||
let contentDelegate = PlaybackDelegate(node: self.node)
|
||||
self.contentDelegate = contentDelegate
|
||||
|
||||
let pictureInPictureController = AVPictureInPictureController(contentSource: AVPictureInPictureController.ContentSource(sampleBufferDisplayLayer: videoLayer, playbackDelegate: contentDelegate))
|
||||
self.pictureInPictureController = pictureInPictureController
|
||||
contentDelegate.pictureInPictureController = pictureInPictureController
|
||||
|
||||
pictureInPictureController.canStartPictureInPictureAutomaticallyFromInline = false
|
||||
pictureInPictureController.requiresLinearPlayback = !canSkip
|
||||
pictureInPictureController.delegate = self
|
||||
self.pictureInPictureController = pictureInPictureController
|
||||
let timer = SwiftSignalKit.Timer(timeout: 0.005, repeat: true, completion: { [weak self] in
|
||||
guard let strongSelf = self, let pictureInPictureController = strongSelf.pictureInPictureController else {
|
||||
return
|
||||
}
|
||||
if pictureInPictureController.isPictureInPicturePossible {
|
||||
strongSelf.pictureInPictureTimer?.invalidate()
|
||||
strongSelf.pictureInPictureTimer = nil
|
||||
|
||||
pictureInPictureController.startPictureInPicture()
|
||||
}
|
||||
}, queue: .mainQueue())
|
||||
self.pictureInPictureTimer = timer
|
||||
timer.start()
|
||||
} else {
|
||||
var currentIsNativePictureInPictureActive = false
|
||||
self.isNativePictureInPictureActiveDisposable = (videoNode.isNativePictureInPictureActive
|
||||
|> deliverOnMainQueue).startStrict(next: { [weak self] isNativePictureInPictureActive in
|
||||
guard let self else {
|
||||
return
|
||||
}
|
||||
|
||||
if currentIsNativePictureInPictureActive == isNativePictureInPictureActive {
|
||||
return
|
||||
}
|
||||
currentIsNativePictureInPictureActive = isNativePictureInPictureActive
|
||||
|
||||
if isNativePictureInPictureActive {
|
||||
Queue.mainQueue().after(0.0, { [weak self] in
|
||||
guard let self else {
|
||||
return
|
||||
}
|
||||
self.willBegin(self)
|
||||
|
||||
if let overlayController = self.overlayController {
|
||||
overlayController.setPictureInPictureContentHidden(content: self, isHidden: true)
|
||||
}
|
||||
|
||||
self.didEnd(self)
|
||||
})
|
||||
} else {
|
||||
self.expand { [weak self] in
|
||||
guard let self else {
|
||||
return
|
||||
}
|
||||
|
||||
let pictureInPictureController = AVPictureInPictureController(contentSource: AVPictureInPictureController.ContentSource(sampleBufferDisplayLayer: videoNode.getVideoLayer()!, playbackDelegate: contentDelegate))
|
||||
self.pictureInPictureController = pictureInPictureController
|
||||
contentDelegate.pictureInPictureController = pictureInPictureController
|
||||
|
||||
pictureInPictureController.canStartPictureInPictureAutomaticallyFromInline = false
|
||||
pictureInPictureController.requiresLinearPlayback = !canSkip
|
||||
pictureInPictureController.delegate = self
|
||||
self.pictureInPictureController = pictureInPictureController
|
||||
let timer = SwiftSignalKit.Timer(timeout: 0.005, repeat: true, completion: { [weak self] in
|
||||
guard let strongSelf = self, let pictureInPictureController = strongSelf.pictureInPictureController else {
|
||||
return
|
||||
}
|
||||
if pictureInPictureController.isPictureInPicturePossible {
|
||||
strongSelf.pictureInPictureTimer?.invalidate()
|
||||
strongSelf.pictureInPictureTimer = nil
|
||||
self.didExpand = true
|
||||
|
||||
pictureInPictureController.startPictureInPicture()
|
||||
}
|
||||
}, queue: .mainQueue())
|
||||
self.pictureInPictureTimer = timer
|
||||
timer.start()
|
||||
if let overlayController = self.overlayController {
|
||||
overlayController.setPictureInPictureContentHidden(content: self, isHidden: false)
|
||||
self.node.alpha = 0.02
|
||||
}
|
||||
|
||||
guard let overlayController = self.overlayController else {
|
||||
return
|
||||
}
|
||||
overlayController.removePictureInPictureContent(content: self)
|
||||
self.node.canAttachContent = false
|
||||
if self.didExpand {
|
||||
return
|
||||
}
|
||||
self.node.continuePlayingWithoutSound()
|
||||
}
|
||||
}
|
||||
})
|
||||
let _ = videoNode.enterNativePictureInPicture()
|
||||
}
|
||||
|
||||
if let hiddenMedia = hiddenMedia {
|
||||
self.hiddenMediaManagerIndex = mediaManager.galleryHiddenMediaManager.addSource(Signal<(MessageId, Media)?, NoError>.single(hiddenMedia)
|
||||
@ -676,6 +732,7 @@ private final class PictureInPictureContentImpl: NSObject, PictureInPictureConte
|
||||
|
||||
deinit {
|
||||
self.messageRemovedDisposable?.dispose()
|
||||
self.isNativePictureInPictureActiveDisposable?.dispose()
|
||||
self.pictureInPictureTimer?.invalidate()
|
||||
self.node.setCanPlaybackWithoutHierarchy(false)
|
||||
|
||||
@ -743,10 +800,6 @@ private final class PictureInPictureContentImpl: NSObject, PictureInPictureConte
|
||||
}
|
||||
|
||||
completionHandler(true)
|
||||
|
||||
/*Queue.mainQueue().after(0.2, {
|
||||
self?.node.canAttachContent = false
|
||||
})*/
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -1295,7 +1348,7 @@ final class UniversalVideoGalleryItemNode: ZoomableContentGalleryItemNode {
|
||||
}
|
||||
}
|
||||
|
||||
self.moreButtonStateDisposable.set(combineLatest(queue: .mainQueue(),
|
||||
/*self.moreButtonStateDisposable.set(combineLatest(queue: .mainQueue(),
|
||||
self.playbackRatePromise.get(),
|
||||
self.isShowingContextMenuPromise.get()
|
||||
).start(next: { [weak self] playbackRate, isShowingContextMenu in
|
||||
@ -1334,7 +1387,7 @@ final class UniversalVideoGalleryItemNode: ZoomableContentGalleryItemNode {
|
||||
strongSelf.moreBarButtonRateTimestamp = CFAbsoluteTimeGetCurrent()
|
||||
}
|
||||
}
|
||||
}))
|
||||
}))*/
|
||||
|
||||
self.statusDisposable.set((combineLatest(queue: .mainQueue(), videoNode.status, mediaFileStatus)
|
||||
|> deliverOnMainQueue).start(next: { [weak self] value, fetchStatus in
|
||||
@ -2306,6 +2359,7 @@ final class UniversalVideoGalleryItemNode: ZoomableContentGalleryItemNode {
|
||||
let playbackRate = self.playbackRate
|
||||
|
||||
if #available(iOSApplicationExtension 15.0, iOS 15.0, *), AVPictureInPictureController.isPictureInPictureSupported(), isNativePictureInPictureSupported {
|
||||
|
||||
self.disablePictureInPicturePlaceholder = true
|
||||
|
||||
let overlayVideoNode = UniversalVideoNode(accountId: self.context.account.id, postbox: self.context.account.postbox, audioSession: self.context.sharedContext.mediaManager.audioSession, manager: self.context.sharedContext.mediaManager.universalVideoManager, decoration: GalleryVideoDecoration(), content: item.content, priority: .overlay)
|
||||
@ -2838,7 +2892,7 @@ final class UniversalVideoGalleryItemNode: ZoomableContentGalleryItemNode {
|
||||
}
|
||||
}
|
||||
|
||||
if let (message, maybeFile, _) = strongSelf.contentInfo(), let file = maybeFile, !message.isCopyProtected() && !item.peerIsCopyProtected && message.paidContent == nil {
|
||||
if let (message, maybeFile, _) = strongSelf.contentInfo(), let file = maybeFile, !message.isCopyProtected() && !item.peerIsCopyProtected && message.paidContent == nil && !(item.content is HLSVideoContent) {
|
||||
items.append(.action(ContextMenuActionItem(text: strongSelf.presentationData.strings.Gallery_SaveVideo, icon: { theme in generateTintedImage(image: UIImage(bundleImageName: "Chat/Context Menu/Download"), color: theme.actionSheet.primaryTextColor) }, action: { _, f in
|
||||
f(.default)
|
||||
|
||||
|
@ -23,10 +23,8 @@ let package = Package(
|
||||
publicHeadersPath: "PublicHeaders",
|
||||
cSettings: [
|
||||
.headerSearchPath("PublicHeaders"),
|
||||
.unsafeFlags([
|
||||
"-I../../../../core-xprojects/openssl/build/openssl/include",
|
||||
"-I../EncryptionProvider/PublicHeaders"
|
||||
])
|
||||
.headerSearchPath("SharedHeaders/openssl/include"),
|
||||
.headerSearchPath("SharedHeaders/EncryptionProvider"),
|
||||
]),
|
||||
]
|
||||
)
|
||||
|
2
submodules/OpusBinding/Package.swift
vendored
2
submodules/OpusBinding/Package.swift
vendored
@ -28,8 +28,8 @@ let package = Package(
|
||||
cSettings: [
|
||||
.headerSearchPath("PublicHeaders"),
|
||||
.headerSearchPath("PublicHeaders/OpusBinding"),
|
||||
.headerSearchPath("SharedHeaders/libopus/include"),
|
||||
.headerSearchPath("Sources"),
|
||||
.unsafeFlags(["-I../../../../core-xprojects/libopus/build/libopus/include"])
|
||||
]),
|
||||
]
|
||||
)
|
||||
|
@ -36,13 +36,13 @@ final class VideoChatActionButtonComponent: Component {
|
||||
case leave
|
||||
}
|
||||
|
||||
case audio(audio: Audio)
|
||||
case audio(audio: Audio, isEnabled: Bool)
|
||||
case video(isActive: Bool)
|
||||
case leave
|
||||
|
||||
fileprivate var iconType: IconType {
|
||||
switch self {
|
||||
case let .audio(audio):
|
||||
case let .audio(audio, _):
|
||||
let mappedAudio: IconType.Audio
|
||||
switch audio {
|
||||
case .none, .builtin, .speaker:
|
||||
@ -136,14 +136,16 @@ final class VideoChatActionButtonComponent: Component {
|
||||
let titleText: String
|
||||
let backgroundColor: UIColor
|
||||
let iconDiameter: CGFloat
|
||||
var isEnabled: Bool = true
|
||||
switch component.content {
|
||||
case let .audio(audio):
|
||||
case let .audio(audio, isEnabledValue):
|
||||
var isActive = false
|
||||
switch audio {
|
||||
case .none, .builtin:
|
||||
titleText = component.strings.Call_Speaker
|
||||
case .speaker:
|
||||
isActive = true
|
||||
isEnabled = isEnabledValue
|
||||
isActive = isEnabledValue
|
||||
titleText = component.strings.Call_Speaker
|
||||
case .headphones:
|
||||
titleText = component.strings.Call_Audio
|
||||
@ -276,8 +278,11 @@ final class VideoChatActionButtonComponent: Component {
|
||||
self.addSubview(iconView)
|
||||
}
|
||||
transition.setFrame(view: iconView, frame: iconFrame)
|
||||
transition.setAlpha(view: iconView, alpha: isEnabled ? 1.0 : 0.6)
|
||||
}
|
||||
|
||||
self.isEnabled = isEnabled
|
||||
|
||||
return size
|
||||
}
|
||||
}
|
||||
|
@ -16,13 +16,19 @@ final class VideoChatMuteIconComponent: Component {
|
||||
|
||||
let color: UIColor
|
||||
let content: Content
|
||||
let shadowColor: UIColor?
|
||||
let shadowBlur: CGFloat
|
||||
|
||||
init(
|
||||
color: UIColor,
|
||||
content: Content
|
||||
content: Content,
|
||||
shadowColor: UIColor? = nil,
|
||||
shadowBlur: CGFloat = 0.0
|
||||
) {
|
||||
self.color = color
|
||||
self.content = content
|
||||
self.shadowColor = shadowColor
|
||||
self.shadowBlur = shadowBlur
|
||||
}
|
||||
|
||||
static func ==(lhs: VideoChatMuteIconComponent, rhs: VideoChatMuteIconComponent) -> Bool {
|
||||
@ -32,6 +38,12 @@ final class VideoChatMuteIconComponent: Component {
|
||||
if lhs.content != rhs.content {
|
||||
return false
|
||||
}
|
||||
if lhs.shadowColor != rhs.shadowColor {
|
||||
return false
|
||||
}
|
||||
if lhs.shadowBlur != rhs.shadowBlur {
|
||||
return false
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
@ -75,9 +87,9 @@ final class VideoChatMuteIconComponent: Component {
|
||||
}
|
||||
|
||||
let animationSize = availableSize
|
||||
let animationFrame = animationSize.centered(in: CGRect(origin: CGPoint(), size: availableSize))
|
||||
let animationFrame = animationSize.centered(in: CGRect(origin: CGPoint(), size: availableSize)).insetBy(dx: -component.shadowBlur, dy: -component.shadowBlur)
|
||||
transition.setFrame(view: icon.view, frame: animationFrame)
|
||||
icon.update(state: VoiceChatMicrophoneNode.State(muted: isMuted, filled: isFilled, color: component.color), animated: !transition.animation.isImmediate)
|
||||
icon.update(state: VoiceChatMicrophoneNode.State(muted: isMuted, filled: isFilled, color: component.color, shadowColor: component.shadowColor, shadowBlur: component.shadowBlur), animated: !transition.animation.isImmediate)
|
||||
} else {
|
||||
if let icon = self.icon {
|
||||
self.icon = nil
|
||||
@ -97,7 +109,9 @@ final class VideoChatMuteIconComponent: Component {
|
||||
transition: transition,
|
||||
component: AnyComponent(BundleIconComponent(
|
||||
name: "Call/StatusScreen",
|
||||
tintColor: component.color
|
||||
tintColor: component.color,
|
||||
shadowColor: component.shadowColor,
|
||||
shadowBlur: component.shadowBlur
|
||||
)),
|
||||
environment: {},
|
||||
containerSize: availableSize
|
||||
|
@ -12,6 +12,9 @@ import AccountContext
|
||||
import SwiftSignalKit
|
||||
import DirectMediaImageCache
|
||||
import FastBlur
|
||||
import ContextUI
|
||||
import ComponentDisplayAdapters
|
||||
import AvatarNode
|
||||
|
||||
private func blurredAvatarImage(_ dataImage: UIImage) -> UIImage? {
|
||||
let imageContextSize = CGSize(width: 64.0, height: 64.0)
|
||||
@ -35,6 +38,7 @@ private let activityBorderImage: UIImage = {
|
||||
}()
|
||||
|
||||
final class VideoChatParticipantVideoComponent: Component {
|
||||
let theme: PresentationTheme
|
||||
let strings: PresentationStrings
|
||||
let call: PresentationGroupCall
|
||||
let participant: GroupCallParticipantsContext.Participant
|
||||
@ -47,8 +51,12 @@ final class VideoChatParticipantVideoComponent: Component {
|
||||
let controlInsets: UIEdgeInsets
|
||||
let interfaceOrientation: UIInterfaceOrientation
|
||||
let action: (() -> Void)?
|
||||
let contextAction: ((EnginePeer, ContextExtractedContentContainingView, ContextGesture) -> Void)?
|
||||
let activatePinch: ((PinchSourceContainerNode) -> Void)?
|
||||
let deactivatedPinch: (() -> Void)?
|
||||
|
||||
init(
|
||||
theme: PresentationTheme,
|
||||
strings: PresentationStrings,
|
||||
call: PresentationGroupCall,
|
||||
participant: GroupCallParticipantsContext.Participant,
|
||||
@ -60,8 +68,12 @@ final class VideoChatParticipantVideoComponent: Component {
|
||||
contentInsets: UIEdgeInsets,
|
||||
controlInsets: UIEdgeInsets,
|
||||
interfaceOrientation: UIInterfaceOrientation,
|
||||
action: (() -> Void)?
|
||||
action: (() -> Void)?,
|
||||
contextAction: ((EnginePeer, ContextExtractedContentContainingView, ContextGesture) -> Void)?,
|
||||
activatePinch: ((PinchSourceContainerNode) -> Void)?,
|
||||
deactivatedPinch: (() -> Void)?
|
||||
) {
|
||||
self.theme = theme
|
||||
self.strings = strings
|
||||
self.call = call
|
||||
self.participant = participant
|
||||
@ -74,6 +86,9 @@ final class VideoChatParticipantVideoComponent: Component {
|
||||
self.controlInsets = controlInsets
|
||||
self.interfaceOrientation = interfaceOrientation
|
||||
self.action = action
|
||||
self.contextAction = contextAction
|
||||
self.activatePinch = activatePinch
|
||||
self.deactivatedPinch = deactivatedPinch
|
||||
}
|
||||
|
||||
static func ==(lhs: VideoChatParticipantVideoComponent, rhs: VideoChatParticipantVideoComponent) -> Bool {
|
||||
@ -107,6 +122,15 @@ final class VideoChatParticipantVideoComponent: Component {
|
||||
if (lhs.action == nil) != (rhs.action == nil) {
|
||||
return false
|
||||
}
|
||||
if (lhs.contextAction == nil) != (rhs.contextAction == nil) {
|
||||
return false
|
||||
}
|
||||
if (lhs.activatePinch == nil) != (rhs.activatePinch == nil) {
|
||||
return false
|
||||
}
|
||||
if (lhs.deactivatedPinch == nil) != (rhs.deactivatedPinch == nil) {
|
||||
return false
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
@ -144,7 +168,7 @@ final class VideoChatParticipantVideoComponent: Component {
|
||||
}
|
||||
}
|
||||
|
||||
final class View: HighlightTrackingButton {
|
||||
final class View: ContextControllerSourceView {
|
||||
private var component: VideoChatParticipantVideoComponent?
|
||||
private weak var componentState: EmptyComponentState?
|
||||
private var isUpdating: Bool = false
|
||||
@ -158,6 +182,8 @@ final class VideoChatParticipantVideoComponent: Component {
|
||||
private var blurredAvatarDisposable: Disposable?
|
||||
private var blurredAvatarView: UIImageView?
|
||||
|
||||
private let pinchContainerNode: PinchSourceContainerNode
|
||||
private let extractedContainerView: ContextExtractedContentContainingView
|
||||
private var videoSource: AdaptedCallVideoSource?
|
||||
private var videoDisposable: Disposable?
|
||||
private var videoBackgroundLayer: SimpleLayer?
|
||||
@ -173,16 +199,44 @@ final class VideoChatParticipantVideoComponent: Component {
|
||||
|
||||
override init(frame: CGRect) {
|
||||
self.backgroundGradientView = UIImageView()
|
||||
self.pinchContainerNode = PinchSourceContainerNode()
|
||||
self.extractedContainerView = ContextExtractedContentContainingView()
|
||||
|
||||
super.init(frame: frame)
|
||||
|
||||
self.addSubview(self.backgroundGradientView)
|
||||
self.addSubview(self.extractedContainerView)
|
||||
self.targetViewForActivationProgress = self.extractedContainerView
|
||||
|
||||
self.extractedContainerView.contentView.addSubview(self.pinchContainerNode.view)
|
||||
self.pinchContainerNode.contentNode.view.addSubview(self.backgroundGradientView)
|
||||
|
||||
//TODO:release optimize
|
||||
self.clipsToBounds = true
|
||||
self.layer.cornerRadius = 10.0
|
||||
self.pinchContainerNode.contentNode.view.layer.cornerRadius = 10.0
|
||||
self.pinchContainerNode.contentNode.view.clipsToBounds = true
|
||||
|
||||
self.addTarget(self, action: #selector(self.pressed), for: .touchUpInside)
|
||||
self.addGestureRecognizer(UITapGestureRecognizer(target: self, action: #selector(self.tapGesture(_:))))
|
||||
|
||||
self.pinchContainerNode.activate = { [weak self] sourceNode in
|
||||
guard let self, let component = self.component else {
|
||||
return
|
||||
}
|
||||
component.activatePinch?(sourceNode)
|
||||
}
|
||||
self.pinchContainerNode.animatedOut = { [weak self] in
|
||||
guard let self, let component = self.component else {
|
||||
return
|
||||
}
|
||||
|
||||
component.deactivatedPinch?()
|
||||
}
|
||||
|
||||
self.activated = { [weak self] gesture, _ in
|
||||
guard let self, let component = self.component else {
|
||||
gesture.cancel()
|
||||
return
|
||||
}
|
||||
component.contextAction?(EnginePeer(component.participant.peer), self.extractedContainerView, gesture)
|
||||
}
|
||||
}
|
||||
|
||||
required init?(coder: NSCoder) {
|
||||
@ -194,11 +248,13 @@ final class VideoChatParticipantVideoComponent: Component {
|
||||
self.blurredAvatarDisposable?.dispose()
|
||||
}
|
||||
|
||||
@objc private func pressed() {
|
||||
guard let component = self.component, let action = component.action else {
|
||||
return
|
||||
@objc private func tapGesture(_ recognizer: UITapGestureRecognizer) {
|
||||
if case .ended = recognizer.state {
|
||||
guard let component = self.component, let action = component.action else {
|
||||
return
|
||||
}
|
||||
action()
|
||||
}
|
||||
action()
|
||||
}
|
||||
|
||||
func update(component: VideoChatParticipantVideoComponent, availableSize: CGSize, state: EmptyComponentState, environment: Environment<Empty>, transition: ComponentTransition) -> CGSize {
|
||||
@ -211,6 +267,19 @@ final class VideoChatParticipantVideoComponent: Component {
|
||||
self.component = component
|
||||
self.componentState = state
|
||||
|
||||
self.isGestureEnabled = !component.isExpanded
|
||||
|
||||
self.pinchContainerNode.isPinchGestureEnabled = component.activatePinch != nil
|
||||
transition.setPosition(view: self.pinchContainerNode.view, position: CGRect(origin: CGPoint(), size: availableSize).center)
|
||||
transition.setBounds(view: self.pinchContainerNode.view, bounds: CGRect(origin: CGPoint(), size: availableSize))
|
||||
self.pinchContainerNode.update(size: availableSize, transition: transition.containedViewLayoutTransition)
|
||||
|
||||
transition.setPosition(view: self.extractedContainerView, position: CGRect(origin: CGPoint(), size: availableSize).center)
|
||||
transition.setBounds(view: self.extractedContainerView, bounds: CGRect(origin: CGPoint(), size: availableSize))
|
||||
self.extractedContainerView.contentRect = CGRect(origin: CGPoint(), size: availableSize)
|
||||
|
||||
transition.setFrame(view: self.pinchContainerNode.contentNode.view, frame: CGRect(origin: CGPoint(), size: availableSize))
|
||||
|
||||
transition.setFrame(view: self.backgroundGradientView, frame: CGRect(origin: CGPoint(), size: availableSize))
|
||||
|
||||
let alphaTransition: ComponentTransition
|
||||
@ -229,14 +298,10 @@ final class VideoChatParticipantVideoComponent: Component {
|
||||
|
||||
let controlsAlpha: CGFloat = component.isUIHidden ? 0.0 : 1.0
|
||||
|
||||
let nameColor = component.participant.peer.nameColor ?? .blue
|
||||
let nameColors = component.call.accountContext.peerNameColors.get(nameColor, dark: true)
|
||||
|
||||
if previousComponent == nil {
|
||||
self.backgroundGradientView.image = generateGradientImage(size: CGSize(width: 8.0, height: 32.0), colors: [
|
||||
nameColors.main.withMultiplied(hue: 1.0, saturation: 1.1, brightness: 1.3),
|
||||
nameColors.main.withMultiplied(hue: 1.0, saturation: 1.2, brightness: 1.0)
|
||||
], locations: [0.0, 1.0], direction: .vertical)
|
||||
let colors = calculateAvatarColors(context: component.call.accountContext, explicitColorIndex: nil, peerId: component.participant.peer.id, nameColor: component.participant.peer.nameColor, icon: .none, theme: component.theme)
|
||||
|
||||
self.backgroundGradientView.image = generateGradientImage(size: CGSize(width: 8.0, height: 32.0), colors: colors.reversed(), locations: [0.0, 1.0], direction: .vertical)
|
||||
}
|
||||
|
||||
if let smallProfileImage = component.participant.peer.smallProfileImage {
|
||||
@ -249,7 +314,7 @@ final class VideoChatParticipantVideoComponent: Component {
|
||||
blurredAvatarView = UIImageView()
|
||||
blurredAvatarView.contentMode = .scaleAspectFill
|
||||
self.blurredAvatarView = blurredAvatarView
|
||||
self.insertSubview(blurredAvatarView, aboveSubview: self.backgroundGradientView)
|
||||
self.pinchContainerNode.contentNode.view.insertSubview(blurredAvatarView, aboveSubview: self.backgroundGradientView)
|
||||
|
||||
blurredAvatarView.frame = CGRect(origin: CGPoint(), size: availableSize)
|
||||
}
|
||||
@ -292,7 +357,9 @@ final class VideoChatParticipantVideoComponent: Component {
|
||||
transition: transition,
|
||||
component: AnyComponent(VideoChatMuteIconComponent(
|
||||
color: .white,
|
||||
content: component.isPresentation ? .screenshare : .mute(isFilled: true, isMuted: component.participant.muteState != nil && !component.isSpeaking)
|
||||
content: component.isPresentation ? .screenshare : .mute(isFilled: true, isMuted: component.participant.muteState != nil && !component.isSpeaking),
|
||||
shadowColor: UIColor(white: 0.0, alpha: 0.7),
|
||||
shadowBlur: 8.0
|
||||
)),
|
||||
environment: {},
|
||||
containerSize: CGSize(width: 36.0, height: 36.0)
|
||||
@ -305,14 +372,8 @@ final class VideoChatParticipantVideoComponent: Component {
|
||||
}
|
||||
if let muteStatusView = self.muteStatus.view {
|
||||
if muteStatusView.superview == nil {
|
||||
self.addSubview(muteStatusView)
|
||||
self.pinchContainerNode.contentNode.view.addSubview(muteStatusView)
|
||||
muteStatusView.alpha = controlsAlpha
|
||||
|
||||
//TODO:release
|
||||
muteStatusView.layer.shadowOpacity = 0.7
|
||||
muteStatusView.layer.shadowColor = UIColor(white: 0.0, alpha: 1.0).cgColor
|
||||
muteStatusView.layer.shadowOffset = CGSize(width: 0.0, height: 1.0)
|
||||
muteStatusView.layer.shadowRadius = 8.0
|
||||
}
|
||||
transition.setPosition(view: muteStatusView, position: muteStatusFrame.center)
|
||||
transition.setBounds(view: muteStatusView, bounds: CGRect(origin: CGPoint(), size: muteStatusFrame.size))
|
||||
@ -320,31 +381,29 @@ final class VideoChatParticipantVideoComponent: Component {
|
||||
alphaTransition.setAlpha(view: muteStatusView, alpha: controlsAlpha)
|
||||
}
|
||||
|
||||
let titleInnerInsets = UIEdgeInsets(top: 8.0, left: 8.0, bottom: 8.0, right: 8.0)
|
||||
let titleSize = self.title.update(
|
||||
transition: .immediate,
|
||||
component: AnyComponent(MultilineTextComponent(
|
||||
text: .plain(NSAttributedString(string: component.participant.peer.debugDisplayTitle, font: Font.semibold(16.0), textColor: .white))
|
||||
text: .plain(NSAttributedString(string: component.participant.peer.debugDisplayTitle, font: Font.semibold(16.0), textColor: .white)),
|
||||
insets: titleInnerInsets,
|
||||
textShadowColor: UIColor(white: 0.0, alpha: 0.7),
|
||||
textShadowBlur: 8.0
|
||||
)),
|
||||
environment: {},
|
||||
containerSize: CGSize(width: availableSize.width - 8.0 * 2.0 - 4.0, height: 100.0)
|
||||
)
|
||||
let titleFrame: CGRect
|
||||
if component.isExpanded {
|
||||
titleFrame = CGRect(origin: CGPoint(x: 36.0, y: availableSize.height - component.controlInsets.bottom - 8.0 - titleSize.height), size: titleSize)
|
||||
titleFrame = CGRect(origin: CGPoint(x: 36.0 - titleInnerInsets.left, y: availableSize.height - component.controlInsets.bottom - 8.0 - titleSize.height + titleInnerInsets.top), size: titleSize)
|
||||
} else {
|
||||
titleFrame = CGRect(origin: CGPoint(x: 29.0, y: availableSize.height - component.controlInsets.bottom - 4.0 - titleSize.height), size: titleSize)
|
||||
titleFrame = CGRect(origin: CGPoint(x: 29.0 - titleInnerInsets.left, y: availableSize.height - component.controlInsets.bottom - 4.0 - titleSize.height + titleInnerInsets.top + 1.0), size: titleSize)
|
||||
}
|
||||
if let titleView = self.title.view {
|
||||
if titleView.superview == nil {
|
||||
titleView.layer.anchorPoint = CGPoint()
|
||||
self.addSubview(titleView)
|
||||
self.pinchContainerNode.contentNode.view.addSubview(titleView)
|
||||
titleView.alpha = controlsAlpha
|
||||
|
||||
//TODO:release
|
||||
titleView.layer.shadowOpacity = 0.7
|
||||
titleView.layer.shadowColor = UIColor(white: 0.0, alpha: 1.0).cgColor
|
||||
titleView.layer.shadowOffset = CGSize(width: 0.0, height: 1.0)
|
||||
titleView.layer.shadowRadius = 8.0
|
||||
}
|
||||
transition.setPosition(view: titleView, position: titleFrame.origin)
|
||||
titleView.bounds = CGRect(origin: CGPoint(), size: titleFrame.size)
|
||||
@ -377,9 +436,9 @@ final class VideoChatParticipantVideoComponent: Component {
|
||||
videoBackgroundLayer.opacity = 0.0
|
||||
self.videoBackgroundLayer = videoBackgroundLayer
|
||||
if let blurredAvatarView = self.blurredAvatarView {
|
||||
self.layer.insertSublayer(videoBackgroundLayer, above: blurredAvatarView.layer)
|
||||
self.pinchContainerNode.contentNode.view.layer.insertSublayer(videoBackgroundLayer, above: blurredAvatarView.layer)
|
||||
} else {
|
||||
self.layer.insertSublayer(videoBackgroundLayer, above: self.backgroundGradientView.layer)
|
||||
self.pinchContainerNode.contentNode.view.layer.insertSublayer(videoBackgroundLayer, above: self.backgroundGradientView.layer)
|
||||
}
|
||||
videoBackgroundLayer.isHidden = true
|
||||
}
|
||||
@ -391,8 +450,8 @@ final class VideoChatParticipantVideoComponent: Component {
|
||||
videoLayer = PrivateCallVideoLayer()
|
||||
self.videoLayer = videoLayer
|
||||
videoLayer.opacity = 0.0
|
||||
self.layer.insertSublayer(videoLayer.blurredLayer, above: videoBackgroundLayer)
|
||||
self.layer.insertSublayer(videoLayer, above: videoLayer.blurredLayer)
|
||||
self.pinchContainerNode.contentNode.view.layer.insertSublayer(videoLayer.blurredLayer, above: videoBackgroundLayer)
|
||||
self.pinchContainerNode.contentNode.view.layer.insertSublayer(videoLayer, above: videoLayer.blurredLayer)
|
||||
|
||||
videoLayer.blurredLayer.opacity = 0.0
|
||||
|
||||
@ -537,7 +596,7 @@ final class VideoChatParticipantVideoComponent: Component {
|
||||
if videoStatusView.superview == nil {
|
||||
videoStatusView.isUserInteractionEnabled = false
|
||||
videoStatusView.alpha = 0.0
|
||||
self.addSubview(videoStatusView)
|
||||
self.pinchContainerNode.contentNode.view.addSubview(videoStatusView)
|
||||
}
|
||||
videoStatusTransition.setFrame(view: videoStatusView, frame: CGRect(origin: CGPoint(), size: availableSize))
|
||||
videoAlphaTransition.setAlpha(view: videoStatusView, alpha: 1.0)
|
||||
@ -557,7 +616,7 @@ final class VideoChatParticipantVideoComponent: Component {
|
||||
self.loadingEffectView = loadingEffectView
|
||||
loadingEffectView.alpha = 0.0
|
||||
loadingEffectView.isUserInteractionEnabled = false
|
||||
self.addSubview(loadingEffectView)
|
||||
self.pinchContainerNode.contentNode.view.addSubview(loadingEffectView)
|
||||
if let referenceLocation = self.referenceLocation {
|
||||
self.updateHorizontalReferenceLocation(containerWidth: referenceLocation.containerWidth, positionX: referenceLocation.positionX, transition: .immediate)
|
||||
}
|
||||
@ -578,7 +637,7 @@ final class VideoChatParticipantVideoComponent: Component {
|
||||
} else {
|
||||
activityBorderView = UIImageView()
|
||||
self.activityBorderView = activityBorderView
|
||||
self.addSubview(activityBorderView)
|
||||
self.pinchContainerNode.contentNode.view.addSubview(activityBorderView)
|
||||
|
||||
activityBorderView.image = activityBorderImage
|
||||
activityBorderView.tintColor = UIColor(rgb: 0x33C758)
|
||||
|
@ -10,6 +10,7 @@ import SwiftSignalKit
|
||||
import MultilineTextComponent
|
||||
import TelegramPresentationData
|
||||
import PeerListItemComponent
|
||||
import ContextUI
|
||||
|
||||
final class VideoChatParticipantsComponent: Component {
|
||||
struct Layout: Equatable {
|
||||
@ -645,6 +646,8 @@ final class VideoChatParticipantsComponent: Component {
|
||||
|
||||
private var appliedGridIsEmpty: Bool = true
|
||||
|
||||
private var isPinchToZoomActive: Bool = false
|
||||
|
||||
private var currentLoadMoreToken: String?
|
||||
|
||||
private var mainScrollViewEventCycleState: EventCycleState?
|
||||
@ -986,7 +989,10 @@ final class VideoChatParticipantsComponent: Component {
|
||||
var itemControlInsets: UIEdgeInsets
|
||||
if isItemExpanded {
|
||||
itemControlInsets = itemContentInsets
|
||||
itemControlInsets.bottom = max(itemControlInsets.bottom, 96.0)
|
||||
if let expandedVideoState = component.expandedVideoState, expandedVideoState.isUIHidden {
|
||||
} else {
|
||||
itemControlInsets.bottom = max(itemControlInsets.bottom, 96.0)
|
||||
}
|
||||
} else {
|
||||
itemControlInsets = itemContentInsets
|
||||
}
|
||||
@ -1003,6 +1009,7 @@ final class VideoChatParticipantsComponent: Component {
|
||||
let _ = itemView.view.update(
|
||||
transition: itemTransition,
|
||||
component: AnyComponent(VideoChatParticipantVideoComponent(
|
||||
theme: component.theme,
|
||||
strings: component.strings,
|
||||
call: component.call,
|
||||
participant: videoParticipant.participant,
|
||||
@ -1010,7 +1017,7 @@ final class VideoChatParticipantsComponent: Component {
|
||||
isPresentation: videoParticipant.isPresentation,
|
||||
isSpeaking: component.speakingParticipants.contains(videoParticipant.participant.peer.id),
|
||||
isExpanded: isItemExpanded,
|
||||
isUIHidden: isItemUIHidden,
|
||||
isUIHidden: isItemUIHidden || self.isPinchToZoomActive,
|
||||
contentInsets: itemContentInsets,
|
||||
controlInsets: itemControlInsets,
|
||||
interfaceOrientation: component.interfaceOrientation,
|
||||
@ -1032,7 +1039,31 @@ final class VideoChatParticipantsComponent: Component {
|
||||
component.updateMainParticipant(videoParticipantKey, nil)
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
contextAction: !isItemExpanded ? { [weak self] peer, sourceView, gesture in
|
||||
guard let self, let component = self.component else {
|
||||
return
|
||||
}
|
||||
component.openParticipantContextMenu(peer.id, sourceView, gesture)
|
||||
} : nil,
|
||||
activatePinch: isItemExpanded ? { [weak self] sourceNode in
|
||||
guard let self, let component = self.component else {
|
||||
return
|
||||
}
|
||||
self.isPinchToZoomActive = true
|
||||
self.state?.updated(transition: .immediate, isLocal: true)
|
||||
let pinchController = PinchController(sourceNode: sourceNode, getContentAreaInScreenSpace: {
|
||||
return UIScreen.main.bounds
|
||||
})
|
||||
component.call.accountContext.sharedContext.mainWindow?.presentInGlobalOverlay(pinchController)
|
||||
} : nil,
|
||||
deactivatedPinch: isItemExpanded ? { [weak self] in
|
||||
guard let self else {
|
||||
return
|
||||
}
|
||||
self.isPinchToZoomActive = false
|
||||
self.state?.updated(transition: .spring(duration: 0.4), isLocal: true)
|
||||
} : nil
|
||||
)),
|
||||
environment: {},
|
||||
containerSize: itemFrame.size
|
||||
@ -1158,7 +1189,7 @@ final class VideoChatParticipantsComponent: Component {
|
||||
if participant.peer.id == component.call.accountContext.account.peerId {
|
||||
subtitle = PeerListItemComponent.Subtitle(text: "this is you", color: .accent)
|
||||
} else if component.speakingParticipants.contains(participant.peer.id) {
|
||||
if let volume = participant.volume, volume != 10000 {
|
||||
if let volume = participant.volume, volume / 100 != 100 {
|
||||
subtitle = PeerListItemComponent.Subtitle(text: "\(volume / 100)% speaking", color: .constructive)
|
||||
} else {
|
||||
subtitle = PeerListItemComponent.Subtitle(text: "speaking", color: .constructive)
|
||||
@ -1322,17 +1353,8 @@ final class VideoChatParticipantsComponent: Component {
|
||||
))
|
||||
}*/
|
||||
|
||||
let expandedControlsAlpha: CGFloat = expandedVideoState.isUIHidden ? 0.0 : 1.0
|
||||
let expandedControlsAlpha: CGFloat = (expandedVideoState.isUIHidden || self.isPinchToZoomActive) ? 0.0 : 1.0
|
||||
let expandedThumbnailsAlpha: CGFloat = expandedControlsAlpha
|
||||
/*if itemLayout.layout.videoColumn == nil {
|
||||
if expandedVideoState.isUIHidden {
|
||||
expandedThumbnailsAlpha = 0.0
|
||||
} else {
|
||||
expandedThumbnailsAlpha = 1.0
|
||||
}
|
||||
} else {
|
||||
expandedThumbnailsAlpha = 0.0
|
||||
}*/
|
||||
|
||||
var expandedThumbnailsTransition = transition
|
||||
let expandedThumbnailsView: ComponentView<Empty>
|
||||
|
@ -22,6 +22,7 @@ import ShareController
|
||||
import AvatarNode
|
||||
import TelegramAudio
|
||||
import LegacyComponents
|
||||
import TooltipUI
|
||||
|
||||
final class VideoChatScreenComponent: Component {
|
||||
typealias EnvironmentType = ViewControllerComponentContainer.Environment
|
||||
@ -83,6 +84,7 @@ final class VideoChatScreenComponent: Component {
|
||||
var scheduleInfo: ComponentView<Empty>?
|
||||
|
||||
var reconnectedAsEventsDisposable: Disposable?
|
||||
var memberEventsDisposable: Disposable?
|
||||
|
||||
var peer: EnginePeer?
|
||||
var callState: PresentationGroupCallState?
|
||||
@ -144,6 +146,7 @@ final class VideoChatScreenComponent: Component {
|
||||
self.membersDisposable?.dispose()
|
||||
self.applicationStateDisposable?.dispose()
|
||||
self.reconnectedAsEventsDisposable?.dispose()
|
||||
self.memberEventsDisposable?.dispose()
|
||||
self.displayAsPeersDisposable?.dispose()
|
||||
self.audioOutputStateDisposable?.dispose()
|
||||
self.inviteLinksDisposable?.dispose()
|
||||
@ -819,7 +822,7 @@ final class VideoChatScreenComponent: Component {
|
||||
|
||||
self.members = members
|
||||
|
||||
if let members, let _ = self.expandedParticipantsVideoState {
|
||||
if let members, let expandedParticipantsVideoState = self.expandedParticipantsVideoState, !expandedParticipantsVideoState.isUIHidden {
|
||||
var videoCount = 0
|
||||
for participant in members.participants {
|
||||
if participant.presentationDescription != nil {
|
||||
@ -1008,6 +1011,31 @@ final class VideoChatScreenComponent: Component {
|
||||
}
|
||||
self.presentUndoOverlay(content: .invitedToVoiceChat(context: component.call.accountContext, peer: peer, title: nil, text: text, action: nil, duration: 3), action: { _ in return false })
|
||||
})
|
||||
|
||||
self.memberEventsDisposable = (component.call.memberEvents
|
||||
|> deliverOnMainQueue).start(next: { [weak self] event in
|
||||
guard let self, let members = self.members, let component = self.component, let environment = self.environment else {
|
||||
return
|
||||
}
|
||||
if event.joined {
|
||||
var displayEvent = false
|
||||
if case let .channel(channel) = self.peer, case .broadcast = channel.info {
|
||||
displayEvent = false
|
||||
}
|
||||
if members.totalCount < 250 {
|
||||
displayEvent = true
|
||||
} else if event.peer.isVerified {
|
||||
displayEvent = true
|
||||
} else if event.isContact || event.isInChatList {
|
||||
displayEvent = true
|
||||
}
|
||||
|
||||
if displayEvent {
|
||||
let text = environment.strings.VoiceChat_PeerJoinedText(event.peer.displayTitle(strings: environment.strings, displayOrder: component.call.accountContext.sharedContext.currentPresentationData.with({ $0 }).nameDisplayOrder)).string
|
||||
self.presentUndoOverlay(content: .invitedToVoiceChat(context: component.call.accountContext, peer: event.peer, title: nil, text: text, action: nil, duration: 3), action: { _ in return false })
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
self.isPresentedValue.set(environment.isVisible)
|
||||
@ -1072,7 +1100,7 @@ final class VideoChatScreenComponent: Component {
|
||||
} else {
|
||||
containerOffset = verticalPanState.fraction * availableSize.height
|
||||
}
|
||||
self.containerView.layer.cornerRadius = environment.deviceMetrics.screenCornerRadius
|
||||
self.containerView.layer.cornerRadius = containerOffset.isZero ? 0.0 : environment.deviceMetrics.screenCornerRadius
|
||||
}
|
||||
|
||||
transition.setFrame(view: self.containerView, frame: CGRect(origin: CGPoint(x: 0.0, y: containerOffset), size: availableSize), completion: { [weak self] completed in
|
||||
@ -1249,13 +1277,49 @@ final class VideoChatScreenComponent: Component {
|
||||
} else {
|
||||
idleTitleStatusText = " "
|
||||
}
|
||||
|
||||
let canManageCall = self.callState?.canManageCall ?? false
|
||||
|
||||
let titleSize = self.title.update(
|
||||
transition: transition,
|
||||
component: AnyComponent(VideoChatTitleComponent(
|
||||
title: self.callState?.title ?? self.peer?.debugDisplayTitle ?? " ",
|
||||
status: idleTitleStatusText,
|
||||
isRecording: self.callState?.recordingStartTimestamp != nil,
|
||||
strings: environment.strings
|
||||
strings: environment.strings,
|
||||
tapAction: self.callState?.recordingStartTimestamp != nil ? { [weak self] in
|
||||
guard let self, let component = self.component, let environment = self.environment else {
|
||||
return
|
||||
}
|
||||
guard let titleView = self.title.view as? VideoChatTitleComponent.View, let recordingIndicatorView = titleView.recordingIndicatorView else {
|
||||
return
|
||||
}
|
||||
var hasTooltipAlready = false
|
||||
environment.controller()?.forEachController { controller -> Bool in
|
||||
if controller is TooltipScreen {
|
||||
hasTooltipAlready = true
|
||||
}
|
||||
return true
|
||||
}
|
||||
if !hasTooltipAlready {
|
||||
let location = recordingIndicatorView.convert(recordingIndicatorView.bounds, to: self)
|
||||
let text: String
|
||||
if case let .channel(channel) = self.peer, case .broadcast = channel.info {
|
||||
text = environment.strings.LiveStream_RecordingInProgress
|
||||
} else {
|
||||
text = environment.strings.VoiceChat_RecordingInProgress
|
||||
}
|
||||
environment.controller()?.present(TooltipScreen(account: component.call.accountContext.account, sharedContext: component.call.accountContext.sharedContext, text: .plain(text: text), icon: nil, location: .point(location.offsetBy(dx: 1.0, dy: 0.0), .top), displayDuration: .custom(3.0), shouldDismissOnTouch: { _, _ in
|
||||
return .dismiss(consume: true)
|
||||
}), in: .current)
|
||||
}
|
||||
} : nil,
|
||||
longTapAction: canManageCall ? { [weak self] in
|
||||
guard let self else {
|
||||
return
|
||||
}
|
||||
self.openTitleEditing()
|
||||
} : nil
|
||||
)),
|
||||
environment: {},
|
||||
containerSize: CGSize(width: availableSize.width - sideInset * 2.0 - navigationButtonAreaWidth * 2.0 - 4.0 * 2.0, height: 100.0)
|
||||
@ -1263,7 +1327,6 @@ final class VideoChatScreenComponent: Component {
|
||||
let titleFrame = CGRect(origin: CGPoint(x: floor((availableSize.width - titleSize.width) * 0.5), y: topInset + floor((navigationBarHeight - titleSize.height) * 0.5)), size: titleSize)
|
||||
if let titleView = self.title.view {
|
||||
if titleView.superview == nil {
|
||||
titleView.isUserInteractionEnabled = false
|
||||
self.containerView.addSubview(titleView)
|
||||
}
|
||||
transition.setFrame(view: titleView, frame: titleFrame)
|
||||
@ -1436,7 +1499,7 @@ final class VideoChatScreenComponent: Component {
|
||||
component: AnyComponent(VideoChatParticipantsComponent(
|
||||
call: component.call,
|
||||
participants: mappedParticipants,
|
||||
speakingParticipants: members?.speakingParticipants ?? Set(),
|
||||
speakingParticipants: self.members?.speakingParticipants ?? Set(),
|
||||
expandedVideoState: self.expandedParticipantsVideoState,
|
||||
theme: environment.theme,
|
||||
strings: environment.strings,
|
||||
@ -1699,7 +1762,9 @@ final class VideoChatScreenComponent: Component {
|
||||
let videoButtonContent: VideoChatActionButtonComponent.Content
|
||||
if let callState = self.callState, let muteState = callState.muteState, !muteState.canUnmute {
|
||||
var buttonAudio: VideoChatActionButtonComponent.Content.Audio = .speaker
|
||||
var buttonIsEnabled = false
|
||||
if let (availableOutputs, maybeCurrentOutput) = self.audioOutputState, let currentOutput = maybeCurrentOutput {
|
||||
buttonIsEnabled = availableOutputs.count > 1
|
||||
switch currentOutput {
|
||||
case .builtin:
|
||||
buttonAudio = .builtin
|
||||
@ -1723,7 +1788,7 @@ final class VideoChatScreenComponent: Component {
|
||||
buttonAudio = .none
|
||||
}
|
||||
}
|
||||
videoButtonContent = .audio(audio: buttonAudio)
|
||||
videoButtonContent = .audio(audio: buttonAudio, isEnabled: buttonIsEnabled)
|
||||
} else {
|
||||
//TODO:release
|
||||
videoButtonContent = .video(isActive: false)
|
||||
|
@ -12,17 +12,23 @@ final class VideoChatTitleComponent: Component {
|
||||
let status: String
|
||||
let isRecording: Bool
|
||||
let strings: PresentationStrings
|
||||
let tapAction: (() -> Void)?
|
||||
let longTapAction: (() -> Void)?
|
||||
|
||||
init(
|
||||
title: String,
|
||||
status: String,
|
||||
isRecording: Bool,
|
||||
strings: PresentationStrings
|
||||
strings: PresentationStrings,
|
||||
tapAction: (() -> Void)?,
|
||||
longTapAction: (() -> Void)?
|
||||
) {
|
||||
self.title = title
|
||||
self.status = status
|
||||
self.isRecording = isRecording
|
||||
self.strings = strings
|
||||
self.tapAction = tapAction
|
||||
self.longTapAction = longTapAction
|
||||
}
|
||||
|
||||
static func ==(lhs: VideoChatTitleComponent, rhs: VideoChatTitleComponent) -> Bool {
|
||||
@ -38,6 +44,12 @@ final class VideoChatTitleComponent: Component {
|
||||
if lhs.strings !== rhs.strings {
|
||||
return false
|
||||
}
|
||||
if (lhs.tapAction == nil) != (rhs.tapAction == nil) {
|
||||
return false
|
||||
}
|
||||
if (lhs.longTapAction == nil) != (rhs.longTapAction == nil) {
|
||||
return false
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
@ -55,6 +67,12 @@ final class VideoChatTitleComponent: Component {
|
||||
private var currentActivityStatus: String?
|
||||
private var currentSize: CGSize?
|
||||
|
||||
private var tapRecognizer: TapLongTapOrDoubleTapGestureRecognizer?
|
||||
|
||||
public var recordingIndicatorView: UIView? {
|
||||
return self.recordingImageView
|
||||
}
|
||||
|
||||
override init(frame: CGRect) {
|
||||
self.hierarchyTrackingLayer = HierarchyTrackingLayer()
|
||||
|
||||
@ -67,12 +85,33 @@ final class VideoChatTitleComponent: Component {
|
||||
}
|
||||
self.updateAnimations()
|
||||
}
|
||||
|
||||
let tapRecognizer = TapLongTapOrDoubleTapGestureRecognizer(target: self, action: #selector(self.tapGesture(_:)))
|
||||
tapRecognizer.tapActionAtPoint = { _ in
|
||||
return .waitForSingleTap
|
||||
}
|
||||
self.addGestureRecognizer(tapRecognizer)
|
||||
}
|
||||
|
||||
required init?(coder: NSCoder) {
|
||||
fatalError("init(coder:) has not been implemented")
|
||||
}
|
||||
|
||||
@objc private func tapGesture(_ recognizer: TapLongTapOrDoubleTapGestureRecognizer) {
|
||||
guard let component = self.component else {
|
||||
return
|
||||
}
|
||||
if case .ended = recognizer.state {
|
||||
if let (gesture, _) = recognizer.lastRecognizedGestureAndLocation {
|
||||
if case .tap = gesture {
|
||||
component.tapAction?()
|
||||
} else if case .longTap = gesture {
|
||||
component.longTapAction?()
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private func updateAnimations() {
|
||||
if let recordingImageView = self.recordingImageView {
|
||||
if recordingImageView.layer.animation(forKey: "blink") == nil {
|
||||
@ -153,15 +192,22 @@ final class VideoChatTitleComponent: Component {
|
||||
|
||||
self.component = component
|
||||
|
||||
self.tapRecognizer?.isEnabled = component.longTapAction != nil || component.tapAction != nil
|
||||
|
||||
let spacing: CGFloat = 1.0
|
||||
|
||||
var maxTitleWidth = availableSize.width
|
||||
if component.isRecording {
|
||||
maxTitleWidth -= 10.0
|
||||
}
|
||||
|
||||
let titleSize = self.title.update(
|
||||
transition: .immediate,
|
||||
component: AnyComponent(MultilineTextComponent(
|
||||
text: .plain(NSAttributedString(string: component.title, font: Font.semibold(17.0), textColor: .white))
|
||||
)),
|
||||
environment: {},
|
||||
containerSize: CGSize(width: availableSize.width, height: 100.0)
|
||||
containerSize: CGSize(width: maxTitleWidth, height: 100.0)
|
||||
)
|
||||
|
||||
let statusComponent: AnyComponent<Empty>
|
||||
@ -181,15 +227,18 @@ final class VideoChatTitleComponent: Component {
|
||||
let titleFrame = CGRect(origin: CGPoint(x: floor((size.width - titleSize.width) * 0.5), y: 0.0), size: titleSize)
|
||||
if let titleView = self.title.view {
|
||||
if titleView.superview == nil {
|
||||
titleView.layer.anchorPoint = CGPoint()
|
||||
titleView.isUserInteractionEnabled = false
|
||||
self.addSubview(titleView)
|
||||
}
|
||||
transition.setPosition(view: titleView, position: titleFrame.center)
|
||||
transition.setPosition(view: titleView, position: titleFrame.origin)
|
||||
titleView.bounds = CGRect(origin: CGPoint(), size: titleFrame.size)
|
||||
}
|
||||
|
||||
let statusFrame = CGRect(origin: CGPoint(x: floor((size.width - statusSize.width) * 0.5), y: titleFrame.maxY + spacing), size: statusSize)
|
||||
if let statusView = self.status.view {
|
||||
if statusView.superview == nil {
|
||||
statusView.isUserInteractionEnabled = false
|
||||
self.addSubview(statusView)
|
||||
}
|
||||
transition.setPosition(view: statusView, position: statusFrame.center)
|
||||
|
@ -5,12 +5,16 @@ import Display
|
||||
|
||||
private final class VoiceChatMicrophoneNodeDrawingState: NSObject {
|
||||
let color: UIColor
|
||||
let shadowColor: UIColor?
|
||||
let shadowBlur: CGFloat
|
||||
let filled: Bool
|
||||
let transition: CGFloat
|
||||
let reverse: Bool
|
||||
|
||||
init(color: UIColor, filled: Bool, transition: CGFloat, reverse: Bool) {
|
||||
init(color: UIColor, shadowColor: UIColor?, shadowBlur: CGFloat, filled: Bool, transition: CGFloat, reverse: Bool) {
|
||||
self.color = color
|
||||
self.shadowColor = shadowColor
|
||||
self.shadowBlur = shadowBlur
|
||||
self.filled = filled
|
||||
self.transition = transition
|
||||
self.reverse = reverse
|
||||
@ -24,11 +28,15 @@ final class VoiceChatMicrophoneNode: ASDisplayNode {
|
||||
let muted: Bool
|
||||
let color: UIColor
|
||||
let filled: Bool
|
||||
let shadowColor: UIColor?
|
||||
let shadowBlur: CGFloat
|
||||
|
||||
init(muted: Bool, filled: Bool, color: UIColor) {
|
||||
init(muted: Bool, filled: Bool, color: UIColor, shadowColor: UIColor? = nil, shadowBlur: CGFloat = 0.0) {
|
||||
self.muted = muted
|
||||
self.filled = filled
|
||||
self.color = color
|
||||
self.shadowColor = shadowColor
|
||||
self.shadowBlur = shadowBlur
|
||||
}
|
||||
|
||||
static func ==(lhs: State, rhs: State) -> Bool {
|
||||
@ -41,6 +49,12 @@ final class VoiceChatMicrophoneNode: ASDisplayNode {
|
||||
if lhs.filled != rhs.filled {
|
||||
return false
|
||||
}
|
||||
if lhs.shadowColor != rhs.shadowColor {
|
||||
return false
|
||||
}
|
||||
if lhs.shadowBlur != rhs.shadowBlur {
|
||||
return false
|
||||
}
|
||||
return true
|
||||
}
|
||||
}
|
||||
@ -122,6 +136,8 @@ final class VoiceChatMicrophoneNode: ASDisplayNode {
|
||||
override public func drawParameters(forAsyncLayer layer: _ASDisplayLayer) -> NSObjectProtocol? {
|
||||
var transitionFraction: CGFloat = self.state.muted ? 1.0 : 0.0
|
||||
var color = self.state.color
|
||||
var shadowColor = self.state.shadowColor
|
||||
var shadowBlur = self.state.shadowBlur
|
||||
|
||||
var reverse = false
|
||||
if let transitionContext = self.transitionContext {
|
||||
@ -138,9 +154,17 @@ final class VoiceChatMicrophoneNode: ASDisplayNode {
|
||||
if transitionContext.previousState.color.rgb != color.rgb {
|
||||
color = transitionContext.previousState.color.interpolateTo(color, fraction: t)!
|
||||
}
|
||||
|
||||
if let previousShadowColor = transitionContext.previousState.shadowColor, let shadowColorValue = shadowColor, previousShadowColor.rgb != shadowColorValue.rgb {
|
||||
shadowColor = previousShadowColor.interpolateTo(shadowColorValue, fraction: t)!
|
||||
}
|
||||
|
||||
if transitionContext.previousState.shadowBlur != shadowBlur {
|
||||
shadowBlur = transitionContext.previousState.shadowBlur * (1.0 - t) + shadowBlur * t
|
||||
}
|
||||
}
|
||||
|
||||
return VoiceChatMicrophoneNodeDrawingState(color: color, filled: self.state.filled, transition: transitionFraction, reverse: reverse)
|
||||
return VoiceChatMicrophoneNodeDrawingState(color: color, shadowColor: shadowColor, shadowBlur: shadowBlur, filled: self.state.filled, transition: transitionFraction, reverse: reverse)
|
||||
}
|
||||
|
||||
@objc override public class func draw(_ bounds: CGRect, withParameters parameters: Any?, isCancelled: () -> Bool, isRasterizing: Bool) {
|
||||
@ -155,9 +179,18 @@ final class VoiceChatMicrophoneNode: ASDisplayNode {
|
||||
guard let parameters = parameters as? VoiceChatMicrophoneNodeDrawingState else {
|
||||
return
|
||||
}
|
||||
|
||||
var bounds = bounds
|
||||
bounds = bounds.insetBy(dx: parameters.shadowBlur, dy: parameters.shadowBlur)
|
||||
|
||||
context.translateBy(x: bounds.minX, y: bounds.minY)
|
||||
|
||||
context.setFillColor(parameters.color.cgColor)
|
||||
|
||||
if let shadowColor = parameters.shadowColor, parameters.shadowBlur != 0.0 {
|
||||
context.setShadow(offset: CGSize(), blur: parameters.shadowBlur, color: shadowColor.cgColor)
|
||||
}
|
||||
|
||||
var clearLineWidth: CGFloat = 2.0
|
||||
var lineWidth: CGFloat = 1.0 + UIScreenPixel
|
||||
if bounds.size.width > 36.0 {
|
||||
|
@ -343,15 +343,7 @@ public class ChatMessageAnimatedStickerItemNode: ChatMessageItemView {
|
||||
}
|
||||
|
||||
if oldValue != self.visibility {
|
||||
switch self.visibility {
|
||||
case .none:
|
||||
self.textNode.visibilityRect = nil
|
||||
case let .visible(_, subRect):
|
||||
var subRect = subRect
|
||||
subRect.origin.x = 0.0
|
||||
subRect.size.width = 10000.0
|
||||
self.textNode.visibilityRect = subRect
|
||||
}
|
||||
self.updateVisibility()
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -594,6 +586,21 @@ public class ChatMessageAnimatedStickerItemNode: ChatMessageItemView {
|
||||
|
||||
let isPlaying = self.visibilityStatus == true && !self.forceStopAnimations
|
||||
|
||||
var effectiveVisibility = self.visibility
|
||||
if !isPlaying {
|
||||
effectiveVisibility = .none
|
||||
}
|
||||
|
||||
switch effectiveVisibility {
|
||||
case .none:
|
||||
self.textNode.visibilityRect = nil
|
||||
case let .visible(_, subRect):
|
||||
var subRect = subRect
|
||||
subRect.origin.x = 0.0
|
||||
subRect.size.width = 10000.0
|
||||
self.textNode.visibilityRect = subRect
|
||||
}
|
||||
|
||||
var canPlayEffects = isPlaying
|
||||
if !item.controllerInteraction.canReadHistory {
|
||||
canPlayEffects = false
|
||||
|
@ -680,22 +680,6 @@ public class ChatMessageBubbleItemNode: ChatMessageItemView, ChatMessagePreviewI
|
||||
override public var visibility: ListViewItemNodeVisibility {
|
||||
didSet {
|
||||
if self.visibility != oldValue {
|
||||
for contentNode in self.contentNodes {
|
||||
contentNode.visibility = mapVisibility(self.visibility, boundsSize: self.bounds.size, insets: self.insets, to: contentNode)
|
||||
}
|
||||
|
||||
if let threadInfoNode = self.threadInfoNode {
|
||||
threadInfoNode.visibility = self.visibility != .none
|
||||
}
|
||||
|
||||
if let replyInfoNode = self.replyInfoNode {
|
||||
replyInfoNode.visibility = self.visibility != .none
|
||||
}
|
||||
|
||||
if let unlockButtonNode = self.unlockButtonNode {
|
||||
unlockButtonNode.visibility = self.visibility != .none
|
||||
}
|
||||
|
||||
self.visibilityStatus = self.visibility != .none
|
||||
|
||||
self.updateVisibility()
|
||||
@ -718,6 +702,8 @@ public class ChatMessageBubbleItemNode: ChatMessageItemView, ChatMessagePreviewI
|
||||
}
|
||||
}
|
||||
|
||||
private var forceStopAnimations: Bool = false
|
||||
|
||||
required public init(rotated: Bool) {
|
||||
self.mainContextSourceNode = ContextExtractedContentContainingNode()
|
||||
self.mainContainerNode = ContextControllerSourceNode()
|
||||
@ -6207,6 +6193,11 @@ public class ChatMessageBubbleItemNode: ChatMessageItemView, ChatMessagePreviewI
|
||||
return false
|
||||
}
|
||||
|
||||
override public func updateStickerSettings(forceStopAnimations: Bool) {
|
||||
self.forceStopAnimations = forceStopAnimations
|
||||
self.updateVisibility()
|
||||
}
|
||||
|
||||
private func updateVisibility() {
|
||||
guard let item = self.item else {
|
||||
return
|
||||
@ -6223,11 +6214,35 @@ public class ChatMessageBubbleItemNode: ChatMessageItemView, ChatMessagePreviewI
|
||||
if !item.controllerInteraction.canReadHistory {
|
||||
isPlaying = false
|
||||
}
|
||||
if self.forceStopAnimations {
|
||||
isPlaying = false
|
||||
}
|
||||
|
||||
if !isPlaying {
|
||||
self.removeEffectAnimations()
|
||||
}
|
||||
|
||||
var effectiveVisibility = self.visibility
|
||||
if !isPlaying {
|
||||
effectiveVisibility = .none
|
||||
}
|
||||
|
||||
for contentNode in self.contentNodes {
|
||||
contentNode.visibility = mapVisibility(effectiveVisibility, boundsSize: self.bounds.size, insets: self.insets, to: contentNode)
|
||||
}
|
||||
|
||||
if let threadInfoNode = self.threadInfoNode {
|
||||
threadInfoNode.visibility = effectiveVisibility != .none
|
||||
}
|
||||
|
||||
if let replyInfoNode = self.replyInfoNode {
|
||||
replyInfoNode.visibility = effectiveVisibility != .none
|
||||
}
|
||||
|
||||
if let unlockButtonNode = self.unlockButtonNode {
|
||||
unlockButtonNode.visibility = effectiveVisibility != .none
|
||||
}
|
||||
|
||||
if isPlaying {
|
||||
var alreadySeen = true
|
||||
if item.message.flags.contains(.Incoming) {
|
||||
|
@ -1425,6 +1425,8 @@ public final class ChatMessageInteractiveMediaNode: ASDisplayNode, GalleryItemTr
|
||||
if let strongSelf = self {
|
||||
if file.isAnimated {
|
||||
strongSelf.fetchDisposable.set(fetchedMediaResource(mediaBox: context.account.postbox.mediaBox, userLocation: .peer(message.id.peerId), userContentType: MediaResourceUserContentType(file: file), reference: AnyMediaReference.message(message: MessageReference(message), media: file).resourceReference(file.resource), statsCategory: statsCategoryForFileWithAttributes(file.attributes)).startStrict())
|
||||
} else if NativeVideoContent.isHLSVideo(file: file) {
|
||||
strongSelf.fetchDisposable.set(nil)
|
||||
} else {
|
||||
strongSelf.fetchDisposable.set(messageMediaFileInteractiveFetched(context: context, message: message, file: file, userInitiated: manual, storeToDownloadsPeerId: storeToDownloadsPeerId).startStrict())
|
||||
}
|
||||
@ -1659,16 +1661,12 @@ public final class ChatMessageInteractiveMediaNode: ASDisplayNode, GalleryItemTr
|
||||
let loopVideo = updatedVideoFile.isAnimated
|
||||
|
||||
let videoContent: UniversalVideoContent
|
||||
if !"".isEmpty && NativeVideoContent.isHLSVideo(file: updatedVideoFile) {
|
||||
videoContent = HLSVideoContent(id: .message(message.id, message.stableId, updatedVideoFile.fileId), userLocation: .peer(message.id.peerId), fileReference: .message(message: MessageReference(message), media: updatedVideoFile), streamVideo: true, loopVideo: loopVideo)
|
||||
} else {
|
||||
videoContent = NativeVideoContent(id: .message(message.stableId, updatedVideoFile.fileId), userLocation: .peer(message.id.peerId), fileReference: .message(message: MessageReference(message), media: updatedVideoFile), streamVideo: streamVideo ? .conservative : .none, loopVideo: loopVideo, enableSound: false, fetchAutomatically: false, onlyFullSizeThumbnail: (onlyFullSizeVideoThumbnail ?? false), continuePlayingWithoutSoundOnLostAudioSession: isInlinePlayableVideo, placeholderColor: emptyColor, captureProtected: message.isCopyProtected() || isExtendedMedia, storeAfterDownload: { [weak context] in
|
||||
guard let context, let peerId else {
|
||||
return
|
||||
}
|
||||
let _ = storeDownloadedMedia(storeManager: context.downloadedMediaStoreManager, media: .message(message: MessageReference(message), media: updatedVideoFile), peerId: peerId).startStandalone()
|
||||
})
|
||||
}
|
||||
videoContent = NativeVideoContent(id: .message(message.stableId, updatedVideoFile.fileId), userLocation: .peer(message.id.peerId), fileReference: .message(message: MessageReference(message), media: updatedVideoFile), streamVideo: streamVideo ? .conservative : .none, loopVideo: loopVideo, enableSound: false, fetchAutomatically: false, onlyFullSizeThumbnail: (onlyFullSizeVideoThumbnail ?? false), continuePlayingWithoutSoundOnLostAudioSession: isInlinePlayableVideo, placeholderColor: emptyColor, captureProtected: message.isCopyProtected() || isExtendedMedia, storeAfterDownload: { [weak context] in
|
||||
guard let context, let peerId else {
|
||||
return
|
||||
}
|
||||
let _ = storeDownloadedMedia(storeManager: context.downloadedMediaStoreManager, media: .message(message: MessageReference(message), media: updatedVideoFile), peerId: peerId).startStandalone()
|
||||
})
|
||||
let videoNode = UniversalVideoNode(accountId: context.account.id, postbox: context.account.postbox, audioSession: mediaManager.audioSession, manager: mediaManager.universalVideoManager, decoration: decoration, content: videoContent, priority: .embedded)
|
||||
videoNode.isUserInteractionEnabled = false
|
||||
videoNode.ownsContentNodeUpdated = { [weak self] owns in
|
||||
@ -1850,7 +1848,32 @@ public final class ChatMessageInteractiveMediaNode: ASDisplayNode, GalleryItemTr
|
||||
}
|
||||
}
|
||||
|
||||
if case .full = automaticDownload {
|
||||
if automaticDownload != .none, let file = media as? TelegramMediaFile, NativeVideoContent.isHLSVideo(file: file) {
|
||||
let postbox = context.account.postbox
|
||||
let fetchSignal = HLSVideoContent.minimizedHLSQualityPreloadData(postbox: context.account.postbox, file: .message(message: MessageReference(message), media: file), userLocation: .peer(message.id.peerId), prefixSeconds: 10, autofetchPlaylist: true)
|
||||
|> mapToSignal { fileAndRange -> Signal<Never, NoError> in
|
||||
guard let fileAndRange else {
|
||||
return .complete()
|
||||
}
|
||||
return freeMediaFileResourceInteractiveFetched(postbox: postbox, userLocation: .peer(message.id.peerId), fileReference: fileAndRange.0, resource: fileAndRange.0.media.resource, range: (fileAndRange.1, .default))
|
||||
|> ignoreValues
|
||||
|> `catch` { _ -> Signal<Never, NoError> in
|
||||
return .complete()
|
||||
}
|
||||
}
|
||||
|
||||
let visibilityAwareFetchSignal = strongSelf.visibilityPromise.get()
|
||||
|> mapToSignal { visibility -> Signal<Void, NoError> in
|
||||
if visibility {
|
||||
return fetchSignal
|
||||
|> mapToSignal { _ -> Signal<Void, NoError> in
|
||||
}
|
||||
} else {
|
||||
return .complete()
|
||||
}
|
||||
}
|
||||
strongSelf.fetchDisposable.set(visibilityAwareFetchSignal.startStrict())
|
||||
} else if case .full = automaticDownload {
|
||||
if let _ = media as? TelegramMediaImage {
|
||||
updatedFetchControls.fetch(false)
|
||||
} else if let image = media as? TelegramMediaWebFile {
|
||||
|
@ -24,6 +24,7 @@ swift_library(
|
||||
"//submodules/TelegramUI/Components/Chat/ChatMessageBubbleContentNode",
|
||||
"//submodules/TelegramUI/Components/Chat/ChatMessageItemCommon",
|
||||
"//submodules/TelegramUI/Components/Chat/ChatMessageInteractiveMediaNode",
|
||||
"//submodules/TelegramUniversalVideoContent",
|
||||
],
|
||||
visibility = [
|
||||
"//visibility:public",
|
||||
|
@ -16,6 +16,7 @@ import ChatMessageItemCommon
|
||||
import ChatMessageInteractiveMediaNode
|
||||
import ChatControllerInteraction
|
||||
import InvisibleInkDustNode
|
||||
import TelegramUniversalVideoContent
|
||||
|
||||
public class ChatMessageMediaBubbleContentNode: ChatMessageBubbleContentNode {
|
||||
override public var supportsMosaic: Bool {
|
||||
@ -163,7 +164,9 @@ public class ChatMessageMediaBubbleContentNode: ChatMessageBubbleContentNode {
|
||||
automaticPlayback = item.context.account.postbox.mediaBox.completedResourcePath(telegramFile.resource) != nil
|
||||
}
|
||||
} else if (telegramFile.isVideo && !telegramFile.isAnimated) && item.context.sharedContext.energyUsageSettings.autoplayVideo {
|
||||
if case .full = automaticDownload {
|
||||
if NativeVideoContent.isHLSVideo(file: telegramFile) {
|
||||
automaticPlayback = true
|
||||
} else if case .full = automaticDownload {
|
||||
automaticPlayback = true
|
||||
} else {
|
||||
automaticPlayback = item.context.account.postbox.mediaBox.completedResourcePath(telegramFile.resource) != nil
|
||||
@ -207,7 +210,9 @@ public class ChatMessageMediaBubbleContentNode: ChatMessageBubbleContentNode {
|
||||
automaticPlayback = item.context.account.postbox.mediaBox.completedResourcePath(telegramFile.resource) != nil
|
||||
}
|
||||
} else if (telegramFile.isVideo && !telegramFile.isAnimated) && item.context.sharedContext.energyUsageSettings.autoplayVideo {
|
||||
if case .full = automaticDownload {
|
||||
if NativeVideoContent.isHLSVideo(file: telegramFile) {
|
||||
automaticPlayback = true
|
||||
} else if case .full = automaticDownload {
|
||||
automaticPlayback = true
|
||||
} else {
|
||||
automaticPlayback = item.context.account.postbox.mediaBox.completedResourcePath(telegramFile.resource) != nil
|
||||
|
@ -101,6 +101,8 @@ public class ChatMessageStickerItemNode: ChatMessageItemView {
|
||||
}
|
||||
}
|
||||
|
||||
private var forceStopAnimations: Bool = false
|
||||
|
||||
required public init(rotated: Bool) {
|
||||
self.contextSourceNode = ContextExtractedContentContainingNode()
|
||||
self.containerNode = ContextControllerSourceNode()
|
||||
@ -2160,6 +2162,9 @@ public class ChatMessageStickerItemNode: ChatMessageItemView {
|
||||
if !item.controllerInteraction.canReadHistory {
|
||||
isPlaying = false
|
||||
}
|
||||
if self.forceStopAnimations {
|
||||
isPlaying = false
|
||||
}
|
||||
|
||||
if !isPlaying {
|
||||
self.removeEffectAnimations()
|
||||
@ -2191,6 +2196,11 @@ public class ChatMessageStickerItemNode: ChatMessageItemView {
|
||||
}
|
||||
}
|
||||
|
||||
override public func updateStickerSettings(forceStopAnimations: Bool) {
|
||||
self.forceStopAnimations = forceStopAnimations
|
||||
self.updateVisibility()
|
||||
}
|
||||
|
||||
override public func messageEffectTargetView() -> UIView? {
|
||||
if let result = self.dateAndStatusNode.messageEffectTargetView() {
|
||||
return result
|
||||
|
@ -28,6 +28,7 @@
|
||||
|
||||
var isManifestParsed = false;
|
||||
var isFirstFrameReady = false;
|
||||
var isPictureInPictureActive = false;
|
||||
|
||||
var currentTimeUpdateTimeout = null;
|
||||
|
||||
@ -52,12 +53,22 @@
|
||||
video.addEventListener("waiting", function() {
|
||||
refreshPlayerStatus();
|
||||
});
|
||||
video.addEventListener("enterpictureinpicture", function() {
|
||||
isPictureInPictureActive = true;
|
||||
refreshPlayerStatus();
|
||||
}, false);
|
||||
video.addEventListener("leavepictureinpicture", function() {
|
||||
isPictureInPictureActive = false;
|
||||
refreshPlayerStatus();
|
||||
}, false);
|
||||
|
||||
|
||||
hls = new Hls({
|
||||
startLevel: 0,
|
||||
testBandwidth: false,
|
||||
debug: params['debug'],
|
||||
autoStartLoad: false
|
||||
autoStartLoad: false,
|
||||
abrEwmaDefaultEstimate: params['bandwidthEstimate']
|
||||
});
|
||||
hls.on(Hls.Events.MANIFEST_PARSED, function() {
|
||||
isManifestParsed = true;
|
||||
@ -109,6 +120,19 @@
|
||||
video.muted = value;
|
||||
}
|
||||
|
||||
function playerRequestPictureInPicture() {
|
||||
if (video !== document.pictureInPictureElement) {
|
||||
video.requestPictureInPicture().then(function() {
|
||||
isPictureInPictureActive = true;
|
||||
refreshPlayerStatus();
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
function playerStopPictureInPicture() {
|
||||
document.exitPictureInPicture();
|
||||
}
|
||||
|
||||
function getLevels() {
|
||||
var levels = [];
|
||||
for (var i = 0; i < hls.levels.length; i++) {
|
||||
@ -136,7 +160,8 @@
|
||||
'rate': isPlaying ? video.playbackRate : 0.0,
|
||||
'defaultRate': video.playbackRate,
|
||||
'levels': getLevels(),
|
||||
'currentLevel': hls.currentLevel
|
||||
'currentLevel': hls.currentLevel,
|
||||
'isPictureInPictureActive': isPictureInPictureActive
|
||||
});
|
||||
|
||||
refreshPlayerCurrentTime();
|
||||
@ -157,7 +182,8 @@
|
||||
|
||||
function refreshPlayerCurrentTime() {
|
||||
postPlayerEvent('playerCurrentTime', {
|
||||
'value': video.currentTime
|
||||
'value': video.currentTime,
|
||||
'bandwidthEstimate': hls.bandwidthEstimate
|
||||
});
|
||||
currentTimeUpdateTimeout = setTimeout(() => {
|
||||
refreshPlayerCurrentTime()
|
||||
|
File diff suppressed because one or more lines are too long
@ -6946,12 +6946,21 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
|
||||
}
|
||||
})
|
||||
|
||||
self.stickerSettingsDisposable = combineLatest(queue: Queue.mainQueue(), context.sharedContext.accountManager.sharedData(keys: [ApplicationSpecificSharedDataKeys.stickerSettings]), self.disableStickerAnimationsPromise.get()).startStrict(next: { [weak self] sharedData, disableStickerAnimations in
|
||||
self.stickerSettingsDisposable = combineLatest(queue: Queue.mainQueue(),
|
||||
context.sharedContext.accountManager.sharedData(keys: [ApplicationSpecificSharedDataKeys.stickerSettings]),
|
||||
self.disableStickerAnimationsPromise.get(),
|
||||
context.sharedContext.hasGroupCallOnScreen
|
||||
).startStrict(next: { [weak self] sharedData, disableStickerAnimations, hasGroupCallOnScreen in
|
||||
var stickerSettings = StickerSettings.defaultSettings
|
||||
if let value = sharedData.entries[ApplicationSpecificSharedDataKeys.stickerSettings]?.get(StickerSettings.self) {
|
||||
stickerSettings = value
|
||||
}
|
||||
|
||||
var disableStickerAnimations = disableStickerAnimations
|
||||
if hasGroupCallOnScreen {
|
||||
disableStickerAnimations = true
|
||||
}
|
||||
|
||||
let chatStickerSettings = ChatInterfaceStickerSettings(stickerSettings: stickerSettings)
|
||||
if let strongSelf = self, strongSelf.stickerSettings != chatStickerSettings || strongSelf.disableStickerAnimationsValue != disableStickerAnimations {
|
||||
strongSelf.stickerSettings = chatStickerSettings
|
||||
|
@ -0,0 +1,549 @@
|
||||
import Foundation
|
||||
import SwiftSignalKit
|
||||
import UniversalMediaPlayer
|
||||
import Postbox
|
||||
import TelegramCore
|
||||
import AsyncDisplayKit
|
||||
import AccountContext
|
||||
import TelegramAudio
|
||||
import RangeSet
|
||||
import AVFoundation
|
||||
import Display
|
||||
import PhotoResources
|
||||
import TelegramVoip
|
||||
|
||||
final class HLSVideoAVContentNode: ASDisplayNode, UniversalVideoContentNode {
|
||||
private let postbox: Postbox
|
||||
private let userLocation: MediaResourceUserLocation
|
||||
private let fileReference: FileMediaReference
|
||||
private let approximateDuration: Double
|
||||
private let intrinsicDimensions: CGSize
|
||||
|
||||
private let audioSessionManager: ManagedAudioSession
|
||||
private let audioSessionDisposable = MetaDisposable()
|
||||
private var hasAudioSession = false
|
||||
|
||||
private let playbackCompletedListeners = Bag<() -> Void>()
|
||||
|
||||
private var initializedStatus = false
|
||||
private var statusValue = MediaPlayerStatus(generationTimestamp: 0.0, duration: 0.0, dimensions: CGSize(), timestamp: 0.0, baseRate: 1.0, seekId: 0, status: .paused, soundEnabled: true)
|
||||
private var baseRate: Double = 1.0
|
||||
private var isBuffering = false
|
||||
private var seekId: Int = 0
|
||||
private let _status = ValuePromise<MediaPlayerStatus>()
|
||||
var status: Signal<MediaPlayerStatus, NoError> {
|
||||
return self._status.get()
|
||||
}
|
||||
|
||||
private let _bufferingStatus = Promise<(RangeSet<Int64>, Int64)?>()
|
||||
var bufferingStatus: Signal<(RangeSet<Int64>, Int64)?, NoError> {
|
||||
return self._bufferingStatus.get()
|
||||
}
|
||||
|
||||
var isNativePictureInPictureActive: Signal<Bool, NoError> {
|
||||
return .single(false)
|
||||
}
|
||||
|
||||
private let _ready = Promise<Void>()
|
||||
var ready: Signal<Void, NoError> {
|
||||
return self._ready.get()
|
||||
}
|
||||
|
||||
private let _preloadCompleted = ValuePromise<Bool>()
|
||||
var preloadCompleted: Signal<Bool, NoError> {
|
||||
return self._preloadCompleted.get()
|
||||
}
|
||||
|
||||
private var playerSource: HLSServerSource?
|
||||
private var serverDisposable: Disposable?
|
||||
|
||||
private let imageNode: TransformImageNode
|
||||
|
||||
private var playerItem: AVPlayerItem?
|
||||
private var player: AVPlayer?
|
||||
private let playerNode: ASDisplayNode
|
||||
|
||||
private var loadProgressDisposable: Disposable?
|
||||
private var statusDisposable: Disposable?
|
||||
|
||||
private var didPlayToEndTimeObserver: NSObjectProtocol?
|
||||
private var didBecomeActiveObserver: NSObjectProtocol?
|
||||
private var willResignActiveObserver: NSObjectProtocol?
|
||||
private var failureObserverId: NSObjectProtocol?
|
||||
private var errorObserverId: NSObjectProtocol?
|
||||
private var playerItemFailedToPlayToEndTimeObserver: NSObjectProtocol?
|
||||
|
||||
private let fetchDisposable = MetaDisposable()
|
||||
|
||||
private var dimensions: CGSize?
|
||||
private let dimensionsPromise = ValuePromise<CGSize>(CGSize())
|
||||
|
||||
private var validLayout: (size: CGSize, actualSize: CGSize)?
|
||||
|
||||
private var statusTimer: Foundation.Timer?
|
||||
|
||||
private var preferredVideoQuality: UniversalVideoContentVideoQuality = .auto
|
||||
|
||||
init(accountId: AccountRecordId, postbox: Postbox, audioSessionManager: ManagedAudioSession, userLocation: MediaResourceUserLocation, fileReference: FileMediaReference, streamVideo: Bool, loopVideo: Bool, enableSound: Bool, baseRate: Double, fetchAutomatically: Bool) {
|
||||
self.postbox = postbox
|
||||
self.fileReference = fileReference
|
||||
self.approximateDuration = fileReference.media.duration ?? 0.0
|
||||
self.audioSessionManager = audioSessionManager
|
||||
self.userLocation = userLocation
|
||||
self.baseRate = baseRate
|
||||
|
||||
if var dimensions = fileReference.media.dimensions {
|
||||
if let thumbnail = fileReference.media.previewRepresentations.first {
|
||||
let dimensionsVertical = dimensions.width < dimensions.height
|
||||
let thumbnailVertical = thumbnail.dimensions.width < thumbnail.dimensions.height
|
||||
if dimensionsVertical != thumbnailVertical {
|
||||
dimensions = PixelDimensions(width: dimensions.height, height: dimensions.width)
|
||||
}
|
||||
}
|
||||
self.dimensions = dimensions.cgSize
|
||||
} else {
|
||||
self.dimensions = CGSize(width: 128.0, height: 128.0)
|
||||
}
|
||||
|
||||
self.imageNode = TransformImageNode()
|
||||
|
||||
var player: AVPlayer?
|
||||
player = AVPlayer(playerItem: nil)
|
||||
self.player = player
|
||||
if #available(iOS 16.0, *) {
|
||||
player?.defaultRate = Float(baseRate)
|
||||
}
|
||||
if !enableSound {
|
||||
player?.volume = 0.0
|
||||
}
|
||||
|
||||
self.playerNode = ASDisplayNode()
|
||||
self.playerNode.setLayerBlock({
|
||||
return AVPlayerLayer(player: player)
|
||||
})
|
||||
|
||||
self.intrinsicDimensions = fileReference.media.dimensions?.cgSize ?? CGSize(width: 480.0, height: 320.0)
|
||||
|
||||
self.playerNode.frame = CGRect(origin: CGPoint(), size: self.intrinsicDimensions)
|
||||
|
||||
if let qualitySet = HLSQualitySet(baseFile: fileReference) {
|
||||
self.playerSource = HLSServerSource(accountId: accountId.int64, fileId: fileReference.media.fileId.id, postbox: postbox, userLocation: userLocation, playlistFiles: qualitySet.playlistFiles, qualityFiles: qualitySet.qualityFiles)
|
||||
}
|
||||
|
||||
super.init()
|
||||
|
||||
self.imageNode.setSignal(internalMediaGridMessageVideo(postbox: postbox, userLocation: self.userLocation, videoReference: fileReference) |> map { [weak self] getSize, getData in
|
||||
Queue.mainQueue().async {
|
||||
if let strongSelf = self, strongSelf.dimensions == nil {
|
||||
if let dimensions = getSize() {
|
||||
strongSelf.dimensions = dimensions
|
||||
strongSelf.dimensionsPromise.set(dimensions)
|
||||
if let validLayout = strongSelf.validLayout {
|
||||
strongSelf.updateLayout(size: validLayout.size, actualSize: validLayout.actualSize, transition: .immediate)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return getData
|
||||
})
|
||||
|
||||
self.addSubnode(self.imageNode)
|
||||
self.addSubnode(self.playerNode)
|
||||
self.player?.actionAtItemEnd = .pause
|
||||
|
||||
self.imageNode.imageUpdated = { [weak self] _ in
|
||||
self?._ready.set(.single(Void()))
|
||||
}
|
||||
|
||||
self.player?.addObserver(self, forKeyPath: "rate", options: [], context: nil)
|
||||
|
||||
self._bufferingStatus.set(.single(nil))
|
||||
|
||||
if let playerSource = self.playerSource {
|
||||
self.serverDisposable = SharedHLSServer.shared.registerPlayer(source: playerSource, completion: { [weak self] in
|
||||
Queue.mainQueue().async {
|
||||
guard let self else {
|
||||
return
|
||||
}
|
||||
|
||||
let playerItem: AVPlayerItem
|
||||
let assetUrl = "http://127.0.0.1:\(SharedHLSServer.shared.port)/\(playerSource.id)/master.m3u8"
|
||||
#if DEBUG
|
||||
print("HLSVideoAVContentNode: playing \(assetUrl)")
|
||||
#endif
|
||||
playerItem = AVPlayerItem(url: URL(string: assetUrl)!)
|
||||
|
||||
if #available(iOS 14.0, *) {
|
||||
playerItem.startsOnFirstEligibleVariant = true
|
||||
}
|
||||
|
||||
self.setPlayerItem(playerItem)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
self.didBecomeActiveObserver = NotificationCenter.default.addObserver(forName: UIApplication.willEnterForegroundNotification, object: nil, queue: nil, using: { [weak self] _ in
|
||||
guard let strongSelf = self, let layer = strongSelf.playerNode.layer as? AVPlayerLayer else {
|
||||
return
|
||||
}
|
||||
layer.player = strongSelf.player
|
||||
})
|
||||
self.willResignActiveObserver = NotificationCenter.default.addObserver(forName: UIApplication.didEnterBackgroundNotification, object: nil, queue: nil, using: { [weak self] _ in
|
||||
guard let strongSelf = self, let layer = strongSelf.playerNode.layer as? AVPlayerLayer else {
|
||||
return
|
||||
}
|
||||
layer.player = nil
|
||||
})
|
||||
}
|
||||
|
||||
deinit {
|
||||
self.player?.removeObserver(self, forKeyPath: "rate")
|
||||
|
||||
self.setPlayerItem(nil)
|
||||
|
||||
self.audioSessionDisposable.dispose()
|
||||
|
||||
self.loadProgressDisposable?.dispose()
|
||||
self.statusDisposable?.dispose()
|
||||
|
||||
if let didBecomeActiveObserver = self.didBecomeActiveObserver {
|
||||
NotificationCenter.default.removeObserver(didBecomeActiveObserver)
|
||||
}
|
||||
if let willResignActiveObserver = self.willResignActiveObserver {
|
||||
NotificationCenter.default.removeObserver(willResignActiveObserver)
|
||||
}
|
||||
|
||||
if let didPlayToEndTimeObserver = self.didPlayToEndTimeObserver {
|
||||
NotificationCenter.default.removeObserver(didPlayToEndTimeObserver)
|
||||
}
|
||||
if let failureObserverId = self.failureObserverId {
|
||||
NotificationCenter.default.removeObserver(failureObserverId)
|
||||
}
|
||||
if let errorObserverId = self.errorObserverId {
|
||||
NotificationCenter.default.removeObserver(errorObserverId)
|
||||
}
|
||||
|
||||
self.serverDisposable?.dispose()
|
||||
|
||||
self.statusTimer?.invalidate()
|
||||
}
|
||||
|
||||
private func setPlayerItem(_ item: AVPlayerItem?) {
|
||||
if let playerItem = self.playerItem {
|
||||
playerItem.removeObserver(self, forKeyPath: "playbackBufferEmpty")
|
||||
playerItem.removeObserver(self, forKeyPath: "playbackLikelyToKeepUp")
|
||||
playerItem.removeObserver(self, forKeyPath: "playbackBufferFull")
|
||||
playerItem.removeObserver(self, forKeyPath: "status")
|
||||
playerItem.removeObserver(self, forKeyPath: "presentationSize")
|
||||
}
|
||||
|
||||
if let playerItemFailedToPlayToEndTimeObserver = self.playerItemFailedToPlayToEndTimeObserver {
|
||||
self.playerItemFailedToPlayToEndTimeObserver = nil
|
||||
NotificationCenter.default.removeObserver(playerItemFailedToPlayToEndTimeObserver)
|
||||
}
|
||||
|
||||
if let didPlayToEndTimeObserver = self.didPlayToEndTimeObserver {
|
||||
self.didPlayToEndTimeObserver = nil
|
||||
NotificationCenter.default.removeObserver(didPlayToEndTimeObserver)
|
||||
}
|
||||
if let failureObserverId = self.failureObserverId {
|
||||
self.failureObserverId = nil
|
||||
NotificationCenter.default.removeObserver(failureObserverId)
|
||||
}
|
||||
if let errorObserverId = self.errorObserverId {
|
||||
self.errorObserverId = nil
|
||||
NotificationCenter.default.removeObserver(errorObserverId)
|
||||
}
|
||||
|
||||
self.playerItem = item
|
||||
|
||||
if let item {
|
||||
self.didPlayToEndTimeObserver = NotificationCenter.default.addObserver(forName: NSNotification.Name.AVPlayerItemDidPlayToEndTime, object: item, queue: nil, using: { [weak self] notification in
|
||||
self?.performActionAtEnd()
|
||||
})
|
||||
|
||||
self.failureObserverId = NotificationCenter.default.addObserver(forName: AVPlayerItem.failedToPlayToEndTimeNotification, object: item, queue: .main, using: { notification in
|
||||
#if DEBUG
|
||||
print("Player Error: \(notification.description)")
|
||||
#endif
|
||||
})
|
||||
self.errorObserverId = NotificationCenter.default.addObserver(forName: AVPlayerItem.newErrorLogEntryNotification, object: item, queue: .main, using: { [weak item] notification in
|
||||
if let item {
|
||||
let event = item.errorLog()?.events.last
|
||||
if let event {
|
||||
let _ = event
|
||||
#if DEBUG
|
||||
print("Player Error: \(event.errorComment ?? "<no comment>")")
|
||||
#endif
|
||||
}
|
||||
}
|
||||
})
|
||||
item.addObserver(self, forKeyPath: "presentationSize", options: [], context: nil)
|
||||
}
|
||||
|
||||
if let playerItem = self.playerItem {
|
||||
playerItem.addObserver(self, forKeyPath: "playbackBufferEmpty", options: .new, context: nil)
|
||||
playerItem.addObserver(self, forKeyPath: "playbackLikelyToKeepUp", options: .new, context: nil)
|
||||
playerItem.addObserver(self, forKeyPath: "playbackBufferFull", options: .new, context: nil)
|
||||
playerItem.addObserver(self, forKeyPath: "status", options: .new, context: nil)
|
||||
self.playerItemFailedToPlayToEndTimeObserver = NotificationCenter.default.addObserver(forName: NSNotification.Name.AVPlayerItemFailedToPlayToEndTime, object: playerItem, queue: OperationQueue.main, using: { [weak self] _ in
|
||||
guard let self else {
|
||||
return
|
||||
}
|
||||
let _ = self
|
||||
})
|
||||
}
|
||||
|
||||
self.player?.replaceCurrentItem(with: self.playerItem)
|
||||
}
|
||||
|
||||
private func updateStatus() {
|
||||
guard let player = self.player else {
|
||||
return
|
||||
}
|
||||
let isPlaying = !player.rate.isZero
|
||||
let status: MediaPlayerPlaybackStatus
|
||||
if self.isBuffering {
|
||||
status = .buffering(initial: false, whilePlaying: isPlaying, progress: 0.0, display: true)
|
||||
} else {
|
||||
status = isPlaying ? .playing : .paused
|
||||
}
|
||||
var timestamp = player.currentTime().seconds
|
||||
if timestamp.isFinite && !timestamp.isNaN {
|
||||
} else {
|
||||
timestamp = 0.0
|
||||
}
|
||||
self.statusValue = MediaPlayerStatus(generationTimestamp: CACurrentMediaTime(), duration: Double(self.approximateDuration), dimensions: CGSize(), timestamp: timestamp, baseRate: self.baseRate, seekId: self.seekId, status: status, soundEnabled: true)
|
||||
self._status.set(self.statusValue)
|
||||
|
||||
if case .playing = status {
|
||||
if self.statusTimer == nil {
|
||||
self.statusTimer = Foundation.Timer.scheduledTimer(withTimeInterval: 1.0 / 30.0, repeats: true, block: { [weak self] _ in
|
||||
guard let self else {
|
||||
return
|
||||
}
|
||||
self.updateStatus()
|
||||
})
|
||||
}
|
||||
} else if let statusTimer = self.statusTimer {
|
||||
self.statusTimer = nil
|
||||
statusTimer.invalidate()
|
||||
}
|
||||
}
|
||||
|
||||
override func observeValue(forKeyPath keyPath: String?, of object: Any?, change: [NSKeyValueChangeKey : Any]?, context: UnsafeMutableRawPointer?) {
|
||||
if keyPath == "rate" {
|
||||
if let player = self.player {
|
||||
let isPlaying = !player.rate.isZero
|
||||
if isPlaying {
|
||||
self.isBuffering = false
|
||||
}
|
||||
}
|
||||
self.updateStatus()
|
||||
} else if keyPath == "playbackBufferEmpty" {
|
||||
self.isBuffering = true
|
||||
self.updateStatus()
|
||||
} else if keyPath == "playbackLikelyToKeepUp" || keyPath == "playbackBufferFull" {
|
||||
self.isBuffering = false
|
||||
self.updateStatus()
|
||||
} else if keyPath == "presentationSize" {
|
||||
if let currentItem = self.player?.currentItem {
|
||||
print("Presentation size: \(Int(currentItem.presentationSize.height))")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private func performActionAtEnd() {
|
||||
for listener in self.playbackCompletedListeners.copyItems() {
|
||||
listener()
|
||||
}
|
||||
}
|
||||
|
||||
/// Lays out the player node (centered and scaled to the target width) and
/// the thumbnail image node for the given size.
func updateLayout(size: CGSize, actualSize: CGSize, transition: ContainedViewLayoutTransition) {
    // The player node keeps its intrinsic size and is transform-scaled to fit.
    transition.updatePosition(node: self.playerNode, position: CGPoint(x: size.width / 2.0, y: size.height / 2.0))
    transition.updateTransformScale(node: self.playerNode, scale: size.width / self.intrinsicDimensions.width)
    
    transition.updateFrame(node: self.imageNode, frame: CGRect(origin: CGPoint(), size: size))
    
    if let dimensions = self.dimensions {
        // The placeholder image is rendered at half the media resolution.
        let imageSize = CGSize(width: floor(dimensions.width / 2.0), height: floor(dimensions.height / 2.0))
        let makeLayout = self.imageNode.asyncLayout()
        let applyLayout = makeLayout(TransformImageArguments(corners: ImageCorners(), imageSize: imageSize, boundingSize: imageSize, intrinsicInsets: UIEdgeInsets(), emptyColor: .clear))
        applyLayout()
    }
}
|
||||
|
||||
/// Starts playback. When the player is audible (volume != 0) an audio
/// session is acquired first and the actual `play()` happens once the
/// session activates; muted playback starts immediately.
/// Must be called on the main queue.
func play() {
    assert(Queue.mainQueue().isCurrent())
    if !self.initializedStatus {
        // Publish an initial "buffering" status before the first real one.
        // NOTE(review): `initializedStatus` is not set to true here;
        // presumably it is flipped elsewhere in the class — confirm.
        self._status.set(MediaPlayerStatus(generationTimestamp: 0.0, duration: Double(self.approximateDuration), dimensions: CGSize(), timestamp: 0.0, baseRate: self.baseRate, seekId: self.seekId, status: .buffering(initial: true, whilePlaying: true, progress: 0.0, display: true), soundEnabled: true))
    }
    if !self.hasAudioSession {
        if self.player?.volume != 0.0 {
            // Audible: defer playback until the audio session is active.
            self.audioSessionDisposable.set(self.audioSessionManager.push(audioSessionType: .play(mixWithOthers: false), activate: { [weak self] _ in
                guard let self else {
                    return
                }
                self.hasAudioSession = true
                self.player?.play()
            }, deactivate: { [weak self] _ in
                // Session was taken away: pause rather than play silently.
                guard let self else {
                    return .complete()
                }
                self.hasAudioSession = false
                self.player?.pause()
                
                return .complete()
            }))
        } else {
            // Muted playback does not require an audio session.
            self.player?.play()
        }
    } else {
        self.player?.play()
    }
}
|
||||
|
||||
/// Pauses the underlying player, if any. Must be called on the main queue.
func pause() {
    assert(Queue.mainQueue().isCurrent())
    guard let player = self.player else {
        return
    }
    player.pause()
}
|
||||
|
||||
/// Toggles playback based on the player's current rate: a paused player
/// (rate == 0) is started, a playing one is paused.
/// Must be called on the main queue.
func togglePlayPause() {
    assert(Queue.mainQueue().isCurrent())
    
    guard let player = self.player else {
        return
    }
    
    if !player.rate.isZero {
        self.pause()
    } else {
        self.play()
    }
}
|
||||
|
||||
/// Enables or disables audio output. Enabling acquires an audio session
/// and restores volume once it activates; disabling mutes immediately and
/// releases the session. Must be called on the main queue.
func setSoundEnabled(_ value: Bool) {
    assert(Queue.mainQueue().isCurrent())
    if value {
        if !self.hasAudioSession {
            self.audioSessionDisposable.set(self.audioSessionManager.push(audioSessionType: .play(mixWithOthers: false), activate: { [weak self] _ in
                self?.hasAudioSession = true
                self?.player?.volume = 1.0
            }, deactivate: { [weak self] _ in
                // Session lost to another client: pause instead of playing
                // silently.
                self?.hasAudioSession = false
                self?.player?.pause()
                return .complete()
            }))
        }
    } else {
        self.player?.volume = 0.0
        self.hasAudioSession = false
        self.audioSessionDisposable.set(nil)
    }
}
|
||||
|
||||
/// Seeks to `timestamp` (seconds) and bumps `seekId` so status observers
/// can distinguish pre-seek from post-seek values.
/// NOTE(review): preferredTimescale 30 limits seek precision to 1/30 s —
/// confirm that is intentional.
func seek(_ timestamp: Double) {
    assert(Queue.mainQueue().isCurrent())
    self.seekId += 1
    self.player?.seek(to: CMTime(seconds: timestamp, preferredTimescale: 30))
}
|
||||
|
||||
/// Unmutes the player and starts playback.
/// NOTE(review): `playAndRecord`, `seek` and `actionAtEnd` are ignored by
/// this implementation — confirm callers do not rely on them here.
func playOnceWithSound(playAndRecord: Bool, seek: MediaPlayerSeek, actionAtEnd: MediaPlayerPlayOnceWithSoundActionAtEnd) {
    self.player?.volume = 1.0
    self.play()
}

/// Mutes or unmutes output without touching the audio session or the
/// play/pause state.
func setSoundMuted(soundMuted: Bool) {
    self.player?.volume = soundMuted ? 0.0 : 1.0
}
|
||||
|
||||
/// No-op in this implementation.
func continueWithOverridingAmbientMode(isAmbient: Bool) {
}

/// No-op in this implementation.
func setForceAudioToSpeaker(_ forceAudioToSpeaker: Bool) {
}

/// Keeps playback going but muted, dropping the audio session.
/// NOTE(review): `actionAtEnd` is ignored by this implementation.
func continuePlayingWithoutSound(actionAtEnd: MediaPlayerPlayOnceWithSoundActionAtEnd) {
    self.player?.volume = 0.0
    self.hasAudioSession = false
    self.audioSessionDisposable.set(nil)
}

/// No-op in this implementation.
func setContinuePlayingWithoutSoundOnLostAudioSession(_ value: Bool) {
}
|
||||
|
||||
/// Updates the base playback rate, applying it immediately when the player
/// is currently playing, and republishes the status.
func setBaseRate(_ baseRate: Double) {
    guard let player = self.player else {
        return
    }
    self.baseRate = baseRate
    
    let rate = Float(baseRate)
    if #available(iOS 16.0, *) {
        // Keeps the rate across automatic pauses/resumes (stalls, PiP).
        player.defaultRate = rate
    }
    if player.rate != 0.0 {
        player.rate = rate
    }
    self.updateStatus()
}
|
||||
|
||||
/// Records the preferred quality and maps it to AVPlayerItem's
/// `preferredPeakBitRate`: 0 for automatic selection, otherwise an average
/// bitrate estimated from the quality file's size and duration.
func setVideoQuality(_ videoQuality: UniversalVideoContentVideoQuality) {
    self.preferredVideoQuality = videoQuality
    
    guard let currentItem = self.player?.currentItem, let playerSource = self.playerSource else {
        return
    }
    
    switch videoQuality {
    case .auto:
        currentItem.preferredPeakBitRate = 0.0
    case let .quality(qualityValue):
        guard let file = playerSource.qualityFiles[qualityValue],
              let size = file.media.size,
              let duration = file.media.duration,
              duration != 0.0 else {
            break
        }
        // Average bandwidth in bits per second: bytes / seconds * 8.
        let bandwidth = Int(Double(size) / duration) * 8
        currentItem.preferredPeakBitRate = Double(bandwidth)
    }
}
|
||||
|
||||
/// Returns the current rendition height, the user's preferred quality, and
/// the available quality heights in descending order; nil when no item or
/// source is available.
func videoQualityState() -> (current: Int, preferred: UniversalVideoContentVideoQuality, available: [Int])? {
    guard let currentItem = self.player?.currentItem, let playerSource = self.playerSource else {
        return nil
    }
    let availableHeights = playerSource.qualityFiles.keys.sorted(by: >)
    return (Int(currentItem.presentationSize.height), self.preferredVideoQuality, availableHeights)
}
|
||||
|
||||
/// Registers a playback-completion listener; returns an index usable with
/// `removePlaybackCompleted(_:)`.
func addPlaybackCompleted(_ f: @escaping () -> Void) -> Int {
    return self.playbackCompletedListeners.add(f)
}

/// Unregisters a listener previously returned by `addPlaybackCompleted(_:)`.
func removePlaybackCompleted(_ index: Int) {
    self.playbackCompletedListeners.remove(index)
}
|
||||
|
||||
/// No-op: this node does not manage resource fetching.
func fetchControl(_ control: UniversalVideoNodeFetchControl) {
}

/// No-op in this implementation.
func notifyPlaybackControlsHidden(_ hidden: Bool) {
}

/// No-op in this implementation.
func setCanPlaybackWithoutHierarchy(_ canPlaybackWithoutHierarchy: Bool) {
}

/// Native Picture-in-Picture is not supported by this node; returning
/// false tells the caller to fall back to its own PiP implementation.
func enterNativePictureInPicture() -> Bool {
    return false
}

/// No-op: see `enterNativePictureInPicture()`.
func exitNativePictureInPicture() {
}
|
||||
}
|
File diff suppressed because it is too large
Load Diff
@ -0,0 +1,777 @@
|
||||
import Foundation
|
||||
import SwiftSignalKit
|
||||
import UniversalMediaPlayer
|
||||
import Postbox
|
||||
import TelegramCore
|
||||
import WebKit
|
||||
import AsyncDisplayKit
|
||||
import AccountContext
|
||||
import TelegramAudio
|
||||
import Display
|
||||
import PhotoResources
|
||||
import TelegramVoip
|
||||
import RangeSet
|
||||
|
||||
/// Parses an HTTP `Range` header value of the form "bytes=<start>-<end>"
/// (inclusive end, per RFC 7233) into a half-open Swift `Range`.
/// - Returns: `start..<(end + 1)`, or nil for unsupported/malformed input
///   (missing "bytes=" prefix, open-ended or multi-range requests,
///   non-numeric bounds, or a descending range).
private func parseRange(from rangeString: String) -> Range<Int>? {
    guard rangeString.hasPrefix("bytes=") else {
        return nil
    }
    
    let rangeValues = rangeString.dropFirst("bytes=".count).split(separator: "-")
    
    // Fix: previously a descending range such as "bytes=5-2" produced
    // `5..<3`, which traps at runtime when constructing the Range.
    guard rangeValues.count == 2,
          let start = Int(rangeValues[0]),
          let end = Int(rangeValues[1]),
          start >= 0,
          end >= start else {
        return nil
    }
    return start..<(end + 1)
}
|
||||
|
||||
/// Serves the custom "tghls" URL scheme to the web view: the master
/// playlist, per-quality playlists, and byte ranges of the underlying
/// media part-files, all backed by an `HLSServerSource`.
private final class CustomVideoSchemeHandler: NSObject, WKURLSchemeHandler {
    private final class PendingTask {
        let sourceTask: any WKURLSchemeTask
        let isCompleted = Atomic<Bool>(value: false)
        var disposable: Disposable?
        
        init(source: HLSServerSource, sourceTask: any WKURLSchemeTask) {
            self.sourceTask = sourceTask
            
            var requestRange: Range<Int>?
            if let rangeString = sourceTask.request.allHTTPHeaderFields?["Range"] {
                requestRange = parseRange(from: rangeString)
            }
            
            guard let url = sourceTask.request.url else {
                return
            }
            let filePath = (url.absoluteString as NSString).lastPathComponent
            
            if filePath == "master.m3u8" {
                self.disposable = source.masterPlaylistData().startStrict(next: { [weak self] data in
                    guard let self else {
                        return
                    }
                    self.sendResponseAndClose(data: data.data(using: .utf8)!)
                })
            } else if filePath.hasPrefix("hls_level_") && filePath.hasSuffix(".m3u8") {
                // "hls_level_<index>.m3u8" -> per-quality playlist.
                guard let levelIndex = Int(String(filePath[filePath.index(filePath.startIndex, offsetBy: "hls_level_".count) ..< filePath.index(filePath.endIndex, offsetBy: -".m3u8".count)])) else {
                    self.sendErrorAndClose()
                    return
                }
                
                self.disposable = source.playlistData(quality: levelIndex).startStrict(next: { [weak self] data in
                    guard let self else {
                        return
                    }
                    self.sendResponseAndClose(data: data.data(using: .utf8)!)
                })
            } else if filePath.hasPrefix("partfile") && filePath.hasSuffix(".mp4") {
                // "partfile<fileId>.mp4" -> raw media bytes; requires a Range header.
                let fileId = String(filePath[filePath.index(filePath.startIndex, offsetBy: "partfile".count) ..< filePath.index(filePath.endIndex, offsetBy: -".mp4".count)])
                guard let fileIdValue = Int64(fileId) else {
                    self.sendErrorAndClose()
                    return
                }
                guard let requestRange else {
                    self.sendErrorAndClose()
                    return
                }
                // NOTE(review): `requestRange` is already half-open, so
                // `upperBound + 1` requests one extra byte from the source.
                // The served bytes come from the `range` the source returns,
                // so this is preserved as-is — confirm against
                // HLSServerSource.fileData's range convention.
                self.disposable = (source.fileData(id: fileIdValue, range: requestRange.lowerBound ..< requestRange.upperBound + 1)
                |> take(1)).start(next: { [weak self] result in
                    guard let self else {
                        return
                    }
                    
                    if let (file, range, totalSize) = result {
                        guard let allData = try? Data(contentsOf: URL(fileURLWithPath: file.path), options: .mappedIfSafe) else {
                            return
                        }
                        let data = allData.subdata(in: range)
                        
                        self.sendResponseAndClose(data: data, range: requestRange, totalSize: totalSize)
                    } else {
                        self.sendErrorAndClose()
                    }
                })
            } else {
                self.sendErrorAndClose()
            }
        }
        
        deinit {
            self.disposable?.dispose()
        }
        
        func cancel() {
            // Fix: actually stop the in-flight data production when WebKit
            // cancels the task (previously a no-op, leaving the disposable
            // running until deinit).
            self.disposable?.dispose()
            self.disposable = nil
        }
        
        func sendErrorAndClose() {
            self.sourceTask.didFailWithError(NSError(domain: "LocalVideoError", code: 500, userInfo: nil))
        }
        
        /// Sends response headers, the payload, and completes the task.
        /// When `range`/`totalSize` are provided the response is a partial
        /// one and is marked accordingly.
        private func sendResponseAndClose(data: Data, range: Range<Int>? = nil, totalSize: Int? = nil) {
            let responseLength = data.count
            
            var headers: [String: String] = [
                "Content-Length": "\(responseLength)",
                "Connection": "close",
                "Access-Control-Allow-Origin": "*"
            ]
            
            // Fix: partial responses must use status 206, and Content-Range's
            // last-byte position is inclusive (range.upperBound is exclusive).
            // Previously every response was 200 and the header over-reported
            // the end by one byte.
            var statusCode = 200
            if let range = range, let totalSize = totalSize {
                headers["Content-Range"] = "bytes \(range.lowerBound)-\(range.upperBound - 1)/\(totalSize)"
                statusCode = 206
            }
            
            let response = HTTPURLResponse(url: self.sourceTask.request.url!,
                                           statusCode: statusCode,
                                           httpVersion: "HTTP/1.1",
                                           headerFields: headers)
            
            self.sourceTask.didReceive(response!)
            self.sourceTask.didReceive(data)
            self.sourceTask.didFinish()
        }
    }
    
    private let source: HLSServerSource
    private var pendingTasks: [PendingTask] = []
    
    init(source: HLSServerSource) {
        self.source = source
    }
    
    func webView(_ webView: WKWebView, start urlSchemeTask: any WKURLSchemeTask) {
        self.pendingTasks.append(PendingTask(source: self.source, sourceTask: urlSchemeTask))
    }
    
    func webView(_ webView: WKWebView, stop urlSchemeTask: any WKURLSchemeTask) {
        if let index = self.pendingTasks.firstIndex(where: { $0.sourceTask === urlSchemeTask }) {
            let task = self.pendingTasks[index]
            self.pendingTasks.remove(at: index)
            task.cancel()
        }
    }
}
|
||||
|
||||
/// Forwards WKScriptMessage callbacks to a closure, so the
/// WKUserContentController (which strongly retains its handlers) does not
/// retain the real target.
private class WeakScriptMessageHandler: NSObject, WKScriptMessageHandler {
    private let handler: (WKScriptMessage) -> Void
    
    init(_ handler: @escaping (WKScriptMessage) -> Void) {
        self.handler = handler
        
        super.init()
    }
    
    func userContentController(_ controller: WKUserContentController, didReceive scriptMessage: WKScriptMessage) {
        self.handler(scriptMessage)
    }
}
|
||||
|
||||
final class HLSVideoJSContentNode: ASDisplayNode, UniversalVideoContentNode {
|
||||
/// One HLS quality level as reported by the JS player.
/// The explicit initializer was removed: it duplicated the compiler's
/// synthesized memberwise init exactly (same signature and behavior).
private struct Level {
    /// Average bandwidth of the level, in bits per second.
    let bitrate: Int
    /// Video width in pixels.
    let width: Int
    /// Video height in pixels (used as the user-facing "quality" value).
    let height: Int
}
|
||||
|
||||
// Last bandwidth estimate reported by any instance's JS player; used to seed
// newly created players (see `playerInitialize` in init).
private static var sharedBandwidthEstimate: Double?

private let postbox: Postbox
private let userLocation: MediaResourceUserLocation
private let fileReference: FileMediaReference
// Duration taken from the file metadata (0 when unknown).
private let approximateDuration: Double
// Size of the web-view-backed player, in points (computed in init).
private let intrinsicDimensions: CGSize

private let audioSessionManager: ManagedAudioSession
private let audioSessionDisposable = MetaDisposable()
private var hasAudioSession = false

// Local source serving HLS playlists and media parts to the web view.
private let playerSource: HLSServerSource?
private var serverDisposable: Disposable?

private let playbackCompletedListeners = Bag<() -> Void>()

// NOTE(review): never set to true in the visible code — confirm whether the
// initial-buffering status in play() is meant to be published on every call.
private var initializedStatus = false
private var statusValue = MediaPlayerStatus(generationTimestamp: 0.0, duration: 0.0, dimensions: CGSize(), timestamp: 0.0, baseRate: 1.0, seekId: 0, status: .paused, soundEnabled: true)
private var isBuffering = false
// Incremented on every seek so observers can discard stale timestamps.
private var seekId: Int = 0
private let _status = ValuePromise<MediaPlayerStatus>()
var status: Signal<MediaPlayerStatus, NoError> {
    return self._status.get()
}

private let _bufferingStatus = Promise<(RangeSet<Int64>, Int64)?>()
var bufferingStatus: Signal<(RangeSet<Int64>, Int64)?, NoError> {
    return self._bufferingStatus.get()
}

// Mirrors the JS player's picture-in-picture state ("playerStatus" event).
private let _isNativePictureInPictureActive = ValuePromise<Bool>(false, ignoreRepeated: true)
var isNativePictureInPictureActive: Signal<Bool, NoError> {
    return self._isNativePictureInPictureActive.get()
}

private let _ready = Promise<Void>()
var ready: Signal<Void, NoError> {
    return self._ready.get()
}

private let _preloadCompleted = ValuePromise<Bool>()
var preloadCompleted: Signal<Bool, NoError> {
    return self._preloadCompleted.get()
}

// Placeholder thumbnail shown until the web player has a first frame.
private let imageNode: TransformImageNode
private let webView: WKWebView

private let fetchDisposable = MetaDisposable()

private var dimensions: CGSize?
private let dimensionsPromise = ValuePromise<CGSize>(CGSize())

private var validLayout: (size: CGSize, actualSize: CGSize)?

// Drives 30 Hz status republishing while playing (see updateStatus()).
private var statusTimer: Foundation.Timer?

private var preferredVideoQuality: UniversalVideoContentVideoQuality = .auto

// State mirrored from the JS player via "playerStatus" / "playerCurrentTime"
// script messages.
private var playerIsReady: Bool = false
private var playerIsFirstFrameReady: Bool = false
private var playerIsPlaying: Bool = false
private var playerRate: Double = 0.0
private var playerDefaultRate: Double = 1.0
private var playerTime: Double = 0.0
private var playerTimeGenerationTimestamp: Double = 0.0
private var playerAvailableLevels: [Int: Level] = [:]
private var playerCurrentLevelIndex: Int?

private var hasRequestedPlayerLoad: Bool = false

// Desired state requested from the native side; applied to the JS player
// whenever it (re)reports readiness.
private var requestedPlaying: Bool = false
private var requestedBaseRate: Double = 1.0
private var requestedLevelIndex: Int?
|
||||
/// Builds the web-view-based HLS player: configures a WKWebView with a
/// custom scheme handler backed by `HLSServerSource`, injects the
/// `playerInitialize` bootstrap script, wires script-message callbacks
/// that mirror the JS player's state into this node, and registers with
/// the shared local HLS server.
/// NOTE(review): `streamVideo`, `loopVideo`, `enableSound` and
/// `fetchAutomatically` are currently unused by this implementation —
/// confirm intent.
init(accountId: AccountRecordId, postbox: Postbox, audioSessionManager: ManagedAudioSession, userLocation: MediaResourceUserLocation, fileReference: FileMediaReference, streamVideo: Bool, loopVideo: Bool, enableSound: Bool, baseRate: Double, fetchAutomatically: Bool) {
    self.postbox = postbox
    self.fileReference = fileReference
    self.approximateDuration = fileReference.media.duration ?? 0.0
    self.audioSessionManager = audioSessionManager
    self.userLocation = userLocation
    self.requestedBaseRate = baseRate
    
    // Disabled experiment: pre-fetch the head of the minimized quality file.
    /*#if DEBUG
    if let minimizedQualityFile = HLSVideoContent.minimizedHLSQualityFile(file: self.fileReference) {
        let _ = fetchedMediaResource(mediaBox: postbox.mediaBox, userLocation: userLocation, userContentType: .video, reference: minimizedQualityFile.resourceReference(minimizedQualityFile.media.resource), range: (0 ..< 5 * 1024 * 1024, .default)).startStandalone()
    }
    #endif*/
    
    // Determine placeholder dimensions; if the thumbnail's orientation
    // disagrees with the reported dimensions, trust the thumbnail and swap.
    if var dimensions = fileReference.media.dimensions {
        if let thumbnail = fileReference.media.previewRepresentations.first {
            let dimensionsVertical = dimensions.width < dimensions.height
            let thumbnailVertical = thumbnail.dimensions.width < thumbnail.dimensions.height
            if dimensionsVertical != thumbnailVertical {
                dimensions = PixelDimensions(width: dimensions.height, height: dimensions.width)
            }
        }
        self.dimensions = dimensions.cgSize
    } else {
        self.dimensions = CGSize(width: 128.0, height: 128.0)
    }
    
    self.imageNode = TransformImageNode()
    
    // Web view configuration: inline autoplaying media plus native PiP.
    let config = WKWebViewConfiguration()
    config.allowsInlineMediaPlayback = true
    config.mediaTypesRequiringUserActionForPlayback = []
    config.allowsPictureInPictureMediaPlayback = true
    
    // Route "tghls" requests from the page to the local HLS source.
    var playerSource: HLSServerSource?
    if let qualitySet = HLSQualitySet(baseFile: fileReference) {
        let playerSourceValue = HLSServerSource(accountId: accountId.int64, fileId: fileReference.media.fileId.id, postbox: postbox, userLocation: userLocation, playlistFiles: qualitySet.playlistFiles, qualityFiles: qualitySet.qualityFiles)
        playerSource = playerSourceValue
        let schemeHandler = CustomVideoSchemeHandler(source: playerSourceValue)
        config.setURLSchemeHandler(schemeHandler, forURLScheme: "tghls")
    }
    self.playerSource = playerSource
    
    let userController = WKUserContentController()
    
    // Indirection so the closure (assigned after super.init) can be
    // registered before `self` is fully available.
    var handleScriptMessage: ((WKScriptMessage) -> Void)?
    userController.add(WeakScriptMessageHandler { message in
        handleScriptMessage?(message)
    }, name: "performAction")
    
    let isDebug: Bool
    #if DEBUG
    isDebug = true
    #else
    isDebug = false
    #endif
    
    let mediaDimensions = fileReference.media.dimensions?.cgSize ?? CGSize(width: 480.0, height: 320.0)
    var intrinsicDimensions = mediaDimensions.aspectFittedOrSmaller(CGSize(width: 1280.0, height: 1280.0))
    
    // Bootstrap script: runs after the page loads and configures the JS
    // player with pixel dimensions and the last known bandwidth estimate.
    let userScriptJs = """
    playerInitialize({
    'debug': \(isDebug),
    'width': \(Int(intrinsicDimensions.width)),
    'height': \(Int(intrinsicDimensions.height)),
    'bandwidthEstimate': \(HLSVideoJSContentNode.sharedBandwidthEstimate ?? 500000.0)
    });
    """;
    let userScript = WKUserScript(source: userScriptJs, injectionTime: .atDocumentEnd, forMainFrameOnly: true)
    userController.addUserScript(userScript)
    
    config.userContentController = userController
    
    // Convert pixel dimensions to points for layout.
    intrinsicDimensions.width = floor(intrinsicDimensions.width / UIScreenScale)
    intrinsicDimensions.height = floor(intrinsicDimensions.height / UIScreenScale)
    self.intrinsicDimensions = intrinsicDimensions
    
    self.webView = WKWebView(frame: CGRect(origin: CGPoint(), size: self.intrinsicDimensions), configuration: config)
    self.webView.scrollView.isScrollEnabled = false
    self.webView.allowsLinkPreview = false
    self.webView.allowsBackForwardNavigationGestures = false
    self.webView.accessibilityIgnoresInvertColors = true
    self.webView.scrollView.contentInsetAdjustmentBehavior = .never
    // Hidden until the first frame is ready (see "playerStatus" handling).
    self.webView.alpha = 0.0
    
    if #available(iOS 16.4, *) {
    #if DEBUG
    self.webView.isInspectable = true
    #endif
    }
    
    super.init()
    
    // Load the thumbnail; capture late-arriving dimensions and relayout.
    self.imageNode.setSignal(internalMediaGridMessageVideo(postbox: postbox, userLocation: self.userLocation, videoReference: fileReference) |> map { [weak self] getSize, getData in
        Queue.mainQueue().async {
            if let strongSelf = self, strongSelf.dimensions == nil {
                if let dimensions = getSize() {
                    strongSelf.dimensions = dimensions
                    strongSelf.dimensionsPromise.set(dimensions)
                    if let validLayout = strongSelf.validLayout {
                        strongSelf.updateLayout(size: validLayout.size, actualSize: validLayout.actualSize, transition: .immediate)
                    }
                }
            }
        }
        return getData
    })
    
    self.addSubnode(self.imageNode)
    self.view.addSubview(self.webView)
    
    self.imageNode.imageUpdated = { [weak self] _ in
        self?._ready.set(.single(Void()))
    }
    
    // Buffering ranges are not reported by this implementation.
    self._bufferingStatus.set(.single(nil))
    
    // Mirror JS player events into native state (always on the main queue).
    handleScriptMessage = { [weak self] message in
        Queue.mainQueue().async {
            guard let self else {
                return
            }
            guard let body = message.body as? [String: Any] else {
                return
            }
            guard let eventName = body["event"] as? String else {
                return
            }
            
            switch eventName {
            case "playerStatus":
                guard let eventData = body["data"] as? [String: Any] else {
                    return
                }
                // Missing fields reset to their conservative defaults.
                if let isReady = eventData["isReady"] as? Bool {
                    self.playerIsReady = isReady
                } else {
                    self.playerIsReady = false
                }
                if let isFirstFrameReady = eventData["isFirstFrameReady"] as? Bool {
                    self.playerIsFirstFrameReady = isFirstFrameReady
                } else {
                    self.playerIsFirstFrameReady = false
                }
                if let isPlaying = eventData["isPlaying"] as? Bool {
                    self.playerIsPlaying = isPlaying
                } else {
                    self.playerIsPlaying = false
                }
                if let rate = eventData["rate"] as? Double {
                    self.playerRate = rate
                } else {
                    self.playerRate = 0.0
                }
                if let defaultRate = eventData["defaultRate"] as? Double {
                    self.playerDefaultRate = defaultRate
                } else {
                    self.playerDefaultRate = 0.0
                }
                if let levels = eventData["levels"] as? [[String: Any]] {
                    self.playerAvailableLevels.removeAll()
                    
                    for level in levels {
                        guard let levelIndex = level["index"] as? Int else {
                            continue
                        }
                        guard let levelBitrate = level["bitrate"] as? Int else {
                            continue
                        }
                        guard let levelWidth = level["width"] as? Int else {
                            continue
                        }
                        guard let levelHeight = level["height"] as? Int else {
                            continue
                        }
                        self.playerAvailableLevels[levelIndex] = Level(
                            bitrate: levelBitrate,
                            width: levelWidth,
                            height: levelHeight
                        )
                    }
                } else {
                    self.playerAvailableLevels.removeAll()
                }
                
                self._isNativePictureInPictureActive.set(eventData["isPictureInPictureActive"] as? Bool ?? false)
                
                if let currentLevel = eventData["currentLevel"] as? Int {
                    if self.playerAvailableLevels[currentLevel] != nil {
                        self.playerCurrentLevelIndex = currentLevel
                    } else {
                        self.playerCurrentLevelIndex = nil
                    }
                } else {
                    self.playerCurrentLevelIndex = nil
                }
                
                // Reveal the web view only once a frame has been rendered.
                self.webView.alpha = self.playerIsFirstFrameReady ? 1.0 : 0.0
                if self.playerIsReady {
                    // First readiness: pick an initial level (prefer the one
                    // matching the "minimized" quality file's height, else
                    // the highest) and ask the player to load it.
                    if !self.hasRequestedPlayerLoad {
                        if !self.playerAvailableLevels.isEmpty {
                            var selectedLevelIndex: Int?
                            if let minimizedQualityFile = HLSVideoContent.minimizedHLSQuality(file: self.fileReference)?.file {
                                if let dimensions = minimizedQualityFile.media.dimensions {
                                    for (index, level) in self.playerAvailableLevels {
                                        if level.height == Int(dimensions.height) {
                                            selectedLevelIndex = index
                                            break
                                        }
                                    }
                                }
                            }
                            if selectedLevelIndex == nil {
                                selectedLevelIndex = self.playerAvailableLevels.sorted(by: { $0.value.height > $1.value.height }).first?.key
                            }
                            if let selectedLevelIndex {
                                self.hasRequestedPlayerLoad = true
                                self.webView.evaluateJavaScript("playerLoad(\(selectedLevelIndex));", completionHandler: nil)
                            }
                        }
                    }
                    
                    // Re-apply the natively requested rate and play state.
                    self.webView.evaluateJavaScript("playerSetBaseRate(\(self.requestedBaseRate));", completionHandler: nil)
                    
                    if self.requestedPlaying {
                        self.requestPlay()
                    } else {
                        self.requestPause()
                    }
                }
                
                self.updateStatus()
            case "playerCurrentTime":
                guard let eventData = body["data"] as? [String: Any] else {
                    return
                }
                guard let value = eventData["value"] as? Double else {
                    return
                }
                
                self.playerTime = value
                self.playerTimeGenerationTimestamp = CACurrentMediaTime()
                
                // Persist a sanitized bandwidth estimate for future players.
                var bandwidthEstimate = eventData["bandwidthEstimate"] as? Double
                if let bandwidthEstimateValue = bandwidthEstimate, bandwidthEstimateValue.isNaN || bandwidthEstimateValue.isInfinite {
                    bandwidthEstimate = nil
                }
                
                HLSVideoJSContentNode.sharedBandwidthEstimate = bandwidthEstimate
                
                self.updateStatus()
            default:
                break
            }
        }
    }
    
    // Register with the shared local HLS server, then load the player page.
    if let playerSource = self.playerSource {
        self.serverDisposable = SharedHLSServer.shared.registerPlayer(source: playerSource, completion: { [weak self] in
            Queue.mainQueue().async {
                guard let self else {
                    return
                }
                
                let htmlUrl = "http://127.0.0.1:\(SharedHLSServer.shared.port)/\(playerSource.id)/index.html"
                self.webView.load(URLRequest(url: URL(string: htmlUrl)!))
            }
        })
    }
}
|
||||
|
||||
deinit {
    // Unregister from the local HLS server and release the audio session.
    self.serverDisposable?.dispose()
    self.audioSessionDisposable.dispose()
    
    // Stop the 30 Hz status timer (it only holds self weakly, but would
    // keep firing otherwise).
    self.statusTimer?.invalidate()
}
|
||||
|
||||
/// Recomputes the externally visible `MediaPlayerStatus` from the mirrored
/// JS player state and publishes it via `_status`. While the status is
/// `.playing`, a repeating 30 Hz timer keeps republishing so observers can
/// extrapolate the timestamp; the timer is torn down otherwise.
private func updateStatus() {
    // "Playing" requires both a native play request and a non-zero reported
    // rate; requested-but-stalled playback is surfaced as buffering.
    let isPlaying = self.requestedPlaying && self.playerRate != 0.0
    let status: MediaPlayerPlaybackStatus
    if self.requestedPlaying && !isPlaying {
        status = .buffering(initial: false, whilePlaying: self.requestedPlaying, progress: 0.0, display: true)
    } else {
        status = self.requestedPlaying ? .playing : .paused
    }
    
    // Guard against NaN/infinite times coming from the JS side.
    // (Fix: the original used an empty `if` branch with the redundant
    // condition `isFinite && !isNaN` — `isFinite` already excludes NaN.)
    var timestamp = self.playerTime
    if !timestamp.isFinite {
        timestamp = 0.0
    }
    
    self.statusValue = MediaPlayerStatus(generationTimestamp: self.playerTimeGenerationTimestamp, duration: Double(self.approximateDuration), dimensions: CGSize(), timestamp: timestamp, baseRate: self.requestedBaseRate, seekId: self.seekId, status: status, soundEnabled: true)
    self._status.set(self.statusValue)
    
    if case .playing = status {
        if self.statusTimer == nil {
            self.statusTimer = Foundation.Timer.scheduledTimer(withTimeInterval: 1.0 / 30.0, repeats: true, block: { [weak self] _ in
                guard let self else {
                    return
                }
                self.updateStatus()
            })
        }
    } else if let statusTimer = self.statusTimer {
        self.statusTimer = nil
        statusTimer.invalidate()
    }
}
|
||||
|
||||
/// Notifies every registered playback-completion listener.
private func performActionAtEnd() {
    self.playbackCompletedListeners.copyItems().forEach { $0() }
}
|
||||
|
||||
/// Lays out the web view (centered and transform-scaled to the target
/// width) and the thumbnail image node for the given size.
func updateLayout(size: CGSize, actualSize: CGSize, transition: ContainedViewLayoutTransition) {
    // The web view keeps its intrinsic size and is scaled to fit.
    transition.updatePosition(layer: self.webView.layer, position: CGPoint(x: size.width / 2.0, y: size.height / 2.0))
    transition.updateTransformScale(layer: self.webView.layer, scale: size.width / self.intrinsicDimensions.width)
    
    transition.updateFrame(node: self.imageNode, frame: CGRect(origin: CGPoint(), size: size))
    
    if let dimensions = self.dimensions {
        // The placeholder image is rendered at half the media resolution.
        let imageSize = CGSize(width: floor(dimensions.width / 2.0), height: floor(dimensions.height / 2.0))
        let makeLayout = self.imageNode.asyncLayout()
        let applyLayout = makeLayout(TransformImageArguments(corners: ImageCorners(), imageSize: imageSize, boundingSize: imageSize, intrinsicInsets: UIEdgeInsets(), emptyColor: .clear))
        applyLayout()
    }
}
|
||||
|
||||
/// Starts playback by forwarding a play request to the JS player.
/// Must be called on the main queue.
func play() {
    assert(Queue.mainQueue().isCurrent())
    if !self.initializedStatus {
        // Publish an initial "buffering" status before the first real one.
        // NOTE(review): `initializedStatus` is not set to true here —
        // confirm it is flipped elsewhere.
        self._status.set(MediaPlayerStatus(generationTimestamp: 0.0, duration: Double(self.approximateDuration), dimensions: CGSize(), timestamp: 0.0, baseRate: self.requestedBaseRate, seekId: self.seekId, status: .buffering(initial: true, whilePlaying: true, progress: 0.0, display: true), soundEnabled: true))
    }
    // NOTE: native audio-session acquisition is currently disabled for the
    // web-view-based player (the previous implementation is intentionally
    // commented out below); playback is requested unconditionally.
    /*if !self.hasAudioSession {
        self.audioSessionDisposable.set(self.audioSessionManager.push(audioSessionType: .play(mixWithOthers: false), activate: { [weak self] _ in
            Queue.mainQueue().async {
                guard let self else {
                    return
                }
                self.hasAudioSession = true
                self.requestPlay()
            }
        }, deactivate: { [weak self] _ in
            return Signal { subscriber in
                if let self {
                    self.hasAudioSession = false
                    self.requestPause()
                }
                
                subscriber.putCompletion()
                
                return EmptyDisposable
            }
            |> runOn(.mainQueue())
        }))
    } else*/ do {
        self.requestPlay()
    }
}
|
||||
|
||||
/// Records that playback is wanted and, if the JS player is ready,
/// forwards the request; the readiness handler in init re-applies the
/// requested state otherwise.
private func requestPlay() {
    self.requestedPlaying = true
    if self.playerIsReady {
        self.webView.evaluateJavaScript("playerPlay();", completionHandler: nil)
    }
    self.updateStatus()
}

/// Records that playback should stop and, if the JS player is ready,
/// forwards the request.
private func requestPause() {
    self.requestedPlaying = false
    if self.playerIsReady {
        self.webView.evaluateJavaScript("playerPause();", completionHandler: nil)
    }
    self.updateStatus()
}
|
||||
|
||||
/// Pauses playback. Must be called on the main queue.
func pause() {
    assert(Queue.mainQueue().isCurrent())
    self.requestPause()
}

/// Toggles playback based on the natively requested state (not the JS
/// player's reported state). Must be called on the main queue.
func togglePlayPause() {
    assert(Queue.mainQueue().isCurrent())
    
    if self.requestedPlaying {
        self.pause()
    } else {
        self.play()
    }
}
|
||||
|
||||
/// Intentionally a no-op for the web-view-based player: the previous
/// native audio-session/volume handling is kept commented out below.
func setSoundEnabled(_ value: Bool) {
    assert(Queue.mainQueue().isCurrent())
    /*if value {
        if !self.hasAudioSession {
            self.audioSessionDisposable.set(self.audioSessionManager.push(audioSessionType: .play(mixWithOthers: false), activate: { [weak self] _ in
                self?.hasAudioSession = true
                self?.player?.volume = 1.0
            }, deactivate: { [weak self] _ in
                self?.hasAudioSession = false
                self?.player?.pause()
                return .complete()
            }))
        }
    } else {
        self.player?.volume = 0.0
        self.hasAudioSession = false
        self.audioSessionDisposable.set(nil)
    }*/
}
|
||||
|
||||
/// Seeks the JS player to `timestamp` (seconds) and bumps `seekId` so
/// status observers can distinguish pre-seek from post-seek values.
/// Must be called on the main queue.
func seek(_ timestamp: Double) {
    assert(Queue.mainQueue().isCurrent())
    self.seekId += 1
    
    // Fix: interpolating NaN or ±inf produced syntactically invalid
    // JavaScript ("playerSeek(nan);"); such requests are now dropped,
    // matching the previous silent-failure outcome explicitly.
    guard timestamp.isFinite else {
        return
    }
    self.webView.evaluateJavaScript("playerSeek(\(timestamp));", completionHandler: nil)
}
|
||||
|
||||
/// Unmutes the JS player and starts playback.
/// NOTE(review): `playAndRecord`, `seek` and `actionAtEnd` are ignored by
/// this implementation — confirm callers do not rely on them here.
func playOnceWithSound(playAndRecord: Bool, seek: MediaPlayerSeek, actionAtEnd: MediaPlayerPlayOnceWithSoundActionAtEnd) {
    self.webView.evaluateJavaScript("playerSetIsMuted(false);", completionHandler: nil)
    
    self.play()
}

/// Mutes or unmutes the JS player without changing the play state.
func setSoundMuted(soundMuted: Bool) {
    self.webView.evaluateJavaScript("playerSetIsMuted(\(soundMuted));", completionHandler: nil)
}
|
||||
|
||||
/// No-op in this implementation.
func continueWithOverridingAmbientMode(isAmbient: Bool) {
}

/// No-op in this implementation.
func setForceAudioToSpeaker(_ forceAudioToSpeaker: Bool) {
}

/// Keeps playback going but muted, dropping any held audio session.
/// NOTE(review): `actionAtEnd` is ignored by this implementation.
func continuePlayingWithoutSound(actionAtEnd: MediaPlayerPlayOnceWithSoundActionAtEnd) {
    self.webView.evaluateJavaScript("playerSetIsMuted(true);", completionHandler: nil)
    self.hasAudioSession = false
    self.audioSessionDisposable.set(nil)
}

/// No-op in this implementation.
func setContinuePlayingWithoutSoundOnLostAudioSession(_ value: Bool) {
}
|
||||
|
||||
/// Stores the requested playback rate, forwards it to the JS player once
/// it has reported readiness, and republishes the status.
func setBaseRate(_ baseRate: Double) {
    self.requestedBaseRate = baseRate
    if self.playerIsReady {
        self.webView.evaluateJavaScript("playerSetBaseRate(\(baseRate));", completionHandler: nil)
    }
    self.updateStatus()
}
|
||||
|
||||
func setVideoQuality(_ videoQuality: UniversalVideoContentVideoQuality) {
|
||||
self.preferredVideoQuality = videoQuality
|
||||
|
||||
switch videoQuality {
|
||||
case .auto:
|
||||
self.requestedLevelIndex = nil
|
||||
case let .quality(quality):
|
||||
if let level = self.playerAvailableLevels.first(where: { $0.value.height == quality }) {
|
||||
self.requestedLevelIndex = level.key
|
||||
} else {
|
||||
self.requestedLevelIndex = nil
|
||||
}
|
||||
}
|
||||
|
||||
if self.playerIsReady {
|
||||
self.webView.evaluateJavaScript("playerSetLevel(\(self.requestedLevelIndex ?? -1));", completionHandler: nil)
|
||||
}
|
||||
}
|
||||
|
||||
func videoQualityState() -> (current: Int, preferred: UniversalVideoContentVideoQuality, available: [Int])? {
|
||||
guard let playerCurrentLevelIndex = self.playerCurrentLevelIndex else {
|
||||
return nil
|
||||
}
|
||||
guard let currentLevel = self.playerAvailableLevels[playerCurrentLevelIndex] else {
|
||||
return nil
|
||||
}
|
||||
|
||||
var available = self.playerAvailableLevels.values.map(\.height)
|
||||
available.sort(by: { $0 > $1 })
|
||||
|
||||
return (currentLevel.height, self.preferredVideoQuality, available)
|
||||
}
|
||||
|
||||
func addPlaybackCompleted(_ f: @escaping () -> Void) -> Int {
|
||||
return self.playbackCompletedListeners.add(f)
|
||||
}
|
||||
|
||||
func removePlaybackCompleted(_ index: Int) {
|
||||
self.playbackCompletedListeners.remove(index)
|
||||
}
|
||||
|
||||
func fetchControl(_ control: UniversalVideoNodeFetchControl) {
|
||||
}
|
||||
|
||||
func notifyPlaybackControlsHidden(_ hidden: Bool) {
|
||||
}
|
||||
|
||||
func setCanPlaybackWithoutHierarchy(_ canPlaybackWithoutHierarchy: Bool) {
|
||||
}
|
||||
|
||||
func enterNativePictureInPicture() -> Bool {
|
||||
self.webView.evaluateJavaScript("playerRequestPictureInPicture();", completionHandler: nil)
|
||||
return true
|
||||
}
|
||||
|
||||
func exitNativePictureInPicture() {
|
||||
self.webView.evaluateJavaScript("playerStopPictureInPicture();", completionHandler: nil)
|
||||
}
|
||||
}
|
@ -206,6 +206,10 @@ private final class NativeVideoContentNode: ASDisplayNode, UniversalVideoContent
|
||||
return self._bufferingStatus.get()
|
||||
}
|
||||
|
||||
var isNativePictureInPictureActive: Signal<Bool, NoError> {
|
||||
return .single(false)
|
||||
}
|
||||
|
||||
private let _ready = Promise<Void>()
|
||||
var ready: Signal<Void, NoError> {
|
||||
return self._ready.get()
|
||||
@ -685,4 +689,11 @@ private final class NativeVideoContentNode: ASDisplayNode, UniversalVideoContent
|
||||
func setCanPlaybackWithoutHierarchy(_ canPlaybackWithoutHierarchy: Bool) {
|
||||
self.playerNode.setCanPlaybackWithoutHierarchy(canPlaybackWithoutHierarchy)
|
||||
}
|
||||
|
||||
func enterNativePictureInPicture() -> Bool {
|
||||
return false
|
||||
}
|
||||
|
||||
func exitNativePictureInPicture() {
|
||||
}
|
||||
}
|
||||
|
@ -141,6 +141,10 @@ private final class PlatformVideoContentNode: ASDisplayNode, UniversalVideoConte
|
||||
return self._bufferingStatus.get()
|
||||
}
|
||||
|
||||
var isNativePictureInPictureActive: Signal<Bool, NoError> {
|
||||
return .single(false)
|
||||
}
|
||||
|
||||
private let _ready = Promise<Void>()
|
||||
var ready: Signal<Void, NoError> {
|
||||
return self._ready.get()
|
||||
@ -471,4 +475,11 @@ private final class PlatformVideoContentNode: ASDisplayNode, UniversalVideoConte
|
||||
|
||||
func setCanPlaybackWithoutHierarchy(_ canPlaybackWithoutHierarchy: Bool) {
|
||||
}
|
||||
|
||||
func enterNativePictureInPicture() -> Bool {
|
||||
return false
|
||||
}
|
||||
|
||||
func exitNativePictureInPicture() {
|
||||
}
|
||||
}
|
||||
|
@ -58,6 +58,10 @@ private final class SystemVideoContentNode: ASDisplayNode, UniversalVideoContent
|
||||
return self._bufferingStatus.get()
|
||||
}
|
||||
|
||||
var isNativePictureInPictureActive: Signal<Bool, NoError> {
|
||||
return .single(false)
|
||||
}
|
||||
|
||||
private let _ready = Promise<Void>()
|
||||
var ready: Signal<Void, NoError> {
|
||||
return self._ready.get()
|
||||
@ -308,5 +312,12 @@ private final class SystemVideoContentNode: ASDisplayNode, UniversalVideoContent
|
||||
|
||||
func setCanPlaybackWithoutHierarchy(_ canPlaybackWithoutHierarchy: Bool) {
|
||||
}
|
||||
|
||||
func enterNativePictureInPicture() -> Bool {
|
||||
return false
|
||||
}
|
||||
|
||||
func exitNativePictureInPicture() {
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -31,9 +31,12 @@ private final class UniversalVideoContentHolder {
|
||||
var bufferingStatusDisposable: Disposable?
|
||||
var bufferingStatusValue: (RangeSet<Int64>, Int64)?
|
||||
|
||||
var isNativePictureInPictureActiveDisposable: Disposable?
|
||||
var isNativePictureInPictureActiveValue: Bool = false
|
||||
|
||||
var playbackCompletedIndex: Int?
|
||||
|
||||
init(content: UniversalVideoContent, contentNode: UniversalVideoContentNode & ASDisplayNode, statusUpdated: @escaping (MediaPlayerStatus?) -> Void, bufferingStatusUpdated: @escaping ((RangeSet<Int64>, Int64)?) -> Void, playbackCompleted: @escaping () -> Void) {
|
||||
init(content: UniversalVideoContent, contentNode: UniversalVideoContentNode & ASDisplayNode, statusUpdated: @escaping (MediaPlayerStatus?) -> Void, bufferingStatusUpdated: @escaping ((RangeSet<Int64>, Int64)?) -> Void, playbackCompleted: @escaping () -> Void, isNativePictureInPictureActiveUpdated: @escaping (Bool) -> Void) {
|
||||
self.content = content
|
||||
self.contentNode = contentNode
|
||||
|
||||
@ -51,6 +54,13 @@ private final class UniversalVideoContentHolder {
|
||||
}
|
||||
})
|
||||
|
||||
self.isNativePictureInPictureActiveDisposable = (contentNode.isNativePictureInPictureActive |> deliverOnMainQueue).start(next: { [weak self] value in
|
||||
if let strongSelf = self {
|
||||
strongSelf.isNativePictureInPictureActiveValue = value
|
||||
isNativePictureInPictureActiveUpdated(value)
|
||||
}
|
||||
})
|
||||
|
||||
self.playbackCompletedIndex = contentNode.addPlaybackCompleted {
|
||||
playbackCompleted()
|
||||
}
|
||||
@ -59,6 +69,7 @@ private final class UniversalVideoContentHolder {
|
||||
deinit {
|
||||
self.statusDisposable?.dispose()
|
||||
self.bufferingStatusDisposable?.dispose()
|
||||
self.isNativePictureInPictureActiveDisposable?.dispose()
|
||||
if let playbackCompletedIndex = self.playbackCompletedIndex {
|
||||
self.contentNode.removePlaybackCompleted(playbackCompletedIndex)
|
||||
}
|
||||
@ -133,9 +144,10 @@ private final class UniversalVideoContentHolderCallbacks {
|
||||
let playbackCompleted = Bag<() -> Void>()
|
||||
let status = Bag<(MediaPlayerStatus?) -> Void>()
|
||||
let bufferingStatus = Bag<((RangeSet<Int64>, Int64)?) -> Void>()
|
||||
let isNativePictureInPictureActive = Bag<(Bool) -> Void>()
|
||||
|
||||
var isEmpty: Bool {
|
||||
return self.playbackCompleted.isEmpty && self.status.isEmpty && self.bufferingStatus.isEmpty
|
||||
return self.playbackCompleted.isEmpty && self.status.isEmpty && self.bufferingStatus.isEmpty && self.isNativePictureInPictureActive.isEmpty
|
||||
}
|
||||
}
|
||||
|
||||
@ -190,6 +202,14 @@ public final class UniversalVideoManagerImpl: UniversalVideoManager {
|
||||
}
|
||||
}
|
||||
}
|
||||
}, isNativePictureInPictureActiveUpdated: { [weak self] value in
|
||||
if let strongSelf = self {
|
||||
if let current = strongSelf.holderCallbacks[content.id] {
|
||||
for subscriber in current.isNativePictureInPictureActive.copyItems() {
|
||||
subscriber(value)
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
self.holders[content.id] = holder
|
||||
}
|
||||
@ -311,4 +331,37 @@ public final class UniversalVideoManagerImpl: UniversalVideoManager {
|
||||
}
|
||||
} |> runOn(Queue.mainQueue())
|
||||
}
|
||||
|
||||
public func isNativePictureInPictureActiveSignal(content: UniversalVideoContent) -> Signal<Bool, NoError> {
|
||||
return Signal { subscriber in
|
||||
var callbacks: UniversalVideoContentHolderCallbacks
|
||||
if let current = self.holderCallbacks[content.id] {
|
||||
callbacks = current
|
||||
} else {
|
||||
callbacks = UniversalVideoContentHolderCallbacks()
|
||||
self.holderCallbacks[content.id] = callbacks
|
||||
}
|
||||
|
||||
let index = callbacks.isNativePictureInPictureActive.add({ value in
|
||||
subscriber.putNext(value)
|
||||
})
|
||||
|
||||
if let current = self.holders[content.id] {
|
||||
subscriber.putNext(current.isNativePictureInPictureActiveValue)
|
||||
} else {
|
||||
subscriber.putNext(false)
|
||||
}
|
||||
|
||||
return ActionDisposable {
|
||||
Queue.mainQueue().async {
|
||||
if let current = self.holderCallbacks[content.id] {
|
||||
current.status.remove(index)
|
||||
if current.playbackCompleted.isEmpty {
|
||||
self.holderCallbacks.removeValue(forKey: content.id)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} |> runOn(Queue.mainQueue())
|
||||
}
|
||||
}
|
||||
|
@ -227,4 +227,11 @@ final class WebEmbedPlayerNode: ASDisplayNode, WKNavigationDelegate {
|
||||
|
||||
func setCanPlaybackWithoutHierarchy(_ canPlaybackWithoutHierarchy: Bool) {
|
||||
}
|
||||
|
||||
func enterNativePictureInPicture() -> Bool {
|
||||
return false
|
||||
}
|
||||
|
||||
func exitNativePictureInPicture() {
|
||||
}
|
||||
}
|
||||
|
@ -58,6 +58,10 @@ final class WebEmbedVideoContentNode: ASDisplayNode, UniversalVideoContentNode {
|
||||
return self._bufferingStatus.get()
|
||||
}
|
||||
|
||||
var isNativePictureInPictureActive: Signal<Bool, NoError> {
|
||||
return .single(false)
|
||||
}
|
||||
|
||||
private var seekId: Int = 0
|
||||
|
||||
private let _ready = Promise<Void>()
|
||||
@ -207,4 +211,11 @@ final class WebEmbedVideoContentNode: ASDisplayNode, UniversalVideoContentNode {
|
||||
|
||||
func setCanPlaybackWithoutHierarchy(_ canPlaybackWithoutHierarchy: Bool) {
|
||||
}
|
||||
|
||||
func enterNativePictureInPicture() -> Bool {
|
||||
return false
|
||||
}
|
||||
|
||||
func exitNativePictureInPicture() {
|
||||
}
|
||||
}
|
||||
|
@ -1 +1 @@
|
||||
Subproject commit 846f7040480f52b8bc0382fb9e2e78e8ef60c633
|
||||
Subproject commit b6e7349b98c5d3999f45e9468eee068aff86ee37
|
Loading…
x
Reference in New Issue
Block a user