[Temp] video v2

Isaac 2024-12-25 00:18:02 +08:00
parent 4bed1703a2
commit df9e27f4fb
39 changed files with 341 additions and 180 deletions

View File

@@ -146,7 +146,7 @@ public protocol MediaManager: AnyObject {
     var musicMediaPlayerState: Signal<(Account, SharedMediaPlayerItemPlaybackStateOrLoading, MediaManagerPlayerType)?, NoError> { get }
     var activeGlobalMediaPlayerAccountId: Signal<(AccountRecordId, Bool)?, NoError> { get }
-    func setPlaylist(_ playlist: (Account, SharedMediaPlaylist)?, type: MediaManagerPlayerType, control: SharedMediaPlayerControlAction)
+    func setPlaylist(_ playlist: (AccountContext, SharedMediaPlaylist)?, type: MediaManagerPlayerType, control: SharedMediaPlayerControlAction)
     func playlistControl(_ control: SharedMediaPlayerControlAction, type: MediaManagerPlayerType?)
     func filteredPlaylistState(accountId: AccountRecordId, playlistId: SharedMediaPlaylistId, itemId: SharedMediaPlaylistItemId, type: MediaManagerPlayerType) -> Signal<SharedMediaPlayerItemPlaybackState?, NoError>

View File

@@ -53,7 +53,7 @@ public protocol UniversalVideoContent {
     var dimensions: CGSize { get }
     var duration: Double { get }
-    func makeContentNode(accountId: AccountRecordId, postbox: Postbox, audioSession: ManagedAudioSession) -> UniversalVideoContentNode & ASDisplayNode
+    func makeContentNode(context: AccountContext, postbox: Postbox, audioSession: ManagedAudioSession) -> UniversalVideoContentNode & ASDisplayNode
     func isEqual(to other: UniversalVideoContent) -> Bool
 }
@@ -95,7 +95,7 @@ public enum UniversalVideoNodeFetchControl {
 }
 public final class UniversalVideoNode: ASDisplayNode {
-    private let accountId: AccountRecordId
+    private let context: AccountContext
     private let postbox: Postbox
     private let audioSession: ManagedAudioSession
     private let manager: UniversalVideoManager
@@ -146,12 +146,12 @@ public final class UniversalVideoNode: ASDisplayNode {
         if self.canAttachContent {
             assert(self.contentRequestIndex == nil)
-            let accountId = self.accountId
+            let context = self.context
             let content = self.content
             let postbox = self.postbox
             let audioSession = self.audioSession
             self.contentRequestIndex = self.manager.attachUniversalVideoContent(content: self.content, priority: self.priority, create: {
-                return content.makeContentNode(accountId: accountId, postbox: postbox, audioSession: audioSession)
+                return content.makeContentNode(context: context, postbox: postbox, audioSession: audioSession)
             }, update: { [weak self] contentNodeAndFlags in
                 if let strongSelf = self {
                     strongSelf.updateContentNode(contentNodeAndFlags)
@@ -172,8 +172,8 @@ public final class UniversalVideoNode: ASDisplayNode {
         return self.contentNode != nil
     }
-    public init(accountId: AccountRecordId, postbox: Postbox, audioSession: ManagedAudioSession, manager: UniversalVideoManager, decoration: UniversalVideoDecoration, content: UniversalVideoContent, priority: UniversalVideoPriority, autoplay: Bool = false, snapshotContentWhenGone: Bool = false) {
-        self.accountId = accountId
+    public init(context: AccountContext, postbox: Postbox, audioSession: ManagedAudioSession, manager: UniversalVideoManager, decoration: UniversalVideoDecoration, content: UniversalVideoContent, priority: UniversalVideoPriority, autoplay: Bool = false, snapshotContentWhenGone: Bool = false) {
+        self.context = context
         self.postbox = postbox
         self.audioSession = audioSession
         self.manager = manager
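Call sites now pass a full AccountContext instead of an AccountRecordId. A minimal sketch of the migrated call shape, mirroring the call sites updated later in this commit (here `context: AccountContext` and `videoContent: UniversalVideoContent` are assumed to already exist in scope):

// Illustrative sketch of a migrated call site; not part of the diff itself.
let videoNode = UniversalVideoNode(
    context: context,
    postbox: context.account.postbox,
    audioSession: context.sharedContext.mediaManager.audioSession,
    manager: context.sharedContext.mediaManager.universalVideoManager,
    decoration: GalleryVideoDecoration(),
    content: videoContent,
    priority: .embedded
)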

View File

@@ -234,7 +234,7 @@ public final class AvatarVideoNode: ASDisplayNode {
         if self.videoNode == nil {
             let context = self.context
             let mediaManager = context.sharedContext.mediaManager
-            let videoNode = UniversalVideoNode(accountId: context.account.id, postbox: context.account.postbox, audioSession: mediaManager.audioSession, manager: mediaManager.universalVideoManager, decoration: VideoDecoration(), content: videoContent, priority: .embedded)
+            let videoNode = UniversalVideoNode(context: context, postbox: context.account.postbox, audioSession: mediaManager.audioSession, manager: mediaManager.universalVideoManager, decoration: VideoDecoration(), content: videoContent, priority: .embedded)
             videoNode.clipsToBounds = true
             videoNode.isUserInteractionEnabled = false
             videoNode.isHidden = true

View File

@@ -1076,7 +1076,7 @@ final class BrowserInstantPageContent: UIView, BrowserContent, UIScrollViewDeleg
                 }
             }
         }
-        self.context.sharedContext.mediaManager.setPlaylist((self.context.account, InstantPageMediaPlaylist(webPage: webPage, items: medias, initialItemIndex: initialIndex)), type: file.isVoice ? .voice : .music, control: .playback(.play))
+        self.context.sharedContext.mediaManager.setPlaylist((self.context, InstantPageMediaPlaylist(webPage: webPage, items: medias, initialItemIndex: initialIndex)), type: file.isVoice ? .voice : .music, control: .playback(.play))
         return
     }

View File

@@ -464,7 +464,7 @@ public final class ChatImportActivityScreen: ViewController {
         let videoContent = NativeVideoContent(id: .message(1, EngineMedia.Id(namespace: 0, id: 1)), userLocation: .other, fileReference: .standalone(media: dummyFile), streamVideo: .none, loopVideo: true, enableSound: false, fetchAutomatically: true, onlyFullSizeThumbnail: false, continuePlayingWithoutSoundOnLostAudioSession: false, placeholderColor: .black, storeAfterDownload: nil)
-        let videoNode = UniversalVideoNode(accountId: context.account.id, postbox: context.account.postbox, audioSession: context.sharedContext.mediaManager.audioSession, manager: context.sharedContext.mediaManager.universalVideoManager, decoration: decoration, content: videoContent, priority: .embedded)
+        let videoNode = UniversalVideoNode(context: context, postbox: context.account.postbox, audioSession: context.sharedContext.mediaManager.audioSession, manager: context.sharedContext.mediaManager.universalVideoManager, decoration: decoration, content: videoContent, priority: .embedded)
         videoNode.frame = CGRect(origin: CGPoint(), size: CGSize(width: 2.0, height: 2.0))
         videoNode.alpha = 0.01
         self.videoNode = videoNode

View File

@@ -106,6 +106,7 @@ private enum DebugControllerEntry: ItemListNodeEntry {
     case disableCallV2(Bool)
     case experimentalCallMute(Bool)
     case conferenceCalls(Bool)
+    case playerV2(Bool)
     case benchmarkReflectors
     case enableLocalTranslation(Bool)
     case preferredVideoCodec(Int, String, String?, Bool)
@@ -132,7 +133,7 @@ private enum DebugControllerEntry: ItemListNodeEntry {
             return DebugControllerSection.web.rawValue
         case .keepChatNavigationStack, .skipReadHistory, .dustEffect, .crashOnSlowQueries, .crashOnMemoryPressure:
             return DebugControllerSection.experiments.rawValue
-        case .clearTips, .resetNotifications, .crash, .fillLocalSavedMessageCache, .resetDatabase, .resetDatabaseAndCache, .resetHoles, .resetTagHoles, .reindexUnread, .resetCacheIndex, .reindexCache, .resetBiometricsData, .optimizeDatabase, .photoPreview, .knockoutWallpaper, .storiesExperiment, .storiesJpegExperiment, .playlistPlayback, .enableQuickReactionSwitch, .experimentalCompatibility, .enableDebugDataDisplay, .rippleEffect, .browserExperiment, .localTranscription, .enableReactionOverrides, .restorePurchases, .disableReloginTokens, .disableCallV2, .experimentalCallMute, .conferenceCalls, .benchmarkReflectors, .enableLocalTranslation:
+        case .clearTips, .resetNotifications, .crash, .fillLocalSavedMessageCache, .resetDatabase, .resetDatabaseAndCache, .resetHoles, .resetTagHoles, .reindexUnread, .resetCacheIndex, .reindexCache, .resetBiometricsData, .optimizeDatabase, .photoPreview, .knockoutWallpaper, .storiesExperiment, .storiesJpegExperiment, .playlistPlayback, .enableQuickReactionSwitch, .experimentalCompatibility, .enableDebugDataDisplay, .rippleEffect, .browserExperiment, .localTranscription, .enableReactionOverrides, .restorePurchases, .disableReloginTokens, .disableCallV2, .experimentalCallMute, .conferenceCalls, .playerV2, .benchmarkReflectors, .enableLocalTranslation:
             return DebugControllerSection.experiments.rawValue
         case .logTranslationRecognition, .resetTranslationStates:
             return DebugControllerSection.translation.rawValue
@@ -251,12 +252,14 @@ private enum DebugControllerEntry: ItemListNodeEntry {
             return 52
         case .conferenceCalls:
             return 53
-        case .benchmarkReflectors:
+        case .playerV2:
             return 54
-        case .enableLocalTranslation:
+        case .benchmarkReflectors:
             return 55
+        case .enableLocalTranslation:
+            return 56
         case let .preferredVideoCodec(index, _, _, _):
-            return 56 + index
+            return 57 + index
         case .disableVideoAspectScaling:
             return 100
         case .enableNetworkFramework:
@@ -1355,6 +1358,16 @@ private enum DebugControllerEntry: ItemListNodeEntry {
                     })
                 }).start()
             })
+        case let .playerV2(value):
+            return ItemListSwitchItem(presentationData: presentationData, title: "PlayerV2", value: value, sectionId: self.section, style: .blocks, updated: { value in
+                let _ = arguments.sharedContext.accountManager.transaction ({ transaction in
+                    transaction.updateSharedData(ApplicationSpecificSharedDataKeys.experimentalUISettings, { settings in
+                        var settings = settings?.get(ExperimentalUISettings.self) ?? ExperimentalUISettings.defaultSettings
+                        settings.playerV2 = value
+                        return PreferencesEntry(settings)
+                    })
+                }).start()
+            })
         case .benchmarkReflectors:
             return ItemListActionItem(presentationData: presentationData, title: "Benchmark Reflectors", kind: .generic, alignment: .natural, sectionId: self.section, style: .blocks, action: {
                 guard let context = arguments.context else {
@@ -1578,6 +1591,7 @@ private func debugControllerEntries(sharedContext: SharedAccountContext, present
     entries.append(.experimentalCallMute(experimentalSettings.experimentalCallMute))
     entries.append(.conferenceCalls(experimentalSettings.conferenceCalls))
+    entries.append(.playerV2(experimentalSettings.playerV2))
     entries.append(.benchmarkReflectors)
     entries.append(.enableLocalTranslation(experimentalSettings.enableLocalTranslation))
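The new debug switch only persists the flag into ExperimentalUISettings. A hedged sketch of how a caller might read it back; the consuming code path is not part of the hunks shown here, so the branch below is an assumption, reusing the sharedData access pattern that appears elsewhere in this commit:

// Hypothetical consumer of the persisted flag; only the read pattern is taken
// from the diff, the branch on playerV2 is assumed.
let _ = (sharedContext.accountManager.sharedData(keys: [ApplicationSpecificSharedDataKeys.experimentalUISettings])
|> take(1)).start(next: { sharedData in
    let settings = sharedData.entries[ApplicationSpecificSharedDataKeys.experimentalUISettings]?.get(ExperimentalUISettings.self) ?? ExperimentalUISettings.defaultSettings
    if settings.playerV2 {
        // presumably select the ChunkMediaPlayerV2-based playback path
    }
})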

View File

@@ -330,7 +330,7 @@ public class DrawingStickerEntityView: DrawingEntityView {
     private func setupWithVideo(_ file: TelegramMediaFile) {
         let videoNode = UniversalVideoNode(
-            accountId: self.context.account.id,
+            context: self.context,
             postbox: self.context.account.postbox,
             audioSession: self.context.sharedContext.mediaManager.audioSession,
             manager: self.context.sharedContext.mediaManager.universalVideoManager,

View File

@@ -1762,7 +1762,7 @@ final class UniversalVideoGalleryItemNode: ZoomableContentGalleryItemNode {
         let mediaManager = item.context.sharedContext.mediaManager
-        let videoNode = UniversalVideoNode(accountId: item.context.account.id, postbox: item.context.account.postbox, audioSession: mediaManager.audioSession, manager: mediaManager.universalVideoManager, decoration: GalleryVideoDecoration(), content: item.content, priority: .gallery)
+        let videoNode = UniversalVideoNode(context: item.context, postbox: item.context.account.postbox, audioSession: mediaManager.audioSession, manager: mediaManager.universalVideoManager, decoration: GalleryVideoDecoration(), content: item.content, priority: .gallery)
         let videoScale: CGFloat
         if item.content is WebEmbedVideoContent {
             videoScale = 1.0
@@ -2849,7 +2849,7 @@ final class UniversalVideoGalleryItemNode: ZoomableContentGalleryItemNode {
         let baseNavigationController = self.baseNavigationController()
         let mediaManager = self.context.sharedContext.mediaManager
         var expandImpl: (() -> Void)?
-        let overlayNode = OverlayUniversalVideoNode(accountId: self.context.account.id, postbox: self.context.account.postbox, audioSession: context.sharedContext.mediaManager.audioSession, manager: context.sharedContext.mediaManager.universalVideoManager, content: item.content, expand: {
+        let overlayNode = OverlayUniversalVideoNode(context: self.context, postbox: self.context.account.postbox, audioSession: context.sharedContext.mediaManager.audioSession, manager: context.sharedContext.mediaManager.universalVideoManager, content: item.content, expand: {
             expandImpl?()
         }, close: { [weak mediaManager] in
             mediaManager?.setOverlayVideoNode(nil)
@@ -3073,7 +3073,7 @@ final class UniversalVideoGalleryItemNode: ZoomableContentGalleryItemNode {
         self.disablePictureInPicturePlaceholder = true
-        let overlayVideoNode = UniversalVideoNode(accountId: self.context.account.id, postbox: self.context.account.postbox, audioSession: self.context.sharedContext.mediaManager.audioSession, manager: self.context.sharedContext.mediaManager.universalVideoManager, decoration: GalleryVideoDecoration(), content: item.content, priority: .overlay)
+        let overlayVideoNode = UniversalVideoNode(context: self.context, postbox: self.context.account.postbox, audioSession: self.context.sharedContext.mediaManager.audioSession, manager: self.context.sharedContext.mediaManager.universalVideoManager, decoration: GalleryVideoDecoration(), content: item.content, priority: .overlay)
         let absoluteRect = videoNode.view.convert(videoNode.view.bounds, to: nil)
         overlayVideoNode.frame = absoluteRect
         overlayVideoNode.updateLayout(size: absoluteRect.size, transition: .immediate)
@@ -3156,7 +3156,7 @@ final class UniversalVideoGalleryItemNode: ZoomableContentGalleryItemNode {
             shouldBeDismissed = .single(false)
         }
-        let overlayNode = OverlayUniversalVideoNode(accountId: self.context.account.id, postbox: self.context.account.postbox, audioSession: context.sharedContext.mediaManager.audioSession, manager: context.sharedContext.mediaManager.universalVideoManager, content: item.content, shouldBeDismissed: shouldBeDismissed, expand: {
+        let overlayNode = OverlayUniversalVideoNode(context: self.context, postbox: self.context.account.postbox, audioSession: context.sharedContext.mediaManager.audioSession, manager: context.sharedContext.mediaManager.universalVideoManager, content: item.content, shouldBeDismissed: shouldBeDismissed, expand: {
             expandImpl?()
         }, close: { [weak mediaManager] in
             mediaManager?.setOverlayVideoNode(nil)

View File

@@ -1449,7 +1449,7 @@ final class InstantPageControllerNode: ASDisplayNode, ASScrollViewDelegate {
                 }
             }
         }
-        self.context.sharedContext.mediaManager.setPlaylist((self.context.account, InstantPageMediaPlaylist(webPage: webPage, items: medias, initialItemIndex: initialIndex)), type: file.isVoice ? .voice : .music, control: .playback(.play))
+        self.context.sharedContext.mediaManager.setPlaylist((self.context, InstantPageMediaPlaylist(webPage: webPage, items: medias, initialItemIndex: initialIndex)), type: file.isVoice ? .voice : .music, control: .playback(.play))
         return
     }

View File

@@ -58,7 +58,7 @@ final class InstantPagePlayableVideoNode: ASDisplayNode, InstantPageNode, Galler
             fileValue = file
         }
-        self.videoNode = UniversalVideoNode(accountId: context.account.id, postbox: context.account.postbox, audioSession: context.sharedContext.mediaManager.audioSession, manager: context.sharedContext.mediaManager.universalVideoManager, decoration: GalleryVideoDecoration(), content: NativeVideoContent(id: .instantPage(webPage.webpageId, media.media.id!), userLocation: userLocation, fileReference: .webPage(webPage: WebpageReference(webPage), media: fileValue!), imageReference: imageReference, streamVideo: streamVideo ? .conservative : .none, loopVideo: true, enableSound: false, fetchAutomatically: true, placeholderColor: theme.pageBackgroundColor, storeAfterDownload: nil), priority: .embedded, autoplay: true)
+        self.videoNode = UniversalVideoNode(context: context, postbox: context.account.postbox, audioSession: context.sharedContext.mediaManager.audioSession, manager: context.sharedContext.mediaManager.universalVideoManager, decoration: GalleryVideoDecoration(), content: NativeVideoContent(id: .instantPage(webPage.webpageId, media.media.id!), userLocation: userLocation, fileReference: .webPage(webPage: WebpageReference(webPage), media: fileValue!), imageReference: imageReference, streamVideo: streamVideo ? .conservative : .none, loopVideo: true, enableSound: false, fetchAutomatically: true, placeholderColor: theme.pageBackgroundColor, storeAfterDownload: nil), priority: .embedded, autoplay: true)
         self.videoNode.isUserInteractionEnabled = false
         self.statusNode = RadialStatusNode(backgroundNodeColor: UIColor(white: 0.0, alpha: 0.6))

View File

@@ -80,12 +80,39 @@ public final class ChunkMediaPlayerPart {
 }
 public final class ChunkMediaPlayerPartsState {
-    public let duration: Double?
-    public let parts: [ChunkMediaPlayerPart]
-    public init(duration: Double?, parts: [ChunkMediaPlayerPart]) {
+    public final class DirectReader {
+        public final class Impl {
+            public let video: MediaDataReader?
+            public let audio: MediaDataReader?
+            public init(video: MediaDataReader?, audio: MediaDataReader?) {
+                self.video = video
+                self.audio = audio
+            }
+        }
+        public let seekPosition: Double
+        public let availableUntilPosition: Double
+        public let impl: QueueLocalObject<Impl>
+        public init(seekPosition: Double, availableUntilPosition: Double, impl: QueueLocalObject<Impl>) {
+            self.seekPosition = seekPosition
+            self.availableUntilPosition = availableUntilPosition
+            self.impl = impl
+        }
+    }
+    public enum Content {
+        case parts([ChunkMediaPlayerPart])
+        case directReader(DirectReader)
+    }
+    public let duration: Double?
+    public let content: Content
+    public init(duration: Double?, content: Content) {
         self.duration = duration
-        self.parts = parts
+        self.content = content
     }
 }
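The parts state now carries either a list of fetched parts or a direct-reader description. A minimal sketch of constructing both variants under stated assumptions: `dataQueue: Queue`, `videoReader` and `audioReader` (both `MediaDataReader?`) are placeholders, not values taken from the diff.

// Sketch only: building the two Content variants of the new parts state.
let emptyState = ChunkMediaPlayerPartsState(duration: nil, content: .parts([]))

let reader = ChunkMediaPlayerPartsState.DirectReader(
    seekPosition: 0.0,
    availableUntilPosition: 10.0,
    impl: QueueLocalObject(queue: dataQueue, generate: {
        // Assumed placeholder readers; real readers are produced by the source impl.
        return ChunkMediaPlayerPartsState.DirectReader.Impl(video: videoReader, audio: audioReader)
    })
)
let directState = ChunkMediaPlayerPartsState(duration: 10.0, content: .directReader(reader))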

View File

@@ -393,6 +393,7 @@ private func extractFFMpegFrameSegmentInfo(path: String) -> FFMpegFrameSegmentIn
 }
 final class ChunkMediaPlayerDirectFetchSourceImpl: ChunkMediaPlayerSourceImpl {
+    private let dataQueue: Queue
     private let resource: ChunkMediaPlayerV2.SourceDescription.ResourceDescription
     private let partsStateValue = Promise<ChunkMediaPlayerPartsState>()
@@ -403,7 +404,8 @@ final class ChunkMediaPlayerDirectFetchSourceImpl: ChunkMediaPlayerSourceImpl {
     private var completeFetchDisposable: Disposable?
     private var dataDisposable: Disposable?
-    init(resource: ChunkMediaPlayerV2.SourceDescription.ResourceDescription) {
+    init(dataQueue: Queue, resource: ChunkMediaPlayerV2.SourceDescription.ResourceDescription) {
+        self.dataQueue = dataQueue
         self.resource = resource
         if resource.fetchAutomatically {
@@ -458,18 +460,18 @@ final class ChunkMediaPlayerDirectFetchSourceImpl: ChunkMediaPlayerSourceImpl {
                 self.partsStateValue.set(.single(ChunkMediaPlayerPartsState(
                     duration: mainTrack.duration.seconds,
-                    parts: parts
+                    content: .parts(parts)
                 )))
             } else {
                 self.partsStateValue.set(.single(ChunkMediaPlayerPartsState(
                     duration: nil,
-                    parts: []
+                    content: .parts([])
                 )))
             }
         } else {
             self.partsStateValue.set(.single(ChunkMediaPlayerPartsState(
                 duration: nil,
-                parts: []
+                content: .parts([])
             )))
         }
     })

View File

@@ -132,7 +132,7 @@ public final class ChunkMediaPlayerV2: ChunkMediaPlayer {
     private var videoRenderer: AVSampleBufferDisplayLayer
     private var audioRenderer: AVSampleBufferAudioRenderer?
-    private var partsState = ChunkMediaPlayerPartsState(duration: nil, parts: [])
+    private var partsState = ChunkMediaPlayerPartsState(duration: nil, content: .parts([]))
     private var loadedParts: [LoadedPart] = []
     private var loadedPartsMediaData: QueueLocalObject<LoadedPartsMediaData>
     private var hasSound: Bool = false
@@ -161,6 +161,7 @@ public final class ChunkMediaPlayerV2: ChunkMediaPlayer {
     private var isMuted: Bool
     private var seekId: Int = 0
+    private var seekTimestamp: Double = 0.0
     private var pendingSeekTimestamp: Double?
     private var pendingContinuePlaybackAfterSeekToTimestamp: Double?
     private var shouldNotifySeeked: Bool = false
@@ -219,7 +220,7 @@ public final class ChunkMediaPlayerV2: ChunkMediaPlayer {
         case let .externalParts(partsState):
             self.source = ChunkMediaPlayerExternalSourceImpl(partsState: partsState)
         case let .directFetch(resource):
-            self.source = ChunkMediaPlayerDirectFetchSourceImpl(resource: resource)
+            self.source = ChunkMediaPlayerDirectFetchSourceImpl(dataQueue: self.dataQueue, resource: resource)
         }
         self.updateTimer = Foundation.Timer.scheduledTimer(withTimeInterval: 1.0 / 60.0, repeats: true, block: { [weak self] _ in
@@ -347,7 +348,6 @@ public final class ChunkMediaPlayerV2: ChunkMediaPlayer {
         }
         var validParts: [ChunkMediaPlayerPart] = []
-        var minStartTime: Double = 0.0
         for i in 0 ..< self.partsState.parts.count {
             let part = self.partsState.parts[i]
@@ -672,6 +672,7 @@ public final class ChunkMediaPlayerV2: ChunkMediaPlayer {
     private func seek(timestamp: Double, play: Bool?, notify: Bool) {
         self.seekId += 1
+        self.seekTimestamp = timestamp
         let seekId = self.seekId
         self.pendingSeekTimestamp = timestamp
         self.pendingContinuePlaybackAfterSeekToTimestamp = timestamp

View File

@@ -281,7 +281,7 @@ final class PeerAvatarImageGalleryItemNode: ZoomableContentGalleryItemNode {
         let mediaManager = self.context.sharedContext.mediaManager
         let videoFileReference = FileMediaReference.avatarList(peer: peerReference, media: TelegramMediaFile(fileId: EngineMedia.Id(namespace: Namespaces.Media.LocalFile, id: 0), partialReference: nil, resource: video.representation.resource, previewRepresentations: representations.map { $0.representation }, videoThumbnails: [], immediateThumbnailData: entry.immediateThumbnailData, mimeType: "video/mp4", size: nil, attributes: [.Animated, .Video(duration: 0, size: video.representation.dimensions, flags: [], preloadSize: nil, coverTime: nil, videoCodec: nil)], alternativeRepresentations: []))
         let videoContent = NativeVideoContent(id: .profileVideo(id, category), userLocation: .other, fileReference: videoFileReference, streamVideo: isMediaStreamable(resource: video.representation.resource) ? .conservative : .none, loopVideo: true, enableSound: false, fetchAutomatically: true, onlyFullSizeThumbnail: true, useLargeThumbnail: true, continuePlayingWithoutSoundOnLostAudioSession: false, placeholderColor: .clear, storeAfterDownload: nil)
-        let videoNode = UniversalVideoNode(accountId: self.context.account.id, postbox: self.context.account.postbox, audioSession: mediaManager.audioSession, manager: mediaManager.universalVideoManager, decoration: GalleryVideoDecoration(), content: videoContent, priority: .overlay)
+        let videoNode = UniversalVideoNode(context: self.context, postbox: self.context.account.postbox, audioSession: mediaManager.audioSession, manager: mediaManager.universalVideoManager, decoration: GalleryVideoDecoration(), content: videoContent, priority: .overlay)
         videoNode.isUserInteractionEnabled = false
         videoNode.isHidden = true
         self.videoStartTimestamp = video.representation.startTimestamp

View File

@@ -366,7 +366,7 @@ public final class PeerInfoAvatarListItemNode: ASDisplayNode {
         }
         let mediaManager = self.context.sharedContext.mediaManager
-        let videoNode = UniversalVideoNode(accountId: self.context.account.id, postbox: self.context.account.postbox, audioSession: mediaManager.audioSession, manager: mediaManager.universalVideoManager, decoration: GalleryVideoDecoration(), content: videoContent, priority: .secondaryOverlay)
+        let videoNode = UniversalVideoNode(context: self.context, postbox: self.context.account.postbox, audioSession: mediaManager.audioSession, manager: mediaManager.universalVideoManager, decoration: GalleryVideoDecoration(), content: videoContent, priority: .secondaryOverlay)
         videoNode.isUserInteractionEnabled = false
         videoNode.canAttachContent = true
         videoNode.isHidden = true

View File

@@ -233,7 +233,7 @@ private final class PhoneView: UIView {
             hintDimensions: CGSize(width: 1170, height: 1754),
             storeAfterDownload: nil
         )
-        let videoNode = UniversalVideoNode(accountId: context.account.id, postbox: context.account.postbox, audioSession: context.sharedContext.mediaManager.audioSession, manager: context.sharedContext.mediaManager.universalVideoManager, decoration: VideoDecoration(), content: videoContent, priority: .embedded)
+        let videoNode = UniversalVideoNode(context: context, postbox: context.account.postbox, audioSession: context.sharedContext.mediaManager.audioSession, manager: context.sharedContext.mediaManager.universalVideoManager, decoration: VideoDecoration(), content: videoContent, priority: .embedded)
         videoNode.canAttachContent = true
         self.videoNode = videoNode

View File

@@ -277,20 +277,25 @@ public final class ShareProlongedLoadingContainerNode: ASDisplayNode, ShareConte
         }))
         if let postbox, let mediaManager = environment.mediaManager, let path = getAppBundle().path(forResource: "BlankVideo", ofType: "m4v"), let size = fileSize(path) {
+            let _ = postbox
+            let _ = mediaManager
             let decoration = ChatBubbleVideoDecoration(corners: ImageCorners(), nativeSize: CGSize(width: 100.0, height: 100.0), contentMode: .aspectFit, backgroundColor: .black)
+            let _ = decoration
             let dummyFile = TelegramMediaFile(fileId: EngineMedia.Id(namespace: 0, id: 1), partialReference: nil, resource: LocalFileReferenceMediaResource(localFilePath: path, randomId: 12345), previewRepresentations: [], videoThumbnails: [], immediateThumbnailData: nil, mimeType: "video/mp4", size: size, attributes: [.Video(duration: 1, size: PixelDimensions(width: 100, height: 100), flags: [], preloadSize: nil, coverTime: nil, videoCodec: nil)], alternativeRepresentations: [])
             let videoContent = NativeVideoContent(id: .message(1, EngineMedia.Id(namespace: 0, id: 1)), userLocation: .other, fileReference: .standalone(media: dummyFile), streamVideo: .none, loopVideo: true, enableSound: false, fetchAutomatically: true, onlyFullSizeThumbnail: false, continuePlayingWithoutSoundOnLostAudioSession: false, placeholderColor: .black, storeAfterDownload: nil)
+            let _ = videoContent
-            let videoNode = UniversalVideoNode(accountId: AccountRecordId(rawValue: 0), postbox: postbox, audioSession: mediaManager.audioSession, manager: mediaManager.universalVideoManager, decoration: decoration, content: videoContent, priority: .embedded)
-            videoNode.frame = CGRect(origin: CGPoint(), size: CGSize(width: 2.0, height: 2.0))
-            videoNode.alpha = 0.01
-            self.videoNode = videoNode
-            self.addSubnode(videoNode)
-            videoNode.canAttachContent = true
-            videoNode.play()
+            /*let videoNode = UniversalVideoNode(accountId: AccountRecordId(rawValue: 0), postbox: postbox, audioSession: mediaManager.audioSession, manager: mediaManager.universalVideoManager, decoration: decoration, content: videoContent, priority: .embedded)
+            videoNode.frame = CGRect(origin: CGPoint(), size: CGSize(width: 2.0, height: 2.0))
+            videoNode.alpha = 0.01
+            self.videoNode = videoNode
+            self.addSubnode(videoNode)
+            videoNode.canAttachContent = true
+            videoNode.play()*/
         }
     }

View File

@@ -151,7 +151,7 @@ public final class ChatBotInfoItemNode: ListViewItemNode {
             continuePlayingWithoutSoundOnLostAudioSession: false,
             storeAfterDownload: nil
         )
-        let videoNode = UniversalVideoNode(accountId: context.account.id, postbox: context.account.postbox, audioSession: context.sharedContext.mediaManager.audioSession, manager: context.sharedContext.mediaManager.universalVideoManager, decoration: VideoDecoration(), content: videoContent, priority: .embedded)
+        let videoNode = UniversalVideoNode(context: context, postbox: context.account.postbox, audioSession: context.sharedContext.mediaManager.audioSession, manager: context.sharedContext.mediaManager.universalVideoManager, decoration: VideoDecoration(), content: videoContent, priority: .embedded)
         videoNode.canAttachContent = true
         self.videoNode = videoNode

View File

@@ -277,7 +277,7 @@ public class ChatMessageActionBubbleContentNode: ChatMessageBubbleContentNode {
         let videoContent = NativeVideoContent(id: .profileVideo(id, "action"), userLocation: .peer(item.message.id.peerId), fileReference: videoFileReference, streamVideo: isMediaStreamable(resource: video.resource) ? .conservative : .none, loopVideo: true, enableSound: false, fetchAutomatically: true, onlyFullSizeThumbnail: false, useLargeThumbnail: true, autoFetchFullSizeThumbnail: true, continuePlayingWithoutSoundOnLostAudioSession: false, placeholderColor: .clear, storeAfterDownload: nil)
         if videoContent.id != strongSelf.videoContent?.id {
             let mediaManager = item.context.sharedContext.mediaManager
-            let videoNode = UniversalVideoNode(accountId: item.context.account.id, postbox: item.context.account.postbox, audioSession: mediaManager.audioSession, manager: mediaManager.universalVideoManager, decoration: GalleryVideoDecoration(), content: videoContent, priority: .secondaryOverlay)
+            let videoNode = UniversalVideoNode(context: item.context, postbox: item.context.account.postbox, audioSession: mediaManager.audioSession, manager: mediaManager.universalVideoManager, decoration: GalleryVideoDecoration(), content: videoContent, priority: .secondaryOverlay)
             videoNode.isUserInteractionEnabled = false
             videoNode.ownsContentNodeUpdated = { [weak self] owns in
                 if let strongSelf = self {

View File

@@ -763,7 +763,7 @@ public class ChatMessageInteractiveInstantVideoNode: ASDisplayNode {
             })
         }
         let mediaManager = item.context.sharedContext.mediaManager
-        let videoNode = UniversalVideoNode(accountId: item.context.account.id, postbox: item.context.account.postbox, audioSession: mediaManager.audioSession, manager: mediaManager.universalVideoManager, decoration: ChatBubbleInstantVideoDecoration(inset: 2.0, backgroundImage: instantVideoBackgroundImage, tapped: {
+        let videoNode = UniversalVideoNode(context: item.context, postbox: item.context.account.postbox, audioSession: mediaManager.audioSession, manager: mediaManager.universalVideoManager, decoration: ChatBubbleInstantVideoDecoration(inset: 2.0, backgroundImage: instantVideoBackgroundImage, tapped: {
             if let strongSelf = self {
                 if let item = strongSelf.item {
                     if strongSelf.infoBackgroundNode.alpha.isZero {

View File

@@ -1830,7 +1830,7 @@ public final class ChatMessageInteractiveMediaNode: ASDisplayNode, GalleryItemTr
             }
         )
     }
-        let videoNode = UniversalVideoNode(accountId: context.account.id, postbox: context.account.postbox, audioSession: mediaManager.audioSession, manager: mediaManager.universalVideoManager, decoration: decoration, content: videoContent, priority: .embedded)
+        let videoNode = UniversalVideoNode(context: context, postbox: context.account.postbox, audioSession: mediaManager.audioSession, manager: mediaManager.universalVideoManager, decoration: decoration, content: videoContent, priority: .embedded)
         videoNode.isUserInteractionEnabled = false
         videoNode.ownsContentNodeUpdated = { [weak self] owns in
             if let strongSelf = self {

View File

@@ -222,7 +222,7 @@ public class ChatMessageProfilePhotoSuggestionContentNode: ChatMessageBubbleCont
         let videoContent = NativeVideoContent(id: .profileVideo(id, "action"), userLocation: .peer(item.message.id.peerId), fileReference: videoFileReference, streamVideo: isMediaStreamable(resource: video.resource) ? .conservative : .none, loopVideo: true, enableSound: false, fetchAutomatically: true, onlyFullSizeThumbnail: false, useLargeThumbnail: true, autoFetchFullSizeThumbnail: true, continuePlayingWithoutSoundOnLostAudioSession: false, placeholderColor: .clear, storeAfterDownload: nil)
         if videoContent.id != strongSelf.videoContent?.id {
             let mediaManager = item.context.sharedContext.mediaManager
-            let videoNode = UniversalVideoNode(accountId: item.context.account.id, postbox: item.context.account.postbox, audioSession: mediaManager.audioSession, manager: mediaManager.universalVideoManager, decoration: GalleryVideoDecoration(), content: videoContent, priority: .secondaryOverlay)
+            let videoNode = UniversalVideoNode(context: item.context, postbox: item.context.account.postbox, audioSession: mediaManager.audioSession, manager: mediaManager.universalVideoManager, decoration: GalleryVideoDecoration(), content: videoContent, priority: .secondaryOverlay)
             videoNode.isUserInteractionEnabled = false
             videoNode.ownsContentNodeUpdated = { [weak self] owns in
                 if let strongSelf = self {

View File

@@ -2286,7 +2286,7 @@ private class MessageContentNode: ASDisplayNode, ContentNode {
             }
         } else {
             let videoContent = NativeVideoContent(id: .message(message.stableId, video.fileId), userLocation: .peer(message.id.peerId), fileReference: .message(message: MessageReference(message), media: video), streamVideo: .conservative, loopVideo: true, enableSound: false, fetchAutomatically: false, onlyFullSizeThumbnail: self.isStatic, continuePlayingWithoutSoundOnLostAudioSession: true, placeholderColor: .clear, captureProtected: false, storeAfterDownload: nil)
-            let videoNode = UniversalVideoNode(accountId: self.context.account.id, postbox: self.context.account.postbox, audioSession: self.context.sharedContext.mediaManager.audioSession, manager: self.context.sharedContext.mediaManager.universalVideoManager, decoration: GalleryVideoDecoration(), content: videoContent, priority: .overlay, autoplay: !self.isStatic)
+            let videoNode = UniversalVideoNode(context: self.context, postbox: self.context.account.postbox, audioSession: self.context.sharedContext.mediaManager.audioSession, manager: self.context.sharedContext.mediaManager.universalVideoManager, decoration: GalleryVideoDecoration(), content: videoContent, priority: .overlay, autoplay: !self.isStatic)
             self.videoStatusDisposable.set((videoNode.status
             |> deliverOnMainQueue).startStrict(next: { [weak self] status in

View File

@@ -333,7 +333,7 @@ final class PeerInfoAvatarTransformContainerNode: ASDisplayNode {
         self.videoNode?.removeFromSupernode()
         let mediaManager = self.context.sharedContext.mediaManager
-        let videoNode = UniversalVideoNode(accountId: self.context.account.id, postbox: self.context.account.postbox, audioSession: mediaManager.audioSession, manager: mediaManager.universalVideoManager, decoration: GalleryVideoDecoration(), content: videoContent, priority: .embedded)
+        let videoNode = UniversalVideoNode(context: self.context, postbox: self.context.account.postbox, audioSession: mediaManager.audioSession, manager: mediaManager.universalVideoManager, decoration: GalleryVideoDecoration(), content: videoContent, priority: .embedded)
         videoNode.isUserInteractionEnabled = false
         videoNode.isHidden = true

View File

@@ -168,7 +168,7 @@ final class PeerInfoEditingAvatarNode: ASDisplayNode {
         self.videoNode?.removeFromSupernode()
         let mediaManager = self.context.sharedContext.mediaManager
-        let videoNode = UniversalVideoNode(accountId: self.context.account.id, postbox: self.context.account.postbox, audioSession: mediaManager.audioSession, manager: mediaManager.universalVideoManager, decoration: GalleryVideoDecoration(), content: videoContent, priority: .gallery)
+        let videoNode = UniversalVideoNode(context: self.context, postbox: self.context.account.postbox, audioSession: mediaManager.audioSession, manager: mediaManager.universalVideoManager, decoration: GalleryVideoDecoration(), content: videoContent, priority: .gallery)
         videoNode.isUserInteractionEnabled = false
         self.videoStartTimestamp = video.representation.startTimestamp
         self.videoContent = videoContent

View File

@@ -200,7 +200,7 @@ final class StoryItemContentComponent: Component {
         if case let .file(file) = currentMessageMedia, let peerReference = PeerReference(component.peer._asPeer()) {
             if self.videoNode == nil {
                 let videoNode = UniversalVideoNode(
-                    accountId: component.context.account.id,
+                    context: component.context,
                     postbox: component.context.account.postbox,
                     audioSession: component.context.sharedContext.mediaManager.audioSession,
                     manager: component.context.sharedContext.mediaManager.universalVideoManager,

View File

@@ -464,22 +464,22 @@ public final class MediaManagerImpl: NSObject, MediaManager {
         }
     }
-    public func setPlaylist(_ playlist: (Account, SharedMediaPlaylist)?, type: MediaManagerPlayerType, control: SharedMediaPlayerControlAction) {
+    public func setPlaylist(_ playlist: (AccountContext, SharedMediaPlaylist)?, type: MediaManagerPlayerType, control: SharedMediaPlayerControlAction) {
         assert(Queue.mainQueue().isCurrent())
-        let inputData: Signal<(Account, SharedMediaPlaylist, MusicPlaybackSettings, MediaPlaybackStoredState?)?, NoError>
-        if let (account, playlist) = playlist {
+        let inputData: Signal<(AccountContext, SharedMediaPlaylist, MusicPlaybackSettings, MediaPlaybackStoredState?)?, NoError>
+        if let (context, playlist) = playlist {
             inputData = self.accountManager.sharedData(keys: [ApplicationSpecificSharedDataKeys.musicPlaybackSettings])
             |> take(1)
-            |> mapToSignal { sharedData -> Signal<(Account, SharedMediaPlaylist, MusicPlaybackSettings, MediaPlaybackStoredState?)?, NoError> in
+            |> mapToSignal { sharedData -> Signal<(AccountContext, SharedMediaPlaylist, MusicPlaybackSettings, MediaPlaybackStoredState?)?, NoError> in
                 let settings = sharedData.entries[ApplicationSpecificSharedDataKeys.musicPlaybackSettings]?.get(MusicPlaybackSettings.self) ?? MusicPlaybackSettings.defaultSettings
                 if let location = playlist.location as? PeerMessagesPlaylistLocation, let messageId = location.messageId {
-                    return mediaPlaybackStoredState(engine: TelegramEngine(account: account), messageId: messageId)
+                    return mediaPlaybackStoredState(engine: context.engine, messageId: messageId)
                     |> map { storedState in
-                        return (account, playlist, settings, storedState)
+                        return (context, playlist, settings, storedState)
                     }
                 } else {
-                    return .single((account, playlist, settings, nil))
+                    return .single((context, playlist, settings, nil))
                 }
             }
         } else {
@@ -494,7 +494,7 @@ public final class MediaManagerImpl: NSObject, MediaManager {
             switch type {
             case .voice:
                 strongSelf.musicMediaPlayer?.control(.playback(.pause))
-                if let (account, playlist, settings, storedState) = inputData {
+                if let (context, playlist, settings, storedState) = inputData {
                     if areSharedMediaPlaylistsEqual(playlist, strongSelf.voiceMediaPlayer?.playlist), case .seek = control {
                         strongSelf.voiceMediaPlayer?.control(control)
                     } else {
@@ -506,7 +506,7 @@ public final class MediaManagerImpl: NSObject, MediaManager {
                             controlPlaybackWithProximity = playlist.context.sharedContext.currentMediaInputSettings.with({ $0.enableRaiseToSpeak })
                         }
-                        let voiceMediaPlayer = SharedMediaPlayer(mediaManager: strongSelf, inForeground: strongSelf.inForeground, account: account, audioSession: strongSelf.audioSession, overlayMediaManager: strongSelf.overlayMediaManager, playlist: playlist, initialOrder: .reversed, initialLooping: .none, initialPlaybackRate: settings.voicePlaybackRate, playerIndex: nextPlayerIndex, controlPlaybackWithProximity: controlPlaybackWithProximity, type: type, continueInstantVideoLoopAfterFinish: continueInstantVideoLoopAfterFinish)
+                        let voiceMediaPlayer = SharedMediaPlayer(context: context, mediaManager: strongSelf, inForeground: strongSelf.inForeground, account: context.account, audioSession: strongSelf.audioSession, overlayMediaManager: strongSelf.overlayMediaManager, playlist: playlist, initialOrder: .reversed, initialLooping: .none, initialPlaybackRate: settings.voicePlaybackRate, playerIndex: nextPlayerIndex, controlPlaybackWithProximity: controlPlaybackWithProximity, type: type, continueInstantVideoLoopAfterFinish: continueInstantVideoLoopAfterFinish)
                         strongSelf.voiceMediaPlayer = voiceMediaPlayer
                         voiceMediaPlayer.playedToEnd = { [weak voiceMediaPlayer] in
                             if let strongSelf = self, let voiceMediaPlayer = voiceMediaPlayer, voiceMediaPlayer === strongSelf.voiceMediaPlayer {
@@ -535,12 +535,12 @@ public final class MediaManagerImpl: NSObject, MediaManager {
                }
            case .music, .file:
                strongSelf.voiceMediaPlayer?.control(.playback(.pause))
-                if let (account, playlist, settings, storedState) = inputData {
+                if let (context, playlist, settings, storedState) = inputData {
                    if areSharedMediaPlaylistsEqual(playlist, strongSelf.musicMediaPlayer?.playlist), case .seek = control {
                        strongSelf.musicMediaPlayer?.control(control)
                    } else {
                        strongSelf.musicMediaPlayer?.stop()
-                        let musicMediaPlayer = SharedMediaPlayer(mediaManager: strongSelf, inForeground: strongSelf.inForeground, account: account, audioSession: strongSelf.audioSession, overlayMediaManager: strongSelf.overlayMediaManager, playlist: playlist, initialOrder: settings.order, initialLooping: settings.looping, initialPlaybackRate: storedState?.playbackRate ?? .x1, playerIndex: nextPlayerIndex, controlPlaybackWithProximity: false, type: type, continueInstantVideoLoopAfterFinish: true)
+                        let musicMediaPlayer = SharedMediaPlayer(context: context, mediaManager: strongSelf, inForeground: strongSelf.inForeground, account: context.account, audioSession: strongSelf.audioSession, overlayMediaManager: strongSelf.overlayMediaManager, playlist: playlist, initialOrder: settings.order, initialLooping: settings.looping, initialPlaybackRate: storedState?.playbackRate ?? .x1, playerIndex: nextPlayerIndex, controlPlaybackWithProximity: false, type: type, continueInstantVideoLoopAfterFinish: true)
                        strongSelf.musicMediaPlayer = musicMediaPlayer
                        musicMediaPlayer.cancelled = { [weak musicMediaPlayer] in
                            if let strongSelf = self, let musicMediaPlayer = musicMediaPlayer, musicMediaPlayer === strongSelf.musicMediaPlayer {

View File

@@ -308,7 +308,7 @@ func openChatMessageImpl(_ params: OpenChatMessageParams) -> Bool {
             }
             playerType = (file.isVoice || file.isInstantVideo) ? .voice : .file
         }
-        params.context.sharedContext.mediaManager.setPlaylist((params.context.account, PeerMessagesMediaPlaylist(context: params.context, location: location, chatLocationContextHolder: params.chatLocationContextHolder)), type: playerType, control: control)
+        params.context.sharedContext.mediaManager.setPlaylist((params.context, PeerMessagesMediaPlaylist(context: params.context, location: location, chatLocationContextHolder: params.chatLocationContextHolder)), type: playerType, control: control)
         return true
     case let .story(storyController):
         params.dismissInput()

View File

@@ -40,14 +40,14 @@ final class OverlayInstantVideoNode: OverlayMediaItemNode {
     var playbackEnded: (() -> Void)?
-    init(accountId: AccountRecordId, postbox: Postbox, audioSession: ManagedAudioSession, manager: UniversalVideoManager, content: UniversalVideoContent, close: @escaping () -> Void) {
+    init(context: AccountContext, postbox: Postbox, audioSession: ManagedAudioSession, manager: UniversalVideoManager, content: UniversalVideoContent, close: @escaping () -> Void) {
         self.close = close
         self.content = content
         var togglePlayPauseImpl: (() -> Void)?
         let decoration = OverlayInstantVideoDecoration(tapped: {
             togglePlayPauseImpl?()
         })
-        self.videoNode = UniversalVideoNode(accountId: accountId, postbox: postbox, audioSession: audioSession, manager: manager, decoration: decoration, content: content, priority: .secondaryOverlay, snapshotContentWhenGone: true)
+        self.videoNode = UniversalVideoNode(context: context, postbox: postbox, audioSession: audioSession, manager: manager, decoration: decoration, content: content, priority: .secondaryOverlay, snapshotContentWhenGone: true)
         self.decoration = decoration
         super.init()

View File

@ -111,6 +111,7 @@ private enum SharedMediaPlaybackItem: Equatable {
} }
final class SharedMediaPlayer { final class SharedMediaPlayer {
private weak var context: AccountContext?
private weak var mediaManager: MediaManager? private weak var mediaManager: MediaManager?
let account: Account let account: Account
private let audioSession: ManagedAudioSession private let audioSession: ManagedAudioSession
@ -179,7 +180,8 @@ final class SharedMediaPlayer {
let type: MediaManagerPlayerType let type: MediaManagerPlayerType
init(mediaManager: MediaManager, inForeground: Signal<Bool, NoError>, account: Account, audioSession: ManagedAudioSession, overlayMediaManager: OverlayMediaManager, playlist: SharedMediaPlaylist, initialOrder: MusicPlaybackSettingsOrder, initialLooping: MusicPlaybackSettingsLooping, initialPlaybackRate: AudioPlaybackRate, playerIndex: Int32, controlPlaybackWithProximity: Bool, type: MediaManagerPlayerType, continueInstantVideoLoopAfterFinish: Bool) { init(context: AccountContext, mediaManager: MediaManager, inForeground: Signal<Bool, NoError>, account: Account, audioSession: ManagedAudioSession, overlayMediaManager: OverlayMediaManager, playlist: SharedMediaPlaylist, initialOrder: MusicPlaybackSettingsOrder, initialLooping: MusicPlaybackSettingsLooping, initialPlaybackRate: AudioPlaybackRate, playerIndex: Int32, controlPlaybackWithProximity: Bool, type: MediaManagerPlayerType, continueInstantVideoLoopAfterFinish: Bool) {
self.context = context
self.mediaManager = mediaManager self.mediaManager = mediaManager
self.account = account self.account = account
self.audioSession = audioSession self.audioSession = audioSession
@@ -233,10 +235,10 @@ final class SharedMediaPlayer {
strongSelf.playbackItem = .audio(MediaPlayer(audioSessionManager: strongSelf.audioSession, postbox: strongSelf.account.postbox, userLocation: .other, userContentType: .audio, resourceReference: fileReference.resourceReference(fileReference.media.resource), streamable: playbackData.type == .music ? .conservative : .none, video: false, preferSoftwareDecoding: false, enableSound: true, baseRate: rateValue, fetchAutomatically: true, playAndRecord: controlPlaybackWithProximity, isAudioVideoMessage: playbackData.type == .voice)) strongSelf.playbackItem = .audio(MediaPlayer(audioSessionManager: strongSelf.audioSession, postbox: strongSelf.account.postbox, userLocation: .other, userContentType: .audio, resourceReference: fileReference.resourceReference(fileReference.media.resource), streamable: playbackData.type == .music ? .conservative : .none, video: false, preferSoftwareDecoding: false, enableSound: true, baseRate: rateValue, fetchAutomatically: true, playAndRecord: controlPlaybackWithProximity, isAudioVideoMessage: playbackData.type == .voice))
} }
case .instantVideo: case .instantVideo:
if let mediaManager = strongSelf.mediaManager, let item = item as? MessageMediaPlaylistItem { if let mediaManager = strongSelf.mediaManager, let context = strongSelf.context, let item = item as? MessageMediaPlaylistItem {
switch playbackData.source { switch playbackData.source {
case let .telegramFile(fileReference, _, _): case let .telegramFile(fileReference, _, _):
let videoNode = OverlayInstantVideoNode(accountId: strongSelf.account.id, postbox: strongSelf.account.postbox, audioSession: strongSelf.audioSession, manager: mediaManager.universalVideoManager, content: NativeVideoContent(id: .message(item.message.stableId, fileReference.media.fileId), userLocation: .peer(item.message.id.peerId), fileReference: fileReference, enableSound: false, baseRate: rateValue, isAudioVideoMessage: true, captureProtected: item.message.isCopyProtected(), storeAfterDownload: nil), close: { [weak mediaManager] in let videoNode = OverlayInstantVideoNode(context: context, postbox: strongSelf.account.postbox, audioSession: strongSelf.audioSession, manager: mediaManager.universalVideoManager, content: NativeVideoContent(id: .message(item.message.stableId, fileReference.media.fileId), userLocation: .peer(item.message.id.peerId), fileReference: fileReference, enableSound: false, baseRate: rateValue, isAudioVideoMessage: true, captureProtected: item.message.isCopyProtected(), storeAfterDownload: nil), close: { [weak mediaManager] in
mediaManager?.setPlaylist(nil, type: .voice, control: .playback(.pause)) mediaManager?.setPlaylist(nil, type: .voice, control: .playback(.pause))
}) })
strongSelf.playbackItem = .instantVideo(videoNode) strongSelf.playbackItem = .instantVideo(videoNode)
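
SharedMediaPlayer keeps the AccountContext it now receives only as a weak reference, mirroring mediaManager, so the player itself does not keep the context alive; the instant-video branch then requires both to still exist before it builds an OverlayInstantVideoNode. A minimal sketch of that ownership pattern (hypothetical type; it compiles only against the project's AccountContext and MediaManager):

final class PlayerOwnershipSketch {
    // Both owners are weak: the player must never extend their lifetime.
    private weak var context: AccountContext?
    private weak var mediaManager: MediaManager?

    init(context: AccountContext, mediaManager: MediaManager) {
        self.context = context
        self.mediaManager = mediaManager
    }

    func makeInstantVideoNodeIfPossible() {
        // If either owner is gone, the overlay node is simply never created.
        guard let context = self.context, let mediaManager = self.mediaManager else { return }
        _ = (context, mediaManager)
    }
}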

View File

@@ -63,6 +63,7 @@ public struct ExperimentalUISettings: Codable, Equatable {
public var enableLocalTranslation: Bool public var enableLocalTranslation: Bool
public var autoBenchmarkReflectors: Bool? public var autoBenchmarkReflectors: Bool?
public var conferenceCalls: Bool public var conferenceCalls: Bool
public var playerV2: Bool
public static var defaultSettings: ExperimentalUISettings { public static var defaultSettings: ExperimentalUISettings {
return ExperimentalUISettings( return ExperimentalUISettings(
@@ -103,7 +104,8 @@ public struct ExperimentalUISettings: Codable, Equatable {
dynamicStreaming: false, dynamicStreaming: false,
enableLocalTranslation: false, enableLocalTranslation: false,
autoBenchmarkReflectors: nil, autoBenchmarkReflectors: nil,
conferenceCalls: false conferenceCalls: false,
playerV2: false
) )
} }
@@ -145,7 +147,8 @@ public struct ExperimentalUISettings: Codable, Equatable {
dynamicStreaming: Bool, dynamicStreaming: Bool,
enableLocalTranslation: Bool, enableLocalTranslation: Bool,
autoBenchmarkReflectors: Bool?, autoBenchmarkReflectors: Bool?,
conferenceCalls: Bool conferenceCalls: Bool,
playerV2: Bool
) { ) {
self.keepChatNavigationStack = keepChatNavigationStack self.keepChatNavigationStack = keepChatNavigationStack
self.skipReadHistory = skipReadHistory self.skipReadHistory = skipReadHistory
@@ -185,6 +188,7 @@ public struct ExperimentalUISettings: Codable, Equatable {
self.enableLocalTranslation = enableLocalTranslation self.enableLocalTranslation = enableLocalTranslation
self.autoBenchmarkReflectors = autoBenchmarkReflectors self.autoBenchmarkReflectors = autoBenchmarkReflectors
self.conferenceCalls = conferenceCalls self.conferenceCalls = conferenceCalls
self.playerV2 = playerV2
} }
public init(from decoder: Decoder) throws { public init(from decoder: Decoder) throws {
@@ -228,6 +232,7 @@ public struct ExperimentalUISettings: Codable, Equatable {
self.enableLocalTranslation = try container.decodeIfPresent(Bool.self, forKey: "enableLocalTranslation") ?? false self.enableLocalTranslation = try container.decodeIfPresent(Bool.self, forKey: "enableLocalTranslation") ?? false
self.autoBenchmarkReflectors = try container.decodeIfPresent(Bool.self, forKey: "autoBenchmarkReflectors") self.autoBenchmarkReflectors = try container.decodeIfPresent(Bool.self, forKey: "autoBenchmarkReflectors")
self.conferenceCalls = try container.decodeIfPresent(Bool.self, forKey: "conferenceCalls") ?? false self.conferenceCalls = try container.decodeIfPresent(Bool.self, forKey: "conferenceCalls") ?? false
self.playerV2 = try container.decodeIfPresent(Bool.self, forKey: "playerV2") ?? false
} }
public func encode(to encoder: Encoder) throws { public func encode(to encoder: Encoder) throws {
@@ -271,6 +276,7 @@ public struct ExperimentalUISettings: Codable, Equatable {
try container.encode(self.enableLocalTranslation, forKey: "enableLocalTranslation") try container.encode(self.enableLocalTranslation, forKey: "enableLocalTranslation")
try container.encodeIfPresent(self.autoBenchmarkReflectors, forKey: "autoBenchmarkReflectors") try container.encodeIfPresent(self.autoBenchmarkReflectors, forKey: "autoBenchmarkReflectors")
try container.encodeIfPresent(self.conferenceCalls, forKey: "conferenceCalls") try container.encodeIfPresent(self.conferenceCalls, forKey: "conferenceCalls")
try container.encodeIfPresent(self.playerV2, forKey: "playerV2")
} }
} }
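
The new playerV2 flag follows the existing pattern for adding an experimental setting: the memberwise initializer and defaultSettings gain a false default, the decoder falls back to false when the key is absent so previously stored payloads still decode, and the encoder writes the key back. A standalone miniature of that pattern (hypothetical struct, standard CodingKeys instead of the project's string-keyed container):

struct MiniExperimentalSettings: Codable, Equatable {
    var conferenceCalls: Bool
    var playerV2: Bool

    private enum CodingKeys: String, CodingKey {
        case conferenceCalls
        case playerV2
    }

    init(conferenceCalls: Bool, playerV2: Bool) {
        self.conferenceCalls = conferenceCalls
        self.playerV2 = playerV2
    }

    init(from decoder: Decoder) throws {
        let container = try decoder.container(keyedBy: CodingKeys.self)
        self.conferenceCalls = try container.decodeIfPresent(Bool.self, forKey: .conferenceCalls) ?? false
        // Payloads stored before the flag existed have no "playerV2" key; default to false instead of throwing.
        self.playerV2 = try container.decodeIfPresent(Bool.self, forKey: .playerV2) ?? false
    }

    func encode(to encoder: Encoder) throws {
        var container = encoder.container(keyedBy: CodingKeys.self)
        try container.encode(self.conferenceCalls, forKey: .conferenceCalls)
        try container.encode(self.playerV2, forKey: .playerV2)
    }
}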

View File

@@ -264,8 +264,8 @@ public final class HLSVideoContent: UniversalVideoContent {
self.codecConfiguration = codecConfiguration self.codecConfiguration = codecConfiguration
} }
public func makeContentNode(accountId: AccountRecordId, postbox: Postbox, audioSession: ManagedAudioSession) -> UniversalVideoContentNode & ASDisplayNode { public func makeContentNode(context: AccountContext, postbox: Postbox, audioSession: ManagedAudioSession) -> UniversalVideoContentNode & ASDisplayNode {
return HLSVideoJSNativeContentNode(accountId: accountId, postbox: postbox, audioSessionManager: audioSession, userLocation: self.userLocation, fileReference: self.fileReference, streamVideo: self.streamVideo, loopVideo: self.loopVideo, enableSound: self.enableSound, baseRate: self.baseRate, fetchAutomatically: self.fetchAutomatically, onlyFullSizeThumbnail: self.onlyFullSizeThumbnail, useLargeThumbnail: self.useLargeThumbnail, autoFetchFullSizeThumbnail: self.autoFetchFullSizeThumbnail, codecConfiguration: self.codecConfiguration) return HLSVideoJSNativeContentNode(accountId: context.account.id, postbox: postbox, audioSessionManager: audioSession, userLocation: self.userLocation, fileReference: self.fileReference, streamVideo: self.streamVideo, loopVideo: self.loopVideo, enableSound: self.enableSound, baseRate: self.baseRate, fetchAutomatically: self.fetchAutomatically, onlyFullSizeThumbnail: self.onlyFullSizeThumbnail, useLargeThumbnail: self.useLargeThumbnail, autoFetchFullSizeThumbnail: self.autoFetchFullSizeThumbnail, codecConfiguration: self.codecConfiguration)
} }
public func isEqual(to other: UniversalVideoContent) -> Bool { public func isEqual(to other: UniversalVideoContent) -> Bool {

View File

@@ -1022,7 +1022,7 @@ final class HLSVideoJSNativeContentNode: ASDisplayNode, UniversalVideoContentNod
private var didBecomeActiveObserver: NSObjectProtocol? private var didBecomeActiveObserver: NSObjectProtocol?
private var willResignActiveObserver: NSObjectProtocol? private var willResignActiveObserver: NSObjectProtocol?
private let chunkPlayerPartsState = Promise<ChunkMediaPlayerPartsState>(ChunkMediaPlayerPartsState(duration: nil, parts: [])) private let chunkPlayerPartsState = Promise<ChunkMediaPlayerPartsState>(ChunkMediaPlayerPartsState(duration: nil, content: .parts([])))
private var sourceBufferStateDisposable: Disposable? private var sourceBufferStateDisposable: Disposable?
private var playerStatusDisposable: Disposable? private var playerStatusDisposable: Disposable?
@@ -1333,7 +1333,7 @@ final class HLSVideoJSNativeContentNode: ASDisplayNode, UniversalVideoContentNod
return return
} }
self.chunkPlayerPartsState.set(.single(ChunkMediaPlayerPartsState(duration: mediaSource.duration, parts: sourceBuffer.items))) self.chunkPlayerPartsState.set(.single(ChunkMediaPlayerPartsState(duration: mediaSource.duration, content: .parts(sourceBuffer.items))))
} }
fileprivate func onMediaSourceBuffersUpdated() { fileprivate func onMediaSourceBuffersUpdated() {
@@ -1347,7 +1347,7 @@ final class HLSVideoJSNativeContentNode: ASDisplayNode, UniversalVideoContentNod
return return
} }
self.chunkPlayerPartsState.set(.single(ChunkMediaPlayerPartsState(duration: mediaSource.duration, parts: sourceBuffer.items))) self.chunkPlayerPartsState.set(.single(ChunkMediaPlayerPartsState(duration: mediaSource.duration, content: .parts(sourceBuffer.items))))
if self.sourceBufferStateDisposable == nil { if self.sourceBufferStateDisposable == nil {
self.sourceBufferStateDisposable = (sourceBuffer.updated.signal() self.sourceBufferStateDisposable = (sourceBuffer.updated.signal()
|> deliverOnMainQueue).startStrict(next: { [weak self, weak sourceBuffer] _ in |> deliverOnMainQueue).startStrict(next: { [weak self, weak sourceBuffer] _ in
@@ -1357,7 +1357,7 @@ final class HLSVideoJSNativeContentNode: ASDisplayNode, UniversalVideoContentNod
guard let mediaSource = SharedHLSVideoJSContext.shared.mediaSources[sourceBuffer.mediaSourceId] else { guard let mediaSource = SharedHLSVideoJSContext.shared.mediaSources[sourceBuffer.mediaSourceId] else {
return return
} }
self.chunkPlayerPartsState.set(.single(ChunkMediaPlayerPartsState(duration: mediaSource.duration, parts: sourceBuffer.items))) self.chunkPlayerPartsState.set(.single(ChunkMediaPlayerPartsState(duration: mediaSource.duration, content: .parts(sourceBuffer.items))))
self.updateBuffered() self.updateBuffered()
}) })
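
ChunkMediaPlayerPartsState no longer takes a flat parts array; the parts are wrapped in a content value and passed as .parts(...). The real enum is defined in the player module and is not part of this diff; the sketch below is only an assumed shape, meant to make the three call sites above easier to read:

// Assumed shape, not taken from this commit: wrapping the array in a content
// enum leaves room for content representations other than plain parts.
enum PartsContentSketch<Part> {
    case parts([Part])
}

struct PartsStateSketch<Part> {
    var duration: Double?
    var content: PartsContentSketch<Part>
}

// Mirrors the empty initial value used above:
let emptyState = PartsStateSketch<Int>(duration: nil, content: .parts([]))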

View File

@@ -128,8 +128,8 @@ public final class NativeVideoContent: UniversalVideoContent {
self.hasSentFramesToDisplay = hasSentFramesToDisplay self.hasSentFramesToDisplay = hasSentFramesToDisplay
} }
public func makeContentNode(accountId: AccountRecordId, postbox: Postbox, audioSession: ManagedAudioSession) -> UniversalVideoContentNode & ASDisplayNode { public func makeContentNode(context: AccountContext, postbox: Postbox, audioSession: ManagedAudioSession) -> UniversalVideoContentNode & ASDisplayNode {
return NativeVideoContentNode(postbox: postbox, audioSessionManager: audioSession, userLocation: self.userLocation, fileReference: self.fileReference, previewSourceFileReference: self.previewSourceFileReference, limitedFileRange: self.limitedFileRange, imageReference: self.imageReference, streamVideo: self.streamVideo, loopVideo: self.loopVideo, enableSound: self.enableSound, soundMuted: self.soundMuted, beginWithAmbientSound: self.beginWithAmbientSound, mixWithOthers: self.mixWithOthers, baseRate: self.baseRate, fetchAutomatically: self.fetchAutomatically, onlyFullSizeThumbnail: self.onlyFullSizeThumbnail, useLargeThumbnail: self.useLargeThumbnail, autoFetchFullSizeThumbnail: self.autoFetchFullSizeThumbnail, startTimestamp: self.startTimestamp, endTimestamp: self.endTimestamp, continuePlayingWithoutSoundOnLostAudioSession: self.continuePlayingWithoutSoundOnLostAudioSession, placeholderColor: self.placeholderColor, tempFilePath: self.tempFilePath, isAudioVideoMessage: self.isAudioVideoMessage, captureProtected: self.captureProtected, hintDimensions: self.hintDimensions, storeAfterDownload: self.storeAfterDownload, displayImage: self.displayImage, hasSentFramesToDisplay: self.hasSentFramesToDisplay) return NativeVideoContentNode(context: context, postbox: postbox, audioSessionManager: audioSession, userLocation: self.userLocation, fileReference: self.fileReference, previewSourceFileReference: self.previewSourceFileReference, limitedFileRange: self.limitedFileRange, imageReference: self.imageReference, streamVideo: self.streamVideo, loopVideo: self.loopVideo, enableSound: self.enableSound, soundMuted: self.soundMuted, beginWithAmbientSound: self.beginWithAmbientSound, mixWithOthers: self.mixWithOthers, baseRate: self.baseRate, fetchAutomatically: self.fetchAutomatically, onlyFullSizeThumbnail: self.onlyFullSizeThumbnail, useLargeThumbnail: self.useLargeThumbnail, autoFetchFullSizeThumbnail: self.autoFetchFullSizeThumbnail, startTimestamp: self.startTimestamp, endTimestamp: self.endTimestamp, continuePlayingWithoutSoundOnLostAudioSession: self.continuePlayingWithoutSoundOnLostAudioSession, placeholderColor: self.placeholderColor, tempFilePath: self.tempFilePath, isAudioVideoMessage: self.isAudioVideoMessage, captureProtected: self.captureProtected, hintDimensions: self.hintDimensions, storeAfterDownload: self.storeAfterDownload, displayImage: self.displayImage, hasSentFramesToDisplay: self.hasSentFramesToDisplay)
} }
public func isEqual(to other: UniversalVideoContent) -> Bool { public func isEqual(to other: UniversalVideoContent) -> Bool {
@@ -296,7 +296,7 @@ private final class NativeVideoContentNode: ASDisplayNode, UniversalVideoContent
private let continuePlayingWithoutSoundOnLostAudioSession: Bool private let continuePlayingWithoutSoundOnLostAudioSession: Bool
private let displayImage: Bool private let displayImage: Bool
private var player: PlayerImpl private var player: PlayerImpl?
private var thumbnailPlayer: MediaPlayer? private var thumbnailPlayer: MediaPlayer?
private let imageNode: TransformImageNode private let imageNode: TransformImageNode
private let playerNode: MediaPlayerNode private let playerNode: MediaPlayerNode
@@ -338,6 +338,7 @@ private final class NativeVideoContentNode: ASDisplayNode, UniversalVideoContent
return self._ready.get() return self._ready.get()
} }
private var initializePlayerDisposable: Disposable?
private let fetchDisposable = MetaDisposable() private let fetchDisposable = MetaDisposable()
private let fetchStatusDisposable = MetaDisposable() private let fetchStatusDisposable = MetaDisposable()
@@ -347,10 +348,19 @@ private final class NativeVideoContentNode: ASDisplayNode, UniversalVideoContent
private var validLayout: (size: CGSize, actualSize: CGSize)? private var validLayout: (size: CGSize, actualSize: CGSize)?
private var shouldPlay: Bool = false private var shouldPlay: Bool = false
private var pendingSetSoundEnabled: Bool?
private var pendingSeek: Double?
private var pendingPlayOnceWithSound: (playAndRecord: Bool, seek: MediaPlayerSeek, actionAtEnd: MediaPlayerPlayOnceWithSoundActionAtEnd)?
private var pendingForceAudioToSpeaker: Bool?
private var pendingSetSoundMuted: Bool?
private var pendingContinueWithOverridingAmbientMode: Bool?
private var pendingSetBaseRate: Double?
private var pendingContinuePlayingWithoutSound: MediaPlayerPlayOnceWithSoundActionAtEnd?
private var pendingSetContinuePlayingWithoutSoundOnLostAudioSession: Bool?
private let hasSentFramesToDisplay: (() -> Void)? private let hasSentFramesToDisplay: (() -> Void)?
init(postbox: Postbox, audioSessionManager: ManagedAudioSession, userLocation: MediaResourceUserLocation, fileReference: FileMediaReference, previewSourceFileReference: FileMediaReference?, limitedFileRange: Range<Int64>?, imageReference: ImageMediaReference?, streamVideo: MediaPlayerStreaming, loopVideo: Bool, enableSound: Bool, soundMuted: Bool, beginWithAmbientSound: Bool, mixWithOthers: Bool, baseRate: Double, fetchAutomatically: Bool, onlyFullSizeThumbnail: Bool, useLargeThumbnail: Bool, autoFetchFullSizeThumbnail: Bool, startTimestamp: Double?, endTimestamp: Double?, continuePlayingWithoutSoundOnLostAudioSession: Bool = false, placeholderColor: UIColor, tempFilePath: String?, isAudioVideoMessage: Bool, captureProtected: Bool, hintDimensions: CGSize?, storeAfterDownload: (() -> Void)? = nil, displayImage: Bool, hasSentFramesToDisplay: (() -> Void)?) { init(context: AccountContext, postbox: Postbox, audioSessionManager: ManagedAudioSession, userLocation: MediaResourceUserLocation, fileReference: FileMediaReference, previewSourceFileReference: FileMediaReference?, limitedFileRange: Range<Int64>?, imageReference: ImageMediaReference?, streamVideo: MediaPlayerStreaming, loopVideo: Bool, enableSound: Bool, soundMuted: Bool, beginWithAmbientSound: Bool, mixWithOthers: Bool, baseRate: Double, fetchAutomatically: Bool, onlyFullSizeThumbnail: Bool, useLargeThumbnail: Bool, autoFetchFullSizeThumbnail: Bool, startTimestamp: Double?, endTimestamp: Double?, continuePlayingWithoutSoundOnLostAudioSession: Bool = false, placeholderColor: UIColor, tempFilePath: String?, isAudioVideoMessage: Bool, captureProtected: Bool, hintDimensions: CGSize?, storeAfterDownload: (() -> Void)? = nil, displayImage: Bool, hasSentFramesToDisplay: (() -> Void)?) {
self.postbox = postbox self.postbox = postbox
self.userLocation = userLocation self.userLocation = userLocation
self.fileReference = fileReference self.fileReference = fileReference
@@ -385,67 +395,6 @@ private final class NativeVideoContentNode: ASDisplayNode, UniversalVideoContent
self.playerNode = MediaPlayerNode(backgroundThread: false, captureProtected: captureProtected) self.playerNode = MediaPlayerNode(backgroundThread: false, captureProtected: captureProtected)
if !"".isEmpty {
let mediaPlayer = MediaPlayer(
audioSessionManager: audioSessionManager,
postbox: postbox,
userLocation: userLocation,
userContentType: userContentType,
resourceReference: fileReference.resourceReference(selectedFile.resource),
tempFilePath: tempFilePath,
limitedFileRange: limitedFileRange,
streamable: streamVideo,
video: true,
preferSoftwareDecoding: false,
playAutomatically: false,
enableSound: enableSound,
baseRate: baseRate,
fetchAutomatically: fetchAutomatically,
soundMuted: soundMuted,
ambient: beginWithAmbientSound,
mixWithOthers: mixWithOthers,
continuePlayingWithoutSoundOnLostAudioSession: continuePlayingWithoutSoundOnLostAudioSession,
storeAfterDownload: storeAfterDownload,
isAudioVideoMessage: isAudioVideoMessage
)
self.player = .legacy(mediaPlayer)
mediaPlayer.attachPlayerNode(self.playerNode)
} else {
let mediaPlayer = ChunkMediaPlayerV2(
audioSessionManager: audioSessionManager,
source: .directFetch(ChunkMediaPlayerV2.SourceDescription.ResourceDescription(
postbox: postbox,
reference: fileReference.resourceReference(selectedFile.resource),
userLocation: userLocation,
userContentType: userContentType,
statsCategory: statsCategoryForFileWithAttributes(fileReference.media.attributes),
fetchAutomatically: fetchAutomatically
)),
video: true,
playAutomatically: false,
enableSound: enableSound,
baseRate: baseRate,
soundMuted: soundMuted,
ambient: beginWithAmbientSound,
mixWithOthers: mixWithOthers,
continuePlayingWithoutSoundOnLostAudioSession: continuePlayingWithoutSoundOnLostAudioSession,
isAudioVideoMessage: isAudioVideoMessage,
playerNode: self.playerNode
)
self.player = .chunked(mediaPlayer)
}
var actionAtEndImpl: (() -> Void)?
if enableSound && !loopVideo {
self.player.actionAtEnd = .action({
actionAtEndImpl?()
})
} else {
self.player.actionAtEnd = .loop({
actionAtEndImpl?()
})
}
self.dimensions = fileReference.media.dimensions?.cgSize self.dimensions = fileReference.media.dimensions?.cgSize
if let dimensions = self.dimensions { if let dimensions = self.dimensions {
self.dimensionsPromise.set(dimensions) self.dimensionsPromise.set(dimensions)
@@ -469,10 +418,6 @@ private final class NativeVideoContentNode: ASDisplayNode, UniversalVideoContent
self.dimensionsPromise.set(dimensions) self.dimensionsPromise.set(dimensions)
} }
actionAtEndImpl = { [weak self] in
self?.performActionAtEnd()
}
if displayImage { if displayImage {
if captureProtected { if captureProtected {
setLayerDisableScreenshots(self.imageNode.layer, captureProtected) setLayerDisableScreenshots(self.imageNode.layer, captureProtected)
@@ -497,10 +442,6 @@ private final class NativeVideoContentNode: ASDisplayNode, UniversalVideoContent
} }
self.addSubnode(self.playerNode) self.addSubnode(self.playerNode)
self._status.set(combineLatest(self.dimensionsPromise.get(), self.player.status)
|> map { dimensions, status in
return MediaPlayerStatus(generationTimestamp: status.generationTimestamp, duration: status.duration, dimensions: dimensions, timestamp: status.timestamp, baseRate: status.baseRate, seekId: status.seekId, status: status.status, soundEnabled: status.soundEnabled)
})
self.fetchStatusDisposable.set((postbox.mediaBox.resourceStatus(selectedFile.resource) self.fetchStatusDisposable.set((postbox.mediaBox.resourceStatus(selectedFile.resource)
|> deliverOnMainQueue).start(next: { [weak self] status in |> deliverOnMainQueue).start(next: { [weak self] status in
@@ -536,15 +477,138 @@ private final class NativeVideoContentNode: ASDisplayNode, UniversalVideoContent
if let startTimestamp = startTimestamp { if let startTimestamp = startTimestamp {
self.seek(startTimestamp) self.seek(startTimestamp)
} }
var useLegacyImplementation = !context.sharedContext.immediateExperimentalUISettings.playerV2
if let data = context.currentAppConfiguration.with({ $0 }).data, let value = data["ios_video_legacyplayer"] as? Double {
useLegacyImplementation = value != 0.0
}
if useLegacyImplementation {
let mediaPlayer = MediaPlayer(
audioSessionManager: audioSessionManager,
postbox: postbox,
userLocation: userLocation,
userContentType: userContentType,
resourceReference: fileReference.resourceReference(selectedFile.resource),
tempFilePath: tempFilePath,
limitedFileRange: limitedFileRange,
streamable: streamVideo,
video: true,
preferSoftwareDecoding: false,
playAutomatically: false,
enableSound: enableSound,
baseRate: baseRate,
fetchAutomatically: fetchAutomatically,
soundMuted: soundMuted,
ambient: beginWithAmbientSound,
mixWithOthers: mixWithOthers,
continuePlayingWithoutSoundOnLostAudioSession: continuePlayingWithoutSoundOnLostAudioSession,
storeAfterDownload: storeAfterDownload,
isAudioVideoMessage: isAudioVideoMessage
)
mediaPlayer.attachPlayerNode(self.playerNode)
self.initializePlayer(player: .legacy(mediaPlayer))
} else {
let mediaPlayer = ChunkMediaPlayerV2(
audioSessionManager: audioSessionManager,
source: .directFetch(ChunkMediaPlayerV2.SourceDescription.ResourceDescription(
postbox: postbox,
reference: fileReference.resourceReference(selectedFile.resource),
userLocation: userLocation,
userContentType: userContentType,
statsCategory: statsCategoryForFileWithAttributes(fileReference.media.attributes),
fetchAutomatically: fetchAutomatically
)),
video: true,
playAutomatically: false,
enableSound: enableSound,
baseRate: baseRate,
soundMuted: soundMuted,
ambient: beginWithAmbientSound,
mixWithOthers: mixWithOthers,
continuePlayingWithoutSoundOnLostAudioSession: continuePlayingWithoutSoundOnLostAudioSession,
isAudioVideoMessage: isAudioVideoMessage,
playerNode: self.playerNode
)
self.initializePlayer(player: .chunked(mediaPlayer))
}
} }
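
Player construction moves out of the stored-property area and down here, after the thumbnail and fetch-status setup, where the new context parameter is available. The removed block above selected the implementation with if !"".isEmpty, which is always false ("" is empty), so the legacy MediaPlayer branch was dead code and ChunkMediaPlayerV2 was always used. The new code derives the choice from the playerV2 experimental flag, and a server-provided ios_video_legacyplayer app-configuration value, when present, overrides it in either direction; with the flag off and no override, the legacy MediaPlayer is used. The precedence in isolation:

// Illustration only: the same precedence expressed as a pure function.
// A server-side ios_video_legacyplayer value (0 = chunked, non-zero = legacy)
// wins over the local playerV2 experimental flag.
func useLegacyVideoPlayer(playerV2Enabled: Bool, appConfigLegacyPlayer: Double?) -> Bool {
    if let appConfigLegacyPlayer = appConfigLegacyPlayer {
        return appConfigLegacyPlayer != 0.0
    }
    return !playerV2Enabled
}

As before, only the legacy branch attaches the player node explicitly via attachPlayerNode; ChunkMediaPlayerV2 receives playerNode in its initializer.
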
deinit { deinit {
self.player.pause() self.initializePlayerDisposable?.dispose()
self.player?.pause()
self.thumbnailPlayer?.pause() self.thumbnailPlayer?.pause()
self.fetchDisposable.dispose() self.fetchDisposable.dispose()
self.fetchStatusDisposable.dispose() self.fetchStatusDisposable.dispose()
} }
private func initializePlayer(player: PlayerImpl) {
var player = player
self.player = player
var actionAtEndImpl: (() -> Void)?
if self.enableSound && !self.loopVideo {
player.actionAtEnd = .action({
actionAtEndImpl?()
})
} else {
player.actionAtEnd = .loop({
actionAtEndImpl?()
})
}
actionAtEndImpl = { [weak self] in
self?.performActionAtEnd()
}
self._status.set(combineLatest(self.dimensionsPromise.get(), player.status)
|> map { dimensions, status in
return MediaPlayerStatus(generationTimestamp: status.generationTimestamp, duration: status.duration, dimensions: dimensions, timestamp: status.timestamp, baseRate: status.baseRate, seekId: status.seekId, status: status.status, soundEnabled: status.soundEnabled)
})
if self.shouldPlay {
player.play()
} else {
player.pause()
}
if let pendingSeek = self.pendingSeek {
self.pendingSeek = nil
self.seek(pendingSeek)
}
if let pendingSetSoundEnabled = self.pendingSetSoundEnabled {
self.pendingSetSoundEnabled = nil
self.setSoundEnabled(pendingSetSoundEnabled)
}
if let pendingPlayOnceWithSound = self.pendingPlayOnceWithSound {
self.pendingPlayOnceWithSound = nil
self.playOnceWithSound(playAndRecord: pendingPlayOnceWithSound.playAndRecord, seek: pendingPlayOnceWithSound.seek, actionAtEnd: pendingPlayOnceWithSound.actionAtEnd)
}
if let pendingForceAudioToSpeaker = self.pendingForceAudioToSpeaker {
self.pendingForceAudioToSpeaker = nil
self.setForceAudioToSpeaker(pendingForceAudioToSpeaker)
}
if let pendingSetSoundMuted = self.pendingSetSoundMuted {
self.pendingSetSoundMuted = nil
self.setSoundMuted(soundMuted: pendingSetSoundMuted)
}
if let pendingContinueWithOverridingAmbientMode = self.pendingContinueWithOverridingAmbientMode {
self.pendingContinueWithOverridingAmbientMode = nil
self.continueWithOverridingAmbientMode(isAmbient: pendingContinueWithOverridingAmbientMode)
}
if let pendingSetBaseRate = self.pendingSetBaseRate {
self.pendingSetBaseRate = nil
self.setBaseRate(pendingSetBaseRate)
}
if let pendingContinuePlayingWithoutSound = self.pendingContinuePlayingWithoutSound {
self.pendingContinuePlayingWithoutSound = nil
self.continuePlayingWithoutSound(actionAtEnd: pendingContinuePlayingWithoutSound)
}
if let pendingSetContinuePlayingWithoutSoundOnLostAudioSession = self.pendingSetContinuePlayingWithoutSoundOnLostAudioSession {
self.pendingSetContinuePlayingWithoutSoundOnLostAudioSession = nil
self.setContinuePlayingWithoutSoundOnLostAudioSession(pendingSetContinuePlayingWithoutSoundOnLostAudioSession)
}
}
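
Because the player is now created in initializePlayer rather than in the property declarations, self.player is optional, and every command that can arrive before initialization (seek, sound toggles, base rate, and so on) is parked in one of the pending… properties declared above, then replayed here. Note that play() and pause() do not queue: they flip shouldPlay, which initializePlayer consults to start or pause the freshly created player; the startTimestamp seek issued in init travels through pendingSeek the same way. A self-contained miniature of the defer-and-replay pattern (hypothetical types, not the project's API):

// Commands issued before the backing player exists are recorded (latest value
// only) and replayed once initialization completes.
final class PlayerFacadeSketch {
    private var player: BackingPlayerSketch?
    private var pendingSeek: Double?
    private var pendingBaseRate: Double?
    private var shouldPlay = false

    func play() {
        shouldPlay = true
        player?.play()
    }

    func seek(_ timestamp: Double) {
        if let player = self.player {
            player.seek(timestamp)
        } else {
            pendingSeek = timestamp
        }
    }

    func setBaseRate(_ rate: Double) {
        if let player = self.player {
            player.setBaseRate(rate)
        } else {
            pendingBaseRate = rate
        }
    }

    func initializePlayer(_ player: BackingPlayerSketch) {
        self.player = player
        if shouldPlay {
            player.play()
        } else {
            player.pause()
        }
        // Replay whatever was requested while no player existed yet.
        if let pendingSeek = self.pendingSeek {
            self.pendingSeek = nil
            seek(pendingSeek)
        }
        if let pendingBaseRate = self.pendingBaseRate {
            self.pendingBaseRate = nil
            setBaseRate(pendingBaseRate)
        }
    }
}

final class BackingPlayerSketch {
    func play() {}
    func pause() {}
    func seek(_ timestamp: Double) {}
    func setBaseRate(_ rate: Double) {}
}
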
private func createThumbnailPlayer() { private func createThumbnailPlayer() {
guard let videoThumbnail = self.fileReference.media.videoThumbnails.first else { guard let videoThumbnail = self.fileReference.media.videoThumbnails.first else {
return return
@@ -639,41 +703,55 @@ private final class NativeVideoContentNode: ASDisplayNode, UniversalVideoContent
func play() { func play() {
assert(Queue.mainQueue().isCurrent()) assert(Queue.mainQueue().isCurrent())
self.player.play() self.player?.play()
self.shouldPlay = true self.shouldPlay = true
self.thumbnailPlayer?.play() self.thumbnailPlayer?.play()
} }
func pause() { func pause() {
assert(Queue.mainQueue().isCurrent()) assert(Queue.mainQueue().isCurrent())
self.player.pause() self.player?.pause()
self.shouldPlay = false self.shouldPlay = false
self.thumbnailPlayer?.pause() self.thumbnailPlayer?.pause()
} }
func togglePlayPause() { func togglePlayPause() {
assert(Queue.mainQueue().isCurrent()) assert(Queue.mainQueue().isCurrent())
self.player.togglePlayPause() self.player?.togglePlayPause()
self.shouldPlay = !self.shouldPlay self.shouldPlay = !self.shouldPlay
self.thumbnailPlayer?.togglePlayPause() self.thumbnailPlayer?.togglePlayPause()
} }
func setSoundEnabled(_ value: Bool) { func setSoundEnabled(_ value: Bool) {
assert(Queue.mainQueue().isCurrent()) assert(Queue.mainQueue().isCurrent())
if value { if let player = self.player {
self.player.playOnceWithSound(playAndRecord: false, seek: .none) if value {
player.playOnceWithSound(playAndRecord: false, seek: .none)
} else {
player.continuePlayingWithoutSound(seek: .none)
}
} else { } else {
self.player.continuePlayingWithoutSound(seek: .none) self.pendingSetSoundEnabled = value
} }
} }
func seek(_ timestamp: Double) { func seek(_ timestamp: Double) {
assert(Queue.mainQueue().isCurrent()) assert(Queue.mainQueue().isCurrent())
self.player.seek(timestamp: timestamp) if let player = self.player {
player.seek(timestamp: timestamp)
} else {
self.pendingSeek = timestamp
}
} }
func playOnceWithSound(playAndRecord: Bool, seek: MediaPlayerSeek, actionAtEnd: MediaPlayerPlayOnceWithSoundActionAtEnd) { func playOnceWithSound(playAndRecord: Bool, seek: MediaPlayerSeek, actionAtEnd: MediaPlayerPlayOnceWithSoundActionAtEnd) {
assert(Queue.mainQueue().isCurrent()) assert(Queue.mainQueue().isCurrent())
guard var player = self.player else {
self.pendingPlayOnceWithSound = (playAndRecord, seek, actionAtEnd)
return
}
let action = { [weak self] in let action = { [weak self] in
Queue.mainQueue().async { Queue.mainQueue().async {
self?.performActionAtEnd() self?.performActionAtEnd()
@@ -681,49 +759,65 @@ private final class NativeVideoContentNode: ASDisplayNode, UniversalVideoContent
} }
switch actionAtEnd { switch actionAtEnd {
case .loop: case .loop:
self.player.actionAtEnd = .loop({}) player.actionAtEnd = .loop({})
case .loopDisablingSound: case .loopDisablingSound:
self.player.actionAtEnd = .loopDisablingSound(action) player.actionAtEnd = .loopDisablingSound(action)
case .stop: case .stop:
self.player.actionAtEnd = .action(action) player.actionAtEnd = .action(action)
case .repeatIfNeeded: case .repeatIfNeeded:
let _ = (self.player.status let _ = (player.status
|> deliverOnMainQueue |> deliverOnMainQueue
|> take(1)).start(next: { [weak self] status in |> take(1)).start(next: { [weak self] status in
guard let strongSelf = self else { guard let strongSelf = self, var player = strongSelf.player else {
return return
} }
if status.timestamp > status.duration * 0.1 { if status.timestamp > status.duration * 0.1 {
strongSelf.player.actionAtEnd = .loop({ [weak self] in player.actionAtEnd = .loop({ [weak self] in
guard let strongSelf = self else { guard let strongSelf = self, var player = strongSelf.player else {
return return
} }
strongSelf.player.actionAtEnd = .loopDisablingSound(action) player.actionAtEnd = .loopDisablingSound(action)
}) })
} else { } else {
strongSelf.player.actionAtEnd = .loopDisablingSound(action) player.actionAtEnd = .loopDisablingSound(action)
} }
}) })
} }
self.player.playOnceWithSound(playAndRecord: playAndRecord, seek: seek) player.playOnceWithSound(playAndRecord: playAndRecord, seek: seek)
} }
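
The repeatIfNeeded handling is carried over essentially unchanged, just rewritten against the optional player: the status is sampled once, and the item gets one more full loop before the end-of-playback action switches to loopDisablingSound, but only if playback has already progressed past 10% of the duration. The threshold in isolation:

// Illustration only: the test used above, as a pure function.
func shouldRepeatOnce(timestamp: Double, duration: Double) -> Bool {
    return timestamp > duration * 0.1
}
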
func setForceAudioToSpeaker(_ forceAudioToSpeaker: Bool) { func setForceAudioToSpeaker(_ forceAudioToSpeaker: Bool) {
assert(Queue.mainQueue().isCurrent()) assert(Queue.mainQueue().isCurrent())
self.player.setForceAudioToSpeaker(forceAudioToSpeaker) if let player = self.player {
player.setForceAudioToSpeaker(forceAudioToSpeaker)
} else {
self.pendingForceAudioToSpeaker = forceAudioToSpeaker
}
} }
func setSoundMuted(soundMuted: Bool) { func setSoundMuted(soundMuted: Bool) {
self.player.setSoundMuted(soundMuted: soundMuted) if let player = self.player {
player.setSoundMuted(soundMuted: soundMuted)
} else {
self.pendingSetSoundMuted = soundMuted
}
} }
func continueWithOverridingAmbientMode(isAmbient: Bool) { func continueWithOverridingAmbientMode(isAmbient: Bool) {
self.player.continueWithOverridingAmbientMode(isAmbient: isAmbient) if let player = self.player {
player.continueWithOverridingAmbientMode(isAmbient: isAmbient)
} else {
self.pendingContinueWithOverridingAmbientMode = isAmbient
}
} }
func setBaseRate(_ baseRate: Double) { func setBaseRate(_ baseRate: Double) {
self.player.setBaseRate(baseRate) if let player = self.player {
player.setBaseRate(baseRate)
} else {
self.pendingSetBaseRate = baseRate
}
} }
func setVideoQuality(_ quality: UniversalVideoContentVideoQuality) { func setVideoQuality(_ quality: UniversalVideoContentVideoQuality) {
@@ -739,24 +833,34 @@ private final class NativeVideoContentNode: ASDisplayNode, UniversalVideoContent
func continuePlayingWithoutSound(actionAtEnd: MediaPlayerPlayOnceWithSoundActionAtEnd) { func continuePlayingWithoutSound(actionAtEnd: MediaPlayerPlayOnceWithSoundActionAtEnd) {
assert(Queue.mainQueue().isCurrent()) assert(Queue.mainQueue().isCurrent())
guard var player = self.player else {
self.pendingContinuePlayingWithoutSound = actionAtEnd
return
}
let action = { [weak self] in let action = { [weak self] in
Queue.mainQueue().async { Queue.mainQueue().async {
self?.performActionAtEnd() self?.performActionAtEnd()
} }
} }
switch actionAtEnd { switch actionAtEnd {
case .loop: case .loop:
self.player.actionAtEnd = .loop({}) player.actionAtEnd = .loop({})
case .loopDisablingSound, .repeatIfNeeded: case .loopDisablingSound, .repeatIfNeeded:
self.player.actionAtEnd = .loopDisablingSound(action) player.actionAtEnd = .loopDisablingSound(action)
case .stop: case .stop:
self.player.actionAtEnd = .action(action) player.actionAtEnd = .action(action)
} }
self.player.continuePlayingWithoutSound() player.continuePlayingWithoutSound()
} }
func setContinuePlayingWithoutSoundOnLostAudioSession(_ value: Bool) { func setContinuePlayingWithoutSoundOnLostAudioSession(_ value: Bool) {
self.player.setContinuePlayingWithoutSoundOnLostAudioSession(value) if let player = self.player {
player.setContinuePlayingWithoutSoundOnLostAudioSession(value)
} else {
self.pendingSetContinuePlayingWithoutSoundOnLostAudioSession = value
}
} }
func addPlaybackCompleted(_ f: @escaping () -> Void) -> Int { func addPlaybackCompleted(_ f: @escaping () -> Void) -> Int {

View File

@@ -41,7 +41,7 @@ public final class OverlayUniversalVideoNode: OverlayMediaItemNode, AVPictureInP
private var statusDisposable: Disposable? private var statusDisposable: Disposable?
private var status: MediaPlayerStatus? private var status: MediaPlayerStatus?
public init(accountId: AccountRecordId, postbox: Postbox, audioSession: ManagedAudioSession, manager: UniversalVideoManager, content: UniversalVideoContent, shouldBeDismissed: Signal<Bool, NoError> = .single(false), expand: @escaping () -> Void, close: @escaping () -> Void) { public init(context: AccountContext, postbox: Postbox, audioSession: ManagedAudioSession, manager: UniversalVideoManager, content: UniversalVideoContent, shouldBeDismissed: Signal<Bool, NoError> = .single(false), expand: @escaping () -> Void, close: @escaping () -> Void) {
self.content = content self.content = content
self.defaultExpand = expand self.defaultExpand = expand
@@ -62,7 +62,7 @@ public final class OverlayUniversalVideoNode: OverlayMediaItemNode, AVPictureInP
}, controlsAreShowingUpdated: { value in }, controlsAreShowingUpdated: { value in
controlsAreShowingUpdatedImpl?(value) controlsAreShowingUpdatedImpl?(value)
}) })
self.videoNode = UniversalVideoNode(accountId: accountId, postbox: postbox, audioSession: audioSession, manager: manager, decoration: decoration, content: content, priority: .overlay) self.videoNode = UniversalVideoNode(context: context, postbox: postbox, audioSession: audioSession, manager: manager, decoration: decoration, content: content, priority: .overlay)
self.decoration = decoration self.decoration = decoration
super.init() super.init()

View File

@@ -95,7 +95,7 @@ public final class PlatformVideoContent: UniversalVideoContent {
self.fetchAutomatically = fetchAutomatically self.fetchAutomatically = fetchAutomatically
} }
public func makeContentNode(accountId: AccountRecordId, postbox: Postbox, audioSession: ManagedAudioSession) -> UniversalVideoContentNode & ASDisplayNode { public func makeContentNode(context: AccountContext, postbox: Postbox, audioSession: ManagedAudioSession) -> UniversalVideoContentNode & ASDisplayNode {
return PlatformVideoContentNode(postbox: postbox, audioSessionManager: audioSession, userLocation: self.userLocation, content: self.content, streamVideo: self.streamVideo, loopVideo: self.loopVideo, enableSound: self.enableSound, baseRate: self.baseRate, fetchAutomatically: self.fetchAutomatically) return PlatformVideoContentNode(postbox: postbox, audioSessionManager: audioSession, userLocation: self.userLocation, content: self.content, streamVideo: self.streamVideo, loopVideo: self.loopVideo, enableSound: self.enableSound, baseRate: self.baseRate, fetchAutomatically: self.fetchAutomatically)
} }

View File

@@ -29,7 +29,7 @@ public final class SystemVideoContent: UniversalVideoContent {
self.duration = duration self.duration = duration
} }
public func makeContentNode(accountId: AccountRecordId, postbox: Postbox, audioSession: ManagedAudioSession) -> UniversalVideoContentNode & ASDisplayNode { public func makeContentNode(context: AccountContext, postbox: Postbox, audioSession: ManagedAudioSession) -> UniversalVideoContentNode & ASDisplayNode {
return SystemVideoContentNode(postbox: postbox, audioSessionManager: audioSession, userLocation: self.userLocation, url: self.url, imageReference: self.imageReference, intrinsicDimensions: self.dimensions, approximateDuration: self.duration) return SystemVideoContentNode(postbox: postbox, audioSessionManager: audioSession, userLocation: self.userLocation, url: self.url, imageReference: self.imageReference, intrinsicDimensions: self.dimensions, approximateDuration: self.duration)
} }
} }

View File

@@ -36,7 +36,7 @@ public final class WebEmbedVideoContent: UniversalVideoContent {
self.openUrl = openUrl self.openUrl = openUrl
} }
public func makeContentNode(accountId: AccountRecordId, postbox: Postbox, audioSession: ManagedAudioSession) -> UniversalVideoContentNode & ASDisplayNode { public func makeContentNode(context: AccountContext, postbox: Postbox, audioSession: ManagedAudioSession) -> UniversalVideoContentNode & ASDisplayNode {
return WebEmbedVideoContentNode(postbox: postbox, audioSessionManager: audioSession, userLocation: self.userLocation, webPage: self.webPage, webpageContent: self.webpageContent, forcedTimestamp: self.forcedTimestamp, openUrl: self.openUrl) return WebEmbedVideoContentNode(postbox: postbox, audioSessionManager: audioSession, userLocation: self.userLocation, webPage: self.webPage, webpageContent: self.webpageContent, forcedTimestamp: self.forcedTimestamp, openUrl: self.openUrl)
} }
} }

View File

@@ -165,7 +165,7 @@ final class WebSearchVideoGalleryItemNode: ZoomableContentGalleryItemNode {
let mediaManager = item.context.sharedContext.mediaManager let mediaManager = item.context.sharedContext.mediaManager
let videoNode = UniversalVideoNode(accountId: item.context.account.id, postbox: item.context.account.postbox, audioSession: mediaManager.audioSession, manager: mediaManager.universalVideoManager, decoration: GalleryVideoDecoration(), content: item.content, priority: .gallery) let videoNode = UniversalVideoNode(context: item.context, postbox: item.context.account.postbox, audioSession: mediaManager.audioSession, manager: mediaManager.universalVideoManager, decoration: GalleryVideoDecoration(), content: item.content, priority: .gallery)
let videoSize = CGSize(width: item.content.dimensions.width * 2.0, height: item.content.dimensions.height * 2.0) let videoSize = CGSize(width: item.content.dimensions.width * 2.0, height: item.content.dimensions.height * 2.0)
videoNode.updateLayout(size: videoSize, transition: .immediate) videoNode.updateLayout(size: videoSize, transition: .immediate)
self.videoNode = videoNode self.videoNode = videoNode