diff --git a/submodules/MediaPlayer/Sources/ChunkMediaPlayerV2.swift b/submodules/MediaPlayer/Sources/ChunkMediaPlayerV2.swift
index f9a8705393..2fd93a6fee 100644
--- a/submodules/MediaPlayer/Sources/ChunkMediaPlayerV2.swift
+++ b/submodules/MediaPlayer/Sources/ChunkMediaPlayerV2.swift
@@ -91,12 +91,10 @@ public final class ChunkMediaPlayerV2: ChunkMediaPlayer {
     private let renderSynchronizer: AVSampleBufferRenderSynchronizer
     private var videoRenderer: AVSampleBufferDisplayLayer
     private var audioRenderer: AVSampleBufferAudioRenderer?
-    private weak var videoNode: MediaPlayerNode?
     
     private var partsState = ChunkMediaPlayerPartsState(duration: nil, parts: [])
     private var loadedParts: [LoadedPart] = []
     private var loadedPartsMediaData: QueueLocalObject<LoadedPartsMediaData>
-    private var reportedDidEnqueueVideo: Bool = false
     private var hasSound: Bool = false
     
     private var statusValue: MediaPlayerStatus? {
@@ -175,7 +173,6 @@ public final class ChunkMediaPlayerV2: ChunkMediaPlayer {
             assertionFailure()
         }
         self.videoRenderer = playerNode.videoLayer ?? AVSampleBufferDisplayLayer()
-        self.videoNode = playerNode
         
         self.updateTimer = Foundation.Timer.scheduledTimer(withTimeInterval: 1.0 / 60.0, repeats: true, block: { [weak self] _ in
            guard let self else {
@@ -680,8 +677,8 @@ public final class ChunkMediaPlayerV2: ChunkMediaPlayer {
             
             videoTarget.requestMediaDataWhenReady(on: self.dataQueue.queue, using: { [weak self] in
                 if let loadedPartsMediaData = loadedPartsMediaData.unsafeGet() {
-                    let fillResult = ChunkMediaPlayerV2.fillRendererBuffer(bufferTarget: videoTarget, loadedPartsMediaData: loadedPartsMediaData, isVideo: true)
-                    if fillResult.isReadyForMoreData {
+                    let bufferIsReadyForMoreData = ChunkMediaPlayerV2.fillRendererBuffer(bufferTarget: videoTarget, loadedPartsMediaData: loadedPartsMediaData, isVideo: true)
+                    if bufferIsReadyForMoreData {
                         videoTarget.stopRequestingMediaData()
                         Queue.mainQueue().async {
                             guard let self else {
@@ -698,12 +695,11 @@ public final class ChunkMediaPlayerV2: ChunkMediaPlayer {
         if !self.audioIsRequestingMediaData, let audioRenderer = self.audioRenderer {
             self.audioIsRequestingMediaData = true
             let loadedPartsMediaData = self.loadedPartsMediaData
-            let reportedDidEnqueueVideo = self.reportedDidEnqueueVideo
             let audioTarget = audioRenderer
             audioTarget.requestMediaDataWhenReady(on: self.dataQueue.queue, using: { [weak self] in
                 if let loadedPartsMediaData = loadedPartsMediaData.unsafeGet() {
-                    let fillResult = ChunkMediaPlayerV2.fillRendererBuffer(bufferTarget: audioTarget, loadedPartsMediaData: loadedPartsMediaData, isVideo: false)
-                    if fillResult.isReadyForMoreData {
+                    let bufferIsReadyForMoreData = ChunkMediaPlayerV2.fillRendererBuffer(bufferTarget: audioTarget, loadedPartsMediaData: loadedPartsMediaData, isVideo: false)
+                    if bufferIsReadyForMoreData {
                         audioTarget.stopRequestingMediaData()
                         Queue.mainQueue().async {
                             guard let self else {
@@ -713,28 +709,13 @@ public final class ChunkMediaPlayerV2: ChunkMediaPlayer {
                             self.updateInternalState()
                         }
                     }
-                    if fillResult.didEnqueue && !reportedDidEnqueueVideo {
-                        Queue.mainQueue().async {
-                            guard let self else {
-                                return
-                            }
-                            self.reportedDidEnqueueVideo = true
-                            if #available(iOS 17.4, *) {
-                            } else {
-                                if let videoNode = self.videoNode {
-                                    videoNode.notifyHasSentFramesToDisplay()
-                                }
-                            }
-                        }
-                    }
                 }
             })
         }
     }
     
-    private static func fillRendererBuffer(bufferTarget: AVQueuedSampleBufferRendering, loadedPartsMediaData: LoadedPartsMediaData, isVideo: Bool) -> (isReadyForMoreData: Bool, didEnqueue: Bool) {
+    private static func fillRendererBuffer(bufferTarget: AVQueuedSampleBufferRendering, loadedPartsMediaData: LoadedPartsMediaData, isVideo: Bool) -> Bool {
         var bufferIsReadyForMoreData = true
-        var didEnqeue = false
        outer: while true {
            if !bufferTarget.isReadyForMoreMediaData {
                bufferIsReadyForMoreData = false
@@ -774,7 +755,9 @@ public final class ChunkMediaPlayerV2: ChunkMediaPlayer {
                        continue outer
                    }
                }
-                didEnqeue = true
+                /*if isVideo {
+                    print("Enqueue \(isVideo ? "video" : "audio") at \(CMSampleBufferGetPresentationTimeStamp(sampleBuffer).seconds) \(CMSampleBufferGetPresentationTimeStamp(sampleBuffer).value)/\(CMSampleBufferGetPresentationTimeStamp(sampleBuffer).timescale) next \(CMSampleBufferGetPresentationTimeStamp(sampleBuffer).value + CMSampleBufferGetDuration(sampleBuffer).value)")
+                }*/
                bufferTarget.enqueue(sampleBuffer)
                hasData = true
                continue outer
@@ -787,7 +770,7 @@ public final class ChunkMediaPlayerV2: ChunkMediaPlayer {
            }
        }
        
-        return (bufferIsReadyForMoreData, didEnqeue)
+        return bufferIsReadyForMoreData
     }
 }
 
diff --git a/submodules/MediaPlayer/Sources/MediaPlayerNode.swift b/submodules/MediaPlayer/Sources/MediaPlayerNode.swift
index 145d9adfcb..7b1273f115 100644
--- a/submodules/MediaPlayer/Sources/MediaPlayerNode.swift
+++ b/submodules/MediaPlayer/Sources/MediaPlayerNode.swift
@@ -444,11 +444,4 @@ public final class MediaPlayerNode: ASDisplayNode {
         }
         self.updateVideoInHierarchy?(self.videoInHierarchy || self.canPlaybackWithoutHierarchy)
     }
-    
-    func notifyHasSentFramesToDisplay() {
-        if !self.didNotifyVideoLayerReadyForDisplay {
-            self.didNotifyVideoLayerReadyForDisplay = true
-            self.hasSentFramesToDisplay?()
-        }
-    }
 }
diff --git a/submodules/TelegramUniversalVideoContent/Sources/HLSVideoContent.swift b/submodules/TelegramUniversalVideoContent/Sources/HLSVideoContent.swift
index 1bc584994d..f11705720d 100644
--- a/submodules/TelegramUniversalVideoContent/Sources/HLSVideoContent.swift
+++ b/submodules/TelegramUniversalVideoContent/Sources/HLSVideoContent.swift
@@ -28,6 +28,7 @@ public struct HLSCodecConfiguration {
 public extension HLSCodecConfiguration {
     init(context: AccountContext) {
         var isSoftwareAv1Supported = false
+        var isHardwareAv1Supported = false
         
         var length: Int = 4
         var cpuCount: UInt32 = 0
@@ -43,10 +44,6 @@
             isSoftwareAv1Supported = value != 0.0
         }
         
-        if "".isEmpty {
-            isSoftwareAv1Supported = false
-        }
-        
         self.init(isHardwareAv1Supported: isHardwareAv1Supported, isSoftwareAv1Supported: isSoftwareAv1Supported)
     }
 }