From 9bb46cb90b23b8081b88c3b058da321a68a179cf Mon Sep 17 00:00:00 2001 From: Isaac <> Date: Sat, 16 Nov 2024 23:30:03 +0400 Subject: [PATCH] Fix codec handling --- .../GalleryUI/Sources/GalleryController.swift | 2 +- .../Items/UniversalVideoGalleryItem.swift | 4 +- .../Sources/FFMpegAudioFrameDecoder.swift | 52 ++++---- .../Sources/FFMpegMediaFrameSource.swift | 9 +- ...pegMediaPassthroughVideoFrameDecoder.swift | 14 +- .../FFMpegMediaVideoFrameDecoder.swift | 27 ++-- .../Sources/MediaTrackFrameBuffer.swift | 8 +- .../Sources/MediaTrackFrameDecoder.swift | 3 +- .../Sources/SoftwareVideoSource.swift | 36 ++++-- .../ChatMessageInteractiveMediaNode.swift | 16 ++- .../Sources/HLSVideoContent.swift | 36 ++++-- .../Sources/HLSVideoJSNativeContentNode.swift | 16 ++- .../Sources/NativeVideoContent.swift | 120 +----------------- 13 files changed, 149 insertions(+), 194 deletions(-) diff --git a/submodules/GalleryUI/Sources/GalleryController.swift b/submodules/GalleryUI/Sources/GalleryController.swift index e78ce6375a..ace1079e98 100644 --- a/submodules/GalleryUI/Sources/GalleryController.swift +++ b/submodules/GalleryUI/Sources/GalleryController.swift @@ -259,7 +259,7 @@ public func galleryItemForEntry( } if isHLS { - content = HLSVideoContent(id: .message(message.stableId, file.fileId), userLocation: .peer(message.id.peerId), fileReference: .message(message: MessageReference(message), media: file), streamVideo: streamVideos, loopVideo: loopVideos) + content = HLSVideoContent(id: .message(message.stableId, file.fileId), userLocation: .peer(message.id.peerId), fileReference: .message(message: MessageReference(message), media: file), streamVideo: streamVideos, loopVideo: loopVideos, codecConfiguration: HLSCodecConfiguration(context: context)) } else { content = NativeVideoContent(id: .message(message.stableId, file.fileId), userLocation: .peer(message.id.peerId), fileReference: .message(message: MessageReference(message), media: file), imageReference: mediaImage.flatMap({ ImageMediaReference.message(message: MessageReference(message), media: $0) }), streamVideo: .conservative, loopVideo: loopVideos, tempFilePath: tempFilePath, captureProtected: captureProtected, storeAfterDownload: generateStoreAfterDownload?(message, file)) } diff --git a/submodules/GalleryUI/Sources/Items/UniversalVideoGalleryItem.swift b/submodules/GalleryUI/Sources/Items/UniversalVideoGalleryItem.swift index 958d30295d..bfe4d06aed 100644 --- a/submodules/GalleryUI/Sources/Items/UniversalVideoGalleryItem.swift +++ b/submodules/GalleryUI/Sources/Items/UniversalVideoGalleryItem.swift @@ -3597,7 +3597,7 @@ final class UniversalVideoGalleryItemNode: ZoomableContentGalleryItemNode { displayDebugInfo = true #endif } - if displayDebugInfo, let content = item.content as? HLSVideoContent, let qualitySet = HLSQualitySet(baseFile: content.fileReference), let qualityFile = qualitySet.qualityFiles[quality] { + if displayDebugInfo, let content = item.content as?
HLSVideoContent, let qualitySet = HLSQualitySet(baseFile: content.fileReference, codecConfiguration: HLSCodecConfiguration(context: strongSelf.context)), let qualityFile = qualitySet.qualityFiles[quality] { for attribute in qualityFile.media.attributes { if case let .Video(_, _, _, _, _, videoCodec) = attribute, let videoCodec { qualityDebugText += " \(videoCodec)" @@ -3647,7 +3647,7 @@ final class UniversalVideoGalleryItemNode: ZoomableContentGalleryItemNode { if qualityState.available.isEmpty { return } - guard let qualitySet = HLSQualitySet(baseFile: content.fileReference) else { + guard let qualitySet = HLSQualitySet(baseFile: content.fileReference, codecConfiguration: HLSCodecConfiguration(context: self.context)) else { return } diff --git a/submodules/MediaPlayer/Sources/FFMpegAudioFrameDecoder.swift b/submodules/MediaPlayer/Sources/FFMpegAudioFrameDecoder.swift index 141d723efa..16e9619170 100644 --- a/submodules/MediaPlayer/Sources/FFMpegAudioFrameDecoder.swift +++ b/submodules/MediaPlayer/Sources/FFMpegAudioFrameDecoder.swift @@ -6,6 +6,7 @@ final class FFMpegAudioFrameDecoder: MediaTrackFrameDecoder { private let codecContext: FFMpegAVCodecContext private let swrContext: FFMpegSWResample + private var timescale: CMTimeScale = 44000 private let audioFrame: FFMpegAVFrame private var resetDecoderOnNextFrame = true @@ -59,31 +60,34 @@ final class FFMpegAudioFrameDecoder: MediaTrackFrameDecoder { } } - func decode(frame: MediaTrackDecodableFrame) -> MediaTrackFrame? { + func send(frame: MediaTrackDecodableFrame) -> Bool { + self.timescale = frame.pts.timescale let status = frame.packet.send(toDecoder: self.codecContext) - if status == 0 { - while true { - let result = self.codecContext.receive(into: self.audioFrame) - if case .success = result { - if let convertedFrame = convertAudioFrame(self.audioFrame, pts: frame.pts) { - self.delayedFrames.append(convertedFrame) - } - } else { - break + return status == 0 + } + + func decode() -> MediaTrackFrame? { + while true { + let result = self.codecContext.receive(into: self.audioFrame) + if case .success = result { + if let convertedFrame = convertAudioFrame(self.audioFrame) { + self.delayedFrames.append(convertedFrame) + } + } else { + break + } + } + + if self.delayedFrames.count >= 1 { + var minFrameIndex = 0 + var minPosition = self.delayedFrames[0].position + for i in 1 ..< self.delayedFrames.count { + if CMTimeCompare(self.delayedFrames[i].position, minPosition) < 0 { + minFrameIndex = i + minPosition = self.delayedFrames[i].position } } - - if self.delayedFrames.count >= 1 { - var minFrameIndex = 0 - var minPosition = self.delayedFrames[0].position - for i in 1 ..< self.delayedFrames.count { - if CMTimeCompare(self.delayedFrames[i].position, minPosition) < 0 { - minFrameIndex = i - minPosition = self.delayedFrames[i].position - } - } - return self.delayedFrames.remove(at: minFrameIndex) - } + return self.delayedFrames.remove(at: minFrameIndex) } return nil @@ -121,7 +125,7 @@ final class FFMpegAudioFrameDecoder: MediaTrackFrameDecoder { } } - private func convertAudioFrame(_ frame: FFMpegAVFrame, pts: CMTime) -> MediaTrackFrame? { + private func convertAudioFrame(_ frame: FFMpegAVFrame) -> MediaTrackFrame? { guard let data = self.swrContext.resample(frame) else { return nil } @@ -137,6 +141,8 @@ final class FFMpegAudioFrameDecoder: MediaTrackFrameDecoder { var sampleBuffer: CMSampleBuffer? 
+ let pts = CMTime(value: frame.pts, timescale: self.timescale) + guard CMAudioSampleBufferCreateReadyWithPacketDescriptions(allocator: nil, dataBuffer: blockBuffer!, formatDescription: self.formatDescription, sampleCount: Int(data.count / 2), presentationTimeStamp: pts, packetDescriptions: nil, sampleBufferOut: &sampleBuffer) == noErr else { return nil } diff --git a/submodules/MediaPlayer/Sources/FFMpegMediaFrameSource.swift b/submodules/MediaPlayer/Sources/FFMpegMediaFrameSource.swift index d161b921e9..2c1b63cd6b 100644 --- a/submodules/MediaPlayer/Sources/FFMpegMediaFrameSource.swift +++ b/submodules/MediaPlayer/Sources/FFMpegMediaFrameSource.swift @@ -283,8 +283,15 @@ public final class FFMpegMediaFrameSource: NSObject, MediaFrameSource { if let video = streamDescriptions.video { videoBuffer = MediaTrackFrameBuffer(frameSource: strongSelf, decoder: video.decoder, type: .video, startTime: video.startTime, duration: video.duration, rotationAngle: video.rotationAngle, aspect: video.aspect, stallDuration: strongSelf.stallDuration, lowWaterDuration: strongSelf.lowWaterDuration, highWaterDuration: strongSelf.highWaterDuration) for videoFrame in streamDescriptions.extraVideoFrames { - if let decodedFrame = video.decoder.decode(frame: videoFrame) { + if !video.decoder.send(frame: videoFrame) { + break + } + } + while true { + if let decodedFrame = video.decoder.decode() { extraDecodedVideoFrames.append(decodedFrame) + } else { + break } } } diff --git a/submodules/MediaPlayer/Sources/FFMpegMediaPassthroughVideoFrameDecoder.swift b/submodules/MediaPlayer/Sources/FFMpegMediaPassthroughVideoFrameDecoder.swift index 4d114d4c2e..c5380d138f 100644 --- a/submodules/MediaPlayer/Sources/FFMpegMediaPassthroughVideoFrameDecoder.swift +++ b/submodules/MediaPlayer/Sources/FFMpegMediaPassthroughVideoFrameDecoder.swift @@ -20,12 +20,24 @@ final class FFMpegMediaPassthroughVideoFrameDecoder: MediaTrackFrameDecoder { private let rotationAngle: Double private var resetDecoderOnNextFrame = true + private var sentFrameQueue: [MediaTrackDecodableFrame] = [] + init(videoFormatData: VideoFormatData, rotationAngle: Double) { self.videoFormatData = videoFormatData self.rotationAngle = rotationAngle } - func decode(frame: MediaTrackDecodableFrame) -> MediaTrackFrame? { + func send(frame: MediaTrackDecodableFrame) -> Bool { + self.sentFrameQueue.append(frame) + return true + } + + func decode() -> MediaTrackFrame? { + guard let frame = self.sentFrameQueue.first else { + return nil + } + self.sentFrameQueue.removeFirst() + if self.videoFormat == nil { if self.videoFormatData.codecType == kCMVideoCodecType_MPEG4Video { self.videoFormat = FFMpegMediaFrameSourceContextHelpers.createFormatDescriptionFromMpeg4CodecData(UInt32(kCMVideoCodecType_MPEG4Video), self.videoFormatData.width, self.videoFormatData.height, self.videoFormatData.extraData) diff --git a/submodules/MediaPlayer/Sources/FFMpegMediaVideoFrameDecoder.swift b/submodules/MediaPlayer/Sources/FFMpegMediaVideoFrameDecoder.swift index 5343b82dcf..5bae492ce9 100644 --- a/submodules/MediaPlayer/Sources/FFMpegMediaVideoFrameDecoder.swift +++ b/submodules/MediaPlayer/Sources/FFMpegMediaVideoFrameDecoder.swift @@ -72,11 +72,10 @@ public final class FFMpegMediaVideoFrameDecoder: MediaTrackFrameDecoder { } func decodeInternal(frame: MediaTrackDecodableFrame) { - } - public func decode(frame: MediaTrackDecodableFrame) -> MediaTrackFrame? { - return self.decode(frame: frame, ptsOffset: nil) + public func decode() -> MediaTrackFrame? 
{ + return self.decode(ptsOffset: nil) } public func sendToDecoder(frame: MediaTrackDecodableFrame) -> Bool { @@ -126,15 +125,23 @@ public final class FFMpegMediaVideoFrameDecoder: MediaTrackFrameDecoder { } } - public func decode(frame: MediaTrackDecodableFrame, ptsOffset: CMTime?, forceARGB: Bool = false, unpremultiplyAlpha: Bool = true, displayImmediately: Bool = true) -> MediaTrackFrame? { - if self.isError { - return nil - } - + public func send(frame: MediaTrackDecodableFrame) -> Bool { let status = frame.packet.send(toDecoder: self.codecContext) if status == 0 { self.defaultDuration = frame.duration self.defaultTimescale = frame.pts.timescale + return true + } else { + return false + } + } + + public func decode(ptsOffset: CMTime?, forceARGB: Bool = false, unpremultiplyAlpha: Bool = true, displayImmediately: Bool = true) -> MediaTrackFrame? { + if self.isError { + return nil + } + guard let defaultDuration = self.defaultDuration, let defaultTimescale = self.defaultTimescale else { + return nil } if self.codecContext.receive(into: self.videoFrame) == .success { @@ -143,11 +150,11 @@ public final class FFMpegMediaVideoFrameDecoder: MediaTrackFrameDecoder { return nil } - var pts = CMTimeMake(value: self.videoFrame.pts, timescale: frame.pts.timescale) + var pts = CMTimeMake(value: self.videoFrame.pts, timescale: defaultTimescale) if let ptsOffset = ptsOffset { pts = CMTimeAdd(pts, ptsOffset) } - return convertVideoFrame(self.videoFrame, pts: pts, dts: pts, duration: frame.duration, forceARGB: forceARGB, unpremultiplyAlpha: unpremultiplyAlpha, displayImmediately: displayImmediately) + return convertVideoFrame(self.videoFrame, pts: pts, dts: pts, duration: defaultDuration, forceARGB: forceARGB, unpremultiplyAlpha: unpremultiplyAlpha, displayImmediately: displayImmediately) } return nil diff --git a/submodules/MediaPlayer/Sources/MediaTrackFrameBuffer.swift b/submodules/MediaPlayer/Sources/MediaTrackFrameBuffer.swift index d23fa4c77f..50cfa818f7 100644 --- a/submodules/MediaPlayer/Sources/MediaTrackFrameBuffer.swift +++ b/submodules/MediaPlayer/Sources/MediaTrackFrameBuffer.swift @@ -185,8 +185,12 @@ public final class MediaTrackFrameBuffer { if !self.frames.isEmpty { let frame = self.frames.removeFirst() - if let decodedFrame = self.decoder.decode(frame: frame) { - return .frame(decodedFrame) + if self.decoder.send(frame: frame) { + if let decodedFrame = self.decoder.decode() { + return .frame(decodedFrame) + } else { + return .skipFrame + } } else { return .skipFrame } diff --git a/submodules/MediaPlayer/Sources/MediaTrackFrameDecoder.swift b/submodules/MediaPlayer/Sources/MediaTrackFrameDecoder.swift index 0013f433a1..158e5007c6 100644 --- a/submodules/MediaPlayer/Sources/MediaTrackFrameDecoder.swift +++ b/submodules/MediaPlayer/Sources/MediaTrackFrameDecoder.swift @@ -1,6 +1,7 @@ protocol MediaTrackFrameDecoder { - func decode(frame: MediaTrackDecodableFrame) -> MediaTrackFrame? + func send(frame: MediaTrackDecodableFrame) -> Bool + func decode() -> MediaTrackFrame? func takeQueuedFrame() -> MediaTrackFrame? func takeRemainingFrame() -> MediaTrackFrame? 
func reset() diff --git a/submodules/MediaPlayer/Sources/SoftwareVideoSource.swift b/submodules/MediaPlayer/Sources/SoftwareVideoSource.swift index 869ca33b6a..b8150e4c5b 100644 --- a/submodules/MediaPlayer/Sources/SoftwareVideoSource.swift +++ b/submodules/MediaPlayer/Sources/SoftwareVideoSource.swift @@ -277,10 +277,14 @@ public final class SoftwareVideoSource { if let maxPts = maxPts, CMTimeCompare(decodableFrame.pts, maxPts) < 0 { ptsOffset = maxPts } - if let decoder = videoStream.decoder as? FFMpegMediaVideoFrameDecoder { - result = (decoder.decode(frame: decodableFrame, ptsOffset: ptsOffset, forceARGB: self.hintVP9, unpremultiplyAlpha: self.unpremultiplyAlpha), CGFloat(videoStream.rotationAngle), CGFloat(videoStream.aspect), loop) + if videoStream.decoder.send(frame: decodableFrame) { + if let decoder = videoStream.decoder as? FFMpegMediaVideoFrameDecoder { + result = (decoder.decode(ptsOffset: ptsOffset, forceARGB: self.hintVP9, unpremultiplyAlpha: self.unpremultiplyAlpha), CGFloat(videoStream.rotationAngle), CGFloat(videoStream.aspect), loop) + } else { + result = (videoStream.decoder.decode(), CGFloat(videoStream.rotationAngle), CGFloat(videoStream.aspect), loop) + } } else { - result = (videoStream.decoder.decode(frame: decodableFrame), CGFloat(videoStream.rotationAngle), CGFloat(videoStream.aspect), loop) + result = (nil, CGFloat(videoStream.rotationAngle), CGFloat(videoStream.aspect), loop) } } else { result = (nil, CGFloat(videoStream.rotationAngle), CGFloat(videoStream.aspect), loop) @@ -518,11 +522,17 @@ public final class SoftwareAudioSource { return nil } - let (decodableFrame, _) = self.readDecodableFrame() - if let decodableFrame = decodableFrame { - return audioStream.decoder.decode(frame: decodableFrame)?.sampleBuffer - } else { - return nil + while true { + let (decodableFrame, _) = self.readDecodableFrame() + if let decodableFrame = decodableFrame { + if audioStream.decoder.send(frame: decodableFrame) { + if let result = audioStream.decoder.decode() { + return result.sampleBuffer + } + } + } else { + return nil + } } } @@ -730,10 +740,14 @@ final class SoftwareVideoReader { while !self.readingError && !self.hasReadToEnd { if let decodableFrame = self.readDecodableFrame() { var result: (MediaTrackFrame?, CGFloat, CGFloat) - if let decoder = videoStream.decoder as? FFMpegMediaVideoFrameDecoder { - result = (decoder.decode(frame: decodableFrame, ptsOffset: nil, forceARGB: false, unpremultiplyAlpha: false, displayImmediately: false), CGFloat(videoStream.rotationAngle), CGFloat(videoStream.aspect)) + if videoStream.decoder.send(frame: decodableFrame) { + if let decoder = videoStream.decoder as? 
FFMpegMediaVideoFrameDecoder { + result = (decoder.decode(ptsOffset: nil, forceARGB: false, unpremultiplyAlpha: false, displayImmediately: false), CGFloat(videoStream.rotationAngle), CGFloat(videoStream.aspect)) + } else { + result = (videoStream.decoder.decode(), CGFloat(videoStream.rotationAngle), CGFloat(videoStream.aspect)) + } } else { - result = (videoStream.decoder.decode(frame: decodableFrame), CGFloat(videoStream.rotationAngle), CGFloat(videoStream.aspect)) + result = (nil, CGFloat(videoStream.rotationAngle), CGFloat(videoStream.aspect)) } if let frame = result.0 { return frame diff --git a/submodules/TelegramUI/Components/Chat/ChatMessageInteractiveMediaNode/Sources/ChatMessageInteractiveMediaNode.swift b/submodules/TelegramUI/Components/Chat/ChatMessageInteractiveMediaNode/Sources/ChatMessageInteractiveMediaNode.swift index 5577776ac6..9f106c9549 100644 --- a/submodules/TelegramUI/Components/Chat/ChatMessageInteractiveMediaNode/Sources/ChatMessageInteractiveMediaNode.swift +++ b/submodules/TelegramUI/Components/Chat/ChatMessageInteractiveMediaNode/Sources/ChatMessageInteractiveMediaNode.swift @@ -1283,7 +1283,7 @@ public final class ChatMessageInteractiveMediaNode: ASDisplayNode, GalleryItemTr loadHLSRangeVideoFile = file var passFile = true - if NativeVideoContent.isHLSVideo(file: file), let minimizedQualityFile = HLSVideoContent.minimizedHLSQuality(file: .message(message: MessageReference(message), media: file)) { + if NativeVideoContent.isHLSVideo(file: file), let minimizedQualityFile = HLSVideoContent.minimizedHLSQuality(file: .message(message: MessageReference(message), media: file), codecConfiguration: HLSCodecConfiguration(context: context)) { if !useInlineHLS { file = minimizedQualityFile.file.media } @@ -1410,7 +1410,7 @@ public final class ChatMessageInteractiveMediaNode: ASDisplayNode, GalleryItemTr loadHLSRangeVideoFile = file var passFile = true - if NativeVideoContent.isHLSVideo(file: file), let minimizedQualityFile = HLSVideoContent.minimizedHLSQuality(file: .message(message: MessageReference(message), media: file)) { + if NativeVideoContent.isHLSVideo(file: file), let minimizedQualityFile = HLSVideoContent.minimizedHLSQuality(file: .message(message: MessageReference(message), media: file), codecConfiguration: HLSCodecConfiguration(context: context)) { if !useInlineHLS { file = minimizedQualityFile.file.media } @@ -1603,7 +1603,7 @@ public final class ChatMessageInteractiveMediaNode: ASDisplayNode, GalleryItemTr } } } else if let file = media as? 
TelegramMediaFile { - if NativeVideoContent.isHLSVideo(file: file), let minimizedQuality = HLSVideoContent.minimizedHLSQuality(file: .standalone(media: file)) { + if NativeVideoContent.isHLSVideo(file: file), let minimizedQuality = HLSVideoContent.minimizedHLSQuality(file: .standalone(media: file), codecConfiguration: HLSCodecConfiguration(context: context)) { let postbox = context.account.postbox let playlistStatusSignal = postbox.mediaBox.resourceStatus(minimizedQuality.playlist.media.resource) @@ -1630,7 +1630,7 @@ public final class ChatMessageInteractiveMediaNode: ASDisplayNode, GalleryItemTr break } - return HLSVideoContent.minimizedHLSQualityPreloadData(postbox: postbox, file: .message(message: MessageReference(message), media: file), userLocation: .peer(message.id.peerId), prefixSeconds: 10, autofetchPlaylist: true) + return HLSVideoContent.minimizedHLSQualityPreloadData(postbox: postbox, file: .message(message: MessageReference(message), media: file), userLocation: .peer(message.id.peerId), prefixSeconds: 10, autofetchPlaylist: true, codecConfiguration: HLSCodecConfiguration(context: context)) |> mapToSignal { preloadData -> Signal<(MediaResourceStatus, MediaResourceStatus?), NoError> in guard let preloadData else { return .single((.Local, nil)) @@ -1804,7 +1804,8 @@ public final class ChatMessageInteractiveMediaNode: ASDisplayNode, GalleryItemTr enableSound: false, fetchAutomatically: false, onlyFullSizeThumbnail: (onlyFullSizeVideoThumbnail ?? false), - autoFetchFullSizeThumbnail: true + autoFetchFullSizeThumbnail: true, + codecConfiguration: HLSCodecConfiguration(context: context) ) } else { videoContent = NativeVideoContent( @@ -2015,7 +2016,7 @@ public final class ChatMessageInteractiveMediaNode: ASDisplayNode, GalleryItemTr if automaticDownload != .none, let file = media as? 
TelegramMediaFile, NativeVideoContent.isHLSVideo(file: file) { let postbox = context.account.postbox - let fetchSignal = HLSVideoContent.minimizedHLSQualityPreloadData(postbox: context.account.postbox, file: .message(message: MessageReference(message), media: file), userLocation: .peer(message.id.peerId), prefixSeconds: 10, autofetchPlaylist: true) + let fetchSignal = HLSVideoContent.minimizedHLSQualityPreloadData(postbox: context.account.postbox, file: .message(message: MessageReference(message), media: file), userLocation: .peer(message.id.peerId), prefixSeconds: 10, autofetchPlaylist: true, codecConfiguration: HLSCodecConfiguration(context: context)) |> mapToSignal { fileAndRange -> Signal in guard let fileAndRange else { return .complete() @@ -2091,7 +2092,8 @@ public final class ChatMessageInteractiveMediaNode: ASDisplayNode, GalleryItemTr file: .message(message: MessageReference(message), media: loadHLSRangeVideoFile), userLocation: .peer(message.id.peerId), prefixSeconds: 10, - autofetchPlaylist: true + autofetchPlaylist: true, + codecConfiguration: HLSCodecConfiguration(context: context) ) |> deliverOnMainQueue).startStrict(next: { [weak strongSelf] preloadData in guard let strongSelf else { diff --git a/submodules/TelegramUniversalVideoContent/Sources/HLSVideoContent.swift b/submodules/TelegramUniversalVideoContent/Sources/HLSVideoContent.swift index e3f51e330f..23a24b4e12 100644 --- a/submodules/TelegramUniversalVideoContent/Sources/HLSVideoContent.swift +++ b/submodules/TelegramUniversalVideoContent/Sources/HLSVideoContent.swift @@ -15,25 +15,31 @@ import TelegramVoip import ManagedFile import AppBundle -let isHighPerformanceDevice: Bool = { - var length: Int = 4 - var cpuCount: UInt32 = 0 - sysctlbyname("hw.ncpu", &cpuCount, &length, nil, 0) +public struct HLSCodecConfiguration { + public var isSoftwareAv1Supported: Bool - return cpuCount >= 6 -}() + public init(isSoftwareAv1Supported: Bool) { + self.isSoftwareAv1Supported = isSoftwareAv1Supported + } +} + +public extension HLSCodecConfiguration { + init(context: AccountContext) { + self.init(isSoftwareAv1Supported: false) + } +} public final class HLSQualitySet { public let qualityFiles: [Int: FileMediaReference] public let playlistFiles: [Int: FileMediaReference] - public init?(baseFile: FileMediaReference) { + public init?(baseFile: FileMediaReference, codecConfiguration: HLSCodecConfiguration) { var qualityFiles: [Int: FileMediaReference] = [:] for alternativeRepresentation in baseFile.media.alternativeRepresentations { if let alternativeFile = alternativeRepresentation as? TelegramMediaFile { for attribute in alternativeFile.attributes { if case let .Video(_, size, _, _, _, videoCodec) = attribute { - if let videoCodec, NativeVideoContent.isVideoCodecSupported(videoCodec: videoCodec, isHighPerformanceDevice: isHighPerformanceDevice) { + if let videoCodec, NativeVideoContent.isVideoCodecSupported(videoCodec: videoCodec, isSoftwareAv1Supported: codecConfiguration.isSoftwareAv1Supported) { let key = Int(min(size.width, size.height)) if let currentFile = qualityFiles[key] { var currentCodec: String? @@ -85,8 +91,8 @@ public final class HLSQualitySet { } public final class HLSVideoContent: UniversalVideoContent { - public static func minimizedHLSQuality(file: FileMediaReference) -> (playlist: FileMediaReference, file: FileMediaReference)? 
{ - guard let qualitySet = HLSQualitySet(baseFile: file) else { + public static func minimizedHLSQuality(file: FileMediaReference, codecConfiguration: HLSCodecConfiguration) -> (playlist: FileMediaReference, file: FileMediaReference)? { + guard let qualitySet = HLSQualitySet(baseFile: file, codecConfiguration: codecConfiguration) else { return nil } let sortedQualities = qualitySet.qualityFiles.sorted(by: { $0.key < $1.key }) @@ -108,8 +114,8 @@ public final class HLSVideoContent: UniversalVideoContent { return nil } - public static func minimizedHLSQualityPreloadData(postbox: Postbox, file: FileMediaReference, userLocation: MediaResourceUserLocation, prefixSeconds: Int, autofetchPlaylist: Bool) -> Signal<(FileMediaReference, Range)?, NoError> { - guard let fileSet = minimizedHLSQuality(file: file) else { + public static func minimizedHLSQualityPreloadData(postbox: Postbox, file: FileMediaReference, userLocation: MediaResourceUserLocation, prefixSeconds: Int, autofetchPlaylist: Bool, codecConfiguration: HLSCodecConfiguration) -> Signal<(FileMediaReference, Range)?, NoError> { + guard let fileSet = minimizedHLSQuality(file: file, codecConfiguration: codecConfiguration) else { return .single(nil) } @@ -217,8 +223,9 @@ public final class HLSVideoContent: UniversalVideoContent { let onlyFullSizeThumbnail: Bool let useLargeThumbnail: Bool let autoFetchFullSizeThumbnail: Bool + let codecConfiguration: HLSCodecConfiguration - public init(id: NativeVideoContentId, userLocation: MediaResourceUserLocation, fileReference: FileMediaReference, streamVideo: Bool = false, loopVideo: Bool = false, enableSound: Bool = true, baseRate: Double = 1.0, fetchAutomatically: Bool = true, onlyFullSizeThumbnail: Bool = false, useLargeThumbnail: Bool = false, autoFetchFullSizeThumbnail: Bool = false) { + public init(id: NativeVideoContentId, userLocation: MediaResourceUserLocation, fileReference: FileMediaReference, streamVideo: Bool = false, loopVideo: Bool = false, enableSound: Bool = true, baseRate: Double = 1.0, fetchAutomatically: Bool = true, onlyFullSizeThumbnail: Bool = false, useLargeThumbnail: Bool = false, autoFetchFullSizeThumbnail: Bool = false, codecConfiguration: HLSCodecConfiguration) { self.id = id self.userLocation = userLocation self.nativeId = id @@ -233,10 +240,11 @@ public final class HLSVideoContent: UniversalVideoContent { self.onlyFullSizeThumbnail = onlyFullSizeThumbnail self.useLargeThumbnail = useLargeThumbnail self.autoFetchFullSizeThumbnail = autoFetchFullSizeThumbnail + self.codecConfiguration = codecConfiguration } public func makeContentNode(accountId: AccountRecordId, postbox: Postbox, audioSession: ManagedAudioSession) -> UniversalVideoContentNode & ASDisplayNode { - return HLSVideoJSNativeContentNode(accountId: accountId, postbox: postbox, audioSessionManager: audioSession, userLocation: self.userLocation, fileReference: self.fileReference, streamVideo: self.streamVideo, loopVideo: self.loopVideo, enableSound: self.enableSound, baseRate: self.baseRate, fetchAutomatically: self.fetchAutomatically, onlyFullSizeThumbnail: self.onlyFullSizeThumbnail, useLargeThumbnail: self.useLargeThumbnail, autoFetchFullSizeThumbnail: self.autoFetchFullSizeThumbnail) + return HLSVideoJSNativeContentNode(accountId: accountId, postbox: postbox, audioSessionManager: audioSession, userLocation: self.userLocation, fileReference: self.fileReference, streamVideo: self.streamVideo, loopVideo: self.loopVideo, enableSound: self.enableSound, baseRate: self.baseRate, fetchAutomatically: self.fetchAutomatically, 
onlyFullSizeThumbnail: self.onlyFullSizeThumbnail, useLargeThumbnail: self.useLargeThumbnail, autoFetchFullSizeThumbnail: self.autoFetchFullSizeThumbnail, codecConfiguration: self.codecConfiguration) } public func isEqual(to other: UniversalVideoContent) -> Bool { diff --git a/submodules/TelegramUniversalVideoContent/Sources/HLSVideoJSNativeContentNode.swift b/submodules/TelegramUniversalVideoContent/Sources/HLSVideoJSNativeContentNode.swift index 39261900aa..52c451fc39 100644 --- a/submodules/TelegramUniversalVideoContent/Sources/HLSVideoJSNativeContentNode.swift +++ b/submodules/TelegramUniversalVideoContent/Sources/HLSVideoJSNativeContentNode.swift @@ -946,6 +946,7 @@ final class HLSVideoJSNativeContentNode: ASDisplayNode, UniversalVideoContentNod private let intrinsicDimensions: CGSize private var enableSound: Bool + private let codecConfiguration: HLSCodecConfiguration private let audioSessionManager: ManagedAudioSession private let audioSessionDisposable = MetaDisposable() @@ -1030,7 +1031,7 @@ final class HLSVideoJSNativeContentNode: ASDisplayNode, UniversalVideoContentNod private var contextDisposable: Disposable? - init(accountId: AccountRecordId, postbox: Postbox, audioSessionManager: ManagedAudioSession, userLocation: MediaResourceUserLocation, fileReference: FileMediaReference, streamVideo: Bool, loopVideo: Bool, enableSound: Bool, baseRate: Double, fetchAutomatically: Bool, onlyFullSizeThumbnail: Bool, useLargeThumbnail: Bool, autoFetchFullSizeThumbnail: Bool) { + init(accountId: AccountRecordId, postbox: Postbox, audioSessionManager: ManagedAudioSession, userLocation: MediaResourceUserLocation, fileReference: FileMediaReference, streamVideo: Bool, loopVideo: Bool, enableSound: Bool, baseRate: Double, fetchAutomatically: Bool, onlyFullSizeThumbnail: Bool, useLargeThumbnail: Bool, autoFetchFullSizeThumbnail: Bool, codecConfiguration: HLSCodecConfiguration) { self.instanceId = HLSVideoJSNativeContentNode.nextInstanceId HLSVideoJSNativeContentNode.nextInstanceId += 1 @@ -1041,6 +1042,7 @@ final class HLSVideoJSNativeContentNode: ASDisplayNode, UniversalVideoContentNod self.userLocation = userLocation self.requestedBaseRate = baseRate self.enableSound = enableSound + self.codecConfiguration = codecConfiguration if var dimensions = fileReference.media.dimensions { if let thumbnail = fileReference.media.previewRepresentations.first { @@ -1058,7 +1060,7 @@ final class HLSVideoJSNativeContentNode: ASDisplayNode, UniversalVideoContentNod self.imageNode = TransformImageNode() var playerSource: HLSJSServerSource? - if let qualitySet = HLSQualitySet(baseFile: fileReference) { + if let qualitySet = HLSQualitySet(baseFile: fileReference, codecConfiguration: codecConfiguration) { let playerSourceValue = HLSJSServerSource(accountId: accountId.int64, fileId: fileReference.media.fileId.id, postbox: postbox, userLocation: userLocation, playlistFiles: qualitySet.playlistFiles, qualityFiles: qualitySet.qualityFiles) playerSource = playerSourceValue } @@ -1258,7 +1260,7 @@ final class HLSVideoJSNativeContentNode: ASDisplayNode, UniversalVideoContentNod if !self.playerAvailableLevels.isEmpty { var selectedLevelIndex: Int? 
- if let qualityFiles = HLSQualitySet(baseFile: self.fileReference)?.qualityFiles.values, let maxQualityFile = qualityFiles.max(by: { lhs, rhs in + if let qualityFiles = HLSQualitySet(baseFile: self.fileReference, codecConfiguration: self.codecConfiguration)?.qualityFiles.values, let maxQualityFile = qualityFiles.max(by: { lhs, rhs in if let lhsDimensions = lhs.media.dimensions, let rhsDimensions = rhs.media.dimensions { return lhsDimensions.width < rhsDimensions.width } else { @@ -1276,7 +1278,7 @@ final class HLSVideoJSNativeContentNode: ASDisplayNode, UniversalVideoContentNod } if selectedLevelIndex == nil { - if let minimizedQualityFile = HLSVideoContent.minimizedHLSQuality(file: self.fileReference)?.file { + if let minimizedQualityFile = HLSVideoContent.minimizedHLSQuality(file: self.fileReference, codecConfiguration: self.codecConfiguration)?.file { if let dimensions = minimizedQualityFile.media.dimensions { for (index, level) in self.playerAvailableLevels { if level.height == Int(dimensions.height) { @@ -1581,7 +1583,7 @@ final class HLSVideoJSNativeContentNode: ASDisplayNode, UniversalVideoContentNod return self.requestedLevelIndex } else { var foundIndex: Int? - if let minQualityFile = HLSVideoContent.minimizedHLSQuality(file: self.fileReference)?.file, let dimensions = minQualityFile.media.dimensions { + if let minQualityFile = HLSVideoContent.minimizedHLSQuality(file: self.fileReference, codecConfiguration: self.codecConfiguration)?.file, let dimensions = minQualityFile.media.dimensions { for (index, level) in self.playerAvailableLevels { if level.width == Int(dimensions.width) && level.height == Int(dimensions.height) { foundIndex = index @@ -1638,7 +1640,7 @@ final class HLSVideoJSNativeContentNode: ASDisplayNode, UniversalVideoContentNod func videoQualityState() -> (current: Int, preferred: UniversalVideoContentVideoQuality, available: [Int])? { if self.playerAvailableLevels.isEmpty { - if let qualitySet = HLSQualitySet(baseFile: self.fileReference), let minQualityFile = HLSVideoContent.minimizedHLSQuality(file: self.fileReference)?.file { + if let qualitySet = HLSQualitySet(baseFile: self.fileReference, codecConfiguration: self.codecConfiguration), let minQualityFile = HLSVideoContent.minimizedHLSQuality(file: self.fileReference, codecConfiguration: self.codecConfiguration)?.file { let sortedFiles = qualitySet.qualityFiles.sorted(by: { $0.key > $1.key }) if let minQuality = sortedFiles.first(where: { $0.value.media.fileId == minQualityFile.media.fileId }) { return (minQuality.key, .auto, sortedFiles.map(\.key)) @@ -1650,7 +1652,7 @@ final class HLSVideoJSNativeContentNode: ASDisplayNode, UniversalVideoContentNod if let playerCurrentLevelIndex = self.playerCurrentLevelIndex { currentLevelIndex = playerCurrentLevelIndex } else { - if let minQualityFile = HLSVideoContent.minimizedHLSQuality(file: self.fileReference)?.file, let dimensions = minQualityFile.media.dimensions { + if let minQualityFile = HLSVideoContent.minimizedHLSQuality(file: self.fileReference, codecConfiguration: self.codecConfiguration)?.file, let dimensions = minQualityFile.media.dimensions { var foundIndex: Int? 
for (index, level) in self.playerAvailableLevels { if level.width == Int(dimensions.width) && level.height == Int(dimensions.height) { diff --git a/submodules/TelegramUniversalVideoContent/Sources/NativeVideoContent.swift b/submodules/TelegramUniversalVideoContent/Sources/NativeVideoContent.swift index a880dc5e1b..9c6141f181 100644 --- a/submodules/TelegramUniversalVideoContent/Sources/NativeVideoContent.swift +++ b/submodules/TelegramUniversalVideoContent/Sources/NativeVideoContent.swift @@ -43,7 +43,6 @@ public final class NativeVideoContent: UniversalVideoContent { public let beginWithAmbientSound: Bool public let mixWithOthers: Bool public let baseRate: Double - public let baseVideoQuality: UniversalVideoContentVideoQuality let fetchAutomatically: Bool let onlyFullSizeThumbnail: Bool let useLargeThumbnail: Bool @@ -60,7 +59,7 @@ public final class NativeVideoContent: UniversalVideoContent { let displayImage: Bool let hasSentFramesToDisplay: (() -> Void)? - public static func isVideoCodecSupported(videoCodec: String, isHighPerformanceDevice: Bool) -> Bool { + public static func isVideoCodecSupported(videoCodec: String, isSoftwareAv1Supported: Bool) -> Bool { if videoCodec == "h264" || videoCodec == "h265" || videoCodec == "avc" || videoCodec == "hevc" { return true } @@ -69,7 +68,7 @@ public final class NativeVideoContent: UniversalVideoContent { if isHardwareAv1Supported { return true } else { - return isHighPerformanceDevice + return isSoftwareAv1Supported } } @@ -87,27 +86,7 @@ public final class NativeVideoContent: UniversalVideoContent { return false } - public static func selectVideoQualityFile(file: TelegramMediaFile, quality: UniversalVideoContentVideoQuality) -> TelegramMediaFile { - guard case let .quality(qualityHeight) = quality else { - return file - } - for alternativeRepresentation in file.alternativeRepresentations { - if let alternativeFile = alternativeRepresentation as? TelegramMediaFile { - for attribute in alternativeFile.attributes { - if case let .Video(_, size, _, _, _, videoCodec) = attribute { - if let videoCodec, isVideoCodecSupported(videoCodec: videoCodec, isHighPerformanceDevice: isHighPerformanceDevice) { - if size.height == qualityHeight { - return alternativeFile - } - } - } - } - } - } - return file - } - - public init(id: NativeVideoContentId, userLocation: MediaResourceUserLocation, fileReference: FileMediaReference, previewSourceFileReference: FileMediaReference? = nil, limitedFileRange: Range? = nil, imageReference: ImageMediaReference? = nil, streamVideo: MediaPlayerStreaming = .none, loopVideo: Bool = false, enableSound: Bool = true, soundMuted: Bool = false, beginWithAmbientSound: Bool = false, mixWithOthers: Bool = false, baseRate: Double = 1.0, baseVideoQuality: UniversalVideoContentVideoQuality = .auto, fetchAutomatically: Bool = true, onlyFullSizeThumbnail: Bool = false, useLargeThumbnail: Bool = false, autoFetchFullSizeThumbnail: Bool = false, startTimestamp: Double? = nil, endTimestamp: Double? = nil, continuePlayingWithoutSoundOnLostAudioSession: Bool = false, placeholderColor: UIColor = .white, tempFilePath: String? = nil, isAudioVideoMessage: Bool = false, captureProtected: Bool = false, hintDimensions: CGSize? = nil, storeAfterDownload: (() -> Void)?, displayImage: Bool = true, hasSentFramesToDisplay: (() -> Void)? = nil) { + public init(id: NativeVideoContentId, userLocation: MediaResourceUserLocation, fileReference: FileMediaReference, previewSourceFileReference: FileMediaReference? = nil, limitedFileRange: Range? 
= nil, imageReference: ImageMediaReference? = nil, streamVideo: MediaPlayerStreaming = .none, loopVideo: Bool = false, enableSound: Bool = true, soundMuted: Bool = false, beginWithAmbientSound: Bool = false, mixWithOthers: Bool = false, baseRate: Double = 1.0, fetchAutomatically: Bool = true, onlyFullSizeThumbnail: Bool = false, useLargeThumbnail: Bool = false, autoFetchFullSizeThumbnail: Bool = false, startTimestamp: Double? = nil, endTimestamp: Double? = nil, continuePlayingWithoutSoundOnLostAudioSession: Bool = false, placeholderColor: UIColor = .white, tempFilePath: String? = nil, isAudioVideoMessage: Bool = false, captureProtected: Bool = false, hintDimensions: CGSize? = nil, storeAfterDownload: (() -> Void)?, displayImage: Bool = true, hasSentFramesToDisplay: (() -> Void)? = nil) { self.id = id self.nativeId = id self.userLocation = userLocation @@ -136,7 +115,6 @@ public final class NativeVideoContent: UniversalVideoContent { self.beginWithAmbientSound = beginWithAmbientSound self.mixWithOthers = mixWithOthers self.baseRate = baseRate - self.baseVideoQuality = baseVideoQuality self.fetchAutomatically = fetchAutomatically self.onlyFullSizeThumbnail = onlyFullSizeThumbnail self.useLargeThumbnail = useLargeThumbnail @@ -155,7 +133,7 @@ public final class NativeVideoContent: UniversalVideoContent { } public func makeContentNode(accountId: AccountRecordId, postbox: Postbox, audioSession: ManagedAudioSession) -> UniversalVideoContentNode & ASDisplayNode { - return NativeVideoContentNode(postbox: postbox, audioSessionManager: audioSession, userLocation: self.userLocation, fileReference: self.fileReference, previewSourceFileReference: self.previewSourceFileReference, limitedFileRange: self.limitedFileRange, imageReference: self.imageReference, streamVideo: self.streamVideo, loopVideo: self.loopVideo, enableSound: self.enableSound, soundMuted: self.soundMuted, beginWithAmbientSound: self.beginWithAmbientSound, mixWithOthers: self.mixWithOthers, baseRate: self.baseRate, baseVideoQuality: self.baseVideoQuality, fetchAutomatically: self.fetchAutomatically, onlyFullSizeThumbnail: self.onlyFullSizeThumbnail, useLargeThumbnail: self.useLargeThumbnail, autoFetchFullSizeThumbnail: self.autoFetchFullSizeThumbnail, startTimestamp: self.startTimestamp, endTimestamp: self.endTimestamp, continuePlayingWithoutSoundOnLostAudioSession: self.continuePlayingWithoutSoundOnLostAudioSession, placeholderColor: self.placeholderColor, tempFilePath: self.tempFilePath, isAudioVideoMessage: self.isAudioVideoMessage, captureProtected: self.captureProtected, hintDimensions: self.hintDimensions, storeAfterDownload: self.storeAfterDownload, displayImage: self.displayImage, hasSentFramesToDisplay: self.hasSentFramesToDisplay) + return NativeVideoContentNode(postbox: postbox, audioSessionManager: audioSession, userLocation: self.userLocation, fileReference: self.fileReference, previewSourceFileReference: self.previewSourceFileReference, limitedFileRange: self.limitedFileRange, imageReference: self.imageReference, streamVideo: self.streamVideo, loopVideo: self.loopVideo, enableSound: self.enableSound, soundMuted: self.soundMuted, beginWithAmbientSound: self.beginWithAmbientSound, mixWithOthers: self.mixWithOthers, baseRate: self.baseRate, fetchAutomatically: self.fetchAutomatically, onlyFullSizeThumbnail: self.onlyFullSizeThumbnail, useLargeThumbnail: self.useLargeThumbnail, autoFetchFullSizeThumbnail: self.autoFetchFullSizeThumbnail, startTimestamp: self.startTimestamp, endTimestamp: self.endTimestamp, 
continuePlayingWithoutSoundOnLostAudioSession: self.continuePlayingWithoutSoundOnLostAudioSession, placeholderColor: self.placeholderColor, tempFilePath: self.tempFilePath, isAudioVideoMessage: self.isAudioVideoMessage, captureProtected: self.captureProtected, hintDimensions: self.hintDimensions, storeAfterDownload: self.storeAfterDownload, displayImage: self.displayImage, hasSentFramesToDisplay: self.hasSentFramesToDisplay) } public func isEqual(to other: UniversalVideoContent) -> Bool { @@ -185,7 +163,6 @@ private final class NativeVideoContentNode: ASDisplayNode, UniversalVideoContent private let mixWithOthers: Bool private let loopVideo: Bool private let baseRate: Double - private var baseVideoQuality: UniversalVideoContentVideoQuality private let audioSessionManager: ManagedAudioSession private let isAudioVideoMessage: Bool private let captureProtected: Bool @@ -246,7 +223,7 @@ private final class NativeVideoContentNode: ASDisplayNode, UniversalVideoContent private let hasSentFramesToDisplay: (() -> Void)? - init(postbox: Postbox, audioSessionManager: ManagedAudioSession, userLocation: MediaResourceUserLocation, fileReference: FileMediaReference, previewSourceFileReference: FileMediaReference?, limitedFileRange: Range?, imageReference: ImageMediaReference?, streamVideo: MediaPlayerStreaming, loopVideo: Bool, enableSound: Bool, soundMuted: Bool, beginWithAmbientSound: Bool, mixWithOthers: Bool, baseRate: Double, baseVideoQuality: UniversalVideoContentVideoQuality, fetchAutomatically: Bool, onlyFullSizeThumbnail: Bool, useLargeThumbnail: Bool, autoFetchFullSizeThumbnail: Bool, startTimestamp: Double?, endTimestamp: Double?, continuePlayingWithoutSoundOnLostAudioSession: Bool = false, placeholderColor: UIColor, tempFilePath: String?, isAudioVideoMessage: Bool, captureProtected: Bool, hintDimensions: CGSize?, storeAfterDownload: (() -> Void)? = nil, displayImage: Bool, hasSentFramesToDisplay: (() -> Void)?) { + init(postbox: Postbox, audioSessionManager: ManagedAudioSession, userLocation: MediaResourceUserLocation, fileReference: FileMediaReference, previewSourceFileReference: FileMediaReference?, limitedFileRange: Range?, imageReference: ImageMediaReference?, streamVideo: MediaPlayerStreaming, loopVideo: Bool, enableSound: Bool, soundMuted: Bool, beginWithAmbientSound: Bool, mixWithOthers: Bool, baseRate: Double, fetchAutomatically: Bool, onlyFullSizeThumbnail: Bool, useLargeThumbnail: Bool, autoFetchFullSizeThumbnail: Bool, startTimestamp: Double?, endTimestamp: Double?, continuePlayingWithoutSoundOnLostAudioSession: Bool = false, placeholderColor: UIColor, tempFilePath: String?, isAudioVideoMessage: Bool, captureProtected: Bool, hintDimensions: CGSize?, storeAfterDownload: (() -> Void)? = nil, displayImage: Bool, hasSentFramesToDisplay: (() -> Void)?) 
{ self.postbox = postbox self.userLocation = userLocation self.fileReference = fileReference @@ -260,7 +237,6 @@ private final class NativeVideoContentNode: ASDisplayNode, UniversalVideoContent self.mixWithOthers = mixWithOthers self.loopVideo = loopVideo self.baseRate = baseRate - self.baseVideoQuality = baseVideoQuality self.audioSessionManager = audioSessionManager self.isAudioVideoMessage = isAudioVideoMessage self.captureProtected = captureProtected @@ -278,7 +254,7 @@ private final class NativeVideoContentNode: ASDisplayNode, UniversalVideoContent break } - let selectedFile = NativeVideoContent.selectVideoQualityFile(file: fileReference.media, quality: self.baseVideoQuality) + let selectedFile = fileReference.media self.player = MediaPlayer(audioSessionManager: audioSessionManager, postbox: postbox, userLocation: userLocation, userContentType: userContentType, resourceReference: fileReference.resourceReference(selectedFile.resource), tempFilePath: tempFilePath, limitedFileRange: limitedFileRange, streamable: streamVideo, video: true, preferSoftwareDecoding: false, playAutomatically: false, enableSound: enableSound, baseRate: baseRate, fetchAutomatically: fetchAutomatically, soundMuted: soundMuted, ambient: beginWithAmbientSound, mixWithOthers: mixWithOthers, continuePlayingWithoutSoundOnLostAudioSession: continuePlayingWithoutSoundOnLostAudioSession, storeAfterDownload: storeAfterDownload, isAudioVideoMessage: isAudioVideoMessage) @@ -576,90 +552,6 @@ private final class NativeVideoContentNode: ASDisplayNode, UniversalVideoContent } func setVideoQuality(_ quality: UniversalVideoContentVideoQuality) { - let _ = (self._status.get() - |> take(1) - |> deliverOnMainQueue).startStandalone(next: { [weak self] status in - guard let self else { - return - } - - if self.baseVideoQuality == quality { - return - } - self.baseVideoQuality = quality - - let selectedFile = NativeVideoContent.selectVideoQualityFile(file: self.fileReference.media, quality: self.baseVideoQuality) - - let updatedFileReference: FileMediaReference = self.fileReference.withMedia(selectedFile) - - var userContentType = MediaResourceUserContentType(file: selectedFile) - switch updatedFileReference { - case .story: - userContentType = .story - default: - break - } - - self._status.set(.never()) - self.player.pause() - - self.player = MediaPlayer(audioSessionManager: self.audioSessionManager, postbox: self.postbox, userLocation: self.userLocation, userContentType: userContentType, resourceReference: updatedFileReference.resourceReference(selectedFile.resource), tempFilePath: nil, streamable: self.streamVideo, video: true, preferSoftwareDecoding: false, playAutomatically: false, enableSound: self.enableSound, baseRate: self.baseRate, fetchAutomatically: true, soundMuted: self.soundMuted, ambient: beginWithAmbientSound, mixWithOthers: mixWithOthers, continuePlayingWithoutSoundOnLostAudioSession: self.continuePlayingWithoutSoundOnLostAudioSession, storeAfterDownload: nil, isAudioVideoMessage: self.isAudioVideoMessage) - - var actionAtEndImpl: (() -> Void)? 
- if self.enableSound && !self.loopVideo { - self.player.actionAtEnd = .action({ - actionAtEndImpl?() - }) - } else { - self.player.actionAtEnd = .loop({ - actionAtEndImpl?() - }) - } - actionAtEndImpl = { [weak self] in - self?.performActionAtEnd() - } - - self._status.set(combineLatest(self.dimensionsPromise.get(), self.player.status) - |> map { dimensions, status in - return MediaPlayerStatus(generationTimestamp: status.generationTimestamp, duration: status.duration, dimensions: dimensions, timestamp: status.timestamp, baseRate: status.baseRate, seekId: status.seekId, status: status.status, soundEnabled: status.soundEnabled) - }) - - self.fetchStatusDisposable.set((self.postbox.mediaBox.resourceStatus(selectedFile.resource) - |> deliverOnMainQueue).start(next: { [weak self] status in - guard let strongSelf = self else { - return - } - switch status { - case .Local: - break - default: - if strongSelf.thumbnailPlayer == nil { - strongSelf.createThumbnailPlayer() - } - } - })) - - if let size = updatedFileReference.media.size { - self._bufferingStatus.set(postbox.mediaBox.resourceRangesStatus(selectedFile.resource) |> map { ranges in - return (ranges, size) - }) - } else { - self._bufferingStatus.set(.single(nil)) - } - - self.player.attachPlayerNode(self.playerNode) - - var play = false - switch status.status { - case .playing: - play = true - case let .buffering(_, whilePlaying, _, _): - play = whilePlaying - case .paused: - break - } - self.player.seek(timestamp: status.timestamp, play: play) - }) } func videoQualityState() -> (current: Int, preferred: UniversalVideoContentVideoQuality, available: [Int])? {
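The bulk of the MediaPlayer changes in this patch split the one-shot MediaTrackFrameDecoder.decode(frame:) into a two-phase send(frame:)/decode() pair, mirroring FFmpeg's send-packet/receive-frame model: a demuxed packet is submitted first, and decoded frames are then drained until the decoder has nothing ready. The following is a minimal sketch of that drive loop; PacketFrame, OutputFrame, TwoPhaseDecoder and drainDecoder are simplified illustrative stand-ins, not the real MediaTrackDecodableFrame/MediaTrackFrame/MediaTrackFrameDecoder types.

import CoreMedia

// Simplified stand-ins for the MediaPlayer types; the real protocol is the one
// declared in MediaTrackFrameDecoder.swift above.
struct PacketFrame {
    let pts: CMTime
}

struct OutputFrame {
    let position: CMTime
}

protocol TwoPhaseDecoder {
    // Submits one demuxed packet; false means the codec rejected it.
    func send(frame: PacketFrame) -> Bool
    // Returns the next decoded frame, or nil once nothing is ready.
    func decode() -> OutputFrame?
}

// Drive loop in the style of the FFMpegMediaFrameSource hunk: feed every pending
// packet, stop on the first rejection, then drain all frames the codec buffered.
func drainDecoder(_ decoder: TwoPhaseDecoder, pending: [PacketFrame]) -> [OutputFrame] {
    var decoded: [OutputFrame] = []
    for frame in pending {
        if !decoder.send(frame: frame) {
            break
        }
    }
    while let frame = decoder.decode() {
        decoded.append(frame)
    }
    return decoded
}

MediaTrackFrameBuffer follows the same shape but reports .skipFrame when either phase yields nothing, so playback can step past packets that cannot be decoded.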
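Because decode() no longer receives the packet it is draining, FFMpegAudioFrameDecoder caches the timescale of the last packet passed to send(frame:) (44000 is only the initial fallback before any packet has been seen) and rebuilds each presentation timestamp from the pts value FFmpeg reports on the decoded frame itself. A small sketch of that reconstruction, with illustrative constant values:

import CoreMedia

// Timescale remembered from the most recently sent packet.
let cachedTimescale: CMTimeScale = 44_000
// pts reported by FFmpeg on the decoded audio frame (illustrative value).
let framePts: Int64 = 1_024
// Presentation timestamp attached to the CMSampleBuffer for this frame.
let presentationTime = CMTime(value: framePts, timescale: cachedTimescale)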
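On the HLS side, the patch replaces the global CPU-core heuristic (isHighPerformanceDevice) with an explicit HLSCodecConfiguration that is threaded through HLSQualitySet, HLSVideoContent and the preload helpers; the AccountContext-based initializer currently hard-codes isSoftwareAv1Supported to false. The sketch below shows the quality-file gate this configuration drives, in the spirit of NativeVideoContent.isVideoCodecSupported(videoCodec:isSoftwareAv1Supported:); CodecConfiguration, isPlayable, the isHardwareAv1Supported parameter and the exact AV1 codec identifiers are assumptions, since the corresponding lines sit outside this excerpt.

struct CodecConfiguration {
    var isSoftwareAv1Supported: Bool
}

// H.264/HEVC always pass; AV1 passes only with a hardware decoder or when the
// configuration explicitly allows software decoding (formerly a core-count check).
func isPlayable(videoCodec: String, configuration: CodecConfiguration, isHardwareAv1Supported: Bool) -> Bool {
    if videoCodec == "h264" || videoCodec == "avc" || videoCodec == "h265" || videoCodec == "hevc" {
        return true
    }
    if videoCodec == "av1" || videoCodec == "av01" { // assumed identifiers
        return isHardwareAv1Supported || configuration.isSoftwareAv1Supported
    }
    return false
}

HLSQualitySet then keeps only the alternative representations whose Video attribute carries a codec that passes this check, keyed by min(width, height) of the variant.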