Fix codec handling

Isaac 2024-11-16 23:30:03 +04:00
parent 8a73a4b4cd
commit 9bb46cb90b
13 changed files with 149 additions and 194 deletions

View File

@ -259,7 +259,7 @@ public func galleryItemForEntry(
}
if isHLS {
-content = HLSVideoContent(id: .message(message.stableId, file.fileId), userLocation: .peer(message.id.peerId), fileReference: .message(message: MessageReference(message), media: file), streamVideo: streamVideos, loopVideo: loopVideos)
+content = HLSVideoContent(id: .message(message.stableId, file.fileId), userLocation: .peer(message.id.peerId), fileReference: .message(message: MessageReference(message), media: file), streamVideo: streamVideos, loopVideo: loopVideos, codecConfiguration: HLSCodecConfiguration(context: context))
} else {
content = NativeVideoContent(id: .message(message.stableId, file.fileId), userLocation: .peer(message.id.peerId), fileReference: .message(message: MessageReference(message), media: file), imageReference: mediaImage.flatMap({ ImageMediaReference.message(message: MessageReference(message), media: $0) }), streamVideo: .conservative, loopVideo: loopVideos, tempFilePath: tempFilePath, captureProtected: captureProtected, storeAfterDownload: generateStoreAfterDownload?(message, file))
}

View File

@ -3597,7 +3597,7 @@ final class UniversalVideoGalleryItemNode: ZoomableContentGalleryItemNode {
displayDebugInfo = true
#endif
}
-if displayDebugInfo, let content = item.content as? HLSVideoContent, let qualitySet = HLSQualitySet(baseFile: content.fileReference), let qualityFile = qualitySet.qualityFiles[quality] {
+if displayDebugInfo, let content = item.content as? HLSVideoContent, let qualitySet = HLSQualitySet(baseFile: content.fileReference, codecConfiguration: HLSCodecConfiguration(context: strongSelf.context)), let qualityFile = qualitySet.qualityFiles[quality] {
for attribute in qualityFile.media.attributes {
if case let .Video(_, _, _, _, _, videoCodec) = attribute, let videoCodec {
qualityDebugText += " \(videoCodec)"
@ -3647,7 +3647,7 @@ final class UniversalVideoGalleryItemNode: ZoomableContentGalleryItemNode {
if qualityState.available.isEmpty {
return
}
-guard let qualitySet = HLSQualitySet(baseFile: content.fileReference) else {
+guard let qualitySet = HLSQualitySet(baseFile: content.fileReference, codecConfiguration: HLSCodecConfiguration(context: self.context)) else {
return
}

View File

@ -6,6 +6,7 @@ final class FFMpegAudioFrameDecoder: MediaTrackFrameDecoder {
private let codecContext: FFMpegAVCodecContext
private let swrContext: FFMpegSWResample
+private var timescale: CMTimeScale = 44000
private let audioFrame: FFMpegAVFrame
private var resetDecoderOnNextFrame = true
@ -59,31 +60,34 @@ final class FFMpegAudioFrameDecoder: MediaTrackFrameDecoder {
}
}
-func decode(frame: MediaTrackDecodableFrame) -> MediaTrackFrame? {
+func send(frame: MediaTrackDecodableFrame) -> Bool {
+self.timescale = frame.pts.timescale
let status = frame.packet.send(toDecoder: self.codecContext)
-if status == 0 {
+return status == 0
+}
+func decode() -> MediaTrackFrame? {
while true {
let result = self.codecContext.receive(into: self.audioFrame)
if case .success = result {
-if let convertedFrame = convertAudioFrame(self.audioFrame, pts: frame.pts) {
+if let convertedFrame = convertAudioFrame(self.audioFrame) {
self.delayedFrames.append(convertedFrame)
}
} else {
break
}
}
if self.delayedFrames.count >= 1 {
var minFrameIndex = 0
var minPosition = self.delayedFrames[0].position
for i in 1 ..< self.delayedFrames.count {
if CMTimeCompare(self.delayedFrames[i].position, minPosition) < 0 {
minFrameIndex = i
minPosition = self.delayedFrames[i].position
}
}
return self.delayedFrames.remove(at: minFrameIndex)
}
-}
return nil
@ -121,7 +125,7 @@ final class FFMpegAudioFrameDecoder: MediaTrackFrameDecoder {
}
}
-private func convertAudioFrame(_ frame: FFMpegAVFrame, pts: CMTime) -> MediaTrackFrame? {
+private func convertAudioFrame(_ frame: FFMpegAVFrame) -> MediaTrackFrame? {
guard let data = self.swrContext.resample(frame) else {
return nil
}
@ -137,6 +141,8 @@ final class FFMpegAudioFrameDecoder: MediaTrackFrameDecoder {
var sampleBuffer: CMSampleBuffer?
+let pts = CMTime(value: frame.pts, timescale: self.timescale)
guard CMAudioSampleBufferCreateReadyWithPacketDescriptions(allocator: nil, dataBuffer: blockBuffer!, formatDescription: self.formatDescription, sampleCount: Int(data.count / 2), presentationTimeStamp: pts, packetDescriptions: nil, sampleBufferOut: &sampleBuffer) == noErr else {
return nil
}

View File

@ -283,8 +283,15 @@ public final class FFMpegMediaFrameSource: NSObject, MediaFrameSource {
if let video = streamDescriptions.video {
videoBuffer = MediaTrackFrameBuffer(frameSource: strongSelf, decoder: video.decoder, type: .video, startTime: video.startTime, duration: video.duration, rotationAngle: video.rotationAngle, aspect: video.aspect, stallDuration: strongSelf.stallDuration, lowWaterDuration: strongSelf.lowWaterDuration, highWaterDuration: strongSelf.highWaterDuration)
for videoFrame in streamDescriptions.extraVideoFrames {
-if let decodedFrame = video.decoder.decode(frame: videoFrame) {
+if !video.decoder.send(frame: videoFrame) {
+break
+}
+}
+while true {
+if let decodedFrame = video.decoder.decode() {
extraDecodedVideoFrames.append(decodedFrame)
+} else {
+break
}
}
}

View File

@ -20,12 +20,24 @@ final class FFMpegMediaPassthroughVideoFrameDecoder: MediaTrackFrameDecoder {
private let rotationAngle: Double
private var resetDecoderOnNextFrame = true
+private var sentFrameQueue: [MediaTrackDecodableFrame] = []
init(videoFormatData: VideoFormatData, rotationAngle: Double) {
self.videoFormatData = videoFormatData
self.rotationAngle = rotationAngle
}
-func decode(frame: MediaTrackDecodableFrame) -> MediaTrackFrame? {
+func send(frame: MediaTrackDecodableFrame) -> Bool {
+self.sentFrameQueue.append(frame)
+return true
+}
+func decode() -> MediaTrackFrame? {
+guard let frame = self.sentFrameQueue.first else {
+return nil
+}
+self.sentFrameQueue.removeFirst()
if self.videoFormat == nil {
if self.videoFormatData.codecType == kCMVideoCodecType_MPEG4Video {
self.videoFormat = FFMpegMediaFrameSourceContextHelpers.createFormatDescriptionFromMpeg4CodecData(UInt32(kCMVideoCodecType_MPEG4Video), self.videoFormatData.width, self.videoFormatData.height, self.videoFormatData.extraData)

View File

@ -72,11 +72,10 @@ public final class FFMpegMediaVideoFrameDecoder: MediaTrackFrameDecoder {
}
func decodeInternal(frame: MediaTrackDecodableFrame) {
}
-public func decode(frame: MediaTrackDecodableFrame) -> MediaTrackFrame? {
-return self.decode(frame: frame, ptsOffset: nil)
+public func decode() -> MediaTrackFrame? {
+return self.decode(ptsOffset: nil)
}
public func sendToDecoder(frame: MediaTrackDecodableFrame) -> Bool {
@ -126,15 +125,23 @@ public final class FFMpegMediaVideoFrameDecoder: MediaTrackFrameDecoder {
}
}
-public func decode(frame: MediaTrackDecodableFrame, ptsOffset: CMTime?, forceARGB: Bool = false, unpremultiplyAlpha: Bool = true, displayImmediately: Bool = true) -> MediaTrackFrame? {
-if self.isError {
-return nil
-}
+public func send(frame: MediaTrackDecodableFrame) -> Bool {
let status = frame.packet.send(toDecoder: self.codecContext)
if status == 0 {
self.defaultDuration = frame.duration
self.defaultTimescale = frame.pts.timescale
+return true
+} else {
+return false
+}
+}
+public func decode(ptsOffset: CMTime?, forceARGB: Bool = false, unpremultiplyAlpha: Bool = true, displayImmediately: Bool = true) -> MediaTrackFrame? {
+if self.isError {
+return nil
+}
+guard let defaultDuration = self.defaultDuration, let defaultTimescale = self.defaultTimescale else {
+return nil
}
if self.codecContext.receive(into: self.videoFrame) == .success {
@ -143,11 +150,11 @@ public final class FFMpegMediaVideoFrameDecoder: MediaTrackFrameDecoder {
return nil
}
-var pts = CMTimeMake(value: self.videoFrame.pts, timescale: frame.pts.timescale)
+var pts = CMTimeMake(value: self.videoFrame.pts, timescale: defaultTimescale)
if let ptsOffset = ptsOffset {
pts = CMTimeAdd(pts, ptsOffset)
}
-return convertVideoFrame(self.videoFrame, pts: pts, dts: pts, duration: frame.duration, forceARGB: forceARGB, unpremultiplyAlpha: unpremultiplyAlpha, displayImmediately: displayImmediately)
+return convertVideoFrame(self.videoFrame, pts: pts, dts: pts, duration: defaultDuration, forceARGB: forceARGB, unpremultiplyAlpha: unpremultiplyAlpha, displayImmediately: displayImmediately)
}
return nil

View File

@ -185,8 +185,12 @@ public final class MediaTrackFrameBuffer {
if !self.frames.isEmpty {
let frame = self.frames.removeFirst()
-if let decodedFrame = self.decoder.decode(frame: frame) {
-return .frame(decodedFrame)
+if self.decoder.send(frame: frame) {
+if let decodedFrame = self.decoder.decode() {
+return .frame(decodedFrame)
+} else {
+return .skipFrame
+}
} else {
return .skipFrame
}

View File

@ -1,6 +1,7 @@
protocol MediaTrackFrameDecoder {
-func decode(frame: MediaTrackDecodableFrame) -> MediaTrackFrame?
+func send(frame: MediaTrackDecodableFrame) -> Bool
+func decode() -> MediaTrackFrame?
func takeQueuedFrame() -> MediaTrackFrame?
func takeRemainingFrame() -> MediaTrackFrame?
func reset()

View File

@ -277,10 +277,14 @@ public final class SoftwareVideoSource {
if let maxPts = maxPts, CMTimeCompare(decodableFrame.pts, maxPts) < 0 {
ptsOffset = maxPts
}
-if let decoder = videoStream.decoder as? FFMpegMediaVideoFrameDecoder {
-result = (decoder.decode(frame: decodableFrame, ptsOffset: ptsOffset, forceARGB: self.hintVP9, unpremultiplyAlpha: self.unpremultiplyAlpha), CGFloat(videoStream.rotationAngle), CGFloat(videoStream.aspect), loop)
+if videoStream.decoder.send(frame: decodableFrame) {
+if let decoder = videoStream.decoder as? FFMpegMediaVideoFrameDecoder {
+result = (decoder.decode(ptsOffset: ptsOffset, forceARGB: self.hintVP9, unpremultiplyAlpha: self.unpremultiplyAlpha), CGFloat(videoStream.rotationAngle), CGFloat(videoStream.aspect), loop)
+} else {
+result = (videoStream.decoder.decode(), CGFloat(videoStream.rotationAngle), CGFloat(videoStream.aspect), loop)
+}
} else {
-result = (videoStream.decoder.decode(frame: decodableFrame), CGFloat(videoStream.rotationAngle), CGFloat(videoStream.aspect), loop)
+result = (nil, CGFloat(videoStream.rotationAngle), CGFloat(videoStream.aspect), loop)
}
} else {
result = (nil, CGFloat(videoStream.rotationAngle), CGFloat(videoStream.aspect), loop)
@ -518,11 +522,17 @@ public final class SoftwareAudioSource {
return nil
}
-let (decodableFrame, _) = self.readDecodableFrame()
-if let decodableFrame = decodableFrame {
-return audioStream.decoder.decode(frame: decodableFrame)?.sampleBuffer
-} else {
-return nil
+while true {
+let (decodableFrame, _) = self.readDecodableFrame()
+if let decodableFrame = decodableFrame {
+if audioStream.decoder.send(frame: decodableFrame) {
+if let result = audioStream.decoder.decode() {
+return result.sampleBuffer
+}
+}
+} else {
+return nil
+}
}
}
@ -730,10 +740,14 @@ final class SoftwareVideoReader {
while !self.readingError && !self.hasReadToEnd {
if let decodableFrame = self.readDecodableFrame() {
var result: (MediaTrackFrame?, CGFloat, CGFloat)
-if let decoder = videoStream.decoder as? FFMpegMediaVideoFrameDecoder {
-result = (decoder.decode(frame: decodableFrame, ptsOffset: nil, forceARGB: false, unpremultiplyAlpha: false, displayImmediately: false), CGFloat(videoStream.rotationAngle), CGFloat(videoStream.aspect))
+if videoStream.decoder.send(frame: decodableFrame) {
+if let decoder = videoStream.decoder as? FFMpegMediaVideoFrameDecoder {
+result = (decoder.decode(ptsOffset: nil, forceARGB: false, unpremultiplyAlpha: false, displayImmediately: false), CGFloat(videoStream.rotationAngle), CGFloat(videoStream.aspect))
+} else {
+result = (videoStream.decoder.decode(), CGFloat(videoStream.rotationAngle), CGFloat(videoStream.aspect))
+}
} else {
-result = (videoStream.decoder.decode(frame: decodableFrame), CGFloat(videoStream.rotationAngle), CGFloat(videoStream.aspect))
+result = (nil, CGFloat(videoStream.rotationAngle), CGFloat(videoStream.aspect))
}
if let frame = result.0 {
return frame

View File

@ -1283,7 +1283,7 @@ public final class ChatMessageInteractiveMediaNode: ASDisplayNode, GalleryItemTr
loadHLSRangeVideoFile = file
var passFile = true
-if NativeVideoContent.isHLSVideo(file: file), let minimizedQualityFile = HLSVideoContent.minimizedHLSQuality(file: .message(message: MessageReference(message), media: file)) {
+if NativeVideoContent.isHLSVideo(file: file), let minimizedQualityFile = HLSVideoContent.minimizedHLSQuality(file: .message(message: MessageReference(message), media: file), codecConfiguration: HLSCodecConfiguration(context: context)) {
if !useInlineHLS {
file = minimizedQualityFile.file.media
}
@ -1410,7 +1410,7 @@ public final class ChatMessageInteractiveMediaNode: ASDisplayNode, GalleryItemTr
loadHLSRangeVideoFile = file
var passFile = true
-if NativeVideoContent.isHLSVideo(file: file), let minimizedQualityFile = HLSVideoContent.minimizedHLSQuality(file: .message(message: MessageReference(message), media: file)) {
+if NativeVideoContent.isHLSVideo(file: file), let minimizedQualityFile = HLSVideoContent.minimizedHLSQuality(file: .message(message: MessageReference(message), media: file), codecConfiguration: HLSCodecConfiguration(context: context)) {
if !useInlineHLS {
file = minimizedQualityFile.file.media
}
@ -1603,7 +1603,7 @@ public final class ChatMessageInteractiveMediaNode: ASDisplayNode, GalleryItemTr
}
}
} else if let file = media as? TelegramMediaFile {
-if NativeVideoContent.isHLSVideo(file: file), let minimizedQuality = HLSVideoContent.minimizedHLSQuality(file: .standalone(media: file)) {
+if NativeVideoContent.isHLSVideo(file: file), let minimizedQuality = HLSVideoContent.minimizedHLSQuality(file: .standalone(media: file), codecConfiguration: HLSCodecConfiguration(context: context)) {
let postbox = context.account.postbox
let playlistStatusSignal = postbox.mediaBox.resourceStatus(minimizedQuality.playlist.media.resource)
@ -1630,7 +1630,7 @@ public final class ChatMessageInteractiveMediaNode: ASDisplayNode, GalleryItemTr
break
}
-return HLSVideoContent.minimizedHLSQualityPreloadData(postbox: postbox, file: .message(message: MessageReference(message), media: file), userLocation: .peer(message.id.peerId), prefixSeconds: 10, autofetchPlaylist: true)
+return HLSVideoContent.minimizedHLSQualityPreloadData(postbox: postbox, file: .message(message: MessageReference(message), media: file), userLocation: .peer(message.id.peerId), prefixSeconds: 10, autofetchPlaylist: true, codecConfiguration: HLSCodecConfiguration(context: context))
|> mapToSignal { preloadData -> Signal<(MediaResourceStatus, MediaResourceStatus?), NoError> in
guard let preloadData else {
return .single((.Local, nil))
@ -1804,7 +1804,8 @@ public final class ChatMessageInteractiveMediaNode: ASDisplayNode, GalleryItemTr
enableSound: false,
fetchAutomatically: false,
onlyFullSizeThumbnail: (onlyFullSizeVideoThumbnail ?? false),
-autoFetchFullSizeThumbnail: true
+autoFetchFullSizeThumbnail: true,
+codecConfiguration: HLSCodecConfiguration(context: context)
)
} else {
videoContent = NativeVideoContent(
@ -2015,7 +2016,7 @@ public final class ChatMessageInteractiveMediaNode: ASDisplayNode, GalleryItemTr
if automaticDownload != .none, let file = media as? TelegramMediaFile, NativeVideoContent.isHLSVideo(file: file) {
let postbox = context.account.postbox
-let fetchSignal = HLSVideoContent.minimizedHLSQualityPreloadData(postbox: context.account.postbox, file: .message(message: MessageReference(message), media: file), userLocation: .peer(message.id.peerId), prefixSeconds: 10, autofetchPlaylist: true)
+let fetchSignal = HLSVideoContent.minimizedHLSQualityPreloadData(postbox: context.account.postbox, file: .message(message: MessageReference(message), media: file), userLocation: .peer(message.id.peerId), prefixSeconds: 10, autofetchPlaylist: true, codecConfiguration: HLSCodecConfiguration(context: context))
|> mapToSignal { fileAndRange -> Signal<Never, NoError> in
guard let fileAndRange else {
return .complete()
@ -2091,7 +2092,8 @@ public final class ChatMessageInteractiveMediaNode: ASDisplayNode, GalleryItemTr
file: .message(message: MessageReference(message), media: loadHLSRangeVideoFile),
userLocation: .peer(message.id.peerId),
prefixSeconds: 10,
-autofetchPlaylist: true
+autofetchPlaylist: true,
+codecConfiguration: HLSCodecConfiguration(context: context)
)
|> deliverOnMainQueue).startStrict(next: { [weak strongSelf] preloadData in
guard let strongSelf else {

View File

@ -15,25 +15,31 @@ import TelegramVoip
import ManagedFile
import AppBundle
-let isHighPerformanceDevice: Bool = {
-var length: Int = 4
-var cpuCount: UInt32 = 0
-sysctlbyname("hw.ncpu", &cpuCount, &length, nil, 0)
-return cpuCount >= 6
-}()
+public struct HLSCodecConfiguration {
+public var isSoftwareAv1Supported: Bool
+public init(isSoftwareAv1Supported: Bool) {
+self.isSoftwareAv1Supported = isSoftwareAv1Supported
+}
+}
+public extension HLSCodecConfiguration {
+init(context: AccountContext) {
+self.init(isSoftwareAv1Supported: false)
+}
+}
public final class HLSQualitySet {
public let qualityFiles: [Int: FileMediaReference]
public let playlistFiles: [Int: FileMediaReference]
-public init?(baseFile: FileMediaReference) {
+public init?(baseFile: FileMediaReference, codecConfiguration: HLSCodecConfiguration) {
var qualityFiles: [Int: FileMediaReference] = [:]
for alternativeRepresentation in baseFile.media.alternativeRepresentations {
if let alternativeFile = alternativeRepresentation as? TelegramMediaFile {
for attribute in alternativeFile.attributes {
if case let .Video(_, size, _, _, _, videoCodec) = attribute {
-if let videoCodec, NativeVideoContent.isVideoCodecSupported(videoCodec: videoCodec, isHighPerformanceDevice: isHighPerformanceDevice) {
+if let videoCodec, NativeVideoContent.isVideoCodecSupported(videoCodec: videoCodec, isSoftwareAv1Supported: codecConfiguration.isSoftwareAv1Supported) {
let key = Int(min(size.width, size.height))
if let currentFile = qualityFiles[key] {
var currentCodec: String?
@ -85,8 +91,8 @@ public final class HLSQualitySet {
}
public final class HLSVideoContent: UniversalVideoContent {
-public static func minimizedHLSQuality(file: FileMediaReference) -> (playlist: FileMediaReference, file: FileMediaReference)? {
-guard let qualitySet = HLSQualitySet(baseFile: file) else {
+public static func minimizedHLSQuality(file: FileMediaReference, codecConfiguration: HLSCodecConfiguration) -> (playlist: FileMediaReference, file: FileMediaReference)? {
+guard let qualitySet = HLSQualitySet(baseFile: file, codecConfiguration: codecConfiguration) else {
return nil
}
let sortedQualities = qualitySet.qualityFiles.sorted(by: { $0.key < $1.key })
@ -108,8 +114,8 @@ public final class HLSVideoContent: UniversalVideoContent {
return nil
}
-public static func minimizedHLSQualityPreloadData(postbox: Postbox, file: FileMediaReference, userLocation: MediaResourceUserLocation, prefixSeconds: Int, autofetchPlaylist: Bool) -> Signal<(FileMediaReference, Range<Int64>)?, NoError> {
-guard let fileSet = minimizedHLSQuality(file: file) else {
+public static func minimizedHLSQualityPreloadData(postbox: Postbox, file: FileMediaReference, userLocation: MediaResourceUserLocation, prefixSeconds: Int, autofetchPlaylist: Bool, codecConfiguration: HLSCodecConfiguration) -> Signal<(FileMediaReference, Range<Int64>)?, NoError> {
+guard let fileSet = minimizedHLSQuality(file: file, codecConfiguration: codecConfiguration) else {
return .single(nil)
}
@ -217,8 +223,9 @@ public final class HLSVideoContent: UniversalVideoContent {
let onlyFullSizeThumbnail: Bool
let useLargeThumbnail: Bool
let autoFetchFullSizeThumbnail: Bool
+let codecConfiguration: HLSCodecConfiguration
-public init(id: NativeVideoContentId, userLocation: MediaResourceUserLocation, fileReference: FileMediaReference, streamVideo: Bool = false, loopVideo: Bool = false, enableSound: Bool = true, baseRate: Double = 1.0, fetchAutomatically: Bool = true, onlyFullSizeThumbnail: Bool = false, useLargeThumbnail: Bool = false, autoFetchFullSizeThumbnail: Bool = false) {
+public init(id: NativeVideoContentId, userLocation: MediaResourceUserLocation, fileReference: FileMediaReference, streamVideo: Bool = false, loopVideo: Bool = false, enableSound: Bool = true, baseRate: Double = 1.0, fetchAutomatically: Bool = true, onlyFullSizeThumbnail: Bool = false, useLargeThumbnail: Bool = false, autoFetchFullSizeThumbnail: Bool = false, codecConfiguration: HLSCodecConfiguration) {
self.id = id
self.userLocation = userLocation
self.nativeId = id
@ -233,10 +240,11 @@ public final class HLSVideoContent: UniversalVideoContent {
self.onlyFullSizeThumbnail = onlyFullSizeThumbnail
self.useLargeThumbnail = useLargeThumbnail
self.autoFetchFullSizeThumbnail = autoFetchFullSizeThumbnail
+self.codecConfiguration = codecConfiguration
}
public func makeContentNode(accountId: AccountRecordId, postbox: Postbox, audioSession: ManagedAudioSession) -> UniversalVideoContentNode & ASDisplayNode {
-return HLSVideoJSNativeContentNode(accountId: accountId, postbox: postbox, audioSessionManager: audioSession, userLocation: self.userLocation, fileReference: self.fileReference, streamVideo: self.streamVideo, loopVideo: self.loopVideo, enableSound: self.enableSound, baseRate: self.baseRate, fetchAutomatically: self.fetchAutomatically, onlyFullSizeThumbnail: self.onlyFullSizeThumbnail, useLargeThumbnail: self.useLargeThumbnail, autoFetchFullSizeThumbnail: self.autoFetchFullSizeThumbnail)
+return HLSVideoJSNativeContentNode(accountId: accountId, postbox: postbox, audioSessionManager: audioSession, userLocation: self.userLocation, fileReference: self.fileReference, streamVideo: self.streamVideo, loopVideo: self.loopVideo, enableSound: self.enableSound, baseRate: self.baseRate, fetchAutomatically: self.fetchAutomatically, onlyFullSizeThumbnail: self.onlyFullSizeThumbnail, useLargeThumbnail: self.useLargeThumbnail, autoFetchFullSizeThumbnail: self.autoFetchFullSizeThumbnail, codecConfiguration: self.codecConfiguration)
}
public func isEqual(to other: UniversalVideoContent) -> Bool {
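The HLSCodecConfiguration introduced in this file replaces the old global isHighPerformanceDevice heuristic: every HLSQualitySet and minimizedHLSQuality call now takes the configuration explicitly, and the AccountContext convenience initializer currently just passes isSoftwareAv1Supported: false. A hedged usage sketch (the fileReference value is assumed to be supplied by the caller; it is not part of this commit):

    // Illustrative sketch: building a quality set with an explicit codec configuration.
    let codecConfiguration = HLSCodecConfiguration(isSoftwareAv1Supported: false)
    if let qualitySet = HLSQualitySet(baseFile: fileReference, codecConfiguration: codecConfiguration) {
        // Renditions whose codec fails NativeVideoContent.isVideoCodecSupported are dropped,
        // so an av1-only quality is skipped here unless av1 decoding is available.
        let availableQualities = qualitySet.qualityFiles.keys.sorted()
        print(availableQualities)
    }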

View File

@ -946,6 +946,7 @@ final class HLSVideoJSNativeContentNode: ASDisplayNode, UniversalVideoContentNod
private let intrinsicDimensions: CGSize
private var enableSound: Bool
+private let codecConfiguration: HLSCodecConfiguration
private let audioSessionManager: ManagedAudioSession
private let audioSessionDisposable = MetaDisposable()
@ -1030,7 +1031,7 @@ final class HLSVideoJSNativeContentNode: ASDisplayNode, UniversalVideoContentNod
private var contextDisposable: Disposable?
-init(accountId: AccountRecordId, postbox: Postbox, audioSessionManager: ManagedAudioSession, userLocation: MediaResourceUserLocation, fileReference: FileMediaReference, streamVideo: Bool, loopVideo: Bool, enableSound: Bool, baseRate: Double, fetchAutomatically: Bool, onlyFullSizeThumbnail: Bool, useLargeThumbnail: Bool, autoFetchFullSizeThumbnail: Bool) {
+init(accountId: AccountRecordId, postbox: Postbox, audioSessionManager: ManagedAudioSession, userLocation: MediaResourceUserLocation, fileReference: FileMediaReference, streamVideo: Bool, loopVideo: Bool, enableSound: Bool, baseRate: Double, fetchAutomatically: Bool, onlyFullSizeThumbnail: Bool, useLargeThumbnail: Bool, autoFetchFullSizeThumbnail: Bool, codecConfiguration: HLSCodecConfiguration) {
self.instanceId = HLSVideoJSNativeContentNode.nextInstanceId
HLSVideoJSNativeContentNode.nextInstanceId += 1
@ -1041,6 +1042,7 @@ final class HLSVideoJSNativeContentNode: ASDisplayNode, UniversalVideoContentNod
self.userLocation = userLocation
self.requestedBaseRate = baseRate
self.enableSound = enableSound
+self.codecConfiguration = codecConfiguration
if var dimensions = fileReference.media.dimensions {
if let thumbnail = fileReference.media.previewRepresentations.first {
@ -1058,7 +1060,7 @@ final class HLSVideoJSNativeContentNode: ASDisplayNode, UniversalVideoContentNod
self.imageNode = TransformImageNode()
var playerSource: HLSJSServerSource?
-if let qualitySet = HLSQualitySet(baseFile: fileReference) {
+if let qualitySet = HLSQualitySet(baseFile: fileReference, codecConfiguration: codecConfiguration) {
let playerSourceValue = HLSJSServerSource(accountId: accountId.int64, fileId: fileReference.media.fileId.id, postbox: postbox, userLocation: userLocation, playlistFiles: qualitySet.playlistFiles, qualityFiles: qualitySet.qualityFiles)
playerSource = playerSourceValue
}
@ -1258,7 +1260,7 @@ final class HLSVideoJSNativeContentNode: ASDisplayNode, UniversalVideoContentNod
if !self.playerAvailableLevels.isEmpty {
var selectedLevelIndex: Int?
-if let qualityFiles = HLSQualitySet(baseFile: self.fileReference)?.qualityFiles.values, let maxQualityFile = qualityFiles.max(by: { lhs, rhs in
+if let qualityFiles = HLSQualitySet(baseFile: self.fileReference, codecConfiguration: self.codecConfiguration)?.qualityFiles.values, let maxQualityFile = qualityFiles.max(by: { lhs, rhs in
if let lhsDimensions = lhs.media.dimensions, let rhsDimensions = rhs.media.dimensions {
return lhsDimensions.width < rhsDimensions.width
} else {
@ -1276,7 +1278,7 @@ final class HLSVideoJSNativeContentNode: ASDisplayNode, UniversalVideoContentNod
}
if selectedLevelIndex == nil {
-if let minimizedQualityFile = HLSVideoContent.minimizedHLSQuality(file: self.fileReference)?.file {
+if let minimizedQualityFile = HLSVideoContent.minimizedHLSQuality(file: self.fileReference, codecConfiguration: self.codecConfiguration)?.file {
if let dimensions = minimizedQualityFile.media.dimensions {
for (index, level) in self.playerAvailableLevels {
if level.height == Int(dimensions.height) {
@ -1581,7 +1583,7 @@ final class HLSVideoJSNativeContentNode: ASDisplayNode, UniversalVideoContentNod
return self.requestedLevelIndex
} else {
var foundIndex: Int?
-if let minQualityFile = HLSVideoContent.minimizedHLSQuality(file: self.fileReference)?.file, let dimensions = minQualityFile.media.dimensions {
+if let minQualityFile = HLSVideoContent.minimizedHLSQuality(file: self.fileReference, codecConfiguration: self.codecConfiguration)?.file, let dimensions = minQualityFile.media.dimensions {
for (index, level) in self.playerAvailableLevels {
if level.width == Int(dimensions.width) && level.height == Int(dimensions.height) {
foundIndex = index
@ -1638,7 +1640,7 @@ final class HLSVideoJSNativeContentNode: ASDisplayNode, UniversalVideoContentNod
func videoQualityState() -> (current: Int, preferred: UniversalVideoContentVideoQuality, available: [Int])? {
if self.playerAvailableLevels.isEmpty {
-if let qualitySet = HLSQualitySet(baseFile: self.fileReference), let minQualityFile = HLSVideoContent.minimizedHLSQuality(file: self.fileReference)?.file {
+if let qualitySet = HLSQualitySet(baseFile: self.fileReference, codecConfiguration: self.codecConfiguration), let minQualityFile = HLSVideoContent.minimizedHLSQuality(file: self.fileReference, codecConfiguration: self.codecConfiguration)?.file {
let sortedFiles = qualitySet.qualityFiles.sorted(by: { $0.key > $1.key })
if let minQuality = sortedFiles.first(where: { $0.value.media.fileId == minQualityFile.media.fileId }) {
return (minQuality.key, .auto, sortedFiles.map(\.key))
@ -1650,7 +1652,7 @@ final class HLSVideoJSNativeContentNode: ASDisplayNode, UniversalVideoContentNod
if let playerCurrentLevelIndex = self.playerCurrentLevelIndex {
currentLevelIndex = playerCurrentLevelIndex
} else {
-if let minQualityFile = HLSVideoContent.minimizedHLSQuality(file: self.fileReference)?.file, let dimensions = minQualityFile.media.dimensions {
+if let minQualityFile = HLSVideoContent.minimizedHLSQuality(file: self.fileReference, codecConfiguration: self.codecConfiguration)?.file, let dimensions = minQualityFile.media.dimensions {
var foundIndex: Int?
for (index, level) in self.playerAvailableLevels {
if level.width == Int(dimensions.width) && level.height == Int(dimensions.height) {

View File

@ -43,7 +43,6 @@ public final class NativeVideoContent: UniversalVideoContent {
public let beginWithAmbientSound: Bool
public let mixWithOthers: Bool
public let baseRate: Double
-public let baseVideoQuality: UniversalVideoContentVideoQuality
let fetchAutomatically: Bool
let onlyFullSizeThumbnail: Bool
let useLargeThumbnail: Bool
@ -60,7 +59,7 @@ public final class NativeVideoContent: UniversalVideoContent {
let displayImage: Bool
let hasSentFramesToDisplay: (() -> Void)?
-public static func isVideoCodecSupported(videoCodec: String, isHighPerformanceDevice: Bool) -> Bool {
+public static func isVideoCodecSupported(videoCodec: String, isSoftwareAv1Supported: Bool) -> Bool {
if videoCodec == "h264" || videoCodec == "h265" || videoCodec == "avc" || videoCodec == "hevc" {
return true
}
@ -69,7 +68,7 @@ public final class NativeVideoContent: UniversalVideoContent {
if isHardwareAv1Supported {
return true
} else {
-return isHighPerformanceDevice
+return isSoftwareAv1Supported
}
}
@ -87,27 +86,7 @@ public final class NativeVideoContent: UniversalVideoContent {
return false
}
-public static func selectVideoQualityFile(file: TelegramMediaFile, quality: UniversalVideoContentVideoQuality) -> TelegramMediaFile {
-guard case let .quality(qualityHeight) = quality else {
-return file
-}
-for alternativeRepresentation in file.alternativeRepresentations {
-if let alternativeFile = alternativeRepresentation as? TelegramMediaFile {
-for attribute in alternativeFile.attributes {
-if case let .Video(_, size, _, _, _, videoCodec) = attribute {
-if let videoCodec, isVideoCodecSupported(videoCodec: videoCodec, isHighPerformanceDevice: isHighPerformanceDevice) {
-if size.height == qualityHeight {
-return alternativeFile
-}
-}
-}
-}
-}
-}
-return file
-}
-public init(id: NativeVideoContentId, userLocation: MediaResourceUserLocation, fileReference: FileMediaReference, previewSourceFileReference: FileMediaReference? = nil, limitedFileRange: Range<Int64>? = nil, imageReference: ImageMediaReference? = nil, streamVideo: MediaPlayerStreaming = .none, loopVideo: Bool = false, enableSound: Bool = true, soundMuted: Bool = false, beginWithAmbientSound: Bool = false, mixWithOthers: Bool = false, baseRate: Double = 1.0, baseVideoQuality: UniversalVideoContentVideoQuality = .auto, fetchAutomatically: Bool = true, onlyFullSizeThumbnail: Bool = false, useLargeThumbnail: Bool = false, autoFetchFullSizeThumbnail: Bool = false, startTimestamp: Double? = nil, endTimestamp: Double? = nil, continuePlayingWithoutSoundOnLostAudioSession: Bool = false, placeholderColor: UIColor = .white, tempFilePath: String? = nil, isAudioVideoMessage: Bool = false, captureProtected: Bool = false, hintDimensions: CGSize? = nil, storeAfterDownload: (() -> Void)?, displayImage: Bool = true, hasSentFramesToDisplay: (() -> Void)? = nil) {
+public init(id: NativeVideoContentId, userLocation: MediaResourceUserLocation, fileReference: FileMediaReference, previewSourceFileReference: FileMediaReference? = nil, limitedFileRange: Range<Int64>? = nil, imageReference: ImageMediaReference? = nil, streamVideo: MediaPlayerStreaming = .none, loopVideo: Bool = false, enableSound: Bool = true, soundMuted: Bool = false, beginWithAmbientSound: Bool = false, mixWithOthers: Bool = false, baseRate: Double = 1.0, fetchAutomatically: Bool = true, onlyFullSizeThumbnail: Bool = false, useLargeThumbnail: Bool = false, autoFetchFullSizeThumbnail: Bool = false, startTimestamp: Double? = nil, endTimestamp: Double? = nil, continuePlayingWithoutSoundOnLostAudioSession: Bool = false, placeholderColor: UIColor = .white, tempFilePath: String? = nil, isAudioVideoMessage: Bool = false, captureProtected: Bool = false, hintDimensions: CGSize? = nil, storeAfterDownload: (() -> Void)?, displayImage: Bool = true, hasSentFramesToDisplay: (() -> Void)? = nil) {
self.id = id
self.nativeId = id
self.userLocation = userLocation
@ -136,7 +115,6 @@ public final class NativeVideoContent: UniversalVideoContent {
self.beginWithAmbientSound = beginWithAmbientSound
self.mixWithOthers = mixWithOthers
self.baseRate = baseRate
-self.baseVideoQuality = baseVideoQuality
self.fetchAutomatically = fetchAutomatically
self.onlyFullSizeThumbnail = onlyFullSizeThumbnail
self.useLargeThumbnail = useLargeThumbnail
@ -155,7 +133,7 @@ public final class NativeVideoContent: UniversalVideoContent {
}
public func makeContentNode(accountId: AccountRecordId, postbox: Postbox, audioSession: ManagedAudioSession) -> UniversalVideoContentNode & ASDisplayNode {
-return NativeVideoContentNode(postbox: postbox, audioSessionManager: audioSession, userLocation: self.userLocation, fileReference: self.fileReference, previewSourceFileReference: self.previewSourceFileReference, limitedFileRange: self.limitedFileRange, imageReference: self.imageReference, streamVideo: self.streamVideo, loopVideo: self.loopVideo, enableSound: self.enableSound, soundMuted: self.soundMuted, beginWithAmbientSound: self.beginWithAmbientSound, mixWithOthers: self.mixWithOthers, baseRate: self.baseRate, baseVideoQuality: self.baseVideoQuality, fetchAutomatically: self.fetchAutomatically, onlyFullSizeThumbnail: self.onlyFullSizeThumbnail, useLargeThumbnail: self.useLargeThumbnail, autoFetchFullSizeThumbnail: self.autoFetchFullSizeThumbnail, startTimestamp: self.startTimestamp, endTimestamp: self.endTimestamp, continuePlayingWithoutSoundOnLostAudioSession: self.continuePlayingWithoutSoundOnLostAudioSession, placeholderColor: self.placeholderColor, tempFilePath: self.tempFilePath, isAudioVideoMessage: self.isAudioVideoMessage, captureProtected: self.captureProtected, hintDimensions: self.hintDimensions, storeAfterDownload: self.storeAfterDownload, displayImage: self.displayImage, hasSentFramesToDisplay: self.hasSentFramesToDisplay)
+return NativeVideoContentNode(postbox: postbox, audioSessionManager: audioSession, userLocation: self.userLocation, fileReference: self.fileReference, previewSourceFileReference: self.previewSourceFileReference, limitedFileRange: self.limitedFileRange, imageReference: self.imageReference, streamVideo: self.streamVideo, loopVideo: self.loopVideo, enableSound: self.enableSound, soundMuted: self.soundMuted, beginWithAmbientSound: self.beginWithAmbientSound, mixWithOthers: self.mixWithOthers, baseRate: self.baseRate, fetchAutomatically: self.fetchAutomatically, onlyFullSizeThumbnail: self.onlyFullSizeThumbnail, useLargeThumbnail: self.useLargeThumbnail, autoFetchFullSizeThumbnail: self.autoFetchFullSizeThumbnail, startTimestamp: self.startTimestamp, endTimestamp: self.endTimestamp, continuePlayingWithoutSoundOnLostAudioSession: self.continuePlayingWithoutSoundOnLostAudioSession, placeholderColor: self.placeholderColor, tempFilePath: self.tempFilePath, isAudioVideoMessage: self.isAudioVideoMessage, captureProtected: self.captureProtected, hintDimensions: self.hintDimensions, storeAfterDownload: self.storeAfterDownload, displayImage: self.displayImage, hasSentFramesToDisplay: self.hasSentFramesToDisplay)
}
public func isEqual(to other: UniversalVideoContent) -> Bool {
@ -185,7 +163,6 @@ private final class NativeVideoContentNode: ASDisplayNode, UniversalVideoContent
private let mixWithOthers: Bool
private let loopVideo: Bool
private let baseRate: Double
-private var baseVideoQuality: UniversalVideoContentVideoQuality
private let audioSessionManager: ManagedAudioSession
private let isAudioVideoMessage: Bool
private let captureProtected: Bool
@ -246,7 +223,7 @@ private final class NativeVideoContentNode: ASDisplayNode, UniversalVideoContent
private let hasSentFramesToDisplay: (() -> Void)?
-init(postbox: Postbox, audioSessionManager: ManagedAudioSession, userLocation: MediaResourceUserLocation, fileReference: FileMediaReference, previewSourceFileReference: FileMediaReference?, limitedFileRange: Range<Int64>?, imageReference: ImageMediaReference?, streamVideo: MediaPlayerStreaming, loopVideo: Bool, enableSound: Bool, soundMuted: Bool, beginWithAmbientSound: Bool, mixWithOthers: Bool, baseRate: Double, baseVideoQuality: UniversalVideoContentVideoQuality, fetchAutomatically: Bool, onlyFullSizeThumbnail: Bool, useLargeThumbnail: Bool, autoFetchFullSizeThumbnail: Bool, startTimestamp: Double?, endTimestamp: Double?, continuePlayingWithoutSoundOnLostAudioSession: Bool = false, placeholderColor: UIColor, tempFilePath: String?, isAudioVideoMessage: Bool, captureProtected: Bool, hintDimensions: CGSize?, storeAfterDownload: (() -> Void)? = nil, displayImage: Bool, hasSentFramesToDisplay: (() -> Void)?) {
+init(postbox: Postbox, audioSessionManager: ManagedAudioSession, userLocation: MediaResourceUserLocation, fileReference: FileMediaReference, previewSourceFileReference: FileMediaReference?, limitedFileRange: Range<Int64>?, imageReference: ImageMediaReference?, streamVideo: MediaPlayerStreaming, loopVideo: Bool, enableSound: Bool, soundMuted: Bool, beginWithAmbientSound: Bool, mixWithOthers: Bool, baseRate: Double, fetchAutomatically: Bool, onlyFullSizeThumbnail: Bool, useLargeThumbnail: Bool, autoFetchFullSizeThumbnail: Bool, startTimestamp: Double?, endTimestamp: Double?, continuePlayingWithoutSoundOnLostAudioSession: Bool = false, placeholderColor: UIColor, tempFilePath: String?, isAudioVideoMessage: Bool, captureProtected: Bool, hintDimensions: CGSize?, storeAfterDownload: (() -> Void)? = nil, displayImage: Bool, hasSentFramesToDisplay: (() -> Void)?) {
self.postbox = postbox
self.userLocation = userLocation
self.fileReference = fileReference
@ -260,7 +237,6 @@ private final class NativeVideoContentNode: ASDisplayNode, UniversalVideoContent
self.mixWithOthers = mixWithOthers
self.loopVideo = loopVideo
self.baseRate = baseRate
-self.baseVideoQuality = baseVideoQuality
self.audioSessionManager = audioSessionManager
self.isAudioVideoMessage = isAudioVideoMessage
self.captureProtected = captureProtected
@ -278,7 +254,7 @@ private final class NativeVideoContentNode: ASDisplayNode, UniversalVideoContent
break
}
-let selectedFile = NativeVideoContent.selectVideoQualityFile(file: fileReference.media, quality: self.baseVideoQuality)
+let selectedFile = fileReference.media
self.player = MediaPlayer(audioSessionManager: audioSessionManager, postbox: postbox, userLocation: userLocation, userContentType: userContentType, resourceReference: fileReference.resourceReference(selectedFile.resource), tempFilePath: tempFilePath, limitedFileRange: limitedFileRange, streamable: streamVideo, video: true, preferSoftwareDecoding: false, playAutomatically: false, enableSound: enableSound, baseRate: baseRate, fetchAutomatically: fetchAutomatically, soundMuted: soundMuted, ambient: beginWithAmbientSound, mixWithOthers: mixWithOthers, continuePlayingWithoutSoundOnLostAudioSession: continuePlayingWithoutSoundOnLostAudioSession, storeAfterDownload: storeAfterDownload, isAudioVideoMessage: isAudioVideoMessage)
@ -576,90 +552,6 @@ private final class NativeVideoContentNode: ASDisplayNode, UniversalVideoContent
}
func setVideoQuality(_ quality: UniversalVideoContentVideoQuality) {
let _ = (self._status.get()
|> take(1)
|> deliverOnMainQueue).startStandalone(next: { [weak self] status in
guard let self else {
return
}
if self.baseVideoQuality == quality {
return
}
self.baseVideoQuality = quality
let selectedFile = NativeVideoContent.selectVideoQualityFile(file: self.fileReference.media, quality: self.baseVideoQuality)
let updatedFileReference: FileMediaReference = self.fileReference.withMedia(selectedFile)
var userContentType = MediaResourceUserContentType(file: selectedFile)
switch updatedFileReference {
case .story:
userContentType = .story
default:
break
}
self._status.set(.never())
self.player.pause()
self.player = MediaPlayer(audioSessionManager: self.audioSessionManager, postbox: self.postbox, userLocation: self.userLocation, userContentType: userContentType, resourceReference: updatedFileReference.resourceReference(selectedFile.resource), tempFilePath: nil, streamable: self.streamVideo, video: true, preferSoftwareDecoding: false, playAutomatically: false, enableSound: self.enableSound, baseRate: self.baseRate, fetchAutomatically: true, soundMuted: self.soundMuted, ambient: beginWithAmbientSound, mixWithOthers: mixWithOthers, continuePlayingWithoutSoundOnLostAudioSession: self.continuePlayingWithoutSoundOnLostAudioSession, storeAfterDownload: nil, isAudioVideoMessage: self.isAudioVideoMessage)
var actionAtEndImpl: (() -> Void)?
if self.enableSound && !self.loopVideo {
self.player.actionAtEnd = .action({
actionAtEndImpl?()
})
} else {
self.player.actionAtEnd = .loop({
actionAtEndImpl?()
})
}
actionAtEndImpl = { [weak self] in
self?.performActionAtEnd()
}
self._status.set(combineLatest(self.dimensionsPromise.get(), self.player.status)
|> map { dimensions, status in
return MediaPlayerStatus(generationTimestamp: status.generationTimestamp, duration: status.duration, dimensions: dimensions, timestamp: status.timestamp, baseRate: status.baseRate, seekId: status.seekId, status: status.status, soundEnabled: status.soundEnabled)
})
self.fetchStatusDisposable.set((self.postbox.mediaBox.resourceStatus(selectedFile.resource)
|> deliverOnMainQueue).start(next: { [weak self] status in
guard let strongSelf = self else {
return
}
switch status {
case .Local:
break
default:
if strongSelf.thumbnailPlayer == nil {
strongSelf.createThumbnailPlayer()
}
}
}))
if let size = updatedFileReference.media.size {
self._bufferingStatus.set(postbox.mediaBox.resourceRangesStatus(selectedFile.resource) |> map { ranges in
return (ranges, size)
})
} else {
self._bufferingStatus.set(.single(nil))
}
self.player.attachPlayerNode(self.playerNode)
var play = false
switch status.status {
case .playing:
play = true
case let .buffering(_, whilePlaying, _, _):
play = whilePlaying
case .paused:
break
}
self.player.seek(timestamp: status.timestamp, play: play)
})
}
func videoQualityState() -> (current: Int, preferred: UniversalVideoContentVideoQuality, available: [Int])? {