Video player improvements

This commit is contained in:
Isaac 2024-10-21 20:21:15 +04:00
parent a38e1cf06e
commit fa2da2832e
24 changed files with 294 additions and 888 deletions

View File

@ -19,7 +19,7 @@ public func isMediaStreamable(message: Message, media: TelegramMediaFile) -> Boo
}
for attribute in media.attributes {
if case let .Video(_, _, flags, _, _, _) = attribute {
if flags.contains(.supportsStreaming) {
if flags.contains(.supportsStreaming) || !media.alternativeRepresentations.isEmpty {
return true
}
break

View File

@ -245,7 +245,18 @@ public func galleryItemForEntry(
content = NativeVideoContent(id: .message(message.stableId, file.fileId), userLocation: .peer(message.id.peerId), fileReference: .message(message: MessageReference(message), media: file), imageReference: mediaImage.flatMap({ ImageMediaReference.message(message: MessageReference(message), media: $0) }), loopVideo: true, enableSound: false, tempFilePath: tempFilePath, captureProtected: captureProtected, storeAfterDownload: generateStoreAfterDownload?(message, file))
} else {
if true || (file.mimeType == "video/mpeg4" || file.mimeType == "video/mov" || file.mimeType == "video/mp4") {
var isHLS = false
if NativeVideoContent.isHLSVideo(file: file) {
isHLS = true
if let data = context.currentAppConfiguration.with({ $0 }).data, let disableHLS = data["video_ignore_alt_documents"] as? Double {
if Int(disableHLS) != 0 {
isHLS = false
}
}
}
if isHLS {
content = HLSVideoContent(id: .message(message.id, message.stableId, file.fileId), userLocation: .peer(message.id.peerId), fileReference: .message(message: MessageReference(message), media: file), streamVideo: streamVideos, loopVideo: loopVideos)
} else {
content = NativeVideoContent(id: .message(message.stableId, file.fileId), userLocation: .peer(message.id.peerId), fileReference: .message(message: MessageReference(message), media: file), imageReference: mediaImage.flatMap({ ImageMediaReference.message(message: MessageReference(message), media: $0) }), streamVideo: .conservative, loopVideo: loopVideos, tempFilePath: tempFilePath, captureProtected: captureProtected, storeAfterDownload: generateStoreAfterDownload?(message, file))

View File

@ -2362,7 +2362,10 @@ final class UniversalVideoGalleryItemNode: ZoomableContentGalleryItemNode {
if videoNode.hasAttachedContext {
if self.isPaused || !self.keepSoundOnDismiss {
videoNode.continuePlayingWithoutSound()
if let item = self.item, item.content is HLSVideoContent {
} else {
videoNode.continuePlayingWithoutSound()
}
}
}
} else {
@ -3482,7 +3485,6 @@ final class UniversalVideoGalleryItemNode: ZoomableContentGalleryItemNode {
}
for quality in qualityState.available {
//TODO:release
let isSelected = qualityState.preferred == .quality(quality)
items.append(.action(ContextMenuActionItem(text: "\(quality)p", icon: { _ in
if isSelected {

View File

@ -426,6 +426,13 @@ private final class ChunkMediaPlayerContext {
fileprivate func playOnceWithSound(playAndRecord: Bool, seek: MediaPlayerSeek = .start) {
assert(self.queue.isCurrent())
/*#if DEBUG
var seek = seek
if case .timecode = seek {
seek = .timecode(830.83000000000004)
}
#endif*/
if !self.enableSound {
self.lastStatusUpdateTimestamp = nil
self.enableSound = true
@ -585,14 +592,55 @@ private final class ChunkMediaPlayerContext {
}
timestamp = max(0.0, timestamp)
//print("Timestamp: \(timestamp)")
var duration: Double = 0.0
if let partsStateDuration = self.partsState.duration {
duration = partsStateDuration
}
var validParts: [ChunkMediaPlayerPart] = []
for part in self.partsState.parts {
for i in 0 ..< self.partsState.parts.count {
let part = self.partsState.parts[i]
var partMatches = false
if timestamp >= part.startTime - 0.5 && timestamp < part.endTime + 0.5 {
partMatches = true
}
if partMatches {
validParts.append(part)
}
}
if let lastValidPart = validParts.last {
for i in 0 ..< self.partsState.parts.count {
let part = self.partsState.parts[i]
if lastValidPart !== part && part.startTime > lastValidPart.startTime && part.startTime <= lastValidPart.endTime + 0.5 {
validParts.append(part)
break
}
}
}
/*for i in 0 ..< self.partsState.parts.count {
let part = self.partsState.parts[i]
var partMatches = false
if timestamp >= part.startTime - 0.001 && timestamp < part.endTime - 0.001 {
partMatches = true
} else if part.startTime < 0.2 && timestamp < part.endTime - 0.001 {
partMatches = true
}
if !partMatches, i != self.partsState.parts.count - 1, part.startTime >= 0.001, timestamp >= part.startTime {
let nextPart = self.partsState.parts[i + 1]
if timestamp < nextPart.endTime - 0.001 {
if part.endTime >= nextPart.startTime - 0.1 {
partMatches = true
}
}
}
if partMatches {
validParts.append(part)
inner: for lookaheadPart in self.partsState.parts {
@ -604,7 +652,7 @@ private final class ChunkMediaPlayerContext {
break
}
}
}*/
if validParts.isEmpty, let initialSeekTimestamp = self.initialSeekTimestamp {
for part in self.partsState.parts {
@ -650,6 +698,7 @@ private final class ChunkMediaPlayerContext {
userContentType: .other,
resourceReference: .standalone(resource: LocalFileReferenceMediaResource(localFilePath: "", randomId: 0)),
tempFilePath: part.file.path,
limitedFileRange: nil,
streamable: false,
isSeekable: true,
video: self.video,
@ -809,6 +858,10 @@ private final class ChunkMediaPlayerContext {
bufferingProgress = 0.0
}
if rate != 0.0 && self.initialSeekTimestamp != nil {
self.initialSeekTimestamp = nil
}
if duration > 0.0 && timestamp >= duration {
performActionAtEndNow = true
}

View File

@ -72,6 +72,7 @@ public final class FFMpegMediaFrameSource: NSObject, MediaFrameSource {
private let userContentType: MediaResourceUserContentType
private let resourceReference: MediaResourceReference
private let tempFilePath: String?
private let limitedFileRange: Range<Int64>?
private let streamable: Bool
private let isSeekable: Bool
private let stallDuration: Double
@ -102,13 +103,14 @@ public final class FFMpegMediaFrameSource: NSObject, MediaFrameSource {
}
}
public init(queue: Queue, postbox: Postbox, userLocation: MediaResourceUserLocation, userContentType: MediaResourceUserContentType, resourceReference: MediaResourceReference, tempFilePath: String?, streamable: Bool, isSeekable: Bool, video: Bool, preferSoftwareDecoding: Bool, fetchAutomatically: Bool, maximumFetchSize: Int? = nil, stallDuration: Double = 1.0, lowWaterDuration: Double = 2.0, highWaterDuration: Double = 3.0, storeAfterDownload: (() -> Void)? = nil) {
public init(queue: Queue, postbox: Postbox, userLocation: MediaResourceUserLocation, userContentType: MediaResourceUserContentType, resourceReference: MediaResourceReference, tempFilePath: String?, limitedFileRange: Range<Int64>?, streamable: Bool, isSeekable: Bool, video: Bool, preferSoftwareDecoding: Bool, fetchAutomatically: Bool, maximumFetchSize: Int? = nil, stallDuration: Double = 1.0, lowWaterDuration: Double = 2.0, highWaterDuration: Double = 3.0, storeAfterDownload: (() -> Void)? = nil) {
self.queue = queue
self.postbox = postbox
self.userLocation = userLocation
self.userContentType = userContentType
self.resourceReference = resourceReference
self.tempFilePath = tempFilePath
self.limitedFileRange = limitedFileRange
self.streamable = streamable
self.isSeekable = isSeekable
self.video = video
@ -187,6 +189,7 @@ public final class FFMpegMediaFrameSource: NSObject, MediaFrameSource {
let postbox = self.postbox
let resourceReference = self.resourceReference
let tempFilePath = self.tempFilePath
let limitedFileRange = self.limitedFileRange
let queue = self.queue
let streamable = self.streamable
let isSeekable = self.isSeekable
@ -198,7 +201,7 @@ public final class FFMpegMediaFrameSource: NSObject, MediaFrameSource {
let storeAfterDownload = self.storeAfterDownload
self.performWithContext { [weak self] context in
context.initializeState(postbox: postbox, userLocation: userLocation, resourceReference: resourceReference, tempFilePath: tempFilePath, streamable: streamable, isSeekable: isSeekable, video: video, preferSoftwareDecoding: preferSoftwareDecoding, fetchAutomatically: fetchAutomatically, maximumFetchSize: maximumFetchSize, storeAfterDownload: storeAfterDownload)
context.initializeState(postbox: postbox, userLocation: userLocation, resourceReference: resourceReference, tempFilePath: tempFilePath, limitedFileRange: limitedFileRange, streamable: streamable, isSeekable: isSeekable, video: video, preferSoftwareDecoding: preferSoftwareDecoding, fetchAutomatically: fetchAutomatically, maximumFetchSize: maximumFetchSize, storeAfterDownload: storeAfterDownload)
let (frames, endOfStream) = context.takeFrames(until: timestamp, types: types)
@ -242,6 +245,7 @@ public final class FFMpegMediaFrameSource: NSObject, MediaFrameSource {
let userLocation = self.userLocation
let resourceReference = self.resourceReference
let tempFilePath = self.tempFilePath
let limitedFileRange = self.limitedFileRange
let streamable = self.streamable
let isSeekable = self.isSeekable
let video = self.video
@ -259,7 +263,7 @@ public final class FFMpegMediaFrameSource: NSObject, MediaFrameSource {
self.performWithContext { [weak self] context in
let _ = currentSemaphore.swap(context.currentSemaphore)
context.initializeState(postbox: postbox, userLocation: userLocation, resourceReference: resourceReference, tempFilePath: tempFilePath, streamable: streamable, isSeekable: isSeekable, video: video, preferSoftwareDecoding: preferSoftwareDecoding, fetchAutomatically: fetchAutomatically, maximumFetchSize: maximumFetchSize, storeAfterDownload: storeAfterDownload)
context.initializeState(postbox: postbox, userLocation: userLocation, resourceReference: resourceReference, tempFilePath: tempFilePath, limitedFileRange: limitedFileRange, streamable: streamable, isSeekable: isSeekable, video: video, preferSoftwareDecoding: preferSoftwareDecoding, fetchAutomatically: fetchAutomatically, maximumFetchSize: maximumFetchSize, storeAfterDownload: storeAfterDownload)
context.seek(timestamp: timestamp, completed: { streamDescriptionsAndTimestamp in
queue.async {

View File

@ -65,15 +65,12 @@ private func readPacketCallback(userData: UnsafeMutableRawPointer?, buffer: Unsa
}
var fetchedCount: Int32 = 0
var fetchedData: Data?
/*#if DEBUG
maxOffset = max(maxOffset, context.readingOffset + Int(bufferSize))
print("maxOffset \(maxOffset)")
#endif*/
let resourceSize: Int64 = resourceReference.resource.size ?? (Int64.max - 1)
var resourceSize: Int64 = resourceReference.resource.size ?? (Int64.max - 1)
if let limitedFileRange = context.limitedFileRange {
resourceSize = min(resourceSize, limitedFileRange.upperBound)
}
let readCount = max(0, min(resourceSize - context.readingOffset, Int64(bufferSize)))
let requestRange: Range<Int64> = context.readingOffset ..< (context.readingOffset + readCount)
@ -97,9 +94,6 @@ private func readPacketCallback(userData: UnsafeMutableRawPointer?, buffer: Unsa
if readCount == 0 {
fetchedData = Data()
} else {
#if DEBUG
//print("requestRange: \(requestRange)")
#endif
if let tempFilePath = context.tempFilePath, let fileData = (try? Data(contentsOf: URL(fileURLWithPath: tempFilePath), options: .mappedRead))?.subdata(in: Int(requestRange.lowerBound) ..< Int(requestRange.upperBound)) {
fetchedData = fileData
} else {
@ -207,7 +201,7 @@ private func seekCallback(userData: UnsafeMutableRawPointer?, offset: Int64, whe
var result: Int64 = offset
let resourceSize: Int64
var resourceSize: Int64
if let size = resourceReference.resource.size {
resourceSize = size
} else {
@ -240,6 +234,9 @@ private func seekCallback(userData: UnsafeMutableRawPointer?, offset: Int64, whe
resourceSize = Int64.max - 1
}
}
if let limitedFileRange = context.limitedFileRange {
resourceSize = min(resourceSize, limitedFileRange.upperBound)
}
if (whence & FFMPEG_AVSEEK_SIZE) != 0 {
result = Int64(resourceSize == Int(Int32.max - 1) ? 0 : resourceSize)
@ -254,10 +251,21 @@ private func seekCallback(userData: UnsafeMutableRawPointer?, offset: Int64, whe
} else {
if streamable {
if context.tempFilePath == nil {
let fetchRange: Range<Int64> = context.readingOffset ..< Int64.max
context.fetchedDataDisposable.set(fetchedMediaResource(mediaBox: postbox.mediaBox, userLocation: userLocation, userContentType: userContentType, reference: resourceReference, range: (fetchRange, .elevated), statsCategory: statsCategory, preferBackgroundReferenceRevalidation: streamable).start())
let fetchRange: Range<Int64>?
if let limitedFileRange = context.limitedFileRange {
if context.readingOffset < limitedFileRange.upperBound {
fetchRange = context.readingOffset ..< limitedFileRange.upperBound
} else {
fetchRange = nil
}
} else {
fetchRange = context.readingOffset ..< Int64.max
}
if let fetchRange {
context.fetchedDataDisposable.set(fetchedMediaResource(mediaBox: postbox.mediaBox, userLocation: userLocation, userContentType: userContentType, reference: resourceReference, range: (fetchRange, .elevated), statsCategory: statsCategory, preferBackgroundReferenceRevalidation: streamable).start())
}
}
} else if !context.requestedCompleteFetch && context.fetchAutomatically {
} else if !context.requestedCompleteFetch && context.fetchAutomatically && context.limitedFileRange == nil {
context.requestedCompleteFetch = true
if context.tempFilePath == nil {
context.fetchedDataDisposable.set(fetchedMediaResource(mediaBox: postbox.mediaBox, userLocation: userLocation, userContentType: userContentType, reference: resourceReference, statsCategory: statsCategory, preferBackgroundReferenceRevalidation: streamable).start())
@ -285,6 +293,7 @@ final class FFMpegMediaFrameSourceContext: NSObject {
fileprivate var userContentType: MediaResourceUserContentType?
fileprivate var resourceReference: MediaResourceReference?
fileprivate var tempFilePath: String?
fileprivate var limitedFileRange: Range<Int64>?
fileprivate var streamable: Bool?
fileprivate var statsCategory: MediaResourceStatsCategory?
@ -329,16 +338,22 @@ final class FFMpegMediaFrameSourceContext: NSObject {
self.autosaveDisposable.dispose()
}
func initializeState(postbox: Postbox, userLocation: MediaResourceUserLocation, resourceReference: MediaResourceReference, tempFilePath: String?, streamable: Bool, isSeekable: Bool, video: Bool, preferSoftwareDecoding: Bool, fetchAutomatically: Bool, maximumFetchSize: Int?, storeAfterDownload: (() -> Void)?) {
func initializeState(postbox: Postbox, userLocation: MediaResourceUserLocation, resourceReference: MediaResourceReference, tempFilePath: String?, limitedFileRange: Range<Int64>?, streamable: Bool, isSeekable: Bool, video: Bool, preferSoftwareDecoding: Bool, fetchAutomatically: Bool, maximumFetchSize: Int?, storeAfterDownload: (() -> Void)?) {
if self.readingError || self.initializedState != nil {
return
}
let _ = FFMpegMediaFrameSourceContextHelpers.registerFFMpegGlobals
var streamable = streamable
if limitedFileRange != nil {
streamable = true
}
self.postbox = postbox
self.resourceReference = resourceReference
self.tempFilePath = tempFilePath
self.limitedFileRange = limitedFileRange
self.streamable = streamable
self.statsCategory = video ? .video : .audio
self.userLocation = userLocation
@ -383,7 +398,7 @@ final class FFMpegMediaFrameSourceContext: NSObject {
}
if streamable {
if self.tempFilePath == nil {
if self.tempFilePath == nil && limitedFileRange == nil {
self.fetchedDataDisposable.set(fetchedMediaResource(mediaBox: postbox.mediaBox, userLocation: self.userLocation ?? .other, userContentType: self.userContentType ?? .other, reference: resourceReference, range: (0 ..< Int64.max, .elevated), statsCategory: self.statsCategory ?? .generic, preferBackgroundReferenceRevalidation: streamable).start())
}
} else if !self.requestedCompleteFetch && self.fetchAutomatically {
@ -511,7 +526,7 @@ final class FFMpegMediaFrameSourceContext: NSObject {
self.initializedState = InitializedState(avIoContext: avIoContext, avFormatContext: avFormatContext, audioStream: audioStream, videoStream: videoStream)
if streamable {
if streamable && limitedFileRange == nil {
if self.tempFilePath == nil {
self.fetchedFullDataDisposable.set(fetchedMediaResource(mediaBox: postbox.mediaBox, userLocation: self.userLocation ?? .other, userContentType: self.userContentType ?? .other, reference: resourceReference, range: (0 ..< Int64.max, .default), statsCategory: self.statsCategory ?? .generic, preferBackgroundReferenceRevalidation: streamable).start())
}

View File

@ -117,6 +117,7 @@ private final class MediaPlayerContext {
private let userContentType: MediaResourceUserContentType
private let resourceReference: MediaResourceReference
private let tempFilePath: String?
private let limitedFileRange: Range<Int64>?
private let streamable: MediaPlayerStreaming
private let video: Bool
private let preferSoftwareDecoding: Bool
@ -151,7 +152,7 @@ private final class MediaPlayerContext {
private var stoppedAtEnd = false
init(queue: Queue, audioSessionManager: ManagedAudioSession, playerStatus: Promise<MediaPlayerStatus>, audioLevelPipe: ValuePipe<Float>, postbox: Postbox, userLocation: MediaResourceUserLocation, userContentType: MediaResourceUserContentType, resourceReference: MediaResourceReference, tempFilePath: String?, streamable: MediaPlayerStreaming, video: Bool, preferSoftwareDecoding: Bool, playAutomatically: Bool, enableSound: Bool, baseRate: Double, fetchAutomatically: Bool, playAndRecord: Bool, soundMuted: Bool, ambient: Bool, mixWithOthers: Bool, keepAudioSessionWhilePaused: Bool, continuePlayingWithoutSoundOnLostAudioSession: Bool, storeAfterDownload: (() -> Void)? = nil, isAudioVideoMessage: Bool) {
init(queue: Queue, audioSessionManager: ManagedAudioSession, playerStatus: Promise<MediaPlayerStatus>, audioLevelPipe: ValuePipe<Float>, postbox: Postbox, userLocation: MediaResourceUserLocation, userContentType: MediaResourceUserContentType, resourceReference: MediaResourceReference, tempFilePath: String?, limitedFileRange: Range<Int64>?, streamable: MediaPlayerStreaming, video: Bool, preferSoftwareDecoding: Bool, playAutomatically: Bool, enableSound: Bool, baseRate: Double, fetchAutomatically: Bool, playAndRecord: Bool, soundMuted: Bool, ambient: Bool, mixWithOthers: Bool, keepAudioSessionWhilePaused: Bool, continuePlayingWithoutSoundOnLostAudioSession: Bool, storeAfterDownload: (() -> Void)? = nil, isAudioVideoMessage: Bool) {
assert(queue.isCurrent())
self.queue = queue
@ -163,6 +164,7 @@ private final class MediaPlayerContext {
self.userContentType = userContentType
self.resourceReference = resourceReference
self.tempFilePath = tempFilePath
self.limitedFileRange = limitedFileRange
self.streamable = streamable
self.video = video
self.preferSoftwareDecoding = preferSoftwareDecoding
@ -340,7 +342,7 @@ private final class MediaPlayerContext {
let _ = self.playerStatusValue.swap(status)
}
let frameSource = FFMpegMediaFrameSource(queue: self.queue, postbox: self.postbox, userLocation: self.userLocation, userContentType: self.userContentType, resourceReference: self.resourceReference, tempFilePath: self.tempFilePath, streamable: self.streamable.enabled, isSeekable: self.streamable.isSeekable, video: self.video, preferSoftwareDecoding: self.preferSoftwareDecoding, fetchAutomatically: self.fetchAutomatically, stallDuration: self.streamable.parameters.0, lowWaterDuration: self.streamable.parameters.1, highWaterDuration: self.streamable.parameters.2, storeAfterDownload: self.storeAfterDownload)
let frameSource = FFMpegMediaFrameSource(queue: self.queue, postbox: self.postbox, userLocation: self.userLocation, userContentType: self.userContentType, resourceReference: self.resourceReference, tempFilePath: self.tempFilePath, limitedFileRange: self.limitedFileRange, streamable: self.streamable.enabled, isSeekable: self.streamable.isSeekable, video: self.video, preferSoftwareDecoding: self.preferSoftwareDecoding, fetchAutomatically: self.fetchAutomatically, stallDuration: self.streamable.parameters.0, lowWaterDuration: self.streamable.parameters.1, highWaterDuration: self.streamable.parameters.2, storeAfterDownload: self.storeAfterDownload)
let disposable = MetaDisposable()
let updatedSeekState: MediaPlayerSeekState?
if let loadedDuration = loadedDuration {
@ -1128,10 +1130,10 @@ public final class MediaPlayer {
}
}
public init(audioSessionManager: ManagedAudioSession, postbox: Postbox, userLocation: MediaResourceUserLocation, userContentType: MediaResourceUserContentType, resourceReference: MediaResourceReference, tempFilePath: String? = nil, streamable: MediaPlayerStreaming, video: Bool, preferSoftwareDecoding: Bool, playAutomatically: Bool = false, enableSound: Bool, baseRate: Double = 1.0, fetchAutomatically: Bool, playAndRecord: Bool = false, soundMuted: Bool = false, ambient: Bool = false, mixWithOthers: Bool = false, keepAudioSessionWhilePaused: Bool = false, continuePlayingWithoutSoundOnLostAudioSession: Bool = false, storeAfterDownload: (() -> Void)? = nil, isAudioVideoMessage: Bool = false) {
public init(audioSessionManager: ManagedAudioSession, postbox: Postbox, userLocation: MediaResourceUserLocation, userContentType: MediaResourceUserContentType, resourceReference: MediaResourceReference, tempFilePath: String? = nil, limitedFileRange: Range<Int64>? = nil, streamable: MediaPlayerStreaming, video: Bool, preferSoftwareDecoding: Bool, playAutomatically: Bool = false, enableSound: Bool, baseRate: Double = 1.0, fetchAutomatically: Bool, playAndRecord: Bool = false, soundMuted: Bool = false, ambient: Bool = false, mixWithOthers: Bool = false, keepAudioSessionWhilePaused: Bool = false, continuePlayingWithoutSoundOnLostAudioSession: Bool = false, storeAfterDownload: (() -> Void)? = nil, isAudioVideoMessage: Bool = false) {
let audioLevelPipe = self.audioLevelPipe
self.queue.async {
let context = MediaPlayerContext(queue: self.queue, audioSessionManager: audioSessionManager, playerStatus: self.statusValue, audioLevelPipe: audioLevelPipe, postbox: postbox, userLocation: userLocation, userContentType: userContentType, resourceReference: resourceReference, tempFilePath: tempFilePath, streamable: streamable, video: video, preferSoftwareDecoding: preferSoftwareDecoding, playAutomatically: playAutomatically, enableSound: enableSound, baseRate: baseRate, fetchAutomatically: fetchAutomatically, playAndRecord: playAndRecord, soundMuted: soundMuted, ambient: ambient, mixWithOthers: mixWithOthers, keepAudioSessionWhilePaused: keepAudioSessionWhilePaused, continuePlayingWithoutSoundOnLostAudioSession: continuePlayingWithoutSoundOnLostAudioSession, storeAfterDownload: storeAfterDownload, isAudioVideoMessage: isAudioVideoMessage)
let context = MediaPlayerContext(queue: self.queue, audioSessionManager: audioSessionManager, playerStatus: self.statusValue, audioLevelPipe: audioLevelPipe, postbox: postbox, userLocation: userLocation, userContentType: userContentType, resourceReference: resourceReference, tempFilePath: tempFilePath, limitedFileRange: limitedFileRange, streamable: streamable, video: video, preferSoftwareDecoding: preferSoftwareDecoding, playAutomatically: playAutomatically, enableSound: enableSound, baseRate: baseRate, fetchAutomatically: fetchAutomatically, playAndRecord: playAndRecord, soundMuted: soundMuted, ambient: ambient, mixWithOthers: mixWithOthers, keepAudioSessionWhilePaused: keepAudioSessionWhilePaused, continuePlayingWithoutSoundOnLostAudioSession: continuePlayingWithoutSoundOnLostAudioSession, storeAfterDownload: storeAfterDownload, isAudioVideoMessage: isAudioVideoMessage)
self.contextRef = Unmanaged.passRetained(context)
}
}

View File

@ -179,7 +179,8 @@ public final class MediaPlayerNode: ASDisplayNode {
var state = state
takeFrameQueue.async { [weak node] in
switch takeFrame() {
let takeFrameResult = takeFrame()
switch takeFrameResult {
case let .restoreState(frames, atTime, soft):
if !soft {
Queue.mainQueue().async {

View File

@ -41,7 +41,7 @@ public final class MediaTrackFrameBuffer {
private var frameSourceSinkIndex: Int?
private var frames: [MediaTrackDecodableFrame] = []
private(set) var frames: [MediaTrackDecodableFrame] = []
private var maxFrameTime: Double?
private var endOfStream = false
private var bufferedUntilTime: CMTime?
@ -194,8 +194,10 @@ public final class MediaTrackFrameBuffer {
if self.endOfStream, let decodedFrame = self.decoder.takeRemainingFrame() {
return .frame(decodedFrame)
} else {
if let bufferedUntilTime = self.bufferedUntilTime {
if CMTimeCompare(bufferedUntilTime, self.duration) >= 0 || self.endOfStream {
if self.endOfStream {
return .finished
} else if let bufferedUntilTime = self.bufferedUntilTime {
if CMTimeCompare(bufferedUntilTime, self.duration) >= 0 {
return .finished
}
}

View File

@ -15,8 +15,7 @@ public func preloadVideoResource(postbox: Postbox, userLocation: MediaResourceUs
let disposable = MetaDisposable()
queue.async {
let maximumFetchSize = 2 * 1024 * 1024 + 128 * 1024
//let maximumFetchSize = 128
let sourceImpl = FFMpegMediaFrameSource(queue: queue, postbox: postbox, userLocation: userLocation, userContentType: userContentType, resourceReference: resourceReference, tempFilePath: nil, streamable: true, isSeekable: true, video: true, preferSoftwareDecoding: false, fetchAutomatically: true, maximumFetchSize: maximumFetchSize)
let sourceImpl = FFMpegMediaFrameSource(queue: queue, postbox: postbox, userLocation: userLocation, userContentType: userContentType, resourceReference: resourceReference, tempFilePath: nil, limitedFileRange: nil, streamable: true, isSeekable: true, video: true, preferSoftwareDecoding: false, fetchAutomatically: true, maximumFetchSize: maximumFetchSize)
let source = QueueLocalObject(queue: queue, generate: {
return sourceImpl
})

View File

@ -223,7 +223,6 @@ public final class AvailableReactions: Equatable, Codable {
var reactions = reactions
reactions.removeAll(where: { if case .stars = $0.value { return true } else { return false } })
//TODO:release
reactions.append(generateStarsReaction())
self.reactions = reactions
}
@ -243,7 +242,6 @@ public final class AvailableReactions: Equatable, Codable {
self.hash = try container.decodeIfPresent(Int32.self, forKey: .newHash) ?? 0
//TODO:release
var reactions = try container.decode([Reaction].self, forKey: .reactions)
reactions.removeAll(where: { if case .stars = $0.value { return true } else { return false } })
reactions.append(generateStarsReaction())

View File

@ -219,6 +219,7 @@ open class ChatMessageBubbleContentNode: ASDisplayNode {
public var item: ChatMessageBubbleContentItem?
public var updateIsTextSelectionActive: ((Bool) -> Void)?
public var requestInlineUpdate: (() -> Void)?
open var disablesClipping: Bool {
return false

View File

@ -4024,6 +4024,12 @@ public class ChatMessageBubbleItemNode: ChatMessageItemView, ChatMessagePreviewI
contentNode.updateIsTextSelectionActive = { [weak contextSourceNode] value in
contextSourceNode?.updateDistractionFreeMode?(value)
}
contentNode.requestInlineUpdate = { [weak strongSelf] in
guard let strongSelf, let item = strongSelf.item else {
return
}
item.controllerInteraction.requestMessageUpdate(item.message.id, false)
}
contentNode.updateIsExtractedToContextPreview(contextSourceNode.isExtractedToContextPreview)
}
}

View File

@ -95,9 +95,7 @@ public func stringForMessageTimestampStatus(accountPeerId: PeerId, message: Mess
dateText = " "
}
//TODO:release
//TODO:localize
if "".isEmpty, let channel = message.peers[message.id.peerId] as? TelegramChannel, case .broadcast = channel.info {
/*if "".isEmpty, let channel = message.peers[message.id.peerId] as? TelegramChannel, case .broadcast = channel.info {
for media in message.media {
if let file = media as? TelegramMediaFile, file.isVideo, !file.isInstantVideo, !file.isAnimated {
if message.id.namespace == Namespaces.Message.ScheduledCloud {
@ -107,7 +105,7 @@ public func stringForMessageTimestampStatus(accountPeerId: PeerId, message: Mess
}
}
}
}
}*/
if displayFullDate {
let dayText: String

View File

@ -513,6 +513,11 @@ public final class ChatMessageInteractiveMediaNode: ASDisplayNode, GalleryItemTr
public var updateMessageReaction: ((Message, ChatControllerInteractionReaction, Bool, ContextExtractedContentContainingView?) -> Void)?
public var playMessageEffect: ((Message) -> Void)?
public var activateAgeRestrictedMedia: (() -> Void)?
public var requestInlineUpdate: (() -> Void)?
private var hlsInlinePlaybackRange: Range<Int64>?
private var appliedHlsInlinePlaybackRange: Range<Int64>?
private var hlsInlinePlaybackRangeDisposable: Disposable?
override public init() {
self.pinchContainerNode = PinchSourceContainerNode()
@ -618,6 +623,7 @@ public final class ChatMessageInteractiveMediaNode: ASDisplayNode, GalleryItemTr
self.playerStatusDisposable.dispose()
self.fetchDisposable.dispose()
self.secretTimer?.invalidate()
self.hlsInlinePlaybackRangeDisposable?.dispose()
}
public func isAvailableForGalleryTransition() -> Bool {
@ -779,6 +785,9 @@ public final class ChatMessageInteractiveMediaNode: ASDisplayNode, GalleryItemTr
let currentAutomaticDownload = self.automaticDownload
let currentAutomaticPlayback = self.automaticPlayback
let hlsInlinePlaybackRange = self.hlsInlinePlaybackRange
let appliedHlsInlinePlaybackRange = self.appliedHlsInlinePlaybackRange
return { [weak self] context, presentationData, dateTimeFormat, message, associatedData, attributes, media, mediaIndex, dateAndStatus, automaticDownload, peerType, peerId, sizeCalculation, layoutConstants, contentMode, presentationContext in
let _ = peerType
@ -1085,6 +1094,9 @@ public final class ChatMessageInteractiveMediaNode: ASDisplayNode, GalleryItemTr
} else {
mediaUpdated = true
}
if hlsInlinePlaybackRange != appliedHlsInlinePlaybackRange {
mediaUpdated = true
}
var isSendingUpdated = false
if let currentMessage = currentMessage {
@ -1107,6 +1119,8 @@ public final class ChatMessageInteractiveMediaNode: ASDisplayNode, GalleryItemTr
var updateAnimatedStickerFile: TelegramMediaFile?
var onlyFullSizeVideoThumbnail: Bool?
var loadHLSRangeVideoFile: TelegramMediaFile?
var emptyColor: UIColor
var patternArguments: PatternWallpaperArguments?
if isSticker {
@ -1216,7 +1230,7 @@ public final class ChatMessageInteractiveMediaNode: ASDisplayNode, GalleryItemTr
messageMediaImageCancelInteractiveFetch(context: context, messageId: message.id, image: image, resource: resource)
}
})
} else if let file = media as? TelegramMediaFile {
} else if var file = media as? TelegramMediaFile {
if isSecretMedia {
updateImageSignal = { synchronousLoad, _ in
return chatSecretMessageVideo(account: context.account, userLocation: .peer(message.id.peerId), videoReference: .message(message: MessageReference(message), media: file))
@ -1248,20 +1262,34 @@ public final class ChatMessageInteractiveMediaNode: ASDisplayNode, GalleryItemTr
}
if file.isVideo && !file.isVideoSticker && !isSecretMedia && automaticPlayback && !uploading {
updateVideoFile = file
if hasCurrentVideoNode {
if let currentFile = currentMedia as? TelegramMediaFile {
if currentFile.resource is EmptyMediaResource {
replaceVideoNode = true
} else if currentFile.fileId.namespace == Namespaces.Media.CloudFile && file.fileId.namespace == Namespaces.Media.CloudFile && currentFile.fileId != file.fileId {
replaceVideoNode = true
} else if currentFile.fileId != file.fileId && file.fileId.namespace == Namespaces.Media.CloudSecretFile {
replaceVideoNode = true
} else if file.isAnimated && currentFile.fileId.namespace == Namespaces.Media.LocalFile && file.fileId.namespace == Namespaces.Media.CloudFile {
replaceVideoNode = true
}
loadHLSRangeVideoFile = file
var passFile = true
if NativeVideoContent.isHLSVideo(file: file), let minimizedQualityFile = HLSVideoContent.minimizedHLSQuality(file: .message(message: MessageReference(message), media: file)) {
file = minimizedQualityFile.file.media
if hlsInlinePlaybackRange == nil {
passFile = false
}
} else if !(file.resource is LocalFileVideoMediaResource) {
}
if passFile {
updateVideoFile = file
if hasCurrentVideoNode {
if let currentFile = currentMedia as? TelegramMediaFile {
if currentFile.resource is EmptyMediaResource {
replaceVideoNode = true
} else if currentFile.fileId.namespace == Namespaces.Media.CloudFile && file.fileId.namespace == Namespaces.Media.CloudFile && currentFile.fileId != file.fileId {
replaceVideoNode = true
} else if currentFile.fileId != file.fileId && file.fileId.namespace == Namespaces.Media.CloudSecretFile {
replaceVideoNode = true
} else if file.isAnimated && currentFile.fileId.namespace == Namespaces.Media.LocalFile && file.fileId.namespace == Namespaces.Media.CloudFile {
replaceVideoNode = true
}
}
} else if !(file.resource is LocalFileVideoMediaResource) {
replaceVideoNode = true
}
} else if hasCurrentVideoNode {
replaceVideoNode = true
}
} else {
@ -1352,7 +1380,7 @@ public final class ChatMessageInteractiveMediaNode: ASDisplayNode, GalleryItemTr
}, cancel: {
chatMessageWebFileCancelInteractiveFetch(account: context.account, image: image)
})
} else if let file = media as? TelegramMediaFile {
} else if var file = media as? TelegramMediaFile {
if isSecretMedia {
updateImageSignal = { synchronousLoad, _ in
return chatSecretMessageVideo(account: context.account, userLocation: .peer(message.id.peerId), videoReference: .message(message: MessageReference(message), media: file))
@ -1384,20 +1412,34 @@ public final class ChatMessageInteractiveMediaNode: ASDisplayNode, GalleryItemTr
}
if file.isVideo && !file.isVideoSticker && !isSecretMedia && automaticPlayback && !uploading {
updateVideoFile = file
if hasCurrentVideoNode {
if let currentFile = currentMedia as? TelegramMediaFile {
if currentFile.resource is EmptyMediaResource {
replaceVideoNode = true
} else if currentFile.fileId.namespace == Namespaces.Media.CloudFile && file.fileId.namespace == Namespaces.Media.CloudFile && currentFile.fileId != file.fileId {
replaceVideoNode = true
} else if currentFile.fileId != file.fileId && file.fileId.namespace == Namespaces.Media.CloudSecretFile {
replaceVideoNode = true
} else if file.isAnimated && currentFile.fileId.namespace == Namespaces.Media.LocalFile && file.fileId.namespace == Namespaces.Media.CloudFile {
replaceVideoNode = true
}
loadHLSRangeVideoFile = file
var passFile = true
if NativeVideoContent.isHLSVideo(file: file), let minimizedQualityFile = HLSVideoContent.minimizedHLSQuality(file: .message(message: MessageReference(message), media: file)) {
file = minimizedQualityFile.file.media
if hlsInlinePlaybackRange == nil {
passFile = false
}
} else if !(file.resource is LocalFileVideoMediaResource) {
}
if passFile {
updateVideoFile = file
if hasCurrentVideoNode {
if let currentFile = currentMedia as? TelegramMediaFile {
if currentFile.resource is EmptyMediaResource {
replaceVideoNode = true
} else if currentFile.fileId.namespace == Namespaces.Media.CloudFile && file.fileId.namespace == Namespaces.Media.CloudFile && currentFile.fileId != file.fileId {
replaceVideoNode = true
} else if currentFile.fileId != file.fileId && file.fileId.namespace == Namespaces.Media.CloudSecretFile {
replaceVideoNode = true
} else if file.isAnimated && currentFile.fileId.namespace == Namespaces.Media.LocalFile && file.fileId.namespace == Namespaces.Media.CloudFile {
replaceVideoNode = true
}
}
} else if !(file.resource is LocalFileVideoMediaResource) {
replaceVideoNode = true
}
} else if hasCurrentVideoNode {
replaceVideoNode = true
}
} else {
@ -1661,12 +1703,26 @@ public final class ChatMessageInteractiveMediaNode: ASDisplayNode, GalleryItemTr
let loopVideo = updatedVideoFile.isAnimated
let videoContent: UniversalVideoContent
videoContent = NativeVideoContent(id: .message(message.stableId, updatedVideoFile.fileId), userLocation: .peer(message.id.peerId), fileReference: .message(message: MessageReference(message), media: updatedVideoFile), streamVideo: streamVideo ? .conservative : .none, loopVideo: loopVideo, enableSound: false, fetchAutomatically: false, onlyFullSizeThumbnail: (onlyFullSizeVideoThumbnail ?? false), continuePlayingWithoutSoundOnLostAudioSession: isInlinePlayableVideo, placeholderColor: emptyColor, captureProtected: message.isCopyProtected() || isExtendedMedia, storeAfterDownload: { [weak context] in
guard let context, let peerId else {
return
videoContent = NativeVideoContent(
id: .message(message.stableId, updatedVideoFile.fileId),
userLocation: .peer(message.id.peerId),
fileReference: .message(message: MessageReference(message), media: updatedVideoFile),
limitedFileRange: hlsInlinePlaybackRange,
streamVideo: streamVideo ? .conservative : .none,
loopVideo: loopVideo,
enableSound: false,
fetchAutomatically: false,
onlyFullSizeThumbnail: (onlyFullSizeVideoThumbnail ?? false),
continuePlayingWithoutSoundOnLostAudioSession: isInlinePlayableVideo,
placeholderColor: emptyColor,
captureProtected: message.isCopyProtected() || isExtendedMedia,
storeAfterDownload: { [weak context] in
guard let context, let peerId else {
return
}
let _ = storeDownloadedMedia(storeManager: context.downloadedMediaStoreManager, media: .message(message: MessageReference(message), media: updatedVideoFile), peerId: peerId).startStandalone()
}
let _ = storeDownloadedMedia(storeManager: context.downloadedMediaStoreManager, media: .message(message: MessageReference(message), media: updatedVideoFile), peerId: peerId).startStandalone()
})
)
let videoNode = UniversalVideoNode(accountId: context.account.id, postbox: context.account.postbox, audioSession: mediaManager.audioSession, manager: mediaManager.universalVideoManager, decoration: decoration, content: videoContent, priority: .embedded)
videoNode.isUserInteractionEnabled = false
videoNode.ownsContentNodeUpdated = { [weak self] owns in
@ -1916,6 +1972,36 @@ public final class ChatMessageInteractiveMediaNode: ASDisplayNode, GalleryItemTr
strongSelf.updateStatus(animated: synchronousLoads)
strongSelf.pinchContainerNode.isPinchGestureEnabled = !isSecretMedia && !isExtendedMediaPreview && !hasSpoiler
strongSelf.appliedHlsInlinePlaybackRange = hlsInlinePlaybackRange
if let loadHLSRangeVideoFile, NativeVideoContent.isHLSVideo(file: loadHLSRangeVideoFile) {
if strongSelf.hlsInlinePlaybackRangeDisposable == nil {
strongSelf.hlsInlinePlaybackRangeDisposable = (HLSVideoContent.minimizedHLSQualityPreloadData(
postbox: context.account.postbox,
file: .message(message: MessageReference(message), media: loadHLSRangeVideoFile),
userLocation: .peer(message.id.peerId),
prefixSeconds: 10,
autofetchPlaylist: false
)
|> deliverOnMainQueue).startStrict(next: { [weak strongSelf] preloadData in
guard let strongSelf else {
return
}
if let preloadData {
strongSelf.hlsInlinePlaybackRange = preloadData.1
} else {
strongSelf.hlsInlinePlaybackRange = nil
}
strongSelf.requestInlineUpdate?()
})
}
} else {
if let hlsInlinePlaybackRangeDisposable = strongSelf.hlsInlinePlaybackRangeDisposable {
strongSelf.hlsInlinePlaybackRangeDisposable = nil
hlsInlinePlaybackRangeDisposable.dispose()
}
}
}
})
})
@ -2140,6 +2226,10 @@ public final class ChatMessageInteractiveMediaNode: ASDisplayNode, GalleryItemTr
}
}
}
if let file = self.media as? TelegramMediaFile, NativeVideoContent.isHLSVideo(file: file) {
fetchStatus = .Local
}
let formatting = DataSizeStringFormatting(strings: strings, decimalSeparator: decimalSeparator)
@ -2185,15 +2275,10 @@ public final class ChatMessageInteractiveMediaNode: ASDisplayNode, GalleryItemTr
if let duration = file.duration, !message.flags.contains(.Unsent) {
let durationString = file.isAnimated ? gifTitle : stringForDuration(playerDuration > 0 ? playerDuration : Int32(duration), position: playerPosition)
if isMediaStreamable(message: message, media: file) {
if NativeVideoContent.isHLSVideo(file: file) {
mediaDownloadState = .fetching(progress: nil)
badgeContent = .text(inset: 12.0, backgroundColor: messageTheme.mediaDateAndStatusFillColor, foregroundColor: messageTheme.mediaDateAndStatusTextColor, text: NSAttributedString(string: durationString), iconName: nil)
} else {
badgeContent = .mediaDownload(backgroundColor: messageTheme.mediaDateAndStatusFillColor, foregroundColor: messageTheme.mediaDateAndStatusTextColor, duration: durationString, size: active ? sizeString : nil, muted: muted, active: active)
mediaDownloadState = .fetching(progress: automaticPlayback ? nil : adjustedProgress)
if self.playerStatus?.status == .playing {
mediaDownloadState = nil
}
badgeContent = .mediaDownload(backgroundColor: messageTheme.mediaDateAndStatusFillColor, foregroundColor: messageTheme.mediaDateAndStatusTextColor, duration: durationString, size: active ? sizeString : nil, muted: muted, active: active)
mediaDownloadState = .fetching(progress: automaticPlayback ? nil : adjustedProgress)
if self.playerStatus?.status == .playing {
mediaDownloadState = nil
}
state = automaticPlayback ? .none : .play(messageTheme.mediaOverlayControlColors.foregroundColor)
} else {
@ -2292,11 +2377,7 @@ public final class ChatMessageInteractiveMediaNode: ASDisplayNode, GalleryItemTr
do {
let durationString = file.isAnimated ? gifTitle : stringForDuration(playerDuration > 0 ? playerDuration : (file.duration.flatMap { Int32(floor($0)) } ?? 0), position: playerPosition)
if wideLayout {
if NativeVideoContent.isHLSVideo(file: file) {
state = automaticPlayback ? .none : .play(messageTheme.mediaOverlayControlColors.foregroundColor)
mediaDownloadState = nil
badgeContent = .text(inset: 12.0, backgroundColor: messageTheme.mediaDateAndStatusFillColor, foregroundColor: messageTheme.mediaDateAndStatusTextColor, text: NSAttributedString(string: durationString), iconName: nil)
} else if isMediaStreamable(message: message, media: file), let fileSize = file.size, fileSize > 0 && fileSize != .max {
if isMediaStreamable(message: message, media: file), let fileSize = file.size, fileSize > 0 && fileSize != .max {
state = automaticPlayback ? .none : .play(messageTheme.mediaOverlayControlColors.foregroundColor)
badgeContent = .mediaDownload(backgroundColor: messageTheme.mediaDateAndStatusFillColor, foregroundColor: messageTheme.mediaDateAndStatusTextColor, duration: durationString, size: dataSizeString(fileSize, formatting: formatting), muted: muted, active: true)
mediaDownloadState = .remote
@ -2680,8 +2761,13 @@ public final class ChatMessageInteractiveMediaNode: ASDisplayNode, GalleryItemTr
public func playMediaWithSound() -> (action: (Double?) -> Void, soundEnabled: Bool, isVideoMessage: Bool, isUnread: Bool, badgeNode: ASDisplayNode?)? {
var isAnimated = false
if let file = self.media as? TelegramMediaFile, file.isAnimated {
isAnimated = true
if let file = self.media as? TelegramMediaFile {
if NativeVideoContent.isHLSVideo(file: file) {
return nil
}
if file.isAnimated {
isAnimated = true
}
}
var actionAtEnd: MediaPlayerPlayOnceWithSoundActionAtEnd = .loopDisablingSound

View File

@ -89,6 +89,12 @@ public class ChatMessageMediaBubbleContentNode: ChatMessageBubbleContentNode {
}
strongSelf.item?.controllerInteraction.playMessageEffect(message)
}
self.interactiveImageNode.requestInlineUpdate = { [weak self] in
guard let self else {
return
}
self.requestInlineUpdate?()
}
}
required public init?(coder aDecoder: NSCoder) {

View File

@ -106,15 +106,14 @@ extension ChatControllerImpl {
}
}
//TODO:release
//TODO:localize
if "".isEmpty, let channel = message.peers[message.id.peerId] as? TelegramChannel, case .broadcast = channel.info {
/*if "".isEmpty, let channel = message.peers[message.id.peerId] as? TelegramChannel, case .broadcast = channel.info {
for media in message.media {
if let file = media as? TelegramMediaFile, file.isVideo, !file.isInstantVideo, !file.isAnimated {
tip = .videoProcessing
}
}
}
}*/
if actions.tip == nil {
actions.tip = tip

View File

@ -9117,7 +9117,8 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
}
func shouldDivertMessagesToScheduled(targetPeer: EnginePeer? = nil, messages: [EnqueueMessage]) -> Signal<Bool, NoError> {
guard let peer = targetPeer?._asPeer() ?? self.presentationInterfaceState.renderedPeer?.peer else {
return .single(false)
/*guard let peer = targetPeer?._asPeer() ?? self.presentationInterfaceState.renderedPeer?.peer else {
return .single(false)
}
@ -9162,7 +9163,7 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
}
return false
}
}
}*/
}
func sendMessages(_ messages: [EnqueueMessage], media: Bool = false, commit: Bool = false) {

File diff suppressed because one or more lines are too long

View File

@ -190,6 +190,7 @@ export class VideoElementStub extends EventTarget {
}
notifySeeked() {
    // Emit the event pair a real <video> element produces around a seek:
    // 'seeking' first, then 'seeked' once the operation is considered done.
    // NOTE(review): both are dispatched synchronously here — confirm listeners
    // do not rely on an asynchronous gap between the two events.
    this.dispatchEvent(new Event('seeking'));
    this.dispatchEvent(new Event('seeked'));
}
}

View File

@ -214,15 +214,7 @@ public final class HLSVideoContent: UniversalVideoContent {
}
public func makeContentNode(accountId: AccountRecordId, postbox: Postbox, audioSession: ManagedAudioSession) -> UniversalVideoContentNode & ASDisplayNode {
if #available(iOS 17.1, *) {
#if DEBUG || true
return HLSVideoJSNativeContentNode(accountId: accountId, postbox: postbox, audioSessionManager: audioSession, userLocation: self.userLocation, fileReference: self.fileReference, streamVideo: self.streamVideo, loopVideo: self.loopVideo, enableSound: self.enableSound, baseRate: self.baseRate, fetchAutomatically: self.fetchAutomatically)
#else
return HLSVideoJSContentNode(accountId: accountId, postbox: postbox, audioSessionManager: audioSession, userLocation: self.userLocation, fileReference: self.fileReference, streamVideo: self.streamVideo, loopVideo: self.loopVideo, enableSound: self.enableSound, baseRate: self.baseRate, fetchAutomatically: self.fetchAutomatically)
#endif
} else {
return HLSVideoAVContentNode(accountId: accountId, postbox: postbox, audioSessionManager: audioSession, userLocation: self.userLocation, fileReference: self.fileReference, streamVideo: self.streamVideo, loopVideo: self.loopVideo, enableSound: self.enableSound, baseRate: self.baseRate, fetchAutomatically: self.fetchAutomatically)
}
return HLSVideoJSNativeContentNode(accountId: accountId, postbox: postbox, audioSessionManager: audioSession, userLocation: self.userLocation, fileReference: self.fileReference, streamVideo: self.streamVideo, loopVideo: self.loopVideo, enableSound: self.enableSound, baseRate: self.baseRate, fetchAutomatically: self.fetchAutomatically)
}
public func isEqual(to other: UniversalVideoContent) -> Bool {

View File

@ -1,777 +0,0 @@
import Foundation
import SwiftSignalKit
import UniversalMediaPlayer
import Postbox
import TelegramCore
import WebKit
import AsyncDisplayKit
import AccountContext
import TelegramAudio
import Display
import PhotoResources
import TelegramVoip
import RangeSet
/// Parses an HTTP `Range` header value of the form `bytes=<start>-<end>`
/// (inclusive end, per RFC 9110) into a half-open Swift `Range<Int>`.
///
/// - Parameter rangeString: The raw header value, e.g. `"bytes=0-1023"`.
/// - Returns: `start ..< end + 1`, or `nil` when the string is not of the
///   exact two-bound form. Open-ended and suffix ranges ("bytes=100-",
///   "bytes=-500") are rejected, matching the previous behavior.
private func parseRange(from rangeString: String) -> Range<Int>? {
    guard rangeString.hasPrefix("bytes=") else {
        return nil
    }
    let rangeValues = rangeString.dropFirst("bytes=".count).split(separator: "-")
    guard rangeValues.count == 2,
          let start = Int(rangeValues[0]),
          let end = Int(rangeValues[1]),
          // `start <= end` prevents a runtime trap when forming the Range from
          // a malformed header such as "bytes=10-5"; `end < Int.max` prevents
          // overflow in `end + 1`.
          start <= end, end < Int.max else {
        return nil
    }
    return start ..< end + 1
}
/// Serves "tghls"-scheme requests from the embedded web view out of the local
/// `HLSServerSource`: the master playlist ("master.m3u8"), per-quality
/// playlists ("hls_level_N.m3u8"), and byte ranges of quality files
/// ("partfile<id>.mp4").
private final class CustomVideoSchemeHandler: NSObject, WKURLSchemeHandler {
    /// One in-flight request from the web view, backed by a signal from the
    /// HLS server source; completes the WKURLSchemeTask when data arrives.
    private final class PendingTask {
        let sourceTask: any WKURLSchemeTask
        let isCompleted = Atomic<Bool>(value: false)
        var disposable: Disposable?
        
        init(source: HLSServerSource, sourceTask: any WKURLSchemeTask) {
            self.sourceTask = sourceTask
            
            // `parseRange` already converts the inclusive HTTP "bytes=a-b"
            // header into the half-open range a ..< b + 1.
            var requestRange: Range<Int>?
            if let rangeString = sourceTask.request.allHTTPHeaderFields?["Range"] {
                requestRange = parseRange(from: rangeString)
            }
            
            guard let url = sourceTask.request.url else {
                return
            }
            let filePath = (url.absoluteString as NSString).lastPathComponent
            if filePath == "master.m3u8" {
                self.disposable = source.masterPlaylistData().startStrict(next: { [weak self] data in
                    guard let self else {
                        return
                    }
                    self.sendResponseAndClose(data: data.data(using: .utf8)!)
                })
            } else if filePath.hasPrefix("hls_level_") && filePath.hasSuffix(".m3u8") {
                guard let levelIndex = Int(String(filePath[filePath.index(filePath.startIndex, offsetBy: "hls_level_".count) ..< filePath.index(filePath.endIndex, offsetBy: -".m3u8".count)])) else {
                    self.sendErrorAndClose()
                    return
                }
                self.disposable = source.playlistData(quality: levelIndex).startStrict(next: { [weak self] data in
                    guard let self else {
                        return
                    }
                    self.sendResponseAndClose(data: data.data(using: .utf8)!)
                })
            } else if filePath.hasPrefix("partfile") && filePath.hasSuffix(".mp4") {
                let fileId = String(filePath[filePath.index(filePath.startIndex, offsetBy: "partfile".count) ..< filePath.index(filePath.endIndex, offsetBy: -".mp4".count)])
                guard let fileIdValue = Int64(fileId) else {
                    self.sendErrorAndClose()
                    return
                }
                guard let requestRange else {
                    self.sendErrorAndClose()
                    return
                }
                // Fix: `requestRange` is already half-open; the previous code
                // widened it again with `upperBound + 1`, requesting one byte
                // more than the client asked for.
                self.disposable = (source.fileData(id: fileIdValue, range: requestRange)
                |> take(1)).start(next: { [weak self] result in
                    guard let self else {
                        return
                    }
                    if let (file, range, totalSize) = result {
                        // Memory-map the part file and slice out only the
                        // subrange the source says is valid.
                        guard let allData = try? Data(contentsOf: URL(fileURLWithPath: file.path), options: .mappedIfSafe) else {
                            return
                        }
                        let data = allData.subdata(in: range)
                        self.sendResponseAndClose(data: data, range: requestRange, totalSize: totalSize)
                    } else {
                        self.sendErrorAndClose()
                    }
                })
            } else {
                self.sendErrorAndClose()
            }
        }
        
        deinit {
            self.disposable?.dispose()
        }
        
        func cancel() {
        }
        
        /// Fails the scheme task with a generic 500-style error.
        func sendErrorAndClose() {
            self.sourceTask.didFailWithError(NSError(domain: "LocalVideoError", code: 500, userInfo: nil))
        }
        
        /// Sends headers, body, and completion for this task.
        /// - Parameters:
        ///   - data: The response body.
        ///   - range: The half-open byte range `data` covers, when this is a
        ///     partial response.
        ///   - totalSize: Total size of the underlying resource, required
        ///     together with `range` to emit a Content-Range header.
        private func sendResponseAndClose(data: Data, range: Range<Int>? = nil, totalSize: Int? = nil) {
            let responseLength = data.count
            
            var headers: [String: String] = [
                "Content-Length": "\(responseLength)",
                "Connection": "close",
                "Access-Control-Allow-Origin": "*"
            ]
            // Fix: partial responses must use status 206 and an *inclusive*
            // end byte in Content-Range (RFC 9110). The previous code always
            // answered 200 and wrote the half-open upperBound, overstating
            // the end byte by one.
            let statusCode: Int
            if let range, let totalSize {
                headers["Content-Range"] = "bytes \(range.lowerBound)-\(range.upperBound - 1)/\(totalSize)"
                statusCode = 206
            } else {
                statusCode = 200
            }
            
            let response = HTTPURLResponse(url: self.sourceTask.request.url!,
                                           statusCode: statusCode,
                                           httpVersion: "HTTP/1.1",
                                           headerFields: headers)
            
            // Header, body, completion — the required WKURLSchemeTask sequence.
            self.sourceTask.didReceive(response!)
            self.sourceTask.didReceive(data)
            self.sourceTask.didFinish()
        }
    }
    
    private let source: HLSServerSource
    private var pendingTasks: [PendingTask] = []
    
    init(source: HLSServerSource) {
        self.source = source
    }
    
    func webView(_ webView: WKWebView, start urlSchemeTask: any WKURLSchemeTask) {
        self.pendingTasks.append(PendingTask(source: self.source, sourceTask: urlSchemeTask))
    }
    
    func webView(_ webView: WKWebView, stop urlSchemeTask: any WKURLSchemeTask) {
        if let index = self.pendingTasks.firstIndex(where: { $0.sourceTask === urlSchemeTask }) {
            let task = self.pendingTasks[index]
            self.pendingTasks.remove(at: index)
            task.cancel()
        }
    }
}
/// Forwards `WKScriptMessageHandler` callbacks into a closure.
/// NOTE(review): the name suggests this exists to avoid a retain cycle with
/// WKUserContentController (which retains handlers strongly); the forwarding
/// closure is expected to capture its real target weakly — confirm at call sites.
private class WeakScriptMessageHandler: NSObject, WKScriptMessageHandler {
    private let handler: (WKScriptMessage) -> ()
    
    init(_ handler: @escaping (WKScriptMessage) -> ()) {
        self.handler = handler
        
        super.init()
    }
    
    func userContentController(_ controller: WKUserContentController, didReceive scriptMessage: WKScriptMessage) {
        self.handler(scriptMessage)
    }
}
final class HLSVideoJSContentNode: ASDisplayNode, UniversalVideoContentNode {
/// A single quality level reported by the JS HLS player.
private struct Level {
    // Bandwidth of the level as reported by the player.
    // NOTE(review): presumably bits per second (hls.js convention) — confirm.
    let bitrate: Int
    // Video dimensions of the level, in pixels.
    let width: Int
    let height: Int
    
    init(bitrate: Int, width: Int, height: Int) {
        self.bitrate = bitrate
        self.width = width
        self.height = height
    }
}
// Most recent bandwidth estimate reported by the JS player, shared across all
// instances so a new player starts with a realistic value.
private static var sharedBandwidthEstimate: Double?

private let postbox: Postbox
private let userLocation: MediaResourceUserLocation
private let fileReference: FileMediaReference
// Duration taken from file metadata; used for status reporting.
private let approximateDuration: Double
// Point size the web view is laid out at (pixel size divided by screen scale).
private let intrinsicDimensions: CGSize

private let audioSessionManager: ManagedAudioSession
private let audioSessionDisposable = MetaDisposable()
private var hasAudioSession = false

// Local server source that serves HLS playlists/file parts to the web view;
// nil when no HLS quality set is available for the file.
private let playerSource: HLSServerSource?
private var serverDisposable: Disposable?

// Listeners invoked by performActionAtEnd().
private let playbackCompletedListeners = Bag<() -> Void>()

private var initializedStatus = false
private var statusValue = MediaPlayerStatus(generationTimestamp: 0.0, duration: 0.0, dimensions: CGSize(), timestamp: 0.0, baseRate: 1.0, seekId: 0, status: .paused, soundEnabled: true)
private var isBuffering = false
private var seekId: Int = 0
private let _status = ValuePromise<MediaPlayerStatus>()
var status: Signal<MediaPlayerStatus, NoError> {
    return self._status.get()
}

private let _bufferingStatus = Promise<(RangeSet<Int64>, Int64)?>()
var bufferingStatus: Signal<(RangeSet<Int64>, Int64)?, NoError> {
    return self._bufferingStatus.get()
}

private let _isNativePictureInPictureActive = ValuePromise<Bool>(false, ignoreRepeated: true)
var isNativePictureInPictureActive: Signal<Bool, NoError> {
    return self._isNativePictureInPictureActive.get()
}

private let _ready = Promise<Void>()
var ready: Signal<Void, NoError> {
    return self._ready.get()
}

private let _preloadCompleted = ValuePromise<Bool>()
var preloadCompleted: Signal<Bool, NoError> {
    return self._preloadCompleted.get()
}

// Poster/thumbnail image shown until the web view renders its first frame.
private let imageNode: TransformImageNode
private let webView: WKWebView

private let fetchDisposable = MetaDisposable()

private var dimensions: CGSize?
private let dimensionsPromise = ValuePromise<CGSize>(CGSize())

private var validLayout: (size: CGSize, actualSize: CGSize)?

// Drives periodic status updates while playing (see updateStatus()).
private var statusTimer: Foundation.Timer?

private var preferredVideoQuality: UniversalVideoContentVideoQuality = .auto

// Mirror of the JS player's last reported state, updated from the
// "playerStatus" / "playerCurrentTime" script messages.
private var playerIsReady: Bool = false
private var playerIsFirstFrameReady: Bool = false
private var playerIsPlaying: Bool = false
private var playerRate: Double = 0.0
private var playerDefaultRate: Double = 1.0
private var playerTime: Double = 0.0
private var playerTimeGenerationTimestamp: Double = 0.0

private var playerAvailableLevels: [Int: Level] = [:]
private var playerCurrentLevelIndex: Int?

// State requested by callers, (re)applied whenever the player becomes ready.
private var hasRequestedPlayerLoad: Bool = false
private var requestedPlaying: Bool = false
private var requestedBaseRate: Double = 1.0
private var requestedLevelIndex: Int?
/// Builds the node: configures a WKWebView-hosted JS HLS player, wires up the
/// "performAction" script-message channel, and registers the local HLS server
/// source that serves playlists and file parts to the web view.
/// NOTE(review): `streamVideo`, `loopVideo`, `enableSound` and
/// `fetchAutomatically` are accepted but not read anywhere in this initializer.
init(accountId: AccountRecordId, postbox: Postbox, audioSessionManager: ManagedAudioSession, userLocation: MediaResourceUserLocation, fileReference: FileMediaReference, streamVideo: Bool, loopVideo: Bool, enableSound: Bool, baseRate: Double, fetchAutomatically: Bool) {
    self.postbox = postbox
    self.fileReference = fileReference
    self.approximateDuration = fileReference.media.duration ?? 0.0
    self.audioSessionManager = audioSessionManager
    self.userLocation = userLocation
    self.requestedBaseRate = baseRate
    
    /*#if DEBUG
    if let minimizedQualityFile = HLSVideoContent.minimizedHLSQualityFile(file: self.fileReference) {
    let _ = fetchedMediaResource(mediaBox: postbox.mediaBox, userLocation: userLocation, userContentType: .video, reference: minimizedQualityFile.resourceReference(minimizedQualityFile.media.resource), range: (0 ..< 5 * 1024 * 1024, .default)).startStandalone()
    }
    #endif*/
    
    // If the thumbnail and the video metadata disagree on orientation, swap
    // the video dimensions to match the thumbnail's orientation.
    if var dimensions = fileReference.media.dimensions {
        if let thumbnail = fileReference.media.previewRepresentations.first {
            let dimensionsVertical = dimensions.width < dimensions.height
            let thumbnailVertical = thumbnail.dimensions.width < thumbnail.dimensions.height
            if dimensionsVertical != thumbnailVertical {
                dimensions = PixelDimensions(width: dimensions.height, height: dimensions.width)
            }
        }
        self.dimensions = dimensions.cgSize
    } else {
        // Placeholder size until the thumbnail signal delivers real dimensions.
        self.dimensions = CGSize(width: 128.0, height: 128.0)
    }
    
    self.imageNode = TransformImageNode()
    
    let config = WKWebViewConfiguration()
    config.allowsInlineMediaPlayback = true
    config.mediaTypesRequiringUserActionForPlayback = []
    config.allowsPictureInPictureMediaPlayback = true
    
    // Serve playlists/segments to the web view through the custom "tghls"
    // URL scheme, backed by the local HLS server source.
    var playerSource: HLSServerSource?
    if let qualitySet = HLSQualitySet(baseFile: fileReference) {
        let playerSourceValue = HLSServerSource(accountId: accountId.int64, fileId: fileReference.media.fileId.id, postbox: postbox, userLocation: userLocation, playlistFiles: qualitySet.playlistFiles, qualityFiles: qualitySet.qualityFiles)
        playerSource = playerSourceValue
        let schemeHandler = CustomVideoSchemeHandler(source: playerSourceValue)
        config.setURLSchemeHandler(schemeHandler, forURLScheme: "tghls")
    }
    self.playerSource = playerSource
    
    let userController = WKUserContentController()
    
    // Bridge JS -> native events; the actual handler is assigned after
    // super.init() (see below) and captured weakly here.
    var handleScriptMessage: ((WKScriptMessage) -> Void)?
    userController.add(WeakScriptMessageHandler { message in
        handleScriptMessage?(message)
    }, name: "performAction")
    
    let isDebug: Bool
    #if DEBUG
    isDebug = true
    #else
    isDebug = false
    #endif
    
    let mediaDimensions = fileReference.media.dimensions?.cgSize ?? CGSize(width: 480.0, height: 320.0)
    var intrinsicDimensions = mediaDimensions.aspectFittedOrSmaller(CGSize(width: 1280.0, height: 1280.0))
    
    // Bootstraps the JS player once the page loads, seeding it with the last
    // known bandwidth estimate so quality selection starts sensibly.
    let userScriptJs = """
    playerInitialize({
    'debug': \(isDebug),
    'width': \(Int(intrinsicDimensions.width)),
    'height': \(Int(intrinsicDimensions.height)),
    'bandwidthEstimate': \(HLSVideoJSContentNode.sharedBandwidthEstimate ?? 500000.0)
    });
    """;
    
    let userScript = WKUserScript(source: userScriptJs, injectionTime: .atDocumentEnd, forMainFrameOnly: true)
    userController.addUserScript(userScript)
    
    config.userContentController = userController
    
    // The web view renders at pixel size; convert to points for layout.
    intrinsicDimensions.width = floor(intrinsicDimensions.width / UIScreenScale)
    intrinsicDimensions.height = floor(intrinsicDimensions.height / UIScreenScale)
    self.intrinsicDimensions = intrinsicDimensions
    
    self.webView = WKWebView(frame: CGRect(origin: CGPoint(), size: self.intrinsicDimensions), configuration: config)
    self.webView.scrollView.isScrollEnabled = false
    self.webView.allowsLinkPreview = false
    self.webView.allowsBackForwardNavigationGestures = false
    self.webView.accessibilityIgnoresInvertColors = true
    self.webView.scrollView.contentInsetAdjustmentBehavior = .never
    // Kept invisible until the player reports its first frame (see the
    // "playerStatus" handling below).
    self.webView.alpha = 0.0
    
    if #available(iOS 16.4, *) {
        #if DEBUG
        self.webView.isInspectable = true
        #endif
    }
    
    super.init()
    
    // Thumbnail signal: once real dimensions arrive, store them and re-run
    // layout with the last known size.
    self.imageNode.setSignal(internalMediaGridMessageVideo(postbox: postbox, userLocation: self.userLocation, videoReference: fileReference) |> map { [weak self] getSize, getData in
        Queue.mainQueue().async {
            if let strongSelf = self, strongSelf.dimensions == nil {
                if let dimensions = getSize() {
                    strongSelf.dimensions = dimensions
                    strongSelf.dimensionsPromise.set(dimensions)
                    if let validLayout = strongSelf.validLayout {
                        strongSelf.updateLayout(size: validLayout.size, actualSize: validLayout.actualSize, transition: .immediate)
                    }
                }
            }
        }
        return getData
    })
    
    self.addSubnode(self.imageNode)
    self.view.addSubview(self.webView)
    
    self.imageNode.imageUpdated = { [weak self] _ in
        self?._ready.set(.single(Void()))
    }
    
    self._bufferingStatus.set(.single(nil))
    
    // Main JS -> native event dispatcher; always hops to the main queue.
    handleScriptMessage = { [weak self] message in
        Queue.mainQueue().async {
            guard let self else {
                return
            }
            guard let body = message.body as? [String: Any] else {
                return
            }
            guard let eventName = body["event"] as? String else {
                return
            }
            
            switch eventName {
            case "playerStatus":
                guard let eventData = body["data"] as? [String: Any] else {
                    return
                }
                // Mirror the player state; missing keys reset to defaults.
                if let isReady = eventData["isReady"] as? Bool {
                    self.playerIsReady = isReady
                } else {
                    self.playerIsReady = false
                }
                if let isFirstFrameReady = eventData["isFirstFrameReady"] as? Bool {
                    self.playerIsFirstFrameReady = isFirstFrameReady
                } else {
                    self.playerIsFirstFrameReady = false
                }
                if let isPlaying = eventData["isPlaying"] as? Bool {
                    self.playerIsPlaying = isPlaying
                } else {
                    self.playerIsPlaying = false
                }
                if let rate = eventData["rate"] as? Double {
                    self.playerRate = rate
                } else {
                    self.playerRate = 0.0
                }
                if let defaultRate = eventData["defaultRate"] as? Double {
                    self.playerDefaultRate = defaultRate
                } else {
                    self.playerDefaultRate = 0.0
                }
                if let levels = eventData["levels"] as? [[String: Any]] {
                    self.playerAvailableLevels.removeAll()
                    
                    for level in levels {
                        guard let levelIndex = level["index"] as? Int else {
                            continue
                        }
                        guard let levelBitrate = level["bitrate"] as? Int else {
                            continue
                        }
                        guard let levelWidth = level["width"] as? Int else {
                            continue
                        }
                        guard let levelHeight = level["height"] as? Int else {
                            continue
                        }
                        self.playerAvailableLevels[levelIndex] = Level(
                            bitrate: levelBitrate,
                            width: levelWidth,
                            height: levelHeight
                        )
                    }
                } else {
                    self.playerAvailableLevels.removeAll()
                }
                
                self._isNativePictureInPictureActive.set(eventData["isPictureInPictureActive"] as? Bool ?? false)
                
                if let currentLevel = eventData["currentLevel"] as? Int {
                    if self.playerAvailableLevels[currentLevel] != nil {
                        self.playerCurrentLevelIndex = currentLevel
                    } else {
                        self.playerCurrentLevelIndex = nil
                    }
                } else {
                    self.playerCurrentLevelIndex = nil
                }
                
                // Reveal the web view only once a first frame exists.
                self.webView.alpha = self.playerIsFirstFrameReady ? 1.0 : 0.0
                
                if self.playerIsReady {
                    if !self.hasRequestedPlayerLoad {
                        if !self.playerAvailableLevels.isEmpty {
                            // Prefer the level matching the minimized-quality
                            // file's height; otherwise fall back to the
                            // highest available level.
                            var selectedLevelIndex: Int?
                            if let minimizedQualityFile = HLSVideoContent.minimizedHLSQuality(file: self.fileReference)?.file {
                                if let dimensions = minimizedQualityFile.media.dimensions {
                                    for (index, level) in self.playerAvailableLevels {
                                        if level.height == Int(dimensions.height) {
                                            selectedLevelIndex = index
                                            break
                                        }
                                    }
                                }
                            }
                            if selectedLevelIndex == nil {
                                selectedLevelIndex = self.playerAvailableLevels.sorted(by: { $0.value.height > $1.value.height }).first?.key
                            }
                            if let selectedLevelIndex {
                                self.hasRequestedPlayerLoad = true
                                self.webView.evaluateJavaScript("playerLoad(\(selectedLevelIndex));", completionHandler: nil)
                            }
                        }
                    }
                    
                    self.webView.evaluateJavaScript("playerSetBaseRate(\(self.requestedBaseRate));", completionHandler: nil)
                    
                    // Re-apply whatever playback state was requested before
                    // the player became ready.
                    if self.requestedPlaying {
                        self.requestPlay()
                    } else {
                        self.requestPause()
                    }
                }
                
                self.updateStatus()
            case "playerCurrentTime":
                guard let eventData = body["data"] as? [String: Any] else {
                    return
                }
                guard let value = eventData["value"] as? Double else {
                    return
                }
                self.playerTime = value
                self.playerTimeGenerationTimestamp = CACurrentMediaTime()
                
                // Persist the bandwidth estimate for future players, but
                // discard NaN/infinite values.
                var bandwidthEstimate = eventData["bandwidthEstimate"] as? Double
                if let bandwidthEstimateValue = bandwidthEstimate, bandwidthEstimateValue.isNaN || bandwidthEstimateValue.isInfinite {
                    bandwidthEstimate = nil
                }
                
                HLSVideoJSContentNode.sharedBandwidthEstimate = bandwidthEstimate
                
                self.updateStatus()
            default:
                break
            }
        }
    }
    
    // Register with the shared local HLS server, then point the web view at
    // the per-player index page it serves.
    if let playerSource = self.playerSource {
        self.serverDisposable = SharedHLSServer.shared.registerPlayer(source: playerSource, completion: { [weak self] in
            Queue.mainQueue().async {
                guard let self else {
                    return
                }
                
                let htmlUrl = "http://127.0.0.1:\(SharedHLSServer.shared.port)/\(playerSource.id)/index.html"
                self.webView.load(URLRequest(url: URL(string: htmlUrl)!))
            }
        })
    }
}
deinit {
    // Unregister from the shared HLS server, release any held audio session,
    // and stop the periodic status-update timer.
    self.serverDisposable?.dispose()
    self.audioSessionDisposable.dispose()
    self.statusTimer?.invalidate()
}
/// Recomputes the published `MediaPlayerStatus` from the requested state and
/// the player's last reported rate/time, and keeps a ~30 Hz refresh timer
/// alive while the status is `.playing`.
private func updateStatus() {
    // If playback was requested but the player reports a zero rate, surface
    // a buffering state rather than .playing.
    let status: MediaPlayerPlaybackStatus
    if self.requestedPlaying && self.playerRate == 0.0 {
        status = .buffering(initial: false, whilePlaying: self.requestedPlaying, progress: 0.0, display: true)
    } else if self.requestedPlaying {
        status = .playing
    } else {
        status = .paused
    }
    
    // Guard against NaN/infinite timestamps coming from the JS player.
    let timestamp = self.playerTime.isFinite ? self.playerTime : 0.0
    
    self.statusValue = MediaPlayerStatus(generationTimestamp: self.playerTimeGenerationTimestamp, duration: Double(self.approximateDuration), dimensions: CGSize(), timestamp: timestamp, baseRate: self.requestedBaseRate, seekId: self.seekId, status: status, soundEnabled: true)
    self._status.set(self.statusValue)
    
    if case .playing = status {
        if self.statusTimer == nil {
            self.statusTimer = Foundation.Timer.scheduledTimer(withTimeInterval: 1.0 / 30.0, repeats: true, block: { [weak self] _ in
                self?.updateStatus()
            })
        }
    } else if let statusTimer = self.statusTimer {
        self.statusTimer = nil
        statusTimer.invalidate()
    }
}
/// Invokes every registered playback-completion listener, in registration order.
private func performActionAtEnd() {
    self.playbackCompletedListeners.copyItems().forEach { $0() }
}
/// Lays out the web view (centered and scaled from its intrinsic size to the
/// layout width) and the thumbnail image node.
func updateLayout(size: CGSize, actualSize: CGSize, transition: ContainedViewLayoutTransition) {
    let center = CGPoint(x: size.width / 2.0, y: size.height / 2.0)
    transition.updatePosition(layer: self.webView.layer, position: center)
    transition.updateTransformScale(layer: self.webView.layer, scale: size.width / self.intrinsicDimensions.width)
    
    transition.updateFrame(node: self.imageNode, frame: CGRect(origin: CGPoint(), size: size))
    
    // Re-run the thumbnail layout at half the media dimensions, once known.
    guard let dimensions = self.dimensions else {
        return
    }
    let imageSize = CGSize(width: floor(dimensions.width / 2.0), height: floor(dimensions.height / 2.0))
    let arguments = TransformImageArguments(corners: ImageCorners(), imageSize: imageSize, boundingSize: imageSize, intrinsicInsets: UIEdgeInsets(), emptyColor: .clear)
    self.imageNode.asyncLayout()(arguments)()
}
func play() {
    assert(Queue.mainQueue().isCurrent())
    // On the very first play request, publish an initial buffering status so
    // observers see progress before the JS player reports anything.
    if !self.initializedStatus {
        self._status.set(MediaPlayerStatus(generationTimestamp: 0.0, duration: Double(self.approximateDuration), dimensions: CGSize(), timestamp: 0.0, baseRate: self.requestedBaseRate, seekId: self.seekId, status: .buffering(initial: true, whilePlaying: true, progress: 0.0, display: true), soundEnabled: true))
    }
    // NOTE(review): audio-session acquisition is currently disabled here — the
    // previous flow (push a .play session, requestPlay on activation, requestPause
    // on deactivation) was commented out upstream. Playback is requested directly.
    self.requestPlay()
}
private func requestPlay() {
    // Record the intent to play; the JS call is only issued once the player is ready.
    self.requestedPlaying = true
    defer {
        self.updateStatus()
    }
    guard self.playerIsReady else {
        return
    }
    self.webView.evaluateJavaScript("playerPlay();", completionHandler: nil)
}
private func requestPause() {
    // Record the intent to pause; the JS call is only issued once the player is ready.
    self.requestedPlaying = false
    defer {
        self.updateStatus()
    }
    guard self.playerIsReady else {
        return
    }
    self.webView.evaluateJavaScript("playerPause();", completionHandler: nil)
}
func pause() {
    // External pause entry point; must be invoked on the main queue.
    assert(Queue.mainQueue().isCurrent())
    self.requestPause()
}
func togglePlayPause() {
    // Flip between play and pause based on the last *requested* state, not the
    // actual player rate.
    assert(Queue.mainQueue().isCurrent())
    if self.requestedPlaying {
        self.requestPause()
    } else {
        self.play()
    }
}
func setSoundEnabled(_ value: Bool) {
    // Currently a no-op: the audio-session/volume handling below is disabled.
    // Muting is instead driven through the JS player (see setSoundMuted).
    assert(Queue.mainQueue().isCurrent())
    /*if value {
        if !self.hasAudioSession {
            self.audioSessionDisposable.set(self.audioSessionManager.push(audioSessionType: .play(mixWithOthers: false), activate: { [weak self] _ in
                self?.hasAudioSession = true
                self?.player?.volume = 1.0
            }, deactivate: { [weak self] _ in
                self?.hasAudioSession = false
                self?.player?.pause()
                return .complete()
            }))
        }
    } else {
        self.player?.volume = 0.0
        self.hasAudioSession = false
        self.audioSessionDisposable.set(nil)
    }*/
}
func seek(_ timestamp: Double) {
    assert(Queue.mainQueue().isCurrent())
    // Bump the seek generation so status consumers can discard stale positions.
    self.seekId += 1
    // NOTE(review): unlike play/pause/setBaseRate, this is sent without checking
    // playerIsReady — confirm the JS side tolerates an early playerSeek call.
    self.webView.evaluateJavaScript("playerSeek(\(timestamp));", completionHandler: nil)
}
func playOnceWithSound(playAndRecord: Bool, seek: MediaPlayerSeek, actionAtEnd: MediaPlayerPlayOnceWithSoundActionAtEnd) {
    // Unmute the JS player and start playback. The playAndRecord/seek/actionAtEnd
    // parameters are accepted for protocol conformance but not used here.
    self.webView.evaluateJavaScript("playerSetIsMuted(false);", completionHandler: nil)
    self.play()
}
func setSoundMuted(soundMuted: Bool) {
    // Forward the mute flag directly to the JS player.
    self.webView.evaluateJavaScript("playerSetIsMuted(\(soundMuted));", completionHandler: nil)
}
func continueWithOverridingAmbientMode(isAmbient: Bool) {
    // Intentionally empty in this implementation.
}
func setForceAudioToSpeaker(_ forceAudioToSpeaker: Bool) {
    // Intentionally empty in this implementation.
}
func continuePlayingWithoutSound(actionAtEnd: MediaPlayerPlayOnceWithSoundActionAtEnd) {
    // Release the audio session and keep the video running muted.
    // `actionAtEnd` is accepted for protocol conformance but not applied here.
    self.hasAudioSession = false
    self.audioSessionDisposable.set(nil)
    self.webView.evaluateJavaScript("playerSetIsMuted(true);", completionHandler: nil)
}
func setContinuePlayingWithoutSoundOnLostAudioSession(_ value: Bool) {
    // Intentionally empty in this implementation.
}
func setBaseRate(_ baseRate: Double) {
    // Remember the requested rate; the JS call is only issued once the player is ready.
    self.requestedBaseRate = baseRate
    defer {
        self.updateStatus()
    }
    guard self.playerIsReady else {
        return
    }
    self.webView.evaluateJavaScript("playerSetBaseRate(\(self.requestedBaseRate));", completionHandler: nil)
}
func setVideoQuality(_ videoQuality: UniversalVideoContentVideoQuality) {
    /// Records the quality preference and maps it to a concrete HLS level index.
    self.preferredVideoQuality = videoQuality
    
    switch videoQuality {
    case .auto:
        self.requestedLevelIndex = nil
    case let .quality(quality):
        // Dictionary iteration order is unspecified, so `first(where:)` could pick
        // a different level across runs when several levels share the requested
        // height. Choose the lowest-indexed match to make the selection deterministic;
        // falls back to nil (auto) when no level matches.
        self.requestedLevelIndex = self.playerAvailableLevels
            .filter { $0.value.height == quality }
            .keys
            .min()
    }
    
    if self.playerIsReady {
        // -1 tells the JS player to return to automatic level selection.
        self.webView.evaluateJavaScript("playerSetLevel(\(self.requestedLevelIndex ?? -1));", completionHandler: nil)
    }
}
func videoQualityState() -> (current: Int, preferred: UniversalVideoContentVideoQuality, available: [Int])? {
    // No state to report until the JS player has announced a current level that
    // is still present in the known level table.
    guard let currentIndex = self.playerCurrentLevelIndex,
          let currentLevel = self.playerAvailableLevels[currentIndex] else {
        return nil
    }
    // All available heights, highest first.
    let available = self.playerAvailableLevels.values.map(\.height).sorted(by: >)
    return (currentLevel.height, self.preferredVideoQuality, available)
}
func addPlaybackCompleted(_ f: @escaping () -> Void) -> Int {
    // Registers a completion listener; returns an index usable with removePlaybackCompleted.
    return self.playbackCompletedListeners.add(f)
}
func removePlaybackCompleted(_ index: Int) {
    // Removes the listener previously registered under `index`.
    self.playbackCompletedListeners.remove(index)
}
func fetchControl(_ control: UniversalVideoNodeFetchControl) {
    // Intentionally empty in this implementation.
}
func notifyPlaybackControlsHidden(_ hidden: Bool) {
    // Intentionally empty in this implementation.
}
func setCanPlaybackWithoutHierarchy(_ canPlaybackWithoutHierarchy: Bool) {
    // Intentionally empty in this implementation.
}
func enterNativePictureInPicture() -> Bool {
    // Forwards the PiP request to the JS player. Optimistically reports success:
    // the asynchronous JS call provides no synchronous result to check.
    self.webView.evaluateJavaScript("playerRequestPictureInPicture();", completionHandler: nil)
    return true
}
func exitNativePictureInPicture() {
    // Asks the JS player to leave picture-in-picture mode.
    self.webView.evaluateJavaScript("playerStopPictureInPicture();", completionHandler: nil)
}
}

View File

@ -1362,7 +1362,9 @@ final class HLSVideoJSNativeContentNode: ASDisplayNode, UniversalVideoContentNod
if let duration = mediaSource.duration {
var mappedRanges = RangeSet<Int64>()
for range in bufferedRanges.ranges {
mappedRanges.formUnion(RangeSet<Int64>(Int64(range.lowerBound * 1000.0) ..< Int64(range.upperBound * 1000.0)))
let rangeLower = max(0.0, range.lowerBound - 0.2)
let rangeUpper = min(duration, range.upperBound + 0.2)
mappedRanges.formUnion(RangeSet<Int64>(Int64(rangeLower * 1000.0) ..< Int64(rangeUpper * 1000.0)))
}
self._bufferingStatus.set(.single((mappedRanges, Int64(duration * 1000.0))))
}
@ -1492,7 +1494,7 @@ final class HLSVideoJSNativeContentNode: ASDisplayNode, UniversalVideoContentNod
case .stop:
self.player.actionAtEnd = .action(action)
}
self.player.continuePlayingWithoutSound()
self.player.continuePlayingWithoutSound(seek: .none)
}
func setContinuePlayingWithoutSoundOnLostAudioSession(_ value: Bool) {

View File

@ -36,6 +36,7 @@ public final class NativeVideoContent: UniversalVideoContent {
public let nativeId: NativeVideoContentId
public let userLocation: MediaResourceUserLocation
public let fileReference: FileMediaReference
public let limitedFileRange: Range<Int64>?
let imageReference: ImageMediaReference?
public let dimensions: CGSize
public let duration: Double
@ -108,11 +109,12 @@ public final class NativeVideoContent: UniversalVideoContent {
return file
}
public init(id: NativeVideoContentId, userLocation: MediaResourceUserLocation, fileReference: FileMediaReference, imageReference: ImageMediaReference? = nil, streamVideo: MediaPlayerStreaming = .none, loopVideo: Bool = false, enableSound: Bool = true, soundMuted: Bool = false, beginWithAmbientSound: Bool = false, mixWithOthers: Bool = false, baseRate: Double = 1.0, baseVideoQuality: UniversalVideoContentVideoQuality = .auto, fetchAutomatically: Bool = true, onlyFullSizeThumbnail: Bool = false, useLargeThumbnail: Bool = false, autoFetchFullSizeThumbnail: Bool = false, startTimestamp: Double? = nil, endTimestamp: Double? = nil, continuePlayingWithoutSoundOnLostAudioSession: Bool = false, placeholderColor: UIColor = .white, tempFilePath: String? = nil, isAudioVideoMessage: Bool = false, captureProtected: Bool = false, hintDimensions: CGSize? = nil, storeAfterDownload: (() -> Void)?, displayImage: Bool = true, hasSentFramesToDisplay: (() -> Void)? = nil) {
public init(id: NativeVideoContentId, userLocation: MediaResourceUserLocation, fileReference: FileMediaReference, limitedFileRange: Range<Int64>? = nil, imageReference: ImageMediaReference? = nil, streamVideo: MediaPlayerStreaming = .none, loopVideo: Bool = false, enableSound: Bool = true, soundMuted: Bool = false, beginWithAmbientSound: Bool = false, mixWithOthers: Bool = false, baseRate: Double = 1.0, baseVideoQuality: UniversalVideoContentVideoQuality = .auto, fetchAutomatically: Bool = true, onlyFullSizeThumbnail: Bool = false, useLargeThumbnail: Bool = false, autoFetchFullSizeThumbnail: Bool = false, startTimestamp: Double? = nil, endTimestamp: Double? = nil, continuePlayingWithoutSoundOnLostAudioSession: Bool = false, placeholderColor: UIColor = .white, tempFilePath: String? = nil, isAudioVideoMessage: Bool = false, captureProtected: Bool = false, hintDimensions: CGSize? = nil, storeAfterDownload: (() -> Void)?, displayImage: Bool = true, hasSentFramesToDisplay: (() -> Void)? = nil) {
self.id = id
self.nativeId = id
self.userLocation = userLocation
self.fileReference = fileReference
self.limitedFileRange = limitedFileRange
self.imageReference = imageReference
if var dimensions = fileReference.media.dimensions {
if let thumbnail = fileReference.media.previewRepresentations.first {
@ -154,7 +156,7 @@ public final class NativeVideoContent: UniversalVideoContent {
}
public func makeContentNode(accountId: AccountRecordId, postbox: Postbox, audioSession: ManagedAudioSession) -> UniversalVideoContentNode & ASDisplayNode {
return NativeVideoContentNode(postbox: postbox, audioSessionManager: audioSession, userLocation: self.userLocation, fileReference: self.fileReference, imageReference: self.imageReference, streamVideo: self.streamVideo, loopVideo: self.loopVideo, enableSound: self.enableSound, soundMuted: self.soundMuted, beginWithAmbientSound: self.beginWithAmbientSound, mixWithOthers: self.mixWithOthers, baseRate: self.baseRate, baseVideoQuality: self.baseVideoQuality, fetchAutomatically: self.fetchAutomatically, onlyFullSizeThumbnail: self.onlyFullSizeThumbnail, useLargeThumbnail: self.useLargeThumbnail, autoFetchFullSizeThumbnail: self.autoFetchFullSizeThumbnail, startTimestamp: self.startTimestamp, endTimestamp: self.endTimestamp, continuePlayingWithoutSoundOnLostAudioSession: self.continuePlayingWithoutSoundOnLostAudioSession, placeholderColor: self.placeholderColor, tempFilePath: self.tempFilePath, isAudioVideoMessage: self.isAudioVideoMessage, captureProtected: self.captureProtected, hintDimensions: self.hintDimensions, storeAfterDownload: self.storeAfterDownload, displayImage: self.displayImage, hasSentFramesToDisplay: self.hasSentFramesToDisplay)
return NativeVideoContentNode(postbox: postbox, audioSessionManager: audioSession, userLocation: self.userLocation, fileReference: self.fileReference, limitedFileRange: self.limitedFileRange, imageReference: self.imageReference, streamVideo: self.streamVideo, loopVideo: self.loopVideo, enableSound: self.enableSound, soundMuted: self.soundMuted, beginWithAmbientSound: self.beginWithAmbientSound, mixWithOthers: self.mixWithOthers, baseRate: self.baseRate, baseVideoQuality: self.baseVideoQuality, fetchAutomatically: self.fetchAutomatically, onlyFullSizeThumbnail: self.onlyFullSizeThumbnail, useLargeThumbnail: self.useLargeThumbnail, autoFetchFullSizeThumbnail: self.autoFetchFullSizeThumbnail, startTimestamp: self.startTimestamp, endTimestamp: self.endTimestamp, continuePlayingWithoutSoundOnLostAudioSession: self.continuePlayingWithoutSoundOnLostAudioSession, placeholderColor: self.placeholderColor, tempFilePath: self.tempFilePath, isAudioVideoMessage: self.isAudioVideoMessage, captureProtected: self.captureProtected, hintDimensions: self.hintDimensions, storeAfterDownload: self.storeAfterDownload, displayImage: self.displayImage, hasSentFramesToDisplay: self.hasSentFramesToDisplay)
}
public func isEqual(to other: UniversalVideoContent) -> Bool {
@ -175,6 +177,7 @@ private final class NativeVideoContentNode: ASDisplayNode, UniversalVideoContent
private let postbox: Postbox
private let userLocation: MediaResourceUserLocation
private let fileReference: FileMediaReference
private let limitedFileRange: Range<Int64>?
private let streamVideo: MediaPlayerStreaming
private let enableSound: Bool
private let soundMuted: Bool
@ -243,10 +246,11 @@ private final class NativeVideoContentNode: ASDisplayNode, UniversalVideoContent
private let hasSentFramesToDisplay: (() -> Void)?
init(postbox: Postbox, audioSessionManager: ManagedAudioSession, userLocation: MediaResourceUserLocation, fileReference: FileMediaReference, imageReference: ImageMediaReference?, streamVideo: MediaPlayerStreaming, loopVideo: Bool, enableSound: Bool, soundMuted: Bool, beginWithAmbientSound: Bool, mixWithOthers: Bool, baseRate: Double, baseVideoQuality: UniversalVideoContentVideoQuality, fetchAutomatically: Bool, onlyFullSizeThumbnail: Bool, useLargeThumbnail: Bool, autoFetchFullSizeThumbnail: Bool, startTimestamp: Double?, endTimestamp: Double?, continuePlayingWithoutSoundOnLostAudioSession: Bool = false, placeholderColor: UIColor, tempFilePath: String?, isAudioVideoMessage: Bool, captureProtected: Bool, hintDimensions: CGSize?, storeAfterDownload: (() -> Void)? = nil, displayImage: Bool, hasSentFramesToDisplay: (() -> Void)?) {
init(postbox: Postbox, audioSessionManager: ManagedAudioSession, userLocation: MediaResourceUserLocation, fileReference: FileMediaReference, limitedFileRange: Range<Int64>?, imageReference: ImageMediaReference?, streamVideo: MediaPlayerStreaming, loopVideo: Bool, enableSound: Bool, soundMuted: Bool, beginWithAmbientSound: Bool, mixWithOthers: Bool, baseRate: Double, baseVideoQuality: UniversalVideoContentVideoQuality, fetchAutomatically: Bool, onlyFullSizeThumbnail: Bool, useLargeThumbnail: Bool, autoFetchFullSizeThumbnail: Bool, startTimestamp: Double?, endTimestamp: Double?, continuePlayingWithoutSoundOnLostAudioSession: Bool = false, placeholderColor: UIColor, tempFilePath: String?, isAudioVideoMessage: Bool, captureProtected: Bool, hintDimensions: CGSize?, storeAfterDownload: (() -> Void)? = nil, displayImage: Bool, hasSentFramesToDisplay: (() -> Void)?) {
self.postbox = postbox
self.userLocation = userLocation
self.fileReference = fileReference
self.limitedFileRange = limitedFileRange
self.streamVideo = streamVideo
self.placeholderColor = placeholderColor
self.enableSound = enableSound
@ -275,7 +279,7 @@ private final class NativeVideoContentNode: ASDisplayNode, UniversalVideoContent
let selectedFile = NativeVideoContent.selectVideoQualityFile(file: fileReference.media, quality: self.baseVideoQuality)
self.player = MediaPlayer(audioSessionManager: audioSessionManager, postbox: postbox, userLocation: userLocation, userContentType: userContentType, resourceReference: fileReference.resourceReference(selectedFile.resource), tempFilePath: tempFilePath, streamable: streamVideo, video: true, preferSoftwareDecoding: false, playAutomatically: false, enableSound: enableSound, baseRate: baseRate, fetchAutomatically: fetchAutomatically, soundMuted: soundMuted, ambient: beginWithAmbientSound, mixWithOthers: mixWithOthers, continuePlayingWithoutSoundOnLostAudioSession: continuePlayingWithoutSoundOnLostAudioSession, storeAfterDownload: storeAfterDownload, isAudioVideoMessage: isAudioVideoMessage)
self.player = MediaPlayer(audioSessionManager: audioSessionManager, postbox: postbox, userLocation: userLocation, userContentType: userContentType, resourceReference: fileReference.resourceReference(selectedFile.resource), tempFilePath: tempFilePath, limitedFileRange: limitedFileRange, streamable: streamVideo, video: true, preferSoftwareDecoding: false, playAutomatically: false, enableSound: enableSound, baseRate: baseRate, fetchAutomatically: fetchAutomatically, soundMuted: soundMuted, ambient: beginWithAmbientSound, mixWithOthers: mixWithOthers, continuePlayingWithoutSoundOnLostAudioSession: continuePlayingWithoutSoundOnLostAudioSession, storeAfterDownload: storeAfterDownload, isAudioVideoMessage: isAudioVideoMessage)
var actionAtEndImpl: (() -> Void)?
if enableSound && !loopVideo {