HLS player updates

This commit is contained in:
Isaac 2024-10-11 20:11:16 +04:00
parent 47da6fde8a
commit c66400afb2
18 changed files with 1986 additions and 554 deletions

View File

@@ -8,7 +8,7 @@ extern int FFMPEG_CONSTANT_AVERROR_EOF;
@interface FFMpegAVIOContext : NSObject
- (instancetype _Nullable)initWithBufferSize:(int32_t)bufferSize opaqueContext:(void * const)opaqueContext readPacket:(int (* _Nullable)(void * _Nullable opaque, uint8_t * _Nullable buf, int buf_size))readPacket writePacket:(int (* _Nullable)(void * _Nullable opaque, uint8_t * _Nullable buf, int buf_size))writePacket seek:(int64_t (*)(void * _Nullable opaque, int64_t offset, int whence))seek isSeekable:(bool)isSeekable;
- (instancetype _Nullable)initWithBufferSize:(int32_t)bufferSize opaqueContext:(void * const _Nullable)opaqueContext readPacket:(int (* _Nullable)(void * _Nullable opaque, uint8_t * _Nullable buf, int buf_size))readPacket writePacket:(int (* _Nullable)(void * _Nullable opaque, uint8_t * _Nullable buf, int buf_size))writePacket seek:(int64_t (*)(void * _Nullable opaque, int64_t offset, int whence))seek isSeekable:(bool)isSeekable;
- (void *)impl;

View File

@@ -12,7 +12,7 @@ int FFMPEG_CONSTANT_AVERROR_EOF = AVERROR_EOF;
@implementation FFMpegAVIOContext
- (instancetype _Nullable)initWithBufferSize:(int32_t)bufferSize opaqueContext:(void * const)opaqueContext readPacket:(int (* _Nullable)(void * _Nullable opaque, uint8_t * _Nullable buf, int buf_size))readPacket writePacket:(int (* _Nullable)(void * _Nullable opaque, uint8_t * _Nullable buf, int buf_size))writePacket seek:(int64_t (*)(void * _Nullable opaque, int64_t offset, int whence))seek isSeekable:(bool)isSeekable {
- (instancetype _Nullable)initWithBufferSize:(int32_t)bufferSize opaqueContext:(void * const _Nullable)opaqueContext readPacket:(int (* _Nullable)(void * _Nullable opaque, uint8_t * _Nullable buf, int buf_size))readPacket writePacket:(int (* _Nullable)(void * _Nullable opaque, uint8_t * _Nullable buf, int buf_size))writePacket seek:(int64_t (*)(void * _Nullable opaque, int64_t offset, int whence))seek isSeekable:(bool)isSeekable {
self = [super init];
if (self != nil) {
void *avIoBuffer = av_malloc(bufferSize);

File diff suppressed because it is too large

View File

@@ -272,12 +272,12 @@ public final class FFMpegMediaFrameSource: NSObject, MediaFrameSource {
var videoBuffer: MediaTrackFrameBuffer?
if let audio = streamDescriptions.audio {
audioBuffer = MediaTrackFrameBuffer(frameSource: strongSelf, decoder: audio.decoder, type: .audio, duration: audio.duration, rotationAngle: 0.0, aspect: 1.0, stallDuration: strongSelf.stallDuration, lowWaterDuration: strongSelf.lowWaterDuration, highWaterDuration: strongSelf.highWaterDuration)
audioBuffer = MediaTrackFrameBuffer(frameSource: strongSelf, decoder: audio.decoder, type: .audio, startTime: audio.startTime, duration: audio.duration, rotationAngle: 0.0, aspect: 1.0, stallDuration: strongSelf.stallDuration, lowWaterDuration: strongSelf.lowWaterDuration, highWaterDuration: strongSelf.highWaterDuration)
}
var extraDecodedVideoFrames: [MediaTrackFrame] = []
if let video = streamDescriptions.video {
videoBuffer = MediaTrackFrameBuffer(frameSource: strongSelf, decoder: video.decoder, type: .video, duration: video.duration, rotationAngle: video.rotationAngle, aspect: video.aspect, stallDuration: strongSelf.stallDuration, lowWaterDuration: strongSelf.lowWaterDuration, highWaterDuration: strongSelf.highWaterDuration)
videoBuffer = MediaTrackFrameBuffer(frameSource: strongSelf, decoder: video.decoder, type: .video, startTime: video.startTime, duration: video.duration, rotationAngle: video.rotationAngle, aspect: video.aspect, stallDuration: strongSelf.stallDuration, lowWaterDuration: strongSelf.lowWaterDuration, highWaterDuration: strongSelf.highWaterDuration)
for videoFrame in streamDescriptions.extraVideoFrames {
if let decodedFrame = video.decoder.decode(frame: videoFrame) {
extraDecodedVideoFrames.append(decodedFrame)

View File

@@ -10,6 +10,7 @@ private struct StreamContext {
let codecContext: FFMpegAVCodecContext?
let fps: CMTime
let timebase: CMTime
let startTime: CMTime
let duration: CMTime
let decoder: MediaTrackFrameDecoder
let rotationAngle: Double
@@ -17,6 +18,7 @@
}
struct FFMpegMediaFrameSourceDescription {
let startTime: CMTime
let duration: CMTime
let decoder: MediaTrackFrameDecoder
let rotationAngle: Double
@@ -429,6 +431,14 @@ final class FFMpegMediaFrameSourceContext: NSObject {
duration = CMTimeMake(value: Int64.min, timescale: duration.timescale)
}
let startTime: CMTime
let rawStartTime = avFormatContext.startTime(atStreamIndex: streamIndex)
if rawStartTime == Int64(bitPattern: 0x8000000000000000 as UInt64) {
startTime = CMTime(value: 0, timescale: timebase.timescale)
} else {
startTime = CMTimeMake(value: rawStartTime, timescale: timebase.timescale)
}
let metrics = avFormatContext.metricsForStream(at: streamIndex)
let rotationAngle: Double = metrics.rotationAngle
@@ -439,24 +449,24 @@ final class FFMpegMediaFrameSourceContext: NSObject {
let codecContext = FFMpegAVCodecContext(codec: codec)
if avFormatContext.codecParams(atStreamIndex: streamIndex, to: codecContext) {
if codecContext.open() {
videoStream = StreamContext(index: Int(streamIndex), codecContext: codecContext, fps: fps, timebase: timebase, duration: duration, decoder: FFMpegMediaVideoFrameDecoder(codecContext: codecContext), rotationAngle: rotationAngle, aspect: aspect)
videoStream = StreamContext(index: Int(streamIndex), codecContext: codecContext, fps: fps, timebase: timebase, startTime: startTime, duration: duration, decoder: FFMpegMediaVideoFrameDecoder(codecContext: codecContext), rotationAngle: rotationAngle, aspect: aspect)
break
}
}
}
} else if codecId == FFMpegCodecIdMPEG4 {
if let videoFormat = FFMpegMediaFrameSourceContextHelpers.createFormatDescriptionFromMpeg4CodecData(UInt32(kCMVideoCodecType_MPEG4Video), metrics.width, metrics.height, metrics.extradata, metrics.extradataSize) {
videoStream = StreamContext(index: Int(streamIndex), codecContext: nil, fps: fps, timebase: timebase, duration: duration, decoder: FFMpegMediaPassthroughVideoFrameDecoder(videoFormat: videoFormat, rotationAngle: rotationAngle), rotationAngle: rotationAngle, aspect: aspect)
videoStream = StreamContext(index: Int(streamIndex), codecContext: nil, fps: fps, timebase: timebase, startTime: startTime, duration: duration, decoder: FFMpegMediaPassthroughVideoFrameDecoder(videoFormat: videoFormat, rotationAngle: rotationAngle), rotationAngle: rotationAngle, aspect: aspect)
break
}
} else if codecId == FFMpegCodecIdH264 {
if let videoFormat = FFMpegMediaFrameSourceContextHelpers.createFormatDescriptionFromAVCCodecData(UInt32(kCMVideoCodecType_H264), metrics.width, metrics.height, metrics.extradata, metrics.extradataSize) {
videoStream = StreamContext(index: Int(streamIndex), codecContext: nil, fps: fps, timebase: timebase, duration: duration, decoder: FFMpegMediaPassthroughVideoFrameDecoder(videoFormat: videoFormat, rotationAngle: rotationAngle), rotationAngle: rotationAngle, aspect: aspect)
videoStream = StreamContext(index: Int(streamIndex), codecContext: nil, fps: fps, timebase: timebase, startTime: startTime, duration: duration, decoder: FFMpegMediaPassthroughVideoFrameDecoder(videoFormat: videoFormat, rotationAngle: rotationAngle), rotationAngle: rotationAngle, aspect: aspect)
break
}
} else if codecId == FFMpegCodecIdHEVC {
if let videoFormat = FFMpegMediaFrameSourceContextHelpers.createFormatDescriptionFromHEVCCodecData(UInt32(kCMVideoCodecType_HEVC), metrics.width, metrics.height, metrics.extradata, metrics.extradataSize) {
videoStream = StreamContext(index: Int(streamIndex), codecContext: nil, fps: fps, timebase: timebase, duration: duration, decoder: FFMpegMediaPassthroughVideoFrameDecoder(videoFormat: videoFormat, rotationAngle: rotationAngle), rotationAngle: rotationAngle, aspect: aspect)
videoStream = StreamContext(index: Int(streamIndex), codecContext: nil, fps: fps, timebase: timebase, startTime: startTime, duration: duration, decoder: FFMpegMediaPassthroughVideoFrameDecoder(videoFormat: videoFormat, rotationAngle: rotationAngle), rotationAngle: rotationAngle, aspect: aspect)
break
}
}
@@ -484,7 +494,15 @@ final class FFMpegMediaFrameSourceContext: NSObject {
duration = CMTimeMake(value: Int64.min, timescale: duration.timescale)
}
audioStream = StreamContext(index: Int(streamIndex), codecContext: codecContext, fps: fps, timebase: timebase, duration: duration, decoder: FFMpegAudioFrameDecoder(codecContext: codecContext), rotationAngle: 0.0, aspect: 1.0)
let startTime: CMTime
let rawStartTime = avFormatContext.startTime(atStreamIndex: streamIndex)
if rawStartTime == Int64(bitPattern: 0x8000000000000000 as UInt64) {
startTime = CMTime(value: 0, timescale: timebase.timescale)
} else {
startTime = CMTimeMake(value: rawStartTime, timescale: timebase.timescale)
}
audioStream = StreamContext(index: Int(streamIndex), codecContext: codecContext, fps: fps, timebase: timebase, startTime: startTime, duration: duration, decoder: FFMpegAudioFrameDecoder(codecContext: codecContext), rotationAngle: 0.0, aspect: 1.0)
break
}
}
@@ -620,11 +638,11 @@ final class FFMpegMediaFrameSourceContext: NSObject {
var videoDescription: FFMpegMediaFrameSourceDescription?
if let audioStream = initializedState.audioStream {
audioDescription = FFMpegMediaFrameSourceDescription(duration: audioStream.duration, decoder: audioStream.decoder, rotationAngle: 0.0, aspect: 1.0)
audioDescription = FFMpegMediaFrameSourceDescription(startTime: audioStream.startTime, duration: audioStream.duration, decoder: audioStream.decoder, rotationAngle: 0.0, aspect: 1.0)
}
if let videoStream = initializedState.videoStream {
videoDescription = FFMpegMediaFrameSourceDescription(duration: videoStream.duration, decoder: videoStream.decoder, rotationAngle: videoStream.rotationAngle, aspect: videoStream.aspect)
videoDescription = FFMpegMediaFrameSourceDescription(startTime: videoStream.startTime, duration: videoStream.duration, decoder: videoStream.decoder, rotationAngle: videoStream.rotationAngle, aspect: videoStream.aspect)
}
var actualPts: CMTime = CMTimeMake(value: 0, timescale: 1)
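Note on the magic number above: 0x8000000000000000 is FFmpeg's AV_NOPTS_VALUE, the sentinel libavformat reports when a stream declares no start time. A minimal sketch of the same check, with an illustrative helper name that is not part of this commit:

import CoreMedia

// FFmpeg's AV_NOPTS_VALUE: "no presentation timestamp available".
private let avNoPtsValue = Int64(bitPattern: 0x8000000000000000 as UInt64)

// Maps a raw avformat start time to a CMTime, defaulting to zero when unset.
func streamStartTime(rawStartTime: Int64, timescale: CMTimeScale) -> CMTime {
    if rawStartTime == avNoPtsValue {
        return CMTime(value: 0, timescale: timescale)
    } else {
        return CMTimeMake(value: rawStartTime, timescale: timescale)
    }
}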

View File

@@ -232,7 +232,7 @@ private final class MediaPlayerContext {
if let loadedState = maybeLoadedState, let videoBuffer = loadedState.mediaBuffers.videoBuffer {
if let (extraVideoFrames, atTime) = loadedState.extraVideoFrames {
loadedState.extraVideoFrames = nil
return .restoreState(extraVideoFrames, atTime)
return .restoreState(frames: extraVideoFrames, atTimestamp: atTime, soft: false)
} else {
return videoBuffer.takeFrame()
}

View File

@@ -96,7 +96,12 @@ private func rendererInputProc(refCon: UnsafeMutableRawPointer, ioActionFlags: U
if !didSetRate {
context.state = .playing(rate: rate, didSetRate: true)
let masterClock = CMTimebaseCopySource(context.timebase)
CMTimebaseSetRateAndAnchorTime(context.timebase, rate: rate, anchorTime: CMTimeMake(value: sampleIndex, timescale: 44100), immediateSourceTime: CMSyncGetTime(masterClock))
let anchorTime = CMTimeMake(value: sampleIndex, timescale: 44100)
let immediateSourceTime = CMSyncGetTime(masterClock)
if anchorTime.seconds < CMTimebaseGetTime(context.timebase).seconds - 0.5 {
// No-op assert kept as a debugger landing spot for when the new anchor lags the timebase.
assert(true)
}
CMTimebaseSetRateAndAnchorTime(context.timebase, rate: rate, anchorTime: anchorTime, immediateSourceTime: immediateSourceTime)
updatedRate = context.updatedRate
} else {
context.renderTimestampTick += 1
@@ -165,6 +170,10 @@ private func rendererInputProc(refCon: UnsafeMutableRawPointer, ioActionFlags: U
break
}
}
} else {
#if DEBUG
print("No audio data")
#endif
}
if !context.notifiedLowWater {
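The call above anchors the audio timebase: it declares that media time sampleIndex/44100 coincides with the source clock's current time and advances at `rate` from there, and CMTimebaseSetRateAndAnchorTime applies rate and anchor atomically. A condensed sketch of the sequence, assuming the same 44.1 kHz sample clock as the code above:

import CoreMedia

// Anchors `timebase` so that media time sampleIndex/44100 equals "now" on the
// timebase's source clock, advancing at `rate` from that point.
func anchorPlayback(timebase: CMTimebase, sampleIndex: Int64, rate: Float64) {
    let masterClock = CMTimebaseCopySource(timebase)
    let anchorTime = CMTimeMake(value: sampleIndex, timescale: 44100)
    CMTimebaseSetRateAndAnchorTime(timebase, rate: rate, anchorTime: anchorTime, immediateSourceTime: CMSyncGetTime(masterClock))
}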

View File

@@ -180,12 +180,14 @@ public final class MediaPlayerNode: ASDisplayNode {
takeFrameQueue.async { [weak node] in
switch takeFrame() {
case let .restoreState(frames, atTime):
Queue.mainQueue().async {
guard let strongSelf = node, let videoLayer = strongSelf.videoLayer else {
return
case let .restoreState(frames, atTime, soft):
if !soft {
Queue.mainQueue().async {
guard let strongSelf = node, let videoLayer = strongSelf.videoLayer else {
return
}
videoLayer.flush()
}
videoLayer.flush()
}
for i in 0 ..< frames.count {
let frame = frames[i]
@@ -195,13 +197,17 @@ public final class MediaPlayerNode: ASDisplayNode {
let dict = attachments[0] as! NSMutableDictionary
if i == 0 {
CMSetAttachment(frame.sampleBuffer, key: kCMSampleBufferAttachmentKey_ResetDecoderBeforeDecoding as NSString, value: kCFBooleanTrue as AnyObject, attachmentMode: kCMAttachmentMode_ShouldPropagate)
CMSetAttachment(frame.sampleBuffer, key: kCMSampleBufferAttachmentKey_EndsPreviousSampleDuration as NSString, value: kCFBooleanTrue as AnyObject, attachmentMode: kCMAttachmentMode_ShouldPropagate)
if !soft {
CMSetAttachment(frame.sampleBuffer, key: kCMSampleBufferAttachmentKey_EndsPreviousSampleDuration as NSString, value: kCFBooleanTrue as AnyObject, attachmentMode: kCMAttachmentMode_ShouldPropagate)
}
}
if CMTimeCompare(frame.position, atTime) < 0 {
dict.setValue(kCFBooleanTrue as AnyObject, forKey: kCMSampleAttachmentKey_DoNotDisplay as NSString as String)
} else if CMTimeCompare(frame.position, atTime) == 0 {
dict.setValue(kCFBooleanTrue as AnyObject, forKey: kCMSampleAttachmentKey_DisplayImmediately as NSString as String)
dict.setValue(kCFBooleanTrue as AnyObject, forKey: kCMSampleBufferAttachmentKey_EndsPreviousSampleDuration as NSString as String)
if !soft {
if CMTimeCompare(frame.position, atTime) < 0 {
dict.setValue(kCFBooleanTrue as AnyObject, forKey: kCMSampleAttachmentKey_DoNotDisplay as NSString as String)
} else if CMTimeCompare(frame.position, atTime) == 0 {
dict.setValue(kCFBooleanTrue as AnyObject, forKey: kCMSampleAttachmentKey_DisplayImmediately as NSString as String)
dict.setValue(kCFBooleanTrue as AnyObject, forKey: kCMSampleBufferAttachmentKey_EndsPreviousSampleDuration as NSString as String)
}
}
Queue.mainQueue().async {
guard let strongSelf = node, let videoLayer = strongSelf.videoLayer else {
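During a hard (soft == false) restore, the attachments above control replay: the first re-enqueued sample resets the decoder and truncates the previous sample's duration, samples before the resume timestamp are decoded but hidden, and the sample at the resume point displays immediately. A compact sketch of that marking, using the same CoreMedia keys (the helper itself is illustrative, not part of this commit):

import CoreMedia

// Marks one sample buffer for re-enqueueing during a hard state restore.
func markForRestore(_ sampleBuffer: CMSampleBuffer, isFirst: Bool, position: CMTime, resumeAt: CMTime) {
    if isFirst {
        // Reset the decoder before this sample and cut off the previous sample now.
        CMSetAttachment(sampleBuffer, key: kCMSampleBufferAttachmentKey_ResetDecoderBeforeDecoding, value: kCFBooleanTrue, attachmentMode: kCMAttachmentMode_ShouldPropagate)
        CMSetAttachment(sampleBuffer, key: kCMSampleBufferAttachmentKey_EndsPreviousSampleDuration, value: kCFBooleanTrue, attachmentMode: kCMAttachmentMode_ShouldPropagate)
    }
    let attachments = CMSampleBufferGetSampleAttachmentsArray(sampleBuffer, createIfNecessary: true)! as NSArray
    let dict = attachments[0] as! NSMutableDictionary
    if CMTimeCompare(position, resumeAt) < 0 {
        // Prime the decoder with this sample, but never display it.
        dict.setValue(kCFBooleanTrue, forKey: kCMSampleAttachmentKey_DoNotDisplay as String)
    } else if CMTimeCompare(position, resumeAt) == 0 {
        dict.setValue(kCFBooleanTrue, forKey: kCMSampleAttachmentKey_DisplayImmediately as String)
    }
}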

View File

@@ -11,7 +11,7 @@ public enum MediaTrackFrameBufferStatus {
public enum MediaTrackFrameResult {
case noFrames
case skipFrame
case restoreState([MediaTrackFrame], CMTime)
case restoreState(frames: [MediaTrackFrame], atTimestamp: CMTime, soft: Bool)
case frame(MediaTrackFrame)
case finished
}
@@ -32,6 +32,7 @@ public final class MediaTrackFrameBuffer {
private let frameSource: MediaFrameSource
private let decoder: MediaTrackFrameDecoder
private let type: MediaTrackFrameType
public let startTime: CMTime
public let duration: CMTime
public let rotationAngle: Double
public let aspect: Double
@@ -46,10 +47,11 @@ public final class MediaTrackFrameBuffer {
private var bufferedUntilTime: CMTime?
private var isWaitingForLowWaterDuration: Bool = false
init(frameSource: MediaFrameSource, decoder: MediaTrackFrameDecoder, type: MediaTrackFrameType, duration: CMTime, rotationAngle: Double, aspect: Double, stallDuration: Double = 1.0, lowWaterDuration: Double = 2.0, highWaterDuration: Double = 3.0) {
init(frameSource: MediaFrameSource, decoder: MediaTrackFrameDecoder, type: MediaTrackFrameType, startTime: CMTime, duration: CMTime, rotationAngle: Double, aspect: Double, stallDuration: Double = 1.0, lowWaterDuration: Double = 2.0, highWaterDuration: Double = 3.0) {
self.frameSource = frameSource
self.type = type
self.decoder = decoder
self.startTime = startTime
self.duration = duration
self.rotationAngle = rotationAngle
self.aspect = aspect

View File

@@ -506,3 +506,113 @@ public final class SoftwareAudioSource {
}
}
}
public final class FFMpegMediaInfo {
public let startTime: CMTime
public let duration: CMTime
public init(startTime: CMTime, duration: CMTime) {
self.startTime = startTime
self.duration = duration
}
}
private final class FFMpegMediaInfoExtractContext {
let fd: Int32
let size: Int
init(fd: Int32, size: Int) {
self.fd = fd
self.size = size
}
}
private func FFMpegMediaInfoExtractContextReadPacketCallback(userData: UnsafeMutableRawPointer?, buffer: UnsafeMutablePointer<UInt8>?, bufferSize: Int32) -> Int32 {
let context = Unmanaged<FFMpegMediaInfoExtractContext>.fromOpaque(userData!).takeUnretainedValue()
let result = read(context.fd, buffer, Int(bufferSize))
if result == 0 {
return FFMPEG_CONSTANT_AVERROR_EOF
}
return Int32(result)
}
private func FFMpegMediaInfoExtractContextSeekCallback(userData: UnsafeMutableRawPointer?, offset: Int64, whence: Int32) -> Int64 {
let context = Unmanaged<FFMpegMediaInfoExtractContext>.fromOpaque(userData!).takeUnretainedValue()
if (whence & FFMPEG_AVSEEK_SIZE) != 0 {
return Int64(context.size)
} else {
lseek(context.fd, off_t(offset), SEEK_SET)
return offset
}
}
public func extractFFMpegMediaInfo(path: String) -> FFMpegMediaInfo? {
let _ = FFMpegMediaFrameSourceContextHelpers.registerFFMpegGlobals
var s = stat()
stat(path, &s)
let size = Int32(s.st_size)
let fd = open(path, O_RDONLY, S_IRUSR)
if fd < 0 {
return nil
}
defer {
close(fd)
}
let avFormatContext = FFMpegAVFormatContext()
let ioBufferSize = 64 * 1024
let context = FFMpegMediaInfoExtractContext(fd: fd, size: Int(size))
guard let avIoContext = FFMpegAVIOContext(bufferSize: Int32(ioBufferSize), opaqueContext: Unmanaged.passUnretained(context).toOpaque(), readPacket: FFMpegMediaInfoExtractContextReadPacketCallback, writePacket: nil, seek: FFMpegMediaInfoExtractContextSeekCallback, isSeekable: true) else {
return nil
}
avFormatContext.setIO(avIoContext)
if !avFormatContext.openInput() {
return nil
}
if !avFormatContext.findStreamInfo() {
return nil
}
var streamInfos: [(isVideo: Bool, info: FFMpegMediaInfo)] = []
for typeIndex in 0 ..< 2 {
let isVideo = typeIndex == 0
for streamIndexNumber in avFormatContext.streamIndices(for: isVideo ? FFMpegAVFormatStreamTypeVideo : FFMpegAVFormatStreamTypeAudio) {
let streamIndex = streamIndexNumber.int32Value
if avFormatContext.isAttachedPic(atStreamIndex: streamIndex) {
continue
}
let fpsAndTimebase = avFormatContext.fpsAndTimebase(forStreamIndex: streamIndex, defaultTimeBase: CMTimeMake(value: 1, timescale: 40000))
let (_, timebase) = (fpsAndTimebase.fps, fpsAndTimebase.timebase)
let startTime: CMTime
let rawStartTime = avFormatContext.startTime(atStreamIndex: streamIndex)
if rawStartTime == Int64(bitPattern: 0x8000000000000000 as UInt64) {
startTime = CMTime(value: 0, timescale: timebase.timescale)
} else {
startTime = CMTimeMake(value: rawStartTime, timescale: timebase.timescale)
}
var duration = CMTimeMake(value: avFormatContext.duration(atStreamIndex: streamIndex), timescale: timebase.timescale)
duration = CMTimeMaximum(CMTime(value: 0, timescale: duration.timescale), CMTimeSubtract(duration, startTime))
streamInfos.append((isVideo: isVideo, info: FFMpegMediaInfo(startTime: startTime, duration: duration)))
}
}
if let video = streamInfos.first(where: \.isVideo) {
return video.info
} else if let stream = streamInfos.first {
return stream.info
} else {
return nil
}
}
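The new extractFFMpegMediaInfo drives avformat through a custom AVIO context over a plain file descriptor; note the seek callback answering FFMPEG_AVSEEK_SIZE probes with the total file size, as the custom-IO contract requires. It prefers the video stream's info and falls back to the first stream found. Illustrative usage (the path is made up for the example):

// Illustrative call; the path is hypothetical.
if let info = extractFFMpegMediaInfo(path: "/path/to/fragment.mp4") {
    print("start \(info.startTime.seconds)s, duration \(info.duration.seconds)s")
}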

View File

@@ -114,4 +114,12 @@ final class VideoPlayerProxy {
nodeRef.release()
}
}
func flush() {
self.withContext { context in
if let context = context {
context.node?.reset()
}
}
}
}

File diff suppressed because one or more lines are too long

View File

@@ -5,5 +5,5 @@
<title>Development</title>
<meta name="viewport" content="width=device-width, initial-scale=1"></head>
<body>
<script src="runtime.bundle.js"></script><script src="index.bundle.js"></script><script src="print.bundle.js"></script></body>
<script src="runtime.bundle.js"></script><script src="index.bundle.js"></script></body>
</html>

View File

@@ -1,27 +0,0 @@
"use strict";
(self["webpackChunkmy3d"] = self["webpackChunkmy3d"] || []).push([["print"],{
/***/ "./src/print.js":
/*!**********************!*\
!*** ./src/print.js ***!
\**********************/
/***/ ((__unused_webpack_module, __webpack_exports__, __webpack_require__) => {
__webpack_require__.r(__webpack_exports__);
/* harmony export */ __webpack_require__.d(__webpack_exports__, {
/* harmony export */ "default": () => (/* binding */ printMe)
/* harmony export */ });
function printMe() {
console.log('I get called from print.js1234!');
}
/***/ })
},
/******/ __webpack_require__ => { // webpackRuntimeModules
/******/ var __webpack_exec__ = (moduleId) => (__webpack_require__(__webpack_require__.s = moduleId))
/******/ var __webpack_exports__ = (__webpack_exec__("./src/print.js"));
/******/ }
]);
//# sourceMappingURL=data:application/json;charset=utf-8;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoicHJpbnQuYnVuZGxlLmpzIiwibWFwcGluZ3MiOiI7Ozs7Ozs7Ozs7Ozs7QUFBZTtBQUNmO0FBQ0EiLCJzb3VyY2VzIjpbIndlYnBhY2s6Ly9teTNkLy4vc3JjL3ByaW50LmpzIl0sInNvdXJjZXNDb250ZW50IjpbImV4cG9ydCBkZWZhdWx0IGZ1bmN0aW9uIHByaW50TWUoKSB7XG4gIGNvbnNvbGUubG9nKCdJIGdldCBjYWxsZWQgZnJvbSBwcmludC5qczEyMzQhJyk7XG59XG4iXSwibmFtZXMiOltdLCJzb3VyY2VSb290IjoiIn0=

File diff suppressed because one or more lines are too long

View File

@@ -215,7 +215,7 @@ public final class HLSVideoContent: UniversalVideoContent {
public func makeContentNode(accountId: AccountRecordId, postbox: Postbox, audioSession: ManagedAudioSession) -> UniversalVideoContentNode & ASDisplayNode {
if #available(iOS 17.1, *) {
#if DEBUG
#if DEBUG || true
return HLSVideoJSNativeContentNode(accountId: accountId, postbox: postbox, audioSessionManager: audioSession, userLocation: self.userLocation, fileReference: self.fileReference, streamVideo: self.streamVideo, loopVideo: self.loopVideo, enableSound: self.enableSound, baseRate: self.baseRate, fetchAutomatically: self.fetchAutomatically)
#else
return HLSVideoJSContentNode(accountId: accountId, postbox: postbox, audioSessionManager: audioSession, userLocation: self.userLocation, fileReference: self.fileReference, streamVideo: self.streamVideo, loopVideo: self.loopVideo, enableSound: self.enableSound, baseRate: self.baseRate, fetchAutomatically: self.fetchAutomatically)

View File

@@ -15,6 +15,7 @@ import RangeSet
import AppBundle
import ManagedFile
import FFMpegBinding
import RangeSet
final class HLSJSServerSource: SharedHLSServer.Source {
let id: String
@@ -328,9 +329,8 @@ final class HLSVideoJSNativeContentNode: ASDisplayNode, UniversalVideoContentNod
private let imageNode: TransformImageNode
private let webView: WKWebView
private var testPlayer: AVPlayer?
private var controlledPlayer: ControlledPlayer?
private let playerNode: ASDisplayNode
private let player: ChunkMediaPlayer
private let playerNode: MediaPlayerNode
private let fetchDisposable = MetaDisposable()
@@ -349,7 +349,6 @@ final class HLSVideoJSNativeContentNode: ASDisplayNode, UniversalVideoContentNod
private var playerRate: Double = 0.0
private var playerDefaultRate: Double = 1.0
private var playerTime: Double = 0.0
private var playerTimeGenerationTimestamp: Double = 0.0
private var playerAvailableLevels: [Int: Level] = [:]
private var playerCurrentLevelIndex: Int?
@@ -359,11 +358,18 @@ final class HLSVideoJSNativeContentNode: ASDisplayNode, UniversalVideoContentNod
private var requestedBaseRate: Double = 1.0
private var requestedLevelIndex: Int?
private var videoElements: [Int: VideoElement] = [:]
private var mediaSources: [Int: MediaSource] = [:]
private var sourceBuffers: [Int: SourceBuffer] = [:]
private var didBecomeActiveObserver: NSObjectProtocol?
private var willResignActiveObserver: NSObjectProtocol?
private let chunkPlayerPartsState = Promise<ChunkMediaPlayerPartsState>(ChunkMediaPlayerPartsState(duration: nil, parts: []))
private var sourceBufferStateDisposable: Disposable?
private var playerStatusDisposable: Disposable?
init(accountId: AccountRecordId, postbox: Postbox, audioSessionManager: ManagedAudioSession, userLocation: MediaResourceUserLocation, fileReference: FileMediaReference, streamVideo: Bool, loopVideo: Bool, enableSound: Bool, baseRate: Double, fetchAutomatically: Bool) {
self.postbox = postbox
self.fileReference = fileReference
@@ -436,31 +442,24 @@ final class HLSVideoJSNativeContentNode: ASDisplayNode, UniversalVideoContentNod
#endif
}
if "".isEmpty {
let controlledPlayer = ControlledPlayer()
self.controlledPlayer = controlledPlayer
} else {
let testPlayer = AVPlayer(playerItem: nil)
if #available(iOS 16.0, *) {
testPlayer.defaultRate = Float(baseRate)
}
if !enableSound {
testPlayer.volume = 0.0
}
self.testPlayer = testPlayer
}
let targetPlayer = self.controlledPlayer?.player ?? self.testPlayer
self.playerNode = ASDisplayNode()
self.playerNode.setLayerBlock({
return AVPlayerLayer(player: targetPlayer)
})
self.player = ChunkMediaPlayer(
postbox: postbox,
audioSessionManager: audioSessionManager,
partsState: self.chunkPlayerPartsState.get(),
video: true,
enableSound: true,
baseRate: baseRate
)
self.playerNode = MediaPlayerNode()
self.player.attachPlayerNode(self.playerNode)
super.init()
self.playerNode.frame = CGRect(origin: CGPoint(), size: self.intrinsicDimensions)
self.imageNode.setSignal(internalMediaGridMessageVideo(postbox: postbox, userLocation: self.userLocation, videoReference: fileReference) |> map { [weak self] getSize, getData in
self.imageNode.setSignal(internalMediaGridMessageVideo(postbox: postbox, userLocation: self.userLocation, videoReference: fileReference, useLargeThumbnail: true, autoFetchFullSizeThumbnail: true) |> map { [weak self] getSize, getData in
Queue.mainQueue().async {
if let strongSelf = self, strongSelf.dimensions == nil {
if let dimensions = getSize() {
@@ -651,7 +650,6 @@ final class HLSVideoJSNativeContentNode: ASDisplayNode, UniversalVideoContentNod
}
self.playerTime = value
self.playerTimeGenerationTimestamp = CACurrentMediaTime()
var bandwidthEstimate = eventData["bandwidthEstimate"] as? Double
if let bandwidthEstimateValue = bandwidthEstimate, bandwidthEstimateValue.isNaN || bandwidthEstimateValue.isInfinite {
@@ -662,7 +660,8 @@ final class HLSVideoJSNativeContentNode: ASDisplayNode, UniversalVideoContentNod
self.updateStatus()
self.controlledPlayer?.currentReferenceTime = value
//TODO
//self.controlledPlayer?.currentReferenceTime = value
default:
break
}
@@ -683,16 +682,25 @@ final class HLSVideoJSNativeContentNode: ASDisplayNode, UniversalVideoContentNod
}
self.didBecomeActiveObserver = NotificationCenter.default.addObserver(forName: UIApplication.willEnterForegroundNotification, object: nil, queue: nil, using: { [weak self] _ in
guard let strongSelf = self, let layer = strongSelf.playerNode.layer as? AVPlayerLayer else {
return
}
layer.player = strongSelf.controlledPlayer?.player ?? strongSelf.testPlayer
let _ = self
})
self.willResignActiveObserver = NotificationCenter.default.addObserver(forName: UIApplication.didEnterBackgroundNotification, object: nil, queue: nil, using: { [weak self] _ in
guard let strongSelf = self, let layer = strongSelf.playerNode.layer as? AVPlayerLayer else {
let _ = self
})
self.playerStatusDisposable = (self.player.status
|> deliverOnMainQueue).startStrict(next: { [weak self] status in
guard let self else {
return
}
layer.player = nil
self.updatePlayerStatus(status: status)
})
self.statusTimer = Foundation.Timer.scheduledTimer(withTimeInterval: 1.0 / 25.0, repeats: true, block: { [weak self] _ in
guard let self else {
return
}
self.updateStatus()
})
}
@@ -708,6 +716,9 @@ final class HLSVideoJSNativeContentNode: ASDisplayNode, UniversalVideoContentNod
self.audioSessionDisposable.dispose()
self.statusTimer?.invalidate()
self.sourceBufferStateDisposable?.dispose()
self.playerStatusDisposable?.dispose()
}
private func bridgeInvoke(
@@ -717,7 +728,49 @@ final class HLSVideoJSNativeContentNode: ASDisplayNode, UniversalVideoContentNod
params: [String: Any],
completion: @escaping ([String: Any]) -> Void
) {
if (className == "SourceBuffer") {
if (className == "VideoElement") {
if (methodName == "constructor") {
let videoElement = VideoElement()
self.videoElements[bridgeId] = videoElement
completion([:])
} else if (methodName == "setCurrentTime") {
guard let currentTime = params["currentTime"] as? Double else {
assertionFailure()
return
}
self.player.seek(timestamp: currentTime)
completion([:])
} else if (methodName == "play") {
self.player.play()
completion([:])
} else if (methodName == "pause") {
self.player.pause()
completion([:])
}
} else if (className == "MediaSource") {
if (methodName == "constructor") {
let mediaSource = MediaSource()
self.mediaSources[bridgeId] = mediaSource
completion([:])
} else if (methodName == "setDuration") {
guard let duration = params["duration"] as? Double else {
assertionFailure()
return
}
guard let mediaSource = self.mediaSources[bridgeId] else {
assertionFailure()
return
}
if mediaSource.duration != duration {
mediaSource.duration = duration
if let sourceBuffer = self.sourceBuffers.first?.value {
self.chunkPlayerPartsState.set(.single(ChunkMediaPlayerPartsState(duration: self.mediaSources.first?.value.duration, parts: sourceBuffer.items)))
}
}
completion([:])
}
} else if (className == "SourceBuffer") {
if (methodName == "constructor") {
guard let mimeType = params["mimeType"] as? String else {
assertionFailure()
@@ -725,7 +778,19 @@ final class HLSVideoJSNativeContentNode: ASDisplayNode, UniversalVideoContentNod
}
let sourceBuffer = SourceBuffer(mimeType: mimeType)
self.sourceBuffers[bridgeId] = sourceBuffer
self.controlledPlayer?.setSourceBuffer(sourceBuffer: sourceBuffer)
self.chunkPlayerPartsState.set(.single(ChunkMediaPlayerPartsState(duration: self.mediaSources.first?.value.duration, parts: sourceBuffer.items)))
if self.sourceBufferStateDisposable == nil {
self.sourceBufferStateDisposable = (sourceBuffer.updated.signal()
|> deliverOnMainQueue).startStrict(next: { [weak self, weak sourceBuffer] _ in
guard let self, let sourceBuffer else {
return
}
self.chunkPlayerPartsState.set(.single(ChunkMediaPlayerPartsState(duration: self.mediaSources.first?.value.duration, parts: sourceBuffer.items)))
self.updateBuffered()
})
}
completion([:])
} else if (methodName == "appendBuffer") {
guard let base64Data = params["data"] as? String else {
@@ -740,36 +805,8 @@ final class HLSVideoJSNativeContentNode: ASDisplayNode, UniversalVideoContentNod
assertionFailure()
return
}
sourceBuffer.appendBuffer(data: data, completion: { result in
if let result {
completion([
"rangeStart": result.0,
"rangeEnd": result.1
])
if let sourceBuffer = self.sourceBuffers[bridgeId], let testPlayer = self.testPlayer {
var rangeEnd: Double = 0.0
for item in sourceBuffer.items {
rangeEnd += item.endTime - item.startTime
}
if rangeEnd >= 30.0 && testPlayer.currentItem == nil {
let tempFile = TempBox.shared.tempFile(fileName: "data.mp4")
if let initializationData = sourceBuffer.initializationData, let outputFile = ManagedFile(queue: nil, path: tempFile.path, mode: .readwrite) {
let _ = outputFile.write(initializationData)
for item in sourceBuffer.items.sorted(by: { $0.startTime < $1.startTime }) {
let _ = outputFile.write(item.rawData)
}
outputFile._unsafeClose()
let playerItem = AVPlayerItem(url: URL(fileURLWithPath: tempFile.path))
testPlayer.replaceCurrentItem(with: playerItem)
testPlayer.play()
}
}
}
} else {
completion([:])
}
sourceBuffer.appendBuffer(data: data, completion: { bufferedRanges in
completion(["ranges": serializeRanges(bufferedRanges)])
})
} else if methodName == "remove" {
guard let start = params["start"] as? Double, let end = params["end"] as? Double else {
@@ -780,43 +817,71 @@ final class HLSVideoJSNativeContentNode: ASDisplayNode, UniversalVideoContentNod
assertionFailure()
return
}
sourceBuffer.remove(start: start, end: end)
sourceBuffer.remove(start: start, end: end, completion: { bufferedRanges in
completion(["ranges": serializeRanges(bufferedRanges)])
})
} else if methodName == "abort" {
guard let sourceBuffer = self.sourceBuffers[bridgeId] else {
assertionFailure()
return
}
sourceBuffer.abortOperation()
completion([:])
}
}
}
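bridgeInvoke is the native half of a Media Source Extensions shim: hls.js runs inside the WKWebView against stand-in VideoElement/MediaSource/SourceBuffer objects, and each shim method crosses the bridge as a class name, method name, object id, and parameter dictionary. A minimal sketch of how such plumbing could look on the WebKit side; the message shape and the window.bridgeInvokeCallback name are assumptions for illustration, not confirmed by this diff:

import Foundation
import WebKit

// Hypothetical script-message handler feeding bridgeInvoke above.
final class BridgeMessageHandler: NSObject, WKScriptMessageHandler {
    typealias Completion = ([String: Any]) -> Void

    private let invoke: (String, String, Int, [String: Any], Completion) -> Void

    init(invoke: @escaping (String, String, Int, [String: Any], Completion) -> Void) {
        self.invoke = invoke
        super.init()
    }

    func userContentController(_ userContentController: WKUserContentController, didReceive message: WKScriptMessage) {
        guard let body = message.body as? [String: Any],
              let className = body["className"] as? String,
              let methodName = body["methodName"] as? String,
              let bridgeId = body["bridgeId"] as? Int,
              let callbackId = body["callbackId"] as? Int else {
            return
        }
        let params = body["params"] as? [String: Any] ?? [:]
        self.invoke(className, methodName, bridgeId, params, { result in
            // Serialize the completion payload back into the page (callback name is hypothetical).
            guard let data = try? JSONSerialization.data(withJSONObject: result), let json = String(data: data, encoding: .utf8) else {
                return
            }
            message.webView?.evaluateJavaScript("window.bridgeInvokeCallback(\(callbackId), \(json));", completionHandler: nil)
        })
    }
}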
private func updateStatus() {
let isPlaying = self.requestedPlaying && self.playerRate != 0.0
let status: MediaPlayerPlaybackStatus
if self.requestedPlaying && !isPlaying {
status = .buffering(initial: false, whilePlaying: self.requestedPlaying, progress: 0.0, display: true)
} else {
status = self.requestedPlaying ? .playing : .paused
}
var timestamp = self.playerTime
if timestamp.isFinite && !timestamp.isNaN {
} else {
timestamp = 0.0
}
self.statusValue = MediaPlayerStatus(generationTimestamp: self.playerTimeGenerationTimestamp, duration: Double(self.approximateDuration), dimensions: CGSize(), timestamp: timestamp, baseRate: self.requestedBaseRate, seekId: self.seekId, status: status, soundEnabled: true)
self._status.set(self.statusValue)
private func updatePlayerStatus(status: MediaPlayerStatus) {
self._status.set(status)
if case .playing = status {
if self.statusTimer == nil {
self.statusTimer = Foundation.Timer.scheduledTimer(withTimeInterval: 1.0 / 30.0, repeats: true, block: { [weak self] _ in
guard let self else {
return
}
self.updateStatus()
})
if let (bridgeId, _) = self.videoElements.first {
var isPlaying: Bool = false
var isBuffering = false
switch status.status {
case .playing:
isPlaying = true
case .paused:
break
case let .buffering(_, whilePlaying, _, _):
isPlaying = whilePlaying
isBuffering = true
}
} else if let statusTimer = self.statusTimer {
self.statusTimer = nil
statusTimer.invalidate()
let result: [String: Any] = [
"isPlaying": isPlaying,
"isWaiting": isBuffering,
"currentTime": status.timestamp
]
let jsonResult = try! JSONSerialization.data(withJSONObject: result)
let jsonResultString = String(data: jsonResult, encoding: .utf8)!
self.webView.evaluateJavaScript("window.bridgeObjectMap[\(bridgeId)].bridgeUpdateStatus(\(jsonResultString));", completionHandler: nil)
}
}
private func updateBuffered() {
let bufferedRanges = self.sourceBuffers.first?.value.ranges ?? RangeSet()
if let (bridgeId, _) = self.videoElements.first {
let result = serializeRanges(bufferedRanges)
let jsonResult = try! JSONSerialization.data(withJSONObject: result)
let jsonResultString = String(data: jsonResult, encoding: .utf8)!
self.webView.evaluateJavaScript("window.bridgeObjectMap[\(bridgeId)].bridgeUpdateBuffered(\(jsonResultString));", completionHandler: nil)
}
if let duration = self.mediaSources.first?.value.duration {
var mappedRanges = RangeSet<Int64>()
for range in bufferedRanges.ranges {
mappedRanges.formUnion(RangeSet<Int64>(Int64(range.lowerBound * 1000.0) ..< Int64(range.upperBound * 1000.0)))
}
self._bufferingStatus.set(.single((mappedRanges, Int64(duration * 1000.0))))
}
}
private func updateStatus() {
}
private func performActionAtEnd() {
for listener in self.playbackCompletedListeners.copyItems() {
listener()
@@ -1023,6 +1088,27 @@ final class HLSVideoJSNativeContentNode: ASDisplayNode, UniversalVideoContentNod
}
}
private func serializeRanges(_ ranges: RangeSet<Double>) -> [Double] {
var result: [Double] = []
for range in ranges.ranges {
result.append(range.lowerBound)
result.append(range.upperBound)
}
return result
}
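Buffered ranges cross the bridge as a flat [start0, end0, start1, end1, …] array of seconds, which the page-side shim can rebuild into a TimeRanges-like object; updateBuffered additionally maps the same set to integer milliseconds for the _bufferingStatus signal. For example:

// Two disjoint buffered spans serialize to a flat four-element array.
var ranges = RangeSet<Double>()
ranges.formUnion(RangeSet(0.0 ..< 10.0))
ranges.formUnion(RangeSet(20.0 ..< 30.5))
let flat = serializeRanges(ranges) // [0.0, 10.0, 20.0, 30.5]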
private final class VideoElement {
init() {
}
}
private final class MediaSource {
var duration: Double?
init() {
}
}
private final class SourceBuffer {
private static let sharedQueue = Queue(name: "SourceBuffer")
@@ -1033,27 +1119,47 @@ private final class SourceBuffer {
let endTime: Double
let rawData: Data
var clippedStartTime: Double
var clippedEndTime: Double
init(tempFile: TempBoxFile, asset: AVURLAsset, startTime: Double, endTime: Double, rawData: Data) {
self.tempFile = tempFile
self.asset = asset
self.startTime = startTime
self.endTime = endTime
self.rawData = rawData
self.clippedStartTime = startTime
self.clippedEndTime = endTime
}
func removeRange(start: Double, end: Double) {
//TODO
}
}
let mimeType: String
var initializationData: Data?
var items: [Item] = []
var items: [ChunkMediaPlayerPart] = []
var ranges = RangeSet<Double>()
let updated = ValuePipe<Void>()
private var currentUpdateId: Int = 0
init(mimeType: String) {
self.mimeType = mimeType
}
func appendBuffer(data: Data, completion: @escaping ((Double, Double)?) -> Void) {
func abortOperation() {
self.currentUpdateId += 1
}
func appendBuffer(data: Data, completion: @escaping (RangeSet<Double>) -> Void) {
let initializationData = self.initializationData
self.currentUpdateId += 1
let updateId = self.currentUpdateId
SourceBuffer.sharedQueue.async { [weak self] in
let tempFile = TempBox.shared.tempFile(fileName: "data.mp4")
@@ -1064,32 +1170,45 @@ private final class SourceBuffer {
combinedData.append(data)
guard let _ = try? combinedData.write(to: URL(fileURLWithPath: tempFile.path), options: .atomic) else {
Queue.mainQueue().async {
completion(nil)
guard let self else {
completion(RangeSet())
return
}
if self.currentUpdateId != updateId {
return
}
completion(self.ranges)
}
return
}
if let fragmentInfo = parseFragment(filePath: tempFile.path) {
if let fragmentInfo = extractFFMpegMediaInfo(path: tempFile.path) {
Queue.mainQueue().async {
guard let self else {
completion(nil)
completion(RangeSet())
return
}
if self.currentUpdateId != updateId {
return
}
if fragmentInfo.duration.value == 0 {
self.initializationData = data
completion((0.0, 0.0))
completion(self.ranges)
} else {
let item = Item(
tempFile: tempFile,
asset: AVURLAsset(url: URL(fileURLWithPath: tempFile.path)),
startTime: round(fragmentInfo.offset.seconds * 1000.0) / 1000.0,
endTime: round((fragmentInfo.offset.seconds + fragmentInfo.duration.seconds) * 1000.0) / 1000.0,
rawData: data
let item = ChunkMediaPlayerPart(
startTime: fragmentInfo.startTime.seconds,
endTime: fragmentInfo.startTime.seconds + fragmentInfo.duration.seconds,
file: tempFile
)
self.items.append(item)
self.updateRanges()
completion((item.startTime, item.endTime))
completion(self.ranges)
self.updated.putNext(Void())
}
@@ -1097,14 +1216,23 @@ private final class SourceBuffer {
} else {
assertionFailure()
Queue.mainQueue().async {
completion(nil)
guard let self else {
completion(RangeSet())
return
}
if self.currentUpdateId != updateId {
return
}
completion(self.ranges)
}
return
}
}
}
func remove(start: Double, end: Double) {
func remove(start: Double, end: Double, completion: @escaping (RangeSet<Double>) -> Void) {
self.items.removeAll(where: { item in
if item.startTime >= start && item.endTime <= end {
return true
@@ -1112,101 +1240,23 @@ private final class SourceBuffer {
return false
}
})
self.updateRanges()
completion(self.ranges)
self.updated.putNext(Void())
}
private func updateRanges() {
self.ranges = RangeSet()
for item in self.items {
let itemStartTime = round(item.startTime * 1000.0) / 1000.0
let itemEndTime = round(item.endTime * 1000.0) / 1000.0
self.ranges.formUnion(RangeSet<Double>(itemStartTime ..< itemEndTime))
}
}
}
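updateRanges snaps part boundaries to whole milliseconds before taking the union: without that, floating-point noise in fragment timestamps can leave hairline gaps between back-to-back parts and split one contiguous buffered range in two. A small illustration with made-up values:

// Adjacent fragments whose boundaries agree only up to float noise still
// coalesce once both are rounded to milliseconds.
var set = RangeSet<Double>()
for (start, end) in [(0.0, 2.0019999999), (2.002, 4.004)] {
    let roundedStart = round(start * 1000.0) / 1000.0
    let roundedEnd = round(end * 1000.0) / 1000.0
    set.formUnion(RangeSet(roundedStart ..< roundedEnd))
}
// Expected: set.ranges holds the single range 0.0 ..< 4.004.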
private func parseFragment(filePath: String) -> (offset: CMTime, duration: CMTime)? {
let source = SoftwareVideoSource(path: filePath, hintVP9: false, unpremultiplyAlpha: false)
return source.readTrackInfo()
}
private final class ControlledPlayer {
let player: AVPlayer
private var sourceBuffer: SourceBuffer?
private var sourceBufferUpdatedDisposable: Disposable?
var currentReferenceTime: Double?
private var currentItem: SourceBuffer.Item?
private var updateLink: SharedDisplayLinkDriver.Link?
init() {
self.player = AVPlayer(playerItem: nil)
self.updateLink = SharedDisplayLinkDriver.shared.add { [weak self] _ in
guard let self else {
return
}
self.update()
}
}
deinit {
self.sourceBufferUpdatedDisposable?.dispose()
}
func setSourceBuffer(sourceBuffer: SourceBuffer) {
if self.sourceBuffer === sourceBuffer {
return
}
self.sourceBufferUpdatedDisposable?.dispose()
self.sourceBuffer = sourceBuffer
self.sourceBufferUpdatedDisposable = (sourceBuffer.updated.signal()
|> deliverOnMainQueue).start(next: { [weak self] _ in
guard let self else {
return
}
self.update()
})
}
private func update() {
guard let sourceBuffer = self.sourceBuffer else {
return
}
guard let currentReferenceTime = self.currentReferenceTime else {
return
}
var replaceItem = false
if let currentItem = self.currentItem {
if currentReferenceTime < currentItem.startTime || currentReferenceTime > currentItem.endTime {
replaceItem = true
}
} else {
replaceItem = true
}
if replaceItem {
let item = sourceBuffer.items.last(where: { item in
if currentReferenceTime >= item.startTime && currentReferenceTime <= item.endTime {
return true
} else {
return false
}
})
if let item {
self.currentItem = item
let playerItem = AVPlayerItem(asset: item.asset)
self.player.replaceCurrentItem(with: playerItem)
self.player.seek(to: CMTime(seconds: currentReferenceTime - item.startTime, preferredTimescale: 240), toleranceBefore: CMTime.zero, toleranceAfter: CMTime.zero, completionHandler: { _ in })
self.player.play()
} else if self.player.currentItem != nil {
self.player.replaceCurrentItem(with: nil)
}
}
}
func play() {
self.player.play()
}
func pause() {
self.player.pause()
}
func seek(timestamp: Double) {
}
}

View File

@@ -68,11 +68,11 @@ public final class NativeVideoContent: UniversalVideoContent {
return true
}
if videoCodec == "av1" {
/*if videoCodec == "av1" {
if isAv1Supported {
return true
}
}
}*/
return false
}