Support pure streaming

commit 54dc2eae57
parent 82f511c8a5
Author: Ali, 2023-05-31 21:25:51 +04:00
10 changed files with 61 additions and 27 deletions

View File

@@ -6,7 +6,7 @@ NS_ASSUME_NONNULL_BEGIN
@interface FFMpegAVIOContext : NSObject
- (instancetype _Nullable)initWithBufferSize:(int32_t)bufferSize opaqueContext:(void * const)opaqueContext readPacket:(int (* _Nullable)(void * _Nullable opaque, uint8_t * _Nullable buf, int buf_size))readPacket writePacket:(int (* _Nullable)(void * _Nullable opaque, uint8_t * _Nullable buf, int buf_size))writePacket seek:(int64_t (*)(void * _Nullable opaque, int64_t offset, int whence))seek;
- (instancetype _Nullable)initWithBufferSize:(int32_t)bufferSize opaqueContext:(void * const)opaqueContext readPacket:(int (* _Nullable)(void * _Nullable opaque, uint8_t * _Nullable buf, int buf_size))readPacket writePacket:(int (* _Nullable)(void * _Nullable opaque, uint8_t * _Nullable buf, int buf_size))writePacket seek:(int64_t (*)(void * _Nullable opaque, int64_t offset, int whence))seek isSeekable:(bool)isSeekable;
- (void *)impl;

View File

@@ -10,7 +10,7 @@
@implementation FFMpegAVIOContext
- (instancetype _Nullable)initWithBufferSize:(int32_t)bufferSize opaqueContext:(void * const)opaqueContext readPacket:(int (* _Nullable)(void * _Nullable opaque, uint8_t * _Nullable buf, int buf_size))readPacket writePacket:(int (* _Nullable)(void * _Nullable opaque, uint8_t * _Nullable buf, int buf_size))writePacket seek:(int64_t (*)(void * _Nullable opaque, int64_t offset, int whence))seek {
- (instancetype _Nullable)initWithBufferSize:(int32_t)bufferSize opaqueContext:(void * const)opaqueContext readPacket:(int (* _Nullable)(void * _Nullable opaque, uint8_t * _Nullable buf, int buf_size))readPacket writePacket:(int (* _Nullable)(void * _Nullable opaque, uint8_t * _Nullable buf, int buf_size))writePacket seek:(int64_t (*)(void * _Nullable opaque, int64_t offset, int whence))seek isSeekable:(bool)isSeekable {
self = [super init];
if (self != nil) {
void *avIoBuffer = av_malloc(bufferSize);
@@ -20,7 +20,10 @@
return nil;
}
_impl->direct = 1;
//_impl->seekable = 0;
if (!isSeekable) {
_impl->seekable = 0;
}
}
return self;
}

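Note: the new isSeekable flag is forwarded into FFmpeg's AVIOContext. Clearing its seekable field advertises the input as non-seekable, so FFmpeg's demuxers avoid repositioning the stream and read strictly sequentially (pure streaming). Below is a minimal caller sketch from the Swift side, assuming FFMpegBinding is imported and hypothetical readCallback/seekCallback C-convention callbacks are defined elsewhere in the owning class:

    let ioBufferSize = 64 * 1024
    // isSeekable: false clears AVIOContext.seekable; the seek callback is still
    // installed, but FFmpeg will not use it to reposition a non-seekable stream.
    guard let avIoContext = FFMpegAVIOContext(
        bufferSize: Int32(ioBufferSize),
        opaqueContext: Unmanaged.passUnretained(self).toOpaque(),
        readPacket: readCallback,
        writePacket: nil,
        seek: seekCallback,
        isSeekable: false
    ) else {
        return // buffer or context allocation failed
    }
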
View File

@@ -73,6 +73,7 @@ public final class FFMpegMediaFrameSource: NSObject, MediaFrameSource {
private let resourceReference: MediaResourceReference
private let tempFilePath: String?
private let streamable: Bool
private let isSeekable: Bool
private let stallDuration: Double
private let lowWaterDuration: Double
private let highWaterDuration: Double
@@ -101,7 +102,7 @@ public final class FFMpegMediaFrameSource: NSObject, MediaFrameSource {
}
}
public init(queue: Queue, postbox: Postbox, userLocation: MediaResourceUserLocation, userContentType: MediaResourceUserContentType, resourceReference: MediaResourceReference, tempFilePath: String?, streamable: Bool, video: Bool, preferSoftwareDecoding: Bool, fetchAutomatically: Bool, maximumFetchSize: Int? = nil, stallDuration: Double = 1.0, lowWaterDuration: Double = 2.0, highWaterDuration: Double = 3.0, storeAfterDownload: (() -> Void)? = nil) {
public init(queue: Queue, postbox: Postbox, userLocation: MediaResourceUserLocation, userContentType: MediaResourceUserContentType, resourceReference: MediaResourceReference, tempFilePath: String?, streamable: Bool, isSeekable: Bool, video: Bool, preferSoftwareDecoding: Bool, fetchAutomatically: Bool, maximumFetchSize: Int? = nil, stallDuration: Double = 1.0, lowWaterDuration: Double = 2.0, highWaterDuration: Double = 3.0, storeAfterDownload: (() -> Void)? = nil) {
self.queue = queue
self.postbox = postbox
self.userLocation = userLocation
@@ -109,6 +110,7 @@ public final class FFMpegMediaFrameSource: NSObject, MediaFrameSource {
self.resourceReference = resourceReference
self.tempFilePath = tempFilePath
self.streamable = streamable
self.isSeekable = isSeekable
self.video = video
self.preferSoftwareDecoding = preferSoftwareDecoding
self.fetchAutomatically = fetchAutomatically
@@ -145,13 +147,13 @@ public final class FFMpegMediaFrameSource: NSObject, MediaFrameSource {
self.eventSinkBag.remove(index)
}
public func generateFrames(until timestamp: Double) {
public func generateFrames(until timestamp: Double, types: [MediaTrackFrameType]) {
assert(self.queue.isCurrent())
if self.requestedFrameGenerationTimestamp == nil || !self.requestedFrameGenerationTimestamp!.isEqual(to: timestamp) {
self.requestedFrameGenerationTimestamp = timestamp
self.internalGenerateFrames(until: timestamp)
self.internalGenerateFrames(until: timestamp, types: types)
}
}
@@ -167,7 +169,7 @@ public final class FFMpegMediaFrameSource: NSObject, MediaFrameSource {
})
self.performWithContext({ context in
let _ = currentSemaphore.swap(context.currentSemaphore)
let _ = context.takeFrames(until: timestamp)
let _ = context.takeFrames(until: timestamp, types: [.audio, .video])
subscriber.putCompletion()
})
return disposable
@@ -175,7 +177,7 @@ public final class FFMpegMediaFrameSource: NSObject, MediaFrameSource {
|> runOn(self.queue)
}
private func internalGenerateFrames(until timestamp: Double) {
private func internalGenerateFrames(until timestamp: Double, types: [MediaTrackFrameType]) {
if self.generatingFrames {
return
}
@@ -187,6 +189,7 @@ public final class FFMpegMediaFrameSource: NSObject, MediaFrameSource {
let tempFilePath = self.tempFilePath
let queue = self.queue
let streamable = self.streamable
let isSeekable = self.isSeekable
let userLocation = self.userLocation
let video = self.video
let preferSoftwareDecoding = self.preferSoftwareDecoding
@@ -195,9 +198,9 @@ public final class FFMpegMediaFrameSource: NSObject, MediaFrameSource {
let storeAfterDownload = self.storeAfterDownload
self.performWithContext { [weak self] context in
context.initializeState(postbox: postbox, userLocation: userLocation, resourceReference: resourceReference, tempFilePath: tempFilePath, streamable: streamable, video: video, preferSoftwareDecoding: preferSoftwareDecoding, fetchAutomatically: fetchAutomatically, maximumFetchSize: maximumFetchSize, storeAfterDownload: storeAfterDownload)
context.initializeState(postbox: postbox, userLocation: userLocation, resourceReference: resourceReference, tempFilePath: tempFilePath, streamable: streamable, isSeekable: isSeekable, video: video, preferSoftwareDecoding: preferSoftwareDecoding, fetchAutomatically: fetchAutomatically, maximumFetchSize: maximumFetchSize, storeAfterDownload: storeAfterDownload)
let (frames, endOfStream) = context.takeFrames(until: timestamp)
let (frames, endOfStream) = context.takeFrames(until: timestamp, types: types)
queue.async { [weak self] in
if let strongSelf = self {
@@ -211,7 +214,7 @@ public final class FFMpegMediaFrameSource: NSObject, MediaFrameSource {
}
if strongSelf.requestedFrameGenerationTimestamp != nil && !strongSelf.requestedFrameGenerationTimestamp!.isEqual(to: timestamp) {
strongSelf.internalGenerateFrames(until: strongSelf.requestedFrameGenerationTimestamp!)
strongSelf.internalGenerateFrames(until: strongSelf.requestedFrameGenerationTimestamp!, types: types)
}
}
}
@@ -240,6 +243,7 @@ public final class FFMpegMediaFrameSource: NSObject, MediaFrameSource {
let resourceReference = self.resourceReference
let tempFilePath = self.tempFilePath
let streamable = self.streamable
let isSeekable = self.isSeekable
let video = self.video
let preferSoftwareDecoding = self.preferSoftwareDecoding
let fetchAutomatically = self.fetchAutomatically
@@ -255,7 +259,7 @@ public final class FFMpegMediaFrameSource: NSObject, MediaFrameSource {
self.performWithContext { [weak self] context in
let _ = currentSemaphore.swap(context.currentSemaphore)
context.initializeState(postbox: postbox, userLocation: userLocation, resourceReference: resourceReference, tempFilePath: tempFilePath, streamable: streamable, video: video, preferSoftwareDecoding: preferSoftwareDecoding, fetchAutomatically: fetchAutomatically, maximumFetchSize: maximumFetchSize, storeAfterDownload: storeAfterDownload)
context.initializeState(postbox: postbox, userLocation: userLocation, resourceReference: resourceReference, tempFilePath: tempFilePath, streamable: streamable, isSeekable: isSeekable, video: video, preferSoftwareDecoding: preferSoftwareDecoding, fetchAutomatically: fetchAutomatically, maximumFetchSize: maximumFetchSize, storeAfterDownload: storeAfterDownload)
context.seek(timestamp: timestamp, completed: { streamDescriptionsAndTimestamp in
queue.async {

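Note: with the extra types parameter, a consumer that only buffers one track no longer forces the other track's frames to be produced alongside it; the seek path above still requests [.audio, .video] so both tracks are primed after a seek. A hypothetical helper, assuming a source conforming to the updated MediaFrameSource protocol:

    // Hypothetical convenience wrapper: pull frames for a single track type.
    // Unrequested types are skipped inside takeFrames (see the next file).
    func requestFrames(from source: MediaFrameSource, type: MediaTrackFrameType, until timestamp: Double) {
        source.generateFrames(until: timestamp, types: [type])
    }
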
View File

@@ -96,7 +96,7 @@ private func readPacketCallback(userData: UnsafeMutableRawPointer?, buffer: Unsa
fetchedData = Data()
} else {
#if DEBUG
print("requestRange: \(requestRange)")
//print("requestRange: \(requestRange)")
#endif
if let tempFilePath = context.tempFilePath, let fileData = (try? Data(contentsOf: URL(fileURLWithPath: tempFilePath), options: .mappedRead))?.subdata(in: Int(requestRange.lowerBound) ..< Int(requestRange.upperBound)) {
fetchedData = fileData
@@ -327,7 +327,7 @@ final class FFMpegMediaFrameSourceContext: NSObject {
self.autosaveDisposable.dispose()
}
func initializeState(postbox: Postbox, userLocation: MediaResourceUserLocation, resourceReference: MediaResourceReference, tempFilePath: String?, streamable: Bool, video: Bool, preferSoftwareDecoding: Bool, fetchAutomatically: Bool, maximumFetchSize: Int?, storeAfterDownload: (() -> Void)?) {
func initializeState(postbox: Postbox, userLocation: MediaResourceUserLocation, resourceReference: MediaResourceReference, tempFilePath: String?, streamable: Bool, isSeekable: Bool, video: Bool, preferSoftwareDecoding: Bool, fetchAutomatically: Bool, maximumFetchSize: Int?, storeAfterDownload: (() -> Void)?) {
if self.readingError || self.initializedState != nil {
return
}
@@ -382,7 +382,7 @@
let avFormatContext = FFMpegAVFormatContext()
guard let avIoContext = FFMpegAVIOContext(bufferSize: Int32(self.ioBufferSize), opaqueContext: Unmanaged.passUnretained(self).toOpaque(), readPacket: readPacketCallback, writePacket: nil, seek: seekCallback) else {
guard let avIoContext = FFMpegAVIOContext(bufferSize: Int32(self.ioBufferSize), opaqueContext: Unmanaged.passUnretained(self).toOpaque(), readPacket: readPacketCallback, writePacket: nil, seek: seekCallback, isSeekable: isSeekable) else {
self.readingError = true
return
}
@@ -505,7 +505,7 @@
}
}
func takeFrames(until: Double) -> (frames: [MediaTrackDecodableFrame], endOfStream: Bool) {
func takeFrames(until: Double, types: [MediaTrackFrameType]) -> (frames: [MediaTrackDecodableFrame], endOfStream: Bool) {
if self.readingError {
return ([], true)
}
@@ -515,12 +515,12 @@
}
var videoTimestamp: Double?
if initializedState.videoStream == nil {
if initializedState.videoStream == nil || !types.contains(.video) {
videoTimestamp = Double.infinity
}
var audioTimestamp: Double?
if initializedState.audioStream == nil {
if initializedState.audioStream == nil || !types.contains(.audio) {
audioTimestamp = Double.infinity
}
@@ -535,6 +535,7 @@
if videoTimestamp == nil || videoTimestamp! < CMTimeGetSeconds(frame.pts) {
videoTimestamp = CMTimeGetSeconds(frame.pts)
//print("read video at \(CMTimeGetSeconds(frame.pts))")
}
} else if let audioStream = initializedState.audioStream, Int(packet.streamIndex) == audioStream.index {
let packetPts = packet.pts
@@ -556,6 +557,7 @@
if audioTimestamp == nil || audioTimestamp! < CMTimeGetSeconds(pts) {
audioTimestamp = CMTimeGetSeconds(pts)
//print("read audio at \(CMTimeGetSeconds(pts))")
}
}
} else {

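Note: the per-type filtering works by seeding the per-track timestamps: a track whose stream is missing, or whose type was not requested, starts at Double.infinity, so the packet-reading loop treats it as already past the target time and never reads or waits for it. A simplified restatement of that seeding rule as a hypothetical helper, using the module's MediaTrackFrameType:

    // nil = "no frame read yet for this track"; .infinity = "ignore this track".
    func seedTimestamp(hasStream: Bool, type: MediaTrackFrameType, requested: [MediaTrackFrameType]) -> Double? {
        if !hasStream || !requested.contains(type) {
            return .infinity
        }
        return nil
    }
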
View File

@@ -26,6 +26,6 @@ public enum MediaFrameSourceSeekError {
public protocol MediaFrameSource {
func addEventSink(_ f: @escaping (MediaTrackEvent) -> Void) -> Int
func removeEventSink(_ index: Int)
func generateFrames(until timestamp: Double)
func generateFrames(until timestamp: Double, types: [MediaTrackFrameType])
func seek(timestamp: Double) -> Signal<QueueLocalObject<MediaFrameSourceSeekResult>, MediaFrameSourceSeekError>
}

View File

@@ -88,6 +88,15 @@ public enum MediaPlayerStreaming {
return (0.25, 0.5, 1.0)
}
}
public var isSeekable: Bool {
switch self {
case .none, .conservative, .earlierStart:
return true
case .story:
return false
}
}
}
private final class MediaPlayerAudioRendererContext {
@@ -311,7 +320,7 @@ private final class MediaPlayerContext {
let _ = self.playerStatusValue.swap(status)
}
let frameSource = FFMpegMediaFrameSource(queue: self.queue, postbox: self.postbox, userLocation: self.userLocation, userContentType: self.userContentType, resourceReference: self.resourceReference, tempFilePath: self.tempFilePath, streamable: self.streamable.enabled, video: self.video, preferSoftwareDecoding: self.preferSoftwareDecoding, fetchAutomatically: self.fetchAutomatically, stallDuration: self.streamable.parameters.0, lowWaterDuration: self.streamable.parameters.1, highWaterDuration: self.streamable.parameters.2, storeAfterDownload: self.storeAfterDownload)
let frameSource = FFMpegMediaFrameSource(queue: self.queue, postbox: self.postbox, userLocation: self.userLocation, userContentType: self.userContentType, resourceReference: self.resourceReference, tempFilePath: self.tempFilePath, streamable: self.streamable.enabled, isSeekable: self.streamable.isSeekable, video: self.video, preferSoftwareDecoding: self.preferSoftwareDecoding, fetchAutomatically: self.fetchAutomatically, stallDuration: self.streamable.parameters.0, lowWaterDuration: self.streamable.parameters.1, highWaterDuration: self.streamable.parameters.2, storeAfterDownload: self.storeAfterDownload)
let disposable = MetaDisposable()
let updatedSeekState: MediaPlayerSeekState?
if let loadedDuration = loadedDuration {

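Note: each streaming mode now also declares whether its transport supports seeking; only the new .story mode reports false, and that value flows into the isSeekable: argument of the frame source above. A small sketch of how a mode's properties drive the configuration (property names as defined in this file):

    let mode: MediaPlayerStreaming = .story
    let (stallDuration, lowWaterDuration, highWaterDuration) = mode.parameters
    // .story streams are consumed front-to-back over a non-seekable transport.
    assert(mode.isSeekable == false)
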
View File

@@ -41,6 +41,7 @@ public final class MediaTrackFrameBuffer {
private var frameSourceSinkIndex: Int?
private var frames: [MediaTrackDecodableFrame] = []
private var maxFrameTime: Double?
private var endOfStream = false
private var bufferedUntilTime: CMTime?
private var isWaitingForLowWaterDuration: Bool = false
@@ -94,6 +95,13 @@
}
if let maxUntilTime = maxUntilTime {
if let maxFrameTime = self.maxFrameTime {
if maxFrameTime < CMTimeGetSeconds(maxUntilTime) {
self.maxFrameTime = CMTimeGetSeconds(maxUntilTime)
}
} else {
self.maxFrameTime = CMTimeGetSeconds(maxUntilTime)
}
if traceEvents {
print("\(self.type) added \(frames.count) frames until \(CMTimeGetSeconds(maxUntilTime)), \(self.frames.count) total")
}
@@ -111,13 +119,21 @@
public func status(at timestamp: Double) -> MediaTrackFrameBufferStatus {
var bufferedDuration = 0.0
if let bufferedUntilTime = self.bufferedUntilTime {
if CMTimeCompare(bufferedUntilTime, self.duration) >= 0 || self.endOfStream {
if CMTimeGetSeconds(self.duration) > 0.0 {
if CMTimeCompare(bufferedUntilTime, self.duration) >= 0 || self.endOfStream {
return .finished(at: CMTimeGetSeconds(bufferedUntilTime))
}
} else if self.endOfStream {
return .finished(at: CMTimeGetSeconds(bufferedUntilTime))
}
bufferedDuration = CMTimeGetSeconds(bufferedUntilTime) - timestamp
} else if self.endOfStream {
return .finished(at: CMTimeGetSeconds(self.duration))
if let maxFrameTime = self.maxFrameTime {
return .finished(at: maxFrameTime)
} else {
return .finished(at: CMTimeGetSeconds(self.duration))
}
}
let minTimestamp = timestamp - 1.0
@@ -134,7 +150,7 @@
let delayIncrement = 0.3
var generateUntil = timestamp + delayIncrement
while generateUntil < timestamp + self.highWaterDuration {
self.frameSource.generateFrames(until: min(timestamp + self.highWaterDuration, generateUntil))
self.frameSource.generateFrames(until: min(timestamp + self.highWaterDuration, generateUntil), types: [self.type])
generateUntil += delayIncrement
}

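Note: these buffer changes handle containers that report a zero duration, which is the normal case for a pure stream: completion can no longer be decided by comparing against duration, so the buffer records the largest frame timestamp it has seen (maxFrameTime) and relies on the source's end-of-stream signal. The revised rule, restated as a hypothetical pure function over plain Double values:

    // Returns the time at which the track is finished, or nil if still buffering.
    func finishedTime(duration: Double, bufferedUntil: Double?, maxFrameTime: Double?, endOfStream: Bool) -> Double? {
        if let bufferedUntil = bufferedUntil {
            if duration > 0.0 {
                // Known duration: finished when buffered past the end or the stream ended.
                if bufferedUntil >= duration || endOfStream {
                    return bufferedUntil
                }
            } else if endOfStream {
                // Zero/unknown duration (pure stream): only end-of-stream can finish it.
                return bufferedUntil
            }
            return nil
        } else if endOfStream {
            // Nothing buffered at all: fall back to the last seen frame time.
            return maxFrameTime ?? duration
        }
        return nil
    }
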
View File

@@ -89,7 +89,7 @@ public final class SoftwareVideoSource {
}
let ioBufferSize = 64 * 1024
let avIoContext = FFMpegAVIOContext(bufferSize: Int32(ioBufferSize), opaqueContext: Unmanaged.passUnretained(self).toOpaque(), readPacket: readPacketCallback, writePacket: nil, seek: seekCallback)
let avIoContext = FFMpegAVIOContext(bufferSize: Int32(ioBufferSize), opaqueContext: Unmanaged.passUnretained(self).toOpaque(), readPacket: readPacketCallback, writePacket: nil, seek: seekCallback, isSeekable: true)
self.avIoContext = avIoContext
avFormatContext.setIO(self.avIoContext!)
@@ -322,7 +322,7 @@
let ioBufferSize = 64 * 1024
let avIoContext = FFMpegAVIOContext(bufferSize: Int32(ioBufferSize), opaqueContext: Unmanaged.passUnretained(self).toOpaque(), readPacket: readPacketCallback, writePacket: nil, seek: seekCallback)
let avIoContext = FFMpegAVIOContext(bufferSize: Int32(ioBufferSize), opaqueContext: Unmanaged.passUnretained(self).toOpaque(), readPacket: readPacketCallback, writePacket: nil, seek: seekCallback, isSeekable: true)
self.avIoContext = avIoContext
avFormatContext.setIO(self.avIoContext!)

View File

@@ -12,7 +12,7 @@ public func preloadVideoResource(postbox: Postbox, userLocation: MediaResourceUs
queue.async {
let maximumFetchSize = 2 * 1024 * 1024 + 128 * 1024
//let maximumFetchSize = 128
let sourceImpl = FFMpegMediaFrameSource(queue: queue, postbox: postbox, userLocation: userLocation, userContentType: userContentType, resourceReference: resourceReference, tempFilePath: nil, streamable: true, video: true, preferSoftwareDecoding: false, fetchAutomatically: true, maximumFetchSize: maximumFetchSize)
let sourceImpl = FFMpegMediaFrameSource(queue: queue, postbox: postbox, userLocation: userLocation, userContentType: userContentType, resourceReference: resourceReference, tempFilePath: nil, streamable: true, isSeekable: true, video: true, preferSoftwareDecoding: false, fetchAutomatically: true, maximumFetchSize: maximumFetchSize)
let source = QueueLocalObject(queue: queue, generate: {
return sourceImpl
})

View File

@@ -138,7 +138,7 @@ private final class UniversalSoftwareVideoSourceImpl {
let ioBufferSize = 1 * 1024
guard let avIoContext = FFMpegAVIOContext(bufferSize: Int32(ioBufferSize), opaqueContext: Unmanaged.passUnretained(self).toOpaque(), readPacket: readPacketCallback, writePacket: nil, seek: seekCallback) else {
guard let avIoContext = FFMpegAVIOContext(bufferSize: Int32(ioBufferSize), opaqueContext: Unmanaged.passUnretained(self).toOpaque(), readPacket: readPacketCallback, writePacket: nil, seek: seekCallback, isSeekable: true) else {
return nil
}
self.avIoContext = avIoContext