Mirror of https://github.com/Swiftgram/Telegram-iOS.git

Support pure streaming

Commit 54dc2eae57, parent 82f511c8a5
@@ -6,7 +6,7 @@ NS_ASSUME_NONNULL_BEGIN
 
 @interface FFMpegAVIOContext : NSObject
 
-- (instancetype _Nullable)initWithBufferSize:(int32_t)bufferSize opaqueContext:(void * const)opaqueContext readPacket:(int (* _Nullable)(void * _Nullable opaque, uint8_t * _Nullable buf, int buf_size))readPacket writePacket:(int (* _Nullable)(void * _Nullable opaque, uint8_t * _Nullable buf, int buf_size))writePacket seek:(int64_t (*)(void * _Nullable opaque, int64_t offset, int whence))seek;
+- (instancetype _Nullable)initWithBufferSize:(int32_t)bufferSize opaqueContext:(void * const)opaqueContext readPacket:(int (* _Nullable)(void * _Nullable opaque, uint8_t * _Nullable buf, int buf_size))readPacket writePacket:(int (* _Nullable)(void * _Nullable opaque, uint8_t * _Nullable buf, int buf_size))writePacket seek:(int64_t (*)(void * _Nullable opaque, int64_t offset, int whence))seek isSeekable:(bool)isSeekable;
 
 - (void *)impl;
 
@@ -10,7 +10,7 @@
 
 @implementation FFMpegAVIOContext
 
-- (instancetype _Nullable)initWithBufferSize:(int32_t)bufferSize opaqueContext:(void * const)opaqueContext readPacket:(int (* _Nullable)(void * _Nullable opaque, uint8_t * _Nullable buf, int buf_size))readPacket writePacket:(int (* _Nullable)(void * _Nullable opaque, uint8_t * _Nullable buf, int buf_size))writePacket seek:(int64_t (*)(void * _Nullable opaque, int64_t offset, int whence))seek {
+- (instancetype _Nullable)initWithBufferSize:(int32_t)bufferSize opaqueContext:(void * const)opaqueContext readPacket:(int (* _Nullable)(void * _Nullable opaque, uint8_t * _Nullable buf, int buf_size))readPacket writePacket:(int (* _Nullable)(void * _Nullable opaque, uint8_t * _Nullable buf, int buf_size))writePacket seek:(int64_t (*)(void * _Nullable opaque, int64_t offset, int whence))seek isSeekable:(bool)isSeekable {
     self = [super init];
     if (self != nil) {
         void *avIoBuffer = av_malloc(bufferSize);
@@ -20,7 +20,10 @@
             return nil;
         }
         _impl->direct = 1;
-        //_impl->seekable = 0;
+        
+        if (!isSeekable) {
+            _impl->seekable = 0;
+        }
     }
     return self;
 }
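Note: FFmpeg marks a custom AVIOContext as seekable whenever a seek callback is installed; clearing the context's seekable field tells demuxers to read strictly forward instead of probing by seeking, which is what pure streaming relies on. Below is a minimal sketch of the decision the new isSeekable: parameter encodes, written in plain Swift with hypothetical names (no FFmpeg linkage, not the real FFMpegAVIOContext API):

// Sketch only: models the `if (!isSeekable) { _impl->seekable = 0; }` branch above
// with stand-in Swift types.
struct IOContextFlags {
    var direct: Bool = true
    // Mirrors AVIOContext.seekable: 0 means the demuxer must not attempt seeks.
    var seekable: Int32 = 1

    mutating func configure(isSeekable: Bool) {
        if !isSeekable {
            self.seekable = 0
        }
    }
}

var flags = IOContextFlags()
flags.configure(isSeekable: false) // pure streaming: forward-only reads
print(flags.seekable)              // 0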
@@ -73,6 +73,7 @@ public final class FFMpegMediaFrameSource: NSObject, MediaFrameSource {
     private let resourceReference: MediaResourceReference
     private let tempFilePath: String?
     private let streamable: Bool
+    private let isSeekable: Bool
     private let stallDuration: Double
     private let lowWaterDuration: Double
     private let highWaterDuration: Double
@@ -101,7 +102,7 @@ public final class FFMpegMediaFrameSource: NSObject, MediaFrameSource {
         }
     }
     
-    public init(queue: Queue, postbox: Postbox, userLocation: MediaResourceUserLocation, userContentType: MediaResourceUserContentType, resourceReference: MediaResourceReference, tempFilePath: String?, streamable: Bool, video: Bool, preferSoftwareDecoding: Bool, fetchAutomatically: Bool, maximumFetchSize: Int? = nil, stallDuration: Double = 1.0, lowWaterDuration: Double = 2.0, highWaterDuration: Double = 3.0, storeAfterDownload: (() -> Void)? = nil) {
+    public init(queue: Queue, postbox: Postbox, userLocation: MediaResourceUserLocation, userContentType: MediaResourceUserContentType, resourceReference: MediaResourceReference, tempFilePath: String?, streamable: Bool, isSeekable: Bool, video: Bool, preferSoftwareDecoding: Bool, fetchAutomatically: Bool, maximumFetchSize: Int? = nil, stallDuration: Double = 1.0, lowWaterDuration: Double = 2.0, highWaterDuration: Double = 3.0, storeAfterDownload: (() -> Void)? = nil) {
         self.queue = queue
         self.postbox = postbox
         self.userLocation = userLocation
@@ -109,6 +110,7 @@ public final class FFMpegMediaFrameSource: NSObject, MediaFrameSource {
         self.resourceReference = resourceReference
         self.tempFilePath = tempFilePath
         self.streamable = streamable
+        self.isSeekable = isSeekable
         self.video = video
         self.preferSoftwareDecoding = preferSoftwareDecoding
         self.fetchAutomatically = fetchAutomatically
@@ -145,13 +147,13 @@ public final class FFMpegMediaFrameSource: NSObject, MediaFrameSource {
         self.eventSinkBag.remove(index)
     }
     
-    public func generateFrames(until timestamp: Double) {
+    public func generateFrames(until timestamp: Double, types: [MediaTrackFrameType]) {
         assert(self.queue.isCurrent())
         
         if self.requestedFrameGenerationTimestamp == nil || !self.requestedFrameGenerationTimestamp!.isEqual(to: timestamp) {
             self.requestedFrameGenerationTimestamp = timestamp
             
-            self.internalGenerateFrames(until: timestamp)
+            self.internalGenerateFrames(until: timestamp, types: types)
         }
     }
     
@@ -167,7 +169,7 @@ public final class FFMpegMediaFrameSource: NSObject, MediaFrameSource {
            })
            self.performWithContext({ context in
                let _ = currentSemaphore.swap(context.currentSemaphore)
-                let _ = context.takeFrames(until: timestamp)
+                let _ = context.takeFrames(until: timestamp, types: [.audio, .video])
                subscriber.putCompletion()
            })
            return disposable
@@ -175,7 +177,7 @@ public final class FFMpegMediaFrameSource: NSObject, MediaFrameSource {
        |> runOn(self.queue)
     }
     
-    private func internalGenerateFrames(until timestamp: Double) {
+    private func internalGenerateFrames(until timestamp: Double, types: [MediaTrackFrameType]) {
         if self.generatingFrames {
             return
         }
@@ -187,6 +189,7 @@ public final class FFMpegMediaFrameSource: NSObject, MediaFrameSource {
         let tempFilePath = self.tempFilePath
         let queue = self.queue
         let streamable = self.streamable
+        let isSeekable = self.isSeekable
         let userLocation = self.userLocation
         let video = self.video
         let preferSoftwareDecoding = self.preferSoftwareDecoding
@@ -195,9 +198,9 @@ public final class FFMpegMediaFrameSource: NSObject, MediaFrameSource {
         let storeAfterDownload = self.storeAfterDownload
         
         self.performWithContext { [weak self] context in
-            context.initializeState(postbox: postbox, userLocation: userLocation, resourceReference: resourceReference, tempFilePath: tempFilePath, streamable: streamable, video: video, preferSoftwareDecoding: preferSoftwareDecoding, fetchAutomatically: fetchAutomatically, maximumFetchSize: maximumFetchSize, storeAfterDownload: storeAfterDownload)
+            context.initializeState(postbox: postbox, userLocation: userLocation, resourceReference: resourceReference, tempFilePath: tempFilePath, streamable: streamable, isSeekable: isSeekable, video: video, preferSoftwareDecoding: preferSoftwareDecoding, fetchAutomatically: fetchAutomatically, maximumFetchSize: maximumFetchSize, storeAfterDownload: storeAfterDownload)
            
-            let (frames, endOfStream) = context.takeFrames(until: timestamp)
+            let (frames, endOfStream) = context.takeFrames(until: timestamp, types: types)
            
            queue.async { [weak self] in
                if let strongSelf = self {
@@ -211,7 +214,7 @@ public final class FFMpegMediaFrameSource: NSObject, MediaFrameSource {
                    }
                    
                    if strongSelf.requestedFrameGenerationTimestamp != nil && !strongSelf.requestedFrameGenerationTimestamp!.isEqual(to: timestamp) {
-                        strongSelf.internalGenerateFrames(until: strongSelf.requestedFrameGenerationTimestamp!)
+                        strongSelf.internalGenerateFrames(until: strongSelf.requestedFrameGenerationTimestamp!, types: types)
                    }
                }
            }
@@ -240,6 +243,7 @@ public final class FFMpegMediaFrameSource: NSObject, MediaFrameSource {
         let resourceReference = self.resourceReference
         let tempFilePath = self.tempFilePath
         let streamable = self.streamable
+        let isSeekable = self.isSeekable
         let video = self.video
         let preferSoftwareDecoding = self.preferSoftwareDecoding
         let fetchAutomatically = self.fetchAutomatically
@@ -255,7 +259,7 @@ public final class FFMpegMediaFrameSource: NSObject, MediaFrameSource {
        self.performWithContext { [weak self] context in
            let _ = currentSemaphore.swap(context.currentSemaphore)
            
-            context.initializeState(postbox: postbox, userLocation: userLocation, resourceReference: resourceReference, tempFilePath: tempFilePath, streamable: streamable, video: video, preferSoftwareDecoding: preferSoftwareDecoding, fetchAutomatically: fetchAutomatically, maximumFetchSize: maximumFetchSize, storeAfterDownload: storeAfterDownload)
+            context.initializeState(postbox: postbox, userLocation: userLocation, resourceReference: resourceReference, tempFilePath: tempFilePath, streamable: streamable, isSeekable: isSeekable, video: video, preferSoftwareDecoding: preferSoftwareDecoding, fetchAutomatically: fetchAutomatically, maximumFetchSize: maximumFetchSize, storeAfterDownload: storeAfterDownload)
            
            context.seek(timestamp: timestamp, completed: { streamDescriptionsAndTimestamp in
                queue.async {
@@ -96,7 +96,7 @@ private func readPacketCallback(userData: UnsafeMutableRawPointer?, buffer: Unsa
             fetchedData = Data()
         } else {
             #if DEBUG
-            print("requestRange: \(requestRange)")
+            //print("requestRange: \(requestRange)")
             #endif
             if let tempFilePath = context.tempFilePath, let fileData = (try? Data(contentsOf: URL(fileURLWithPath: tempFilePath), options: .mappedRead))?.subdata(in: Int(requestRange.lowerBound) ..< Int(requestRange.upperBound)) {
                 fetchedData = fileData
@@ -327,7 +327,7 @@ final class FFMpegMediaFrameSourceContext: NSObject {
        self.autosaveDisposable.dispose()
    }
    
-    func initializeState(postbox: Postbox, userLocation: MediaResourceUserLocation, resourceReference: MediaResourceReference, tempFilePath: String?, streamable: Bool, video: Bool, preferSoftwareDecoding: Bool, fetchAutomatically: Bool, maximumFetchSize: Int?, storeAfterDownload: (() -> Void)?) {
+    func initializeState(postbox: Postbox, userLocation: MediaResourceUserLocation, resourceReference: MediaResourceReference, tempFilePath: String?, streamable: Bool, isSeekable: Bool, video: Bool, preferSoftwareDecoding: Bool, fetchAutomatically: Bool, maximumFetchSize: Int?, storeAfterDownload: (() -> Void)?) {
        if self.readingError || self.initializedState != nil {
            return
        }
@@ -382,7 +382,7 @@ final class FFMpegMediaFrameSourceContext: NSObject {
        
        let avFormatContext = FFMpegAVFormatContext()
        
-        guard let avIoContext = FFMpegAVIOContext(bufferSize: Int32(self.ioBufferSize), opaqueContext: Unmanaged.passUnretained(self).toOpaque(), readPacket: readPacketCallback, writePacket: nil, seek: seekCallback) else {
+        guard let avIoContext = FFMpegAVIOContext(bufferSize: Int32(self.ioBufferSize), opaqueContext: Unmanaged.passUnretained(self).toOpaque(), readPacket: readPacketCallback, writePacket: nil, seek: seekCallback, isSeekable: isSeekable) else {
            self.readingError = true
            return
        }
@@ -505,7 +505,7 @@ final class FFMpegMediaFrameSourceContext: NSObject {
        }
    }
    
-    func takeFrames(until: Double) -> (frames: [MediaTrackDecodableFrame], endOfStream: Bool) {
+    func takeFrames(until: Double, types: [MediaTrackFrameType]) -> (frames: [MediaTrackDecodableFrame], endOfStream: Bool) {
        if self.readingError {
            return ([], true)
        }
@@ -515,12 +515,12 @@ final class FFMpegMediaFrameSourceContext: NSObject {
        }
        
        var videoTimestamp: Double?
-        if initializedState.videoStream == nil {
+        if initializedState.videoStream == nil || !types.contains(.video) {
            videoTimestamp = Double.infinity
        }
        
        var audioTimestamp: Double?
-        if initializedState.audioStream == nil {
+        if initializedState.audioStream == nil || !types.contains(.audio) {
            audioTimestamp = Double.infinity
        }
        
@@ -535,6 +535,7 @@ final class FFMpegMediaFrameSourceContext: NSObject {
                
                if videoTimestamp == nil || videoTimestamp! < CMTimeGetSeconds(frame.pts) {
                    videoTimestamp = CMTimeGetSeconds(frame.pts)
+                    //print("read video at \(CMTimeGetSeconds(frame.pts))")
                }
            } else if let audioStream = initializedState.audioStream, Int(packet.streamIndex) == audioStream.index {
                let packetPts = packet.pts
@@ -556,6 +557,7 @@ final class FFMpegMediaFrameSourceContext: NSObject {
                
                if audioTimestamp == nil || audioTimestamp! < CMTimeGetSeconds(pts) {
                    audioTimestamp = CMTimeGetSeconds(pts)
+                    //print("read audio at \(CMTimeGetSeconds(pts))")
                }
            }
        } else {
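Note: the new types: parameter lets a caller pull frames for a single track. takeFrames implements the filter with a sentinel: a track that is absent or not requested has its target timestamp preset to infinity, so the read loop treats it as already caught up and stops as soon as the requested track reaches the deadline. A simplified, self-contained sketch of that pattern (hypothetical names, plain Swift, not the real context API):

// Sketch of the sentinel-timestamp filter in takeFrames(until:types:): a track
// that is absent or not requested is treated as already satisfied, so the read
// loop only demuxes until the requested track reaches the deadline.
enum TrackType { case audio, video }

func targetTimestamps(hasVideo: Bool, hasAudio: Bool, types: [TrackType]) -> (video: Double?, audio: Double?) {
    var videoTimestamp: Double? = nil
    if !hasVideo || !types.contains(.video) {
        videoTimestamp = Double.infinity
    }
    var audioTimestamp: Double? = nil
    if !hasAudio || !types.contains(.audio) {
        audioTimestamp = Double.infinity
    }
    return (videoTimestamp, audioTimestamp)
}

let targets = targetTimestamps(hasVideo: true, hasAudio: true, types: [.video])
print(targets.audio == Double.infinity) // true: audio reads are skipped for this request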
@@ -26,6 +26,6 @@ public enum MediaFrameSourceSeekError {
 public protocol MediaFrameSource {
     func addEventSink(_ f: @escaping (MediaTrackEvent) -> Void) -> Int
     func removeEventSink(_ index: Int)
-    func generateFrames(until timestamp: Double)
+    func generateFrames(until timestamp: Double, types: [MediaTrackFrameType])
     func seek(timestamp: Double) -> Signal<QueueLocalObject<MediaFrameSourceSeekResult>, MediaFrameSourceSeekError>
 }
@@ -88,6 +88,15 @@ public enum MediaPlayerStreaming {
             return (0.25, 0.5, 1.0)
         }
     }
+    
+    public var isSeekable: Bool {
+        switch self {
+        case .none, .conservative, .earlierStart:
+            return true
+        case .story:
+            return false
+        }
+    }
 }
 
 private final class MediaPlayerAudioRendererContext {
@@ -311,7 +320,7 @@ private final class MediaPlayerContext {
            let _ = self.playerStatusValue.swap(status)
        }
        
-        let frameSource = FFMpegMediaFrameSource(queue: self.queue, postbox: self.postbox, userLocation: self.userLocation, userContentType: self.userContentType, resourceReference: self.resourceReference, tempFilePath: self.tempFilePath, streamable: self.streamable.enabled, video: self.video, preferSoftwareDecoding: self.preferSoftwareDecoding, fetchAutomatically: self.fetchAutomatically, stallDuration: self.streamable.parameters.0, lowWaterDuration: self.streamable.parameters.1, highWaterDuration: self.streamable.parameters.2, storeAfterDownload: self.storeAfterDownload)
+        let frameSource = FFMpegMediaFrameSource(queue: self.queue, postbox: self.postbox, userLocation: self.userLocation, userContentType: self.userContentType, resourceReference: self.resourceReference, tempFilePath: self.tempFilePath, streamable: self.streamable.enabled, isSeekable: self.streamable.isSeekable, video: self.video, preferSoftwareDecoding: self.preferSoftwareDecoding, fetchAutomatically: self.fetchAutomatically, stallDuration: self.streamable.parameters.0, lowWaterDuration: self.streamable.parameters.1, highWaterDuration: self.streamable.parameters.2, storeAfterDownload: self.storeAfterDownload)
        let disposable = MetaDisposable()
        let updatedSeekState: MediaPlayerSeekState?
        if let loadedDuration = loadedDuration {
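Note: seekability now follows the streaming mode: story playback streams strictly forward, every other mode keeps seeking enabled, and MediaPlayerContext forwards self.streamable.isSeekable into the frame source (hunk above). A small stand-alone sketch of how a call site consumes the new property (stand-in enum, not the real MediaPlayerStreaming):

// Stand-in mirroring the isSeekable property added to MediaPlayerStreaming.
enum StreamingMode {
    case none, conservative, earlierStart, story

    var isSeekable: Bool {
        switch self {
        case .none, .conservative, .earlierStart:
            return true
        case .story:
            return false
        }
    }
}

let mode: StreamingMode = .story
// This value would be passed through as `isSeekable:` when constructing the
// FFMpegMediaFrameSource, putting story playback on the forward-only path.
print(mode.isSeekable) // false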
@@ -41,6 +41,7 @@ public final class MediaTrackFrameBuffer {
     private var frameSourceSinkIndex: Int?
     
     private var frames: [MediaTrackDecodableFrame] = []
+    private var maxFrameTime: Double?
     private var endOfStream = false
     private var bufferedUntilTime: CMTime?
     private var isWaitingForLowWaterDuration: Bool = false
@@ -94,6 +95,13 @@ public final class MediaTrackFrameBuffer {
        }
        
        if let maxUntilTime = maxUntilTime {
+            if let maxFrameTime = self.maxFrameTime {
+                if maxFrameTime < CMTimeGetSeconds(maxUntilTime) {
+                    self.maxFrameTime = CMTimeGetSeconds(maxUntilTime)
+                }
+            } else {
+                self.maxFrameTime = CMTimeGetSeconds(maxUntilTime)
+            }
            if traceEvents {
                print("\(self.type) added \(frames.count) frames until \(CMTimeGetSeconds(maxUntilTime)), \(self.frames.count) total")
            }
@@ -111,13 +119,21 @@ public final class MediaTrackFrameBuffer {
    public func status(at timestamp: Double) -> MediaTrackFrameBufferStatus {
        var bufferedDuration = 0.0
        if let bufferedUntilTime = self.bufferedUntilTime {
-            if CMTimeCompare(bufferedUntilTime, self.duration) >= 0 || self.endOfStream {
+            if CMTimeGetSeconds(self.duration) > 0.0 {
+                if CMTimeCompare(bufferedUntilTime, self.duration) >= 0 || self.endOfStream {
+                    return .finished(at: CMTimeGetSeconds(bufferedUntilTime))
+                }
+            } else if self.endOfStream {
                return .finished(at: CMTimeGetSeconds(bufferedUntilTime))
            }
            
            bufferedDuration = CMTimeGetSeconds(bufferedUntilTime) - timestamp
        } else if self.endOfStream {
-            return .finished(at: CMTimeGetSeconds(self.duration))
+            if let maxFrameTime = self.maxFrameTime {
+                return .finished(at: maxFrameTime)
+            } else {
+                return .finished(at: CMTimeGetSeconds(self.duration))
+            }
        }
        
        let minTimestamp = timestamp - 1.0
@@ -134,7 +150,7 @@ public final class MediaTrackFrameBuffer {
            let delayIncrement = 0.3
            var generateUntil = timestamp + delayIncrement
            while generateUntil < timestamp + self.highWaterDuration {
-                self.frameSource.generateFrames(until: min(timestamp + self.highWaterDuration, generateUntil))
+                self.frameSource.generateFrames(until: min(timestamp + self.highWaterDuration, generateUntil), types: [self.type])
                generateUntil += delayIncrement
            }
            
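Note: a pure stream often reports no usable container duration (CMTimeGetSeconds(self.duration) is 0), so the buffer can no longer declare itself finished by comparing against the nominal duration; the hunks above make it rely on the end-of-stream flag and on the largest frame timestamp observed so far (maxFrameTime). A simplified sketch of that decision, using plain Doubles in place of CMTime and hypothetical names:

// Simplified model of the updated MediaTrackFrameBuffer.status(at:) decision;
// only the "finished" branches are reproduced, everything else is "buffering".
enum BufferStatus {
    case finished(at: Double)
    case buffering(bufferedDuration: Double)
}

func status(timestamp: Double, bufferedUntil: Double?, duration: Double, endOfStream: Bool, maxFrameTime: Double?) -> BufferStatus {
    if let bufferedUntil = bufferedUntil {
        if duration > 0.0 {
            // Known duration: finished once the buffer reaches it (or the stream ends).
            if bufferedUntil >= duration || endOfStream {
                return .finished(at: bufferedUntil)
            }
        } else if endOfStream {
            // Unknown duration (pure stream): trust the end-of-stream signal alone.
            return .finished(at: bufferedUntil)
        }
        return .buffering(bufferedDuration: bufferedUntil - timestamp)
    } else if endOfStream {
        // Nothing buffered but the stream ended: report the best-known end time.
        return .finished(at: maxFrameTime ?? duration)
    }
    return .buffering(bufferedDuration: 0.0)
}

// A live stream with no declared duration keeps buffering until end of stream.
print(status(timestamp: 5.0, bufferedUntil: 7.5, duration: 0.0, endOfStream: false, maxFrameTime: 7.5))
print(status(timestamp: 5.0, bufferedUntil: nil, duration: 0.0, endOfStream: true, maxFrameTime: 7.5))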
@@ -89,7 +89,7 @@ public final class SoftwareVideoSource {
        }
        let ioBufferSize = 64 * 1024
        
-        let avIoContext = FFMpegAVIOContext(bufferSize: Int32(ioBufferSize), opaqueContext: Unmanaged.passUnretained(self).toOpaque(), readPacket: readPacketCallback, writePacket: nil, seek: seekCallback)
+        let avIoContext = FFMpegAVIOContext(bufferSize: Int32(ioBufferSize), opaqueContext: Unmanaged.passUnretained(self).toOpaque(), readPacket: readPacketCallback, writePacket: nil, seek: seekCallback, isSeekable: true)
        self.avIoContext = avIoContext
        
        avFormatContext.setIO(self.avIoContext!)
@@ -322,7 +322,7 @@ public final class SoftwareAudioSource {
        
        let ioBufferSize = 64 * 1024
        
-        let avIoContext = FFMpegAVIOContext(bufferSize: Int32(ioBufferSize), opaqueContext: Unmanaged.passUnretained(self).toOpaque(), readPacket: readPacketCallback, writePacket: nil, seek: seekCallback)
+        let avIoContext = FFMpegAVIOContext(bufferSize: Int32(ioBufferSize), opaqueContext: Unmanaged.passUnretained(self).toOpaque(), readPacket: readPacketCallback, writePacket: nil, seek: seekCallback, isSeekable: true)
        self.avIoContext = avIoContext
        
        avFormatContext.setIO(self.avIoContext!)
@@ -12,7 +12,7 @@ public func preloadVideoResource(postbox: Postbox, userLocation: MediaResourceUs
    queue.async {
        let maximumFetchSize = 2 * 1024 * 1024 + 128 * 1024
        //let maximumFetchSize = 128
-        let sourceImpl = FFMpegMediaFrameSource(queue: queue, postbox: postbox, userLocation: userLocation, userContentType: userContentType, resourceReference: resourceReference, tempFilePath: nil, streamable: true, video: true, preferSoftwareDecoding: false, fetchAutomatically: true, maximumFetchSize: maximumFetchSize)
+        let sourceImpl = FFMpegMediaFrameSource(queue: queue, postbox: postbox, userLocation: userLocation, userContentType: userContentType, resourceReference: resourceReference, tempFilePath: nil, streamable: true, isSeekable: true, video: true, preferSoftwareDecoding: false, fetchAutomatically: true, maximumFetchSize: maximumFetchSize)
        let source = QueueLocalObject(queue: queue, generate: {
            return sourceImpl
        })
@@ -138,7 +138,7 @@ private final class UniversalSoftwareVideoSourceImpl {
        
        let ioBufferSize = 1 * 1024
        
-        guard let avIoContext = FFMpegAVIOContext(bufferSize: Int32(ioBufferSize), opaqueContext: Unmanaged.passUnretained(self).toOpaque(), readPacket: readPacketCallback, writePacket: nil, seek: seekCallback) else {
+        guard let avIoContext = FFMpegAVIOContext(bufferSize: Int32(ioBufferSize), opaqueContext: Unmanaged.passUnretained(self).toOpaque(), readPacket: readPacketCallback, writePacket: nil, seek: seekCallback, isSeekable: true) else {
            return nil
        }
        self.avIoContext = avIoContext