[WIP] Player V2

Isaac 2024-12-23 21:43:37 +08:00
parent d9957ecf4e
commit d1934371fe
13 changed files with 1012 additions and 1277 deletions

View File

@ -24,10 +24,18 @@ typedef struct FFMpegStreamMetrics {
int32_t extradataSize;
} FFMpegStreamMetrics;
typedef struct FFMpegAVIndexEntry {
int64_t pos;
int64_t timestamp;
bool isKeyframe;
int32_t size;
} FFMpegAVIndexEntry;
extern int FFMpegCodecIdH264;
extern int FFMpegCodecIdHEVC;
extern int FFMpegCodecIdMPEG4;
extern int FFMpegCodecIdVP9;
extern int FFMpegCodecIdVP8;
extern int FFMpegCodecIdAV1;
@class FFMpegAVCodecContext;
@ -40,6 +48,7 @@ extern int FFMpegCodecIdAV1;
- (bool)openInputWithDirectFilePath:(NSString * _Nullable)directFilePath;
- (bool)findStreamInfo;
- (void)seekFrameForStreamIndex:(int32_t)streamIndex pts:(int64_t)pts positionOnKeyframe:(bool)positionOnKeyframe;
- (void)seekFrameForStreamIndex:(int32_t)streamIndex byteOffset:(int64_t)byteOffset;
- (bool)readFrameIntoPacket:(FFMpegPacket *)packet;
- (NSArray<NSNumber *> *)streamIndicesForType:(FFMpegAVFormatStreamType)type;
- (bool)isAttachedPicAtStreamIndex:(int32_t)streamIndex;
@ -47,6 +56,8 @@ extern int FFMpegCodecIdAV1;
- (double)duration;
- (int64_t)startTimeAtStreamIndex:(int32_t)streamIndex;
- (int64_t)durationAtStreamIndex:(int32_t)streamIndex;
- (int)numberOfIndexEntriesAtStreamIndex:(int32_t)streamIndex;
- (bool)fillIndexEntryAtStreamIndex:(int32_t)streamIndex entryIndex:(int32_t)entryIndex outEntry:(FFMpegAVIndexEntry * _Nonnull)outEntry;
- (bool)codecParamsAtStreamIndex:(int32_t)streamIndex toContext:(FFMpegAVCodecContext *)context;
- (FFMpegFpsAndTimebase)fpsAndTimebaseForStreamIndex:(int32_t)streamIndex defaultTimeBase:(CMTime)defaultTimeBase;
- (FFMpegStreamMetrics)metricsForStreamAtIndex:(int32_t)streamIndex;
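The two methods above (numberOfIndexEntriesAtStreamIndex: and fillIndexEntryAtStreamIndex:) expose FFmpeg's per-stream seek index. A minimal Swift sketch of enumerating keyframe entries through this binding; avFormatContext and streamIndex are assumed to come from an already opened FFMpegAVFormatContext:

let streamIndex: Int32 = 0 // hypothetical video stream
var entry = FFMpegAVIndexEntry()
for i in 0 ..< avFormatContext.numberOfIndexEntries(atStreamIndex: streamIndex) {
    // fillIndexEntry returns false (and zeroes the out entry) when no entry exists at this index.
    if avFormatContext.fillIndexEntry(atStreamIndex: streamIndex, entryIndex: Int32(i), outEntry: &entry), entry.isKeyframe {
        print("keyframe pts \(entry.timestamp) at byte \(entry.pos), size \(entry.size)")
    }
}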

View File

@ -12,6 +12,7 @@ NS_ASSUME_NONNULL_BEGIN
@property (nonatomic, readonly) int32_t streamIndex;
@property (nonatomic, readonly) int32_t size;
@property (nonatomic, readonly) uint8_t *data;
@property (nonatomic, readonly) bool isKeyframe;
- (void *)impl;
- (int32_t)sendToDecoder:(FFMpegAVCodecContext *)codecContext;

View File

@ -11,6 +11,7 @@ int FFMpegCodecIdH264 = AV_CODEC_ID_H264;
int FFMpegCodecIdHEVC = AV_CODEC_ID_HEVC;
int FFMpegCodecIdMPEG4 = AV_CODEC_ID_MPEG4;
int FFMpegCodecIdVP9 = AV_CODEC_ID_VP9;
int FFMpegCodecIdVP8 = AV_CODEC_ID_VP8;
int FFMpegCodecIdAV1 = AV_CODEC_ID_AV1;
@interface FFMpegAVFormatContext () {
@ -70,6 +71,11 @@ int FFMpegCodecIdAV1 = AV_CODEC_ID_AV1;
av_seek_frame(_impl, streamIndex, pts, options);
}
- (void)seekFrameForStreamIndex:(int32_t)streamIndex byteOffset:(int64_t)byteOffset {
int options = AVSEEK_FLAG_BYTE;
av_seek_frame(_impl, streamIndex, byteOffset, options);
}
- (bool)readFrameIntoPacket:(FFMpegPacket *)packet {
int result = av_read_frame(_impl, (AVPacket *)[packet impl]);
return result >= 0;
@ -117,6 +123,28 @@ int FFMpegCodecIdAV1 = AV_CODEC_ID_AV1;
return _impl->streams[streamIndex]->duration;
}
- (int)numberOfIndexEntriesAtStreamIndex:(int32_t)streamIndex {
return avformat_index_get_entries_count(_impl->streams[streamIndex]);
}
- (bool)fillIndexEntryAtStreamIndex:(int32_t)streamIndex entryIndex:(int32_t)entryIndex outEntry:(FFMpegAVIndexEntry * _Nonnull)outEntry {
const AVIndexEntry *entry = avformat_index_get_entry(_impl->streams[streamIndex], entryIndex);
if (!entry) {
outEntry->pos = -1;
outEntry->timestamp = 0;
outEntry->isKeyframe = false;
outEntry->size = 0;
return false;
}
outEntry->pos = entry->pos;
outEntry->timestamp = entry->timestamp;
outEntry->isKeyframe = (entry->flags & AVINDEX_KEYFRAME) != 0;
outEntry->size = entry->size;
return true;
}
- (bool)codecParamsAtStreamIndex:(int32_t)streamIndex toContext:(FFMpegAVCodecContext *)context {
int result = avcodec_parameters_to_context((AVCodecContext *)[context impl], _impl->streams[streamIndex]->codecpar);
return result >= 0;
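seekFrameForStreamIndex:byteOffset: wraps av_seek_frame with AVSEEK_FLAG_BYTE, which repositions the demuxer at a file offset rather than a timestamp. A hedged Swift sketch of jumping to a segment's first byte and reading packets from there; segment and streamIndex are hypothetical:

avFormatContext.seekFrame(forStreamIndex: streamIndex, byteOffset: segment.startPosition)
let packet = FFMpegPacket()
while avFormatContext.readFrame(into: packet) {
    if packet.streamIndex == streamIndex {
        // First packet of the target stream at or after the byte offset.
        break
    }
}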

View File

@ -53,6 +53,10 @@
return (int32_t)_impl->size;
}
- (bool)isKeyframe {
return (_impl->flags & AV_PKT_FLAG_KEY) != 0;
}
- (uint8_t *)data {
return _impl->data;
}

View File

@ -10,7 +10,7 @@ public func convertOpusToAAC(sourcePath: String, allocateTempFile: @escaping ()
queue.async {
do {
let audioSource = SoftwareAudioSource(path: sourcePath)
let audioSource = SoftwareAudioSource(path: sourcePath, focusedPart: nil)
let outputPath = allocateTempFile()

File diff suppressed because it is too large

View File

@ -0,0 +1,486 @@
import Foundation
import UIKit
import SwiftSignalKit
import Postbox
import TelegramCore
import FFMpegBinding
import RangeSet
private final class FFMpegMediaFrameExtractContext {
let fd: Int32
var readPosition: Int = 0
let size: Int
var accessedRanges = RangeSet<Int>()
var maskRanges: RangeSet<Int>?
var recordAccessedRanges = false
init(fd: Int32, size: Int) {
self.fd = fd
self.size = size
}
}
private func FFMpegMediaFrameExtractContextReadPacketCallback(userData: UnsafeMutableRawPointer?, buffer: UnsafeMutablePointer<UInt8>?, bufferSize: Int32) -> Int32 {
let context = Unmanaged<FFMpegMediaFrameExtractContext>.fromOpaque(userData!).takeUnretainedValue()
if context.recordAccessedRanges {
context.accessedRanges.insert(contentsOf: context.readPosition ..< (context.readPosition + Int(bufferSize)))
}
let result: Int
if let maskRanges = context.maskRanges {
let readRange = context.readPosition ..< (context.readPosition + Int(bufferSize))
// WIP: mask-range filtering is not applied yet; both branches currently read directly.
let _ = maskRanges
let _ = readRange
result = read(context.fd, buffer, Int(bufferSize))
} else {
result = read(context.fd, buffer, Int(bufferSize))
}
// Advance by the bytes actually read; read() may return fewer than bufferSize.
if result > 0 {
    context.readPosition += result
}
if result == 0 {
return FFMPEG_CONSTANT_AVERROR_EOF
}
return Int32(result)
}
private func FFMpegMediaFrameExtractContextSeekCallback(userData: UnsafeMutableRawPointer?, offset: Int64, whence: Int32) -> Int64 {
let context = Unmanaged<FFMpegMediaFrameExtractContext>.fromOpaque(userData!).takeUnretainedValue()
if (whence & FFMPEG_AVSEEK_SIZE) != 0 {
return Int64(context.size)
} else {
context.readPosition = Int(offset)
lseek(context.fd, off_t(offset), SEEK_SET)
return offset
}
}
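These callbacks follow FFmpeg's custom-AVIO contract: the read callback returns the number of bytes read (or AVERROR_EOF at end of input), and the seek callback must answer AVSEEK_SIZE queries with the total stream size instead of repositioning.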
private struct FFMpegFrameSegment {
struct Stream {
let index: Int
let startPts: CMTime
let startPosition: Int64
var endPts: CMTime
var endPosition: Int64
var duration: Double
}
var audio: Stream?
var video: Stream?
init() {
}
mutating func addFrame(isVideo: Bool, index: Int, pts: CMTime, duration: Double, position: Int64, size: Int64) {
if var stream = isVideo ? self.video : self.audio {
stream.endPts = pts
stream.duration += duration
stream.endPosition = max(stream.endPosition, position + size)
if isVideo {
self.video = stream
} else {
self.audio = stream
}
} else {
let stream = Stream(index: index, startPts: pts, startPosition: position, endPts: pts, endPosition: position + size, duration: duration)
if isVideo {
self.video = stream
} else {
self.audio = stream
}
}
}
}
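FFMpegFrameSegment accumulates per-stream bounds: the first frame fixes startPts and startPosition, and every later frame extends endPts, endPosition, and the running duration. A small sketch with hypothetical values, assuming CoreMedia is imported:

var segment = FFMpegFrameSegment()
segment.addFrame(isVideo: true, index: 0, pts: CMTimeMake(value: 0, timescale: 90000), duration: 1.0 / 30.0, position: 4096, size: 20000)
segment.addFrame(isVideo: true, index: 0, pts: CMTimeMake(value: 3000, timescale: 90000), duration: 1.0 / 30.0, position: 24096, size: 8000)
// segment.video now spans pts 0.0 ... ≈0.033s, bytes 4096 ..< 32096, with duration ≈ 0.067s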
private final class FFMpegFrameSegmentInfo {
let headerAccessRanges: RangeSet<Int>
let segments: [FFMpegFrameSegment]
init(headerAccessRanges: RangeSet<Int>, segments: [FFMpegFrameSegment]) {
self.headerAccessRanges = headerAccessRanges
self.segments = segments
}
}
private func extractFFMpegFrameSegmentInfo(path: String) -> FFMpegFrameSegmentInfo? {
let _ = FFMpegMediaFrameSourceContextHelpers.registerFFMpegGlobals
var s = stat()
stat(path, &s)
let size = Int32(s.st_size)
let fd = open(path, O_RDONLY, S_IRUSR)
if fd < 0 {
return nil
}
defer {
close(fd)
}
let avFormatContext = FFMpegAVFormatContext()
let ioBufferSize = 32 * 1024
let context = FFMpegMediaFrameExtractContext(fd: fd, size: Int(size))
context.recordAccessedRanges = true
guard let avIoContext = FFMpegAVIOContext(bufferSize: Int32(ioBufferSize), opaqueContext: Unmanaged.passUnretained(context).toOpaque(), readPacket: FFMpegMediaFrameExtractContextReadPacketCallback, writePacket: nil, seek: FFMpegMediaFrameExtractContextSeekCallback, isSeekable: true) else {
return nil
}
avFormatContext.setIO(avIoContext)
if !avFormatContext.openInput(withDirectFilePath: nil) {
return nil
}
if !avFormatContext.findStreamInfo() {
return nil
}
var audioStream: FFMpegMediaInfo.Info?
var videoStream: FFMpegMediaInfo.Info?
for typeIndex in 0 ..< 2 {
let isVideo = typeIndex == 0
for streamIndexNumber in avFormatContext.streamIndices(for: isVideo ? FFMpegAVFormatStreamTypeVideo : FFMpegAVFormatStreamTypeAudio) {
let streamIndex = streamIndexNumber.int32Value
if avFormatContext.isAttachedPic(atStreamIndex: streamIndex) {
continue
}
let fpsAndTimebase = avFormatContext.fpsAndTimebase(forStreamIndex: streamIndex, defaultTimeBase: CMTimeMake(value: 1, timescale: 40000))
let (fps, timebase) = (fpsAndTimebase.fps, fpsAndTimebase.timebase)
let startTime: CMTime
let rawStartTime = avFormatContext.startTime(atStreamIndex: streamIndex)
if rawStartTime == Int64(bitPattern: 0x8000000000000000 as UInt64) { // AV_NOPTS_VALUE
startTime = CMTime(value: 0, timescale: timebase.timescale)
} else {
startTime = CMTimeMake(value: rawStartTime, timescale: timebase.timescale)
}
var duration = CMTimeMake(value: avFormatContext.duration(atStreamIndex: streamIndex), timescale: timebase.timescale)
duration = CMTimeMaximum(CMTime(value: 0, timescale: duration.timescale), CMTimeSubtract(duration, startTime))
var codecName: String?
let codecId = avFormatContext.codecId(atStreamIndex: streamIndex)
if codecId == FFMpegCodecIdMPEG4 {
codecName = "mpeg4"
} else if codecId == FFMpegCodecIdH264 {
codecName = "h264"
} else if codecId == FFMpegCodecIdHEVC {
codecName = "hevc"
} else if codecId == FFMpegCodecIdAV1 {
codecName = "av1"
} else if codecId == FFMpegCodecIdVP9 {
codecName = "vp9"
} else if codecId == FFMpegCodecIdVP8 {
codecName = "vp8"
}
let info = FFMpegMediaInfo.Info(
index: Int(streamIndex),
timescale: timebase.timescale,
startTime: startTime,
duration: duration,
fps: fps,
codecName: codecName
)
if isVideo {
videoStream = info
} else {
audioStream = info
}
}
}
var segments: [FFMpegFrameSegment] = []
let maxSegmentDuration: Double = 5.0
if let videoStream {
let indexEntryCount = avFormatContext.numberOfIndexEntries(atStreamIndex: Int32(videoStream.index))
if indexEntryCount > 0 {
let frameDuration = 1.0 / videoStream.fps.seconds
var indexEntry = FFMpegAVIndexEntry()
for i in 0 ..< indexEntryCount {
if !avFormatContext.fillIndexEntry(atStreamIndex: Int32(videoStream.index), entryIndex: Int32(i), outEntry: &indexEntry) {
continue
}
let packetPts = CMTime(value: indexEntry.timestamp, timescale: videoStream.timescale)
//print("index: \(packetPts.seconds), isKeyframe: \(indexEntry.isKeyframe), position: \(indexEntry.pos), size: \(indexEntry.size)")
var startNewSegment = segments.isEmpty
if indexEntry.isKeyframe {
if segments.isEmpty {
startNewSegment = true
} else if let video = segments[segments.count - 1].video {
if packetPts.seconds - video.startPts.seconds > maxSegmentDuration {
startNewSegment = true
}
}
}
if startNewSegment {
segments.append(FFMpegFrameSegment())
}
segments[segments.count - 1].addFrame(isVideo: true, index: videoStream.index, pts: packetPts, duration: frameDuration, position: indexEntry.pos, size: Int64(indexEntry.size))
}
if !segments.isEmpty, let video = segments[segments.count - 1].video {
if video.endPts.seconds + 1.0 / videoStream.fps.seconds + 0.001 < videoStream.duration.seconds {
segments[segments.count - 1].video?.duration = videoStream.duration.seconds - video.startPts.seconds
segments[segments.count - 1].video?.endPts = videoStream.duration
}
}
}
}
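Worked through: with keyframes every 2 seconds and maxSegmentDuration = 5.0, a new segment starts at the first keyframe more than 5 seconds past the current segment's start, so segments span roughly 6 seconds each (0–6, 6–12, …); non-keyframe entries always extend the current segment, and the last segment is stretched to the stream's full duration when it would otherwise fall short.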
if let audioStream {
let indexEntryCount = avFormatContext.numberOfIndexEntries(atStreamIndex: Int32(audioStream.index))
if indexEntryCount > 0 {
var minSegmentIndex = 0
var minSegmentStartTime: Double = -100000.0
let frameDuration = 1.0 / audioStream.fps.seconds
var indexEntry = FFMpegAVIndexEntry()
for i in 0 ..< indexEntryCount {
if !avFormatContext.fillIndexEntry(atStreamIndex: Int32(audioStream.index), entryIndex: Int32(i), outEntry: &indexEntry) {
continue
}
let packetPts = CMTime(value: indexEntry.timestamp, timescale: audioStream.timescale)
//print("index: \(packetPts.value), timestamp: \(packetPts.seconds), isKeyframe: \(indexEntry.isKeyframe), position: \(indexEntry.pos), size: \(indexEntry.size)")
if videoStream != nil {
for i in minSegmentIndex ..< segments.count {
if let video = segments[i].video {
if minSegmentStartTime <= packetPts.seconds && video.endPts.seconds >= packetPts.seconds {
segments[i].addFrame(isVideo: false, index: audioStream.index, pts: packetPts, duration: frameDuration, position: indexEntry.pos, size: Int64(indexEntry.size))
if minSegmentIndex != i {
minSegmentIndex = i
minSegmentStartTime = video.startPts.seconds
}
break
}
}
}
} else {
if segments.isEmpty {
segments.append(FFMpegFrameSegment())
}
segments[segments.count - 1].addFrame(isVideo: false, index: audioStream.index, pts: packetPts, duration: frameDuration, position: indexEntry.pos, size: Int64(indexEntry.size))
}
}
}
if !segments.isEmpty, let audio = segments[segments.count - 1].audio {
if audio.endPts.seconds + 0.001 < audioStream.duration.seconds {
segments[segments.count - 1].audio?.duration = audioStream.duration.seconds - audio.startPts.seconds
segments[segments.count - 1].audio?.endPts = audioStream.duration
}
}
}
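Audio index entries are slotted into the video segment whose pts window contains them. Because both the entries and the segments are in ascending order, minSegmentIndex only ever moves forward, making the assignment a single monotonic pass; without a video stream, all audio lands in one segment.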
let headerAccessRanges = context.accessedRanges
// max(1, ...) guards against forming an invalid range when no segments were found.
for i in 1 ..< max(1, segments.count) {
let segment = segments[i]
if let video = segment.video {
context.maskRanges = headerAccessRanges
context.maskRanges?.insert(contentsOf: Int(video.startPosition) ..< Int(video.endPosition))
context.accessedRanges = RangeSet()
context.recordAccessedRanges = true
avFormatContext.seekFrame(forStreamIndex: Int32(video.index), byteOffset: video.startPosition)
let packet = FFMpegPacket()
while true {
if !avFormatContext.readFrame(into: packet) {
break
}
if Int(packet.streamIndex) == video.index {
let packetPts = CMTime(value: packet.pts, timescale: video.startPts.timescale)
if packetPts.value >= video.endPts.value {
break
}
}
}
print("Segment \(i): \(video.startPosition) ..< \(video.endPosition) accessed \(context.accessedRanges.ranges)")
}
}
/*{
if let videoStream {
avFormatContext.seekFrame(forStreamIndex: Int32(videoStream.index), pts: 0, positionOnKeyframe: true)
let packet = FFMpegPacket()
while true {
if !avFormatContext.readFrame(into: packet) {
break
}
if Int(packet.streamIndex) == videoStream.index {
let packetPts = CMTime(value: packet.pts, timescale: videoStream.timescale)
let packetDuration = CMTime(value: packet.duration, timescale: videoStream.timescale)
var startNewSegment = segments.isEmpty
if packet.isKeyframe {
if segments.isEmpty {
startNewSegment = true
} else if let video = segments[segments.count - 1].video {
if packetPts.seconds - video.startPts.seconds > maxSegmentDuration {
startNewSegment = true
}
}
}
if startNewSegment {
segments.append(FFMpegFrameSegment())
}
segments[segments.count - 1].addFrame(isVideo: true, index: Int(packet.streamIndex), pts: packetPts, duration: packetDuration.seconds)
}
}
}
if let audioStream {
avFormatContext.seekFrame(forStreamIndex: Int32(audioStream.index), pts: 0, positionOnKeyframe: true)
var minSegmentIndex = 0
let packet = FFMpegPacket()
while true {
if !avFormatContext.readFrame(into: packet) {
break
}
if Int(packet.streamIndex) == audioStream.index {
let packetPts = CMTime(value: packet.pts, timescale: audioStream.timescale)
let packetDuration = CMTime(value: packet.duration, timescale: audioStream.timescale)
if videoStream != nil {
for i in minSegmentIndex ..< segments.count {
if let video = segments[i].video {
if video.startPts.seconds <= packetPts.seconds && video.endPts.seconds >= packetPts.seconds {
segments[i].addFrame(isVideo: false, index: Int(audioStream.index), pts: packetPts, duration: packetDuration.seconds)
minSegmentIndex = i
break
}
}
}
} else {
if segments.isEmpty {
segments.append(FFMpegFrameSegment())
}
segments[segments.count - 1].addFrame(isVideo: false, index: Int(packet.streamIndex), pts: packetPts, duration: packetDuration.seconds)
}
}
}
}
}*/
/*for i in 0 ..< segments.count {
print("Segment \(i):\n video \(segments[i].video?.startPts.seconds ?? -1.0) ... \(segments[i].video?.endPts.seconds ?? -1.0)\n audio \(segments[i].audio?.startPts.seconds ?? -1.0) ... \(segments[i].audio?.endPts.seconds ?? -1.0)")
}*/
return FFMpegFrameSegmentInfo(
headerAccessRanges: context.accessedRanges,
segments: segments
)
}
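A hedged usage sketch of the extractor above (the path is hypothetical):

if let info = extractFFMpegFrameSegmentInfo(path: "/tmp/video.mp4") {
    print("header ranges: \(info.headerAccessRanges.ranges)")
    for (i, segment) in info.segments.enumerated() {
        if let video = segment.video {
            print("segment \(i): pts \(video.startPts.seconds) ..< \(video.endPts.seconds), bytes \(video.startPosition) ..< \(video.endPosition)")
        }
    }
}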
final class ChunkMediaPlayerDirectFetchSourceImpl: ChunkMediaPlayerSourceImpl {
private let resource: ChunkMediaPlayerV2.SourceDescription.ResourceDescription
private let partsStateValue = Promise<ChunkMediaPlayerPartsState>()
var partsState: Signal<ChunkMediaPlayerPartsState, NoError> {
return self.partsStateValue.get()
}
private var completeFetchDisposable: Disposable?
private var dataDisposable: Disposable?
init(resource: ChunkMediaPlayerV2.SourceDescription.ResourceDescription) {
self.resource = resource
if resource.fetchAutomatically {
self.completeFetchDisposable = fetchedMediaResource(
mediaBox: resource.postbox.mediaBox,
userLocation: resource.userLocation,
userContentType: resource.userContentType,
reference: resource.reference,
statsCategory: resource.statsCategory,
preferBackgroundReferenceRevalidation: true
).startStrict()
}
self.dataDisposable = (resource.postbox.mediaBox.resourceData(resource.reference.resource)
|> deliverOnMainQueue).startStrict(next: { [weak self] data in
guard let self else {
return
}
if data.complete {
if let mediaInfo = extractFFMpegMediaInfo(path: data.path), let mainTrack = mediaInfo.audio ?? mediaInfo.video, let segmentInfo = extractFFMpegFrameSegmentInfo(path: data.path) {
var parts: [ChunkMediaPlayerPart] = []
for segment in segmentInfo.segments {
guard let mainStream = segment.video ?? segment.audio else {
assertionFailure()
continue
}
parts.append(ChunkMediaPlayerPart(
startTime: mainStream.startPts.seconds,
endTime: mainStream.startPts.seconds + mainStream.duration,
content: .directFile(ChunkMediaPlayerPart.Content.FFMpegDirectFile(
path: data.path,
audio: segment.audio.flatMap { stream in
return ChunkMediaPlayerPart.DirectStream(
index: stream.index,
startPts: stream.startPts,
endPts: stream.endPts,
duration: stream.duration
)
},
video: segment.video.flatMap { stream in
return ChunkMediaPlayerPart.DirectStream(
index: stream.index,
startPts: stream.startPts,
endPts: stream.endPts,
duration: stream.duration
)
}
)),
codecName: mediaInfo.video?.codecName
))
}
self.partsStateValue.set(.single(ChunkMediaPlayerPartsState(
duration: mainTrack.duration.seconds,
parts: parts
)))
} else {
self.partsStateValue.set(.single(ChunkMediaPlayerPartsState(
duration: nil,
parts: []
)))
}
} else {
self.partsStateValue.set(.single(ChunkMediaPlayerPartsState(
duration: nil,
parts: []
)))
}
})
}
deinit {
self.completeFetchDisposable?.dispose()
self.dataDisposable?.dispose()
}
func updatePlaybackState(position: Double, isPlaying: Bool) {
}
}

View File

@ -11,11 +11,51 @@ public let internal_isHardwareAv1Supported: Bool = {
return value
}()
protocol ChunkMediaPlayerSourceImpl: AnyObject {
var partsState: Signal<ChunkMediaPlayerPartsState, NoError> { get }
func updatePlaybackState(position: Double, isPlaying: Bool)
}
private final class ChunkMediaPlayerExternalSourceImpl: ChunkMediaPlayerSourceImpl {
let partsState: Signal<ChunkMediaPlayerPartsState, NoError>
init(partsState: Signal<ChunkMediaPlayerPartsState, NoError>) {
self.partsState = partsState
}
func updatePlaybackState(position: Double, isPlaying: Bool) {
}
}
public final class ChunkMediaPlayerV2: ChunkMediaPlayer {
public enum SourceDescription {
public final class ResourceDescription {
public let postbox: Postbox
public let reference: MediaResourceReference
public let userLocation: MediaResourceUserLocation
public let userContentType: MediaResourceUserContentType
public let statsCategory: MediaResourceStatsCategory
public let fetchAutomatically: Bool
public init(postbox: Postbox, reference: MediaResourceReference, userLocation: MediaResourceUserLocation, userContentType: MediaResourceUserContentType, statsCategory: MediaResourceStatsCategory, fetchAutomatically: Bool) {
self.postbox = postbox
self.reference = reference
self.userLocation = userLocation
self.userContentType = userContentType
self.statsCategory = statsCategory
self.fetchAutomatically = fetchAutomatically
}
}
case externalParts(Signal<ChunkMediaPlayerPartsState, NoError>)
case directFetch(ResourceDescription)
}
private final class LoadedPart {
final class Media {
let queue: Queue
let tempFile: TempBoxFile
let content: ChunkMediaPlayerPart.Content
let mediaType: AVMediaType
let codecName: String?
@ -24,11 +64,11 @@ public final class ChunkMediaPlayerV2: ChunkMediaPlayer {
var didBeginReading: Bool = false
var isFinished: Bool = false
init(queue: Queue, tempFile: TempBoxFile, mediaType: AVMediaType, codecName: String?) {
init(queue: Queue, content: ChunkMediaPlayerPart.Content, mediaType: AVMediaType, codecName: String?) {
assert(queue.isCurrent())
self.queue = queue
self.tempFile = tempFile
self.content = content
self.mediaType = mediaType
self.codecName = codecName
}
@ -39,10 +79,10 @@ public final class ChunkMediaPlayerV2: ChunkMediaPlayer {
func load() {
let reader: MediaDataReader
if self.mediaType == .video && (self.codecName == "av1" || self.codecName == "av01") && internal_isHardwareAv1Supported {
reader = AVAssetVideoDataReader(filePath: self.tempFile.path, isVideo: self.mediaType == .video)
if case let .tempFile(tempFile) = self.content, self.mediaType == .video, (self.codecName == "av1" || self.codecName == "av01"), internal_isHardwareAv1Supported {
reader = AVAssetVideoDataReader(filePath: tempFile.file.path, isVideo: self.mediaType == .video)
} else {
reader = FFMpegMediaDataReader(filePath: self.tempFile.path, isVideo: self.mediaType == .video, codecName: self.codecName)
reader = FFMpegMediaDataReader(content: self.content, isVideo: self.mediaType == .video, codecName: self.codecName)
}
if self.mediaType == .video {
if reader.hasVideo {
@ -115,7 +155,7 @@ public final class ChunkMediaPlayerV2: ChunkMediaPlayer {
return .never()
}
public var actionAtEnd: ChunkMediaPlayerActionAtEnd = .stop
public var actionAtEnd: MediaPlayerActionAtEnd = .stop
private var isPlaying: Bool = false
private var baseRate: Double = 1.0
@ -132,6 +172,7 @@ public final class ChunkMediaPlayerV2: ChunkMediaPlayer {
private var videoIsRequestingMediaData: Bool = false
private var audioIsRequestingMediaData: Bool = false
private let source: ChunkMediaPlayerSourceImpl
private var partsStateDisposable: Disposable?
private var updateTimer: Foundation.Timer?
@ -140,7 +181,7 @@ public final class ChunkMediaPlayerV2: ChunkMediaPlayer {
public init(
audioSessionManager: ManagedAudioSession,
partsState: Signal<ChunkMediaPlayerPartsState, NoError>,
source: SourceDescription,
video: Bool,
playAutomatically: Bool = false,
enableSound: Bool,
@ -177,6 +218,13 @@ public final class ChunkMediaPlayerV2: ChunkMediaPlayer {
self.videoRenderer = playerNode.videoLayer ?? AVSampleBufferDisplayLayer()
self.videoNode = playerNode
switch source {
case let .externalParts(partsState):
self.source = ChunkMediaPlayerExternalSourceImpl(partsState: partsState)
case let .directFetch(resource):
self.source = ChunkMediaPlayerDirectFetchSourceImpl(resource: resource)
}
self.updateTimer = Foundation.Timer.scheduledTimer(withTimeInterval: 1.0 / 60.0, repeats: true, block: { [weak self] _ in
guard let self else {
return
@ -184,7 +232,7 @@ public final class ChunkMediaPlayerV2: ChunkMediaPlayer {
self.updateInternalState()
})
self.partsStateDisposable = (partsState
self.partsStateDisposable = (self.source.partsState
|> deliverOnMainQueue).startStrict(next: { [weak self] partsState in
guard let self else {
return
@ -291,6 +339,11 @@ public final class ChunkMediaPlayerV2: ChunkMediaPlayer {
}
let timestampSeconds = timestamp.seconds
self.source.updatePlaybackState(
position: timestampSeconds,
isPlaying: self.isPlaying
)
var duration: Double = 0.0
if let partsStateDuration = self.partsState.duration {
duration = partsStateDuration
@ -318,7 +371,7 @@ public final class ChunkMediaPlayerV2: ChunkMediaPlayer {
startTime: part.startTime,
clippedStartTime: partStartTime == part.startTime ? nil : partStartTime,
endTime: part.endTime,
file: part.file,
content: part.content,
codecName: part.codecName
))
minStartTime = max(minStartTime, partEndTime)
@ -340,7 +393,7 @@ public final class ChunkMediaPlayerV2: ChunkMediaPlayer {
startTime: part.startTime,
clippedStartTime: partStartTime == part.startTime ? nil : partStartTime,
endTime: part.endTime,
file: part.file,
content: part.content,
codecName: part.codecName
))
minStartTime = max(minStartTime, partEndTime)
@ -385,7 +438,12 @@ public final class ChunkMediaPlayerV2: ChunkMediaPlayer {
for part in loadedParts {
if let loadedPart = loadedPartsMediaData.parts[part.part.id] {
if let audio = loadedPart.audio, audio.didBeginReading, !isSoundEnabled {
let cleanAudio = LoadedPart.Media(queue: dataQueue, tempFile: part.part.file, mediaType: .audio, codecName: part.part.codecName)
let cleanAudio = LoadedPart.Media(
queue: dataQueue,
content: part.part.content,
mediaType: .audio,
codecName: part.part.codecName
)
cleanAudio.load()
loadedPartsMediaData.parts[part.part.id] = LoadedPart.MediaData(
@ -395,10 +453,20 @@ public final class ChunkMediaPlayerV2: ChunkMediaPlayer {
)
}
} else {
let video = LoadedPart.Media(queue: dataQueue, tempFile: part.part.file, mediaType: .video, codecName: part.part.codecName)
let video = LoadedPart.Media(
queue: dataQueue,
content: part.part.content,
mediaType: .video,
codecName: part.part.codecName
)
video.load()
let audio = LoadedPart.Media(queue: dataQueue, tempFile: part.part.file, mediaType: .audio, codecName: part.part.codecName)
let audio = LoadedPart.Media(
queue: dataQueue,
content: part.part.content,
mediaType: .audio,
codecName: part.part.codecName
)
audio.load()
loadedPartsMediaData.parts[part.part.id] = LoadedPart.MediaData(
@ -774,6 +842,9 @@ public final class ChunkMediaPlayerV2: ChunkMediaPlayer {
continue outer
}
}
/*if !isVideo {
print("Enqueue audio \(CMSampleBufferGetPresentationTimeStamp(sampleBuffer).value) next: \(CMSampleBufferGetPresentationTimeStamp(sampleBuffer).value + 1024)")
}*/
didEnqeue = true
bufferTarget.enqueue(sampleBuffer)
hasData = true

View File

@ -19,6 +19,7 @@ public protocol MediaDataReader: AnyObject {
}
public final class FFMpegMediaDataReader: MediaDataReader {
private let content: ChunkMediaPlayerPart.Content
private let isVideo: Bool
private let videoSource: SoftwareVideoReader?
private let audioSource: SoftwareAudioSource?
@ -31,15 +32,42 @@ public final class FFMpegMediaDataReader: MediaDataReader {
return self.audioSource != nil
}
public init(filePath: String, isVideo: Bool, codecName: String?) {
public init(content: ChunkMediaPlayerPart.Content, isVideo: Bool, codecName: String?) {
self.content = content
self.isVideo = isVideo
let filePath: String
var focusedPart: MediaStreamFocusedPart?
switch content {
case let .tempFile(tempFile):
filePath = tempFile.file.path
case let .directFile(directFile):
filePath = directFile.path
let stream = isVideo ? directFile.video : directFile.audio
guard let stream else {
self.videoSource = nil
self.audioSource = nil
return
}
focusedPart = MediaStreamFocusedPart(
seekStreamIndex: stream.index,
startPts: stream.startPts,
endPts: stream.endPts
)
}
if self.isVideo {
var passthroughDecoder = true
if (codecName == "av1" || codecName == "av01") && !internal_isHardwareAv1Supported {
passthroughDecoder = false
}
let videoSource = SoftwareVideoReader(path: filePath, hintVP9: false, passthroughDecoder: passthroughDecoder)
if codecName == "vp9" || codecName == "vp8" {
passthroughDecoder = false
}
let videoSource = SoftwareVideoReader(path: filePath, hintVP9: false, passthroughDecoder: passthroughDecoder, focusedPart: focusedPart)
if videoSource.hasStream {
self.videoSource = videoSource
} else {
@ -47,7 +75,7 @@ public final class FFMpegMediaDataReader: MediaDataReader {
}
self.audioSource = nil
} else {
let audioSource = SoftwareAudioSource(path: filePath)
let audioSource = SoftwareAudioSource(path: filePath, focusedPart: focusedPart)
if audioSource.hasStream {
self.audioSource = audioSource
} else {

View File

@ -9,7 +9,7 @@ import CoreMedia
import SwiftSignalKit
import FFMpegBinding
private func readPacketCallback(userData: UnsafeMutableRawPointer?, buffer: UnsafeMutablePointer<UInt8>?, bufferSize: Int32) -> Int32 {
private func SoftwareVideoSource_readPacketCallback(userData: UnsafeMutableRawPointer?, buffer: UnsafeMutablePointer<UInt8>?, bufferSize: Int32) -> Int32 {
let context = Unmanaged<SoftwareVideoSource>.fromOpaque(userData!).takeUnretainedValue()
if let fd = context.fd {
let result = read(fd, buffer, Int(bufferSize))
@ -21,7 +21,7 @@ private func readPacketCallback(userData: UnsafeMutableRawPointer?, buffer: Unsa
return FFMPEG_CONSTANT_AVERROR_EOF
}
private func seekCallback(userData: UnsafeMutableRawPointer?, offset: Int64, whence: Int32) -> Int64 {
private func SoftwareVideoSource_seekCallback(userData: UnsafeMutableRawPointer?, offset: Int64, whence: Int32) -> Int64 {
let context = Unmanaged<SoftwareVideoSource>.fromOpaque(userData!).takeUnretainedValue()
if let fd = context.fd {
if (whence & FFMPEG_AVSEEK_SIZE) != 0 {
@ -102,7 +102,7 @@ public final class SoftwareVideoSource {
}
let ioBufferSize = 64 * 1024
let avIoContext = FFMpegAVIOContext(bufferSize: Int32(ioBufferSize), opaqueContext: Unmanaged.passUnretained(self).toOpaque(), readPacket: readPacketCallback, writePacket: nil, seek: seekCallback, isSeekable: true)
let avIoContext = FFMpegAVIOContext(bufferSize: Int32(ioBufferSize), opaqueContext: Unmanaged.passUnretained(self).toOpaque(), readPacket: SoftwareVideoSource_readPacketCallback, writePacket: nil, seek: SoftwareVideoSource_seekCallback, isSeekable: true)
self.avIoContext = avIoContext
avFormatContext.setIO(self.avIoContext!)
@ -356,7 +356,33 @@ private final class SoftwareAudioStream {
}
}
private func SoftwareAudioSource_readPacketCallback(userData: UnsafeMutableRawPointer?, buffer: UnsafeMutablePointer<UInt8>?, bufferSize: Int32) -> Int32 {
let context = Unmanaged<SoftwareAudioSource>.fromOpaque(userData!).takeUnretainedValue()
if let fd = context.fd {
let result = read(fd, buffer, Int(bufferSize))
if result == 0 {
return FFMPEG_CONSTANT_AVERROR_EOF
}
return Int32(result)
}
return FFMPEG_CONSTANT_AVERROR_EOF
}
private func SoftwareAudioSource_seekCallback(userData: UnsafeMutableRawPointer?, offset: Int64, whence: Int32) -> Int64 {
let context = Unmanaged<SoftwareAudioSource>.fromOpaque(userData!).takeUnretainedValue()
if let fd = context.fd {
if (whence & FFMPEG_AVSEEK_SIZE) != 0 {
return Int64(context.size)
} else {
lseek(fd, off_t(offset), SEEK_SET)
return offset
}
}
return 0
}
public final class SoftwareAudioSource {
private let focusedPart: MediaStreamFocusedPart?
private var readingError = false
private var audioStream: SoftwareAudioStream?
private var avIoContext: FFMpegAVIOContext?
@ -371,9 +397,11 @@ public final class SoftwareAudioSource {
return self.audioStream != nil
}
public init(path: String) {
public init(path: String, focusedPart: MediaStreamFocusedPart?) {
let _ = FFMpegMediaFrameSourceContextHelpers.registerFFMpegGlobals
self.focusedPart = focusedPart
var s = stat()
stat(path, &s)
self.size = Int32(s.st_size)
@ -391,7 +419,7 @@ public final class SoftwareAudioSource {
let ioBufferSize = 64 * 1024
let avIoContext = FFMpegAVIOContext(bufferSize: Int32(ioBufferSize), opaqueContext: Unmanaged.passUnretained(self).toOpaque(), readPacket: readPacketCallback, writePacket: nil, seek: seekCallback, isSeekable: true)
let avIoContext = FFMpegAVIOContext(bufferSize: Int32(ioBufferSize), opaqueContext: Unmanaged.passUnretained(self).toOpaque(), readPacket: SoftwareAudioSource_readPacketCallback, writePacket: nil, seek: SoftwareAudioSource_seekCallback, isSeekable: true)
self.avIoContext = avIoContext
avFormatContext.setIO(self.avIoContext!)
@ -438,8 +466,12 @@ public final class SoftwareAudioSource {
self.audioStream = audioStream
if let audioStream = self.audioStream {
avFormatContext.seekFrame(forStreamIndex: Int32(audioStream.index), pts: 0, positionOnKeyframe: false)
if let focusedPart = self.focusedPart {
avFormatContext.seekFrame(forStreamIndex: Int32(focusedPart.seekStreamIndex), pts: focusedPart.startPts.value, positionOnKeyframe: true)
} else {
if let audioStream = self.audioStream {
avFormatContext.seekFrame(forStreamIndex: Int32(audioStream.index), pts: 0, positionOnKeyframe: false)
}
}
}
@ -462,15 +494,18 @@ public final class SoftwareAudioSource {
}
}
func readDecodableFrame() -> (MediaTrackDecodableFrame?, Bool) {
func readDecodableFrame() -> MediaTrackDecodableFrame? {
var frames: [MediaTrackDecodableFrame] = []
var endOfStream = false
while !self.readingError && frames.isEmpty {
while !self.readingError && !self.hasReadToEnd && frames.isEmpty {
if let packet = self.readPacketInternal() {
if let audioStream = audioStream, Int(packet.streamIndex) == audioStream.index {
if let audioStream = self.audioStream, Int(packet.streamIndex) == audioStream.index {
let packetPts = packet.pts
if let focusedPart = self.focusedPart, packetPts >= focusedPart.endPts.value {
self.hasReadToEnd = true
}
let pts = CMTimeMake(value: packetPts, timescale: audioStream.timebase.timescale)
let dts = CMTimeMake(value: packet.dts, timescale: audioStream.timebase.timescale)
@ -487,21 +522,11 @@ public final class SoftwareAudioSource {
frames.append(frame)
}
} else {
if endOfStream {
break
} else {
if let _ = self.avFormatContext, let _ = self.audioStream {
endOfStream = true
break
} else {
endOfStream = true
break
}
}
break
}
}
return (frames.first, endOfStream)
return frames.first
}
public func readFrame() -> Data? {
@ -509,8 +534,7 @@ public final class SoftwareAudioSource {
return nil
}
let (decodableFrame, _) = self.readDecodableFrame()
if let decodableFrame = decodableFrame {
if let decodableFrame = self.readDecodableFrame() {
return audioStream.decoder.decodeRaw(frame: decodableFrame)
} else {
return nil
@ -523,8 +547,7 @@ public final class SoftwareAudioSource {
}
while true {
let (decodableFrame, _) = self.readDecodableFrame()
if let decodableFrame = decodableFrame {
if let decodableFrame = self.readDecodableFrame() {
if audioStream.decoder.send(frame: decodableFrame) {
if let result = audioStream.decoder.decode() {
return result.sampleBuffer
@ -541,8 +564,7 @@ public final class SoftwareAudioSource {
return nil
}
let (decodableFrame, _) = self.readDecodableFrame()
if let decodableFrame = decodableFrame {
if let decodableFrame = self.readDecodableFrame() {
return (decodableFrame.copyPacketData(), Int(decodableFrame.packet.duration))
} else {
return nil
@ -557,7 +579,45 @@ public final class SoftwareAudioSource {
}
}
public struct MediaStreamFocusedPart {
public let seekStreamIndex: Int
public let startPts: CMTime
public let endPts: CMTime
public init(seekStreamIndex: Int, startPts: CMTime, endPts: CMTime) {
self.seekStreamIndex = seekStreamIndex
self.startPts = startPts
self.endPts = endPts
}
}
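Note that the readers below compare the raw packet.pts against focusedPart.endPts.value, so startPts and endPts must be expressed in the stream's own timebase. A hedged sketch of normalizing a boundary before constructing the focused part (values are hypothetical):

let streamTimescale: CMTimeScale = 90000
let sixSeconds = CMTime(value: 6, timescale: 1)
let focusedPart = MediaStreamFocusedPart(
    seekStreamIndex: 1,
    startPts: CMTime(value: 0, timescale: streamTimescale),
    endPts: CMTimeConvertScale(sixSeconds, timescale: streamTimescale, method: .default)
)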
private func SoftwareVideoReader_readPacketCallback(userData: UnsafeMutableRawPointer?, buffer: UnsafeMutablePointer<UInt8>?, bufferSize: Int32) -> Int32 {
let context = Unmanaged<SoftwareVideoReader>.fromOpaque(userData!).takeUnretainedValue()
if let fd = context.fd {
let result = read(fd, buffer, Int(bufferSize))
if result == 0 {
return FFMPEG_CONSTANT_AVERROR_EOF
}
return Int32(result)
}
return FFMPEG_CONSTANT_AVERROR_EOF
}
private func SoftwareVideoReader_seekCallback(userData: UnsafeMutableRawPointer?, offset: Int64, whence: Int32) -> Int64 {
let context = Unmanaged<SoftwareVideoReader>.fromOpaque(userData!).takeUnretainedValue()
if let fd = context.fd {
if (whence & FFMPEG_AVSEEK_SIZE) != 0 {
return Int64(context.size)
} else {
lseek(fd, off_t(offset), SEEK_SET)
return offset
}
}
return 0
}
final class SoftwareVideoReader {
private let focusedPart: MediaStreamFocusedPart?
private var readingError = false
private var videoStream: SoftwareVideoStream?
private var avIoContext: FFMpegAVIOContext?
@ -576,9 +636,11 @@ final class SoftwareVideoReader {
return self.videoStream != nil
}
public init(path: String, hintVP9: Bool, passthroughDecoder: Bool = false) {
public init(path: String, hintVP9: Bool, passthroughDecoder: Bool = false, focusedPart: MediaStreamFocusedPart?) {
let _ = FFMpegMediaFrameSourceContextHelpers.registerFFMpegGlobals
self.focusedPart = focusedPart
var s = stat()
stat(path, &s)
self.size = Int32(s.st_size)
@ -598,7 +660,7 @@ final class SoftwareVideoReader {
}
let ioBufferSize = 64 * 1024
let avIoContext = FFMpegAVIOContext(bufferSize: Int32(ioBufferSize), opaqueContext: Unmanaged.passUnretained(self).toOpaque(), readPacket: readPacketCallback, writePacket: nil, seek: seekCallback, isSeekable: true)
let avIoContext = FFMpegAVIOContext(bufferSize: Int32(ioBufferSize), opaqueContext: Unmanaged.passUnretained(self).toOpaque(), readPacket: SoftwareVideoReader_readPacketCallback, writePacket: nil, seek: SoftwareVideoReader_seekCallback, isSeekable: true)
self.avIoContext = avIoContext
avFormatContext.setIO(self.avIoContext!)
@ -675,8 +737,12 @@ final class SoftwareVideoReader {
self.videoStream = videoStream
if let videoStream = self.videoStream {
avFormatContext.seekFrame(forStreamIndex: Int32(videoStream.index), pts: 0, positionOnKeyframe: true)
if let focusedPart = self.focusedPart {
avFormatContext.seekFrame(forStreamIndex: Int32(focusedPart.seekStreamIndex), pts: focusedPart.startPts.value, positionOnKeyframe: true)
} else {
if let videoStream = self.videoStream {
avFormatContext.seekFrame(forStreamIndex: Int32(videoStream.index), pts: 0, positionOnKeyframe: true)
}
}
}
@ -709,6 +775,10 @@ final class SoftwareVideoReader {
if let videoStream = self.videoStream, Int(packet.streamIndex) == videoStream.index {
let packetPts = packet.pts
if let focusedPart = self.focusedPart, packetPts >= focusedPart.endPts.value {
self.hasReadToEnd = true
}
let pts = CMTimeMake(value: packetPts, timescale: videoStream.timebase.timescale)
let dts = CMTimeMake(value: packet.dts, timescale: videoStream.timebase.timescale)
@ -784,8 +854,11 @@ final class SoftwareVideoReader {
public final class FFMpegMediaInfo {
public struct Info {
public let index: Int
public let timescale: CMTimeScale
public let startTime: CMTime
public let duration: CMTime
public let fps: CMTime
public let codecName: String?
}
@ -863,7 +936,7 @@ public func extractFFMpegMediaInfo(path: String) -> FFMpegMediaInfo? {
var streamInfos: [(isVideo: Bool, info: FFMpegMediaInfo.Info)] = []
for typeIndex in 0 ..< 1 {
for typeIndex in 0 ..< 2 {
let isVideo = typeIndex == 0
for streamIndexNumber in avFormatContext.streamIndices(for: isVideo ? FFMpegAVFormatStreamTypeVideo : FFMpegAVFormatStreamTypeAudio) {
@ -873,7 +946,7 @@ public func extractFFMpegMediaInfo(path: String) -> FFMpegMediaInfo? {
}
let fpsAndTimebase = avFormatContext.fpsAndTimebase(forStreamIndex: streamIndex, defaultTimeBase: CMTimeMake(value: 1, timescale: 40000))
let (_, timebase) = (fpsAndTimebase.fps, fpsAndTimebase.timebase)
let (fps, timebase) = (fpsAndTimebase.fps, fpsAndTimebase.timebase)
let startTime: CMTime
let rawStartTime = avFormatContext.startTime(atStreamIndex: streamIndex)
@ -895,9 +968,20 @@ public func extractFFMpegMediaInfo(path: String) -> FFMpegMediaInfo? {
codecName = "hevc"
} else if codecId == FFMpegCodecIdAV1 {
codecName = "av1"
} else if codecId == FFMpegCodecIdVP9 {
codecName = "vp9"
} else if codecId == FFMpegCodecIdVP8 {
codecName = "vp8"
}
streamInfos.append((isVideo: isVideo, info: FFMpegMediaInfo.Info(startTime: startTime, duration: duration, codecName: codecName)))
streamInfos.append((isVideo: isVideo, info: FFMpegMediaInfo.Info(
index: Int(streamIndex),
timescale: timebase.timescale,
startTime: startTime,
duration: duration,
fps: fps,
codecName: codecName
)))
}
}

View File

@ -1076,7 +1076,7 @@ final class HLSVideoJSNativeContentNode: ASDisplayNode, UniversalVideoContentNod
var onSeeked: (() -> Void)?
self.player = ChunkMediaPlayerV2(
audioSessionManager: audioSessionManager,
partsState: self.chunkPlayerPartsState.get(),
source: .externalParts(self.chunkPlayerPartsState.get()),
video: true,
enableSound: self.enableSound,
baseRate: baseRate,
@ -1085,18 +1085,6 @@ final class HLSVideoJSNativeContentNode: ASDisplayNode, UniversalVideoContentNod
},
playerNode: self.playerNode
)
/*self.player = ChunkMediaPlayerImpl(
postbox: postbox,
audioSessionManager: audioSessionManager,
partsState: self.chunkPlayerPartsState.get(),
video: true,
enableSound: self.enableSound,
baseRate: baseRate,
onSeeked: {
onSeeked?()
},
playerNode: self.playerNode
)*/
super.init()
@ -1843,7 +1831,7 @@ private final class SourceBuffer {
let item = ChunkMediaPlayerPart(
startTime: fragmentInfo.startTime.seconds,
endTime: fragmentInfo.startTime.seconds + fragmentInfo.duration.seconds,
file: tempFile,
content: .tempFile(ChunkMediaPlayerPart.Content.TempFile(file: tempFile)),
codecName: videoCodecName
)
self.items.append(item)

View File

@ -146,6 +146,137 @@ public final class NativeVideoContent: UniversalVideoContent {
}
}
private enum PlayerImpl {
case legacy(MediaPlayer)
case chunked(ChunkMediaPlayerV2)
var actionAtEnd: MediaPlayerActionAtEnd {
get {
switch self {
case let .legacy(player):
return player.actionAtEnd
case let .chunked(player):
return player.actionAtEnd
}
} set(value) {
switch self {
case let .legacy(player):
player.actionAtEnd = value
case let .chunked(player):
player.actionAtEnd = value
}
}
}
var status: Signal<MediaPlayerStatus, NoError> {
switch self {
case let .legacy(player):
return player.status
case let .chunked(player):
return player.status
}
}
func play() {
switch self {
case let .legacy(player):
player.play()
case let .chunked(player):
player.play()
}
}
func pause() {
switch self {
case let .legacy(player):
player.pause()
case let .chunked(player):
player.pause()
}
}
func togglePlayPause(faded: Bool = false) {
switch self {
case let .legacy(player):
player.togglePlayPause(faded: faded)
case let .chunked(player):
player.togglePlayPause(faded: faded)
}
}
func playOnceWithSound(playAndRecord: Bool, seek: MediaPlayerSeek = .start) {
switch self {
case let .legacy(player):
player.playOnceWithSound(playAndRecord: playAndRecord, seek: seek)
case let .chunked(player):
player.playOnceWithSound(playAndRecord: playAndRecord, seek: seek)
}
}
func continueWithOverridingAmbientMode(isAmbient: Bool) {
switch self {
case let .legacy(player):
player.continueWithOverridingAmbientMode(isAmbient: isAmbient)
case let .chunked(player):
player.continueWithOverridingAmbientMode(isAmbient: isAmbient)
}
}
func continuePlayingWithoutSound(seek: MediaPlayerSeek = .start) {
switch self {
case let .legacy(player):
player.continuePlayingWithoutSound(seek: seek)
case let .chunked(player):
player.continuePlayingWithoutSound(seek: seek)
}
}
func seek(timestamp: Double, play: Bool? = nil) {
switch self {
case let .legacy(player):
player.seek(timestamp: timestamp, play: play)
case let .chunked(player):
player.seek(timestamp: timestamp, play: play)
}
}
func setForceAudioToSpeaker(_ value: Bool) {
switch self {
case let .legacy(player):
player.setForceAudioToSpeaker(value)
case let .chunked(player):
player.setForceAudioToSpeaker(value)
}
}
func setSoundMuted(soundMuted: Bool) {
switch self {
case let .legacy(player):
player.setSoundMuted(soundMuted: soundMuted)
case let .chunked(player):
player.setSoundMuted(soundMuted: soundMuted)
}
}
func setBaseRate(_ baseRate: Double) {
switch self {
case let .legacy(player):
player.setBaseRate(baseRate)
case let .chunked(player):
player.setBaseRate(baseRate)
}
}
func setContinuePlayingWithoutSoundOnLostAudioSession(_ value: Bool) {
switch self {
case let .legacy(player):
player.setContinuePlayingWithoutSoundOnLostAudioSession(value)
case let .chunked(player):
player.setContinuePlayingWithoutSoundOnLostAudioSession(value)
}
}
}
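The wrapper keeps every call site source-compatible during the migration: NativeVideoContentNode can keep calling self.player.seek(timestamp:play:) and the rest of the shared MediaPlayer surface without branching on which backend is active.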
private final class NativeVideoContentNode: ASDisplayNode, UniversalVideoContentNode {
private let postbox: Postbox
private let userLocation: MediaResourceUserLocation
@ -165,7 +296,7 @@ private final class NativeVideoContentNode: ASDisplayNode, UniversalVideoContent
private let continuePlayingWithoutSoundOnLostAudioSession: Bool
private let displayImage: Bool
private var player: MediaPlayer
private var player: PlayerImpl
private var thumbnailPlayer: MediaPlayer?
private let imageNode: TransformImageNode
private let playerNode: MediaPlayerNode
@ -252,7 +383,57 @@ private final class NativeVideoContentNode: ASDisplayNode, UniversalVideoContent
let selectedFile = fileReference.media
self.player = MediaPlayer(audioSessionManager: audioSessionManager, postbox: postbox, userLocation: userLocation, userContentType: userContentType, resourceReference: fileReference.resourceReference(selectedFile.resource), tempFilePath: tempFilePath, limitedFileRange: limitedFileRange, streamable: streamVideo, video: true, preferSoftwareDecoding: false, playAutomatically: false, enableSound: enableSound, baseRate: baseRate, fetchAutomatically: fetchAutomatically, soundMuted: soundMuted, ambient: beginWithAmbientSound, mixWithOthers: mixWithOthers, continuePlayingWithoutSoundOnLostAudioSession: continuePlayingWithoutSoundOnLostAudioSession, storeAfterDownload: storeAfterDownload, isAudioVideoMessage: isAudioVideoMessage)
self.playerNode = MediaPlayerNode(backgroundThread: false, captureProtected: captureProtected)
if !"".isEmpty {
let mediaPlayer = MediaPlayer(
audioSessionManager: audioSessionManager,
postbox: postbox,
userLocation: userLocation,
userContentType: userContentType,
resourceReference: fileReference.resourceReference(selectedFile.resource),
tempFilePath: tempFilePath,
limitedFileRange: limitedFileRange,
streamable: streamVideo,
video: true,
preferSoftwareDecoding: false,
playAutomatically: false,
enableSound: enableSound,
baseRate: baseRate,
fetchAutomatically: fetchAutomatically,
soundMuted: soundMuted,
ambient: beginWithAmbientSound,
mixWithOthers: mixWithOthers,
continuePlayingWithoutSoundOnLostAudioSession: continuePlayingWithoutSoundOnLostAudioSession,
storeAfterDownload: storeAfterDownload,
isAudioVideoMessage: isAudioVideoMessage
)
self.player = .legacy(mediaPlayer)
mediaPlayer.attachPlayerNode(self.playerNode)
} else {
let mediaPlayer = ChunkMediaPlayerV2(
audioSessionManager: audioSessionManager,
source: .directFetch(ChunkMediaPlayerV2.SourceDescription.ResourceDescription(
postbox: postbox,
reference: fileReference.resourceReference(selectedFile.resource),
userLocation: userLocation,
userContentType: userContentType,
statsCategory: statsCategoryForFileWithAttributes(fileReference.media.attributes),
fetchAutomatically: fetchAutomatically
)),
video: true,
playAutomatically: false,
enableSound: enableSound,
baseRate: baseRate,
soundMuted: soundMuted,
ambient: beginWithAmbientSound,
mixWithOthers: mixWithOthers,
continuePlayingWithoutSoundOnLostAudioSession: continuePlayingWithoutSoundOnLostAudioSession,
isAudioVideoMessage: isAudioVideoMessage,
playerNode: self.playerNode
)
self.player = .chunked(mediaPlayer)
}
var actionAtEndImpl: (() -> Void)?
if enableSound && !loopVideo {
@ -264,8 +445,6 @@ private final class NativeVideoContentNode: ASDisplayNode, UniversalVideoContent
actionAtEndImpl?()
})
}
self.playerNode = MediaPlayerNode(backgroundThread: false, captureProtected: captureProtected)
self.player.attachPlayerNode(self.playerNode)
self.dimensions = fileReference.media.dimensions?.cgSize
if let dimensions = self.dimensions {

View File

@ -12,7 +12,12 @@ CROSSFILE=""
if [ "$ARCH" = "arm64" ]; then
CROSSFILE="../package/crossfiles/arm64-iPhoneOS.meson"
elif [ "$ARCH" = "sim_arm64" ]; then
CROSSFILE="../../arm64-iPhoneSimulator.meson"
TARGET_CROSSFILE="$BUILD_DIR/dav1d/package/crossfiles/arm64-iPhoneSimulator-custom.meson"
# Remove any stale copy at the actual target path before regenerating it.
rm -f "$TARGET_CROSSFILE"
cp "$BUILD_DIR/arm64-iPhoneSimulator.meson" "$TARGET_CROSSFILE"
custom_xcode_path="$(xcode-select -p)/"
sed -i '' "s|/Applications/Xcode.app/Contents/Developer/|$custom_xcode_path|g" "$TARGET_CROSSFILE"
CROSSFILE="../package/crossfiles/arm64-iPhoneSimulator-custom.meson"
else
echo "Unsupported architecture $ARCH"
exit 1
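The sim_arm64 branch clones the stock arm64-iPhoneSimulator crossfile and rewrites its hardcoded /Applications/Xcode.app/Contents/Developer/ prefix to the local output of xcode-select -p, so the dav1d build works with whichever Xcode installation is actually selected.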